Compare commits

..

3 Commits

Author     SHA1        Message                      Date
martabal   59300d2097  feat: preload textual model  2024-09-25 18:22:54 +02:00
martabal   d34d631dd4  feat: preload textual model  2024-09-25 17:39:55 +02:00
martabal   708a53a1eb  feat: preload textual model  2024-09-16 17:53:43 +02:00
3509 changed files with 279374 additions and 378501 deletions


@@ -1,67 +0,0 @@
{
"name": "Immich - Backend, Frontend and ML",
"service": "immich-server",
"runServices": [
"immich-server",
"redis",
"database",
"immich-machine-learning"
],
"dockerComposeFile": [
"../docker/docker-compose.dev.yml",
"./server/container-compose-overrides.yml"
],
"customizations": {
"vscode": {
"extensions": [
"dbaeumer.vscode-eslint",
"esbenp.prettier-vscode",
"svelte.svelte-vscode",
"ms-vscode-remote.remote-containers",
"foxundermoon.shell-format",
"timonwong.shellcheck",
"rvest.vs-code-prettier-eslint",
"bluebrown.yamlfmt",
"vkrishna04.cspell-sync",
"vitest.explorer",
"ms-playwright.playwright",
"ms-azuretools.vscode-docker"
]
}
},
"forwardPorts": [3000, 9231, 9230, 2283],
"portsAttributes": {
"3000": {
"label": "Immich - Frontend HTTP",
"description": "The frontend of the Immich project",
"onAutoForward": "openBrowserOnce"
},
"2283": {
"label": "Immich - API Server - HTTP",
"description": "The API server of the Immich project"
},
"9231": {
"label": "Immich - API Server - DEBUG",
"description": "The API server of the Immich project"
},
"9230": {
"label": "Immich - Workers - DEBUG",
"description": "The workers of the Immich project"
}
},
"overrideCommand": true,
"workspaceFolder": "/workspaces/immich",
"remoteUser": "node",
"userEnvProbe": "loginInteractiveShell",
"remoteEnv": {
// The location where your uploaded files are stored
"UPLOAD_LOCATION": "${localEnv:UPLOAD_LOCATION:./library}",
// Connection secret for postgres. You should change it to a random password
// Please use only the characters `A-Za-z0-9`, without special characters or spaces
"DB_PASSWORD": "${localEnv:DB_PASSWORD:postgres}",
// The database username
"DB_USERNAME": "${localEnv:DB_USERNAME:postgres}",
// The database name
"DB_DATABASE_NAME": "${localEnv:DB_DATABASE_NAME:immich}"
}
}
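A note on the remoteEnv block above: the `${localEnv:VAR:default}` syntax resolves against the host environment when the container is created, falling back to the literal after the second colon when the variable is unset. The same fallback logic in plain shell, as a sketch using the names from this file:

#!/bin/bash
# Mirrors the devcontainer.json defaults: host value if set, literal fallback otherwise.
UPLOAD_LOCATION="${UPLOAD_LOCATION:-./library}"
DB_PASSWORD="${DB_PASSWORD:-postgres}"
DB_USERNAME="${DB_USERNAME:-postgres}"
DB_DATABASE_NAME="${DB_DATABASE_NAME:-immich}"
echo "uploads: $UPLOAD_LOCATION, db: $DB_USERNAME@$DB_DATABASE_NAME"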


@@ -1,34 +0,0 @@
services:
immich-server:
build:
target: dev-container-mobile
environment:
- IMMICH_SERVER_URL=http://127.0.0.1:2283/
volumes: !override # bind mount host to /workspaces/immich
- ..:/workspaces/immich
- cli_node_modules:/workspaces/immich/cli/node_modules
- e2e_node_modules:/workspaces/immich/e2e/node_modules
- open_api_node_modules:/workspaces/immich/open-api/typescript-sdk/node_modules
- server_node_modules:/workspaces/immich/server/node_modules
- web_node_modules:/workspaces/immich/web/node_modules
- ${UPLOAD_LOCATION}/photos:/data
- ${UPLOAD_LOCATION}/photos/upload:/data/upload
- /etc/localtime:/etc/localtime:ro
database:
volumes:
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
volumes:
# Node modules for each service to avoid conflicts and ensure consistent dependencies
cli_node_modules:
e2e_node_modules:
open_api_node_modules:
server_node_modules:
web_node_modules:
# UPLOAD_LOCATION must be set to an absolute path or vol-upload
vol-upload:
# DB_DATA_LOCATION must be set to an absolute path or vol-database
vol-database:
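The `!override` tag above is a Compose merge directive: when several files are combined, list-valued keys normally concatenate, while `!override` replaces the base list entirely (and `!reset`, used in the overrides further down, clears an inherited key). A quick way to inspect the merged result, assuming the repo layout referenced by the devcontainer.json above:

#!/bin/bash
# Print the effective merged configuration; paths assume the layout above.
docker compose \
  -f docker/docker-compose.dev.yml \
  -f .devcontainer/server/container-compose-overrides.yml \
  config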


@@ -1,52 +0,0 @@
{
"name": "Immich - Mobile",
"service": "immich-server",
"runServices": [
"immich-server",
"redis",
"database",
"immich-machine-learning"
],
"dockerComposeFile": [
"../../docker/docker-compose.dev.yml",
"./container-compose-overrides.yml"
],
"customizations": {
"vscode": {
"extensions": [
"Dart-Code.dart-code",
"Dart-Code.flutter",
"dcmdev.dcm-vscode-extension",
"esbenp.prettier-vscode",
"dbaeumer.vscode-eslint",
"esbenp.prettier-vscode",
"svelte.svelte-vscode",
"ms-vscode-remote.remote-containers",
"foxundermoon.shell-format",
"timonwong.shellcheck",
"rvest.vs-code-prettier-eslint",
"bluebrown.yamlfmt",
"vkrishna04.cspell-sync",
"vitest.explorer",
"ms-playwright.playwright",
"ms-azuretools.vscode-docker"
]
}
},
"forwardPorts": [],
"overrideCommand": true,
"workspaceFolder": "/workspaces/immich",
"remoteUser": "node",
"userEnvProbe": "loginInteractiveShell",
"remoteEnv": {
// The location where your uploaded files are stored
"UPLOAD_LOCATION": "${localEnv:UPLOAD_LOCATION:./Library}",
// Connection secret for postgres. You should change it to a random password
// Please use only the characters `A-Za-z0-9`, without special characters or spaces
"DB_PASSWORD": "${localEnv:DB_PASSWORD:postgres}",
// The database username
"DB_USERNAME": "${localEnv:DB_USERNAME:postgres}",
// The database name
"DB_DATABASE_NAME": "${localEnv:DB_DATABASE_NAME:immich}"
}
}


@@ -1,81 +0,0 @@
#!/bin/bash
export IMMICH_PORT="${DEV_SERVER_PORT:-2283}"
export DEV_PORT="${DEV_PORT:-3000}"
# Search for the immich directory inside the workspace.
# /workspaces/immich is the bind mount, but other directories can be mounted if running
# Devcontainer: Clone [repository|pull request] in container volume
WORKSPACES_DIR="/workspaces"
IMMICH_DIR="$WORKSPACES_DIR/immich"
IMMICH_DEVCONTAINER_LOG="$HOME/immich-devcontainer.log"
log() {
# Display command on console, log with timestamp to file
echo "$*"
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*" >>"$IMMICH_DEVCONTAINER_LOG"
}
run_cmd() {
# Ensure log directory exists
mkdir -p "$(dirname "$IMMICH_DEVCONTAINER_LOG")"
log "$@"
# Execute command: display normally on console, log with timestamps to file
"$@" 2>&1 | tee >(while IFS= read -r line; do
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $line" >>"$IMMICH_DEVCONTAINER_LOG"
done)
# Preserve exit status
return "${PIPESTATUS[0]}"
}
# Find directories excluding /workspaces/immich
mapfile -t other_dirs < <(find "$WORKSPACES_DIR" -mindepth 1 -maxdepth 1 -type d ! -path "$IMMICH_DIR" ! -name ".*")
if [ ${#other_dirs[@]} -gt 1 ]; then
log "Error: More than one directory found in $WORKSPACES_DIR other than $IMMICH_DIR."
exit 1
elif [ ${#other_dirs[@]} -eq 1 ]; then
export IMMICH_WORKSPACE="${other_dirs[0]}"
else
export IMMICH_WORKSPACE="$IMMICH_DIR"
fi
log "Found immich workspace in $IMMICH_WORKSPACE"
log ""
fix_permissions() {
log "Fixing permissions for ${IMMICH_WORKSPACE}"
# Change ownership for directories that exist
for dir in "${IMMICH_WORKSPACE}/.vscode" \
"${IMMICH_WORKSPACE}/server/upload" \
"${IMMICH_WORKSPACE}/.pnpm-store" \
"${IMMICH_WORKSPACE}/.github/node_modules" \
"${IMMICH_WORKSPACE}/cli/node_modules" \
"${IMMICH_WORKSPACE}/e2e/node_modules" \
"${IMMICH_WORKSPACE}/open-api/typescript-sdk/node_modules" \
"${IMMICH_WORKSPACE}/server/node_modules" \
"${IMMICH_WORKSPACE}/server/dist" \
"${IMMICH_WORKSPACE}/web/node_modules" \
"${IMMICH_WORKSPACE}/web/dist"; do
if [ -d "$dir" ]; then
run_cmd sudo chown node -R "$dir"
fi
done
log ""
}
install_dependencies() {
log "Installing dependencies"
(
cd "${IMMICH_WORKSPACE}" || exit 1
export CI=1 FROZEN=1 OFFLINE=1
run_cmd make setup-web-dev setup-server-dev
)
log ""
}
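The run_cmd helper is the interesting part of this script: tee into a process substitution gives the console the raw stream while the file copy gets timestamps, and `${PIPESTATUS[0]}` returns the wrapped command's exit code instead of tee's. A stripped-down sketch of the same pattern, with an illustrative log path:

#!/bin/bash
LOG="$HOME/example.log" # illustrative path
run_logged() {
    # Show output live; append a timestamped copy to $LOG.
    "$@" 2>&1 | tee >(while IFS= read -r line; do
        echo "[$(date '+%Y-%m-%d %H:%M:%S')] $line" >>"$LOG"
    done)
    # $? alone would report tee's status; PIPESTATUS[0] is the command's.
    return "${PIPESTATUS[0]}"
}
run_logged echo "hello"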


@@ -1,34 +0,0 @@
services:
immich-server:
build:
target: dev-container-server
env_file: !reset []
hostname: immich-dev
environment:
- IMMICH_SERVER_URL=http://127.0.0.1:2283/
volumes: !override
- ..:/workspaces/immich
- ${UPLOAD_LOCATION:-upload1-devcontainer-volume}${UPLOAD_LOCATION:+/photos}:/data
- ${UPLOAD_LOCATION:-upload2-devcontainer-volume}${UPLOAD_LOCATION:+/photos/upload}:/data/upload
- /etc/localtime:/etc/localtime:ro
immich-web:
env_file: !reset []
immich-machine-learning:
env_file: !reset []
database:
env_file: !reset []
environment: !override
POSTGRES_PASSWORD: ${DB_PASSWORD-postgres}
POSTGRES_USER: ${DB_USERNAME-postgres}
POSTGRES_DB: ${DB_DATABASE_NAME-immich}
POSTGRES_INITDB_ARGS: '--data-checksums'
POSTGRES_HOST_AUTH_METHOD: md5
volumes:
- ${UPLOAD_LOCATION:-postgres-devcontainer-volume}${UPLOAD_LOCATION:+/postgres}:/var/lib/postgresql/data
redis:
env_file: !reset []
volumes:
# Named volumes used as fallbacks when UPLOAD_LOCATION is not set to an absolute path
upload1-devcontainer-volume:
upload2-devcontainer-volume:
postgres-devcontainer-volume:
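The volume entries above lean on two shell expansions: `${UPLOAD_LOCATION:-upload1-devcontainer-volume}` falls back to a named volume when the variable is unset, and `${UPLOAD_LOCATION:+/photos}` appends `/photos` only when it is set, so a single line covers both the bind-mount and named-volume cases. Demonstrated in plain shell:

#!/bin/bash
# Unset: fallback volume name, and the :+ suffix expands to nothing.
unset UPLOAD_LOCATION
echo "${UPLOAD_LOCATION:-upload1-devcontainer-volume}${UPLOAD_LOCATION:+/photos}"
# prints: upload1-devcontainer-volume

# Set: the fallback is skipped and /photos is appended.
UPLOAD_LOCATION=/mnt/media
echo "${UPLOAD_LOCATION:-upload1-devcontainer-volume}${UPLOAD_LOCATION:+/photos}"
# prints: /mnt/media/photos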


@@ -1,22 +0,0 @@
#!/bin/bash
# shellcheck source=common.sh
# shellcheck disable=SC1091
source /immich-devcontainer/container-common.sh
log "Preparing Immich Nest API Server"
log ""
export CI=1
run_cmd pnpm --filter immich install
log "Starting Nest API Server"
log ""
cd "${IMMICH_WORKSPACE}/server" || (
log "Immich workspace not found"jj
exit 1
)
while true; do
run_cmd pnpm --filter immich exec nest start --debug "0.0.0.0:9230" --watch
log "Nest API Server crashed with exit code $?. Respawning in 3s ..."
sleep 3
done
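One subtlety in the `cd ... || ( ... )` guard above: the parentheses open a subshell, so the `exit 1` ends only that subshell and the script would carry on in the wrong directory. A sketch of the more defensive form, with a brace group in place of the subshell (same log helper assumed):

#!/bin/bash
cd "${IMMICH_WORKSPACE}/server" || {
    # A brace group runs in the current shell, so exit 1 stops the whole script.
    log "Immich workspace not found"
    exit 1
}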


@@ -1,29 +0,0 @@
#!/bin/bash
# shellcheck source=common.sh
# shellcheck disable=SC1091
source /immich-devcontainer/container-common.sh
export CI=1
log "Preparing Immich Web Frontend"
log ""
run_cmd pnpm --filter @immich/sdk install
run_cmd pnpm --filter @immich/sdk build
run_cmd pnpm --filter immich-web install
log "Starting Immich Web Frontend"
log ""
cd "${IMMICH_WORKSPACE}/web" || (
log "Immich Workspace not found"
exit 1
)
until curl --output /dev/null --silent --head --fail "http://127.0.0.1:${IMMICH_PORT}/api/server/config"; do
log "Waiting for api server..."
sleep 1
done
while true; do
run_cmd pnpm --filter immich-web exec vite dev --host 0.0.0.0 --port "${DEV_PORT}"
log "Web crashed with exit code $?. Respawning in 3s ..."
sleep 3
done


@@ -1,17 +0,0 @@
#!/bin/bash
# shellcheck source=common.sh
# shellcheck disable=SC1091
source /immich-devcontainer/container-common.sh
log "Setting up Immich dev container..."
fix_permissions
log "Setup complete, please wait while backend and frontend services automatically start"
log
log "If necessary, the services may be manually started using"
log
log "$ /immich-devcontainer/container-start-backend.sh"
log "$ /immich-devcontainer/container-start-frontend.sh"
log
log "From different terminal windows, as these scripts automatically restart the server"
log "on error, and will continuously run in a loop"


@@ -1,41 +1,33 @@
 .vscode/
 .github/
 .git/
-.env*
-*.log
-*.tmp
-*.temp
-**/Dockerfile
-**/node_modules/
-**/.pnpm-store/
-**/dist/
-**/coverage/
-**/build/
 design/
 docker/
 !docker/scripts
 docs/
+!docs/package.json
+!docs/package-lock.json
 e2e/
+!e2e/package.json
+!e2e/package-lock.json
 fastlane/
 machine-learning/
 misc/
 mobile/
-open-api/typescript-sdk/build/
-!open-api/typescript-sdk/package.json
-!open-api/typescript-sdk/package-lock.json
+cli/coverage/
+cli/dist/
+cli/node_modules/
+open-api/typescript-sdk/build/
+open-api/typescript-sdk/node_modules/
+server/coverage/
+server/node_modules/
 server/upload/
 server/src/queries
+server/dist/
 server/www/
+web/node_modules/
+web/coverage/
 web/.svelte-kit
+web/build/
+web/.env

.gitattributes vendored (12 lines changed)

@@ -6,18 +6,6 @@ mobile/openapi/**/*.dart linguist-generated=true
 mobile/lib/**/*.g.dart -diff -merge
 mobile/lib/**/*.g.dart linguist-generated=true
-mobile/lib/**/*.drift.dart -diff -merge
-mobile/lib/**/*.drift.dart linguist-generated=true
-mobile/drift_schemas/main/drift_schema_*.json -diff -merge
-mobile/drift_schemas/main/drift_schema_*.json linguist-generated=true
-mobile/lib/infrastructure/repositories/db.repository.steps.dart -diff -merge
-mobile/lib/infrastructure/repositories/db.repository.steps.dart linguist-generated=true
-mobile/test/drift/main/generated/** -diff -merge
-mobile/test/drift/main/generated/** linguist-generated=true
 open-api/typescript-sdk/fetch-client.ts -diff -merge
 open-api/typescript-sdk/fetch-client.ts linguist-generated=true

.github/.nvmrc vendored (1 line changed)

@@ -1 +0,0 @@
22.18.0


@@ -1,4 +0,0 @@
# Ignore files for PNPM, NPM and YARN
pnpm-lock.yaml
package-lock.json
yarn.lock


@@ -1,5 +1,5 @@
-title: '[Feature] feature-name-goes-here'
-labels: ['feature']
+title: "[Feature] feature-name-goes-here"
+labels: ["feature"]
 body:
   - type: markdown
@@ -11,9 +11,10 @@ body:
   - type: checkboxes
     attributes:
-      label: I have searched the existing feature requests, both open and closed, to make sure this is not a duplicate request.
+      label: I have searched the existing feature requests to make sure this is not a duplicate request.
       options:
-        - label: 'Yes'
+        - label: "Yes"
+          required: true
   - type: textarea
     id: feature

.github/FUNDING.yml vendored (2 lines changed)

@@ -1 +1 @@
-custom: ['https://buy.immich.app', 'https://immich.store']
+custom: ['https://buy.immich.app']


@@ -1,12 +1,6 @@
 name: Report an issue with Immich
 description: Report an issue with Immich
 body:
-  - type: checkboxes
-    attributes:
-      label: I have searched the existing issues, both open and closed, to make sure this is not a duplicate report.
-      options:
-        - label: 'Yes'
   - type: markdown
     attributes:
       value: |
@@ -64,11 +58,6 @@ body:
           - label: Web
           - label: Mobile
-  - type: input
-    attributes:
-      label: Device make and model
-      placeholder: Samsung S25 Android 16
-
   - type: textarea
     validations:
       required: true
@@ -88,7 +77,7 @@ body:
     id: repro
     attributes:
       label: Reproduction steps
-      description: 'How do you trigger this bug? Please walk us through it step by step.'
+      description: "How do you trigger this bug? Please walk us through it step by step."
       value: |
         1.
         2.
@@ -101,13 +90,12 @@ body:
     id: logs
     attributes:
       label: Relevant log output
-      description:
-        Please copy and paste any relevant logs below. (code formatting is
+      description: Please copy and paste any relevant logs below. (code formatting is
         enabled, no need for backticks)
       render: shell
     validations:
       required: false
   - type: textarea
     attributes:
       label: Additional information


@@ -1 +1,2 @@
+blank_issues_enabled: false
 blank_pull_request_template_enabled: false


@@ -0,0 +1,22 @@
## Description
<!--- Describe your changes in detail -->
<!--- Why is this change required? What problem does it solve? -->
<!--- If it fixes an open issue, please link to the issue here. -->
Fixes # (issue)
## How Has This Been Tested?
<!-- Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration -->
- [ ] Test A
- [ ] Test B
## Screenshots (if appropriate):
## Checklist:
- [ ] I have performed a self-review of my own code
- [ ] I have made corresponding changes to the documentation if applicable

.github/dependabot.yml vendored (new file, 7 lines)

@@ -0,0 +1,7 @@
version: 2
updates:
# Maintain dependencies for GitHub Actions
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"


@@ -1,9 +0,0 @@
{
"scripts": {
"format": "prettier --check .",
"format:fix": "prettier --write ."
},
"devDependencies": {
"prettier": "^3.5.3"
}
}


@@ -1,36 +0,0 @@
## Description
<!--- Describe your changes in detail -->
<!--- Why is this change required? What problem does it solve? -->
<!--- If it fixes an open issue, please link to the issue here. -->
Fixes # (issue)
## How Has This Been Tested?
<!-- Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration -->
- [ ] Test A
- [ ] Test B
<details><summary><h2>Screenshots (if appropriate)</h2></summary>
<!-- Images go below this line. -->
</details>
<!-- API endpoint changes (if relevant)
## API Changes
The `/api/something` endpoint is now `/api/something-else`
-->
## Checklist:
- [ ] I have performed a self-review of my own code
- [ ] I have made corresponding changes to the documentation if applicable
- [ ] I have no unrelated changes in the PR.
- [ ] I have confirmed that any new dependencies are strictly necessary.
- [ ] I have written tests for new code (if applicable)
- [ ] I have followed naming conventions/patterns in the surrounding code
- [ ] All code in `src/services/` uses repositories implementations for database calls, filesystem operations, etc.
- [ ] All code in `src/repositories/` is pretty basic/simple and does not have any immich specific logic (that belongs in `src/services/`)

.github/release.yml vendored (62 lines changed)

@@ -1,33 +1,29 @@
 changelog:
   categories:
     - title: 🚨 Breaking Changes
       labels:
         - changelog:breaking-change
-    - title: 🫥 Deprecated Changes
-      labels:
-        - changelog:deprecated
     - title: 🔒 Security
       labels:
         - changelog:security
     - title: 🚀 Features
       labels:
         - changelog:feature
     - title: 🌟 Enhancements
       labels:
         - changelog:enhancement
     - title: 🐛 Bug fixes
       labels:
         - changelog:bugfix
     - title: 📚 Documentation
       labels:
         - changelog:documentation
     - title: 🌐 Translations
       labels:
         - changelog:translation


@@ -7,15 +7,6 @@ on:
       ref:
         required: false
         type: string
-    secrets:
-      KEY_JKS:
-        required: true
-      ALIAS:
-        required: true
-      ANDROID_KEY_PASSWORD:
-        required: true
-      ANDROID_STORE_PASSWORD:
-        required: true
   pull_request:
   push:
     branches: [main]
@@ -24,125 +15,79 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
-permissions: {}
 jobs:
   pre-job:
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     outputs:
       should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
     steps:
       - name: Checkout code
-        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
-        with:
-          persist-credentials: false
+        uses: actions/checkout@v4
       - id: found_paths
-        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
+        uses: dorny/paths-filter@v3
        with:
           filters: |
             mobile:
               - 'mobile/**'
-            workflow:
-              - '.github/workflows/build-mobile.yml'
       - name: Check if we should force jobs to run
         id: should_force
-        run: echo "should_force=${{ steps.found_paths.outputs.workflow == 'true' || github.event_name == 'workflow_call' || github.event_name == 'workflow_dispatch' }}" >> "$GITHUB_OUTPUT"
+        run: echo "should_force=${{ github.event_name == 'workflow_call' || github.event_name == 'workflow_dispatch' }}" >> "$GITHUB_OUTPUT"
   build-sign-android:
     name: Build and sign Android
     needs: pre-job
-    permissions:
-      contents: read
     # Skip when PR from a fork
     if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' && needs.pre-job.outputs.should_run == 'true' }}
-    runs-on: mich
+    runs-on: macos-14
     steps:
-      - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+      - name: Determine ref
+        id: get-ref
+        run: |
+          input_ref="${{ inputs.ref }}"
+          github_ref="${{ github.sha }}"
+          ref="${input_ref:-$github_ref}"
+          echo "ref=$ref" >> $GITHUB_OUTPUT
+      - uses: actions/checkout@v4
         with:
-          ref: ${{ inputs.ref || github.sha }}
-          persist-credentials: false
+          ref: ${{ steps.get-ref.outputs.ref }}
-      - name: Create the Keystore
-        env:
-          KEY_JKS: ${{ secrets.KEY_JKS }}
-        working-directory: ./mobile
-        run: printf "%s" $KEY_JKS | base64 -d > android/key.jks
-      - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
+      - uses: actions/setup-java@v4
         with:
           distribution: 'zulu'
           java-version: '17'
+          cache: 'gradle'
-      - name: Restore Gradle Cache
-        id: cache-gradle-restore
-        uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
-        with:
-          path: |
-            ~/.gradle/caches
-            ~/.gradle/wrapper
-            ~/.android/sdk
-            mobile/android/.gradle
-            mobile/.dart_tool
-          key: build-mobile-gradle-${{ runner.os }}-main
       - name: Setup Flutter SDK
-        uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
+        uses: subosito/flutter-action@v2
         with:
           channel: 'stable'
           flutter-version-file: ./mobile/pubspec.yaml
           cache: true
-      - name: Setup Android SDK
-        uses: android-actions/setup-android@9fc6c4e9069bf8d3d10b2204b1fb8f6ef7065407 # v3.2.2
-        with:
-          packages: ''
+      - name: Create the Keystore
+        env:
+          KEY_JKS: ${{ secrets.KEY_JKS }}
+        working-directory: ./mobile
+        run: echo $KEY_JKS | base64 -d > android/key.jks
       - name: Get Packages
         working-directory: ./mobile
         run: flutter pub get
-      - name: Generate translation file
-        run: dart run easy_localization:generate -S ../i18n && dart run bin/generate_keys.dart
-        working-directory: ./mobile
-      - name: Generate platform APIs
-        run: make pigeon
-        working-directory: ./mobile
       - name: Build Android App Bundle
         working-directory: ./mobile
         env:
           ALIAS: ${{ secrets.ALIAS }}
           ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
           ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
-          IS_MAIN: ${{ github.ref == 'refs/heads/main' }}
         run: |
-          if [[ $IS_MAIN == 'true' ]]; then
-            flutter build apk --release
-            flutter build apk --release --split-per-abi --target-platform android-arm,android-arm64,android-x64
-          else
-            flutter build apk --debug --split-per-abi --target-platform android-arm64
-          fi
+          flutter build apk --release
+          flutter build apk --release --split-per-abi --target-platform android-arm,android-arm64,android-x64
       - name: Publish Android Artifact
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@v4
         with:
           name: release-apk-signed
           path: mobile/build/app/outputs/flutter-apk/*.apk
-      - name: Save Gradle Cache
-        id: cache-gradle-save
-        uses: actions/cache/save@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
-        if: github.ref == 'refs/heads/main'
-        with:
-          path: |
-            ~/.gradle/caches
-            ~/.gradle/wrapper
-            ~/.android/sdk
-            mobile/android/.gradle
-            mobile/.dart_tool
-          key: ${{ steps.cache-gradle-restore.outputs.cache-primary-key }}

@@ -8,38 +8,31 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
-permissions: {}
 jobs:
   cleanup:
     name: Cleanup
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
-      actions: write
     steps:
       - name: Check out code
-        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
-        with:
-          persist-credentials: false
+        uses: actions/checkout@v4
       - name: Cleanup
-        env:
-          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          REF: ${{ github.ref }}
         run: |
           gh extension install actions/gh-actions-cache
           REPO=${{ github.repository }}
+          BRANCH=${{ github.ref }}
           echo "Fetching list of cache keys"
-          cacheKeysForPR=$(gh actions-cache list -R $REPO -B ${REF} -L 100 | cut -f 1 )
+          cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )
           ## Setting this to not fail the workflow while deleting cache keys.
           set +e
           echo "Deleting caches..."
           for cacheKey in $cacheKeysForPR
           do
-            gh actions-cache delete $cacheKey -R "$REPO" -B "${REF}" --confirm
+            gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
           done
           echo "Done"
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
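The cleanup relies on the gh-actions-cache extension, which lists and deletes caches per ref; for a pull request the ref takes the `refs/pull/<n>/merge` form. A manual run against one PR's caches might look like this (repository and PR number are placeholders):

#!/bin/bash
REPO="immich-app/immich"        # placeholder
BRANCH="refs/pull/12345/merge"  # placeholder PR ref
# List up to 100 cache keys for the ref, then delete each one.
gh actions-cache list -R "$REPO" -B "$BRANCH" -L 100 | cut -f 1 |
while read -r key; do
    gh actions-cache delete "$key" -R "$REPO" -B "$BRANCH" --confirm
done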


@@ -6,6 +6,7 @@ on:
       - 'cli/**'
       - '.github/workflows/cli.yml'
   pull_request:
+    branches: [main]
     paths:
       - 'cli/**'
       - '.github/workflows/cli.yml'
@@ -16,41 +17,31 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
-permissions: {}
+permissions:
+  packages: write
 jobs:
   publish:
     name: CLI Publish
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     defaults:
       run:
         working-directory: ./cli
     steps:
-      - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
-        with:
-          persist-credentials: false
-      - name: Setup pnpm
-        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
-      - name: Setup Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+      - uses: actions/checkout@v4
+      # Setup .npmrc file to publish to npm
+      - uses: actions/setup-node@v4
        with:
           node-version-file: './cli/.nvmrc'
           registry-url: 'https://registry.npmjs.org'
-          cache: 'pnpm'
-          cache-dependency-path: '**/pnpm-lock.yaml'
-      - name: Setup typescript-sdk
-        run: pnpm install && pnpm run build
-        working-directory: ./open-api/typescript-sdk
-      - run: pnpm install --frozen-lockfile
-      - run: pnpm build
-      - run: pnpm publish
+      - name: Prepare SDK
+        run: npm ci --prefix ../open-api/typescript-sdk/
+      - name: Build SDK
+        run: npm run build --prefix ../open-api/typescript-sdk/
+      - run: npm ci
+      - run: npm run build
+      - run: npm publish
         if: ${{ github.event_name == 'release' }}
         env:
           NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
@@ -58,25 +49,20 @@ jobs:
   docker:
     name: Docker
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
-      packages: write
     needs: publish
     steps:
       - name: Checkout
-        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
-        with:
-          persist-credentials: false
+        uses: actions/checkout@v4
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
+        uses: docker/setup-qemu-action@v3.2.0
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+        uses: docker/setup-buildx-action@v3.6.1
       - name: Login to GitHub Container Registry
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@v3
         if: ${{ !github.event.pull_request.head.repo.fork }}
         with:
           registry: ghcr.io
@@ -91,7 +77,7 @@ jobs:
       - name: Generate docker image tags
         id: metadata
-        uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
+        uses: docker/metadata-action@v5
         with:
           flavor: |
             latest=false
@@ -102,7 +88,7 @@ jobs:
             type=raw,value=latest,enable=${{ github.event_name == 'release' }}
       - name: Build and push image
-        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
+        uses: docker/build-push-action@v6.7.0
         with:
           file: cli/Dockerfile
           platforms: linux/amd64,linux/arm64


@@ -1,96 +0,0 @@
on:
issues:
types: [opened]
discussion:
types: [created]
name: Close likely duplicates
permissions: {}
jobs:
get_body:
runs-on: ubuntu-latest
env:
EVENT: ${{ toJSON(github.event) }}
outputs:
body: ${{ steps.get_body.outputs.body }}
steps:
- id: get_body
run: |
BODY=$(echo """$EVENT""" | jq -r '.issue // .discussion | .body' | base64 -w 0)
echo "body=$BODY" >> $GITHUB_OUTPUT
get_checkbox_json:
runs-on: ubuntu-latest
needs: get_body
container:
image: yshavit/mdq:0.8.0@sha256:c69224d34224a0043d9a3ee46679ba4a2a25afaac445f293d92afe13cd47fcea
outputs:
json: ${{ steps.get_checkbox.outputs.json }}
steps:
- id: get_checkbox
env:
BODY: ${{ needs.get_body.outputs.body }}
run: |
JSON=$(echo "$BODY" | base64 -d | /mdq --output json '# I have searched | - [?] Yes')
echo "json=$JSON" >> $GITHUB_OUTPUT
close_and_comment:
runs-on: ubuntu-latest
needs: get_checkbox_json
if: ${{ !fromJSON(needs.get_checkbox_json.outputs.json).items[0].list[0].checked }}
permissions:
issues: write
discussions: write
steps:
- name: Close issue
if: ${{ github.event_name == 'issues' }}
env:
GH_TOKEN: ${{ github.token }}
NODE_ID: ${{ github.event.issue.node_id }}
run: |
gh api graphql \
-f issueId="$NODE_ID" \
-f body="This issue has automatically been closed as it is likely a duplicate. We get a lot of duplicate threads each day, which is why we ask you in the template to confirm that you searched for duplicates before opening one. If you're sure this is not a duplicate, please leave a comment and we will reopen the thread if necessary." \
-f query='
mutation CommentAndCloseIssue($issueId: ID!, $body: String!) {
addComment(input: {
subjectId: $issueId,
body: $body
}) {
__typename
}
closeIssue(input: {
issueId: $issueId,
stateReason: DUPLICATE
}) {
__typename
}
}'
- name: Close discussion
if: ${{ github.event_name == 'discussion' && github.event.discussion.category.name == 'Feature Request' }}
env:
GH_TOKEN: ${{ github.token }}
NODE_ID: ${{ github.event.discussion.node_id }}
run: |
gh api graphql \
-f discussionId="$NODE_ID" \
-f body="This discussion has automatically been closed as it is likely a duplicate. We get a lot of duplicate threads each day, which is why we ask you in the template to confirm that you searched for duplicates before opening one. If you're sure this is not a duplicate, please leave a comment and we will reopen the thread if necessary." \
-f query='
mutation CommentAndCloseDiscussion($discussionId: ID!, $body: String!) {
addDiscussionComment(input: {
discussionId: $discussionId,
body: $body
}) {
__typename
}
closeDiscussion(input: {
discussionId: $discussionId,
reason: DUPLICATE
}) {
__typename
}
}'
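The mdq query `'# I have searched | - [?] Yes'` selects the checkbox item under the matching heading, and `--output json` reports whether it is ticked; the gating `if:` then reads `.items[0].list[0].checked` from that JSON. A local approximation of the extraction step, with an invented sample body:

#!/bin/bash
# Sample issue body for illustration; the real one arrives via the event payload.
BODY='# I have searched the existing issues
- [x] Yes'
echo "$BODY" | mdq --output json '# I have searched | - [?] Yes'
# The workflow inspects .items[0].list[0].checked in the resulting JSON.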


@@ -9,14 +9,14 @@
 # the `language` matrix defined below to confirm you have the correct set of
 # supported CodeQL languages.
 #
-name: 'CodeQL'
+name: "CodeQL"
 on:
   push:
-    branches: ['main']
+    branches: [ "main" ]
   pull_request:
     # The branches below must be a subset of the branches above
-    branches: ['main']
+    branches: [ "main" ]
   schedule:
     - cron: '20 13 * * 1'
@@ -24,8 +24,6 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
-permissions: {}
 jobs:
   analyze:
     name: Analyze
@@ -38,44 +36,43 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        language: ['javascript', 'python']
+        language: [ 'javascript', 'python' ]
         # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
         # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
     steps:
       - name: Checkout repository
-        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
-        with:
-          persist-credentials: false
+        uses: actions/checkout@v4
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@df559355d593797519d70b90fc8edd5db049e7a2 # v3.29.9
+        uses: github/codeql-action/init@v3
         with:
           languages: ${{ matrix.language }}
           # If you wish to specify custom queries, you can do so here or in a config file.
           # By default, queries listed here will override any specified in a config file.
           # Prefix the list here with "+" to use these queries and those in the config file.
           # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
           # queries: security-extended,security-and-quality
       # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
       # If this step fails, then you should remove it and run the build manually (see below)
       - name: Autobuild
-        uses: github/codeql-action/autobuild@df559355d593797519d70b90fc8edd5db049e7a2 # v3.29.9
+        uses: github/codeql-action/autobuild@v3
       # Command-line programs to run using the OS shell.
       # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
       # If the Autobuild fails above, remove it and uncomment the following three lines.
       # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
       # - run: |
       #   echo "Run, Build Application using script"
       #   ./location_of_script_within_repo/buildscript.sh
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@df559355d593797519d70b90fc8edd5db049e7a2 # v3.29.9
+        uses: github/codeql-action/analyze@v3
         with:
-          category: '/language:${{matrix.language}}'
+          category: "/language:${{matrix.language}}"

.github/workflows/docker-cleanup.yml vendored (new file, 73 lines)

@@ -0,0 +1,73 @@
# This workflow runs on certain conditions to check for and potentially
# delete container images from the GHCR which no longer have an associated
# code branch.
# Requires a PAT with the correct scope set in the secrets.
#
# This workflow will not trigger runs on forked repos.
name: Docker Cleanup
on:
pull_request:
types:
- "closed"
push:
paths:
- ".github/workflows/docker-cleanup.yml"
concurrency:
group: registry-tags-cleanup
cancel-in-progress: false
jobs:
cleanup-images:
name: Cleanup Stale Images Tags for ${{ matrix.primary-name }}
runs-on: ubuntu-22.04
strategy:
fail-fast: false
matrix:
include:
- primary-name: "immich-server"
- primary-name: "immich-machine-learning"
env:
# Requires a personal access token with the OAuth scope delete:packages
TOKEN: ${{ secrets.PACKAGE_DELETE_TOKEN }}
steps:
- name: Clean temporary images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/ephemeral@v0.8.0
with:
token: "${{ env.TOKEN }}"
owner: "immich-app"
is_org: "true"
do_delete: "true"
package_name: "${{ matrix.primary-name }}"
scheme: "pull_request"
repo_name: "immich"
match_regex: '^pr-(\d+)$|^(\d+)$'
cleanup-untagged-images:
name: Cleanup Untagged Images Tags for ${{ matrix.primary-name }}
runs-on: ubuntu-22.04
needs:
- cleanup-images
strategy:
fail-fast: false
matrix:
include:
- primary-name: "immich-server"
- primary-name: "immich-machine-learning"
- primary-name: "immich-build-cache"
env:
# Requires a personal access token with the OAuth scope delete:packages
TOKEN: ${{ secrets.PACKAGE_DELETE_TOKEN }}
steps:
- name: Clean untagged images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/untagged@v0.8.0
with:
token: "${{ env.TOKEN }}"
owner: "immich-app"
do_delete: "true"
is_org: "true"
package_name: "${{ matrix.primary-name }}"


@@ -5,6 +5,7 @@ on:
   push:
     branches: [main]
   pull_request:
+    branches: [main]
   release:
     types: [published]
@@ -12,182 +13,297 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
-permissions: {}
+permissions:
+  packages: write
 jobs:
   pre-job:
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     outputs:
       should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
       should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
     steps:
       - name: Checkout code
-        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
-        with:
-          persist-credentials: false
+        uses: actions/checkout@v4
       - id: found_paths
-        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
+        uses: dorny/paths-filter@v3
         with:
           filters: |
             server:
               - 'server/**'
               - 'openapi/**'
               - 'web/**'
-              - 'i18n/**'
             machine-learning:
               - 'machine-learning/**'
-            workflow:
-              - '.github/workflows/docker.yml'
-              - '.github/workflows/multi-runner-build.yml'
-              - '.github/actions/image-build'
       - name: Check if we should force jobs to run
         id: should_force
-        run: echo "should_force=${{ steps.found_paths.outputs.workflow == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' }}" >> "$GITHUB_OUTPUT"
+        run: echo "should_force=${{ github.event_name == 'workflow_dispatch' || github.event_name == 'release' }}" >> "$GITHUB_OUTPUT"
   retag_ml:
     name: Re-Tag ML
     needs: pre-job
-    permissions:
-      contents: read
-      packages: write
     if: ${{ needs.pre-job.outputs.should_run_ml == 'false' && !github.event.pull_request.head.repo.fork }}
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        suffix: ['', '-cuda', '-rocm', '-openvino', '-armnn', '-rknn']
+        suffix: ["", "-cuda", "-openvino", "-armnn"]
     steps:
       - name: Login to GitHub Container Registry
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Re-tag image
-        env:
-          REGISTRY_NAME: 'ghcr.io'
-          REPOSITORY: ${{ github.repository_owner }}/immich-machine-learning
-          TAG_OLD: main${{ matrix.suffix }}
-          TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
-          TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
         run: |
-          docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
-          docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
+          REGISTRY_NAME="ghcr.io"
+          REPOSITORY=${{ github.repository_owner }}/immich-machine-learning
+          TAG_OLD=main${{ matrix.suffix }}
+          TAG_NEW=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
+          docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_NEW $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
   retag_server:
     name: Re-Tag Server
     needs: pre-job
-    permissions:
-      contents: read
-      packages: write
     if: ${{ needs.pre-job.outputs.should_run_server == 'false' && !github.event.pull_request.head.repo.fork }}
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        suffix: ['']
+        suffix: [""]
     steps:
       - name: Login to GitHub Container Registry
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Re-tag image
-        env:
-          REGISTRY_NAME: 'ghcr.io'
-          REPOSITORY: ${{ github.repository_owner }}/immich-server
-          TAG_OLD: main${{ matrix.suffix }}
-          TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
-          TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
         run: |
-          docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
-          docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
+          REGISTRY_NAME="ghcr.io"
+          REPOSITORY=${{ github.repository_owner }}/immich-server
+          TAG_OLD=main${{ matrix.suffix }}
+          TAG_NEW=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
+          docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_NEW $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
-  machine-learning:
+  build_and_push_ml:
     name: Build and Push ML
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
+    runs-on: ubuntu-latest
+    env:
+      image: immich-machine-learning
+      context: machine-learning
+      file: machine-learning/Dockerfile
     strategy:
+      # Prevent a failure in one image from stopping the other builds
       fail-fast: false
       matrix:
        include:
-          - device: cpu
-            tag-suffix: ''
-          - device: cuda
-            tag-suffix: '-cuda'
-            platforms: linux/amd64
-          - device: openvino
-            tag-suffix: '-openvino'
-            platforms: linux/amd64
-          - device: armnn
-            tag-suffix: '-armnn'
-            platforms: linux/arm64
-          - device: rknn
-            tag-suffix: '-rknn'
-            platforms: linux/arm64
-          - device: rocm
-            tag-suffix: '-rocm'
-            platforms: linux/amd64
-            runner-mapping: '{"linux/amd64": "mich"}'
-    uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@129aeda75a450666ce96e8bc8126652e717917a7 # multi-runner-build-workflow-0.1.1
-    permissions:
-      contents: read
-      actions: read
-      packages: write
-    secrets:
-      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
-      DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
-    with:
-      image: immich-machine-learning
-      context: machine-learning
-      dockerfile: machine-learning/Dockerfile
-      platforms: ${{ matrix.platforms }}
-      runner-mapping: ${{ matrix.runner-mapping }}
-      tag-suffix: ${{ matrix.tag-suffix }}
-      dockerhub-push: ${{ github.event_name == 'release' }}
-      build-args: |
-        DEVICE=${{ matrix.device }}
+          - platforms: linux/amd64,linux/arm64
+            device: cpu
+          - platforms: linux/amd64
+            device: cuda
+            suffix: -cuda
+          - platforms: linux/amd64
+            device: openvino
+            suffix: -openvino
+          - platforms: linux/arm64
+            device: armnn
+            suffix: -armnn
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3.2.0
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3.6.1
+      - name: Login to Docker Hub
+        # Only push to Docker Hub when making a release
+        if: ${{ github.event_name == 'release' }}
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v3
+        # Skip when PR from a fork
+        if: ${{ !github.event.pull_request.head.repo.fork }}
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Generate docker image tags
+        id: metadata
+        uses: docker/metadata-action@v5
+        with:
+          flavor: |
+            # Disable latest tag
+            latest=false
+          images: |
+            name=ghcr.io/${{ github.repository_owner }}/${{env.image}}
+            name=altran1502/${{env.image}},enable=${{ github.event_name == 'release' }}
+          tags: |
+            # Tag with branch name
+            type=ref,event=branch,suffix=${{ matrix.suffix }}
+            # Tag with pr-number
+            type=ref,event=pr,suffix=${{ matrix.suffix }}
+            # Tag with git tag on release
+            type=ref,event=tag,suffix=${{ matrix.suffix }}
+            type=raw,value=release,enable=${{ github.event_name == 'release' }},suffix=${{ matrix.suffix }}
+      - name: Determine build cache output
+        id: cache-target
+        run: |
+          if [[ "${{ github.event_name }}" == "pull_request" ]]; then
+            # Essentially just ignore the cache output (PR can't write to registry cache)
+            echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
+          else
+            echo "cache-to=type=registry,mode=max,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{ env.image }}" >> $GITHUB_OUTPUT
+          fi
+      - name: Build and push image
+        uses: docker/build-push-action@v6.7.0
+        with:
+          context: ${{ env.context }}
+          file: ${{ env.file }}
+          platforms: ${{ matrix.platforms }}
+          # Skip pushing when PR from a fork
+          push: ${{ !github.event.pull_request.head.repo.fork }}
+          cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{env.image}}
+          cache-to: ${{ steps.cache-target.outputs.cache-to }}
+          tags: ${{ steps.metadata.outputs.tags }}
+          labels: ${{ steps.metadata.outputs.labels }}
+          build-args: |
+            DEVICE=${{ matrix.device }}
+            BUILD_ID=${{ github.run_id }}
+            BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.metadata.outputs.tags }}
+            BUILD_SOURCE_REF=${{ github.ref_name }}
+            BUILD_SOURCE_COMMIT=${{ github.sha }}
-  server:
+  build_and_push_server:
     name: Build and Push Server
+    runs-on: ubuntu-latest
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
-    uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@129aeda75a450666ce96e8bc8126652e717917a7 # multi-runner-build-workflow-0.1.1
-    permissions:
-      contents: read
-      actions: read
-      packages: write
-    secrets:
-      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
-      DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
-    with:
-      image: immich-server
-      context: .
-      dockerfile: server/Dockerfile
-      dockerhub-push: ${{ github.event_name == 'release' }}
-      build-args: |
-        DEVICE=cpu
+    env:
+      image: immich-server
+      context: .
+      file: server/Dockerfile
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - platforms: linux/amd64,linux/arm64
+            device: cpu
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v3.2.0
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3.6.1
+      - name: Login to Docker Hub
+        # Only push to Docker Hub when making a release
+        if: ${{ github.event_name == 'release' }}
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v3
+        # Skip when PR from a fork
+        if: ${{ !github.event.pull_request.head.repo.fork }}
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Generate docker image tags
+        id: metadata
+        uses: docker/metadata-action@v5
+        with:
+          flavor: |
+            # Disable latest tag
+            latest=false
+          images: |
+            name=ghcr.io/${{ github.repository_owner }}/${{env.image}}
+            name=altran1502/${{env.image}},enable=${{ github.event_name == 'release' }}
+          tags: |
+            # Tag with branch name
+            type=ref,event=branch,suffix=${{ matrix.suffix }}
+            # Tag with pr-number
+            type=ref,event=pr,suffix=${{ matrix.suffix }}
+            # Tag with git tag on release
+            type=ref,event=tag,suffix=${{ matrix.suffix }}
+            type=raw,value=release,enable=${{ github.event_name == 'release' }},suffix=${{ matrix.suffix }}
+      - name: Determine build cache output
+        id: cache-target
+        run: |
+          if [[ "${{ github.event_name }}" == "pull_request" ]]; then
+            # Essentially just ignore the cache output (PR can't write to registry cache)
+            echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
+          else
+            echo "cache-to=type=registry,mode=max,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{ env.image }}" >> $GITHUB_OUTPUT
+          fi
+      - name: Build and push image
+        uses: docker/build-push-action@v6.7.0
+        with:
+          context: ${{ env.context }}
+          file: ${{ env.file }}
+          platforms: ${{ matrix.platforms }}
+          # Skip pushing when PR from a fork
+          push: ${{ !github.event.pull_request.head.repo.fork }}
+          cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{env.image}}
+          cache-to: ${{ steps.cache-target.outputs.cache-to }}
+          tags: ${{ steps.metadata.outputs.tags }}
+          labels: ${{ steps.metadata.outputs.labels }}
+          build-args: |
+            DEVICE=${{ matrix.device }}
+            BUILD_ID=${{ github.run_id }}
+            BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.metadata.outputs.tags }}
+            BUILD_SOURCE_REF=${{ github.ref_name }}
+            BUILD_SOURCE_COMMIT=${{ github.sha }}
   success-check-server:
     name: Docker Build & Push Server Success
-    needs: [server, retag_server]
-    permissions: {}
+    needs: [build_and_push_server, retag_server]
     runs-on: ubuntu-latest
     if: always()
     steps:
-      - uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
-        with:
-          needs: ${{ toJSON(needs) }}
+      - name: Any jobs failed?
+        if: ${{ contains(needs.*.result, 'failure') }}
+        run: exit 1
+      - name: All jobs passed or skipped
+        if: ${{ !(contains(needs.*.result, 'failure')) }}
+        run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
   success-check-ml:
     name: Docker Build & Push ML Success
-    needs: [machine-learning, retag_ml]
-    permissions: {}
+    needs: [build_and_push_ml, retag_ml]
     runs-on: ubuntu-latest
     if: always()
     steps:
-      - uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
-        with:
-          needs: ${{ toJSON(needs) }}
+      - name: Any jobs failed?
+        if: ${{ contains(needs.*.result, 'failure') }}
+        run: exit 1
+      - name: All jobs passed or skipped
+        if: ${{ !(contains(needs.*.result, 'failure')) }}
+        run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
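The retag jobs on both sides skip rebuilding entirely: `docker buildx imagetools create -t NEW OLD` writes an additional tag for an existing (possibly multi-arch) manifest directly in the registry. As a standalone sketch with placeholder tags:

#!/bin/bash
# Point a PR tag at the already-built main image; nothing is pulled or rebuilt.
REGISTRY="ghcr.io"
REPOSITORY="immich-app/immich-machine-learning" # placeholder
docker buildx imagetools create \
  -t "$REGISTRY/$REPOSITORY:pr-12345" \
  "$REGISTRY/$REPOSITORY:main"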


@@ -3,6 +3,7 @@ on:
   push:
     branches: [main]
   pull_request:
+    branches: [main]
   release:
     types: [published]
@@ -10,39 +11,27 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
-permissions: {}
 jobs:
   pre-job:
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     outputs:
-      should_run: ${{ steps.found_paths.outputs.docs == 'true' || steps.found_paths.outputs.open-api == 'true' || steps.should_force.outputs.should_force == 'true' }}
+      should_run: ${{ steps.found_paths.outputs.docs == 'true' || steps.should_force.outputs.should_force == 'true' }}
     steps:
       - name: Checkout code
-        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
-        with:
-          persist-credentials: false
+        uses: actions/checkout@v4
       - id: found_paths
-        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
+        uses: dorny/paths-filter@v3
         with:
           filters: |
             docs:
               - 'docs/**'
-            workflow:
-              - '.github/workflows/docs-build.yml'
-            open-api:
-              - 'open-api/immich-openapi-specs.json'
       - name: Check if we should force jobs to run
         id: should_force
-        run: echo "should_force=${{ steps.found_paths.outputs.workflow == 'true' || github.event_name == 'release' || github.ref_name == 'main' }}" >> "$GITHUB_OUTPUT"
+        run: echo "should_force=${{ github.event_name == 'release' }}" >> "$GITHUB_OUTPUT"
   build:
     name: Docs Build
     needs: pre-job
-    permissions:
-      contents: read
     if: ${{ needs.pre-job.outputs.should_run == 'true' }}
     runs-on: ubuntu-latest
     defaults:
@@ -51,33 +40,25 @@ jobs:
     steps:
       - name: Checkout code
-        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
-        with:
-          persist-credentials: false
-      - name: Setup pnpm
-        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
+        uses: actions/checkout@v4
       - name: Setup Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+        uses: actions/setup-node@v4
         with:
           node-version-file: './docs/.nvmrc'
-          cache: 'pnpm'
-          cache-dependency-path: '**/pnpm-lock.yaml'
-      - name: Run install
-        run: pnpm install
+      - name: Run npm install
+        run: npm ci
       - name: Check formatting
-        run: pnpm format
+        run: npm run format
       - name: Run build
-        run: pnpm build
+        run: npm run build
       - name: Upload build output
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@v4
         with:
           name: docs-build-output
           path: docs/build/
-          include-hidden-files: true
           retention-days: 1


@@ -1,7 +1,7 @@
 name: Docs deploy
 on:
-  workflow_run: # zizmor: ignore[dangerous-triggers] no attacker inputs are used here
-    workflows: ['Docs build']
+  workflow_run:
+    workflows: ["Docs build"]
     types:
       - completed
@@ -9,9 +9,6 @@ jobs:
   checks:
     name: Docs Deploy Checks
     runs-on: ubuntu-latest
-    permissions:
-      actions: read
-      pull-requests: read
     outputs:
       parameters: ${{ steps.parameters.outputs.result }}
       artifact: ${{ steps.get-artifact.outputs.result }}
@@ -20,7 +17,7 @@ jobs:
         run: echo 'The triggering workflow did not succeed' && exit 1
       - name: Get artifact
         id: get-artifact
-        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
+        uses: actions/github-script@v7
         with:
           script: |
             let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
@@ -38,9 +35,7 @@ jobs:
             return { found: true, id: matchArtifact.id };
       - name: Determine deploy parameters
         id: parameters
-        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
-        env:
-          HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
+        uses: actions/github-script@v7
         with:
           script: |
             const eventType = context.payload.workflow_run.event;
@@ -62,8 +57,7 @@ jobs:
             } else if (eventType == "pull_request") {
               let pull_number = context.payload.workflow_run.pull_requests[0]?.number;
               if(!pull_number) {
-                const {HEAD_SHA} = process.env;
-                const response = await github.rest.search.issuesAndPullRequests({q: `repo:${{ github.repository }} is:pr sha:${HEAD_SHA}`,per_page: 1,})
+                const response = await github.rest.search.issuesAndPullRequests({q: 'repo:${{ github.repository }} is:pr sha:${{ github.event.workflow_run.head_sha }}',per_page: 1,})
                 const items = response.data.items
                 if (items.length < 1) {
                   throw new Error("No pull request found for the commit")
@@ -101,36 +95,30 @@ jobs:
     name: Docs Deploy
     runs-on: ubuntu-latest
     needs: checks
-    permissions:
-      contents: read
-      actions: read
-      pull-requests: write
     if: ${{ fromJson(needs.checks.outputs.artifact).found && fromJson(needs.checks.outputs.parameters).shouldDeploy }}
     steps:
       - name: Checkout code
-        uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
-        with:
-          persist-credentials: false
+        uses: actions/checkout@v4
       - name: Load parameters
         id: parameters
-        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
-        env:
-          PARAM_JSON: ${{ needs.checks.outputs.parameters }}
+        uses: actions/github-script@v7
         with:
           script: |
-            const parameters = JSON.parse(process.env.PARAM_JSON);
+            const json = `${{ needs.checks.outputs.parameters }}`;
+            const parameters = JSON.parse(json);
             core.setOutput("event", parameters.event);
             core.setOutput("name", parameters.name);
             core.setOutput("shouldDeploy", parameters.shouldDeploy);
+      - run: |
+          echo "Starting docs deployment for ${{ steps.parameters.outputs.event }} ${{ steps.parameters.outputs.name }}"
       - name: Download artifact
-        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
-        env:
-          ARTIFACT_JSON: ${{ needs.checks.outputs.artifact }}
+        uses: actions/github-script@v7
         with:
          script: |
-            let artifact = JSON.parse(process.env.ARTIFACT_JSON);
+            let artifact = ${{ needs.checks.outputs.artifact }};
             let download = await github.rest.actions.downloadArtifact({
               owner: context.repo.owner,
               repo: context.repo.repo,
@@ -150,12 +138,12 @@
           CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
           CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }} TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8 uses: gruntwork-io/terragrunt-action@v2
with: with:
tg_version: '0.58.12' tg_version: "0.58.12"
tofu_version: '1.7.1' tofu_version: "1.7.1"
tg_dir: 'deployment/modules/cloudflare/docs' tg_dir: "deployment/modules/cloudflare/docs"
tg_command: 'apply' tg_command: "apply"
- name: Deploy Docs Subdomain Output - name: Deploy Docs Subdomain Output
id: docs-output id: docs-output
@@ -165,30 +153,27 @@ jobs:
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }} TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8 uses: gruntwork-io/terragrunt-action@v2
with: with:
tg_version: '0.58.12' tg_version: "0.58.12"
tofu_version: '1.7.1' tofu_version: "1.7.1"
tg_dir: 'deployment/modules/cloudflare/docs' tg_dir: "deployment/modules/cloudflare/docs"
tg_command: 'output -json' tg_command: "output -json"
- name: Output Cleaning - name: Output Cleaning
id: clean id: clean
env:
TG_OUTPUT: ${{ steps.docs-output.outputs.tg_action_output }}
run: | run: |
CLEANED=$(echo "$TG_OUTPUT" | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .) TG_OUT=$(echo '${{ steps.docs-output.outputs.tg_action_output }}' | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
echo "output=$CLEANED" >> $GITHUB_OUTPUT echo "output=$TG_OUT" >> $GITHUB_OUTPUT
- name: Publish to Cloudflare Pages - name: Publish to Cloudflare Pages
# TODO: Action is deprecated uses: cloudflare/pages-action@v1
uses: cloudflare/pages-action@f0a1cd58cd66095dee69bfa18fa5efd1dde93bca # v1.5.0
with: with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN_PAGES_UPLOAD }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN_PAGES_UPLOAD }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
projectName: ${{ fromJson(steps.clean.outputs.output).pages_project_name.value }} projectName: ${{ fromJson(steps.clean.outputs.output).pages_project_name.value }}
workingDirectory: 'docs' workingDirectory: "docs"
directory: 'build' directory: "build"
branch: ${{ steps.parameters.outputs.name }} branch: ${{ steps.parameters.outputs.name }}
wranglerVersion: '3' wranglerVersion: '3'
@@ -199,7 +184,7 @@ jobs:
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }} TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8 uses: gruntwork-io/terragrunt-action@v2
with: with:
tg_version: '0.58.12' tg_version: '0.58.12'
tofu_version: '1.7.1' tofu_version: '1.7.1'
@@ -207,7 +192,7 @@ jobs:
tg_command: 'apply' tg_command: 'apply'
- name: Comment - name: Comment
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0 uses: actions-cool/maintain-one-comment@v3
if: ${{ steps.parameters.outputs.event == 'pr' }} if: ${{ steps.parameters.outputs.event == 'pr' }}
with: with:
number: ${{ fromJson(needs.checks.outputs.parameters).pr_number }} number: ${{ fromJson(needs.checks.outputs.parameters).pr_number }}
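A pattern that repeats through this file: values from the github context are passed to scripts via env instead of being interpolated into the script body, so a crafted branch name or PR title cannot inject code. A step sketch with actions/github-script (the variable name is illustrative):

- name: Load parameters
  uses: actions/github-script@v7
  env:
    PARAM_JSON: ${{ needs.checks.outputs.parameters }}   # expanded by the runner, never by the JS parser
  with:
    script: |
      // reading process.env keeps untrusted text out of the script source
      const parameters = JSON.parse(process.env.PARAM_JSON);
      core.setOutput('name', parameters.name);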

@@ -1,39 +1,32 @@
name: Docs destroy name: Docs destroy
on: on:
pull_request_target: # zizmor: ignore[dangerous-triggers] no attacker inputs are used here pull_request_target:
types: [closed] types: [closed]
permissions: {}
jobs: jobs:
deploy: deploy:
name: Docs Destroy name: Docs Destroy
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: write
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Destroy Docs Subdomain - name: Destroy Docs Subdomain
env: env:
TF_VAR_prefix_name: 'pr-${{ github.event.number }}' TF_VAR_prefix_name: "pr-${{ github.event.number }}"
TF_VAR_prefix_event_type: 'pr' TF_VAR_prefix_event_type: "pr"
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }} TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8 uses: gruntwork-io/terragrunt-action@v2
with: with:
tg_version: '0.58.12' tg_version: "0.58.12"
tofu_version: '1.7.1' tofu_version: "1.7.1"
tg_dir: 'deployment/modules/cloudflare/docs' tg_dir: "deployment/modules/cloudflare/docs"
tg_command: 'destroy -refresh=false' tg_command: "destroy"
- name: Comment - name: Comment
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0 uses: actions-cool/maintain-one-comment@v3
with: with:
number: ${{ github.event.number }} number: ${{ github.event.number }}
delete: true delete: true
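The teardown now runs destroy -refresh=false, which skips the state refresh before the destroy plan, presumably so cleanup still succeeds when preview resources have already drifted or been deleted. A step sketch of the invocation (the action is pinned to a full commit SHA in the workflow itself):

- name: Destroy Docs Subdomain
  uses: gruntwork-io/terragrunt-action@v2
  with:
    tg_version: '0.58.12'
    tofu_version: '1.7.1'
    tg_dir: 'deployment/modules/cloudflare/docs'
    tg_command: 'destroy -refresh=false'   # no pre-destroy refresh of remote state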

@@ -1,57 +0,0 @@
name: Fix formatting
on:
pull_request:
types: [labeled]
permissions: {}
jobs:
fix-formatting:
runs-on: ubuntu-latest
if: ${{ github.event.label.name == 'fix:formatting' }}
permissions:
contents: write
pull-requests: write
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@a8d616148505b5069dccd32f177bb87d7f39123b # v2.1.1
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: 'Checkout'
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
ref: ${{ github.event.pull_request.head.ref }}
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: true
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Fix formatting
run: make install-all && make format-all
- name: Commit and push
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4
with:
default_author: github_actions
message: 'chore: fix formatting'
- name: Remove label
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
if: always()
with:
script: |
github.rest.issues.removeLabel({
issue_number: context.payload.pull_request.number,
owner: context.repo.owner,
repo: context.repo.repo,
name: 'fix:formatting'
})

@@ -1,13 +0,0 @@
name: Org Checks
on:
pull_request_review:
pull_request:
jobs:
check-approvals:
name: Check for Team/Admin Review
uses: immich-app/devtools/.github/workflows/required-approval.yml@main
permissions:
pull-requests: read
contents: read

@@ -1,24 +1,21 @@
name: PR Label Validation name: PR Label Validation
on: on:
pull_request_target: # zizmor: ignore[dangerous-triggers] no attacker inputs are used here pull_request_target:
types: [opened, labeled, unlabeled, synchronize] types: [opened, labeled, unlabeled, synchronize]
permissions: {}
jobs: jobs:
validate-release-label: validate-release-label:
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions: permissions:
issues: write issues: write
pull-requests: write pull-requests: read
steps: steps:
- name: Require PR to have a changelog label - name: Require PR to have a changelog label
uses: mheap/github-action-required-labels@8afbe8ae6ab7647d0c9f0cfa7c2f939650d22509 # v5.5.1 uses: mheap/github-action-required-labels@v5
with: with:
mode: exactly mode: exactly
count: 1 count: 1
use_regex: true use_regex: true
labels: 'changelog:.*' labels: "changelog:.*"
add_comment: true add_comment: true
message: 'Label error. Requires {{errorString}} {{count}} of: {{ provided }}. Found: {{ applied }}. A maintainer will add the required label.'
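The permissions change here follows the hardening pattern used across all of these workflows: an empty top-level permissions block strips every default GITHUB_TOKEN scope, and each job grants back only what it uses. A minimal sketch (job name and scopes are illustrative):

permissions: {}            # workflow-wide: the token starts with no scopes at all
jobs:
  validate:
    runs-on: ubuntu-latest
    permissions:
      issues: write          # only the scopes this job actually needs
      pull-requests: write
    steps:
      - run: echo "the token is limited to the grants above"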

@@ -1,8 +1,6 @@
name: 'Pull Request Labeler' name: "Pull Request Labeler"
on: on:
- pull_request_target # zizmor: ignore[dangerous-triggers] no attacker inputs are used here - pull_request_target
permissions: {}
jobs: jobs:
labeler: labeler:
@@ -11,4 +9,4 @@ jobs:
pull-requests: write pull-requests: write
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0 - uses: actions/labeler@v5

@@ -4,16 +4,12 @@ on:
pull_request: pull_request:
types: [opened, synchronize, reopened, edited] types: [opened, synchronize, reopened, edited]
permissions: {}
jobs: jobs:
validate-pr-title: validate-pr-title:
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
pull-requests: write
steps: steps:
- name: PR Conventional Commit Validation - name: PR Conventional Commit Validation
uses: ytanikin/PRConventionalCommits@b628c5a234cc32513014b7bfdd1e47b532124d98 # 1.3.0 uses: ytanikin/PRConventionalCommits@1.2.0
with: with:
task_types: '["feat","fix","docs","test","ci","refactor","perf","chore","revert"]' task_types: '["feat","fix","docs","test","ci","refactor","perf","chore","revert"]'
add_label: 'false' add_label: 'false'

@@ -21,40 +21,35 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-root group: ${{ github.workflow }}-${{ github.ref }}-root
cancel-in-progress: true cancel-in-progress: true
permissions: {}
jobs: jobs:
bump_version: bump_version:
runs-on: ubuntu-latest runs-on: ubuntu-latest
outputs: outputs:
ref: ${{ steps.push-tag.outputs.commit_long_sha }} ref: ${{ steps.push-tag.outputs.commit_long_sha }}
permissions: {} # No job-level permissions are needed because it uses the app-token
steps: steps:
- name: Generate a token - name: Generate a token
id: generate-token id: generate-token
uses: actions/create-github-app-token@a8d616148505b5069dccd32f177bb87d7f39123b # v2.1.1 uses: actions/create-github-app-token@v1
with: with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }} app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }} private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout - name: Checkout
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 uses: actions/checkout@v4
with: with:
token: ${{ steps.generate-token.outputs.token }} token: ${{ steps.generate-token.outputs.token }}
persist-credentials: true
- name: Install uv - name: Install Poetry
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2 run: pipx install poetry
- name: Bump version - name: Bump version
env: run: misc/release/pump-version.sh -s "${{ inputs.serverBump }}" -m "${{ inputs.mobileBump }}"
SERVER_BUMP: ${{ inputs.serverBump }}
MOBILE_BUMP: ${{ inputs.mobileBump }}
run: misc/release/pump-version.sh -s "${SERVER_BUMP}" -m "${MOBILE_BUMP}"
- name: Commit and tag - name: Commit and tag
id: push-tag id: push-tag
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4 uses: EndBug/add-and-commit@v9
with: with:
default_author: github_actions default_author: github_actions
message: 'chore: version ${{ env.IMMICH_VERSION }}' message: 'chore: version ${{ env.IMMICH_VERSION }}'
@@ -64,47 +59,30 @@ jobs:
build_mobile: build_mobile:
uses: ./.github/workflows/build-mobile.yml uses: ./.github/workflows/build-mobile.yml
needs: bump_version needs: bump_version
permissions: secrets: inherit
contents: read
secrets:
KEY_JKS: ${{ secrets.KEY_JKS }}
ALIAS: ${{ secrets.ALIAS }}
ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
with: with:
ref: ${{ needs.bump_version.outputs.ref }} ref: ${{ needs.bump_version.outputs.ref }}
prepare_release: prepare_release:
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: build_mobile needs: build_mobile
permissions:
actions: read # To download the app artifact
# No content permissions are needed because it uses the app-token
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@a8d616148505b5069dccd32f177bb87d7f39123b # v2.1.1
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
steps:
- name: Checkout - name: Checkout
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 uses: actions/checkout@v4
with: with:
token: ${{ steps.generate-token.outputs.token }} token: ${{ secrets.ORG_RELEASE_TOKEN }}
persist-credentials: false
- name: Download APK - name: Download APK
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 uses: actions/download-artifact@v4
with: with:
name: release-apk-signed name: release-apk-signed
- name: Create draft release - name: Create draft release
uses: softprops/action-gh-release@72f2c25fcb47643c292f7107632f7a47c1df5cd8 # v2.3.2 uses: softprops/action-gh-release@v2
with: with:
draft: true draft: true
tag_name: ${{ env.IMMICH_VERSION }} tag_name: ${{ env.IMMICH_VERSION }}
token: ${{ steps.generate-token.outputs.token }}
generate_release_notes: true generate_release_notes: true
body_path: misc/release/notes.tmpl body_path: misc/release/notes.tmpl
files: | files: |

@@ -1,47 +0,0 @@
name: Preview label
on:
pull_request:
types: [labeled, closed]
permissions: {}
jobs:
comment-status:
runs-on: ubuntu-latest
if: ${{ github.event.action == 'labeled' && github.event.label.name == 'preview' }}
permissions:
pull-requests: write
steps:
- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
with:
message-id: 'preview-status'
message: 'Deploying preview environment to https://pr-${{ github.event.pull_request.number }}.preview.internal.immich.cloud/'
remove-label:
runs-on: ubuntu-latest
if: ${{ (github.event.action == 'closed' || github.event.pull_request.head.repo.fork) && contains(github.event.pull_request.labels.*.name, 'preview') }}
permissions:
pull-requests: write
steps:
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
script: |
github.rest.issues.removeLabel({
issue_number: context.payload.pull_request.number,
owner: context.repo.owner,
repo: context.repo.repo,
name: 'preview'
})
- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
if: ${{ github.event.pull_request.head.repo.fork }}
with:
message-id: 'preview-status'
message: 'PRs from forks cannot have preview environments.'
- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
if: ${{ !github.event.pull_request.head.repo.fork }}
with:
message-id: 'preview-status'
message: 'Preview environment has been removed.'

@@ -4,37 +4,28 @@ on:
release: release:
types: [published] types: [published]
permissions: {} permissions:
packages: write
jobs: jobs:
publish: publish:
name: Publish `@immich/sdk` name: Publish `@immich/sdk`
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
defaults: defaults:
run: run:
working-directory: ./open-api/typescript-sdk working-directory: ./open-api/typescript-sdk
steps: steps:
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 - uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
# Setup .npmrc file to publish to npm # Setup .npmrc file to publish to npm
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 - uses: actions/setup-node@v4
with: with:
node-version-file: './open-api/typescript-sdk/.nvmrc' node-version-file: './open-api/typescript-sdk/.nvmrc'
registry-url: 'https://registry.npmjs.org' registry-url: 'https://registry.npmjs.org'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Install deps - name: Install deps
run: pnpm install --frozen-lockfile run: npm ci
- name: Build - name: Build
run: pnpm build run: npm run build
- name: Publish - name: Publish
run: pnpm publish run: npm publish
env: env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

@@ -9,127 +9,58 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }} group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true cancel-in-progress: true
permissions: {}
jobs: jobs:
pre-job: pre-job:
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
outputs: outputs:
should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }} should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 uses: actions/checkout@v4
with:
persist-credentials: false
- id: found_paths - id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 uses: dorny/paths-filter@v3
with: with:
filters: | filters: |
mobile: mobile:
- 'mobile/**' - 'mobile/**'
workflow:
- '.github/workflows/static_analysis.yml'
- name: Check if we should force jobs to run - name: Check if we should force jobs to run
id: should_force id: should_force
run: echo "should_force=${{ steps.found_paths.outputs.workflow == 'true' || github.event_name == 'release' }}" >> "$GITHUB_OUTPUT" run: echo "should_force=${{ github.event_name == 'release' }}" >> "$GITHUB_OUTPUT"
mobile-dart-analyze: mobile-dart-analyze:
name: Run Dart Code Analysis name: Run Dart Code Analysis
needs: pre-job needs: pre-job
if: ${{ needs.pre-job.outputs.should_run == 'true' }} if: ${{ needs.pre-job.outputs.should_run == 'true' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./mobile
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup Flutter SDK - name: Setup Flutter SDK
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0 uses: subosito/flutter-action@v2
with: with:
channel: 'stable' channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml flutter-version-file: ./mobile/pubspec.yaml
- name: Install dependencies - name: Install dependencies
run: dart pub get run: dart pub get
working-directory: ./mobile
- name: Install DCM
uses: CQLabs/setup-dcm@8697ae0790c0852e964a6ef1d768d62a6675481a # v2.0.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
version: auto
working-directory: ./mobile
- name: Generate translation file
run: dart run easy_localization:generate -S ../i18n && dart run bin/generate_keys.dart
- name: Run Build Runner
run: make build
- name: Generate platform API
run: make pigeon
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
id: verify-changed-files
with:
files: |
mobile/**/*.g.dart
mobile/**/*.gr.dart
mobile/**/*.drift.dart
- name: Verify files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
run: |
echo "ERROR: Generated files not up to date! Run 'make build' and 'make pigeon' inside the mobile directory"
echo "Changed files: ${CHANGED_FILES}"
exit 1
- name: Run dart analyze - name: Run dart analyze
run: dart analyze --fatal-infos run: dart analyze --fatal-infos
working-directory: ./mobile
- name: Run dart format - name: Run dart format
run: make format run: dart format lib/ --set-exit-if-changed
working-directory: ./mobile
- name: Run dart custom_lint - name: Run dart custom_lint
run: dart run custom_lint run: dart run custom_lint
working-directory: ./mobile
# TODO: Use https://github.com/CQLabs/dcm-action # Enable after riverpod generator migration is completed
- name: Run DCM # - name: Run dart custom lint
run: dcm analyze lib --fatal-style --fatal-warnings # run: dart run custom_lint
# working-directory: ./mobile
zizmor:
name: zizmor
runs-on: ubuntu-latest
permissions:
security-events: write
contents: read
actions: read
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
- name: Run zizmor 🌈
run: uvx zizmor --format=sarif . > results.sarif
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload SARIF file
uses: github/codeql-action/upload-sarif@df559355d593797519d70b90fc8edd5db049e7a2 # v3.29.9
with:
sarif_file: results.sarif
category: zizmor
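The new zizmor job runs the scanner through uv and hands the SARIF report to GitHub code scanning. A step sketch of that flow (tags shortened from the SHA pins in the diff; the GH_TOKEN is used for zizmor's online audits):

- uses: astral-sh/setup-uv@v5
- name: Run zizmor
  run: uvx zizmor --format=sarif . > results.sarif   # uvx runs the tool without installing it into the project
  env:
    GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- uses: github/codeql-action/upload-sarif@v3
  with:
    sarif_file: results.sarif
    category: zizmor                                 # keeps these findings in their own code-scanning bucket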

@@ -4,17 +4,15 @@ on:
pull_request: pull_request:
push: push:
branches: [main] branches: [main]
concurrency: concurrency:
group: ${{ github.workflow }}-${{ github.ref }} group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true cancel-in-progress: true
permissions: {}
jobs: jobs:
pre-job: pre-job:
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
outputs: outputs:
should_run_i18n: ${{ steps.found_paths.outputs.i18n == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_web: ${{ steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }} should_run_web: ${{ steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }} should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_cli: ${{ steps.found_paths.outputs.cli == 'true' || steps.should_force.outputs.should_force == 'true' }} should_run_cli: ${{ steps.found_paths.outputs.cli == 'true' || steps.should_force.outputs.should_force == 'true' }}
@@ -23,21 +21,15 @@ jobs:
should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }} should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_e2e_web: ${{ steps.found_paths.outputs.e2e == 'true' || steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }} should_run_e2e_web: ${{ steps.found_paths.outputs.e2e == 'true' || steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_e2e_server_cli: ${{ steps.found_paths.outputs.e2e == 'true' || steps.found_paths.outputs.server == 'true' || steps.found_paths.outputs.cli == 'true' || steps.should_force.outputs.should_force == 'true' }} should_run_e2e_server_cli: ${{ steps.found_paths.outputs.e2e == 'true' || steps.found_paths.outputs.server == 'true' || steps.found_paths.outputs.cli == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_.github: ${{ steps.found_paths.outputs['.github'] == 'true' || steps.should_force.outputs.should_force == 'true' }} # redundant to have should_force but if someone changes the trigger then this won't have to be changed
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 uses: actions/checkout@v4
with:
persist-credentials: false
- id: found_paths - id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 uses: dorny/paths-filter@v3
with: with:
filters: | filters: |
i18n:
- 'i18n/**'
web: web:
- 'web/**' - 'web/**'
- 'i18n/**'
- 'open-api/typescript-sdk/**' - 'open-api/typescript-sdk/**'
server: server:
- 'server/**' - 'server/**'
@@ -50,691 +42,537 @@ jobs:
- 'mobile/**' - 'mobile/**'
machine-learning: machine-learning:
- 'machine-learning/**' - 'machine-learning/**'
workflow:
- '.github/workflows/test.yml'
.github:
- '.github/**'
- name: Check if we should force jobs to run - name: Check if we should force jobs to run
id: should_force id: should_force
run: echo "should_force=${{ steps.found_paths.outputs.workflow == 'true' || github.event_name == 'workflow_dispatch' }}" >> "$GITHUB_OUTPUT" run: echo "should_force=${{ github.event_name == 'workflow_dispatch' }}" >> "$GITHUB_OUTPUT"
server-unit-tests: server-unit-tests:
name: Test & Lint Server name: Test & Lint Server
needs: pre-job needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }} if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
defaults: defaults:
run: run:
working-directory: ./server working-directory: ./server
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node - name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 uses: actions/setup-node@v4
with: with:
node-version-file: './server/.nvmrc' node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml' - name: Run npm install
- name: Run package manager install run: npm ci
run: pnpm install
- name: Run linter - name: Run linter
run: pnpm lint run: npm run lint
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Run formatter - name: Run formatter
run: pnpm format run: npm run format
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Run tsc - name: Run tsc
run: pnpm check run: npm run check
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Run small tests & coverage
run: pnpm test - name: Run unit tests & coverage
run: npm run test:cov
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
cli-unit-tests: cli-unit-tests:
name: Unit Test CLI name: Unit Test CLI
needs: pre-job needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }} if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
defaults: defaults:
run: run:
working-directory: ./cli working-directory: ./cli
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node - name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 uses: actions/setup-node@v4
with: with:
node-version-file: './cli/.nvmrc' node-version-file: './cli/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup typescript-sdk - name: Setup typescript-sdk
run: pnpm install && pnpm run build run: npm ci && npm run build
working-directory: ./open-api/typescript-sdk working-directory: ./open-api/typescript-sdk
- name: Install deps - name: Install deps
run: pnpm install run: npm ci
- name: Run linter - name: Run linter
run: pnpm lint run: npm run lint
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Run formatter - name: Run formatter
run: pnpm format run: npm run format
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Run tsc - name: Run tsc
run: pnpm check run: npm run check
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Run unit tests & coverage - name: Run unit tests & coverage
run: pnpm test run: npm run test:cov
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
cli-unit-tests-win: cli-unit-tests-win:
name: Unit Test CLI (Windows) name: Unit Test CLI (Windows)
needs: pre-job needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }} if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
runs-on: windows-latest runs-on: windows-latest
permissions:
contents: read
defaults: defaults:
run: run:
working-directory: ./cli working-directory: ./cli
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node - name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 uses: actions/setup-node@v4
with: with:
node-version-file: './cli/.nvmrc' node-version-file: './cli/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup typescript-sdk - name: Setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build run: npm ci && npm run build
working-directory: ./open-api/typescript-sdk working-directory: ./open-api/typescript-sdk
- name: Install deps - name: Install deps
run: pnpm install --frozen-lockfile run: npm ci
# Skip linter & formatter in Windows test. # Skip linter & formatter in Windows test.
- name: Run tsc - name: Run tsc
run: pnpm check run: npm run check
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Run unit tests & coverage - name: Run unit tests & coverage
run: pnpm test run: npm run test:cov
if: ${{ !cancelled() }}
web-lint:
name: Lint Web
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_web == 'true' }}
runs-on: mich
permissions:
contents: read
defaults:
run:
working-directory: ./web
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
- name: Run pnpm install
run: pnpm rebuild && pnpm install --frozen-lockfile
- name: Run linter
run: pnpm lint:p
if: ${{ !cancelled() }}
- name: Run formatter
run: pnpm format
if: ${{ !cancelled() }}
- name: Run svelte checks
run: pnpm check:svelte
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
web-unit-tests: web-unit-tests:
name: Test Web name: Test & Lint Web
needs: pre-job needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_web == 'true' }} if: ${{ needs.pre-job.outputs.should_run_web == 'true' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
defaults: defaults:
run: run:
working-directory: ./web working-directory: ./web
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node - name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 uses: actions/setup-node@v4
with: with:
node-version-file: './web/.nvmrc' node-version-file: './web/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run setup typescript-sdk - name: Run setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build run: npm ci && npm run build
working-directory: ./open-api/typescript-sdk working-directory: ./open-api/typescript-sdk
- name: Run npm install - name: Run npm install
run: pnpm install --frozen-lockfile run: npm ci
- name: Run linter
run: npm run lint
if: ${{ !cancelled() }}
- name: Run formatter
run: npm run format
if: ${{ !cancelled() }}
- name: Run svelte checks
run: npm run check:svelte
if: ${{ !cancelled() }}
- name: Run tsc - name: Run tsc
run: pnpm check:typescript run: npm run check:typescript
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Run unit tests & coverage - name: Run unit tests & coverage
run: pnpm test run: npm run test:cov
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
i18n-tests:
name: Test i18n
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_i18n == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Install dependencies
run: pnpm --filter=immich-web install --frozen-lockfile
- name: Format
run: pnpm --filter=immich-web format:i18n
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
id: verify-changed-files
with:
files: |
i18n/**
- name: Verify files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
run: |
echo "ERROR: i18n files not up to date!"
echo "Changed files: ${CHANGED_FILES}"
exit 1
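The i18n job above and the mobile and OpenAPI checks all share one guard: regenerate files in CI, then fail if anything on disk changed. A step sketch of the pattern (the generator command is a stand-in; any deterministic generator fits):

- name: Regenerate files
  run: pnpm --filter=immich-web format:i18n
- name: Find file changes
  uses: tj-actions/verify-changed-files@v20
  id: verify-changed-files
  with:
    files: |
      i18n/**
- name: Verify files have not changed
  if: steps.verify-changed-files.outputs.files_changed == 'true'
  env:
    CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}   # via env, not inline interpolation
  run: |
    echo "ERROR: generated files not up to date!"
    echo "Changed files: ${CHANGED_FILES}"
    exit 1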
e2e-tests-lint: e2e-tests-lint:
name: End-to-End Lint name: End-to-End Lint
needs: pre-job needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_e2e == 'true' }} if: ${{ needs.pre-job.outputs.should_run_e2e == 'true' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
defaults: defaults:
run: run:
working-directory: ./e2e working-directory: ./e2e
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node - name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 uses: actions/setup-node@v4
with: with:
node-version-file: './e2e/.nvmrc' node-version-file: './e2e/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run setup typescript-sdk - name: Run setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build run: npm ci && npm run build
working-directory: ./open-api/typescript-sdk working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Install dependencies - name: Install dependencies
run: pnpm install --frozen-lockfile run: npm ci
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Run linter - name: Run linter
run: pnpm lint run: npm run lint
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Run formatter - name: Run formatter
run: pnpm format run: npm run format
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Run tsc - name: Run tsc
run: pnpm check run: npm run check
if: ${{ !cancelled() }}
server-medium-tests:
name: Medium Tests (Server)
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./server
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run pnpm install
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm install --frozen-lockfile
- name: Run medium tests
run: pnpm test:medium
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
e2e-tests-server-cli: e2e-tests-server-cli:
name: End-to-End Tests (Server & CLI) name: End-to-End Tests (Server & CLI)
needs: pre-job needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_e2e_server_cli == 'true' }} if: ${{ needs.pre-job.outputs.should_run_e2e_server_cli == 'true' }}
runs-on: ${{ matrix.runner }} runs-on: mich
permissions:
contents: read
defaults: defaults:
run: run:
working-directory: ./e2e working-directory: ./e2e
strategy:
matrix:
runner: [ubuntu-latest, ubuntu-24.04-arm]
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 uses: actions/checkout@v4
with: with:
persist-credentials: false
submodules: 'recursive' submodules: 'recursive'
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node - name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 uses: actions/setup-node@v4
with: with:
node-version-file: './e2e/.nvmrc' node-version-file: './e2e/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run setup typescript-sdk - name: Run setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build run: npm ci && npm run build
working-directory: ./open-api/typescript-sdk working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Run setup web
run: pnpm install --frozen-lockfile && pnpm exec svelte-kit sync
working-directory: ./web
if: ${{ !cancelled() }}
- name: Run setup cli - name: Run setup cli
run: pnpm install --frozen-lockfile && pnpm build run: npm ci && npm run build
working-directory: ./cli working-directory: ./cli
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Install dependencies - name: Install dependencies
run: pnpm install --frozen-lockfile run: npm ci
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Docker build - name: Docker build
run: docker compose build run: docker compose build
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Run e2e tests (api & cli) - name: Run e2e tests (api & cli)
run: pnpm test run: npm run test
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
e2e-tests-web: e2e-tests-web:
name: End-to-End Tests (Web) name: End-to-End Tests (Web)
needs: pre-job needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_e2e_web == 'true' }} if: ${{ needs.pre-job.outputs.should_run_e2e_web == 'true' }}
runs-on: ${{ matrix.runner }} runs-on: mich
permissions:
contents: read
defaults: defaults:
run: run:
working-directory: ./e2e working-directory: ./e2e
strategy:
matrix:
runner: [ubuntu-latest, ubuntu-24.04-arm]
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 uses: actions/checkout@v4
with: with:
persist-credentials: false
submodules: 'recursive' submodules: 'recursive'
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node - name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 uses: actions/setup-node@v4
with: with:
node-version-file: './e2e/.nvmrc' node-version-file: './e2e/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run setup typescript-sdk - name: Run setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build run: npm ci && npm run build
working-directory: ./open-api/typescript-sdk working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Install dependencies - name: Install dependencies
run: pnpm install --frozen-lockfile run: npm ci
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Install Playwright Browsers - name: Install Playwright Browsers
run: npx playwright install chromium --only-shell run: npx playwright install --with-deps chromium
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Docker build - name: Docker build
run: docker compose build run: docker compose build
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
- name: Run e2e tests (web) - name: Run e2e tests (web)
run: npx playwright test run: npx playwright test
if: ${{ !cancelled() }} if: ${{ !cancelled() }}
success-check-e2e:
name: End-to-End Tests Success
needs: [e2e-tests-server-cli, e2e-tests-web]
permissions: {}
runs-on: ubuntu-latest
if: always()
steps:
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
with:
needs: ${{ toJSON(needs) }}
mobile-unit-tests: mobile-unit-tests:
name: Unit Test Mobile name: Unit Test Mobile
needs: pre-job needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_mobile == 'true' }} if: ${{ needs.pre-job.outputs.should_run_mobile == 'true' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
steps: steps:
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 - uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup Flutter SDK - name: Setup Flutter SDK
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0 uses: subosito/flutter-action@v2
with: with:
channel: 'stable' channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml flutter-version-file: ./mobile/pubspec.yaml
- name: Generate translation file
run: dart run easy_localization:generate -S ../i18n && dart run bin/generate_keys.dart
working-directory: ./mobile
- name: Run tests - name: Run tests
working-directory: ./mobile working-directory: ./mobile
run: flutter test -j 1 run: flutter test -j 1
ml-unit-tests: ml-unit-tests:
name: Unit Test ML name: Unit Test ML
needs: pre-job needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }} if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
defaults: defaults:
run: run:
working-directory: ./machine-learning working-directory: ./machine-learning
steps: steps:
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 - uses: actions/checkout@v4
- name: Install poetry
run: pipx install poetry
- uses: actions/setup-python@v5
with: with:
persist-credentials: false python-version: 3.11
- name: Install uv cache: 'poetry'
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
# TODO: add caching when supported (https://github.com/actions/setup-python/pull/818)
# with:
# python-version: 3.11
# cache: 'uv'
- name: Install dependencies - name: Install dependencies
run: | run: |
uv sync --extra cpu poetry install --with dev --with cpu
- name: Lint with ruff - name: Lint with ruff
run: | run: |
uv run ruff check --output-format=github immich_ml poetry run ruff check --output-format=github app export
- name: Check black formatting - name: Check black formatting
run: | run: |
uv run black --check immich_ml poetry run black --check app export
- name: Run mypy type checking - name: Run mypy type checking
run: | run: |
uv run mypy --strict immich_ml/ poetry run mypy --install-types --non-interactive --strict app/
- name: Run tests and coverage - name: Run tests and coverage
run: | run: |
uv run pytest --cov=immich_ml --cov-report term-missing poetry run pytest app --cov=app --cov-report term-missing
github-files-formatting:
name: .github Files Formatting
needs: pre-job
if: ${{ needs.pre-job.outputs['should_run_.github'] == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./.github
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './.github/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run pnpm install
run: pnpm install --frozen-lockfile
- name: Run formatter
run: pnpm format
if: ${{ !cancelled() }}
shellcheck: shellcheck:
name: ShellCheck name: ShellCheck
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
steps: steps:
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 - uses: actions/checkout@v4
with:
persist-credentials: false
- name: Run ShellCheck - name: Run ShellCheck
uses: ludeeus/action-shellcheck@00cae500b08a931fb5698e11e79bfbd38e612a38 # 2.0.0 uses: ludeeus/action-shellcheck@master
with: with:
ignore_paths: >- ignore_paths: >-
**/open-api/** **/openapi** **/node_modules/** **/open-api/**
**/openapi/**
**/node_modules/**
generated-api-up-to-date: generated-api-up-to-date:
name: OpenAPI Clients name: OpenAPI Clients
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node - name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 uses: actions/setup-node@v4
with: with:
node-version-file: './server/.nvmrc' node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Install server dependencies - name: Install server dependencies
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm --filter immich install --frozen-lockfile run: npm --prefix=server ci
- name: Build the app - name: Build the app
run: pnpm --filter immich build run: npm --prefix=server run build
- name: Run API generation - name: Run API generation
run: make open-api run: make open-api
- name: Find file changes - name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4 uses: tj-actions/verify-changed-files@v20
id: verify-changed-files id: verify-changed-files
with: with:
files: | files: |
mobile/openapi mobile/openapi
open-api/typescript-sdk open-api/typescript-sdk
open-api/immich-openapi-specs.json open-api/immich-openapi-specs.json
- name: Verify files have not changed - name: Verify files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true' if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
run: | run: |
echo "ERROR: Generated files not up to date!" echo "ERROR: Generated files not up to date!"
echo "Changed files: ${CHANGED_FILES}" echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
exit 1 exit 1
sql-schema-up-to-date:
name: SQL Schema Checks generated-typeorm-migrations-up-to-date:
name: TypeORM Checks
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: read
services: services:
postgres: postgres:
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3@sha256:ec713143dca1a426eba2e03707c319e2ec3cc9d304ef767f777f8e297dee820c image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
env: env:
POSTGRES_PASSWORD: postgres POSTGRES_PASSWORD: postgres
POSTGRES_USER: postgres POSTGRES_USER: postgres
POSTGRES_DB: immich POSTGRES_DB: immich
options: >- options: >-
--health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 --health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports: ports:
- 5432:5432 - 5432:5432
defaults: defaults:
run: run:
working-directory: ./server working-directory: ./server
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node - name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 uses: actions/setup-node@v4
with: with:
node-version-file: './server/.nvmrc' node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Install server dependencies - name: Install server dependencies
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm install --frozen-lockfile run: npm ci
- name: Build the app - name: Build the app
run: pnpm build run: npm run build
- name: Run existing migrations - name: Run existing migrations
run: pnpm migrations:run run: npm run typeorm:migrations:run
- name: Test npm run schema:reset command works - name: Test npm run schema:reset command works
run: pnpm schema:reset run: npm run typeorm:schema:reset
- name: Generate new migrations - name: Generate new migrations
continue-on-error: true continue-on-error: true
run: pnpm migrations:generate src/TestMigration run: npm run typeorm:migrations:generate ./src/migrations/TestMigration
- name: Find file changes - name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4 uses: tj-actions/verify-changed-files@v20
id: verify-changed-files id: verify-changed-files
with: with:
files: | files: |
server/src server/src/migrations/
- name: Verify migration files have not changed - name: Verify migration files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true' if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
run: | run: |
echo "ERROR: Generated migration files not up to date!" echo "ERROR: Generated migration files not up to date!"
echo "Changed files: ${CHANGED_FILES}" echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
cat ./src/*-TestMigration.ts
exit 1 exit 1
- name: Run SQL generation - name: Run SQL generation
run: pnpm sync:sql run: npm run sync:sql
env: env:
DB_URL: postgres://postgres:postgres@localhost:5432/immich DB_URL: postgres://postgres:postgres@localhost:5432/immich
- name: Find file changes - name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4 uses: tj-actions/verify-changed-files@v20
id: verify-changed-sql-files id: verify-changed-sql-files
with: with:
files: | files: |
server/src/queries server/src/queries
- name: Verify SQL files have not changed - name: Verify SQL files have not changed
if: steps.verify-changed-sql-files.outputs.files_changed == 'true' if: steps.verify-changed-sql-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-sql-files.outputs.changed_files }}
run: | run: |
echo "ERROR: Generated SQL files not up to date!" echo "ERROR: Generated SQL files not up to date!"
echo "Changed files: ${CHANGED_FILES}" echo "Changed files: ${{ steps.verify-changed-sql-files.outputs.changed_files }}"
git diff
exit 1 exit 1
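The schema checks run against a throwaway Postgres started as a service container; the job only begins once the Docker health check passes. A sketch of the service block (image tag copied from the new side of the diff, which additionally pins it by digest):

services:
  postgres:
    image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3
    env:
      POSTGRES_PASSWORD: postgres
      POSTGRES_USER: postgres
      POSTGRES_DB: immich
    options: >-                        # Docker-level health check gating the job start
      --health-cmd pg_isready
      --health-interval 10s
      --health-timeout 5s
      --health-retries 5
    ports:
      - 5432:5432                      # steps reach it on localhost:5432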
# mobile-integration-tests:
#   name: Run mobile end-to-end integration tests
#   runs-on: macos-latest
#   steps:
#     - uses: actions/checkout@v4
#     - uses: actions/setup-java@v3
#       with:
#         distribution: 'zulu'
#         java-version: '12.x'
#         cache: 'gradle'
#     - name: Cache android SDK
#       uses: actions/cache@v3
#       id: android-sdk
#       with:
#         key: android-sdk
#         path: |
#           /usr/local/lib/android/
#           ~/.android
#     - name: Cache Gradle
#       uses: actions/cache@v3
#       with:
#         path: |
#           ./mobile/build/
#           ./mobile/android/.gradle/
#         key: ${{ runner.os }}-flutter-${{ hashFiles('**/*.gradle*', 'pubspec.lock') }}
#     - name: Setup Android SDK
#       if: steps.android-sdk.outputs.cache-hit != 'true'
#       uses: android-actions/setup-android@v2
#     - name: AVD cache
#       uses: actions/cache@v3
#       id: avd-cache
#       with:
#         path: |
#           ~/.android/avd/*
#           ~/.android/adb*
#         key: avd-29
#     - name: create AVD and generate snapshot for caching
#       if: steps.avd-cache.outputs.cache-hit != 'true'
#       uses: reactivecircus/android-emulator-runner@v2.27.0
#       with:
#         working-directory: ./mobile
#         cores: 2
#         api-level: 29
#         arch: x86_64
#         profile: pixel
#         target: default
#         force-avd-creation: false
#         emulator-options: -no-window -gpu swiftshader_indirect -noaudio -no-boot-anim -camera-back none
#         disable-animations: false
#         script: echo "Generated AVD snapshot for caching."
#     - name: Setup Flutter SDK
#       uses: subosito/flutter-action@v2
#       with:
#         channel: 'stable'
#         flutter-version: '3.7.3'
#         cache: true
#     - name: Run integration tests
#       uses: Wandalen/wretry.action@master
#       with:
#         action: reactivecircus/android-emulator-runner@v2.27.0
#         with: |
#           working-directory: ./mobile
#           cores: 2
#           api-level: 29
#           arch: x86_64
#           profile: pixel
#           target: default
#           force-avd-creation: false
#           emulator-options: -no-snapshot-save -no-window -gpu swiftshader_indirect -noaudio -no-boot-anim -camera-back none
#           disable-animations: true
#           script: |
#             flutter pub get
#             flutter test integration_test
#         attempt_limit: 3

View File

@@ -1,57 +0,0 @@
name: Weblate checks
on:
pull_request:
branches: [main]
permissions: {}
jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run: ${{ steps.found_paths.outputs.i18n == 'true' && github.head_ref != 'chore/translations'}}
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
with:
filters: |
i18n:
- 'i18n/!(en)**\.json'
enforce-lock:
name: Check Weblate Lock
needs: [pre-job]
runs-on: ubuntu-latest
permissions: {}
if: ${{ needs.pre-job.outputs.should_run == 'true' }}
steps:
- name: Check weblate lock
run: |
if [[ "false" = $(curl https://hosted.weblate.org/api/components/immich/immich/lock/ | jq .locked) ]]; then
exit 1
fi
- name: Find Pull Request
uses: juliangruber/find-pull-request-action@952b3bb1ddb2dcc0aa3479e98bb1c2d1a922f096 # v1.10.0
id: find-pr
with:
branch: chore/translations
- name: Fail if existing weblate PR
if: ${{ steps.find-pr.outputs.number }}
run: exit 1
success-check-lock:
name: Weblate Lock Check Success
needs: [enforce-lock]
runs-on: ubuntu-latest
permissions: {}
if: always()
steps:
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
with:
needs: ${{ toJSON(needs) }}
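For readers who want to try the lock probe outside CI, a standalone TypeScript sketch of the same check (the endpoint and the `locked` field come from the workflow step above; everything else is illustrative):

// Hypothetical port of the "Check weblate lock" step above.
const response = await fetch('https://hosted.weblate.org/api/components/immich/immich/lock/');
const { locked } = (await response.json()) as { locked: boolean };
if (!locked) {
  console.error('Weblate component is not locked; failing the check.');
  process.exit(1);
}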

.gitignore vendored
View File

@@ -3,7 +3,6 @@
 .DS_Store
 .vscode/*
 !.vscode/launch.json
-!.vscode/extensions.json
 .idea
 docker/upload
@@ -22,6 +21,3 @@ mobile/openapi/.openapi-generator/FILES
 open-api/typescript-sdk/build
 mobile/android/fastlane/report.xml
 mobile/ios/fastlane/report.xml
-vite.config.js.timestamp-*
-.pnpm-store

View File

@@ -1,39 +0,0 @@
module.exports = {
hooks: {
readPackage: (pkg) => {
if (!pkg.name) {
return pkg;
}
switch (pkg.name) {
case "exiftool-vendored":
if (pkg.optionalDependencies["exiftool-vendored.pl"]) {
// make exiftool-vendored.pl a regular dependency
pkg.dependencies["exiftool-vendored.pl"] =
pkg.optionalDependencies["exiftool-vendored.pl"];
delete pkg.optionalDependencies["exiftool-vendored.pl"];
}
break;
case "sharp":
const optionalDeps = Object.keys(pkg.optionalDependencies).filter(
(dep) => dep.startsWith("@img")
);
for (const dep of optionalDeps) {
// remove all optionalDependencies from sharp (they will be compiled from source), except:
// include the precompiled musl version of sharp, for web
// include precompiled linux-x64 version of sharp, for server (stage: web-prod)
// include precompiled linux-arm64 version of sharp, for server (stage: web-prod)
if (
dep.includes("musl") ||
dep.includes("linux-x64") ||
dep.includes("linux-arm64")
) {
continue;
}
delete pkg.optionalDependencies[dep];
}
break;
}
return pkg;
},
},
};
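To illustrate the sharp rule in the hook above, a standalone TypeScript sketch of the same keep/drop logic applied to a made-up package object (names and versions are hypothetical):

// Keep only the musl, linux-x64 and linux-arm64 prebuilds; drop every other @img/* dep.
type Pkg = { name: string; optionalDependencies: Record<string, string> };

const keepPrebuild = (dep: string) =>
  dep.includes('musl') || dep.includes('linux-x64') || dep.includes('linux-arm64');

function filterSharp(pkg: Pkg): Pkg {
  for (const dep of Object.keys(pkg.optionalDependencies).filter((d) => d.startsWith('@img'))) {
    if (!keepPrebuild(dep)) {
      delete pkg.optionalDependencies[dep]; // compiled from source instead
    }
  }
  return pkg;
}

// Example (made-up versions): the darwin build is dropped, the other two survive.
console.log(
  filterSharp({
    name: 'sharp',
    optionalDependencies: {
      '@img/sharp-darwin-arm64': '0.0.0',
      '@img/sharp-linux-x64': '0.0.0',
      '@img/sharp-linuxmusl-arm64': '0.0.0',
    },
  }).optionalDependencies,
);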

View File

@@ -1,10 +0,0 @@
{
"recommendations": [
"esbenp.prettier-vscode",
"svelte.svelte-vscode",
"dbaeumer.vscode-eslint",
"dart-code.flutter",
"dart-code.dart-code",
"dcmdev.dcm-vscode-extension"
]
}

.vscode/launch.json vendored
View File

@@ -7,7 +7,7 @@
"restart": true, "restart": true,
"port": 9231, "port": 9231,
"name": "Immich API Server", "name": "Immich API Server",
"remoteRoot": "/usr/src/app/server", "remoteRoot": "/usr/src/app",
"localRoot": "${workspaceFolder}/server" "localRoot": "${workspaceFolder}/server"
}, },
{ {
@@ -16,7 +16,7 @@
"restart": true, "restart": true,
"port": 9230, "port": 9230,
"name": "Immich Workers", "name": "Immich Workers",
"remoteRoot": "/usr/src/app/server", "remoteRoot": "/usr/src/app",
"localRoot": "${workspaceFolder}/server" "localRoot": "${workspaceFolder}/server"
} }
] ]

.vscode/settings.json vendored
View File

@@ -1,64 +1,44 @@
 {
+  "editor.formatOnSave": true,
+  "[javascript]": {
+    "editor.defaultFormatter": "esbenp.prettier-vscode",
+    "editor.tabSize": 2,
+    "editor.formatOnSave": true
+  },
+  "[typescript]": {
+    "editor.defaultFormatter": "esbenp.prettier-vscode",
+    "editor.tabSize": 2,
+    "editor.formatOnSave": true
+  },
   "[css]": {
     "editor.defaultFormatter": "esbenp.prettier-vscode",
-    "editor.formatOnSave": true,
+    "editor.tabSize": 2,
+    "editor.formatOnSave": true
+  },
+  "[svelte]": {
+    "editor.defaultFormatter": "svelte.svelte-vscode",
     "editor.tabSize": 2
   },
+  "svelte.enable-ts-plugin": true,
+  "eslint.validate": [
+    "javascript",
+    "svelte"
+  ],
+  "typescript.preferences.importModuleSpecifier": "non-relative",
   "[dart]": {
-    "editor.defaultFormatter": "Dart-Code.dart-code",
     "editor.formatOnSave": true,
     "editor.selectionHighlight": false,
     "editor.suggest.snippetsPreventQuickSuggestions": false,
     "editor.suggestSelection": "first",
     "editor.tabCompletion": "onlySnippets",
-    "editor.wordBasedSuggestions": "off"
+    "editor.wordBasedSuggestions": "off",
+    "editor.defaultFormatter": "Dart-Code.dart-code"
   },
-  "[javascript]": {
-    "editor.codeActionsOnSave": {
-      "source.organizeImports": "explicit",
-      "source.removeUnusedImports": "explicit"
-    },
-    "editor.defaultFormatter": "esbenp.prettier-vscode",
-    "editor.formatOnSave": true,
-    "editor.tabSize": 2
-  },
-  "[json]": {
-    "editor.defaultFormatter": "esbenp.prettier-vscode",
-    "editor.formatOnSave": true,
-    "editor.tabSize": 2
-  },
-  "[jsonc]": {
-    "editor.defaultFormatter": "esbenp.prettier-vscode",
-    "editor.formatOnSave": true,
-    "editor.tabSize": 2
-  },
-  "[svelte]": {
-    "editor.codeActionsOnSave": {
-      "source.organizeImports": "explicit",
-      "source.removeUnusedImports": "explicit"
-    },
-    "editor.defaultFormatter": "svelte.svelte-vscode",
-    "editor.formatOnSave": true,
-    "editor.tabSize": 2
-  },
-  "[typescript]": {
-    "editor.codeActionsOnSave": {
-      "source.organizeImports": "explicit",
-      "source.removeUnusedImports": "explicit"
-    },
-    "editor.defaultFormatter": "esbenp.prettier-vscode",
-    "editor.formatOnSave": true,
-    "editor.tabSize": 2
-  },
-  "cSpell.words": ["immich"],
-  "editor.formatOnSave": true,
-  "eslint.validate": ["javascript", "svelte"],
+  "cSpell.words": [
+    "immich"
+  ],
   "explorer.fileNesting.enabled": true,
   "explorer.fileNesting.patterns": {
-    "*.dart": "${capture}.g.dart,${capture}.gr.dart,${capture}.drift.dart",
-    "*.ts": "${capture}.spec.ts,${capture}.mock.ts",
-    "package.json": "package-lock.json, yarn.lock, pnpm-lock.yaml, bun.lockb, bun.lock, pnpm-workspace.yaml, .pnpmfile.cjs"
-  },
-  "svelte.enable-ts-plugin": true,
-  "typescript.preferences.importModuleSpecifier": "non-relative"
+    "*.ts": "${capture}.spec.ts,${capture}.mock.ts"
+  }
 }

.vscode/tasks.json vendored
View File

@@ -1,72 +0,0 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "Fix Permissions, Install Dependencies",
"type": "shell",
"command": "[ -f /immich-devcontainer/container-start.sh ] && /immich-devcontainer/container-start.sh || exit 0",
"presentation": {
"echo": true,
"reveal": "always",
"focus": false,
"panel": "dedicated",
"showReuseMessage": true,
"clear": false,
"group": "Devcontainer tasks",
"close": true
},
"runOptions": {
"runOn": "default"
},
"problemMatcher": []
},
{
"label": "Immich API Server (Nest)",
"dependsOn": ["Fix Permissions, Install Dependencies"],
"type": "shell",
"command": "[ -f /immich-devcontainer/container-start-backend.sh ] && /immich-devcontainer/container-start-backend.sh || exit 0",
"presentation": {
"echo": true,
"reveal": "always",
"focus": false,
"panel": "dedicated",
"showReuseMessage": true,
"clear": false,
"group": "Devcontainer tasks",
"close": true
},
"runOptions": {
"runOn": "default"
},
"problemMatcher": []
},
{
"label": "Immich Web Server (Vite)",
"dependsOn": ["Fix Permissions, Install Dependencies"],
"type": "shell",
"command": "[ -f /immich-devcontainer/container-start-frontend.sh ] && /immich-devcontainer/container-start-frontend.sh || exit 0",
"presentation": {
"echo": true,
"reveal": "always",
"focus": false,
"panel": "dedicated",
"showReuseMessage": true,
"clear": false,
"group": "Devcontainer tasks",
"close": true
},
"runOptions": {
"runOn": "default"
},
"problemMatcher": []
},
{
"label": "Immich Server and Web",
"dependsOn": ["Immich Web Server (Vite)", "Immich API Server (Nest)"],
"runOptions": {
"runOn": "folderOpen"
},
"problemMatcher": []
}
]
}

Makefile
View File

@@ -1,36 +1,24 @@
 dev:
-	@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --remove-orphans
+	docker compose -f ./docker/docker-compose.dev.yml up --remove-orphans || make dev-down

 dev-down:
 	docker compose -f ./docker/docker-compose.dev.yml down --remove-orphans

 dev-update:
-	@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --build -V --remove-orphans
+	docker compose -f ./docker/docker-compose.dev.yml up --build -V --remove-orphans

 dev-scale:
-	@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans
+	docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans

-dev-docs:
-	npm --prefix docs run start

 .PHONY: e2e
 e2e:
-	@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
+	docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans

-e2e-update:
-	@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
-e2e-down:
-	docker compose -f ./e2e/docker-compose.yml down --remove-orphans

 prod:
-	@trap 'make prod-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans
+	docker compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans

-prod-down:
-	docker compose -f ./docker/docker-compose.prod.yml down --remove-orphans

 prod-scale:
-	@trap 'make prod-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans
+	docker compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans

 .PHONY: open-api
 open-api:
@@ -43,7 +31,7 @@ open-api-typescript:
 	cd ./open-api && bash ./bin/generate-open-api.sh typescript

 sql:
-	pnpm --filter immich run sync:sql
+	npm --prefix server run sync:sql

 attach-server:
 	docker exec -it docker_immich-server_1 sh
@@ -51,83 +39,47 @@ attach-server:
 renovate:
 	LOG_LEVEL=debug npx renovate --platform=local --repository-cache=reset

-MODULES = e2e server web cli sdk docs .github
+MODULES = e2e server web cli sdk

-# directory to package name mapping function
-#  cli = @immich/cli
-#  docs = documentation
-#  e2e = immich-e2e
-#  open-api/typescript-sdk = @immich/sdk
-#  server = immich
-#  web = immich-web
-map-package = $(subst sdk,@immich/sdk,$(subst cli,@immich/cli,$(subst docs,documentation,$(subst e2e,immich-e2e,$(subst server,immich,$(subst web,immich-web,$1))))))

 audit-%:
-	pnpm --filter $(call map-package,$*) audit fix
+	npm --prefix $(subst sdk,open-api/typescript-sdk,$*) audit fix

 install-%:
-	pnpm --filter $(call map-package,$*) install $(if $(FROZEN),--frozen-lockfile) $(if $(OFFLINE),--offline)
+	npm --prefix $(subst sdk,open-api/typescript-sdk,$*) i

 build-cli: build-sdk
 build-web: build-sdk

 build-%: install-%
-	pnpm --filter $(call map-package,$*) run build
+	npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run | grep 'build' >/dev/null \
+		&& npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run build || true

 format-%:
-	pnpm --filter $(call map-package,$*) run format:fix
+	npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run | grep 'format:fix' >/dev/null \
+		&& npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run format:fix || true

 lint-%:
-	pnpm --filter $(call map-package,$*) run lint:fix
+	npm --prefix $* run lint:fix

-lint-web:
-	pnpm --filter $(call map-package,$*) run lint:p

 check-%:
-	pnpm --filter $(call map-package,$*) run check
+	npm --prefix $* run check

 check-web:
-	pnpm --filter immich-web run check:typescript
-	pnpm --filter immich-web run check:svelte
+	npm --prefix web run check:typescript
+	npm --prefix web run check:svelte

 test-%:
-	pnpm --filter $(call map-package,$*) run test
+	npm --prefix $* run test

 test-e2e:
 	docker compose -f ./e2e/docker-compose.yml build
-	pnpm --filter immich-e2e run test
-	pnpm --filter immich-e2e run test:web
+	npm --prefix e2e run test
+	npm --prefix e2e run test:web

-test-medium:
-	docker run \
-		--rm \
-		-v ./server/src:/usr/src/app/src \
-		-v ./server/test:/usr/src/app/test \
-		-v ./server/vitest.config.medium.mjs:/usr/src/app/vitest.config.medium.mjs \
-		-v ./server/tsconfig.json:/usr/src/app/tsconfig.json \
-		-e NODE_ENV=development \
-		immich-server:latest \
-		-c "pnpm test:medium -- --run"
-test-medium-dev:
-	docker exec -it immich_server /bin/sh -c "pnpm run test:medium"

-install-all:
-	pnpm -r --filter '!documentation' install
-build-all: $(foreach M,$(filter-out e2e docs .github,$(MODULES)),build-$M) ;
-check-all:
-	pnpm -r --filter '!documentation' run "/^(check|check\:svelte|check\:typescript)$/"
-lint-all:
-	pnpm -r --filter '!documentation' run lint:fix
-format-all:
-	pnpm -r --filter '!documentation' run format:fix
-audit-all:
-	pnpm -r --filter '!documentation' audit fix
-hygiene-all: audit-all
-	pnpm -r --filter '!documentation' run "/(format:fix|check|check:svelte|check:typescript|sql)/"
-test-all:
-	pnpm -r --filter '!documentation' run "/^test/"
+build-all: $(foreach M,$(MODULES),build-$M) ;
+install-all: $(foreach M,$(MODULES),install-$M) ;
+check-all: $(foreach M,$(MODULES),check-$M) ;
+lint-all: $(foreach M,$(MODULES),lint-$M) ;
+format-all: $(foreach M,$(MODULES),format-$M) ;
+audit-all: $(foreach M,$(MODULES),audit-$M) ;
+hygiene-all: lint-all format-all check-all sql audit-all;
+test-all: $(foreach M,$(MODULES),test-$M) ;

 clean:
-	find . -name "node_modules" -type d -prune -exec rm -rf {} +
+	find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
 	find . -name "dist" -type d -prune -exec rm -rf '{}' +
 	find . -name "build" -type d -prune -exec rm -rf '{}' +
-	find . -name ".svelte-kit" -type d -prune -exec rm -rf '{}' +
-	find . -name "coverage" -type d -prune -exec rm -rf '{}' +
-	find . -name ".pnpm-store" -type d -prune -exec rm -rf '{}' +
-	command -v docker >/dev/null 2>&1 && docker compose -f ./docker/docker-compose.dev.yml rm -v -f || true
-	command -v docker >/dev/null 2>&1 && docker compose -f ./e2e/docker-compose.yml rm -v -f || true
+	find . -name "svelte-kit" -type d -prune -exec rm -rf '{}' +
+	docker compose -f ./docker/docker-compose.dev.yml rm -v -f || true
+	docker compose -f ./e2e/docker-compose.yml rm -v -f || true
+
+setup-server-dev: install-server
+setup-web-dev: install-sdk build-sdk install-web
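The map-package helper removed above is just a chain of substitutions from directory names to package names; a TypeScript restatement of the mapping documented in its comment block (purely illustrative, not how Make evaluates it):

// Hypothetical restatement of the Makefile's map-package mapping.
const table: Record<string, string> = {
  cli: '@immich/cli',
  docs: 'documentation',
  e2e: 'immich-e2e',
  sdk: '@immich/sdk',
  server: 'immich',
  web: 'immich-web',
};
const mapPackage = (dir: string): string => table[dir] ?? dir;

console.log(mapPackage('web')); // "immich-web", so `make build-web` ran `pnpm --filter immich-web run build`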

View File

@@ -1,11 +1,11 @@
 <p align="center">
   <br/>
   <a href="https://opensource.org/license/agpl-v3"><img src="https://img.shields.io/badge/License-AGPL_v3-blue.svg?color=3F51B5&style=for-the-badge&label=License&logoColor=000000&labelColor=ececec" alt="License: AGPLv3"></a>
   <a href="https://discord.immich.app">
     <img src="https://img.shields.io/discord/979116623879368755.svg?label=Discord&logo=Discord&style=for-the-badge&logoColor=000000&labelColor=ececec" alt="Discord"/>
   </a>
   <br/>
   <br/>
 </p>
 <p align="center">
@@ -17,25 +17,23 @@
<img src="design/immich-screenshots.png" title="Main Screenshot"> <img src="design/immich-screenshots.png" title="Main Screenshot">
</a> </a>
<br/> <br/>
<p align="center"> <p align="center">
<a href="readme_i18n/README_ca_ES.md">Català</a>
<a href="readme_i18n/README_es_ES.md">Español</a> <a href="readme_i18n/README_ca_ES.md">Català</a>
<a href="readme_i18n/README_fr_FR.md">Français</a> <a href="readme_i18n/README_es_ES.md">Español</a>
<a href="readme_i18n/README_it_IT.md">Italiano</a> <a href="readme_i18n/README_fr_FR.md">Français</a>
<a href="readme_i18n/README_ja_JP.md">日本語</a> <a href="readme_i18n/README_it_IT.md">Italiano</a>
<a href="readme_i18n/README_ko_KR.md">한국어</a> <a href="readme_i18n/README_ja_JP.md">日本語</a>
<a href="readme_i18n/README_de_DE.md">Deutsch</a> <a href="readme_i18n/README_ko_KR.md">한국어</a>
<a href="readme_i18n/README_nl_NL.md">Nederlands</a> <a href="readme_i18n/README_de_DE.md">Deutsch</a>
<a href="readme_i18n/README_tr_TR.md">Türkçe</a> <a href="readme_i18n/README_nl_NL.md">Nederlands</a>
<a href="readme_i18n/README_zh_CN.md">中文</a> <a href="readme_i18n/README_tr_TR.md">Türkçe</a>
<a href="readme_i18n/README_uk_UA.md">Українська</a> <a href="readme_i18n/README_zh_CN.md">中文</a>
<a href="readme_i18n/README_ru_RU.md">Русский</a> <a href="readme_i18n/README_ru_RU.md">Русский</a>
<a href="readme_i18n/README_pt_BR.md">Português Brasileiro</a> <a href="readme_i18n/README_pt_BR.md">Português Brasileiro</a>
<a href="readme_i18n/README_sv_SE.md">Svenska</a> <a href="readme_i18n/README_sv_SE.md">Svenska</a>
<a href="readme_i18n/README_ar_JO.md">العربية</a> <a href="readme_i18n/README_ar_JO.md">العربية</a>
<a href="readme_i18n/README_vi_VN.md">Tiếng Việt</a>
<a href="readme_i18n/README_th_TH.md">ภาษาไทย</a>
</p> </p>
## Disclaimer ## Disclaimer
@@ -61,7 +59,9 @@
 ## Demo

-Access the demo [here](https://demo.immich.app). For the mobile app, you can use `https://demo.immich.app` for the `Server Endpoint URL`.
+Access the demo [here](https://demo.immich.app). The demo is running on a Free-tier Oracle VM in Amsterdam with a 2.4Ghz quad-core ARM64 CPU and 24GB RAM.
+
+For the mobile app, you can use `https://demo.immich.app/api` for the `Server Endpoint URL`

 ### Login credentials
@@ -101,8 +101,6 @@ Access the demo [here](https://demo.immich.app). For the mobile app, you can use
 | Offline support | Yes | No |
 | Read-only gallery | Yes | Yes |
 | Stacked Photos | Yes | Yes |
-| Tags | No | Yes |
-| Folder View | Yes | Yes |

 ## Translations

View File

@@ -1 +1 @@
-22.18.0
+20.17.0

View File

@@ -1,14 +1,19 @@
-FROM node:22.16.0-alpine3.20@sha256:2289fb1fba0f4633b08ec47b94a89c7e20b829fc5679f9b7b298eaa2f1ed8b7e AS core
+FROM node:20.17.0-alpine3.20@sha256:2d07db07a2df6830718ae2a47db6fedce6745f5bcd174c398f2acdda90a11c03 AS core

+WORKDIR /usr/src/open-api/typescript-sdk
+COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
+RUN npm ci
+COPY open-api/typescript-sdk/ ./
+RUN npm run build

 WORKDIR /usr/src/app
-COPY package* pnpm* .pnpmfile.cjs ./
-COPY ./cli ./cli/
-COPY ./open-api/typescript-sdk ./open-api/typescript-sdk/
-RUN corepack enable pnpm && \
-    pnpm install --filter @immich/sdk --filter @immich/cli --frozen-lockfile && \
-    pnpm --filter @immich/sdk build && \
-    pnpm --filter @immich/cli build
+COPY cli/package.json cli/package-lock.json ./
+RUN npm ci
+COPY cli .
+RUN npm run build

 WORKDIR /import

-ENTRYPOINT ["node", "/usr/src/app/cli/dist"]
+ENTRYPOINT ["node", "/usr/src/app/dist"]

View File

@@ -1,2 +0,0 @@
#!/usr/bin/env node
import '../dist/index.js';

View File

@@ -1,29 +1,39 @@
+import { FlatCompat } from '@eslint/eslintrc';
 import js from '@eslint/js';
-import eslintPluginPrettierRecommended from 'eslint-plugin-prettier/recommended';
-import eslintPluginUnicorn from 'eslint-plugin-unicorn';
+import typescriptEslint from '@typescript-eslint/eslint-plugin';
+import tsParser from '@typescript-eslint/parser';
 import globals from 'globals';
 import path from 'node:path';
 import { fileURLToPath } from 'node:url';
-import typescriptEslint from 'typescript-eslint';

 const __filename = fileURLToPath(import.meta.url);
 const __dirname = path.dirname(__filename);

+const compat = new FlatCompat({
+  baseDirectory: __dirname,
+  recommendedConfig: js.configs.recommended,
+  allConfig: js.configs.all,
+});

-export default typescriptEslint.config([
-  eslintPluginUnicorn.configs.recommended,
-  eslintPluginPrettierRecommended,
-  js.configs.recommended,
-  typescriptEslint.configs.recommended,
+export default [
   {
     ignores: ['eslint.config.mjs', 'dist'],
   },
+  ...compat.extends(
+    'plugin:@typescript-eslint/recommended',
+    'plugin:prettier/recommended',
+    'plugin:unicorn/recommended',
+  ),
   {
+    plugins: {
+      '@typescript-eslint': typescriptEslint,
+    },
     languageOptions: {
       globals: {
         ...globals.node,
       },
-      parser: typescriptEslint.parser,
+      parser: tsParser,
       ecmaVersion: 5,
       sourceType: 'module',
@@ -48,4 +58,4 @@ export default typescriptEslint.config([
       'object-shorthand': ['error', 'always'],
     },
   },
-]);
+];

cli/package-lock.json generated Normal file

File diff suppressed because it is too large.

View File

@@ -1,11 +1,11 @@
 {
   "name": "@immich/cli",
-  "version": "2.2.81",
+  "version": "2.2.19",
   "description": "Command Line Interface (CLI) for Immich",
   "type": "module",
   "exports": "./dist/index.js",
   "bin": {
-    "immich": "./bin/immich"
+    "immich": "dist/index.js"
   },
   "license": "GNU Affero General Public License version 3",
   "keywords": [
@@ -19,27 +19,27 @@
"@types/byte-size": "^8.1.0", "@types/byte-size": "^8.1.0",
"@types/cli-progress": "^3.11.0", "@types/cli-progress": "^3.11.0",
"@types/lodash-es": "^4.17.12", "@types/lodash-es": "^4.17.12",
"@types/micromatch": "^4.0.9",
"@types/mock-fs": "^4.13.1", "@types/mock-fs": "^4.13.1",
"@types/node": "^22.17.1", "@types/node": "^20.16.5",
"@vitest/coverage-v8": "^3.0.0", "@typescript-eslint/eslint-plugin": "^8.0.0",
"@typescript-eslint/parser": "^8.0.0",
"@vitest/coverage-v8": "^2.0.5",
"byte-size": "^9.0.0", "byte-size": "^9.0.0",
"cli-progress": "^3.12.0", "cli-progress": "^3.12.0",
"commander": "^12.0.0", "commander": "^12.0.0",
"eslint": "^9.14.0", "eslint": "^9.0.0",
"eslint-config-prettier": "^10.1.8", "eslint-config-prettier": "^9.1.0",
"eslint-plugin-prettier": "^5.1.3", "eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^60.0.0", "eslint-plugin-unicorn": "^55.0.0",
"globals": "^16.0.0", "globals": "^15.9.0",
"mock-fs": "^5.2.0", "mock-fs": "^5.2.0",
"prettier": "^3.2.5", "prettier": "^3.2.5",
"prettier-plugin-organize-imports": "^4.0.0", "prettier-plugin-organize-imports": "^4.0.0",
"typescript": "^5.3.3", "typescript": "^5.3.3",
"typescript-eslint": "^8.28.0", "vite": "^5.0.12",
"vite": "^7.0.0",
"vite-tsconfig-paths": "^5.0.0", "vite-tsconfig-paths": "^5.0.0",
"vitest": "^3.0.0", "vitest": "^2.0.5",
"vitest-fetch-mock": "^0.4.0", "vitest-fetch-mock": "^0.3.0",
"yaml": "^2.3.1" "yaml": "^2.3.1"
}, },
"scripts": { "scripts": {
@@ -62,13 +62,11 @@
"node": ">=20.0.0" "node": ">=20.0.0"
}, },
"dependencies": { "dependencies": {
"chokidar": "^4.0.3",
"fast-glob": "^3.3.2", "fast-glob": "^3.3.2",
"fastq": "^1.17.1", "fastq": "^1.17.1",
"lodash-es": "^4.17.21", "lodash-es": "^4.17.21"
"micromatch": "^4.0.8"
}, },
"volta": { "volta": {
"node": "22.18.0" "node": "20.17.0"
} }
} }

View File

@@ -1,13 +1,12 @@
 import * as fs from 'node:fs';
 import * as os from 'node:os';
 import * as path from 'node:path';
-import { setTimeout as sleep } from 'node:timers/promises';
-import { describe, expect, it, MockedFunction, vi } from 'vitest';
+import { describe, expect, it, vi } from 'vitest';

-import { Action, checkBulkUpload, defaults, getSupportedMediaTypes, Reason } from '@immich/sdk';
+import { Action, checkBulkUpload, defaults, Reason } from '@immich/sdk';
 import createFetchMock from 'vitest-fetch-mock';

-import { checkForDuplicates, getAlbumName, startWatch, uploadFiles, UploadOptionsDto } from 'src/commands/asset';
+import { checkForDuplicates, getAlbumName, uploadFiles, UploadOptionsDto } from './asset';

 vi.mock('@immich/sdk');
@@ -200,112 +199,3 @@ describe('checkForDuplicates', () => {
     });
   });
 });
describe('startWatch', () => {
let testFolder: string;
let checkBulkUploadMocked: MockedFunction<typeof checkBulkUpload>;
beforeEach(async () => {
vi.restoreAllMocks();
vi.mocked(getSupportedMediaTypes).mockResolvedValue({
image: ['.jpg'],
sidecar: ['.xmp'],
video: ['.mp4'],
});
testFolder = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'test-startWatch-'));
checkBulkUploadMocked = vi.mocked(checkBulkUpload);
checkBulkUploadMocked.mockResolvedValue({
results: [],
});
});
it('should start watching a directory and upload new files', async () => {
const testFilePath = path.join(testFolder, 'test.jpg');
await startWatch([testFolder], { concurrency: 1 }, { batchSize: 1, debounceTimeMs: 10 });
await sleep(100); // to debounce the watcher from considering the test file as an existing file
await fs.promises.writeFile(testFilePath, 'testjpg');
await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
expect(checkBulkUpload).toHaveBeenCalledWith({
assetBulkUploadCheckDto: {
assets: [
expect.objectContaining({
id: testFilePath,
}),
],
},
});
});
it('should filter out unsupported files', async () => {
const testFilePath = path.join(testFolder, 'test.jpg');
const unsupportedFilePath = path.join(testFolder, 'test.txt');
await startWatch([testFolder], { concurrency: 1 }, { batchSize: 1, debounceTimeMs: 10 });
await sleep(100); // to debounce the watcher from considering the test file as an existing file
await fs.promises.writeFile(testFilePath, 'testjpg');
await fs.promises.writeFile(unsupportedFilePath, 'testtxt');
await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
expect(checkBulkUpload).toHaveBeenCalledWith({
assetBulkUploadCheckDto: {
assets: expect.arrayContaining([
expect.objectContaining({
id: testFilePath,
}),
]),
},
});
expect(checkBulkUpload).not.toHaveBeenCalledWith({
assetBulkUploadCheckDto: {
assets: expect.arrayContaining([
expect.objectContaining({
id: unsupportedFilePath,
}),
]),
},
});
});
it('should filter out ignored patterns', async () => {
const testFilePath = path.join(testFolder, 'test.jpg');
const ignoredPattern = 'ignored';
const ignoredFolder = path.join(testFolder, ignoredPattern);
await fs.promises.mkdir(ignoredFolder, { recursive: true });
const ignoredFilePath = path.join(ignoredFolder, 'ignored.jpg');
await startWatch([testFolder], { concurrency: 1, ignore: ignoredPattern }, { batchSize: 1, debounceTimeMs: 10 });
await sleep(100); // to debounce the watcher from considering the test file as an existing file
await fs.promises.writeFile(testFilePath, 'testjpg');
await fs.promises.writeFile(ignoredFilePath, 'ignoredjpg');
await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
expect(checkBulkUpload).toHaveBeenCalledWith({
assetBulkUploadCheckDto: {
assets: expect.arrayContaining([
expect.objectContaining({
id: testFilePath,
}),
]),
},
});
expect(checkBulkUpload).not.toHaveBeenCalledWith({
assetBulkUploadCheckDto: {
assets: expect.arrayContaining([
expect.objectContaining({
id: ignoredFilePath,
}),
]),
},
});
});
afterEach(async () => {
await fs.promises.rm(testFolder, { recursive: true, force: true });
});
});

View File

@@ -1,6 +1,5 @@
 import {
   Action,
-  AssetBulkUploadCheckItem,
   AssetBulkUploadCheckResult,
   AssetMediaResponseDto,
   AssetMediaStatus,
@@ -12,18 +11,13 @@ import {
   getSupportedMediaTypes,
 } from '@immich/sdk';
 import byteSize from 'byte-size';
-import { Matcher, watch as watchFs } from 'chokidar';
-import { MultiBar, Presets, SingleBar } from 'cli-progress';
+import { Presets, SingleBar } from 'cli-progress';
 import { chunk } from 'lodash-es';
-import micromatch from 'micromatch';
 import { Stats, createReadStream } from 'node:fs';
 import { stat, unlink } from 'node:fs/promises';
 import path, { basename } from 'node:path';
 import { Queue } from 'src/queue';
-import { BaseOptions, Batcher, authenticate, crawl, sha1 } from 'src/utils';
+import { BaseOptions, authenticate, crawl, sha1 } from 'src/utils';

-const UPLOAD_WATCH_BATCH_SIZE = 100;
-const UPLOAD_WATCH_DEBOUNCE_TIME_MS = 10_000;

 const s = (count: number) => (count === 1 ? '' : 's');
@@ -41,9 +35,6 @@ export interface UploadOptionsDto {
   albumName?: string;
   includeHidden?: boolean;
   concurrency: number;
-  progress?: boolean;
-  watch?: boolean;
-  jsonOutput?: boolean;
 }

 class UploadFile extends File {
@@ -63,100 +54,19 @@ class UploadFile extends File {
   }
 }
const uploadBatch = async (files: string[], options: UploadOptionsDto) => {
const { newFiles, duplicates } = await checkForDuplicates(files, options);
const newAssets = await uploadFiles(newFiles, options);
if (options.jsonOutput) {
console.log(JSON.stringify({ newFiles, duplicates, newAssets }, undefined, 4));
}
await updateAlbums([...newAssets, ...duplicates], options);
await deleteFiles(
newAssets.map(({ filepath }) => filepath),
options,
);
};
export const startWatch = async (
paths: string[],
options: UploadOptionsDto,
{
batchSize = UPLOAD_WATCH_BATCH_SIZE,
debounceTimeMs = UPLOAD_WATCH_DEBOUNCE_TIME_MS,
}: { batchSize?: number; debounceTimeMs?: number } = {},
) => {
const watcherIgnored: Matcher[] = [];
const { image, video } = await getSupportedMediaTypes();
const extensions = new Set([...image, ...video]);
if (options.ignore) {
watcherIgnored.push((path) => micromatch.contains(path, `**/${options.ignore}`));
}
const pathsBatcher = new Batcher<string>({
batchSize,
debounceTimeMs,
onBatch: async (paths: string[]) => {
const uniquePaths = [...new Set(paths)];
await uploadBatch(uniquePaths, options);
},
});
const onFile = async (path: string, stats?: Stats) => {
if (stats?.isDirectory()) {
return;
}
const ext = '.' + path.split('.').pop()?.toLowerCase();
if (!ext || !extensions.has(ext)) {
return;
}
if (!options.progress) {
// only log when the progress bar is disabled, since logging can break the progress bar rendering
console.log(`Change detected: ${path}`);
}
pathsBatcher.add(path);
};
const fsWatcher = watchFs(paths, {
ignoreInitial: true,
ignored: watcherIgnored,
alwaysStat: true,
awaitWriteFinish: true,
depth: options.recursive ? undefined : 1,
persistent: true,
})
.on('add', onFile)
.on('change', onFile)
.on('error', (error) => console.error(`Watcher error: ${error}`));
process.on('SIGINT', async () => {
console.log('Exiting...');
await fsWatcher.close();
process.exit();
});
};
 export const upload = async (paths: string[], baseOptions: BaseOptions, options: UploadOptionsDto) => {
   await authenticate(baseOptions);

   const scanFiles = await scan(paths, options);

   if (scanFiles.length === 0) {
-    if (options.watch) {
-      console.log('No files found initially.');
-    } else {
-      console.log('No files found, exiting');
-      return;
-    }
+    console.log('No files found, exiting');
+    return;
   }

-  if (options.watch) {
-    console.log('Watching for changes...');
-    await startWatch(paths, options);
-    // watcher does not handle the initial scan
-    // as the scan() is a more efficient quick start with batched results
-  }
-
-  await uploadBatch(scanFiles, options);
+  const { newFiles, duplicates } = await checkForDuplicates(scanFiles, options);
+  const newAssets = await uploadFiles(newFiles, options);
+  await updateAlbums([...newAssets, ...duplicates], options);
+  await deleteFiles(newFiles, options);
 };

 const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
@@ -174,35 +84,29 @@ const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
   return files;
 };
-export const checkForDuplicates = async (files: string[], { concurrency, skipHash, progress }: UploadOptionsDto) => {
+export const checkForDuplicates = async (files: string[], { concurrency, skipHash }: UploadOptionsDto) => {
   if (skipHash) {
     console.log('Skipping hash check, assuming all files are new');
     return { newFiles: files, duplicates: [] };
   }

-  let multiBar: MultiBar | undefined;
-
-  if (progress) {
-    multiBar = new MultiBar(
-      { format: '{message} | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
-      Presets.shades_classic,
-    );
-  } else {
-    console.log(`Received ${files.length} files, hashing...`);
-  }
-
-  const hashProgressBar = multiBar?.create(files.length, 0, { message: 'Hashing files ' });
-  const checkProgressBar = multiBar?.create(files.length, 0, { message: 'Checking for duplicates' });
+  const progressBar = new SingleBar(
+    { format: 'Checking files | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
+    Presets.shades_classic,
+  );
+  progressBar.start(files.length, 0);

   const newFiles: string[] = [];
   const duplicates: Asset[] = [];

-  const checkBulkUploadQueue = new Queue<AssetBulkUploadCheckItem[], void>(
-    async (assets: AssetBulkUploadCheckItem[]) => {
-      const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets } });
+  const queue = new Queue<string[], AssetBulkUploadCheckResults>(
+    async (filepaths: string[]) => {
+      const dto = await Promise.all(
+        filepaths.map(async (filepath) => ({ id: filepath, checksum: await sha1(filepath) })),
+      );
+      const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: dto } });

       const results = response.results as AssetBulkUploadCheckResults;
       for (const { id: filepath, assetId, action } of results) {
         if (action === Action.Accept) {
           newFiles.push(filepath);
@@ -211,46 +115,19 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
           duplicates.push({ id: assetId as string, filepath });
         }
       }
-      checkProgressBar?.increment(assets.length);
-    },
-    { concurrency, retry: 3 },
-  );
-
-  const results: { id: string; checksum: string }[] = [];
-  let checkBulkUploadRequests: AssetBulkUploadCheckItem[] = [];
-  const queue = new Queue<string, AssetBulkUploadCheckItem[]>(
-    async (filepath: string): Promise<AssetBulkUploadCheckItem[]> => {
-      const dto = { id: filepath, checksum: await sha1(filepath) };
-      results.push(dto);
-      checkBulkUploadRequests.push(dto);
-      if (checkBulkUploadRequests.length === 5000) {
-        const batch = checkBulkUploadRequests;
-        checkBulkUploadRequests = [];
-        void checkBulkUploadQueue.push(batch);
-      }
-      hashProgressBar?.increment();
+      progressBar.increment(filepaths.length);

       return results;
     },
     { concurrency, retry: 3 },
   );

-  for (const item of files) {
-    void queue.push(item);
+  for (const items of chunk(files, concurrency)) {
+    await queue.push(items);
   }

   await queue.drained();

-  if (checkBulkUploadRequests.length > 0) {
-    void checkBulkUploadQueue.push(checkBulkUploadRequests);
-  }
-  await checkBulkUploadQueue.drained();
-  multiBar?.stop();
+  progressBar.stop();

   console.log(`Found ${newFiles.length} new files and ${duplicates.length} duplicate${s(duplicates.length)}`);
@@ -266,10 +143,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
   return { newFiles, duplicates };
 };

-export const uploadFiles = async (
-  files: string[],
-  { dryRun, concurrency, progress }: UploadOptionsDto,
-): Promise<Asset[]> => {
+export const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptionsDto): Promise<Asset[]> => {
   if (files.length === 0) {
     console.log('All assets were already uploaded, nothing to do.');
     return [];
@@ -289,20 +163,12 @@ export const uploadFiles = async (
     return files.map((filepath) => ({ id: '', filepath }));
   }

-  let uploadProgress: SingleBar | undefined;
-
-  if (progress) {
-    uploadProgress = new SingleBar(
-      {
-        format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}',
-      },
-      Presets.shades_classic,
-    );
-  } else {
-    console.log(`Uploading ${files.length} asset${s(files.length)} (${byteSize(totalSize)})`);
-  }
-  uploadProgress?.start(totalSize, 0);
-  uploadProgress?.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });
+  const uploadProgress = new SingleBar(
+    { format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}' },
+    Presets.shades_classic,
+  );
+  uploadProgress.start(totalSize, 0);
+  uploadProgress.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });

   let duplicateCount = 0;
   let duplicateSize = 0;
@@ -328,20 +194,20 @@ export const uploadFiles = async (
         successSize += stats.size ?? 0;
       }

-      uploadProgress?.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });
+      uploadProgress.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });

       return response;
     },
     { concurrency, retry: 3 },
   );

-  for (const item of files) {
-    void queue.push(item);
+  for (const filepath of files) {
+    await queue.push(filepath);
   }

   await queue.drained();
-  uploadProgress?.stop();
+  uploadProgress.stop();

   console.log(`Successfully uploaded ${successCount} new asset${s(successCount)} (${byteSize(successSize)})`);

   if (duplicateCount > 0) {
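For context on the `--watch` flag wired up in `cli/src/index.ts` below, a minimal sketch of calling the pnpm-era startWatch shown above directly (the call shape mirrors the spec earlier in this diff; path and option values are made up, and I am assuming `recursive` lives on UploadOptionsDto since startWatch reads it):

// Hypothetical invocation of startWatch; third argument mirrors the
// UPLOAD_WATCH_BATCH_SIZE / UPLOAD_WATCH_DEBOUNCE_TIME_MS defaults above.
import { startWatch, UploadOptionsDto } from 'src/commands/asset';

const options = { concurrency: 4, recursive: true, progress: false } as UploadOptionsDto;
await startWatch(['/photos'], options, { batchSize: 100, debounceTimeMs: 10_000 });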

View File

@@ -68,19 +68,7 @@ program
       .env('IMMICH_UPLOAD_CONCURRENCY')
       .default(4),
   )
-  .addOption(
-    new Option('-j, --json-output', 'Output detailed information in json format')
-      .env('IMMICH_JSON_OUTPUT')
-      .default(false),
-  )
   .addOption(new Option('--delete', 'Delete local assets after upload').env('IMMICH_DELETE_ASSETS'))
-  .addOption(new Option('--no-progress', 'Hide progress bars').env('IMMICH_PROGRESS_BAR').default(true))
-  .addOption(
-    new Option('--watch', 'Watch for changes and upload automatically')
-      .env('IMMICH_WATCH_CHANGES')
-      .default(false)
-      .implies({ progress: false }),
-  )
   .argument('[paths...]', 'One or more paths to assets to be uploaded')
   .action((paths, options) => upload(paths, program.opts(), options));

View File

@@ -72,8 +72,8 @@ export class Queue<T, R> {
    * @returns Promise<void> - The returned Promise will be resolved when all tasks in the queue have been processed by a worker.
    *   This promise could be ignored as it will not lead to a `unhandledRejection`.
    */
-  drained(): Promise<void> {
-    return this.queue.drained();
+  async drained(): Promise<void> {
+    await this.queue.drain();
   }

   /**
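Whichever side of the drained() change above you are on, the consuming pattern is the same; a minimal sketch with an illustrative worker body, mirroring the push/drained() call sites in `asset.ts` earlier in this diff:

import { Queue } from 'src/queue';

// Illustrative worker body; concurrency/retry values match the call sites above.
const queue = new Queue<string, void>(
  async (filepath) => {
    console.log(`processing ${filepath}`);
  },
  { concurrency: 4, retry: 3 },
);

for (const item of ['a.jpg', 'b.jpg']) {
  void queue.push(item); // fire-and-forget; safe to ignore per the docstring above
}
await queue.drained(); // resolves once every pushed task has been processed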

View File

@@ -1,13 +1,11 @@
 import mockfs from 'mock-fs';
 import { readFileSync } from 'node:fs';
-import { Batcher, CrawlOptions, crawl } from 'src/utils';
-import { Mock } from 'vitest';
+import { CrawlOptions, crawl } from 'src/utils';

 interface Test {
   test: string;
   options: Omit<CrawlOptions, 'extensions'>;
   files: Record<string, boolean>;
-  skipOnWin32?: boolean;
 }

 const cwd = process.cwd();
@@ -50,18 +48,6 @@ const tests: Test[] = [
       '/photos/image.jpg': true,
     },
   },
-  {
-    test: 'should crawl folders with quotes',
-    options: {
-      pathsToCrawl: ["/photo's/", '/photo"s/', '/photo`s/'],
-    },
-    files: {
-      "/photo's/image1.jpg": true,
-      '/photo"s/image2.jpg': true,
-      '/photo`s/image3.jpg': true,
-    },
-    skipOnWin32: true, // single quote interferes with mockfs root on Windows
-  },
   {
     test: 'should crawl a single file',
     options: {
@@ -129,7 +115,17 @@ const tests: Test[] = [
       '/albums/image3.jpg': true,
     },
   },
+  {
+    test: 'should support globbing paths',
+    options: {
+      pathsToCrawl: ['/photos*'],
+    },
+    files: {
+      '/photos1/image1.jpg': true,
+      '/photos2/image2.jpg': true,
+      '/images/image3.jpg': false,
+    },
+  },
   {
     test: 'should crawl a single path without trailing slash',
     options: {
@@ -284,12 +280,8 @@ describe('crawl', () => {
 });

 describe('crawl', () => {
-  for (const { test: name, options, files, skipOnWin32 } of tests) {
-    if (process.platform === 'win32' && skipOnWin32) {
-      test.skip(name);
-      continue;
-    }
-    it(name, async () => {
+  for (const { test, options, files } of tests) {
+    it(test, async () => {
       // The file contents are the same as the path.
       mockfs(Object.fromEntries(Object.keys(files).map((file) => [file, file])));
@@ -304,38 +296,3 @@ describe('crawl', () => {
     }
   });
 });
describe('Batcher', () => {
let batcher: Batcher;
let onBatch: Mock;
beforeEach(() => {
onBatch = vi.fn();
batcher = new Batcher({ batchSize: 2, onBatch });
});
it('should trigger onBatch() when a batch limit is reached', async () => {
batcher.add('a');
batcher.add('b');
batcher.add('c');
expect(onBatch).toHaveBeenCalledOnce();
expect(onBatch).toHaveBeenCalledWith(['a', 'b']);
});
it('should trigger onBatch() when flush() is called', async () => {
batcher.add('a');
batcher.flush();
expect(onBatch).toHaveBeenCalledOnce();
expect(onBatch).toHaveBeenCalledWith(['a']);
});
it('should trigger onBatch() when debounce time reached', async () => {
vi.useFakeTimers();
batcher = new Batcher({ batchSize: 2, debounceTimeMs: 100, onBatch });
batcher.add('a');
expect(onBatch).not.toHaveBeenCalled();
vi.advanceTimersByTime(200);
expect(onBatch).toHaveBeenCalledOnce();
expect(onBatch).toHaveBeenCalledWith(['a']);
vi.useRealTimers();
});
});

View File

@@ -141,21 +141,25 @@ export const crawl = async (options: CrawlOptions): Promise<string[]> => {
     }
   }

-  if (patterns.length === 0) {
+  let searchPattern: string;
+  if (patterns.length === 1) {
+    searchPattern = patterns[0];
+  } else if (patterns.length === 0) {
     return crawledFiles;
+  } else {
+    searchPattern = '{' + patterns.join(',') + '}';
   }

-  const searchPatterns = patterns.map((pattern) => {
-    let escapedPattern = pattern.replaceAll("'", "[']").replaceAll('"', '["]').replaceAll('`', '[`]');
-    if (recursive) {
-      escapedPattern = escapedPattern + '/**';
-    }
-    return `${escapedPattern}/*.{${extensions.join(',')}}`;
-  });
+  if (recursive) {
+    searchPattern = searchPattern + '/**/';
+  }

-  const globbedFiles = await glob(searchPatterns, {
+  searchPattern = `${searchPattern}/*.{${extensions.join(',')}}`;
+  const globbedFiles = await glob(searchPattern, {
     absolute: true,
     caseSensitiveMatch: false,
-    onlyFiles: true,
     dot: includeHidden,
     ignore: [`**/${exclusionPattern}`],
   });
@@ -172,64 +176,3 @@ export const sha1 = (filepath: string) => {
     rs.on('end', () => resolve(hash.digest('hex')));
   });
 };
/**
* Batches items and calls onBatch to process them
* when the batch size is reached or the debounce time has passed.
*/
export class Batcher<T = unknown> {
private items: T[] = [];
private readonly batchSize: number;
private readonly debounceTimeMs?: number;
private readonly onBatch: (items: T[]) => void;
private debounceTimer?: NodeJS.Timeout;
constructor({
batchSize,
debounceTimeMs,
onBatch,
}: {
batchSize: number;
debounceTimeMs?: number;
onBatch: (items: T[]) => Promise<void>;
}) {
this.batchSize = batchSize;
this.debounceTimeMs = debounceTimeMs;
this.onBatch = onBatch;
}
private setDebounceTimer() {
if (this.debounceTimer) {
clearTimeout(this.debounceTimer);
}
if (this.debounceTimeMs) {
this.debounceTimer = setTimeout(() => this.flush(), this.debounceTimeMs);
}
}
private clearDebounceTimer() {
if (this.debounceTimer) {
clearTimeout(this.debounceTimer);
this.debounceTimer = undefined;
}
}
add(item: T) {
this.items.push(item);
this.setDebounceTimer();
if (this.items.length >= this.batchSize) {
this.flush();
}
}
flush() {
this.clearDebounceTimer();
if (this.items.length === 0) {
return;
}
this.onBatch(this.items);
this.items = [];
}
}
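As a quick illustration of the Batcher above (file names are made up and the onBatch body is a stand-in):

import { Batcher } from 'src/utils';

// Flush every 3 items, or 5s after the last add, whichever comes first.
const batcher = new Batcher<string>({
  batchSize: 3,
  debounceTimeMs: 5_000,
  onBatch: async (items) => {
    console.log('processing', items);
  },
});

batcher.add('a.jpg');
batcher.add('b.jpg');
batcher.add('c.jpg'); // third add reaches batchSize and triggers onBatch(['a.jpg', 'b.jpg', 'c.jpg'])
batcher.flush();      // flushes any remainder immediately, clearing the debounce timer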

View File

@@ -2,37 +2,37 @@
 # Manual edits may be lost in future updates.

 provider "registry.opentofu.org/cloudflare/cloudflare" {
-  version     = "4.52.1"
-  constraints = "4.52.1"
+  version     = "4.41.0"
+  constraints = "4.41.0"
   hashes = [
-    "h1:2lHvafwGbLdmc9lYkuJFw3nsInaQjRpjX/JfIRKmq/M=",
-    "h1:596JomwjrtUrOSreq9NNCS+rj70+jOV+0pfja5MXiTI=",
-    "h1:7mBOA5TVAIt3qAwPXKCtE0RSYeqij9v30mnksuBbpEg=",
-    "h1:ELVgzh4kHKBCYdL+2A8JjWS0E1snLUN3Mmz3Vo6qSfw=",
-    "h1:FGGM5yLFf72g3kSXM3LAN64Gf/AkXr5WCmhixgnP+l4=",
-    "h1:JupkJbQALcIVoMhHImrLeLDsQR1ET7VJLGC7ONxjqGU=",
-    "h1:KsaE4JNq+1uV1nJsuTcYar/8lyY6zKS5UBEpfYg3wvc=",
-    "h1:NHZ5RJIzQDLhie/ykl3uI6UPfNQR9Lu5Ti7JPR6X904=",
-    "h1:NfAuMbn6LQPLDtJhbzO1MX9JMIGLMa8K6CpekvtsuX8=",
-    "h1:e+vNKokamDsp/kJvFr2pRudzwEz2r49iZ/oSggw+1LY=",
-    "h1:jnb4VdfNZ79I3yj7Q8x+JmOT+FxbfjjRfrF0dL0yCW8=",
-    "h1:kmF//O539d7NuHU7qIxDj7Wz4eJmLKFiI5glwQivldU=",
-    "h1:s6XriaKwOgV4jvKAGPXkrxhhOQxpNU5dceZwi9Z/1k8=",
-    "h1:wt3WBEBAeSGTlC9OlnTlAALxRiK4SQgLy0KgBIS7qzs=",
-    "zh:2fb95e1d3229b9b6c704e1a413c7481c60f139780d9641f657b6eb9b633b90f2",
-    "zh:379c7680983383862236e9e6e720c3114195c40526172188e88d0ffcf50dfe2e",
-    "zh:55533beb6cfc02d22ffda8cba8027bc2c841bb172cd637ed0d28323d41395f8f",
-    "zh:5abd70760e4eb1f37a1c307cbd2989ea7c9ba0afb93818c67c1d363a31f75703",
-    "zh:699f1c8cd66129176fe659ebf0e6337632a8967a28d2630b6ae5948665c0c2ae",
-    "zh:69c15acd73c451e89de6477059cda2f3ec200b48ae4b9ff3646c4d389fd3205e",
-    "zh:6e02b687de21b844f8266dff99e93e7c61fc8eb688f4bbb23803caceb251839e",
-    "zh:7a51d17b87ed87b7bebf2ad9fc7c3a74f16a1b44eee92c779c08eb89258c0496",
-    "zh:88ad84436837b0f55302f22748505972634e87400d6902260fd6b7ba1610f937",
+    "h1:0mc+YrjQrcctGrGYDmzlcqcgSv9MYB74rvMaZylIKC8=",
+    "h1:0zUx4vk4jOORQqn6xHBF7dO6N6bielFHdJ0mgF4Obn8=",
+    "h1:AsIZW3uLFNOZO7kL/K7/Y/S0IYxUV9Hz85NNk/3TTsA=",
+    "h1:FSgYM4+LHMbX/a4Y1kx7FPPWmXqS3/MQYzvjMJHHHWM=",
+    "h1:Tx6Nh3BWP1x9L3KK/Eyi+ET0T26g3+jf1jyiuqpNIis=",
+    "h1:VRI9wu8P43xxfpeTndRwsisLnqncfnmEYMOEH5zH4pQ=",
+    "h1:YxQqmiES/Yanq/VfGqBEqg+VIO7FGhO88aKoWFHyGIg=",
+    "h1:ZWHiaesjgDLKWlfdNj0oKyj/DWdxcfsO6NINu39zfpY=",
+    "h1:a2aCgDDBz3ccrr8YstIMl7VFnKo1xZAp+rOv59PPJ7U=",
+    "h1:aRyv8tB6wBAF9lKsLEdiHyCqnK5LfZq0FqMXCcUB4UU=",
+    "h1:lXpuO7zv2uD2GzPE1ARxznreRAh+QHTc2lAJ7iOoFgY=",
+    "h1:sA1xq0QNQ4fH8SHXouYNq50xirVD18SamKQwPsBQrrY=",
+    "h1:v7sHvKq7oqMYPn47ULHFyIQsKD9o+6Xg/uHbxQUixEw=",
+    "h1:wo/x4atWyXuWGlfR6h5nH0YwBAmBwTRY27HtWP8ycLo=",
+    "zh:339d26e06dc6fb299ea8aad9476a60fd65bb1d40631ae8eeb81cddf2dd2bebc8",
+    "zh:3dec2ad96ac2c283fd34ce65781b55c4edbb4d5c5cb53da8e31537176c0ed562",
+    "zh:5f63a5f8080319a2fff09d4d49944829fa708723436520787cfb60725ced80cf",
+    "zh:67162c28ccea71cb8141ed15c0637e35621354ebe14878e0b75a8f160fc5505d",
+    "zh:6ac1e07f5347b6395aca690ed22101bb25e957d25f986f760ff673a7adfd5ef6",
+    "zh:70282a723c7b52fcabde2baad41c864ed3a8d69f0c4d27a6b6933cac434cffc6",
     "zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
-    "zh:8d46c3d9f4f7ad20ac6ef01daa63f4e30a2d16dcb1bb5c7c7ee3dc6be38e9ca1",
-    "zh:913d64e72a4929dae1d4793e2004f4f9a58b138ea337d9d94fa35cafbf06550a",
-    "zh:c8d93cf86e2e49f6cec665cfe78b82c144cce15a8b2e30f343385fadd1251849",
-    "zh:cc4f69397d9bc34a528a5609a024c3a48f54f21616c0008792dd417297add955",
-    "zh:df99cdb8b064aad35ffea77e645cf6541d0b1b2ebc51b6d26c42031de60ab69e",
+    "zh:924cd23abc326c6b3914e2cd9c94c7832c2552e1e9ae258fb9fd9aedaa5f7ce7",
+    "zh:a4b75e4c239879296259e7d54f1befbc7fdc16da2d62d1294e9f73add4cae61e",
+    "zh:a6ceb08feb63b00c7141783b31e45a154c76fd8cdebbdf371074805f0053572d",
+    "zh:afae1843f9ba85f2f6d94108c65cf43a457e83531a632d44d863e935160cb2ba",
+    "zh:bd6628ce60c778960a5755f7010b7e2cc5c6ff0341a21c175341b28058ec843d",
+    "zh:cd30866a1ff99d72b5fa1699db582fa4f25562e6ab21dcc6870324f3056108e0",
+    "zh:df5924cca691a8220aaaebb5cb55c3d6c32ff0a881f198695eff28155eb12b54",
+    "zh:e78d0696c941aba58df1cb36b8a0d25cd5f3963f01d9338fdbda74db58afdd49",
   ]
 }

View File

@@ -5,7 +5,7 @@ terraform {
   required_providers {
     cloudflare = {
       source  = "cloudflare/cloudflare"
-      version = "4.52.1"
+      version = "4.41.0"
     }
   }
 }

View File

@@ -2,37 +2,37 @@
 # Manual edits may be lost in future updates.

 provider "registry.opentofu.org/cloudflare/cloudflare" {
-  version     = "4.52.1"
-  constraints = "4.52.1"
+  version     = "4.41.0"
+  constraints = "4.41.0"
   hashes = [
-    "h1:2lHvafwGbLdmc9lYkuJFw3nsInaQjRpjX/JfIRKmq/M=",
-    "h1:596JomwjrtUrOSreq9NNCS+rj70+jOV+0pfja5MXiTI=",
-    "h1:7mBOA5TVAIt3qAwPXKCtE0RSYeqij9v30mnksuBbpEg=",
-    "h1:ELVgzh4kHKBCYdL+2A8JjWS0E1snLUN3Mmz3Vo6qSfw=",
-    "h1:FGGM5yLFf72g3kSXM3LAN64Gf/AkXr5WCmhixgnP+l4=",
-    "h1:JupkJbQALcIVoMhHImrLeLDsQR1ET7VJLGC7ONxjqGU=",
-    "h1:KsaE4JNq+1uV1nJsuTcYar/8lyY6zKS5UBEpfYg3wvc=",
-    "h1:NHZ5RJIzQDLhie/ykl3uI6UPfNQR9Lu5Ti7JPR6X904=",
-    "h1:NfAuMbn6LQPLDtJhbzO1MX9JMIGLMa8K6CpekvtsuX8=",
-    "h1:e+vNKokamDsp/kJvFr2pRudzwEz2r49iZ/oSggw+1LY=",
-    "h1:jnb4VdfNZ79I3yj7Q8x+JmOT+FxbfjjRfrF0dL0yCW8=",
-    "h1:kmF//O539d7NuHU7qIxDj7Wz4eJmLKFiI5glwQivldU=",
-    "h1:s6XriaKwOgV4jvKAGPXkrxhhOQxpNU5dceZwi9Z/1k8=",
-    "h1:wt3WBEBAeSGTlC9OlnTlAALxRiK4SQgLy0KgBIS7qzs=",
-    "zh:2fb95e1d3229b9b6c704e1a413c7481c60f139780d9641f657b6eb9b633b90f2",
-    "zh:379c7680983383862236e9e6e720c3114195c40526172188e88d0ffcf50dfe2e",
-    "zh:55533beb6cfc02d22ffda8cba8027bc2c841bb172cd637ed0d28323d41395f8f",
-    "zh:5abd70760e4eb1f37a1c307cbd2989ea7c9ba0afb93818c67c1d363a31f75703",
-    "zh:699f1c8cd66129176fe659ebf0e6337632a8967a28d2630b6ae5948665c0c2ae",
-    "zh:69c15acd73c451e89de6477059cda2f3ec200b48ae4b9ff3646c4d389fd3205e",
-    "zh:6e02b687de21b844f8266dff99e93e7c61fc8eb688f4bbb23803caceb251839e",
-    "zh:7a51d17b87ed87b7bebf2ad9fc7c3a74f16a1b44eee92c779c08eb89258c0496",
-    "zh:88ad84436837b0f55302f22748505972634e87400d6902260fd6b7ba1610f937",
+    "h1:0mc+YrjQrcctGrGYDmzlcqcgSv9MYB74rvMaZylIKC8=",
+    "h1:0zUx4vk4jOORQqn6xHBF7dO6N6bielFHdJ0mgF4Obn8=",
+    "h1:AsIZW3uLFNOZO7kL/K7/Y/S0IYxUV9Hz85NNk/3TTsA=",
+    "h1:FSgYM4+LHMbX/a4Y1kx7FPPWmXqS3/MQYzvjMJHHHWM=",
+    "h1:Tx6Nh3BWP1x9L3KK/Eyi+ET0T26g3+jf1jyiuqpNIis=",
+    "h1:VRI9wu8P43xxfpeTndRwsisLnqncfnmEYMOEH5zH4pQ=",
+    "h1:YxQqmiES/Yanq/VfGqBEqg+VIO7FGhO88aKoWFHyGIg=",
+    "h1:ZWHiaesjgDLKWlfdNj0oKyj/DWdxcfsO6NINu39zfpY=",
+    "h1:a2aCgDDBz3ccrr8YstIMl7VFnKo1xZAp+rOv59PPJ7U=",
+    "h1:aRyv8tB6wBAF9lKsLEdiHyCqnK5LfZq0FqMXCcUB4UU=",
+    "h1:lXpuO7zv2uD2GzPE1ARxznreRAh+QHTc2lAJ7iOoFgY=",
+    "h1:sA1xq0QNQ4fH8SHXouYNq50xirVD18SamKQwPsBQrrY=",
+    "h1:v7sHvKq7oqMYPn47ULHFyIQsKD9o+6Xg/uHbxQUixEw=",
+    "h1:wo/x4atWyXuWGlfR6h5nH0YwBAmBwTRY27HtWP8ycLo=",
+    "zh:339d26e06dc6fb299ea8aad9476a60fd65bb1d40631ae8eeb81cddf2dd2bebc8",
+    "zh:3dec2ad96ac2c283fd34ce65781b55c4edbb4d5c5cb53da8e31537176c0ed562",
+    "zh:5f63a5f8080319a2fff09d4d49944829fa708723436520787cfb60725ced80cf",
+    "zh:67162c28ccea71cb8141ed15c0637e35621354ebe14878e0b75a8f160fc5505d",
+    "zh:6ac1e07f5347b6395aca690ed22101bb25e957d25f986f760ff673a7adfd5ef6",
+    "zh:70282a723c7b52fcabde2baad41c864ed3a8d69f0c4d27a6b6933cac434cffc6",
     "zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
-    "zh:8d46c3d9f4f7ad20ac6ef01daa63f4e30a2d16dcb1bb5c7c7ee3dc6be38e9ca1",
-    "zh:913d64e72a4929dae1d4793e2004f4f9a58b138ea337d9d94fa35cafbf06550a",
-    "zh:c8d93cf86e2e49f6cec665cfe78b82c144cce15a8b2e30f343385fadd1251849",
-    "zh:cc4f69397d9bc34a528a5609a024c3a48f54f21616c0008792dd417297add955",
-    "zh:df99cdb8b064aad35ffea77e645cf6541d0b1b2ebc51b6d26c42031de60ab69e",
+    "zh:924cd23abc326c6b3914e2cd9c94c7832c2552e1e9ae258fb9fd9aedaa5f7ce7",
+    "zh:a4b75e4c239879296259e7d54f1befbc7fdc16da2d62d1294e9f73add4cae61e",
+    "zh:a6ceb08feb63b00c7141783b31e45a154c76fd8cdebbdf371074805f0053572d",
+    "zh:afae1843f9ba85f2f6d94108c65cf43a457e83531a632d44d863e935160cb2ba",
+    "zh:bd6628ce60c778960a5755f7010b7e2cc5c6ff0341a21c175341b28058ec843d",
+    "zh:cd30866a1ff99d72b5fa1699db582fa4f25562e6ab21dcc6870324f3056108e0",
+    "zh:df5924cca691a8220aaaebb5cb55c3d6c32ff0a881f198695eff28155eb12b54",
+    "zh:e78d0696c941aba58df1cb36b8a0d25cd5f3963f01d9338fdbda74db58afdd49",
   ]
 }

View File

@@ -5,7 +5,7 @@ terraform {
   required_providers {
     cloudflare = {
       source  = "cloudflare/cloudflare"
-      version = "4.52.1"
+      version = "4.41.0"
     }
   }
 }

View File

@@ -1,13 +1,4 @@
# # See:
# WARNING: To install Immich, follow our guide: https://immich.app/docs/install/docker-compose
#
# Make sure to use the docker-compose.yml of the current release:
#
# https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
#
# The compose file on main may not be compatible with the latest release.
# For development see:
# - https://immich.app/docs/developer/setup # - https://immich.app/docs/developer/setup
# - https://immich.app/docs/developer/troubleshooting # - https://immich.app/docs/developer/troubleshooting
@@ -16,21 +7,22 @@ name: immich-dev
services: services:
immich-server: immich-server:
container_name: immich_server container_name: immich_server
command: ['immich-dev'] command: ['/usr/src/app/bin/immich-dev']
image: immich-server-dev:latest image: immich-server-dev:latest
# extends: # extends:
# file: hwaccel.transcoding.yml # file: hwaccel.transcoding.yml
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding # service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
user: '${UID:-1000}:${GID:-1000}'
build: build:
context: ../ context: ../
dockerfile: server/Dockerfile dockerfile: server/Dockerfile
target: dev target: dev
restart: unless-stopped restart: always
volumes: volumes:
- ..:/usr/src/app - ../server:/usr/src/app
- ${UPLOAD_LOCATION}/photos:/data - ../open-api:/usr/src/open-api
- ${UPLOAD_LOCATION}/photos/upload:/data/upload - ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
- ${UPLOAD_LOCATION}/photos/upload:/usr/src/app/upload/upload
- /usr/src/app/node_modules
- /etc/localtime:/etc/localtime:ro - /etc/localtime:/etc/localtime:ro
env_file: env_file:
- .env - .env
@@ -44,68 +36,54 @@ services:
IMMICH_BUILD_URL: https://github.com/immich-app/immich/actions/runs/9654404849 IMMICH_BUILD_URL: https://github.com/immich-app/immich/actions/runs/9654404849
IMMICH_BUILD_IMAGE: development IMMICH_BUILD_IMAGE: development
IMMICH_BUILD_IMAGE_URL: https://github.com/immich-app/immich/pkgs/container/immich-server IMMICH_BUILD_IMAGE_URL: https://github.com/immich-app/immich/pkgs/container/immich-server
IMMICH_THIRD_PARTY_SOURCE_URL: https://github.com/immich-app/immich/
IMMICH_THIRD_PARTY_BUG_FEATURE_URL: https://github.com/immich-app/immich/issues
IMMICH_THIRD_PARTY_DOCUMENTATION_URL: https://immich.app/docs
IMMICH_THIRD_PARTY_SUPPORT_URL: https://immich.app/docs/community-guides
ulimits: ulimits:
nofile: nofile:
soft: 1048576 soft: 1048576
hard: 1048576 hard: 1048576
ports: ports:
- 3001:3001
- 9230:9230 - 9230:9230
- 9231:9231 - 9231:9231
- 2283:2283
depends_on: depends_on:
redis: - redis
condition: service_started - database
database:
condition: service_started
init:
condition: service_completed_successfully
healthcheck: healthcheck:
disable: false disable: false
immich-web: immich-web:
container_name: immich_web container_name: immich_web
image: immich-web-dev:latest image: immich-web-dev:latest
# Needed for rootless docker setup, see https://github.com/moby/moby/issues/45919
# user: 0:0
user: '${UID:-1000}:${GID:-1000}'
build: build:
context: ../ context: ../web
dockerfile: server/Dockerfile command: ['/usr/src/app/bin/immich-web']
target: dev
command: ['immich-web']
env_file: env_file:
- .env - .env
ports: ports:
- 3000:3000 - 2283:3000
- 24678:24678 - 24678:24678
volumes: volumes:
- ..:/usr/src/app - ../web:/usr/src/app
- ../open-api/:/usr/src/open-api/
- /usr/src/app/node_modules
ulimits: ulimits:
nofile: nofile:
soft: 1048576 soft: 1048576
hard: 1048576 hard: 1048576
restart: unless-stopped restart: unless-stopped
depends_on: depends_on:
immich-server: - immich-server
condition: service_started
init:
condition: service_completed_successfully
immich-machine-learning: immich-machine-learning:
container_name: immich_machine_learning container_name: immich_machine_learning
image: immich-machine-learning-dev:latest image: immich-machine-learning-dev:latest
# extends: # extends:
# file: hwaccel.ml.yml # file: hwaccel.ml.yml
# service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference # service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
build: build:
context: ../machine-learning context: ../machine-learning
dockerfile: Dockerfile dockerfile: Dockerfile
args: args:
- DEVICE=cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference - DEVICE=cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
ports: ports:
- 3003:3003 - 3003:3003
volumes: volumes:
@@ -121,13 +99,13 @@ services:
redis: redis:
container_name: immich_redis container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:a137a2b60aca1a75130022d6bb96af423fefae4eb55faf395732db3544803280 image: redis:6.2-alpine@sha256:2d1463258f2764328496376f5d965f20c6a67f66ea2b06dc42af351f75248792
healthcheck: healthcheck:
test: redis-cli ping || exit 1 test: redis-cli ping || exit 1
database: database:
container_name: immich_postgres container_name: immich_postgres
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:32324a2f41df5de9efe1af166b7008c3f55646f8d0e00d9550c16c9822366b4a image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
env_file: env_file:
- .env - .env
environment: environment:
@@ -139,8 +117,29 @@ services:
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data - ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
ports: ports:
- 5432:5432 - 5432:5432
shm_size: 128mb healthcheck:
# set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics test: pg_isready --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
interval: 5m
start_interval: 30s
start_period: 5m
command:
[
'postgres',
'-c',
'shared_preload_libraries=vectors.so',
'-c',
'search_path="$$user", public, vectors',
'-c',
'logging_collector=on',
'-c',
'max_wal_size=2GB',
'-c',
'shared_buffers=512MB',
'-c',
'wal_compression=on',
]
# set IMMICH_METRICS=true in .env to enable metrics
# immich-prometheus: # immich-prometheus:
# container_name: immich_prometheus # container_name: immich_prometheus
# ports: # ports:
@@ -161,14 +160,6 @@ services:
# volumes: # volumes:
# - grafana-data:/var/lib/grafana # - grafana-data:/var/lib/grafana
init:
container_name: init
image: busybox
env_file:
- .env
user: 0:0
command: sh -c 'for path in /usr/src/app/.pnpm-store /usr/src/app/server/node_modules /usr/src/app/.github/node_modules /usr/src/app/cli/node_modules /usr/src/app/docs/node_modules /usr/src/app/e2e/node_modules /usr/src/app/open-api/typescript-sdk/node_modules /usr/src/app/web/.svelte-kit /usr/src/app/web/coverage /usr/src/app/node_modules /usr/src/app/web/node_modules; do [ -e "$$path" ] && chown -R ${UID:-1000}:${GID:-1000} "$$path" || true; done'
volumes: volumes:
model-cache: model-cache:
prometheus-data: prometheus-data:

View File

@@ -1,12 +1,3 @@
#
# WARNING: To install Immich, follow our guide: https://immich.app/docs/install/docker-compose
#
# Make sure to use the docker-compose.yml of the current release:
#
# https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
#
# The compose file on main may not be compatible with the latest release.
name: immich-prod name: immich-prod
services: services:
@@ -20,12 +11,12 @@ services:
context: ../ context: ../
dockerfile: server/Dockerfile dockerfile: server/Dockerfile
volumes: volumes:
- ${UPLOAD_LOCATION}/photos:/data - ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
- /etc/localtime:/etc/localtime:ro - /etc/localtime:/etc/localtime:ro
env_file: env_file:
- .env - .env
ports: ports:
- 2283:2283 - 2283:3001
depends_on: depends_on:
- redis - redis
- database - database
@@ -38,12 +29,12 @@ services:
image: immich-machine-learning:latest image: immich-machine-learning:latest
# extends: # extends:
# file: hwaccel.ml.yml # file: hwaccel.ml.yml
# service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference # service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
build: build:
context: ../machine-learning context: ../machine-learning
dockerfile: Dockerfile dockerfile: Dockerfile
args: args:
- DEVICE=cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference - DEVICE=cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
ports: ports:
- 3003:3003 - 3003:3003
volumes: volumes:
@@ -56,14 +47,14 @@ services:
redis: redis:
container_name: immich_redis container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:a137a2b60aca1a75130022d6bb96af423fefae4eb55faf395732db3544803280 image: redis:6.2-alpine@sha256:2d1463258f2764328496376f5d965f20c6a67f66ea2b06dc42af351f75248792
healthcheck: healthcheck:
test: redis-cli ping || exit 1 test: redis-cli ping || exit 1
restart: always restart: always
database: database:
container_name: immich_postgres container_name: immich_postgres
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:32324a2f41df5de9efe1af166b7008c3f55646f8d0e00d9550c16c9822366b4a image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
env_file: env_file:
- .env - .env
environment: environment:
@@ -75,15 +66,20 @@ services:
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data - ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
ports: ports:
- 5432:5432 - 5432:5432
shm_size: 128mb healthcheck:
test: pg_isready --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
interval: 5m
start_interval: 30s
start_period: 5m
command: ["postgres", "-c", "shared_preload_libraries=vectors.so", "-c", 'search_path="$$user", public, vectors', "-c", "logging_collector=on", "-c", "max_wal_size=2GB", "-c", "shared_buffers=512MB", "-c", "wal_compression=on"]
restart: always restart: always
# set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics # set IMMICH_METRICS=true in .env to enable metrics
immich-prometheus: immich-prometheus:
container_name: immich_prometheus container_name: immich_prometheus
ports: ports:
- 9090:9090 - 9090:9090
image: prom/prometheus@sha256:63805ebb8d2b3920190daf1cb14a60871b16fd38bed42b857a3182bc621f4996 image: prom/prometheus@sha256:f6639335d34a77d9d9db382b92eeb7fc00934be8eae81dbc03b31cfe90411a94
volumes: volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml - ./prometheus.yml:/etc/prometheus/prometheus.yml
- prometheus-data:/prometheus - prometheus-data:/prometheus
@@ -95,7 +91,7 @@ services:
command: ['./run.sh', '-disable-reporting'] command: ['./run.sh', '-disable-reporting']
ports: ports:
- 3000:3000 - 3000:3000
image: grafana/grafana:12.1.1-ubuntu@sha256:d1da838234ff2de93e0065ee1bf0e66d38f948dcc5d718c25fa6237e14b4424a image: grafana/grafana:11.2.0-ubuntu@sha256:8e2c13739563c3da9d45de96c6bcb63ba617cac8c571c060112c7fc8ad6914e9
volumes: volumes:
- grafana-data:/var/lib/grafana - grafana-data:/var/lib/grafana

View File

@@ -1,11 +1,10 @@
# #
# WARNING: To install Immich, follow our guide: https://immich.app/docs/install/docker-compose # WARNING: Make sure to use the docker-compose.yml of the current release:
#
# Make sure to use the docker-compose.yml of the current release:
# #
# https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml # https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
# #
# The compose file on main may not be compatible with the latest release. # The compose file on main may not be compatible with the latest release.
#
name: immich name: immich
@@ -18,12 +17,12 @@ services:
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding # service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
volumes: volumes:
# Do not edit the next line. If you want to change the media storage location on your system, edit the value of UPLOAD_LOCATION in the .env file # Do not edit the next line. If you want to change the media storage location on your system, edit the value of UPLOAD_LOCATION in the .env file
- ${UPLOAD_LOCATION}:/data - ${UPLOAD_LOCATION}:/usr/src/app/upload
- /etc/localtime:/etc/localtime:ro - /etc/localtime:/etc/localtime:ro
env_file: env_file:
- .env - .env
ports: ports:
- '2283:2283' - 2283:3001
depends_on: depends_on:
- redis - redis
- database - database
@@ -33,12 +32,12 @@ services:
immich-machine-learning: immich-machine-learning:
container_name: immich_machine_learning container_name: immich_machine_learning
# For hardware acceleration, add one of -[armnn, cuda, rocm, openvino, rknn] to the image tag. # For hardware acceleration, add one of -[armnn, cuda, openvino] to the image tag.
# Example tag: ${IMMICH_VERSION:-release}-cuda # Example tag: ${IMMICH_VERSION:-release}-cuda
image: ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release} image: ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release}
# extends: # uncomment this section for hardware acceleration - see https://immich.app/docs/features/ml-hardware-acceleration # extends: # uncomment this section for hardware acceleration - see https://immich.app/docs/features/ml-hardware-acceleration
# file: hwaccel.ml.yml # file: hwaccel.ml.yml
# service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference - use the `-wsl` version for WSL2 where applicable # service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference - use the `-wsl` version for WSL2 where applicable
volumes: volumes:
- model-cache:/cache - model-cache:/cache
env_file: env_file:
@@ -49,25 +48,28 @@ services:
redis: redis:
container_name: immich_redis container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:a137a2b60aca1a75130022d6bb96af423fefae4eb55faf395732db3544803280 image: docker.io/redis:6.2-alpine@sha256:2d1463258f2764328496376f5d965f20c6a67f66ea2b06dc42af351f75248792
healthcheck: healthcheck:
test: redis-cli ping || exit 1 test: redis-cli ping || exit 1
restart: always restart: always
database: database:
container_name: immich_postgres container_name: immich_postgres
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:32324a2f41df5de9efe1af166b7008c3f55646f8d0e00d9550c16c9822366b4a image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
environment: environment:
POSTGRES_PASSWORD: ${DB_PASSWORD} POSTGRES_PASSWORD: ${DB_PASSWORD}
POSTGRES_USER: ${DB_USERNAME} POSTGRES_USER: ${DB_USERNAME}
POSTGRES_DB: ${DB_DATABASE_NAME} POSTGRES_DB: ${DB_DATABASE_NAME}
POSTGRES_INITDB_ARGS: '--data-checksums' POSTGRES_INITDB_ARGS: '--data-checksums'
# Uncomment the DB_STORAGE_TYPE: 'HDD' var if your database isn't stored on SSDs
# DB_STORAGE_TYPE: 'HDD'
volumes: volumes:
# Do not edit the next line. If you want to change the database storage location on your system, edit the value of DB_DATA_LOCATION in the .env file # Do not edit the next line. If you want to change the database storage location on your system, edit the value of DB_DATA_LOCATION in the .env file
- ${DB_DATA_LOCATION}:/var/lib/postgresql/data - ${DB_DATA_LOCATION}:/var/lib/postgresql/data
shm_size: 128mb healthcheck:
test: pg_isready --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
interval: 5m
start_interval: 30s
start_period: 5m
command: ["postgres", "-c", "shared_preload_libraries=vectors.so", "-c", 'search_path="$$user", public, vectors', "-c", "logging_collector=on", "-c", "max_wal_size=2GB", "-c", "shared_buffers=512MB", "-c", "wal_compression=on"]
restart: always restart: always
volumes: volumes:

View File

@@ -2,8 +2,7 @@
# The location where your uploaded files are stored # The location where your uploaded files are stored
UPLOAD_LOCATION=./library UPLOAD_LOCATION=./library
# The location where your database files are stored
# The location where your database files are stored. Network shares are not supported for the database
DB_DATA_LOCATION=./postgres DB_DATA_LOCATION=./postgres
# To set a timezone, uncomment the next line and change Etc/UTC to a TZ identifier from this list: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List # To set a timezone, uncomment the next line and change Etc/UTC to a TZ identifier from this list: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List

View File

@@ -13,13 +13,6 @@ services:
volumes: volumes:
- /lib/firmware/mali_csffw.bin:/lib/firmware/mali_csffw.bin:ro # Mali firmware for your chipset (not always required depending on the driver) - /lib/firmware/mali_csffw.bin:/lib/firmware/mali_csffw.bin:ro # Mali firmware for your chipset (not always required depending on the driver)
- /usr/lib/libmali.so:/usr/lib/libmali.so:ro # Mali driver for your chipset (always required) - /usr/lib/libmali.so:/usr/lib/libmali.so:ro # Mali driver for your chipset (always required)
rknn:
security_opt:
- systempaths=unconfined
- apparmor=unconfined
devices:
- /dev/dri:/dev/dri
cpu: {} cpu: {}
@@ -33,13 +26,6 @@ services:
capabilities: capabilities:
- gpu - gpu
rocm:
group_add:
- video
devices:
- /dev/dri:/dev/dri
- /dev/kfd:/dev/kfd
openvino: openvino:
device_cgroup_rules: device_cgroup_rules:
- 'c 189:* rmw' - 'c 189:* rmw'

View File

@@ -48,7 +48,6 @@ services:
vaapi-wsl: # use this for VAAPI if you're running Immich in WSL2 vaapi-wsl: # use this for VAAPI if you're running Immich in WSL2
devices: devices:
- /dev/dri:/dev/dri - /dev/dri:/dev/dri
- /dev/dxg:/dev/dxg
volumes: volumes:
- /usr/lib/wsl:/usr/lib/wsl - /usr/lib/wsl:/usr/lib/wsl
environment: environment:

docs/.gitignore
View File

@@ -18,6 +18,4 @@
npm-debug.log* npm-debug.log*
yarn-debug.log* yarn-debug.log*
yarn-error.log* yarn-error.log*
yarn.lock yarn.lock
/static/openapi.json

View File

@@ -1 +1 @@
22.18.0 20.17.0

View File

@@ -5,13 +5,13 @@ This website is built using [Docusaurus](https://docusaurus.io/), a modern stati
### Installation ### Installation
``` ```
$ pnpm install $ npm install
``` ```
### Local Development ### Local Development
``` ```
$ pnpm run start $ npm run start
``` ```
This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server. This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server.
@@ -19,7 +19,7 @@ This command starts a local development server and opens up a browser window. Mo
### Build ### Build
``` ```
$ pnpm run build $ npm run build
``` ```
This command generates static content into the `build` directory and can be served using any static contents hosting service. This command generates static content into the `build` directory and can be served using any static contents hosting service.
@@ -29,13 +29,13 @@ This command generates static content into the `build` directory and can be serv
Using SSH: Using SSH:
``` ```
$ USE_SSH=true pnpm run deploy $ USE_SSH=true npm run deploy
``` ```
Not using SSH: Not using SSH:
``` ```
$ GIT_USER=<Your GitHub username> pnpm run deploy $ GIT_USER=<Your GitHub username> npm run deploy
``` ```
If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch. If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch.

View File

@@ -49,22 +49,13 @@ The behaviors differ based on your device manufacturer and operating system, but
On iOS (iPhone and iPad), the operating system determines if a particular app can invoke background tasks based on multiple factors, most of which the Immich app has no control over. To increase the likelihood that the background backup task is run, follow the steps below: On iOS (iPhone and iPad), the operating system determines if a particular app can invoke background tasks based on multiple factors, most of which the Immich app has no control over. To increase the likelihood that the background backup task is run, follow the steps below:
- Enable Background App Refresh for Immich in the iOS settings at `Settings > General > Background App Refresh`. - Enable Background App Refresh for Immich in the iOS settings at `Settings > General > Background App Refresh`.
- Disable **Low Power Mode** when not needed, as this can prevent apps from running in the background.
- Disable Background App Refresh for apps that don't need background tasks to run. This will reduce the competition for background task invocation for Immich. - Disable Background App Refresh for apps that don't need background tasks to run. This will reduce the competition for background task invocation for Immich.
- Use the Immich app more often. - Use the Immich app more often.
### Why are features in the mobile app not working with a self-signed certificate, Basic Auth, custom headers, or mutual TLS? ### Why are features not working with a self-signed cert or mTLS?
These network features are experimental. They often do not work with video playback, asset upload or download, and other features. Due to limitations in the upstream app/video library, using a self-signed TLS certificate or mutual TLS may break video playback or asset upload (both foreground and/or background).
Many of these limitations are tracked in [#15230](https://github.com/immich-app/immich/issues/15230). We recommend using a real SSL certificate from a free provider, for example [Let's Encrypt](https://letsencrypt.org/).
Instead of these experimental features, we recommend using the URL switching feature, a VPN, or a [free trusted SSL certificate](https://letsencrypt.org/) for your domain.
We are not actively developing these features and will not be able to provide support, but welcome contributions to improve them.
Please discuss any large PRs with our dev team to ensure your time is not wasted.
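If you want to move away from self-signed certificates, issuing a trusted certificate can be as simple as one command. A minimal sketch, assuming an nginx reverse proxy with certbot's nginx plugin installed; `immich.example.com` is a placeholder for your own domain:

```bash
# Request and install a Let's Encrypt certificate for the reverse proxy
# (assumes certbot and its nginx plugin are installed, and DNS points at this host)
sudo certbot --nginx -d immich.example.com
```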
### Why isn't the mobile app updated yet?
The app stores can take a few days to approve new builds of the app. If you're impatient, Android APKs can be downloaded from the GitHub releases.
--- ---
@@ -78,8 +69,7 @@ However, Immich will delete original files that have been trashed when the trash
### Why do my file names appear as a random string in the file manager? ### Why do my file names appear as a random string in the file manager?
When Storage Template is off (default) Immich saves the file names in a random string (also known as random UUIDs) to prevent duplicate file names. When Storage Template is off (default) Immich saves the file names in a random string (also known as random UUIDs) to prevent duplicate file names. To retrieve the original file names, you must enable the Storage Template and then run the STORAGE TEMPLATE MIGRATION job.
To retrieve the original file names, you must enable the Storage Template and then run the STORAGE TEMPLATE MIGRATION job.
It is recommended to read about [Storage Template](https://immich.app/docs/administration/storage-template) before activation. It is recommended to read about [Storage Template](https://immich.app/docs/administration/storage-template) before activation.
### Can I add my existing photo library? ### Can I add my existing photo library?
@@ -92,20 +82,11 @@ Template changes will only apply to _new_ assets. To retroactively apply the tem
### Why are only photos and not videos being uploaded to Immich? ### Why are only photos and not videos being uploaded to Immich?
This often happens when using a reverse proxy in front of Immich. This often happens when using a reverse proxy (such as Nginx or Cloudflare tunnel) in front of Immich. Make sure to set your reverse proxy to allow large `POST` requests. In `nginx`, set `client_max_body_size 50000M;` or similar. Also, check the disk space of your reverse proxy. In some cases, proxies cache requests to disk before passing them on, and if disk space runs out, the request fails.
Make sure to [set your reverse proxy](/docs/administration/reverse-proxy/) to allow large requests.
Also, check the disk space of your reverse proxy.
In some cases, proxies cache requests to disk before passing them on, and if disk space runs out, the request fails.
If you are using Cloudflare Tunnel, please know that they set a maximum filesize of 100 MB that cannot be changed.
At times, files larger than this may work, potentially up to 1 GB. However, the official limit is 100 MB.
If you are having issues, we recommend switching to a different network deployment.
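To confirm the proxy is the culprit, you can check its error log and disk space. A hedged example for nginx; the log and buffer paths vary by distro and setup:

```bash
# nginx logs this message when a request body exceeds client_max_body_size
grep 'client intended to send too large body' /var/log/nginx/error.log
# Check free space where the proxy may buffer request bodies before forwarding them
df -h /var/lib/nginx
```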
### Why are some photos stored in the file system with the wrong date? ### Why are some photos stored in the file system with the wrong date?
There are a few different scenarios that can lead to this situation. The solution is to rerun the storage migration job. There are a few different scenarios that can lead to this situation. The solution is to rerun the storage migration job. The job is only automatically run once per asset after upload. If metadata extraction originally failed, the jobs were cleared/canceled, etc., the job may not have run automatically the first time.
The job is only automatically run once per asset after upload. If metadata extraction originally failed, the jobs were cleared/canceled, etc.,
the job may not have run automatically the first time.
### How can I hide photos from the timeline? ### How can I hide photos from the timeline?
@@ -117,7 +98,7 @@ See [Backup and Restore](/docs/administration/backup-and-restore.md).
### Does Immich support reading existing face tag metadata? ### Does Immich support reading existing face tag metadata?
Yes, it creates new faces and persons from the imported asset metadata. For details see the [feature request #4348](https://github.com/immich-app/immich/discussions/4348) and [PR #6455](https://github.com/immich-app/immich/pull/6455). No, it currently does not. There is an [open feature request on GitHub](https://github.com/immich-app/immich/discussions/4348).
### Does Immich support the filtering of NSFW images? ### Does Immich support the filtering of NSFW images?
@@ -135,8 +116,7 @@ Also, there are additional jobs for person (face) thumbnails.
### Why do files from WhatsApp not appear with the correct date? ### Why do files from WhatsApp not appear with the correct date?
Files sent on WhatsApp are saved without metadata on the file. Therefore, Immich has no way of knowing the original date of the file when files are uploaded from WhatsApp, Files sent on WhatsApp are saved without metadata on the file. Therefore, Immich has no way of knowing the original date of the file when files are uploaded from WhatsApp, only the order of arrival on the device. [See #3527](https://github.com/immich-app/immich/issues/3527).
only the order of arrival on the device. [See #9116](https://github.com/immich-app/immich/discussions/9116).
### What happens if an asset exists in more than one account? ### What happens if an asset exists in more than one account?
@@ -164,35 +144,6 @@ For example, say you have existing transcodes with the policy "Videos higher tha
No. Our design principle is that the original assets should always be untouched. No. Our design principle is that the original assets should always be untouched.
### How can I mount a CIFS/Samba volume within Docker?
If you aren't able to or prefer not to mount Samba on the host (such as in a Windows environment), you can mount the volume within Docker.
Below is an example in the `docker-compose.yml`.
Change the username, password, local IP, and share name to match your setup. In the example below, the line `- originals:/usr/src/app/originals`
corresponds to the section where the volume `originals` is created. You can name the volume whatever you like and map it into the Docker container however you like.
For example, you could change `originals:` to `Photos:`, and change `- originals:/usr/src/app/originals` to `- Photos:/usr/src/app/photos`.
```diff
...
services:
immich-server:
...
volumes:
# Do not edit the next line. If you want to change the media storage location on your system, edit the value of UPLOAD_LOCATION in the .env file
- ${UPLOAD_LOCATION}:/data
- /etc/localtime:/etc/localtime:ro
+ - originals:/usr/src/app/originals
...
volumes:
model-cache:
+ originals:
+ driver_opts:
+ type: cifs
+ o: 'iocharset=utf8,username=USERNAMEHERE,password=PASSWORDHERE,rw' # change to `ro` if read only desired
+ device: '//localipaddress/sharename'
```
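Before wiring the share into Docker, it can help to verify that the host can mount it at all. A quick sanity check, assuming `cifs-utils` is installed and using the same placeholder credentials as above:

```bash
# Mount the share temporarily on the host to confirm credentials and connectivity
sudo mkdir -p /mnt/cifs-test
sudo mount -t cifs -o iocharset=utf8,username=USERNAMEHERE,password=PASSWORDHERE //localipaddress/sharename /mnt/cifs-test
ls /mnt/cifs-test   # should list the contents of the share
sudo umount /mnt/cifs-test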
--- ---
## Albums ## Albums
@@ -236,11 +187,11 @@ However, when the trash is emptied, the files will re-appear in the main timelin
### How does smart search work? ### How does smart search work?
Immich uses CLIP models. An ML model converts each image to an "embedding", which is essentially a string of numbers that semantically encodes what is in the image. The same is done for the text that you enter when you do a search, and that text embedding is then compared with those of the images to find similar ones. As such, there are no "tags", "labels", or "descriptions" generated that you can look at. For more information about CLIP and its capabilities, read about it [here](https://openai.com/research/clip). Immich uses CLIP models. For more information about CLIP and its capabilities, read about it [here](https://openai.com/research/clip).
### How does facial recognition work? ### How does facial recognition work?
See [How Facial Recognition Works](/docs/features/facial-recognition#how-facial-recognition-works) for details. See [How Facial Recognition Works](/docs/features/facial-recognition#How-Facial-Recognition-Works) for details.
### How can I disable machine learning? ### How can I disable machine learning?
@@ -262,7 +213,7 @@ No, this is not supported. Only models listed in the [Hugging Face][huggingface]
### I want to be able to search in other languages besides English. How can I do that? ### I want to be able to search in other languages besides English. How can I do that?
You can change to a multilingual CLIP model. See [here](/docs/features/searching#clip-models) for instructions. You can change to a multilingual CLIP model. See [here](/docs/features/smart-search#CLIP-model) for instructions.
### Does Immich support Facial Recognition for videos? ### Does Immich support Facial Recognition for videos?
@@ -273,7 +224,7 @@ Scanning the entire video for faces may be implemented in the future.
No. No.
:::tip :::tip
You can use [Smart Search](/docs/features/searching.md) for this to some extent. For example, if you have a Golden Retriever and a Chihuahua, type these words in the smart search and watch the results. You can use [Smart Search](/docs/features/smart-search.md) for this to some extent. For example, if you have a Golden Retriever and a Chihuahua, type these words in the smart search and watch the results.
::: :::
### I'm getting a lot of "faces" that aren't faces, what can I do? ### I'm getting a lot of "faces" that aren't faces, what can I do?
@@ -315,7 +266,7 @@ The initial backup is the most intensive due to the number of jobs running. The
- For facial recognition on new images to work properly, you must re-run the Face Detection job for all images after this. - For facial recognition on new images to work properly, you must re-run the Face Detection job for all images after this.
- At the container level, you can [set resource constraints](/docs/FAQ#can-i-limit-cpu-and-ram-usage) to lower usage further. - At the container level, you can [set resource constraints](/docs/FAQ#can-i-limit-cpu-and-ram-usage) to lower usage further.
- It's recommended to only apply these constraints _after_ taking some of the measures here for best performance. - It's recommended to only apply these constraints _after_ taking some of the measures here for best performance.
- If these changes are not enough, see [above](/docs/FAQ#how-can-i-disable-machine-learning) for instructions on how to disable machine learning. - If these changes are not enough, see [below](/docs/FAQ#how-can-i-disable-machine-learning) for instructions on how to disable machine learning.
### Can I limit CPU and RAM usage? ### Can I limit CPU and RAM usage?
@@ -357,7 +308,7 @@ Do not exaggerate with the job concurrency because you're probably thoroughly ov
### My server shows Server Status Offline | Version Unknown. What can I do? ### My server shows Server Status Offline | Version Unknown. What can I do?
You need to [enable WebSockets](/docs/administration/reverse-proxy/) on your reverse proxy. You need to enable WebSockets on your reverse proxy.
--- ---
@@ -382,13 +333,9 @@ You may need to add mount points or docker volumes for the following internal co
- `immich-machine-learning:/.cache` - `immich-machine-learning:/.cache`
- `redis:/data` - `redis:/data`
The non-root user/group needs read/write access to the volume mounts, including `UPLOAD_LOCATION` and `/cache` for machine-learning. The non-root user/group needs read/write access to the volume mounts, including `UPLOAD_LOCATION`.
:::note Docker Compose Volumes For a further hardened system, you can add the following block to every container except for `immich_postgres`.
The Docker Compose top level volume element does not support non-root access; all of the above volumes must be local volume mounts.
:::
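To grant the non-root user access to the bind mounts, you can adjust ownership on the host. A minimal sketch, assuming the containers run as UID/GID 1000 and `UPLOAD_LOCATION` is set in your shell; adjust the IDs and paths to your setup:

```bash
# Give the container user ownership of and read/write access to the upload location
sudo chown -R 1000:1000 "$UPLOAD_LOCATION"
sudo chmod -R u+rwX "$UPLOAD_LOCATION"
```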
For a further hardened system, you can add the following block to every container.
<details> <details>
<summary>docker-compose.yml</summary> <summary>docker-compose.yml</summary>
@@ -437,28 +384,29 @@ If the error says the worker is exiting, then this is normal. This is a feature
There are a few reasons why this can happen. There are a few reasons why this can happen.
If the error mentions SIGKILL or error code 137, it most likely means the service is running out of memory. If the error mentions SIGKILL or error code 137, it most likely means the service is running out of memory. Consider either increasing the server's RAM or moving the service to a server with more RAM.
Consider either increasing the server's RAM or moving the service to a server with more RAM.
If it mentions SIGILL (note the lack of a K) or error code 132, it most likely means your server's CPU is incompatible with Immich. If it mentions SIGILL (note the lack of a K) or error code 132, it most likely means your server's CPU is incompatible. This is unlikely to occur on version 1.92.0 or later. Consider upgrading if your version of Immich is below that.
If your version of Immich is below 1.92.0 and the crash occurs after logs about tracing or exporting a model, consider either upgrading or disabling the Tag Objects job.
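To tell these cases apart, you can inspect the stopped container and your CPU's instruction-set flags. A hedged example, using the machine-learning container name from the compose files above:

```bash
# Exit code 137 with OOMKilled=true points to memory pressure (SIGKILL)
docker inspect immich_machine_learning --format '{{.State.ExitCode}} OOMKilled={{.State.OOMKilled}}'
# For SIGILL/132, check which instruction-set extensions your CPU reports
grep -o 'avx[^ ]*' /proc/cpuinfo | sort -u
```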
## Database ## Database
### Why am I getting database ownership errors? ### Why am I getting database ownership errors?
If you get database errors such as `FATAL: data directory "/var/lib/postgresql/data" has wrong ownership` upon database startup, this is likely due to an issue with your filesystem. If you get database errors such as `FATAL: data directory "/var/lib/postgresql/data" has wrong ownership` upon database startup, this is likely due to an issue with your filesystem.
NTFS and ex/FAT/32 filesystems are not supported. See [here](/docs/install/requirements#special-requirements-for-windows-users) for more details. NTFS and ex/FAT/32 filesystems are not supported. See [here](/docs/install/environment-variables#supported-filesystems) for more details.
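You can check which filesystem backs the database folder before troubleshooting further. A small sketch; replace the path with your actual `DB_DATA_LOCATION`:

```bash
# Print the filesystem type backing the database folder
stat --file-system --format=%T /path/to/postgres
df -T /path/to/postgres
```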
### How can I verify the integrity of my database? ### How can I verify the integrity of my database?
Database checksums are enabled by default for new installations since v1.104.0. You can check if they are enabled by running the following command. If you installed Immich using v1.104.0 or later, you likely have database checksums enabled by default. You can check this by running the following command.
A result of `on` means that checksums are enabled. A result of `on` means that checksums are enabled.
<details> <details>
<summary>Check if checksums are enabled</summary> <summary>Check if checksums are enabled</summary>
```bash ```bash
docker exec -it immich_postgres psql --dbname=postgres --username=<DB_USERNAME> --command="show data_checksums" docker exec -it immich_postgres psql --dbname=immich --username=<DB_USERNAME> --command="show data_checksums"
data_checksums data_checksums
---------------- ----------------
on on
@@ -467,13 +415,13 @@ docker exec -it immich_postgres psql --dbname=postgres --username=<DB_USERNAME>
</details> </details>
If checksums are enabled, you can check the status of the database with the following command. A normal result is all `0`s. If checksums are enabled, you can check the status of the database with the following command. A normal result is all zeroes.
<details> <details>
<summary>Check for database corruption</summary> <summary>Check for database corruption</summary>
```bash ```bash
docker exec -it immich_postgres psql --dbname=postgres --username=<DB_USERNAME> --command="SELECT datname, checksum_failures, checksum_last_failure FROM pg_stat_database WHERE datname IS NOT NULL" docker exec -it immich_postgres psql --dbname=immich --username=<DB_USERNAME> --command="SELECT datname, checksum_failures, checksum_last_failure FROM pg_stat_database WHERE datname IS NOT NULL"
datname | checksum_failures | checksum_last_failure datname | checksum_failures | checksum_last_failure
-----------+-------------------+----------------------- -----------+-------------------+-----------------------
postgres | 0 | postgres | 0 |
@@ -485,24 +433,6 @@ docker exec -it immich_postgres psql --dbname=postgres --username=<DB_USERNAME>
</details> </details>
You can also scan the Postgres database file structure for errors:
<details>
<summary>Scan for file structure errors</summary>
```bash
docker exec -it immich_postgres pg_amcheck --username=<DB_USERNAME> --heapallindexed --parent-check --rootdescend --progress --all --install-missing
```
A normal result will end something like this and return with an exit code of `0`:
```bash
7470/8832 relations (84%), 730829/734735 pages (99%)
8425/8832 relations (95%), 734367/734735 pages (99%)
8832/8832 relations (100%), 734735/734735 pages (100%)
```
</details>
If corruption is detected, you should immediately make a backup before performing any other work in the database. If corruption is detected, you should immediately make a backup before performing any other work in the database.
To do so, you may need to set the `zero_damaged_pages=on` flag for the database server to allow `pg_dumpall` to succeed. To do so, you may need to set the `zero_damaged_pages=on` flag for the database server to allow `pg_dumpall` to succeed.
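One way to do this is to enable the flag only for the dump session through libpq's `PGOPTIONS`. A sketch, using the same container and placeholders as the commands above:

```bash
# Emergency dump with zero_damaged_pages enabled for this session only;
# replace <DB_USERNAME> as in the commands above
docker exec -t -e PGOPTIONS='-c zero_damaged_pages=on' immich_postgres \
  pg_dumpall --clean --if-exists --username=<DB_USERNAME> | gzip > "/path/to/backup/emergency-dump.sql.gz"
```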
After taking a backup, the recommended next step is to restore the database from a healthy backup before corruption was detected. After taking a backup, the recommended next step is to restore the database from a healthy backup before corruption was detected.

View File

@@ -5,10 +5,6 @@ import TabItem from '@theme/TabItem';
A [3-2-1 backup strategy](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) is recommended to protect your data. You should keep copies of your uploaded photos/videos as well as the Immich database for a comprehensive backup solution. This page provides an overview on how to backup the database and the location of user-uploaded pictures and videos. A template bash script that can be run as a cron job is provided [here](/docs/guides/template-backup-script.md) A [3-2-1 backup strategy](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) is recommended to protect your data. You should keep copies of your uploaded photos/videos as well as the Immich database for a comprehensive backup solution. This page provides an overview on how to backup the database and the location of user-uploaded pictures and videos. A template bash script that can be run as a cron job is provided [here](/docs/guides/template-backup-script.md)
:::danger
The instructions on this page show you how to prepare your Immich instance to be backed up, and which files to take a backup of. You still need to take care of using an actual backup tool to make a backup yourself.
:::
## Database ## Database
:::caution :::caution
@@ -19,104 +15,113 @@ Immich saves [file paths in the database](https://github.com/immich-app/immich/d
Refer to the official [postgres documentation](https://www.postgresql.org/docs/current/backup.html) for details about backing up and restoring a postgres database. Refer to the official [postgres documentation](https://www.postgresql.org/docs/current/backup.html) for details about backing up and restoring a postgres database.
::: :::
The recommended way to backup and restore the Immich database is to use the `pg_dumpall` command. When restoring, you need to delete the `DB_DATA_LOCATION` folder (if it exists) to reset the database.
:::caution :::caution
It is not recommended to directly backup the `DB_DATA_LOCATION` folder. Doing so while the database is running can lead to a corrupted backup that cannot be restored. It is not recommended to directly backup the `DB_DATA_LOCATION` folder. Doing so while the database is running can lead to a corrupted backup that cannot be restored.
::: :::
### Automatic Database Dumps
:::warning
The automatic database dumps can be used to restore the database in the event of damage to the Postgres database files.
There is no monitoring for these dumps and you will not be notified if they are unsuccessful.
:::
:::caution
The database dumps do **NOT** contain any pictures or videos, only metadata. They are only usable with a copy of the other files in `UPLOAD_LOCATION` as outlined below.
:::
For disaster-recovery purposes, Immich will automatically create database dumps. The dumps are stored in `UPLOAD_LOCATION/backups`.
Please be sure to make your own, independent backup of the database together with the asset folders as noted below.
You can adjust the schedule and amount of kept database dumps in the [admin settings](http://my.immich.app/admin/system-settings?isOpen=backup).
By default, Immich will keep the last 14 database dumps and create a new dump every day at 2:00 AM.
#### Trigger Dump
You are able to trigger a database dump in the [admin job status page](http://my.immich.app/admin/jobs-status).
Visit the page, open the "Create job" modal from the top right, select "Create Database Dump" and click "Confirm".
A job will run and trigger a dump; you can verify it worked correctly by checking the logs or the `backups/` folder.
These dumps count towards the last `X` dumps that will be kept based on your settings.
#### Restoring
We hope to make restoring simpler in future versions; for now, you can find the database dumps in the `UPLOAD_LOCATION/backups` folder on your host.
Then follow the steps in the following section to restore the database.
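To verify that dumps are being created on schedule, you can list the folder on the host. A small sketch, assuming `UPLOAD_LOCATION` is set in your shell:

```bash
# List the automatic dumps, newest first
ls -lt "$UPLOAD_LOCATION/backups" | head
```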
### Manual Backup and Restore ### Manual Backup and Restore
<Tabs> <Tabs>
<TabItem value="Linux system" label="Linux system" default> <TabItem value="Linux system" label="Linux system" default>
```bash title='Backup' ```bash title='Backup'
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME> | gzip > "/path/to/backup/dump.sql.gz" docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres | gzip > "/path/to/backup/dump.sql.gz"
``` ```
```bash title='Restore' ```bash title='Restore'
docker compose down -v # CAUTION! Deletes all Immich data to start from scratch docker compose down -v # CAUTION! Deletes all Immich data to start from scratch
## Uncomment the next line and replace DB_DATA_LOCATION with your Postgres path to permanently reset the Postgres database ## Uncomment the next line and replace DB_DATA_LOCATION with your Postgres path to permanently reset the Postgres database
# rm -rf DB_DATA_LOCATION # CAUTION! Deletes all Immich data to start from scratch # rm -rf DB_DATA_LOCATION # CAUTION! Deletes all Immich data to start from scratch
docker compose pull # Update to latest version of Immich (if desired) docker compose pull # Update to latest version of Immich (if desired)
docker compose create # Create Docker containers for Immich apps without running them docker compose create # Create Docker containers for Immich apps without running them
docker start immich_postgres # Start Postgres server docker start immich_postgres # Start Postgres server
sleep 10 # Wait for Postgres server to start up sleep 10 # Wait for Postgres server to start up
# Check the database user if you deviated from the default gunzip < "/path/to/backup/dump.sql.gz" \
gunzip --stdout "/path/to/backup/dump.sql.gz" \
| sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" \ | sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" \
| docker exec -i immich_postgres psql --dbname=postgres --username=<DB_USERNAME> # Restore Backup | docker exec -i immich_postgres psql --username=postgres # Restore Backup
docker compose up -d # Start remainder of Immich apps docker compose up -d # Start remainder of Immich apps
``` ```
</TabItem> </TabItem>
<TabItem value="Windows system (PowerShell)" label="Windows system (PowerShell)"> <TabItem value="Windows system (PowerShell)" label="Windows system (PowerShell)">
```powershell title='Backup' ```powershell title='Backup'
[System.IO.File]::WriteAllLines("C:\absolute\path\to\backup\dump.sql", (docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME>)) docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres | Set-Content -Encoding utf8 "C:\path\to\backup\dump.sql"
``` ```
```powershell title='Restore' ```powershell title='Restore'
docker compose down -v # CAUTION! Deletes all Immich data to start from scratch docker compose down -v # CAUTION! Deletes all Immich data to start from scratch
## Uncomment the next line and replace DB_DATA_LOCATION with your Postgres path to permanently reset the Postgres database ## Uncomment the next line and replace DB_DATA_LOCATION with your Postgres path to permanently reset the Postgres database
# Remove-Item -Recurse -Force DB_DATA_LOCATION # CAUTION! Deletes all Immich data to start from scratch # Remove-Item -Recurse -Force DB_DATA_LOCATION # CAUTION! Deletes all Immich data to start from scratch
## You should mount the backup (as a volume, example: `- 'C:\path\to\backup\dump.sql:/dump.sql'`) into the immich_postgres container using the docker-compose.yml docker compose pull # Update to latest version of Immich (if desired)
docker compose pull # Update to latest version of Immich (if desired) docker compose create # Create Docker containers for Immich apps without running them
docker compose create # Create Docker containers for Immich apps without running them docker start immich_postgres # Start Postgres server
docker start immich_postgres # Start Postgres server sleep 10 # Wait for Postgres server to start up
sleep 10 # Wait for Postgres server to start up gc "C:\path\to\backup\dump.sql" | docker exec -i immich_postgres psql --username=postgres # Restore Backup
docker exec -it immich_postgres bash # Enter the Docker shell and run the following command docker compose up -d # Start remainder of Immich apps
# Check the database user if you deviated from the default. If your backup ends in `.gz`, replace `cat` with `gunzip --stdout`
cat "/dump.sql" | sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" | psql --dbname=postgres --username=<DB_USERNAME>
exit # Exit the Docker shell
docker compose up -d # Start remainder of Immich apps
``` ```
</TabItem> </TabItem>
</Tabs> </Tabs>
Note that for the database restore to proceed properly, it requires a completely fresh install (i.e. the Immich server has never run since creating the Docker containers). If the Immich app has run, Postgres conflicts may be encountered upon database restoration (relation already exists, violated foreign key constraints, multiple primary keys, etc.), in which case you need to delete the `DB_DATA_LOCATION` folder to reset the database. Note that for the database restore to proceed properly, it requires a completely fresh install (i.e. the Immich server has never run since creating the Docker containers). If the Immich app has run, Postgres conflicts may be encountered upon database restoration (relation already exists, violated foreign key constraints, multiple primary keys, etc.).
:::tip :::tip
Some deployment methods make it difficult to start the database without also starting the server. In these cases, you may set the environment variable `DB_SKIP_MIGRATIONS=true` before starting the services. This will prevent the server from running migrations that interfere with the restore process. Be sure to remove this variable and restart the services after the database is restored. Some deployment methods make it difficult to start the database without also starting the server or microservices. In these cases, you may set the environmental variable `DB_SKIP_MIGRATIONS=true` before starting the services. This will prevent the server from running migrations that interfere with the restore process. Note that both the server and microservices must have this variable set to prevent the migrations from running. Be sure to remove this variable and restart the services after the database is restored.
:::
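A sketch of how this can look with a compose setup whose services read `.env` (as the compose files above do); adjust the service name to your deployment:

```bash
# Temporarily disable migrations, restore, then remove the override
echo 'DB_SKIP_MIGRATIONS=true' >> .env
docker compose up -d                          # the server starts without running migrations
# ...restore the database as shown above...
sed -i '/^DB_SKIP_MIGRATIONS=true$/d' .env    # remove the override again
docker compose up -d --force-recreate immich-server
```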
### Automatic Database Backups
The database dumps can also be automated (using [this image](https://github.com/prodrigestivill/docker-postgres-backup-local)) by editing the docker compose file to match the following:
```yaml
services:
...
backup:
container_name: immich_db_dumper
image: prodrigestivill/postgres-backup-local:14
restart: always
env_file:
- .env
environment:
POSTGRES_HOST: database
POSTGRES_CLUSTER: 'TRUE'
POSTGRES_USER: ${DB_USERNAME}
POSTGRES_PASSWORD: ${DB_PASSWORD}
POSTGRES_DB: ${DB_DATABASE_NAME}
SCHEDULE: "@daily"
POSTGRES_EXTRA_OPTS: '--clean --if-exists'
BACKUP_DIR: /db_dumps
volumes:
- ./db_dumps:/db_dumps
depends_on:
- database
```
Then you can restore with the same command but pointed at the latest dump.
```bash title='Automated Restore'
gunzip < db_dumps/last/immich-latest.sql.gz \
| sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" \
| docker exec -i immich_postgres psql --username=postgres
```
:::note
If you see the error `ERROR: type "earth" does not exist`, or you have problems with Reverse Geocoding after a restore, add the following `sed` fragment to your restore command.
Example: `gunzip < "/path/to/backup/dump.sql.gz" | sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" | docker exec -i immich_postgres psql --username=postgres`
::: :::
## Filesystem ## Filesystem
Immich stores two types of content in the filesystem: (a) original, unmodified assets (photos and videos), and (b) generated content. We recommend backing up the entire contents of `UPLOAD_LOCATION`, but only the original content is critical, which is stored in the following folders: Immich stores two types of content in the filesystem: (1) original, unmodified assets (photos and videos), and (2) generated content. Only the original content needs to be backed-up, which is stored in the following folders:
1. `UPLOAD_LOCATION/library` 1. `UPLOAD_LOCATION/library`
2. `UPLOAD_LOCATION/upload` 2. `UPLOAD_LOCATION/upload`
3. `UPLOAD_LOCATION/profile` 3. `UPLOAD_LOCATION/profile`
If you choose to back up only those folders, you will need to rerun the transcoding and thumbnail generation jobs for all assets after you restore from a backup.
:::caution :::caution
If you moved some of these folders onto a different storage device, such as `profile/`, make sure to adjust the backup path to match your setup If you moved some of these folders onto a different storage device, such as `profile/`, make sure to adjust the backup path to match your setup
::: :::
@@ -150,10 +155,12 @@ for more info read the [release notes](https://github.com/immich-app/immich/rele
- Preview images (small thumbnails and large previews) for each asset and thumbnails for recognized faces. - Preview images (small thumbnails and large previews) for each asset and thumbnails for recognized faces.
- Stored in `UPLOAD_LOCATION/thumbs/<userID>`. - Stored in `UPLOAD_LOCATION/thumbs/<userID>`.
- **Encoded Assets:** - **Encoded Assets:**
- Videos that have been re-encoded from the original for wider compatibility. The original is not removed. - Videos that have been re-encoded from the original for wider compatibility. The original is not removed.
- Stored in `UPLOAD_LOCATION/encoded-video/<userID>`. - Stored in `UPLOAD_LOCATION/encoded-video/<userID>`.
- **Postgres** - **Postgres**
- The Immich database containing all the information to allow the system to function properly. - The Immich database containing all the information to allow the system to function properly.
**Note:** This folder will only appear to users who have made the changes mentioned in [v1.102.0](https://github.com/immich-app/immich/discussions/8930) (an optional, non-mandatory change) or who started with this version. **Note:** This folder will only appear to users who have made the changes mentioned in [v1.102.0](https://github.com/immich-app/immich/discussions/8930) (an optional, non-mandatory change) or who started with this version.
- Stored in `DB_DATA_LOCATION`. - Stored in `DB_DATA_LOCATION`.
@@ -190,7 +197,7 @@ When you turn off the storage template engine, it will leave the assets in `UPLO
- Stored in `UPLOAD_LOCATION/profile/<userID>`. - Stored in `UPLOAD_LOCATION/profile/<userID>`.
- **Thumbs Images:** - **Thumbs Images:**
- Preview images (blurred, small, large) for each asset and thumbnails for recognized faces. - Preview images (blurred, small, large) for each asset and thumbnails for recognized faces.
- Stored in `UPLOAD_LOCATION/thumbs/<userID>`. - Stored in `UPLOCAD_LOCATION/thumbs/<userID>`.
- **Encoded Assets:** - **Encoded Assets:**
- Videos that have been re-encoded from the original for wider compatibility. The original is not removed. - Videos that have been re-encoded from the original for wider compatibility. The original is not removed.
- Stored in `UPLOAD_LOCATION/encoded-video/<userID>`. - Stored in `UPLOAD_LOCATION/encoded-video/<userID>`.
@@ -199,6 +206,7 @@ When you turn off the storage template engine, it will leave the assets in `UPLO
- Temporarily located in `UPLOAD_LOCATION/upload/<userID>`. - Temporarily located in `UPLOAD_LOCATION/upload/<userID>`.
- Transferred to `UPLOAD_LOCATION/library/<userID>` upon successful upload. - Transferred to `UPLOAD_LOCATION/library/<userID>` upon successful upload.
- **Postgres** - **Postgres**
- The Immich database containing all the information to allow the system to function properly. - The Immich database containing all the information to allow the system to function properly.
**Note:** This folder will only appear to users who have made the changes mentioned in [v1.102.0](https://github.com/immich-app/immich/discussions/8930) (an optional, non-mandatory change) or who started with this version. **Note:** This folder will only appear to users who have made the changes mentioned in [v1.102.0](https://github.com/immich-app/immich/discussions/8930) (an optional, non-mandatory change) or who started with this version.
- Stored in `DB_DATA_LOCATION`. - Stored in `DB_DATA_LOCATION`.
@@ -216,10 +224,3 @@ When you turn off the storage template engine, it will leave the assets in `UPLO
Do not touch the files inside these folders under any circumstances except to take a backup. Changing or removing an asset can cause untracked and missing files.
You can think of it as the App-Which-Must-Not-Be-Named: the only way to view, change, or delete assets is through the mobile or web interface.
:::
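
Putting the folders above together, the on-disk layout looks roughly like the sketch below. This is a minimal illustration assuming the default folder names described in this document; your `UPLOAD_LOCATION` and `DB_DATA_LOCATION` may point anywhere on the host:

```
UPLOAD_LOCATION/
├── library/<userID>/       # originals (with the storage template engine enabled)
├── upload/<userID>/        # temporary files for in-progress uploads
├── profile/<userID>/       # user profile images
├── thumbs/<userID>/        # preview images and face thumbnails
└── encoded-video/<userID>/ # re-encoded videos (originals are kept)

DB_DATA_LOCATION/           # Postgres data directory
```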
## Backup ordering
A backup of Immich should contain both the database and the asset files. When backing these up, it's possible for them to get out of sync, potentially resulting in broken assets after you restore.
The best way to deal with this is to stop the immich-server container while you take the backup. If nothing is changing, the backup will always be in sync.
If stopping the container is not an option, the recommended order is to back up the database first and the filesystem second. This way, the worst-case scenario is that there are files on the filesystem that the database doesn't know about; if necessary, these can be (re)uploaded manually after a restore. If the backup is done the other way around, with the filesystem first and the database second, the restored database may reference files that aren't in the filesystem backup, resulting in broken assets.
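
As a concrete example, a stop-then-backup run might look like the sketch below. It assumes the default compose service and container names (`immich-server`, `immich_postgres`), that `UPLOAD_LOCATION` is exported in your shell, and a backup target of your choosing; adapt names and paths to your setup:

```bash
#!/usr/bin/env bash
set -euo pipefail

BACKUP_DIR=/path/to/backups   # adjust to your backup target

# Stop the server so nothing changes while we copy
docker compose stop immich-server

# 1. Database first: dump Postgres from inside the container
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres \
  | gzip > "$BACKUP_DIR/immich-db-$(date +%F).sql.gz"

# 2. Filesystem second: copy the asset files
rsync -a "$UPLOAD_LOCATION/" "$BACKUP_DIR/library/"

# Bring the server back up
docker compose start immich-server
```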

@@ -18,10 +18,4 @@ You can use [this guide](/docs/guides/smtp-gmail) to use Gmail's SMTP server.
Users can manage their email notification settings from their account settings page on the web. They can choose to turn email notifications on or off for the following events:
<img src={require('./img/user-notifications-settings.webp').default} width="80%" title="User notification settings" />
## Notification templates
You can override the default notification text with custom templates in HTML format. You can use tags to insert dynamic content into your templates.
<img src={require('./img/user-notifications-templates.webp').default} width="80%" title="User notification templates" />
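
As a rough illustration, a custom template might look like the snippet below. The `{username}` tag is hypothetical, used here only to show where a dynamic tag would go; check the template editor in the admin settings for the tags Immich actually supports:

```html
<!-- Hypothetical welcome-email template; the {username} tag name is
     illustrative only and may not match Immich's real tag names. -->
<p>Hi {username},</p>
<p>Welcome to our Immich server! You can sign in and start uploading right away.</p>
```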
