Compare commits


1 Commit

Author: Jason Rasmussen
SHA1: 23373f39cb
Message: feat: docker compose builder
Date: 2025-01-06 09:05:17 -05:00
1967 changed files with 94370 additions and 124993 deletions


@@ -1,2 +0,0 @@
.env
library


@@ -1,16 +1,2 @@
ARG BASEIMAGE=mcr.microsoft.com/devcontainers/typescript-node:22@sha256:a20b8a3538313487ac9266875bbf733e544c1aa2091df2bb99ab592a6d4f7399
ARG BASEIMAGE=mcr.microsoft.com/devcontainers/typescript-node:22@sha256:9791f4aa527774bc370c6bd2f6705ce5a686f1e6f204badd8dfaacce28c631ae
FROM ${BASEIMAGE}
# Flutter SDK
# https://flutter.dev/docs/development/tools/sdk/releases?tab=linux
ENV FLUTTER_CHANNEL="stable"
ENV FLUTTER_VERSION="3.29.1"
ENV FLUTTER_HOME=/flutter
ENV PATH=${PATH}:${FLUTTER_HOME}/bin
# Flutter SDK
RUN mkdir -p ${FLUTTER_HOME} \
&& curl -C - --output flutter.tar.xz https://storage.googleapis.com/flutter_infra_release/releases/${FLUTTER_CHANNEL}/linux/flutter_linux_${FLUTTER_VERSION}-${FLUTTER_CHANNEL}.tar.xz \
&& tar -xf flutter.tar.xz --strip-components=1 -C ${FLUTTER_HOME} \
&& rm flutter.tar.xz \
&& chown -R 1000:1000 ${FLUTTER_HOME}
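
Note: after this change the devcontainer Dockerfile is reduced to a digest-pinned base image; the in-image Flutter SDK install is gone. A sketch (not part of the commit) of resolving a tag to its current digest when updating such a pin:

# Resolve a tag to its repo digest (assumes local Docker CLI access):
docker pull mcr.microsoft.com/devcontainers/typescript-node:22
docker image inspect --format '{{index .RepoDigests 0}}' \
  mcr.microsoft.com/devcontainers/typescript-node:22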


@@ -1,26 +1,20 @@
{
"name": "Immich",
"service": "immich-devcontainer",
"dockerComposeFile": [
"docker-compose.yml",
"../docker/docker-compose.dev.yml"
],
"customizations": {
"vscode": {
"extensions": [
"Dart-Code.dart-code",
"Dart-Code.flutter",
"dbaeumer.vscode-eslint",
"dcmdev.dcm-vscode-extension",
"esbenp.prettier-vscode",
"svelte.svelte-vscode"
]
}
},
"forwardPorts": [],
"initializeCommand": "bash .devcontainer/scripts/initializeCommand.sh",
"onCreateCommand": "bash .devcontainer/scripts/onCreateCommand.sh",
"overrideCommand": true,
"workspaceFolder": "/immich",
"remoteUser": "node"
"name": "Immich devcontainers",
"build": {
"dockerfile": "Dockerfile",
"args": {
"BASEIMAGE": "mcr.microsoft.com/devcontainers/typescript-node:22"
}
},
"customizations": {
"vscode": {
"extensions": [
"svelte.svelte-vscode"
]
}
},
"forwardPorts": [],
"postCreateCommand": "make install-all",
"remoteUser": "node"
}
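
Note: the new configuration builds the devcontainer straight from .devcontainer/Dockerfile rather than through the compose files. A sketch of building and starting it locally (not part of the commit; assumes the optional @devcontainers/cli package):

npm install -g @devcontainers/cli
devcontainer up --workspace-folder .  # builds the Dockerfile image, then starts the container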


@@ -1,8 +0,0 @@
services:
immich-devcontainer:
build:
dockerfile: Dockerfile
extra_hosts:
- 'host.docker.internal:host-gateway'
volumes:
- ..:/immich:cached


@@ -1,6 +0,0 @@
#!/bin/bash
# If .env file does not exist, create it by copying example.env from the docker folder
if [ ! -f ".devcontainer/.env" ]; then
cp docker/example.env .devcontainer/.env
fi
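
Note: the deleted initializeCommand.sh only copies the example file when no .env exists yet. On GNU or BSD cp, the same idempotent copy can be written as a one-liner (sketch, not from the commit):

cp -n docker/example.env .devcontainer/.env  # -n: never overwrite an existing file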


@@ -1,25 +0,0 @@
#!/bin/bash
# Enable multiarch for arm64 if necessary
if [ "$(dpkg --print-architecture)" = "arm64" ]; then
sudo dpkg --add-architecture amd64 && \
sudo apt-get update && \
sudo apt-get install -y --no-install-recommends \
qemu-user-static \
libc6:amd64 \
libstdc++6:amd64 \
libgcc1:amd64
fi
# Install DCM
wget -qO- https://dcm.dev/pgp-key.public | sudo gpg --dearmor -o /usr/share/keyrings/dcm.gpg
sudo echo 'deb [signed-by=/usr/share/keyrings/dcm.gpg arch=amd64] https://dcm.dev/debian stable main' | sudo tee /etc/apt/sources.list.d/dart_stable.list
sudo apt-get update
sudo apt-get install dcm
dart --disable-analytics
# Install immich
cd /immich || exit
make install-all

.gitattributes (vendored, 3 changes)

@@ -6,9 +6,6 @@ mobile/openapi/**/*.dart linguist-generated=true
mobile/lib/**/*.g.dart -diff -merge
mobile/lib/**/*.g.dart linguist-generated=true
mobile/lib/**/*.drift.dart -diff -merge
mobile/lib/**/*.drift.dart linguist-generated=true
open-api/typescript-sdk/fetch-client.ts -diff -merge
open-api/typescript-sdk/fetch-client.ts linguist-generated=true

.github/.nvmrc (vendored, 1 change)

@@ -1 +0,0 @@
22.14.0


@@ -1,5 +1,5 @@
title: '[Feature] feature-name-goes-here'
labels: ['feature']
title: "[Feature] feature-name-goes-here"
labels: ["feature"]
body:
- type: markdown
@@ -11,9 +11,9 @@ body:
- type: checkboxes
attributes:
label: I have searched the existing feature requests, both open and closed, to make sure this is not a duplicate request.
label: I have searched the existing feature requests to make sure this is not a duplicate request.
options:
- label: 'Yes'
- label: "Yes"
required: true
- type: textarea

.github/FUNDING.yml (vendored, 2 changes)

@@ -1 +1 @@
custom: ['https://buy.immich.app', 'https://immich.store']
custom: ['https://buy.immich.app']


@@ -1,13 +1,6 @@
name: Report an issue with Immich
description: Report an issue with Immich
body:
- type: checkboxes
attributes:
label: I have searched the existing issues, both open and closed, to make sure this is not a duplicate report.
options:
- label: 'Yes'
required: true
- type: markdown
attributes:
value: |
@@ -84,7 +77,7 @@ body:
id: repro
attributes:
label: Reproduction steps
description: 'How do you trigger this bug? Please walk us through it step by step.'
description: "How do you trigger this bug? Please walk us through it step by step."
value: |
1.
2.
@@ -97,13 +90,12 @@ body:
id: logs
attributes:
label: Relevant log output
description:
Please copy and paste any relevant logs below. (code formatting is
description: Please copy and paste any relevant logs below. (code formatting is
enabled, no need for backticks)
render: shell
validations:
required: false
- type: textarea
attributes:
label: Additional information


@@ -1 +1,2 @@
blank_issues_enabled: false
blank_pull_request_template_enabled: false


@@ -0,0 +1,22 @@
## Description
<!--- Describe your changes in detail -->
<!--- Why is this change required? What problem does it solve? -->
<!--- If it fixes an open issue, please link to the issue here. -->
Fixes # (issue)
## How Has This Been Tested?
<!-- Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration -->
- [ ] Test A
- [ ] Test B
## Screenshots (if appropriate):
## Checklist:
- [ ] I have performed a self-review of my own code
- [ ] I have made corresponding changes to the documentation if applicable

.github/package-lock.json (generated, vendored, 28 changes)

@@ -1,28 +0,0 @@
{
"name": ".github",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"devDependencies": {
"prettier": "^3.5.3"
}
},
"node_modules/prettier": {
"version": "3.5.3",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.3.tgz",
"integrity": "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==",
"dev": true,
"license": "MIT",
"bin": {
"prettier": "bin/prettier.cjs"
},
"engines": {
"node": ">=14"
},
"funding": {
"url": "https://github.com/prettier/prettier?sponsor=1"
}
}
}
}


@@ -1,9 +0,0 @@
{
"scripts": {
"format": "prettier --check .",
"format:fix": "prettier --write ."
},
"devDependencies": {
"prettier": "^3.5.3"
}
}


@@ -1,36 +0,0 @@
## Description
<!--- Describe your changes in detail -->
<!--- Why is this change required? What problem does it solve? -->
<!--- If it fixes an open issue, please link to the issue here. -->
Fixes # (issue)
## How Has This Been Tested?
<!-- Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration -->
- [ ] Test A
- [ ] Test B
<details><summary><h2>Screenshots (if appropriate)</h2></summary>
<!-- Images go below this line. -->
</details>
<!-- API endpoint changes (if relevant)
## API Changes
The `/api/something` endpoint is now `/api/something-else`
-->
## Checklist:
- [ ] I have performed a self-review of my own code
- [ ] I have made corresponding changes to the documentation if applicable
- [ ] I have no unrelated changes in the PR.
- [ ] I have confirmed that any new dependencies are strictly necessary.
- [ ] I have written tests for new code (if applicable)
- [ ] I have followed naming conventions/patterns in the surrounding code
- [ ] All code in `src/services/` uses repositories implementations for database calls, filesystem operations, etc.
- [ ] All code in `src/repositories/` is pretty basic/simple and does not have any immich specific logic (that belongs in `src/services/`)

.github/release.yml (vendored, 66 changes)

@@ -1,33 +1,33 @@
changelog:
categories:
- title: 🚨 Breaking Changes
labels:
- changelog:breaking-change
- title: 🫥 Deprecated Changes
labels:
- changelog:deprecated
- title: 🔒 Security
labels:
- changelog:security
- title: 🚀 Features
labels:
- changelog:feature
- title: 🌟 Enhancements
labels:
- changelog:enhancement
- title: 🐛 Bug fixes
labels:
- changelog:bugfix
- title: 📚 Documentation
labels:
- changelog:documentation
- title: 🌐 Translations
labels:
- changelog:translation
changelog:
categories:
- title: 🚨 Breaking Changes
labels:
- changelog:breaking-change
- title: 🫥 Deprecated Changes
labels:
- changelog:deprecated
- title: 🔒 Security
labels:
- changelog:security
- title: 🚀 Features
labels:
- changelog:feature
- title: 🌟 Enhancements
labels:
- changelog:enhancement
- title: 🐛 Bug fixes
labels:
- changelog:bugfix
- title: 📚 Documentation
labels:
- changelog:documentation
- title: 🌐 Translations
labels:
- changelog:translation


@@ -7,15 +7,6 @@ on:
ref:
required: false
type: string
secrets:
KEY_JKS:
required: true
ALIAS:
required: true
ANDROID_KEY_PASSWORD:
required: true
ANDROID_STORE_PASSWORD:
required: true
pull_request:
push:
branches: [main]
@@ -24,56 +15,52 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
uses: actions/checkout@v4
- id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
uses: dorny/paths-filter@v3
with:
filters: |
mobile:
- 'mobile/**'
workflow:
- '.github/workflows/build-mobile.yml'
- name: Check if we should force jobs to run
id: should_force
run: echo "should_force=${{ steps.found_paths.outputs.workflow == 'true' || github.event_name == 'workflow_call' || github.event_name == 'workflow_dispatch' }}" >> "$GITHUB_OUTPUT"
run: echo "should_force=${{ github.event_name == 'workflow_call' || github.event_name == 'workflow_dispatch' }}" >> "$GITHUB_OUTPUT"
build-sign-android:
name: Build and sign Android
needs: pre-job
permissions:
contents: read
# Skip when PR from a fork
if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' && needs.pre-job.outputs.should_run == 'true' }}
runs-on: macos-14
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
ref: ${{ inputs.ref || github.sha }}
persist-credentials: false
- name: Determine ref
id: get-ref
run: |
input_ref="${{ inputs.ref }}"
github_ref="${{ github.sha }}"
ref="${input_ref:-$github_ref}"
echo "ref=$ref" >> $GITHUB_OUTPUT
- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4
- uses: actions/checkout@v4
with:
ref: ${{ steps.get-ref.outputs.ref }}
- uses: actions/setup-java@v4
with:
distribution: 'zulu'
java-version: '17'
cache: 'gradle'
- name: Setup Flutter SDK
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2
uses: subosito/flutter-action@v2
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
@@ -89,10 +76,6 @@ jobs:
working-directory: ./mobile
run: flutter pub get
- name: Generate translation file
run: make translation
working-directory: ./mobile
- name: Build Android App Bundle
working-directory: ./mobile
env:
@@ -104,7 +87,7 @@ jobs:
flutter build apk --release --split-per-abi --target-platform android-arm,android-arm64,android-x64
- name: Publish Android Artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
uses: actions/upload-artifact@v4
with:
name: release-apk-signed
path: mobile/build/app/outputs/flutter-apk/*.apk
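
Note: the "Determine ref" step introduced in this diff relies on bash default-value expansion. A minimal sketch of the pattern:

input_ref=""           # empty when the workflow was not called with a ref
github_ref="abc123"
ref="${input_ref:-$github_ref}"  # input_ref if set and non-empty, else github_ref
echo "$ref"            # prints abc123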


@@ -8,38 +8,31 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
cleanup:
name: Cleanup
runs-on: ubuntu-latest
permissions:
contents: read
actions: write
steps:
- name: Check out code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Cleanup
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
REF: ${{ github.ref }}
run: |
gh extension install actions/gh-actions-cache
REPO=${{ github.repository }}
BRANCH=${{ github.ref }}
echo "Fetching list of cache keys"
cacheKeysForPR=$(gh actions-cache list -R $REPO -B ${REF} -L 100 | cut -f 1 )
cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )
## Setting this to not fail the workflow while deleting cache keys.
set +e
echo "Deleting caches..."
for cacheKey in $cacheKeysForPR
do
gh actions-cache delete $cacheKey -R "$REPO" -B "${REF}" --confirm
gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
done
echo "Done"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
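
Note: the two versions of this script differ in how the ref reaches the shell: one passes it through a quoted env var ("${REF}"), the other interpolates ${{ github.ref }} inline and leaves $BRANCH unquoted. A small demo (not from the commit) of the word-splitting hazard that quoting prevents:

REF='refs/heads/feature branch'   # hypothetical ref containing a space
printf '<%s>\n' $REF              # unquoted: splits into two arguments
printf '<%s>\n' "$REF"            # quoted: stays one argument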


@@ -6,6 +6,7 @@ on:
- 'cli/**'
- '.github/workflows/cli.yml'
pull_request:
branches: [main]
paths:
- 'cli/**'
- '.github/workflows/cli.yml'
@@ -16,25 +17,21 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
permissions:
packages: write
jobs:
publish:
name: CLI Publish
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./cli
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- uses: actions/checkout@v4
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
- uses: actions/setup-node@v4
with:
node-version-file: './cli/.nvmrc'
registry-url: 'https://registry.npmjs.org'
@@ -52,25 +49,20 @@ jobs:
docker:
name: Docker
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
needs: publish
steps:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
uses: docker/setup-qemu-action@v3.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
uses: docker/setup-buildx-action@v3.8.0
- name: Login to GitHub Container Registry
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
uses: docker/login-action@v3
if: ${{ !github.event.pull_request.head.repo.fork }}
with:
registry: ghcr.io
@@ -85,7 +77,7 @@ jobs:
- name: Generate docker image tags
id: metadata
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
uses: docker/metadata-action@v5
with:
flavor: |
latest=false
@@ -96,7 +88,7 @@ jobs:
type=raw,value=latest,enable=${{ github.event_name == 'release' }}
- name: Build and push image
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
uses: docker/build-push-action@v6.10.0
with:
file: cli/Dockerfile
platforms: linux/amd64,linux/arm64


@@ -9,14 +9,14 @@
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: 'CodeQL'
name: "CodeQL"
on:
push:
branches: ['main']
branches: [ "main" ]
pull_request:
# The branches below must be a subset of the branches above
branches: ['main']
branches: [ "main" ]
schedule:
- cron: '20 13 * * 1'
@@ -24,8 +24,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
analyze:
name: Analyze
@@ -38,44 +36,43 @@ jobs:
strategy:
fail-fast: false
matrix:
language: ['javascript', 'python']
language: [ 'javascript', 'python' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@45775bd8235c68ba998cffa5171334d58593da47 # v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@45775bd8235c68ba998cffa5171334d58593da47 # v3
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v3
# If the Autobuild fails above, remove it and uncomment the following three lines.
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
# If the Autobuild fails above, remove it and uncomment the following three lines.
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@45775bd8235c68ba998cffa5171334d58593da47 # v3
with:
category: '/language:${{matrix.language}}'
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"

.github/workflows/docker-cleanup.yml (vendored, new file, 73 changes)

@@ -0,0 +1,73 @@
# This workflow runs on certain conditions to check for and potentially
# delete container images from the GHCR which no longer have an associated
# code branch.
# Requires a PAT with the correct scope set in the secrets.
#
# This workflow will not trigger runs on forked repos.
name: Docker Cleanup
on:
pull_request:
types:
- "closed"
push:
paths:
- ".github/workflows/docker-cleanup.yml"
concurrency:
group: registry-tags-cleanup
cancel-in-progress: false
jobs:
cleanup-images:
name: Cleanup Stale Images Tags for ${{ matrix.primary-name }}
runs-on: ubuntu-24.04
strategy:
fail-fast: false
matrix:
include:
- primary-name: "immich-server"
- primary-name: "immich-machine-learning"
env:
# Requires a personal access token with the OAuth scope delete:packages
TOKEN: ${{ secrets.PACKAGE_DELETE_TOKEN }}
steps:
- name: Clean temporary images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/ephemeral@v0.9.0
with:
token: "${{ env.TOKEN }}"
owner: "immich-app"
is_org: "true"
do_delete: "true"
package_name: "${{ matrix.primary-name }}"
scheme: "pull_request"
repo_name: "immich"
match_regex: '^pr-(\d+)$|^(\d+)$'
cleanup-untagged-images:
name: Cleanup Untagged Images Tags for ${{ matrix.primary-name }}
runs-on: ubuntu-24.04
needs:
- cleanup-images
strategy:
fail-fast: false
matrix:
include:
- primary-name: "immich-server"
- primary-name: "immich-machine-learning"
- primary-name: "immich-build-cache"
env:
# Requires a personal access token with the OAuth scope delete:packages
TOKEN: ${{ secrets.PACKAGE_DELETE_TOKEN }}
steps:
- name: Clean untagged images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/untagged@v0.9.0
with:
token: "${{ env.TOKEN }}"
owner: "immich-app"
do_delete: "true"
is_org: "true"
package_name: "${{ matrix.primary-name }}"
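
Note: this workflow deletes GHCR image versions whose pull-request tags no longer correspond to a live branch. A sketch of inspecting the version/tag data it operates on (not from the commit; assumes gh is authenticated with the read:packages scope):

gh api "/orgs/immich-app/packages/container/immich-server/versions" \
  --paginate --jq '.[] | {id, tags: .metadata.container.tags}'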


@@ -5,6 +5,7 @@ on:
push:
branches: [main]
pull_request:
branches: [main]
release:
types: [published]
@@ -12,23 +13,20 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
permissions:
packages: write
jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
uses: actions/checkout@v4
- id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
uses: dorny/paths-filter@v3
with:
filters: |
server:
@@ -38,184 +36,155 @@ jobs:
- 'i18n/**'
machine-learning:
- 'machine-learning/**'
workflow:
- '.github/workflows/docker.yml'
- name: Check if we should force jobs to run
id: should_force
run: echo "should_force=${{ steps.found_paths.outputs.workflow == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' }}" >> "$GITHUB_OUTPUT"
run: echo "should_force=${{ github.event_name == 'workflow_dispatch' || github.event_name == 'release' }}" >> "$GITHUB_OUTPUT"
retag_ml:
name: Re-Tag ML
needs: pre-job
permissions:
contents: read
packages: write
if: ${{ needs.pre-job.outputs.should_run_ml == 'false' && !github.event.pull_request.head.repo.fork }}
runs-on: ubuntu-latest
strategy:
matrix:
suffix: ['', '-cuda', '-rocm', '-openvino', '-armnn', '-rknn']
suffix: ["", "-cuda", "-openvino", "-armnn"]
steps:
- name: Login to GitHub Container Registry
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Re-tag image
env:
REGISTRY_NAME: 'ghcr.io'
REPOSITORY: ${{ github.repository_owner }}/immich-machine-learning
TAG_OLD: main${{ matrix.suffix }}
TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
run: |
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Re-tag image
run: |
REGISTRY_NAME="ghcr.io"
REPOSITORY=${{ github.repository_owner }}/immich-machine-learning
TAG_OLD=main${{ matrix.suffix }}
TAG_NEW=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_NEW $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
retag_server:
name: Re-Tag Server
needs: pre-job
permissions:
contents: read
packages: write
if: ${{ needs.pre-job.outputs.should_run_server == 'false' && !github.event.pull_request.head.repo.fork }}
runs-on: ubuntu-latest
strategy:
matrix:
suffix: ['']
suffix: [""]
steps:
- name: Login to GitHub Container Registry
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Re-tag image
env:
REGISTRY_NAME: 'ghcr.io'
REPOSITORY: ${{ github.repository_owner }}/immich-server
TAG_OLD: main${{ matrix.suffix }}
TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
run: |
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
REGISTRY_NAME="ghcr.io"
REPOSITORY=${{ github.repository_owner }}/immich-server
TAG_OLD=main${{ matrix.suffix }}
TAG_NEW=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_NEW $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
build_and_push_ml:
name: Build and Push ML
needs: pre-job
permissions:
contents: read
packages: write
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
runs-on: ${{ matrix.runner }}
runs-on: ubuntu-latest
env:
image: immich-machine-learning
context: machine-learning
file: machine-learning/Dockerfile
GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-machine-learning
strategy:
# Prevent a failure in one image from stopping the other builds
fail-fast: false
matrix:
include:
- platform: linux/amd64
runner: ubuntu-latest
- platforms: linux/amd64,linux/arm64
device: cpu
- platform: linux/arm64
runner: ubuntu-24.04-arm
device: cpu
- platform: linux/amd64
runner: ubuntu-latest
- platforms: linux/amd64
device: cuda
suffix: -cuda
- platform: linux/amd64
runner: mich
device: rocm
suffix: -rocm
- platform: linux/amd64
runner: ubuntu-latest
- platforms: linux/amd64
device: openvino
suffix: -openvino
- platform: linux/arm64
runner: ubuntu-24.04-arm
- platforms: linux/arm64
device: armnn
suffix: -armnn
- platform: linux/arm64
runner: ubuntu-24.04-arm
device: rknn
suffix: -rknn
steps:
- name: Prepare
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
uses: docker/setup-buildx-action@v3.8.0
- name: Login to Docker Hub
# Only push to Docker Hub when making a release
if: ${{ github.event_name == 'release' }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
uses: docker/login-action@v3
# Skip when PR from a fork
if: ${{ !github.event.pull_request.head.repo.fork }}
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Generate cache key suffix
env:
REF: ${{ github.ref_name }}
run: |
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
echo "CACHE_KEY_SUFFIX=pr-${{ github.event.number }}" >> $GITHUB_ENV
else
SUFFIX=$(echo "${REF}" | sed 's/[^a-zA-Z0-9]/-/g')
echo "CACHE_KEY_SUFFIX=${SUFFIX}" >> $GITHUB_ENV
fi
- name: Generate docker image tags
id: metadata
uses: docker/metadata-action@v5
with:
flavor: |
# Disable latest tag
latest=false
images: |
name=ghcr.io/${{ github.repository_owner }}/${{env.image}}
name=altran1502/${{env.image}},enable=${{ github.event_name == 'release' }}
tags: |
# Tag with branch name
type=ref,event=branch,suffix=${{ matrix.suffix }}
# Tag with pr-number
type=ref,event=pr,suffix=${{ matrix.suffix }}
# Tag with git tag on release
type=ref,event=tag,suffix=${{ matrix.suffix }}
type=raw,value=release,enable=${{ github.event_name == 'release' }},suffix=${{ matrix.suffix }}
- name: Generate cache target
- name: Determine build cache output
id: cache-target
run: |
if [[ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]]; then
# Essentially just ignore the cache output (forks can't write to registry cache)
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
# Essentially just ignore the cache output (PR can't write to registry cache)
echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
else
echo "cache-to=type=registry,ref=${GHCR_REPO}-build-cache:${PLATFORM_PAIR}-${{ matrix.device }}-${CACHE_KEY_SUFFIX},mode=max,compression=zstd" >> $GITHUB_OUTPUT
echo "cache-to=type=registry,mode=max,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{ env.image }}" >> $GITHUB_OUTPUT
fi
- name: Generate docker image tags
id: meta
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
env:
DOCKER_METADATA_PR_HEAD_SHA: 'true'
- name: Build and push image
id: build
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
uses: docker/build-push-action@v6.10.0
with:
context: ${{ env.context }}
file: ${{ env.file }}
platforms: ${{ matrix.platforms }}
labels: ${{ steps.meta.outputs.labels }}
# Skip pushing when PR from a fork
push: ${{ !github.event.pull_request.head.repo.fork }}
cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{env.image}}
cache-to: ${{ steps.cache-target.outputs.cache-to }}
cache-from: |
type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ matrix.device }}-${{ env.CACHE_KEY_SUFFIX }}
type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ matrix.device }}-main
outputs: type=image,"name=${{ env.GHCR_REPO }}",push-by-digest=true,name-canonical=true,push=${{ !github.event.pull_request.head.repo.fork }}
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
DEVICE=${{ matrix.device }}
BUILD_ID=${{ github.run_id }}
@@ -223,310 +192,100 @@ jobs:
BUILD_SOURCE_REF=${{ github.ref_name }}
BUILD_SOURCE_COMMIT=${{ github.sha }}
- name: Export digest
run: |
mkdir -p ${{ runner.temp }}/digests
digest="${{ steps.build.outputs.digest }}"
touch "${{ runner.temp }}/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
with:
name: ml-digests-${{ matrix.device }}-${{ env.PLATFORM_PAIR }}
path: ${{ runner.temp }}/digests/*
if-no-files-found: error
retention-days: 1
merge_ml:
name: Merge & Push ML
runs-on: ubuntu-latest
permissions:
contents: read
actions: read
packages: write
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' && !github.event.pull_request.head.repo.fork }}
env:
GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-machine-learning
DOCKER_REPO: altran1502/immich-machine-learning
strategy:
matrix:
include:
- device: cpu
- device: cuda
suffix: -cuda
- device: rocm
suffix: -rocm
- device: openvino
suffix: -openvino
- device: armnn
suffix: -armnn
- device: rknn
suffix: -rknn
needs:
- build_and_push_ml
steps:
- name: Download digests
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
with:
path: ${{ runner.temp }}/digests
pattern: ml-digests-${{ matrix.device }}-*
merge-multiple: true
- name: Login to Docker Hub
if: ${{ github.event_name == 'release' }}
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GHCR
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
- name: Generate docker image tags
id: meta
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
env:
DOCKER_METADATA_PR_HEAD_SHA: 'true'
with:
flavor: |
# Disable latest tag
latest=false
suffix=${{ matrix.suffix }}
images: |
name=${{ env.GHCR_REPO }}
name=${{ env.DOCKER_REPO }},enable=${{ github.event_name == 'release' }}
tags: |
# Tag with branch name
type=ref,event=branch
# Tag with pr-number
type=ref,event=pr
# Tag with long commit sha hash
type=sha,format=long,prefix=commit-
# Tag with git tag on release
type=ref,event=tag
type=raw,value=release,enable=${{ github.event_name == 'release' }}
- name: Create manifest list and push
working-directory: ${{ runner.temp }}/digests
run: |
# Process annotations
declare -a ANNOTATIONS=()
if [[ -n "$DOCKER_METADATA_OUTPUT_JSON" ]]; then
while IFS= read -r annotation; do
# Extract key and value by removing the manifest: prefix
if [[ "$annotation" =~ ^manifest:(.+)=(.+)$ ]]; then
key="${BASH_REMATCH[1]}"
value="${BASH_REMATCH[2]}"
# Use array to properly handle arguments with spaces
ANNOTATIONS+=(--annotation "index:$key=$value")
fi
done < <(jq -r '.annotations[]' <<< "$DOCKER_METADATA_OUTPUT_JSON")
fi
TAGS=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
SOURCE_ARGS=$(printf "${GHCR_REPO}@sha256:%s " *)
docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS
build_and_push_server:
name: Build and Push Server
runs-on: ${{ matrix.runner }}
permissions:
contents: read
packages: write
runs-on: ubuntu-latest
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
env:
image: immich-server
context: .
file: server/Dockerfile
GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-server
strategy:
fail-fast: false
matrix:
include:
- platform: linux/amd64
runner: ubuntu-latest
- platform: linux/arm64
runner: ubuntu-24.04-arm
- platforms: linux/amd64,linux/arm64
device: cpu
steps:
- name: Prepare
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
uses: docker/setup-buildx-action@v3.8.0
- name: Login to Docker Hub
# Only push to Docker Hub when making a release
if: ${{ github.event_name == 'release' }}
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
uses: docker/login-action@v3
# Skip when PR from a fork
if: ${{ !github.event.pull_request.head.repo.fork }}
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Generate cache key suffix
env:
REF: ${{ github.ref_name }}
run: |
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
echo "CACHE_KEY_SUFFIX=pr-${{ github.event.number }}" >> $GITHUB_ENV
else
SUFFIX=$(echo "${REF}" | sed 's/[^a-zA-Z0-9]/-/g')
echo "CACHE_KEY_SUFFIX=${SUFFIX}" >> $GITHUB_ENV
fi
- name: Generate docker image tags
id: metadata
uses: docker/metadata-action@v5
with:
flavor: |
# Disable latest tag
latest=false
images: |
name=ghcr.io/${{ github.repository_owner }}/${{env.image}}
name=altran1502/${{env.image}},enable=${{ github.event_name == 'release' }}
tags: |
# Tag with branch name
type=ref,event=branch,suffix=${{ matrix.suffix }}
# Tag with pr-number
type=ref,event=pr,suffix=${{ matrix.suffix }}
# Tag with git tag on release
type=ref,event=tag,suffix=${{ matrix.suffix }}
type=raw,value=release,enable=${{ github.event_name == 'release' }},suffix=${{ matrix.suffix }}
- name: Generate cache target
- name: Determine build cache output
id: cache-target
run: |
if [[ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]]; then
# Essentially just ignore the cache output (forks can't write to registry cache)
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
# Essentially just ignore the cache output (PR can't write to registry cache)
echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
else
echo "cache-to=type=registry,ref=${GHCR_REPO}-build-cache:${PLATFORM_PAIR}-${CACHE_KEY_SUFFIX},mode=max,compression=zstd" >> $GITHUB_OUTPUT
echo "cache-to=type=registry,mode=max,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{ env.image }}" >> $GITHUB_OUTPUT
fi
- name: Generate docker image tags
id: meta
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
env:
DOCKER_METADATA_PR_HEAD_SHA: 'true'
- name: Build and push image
id: build
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
uses: docker/build-push-action@v6.10.0
with:
context: ${{ env.context }}
file: ${{ env.file }}
platforms: ${{ matrix.platform }}
labels: ${{ steps.meta.outputs.labels }}
platforms: ${{ matrix.platforms }}
# Skip pushing when PR from a fork
push: ${{ !github.event.pull_request.head.repo.fork }}
cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{env.image}}
cache-to: ${{ steps.cache-target.outputs.cache-to }}
cache-from: |
type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ env.CACHE_KEY_SUFFIX }}
type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-main
outputs: type=image,"name=${{ env.GHCR_REPO }}",push-by-digest=true,name-canonical=true,push=${{ !github.event.pull_request.head.repo.fork }}
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
DEVICE=cpu
DEVICE=${{ matrix.device }}
BUILD_ID=${{ github.run_id }}
BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.metadata.outputs.tags }}
BUILD_SOURCE_REF=${{ github.ref_name }}
BUILD_SOURCE_COMMIT=${{ github.sha }}
- name: Export digest
run: |
mkdir -p ${{ runner.temp }}/digests
digest="${{ steps.build.outputs.digest }}"
touch "${{ runner.temp }}/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
with:
name: server-digests-${{ env.PLATFORM_PAIR }}
path: ${{ runner.temp }}/digests/*
if-no-files-found: error
retention-days: 1
merge_server:
name: Merge & Push Server
runs-on: ubuntu-latest
permissions:
contents: read
actions: read
packages: write
if: ${{ needs.pre-job.outputs.should_run_server == 'true' && !github.event.pull_request.head.repo.fork }}
env:
GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-server
DOCKER_REPO: altran1502/immich-server
needs:
- build_and_push_server
steps:
- name: Download digests
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
with:
path: ${{ runner.temp }}/digests
pattern: server-digests-*
merge-multiple: true
- name: Login to Docker Hub
if: ${{ github.event_name == 'release' }}
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GHCR
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
- name: Generate docker image tags
id: meta
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
env:
DOCKER_METADATA_PR_HEAD_SHA: 'true'
with:
flavor: |
# Disable latest tag
latest=false
suffix=${{ matrix.suffix }}
images: |
name=${{ env.GHCR_REPO }}
name=${{ env.DOCKER_REPO }},enable=${{ github.event_name == 'release' }}
tags: |
# Tag with branch name
type=ref,event=branch
# Tag with pr-number
type=ref,event=pr
# Tag with long commit sha hash
type=sha,format=long,prefix=commit-
# Tag with git tag on release
type=ref,event=tag
type=raw,value=release,enable=${{ github.event_name == 'release' }}
- name: Create manifest list and push
working-directory: ${{ runner.temp }}/digests
run: |
# Process annotations
declare -a ANNOTATIONS=()
if [[ -n "$DOCKER_METADATA_OUTPUT_JSON" ]]; then
while IFS= read -r annotation; do
# Extract key and value by removing the manifest: prefix
if [[ "$annotation" =~ ^manifest:(.+)=(.+)$ ]]; then
key="${BASH_REMATCH[1]}"
value="${BASH_REMATCH[2]}"
# Use array to properly handle arguments with spaces
ANNOTATIONS+=(--annotation "index:$key=$value")
fi
done < <(jq -r '.annotations[]' <<< "$DOCKER_METADATA_OUTPUT_JSON")
fi
TAGS=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
SOURCE_ARGS=$(printf "${GHCR_REPO}@sha256:%s " *)
docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS
success-check-server:
name: Docker Build & Push Server Success
needs: [merge_server, retag_server]
permissions: {}
needs: [build_and_push_server, retag_server]
runs-on: ubuntu-latest
if: always()
steps:
@@ -539,8 +298,7 @@ jobs:
success-check-ml:
name: Docker Build & Push ML Success
needs: [merge_ml, retag_ml]
permissions: {}
needs: [build_and_push_ml, retag_ml]
runs-on: ubuntu-latest
if: always()
steps:
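
Note: one side of this docker.yml diff builds each platform in its own job, pushes the result by digest only, and then has a merge job stitch the digests into a single tagged manifest list; the other side builds multi-platform images in one job. A condensed sketch of the digest-merge flow (image name and digests are placeholders, not values from the workflow):

REPO=ghcr.io/immich-app/immich-server
# per-platform jobs push untagged images by digest:
#   outputs: type=image,name=$REPO,push-by-digest=true,push=true
# the merge job then assembles and verifies the multi-arch manifest list:
docker buildx imagetools create -t "$REPO:main" \
  "$REPO@sha256:<amd64-digest>" "$REPO@sha256:<arm64-digest>"
docker buildx imagetools inspect "$REPO:main"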


@@ -3,6 +3,7 @@ on:
push:
branches: [main]
pull_request:
branches: [main]
release:
types: [published]
@@ -10,37 +11,27 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run: ${{ steps.found_paths.outputs.docs == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run: ${{ steps.found_paths.outputs.docs == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
uses: actions/checkout@v4
- id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
uses: dorny/paths-filter@v3
with:
filters: |
docs:
- 'docs/**'
workflow:
- '.github/workflows/docs-build.yml'
- name: Check if we should force jobs to run
id: should_force
run: echo "should_force=${{ steps.found_paths.outputs.workflow == 'true' || github.event_name == 'release' || github.ref_name == 'main' }}" >> "$GITHUB_OUTPUT"
run: echo "should_force=${{ github.event_name == 'release' || github.ref_name == 'main' }}" >> "$GITHUB_OUTPUT"
build:
name: Docs Build
needs: pre-job
permissions:
contents: read
if: ${{ needs.pre-job.outputs.should_run == 'true' }}
runs-on: ubuntu-latest
defaults:
@@ -49,12 +40,10 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
uses: actions/setup-node@v4
with:
node-version-file: './docs/.nvmrc'
@@ -68,7 +57,7 @@ jobs:
run: npm run build
- name: Upload build output
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
uses: actions/upload-artifact@v4
with:
name: docs-build-output
path: docs/build/


@@ -1,7 +1,7 @@
name: Docs deploy
on:
workflow_run:
workflows: ['Docs build']
workflows: ["Docs build"]
types:
- completed
@@ -9,9 +9,6 @@ jobs:
checks:
name: Docs Deploy Checks
runs-on: ubuntu-latest
permissions:
actions: read
pull-requests: read
outputs:
parameters: ${{ steps.parameters.outputs.result }}
artifact: ${{ steps.get-artifact.outputs.result }}
@@ -20,7 +17,7 @@ jobs:
run: echo 'The triggering workflow did not succeed' && exit 1
- name: Get artifact
id: get-artifact
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
uses: actions/github-script@v7
with:
script: |
let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
@@ -38,9 +35,7 @@ jobs:
return { found: true, id: matchArtifact.id };
- name: Determine deploy parameters
id: parameters
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
env:
HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
uses: actions/github-script@v7
with:
script: |
const eventType = context.payload.workflow_run.event;
@@ -62,8 +57,7 @@ jobs:
} else if (eventType == "pull_request") {
let pull_number = context.payload.workflow_run.pull_requests[0]?.number;
if(!pull_number) {
const {HEAD_SHA} = process.env;
const response = await github.rest.search.issuesAndPullRequests({q: `repo:${{ github.repository }} is:pr sha:${HEAD_SHA}`,per_page: 1,})
const response = await github.rest.search.issuesAndPullRequests({q: 'repo:${{ github.repository }} is:pr sha:${{ github.event.workflow_run.head_sha }}',per_page: 1,})
const items = response.data.items
if (items.length < 1) {
throw new Error("No pull request found for the commit")
@@ -101,20 +95,14 @@ jobs:
name: Docs Deploy
runs-on: ubuntu-latest
needs: checks
permissions:
contents: read
actions: read
pull-requests: write
if: ${{ fromJson(needs.checks.outputs.artifact).found && fromJson(needs.checks.outputs.parameters).shouldDeploy }}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Load parameters
id: parameters
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
uses: actions/github-script@v7
with:
script: |
const json = `${{ needs.checks.outputs.parameters }}`;
@@ -127,7 +115,7 @@ jobs:
echo "Starting docs deployment for ${{ steps.parameters.outputs.event }} ${{ steps.parameters.outputs.name }}"
- name: Download artifact
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
uses: actions/github-script@v7
with:
script: |
let artifact = ${{ needs.checks.outputs.artifact }};
@@ -150,12 +138,12 @@ jobs:
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
uses: gruntwork-io/terragrunt-action@9559e51d05873b0ea467c42bbabcb5c067642ccc # v2
uses: gruntwork-io/terragrunt-action@v2
with:
tg_version: '0.58.12'
tofu_version: '1.7.1'
tg_dir: 'deployment/modules/cloudflare/docs'
tg_command: 'apply'
tg_version: "0.58.12"
tofu_version: "1.7.1"
tg_dir: "deployment/modules/cloudflare/docs"
tg_command: "apply"
- name: Deploy Docs Subdomain Output
id: docs-output
@@ -165,29 +153,27 @@ jobs:
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
uses: gruntwork-io/terragrunt-action@9559e51d05873b0ea467c42bbabcb5c067642ccc # v2
uses: gruntwork-io/terragrunt-action@v2
with:
tg_version: '0.58.12'
tofu_version: '1.7.1'
tg_dir: 'deployment/modules/cloudflare/docs'
tg_command: 'output -json'
tg_version: "0.58.12"
tofu_version: "1.7.1"
tg_dir: "deployment/modules/cloudflare/docs"
tg_command: "output -json"
- name: Output Cleaning
id: clean
env:
TG_OUTPUT: ${{ steps.docs-output.outputs.tg_action_output }}
run: |
CLEANED=$(echo "$TG_OUTPUT" | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
echo "output=$CLEANED" >> $GITHUB_OUTPUT
TG_OUT=$(echo '${{ steps.docs-output.outputs.tg_action_output }}' | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
echo "output=$TG_OUT" >> $GITHUB_OUTPUT
- name: Publish to Cloudflare Pages
uses: cloudflare/pages-action@f0a1cd58cd66095dee69bfa18fa5efd1dde93bca # v1
uses: cloudflare/pages-action@v1
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN_PAGES_UPLOAD }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
projectName: ${{ fromJson(steps.clean.outputs.output).pages_project_name.value }}
workingDirectory: 'docs'
directory: 'build'
workingDirectory: "docs"
directory: "build"
branch: ${{ steps.parameters.outputs.name }}
wranglerVersion: '3'
@@ -198,7 +184,7 @@ jobs:
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
uses: gruntwork-io/terragrunt-action@9559e51d05873b0ea467c42bbabcb5c067642ccc # v2
uses: gruntwork-io/terragrunt-action@v2
with:
tg_version: '0.58.12'
tofu_version: '1.7.1'
@@ -206,7 +192,7 @@ jobs:
tg_command: 'apply'
- name: Comment
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3
uses: actions-cool/maintain-one-comment@v3
if: ${{ steps.parameters.outputs.event == 'pr' }}
with:
number: ${{ fromJson(needs.checks.outputs.parameters).pr_number }}
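
Note: the two versions of the "Output Cleaning" step differ in how the Terragrunt output reaches the shell: one reads it from the TG_OUTPUT env var, the other splices ${{ steps.docs-output.outputs.tg_action_output }} directly into the script source. Inline interpolation is the classic script-injection hazard that env-var indirection avoids, as in this hypothetical demo:

TG_OUTPUT="'; rm -rf ~; echo '"   # hypothetical malicious output
echo "$TG_OUTPUT"                 # read via the env var it is inert data; nothing executes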


@@ -3,37 +3,30 @@ on:
pull_request_target:
types: [closed]
permissions: {}
jobs:
deploy:
name: Docs Destroy
runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: write
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Destroy Docs Subdomain
env:
TF_VAR_prefix_name: 'pr-${{ github.event.number }}'
TF_VAR_prefix_event_type: 'pr'
TF_VAR_prefix_name: "pr-${{ github.event.number }}"
TF_VAR_prefix_event_type: "pr"
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
uses: gruntwork-io/terragrunt-action@9559e51d05873b0ea467c42bbabcb5c067642ccc # v2
uses: gruntwork-io/terragrunt-action@v2
with:
tg_version: '0.58.12'
tofu_version: '1.7.1'
tg_dir: 'deployment/modules/cloudflare/docs'
tg_command: 'destroy -refresh=false'
tg_version: "0.58.12"
tofu_version: "1.7.1"
tg_dir: "deployment/modules/cloudflare/docs"
tg_command: "destroy -refresh=false"
- name: Comment
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3
uses: actions-cool/maintain-one-comment@v3
with:
number: ${{ github.event.number }}
delete: true


@@ -4,32 +4,28 @@ on:
pull_request:
types: [labeled]
permissions: {}
jobs:
fix-formatting:
runs-on: ubuntu-latest
if: ${{ github.event.label.name == 'fix:formatting' }}
permissions:
contents: write
pull-requests: write
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@3ff1caaa28b64c9cc276ce0a02e2ff584f3900c5 # v2
uses: actions/create-github-app-token@v1
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: 'Checkout'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.ref }}
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: true
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
uses: actions/setup-node@v4
with:
node-version-file: './server/.nvmrc'
@@ -37,13 +33,13 @@ jobs:
run: make install-all && make format-all
- name: Commit and push
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9
uses: EndBug/add-and-commit@v9
with:
default_author: github_actions
message: 'chore: fix formatting'
- name: Remove label
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
uses: actions/github-script@v7
if: always()
with:
script: |
@@ -53,3 +49,4 @@ jobs:
repo: context.repo.repo,
name: 'fix:formatting'
})


@@ -4,8 +4,6 @@ on:
pull_request_target:
types: [opened, labeled, unlabeled, synchronize]
permissions: {}
jobs:
validate-release-label:
runs-on: ubuntu-latest
@@ -14,11 +12,11 @@ jobs:
pull-requests: write
steps:
- name: Require PR to have a changelog label
uses: mheap/github-action-required-labels@388fd6af37b34cdfe5a23b37060e763217e58b03 # v5
uses: mheap/github-action-required-labels@v5
with:
mode: exactly
count: 1
use_regex: true
labels: 'changelog:.*'
labels: "changelog:.*"
add_comment: true
message: 'Label error. Requires {{errorString}} {{count}} of: {{ provided }}. Found: {{ applied }}. A maintainer will add the required label.'
message: "Label error. Requires {{errorString}} {{count}} of: {{ provided }}. Found: {{ applied }}. A maintainer will add the required label."


@@ -1,8 +1,6 @@
name: 'Pull Request Labeler'
name: "Pull Request Labeler"
on:
- pull_request_target
permissions: {}
- pull_request_target
jobs:
labeler:
@@ -11,4 +9,4 @@ jobs:
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5
- uses: actions/labeler@v5


@@ -4,16 +4,12 @@ on:
pull_request:
types: [opened, synchronize, reopened, edited]
permissions: {}
jobs:
validate-pr-title:
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- name: PR Conventional Commit Validation
uses: ytanikin/PRConventionalCommits@b628c5a234cc32513014b7bfdd1e47b532124d98 # 1.3.0
uses: ytanikin/PRConventionalCommits@1.3.0
with:
task_types: '["feat","fix","docs","test","ci","refactor","perf","chore","revert"]'
add_label: 'false'


@@ -21,37 +21,35 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-root
cancel-in-progress: true
permissions: {}
jobs:
bump_version:
runs-on: ubuntu-latest
outputs:
ref: ${{ steps.push-tag.outputs.commit_long_sha }}
permissions: {} # No job-level permissions are needed because it uses the app-token
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@3ff1caaa28b64c9cc276ce0a02e2ff584f3900c5 # v2
uses: actions/create-github-app-token@v1
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@v4
with:
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: true
- name: Install uv
uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5
- name: Install Poetry
run: pipx install poetry
- name: Bump version
run: misc/release/pump-version.sh -s "${{ inputs.serverBump }}" -m "${{ inputs.mobileBump }}"
- name: Commit and tag
id: push-tag
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9
uses: EndBug/add-and-commit@v9
with:
default_author: github_actions
message: 'chore: version ${{ env.IMMICH_VERSION }}'
@@ -61,45 +59,30 @@ jobs:
build_mobile:
uses: ./.github/workflows/build-mobile.yml
needs: bump_version
secrets:
KEY_JKS: ${{ secrets.KEY_JKS }}
ALIAS: ${{ secrets.ALIAS }}
ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
secrets: inherit
with:
ref: ${{ needs.bump_version.outputs.ref }}
prepare_release:
runs-on: ubuntu-latest
needs: build_mobile
permissions:
actions: read # To download the app artifact
# No content permissions are needed because it uses the app-token
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@3ff1caaa28b64c9cc276ce0a02e2ff584f3900c5 # v2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
steps:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@v4
with:
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: false
token: ${{ secrets.ORG_RELEASE_TOKEN }}
- name: Download APK
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
uses: actions/download-artifact@v4
with:
name: release-apk-signed
- name: Create draft release
uses: softprops/action-gh-release@c95fe1489396fe8a9eb87c0abf8aa5b2ef267fda # v2
uses: softprops/action-gh-release@v2
with:
draft: true
tag_name: ${{ env.IMMICH_VERSION }}
token: ${{ steps.generate-token.outputs.token }}
generate_release_notes: true
body_path: misc/release/notes.tmpl
files: |


@@ -1,35 +0,0 @@
name: Preview label
on:
pull_request:
types: [labeled, closed]
permissions: {}
jobs:
comment-status:
runs-on: ubuntu-latest
if: ${{ github.event.action == 'labeled' && github.event.label.name == 'preview' }}
permissions:
pull-requests: write
steps:
- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2
with:
message-id: 'preview-status'
message: 'Deploying preview environment to https://pr-${{ github.event.pull_request.number }}.preview.internal.immich.cloud/'
remove-label:
runs-on: ubuntu-latest
if: ${{ github.event.action == 'closed' && contains(github.event.pull_request.labels.*.name, 'preview') }}
permissions:
pull-requests: write
steps:
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
with:
script: |
github.rest.issues.removeLabel({
issue_number: context.payload.pull_request.number,
owner: context.repo.owner,
repo: context.repo.repo,
name: 'preview'
})

View File

@@ -4,24 +4,20 @@ on:
release:
types: [published]
permissions: {}
permissions:
packages: write
jobs:
publish:
name: Publish `@immich/sdk`
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./open-api/typescript-sdk
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- uses: actions/checkout@v4
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
- uses: actions/setup-node@v4
with:
node-version-file: './open-api/typescript-sdk/.nvmrc'
registry-url: 'https://registry.npmjs.org'

View File

@@ -9,47 +9,37 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
uses: actions/checkout@v4
- id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
uses: dorny/paths-filter@v3
with:
filters: |
mobile:
- 'mobile/**'
workflow:
- '.github/workflows/static_analysis.yml'
- name: Check if we should force jobs to run
id: should_force
run: echo "should_force=${{ steps.found_paths.outputs.workflow == 'true' || github.event_name == 'release' }}" >> "$GITHUB_OUTPUT"
run: echo "should_force=${{ github.event_name == 'release' }}" >> "$GITHUB_OUTPUT"
mobile-dart-analyze:
name: Run Dart Code Analysis
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Setup Flutter SDK
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2
uses: subosito/flutter-action@v2
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
@@ -58,32 +48,6 @@ jobs:
run: dart pub get
working-directory: ./mobile
- name: Generate translation file
run: make translation; dart format lib/generated/codegen_loader.g.dart
working-directory: ./mobile
- name: Run Build Runner
run: make build
working-directory: ./mobile
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20
id: verify-changed-files
with:
files: |
mobile/**/*.g.dart
mobile/**/*.gr.dart
mobile/**/*.drift.dart
- name: Verify files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
run: |
echo "ERROR: Generated files not up to date! Run make_build inside the mobile directory"
echo "Changed files: ${CHANGED_FILES}"
exit 1
- name: Run dart analyze
run: dart analyze --fatal-infos
working-directory: ./mobile
@@ -95,3 +59,8 @@ jobs:
- name: Run dart custom_lint
run: dart run custom_lint
working-directory: ./mobile
# Enable after riverpod generator migration is completed
# - name: Run dart custom lint
# run: dart run custom_lint
# working-directory: ./mobile

View File

@@ -9,13 +9,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run_web: ${{ steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
@@ -25,15 +21,11 @@ jobs:
should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_e2e_web: ${{ steps.found_paths.outputs.e2e == 'true' || steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_e2e_server_cli: ${{ steps.found_paths.outputs.e2e == 'true' || steps.found_paths.outputs.server == 'true' || steps.found_paths.outputs.cli == 'true' || steps.should_force.outputs.should_force == 'true' }}
should_run_.github: ${{ steps.found_paths.outputs['.github'] == 'true' || steps.should_force.outputs.should_force == 'true' }} # redundant with should_force, but kept so this line needn't change if the trigger does
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
uses: actions/checkout@v4
- id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
uses: dorny/paths-filter@v3
with:
filters: |
web:
@@ -51,34 +43,26 @@ jobs:
- 'mobile/**'
machine-learning:
- 'machine-learning/**'
workflow:
- '.github/workflows/test.yml'
.github:
- '.github/**'
- name: Check if we should force jobs to run
id: should_force
run: echo "should_force=${{ steps.found_paths.outputs.workflow == 'true' || github.event_name == 'workflow_dispatch' }}" >> "$GITHUB_OUTPUT"
run: echo "should_force=${{ github.event_name == 'workflow_dispatch' }}" >> "$GITHUB_OUTPUT"
server-unit-tests:
name: Test & Lint Server
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./server
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
uses: actions/setup-node@v4
with:
node-version-file: './server/.nvmrc'
@@ -106,20 +90,16 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./cli
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
uses: actions/setup-node@v4
with:
node-version-file: './cli/.nvmrc'
@@ -151,20 +131,16 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
runs-on: windows-latest
permissions:
contents: read
defaults:
run:
working-directory: ./cli
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
uses: actions/setup-node@v4
with:
node-version-file: './cli/.nvmrc'
@@ -189,20 +165,16 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_web == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./web
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
uses: actions/setup-node@v4
with:
node-version-file: './web/.nvmrc'
@@ -238,20 +210,16 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_e2e == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./e2e
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
uses: actions/setup-node@v4
with:
node-version-file: './e2e/.nvmrc'
@@ -276,55 +244,43 @@ jobs:
run: npm run check
if: ${{ !cancelled() }}
server-medium-tests:
medium-tests-server:
name: Medium Tests (Server)
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./server
runs-on: mich
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: 'recursive'
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
with:
node-version-file: './server/.nvmrc'
- name: Run npm install
run: npm ci
- name: Production build
if: ${{ !cancelled() }}
run: docker compose -f e2e/docker-compose.yml build
- name: Run medium tests
run: npm run test:medium
if: ${{ !cancelled() }}
run: make test-medium
e2e-tests-server-cli:
name: End-to-End Tests (Server & CLI)
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_e2e_server_cli == 'true' }}
runs-on: mich
permissions:
contents: read
defaults:
run:
working-directory: ./e2e
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: 'recursive'
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
uses: actions/setup-node@v4
with:
node-version-file: './e2e/.nvmrc'
@@ -355,21 +311,18 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_e2e_web == 'true' }}
runs-on: mich
permissions:
contents: read
defaults:
run:
working-directory: ./e2e
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: 'recursive'
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
uses: actions/setup-node@v4
with:
node-version-file: './e2e/.nvmrc'
@@ -399,15 +352,10 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_mobile == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- uses: actions/checkout@v4
- name: Setup Flutter SDK
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2
uses: subosito/flutter-action@v2
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
@@ -420,99 +368,55 @@ jobs:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./machine-learning
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: actions/checkout@v4
- name: Install poetry
run: pipx install poetry
- uses: actions/setup-python@v5
with:
persist-credentials: false
- name: Install uv
uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5
# TODO: add caching when supported (https://github.com/actions/setup-python/pull/818)
# with:
# python-version: 3.11
# cache: 'uv'
python-version: 3.11
cache: 'poetry'
- name: Install dependencies
run: |
uv sync --extra cpu
poetry install --with dev --with cpu
- name: Lint with ruff
run: |
uv run ruff check --output-format=github immich_ml
poetry run ruff check --output-format=github app export
- name: Check black formatting
run: |
uv run black --check immich_ml
poetry run black --check app export
- name: Run mypy type checking
run: |
uv run mypy --strict immich_ml/
poetry run mypy --install-types --non-interactive --strict app/
- name: Run tests and coverage
run: |
uv run pytest --cov=immich_ml --cov-report term-missing
github-files-formatting:
name: .github Files Formatting
needs: pre-job
if: ${{ needs.pre-job.outputs['should_run_.github'] == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./.github
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
with:
node-version-file: './.github/.nvmrc'
- name: Run npm install
run: npm ci
- name: Run formatter
run: npm run format
if: ${{ !cancelled() }}
poetry run pytest app --cov=app --cov-report term-missing
shellcheck:
name: ShellCheck
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- uses: actions/checkout@v4
- name: Run ShellCheck
uses: ludeeus/action-shellcheck@master
with:
ignore_paths: >-
**/open-api/**
**/openapi**
**/openapi/**
**/node_modules/**
generated-api-up-to-date:
name: OpenAPI Clients
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
uses: actions/setup-node@v4
with:
node-version-file: './server/.nvmrc'
@@ -526,7 +430,7 @@ jobs:
run: make open-api
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20
uses: tj-actions/verify-changed-files@v20
id: verify-changed-files
with:
files: |
@@ -536,21 +440,17 @@ jobs:
- name: Verify files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
run: |
echo "ERROR: Generated files not up to date!"
echo "Changed files: ${CHANGED_FILES}"
echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
exit 1
generated-typeorm-migrations-up-to-date:
name: TypeORM Checks
runs-on: ubuntu-latest
permissions:
contents: read
services:
postgres:
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
env:
POSTGRES_PASSWORD: postgres
POSTGRES_USER: postgres
@@ -568,12 +468,10 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
uses: actions/setup-node@v4
with:
node-version-file: './server/.nvmrc'
@@ -584,29 +482,26 @@ jobs:
run: npm run build
- name: Run existing migrations
run: npm run migrations:run
run: npm run typeorm:migrations:run
- name: Test npm run schema:reset command works
run: npm run schema:reset
run: npm run typeorm:schema:reset
- name: Generate new migrations
continue-on-error: true
run: npm run migrations:generate TestMigration
run: npm run typeorm:migrations:generate ./src/migrations/TestMigration
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20
uses: tj-actions/verify-changed-files@v20
id: verify-changed-files
with:
files: |
server/src
server/src/migrations/
- name: Verify migration files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
run: |
echo "ERROR: Generated migration files not up to date!"
echo "Changed files: ${CHANGED_FILES}"
cat ./src/*-TestMigration.ts
echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
exit 1
- name: Run SQL generation
@@ -615,7 +510,7 @@ jobs:
DB_URL: postgres://postgres:postgres@localhost:5432/immich
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20
uses: tj-actions/verify-changed-files@v20
id: verify-changed-sql-files
with:
files: |
@@ -623,11 +518,9 @@ jobs:
- name: Verify SQL files have not changed
if: steps.verify-changed-sql-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-sql-files.outputs.changed_files }}
run: |
echo "ERROR: Generated SQL files not up to date!"
echo "Changed files: ${CHANGED_FILES}"
echo "Changed files: ${{ steps.verify-changed-sql-files.outputs.changed_files }}"
exit 1
# mobile-integration-tests:

View File

@@ -1,60 +0,0 @@
name: Weblate checks
on:
pull_request:
branches: [main]
permissions: {}
jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run: ${{ steps.found_paths.outputs.i18n == 'true' && github.head_ref != 'chore/translations'}}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
persist-credentials: false
- id: found_paths
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
with:
filters: |
i18n:
- 'i18n/!(en)**\.json'
enforce-lock:
name: Check Weblate Lock
needs: [pre-job]
runs-on: ubuntu-latest
permissions: {}
if: ${{ needs.pre-job.outputs.should_run == 'true' }}
steps:
- name: Check weblate lock
run: |
if [[ "false" = $(curl https://hosted.weblate.org/api/components/immich/immich/lock/ | jq .locked) ]]; then
exit 1
fi
- name: Find Pull Request
uses: juliangruber/find-pull-request-action@48b6133aa6c826f267ebd33aa2d29470f9d9e7d0 # v1
id: find-pr
with:
branch: chore/translations
- name: Fail if existing weblate PR
if: ${{ steps.find-pr.outputs.number }}
run: exit 1
success-check-lock:
name: Weblate Lock Check Success
needs: [enforce-lock]
runs-on: ubuntu-latest
permissions: {}
if: always()
steps:
- name: Any jobs failed?
if: ${{ contains(needs.*.result, 'failure') }}
run: exit 1
- name: All jobs passed or skipped
if: ${{ !(contains(needs.*.result, 'failure')) }}
run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"

View File

@@ -39,7 +39,6 @@
],
"explorer.fileNesting.enabled": true,
"explorer.fileNesting.patterns": {
"*.ts": "${capture}.spec.ts,${capture}.mock.ts",
"*.dart": "${capture}.g.dart,${capture}.gr.dart,${capture}.drift.dart"
"*.ts": "${capture}.spec.ts,${capture}.mock.ts"
}
}

View File

@@ -39,7 +39,7 @@ attach-server:
renovate:
LOG_LEVEL=debug npx renovate --platform=local --repository-cache=reset
MODULES = e2e server web cli sdk docs .github
MODULES = e2e server web cli sdk docs
audit-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) audit fix
@@ -77,14 +77,14 @@ test-medium:
test-medium-dev:
docker exec -it immich_server /bin/sh -c "npm run test:medium"
build-all: $(foreach M,$(filter-out e2e .github,$(MODULES)),build-$M) ;
build-all: $(foreach M,$(filter-out e2e,$(MODULES)),build-$M) ;
install-all: $(foreach M,$(MODULES),install-$M) ;
check-all: $(foreach M,$(filter-out sdk cli docs .github,$(MODULES)),check-$M) ;
lint-all: $(foreach M,$(filter-out sdk docs .github,$(MODULES)),lint-$M) ;
check-all: $(foreach M,$(filter-out sdk cli docs,$(MODULES)),check-$M) ;
lint-all: $(foreach M,$(filter-out sdk docs,$(MODULES)),lint-$M) ;
format-all: $(foreach M,$(filter-out sdk,$(MODULES)),format-$M) ;
audit-all: $(foreach M,$(MODULES),audit-$M) ;
hygiene-all: lint-all format-all check-all sql audit-all;
test-all: $(foreach M,$(filter-out sdk docs .github,$(MODULES)),test-$M) ;
test-all: $(foreach M,$(filter-out sdk docs,$(MODULES)),test-$M) ;
clean:
find . -name "node_modules" -type d -prune -exec rm -rf '{}' +

View File

@@ -1,11 +1,11 @@
<p align="center">
<br/>
<br/>
<a href="https://opensource.org/license/agpl-v3"><img src="https://img.shields.io/badge/License-AGPL_v3-blue.svg?color=3F51B5&style=for-the-badge&label=License&logoColor=000000&labelColor=ececec" alt="License: AGPLv3"></a>
<a href="https://discord.immich.app">
<img src="https://img.shields.io/discord/979116623879368755.svg?label=Discord&logo=Discord&style=for-the-badge&logoColor=000000&labelColor=ececec" alt="Discord"/>
</a>
<br/>
<br/>
<br/>
<br/>
</p>
<p align="center">
@@ -29,7 +29,6 @@
<a href="readme_i18n/README_nl_NL.md">Nederlands</a>
<a href="readme_i18n/README_tr_TR.md">Türkçe</a>
<a href="readme_i18n/README_zh_CN.md">中文</a>
<a href="readme_i18n/README_uk_UA.md">Українська</a>
<a href="readme_i18n/README_ru_RU.md">Русский</a>
<a href="readme_i18n/README_pt_BR.md">Português Brasileiro</a>
<a href="readme_i18n/README_sv_SE.md">Svenska</a>
@@ -61,7 +60,9 @@
## Demo
Access the demo [here](https://demo.immich.app). For the mobile app, you can use `https://demo.immich.app` for the `Server Endpoint URL`.
Access the demo [here](https://demo.immich.app). The demo is running on a Free-tier Oracle VM in Amsterdam with a 2.4 GHz quad-core ARM64 CPU and 24 GB RAM.
For the mobile app, you can use `https://demo.immich.app/api` for the `Server Endpoint URL`.
### Login credentials
@@ -102,7 +103,7 @@ Access the demo [here](https://demo.immich.app). For the mobile app, you can use
| Read-only gallery | Yes | Yes |
| Stacked Photos | Yes | Yes |
| Tags | No | Yes |
| Folder View | Yes | Yes |
| Folder View | No | Yes |
## Translations

View File

@@ -1 +1 @@
22.14.0
22.12.0

View File

@@ -1,4 +1,4 @@
FROM node:22.14.0-alpine3.20@sha256:40be979442621049f40b1d51a26b55e281246b5de4e5f51a18da7beb6e17e3f9 AS core
FROM node:22.12.0-alpine3.20@sha256:96cc8323e25c8cc6ddcb8b965e135cfd57846e8003ec0d7bcec16c5fd5f6d39f AS core
WORKDIR /usr/src/open-api/typescript-sdk
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./

View File

@@ -1,29 +1,39 @@
import { FlatCompat } from '@eslint/eslintrc';
import js from '@eslint/js';
import eslintPluginPrettierRecommended from 'eslint-plugin-prettier/recommended';
import eslintPluginUnicorn from 'eslint-plugin-unicorn';
import typescriptEslint from '@typescript-eslint/eslint-plugin';
import tsParser from '@typescript-eslint/parser';
import globals from 'globals';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import typescriptEslint from 'typescript-eslint';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const compat = new FlatCompat({
baseDirectory: __dirname,
recommendedConfig: js.configs.recommended,
allConfig: js.configs.all,
});
export default typescriptEslint.config([
eslintPluginUnicorn.configs.recommended,
eslintPluginPrettierRecommended,
js.configs.recommended,
typescriptEslint.configs.recommended,
export default [
{
ignores: ['eslint.config.mjs', 'dist'],
},
...compat.extends(
'plugin:@typescript-eslint/recommended',
'plugin:prettier/recommended',
'plugin:unicorn/recommended',
),
{
plugins: {
'@typescript-eslint': typescriptEslint,
},
languageOptions: {
globals: {
...globals.node,
},
parser: typescriptEslint.parser,
parser: tsParser,
ecmaVersion: 5,
sourceType: 'module',
@@ -48,4 +58,4 @@ export default typescriptEslint.config([
'object-shorthand': ['error', 'always'],
},
},
]);
];
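
For reference, the typescript-eslint package imported in the new config above ships a typed config() helper alongside its parser and plugin, which is what allows the FlatCompat shim to be dropped. A minimal sketch of the same pattern; the ignores entry is an illustrative assumption:

import js from '@eslint/js';
import tseslint from 'typescript-eslint';

export default tseslint.config([
  { ignores: ['dist'] }, // illustrative: build outputs to skip
  js.configs.recommended,
  tseslint.configs.recommended, // nested config arrays are flattened by config()
]);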

2486
cli/package-lock.json generated

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
{
"name": "@immich/cli",
"version": "2.2.61",
"version": "2.2.37",
"description": "Command Line Interface (CLI) for Immich",
"type": "module",
"exports": "./dist/index.js",
@@ -19,26 +19,26 @@
"@types/byte-size": "^8.1.0",
"@types/cli-progress": "^3.11.0",
"@types/lodash-es": "^4.17.12",
"@types/micromatch": "^4.0.9",
"@types/mock-fs": "^4.13.1",
"@types/node": "^22.14.0",
"@vitest/coverage-v8": "^3.0.0",
"@types/node": "^22.10.2",
"@typescript-eslint/eslint-plugin": "^8.15.0",
"@typescript-eslint/parser": "^8.15.0",
"@vitest/coverage-v8": "^2.0.5",
"byte-size": "^9.0.0",
"cli-progress": "^3.12.0",
"commander": "^12.0.0",
"eslint": "^9.14.0",
"eslint-config-prettier": "^10.0.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^57.0.0",
"globals": "^16.0.0",
"eslint-plugin-unicorn": "^56.0.1",
"globals": "^15.9.0",
"mock-fs": "^5.2.0",
"prettier": "^3.2.5",
"prettier-plugin-organize-imports": "^4.0.0",
"typescript": "^5.3.3",
"typescript-eslint": "^8.28.0",
"vite": "^6.0.0",
"vite": "^5.0.12",
"vite-tsconfig-paths": "^5.0.0",
"vitest": "^3.0.0",
"vitest": "^2.0.5",
"vitest-fetch-mock": "^0.4.0",
"yaml": "^2.3.1"
},
@@ -62,13 +62,11 @@
"node": ">=20.0.0"
},
"dependencies": {
"chokidar": "^4.0.3",
"fast-glob": "^3.3.2",
"fastq": "^1.17.1",
"lodash-es": "^4.17.21",
"micromatch": "^4.0.8"
"lodash-es": "^4.17.21"
},
"volta": {
"node": "22.14.0"
"node": "22.12.0"
}
}

View File

@@ -1,13 +1,12 @@
import * as fs from 'node:fs';
import * as os from 'node:os';
import * as path from 'node:path';
import { setTimeout as sleep } from 'node:timers/promises';
import { describe, expect, it, MockedFunction, vi } from 'vitest';
import { describe, expect, it, vi } from 'vitest';
import { Action, checkBulkUpload, defaults, getSupportedMediaTypes, Reason } from '@immich/sdk';
import { Action, checkBulkUpload, defaults, Reason } from '@immich/sdk';
import createFetchMock from 'vitest-fetch-mock';
import { checkForDuplicates, getAlbumName, startWatch, uploadFiles, UploadOptionsDto } from 'src/commands/asset';
import { checkForDuplicates, getAlbumName, uploadFiles, UploadOptionsDto } from './asset';
vi.mock('@immich/sdk');
@@ -200,112 +199,3 @@ describe('checkForDuplicates', () => {
});
});
});
describe('startWatch', () => {
let testFolder: string;
let checkBulkUploadMocked: MockedFunction<typeof checkBulkUpload>;
beforeEach(async () => {
vi.restoreAllMocks();
vi.mocked(getSupportedMediaTypes).mockResolvedValue({
image: ['.jpg'],
sidecar: ['.xmp'],
video: ['.mp4'],
});
testFolder = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'test-startWatch-'));
checkBulkUploadMocked = vi.mocked(checkBulkUpload);
checkBulkUploadMocked.mockResolvedValue({
results: [],
});
});
it('should start watching a directory and upload new files', async () => {
const testFilePath = path.join(testFolder, 'test.jpg');
await startWatch([testFolder], { concurrency: 1 }, { batchSize: 1, debounceTimeMs: 10 });
await sleep(100); // give the watcher time to settle so it does not treat the test file as an existing file
await fs.promises.writeFile(testFilePath, 'testjpg');
await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
expect(checkBulkUpload).toHaveBeenCalledWith({
assetBulkUploadCheckDto: {
assets: [
expect.objectContaining({
id: testFilePath,
}),
],
},
});
});
it('should filter out unsupported files', async () => {
const testFilePath = path.join(testFolder, 'test.jpg');
const unsupportedFilePath = path.join(testFolder, 'test.txt');
await startWatch([testFolder], { concurrency: 1 }, { batchSize: 1, debounceTimeMs: 10 });
await sleep(100); // give the watcher time to settle so it does not treat the test file as an existing file
await fs.promises.writeFile(testFilePath, 'testjpg');
await fs.promises.writeFile(unsupportedFilePath, 'testtxt');
await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
expect(checkBulkUpload).toHaveBeenCalledWith({
assetBulkUploadCheckDto: {
assets: expect.arrayContaining([
expect.objectContaining({
id: testFilePath,
}),
]),
},
});
expect(checkBulkUpload).not.toHaveBeenCalledWith({
assetBulkUploadCheckDto: {
assets: expect.arrayContaining([
expect.objectContaining({
id: unsupportedFilePath,
}),
]),
},
});
});
it('should filter out ignored patterns', async () => {
const testFilePath = path.join(testFolder, 'test.jpg');
const ignoredPattern = 'ignored';
const ignoredFolder = path.join(testFolder, ignoredPattern);
await fs.promises.mkdir(ignoredFolder, { recursive: true });
const ignoredFilePath = path.join(ignoredFolder, 'ignored.jpg');
await startWatch([testFolder], { concurrency: 1, ignore: ignoredPattern }, { batchSize: 1, debounceTimeMs: 10 });
await sleep(100); // give the watcher time to settle so it does not treat the test file as an existing file
await fs.promises.writeFile(testFilePath, 'testjpg');
await fs.promises.writeFile(ignoredFilePath, 'ignoredjpg');
await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
expect(checkBulkUpload).toHaveBeenCalledWith({
assetBulkUploadCheckDto: {
assets: expect.arrayContaining([
expect.objectContaining({
id: testFilePath,
}),
]),
},
});
expect(checkBulkUpload).not.toHaveBeenCalledWith({
assetBulkUploadCheckDto: {
assets: expect.arrayContaining([
expect.objectContaining({
id: ignoredFilePath,
}),
]),
},
});
});
afterEach(async () => {
await fs.promises.rm(testFolder, { recursive: true, force: true });
});
});

View File

@@ -12,18 +12,13 @@ import {
getSupportedMediaTypes,
} from '@immich/sdk';
import byteSize from 'byte-size';
import { Matcher, watch as watchFs } from 'chokidar';
import { MultiBar, Presets, SingleBar } from 'cli-progress';
import { chunk } from 'lodash-es';
import micromatch from 'micromatch';
import { Stats, createReadStream } from 'node:fs';
import { stat, unlink } from 'node:fs/promises';
import path, { basename } from 'node:path';
import { Queue } from 'src/queue';
import { BaseOptions, Batcher, authenticate, crawl, sha1 } from 'src/utils';
const UPLOAD_WATCH_BATCH_SIZE = 100;
const UPLOAD_WATCH_DEBOUNCE_TIME_MS = 10_000;
import { BaseOptions, authenticate, crawl, sha1 } from 'src/utils';
const s = (count: number) => (count === 1 ? '' : 's');
@@ -41,8 +36,6 @@ export interface UploadOptionsDto {
albumName?: string;
includeHidden?: boolean;
concurrency: number;
progress?: boolean;
watch?: boolean;
}
class UploadFile extends File {
@@ -62,94 +55,19 @@ class UploadFile extends File {
}
}
const uploadBatch = async (files: string[], options: UploadOptionsDto) => {
const { newFiles, duplicates } = await checkForDuplicates(files, options);
const newAssets = await uploadFiles(newFiles, options);
await updateAlbums([...newAssets, ...duplicates], options);
await deleteFiles(newFiles, options);
};
export const startWatch = async (
paths: string[],
options: UploadOptionsDto,
{
batchSize = UPLOAD_WATCH_BATCH_SIZE,
debounceTimeMs = UPLOAD_WATCH_DEBOUNCE_TIME_MS,
}: { batchSize?: number; debounceTimeMs?: number } = {},
) => {
const watcherIgnored: Matcher[] = [];
const { image, video } = await getSupportedMediaTypes();
const extensions = new Set([...image, ...video]);
if (options.ignore) {
watcherIgnored.push((path) => micromatch.contains(path, `**/${options.ignore}`));
}
const pathsBatcher = new Batcher<string>({
batchSize,
debounceTimeMs,
onBatch: async (paths: string[]) => {
const uniquePaths = [...new Set(paths)];
await uploadBatch(uniquePaths, options);
},
});
const onFile = async (path: string, stats?: Stats) => {
if (stats?.isDirectory()) {
return;
}
const ext = '.' + path.split('.').pop()?.toLowerCase();
if (!ext || !extensions.has(ext)) {
return;
}
if (!options.progress) {
// only log when the progress bar is disabled, since extra output interferes with its rendering
console.log(`Change detected: ${path}`);
}
pathsBatcher.add(path);
};
const fsWatcher = watchFs(paths, {
ignoreInitial: true,
ignored: watcherIgnored,
alwaysStat: true,
awaitWriteFinish: true,
depth: options.recursive ? undefined : 1,
persistent: true,
})
.on('add', onFile)
.on('change', onFile)
.on('error', (error) => console.error(`Watcher error: ${error}`));
process.on('SIGINT', async () => {
console.log('Exiting...');
await fsWatcher.close();
process.exit();
});
};
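
A minimal usage sketch of the watcher above — the paths, concurrency, and override values are illustrative, and this assumes recursive is among the UploadOptionsDto fields elided by the hunk:

// hedged sketch: start watching with small batches for quick feedback
const options: UploadOptionsDto = { concurrency: 4, recursive: true, progress: false, watch: true };
await startWatch(['/photos'], options, { batchSize: 50, debounceTimeMs: 5000 });
// chokidar 'add'/'change' events now flow through onFile() into pathsBatcher,
// which deduplicates each batch and hands it to uploadBatch() once 50 paths
// accumulate or 5 seconds pass without a new event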
export const upload = async (paths: string[], baseOptions: BaseOptions, options: UploadOptionsDto) => {
await authenticate(baseOptions);
const scanFiles = await scan(paths, options);
if (scanFiles.length === 0) {
if (options.watch) {
console.log('No files found initially.');
} else {
console.log('No files found, exiting');
return;
}
console.log('No files found, exiting');
return;
}
if (options.watch) {
console.log('Watching for changes...');
await startWatch(paths, options);
// watcher does not handle the initial scan
// as the scan() is a more efficient quick start with batched results
}
await uploadBatch(scanFiles, options);
const { newFiles, duplicates } = await checkForDuplicates(scanFiles, options);
const newAssets = await uploadFiles(newFiles, options);
await updateAlbums([...newAssets, ...duplicates], options);
await deleteFiles(newFiles, options);
};
const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
@@ -167,25 +85,19 @@ const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
return files;
};
export const checkForDuplicates = async (files: string[], { concurrency, skipHash, progress }: UploadOptionsDto) => {
export const checkForDuplicates = async (files: string[], { concurrency, skipHash }: UploadOptionsDto) => {
if (skipHash) {
console.log('Skipping hash check, assuming all files are new');
return { newFiles: files, duplicates: [] };
}
let multiBar: MultiBar | undefined;
const multiBar = new MultiBar(
{ format: '{message} | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
Presets.shades_classic,
);
if (progress) {
multiBar = new MultiBar(
{ format: '{message} | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
Presets.shades_classic,
);
} else {
console.log(`Received ${files.length} files, hashing...`);
}
const hashProgressBar = multiBar?.create(files.length, 0, { message: 'Hashing files ' });
const checkProgressBar = multiBar?.create(files.length, 0, { message: 'Checking for duplicates' });
const hashProgressBar = multiBar.create(files.length, 0, { message: 'Hashing files ' });
const checkProgressBar = multiBar.create(files.length, 0, { message: 'Checking for duplicates' });
const newFiles: string[] = [];
const duplicates: Asset[] = [];
@@ -205,7 +117,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
}
}
checkProgressBar?.increment(assets.length);
checkProgressBar.increment(assets.length);
},
{ concurrency, retry: 3 },
);
@@ -225,7 +137,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
void checkBulkUploadQueue.push(batch);
}
hashProgressBar?.increment();
hashProgressBar.increment();
return results;
},
{ concurrency, retry: 3 },
@@ -243,7 +155,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
await checkBulkUploadQueue.drained();
multiBar?.stop();
multiBar.stop();
console.log(`Found ${newFiles.length} new files and ${duplicates.length} duplicate${s(duplicates.length)}`);
@@ -259,10 +171,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
return { newFiles, duplicates };
};
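
The multiBar?./bar?. optional chaining above is what lets one code path serve both the default and --no-progress modes. A small sketch of the pattern, assuming the cli-progress API pinned in the CLI's package.json; showProgress stands in for options.progress:

import { MultiBar, Presets } from 'cli-progress';

const showProgress = process.stdout.isTTY; // illustrative condition
const multiBar = showProgress
  ? new MultiBar({ format: '{message} | {bar} | {value}/{total}' }, Presets.shades_classic)
  : undefined;
const bar = multiBar?.create(100, 0, { message: 'Hashing files' });
bar?.increment(); // silently a no-op when progress is disabled
multiBar?.stop();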
export const uploadFiles = async (
files: string[],
{ dryRun, concurrency, progress }: UploadOptionsDto,
): Promise<Asset[]> => {
export const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptionsDto): Promise<Asset[]> => {
if (files.length === 0) {
console.log('All assets were already uploaded, nothing to do.');
return [];
@@ -282,20 +191,12 @@ export const uploadFiles = async (
return files.map((filepath) => ({ id: '', filepath }));
}
let uploadProgress: SingleBar | undefined;
if (progress) {
uploadProgress = new SingleBar(
{
format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}',
},
Presets.shades_classic,
);
} else {
console.log(`Uploading ${files.length} asset${s(files.length)} (${byteSize(totalSize)})`);
}
uploadProgress?.start(totalSize, 0);
uploadProgress?.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });
const uploadProgress = new SingleBar(
{ format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}' },
Presets.shades_classic,
);
uploadProgress.start(totalSize, 0);
uploadProgress.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });
let duplicateCount = 0;
let duplicateSize = 0;
@@ -321,7 +222,7 @@ export const uploadFiles = async (
successSize += stats.size ?? 0;
}
uploadProgress?.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });
uploadProgress.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });
return response;
},
@@ -334,7 +235,7 @@ export const uploadFiles = async (
await queue.drained();
uploadProgress?.stop();
uploadProgress.stop();
console.log(`Successfully uploaded ${successCount} new asset${s(successCount)} (${byteSize(successSize)})`);
if (duplicateCount > 0) {

View File

@@ -69,13 +69,6 @@ program
.default(4),
)
.addOption(new Option('--delete', 'Delete local assets after upload').env('IMMICH_DELETE_ASSETS'))
.addOption(new Option('--no-progress', 'Hide progress bars').env('IMMICH_PROGRESS_BAR').default(true))
.addOption(
new Option('--watch', 'Watch for changes and upload automatically')
.env('IMMICH_WATCH_CHANGES')
.default(false)
.implies({ progress: false }),
)
.argument('[paths...]', 'One or more paths to assets to be uploaded')
.action((paths, options) => upload(paths, program.opts(), options));

View File

@@ -1,13 +1,11 @@
import mockfs from 'mock-fs';
import { readFileSync } from 'node:fs';
import { Batcher, CrawlOptions, crawl } from 'src/utils';
import { Mock } from 'vitest';
import { CrawlOptions, crawl } from 'src/utils';
interface Test {
test: string;
options: Omit<CrawlOptions, 'extensions'>;
files: Record<string, boolean>;
skipOnWin32?: boolean;
}
const cwd = process.cwd();
@@ -50,18 +48,6 @@ const tests: Test[] = [
'/photos/image.jpg': true,
},
},
{
test: 'should crawl folders with quotes',
options: {
pathsToCrawl: ["/photo's/", '/photo"s/', '/photo`s/'],
},
files: {
"/photo's/image1.jpg": true,
'/photo"s/image2.jpg': true,
'/photo`s/image3.jpg': true,
},
skipOnWin32: true, // single quote interferes with mockfs root on Windows
},
{
test: 'should crawl a single file',
options: {
@@ -284,12 +270,8 @@ describe('crawl', () => {
});
describe('crawl', () => {
for (const { test: name, options, files, skipOnWin32 } of tests) {
if (process.platform === 'win32' && skipOnWin32) {
test.skip(name);
continue;
}
it(name, async () => {
for (const { test, options, files } of tests) {
it(test, async () => {
// The file contents is the same as the path.
mockfs(Object.fromEntries(Object.keys(files).map((file) => [file, file])));
@@ -304,38 +286,3 @@ describe('crawl', () => {
}
});
});
describe('Batcher', () => {
let batcher: Batcher;
let onBatch: Mock;
beforeEach(() => {
onBatch = vi.fn();
batcher = new Batcher({ batchSize: 2, onBatch });
});
it('should trigger onBatch() when a batch limit is reached', async () => {
batcher.add('a');
batcher.add('b');
batcher.add('c');
expect(onBatch).toHaveBeenCalledOnce();
expect(onBatch).toHaveBeenCalledWith(['a', 'b']);
});
it('should trigger onBatch() when flush() is called', async () => {
batcher.add('a');
batcher.flush();
expect(onBatch).toHaveBeenCalledOnce();
expect(onBatch).toHaveBeenCalledWith(['a']);
});
it('should trigger onBatch() when debounce time reached', async () => {
vi.useFakeTimers();
batcher = new Batcher({ batchSize: 2, debounceTimeMs: 100, onBatch });
batcher.add('a');
expect(onBatch).not.toHaveBeenCalled();
vi.advanceTimersByTime(200);
expect(onBatch).toHaveBeenCalledOnce();
expect(onBatch).toHaveBeenCalledWith(['a']);
vi.useRealTimers();
});
});

View File

@@ -146,7 +146,7 @@ export const crawl = async (options: CrawlOptions): Promise<string[]> => {
}
const searchPatterns = patterns.map((pattern) => {
let escapedPattern = pattern.replaceAll("'", "[']").replaceAll('"', '["]').replaceAll('`', '[`]');
let escapedPattern = pattern;
if (recursive) {
escapedPattern = escapedPattern + '/**';
}
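
The bracket-class replacement above turns each quote into a one-character character class, so the glob engine matches it literally. A hedged sketch of the effect, assuming fast-glob (a dependency of this CLI) consumes the escaped pattern:

import fg from 'fast-glob';

// "[']" matches exactly one character from the set { ' }, i.e. a literal quote
const escapedPattern = "/photo's".replaceAll("'", "[']"); // => "/photo[']s"
const files = await fg(escapedPattern + '/**', { absolute: true });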
@@ -172,64 +172,3 @@ export const sha1 = (filepath: string) => {
rs.on('end', () => resolve(hash.digest('hex')));
});
};
/**
* Batches items and calls onBatch to process them
* when the batch size is reached or the debounce time has passed.
*/
export class Batcher<T = unknown> {
private items: T[] = [];
private readonly batchSize: number;
private readonly debounceTimeMs?: number;
private readonly onBatch: (items: T[]) => void;
private debounceTimer?: NodeJS.Timeout;
constructor({
batchSize,
debounceTimeMs,
onBatch,
}: {
batchSize: number;
debounceTimeMs?: number;
onBatch: (items: T[]) => Promise<void>;
}) {
this.batchSize = batchSize;
this.debounceTimeMs = debounceTimeMs;
this.onBatch = onBatch;
}
private setDebounceTimer() {
if (this.debounceTimer) {
clearTimeout(this.debounceTimer);
}
if (this.debounceTimeMs) {
this.debounceTimer = setTimeout(() => this.flush(), this.debounceTimeMs);
}
}
private clearDebounceTimer() {
if (this.debounceTimer) {
clearTimeout(this.debounceTimer);
this.debounceTimer = undefined;
}
}
add(item: T) {
this.items.push(item);
this.setDebounceTimer();
if (this.items.length >= this.batchSize) {
this.flush();
}
}
flush() {
this.clearDebounceTimer();
if (this.items.length === 0) {
return;
}
this.onBatch(this.items);
this.items = [];
}
}
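
A short usage sketch of the Batcher above; the sizes and handler are illustrative:

const batcher = new Batcher<string>({
  batchSize: 3,
  debounceTimeMs: 1000,
  onBatch: async (items) => {
    console.log('processing batch:', items); // e.g. hand a batch to uploadBatch()
  },
});
batcher.add('a.jpg');
batcher.add('b.jpg'); // debounce timer re-armed; batch not yet full
batcher.flush(); // force ['a.jpg', 'b.jpg'] through without waiting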

View File

@@ -2,37 +2,37 @@
# Manual edits may be lost in future updates.
provider "registry.opentofu.org/cloudflare/cloudflare" {
version = "4.52.0"
constraints = "4.52.0"
version = "4.48.0"
constraints = "4.48.0"
hashes = [
"h1:2BEJyXJtYC4B4nda/WCYUmuJYDaYk88F8t1pwPzr0iQ=",
"h1:4IASk5SESeWKQ7JU0+M7KApuF5mZyklvwMXPBabim3c=",
"h1:5ImZxxALSnWfH/4EXw/wFirSmk5Tr0ACmcysy51AafE=",
"h1:6TJ3dxLSin4ZKBJLsZDn95H2ZYnGm8S7GGHvvXuuMQU=",
"h1:IzTUjg9kQ4N3qizP9CjYLeHwjsuGgtxwXvfUQWyOLcA=",
"h1:NTaOQfYINA0YTG/V1/9+SYtgX1it63+cBugj4WK4FWc=",
"h1:PXH48LuJn329sCfMXprdMDk51EZaWFyajVvS03qhQLs=",
"h1:Pi5M+GeoMSN2eJ6QnIeXjBf19O+rby/74CfB2ocpv20=",
"h1:ShXZ2ZjBvm3thfoPPzPT8+OhyismnydQVkUAfI8X12w=",
"h1:WQ9hu0Wge2msBbODfottCSKgu8oKUrw4Opz+fDPVVHk=",
"h1:Z5yXML2DE0uH9UU+M0ut9JMQAORcwVZz1CxBHzeBmao=",
"h1:jqI2qKknpleS3JDSplyGYHMu0u9K/tor1ZOjFwDgEMk=",
"h1:kgfutDh14Q5nw4eg6qGFamFxIiY8Ae0FPKRBLDOzpcI=",
"h1:zCAO7GZmfYhWb+i6TfqlqhMeDyPZWGio2IzEzAh3YTs=",
"zh:19be1a91c982b902c42aba47766860dfa5dc151eed1e95fd39ca642229381ef0",
"zh:1de451c4d1ecf7efbe67b6dace3426ba810711afdd644b0f1b870364c8ae91f8",
"zh:352b4a2120173298622e669258744554339d959ac3a95607b117a48ee4a83238",
"zh:3c6f1346d9154afbd2d558fabb4b0150fc8d559aa961254144fe1bc17fe6032f",
"zh:4c4c92d53fb535b1e0eff26f222bbd627b97d3b4c891ec9c321268676d06152f",
"zh:53276f68006c9ceb7cdb10a6ccf91a5c1eadd1407a28edb5741e84e88d7e29e8",
"zh:7925a97773948171a63d4f65bb81ee92fd6d07a447e36012977313293a5435c9",
"zh:7dfb0a4496cfe032437386d0a2cd9229a1956e9c30bd920923c141b0f0440060",
"h1:0IKUOR32xEI1suS5QCOjfxjQ2mRd058btXk8hVnaOJ4=",
"h1:3YG6vu/bFPcYOeLdSUZhiAWiWKaFlOAR34z2o8cbE9k=",
"h1:FvGy06/i9AMtVkSIUnCrXNv5xF6jqBqMH8oPVLyeeAg=",
"h1:GXH7nIF0ocMqebbA41+fSGIYfM+VAM/PvTe7fJr8UrQ=",
"h1:H0ll0ph4404vFE868W3qJ3zhOyy4jbXrOMtdkViEZsU=",
"h1:SX42e3k73IcFcrQlZ2e/Veqt2tvCMy6fwlo5yNUktCE=",
"h1:Uu/gjBc99GefdPdSrlBwU75DWU0ZcwGcrd3ZFyTeL0s=",
"h1:VZw0uN41PWRmNlhg7Ze0Eh7cdoklX1oZbfNAXNYnU1I=",
"h1:cMdV7ql6PsFa4qtb0EoZSctvTaTqV7yplBSDwcLRCLc=",
"h1:ePGvSurmlqOCkD761vkhRmz7bsK36/EnIvx2Xy8TdXo=",
"h1:fOYufF+1bzw2N3aHLpkLB6E8VbZ4ysXDODYQOlwhwd4=",
"h1:qe8RbnWq0T4xhqjn9QcbO6YW5YDx47P+eJ0NUMIfwCc=",
"h1:tRD2av6PafHDP/b9jDQsG5/aX+lHeKxpbIEHYYLBVUc=",
"h1:zyl6Gvx/CFpwYW8pFFDesfO8Lxv+a6CopyAsIMhp54s=",
"zh:04c0a49c2b23140b2f21cfd0d52f9798d70d3bdae3831613e156aabe519bbc6c",
"zh:185f21b4834ba63e8df1f84aa34639d8a7e126429a4007bb5f9ad82f2602a997",
"zh:234724f52cb4c0c3f7313d3b2697caef26d921d134f26ae14801e7afac522f7b",
"zh:38a56fcd1b3e40706af995611c977816543b53f1e55fe2720944aae2b6828fcb",
"zh:419938f5430fc78eff933470aefbf94a460a478f867cf7761a3dea177b4eb153",
"zh:4b46d92bfde1deab7de7ba1a6bbf4ba7c711e4fd925341ddf09d4cc28dae03d8",
"zh:537acd4a31c752f1bae305ba7190f60b71ad1a459f22d464f3f914336c9e919f",
"zh:5ff36b005aad07697dd0b30d4f0c35dbcdc30dc52b41722552060792fa87ce04",
"zh:635c5ee419daea098060f794d9d7d999275301181e49562c4e4c08f043076937",
"zh:859277c330d61f91abe9e799389467ca11b77131bf34bedbef52f8da68b2bb49",
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
"zh:8d4aa79f0a414bb4163d771063c70cd991c8fac6c766e685bac2ee12903c5bd6",
"zh:a67540c13565616a7e7e51ee9366e88b0dc60046e1d75c72680e150bd02725bb",
"zh:a936383a4767f5393f38f622e92bf2d0c03fe04b69c284951f27345766c7b31b",
"zh:d4887d73c466ff036eecf50ad6404ba38fd82ea4855296b1846d244b0f13c380",
"zh:e9093c8bd5b6cd99c81666e315197791781b8f93afa14fc2e0f732d1bb2a44b7",
"zh:efd3b3f1ec59a37f635aa1d4efcf178734c2fcf8ddb0d56ea690bec342da8672",
"zh:927dfdb8d9aef37ead03fceaa29e87ba076a3dd24e19b6cefdbb0efe9987ff8c",
"zh:bbf2226f07f6b1e721877328e69ded4b64f9c196634d2e2429e3cfabbe41e532",
"zh:daeed873d6f38604232b46ee4a5830c85d195b967f8dbcafe2fcffa98daf9c5f",
"zh:f8f2fc4646c1ba44085612fa7f4dbb7cbcead43b4e661f2b98ddfb4f68afc758",
]
}

View File

@@ -5,7 +5,7 @@ terraform {
required_providers {
cloudflare = {
source = "cloudflare/cloudflare"
version = "4.52.0"
version = "4.48.0"
}
}
}

View File

@@ -2,37 +2,37 @@
# Manual edits may be lost in future updates.
provider "registry.opentofu.org/cloudflare/cloudflare" {
version = "4.52.0"
constraints = "4.52.0"
version = "4.48.0"
constraints = "4.48.0"
hashes = [
"h1:2BEJyXJtYC4B4nda/WCYUmuJYDaYk88F8t1pwPzr0iQ=",
"h1:4IASk5SESeWKQ7JU0+M7KApuF5mZyklvwMXPBabim3c=",
"h1:5ImZxxALSnWfH/4EXw/wFirSmk5Tr0ACmcysy51AafE=",
"h1:6TJ3dxLSin4ZKBJLsZDn95H2ZYnGm8S7GGHvvXuuMQU=",
"h1:IzTUjg9kQ4N3qizP9CjYLeHwjsuGgtxwXvfUQWyOLcA=",
"h1:NTaOQfYINA0YTG/V1/9+SYtgX1it63+cBugj4WK4FWc=",
"h1:PXH48LuJn329sCfMXprdMDk51EZaWFyajVvS03qhQLs=",
"h1:Pi5M+GeoMSN2eJ6QnIeXjBf19O+rby/74CfB2ocpv20=",
"h1:ShXZ2ZjBvm3thfoPPzPT8+OhyismnydQVkUAfI8X12w=",
"h1:WQ9hu0Wge2msBbODfottCSKgu8oKUrw4Opz+fDPVVHk=",
"h1:Z5yXML2DE0uH9UU+M0ut9JMQAORcwVZz1CxBHzeBmao=",
"h1:jqI2qKknpleS3JDSplyGYHMu0u9K/tor1ZOjFwDgEMk=",
"h1:kgfutDh14Q5nw4eg6qGFamFxIiY8Ae0FPKRBLDOzpcI=",
"h1:zCAO7GZmfYhWb+i6TfqlqhMeDyPZWGio2IzEzAh3YTs=",
"zh:19be1a91c982b902c42aba47766860dfa5dc151eed1e95fd39ca642229381ef0",
"zh:1de451c4d1ecf7efbe67b6dace3426ba810711afdd644b0f1b870364c8ae91f8",
"zh:352b4a2120173298622e669258744554339d959ac3a95607b117a48ee4a83238",
"zh:3c6f1346d9154afbd2d558fabb4b0150fc8d559aa961254144fe1bc17fe6032f",
"zh:4c4c92d53fb535b1e0eff26f222bbd627b97d3b4c891ec9c321268676d06152f",
"zh:53276f68006c9ceb7cdb10a6ccf91a5c1eadd1407a28edb5741e84e88d7e29e8",
"zh:7925a97773948171a63d4f65bb81ee92fd6d07a447e36012977313293a5435c9",
"zh:7dfb0a4496cfe032437386d0a2cd9229a1956e9c30bd920923c141b0f0440060",
"h1:0IKUOR32xEI1suS5QCOjfxjQ2mRd058btXk8hVnaOJ4=",
"h1:3YG6vu/bFPcYOeLdSUZhiAWiWKaFlOAR34z2o8cbE9k=",
"h1:FvGy06/i9AMtVkSIUnCrXNv5xF6jqBqMH8oPVLyeeAg=",
"h1:GXH7nIF0ocMqebbA41+fSGIYfM+VAM/PvTe7fJr8UrQ=",
"h1:H0ll0ph4404vFE868W3qJ3zhOyy4jbXrOMtdkViEZsU=",
"h1:SX42e3k73IcFcrQlZ2e/Veqt2tvCMy6fwlo5yNUktCE=",
"h1:Uu/gjBc99GefdPdSrlBwU75DWU0ZcwGcrd3ZFyTeL0s=",
"h1:VZw0uN41PWRmNlhg7Ze0Eh7cdoklX1oZbfNAXNYnU1I=",
"h1:cMdV7ql6PsFa4qtb0EoZSctvTaTqV7yplBSDwcLRCLc=",
"h1:ePGvSurmlqOCkD761vkhRmz7bsK36/EnIvx2Xy8TdXo=",
"h1:fOYufF+1bzw2N3aHLpkLB6E8VbZ4ysXDODYQOlwhwd4=",
"h1:qe8RbnWq0T4xhqjn9QcbO6YW5YDx47P+eJ0NUMIfwCc=",
"h1:tRD2av6PafHDP/b9jDQsG5/aX+lHeKxpbIEHYYLBVUc=",
"h1:zyl6Gvx/CFpwYW8pFFDesfO8Lxv+a6CopyAsIMhp54s=",
"zh:04c0a49c2b23140b2f21cfd0d52f9798d70d3bdae3831613e156aabe519bbc6c",
"zh:185f21b4834ba63e8df1f84aa34639d8a7e126429a4007bb5f9ad82f2602a997",
"zh:234724f52cb4c0c3f7313d3b2697caef26d921d134f26ae14801e7afac522f7b",
"zh:38a56fcd1b3e40706af995611c977816543b53f1e55fe2720944aae2b6828fcb",
"zh:419938f5430fc78eff933470aefbf94a460a478f867cf7761a3dea177b4eb153",
"zh:4b46d92bfde1deab7de7ba1a6bbf4ba7c711e4fd925341ddf09d4cc28dae03d8",
"zh:537acd4a31c752f1bae305ba7190f60b71ad1a459f22d464f3f914336c9e919f",
"zh:5ff36b005aad07697dd0b30d4f0c35dbcdc30dc52b41722552060792fa87ce04",
"zh:635c5ee419daea098060f794d9d7d999275301181e49562c4e4c08f043076937",
"zh:859277c330d61f91abe9e799389467ca11b77131bf34bedbef52f8da68b2bb49",
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
"zh:8d4aa79f0a414bb4163d771063c70cd991c8fac6c766e685bac2ee12903c5bd6",
"zh:a67540c13565616a7e7e51ee9366e88b0dc60046e1d75c72680e150bd02725bb",
"zh:a936383a4767f5393f38f622e92bf2d0c03fe04b69c284951f27345766c7b31b",
"zh:d4887d73c466ff036eecf50ad6404ba38fd82ea4855296b1846d244b0f13c380",
"zh:e9093c8bd5b6cd99c81666e315197791781b8f93afa14fc2e0f732d1bb2a44b7",
"zh:efd3b3f1ec59a37f635aa1d4efcf178734c2fcf8ddb0d56ea690bec342da8672",
"zh:927dfdb8d9aef37ead03fceaa29e87ba076a3dd24e19b6cefdbb0efe9987ff8c",
"zh:bbf2226f07f6b1e721877328e69ded4b64f9c196634d2e2429e3cfabbe41e532",
"zh:daeed873d6f38604232b46ee4a5830c85d195b967f8dbcafe2fcffa98daf9c5f",
"zh:f8f2fc4646c1ba44085612fa7f4dbb7cbcead43b4e661f2b98ddfb4f68afc758",
]
}

View File

@@ -5,7 +5,7 @@ terraform {
required_providers {
cloudflare = {
source = "cloudflare/cloudflare"
version = "4.52.0"
version = "4.48.0"
}
}
}

10
docker/.gitignore vendored
View File

@@ -1 +1,9 @@
.env
.DS_Store
node_modules
/build
/.svelte-kit
/dist
/package
.env
.env.*
!.env.example

1
docker/.npmrc Normal file
View File

@@ -0,0 +1 @@
engine-strict=true

1
docker/.nvmrc Normal file
View File

@@ -0,0 +1 @@
22.11.0

14
docker/.prettierignore Normal file
View File

@@ -0,0 +1,14 @@
.DS_Store
node_modules
/build
/package
/coverage
.env
.env.*
!.env.example
*.md
# Ignore files for PNPM, NPM and YARN
pnpm-lock.yaml
package-lock.json
yarn.lock

9
docker/.prettierrc Normal file
View File

@@ -0,0 +1,9 @@
{
"jsonRecursiveSort": true,
"organizeImportsSkipDestructiveCodeActions": true,
"plugins": ["prettier-plugin-organize-imports", "prettier-plugin-sort-json"],
"printWidth": 120,
"semi": true,
"singleQuote": true,
"trailingComma": "all"
}

View File

@@ -1,13 +1,4 @@
#
# WARNING: To install Immich, follow our guide: https://immich.app/docs/install/docker-compose
#
# Make sure to use the docker-compose.yml of the current release:
#
# https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
#
# The compose file on main may not be compatible with the latest release.
# For development see:
# See:
# - https://immich.app/docs/developer/setup
# - https://immich.app/docs/developer/troubleshooting
@@ -25,7 +16,7 @@ services:
context: ../
dockerfile: server/Dockerfile
target: dev
restart: unless-stopped
restart: always
volumes:
- ../server:/usr/src/app
- ../open-api:/usr/src/open-api
@@ -80,7 +71,6 @@ services:
- ../web:/usr/src/app
- ../i18n:/usr/src/i18n
- ../open-api/:/usr/src/open-api/
# - ../../ui:/usr/ui
- /usr/src/app/node_modules
ulimits:
nofile:
@@ -95,12 +85,12 @@ services:
image: immich-machine-learning-dev:latest
# extends:
# file: hwaccel.ml.yml
# service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference
# service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
build:
context: ../machine-learning
dockerfile: Dockerfile
args:
- DEVICE=cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference
- DEVICE=cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
ports:
- 3003:3003
volumes:
@@ -116,13 +106,13 @@ services:
redis:
container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:42cba146593a5ea9a622002c1b7cba5da7be248650cbb64ecb9c6c33d29794b1
image: redis:6.2-alpine@sha256:eaba718fecd1196d88533de7ba49bf903ad33664a92debb24660a922ecd9cac8
healthcheck:
test: redis-cli ping || exit 1
database:
container_name: immich_postgres
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
env_file:
- .env
environment:

View File

@@ -1,12 +1,3 @@
#
# WARNING: To install Immich, follow our guide: https://immich.app/docs/install/docker-compose
#
# Make sure to use the docker-compose.yml of the current release:
#
# https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
#
# The compose file on main may not be compatible with the latest release.
name: immich-prod
services:
@@ -38,12 +29,12 @@ services:
image: immich-machine-learning:latest
# extends:
# file: hwaccel.ml.yml
# service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference
# service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
build:
context: ../machine-learning
dockerfile: Dockerfile
args:
- DEVICE=cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference
- DEVICE=cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
ports:
- 3003:3003
volumes:
@@ -56,14 +47,14 @@ services:
redis:
container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:42cba146593a5ea9a622002c1b7cba5da7be248650cbb64ecb9c6c33d29794b1
image: redis:6.2-alpine@sha256:eaba718fecd1196d88533de7ba49bf903ad33664a92debb24660a922ecd9cac8
healthcheck:
test: redis-cli ping || exit 1
restart: always
database:
container_name: immich_postgres
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
env_file:
- .env
environment:
@@ -77,12 +68,22 @@ services:
- 5432:5432
healthcheck:
test: >-
pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1; Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;
Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align
--command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";
echo "checksum failure count is $$Chksum";
[ "$$Chksum" = '0' ] || exit 1
interval: 5m
start_interval: 30s
start_period: 5m
command: >-
postgres -c shared_preload_libraries=vectors.so -c 'search_path="$$user", public, vectors' -c logging_collector=on -c max_wal_size=2GB -c shared_buffers=512MB -c wal_compression=on
postgres
-c shared_preload_libraries=vectors.so
-c 'search_path="$$user", public, vectors'
-c logging_collector=on
-c max_wal_size=2GB
-c shared_buffers=512MB
-c wal_compression=on
restart: always
# set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
@@ -90,7 +91,7 @@ services:
container_name: immich_prometheus
ports:
- 9090:9090
image: prom/prometheus@sha256:502ad90314c7485892ce696cb14a99fceab9fc27af29f4b427f41bd39701a199
image: prom/prometheus@sha256:565ee86501224ebbb98fc10b332fa54440b100469924003359edf49cbce374bd
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml
- prometheus-data:/prometheus
@@ -99,10 +100,10 @@ services:
# add data source for http://immich-prometheus:9090 to get started
immich-grafana:
container_name: immich_grafana
command: [ './run.sh', '-disable-reporting' ]
command: ['./run.sh', '-disable-reporting']
ports:
- 3000:3000
image: grafana/grafana:11.6.0-ubuntu@sha256:fd8fa48213c624e1a95122f1d93abbf1cf1cbe85fc73212c1e599dbd76c63ff8
image: grafana/grafana:11.4.0-ubuntu@sha256:afccec22ba0e4815cca1d2bf3836e414322390dc78d77f1851976ffa8d61051c
volumes:
- grafana-data:/var/lib/grafana

View File

@@ -1,11 +1,10 @@
#
# WARNING: To install Immich, follow our guide: https://immich.app/docs/install/docker-compose
#
# Make sure to use the docker-compose.yml of the current release:
# WARNING: Make sure to use the docker-compose.yml of the current release:
#
# https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
#
# The compose file on main may not be compatible with the latest release.
#
name: immich
@@ -33,12 +32,12 @@ services:
immich-machine-learning:
container_name: immich_machine_learning
# For hardware acceleration, add one of -[armnn, cuda, rocm, openvino, rknn] to the image tag.
# For hardware acceleration, add one of -[armnn, cuda, openvino] to the image tag.
# Example tag: ${IMMICH_VERSION:-release}-cuda
image: ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release}
# extends: # uncomment this section for hardware acceleration - see https://immich.app/docs/features/ml-hardware-acceleration
# file: hwaccel.ml.yml
# service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference - use the `-wsl` version for WSL2 where applicable
# service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference - use the `-wsl` version for WSL2 where applicable
volumes:
- model-cache:/cache
env_file:
@@ -49,14 +48,14 @@ services:
redis:
container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:42cba146593a5ea9a622002c1b7cba5da7be248650cbb64ecb9c6c33d29794b1
image: docker.io/redis:6.2-alpine@sha256:eaba718fecd1196d88533de7ba49bf903ad33664a92debb24660a922ecd9cac8
healthcheck:
test: redis-cli ping || exit 1
restart: always
database:
container_name: immich_postgres
image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
environment:
POSTGRES_PASSWORD: ${DB_PASSWORD}
POSTGRES_USER: ${DB_USERNAME}
@@ -67,12 +66,22 @@ services:
- ${DB_DATA_LOCATION}:/var/lib/postgresql/data
healthcheck:
test: >-
pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1; Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;
Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align
--command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";
echo "checksum failure count is $$Chksum";
[ "$$Chksum" = '0' ] || exit 1
interval: 5m
start_interval: 30s
start_period: 5m
command: >-
postgres -c shared_preload_libraries=vectors.so -c 'search_path="$$user", public, vectors' -c logging_collector=on -c max_wal_size=2GB -c shared_buffers=512MB -c wal_compression=on
postgres
-c shared_preload_libraries=vectors.so
-c 'search_path="$$user", public, vectors'
-c logging_collector=on
-c max_wal_size=2GB
-c shared_buffers=512MB
-c wal_compression=on
restart: always
volumes:

docker/eslint.config.mjs Normal file

@@ -0,0 +1,86 @@
import { FlatCompat } from '@eslint/eslintrc';
import js from '@eslint/js';
import typescriptEslint from '@typescript-eslint/eslint-plugin';
import tsParser from '@typescript-eslint/parser';
import globals from 'globals';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const compat = new FlatCompat({
baseDirectory: __dirname,
recommendedConfig: js.configs.recommended,
allConfig: js.configs.all,
});
export default [
{
ignores: [
'**/.DS_Store',
'**/node_modules',
'dist',
'lib/docker-compose/types.ts',
'build',
'package',
'**/.env',
'**/.env.*',
'!**/.env.example',
'**/pnpm-lock.yaml',
'**/package-lock.json',
'**/yarn.lock',
'eslint.config.mjs',
'vite.config.js',
'coverage',
],
},
...compat.extends('eslint:recommended', 'plugin:@typescript-eslint/recommended', 'plugin:unicorn/recommended'),
{
ignores: ['src/**'],
plugins: {
'@typescript-eslint': typescriptEslint,
},
languageOptions: {
globals: {
...globals.browser,
...globals.node,
NodeJS: true,
},
parser: tsParser,
ecmaVersion: 2022,
sourceType: 'module',
parserOptions: {
tsconfigRootDir: __dirname,
project: ['./tsconfig.json'],
},
},
rules: {
'@typescript-eslint/no-unused-vars': [
'warn',
{
argsIgnorePattern: '^_$',
varsIgnorePattern: '^_$',
},
],
curly: 2,
'unicorn/no-useless-undefined': 'off',
'unicorn/prefer-spread': 'off',
'unicorn/no-null': 'off',
'unicorn/prevent-abbreviations': 'off',
'unicorn/no-nested-ternary': 'off',
'unicorn/consistent-function-scoping': 'off',
'unicorn/prefer-top-level-await': 'off',
'unicorn/import-style': 'off',
'@typescript-eslint/await-thenable': 'error',
'@typescript-eslint/no-floating-promises': 'error',
'@typescript-eslint/no-misused-promises': 'error',
'@typescript-eslint/require-await': 'error',
'object-shorthand': ['error', 'always'],
},
},
];


@@ -2,8 +2,7 @@
# The location where your uploaded files are stored
UPLOAD_LOCATION=./library
# The location where your database files are stored. Network shares are not supported for the database
# The location where your database files are stored
DB_DATA_LOCATION=./postgres
# To set a timezone, uncomment the next line and change Etc/UTC to a TZ identifier from this list: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List


@@ -13,13 +13,6 @@ services:
volumes:
- /lib/firmware/mali_csffw.bin:/lib/firmware/mali_csffw.bin:ro # Mali firmware for your chipset (not always required depending on the driver)
- /usr/lib/libmali.so:/usr/lib/libmali.so:ro # Mali driver for your chipset (always required)
rknn:
security_opt:
- systempaths=unconfined
- apparmor=unconfined
devices:
- /dev/dri:/dev/dri
cpu: {}
@@ -33,13 +26,6 @@ services:
capabilities:
- gpu
rocm:
group_add:
- video
devices:
- /dev/dri:/dev/dri
- /dev/kfd:/dev/kfd
openvino:
device_cgroup_rules:
- 'c 189:* rmw'


@@ -48,7 +48,6 @@ services:
vaapi-wsl: # use this for VAAPI if you're running Immich in WSL2
devices:
- /dev/dri:/dev/dri
- /dev/dxg:/dev/dxg
volumes:
- /usr/lib/wsl:/usr/lib/wsl
environment:

docker/lib/build.ts Normal file

@@ -0,0 +1,87 @@
import { dump as dumpYaml } from 'js-yaml';
import { ComposeBuilder, ServiceBuilder } from 'lib/docker-compose/builder';
import { ContainerName, GeneratorOptions, ServiceName } from 'lib/types';
import { asQueryParams, getImmichEnvironment, getImmichVolumes, isExternalPostgres, isIoRedis } from 'lib/utils';
const RELEASE_VERSION = 'v1.122.0';
const postgresHealthCheck = [
'pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;',
`Chksum="$$(psql --dbname="$\${POSTGRES_DB}" --username="$\${POSTGRES_USER}" --tuples-only --no-align`,
`--command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";`,
'echo "checksum failure count is $$Chksum";',
`[ "$$Chksum" = '0' ] || exit 1\n`,
].join(' ');
const postgresCommand = [
`postgres`,
`-c shared_preload_libraries=vectors.so`,
`-c 'search_path="$$user", public, vectors'`,
`-c logging_collector=on`,
`-c max_wal_size=2GB`,
`-c shared_buffers=512MB`,
`-c wal_compression=on`,
].join(' ');
const build = (options: GeneratorOptions) => {
const healthchecksEnabled = options.healthchecks ?? true;
const containerNames = options.containerNames ?? true;
const immichService = ServiceBuilder.create(ServiceName.ImmichServer)
.setImage(`ghcr.io/immich-app/immich-server:${RELEASE_VERSION}`)
.setContainerName(containerNames && ContainerName.ImmichServer)
.setRestartPolicy('always')
.setHealthcheck(healthchecksEnabled)
.setEnvironment(getImmichEnvironment(options))
.addExposedPort(2283)
.addVolumes(getImmichVolumes(options));
const machineLearningEnabled = options.machineLearning;
const modelCacheVolume = 'model-cache';
const machineLearningService =
machineLearningEnabled &&
ServiceBuilder.create(ServiceName.ImmichMachineLearning)
.setImage(`ghcr.io/immich-app/immich-machine-learning:${RELEASE_VERSION}-cuda`)
.setContainerName(containerNames && ContainerName.ImmichMachineLearning)
.setRestartPolicy('always')
.setHealthcheck(healthchecksEnabled)
.addVolume(`${modelCacheVolume}:/cache`);
const redisService = isIoRedis(options)
? false
: ServiceBuilder.create(ServiceName.Redis)
.setImage('docker.io/redis:6.2-alpine')
.setContainerName(containerNames && ContainerName.Redis)
.setRestartPolicy('always')
.setHealthcheck(healthchecksEnabled && 'redis-cli ping || exit 1');
const postgresService = isExternalPostgres(options)
? false
: ServiceBuilder.create(ServiceName.Postgres)
.setImage('docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0')
.setContainerName(containerNames && ContainerName.Postgres)
.setRestartPolicy('always')
.setEnvironment({
POSTGRES_PASSWORD: options.postgresPassword,
POSTGRES_USER: options.postgresUser,
POSTGRES_DB: options.postgresDatabase,
POSTGRES_INITDB_ARGS: '--data-checksums',
})
.setHealthcheck(healthchecksEnabled && postgresHealthCheck)
.setCommand(postgresCommand)
.addVolume(`${options.postgresDataLocation}:/var/lib/postgresql/data`);
const domain = 'https://get.immich.app';
const url = `${domain}/compose?${asQueryParams(options)}`;
return ComposeBuilder.create('immich')
.addComment(`This docker compose file was originally generated at https://get.immich.app/compose`)
.addComment(url)
.addComment(`${dumpYaml({ options }, { indent: 2 })}`)
.addService(immichService.addDependsOn(redisService).addDependsOn(postgresService))
.addService(machineLearningService)
.addService(redisService)
.addService(postgresService)
.addVolume(modelCacheVolume, machineLearningEnabled && {});
};
export const buildSpec = (options: GeneratorOptions) => build(options).asSpec();
export const buildYaml = (options: GeneratorOptions) => build(options).asYaml();
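For orientation, a minimal invocation of the generator might look like the sketch below (the option values are illustrative, not defaults; note that `build()` currently pins `RELEASE_VERSION` internally rather than reading `options.releaseVersion`):

```typescript
// Sketch only: illustrative option values, assuming the vite `lib` alias.
import { buildYaml } from 'lib/build';

const yaml = buildYaml({
  releaseVersion: 'v1.122.0',
  machineLearning: false,
  baseLocation: '/home/immich/library',
  postgresUser: 'postgres',
  postgresPassword: 'postgres',
  postgresDatabase: 'immich',
  postgresDataLocation: '/home/immich/database',
  redis: true, // use the bundled Redis service
});

console.log(yaml); // the rendered docker-compose file, with the generator comments on top
```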

docker/lib/docker-compose/builder.ts Normal file

@@ -0,0 +1,208 @@
import { dump as dumpYaml } from 'js-yaml';
import {
Command,
ComposeSpecification,
DefinitionsService,
DefinitionsVolume,
ListOfStrings,
} from 'lib/docker-compose/types';
type ServiceNameAccessor = { getName: () => string };
type ServiceBuildAccessor = { build: () => DefinitionsService };
// Insert a blank line before each top-level (or lightly indented) key so the emitted YAML separates sections visually.
const withNewLines = (yaml: string) =>
yaml.replaceAll(/(?<leading>[^:]\n)(?<key>[ ]{0,2}\S+:)$/gm, '$<leading>\n$<key>');
export class ComposeBuilder {
private spec: ComposeSpecification = {};
private comments: string[] = [];
private constructor(projectName?: string) {
if (projectName) {
this.setProjectName(projectName);
}
}
static create(projectName?: string) {
return new ComposeBuilder(projectName);
}
setProjectName(name: string) {
this.spec.name = name;
return this;
}
addComment(comment: string) {
this.comments.push(comment + '\n');
return this;
}
addService(spec: false | (ServiceNameAccessor & ServiceBuildAccessor)) {
if (!spec) {
return this;
}
if (!this.spec.services) {
this.spec.services = {};
}
this.spec.services[spec.getName()] = spec.build();
return this;
}
addVolume(name: string, volume: false | DefinitionsVolume) {
if (volume === false) {
return this;
}
if (!this.spec.volumes) {
this.spec.volumes = {};
}
this.spec.volumes[name] = volume;
return this;
}
asSpec() {
return this.spec;
}
asYaml() {
let prefix = '';
if (this.comments.length > 0) {
const comments =
this.comments
.flatMap((comment) => comment.split('\n'))
.join('\n')
.trim()
.split('\n')
.map((comment) => `# ${comment}`)
.join('\n') + '\n\n';
prefix += comments;
}
const spec = withNewLines(dumpYaml(this.spec, { indent: 2, lineWidth: 140 })).trim();
return prefix + spec;
}
}
export class ServiceBuilder {
private spec: DefinitionsService = {};
private constructor(private name: string) {}
static create(name: string) {
return new ServiceBuilder(name);
}
getName() {
return this.name;
}
setImage(image: string) {
this.spec.image = image;
return this;
}
setContainerName(name: false | string) {
if (name === false) {
return this;
}
this.spec.container_name = name;
return this;
}
addExposedPort(port: number | { internal: number; external: number }) {
if (typeof port === 'number') {
port = { internal: port, external: port };
}
const { internal, external } = port;
if (!this.spec.ports) {
this.spec.ports = [];
}
this.spec.ports.push(`${external}:${internal}`);
return this;
}
addDependsOn(service: false | string | ServiceNameAccessor) {
if (service === false) {
return this;
}
// the `in` operator throws on string primitives, so branch on the type instead
const serviceName = typeof service === 'string' ? service : service.getName();
if (!this.spec.depends_on) {
this.spec.depends_on = [];
}
(this.spec.depends_on as ListOfStrings).push(serviceName);
return this;
}
setRestartPolicy(restart: string) {
this.spec.restart = restart;
return this;
}
setEnvironment(env: Record<string, string | number | undefined>) {
this.spec.environment = env;
return this;
}
setHealthcheck(test: boolean | string) {
// `true` keeps the image's default healthcheck, `false` disables it, and a string overrides the test command
if (test === true) {
return this;
}
if (test === false) {
this.spec.healthcheck = { disable: true };
return this;
}
this.spec.healthcheck = { test };
return this;
}
setCommand(command: Command) {
this.spec.command = command;
return this;
}
addVolume(volume: string) {
if (!this.spec.volumes) {
this.spec.volumes = [];
}
this.spec.volumes.push(volume);
return this;
}
addVolumes(volumes: string[]) {
for (const volume of volumes) {
this.addVolume(volume);
}
return this;
}
build() {
return this.spec;
}
}
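Standing alone, the two builders compose like this (a sketch with arbitrary service names and a hypothetical image):

```typescript
// Sketch: a tiny compose spec assembled with ComposeBuilder/ServiceBuilder.
import { ComposeBuilder, ServiceBuilder } from 'lib/docker-compose/builder';

const redis = ServiceBuilder.create('redis')
  .setImage('docker.io/redis:6.2-alpine')
  .setRestartPolicy('always')
  .setHealthcheck('redis-cli ping || exit 1');

const app = ServiceBuilder.create('app')
  .setImage('example/app:latest') // hypothetical image
  .addExposedPort(8080)
  .addDependsOn(redis)
  .addVolume('data:/var/lib/app');

const yaml = ComposeBuilder.create('example')
  .addComment('generated for illustration only')
  .addService(app)
  .addService(redis)
  .addVolume('data', {})
  .asYaml();
```

Each setter returns `this`, so specs read as a single chain, and `false` arguments let callers toggle whole services or fields off without branching at the call site.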

docker/lib/docker-compose/types.ts Normal file

@@ -0,0 +1,937 @@
export type DefinitionsInclude =
| string
| {
path?: StringOrList;
env_file?: StringOrList;
project_directory?: string;
};
export type StringOrList = string | ListOfStrings;
export type ListOfStrings = string[];
export type DefinitionsDevelopment = {
watch?: {
ignore?: string[];
path: string;
action: 'rebuild' | 'sync' | 'sync+restart';
target?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}[];
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
} & Development;
export type Development = {
watch?: {
ignore?: string[];
path: string;
action: 'rebuild' | 'sync' | 'sync+restart';
target?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}[];
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
} | null;
export type DefinitionsDeployment = {
mode?: string;
endpoint_mode?: string;
replicas?: number | string;
labels?: ListOrDict;
rollback_config?: {
parallelism?: number | string;
delay?: string;
failure_action?: string;
monitor?: string;
max_failure_ratio?: number | string;
order?: 'start-first' | 'stop-first';
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
update_config?: {
parallelism?: number | string;
delay?: string;
failure_action?: string;
monitor?: string;
max_failure_ratio?: number | string;
order?: 'start-first' | 'stop-first';
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
resources?: {
limits?: {
cpus?: number | string;
memory?: string;
pids?: number | string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
reservations?: {
cpus?: number | string;
memory?: string;
generic_resources?: DefinitionsGenericResources;
devices?: DefinitionsDevices;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
restart_policy?: {
condition?: string;
delay?: string;
max_attempts?: number | string;
window?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
placement?: {
constraints?: string[];
preferences?: {
spread?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}[];
max_replicas_per_node?: number | string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
} & Deployment;
export type ListOrDict =
| {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` ".+".
*/
[k: string]: undefined | string | number | boolean | null;
}
| string[];
export type DefinitionsGenericResources = {
discrete_resource_spec?: {
kind?: string;
value?: number | string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}[];
export type DefinitionsDevices = {
capabilities: ListOfStrings;
count?: string | number;
device_ids?: ListOfStrings;
driver?: string;
options?: ListOrDict;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}[];
export type Deployment = {
mode?: string;
endpoint_mode?: string;
replicas?: number | string;
labels?: ListOrDict;
rollback_config?: {
parallelism?: number | string;
delay?: string;
failure_action?: string;
monitor?: string;
max_failure_ratio?: number | string;
order?: 'start-first' | 'stop-first';
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
update_config?: {
parallelism?: number | string;
delay?: string;
failure_action?: string;
monitor?: string;
max_failure_ratio?: number | string;
order?: 'start-first' | 'stop-first';
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
resources?: {
limits?: {
cpus?: number | string;
memory?: string;
pids?: number | string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
reservations?: {
cpus?: number | string;
memory?: string;
generic_resources?: DefinitionsGenericResources;
devices?: DefinitionsDevices;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
restart_policy?: {
condition?: string;
delay?: string;
max_attempts?: number | string;
window?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
placement?: {
constraints?: string[];
preferences?: {
spread?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}[];
max_replicas_per_node?: number | string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
} | null;
export type ExtraHosts = {} | string[];
export type ServiceConfigOrSecret = (
| string
| {
source?: string;
target?: string;
uid?: string;
gid?: string;
mode?: number | string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}
)[];
export type Command = null | string | string[];
export type EnvFile =
| string
| (
| string
| {
path: string;
format?: string;
required?: boolean | string;
}
)[];
/**
* This interface was referenced by `PropertiesNetworks`'s JSON-Schema definition
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
*/
export type DefinitionsNetwork = {
name?: string;
driver?: string;
driver_opts?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string | number;
};
ipam?: {
driver?: string;
config?: {
subnet?: string;
ip_range?: string;
gateway?: string;
aux_addresses?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}[];
options?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
external?:
| boolean
| string
| {
name?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
internal?: boolean | string;
enable_ipv6?: boolean | string;
attachable?: boolean | string;
labels?: ListOrDict;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
} & Network;
export type Network = {
name?: string;
driver?: string;
driver_opts?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string | number;
};
ipam?: {
driver?: string;
config?: {
subnet?: string;
ip_range?: string;
gateway?: string;
aux_addresses?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}[];
options?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
external?:
| boolean
| string
| {
name?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
internal?: boolean | string;
enable_ipv6?: boolean | string;
attachable?: boolean | string;
labels?: ListOrDict;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
} | null;
/**
* This interface was referenced by `PropertiesVolumes`'s JSON-Schema definition
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
*/
export type DefinitionsVolume = {
name?: string;
driver?: string;
driver_opts?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string | number;
};
external?:
| boolean
| string
| {
name?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
labels?: ListOrDict;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
} & Volume;
export type Volume = {
name?: string;
driver?: string;
driver_opts?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string | number;
};
external?:
| boolean
| string
| {
name?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
labels?: ListOrDict;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
} | null;
/**
* The Compose file is a YAML file defining a multi-containers based application.
*/
export interface ComposeSpecification {
/**
* declared for backward compatibility, ignored.
*/
version?: string;
/**
* define the Compose project name, until user defines one explicitly.
*/
name?: string;
/**
* compose sub-projects to be included.
*/
include?: DefinitionsInclude[];
services?: PropertiesServices;
networks?: PropertiesNetworks;
volumes?: PropertiesVolumes;
secrets?: PropertiesSecrets;
configs?: PropertiesConfigs;
/**
* This interface was referenced by `ComposeSpecification`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}
export interface PropertiesServices {
[k: string]: DefinitionsService;
}
/**
* This interface was referenced by `PropertiesServices`'s JSON-Schema definition
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
*/
export interface DefinitionsService {
develop?: DefinitionsDevelopment;
deploy?: DefinitionsDeployment;
annotations?: ListOrDict;
attach?: boolean | string;
build?:
| string
| {
context?: string;
dockerfile?: string;
dockerfile_inline?: string;
entitlements?: string[];
args?: ListOrDict;
ssh?: ListOrDict;
labels?: ListOrDict;
cache_from?: string[];
cache_to?: string[];
no_cache?: boolean | string;
additional_contexts?: ListOrDict;
network?: string;
pull?: boolean | string;
target?: string;
shm_size?: number | string;
extra_hosts?: ExtraHosts;
isolation?: string;
privileged?: boolean | string;
secrets?: ServiceConfigOrSecret;
tags?: string[];
ulimits?: Ulimits;
platforms?: string[];
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
blkio_config?: {
device_read_bps?: BlkioLimit[];
device_read_iops?: BlkioLimit[];
device_write_bps?: BlkioLimit[];
device_write_iops?: BlkioLimit[];
weight?: number | string;
weight_device?: BlkioWeight[];
};
cap_add?: string[];
cap_drop?: string[];
cgroup?: 'host' | 'private';
cgroup_parent?: string;
command?: Command;
configs?: ServiceConfigOrSecret;
container_name?: string;
cpu_count?: string | number;
cpu_percent?: string | number;
cpu_shares?: number | string;
cpu_quota?: number | string;
cpu_period?: number | string;
cpu_rt_period?: number | string;
cpu_rt_runtime?: number | string;
cpus?: number | string;
cpuset?: string;
credential_spec?: {
config?: string;
file?: string;
registry?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
depends_on?:
| ListOfStrings
| {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
*/
[k: string]: {
restart?: boolean | string;
required?: boolean;
condition: 'service_started' | 'service_healthy' | 'service_completed_successfully';
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
};
device_cgroup_rules?: ListOfStrings;
devices?: (
| string
| {
source: string;
target?: string;
permissions?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}
)[];
dns?: StringOrList;
dns_opt?: string[];
dns_search?: StringOrList;
domainname?: string;
entrypoint?: Command;
env_file?: EnvFile;
environment?: ListOrDict;
expose?: (string | number)[];
extends?:
| string
| {
service: string;
file?: string;
};
external_links?: string[];
extra_hosts?: ExtraHosts;
group_add?: (string | number)[];
healthcheck?: DefinitionsHealthcheck;
hostname?: string;
image?: string;
init?: boolean | string;
ipc?: string;
isolation?: string;
labels?: ListOrDict;
links?: string[];
logging?: {
driver?: string;
options?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string | number | null;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
mac_address?: string;
mem_limit?: number | string;
mem_reservation?: string | number;
mem_swappiness?: number | string;
memswap_limit?: number | string;
network_mode?: string;
networks?:
| ListOfStrings
| {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
*/
[k: string]: {
aliases?: ListOfStrings;
ipv4_address?: string;
ipv6_address?: string;
link_local_ips?: ListOfStrings;
mac_address?: string;
driver_opts?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string | number;
};
priority?: number;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
} | null;
};
oom_kill_disable?: boolean | string;
oom_score_adj?: string | number;
pid?: string | null;
pids_limit?: number | string;
platform?: string;
ports?: (
| number
| string
| {
name?: string;
mode?: string;
host_ip?: string;
target?: number | string;
published?: string | number;
protocol?: string;
app_protocol?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}
)[];
post_start?: DefinitionsServiceHook[];
pre_stop?: DefinitionsServiceHook[];
privileged?: boolean | string;
profiles?: ListOfStrings;
pull_policy?: 'always' | 'never' | 'if_not_present' | 'build' | 'missing';
read_only?: boolean | string;
restart?: string;
runtime?: string;
scale?: number | string;
security_opt?: string[];
shm_size?: number | string;
secrets?: ServiceConfigOrSecret;
sysctls?: ListOrDict;
stdin_open?: boolean | string;
stop_grace_period?: string;
stop_signal?: string;
storage_opt?: {
[k: string]: unknown;
};
tmpfs?: StringOrList;
tty?: boolean | string;
ulimits?: Ulimits;
user?: string;
uts?: string;
userns_mode?: string;
volumes?: (
| string
| {
type: string;
source?: string;
target?: string;
read_only?: boolean | string;
consistency?: string;
bind?: {
propagation?: string;
create_host_path?: boolean | string;
selinux?: 'z' | 'Z';
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
volume?: {
nocopy?: boolean | string;
subpath?: string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
tmpfs?: {
size?: number | string;
mode?: number | string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}
)[];
volumes_from?: string[];
working_dir?: string;
/**
* This interface was referenced by `DefinitionsService`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}
export interface Ulimits {
/**
* This interface was referenced by `Ulimits`'s JSON-Schema definition
* via the `patternProperty` "^[a-z]+$".
*/
[k: string]:
| (number | string)
| {
hard: number | string;
soft: number | string;
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
};
}
export interface BlkioLimit {
path?: string;
rate?: number | string;
}
export interface BlkioWeight {
path?: string;
weight?: number | string;
}
export interface DefinitionsHealthcheck {
disable?: boolean | string;
interval?: string;
retries?: number | string;
test?: string | string[];
timeout?: string;
start_period?: string;
start_interval?: string;
/**
* This interface was referenced by `DefinitionsHealthcheck`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}
export interface DefinitionsServiceHook {
command?: Command;
user?: string;
privileged?: boolean | string;
working_dir?: string;
environment?: ListOrDict;
/**
* This interface was referenced by `DefinitionsServiceHook`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}
export interface PropertiesNetworks {
[k: string]: DefinitionsNetwork;
}
export interface PropertiesVolumes {
[k: string]: DefinitionsVolume;
}
export interface PropertiesSecrets {
[k: string]: DefinitionsSecret;
}
/**
* This interface was referenced by `PropertiesSecrets`'s JSON-Schema definition
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
*/
export interface DefinitionsSecret {
name?: string;
environment?: string;
file?: string;
external?:
| boolean
| string
| {
name?: string;
[k: string]: unknown;
};
labels?: ListOrDict;
driver?: string;
driver_opts?: {
/**
* This interface was referenced by `undefined`'s JSON-Schema definition
* via the `patternProperty` "^.+$".
*/
[k: string]: string | number;
};
template_driver?: string;
/**
* This interface was referenced by `DefinitionsSecret`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}
export interface PropertiesConfigs {
[k: string]: DefinitionsConfig;
}
/**
* This interface was referenced by `PropertiesConfigs`'s JSON-Schema definition
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
*/
export interface DefinitionsConfig {
name?: string;
content?: string;
environment?: string;
file?: string;
external?:
| boolean
| string
| {
name?: string;
[k: string]: unknown;
};
labels?: ListOrDict;
template_driver?: string;
/**
* This interface was referenced by `DefinitionsConfig`'s JSON-Schema definition
* via the `patternProperty` "^x-".
*/
[k: string]: unknown;
}

docker/lib/index.ts Normal file

@@ -0,0 +1,2 @@
export * from 'lib/build';
export * from 'lib/types';

docker/lib/types.ts Normal file

@@ -0,0 +1,57 @@
export enum ServiceName {
ImmichServer = 'immich-server',
ImmichMachineLearning = 'immich-machine-learning',
Postgres = 'immich-postgres',
Redis = 'immich-redis',
}
export enum ContainerName {
ImmichServer = 'immich-server',
ImmichMachineLearning = 'immich-machine-learning',
Postgres = 'immich-postgres',
Redis = 'immich-redis',
}
export type BaseOptions = {
releaseVersion: string;
healthchecks?: boolean;
machineLearning: boolean;
containerNames?: boolean;
serverTimeZone?: string;
};
export type GeneratorOptions = (BaseOptions & FolderOptions & PostgresOptions) & RedisOptions;
export type FolderOptions = {
baseLocation: string;
encodedVideoLocation?: string;
libraryLocation?: string;
uploadLocation?: string;
profileLocation?: string;
thumbnailsLocation?: string;
backupsLocation?: string;
};
export type PostgresOptions = InternalPostgresOptions | ExternalPostgresOptions;
export type InternalPostgresOptions = {
postgresUser: string;
postgresPassword: string;
postgresDatabase: string;
postgresDataLocation: string;
};
export type ExternalPostgresOptions = { postgresUrl: string; postgresVectorExtension?: VectorExtension };
export type RedisOptions = ExternalRedisOptions | IoRedisOptions | { redis: true };
export type ExternalRedisOptions = {
redisHost: string;
redisPort: number;
redisDbIndex?: number;
redisUsername?: string;
redisPassword?: string;
redisSocket?: string;
};
export type IoRedisOptions = { redisUrl: string };
export type VectorExtension = 'pgvector' | 'pgvecto.rs';
export type HardwareAccelerationPlatform = 'nvenc' | 'quicksync' | 'rkmpp' | 'vaapi' | 'vaapi-wsl';
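Since `PostgresOptions` and `RedisOptions` are unions discriminated by which keys are present, pointing the generator at existing infrastructure is just a matter of supplying the alternate shape. A sketch with placeholder values:

```typescript
// Sketch: external Postgres plus an ioredis-style URL (placeholder values).
import { GeneratorOptions } from 'lib/types';

const options: GeneratorOptions = {
  releaseVersion: 'v1.122.0',
  machineLearning: true,
  baseLocation: '/mnt/immich',
  postgresUrl: 'postgres://immich:immich@db.local:5432/immich',
  postgresVectorExtension: 'pgvector',
  redisUrl: 'ioredis://<base64>',
};
```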

docker/lib/utils.ts Normal file

@@ -0,0 +1,84 @@
import {
ExternalPostgresOptions,
ExternalRedisOptions,
GeneratorOptions,
IoRedisOptions,
PostgresOptions,
RedisOptions,
ServiceName,
} from 'lib/types';
export const isExternalPostgres = (options: PostgresOptions): options is ExternalPostgresOptions =>
'postgresUrl' in options;
export const isIoRedis = (options: RedisOptions): options is IoRedisOptions => 'redisUrl' in options;
export const isExternalRedis = (options: RedisOptions): options is ExternalRedisOptions => 'redisHost' in options;
export const asQueryParams = (values: Record<string, string | number | boolean | undefined>) => {
return new URLSearchParams(
Object.entries(values)
// drop unset options explicitly; `.filter(Boolean)` would keep them all, since the `[key, value]` tuples are always truthy
.filter(([, value]) => value !== undefined)
.map(([key, value]) => [key, String(value)]),
).toString();
};
export const getImmichVolumes = (options: GeneratorOptions) => {
const {
baseLocation,
encodedVideoLocation,
uploadLocation,
backupsLocation,
profileLocation,
libraryLocation,
thumbnailsLocation,
} = options;
const internalBaseLocation = '/usr/src/app/upload';
const volumes = [`${baseLocation}:${internalBaseLocation}`];
for (const { override, folder } of [
{ override: encodedVideoLocation, folder: 'encoded-video' },
{ override: libraryLocation, folder: 'library' },
{ override: uploadLocation, folder: 'upload' },
{ override: profileLocation, folder: 'profile' },
{ override: thumbnailsLocation, folder: 'thumbs' },
{ override: backupsLocation, folder: 'backups' },
]) {
if (override) {
volumes.push(`${override}:${internalBaseLocation}/${folder}`);
}
}
volumes.push(`/etc/localtime:/etc/localtime:ro`);
return volumes;
};
export const getImmichEnvironment = (options: GeneratorOptions) => {
const env: Record<string, string | number | undefined> = {};
if (isExternalPostgres(options)) {
env.DB_URL = options.postgresUrl;
env.DB_VECTOR_EXTENSION = options.postgresVectorExtension;
} else {
const { postgresUser, postgresPassword, postgresDatabase } = options;
env.DB_URL = `postgres://${postgresUser}:${postgresPassword}@${ServiceName.Postgres}:5432/${postgresDatabase}`;
}
if (isIoRedis(options)) {
env.REDIS_URL = options.redisUrl;
} else if (isExternalRedis(options)) {
env.REDIS_HOSTNAME = options.redisHost;
env.REDIS_PORT = options.redisPort;
env.REDIS_DBINDEX = options.redisDbIndex;
env.REDIS_USERNAME = options.redisUsername;
env.REDIS_PASSWORD = options.redisPassword;
env.REDIS_SOCKET = options.redisSocket;
} else {
env.REDIS_HOSTNAME = ServiceName.Redis;
}
env.TZ = options.serverTimeZone;
return env;
};
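The guards above decide which environment variables the generated server service receives; roughly (hypothetical values):

```typescript
// Sketch: external Redis host combined with the bundled Postgres service.
import { getImmichEnvironment } from 'lib/utils';

const env = getImmichEnvironment({
  releaseVersion: 'v1.122.0',
  machineLearning: false,
  baseLocation: '/home/immich/library',
  postgresUser: 'postgres',
  postgresPassword: 'postgres',
  postgresDatabase: 'immich',
  postgresDataLocation: '/home/immich/database',
  redisHost: '192.168.0.5',
  redisPort: 6379,
});

// env.DB_URL         -> 'postgres://postgres:postgres@immich-postgres:5432/immich'
// env.REDIS_HOSTNAME -> '192.168.0.5'
// env.REDIS_PORT     -> 6379
```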

docker/package-lock.json generated Normal file

File diff suppressed because it is too large

docker/package.json Normal file

@@ -0,0 +1,38 @@
{
"name": "immich-docker",
"version": "0.0.0",
"description": "A docker-compose generator for Immich",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"build": "vite build",
"generate": "npx tsx src/index.ts",
"lint": "eslint . --max-warnings 0",
"lint:fix": "npm run lint -- --fix",
"format": "prettier --check .",
"format:fix": "prettier --write ."
},
"type": "module",
"exports": "./dist/immich-docker.js",
"author": "team@immich.app",
"private": true,
"license": "GNU Affero General Public License version 3",
"dependencies": {
"js-yaml": "^4.1.0"
},
"devDependencies": {
"@eslint/eslintrc": "^3.2.0",
"@eslint/js": "^9.16.0",
"@types/js-yaml": "^4.0.9",
"@types/node": "^22.10.2",
"@typescript-eslint/eslint-plugin": "^8.18.0",
"@typescript-eslint/parser": "^8.18.0",
"eslint-plugin-unicorn": "^56.0.1",
"globals": "^15.13.0",
"json-schema-to-ts": "^3.1.1",
"prettier-plugin-organize-imports": "^4.1.0",
"prettier-plugin-sort-json": "^4.0.0",
"vite": "^6.0.3",
"vitest": "^2.1.8"
}
}


@@ -1,5 +1,5 @@
global:
scrape_interval: 15s
scrape_interval: 15s
evaluation_interval: 15s
scrape_configs:

docker/src/index.ts Normal file

@@ -0,0 +1,60 @@
import { mkdirSync, writeFileSync } from 'node:fs';
import { buildYaml } from '../lib/index';
import { GeneratorOptions } from '../lib/types';
const main = () => {
const commonOptions = {
releaseVersion: 'v1.122.0',
baseLocation: '/home/immich/library',
serverTimeZone: 'America/New_York',
healthchecks: true,
machineLearning: true,
containerNames: true,
// hardwareAcceleration: 'nvenc',
};
const postgresOptions = {
postgresUser: 'postgres',
postgresPassword: 'postgres',
postgresDatabase: 'immich',
postgresDataLocation: '/home/immich/database',
};
const defaultOptions: GeneratorOptions = { ...commonOptions, ...postgresOptions, redis: true };
const samples: Array<{ name: string; options: GeneratorOptions }> = [
{ name: 'defaults', options: defaultOptions },
{ name: 'no-names', options: { ...defaultOptions, containerNames: false } },
{ name: 'no-healthchecks', options: { ...defaultOptions, healthchecks: false } },
{ name: 'external-ioredis', options: { ...defaultOptions, redisUrl: 'ioredis://<base64>' } },
{ name: 'external-redis', options: { ...defaultOptions, redisHost: '192.168.0.5', redisPort: 1234 } },
{
name: 'external-postgres',
options: {
...defaultOptions,
postgresUrl: 'postgres://immich:immich@localhost:5432/immich',
postgresVectorExtension: 'pgvector',
},
},
{
name: 'split-storage',
options: {
...defaultOptions,
thumbnailsLocation: '/home/fast/thumbs',
encodedVideoLocation: '/home/fast/encoded-videos',
},
},
];
// TODO replace with vitest test files/scripts
mkdirSync('./examples', { recursive: true });
for (const { name, options } of samples) {
const spec = buildYaml(options);
const filename = `./examples/docker-compose.${name}.yaml`;
writeFileSync(filename, spec);
console.log(`Wrote ${filename}`);
}
};
main();

docker/tsconfig.json Normal file

@@ -0,0 +1,19 @@
{
"compilerOptions": {
"allowJs": true,
"baseUrl": "./",
"checkJs": true,
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"module": "es2020",
"moduleResolution": "bundler",
"outDir": "./dist",
"resolveJsonModule": true,
"skipLibCheck": true,
"sourceMap": true,
"strict": true,
"target": "es2022",
"types": []
},
"include": ["lib"]
}

docker/tsconfig.src.json Normal file

@@ -0,0 +1,25 @@
{
"compilerOptions": {
"allowSyntheticDefaultImports": true,
"baseUrl": "./",
"declaration": true,
"emitDecoratorMetadata": true,
"esModuleInterop": true,
"experimentalDecorators": true,
"forceConsistentCasingInFileNames": true,
"incremental": true,
"jsx": "react",
"lib": ["dom", "es2023"],
"module": "node16",
"moduleResolution": "node16",
"outDir": "./dist",
"preserveWatchOutput": true,
"removeComments": true,
"resolveJsonModule": true,
"skipLibCheck": true,
"sourceMap": true,
"strict": true,
"target": "es2022"
},
"include": ["src", "lib"]
}

docker/vite.config.js Normal file

@@ -0,0 +1,20 @@
import { resolve } from 'node:path';
import { defineConfig } from 'vite';
export default defineConfig({
resolve: {
alias: {
lib: resolve('lib'),
src: resolve('src'),
test: resolve('test'),
},
},
build: {
lib: {
entry: resolve(__dirname, 'lib/index.ts'),
name: 'immich-docker',
// the proper extensions will be added
fileName: 'immich-docker',
},
},
});


@@ -1 +1 @@
22.14.0
22.12.0


@@ -49,22 +49,13 @@ The behaviors differ based on your device manufacturer and operating system, but
On iOS (iPhone and iPad), the operating system determines if a particular app can invoke background tasks based on multiple factors, most of which the Immich app has no control over. To increase the likelihood that the background backup task is run, follow the steps below:
- Enable Background App Refresh for Immich in the iOS settings at `Settings > General > Background App Refresh`.
- Disable **Low Power Mode** when not needed, as this can prevent apps from running in the background.
- Disable Background App Refresh for apps that don't need background tasks to run. This will reduce the competition for background task invocation for Immich.
- Use the Immich app more often.
### Why are features in the mobile app not working with a self-signed certificate, Basic Auth, custom headers, or mutual TLS?
### Why are features not working with a self-signed cert or mTLS?
These network features are experimental. They often do not work with video playback, asset upload or download, and other features.
Many of these limitations are tracked in [#15230](https://github.com/immich-app/immich/issues/15230).
Instead of these experimental features, we recommend using the URL switching feature, a VPN, or a [free trusted SSL certificate](https://letsencrypt.org/) for your domain.
We are not actively developing these features and will not be able to provide support, but welcome contributions to improve them.
Please discuss any large PRs with our dev team to ensure your time is not wasted.
### Why isn't the mobile app updated yet?
The app stores can take a few days to approve new builds of the app. If you're impatient, Android APKs can be downloaded from the GitHub releases page.
Due to limitations in the upstream app/video library, using a self-signed TLS certificate or mutual TLS may break video playback or asset upload (both foreground and/or background).
We recommend using a real SSL certificate from a free provider, for example [Let's Encrypt](https://letsencrypt.org/).
---
@@ -97,7 +88,7 @@ Make sure to [set your reverse proxy](/docs/administration/reverse-proxy/) to al
Also, check the disk space of your reverse proxy.
In some cases, proxies cache requests to disk before passing them on, and if disk space runs out, the request fails.
If you are using Cloudflare Tunnel, please know that they set a maximum filesize of 100 MB that cannot be changed.
If you are using Cloudflare Tunnel, please know that they set a maxiumum filesize of 100 MB that cannot be changed.
At times, files larger than this may work, potentially up to 1 GB. However, the official limit is 100 MB.
If you are having issues, we recommend switching to a different network deployment.
@@ -117,7 +108,7 @@ See [Backup and Restore](/docs/administration/backup-and-restore.md).
### Does Immich support reading existing face tag metadata?
Yes, it creates new faces and persons from the imported asset metadata. For details see the [feature request #4348](https://github.com/immich-app/immich/discussions/4348) and [PR #6455](https://github.com/immich-app/immich/pull/6455).
No, it currently does not. There is an [open feature request on GitHub](https://github.com/immich-app/immich/discussions/4348).
### Does Immich support the filtering of NSFW images?
@@ -164,35 +155,6 @@ For example, say you have existing transcodes with the policy "Videos higher tha
No. Our design principle is that the original assets should always be untouched.
### How can I mount a CIFS/Samba volume within Docker?
If you aren't able to or prefer not to mount Samba on the host (such as in a Windows environment), you can mount the volume within Docker.
Below is an example in the `docker-compose.yml`.
Change the username, password, local IP, and share name to match your setup. The line `- originals:/usr/src/app/originals` corresponds to the section where the volume `originals` is defined.
You can name this volume whatever you like and map it into the container wherever you like. For example, you could change `originals:` to `Photos:`, and change `- originals:/usr/src/app/originals` to `- Photos:/usr/src/app/photos`.
```diff
...
services:
immich-server:
...
volumes:
# Do not edit the next line. If you want to change the media storage location on your system, edit the value of UPLOAD_LOCATION in the .env file
- ${UPLOAD_LOCATION}:/usr/src/app/upload
- /etc/localtime:/etc/localtime:ro
+ - originals:/usr/src/app/originals
...
volumes:
model-cache:
+ originals:
+ driver_opts:
+ type: cifs
+ o: 'iocharset=utf8,username=USERNAMEHERE,password=PASSWORDHERE,rw' # change to `ro` if read only desired
+ device: '//localipaddress/sharename'
```
---
## Albums
@@ -240,7 +202,7 @@ Immich uses CLIP models. An ML model converts each image to an "embedding", whic
### How does facial recognition work?
See [How Facial Recognition Works](/docs/features/facial-recognition#how-facial-recognition-works) for details.
See [How Facial Recognition Works](/docs/features/facial-recognition#How-Facial-Recognition-Works) for details.
### How can I disable machine learning?
@@ -262,7 +224,7 @@ No, this is not supported. Only models listed in the [Hugging Face][huggingface]
### I want to be able to search in other languages besides English. How can I do that?
You can change to a multilingual CLIP model. See [here](/docs/features/searching#clip-models) for instructions.
You can change to a multilingual CLIP model. See [here](/docs/features/smart-search#CLIP-model) for instructions.
### Does Immich support Facial Recognition for videos?
@@ -273,7 +235,7 @@ Scanning the entire video for faces may be implemented in the future.
No.
:::tip
You can use [Smart Search](/docs/features/searching.md) for this to some extent. For example, if you have a Golden Retriever and a Chihuahua, type these words in the smart search and watch the results.
You can use [Smart Search](/docs/features/smart-search.md) for this to some extent. For example, if you have a Golden Retriever and a Chihuahua, type these words in the smart search and watch the results.
:::
### I'm getting a lot of "faces" that aren't faces, what can I do?
@@ -315,7 +277,7 @@ The initial backup is the most intensive due to the number of jobs running. The
- For facial recognition on new images to work properly, you must re-run the Face Detection job for all images after this.
- At the container level, you can [set resource constraints](/docs/FAQ#can-i-limit-cpu-and-ram-usage) to lower usage further.
- It's recommended to only apply these constraints _after_ taking some of the measures here for best performance.
- If these changes are not enough, see [above](/docs/FAQ#how-can-i-disable-machine-learning) for instructions on how to disable machine learning.
- If these changes are not enough, see [below](/docs/FAQ#how-can-i-disable-machine-learning) for instructions on how to disable machine learning.
### Can I limit CPU and RAM usage?
@@ -458,7 +420,7 @@ A result of `on` means that checksums are enabled.
<summary>Check if checksums are enabled</summary>
```bash
docker exec -it immich_postgres psql --dbname=postgres --username=<DB_USERNAME> --command="show data_checksums"
docker exec -it immich_postgres psql --dbname=immich --username=<DB_USERNAME> --command="show data_checksums"
data_checksums
----------------
on
@@ -473,7 +435,7 @@ If checksums are enabled, you can check the status of the database with the foll
<summary>Check for database corruption</summary>
```bash
docker exec -it immich_postgres psql --dbname=postgres --username=<DB_USERNAME> --command="SELECT datname, checksum_failures, checksum_last_failure FROM pg_stat_database WHERE datname IS NOT NULL"
docker exec -it immich_postgres psql --dbname=immich --username=<DB_USERNAME> --command="SELECT datname, checksum_failures, checksum_last_failure FROM pg_stat_database WHERE datname IS NOT NULL"
datname | checksum_failures | checksum_last_failure
-----------+-------------------+-----------------------
postgres | 0 |
@@ -485,24 +447,6 @@ docker exec -it immich_postgres psql --dbname=postgres --username=<DB_USERNAME>
</details>
You can also scan the Postgres database file structure for errors:
<details>
<summary>Scan for file structure errors</summary>
```bash
docker exec -it immich_postgres pg_amcheck --username=postgres --heapallindexed --parent-check --rootdescend --progress --all --install-missing
```
A normal result will end something like this and return with an exit code of `0`:
```bash
7470/8832 relations (84%), 730829/734735 pages (99%)
8425/8832 relations (95%), 734367/734735 pages (99%)
8832/8832 relations (100%), 734735/734735 pages (100%)
```
</details>
If corruption is detected, you should immediately make a backup before performing any other work in the database.
To do so, you may need to set the `zero_damaged_pages=on` flag for the database server to allow `pg_dumpall` to succeed.
After taking a backup, the recommended next step is to restore the database from a healthy backup taken before the corruption was detected.


@@ -5,10 +5,6 @@ import TabItem from '@theme/TabItem';
A [3-2-1 backup strategy](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) is recommended to protect your data. You should keep copies of your uploaded photos/videos as well as the Immich database for a comprehensive backup solution. This page provides an overview of how to back up the database and the folders where user-uploaded pictures and videos are stored. A template bash script that can be run as a cron job is provided [here](/docs/guides/template-backup-script.md).
:::danger
The instructions on this page show you how to prepare your Immich instance to be backed up and which files to include in a backup. You still need to run an actual backup tool yourself to create the backup.
:::
## Database
:::caution
@@ -23,32 +19,15 @@ Refer to the official [postgres documentation](https://www.postgresql.org/docs/c
It is not recommended to back up the `DB_DATA_LOCATION` folder directly. Doing so while the database is running can lead to a corrupted backup that cannot be restored.
:::
### Automatic Database Dumps
### Automatic Database Backups
:::warning
The automatic database dumps can be used to restore the database in the event of damage to the Postgres database files.
There is no monitoring for these dumps and you will not be notified if they are unsuccessful.
:::
:::caution
The database dumps do **NOT** contain any pictures or videos, only metadata. They are only usable with a copy of the other files in `UPLOAD_LOCATION` as outlined below.
:::
For disaster-recovery purposes, Immich will automatically create database dumps. The dumps are stored in `UPLOAD_LOCATION/backups`.
Please be sure to make your own, independent backup of the database together with the asset folders as noted below.
You can adjust the schedule and amount of kept database dumps in the [admin settings](http://my.immich.app/admin/system-settings?isOpen=backup).
By default, Immich will keep the last 14 database dumps and create a new dump every day at 2:00 AM.
#### Trigger Dump
You can trigger a database dump from the [admin job status page](http://my.immich.app/admin/jobs-status).
Visit the page, open the "Create job" modal from the top right, select "Create Database Dump", and click "Confirm".
A job will run and trigger a dump; you can verify that it worked by checking the logs or the `backups/` folder.
This dump counts towards the last `X` dumps that are kept based on your settings.
Immich will automatically create database backups by default. The backups are stored in `UPLOAD_LOCATION/backups`.
You can adjust the schedule and amount of kept backups in the [admin settings](http://my.immich.app/admin/system-settings?isOpen=backup).
By default, Immich will keep the last 14 backups and create a new backup every day at 2:00 AM.
#### Restoring
We hope to make restoring simpler in future versions, for now you can find the database dumps in the `UPLOAD_LOCATION/backups` folder on your host.
We hope to make restoring simpler in future versions, for now you can find the backups in the `UPLOAD_LOCATION/backups` folder on your host.
Then please follow the steps in the following section for restoring the database.
### Manual Backup and Restore
@@ -69,9 +48,9 @@ docker compose create # Create Docker containers for Immich apps witho
docker start immich_postgres # Start Postgres server
sleep 10 # Wait for Postgres server to start up
# Check the database user if you deviated from the default
gunzip --stdout "/path/to/backup/dump.sql.gz" \
gunzip < "/path/to/backup/dump.sql.gz" \
| sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" \
| docker exec -i immich_postgres psql --dbname=postgres --username=<DB_USERNAME> # Restore Backup
| docker exec -i immich_postgres psql --username=postgres # Restore Backup
docker compose up -d # Start remainder of Immich apps
```
@@ -86,16 +65,18 @@ docker compose up -d # Start remainder of Immich apps
docker compose down -v # CAUTION! Deletes all Immich data to start from scratch
## Uncomment the next line and replace DB_DATA_LOCATION with your Postgres path to permanently reset the Postgres database
# Remove-Item -Recurse -Force DB_DATA_LOCATION # CAUTION! Deletes all Immich data to start from scratch
## You should mount the backup (as a volume, example: `- 'C:\path\to\backup\dump.sql:/dump.sql'`) into the immich_postgres container using the docker-compose.yml
docker compose pull # Update to latest version of Immich (if desired)
docker compose create # Create Docker containers for Immich apps without running them
docker start immich_postgres # Start Postgres server
sleep 10 # Wait for Postgres server to start up
docker exec -it immich_postgres bash # Enter the Docker shell and run the following command
# Check the database user if you deviated from the default. If your backup ends in `.gz`, replace `cat` with `gunzip --stdout`
cat "/dump.sql" | sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" | psql --dbname=postgres --username=<DB_USERNAME>
exit # Exit the Docker shell
docker compose up -d # Start remainder of Immich apps
## You should mount the backup (as a volume, example: - 'C:\path\to\backup\dump.sql':/dump.sql) into the immich_postgres container using the docker-compose.yml
docker compose pull # Update to latest version of Immich (if desired)
docker compose create # Create Docker containers for Immich apps without running them
docker start immich_postgres # Start Postgres server
sleep 10 # Wait for Postgres server to start up
docker exec -it immich_postgres bash # Enter the Docker shell and run the following command
# Check the database user if you deviated from the default
cat "/dump.sql" \
| sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" \
| psql --username=postgres # Restore Backup
exit # Exit the Docker shell
docker compose up -d # Start remainder of Immich apps
```
</TabItem>
@@ -109,14 +90,12 @@ Some deployment methods make it difficult to start the database without also sta
## Filesystem
Immich stores two types of content in the filesystem: (a) original, unmodified assets (photos and videos), and (b) generated content. We recommend backing up the entire contents of `UPLOAD_LOCATION`, but only the original content is critical, which is stored in the following folders:
Immich stores two types of content in the filesystem: (1) original, unmodified assets (photos and videos), and (2) generated content. Only the original content needs to be backed up, which is stored in the following folders:
1. `UPLOAD_LOCATION/library`
2. `UPLOAD_LOCATION/upload`
3. `UPLOAD_LOCATION/profile`
If you choose to back up only those folders, you will need to rerun the transcoding and thumbnail generation jobs for all assets after you restore from a backup.
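As a sketch of a host-side backup of just the original content (assuming `UPLOAD_LOCATION` is set in your shell and `/mnt/backup/immich` is a placeholder for your backup target):

```
# -a preserves permissions and timestamps; only original content is copied
rsync -a \
  "$UPLOAD_LOCATION/library" \
  "$UPLOAD_LOCATION/upload" \
  "$UPLOAD_LOCATION/profile" \
  /mnt/backup/immich/
```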
:::caution
If you have moved any of these folders, such as `profile/`, onto a different storage device, make sure to adjust the backup paths to match your setup.
:::

View File

@@ -18,10 +18,10 @@ You can use [this guide](/docs/guides/smtp-gmail) to use Gmail's SMTP server.
Users can manage their email notification settings from their account settings page on the web. They can choose to turn email notifications on or off for the following events:
<img src={require('./img/user-notifications-settings.webp').default} width="80%" title="User notification settings" />
<img src={require('./img/user-notifications-settings.png').default} width="80%" title="User notification settings" />
## Notification templates
You can override the default notification text with custom templates in HTML format. You can use tags to insert dynamic values into your templates.
<img src={require('./img/user-notifications-templates.webp').default} width="80%" title="User notification templates" />
<img src={require('./img/user-notifications-templates.png').default} width="80%" title="User notification templates" />
