Compare commits

1 commit

Author           SHA1        Message  Date
Jason Rasmussen  f7285191bd  WIP      2024-04-22 16:51:24 -04:00
4557 changed files with 249997 additions and 523421 deletions


@@ -1,67 +0,0 @@
{
"name": "Immich - Backend, Frontend and ML",
"service": "immich-server",
"runServices": [
"immich-server",
"redis",
"database",
"immich-machine-learning"
],
"dockerComposeFile": [
"../docker/docker-compose.dev.yml",
"./server/container-compose-overrides.yml"
],
"customizations": {
"vscode": {
"extensions": [
"dbaeumer.vscode-eslint",
"esbenp.prettier-vscode",
"svelte.svelte-vscode",
"ms-vscode-remote.remote-containers",
"foxundermoon.shell-format",
"timonwong.shellcheck",
"rvest.vs-code-prettier-eslint",
"bluebrown.yamlfmt",
"vkrishna04.cspell-sync",
"vitest.explorer",
"ms-playwright.playwright",
"ms-azuretools.vscode-docker"
]
}
},
"forwardPorts": [3000, 9231, 9230, 2283],
"portsAttributes": {
"3000": {
"label": "Immich - Frontend HTTP",
"description": "The frontend of the Immich project",
"onAutoForward": "openBrowserOnce"
},
"2283": {
"label": "Immich - API Server - HTTP",
"description": "The API server of the Immich project"
},
"9231": {
"label": "Immich - API Server - DEBUG",
"description": "The API server of the Immich project"
},
"9230": {
"label": "Immich - Workers - DEBUG",
"description": "The workers of the Immich project"
}
},
"overrideCommand": true,
"workspaceFolder": "/workspaces/immich",
"remoteUser": "node",
"userEnvProbe": "loginInteractiveShell",
"remoteEnv": {
// The location where your uploaded files are stored
"UPLOAD_LOCATION": "${localEnv:UPLOAD_LOCATION:./library}",
// Connection secret for postgres. You should change it to a random password
// Please use only the characters `A-Za-z0-9`, without special characters or spaces
"DB_PASSWORD": "${localEnv:DB_PASSWORD:postgres}",
// The database username
"DB_USERNAME": "${localEnv:DB_USERNAME:postgres}",
// The database name
"DB_DATABASE_NAME": "${localEnv:DB_DATABASE_NAME:immich}"
}
}
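
The ${localEnv:VAR:default} placeholders in remoteEnv resolve against the host environment when the container is created, falling back to the text after the second colon when the variable is unset. A minimal host-side sketch (the exported values are illustrative, not project defaults):

# Hypothetical host setup before reopening the folder in the dev container.
export UPLOAD_LOCATION="/srv/immich/library"   # otherwise the ./library default applies
export DB_PASSWORD="$(openssl rand -hex 16)"   # hex output satisfies the A-Za-z0-9 restriction noted above
code .   # launch VS Code; "Reopen in Container" then picks these up via ${localEnv:...}
# Anything left unexported keeps its fallback (postgres, immich, and so on).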


@@ -1,33 +0,0 @@
services:
immich-server:
build:
target: dev-container-mobile
environment:
- IMMICH_SERVER_URL=http://127.0.0.1:2283/
volumes: !override # bind mount host to /workspaces/immich
- ..:/workspaces/immich
- cli_node_modules:/workspaces/immich/cli/node_modules
- e2e_node_modules:/workspaces/immich/e2e/node_modules
- open_api_node_modules:/workspaces/immich/open-api/typescript-sdk/node_modules
- server_node_modules:/workspaces/immich/server/node_modules
- web_node_modules:/workspaces/immich/web/node_modules
- ${UPLOAD_LOCATION}/photos:/data
- /etc/localtime:/etc/localtime:ro
database:
volumes:
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
volumes:
# Node modules for each service to avoid conflicts and ensure consistent dependencies
cli_node_modules:
e2e_node_modules:
open_api_node_modules:
server_node_modules:
web_node_modules:
# UPLOAD_LOCATION must be set to an absolute path or vol-upload
vol-upload:
# DB_DATA_LOCATION must be set to an absolute path or vol-database
vol-database:
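
The volumes: !override tag above is a Compose merge directive: instead of appending to the volume list inherited from docker-compose.dev.yml, it replaces that list outright, so only the workspace bind mount, the per-package node_modules volumes, and the data mounts survive. One way to check the merged result, assuming this file lives under .devcontainer/mobile/ as the dockerComposeFile entries suggest (UPLOAD_LOCATION must be set because this file interpolates it without a default):

# Render the merged configuration and inspect the server's final volume list.
UPLOAD_LOCATION="$PWD/library" docker compose \
  -f docker/docker-compose.dev.yml \
  -f .devcontainer/mobile/container-compose-overrides.yml \
  config --format json | jq '.services["immich-server"].volumes'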


@@ -1,52 +0,0 @@
{
"name": "Immich - Mobile",
"service": "immich-server",
"runServices": [
"immich-server",
"redis",
"database",
"immich-machine-learning"
],
"dockerComposeFile": [
"../../docker/docker-compose.dev.yml",
"./container-compose-overrides.yml"
],
"customizations": {
"vscode": {
"extensions": [
"Dart-Code.dart-code",
"Dart-Code.flutter",
"dcmdev.dcm-vscode-extension",
"esbenp.prettier-vscode",
"dbaeumer.vscode-eslint",
"esbenp.prettier-vscode",
"svelte.svelte-vscode",
"ms-vscode-remote.remote-containers",
"foxundermoon.shell-format",
"timonwong.shellcheck",
"rvest.vs-code-prettier-eslint",
"bluebrown.yamlfmt",
"vkrishna04.cspell-sync",
"vitest.explorer",
"ms-playwright.playwright",
"ms-azuretools.vscode-docker"
]
}
},
"forwardPorts": [],
"overrideCommand": true,
"workspaceFolder": "/workspaces/immich",
"remoteUser": "node",
"userEnvProbe": "loginInteractiveShell",
"remoteEnv": {
// The location where your uploaded files are stored
"UPLOAD_LOCATION": "${localEnv:UPLOAD_LOCATION:./Library}",
// Connection secret for postgres. You should change it to a random password
// Please use only the characters `A-Za-z0-9`, without special characters or spaces
"DB_PASSWORD": "${localEnv:DB_PASSWORD:postgres}",
// The database username
"DB_USERNAME": "${localEnv:DB_USERNAME:postgres}",
// The database name
"DB_DATABASE_NAME": "${localEnv:DB_DATABASE_NAME:immich}"
}
}


@@ -1,81 +0,0 @@
#!/bin/bash
export IMMICH_PORT="${DEV_SERVER_PORT:-2283}"
export DEV_PORT="${DEV_PORT:-3000}"
# Search for the immich directory inside the workspace.
# /workspaces/immich is the bind mount, but other directories can be mounted if running
# Devcontainer: Clone [repository|pull request] in container volume
WORKSPACES_DIR="/workspaces"
IMMICH_DIR="$WORKSPACES_DIR/immich"
IMMICH_DEVCONTAINER_LOG="$HOME/immich-devcontainer.log"
log() {
# Display command on console, log with timestamp to file
echo "$*"
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*" >>"$IMMICH_DEVCONTAINER_LOG"
}
run_cmd() {
# Ensure log directory exists
mkdir -p "$(dirname "$IMMICH_DEVCONTAINER_LOG")"
log "$@"
# Execute command: display normally on console, log with timestamps to file
"$@" 2>&1 | tee >(while IFS= read -r line; do
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $line" >>"$IMMICH_DEVCONTAINER_LOG"
done)
# Preserve exit status
return "${PIPESTATUS[0]}"
}
# Find directories excluding /workspaces/immich
mapfile -t other_dirs < <(find "$WORKSPACES_DIR" -mindepth 1 -maxdepth 1 -type d ! -path "$IMMICH_DIR" ! -name ".*")
if [ ${#other_dirs[@]} -gt 1 ]; then
log "Error: More than one directory found in $WORKSPACES_DIR other than $IMMICH_DIR."
exit 1
elif [ ${#other_dirs[@]} -eq 1 ]; then
export IMMICH_WORKSPACE="${other_dirs[0]}"
else
export IMMICH_WORKSPACE="$IMMICH_DIR"
fi
log "Found immich workspace in $IMMICH_WORKSPACE"
log ""
fix_permissions() {
log "Fixing permissions for ${IMMICH_WORKSPACE}"
# Change ownership for directories that exist
for dir in "${IMMICH_WORKSPACE}/.vscode" \
"${IMMICH_WORKSPACE}/server/upload" \
"${IMMICH_WORKSPACE}/.pnpm-store" \
"${IMMICH_WORKSPACE}/.github/node_modules" \
"${IMMICH_WORKSPACE}/cli/node_modules" \
"${IMMICH_WORKSPACE}/e2e/node_modules" \
"${IMMICH_WORKSPACE}/open-api/typescript-sdk/node_modules" \
"${IMMICH_WORKSPACE}/server/node_modules" \
"${IMMICH_WORKSPACE}/server/dist" \
"${IMMICH_WORKSPACE}/web/node_modules" \
"${IMMICH_WORKSPACE}/web/dist"; do
if [ -d "$dir" ]; then
run_cmd sudo chown node -R "$dir"
fi
done
log ""
}
install_dependencies() {
log "Installing dependencies"
(
cd "${IMMICH_WORKSPACE}" || exit 1
export CI=1 FROZEN=1 OFFLINE=1
run_cmd make setup-web-dev setup-server-dev
)
log ""
}
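
A subtlety in run_cmd above: because the command is piped through tee into the timestamping process substitution, a bare $? would report tee's exit status rather than the command's, which is why the function returns ${PIPESTATUS[0]}. A standalone sketch of the difference:

# The left-hand command fails, but $? reflects tee, the last stage of the pipe.
false | tee /dev/null
echo "via \$?: $?"                       # prints 0

# PIPESTATUS[0] preserves the status of the first stage instead.
false | tee /dev/null
echo "via PIPESTATUS: ${PIPESTATUS[0]}"  # prints 1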


@@ -1,42 +0,0 @@
services:
immich-server:
build:
target: dev-container-server
env_file: !reset []
hostname: immich-dev
environment:
- IMMICH_SERVER_URL=http://127.0.0.1:2283/
volumes: !override
- ..:/workspaces/immich
- ${UPLOAD_LOCATION:-upload-devcontainer-volume}${UPLOAD_LOCATION:+/photos}:/data
- /etc/localtime:/etc/localtime:ro
- pnpm-store:/usr/src/app/.pnpm-store
- server-node_modules:/usr/src/app/server/node_modules
- web-node_modules:/usr/src/app/web/node_modules
- github-node_modules:/usr/src/app/.github/node_modules
- cli-node_modules:/usr/src/app/cli/node_modules
- docs-node_modules:/usr/src/app/docs/node_modules
- e2e-node_modules:/usr/src/app/e2e/node_modules
- sdk-node_modules:/usr/src/app/open-api/typescript-sdk/node_modules
- app-node_modules:/usr/src/app/node_modules
- sveltekit:/usr/src/app/web/.svelte-kit
- coverage:/usr/src/app/web/coverage
immich-web:
env_file: !reset []
immich-machine-learning:
env_file: !reset []
database:
env_file: !reset []
environment: !override
POSTGRES_PASSWORD: ${DB_PASSWORD-postgres}
POSTGRES_USER: ${DB_USERNAME-postgres}
POSTGRES_DB: ${DB_DATABASE_NAME-immich}
POSTGRES_INITDB_ARGS: '--data-checksums'
POSTGRES_HOST_AUTH_METHOD: md5
volumes:
- ${UPLOAD_LOCATION:-postgres-devcontainer-volume}${UPLOAD_LOCATION:+/postgres}:/var/lib/postgresql/data
redis:
env_file: !reset []
volumes:
upload-devcontainer-volume:
postgres-devcontainer-volume:
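
The upload and database volume sources above combine two substitutions that Compose borrows from the shell: ${VAR:-default} expands to the default when VAR is unset or empty, while ${VAR:+suffix} expands to the suffix only when VAR is set. Together they yield a named volume when UPLOAD_LOCATION is absent and ${UPLOAD_LOCATION}/photos when it is provided. The same operators demonstrated in bash:

unset UPLOAD_LOCATION
echo "${UPLOAD_LOCATION:-upload-devcontainer-volume}${UPLOAD_LOCATION:+/photos}"
# -> upload-devcontainer-volume  (fallback used, :+ contributes nothing)

UPLOAD_LOCATION=/srv/immich
echo "${UPLOAD_LOCATION:-upload-devcontainer-volume}${UPLOAD_LOCATION:+/photos}"
# -> /srv/immich/photos  (value kept, /photos appended)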


@@ -1,22 +0,0 @@
#!/bin/bash
# shellcheck source=common.sh
# shellcheck disable=SC1091
source /immich-devcontainer/container-common.sh
log "Preparing Immich Nest API Server"
log ""
export CI=1
run_cmd pnpm --filter immich install
log "Starting Nest API Server"
log ""
cd "${IMMICH_WORKSPACE}/server" || (
log "Immich workspace not found"
exit 1
)
while true; do
run_cmd pnpm --filter immich exec nest start --debug "0.0.0.0:9230" --watch
log "Nest API Server crashed with exit code $?. Respawning in 3s ..."
sleep 3
done


@@ -1,29 +0,0 @@
#!/bin/bash
# shellcheck source=common.sh
# shellcheck disable=SC1091
source /immich-devcontainer/container-common.sh
export CI=1
log "Preparing Immich Web Frontend"
log ""
run_cmd pnpm --filter @immich/sdk install
run_cmd pnpm --filter @immich/sdk build
run_cmd pnpm --filter immich-web install
log "Starting Immich Web Frontend"
log ""
cd "${IMMICH_WORKSPACE}/web" || (
log "Immich Workspace not found"
exit 1
)
until curl --output /dev/null --silent --head --fail "http://127.0.0.1:${IMMICH_PORT}/api/server/config"; do
log "Waiting for api server..."
sleep 1
done
while true; do
run_cmd pnpm --filter immich-web exec vite dev --host 0.0.0.0 --port "${DEV_PORT}"
log "Web crashed with exit code $?. Respawning in 3s ..."
sleep 3
done


@@ -1,17 +0,0 @@
#!/bin/bash
# shellcheck source=common.sh
# shellcheck disable=SC1091
source /immich-devcontainer/container-common.sh
log "Setting up Immich dev container..."
fix_permissions
log "Setup complete, please wait while backend and frontend services automatically start"
log
log "If necessary, the services may be manually started using"
log
log "$ /immich-devcontainer/container-start-backend.sh"
log "$ /immich-devcontainer/container-start-frontend.sh"
log
log "From different terminal windows, as these scripts automatically restart the server"
log "on error, and will continuously run in a loop"


@@ -1,41 +1,31 @@
.vscode/
.github/
.git/
.env*
*.log
*.tmp
*.temp
**/Dockerfile
**/node_modules/
**/.pnpm-store/
**/dist/
**/coverage/
**/build/
design/
docker/
!docker/scripts
docs/
!docs/package.json
!docs/package-lock.json
e2e/
!e2e/package.json
!e2e/package-lock.json
fastlane/
machine-learning/
misc/
mobile/
cli/coverage/
cli/dist/
cli/node_modules/
open-api/typescript-sdk/build/
!open-api/typescript-sdk/package.json
!open-api/typescript-sdk/package-lock.json
open-api/typescript-sdk/node_modules/
server/coverage/
server/node_modules/
server/upload/
server/src/queries
server/dist/
server/www/
server/test/assets/
web/node_modules/
web/coverage/
web/.svelte-kit
web/build/

.gitattributes

@@ -2,22 +2,12 @@ mobile/openapi/**/*.md -diff -merge
mobile/openapi/**/*.md linguist-generated=true
mobile/openapi/**/*.dart -diff -merge
mobile/openapi/**/*.dart linguist-generated=true
mobile/openapi/.openapi-generator/FILES -diff -merge
mobile/openapi/.openapi-generator/FILES linguist-generated=true
mobile/lib/**/*.g.dart -diff -merge
mobile/lib/**/*.g.dart linguist-generated=true
mobile/lib/**/*.drift.dart -diff -merge
mobile/lib/**/*.drift.dart linguist-generated=true
mobile/drift_schemas/main/drift_schema_*.json -diff -merge
mobile/drift_schemas/main/drift_schema_*.json linguist-generated=true
mobile/lib/infrastructure/repositories/db.repository.steps.dart -diff -merge
mobile/lib/infrastructure/repositories/db.repository.steps.dart linguist-generated=true
mobile/test/drift/main/generated/** -diff -merge
mobile/test/drift/main/generated/** linguist-generated=true
open-api/typescript-sdk/fetch-client.ts -diff -merge
open-api/typescript-sdk/fetch-client.ts linguist-generated=true

.github/.nvmrc

@@ -1 +0,0 @@
22.19.0


@@ -1,4 +0,0 @@
# Ignore files for PNPM, NPM and YARN
pnpm-lock.yaml
package-lock.json
yarn.lock


@@ -1,19 +1,18 @@
title: '[Feature] feature-name-goes-here'
labels: ['feature']
title: "[Feature] <feature-name-goes-here>"
labels: ["feature"]
body:
- type: markdown
attributes:
value: |
Please use this form to request a new feature for Immich.
Stick to only a single feature per request. If you list multiple different features at once,
your request will be closed.
Please use this form to request a new feature for Immich
- type: checkboxes
attributes:
label: I have searched the existing feature requests, both open and closed, to make sure this is not a duplicate request.
label: I have searched the existing feature requests to make sure this is not a duplicate request.
options:
- label: 'Yes'
- label: "Yes"
required: true
- type: textarea
id: feature

.github/FUNDING.yml

@@ -1 +1,5 @@
custom: ['https://buy.immich.app', 'https://immich.store']
# These are supported funding model platforms
github: immich-app
liberapay: alex.tran1502
custom: https://www.buymeacoffee.com/altran1502


@@ -1,12 +1,6 @@
name: Report an issue with Immich
description: Report an issue with Immich
body:
- type: checkboxes
attributes:
label: I have searched the existing issues, both open and closed, to make sure this is not a duplicate report.
options:
- label: 'Yes'
- type: markdown
attributes:
value: |
@@ -64,11 +58,6 @@ body:
- label: Web
- label: Mobile
- type: input
attributes:
label: Device make and model
placeholder: Samsung S25 Android 16
- type: textarea
validations:
required: true
@@ -88,12 +77,13 @@ body:
id: repro
attributes:
label: Reproduction steps
description: 'How do you trigger this bug? Please walk us through it step by step.'
description: "How do you trigger this bug? Please walk us through it step by step."
value: |
1.
2.
3.
...
render: bash
validations:
required: true
@@ -101,13 +91,12 @@ body:
id: logs
attributes:
label: Relevant log output
description:
Please copy and paste any relevant logs below. (code formatting is
description: Please copy and paste any relevant logs below. (code formatting is
enabled, no need for backticks)
render: shell
validations:
required: false
- type: textarea
attributes:
label: Additional information


@@ -1,14 +1,11 @@
blank_issues_enabled: false
contact_links:
- name: I have a question or need support
url: https://discord.immich.app
- name: I have a question or need support
url: https://discord.gg/D8JsnBEuKb
about: We use GitHub for tracking bugs, please check out our Discord channel for freaky fast support.
- name: 📷 My photo or video has a date, time, or timezone problem
url: https://github.com/immich-app/immich/discussions/12650
about: Upload a sample file to this discussion and we will take a look
- name: 🌟 Feature request
- name: Feature Request
url: https://github.com/immich-app/immich/discussions/new?category=feature-request
about: Please use our GitHub Discussion for making feature requests.
- name: 🫣 I'm unsure where to go
url: https://discord.immich.app
- name: I'm unsure where to go
url: https://discord.gg/D8JsnBEuKb
about: If you are unsure where to go, then joining our Discord is recommended; Just ask!


@@ -1 +1,2 @@
blank_issues_enabled: false
blank_pull_request_template_enabled: false


@@ -0,0 +1,22 @@
## Description
<!--- Describe your changes in detail -->
<!--- Why is this change required? What problem does it solve? -->
<!--- If it fixes an open issue, please link to the issue here. -->
Fixes # (issue)
## How Has This Been Tested?
<!-- Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration -->
- [ ] Test A
- [ ] Test B
## Screenshots (if appropriate):
## Checklist:
- [ ] I have performed a self-review of my own code
- [ ] I have made corresponding changes to the documentation if applicable

.github/dependabot.yml

@@ -0,0 +1,7 @@
version: 2
updates:
# Maintain dependencies for GitHub Actions
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"

.github/labeler.yml

@@ -1,38 +0,0 @@
cli:
- changed-files:
- any-glob-to-any-file:
- cli/src/**
documentation:
- changed-files:
- any-glob-to-any-file:
- docs/blob/**
- docs/docs/**
- docs/src/**
- docs/static/**
🖥web:
- changed-files:
- any-glob-to-any-file:
- web/src/**
- web/static/**
📱mobile:
- changed-files:
- any-glob-to-any-file:
- mobile/lib/**
- mobile/test/**
🗄server:
- changed-files:
- any-glob-to-any-file:
- server/src/**
- server/test/**
🧠machine-learning:
- changed-files:
- any-glob-to-any-file:
- machine-learning/app/**
changelog:translation:
- head-branch: ['^chore/translations$']


@@ -1,9 +0,0 @@
{
"scripts": {
"format": "prettier --check .",
"format:fix": "prettier --write ."
},
"devDependencies": {
"prettier": "^3.5.3"
}
}


@@ -1,40 +0,0 @@
## Description
<!--- Describe your changes in detail -->
<!--- Why is this change required? What problem does it solve? -->
<!--- If it fixes an open issue, please link to the issue here. -->
Fixes # (issue)
## How Has This Been Tested?
<!-- Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration -->
- [ ] Test A
- [ ] Test B
<details><summary><h2>Screenshots (if appropriate)</h2></summary>
<!-- Images go below this line. -->
</details>
<!-- API endpoint changes (if relevant)
## API Changes
The `/api/something` endpoint is now `/api/something-else`
-->
## Checklist:
- [ ] I have performed a self-review of my own code
- [ ] I have made corresponding changes to the documentation if applicable
- [ ] I have no unrelated changes in the PR.
- [ ] I have confirmed that any new dependencies are strictly necessary.
- [ ] I have written tests for new code (if applicable)
- [ ] I have followed naming conventions/patterns in the surrounding code
- [ ] All code in `src/services/` uses repositories implementations for database calls, filesystem operations, etc.
- [ ] All code in `src/repositories/` is pretty basic/simple and does not have any immich specific logic (that belongs in `src/services/`)
## Please describe to which degree, if any, an LLM was used in creating this pull request.
...

.github/release.yml

@@ -1,33 +1,41 @@
changelog:
categories:
- title: 🚨 Breaking Changes
labels:
- changelog:breaking-change
- title: 🫥 Deprecated Changes
labels:
- changelog:deprecated
- title: 🔒 Security
labels:
- changelog:security
- title: 🚀 Features
labels:
- changelog:feature
- title: 🌟 Enhancements
labels:
- changelog:enhancement
- title: 🐛 Bug fixes
labels:
- changelog:bugfix
- title: 📚 Documentation
labels:
- changelog:documentation
- title: 🌐 Translations
labels:
- changelog:translation
changelog:
categories:
- title: ⚠️ Breaking Changes
labels:
- breaking-change
- title: 🗄️ Server
labels:
- 🗄server
- title: 📱 Mobile
labels:
- 📱mobile
- title: 🖥️ Web
labels:
- 🖥web
- title: 🧠 Machine Learning
labels:
- 🧠machine-learning
- title: ⚡ CLI
labels:
- cli
- title: 📓 Documentation
labels:
- documentation
- title: 🔨 Maintenance
labels:
- deployment
- dependencies
- renovate
- maintenance
- tech-debt
- title: Other changes
labels:
- "*"


@@ -7,15 +7,6 @@ on:
ref:
required: false
type: string
secrets:
KEY_JKS:
required: true
ALIAS:
required: true
ANDROID_KEY_PASSWORD:
required: true
ANDROID_STORE_PASSWORD:
required: true
pull_request:
push:
branches: [main]
@@ -24,119 +15,61 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- name: Check what should run
id: check
uses: immich-app/devtools/actions/pre-job@24820aa4ef67959b0dcf69a438cccf00d7c7042b # pre-job-action-v1.0.1
with:
filters: |
mobile:
- 'mobile/**'
force-filters: |
- '.github/workflows/build-mobile.yml'
force-events: 'workflow_call,workflow_dispatch'
build-sign-android:
name: Build and sign Android
needs: pre-job
permissions:
contents: read
# Skip when PR from a fork
if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' && fromJSON(needs.pre-job.outputs.should_run).mobile == true }}
runs-on: mich
if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' }}
runs-on: macos-14
steps:
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- name: Determine ref
id: get-ref
run: |
input_ref="${{ inputs.ref }}"
github_ref="${{ github.sha }}"
ref="${input_ref:-$github_ref}"
echo "ref=$ref" >> $GITHUB_OUTPUT
- uses: actions/checkout@v4
with:
ref: ${{ inputs.ref || github.sha }}
persist-credentials: false
ref: ${{ steps.get-ref.outputs.ref }}
- uses: actions/setup-java@v4
with:
distribution: 'zulu'
java-version: '17'
cache: 'gradle'
- name: Setup Flutter SDK
uses: subosito/flutter-action@v2
with:
channel: 'stable'
flutter-version: '3.19.3'
cache: true
- name: Create the Keystore
env:
KEY_JKS: ${{ secrets.KEY_JKS }}
working-directory: ./mobile
run: printf "%s" $KEY_JKS | base64 -d > android/key.jks
- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
with:
distribution: 'zulu'
java-version: '17'
- name: Restore Gradle Cache
id: cache-gradle-restore
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
with:
path: |
~/.gradle/caches
~/.gradle/wrapper
~/.android/sdk
mobile/android/.gradle
mobile/.dart_tool
key: build-mobile-gradle-${{ runner.os }}-main
- name: Setup Flutter SDK
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
cache: true
- name: Setup Android SDK
uses: android-actions/setup-android@9fc6c4e9069bf8d3d10b2204b1fb8f6ef7065407 # v3.2.2
with:
packages: ''
run: echo $KEY_JKS | base64 -d > android/key.jks
- name: Get Packages
working-directory: ./mobile
run: flutter pub get
- name: Generate translation file
run: dart run easy_localization:generate -S ../i18n && dart run bin/generate_keys.dart
working-directory: ./mobile
- name: Generate platform APIs
run: make pigeon
working-directory: ./mobile
- name: Build Android App Bundle
working-directory: ./mobile
env:
ALIAS: ${{ secrets.ALIAS }}
ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
IS_MAIN: ${{ github.ref == 'refs/heads/main' }}
run: |
if [[ $IS_MAIN == 'true' ]]; then
flutter build apk --release
flutter build apk --release --split-per-abi --target-platform android-arm,android-arm64,android-x64
else
flutter build apk --debug --split-per-abi --target-platform android-arm64
fi
flutter build apk --release
flutter build apk --release --split-per-abi --target-platform android-arm,android-arm64,android-x64
- name: Publish Android Artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: release-apk-signed
path: mobile/build/app/outputs/flutter-apk/*.apk
- name: Save Gradle Cache
id: cache-gradle-save
uses: actions/cache/save@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
if: github.ref == 'refs/heads/main'
with:
path: |
~/.gradle/caches
~/.gradle/wrapper
~/.android/sdk
mobile/android/.gradle
mobile/.dart_tool
key: ${{ steps.cache-gradle-restore.outputs.cache-primary-key }}
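
Both versions of the keystore step rebuild the binary android/key.jks from a base64-encoded secret, since GitHub secrets can only hold text; the newer printf "%s" form also avoids echo's trailing newline and flag interpretation. A local sketch of the round trip (file names are illustrative):

# One-time: encode the keystore; the single-line output becomes the KEY_JKS secret.
base64 -w 0 key.jks > key.jks.b64

# In CI: decode the secret back into a usable binary keystore.
printf '%s' "$KEY_JKS" | base64 -d > android/key.jks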


@@ -8,38 +8,31 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
cleanup:
name: Cleanup
runs-on: ubuntu-latest
permissions:
contents: read
actions: write
steps:
- name: Check out code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Cleanup
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
REF: ${{ github.ref }}
run: |
gh extension install actions/gh-actions-cache
REPO=${{ github.repository }}
BRANCH=${{ github.ref }}
echo "Fetching list of cache keys"
cacheKeysForPR=$(gh actions-cache list -R $REPO -B ${REF} -L 100 | cut -f 1 )
cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )
## Setting this to not fail the workflow while deleting cache keys.
set +e
echo "Deleting caches..."
for cacheKey in $cacheKeysForPR
do
gh actions-cache delete $cacheKey -R "$REPO" -B "${REF}" --confirm
gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
done
echo "Done"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@@ -1,82 +1,67 @@
name: CLI Build
on:
workflow_dispatch:
push:
branches: [main]
paths:
- 'cli/**'
- '.github/workflows/cli.yml'
- "cli/**"
- ".github/workflows/cli.yml"
pull_request:
branches: [main]
paths:
- 'cli/**'
- '.github/workflows/cli.yml'
release:
types: [published]
- "cli/**"
- ".github/workflows/cli.yml"
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
permissions:
packages: write
jobs:
publish:
name: CLI Publish
name: Publish
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./cli
steps:
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- uses: actions/checkout@v4
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@v4
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './cli/.nvmrc'
registry-url: 'https://registry.npmjs.org'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup typescript-sdk
run: pnpm install && pnpm run build
working-directory: ./open-api/typescript-sdk
- run: pnpm install --frozen-lockfile
- run: pnpm build
- run: pnpm publish
if: ${{ github.event_name == 'release' }}
node-version: "20.x"
registry-url: "https://registry.npmjs.org"
- name: Prepare SDK
run: npm ci --prefix ../open-api/typescript-sdk/
- name: Build SDK
run: npm run build --prefix ../open-api/typescript-sdk/
- run: npm ci
- run: npm run build
- run: npm publish
if: ${{ github.event_name == 'workflow_dispatch' }}
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
docker:
name: Docker
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
needs: publish
steps:
- name: Checkout
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
uses: docker/setup-qemu-action@v3.0.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@v3.3.0
- name: Login to GitHub Container Registry
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
uses: docker/login-action@v3
if: ${{ !github.event.pull_request.head.repo.fork }}
with:
registry: ghcr.io
@@ -91,22 +76,22 @@ jobs:
- name: Generate docker image tags
id: metadata
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
uses: docker/metadata-action@v5
with:
flavor: |
latest=false
images: |
name=ghcr.io/${{ github.repository_owner }}/immich-cli
tags: |
type=raw,value=${{ steps.package-version.outputs.version }},enable=${{ github.event_name == 'release' }}
type=raw,value=latest,enable=${{ github.event_name == 'release' }}
type=raw,value=${{ steps.package-version.outputs.version }},enable=${{ github.event_name == 'workflow_dispatch' }}
type=raw,value=latest,enable=${{ github.event_name == 'workflow_dispatch' }}
- name: Build and push image
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@v5.3.0
with:
file: cli/Dockerfile
platforms: linux/amd64,linux/arm64
push: ${{ github.event_name == 'release' }}
push: ${{ github.event_name == 'workflow_dispatch' }}
cache-from: type=gha
cache-to: type=gha,mode=max
tags: ${{ steps.metadata.outputs.tags }}


@@ -1,107 +0,0 @@
on:
issues:
types: [opened]
discussion:
types: [created]
name: Close likely duplicates
permissions: {}
jobs:
should_run:
runs-on: ubuntu-latest
outputs:
should_run: ${{ steps.should_run.outputs.run }}
steps:
- id: should_run
run: echo "run=${{ github.event_name == 'issues' || github.event.discussion.category.name == 'Feature Request' }}" >> $GITHUB_OUTPUT
get_body:
runs-on: ubuntu-latest
needs: should_run
if: ${{ needs.should_run.outputs.should_run == 'true' }}
env:
EVENT: ${{ toJSON(github.event) }}
outputs:
body: ${{ steps.get_body.outputs.body }}
steps:
- id: get_body
run: |
BODY=$(echo """$EVENT""" | jq -r '.issue // .discussion | .body' | base64 -w 0)
echo "body=$BODY" >> $GITHUB_OUTPUT
get_checkbox_json:
runs-on: ubuntu-latest
needs: [get_body, should_run]
if: ${{ needs.should_run.outputs.should_run == 'true' }}
container:
image: ghcr.io/immich-app/mdq:main@sha256:d8ae47cf2e6cf4e2559bd57a60b73674fe44f897cba2c2bddff2987a05be10a4
outputs:
checked: ${{ steps.get_checkbox.outputs.checked }}
steps:
- id: get_checkbox
env:
BODY: ${{ needs.get_body.outputs.body }}
run: |
CHECKED=$(echo "$BODY" | base64 -d | /mdq --output json '# I have searched | - [?] Yes' | jq '.items[0].list[0].checked // false')
echo "checked=$CHECKED" >> $GITHUB_OUTPUT
close_and_comment:
runs-on: ubuntu-latest
needs: [get_checkbox_json, should_run]
if: ${{ needs.should_run.outputs.should_run == 'true' && needs.get_checkbox_json.outputs.checked != 'true' }}
permissions:
issues: write
discussions: write
steps:
- name: Close issue
if: ${{ github.event_name == 'issues' }}
env:
GH_TOKEN: ${{ github.token }}
NODE_ID: ${{ github.event.issue.node_id }}
run: |
gh api graphql \
-f issueId="$NODE_ID" \
-f body="This issue has automatically been closed as it is likely a duplicate. We get a lot of duplicate threads each day, which is why we ask you in the template to confirm that you searched for duplicates before opening one. If you're sure this is not a duplicate, please leave a comment and we will reopen the thread if necessary." \
-f query='
mutation CommentAndCloseIssue($issueId: ID!, $body: String!) {
addComment(input: {
subjectId: $issueId,
body: $body
}) {
__typename
}
closeIssue(input: {
issueId: $issueId,
stateReason: DUPLICATE
}) {
__typename
}
}'
- name: Close discussion
if: ${{ github.event_name == 'discussion' && github.event.discussion.category.name == 'Feature Request' }}
env:
GH_TOKEN: ${{ github.token }}
NODE_ID: ${{ github.event.discussion.node_id }}
run: |
gh api graphql \
-f discussionId="$NODE_ID" \
-f body="This discussion has automatically been closed as it is likely a duplicate. We get a lot of duplicate threads each day, which is why we ask you in the template to confirm that you searched for duplicates before opening one. If you're sure this is not a duplicate, please leave a comment and we will reopen the thread if necessary." \
-f query='
mutation CommentAndCloseDiscussion($discussionId: ID!, $body: String!) {
addDiscussionComment(input: {
discussionId: $discussionId,
body: $body
}) {
__typename
}
closeDiscussion(input: {
discussionId: $discussionId,
reason: DUPLICATE
}) {
__typename
}
}'
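
The hand-off between get_body and get_checkbox_json relies on base64: encoding the issue body to a single line keeps multiline Markdown (and stray = signs) from corrupting the key=value format of $GITHUB_OUTPUT, and the consuming job decodes it before piping it into mdq. The round trip, sketched outside of Actions:

BODY=$'# I have searched\n- [x] Yes'                  # stand-in for an issue body
ENCODED=$(printf '%s' "$BODY" | base64 -w 0)          # one line, safe for key=value output
echo "body=$ENCODED" >> "${GITHUB_OUTPUT:-/tmp/out}"  # the step-output file in CI
printf '%s' "$ENCODED" | base64 -d                    # a later job recovers the exact body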


@@ -9,14 +9,14 @@
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: 'CodeQL'
name: "CodeQL"
on:
push:
branches: ['main']
branches: [ "main" ]
pull_request:
# The branches below must be a subset of the branches above
branches: ['main']
branches: [ "main" ]
schedule:
- cron: '20 13 * * 1'
@@ -24,8 +24,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
analyze:
name: Analyze
@@ -38,44 +36,43 @@ jobs:
strategy:
fail-fast: false
matrix:
language: ['javascript', 'python']
language: [ 'javascript', 'python' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v3
# If the Autobuild fails above, remove it and uncomment the following three lines.
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
# If the Autobuild fails above, remove it and uncomment the following three lines.
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
with:
category: '/language:${{matrix.language}}'
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"

.github/workflows/docker-cleanup.yml

@@ -0,0 +1,73 @@
# This workflow runs on certain conditions to check for and potentially
# delete container images from the GHCR which no longer have an associated
# code branch.
# Requires a PAT with the correct scope set in the secrets.
#
# This workflow will not trigger runs on forked repos.
name: Docker Cleanup
on:
pull_request:
types:
- "closed"
push:
paths:
- ".github/workflows/docker-cleanup.yml"
concurrency:
group: registry-tags-cleanup
cancel-in-progress: false
jobs:
cleanup-images:
name: Cleanup Stale Images Tags for ${{ matrix.primary-name }}
runs-on: ubuntu-22.04
strategy:
fail-fast: false
matrix:
include:
- primary-name: "immich-server"
- primary-name: "immich-machine-learning"
env:
# Requires a personal access token with the OAuth scope delete:packages
TOKEN: ${{ secrets.PACKAGE_DELETE_TOKEN }}
steps:
- name: Clean temporary images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/ephemeral@v0.6.0
with:
token: "${{ env.TOKEN }}"
owner: "immich-app"
is_org: "true"
do_delete: "true"
package_name: "${{ matrix.primary-name }}"
scheme: "pull_request"
repo_name: "immich"
match_regex: '^pr-(\d+)$|^(\d+)$'
cleanup-untagged-images:
name: Cleanup Untagged Images Tags for ${{ matrix.primary-name }}
runs-on: ubuntu-22.04
needs:
- cleanup-images
strategy:
fail-fast: false
matrix:
include:
- primary-name: "immich-server"
- primary-name: "immich-machine-learning"
- primary-name: "immich-build-cache"
env:
# Requires a personal access token with the OAuth scope delete:packages
TOKEN: ${{ secrets.PACKAGE_DELETE_TOKEN }}
steps:
- name: Clean untagged images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/untagged@v0.6.0
with:
token: "${{ env.TOKEN }}"
owner: "immich-app"
do_delete: "true"
is_org: "true"
package_name: "${{ matrix.primary-name }}"


@@ -5,6 +5,7 @@ on:
push:
branches: [main]
pull_request:
branches: [main]
release:
types: [published]
@@ -12,175 +13,118 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
permissions:
packages: write
jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- name: Check what should run
id: check
uses: immich-app/devtools/actions/pre-job@24820aa4ef67959b0dcf69a438cccf00d7c7042b # pre-job-action-v1.0.1
with:
filters: |
server:
- 'server/**'
- 'openapi/**'
- 'web/**'
- 'i18n/**'
machine-learning:
- 'machine-learning/**'
force-filters: |
- '.github/workflows/docker.yml'
- '.github/workflows/multi-runner-build.yml'
- '.github/actions/image-build'
force-events: 'workflow_dispatch,release'
retag_ml:
name: Re-Tag ML
needs: pre-job
permissions:
contents: read
packages: write
if: ${{ fromJSON(needs.pre-job.outputs.should_run).machine-learning == false && !github.event.pull_request.head.repo.fork }}
build_and_push:
name: Build and Push
runs-on: ubuntu-latest
strategy:
matrix:
suffix: ['', '-cuda', '-rocm', '-openvino', '-armnn', '-rknn']
steps:
- name: Login to GitHub Container Registry
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Re-tag image
env:
REGISTRY_NAME: 'ghcr.io'
REPOSITORY: ${{ github.repository_owner }}/immich-machine-learning
TAG_OLD: main${{ matrix.suffix }}
TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
run: |
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
retag_server:
name: Re-Tag Server
needs: pre-job
permissions:
contents: read
packages: write
if: ${{ fromJSON(needs.pre-job.outputs.should_run).server == false && !github.event.pull_request.head.repo.fork }}
runs-on: ubuntu-latest
strategy:
matrix:
suffix: ['']
steps:
- name: Login to GitHub Container Registry
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Re-tag image
env:
REGISTRY_NAME: 'ghcr.io'
REPOSITORY: ${{ github.repository_owner }}/immich-server
TAG_OLD: main${{ matrix.suffix }}
TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
run: |
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
machine-learning:
name: Build and Push ML
needs: pre-job
if: ${{ fromJSON(needs.pre-job.outputs.should_run).machine-learning == true }}
strategy:
# Prevent a failure in one image from stopping the other builds
fail-fast: false
matrix:
include:
- device: cpu
tag-suffix: ''
- device: cuda
tag-suffix: '-cuda'
- image: immich-machine-learning
context: machine-learning
file: machine-learning/Dockerfile
platforms: linux/amd64,linux/arm64
device: cpu
- image: immich-machine-learning
context: machine-learning
file: machine-learning/Dockerfile
platforms: linux/amd64
- device: openvino
tag-suffix: '-openvino'
device: cuda
suffix: -cuda
- image: immich-machine-learning
context: machine-learning
file: machine-learning/Dockerfile
platforms: linux/amd64
- device: armnn
tag-suffix: '-armnn'
device: openvino
suffix: -openvino
- image: immich-machine-learning
context: machine-learning
file: machine-learning/Dockerfile
platforms: linux/arm64
- device: rknn
tag-suffix: '-rknn'
platforms: linux/arm64
- device: rocm
tag-suffix: '-rocm'
platforms: linux/amd64
runner-mapping: '{"linux/amd64": "mich"}'
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@129aeda75a450666ce96e8bc8126652e717917a7 # multi-runner-build-workflow-0.1.1
permissions:
contents: read
actions: read
packages: write
secrets:
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
with:
image: immich-machine-learning
context: machine-learning
dockerfile: machine-learning/Dockerfile
platforms: ${{ matrix.platforms }}
runner-mapping: ${{ matrix.runner-mapping }}
tag-suffix: ${{ matrix.tag-suffix }}
dockerhub-push: ${{ github.event_name == 'release' }}
build-args: |
DEVICE=${{ matrix.device }}
device: armnn
suffix: -armnn
server:
name: Build and Push Server
needs: pre-job
if: ${{ fromJSON(needs.pre-job.outputs.should_run).server == true }}
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@129aeda75a450666ce96e8bc8126652e717917a7 # multi-runner-build-workflow-0.1.1
permissions:
contents: read
actions: read
packages: write
secrets:
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
with:
image: immich-server
context: .
dockerfile: server/Dockerfile
dockerhub-push: ${{ github.event_name == 'release' }}
build-args: |
DEVICE=cpu
- image: immich-server
context: .
file: server/Dockerfile
platforms: linux/amd64,linux/arm64
device: cpu
success-check-server:
name: Docker Build & Push Server Success
needs: [server, retag_server]
permissions: {}
runs-on: ubuntu-latest
if: always()
steps:
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
with:
needs: ${{ toJSON(needs) }}
- name: Checkout
uses: actions/checkout@v4
success-check-ml:
name: Docker Build & Push ML Success
needs: [machine-learning, retag_ml]
permissions: {}
runs-on: ubuntu-latest
if: always()
steps:
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3.0.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3.3.0
- name: Login to Docker Hub
# Only push to Docker Hub when making a release
if: ${{ github.event_name == 'release' }}
uses: docker/login-action@v3
with:
needs: ${{ toJSON(needs) }}
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
# Skip when PR from a fork
if: ${{ !github.event.pull_request.head.repo.fork }}
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Generate docker image tags
id: metadata
uses: docker/metadata-action@v5
with:
flavor: |
# Disable latest tag
latest=false
images: |
name=ghcr.io/${{ github.repository_owner }}/${{matrix.image}}
name=altran1502/${{matrix.image}},enable=${{ github.event_name == 'release' }}
tags: |
# Tag with branch name
type=ref,event=branch,suffix=${{ matrix.suffix }}
# Tag with pr-number
type=ref,event=pr,suffix=${{ matrix.suffix }}
# Tag with git tag on release
type=ref,event=tag,suffix=${{ matrix.suffix }}
type=raw,value=release,enable=${{ github.event_name == 'release' }},suffix=${{ matrix.suffix }}
- name: Determine build cache output
id: cache-target
run: |
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
# Essentially just ignore the cache output (PR can't write to registry cache)
echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
else
echo "cache-to=type=registry,mode=max,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{ matrix.image }}" >> $GITHUB_OUTPUT
fi
- name: Build and push image
uses: docker/build-push-action@v5.3.0
with:
context: ${{ matrix.context }}
file: ${{ matrix.file }}
platforms: ${{ matrix.platforms }}
# Skip pushing when PR from a fork
push: ${{ !github.event.pull_request.head.repo.fork }}
cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{matrix.image}}
cache-to: ${{ steps.cache-target.outputs.cache-to }}
build-args: |
DEVICE=${{ matrix.device }}
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}


@@ -1,79 +0,0 @@
name: Docs build
on:
push:
branches: [main]
pull_request:
release:
types: [published]
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- name: Check what should run
id: check
uses: immich-app/devtools/actions/pre-job@24820aa4ef67959b0dcf69a438cccf00d7c7042b # pre-job-action-v1.0.1
with:
filters: |
docs:
- 'docs/**'
open-api:
- 'open-api/immich-openapi-specs.json'
force-filters: |
- '.github/workflows/docs-build.yml'
force-events: 'release'
force-branches: 'main'
build:
name: Docs Build
needs: pre-job
permissions:
contents: read
if: ${{ fromJSON(needs.pre-job.outputs.should_run).docs == true }}
runs-on: ubuntu-latest
defaults:
run:
working-directory: ./docs
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './docs/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run install
run: pnpm install
- name: Check formatting
run: pnpm format
- name: Run build
run: pnpm build
- name: Upload build output
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: docs-build-output
path: docs/build/
include-hidden-files: true
retention-days: 1


@@ -1,217 +0,0 @@
name: Docs deploy
on:
workflow_run: # zizmor: ignore[dangerous-triggers] no attacker inputs are used here
workflows: ['Docs build']
types:
- completed
jobs:
checks:
name: Docs Deploy Checks
runs-on: ubuntu-latest
permissions:
actions: read
pull-requests: read
outputs:
parameters: ${{ steps.parameters.outputs.result }}
artifact: ${{ steps.get-artifact.outputs.result }}
steps:
- if: ${{ github.event.workflow_run.conclusion != 'success' }}
run: echo 'The triggering workflow did not succeed' && exit 1
- name: Get artifact
id: get-artifact
uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7.1.0
with:
script: |
let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
owner: context.repo.owner,
repo: context.repo.repo,
run_id: context.payload.workflow_run.id,
});
let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => {
return artifact.name == "docs-build-output"
})[0];
if (!matchArtifact) {
console.log("No artifact found with the name docs-build-output, build job was skipped")
return { found: false };
}
return { found: true, id: matchArtifact.id };
- name: Determine deploy parameters
id: parameters
uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7.1.0
env:
HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
with:
script: |
const eventType = context.payload.workflow_run.event;
const isFork = context.payload.workflow_run.repository.fork;
let parameters;
console.log({eventType, isFork});
if (eventType == "push") {
const branch = context.payload.workflow_run.head_branch;
console.log({branch});
const shouldDeploy = !isFork && branch == "main";
parameters = {
event: "branch",
name: "main",
shouldDeploy
};
} else if (eventType == "pull_request") {
let pull_number = context.payload.workflow_run.pull_requests[0]?.number;
if(!pull_number) {
const {HEAD_SHA} = process.env;
const response = await github.rest.search.issuesAndPullRequests({q: `repo:${{ github.repository }} is:pr sha:${HEAD_SHA}`,per_page: 1,})
const items = response.data.items
if (items.length < 1) {
throw new Error("No pull request found for the commit")
}
const pullRequestNumber = items[0].number
console.info("Pull request number is", pullRequestNumber)
pull_number = pullRequestNumber
}
const {data: pr} = await github.rest.pulls.get({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number
});
console.log({pull_number});
parameters = {
event: "pr",
name: `pr-${pull_number}`,
pr_number: pull_number,
shouldDeploy: true
};
} else if (eventType == "release") {
parameters = {
event: "release",
name: context.payload.workflow_run.head_branch,
shouldDeploy: !isFork
};
}
console.log(parameters);
return parameters;
deploy:
name: Docs Deploy
runs-on: ubuntu-latest
needs: checks
permissions:
contents: read
actions: read
pull-requests: write
if: ${{ fromJson(needs.checks.outputs.artifact).found && fromJson(needs.checks.outputs.parameters).shouldDeploy }}
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Load parameters
id: parameters
uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7.1.0
env:
PARAM_JSON: ${{ needs.checks.outputs.parameters }}
with:
script: |
const parameters = JSON.parse(process.env.PARAM_JSON);
core.setOutput("event", parameters.event);
core.setOutput("name", parameters.name);
core.setOutput("shouldDeploy", parameters.shouldDeploy);
- name: Download artifact
uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7.1.0
env:
ARTIFACT_JSON: ${{ needs.checks.outputs.artifact }}
with:
script: |
let artifact = JSON.parse(process.env.ARTIFACT_JSON);
let download = await github.rest.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: artifact.id,
archive_format: 'zip',
});
let fs = require('fs');
fs.writeFileSync(`${process.env.GITHUB_WORKSPACE}/docs-build-output.zip`, Buffer.from(download.data));
- name: Unzip artifact
run: unzip "${{ github.workspace }}/docs-build-output.zip" -d "${{ github.workspace }}/docs/build"
- name: Deploy Docs Subdomain
env:
TF_VAR_prefix_name: ${{ steps.parameters.outputs.name}}
TF_VAR_prefix_event_type: ${{ steps.parameters.outputs.event }}
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
with:
tg_version: '0.58.12'
tofu_version: '1.7.1'
tg_dir: 'deployment/modules/cloudflare/docs'
tg_command: 'apply'
- name: Deploy Docs Subdomain Output
id: docs-output
env:
TF_VAR_prefix_name: ${{ steps.parameters.outputs.name}}
TF_VAR_prefix_event_type: ${{ steps.parameters.outputs.event }}
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
with:
tg_version: '0.58.12'
tofu_version: '1.7.1'
tg_dir: 'deployment/modules/cloudflare/docs'
tg_command: 'output -json'
- name: Output Cleaning
id: clean
env:
TG_OUTPUT: ${{ steps.docs-output.outputs.tg_action_output }}
run: |
CLEANED=$(echo "$TG_OUTPUT" | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
echo "output=$CLEANED" >> $GITHUB_OUTPUT
- name: Publish to Cloudflare Pages
# TODO: Action is deprecated
uses: cloudflare/pages-action@f0a1cd58cd66095dee69bfa18fa5efd1dde93bca # v1.5.0
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN_PAGES_UPLOAD }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
projectName: ${{ fromJson(steps.clean.outputs.output).pages_project_name.value }}
workingDirectory: 'docs'
directory: 'build'
branch: ${{ steps.parameters.outputs.name }}
wranglerVersion: '3'
- name: Deploy Docs Release Domain
if: ${{ steps.parameters.outputs.event == 'release' }}
env:
TF_VAR_prefix_name: ${{ steps.parameters.outputs.name}}
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
with:
tg_version: '0.58.12'
tofu_version: '1.7.1'
tg_dir: 'deployment/modules/cloudflare/docs-release'
tg_command: 'apply'
- name: Comment
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0
if: ${{ steps.parameters.outputs.event == 'pr' }}
with:
number: ${{ fromJson(needs.checks.outputs.parameters).pr_number }}
body: |
📖 Documentation deployed to [${{ fromJson(steps.clean.outputs.output).immich_app_branch_subdomain.value }}](https://${{ fromJson(steps.clean.outputs.output).immich_app_branch_subdomain.value }})
emojis: 'rocket'
body-include: '<!-- Docs PR URL -->'


@@ -1,40 +0,0 @@
name: Docs destroy
on:
pull_request_target: # zizmor: ignore[dangerous-triggers] no attacker inputs are used here
types: [closed]
permissions: {}
jobs:
deploy:
name: Docs Destroy
runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: write
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Destroy Docs Subdomain
env:
TF_VAR_prefix_name: 'pr-${{ github.event.number }}'
TF_VAR_prefix_event_type: 'pr'
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
with:
tg_version: '0.58.12'
tofu_version: '1.7.1'
tg_dir: 'deployment/modules/cloudflare/docs'
tg_command: 'destroy -refresh=false'
- name: Comment
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0
with:
number: ${{ github.event.number }}
delete: true
body-include: '<!-- Docs PR URL -->'


@@ -1,60 +0,0 @@
name: Fix formatting
on:
pull_request:
types: [labeled]
permissions: {}
jobs:
fix-formatting:
runs-on: ubuntu-latest
if: ${{ github.event.label.name == 'fix:formatting' }}
permissions:
contents: write
pull-requests: write
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@a8d616148505b5069dccd32f177bb87d7f39123b # v2.1.1
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: 'Checkout'
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
ref: ${{ github.event.pull_request.head.ref }}
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: true
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Fix formatting
run: make install-all && make format-all
- name: Commit and push
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4
with:
default_author: github_actions
message: 'chore: fix formatting'
- name: Remove label
uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7.1.0
if: always()
with:
script: |
github.rest.issues.removeLabel({
issue_number: context.payload.pull_request.number,
owner: context.repo.owner,
repo: context.repo.repo,
name: 'fix:formatting'
})


@@ -1,112 +0,0 @@
name: Merge translations
on:
workflow_dispatch:
workflow_call:
secrets:
PUSH_O_MATIC_APP_ID:
required: true
PUSH_O_MATIC_APP_KEY:
required: true
WEBLATE_TOKEN:
required: true
permissions: {}
env:
WEBLATE_HOST: 'https://hosted.weblate.org'
WEBLATE_COMPONENT: 'immich/immich'
jobs:
merge:
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- name: Find translation PR
id: find_pr
env:
GH_TOKEN: ${{ github.token }}
run: |
set -euo pipefail
PR=$(gh pr list --repo "$GITHUB_REPOSITORY" --author weblate --json number,mergeable)
echo "$PR"
PR_NUMBER=$(echo "$PR" | jq '
if length == 1 then
.[0].number
else
error("Expected exactly 1 entry, got \(length)")
end
' 2>&1) || exit 1
echo "PR_NUMBER=$PR_NUMBER" >> $GITHUB_OUTPUT
echo "Selected PR $PR_NUMBER"
if ! echo "$PR" | jq -e '.[0].mergeable == "MERGEABLE"'; then
echo "PR is not mergeable"
exit 1
fi
- name: Generate a token
id: generate_token
uses: actions/create-github-app-token@a8d616148505b5069dccd32f177bb87d7f39123b # v2.1.1
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Lock weblate
env:
WEBLATE_TOKEN: ${{ secrets.WEBLATE_TOKEN }}
run: |
curl --fail-with-body -X POST -H "Authorization: Token $WEBLATE_TOKEN" "$WEBLATE_HOST/api/components/$WEBLATE_COMPONENT/lock/" -d lock=true
- name: Commit translations
env:
WEBLATE_TOKEN: ${{ secrets.WEBLATE_TOKEN }}
run: |
curl --fail-with-body -X POST -H "Authorization: Token $WEBLATE_TOKEN" "$WEBLATE_HOST/api/components/$WEBLATE_COMPONENT/repository/" -d operation=commit
curl --fail-with-body -X POST -H "Authorization: Token $WEBLATE_TOKEN" "$WEBLATE_HOST/api/components/$WEBLATE_COMPONENT/repository/" -d operation=push
- name: Merge PR
id: merge_pr
env:
GH_TOKEN: ${{ steps.generate_token.outputs.token }}
PR_NUMBER: ${{ steps.find_pr.outputs.PR_NUMBER }}
run: |
set -euo pipefail
REVIEW_ID=$(gh api -X POST "repos/$GITHUB_REPOSITORY/pulls/$PR_NUMBER/reviews" --field event='APPROVE' --field body='Automatically merging translations PR' \
| jq '.id')
echo "REVIEW_ID=$REVIEW_ID" >> $GITHUB_OUTPUT
gh pr merge "$PR_NUMBER" --repo "$GITHUB_REPOSITORY" --auto --squash
- name: Wait for PR to merge
env:
GH_TOKEN: ${{ steps.generate_token.outputs.token }}
PR_NUMBER: ${{ steps.find_pr.outputs.PR_NUMBER }}
REVIEW_ID: ${{ steps.merge_pr.outputs.REVIEW_ID }}
run: |
# So we clean up no matter what
set +e
for i in {1..100}; do
if gh pr view "$PR_NUMBER" --repo "$GITHUB_REPOSITORY" --json state | jq -e '.state == "MERGED"'; then
echo "PR merged"
exit 0
else
echo "PR not merged yet, waiting..."
sleep 6
fi
done
echo "PR did not merge in time"
gh api -X PUT "repos/$GITHUB_REPOSITORY/pulls/$PR_NUMBER/reviews/$REVIEW_ID/dismissals" --field message='Merge attempt timed out' --field event='DISMISS'
gh pr merge "$PR_NUMBER" --repo "$GITHUB_REPOSITORY" --disable-auto
exit 1
- name: Unlock weblate
env:
WEBLATE_TOKEN: ${{ secrets.WEBLATE_TOKEN }}
run: |
curl --fail-with-body -X POST -H "Authorization: Token $WEBLATE_TOKEN" "$WEBLATE_HOST/api/components/$WEBLATE_COMPONENT/lock/" -d lock=false
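# Sketch (not part of the workflow): Weblate's lock endpoint also answers GET,
# so the lock state can be checked after a run, e.g.:
#   curl -s -H "Authorization: Token $WEBLATE_TOKEN" "$WEBLATE_HOST/api/components/$WEBLATE_COMPONENT/lock/"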


@@ -1,12 +0,0 @@
name: PR Conventional Commit
on:
pull_request:
types: [opened, synchronize, reopened, edited]
jobs:
validate-pr-title:
name: Validate PR Title (conventional commit)
uses: immich-app/devtools/.github/workflows/shared-pr-require-conventional-commit.yml@main
permissions:
pull-requests: write


@@ -1,15 +0,0 @@
name: Zizmor
on:
pull_request:
push:
branches: [main]
jobs:
zizmor:
name: Zizmor
uses: immich-app/devtools/.github/workflows/shared-zizmor.yml@main
permissions:
actions: read
contents: read
security-events: write


@@ -1,24 +0,0 @@
name: PR Label Validation
on:
pull_request_target: # zizmor: ignore[dangerous-triggers] no attacker inputs are used here
types: [opened, labeled, unlabeled, synchronize]
permissions: {}
jobs:
validate-release-label:
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: write
steps:
- name: Require PR to have a changelog label
uses: mheap/github-action-required-labels@8afbe8ae6ab7647d0c9f0cfa7c2f939650d22509 # v5.5.1
with:
mode: exactly
count: 1
use_regex: true
labels: 'changelog:.*'
add_comment: true
message: 'Label error. Requires {{errorString}} {{count}} of: {{ provided }}. Found: {{ applied }}. A maintainer will add the required label.'


@@ -1,14 +0,0 @@
name: 'Pull Request Labeler'
on:
- pull_request_target # zizmor: ignore[dangerous-triggers] no attacker inputs are used here
permissions: {}
jobs:
labeler:
permissions:
contents: read
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0

.github/workflows/pr-require-label.yml

@@ -0,0 +1,13 @@
name: Enforce PR labels
on:
pull_request:
types: [labeled, unlabeled, opened, edited, synchronize]
jobs:
enforce-label:
name: Enforce label
runs-on: ubuntu-latest
steps:
- if: toJson(github.event.pull_request.labels) == '[]'
run: exit 1


@@ -21,109 +21,63 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-root
cancel-in-progress: true
permissions: {}
jobs:
merge_translations:
uses: ./.github/workflows/merge-translations.yml
permissions:
pull-requests: write
secrets:
PUSH_O_MATIC_APP_ID: ${{ secrets.PUSH_O_MATIC_APP_ID }}
PUSH_O_MATIC_APP_KEY: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
WEBLATE_TOKEN: ${{ secrets.WEBLATE_TOKEN }}
bump_version:
runs-on: ubuntu-latest
outputs:
ref: ${{ steps.push-tag.outputs.commit_long_sha }}
permissions: {} # No job-level permissions are needed because it uses the app-token
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@a8d616148505b5069dccd32f177bb87d7f39123b # v2.1.1
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
uses: actions/checkout@v4
with:
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: true
token: ${{ secrets.ORG_RELEASE_TOKEN }}
- name: Install uv
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Install Poetry
run: pipx install poetry
- name: Bump version
env:
SERVER_BUMP: ${{ inputs.serverBump }}
MOBILE_BUMP: ${{ inputs.mobileBump }}
run: misc/release/pump-version.sh -s "${SERVER_BUMP}" -m "${MOBILE_BUMP}"
run: misc/release/pump-version.sh -s "${{ inputs.serverBump }}" -m "${{ inputs.mobileBump }}"
- name: Commit and tag
id: push-tag
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4
uses: EndBug/add-and-commit@v9
with:
default_author: github_actions
message: 'chore: version ${{ env.IMMICH_VERSION }}'
author_name: Alex The Bot
author_email: alex.tran1502@gmail.com
default_author: user_info
message: 'Version ${{ env.IMMICH_VERSION }}'
tag: ${{ env.IMMICH_VERSION }}
push: true
build_mobile:
uses: ./.github/workflows/build-mobile.yml
needs: bump_version
permissions:
contents: read
secrets:
KEY_JKS: ${{ secrets.KEY_JKS }}
ALIAS: ${{ secrets.ALIAS }}
ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
secrets: inherit
with:
ref: ${{ needs.bump_version.outputs.ref }}
prepare_release:
runs-on: ubuntu-latest
needs: build_mobile
permissions:
actions: read # To download the app artifact
# No content permissions are needed because it uses the app-token
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@a8d616148505b5069dccd32f177bb87d7f39123b # v2.1.1
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
steps:
- name: Checkout
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
uses: actions/checkout@v4
with:
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: false
token: ${{ secrets.ORG_RELEASE_TOKEN }}
- name: Download APK
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
uses: actions/download-artifact@v4
with:
name: release-apk-signed
- name: Create draft release
uses: softprops/action-gh-release@6cbd405e2c4e67a21c47fa9e383d020e4e28b836 # v2.3.3
uses: softprops/action-gh-release@v2
with:
draft: true
tag_name: ${{ env.IMMICH_VERSION }}
token: ${{ steps.generate-token.outputs.token }}
generate_release_notes: true
body_path: misc/release/notes.tmpl
files: |


@@ -1,47 +0,0 @@
name: Preview label
on:
pull_request:
types: [labeled, closed]
permissions: {}
jobs:
comment-status:
runs-on: ubuntu-latest
if: ${{ github.event.action == 'labeled' && github.event.label.name == 'preview' }}
permissions:
pull-requests: write
steps:
- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
with:
message-id: 'preview-status'
message: 'Deploying preview environment to https://pr-${{ github.event.pull_request.number }}.preview.internal.immich.cloud/'
remove-label:
runs-on: ubuntu-latest
if: ${{ (github.event.action == 'closed' || github.event.pull_request.head.repo.fork) && contains(github.event.pull_request.labels.*.name, 'preview') }}
permissions:
pull-requests: write
steps:
- uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7.1.0
with:
script: |
github.rest.issues.removeLabel({
issue_number: context.payload.pull_request.number,
owner: context.repo.owner,
repo: context.repo.repo,
name: 'preview'
})
- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
if: ${{ github.event.pull_request.head.repo.fork }}
with:
message-id: 'preview-status'
message: 'PRs from forks cannot have preview environments.'
- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
if: ${{ !github.event.pull_request.head.repo.fork }}
with:
message-id: 'preview-status'
message: 'Preview environment has been removed.'


@@ -4,37 +4,28 @@ on:
release:
types: [published]
permissions: {}
permissions:
packages: write
jobs:
publish:
name: Publish `@immich/sdk`
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./open-api/typescript-sdk
steps:
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- uses: actions/checkout@v4
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
- uses: actions/setup-node@v4
with:
node-version-file: './open-api/typescript-sdk/.nvmrc'
node-version: '20.x'
registry-url: 'https://registry.npmjs.org'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Install deps
run: pnpm install --frozen-lockfile
run: npm ci
- name: Build
run: pnpm build
run: npm run build
- name: Publish
run: pnpm publish
run: npm publish
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}


@@ -9,96 +9,35 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- name: Check what should run
id: check
uses: immich-app/devtools/actions/pre-job@24820aa4ef67959b0dcf69a438cccf00d7c7042b # pre-job-action-v1.0.1
with:
filters: |
mobile:
- 'mobile/**'
force-filters: |
- '.github/workflows/static_analysis.yml'
force-events: 'workflow_dispatch,release'
mobile-dart-analyze:
name: Run Dart Code Analysis
needs: pre-job
if: ${{ fromJSON(needs.pre-job.outputs.should_run).mobile == true }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./mobile
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Setup Flutter SDK
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
uses: subosito/flutter-action@v2
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
channel: "stable"
flutter-version: "3.19.3"
- name: Install dependencies
run: dart pub get
- name: Install DCM
uses: CQLabs/setup-dcm@8697ae0790c0852e964a6ef1d768d62a6675481a # v2.0.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
version: auto
working-directory: ./mobile
- name: Generate translation file
run: dart run easy_localization:generate -S ../i18n && dart run bin/generate_keys.dart
- name: Run Build Runner
run: make build
- name: Generate platform API
run: make pigeon
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
id: verify-changed-files
with:
files: |
mobile/**/*.g.dart
mobile/**/*.gr.dart
mobile/**/*.drift.dart
- name: Verify files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
run: |
echo "ERROR: Generated files not up to date! Run 'make build' and 'make pigeon' inside the mobile directory"
echo "Changed files: ${CHANGED_FILES}"
exit 1
working-directory: ./mobile
- name: Run dart analyze
run: dart analyze --fatal-infos
working-directory: ./mobile
- name: Run dart format
run: make format
run: dart format lib/ --set-exit-if-changed
working-directory: ./mobile
# TODO: Re-enable after upgrading custom_lint
# - name: Run dart custom_lint
# Enable after riverpod generator migration is completed
# - name: Run dart custom lint
# run: dart run custom_lint
# TODO: Use https://github.com/CQLabs/dcm-action
- name: Run DCM
run: dcm analyze lib --fatal-style --fatal-warnings
# working-directory: ./mobile


@@ -4,725 +4,423 @@ on:
pull_request:
push:
branches: [main]
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
pre-job:
doc-tests:
name: Docs
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run: ${{ steps.check.outputs.should_run }}
defaults:
run:
working-directory: ./docs
steps:
- name: Check what should run
id: check
uses: immich-app/devtools/actions/pre-job@24820aa4ef67959b0dcf69a438cccf00d7c7042b # pre-job-action-v1.0.1
with:
filters: |
i18n:
- 'i18n/**'
web:
- 'web/**'
- 'i18n/**'
- 'open-api/typescript-sdk/**'
server:
- 'server/**'
cli:
- 'cli/**'
- 'open-api/typescript-sdk/**'
e2e:
- 'e2e/**'
mobile:
- 'mobile/**'
machine-learning:
- 'machine-learning/**'
.github:
- '.github/**'
force-filters: |
- '.github/workflows/test.yml'
force-events: 'workflow_dispatch'
- name: Checkout code
uses: actions/checkout@v4
- name: Run npm install
run: npm ci
- name: Run formatter
run: npm run format
if: ${{ !cancelled() }}
- name: Run build
run: npm run build
if: ${{ !cancelled() }}
server-unit-tests:
name: Test & Lint Server
needs: pre-job
if: ${{ fromJSON(needs.pre-job.outputs.should_run).server == true }}
name: Server
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./server
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run package manager install
run: pnpm install
- name: Run linter
run: pnpm lint
if: ${{ !cancelled() }}
- name: Run formatter
run: pnpm format
if: ${{ !cancelled() }}
- name: Run tsc
run: pnpm check
if: ${{ !cancelled() }}
- name: Run small tests & coverage
run: pnpm test
if: ${{ !cancelled() }}
cli-unit-tests:
name: Unit Test CLI
needs: pre-job
if: ${{ fromJSON(needs.pre-job.outputs.should_run).cli == true }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./cli
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './cli/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup typescript-sdk
run: pnpm install && pnpm run build
working-directory: ./open-api/typescript-sdk
- name: Install deps
run: pnpm install
- name: Run linter
run: pnpm lint
if: ${{ !cancelled() }}
- name: Run formatter
run: pnpm format
if: ${{ !cancelled() }}
- name: Run tsc
run: pnpm check
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: pnpm test
if: ${{ !cancelled() }}
cli-unit-tests-win:
name: Unit Test CLI (Windows)
needs: pre-job
if: ${{ fromJSON(needs.pre-job.outputs.should_run).cli == true }}
runs-on: windows-latest
permissions:
contents: read
defaults:
run:
working-directory: ./cli
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './cli/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
- name: Install deps
run: pnpm install --frozen-lockfile
# Skip linter & formatter in Windows test.
- name: Run tsc
run: pnpm check
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: pnpm test
if: ${{ !cancelled() }}
web-lint:
name: Lint Web
needs: pre-job
if: ${{ fromJSON(needs.pre-job.outputs.should_run).web == true }}
runs-on: mich
permissions:
contents: read
defaults:
run:
working-directory: ./web
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
- name: Run pnpm install
run: pnpm rebuild && pnpm install --frozen-lockfile
- name: Run linter
run: pnpm lint:p
if: ${{ !cancelled() }}
- name: Run formatter
run: pnpm format
if: ${{ !cancelled() }}
- name: Run svelte checks
run: pnpm check:svelte
if: ${{ !cancelled() }}
web-unit-tests:
name: Test Web
needs: pre-job
if: ${{ fromJSON(needs.pre-job.outputs.should_run).web == true }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./web
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
uses: actions/checkout@v4
- name: Run npm install
run: pnpm install --frozen-lockfile
- name: Run tsc
run: pnpm check:typescript
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: pnpm test
if: ${{ !cancelled() }}
i18n-tests:
name: Test i18n
needs: pre-job
if: ${{ fromJSON(needs.pre-job.outputs.should_run).i18n == true }}
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './web/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Install dependencies
run: pnpm --filter=immich-web install --frozen-lockfile
- name: Format
run: pnpm --filter=immich-web format:i18n
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
id: verify-changed-files
with:
files: |
i18n/**
- name: Verify files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
run: |
echo "ERROR: i18n files not up to date!"
echo "Changed files: ${CHANGED_FILES}"
exit 1
e2e-tests-lint:
name: End-to-End Lint
needs: pre-job
if: ${{ fromJSON(needs.pre-job.outputs.should_run).e2e == true }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./e2e
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }}
- name: Install dependencies
run: pnpm install --frozen-lockfile
if: ${{ !cancelled() }}
run: npm ci
- name: Run linter
run: pnpm lint
run: npm run lint
if: ${{ !cancelled() }}
- name: Run formatter
run: pnpm format
run: npm run format
if: ${{ !cancelled() }}
- name: Run tsc
run: pnpm check
run: npm run check
if: ${{ !cancelled() }}
server-medium-tests:
name: Medium Tests (Server)
needs: pre-job
if: ${{ fromJSON(needs.pre-job.outputs.should_run).server == true }}
- name: Run unit tests & coverage
run: npm run test:cov
if: ${{ !cancelled() }}
cli-unit-tests:
name: CLI
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./server
working-directory: ./cli
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
uses: actions/checkout@v4
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
uses: actions/setup-node@v4
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run pnpm install
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm install --frozen-lockfile
- name: Run medium tests
run: pnpm test:medium
node-version: 20
- name: Setup typescript-sdk
run: npm ci && npm run build
working-directory: ./open-api/typescript-sdk
- name: Install deps
run: npm ci
- name: Run linter
run: npm run lint
if: ${{ !cancelled() }}
e2e-tests-server-cli:
name: End-to-End Tests (Server & CLI)
needs: pre-job
if: ${{ fromJSON(needs.pre-job.outputs.should_run).e2e == true || fromJSON(needs.pre-job.outputs.should_run).server == true || fromJSON(needs.pre-job.outputs.should_run).cli == true }}
runs-on: ${{ matrix.runner }}
permissions:
contents: read
- name: Run formatter
run: npm run format
if: ${{ !cancelled() }}
- name: Run tsc
run: npm run check
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: npm run test:cov
if: ${{ !cancelled() }}
web-unit-tests:
name: Web
runs-on: ubuntu-latest
defaults:
run:
working-directory: ./web
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Run setup typescript-sdk
run: npm ci && npm run build
working-directory: ./open-api/typescript-sdk
- name: Run npm install
run: npm ci
- name: Run linter
run: npm run lint
if: ${{ !cancelled() }}
- name: Run formatter
run: npm run format
if: ${{ !cancelled() }}
- name: Run svelte checks
run: npm run check:svelte
if: ${{ !cancelled() }}
- name: Run tsc
run: npm run check:typescript
if: ${{ !cancelled() }}
- name: Run unit tests & coverage
run: npm run test:cov
if: ${{ !cancelled() }}
e2e-tests:
name: End-to-End Tests
runs-on: ubuntu-latest
defaults:
run:
working-directory: ./e2e
strategy:
matrix:
runner: [ubuntu-latest, ubuntu-24.04-arm]
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
uses: actions/checkout@v4
with:
persist-credentials: false
submodules: 'recursive'
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
uses: actions/setup-node@v4
with:
node-version-file: './e2e/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
node-version: 20
- name: Run setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build
run: npm ci && npm run build
working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }}
- name: Run setup web
run: pnpm install --frozen-lockfile && pnpm exec svelte-kit sync
working-directory: ./web
if: ${{ !cancelled() }}
- name: Run setup cli
run: pnpm install --frozen-lockfile && pnpm build
run: npm ci && npm run build
working-directory: ./cli
if: ${{ !cancelled() }}
- name: Install dependencies
run: pnpm install --frozen-lockfile
run: npm ci
if: ${{ !cancelled() }}
- name: Docker build
run: docker compose build
- name: Run linter
run: npm run lint
if: ${{ !cancelled() }}
- name: Run e2e tests (api & cli)
run: pnpm test
- name: Run formatter
run: npm run format
if: ${{ !cancelled() }}
e2e-tests-web:
name: End-to-End Tests (Web)
needs: pre-job
if: ${{ fromJSON(needs.pre-job.outputs.should_run).e2e == true || fromJSON(needs.pre-job.outputs.should_run).web == true }}
runs-on: ${{ matrix.runner }}
permissions:
contents: read
defaults:
run:
working-directory: ./e2e
strategy:
matrix:
runner: [ubuntu-latest, ubuntu-24.04-arm]
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
submodules: 'recursive'
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run setup typescript-sdk
run: pnpm install --frozen-lockfile && pnpm build
working-directory: ./open-api/typescript-sdk
if: ${{ !cancelled() }}
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Run tsc
run: npm run check
if: ${{ !cancelled() }}
- name: Install Playwright Browsers
run: npx playwright install chromium --only-shell
run: npx playwright install --with-deps chromium
if: ${{ !cancelled() }}
- name: Docker build
run: docker compose build
if: ${{ !cancelled() }}
- name: Run e2e tests (api & cli)
run: npm run test
if: ${{ !cancelled() }}
- name: Run e2e tests (web)
run: npx playwright test
if: ${{ !cancelled() }}
success-check-e2e:
name: End-to-End Tests Success
needs: [e2e-tests-server-cli, e2e-tests-web]
permissions: {}
runs-on: ubuntu-latest
if: always()
steps:
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
with:
needs: ${{ toJSON(needs) }}
mobile-unit-tests:
name: Unit Test Mobile
needs: pre-job
if: ${{ fromJSON(needs.pre-job.outputs.should_run).mobile == true }}
name: Mobile
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- uses: actions/checkout@v4
- name: Setup Flutter SDK
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
uses: subosito/flutter-action@v2
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
- name: Generate translation file
run: dart run easy_localization:generate -S ../i18n && dart run bin/generate_keys.dart
working-directory: ./mobile
flutter-version: '3.19.3'
- name: Run tests
working-directory: ./mobile
run: flutter test -j 1
ml-unit-tests:
name: Unit Test ML
needs: pre-job
if: ${{ fromJSON(needs.pre-job.outputs.should_run).machine-learning == true }}
name: Machine Learning
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./machine-learning
steps:
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
- uses: actions/checkout@v4
- name: Install poetry
run: pipx install poetry
- uses: actions/setup-python@v5
with:
persist-credentials: false
- name: Install uv
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
# TODO: add caching when supported (https://github.com/actions/setup-python/pull/818)
# with:
# python-version: 3.11
# cache: 'uv'
python-version: 3.11
cache: 'poetry'
- name: Install dependencies
run: |
uv sync --extra cpu
poetry install --with dev --with cpu
- name: Lint with ruff
run: |
uv run ruff check --output-format=github immich_ml
poetry run ruff check --output-format=github app export
- name: Check black formatting
run: |
uv run black --check immich_ml
poetry run black --check app export
- name: Run mypy type checking
run: |
uv run mypy --strict immich_ml/
poetry run mypy --install-types --non-interactive --strict app/
- name: Run tests and coverage
run: |
uv run pytest --cov=immich_ml --cov-report term-missing
github-files-formatting:
name: .github Files Formatting
needs: pre-job
if: ${{ fromJSON(needs.pre-job.outputs.should_run)['.github'] == true }}
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
working-directory: ./.github
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './.github/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Run pnpm install
run: pnpm install --frozen-lockfile
- name: Run formatter
run: pnpm format
if: ${{ !cancelled() }}
poetry run pytest app --cov=app --cov-report term-missing
shellcheck:
name: ShellCheck
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- uses: actions/checkout@v4
- name: Run ShellCheck
uses: ludeeus/action-shellcheck@00cae500b08a931fb5698e11e79bfbd38e612a38 # 2.0.0
uses: ludeeus/action-shellcheck@master
with:
ignore_paths: >-
**/open-api/** **/openapi** **/node_modules/**
**/open-api/**
**/openapi/**
**/node_modules/**
generated-api-up-to-date:
name: OpenAPI Clients
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Install server dependencies
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm --filter immich install --frozen-lockfile
- name: Build the app
run: pnpm --filter immich build
- uses: actions/checkout@v4
- name: Run API generation
run: ./bin/generate-open-api.sh
working-directory: open-api
run: make open-api
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
uses: tj-actions/verify-changed-files@v19
id: verify-changed-files
with:
files: |
mobile/openapi
open-api/typescript-sdk
open-api/immich-openapi-specs.json
- name: Verify files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
run: |
echo "ERROR: Generated files not up to date!"
echo "Changed files: ${CHANGED_FILES}"
echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
exit 1
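# Local repro sketch, using the same commands as the steps above:
#   make open-api
#   git diff --exit-code -- mobile/openapi open-api/typescript-sdk open-api/immich-openapi-specs.json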
sql-schema-up-to-date:
name: SQL Schema Checks
generated-typeorm-migrations-up-to-date:
name: TypeORM Checks
runs-on: ubuntu-latest
permissions:
contents: read
services:
postgres:
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3@sha256:da52bbead5d818adaa8077c8dcdaad0aaf93038c31ad8348b51f9f0ec1310a4d
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
env:
POSTGRES_PASSWORD: postgres
POSTGRES_USER: postgres
POSTGRES_DB: immich
options: >-
--health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
defaults:
run:
working-directory: ./server
steps:
- name: Checkout code
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- name: Setup pnpm
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
- name: Setup Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
uses: actions/checkout@v4
- name: Install server dependencies
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm install --frozen-lockfile
run: npm ci
- name: Build the app
run: pnpm build
run: npm run build
- name: Run existing migrations
run: pnpm migrations:run
run: npm run typeorm:migrations:run
- name: Test npm run schema:reset command works
run: pnpm schema:reset
run: npm run typeorm:schema:reset
- name: Generate new migrations
continue-on-error: true
run: pnpm migrations:generate src/TestMigration
run: npm run typeorm:migrations:generate ./src/migrations/TestMigration
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
uses: tj-actions/verify-changed-files@v19
id: verify-changed-files
with:
files: |
server/src
server/src/migrations/
- name: Verify migration files have not changed
if: steps.verify-changed-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
run: |
echo "ERROR: Generated migration files not up to date!"
echo "Changed files: ${CHANGED_FILES}"
cat ./src/*-TestMigration.ts
echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
exit 1
- name: Run SQL generation
run: pnpm sync:sql
run: npm run sql:generate
env:
DB_URL: postgres://postgres:postgres@localhost:5432/immich
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
uses: tj-actions/verify-changed-files@v19
id: verify-changed-sql-files
with:
files: |
server/src/queries
- name: Verify SQL files have not changed
if: steps.verify-changed-sql-files.outputs.files_changed == 'true'
env:
CHANGED_FILES: ${{ steps.verify-changed-sql-files.outputs.changed_files }}
run: |
echo "ERROR: Generated SQL files not up to date!"
echo "Changed files: ${CHANGED_FILES}"
git diff
echo "Changed files: ${{ steps.verify-changed-sql-files.outputs.changed_files }}"
exit 1
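# Local repro sketch, mirroring the SQL generation step above:
#   DB_URL=postgres://postgres:postgres@localhost:5432/immich pnpm sync:sql
#   git diff --exit-code -- server/src/queries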
# mobile-integration-tests:
# name: Run mobile end-to-end integration tests
# runs-on: macos-latest
# steps:
# - uses: actions/checkout@v4
# - uses: actions/setup-java@v3
# with:
# distribution: 'zulu'
# java-version: '12.x'
# cache: 'gradle'
# - name: Cache android SDK
# uses: actions/cache@v3
# id: android-sdk
# with:
# key: android-sdk
# path: |
# /usr/local/lib/android/
# ~/.android
# - name: Cache Gradle
# uses: actions/cache@v3
# with:
# path: |
# ./mobile/build/
# ./mobile/android/.gradle/
# key: ${{ runner.os }}-flutter-${{ hashFiles('**/*.gradle*', 'pubspec.lock') }}
# - name: Setup Android SDK
# if: steps.android-sdk.outputs.cache-hit != 'true'
# uses: android-actions/setup-android@v2
# - name: AVD cache
# uses: actions/cache@v3
# id: avd-cache
# with:
# path: |
# ~/.android/avd/*
# ~/.android/adb*
# key: avd-29
# - name: create AVD and generate snapshot for caching
# if: steps.avd-cache.outputs.cache-hit != 'true'
# uses: reactivecircus/android-emulator-runner@v2.27.0
# with:
# working-directory: ./mobile
# cores: 2
# api-level: 29
# arch: x86_64
# profile: pixel
# target: default
# force-avd-creation: false
# emulator-options: -no-window -gpu swiftshader_indirect -noaudio -no-boot-anim -camera-back none
# disable-animations: false
# script: echo "Generated AVD snapshot for caching."
# - name: Setup Flutter SDK
# uses: subosito/flutter-action@v2
# with:
# channel: 'stable'
# flutter-version: '3.7.3'
# cache: true
# - name: Run integration tests
# uses: Wandalen/wretry.action@master
# with:
# action: reactivecircus/android-emulator-runner@v2.27.0
# with: |
# working-directory: ./mobile
# cores: 2
# api-level: 29
# arch: x86_64
# profile: pixel
# target: default
# force-avd-creation: false
# emulator-options: -no-snapshot-save -no-window -gpu swiftshader_indirect -noaudio -no-boot-anim -camera-back none
# disable-animations: true
# script: |
# flutter pub get
# flutter test integration_test
# attempt_limit: 3


@@ -1,61 +0,0 @@
name: Weblate checks
on:
pull_request:
branches: [main]
types:
- opened
- synchronize
- ready_for_review
- auto_merge_enabled
- auto_merge_disabled
permissions: {}
env:
BOT_NAME: immich-push-o-matic
jobs:
pre-job:
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- name: Check what should run
id: check
uses: immich-app/devtools/actions/pre-job@24820aa4ef67959b0dcf69a438cccf00d7c7042b # pre-job-action-v1.0.1
with:
filters: |
i18n:
- 'i18n/!(en)**\.json'
exclude-branches: 'chore/translations'
skip-force-logic: 'true'
enforce-lock:
name: Check Weblate Lock
needs: [pre-job]
runs-on: ubuntu-latest
permissions: {}
if: ${{ fromJSON(needs.pre-job.outputs.should_run).i18n == true }}
steps:
- name: Bot review status
env:
PR_NUMBER: ${{ github.event.pull_request.number || github.event.pull_request_review.pull_request.number }}
GH_TOKEN: ${{ github.token }}
run: |
# Check that the bot has APPROVED this PR; fail if it has not
gh pr view "$PR_NUMBER" --repo "$GITHUB_REPOSITORY" --json reviews | jq -e '.reviews | map(select(.author.login == env.BOT_NAME and .state == "APPROVED")) | length > 0' \
|| (echo "The push-o-matic bot has not approved this PR yet" && exit 1)
success-check-lock:
name: Weblate Lock Check Success
needs: [enforce-lock]
runs-on: ubuntu-latest
permissions: {}
if: always()
steps:
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
with:
needs: ${{ toJSON(needs) }}

.gitignore

@@ -3,7 +3,6 @@
.DS_Store
.vscode/*
!.vscode/launch.json
!.vscode/extensions.json
.idea
docker/upload
@@ -15,16 +14,7 @@ mobile/gradle.properties
mobile/openapi/pubspec.lock
mobile/*.jks
mobile/libisar.dylib
mobile/openapi/test
mobile/openapi/doc
mobile/openapi/.openapi-generator/FILES
mobile/ios/build
open-api/typescript-sdk/build
mobile/android/fastlane/report.xml
mobile/ios/fastlane/report.xml
vite.config.js.timestamp-*
.pnpm-store
.devcontainer/library
.devcontainer/.env*
mobile/ios/fastlane/report.xml

.gitmodules

@@ -1,6 +1,6 @@
[submodule "mobile/.isar"]
path = mobile/.isar
url = https://github.com/isar/isar
[submodule "e2e/test-assets"]
path = e2e/test-assets
[submodule "server/test/assets"]
path = server/test/assets
url = https://github.com/immich-app/test-assets


@@ -1,18 +0,0 @@
module.exports = {
hooks: {
readPackage: (pkg) => {
if (!pkg.name) {
return pkg;
}
if (pkg.name === "exiftool-vendored") {
if (pkg.optionalDependencies["exiftool-vendored.pl"]) {
// make exiftool-vendored.pl a regular dependency
pkg.dependencies["exiftool-vendored.pl"] =
pkg.optionalDependencies["exiftool-vendored.pl"];
delete pkg.optionalDependencies["exiftool-vendored.pl"];
}
}
return pkg;
},
},
};
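// Note: pnpm picks up a root-level .pnpmfile.cjs automatically on install.
// A quick check that the hook applied (sketch, assumes a pnpm workspace):
//   pnpm install --lockfile-only && pnpm why exiftool-vendored.pl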


@@ -1,10 +0,0 @@
{
"recommendations": [
"esbenp.prettier-vscode",
"svelte.svelte-vscode",
"dbaeumer.vscode-eslint",
"dart-code.flutter",
"dart-code.dart-code",
"dcmdev.dcm-vscode-extension"
]
}

.vscode/launch.json

@@ -5,18 +5,18 @@
"type": "node",
"request": "attach",
"restart": true,
"port": 9231,
"name": "Immich API Server",
"remoteRoot": "/usr/src/app/server",
"port": 9230,
"name": "Immich Server",
"remoteRoot": "/usr/src/app",
"localRoot": "${workspaceFolder}/server"
},
{
"type": "node",
"request": "attach",
"restart": true,
"port": 9230,
"name": "Immich Workers",
"remoteRoot": "/usr/src/app/server",
"port": 9231,
"name": "Immich Microservices",
"remoteRoot": "/usr/src/app",
"localRoot": "${workspaceFolder}/server"
}
]
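// Note: these attach configurations assume each Node process was started with
// the inspector listening on the matching port, e.g. (hypothetical dev
// invocation): node --inspect=0.0.0.0:9231 dist/main.js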

.vscode/settings.json

@@ -1,64 +1,34 @@
{
"[css]": {
"editor.formatOnSave": true,
"[javascript][typescript][css]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2,
"editor.formatOnSave": true
},
"[svelte]": {
"editor.defaultFormatter": "svelte.svelte-vscode",
"editor.tabSize": 2
},
"svelte.enable-ts-plugin": true,
"eslint.validate": [
"javascript",
"svelte"
],
"typescript.preferences.importModuleSpecifier": "non-relative",
"[dart]": {
"editor.defaultFormatter": "Dart-Code.dart-code",
"editor.formatOnSave": true,
"editor.selectionHighlight": false,
"editor.suggest.snippetsPreventQuickSuggestions": false,
"editor.suggestSelection": "first",
"editor.tabCompletion": "onlySnippets",
"editor.wordBasedSuggestions": "off"
"editor.wordBasedSuggestions": "off",
"editor.defaultFormatter": "Dart-Code.dart-code"
},
"[javascript]": {
"editor.codeActionsOnSave": {
"source.organizeImports": "explicit",
"source.removeUnusedImports": "explicit"
},
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2
},
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2
},
"[jsonc]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2
},
"[svelte]": {
"editor.codeActionsOnSave": {
"source.organizeImports": "explicit",
"source.removeUnusedImports": "explicit"
},
"editor.defaultFormatter": "svelte.svelte-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2
},
"[typescript]": {
"editor.codeActionsOnSave": {
"source.organizeImports": "explicit",
"source.removeUnusedImports": "explicit"
},
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true,
"editor.tabSize": 2
},
"cSpell.words": ["immich"],
"editor.formatOnSave": true,
"eslint.validate": ["javascript", "svelte"],
"cSpell.words": [
"immich"
],
"explorer.fileNesting.enabled": true,
"explorer.fileNesting.patterns": {
"*.dart": "${capture}.g.dart,${capture}.gr.dart,${capture}.drift.dart",
"*.ts": "${capture}.spec.ts,${capture}.mock.ts",
"package.json": "package-lock.json, yarn.lock, pnpm-lock.yaml, bun.lockb, bun.lock, pnpm-workspace.yaml, .pnpmfile.cjs"
},
"svelte.enable-ts-plugin": true,
"typescript.preferences.importModuleSpecifier": "non-relative"
}
"*.ts": "${capture}.spec.ts,${capture}.mock.ts"
}
}

.vscode/tasks.json

@@ -1,72 +0,0 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "Fix Permissions, Install Dependencies",
"type": "shell",
"command": "[ -f /immich-devcontainer/container-start.sh ] && /immich-devcontainer/container-start.sh || exit 0",
"presentation": {
"echo": true,
"reveal": "always",
"focus": false,
"panel": "dedicated",
"showReuseMessage": true,
"clear": false,
"group": "Devcontainer tasks",
"close": true
},
"runOptions": {
"runOn": "default"
},
"problemMatcher": []
},
{
"label": "Immich API Server (Nest)",
"dependsOn": ["Fix Permissions, Install Dependencies"],
"type": "shell",
"command": "[ -f /immich-devcontainer/container-start-backend.sh ] && /immich-devcontainer/container-start-backend.sh || exit 0",
"presentation": {
"echo": true,
"reveal": "always",
"focus": false,
"panel": "dedicated",
"showReuseMessage": true,
"clear": false,
"group": "Devcontainer tasks",
"close": true
},
"runOptions": {
"runOn": "default"
},
"problemMatcher": []
},
{
"label": "Immich Web Server (Vite)",
"dependsOn": ["Fix Permissions, Install Dependencies"],
"type": "shell",
"command": "[ -f /immich-devcontainer/container-start-frontend.sh ] && /immich-devcontainer/container-start-frontend.sh || exit 0",
"presentation": {
"echo": true,
"reveal": "always",
"focus": false,
"panel": "dedicated",
"showReuseMessage": true,
"clear": false,
"group": "Devcontainer tasks",
"close": true
},
"runOptions": {
"runOn": "default"
},
"problemMatcher": []
},
{
"label": "Immich Server and Web",
"dependsOn": ["Immich Web Server (Vite)", "Immich API Server (Nest)"],
"runOptions": {
"runOn": "folderOpen"
},
"problemMatcher": []
}
]
}


@@ -1,7 +1,5 @@
/.github/ @bo0tzz
/docker/ @bo0tzz
/server/ @danieldietzler
/web/ @danieldietzler
/machine-learning/ @mertalev
/e2e/ @danieldietzler
/mobile/ @shenlong-tanwen


@@ -131,4 +131,4 @@ conduct enforcement ladder](https://github.com/mozilla/diversity).
For answers to common questions about this code of conduct, see the
FAQ at https://www.contributor-covenant.org/faq. Translations are
available at https://www.contributor-covenant.org/translations.
available at https://www.contributor-covenant.org/translations.

Makefile

@@ -1,36 +1,30 @@
dev:
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --remove-orphans
docker compose -f ./docker/docker-compose.dev.yml up --remove-orphans || make dev-down
dev-down:
docker compose -f ./docker/docker-compose.dev.yml down --remove-orphans
dev-update:
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --build -V --remove-orphans
docker compose -f ./docker/docker-compose.dev.yml up --build -V --remove-orphans
dev-scale:
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans
docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans
dev-docs:
npm --prefix docs run start
stage:
docker compose -f ./docker/docker-compose.staging.yml up --build -V --remove-orphans
pull-stage:
docker compose -f ./docker/docker-compose.staging.yml pull
.PHONY: e2e
e2e:
@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --remove-orphans
e2e-update:
@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
e2e-down:
docker compose -f ./e2e/docker-compose.yml down --remove-orphans
docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
prod:
@trap 'make prod-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans
prod-down:
docker compose -f ./docker/docker-compose.prod.yml down --remove-orphans
docker compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans
prod-scale:
@trap 'make prod-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans
docker compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans
.PHONY: open-api
open-api:
@@ -43,109 +37,7 @@ open-api-typescript:
cd ./open-api && bash ./bin/generate-open-api.sh typescript
sql:
pnpm --filter immich run sync:sql
npm --prefix server run sql:generate
attach-server:
docker exec -it docker_immich-server_1 sh
renovate:
LOG_LEVEL=debug npx renovate --platform=local --repository-cache=reset
# Directories that need to be created for volumes or build output
VOLUME_DIRS = \
./.pnpm-store \
./web/.svelte-kit \
./web/node_modules \
./web/coverage \
./e2e/node_modules \
./docs/node_modules \
./server/node_modules \
./open-api/typescript-sdk/node_modules \
./.github/node_modules \
./node_modules \
./cli/node_modules
# Include .env file if it exists
-include docker/.env
MODULES = e2e server web cli sdk docs .github
# directory to package name mapping function
# cli = @immich/cli
# docs = documentation
# e2e = immich-e2e
# open-api/typescript-sdk = @immich/sdk
# server = immich
# web = immich-web
map-package = $(subst sdk,@immich/sdk,$(subst cli,@immich/cli,$(subst docs,documentation,$(subst e2e,immich-e2e,$(subst server,immich,$(subst web,immich-web,$1))))))
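# Example (follows from the pattern rules below): `make build-web` runs
# build-sdk first, i.e. `pnpm --filter @immich/sdk run build`, then
# `pnpm --filter immich-web run build`.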
audit-%:
pnpm --filter $(call map-package,$*) audit fix
install-%:
pnpm --filter $(call map-package,$*) install $(if $(FROZEN),--frozen-lockfile) $(if $(OFFLINE),--offline)
build-cli: build-sdk
build-web: build-sdk
build-%: install-%
pnpm --filter $(call map-package,$*) run build
format-%:
pnpm --filter $(call map-package,$*) run format:fix
lint-%:
pnpm --filter $(call map-package,$*) run lint:fix
lint-web:
pnpm --filter $(call map-package,$*) run lint:p
check-%:
pnpm --filter $(call map-package,$*) run check
check-web:
pnpm --filter immich-web run check:typescript
pnpm --filter immich-web run check:svelte
test-%:
pnpm --filter $(call map-package,$*) run test
test-e2e:
docker compose -f ./e2e/docker-compose.yml build
pnpm --filter immich-e2e run test
pnpm --filter immich-e2e run test:web
test-medium:
docker run \
--rm \
-v ./server/src:/usr/src/app/src \
-v ./server/test:/usr/src/app/test \
-v ./server/vitest.config.medium.mjs:/usr/src/app/vitest.config.medium.mjs \
-v ./server/tsconfig.json:/usr/src/app/tsconfig.json \
-e NODE_ENV=development \
immich-server:latest \
-c "pnpm test:medium -- --run"
test-medium-dev:
docker exec -it immich_server /bin/sh -c "pnpm run test:medium"
install-all:
pnpm -r --filter '!documentation' install
build-all: $(foreach M,$(filter-out e2e docs .github,$(MODULES)),build-$M) ;
check-all:
pnpm -r --filter '!documentation' run "/^(check|check\:svelte|check\:typescript)$/"
lint-all:
pnpm -r --filter '!documentation' run lint:fix
format-all:
pnpm -r --filter '!documentation' run format:fix
audit-all:
pnpm -r --filter '!documentation' audit fix
hygiene-all: audit-all
pnpm -r --filter '!documentation' run "/(format:fix|check|check:svelte|check:typescript|sql)/"
test-all:
pnpm -r --filter '!documentation' run "/^test/"
clean:
find . -name "node_modules" -type d -prune -exec rm -rf {} +
find . -name "dist" -type d -prune -exec rm -rf '{}' +
find . -name "build" -type d -prune -exec rm -rf '{}' +
find . -name ".svelte-kit" -type d -prune -exec rm -rf '{}' +
find . -name "coverage" -type d -prune -exec rm -rf '{}' +
find . -name ".pnpm-store" -type d -prune -exec rm -rf '{}' +
command -v docker >/dev/null 2>&1 && docker compose -f ./docker/docker-compose.dev.yml down -v --remove-orphans || true
command -v docker >/dev/null 2>&1 && docker compose -f ./e2e/docker-compose.yml down -v --remove-orphans || true
setup-server-dev: install-server
setup-web-dev: install-sdk build-sdk install-web


@@ -1,11 +1,11 @@
<p align="center">
<br/>
<br/>
<a href="https://opensource.org/license/agpl-v3"><img src="https://img.shields.io/badge/License-AGPL_v3-blue.svg?color=3F51B5&style=for-the-badge&label=License&logoColor=000000&labelColor=ececec" alt="License: AGPLv3"></a>
<a href="https://discord.immich.app">
<a href="https://discord.gg/D8JsnBEuKb">
<img src="https://img.shields.io/discord/979116623879368755.svg?label=Discord&logo=Discord&style=for-the-badge&logoColor=000000&labelColor=ececec" alt="Discord"/>
</a>
<br/>
<br/>
<br/>
<br/>
</p>
<p align="center">
@@ -17,7 +17,6 @@
<img src="design/immich-screenshots.png" title="Main Screenshot">
</a>
<br/>
<p align="center">
<a href="readme_i18n/README_ca_ES.md">Català</a>
<a href="readme_i18n/README_es_ES.md">Español</a>
@@ -29,13 +28,7 @@
<a href="readme_i18n/README_nl_NL.md">Nederlands</a>
<a href="readme_i18n/README_tr_TR.md">Türkçe</a>
<a href="readme_i18n/README_zh_CN.md">中文</a>
<a href="readme_i18n/README_uk_UA.md">Українська</a>
<a href="readme_i18n/README_ru_RU.md">Русский</a>
<a href="readme_i18n/README_pt_BR.md">Português Brasileiro</a>
<a href="readme_i18n/README_sv_SE.md">Svenska</a>
<a href="readme_i18n/README_ar_JO.md">العربية</a>
<a href="readme_i18n/README_vi_VN.md">Tiếng Việt</a>
<a href="readme_i18n/README_th_TH.md">ภาษาไทย</a>
</p>
## Disclaimer
@@ -45,34 +38,45 @@
- ⚠️ **Do not use the app as the only way to store your photos and videos.**
- ⚠️ Always follow [3-2-1](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) backup plan for your precious photos and videos!
> [!NOTE]
> You can find the main documentation, including installation guides, at https://immich.app/.
## Content
## Links
- [Documentation](https://immich.app/docs)
- [About](https://immich.app/docs/overview/introduction)
- [Installation](https://immich.app/docs/install/requirements)
- [Roadmap](https://immich.app/roadmap)
- [Official Documentation](https://immich.app/docs)
- [Roadmap](https://github.com/orgs/immich-app/projects/1)
- [Demo](#demo)
- [Features](#features)
- [Translations](https://immich.app/docs/developer/translations)
- [Contributing](https://immich.app/docs/overview/support-the-project)
- [Introduction](https://immich.app/docs/overview/introduction)
- [Installation](https://immich.app/docs/install/requirements)
- [Contribution Guidelines](https://immich.app/docs/overview/support-the-project)
- [Support The Project](#support-the-project)
## Documentation
You can find the main documentation, including installation guides, at https://immich.app/.
## Demo
Access the demo [here](https://demo.immich.app). For the mobile app, you can use `https://demo.immich.app` for the `Server Endpoint URL`.
You can access the web demo at https://demo.immich.app
### Login credentials
For the mobile app, you can use `https://demo.immich.app/api` for the `Server Endpoint URL`
| Email | Password |
| --------------- | -------- |
| demo@immich.app | demo |
```bash title="Demo Credential"
The credentials
email: demo@immich.app
password: demo
```
```
Spec: Free-tier Oracle VM - Amsterdam - 2.4 GHz quad-core ARM64 CPU, 24GB RAM
```
## Activities
![Activities](https://repobeats.axiom.co/api/embed/9e86d9dc3ddd137161f2f6d2e758d7863b1789cb.svg "Repobeats analytics image")
## Features
| Features | Mobile | Web |
| :------------------------------------------- | ------ | --- |
| :--------------------------------------------- | -------- | ----- |
| Upload and view videos and photos | Yes | Yes |
| Auto backup when the app is opened | Yes | N/A |
| Prevent duplication of assets | Yes | Yes |
@@ -92,7 +96,7 @@ Access the demo [here](https://demo.immich.app). For the mobile app, you can use
| LivePhoto/MotionPhoto backup and playback | Yes | Yes |
| Support 360 degree image display | No | Yes |
| User-defined storage structure | Yes | Yes |
| Public Sharing | Yes | Yes |
| Public Sharing | No | Yes |
| Archive and Favorites | Yes | Yes |
| Global Map | Yes | Yes |
| Partner Sharing | Yes | Yes |
@@ -101,22 +105,31 @@ Access the demo [here](https://demo.immich.app). For the mobile app, you can use
| Offline support | Yes | No |
| Read-only gallery | Yes | Yes |
| Stacked Photos | Yes | Yes |
| Tags | No | Yes |
| Folder View | Yes | Yes |
## Translations
## Support the project
Read more about translations [here](https://immich.app/docs/developer/translations).
I've committed to this project, and I will not stop. I will keep updating the docs, adding new features, and fixing bugs. But I can't do it alone. So I need your help to give me additional motivation to keep going.
<a href="https://hosted.weblate.org/engage/immich/">
<img src="https://hosted.weblate.org/widget/immich/immich/multi-auto.svg" alt="Translation status" />
As our hosts said in the [selfhosted.show episode 'The-organization-must-not-be-name is a Hostile Actor'](https://selfhosted.show/79?t=1418), what the team and I are doing is a massive undertaking. I would love to someday be able to do this full-time, and I am asking for your help to make that happen.
If you feel this is the right cause and the app is something you see yourself using for a long time, please consider supporting the project with one of the options below.
### Donation
- [Monthly donation](https://github.com/sponsors/immich-app) via GitHub Sponsors
- [One-time donation](https://github.com/sponsors/immich-app?frequency=one-time&sponsor=alextran1502) via GitHub Sponsors
- [Liberapay](https://liberapay.com/alex.tran1502/)
- [buymeacoffee](https://www.buymeacoffee.com/altran1502)
- Bitcoin: 3QVAb9dCHutquVejeNXitPqZX26Yg5kxb7
- ZCash: u1smm4wvqegcp46zss2jf5xptchgeczp4rx7a0wu3mermf2wxahm26yyz5w9mw3f2p4emwlljxjumg774kgs8rntt9yags0whnzane4n67z4c7gppq4yyvcj404ne3r769prwzd9j8ntvqp44fa6d67sf7rmcfjmds3gmeceff4u8e92rh38nd30cr96xw6vfhk6scu4ws90ldzupr3sz
## Contributors
<a href="https://github.com/alextran1502/immich/graphs/contributors">
<img src="https://contrib.rocks/image?repo=immich-app/immich" width="100%"/>
</a>
## Repository activity
![Activities](https://repobeats.axiom.co/api/embed/9e86d9dc3ddd137161f2f6d2e758d7863b1789cb.svg "Repobeats analytics image")
## Star history
## Star History
<a href="https://star-history.com/#immich-app/immich&Date">
<picture>
@@ -125,9 +138,3 @@ Read more about translations [here](https://immich.app/docs/developer/translatio
<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" width="100%" />
</picture>
</a>
## Contributors
<a href="https://github.com/alextran1502/immich/graphs/contributors">
<img src="https://contrib.rocks/image?repo=immich-app/immich" width="100%"/>
</a>

View File

@@ -2,4 +2,4 @@
## Reporting a Vulnerability
Please report security issues to `security@immich.app`
Please report security issues to `alex.tran1502@gmail.com`

1
cli/.eslintignore Normal file
View File

@@ -0,0 +1 @@
/dist

28
cli/.eslintrc.cjs Normal file
View File

@@ -0,0 +1,28 @@
module.exports = {
parser: '@typescript-eslint/parser',
parserOptions: {
project: 'tsconfig.json',
sourceType: 'module',
tsconfigRootDir: __dirname,
},
plugins: ['@typescript-eslint/eslint-plugin'],
extends: ['plugin:@typescript-eslint/recommended', 'plugin:prettier/recommended', 'plugin:unicorn/recommended'],
root: true,
env: {
node: true,
},
ignorePatterns: ['.eslintrc.js'],
rules: {
'@typescript-eslint/interface-name-prefix': 'off',
'@typescript-eslint/explicit-function-return-type': 'off',
'@typescript-eslint/explicit-module-boundary-types': 'off',
'@typescript-eslint/no-explicit-any': 'off',
'@typescript-eslint/no-floating-promises': 'error',
'unicorn/prefer-module': 'off',
'unicorn/prevent-abbreviations': 'off',
'unicorn/no-process-exit': 'off',
'unicorn/import-style': 'off',
curly: 2,
'prettier/prettier': 0,
},
};

View File

@@ -1 +0,0 @@
22.19.0

View File

@@ -1,14 +1,19 @@
FROM node:22.16.0-alpine3.20@sha256:2289fb1fba0f4633b08ec47b94a89c7e20b829fc5679f9b7b298eaa2f1ed8b7e AS core
FROM node:20-alpine3.19@sha256:ec0c413b1d84f3f7f67ec986ba885930c57b5318d2eb3abc6960ee05d4f2eb28 as core
WORKDIR /usr/src/open-api/typescript-sdk
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
RUN npm ci
COPY open-api/typescript-sdk/ ./
RUN npm run build
WORKDIR /usr/src/app
COPY package* pnpm* .pnpmfile.cjs ./
COPY ./cli ./cli/
COPY ./open-api/typescript-sdk ./open-api/typescript-sdk/
RUN corepack enable pnpm && \
pnpm install --filter @immich/sdk --filter @immich/cli --frozen-lockfile && \
pnpm --filter @immich/sdk build && \
pnpm --filter @immich/cli build
COPY cli/package.json cli/package-lock.json ./
RUN npm ci
COPY cli .
RUN npm run build
WORKDIR /import
ENTRYPOINT ["node", "/usr/src/app/cli/dist"]
ENTRYPOINT ["node", "/usr/src/app/dist"]

View File

@@ -4,18 +4,8 @@ Please see the [Immich CLI documentation](https://immich.app/docs/features/comma
# For developers
Before building the CLI, you must build the Immich server and the open-api client. To build the server, run the following in the server folder:
$ npm install
$ npm run build
Then, to build the open-api client, run the following in the open-api folder:
$ ./bin/generate-open-api.sh
To run the Immich CLI from source, run the following in the cli folder:
$ npm install
$ npm run build
$ ts-node .
@@ -27,4 +17,3 @@ You can also build and install the CLI using
$ npm run build
$ npm install -g .
****

View File

@@ -1,2 +0,0 @@
#!/usr/bin/env node
import '../dist/index.js';

View File

@@ -1,51 +0,0 @@
import js from '@eslint/js';
import eslintPluginPrettierRecommended from 'eslint-plugin-prettier/recommended';
import eslintPluginUnicorn from 'eslint-plugin-unicorn';
import globals from 'globals';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import typescriptEslint from 'typescript-eslint';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
export default typescriptEslint.config([
eslintPluginUnicorn.configs.recommended,
eslintPluginPrettierRecommended,
js.configs.recommended,
typescriptEslint.configs.recommended,
{
ignores: ['eslint.config.mjs', 'dist'],
},
{
languageOptions: {
globals: {
...globals.node,
},
parser: typescriptEslint.parser,
ecmaVersion: 5,
sourceType: 'module',
parserOptions: {
project: 'tsconfig.json',
tsconfigRootDir: __dirname,
},
},
rules: {
'@typescript-eslint/interface-name-prefix': 'off',
'@typescript-eslint/explicit-function-return-type': 'off',
'@typescript-eslint/explicit-module-boundary-types': 'off',
'@typescript-eslint/no-explicit-any': 'off',
'@typescript-eslint/no-floating-promises': 'error',
'unicorn/prefer-module': 'off',
'unicorn/prevent-abbreviations': 'off',
'unicorn/no-process-exit': 'off',
'unicorn/import-style': 'off',
curly: 2,
'prettier/prettier': 0,
'object-shorthand': ['error', 'always'],
},
},
]);

4757
cli/package-lock.json generated Normal file

File diff suppressed because it is too large

View File

@@ -1,11 +1,11 @@
{
"name": "@immich/cli",
"version": "2.2.90",
"version": "2.2.0",
"description": "Command Line Interface (CLI) for Immich",
"type": "module",
"exports": "./dist/index.js",
"bin": {
"immich": "./bin/immich"
"immich": "dist/index.js"
},
"license": "GNU Affero General Public License version 3",
"keywords": [
@@ -13,32 +13,30 @@
"cli"
],
"devDependencies": {
"@eslint/js": "^9.8.0",
"@immich/sdk": "file:../open-api/typescript-sdk",
"@types/byte-size": "^8.1.0",
"@types/cli-progress": "^3.11.0",
"@types/lodash-es": "^4.17.12",
"@types/micromatch": "^4.0.9",
"@types/mock-fs": "^4.13.1",
"@types/node": "^22.18.1",
"@vitest/coverage-v8": "^3.0.0",
"byte-size": "^9.0.0",
"@types/node": "^20.3.1",
"@typescript-eslint/eslint-plugin": "^7.0.0",
"@typescript-eslint/parser": "^7.0.0",
"@vitest/coverage-v8": "^1.2.2",
"byte-size": "^8.1.1",
"cli-progress": "^3.12.0",
"commander": "^12.0.0",
"eslint": "^9.14.0",
"eslint-config-prettier": "^10.1.8",
"eslint": "^8.56.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^60.0.0",
"globals": "^16.0.0",
"eslint-plugin-unicorn": "^52.0.0",
"glob": "^10.3.1",
"mock-fs": "^5.2.0",
"prettier": "^3.2.5",
"prettier-plugin-organize-imports": "^4.0.0",
"prettier-plugin-organize-imports": "^3.2.4",
"typescript": "^5.3.3",
"typescript-eslint": "^8.28.0",
"vite": "^7.0.0",
"vite-tsconfig-paths": "^5.0.0",
"vitest": "^3.0.0",
"vitest-fetch-mock": "^0.4.0",
"vite": "^5.0.12",
"vite-tsconfig-paths": "^4.3.2",
"vitest": "^1.2.2",
"yaml": "^2.3.1"
},
"scripts": {
@@ -61,13 +59,6 @@
"node": ">=20.0.0"
},
"dependencies": {
"chokidar": "^4.0.3",
"fast-glob": "^3.3.2",
"fastq": "^1.17.1",
"lodash-es": "^4.17.21",
"micromatch": "^4.0.8"
},
"volta": {
"node": "22.19.0"
"lodash-es": "^4.17.21"
}
}

View File

@@ -1,311 +0,0 @@
import * as fs from 'node:fs';
import * as os from 'node:os';
import * as path from 'node:path';
import { setTimeout as sleep } from 'node:timers/promises';
import { describe, expect, it, MockedFunction, vi } from 'vitest';
import { Action, checkBulkUpload, defaults, getSupportedMediaTypes, Reason } from '@immich/sdk';
import createFetchMock from 'vitest-fetch-mock';
import { checkForDuplicates, getAlbumName, startWatch, uploadFiles, UploadOptionsDto } from 'src/commands/asset';
vi.mock('@immich/sdk');
describe('getAlbumName', () => {
it('should return a non-undefined value', () => {
if (os.platform() === 'win32') {
// This is meaningless for Unix systems.
expect(getAlbumName(String.raw`D:\test\Filename.txt`, {} as UploadOptionsDto)).toBe('test');
}
expect(getAlbumName('D:/parentfolder/test/Filename.txt', {} as UploadOptionsDto)).toBe('test');
});
it('prefers `albumName` from `options` when provided', () => {
expect(getAlbumName('/parentfolder/test/Filename.txt', { albumName: 'example' } as UploadOptionsDto)).toBe(
'example',
);
});
});
describe('uploadFiles', () => {
const testDir = fs.mkdtempSync(path.join(os.tmpdir(), 'test-'));
const testFilePath = path.join(testDir, 'test.png');
const testFileData = 'test';
const baseUrl = 'http://example.com';
const apiKey = 'key';
const retry = 3;
const fetchMocker = createFetchMock(vi);
beforeEach(() => {
// Create a test file
fs.writeFileSync(testFilePath, testFileData);
// Defaults
vi.mocked(defaults).baseUrl = baseUrl;
vi.mocked(defaults).headers = { 'x-api-key': apiKey };
fetchMocker.enableMocks();
fetchMocker.resetMocks();
});
it('returns new assets when upload file is successful', async () => {
fetchMocker.doMockIf(new RegExp(`${baseUrl}/assets$`), () => {
return {
status: 200,
body: JSON.stringify({ id: 'fc5621b1-86f6-44a1-9905-403e607df9f5', status: 'created' }),
};
});
await expect(uploadFiles([testFilePath], { concurrency: 1 })).resolves.toEqual([
{
filepath: testFilePath,
id: 'fc5621b1-86f6-44a1-9905-403e607df9f5',
},
]);
});
it('returns new assets when upload file retry is successful', async () => {
let counter = 0;
fetchMocker.doMockIf(new RegExp(`${baseUrl}/assets$`), () => {
counter++;
if (counter < retry) {
throw new Error('Network error');
}
return {
status: 200,
body: JSON.stringify({ id: 'fc5621b1-86f6-44a1-9905-403e607df9f5', status: 'created' }),
};
});
await expect(uploadFiles([testFilePath], { concurrency: 1 })).resolves.toEqual([
{
filepath: testFilePath,
id: 'fc5621b1-86f6-44a1-9905-403e607df9f5',
},
]);
});
it('returns no new assets when upload file retry fails', async () => {
fetchMocker.doMockIf(new RegExp(`${baseUrl}/assets$`), () => {
throw new Error('Network error');
});
await expect(uploadFiles([testFilePath], { concurrency: 1 })).resolves.toEqual([]);
});
});
describe('checkForDuplicates', () => {
const testDir = fs.mkdtempSync(path.join(os.tmpdir(), 'test-'));
const testFilePath = path.join(testDir, 'test.png');
const testFileData = 'test';
const testFileChecksum = 'a94a8fe5ccb19ba61c4c0873d391e987982fbbd3'; // SHA1
const retry = 3;
beforeEach(() => {
// Create a test file
fs.writeFileSync(testFilePath, testFileData);
});
it('checks duplicates', async () => {
vi.mocked(checkBulkUpload).mockResolvedValue({
results: [
{
action: Action.Accept,
id: testFilePath,
},
],
});
await checkForDuplicates([testFilePath], { concurrency: 1 });
expect(checkBulkUpload).toHaveBeenCalledWith({
assetBulkUploadCheckDto: {
assets: [
{
checksum: testFileChecksum,
id: testFilePath,
},
],
},
});
});
it('returns duplicates when check duplicates is rejected', async () => {
vi.mocked(checkBulkUpload).mockResolvedValue({
results: [
{
action: Action.Reject,
id: testFilePath,
assetId: 'fc5621b1-86f6-44a1-9905-403e607df9f5',
reason: Reason.Duplicate,
},
],
});
await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).resolves.toEqual({
duplicates: [
{
filepath: testFilePath,
id: 'fc5621b1-86f6-44a1-9905-403e607df9f5',
},
],
newFiles: [],
});
});
it('returns new assets when check duplicates is accepted', async () => {
vi.mocked(checkBulkUpload).mockResolvedValue({
results: [
{
action: Action.Accept,
id: testFilePath,
},
],
});
await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).resolves.toEqual({
duplicates: [],
newFiles: [testFilePath],
});
});
it('returns results when check duplicates retry is successful', async () => {
let mocked = vi.mocked(checkBulkUpload);
for (let i = 1; i < retry; i++) {
mocked = mocked.mockRejectedValueOnce(new Error('Network error'));
}
mocked.mockResolvedValue({
results: [
{
action: Action.Accept,
id: testFilePath,
},
],
});
await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).resolves.toEqual({
duplicates: [],
newFiles: [testFilePath],
});
});
it('returns empty results when check duplicates retry fails', async () => {
vi.mocked(checkBulkUpload).mockRejectedValue(new Error('Network error'));
await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).resolves.toEqual({
duplicates: [],
newFiles: [],
});
});
});
describe('startWatch', () => {
let testFolder: string;
let checkBulkUploadMocked: MockedFunction<typeof checkBulkUpload>;
beforeEach(async () => {
vi.restoreAllMocks();
vi.mocked(getSupportedMediaTypes).mockResolvedValue({
image: ['.jpg'],
sidecar: ['.xmp'],
video: ['.mp4'],
});
testFolder = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'test-startWatch-'));
checkBulkUploadMocked = vi.mocked(checkBulkUpload);
checkBulkUploadMocked.mockResolvedValue({
results: [],
});
});
it('should start watching a directory and upload new files', async () => {
const testFilePath = path.join(testFolder, 'test.jpg');
await startWatch([testFolder], { concurrency: 1 }, { batchSize: 1, debounceTimeMs: 10 });
await sleep(100); // let the watcher's debounce settle so it does not treat the test file as an existing file
await fs.promises.writeFile(testFilePath, 'testjpg');
await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
expect(checkBulkUpload).toHaveBeenCalledWith({
assetBulkUploadCheckDto: {
assets: [
expect.objectContaining({
id: testFilePath,
}),
],
},
});
});
it('should filter out unsupported files', async () => {
const testFilePath = path.join(testFolder, 'test.jpg');
const unsupportedFilePath = path.join(testFolder, 'test.txt');
await startWatch([testFolder], { concurrency: 1 }, { batchSize: 1, debounceTimeMs: 10 });
await sleep(100); // let the watcher's debounce settle so it does not treat the test file as an existing file
await fs.promises.writeFile(testFilePath, 'testjpg');
await fs.promises.writeFile(unsupportedFilePath, 'testtxt');
await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
expect(checkBulkUpload).toHaveBeenCalledWith({
assetBulkUploadCheckDto: {
assets: expect.arrayContaining([
expect.objectContaining({
id: testFilePath,
}),
]),
},
});
expect(checkBulkUpload).not.toHaveBeenCalledWith({
assetBulkUploadCheckDto: {
assets: expect.arrayContaining([
expect.objectContaining({
id: unsupportedFilePath,
}),
]),
},
});
});
it('should filter out ignored patterns', async () => {
const testFilePath = path.join(testFolder, 'test.jpg');
const ignoredPattern = 'ignored';
const ignoredFolder = path.join(testFolder, ignoredPattern);
await fs.promises.mkdir(ignoredFolder, { recursive: true });
const ignoredFilePath = path.join(ignoredFolder, 'ignored.jpg');
await startWatch([testFolder], { concurrency: 1, ignore: ignoredPattern }, { batchSize: 1, debounceTimeMs: 10 });
await sleep(100); // let the watcher's debounce settle so it does not treat the test file as an existing file
await fs.promises.writeFile(testFilePath, 'testjpg');
await fs.promises.writeFile(ignoredFilePath, 'ignoredjpg');
await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
expect(checkBulkUpload).toHaveBeenCalledWith({
assetBulkUploadCheckDto: {
assets: expect.arrayContaining([
expect.objectContaining({
id: testFilePath,
}),
]),
},
});
expect(checkBulkUpload).not.toHaveBeenCalledWith({
assetBulkUploadCheckDto: {
assets: expect.arrayContaining([
expect.objectContaining({
id: ignoredFilePath,
}),
]),
},
});
});
afterEach(async () => {
await fs.promises.rm(testFolder, { recursive: true, force: true });
});
});

View File

@@ -1,9 +1,7 @@
import {
Action,
AssetBulkUploadCheckItem,
AssetBulkUploadCheckResult,
AssetMediaResponseDto,
AssetMediaStatus,
AssetFileUploadResponseDto,
addAssetsToAlbum,
checkBulkUpload,
createAlbum,
@@ -12,18 +10,13 @@ import {
getSupportedMediaTypes,
} from '@immich/sdk';
import byteSize from 'byte-size';
import { Matcher, watch as watchFs } from 'chokidar';
import { MultiBar, Presets, SingleBar } from 'cli-progress';
import { Presets, SingleBar } from 'cli-progress';
import { chunk } from 'lodash-es';
import micromatch from 'micromatch';
import { Stats, createReadStream } from 'node:fs';
import { stat, unlink } from 'node:fs/promises';
import os from 'node:os';
import path, { basename } from 'node:path';
import { Queue } from 'src/queue';
import { BaseOptions, Batcher, authenticate, crawl, sha1 } from 'src/utils';
const UPLOAD_WATCH_BATCH_SIZE = 100;
const UPLOAD_WATCH_DEBOUNCE_TIME_MS = 10_000;
import { BaseOptions, authenticate, crawl, sha1 } from 'src/utils';
const s = (count: number) => (count === 1 ? '' : 's');
@@ -31,9 +24,9 @@ const s = (count: number) => (count === 1 ? '' : 's');
type AssetBulkUploadCheckResults = Array<AssetBulkUploadCheckResult & { id: string }>;
type Asset = { id: string; filepath: string };
export interface UploadOptionsDto {
interface UploadOptionsDto {
recursive?: boolean;
ignore?: string;
exclusionPatterns?: string[];
dryRun?: boolean;
skipHash?: boolean;
delete?: boolean;
@@ -41,9 +34,6 @@ export interface UploadOptionsDto {
albumName?: string;
includeHidden?: boolean;
concurrency: number;
progress?: boolean;
watch?: boolean;
jsonOutput?: boolean;
}
class UploadFile extends File {
@@ -63,100 +53,19 @@ class UploadFile extends File {
}
}
const uploadBatch = async (files: string[], options: UploadOptionsDto) => {
const { newFiles, duplicates } = await checkForDuplicates(files, options);
const newAssets = await uploadFiles(newFiles, options);
if (options.jsonOutput) {
console.log(JSON.stringify({ newFiles, duplicates, newAssets }, undefined, 4));
}
await updateAlbums([...newAssets, ...duplicates], options);
await deleteFiles(
newAssets.map(({ filepath }) => filepath),
options,
);
};
export const startWatch = async (
paths: string[],
options: UploadOptionsDto,
{
batchSize = UPLOAD_WATCH_BATCH_SIZE,
debounceTimeMs = UPLOAD_WATCH_DEBOUNCE_TIME_MS,
}: { batchSize?: number; debounceTimeMs?: number } = {},
) => {
const watcherIgnored: Matcher[] = [];
const { image, video } = await getSupportedMediaTypes();
const extensions = new Set([...image, ...video]);
if (options.ignore) {
watcherIgnored.push((path) => micromatch.contains(path, `**/${options.ignore}`));
}
const pathsBatcher = new Batcher<string>({
batchSize,
debounceTimeMs,
onBatch: async (paths: string[]) => {
const uniquePaths = [...new Set(paths)];
await uploadBatch(uniquePaths, options);
},
});
const onFile = async (path: string, stats?: Stats) => {
if (stats?.isDirectory()) {
return;
}
const ext = '.' + path.split('.').pop()?.toLowerCase();
if (!ext || !extensions.has(ext)) {
return;
}
if (!options.progress) {
// only log when the progress bar is disabled, since logging interferes with its rendering
console.log(`Change detected: ${path}`);
}
pathsBatcher.add(path);
};
const fsWatcher = watchFs(paths, {
ignoreInitial: true,
ignored: watcherIgnored,
alwaysStat: true,
awaitWriteFinish: true,
depth: options.recursive ? undefined : 1,
persistent: true,
})
.on('add', onFile)
.on('change', onFile)
.on('error', (error) => console.error(`Watcher error: ${error}`));
process.on('SIGINT', async () => {
console.log('Exiting...');
await fsWatcher.close();
process.exit();
});
};
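As a rough illustration of the watch mode, here is a minimal invocation sketch; the option names come from `UploadOptionsDto` and the `startWatch` signature above, while the paths and values are hypothetical:

```typescript
import { startWatch } from 'src/commands/asset';

// Watch two folders; events are batched so an upload fires once 100 paths
// accumulate or after 10s of quiet, mirroring the defaults above.
await startWatch(
  ['/photos', '/camera-roll'],
  { concurrency: 4, recursive: true, ignore: 'raw' }, // skips anything matching **/raw
  { batchSize: 100, debounceTimeMs: 10_000 },
);
```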
export const upload = async (paths: string[], baseOptions: BaseOptions, options: UploadOptionsDto) => {
await authenticate(baseOptions);
const scanFiles = await scan(paths, options);
if (scanFiles.length === 0) {
if (options.watch) {
console.log('No files found initially.');
} else {
console.log('No files found, exiting');
return;
}
console.log('No files found, exiting');
return;
}
if (options.watch) {
console.log('Watching for changes...');
await startWatch(paths, options);
// the watcher does not handle the initial scan;
// scan() is a more efficient first pass with batched results
}
await uploadBatch(scanFiles, options);
const { newFiles, duplicates } = await checkForDuplicates(scanFiles, options);
const newAssets = await uploadFiles(newFiles, options);
await updateAlbums([...newAssets, ...duplicates], options);
await deleteFiles(newFiles, options);
};
const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
@@ -166,7 +75,7 @@ const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
const files = await crawl({
pathsToCrawl,
recursive: options.recursive,
exclusionPattern: options.ignore,
exclusionPatterns: options.exclusionPatterns,
includeHidden: options.includeHidden,
extensions: [...image, ...video],
});
@@ -174,102 +83,48 @@ const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
return files;
};
export const checkForDuplicates = async (files: string[], { concurrency, skipHash, progress }: UploadOptionsDto) => {
const checkForDuplicates = async (files: string[], { concurrency, skipHash }: UploadOptionsDto) => {
if (skipHash) {
console.log('Skipping hash check, assuming all files are new');
return { newFiles: files, duplicates: [] };
}
let multiBar: MultiBar | undefined;
const progressBar = new SingleBar(
{ format: 'Checking files | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
Presets.shades_classic,
);
if (progress) {
multiBar = new MultiBar(
{ format: '{message} | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
Presets.shades_classic,
);
} else {
console.log(`Received ${files.length} files, hashing...`);
}
const hashProgressBar = multiBar?.create(files.length, 0, { message: 'Hashing files ' });
const checkProgressBar = multiBar?.create(files.length, 0, { message: 'Checking for duplicates' });
progressBar.start(files.length, 0);
const newFiles: string[] = [];
const duplicates: Asset[] = [];
const checkBulkUploadQueue = new Queue<AssetBulkUploadCheckItem[], void>(
async (assets: AssetBulkUploadCheckItem[]) => {
const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets } });
try {
// TODO refactor into a queue
for (const items of chunk(files, concurrency)) {
const dto = await Promise.all(items.map(async (filepath) => ({ id: filepath, checksum: await sha1(filepath) })));
const { results } = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: dto } });
const results = response.results as AssetBulkUploadCheckResults;
for (const { id: filepath, assetId, action } of results) {
for (const { id: filepath, assetId, action } of results as AssetBulkUploadCheckResults) {
if (action === Action.Accept) {
newFiles.push(filepath);
} else {
// rejects are always duplicates
duplicates.push({ id: assetId as string, filepath });
}
progressBar.increment();
}
checkProgressBar?.increment(assets.length);
},
{ concurrency, retry: 3 },
);
const results: { id: string; checksum: string }[] = [];
let checkBulkUploadRequests: AssetBulkUploadCheckItem[] = [];
const queue = new Queue<string, AssetBulkUploadCheckItem[]>(
async (filepath: string): Promise<AssetBulkUploadCheckItem[]> => {
const dto = { id: filepath, checksum: await sha1(filepath) };
results.push(dto);
checkBulkUploadRequests.push(dto);
if (checkBulkUploadRequests.length === 5000) {
const batch = checkBulkUploadRequests;
checkBulkUploadRequests = [];
void checkBulkUploadQueue.push(batch);
}
hashProgressBar?.increment();
return results;
},
{ concurrency, retry: 3 },
);
for (const item of files) {
void queue.push(item);
}
} finally {
progressBar.stop();
}
await queue.drained();
if (checkBulkUploadRequests.length > 0) {
void checkBulkUploadQueue.push(checkBulkUploadRequests);
}
await checkBulkUploadQueue.drained();
multiBar?.stop();
console.log(`Found ${newFiles.length} new files and ${duplicates.length} duplicate${s(duplicates.length)}`);
// Report failures
const failedTasks = queue.tasks.filter((task) => task.status === 'failed');
if (failedTasks.length > 0) {
console.log(`Failed to verify ${failedTasks.length} file${s(failedTasks.length)}:`);
for (const task of failedTasks) {
console.log(`- ${task.data} - ${task.error}`);
}
}
return { newFiles, duplicates };
};
export const uploadFiles = async (
files: string[],
{ dryRun, concurrency, progress }: UploadOptionsDto,
): Promise<Asset[]> => {
const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptionsDto): Promise<Asset[]> => {
if (files.length === 0) {
console.log('All assets were already uploaded, nothing to do.');
return [];
@@ -286,23 +141,15 @@ export const uploadFiles = async (
if (dryRun) {
console.log(`Would have uploaded ${files.length} asset${s(files.length)} (${byteSize(totalSize)})`);
return files.map((filepath) => ({ id: '', filepath }));
return [];
}
let uploadProgress: SingleBar | undefined;
if (progress) {
uploadProgress = new SingleBar(
{
format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}',
},
Presets.shades_classic,
);
} else {
console.log(`Uploading ${files.length} asset${s(files.length)} (${byteSize(totalSize)})`);
}
uploadProgress?.start(totalSize, 0);
uploadProgress?.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });
const uploadProgress = new SingleBar(
{ format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}' },
Presets.shades_classic,
);
uploadProgress.start(totalSize, 0);
uploadProgress.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });
let duplicateCount = 0;
let duplicateSize = 0;
@@ -311,56 +158,41 @@ export const uploadFiles = async (
const newAssets: Asset[] = [];
const queue = new Queue<string, AssetMediaResponseDto>(
async (filepath: string) => {
const stats = statsMap.get(filepath);
if (!stats) {
throw new Error(`Stats not found for ${filepath}`);
}
try {
for (const items of chunk(files, concurrency)) {
await Promise.all(
items.map(async (filepath) => {
const stats = statsMap.get(filepath) as Stats;
const response = await uploadFile(filepath, stats);
const response = await uploadFile(filepath, stats);
newAssets.push({ id: response.id, filepath });
if (response.status === AssetMediaStatus.Duplicate) {
duplicateCount++;
duplicateSize += stats.size ?? 0;
} else {
successCount++;
successSize += stats.size ?? 0;
}
newAssets.push({ id: response.id, filepath });
uploadProgress?.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });
if (response.duplicate) {
duplicateCount++;
duplicateSize += stats.size ?? 0;
} else {
successCount++;
successSize += stats.size ?? 0;
}
return response;
},
{ concurrency, retry: 3 },
);
uploadProgress.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });
for (const item of files) {
void queue.push(item);
return response;
}),
);
}
} finally {
uploadProgress.stop();
}
await queue.drained();
uploadProgress?.stop();
console.log(`Successfully uploaded ${successCount} new asset${s(successCount)} (${byteSize(successSize)})`);
if (duplicateCount > 0) {
console.log(`Skipped ${duplicateCount} duplicate asset${s(duplicateCount)} (${byteSize(duplicateSize)})`);
}
// Report failures
const failedTasks = queue.tasks.filter((task) => task.status === 'failed');
if (failedTasks.length > 0) {
console.log(`Failed to upload ${failedTasks.length} asset${s(failedTasks.length)}:`);
for (const task of failedTasks) {
console.log(`- ${task.data} - ${task.error}`);
}
}
return newAssets;
};
const uploadFile = async (input: string, stats: Stats): Promise<AssetMediaResponseDto> => {
const uploadFile = async (input: string, stats: Stats): Promise<AssetFileUploadResponseDto> => {
const { baseUrl, headers } = defaults;
const assetPath = path.parse(input);
@@ -393,7 +225,7 @@ const uploadFile = async (input: string, stats: Stats): Promise<AssetMediaRespon
formData.append('sidecarData', sidecarData);
}
const response = await fetch(`${baseUrl}/assets`, {
const response = await fetch(`${baseUrl}/asset/upload`, {
method: 'post',
redirect: 'error',
headers: headers as Record<string, string>,
@@ -412,7 +244,7 @@ const deleteFiles = async (files: string[], options: UploadOptionsDto): Promise<
}
if (options.dryRun) {
console.log(`Would have deleted ${files.length} local asset${s(files.length)}`);
console.log(`Would now have deleted assets, but skipped due to dry run`);
return;
}
@@ -453,7 +285,7 @@ const updateAlbums = async (assets: Asset[], options: UploadOptionsDto) => {
if (dryRun) {
// TODO print asset counts for new albums
console.log(`Would have created ${newAlbums.size} new album${s(newAlbums.size)}`);
console.log(`Would have updated albums of ${assets.length} asset${s(assets.length)}`);
console.log(`Would have updated ${assets.length} asset${s(assets.length)}`);
return;
}
@@ -513,9 +345,7 @@ const updateAlbums = async (assets: Asset[], options: UploadOptionsDto) => {
}
};
// `filepath` valid format:
// - Windows: `D:\\test\\Filename.txt` or `D:/test/Filename.txt`
// - Unix: `/test/Filename.txt`
export const getAlbumName = (filepath: string, options: UploadOptionsDto) => {
return options.albumName ?? path.basename(path.dirname(filepath));
const getAlbumName = (filepath: string, options: UploadOptionsDto) => {
const folderName = os.platform() === 'win32' ? filepath.split('\\').at(-2) : filepath.split('/').at(-2);
return options.albumName ?? folderName;
};

View File

@@ -1,4 +1,4 @@
import { getMyUser } from '@immich/sdk';
import { getMyUserInfo } from '@immich/sdk';
import { existsSync } from 'node:fs';
import { mkdir, unlink } from 'node:fs/promises';
import { BaseOptions, connect, getAuthFilePath, logError, withError, writeAuthFile } from 'src/utils';
@@ -10,13 +10,13 @@ export const login = async (url: string, key: string, options: BaseOptions) => {
await connect(url, key);
const [error, user] = await withError(getMyUser());
const [error, userInfo] = await withError(getMyUserInfo());
if (error) {
logError(error, 'Failed to load user info');
process.exit(1);
}
console.log(`Logged in as ${user.email}`);
console.log(`Logged in as ${userInfo.email}`);
if (!existsSync(configDir)) {
// Create config folder if it doesn't exist

View File

@@ -1,4 +1,4 @@
import { getAssetStatistics, getMyUser, getServerVersion, getSupportedMediaTypes } from '@immich/sdk';
import { getAssetStatistics, getMyUserInfo, getServerVersion, getSupportedMediaTypes } from '@immich/sdk';
import { BaseOptions, authenticate } from 'src/utils';
export const serverInfo = async (options: BaseOptions) => {
@@ -8,7 +8,7 @@ export const serverInfo = async (options: BaseOptions) => {
getServerVersion(),
getSupportedMediaTypes(),
getAssetStatistics({}),
getMyUser(),
getMyUserInfo(),
]);
console.log(`Server Info (via ${userInfo.email})`);

View File

@@ -44,7 +44,7 @@ program
.description('Upload assets')
.usage('[paths...] [options]')
.addOption(new Option('-r, --recursive', 'Recursive').env('IMMICH_RECURSIVE').default(false))
.addOption(new Option('-i, --ignore <pattern>', 'Pattern to ignore').env('IMMICH_IGNORE_PATHS'))
.addOption(new Option('-i, --ignore [paths...]', 'Paths to ignore').env('IMMICH_IGNORE_PATHS').default([]))
.addOption(new Option('-h, --skip-hash', "Don't hash files before upload").env('IMMICH_SKIP_HASH').default(false))
.addOption(new Option('-H, --include-hidden', 'Include hidden folders').env('IMMICH_INCLUDE_HIDDEN').default(false))
.addOption(
@@ -60,27 +60,14 @@ program
.addOption(
new Option('-n, --dry-run', "Don't perform any actions, just show what will be done")
.env('IMMICH_DRY_RUN')
.default(false)
.conflicts('skipHash'),
.default(false),
)
.addOption(
new Option('-c, --concurrency <number>', 'Number of assets to upload at the same time')
.env('IMMICH_UPLOAD_CONCURRENCY')
.default(4),
)
.addOption(
new Option('-j, --json-output', 'Output detailed information in json format')
.env('IMMICH_JSON_OUTPUT')
.default(false),
)
.addOption(new Option('--delete', 'Delete local assets after upload').env('IMMICH_DELETE_ASSETS'))
.addOption(new Option('--no-progress', 'Hide progress bars').env('IMMICH_PROGRESS_BAR').default(true))
.addOption(
new Option('--watch', 'Watch for changes and upload automatically')
.env('IMMICH_WATCH_CHANGES')
.default(false)
.implies({ progress: false }),
)
.argument('[paths...]', 'One or more paths to assets to be uploaded')
.action((paths, options) => upload(paths, program.opts(), options));

View File

@@ -1,131 +0,0 @@
import * as fastq from 'fastq';
import { uniqueId } from 'lodash-es';
export type Task<T, R> = {
readonly id: string;
status: 'idle' | 'processing' | 'succeeded' | 'failed';
data: T;
error: unknown | undefined;
count: number;
// TODO: Could be useful to add a progress property.
// TODO: Could be useful to add start_at/end_at/duration properties.
result: undefined | R;
};
export type QueueOptions = {
verbose?: boolean;
concurrency?: number;
retry?: number;
// TODO: Could be useful to add a timeout property for retry.
};
export type ComputedQueueOptions = Required<QueueOptions>;
export const defaultQueueOptions = {
concurrency: 1,
retry: 0,
verbose: false,
};
/**
* An in-memory queue that processes tasks in parallel with a given concurrency.
* @see {@link https://www.npmjs.com/package/fastq}
* @template T - The type of the worker task data.
* @template R - The type of the worker output data.
*/
export class Queue<T, R> {
private readonly queue: fastq.queueAsPromised<string, Task<T, R>>;
private readonly store = new Map<string, Task<T, R>>();
readonly options: ComputedQueueOptions;
readonly worker: (data: T) => Promise<R>;
/**
* Create a new queue.
* @param worker - The worker function that processes the task.
* @param options - The queue options.
*/
constructor(worker: (data: T) => Promise<R>, options?: QueueOptions) {
this.options = { ...defaultQueueOptions, ...options };
this.worker = worker;
this.store = new Map<string, Task<T, R>>();
this.queue = this.buildQueue();
}
get tasks(): Task<T, R>[] {
const tasks: Task<T, R>[] = [];
for (const task of this.store.values()) {
tasks.push(task);
}
return tasks;
}
getTask(id: string): Task<T, R> {
const task = this.store.get(id);
if (!task) {
throw new Error(`Task with id ${id} not found`);
}
return task;
}
/**
* Wait for the queue to be empty.
* @returns Promise<void> - The returned Promise resolves when all tasks in the queue have been processed by a worker.
* This promise can be ignored, as it will not lead to an `unhandledRejection`.
*/
drained(): Promise<void> {
return this.queue.drained();
}
/**
* Add a task at the end of the queue.
* @see {@link https://www.npmjs.com/package/fastq}
* @param data
* @returns Promise<Task<T, R>> - A Promise that is fulfilled (rejected) when the task completes successfully (unsuccessfully).
* This promise can be ignored, as it will not lead to an `unhandledRejection`.
*/
async push(data: T): Promise<Task<T, R>> {
const id = uniqueId();
const task: Task<T, R> = { id, status: 'idle', error: undefined, count: 0, data, result: undefined };
this.store.set(id, task);
return this.queue.push(id);
}
// TODO: Support more function delegation to fastq.
private buildQueue(): fastq.queueAsPromised<string, Task<T, R>> {
return fastq.promise((id: string) => {
const task = this.getTask(id);
return this.work(task);
}, this.options.concurrency);
}
private async work(task: Task<T, R>): Promise<Task<T, R>> {
task.count += 1;
task.error = undefined;
task.status = 'processing';
if (this.options.verbose) {
console.log('[task] processing:', task);
}
try {
task.result = await this.worker(task.data);
task.status = 'succeeded';
if (this.options.verbose) {
console.log('[task] succeeded:', task);
}
return task;
} catch (error) {
task.error = error;
task.status = 'failed';
if (this.options.verbose) {
console.log('[task] failed:', task);
}
if (this.options.retry > 0 && task.count < this.options.retry) {
if (this.options.verbose) {
console.log('[task] retry:', task);
}
return this.work(task);
}
return task;
}
}
}
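A brief usage sketch of this queue (hypothetical worker and values; not part of the diff):

```typescript
// Double each number with up to 4 concurrent workers; a failing task is
// attempted up to 3 times before being marked 'failed'.
const queue = new Queue<number, number>(async (n) => n * 2, { concurrency: 4, retry: 3 });

for (const n of [1, 2, 3]) {
  void queue.push(n); // safe to ignore: it will not cause an unhandledRejection
}

await queue.drained();
const failed = queue.tasks.filter((task) => task.status === 'failed');
console.log(`${failed.length} task(s) failed`);
```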

View File

@@ -1,21 +1,14 @@
import mockfs from 'mock-fs';
import { readFileSync } from 'node:fs';
import { Batcher, CrawlOptions, crawl } from 'src/utils';
import { Mock } from 'vitest';
import { CrawlOptions, crawl } from 'src/utils';
interface Test {
test: string;
options: Omit<CrawlOptions, 'extensions'>;
files: Record<string, boolean>;
skipOnWin32?: boolean;
}
const cwd = process.cwd();
const readContent = (path: string) => {
return readFileSync(path).toString();
};
const extensions = [
'.jpg',
'.jpeg',
@@ -50,18 +43,6 @@ const tests: Test[] = [
'/photos/image.jpg': true,
},
},
{
test: 'should crawl folders with quotes',
options: {
pathsToCrawl: ["/photo's/", '/photo"s/', '/photo`s/'],
},
files: {
"/photo's/image1.jpg": true,
'/photo"s/image2.jpg': true,
'/photo`s/image3.jpg': true,
},
skipOnWin32: true, // single quote interferes with mockfs root on Windows
},
{
test: 'should crawl a single file',
options: {
@@ -85,7 +66,7 @@ const tests: Test[] = [
test: 'should exclude by file extension',
options: {
pathsToCrawl: ['/photos/'],
exclusionPattern: '**/*.tif',
exclusionPatterns: ['**/*.tif'],
},
files: {
'/photos/image.jpg': true,
@@ -96,7 +77,7 @@ const tests: Test[] = [
test: 'should exclude by file extension without case sensitivity',
options: {
pathsToCrawl: ['/photos/'],
exclusionPattern: '**/*.TIF',
exclusionPatterns: ['**/*.TIF'],
},
files: {
'/photos/image.jpg': true,
@@ -107,7 +88,7 @@ const tests: Test[] = [
test: 'should exclude by folder',
options: {
pathsToCrawl: ['/photos/'],
exclusionPattern: '**/raw/**',
exclusionPatterns: ['**/raw/**'],
recursive: true,
},
files: {
@@ -129,7 +110,17 @@ const tests: Test[] = [
'/albums/image3.jpg': true,
},
},
{
test: 'should support globbing paths',
options: {
pathsToCrawl: ['/photos*'],
},
files: {
'/photos1/image1.jpg': true,
'/photos2/image2.jpg': true,
'/images/image3.jpg': false,
},
},
{
test: 'should crawl a single path without trailing slash',
options: {
@@ -227,7 +218,7 @@ const tests: Test[] = [
test: 'should support ignoring full filename',
options: {
pathsToCrawl: ['/photos'],
exclusionPattern: '**/image2.jpg',
exclusionPatterns: ['**/image2.jpg'],
},
files: {
'/photos/image1.jpg': true,
@@ -239,7 +230,7 @@ const tests: Test[] = [
test: 'should support ignoring file extensions',
options: {
pathsToCrawl: ['/photos'],
exclusionPattern: '**/*.png',
exclusionPatterns: ['**/*.png'],
},
files: {
'/photos/image1.jpg': true,
@@ -252,7 +243,7 @@ const tests: Test[] = [
options: {
pathsToCrawl: ['/photos'],
recursive: true,
exclusionPattern: '**/raw/**',
exclusionPatterns: ['**/raw/**'],
},
files: {
'/photos/image1.jpg': true,
@@ -265,10 +256,9 @@ const tests: Test[] = [
{
test: 'should support ignoring absolute paths',
options: {
// Currently, fast-glob has some caveats when dealing with `/`.
pathsToCrawl: ['/*s'],
pathsToCrawl: ['/'],
recursive: true,
exclusionPattern: '/images/**',
exclusionPatterns: ['/images/**'],
},
files: {
'/photos/image1.jpg': true,
@@ -284,58 +274,17 @@ describe('crawl', () => {
});
describe('crawl', () => {
for (const { test: name, options, files, skipOnWin32 } of tests) {
if (process.platform === 'win32' && skipOnWin32) {
test.skip(name);
continue;
}
it(name, async () => {
// The file contents are the same as the path.
mockfs(Object.fromEntries(Object.keys(files).map((file) => [file, file])));
for (const { test, options, files } of tests) {
it(test, async () => {
mockfs(Object.fromEntries(Object.keys(files).map((file) => [file, ''])));
const actual = await crawl({ ...options, extensions });
const expected = Object.entries(files)
.filter((entry) => entry[1])
.map(([file]) => file);
// Compare file contents instead of paths, since a file can be represented in multiple ways.
expect(actual.map((path) => readContent(path)).sort()).toEqual(expected.sort());
expect(actual.sort()).toEqual(expected.sort());
});
}
});
});
describe('Batcher', () => {
let batcher: Batcher;
let onBatch: Mock;
beforeEach(() => {
onBatch = vi.fn();
batcher = new Batcher({ batchSize: 2, onBatch });
});
it('should trigger onBatch() when a batch limit is reached', async () => {
batcher.add('a');
batcher.add('b');
batcher.add('c');
expect(onBatch).toHaveBeenCalledOnce();
expect(onBatch).toHaveBeenCalledWith(['a', 'b']);
});
it('should trigger onBatch() when flush() is called', async () => {
batcher.add('a');
batcher.flush();
expect(onBatch).toHaveBeenCalledOnce();
expect(onBatch).toHaveBeenCalledWith(['a']);
});
it('should trigger onBatch() when debounce time reached', async () => {
vi.useFakeTimers();
batcher = new Batcher({ batchSize: 2, debounceTimeMs: 100, onBatch });
batcher.add('a');
expect(onBatch).not.toHaveBeenCalled();
vi.advanceTimersByTime(200);
expect(onBatch).toHaveBeenCalledOnce();
expect(onBatch).toHaveBeenCalledWith(['a']);
vi.useRealTimers();
});
});

View File

@@ -1,10 +1,9 @@
import { getMyUser, init, isHttpError } from '@immich/sdk';
import { convertPathToPattern, glob } from 'fast-glob';
import { defaults, getMyUserInfo, isHttpError } from '@immich/sdk';
import { glob } from 'glob';
import { createHash } from 'node:crypto';
import { createReadStream } from 'node:fs';
import { readFile, stat, writeFile } from 'node:fs/promises';
import { platform } from 'node:os';
import { join, resolve } from 'node:path';
import { join } from 'node:path';
import yaml from 'yaml';
export interface BaseOptions {
@@ -47,9 +46,10 @@ export const connect = async (url: string, key: string) => {
// noop
}
init({ baseUrl: url, apiKey: key });
defaults.baseUrl = url;
defaults.headers = { 'x-api-key': key };
const [error] = await withError(getMyUser());
const [error] = await withError(getMyUserInfo());
if (isHttpError(error)) {
logError(error, 'Failed to connect to server');
process.exit(1);
@@ -104,16 +104,11 @@ export interface CrawlOptions {
pathsToCrawl: string[];
recursive?: boolean;
includeHidden?: boolean;
exclusionPattern?: string;
exclusionPatterns?: string[];
extensions: string[];
}
const convertPathToPatternOnWin = (path: string) => {
return platform() === 'win32' ? convertPathToPattern(path) : path;
};
export const crawl = async (options: CrawlOptions): Promise<string[]> => {
const { extensions: extensionsWithPeriod, recursive, pathsToCrawl, exclusionPattern, includeHidden } = options;
const { extensions: extensionsWithPeriod, recursive, pathsToCrawl, exclusionPatterns, includeHidden } = options;
const extensions = extensionsWithPeriod.map((extension) => extension.replace('.', ''));
if (pathsToCrawl.length === 0) {
@@ -125,42 +120,45 @@ export const crawl = async (options: CrawlOptions): Promise<string[]> => {
for await (const currentPath of pathsToCrawl) {
try {
const absolutePath = resolve(currentPath);
const stats = await stat(absolutePath);
const stats = await stat(currentPath);
if (stats.isFile() || stats.isSymbolicLink()) {
crawledFiles.push(absolutePath);
crawledFiles.push(currentPath);
} else {
patterns.push(convertPathToPatternOnWin(absolutePath));
patterns.push(currentPath);
}
} catch (error: any) {
if (error.code === 'ENOENT') {
patterns.push(convertPathToPatternOnWin(currentPath));
patterns.push(currentPath);
} else {
throw error;
}
}
}
if (patterns.length === 0) {
let searchPattern: string;
if (patterns.length === 1) {
searchPattern = patterns[0];
} else if (patterns.length === 0) {
return crawledFiles;
} else {
searchPattern = '{' + patterns.join(',') + '}';
}
const searchPatterns = patterns.map((pattern) => {
let escapedPattern = pattern.replaceAll("'", "[']").replaceAll('"', '["]').replaceAll('`', '[`]');
if (recursive) {
escapedPattern = escapedPattern + '/**';
}
return `${escapedPattern}/*.{${extensions.join(',')}}`;
if (recursive) {
searchPattern = searchPattern + '/**/';
}
searchPattern = `${searchPattern}/*.{${extensions.join(',')}}`;
const globbedFiles = await glob(searchPattern, {
absolute: true,
nocase: true,
nodir: true,
dot: includeHidden,
ignore: exclusionPatterns,
});
const globbedFiles = await glob(searchPatterns, {
absolute: true,
caseSensitiveMatch: false,
dot: includeHidden,
ignore: [`**/${exclusionPattern}`],
});
globbedFiles.push(...crawledFiles);
return globbedFiles.sort();
return [...crawledFiles, ...globbedFiles].sort();
};
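To make the pattern construction concrete, here is a hypothetical trace of the fast-glob branch above (values invented for illustration):

```typescript
// pathsToCrawl = ["/photo's"], recursive = true, extensions = ['jpg', 'png']:
//   escape quotes:  "/photo[']s"               // ', " and ` are wrapped in character classes
//   add recursion:  "/photo[']s/**"
//   final pattern:  "/photo[']s/**/*.{jpg,png}"
// An exclusionPattern of 'raw/**' is applied as ignore: ['**/raw/**'].
```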
export const sha1 = (filepath: string) => {
@@ -172,64 +170,3 @@ export const sha1 = (filepath: string) => {
rs.on('end', () => resolve(hash.digest('hex')));
});
};
/**
* Batches items and calls onBatch to process them
* when the batch size is reached or the debounce time has passed.
*/
export class Batcher<T = unknown> {
private items: T[] = [];
private readonly batchSize: number;
private readonly debounceTimeMs?: number;
private readonly onBatch: (items: T[]) => void;
private debounceTimer?: NodeJS.Timeout;
constructor({
batchSize,
debounceTimeMs,
onBatch,
}: {
batchSize: number;
debounceTimeMs?: number;
onBatch: (items: T[]) => Promise<void>;
}) {
this.batchSize = batchSize;
this.debounceTimeMs = debounceTimeMs;
this.onBatch = onBatch;
}
private setDebounceTimer() {
if (this.debounceTimer) {
clearTimeout(this.debounceTimer);
}
if (this.debounceTimeMs) {
this.debounceTimer = setTimeout(() => this.flush(), this.debounceTimeMs);
}
}
private clearDebounceTimer() {
if (this.debounceTimer) {
clearTimeout(this.debounceTimer);
this.debounceTimer = undefined;
}
}
add(item: T) {
this.items.push(item);
this.setDebounceTimer();
if (this.items.length >= this.batchSize) {
this.flush();
}
}
flush() {
this.clearDebounceTimer();
if (this.items.length === 0) {
return;
}
this.onBatch(this.items);
this.items = [];
}
}
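A quick usage sketch of this Batcher (hypothetical handler and values):

```typescript
// Flush collected paths in groups of 100, or after 10s of inactivity.
const batcher = new Batcher<string>({
  batchSize: 100,
  debounceTimeMs: 10_000,
  onBatch: async (paths) => console.log(`processing ${paths.length} path(s)`),
});

batcher.add('/photos/a.jpg');
batcher.add('/photos/b.jpg');
batcher.flush(); // force any pending items through immediately
```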

37
cli/src/version.ts Normal file
View File

@@ -0,0 +1,37 @@
import { version } from '../package.json';
export interface ICliVersion {
major: number;
minor: number;
patch: number;
}
export class CliVersion implements ICliVersion {
constructor(
public readonly major: number,
public readonly minor: number,
public readonly patch: number,
) {}
toString() {
return `${this.major}.${this.minor}.${this.patch}`;
}
toJSON() {
const { major, minor, patch } = this;
return { major, minor, patch };
}
static fromString(version: string): CliVersion {
const regex = /v?(?<major>\d+)\.(?<minor>\d+)\.(?<patch>\d+)/i;
const matchResult = version.match(regex);
if (matchResult) {
const [, major, minor, patch] = matchResult.map(Number);
return new CliVersion(major, minor, patch);
} else {
throw new Error(`Invalid version format: ${version}`);
}
}
}
export const cliVersion = CliVersion.fromString(version);
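A short sketch of the parser's behavior (hypothetical inputs; the regex accepts an optional leading `v`):

```typescript
CliVersion.fromString('v1.106.4').toString(); // => '1.106.4'
CliVersion.fromString('2.2.0').toJSON();      // => { major: 2, minor: 2, patch: 0 }
CliVersion.fromString('nightly');             // => throws 'Invalid version format: nightly'
```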

View File

@@ -2,7 +2,6 @@ import { defineConfig } from 'vite';
import tsconfigPaths from 'vite-tsconfig-paths';
export default defineConfig({
resolve: { alias: { src: '/src' } },
build: {
rollupOptions: {
input: 'src/index.ts',

38
deployment/.gitignore vendored
View File

@@ -1,38 +0,0 @@
# OpenTofu
# Local .terraform directories
**/.terraform/*
# .tfstate files
*.tfstate
*.tfstate.*
# Crash log files
crash.log
crash.*.log
# Ignore override files as they are usually used to override resources locally and so
# are not checked in
override.tf
override.tf.json
*_override.tf
*_override.tf.json
# Include override files you do wish to add to version control using negated pattern
# !example_override.tf
# Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan
# example: *tfplan*
# Ignore CLI configuration files
.terraformrc
terraform.rc
# Terragrunt
# terragrunt cache directories
**/.terragrunt-cache/*
# Terragrunt debug output file (when using `--terragrunt-debug` option)
# See: https://terragrunt.gruntwork.io/docs/reference/cli-options/#terragrunt-debug
terragrunt-debug.tfvars.json

View File

@@ -1,38 +0,0 @@
# This file is maintained automatically by "tofu init".
# Manual edits may be lost in future updates.
provider "registry.opentofu.org/cloudflare/cloudflare" {
version = "4.52.5"
constraints = "4.52.5"
hashes = [
"h1:+rfzF+16ZcWZWnTyW/p1HHTzYbPKX8Zt2nIFtR/+f+E=",
"h1:18bXaaOSq8MWKuMxo/4y7EB7/i7G90y5QsKHZRmkoDo=",
"h1:4vZVOpKeEQZsF2VrARRZFeL37Ed/gD4rRMtfnvWQres=",
"h1:BZOsTF83QPKXTAaYqxPKzdl1KRjk/L2qbPpFjM0w28A=",
"h1:CDuC+HXLvc1z6wkCRsSDcc/+QENIHEtssYshiWg3opA=",
"h1:DE+YFzLnqSe79pI2R4idRGx5QzLdrA7RXvngTkGfZ30=",
"h1:DfaJwH3Ml4yrRbdAY4AcDVy0QTQk5T3A622TXzS/u2E=",
"h1:EIDXP0W3kgIv2pecrFmqtK/DnlqkyckzBzhxKaXU+4A=",
"h1:EV4kYyaOnwGA0bh/3hU6Ezqnt1PFDxopH7i85e48IzY=",
"h1:M0iXabfzamU+MPDi0G9XACpbacFKMakmM+Z9HZ8HrsM=",
"h1:YWmCbGF/KbsrUzcYVBLscwLizidbp95TDQa0N2qpmVo=",
"h1:cxPcCB5gbrpUO1+IXkQYs1YTY50/0IlApCzGea0cwuQ=",
"h1:g6DldikTV2HXUu9uoeNY5FuLufgaYWF4ufgZg7wq62s=",
"h1:oi/Hrx9pwoQ+Z52CBC+rrowVH387EIj0qvnxQgDeI+0=",
"zh:1a3400cb38863b2585968d1876706bcfc67a148e1318a1d325c6c7704adc999b",
"zh:4c5062cb9e9da1676f06ae92b8370186d98976cc4c7030d3cd76df12af54282a",
"zh:52110f493b5f0587ef77a1cfd1a67001fd4c617b14c6502d732ab47352bdc2f7",
"zh:5aa536f9eaeb43823aaf2aa80e7d39b25ef2b383405ed034aa16a28b446a9238",
"zh:5cc39459a1c6be8a918f17054e4fbba573825ed5597dcada588fe99614d98a5b",
"zh:629ae6a7ba298815131da826474d199312d21cec53a4d5ded4fa56a692e6f072",
"zh:719cc7c75dc1d3eb30c22ff5102a017996d9788b948078c7e1c5b3446aeca661",
"zh:8698635a3ca04383c1e93b21d6963346bdae54d27177a48e4b1435b7f731731c",
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
"zh:8a9993f1dcadf1dd6ca43b23348abe374605d29945a2fafc07fb3457644e6a54",
"zh:b1b9a1e6bcc24d5863a664a411d2dc906373ae7a2399d2d65548ce7377057852",
"zh:b270184cdeec277218e84b94cb136fead753da717f9b9dc378e51907f3f00bb0",
"zh:dff2bc10071210181726ce270f954995fe42c696e61e2e8f874021fed02521e5",
"zh:e8e87b40b6a87dc097b0fdc20d3f725cec0d82abc9cc3755c1f89f8f6e8b0036",
"zh:ee964a6573d399a5dd22ce328fb38ca1207797a02248f14b2e4913ee390e7803",
]
}

View File

@@ -1,11 +0,0 @@
terraform {
backend "pg" {}
required_version = "~> 1.7"
required_providers {
cloudflare = {
source = "cloudflare/cloudflare"
version = "4.52.5"
}
}
}

View File

@@ -1,14 +0,0 @@
resource "cloudflare_pages_domain" "immich_app_release_domain" {
account_id = var.cloudflare_account_id
project_name = data.terraform_remote_state.cloudflare_account.outputs.immich_app_archive_pages_project_name
domain = "immich.app"
}
resource "cloudflare_record" "immich_app_release_domain" {
name = "immich.app"
proxied = true
ttl = 1
type = "CNAME"
content = data.terraform_remote_state.cloudflare_immich_app_docs.outputs.immich_app_branch_pages_hostname
zone_id = data.terraform_remote_state.cloudflare_account.outputs.immich_app_zone_id
}

View File

@@ -1,3 +0,0 @@
provider "cloudflare" {
api_token = data.terraform_remote_state.api_keys_state.outputs.terraform_key_cloudflare_docs
}

View File

@@ -1,27 +0,0 @@
data "terraform_remote_state" "api_keys_state" {
backend = "pg"
config = {
conn_str = var.tf_state_postgres_conn_str
schema_name = "prod_cloudflare_api_keys"
}
}
data "terraform_remote_state" "cloudflare_account" {
backend = "pg"
config = {
conn_str = var.tf_state_postgres_conn_str
schema_name = "prod_cloudflare_account"
}
}
data "terraform_remote_state" "cloudflare_immich_app_docs" {
backend = "pg"
config = {
conn_str = var.tf_state_postgres_conn_str
schema_name = "prod_cloudflare_immich_app_docs_${var.prefix_name}"
}
}

View File

@@ -1,20 +0,0 @@
terraform {
source = "."
extra_arguments custom_vars {
commands = get_terraform_commands_that_need_vars()
}
}
include {
path = find_in_parent_folders("state.hcl")
}
remote_state {
backend = "pg"
config = {
conn_str = get_env("TF_STATE_POSTGRES_CONN_STR")
schema_name = "prod_cloudflare_immich_app_docs_release"
}
}

View File

@@ -1,4 +0,0 @@
variable "cloudflare_account_id" {}
variable "tf_state_postgres_conn_str" {}
variable "prefix_name" {}

View File

@@ -1,38 +0,0 @@
# This file is maintained automatically by "tofu init".
# Manual edits may be lost in future updates.
provider "registry.opentofu.org/cloudflare/cloudflare" {
version = "4.52.5"
constraints = "4.52.5"
hashes = [
"h1:+rfzF+16ZcWZWnTyW/p1HHTzYbPKX8Zt2nIFtR/+f+E=",
"h1:18bXaaOSq8MWKuMxo/4y7EB7/i7G90y5QsKHZRmkoDo=",
"h1:4vZVOpKeEQZsF2VrARRZFeL37Ed/gD4rRMtfnvWQres=",
"h1:BZOsTF83QPKXTAaYqxPKzdl1KRjk/L2qbPpFjM0w28A=",
"h1:CDuC+HXLvc1z6wkCRsSDcc/+QENIHEtssYshiWg3opA=",
"h1:DE+YFzLnqSe79pI2R4idRGx5QzLdrA7RXvngTkGfZ30=",
"h1:DfaJwH3Ml4yrRbdAY4AcDVy0QTQk5T3A622TXzS/u2E=",
"h1:EIDXP0W3kgIv2pecrFmqtK/DnlqkyckzBzhxKaXU+4A=",
"h1:EV4kYyaOnwGA0bh/3hU6Ezqnt1PFDxopH7i85e48IzY=",
"h1:M0iXabfzamU+MPDi0G9XACpbacFKMakmM+Z9HZ8HrsM=",
"h1:YWmCbGF/KbsrUzcYVBLscwLizidbp95TDQa0N2qpmVo=",
"h1:cxPcCB5gbrpUO1+IXkQYs1YTY50/0IlApCzGea0cwuQ=",
"h1:g6DldikTV2HXUu9uoeNY5FuLufgaYWF4ufgZg7wq62s=",
"h1:oi/Hrx9pwoQ+Z52CBC+rrowVH387EIj0qvnxQgDeI+0=",
"zh:1a3400cb38863b2585968d1876706bcfc67a148e1318a1d325c6c7704adc999b",
"zh:4c5062cb9e9da1676f06ae92b8370186d98976cc4c7030d3cd76df12af54282a",
"zh:52110f493b5f0587ef77a1cfd1a67001fd4c617b14c6502d732ab47352bdc2f7",
"zh:5aa536f9eaeb43823aaf2aa80e7d39b25ef2b383405ed034aa16a28b446a9238",
"zh:5cc39459a1c6be8a918f17054e4fbba573825ed5597dcada588fe99614d98a5b",
"zh:629ae6a7ba298815131da826474d199312d21cec53a4d5ded4fa56a692e6f072",
"zh:719cc7c75dc1d3eb30c22ff5102a017996d9788b948078c7e1c5b3446aeca661",
"zh:8698635a3ca04383c1e93b21d6963346bdae54d27177a48e4b1435b7f731731c",
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
"zh:8a9993f1dcadf1dd6ca43b23348abe374605d29945a2fafc07fb3457644e6a54",
"zh:b1b9a1e6bcc24d5863a664a411d2dc906373ae7a2399d2d65548ce7377057852",
"zh:b270184cdeec277218e84b94cb136fead753da717f9b9dc378e51907f3f00bb0",
"zh:dff2bc10071210181726ce270f954995fe42c696e61e2e8f874021fed02521e5",
"zh:e8e87b40b6a87dc097b0fdc20d3f725cec0d82abc9cc3755c1f89f8f6e8b0036",
"zh:ee964a6573d399a5dd22ce328fb38ca1207797a02248f14b2e4913ee390e7803",
]
}

View File

@@ -1,11 +0,0 @@
terraform {
backend "pg" {}
required_version = "~> 1.7"
required_providers {
cloudflare = {
source = "cloudflare/cloudflare"
version = "4.52.5"
}
}
}

View File

@@ -1,26 +0,0 @@
resource "cloudflare_pages_domain" "immich_app_branch_domain" {
account_id = var.cloudflare_account_id
project_name = local.is_release ? data.terraform_remote_state.cloudflare_account.outputs.immich_app_archive_pages_project_name : data.terraform_remote_state.cloudflare_account.outputs.immich_app_preview_pages_project_name
domain = "${var.prefix_name}.${local.deploy_domain_prefix}.immich.app"
}
resource "cloudflare_record" "immich_app_branch_subdomain" {
name = "${var.prefix_name}.${local.deploy_domain_prefix}.immich.app"
proxied = true
ttl = 1
type = "CNAME"
content = "${replace(var.prefix_name, "/\\/|\\./", "-")}.${local.is_release ? data.terraform_remote_state.cloudflare_account.outputs.immich_app_archive_pages_project_subdomain : data.terraform_remote_state.cloudflare_account.outputs.immich_app_preview_pages_project_subdomain}"
zone_id = data.terraform_remote_state.cloudflare_account.outputs.immich_app_zone_id
}
output "immich_app_branch_subdomain" {
value = cloudflare_record.immich_app_branch_subdomain.hostname
}
output "immich_app_branch_pages_hostname" {
value = cloudflare_record.immich_app_branch_subdomain.content
}
output "pages_project_name" {
value = cloudflare_pages_domain.immich_app_branch_domain.project_name
}

View File

@@ -1,7 +0,0 @@
locals {
domain_name = "immich.app"
preview_prefix = contains(["branch", "pr"], var.prefix_event_type) ? "preview" : ""
archive_prefix = contains(["release"], var.prefix_event_type) ? "archive" : ""
deploy_domain_prefix = coalesce(local.preview_prefix, local.archive_prefix)
is_release = contains(["release"], var.prefix_event_type)
}

@@ -1,3 +0,0 @@
provider "cloudflare" {
api_token = data.terraform_remote_state.api_keys_state.outputs.terraform_key_cloudflare_docs
}

@@ -1,17 +0,0 @@
data "terraform_remote_state" "api_keys_state" {
backend = "pg"
config = {
conn_str = var.tf_state_postgres_conn_str
schema_name = "prod_cloudflare_api_keys"
}
}
data "terraform_remote_state" "cloudflare_account" {
backend = "pg"
config = {
conn_str = var.tf_state_postgres_conn_str
schema_name = "prod_cloudflare_account"
}
}

@@ -1,24 +0,0 @@
terraform {
source = "."
extra_arguments custom_vars {
commands = get_terraform_commands_that_need_vars()
}
}
include {
path = find_in_parent_folders("state.hcl")
}
locals {
prefix_name = get_env("TF_VAR_prefix_name")
}
remote_state {
backend = "pg"
config = {
conn_str = get_env("TF_STATE_POSTGRES_CONN_STR")
schema_name = "prod_cloudflare_immich_app_docs_${local.prefix_name}"
}
}

@@ -1,5 +0,0 @@
variable "cloudflare_account_id" {}
variable "tf_state_postgres_conn_str" {}
variable "prefix_name" {}
variable "prefix_event_type" {}

@@ -1,20 +0,0 @@
locals {
cloudflare_account_id = get_env("CLOUDFLARE_ACCOUNT_ID")
cloudflare_api_token = get_env("CLOUDFLARE_API_TOKEN")
tf_state_postgres_conn_str = get_env("TF_STATE_POSTGRES_CONN_STR")
}
remote_state {
backend = "pg"
config = {
conn_str = local.tf_state_postgres_conn_str
}
}
inputs = {
cloudflare_account_id = local.cloudflare_account_id
cloudflare_api_token = local.cloudflare_api_token
tf_state_postgres_conn_str = local.tf_state_postgres_conn_str
}

@@ -1,124 +1,89 @@
#
# WARNING: To install Immich, follow our guide: https://immich.app/docs/install/docker-compose
#
# Make sure to use the docker-compose.yml of the current release:
#
# https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
#
# The compose file on main may not be compatible with the latest release.
# For development see:
# See:
# - https://immich.app/docs/developer/setup
# - https://immich.app/docs/developer/troubleshooting
name: immich-dev
x-server-build: &server-common
image: immich-server-dev:latest
build:
context: ../
dockerfile: server/Dockerfile
target: dev
restart: always
volumes:
- ../server:/usr/src/app
- ../open-api:/usr/src/open-api
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
- ${UPLOAD_LOCATION}/photos/upload:/usr/src/app/upload/upload
- /usr/src/app/node_modules
- /etc/localtime:/etc/localtime:ro
env_file:
- .env
ulimits:
nofile:
soft: 1048576
hard: 1048576
services:
immich-server:
container_name: immich_server
command: ['immich-dev']
image: immich-server-dev:latest
command: ['/usr/src/app/bin/immich-dev', 'immich']
<<: *server-common
ports:
- 3001:3001
- 9230:9230
depends_on:
- redis
- database
immich-microservices:
container_name: immich_microservices
command: ['/usr/src/app/bin/immich-dev', 'microservices']
<<: *server-common
# extends:
# file: hwaccel.transcoding.yml
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
build:
context: ../
dockerfile: server/Dockerfile.dev
target: dev
restart: unless-stopped
volumes:
- ..:/usr/src/app
- ${UPLOAD_LOCATION}/photos:/data
- /etc/localtime:/etc/localtime:ro
- pnpm-store:/usr/src/app/.pnpm-store
- server-node_modules:/usr/src/app/server/node_modules
- web-node_modules:/usr/src/app/web/node_modules
- github-node_modules:/usr/src/app/.github/node_modules
- cli-node_modules:/usr/src/app/cli/node_modules
- docs-node_modules:/usr/src/app/docs/node_modules
- e2e-node_modules:/usr/src/app/e2e/node_modules
- sdk-node_modules:/usr/src/app/open-api/typescript-sdk/node_modules
- app-node_modules:/usr/src/app/node_modules
- sveltekit:/usr/src/app/web/.svelte-kit
- coverage:/usr/src/app/web/coverage
env_file:
- .env
environment:
IMMICH_REPOSITORY: immich-app/immich
IMMICH_REPOSITORY_URL: https://github.com/immich-app/immich
IMMICH_SOURCE_REF: local
IMMICH_SOURCE_COMMIT: af2efbdbbddc27cd06142f22253ccbbbbeec1f55
IMMICH_SOURCE_URL: https://github.com/immich-app/immich/commit/af2efbdbbddc27cd06142f22253ccbbbbeec1f55
IMMICH_BUILD: '9654404849'
IMMICH_BUILD_URL: https://github.com/immich-app/immich/actions/runs/9654404849
IMMICH_BUILD_IMAGE: development
IMMICH_BUILD_IMAGE_URL: https://github.com/immich-app/immich/pkgs/container/immich-server
IMMICH_THIRD_PARTY_SOURCE_URL: https://github.com/immich-app/immich/
IMMICH_THIRD_PARTY_BUG_FEATURE_URL: https://github.com/immich-app/immich/issues
IMMICH_THIRD_PARTY_DOCUMENTATION_URL: https://immich.app/docs
IMMICH_THIRD_PARTY_SUPPORT_URL: https://immich.app/docs/community-guides
ulimits:
nofile:
soft: 1048576
hard: 1048576
ports:
- 9230:9230
- 9231:9231
- 2283:2283
- 9231:9230
depends_on:
redis:
condition: service_started
database:
condition: service_started
healthcheck:
disable: false
- database
- immich-server
immich-web:
container_name: immich_web
image: immich-web-dev:latest
build:
context: ../
dockerfile: server/Dockerfile.dev
target: dev
command: ['immich-web']
context: ../web
command: ['/usr/src/app/bin/immich-web']
env_file:
- .env
ports:
- 3000:3000
- 2283:3000
- 24678:24678
volumes:
- ..:/usr/src/app
- pnpm-store:/usr/src/app/.pnpm-store
- server-node_modules:/usr/src/app/server/node_modules
- web-node_modules:/usr/src/app/web/node_modules
- github-node_modules:/usr/src/app/.github/node_modules
- cli-node_modules:/usr/src/app/cli/node_modules
- docs-node_modules:/usr/src/app/docs/node_modules
- e2e-node_modules:/usr/src/app/e2e/node_modules
- sdk-node_modules:/usr/src/app/open-api/typescript-sdk/node_modules
- app-node_modules:/usr/src/app/node_modules
- sveltekit:/usr/src/app/web/.svelte-kit
- coverage:/usr/src/app/web/coverage
- ../web:/usr/src/app
- ../open-api/:/usr/src/open-api/
- /usr/src/app/node_modules
ulimits:
nofile:
soft: 1048576
hard: 1048576
restart: unless-stopped
depends_on:
immich-server:
condition: service_started
- immich-server
immich-machine-learning:
container_name: immich_machine_learning
image: immich-machine-learning-dev:latest
# extends:
# file: hwaccel.ml.yml
# service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference
# service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
build:
context: ../machine-learning
dockerfile: Dockerfile
args:
- DEVICE=cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference
- DEVICE=cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
ports:
- 3003:3003
volumes:
@@ -129,31 +94,26 @@ services:
depends_on:
- database
restart: unless-stopped
healthcheck:
disable: false
redis:
container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:fea8b3e67b15729d4bb70589eb03367bab9ad1ee89c876f54327fc7c6e618571
healthcheck:
test: redis-cli ping || exit 1
image: redis:6.2-alpine@sha256:84882e87b54734154586e5f8abd4dce69fe7311315e2fc6d67c29614c8de2672
database:
container_name: immich_postgres
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:c44be5f2871c59362966d71eab4268170eb6f5653c0e6170184e72b38ffdf107
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
env_file:
- .env
environment:
POSTGRES_PASSWORD: ${DB_PASSWORD}
POSTGRES_USER: ${DB_USERNAME}
POSTGRES_DB: ${DB_DATABASE_NAME}
POSTGRES_INITDB_ARGS: '--data-checksums'
volumes:
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
ports:
- 5432:5432
shm_size: 128mb
# set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
# set IMMICH_METRICS=true in .env to enable metrics
# immich-prometheus:
# container_name: immich_prometheus
# ports:
@@ -178,14 +138,3 @@ volumes:
model-cache:
prometheus-data:
grafana-data:
pnpm-store:
server-node_modules:
web-node_modules:
github-node_modules:
cli-node_modules:
docs-node_modules:
e2e-node_modules:
sdk-node_modules:
app-node_modules:
sveltekit:
coverage:
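
A note on the volume layout above: the dev compose file bind-mounts the repository into the container and then layers a named volume over each node_modules path (plus .pnpm-store, .svelte-kit, and coverage), so dependencies installed inside the container never collide with those on the host. A minimal sketch of the same pattern for a hypothetical single-package service (names are illustrative, not taken from this diff):

services:
  app:
    build: .
    volumes:
      - .:/usr/src/app # bind-mount the source tree for live reload
      - app_node_modules:/usr/src/app/node_modules # named volume shadows the host's node_modules
volumes:
  app_node_modules: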

@@ -1,89 +1,82 @@
#
# WARNING: To install Immich, follow our guide: https://immich.app/docs/install/docker-compose
#
# Make sure to use the docker-compose.yml of the current release:
#
# https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
#
# The compose file on main may not be compatible with the latest release.
name: immich-prod
x-server-build: &server-common
image: immich-server:latest
build:
context: ../
dockerfile: server/Dockerfile
volumes:
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
- /etc/localtime:/etc/localtime:ro
env_file:
- .env
restart: always
services:
immich-server:
container_name: immich_server
image: immich-server:latest
# extends:
# file: hwaccel.transcoding.yml
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
build:
context: ../
dockerfile: server/Dockerfile
volumes:
- ${UPLOAD_LOCATION}/photos:/data
- /etc/localtime:/etc/localtime:ro
env_file:
- .env
command: ['start.sh', 'immich']
<<: *server-common
ports:
- 2283:2283
- 2283:3001
depends_on:
- redis
- database
restart: always
healthcheck:
disable: false
immich-microservices:
container_name: immich_microservices
command: ['start.sh', 'microservices']
<<: *server-common
# extends:
# file: hwaccel.transcoding.yml
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
depends_on:
- redis
- database
- immich-server
immich-machine-learning:
container_name: immich_machine_learning
image: immich-machine-learning:latest
# extends:
# file: hwaccel.ml.yml
# service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference
# service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
build:
context: ../machine-learning
dockerfile: Dockerfile
args:
- DEVICE=cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference
ports:
- 3003:3003
- DEVICE=cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
volumes:
- model-cache:/cache
env_file:
- .env
restart: always
healthcheck:
disable: false
redis:
container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:fea8b3e67b15729d4bb70589eb03367bab9ad1ee89c876f54327fc7c6e618571
healthcheck:
test: redis-cli ping || exit 1
image: redis:6.2-alpine@sha256:84882e87b54734154586e5f8abd4dce69fe7311315e2fc6d67c29614c8de2672
restart: always
database:
container_name: immich_postgres
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:c44be5f2871c59362966d71eab4268170eb6f5653c0e6170184e72b38ffdf107
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
env_file:
- .env
environment:
POSTGRES_PASSWORD: ${DB_PASSWORD}
POSTGRES_USER: ${DB_USERNAME}
POSTGRES_DB: ${DB_DATABASE_NAME}
POSTGRES_INITDB_ARGS: '--data-checksums'
volumes:
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
ports:
- 5432:5432
shm_size: 128mb
restart: always
# set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
# set IMMICH_METRICS=true in .env to enable metrics
immich-prometheus:
container_name: immich_prometheus
ports:
- 9090:9090
image: prom/prometheus@sha256:63805ebb8d2b3920190daf1cb14a60871b16fd38bed42b857a3182bc621f4996
image: prom/prometheus@sha256:4f6c47e39a9064028766e8c95890ed15690c30f00c4ba14e7ce6ae1ded0295b1
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml
- prometheus-data:/prometheus
@@ -95,7 +88,7 @@ services:
command: ['./run.sh', '-disable-reporting']
ports:
- 3000:3000
image: grafana/grafana:12.1.1-ubuntu@sha256:d1da838234ff2de93e0065ee1bf0e66d38f948dcc5d718c25fa6237e14b4424a
image: grafana/grafana:10.4.2-ubuntu@sha256:4f55071b556fb03f12b41423c98a185ed6695ed9ff2558e35805f0dd765fd958
volumes:
- grafana-data:/var/lib/grafana
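
Both revisions of this file lean on YAML anchors: shared server settings are declared once under an x- extension field (x-server-build: &server-common) and merged into each service with <<: *server-common. A minimal sketch of the mechanism, with illustrative service names:

x-common: &common
  image: example/app:latest # shared settings declared once
  env_file:
    - .env
  restart: always
services:
  app:
    <<: *common # merge the shared block into this service
    command: ['start.sh', 'immich']
  worker:
    <<: *common # reuse the same block; only the command differs
    command: ['start.sh', 'microservices']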

@@ -1,11 +1,10 @@
#
# WARNING: To install Immich, follow our guide: https://immich.app/docs/install/docker-compose
#
# Make sure to use the docker-compose.yml of the current release:
# WARNING: Make sure to use the docker-compose.yml of the current release:
#
# https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
#
# The compose file on main may not be compatible with the latest release.
#
name: immich
@@ -13,61 +12,64 @@ services:
immich-server:
container_name: immich_server
image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
# extends:
# file: hwaccel.transcoding.yml
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
command: ['start.sh', 'immich']
volumes:
# Do not edit the next line. If you want to change the media storage location on your system, edit the value of UPLOAD_LOCATION in the .env file
- ${UPLOAD_LOCATION}:/data
- ${UPLOAD_LOCATION}:/usr/src/app/upload
- /etc/localtime:/etc/localtime:ro
env_file:
- .env
ports:
- '2283:2283'
- 2283:3001
depends_on:
- redis
- database
restart: always
immich-microservices:
container_name: immich_microservices
image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
# extends: # uncomment this section for hardware acceleration - see https://immich.app/docs/features/hardware-transcoding
# file: hwaccel.transcoding.yml
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
command: ['start.sh', 'microservices']
volumes:
- ${UPLOAD_LOCATION}:/usr/src/app/upload
- /etc/localtime:/etc/localtime:ro
env_file:
- .env
depends_on:
- redis
- database
restart: always
healthcheck:
disable: false
immich-machine-learning:
container_name: immich_machine_learning
# For hardware acceleration, add one of -[armnn, cuda, rocm, openvino, rknn] to the image tag.
# For hardware acceleration, add one of -[armnn, cuda, openvino] to the image tag.
# Example tag: ${IMMICH_VERSION:-release}-cuda
image: ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release}
# extends: # uncomment this section for hardware acceleration - see https://immich.app/docs/features/ml-hardware-acceleration
# file: hwaccel.ml.yml
# service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference - use the `-wsl` version for WSL2 where applicable
# service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference - use the `-wsl` version for WSL2 where applicable
volumes:
- model-cache:/cache
env_file:
- .env
restart: always
healthcheck:
disable: false
redis:
container_name: immich_redis
image: docker.io/valkey/valkey:8-bookworm@sha256:fea8b3e67b15729d4bb70589eb03367bab9ad1ee89c876f54327fc7c6e618571
healthcheck:
test: redis-cli ping || exit 1
image: registry.hub.docker.com/library/redis:6.2-alpine@sha256:84882e87b54734154586e5f8abd4dce69fe7311315e2fc6d67c29614c8de2672
restart: always
database:
container_name: immich_postgres
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:c44be5f2871c59362966d71eab4268170eb6f5653c0e6170184e72b38ffdf107
image: registry.hub.docker.com/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
environment:
POSTGRES_PASSWORD: ${DB_PASSWORD}
POSTGRES_USER: ${DB_USERNAME}
POSTGRES_DB: ${DB_DATABASE_NAME}
POSTGRES_INITDB_ARGS: '--data-checksums'
# Uncomment the DB_STORAGE_TYPE: 'HDD' var if your database isn't stored on SSDs
# DB_STORAGE_TYPE: 'HDD'
volumes:
# Do not edit the next line. If you want to change the database storage location on your system, edit the value of DB_DATA_LOCATION in the .env file
- ${DB_DATA_LOCATION}:/var/lib/postgresql/data
shm_size: 128mb
restart: always
volumes:
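
For reference, the commented extends stanza above expands to something like the following when hardware acceleration is enabled (cuda is chosen purely as an example; pick the service that matches your hardware, and note the matching -cuda image tag suffix described in the comments above):

services:
  immich-machine-learning:
    container_name: immich_machine_learning
    image: ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release}-cuda
    extends:
      file: hwaccel.ml.yml
      service: cuda # must match a service defined in hwaccel.ml.yml
    volumes:
      - model-cache:/cache
    env_file:
      - .env
    restart: always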

@@ -3,20 +3,17 @@
# The location where your uploaded files are stored
UPLOAD_LOCATION=./library
# The location where your database files are stored. Network shares are not supported for the database
DB_DATA_LOCATION=./postgres
# To set a timezone, uncomment the next line and change Etc/UTC to a TZ identifier from this list: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List
# TZ=Etc/UTC
# The Immich version to use. You can pin this to a specific version like "v1.71.0"
IMMICH_VERSION=release
# Connection secret for postgres. You should change it to a random password
# Please use only the characters `A-Za-z0-9`, without special characters or spaces
DB_PASSWORD=postgres
# The values below this line do not need to be changed
###################################################################################
DB_HOSTNAME=immich_postgres
DB_USERNAME=postgres
DB_DATABASE_NAME=immich
DB_DATA_LOCATION=./postgres
REDIS_HOSTNAME=immich_redis
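
As a worked example, a filled-in version of this file might look like the following (every value below is a placeholder; substitute your own paths and a randomly generated password):

UPLOAD_LOCATION=/mnt/media/immich/library
DB_DATA_LOCATION=/mnt/ssd/immich/postgres
TZ=Etc/UTC
IMMICH_VERSION=v1.71.0
# Placeholder only; generate a random A-Za-z0-9 string
DB_PASSWORD=s3cretDbPassw0rd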

@@ -1,7 +1,9 @@
version: "3.8"
# Configurations for hardware-accelerated machine learning
# If using Unraid or another platform that doesn't allow multiple Compose files,
# you can inline the config for a backend by copying its contents
# into the immich-machine-learning service in the docker-compose.yml file.
# See https://immich.app/docs/features/ml-hardware-acceleration for info on usage.
@@ -13,13 +15,6 @@ services:
volumes:
- /lib/firmware/mali_csffw.bin:/lib/firmware/mali_csffw.bin:ro # Mali firmware for your chipset (not always required depending on the driver)
- /usr/lib/libmali.so:/usr/lib/libmali.so:ro # Mali driver for your chipset (always required)
rknn:
security_opt:
- systempaths=unconfined
- apparmor=unconfined
devices:
- /dev/dri:/dev/dri
cpu: {}
@@ -33,16 +28,9 @@ services:
capabilities:
- gpu
rocm:
group_add:
- video
devices:
- /dev/dri:/dev/dri
- /dev/kfd:/dev/kfd
openvino:
device_cgroup_rules:
- 'c 189:* rmw'
- "c 189:* rmw"
devices:
- /dev/dri:/dev/dri
volumes:
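
As the header comment notes, platforms that cannot load multiple Compose files can inline a backend instead of extending it. A minimal sketch that copies the openvino backend shown above into the main immich-machine-learning service (image tag suffix and device paths as documented above):

services:
  immich-machine-learning:
    image: ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release}-openvino
    device_cgroup_rules:
      - 'c 189:* rmw' # copied verbatim from the openvino backend above
    devices:
      - /dev/dri:/dev/dri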

@@ -1,3 +1,5 @@
version: "3.8"
# Configurations for hardware-accelerated transcoding
# If using Unraid or another platform that doesn't allow multiple Compose files,
@@ -48,8 +50,8 @@ services:
vaapi-wsl: # use this for VAAPI if you're running Immich in WSL2
devices:
- /dev/dri:/dev/dri
- /dev/dxg:/dev/dxg
volumes:
- /usr/lib/wsl:/usr/lib/wsl
environment:
- LD_LIBRARY_PATH=/usr/lib/wsl/lib
- LIBVA_DRIVER_NAME=d3d12
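
The vaapi-wsl entry above is consumed like any other transcoding backend: the main compose file extends it from the immich-server service. A minimal sketch (WSL2 only, per the comment above):

services:
  immich-server:
    extends:
      file: hwaccel.transcoding.yml
      service: vaapi-wsl # use this service only when running Immich under WSL2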

Some files were not shown because too many files have changed in this diff.