Mirror of https://github.com/immich-app/immich.git (synced 2025-12-07 13:21:02 -08:00)

Compare commits: feat/stati ... feat/docke (1 commit)

| Author | SHA1 | Date |
|---|---|---|
| | 23373f39cb | |

2  .devcontainer/Dockerfile  (new file)
@@ -0,0 +1,2 @@
ARG BASEIMAGE=mcr.microsoft.com/devcontainers/typescript-node:22@sha256:9791f4aa527774bc370c6bd2f6705ce5a686f1e6f204badd8dfaacce28c631ae
FROM ${BASEIMAGE}
@@ -1,67 +1,20 @@
{
  "name": "Immich - Backend, Frontend and ML",
  "service": "immich-server",
  "runServices": [
    "immich-server",
    "redis",
    "database",
    "immich-machine-learning"
  ],
  "dockerComposeFile": [
    "../docker/docker-compose.dev.yml",
    "./server/container-compose-overrides.yml"
  ],
  "customizations": {
    "vscode": {
      "extensions": [
        "dbaeumer.vscode-eslint",
        "esbenp.prettier-vscode",
        "svelte.svelte-vscode",
        "ms-vscode-remote.remote-containers",
        "foxundermoon.shell-format",
        "timonwong.shellcheck",
        "rvest.vs-code-prettier-eslint",
        "bluebrown.yamlfmt",
        "vkrishna04.cspell-sync",
        "vitest.explorer",
        "ms-playwright.playwright",
        "ms-azuretools.vscode-docker"
      ]
    }
  },
  "forwardPorts": [3000, 9231, 9230, 2283],
  "portsAttributes": {
    "3000": {
      "label": "Immich - Frontend HTTP",
      "description": "The frontend of the Immich project",
      "onAutoForward": "openBrowserOnce"
    },
    "2283": {
      "label": "Immich - API Server - HTTP",
      "description": "The API server of the Immich project"
    },
    "9231": {
      "label": "Immich - API Server - DEBUG",
      "description": "The API server of the Immich project"
    },
    "9230": {
      "label": "Immich - Workers - DEBUG",
      "description": "The workers of the Immich project"
    }
  },
  "overrideCommand": true,
  "workspaceFolder": "/workspaces/immich",
  "remoteUser": "node",
  "userEnvProbe": "loginInteractiveShell",
  "remoteEnv": {
    // The location where your uploaded files are stored
    "UPLOAD_LOCATION": "${localEnv:UPLOAD_LOCATION:./library}",
    // Connection secret for postgres. You should change it to a random password
    // Please use only the characters `A-Za-z0-9`, without special characters or spaces
    "DB_PASSWORD": "${localEnv:DB_PASSWORD:postgres}",
    // The database username
    "DB_USERNAME": "${localEnv:DB_USERNAME:postgres}",
    // The database name
    "DB_DATABASE_NAME": "${localEnv:DB_DATABASE_NAME:immich}"
  }

  "name": "Immich devcontainers",
  "build": {
    "dockerfile": "Dockerfile",
    "args": {
      "BASEIMAGE": "mcr.microsoft.com/devcontainers/typescript-node:22"
    }
  },
  "customizations": {
    "vscode": {
      "extensions": [
        "svelte.svelte-vscode"
      ]
    }
  },
  "forwardPorts": [],
  "postCreateCommand": "make install-all",
  "remoteUser": "node"
}
@@ -1,40 +0,0 @@
services:
  immich-server:
    build:
      target: dev-container-mobile
    environment:
      - IMMICH_SERVER_URL=http://127.0.0.1:2283/
    volumes: !override # bind mount host to /workspaces/immich
      - ..:/workspaces/immich
      - ${UPLOAD_LOCATION:-upload-devcontainer-volume}${UPLOAD_LOCATION:+/photos}:/data
      - pnpm-store:/usr/src/app/.pnpm-store
      - server-node_modules:/usr/src/app/server/node_modules
      - web-node_modules:/usr/src/app/web/node_modules
      - github-node_modules:/usr/src/app/.github/node_modules
      - cli-node_modules:/usr/src/app/cli/node_modules
      - docs-node_modules:/usr/src/app/docs/node_modules
      - e2e-node_modules:/usr/src/app/e2e/node_modules
      - sdk-node_modules:/usr/src/app/open-api/typescript-sdk/node_modules
      - app-node_modules:/usr/src/app/node_modules
      - sveltekit:/usr/src/app/web/.svelte-kit
      - coverage:/usr/src/app/web/coverage
      - /etc/localtime:/etc/localtime:ro
  immich-web:
    env_file: !reset []
  immich-machine-learning:
    env_file: !reset []
  database:
    env_file: !reset []
    environment: !override
      POSTGRES_PASSWORD: ${DB_PASSWORD-postgres}
      POSTGRES_USER: ${DB_USERNAME-postgres}
      POSTGRES_DB: ${DB_DATABASE_NAME-immich}
      POSTGRES_INITDB_ARGS: '--data-checksums'
      POSTGRES_HOST_AUTH_METHOD: md5
    volumes:
      - ${UPLOAD_LOCATION:-postgres-devcontainer-volume}${UPLOAD_LOCATION:+/postgres}:/var/lib/postgresql/data
  redis:
    env_file: !reset []

volumes:
  upload-devcontainer-volume:
  postgres-devcontainer-volume:
@@ -1,52 +0,0 @@
{
  "name": "Immich - Mobile",
  "service": "immich-server",
  "runServices": [
    "immich-server",
    "redis",
    "database",
    "immich-machine-learning"
  ],
  "dockerComposeFile": [
    "../../docker/docker-compose.dev.yml",
    "./container-compose-overrides.yml"
  ],
  "customizations": {
    "vscode": {
      "extensions": [
        "Dart-Code.dart-code",
        "Dart-Code.flutter",
        "dcmdev.dcm-vscode-extension",
        "esbenp.prettier-vscode",
        "dbaeumer.vscode-eslint",
        "esbenp.prettier-vscode",
        "svelte.svelte-vscode",
        "ms-vscode-remote.remote-containers",
        "foxundermoon.shell-format",
        "timonwong.shellcheck",
        "rvest.vs-code-prettier-eslint",
        "bluebrown.yamlfmt",
        "vkrishna04.cspell-sync",
        "vitest.explorer",
        "ms-playwright.playwright",
        "ms-azuretools.vscode-docker"
      ]
    }
  },
  "forwardPorts": [],
  "overrideCommand": true,
  "workspaceFolder": "/workspaces/immich",
  "remoteUser": "node",
  "userEnvProbe": "loginInteractiveShell",
  "remoteEnv": {
    // The location where your uploaded files are stored
    "UPLOAD_LOCATION": "${localEnv:UPLOAD_LOCATION:./library}",
    // Connection secret for postgres. You should change it to a random password
    // Please use only the characters `A-Za-z0-9`, without special characters or spaces
    "DB_PASSWORD": "${localEnv:DB_PASSWORD:postgres}",
    // The database username
    "DB_USERNAME": "${localEnv:DB_USERNAME:postgres}",
    // The database name
    "DB_DATABASE_NAME": "${localEnv:DB_DATABASE_NAME:immich}"
  }
}
@@ -1,81 +0,0 @@
#!/bin/bash
export IMMICH_PORT="${DEV_SERVER_PORT:-2283}"
export DEV_PORT="${DEV_PORT:-3000}"

# Search for the immich directory inside the workspace.
# /workspaces/immich is the bind mount, but other directories can be mounted if running
# Devcontainer: Clone [repository|pull request] in container volume
WORKSPACES_DIR="/workspaces"
IMMICH_DIR="$WORKSPACES_DIR/immich"
IMMICH_DEVCONTAINER_LOG="$HOME/immich-devcontainer.log"

log() {
    # Display command on console, log with timestamp to file
    echo "$*"
    echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*" >>"$IMMICH_DEVCONTAINER_LOG"
}

run_cmd() {
    # Ensure log directory exists
    mkdir -p "$(dirname "$IMMICH_DEVCONTAINER_LOG")"

    log "$@"

    # Execute command: display normally on console, log with timestamps to file
    "$@" 2>&1 | tee >(while IFS= read -r line; do
        echo "[$(date '+%Y-%m-%d %H:%M:%S')] $line" >>"$IMMICH_DEVCONTAINER_LOG"
    done)

    # Preserve exit status
    return "${PIPESTATUS[0]}"
}

# Find directories excluding /workspaces/immich
mapfile -t other_dirs < <(find "$WORKSPACES_DIR" -mindepth 1 -maxdepth 1 -type d ! -path "$IMMICH_DIR" ! -name ".*")

if [ ${#other_dirs[@]} -gt 1 ]; then
    log "Error: More than one directory found in $WORKSPACES_DIR other than $IMMICH_DIR."
    exit 1
elif [ ${#other_dirs[@]} -eq 1 ]; then
    export IMMICH_WORKSPACE="${other_dirs[0]}"
else
    export IMMICH_WORKSPACE="$IMMICH_DIR"
fi

log "Found immich workspace in $IMMICH_WORKSPACE"
log ""

fix_permissions() {
    log "Fixing permissions for ${IMMICH_WORKSPACE}"

    # Change ownership for directories that exist
    for dir in "${IMMICH_WORKSPACE}/.vscode" \
        "${IMMICH_WORKSPACE}/server/upload" \
        "${IMMICH_WORKSPACE}/.pnpm-store" \
        "${IMMICH_WORKSPACE}/.github/node_modules" \
        "${IMMICH_WORKSPACE}/cli/node_modules" \
        "${IMMICH_WORKSPACE}/e2e/node_modules" \
        "${IMMICH_WORKSPACE}/open-api/typescript-sdk/node_modules" \
        "${IMMICH_WORKSPACE}/server/node_modules" \
        "${IMMICH_WORKSPACE}/server/dist" \
        "${IMMICH_WORKSPACE}/web/node_modules" \
        "${IMMICH_WORKSPACE}/web/dist"; do
        if [ -d "$dir" ]; then
            run_cmd sudo chown node -R "$dir"
        fi
    done

    log ""
}

install_dependencies() {
    log "Installing dependencies"
    (
        cd "${IMMICH_WORKSPACE}" || exit 1
        export CI=1 FROZEN=1 OFFLINE=1
        run_cmd make setup-web-dev setup-server-dev
    )
    log ""
}
@@ -1,42 +0,0 @@
services:
  immich-server:
    build:
      target: dev-container-server
    env_file: !reset []
    hostname: immich-dev
    environment:
      - IMMICH_SERVER_URL=http://127.0.0.1:2283/
    volumes: !override
      - ..:/workspaces/immich
      - ${UPLOAD_LOCATION:-upload-devcontainer-volume}${UPLOAD_LOCATION:+/photos}:/data
      - /etc/localtime:/etc/localtime:ro
      - pnpm-store:/usr/src/app/.pnpm-store
      - server-node_modules:/usr/src/app/server/node_modules
      - web-node_modules:/usr/src/app/web/node_modules
      - github-node_modules:/usr/src/app/.github/node_modules
      - cli-node_modules:/usr/src/app/cli/node_modules
      - docs-node_modules:/usr/src/app/docs/node_modules
      - e2e-node_modules:/usr/src/app/e2e/node_modules
      - sdk-node_modules:/usr/src/app/open-api/typescript-sdk/node_modules
      - app-node_modules:/usr/src/app/node_modules
      - sveltekit:/usr/src/app/web/.svelte-kit
      - coverage:/usr/src/app/web/coverage
  immich-web:
    env_file: !reset []
  immich-machine-learning:
    env_file: !reset []
  database:
    env_file: !reset []
    environment: !override
      POSTGRES_PASSWORD: ${DB_PASSWORD-postgres}
      POSTGRES_USER: ${DB_USERNAME-postgres}
      POSTGRES_DB: ${DB_DATABASE_NAME-immich}
      POSTGRES_INITDB_ARGS: '--data-checksums'
      POSTGRES_HOST_AUTH_METHOD: md5
    volumes:
      - ${UPLOAD_LOCATION:-postgres-devcontainer-volume}${UPLOAD_LOCATION:+/postgres}:/var/lib/postgresql/data
  redis:
    env_file: !reset []

volumes:
  upload-devcontainer-volume:
  postgres-devcontainer-volume:
@@ -1,22 +0,0 @@
#!/bin/bash
# shellcheck source=common.sh
# shellcheck disable=SC1091
source /immich-devcontainer/container-common.sh

log "Preparing Immich Nest API Server"
log ""
export CI=1
run_cmd pnpm --filter immich install

log "Starting Nest API Server"
log ""
cd "${IMMICH_WORKSPACE}/server" || (
    log "Immich workspace not found"
    exit 1
)

while true; do
    run_cmd pnpm --filter immich exec nest start --debug "0.0.0.0:9230" --watch
    log "Nest API Server crashed with exit code $?. Respawning in 3s ..."
    sleep 3
done
@@ -1,29 +0,0 @@
#!/bin/bash
# shellcheck source=common.sh
# shellcheck disable=SC1091
source /immich-devcontainer/container-common.sh

export CI=1
log "Preparing Immich Web Frontend"
log ""
run_cmd pnpm --filter @immich/sdk install
run_cmd pnpm --filter @immich/sdk build
run_cmd pnpm --filter immich-web install

log "Starting Immich Web Frontend"
log ""
cd "${IMMICH_WORKSPACE}/web" || (
    log "Immich Workspace not found"
    exit 1
)

until curl --output /dev/null --silent --head --fail "http://127.0.0.1:${IMMICH_PORT}/api/server/config"; do
    log "Waiting for api server..."
    sleep 1
done

while true; do
    run_cmd pnpm --filter immich-web exec vite dev --host 0.0.0.0 --port "${DEV_PORT}"
    log "Web crashed with exit code $?. Respawning in 3s ..."
    sleep 3
done
@@ -1,17 +0,0 @@
#!/bin/bash
# shellcheck source=common.sh
# shellcheck disable=SC1091
source /immich-devcontainer/container-common.sh

log "Setting up Immich dev container..."
fix_permissions

log "Setup complete, please wait while backend and frontend services automatically start"
log
log "If necessary, the services may be manually started using"
log
log "$ /immich-devcontainer/container-start-backend.sh"
log "$ /immich-devcontainer/container-start-frontend.sh"
log
log "From different terminal windows, as these scripts automatically restart the server"
log "on error, and will continuously run in a loop"
@@ -1,41 +1,33 @@
.vscode/
.github/
.git/
.env*
*.log
*.tmp
*.temp

**/Dockerfile
**/node_modules/
**/.pnpm-store/
**/dist/
**/coverage/
**/build/

design/
docker/
!docker/scripts

docs/
!docs/package.json
!docs/package-lock.json

e2e/
!e2e/package.json
!e2e/package-lock.json

fastlane/
machine-learning/
misc/
mobile/

open-api/typescript-sdk/build/
!open-api/typescript-sdk/package.json
!open-api/typescript-sdk/package-lock.json
cli/coverage/
cli/dist/
cli/node_modules/

open-api/typescript-sdk/build/
open-api/typescript-sdk/node_modules/

server/coverage/
server/node_modules/
server/upload/
server/src/queries
server/dist/
server/www/

web/node_modules/
web/coverage/
web/.svelte-kit
web/build/
web/.env
12  .gitattributes  (vendored)
@@ -6,18 +6,6 @@ mobile/openapi/**/*.dart linguist-generated=true
mobile/lib/**/*.g.dart -diff -merge
mobile/lib/**/*.g.dart linguist-generated=true

mobile/lib/**/*.drift.dart -diff -merge
mobile/lib/**/*.drift.dart linguist-generated=true

mobile/drift_schemas/main/drift_schema_*.json -diff -merge
mobile/drift_schemas/main/drift_schema_*.json linguist-generated=true

mobile/lib/infrastructure/repositories/db.repository.steps.dart -diff -merge
mobile/lib/infrastructure/repositories/db.repository.steps.dart linguist-generated=true

mobile/test/drift/main/generated/** -diff -merge
mobile/test/drift/main/generated/** linguist-generated=true

open-api/typescript-sdk/fetch-client.ts -diff -merge
open-api/typescript-sdk/fetch-client.ts linguist-generated=true
1  .github/.nvmrc  (vendored)
@@ -1 +0,0 @@
22.20.0
@@ -1,5 +1,5 @@
title: '[Feature] feature-name-goes-here'
labels: ['feature']
title: "[Feature] feature-name-goes-here"
labels: ["feature"]

body:
  - type: markdown
@@ -11,9 +11,10 @@ body:

  - type: checkboxes
    attributes:
      label: I have searched the existing feature requests, both open and closed, to make sure this is not a duplicate request.
      label: I have searched the existing feature requests to make sure this is not a duplicate request.
      options:
        - label: 'Yes'
        - label: "Yes"
          required: true

  - type: textarea
    id: feature
2  .github/FUNDING.yml  (vendored)
@@ -1 +1 @@
custom: ['https://buy.immich.app', 'https://immich.store']
custom: ['https://buy.immich.app']
18  .github/ISSUE_TEMPLATE/bug_report.yaml  (vendored)
@@ -1,12 +1,6 @@
name: Report an issue with Immich
description: Report an issue with Immich
body:
  - type: checkboxes
    attributes:
      label: I have searched the existing issues, both open and closed, to make sure this is not a duplicate report.
      options:
        - label: 'Yes'

  - type: markdown
    attributes:
      value: |
@@ -64,11 +58,6 @@ body:
      - label: Web
      - label: Mobile

  - type: input
    attributes:
      label: Device make and model
      placeholder: Samsung S25 Android 16

  - type: textarea
    validations:
      required: true
@@ -88,7 +77,7 @@ body:
    id: repro
    attributes:
      label: Reproduction steps
      description: 'How do you trigger this bug? Please walk us through it step by step.'
      description: "How do you trigger this bug? Please walk us through it step by step."
      value: |
        1.
        2.
@@ -101,13 +90,12 @@ body:
    id: logs
    attributes:
      label: Relevant log output
      description:
        Please copy and paste any relevant logs below. (code formatting is
      description: Please copy and paste any relevant logs below. (code formatting is
        enabled, no need for backticks)
      render: shell
    validations:
      required: false

  - type: textarea
    attributes:
      label: Additional information
1  .github/PULL_REQUEST_TEMPLATE/config.yml  (vendored)
@@ -1 +1,2 @@
blank_issues_enabled: false
blank_pull_request_template_enabled: false
22  .github/PULL_REQUEST_TEMPLATE/pull_request_template.md  (vendored, new file)
@@ -0,0 +1,22 @@
## Description
<!--- Describe your changes in detail -->
<!--- Why is this change required? What problem does it solve? -->
<!--- If it fixes an open issue, please link to the issue here. -->

Fixes # (issue)

## How Has This Been Tested?

<!-- Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration -->

- [ ] Test A
- [ ] Test B

## Screenshots (if appropriate):

## Checklist:

- [ ] I have performed a self-review of my own code
- [ ] I have made corresponding changes to the documentation if applicable
1  .github/labeler.yml  (vendored)
@@ -6,6 +6,7 @@ cli:
documentation:
  - changed-files:
      - any-glob-to-any-file:
          - docs/blob/**
          - docs/docs/**
          - docs/src/**
          - docs/static/**
9  .github/package.json  (vendored)
@@ -1,9 +0,0 @@
{
  "scripts": {
    "format": "prettier --check .",
    "format:fix": "prettier --write ."
  },
  "devDependencies": {
    "prettier": "^3.5.3"
  }
}
40  .github/pull_request_template.md  (vendored)
@@ -1,40 +0,0 @@
## Description

<!--- Describe your changes in detail -->
<!--- Why is this change required? What problem does it solve? -->
<!--- If it fixes an open issue, please link to the issue here. -->

Fixes # (issue)

## How Has This Been Tested?

<!-- Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration -->

- [ ] Test A
- [ ] Test B

<details><summary><h2>Screenshots (if appropriate)</h2></summary>

<!-- Images go below this line. -->

</details>

<!-- API endpoint changes (if relevant)
## API Changes
The `/api/something` endpoint is now `/api/something-else`
-->

## Checklist:

- [ ] I have performed a self-review of my own code
- [ ] I have made corresponding changes to the documentation if applicable
- [ ] I have no unrelated changes in the PR.
- [ ] I have confirmed that any new dependencies are strictly necessary.
- [ ] I have written tests for new code (if applicable)
- [ ] I have followed naming conventions/patterns in the surrounding code
- [ ] All code in `src/services/` uses repositories implementations for database calls, filesystem operations, etc.
- [ ] All code in `src/repositories/` is pretty basic/simple and does not have any immich specific logic (that belongs in `src/services/`)

## Please describe to which degree, if any, an LLM was used in creating this pull request.

...
66  .github/release.yml  (vendored)
@@ -1,33 +1,33 @@
|
||||
changelog:
|
||||
categories:
|
||||
- title: 🚨 Breaking Changes
|
||||
labels:
|
||||
- changelog:breaking-change
|
||||
|
||||
- title: 🫥 Deprecated Changes
|
||||
labels:
|
||||
- changelog:deprecated
|
||||
|
||||
- title: 🔒 Security
|
||||
labels:
|
||||
- changelog:security
|
||||
|
||||
- title: 🚀 Features
|
||||
labels:
|
||||
- changelog:feature
|
||||
|
||||
- title: 🌟 Enhancements
|
||||
labels:
|
||||
- changelog:enhancement
|
||||
|
||||
- title: 🐛 Bug fixes
|
||||
labels:
|
||||
- changelog:bugfix
|
||||
|
||||
- title: 📚 Documentation
|
||||
labels:
|
||||
- changelog:documentation
|
||||
|
||||
- title: 🌐 Translations
|
||||
labels:
|
||||
- changelog:translation
|
||||
changelog:
|
||||
categories:
|
||||
- title: 🚨 Breaking Changes
|
||||
labels:
|
||||
- changelog:breaking-change
|
||||
|
||||
- title: 🫥 Deprecated Changes
|
||||
labels:
|
||||
- changelog:deprecated
|
||||
|
||||
- title: 🔒 Security
|
||||
labels:
|
||||
- changelog:security
|
||||
|
||||
- title: 🚀 Features
|
||||
labels:
|
||||
- changelog:feature
|
||||
|
||||
- title: 🌟 Enhancements
|
||||
labels:
|
||||
- changelog:enhancement
|
||||
|
||||
- title: 🐛 Bug fixes
|
||||
labels:
|
||||
- changelog:bugfix
|
||||
|
||||
- title: 📚 Documentation
|
||||
labels:
|
||||
- changelog:documentation
|
||||
|
||||
- title: 🌐 Translations
|
||||
labels:
|
||||
- changelog:translation
|
||||
|
||||
125  .github/workflows/build-mobile.yml  (vendored)
@@ -7,15 +7,6 @@ on:
|
||||
ref:
|
||||
required: false
|
||||
type: string
|
||||
secrets:
|
||||
KEY_JKS:
|
||||
required: true
|
||||
ALIAS:
|
||||
required: true
|
||||
ANDROID_KEY_PASSWORD:
|
||||
required: true
|
||||
ANDROID_STORE_PASSWORD:
|
||||
required: true
|
||||
pull_request:
|
||||
push:
|
||||
branches: [main]
|
||||
@@ -24,133 +15,79 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
pre-job:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should_run: ${{ steps.check.outputs.should_run }}
|
||||
should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
steps:
|
||||
- id: token
|
||||
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@v3
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: Check what should run
|
||||
id: check
|
||||
uses: immich-app/devtools/actions/pre-job@08bac802a312fc89808e0dd589271ca0974087b5 # pre-job-action-v2.0.0
|
||||
with:
|
||||
github-token: ${{ steps.token.outputs.token }}
|
||||
filters: |
|
||||
mobile:
|
||||
- 'mobile/**'
|
||||
force-filters: |
|
||||
- '.github/workflows/build-mobile.yml'
|
||||
force-events: 'workflow_call,workflow_dispatch'
|
||||
- name: Check if we should force jobs to run
|
||||
id: should_force
|
||||
run: echo "should_force=${{ github.event_name == 'workflow_call' || github.event_name == 'workflow_dispatch' }}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
build-sign-android:
|
||||
name: Build and sign Android
|
||||
needs: pre-job
|
||||
permissions:
|
||||
contents: read
|
||||
# Skip when PR from a fork
|
||||
if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' && fromJSON(needs.pre-job.outputs.should_run).mobile == true }}
|
||||
runs-on: mich
|
||||
if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' && needs.pre-job.outputs.should_run == 'true' }}
|
||||
runs-on: macos-14
|
||||
|
||||
steps:
|
||||
- id: token
|
||||
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
|
||||
- name: Determine ref
|
||||
id: get-ref
|
||||
run: |
|
||||
input_ref="${{ inputs.ref }}"
|
||||
github_ref="${{ github.sha }}"
|
||||
ref="${input_ref:-$github_ref}"
|
||||
echo "ref=$ref" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
ref: ${{ steps.get-ref.outputs.ref }}
|
||||
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
ref: ${{ inputs.ref || github.sha }}
|
||||
persist-credentials: false
|
||||
token: ${{ steps.token.outputs.token }}
|
||||
|
||||
- name: Create the Keystore
|
||||
env:
|
||||
KEY_JKS: ${{ secrets.KEY_JKS }}
|
||||
working-directory: ./mobile
|
||||
run: printf "%s" $KEY_JKS | base64 -d > android/key.jks
|
||||
|
||||
- uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0
|
||||
- uses: actions/setup-java@v4
|
||||
with:
|
||||
distribution: 'zulu'
|
||||
java-version: '17'
|
||||
|
||||
- name: Restore Gradle Cache
|
||||
id: cache-gradle-restore
|
||||
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: |
|
||||
~/.gradle/caches
|
||||
~/.gradle/wrapper
|
||||
~/.android/sdk
|
||||
mobile/android/.gradle
|
||||
mobile/.dart_tool
|
||||
key: build-mobile-gradle-${{ runner.os }}-main
|
||||
cache: 'gradle'
|
||||
|
||||
- name: Setup Flutter SDK
|
||||
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
|
||||
uses: subosito/flutter-action@v2
|
||||
with:
|
||||
channel: 'stable'
|
||||
flutter-version-file: ./mobile/pubspec.yaml
|
||||
cache: true
|
||||
|
||||
- name: Setup Android SDK
|
||||
uses: android-actions/setup-android@9fc6c4e9069bf8d3d10b2204b1fb8f6ef7065407 # v3.2.2
|
||||
with:
|
||||
packages: ''
|
||||
- name: Create the Keystore
|
||||
env:
|
||||
KEY_JKS: ${{ secrets.KEY_JKS }}
|
||||
working-directory: ./mobile
|
||||
run: echo $KEY_JKS | base64 -d > android/key.jks
|
||||
|
||||
- name: Get Packages
|
||||
working-directory: ./mobile
|
||||
run: flutter pub get
|
||||
|
||||
- name: Generate translation file
|
||||
run: dart run easy_localization:generate -S ../i18n && dart run bin/generate_keys.dart
|
||||
working-directory: ./mobile
|
||||
|
||||
- name: Generate platform APIs
|
||||
run: make pigeon
|
||||
working-directory: ./mobile
|
||||
|
||||
- name: Build Android App Bundle
|
||||
working-directory: ./mobile
|
||||
env:
|
||||
ALIAS: ${{ secrets.ALIAS }}
|
||||
ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
|
||||
ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
|
||||
IS_MAIN: ${{ github.ref == 'refs/heads/main' }}
|
||||
run: |
|
||||
if [[ $IS_MAIN == 'true' ]]; then
|
||||
flutter build apk --release
|
||||
flutter build apk --release --split-per-abi --target-platform android-arm,android-arm64,android-x64
|
||||
else
|
||||
flutter build apk --debug --split-per-abi --target-platform android-arm64
|
||||
fi
|
||||
flutter build apk --release
|
||||
flutter build apk --release --split-per-abi --target-platform android-arm,android-arm64,android-x64
|
||||
|
||||
- name: Publish Android Artifact
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: release-apk-signed
|
||||
path: mobile/build/app/outputs/flutter-apk/*.apk
|
||||
|
||||
- name: Save Gradle Cache
|
||||
id: cache-gradle-save
|
||||
uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
if: github.ref == 'refs/heads/main'
|
||||
with:
|
||||
path: |
|
||||
~/.gradle/caches
|
||||
~/.gradle/wrapper
|
||||
~/.android/sdk
|
||||
mobile/android/.gradle
|
||||
mobile/.dart_tool
|
||||
key: ${{ steps.cache-gradle-restore.outputs.cache-primary-key }}
|
||||
|
||||
26  .github/workflows/cache-cleanup.yml  (vendored)
@@ -8,45 +8,31 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
cleanup:
|
||||
name: Cleanup
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
actions: write
|
||||
steps:
|
||||
- id: token
|
||||
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: Check out code
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
token: ${{ steps.token.outputs.token }}
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Cleanup
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.token.outputs.token }}
|
||||
REF: ${{ github.ref }}
|
||||
run: |
|
||||
gh extension install actions/gh-actions-cache
|
||||
|
||||
REPO=${{ github.repository }}
|
||||
BRANCH=${{ github.ref }}
|
||||
|
||||
echo "Fetching list of cache keys"
|
||||
cacheKeysForPR=$(gh actions-cache list -R $REPO -B ${REF} -L 100 | cut -f 1 )
|
||||
cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )
|
||||
|
||||
## Setting this to not fail the workflow while deleting cache keys.
|
||||
set +e
|
||||
echo "Deleting caches..."
|
||||
for cacheKey in $cacheKeysForPR
|
||||
do
|
||||
gh actions-cache delete $cacheKey -R "$REPO" -B "${REF}" --confirm
|
||||
gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
|
||||
done
|
||||
echo "Done"
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
69  .github/workflows/cli.yml  (vendored)
@@ -6,6 +6,7 @@ on:
|
||||
- 'cli/**'
|
||||
- '.github/workflows/cli.yml'
|
||||
pull_request:
|
||||
branches: [main]
|
||||
paths:
|
||||
- 'cli/**'
|
||||
- '.github/workflows/cli.yml'
|
||||
@@ -16,48 +17,31 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
permissions:
|
||||
packages: write
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
name: CLI Publish
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./cli
|
||||
|
||||
steps:
|
||||
- id: token
|
||||
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
token: ${{ steps.token.outputs.token }}
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
|
||||
- uses: actions/checkout@v4
|
||||
# Setup .npmrc file to publish to npm
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: './cli/.nvmrc'
|
||||
registry-url: 'https://registry.npmjs.org'
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: '**/pnpm-lock.yaml'
|
||||
|
||||
- name: Setup typescript-sdk
|
||||
run: pnpm install && pnpm run build
|
||||
working-directory: ./open-api/typescript-sdk
|
||||
|
||||
- run: pnpm install --frozen-lockfile
|
||||
- run: pnpm build
|
||||
- run: pnpm publish --no-git-checks
|
||||
- name: Prepare SDK
|
||||
run: npm ci --prefix ../open-api/typescript-sdk/
|
||||
- name: Build SDK
|
||||
run: npm run build --prefix ../open-api/typescript-sdk/
|
||||
- run: npm ci
|
||||
- run: npm run build
|
||||
- run: npm publish
|
||||
if: ${{ github.event_name == 'release' }}
|
||||
env:
|
||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
@@ -65,37 +49,25 @@ jobs:
|
||||
docker:
|
||||
name: Docker
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
needs: publish
|
||||
|
||||
steps:
|
||||
- id: token
|
||||
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
token: ${{ steps.token.outputs.token }}
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
|
||||
uses: docker/setup-qemu-action@v3.2.0
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
uses: docker/setup-buildx-action@v3.8.0
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
uses: docker/login-action@v3
|
||||
if: ${{ !github.event.pull_request.head.repo.fork }}
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ steps.token.outputs.token }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Get package version
|
||||
id: package-version
|
||||
@@ -105,7 +77,7 @@ jobs:
|
||||
|
||||
- name: Generate docker image tags
|
||||
id: metadata
|
||||
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
flavor: |
|
||||
latest=false
|
||||
@@ -116,7 +88,7 @@ jobs:
|
||||
type=raw,value=latest,enable=${{ github.event_name == 'release' }}
|
||||
|
||||
- name: Build and push image
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
uses: docker/build-push-action@v6.10.0
|
||||
with:
|
||||
file: cli/Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
@@ -125,4 +97,3 @@ jobs:
|
||||
cache-to: type=gha,mode=max
|
||||
tags: ${{ steps.metadata.outputs.tags }}
|
||||
labels: ${{ steps.metadata.outputs.labels }}
|
||||
github-token: ${{ steps.token.outputs.token }}
|
||||
|
||||
107  .github/workflows/close-duplicates.yml  (vendored)
@@ -1,107 +0,0 @@
|
||||
on:
|
||||
issues:
|
||||
types: [opened]
|
||||
discussion:
|
||||
types: [created]
|
||||
|
||||
name: Close likely duplicates
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
should_run:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
should_run: ${{ steps.should_run.outputs.run }}
|
||||
steps:
|
||||
- id: should_run
|
||||
run: echo "run=${{ github.event_name == 'issues' || github.event.discussion.category.name == 'Feature Request' }}" >> $GITHUB_OUTPUT
|
||||
|
||||
get_body:
|
||||
runs-on: ubuntu-latest
|
||||
needs: should_run
|
||||
if: ${{ needs.should_run.outputs.should_run == 'true' }}
|
||||
env:
|
||||
EVENT: ${{ toJSON(github.event) }}
|
||||
outputs:
|
||||
body: ${{ steps.get_body.outputs.body }}
|
||||
steps:
|
||||
- id: get_body
|
||||
run: |
|
||||
BODY=$(echo """$EVENT""" | jq -r '.issue // .discussion | .body' | base64 -w 0)
|
||||
echo "body=$BODY" >> $GITHUB_OUTPUT
|
||||
|
||||
get_checkbox_json:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [get_body, should_run]
|
||||
if: ${{ needs.should_run.outputs.should_run == 'true' }}
|
||||
container:
|
||||
image: ghcr.io/immich-app/mdq:main@sha256:6b8450bfc06770af1af66bce9bf2ced7d1d9b90df1a59fc4c83a17777a9f6723
|
||||
outputs:
|
||||
checked: ${{ steps.get_checkbox.outputs.checked }}
|
||||
steps:
|
||||
- id: get_checkbox
|
||||
env:
|
||||
BODY: ${{ needs.get_body.outputs.body }}
|
||||
run: |
|
||||
CHECKED=$(echo "$BODY" | base64 -d | /mdq --output json '# I have searched | - [?] Yes' | jq '.items[0].list[0].checked // false')
|
||||
echo "checked=$CHECKED" >> $GITHUB_OUTPUT
|
||||
|
||||
close_and_comment:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [get_checkbox_json, should_run]
|
||||
if: ${{ needs.should_run.outputs.should_run == 'true' && needs.get_checkbox_json.outputs.checked != 'true' }}
|
||||
permissions:
|
||||
issues: write
|
||||
discussions: write
|
||||
steps:
|
||||
- name: Close issue
|
||||
if: ${{ github.event_name == 'issues' }}
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
NODE_ID: ${{ github.event.issue.node_id }}
|
||||
run: |
|
||||
gh api graphql \
|
||||
-f issueId="$NODE_ID" \
|
||||
-f body="This issue has automatically been closed as it is likely a duplicate. We get a lot of duplicate threads each day, which is why we ask you in the template to confirm that you searched for duplicates before opening one. If you're sure this is not a duplicate, please leave a comment and we will reopen the thread if necessary." \
|
||||
-f query='
|
||||
mutation CommentAndCloseIssue($issueId: ID!, $body: String!) {
|
||||
addComment(input: {
|
||||
subjectId: $issueId,
|
||||
body: $body
|
||||
}) {
|
||||
__typename
|
||||
}
|
||||
|
||||
closeIssue(input: {
|
||||
issueId: $issueId,
|
||||
stateReason: DUPLICATE
|
||||
}) {
|
||||
__typename
|
||||
}
|
||||
}'
|
||||
|
||||
- name: Close discussion
|
||||
if: ${{ github.event_name == 'discussion' && github.event.discussion.category.name == 'Feature Request' }}
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
NODE_ID: ${{ github.event.discussion.node_id }}
|
||||
run: |
|
||||
gh api graphql \
|
||||
-f discussionId="$NODE_ID" \
|
||||
-f body="This discussion has automatically been closed as it is likely a duplicate. We get a lot of duplicate threads each day, which is why we ask you in the template to confirm that you searched for duplicates before opening one. If you're sure this is not a duplicate, please leave a comment and we will reopen the thread if necessary." \
|
||||
-f query='
|
||||
mutation CommentAndCloseDiscussion($discussionId: ID!, $body: String!) {
|
||||
addDiscussionComment(input: {
|
||||
discussionId: $discussionId,
|
||||
body: $body
|
||||
}) {
|
||||
__typename
|
||||
}
|
||||
|
||||
closeDiscussion(input: {
|
||||
discussionId: $discussionId,
|
||||
reason: DUPLICATE
|
||||
}) {
|
||||
__typename
|
||||
}
|
||||
}'
|
||||
72  .github/workflows/codeql-analysis.yml  (vendored)
@@ -9,14 +9,14 @@
|
||||
# the `language` matrix defined below to confirm you have the correct set of
|
||||
# supported CodeQL languages.
|
||||
#
|
||||
name: 'CodeQL'
|
||||
name: "CodeQL"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ['main']
|
||||
branches: [ "main" ]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: ['main']
|
||||
branches: [ "main" ]
|
||||
schedule:
|
||||
- cron: '20 13 * * 1'
|
||||
|
||||
@@ -24,8 +24,6 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
@@ -38,51 +36,43 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: ['javascript', 'python']
|
||||
language: [ 'javascript', 'python' ]
|
||||
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
|
||||
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
|
||||
|
||||
steps:
|
||||
- id: token
|
||||
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
token: ${{ steps.token.outputs.token }}
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@755f44910c12a3d7ca0d8c6e42c048b3362f7cec # v3.30.8
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
|
||||
# queries: security-extended,security-and-quality
|
||||
|
||||
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
|
||||
# queries: security-extended,security-and-quality
|
||||
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@755f44910c12a3d7ca0d8c6e42c048b3362f7cec # v3.30.8
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v3
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||
|
||||
# If the Autobuild fails above, remove it and uncomment the following three lines.
|
||||
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
|
||||
# If the Autobuild fails above, remove it and uncomment the following three lines.
|
||||
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
|
||||
|
||||
# - run: |
|
||||
# echo "Run, Build Application using script"
|
||||
# ./location_of_script_within_repo/buildscript.sh
|
||||
# - run: |
|
||||
# echo "Run, Build Application using script"
|
||||
# ./location_of_script_within_repo/buildscript.sh
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@755f44910c12a3d7ca0d8c6e42c048b3362f7cec # v3.30.8
|
||||
with:
|
||||
category: '/language:${{matrix.language}}'
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
|
||||
73  .github/workflows/docker-cleanup.yml  (vendored, new file)
@@ -0,0 +1,73 @@
|
||||
# This workflow runs on certain conditions to check for and potentially
|
||||
# delete container images from the GHCR which no longer have an associated
|
||||
# code branch.
|
||||
# Requires a PAT with the correct scope set in the secrets.
|
||||
#
|
||||
# This workflow will not trigger runs on forked repos.
|
||||
|
||||
name: Docker Cleanup
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types:
|
||||
- "closed"
|
||||
push:
|
||||
paths:
|
||||
- ".github/workflows/docker-cleanup.yml"
|
||||
|
||||
concurrency:
|
||||
group: registry-tags-cleanup
|
||||
cancel-in-progress: false
|
||||
|
||||
jobs:
|
||||
cleanup-images:
|
||||
name: Cleanup Stale Images Tags for ${{ matrix.primary-name }}
|
||||
runs-on: ubuntu-24.04
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- primary-name: "immich-server"
|
||||
- primary-name: "immich-machine-learning"
|
||||
env:
|
||||
# Requires a personal access token with the OAuth scope delete:packages
|
||||
TOKEN: ${{ secrets.PACKAGE_DELETE_TOKEN }}
|
||||
steps:
|
||||
- name: Clean temporary images
|
||||
if: "${{ env.TOKEN != '' }}"
|
||||
uses: stumpylog/image-cleaner-action/ephemeral@v0.9.0
|
||||
with:
|
||||
token: "${{ env.TOKEN }}"
|
||||
owner: "immich-app"
|
||||
is_org: "true"
|
||||
do_delete: "true"
|
||||
package_name: "${{ matrix.primary-name }}"
|
||||
scheme: "pull_request"
|
||||
repo_name: "immich"
|
||||
match_regex: '^pr-(\d+)$|^(\d+)$'
|
||||
|
||||
cleanup-untagged-images:
|
||||
name: Cleanup Untagged Images Tags for ${{ matrix.primary-name }}
|
||||
runs-on: ubuntu-24.04
|
||||
needs:
|
||||
- cleanup-images
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- primary-name: "immich-server"
|
||||
- primary-name: "immich-machine-learning"
|
||||
- primary-name: "immich-build-cache"
|
||||
env:
|
||||
# Requires a personal access token with the OAuth scope delete:packages
|
||||
TOKEN: ${{ secrets.PACKAGE_DELETE_TOKEN }}
|
||||
steps:
|
||||
- name: Clean untagged images
|
||||
if: "${{ env.TOKEN != '' }}"
|
||||
uses: stumpylog/image-cleaner-action/untagged@v0.9.0
|
||||
with:
|
||||
token: "${{ env.TOKEN }}"
|
||||
owner: "immich-app"
|
||||
do_delete: "true"
|
||||
is_org: "true"
|
||||
package_name: "${{ matrix.primary-name }}"
|
||||
365  .github/workflows/docker.yml  (vendored)
@@ -5,6 +5,7 @@ on:
|
||||
push:
|
||||
branches: [main]
|
||||
pull_request:
|
||||
branches: [main]
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
@@ -12,27 +13,21 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
permissions:
|
||||
packages: write
|
||||
|
||||
jobs:
|
||||
pre-job:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should_run: ${{ steps.check.outputs.should_run }}
|
||||
should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
steps:
|
||||
- id: token
|
||||
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@v3
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: Check what should run
|
||||
id: check
|
||||
uses: immich-app/devtools/actions/pre-job@08bac802a312fc89808e0dd589271ca0974087b5 # pre-job-action-v2.0.0
|
||||
with:
|
||||
github-token: ${{ steps.token.outputs.token }}
|
||||
filters: |
|
||||
server:
|
||||
- 'server/**'
|
||||
@@ -41,155 +36,275 @@ jobs:
|
||||
- 'i18n/**'
|
||||
machine-learning:
|
||||
- 'machine-learning/**'
|
||||
force-filters: |
|
||||
- '.github/workflows/docker.yml'
|
||||
- '.github/workflows/multi-runner-build.yml'
|
||||
- '.github/actions/image-build'
|
||||
force-events: 'workflow_dispatch,release'
|
||||
|
||||
- name: Check if we should force jobs to run
|
||||
id: should_force
|
||||
run: echo "should_force=${{ github.event_name == 'workflow_dispatch' || github.event_name == 'release' }}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
retag_ml:
|
||||
name: Re-Tag ML
|
||||
needs: pre-job
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
if: ${{ fromJSON(needs.pre-job.outputs.should_run).machine-learning == false && !github.event.pull_request.head.repo.fork }}
|
||||
if: ${{ needs.pre-job.outputs.should_run_ml == 'false' && !github.event.pull_request.head.repo.fork }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
suffix: ['', '-cuda', '-rocm', '-openvino', '-armnn', '-rknn']
|
||||
suffix: ["", "-cuda", "-openvino", "-armnn"]
|
||||
steps:
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Re-tag image
|
||||
env:
|
||||
REGISTRY_NAME: 'ghcr.io'
|
||||
REPOSITORY: ${{ github.repository_owner }}/immich-machine-learning
|
||||
TAG_OLD: main${{ matrix.suffix }}
|
||||
TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
|
||||
TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
|
||||
run: |
|
||||
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
|
||||
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Re-tag image
|
||||
run: |
|
||||
REGISTRY_NAME="ghcr.io"
|
||||
REPOSITORY=${{ github.repository_owner }}/immich-machine-learning
|
||||
TAG_OLD=main${{ matrix.suffix }}
|
||||
TAG_NEW=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
|
||||
docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_NEW $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
|
||||
|
||||
retag_server:
|
||||
name: Re-Tag Server
|
||||
needs: pre-job
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
if: ${{ fromJSON(needs.pre-job.outputs.should_run).server == false && !github.event.pull_request.head.repo.fork }}
|
||||
if: ${{ needs.pre-job.outputs.should_run_server == 'false' && !github.event.pull_request.head.repo.fork }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
suffix: ['']
|
||||
suffix: [""]
|
||||
steps:
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Re-tag image
|
||||
run: |
|
||||
REGISTRY_NAME="ghcr.io"
|
||||
REPOSITORY=${{ github.repository_owner }}/immich-server
|
||||
TAG_OLD=main${{ matrix.suffix }}
|
||||
TAG_NEW=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
|
||||
          docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_NEW $REGISTRY_NAME/$REPOSITORY:$TAG_OLD


  build_and_push_ml:
    name: Build and Push ML
    needs: pre-job
    if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
    runs-on: ubuntu-latest
    env:
      image: immich-machine-learning
      context: machine-learning
      file: machine-learning/Dockerfile
    strategy:
      # Prevent a failure in one image from stopping the other builds
      fail-fast: false
      matrix:
        include:
          - platforms: linux/amd64,linux/arm64
            device: cpu

          - platforms: linux/amd64
            device: cuda
            suffix: -cuda

          - platforms: linux/amd64
            device: openvino
            suffix: -openvino

          - platforms: linux/arm64
            device: armnn
            suffix: -armnn

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3.2.0

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3.8.0

      - name: Login to Docker Hub
        # Only push to Docker Hub when making a release
        if: ${{ github.event_name == 'release' }}
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        # Skip when PR from a fork
        if: ${{ !github.event.pull_request.head.repo.fork }}
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Re-tag image
        env:
          REGISTRY_NAME: 'ghcr.io'
          REPOSITORY: ${{ github.repository_owner }}/immich-server
          TAG_OLD: main${{ matrix.suffix }}
          TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
          TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
        run: |
          docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
          docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
      - name: Generate docker image tags
        id: metadata
        uses: docker/metadata-action@v5
        with:
          flavor: |
            # Disable latest tag
            latest=false
          images: |
            name=ghcr.io/${{ github.repository_owner }}/${{env.image}}
            name=altran1502/${{env.image}},enable=${{ github.event_name == 'release' }}
          tags: |
            # Tag with branch name
            type=ref,event=branch,suffix=${{ matrix.suffix }}
            # Tag with pr-number
            type=ref,event=pr,suffix=${{ matrix.suffix }}
            # Tag with git tag on release
            type=ref,event=tag,suffix=${{ matrix.suffix }}
            type=raw,value=release,enable=${{ github.event_name == 'release' }},suffix=${{ matrix.suffix }}

  machine-learning:
    name: Build and Push ML
      - name: Determine build cache output
        id: cache-target
        run: |
          if [[ "${{ github.event_name }}" == "pull_request" ]]; then
            # Essentially just ignore the cache output (PR can't write to registry cache)
            echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
          else
            echo "cache-to=type=registry,mode=max,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{ env.image }}" >> $GITHUB_OUTPUT
          fi

      - name: Build and push image
        uses: docker/build-push-action@v6.10.0
        with:
          context: ${{ env.context }}
          file: ${{ env.file }}
          platforms: ${{ matrix.platforms }}
          # Skip pushing when PR from a fork
          push: ${{ !github.event.pull_request.head.repo.fork }}
          cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{env.image}}
          cache-to: ${{ steps.cache-target.outputs.cache-to }}
          tags: ${{ steps.metadata.outputs.tags }}
          labels: ${{ steps.metadata.outputs.labels }}
          build-args: |
            DEVICE=${{ matrix.device }}
            BUILD_ID=${{ github.run_id }}
            BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.metadata.outputs.tags }}
            BUILD_SOURCE_REF=${{ github.ref_name }}
            BUILD_SOURCE_COMMIT=${{ github.sha }}

  build_and_push_server:
    name: Build and Push Server
    runs-on: ubuntu-latest
    needs: pre-job
    if: ${{ fromJSON(needs.pre-job.outputs.should_run).machine-learning == true }}
    if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
    env:
      image: immich-server
      context: .
      file: server/Dockerfile
    strategy:
      fail-fast: false
      matrix:
        include:
          - device: cpu
            tag-suffix: ''
          - device: cuda
            tag-suffix: '-cuda'
            platforms: linux/amd64
          - device: openvino
            tag-suffix: '-openvino'
            platforms: linux/amd64
          - device: armnn
            tag-suffix: '-armnn'
            platforms: linux/arm64
          - device: rknn
            tag-suffix: '-rknn'
            platforms: linux/arm64
          - device: rocm
            tag-suffix: '-rocm'
            platforms: linux/amd64
            runner-mapping: '{"linux/amd64": "mich"}'
    uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@946acac326940f8badf09ccf591d9cb345d6a689 # multi-runner-build-workflow-v0.2.1
    permissions:
      contents: read
      actions: read
      packages: write
    secrets:
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
    with:
      image: immich-machine-learning
      context: machine-learning
      dockerfile: machine-learning/Dockerfile
      platforms: ${{ matrix.platforms }}
      runner-mapping: ${{ matrix.runner-mapping }}
      tag-suffix: ${{ matrix.tag-suffix }}
      dockerhub-push: ${{ github.event_name == 'release' }}
      build-args: |
        DEVICE=${{ matrix.device }}
          - platforms: linux/amd64,linux/arm64
            device: cpu
    steps:
      - name: Checkout
        uses: actions/checkout@v4

  server:
    name: Build and Push Server
    needs: pre-job
    if: ${{ fromJSON(needs.pre-job.outputs.should_run).server == true }}
    uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@946acac326940f8badf09ccf591d9cb345d6a689 # multi-runner-build-workflow-v0.2.1
    permissions:
      contents: read
      actions: read
      packages: write
    secrets:
      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
      DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
    with:
      image: immich-server
      context: .
      dockerfile: server/Dockerfile
      dockerhub-push: ${{ github.event_name == 'release' }}
      build-args: |
        DEVICE=cpu
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3.2.0

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3.8.0

      - name: Login to Docker Hub
        # Only push to Docker Hub when making a release
        if: ${{ github.event_name == 'release' }}
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        # Skip when PR from a fork
        if: ${{ !github.event.pull_request.head.repo.fork }}
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Generate docker image tags
        id: metadata
        uses: docker/metadata-action@v5
        with:
          flavor: |
            # Disable latest tag
            latest=false
          images: |
            name=ghcr.io/${{ github.repository_owner }}/${{env.image}}
            name=altran1502/${{env.image}},enable=${{ github.event_name == 'release' }}
          tags: |
            # Tag with branch name
            type=ref,event=branch,suffix=${{ matrix.suffix }}
            # Tag with pr-number
            type=ref,event=pr,suffix=${{ matrix.suffix }}
            # Tag with git tag on release
            type=ref,event=tag,suffix=${{ matrix.suffix }}
            type=raw,value=release,enable=${{ github.event_name == 'release' }},suffix=${{ matrix.suffix }}

      - name: Determine build cache output
        id: cache-target
        run: |
          if [[ "${{ github.event_name }}" == "pull_request" ]]; then
            # Essentially just ignore the cache output (PR can't write to registry cache)
            echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
          else
            echo "cache-to=type=registry,mode=max,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{ env.image }}" >> $GITHUB_OUTPUT
          fi

      - name: Build and push image
        uses: docker/build-push-action@v6.10.0
        with:
          context: ${{ env.context }}
          file: ${{ env.file }}
          platforms: ${{ matrix.platforms }}
          # Skip pushing when PR from a fork
          push: ${{ !github.event.pull_request.head.repo.fork }}
          cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{env.image}}
          cache-to: ${{ steps.cache-target.outputs.cache-to }}
          tags: ${{ steps.metadata.outputs.tags }}
          labels: ${{ steps.metadata.outputs.labels }}
          build-args: |
            DEVICE=${{ matrix.device }}
            BUILD_ID=${{ github.run_id }}
            BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.metadata.outputs.tags }}
            BUILD_SOURCE_REF=${{ github.ref_name }}
            BUILD_SOURCE_COMMIT=${{ github.sha }}

  success-check-server:
    name: Docker Build & Push Server Success
    needs: [server, retag_server]
    permissions: {}
    needs: [build_and_push_server, retag_server]
    runs-on: ubuntu-latest
    if: always()
    steps:
      - uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
        with:
          needs: ${{ toJSON(needs) }}
      - name: Any jobs failed?
        if: ${{ contains(needs.*.result, 'failure') }}
        run: exit 1
      - name: All jobs passed or skipped
        if: ${{ !(contains(needs.*.result, 'failure')) }}
        run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"

  success-check-ml:
    name: Docker Build & Push ML Success
    needs: [machine-learning, retag_ml]
    permissions: {}
    needs: [build_and_push_ml, retag_ml]
    runs-on: ubuntu-latest
    if: always()
    steps:
      - uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
        with:
          needs: ${{ toJSON(needs) }}
      - name: Any jobs failed?
        if: ${{ contains(needs.*.result, 'failure') }}
        run: exit 1
      - name: All jobs passed or skipped
        if: ${{ !(contains(needs.*.result, 'failure')) }}
        run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"

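A note on the build-cache handling that appears in both build jobs above, since it is easy to lose in the flattened diff: the cache destination is computed in a separate step because pull requests cannot write to the registry cache. A minimal standalone sketch of that pattern in GitHub Actions syntax follows; OWNER and IMAGE are illustrative placeholders for the repository owner and image name used by the real workflow.

    # Decide where BuildKit layer cache may be written. PRs discard it locally,
    # trusted branches push it back to the GHCR cache image.
    - name: Determine build cache output
      id: cache-target
      run: |
        if [[ "${GITHUB_EVENT_NAME}" == "pull_request" ]]; then
          echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> "$GITHUB_OUTPUT"
        else
          echo "cache-to=type=registry,mode=max,ref=ghcr.io/OWNER/immich-build-cache:IMAGE" >> "$GITHUB_OUTPUT"
        fi

    # The build step always reads from the registry cache, but only writes to it
    # when the previous step selected the registry target.
    - name: Build and push image
      uses: docker/build-push-action@v6.10.0
      with:
        cache-from: type=registry,ref=ghcr.io/OWNER/immich-build-cache:IMAGE
        cache-to: ${{ steps.cache-target.outputs.cache-to }}
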
63
.github/workflows/docs-build.yml
vendored
@@ -3,6 +3,7 @@ on:
|
||||
push:
|
||||
branches: [main]
|
||||
pull_request:
|
||||
branches: [main]
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
@@ -10,84 +11,54 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
pre-job:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should_run: ${{ steps.check.outputs.should_run }}
|
||||
should_run: ${{ steps.found_paths.outputs.docs == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
steps:
|
||||
- id: token
|
||||
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@v3
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: Check what should run
|
||||
id: check
|
||||
uses: immich-app/devtools/actions/pre-job@08bac802a312fc89808e0dd589271ca0974087b5 # pre-job-action-v2.0.0
|
||||
with:
|
||||
github-token: ${{ steps.token.outputs.token }}
|
||||
filters: |
|
||||
docs:
|
||||
- 'docs/**'
|
||||
open-api:
|
||||
- 'open-api/immich-openapi-specs.json'
|
||||
force-filters: |
|
||||
- '.github/workflows/docs-build.yml'
|
||||
force-events: 'release'
|
||||
force-branches: 'main'
|
||||
- name: Check if we should force jobs to run
|
||||
id: should_force
|
||||
run: echo "should_force=${{ github.event_name == 'release' || github.ref_name == 'main' }}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
build:
|
||||
name: Docs Build
|
||||
needs: pre-job
|
||||
permissions:
|
||||
contents: read
|
||||
if: ${{ fromJSON(needs.pre-job.outputs.should_run).docs == true }}
|
||||
if: ${{ needs.pre-job.outputs.should_run == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./docs
|
||||
|
||||
steps:
|
||||
- id: token
|
||||
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
token: ${{ steps.token.outputs.token }}
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: './docs/.nvmrc'
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: '**/pnpm-lock.yaml'
|
||||
|
||||
- name: Run install
|
||||
run: pnpm install
|
||||
- name: Run npm install
|
||||
run: npm ci
|
||||
|
||||
- name: Check formatting
|
||||
run: pnpm format
|
||||
run: npm run format
|
||||
|
||||
- name: Run build
|
||||
run: pnpm build
|
||||
run: npm run build
|
||||
|
||||
- name: Upload build output
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: docs-build-output
|
||||
path: docs/build/
|
||||
include-hidden-files: true
|
||||
retention-days: 1
|
||||
|
||||
100
.github/workflows/docs-deploy.yml
vendored
@@ -1,7 +1,7 @@
|
||||
name: Docs deploy
|
||||
on:
|
||||
workflow_run: # zizmor: ignore[dangerous-triggers] no attacker inputs are used here
|
||||
workflows: ['Docs build']
|
||||
workflow_run:
|
||||
workflows: ["Docs build"]
|
||||
types:
|
||||
- completed
|
||||
|
||||
@@ -9,26 +9,16 @@ jobs:
|
||||
checks:
|
||||
name: Docs Deploy Checks
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
actions: read
|
||||
pull-requests: read
|
||||
outputs:
|
||||
parameters: ${{ steps.parameters.outputs.result }}
|
||||
artifact: ${{ steps.get-artifact.outputs.result }}
|
||||
steps:
|
||||
- id: token
|
||||
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- if: ${{ github.event.workflow_run.conclusion != 'success' }}
|
||||
run: echo 'The triggering workflow did not succeed' && exit 1
|
||||
- name: Get artifact
|
||||
id: get-artifact
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ steps.token.outputs.token }}
|
||||
script: |
|
||||
let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
|
||||
owner: context.repo.owner,
|
||||
@@ -45,11 +35,8 @@ jobs:
|
||||
return { found: true, id: matchArtifact.id };
|
||||
- name: Determine deploy parameters
|
||||
id: parameters
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
env:
|
||||
HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ steps.token.outputs.token }}
|
||||
script: |
|
||||
const eventType = context.payload.workflow_run.event;
|
||||
const isFork = context.payload.workflow_run.repository.fork;
|
||||
@@ -70,8 +57,7 @@ jobs:
|
||||
} else if (eventType == "pull_request") {
|
||||
let pull_number = context.payload.workflow_run.pull_requests[0]?.number;
|
||||
if(!pull_number) {
|
||||
const {HEAD_SHA} = process.env;
|
||||
const response = await github.rest.search.issuesAndPullRequests({q: `repo:${{ github.repository }} is:pr sha:${HEAD_SHA}`,per_page: 1,})
|
||||
const response = await github.rest.search.issuesAndPullRequests({q: 'repo:${{ github.repository }} is:pr sha:${{ github.event.workflow_run.head_sha }}',per_page: 1,})
|
||||
const items = response.data.items
|
||||
if (items.length < 1) {
|
||||
throw new Error("No pull request found for the commit")
|
||||
@@ -109,48 +95,30 @@ jobs:
|
||||
name: Docs Deploy
|
||||
runs-on: ubuntu-latest
|
||||
needs: checks
|
||||
permissions:
|
||||
contents: read
|
||||
actions: read
|
||||
pull-requests: write
|
||||
if: ${{ fromJson(needs.checks.outputs.artifact).found && fromJson(needs.checks.outputs.parameters).shouldDeploy }}
|
||||
steps:
|
||||
- id: token
|
||||
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
token: ${{ steps.token.outputs.token }}
|
||||
|
||||
- name: Setup Mise
|
||||
uses: immich-app/devtools/actions/use-mise@cd24790a7f5f6439ac32cc94f5523cb2de8bfa8c # use-mise-action-v1.1.0
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Load parameters
|
||||
id: parameters
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
env:
|
||||
PARAM_JSON: ${{ needs.checks.outputs.parameters }}
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ steps.token.outputs.token }}
|
||||
script: |
|
||||
const parameters = JSON.parse(process.env.PARAM_JSON);
|
||||
const json = `${{ needs.checks.outputs.parameters }}`;
|
||||
const parameters = JSON.parse(json);
|
||||
core.setOutput("event", parameters.event);
|
||||
core.setOutput("name", parameters.name);
|
||||
core.setOutput("shouldDeploy", parameters.shouldDeploy);
|
||||
|
||||
- run: |
|
||||
echo "Starting docs deployment for ${{ steps.parameters.outputs.event }} ${{ steps.parameters.outputs.name }}"
|
||||
|
||||
- name: Download artifact
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
env:
|
||||
ARTIFACT_JSON: ${{ needs.checks.outputs.artifact }}
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ steps.token.outputs.token }}
|
||||
script: |
|
||||
let artifact = JSON.parse(process.env.ARTIFACT_JSON);
|
||||
let artifact = ${{ needs.checks.outputs.artifact }};
|
||||
let download = await github.rest.actions.downloadArtifact({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
@@ -170,8 +138,12 @@ jobs:
|
||||
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
|
||||
working-directory: 'deployment/modules/cloudflare/docs'
|
||||
run: 'mise run tf apply'
|
||||
uses: gruntwork-io/terragrunt-action@v2
|
||||
with:
|
||||
tg_version: "0.58.12"
|
||||
tofu_version: "1.7.1"
|
||||
tg_dir: "deployment/modules/cloudflare/docs"
|
||||
tg_command: "apply"
|
||||
|
||||
- name: Deploy Docs Subdomain Output
|
||||
id: docs-output
|
||||
@@ -181,26 +153,27 @@ jobs:
|
||||
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
|
||||
working-directory: 'deployment/modules/cloudflare/docs'
|
||||
run: 'mise run tf output -- -json'
|
||||
uses: gruntwork-io/terragrunt-action@v2
|
||||
with:
|
||||
tg_version: "0.58.12"
|
||||
tofu_version: "1.7.1"
|
||||
tg_dir: "deployment/modules/cloudflare/docs"
|
||||
tg_command: "output -json"
|
||||
|
||||
- name: Output Cleaning
|
||||
id: clean
|
||||
env:
|
||||
TG_OUTPUT: ${{ steps.docs-output.outputs.tg_action_output }}
|
||||
run: |
|
||||
CLEANED=$(echo "$TG_OUTPUT" | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
|
||||
echo "output=$CLEANED" >> $GITHUB_OUTPUT
|
||||
TG_OUT=$(echo '${{ steps.docs-output.outputs.tg_action_output }}' | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
|
||||
echo "output=$TG_OUT" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Publish to Cloudflare Pages
|
||||
# TODO: Action is deprecated
|
||||
uses: cloudflare/pages-action@f0a1cd58cd66095dee69bfa18fa5efd1dde93bca # v1.5.0
|
||||
uses: cloudflare/pages-action@v1
|
||||
with:
|
||||
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN_PAGES_UPLOAD }}
|
||||
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||
projectName: ${{ fromJson(steps.clean.outputs.output).pages_project_name.value }}
|
||||
workingDirectory: 'docs'
|
||||
directory: 'build'
|
||||
workingDirectory: "docs"
|
||||
directory: "build"
|
||||
branch: ${{ steps.parameters.outputs.name }}
|
||||
wranglerVersion: '3'
|
||||
|
||||
@@ -211,14 +184,17 @@ jobs:
|
||||
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
|
||||
working-directory: 'deployment/modules/cloudflare/docs-release'
|
||||
run: 'mise run tf apply'
|
||||
uses: gruntwork-io/terragrunt-action@v2
|
||||
with:
|
||||
tg_version: '0.58.12'
|
||||
tofu_version: '1.7.1'
|
||||
tg_dir: 'deployment/modules/cloudflare/docs-release'
|
||||
tg_command: 'apply'
|
||||
|
||||
- name: Comment
|
||||
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0
|
||||
uses: actions-cool/maintain-one-comment@v3
|
||||
if: ${{ steps.parameters.outputs.event == 'pr' }}
|
||||
with:
|
||||
token: ${{ steps.token.outputs.token }}
|
||||
number: ${{ fromJson(needs.checks.outputs.parameters).pr_number }}
|
||||
body: |
|
||||
📖 Documentation deployed to [${{ fromJson(steps.clean.outputs.output).immich_app_branch_subdomain.value }}](https://${{ fromJson(steps.clean.outputs.output).immich_app_branch_subdomain.value }})
|
||||
|
||||
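The docs-deploy steps above pass the Terraform output between steps as a single JSON string, and the Output Cleaning step is the least obvious part of that plumbing: the captured output arrives with %0A and %3C escapes that have to be undone before jq can parse it. A minimal sketch of the pattern, assuming the step ids used in this workflow (docs-output producing tg_action_output, clean consuming it):

    - name: Output Cleaning
      id: clean
      env:
        TG_OUTPUT: ${{ steps.docs-output.outputs.tg_action_output }}
      run: |
        # Restore escaped newlines and '<', then compact the JSON onto one output line
        CLEANED=$(echo "$TG_OUTPUT" | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
        echo "output=$CLEANED" >> "$GITHUB_OUTPUT"

Later steps can then read single values, for example ${{ fromJson(steps.clean.outputs.output).pages_project_name.value }} for the Cloudflare Pages project name.
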
36
.github/workflows/docs-destroy.yml
vendored
@@ -1,47 +1,33 @@
|
||||
name: Docs destroy
|
||||
on:
|
||||
pull_request_target: # zizmor: ignore[dangerous-triggers] no attacker inputs are used here
|
||||
pull_request_target:
|
||||
types: [closed]
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
name: Docs Destroy
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
steps:
|
||||
- id: token
|
||||
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
token: ${{ steps.token.outputs.token }}
|
||||
|
||||
- name: Setup Mise
|
||||
uses: immich-app/devtools/actions/use-mise@cd24790a7f5f6439ac32cc94f5523cb2de8bfa8c # use-mise-action-v1.1.0
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Destroy Docs Subdomain
|
||||
env:
|
||||
TF_VAR_prefix_name: 'pr-${{ github.event.number }}'
|
||||
TF_VAR_prefix_event_type: 'pr'
|
||||
TF_VAR_prefix_name: "pr-${{ github.event.number }}"
|
||||
TF_VAR_prefix_event_type: "pr"
|
||||
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
|
||||
working-directory: 'deployment/modules/cloudflare/docs'
|
||||
run: 'mise run tf destroy -- -refresh=false'
|
||||
uses: gruntwork-io/terragrunt-action@v2
|
||||
with:
|
||||
tg_version: "0.58.12"
|
||||
tofu_version: "1.7.1"
|
||||
tg_dir: "deployment/modules/cloudflare/docs"
|
||||
tg_command: "destroy -refresh=false"
|
||||
|
||||
- name: Comment
|
||||
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0
|
||||
uses: actions-cool/maintain-one-comment@v3
|
||||
with:
|
||||
token: ${{ steps.token.outputs.token }}
|
||||
number: ${{ github.event.number }}
|
||||
delete: true
|
||||
body-include: '<!-- Docs PR URL -->'
|
||||
|
||||
21
.github/workflows/fix-format.yml
vendored
@@ -4,54 +4,44 @@ on:
|
||||
pull_request:
|
||||
types: [labeled]
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
fix-formatting:
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.event.label.name == 'fix:formatting' }}
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
|
||||
uses: actions/create-github-app-token@v1
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: 'Checkout'
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.ref }}
|
||||
token: ${{ steps.generate-token.outputs.token }}
|
||||
persist-credentials: true
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: './server/.nvmrc'
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: '**/pnpm-lock.yaml'
|
||||
|
||||
- name: Fix formatting
|
||||
run: make install-all && make format-all
|
||||
|
||||
- name: Commit and push
|
||||
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4
|
||||
uses: EndBug/add-and-commit@v9
|
||||
with:
|
||||
default_author: github_actions
|
||||
message: 'chore: fix formatting'
|
||||
|
||||
- name: Remove label
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
uses: actions/github-script@v7
|
||||
if: always()
|
||||
with:
|
||||
github-token: ${{ steps.generate-token.outputs.token }}
|
||||
script: |
|
||||
github.rest.issues.removeLabel({
|
||||
issue_number: context.payload.pull_request.number,
|
||||
@@ -59,3 +49,4 @@ jobs:
|
||||
repo: context.repo.repo,
|
||||
name: 'fix:formatting'
|
||||
})
|
||||
|
||||
|
||||
128
.github/workflows/merge-translations.yml
vendored
@@ -1,128 +0,0 @@
|
||||
name: Merge translations
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
workflow_call:
|
||||
secrets:
|
||||
PUSH_O_MATIC_APP_ID:
|
||||
required: true
|
||||
PUSH_O_MATIC_APP_KEY:
|
||||
required: true
|
||||
WEBLATE_TOKEN:
|
||||
required: true
|
||||
inputs:
|
||||
skip:
|
||||
description: 'Skip translations'
|
||||
required: false
|
||||
type: boolean
|
||||
|
||||
permissions: {}
|
||||
|
||||
env:
|
||||
WEBLATE_HOST: 'https://hosted.weblate.org'
|
||||
WEBLATE_COMPONENT: 'immich/immich'
|
||||
|
||||
jobs:
|
||||
merge:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Generate a token
|
||||
id: generate_token
|
||||
if: ${{ inputs.skip != true }}
|
||||
uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: Find translation PR
|
||||
id: find_pr
|
||||
if: ${{ inputs.skip != true }}
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.generate_token.outputs.token }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
PR=$(gh pr list --repo $GITHUB_REPOSITORY --author weblate --json number,mergeable)
|
||||
echo "$PR"
|
||||
|
||||
PR_NUMBER=$(echo "$PR" | jq '
|
||||
if length == 1 then
|
||||
.[0].number
|
||||
else
|
||||
error("Expected exactly 1 entry, got \(length)")
|
||||
end
|
||||
' 2>&1) || exit 1
|
||||
|
||||
echo "PR_NUMBER=$PR_NUMBER" >> $GITHUB_OUTPUT
|
||||
echo "Selected PR $PR_NUMBER"
|
||||
|
||||
if ! echo "$PR" | jq -e '.[0].mergeable == "MERGEABLE"'; then
|
||||
echo "PR is not mergeable"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Lock weblate
|
||||
if: ${{ inputs.skip != true }}
|
||||
env:
|
||||
WEBLATE_TOKEN: ${{ secrets.WEBLATE_TOKEN }}
|
||||
run: |
|
||||
curl --fail-with-body -X POST -H "Authorization: Token $WEBLATE_TOKEN" "$WEBLATE_HOST/api/components/$WEBLATE_COMPONENT/lock/" -d lock=true
|
||||
|
||||
- name: Commit translations
|
||||
if: ${{ inputs.skip != true }}
|
||||
env:
|
||||
WEBLATE_TOKEN: ${{ secrets.WEBLATE_TOKEN }}
|
||||
run: |
|
||||
curl --fail-with-body -X POST -H "Authorization: Token $WEBLATE_TOKEN" "$WEBLATE_HOST/api/components/$WEBLATE_COMPONENT/repository/" -d operation=commit
|
||||
curl --fail-with-body -X POST -H "Authorization: Token $WEBLATE_TOKEN" "$WEBLATE_HOST/api/components/$WEBLATE_COMPONENT/repository/" -d operation=push
|
||||
|
||||
- name: Merge PR
|
||||
id: merge_pr
|
||||
if: ${{ inputs.skip != true }}
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.generate_token.outputs.token }}
|
||||
PR_NUMBER: ${{ steps.find_pr.outputs.PR_NUMBER }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
REVIEW_ID=$(gh api -X POST "repos/$GITHUB_REPOSITORY/pulls/$PR_NUMBER/reviews" --field event='APPROVE' --field body='Automatically merging translations PR' \
|
||||
| jq '.id')
|
||||
echo "REVIEW_ID=$REVIEW_ID" >> $GITHUB_OUTPUT
|
||||
gh pr merge "$PR_NUMBER" --repo "$GITHUB_REPOSITORY" --auto --squash
|
||||
|
||||
- name: Wait for PR to merge
|
||||
if: ${{ inputs.skip != true }}
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.generate_token.outputs.token }}
|
||||
PR_NUMBER: ${{ steps.find_pr.outputs.PR_NUMBER }}
|
||||
REVIEW_ID: ${{ steps.merge_pr.outputs.REVIEW_ID }}
|
||||
run: |
|
||||
# So we clean up no matter what
|
||||
set +e
|
||||
|
||||
for i in {1..100}; do
|
||||
if gh pr view "$PR_NUMBER" --repo "$GITHUB_REPOSITORY" --json state | jq -e '.state == "MERGED"'; then
|
||||
echo "PR merged"
|
||||
exit 0
|
||||
else
|
||||
echo "PR not merged yet, waiting..."
|
||||
sleep 6
|
||||
fi
|
||||
done
|
||||
echo "PR did not merge in time"
|
||||
gh api -X PUT "repos/$GITHUB_REPOSITORY/pulls/$PR_NUMBER/reviews/$REVIEW_ID/dismissals" --field message='Merge attempt timed out' --field event='DISMISS'
|
||||
gh pr merge "$PR_NUMBER" --repo "$GITHUB_REPOSITORY" --disable-auto
|
||||
exit 1
|
||||
|
||||
- name: Unlock weblate
|
||||
if: ${{ inputs.skip != true }}
|
||||
env:
|
||||
WEBLATE_TOKEN: ${{ secrets.WEBLATE_TOKEN }}
|
||||
run: |
|
||||
curl --fail-with-body -X POST -H "Authorization: Token $WEBLATE_TOKEN" "$WEBLATE_HOST/api/components/$WEBLATE_COMPONENT/lock/" -d lock=false
|
||||
|
||||
- name: Report success
|
||||
run: |
|
||||
echo "Workflow completed successfully (or was skipped)"
|
||||
@@ -1,12 +0,0 @@
|
||||
name: PR Conventional Commit
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened, edited]
|
||||
|
||||
jobs:
|
||||
validate-pr-title:
|
||||
name: Validate PR Title (conventional commit)
|
||||
uses: immich-app/devtools/.github/workflows/shared-pr-require-conventional-commit.yml@main
|
||||
permissions:
|
||||
pull-requests: write
|
||||
15
.github/workflows/org-zizmor.yml
vendored
@@ -1,15 +0,0 @@
|
||||
name: Zizmor
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches: [main]
|
||||
|
||||
jobs:
|
||||
zizmor:
|
||||
name: Zizmor
|
||||
uses: immich-app/devtools/.github/workflows/shared-zizmor.yml@main
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
security-events: write
|
||||
17
.github/workflows/pr-label-validation.yml
vendored
@@ -1,11 +1,9 @@
|
||||
name: PR Label Validation
|
||||
|
||||
on:
|
||||
pull_request_target: # zizmor: ignore[dangerous-triggers] no attacker inputs are used here
|
||||
pull_request_target:
|
||||
types: [opened, labeled, unlabeled, synchronize]
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
validate-release-label:
|
||||
runs-on: ubuntu-latest
|
||||
@@ -13,19 +11,12 @@ jobs:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- id: token
|
||||
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: Require PR to have a changelog label
|
||||
uses: mheap/github-action-required-labels@8afbe8ae6ab7647d0c9f0cfa7c2f939650d22509 # v5.5.1
|
||||
uses: mheap/github-action-required-labels@v5
|
||||
with:
|
||||
token: ${{ steps.token.outputs.token }}
|
||||
mode: exactly
|
||||
count: 1
|
||||
use_regex: true
|
||||
labels: 'changelog:.*'
|
||||
labels: "changelog:.*"
|
||||
add_comment: true
|
||||
message: 'Label error. Requires {{errorString}} {{count}} of: {{ provided }}. Found: {{ applied }}. A maintainer will add the required label.'
|
||||
message: "Label error. Requires {{errorString}} {{count}} of: {{ provided }}. Found: {{ applied }}. A maintainer will add the required label."
|
||||
|
||||
16
.github/workflows/pr-labeler.yml
vendored
@@ -1,8 +1,6 @@
|
||||
name: 'Pull Request Labeler'
|
||||
name: "Pull Request Labeler"
|
||||
on:
|
||||
- pull_request_target # zizmor: ignore[dangerous-triggers] no attacker inputs are used here
|
||||
|
||||
permissions: {}
|
||||
- pull_request_target
|
||||
|
||||
jobs:
|
||||
labeler:
|
||||
@@ -11,12 +9,4 @@ jobs:
|
||||
pull-requests: write
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- id: token
|
||||
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1
|
||||
with:
|
||||
repo-token: ${{ steps.token.outputs.token }}
|
||||
- uses: actions/labeler@v5
|
||||
|
||||
15
.github/workflows/pr-require-conventional-commit.yml
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
name: PR Conventional Commit Validation
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened, edited]
|
||||
|
||||
jobs:
|
||||
validate-pr-title:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: PR Conventional Commit Validation
|
||||
uses: ytanikin/PRConventionalCommits@1.3.0
|
||||
with:
|
||||
task_types: '["feat","fix","docs","test","ci","refactor","perf","chore","revert"]'
|
||||
add_label: 'false'
|
||||
79
.github/workflows/prepare-release.yml
vendored
@@ -10,79 +10,46 @@ on:
|
||||
type: choice
|
||||
options:
|
||||
- 'false'
|
||||
- major
|
||||
- minor
|
||||
- patch
|
||||
mobileBump:
|
||||
description: 'Bump mobile build number'
|
||||
required: false
|
||||
type: boolean
|
||||
skipTranslations:
|
||||
description: 'Skip translations'
|
||||
required: false
|
||||
type: boolean
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}-root
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
merge_translations:
|
||||
uses: ./.github/workflows/merge-translations.yml
|
||||
with:
|
||||
skip: ${{ inputs.skipTranslations }}
|
||||
permissions:
|
||||
pull-requests: write
|
||||
secrets:
|
||||
PUSH_O_MATIC_APP_ID: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
PUSH_O_MATIC_APP_KEY: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
WEBLATE_TOKEN: ${{ secrets.WEBLATE_TOKEN }}
|
||||
|
||||
bump_version:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [merge_translations]
|
||||
|
||||
outputs:
|
||||
ref: ${{ steps.push-tag.outputs.commit_long_sha }}
|
||||
permissions: {} # No job-level permissions are needed because it uses the app-token
|
||||
|
||||
steps:
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
|
||||
uses: actions/create-github-app-token@v1
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
token: ${{ steps.generate-token.outputs.token }}
|
||||
persist-credentials: true
|
||||
ref: main
|
||||
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
|
||||
with:
|
||||
node-version-file: './server/.nvmrc'
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: '**/pnpm-lock.yaml'
|
||||
- name: Install Poetry
|
||||
run: pipx install poetry
|
||||
|
||||
- name: Bump version
|
||||
env:
|
||||
SERVER_BUMP: ${{ inputs.serverBump }}
|
||||
MOBILE_BUMP: ${{ inputs.mobileBump }}
|
||||
run: misc/release/pump-version.sh -s "${SERVER_BUMP}" -m "${MOBILE_BUMP}"
|
||||
run: misc/release/pump-version.sh -s "${{ inputs.serverBump }}" -m "${{ inputs.mobileBump }}"
|
||||
|
||||
- name: Commit and tag
|
||||
id: push-tag
|
||||
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4
|
||||
uses: EndBug/add-and-commit@v9
|
||||
with:
|
||||
default_author: github_actions
|
||||
message: 'chore: version ${{ env.IMMICH_VERSION }}'
|
||||
@@ -92,48 +59,30 @@ jobs:
|
||||
build_mobile:
|
||||
uses: ./.github/workflows/build-mobile.yml
|
||||
needs: bump_version
|
||||
permissions:
|
||||
contents: read
|
||||
secrets:
|
||||
KEY_JKS: ${{ secrets.KEY_JKS }}
|
||||
ALIAS: ${{ secrets.ALIAS }}
|
||||
ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
|
||||
ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
|
||||
secrets: inherit
|
||||
with:
|
||||
ref: ${{ needs.bump_version.outputs.ref }}
|
||||
|
||||
prepare_release:
|
||||
runs-on: ubuntu-latest
|
||||
needs: build_mobile
|
||||
permissions:
|
||||
actions: read # To download the app artifact
|
||||
# No content permissions are needed because it uses the app-token
|
||||
steps:
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
token: ${{ steps.generate-token.outputs.token }}
|
||||
persist-credentials: false
|
||||
token: ${{ secrets.ORG_RELEASE_TOKEN }}
|
||||
|
||||
- name: Download APK
|
||||
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: release-apk-signed
|
||||
github-token: ${{ steps.generate-token.outputs.token }}
|
||||
|
||||
- name: Create draft release
|
||||
uses: softprops/action-gh-release@6da8fa9354ddfdc4aeace5fc48d7f679b5214090 # v2.4.1
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
draft: true
|
||||
tag_name: ${{ env.IMMICH_VERSION }}
|
||||
token: ${{ steps.generate-token.outputs.token }}
|
||||
generate_release_notes: true
|
||||
body_path: misc/release/notes.tmpl
|
||||
files: |
|
||||
|
||||
63
.github/workflows/preview-label.yaml
vendored
@@ -1,63 +0,0 @@
|
||||
name: Preview label
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [labeled, closed]
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
comment-status:
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.event.action == 'labeled' && github.event.label.name == 'preview' }}
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- id: token
|
||||
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
|
||||
with:
|
||||
github-token: ${{ steps.token.outputs.token }}
|
||||
message-id: 'preview-status'
|
||||
message: 'Deploying preview environment to https://pr-${{ github.event.pull_request.number }}.preview.internal.immich.build/'
|
||||
|
||||
remove-label:
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ (github.event.action == 'closed' || github.event.pull_request.head.repo.fork) && contains(github.event.pull_request.labels.*.name, 'preview') }}
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- id: token
|
||||
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
github-token: ${{ steps.token.outputs.token }}
|
||||
script: |
|
||||
github.rest.issues.removeLabel({
|
||||
issue_number: context.payload.pull_request.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
name: 'preview'
|
||||
})
|
||||
|
||||
- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
|
||||
if: ${{ github.event.pull_request.head.repo.fork }}
|
||||
with:
|
||||
github-token: ${{ steps.token.outputs.token }}
|
||||
message-id: 'preview-status'
|
||||
message: 'PRs from forks cannot have preview environments.'
|
||||
|
||||
- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
|
||||
if: ${{ !github.event.pull_request.head.repo.fork }}
|
||||
with:
|
||||
github-token: ${{ steps.token.outputs.token }}
|
||||
message-id: 'preview-status'
|
||||
message: 'Preview environment has been removed.'
|
||||
30
.github/workflows/sdk.yml
vendored
@@ -4,44 +4,28 @@ on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
permissions: {}
|
||||
permissions:
|
||||
packages: write
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
name: Publish `@immich/sdk`
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./open-api/typescript-sdk
|
||||
steps:
|
||||
- id: token
|
||||
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
token: ${{ steps.token.outputs.token }}
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
# Setup .npmrc file to publish to npm
|
||||
- uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: './open-api/typescript-sdk/.nvmrc'
|
||||
registry-url: 'https://registry.npmjs.org'
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: '**/pnpm-lock.yaml'
|
||||
- name: Install deps
|
||||
run: pnpm install --frozen-lockfile
|
||||
run: npm ci
|
||||
- name: Build
|
||||
run: pnpm build
|
||||
run: npm run build
|
||||
- name: Publish
|
||||
run: pnpm publish --no-git-checks
|
||||
run: npm publish
|
||||
env:
|
||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
|
||||
102
.github/workflows/static_analysis.yml
vendored
@@ -9,110 +9,58 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
pre-job:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should_run: ${{ steps.check.outputs.should_run }}
|
||||
should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
steps:
|
||||
- id: token
|
||||
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@v3
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: Check what should run
|
||||
id: check
|
||||
uses: immich-app/devtools/actions/pre-job@08bac802a312fc89808e0dd589271ca0974087b5 # pre-job-action-v2.0.0
|
||||
with:
|
||||
github-token: ${{ steps.token.outputs.token }}
|
||||
filters: |
|
||||
mobile:
|
||||
- 'mobile/**'
|
||||
force-filters: |
|
||||
- '.github/workflows/static_analysis.yml'
|
||||
force-events: 'workflow_dispatch,release'
|
||||
- name: Check if we should force jobs to run
|
||||
id: should_force
|
||||
run: echo "should_force=${{ github.event_name == 'release' }}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
mobile-dart-analyze:
|
||||
name: Run Dart Code Analysis
|
||||
needs: pre-job
|
||||
if: ${{ fromJSON(needs.pre-job.outputs.should_run).mobile == true }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./mobile
|
||||
steps:
|
||||
- id: token
|
||||
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
if: ${{ needs.pre-job.outputs.should_run == 'true' }}
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
token: ${{ steps.token.outputs.token }}
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Flutter SDK
|
||||
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
|
||||
uses: subosito/flutter-action@v2
|
||||
with:
|
||||
channel: 'stable'
|
||||
flutter-version-file: ./mobile/pubspec.yaml
|
||||
|
||||
- name: Install dependencies
|
||||
run: dart pub get
|
||||
|
||||
- name: Install DCM
|
||||
uses: CQLabs/setup-dcm@8697ae0790c0852e964a6ef1d768d62a6675481a # v2.0.1
|
||||
with:
|
||||
github-token: ${{ steps.token.outputs.token }}
|
||||
version: auto
|
||||
working-directory: ./mobile
|
||||
|
||||
- name: Generate translation file
|
||||
run: dart run easy_localization:generate -S ../i18n && dart run bin/generate_keys.dart
|
||||
|
||||
- name: Run Build Runner
|
||||
run: make build
|
||||
|
||||
- name: Generate platform API
|
||||
run: make pigeon
|
||||
|
||||
- name: Find file changes
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
|
||||
id: verify-changed-files
|
||||
with:
|
||||
files: |
|
||||
mobile/**/*.g.dart
|
||||
mobile/**/*.gr.dart
|
||||
mobile/**/*.drift.dart
|
||||
|
||||
- name: Verify files have not changed
|
||||
if: steps.verify-changed-files.outputs.files_changed == 'true'
|
||||
env:
|
||||
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
|
||||
run: |
|
||||
echo "ERROR: Generated files not up to date! Run 'make build' and 'make pigeon' inside the mobile directory"
|
||||
echo "Changed files: ${CHANGED_FILES}"
|
||||
exit 1
|
||||
working-directory: ./mobile
|
||||
|
||||
- name: Run dart analyze
|
||||
run: dart analyze --fatal-infos
|
||||
working-directory: ./mobile
|
||||
|
||||
- name: Run dart format
|
||||
run: make format
|
||||
run: dart format lib/ --set-exit-if-changed
|
||||
working-directory: ./mobile
|
||||
|
||||
# TODO: Re-enable after upgrading custom_lint
|
||||
# - name: Run dart custom_lint
|
||||
- name: Run dart custom_lint
|
||||
run: dart run custom_lint
|
||||
working-directory: ./mobile
|
||||
|
||||
# Enable after riverpod generator migration is completed
|
||||
# - name: Run dart custom lint
|
||||
# run: dart run custom_lint
|
||||
|
||||
# TODO: Use https://github.com/CQLabs/dcm-action
|
||||
- name: Run DCM
|
||||
run: dcm analyze lib --fatal-style --fatal-warnings
|
||||
# working-directory: ./mobile
|
||||
|
||||
847
.github/workflows/test.yml
vendored
File diff suppressed because it is too large
74
.github/workflows/weblate-lock.yml
vendored
@@ -1,74 +0,0 @@
|
||||
name: Weblate checks
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [main]
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- ready_for_review
|
||||
- auto_merge_enabled
|
||||
- auto_merge_disabled
|
||||
|
||||
permissions: {}
|
||||
|
||||
env:
|
||||
BOT_NAME: immich-push-o-matic
|
||||
|
||||
jobs:
|
||||
pre-job:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should_run: ${{ steps.check.outputs.should_run }}
|
||||
steps:
|
||||
- id: token
|
||||
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: Check what should run
|
||||
id: check
|
||||
uses: immich-app/devtools/actions/pre-job@08bac802a312fc89808e0dd589271ca0974087b5 # pre-job-action-v2.0.0
|
||||
with:
|
||||
github-token: ${{ steps.token.outputs.token }}
|
||||
filters: |
|
||||
i18n:
|
||||
- 'i18n/!(en)**\.json'
|
||||
exclude-branches: 'chore/translations'
|
||||
skip-force-logic: 'true'
|
||||
|
||||
enforce-lock:
|
||||
name: Check Weblate Lock
|
||||
needs: [pre-job]
|
||||
runs-on: ubuntu-latest
|
||||
permissions: {}
|
||||
if: ${{ fromJSON(needs.pre-job.outputs.should_run).i18n == true }}
|
||||
steps:
|
||||
- id: token
|
||||
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: Bot review status
|
||||
env:
|
||||
PR_NUMBER: ${{ github.event.pull_request.number || github.event.pull_request_review.pull_request.number }}
|
||||
GH_TOKEN: ${{ steps.token.outputs.token }}
|
||||
run: |
|
||||
# Then check for APPROVED by the bot, if absent fail
|
||||
gh pr view "$PR_NUMBER" --repo "$GITHUB_REPOSITORY" --json reviews | jq -e '.reviews | map(select(.author.login == env.BOT_NAME and .state == "APPROVED")) | length > 0' \
|
||||
|| (echo "The push-o-matic bot has not approved this PR yet" && exit 1)
|
||||
|
||||
success-check-lock:
|
||||
name: Weblate Lock Check Success
|
||||
needs: [enforce-lock]
|
||||
runs-on: ubuntu-latest
|
||||
permissions: {}
|
||||
if: always()
|
||||
steps:
|
||||
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
|
||||
with:
|
||||
needs: ${{ toJSON(needs) }}
|
||||
5
.gitignore
vendored
@@ -3,7 +3,6 @@
|
||||
.DS_Store
|
||||
.vscode/*
|
||||
!.vscode/launch.json
|
||||
!.vscode/extensions.json
|
||||
.idea
|
||||
|
||||
docker/upload
|
||||
@@ -18,13 +17,9 @@ mobile/libisar.dylib
|
||||
mobile/openapi/test
|
||||
mobile/openapi/doc
|
||||
mobile/openapi/.openapi-generator/FILES
|
||||
mobile/ios/build
|
||||
|
||||
open-api/typescript-sdk/build
|
||||
mobile/android/fastlane/report.xml
|
||||
mobile/ios/fastlane/report.xml
|
||||
|
||||
vite.config.js.timestamp-*
|
||||
.pnpm-store
|
||||
.devcontainer/library
|
||||
.devcontainer/.env*
|
||||
|
||||
@@ -1,18 +0,0 @@
|
||||
module.exports = {
|
||||
hooks: {
|
||||
readPackage: (pkg) => {
|
||||
if (!pkg.name) {
|
||||
return pkg;
|
||||
}
|
||||
if (pkg.name === "exiftool-vendored") {
|
||||
if (pkg.optionalDependencies["exiftool-vendored.pl"]) {
|
||||
// make exiftool-vendored.pl a regular dependency
|
||||
pkg.dependencies["exiftool-vendored.pl"] =
|
||||
pkg.optionalDependencies["exiftool-vendored.pl"];
|
||||
delete pkg.optionalDependencies["exiftool-vendored.pl"];
|
||||
}
|
||||
}
|
||||
return pkg;
|
||||
},
|
||||
},
|
||||
};
|
||||
10
.vscode/extensions.json
vendored
@@ -1,10 +0,0 @@
|
||||
{
|
||||
"recommendations": [
|
||||
"esbenp.prettier-vscode",
|
||||
"svelte.svelte-vscode",
|
||||
"dbaeumer.vscode-eslint",
|
||||
"dart-code.flutter",
|
||||
"dart-code.dart-code",
|
||||
"dcmdev.dcm-vscode-extension"
|
||||
]
|
||||
}
|
||||
18
.vscode/launch.json
vendored
@@ -7,7 +7,7 @@
|
||||
"restart": true,
|
||||
"port": 9231,
|
||||
"name": "Immich API Server",
|
||||
"remoteRoot": "/usr/src/app/server",
|
||||
"remoteRoot": "/usr/src/app",
|
||||
"localRoot": "${workspaceFolder}/server"
|
||||
},
|
||||
{
|
||||
@@ -16,22 +16,8 @@
|
||||
"restart": true,
|
||||
"port": 9230,
|
||||
"name": "Immich Workers",
|
||||
"remoteRoot": "/usr/src/app/server",
|
||||
"remoteRoot": "/usr/src/app",
|
||||
"localRoot": "${workspaceFolder}/server"
|
||||
},
|
||||
{
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"name": "Immich CLI",
|
||||
"program": "${workspaceFolder}/cli/dist/index.js",
|
||||
"args": ["upload", "--help"],
|
||||
"runtimeArgs": ["--enable-source-maps"],
|
||||
"console": "integratedTerminal",
|
||||
"resolveSourceMapLocations": ["${workspaceFolder}/cli/dist/**/*.js.map"],
|
||||
"sourceMaps": true,
|
||||
"outFiles": ["${workspaceFolder}/cli/dist/**/*.js"],
|
||||
"skipFiles": ["<node_internals>/**"],
|
||||
"preLaunchTask": "Build Immich CLI"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
80
.vscode/settings.json
vendored
@@ -1,64 +1,44 @@
|
||||
{
|
||||
"editor.formatOnSave": true,
|
||||
"[javascript]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.tabSize": 2,
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[typescript]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.tabSize": 2,
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[css]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2,
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[svelte]": {
|
||||
"editor.defaultFormatter": "svelte.svelte-vscode",
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"svelte.enable-ts-plugin": true,
|
||||
"eslint.validate": [
|
||||
"javascript",
|
||||
"svelte"
|
||||
],
|
||||
"typescript.preferences.importModuleSpecifier": "non-relative",
|
||||
"[dart]": {
|
||||
"editor.defaultFormatter": "Dart-Code.dart-code",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.selectionHighlight": false,
|
||||
"editor.suggest.snippetsPreventQuickSuggestions": false,
|
||||
"editor.suggestSelection": "first",
|
||||
"editor.tabCompletion": "onlySnippets",
|
||||
"editor.wordBasedSuggestions": "off"
|
||||
"editor.wordBasedSuggestions": "off",
|
||||
"editor.defaultFormatter": "Dart-Code.dart-code"
|
||||
},
|
||||
"[javascript]": {
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": "explicit",
|
||||
"source.removeUnusedImports": "explicit"
|
||||
},
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[json]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[jsonc]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[svelte]": {
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": "explicit",
|
||||
"source.removeUnusedImports": "explicit"
|
||||
},
|
||||
"editor.defaultFormatter": "svelte.svelte-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[typescript]": {
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": "explicit",
|
||||
"source.removeUnusedImports": "explicit"
|
||||
},
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"cSpell.words": ["immich"],
|
||||
"editor.formatOnSave": true,
|
||||
"eslint.validate": ["javascript", "svelte"],
|
||||
"cSpell.words": [
|
||||
"immich"
|
||||
],
|
||||
"explorer.fileNesting.enabled": true,
|
||||
"explorer.fileNesting.patterns": {
|
||||
"*.dart": "${capture}.g.dart,${capture}.gr.dart,${capture}.drift.dart",
|
||||
"*.ts": "${capture}.spec.ts,${capture}.mock.ts",
|
||||
"package.json": "package-lock.json, yarn.lock, pnpm-lock.yaml, bun.lockb, bun.lock, pnpm-workspace.yaml, .pnpmfile.cjs"
|
||||
},
|
||||
"svelte.enable-ts-plugin": true,
|
||||
"typescript.preferences.importModuleSpecifier": "non-relative"
|
||||
}
|
||||
"*.ts": "${capture}.spec.ts,${capture}.mock.ts"
|
||||
}
|
||||
}
|
||||
80
.vscode/tasks.json
vendored
@@ -1,80 +0,0 @@
|
||||
{
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "Fix Permissions, Install Dependencies",
|
||||
"type": "shell",
|
||||
"command": "[ -f /immich-devcontainer/container-start.sh ] && /immich-devcontainer/container-start.sh || exit 0",
|
||||
"isBackground": true,
|
||||
"presentation": {
|
||||
"echo": true,
|
||||
"reveal": "always",
|
||||
"focus": false,
|
||||
"panel": "dedicated",
|
||||
"showReuseMessage": true,
|
||||
"clear": false,
|
||||
"group": "Devcontainer tasks",
|
||||
"close": true
|
||||
},
|
||||
"runOptions": {
|
||||
"runOn": "default"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Immich API Server (Nest)",
|
||||
"dependsOn": ["Fix Permissions, Install Dependencies"],
|
||||
"type": "shell",
|
||||
"command": "[ -f /immich-devcontainer/container-start-backend.sh ] && /immich-devcontainer/container-start-backend.sh || exit 0",
|
||||
"isBackground": true,
|
||||
"presentation": {
|
||||
"echo": true,
|
||||
"reveal": "always",
|
||||
"focus": false,
|
||||
"panel": "dedicated",
|
||||
"showReuseMessage": true,
|
||||
"clear": false,
|
||||
"group": "Devcontainer tasks",
|
||||
"close": true
|
||||
},
|
||||
"runOptions": {
|
||||
"runOn": "default"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Immich Web Server (Vite)",
|
||||
"dependsOn": ["Fix Permissions, Install Dependencies"],
|
||||
"type": "shell",
|
||||
"command": "[ -f /immich-devcontainer/container-start-frontend.sh ] && /immich-devcontainer/container-start-frontend.sh || exit 0",
|
||||
"isBackground": true,
|
||||
"presentation": {
|
||||
"echo": true,
|
||||
"reveal": "always",
|
||||
"focus": false,
|
||||
"panel": "dedicated",
|
||||
"showReuseMessage": true,
|
||||
"clear": false,
|
||||
"group": "Devcontainer tasks",
|
||||
"close": true
|
||||
},
|
||||
"runOptions": {
|
||||
"runOn": "default"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Immich Server and Web",
|
||||
"dependsOn": ["Immich Web Server (Vite)", "Immich API Server (Nest)"],
|
||||
"runOptions": {
|
||||
"runOn": "folderOpen"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Build Immich CLI",
|
||||
"type": "shell",
|
||||
"command": "pnpm --filter cli build:dev"
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,7 +1,5 @@
|
||||
/.github/ @bo0tzz
|
||||
/docker/ @bo0tzz
|
||||
/server/ @danieldietzler
|
||||
/web/ @danieldietzler
|
||||
/machine-learning/ @mertalev
|
||||
/e2e/ @danieldietzler
|
||||
/mobile/ @shenlong-tanwen
|
||||
|
||||
Makefile (120 lines)
@@ -1,36 +1,24 @@
|
||||
dev:
|
||||
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --remove-orphans
|
||||
docker compose -f ./docker/docker-compose.dev.yml up --remove-orphans || make dev-down
|
||||
|
||||
dev-down:
|
||||
docker compose -f ./docker/docker-compose.dev.yml down --remove-orphans
|
||||
|
||||
dev-update:
|
||||
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --build -V --remove-orphans
|
||||
docker compose -f ./docker/docker-compose.dev.yml up --build -V --remove-orphans
|
||||
|
||||
dev-scale:
|
||||
@trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans
|
||||
|
||||
dev-docs:
|
||||
npm --prefix docs run start
|
||||
docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans
|
||||
|
||||
.PHONY: e2e
|
||||
e2e:
|
||||
@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --remove-orphans
|
||||
|
||||
e2e-update:
|
||||
@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
|
||||
|
||||
e2e-down:
|
||||
docker compose -f ./e2e/docker-compose.yml down --remove-orphans
|
||||
docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
|
||||
|
||||
prod:
|
||||
@trap 'make prod-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans
|
||||
|
||||
prod-down:
|
||||
docker compose -f ./docker/docker-compose.prod.yml down --remove-orphans
|
||||
docker compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans
|
||||
|
||||
prod-scale:
|
||||
@trap 'make prod-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans
|
||||
docker compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans
|
||||
|
||||
.PHONY: open-api
|
||||
open-api:
|
||||
@@ -43,7 +31,7 @@ open-api-typescript:
|
||||
cd ./open-api && bash ./bin/generate-open-api.sh typescript
|
||||
|
||||
sql:
|
||||
pnpm --filter immich run sync:sql
|
||||
npm --prefix server run sync:sql
|
||||
|
||||
attach-server:
|
||||
docker exec -it docker_immich-server_1 sh
|
||||
@@ -51,57 +39,31 @@ attach-server:
|
||||
renovate:
|
||||
LOG_LEVEL=debug npx renovate --platform=local --repository-cache=reset
|
||||
|
||||
# Directories that need to be created for volumes or build output
|
||||
VOLUME_DIRS = \
|
||||
./.pnpm-store \
|
||||
./web/.svelte-kit \
|
||||
./web/node_modules \
|
||||
./web/coverage \
|
||||
./e2e/node_modules \
|
||||
./docs/node_modules \
|
||||
./server/node_modules \
|
||||
./open-api/typescript-sdk/node_modules \
|
||||
./.github/node_modules \
|
||||
./node_modules \
|
||||
./cli/node_modules
|
||||
|
||||
# Include .env file if it exists
|
||||
-include docker/.env
|
||||
|
||||
MODULES = e2e server web cli sdk docs .github
|
||||
|
||||
# directory to package name mapping function
|
||||
# cli = @immich/cli
|
||||
# docs = documentation
|
||||
# e2e = immich-e2e
|
||||
# open-api/typescript-sdk = @immich/sdk
|
||||
# server = immich
|
||||
# web = immich-web
|
||||
map-package = $(subst sdk,@immich/sdk,$(subst cli,@immich/cli,$(subst docs,documentation,$(subst e2e,immich-e2e,$(subst server,immich,$(subst web,immich-web,$1))))))
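# e.g. `make build-web` resolves to `pnpm --filter immich-web run build`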
|
||||
MODULES = e2e server web cli sdk docs
|
||||
|
||||
audit-%:
|
||||
pnpm --filter $(call map-package,$*) audit fix
|
||||
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) audit fix
|
||||
install-%:
|
||||
pnpm --filter $(call map-package,$*) install $(if $(FROZEN),--frozen-lockfile) $(if $(OFFLINE),--offline)
|
||||
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) i
|
||||
build-cli: build-sdk
|
||||
build-web: build-sdk
|
||||
build-%: install-%
|
||||
pnpm --filter $(call map-package,$*) run build
|
||||
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run build
|
||||
format-%:
|
||||
pnpm --filter $(call map-package,$*) run format:fix
|
||||
npm --prefix $* run format:fix
|
||||
lint-%:
|
||||
pnpm --filter $(call map-package,$*) run lint:fix
|
||||
npm --prefix $* run lint:fix
|
||||
check-%:
|
||||
pnpm --filter $(call map-package,$*) run check
|
||||
npm --prefix $* run check
|
||||
check-web:
|
||||
pnpm --filter immich-web run check:typescript
|
||||
pnpm --filter immich-web run check:svelte
|
||||
npm --prefix web run check:typescript
|
||||
npm --prefix web run check:svelte
|
||||
test-%:
|
||||
pnpm --filter $(call map-package,$*) run test
|
||||
npm --prefix $* run test
|
||||
test-e2e:
|
||||
docker compose -f ./e2e/docker-compose.yml build
|
||||
pnpm --filter immich-e2e run test
|
||||
pnpm --filter immich-e2e run test:web
|
||||
npm --prefix e2e run test
|
||||
npm --prefix e2e run test:web
|
||||
test-medium:
|
||||
docker run \
|
||||
--rm \
|
||||
@@ -111,39 +73,23 @@ test-medium:
|
||||
-v ./server/tsconfig.json:/usr/src/app/tsconfig.json \
|
||||
-e NODE_ENV=development \
|
||||
immich-server:latest \
|
||||
-c "pnpm test:medium -- --run"
|
||||
-c "npm ci && npm run test:medium -- --run"
|
||||
test-medium-dev:
|
||||
docker exec -it immich_server /bin/sh -c "pnpm run test:medium"
|
||||
docker exec -it immich_server /bin/sh -c "npm run test:medium"
|
||||
|
||||
install-all:
|
||||
pnpm -r --filter '!documentation' install
|
||||
|
||||
build-all: $(foreach M,$(filter-out e2e docs .github,$(MODULES)),build-$M) ;
|
||||
|
||||
check-all:
|
||||
pnpm -r --filter '!documentation' run "/^(check|check\:svelte|check\:typescript)$/"
|
||||
lint-all:
|
||||
pnpm -r --filter '!documentation' run lint:fix
|
||||
format-all:
|
||||
pnpm -r --filter '!documentation' run format:fix
|
||||
audit-all:
|
||||
pnpm -r --filter '!documentation' audit fix
|
||||
hygiene-all: audit-all
|
||||
pnpm -r --filter '!documentation' run "/(format:fix|check|check:svelte|check:typescript|sql)/"
|
||||
|
||||
test-all:
|
||||
pnpm -r --filter '!documentation' run "/^test/"
|
||||
build-all: $(foreach M,$(filter-out e2e,$(MODULES)),build-$M) ;
|
||||
install-all: $(foreach M,$(MODULES),install-$M) ;
|
||||
check-all: $(foreach M,$(filter-out sdk cli docs,$(MODULES)),check-$M) ;
|
||||
lint-all: $(foreach M,$(filter-out sdk docs,$(MODULES)),lint-$M) ;
|
||||
format-all: $(foreach M,$(filter-out sdk,$(MODULES)),format-$M) ;
|
||||
audit-all: $(foreach M,$(MODULES),audit-$M) ;
|
||||
hygiene-all: lint-all format-all check-all sql audit-all;
|
||||
test-all: $(foreach M,$(filter-out sdk docs,$(MODULES)),test-$M) ;
|
||||
|
||||
clean:
|
||||
find . -name "node_modules" -type d -prune -exec rm -rf {} +
|
||||
find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
|
||||
find . -name "dist" -type d -prune -exec rm -rf '{}' +
|
||||
find . -name "build" -type d -prune -exec rm -rf '{}' +
|
||||
find . -name ".svelte-kit" -type d -prune -exec rm -rf '{}' +
|
||||
find . -name "coverage" -type d -prune -exec rm -rf '{}' +
|
||||
find . -name ".pnpm-store" -type d -prune -exec rm -rf '{}' +
|
||||
command -v docker >/dev/null 2>&1 && docker compose -f ./docker/docker-compose.dev.yml down -v --remove-orphans || true
|
||||
command -v docker >/dev/null 2>&1 && docker compose -f ./e2e/docker-compose.yml down -v --remove-orphans || true
|
||||
|
||||
|
||||
setup-server-dev: install-server
|
||||
setup-web-dev: install-sdk build-sdk install-web
|
||||
find . -name "svelte-kit" -type d -prune -exec rm -rf '{}' +
|
||||
docker compose -f ./docker/docker-compose.dev.yml rm -v -f || true
|
||||
docker compose -f ./e2e/docker-compose.yml rm -v -f || true
|
||||
|
||||
README.md (37 lines)
@@ -1,11 +1,11 @@
|
||||
<p align="center">
|
||||
<br/>
|
||||
<br/>
|
||||
<a href="https://opensource.org/license/agpl-v3"><img src="https://img.shields.io/badge/License-AGPL_v3-blue.svg?color=3F51B5&style=for-the-badge&label=License&logoColor=000000&labelColor=ececec" alt="License: AGPLv3"></a>
|
||||
<a href="https://discord.immich.app">
|
||||
<img src="https://img.shields.io/discord/979116623879368755.svg?label=Discord&logo=Discord&style=for-the-badge&logoColor=000000&labelColor=ececec" alt="Discord"/>
|
||||
</a>
|
||||
<br/>
|
||||
<br/>
|
||||
<br/>
|
||||
<br/>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
@@ -28,9 +28,7 @@
|
||||
<a href="readme_i18n/README_de_DE.md">Deutsch</a>
|
||||
<a href="readme_i18n/README_nl_NL.md">Nederlands</a>
|
||||
<a href="readme_i18n/README_tr_TR.md">Türkçe</a>
|
||||
<a href="readme_i18n/README_zh_CN.md">简体中文</a>
|
||||
<a href="readme_i18n/README_zh_TW.md">正體中文</a>
|
||||
<a href="readme_i18n/README_uk_UA.md">Українська</a>
|
||||
<a href="readme_i18n/README_zh_CN.md">中文</a>
|
||||
<a href="readme_i18n/README_ru_RU.md">Русский</a>
|
||||
<a href="readme_i18n/README_pt_BR.md">Português Brasileiro</a>
|
||||
<a href="readme_i18n/README_sv_SE.md">Svenska</a>
|
||||
@@ -39,29 +37,32 @@
|
||||
<a href="readme_i18n/README_th_TH.md">ภาษาไทย</a>
|
||||
</p>
|
||||
|
||||
## Disclaimer
|
||||
|
||||
> [!WARNING]
|
||||
> ⚠️ Always follow the [3-2-1](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) backup plan for your precious photos and videos!
|
||||
>
|
||||
|
||||
- ⚠️ The project is under **very active** development.
|
||||
- ⚠️ Expect bugs and breaking changes.
|
||||
- ⚠️ **Do not use the app as the only way to store your photos and videos.**
|
||||
- ⚠️ Always follow the [3-2-1](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) backup plan for your precious photos and videos!
|
||||
|
||||
> [!NOTE]
|
||||
> You can find the main documentation, including installation guides, at https://immich.app/.
|
||||
|
||||
## Links
|
||||
|
||||
- [Documentation](https://docs.immich.app/)
|
||||
- [About](https://docs.immich.app/overview/introduction)
|
||||
- [Installation](https://docs.immich.app/install/requirements)
|
||||
- [Documentation](https://immich.app/docs)
|
||||
- [About](https://immich.app/docs/overview/introduction)
|
||||
- [Installation](https://immich.app/docs/install/requirements)
|
||||
- [Roadmap](https://immich.app/roadmap)
|
||||
- [Demo](#demo)
|
||||
- [Features](#features)
|
||||
- [Translations](https://docs.immich.app/developer/translations)
|
||||
- [Contributing](https://docs.immich.app/overview/support-the-project)
|
||||
- [Translations](https://immich.app/docs/developer/translations)
|
||||
- [Contributing](https://immich.app/docs/overview/support-the-project)
|
||||
|
||||
## Demo
|
||||
|
||||
Access the demo [here](https://demo.immich.app). For the mobile app, you can use `https://demo.immich.app` for the `Server Endpoint URL`.
|
||||
Access the demo [here](https://demo.immich.app). The demo is running on a free-tier Oracle VM in Amsterdam with a 2.4 GHz quad-core ARM64 CPU and 24 GB of RAM.
|
||||
|
||||
For the mobile app, you can use `https://demo.immich.app/api` for the `Server Endpoint URL`
|
||||
|
||||
### Login credentials
|
||||
|
||||
@@ -102,11 +103,11 @@ Access the demo [here](https://demo.immich.app). For the mobile app, you can use
|
||||
| Read-only gallery | Yes | Yes |
|
||||
| Stacked Photos | Yes | Yes |
|
||||
| Tags | No | Yes |
|
||||
| Folder View | Yes | Yes |
|
||||
| Folder View | No | Yes |
|
||||
|
||||
## Translations
|
||||
|
||||
Read more about translations [here](https://docs.immich.app/developer/translations).
|
||||
Read more about translations [here](https://immich.app/docs/developer/translations).
|
||||
|
||||
<a href="https://hosted.weblate.org/engage/immich/">
|
||||
<img src="https://hosted.weblate.org/widget/immich/immich/multi-auto.svg" alt="Translation status" />
|
||||
|
||||
@@ -1 +1 @@
|
||||
22.20.0
|
||||
22.12.0
|
||||
|
||||
@@ -1,14 +1,19 @@
|
||||
FROM node:22.16.0-alpine3.20@sha256:2289fb1fba0f4633b08ec47b94a89c7e20b829fc5679f9b7b298eaa2f1ed8b7e AS core
|
||||
FROM node:22.12.0-alpine3.20@sha256:96cc8323e25c8cc6ddcb8b965e135cfd57846e8003ec0d7bcec16c5fd5f6d39f AS core
|
||||
|
||||
WORKDIR /usr/src/open-api/typescript-sdk
|
||||
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
|
||||
RUN npm ci
|
||||
COPY open-api/typescript-sdk/ ./
|
||||
RUN npm run build
|
||||
|
||||
WORKDIR /usr/src/app
|
||||
COPY package* pnpm* .pnpmfile.cjs ./
|
||||
COPY ./cli ./cli/
|
||||
COPY ./open-api/typescript-sdk ./open-api/typescript-sdk/
|
||||
RUN corepack enable pnpm && \
|
||||
pnpm install --filter @immich/sdk --filter @immich/cli --frozen-lockfile && \
|
||||
pnpm --filter @immich/sdk build && \
|
||||
pnpm --filter @immich/cli build
|
||||
|
||||
COPY cli/package.json cli/package-lock.json ./
|
||||
RUN npm ci
|
||||
|
||||
COPY cli .
|
||||
RUN npm run build
|
||||
|
||||
WORKDIR /import
|
||||
|
||||
ENTRYPOINT ["node", "/usr/src/app/cli/dist"]
|
||||
ENTRYPOINT ["node", "/usr/src/app/dist"]
|
||||
|
||||
@@ -1,38 +1,30 @@
|
||||
A command-line interface for interfacing with the self-hosted photo manager [Immich](https://immich.app/).
|
||||
|
||||
Please see the [Immich CLI documentation](https://docs.immich.app/features/command-line-interface).
|
||||
Please see the [Immich CLI documentation](https://immich.app/docs/features/command-line-interface).
|
||||
|
||||
# For developers
|
||||
|
||||
Before building the CLI, you must build the Immich server and the open-api client. To build the server, run the following in the server folder:
|
||||
|
||||
$ pnpm install
|
||||
$ pnpm run build
|
||||
$ npm install
|
||||
$ npm run build
|
||||
|
||||
Then, to build the open-api client run the following in the open-api folder:
|
||||
|
||||
$ ./bin/generate-open-api.sh
|
||||
|
||||
## Run from build
|
||||
To run the Immich CLI from source, run the following in the cli folder:
|
||||
|
||||
Go to the cli folder and build it:
|
||||
$ npm install
|
||||
$ npm run build
|
||||
$ ts-node .
|
||||
|
||||
$ pnpm install
|
||||
$ pnpm run build
|
||||
$ node dist/index.js
|
||||
You'll need ts-node; the easiest way to install it is with npm:
|
||||
|
||||
## Run and Debug from source (VSCode)
|
||||
|
||||
With VS Code you can run and debug the Immich CLI. Go to the launch.json file, find the Immich CLI config, and change its args to the command you need to debug:
|
||||
|
||||
`"args": ["upload", "--help"],`
|
||||
|
||||
Replace that with the command of your choice.
|
||||
|
||||
## Install from build
|
||||
$ npm i -g ts-node
|
||||
|
||||
You can also build and install the CLI using
|
||||
|
||||
$ pnpm run build
|
||||
$ pnpm install -g .
|
||||
$ npm run build
|
||||
$ npm install -g .
|
||||
|
||||
|
||||
@@ -1,2 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
import '../dist/index.js';
|
||||
@@ -1,29 +1,39 @@
|
||||
import { FlatCompat } from '@eslint/eslintrc';
|
||||
import js from '@eslint/js';
|
||||
import eslintPluginPrettierRecommended from 'eslint-plugin-prettier/recommended';
|
||||
import eslintPluginUnicorn from 'eslint-plugin-unicorn';
|
||||
import typescriptEslint from '@typescript-eslint/eslint-plugin';
|
||||
import tsParser from '@typescript-eslint/parser';
|
||||
import globals from 'globals';
|
||||
import path from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import typescriptEslint from 'typescript-eslint';
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
const compat = new FlatCompat({
|
||||
baseDirectory: __dirname,
|
||||
recommendedConfig: js.configs.recommended,
|
||||
allConfig: js.configs.all,
|
||||
});
|
||||
|
||||
export default typescriptEslint.config([
|
||||
eslintPluginUnicorn.configs.recommended,
|
||||
eslintPluginPrettierRecommended,
|
||||
js.configs.recommended,
|
||||
typescriptEslint.configs.recommended,
|
||||
export default [
|
||||
{
|
||||
ignores: ['eslint.config.mjs', 'dist'],
|
||||
},
|
||||
...compat.extends(
|
||||
'plugin:@typescript-eslint/recommended',
|
||||
'plugin:prettier/recommended',
|
||||
'plugin:unicorn/recommended',
|
||||
),
|
||||
{
|
||||
plugins: {
|
||||
'@typescript-eslint': typescriptEslint,
|
||||
},
|
||||
|
||||
languageOptions: {
|
||||
globals: {
|
||||
...globals.node,
|
||||
},
|
||||
|
||||
parser: typescriptEslint.parser,
|
||||
parser: tsParser,
|
||||
ecmaVersion: 5,
|
||||
sourceType: 'module',
|
||||
|
||||
@@ -48,4 +58,4 @@ export default typescriptEslint.config([
|
||||
'object-shorthand': ['error', 'always'],
|
||||
},
|
||||
},
|
||||
]);
|
||||
];
|
||||
|
||||
cli/package-lock.json (generated, new file, 4559 lines): diff suppressed because it is too large
@@ -1,11 +1,11 @@
|
||||
{
|
||||
"name": "@immich/cli",
|
||||
"version": "2.2.97",
|
||||
"version": "2.2.37",
|
||||
"description": "Command Line Interface (CLI) for Immich",
|
||||
"type": "module",
|
||||
"exports": "./dist/index.js",
|
||||
"bin": {
|
||||
"immich": "./bin/immich"
|
||||
"immich": "dist/index.js"
|
||||
},
|
||||
"license": "GNU Affero General Public License version 3",
|
||||
"keywords": [
|
||||
@@ -13,37 +13,37 @@
|
||||
"cli"
|
||||
],
|
||||
"devDependencies": {
|
||||
"@eslint/eslintrc": "^3.1.0",
|
||||
"@eslint/js": "^9.8.0",
|
||||
"@immich/sdk": "file:../open-api/typescript-sdk",
|
||||
"@types/byte-size": "^8.1.0",
|
||||
"@types/cli-progress": "^3.11.0",
|
||||
"@types/lodash-es": "^4.17.12",
|
||||
"@types/micromatch": "^4.0.9",
|
||||
"@types/mock-fs": "^4.13.1",
|
||||
"@types/node": "^22.18.10",
|
||||
"@vitest/coverage-v8": "^3.0.0",
|
||||
"@types/node": "^22.10.2",
|
||||
"@typescript-eslint/eslint-plugin": "^8.15.0",
|
||||
"@typescript-eslint/parser": "^8.15.0",
|
||||
"@vitest/coverage-v8": "^2.0.5",
|
||||
"byte-size": "^9.0.0",
|
||||
"cli-progress": "^3.12.0",
|
||||
"commander": "^12.0.0",
|
||||
"eslint": "^9.14.0",
|
||||
"eslint-config-prettier": "^10.1.8",
|
||||
"eslint-config-prettier": "^9.1.0",
|
||||
"eslint-plugin-prettier": "^5.1.3",
|
||||
"eslint-plugin-unicorn": "^60.0.0",
|
||||
"globals": "^16.0.0",
|
||||
"eslint-plugin-unicorn": "^56.0.1",
|
||||
"globals": "^15.9.0",
|
||||
"mock-fs": "^5.2.0",
|
||||
"prettier": "^3.2.5",
|
||||
"prettier-plugin-organize-imports": "^4.0.0",
|
||||
"typescript": "^5.3.3",
|
||||
"typescript-eslint": "^8.28.0",
|
||||
"vite": "^7.0.0",
|
||||
"vite": "^5.0.12",
|
||||
"vite-tsconfig-paths": "^5.0.0",
|
||||
"vitest": "^3.0.0",
|
||||
"vitest": "^2.0.5",
|
||||
"vitest-fetch-mock": "^0.4.0",
|
||||
"yaml": "^2.3.1"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "vite build",
|
||||
"build:dev": "vite build --sourcemap true",
|
||||
"lint": "eslint \"src/**/*.ts\" --max-warnings 0",
|
||||
"lint:fix": "npm run lint -- --fix",
|
||||
"prepack": "npm run build",
|
||||
@@ -62,13 +62,11 @@
|
||||
"node": ">=20.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"chokidar": "^4.0.3",
|
||||
"fast-glob": "^3.3.2",
|
||||
"fastq": "^1.17.1",
|
||||
"lodash-es": "^4.17.21",
|
||||
"micromatch": "^4.0.8"
|
||||
"lodash-es": "^4.17.21"
|
||||
},
|
||||
"volta": {
|
||||
"node": "22.20.0"
|
||||
"node": "22.12.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
import * as fs from 'node:fs';
|
||||
import * as os from 'node:os';
|
||||
import * as path from 'node:path';
|
||||
import { setTimeout as sleep } from 'node:timers/promises';
|
||||
import { describe, expect, it, MockedFunction, vi } from 'vitest';
|
||||
import { describe, expect, it, vi } from 'vitest';
|
||||
|
||||
import { Action, checkBulkUpload, defaults, getSupportedMediaTypes, Reason } from '@immich/sdk';
|
||||
import { Action, checkBulkUpload, defaults, Reason } from '@immich/sdk';
|
||||
import createFetchMock from 'vitest-fetch-mock';
|
||||
|
||||
import { checkForDuplicates, getAlbumName, startWatch, uploadFiles, UploadOptionsDto } from 'src/commands/asset';
|
||||
import { checkForDuplicates, getAlbumName, uploadFiles, UploadOptionsDto } from './asset';
|
||||
|
||||
vi.mock('@immich/sdk');
|
||||
|
||||
@@ -200,112 +199,3 @@ describe('checkForDuplicates', () => {
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('startWatch', () => {
|
||||
let testFolder: string;
|
||||
let checkBulkUploadMocked: MockedFunction<typeof checkBulkUpload>;
|
||||
|
||||
beforeEach(async () => {
|
||||
vi.restoreAllMocks();
|
||||
|
||||
vi.mocked(getSupportedMediaTypes).mockResolvedValue({
|
||||
image: ['.jpg'],
|
||||
sidecar: ['.xmp'],
|
||||
video: ['.mp4'],
|
||||
});
|
||||
|
||||
testFolder = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'test-startWatch-'));
|
||||
checkBulkUploadMocked = vi.mocked(checkBulkUpload);
|
||||
checkBulkUploadMocked.mockResolvedValue({
|
||||
results: [],
|
||||
});
|
||||
});
|
||||
|
||||
it('should start watching a directory and upload new files', async () => {
|
||||
const testFilePath = path.join(testFolder, 'test.jpg');
|
||||
|
||||
await startWatch([testFolder], { concurrency: 1 }, { batchSize: 1, debounceTimeMs: 10 });
|
||||
await sleep(100); // to debounce the watcher from considering the test file as an existing file
|
||||
await fs.promises.writeFile(testFilePath, 'testjpg');
|
||||
|
||||
await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
|
||||
expect(checkBulkUpload).toHaveBeenCalledWith({
|
||||
assetBulkUploadCheckDto: {
|
||||
assets: [
|
||||
expect.objectContaining({
|
||||
id: testFilePath,
|
||||
}),
|
||||
],
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should filter out unsupported files', async () => {
|
||||
const testFilePath = path.join(testFolder, 'test.jpg');
|
||||
const unsupportedFilePath = path.join(testFolder, 'test.txt');
|
||||
|
||||
await startWatch([testFolder], { concurrency: 1 }, { batchSize: 1, debounceTimeMs: 10 });
|
||||
await sleep(100); // to debounce the watcher from considering the test file as an existing file
|
||||
await fs.promises.writeFile(testFilePath, 'testjpg');
|
||||
await fs.promises.writeFile(unsupportedFilePath, 'testtxt');
|
||||
|
||||
await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
|
||||
expect(checkBulkUpload).toHaveBeenCalledWith({
|
||||
assetBulkUploadCheckDto: {
|
||||
assets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
id: testFilePath,
|
||||
}),
|
||||
]),
|
||||
},
|
||||
});
|
||||
|
||||
expect(checkBulkUpload).not.toHaveBeenCalledWith({
|
||||
assetBulkUploadCheckDto: {
|
||||
assets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
id: unsupportedFilePath,
|
||||
}),
|
||||
]),
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should filter out ignored patterns', async () => {
|
||||
const testFilePath = path.join(testFolder, 'test.jpg');
|
||||
const ignoredPattern = 'ignored';
|
||||
const ignoredFolder = path.join(testFolder, ignoredPattern);
|
||||
await fs.promises.mkdir(ignoredFolder, { recursive: true });
|
||||
const ignoredFilePath = path.join(ignoredFolder, 'ignored.jpg');
|
||||
|
||||
await startWatch([testFolder], { concurrency: 1, ignore: ignoredPattern }, { batchSize: 1, debounceTimeMs: 10 });
|
||||
await sleep(100); // to debounce the watcher from considering the test file as an existing file
|
||||
await fs.promises.writeFile(testFilePath, 'testjpg');
|
||||
await fs.promises.writeFile(ignoredFilePath, 'ignoredjpg');
|
||||
|
||||
await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
|
||||
expect(checkBulkUpload).toHaveBeenCalledWith({
|
||||
assetBulkUploadCheckDto: {
|
||||
assets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
id: testFilePath,
|
||||
}),
|
||||
]),
|
||||
},
|
||||
});
|
||||
|
||||
expect(checkBulkUpload).not.toHaveBeenCalledWith({
|
||||
assetBulkUploadCheckDto: {
|
||||
assets: expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
id: ignoredFilePath,
|
||||
}),
|
||||
]),
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await fs.promises.rm(testFolder, { recursive: true, force: true });
|
||||
});
|
||||
});
|
||||
|
||||
@@ -12,18 +12,13 @@ import {
|
||||
getSupportedMediaTypes,
|
||||
} from '@immich/sdk';
|
||||
import byteSize from 'byte-size';
|
||||
import { Matcher, watch as watchFs } from 'chokidar';
|
||||
import { MultiBar, Presets, SingleBar } from 'cli-progress';
|
||||
import { chunk } from 'lodash-es';
|
||||
import micromatch from 'micromatch';
|
||||
import { Stats, createReadStream } from 'node:fs';
|
||||
import { stat, unlink } from 'node:fs/promises';
|
||||
import path, { basename } from 'node:path';
|
||||
import { Queue } from 'src/queue';
|
||||
import { BaseOptions, Batcher, authenticate, crawl, sha1 } from 'src/utils';
|
||||
|
||||
const UPLOAD_WATCH_BATCH_SIZE = 100;
|
||||
const UPLOAD_WATCH_DEBOUNCE_TIME_MS = 10_000;
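// watched paths are queued and uploaded in batches of 100, or after 10s with no new changes, whichever comes first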
|
||||
import { BaseOptions, authenticate, crawl, sha1 } from 'src/utils';
|
||||
|
||||
const s = (count: number) => (count === 1 ? '' : 's');
|
||||
|
||||
@@ -41,9 +36,6 @@ export interface UploadOptionsDto {
|
||||
albumName?: string;
|
||||
includeHidden?: boolean;
|
||||
concurrency: number;
|
||||
progress?: boolean;
|
||||
watch?: boolean;
|
||||
jsonOutput?: boolean;
|
||||
}
|
||||
|
||||
class UploadFile extends File {
|
||||
@@ -63,100 +55,19 @@ class UploadFile extends File {
|
||||
}
|
||||
}
|
||||
|
||||
const uploadBatch = async (files: string[], options: UploadOptionsDto) => {
|
||||
const { newFiles, duplicates } = await checkForDuplicates(files, options);
|
||||
const newAssets = await uploadFiles(newFiles, options);
|
||||
if (options.jsonOutput) {
|
||||
console.log(JSON.stringify({ newFiles, duplicates, newAssets }, undefined, 4));
|
||||
}
|
||||
await updateAlbums([...newAssets, ...duplicates], options);
|
||||
await deleteFiles(
|
||||
newAssets.map(({ filepath }) => filepath),
|
||||
options,
|
||||
);
|
||||
};
|
||||
|
||||
export const startWatch = async (
|
||||
paths: string[],
|
||||
options: UploadOptionsDto,
|
||||
{
|
||||
batchSize = UPLOAD_WATCH_BATCH_SIZE,
|
||||
debounceTimeMs = UPLOAD_WATCH_DEBOUNCE_TIME_MS,
|
||||
}: { batchSize?: number; debounceTimeMs?: number } = {},
|
||||
) => {
|
||||
const watcherIgnored: Matcher[] = [];
|
||||
const { image, video } = await getSupportedMediaTypes();
|
||||
const extensions = new Set([...image, ...video]);
|
||||
|
||||
if (options.ignore) {
|
||||
watcherIgnored.push((path) => micromatch.contains(path, `**/${options.ignore}`));
|
||||
}
|
||||
|
||||
const pathsBatcher = new Batcher<string>({
|
||||
batchSize,
|
||||
debounceTimeMs,
|
||||
onBatch: async (paths: string[]) => {
|
||||
const uniquePaths = [...new Set(paths)];
|
||||
await uploadBatch(uniquePaths, options);
|
||||
},
|
||||
});
|
||||
|
||||
const onFile = async (path: string, stats?: Stats) => {
|
||||
if (stats?.isDirectory()) {
|
||||
return;
|
||||
}
|
||||
const ext = '.' + path.split('.').pop()?.toLowerCase();
|
||||
if (!ext || !extensions.has(ext)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!options.progress) {
|
||||
// only log when progress bars are disabled, since logging can interfere with the progress bar rendering
|
||||
console.log(`Change detected: ${path}`);
|
||||
}
|
||||
pathsBatcher.add(path);
|
||||
};
|
||||
const fsWatcher = watchFs(paths, {
|
||||
ignoreInitial: true,
|
||||
ignored: watcherIgnored,
|
||||
alwaysStat: true,
|
||||
awaitWriteFinish: true,
|
||||
depth: options.recursive ? undefined : 1,
|
||||
persistent: true,
|
||||
})
|
||||
.on('add', onFile)
|
||||
.on('change', onFile)
|
||||
.on('error', (error) => console.error(`Watcher error: ${error}`));
|
||||
|
||||
process.on('SIGINT', async () => {
|
||||
console.log('Exiting...');
|
||||
await fsWatcher.close();
|
||||
process.exit();
|
||||
});
|
||||
};
|
||||
|
||||
export const upload = async (paths: string[], baseOptions: BaseOptions, options: UploadOptionsDto) => {
|
||||
await authenticate(baseOptions);
|
||||
|
||||
const scanFiles = await scan(paths, options);
|
||||
|
||||
if (scanFiles.length === 0) {
|
||||
if (options.watch) {
|
||||
console.log('No files found initially.');
|
||||
} else {
|
||||
console.log('No files found, exiting');
|
||||
return;
|
||||
}
|
||||
console.log('No files found, exiting');
|
||||
return;
|
||||
}
|
||||
|
||||
if (options.watch) {
|
||||
console.log('Watching for changes...');
|
||||
await startWatch(paths, options);
|
||||
// the watcher does not handle the initial scan,
// as scan() is a more efficient quick start with batched results
|
||||
}
|
||||
|
||||
await uploadBatch(scanFiles, options);
|
||||
const { newFiles, duplicates } = await checkForDuplicates(scanFiles, options);
|
||||
const newAssets = await uploadFiles(newFiles, options);
|
||||
await updateAlbums([...newAssets, ...duplicates], options);
|
||||
await deleteFiles(newFiles, options);
|
||||
};
|
||||
|
||||
const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
|
||||
@@ -174,25 +85,19 @@ const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
|
||||
return files;
|
||||
};
|
||||
|
||||
export const checkForDuplicates = async (files: string[], { concurrency, skipHash, progress }: UploadOptionsDto) => {
|
||||
export const checkForDuplicates = async (files: string[], { concurrency, skipHash }: UploadOptionsDto) => {
|
||||
if (skipHash) {
|
||||
console.log('Skipping hash check, assuming all files are new');
|
||||
return { newFiles: files, duplicates: [] };
|
||||
}
|
||||
|
||||
let multiBar: MultiBar | undefined;
|
||||
const multiBar = new MultiBar(
|
||||
{ format: '{message} | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
|
||||
Presets.shades_classic,
|
||||
);
|
||||
|
||||
if (progress) {
|
||||
multiBar = new MultiBar(
|
||||
{ format: '{message} | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
|
||||
Presets.shades_classic,
|
||||
);
|
||||
} else {
|
||||
console.log(`Received ${files.length} files, hashing...`);
|
||||
}
|
||||
|
||||
const hashProgressBar = multiBar?.create(files.length, 0, { message: 'Hashing files ' });
|
||||
const checkProgressBar = multiBar?.create(files.length, 0, { message: 'Checking for duplicates' });
|
||||
const hashProgressBar = multiBar.create(files.length, 0, { message: 'Hashing files ' });
|
||||
const checkProgressBar = multiBar.create(files.length, 0, { message: 'Checking for duplicates' });
|
||||
|
||||
const newFiles: string[] = [];
|
||||
const duplicates: Asset[] = [];
|
||||
@@ -212,7 +117,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
|
||||
}
|
||||
}
|
||||
|
||||
checkProgressBar?.increment(assets.length);
|
||||
checkProgressBar.increment(assets.length);
|
||||
},
|
||||
{ concurrency, retry: 3 },
|
||||
);
|
||||
@@ -232,7 +137,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
|
||||
void checkBulkUploadQueue.push(batch);
|
||||
}
|
||||
|
||||
hashProgressBar?.increment();
|
||||
hashProgressBar.increment();
|
||||
return results;
|
||||
},
|
||||
{ concurrency, retry: 3 },
|
||||
@@ -250,7 +155,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
|
||||
|
||||
await checkBulkUploadQueue.drained();
|
||||
|
||||
multiBar?.stop();
|
||||
multiBar.stop();
|
||||
|
||||
console.log(`Found ${newFiles.length} new files and ${duplicates.length} duplicate${s(duplicates.length)}`);
|
||||
|
||||
@@ -266,10 +171,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
|
||||
return { newFiles, duplicates };
|
||||
};
|
||||
|
||||
export const uploadFiles = async (
|
||||
files: string[],
|
||||
{ dryRun, concurrency, progress }: UploadOptionsDto,
|
||||
): Promise<Asset[]> => {
|
||||
export const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptionsDto): Promise<Asset[]> => {
|
||||
if (files.length === 0) {
|
||||
console.log('All assets were already uploaded, nothing to do.');
|
||||
return [];
|
||||
@@ -289,20 +191,12 @@ export const uploadFiles = async (
|
||||
return files.map((filepath) => ({ id: '', filepath }));
|
||||
}
|
||||
|
||||
let uploadProgress: SingleBar | undefined;
|
||||
|
||||
if (progress) {
|
||||
uploadProgress = new SingleBar(
|
||||
{
|
||||
format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}',
|
||||
},
|
||||
Presets.shades_classic,
|
||||
);
|
||||
} else {
|
||||
console.log(`Uploading ${files.length} asset${s(files.length)} (${byteSize(totalSize)})`);
|
||||
}
|
||||
uploadProgress?.start(totalSize, 0);
|
||||
uploadProgress?.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });
|
||||
const uploadProgress = new SingleBar(
|
||||
{ format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}' },
|
||||
Presets.shades_classic,
|
||||
);
|
||||
uploadProgress.start(totalSize, 0);
|
||||
uploadProgress.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });
|
||||
|
||||
let duplicateCount = 0;
|
||||
let duplicateSize = 0;
|
||||
@@ -328,7 +222,7 @@ export const uploadFiles = async (
|
||||
successSize += stats.size ?? 0;
|
||||
}
|
||||
|
||||
uploadProgress?.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });
|
||||
uploadProgress.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });
|
||||
|
||||
return response;
|
||||
},
|
||||
@@ -341,7 +235,7 @@ export const uploadFiles = async (
|
||||
|
||||
await queue.drained();
|
||||
|
||||
uploadProgress?.stop();
|
||||
uploadProgress.stop();
|
||||
|
||||
console.log(`Successfully uploaded ${successCount} new asset${s(successCount)} (${byteSize(successSize)})`);
|
||||
if (duplicateCount > 0) {
|
||||
|
||||
@@ -68,19 +68,7 @@ program
|
||||
.env('IMMICH_UPLOAD_CONCURRENCY')
|
||||
.default(4),
|
||||
)
|
||||
.addOption(
|
||||
new Option('-j, --json-output', 'Output detailed information in json format')
|
||||
.env('IMMICH_JSON_OUTPUT')
|
||||
.default(false),
|
||||
)
|
||||
.addOption(new Option('--delete', 'Delete local assets after upload').env('IMMICH_DELETE_ASSETS'))
|
||||
.addOption(new Option('--no-progress', 'Hide progress bars').env('IMMICH_PROGRESS_BAR').default(true))
|
||||
.addOption(
|
||||
new Option('--watch', 'Watch for changes and upload automatically')
|
||||
.env('IMMICH_WATCH_CHANGES')
|
||||
.default(false)
|
||||
.implies({ progress: false }),
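// --watch implies progress: false, since watch-mode logging and the progress bars do not mix well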
|
||||
)
|
||||
.argument('[paths...]', 'One or more paths to assets to be uploaded')
|
||||
.action((paths, options) => upload(paths, program.opts(), options));
|
||||
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
import mockfs from 'mock-fs';
|
||||
import { readFileSync } from 'node:fs';
|
||||
import { Batcher, CrawlOptions, crawl } from 'src/utils';
|
||||
import { Mock } from 'vitest';
|
||||
import { CrawlOptions, crawl } from 'src/utils';
|
||||
|
||||
interface Test {
|
||||
test: string;
|
||||
options: Omit<CrawlOptions, 'extensions'>;
|
||||
files: Record<string, boolean>;
|
||||
skipOnWin32?: boolean;
|
||||
}
|
||||
|
||||
const cwd = process.cwd();
|
||||
@@ -50,18 +48,6 @@ const tests: Test[] = [
|
||||
'/photos/image.jpg': true,
|
||||
},
|
||||
},
|
||||
{
|
||||
test: 'should crawl folders with quotes',
|
||||
options: {
|
||||
pathsToCrawl: ["/photo's/", '/photo"s/', '/photo`s/'],
|
||||
},
|
||||
files: {
|
||||
"/photo's/image1.jpg": true,
|
||||
'/photo"s/image2.jpg': true,
|
||||
'/photo`s/image3.jpg': true,
|
||||
},
|
||||
skipOnWin32: true, // single quote interferes with mockfs root on Windows
|
||||
},
|
||||
{
|
||||
test: 'should crawl a single file',
|
||||
options: {
|
||||
@@ -284,12 +270,8 @@ describe('crawl', () => {
|
||||
});
|
||||
|
||||
describe('crawl', () => {
|
||||
for (const { test: name, options, files, skipOnWin32 } of tests) {
|
||||
if (process.platform === 'win32' && skipOnWin32) {
|
||||
test.skip(name);
|
||||
continue;
|
||||
}
|
||||
it(name, async () => {
|
||||
for (const { test, options, files } of tests) {
|
||||
it(test, async () => {
|
||||
// The file contents is the same as the path.
|
||||
mockfs(Object.fromEntries(Object.keys(files).map((file) => [file, file])));
|
||||
|
||||
@@ -304,38 +286,3 @@ describe('crawl', () => {
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Batcher', () => {
|
||||
let batcher: Batcher;
|
||||
let onBatch: Mock;
|
||||
beforeEach(() => {
|
||||
onBatch = vi.fn();
|
||||
batcher = new Batcher({ batchSize: 2, onBatch });
|
||||
});
|
||||
|
||||
it('should trigger onBatch() when a batch limit is reached', async () => {
|
||||
batcher.add('a');
|
||||
batcher.add('b');
|
||||
batcher.add('c');
|
||||
expect(onBatch).toHaveBeenCalledOnce();
|
||||
expect(onBatch).toHaveBeenCalledWith(['a', 'b']);
|
||||
});
|
||||
|
||||
it('should trigger onBatch() when flush() is called', async () => {
|
||||
batcher.add('a');
|
||||
batcher.flush();
|
||||
expect(onBatch).toHaveBeenCalledOnce();
|
||||
expect(onBatch).toHaveBeenCalledWith(['a']);
|
||||
});
|
||||
|
||||
it('should trigger onBatch() when debounce time reached', async () => {
|
||||
vi.useFakeTimers();
|
||||
batcher = new Batcher({ batchSize: 2, debounceTimeMs: 100, onBatch });
|
||||
batcher.add('a');
|
||||
expect(onBatch).not.toHaveBeenCalled();
|
||||
vi.advanceTimersByTime(200);
|
||||
expect(onBatch).toHaveBeenCalledOnce();
|
||||
expect(onBatch).toHaveBeenCalledWith(['a']);
|
||||
vi.useRealTimers();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -146,7 +146,7 @@ export const crawl = async (options: CrawlOptions): Promise<string[]> => {
|
||||
}
|
||||
|
||||
const searchPatterns = patterns.map((pattern) => {
|
||||
let escapedPattern = pattern.replaceAll("'", "[']").replaceAll('"', '["]').replaceAll('`', '[`]');
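// wrap quote characters in glob character classes so fast-glob matches them literally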
|
||||
let escapedPattern = pattern;
|
||||
if (recursive) {
|
||||
escapedPattern = escapedPattern + '/**';
|
||||
}
|
||||
@@ -172,64 +172,3 @@ export const sha1 = (filepath: string) => {
|
||||
rs.on('end', () => resolve(hash.digest('hex')));
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Batches items and calls onBatch to process them
|
||||
* when the batch size is reached or the debounce time has passed.
|
||||
*/
|
||||
export class Batcher<T = unknown> {
|
||||
private items: T[] = [];
|
||||
private readonly batchSize: number;
|
||||
private readonly debounceTimeMs?: number;
|
||||
private readonly onBatch: (items: T[]) => void;
|
||||
private debounceTimer?: NodeJS.Timeout;
|
||||
|
||||
constructor({
|
||||
batchSize,
|
||||
debounceTimeMs,
|
||||
onBatch,
|
||||
}: {
|
||||
batchSize: number;
|
||||
debounceTimeMs?: number;
|
||||
onBatch: (items: T[]) => Promise<void>;
|
||||
}) {
|
||||
this.batchSize = batchSize;
|
||||
this.debounceTimeMs = debounceTimeMs;
|
||||
this.onBatch = onBatch;
|
||||
}
|
||||
|
||||
private setDebounceTimer() {
|
||||
if (this.debounceTimer) {
|
||||
clearTimeout(this.debounceTimer);
|
||||
}
|
||||
if (this.debounceTimeMs) {
|
||||
this.debounceTimer = setTimeout(() => this.flush(), this.debounceTimeMs);
|
||||
}
|
||||
}
|
||||
|
||||
private clearDebounceTimer() {
|
||||
if (this.debounceTimer) {
|
||||
clearTimeout(this.debounceTimer);
|
||||
this.debounceTimer = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
add(item: T) {
|
||||
this.items.push(item);
|
||||
this.setDebounceTimer();
|
||||
if (this.items.length >= this.batchSize) {
|
||||
this.flush();
|
||||
}
|
||||
}
|
||||
|
||||
flush() {
|
||||
this.clearDebounceTimer();
|
||||
if (this.items.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.onBatch(this.items);
|
||||
|
||||
this.items = [];
|
||||
}
|
||||
}
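A minimal usage sketch of the Batcher above (hypothetical paths and sizes, not part of the diff): onBatch fires once the batch size is reached, or once the debounce time passes without a new add().

const batcher = new Batcher<string>({
  batchSize: 2,
  debounceTimeMs: 5000,
  onBatch: async (paths) => {
    // receives up to batchSize items, or whatever is pending when the debounce timer fires
    console.log(`uploading ${paths.length} path(s)`, paths);
  },
});

batcher.add('/photos/a.jpg');
batcher.add('/photos/b.jpg'); // batch size reached, onBatch is called with ['/photos/a.jpg', '/photos/b.jpg']
batcher.add('/photos/c.jpg'); // flushed 5s later by the debounce timer, or earlier by an explicit batcher.flush()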
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
export CLOUDFLARE_ACCOUNT_ID="op://tf/cloudflare/account_id"
|
||||
export CLOUDFLARE_API_TOKEN="op://tf/cloudflare/api_token"
|
||||
export TF_STATE_POSTGRES_CONN_STR="op://tf/tf_state/postgres_conn_str"
|
||||
export TF_VAR_env=$ENVIRONMENT
|
||||
@@ -2,37 +2,37 @@
|
||||
# Manual edits may be lost in future updates.
|
||||
|
||||
provider "registry.opentofu.org/cloudflare/cloudflare" {
|
||||
version = "4.52.5"
|
||||
constraints = "4.52.5"
|
||||
version = "4.48.0"
|
||||
constraints = "4.48.0"
|
||||
hashes = [
|
||||
"h1:+rfzF+16ZcWZWnTyW/p1HHTzYbPKX8Zt2nIFtR/+f+E=",
|
||||
"h1:18bXaaOSq8MWKuMxo/4y7EB7/i7G90y5QsKHZRmkoDo=",
|
||||
"h1:4vZVOpKeEQZsF2VrARRZFeL37Ed/gD4rRMtfnvWQres=",
|
||||
"h1:BZOsTF83QPKXTAaYqxPKzdl1KRjk/L2qbPpFjM0w28A=",
|
||||
"h1:CDuC+HXLvc1z6wkCRsSDcc/+QENIHEtssYshiWg3opA=",
|
||||
"h1:DE+YFzLnqSe79pI2R4idRGx5QzLdrA7RXvngTkGfZ30=",
|
||||
"h1:DfaJwH3Ml4yrRbdAY4AcDVy0QTQk5T3A622TXzS/u2E=",
|
||||
"h1:EIDXP0W3kgIv2pecrFmqtK/DnlqkyckzBzhxKaXU+4A=",
|
||||
"h1:EV4kYyaOnwGA0bh/3hU6Ezqnt1PFDxopH7i85e48IzY=",
|
||||
"h1:M0iXabfzamU+MPDi0G9XACpbacFKMakmM+Z9HZ8HrsM=",
|
||||
"h1:YWmCbGF/KbsrUzcYVBLscwLizidbp95TDQa0N2qpmVo=",
|
||||
"h1:cxPcCB5gbrpUO1+IXkQYs1YTY50/0IlApCzGea0cwuQ=",
|
||||
"h1:g6DldikTV2HXUu9uoeNY5FuLufgaYWF4ufgZg7wq62s=",
|
||||
"h1:oi/Hrx9pwoQ+Z52CBC+rrowVH387EIj0qvnxQgDeI+0=",
|
||||
"zh:1a3400cb38863b2585968d1876706bcfc67a148e1318a1d325c6c7704adc999b",
|
||||
"zh:4c5062cb9e9da1676f06ae92b8370186d98976cc4c7030d3cd76df12af54282a",
|
||||
"zh:52110f493b5f0587ef77a1cfd1a67001fd4c617b14c6502d732ab47352bdc2f7",
|
||||
"zh:5aa536f9eaeb43823aaf2aa80e7d39b25ef2b383405ed034aa16a28b446a9238",
|
||||
"zh:5cc39459a1c6be8a918f17054e4fbba573825ed5597dcada588fe99614d98a5b",
|
||||
"zh:629ae6a7ba298815131da826474d199312d21cec53a4d5ded4fa56a692e6f072",
|
||||
"zh:719cc7c75dc1d3eb30c22ff5102a017996d9788b948078c7e1c5b3446aeca661",
|
||||
"zh:8698635a3ca04383c1e93b21d6963346bdae54d27177a48e4b1435b7f731731c",
|
||||
"h1:0IKUOR32xEI1suS5QCOjfxjQ2mRd058btXk8hVnaOJ4=",
|
||||
"h1:3YG6vu/bFPcYOeLdSUZhiAWiWKaFlOAR34z2o8cbE9k=",
|
||||
"h1:FvGy06/i9AMtVkSIUnCrXNv5xF6jqBqMH8oPVLyeeAg=",
|
||||
"h1:GXH7nIF0ocMqebbA41+fSGIYfM+VAM/PvTe7fJr8UrQ=",
|
||||
"h1:H0ll0ph4404vFE868W3qJ3zhOyy4jbXrOMtdkViEZsU=",
|
||||
"h1:SX42e3k73IcFcrQlZ2e/Veqt2tvCMy6fwlo5yNUktCE=",
|
||||
"h1:Uu/gjBc99GefdPdSrlBwU75DWU0ZcwGcrd3ZFyTeL0s=",
|
||||
"h1:VZw0uN41PWRmNlhg7Ze0Eh7cdoklX1oZbfNAXNYnU1I=",
|
||||
"h1:cMdV7ql6PsFa4qtb0EoZSctvTaTqV7yplBSDwcLRCLc=",
|
||||
"h1:ePGvSurmlqOCkD761vkhRmz7bsK36/EnIvx2Xy8TdXo=",
|
||||
"h1:fOYufF+1bzw2N3aHLpkLB6E8VbZ4ysXDODYQOlwhwd4=",
|
||||
"h1:qe8RbnWq0T4xhqjn9QcbO6YW5YDx47P+eJ0NUMIfwCc=",
|
||||
"h1:tRD2av6PafHDP/b9jDQsG5/aX+lHeKxpbIEHYYLBVUc=",
|
||||
"h1:zyl6Gvx/CFpwYW8pFFDesfO8Lxv+a6CopyAsIMhp54s=",
|
||||
"zh:04c0a49c2b23140b2f21cfd0d52f9798d70d3bdae3831613e156aabe519bbc6c",
|
||||
"zh:185f21b4834ba63e8df1f84aa34639d8a7e126429a4007bb5f9ad82f2602a997",
|
||||
"zh:234724f52cb4c0c3f7313d3b2697caef26d921d134f26ae14801e7afac522f7b",
|
||||
"zh:38a56fcd1b3e40706af995611c977816543b53f1e55fe2720944aae2b6828fcb",
|
||||
"zh:419938f5430fc78eff933470aefbf94a460a478f867cf7761a3dea177b4eb153",
|
||||
"zh:4b46d92bfde1deab7de7ba1a6bbf4ba7c711e4fd925341ddf09d4cc28dae03d8",
|
||||
"zh:537acd4a31c752f1bae305ba7190f60b71ad1a459f22d464f3f914336c9e919f",
|
||||
"zh:5ff36b005aad07697dd0b30d4f0c35dbcdc30dc52b41722552060792fa87ce04",
|
||||
"zh:635c5ee419daea098060f794d9d7d999275301181e49562c4e4c08f043076937",
|
||||
"zh:859277c330d61f91abe9e799389467ca11b77131bf34bedbef52f8da68b2bb49",
|
||||
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
|
||||
"zh:8a9993f1dcadf1dd6ca43b23348abe374605d29945a2fafc07fb3457644e6a54",
|
||||
"zh:b1b9a1e6bcc24d5863a664a411d2dc906373ae7a2399d2d65548ce7377057852",
|
||||
"zh:b270184cdeec277218e84b94cb136fead753da717f9b9dc378e51907f3f00bb0",
|
||||
"zh:dff2bc10071210181726ce270f954995fe42c696e61e2e8f874021fed02521e5",
|
||||
"zh:e8e87b40b6a87dc097b0fdc20d3f725cec0d82abc9cc3755c1f89f8f6e8b0036",
|
||||
"zh:ee964a6573d399a5dd22ce328fb38ca1207797a02248f14b2e4913ee390e7803",
|
||||
"zh:927dfdb8d9aef37ead03fceaa29e87ba076a3dd24e19b6cefdbb0efe9987ff8c",
|
||||
"zh:bbf2226f07f6b1e721877328e69ded4b64f9c196634d2e2429e3cfabbe41e532",
|
||||
"zh:daeed873d6f38604232b46ee4a5830c85d195b967f8dbcafe2fcffa98daf9c5f",
|
||||
"zh:f8f2fc4646c1ba44085612fa7f4dbb7cbcead43b4e661f2b98ddfb4f68afc758",
|
||||
]
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@ terraform {
|
||||
required_providers {
|
||||
cloudflare = {
|
||||
source = "cloudflare/cloudflare"
|
||||
version = "4.52.5"
|
||||
version = "4.48.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
resource "cloudflare_pages_domain" "immich_app_release_domain" {
|
||||
account_id = var.cloudflare_account_id
|
||||
project_name = data.terraform_remote_state.cloudflare_account.outputs.immich_app_archive_pages_project_name
|
||||
domain = "docs.immich.app"
|
||||
domain = "immich.app"
|
||||
}
|
||||
|
||||
resource "cloudflare_record" "immich_app_release_domain" {
|
||||
name = "docs.immich.app"
|
||||
name = "immich.app"
|
||||
proxied = true
|
||||
ttl = 1
|
||||
type = "CNAME"
|
||||
|
||||
@@ -2,37 +2,37 @@
|
||||
# Manual edits may be lost in future updates.
|
||||
|
||||
provider "registry.opentofu.org/cloudflare/cloudflare" {
|
||||
version = "4.52.5"
|
||||
constraints = "4.52.5"
|
||||
version = "4.48.0"
|
||||
constraints = "4.48.0"
|
||||
hashes = [
|
||||
"h1:+rfzF+16ZcWZWnTyW/p1HHTzYbPKX8Zt2nIFtR/+f+E=",
|
||||
"h1:18bXaaOSq8MWKuMxo/4y7EB7/i7G90y5QsKHZRmkoDo=",
|
||||
"h1:4vZVOpKeEQZsF2VrARRZFeL37Ed/gD4rRMtfnvWQres=",
|
||||
"h1:BZOsTF83QPKXTAaYqxPKzdl1KRjk/L2qbPpFjM0w28A=",
|
||||
"h1:CDuC+HXLvc1z6wkCRsSDcc/+QENIHEtssYshiWg3opA=",
|
||||
"h1:DE+YFzLnqSe79pI2R4idRGx5QzLdrA7RXvngTkGfZ30=",
|
||||
"h1:DfaJwH3Ml4yrRbdAY4AcDVy0QTQk5T3A622TXzS/u2E=",
|
||||
"h1:EIDXP0W3kgIv2pecrFmqtK/DnlqkyckzBzhxKaXU+4A=",
|
||||
"h1:EV4kYyaOnwGA0bh/3hU6Ezqnt1PFDxopH7i85e48IzY=",
|
||||
"h1:M0iXabfzamU+MPDi0G9XACpbacFKMakmM+Z9HZ8HrsM=",
|
||||
"h1:YWmCbGF/KbsrUzcYVBLscwLizidbp95TDQa0N2qpmVo=",
|
||||
"h1:cxPcCB5gbrpUO1+IXkQYs1YTY50/0IlApCzGea0cwuQ=",
|
||||
"h1:g6DldikTV2HXUu9uoeNY5FuLufgaYWF4ufgZg7wq62s=",
|
||||
"h1:oi/Hrx9pwoQ+Z52CBC+rrowVH387EIj0qvnxQgDeI+0=",
|
||||
"zh:1a3400cb38863b2585968d1876706bcfc67a148e1318a1d325c6c7704adc999b",
|
||||
"zh:4c5062cb9e9da1676f06ae92b8370186d98976cc4c7030d3cd76df12af54282a",
|
||||
"zh:52110f493b5f0587ef77a1cfd1a67001fd4c617b14c6502d732ab47352bdc2f7",
|
||||
"zh:5aa536f9eaeb43823aaf2aa80e7d39b25ef2b383405ed034aa16a28b446a9238",
|
||||
"zh:5cc39459a1c6be8a918f17054e4fbba573825ed5597dcada588fe99614d98a5b",
|
||||
"zh:629ae6a7ba298815131da826474d199312d21cec53a4d5ded4fa56a692e6f072",
|
||||
"zh:719cc7c75dc1d3eb30c22ff5102a017996d9788b948078c7e1c5b3446aeca661",
|
||||
"zh:8698635a3ca04383c1e93b21d6963346bdae54d27177a48e4b1435b7f731731c",
|
||||
"h1:0IKUOR32xEI1suS5QCOjfxjQ2mRd058btXk8hVnaOJ4=",
|
||||
"h1:3YG6vu/bFPcYOeLdSUZhiAWiWKaFlOAR34z2o8cbE9k=",
|
||||
"h1:FvGy06/i9AMtVkSIUnCrXNv5xF6jqBqMH8oPVLyeeAg=",
|
||||
"h1:GXH7nIF0ocMqebbA41+fSGIYfM+VAM/PvTe7fJr8UrQ=",
|
||||
"h1:H0ll0ph4404vFE868W3qJ3zhOyy4jbXrOMtdkViEZsU=",
|
||||
"h1:SX42e3k73IcFcrQlZ2e/Veqt2tvCMy6fwlo5yNUktCE=",
|
||||
"h1:Uu/gjBc99GefdPdSrlBwU75DWU0ZcwGcrd3ZFyTeL0s=",
|
||||
"h1:VZw0uN41PWRmNlhg7Ze0Eh7cdoklX1oZbfNAXNYnU1I=",
|
||||
"h1:cMdV7ql6PsFa4qtb0EoZSctvTaTqV7yplBSDwcLRCLc=",
|
||||
"h1:ePGvSurmlqOCkD761vkhRmz7bsK36/EnIvx2Xy8TdXo=",
|
||||
"h1:fOYufF+1bzw2N3aHLpkLB6E8VbZ4ysXDODYQOlwhwd4=",
|
||||
"h1:qe8RbnWq0T4xhqjn9QcbO6YW5YDx47P+eJ0NUMIfwCc=",
|
||||
"h1:tRD2av6PafHDP/b9jDQsG5/aX+lHeKxpbIEHYYLBVUc=",
|
||||
"h1:zyl6Gvx/CFpwYW8pFFDesfO8Lxv+a6CopyAsIMhp54s=",
|
||||
"zh:04c0a49c2b23140b2f21cfd0d52f9798d70d3bdae3831613e156aabe519bbc6c",
|
||||
"zh:185f21b4834ba63e8df1f84aa34639d8a7e126429a4007bb5f9ad82f2602a997",
|
||||
"zh:234724f52cb4c0c3f7313d3b2697caef26d921d134f26ae14801e7afac522f7b",
|
||||
"zh:38a56fcd1b3e40706af995611c977816543b53f1e55fe2720944aae2b6828fcb",
|
||||
"zh:419938f5430fc78eff933470aefbf94a460a478f867cf7761a3dea177b4eb153",
|
||||
"zh:4b46d92bfde1deab7de7ba1a6bbf4ba7c711e4fd925341ddf09d4cc28dae03d8",
|
||||
"zh:537acd4a31c752f1bae305ba7190f60b71ad1a459f22d464f3f914336c9e919f",
|
||||
"zh:5ff36b005aad07697dd0b30d4f0c35dbcdc30dc52b41722552060792fa87ce04",
|
||||
"zh:635c5ee419daea098060f794d9d7d999275301181e49562c4e4c08f043076937",
|
||||
"zh:859277c330d61f91abe9e799389467ca11b77131bf34bedbef52f8da68b2bb49",
|
||||
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
|
||||
"zh:8a9993f1dcadf1dd6ca43b23348abe374605d29945a2fafc07fb3457644e6a54",
|
||||
"zh:b1b9a1e6bcc24d5863a664a411d2dc906373ae7a2399d2d65548ce7377057852",
|
||||
"zh:b270184cdeec277218e84b94cb136fead753da717f9b9dc378e51907f3f00bb0",
|
||||
"zh:dff2bc10071210181726ce270f954995fe42c696e61e2e8f874021fed02521e5",
|
||||
"zh:e8e87b40b6a87dc097b0fdc20d3f725cec0d82abc9cc3755c1f89f8f6e8b0036",
|
||||
"zh:ee964a6573d399a5dd22ce328fb38ca1207797a02248f14b2e4913ee390e7803",
|
||||
"zh:927dfdb8d9aef37ead03fceaa29e87ba076a3dd24e19b6cefdbb0efe9987ff8c",
|
||||
"zh:bbf2226f07f6b1e721877328e69ded4b64f9c196634d2e2429e3cfabbe41e532",
|
||||
"zh:daeed873d6f38604232b46ee4a5830c85d195b967f8dbcafe2fcffa98daf9c5f",
|
||||
"zh:f8f2fc4646c1ba44085612fa7f4dbb7cbcead43b4e661f2b98ddfb4f68afc758",
|
||||
]
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@ terraform {
|
||||
required_providers {
|
||||
cloudflare = {
|
||||
source = "cloudflare/cloudflare"
|
||||
version = "4.52.5"
|
||||
version = "4.48.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
resource "cloudflare_pages_domain" "immich_app_branch_domain" {
|
||||
account_id = var.cloudflare_account_id
|
||||
project_name = local.is_release ? data.terraform_remote_state.cloudflare_account.outputs.immich_app_archive_pages_project_name : data.terraform_remote_state.cloudflare_account.outputs.immich_app_preview_pages_project_name
|
||||
domain = "docs.${var.prefix_name}.${local.deploy_domain_prefix}.immich.app"
|
||||
domain = "${var.prefix_name}.${local.deploy_domain_prefix}.immich.app"
|
||||
}
|
||||
|
||||
resource "cloudflare_record" "immich_app_branch_subdomain" {
|
||||
name = "docs.${var.prefix_name}.${local.deploy_domain_prefix}.immich.app"
|
||||
name = "${var.prefix_name}.${local.deploy_domain_prefix}.immich.app"
|
||||
proxied = true
|
||||
ttl = 1
|
||||
type = "CNAME"
|
||||
|
||||
docker/.gitignore (vendored, 10 lines)
@@ -1 +1,9 @@
|
||||
.env
|
||||
.DS_Store
|
||||
node_modules
|
||||
/build
|
||||
/.svelte-kit
|
||||
/dist
|
||||
/package
|
||||
.env
|
||||
.env.*
|
||||
!.env.example
|
||||
|
||||
docker/.npmrc (new file, 1 line)
@@ -0,0 +1 @@
|
||||
engine-strict=true
|
||||
docker/.nvmrc (new file, 1 line)
@@ -0,0 +1 @@
|
||||
22.11.0
|
||||
@@ -1,3 +1,13 @@
|
||||
.DS_Store
|
||||
node_modules
|
||||
/build
|
||||
/package
|
||||
/coverage
|
||||
.env
|
||||
.env.*
|
||||
!.env.example
|
||||
*.md
|
||||
|
||||
# Ignore files for PNPM, NPM and YARN
|
||||
pnpm-lock.yaml
|
||||
package-lock.json
|
||||
docker/.prettierrc (new file, 9 lines)
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"jsonRecursiveSort": true,
|
||||
"organizeImportsSkipDestructiveCodeActions": true,
|
||||
"plugins": ["prettier-plugin-organize-imports", "prettier-plugin-sort-json"],
|
||||
"printWidth": 120,
|
||||
"semi": true,
|
||||
"singleQuote": true,
|
||||
"trailingComma": "all"
|
||||
}
|
||||
@@ -1,46 +1,29 @@
|
||||
#
|
||||
# WARNING: To install Immich, follow our guide: https://docs.immich.app/install/docker-compose
|
||||
#
|
||||
# Make sure to use the docker-compose.yml of the current release:
|
||||
#
|
||||
# https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
|
||||
#
|
||||
# The compose file on main may not be compatible with the latest release.
|
||||
|
||||
# For development see:
|
||||
# - https://docs.immich.app/developer/setup
|
||||
# - https://docs.immich.app/developer/troubleshooting
|
||||
# See:
|
||||
# - https://immich.app/docs/developer/setup
|
||||
# - https://immich.app/docs/developer/troubleshooting
|
||||
|
||||
name: immich-dev
|
||||
|
||||
services:
  immich-server:
    container_name: immich_server
    command: ['immich-dev']
    command: ['/usr/src/app/bin/immich-dev']
    image: immich-server-dev:latest
    # extends:
    # file: hwaccel.transcoding.yml
    # service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
    build:
      context: ../
      dockerfile: server/Dockerfile.dev
      dockerfile: server/Dockerfile
      target: dev
    restart: unless-stopped
    restart: always
    volumes:
      - ..:/usr/src/app
      - ${UPLOAD_LOCATION}/photos:/data
      - ../server:/usr/src/app
      - ../open-api:/usr/src/open-api
      - ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
      - ${UPLOAD_LOCATION}/photos/upload:/usr/src/app/upload/upload
      - /usr/src/app/node_modules
      - /etc/localtime:/etc/localtime:ro
      - pnpm-store:/usr/src/app/.pnpm-store
      - server-node_modules:/usr/src/app/server/node_modules
      - web-node_modules:/usr/src/app/web/node_modules
      - github-node_modules:/usr/src/app/.github/node_modules
      - cli-node_modules:/usr/src/app/cli/node_modules
      - docs-node_modules:/usr/src/app/docs/node_modules
      - e2e-node_modules:/usr/src/app/e2e/node_modules
      - sdk-node_modules:/usr/src/app/open-api/typescript-sdk/node_modules
      - app-node_modules:/usr/src/app/node_modules
      - sveltekit:/usr/src/app/web/.svelte-kit
      - coverage:/usr/src/app/web/coverage
    env_file:
      - .env
    environment:
@@ -55,8 +38,8 @@ services:
      IMMICH_BUILD_IMAGE_URL: https://github.com/immich-app/immich/pkgs/container/immich-server
      IMMICH_THIRD_PARTY_SOURCE_URL: https://github.com/immich-app/immich/
      IMMICH_THIRD_PARTY_BUG_FEATURE_URL: https://github.com/immich-app/immich/issues
      IMMICH_THIRD_PARTY_DOCUMENTATION_URL: https://docs.immich.app
      IMMICH_THIRD_PARTY_SUPPORT_URL: https://docs.immich.app/community-guides
      IMMICH_THIRD_PARTY_DOCUMENTATION_URL: https://immich.app/docs
      IMMICH_THIRD_PARTY_SUPPORT_URL: https://immich.app/docs/third-party
    ulimits:
      nofile:
        soft: 1048576
@@ -66,59 +49,48 @@ services:
      - 9231:9231
      - 2283:2283
    depends_on:
      redis:
        condition: service_started
      database:
        condition: service_started
      - redis
      - database
    healthcheck:
      disable: false

  immich-web:
    container_name: immich_web
    image: immich-web-dev:latest
    # Needed for rootless docker setup, see https://github.com/moby/moby/issues/45919
    # user: 0:0
    build:
      context: ../
      dockerfile: server/Dockerfile.dev
      target: dev
    command: ['immich-web']
      context: ../web
    command: ['/usr/src/app/bin/immich-web']
    env_file:
      - .env
    ports:
      - 3000:3000
      - 24678:24678
    volumes:
      - ..:/usr/src/app
      - pnpm-store:/usr/src/app/.pnpm-store
      - server-node_modules:/usr/src/app/server/node_modules
      - web-node_modules:/usr/src/app/web/node_modules
      - github-node_modules:/usr/src/app/.github/node_modules
      - cli-node_modules:/usr/src/app/cli/node_modules
      - docs-node_modules:/usr/src/app/docs/node_modules
      - e2e-node_modules:/usr/src/app/e2e/node_modules
      - sdk-node_modules:/usr/src/app/open-api/typescript-sdk/node_modules
      - app-node_modules:/usr/src/app/node_modules
      - sveltekit:/usr/src/app/web/.svelte-kit
      - coverage:/usr/src/app/web/coverage
      - ../web:/usr/src/app
      - ../i18n:/usr/src/i18n
      - ../open-api/:/usr/src/open-api/
      - /usr/src/app/node_modules
    ulimits:
      nofile:
        soft: 1048576
        hard: 1048576
    restart: unless-stopped
    depends_on:
      immich-server:
        condition: service_started
      - immich-server

  immich-machine-learning:
    container_name: immich_machine_learning
    image: immich-machine-learning-dev:latest
    # extends:
    # file: hwaccel.ml.yml
    # service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference
    # service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
    build:
      context: ../machine-learning
      dockerfile: Dockerfile
      args:
        - DEVICE=cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference
        - DEVICE=cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
    ports:
      - 3003:3003
    volumes:
@@ -134,13 +106,13 @@ services:

  redis:
    container_name: immich_redis
    image: docker.io/valkey/valkey:8@sha256:81db6d39e1bba3b3ff32bd3a1b19a6d69690f94a3954ec131277b9a26b95b3aa
    image: redis:6.2-alpine@sha256:eaba718fecd1196d88533de7ba49bf903ad33664a92debb24660a922ecd9cac8
    healthcheck:
      test: redis-cli ping || exit 1

  database:
    container_name: immich_postgres
    image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:bcf63357191b76a916ae5eb93464d65c07511da41e3bf7a8416db519b40b1c23
    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
    env_file:
      - .env
    environment:
@@ -152,7 +124,25 @@ services:
      - ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
    ports:
      - 5432:5432
    shm_size: 128mb
    healthcheck:
      test: >-
        pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;
        Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align
        --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";
        echo "checksum failure count is $$Chksum";
        [ "$$Chksum" = '0' ] || exit 1
      interval: 5m
      start_interval: 30s
      start_period: 5m
    command: >-
      postgres
      -c shared_preload_libraries=vectors.so
      -c 'search_path="$$user", public, vectors'
      -c logging_collector=on
      -c max_wal_size=2GB
      -c shared_buffers=512MB
      -c wal_compression=on

  # set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
  # immich-prometheus:
  # container_name: immich_prometheus
@@ -178,14 +168,3 @@ volumes:
  model-cache:
  prometheus-data:
  grafana-data:
  pnpm-store:
  server-node_modules:
  web-node_modules:
  github-node_modules:
  cli-node_modules:
  docs-node_modules:
  e2e-node_modules:
  sdk-node_modules:
  app-node_modules:
  sveltekit:
  coverage:

@@ -1,12 +1,3 @@
#
# WARNING: To install Immich, follow our guide: https://docs.immich.app/install/docker-compose
#
# Make sure to use the docker-compose.yml of the current release:
#
# https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
#
# The compose file on main may not be compatible with the latest release.

name: immich-prod

services:
@@ -20,7 +11,7 @@ services:
      context: ../
      dockerfile: server/Dockerfile
    volumes:
      - ${UPLOAD_LOCATION}/photos:/data
      - ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
      - /etc/localtime:/etc/localtime:ro
    env_file:
      - .env
@@ -38,12 +29,12 @@ services:
    image: immich-machine-learning:latest
    # extends:
    # file: hwaccel.ml.yml
    # service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference
    # service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
    build:
      context: ../machine-learning
      dockerfile: Dockerfile
      args:
        - DEVICE=cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference
        - DEVICE=cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
    ports:
      - 3003:3003
    volumes:
@@ -56,14 +47,14 @@ services:

  redis:
    container_name: immich_redis
    image: docker.io/valkey/valkey:8@sha256:81db6d39e1bba3b3ff32bd3a1b19a6d69690f94a3954ec131277b9a26b95b3aa
    image: redis:6.2-alpine@sha256:eaba718fecd1196d88533de7ba49bf903ad33664a92debb24660a922ecd9cac8
    healthcheck:
      test: redis-cli ping || exit 1
    restart: always

  database:
    container_name: immich_postgres
    image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:bcf63357191b76a916ae5eb93464d65c07511da41e3bf7a8416db519b40b1c23
    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
    env_file:
      - .env
    environment:
@@ -75,7 +66,24 @@ services:
      - ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
    ports:
      - 5432:5432
    shm_size: 128mb
    healthcheck:
      test: >-
        pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;
        Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align
        --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";
        echo "checksum failure count is $$Chksum";
        [ "$$Chksum" = '0' ] || exit 1
      interval: 5m
      start_interval: 30s
      start_period: 5m
    command: >-
      postgres
      -c shared_preload_libraries=vectors.so
      -c 'search_path="$$user", public, vectors'
      -c logging_collector=on
      -c max_wal_size=2GB
      -c shared_buffers=512MB
      -c wal_compression=on
    restart: always

  # set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
@@ -83,7 +91,7 @@ services:
    container_name: immich_prometheus
    ports:
      - 9090:9090
    image: prom/prometheus@sha256:63805ebb8d2b3920190daf1cb14a60871b16fd38bed42b857a3182bc621f4996
    image: prom/prometheus@sha256:565ee86501224ebbb98fc10b332fa54440b100469924003359edf49cbce374bd
    volumes:
      - ./prometheus.yml:/etc/prometheus/prometheus.yml
      - prometheus-data:/prometheus
@@ -95,7 +103,7 @@ services:
    command: ['./run.sh', '-disable-reporting']
    ports:
      - 3000:3000
    image: grafana/grafana:12.1.1-ubuntu@sha256:d1da838234ff2de93e0065ee1bf0e66d38f948dcc5d718c25fa6237e14b4424a
    image: grafana/grafana:11.4.0-ubuntu@sha256:afccec22ba0e4815cca1d2bf3836e414322390dc78d77f1851976ffa8d61051c
    volumes:
      - grafana-data:/var/lib/grafana

@@ -1,11 +1,10 @@
#
# WARNING: To install Immich, follow our guide: https://docs.immich.app/install/docker-compose
#
# Make sure to use the docker-compose.yml of the current release:
# WARNING: Make sure to use the docker-compose.yml of the current release:
#
# https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
#
# The compose file on main may not be compatible with the latest release.
#

name: immich

@@ -18,7 +17,7 @@ services:
    # service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
    volumes:
      # Do not edit the next line. If you want to change the media storage location on your system, edit the value of UPLOAD_LOCATION in the .env file
      - ${UPLOAD_LOCATION}:/data
      - ${UPLOAD_LOCATION}:/usr/src/app/upload
      - /etc/localtime:/etc/localtime:ro
    env_file:
      - .env
@@ -33,12 +32,12 @@ services:

  immich-machine-learning:
    container_name: immich_machine_learning
    # For hardware acceleration, add one of -[armnn, cuda, rocm, openvino, rknn] to the image tag.
    # For hardware acceleration, add one of -[armnn, cuda, openvino] to the image tag.
    # Example tag: ${IMMICH_VERSION:-release}-cuda
    image: ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release}
    # extends: # uncomment this section for hardware acceleration - see https://docs.immich.app/features/ml-hardware-acceleration
    # extends: # uncomment this section for hardware acceleration - see https://immich.app/docs/features/ml-hardware-acceleration
    # file: hwaccel.ml.yml
    # service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference - use the `-wsl` version for WSL2 where applicable
    # service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference - use the `-wsl` version for WSL2 where applicable
    volumes:
      - model-cache:/cache
    env_file:
@@ -49,25 +48,40 @@ services:

  redis:
    container_name: immich_redis
    image: docker.io/valkey/valkey:8@sha256:81db6d39e1bba3b3ff32bd3a1b19a6d69690f94a3954ec131277b9a26b95b3aa
    image: docker.io/redis:6.2-alpine@sha256:eaba718fecd1196d88533de7ba49bf903ad33664a92debb24660a922ecd9cac8
    healthcheck:
      test: redis-cli ping || exit 1
    restart: always

  database:
    container_name: immich_postgres
    image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:bcf63357191b76a916ae5eb93464d65c07511da41e3bf7a8416db519b40b1c23
    image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
    environment:
      POSTGRES_PASSWORD: ${DB_PASSWORD}
      POSTGRES_USER: ${DB_USERNAME}
      POSTGRES_DB: ${DB_DATABASE_NAME}
      POSTGRES_INITDB_ARGS: '--data-checksums'
      # Uncomment the DB_STORAGE_TYPE: 'HDD' var if your database isn't stored on SSDs
      # DB_STORAGE_TYPE: 'HDD'
    volumes:
      # Do not edit the next line. If you want to change the database storage location on your system, edit the value of DB_DATA_LOCATION in the .env file
      - ${DB_DATA_LOCATION}:/var/lib/postgresql/data
    shm_size: 128mb
    healthcheck:
      test: >-
        pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;
        Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align
        --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";
        echo "checksum failure count is $$Chksum";
        [ "$$Chksum" = '0' ] || exit 1
      interval: 5m
      start_interval: 30s
      start_period: 5m
    command: >-
      postgres
      -c shared_preload_libraries=vectors.so
      -c 'search_path="$$user", public, vectors'
      -c logging_collector=on
      -c max_wal_size=2GB
      -c shared_buffers=512MB
      -c wal_compression=on
    restart: always

volumes:

docker/eslint.config.mjs (new file, 86 lines)
@@ -0,0 +1,86 @@
import { FlatCompat } from '@eslint/eslintrc';
|
||||
import js from '@eslint/js';
|
||||
import typescriptEslint from '@typescript-eslint/eslint-plugin';
|
||||
import tsParser from '@typescript-eslint/parser';
|
||||
import globals from 'globals';
|
||||
import path from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
const compat = new FlatCompat({
|
||||
baseDirectory: __dirname,
|
||||
recommendedConfig: js.configs.recommended,
|
||||
allConfig: js.configs.all,
|
||||
});
|
||||
|
||||
export default [
|
||||
{
|
||||
ignores: [
|
||||
'**/.DS_Store',
|
||||
'**/node_modules',
|
||||
'dist',
|
||||
'lib/docker-compose/types.ts',
|
||||
'build',
|
||||
'package',
|
||||
'**/.env',
|
||||
'**/.env.*',
|
||||
'!**/.env.example',
|
||||
'**/pnpm-lock.yaml',
|
||||
'**/package-lock.json',
|
||||
'**/yarn.lock',
|
||||
'eslint.config.mjs',
|
||||
'vite.config.js',
|
||||
'coverage',
|
||||
],
|
||||
},
|
||||
...compat.extends('eslint:recommended', 'plugin:@typescript-eslint/recommended', 'plugin:unicorn/recommended'),
|
||||
{
|
||||
ignores: ['src/**'],
|
||||
plugins: {
|
||||
'@typescript-eslint': typescriptEslint,
|
||||
},
|
||||
|
||||
languageOptions: {
|
||||
globals: {
|
||||
...globals.browser,
|
||||
...globals.node,
|
||||
NodeJS: true,
|
||||
},
|
||||
|
||||
parser: tsParser,
|
||||
ecmaVersion: 2022,
|
||||
sourceType: 'module',
|
||||
|
||||
parserOptions: {
|
||||
tsconfigRootDir: __dirname,
|
||||
project: ['./tsconfig.json'],
|
||||
},
|
||||
},
|
||||
|
||||
rules: {
|
||||
'@typescript-eslint/no-unused-vars': [
|
||||
'warn',
|
||||
{
|
||||
argsIgnorePattern: '^_$',
|
||||
varsIgnorePattern: '^_$',
|
||||
},
|
||||
],
|
||||
|
||||
curly: 2,
|
||||
'unicorn/no-useless-undefined': 'off',
|
||||
'unicorn/prefer-spread': 'off',
|
||||
'unicorn/no-null': 'off',
|
||||
'unicorn/prevent-abbreviations': 'off',
|
||||
'unicorn/no-nested-ternary': 'off',
|
||||
'unicorn/consistent-function-scoping': 'off',
|
||||
'unicorn/prefer-top-level-await': 'off',
|
||||
'unicorn/import-style': 'off',
|
||||
'@typescript-eslint/await-thenable': 'error',
|
||||
'@typescript-eslint/no-floating-promises': 'error',
|
||||
'@typescript-eslint/no-misused-promises': 'error',
|
||||
'@typescript-eslint/require-await': 'error',
|
||||
'object-shorthand': ['error', 'always'],
|
||||
},
|
||||
},
|
||||
];
|
||||
@@ -1,9 +1,8 @@
# You can find documentation for all the supported env variables at https://docs.immich.app/install/environment-variables
# You can find documentation for all the supported env variables at https://immich.app/docs/install/environment-variables

# The location where your uploaded files are stored
UPLOAD_LOCATION=./library

# The location where your database files are stored. Network shares are not supported for the database
# The location where your database files are stored
DB_DATA_LOCATION=./postgres

# To set a timezone, uncomment the next line and change Etc/UTC to a TZ identifier from this list: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List

@@ -4,7 +4,7 @@
# you can inline the config for a backend by copying its contents
# into the immich-machine-learning service in the docker-compose.yml file.

# See https://docs.immich.app/features/ml-hardware-acceleration for info on usage.
# See https://immich.app/docs/features/ml-hardware-acceleration for info on usage.

services:
  armnn:
@@ -13,13 +13,6 @@ services:
    volumes:
      - /lib/firmware/mali_csffw.bin:/lib/firmware/mali_csffw.bin:ro # Mali firmware for your chipset (not always required depending on the driver)
      - /usr/lib/libmali.so:/usr/lib/libmali.so:ro # Mali driver for your chipset (always required)

  rknn:
    security_opt:
      - systempaths=unconfined
      - apparmor=unconfined
    devices:
      - /dev/dri:/dev/dri

  cpu: {}

@@ -33,13 +26,6 @@ services:
              capabilities:
                - gpu

  rocm:
    group_add:
      - video
    devices:
      - /dev/dri:/dev/dri
      - /dev/kfd:/dev/kfd

  openvino:
    device_cgroup_rules:
      - 'c 189:* rmw'

@@ -4,7 +4,7 @@
# you can inline the config for a backend by copying its contents
# into the immich-microservices service in the docker-compose.yml file.

# See https://docs.immich.app/features/hardware-transcoding for more info on using hardware transcoding.
# See https://immich.app/docs/features/hardware-transcoding for more info on using hardware transcoding.

services:
  cpu: {}
@@ -48,7 +48,6 @@ services:
  vaapi-wsl: # use this for VAAPI if you're running Immich in WSL2
    devices:
      - /dev/dri:/dev/dri
      - /dev/dxg:/dev/dxg
    volumes:
      - /usr/lib/wsl:/usr/lib/wsl
    environment:

docker/lib/build.ts (new file, 87 lines)
@@ -0,0 +1,87 @@
import { dump as dumpYaml } from 'js-yaml';
import { ComposeBuilder, ServiceBuilder } from 'lib/docker-compose/builder';
import { ContainerName, GeneratorOptions, ServiceName } from 'lib/types';
import { asQueryParams, getImmichEnvironment, getImmichVolumes, isExternalPostgres, isIoRedis } from 'lib/utils';

const RELEASE_VERSION = 'v1.122.0';
const postgresHealthCheck = [
  'pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;',
  `Chksum="$$(psql --dbname="$\${POSTGRES_DB}" --username="$\${POSTGRES_USER}" --tuples-only --no-align`,
  `--command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";`,
  'echo "checksum failure count is $$Chksum";',
  `[ "$$Chksum" = '0' ] || exit 1\n`,
].join(' ');
const postgresCommand = [
  `postgres`,
  `-c shared_preload_libraries=vectors.so`,
  `-c 'search_path="$$user", public, vectors'`,
  `-c logging_collector=on`,
  `-c max_wal_size=2GB`,
  `-c shared_buffers=512MB`,
  `-c wal_compression=on`,
].join(' ');

const build = (options: GeneratorOptions) => {
  const healthchecksEnabled = options.healthchecks ?? true;
  const containerNames = options.containerNames ?? true;

  const immichService = ServiceBuilder.create(ServiceName.ImmichServer)
    .setImage(`ghcr.io/immich-app/immich-server:${RELEASE_VERSION}`)
    .setContainerName(containerNames && ContainerName.ImmichServer)
    .setRestartPolicy('always')
    .setHealthcheck(healthchecksEnabled)
    .setEnvironment(getImmichEnvironment(options))
    .addExposedPort(2283)
    .addVolumes(getImmichVolumes(options));

  const machineLearningEnabled = options.machineLearning;
  const modelCacheVolume = 'model-cache';
  const machineLearningService =
    machineLearningEnabled &&
    ServiceBuilder.create(ServiceName.ImmichMachineLearning)
      .setImage(`ghcr.io/immich-app/immich-machine-learning:${RELEASE_VERSION}-cuda`)
      .setContainerName(containerNames && ContainerName.ImmichMachineLearning)
      .setRestartPolicy('always')
      .setHealthcheck(healthchecksEnabled)
      .addVolume(`${modelCacheVolume}:/cache`);

  const redisService = isIoRedis(options)
    ? false
    : ServiceBuilder.create(ServiceName.Redis)
        .setImage('docker.io/redis:6.2-alpine')
        .setContainerName(containerNames && ContainerName.Redis)
        .setRestartPolicy('always')
        .setHealthcheck(healthchecksEnabled && 'redis-cli ping || exit 1');

  const postgresService = isExternalPostgres(options)
    ? false
    : ServiceBuilder.create(ServiceName.Postgres)
        .setImage('docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0')
        .setContainerName(containerNames && ContainerName.Postgres)
        .setRestartPolicy('always')
        .setEnvironment({
          POSTGRES_PASSWORD: options.postgresPassword,
          POSTGRES_USER: options.postgresUser,
          POSTGRES_DB: options.postgresDatabase,
          POSTGRES_INITDB_ARGS: '--data-checksums',
        })
        .setHealthcheck(healthchecksEnabled && postgresHealthCheck)
        .setCommand(postgresCommand)
        .addVolume(`${options.postgresDataLocation}:/var/lib/postgresql/data`);

  const domain = 'https://get.immich.app';
  const url = `${domain}/compose?${asQueryParams(options)}`;

  return ComposeBuilder.create('immich')
    .addComment(`This docker compose file was originally generated at https://get.immich.app/compose`)
    .addComment(url)
    .addComment(`${dumpYaml({ options }, { indent: 2 })}`)
    .addService(immichService.addDependsOn(redisService).addDependsOn(postgresService))
    .addService(machineLearningService)
    .addService(redisService)
    .addService(postgresService)
    .addVolume(modelCacheVolume, machineLearningEnabled && {});
};

export const buildSpec = (options: GeneratorOptions) => build(options).asSpec();
export const buildYaml = (options: GeneratorOptions) => build(options).asYaml();
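The build() pipeline above assembles the server, machine-learning, Redis, and Postgres services from a single options object and returns them through the ComposeBuilder. As a minimal sketch of how the exported buildYaml entry point might be driven (the options literal, the output path, and the use of node:fs are assumptions of this example, not part of the diff):

// Illustrative only: drive the generator with an internal Postgres and internal Redis.
import { writeFileSync } from 'node:fs';
import { buildYaml } from 'lib/build';
import { GeneratorOptions } from 'lib/types';

const options: GeneratorOptions = {
  releaseVersion: 'v1.122.0',
  machineLearning: true,
  healthchecks: true,
  containerNames: true,
  baseLocation: './library',          // mapped to /usr/src/app/upload by getImmichVolumes
  postgresUser: 'postgres',
  postgresPassword: 'postgres',
  postgresDatabase: 'immich',
  postgresDataLocation: './postgres',
  redis: true,                        // use the bundled immich-redis service
};

// Write the generated compose spec next to the script.
writeFileSync('docker-compose.yml', buildYaml(options));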
docker/lib/docker-compose/builder.ts (new file, 208 lines)
@@ -0,0 +1,208 @@
import { dump as dumpYaml } from 'js-yaml';
|
||||
import {
|
||||
Command,
|
||||
ComposeSpecification,
|
||||
DefinitionsService,
|
||||
DefinitionsVolume,
|
||||
ListOfStrings,
|
||||
} from 'lib/docker-compose/types';
|
||||
|
||||
type ServiceNameAccessor = { getName: () => string };
|
||||
type ServiceBuildAccessor = { build: () => DefinitionsService };
|
||||
|
||||
const withNewLines = (yaml: string) =>
|
||||
yaml.replaceAll(/(?<leading>[^:]\n)(?<key>[ ]{0,2}\S+:)$/gm, '$<leading>\n$<key>');
|
||||
|
||||
export class ComposeBuilder {
|
||||
private spec: ComposeSpecification = {};
|
||||
private comments: string[] = [];
|
||||
|
||||
private constructor(projectName?: string) {
|
||||
if (projectName) {
|
||||
this.setProjectName(projectName);
|
||||
}
|
||||
}
|
||||
|
||||
static create(projectName?: string) {
|
||||
return new ComposeBuilder(projectName);
|
||||
}
|
||||
|
||||
setProjectName(name: string) {
|
||||
this.spec.name = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
addComment(comment: string) {
|
||||
this.comments.push(comment + '\n');
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
addService(spec: false | (ServiceNameAccessor & ServiceBuildAccessor)) {
|
||||
if (!spec) {
|
||||
return this;
|
||||
}
|
||||
|
||||
if (!this.spec.services) {
|
||||
this.spec.services = {};
|
||||
}
|
||||
|
||||
this.spec.services[spec.getName()] = spec.build();
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
addVolume(name: string, volume: false | DefinitionsVolume) {
|
||||
if (volume === false) {
|
||||
return this;
|
||||
}
|
||||
|
||||
if (!this.spec.volumes) {
|
||||
this.spec.volumes = {};
|
||||
}
|
||||
|
||||
this.spec.volumes[name] = volume;
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
asSpec() {
|
||||
return this.spec;
|
||||
}
|
||||
|
||||
asYaml() {
|
||||
let prefix = '';
|
||||
if (this.comments.length > 0) {
|
||||
const comments =
|
||||
this.comments
|
||||
.flatMap((comment) => comment.split('\n'))
|
||||
.join('\n')
|
||||
.trim()
|
||||
.split('\n')
|
||||
.map((comment) => `# ${comment}`)
|
||||
.join('\n') + '\n\n';
|
||||
|
||||
prefix += comments;
|
||||
}
|
||||
|
||||
const spec = withNewLines(dumpYaml(this.spec, { indent: 2, lineWidth: 140 })).trim();
|
||||
|
||||
return prefix + spec;
|
||||
}
|
||||
}
|
||||
|
||||
export class ServiceBuilder {
|
||||
private spec: DefinitionsService = {};
|
||||
|
||||
private constructor(private name: string) {}
|
||||
|
||||
static create(name: string) {
|
||||
return new ServiceBuilder(name);
|
||||
}
|
||||
|
||||
getName() {
|
||||
return this.name;
|
||||
}
|
||||
|
||||
setImage(image: string) {
|
||||
this.spec.image = image;
|
||||
return this;
|
||||
}
|
||||
|
||||
setContainerName(name: false | string) {
|
||||
if (name === false) {
|
||||
return this;
|
||||
}
|
||||
|
||||
this.spec.container_name = name;
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
addExposedPort(port: number | { internal: number; external: number }) {
|
||||
if (typeof port === 'number') {
|
||||
port = { internal: port, external: port };
|
||||
}
|
||||
|
||||
const { internal, external } = port;
|
||||
|
||||
if (!this.spec.ports) {
|
||||
this.spec.ports = [];
|
||||
}
|
||||
|
||||
this.spec.ports.push(`${external}:${internal}`);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
addDependsOn(service: false | string | ServiceNameAccessor) {
|
||||
if (service === false) {
|
||||
return this;
|
||||
}
|
||||
|
||||
let serviceName = service as string;
|
||||
if ('getName' in (service as ServiceNameAccessor)) {
|
||||
serviceName = (service as ServiceNameAccessor).getName();
|
||||
}
|
||||
|
||||
if (!this.spec.depends_on) {
|
||||
this.spec.depends_on = [];
|
||||
}
|
||||
|
||||
(this.spec.depends_on as ListOfStrings).push(serviceName);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
setRestartPolicy(restart: string) {
|
||||
this.spec.restart = restart;
|
||||
return this;
|
||||
}
|
||||
|
||||
setEnvironment(env: Record<string, string | number | undefined>) {
|
||||
this.spec.environment = env;
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
setHealthcheck(test: boolean | string) {
|
||||
if (test === true) {
|
||||
return this;
|
||||
}
|
||||
|
||||
if (test === false) {
|
||||
this.spec.healthcheck = { disable: true };
|
||||
return this;
|
||||
}
|
||||
|
||||
this.spec.healthcheck = { test };
|
||||
return this;
|
||||
}
|
||||
|
||||
setCommand(command: Command) {
|
||||
this.spec.command = command;
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
addVolume(volume: string) {
|
||||
if (!this.spec.volumes) {
|
||||
this.spec.volumes = [];
|
||||
}
|
||||
this.spec.volumes.push(volume);
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
addVolumes(volumes: string[]) {
|
||||
for (const volume of volumes) {
|
||||
this.addVolume(volume);
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
build() {
|
||||
return this.spec;
|
||||
}
|
||||
}
|
||||
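For orientation, the fluent API defined in builder.ts can also be exercised on its own, outside Immich's build() function. The service name, image, port, and volume below are invented for the example and are not taken from the diff:

// Illustrative only: a standalone ComposeBuilder/ServiceBuilder sketch.
import { ComposeBuilder, ServiceBuilder } from 'lib/docker-compose/builder';

const web = ServiceBuilder.create('web')
  .setImage('nginx:alpine')
  .setRestartPolicy('unless-stopped')
  .addExposedPort({ internal: 80, external: 8080 })
  .addVolume('site-data:/usr/share/nginx/html:ro');

const yaml = ComposeBuilder.create('example')
  .addComment('generated with the ComposeBuilder from docker/lib/docker-compose/builder.ts')
  .addService(web)              // ServiceBuilder satisfies getName()/build()
  .addVolume('site-data', {})   // named volume with default settings
  .asYaml();

console.log(yaml);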
docker/lib/docker-compose/types.ts (new file, 937 lines)
@@ -0,0 +1,937 @@
export type DefinitionsInclude =
|
||||
| string
|
||||
| {
|
||||
path?: StringOrList;
|
||||
env_file?: StringOrList;
|
||||
project_directory?: string;
|
||||
};
|
||||
export type StringOrList = string | ListOfStrings;
|
||||
export type ListOfStrings = string[];
|
||||
export type DefinitionsDevelopment = {
|
||||
watch?: {
|
||||
ignore?: string[];
|
||||
path: string;
|
||||
action: 'rebuild' | 'sync' | 'sync+restart';
|
||||
target?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}[];
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
} & Development;
|
||||
export type Development = {
|
||||
watch?: {
|
||||
ignore?: string[];
|
||||
path: string;
|
||||
action: 'rebuild' | 'sync' | 'sync+restart';
|
||||
target?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}[];
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
} | null;
|
||||
export type DefinitionsDeployment = {
|
||||
mode?: string;
|
||||
endpoint_mode?: string;
|
||||
replicas?: number | string;
|
||||
labels?: ListOrDict;
|
||||
rollback_config?: {
|
||||
parallelism?: number | string;
|
||||
delay?: string;
|
||||
failure_action?: string;
|
||||
monitor?: string;
|
||||
max_failure_ratio?: number | string;
|
||||
order?: 'start-first' | 'stop-first';
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
update_config?: {
|
||||
parallelism?: number | string;
|
||||
delay?: string;
|
||||
failure_action?: string;
|
||||
monitor?: string;
|
||||
max_failure_ratio?: number | string;
|
||||
order?: 'start-first' | 'stop-first';
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
resources?: {
|
||||
limits?: {
|
||||
cpus?: number | string;
|
||||
memory?: string;
|
||||
pids?: number | string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
reservations?: {
|
||||
cpus?: number | string;
|
||||
memory?: string;
|
||||
generic_resources?: DefinitionsGenericResources;
|
||||
devices?: DefinitionsDevices;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
restart_policy?: {
|
||||
condition?: string;
|
||||
delay?: string;
|
||||
max_attempts?: number | string;
|
||||
window?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
placement?: {
|
||||
constraints?: string[];
|
||||
preferences?: {
|
||||
spread?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}[];
|
||||
max_replicas_per_node?: number | string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
} & Deployment;
|
||||
export type ListOrDict =
|
||||
| {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` ".+".
|
||||
*/
|
||||
[k: string]: undefined | string | number | boolean | null;
|
||||
}
|
||||
| string[];
|
||||
export type DefinitionsGenericResources = {
|
||||
discrete_resource_spec?: {
|
||||
kind?: string;
|
||||
value?: number | string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}[];
|
||||
export type DefinitionsDevices = {
|
||||
capabilities: ListOfStrings;
|
||||
count?: string | number;
|
||||
device_ids?: ListOfStrings;
|
||||
driver?: string;
|
||||
options?: ListOrDict;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}[];
|
||||
export type Deployment = {
|
||||
mode?: string;
|
||||
endpoint_mode?: string;
|
||||
replicas?: number | string;
|
||||
labels?: ListOrDict;
|
||||
rollback_config?: {
|
||||
parallelism?: number | string;
|
||||
delay?: string;
|
||||
failure_action?: string;
|
||||
monitor?: string;
|
||||
max_failure_ratio?: number | string;
|
||||
order?: 'start-first' | 'stop-first';
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
update_config?: {
|
||||
parallelism?: number | string;
|
||||
delay?: string;
|
||||
failure_action?: string;
|
||||
monitor?: string;
|
||||
max_failure_ratio?: number | string;
|
||||
order?: 'start-first' | 'stop-first';
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
resources?: {
|
||||
limits?: {
|
||||
cpus?: number | string;
|
||||
memory?: string;
|
||||
pids?: number | string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
reservations?: {
|
||||
cpus?: number | string;
|
||||
memory?: string;
|
||||
generic_resources?: DefinitionsGenericResources;
|
||||
devices?: DefinitionsDevices;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
restart_policy?: {
|
||||
condition?: string;
|
||||
delay?: string;
|
||||
max_attempts?: number | string;
|
||||
window?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
placement?: {
|
||||
constraints?: string[];
|
||||
preferences?: {
|
||||
spread?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}[];
|
||||
max_replicas_per_node?: number | string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
} | null;
|
||||
export type ExtraHosts = {} | string[];
|
||||
export type ServiceConfigOrSecret = (
|
||||
| string
|
||||
| {
|
||||
source?: string;
|
||||
target?: string;
|
||||
uid?: string;
|
||||
gid?: string;
|
||||
mode?: number | string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}
|
||||
)[];
|
||||
export type Command = null | string | string[];
|
||||
export type EnvFile =
|
||||
| string
|
||||
| (
|
||||
| string
|
||||
| {
|
||||
path: string;
|
||||
format?: string;
|
||||
required?: boolean | string;
|
||||
}
|
||||
)[];
|
||||
/**
|
||||
* This interface was referenced by `PropertiesNetworks`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
|
||||
*/
|
||||
export type DefinitionsNetwork = {
|
||||
name?: string;
|
||||
driver?: string;
|
||||
driver_opts?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string | number;
|
||||
};
|
||||
ipam?: {
|
||||
driver?: string;
|
||||
config?: {
|
||||
subnet?: string;
|
||||
ip_range?: string;
|
||||
gateway?: string;
|
||||
aux_addresses?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string;
|
||||
};
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}[];
|
||||
options?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string;
|
||||
};
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
external?:
|
||||
| boolean
|
||||
| string
|
||||
| {
|
||||
name?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
internal?: boolean | string;
|
||||
enable_ipv6?: boolean | string;
|
||||
attachable?: boolean | string;
|
||||
labels?: ListOrDict;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
} & Network;
|
||||
export type Network = {
|
||||
name?: string;
|
||||
driver?: string;
|
||||
driver_opts?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string | number;
|
||||
};
|
||||
ipam?: {
|
||||
driver?: string;
|
||||
config?: {
|
||||
subnet?: string;
|
||||
ip_range?: string;
|
||||
gateway?: string;
|
||||
aux_addresses?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string;
|
||||
};
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}[];
|
||||
options?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string;
|
||||
};
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
external?:
|
||||
| boolean
|
||||
| string
|
||||
| {
|
||||
name?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
internal?: boolean | string;
|
||||
enable_ipv6?: boolean | string;
|
||||
attachable?: boolean | string;
|
||||
labels?: ListOrDict;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
} | null;
|
||||
/**
|
||||
* This interface was referenced by `PropertiesVolumes`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
|
||||
*/
|
||||
export type DefinitionsVolume = {
|
||||
name?: string;
|
||||
driver?: string;
|
||||
driver_opts?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string | number;
|
||||
};
|
||||
external?:
|
||||
| boolean
|
||||
| string
|
||||
| {
|
||||
name?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
labels?: ListOrDict;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
} & Volume;
|
||||
export type Volume = {
|
||||
name?: string;
|
||||
driver?: string;
|
||||
driver_opts?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string | number;
|
||||
};
|
||||
external?:
|
||||
| boolean
|
||||
| string
|
||||
| {
|
||||
name?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
labels?: ListOrDict;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
} | null;
|
||||
|
||||
/**
|
||||
* The Compose file is a YAML file defining a multi-containers based application.
|
||||
*/
|
||||
export interface ComposeSpecification {
|
||||
/**
|
||||
* declared for backward compatibility, ignored.
|
||||
*/
|
||||
version?: string;
|
||||
/**
|
||||
* define the Compose project name, until user defines one explicitly.
|
||||
*/
|
||||
name?: string;
|
||||
/**
|
||||
* compose sub-projects to be included.
|
||||
*/
|
||||
include?: DefinitionsInclude[];
|
||||
services?: PropertiesServices;
|
||||
networks?: PropertiesNetworks;
|
||||
volumes?: PropertiesVolumes;
|
||||
secrets?: PropertiesSecrets;
|
||||
configs?: PropertiesConfigs;
|
||||
/**
|
||||
* This interface was referenced by `ComposeSpecification`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}
|
||||
export interface PropertiesServices {
|
||||
[k: string]: DefinitionsService;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `PropertiesServices`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
|
||||
*/
|
||||
export interface DefinitionsService {
|
||||
develop?: DefinitionsDevelopment;
|
||||
deploy?: DefinitionsDeployment;
|
||||
annotations?: ListOrDict;
|
||||
attach?: boolean | string;
|
||||
build?:
|
||||
| string
|
||||
| {
|
||||
context?: string;
|
||||
dockerfile?: string;
|
||||
dockerfile_inline?: string;
|
||||
entitlements?: string[];
|
||||
args?: ListOrDict;
|
||||
ssh?: ListOrDict;
|
||||
labels?: ListOrDict;
|
||||
cache_from?: string[];
|
||||
cache_to?: string[];
|
||||
no_cache?: boolean | string;
|
||||
additional_contexts?: ListOrDict;
|
||||
network?: string;
|
||||
pull?: boolean | string;
|
||||
target?: string;
|
||||
shm_size?: number | string;
|
||||
extra_hosts?: ExtraHosts;
|
||||
isolation?: string;
|
||||
privileged?: boolean | string;
|
||||
secrets?: ServiceConfigOrSecret;
|
||||
tags?: string[];
|
||||
ulimits?: Ulimits;
|
||||
platforms?: string[];
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
blkio_config?: {
|
||||
device_read_bps?: BlkioLimit[];
|
||||
device_read_iops?: BlkioLimit[];
|
||||
device_write_bps?: BlkioLimit[];
|
||||
device_write_iops?: BlkioLimit[];
|
||||
weight?: number | string;
|
||||
weight_device?: BlkioWeight[];
|
||||
};
|
||||
cap_add?: string[];
|
||||
cap_drop?: string[];
|
||||
cgroup?: 'host' | 'private';
|
||||
cgroup_parent?: string;
|
||||
command?: Command;
|
||||
configs?: ServiceConfigOrSecret;
|
||||
container_name?: string;
|
||||
cpu_count?: string | number;
|
||||
cpu_percent?: string | number;
|
||||
cpu_shares?: number | string;
|
||||
cpu_quota?: number | string;
|
||||
cpu_period?: number | string;
|
||||
cpu_rt_period?: number | string;
|
||||
cpu_rt_runtime?: number | string;
|
||||
cpus?: number | string;
|
||||
cpuset?: string;
|
||||
credential_spec?: {
|
||||
config?: string;
|
||||
file?: string;
|
||||
registry?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
depends_on?:
|
||||
| ListOfStrings
|
||||
| {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
|
||||
*/
|
||||
[k: string]: {
|
||||
restart?: boolean | string;
|
||||
required?: boolean;
|
||||
condition: 'service_started' | 'service_healthy' | 'service_completed_successfully';
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
};
|
||||
device_cgroup_rules?: ListOfStrings;
|
||||
devices?: (
|
||||
| string
|
||||
| {
|
||||
source: string;
|
||||
target?: string;
|
||||
permissions?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}
|
||||
)[];
|
||||
dns?: StringOrList;
|
||||
dns_opt?: string[];
|
||||
dns_search?: StringOrList;
|
||||
domainname?: string;
|
||||
entrypoint?: Command;
|
||||
env_file?: EnvFile;
|
||||
environment?: ListOrDict;
|
||||
expose?: (string | number)[];
|
||||
extends?:
|
||||
| string
|
||||
| {
|
||||
service: string;
|
||||
file?: string;
|
||||
};
|
||||
external_links?: string[];
|
||||
extra_hosts?: ExtraHosts;
|
||||
group_add?: (string | number)[];
|
||||
healthcheck?: DefinitionsHealthcheck;
|
||||
hostname?: string;
|
||||
image?: string;
|
||||
init?: boolean | string;
|
||||
ipc?: string;
|
||||
isolation?: string;
|
||||
labels?: ListOrDict;
|
||||
links?: string[];
|
||||
logging?: {
|
||||
driver?: string;
|
||||
options?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string | number | null;
|
||||
};
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
mac_address?: string;
|
||||
mem_limit?: number | string;
|
||||
mem_reservation?: string | number;
|
||||
mem_swappiness?: number | string;
|
||||
memswap_limit?: number | string;
|
||||
network_mode?: string;
|
||||
networks?:
|
||||
| ListOfStrings
|
||||
| {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
|
||||
*/
|
||||
[k: string]: {
|
||||
aliases?: ListOfStrings;
|
||||
ipv4_address?: string;
|
||||
ipv6_address?: string;
|
||||
link_local_ips?: ListOfStrings;
|
||||
mac_address?: string;
|
||||
driver_opts?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string | number;
|
||||
};
|
||||
priority?: number;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
} | null;
|
||||
};
|
||||
oom_kill_disable?: boolean | string;
|
||||
oom_score_adj?: string | number;
|
||||
pid?: string | null;
|
||||
pids_limit?: number | string;
|
||||
platform?: string;
|
||||
ports?: (
|
||||
| number
|
||||
| string
|
||||
| {
|
||||
name?: string;
|
||||
mode?: string;
|
||||
host_ip?: string;
|
||||
target?: number | string;
|
||||
published?: string | number;
|
||||
protocol?: string;
|
||||
app_protocol?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}
|
||||
)[];
|
||||
post_start?: DefinitionsServiceHook[];
|
||||
pre_stop?: DefinitionsServiceHook[];
|
||||
privileged?: boolean | string;
|
||||
profiles?: ListOfStrings;
|
||||
pull_policy?: 'always' | 'never' | 'if_not_present' | 'build' | 'missing';
|
||||
read_only?: boolean | string;
|
||||
restart?: string;
|
||||
runtime?: string;
|
||||
scale?: number | string;
|
||||
security_opt?: string[];
|
||||
shm_size?: number | string;
|
||||
secrets?: ServiceConfigOrSecret;
|
||||
sysctls?: ListOrDict;
|
||||
stdin_open?: boolean | string;
|
||||
stop_grace_period?: string;
|
||||
stop_signal?: string;
|
||||
storage_opt?: {
|
||||
[k: string]: unknown;
|
||||
};
|
||||
tmpfs?: StringOrList;
|
||||
tty?: boolean | string;
|
||||
ulimits?: Ulimits;
|
||||
user?: string;
|
||||
uts?: string;
|
||||
userns_mode?: string;
|
||||
volumes?: (
|
||||
| string
|
||||
| {
|
||||
type: string;
|
||||
source?: string;
|
||||
target?: string;
|
||||
read_only?: boolean | string;
|
||||
consistency?: string;
|
||||
bind?: {
|
||||
propagation?: string;
|
||||
create_host_path?: boolean | string;
|
||||
selinux?: 'z' | 'Z';
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
volume?: {
|
||||
nocopy?: boolean | string;
|
||||
subpath?: string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
tmpfs?: {
|
||||
size?: number | string;
|
||||
mode?: number | string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}
|
||||
)[];
|
||||
volumes_from?: string[];
|
||||
working_dir?: string;
|
||||
/**
|
||||
* This interface was referenced by `DefinitionsService`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}
|
||||
export interface Ulimits {
|
||||
/**
|
||||
* This interface was referenced by `Ulimits`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^[a-z]+$".
|
||||
*/
|
||||
[k: string]:
|
||||
| (number | string)
|
||||
| {
|
||||
hard: number | string;
|
||||
soft: number | string;
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
};
|
||||
}
|
||||
export interface BlkioLimit {
|
||||
path?: string;
|
||||
rate?: number | string;
|
||||
}
|
||||
export interface BlkioWeight {
|
||||
path?: string;
|
||||
weight?: number | string;
|
||||
}
|
||||
export interface DefinitionsHealthcheck {
|
||||
disable?: boolean | string;
|
||||
interval?: string;
|
||||
retries?: number | string;
|
||||
test?: string | string[];
|
||||
timeout?: string;
|
||||
start_period?: string;
|
||||
start_interval?: string;
|
||||
/**
|
||||
* This interface was referenced by `DefinitionsHealthcheck`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}
|
||||
export interface DefinitionsServiceHook {
|
||||
command?: Command;
|
||||
user?: string;
|
||||
privileged?: boolean | string;
|
||||
working_dir?: string;
|
||||
environment?: ListOrDict;
|
||||
/**
|
||||
* This interface was referenced by `DefinitionsServiceHook`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}
|
||||
export interface PropertiesNetworks {
|
||||
[k: string]: DefinitionsNetwork;
|
||||
}
|
||||
export interface PropertiesVolumes {
|
||||
[k: string]: DefinitionsVolume;
|
||||
}
|
||||
export interface PropertiesSecrets {
|
||||
[k: string]: DefinitionsSecret;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `PropertiesSecrets`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
|
||||
*/
|
||||
export interface DefinitionsSecret {
|
||||
name?: string;
|
||||
environment?: string;
|
||||
file?: string;
|
||||
external?:
|
||||
| boolean
|
||||
| string
|
||||
| {
|
||||
name?: string;
|
||||
[k: string]: unknown;
|
||||
};
|
||||
labels?: ListOrDict;
|
||||
driver?: string;
|
||||
driver_opts?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string | number;
|
||||
};
|
||||
template_driver?: string;
|
||||
/**
|
||||
* This interface was referenced by `DefinitionsSecret`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}
|
||||
export interface PropertiesConfigs {
|
||||
[k: string]: DefinitionsConfig;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `PropertiesConfigs`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
|
||||
*/
|
||||
export interface DefinitionsConfig {
|
||||
name?: string;
|
||||
content?: string;
|
||||
environment?: string;
|
||||
file?: string;
|
||||
external?:
|
||||
| boolean
|
||||
| string
|
||||
| {
|
||||
name?: string;
|
||||
[k: string]: unknown;
|
||||
};
|
||||
labels?: ListOrDict;
|
||||
template_driver?: string;
|
||||
/**
|
||||
* This interface was referenced by `DefinitionsConfig`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}
|
||||
docker/lib/index.ts (new file, 2 lines)
@@ -0,0 +1,2 @@
export * from 'lib/build';
export * from 'lib/types';
57 docker/lib/types.ts Normal file
@@ -0,0 +1,57 @@
export enum ServiceName {
  ImmichServer = 'immich-server',
  ImmichMachineLearning = 'immich-machine-learning',
  Postgres = 'immich-postgres',
  Redis = 'immich-redis',
}

export enum ContainerName {
  ImmichServer = 'immich-server',
  ImmichMachineLearning = 'immich-machine-learning',
  Postgres = 'immich-postgres',
  Redis = 'immich-redis',
}

export type BaseOptions = {
  releaseVersion: string;
  healthchecks?: boolean;
  machineLearning: boolean;
  containerNames?: boolean;
  serverTimeZone?: string;
};

export type GeneratorOptions = BaseOptions & FolderOptions & PostgresOptions & RedisOptions;

export type FolderOptions = {
  baseLocation: string;
  encodedVideoLocation?: string;
  libraryLocation?: string;
  uploadLocation?: string;
  profileLocation?: string;
  thumbnailsLocation?: string;
  backupsLocation?: string;
};

export type PostgresOptions = InternalPostgresOptions | ExternalPostgresOptions;
export type InternalPostgresOptions = {
  postgresUser: string;
  postgresPassword: string;
  postgresDatabase: string;
  postgresDataLocation: string;
};
export type ExternalPostgresOptions = { postgresUrl: string; postgresVectorExtension?: VectorExtension };

export type RedisOptions = ExternalRedisOptions | IoRedisOptions | { redis: true };
export type ExternalRedisOptions = {
  redisHost: string;
  redisPort: number;
  redisDbIndex?: number;
  redisUsername?: string;
  redisPassword?: string;
  redisSocket?: string;
};
export type IoRedisOptions = { redisUrl: string };

export type VectorExtension = 'pgvector' | 'pgvecto.rs';

export type HardwareAccelerationPlatform = 'nvenc' | 'quicksync' | 'rkmpp' | 'vaapi' | 'vaapi-wsl';
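A `GeneratorOptions` value has to satisfy one member of each union. As a sketch (the values are illustrative, not defaults), an externally managed Postgres and Redis would look like this; omitting `redis: true` as well as every `redisHost`/`redisUrl` field would be rejected by the compiler:

```ts
import { GeneratorOptions } from 'lib/types';

// Illustrative only: external Postgres plus an external Redis host.
const options: GeneratorOptions = {
  releaseVersion: 'v1.122.0',
  baseLocation: '/home/immich/library',
  machineLearning: true,
  postgresUrl: 'postgres://immich:immich@db.internal:5432/immich',
  postgresVectorExtension: 'pgvector',
  redisHost: 'redis.internal',
  redisPort: 6379,
};
```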
84 docker/lib/utils.ts Normal file
@@ -0,0 +1,84 @@
import {
  ExternalPostgresOptions,
  ExternalRedisOptions,
  GeneratorOptions,
  IoRedisOptions,
  PostgresOptions,
  RedisOptions,
  ServiceName,
} from 'lib/types';

export const isExternalPostgres = (options: PostgresOptions): options is ExternalPostgresOptions =>
  'postgresUrl' in options;

export const isIoRedis = (options: RedisOptions): options is IoRedisOptions => 'redisUrl' in options;
export const isExternalRedis = (options: RedisOptions): options is ExternalRedisOptions => 'redisHost' in options;

export const asQueryParams = (values: Record<string, string | number | boolean | undefined>) => {
  return new URLSearchParams(
    Object.entries(values)
      // Drop unset values; filtering on the entry tuple itself would keep everything, since arrays are always truthy.
      .filter(([, value]) => value !== undefined)
      .map(([key, value]) => [key, String(value)]),
  ).toString();
};

export const getImmichVolumes = (options: GeneratorOptions) => {
  const {
    baseLocation,
    encodedVideoLocation,
    uploadLocation,
    backupsLocation,
    profileLocation,
    libraryLocation,
    thumbnailsLocation,
  } = options;

  const internalBaseLocation = '/usr/src/app/upload';

  const volumes = [`${baseLocation}:${internalBaseLocation}`];

  for (const { override, folder } of [
    { override: encodedVideoLocation, folder: 'encoded-video' },
    { override: libraryLocation, folder: 'library' },
    { override: uploadLocation, folder: 'upload' },
    { override: profileLocation, folder: 'profile' },
    { override: thumbnailsLocation, folder: 'thumbs' },
    { override: backupsLocation, folder: 'backups' },
  ]) {
    if (override) {
      volumes.push(`${override}:${internalBaseLocation}/${folder}`);
    }
  }

  volumes.push(`/etc/localtime:/etc/localtime:ro`);

  return volumes;
};

export const getImmichEnvironment = (options: GeneratorOptions) => {
  const env: Record<string, string | number | undefined> = {};
  if (isExternalPostgres(options)) {
    env.DB_URL = options.postgresUrl;
    env.DB_VECTOR_EXTENSION = options.postgresVectorExtension;
  } else {
    const { postgresUser, postgresPassword, postgresDatabase } = options;
    env.DB_URL = `postgres://${postgresUser}:${postgresPassword}@${ServiceName.Postgres}:5432/${postgresDatabase}`;
  }

  if (isIoRedis(options)) {
    env.REDIS_URL = options.redisUrl;
  } else if (isExternalRedis(options)) {
    env.REDIS_HOSTNAME = options.redisHost;
    env.REDIS_PORT = options.redisPort;
    env.REDIS_DBINDEX = options.redisDbIndex;
    env.REDIS_USERNAME = options.redisUsername;
    env.REDIS_PASSWORD = options.redisPassword;
    env.REDIS_SOCKET = options.redisSocket;
  } else {
    env.REDIS_HOSTNAME = ServiceName.Redis;
  }

  env.TZ = options.serverTimeZone;

  return env;
};
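The type guards above are what let `getImmichEnvironment` narrow the `GeneratorOptions` union at runtime. A short usage sketch with illustrative values (the `lib/utils` import path follows the aliases used elsewhere in this diff, but is an assumption):

```ts
import { GeneratorOptions } from 'lib/types';
import { getImmichEnvironment, getImmichVolumes } from 'lib/utils';

// Illustrative options: external Postgres, ioredis-style Redis URL, one folder override.
const options: GeneratorOptions = {
  releaseVersion: 'v1.122.0',
  baseLocation: '/home/immich/library',
  machineLearning: true,
  postgresUrl: 'postgres://immich:immich@db.internal:5432/immich',
  redisUrl: 'ioredis://<base64>',
  thumbnailsLocation: '/home/fast/thumbs',
};

// DB_URL and REDIS_URL are taken verbatim; TZ stays undefined because serverTimeZone is unset.
console.log(getImmichEnvironment(options));

// ['/home/immich/library:/usr/src/app/upload',
//  '/home/fast/thumbs:/usr/src/app/upload/thumbs',
//  '/etc/localtime:/etc/localtime:ro']
console.log(getImmichVolumes(options));
```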
4555 docker/package-lock.json generated Normal file
File diff suppressed because it is too large
38 docker/package.json Normal file
@@ -0,0 +1,38 @@
{
  "name": "immich-docker",
  "version": "0.0.0",
  "description": "A docker-compose generator for Immich",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1",
    "build": "vite build",
    "generate": "npx tsx src/index.ts",
    "lint": "eslint . --max-warnings 0",
    "lint:fix": "npm run lint -- --fix",
    "format": "prettier --check .",
    "format:fix": "prettier --write ."
  },
  "type": "module",
  "exports": "./dist/immich-docker.js",
  "author": "team@immich.app",
  "private": true,
  "license": "GNU Affero General Public License version 3",
  "dependencies": {
    "js-yaml": "^4.1.0"
  },
  "devDependencies": {
    "@eslint/eslintrc": "^3.2.0",
    "@eslint/js": "^9.16.0",
    "@types/js-yaml": "^4.0.9",
    "@types/node": "^22.10.2",
    "@typescript-eslint/eslint-plugin": "^8.18.0",
    "@typescript-eslint/parser": "^8.18.0",
    "eslint-plugin-unicorn": "^56.0.1",
    "globals": "^15.13.0",
    "json-schema-to-ts": "^3.1.1",
    "prettier-plugin-organize-imports": "^4.1.0",
    "prettier-plugin-sort-json": "^4.0.0",
    "vite": "^6.0.3",
    "vitest": "^2.1.8"
  }
}
@@ -1,5 +1,5 @@
global:
  scrape_interval: 15s
  scrape_interval: 15s
  evaluation_interval: 15s

scrape_configs:
60 docker/src/index.ts Normal file
@@ -0,0 +1,60 @@
import { mkdirSync, writeFileSync } from 'node:fs';
import { buildYaml } from '../lib/index';
import { GeneratorOptions } from '../lib/types';

const main = () => {
  const commonOptions = {
    releaseVersion: 'v1.122.0',
    baseLocation: '/home/immich/library',
    serverTimeZone: 'America/New_York',
    healthchecks: true,
    machineLearning: true,
    containerNames: true,
    // hardwareAcceleration: 'nvenc',
  };

  const postgresOptions = {
    postgresUser: 'postgres',
    postgresPassword: 'postgres',
    postgresDatabase: 'immich',
    postgresDataLocation: '/home/immich/database',
  };

  const defaultOptions: GeneratorOptions = { ...commonOptions, ...postgresOptions, redis: true };

  const samples: Array<{ name: string; options: GeneratorOptions }> = [
    { name: 'defaults', options: defaultOptions },
    { name: 'no-names', options: { ...defaultOptions, containerNames: false } },
    { name: 'no-healthchecks', options: { ...defaultOptions, healthchecks: false } },
    { name: 'external-ioredis', options: { ...defaultOptions, redisUrl: 'ioredis://<base64>' } },
    { name: 'external-redis', options: { ...defaultOptions, redisHost: '192.168.0.5', redisPort: 1234 } },
    {
      name: 'external-postgres',
      options: {
        ...defaultOptions,
        postgresUrl: 'postgres://immich:immich@localhost:5432/immich',
        postgresVectorExtension: 'pgvector',
      },
    },
    {
      name: 'split-storage',
      options: {
        ...defaultOptions,
        thumbnailsLocation: '/home/fast/thumbs',
        encodedVideoLocation: '/home/fast/encoded-videos',
      },
    },
  ];

  // TODO replace with vitest test files/scripts
  mkdirSync('./examples', { recursive: true });
  for (const { name, options } of samples) {
    const spec = buildYaml(options);

    const filename = `./examples/docker-compose.${name}.yaml`;
    writeFileSync(filename, spec);
    console.log(`Wrote ${filename}`);
  }
};

main();
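The TODO above mentions replacing this script with vitest. A minimal snapshot-style sketch of what such a test could look like; it assumes `buildYaml` returns the generated YAML as a string, that the test lives somewhere like docker/test/build.spec.ts, and that vitest resolves the `lib` alias from vite.config.js:

```ts
// Sketch only: file location and alias resolution are assumptions.
import { describe, expect, it } from 'vitest';
import { buildYaml } from 'lib/index';
import { GeneratorOptions } from 'lib/types';

describe('buildYaml', () => {
  it('matches the stored snapshot for the default options', () => {
    const options: GeneratorOptions = {
      releaseVersion: 'v1.122.0',
      baseLocation: '/home/immich/library',
      machineLearning: true,
      postgresUser: 'postgres',
      postgresPassword: 'postgres',
      postgresDatabase: 'immich',
      postgresDataLocation: '/home/immich/database',
      redis: true,
    };

    expect(buildYaml(options)).toMatchSnapshot();
  });
});
```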
19 docker/tsconfig.json Normal file
@@ -0,0 +1,19 @@
{
  "compilerOptions": {
    "allowJs": true,
    "baseUrl": "./",
    "checkJs": true,
    "esModuleInterop": true,
    "forceConsistentCasingInFileNames": true,
    "module": "es2020",
    "moduleResolution": "bundler",
    "outDir": "./dist",
    "resolveJsonModule": true,
    "skipLibCheck": true,
    "sourceMap": true,
    "strict": true,
    "target": "es2022",
    "types": []
  },
  "include": ["lib"]
}
25 docker/tsconfig.src.json Normal file
@@ -0,0 +1,25 @@
{
  "compilerOptions": {
    "allowSyntheticDefaultImports": true,
    "baseUrl": "./",
    "declaration": true,
    "emitDecoratorMetadata": true,
    "esModuleInterop": true,
    "experimentalDecorators": true,
    "forceConsistentCasingInFileNames": true,
    "incremental": true,
    "jsx": "react",
    "lib": ["dom", "es2023"],
    "module": "node16",
    "moduleResolution": "node16",
    "outDir": "./dist",
    "preserveWatchOutput": true,
    "removeComments": true,
    "resolveJsonModule": true,
    "skipLibCheck": true,
    "sourceMap": true,
    "strict": true,
    "target": "es2022"
  },
  "include": ["src", "lib"]
}
20 docker/vite.config.js Normal file
@@ -0,0 +1,20 @@
import { resolve } from 'node:path';
import { defineConfig } from 'vite';

export default defineConfig({
  resolve: {
    alias: {
      lib: resolve('lib'),
      src: resolve('src'),
      test: resolve('test'),
    },
  },
  build: {
    lib: {
      entry: resolve(__dirname, 'lib/index.ts'),
      name: 'immich-docker',
      // the proper extensions will be added
      fileName: 'immich-docker',
    },
  },
});
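With this library build, the bundle exposed through the package.json `exports` field could be consumed from another Node project. A sketch under stated assumptions: the package is currently private, so this presumes it would be linked locally or published under the `immich-docker` name, and that `buildYaml` (seen in src/index.ts above) returns a YAML string:

```ts
// Sketch: consuming the built bundle from a separate project.
import { writeFileSync } from 'node:fs';
import { buildYaml } from 'immich-docker';

const yaml = buildYaml({
  releaseVersion: 'v1.122.0',
  baseLocation: '/srv/immich/library',
  machineLearning: false,
  postgresUser: 'postgres',
  postgresPassword: 'postgres',
  postgresDatabase: 'immich',
  postgresDataLocation: '/srv/immich/database',
  redis: true,
});

writeFileSync('docker-compose.yaml', yaml);
```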
4 docs/.gitignore vendored
@@ -18,6 +18,4 @@
npm-debug.log*
yarn-debug.log*
yarn-error.log*
yarn.lock

/static/openapi.json
yarn.lock
@@ -1 +1 @@
22.20.0
22.12.0
Some files were not shown because too many files have changed in this diff.