Compare commits


4 Commits

Author    SHA1        Message                                        Date
mertalev  b803a35bdc  preserve file extension                        2025-04-01 16:37:06 -04:00
mertalev  0da1c3b279  add empty array fallback just in case for now  2025-04-01 15:58:32 -04:00
mertalev  33c9ea1c9c  update tests                                   2025-04-01 15:51:08 -04:00
mertalev  e7503ce3dc  fix file path logic                            2025-04-01 15:51:08 -04:00
1408 changed files with 73114 additions and 87956 deletions

View File

@@ -1,4 +1,4 @@
-ARG BASEIMAGE=mcr.microsoft.com/devcontainers/typescript-node:22@sha256:a20b8a3538313487ac9266875bbf733e544c1aa2091df2bb99ab592a6d4f7399
+ARG BASEIMAGE=mcr.microsoft.com/devcontainers/typescript-node:22@sha256:2ef23730ec68d8511ec8e6e0b82550ca728b256805d81f60ed890f3bfb21cfb9
 FROM ${BASEIMAGE}
 # Flutter SDK

.gitattributes

@@ -6,9 +6,6 @@ mobile/openapi/**/*.dart linguist-generated=true
 mobile/lib/**/*.g.dart -diff -merge
 mobile/lib/**/*.g.dart linguist-generated=true
-mobile/lib/**/*.drift.dart -diff -merge
-mobile/lib/**/*.drift.dart linguist-generated=true
 open-api/typescript-sdk/fetch-client.ts -diff -merge
 open-api/typescript-sdk/fetch-client.ts linguist-generated=true

.github/.nvmrc

@@ -1 +1 @@
-22.15.1
+22.14.0

.github/actions/image-build (deleted)

@@ -1,118 +0,0 @@
-name: 'Single arch image build'
-description: 'Build single-arch image on platform appropriate runner'
-inputs:
-  image:
-    description: 'Name of the image to build'
-    required: true
-  ghcr-token:
-    description: 'GitHub Container Registry token'
-    required: true
-  platform:
-    description: 'Platform to build for'
-    required: true
-  artifact-key-base:
-    description: 'Base key for artifact name'
-    required: true
-  context:
-    description: 'Path to build context'
-    required: true
-  dockerfile:
-    description: 'Path to Dockerfile'
-    required: true
-  build-args:
-    description: 'Docker build arguments'
-    required: false
-runs:
-  using: 'composite'
-  steps:
-    - name: Prepare
-      id: prepare
-      shell: bash
-      env:
-        PLATFORM: ${{ inputs.platform }}
-      run: |
-        echo "platform-pair=${PLATFORM//\//-}" >> $GITHUB_OUTPUT
-    - name: Set up Docker Buildx
-      uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
-    - name: Login to GitHub Container Registry
-      uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
-      if: ${{ !github.event.pull_request.head.repo.fork }}
-      with:
-        registry: ghcr.io
-        username: ${{ github.repository_owner }}
-        password: ${{ inputs.ghcr-token }}
-    - name: Generate cache key suffix
-      id: cache-key-suffix
-      shell: bash
-      env:
-        REF: ${{ github.ref_name }}
-      run: |
-        if [[ "${{ github.event_name }}" == "pull_request" ]]; then
-          echo "suffix=pr-${{ github.event.number }}" >> $GITHUB_OUTPUT
-        else
-          SUFFIX=$(echo "${REF}" | sed 's/[^a-zA-Z0-9]/-/g')
-          echo "suffix=${SUFFIX}" >> $GITHUB_OUTPUT
-        fi
-    - name: Generate cache target
-      id: cache-target
-      shell: bash
-      env:
-        BUILD_ARGS: ${{ inputs.build-args }}
-        IMAGE: ${{ inputs.image }}
-        SUFFIX: ${{ steps.cache-key-suffix.outputs.suffix }}
-        PLATFORM_PAIR: ${{ steps.prepare.outputs.platform-pair }}
-      run: |
-        HASH=$(sha256sum <<< "${BUILD_ARGS}" | cut -d' ' -f1)
-        CACHE_KEY="${PLATFORM_PAIR}-${HASH}"
-        echo "cache-key-base=${CACHE_KEY}" >> $GITHUB_OUTPUT
-        if [[ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]]; then
-          # Essentially just ignore the cache output (forks can't write to registry cache)
-          echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
-        else
-          echo "cache-to=type=registry,ref=${IMAGE}-build-cache:${CACHE_KEY}-${SUFFIX},mode=max,compression=zstd" >> $GITHUB_OUTPUT
-        fi
-    - name: Generate docker image tags
-      id: meta
-      uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
-      env:
-        DOCKER_METADATA_PR_HEAD_SHA: 'true'
-    - name: Build and push image
-      id: build
-      uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
-      with:
-        context: ${{ inputs.context }}
-        file: ${{ inputs.dockerfile }}
-        platforms: ${{ inputs.platform }}
-        labels: ${{ steps.meta.outputs.labels }}
-        cache-to: ${{ steps.cache-target.outputs.cache-to }}
-        cache-from: |
-          type=registry,ref=${{ inputs.image }}-build-cache:${{ steps.cache-target.outputs.cache-key-base }}-${{ steps.cache-key-suffix.outputs.suffix }}
-          type=registry,ref=${{ inputs.image }}-build-cache:${{ steps.cache-target.outputs.cache-key-base }}-main
-        outputs: type=image,"name=${{ inputs.image }}",push-by-digest=true,name-canonical=true,push=${{ !github.event.pull_request.head.repo.fork }}
-        build-args: |
-          BUILD_ID=${{ github.run_id }}
-          BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.meta.outputs.tags }}
-          BUILD_SOURCE_REF=${{ github.ref_name }}
-          BUILD_SOURCE_COMMIT=${{ github.sha }}
-          ${{ inputs.build-args }}
-    - name: Export digest
-      shell: bash
-      run: | # zizmor: ignore[template-injection]
-        mkdir -p ${{ runner.temp }}/digests
-        digest="${{ steps.build.outputs.digest }}"
-        touch "${{ runner.temp }}/digests/${digest#sha256:}"
-    - name: Upload digest
-      uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
-      with:
-        name: ${{ inputs.artifact-key-base }}-${{ steps.cache-target.outputs.cache-key-base }}
-        path: ${{ runner.temp }}/digests/*
-        if-no-files-found: error
-        retention-days: 1
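The deleted action keys its registry build cache on the platform plus a sha256 digest of the build arguments, with a branch- or PR-derived suffix, so variants like DEVICE=cuda and DEVICE=cpu never share a cache line. A runnable sketch of the naming scheme with illustrative values (the image path is hypothetical):

    PLATFORM="linux/amd64"
    BUILD_ARGS="DEVICE=cuda"
    REF="feature/foo-bar"
    PLATFORM_PAIR="${PLATFORM//\//-}"                      # -> linux-amd64
    HASH=$(sha256sum <<< "${BUILD_ARGS}" | cut -d' ' -f1)  # digest of the build args
    SUFFIX=$(echo "${REF}" | sed 's/[^a-zA-Z0-9]/-/g')     # -> feature-foo-bar
    echo "ghcr.io/immich-app/immich-server-build-cache:${PLATFORM_PAIR}-${HASH}-${SUFFIX}"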

.github/workflows/build-mobile.yml

@@ -7,15 +7,6 @@ on:
       ref:
         required: false
         type: string
-    secrets:
-      KEY_JKS:
-        required: true
-      ALIAS:
-        required: true
-      ANDROID_KEY_PASSWORD:
-        required: true
-      ANDROID_STORE_PASSWORD:
-        required: true
   pull_request:
   push:
     branches: [main]
@@ -24,23 +15,16 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
-permissions: {}
 jobs:
   pre-job:
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     outputs:
       should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
       - id: found_paths
-        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
+        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
         with:
           filters: |
             mobile:
@@ -54,26 +38,31 @@ jobs:
   build-sign-android:
     name: Build and sign Android
     needs: pre-job
-    permissions:
-      contents: read
     # Skip when PR from a fork
     if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' && needs.pre-job.outputs.should_run == 'true' }}
     runs-on: macos-14
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          ref: ${{ inputs.ref || github.sha }}
-          persist-credentials: false
-      - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
+      - name: Determine ref
+        id: get-ref
+        run: |
+          input_ref="${{ inputs.ref }}"
+          github_ref="${{ github.sha }}"
+          ref="${input_ref:-$github_ref}"
+          echo "ref=$ref" >> $GITHUB_OUTPUT
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          ref: ${{ steps.get-ref.outputs.ref }}
+      - uses: actions/setup-java@3a4f6e1af504cf6a31855fa899c6aa5355ba6c12 # v4
         with:
           distribution: 'zulu'
           java-version: '17'
           cache: 'gradle'
       - name: Setup Flutter SDK
-        uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2.19.0
+        uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2
         with:
           channel: 'stable'
           flutter-version-file: ./mobile/pubspec.yaml
@@ -89,10 +78,6 @@
         working-directory: ./mobile
         run: flutter pub get
-      - name: Generate translation file
-        run: make translation
-        working-directory: ./mobile
@@ -104,7 +89,7 @@
           flutter build apk --release --split-per-abi --target-platform android-arm,android-arm64,android-x64
       - name: Publish Android Artifact
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
         with:
           name: release-apk-signed
           path: mobile/build/app/outputs/flutter-apk/*.apk
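The new "Determine ref" step is plain shell default-value expansion: use inputs.ref when the workflow is called reusably, otherwise fall back to the pushing commit. A minimal sketch of the same pattern (values are illustrative):

    input_ref=""          # empty when the workflow was not called with a ref
    github_ref="0a1b2c3"  # illustrative commit SHA
    ref="${input_ref:-$github_ref}"  # ${var:-default} picks the fallback when var is empty or unset
    echo "ref=$ref"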

View File

@@ -8,38 +8,31 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
-permissions: {}
 jobs:
   cleanup:
     name: Cleanup
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
-      actions: write
     steps:
       - name: Check out code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
       - name: Cleanup
-        env:
-          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          REF: ${{ github.ref }}
         run: |
           gh extension install actions/gh-actions-cache
           REPO=${{ github.repository }}
+          BRANCH=${{ github.ref }}
           echo "Fetching list of cache keys"
-          cacheKeysForPR=$(gh actions-cache list -R $REPO -B ${REF} -L 100 | cut -f 1 )
+          cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )
           ## Setting this to not fail the workflow while deleting cache keys.
           set +e
           echo "Deleting caches..."
           for cacheKey in $cacheKeysForPR
           do
-            gh actions-cache delete $cacheKey -R "$REPO" -B "${REF}" --confirm
+            gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
           done
           echo "Done"
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
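For reference, the same cleanup can be done without the gh-actions-cache extension by calling the Actions cache REST API through gh itself (sketch; OWNER/REPO and the key are placeholders, and GH_TOKEN must be set):

    # List cache keys for the repository (optionally filter with &ref=refs/heads/main)
    gh api --method GET "/repos/OWNER/REPO/actions/caches?per_page=100" --jq '.actions_caches[].key'
    # Delete every cache entry matching a key
    gh api --method DELETE "/repos/OWNER/REPO/actions/caches?key=SOME_CACHE_KEY"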

View File

@@ -16,25 +16,21 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
-permissions: {}
+permissions:
+  packages: write
 jobs:
   publish:
     name: CLI Publish
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     defaults:
       run:
         working-directory: ./cli
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
       # Setup .npmrc file to publish to npm
-      - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+      - uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
        with:
          node-version-file: './cli/.nvmrc'
          registry-url: 'https://registry.npmjs.org'
@@ -52,16 +48,11 @@ jobs:
   docker:
     name: Docker
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
-      packages: write
     needs: publish
     steps:
       - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
       - name: Set up QEMU
         uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
@@ -70,7 +61,7 @@
         uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
       - name: Login to GitHub Container Registry
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
         if: ${{ !github.event.pull_request.head.repo.fork }}
         with:
           registry: ghcr.io
@@ -85,7 +76,7 @@
       - name: Generate docker image tags
         id: metadata
-        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
+        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
         with:
           flavor: |
             latest=false
@@ -96,7 +87,7 @@
             type=raw,value=latest,enable=${{ github.event_name == 'release' }}
       - name: Build and push image
-        uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
+        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
         with:
           file: cli/Dockerfile
           platforms: linux/amd64,linux/arm64

View File

@@ -24,8 +24,6 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
-permissions: {}
 jobs:
   analyze:
     name: Analyze
@@ -44,13 +42,11 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@60168efe1c415ce0f5521ea06d5c2062adbeed1b # v3.28.17
+        uses: github/codeql-action/init@1b549b9259bda1cb5ddde3b41741a82a2d15a841 # v3
         with:
           languages: ${{ matrix.language }}
           # If you wish to specify custom queries, you can do so here or in a config file.
@@ -63,7 +59,7 @@
       # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
       # If this step fails, then you should remove it and run the build manually (see below)
       - name: Autobuild
-        uses: github/codeql-action/autobuild@60168efe1c415ce0f5521ea06d5c2062adbeed1b # v3.28.17
+        uses: github/codeql-action/autobuild@1b549b9259bda1cb5ddde3b41741a82a2d15a841 # v3
       # Command-line programs to run using the OS shell.
       # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -76,6 +72,6 @@
       #   ./location_of_script_within_repo/buildscript.sh
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@60168efe1c415ce0f5521ea06d5c2062adbeed1b # v3.28.17
+        uses: github/codeql-action/analyze@1b549b9259bda1cb5ddde3b41741a82a2d15a841 # v3
         with:
           category: '/language:${{matrix.language}}'

.github/workflows/docker.yml

@@ -12,23 +12,20 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
-permissions: {}
+permissions:
+  packages: write
 jobs:
   pre-job:
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     outputs:
       should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
       should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
       - id: found_paths
-        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
+        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
         with:
           filters: |
             server:
@@ -40,8 +37,6 @@ jobs:
             - 'machine-learning/**'
           workflow:
             - '.github/workflows/docker.yml'
-            - '.github/workflows/multi-runner-build.yml'
-            - '.github/actions/image-build'
       - name: Check if we should force jobs to run
         id: should_force
@@ -50,9 +45,6 @@ jobs:
   retag_ml:
     name: Re-Tag ML
     needs: pre-job
-    permissions:
-      contents: read
-      packages: write
     if: ${{ needs.pre-job.outputs.should_run_ml == 'false' && !github.event.pull_request.head.repo.fork }}
     runs-on: ubuntu-latest
     strategy:
@@ -60,28 +52,24 @@ jobs:
       matrix:
         suffix: ['', '-cuda', '-rocm', '-openvino', '-armnn', '-rknn']
     steps:
       - name: Login to GitHub Container Registry
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Re-tag image
-        env:
-          REGISTRY_NAME: 'ghcr.io'
-          REPOSITORY: ${{ github.repository_owner }}/immich-machine-learning
-          TAG_OLD: main${{ matrix.suffix }}
-          TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
-          TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
         run: |
-          docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
-          docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
+          REGISTRY_NAME="ghcr.io"
+          REPOSITORY=${{ github.repository_owner }}/immich-machine-learning
+          TAG_OLD=main${{ matrix.suffix }}
+          TAG_PR=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
+          TAG_COMMIT=commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
+          docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_PR $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
+          docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_COMMIT $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
   retag_server:
     name: Re-Tag Server
     needs: pre-job
-    permissions:
-      contents: read
-      packages: write
     if: ${{ needs.pre-job.outputs.should_run_server == 'false' && !github.event.pull_request.head.repo.fork }}
     runs-on: ubuntu-latest
     strategy:
@@ -89,91 +77,376 @@ jobs:
       matrix:
         suffix: ['']
     steps:
       - name: Login to GitHub Container Registry
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Re-tag image
-        env:
-          REGISTRY_NAME: 'ghcr.io'
-          REPOSITORY: ${{ github.repository_owner }}/immich-server
-          TAG_OLD: main${{ matrix.suffix }}
-          TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
-          TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
         run: |
-          docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
-          docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
+          REGISTRY_NAME="ghcr.io"
+          REPOSITORY=${{ github.repository_owner }}/immich-server
+          TAG_OLD=main${{ matrix.suffix }}
+          TAG_PR=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
+          TAG_COMMIT=commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
+          docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_PR $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
+          docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_COMMIT $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
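Both re-tag jobs lean on docker buildx imagetools create, which copies an existing manifest list to a new tag entirely on the registry side, so no image layers are pulled or pushed. A hedged example with a hypothetical owner and PR number:

    # Point a PR tag at the already-published main manifest, registry-side only
    docker buildx imagetools create \
      -t ghcr.io/immich-app/immich-server:pr-1234 \
      ghcr.io/immich-app/immich-server:main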
-  machine-learning:
+  build_and_push_ml:
     name: Build and Push ML
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
-    strategy:
-      fail-fast: false
-      matrix:
-        include:
-          - device: cpu
-            tag-suffix: ''
-          - device: cuda
-            tag-suffix: '-cuda'
-            platforms: linux/amd64
-          - device: openvino
-            tag-suffix: '-openvino'
-            platforms: linux/amd64
-          - device: armnn
-            tag-suffix: '-armnn'
-            platforms: linux/arm64
-          - device: rknn
-            tag-suffix: '-rknn'
-            platforms: linux/arm64
-          - device: rocm
-            tag-suffix: '-rocm'
-            platforms: linux/amd64
-            runner-mapping: '{"linux/amd64": "mich"}'
-    uses: ./.github/workflows/multi-runner-build.yml
-    permissions:
-      contents: read
-      actions: read
-      packages: write
-    secrets:
-      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
-      DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
-    with:
-      image: immich-machine-learning
-      context: machine-learning
-      dockerfile: machine-learning/Dockerfile
-      platforms: ${{ matrix.platforms }}
-      runner-mapping: ${{ matrix.runner-mapping }}
-      tag-suffix: ${{ matrix.tag-suffix }}
-      dockerhub-push: ${{ github.event_name == 'release' }}
-      build-args: |
-        DEVICE=${{ matrix.device }}
+    runs-on: ${{ matrix.runner }}
+    env:
+      image: immich-machine-learning
+      context: machine-learning
+      file: machine-learning/Dockerfile
+      GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-machine-learning
+    strategy:
+      # Prevent a failure in one image from stopping the other builds
+      fail-fast: false
+      matrix:
+        include:
+          - platform: linux/amd64
+            runner: ubuntu-latest
+            device: cpu
+          - platform: linux/arm64
+            runner: ubuntu-24.04-arm
+            device: cpu
+          - platform: linux/amd64
+            runner: ubuntu-latest
+            device: cuda
+            suffix: -cuda
+          - platform: linux/amd64
+            runner: mich
+            device: rocm
+            suffix: -rocm
+          - platform: linux/amd64
+            runner: ubuntu-latest
+            device: openvino
+            suffix: -openvino
+          - platform: linux/arm64
+            runner: ubuntu-24.04-arm
+            device: armnn
+            suffix: -armnn
+          - platform: linux/arm64
+            runner: ubuntu-24.04-arm
+            device: rknn
+            suffix: -rknn
+    steps:
+      - name: Prepare
+        run: |
+          platform=${{ matrix.platform }}
+          echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
+      - name: Checkout
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
+        if: ${{ !github.event.pull_request.head.repo.fork }}
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Generate cache key suffix
+        run: |
+          if [[ "${{ github.event_name }}" == "pull_request" ]]; then
+            echo "CACHE_KEY_SUFFIX=pr-${{ github.event.number }}" >> $GITHUB_ENV
+          else
+            echo "CACHE_KEY_SUFFIX=$(echo ${{ github.ref_name }} | sed 's/[^a-zA-Z0-9]/-/g')" >> $GITHUB_ENV
+          fi
+      - name: Generate cache target
+        id: cache-target
+        run: |
+          if [[ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]]; then
+            # Essentially just ignore the cache output (forks can't write to registry cache)
+            echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
+          else
+            echo "cache-to=type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ matrix.device }}-${{ env.CACHE_KEY_SUFFIX }},mode=max,compression=zstd" >> $GITHUB_OUTPUT
+          fi
+      - name: Build and push image
+        id: build
+        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
+        with:
+          context: ${{ env.context }}
+          file: ${{ env.file }}
+          platforms: ${{ matrix.platform }}
+          labels: ${{ steps.metadata.outputs.labels }}
+          cache-to: ${{ steps.cache-target.outputs.cache-to }}
+          cache-from: |
+            type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ matrix.device }}-${{ env.CACHE_KEY_SUFFIX }}
+            type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ matrix.device }}-main
+          outputs: type=image,"name=${{ env.GHCR_REPO }}",push-by-digest=true,name-canonical=true,push=${{ !github.event.pull_request.head.repo.fork }}
+          build-args: |
+            DEVICE=${{ matrix.device }}
+            BUILD_ID=${{ github.run_id }}
+            BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.metadata.outputs.tags }}
+            BUILD_SOURCE_REF=${{ github.ref_name }}
+            BUILD_SOURCE_COMMIT=${{ github.sha }}
+      - name: Export digest
+        run: |
+          mkdir -p ${{ runner.temp }}/digests
+          digest="${{ steps.build.outputs.digest }}"
+          touch "${{ runner.temp }}/digests/${digest#sha256:}"
+      - name: Upload digest
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
+        with:
+          name: ml-digests-${{ matrix.device }}-${{ env.PLATFORM_PAIR }}
+          path: ${{ runner.temp }}/digests/*
+          if-no-files-found: error
+          retention-days: 1
+  merge_ml:
+    name: Merge & Push ML
+    runs-on: ubuntu-latest
+    if: ${{ needs.pre-job.outputs.should_run_ml == 'true' && !github.event.pull_request.head.repo.fork }}
+    env:
+      GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-machine-learning
+      DOCKER_REPO: altran1502/immich-machine-learning
+    strategy:
+      matrix:
+        include:
+          - device: cpu
+          - device: cuda
+            suffix: -cuda
+          - device: rocm
+            suffix: -rocm
+          - device: openvino
+            suffix: -openvino
+          - device: armnn
+            suffix: -armnn
+          - device: rknn
+            suffix: -rknn
+    needs:
+      - build_and_push_ml
+    steps:
+      - name: Download digests
+        uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
+        with:
+          path: ${{ runner.temp }}/digests
+          pattern: ml-digests-${{ matrix.device }}-*
+          merge-multiple: true
+      - name: Login to Docker Hub
+        if: ${{ github.event_name == 'release' }}
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Login to GHCR
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
+      - name: Generate docker image tags
+        id: meta
+        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
+        env:
+          DOCKER_METADATA_PR_HEAD_SHA: 'true'
+        with:
+          flavor: |
+            # Disable latest tag
+            latest=false
+            suffix=${{ matrix.suffix }}
+          images: |
+            name=${{ env.GHCR_REPO }}
+            name=${{ env.DOCKER_REPO }},enable=${{ github.event_name == 'release' }}
+          tags: |
+            # Tag with branch name
+            type=ref,event=branch
+            # Tag with pr-number
+            type=ref,event=pr
+            # Tag with long commit sha hash
+            type=sha,format=long,prefix=commit-
+            # Tag with git tag on release
+            type=ref,event=tag
+            type=raw,value=release,enable=${{ github.event_name == 'release' }}
+      - name: Create manifest list and push
+        working-directory: ${{ runner.temp }}/digests
+        run: |
+          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
+            $(printf '${{ env.GHCR_REPO }}@sha256:%s ' *)
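The merge jobs stitch the per-platform digests back into one multi-arch image: each digest file uploaded by the build jobs becomes a source ref, and the metadata-action JSON is expanded into repeated -t flags with jq. A runnable sketch of that expansion (the tags are illustrative):

    DOCKER_METADATA_OUTPUT_JSON='{"tags":["ghcr.io/immich-app/immich-machine-learning:main","ghcr.io/immich-app/immich-machine-learning:commit-abc123"]}'
    jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON"
    # -t ghcr.io/immich-app/immich-machine-learning:main -t ghcr.io/immich-app/immich-machine-learning:commit-abc123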
-  server:
-    name: Build and Push Server
-    needs: pre-job
-    if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
-    uses: ./.github/workflows/multi-runner-build.yml
-    permissions:
-      contents: read
-      actions: read
-      packages: write
-    secrets:
-      DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
-      DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
-    with:
-      image: immich-server
-      context: .
-      dockerfile: server/Dockerfile
-      dockerhub-push: ${{ github.event_name == 'release' }}
-      build-args: |
-        DEVICE=cpu
+  build_and_push_server:
+    name: Build and Push Server
+    runs-on: ${{ matrix.runner }}
+    needs: pre-job
+    if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
+    env:
+      image: immich-server
+      context: .
+      file: server/Dockerfile
+      GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-server
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - platform: linux/amd64
+            runner: ubuntu-latest
+          - platform: linux/arm64
+            runner: ubuntu-24.04-arm
+    steps:
+      - name: Prepare
+        run: |
+          platform=${{ matrix.platform }}
+          echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
+      - name: Checkout
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
+        if: ${{ !github.event.pull_request.head.repo.fork }}
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Generate cache key suffix
+        run: |
+          if [[ "${{ github.event_name }}" == "pull_request" ]]; then
+            echo "CACHE_KEY_SUFFIX=pr-${{ github.event.number }}" >> $GITHUB_ENV
+          else
+            echo "CACHE_KEY_SUFFIX=$(echo ${{ github.ref_name }} | sed 's/[^a-zA-Z0-9]/-/g')" >> $GITHUB_ENV
+          fi
+      - name: Generate cache target
+        id: cache-target
+        run: |
+          if [[ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]]; then
+            # Essentially just ignore the cache output (forks can't write to registry cache)
+            echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
+          else
+            echo "cache-to=type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ env.CACHE_KEY_SUFFIX }},mode=max,compression=zstd" >> $GITHUB_OUTPUT
+          fi
+      - name: Build and push image
+        id: build
+        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
+        with:
+          context: ${{ env.context }}
+          file: ${{ env.file }}
+          platforms: ${{ matrix.platform }}
+          labels: ${{ steps.metadata.outputs.labels }}
+          cache-to: ${{ steps.cache-target.outputs.cache-to }}
+          cache-from: |
+            type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ env.CACHE_KEY_SUFFIX }}
+            type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-main
+          outputs: type=image,"name=${{ env.GHCR_REPO }}",push-by-digest=true,name-canonical=true,push=${{ !github.event.pull_request.head.repo.fork }}
+          build-args: |
+            DEVICE=cpu
+            BUILD_ID=${{ github.run_id }}
+            BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.metadata.outputs.tags }}
+            BUILD_SOURCE_REF=${{ github.ref_name }}
+            BUILD_SOURCE_COMMIT=${{ github.sha }}
+      - name: Export digest
+        run: |
+          mkdir -p ${{ runner.temp }}/digests
+          digest="${{ steps.build.outputs.digest }}"
+          touch "${{ runner.temp }}/digests/${digest#sha256:}"
+      - name: Upload digest
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
+        with:
+          name: server-digests-${{ env.PLATFORM_PAIR }}
+          path: ${{ runner.temp }}/digests/*
+          if-no-files-found: error
+          retention-days: 1
+  merge_server:
+    name: Merge & Push Server
+    runs-on: ubuntu-latest
+    if: ${{ needs.pre-job.outputs.should_run_server == 'true' && !github.event.pull_request.head.repo.fork }}
+    env:
+      GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-server
+      DOCKER_REPO: altran1502/immich-server
+    needs:
+      - build_and_push_server
+    steps:
+      - name: Download digests
+        uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
+        with:
+          path: ${{ runner.temp }}/digests
+          pattern: server-digests-*
+          merge-multiple: true
+      - name: Login to Docker Hub
+        if: ${{ github.event_name == 'release' }}
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+      - name: Login to GHCR
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
+      - name: Generate docker image tags
+        id: meta
+        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
+        env:
+          DOCKER_METADATA_PR_HEAD_SHA: 'true'
+        with:
+          flavor: |
+            # Disable latest tag
+            latest=false
+            suffix=${{ matrix.suffix }}
+          images: |
+            name=${{ env.GHCR_REPO }}
+            name=${{ env.DOCKER_REPO }},enable=${{ github.event_name == 'release' }}
+          tags: |
+            # Tag with branch name
+            type=ref,event=branch
+            # Tag with pr-number
+            type=ref,event=pr
+            # Tag with long commit sha hash
+            type=sha,format=long,prefix=commit-
+            # Tag with git tag on release
+            type=ref,event=tag
+            type=raw,value=release,enable=${{ github.event_name == 'release' }}
+      - name: Create manifest list and push
+        working-directory: ${{ runner.temp }}/digests
+        run: |
+          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
+            $(printf '${{ env.GHCR_REPO }}@sha256:%s ' *)
   success-check-server:
     name: Docker Build & Push Server Success
-    needs: [server, retag_server]
-    permissions: {}
+    needs: [merge_server, retag_server]
     runs-on: ubuntu-latest
     if: always()
     steps:
@@ -182,13 +455,11 @@ jobs:
         run: exit 1
       - name: All jobs passed or skipped
         if: ${{ !(contains(needs.*.result, 'failure')) }}
-        # zizmor: ignore[template-injection]
         run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
   success-check-ml:
     name: Docker Build & Push ML Success
-    needs: [machine-learning, retag_ml]
-    permissions: {}
+    needs: [merge_ml, retag_ml]
     runs-on: ubuntu-latest
     if: always()
     steps:
@@ -197,5 +468,4 @@ jobs:
         run: exit 1
       - name: All jobs passed or skipped
         if: ${{ !(contains(needs.*.result, 'failure')) }}
-        # zizmor: ignore[template-injection]
         run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"

View File

@@ -10,22 +10,16 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
-permissions: {}
 jobs:
   pre-job:
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     outputs:
       should_run: ${{ steps.found_paths.outputs.docs == 'true' || steps.should_force.outputs.should_force == 'true' }}
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
       - id: found_paths
-        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
+        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
         with:
           filters: |
             docs:
@@ -39,8 +33,6 @@ jobs:
   build:
     name: Docs Build
     needs: pre-job
-    permissions:
-      contents: read
     if: ${{ needs.pre-job.outputs.should_run == 'true' }}
     runs-on: ubuntu-latest
     defaults:
@@ -49,12 +41,10 @@ jobs:
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
       - name: Setup Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
         with:
           node-version-file: './docs/.nvmrc'
@@ -68,9 +58,8 @@ jobs:
         run: npm run build
       - name: Upload build output
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
         with:
           name: docs-build-output
           path: docs/build/
-          include-hidden-files: true
           retention-days: 1

View File

@@ -1,6 +1,6 @@
 name: Docs deploy
 on:
-  workflow_run: # zizmor: ignore[dangerous-triggers] no attacker inputs are used here
+  workflow_run:
     workflows: ['Docs build']
     types:
       - completed
@@ -9,9 +9,6 @@ jobs:
   checks:
     name: Docs Deploy Checks
     runs-on: ubuntu-latest
-    permissions:
-      actions: read
-      pull-requests: read
     outputs:
       parameters: ${{ steps.parameters.outputs.result }}
       artifact: ${{ steps.get-artifact.outputs.result }}
@@ -20,7 +17,7 @@ jobs:
         run: echo 'The triggering workflow did not succeed' && exit 1
       - name: Get artifact
         id: get-artifact
-        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
+        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
         with:
           script: |
             let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
@@ -38,9 +35,7 @@ jobs:
             return { found: true, id: matchArtifact.id };
       - name: Determine deploy parameters
         id: parameters
-        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
-        env:
-          HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
+        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
         with:
           script: |
             const eventType = context.payload.workflow_run.event;
@@ -62,8 +57,7 @@ jobs:
             } else if (eventType == "pull_request") {
               let pull_number = context.payload.workflow_run.pull_requests[0]?.number;
               if(!pull_number) {
-                const {HEAD_SHA} = process.env;
-                const response = await github.rest.search.issuesAndPullRequests({q: `repo:${{ github.repository }} is:pr sha:${HEAD_SHA}`,per_page: 1,})
+                const response = await github.rest.search.issuesAndPullRequests({q: 'repo:${{ github.repository }} is:pr sha:${{ github.event.workflow_run.head_sha }}',per_page: 1,})
                 const items = response.data.items
                 if (items.length < 1) {
                   throw new Error("No pull request found for the commit")
@@ -101,36 +95,30 @@ jobs:
     name: Docs Deploy
     runs-on: ubuntu-latest
     needs: checks
-    permissions:
-      contents: read
-      actions: read
-      pull-requests: write
     if: ${{ fromJson(needs.checks.outputs.artifact).found && fromJson(needs.checks.outputs.parameters).shouldDeploy }}
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
       - name: Load parameters
         id: parameters
-        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
-        env:
-          PARAM_JSON: ${{ needs.checks.outputs.parameters }}
+        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
         with:
           script: |
-            const parameters = JSON.parse(process.env.PARAM_JSON);
+            const json = `${{ needs.checks.outputs.parameters }}`;
+            const parameters = JSON.parse(json);
             core.setOutput("event", parameters.event);
             core.setOutput("name", parameters.name);
             core.setOutput("shouldDeploy", parameters.shouldDeploy);
-      - run: |
-          echo "Starting docs deployment for ${{ steps.parameters.outputs.event }} ${{ steps.parameters.outputs.name }}"
       - name: Download artifact
-        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
-        env:
-          ARTIFACT_JSON: ${{ needs.checks.outputs.artifact }}
+        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
         with:
           script: |
-            let artifact = JSON.parse(process.env.ARTIFACT_JSON);
+            let artifact = ${{ needs.checks.outputs.artifact }};
             let download = await github.rest.actions.downloadArtifact({
               owner: context.repo.owner,
               repo: context.repo.repo,
@@ -150,7 +138,7 @@ jobs:
           CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
           CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
           TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
-        uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
+        uses: gruntwork-io/terragrunt-action@9559e51d05873b0ea467c42bbabcb5c067642ccc # v2
         with:
           tg_version: '0.58.12'
           tofu_version: '1.7.1'
@@ -165,7 +153,7 @@ jobs:
          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
-        uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
+        uses: gruntwork-io/terragrunt-action@9559e51d05873b0ea467c42bbabcb5c067642ccc # v2
         with:
           tg_version: '0.58.12'
           tofu_version: '1.7.1'
@@ -174,15 +162,12 @@ jobs:
       - name: Output Cleaning
         id: clean
-        env:
-          TG_OUTPUT: ${{ steps.docs-output.outputs.tg_action_output }}
         run: |
-          CLEANED=$(echo "$TG_OUTPUT" | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
-          echo "output=$CLEANED" >> $GITHUB_OUTPUT
+          TG_OUT=$(echo '${{ steps.docs-output.outputs.tg_action_output }}' | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
+          echo "output=$TG_OUT" >> $GITHUB_OUTPUT
       - name: Publish to Cloudflare Pages
-        # TODO: Action is deprecated
-        uses: cloudflare/pages-action@f0a1cd58cd66095dee69bfa18fa5efd1dde93bca # v1.5.0
+        uses: cloudflare/pages-action@f0a1cd58cd66095dee69bfa18fa5efd1dde93bca # v1
         with:
           apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN_PAGES_UPLOAD }}
           accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
@@ -199,7 +184,7 @@ jobs:
           CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
           CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
-        uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
+        uses: gruntwork-io/terragrunt-action@9559e51d05873b0ea467c42bbabcb5c067642ccc # v2
         with:
           tg_version: '0.58.12'
           tofu_version: '1.7.1'
@@ -207,7 +192,7 @@ jobs:
           tg_command: 'apply'
       - name: Comment
-        uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0
+        uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3
         if: ${{ steps.parameters.outputs.event == 'pr' }}
         with:
           number: ${{ fromJson(needs.checks.outputs.parameters).pr_number }}
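The "Output Cleaning" step exists because the terragrunt action percent-encodes newlines (%0A) and angle brackets (%3C) in its captured output so it survives being passed around as a single step output; the sed call reverses that before jq re-compacts the JSON. A small runnable illustration with a made-up payload:

    RAW='{"pages_url":%0A"https://docs.example.com"}'  # hypothetical encoded terragrunt output
    echo "$RAW" | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .
    # {"pages_url":"https://docs.example.com"}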

View File

@@ -1,22 +1,15 @@
 name: Docs destroy
 on:
-  pull_request_target: # zizmor: ignore[dangerous-triggers] no attacker inputs are used here
+  pull_request_target:
     types: [closed]
-permissions: {}
 jobs:
   deploy:
     name: Docs Destroy
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
-      pull-requests: write
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
       - name: Destroy Docs Subdomain
         env:
@@ -25,7 +18,7 @@
           CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
           CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
           TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
-        uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
+        uses: gruntwork-io/terragrunt-action@9559e51d05873b0ea467c42bbabcb5c067642ccc # v2
         with:
           tg_version: '0.58.12'
           tofu_version: '1.7.1'
@@ -33,7 +26,7 @@
           tg_command: 'destroy -refresh=false'
       - name: Comment
-        uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0
+        uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3
         with:
           number: ${{ github.event.number }}
           delete: true

View File

@@ -4,32 +4,28 @@ on:
   pull_request:
     types: [labeled]
-permissions: {}
 jobs:
   fix-formatting:
     runs-on: ubuntu-latest
     if: ${{ github.event.label.name == 'fix:formatting' }}
     permissions:
-      contents: write
       pull-requests: write
     steps:
       - name: Generate a token
         id: generate-token
-        uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
+        uses: actions/create-github-app-token@d72941d797fd3113feb6b93fd0dec494b13a2547 # v1
         with:
           app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
           private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
       - name: 'Checkout'
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
         with:
           ref: ${{ github.event.pull_request.head.ref }}
           token: ${{ steps.generate-token.outputs.token }}
-          persist-credentials: true
       - name: Setup Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
         with:
           node-version-file: './server/.nvmrc'
@@ -37,13 +33,13 @@
         run: make install-all && make format-all
       - name: Commit and push
-        uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4
+        uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9
         with:
           default_author: github_actions
           message: 'chore: fix formatting'
       - name: Remove label
-        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
+        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
         if: always()
         with:
           script: |

.github/workflows/multi-runner-build.yml (deleted)

@@ -1,185 +0,0 @@
-name: 'Multi-runner container image build'
-on:
-  workflow_call:
-    inputs:
-      image:
-        description: 'Name of the image'
-        type: string
-        required: true
-      context:
-        description: 'Path to build context'
-        type: string
-        required: true
-      dockerfile:
-        description: 'Path to Dockerfile'
-        type: string
-        required: true
-      tag-suffix:
-        description: 'Suffix to append to the image tag'
-        type: string
-        default: ''
-      dockerhub-push:
-        description: 'Push to Docker Hub'
-        type: boolean
-        default: false
-      build-args:
-        description: 'Docker build arguments'
-        type: string
-        required: false
-      platforms:
-        description: 'Platforms to build for'
-        type: string
-      runner-mapping:
-        description: 'Mapping from platforms to runners'
-        type: string
-    secrets:
-      DOCKERHUB_USERNAME:
-        required: false
-      DOCKERHUB_TOKEN:
-        required: false
-env:
-  GHCR_IMAGE: ghcr.io/${{ github.repository_owner }}/${{ inputs.image }}
-  DOCKERHUB_IMAGE: altran1502/${{ inputs.image }}
-jobs:
-  matrix:
-    name: 'Generate matrix'
-    runs-on: ubuntu-latest
-    outputs:
-      matrix: ${{ steps.matrix.outputs.matrix }}
-      key: ${{ steps.artifact-key.outputs.base }}
-    steps:
-      - name: Generate build matrix
-        id: matrix
-        shell: bash
-        env:
-          PLATFORMS: ${{ inputs.platforms || 'linux/amd64,linux/arm64' }}
-          RUNNER_MAPPING: ${{ inputs.runner-mapping || '{"linux/amd64":"ubuntu-latest","linux/arm64":"ubuntu-24.04-arm"}' }}
-        run: |
-          matrix=$(jq -R -c \
-            --argjson runner_mapping "${RUNNER_MAPPING}" \
-            'split(",") | map({platform: ., runner: $runner_mapping[.]})' \
-            <<< "${PLATFORMS}")
-          echo "${matrix}"
-          echo "matrix=${matrix}" >> $GITHUB_OUTPUT
-      - name: Determine artifact key
-        id: artifact-key
-        shell: bash
-        env:
-          IMAGE: ${{ inputs.image }}
-          SUFFIX: ${{ inputs.tag-suffix }}
-        run: |
-          if [[ -n "${SUFFIX}" ]]; then
-            base="${IMAGE}${SUFFIX}-digests"
-          else
-            base="${IMAGE}-digests"
-          fi
-          echo "${base}"
-          echo "base=${base}" >> $GITHUB_OUTPUT
-  build:
-    needs: matrix
-    runs-on: ${{ matrix.runner }}
-    permissions:
-      contents: read
-      packages: write
-    strategy:
-      fail-fast: false
-      matrix:
-        include: ${{ fromJson(needs.matrix.outputs.matrix) }}
-    steps:
-      - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          persist-credentials: false
-      - uses: ./.github/actions/image-build
-        with:
-          context: ${{ inputs.context }}
-          dockerfile: ${{ inputs.dockerfile }}
-          image: ${{ env.GHCR_IMAGE }}
-          ghcr-token: ${{ secrets.GITHUB_TOKEN }}
-          platform: ${{ matrix.platform }}
-          artifact-key-base: ${{ needs.matrix.outputs.key }}
-          build-args: ${{ inputs.build-args }}
-  merge:
-    needs: [matrix, build]
-    runs-on: ubuntu-latest
-    if: ${{ !github.event.pull_request.head.repo.fork }}
-    permissions:
-      contents: read
-      actions: read
-      packages: write
-    steps:
-      - name: Download digests
-        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4
-        with:
-          path: ${{ runner.temp }}/digests
-          pattern: ${{ needs.matrix.outputs.key }}-*
-          merge-multiple: true
-      - name: Login to Docker Hub
-        if: ${{ inputs.dockerhub-push }}
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: Login to GHCR
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
-        with:
-          registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
-      - name: Generate docker image tags
-        id: meta
-        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
-        env:
-          DOCKER_METADATA_PR_HEAD_SHA: 'true'
-        with:
-          flavor: |
-            # Disable latest tag
-            latest=false
-            suffix=${{ inputs.tag-suffix }}
-          images: |
-            name=${{ env.GHCR_IMAGE }}
-            name=${{ env.DOCKERHUB_IMAGE }},enable=${{ inputs.dockerhub-push }}
-          tags: |
-            # Tag with branch name
-            type=ref,event=branch
-            # Tag with pr-number
-            type=ref,event=pr
-            # Tag with long commit sha hash
-            type=sha,format=long,prefix=commit-
-            # Tag with git tag on release
-            type=ref,event=tag
-            type=raw,value=release,enable=${{ github.event_name == 'release' }}
-      - name: Create manifest list and push
-        working-directory: ${{ runner.temp }}/digests
-        run: |
-          # Process annotations
-          declare -a ANNOTATIONS=()
-          if [[ -n "$DOCKER_METADATA_OUTPUT_JSON" ]]; then
-            while IFS= read -r annotation; do
-              # Extract key and value by removing the manifest: prefix
-              if [[ "$annotation" =~ ^manifest:(.+)=(.+)$ ]]; then
-                key="${BASH_REMATCH[1]}"
-                value="${BASH_REMATCH[2]}"
-                # Use array to properly handle arguments with spaces
-                ANNOTATIONS+=(--annotation "index:$key=$value")
-              fi
-            done < <(jq -r '.annotations[]' <<< "$DOCKER_METADATA_OUTPUT_JSON")
-          fi
-          TAGS=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON")
-          SOURCE_ARGS=$(printf "${GHCR_IMAGE}@sha256:%s " *)
-          docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS
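The deleted matrix job is the heart of the reusable workflow: it pairs each requested platform with a runner label from the mapping and emits a JSON include list for the build job. With the default inputs the jq call produces the following (runnable as-is, assuming only that jq is installed):

    PLATFORMS='linux/amd64,linux/arm64'
    RUNNER_MAPPING='{"linux/amd64":"ubuntu-latest","linux/arm64":"ubuntu-24.04-arm"}'
    jq -R -c --argjson runner_mapping "${RUNNER_MAPPING}" \
      'split(",") | map({platform: ., runner: $runner_mapping[.]})' <<< "${PLATFORMS}"
    # [{"platform":"linux/amd64","runner":"ubuntu-latest"},{"platform":"linux/arm64","runner":"ubuntu-24.04-arm"}]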

View File

@@ -1,11 +1,9 @@
 name: PR Label Validation
 on:
-  pull_request_target: # zizmor: ignore[dangerous-triggers] no attacker inputs are used here
+  pull_request_target:
     types: [opened, labeled, unlabeled, synchronize]
-permissions: {}
 jobs:
   validate-release-label:
     runs-on: ubuntu-latest
@@ -14,7 +12,7 @@
       pull-requests: write
     steps:
       - name: Require PR to have a changelog label
-        uses: mheap/github-action-required-labels@388fd6af37b34cdfe5a23b37060e763217e58b03 # v5.5.0
+        uses: mheap/github-action-required-labels@388fd6af37b34cdfe5a23b37060e763217e58b03 # v5
         with:
           mode: exactly
           count: 1

View File

@@ -1,8 +1,6 @@
 name: 'Pull Request Labeler'
 on:
-  - pull_request_target # zizmor: ignore[dangerous-triggers] no attacker inputs are used here
+  - pull_request_target
-permissions: {}
 jobs:
   labeler:
@@ -11,4 +9,4 @@
       pull-requests: write
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0
+      - uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5

View File

@@ -4,13 +4,9 @@ on:
   pull_request:
     types: [opened, synchronize, reopened, edited]
-permissions: {}
 jobs:
   validate-pr-title:
     runs-on: ubuntu-latest
-    permissions:
-      pull-requests: write
     steps:
       - name: PR Conventional Commit Validation
         uses: ytanikin/PRConventionalCommits@b628c5a234cc32513014b7bfdd1e47b532124d98 # 1.3.0

View File

@@ -21,40 +21,35 @@ concurrency:
 group: ${{ github.workflow }}-${{ github.ref }}-root
 cancel-in-progress: true
-permissions: {}
 jobs:
   bump_version:
     runs-on: ubuntu-latest
     outputs:
       ref: ${{ steps.push-tag.outputs.commit_long_sha }}
-    permissions: {} # No job-level permissions are needed because it uses the app-token
     steps:
       - name: Generate a token
         id: generate-token
-        uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
+        uses: actions/create-github-app-token@d72941d797fd3113feb6b93fd0dec494b13a2547 # v1
         with:
           app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
           private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
       - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
         with:
           token: ${{ steps.generate-token.outputs.token }}
-          persist-credentials: true
       - name: Install uv
-        uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
+        uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5
       - name: Bump version
-        env:
-          SERVER_BUMP: ${{ inputs.serverBump }}
-          MOBILE_BUMP: ${{ inputs.mobileBump }}
-        run: misc/release/pump-version.sh -s "${SERVER_BUMP}" -m "${MOBILE_BUMP}"
+        run: misc/release/pump-version.sh -s "${{ inputs.serverBump }}" -m "${{ inputs.mobileBump }}"
       - name: Commit and tag
         id: push-tag
-        uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4
+        uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9
         with:
           default_author: github_actions
           message: 'chore: version ${{ env.IMMICH_VERSION }}'
@@ -64,47 +59,37 @@ jobs:
   build_mobile:
     uses: ./.github/workflows/build-mobile.yml
     needs: bump_version
-    permissions:
-      contents: read
-    secrets:
-      KEY_JKS: ${{ secrets.KEY_JKS }}
-      ALIAS: ${{ secrets.ALIAS }}
-      ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
-      ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
+    secrets: inherit
     with:
       ref: ${{ needs.bump_version.outputs.ref }}
   prepare_release:
     runs-on: ubuntu-latest
     needs: build_mobile
-    permissions:
-      actions: read # To download the app artifact
-      # No content permissions are needed because it uses the app-token
     steps:
       - name: Generate a token
         id: generate-token
-        uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
+        uses: actions/create-github-app-token@d72941d797fd3113feb6b93fd0dec494b13a2547 # v1
         with:
           app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
           private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
       - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
         with:
           token: ${{ steps.generate-token.outputs.token }}
-          persist-credentials: false
       - name: Download APK
-        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+        uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
         with:
           name: release-apk-signed
       - name: Create draft release
-        uses: softprops/action-gh-release@da05d552573ad5aba039eaac05058a918a7bf631 # v2.2.2
+        uses: softprops/action-gh-release@c95fe1489396fe8a9eb87c0abf8aa5b2ef267fda # v2
         with:
           draft: true
           tag_name: ${{ env.IMMICH_VERSION }}
-          token: ${{ steps.generate-token.outputs.token }}
           generate_release_notes: true
           body_path: misc/release/notes.tmpl
           files: |

View File

@@ -4,8 +4,6 @@ on:
   pull_request:
     types: [labeled, closed]
-permissions: {}
 jobs:
   comment-status:
     runs-on: ubuntu-latest
@@ -13,7 +11,7 @@ jobs:
     permissions:
       pull-requests: write
     steps:
-      - uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
+      - uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2
         with:
           message-id: 'preview-status'
           message: 'Deploying preview environment to https://pr-${{ github.event.pull_request.number }}.preview.internal.immich.cloud/'
@@ -24,7 +22,7 @@ jobs:
     permissions:
       pull-requests: write
     steps:
-      - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
+      - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
         with:
           script: |
             github.rest.issues.removeLabel({

View File

@@ -4,24 +4,20 @@ on:
   release:
     types: [published]
-permissions: {}
+permissions:
+  packages: write
 jobs:
   publish:
     name: Publish `@immich/sdk`
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     defaults:
       run:
         working-directory: ./open-api/typescript-sdk
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          persist-credentials: false
       # Setup .npmrc file to publish to npm
-      - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+      - uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
         with:
           node-version-file: './open-api/typescript-sdk/.nvmrc'
           registry-url: 'https://registry.npmjs.org'

View File

@@ -9,22 +9,16 @@ concurrency:
 group: ${{ github.workflow }}-${{ github.ref }}
 cancel-in-progress: true
-permissions: {}
 jobs:
   pre-job:
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     outputs:
       should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          persist-credentials: false
       - id: found_paths
-        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
+        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
         with:
           filters: |
             mobile:
@@ -39,17 +33,15 @@
     name: Run Dart Code Analysis
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run == 'true' }}
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          persist-credentials: false
       - name: Setup Flutter SDK
-        uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2.19.0
+        uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2
         with:
           channel: 'stable'
           flutter-version-file: ./mobile/pubspec.yaml
@@ -58,16 +50,12 @@
         run: dart pub get
         working-directory: ./mobile
-      - name: Generate translation file
-        run: make translation; dart format lib/generated/codegen_loader.g.dart
-        working-directory: ./mobile
       - name: Run Build Runner
         run: make build
         working-directory: ./mobile
       - name: Find file changes
-        uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
+        uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20
         id: verify-changed-files
         with:
           files: |
@@ -77,11 +65,9 @@
       - name: Verify files have not changed
         if: steps.verify-changed-files.outputs.files_changed == 'true'
-        env:
-          CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
         run: |
           echo "ERROR: Generated files not up to date! Run make_build inside the mobile directory"
-          echo "Changed files: ${CHANGED_FILES}"
+          echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
           exit 1
       - name: Run dart analyze
@@ -95,30 +81,3 @@
       - name: Run dart custom_lint
         run: dart run custom_lint
         working-directory: ./mobile
-  zizmor:
-    name: zizmor
-    runs-on: ubuntu-latest
-    permissions:
-      security-events: write
-      contents: read
-      actions: read
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
-      - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
-      - name: Run zizmor 🌈
-        run: uvx zizmor --format=sarif . > results.sarif
-        env:
-          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Upload SARIF file
-        uses: github/codeql-action/upload-sarif@60168efe1c415ce0f5521ea06d5c2062adbeed1b # v3.28.17
-        with:
-          sarif_file: results.sarif
-          category: zizmor

View File

@@ -9,15 +9,10 @@ concurrency:
 group: ${{ github.workflow }}-${{ github.ref }}
 cancel-in-progress: true
-permissions: {}
 jobs:
   pre-job:
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     outputs:
-      should_run_i18n: ${{ steps.found_paths.outputs.i18n == 'true' || steps.should_force.outputs.should_force == 'true' }}
       should_run_web: ${{ steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }}
       should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
       should_run_cli: ${{ steps.found_paths.outputs.cli == 'true' || steps.should_force.outputs.should_force == 'true' }}
@@ -29,16 +24,11 @@ jobs:
       should_run_.github: ${{ steps.found_paths.outputs['.github'] == 'true' || steps.should_force.outputs.should_force == 'true' }} # redundant to have should_force but if someone changes the trigger then this won't have to be changed
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          persist-credentials: false
       - id: found_paths
-        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
+        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
         with:
           filters: |
-            i18n:
-              - 'i18n/**'
             web:
               - 'web/**'
               - 'i18n/**'
@@ -68,20 +58,16 @@ jobs:
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     defaults:
       run:
         working-directory: ./server
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          persist-credentials: false
       - name: Setup Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
         with:
           node-version-file: './server/.nvmrc'
@@ -109,20 +95,16 @@ jobs:
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     defaults:
       run:
         working-directory: ./cli
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          persist-credentials: false
       - name: Setup Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
         with:
           node-version-file: './cli/.nvmrc'
@@ -154,20 +136,16 @@ jobs:
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
     runs-on: windows-latest
-    permissions:
-      contents: read
     defaults:
       run:
         working-directory: ./cli
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          persist-credentials: false
       - name: Setup Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
         with:
           node-version-file: './cli/.nvmrc'
@@ -187,25 +165,21 @@ jobs:
         run: npm run test:cov
         if: ${{ !cancelled() }}
-  web-lint:
-    name: Lint Web
+  web-unit-tests:
+    name: Test & Lint Web
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_web == 'true' }}
-    runs-on: mich
+    runs-on: ubuntu-latest
-    permissions:
-      contents: read
     defaults:
       run:
         working-directory: ./web
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          persist-credentials: false
       - name: Setup Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
         with:
           node-version-file: './web/.nvmrc'
@@ -217,7 +191,7 @@ jobs:
         run: npm ci
       - name: Run linter
-        run: npm run lint:p
+        run: npm run lint
         if: ${{ !cancelled() }}
       - name: Run formatter
@@ -228,35 +202,6 @@ jobs:
         run: npm run check:svelte
         if: ${{ !cancelled() }}
-  web-unit-tests:
-    name: Test Web
-    needs: pre-job
-    if: ${{ needs.pre-job.outputs.should_run_web == 'true' }}
-    runs-on: ubuntu-latest
-    permissions:
-      contents: read
-    defaults:
-      run:
-        working-directory: ./web
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
-      - name: Setup Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
-        with:
-          node-version-file: './web/.nvmrc'
-      - name: Run setup typescript-sdk
-        run: npm ci && npm run build
-        working-directory: ./open-api/typescript-sdk
-      - name: Run npm install
-        run: npm ci
       - name: Run tsc
         run: npm run check:typescript
         if: ${{ !cancelled() }}
@@ -265,65 +210,21 @@ jobs:
         run: npm run test:cov
         if: ${{ !cancelled() }}
-  i18n-tests:
-    name: Test i18n
-    needs: pre-job
-    if: ${{ needs.pre-job.outputs.should_run_i18n == 'true' }}
-    runs-on: ubuntu-latest
-    permissions:
-      contents: read
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
-      - name: Setup Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
-        with:
-          node-version-file: './web/.nvmrc'
-      - name: Install dependencies
-        run: npm --prefix=web ci
-      - name: Format
-        run: npm --prefix=web run format:i18n
-      - name: Find file changes
-        uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
-        id: verify-changed-files
-        with:
-          files: |
-            i18n/**
-      - name: Verify files have not changed
-        if: steps.verify-changed-files.outputs.files_changed == 'true'
-        env:
-          CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
-        run: |
-          echo "ERROR: i18n files not up to date!"
-          echo "Changed files: ${CHANGED_FILES}"
-          exit 1
   e2e-tests-lint:
     name: End-to-End Lint
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_e2e == 'true' }}
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     defaults:
       run:
         working-directory: ./e2e
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          persist-credentials: false
       - name: Setup Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
         with:
           node-version-file: './e2e/.nvmrc'
@@ -353,20 +254,16 @@ jobs:
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     defaults:
       run:
         working-directory: ./server
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          persist-credentials: false
       - name: Setup Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
         with:
           node-version-file: './server/.nvmrc'
@@ -381,25 +278,19 @@ jobs:
     name: End-to-End Tests (Server & CLI)
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_e2e_server_cli == 'true' }}
-    runs-on: ${{ matrix.runner }}
+    runs-on: mich
-    permissions:
-      contents: read
     defaults:
       run:
         working-directory: ./e2e
-    strategy:
-      matrix:
-        runner: [ubuntu-latest, ubuntu-24.04-arm]
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
         with:
-          persist-credentials: false
           submodules: 'recursive'
       - name: Setup Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
         with:
           node-version-file: './e2e/.nvmrc'
@@ -429,25 +320,19 @@ jobs:
     name: End-to-End Tests (Web)
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_e2e_web == 'true' }}
-    runs-on: ${{ matrix.runner }}
+    runs-on: mich
-    permissions:
-      contents: read
     defaults:
       run:
         working-directory: ./e2e
-    strategy:
-      matrix:
-        runner: [ubuntu-latest, ubuntu-24.04-arm]
     steps:
      - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
         with:
-          persist-credentials: false
           submodules: 'recursive'
       - name: Setup Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
         with:
           node-version-file: './e2e/.nvmrc'
@@ -472,43 +357,18 @@ jobs:
         run: npx playwright test
         if: ${{ !cancelled() }}
-  success-check-e2e:
-    name: End-to-End Tests Success
-    needs: [e2e-tests-server-cli, e2e-tests-web]
-    permissions: {}
-    runs-on: ubuntu-latest
-    if: always()
-    steps:
-      - name: Any jobs failed?
-        if: ${{ contains(needs.*.result, 'failure') }}
-        run: exit 1
-      - name: All jobs passed or skipped
-        if: ${{ !(contains(needs.*.result, 'failure')) }}
-        # zizmor: ignore[template-injection]
-        run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
   mobile-unit-tests:
     name: Unit Test Mobile
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_mobile == 'true' }}
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          persist-credentials: false
      - name: Setup Flutter SDK
-        uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2.19.0
+        uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2
         with:
           channel: 'stable'
           flutter-version-file: ./mobile/pubspec.yaml
-      - name: Generate translation file
-        run: make translation
-        working-directory: ./mobile
       - name: Run tests
         working-directory: ./mobile
         run: flutter test -j 1
@@ -518,19 +378,14 @@ jobs:
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     defaults:
       run:
         working-directory: ./machine-learning
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          persist-credentials: false
       - name: Install uv
-        uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
+        uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5
-      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5
         # TODO: add caching when supported (https://github.com/actions/setup-python/pull/818)
         # with:
         #   python-version: 3.11
@@ -556,20 +411,16 @@ jobs:
     needs: pre-job
     if: ${{ needs.pre-job.outputs['should_run_.github'] == 'true' }}
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     defaults:
       run:
         working-directory: ./.github
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          persist-credentials: false
       - name: Setup Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
         with:
           node-version-file: './.github/.nvmrc'
@@ -583,34 +434,25 @@ jobs:
   shellcheck:
     name: ShellCheck
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          persist-credentials: false
       - name: Run ShellCheck
         uses: ludeeus/action-shellcheck@master
         with:
           ignore_paths: >-
             **/open-api/**
-            **/openapi**
+            **/openapi/**
             **/node_modules/**
   generated-api-up-to-date:
     name: OpenAPI Clients
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          persist-credentials: false
       - name: Setup Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
         with:
           node-version-file: './server/.nvmrc'
@@ -624,7 +466,7 @@ jobs:
         run: make open-api
       - name: Find file changes
-        uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
+        uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20
         id: verify-changed-files
         with:
           files: |
@@ -634,21 +476,17 @@ jobs:
       - name: Verify files have not changed
         if: steps.verify-changed-files.outputs.files_changed == 'true'
-        env:
-          CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
         run: |
           echo "ERROR: Generated files not up to date!"
-          echo "Changed files: ${CHANGED_FILES}"
+          echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
           exit 1
-  sql-schema-up-to-date:
-    name: SQL Schema Checks
+  generated-typeorm-migrations-up-to-date:
+    name: TypeORM Checks
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     services:
       postgres:
-        image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:14bec5d02e8704081eafd566029204a4eb6bb75f3056cfb34e81c5ab1657a490
+        image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
         env:
           POSTGRES_PASSWORD: postgres
           POSTGRES_USER: postgres
@@ -666,12 +504,10 @@ jobs:
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          persist-credentials: false
       - name: Setup Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
         with:
           node-version-file: './server/.nvmrc'
@@ -682,28 +518,26 @@ jobs:
         run: npm run build
       - name: Run existing migrations
-        run: npm run migrations:run
+        run: npm run typeorm:migrations:run
       - name: Test npm run schema:reset command works
-        run: npm run schema:reset
+        run: npm run typeorm:schema:reset
       - name: Generate new migrations
         continue-on-error: true
-        run: npm run migrations:generate src/TestMigration
+        run: npm run migrations:generate TestMigration
       - name: Find file changes
-        uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
+        uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20
         id: verify-changed-files
         with:
           files: |
-            server/src
+            server/src/migrations/
       - name: Verify migration files have not changed
         if: steps.verify-changed-files.outputs.files_changed == 'true'
-        env:
-          CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
         run: |
           echo "ERROR: Generated migration files not up to date!"
-          echo "Changed files: ${CHANGED_FILES}"
+          echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
           cat ./src/*-TestMigration.ts
           exit 1
@@ -713,7 +547,7 @@ jobs:
           DB_URL: postgres://postgres:postgres@localhost:5432/immich
       - name: Find file changes
-        uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
+        uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20
         id: verify-changed-sql-files
         with:
          files: |
@@ -721,11 +555,9 @@ jobs:
       - name: Verify SQL files have not changed
         if: steps.verify-changed-sql-files.outputs.files_changed == 'true'
-        env:
-          CHANGED_FILES: ${{ steps.verify-changed-sql-files.outputs.changed_files }}
         run: |
           echo "ERROR: Generated SQL files not up to date!"
-          echo "Changed files: ${CHANGED_FILES}"
+          echo "Changed files: ${{ steps.verify-changed-sql-files.outputs.changed_files }}"
           exit 1
 # mobile-integration-tests:

View File

@@ -4,32 +4,30 @@ on:
   pull_request:
     branches: [main]
-permissions: {}
 jobs:
   pre-job:
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
     outputs:
       should_run: ${{ steps.found_paths.outputs.i18n == 'true' && github.head_ref != 'chore/translations'}}
     steps:
       - name: Checkout code
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
-        with:
-          persist-credentials: false
       - id: found_paths
-        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
+        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
         with:
           filters: |
             i18n:
               - 'i18n/!(en)**\.json'
-      - name: Debug
-        run: |
-          echo "Should run: ${{ steps.found_paths.outputs.i18n == 'true' && github.head_ref != 'chore/translations'}}"
-          echo "Found i18n paths: ${{ steps.found_paths.outputs.i18n }}"
-          echo "Head ref: ${{ github.head_ref }}"
   enforce-lock:
     name: Check Weblate Lock
     needs: [pre-job]
     runs-on: ubuntu-latest
-    permissions: {}
     if: ${{ needs.pre-job.outputs.should_run == 'true' }}
     steps:
       - name: Check weblate lock
@@ -38,7 +36,7 @@ jobs:
           exit 1
         fi
       - name: Find Pull Request
-        uses: juliangruber/find-pull-request-action@48b6133aa6c826f267ebd33aa2d29470f9d9e7d0 # v1.9.0
+        uses: juliangruber/find-pull-request-action@48b6133aa6c826f267ebd33aa2d29470f9d9e7d0 # v1
         id: find-pr
         with:
           branch: chore/translations
@@ -49,7 +47,6 @@ jobs:
     name: Weblate Lock Check Success
     needs: [enforce-lock]
     runs-on: ubuntu-latest
-    permissions: {}
     if: always()
     steps:
       - name: Any jobs failed?
@@ -57,5 +54,4 @@ jobs:
         run: exit 1
       - name: All jobs passed or skipped
         if: ${{ !(contains(needs.*.result, 'failure')) }}
-        # zizmor: ignore[template-injection]
         run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"

75
.vscode/settings.json vendored
View File

@@ -1,63 +1,44 @@
 {
+  "editor.formatOnSave": true,
+  "[javascript]": {
+    "editor.defaultFormatter": "esbenp.prettier-vscode",
+    "editor.tabSize": 2,
+    "editor.formatOnSave": true
+  },
+  "[typescript]": {
+    "editor.defaultFormatter": "esbenp.prettier-vscode",
+    "editor.tabSize": 2,
+    "editor.formatOnSave": true
+  },
   "[css]": {
     "editor.defaultFormatter": "esbenp.prettier-vscode",
-    "editor.formatOnSave": true,
+    "editor.tabSize": 2,
+    "editor.formatOnSave": true
+  },
+  "[svelte]": {
+    "editor.defaultFormatter": "svelte.svelte-vscode",
     "editor.tabSize": 2
   },
+  "svelte.enable-ts-plugin": true,
+  "eslint.validate": [
+    "javascript",
+    "svelte"
+  ],
+  "typescript.preferences.importModuleSpecifier": "non-relative",
   "[dart]": {
-    "editor.defaultFormatter": "Dart-Code.dart-code",
     "editor.formatOnSave": true,
     "editor.selectionHighlight": false,
     "editor.suggest.snippetsPreventQuickSuggestions": false,
     "editor.suggestSelection": "first",
     "editor.tabCompletion": "onlySnippets",
-    "editor.wordBasedSuggestions": "off"
+    "editor.wordBasedSuggestions": "off",
+    "editor.defaultFormatter": "Dart-Code.dart-code"
   },
-  "[javascript]": {
-    "editor.codeActionsOnSave": {
-      "source.organizeImports": "explicit",
-      "source.removeUnusedImports": "explicit"
-    },
-    "editor.defaultFormatter": "esbenp.prettier-vscode",
-    "editor.formatOnSave": true,
-    "editor.tabSize": 2
-  },
-  "[json]": {
-    "editor.defaultFormatter": "esbenp.prettier-vscode",
-    "editor.formatOnSave": true,
-    "editor.tabSize": 2
-  },
-  "[jsonc]": {
-    "editor.defaultFormatter": "esbenp.prettier-vscode",
-    "editor.formatOnSave": true,
-    "editor.tabSize": 2
-  },
-  "[svelte]": {
-    "editor.codeActionsOnSave": {
-      "source.organizeImports": "explicit",
-      "source.removeUnusedImports": "explicit"
-    },
-    "editor.defaultFormatter": "svelte.svelte-vscode",
-    "editor.formatOnSave": true,
-    "editor.tabSize": 2
-  },
-  "[typescript]": {
-    "editor.codeActionsOnSave": {
-      "source.organizeImports": "explicit",
-      "source.removeUnusedImports": "explicit"
-    },
-    "editor.defaultFormatter": "esbenp.prettier-vscode",
-    "editor.formatOnSave": true,
-    "editor.tabSize": 2
-  },
-  "cSpell.words": ["immich"],
-  "editor.formatOnSave": true,
-  "eslint.validate": ["javascript", "svelte"],
+  "cSpell.words": [
+    "immich"
+  ],
   "explorer.fileNesting.enabled": true,
   "explorer.fileNesting.patterns": {
-    "*.dart": "${capture}.g.dart,${capture}.gr.dart,${capture}.drift.dart",
     "*.ts": "${capture}.spec.ts,${capture}.mock.ts"
-  },
-  "svelte.enable-ts-plugin": true,
-  "typescript.preferences.importModuleSpecifier": "non-relative"
+  }
 }

View File

@@ -17,9 +17,6 @@ e2e:
 prod:
 	docker compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans
-prod-down:
-	docker compose -f ./docker/docker-compose.prod.yml down --remove-orphans
 prod-scale:
 	docker compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans

View File

@@ -61,7 +61,9 @@
 ## Demo
-Access the demo [here](https://demo.immich.app). For the mobile app, you can use `https://demo.immich.app` for the `Server Endpoint URL`.
+Access the demo [here](https://demo.immich.app). The demo is running on a Free-tier Oracle VM in Amsterdam with a 2.4Ghz quad-core ARM64 CPU and 24GB RAM.
+For the mobile app, you can use `https://demo.immich.app` for the `Server Endpoint URL`
 ### Login credentials

View File

@@ -1 +1 @@
-22.15.1
+22.14.0

View File

@@ -1,4 +1,4 @@
-FROM node:22.15.0-alpine3.20@sha256:686b8892b69879ef5bfd6047589666933508f9a5451c67320df3070ba0e9807b AS core
+FROM node:22.14.0-alpine3.20@sha256:40be979442621049f40b1d51a26b55e281246b5de4e5f51a18da7beb6e17e3f9 AS core
 WORKDIR /usr/src/open-api/typescript-sdk
 COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./

2376
cli/package-lock.json generated

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
 {
   "name": "@immich/cli",
-  "version": "2.2.66",
+  "version": "2.2.60",
   "description": "Command Line Interface (CLI) for Immich",
   "type": "module",
   "exports": "./dist/index.js",
@@ -21,7 +21,7 @@
     "@types/lodash-es": "^4.17.12",
     "@types/micromatch": "^4.0.9",
     "@types/mock-fs": "^4.13.1",
-    "@types/node": "^22.15.18",
+    "@types/node": "^22.13.14",
     "@vitest/coverage-v8": "^3.0.0",
     "byte-size": "^9.0.0",
     "cli-progress": "^3.12.0",
@@ -69,6 +69,6 @@
     "micromatch": "^4.0.8"
   },
   "volta": {
-    "node": "22.15.1"
+    "node": "22.14.0"
   }
 }

View File

@@ -116,13 +116,13 @@ services:
   redis:
     container_name: immich_redis
-    image: docker.io/valkey/valkey:8-bookworm@sha256:ff21bc0f8194dc9c105b769aeabf9585fea6a8ed649c0781caeac5cb3c247884
+    image: redis:6.2-alpine@sha256:148bb5411c184abd288d9aaed139c98123eeb8824c5d3fce03cf721db58066d8
     healthcheck:
       test: redis-cli ping || exit 1
   database:
     container_name: immich_postgres
-    image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0-pgvectors0.2.0
+    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
     env_file:
       - .env
     environment:
@@ -134,6 +134,24 @@ services:
       - ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
     ports:
       - 5432:5432
+    healthcheck:
+      test: >-
+        pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;
+        Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align
+        --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";
+        echo "checksum failure count is $$Chksum";
+        [ "$$Chksum" = '0' ] || exit 1
+      interval: 5m
+      start_interval: 30s
+      start_period: 5m
+    command: >-
+      postgres
+      -c shared_preload_libraries=vectors.so
+      -c 'search_path="$$user", public, vectors'
+      -c logging_collector=on
+      -c max_wal_size=2GB
+      -c shared_buffers=512MB
+      -c wal_compression=on
 # set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
 # immich-prometheus:
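The Postgres healthcheck on the right-hand side goes beyond `pg_isready`: it also fails the container whenever `pg_stat_database` reports any data-checksum failures. To spot-check it on a running stack, something like the following can be used (the container name comes from `container_name` above):

```bash
# Should print "healthy" once the start period has passed.
docker inspect --format '{{.State.Health.Status}}' immich_postgres
```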

View File

@@ -56,14 +56,14 @@ services:
   redis:
     container_name: immich_redis
-    image: docker.io/valkey/valkey:8-bookworm@sha256:ff21bc0f8194dc9c105b769aeabf9585fea6a8ed649c0781caeac5cb3c247884
+    image: redis:6.2-alpine@sha256:148bb5411c184abd288d9aaed139c98123eeb8824c5d3fce03cf721db58066d8
     healthcheck:
       test: redis-cli ping || exit 1
     restart: always
   database:
     container_name: immich_postgres
-    image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0-pgvectors0.2.0
+    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
     env_file:
       - .env
     environment:
@@ -75,6 +75,14 @@ services:
       - ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
     ports:
       - 5432:5432
+    healthcheck:
+      test: >-
+        pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1; Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
+      interval: 5m
+      start_interval: 30s
+      start_period: 5m
+    command: >-
+      postgres -c shared_preload_libraries=vectors.so -c 'search_path="$$user", public, vectors' -c logging_collector=on -c max_wal_size=2GB -c shared_buffers=512MB -c wal_compression=on
     restart: always
 # set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
@@ -82,7 +90,7 @@ services:
     container_name: immich_prometheus
     ports:
       - 9090:9090
-    image: prom/prometheus@sha256:78ed1f9050eb9eaf766af6e580230b1c4965728650e332cd1ee918c0c4699775
+    image: prom/prometheus@sha256:502ad90314c7485892ce696cb14a99fceab9fc27af29f4b427f41bd39701a199
     volumes:
       - ./prometheus.yml:/etc/prometheus/prometheus.yml
       - prometheus-data:/prometheus
@@ -94,7 +102,7 @@ services:
     command: [ './run.sh', '-disable-reporting' ]
     ports:
       - 3000:3000
-    image: grafana/grafana:11.6.1-ubuntu@sha256:6fc273288470ef499dd3c6b36aeade093170d4f608f864c5dd3a7fabeae77b50
+    image: grafana/grafana:11.5.2-ubuntu@sha256:8b5858c447e06fd7a89006b562ba7bba7c4d5813600c7982374c41852adefaeb
     volumes:
       - grafana-data:/var/lib/grafana

View File

@@ -49,24 +49,30 @@ services:
   redis:
     container_name: immich_redis
-    image: docker.io/valkey/valkey:8-bookworm@sha256:ff21bc0f8194dc9c105b769aeabf9585fea6a8ed649c0781caeac5cb3c247884
+    image: docker.io/redis:6.2-alpine@sha256:148bb5411c184abd288d9aaed139c98123eeb8824c5d3fce03cf721db58066d8
     healthcheck:
       test: redis-cli ping || exit 1
     restart: always
   database:
     container_name: immich_postgres
-    image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0-pgvectors0.2.0
+    image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
     environment:
       POSTGRES_PASSWORD: ${DB_PASSWORD}
       POSTGRES_USER: ${DB_USERNAME}
       POSTGRES_DB: ${DB_DATABASE_NAME}
       POSTGRES_INITDB_ARGS: '--data-checksums'
-      # Uncomment the DB_STORAGE_TYPE: 'HDD' var if your database isn't stored on SSDs
-      # DB_STORAGE_TYPE: 'HDD'
     volumes:
       # Do not edit the next line. If you want to change the database storage location on your system, edit the value of DB_DATA_LOCATION in the .env file
       - ${DB_DATA_LOCATION}:/var/lib/postgresql/data
+    healthcheck:
+      test: >-
+        pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1; Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
+      interval: 5m
+      start_interval: 30s
+      start_period: 5m
+    command: >-
+      postgres -c shared_preload_libraries=vectors.so -c 'search_path="$$user", public, vectors' -c logging_collector=on -c max_wal_size=2GB -c shared_buffers=512MB -c wal_compression=on
     restart: always
 volumes:

View File

@@ -2,8 +2,7 @@
 # The location where your uploaded files are stored
 UPLOAD_LOCATION=./library
-# The location where your database files are stored. Network shares are not supported for the database
+# The location where your database files are stored
 DB_DATA_LOCATION=./postgres
 # To set a timezone, uncomment the next line and change Etc/UTC to a TZ identifier from this list: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List

View File

@@ -1 +1 @@
-22.15.1
+22.14.0

View File

@@ -23,32 +23,23 @@ Refer to the official [postgres documentation](https://www.postgresql.org/docs/c
 It is not recommended to directly backup the `DB_DATA_LOCATION` folder. Doing so while the database is running can lead to a corrupted backup that cannot be restored.
 :::
-### Automatic Database Dumps
-:::warning
-The automatic database dumps can be used to restore the database in the event of damage to the Postgres database files.
-There is no monitoring for these dumps and you will not be notified if they are unsuccessful.
-:::
-:::caution
-The database dumps do **NOT** contain any pictures or videos, only metadata. They are only usable with a copy of the other files in `UPLOAD_LOCATION` as outlined below.
-:::
-For disaster-recovery purposes, Immich will automatically create database dumps. The dumps are stored in `UPLOAD_LOCATION/backups`.
-Please be sure to make your own, independent backup of the database together with the asset folders as noted below.
-You can adjust the schedule and amount of kept database dumps in the [admin settings](http://my.immich.app/admin/system-settings?isOpen=backup).
-By default, Immich will keep the last 14 database dumps and create a new dump every day at 2:00 AM.
-#### Trigger Dump
-You are able to trigger a database dump in the [admin job status page](http://my.immich.app/admin/jobs-status).
-Visit the page, open the "Create job" modal from the top right, select "Create Database Dump" and click "Confirm".
-A job will run and trigger a dump, you can verify this worked correctly by checking the logs or the `backups/` folder.
-These dumps will count towards the last `X` dumps that will be kept based on your settings.
+### Automatic Database Backups
+For convenience, Immich will automatically create database backups by default. The backups are stored in `UPLOAD_LOCATION/backups`.
+As mentioned above, you should make your own backup of these together with the asset folders as noted below.
+You can adjust the schedule and amount of kept backups in the [admin settings](http://my.immich.app/admin/system-settings?isOpen=backup).
+By default, Immich will keep the last 14 backups and create a new backup every day at 2:00 AM.
+#### Trigger Backup
+You are able to trigger a backup in the [admin job status page](http://my.immich.app/admin/jobs-status).
+Visit the page, open the "Create job" modal from the top right, select "Backup Database" and click "Confirm".
+A job will run and trigger a backup, you can verify this worked correctly by checking the logs or the backup folder.
+This backup will count towards the last X backups that will be kept based on your settings.
 #### Restoring
-We hope to make restoring simpler in future versions, for now you can find the database dumps in the `UPLOAD_LOCATION/backups` folder on your host.
+We hope to make restoring simpler in future versions, for now you can find the backups in the `UPLOAD_LOCATION/backups` folder on your host.
 Then please follow the steps in the following section for restoring the database.
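For illustration, restoring one of these dumps generally looks like the following sketch; the dump filename, upload location, and container name are assumptions and will differ per setup:

```bash
# Pipe a compressed dump into the Postgres container from the compose stack.
gunzip < /path/to/UPLOAD_LOCATION/backups/immich-db-backup-1712000000000.sql.gz \
  | docker exec -i immich_postgres psql --username=postgres
```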
 ### Manual Backup and Restore

Binary file not shown (image changed; before: 16 KiB, after: 12 KiB)

View File

@@ -10,16 +10,12 @@ Running with a pre-existing Postgres server can unlock powerful administrative f
 ## Prerequisites
-You must install `pgvector` (`>= 0.7.0, < 1.0.0`), as it is a prerequisite for `vchord`.
-The easiest way to do this on Debian/Ubuntu is by adding the [PostgreSQL Apt repository][pg-apt] and then
-running `apt install postgresql-NN-pgvector`, where `NN` is your Postgres version (e.g., `16`).
-You must install VectorChord into your instance of Postgres using their [instructions][vchord-install]. After installation, add `shared_preload_libraries = 'vchord.so'` to your `postgresql.conf`. If you already have some `shared_preload_libraries` set, you can separate each extension with a comma. For example, `shared_preload_libraries = 'pg_stat_statements, vchord.so'`.
+You must install pgvecto.rs into your instance of Postgres using their [instructions][vectors-install]. After installation, add `shared_preload_libraries = 'vectors.so'` to your `postgresql.conf`. If you already have some `shared_preload_libraries` set, you can separate each extension with a comma. For example, `shared_preload_libraries = 'pg_stat_statements, vectors.so'`.
 :::note
-Immich is known to work with Postgres versions `>= 14, < 18`.
+Immich is known to work with Postgres versions 14, 15, and 16. Earlier versions are unsupported. Postgres 17 is nominally compatible, but pgvecto.rs does not have prebuilt images or packages for it as of writing.
-Make sure the installed version of VectorChord is compatible with your version of Immich. The current accepted range for VectorChord is `>= 0.3.0, < 0.4.0`.
+Make sure the installed version of pgvecto.rs is compatible with your version of Immich. The current accepted range for pgvecto.rs is `>= 0.2.0, < 0.4.0`.
 :::
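As a concrete sketch of the left-hand (VectorChord) prerequisites on a Debian-based host with the PostgreSQL Apt repository already added; the Postgres major version (`16`) and config path here are assumptions:

```bash
# Install pgvector (provides the vector data types used by VectorChord).
sudo apt install postgresql-16-pgvector
# After installing VectorChord per its own instructions, preload it and restart.
echo "shared_preload_libraries = 'vchord.so'" | sudo tee -a /etc/postgresql/16/main/postgresql.conf
sudo systemctl restart postgresql
```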
 ## Specifying the connection URL
@@ -57,81 +53,21 @@ CREATE DATABASE <immichdatabasename>;
 \c <immichdatabasename>
 BEGIN;
 ALTER DATABASE <immichdatabasename> OWNER TO <immichdbusername>;
-CREATE EXTENSION vchord CASCADE;
+CREATE EXTENSION vectors;
 CREATE EXTENSION earthdistance CASCADE;
+ALTER DATABASE <immichdatabasename> SET search_path TO "$user", public, vectors;
+ALTER SCHEMA vectors OWNER TO <immichdbusername>;
 COMMIT;
 ```
-### Updating VectorChord
+### Updating pgvecto.rs
-When installing a new version of VectorChord, you will need to manually update the extension by connecting to the Immich database and running `ALTER EXTENSION vchord UPDATE;`.
+When installing a new version of pgvecto.rs, you will need to manually update the extension by connecting to the Immich database and running `ALTER EXTENSION vectors UPDATE;`.
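Either way, the update is a one-liner once connected with `psql`; the database name `immich` below is an assumption:

```bash
psql -d immich -c 'ALTER EXTENSION vchord UPDATE;'   # VectorChord (left-hand side)
psql -d immich -c 'ALTER EXTENSION vectors UPDATE;'  # pgvecto.rs (right-hand side)
```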
## Migrating to VectorChord ### Common errors
VectorChord is the successor extension to pgvecto.rs, allowing for higher performance, lower memory usage and higher quality results for smart search and facial recognition. #### Permission denied for view
### Migrating from pgvecto.rs If you get the error `driverError: error: permission denied for view pg_vector_index_stat`, you can fix this by connecting to the Immich database and running `GRANT SELECT ON TABLE pg_vector_index_stat TO <immichdbusername>;`.
Support for pgvecto.rs will be dropped in a later release, hence we recommend all users currently using pgvecto.rs to migrate to VectorChord at their convenience. There are two primary approaches to do so. [vectors-install]: https://docs.vectorchord.ai/getting-started/installation.html
The easiest option is to have both extensions installed during the migration:
1. Ensure you still have pgvecto.rs installed
2. Install `pgvector` (`>= 0.7.0, < 1.0.0`). The easiest way to do this is on Debian/Ubuntu by adding the [PostgreSQL Apt repository][pg-apt] and then running `apt install postgresql-NN-pgvector`, where `NN` is your Postgres version (e.g., `16`)
3. [Install VectorChord][vchord-install]
4. Add `shared_preload_libraries= 'vchord.so, vectors.so'` to your `postgresql.conf`, making sure to include _both_ `vchord.so` and `vectors.so`. You may include other libraries here as well if needed
5. Restart the Postgres database
6. If Immich does not have superuser permissions, run the SQL command `CREATE EXTENSION vchord CASCADE;` using psql or your choice of database client
7. Start Immich and wait for the logs `Reindexed face_index` and `Reindexed clip_index` to be output
8. If Immich does not have superuser permissions, run the SQL command `DROP EXTENSION vectors;`
9. Drop the old schema by running `DROP SCHEMA vectors;`
10. Remove the `vectors.so` entry from the `shared_preload_libraries` setting
11. Restart the Postgres database
12. Uninstall pgvecto.rs (e.g. `apt-get purge vectors-pg14` on Debian-based environments, replacing `pg14` as appropriate). `pgvector` must remain installed as it provides the data types used by `vchord`
If it is not possible to have both VectorChord and pgvecto.rs installed at the same time, you can perform the migration with more manual steps:
1. While pgvecto.rs is still installed, run the following SQL command using psql or your choice of database client. Take note of the number outputted by this command as you will need it later
```sql
SELECT atttypmod as dimsize
FROM pg_attribute f
JOIN pg_class c ON c.oid = f.attrelid
WHERE c.relkind = 'r'::char
AND f.attnum > 0
AND c.relname = 'smart_search'::text
AND f.attname = 'embedding'::text;
```
2. Remove references to pgvecto.rs using the below SQL commands
```sql
DROP INDEX IF EXISTS clip_index;
DROP INDEX IF EXISTS face_index;
ALTER TABLE smart_search ALTER COLUMN embedding SET DATA TYPE real[];
ALTER TABLE face_search ALTER COLUMN embedding SET DATA TYPE real[];
```
3. [Install VectorChord][vchord-install]
4. Change the columns back to the appropriate vector types, replacing `<number>` with the number from step 1
```sql
CREATE EXTENSION IF NOT EXISTS vchord CASCADE;
ALTER TABLE smart_search ALTER COLUMN embedding SET DATA TYPE vector(<number>);
ALTER TABLE face_search ALTER COLUMN embedding SET DATA TYPE vector(512);
```
5. Start Immich and let it create new indices using VectorChord
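Under the same assumed connection settings as above (`postgres` superuser, default `immich` database), steps 1 through 4 of this manual path can be scripted, with the dimension from step 1 captured and substituted into step 4:

```bash
# Capture the embedding dimension (step 1) while pgvecto.rs is still installed.
DIM=$(psql -U postgres -d immich -t -A -c \
  "SELECT atttypmod FROM pg_attribute f
   JOIN pg_class c ON c.oid = f.attrelid
   WHERE c.relkind = 'r' AND f.attnum > 0
     AND c.relname = 'smart_search' AND f.attname = 'embedding';")

# Remove references to pgvecto.rs (step 2).
psql -U postgres -d immich <<'SQL'
DROP INDEX IF EXISTS clip_index;
DROP INDEX IF EXISTS face_index;
ALTER TABLE smart_search ALTER COLUMN embedding SET DATA TYPE real[];
ALTER TABLE face_search ALTER COLUMN embedding SET DATA TYPE real[];
SQL

# ...uninstall pgvecto.rs and install VectorChord (step 3)...

# Restore the vector types (step 4), substituting the captured dimension.
psql -U postgres -d immich <<SQL
CREATE EXTENSION IF NOT EXISTS vchord CASCADE;
ALTER TABLE smart_search ALTER COLUMN embedding SET DATA TYPE vector(${DIM});
ALTER TABLE face_search ALTER COLUMN embedding SET DATA TYPE vector(512);
SQL
```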
### Migrating from pgvector
1. Ensure you have pgvector 0.7.0 or later installed; a quick version check is shown at the end of this section. If your version is older, upgrade it and run the SQL command `ALTER EXTENSION vector UPDATE;` using psql or your choice of database client
2. Follow the Prerequisites to install VectorChord
3. If Immich does not have superuser permissions, run the SQL command `CREATE EXTENSION vchord CASCADE;`
4. Remove the `DB_VECTOR_EXTENSION=pgvector` environment variable, as Immich will continue to use pgvector if it is set
5. Start Immich and let it create new indices using VectorChord
Note that VectorChord itself uses pgvector types, so you should not uninstall pgvector after following these steps.
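For step 1, you can confirm the installed pgvector version up front; this sketch reuses the same assumed connection flags as the sections above:

```bash
# Print the installed pgvector version; anything below 0.7.0 needs an upgrade first.
psql -U postgres -d immich -t -A \
  -c "SELECT extversion FROM pg_extension WHERE extname = 'vector';"

# After upgrading the package, refresh the extension itself.
psql -U postgres -d immich -c 'ALTER EXTENSION vector UPDATE;'
```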
[vchord-install]: https://docs.vectorchord.ai/vectorchord/getting-started/installation.html
[pg-apt]: https://www.postgresql.org/download/linux/#generic
View File
@@ -22,7 +22,7 @@ server {
client_max_body_size 50000M; client_max_body_size 50000M;
# Set headers # Set headers
proxy_set_header Host $host; proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme; proxy_set_header X-Forwarded-Proto $scheme;
View File
@@ -31,7 +31,7 @@ Admin can send a welcome email if the Email option is set, you can learn here ho
As the instance's admin, you can specify a storage quota for each user; once the limit is reached, the user won't be able to upload to the instance anymore. As the instance's admin, you can specify a storage quota for each user; once the limit is reached, the user won't be able to upload to the instance anymore.
In order to select a storage quota, click on the pencil icon and enter the storage quota in GiB. You can choose an unlimited quota by leaving it empty (default). In order to select a storage quota, click on the pencil icon and enter the storage quota in GiB. You can choose an unlimited quota using the value 0 (default).
:::tip :::tip
The system administrator can see the usage quota percentage of all users in Server Stats page. The system administrator can see the usage quota percentage of all users in Server Stats page.
View File
@@ -1,14 +1,14 @@
# Database Migrations # Database Migrations
After making any changes in `server/src/schema`, a database migration needs to run in order to register the changes in the database. Follow the steps below to create a new migration. After making any changes in `server/src/entities`, a database migration needs to run in order to register the changes in the database. Follow the steps below to create a new migration.
1. Run the command 1. Run the command
```bash ```bash
npm run migrations:generate <migration-name> npm run typeorm:migrations:generate <migration-name>
``` ```
2. Check if the migration file makes sense. 2. Check if the migration file makes sense.
3. Move the migration file to folder `./server/src/schema/migrations` in your code editor. 3. Move the migration file to folder `./server/src/migrations` in your code editor.
The server will automatically detect `*.ts` file changes and restart. Part of the server start-up process includes running any new migrations, so it will be applied immediately. The server will automatically detect `*.ts` file changes and restart. Part of the server start-up process includes running any new migrations, so it will be applied immediately.
View File
@@ -63,41 +63,22 @@ If you only want to do web development connected to an existing, remote backend,
IMMICH_SERVER_URL=https://demo.immich.app/ npm run dev IMMICH_SERVER_URL=https://demo.immich.app/ npm run dev
``` ```
If you're using PowerShell on Windows you may need to set the env var separately like so:
```powershell
$env:IMMICH_SERVER_URL = "https://demo.immich.app/"
npm run dev
```
#### `@immich/ui` #### `@immich/ui`
To see local changes to `@immich/ui` in Immich, do the following: To see local changes to `@immich/ui` in Immich, do the following:
1. Install `@immich/ui` as a sibling to `immich/`, for example `/home/user/immich` and `/home/user/ui` 1. Install `@immich/ui` as a sibling to `immich/`, for example `/home/user/immich` and `/home/user/ui`
2. Build the `@immich/ui` project via `npm run build` 1. Build the `@immich/ui` project via `npm run build`
3. Uncomment the corresponding volume in web service of the `docker/docker-compose.dev.yaml` file (`../../ui:/usr/ui`) 1. Uncomment the corresponding volume in web service of the `docker/docker-compose.dev.yaml` file (`../../ui:/usr/ui`)
4. Uncomment the corresponding alias in the `web/vite.config.js` file (`'@immich/ui': path.resolve(\_\_dirname, '../../ui')`) 1. Uncomment the corresponding alias in the `web/vite.config.js` file (`'@immich/ui': path.resolve(\_\_dirname, '../../ui')`)
5. Uncomment the import statement in `web/src/app.css` file `@import '/usr/ui/dist/theme/default.css';` and comment out `@import '@immich/ui/theme/default.css';` 1. Start up the stack via `make dev`
6. Start up the stack via `make dev` 1. After making changes in `@immich/ui`, rebuild it (`npm run build`)
7. After making changes in `@immich/ui`, rebuild it (`npm run build`)
### Mobile app ### Mobile app
#### Setup The mobile app `(/mobile)` will required Flutter toolchain 3.13.x and FVM to be installed on your system.
1. Set up the Flutter toolchain using FVM. Please refer to [Flutter's official documentation](https://flutter.dev/docs/get-started/install) for more information on setting up the toolchain on your machine.
2. Run `flutter pub get` to install the dependencies.
3. Run `make translation` to generate the translation file.
4. Run `fvm flutter run` to start the app.
#### Translation
To add a new translation string, add the key-value pair to `i18n/en.json` in the root of the Immich project. Then, from the `mobile/` directory, run
```bash
make translation
```
The mobile app asks you what backend to connect to. You can utilize the demo backend (https://demo.immich.app/) if you don't need to change server code or upload photos. Alternatively, you can run the server yourself per the instructions above. The mobile app asks you what backend to connect to. You can utilize the demo backend (https://demo.immich.app/) if you don't need to change server code or upload photos. Alternatively, you can run the server yourself per the instructions above.
View File
@@ -1,11 +0,0 @@
# Chromecast support
Immich supports Google's Cast protocol, so photos and videos can be cast to devices such as a Chromecast or a Nest Hub. This feature is considered experimental and has several important limitations, listed below. Currently, it is only supported by the web client; support on Android and iOS is planned for the future.
## Limitations
To use casting with Immich, there are a few prerequisites:
1. Your instance must be accessed via an HTTPS connection in order for the casting menu to show.
2. Your instance must be publicly accessible via HTTPS, and a DNS record for the server must be resolvable via Google's DNS servers (`8.8.8.8` and `8.8.4.4`); a quick check is shown after this list
3. Videos must be in a format that is compatible with Google Cast. For more info, check out [Google's documentation](https://developers.google.com/cast/docs/media)
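One way to sanity-check the second requirement is to query Google's resolver directly; `photos.example.com` is a placeholder for your own domain:

```bash
# An empty answer here means Google's DNS cannot resolve your server,
# and the cast receiver will not be able to reach it.
dig @8.8.8.8 +short photos.example.com
```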
View File
@@ -42,12 +42,6 @@ docker run -it -v "$(pwd)":/import:ro -e IMMICH_INSTANCE_URL=https://your-immich
Please modify the `IMMICH_INSTANCE_URL` and `IMMICH_API_KEY` environment variables as suitable. You can also use a Docker env file to store your sensitive API key. Please modify the `IMMICH_INSTANCE_URL` and `IMMICH_API_KEY` environment variables as suitable. You can also use a Docker env file to store your sensitive API key.
This `docker run` command runs the `immich` command directly inside the container. You can append the desired parameters (see under "Usage") to the command line like this:
```bash
docker run -it -v "$(pwd)":/import:ro -e IMMICH_INSTANCE_URL=https://your-immich-instance/api -e IMMICH_API_KEY=your-api-key ghcr.io/immich-app/immich-cli:latest upload -a -c 5 --recursive directory/
```
## Usage ## Usage
<details> <details>
@@ -118,7 +112,7 @@ You begin by authenticating to your Immich server. For instance:
immich login http://192.168.1.216:2283/api HFEJ38DNSDUEG immich login http://192.168.1.216:2283/api HFEJ38DNSDUEG
``` ```
This will store your credentials in a `auth.yml` file in the configuration directory which defaults to `~/.config/immich/`. The directory can be set with the `-d` option or the environment variable `IMMICH_CONFIG_DIR`. Please keep the file secure, either by performing the logout command after you are done, or deleting it manually. This will store your credentials in a `auth.yml` file in the configuration directory which defaults to `~/.config/`. The directory can be set with the `-d` option or the environment variable `IMMICH_CONFIG_DIR`. Please keep the file secure, either by performing the logout command after you are done, or deleting it manually.
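For example, to keep the credentials file in a more restricted location, you could set the directory explicitly; the path below is only illustrative:

```bash
# Store auth.yml in a custom directory instead of the default config directory.
IMMICH_CONFIG_DIR=/secure/immich-cli immich login http://192.168.1.216:2283/api HFEJ38DNSDUEG

# Remove the stored credentials when you are done.
IMMICH_CONFIG_DIR=/secure/immich-cli immich logout
```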
Once you are authenticated, you can upload assets to your Immich server. Once you are authenticated, you can upload assets to your Immich server.
View File
@@ -121,6 +121,6 @@ Once this is done, you can continue to step 3 of "Basic Setup".
[hw-file]: https://github.com/immich-app/immich/releases/latest/download/hwaccel.transcoding.yml [hw-file]: https://github.com/immich-app/immich/releases/latest/download/hwaccel.transcoding.yml
[nvct]: https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html [nvct]: https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html
[jellyfin-lp]: https://jellyfin.org/docs/general/post-install/transcoding/hardware-acceleration/intel#low-power-encoding [jellyfin-lp]: https://jellyfin.org/docs/general/administration/hardware-acceleration/intel/#configure-and-verify-lp-mode-on-linux
[jellyfin-kernel-bug]: https://jellyfin.org/docs/general/post-install/transcoding/hardware-acceleration/intel#known-issues-and-limitations-on-linux [jellyfin-kernel-bug]: https://jellyfin.org/docs/general/administration/hardware-acceleration/intel/#known-issues-and-limitations
[libmali-rockchip]: https://github.com/tsukumijima/libmali-rockchip/releases [libmali-rockchip]: https://github.com/tsukumijima/libmali-rockchip/releases
View File
@@ -72,7 +72,7 @@ In rare cases, the library watcher can hang, preventing Immich from starting up.
### Nightly job ### Nightly job
There is an automatic scan job that is scheduled to run once a day. This job also cleans up any libraries stuck in deletion. It is possible to trigger the cleanup by clicking "Scan all libraries" in the library management page. There is an automatic scan job that is scheduled to run once a day. This job also cleans up any libraries stuck in deletion. It is possible to trigger the cleanup by clicking "Scan all libraries" in the library managment page.
## Usage ## Usage
View File
@@ -42,7 +42,7 @@ You do not need to redo any machine learning jobs after enabling hardware accele
- The GPU must have compute capability 5.2 or greater. - The GPU must have compute capability 5.2 or greater.
- The server must have the official NVIDIA driver installed. - The server must have the official NVIDIA driver installed.
- The installed driver must be >= 545 (it must support CUDA 12.3). - The installed driver must be >= 535 (it must support CUDA 12.2).
- On Linux (except for WSL2), you also need to have [NVIDIA Container Toolkit][nvct] installed. - On Linux (except for WSL2), you also need to have [NVIDIA Container Toolkit][nvct] installed.
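If you want to check these requirements from the shell, `nvidia-smi` can report both values; note that the `compute_cap` query field only exists on relatively recent drivers, so treat this as a sketch:

```bash
# Driver version must be >= 545 and compute capability >= 5.2.
nvidia-smi --query-gpu=name,driver_version,compute_cap --format=csv
```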
#### ROCm #### ROCm
View File
@@ -5,7 +5,7 @@ import TabItem from '@theme/TabItem';
Immich uses Postgres as its search database for both metadata and contextual CLIP search. Immich uses Postgres as its search database for both metadata and contextual CLIP search.
Contextual CLIP search is powered by the [VectorChord](https://github.com/tensorchord/VectorChord) extension, utilizing machine learning models like [CLIP](https://openai.com/research/clip) to provide relevant search results. This allows for freeform searches without requiring specific keywords in the image or video metadata. Contextual CLIP search is powered by the [pgvecto.rs](https://github.com/tensorchord/pgvecto.rs) extension, utilizing machine learning models like [CLIP](https://openai.com/research/clip) to provide relevant search results. This allows for freeform searches without requiring specific keywords in the image or video metadata.
## Advanced Search Filters ## Advanced Search Filters
@@ -92,7 +92,7 @@ Memory and execution time estimates were obtained without acceleration on a 7800
**Execution Time (ms)**: After warming up the model with one pass, the mean execution time of 100 passes with the same input. **Execution Time (ms)**: After warming up the model with one pass, the mean execution time of 100 passes with the same input.
**Memory (MiB)**: The peak RSS usage of the process after performing the above timing benchmark. Does not include image decoding, concurrent processing, the web server, etc., which are relatively constant factors. **Memory (MiB)**: The peak RSS usage of the process afer performing the above timing benchmark. Does not include image decoding, concurrent processing, the web server, etc., which are relatively constant factors.
**Recall (%)**: Evaluated on Crossmodal-3600, the average of the recall@1, recall@5 and recall@10 results for zeroshot image retrieval. Chinese (Simplified), English, French, German, Italian, Japanese, Korean, Polish, Russian, Spanish and Turkish are additionally tested on XTD-10. Chinese (Simplified) and English are additionally tested on Flickr30k. The recall metrics are the average across all tested datasets. **Recall (%)**: Evaluated on Crossmodal-3600, the average of the recall@1, recall@5 and recall@10 results for zeroshot image retrieval. Chinese (Simplified), English, French, German, Italian, Japanese, Korean, Polish, Russian, Spanish and Turkish are additionally tested on XTD-10. Chinese (Simplified) and English are additionally tested on Flickr30k. The recall metrics are the average across all tested datasets.
View File
@@ -14,14 +14,14 @@ online generators you can use.
2. Paste the link to your JSON style in either the **Light Style** or **Dark Style**. (You can add different styles which will help make the map style more appropriate depending on whether you set **Immich** to Light or Dark mode.) 2. Paste the link to your JSON style in either the **Light Style** or **Dark Style**. (You can add different styles which will help make the map style more appropriate depending on whether you set **Immich** to Light or Dark mode.)
3. Save your selections. Reload the map, and enjoy your custom map style! 3. Save your selections. Reload the map, and enjoy your custom map style!
## Use MapTiler to build a custom style ## Use Maptiler to build a custom style
Customizing the map style can be done easily using MapTiler, if you do not want to write an entire JSON document by hand. Customizing the map style can be done easily using Maptiler, if you do not want to write an entire JSON document by hand.
1. Create a free account at https://cloud.maptiler.com 1. Create a free account at https://cloud.maptiler.com
2. Once logged in, you can either create a brand new map by clicking on **New Map**, selecting a starter map, and then clicking **Customize**, OR by selecting a **Standard Map** and customizing it from there. 2. Once logged in, you can either create a brand new map by clicking on **New Map**, selecting a starter map, and then clicking **Customize**, OR by selecting a **Standard Map** and customizing it from there.
3. The **editor** interface is self-explanatory. You can change colors, remove visible layers, or add optional layers (e.g., administrative, topo, hydro, etc.) in the composer. 3. The **editor** interface is self-explanatory. You can change colors, remove visible layers, or add optional layers (e.g., administrative, topo, hydro, etc.) in the composer.
4. Once you have your map composed, click on **Save** at the top right. Give it a unique name to save it to your account. 4. Once you have your map composed, click on **Save** at the top right. Give it a unique name to save it to your account.
5. Next, **Publish** your style using the **Publish** button at the top right. This will deploy it to production, which means it is able to be exposed over the Internet. MapTiler will present an interactive side-by-side map with the original and your changes prior to publication.<br/>![MapTiler Publication Settings](img/immich_map_styles_publish.webp) 5. Next, **Publish** your style using the **Publish** button at the top right. This will deploy it to production, which means it is able to be exposed over the Internet. Maptiler will present an interactive side-by-side map with the original and your changes prior to publication.<br/>![Maptiler Publication Settings](img/immich_map_styles_publish.webp)
6. MapTiler will warn you that changing the map will change it across all apps using the map. Since no apps are using the map yet, this is okay. 6. Maptiler will warn you that changing the map will change it across all apps using the map. Since no apps are using the map yet, this is okay.
7. Clicking on the name of your new map at the top left will bring you to the item's **details** page. From here, copy the link to the JSON style under **Use vector style**. This link will automatically contain your personal API key to MapTiler. 7. Clicking on the name of your new map at the top left will bring you to the item's **details** page. From here, copy the link to the JSON style under **Use vector style**. This link will automatically contain your personal API key to Maptiler.
View File
@@ -1,7 +1,7 @@
# Database Queries # Database Queries
:::danger :::danger
Keep in mind that mucking around in the database might set the Moon on fire. Avoid modifying the database directly when possible, and always have current backups. Keep in mind that mucking around in the database might set the moon on fire. Avoid modifying the database directly when possible, and always have current backups.
::: :::
:::tip :::tip
View File
@@ -2,13 +2,53 @@
sidebar_position: 30 sidebar_position: 30
--- ---
import CodeBlock from '@theme/CodeBlock';
import ExampleEnv from '!!raw-loader!../../../docker/example.env';
# Docker Compose [Recommended] # Docker Compose [Recommended]
Docker Compose is the recommended method to run Immich in production. Below are the steps to deploy Immich with Docker Compose. Docker Compose is the recommended method to run Immich in production. Below are the steps to deploy Immich with Docker Compose.
import DockerComposeSteps from '/docs/partials/_docker-compose-install-steps.mdx'; ## Step 1 - Download the required files
<DockerComposeSteps /> Create a directory of your choice (e.g. `./immich-app`) to hold the `docker-compose.yml` and `.env` files.
```bash title="Move to the directory you created"
mkdir ./immich-app
cd ./immich-app
```
Download [`docker-compose.yml`][compose-file] and [`example.env`][env-file] by running the following commands:
```bash title="Get docker-compose.yml file"
wget -O docker-compose.yml https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
```
```bash title="Get .env file"
wget -O .env https://github.com/immich-app/immich/releases/latest/download/example.env
```
You can alternatively download these two files from your browser and move them to the directory that you created, in which case ensure that you rename `example.env` to `.env`.
## Step 2 - Populate the .env file with custom values
<CodeBlock language="bash" title="Default environmental variable content">
{ExampleEnv}
</CodeBlock>
- Populate `UPLOAD_LOCATION` with your preferred location for storing backup assets. It should be a new directory on the server with enough free space.
- Consider changing `DB_PASSWORD` to a custom value. Postgres is not publicly exposed, so this password is only used for local authentication.
To avoid issues with Docker parsing this value, it is best to use only the characters `A-Za-z0-9`. `pwgen` is a handy utility for this.
- Set your timezone by uncommenting the `TZ=` line.
- Populate custom database information if necessary. A filled-in example is sketched below.
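A populated `.env` might then look like the following; every value here is a placeholder to adapt to your server:

```bash
# Example values only - adapt to your server.
UPLOAD_LOCATION=/mnt/storage/immich-library
DB_DATA_LOCATION=./postgres
DB_PASSWORD=ChangeMe0123456789   # A-Za-z0-9 only, to avoid Docker parsing issues
TZ=Etc/UTC
IMMICH_VERSION=release
```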
## Step 3 - Start the containers
From the directory you created in Step 1 (which should now contain your customized `docker-compose.yml` and `.env` files), run the following command to start Immich as a background service:
```bash title="Start the containers"
docker compose up -d
```
:::info Docker version :::info Docker version
If you get an error such as `unknown shorthand flag: 'd' in -d` or `open <location of your .env file>: permission denied`, you are probably running the wrong Docker version. (This happens, for example, with the docker.io package in Ubuntu 22.04.3 LTS.) You can correct the problem by following the complete [Docker Engine install](https://docs.docker.com/engine/install/) procedure for your distribution, crucially the "Uninstall old versions" and "Install using the apt/rpm repository" sections. These replace the distro's Docker packages with Docker's official ones. If you get an error such as `unknown shorthand flag: 'd' in -d` or `open <location of your .env file>: permission denied`, you are probably running the wrong Docker version. (This happens, for example, with the docker.io package in Ubuntu 22.04.3 LTS.) You can correct the problem by following the complete [Docker Engine install](https://docs.docker.com/engine/install/) procedure for your distribution, crucially the "Uninstall old versions" and "Install using the apt/rpm repository" sections. These replace the distro's Docker packages with Docker's official ones.
View File
@@ -73,20 +73,19 @@ Information on the current workers can be found [here](/docs/administration/jobs
## Database ## Database
| Variable | Description | Default | Containers | | Variable | Description | Default | Containers |
| :---------------------------------- | :--------------------------------------------------------------------------- | :--------: | :----------------------------- | | :---------------------------------- | :----------------------------------------------------------------------- | :----------: | :----------------------------- |
| `DB_URL` | Database URL | | server | | `DB_URL` | Database URL | | server |
| `DB_HOSTNAME` | Database host | `database` | server | | `DB_HOSTNAME` | Database host | `database` | server |
| `DB_PORT` | Database port | `5432` | server | | `DB_PORT` | Database port | `5432` | server |
| `DB_USERNAME` | Database user | `postgres` | server, database<sup>\*1</sup> | | `DB_USERNAME` | Database user | `postgres` | server, database<sup>\*1</sup> |
| `DB_PASSWORD` | Database password | `postgres` | server, database<sup>\*1</sup> | | `DB_PASSWORD` | Database password | `postgres` | server, database<sup>\*1</sup> |
| `DB_DATABASE_NAME` | Database name | `immich` | server, database<sup>\*1</sup> | | `DB_DATABASE_NAME` | Database name | `immich` | server, database<sup>\*1</sup> |
| `DB_SSL_MODE` | Database SSL mode | | server | | `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database vector extension (one of [`pgvector`, `pgvecto.rs`]) | `pgvecto.rs` | server |
| `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database vector extension (one of [`vectorchord`, `pgvector`, `pgvecto.rs`]) | | server |
| `DB_SKIP_MIGRATIONS` | Whether to skip running migrations on startup (one of [`true`, `false`]) | `false` | server | | `DB_SKIP_MIGRATIONS` | Whether to skip running migrations on startup (one of [`true`, `false`]) | `false` | server |
\*1: The values of `DB_USERNAME`, `DB_PASSWORD`, and `DB_DATABASE_NAME` are passed to the Postgres container as the variables `POSTGRES_USER`, `POSTGRES_PASSWORD`, and `POSTGRES_DB` in `docker-compose.yml`. \*1: The values of `DB_USERNAME`, `DB_PASSWORD`, and `DB_DATABASE_NAME` are passed to the Postgres container as the variables `POSTGRES_USER`, `POSTGRES_PASSWORD`, and `POSTGRES_DB` in `docker-compose.yml`.
\*2: If not provided, the appropriate extension to use is auto-detected at startup by introspecting the database. When multiple extensions are installed, the order of preference is VectorChord, pgvecto.rs, pgvector. \*2: This setting cannot be changed after the server has successfully started up.
:::info :::info
View File
@@ -29,7 +29,7 @@ Download [`docker-compose.yml`](https://github.com/immich-app/immich/releases/la
## Step 2 - Populate the .env file with custom values ## Step 2 - Populate the .env file with custom values
Follow [Step 2 in Docker Compose](/docs/install/docker-compose#step-2---populate-the-env-file-with-custom-values) for instructions on customizing the `.env` file, and then return to this guide to continue. Follow [Step 2 in Docker Compose](./docker-compose#step-2---populate-the-env-file-with-custom-values) for instructions on customizing the `.env` file, and then return to this guide to continue.
## Step 3 - Create a new project in Container Manager ## Step 3 - Create a new project in Container Manager
View File
@@ -2,7 +2,7 @@
sidebar_position: 80 sidebar_position: 80
--- ---
# TrueNAS [Community] # TrueNAS SCALE [Community]
:::note :::note
This is a community contribution and not officially supported by the Immich team, but included here for convenience. This is a community contribution and not officially supported by the Immich team, but included here for convenience.
@@ -12,17 +12,17 @@ Community support can be found in the dedicated channel on the [Discord Server](
**Please report app issues to the corresponding [Github Repository](https://github.com/truenas/charts/tree/master/community/immich).** **Please report app issues to the corresponding [Github Repository](https://github.com/truenas/charts/tree/master/community/immich).**
::: :::
Immich can easily be installed on TrueNAS Community Edition via the **Community** train application. Immich can easily be installed on TrueNAS SCALE via the **Community** train application.
Consider reviewing the TrueNAS [Apps resources](https://apps.truenas.com/getting-started/) if you have not previously configured applications on your system. Consider reviewing the TrueNAS [Apps tutorial](https://www.truenas.com/docs/scale/scaletutorials/apps/) if you have not previously configured applications on your system.
TrueNAS Community Edition makes installing and updating Immich easy, but you must use the Immich web portal and mobile app to configure accounts and access libraries. TrueNAS SCALE makes installing and updating Immich easy, but you must use the Immich web portal and mobile app to configure accounts and access libraries.
## First Steps ## First Steps
The Immich app in TrueNAS Community Edition installs, completes the initial configuration, then starts the Immich web portal. The Immich app in TrueNAS SCALE installs, completes the initial configuration, then starts the Immich web portal.
When updates become available, TrueNAS alerts and provides easy updates. When updates become available, SCALE alerts and provides easy updates.
Before installing the Immich app in TrueNAS, review the [Environment Variables](#environment-variables) documentation to see if you want to configure any during installation. Before installing the Immich app in SCALE, review the [Environment Variables](#environment-variables) documentation to see if you want to configure any during installation.
You may also configure environment variables at any time after deploying the application. You may also configure environment variables at any time after deploying the application.
### Setting up Storage Datasets ### Setting up Storage Datasets
@@ -126,9 +126,9 @@ className="border rounded-xl"
Accept the default port `30041` in **WebUI Port** or enter a custom port number. Accept the default port `30041` in **WebUI Port** or enter a custom port number.
:::info Allowed Port Numbers :::info Allowed Port Numbers
Only numbers within the range 9000-65535 may be used on TrueNAS versions below TrueNAS Community Edition 24.10 Electric Eel. Only numbers within the range 9000-65535 may be used on SCALE versions below TrueNAS Scale 24.10 Electric Eel.
Regardless of version, to avoid port conflicts, don't use [ports on this list](https://www.truenas.com/docs/solutions/optimizations/security/#truenas-default-ports). Regardless of version, to avoid port conflicts, don't use [ports on this list](https://www.truenas.com/docs/references/defaultports/).
::: :::
### Storage Configuration ### Storage Configuration
@@ -173,7 +173,7 @@ className="border rounded-xl"
You may configure [External Libraries](/docs/features/libraries) by mounting them using **Additional Storage**. You may configure [External Libraries](/docs/features/libraries) by mounting them using **Additional Storage**.
The **Mount Path** is the location you will need to copy and paste into the External Library settings within Immich. The **Mount Path** is the location you will need to copy and paste into the External Library settings within Immich.
The **Host Path** is the location on the TrueNAS Community Edition server where your external library is located. The **Host Path** is the location on the TrueNAS SCALE server where your external library is located.
<!-- A section for Labels would go here but I don't know what they do. --> <!-- A section for Labels would go here but I don't know what they do. -->
@@ -188,17 +188,17 @@ className="border rounded-xl"
Accept the default **CPU** limit of `2` threads or specify the number of threads (CPUs with Multi-/Hyper-threading have 2 threads per core). Accept the default **CPU** limit of `2` threads or specify the number of threads (CPUs with Multi-/Hyper-threading have 2 threads per core).
Specify the **Memory** limit in MB of RAM. Immich recommends at least 6000 MB (6GB). If you selected **Enable Machine Learning** in **Immich Configuration**, you should probably set this above 8000 MB. Accept the default **Memory** limit of `4096` MB or specify the number of MB of RAM. If you're using Machine Learning you should probably set this above 8000 MB.
:::info Older TrueNAS Versions :::info Older SCALE Versions
Before TrueNAS Community Edition version 24.10 Electric Eel: Before TrueNAS SCALE version 24.10 Electric Eel:
The **CPU** value was specified in a different format with a default of `4000m` which is 4 threads. The **CPU** value was specified in a different format with a default of `4000m` which is 4 threads.
The **Memory** value was specified in a different format with a default of `8Gi` which is 8 GiB of RAM. The value was specified in bytes or a number with a measurement suffix. Examples: `129M`, `123Mi`, `1000000000` The **Memory** value was specified in a different format with a default of `8Gi` which is 8 GiB of RAM. The value was specified in bytes or a number with a measurement suffix. Examples: `129M`, `123Mi`, `1000000000`
::: :::
Enable **GPU Configuration** options if you have a GPU that you will use for [Hardware Transcoding](/docs/features/hardware-transcoding) and/or [Hardware-Accelerated Machine Learning](/docs/features/ml-hardware-acceleration.md). More info: [GPU Passthrough Docs for TrueNAS Apps](https://apps.truenas.com/managing-apps/installing-apps/#gpu-passthrough) Enable **GPU Configuration** options if you have a GPU that you will use for [Hardware Transcoding](/docs/features/hardware-transcoding) and/or [Hardware-Accelerated Machine Learning](/docs/features/ml-hardware-acceleration.md). More info: [GPU Passthrough Docs for TrueNAS Apps](https://www.truenas.com/docs/truenasapps/#gpu-passthrough)
### Install ### Install
@@ -240,7 +240,7 @@ className="border rounded-xl"
/> />
:::info :::info
Some Environment Variables are not available for the TrueNAS Community Edition app. This is mainly because they can be configured through GUI options in the [Edit Immich screen](#edit-app-settings). Some Environment Variables are not available for the TrueNAS SCALE app. This is mainly because they can be configured through GUI options in the [Edit Immich screen](#edit-app-settings).
Some examples are: `IMMICH_VERSION`, `UPLOAD_LOCATION`, `DB_DATA_LOCATION`, `TZ`, `IMMICH_LOG_LEVEL`, `DB_PASSWORD`, `REDIS_PASSWORD`. Some examples are: `IMMICH_VERSION`, `UPLOAD_LOCATION`, `DB_DATA_LOCATION`, `TZ`, `IMMICH_LOG_LEVEL`, `DB_PASSWORD`, `REDIS_PASSWORD`.
::: :::
@@ -251,7 +251,7 @@ Some examples are: `IMMICH_VERSION`, `UPLOAD_LOCATION`, `DB_DATA_LOCATION`, `TZ`
Make sure to read the general [upgrade instructions](/docs/install/upgrading.md). Make sure to read the general [upgrade instructions](/docs/install/upgrading.md).
::: :::
When updates become available, TrueNAS alerts and provides easy updates. When updates become available, SCALE alerts and provides easy updates.
To update the app to the latest version: To update the app to the latest version:
- Go to the **Installed Applications** screen and select Immich from the list of installed applications. - Go to the **Installed Applications** screen and select Immich from the list of installed applications.
View File
@@ -24,6 +24,9 @@ To clean up disk space, the old version's obsolete container images can be delet
docker image prune docker image prune
``` ```
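For reference, the upgrade itself is typically a pull followed by a recreate, run from the directory containing your `docker-compose.yml`:

```bash
# Fetch the newer images and recreate the containers on them,
# then remove the now-obsolete images.
docker compose pull && docker compose up -d
docker image prune
```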
[compose-file]: https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
[env-file]: https://github.com/immich-app/immich/releases/latest/download/example.env
[watchtower]: https://containrrr.dev/watchtower/ [watchtower]: https://containrrr.dev/watchtower/
[breaking]: https://github.com/immich-app/immich/discussions?discussions_q=label%3Achangelog%3Abreaking-change+sort%3Adate_created [breaking]: https://github.com/immich-app/immich/discussions?discussions_q=label%3Achangelog%3Abreaking-change+sort%3Adate_created
[container-auth]: https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry#authenticating-to-the-container-registry
[releases]: https://github.com/immich-app/immich/releases [releases]: https://github.com/immich-app/immich/releases
View File
@@ -1,5 +1,5 @@
--- ---
sidebar_position: 3 sidebar_position: 2
--- ---
# Comparison # Comparison
Binary file not shown (before: 233 KiB).
View File
@@ -2,13 +2,9 @@
sidebar_position: 1 sidebar_position: 1
--- ---
# Welcome to Immich # Introduction
<img <img src={require('./img/feature-panel.webp').default} alt="Immich - Self-hosted photos and videos backup tool" />
src={require('./img/social-preview-light.webp').default}
alt="Immich - Self-hosted photos and videos backup tool"
data-theme="light"
/>
## Welcome! ## Welcome!
View File
@@ -1,5 +1,5 @@
--- ---
sidebar_position: 2 sidebar_position: 3
--- ---
# Quick start # Quick start
@@ -10,20 +10,11 @@ to install and use it.
## Requirements ## Requirements
- A system with at least 4GB of RAM and 2 CPU cores. Check the [requirements page](/docs/install/requirements) to get started.
- [Docker](https://docs.docker.com/engine/install/)
> For a more detailed list of requirements, see the [requirements page](/docs/install/requirements).
---
## Set up the server ## Set up the server
import DockerComposeSteps from '/docs/partials/_docker-compose-install-steps.mdx'; Follow the [Docker Compose (Recommended)](/docs/install/docker-compose) instructions to install the server.
<DockerComposeSteps />
---
## Try the web app ## Try the web app
@@ -35,8 +26,6 @@ Try uploading a picture from your browser.
<img src={require('./img/upload-button.webp').default} title="Upload button" /> <img src={require('./img/upload-button.webp').default} title="Upload button" />
---
## Try the mobile app ## Try the mobile app
### Download the Mobile App ### Download the Mobile App
@@ -67,8 +56,6 @@ You can select the **Jobs** tab to see Immich processing your photos.
<img src={require('/docs/guides/img/jobs-tab.webp').default} title="Jobs tab" width={300} /> <img src={require('/docs/guides/img/jobs-tab.webp').default} title="Jobs tab" width={300} />
---
## Review the database backup and restore process ## Review the database backup and restore process
Immich has built-in database backups. You can refer to the Immich has built-in database backups. You can refer to the
@@ -78,8 +65,6 @@ Immich has built-in database backups. You can refer to the
The database only contains metadata and user information. You must set up manual backups of the images and videos stored in `UPLOAD_LOCATION`. The database only contains metadata and user information. You must set up manual backups of the images and videos stored in `UPLOAD_LOCATION`.
::: :::
---
## Where to go from here? ## Where to go from here?
You may decide you'd like to install the server a different way; the Install category on the left menu provides many options. You may decide you'd like to install the server a different way; the Install category on the left menu provides many options.
View File
@@ -1,43 +0,0 @@
import CodeBlock from '@theme/CodeBlock';
import ExampleEnv from '!!raw-loader!../../../docker/example.env';
### Step 1 - Download the required files
Create a directory of your choice (e.g. `./immich-app`) to hold the `docker-compose.yml` and `.env` files.
```bash title="Move to the directory you created"
mkdir ./immich-app
cd ./immich-app
```
Download [`docker-compose.yml`](https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml) and [`example.env`](https://github.com/immich-app/immich/releases/latest/download/example.env) by running the following commands:
```bash title="Get docker-compose.yml file"
wget -O docker-compose.yml https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
```
```bash title="Get .env file"
wget -O .env https://github.com/immich-app/immich/releases/latest/download/example.env
```
You can alternatively download these two files from your browser and move them to the directory that you created, in which case ensure that you rename `example.env` to `.env`.
### Step 2 - Populate the .env file with custom values
<CodeBlock language="bash" title="Default environmental variable content">
{ExampleEnv}
</CodeBlock>
- Populate `UPLOAD_LOCATION` with your preferred location for storing backup assets. It should be a new directory on the server with enough free space.
- Consider changing `DB_PASSWORD` to a custom value. Postgres is not publicly exposed, so this password is only used for local authentication.
To avoid issues with Docker parsing this value, it is best to use only the characters `A-Za-z0-9`. `pwgen` is a handy utility for this.
- Set your timezone by uncommenting the `TZ=` line.
- Populate custom database information if necessary.
### Step 3 - Start the containers
From the directory you created in Step 1 (which should now contain your customized `docker-compose.yml` and `.env` files), run the following command to start Immich as a background service:
```bash title="Start the containers"
docker compose up -d
```
View File
@@ -95,7 +95,7 @@ const config = {
position: 'right', position: 'right',
}, },
{ {
to: '/docs/overview/welcome', to: '/docs/overview/introduction',
position: 'right', position: 'right',
label: 'Docs', label: 'Docs',
}, },
@@ -124,12 +124,6 @@ const config = {
label: 'Discord', label: 'Discord',
position: 'right', position: 'right',
}, },
{
type: 'html',
position: 'right',
value:
'<a href="https://buy.immich.app" target="_blank" class="no-underline hover:no-underline"><button class="buy-button bg-immich-primary dark:bg-immich-dark-primary text-white dark:text-black rounded-xl">Buy Immich</button></a>',
},
], ],
}, },
footer: { footer: {
@@ -140,7 +134,7 @@ const config = {
items: [ items: [
{ {
label: 'Welcome', label: 'Welcome',
to: '/docs/overview/welcome', to: '/docs/overview/introduction',
}, },
{ {
label: 'Installation', label: 'Installation',
docs/package-lock.json (generated): file diff suppressed because it is too large.
View File
@@ -57,6 +57,6 @@
"node": ">=20" "node": ">=20"
}, },
"volta": { "volta": {
"node": "22.15.1" "node": "22.14.0"
} }
} }
View File
@@ -40,9 +40,13 @@ const projects: CommunityProjectProps[] = [
}, },
{ {
title: 'Lightroom Immich Plugin: lrc-immich-plugin', title: 'Lightroom Immich Plugin: lrc-immich-plugin',
description: description: 'Another Lightroom plugin to publish or export photos from Lightroom to Immich.',
'Lightroom plugin to publish, export photos from Lightroom to Immich. Import from Immich to Lightroom is also supported.', url: 'https://github.com/bmachek/lrc-immich-plugin',
url: 'https://blog.fokuspunk.de/lrc-immich-plugin/', },
{
title: 'Immich Duplicate Finder',
description: 'Webapp that uses machine learning to identify near-duplicate images.',
url: 'https://github.com/vale46n1/immich_duplicate_finder',
}, },
{ {
title: 'Immich-Tiktok-Remover', title: 'Immich-Tiktok-Remover',
View File
@@ -7,22 +7,14 @@
@tailwind components; @tailwind components;
@tailwind utilities; @tailwind utilities;
@font-face { @import url('https://fonts.googleapis.com/css2?family=Be+Vietnam+Pro:ital,wght@0,100;0,200;0,300;0,400;0,500;0,600;0,700;0,800;0,900;1,100;1,200;1,300;1,400;1,500;1,600;1,700;1,800;1,900&display=swap');
font-family: 'Overpass';
src: url('/fonts/overpass/Overpass.ttf') format('truetype-variations');
font-weight: 1 999;
font-style: normal;
ascent-override: 106.25%;
size-adjust: 106.25%;
}
@font-face { body {
font-family: 'Overpass Mono'; font-family: 'Be Vietnam Pro', serif;
src: url('/fonts/overpass/OverpassMono.ttf') format('truetype-variations'); font-optical-sizing: auto;
font-weight: 1 999; /* font-size: 1.125rem;
font-style: normal;
ascent-override: 106.25%; ascent-override: 106.25%;
size-adjust: 106.25%; size-adjust: 106.25%; */
} }
.breadcrumbs__link { .breadcrumbs__link {
@@ -37,7 +29,6 @@ img {
/* You can override the default Infima variables here. */ /* You can override the default Infima variables here. */
:root { :root {
font-family: 'Overpass', sans-serif;
--ifm-color-primary: #4250af; --ifm-color-primary: #4250af;
--ifm-color-primary-dark: #4250af; --ifm-color-primary-dark: #4250af;
--ifm-color-primary-darker: #4250af; --ifm-color-primary-darker: #4250af;
@@ -68,12 +59,14 @@ div[class^='announcementBar_'] {
} }
.menu__link { .menu__link {
padding: 10px 10px 10px 16px; padding: 10px;
padding-left: 16px;
border-radius: 24px; border-radius: 24px;
margin-right: 16px; margin-right: 16px;
} }
.menu__list-item-collapsible { .menu__list-item-collapsible {
border-radius: 10px;
margin-right: 16px; margin-right: 16px;
border-radius: 24px; border-radius: 24px;
} }
@@ -90,12 +83,3 @@ div[class*='navbar__items'] > li:has(a[class*='version-switcher-34ab39']) {
code { code {
font-weight: 600; font-weight: 600;
} }
.buy-button {
padding: 8px 14px;
border: 1px solid transparent;
font-family: 'Overpass', sans-serif;
font-weight: 500;
cursor: pointer;
box-shadow: 0 0 5px 2px rgba(181, 206, 254, 0.4);
}
View File
@@ -2,7 +2,6 @@ import {
mdiBug, mdiBug,
mdiCalendarToday, mdiCalendarToday,
mdiCrosshairsOff, mdiCrosshairsOff,
mdiCrop,
mdiDatabase, mdiDatabase,
mdiLeadPencil, mdiLeadPencil,
mdiLockOff, mdiLockOff,
@@ -23,18 +22,6 @@ const withLanguage = (date: Date) => (language: string) => date.toLocaleDateStri
type Item = Omit<TimelineItem, 'done' | 'getDateLabel'> & { date: Date }; type Item = Omit<TimelineItem, 'done' | 'getDateLabel'> & { date: Date };
const items: Item[] = [ const items: Item[] = [
{
icon: mdiCrop,
iconColor: 'tomato',
title: 'Image dimensions in EXIF metadata are cursed',
description:
'The dimensions in EXIF metadata can be different from the actual dimensions of the image, causing issues with cropping and resizing.',
link: {
url: 'https://github.com/immich-app/immich/pull/17974',
text: '#17974',
},
date: new Date(2025, 5, 5),
},
{ {
icon: mdiMicrosoftWindows, icon: mdiMicrosoftWindows,
iconColor: '#357EC7', iconColor: '#357EC7',
View File
@@ -1,5 +0,0 @@
# Errors
## TypeORM Upgrade
The upgrade to Immich `v2.x.x` requires first upgrading to `v1.132.0` or later. This means you must start the application at least once on version `1.132.0` (or later); doing so completes the database schema upgrades that `v2.0.0` depends on. After Immich has successfully booted on such a version, shut the system down and try the `v2.x.x` upgrade again.
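With Docker Compose, one way to take the intermediate step is to pin the version temporarily; the `immich-server` service name, `.env` layout, and GNU `sed` usage below are assumptions based on the default setup, so adjust if yours differ:

```bash
# Pin the intermediate version in .env (any v1.132.0+ release works).
sed -i 's/^IMMICH_VERSION=.*/IMMICH_VERSION=v1.132.0/' .env

# Boot once so the schema upgrades run, then shut down cleanly.
docker compose up -d
docker compose logs -f immich-server   # wait for a successful startup
docker compose down

# Finally, set IMMICH_VERSION to the v2.x.x target and start again.
```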
View File
@@ -4,7 +4,6 @@ import Layout from '@theme/Layout';
import { discordPath, discordViewBox } from '@site/src/components/svg-paths'; import { discordPath, discordViewBox } from '@site/src/components/svg-paths';
import ThemedImage from '@theme/ThemedImage'; import ThemedImage from '@theme/ThemedImage';
import Icon from '@mdi/react'; import Icon from '@mdi/react';
function HomepageHeader() { function HomepageHeader() {
return ( return (
<header> <header>
@@ -13,14 +12,11 @@ function HomepageHeader() {
<div className="w-full h-[120vh] absolute left-0 top-0 backdrop-blur-3xl bg-immich-bg/40 dark:bg-transparent"></div> <div className="w-full h-[120vh] absolute left-0 top-0 backdrop-blur-3xl bg-immich-bg/40 dark:bg-transparent"></div>
</div> </div>
<section className="text-center pt-12 sm:pt-24 bg-immich-bg/50 dark:bg-immich-dark-bg/80"> <section className="text-center pt-12 sm:pt-24 bg-immich-bg/50 dark:bg-immich-dark-bg/80">
<a href="https://futo.org" target="_blank" rel="noopener noreferrer">
<ThemedImage <ThemedImage
sources={{ dark: 'img/logomark-dark-with-futo.svg', light: 'img/logomark-light-with-futo.svg' }} sources={{ dark: 'img/logomark-dark.svg', light: 'img/logomark-light.svg' }}
className="h-[125px] w-[125px] mb-2 antialiased rounded-none" className="h-[115px] w-[115px] mb-2 antialiased rounded-none"
alt="Immich logo" alt="Immich logo"
/> />
</a>
<div className="mt-8"> <div className="mt-8">
<p className="text-3xl md:text-5xl sm:leading-tight mb-1 font-extrabold text-black/90 dark:text-white px-4"> <p className="text-3xl md:text-5xl sm:leading-tight mb-1 font-extrabold text-black/90 dark:text-white px-4">
Self-hosted{' '} Self-hosted{' '}
@@ -31,7 +27,7 @@ function HomepageHeader() {
solution<span className="block"></span> solution<span className="block"></span>
</p> </p>
<p className="max-w-1/4 m-auto mt-4 px-4 text-lg text-gray-700 dark:text-gray-100"> <p className="max-w-1/4 m-auto mt-4 px-4">
Easily back up, organize, and manage your photos on your own server. Immich helps you Easily back up, organize, and manage your photos on your own server. Immich helps you
<span className="sm:block"></span> browse, search and organize your photos and videos with ease, without <span className="sm:block"></span> browse, search and organize your photos and videos with ease, without
sacrificing your privacy. sacrificing your privacy.
@@ -39,21 +35,27 @@ function HomepageHeader() {
</div> </div>
<div className="flex flex-col sm:flex-row place-items-center place-content-center mt-9 gap-4 "> <div className="flex flex-col sm:flex-row place-items-center place-content-center mt-9 gap-4 ">
<Link <Link
className="flex place-items-center place-content-center py-3 px-8 border bg-immich-primary dark:bg-immich-dark-primary rounded-xl no-underline hover:no-underline text-white hover:text-gray-50 dark:text-immich-dark-bg font-bold" className="flex place-items-center place-content-center py-3 px-8 border bg-immich-primary dark:bg-immich-dark-primary rounded-xl no-underline hover:no-underline text-white hover:text-gray-50 dark:text-immich-dark-bg font-bold uppercase"
to="docs/overview/quick-start" to="docs/overview/introduction"
> >
Get Started Get started
</Link> </Link>
<Link <Link
className="flex place-items-center place-content-center py-3 px-8 border bg-white/90 dark:bg-gray-300 rounded-xl hover:no-underline text-immich-primary dark:text-immich-dark-bg font-bold" className="flex place-items-center place-content-center py-3 px-8 border bg-immich-primary/10 dark:bg-gray-300 rounded-xl hover:no-underline text-immich-primary dark:text-immich-dark-bg font-bold uppercase"
to="https://demo.immich.app/" to="https://demo.immich.app/"
> >
Open Demo Demo
</Link>
<Link
className="flex place-items-center place-content-center py-3 px-8 border bg-immich-primary/10 dark:bg-gray-300 rounded-xl hover:no-underline text-immich-primary dark:text-immich-dark-bg font-bold uppercase"
to="https://immich.store"
>
Buy Merch
</Link> </Link>
</div> </div>
<div className="my-12 flex gap-1 font-medium place-items-center place-content-center text-immich-primary dark:text-immich-dark-primary">
<div className="my-8 flex gap-1 font-medium place-items-center place-content-center text-immich-primary dark:text-immich-dark-primary">
<Icon <Icon
path={discordPath} path={discordPath}
viewBox={discordViewBox} /* viewBox may show an error in your IDE but it is normal. */ viewBox={discordViewBox} /* viewBox may show an error in your IDE but it is normal. */
@@ -86,18 +88,11 @@ function HomepageHeader() {
<img className="h-24" alt="Get it on Google Play" src="/img/google-play-badge.png" /> <img className="h-24" alt="Get it on Google Play" src="/img/google-play-badge.png" />
</a> </a>
</div> </div>
<div className="h-24"> <div className="h-24">
<a href="https://apps.apple.com/sg/app/immich/id1613945652"> <a href="https://apps.apple.com/sg/app/immich/id1613945652">
<img className="h-24 sm:p-3.5 p-3" alt="Download on the App Store" src="/img/ios-app-store-badge.svg" /> <img className="h-24 sm:p-3.5 p-3" alt="Download on the App Store" src="/img/ios-app-store-badge.svg" />
</a> </a>
</div> </div>
<div className="h-24">
<a href="https://github.com/immich-app/immich/releases/latest">
<img className="h-24 sm:p-3.5 p-3" alt="Download APK" src="/img/download-apk-github.svg" />
</a>
</div>
</div> </div>
<ThemedImage <ThemedImage
sources={{ dark: '/img/app-qr-code-dark.svg', light: '/img/app-qr-code-light.svg' }} sources={{ dark: '/img/app-qr-code-dark.svg', light: '/img/app-qr-code-light.svg' }}
@@ -116,7 +111,7 @@ export default function Home(): JSX.Element {
<HomepageHeader /> <HomepageHeader />
<div className="flex flex-col place-items-center text-center place-content-center dark:bg-immich-dark-bg py-8"> <div className="flex flex-col place-items-center text-center place-content-center dark:bg-immich-dark-bg py-8">
<p>This project is available under GNU AGPL v3 license.</p> <p>This project is available under GNU AGPL v3 license.</p>
<p className="text-sm">Privacy should not be a luxury</p> <p className="text-xs">Privacy should not be a luxury</p>
</div> </div>
</Layout> </Layout>
); );
View File
@@ -1,4 +1,5 @@
import React from 'react'; import React from 'react';
import Link from '@docusaurus/Link';
import Layout from '@theme/Layout'; import Layout from '@theme/Layout';
function HomepageHeader() { function HomepageHeader() {
return ( return (
View File
@@ -76,18 +76,13 @@ import {
mdiWeb, mdiWeb,
mdiDatabaseOutline, mdiDatabaseOutline,
mdiLinkEdit, mdiLinkEdit,
mdiTagFaces,
mdiMovieOpenPlayOutline, mdiMovieOpenPlayOutline,
mdiCast,
} from '@mdi/js'; } from '@mdi/js';
import Layout from '@theme/Layout'; import Layout from '@theme/Layout';
import React from 'react'; import React from 'react';
import { Item, Timeline } from '../components/timeline'; import { Item, Timeline } from '../components/timeline';
const releases = { const releases = {
'v1.133.0': new Date(2025, 4, 21),
'v1.130.0': new Date(2025, 2, 25),
'v1.127.0': new Date(2025, 1, 26),
'v1.122.0': new Date(2024, 11, 5), 'v1.122.0': new Date(2024, 11, 5),
'v1.120.0': new Date(2024, 10, 6), 'v1.120.0': new Date(2024, 10, 6),
'v1.114.0': new Date(2024, 8, 6), 'v1.114.0': new Date(2024, 8, 6),
@@ -218,6 +213,14 @@ const roadmap: Item[] = [
iconColor: 'indianred', iconColor: 'indianred',
title: 'Stable release', title: 'Stable release',
description: 'Immich goes stable', description: 'Immich goes stable',
getDateLabel: () => 'Planned for early 2025',
},
{
done: false,
icon: mdiLockOutline,
iconColor: 'sandybrown',
title: 'Private/locked photos',
description: 'Private assets with extra protections',
getDateLabel: () => 'Planned for 2025', getDateLabel: () => 'Planned for 2025',
}, },
{ {
@@ -239,27 +242,6 @@ const roadmap: Item[] = [
]; ];
const milestones: Item[] = [ const milestones: Item[] = [
withRelease({
icon: mdiCast,
iconColor: 'aqua',
title: 'Google Cast (web)',
description: 'Cast assets to Google Cast/Chromecast compatible devices',
release: 'v1.133.0',
}),
withRelease({
icon: mdiLockOutline,
iconColor: 'sandybrown',
title: 'Private/locked photos',
description: 'Private assets with extra protections',
release: 'v1.133.0',
}),
withRelease({
icon: mdiFolderMultiple,
iconColor: 'brown',
title: 'Folders view in the mobile app',
description: 'Browse your photos and videos in their folder structure inside the mobile app',
release: 'v1.130.0',
}),
{ {
icon: mdiStar, icon: mdiStar,
iconColor: 'gold', iconColor: 'gold',
@@ -267,14 +249,6 @@ const milestones: Item[] = [
description: 'Reached 60K Stars on GitHub!', description: 'Reached 60K Stars on GitHub!',
getDateLabel: withLanguage(new Date(2025, 2, 4)), getDateLabel: withLanguage(new Date(2025, 2, 4)),
}, },
withRelease({
icon: mdiTagFaces,
iconColor: 'teal',
title: 'Manual face tagging',
description:
'Manually tag or remove faces in photos and videos, even when automatic detection misses or misidentifies them.',
release: 'v1.127.0',
}),
withRelease({ withRelease({
icon: mdiLinkEdit, icon: mdiLinkEdit,
iconColor: 'crimson', iconColor: 'crimson',
@@ -292,8 +266,8 @@ const milestones: Item[] = [
withRelease({ withRelease({
icon: mdiDatabaseOutline, icon: mdiDatabaseOutline,
iconColor: 'brown', iconColor: 'brown',
title: 'Automatic database dumps', title: 'Automatic database backups',
description: 'Database dumps are now integrated into the Immich server', description: 'Database backups are now integrated into the Immich server',
release: 'v1.120.0', release: 'v1.120.0',
}), }),
{ {
@@ -326,7 +300,7 @@ const milestones: Item[] = [
withRelease({ withRelease({
icon: mdiFolderMultiple, icon: mdiFolderMultiple,
iconColor: 'brown', iconColor: 'brown',
title: 'Folders view', title: 'Folders',
description: 'Browse your photos and videos in their folder structure', description: 'Browse your photos and videos in their folder structure',
release: 'v1.113.0', release: 'v1.113.0',
}), }),
View File
@@ -1,5 +0,0 @@
Policy: https://github.com/immich-app/immich/blob/main/SECURITY.md
Contact: mailto:security@immich.app
Preferred-Languages: en
Expires: 2026-05-01T23:59:00.000Z
Canonical: https://immich.app/.well-known/security.txt
View File
@@ -30,4 +30,3 @@
/docs/guides/api-album-sync /docs/community-projects 307 /docs/guides/api-album-sync /docs/community-projects 307
/docs/guides/remove-offline-files /docs/community-projects 307 /docs/guides/remove-offline-files /docs/community-projects 307
/milestones /roadmap 307 /milestones /roadmap 307
/docs/overview/introduction /docs/overview/welcome 307
View File
@@ -1,20 +1,32 @@
[ [
{ {
"label": "v1.133.0", "label": "v1.131.2",
"url": "https://v1.133.0.archive.immich.app" "url": "https://v1.131.2.archive.immich.app"
}, },
{ {
"label": "v1.132.3", "label": "v1.131.1",
"url": "https://v1.132.3.archive.immich.app" "url": "https://v1.131.1.archive.immich.app"
}, },
{ {
"label": "v1.131.3", "label": "v1.131.0",
"url": "https://v1.131.3.archive.immich.app" "url": "https://v1.131.0.archive.immich.app"
}, },
{ {
"label": "v1.130.3", "label": "v1.130.3",
"url": "https://v1.130.3.archive.immich.app" "url": "https://v1.130.3.archive.immich.app"
}, },
{
"label": "v1.130.2",
"url": "https://v1.130.2.archive.immich.app"
},
{
"label": "v1.130.1",
"url": "https://v1.130.1.archive.immich.app"
},
{
"label": "v1.130.0",
"url": "https://v1.130.0.archive.immich.app"
},
{ {
"label": "v1.129.0", "label": "v1.129.0",
"url": "https://v1.129.0.archive.immich.app" "url": "https://v1.129.0.archive.immich.app"
@@ -31,14 +43,46 @@
"label": "v1.126.1", "label": "v1.126.1",
"url": "https://v1.126.1.archive.immich.app" "url": "https://v1.126.1.archive.immich.app"
}, },
{
"label": "v1.126.0",
"url": "https://v1.126.0.archive.immich.app"
},
{ {
"label": "v1.125.7", "label": "v1.125.7",
"url": "https://v1.125.7.archive.immich.app" "url": "https://v1.125.7.archive.immich.app"
}, },
{
"label": "v1.125.6",
"url": "https://v1.125.6.archive.immich.app"
},
{
"label": "v1.125.5",
"url": "https://v1.125.5.archive.immich.app"
},
{
"label": "v1.125.3",
"url": "https://v1.125.3.archive.immich.app"
},
{
"label": "v1.125.2",
"url": "https://v1.125.2.archive.immich.app"
},
{
"label": "v1.125.1",
"url": "https://v1.125.1.archive.immich.app"
},
{ {
"label": "v1.124.2", "label": "v1.124.2",
"url": "https://v1.124.2.archive.immich.app" "url": "https://v1.124.2.archive.immich.app"
}, },
{
"label": "v1.124.1",
"url": "https://v1.124.1.archive.immich.app"
},
{
"label": "v1.124.0",
"url": "https://v1.124.0.archive.immich.app"
},
{ {
"label": "v1.123.0", "label": "v1.123.0",
"url": "https://v1.123.0.archive.immich.app" "url": "https://v1.123.0.archive.immich.app"
@@ -47,6 +91,18 @@
"label": "v1.122.3", "label": "v1.122.3",
"url": "https://v1.122.3.archive.immich.app" "url": "https://v1.122.3.archive.immich.app"
}, },
{
"label": "v1.122.2",
"url": "https://v1.122.2.archive.immich.app"
},
{
"label": "v1.122.1",
"url": "https://v1.122.1.archive.immich.app"
},
{
"label": "v1.122.0",
"url": "https://v1.122.0.archive.immich.app"
},
{ {
"label": "v1.121.0", "label": "v1.121.0",
"url": "https://v1.121.0.archive.immich.app" "url": "https://v1.121.0.archive.immich.app"
@@ -55,14 +111,34 @@
"label": "v1.120.2", "label": "v1.120.2",
"url": "https://v1.120.2.archive.immich.app" "url": "https://v1.120.2.archive.immich.app"
}, },
{
"label": "v1.120.1",
"url": "https://v1.120.1.archive.immich.app"
},
{
"label": "v1.120.0",
"url": "https://v1.120.0.archive.immich.app"
},
{ {
"label": "v1.119.1", "label": "v1.119.1",
"url": "https://v1.119.1.archive.immich.app" "url": "https://v1.119.1.archive.immich.app"
}, },
{
"label": "v1.119.0",
"url": "https://v1.119.0.archive.immich.app"
},
{ {
"label": "v1.118.2", "label": "v1.118.2",
"url": "https://v1.118.2.archive.immich.app" "url": "https://v1.118.2.archive.immich.app"
}, },
{
"label": "v1.118.1",
"url": "https://v1.118.1.archive.immich.app"
},
{
"label": "v1.118.0",
"url": "https://v1.118.0.archive.immich.app"
},
{ {
"label": "v1.117.0", "label": "v1.117.0",
"url": "https://v1.117.0.archive.immich.app" "url": "https://v1.117.0.archive.immich.app"
@@ -71,6 +147,14 @@
"label": "v1.116.2", "label": "v1.116.2",
"url": "https://v1.116.2.archive.immich.app" "url": "https://v1.116.2.archive.immich.app"
}, },
{
"label": "v1.116.1",
"url": "https://v1.116.1.archive.immich.app"
},
{
"label": "v1.116.0",
"url": "https://v1.116.0.archive.immich.app"
},
{ {
"label": "v1.115.0", "label": "v1.115.0",
"url": "https://v1.115.0.archive.immich.app" "url": "https://v1.115.0.archive.immich.app"
@@ -83,10 +167,18 @@
"label": "v1.113.1", "label": "v1.113.1",
"url": "https://v1.113.1.archive.immich.app" "url": "https://v1.113.1.archive.immich.app"
}, },
{
"label": "v1.113.0",
"url": "https://v1.113.0.archive.immich.app"
},
{ {
"label": "v1.112.1", "label": "v1.112.1",
"url": "https://v1.112.1.archive.immich.app" "url": "https://v1.112.1.archive.immich.app"
}, },
{
"label": "v1.112.0",
"url": "https://v1.112.0.archive.immich.app"
},
{ {
"label": "v1.111.0", "label": "v1.111.0",
"url": "https://v1.111.0.archive.immich.app" "url": "https://v1.111.0.archive.immich.app"
@@ -99,6 +191,14 @@
"label": "v1.109.2", "label": "v1.109.2",
"url": "https://v1.109.2.archive.immich.app" "url": "https://v1.109.2.archive.immich.app"
}, },
{
"label": "v1.109.1",
"url": "https://v1.109.1.archive.immich.app"
},
{
"label": "v1.109.0",
"url": "https://v1.109.0.archive.immich.app"
},
{ {
"label": "v1.108.0", "label": "v1.108.0",
"url": "https://v1.108.0.archive.immich.app" "url": "https://v1.108.0.archive.immich.app"
@@ -107,14 +207,38 @@
"label": "v1.107.2", "label": "v1.107.2",
"url": "https://v1.107.2.archive.immich.app" "url": "https://v1.107.2.archive.immich.app"
}, },
{
"label": "v1.107.1",
"url": "https://v1.107.1.archive.immich.app"
},
{
"label": "v1.107.0",
"url": "https://v1.107.0.archive.immich.app"
},
{ {
"label": "v1.106.4", "label": "v1.106.4",
"url": "https://v1.106.4.archive.immich.app" "url": "https://v1.106.4.archive.immich.app"
}, },
{
"label": "v1.106.3",
"url": "https://v1.106.3.archive.immich.app"
},
{
"label": "v1.106.2",
"url": "https://v1.106.2.archive.immich.app"
},
{
"label": "v1.106.1",
"url": "https://v1.106.1.archive.immich.app"
},
{ {
"label": "v1.105.1", "label": "v1.105.1",
"url": "https://v1.105.1.archive.immich.app" "url": "https://v1.105.1.archive.immich.app"
}, },
{
"label": "v1.105.0",
"url": "https://v1.105.0.archive.immich.app"
},
{ {
"label": "v1.104.0", "label": "v1.104.0",
"url": "https://v1.104.0.archive.immich.app" "url": "https://v1.104.0.archive.immich.app"
@@ -123,10 +247,26 @@
"label": "v1.103.1", "label": "v1.103.1",
"url": "https://v1.103.1.archive.immich.app" "url": "https://v1.103.1.archive.immich.app"
}, },
{
"label": "v1.103.0",
"url": "https://v1.103.0.archive.immich.app"
},
{ {
"label": "v1.102.3", "label": "v1.102.3",
"url": "https://v1.102.3.archive.immich.app" "url": "https://v1.102.3.archive.immich.app"
}, },
{
"label": "v1.102.2",
"url": "https://v1.102.2.archive.immich.app"
},
{
"label": "v1.102.1",
"url": "https://v1.102.1.archive.immich.app"
},
{
"label": "v1.102.0",
"url": "https://v1.102.0.archive.immich.app"
},
{ {
"label": "v1.101.0", "label": "v1.101.0",
"url": "https://v1.101.0.archive.immich.app" "url": "https://v1.101.0.archive.immich.app"

Binary file not shown.

Binary file not shown.

Binary file not shown.

File diff suppressed because one or more lines are too long (image diff; before: 14 KiB)

File diff suppressed because one or more lines are too long (image diff; before: 75 KiB)

File diff suppressed because one or more lines are too long (image diff; before: 75 KiB)

View File

@@ -1 +1 @@
22.15.1 22.14.0

View File

@@ -34,11 +34,11 @@ services:
- 2285:2285 - 2285:2285
redis: redis:
image: redis:6.2-alpine@sha256:3211c33a618c457e5d241922c975dbc4f446d0bdb2dc75694f5573ef8e2d01fa image: redis:6.2-alpine@sha256:148bb5411c184abd288d9aaed139c98123eeb8824c5d3fce03cf721db58066d8
database: database:
image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0 image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
command: -c fsync=off -c shared_preload_libraries=vchord.so -c config_file=/var/lib/postgresql/data/postgresql.conf command: -c fsync=off -c shared_preload_libraries=vectors.so
environment: environment:
POSTGRES_PASSWORD: postgres POSTGRES_PASSWORD: postgres
POSTGRES_USER: postgres POSTGRES_USER: postgres
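The functional change in this hunk is which vector extension the e2e database preloads: `vchord.so` (VectorChord) on one side of the compare, `vectors.so` (pgvecto-rs) on the other. A quick way to confirm what a running test database actually loaded, sketched with the `pg` client already present in the e2e devDependencies; the connection string is an assumption, so adjust it to the compose port mapping:

```ts
import { Client } from 'pg';

// Assumed credentials/port; match these to the e2e docker-compose mapping.
const client = new Client({ connectionString: 'postgres://postgres:postgres@localhost:5432/immich' });

await client.connect();
const { rows } = await client.query('SHOW shared_preload_libraries;');
console.log(rows[0].shared_preload_libraries); // 'vchord.so' or 'vectors.so'
await client.end();
```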

3376 e2e/package-lock.json (generated)

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
{ {
"name": "immich-e2e", "name": "immich-e2e",
"version": "1.133.0", "version": "1.131.2",
"description": "", "description": "",
"main": "index.js", "main": "index.js",
"type": "module", "type": "module",
@@ -25,9 +25,9 @@
"@immich/sdk": "file:../open-api/typescript-sdk", "@immich/sdk": "file:../open-api/typescript-sdk",
"@playwright/test": "^1.44.1", "@playwright/test": "^1.44.1",
"@types/luxon": "^3.4.2", "@types/luxon": "^3.4.2",
"@types/node": "^22.15.18", "@types/node": "^22.13.14",
"@types/oidc-provider": "^8.5.1", "@types/oidc-provider": "^8.5.1",
"@types/pg": "^8.15.1", "@types/pg": "^8.11.0",
"@types/pngjs": "^6.0.4", "@types/pngjs": "^6.0.4",
"@types/supertest": "^6.0.2", "@types/supertest": "^6.0.2",
"@vitest/coverage-v8": "^3.0.0", "@vitest/coverage-v8": "^3.0.0",
@@ -52,6 +52,6 @@
"vitest": "^3.0.0" "vitest": "^3.0.0"
}, },
"volta": { "volta": {
"node": "22.15.1" "node": "22.14.0"
} }
} }

View File

@@ -46,6 +46,38 @@ describe('/activities', () => {
}); });
describe('GET /activities', () => { describe('GET /activities', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/activities');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require an albumId', async () => {
const { status, body } = await request(app)
.get('/activities')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toEqual(400);
expect(body).toEqual(errorDto.badRequest(expect.arrayContaining(['albumId must be a UUID'])));
});
it('should reject an invalid albumId', async () => {
const { status, body } = await request(app)
.get('/activities')
.query({ albumId: uuidDto.invalid })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toEqual(400);
expect(body).toEqual(errorDto.badRequest(expect.arrayContaining(['albumId must be a UUID'])));
});
it('should reject an invalid assetId', async () => {
const { status, body } = await request(app)
.get('/activities')
.query({ albumId: uuidDto.notFound, assetId: uuidDto.invalid })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toEqual(400);
expect(body).toEqual(errorDto.badRequest(expect.arrayContaining(['assetId must be a UUID'])));
});
it('should start off empty', async () => { it('should start off empty', async () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.get('/activities') .get('/activities')
@@ -160,6 +192,30 @@ describe('/activities', () => {
}); });
describe('POST /activities', () => { describe('POST /activities', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).post('/activities');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require an albumId', async () => {
const { status, body } = await request(app)
.post('/activities')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ albumId: uuidDto.invalid });
expect(status).toEqual(400);
expect(body).toEqual(errorDto.badRequest(expect.arrayContaining(['albumId must be a UUID'])));
});
it('should require a comment when type is comment', async () => {
const { status, body } = await request(app)
.post('/activities')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ albumId: uuidDto.notFound, type: 'comment', comment: null });
expect(status).toEqual(400);
expect(body).toEqual(errorDto.badRequest(['comment must be a string', 'comment should not be empty']));
});
it('should add a comment to an album', async () => { it('should add a comment to an album', async () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.post('/activities') .post('/activities')
@@ -274,6 +330,20 @@ describe('/activities', () => {
}); });
describe('DELETE /activities/:id', () => { describe('DELETE /activities/:id', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).delete(`/activities/${uuidDto.notFound}`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require a valid uuid', async () => {
const { status, body } = await request(app)
.delete(`/activities/${uuidDto.invalid}`)
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
});
it('should remove a comment from an album', async () => { it('should remove a comment from an album', async () => {
const reaction = await createActivity({ const reaction = await createActivity({
albumId: album.id, albumId: album.id,
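Almost every test added in this hunk opens with the same two 401 assertions. If the pattern keeps spreading, a small helper could collapse it; a sketch using the spec's existing imports, with a hypothetical helper name:

```ts
import { expect, it } from 'vitest';
import request from 'supertest';
import { errorDto } from 'src/responses';
import { app } from 'src/utils';

// Hypothetical helper; mirrors the repeated assertions above.
const expectUnauthorized = async (req: PromiseLike<{ status: number; body: unknown }>) => {
  const { status, body } = await req;
  expect(status).toBe(401);
  expect(body).toEqual(errorDto.unauthorized);
};

it('should require authentication', () => expectUnauthorized(request(app).get('/activities')));
```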

View File

@@ -9,7 +9,7 @@ import {
LoginResponseDto, LoginResponseDto,
SharedLinkType, SharedLinkType,
} from '@immich/sdk'; } from '@immich/sdk';
import { createUserDto } from 'src/fixtures'; import { createUserDto, uuidDto } from 'src/fixtures';
import { errorDto } from 'src/responses'; import { errorDto } from 'src/responses';
import { app, asBearerAuth, utils } from 'src/utils'; import { app, asBearerAuth, utils } from 'src/utils';
import request from 'supertest'; import request from 'supertest';
@@ -128,6 +128,28 @@ describe('/albums', () => {
}); });
describe('GET /albums', () => { describe('GET /albums', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/albums');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should reject an invalid shared param', async () => {
const { status, body } = await request(app)
.get('/albums?shared=invalid')
.set('Authorization', `Bearer ${user1.accessToken}`);
expect(status).toEqual(400);
expect(body).toEqual(errorDto.badRequest(['shared must be a boolean value']));
});
it('should reject an invalid assetId param', async () => {
const { status, body } = await request(app)
.get('/albums?assetId=invalid')
.set('Authorization', `Bearer ${user1.accessToken}`);
expect(status).toEqual(400);
expect(body).toEqual(errorDto.badRequest(['assetId must be a UUID']));
});
it("should not show other users' favorites", async () => { it("should not show other users' favorites", async () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.get(`/albums/${user1Albums[0].id}?withoutAssets=false`) .get(`/albums/${user1Albums[0].id}?withoutAssets=false`)
@@ -301,6 +323,12 @@ describe('/albums', () => {
}); });
describe('GET /albums/:id', () => { describe('GET /albums/:id', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get(`/albums/${user1Albums[0].id}`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should return album info for own album', async () => { it('should return album info for own album', async () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.get(`/albums/${user1Albums[0].id}?withoutAssets=false`) .get(`/albums/${user1Albums[0].id}?withoutAssets=false`)
@@ -393,6 +421,12 @@ describe('/albums', () => {
}); });
describe('GET /albums/statistics', () => { describe('GET /albums/statistics', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/albums/statistics');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should return total count of albums the user has access to', async () => { it('should return total count of albums the user has access to', async () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.get('/albums/statistics') .get('/albums/statistics')
@@ -404,6 +438,12 @@ describe('/albums', () => {
}); });
describe('POST /albums', () => { describe('POST /albums', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).post('/albums').send({ albumName: 'New album' });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should create an album', async () => { it('should create an album', async () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.post('/albums') .post('/albums')
@@ -431,6 +471,12 @@ describe('/albums', () => {
}); });
describe('PUT /albums/:id/assets', () => { describe('PUT /albums/:id/assets', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).put(`/albums/${user1Albums[0].id}/assets`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should be able to add own asset to own album', async () => { it('should be able to add own asset to own album', async () => {
const asset = await utils.createAsset(user1.accessToken); const asset = await utils.createAsset(user1.accessToken);
const { status, body } = await request(app) const { status, body } = await request(app)
@@ -480,6 +526,14 @@ describe('/albums', () => {
}); });
describe('PATCH /albums/:id', () => { describe('PATCH /albums/:id', () => {
it('should require authentication', async () => {
const { status, body } = await request(app)
.patch(`/albums/${uuidDto.notFound}`)
.send({ albumName: 'New album name' });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should update an album', async () => { it('should update an album', async () => {
const album = await utils.createAlbum(user1.accessToken, { const album = await utils.createAlbum(user1.accessToken, {
albumName: 'New album', albumName: 'New album',
@@ -522,6 +576,15 @@ describe('/albums', () => {
}); });
describe('DELETE /albums/:id/assets', () => { describe('DELETE /albums/:id/assets', () => {
it('should require authentication', async () => {
const { status, body } = await request(app)
.delete(`/albums/${user1Albums[0].id}/assets`)
.send({ ids: [user1Asset1.id] });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require authorization', async () => { it('should require authorization', async () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.delete(`/albums/${user1Albums[1].id}/assets`) .delete(`/albums/${user1Albums[1].id}/assets`)
@@ -616,6 +679,13 @@ describe('/albums', () => {
}); });
}); });
it('should require authentication', async () => {
const { status, body } = await request(app).put(`/albums/${user1Albums[0].id}/users`).send({ sharedUserIds: [] });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should be able to add user to own album', async () => { it('should be able to add user to own album', async () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.put(`/albums/${album.id}/users`) .put(`/albums/${album.id}/users`)
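The two invalid-query-param tests added at the top of this file are structurally identical, so a table-driven `it.each` variant is a natural consolidation. A sketch assuming the same `app`, `errorDto`, and `user1` fixtures used throughout this spec:

```ts
import { expect, it } from 'vitest';
import request from 'supertest';
import { errorDto } from 'src/responses';
import { app } from 'src/utils';

it.each([
  { query: { shared: 'invalid' }, message: 'shared must be a boolean value' },
  { query: { assetId: 'invalid' }, message: 'assetId must be a UUID' },
])('should reject a query where $message', async ({ query, message }) => {
  const { status, body } = await request(app)
    .get('/albums')
    .query(query)
    .set('Authorization', `Bearer ${user1.accessToken}`); // user1 comes from the spec's beforeAll
  expect(status).toBe(400);
  expect(body).toEqual(errorDto.badRequest([message]));
});
```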

View File

@@ -1,5 +1,5 @@
import { LoginResponseDto, Permission, createApiKey } from '@immich/sdk'; import { LoginResponseDto, Permission, createApiKey } from '@immich/sdk';
import { createUserDto } from 'src/fixtures'; import { createUserDto, uuidDto } from 'src/fixtures';
import { errorDto } from 'src/responses'; import { errorDto } from 'src/responses';
import { app, asBearerAuth, utils } from 'src/utils'; import { app, asBearerAuth, utils } from 'src/utils';
import request from 'supertest'; import request from 'supertest';
@@ -24,6 +24,12 @@ describe('/api-keys', () => {
}); });
describe('POST /api-keys', () => { describe('POST /api-keys', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).post('/api-keys').send({ name: 'API Key' });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should not work without permission', async () => { it('should not work without permission', async () => {
const { secret } = await create(user.accessToken, [Permission.ApiKeyRead]); const { secret } = await create(user.accessToken, [Permission.ApiKeyRead]);
const { status, body } = await request(app).post('/api-keys').set('x-api-key', secret).send({ name: 'API Key' }); const { status, body } = await request(app).post('/api-keys').set('x-api-key', secret).send({ name: 'API Key' });
@@ -93,6 +99,12 @@ describe('/api-keys', () => {
}); });
describe('GET /api-keys', () => { describe('GET /api-keys', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/api-keys');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should start off empty', async () => { it('should start off empty', async () => {
const { status, body } = await request(app).get('/api-keys').set('Authorization', `Bearer ${admin.accessToken}`); const { status, body } = await request(app).get('/api-keys').set('Authorization', `Bearer ${admin.accessToken}`);
expect(body).toEqual([]); expect(body).toEqual([]);
@@ -113,6 +125,12 @@ describe('/api-keys', () => {
}); });
describe('GET /api-keys/:id', () => { describe('GET /api-keys/:id', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get(`/api-keys/${uuidDto.notFound}`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require authorization', async () => { it('should require authorization', async () => {
const { apiKey } = await create(user.accessToken, [Permission.All]); const { apiKey } = await create(user.accessToken, [Permission.All]);
const { status, body } = await request(app) const { status, body } = await request(app)
@@ -122,6 +140,14 @@ describe('/api-keys', () => {
expect(body).toEqual(errorDto.badRequest('API Key not found')); expect(body).toEqual(errorDto.badRequest('API Key not found'));
}); });
it('should require a valid uuid', async () => {
const { status, body } = await request(app)
.get(`/api-keys/${uuidDto.invalid}`)
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
});
it('should get api key details', async () => { it('should get api key details', async () => {
const { apiKey } = await create(user.accessToken, [Permission.All]); const { apiKey } = await create(user.accessToken, [Permission.All]);
const { status, body } = await request(app) const { status, body } = await request(app)
@@ -139,6 +165,12 @@ describe('/api-keys', () => {
}); });
describe('PUT /api-keys/:id', () => { describe('PUT /api-keys/:id', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).put(`/api-keys/${uuidDto.notFound}`).send({ name: 'new name' });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require authorization', async () => { it('should require authorization', async () => {
const { apiKey } = await create(user.accessToken, [Permission.All]); const { apiKey } = await create(user.accessToken, [Permission.All]);
const { status, body } = await request(app) const { status, body } = await request(app)
@@ -149,6 +181,15 @@ describe('/api-keys', () => {
expect(body).toEqual(errorDto.badRequest('API Key not found')); expect(body).toEqual(errorDto.badRequest('API Key not found'));
}); });
it('should require a valid uuid', async () => {
const { status, body } = await request(app)
.put(`/api-keys/${uuidDto.invalid}`)
.send({ name: 'new name' })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
});
it('should update api key details', async () => { it('should update api key details', async () => {
const { apiKey } = await create(user.accessToken, [Permission.All]); const { apiKey } = await create(user.accessToken, [Permission.All]);
const { status, body } = await request(app) const { status, body } = await request(app)
@@ -167,6 +208,12 @@ describe('/api-keys', () => {
}); });
describe('DELETE /api-keys/:id', () => { describe('DELETE /api-keys/:id', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).delete(`/api-keys/${uuidDto.notFound}`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require authorization', async () => { it('should require authorization', async () => {
const { apiKey } = await create(user.accessToken, [Permission.All]); const { apiKey } = await create(user.accessToken, [Permission.All]);
const { status, body } = await request(app) const { status, body } = await request(app)
@@ -176,6 +223,14 @@ describe('/api-keys', () => {
expect(body).toEqual(errorDto.badRequest('API Key not found')); expect(body).toEqual(errorDto.badRequest('API Key not found'));
}); });
it('should require a valid uuid', async () => {
const { status, body } = await request(app)
.delete(`/api-keys/${uuidDto.invalid}`)
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
});
it('should delete an api key', async () => { it('should delete an api key', async () => {
const { apiKey } = await create(user.accessToken, [Permission.All]); const { apiKey } = await create(user.accessToken, [Permission.All]);
const { status } = await request(app) const { status } = await request(app)
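The `create` helper these tests call is defined above the hunk and not shown here. Given the `createApiKey` import from `@immich/sdk` and the `asBearerAuth` utility, a plausible shape is the following; this is an inference, not the verbatim definition:

```ts
import { createApiKey, Permission } from '@immich/sdk';
import { asBearerAuth } from 'src/utils';

// Assumed shape: create a key with the given permissions, authenticated
// as the calling user, returning { apiKey, secret }.
const create = (accessToken: string, permissions: Permission[]) =>
  createApiKey({ apiKeyCreateDto: { name: 'api key', permissions } }, { headers: asBearerAuth(accessToken) });
```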

View File

@@ -3,7 +3,6 @@ import {
AssetMediaStatus, AssetMediaStatus,
AssetResponseDto, AssetResponseDto,
AssetTypeEnum, AssetTypeEnum,
AssetVisibility,
getAssetInfo, getAssetInfo,
getMyUser, getMyUser,
LoginResponseDto, LoginResponseDto,
@@ -23,9 +22,27 @@ import { app, asBearerAuth, tempDir, TEN_TIMES, testAssetDir, utils } from 'src/
import request from 'supertest'; import request from 'supertest';
import { afterAll, beforeAll, describe, expect, it } from 'vitest'; import { afterAll, beforeAll, describe, expect, it } from 'vitest';
const makeUploadDto = (options?: { omit: string }): Record<string, any> => {
const dto: Record<string, any> = {
deviceAssetId: 'example-image',
deviceId: 'TEST',
fileCreatedAt: new Date().toISOString(),
fileModifiedAt: new Date().toISOString(),
isFavorite: 'testing',
duration: '0:00:00.000000',
};
const omit = options?.omit;
if (omit) {
delete dto[omit];
}
return dto;
};
const locationAssetFilepath = `${testAssetDir}/metadata/gps-position/thompson-springs.jpg`; const locationAssetFilepath = `${testAssetDir}/metadata/gps-position/thompson-springs.jpg`;
const ratingAssetFilepath = `${testAssetDir}/metadata/rating/mongolels.jpg`; const ratingAssetFilepath = `${testAssetDir}/metadata/rating/mongolels.jpg`;
const facesAssetDir = `${testAssetDir}/metadata/faces`; const facesAssetFilepath = `${testAssetDir}/metadata/faces/portrait.jpg`;
const readTags = async (bytes: Buffer, filename: string) => { const readTags = async (bytes: Buffer, filename: string) => {
const filepath = join(tempDir, filename); const filepath = join(tempDir, filename);
@@ -120,9 +137,9 @@ describe('/asset', () => {
// stats // stats
utils.createAsset(statsUser.accessToken), utils.createAsset(statsUser.accessToken),
utils.createAsset(statsUser.accessToken, { isFavorite: true }), utils.createAsset(statsUser.accessToken, { isFavorite: true }),
utils.createAsset(statsUser.accessToken, { visibility: AssetVisibility.Archive }), utils.createAsset(statsUser.accessToken, { isArchived: true }),
utils.createAsset(statsUser.accessToken, { utils.createAsset(statsUser.accessToken, {
visibility: AssetVisibility.Archive, isArchived: true,
isFavorite: true, isFavorite: true,
assetData: { filename: 'example.mp4' }, assetData: { filename: 'example.mp4' },
}), }),
@@ -143,6 +160,13 @@ describe('/asset', () => {
}); });
describe('GET /assets/:id/original', () => { describe('GET /assets/:id/original', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get(`/assets/${uuidDto.notFound}/original`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should download the file', async () => { it('should download the file', async () => {
const response = await request(app) const response = await request(app)
.get(`/assets/${user1Assets[0].id}/original`) .get(`/assets/${user1Assets[0].id}/original`)
@@ -154,6 +178,20 @@ describe('/asset', () => {
}); });
describe('GET /assets/:id', () => { describe('GET /assets/:id', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get(`/assets/${uuidDto.notFound}`);
expect(body).toEqual(errorDto.unauthorized);
expect(status).toBe(401);
});
it('should require a valid id', async () => {
const { status, body } = await request(app)
.get(`/assets/${uuidDto.invalid}`)
.set('Authorization', `Bearer ${user1.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
});
it('should require access', async () => { it('should require access', async () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.get(`/assets/${user2Assets[0].id}`) .get(`/assets/${user2Assets[0].id}`)
@@ -186,22 +224,31 @@ describe('/asset', () => {
}); });
}); });
describe('faces', () => { it('should get the asset faces', async () => {
const metadataFaceTests = [ const config = await utils.getSystemConfig(admin.accessToken);
{ config.metadata.faces.import = true;
description: 'without orientation', await updateConfig({ systemConfigDto: config }, { headers: asBearerAuth(admin.accessToken) });
// asset faces
const facesAsset = await utils.createAsset(admin.accessToken, {
assetData: {
filename: 'portrait.jpg', filename: 'portrait.jpg',
bytes: await readFile(facesAssetFilepath),
}, },
{ });
description: 'adjusting face regions to orientation',
filename: 'portrait-orientation-6.jpg', await utils.waitForWebsocketEvent({ event: 'assetUpload', id: facesAsset.id });
},
]; const { status, body } = await request(app)
// should produce same resulting face region coordinates for any orientation .get(`/assets/${facesAsset.id}`)
const expectedFaces = [ .set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body.id).toEqual(facesAsset.id);
expect(body.people).toMatchObject([
{ {
name: 'Marie Curie', name: 'Marie Curie',
birthDate: null, birthDate: null,
thumbnailPath: '',
isHidden: false, isHidden: false,
faces: [ faces: [
{ {
@@ -218,6 +265,7 @@ describe('/asset', () => {
{ {
name: 'Pierre Curie', name: 'Pierre Curie',
birthDate: null, birthDate: null,
thumbnailPath: '',
isHidden: false, isHidden: false,
faces: [ faces: [
{ {
@@ -231,30 +279,7 @@ describe('/asset', () => {
}, },
], ],
}, },
]; ]);
it.each(metadataFaceTests)('should get the asset faces from $filename $description', async ({ filename }) => {
const config = await utils.getSystemConfig(admin.accessToken);
config.metadata.faces.import = true;
await updateConfig({ systemConfigDto: config }, { headers: asBearerAuth(admin.accessToken) });
const facesAsset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: await readFile(`${facesAssetDir}/${filename}`),
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: facesAsset.id });
const { status, body } = await request(app)
.get(`/assets/${facesAsset.id}`)
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(body.id).toEqual(facesAsset.id);
expect(body.people).toMatchObject(expectedFaces);
});
}); });
it('should work with a shared link', async () => { it('should work with a shared link', async () => {
@@ -308,7 +333,7 @@ describe('/asset', () => {
}); });
it('disallows viewing archived assets', async () => { it('disallows viewing archived assets', async () => {
const asset = await utils.createAsset(user1.accessToken, { visibility: AssetVisibility.Archive }); const asset = await utils.createAsset(user1.accessToken, { isArchived: true });
const { status } = await request(app) const { status } = await request(app)
.get(`/assets/${asset.id}`) .get(`/assets/${asset.id}`)
@@ -329,6 +354,13 @@ describe('/asset', () => {
}); });
describe('GET /assets/statistics', () => { describe('GET /assets/statistics', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/assets/statistics');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should return stats of all assets', async () => { it('should return stats of all assets', async () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.get('/assets/statistics') .get('/assets/statistics')
@@ -352,7 +384,7 @@ describe('/asset', () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.get('/assets/statistics') .get('/assets/statistics')
.set('Authorization', `Bearer ${statsUser.accessToken}`) .set('Authorization', `Bearer ${statsUser.accessToken}`)
.query({ visibility: AssetVisibility.Archive }); .query({ isArchived: true });
expect(status).toBe(200); expect(status).toBe(200);
expect(body).toEqual({ images: 1, videos: 1, total: 2 }); expect(body).toEqual({ images: 1, videos: 1, total: 2 });
@@ -362,7 +394,7 @@ describe('/asset', () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.get('/assets/statistics') .get('/assets/statistics')
.set('Authorization', `Bearer ${statsUser.accessToken}`) .set('Authorization', `Bearer ${statsUser.accessToken}`)
.query({ isFavorite: true, visibility: AssetVisibility.Archive }); .query({ isFavorite: true, isArchived: true });
expect(status).toBe(200); expect(status).toBe(200);
expect(body).toEqual({ images: 0, videos: 1, total: 1 }); expect(body).toEqual({ images: 0, videos: 1, total: 1 });
@@ -372,7 +404,7 @@ describe('/asset', () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.get('/assets/statistics') .get('/assets/statistics')
.set('Authorization', `Bearer ${statsUser.accessToken}`) .set('Authorization', `Bearer ${statsUser.accessToken}`)
.query({ isFavorite: false, visibility: AssetVisibility.Timeline }); .query({ isFavorite: false, isArchived: false });
expect(status).toBe(200); expect(status).toBe(200);
expect(body).toEqual({ images: 1, videos: 0, total: 1 }); expect(body).toEqual({ images: 1, videos: 0, total: 1 });
@@ -393,6 +425,13 @@ describe('/asset', () => {
await utils.waitForQueueFinish(admin.accessToken, 'thumbnailGeneration'); await utils.waitForQueueFinish(admin.accessToken, 'thumbnailGeneration');
}); });
it('should require authentication', async () => {
const { status, body } = await request(app).get('/assets/random');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it.each(TEN_TIMES)('should return 1 random assets', async () => { it.each(TEN_TIMES)('should return 1 random assets', async () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.get('/assets/random') .get('/assets/random')
@@ -428,9 +467,31 @@ describe('/asset', () => {
expect(status).toBe(200); expect(status).toBe(200);
expect(body).toEqual([expect.objectContaining({ id: user2Assets[0].id })]); expect(body).toEqual([expect.objectContaining({ id: user2Assets[0].id })]);
}); });
it('should return error', async () => {
const { status } = await request(app)
.get('/assets/random?count=ABC')
.set('Authorization', `Bearer ${user1.accessToken}`);
expect(status).toBe(400);
});
}); });
describe('PUT /assets/:id', () => { describe('PUT /assets/:id', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).put(`/assets/:${uuidDto.notFound}`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require a valid id', async () => {
const { status, body } = await request(app)
.put(`/assets/${uuidDto.invalid}`)
.set('Authorization', `Bearer ${user1.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
});
it('should require access', async () => { it('should require access', async () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.put(`/assets/${user2Assets[0].id}`) .put(`/assets/${user2Assets[0].id}`)
@@ -458,7 +519,7 @@ describe('/asset', () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.put(`/assets/${user1Assets[0].id}`) .put(`/assets/${user1Assets[0].id}`)
.set('Authorization', `Bearer ${user1.accessToken}`) .set('Authorization', `Bearer ${user1.accessToken}`)
.send({ visibility: AssetVisibility.Archive }); .send({ isArchived: true });
expect(body).toMatchObject({ id: user1Assets[0].id, isArchived: true }); expect(body).toMatchObject({ id: user1Assets[0].id, isArchived: true });
expect(status).toEqual(200); expect(status).toEqual(200);
}); });
@@ -558,6 +619,28 @@ describe('/asset', () => {
expect(status).toEqual(200); expect(status).toEqual(200);
}); });
it('should reject invalid gps coordinates', async () => {
for (const test of [
{ latitude: 12 },
{ longitude: 12 },
{ latitude: 12, longitude: 'abc' },
{ latitude: 'abc', longitude: 12 },
{ latitude: null, longitude: 12 },
{ latitude: 12, longitude: null },
{ latitude: 91, longitude: 12 },
{ latitude: -91, longitude: 12 },
{ latitude: 12, longitude: -181 },
{ latitude: 12, longitude: 181 },
]) {
const { status, body } = await request(app)
.put(`/assets/${user1Assets[0].id}`)
.send(test)
.set('Authorization', `Bearer ${user1.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest());
}
});
it('should update gps data', async () => { it('should update gps data', async () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.put(`/assets/${user1Assets[0].id}`) .put(`/assets/${user1Assets[0].id}`)
@@ -629,6 +712,17 @@ describe('/asset', () => {
expect(status).toEqual(200); expect(status).toEqual(200);
}); });
it('should reject invalid rating', async () => {
for (const test of [{ rating: 7 }, { rating: 3.5 }, { rating: null }]) {
const { status, body } = await request(app)
.put(`/assets/${user1Assets[0].id}`)
.send(test)
.set('Authorization', `Bearer ${user1.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest());
}
});
it('should return tagged people', async () => { it('should return tagged people', async () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.put(`/assets/${user1Assets[0].id}`) .put(`/assets/${user1Assets[0].id}`)
@@ -652,6 +746,25 @@ describe('/asset', () => {
}); });
describe('DELETE /assets', () => { describe('DELETE /assets', () => {
it('should require authentication', async () => {
const { status, body } = await request(app)
.delete(`/assets`)
.send({ ids: [uuidDto.notFound] });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require a valid uuid', async () => {
const { status, body } = await request(app)
.delete(`/assets`)
.send({ ids: [uuidDto.invalid] })
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['each value in ids must be a UUID']));
});
it('should throw an error when the id is not found', async () => { it('should throw an error when the id is not found', async () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.delete(`/assets`) .delete(`/assets`)
@@ -764,6 +877,13 @@ describe('/asset', () => {
}); });
describe('GET /assets/:id/thumbnail', () => { describe('GET /assets/:id/thumbnail', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get(`/assets/${locationAsset.id}/thumbnail`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should not include gps data for webp thumbnails', async () => { it('should not include gps data for webp thumbnails', async () => {
await utils.waitForWebsocketEvent({ await utils.waitForWebsocketEvent({
event: 'assetUpload', event: 'assetUpload',
@@ -799,6 +919,13 @@ describe('/asset', () => {
}); });
describe('GET /assets/:id/original', () => { describe('GET /assets/:id/original', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get(`/assets/${locationAsset.id}/original`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should download the original', async () => { it('should download the original', async () => {
const { status, body, type } = await request(app) const { status, body, type } = await request(app)
.get(`/assets/${locationAsset.id}/original`) .get(`/assets/${locationAsset.id}/original`)
@@ -819,9 +946,43 @@ describe('/asset', () => {
}); });
}); });
describe('PUT /assets', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).put('/assets');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
});
describe('POST /assets', () => { describe('POST /assets', () => {
beforeAll(setupTests, 30_000); beforeAll(setupTests, 30_000);
it('should require authentication', async () => {
const { status, body } = await request(app).post(`/assets`);
expect(body).toEqual(errorDto.unauthorized);
expect(status).toBe(401);
});
it.each([
{ should: 'require `deviceAssetId`', dto: { ...makeUploadDto({ omit: 'deviceAssetId' }) } },
{ should: 'require `deviceId`', dto: { ...makeUploadDto({ omit: 'deviceId' }) } },
{ should: 'require `fileCreatedAt`', dto: { ...makeUploadDto({ omit: 'fileCreatedAt' }) } },
{ should: 'require `fileModifiedAt`', dto: { ...makeUploadDto({ omit: 'fileModifiedAt' }) } },
{ should: 'require `duration`', dto: { ...makeUploadDto({ omit: 'duration' }) } },
{ should: 'throw if `isFavorite` is not a boolean', dto: { ...makeUploadDto(), isFavorite: 'not-a-boolean' } },
{ should: 'throw if `isVisible` is not a boolean', dto: { ...makeUploadDto(), isVisible: 'not-a-boolean' } },
{ should: 'throw if `isArchived` is not a boolean', dto: { ...makeUploadDto(), isArchived: 'not-a-boolean' } },
])('should $should', async ({ dto }) => {
const { status, body } = await request(app)
.post('/assets')
.set('Authorization', `Bearer ${user1.accessToken}`)
.attach('assetData', makeRandomImage(), 'example.png')
.field(dto);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest());
});
const tests = [ const tests = [
{ {
input: 'formats/avif/8bit-sRGB.avif', input: 'formats/avif/8bit-sRGB.avif',
@@ -980,7 +1141,7 @@ describe('/asset', () => {
fNumber: 8, fNumber: 8,
focalLength: 97, focalLength: 97,
iso: 100, iso: 100,
lensModel: 'Sony E PZ 18-105mm F4 G OSS', lensModel: 'E PZ 18-105mm F4 G OSS',
fileSizeInByte: 25_001_984, fileSizeInByte: 25_001_984,
dateTimeOriginal: '2016-09-27T10:51:44+00:00', dateTimeOriginal: '2016-09-27T10:51:44+00:00',
orientation: '1', orientation: '1',
@@ -1002,7 +1163,7 @@ describe('/asset', () => {
fNumber: 22, fNumber: 22,
focalLength: 25, focalLength: 25,
iso: 100, iso: 100,
lensModel: 'Zeiss Batis 25mm F2', lensModel: 'E 25mm F2',
fileSizeInByte: 49_512_448, fileSizeInByte: 49_512_448,
dateTimeOriginal: '2016-01-08T14:08:01+00:00', dateTimeOriginal: '2016-01-08T14:08:01+00:00',
orientation: '1', orientation: '1',
@@ -1073,7 +1234,7 @@ describe('/asset', () => {
focalLength: 18.3, focalLength: 18.3,
iso: 100, iso: 100,
latitude: 36.613_24, latitude: 36.613_24,
lensModel: '18.3mm F2.8', lensModel: 'GR LENS 18.3mm F2.8',
longitude: -121.897_85, longitude: -121.897_85,
make: 'RICOH IMAGING COMPANY, LTD.', make: 'RICOH IMAGING COMPANY, LTD.',
model: 'RICOH GR III', model: 'RICOH GR III',
@@ -1083,21 +1244,31 @@ describe('/asset', () => {
}, },
]; ];
it.each(tests)(`should upload and generate a thumbnail for different file types`, async ({ input, expected }) => { it(`should upload and generate a thumbnail for different file types`, async () => {
// upload in parallel
const assets = await Promise.all(
tests.map(async ({ input }) => {
const filepath = join(testAssetDir, input); const filepath = join(testAssetDir, input);
const response = await utils.createAsset(admin.accessToken, { return utils.createAsset(admin.accessToken, {
assetData: { bytes: await readFile(filepath), filename: basename(filepath) }, assetData: { bytes: await readFile(filepath), filename: basename(filepath) },
}); });
}),
);
expect(response.status).toBe(AssetMediaStatus.Created); for (const { id, status } of assets) {
const id = response.id; expect(status).toBe(AssetMediaStatus.Created);
// longer timeout as the thumbnail generation from full-size raw files can take a while // longer timeout as the thumbnail generation from full-size raw files can take a while
await utils.waitForWebsocketEvent({ event: 'assetUpload', id }); await utils.waitForWebsocketEvent({ event: 'assetUpload', id });
}
for (const [i, { id }] of assets.entries()) {
const { expected } = tests[i];
const asset = await utils.getAssetInfo(admin.accessToken, id); const asset = await utils.getAssetInfo(admin.accessToken, id);
expect(asset.exifInfo).toBeDefined(); expect(asset.exifInfo).toBeDefined();
expect(asset.exifInfo).toMatchObject(expected.exifInfo); expect(asset.exifInfo).toMatchObject(expected.exifInfo);
expect(asset).toMatchObject(expected); expect(asset).toMatchObject(expected);
}
}); });
it('should handle a duplicate', async () => { it('should handle a duplicate', async () => {
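The thumbnail-format refactor above trades per-format `it.each` isolation for a single test that uploads every fixture concurrently, then waits for processing and asserts per asset. Generalized, the pattern looks like the sketch below; `utils`, `testAssetDir`, and `admin` come from the surrounding spec, and `inputs` is hypothetical:

```ts
import { readFile } from 'node:fs/promises';
import { basename, join } from 'node:path';

// Upload all fixtures in parallel, then await server-side processing serially.
const uploadAllAndWait = async (inputs: string[]) => {
  const assets = await Promise.all(
    inputs.map(async (input) => {
      const filepath = join(testAssetDir, input);
      return utils.createAsset(admin.accessToken, {
        assetData: { bytes: await readFile(filepath), filename: basename(filepath) },
      });
    }),
  );
  for (const { id } of assets) {
    // Longer-running steps (e.g. thumbnails from full-size raw files) finish here.
    await utils.waitForWebsocketEvent({ event: 'assetUpload', id });
  }
  return assets;
};
```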

View File

@@ -0,0 +1,43 @@
import { deleteAssets, getAuditFiles, updateAsset, type LoginResponseDto } from '@immich/sdk';
import { asBearerAuth, utils } from 'src/utils';
import { beforeAll, describe, expect, it } from 'vitest';
describe('/audits', () => {
let admin: LoginResponseDto;
beforeAll(async () => {
await utils.resetDatabase();
await utils.resetFilesystem();
admin = await utils.adminSetup();
});
// TODO: Enable these tests again once #7436 is resolved as these were flaky
describe.skip('GET :/file-report', () => {
it('excludes assets without issues from report', async () => {
const [trashedAsset, archivedAsset] = await Promise.all([
utils.createAsset(admin.accessToken),
utils.createAsset(admin.accessToken),
utils.createAsset(admin.accessToken),
]);
await Promise.all([
deleteAssets({ assetBulkDeleteDto: { ids: [trashedAsset.id] } }, { headers: asBearerAuth(admin.accessToken) }),
updateAsset(
{
id: archivedAsset.id,
updateAssetDto: { isArchived: true },
},
{ headers: asBearerAuth(admin.accessToken) },
),
]);
const body = await getAuditFiles({
headers: asBearerAuth(admin.accessToken),
});
expect(body.orphans).toHaveLength(0);
expect(body.extras).toHaveLength(0);
});
});
});

View File

@@ -5,7 +5,7 @@ import { app, utils } from 'src/utils';
import request from 'supertest'; import request from 'supertest';
import { beforeEach, describe, expect, it } from 'vitest'; import { beforeEach, describe, expect, it } from 'vitest';
const { email, password } = signupDto.admin; const { name, email, password } = signupDto.admin;
describe(`/auth/admin-sign-up`, () => { describe(`/auth/admin-sign-up`, () => {
beforeEach(async () => { beforeEach(async () => {
@@ -13,12 +13,58 @@ describe(`/auth/admin-sign-up`, () => {
}); });
describe('POST /auth/admin-sign-up', () => { describe('POST /auth/admin-sign-up', () => {
const invalid = [
{
should: 'require an email address',
data: { name, password },
},
{
should: 'require a password',
data: { name, email },
},
{
should: 'require a name',
data: { email, password },
},
{
should: 'require a valid email',
data: { name, email: 'immich', password },
},
];
for (const { should, data } of invalid) {
it(`should ${should}`, async () => {
const { status, body } = await request(app).post('/auth/admin-sign-up').send(data);
expect(status).toEqual(400);
expect(body).toEqual(errorDto.badRequest());
});
}
it(`should sign up the admin`, async () => { it(`should sign up the admin`, async () => {
const { status, body } = await request(app).post('/auth/admin-sign-up').send(signupDto.admin); const { status, body } = await request(app).post('/auth/admin-sign-up').send(signupDto.admin);
expect(status).toBe(201); expect(status).toBe(201);
expect(body).toEqual(signupResponseDto.admin); expect(body).toEqual(signupResponseDto.admin);
}); });
it('should sign up the admin with a local domain', async () => {
const { status, body } = await request(app)
.post('/auth/admin-sign-up')
.send({ ...signupDto.admin, email: 'admin@local' });
expect(status).toEqual(201);
expect(body).toEqual({
...signupResponseDto.admin,
email: 'admin@local',
});
});
it('should transform email to lower case', async () => {
const { status, body } = await request(app)
.post('/auth/admin-sign-up')
.send({ ...signupDto.admin, email: 'aDmIn@IMMICH.cloud' });
expect(status).toEqual(201);
expect(body).toEqual(signupResponseDto.admin);
});
it('should not allow a second admin to sign up', async () => { it('should not allow a second admin to sign up', async () => {
await signUpAdmin({ signUpDto: signupDto.admin }); await signUpAdmin({ signUpDto: signupDto.admin });
@@ -46,6 +92,22 @@ describe('/auth/*', () => {
expect(body).toEqual(errorDto.incorrectLogin); expect(body).toEqual(errorDto.incorrectLogin);
}); });
for (const key of Object.keys(loginDto.admin)) {
it(`should not allow null ${key}`, async () => {
const { status, body } = await request(app)
.post('/auth/login')
.send({ ...loginDto.admin, [key]: null });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest());
});
}
it('should reject an invalid email', async () => {
const { status, body } = await request(app).post('/auth/login').send({ email: [], password });
expect(status).toBe(400);
expect(body).toEqual(errorDto.invalidEmail);
});
it('should accept a correct password', async () => { it('should accept a correct password', async () => {
const { status, body, headers } = await request(app).post('/auth/login').send({ email, password }); const { status, body, headers } = await request(app).post('/auth/login').send({ email, password });
expect(status).toBe(201); expect(status).toBe(201);
@@ -100,6 +162,14 @@ describe('/auth/*', () => {
}); });
describe('POST /auth/change-password', () => { describe('POST /auth/change-password', () => {
it('should require authentication', async () => {
const { status, body } = await request(app)
.post(`/auth/change-password`)
.send({ password, newPassword: 'Password1234' });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should require the current password', async () => { it('should require the current password', async () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.post(`/auth/change-password`) .post(`/auth/change-password`)
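The lower-casing test earlier in this file pins down the email normalization behavior; in isolation the transform is trivial. A sketch only, since the real logic lives in the server's DTO layer:

```ts
// Mirrors the behavior asserted by 'should transform email to lower case'.
const normalizeEmail = (email: string) => email.toLowerCase();

console.log(normalizeEmail('aDmIn@IMMICH.cloud')); // admin@immich.cloud
```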

View File

@@ -1,5 +1,6 @@
import { AssetMediaResponseDto, LoginResponseDto } from '@immich/sdk'; import { AssetMediaResponseDto, LoginResponseDto } from '@immich/sdk';
import { readFile, writeFile } from 'node:fs/promises'; import { readFile, writeFile } from 'node:fs/promises';
import { errorDto } from 'src/responses';
import { app, tempDir, utils } from 'src/utils'; import { app, tempDir, utils } from 'src/utils';
import request from 'supertest'; import request from 'supertest';
import { beforeAll, describe, expect, it } from 'vitest'; import { beforeAll, describe, expect, it } from 'vitest';
@@ -16,6 +17,15 @@ describe('/download', () => {
}); });
describe('POST /download/info', () => { describe('POST /download/info', () => {
it('should require authentication', async () => {
const { status, body } = await request(app)
.post(`/download/info`)
.send({ assetIds: [asset1.id] });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should download info', async () => { it('should download info', async () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.post('/download/info') .post('/download/info')
@@ -32,6 +42,15 @@ describe('/download', () => {
}); });
describe('POST /download/archive', () => { describe('POST /download/archive', () => {
it('should require authentication', async () => {
const { status, body } = await request(app)
.post(`/download/archive`)
.send({ assetIds: [asset1.id, asset2.id] });
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should download an archive', async () => { it('should download an archive', async () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.post('/download/archive') .post('/download/archive')

View File

@@ -1,4 +1,4 @@
import { AssetVisibility, LoginResponseDto } from '@immich/sdk'; import { LoginResponseDto } from '@immich/sdk';
import { readFile } from 'node:fs/promises'; import { readFile } from 'node:fs/promises';
import { basename, join } from 'node:path'; import { basename, join } from 'node:path';
import { Socket } from 'socket.io-client'; import { Socket } from 'socket.io-client';
@@ -44,7 +44,7 @@ describe('/map', () => {
it('should get map markers for all non-archived assets', async () => { it('should get map markers for all non-archived assets', async () => {
const { status, body } = await request(app) const { status, body } = await request(app)
.get('/map/markers') .get('/map/markers')
.query({ visibility: AssetVisibility.Timeline }) .query({ isArchived: false })
.set('Authorization', `Bearer ${admin.accessToken}`); .set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200); expect(status).toBe(200);

View File

@@ -6,7 +6,6 @@ import {
startOAuth, startOAuth,
updateConfig, updateConfig,
} from '@immich/sdk'; } from '@immich/sdk';
import { createHash, randomBytes } from 'node:crypto';
import { errorDto } from 'src/responses'; import { errorDto } from 'src/responses';
import { OAuthClient, OAuthUser } from 'src/setup/auth-server'; import { OAuthClient, OAuthUser } from 'src/setup/auth-server';
import { app, asBearerAuth, baseUrl, utils } from 'src/utils'; import { app, asBearerAuth, baseUrl, utils } from 'src/utils';
@@ -22,30 +21,18 @@ const mobileOverrideRedirectUri = 'https://photos.immich.app/oauth/mobile-redire
const redirect = async (url: string, cookies?: string[]) => { const redirect = async (url: string, cookies?: string[]) => {
const { headers } = await request(url) const { headers } = await request(url)
.get('') .get('/')
.set('Cookie', cookies || []); .set('Cookie', cookies || []);
return { cookies: (headers['set-cookie'] as unknown as string[]) || [], location: headers.location }; return { cookies: (headers['set-cookie'] as unknown as string[]) || [], location: headers.location };
}; };
// Function to generate a code challenge from the verifier
const generateCodeChallenge = async (codeVerifier: string): Promise<string> => {
const hashed = createHash('sha256').update(codeVerifier).digest();
return hashed.toString('base64url');
};
const loginWithOAuth = async (sub: OAuthUser | string, redirectUri?: string) => { const loginWithOAuth = async (sub: OAuthUser | string, redirectUri?: string) => {
-  const state = randomBytes(16).toString('base64url');
-  const codeVerifier = randomBytes(64).toString('base64url');
-  const codeChallenge = await generateCodeChallenge(codeVerifier);
-  const { url } = await startOAuth({
-    oAuthConfigDto: { redirectUri: redirectUri ?? `${baseUrl}/auth/login`, state, codeChallenge },
-  });
+  const { url } = await startOAuth({ oAuthConfigDto: { redirectUri: redirectUri ?? `${baseUrl}/auth/login` } });

   // login
   const response1 = await redirect(url.replace(authServer.internal, authServer.external));
   const response2 = await request(authServer.external + response1.location)
-    .post('')
+    .post('/')
     .set('Cookie', response1.cookies)
     .type('form')
     .send({ prompt: 'login', login: sub, password: 'password' });
@@ -53,7 +40,7 @@ const loginWithOAuth = async (sub: OAuthUser | string, redirectUri?: string) =>
   // approve
   const response3 = await redirect(response2.header.location, response1.cookies);
   const response4 = await request(authServer.external + response3.location)
-    .post('')
+    .post('/')
     .type('form')
     .set('Cookie', response3.cookies)
     .send({ prompt: 'consent' });
@@ -64,9 +51,9 @@ const loginWithOAuth = async (sub: OAuthUser | string, redirectUri?: string) =>
   expect(redirectUrl).toBeDefined();
   const params = new URL(redirectUrl).searchParams;
   expect(params.get('code')).toBeDefined();
-  expect(params.get('state')).toBe(state);
-  return { url: redirectUrl, state, codeVerifier };
+  expect(params.get('state')).toBeDefined();
+  return redirectUrl;
 };

 const setupOAuth = async (token: string, dto: Partial<SystemConfigOAuthDto>) => {
@@ -132,42 +119,9 @@ describe(`/oauth`, () => {
       expect(body).toEqual(errorDto.badRequest(['url should not be empty']));
     });

-    it(`should throw an error if the state is not provided`, async () => {
-      const { url } = await loginWithOAuth('oauth-auto-register');
-      const { status, body } = await request(app).post('/oauth/callback').send({ url });
-      expect(status).toBe(400);
-      expect(body).toEqual(errorDto.badRequest('OAuth state is missing'));
-    });
-
-    it(`should throw an error if the state mismatches`, async () => {
-      const callbackParams = await loginWithOAuth('oauth-auto-register');
-      const { state } = await loginWithOAuth('oauth-auto-register');
-      const { status } = await request(app)
-        .post('/oauth/callback')
-        .send({ ...callbackParams, state });
-      expect(status).toBeGreaterThanOrEqual(400);
-    });
-
-    it(`should throw an error if the codeVerifier is not provided`, async () => {
-      const { url, state } = await loginWithOAuth('oauth-auto-register');
-      const { status, body } = await request(app).post('/oauth/callback').send({ url, state });
-      expect(status).toBe(400);
-      expect(body).toEqual(errorDto.badRequest('OAuth code verifier is missing'));
-    });
-
-    it(`should throw an error if the codeVerifier doesn't match the challenge`, async () => {
-      const callbackParams = await loginWithOAuth('oauth-auto-register');
-      const { codeVerifier } = await loginWithOAuth('oauth-auto-register');
-      const { status, body } = await request(app)
-        .post('/oauth/callback')
-        .send({ ...callbackParams, codeVerifier });
-      console.log(body);
-      expect(status).toBeGreaterThanOrEqual(400);
-    });
-
     it('should auto register the user by default', async () => {
-      const callbackParams = await loginWithOAuth('oauth-auto-register');
-      const { status, body } = await request(app).post('/oauth/callback').send(callbackParams);
+      const url = await loginWithOAuth('oauth-auto-register');
+      const { status, body } = await request(app).post('/oauth/callback').send({ url });
       expect(status).toBe(201);
       expect(body).toMatchObject({
         accessToken: expect.any(String),
@@ -178,30 +132,16 @@ describe(`/oauth`, () => {
       });
     });

-    it('should allow passing state and codeVerifier via cookies', async () => {
-      const { url, state, codeVerifier } = await loginWithOAuth('oauth-auto-register');
-      const { status, body } = await request(app)
-        .post('/oauth/callback')
-        .set('Cookie', [`immich_oauth_state=${state}`, `immich_oauth_code_verifier=${codeVerifier}`])
-        .send({ url });
-      expect(status).toBe(201);
-      expect(body).toMatchObject({
-        accessToken: expect.any(String),
-        userId: expect.any(String),
-        userEmail: 'oauth-auto-register@immich.app',
-      });
-    });
-
     it('should handle a user without an email', async () => {
-      const callbackParams = await loginWithOAuth(OAuthUser.NO_EMAIL);
-      const { status, body } = await request(app).post('/oauth/callback').send(callbackParams);
+      const url = await loginWithOAuth(OAuthUser.NO_EMAIL);
+      const { status, body } = await request(app).post('/oauth/callback').send({ url });
       expect(status).toBe(400);
       expect(body).toEqual(errorDto.badRequest('OAuth profile does not have an email address'));
     });

     it('should set the quota from a claim', async () => {
-      const callbackParams = await loginWithOAuth(OAuthUser.WITH_QUOTA);
-      const { status, body } = await request(app).post('/oauth/callback').send(callbackParams);
+      const url = await loginWithOAuth(OAuthUser.WITH_QUOTA);
+      const { status, body } = await request(app).post('/oauth/callback').send({ url });
       expect(status).toBe(201);
       expect(body).toMatchObject({
         accessToken: expect.any(String),
@@ -214,8 +154,8 @@ describe(`/oauth`, () => {
     });

     it('should set the storage label from a claim', async () => {
-      const callbackParams = await loginWithOAuth(OAuthUser.WITH_USERNAME);
-      const { status, body } = await request(app).post('/oauth/callback').send(callbackParams);
+      const url = await loginWithOAuth(OAuthUser.WITH_USERNAME);
+      const { status, body } = await request(app).post('/oauth/callback').send({ url });
       expect(status).toBe(201);
       expect(body).toMatchObject({
         accessToken: expect.any(String),
@@ -236,8 +176,8 @@ describe(`/oauth`, () => {
         buttonText: 'Login with Immich',
         signingAlgorithm: 'RS256',
       });
-      const callbackParams = await loginWithOAuth('oauth-RS256-token');
-      const { status, body } = await request(app).post('/oauth/callback').send(callbackParams);
+      const url = await loginWithOAuth('oauth-RS256-token');
+      const { status, body } = await request(app).post('/oauth/callback').send({ url });
       expect(status).toBe(201);
       expect(body).toMatchObject({
         accessToken: expect.any(String),
@@ -256,8 +196,8 @@ describe(`/oauth`, () => {
         buttonText: 'Login with Immich',
         profileSigningAlgorithm: 'RS256',
       });
-      const callbackParams = await loginWithOAuth('oauth-signed-profile');
-      const { status, body } = await request(app).post('/oauth/callback').send(callbackParams);
+      const url = await loginWithOAuth('oauth-signed-profile');
+      const { status, body } = await request(app).post('/oauth/callback').send({ url });
       expect(status).toBe(201);
       expect(body).toMatchObject({
         userId: expect.any(String),
@@ -273,8 +213,8 @@ describe(`/oauth`, () => {
         buttonText: 'Login with Immich',
         signingAlgorithm: 'something-that-does-not-work',
       });
-      const callbackParams = await loginWithOAuth('oauth-signed-bad');
-      const { status, body } = await request(app).post('/oauth/callback').send(callbackParams);
+      const url = await loginWithOAuth('oauth-signed-bad');
+      const { status, body } = await request(app).post('/oauth/callback').send({ url });
       expect(status).toBe(500);
       expect(body).toMatchObject({
         error: 'Internal Server Error',
@@ -295,8 +235,8 @@ describe(`/oauth`, () => {
     });

     it('should not auto register the user', async () => {
-      const callbackParams = await loginWithOAuth('oauth-no-auto-register');
-      const { status, body } = await request(app).post('/oauth/callback').send(callbackParams);
+      const url = await loginWithOAuth('oauth-no-auto-register');
+      const { status, body } = await request(app).post('/oauth/callback').send({ url });
       expect(status).toBe(400);
       expect(body).toEqual(errorDto.badRequest('User does not exist and auto registering is disabled.'));
     });
@@ -307,8 +247,8 @@ describe(`/oauth`, () => {
         email: 'oauth-user3@immich.app',
         password: 'password',
       });
-      const callbackParams = await loginWithOAuth('oauth-user3');
-      const { status, body } = await request(app).post('/oauth/callback').send(callbackParams);
+      const url = await loginWithOAuth('oauth-user3');
+      const { status, body } = await request(app).post('/oauth/callback').send({ url });
       expect(status).toBe(201);
       expect(body).toMatchObject({
         userId,
@@ -346,15 +286,13 @@ describe(`/oauth`, () => {
     });

     it('should auto register the user by default', async () => {
-      const callbackParams = await loginWithOAuth('oauth-mobile-override', 'app.immich:///oauth-callback');
-      expect(callbackParams.url).toEqual(expect.stringContaining(mobileOverrideRedirectUri));
+      const url = await loginWithOAuth('oauth-mobile-override', 'app.immich:///oauth-callback');
+      expect(url).toEqual(expect.stringContaining(mobileOverrideRedirectUri));

       // simulate redirecting back to mobile app
-      const url = callbackParams.url.replace(mobileOverrideRedirectUri, 'app.immich:///oauth-callback');
-      const { status, body } = await request(app)
-        .post('/oauth/callback')
-        .send({ ...callbackParams, url });
+      const redirectUri = url.replace(mobileOverrideRedirectUri, 'app.immich:///oauth-callback');
+      const { status, body } = await request(app).post('/oauth/callback').send({ url: redirectUri });
       expect(status).toBe(201);
       expect(body).toMatchObject({
         accessToken: expect.any(String),
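For context, the `state`/`codeVerifier` plumbing removed in this file implements PKCE (RFC 7636, S256 method): the challenge is the base64url-encoded SHA-256 digest of the verifier, which the server re-derives at the token exchange. A minimal sketch using Node's built-in crypto; the codebase's own `generateCodeChallenge` helper may differ in detail:

```ts
import { createHash, randomBytes } from 'node:crypto';

// PKCE S256 (RFC 7636): challenge = base64url(sha256(verifier)).
// The authorization request carries the challenge; the token exchange
// carries the verifier, and the server rejects a mismatch.
const codeVerifier = randomBytes(64).toString('base64url');
const codeChallenge = createHash('sha256').update(codeVerifier).digest('base64url');
```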
@@ -5,6 +5,22 @@ import { app, asBearerAuth, utils } from 'src/utils';
 import request from 'supertest';
 import { beforeAll, beforeEach, describe, expect, it } from 'vitest';

+const invalidBirthday = [
+  {
+    birthDate: 'false',
+    response: ['birthDate must be a string in the format yyyy-MM-dd', 'Birth date cannot be in the future'],
+  },
+  {
+    birthDate: '123567',
+    response: ['birthDate must be a string in the format yyyy-MM-dd', 'Birth date cannot be in the future'],
+  },
+  {
+    birthDate: 123_567,
+    response: ['birthDate must be a string in the format yyyy-MM-dd', 'Birth date cannot be in the future'],
+  },
+  { birthDate: '9999-01-01', response: ['Birth date cannot be in the future'] },
+];
+
 describe('/people', () => {
   let admin: LoginResponseDto;
   let visiblePerson: PersonResponseDto;
@@ -42,6 +58,14 @@ describe('/people', () => {
   describe('GET /people', () => {
     beforeEach(async () => {});

+    it('should require authentication', async () => {
+      const { status, body } = await request(app).get('/people');
+      expect(status).toBe(401);
+      expect(body).toEqual(errorDto.unauthorized);
+    });
+
     it('should return all people (including hidden)', async () => {
       const { status, body } = await request(app)
         .get('/people')
@@ -93,6 +117,13 @@ describe('/people', () => {
   });

   describe('GET /people/:id', () => {
+    it('should require authentication', async () => {
+      const { status, body } = await request(app).get(`/people/${uuidDto.notFound}`);
+      expect(status).toBe(401);
+      expect(body).toEqual(errorDto.unauthorized);
+    });
+
     it('should throw error if person with id does not exist', async () => {
       const { status, body } = await request(app)
         .get(`/people/${uuidDto.notFound}`)
@@ -113,6 +144,13 @@ describe('/people', () => {
   });

   describe('GET /people/:id/statistics', () => {
+    it('should require authentication', async () => {
+      const { status, body } = await request(app).get(`/people/${multipleAssetsPerson.id}/statistics`);
+      expect(status).toBe(401);
+      expect(body).toEqual(errorDto.unauthorized);
+    });
+
     it('should throw error if person with id does not exist', async () => {
       const { status, body } = await request(app)
         .get(`/people/${uuidDto.notFound}/statistics`)
@@ -133,6 +171,23 @@ describe('/people', () => {
   });

   describe('POST /people', () => {
+    it('should require authentication', async () => {
+      const { status, body } = await request(app).post(`/people`);
+      expect(status).toBe(401);
+      expect(body).toEqual(errorDto.unauthorized);
+    });
+
+    for (const { birthDate, response } of invalidBirthday) {
+      it(`should not accept an invalid birth date [${birthDate}]`, async () => {
+        const { status, body } = await request(app)
+          .post(`/people`)
+          .set('Authorization', `Bearer ${admin.accessToken}`)
+          .send({ birthDate });
+        expect(status).toBe(400);
+        expect(body).toEqual(errorDto.badRequest(response));
+      });
+    }
+
     it('should create a person', async () => {
       const { status, body } = await request(app)
         .post(`/people`)
@@ -168,6 +223,39 @@ describe('/people', () => {
   });

   describe('PUT /people/:id', () => {
+    it('should require authentication', async () => {
+      const { status, body } = await request(app).put(`/people/${uuidDto.notFound}`);
+      expect(status).toBe(401);
+      expect(body).toEqual(errorDto.unauthorized);
+    });
+
+    for (const { key, type } of [
+      { key: 'name', type: 'string' },
+      { key: 'featureFaceAssetId', type: 'string' },
+      { key: 'isHidden', type: 'boolean value' },
+      { key: 'isFavorite', type: 'boolean value' },
+    ]) {
+      it(`should not allow null ${key}`, async () => {
+        const { status, body } = await request(app)
+          .put(`/people/${visiblePerson.id}`)
+          .set('Authorization', `Bearer ${admin.accessToken}`)
+          .send({ [key]: null });
+        expect(status).toBe(400);
+        expect(body).toEqual(errorDto.badRequest([`${key} must be a ${type}`]));
+      });
+    }
+
+    for (const { birthDate, response } of invalidBirthday) {
+      it(`should not accept an invalid birth date [${birthDate}]`, async () => {
+        const { status, body } = await request(app)
+          .put(`/people/${visiblePerson.id}`)
+          .set('Authorization', `Bearer ${admin.accessToken}`)
+          .send({ birthDate });
+        expect(status).toBe(400);
+        expect(body).toEqual(errorDto.badRequest(response));
+      });
+    }
+
     it('should update a date of birth', async () => {
       const { status, body } = await request(app)
         .put(`/people/${visiblePerson.id}`)
@@ -224,6 +312,12 @@ describe('/people', () => {
   });

   describe('POST /people/:id/merge', () => {
+    it('should require authentication', async () => {
+      const { status, body } = await request(app).post(`/people/${uuidDto.notFound}/merge`);
+      expect(status).toBe(401);
+      expect(body).toEqual(errorDto.unauthorized);
+    });
+
     it('should not supporting merging a person into themselves', async () => {
       const { status, body } = await request(app)
         .post(`/people/${visiblePerson.id}/merge`)
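The `invalidBirthday` table pairs each malformed input with the exact validation messages the API returns. A hypothetical class-validator DTO that would emit those messages; the actual Immich decorators are likely structured differently, this only illustrates the shape of the rule set:

```ts
import { Matches, Validate, ValidatorConstraint, ValidatorConstraintInterface } from 'class-validator';

// Hypothetical constraint backing the 'Birth date cannot be in the future' message.
@ValidatorConstraint({ name: 'birthDateNotInFuture' })
class NotInFuture implements ValidatorConstraintInterface {
  validate(value: unknown) {
    // Non-strings and unparseable dates fail here too, so both messages can
    // surface together, matching the paired expectations in invalidBirthday.
    return typeof value === 'string' && !Number.isNaN(Date.parse(value)) && new Date(value) <= new Date();
  }
  defaultMessage() {
    return 'Birth date cannot be in the future';
  }
}

class UpdatePersonDto {
  @Matches(/^\d{4}-\d{2}-\d{2}$/, { message: 'birthDate must be a string in the format yyyy-MM-dd' })
  @Validate(NotInFuture)
  birthDate?: string;
}
```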
@@ -1,15 +1,9 @@
-import {
-  AssetMediaResponseDto,
-  AssetResponseDto,
-  AssetVisibility,
-  deleteAssets,
-  LoginResponseDto,
-  updateAsset,
-} from '@immich/sdk';
+import { AssetMediaResponseDto, AssetResponseDto, deleteAssets, LoginResponseDto, updateAsset } from '@immich/sdk';
 import { DateTime } from 'luxon';
 import { readFile } from 'node:fs/promises';
 import { join } from 'node:path';
 import { Socket } from 'socket.io-client';
+import { errorDto } from 'src/responses';
 import { app, asBearerAuth, TEN_TIMES, testAssetDir, utils } from 'src/utils';
 import request from 'supertest';
 import { afterAll, beforeAll, describe, expect, it } from 'vitest';
@@ -56,7 +50,7 @@ describe('/search', () => {
       { filename: '/formats/motionphoto/samsung-one-ui-6.heic' },
       { filename: '/formats/motionphoto/samsung-one-ui-5.jpg' },
-      { filename: '/metadata/gps-position/thompson-springs.jpg', dto: { visibility: AssetVisibility.Archive } },
+      { filename: '/metadata/gps-position/thompson-springs.jpg', dto: { isArchived: true } },

       // used for search suggestions
       { filename: '/formats/png/density_plot.png' },
@@ -147,6 +141,65 @@ describe('/search', () => {
   });

   describe('POST /search/metadata', () => {
+    it('should require authentication', async () => {
+      const { status, body } = await request(app).post('/search/metadata');
+      expect(status).toBe(401);
+      expect(body).toEqual(errorDto.unauthorized);
+    });
+
+    const badTests = [
+      {
+        should: 'should reject page as a string',
+        dto: { page: 'abc' },
+        expected: ['page must not be less than 1', 'page must be an integer number'],
+      },
+      {
+        should: 'should reject page as a decimal',
+        dto: { page: 1.5 },
+        expected: ['page must be an integer number'],
+      },
+      {
+        should: 'should reject page as a negative number',
+        dto: { page: -10 },
+        expected: ['page must not be less than 1'],
+      },
+      {
+        should: 'should reject page as 0',
+        dto: { page: 0 },
+        expected: ['page must not be less than 1'],
+      },
+      {
+        should: 'should reject size as a string',
+        dto: { size: 'abc' },
+        expected: [
+          'size must not be greater than 1000',
+          'size must not be less than 1',
+          'size must be an integer number',
+        ],
+      },
+      {
+        should: 'should reject an invalid size',
+        dto: { size: -1.5 },
+        expected: ['size must not be less than 1', 'size must be an integer number'],
+      },
+      ...['isArchived', 'isFavorite', 'isEncoded', 'isOffline', 'isMotion', 'isVisible'].map((value) => ({
+        should: `should reject ${value} not a boolean`,
+        dto: { [value]: 'immich' },
+        expected: [`${value} must be a boolean value`],
+      })),
+    ];
+
+    for (const { should, dto, expected } of badTests) {
+      it(should, async () => {
+        const { status, body } = await request(app)
+          .post('/search/metadata')
+          .set('Authorization', `Bearer ${admin.accessToken}`)
+          .send(dto);
+        expect(status).toBe(400);
+        expect(body).toEqual(errorDto.badRequest(expected));
+      });
+    }
+
     const searchTests = [
       {
         should: 'should get my assets',
@@ -178,12 +231,12 @@ describe('/search', () => {
         deferred: () => ({ dto: { size: 1, isFavorite: false }, assets: [assetLast] }),
       },
       {
-        should: 'should search by visibility (AssetVisibility.Archive)',
-        deferred: () => ({ dto: { visibility: AssetVisibility.Archive }, assets: [assetSprings] }),
+        should: 'should search by isArchived (true)',
+        deferred: () => ({ dto: { isArchived: true }, assets: [assetSprings] }),
       },
       {
-        should: 'should search by visibility (AssetVisibility.Timeline)',
-        deferred: () => ({ dto: { size: 1, visibility: AssetVisibility.Timeline }, assets: [assetLast] }),
+        should: 'should search by isArchived (false)',
+        deferred: () => ({ dto: { size: 1, isArchived: false }, assets: [assetLast] }),
       },
       {
         should: 'should search by type (image)',
@@ -192,7 +245,7 @@ describe('/search', () => {
       {
         should: 'should search by type (video)',
         deferred: () => ({
-          dto: { type: 'VIDEO', visibility: AssetVisibility.Hidden },
+          dto: { type: 'VIDEO' },
           assets: [
             // the three live motion photos
             { id: expect.any(String) },
@@ -236,6 +289,13 @@ describe('/search', () => {
         should: 'should search by takenAfter (no results)',
         deferred: () => ({ dto: { takenAfter: today.plus({ hour: 1 }).toJSDate() }, assets: [] }),
       },
+      // {
+      //   should: 'should search by originalPath',
+      //   deferred: () => ({
+      //     dto: { originalPath: asset1.originalPath },
+      //     assets: [asset1],
+      //   }),
+      // },
       {
         should: 'should search by originalFilename',
         deferred: () => ({
@@ -265,7 +325,7 @@ describe('/search', () => {
         deferred: () => ({
           dto: {
             city: '',
-            visibility: AssetVisibility.Timeline,
+            isVisible: true,
             includeNull: true,
           },
           assets: [assetLast],
@@ -276,7 +336,7 @@ describe('/search', () => {
         deferred: () => ({
           dto: {
             city: null,
-            visibility: AssetVisibility.Timeline,
+            isVisible: true,
             includeNull: true,
           },
           assets: [assetLast],
@@ -297,7 +357,7 @@ describe('/search', () => {
         deferred: () => ({
           dto: {
             state: '',
-            visibility: AssetVisibility.Timeline,
+            isVisible: true,
             withExif: true,
             includeNull: true,
           },
@@ -309,7 +369,7 @@ describe('/search', () => {
         deferred: () => ({
           dto: {
             state: null,
-            visibility: AssetVisibility.Timeline,
+            isVisible: true,
             includeNull: true,
           },
           assets: [assetLast, assetNotocactus],
@@ -330,7 +390,7 @@ describe('/search', () => {
         deferred: () => ({
           dto: {
             country: '',
-            visibility: AssetVisibility.Timeline,
+            isVisible: true,
             includeNull: true,
           },
           assets: [assetLast],
@@ -341,7 +401,7 @@ describe('/search', () => {
         deferred: () => ({
           dto: {
             country: null,
-            visibility: AssetVisibility.Timeline,
+            isVisible: true,
             includeNull: true,
           },
           assets: [assetLast],
@@ -394,6 +454,14 @@ describe('/search', () => {
     }
   });

+  describe('POST /search/smart', () => {
+    it('should require authentication', async () => {
+      const { status, body } = await request(app).post('/search/smart');
+      expect(status).toBe(401);
+      expect(body).toEqual(errorDto.unauthorized);
+    });
+  });
+
   describe('POST /search/random', () => {
     beforeAll(async () => {
       await Promise.all([
@@ -408,6 +476,13 @@ describe('/search', () => {
       await utils.waitForQueueFinish(admin.accessToken, 'thumbnailGeneration');
     });

+    it('should require authentication', async () => {
+      const { status, body } = await request(app).post('/search/random').send({ size: 1 });
+      expect(status).toBe(401);
+      expect(body).toEqual(errorDto.unauthorized);
+    });
+
     it.each(TEN_TIMES)('should return 1 random assets', async () => {
       const { status, body } = await request(app)
         .post('/search/random')
@@ -437,6 +512,12 @@ describe('/search', () => {
   });

   describe('GET /search/explore', () => {
+    it('should require authentication', async () => {
+      const { status, body } = await request(app).get('/search/explore');
+      expect(status).toBe(401);
+      expect(body).toEqual(errorDto.unauthorized);
+    });
+
     it('should get explore data', async () => {
       const { status, body } = await request(app)
         .get('/search/explore')
@@ -447,6 +528,12 @@ describe('/search', () => {
   });

   describe('GET /search/places', () => {
+    it('should require authentication', async () => {
+      const { status, body } = await request(app).get('/search/places');
+      expect(status).toBe(401);
+      expect(body).toEqual(errorDto.unauthorized);
+    });
+
     it('should get relevant places', async () => {
       const name = 'Paris';
@@ -465,6 +552,12 @@ describe('/search', () => {
   });

   describe('GET /search/cities', () => {
+    it('should require authentication', async () => {
+      const { status, body } = await request(app).get('/search/cities');
+      expect(status).toBe(401);
+      expect(body).toEqual(errorDto.unauthorized);
+    });
+
     it('should get all cities', async () => {
       const { status, body } = await request(app)
         .get('/search/cities')
@@ -483,6 +576,12 @@ describe('/search', () => {
   });

   describe('GET /search/suggestions', () => {
+    it('should require authentication', async () => {
+      const { status, body } = await request(app).get('/search/suggestions');
+      expect(status).toBe(401);
+      expect(body).toEqual(errorDto.unauthorized);
+    });
+
     it('should get suggestions for country (including null)', async () => {
       const { status, body } = await request(app)
         .get('/search/suggestions?type=country&includeNull=true')
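Taken together, the added `badTests` pin down the request contract for `POST /search/metadata`: `page` is an integer ≥ 1, `size` is an integer in [1, 1000], and each `is*` flag is a strict boolean. A sketch of a valid call under that contract, reusing `request`, `app`, and the `admin` fixture exactly as the surrounding tests do:

```ts
// Inside an it() block of this spec:
const { status, body } = await request(app)
  .post('/search/metadata')
  .set('Authorization', `Bearer ${admin.accessToken}`)
  .send({ page: 1, size: 100, isFavorite: true, isArchived: false });
// Passes validation, so this returns 200 with a page of matching assets.
```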
@@ -246,7 +246,15 @@ describe('/shared-links', () => {
     const { status, body } = await request(app).get('/shared-links/me').query({ key: linkWithMetadata.key });

     expect(status).toBe(200);
-    expect(body.assets).toHaveLength(0);
+    expect(body.assets).toHaveLength(1);
+    expect(body.assets[0]).toEqual(
+      expect.objectContaining({
+        originalFileName: 'example.png',
+        localDateTime: expect.any(String),
+        fileCreatedAt: expect.any(String),
+        exifInfo: expect.any(Object),
+      }),
+    );
     expect(body.album).toBeDefined();
   });
@@ -1,4 +1,4 @@
-import { AssetMediaResponseDto, AssetVisibility, LoginResponseDto, SharedLinkType } from '@immich/sdk';
+import { AssetMediaResponseDto, LoginResponseDto, SharedLinkType, TimeBucketSize } from '@immich/sdk';
 import { DateTime } from 'luxon';
 import { createUserDto } from 'src/fixtures';
 import { errorDto } from 'src/responses';
@@ -52,7 +52,7 @@ describe('/timeline', () => {
   describe('GET /timeline/buckets', () => {
     it('should require authentication', async () => {
-      const { status, body } = await request(app).get('/timeline/buckets');
+      const { status, body } = await request(app).get('/timeline/buckets').query({ size: TimeBucketSize.Month });
       expect(status).toBe(401);
       expect(body).toEqual(errorDto.unauthorized);
     });
@@ -60,7 +60,8 @@ describe('/timeline', () => {
     it('should get time buckets by month', async () => {
       const { status, body } = await request(app)
         .get('/timeline/buckets')
-        .set('Authorization', `Bearer ${timeBucketUser.accessToken}`);
+        .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
+        .query({ size: TimeBucketSize.Month });

       expect(status).toBe(200);
       expect(body).toEqual(
@@ -77,17 +78,33 @@ describe('/timeline', () => {
         assetIds: userAssets.map(({ id }) => id),
       });

-      const { status, body } = await request(app).get('/timeline/buckets').query({ key: sharedLink.key });
+      const { status, body } = await request(app)
+        .get('/timeline/buckets')
+        .query({ key: sharedLink.key, size: TimeBucketSize.Month });

       expect(status).toBe(400);
       expect(body).toEqual(errorDto.noPermission);
     });

+    it('should get time buckets by day', async () => {
+      const { status, body } = await request(app)
+        .get('/timeline/buckets')
+        .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
+        .query({ size: TimeBucketSize.Day });
+
+      expect(status).toBe(200);
+      expect(body).toEqual([
+        { count: 2, timeBucket: '1970-02-11T00:00:00.000Z' },
+        { count: 1, timeBucket: '1970-02-10T00:00:00.000Z' },
+        { count: 1, timeBucket: '1970-01-01T00:00:00.000Z' },
+      ]);
+    });
+
     it('should return error if time bucket is requested with partners asset and archived', async () => {
       const req1 = await request(app)
         .get('/timeline/buckets')
         .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
-        .query({ withPartners: true, visibility: AssetVisibility.Archive });
+        .query({ size: TimeBucketSize.Month, withPartners: true, isArchived: true });

       expect(req1.status).toBe(400);
       expect(req1.body).toEqual(errorDto.badRequest());
@@ -95,7 +112,7 @@ describe('/timeline', () => {
       const req2 = await request(app)
         .get('/timeline/buckets')
         .set('Authorization', `Bearer ${user.accessToken}`)
-        .query({ withPartners: true, visibility: undefined });
+        .query({ size: TimeBucketSize.Month, withPartners: true, isArchived: undefined });

       expect(req2.status).toBe(400);
       expect(req2.body).toEqual(errorDto.badRequest());
@@ -105,7 +122,7 @@ describe('/timeline', () => {
       const req1 = await request(app)
         .get('/timeline/buckets')
         .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
-        .query({ withPartners: true, isFavorite: true });
+        .query({ size: TimeBucketSize.Month, withPartners: true, isFavorite: true });

       expect(req1.status).toBe(400);
       expect(req1.body).toEqual(errorDto.badRequest());
@@ -113,7 +130,7 @@ describe('/timeline', () => {
       const req2 = await request(app)
         .get('/timeline/buckets')
         .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
-        .query({ withPartners: true, isFavorite: false });
+        .query({ size: TimeBucketSize.Month, withPartners: true, isFavorite: false });

       expect(req2.status).toBe(400);
       expect(req2.body).toEqual(errorDto.badRequest());
@@ -123,7 +140,7 @@ describe('/timeline', () => {
       const req = await request(app)
         .get('/timeline/buckets')
         .set('Authorization', `Bearer ${user.accessToken}`)
-        .query({ withPartners: true, isTrashed: true });
+        .query({ size: TimeBucketSize.Month, withPartners: true, isTrashed: true });

       expect(req.status).toBe(400);
       expect(req.body).toEqual(errorDto.badRequest());
@@ -133,6 +150,7 @@ describe('/timeline', () => {
   describe('GET /timeline/bucket', () => {
     it('should require authentication', async () => {
       const { status, body } = await request(app).get('/timeline/bucket').query({
+        size: TimeBucketSize.Month,
         timeBucket: '1900-01-01',
       });
@@ -143,27 +161,11 @@ describe('/timeline', () => {
     it('should handle 5 digit years', async () => {
       const { status, body } = await request(app)
         .get('/timeline/bucket')
-        .query({ timeBucket: '012345-01-01' })
+        .query({ size: TimeBucketSize.Month, timeBucket: '012345-01-01' })
         .set('Authorization', `Bearer ${timeBucketUser.accessToken}`);

       expect(status).toBe(200);
-      expect(body).toEqual({
-        city: [],
-        country: [],
-        duration: [],
-        id: [],
-        visibility: [],
-        isFavorite: [],
-        isImage: [],
-        isTrashed: [],
-        livePhotoVideoId: [],
-        localDateTime: [],
-        ownerId: [],
-        projectionType: [],
-        ratio: [],
-        status: [],
-        thumbhash: [],
-      });
+      expect(body).toEqual([]);
     });

     // TODO enable date string validation while still accepting 5 digit years
@@ -171,7 +173,7 @@ describe('/timeline', () => {
     //   const { status, body } = await request(app)
     //     .get('/timeline/bucket')
     //     .set('Authorization', `Bearer ${user.accessToken}`)
-    //     .query({ timeBucket: 'foo' });
+    //     .query({ size: TimeBucketSize.Month, timeBucket: 'foo' });

     //   expect(status).toBe(400);
     //   expect(body).toEqual(errorDto.badRequest);
@@ -181,26 +183,10 @@ describe('/timeline', () => {
       const { status, body } = await request(app)
         .get('/timeline/bucket')
         .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
-        .query({ timeBucket: '1970-02-10' });
+        .query({ size: TimeBucketSize.Month, timeBucket: '1970-02-10' });

       expect(status).toBe(200);
-      expect(body).toEqual({
-        city: [],
-        country: [],
-        duration: [],
-        id: [],
-        visibility: [],
-        isFavorite: [],
-        isImage: [],
-        isTrashed: [],
-        livePhotoVideoId: [],
-        localDateTime: [],
-        ownerId: [],
-        projectionType: [],
-        ratio: [],
-        status: [],
-        thumbhash: [],
-      });
+      expect(body).toEqual([]);
     });
   });
 });
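The common thread in this spec: after the change, every `/timeline` request carries a `size` parameter (`TimeBucketSize.Month` or `TimeBucketSize.Day`), and a bucket response is a flat array of `{ count, timeBucket }` entries rather than the keyed object of parallel arrays it replaces. A sketch of an authorized call under the new shape, reusing this spec's fixtures:

```ts
// Inside an it() block, with `app` and `timeBucketUser` from the setup above:
const { status, body } = await request(app)
  .get('/timeline/buckets')
  .set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
  .query({ size: TimeBucketSize.Month });
// e.g. body: [{ count: 3, timeBucket: '1970-02-01T00:00:00.000Z' }, ...]
```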
@@ -215,19 +215,6 @@ describe('/admin/users', () => {
       const user = await getMyUser({ headers: asBearerAuth(token.accessToken) });
       expect(user).toMatchObject({ email: nonAdmin.userEmail });
     });
-
-    it('should update the avatar color', async () => {
-      const { status, body } = await request(app)
-        .put(`/admin/users/${admin.userId}`)
-        .send({ avatarColor: 'orange' })
-        .set('Authorization', `Bearer ${admin.accessToken}`);
-      expect(status).toBe(200);
-      expect(body).toMatchObject({ avatarColor: 'orange' });
-
-      const after = await getUserAdmin({ id: admin.userId }, { headers: asBearerAuth(admin.accessToken) });
-      expect(after).toMatchObject({ avatarColor: 'orange' });
-    });
   });

   describe('PUT /admin/users/:id/preferences', () => {
@@ -253,6 +240,19 @@ describe('/admin/users', () => {
       expect(after).toMatchObject({ memories: { enabled: false } });
     });

+    it('should update the avatar color', async () => {
+      const { status, body } = await request(app)
+        .put(`/admin/users/${admin.userId}/preferences`)
+        .send({ avatar: { color: 'orange' } })
+        .set('Authorization', `Bearer ${admin.accessToken}`);
+      expect(status).toBe(200);
+      expect(body).toMatchObject({ avatar: { color: 'orange' } });
+
+      const after = await getUserPreferencesAdmin({ id: admin.userId }, { headers: asBearerAuth(admin.accessToken) });
+      expect(after).toMatchObject({ avatar: { color: 'orange' } });
+    });
+
     it('should update download archive size', async () => {
       const { status, body } = await request(app)
         .put(`/admin/users/${admin.userId}/preferences`)
@@ -31,7 +31,33 @@ describe('/users', () => {
     );
   });

+  describe('GET /users', () => {
+    it('should require authentication', async () => {
+      const { status, body } = await request(app).get('/users');
+      expect(status).toBe(401);
+      expect(body).toEqual(errorDto.unauthorized);
+    });
+
+    it('should get users', async () => {
+      const { status, body } = await request(app).get('/users').set('Authorization', `Bearer ${admin.accessToken}`);
+      expect(status).toEqual(200);
+      expect(body).toHaveLength(2);
+      expect(body).toEqual(
+        expect.arrayContaining([
+          expect.objectContaining({ email: 'admin@immich.cloud' }),
+          expect.objectContaining({ email: 'user2@immich.cloud' }),
+        ]),
+      );
+    });
+  });
+
   describe('GET /users/me', () => {
+    it('should require authentication', async () => {
+      const { status, body } = await request(app).get(`/users/me`);
+      expect(status).toBe(401);
+      expect(body).toEqual(errorDto.unauthorized);
+    });
+
     it('should not work for shared links', async () => {
       const album = await utils.createAlbum(admin.accessToken, { albumName: 'Album' });
       const sharedLink = await utils.createSharedLink(admin.accessToken, {
@@ -73,6 +99,24 @@ describe('/users', () => {
   });

   describe('PUT /users/me', () => {
+    it('should require authentication', async () => {
+      const { status, body } = await request(app).put(`/users/me`);
+      expect(status).toBe(401);
+      expect(body).toEqual(errorDto.unauthorized);
+    });
+
+    for (const key of ['email', 'name']) {
+      it(`should not allow null ${key}`, async () => {
+        const dto = { [key]: null };
+        const { status, body } = await request(app)
+          .put(`/users/me`)
+          .set('Authorization', `Bearer ${admin.accessToken}`)
+          .send(dto);
+        expect(status).toBe(400);
+        expect(body).toEqual(errorDto.badRequest());
+      });
+    }
+
     it('should update first and last name', async () => {
       const before = await getMyUser({ headers: asBearerAuth(admin.accessToken) });
@@ -139,19 +183,6 @@ describe('/users', () => {
         profileChangedAt: expect.anything(),
       });
     });
-
-    it('should update avatar color', async () => {
-      const { status, body } = await request(app)
-        .put(`/users/me`)
-        .send({ avatarColor: 'blue' })
-        .set('Authorization', `Bearer ${admin.accessToken}`);
-      expect(status).toBe(200);
-      expect(body).toMatchObject({ avatarColor: 'blue' });
-
-      const after = await getMyUser({ headers: asBearerAuth(admin.accessToken) });
-      expect(after).toMatchObject({ avatarColor: 'blue' });
-    });
   });

   describe('PUT /users/me/preferences', () => {
@@ -171,6 +202,19 @@ describe('/users', () => {
       expect(after).toMatchObject({ memories: { enabled: false } });
     });

+    it('should update avatar color', async () => {
+      const { status, body } = await request(app)
+        .put(`/users/me/preferences`)
+        .send({ avatar: { color: 'blue' } })
+        .set('Authorization', `Bearer ${admin.accessToken}`);
+      expect(status).toBe(200);
+      expect(body).toMatchObject({ avatar: { color: 'blue' } });
+
+      const after = await getMyPreferences({ headers: asBearerAuth(admin.accessToken) });
+      expect(after).toMatchObject({ avatar: { color: 'blue' } });
+    });
+
     it('should require an integer for download archive size', async () => {
       const { status, body } = await request(app)
         .put(`/users/me/preferences`)
@@ -225,6 +269,11 @@ describe('/users', () => {
   });

   describe('GET /users/:id', () => {
+    it('should require authentication', async () => {
+      const { status } = await request(app).get(`/users/${admin.userId}`);
+      expect(status).toEqual(401);
+    });
+
     it('should get the user', async () => {
       const { status, body } = await request(app)
         .get(`/users/${admin.userId}`)
@@ -243,6 +292,12 @@ describe('/users', () => {
   });

   describe('GET /server/license', () => {
+    it('should require authentication', async () => {
+      const { status, body } = await request(app).get('/users/me/license');
+      expect(status).toBe(401);
+      expect(body).toEqual(errorDto.unauthorized);
+    });
+
     it('should return the user license', async () => {
       await request(app)
         .put('/users/me/license')
@@ -260,6 +315,11 @@ describe('/users', () => {
   });

   describe('PUT /users/me/license', () => {
+    it('should require authentication', async () => {
+      const { status } = await request(app).put(`/users/me/license`);
+      expect(status).toEqual(401);
+    });
+
     it('should set the user license', async () => {
       const { status, body } = await request(app)
         .put(`/users/me/license`)
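Both user specs make the same move: avatar color leaves the user entity (a top-level `avatarColor` on `PUT /users/me` and `PUT /admin/users/:id`) and becomes a nested preference. A sketch of the payload shape after the change, reusing the spec fixtures:

```ts
// Old shape: PUT /users/me             .send({ avatarColor: 'blue' })
// New shape: PUT /users/me/preferences .send({ avatar: { color: 'blue' } })
const { status, body } = await request(app)
  .put('/users/me/preferences')
  .set('Authorization', `Bearer ${admin.accessToken}`)
  .send({ avatar: { color: 'blue' } });
// Both the response body and a follow-up getMyPreferences() report
// { avatar: { color: 'blue' } }.
```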
@@ -3,7 +3,6 @@ import {
   AssetMediaCreateDto,
   AssetMediaResponseDto,
   AssetResponseDto,
-  AssetVisibility,
   CheckExistingAssetsDto,
   CreateAlbumDto,
   CreateLibraryDto,
@@ -430,10 +429,7 @@ export const utils = {
   },

   archiveAssets: (accessToken: string, ids: string[]) =>
-    updateAssets(
-      { assetBulkUpdateDto: { ids, visibility: AssetVisibility.Archive } },
-      { headers: asBearerAuth(accessToken) },
-    ),
+    updateAssets({ assetBulkUpdateDto: { ids, isArchived: true } }, { headers: asBearerAuth(accessToken) }),

   deleteAssets: (accessToken: string, ids: string[]) =>
     deleteAssets({ assetBulkDeleteDto: { ids } }, { headers: asBearerAuth(accessToken) }),
@@ -25,7 +25,7 @@ test.describe('Registration', () => {
     // login
     await expect(page).toHaveTitle(/Login/);
-    await page.goto('/auth/login?autoLaunch=0');
+    await page.goto('/auth/login');
     await page.getByLabel('Email').fill('admin@immich.app');
     await page.getByLabel('Password').fill('password');
     await page.getByRole('button', { name: 'Login' }).click();
@@ -59,7 +59,7 @@ test.describe('Registration', () => {
     await context.clearCookies();

     // login
-    await page.goto('/auth/login?autoLaunch=0');
+    await page.goto('/auth/login');
     await page.getByLabel('Email').fill('user@immich.cloud');
     await page.getByLabel('Password').fill('password');
     await page.getByRole('button', { name: 'Login' }).click();
@@ -72,7 +72,7 @@ test.describe('Registration', () => {
     await page.getByRole('button', { name: 'Change password' }).click();

     // login with new password
-    await expect(page).toHaveURL('/auth/login?autoLaunch=0');
+    await expect(page).toHaveURL('/auth/login');
     await page.getByLabel('Email').fill('user@immich.cloud');
     await page.getByLabel('Password').fill('new-password');
     await page.getByRole('button', { name: 'Login' }).click();
Some files were not shown because too many files have changed in this diff.