Mirror of https://github.com/immich-app/immich.git, synced 2025-12-06 12:51:32 -08:00

Compare commits: feat/docke... → web/automa... (7 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 736b968eab | |
| | d5d8426bda | |
| | 7e92ef9428 | |
| | 372fae20d9 | |
| | 16543a233b | |
| | 73b961f5fa | |
| | e0d15c96f1 | |
@@ -1,2 +0,0 @@
ARG BASEIMAGE=mcr.microsoft.com/devcontainers/typescript-node:22@sha256:9791f4aa527774bc370c6bd2f6705ce5a686f1e6f204badd8dfaacce28c631ae
FROM ${BASEIMAGE}
@@ -1,20 +0,0 @@
{
  "name": "Immich devcontainers",
  "build": {
    "dockerfile": "Dockerfile",
    "args": {
      "BASEIMAGE": "mcr.microsoft.com/devcontainers/typescript-node:22"
    }
  },
  "customizations": {
    "vscode": {
      "extensions": [
        "svelte.svelte-vscode"
      ]
    }
  },
  "forwardPorts": [],
  "postCreateCommand": "make install-all",
  "remoteUser": "node"
}
@@ -4,7 +4,6 @@

design/
docker/
!docker/scripts
docs/
e2e/
fastlane/
@@ -22,12 +21,11 @@ open-api/typescript-sdk/node_modules/
server/coverage/
server/node_modules/
server/upload/
server/src/queries
server/dist/
server/www/
server/test/assets/

web/node_modules/
web/coverage/
web/.svelte-kit
web/build/
web/.env
.gitattributes (2 changes, vendored)
@@ -2,6 +2,8 @@ mobile/openapi/**/*.md -diff -merge
mobile/openapi/**/*.md linguist-generated=true
mobile/openapi/**/*.dart -diff -merge
mobile/openapi/**/*.dart linguist-generated=true
mobile/openapi/.openapi-generator/FILES -diff -merge
mobile/openapi/.openapi-generator/FILES linguist-generated=true

mobile/lib/**/*.g.dart -diff -merge
mobile/lib/**/*.g.dart linguist-generated=true
@@ -1,13 +1,11 @@
title: "[Feature] feature-name-goes-here"
title: "[Feature] <feature-name-goes-here>"
labels: ["feature"]

body:
  - type: markdown
    attributes:
      value: |
        Please use this form to request new feature for Immich.
        Stick to only a single feature per request. If you list multiple different features at once,
        your request will be closed.
        Please use this form to request new feature for Immich

  - type: checkboxes
    attributes:
.github/FUNDING.yml (6 changes, vendored)
@@ -1 +1,5 @@
custom: ['https://buy.immich.app']
# These are supported funding model platforms

github: immich-app
liberapay: alex.tran1502
custom: https://www.buymeacoffee.com/altran1502
.github/ISSUE_TEMPLATE/bug_report.yaml (1 change, vendored)
@@ -83,6 +83,7 @@ body:
        2.
        3.
        ...
      render: bash
    validations:
      required: true
.github/ISSUE_TEMPLATE/config.yml (13 changes, vendored)
@@ -1,14 +1,11 @@
blank_issues_enabled: false
contact_links:
  - name: ✋ I have a question or need support
    url: https://discord.immich.app
  - name: I have a question or need support
    url: https://discord.gg/D8JsnBEuKb
    about: We use GitHub for tracking bugs, please check out our Discord channel for freaky fast support.
  - name: 📷 My photo or video has a date, time, or timezone problem
    url: https://github.com/immich-app/immich/discussions/12650
    about: Upload a sample file to this discussion and we will take a look
  - name: 🌟 Feature request
  - name: Feature Request
    url: https://github.com/immich-app/immich/discussions/new?category=feature-request
    about: Please use our GitHub Discussion for making feature requests.
  - name: 🫣 I'm unsure where to go
    url: https://discord.immich.app
  - name: I'm unsure where to go
    url: https://discord.gg/D8JsnBEuKb
    about: If you are unsure where to go, then joining our Discord is recommended; Just ask!
.github/dependabot.yml (7 changes, vendored, new file)
@@ -0,0 +1,7 @@
version: 2
updates:
  # Maintain dependencies for GitHub Actions
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "daily"
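This new dependabot.yml only watches GitHub Actions. For comparison, a minimal sketch of what an additional npm entry could look like; the directory and interval in the second entry are assumptions for illustration, not part of this change:

version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "daily"
  # hypothetical second entry: also watch the server's npm dependencies
  - package-ecosystem: "npm"
    directory: "/server"
    schedule:
      interval: "weekly"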
.github/labeler.yml (38 changes, vendored)
@@ -1,38 +0,0 @@
cli:
  - changed-files:
      - any-glob-to-any-file:
          - cli/src/**

documentation:
  - changed-files:
      - any-glob-to-any-file:
          - docs/blob/**
          - docs/docs/**
          - docs/src/**
          - docs/static/**

🖥️web:
  - changed-files:
      - any-glob-to-any-file:
          - web/src/**
          - web/static/**

📱mobile:
  - changed-files:
      - any-glob-to-any-file:
          - mobile/lib/**
          - mobile/test/**

🗄️server:
  - changed-files:
      - any-glob-to-any-file:
          - server/src/**
          - server/test/**

🧠machine-learning:
  - changed-files:
      - any-glob-to-any-file:
          - machine-learning/app/**

changelog:translation:
  - head-branch: ['^chore/translations$']
.github/release.yml (40 changes, vendored)
@@ -1,33 +1,41 @@
changelog:
  categories:
    - title: 🚨 Breaking Changes
    - title: ⚠️ Breaking Changes
      labels:
        - changelog:breaking-change
        - breaking-change

    - title: 🫥 Deprecated Changes
    - title: 🗄️ Server
      labels:
        - changelog:deprecated
        - 🗄️server

    - title: 🔒 Security
    - title: 📱 Mobile
      labels:
        - changelog:security
        - 📱mobile

    - title: 🚀 Features
    - title: 🖥️ Web
      labels:
        - changelog:feature
        - 🖥️web

    - title: 🌟 Enhancements
    - title: 🧠 Machine Learning
      labels:
        - changelog:enhancement
        - 🧠machine-learning

    - title: 🐛 Bug fixes
    - title: ⚡ CLI
      labels:
        - changelog:bugfix
        - cli

    - title: 📚 Documentation
    - title: 📓 Documentation
      labels:
        - changelog:documentation
        - documentation

    - title: 🌐 Translations
    - title: 🔨 Maintenance
      labels:
        - changelog:translation
        - deployment
        - dependencies
        - renovate
        - maintenance
        - tech-debt

    - title: Other changes
      labels:
        - "*"
.github/workflows/build-mobile.yml (22 changes, vendored)
@@ -16,28 +16,10 @@ concurrency:
  cancel-in-progress: true

jobs:
  pre-job:
    runs-on: ubuntu-latest
    outputs:
      should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - id: found_paths
        uses: dorny/paths-filter@v3
        with:
          filters: |
            mobile:
              - 'mobile/**'
      - name: Check if we should force jobs to run
        id: should_force
        run: echo "should_force=${{ github.event_name == 'workflow_call' || github.event_name == 'workflow_dispatch' }}" >> "$GITHUB_OUTPUT"

  build-sign-android:
    name: Build and sign Android
    needs: pre-job
    # Skip when PR from a fork
    if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' && needs.pre-job.outputs.should_run == 'true' }}
    if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' }}
    runs-on: macos-14

    steps:
@@ -63,7 +45,7 @@ jobs:
        uses: subosito/flutter-action@v2
        with:
          channel: 'stable'
          flutter-version-file: ./mobile/pubspec.yaml
          flutter-version: '3.19.3'
          cache: true

      - name: Create the Keystore
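The hunk above removes a change-detection gate. For readers unfamiliar with the pattern, here is a minimal sketch of how such a pre-job gates a downstream job with dorny/paths-filter; the job names and the echo step are illustrative only:

jobs:
  pre-job:
    runs-on: ubuntu-latest
    outputs:
      should_run: ${{ steps.found_paths.outputs.mobile == 'true' }}
    steps:
      - uses: actions/checkout@v4
      # emit a 'mobile' output that is 'true' when files under mobile/ changed
      - id: found_paths
        uses: dorny/paths-filter@v3
        with:
          filters: |
            mobile:
              - 'mobile/**'
  build:
    needs: pre-job
    # skipped entirely when no mobile paths changed
    if: ${{ needs.pre-job.outputs.should_run == 'true' }}
    runs-on: ubuntu-latest
    steps:
      - run: echo "runs only when mobile/ changed"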
.github/workflows/cli.yml (31 changes, vendored)
@@ -1,17 +1,16 @@
name: CLI Build
on:
  workflow_dispatch:
  push:
    branches: [main]
    paths:
      - 'cli/**'
      - '.github/workflows/cli.yml'
      - "cli/**"
      - ".github/workflows/cli.yml"
  pull_request:
    branches: [main]
    paths:
      - 'cli/**'
      - '.github/workflows/cli.yml'
  release:
    types: [published]
      - "cli/**"
      - ".github/workflows/cli.yml"

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
@@ -22,7 +21,7 @@ permissions:

jobs:
  publish:
    name: CLI Publish
    name: Publish
    runs-on: ubuntu-latest
    defaults:
      run:
@@ -33,8 +32,8 @@ jobs:
      # Setup .npmrc file to publish to npm
      - uses: actions/setup-node@v4
        with:
          node-version-file: './cli/.nvmrc'
          registry-url: 'https://registry.npmjs.org'
          node-version: "20.x"
          registry-url: "https://registry.npmjs.org"
      - name: Prepare SDK
        run: npm ci --prefix ../open-api/typescript-sdk/
      - name: Build SDK
@@ -42,7 +41,7 @@ jobs:
      - run: npm ci
      - run: npm run build
      - run: npm publish
        if: ${{ github.event_name == 'release' }}
        if: ${{ github.event_name == 'workflow_dispatch' }}
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

@@ -56,10 +55,10 @@ jobs:
        uses: actions/checkout@v4

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3.2.0
        uses: docker/setup-qemu-action@v3.0.0

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3.8.0
        uses: docker/setup-buildx-action@v3.3.0

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
@@ -84,15 +83,15 @@ jobs:
          images: |
            name=ghcr.io/${{ github.repository_owner }}/immich-cli
          tags: |
            type=raw,value=${{ steps.package-version.outputs.version }},enable=${{ github.event_name == 'release' }}
            type=raw,value=latest,enable=${{ github.event_name == 'release' }}
            type=raw,value=${{ steps.package-version.outputs.version }},enable=${{ github.event_name == 'workflow_dispatch' }}
            type=raw,value=latest,enable=${{ github.event_name == 'workflow_dispatch' }}

      - name: Build and push image
        uses: docker/build-push-action@v6.10.0
        uses: docker/build-push-action@v5.3.0
        with:
          file: cli/Dockerfile
          platforms: linux/amd64,linux/arm64
          push: ${{ github.event_name == 'release' }}
          push: ${{ github.event_name == 'workflow_dispatch' }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
          tags: ${{ steps.metadata.outputs.tags }}
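The tag changes above hinge on docker/metadata-action's enable flag, which emits a tag only when its expression evaluates to true. A minimal sketch under the assumption of a ghcr.io image name chosen for illustration:

- name: Generate docker image tags
  id: metadata
  uses: docker/metadata-action@v5
  with:
    images: ghcr.io/example/immich-cli
    # the latest tag is produced only for manual runs; other events emit nothing here
    tags: |
      type=raw,value=latest,enable=${{ github.event_name == 'workflow_dispatch' }}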
.github/workflows/docker-cleanup.yml (8 changes, vendored)
@@ -22,7 +22,7 @@ concurrency:
jobs:
  cleanup-images:
    name: Cleanup Stale Images Tags for ${{ matrix.primary-name }}
    runs-on: ubuntu-24.04
    runs-on: ubuntu-22.04
    strategy:
      fail-fast: false
      matrix:
@@ -35,7 +35,7 @@ jobs:
    steps:
      - name: Clean temporary images
        if: "${{ env.TOKEN != '' }}"
        uses: stumpylog/image-cleaner-action/ephemeral@v0.9.0
        uses: stumpylog/image-cleaner-action/ephemeral@v0.6.0
        with:
          token: "${{ env.TOKEN }}"
          owner: "immich-app"
@@ -48,7 +48,7 @@ jobs:

  cleanup-untagged-images:
    name: Cleanup Untagged Images Tags for ${{ matrix.primary-name }}
    runs-on: ubuntu-24.04
    runs-on: ubuntu-22.04
    needs:
      - cleanup-images
    strategy:
@@ -64,7 +64,7 @@ jobs:
    steps:
      - name: Clean untagged images
        if: "${{ env.TOKEN != '' }}"
        uses: stumpylog/image-cleaner-action/untagged@v0.9.0
        uses: stumpylog/image-cleaner-action/untagged@v0.6.0
        with:
          token: "${{ env.TOKEN }}"
          owner: "immich-app"
.github/workflows/docker.yml (248 changes, vendored)
@@ -17,206 +17,56 @@ permissions:
  packages: write

jobs:
  pre-job:
  build_and_push:
    name: Build and Push
    runs-on: ubuntu-latest
    outputs:
      should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
      should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - id: found_paths
        uses: dorny/paths-filter@v3
        with:
          filters: |
            server:
              - 'server/**'
              - 'openapi/**'
              - 'web/**'
              - 'i18n/**'
            machine-learning:
              - 'machine-learning/**'

      - name: Check if we should force jobs to run
        id: should_force
        run: echo "should_force=${{ github.event_name == 'workflow_dispatch' || github.event_name == 'release' }}" >> "$GITHUB_OUTPUT"

  retag_ml:
    name: Re-Tag ML
    needs: pre-job
    if: ${{ needs.pre-job.outputs.should_run_ml == 'false' && !github.event.pull_request.head.repo.fork }}
    runs-on: ubuntu-latest
    strategy:
      matrix:
        suffix: ["", "-cuda", "-openvino", "-armnn"]
    steps:
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Re-tag image
        run: |
          REGISTRY_NAME="ghcr.io"
          REPOSITORY=${{ github.repository_owner }}/immich-machine-learning
          TAG_OLD=main${{ matrix.suffix }}
          TAG_NEW=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
          docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_NEW $REGISTRY_NAME/$REPOSITORY:$TAG_OLD

  retag_server:
    name: Re-Tag Server
    needs: pre-job
    if: ${{ needs.pre-job.outputs.should_run_server == 'false' && !github.event.pull_request.head.repo.fork }}
    runs-on: ubuntu-latest
    strategy:
      matrix:
        suffix: [""]
    steps:
      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Re-tag image
        run: |
          REGISTRY_NAME="ghcr.io"
          REPOSITORY=${{ github.repository_owner }}/immich-server
          TAG_OLD=main${{ matrix.suffix }}
          TAG_NEW=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
          docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_NEW $REGISTRY_NAME/$REPOSITORY:$TAG_OLD

  build_and_push_ml:
    name: Build and Push ML
    needs: pre-job
    if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
    runs-on: ubuntu-latest
    env:
      image: immich-machine-learning
      context: machine-learning
      file: machine-learning/Dockerfile
    strategy:
      # Prevent a failure in one image from stopping the other builds
      fail-fast: false
      matrix:
        include:
          - platforms: linux/amd64,linux/arm64
          - image: immich-machine-learning
            context: machine-learning
            file: machine-learning/Dockerfile
            platforms: linux/amd64,linux/arm64
            device: cpu

          - platforms: linux/amd64
          - image: immich-machine-learning
            context: machine-learning
            file: machine-learning/Dockerfile
            platforms: linux/amd64
            device: cuda
            suffix: -cuda

          - platforms: linux/amd64
          - image: immich-machine-learning
            context: machine-learning
            file: machine-learning/Dockerfile
            platforms: linux/amd64
            device: openvino
            suffix: -openvino

          - platforms: linux/arm64
          - image: immich-machine-learning
            context: machine-learning
            file: machine-learning/Dockerfile
            platforms: linux/arm64
            device: armnn
            suffix: -armnn

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3.2.0

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3.8.0

      - name: Login to Docker Hub
        # Only push to Docker Hub when making a release
        if: ${{ github.event_name == 'release' }}
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Login to GitHub Container Registry
        uses: docker/login-action@v3
        # Skip when PR from a fork
        if: ${{ !github.event.pull_request.head.repo.fork }}
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Generate docker image tags
        id: metadata
        uses: docker/metadata-action@v5
        with:
          flavor: |
            # Disable latest tag
            latest=false
          images: |
            name=ghcr.io/${{ github.repository_owner }}/${{env.image}}
            name=altran1502/${{env.image}},enable=${{ github.event_name == 'release' }}
          tags: |
            # Tag with branch name
            type=ref,event=branch,suffix=${{ matrix.suffix }}
            # Tag with pr-number
            type=ref,event=pr,suffix=${{ matrix.suffix }}
            # Tag with git tag on release
            type=ref,event=tag,suffix=${{ matrix.suffix }}
            type=raw,value=release,enable=${{ github.event_name == 'release' }},suffix=${{ matrix.suffix }}

      - name: Determine build cache output
        id: cache-target
        run: |
          if [[ "${{ github.event_name }}" == "pull_request" ]]; then
            # Essentially just ignore the cache output (PR can't write to registry cache)
            echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
          else
            echo "cache-to=type=registry,mode=max,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{ env.image }}" >> $GITHUB_OUTPUT
          fi

      - name: Build and push image
        uses: docker/build-push-action@v6.10.0
        with:
          context: ${{ env.context }}
          file: ${{ env.file }}
          platforms: ${{ matrix.platforms }}
          # Skip pushing when PR from a fork
          push: ${{ !github.event.pull_request.head.repo.fork }}
          cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{env.image}}
          cache-to: ${{ steps.cache-target.outputs.cache-to }}
          tags: ${{ steps.metadata.outputs.tags }}
          labels: ${{ steps.metadata.outputs.labels }}
          build-args: |
            DEVICE=${{ matrix.device }}
            BUILD_ID=${{ github.run_id }}
            BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.metadata.outputs.tags }}
            BUILD_SOURCE_REF=${{ github.ref_name }}
            BUILD_SOURCE_COMMIT=${{ github.sha }}

  build_and_push_server:
    name: Build and Push Server
    runs-on: ubuntu-latest
    needs: pre-job
    if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
    env:
      image: immich-server
      context: .
      file: server/Dockerfile
    strategy:
      fail-fast: false
      matrix:
        include:
          - platforms: linux/amd64,linux/arm64
          - image: immich-server
            context: .
            file: server/Dockerfile
            platforms: linux/amd64,linux/arm64
            device: cpu

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3.2.0
        uses: docker/setup-qemu-action@v3.0.0

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3.8.0
        uses: docker/setup-buildx-action@v3.3.0

      - name: Login to Docker Hub
        # Only push to Docker Hub when making a release
@@ -243,8 +93,8 @@ jobs:
            # Disable latest tag
            latest=false
          images: |
            name=ghcr.io/${{ github.repository_owner }}/${{env.image}}
            name=altran1502/${{env.image}},enable=${{ github.event_name == 'release' }}
            name=ghcr.io/${{ github.repository_owner }}/${{matrix.image}}
            name=altran1502/${{matrix.image}},enable=${{ github.event_name == 'release' }}
          tags: |
            # Tag with branch name
            type=ref,event=branch,suffix=${{ matrix.suffix }}
@@ -261,50 +111,20 @@ jobs:
            # Essentially just ignore the cache output (PR can't write to registry cache)
            echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
          else
            echo "cache-to=type=registry,mode=max,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{ env.image }}" >> $GITHUB_OUTPUT
            echo "cache-to=type=registry,mode=max,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{ matrix.image }}" >> $GITHUB_OUTPUT
          fi

      - name: Build and push image
        uses: docker/build-push-action@v6.10.0
        uses: docker/build-push-action@v5.3.0
        with:
          context: ${{ env.context }}
          file: ${{ env.file }}
          context: ${{ matrix.context }}
          file: ${{ matrix.file }}
          platforms: ${{ matrix.platforms }}
          # Skip pushing when PR from a fork
          push: ${{ !github.event.pull_request.head.repo.fork }}
          cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{env.image}}
          cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{matrix.image}}
          cache-to: ${{ steps.cache-target.outputs.cache-to }}
          tags: ${{ steps.metadata.outputs.tags }}
          labels: ${{ steps.metadata.outputs.labels }}
          build-args: |
            DEVICE=${{ matrix.device }}
            BUILD_ID=${{ github.run_id }}
            BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.metadata.outputs.tags }}
            BUILD_SOURCE_REF=${{ github.ref_name }}
            BUILD_SOURCE_COMMIT=${{ github.sha }}

  success-check-server:
    name: Docker Build & Push Server Success
    needs: [build_and_push_server, retag_server]
    runs-on: ubuntu-latest
    if: always()
    steps:
      - name: Any jobs failed?
        if: ${{ contains(needs.*.result, 'failure') }}
        run: exit 1
      - name: All jobs passed or skipped
        if: ${{ !(contains(needs.*.result, 'failure')) }}
        run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"

  success-check-ml:
    name: Docker Build & Push ML Success
    needs: [build_and_push_ml, retag_ml]
    runs-on: ubuntu-latest
    if: always()
    steps:
      - name: Any jobs failed?
        if: ${{ contains(needs.*.result, 'failure') }}
        run: exit 1
      - name: All jobs passed or skipped
        if: ${{ !(contains(needs.*.result, 'failure')) }}
        run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
          tags: ${{ steps.metadata.outputs.tags }}
          labels: ${{ steps.metadata.outputs.labels }}
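The retag jobs above avoid a full rebuild by pointing a new tag at an already-published manifest. A minimal standalone sketch of that step; the repository and tag names are illustrative:

- name: Re-tag image without rebuilding
  run: |
    # create pr-1234 as an alias of the manifest already pushed for main
    docker buildx imagetools create \
      -t ghcr.io/example/immich-server:pr-1234 \
      ghcr.io/example/immich-server:main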
.github/workflows/docs-build.yml (64 changes, vendored)
@@ -1,64 +0,0 @@
name: Docs build
on:
  push:
    branches: [main]
  pull_request:
    branches: [main]
  release:
    types: [published]

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  pre-job:
    runs-on: ubuntu-latest
    outputs:
      should_run: ${{ steps.found_paths.outputs.docs == 'true' || steps.should_force.outputs.should_force == 'true' }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - id: found_paths
        uses: dorny/paths-filter@v3
        with:
          filters: |
            docs:
              - 'docs/**'
      - name: Check if we should force jobs to run
        id: should_force
        run: echo "should_force=${{ github.event_name == 'release' || github.ref_name == 'main' }}" >> "$GITHUB_OUTPUT"

  build:
    name: Docs Build
    needs: pre-job
    if: ${{ needs.pre-job.outputs.should_run == 'true' }}
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ./docs

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version-file: './docs/.nvmrc'

      - name: Run npm install
        run: npm ci

      - name: Check formatting
        run: npm run format

      - name: Run build
        run: npm run build

      - name: Upload build output
        uses: actions/upload-artifact@v4
        with:
          name: docs-build-output
          path: docs/build/
          retention-days: 1
.github/workflows/docs-deploy.yml (202 changes, vendored)
@@ -1,202 +0,0 @@
name: Docs deploy
on:
  workflow_run:
    workflows: ["Docs build"]
    types:
      - completed

jobs:
  checks:
    name: Docs Deploy Checks
    runs-on: ubuntu-latest
    outputs:
      parameters: ${{ steps.parameters.outputs.result }}
      artifact: ${{ steps.get-artifact.outputs.result }}
    steps:
      - if: ${{ github.event.workflow_run.conclusion != 'success' }}
        run: echo 'The triggering workflow did not succeed' && exit 1
      - name: Get artifact
        id: get-artifact
        uses: actions/github-script@v7
        with:
          script: |
            let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
              owner: context.repo.owner,
              repo: context.repo.repo,
              run_id: context.payload.workflow_run.id,
            });
            let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => {
              return artifact.name == "docs-build-output"
            })[0];
            if (!matchArtifact) {
              console.log("No artifact found with the name docs-build-output, build job was skipped")
              return { found: false };
            }
            return { found: true, id: matchArtifact.id };
      - name: Determine deploy parameters
        id: parameters
        uses: actions/github-script@v7
        with:
          script: |
            const eventType = context.payload.workflow_run.event;
            const isFork = context.payload.workflow_run.repository.fork;

            let parameters;

            console.log({eventType, isFork});

            if (eventType == "push") {
              const branch = context.payload.workflow_run.head_branch;
              console.log({branch});
              const shouldDeploy = !isFork && branch == "main";
              parameters = {
                event: "branch",
                name: "main",
                shouldDeploy
              };
            } else if (eventType == "pull_request") {
              let pull_number = context.payload.workflow_run.pull_requests[0]?.number;
              if(!pull_number) {
                const response = await github.rest.search.issuesAndPullRequests({q: 'repo:${{ github.repository }} is:pr sha:${{ github.event.workflow_run.head_sha }}',per_page: 1,})
                const items = response.data.items
                if (items.length < 1) {
                  throw new Error("No pull request found for the commit")
                }
                const pullRequestNumber = items[0].number
                console.info("Pull request number is", pullRequestNumber)
                pull_number = pullRequestNumber
              }
              const {data: pr} = await github.rest.pulls.get({
                owner: context.repo.owner,
                repo: context.repo.repo,
                pull_number
              });

              console.log({pull_number});

              parameters = {
                event: "pr",
                name: `pr-${pull_number}`,
                pr_number: pull_number,
                shouldDeploy: true
              };
            } else if (eventType == "release") {
              parameters = {
                event: "release",
                name: context.payload.workflow_run.head_branch,
                shouldDeploy: !isFork
              };
            }

            console.log(parameters);
            return parameters;

  deploy:
    name: Docs Deploy
    runs-on: ubuntu-latest
    needs: checks
    if: ${{ fromJson(needs.checks.outputs.artifact).found && fromJson(needs.checks.outputs.parameters).shouldDeploy }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Load parameters
        id: parameters
        uses: actions/github-script@v7
        with:
          script: |
            const json = `${{ needs.checks.outputs.parameters }}`;
            const parameters = JSON.parse(json);
            core.setOutput("event", parameters.event);
            core.setOutput("name", parameters.name);
            core.setOutput("shouldDeploy", parameters.shouldDeploy);

      - run: |
          echo "Starting docs deployment for ${{ steps.parameters.outputs.event }} ${{ steps.parameters.outputs.name }}"

      - name: Download artifact
        uses: actions/github-script@v7
        with:
          script: |
            let artifact = ${{ needs.checks.outputs.artifact }};
            let download = await github.rest.actions.downloadArtifact({
              owner: context.repo.owner,
              repo: context.repo.repo,
              artifact_id: artifact.id,
              archive_format: 'zip',
            });
            let fs = require('fs');
            fs.writeFileSync(`${process.env.GITHUB_WORKSPACE}/docs-build-output.zip`, Buffer.from(download.data));

      - name: Unzip artifact
        run: unzip "${{ github.workspace }}/docs-build-output.zip" -d "${{ github.workspace }}/docs/build"

      - name: Deploy Docs Subdomain
        env:
          TF_VAR_prefix_name: ${{ steps.parameters.outputs.name}}
          TF_VAR_prefix_event_type: ${{ steps.parameters.outputs.event }}
          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
        uses: gruntwork-io/terragrunt-action@v2
        with:
          tg_version: "0.58.12"
          tofu_version: "1.7.1"
          tg_dir: "deployment/modules/cloudflare/docs"
          tg_command: "apply"

      - name: Deploy Docs Subdomain Output
        id: docs-output
        env:
          TF_VAR_prefix_name: ${{ steps.parameters.outputs.name}}
          TF_VAR_prefix_event_type: ${{ steps.parameters.outputs.event }}
          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
        uses: gruntwork-io/terragrunt-action@v2
        with:
          tg_version: "0.58.12"
          tofu_version: "1.7.1"
          tg_dir: "deployment/modules/cloudflare/docs"
          tg_command: "output -json"

      - name: Output Cleaning
        id: clean
        run: |
          TG_OUT=$(echo '${{ steps.docs-output.outputs.tg_action_output }}' | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
          echo "output=$TG_OUT" >> $GITHUB_OUTPUT

      - name: Publish to Cloudflare Pages
        uses: cloudflare/pages-action@v1
        with:
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN_PAGES_UPLOAD }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          projectName: ${{ fromJson(steps.clean.outputs.output).pages_project_name.value }}
          workingDirectory: "docs"
          directory: "build"
          branch: ${{ steps.parameters.outputs.name }}
          wranglerVersion: '3'

      - name: Deploy Docs Release Domain
        if: ${{ steps.parameters.outputs.event == 'release' }}
        env:
          TF_VAR_prefix_name: ${{ steps.parameters.outputs.name}}
          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
        uses: gruntwork-io/terragrunt-action@v2
        with:
          tg_version: '0.58.12'
          tofu_version: '1.7.1'
          tg_dir: 'deployment/modules/cloudflare/docs-release'
          tg_command: 'apply'

      - name: Comment
        uses: actions-cool/maintain-one-comment@v3
        if: ${{ steps.parameters.outputs.event == 'pr' }}
        with:
          number: ${{ fromJson(needs.checks.outputs.parameters).pr_number }}
          body: |
            📖 Documentation deployed to [${{ fromJson(steps.clean.outputs.output).immich_app_branch_subdomain.value }}](https://${{ fromJson(steps.clean.outputs.output).immich_app_branch_subdomain.value }})
          emojis: 'rocket'
          body-include: '<!-- Docs PR URL -->'
.github/workflows/docs-destroy.yml (33 changes, vendored)
@@ -1,33 +0,0 @@
name: Docs destroy
on:
  pull_request_target:
    types: [closed]

jobs:
  deploy:
    name: Docs Destroy
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Destroy Docs Subdomain
        env:
          TF_VAR_prefix_name: "pr-${{ github.event.number }}"
          TF_VAR_prefix_event_type: "pr"
          CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
        uses: gruntwork-io/terragrunt-action@v2
        with:
          tg_version: "0.58.12"
          tofu_version: "1.7.1"
          tg_dir: "deployment/modules/cloudflare/docs"
          tg_command: "destroy -refresh=false"

      - name: Comment
        uses: actions-cool/maintain-one-comment@v3
        with:
          number: ${{ github.event.number }}
          delete: true
          body-include: '<!-- Docs PR URL -->'
.github/workflows/fix-format.yml (52 changes, vendored)
@@ -1,52 +0,0 @@
name: Fix formatting

on:
  pull_request:
    types: [labeled]

jobs:
  fix-formatting:
    runs-on: ubuntu-latest
    if: ${{ github.event.label.name == 'fix:formatting' }}
    permissions:
      pull-requests: write
    steps:
      - name: Generate a token
        id: generate-token
        uses: actions/create-github-app-token@v1
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: 'Checkout'
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.ref }}
          token: ${{ steps.generate-token.outputs.token }}

      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version-file: './server/.nvmrc'

      - name: Fix formatting
        run: make install-all && make format-all

      - name: Commit and push
        uses: EndBug/add-and-commit@v9
        with:
          default_author: github_actions
          message: 'chore: fix formatting'

      - name: Remove label
        uses: actions/github-script@v7
        if: always()
        with:
          script: |
            github.rest.issues.removeLabel({
              issue_number: context.payload.pull_request.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              name: 'fix:formatting'
            })
.github/workflows/pr-label-validation.yml (22 changes, vendored)
@@ -1,22 +0,0 @@
name: PR Label Validation

on:
  pull_request_target:
    types: [opened, labeled, unlabeled, synchronize]

jobs:
  validate-release-label:
    runs-on: ubuntu-latest
    permissions:
      issues: write
      pull-requests: write
    steps:
      - name: Require PR to have a changelog label
        uses: mheap/github-action-required-labels@v5
        with:
          mode: exactly
          count: 1
          use_regex: true
          labels: "changelog:.*"
          add_comment: true
          message: "Label error. Requires {{errorString}} {{count}} of: {{ provided }}. Found: {{ applied }}. A maintainer will add the required label."
.github/workflows/pr-labeler.yml (12 changes, vendored)
@@ -1,12 +0,0 @@
name: "Pull Request Labeler"
on:
  - pull_request_target

jobs:
  labeler:
    permissions:
      contents: read
      pull-requests: write
    runs-on: ubuntu-latest
    steps:
      - uses: actions/labeler@v5
@@ -1,15 +0,0 @@
name: PR Conventional Commit Validation

on:
  pull_request:
    types: [opened, synchronize, reopened, edited]

jobs:
  validate-pr-title:
    runs-on: ubuntu-latest
    steps:
      - name: PR Conventional Commit Validation
        uses: ytanikin/PRConventionalCommits@1.3.0
        with:
          task_types: '["feat","fix","docs","test","ci","refactor","perf","chore","revert"]'
          add_label: 'false'
.github/workflows/pr-require-label.yml (13 changes, vendored, new file)
@@ -0,0 +1,13 @@
name: Enforce PR labels

on:
  pull_request:
    types: [labeled, unlabeled, opened, edited, synchronize]
jobs:
  enforce-label:
    name: Enforce label
    runs-on: ubuntu-latest
    steps:
      - if: toJson(github.event.pull_request.labels) == '[]'
        run: exit 1
.github/workflows/prepare-release.yml (15 changes, vendored)
@@ -29,17 +29,10 @@ jobs:
      ref: ${{ steps.push-tag.outputs.commit_long_sha }}

    steps:
      - name: Generate a token
        id: generate-token
        uses: actions/create-github-app-token@v1
        with:
          app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
          private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

      - name: Checkout
        uses: actions/checkout@v4
        with:
          token: ${{ steps.generate-token.outputs.token }}
          token: ${{ secrets.ORG_RELEASE_TOKEN }}

      - name: Install Poetry
        run: pipx install poetry
@@ -51,8 +44,10 @@ jobs:
        id: push-tag
        uses: EndBug/add-and-commit@v9
        with:
          default_author: github_actions
          message: 'chore: version ${{ env.IMMICH_VERSION }}'
          author_name: Alex The Bot
          author_email: alex.tran1502@gmail.com
          default_author: user_info
          message: 'Version ${{ env.IMMICH_VERSION }}'
          tag: ${{ env.IMMICH_VERSION }}
          push: true
.github/workflows/sdk.yml (2 changes, vendored)
@@ -19,7 +19,7 @@ jobs:
      # Setup .npmrc file to publish to npm
      - uses: actions/setup-node@v4
        with:
          node-version-file: './open-api/typescript-sdk/.nvmrc'
          node-version: '20.x'
          registry-url: 'https://registry.npmjs.org'
      - name: Install deps
        run: npm ci
.github/workflows/static_analysis.yml (27 changes, vendored)
@@ -10,27 +10,8 @@ concurrency:
  cancel-in-progress: true

jobs:
  pre-job:
    runs-on: ubuntu-latest
    outputs:
      should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - id: found_paths
        uses: dorny/paths-filter@v3
        with:
          filters: |
            mobile:
              - 'mobile/**'
      - name: Check if we should force jobs to run
        id: should_force
        run: echo "should_force=${{ github.event_name == 'release' }}" >> "$GITHUB_OUTPUT"

  mobile-dart-analyze:
    name: Run Dart Code Analysis
    needs: pre-job
    if: ${{ needs.pre-job.outputs.should_run == 'true' }}

    runs-on: ubuntu-latest

@@ -41,8 +22,8 @@ jobs:
      - name: Setup Flutter SDK
        uses: subosito/flutter-action@v2
        with:
          channel: 'stable'
          flutter-version-file: ./mobile/pubspec.yaml
          channel: "stable"
          flutter-version: "3.19.3"

      - name: Install dependencies
        run: dart pub get
@@ -56,10 +37,6 @@ jobs:
        run: dart format lib/ --set-exit-if-changed
        working-directory: ./mobile

      - name: Run dart custom_lint
        run: dart run custom_lint
        working-directory: ./mobile

      # Enable after riverpod generator migration is completed
      # - name: Run dart custom lint
      # run: dart run custom_lint
.github/workflows/test.yml (255 changes, vendored)
@@ -10,48 +10,30 @@ concurrency:
  cancel-in-progress: true

jobs:
  pre-job:
  doc-tests:
    name: Docs
    runs-on: ubuntu-latest
    outputs:
      should_run_web: ${{ steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }}
      should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
      should_run_cli: ${{ steps.found_paths.outputs.cli == 'true' || steps.should_force.outputs.should_force == 'true' }}
      should_run_e2e: ${{ steps.found_paths.outputs.e2e == 'true' || steps.should_force.outputs.should_force == 'true' }}
      should_run_mobile: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
      should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
      should_run_e2e_web: ${{ steps.found_paths.outputs.e2e == 'true' || steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }}
      should_run_e2e_server_cli: ${{ steps.found_paths.outputs.e2e == 'true' || steps.found_paths.outputs.server == 'true' || steps.found_paths.outputs.cli == 'true' || steps.should_force.outputs.should_force == 'true' }}
    defaults:
      run:
        working-directory: ./docs

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - id: found_paths
        uses: dorny/paths-filter@v3
        with:
          filters: |
            web:
              - 'web/**'
              - 'i18n/**'
              - 'open-api/typescript-sdk/**'
            server:
              - 'server/**'
            cli:
              - 'cli/**'
              - 'open-api/typescript-sdk/**'
            e2e:
              - 'e2e/**'
            mobile:
              - 'mobile/**'
            machine-learning:
              - 'machine-learning/**'

      - name: Check if we should force jobs to run
        id: should_force
        run: echo "should_force=${{ github.event_name == 'workflow_dispatch' }}" >> "$GITHUB_OUTPUT"
      - name: Run npm install
        run: npm ci

      - name: Run formatter
        run: npm run format
        if: ${{ !cancelled() }}

      - name: Run build
        run: npm run build
        if: ${{ !cancelled() }}

  server-unit-tests:
    name: Test & Lint Server
    needs: pre-job
    if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
    name: Server
    runs-on: ubuntu-latest
    defaults:
      run:
@@ -61,11 +43,6 @@ jobs:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version-file: './server/.nvmrc'

      - name: Run npm install
        run: npm ci

@@ -81,14 +58,12 @@ jobs:
        run: npm run check
        if: ${{ !cancelled() }}

      - name: Run small tests & coverage
      - name: Run unit tests & coverage
        run: npm run test:cov
        if: ${{ !cancelled() }}

  cli-unit-tests:
    name: Unit Test CLI
    needs: pre-job
    if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
    name: CLI
    runs-on: ubuntu-latest
    defaults:
      run:
@@ -101,7 +76,7 @@ jobs:
      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version-file: './cli/.nvmrc'
          node-version: 20

      - name: Setup typescript-sdk
        run: npm ci && npm run build
@@ -126,44 +101,8 @@ jobs:
        run: npm run test:cov
        if: ${{ !cancelled() }}

  cli-unit-tests-win:
    name: Unit Test CLI (Windows)
    needs: pre-job
    if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
    runs-on: windows-latest
    defaults:
      run:
        working-directory: ./cli

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version-file: './cli/.nvmrc'

      - name: Setup typescript-sdk
        run: npm ci && npm run build
        working-directory: ./open-api/typescript-sdk

      - name: Install deps
        run: npm ci

      # Skip linter & formatter in Windows test.
      - name: Run tsc
        run: npm run check
        if: ${{ !cancelled() }}

      - name: Run unit tests & coverage
        run: npm run test:cov
        if: ${{ !cancelled() }}

  web-unit-tests:
    name: Test & Lint Web
    needs: pre-job
    if: ${{ needs.pre-job.outputs.should_run_web == 'true' }}
    name: Web
    runs-on: ubuntu-latest
    defaults:
      run:
@@ -173,11 +112,6 @@ jobs:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version-file: './web/.nvmrc'

      - name: Run setup typescript-sdk
        run: npm ci && npm run build
        working-directory: ./open-api/typescript-sdk
@@ -205,10 +139,8 @@ jobs:
        run: npm run test:cov
        if: ${{ !cancelled() }}

  e2e-tests-lint:
    name: End-to-End Lint
    needs: pre-job
    if: ${{ needs.pre-job.outputs.should_run_e2e == 'true' }}
  e2e-tests:
    name: End-to-End Tests
    runs-on: ubuntu-latest
    defaults:
      run:
@@ -217,17 +149,24 @@ jobs:
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          submodules: 'recursive'

      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version-file: './e2e/.nvmrc'
          node-version: 20

      - name: Run setup typescript-sdk
        run: npm ci && npm run build
        working-directory: ./open-api/typescript-sdk
        if: ${{ !cancelled() }}

      - name: Run setup cli
        run: npm ci && npm run build
        working-directory: ./cli
        if: ${{ !cancelled() }}

      - name: Install dependencies
        run: npm ci
        if: ${{ !cancelled() }}
@@ -244,58 +183,8 @@ jobs:
        run: npm run check
        if: ${{ !cancelled() }}

  medium-tests-server:
    name: Medium Tests (Server)
    needs: pre-job
    if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
    runs-on: mich

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          submodules: 'recursive'

      - name: Production build
        if: ${{ !cancelled() }}
        run: docker compose -f e2e/docker-compose.yml build

      - name: Run medium tests
        if: ${{ !cancelled() }}
        run: make test-medium

  e2e-tests-server-cli:
    name: End-to-End Tests (Server & CLI)
    needs: pre-job
    if: ${{ needs.pre-job.outputs.should_run_e2e_server_cli == 'true' }}
    runs-on: mich
    defaults:
      run:
        working-directory: ./e2e

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          submodules: 'recursive'

      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version-file: './e2e/.nvmrc'

      - name: Run setup typescript-sdk
        run: npm ci && npm run build
        working-directory: ./open-api/typescript-sdk
        if: ${{ !cancelled() }}

      - name: Run setup cli
        run: npm ci && npm run build
        working-directory: ./cli
        if: ${{ !cancelled() }}

      - name: Install dependencies
        run: npm ci
      - name: Install Playwright Browsers
        run: npx playwright install --with-deps chromium
        if: ${{ !cancelled() }}

      - name: Docker build
@@ -306,51 +195,12 @@ jobs:
        run: npm run test
        if: ${{ !cancelled() }}

  e2e-tests-web:
    name: End-to-End Tests (Web)
    needs: pre-job
    if: ${{ needs.pre-job.outputs.should_run_e2e_web == 'true' }}
    runs-on: mich
    defaults:
      run:
        working-directory: ./e2e

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          submodules: 'recursive'

      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version-file: './e2e/.nvmrc'

      - name: Run setup typescript-sdk
        run: npm ci && npm run build
        working-directory: ./open-api/typescript-sdk
        if: ${{ !cancelled() }}

      - name: Install dependencies
        run: npm ci
        if: ${{ !cancelled() }}

      - name: Install Playwright Browsers
        run: npx playwright install --with-deps chromium
        if: ${{ !cancelled() }}

      - name: Docker build
        run: docker compose build
        if: ${{ !cancelled() }}

      - name: Run e2e tests (web)
        run: npx playwright test
        if: ${{ !cancelled() }}

  mobile-unit-tests:
    name: Unit Test Mobile
    needs: pre-job
    if: ${{ needs.pre-job.outputs.should_run_mobile == 'true' }}
    name: Mobile
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
@@ -358,15 +208,13 @@ jobs:
        uses: subosito/flutter-action@v2
        with:
          channel: 'stable'
          flutter-version-file: ./mobile/pubspec.yaml
          flutter-version: '3.19.3'
      - name: Run tests
        working-directory: ./mobile
        run: flutter test -j 1

  ml-unit-tests:
    name: Unit Test ML
    needs: pre-job
    if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
    name: Machine Learning
    runs-on: ubuntu-latest
    defaults:
      run:
@@ -412,32 +260,16 @@ jobs:
    name: OpenAPI Clients
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version-file: './server/.nvmrc'

      - name: Install server dependencies
        run: npm --prefix=server ci

      - name: Build the app
        run: npm --prefix=server run build

      - uses: actions/checkout@v4
      - name: Run API generation
        run: make open-api

      - name: Find file changes
        uses: tj-actions/verify-changed-files@v20
        uses: tj-actions/verify-changed-files@v19
        id: verify-changed-files
        with:
          files: |
            mobile/openapi
            open-api/typescript-sdk
            open-api/immich-openapi-specs.json

      - name: Verify files have not changed
        if: steps.verify-changed-files.outputs.files_changed == 'true'
        run: |
@@ -470,11 +302,6 @@ jobs:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version-file: './server/.nvmrc'

      - name: Install server dependencies
        run: npm ci

@@ -492,7 +319,7 @@ jobs:
        run: npm run typeorm:migrations:generate ./src/migrations/TestMigration

      - name: Find file changes
        uses: tj-actions/verify-changed-files@v20
        uses: tj-actions/verify-changed-files@v19
        id: verify-changed-files
        with:
          files: |
@@ -505,12 +332,12 @@ jobs:
          exit 1

      - name: Run SQL generation
        run: npm run sync:sql
        run: npm run sql:generate
        env:
          DB_URL: postgres://postgres:postgres@localhost:5432/immich

      - name: Find file changes
        uses: tj-actions/verify-changed-files@v20
        uses: tj-actions/verify-changed-files@v19
        id: verify-changed-sql-files
        with:
          files: |
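The generation checks above all follow the same regenerate-then-diff pattern. A minimal sketch of that guard in isolation; the file list is trimmed to one entry for illustration:

- name: Run API generation
  run: make open-api
- name: Find file changes
  id: verify-changed-files
  uses: tj-actions/verify-changed-files@v20
  with:
    files: |
      open-api/immich-openapi-specs.json
# fail the job when the committed spec no longer matches the regenerated one
- name: Verify files have not changed
  if: steps.verify-changed-files.outputs.files_changed == 'true'
  run: exit 1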
.gitignore (7 changes, vendored)
@@ -14,12 +14,7 @@ mobile/gradle.properties
mobile/openapi/pubspec.lock
mobile/*.jks
mobile/libisar.dylib
mobile/openapi/test
mobile/openapi/doc
mobile/openapi/.openapi-generator/FILES

open-api/typescript-sdk/build
mobile/android/fastlane/report.xml
mobile/ios/fastlane/report.xml

vite.config.js.timestamp-*
mobile/ios/fastlane/report.xml
.gitmodules (4 changes, vendored)
@@ -1,6 +1,6 @@
[submodule "mobile/.isar"]
    path = mobile/.isar
    url = https://github.com/isar/isar
[submodule "e2e/test-assets"]
    path = e2e/test-assets
[submodule "server/test/assets"]
    path = server/test/assets
    url = https://github.com/immich-app/test-assets
.vscode/launch.json (8 changes, vendored)
@@ -5,8 +5,8 @@
      "type": "node",
      "request": "attach",
      "restart": true,
      "port": 9231,
      "name": "Immich API Server",
      "port": 9230,
      "name": "Immich Server",
      "remoteRoot": "/usr/src/app",
      "localRoot": "${workspaceFolder}/server"
    },
@@ -14,8 +14,8 @@
      "type": "node",
      "request": "attach",
      "restart": true,
      "port": 9230,
      "name": "Immich Workers",
      "port": 9231,
      "name": "Immich Microservices",
      "remoteRoot": "/usr/src/app",
      "localRoot": "${workspaceFolder}/server"
    }
.vscode/settings.json (12 changes, vendored)
@@ -1,16 +1,6 @@
{
  "editor.formatOnSave": true,
  "[javascript]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode",
    "editor.tabSize": 2,
    "editor.formatOnSave": true
  },
  "[typescript]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode",
    "editor.tabSize": 2,
    "editor.formatOnSave": true
  },
  "[css]": {
  "[javascript][typescript][css]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode",
    "editor.tabSize": 2,
    "editor.formatOnSave": true
@@ -131,4 +131,4 @@ conduct enforcement ladder](https://github.com/mozilla/diversity).

For answers to common questions about this code of conduct, see the
FAQ at https://www.contributor-covenant.org/faq. Translations are
available at https://www.contributor-covenant.org/translations.
available at https://www.contributor-covenant.org/translations.
Makefile (66 changes)
@@ -10,6 +10,12 @@ dev-update:
dev-scale:
	docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans

stage:
	docker compose -f ./docker/docker-compose.staging.yml up --build -V --remove-orphans

pull-stage:
	docker compose -f ./docker/docker-compose.staging.yml pull

.PHONY: e2e
e2e:
	docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
@@ -31,65 +37,7 @@ open-api-typescript:
	cd ./open-api && bash ./bin/generate-open-api.sh typescript

sql:
	npm --prefix server run sync:sql
	npm --prefix server run sql:generate

attach-server:
	docker exec -it docker_immich-server_1 sh

renovate:
	LOG_LEVEL=debug npx renovate --platform=local --repository-cache=reset

MODULES = e2e server web cli sdk docs

audit-%:
	npm --prefix $(subst sdk,open-api/typescript-sdk,$*) audit fix
install-%:
	npm --prefix $(subst sdk,open-api/typescript-sdk,$*) i
build-cli: build-sdk
build-web: build-sdk
build-%: install-%
	npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run build
format-%:
	npm --prefix $* run format:fix
lint-%:
	npm --prefix $* run lint:fix
check-%:
	npm --prefix $* run check
check-web:
	npm --prefix web run check:typescript
	npm --prefix web run check:svelte
test-%:
	npm --prefix $* run test
test-e2e:
	docker compose -f ./e2e/docker-compose.yml build
	npm --prefix e2e run test
	npm --prefix e2e run test:web
test-medium:
	docker run \
		--rm \
		-v ./server/src:/usr/src/app/src \
		-v ./server/test:/usr/src/app/test \
		-v ./server/vitest.config.medium.mjs:/usr/src/app/vitest.config.medium.mjs \
		-v ./server/tsconfig.json:/usr/src/app/tsconfig.json \
		-e NODE_ENV=development \
		immich-server:latest \
		-c "npm ci && npm run test:medium -- --run"
test-medium-dev:
	docker exec -it immich_server /bin/sh -c "npm run test:medium"

build-all: $(foreach M,$(filter-out e2e,$(MODULES)),build-$M) ;
install-all: $(foreach M,$(MODULES),install-$M) ;
check-all: $(foreach M,$(filter-out sdk cli docs,$(MODULES)),check-$M) ;
lint-all: $(foreach M,$(filter-out sdk docs,$(MODULES)),lint-$M) ;
format-all: $(foreach M,$(filter-out sdk,$(MODULES)),format-$M) ;
audit-all: $(foreach M,$(MODULES),audit-$M) ;
hygiene-all: lint-all format-all check-all sql audit-all;
test-all: $(foreach M,$(filter-out sdk docs,$(MODULES)),test-$M) ;

clean:
	find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
	find . -name "dist" -type d -prune -exec rm -rf '{}' +
	find . -name "build" -type d -prune -exec rm -rf '{}' +
	find . -name "svelte-kit" -type d -prune -exec rm -rf '{}' +
	docker compose -f ./docker/docker-compose.dev.yml rm -v -f || true
	docker compose -f ./e2e/docker-compose.yml rm -v -f || true
88
README.md
@@ -1,7 +1,7 @@
<p align="center">
  <br/>
  <a href="https://opensource.org/license/agpl-v3"><img src="https://img.shields.io/badge/License-AGPL_v3-blue.svg?color=3F51B5&style=for-the-badge&label=License&logoColor=000000&labelColor=ececec" alt="License: AGPLv3"></a>
  <a href="https://discord.immich.app">
  <a href="https://discord.gg/D8JsnBEuKb">
    <img src="https://img.shields.io/discord/979116623879368755.svg?label=Discord&logo=Discord&style=for-the-badge&logoColor=000000&labelColor=ececec" alt="Discord"/>
  </a>
  <br/>
@@ -17,7 +17,6 @@
    <img src="design/immich-screenshots.png" title="Main Screenshot">
  </a>
  <br/>

<p align="center">
  <a href="readme_i18n/README_ca_ES.md">Català</a>
  <a href="readme_i18n/README_es_ES.md">Español</a>
@@ -30,11 +29,6 @@
  <a href="readme_i18n/README_tr_TR.md">Türkçe</a>
  <a href="readme_i18n/README_zh_CN.md">中文</a>
  <a href="readme_i18n/README_ru_RU.md">Русский</a>
  <a href="readme_i18n/README_pt_BR.md">Português Brasileiro</a>
  <a href="readme_i18n/README_sv_SE.md">Svenska</a>
  <a href="readme_i18n/README_ar_JO.md">العربية</a>
  <a href="readme_i18n/README_vi_VN.md">Tiếng Việt</a>
  <a href="readme_i18n/README_th_TH.md">ภาษาไทย</a>
</p>

## Disclaimer
@@ -44,36 +38,45 @@
- ⚠️ **Do not use the app as the only way to store your photos and videos.**
- ⚠️ Always follow [3-2-1](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) backup plan for your precious photos and videos!

> [!NOTE]
> You can find the main documentation, including installation guides, at https://immich.app/.
## Content

## Links

- [Documentation](https://immich.app/docs)
- [About](https://immich.app/docs/overview/introduction)
- [Installation](https://immich.app/docs/install/requirements)
- [Roadmap](https://immich.app/roadmap)
- [Official Documentation](https://immich.app/docs)
- [Roadmap](https://github.com/orgs/immich-app/projects/1)
- [Demo](#demo)
- [Features](#features)
- [Translations](https://immich.app/docs/developer/translations)
- [Contributing](https://immich.app/docs/overview/support-the-project)
- [Introduction](https://immich.app/docs/overview/introduction)
- [Installation](https://immich.app/docs/install/requirements)
- [Contribution Guidelines](https://immich.app/docs/overview/support-the-project)
- [Support The Project](#support-the-project)

## Documentation

You can find the main documentation, including installation guides, at https://immich.app/.

## Demo

Access the demo [here](https://demo.immich.app). The demo is running on a Free-tier Oracle VM in Amsterdam with a 2.4Ghz quad-core ARM64 CPU and 24GB RAM.
You can access the web demo at https://demo.immich.app

For the mobile app, you can use `https://demo.immich.app/api` for the `Server Endpoint URL`

### Login credentials
```bash title="Demo Credential"
The credential
email: demo@immich.app
password: demo
```

| Email           | Password |
| --------------- | -------- |
| demo@immich.app | demo     |
```
Spec: Free-tier Oracle VM - Amsterdam - 2.4Ghz quad-core ARM64 CPU, 24GB RAM
```

## Activities


## Features

| Features | Mobile | Web |
| :------------------------------------------- | ------ | --- |
| :--------------------------------------------- | -------- | ----- |
| Upload and view videos and photos | Yes | Yes |
| Auto backup when the app is opened | Yes | N/A |
| Prevent duplication of assets | Yes | Yes |
@@ -93,7 +96,7 @@ For the mobile app, you can use `https://demo.immich.app/api` for the `Server En
| LivePhoto/MotionPhoto backup and playback | Yes | Yes |
| Support 360 degree image display | No | Yes |
| User-defined storage structure | Yes | Yes |
| Public Sharing | Yes | Yes |
| Public Sharing | No | Yes |
| Archive and Favorites | Yes | Yes |
| Global Map | Yes | Yes |
| Partner Sharing | Yes | Yes |
@@ -102,22 +105,31 @@ For the mobile app, you can use `https://demo.immich.app/api` for the `Server En
| Offline support | Yes | No |
| Read-only gallery | Yes | Yes |
| Stacked Photos | Yes | Yes |
| Tags | No | Yes |
| Folder View | No | Yes |

## Translations
## Support the project

Read more about translations [here](https://immich.app/docs/developer/translations).
I've committed to this project, and I will not stop. I will keep updating the docs, adding new features, and fixing bugs. But I can't do it alone. So I need your help to give me additional motivation to keep going.

<a href="https://hosted.weblate.org/engage/immich/">
  <img src="https://hosted.weblate.org/widget/immich/immich/multi-auto.svg" alt="Translation status" />
As our hosts in the [selfhosted.show - In the episode 'The-organization-must-not-be-name is a Hostile Actor'](https://selfhosted.show/79?t=1418) said, this is a massive undertaking of what the team and I are doing. And I would love to someday be able to do this full-time, and I am asking for your help to make that happen.

If you feel like this is the right cause and the app is something you are seeing yourself using for a long time, please consider supporting the project with the option below.

### Donation

- [Monthly donation](https://github.com/sponsors/immich-app) via GitHub Sponsors
- [One-time donation](https://github.com/sponsors/immich-app?frequency=one-time&sponsor=alextran1502) via GitHub Sponsors
- [Liberapay](https://liberapay.com/alex.tran1502/)
- [buymeacoffee](https://www.buymeacoffee.com/altran1502)
- Bitcoin: 3QVAb9dCHutquVejeNXitPqZX26Yg5kxb7
- ZCash: u1smm4wvqegcp46zss2jf5xptchgeczp4rx7a0wu3mermf2wxahm26yyz5w9mw3f2p4emwlljxjumg774kgs8rntt9yags0whnzane4n67z4c7gppq4yyvcj404ne3r769prwzd9j8ntvqp44fa6d67sf7rmcfjmds3gmeceff4u8e92rh38nd30cr96xw6vfhk6scu4ws90ldzupr3sz

## Contributors

<a href="https://github.com/alextran1502/immich/graphs/contributors">
  <img src="https://contrib.rocks/image?repo=immich-app/immich" width="100%"/>
</a>

## Repository activity



## Star history
## Star History

<a href="https://star-history.com/#immich-app/immich&Date">
  <picture>
@@ -126,9 +138,3 @@ Read more about translations [here](https://immich.app/docs/developer/translatio
    <img alt="Star History Chart" src="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" width="100%" />
  </picture>
</a>

## Contributors

<a href="https://github.com/alextran1502/immich/graphs/contributors">
  <img src="https://contrib.rocks/image?repo=immich-app/immich" width="100%"/>
</a>
@@ -2,4 +2,4 @@

## Reporting a Vulnerability

Please report security issues to `security@immich.app`
Please report security issues to `alex.tran1502@gmail.com`
1
cli/.eslintignore
Normal file
@@ -0,0 +1 @@
/dist
28
cli/.eslintrc.cjs
Normal file
@@ -0,0 +1,28 @@
module.exports = {
  parser: '@typescript-eslint/parser',
  parserOptions: {
    project: 'tsconfig.json',
    sourceType: 'module',
    tsconfigRootDir: __dirname,
  },
  plugins: ['@typescript-eslint/eslint-plugin'],
  extends: ['plugin:@typescript-eslint/recommended', 'plugin:prettier/recommended', 'plugin:unicorn/recommended'],
  root: true,
  env: {
    node: true,
  },
  ignorePatterns: ['.eslintrc.js'],
  rules: {
    '@typescript-eslint/interface-name-prefix': 'off',
    '@typescript-eslint/explicit-function-return-type': 'off',
    '@typescript-eslint/explicit-module-boundary-types': 'off',
    '@typescript-eslint/no-explicit-any': 'off',
    '@typescript-eslint/no-floating-promises': 'error',
    'unicorn/prefer-module': 'off',
    'unicorn/prevent-abbreviations': 'off',
    'unicorn/no-process-exit': 'off',
    'unicorn/import-style': 'off',
    curly: 2,
    'prettier/prettier': 0,
  },
};
@@ -1 +0,0 @@
22.12.0
@@ -1,4 +1,4 @@
FROM node:22.12.0-alpine3.20@sha256:96cc8323e25c8cc6ddcb8b965e135cfd57846e8003ec0d7bcec16c5fd5f6d39f AS core
FROM node:20-alpine3.19@sha256:ec0c413b1d84f3f7f67ec986ba885930c57b5318d2eb3abc6960ee05d4f2eb28 as core

WORKDIR /usr/src/open-api/typescript-sdk
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
@@ -16,4 +16,4 @@ RUN npm run build

WORKDIR /import

ENTRYPOINT ["node", "/usr/src/app/dist"]
ENTRYPOINT ["node", "/usr/src/app/dist"]
@@ -4,18 +4,8 @@ Please see the [Immich CLI documentation](https://immich.app/docs/features/comma

# For developers

Before building the CLI, you must build the immich server and the open-api client. To build the server run the following in the server folder:

    $ npm install
    $ npm run build

Then, to build the open-api client run the following in the open-api folder:

    $ ./bin/generate-open-api.sh

To run the Immich CLI from source, run the following in the cli folder:

    $ npm install
    $ npm run build
    $ ts-node .

@@ -27,4 +17,3 @@ You can also build and install the CLI using

    $ npm run build
    $ npm install -g .
****
@@ -1,61 +0,0 @@
import { FlatCompat } from '@eslint/eslintrc';
import js from '@eslint/js';
import typescriptEslint from '@typescript-eslint/eslint-plugin';
import tsParser from '@typescript-eslint/parser';
import globals from 'globals';
import path from 'node:path';
import { fileURLToPath } from 'node:url';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const compat = new FlatCompat({
  baseDirectory: __dirname,
  recommendedConfig: js.configs.recommended,
  allConfig: js.configs.all,
});

export default [
  {
    ignores: ['eslint.config.mjs', 'dist'],
  },
  ...compat.extends(
    'plugin:@typescript-eslint/recommended',
    'plugin:prettier/recommended',
    'plugin:unicorn/recommended',
  ),
  {
    plugins: {
      '@typescript-eslint': typescriptEslint,
    },

    languageOptions: {
      globals: {
        ...globals.node,
      },

      parser: tsParser,
      ecmaVersion: 5,
      sourceType: 'module',

      parserOptions: {
        project: 'tsconfig.json',
        tsconfigRootDir: __dirname,
      },
    },

    rules: {
      '@typescript-eslint/interface-name-prefix': 'off',
      '@typescript-eslint/explicit-function-return-type': 'off',
      '@typescript-eslint/explicit-module-boundary-types': 'off',
      '@typescript-eslint/no-explicit-any': 'off',
      '@typescript-eslint/no-floating-promises': 'error',
      'unicorn/prefer-module': 'off',
      'unicorn/prevent-abbreviations': 'off',
      'unicorn/no-process-exit': 'off',
      'unicorn/import-style': 'off',
      curly: 2,
      'prettier/prettier': 0,
      'object-shorthand': ['error', 'always'],
    },
  },
];
2328
cli/package-lock.json
generated
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
{
  "name": "@immich/cli",
  "version": "2.2.37",
  "version": "2.2.0",
  "description": "Command Line Interface (CLI) for Immich",
  "type": "module",
  "exports": "./dist/index.js",
@@ -13,33 +13,30 @@
    "cli"
  ],
  "devDependencies": {
    "@eslint/eslintrc": "^3.1.0",
    "@eslint/js": "^9.8.0",
    "@immich/sdk": "file:../open-api/typescript-sdk",
    "@types/byte-size": "^8.1.0",
    "@types/cli-progress": "^3.11.0",
    "@types/lodash-es": "^4.17.12",
    "@types/mock-fs": "^4.13.1",
    "@types/node": "^22.10.2",
    "@typescript-eslint/eslint-plugin": "^8.15.0",
    "@typescript-eslint/parser": "^8.15.0",
    "@vitest/coverage-v8": "^2.0.5",
    "byte-size": "^9.0.0",
    "@types/node": "^20.3.1",
    "@typescript-eslint/eslint-plugin": "^7.0.0",
    "@typescript-eslint/parser": "^7.0.0",
    "@vitest/coverage-v8": "^1.2.2",
    "byte-size": "^8.1.1",
    "cli-progress": "^3.12.0",
    "commander": "^12.0.0",
    "eslint": "^9.14.0",
    "eslint": "^8.56.0",
    "eslint-config-prettier": "^9.1.0",
    "eslint-plugin-prettier": "^5.1.3",
    "eslint-plugin-unicorn": "^56.0.1",
    "globals": "^15.9.0",
    "eslint-plugin-unicorn": "^52.0.0",
    "glob": "^10.3.1",
    "mock-fs": "^5.2.0",
    "prettier": "^3.2.5",
    "prettier-plugin-organize-imports": "^4.0.0",
    "prettier-plugin-organize-imports": "^3.2.4",
    "typescript": "^5.3.3",
    "vite": "^5.0.12",
    "vite-tsconfig-paths": "^5.0.0",
    "vitest": "^2.0.5",
    "vitest-fetch-mock": "^0.4.0",
    "vite-tsconfig-paths": "^4.3.2",
    "vitest": "^1.2.2",
    "yaml": "^2.3.1"
  },
  "scripts": {
@@ -62,11 +59,6 @@
    "node": ">=20.0.0"
  },
  "dependencies": {
    "fast-glob": "^3.3.2",
    "fastq": "^1.17.1",
    "lodash-es": "^4.17.21"
  },
  "volta": {
    "node": "22.12.0"
  }
}
@@ -1,201 +0,0 @@
import * as fs from 'node:fs';
import * as os from 'node:os';
import * as path from 'node:path';
import { describe, expect, it, vi } from 'vitest';

import { Action, checkBulkUpload, defaults, Reason } from '@immich/sdk';
import createFetchMock from 'vitest-fetch-mock';

import { checkForDuplicates, getAlbumName, uploadFiles, UploadOptionsDto } from './asset';

vi.mock('@immich/sdk');

describe('getAlbumName', () => {
  it('should return a non-undefined value', () => {
    if (os.platform() === 'win32') {
      // This is meaningless for Unix systems.
      expect(getAlbumName(String.raw`D:\test\Filename.txt`, {} as UploadOptionsDto)).toBe('test');
    }
    expect(getAlbumName('D:/parentfolder/test/Filename.txt', {} as UploadOptionsDto)).toBe('test');
  });

  it('has higher priority to return `albumName` in `options`', () => {
    expect(getAlbumName('/parentfolder/test/Filename.txt', { albumName: 'example' } as UploadOptionsDto)).toBe(
      'example',
    );
  });
});

describe('uploadFiles', () => {
  const testDir = fs.mkdtempSync(path.join(os.tmpdir(), 'test-'));
  const testFilePath = path.join(testDir, 'test.png');
  const testFileData = 'test';
  const baseUrl = 'http://example.com';
  const apiKey = 'key';
  const retry = 3;

  const fetchMocker = createFetchMock(vi);

  beforeEach(() => {
    // Create a test file
    fs.writeFileSync(testFilePath, testFileData);

    // Defaults
    vi.mocked(defaults).baseUrl = baseUrl;
    vi.mocked(defaults).headers = { 'x-api-key': apiKey };

    fetchMocker.enableMocks();
    fetchMocker.resetMocks();
  });

  it('returns new assets when upload file is successful', async () => {
    fetchMocker.doMockIf(new RegExp(`${baseUrl}/assets$`), () => {
      return {
        status: 200,
        body: JSON.stringify({ id: 'fc5621b1-86f6-44a1-9905-403e607df9f5', status: 'created' }),
      };
    });

    await expect(uploadFiles([testFilePath], { concurrency: 1 })).resolves.toEqual([
      {
        filepath: testFilePath,
        id: 'fc5621b1-86f6-44a1-9905-403e607df9f5',
      },
    ]);
  });

  it('returns new assets when upload file retry is successful', async () => {
    let counter = 0;
    fetchMocker.doMockIf(new RegExp(`${baseUrl}/assets$`), () => {
      counter++;
      if (counter < retry) {
        throw new Error('Network error');
      }

      return {
        status: 200,
        body: JSON.stringify({ id: 'fc5621b1-86f6-44a1-9905-403e607df9f5', status: 'created' }),
      };
    });

    await expect(uploadFiles([testFilePath], { concurrency: 1 })).resolves.toEqual([
      {
        filepath: testFilePath,
        id: 'fc5621b1-86f6-44a1-9905-403e607df9f5',
      },
    ]);
  });

  it('returns new assets when upload file retry is failed', async () => {
    fetchMocker.doMockIf(new RegExp(`${baseUrl}/assets$`), () => {
      throw new Error('Network error');
    });

    await expect(uploadFiles([testFilePath], { concurrency: 1 })).resolves.toEqual([]);
  });
});

describe('checkForDuplicates', () => {
  const testDir = fs.mkdtempSync(path.join(os.tmpdir(), 'test-'));
  const testFilePath = path.join(testDir, 'test.png');
  const testFileData = 'test';
  const testFileChecksum = 'a94a8fe5ccb19ba61c4c0873d391e987982fbbd3'; // SHA1
  const retry = 3;

  beforeEach(() => {
    // Create a test file
    fs.writeFileSync(testFilePath, testFileData);
  });

  it('checks duplicates', async () => {
    vi.mocked(checkBulkUpload).mockResolvedValue({
      results: [
        {
          action: Action.Accept,
          id: testFilePath,
        },
      ],
    });

    await checkForDuplicates([testFilePath], { concurrency: 1 });

    expect(checkBulkUpload).toHaveBeenCalledWith({
      assetBulkUploadCheckDto: {
        assets: [
          {
            checksum: testFileChecksum,
            id: testFilePath,
          },
        ],
      },
    });
  });

  it('returns duplicates when check duplicates is rejected', async () => {
    vi.mocked(checkBulkUpload).mockResolvedValue({
      results: [
        {
          action: Action.Reject,
          id: testFilePath,
          assetId: 'fc5621b1-86f6-44a1-9905-403e607df9f5',
          reason: Reason.Duplicate,
        },
      ],
    });

    await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).resolves.toEqual({
      duplicates: [
        {
          filepath: testFilePath,
          id: 'fc5621b1-86f6-44a1-9905-403e607df9f5',
        },
      ],
      newFiles: [],
    });
  });

  it('returns new assets when check duplicates is accepted', async () => {
    vi.mocked(checkBulkUpload).mockResolvedValue({
      results: [
        {
          action: Action.Accept,
          id: testFilePath,
        },
      ],
    });

    await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).resolves.toEqual({
      duplicates: [],
      newFiles: [testFilePath],
    });
  });

  it('returns results when check duplicates retry is successful', async () => {
    let mocked = vi.mocked(checkBulkUpload);
    for (let i = 1; i < retry; i++) {
      mocked = mocked.mockRejectedValueOnce(new Error('Network error'));
    }
    mocked.mockResolvedValue({
      results: [
        {
          action: Action.Accept,
          id: testFilePath,
        },
      ],
    });

    await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).resolves.toEqual({
      duplicates: [],
      newFiles: [testFilePath],
    });
  });

  it('returns results when check duplicates retry is failed', async () => {
    vi.mocked(checkBulkUpload).mockRejectedValue(new Error('Network error'));

    await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).resolves.toEqual({
      duplicates: [],
      newFiles: [],
    });
  });
});
@@ -1,9 +1,7 @@
import {
  Action,
  AssetBulkUploadCheckItem,
  AssetBulkUploadCheckResult,
  AssetMediaResponseDto,
  AssetMediaStatus,
  AssetFileUploadResponseDto,
  addAssetsToAlbum,
  checkBulkUpload,
  createAlbum,
@@ -12,12 +10,12 @@ import {
  getSupportedMediaTypes,
} from '@immich/sdk';
import byteSize from 'byte-size';
import { MultiBar, Presets, SingleBar } from 'cli-progress';
import { Presets, SingleBar } from 'cli-progress';
import { chunk } from 'lodash-es';
import { Stats, createReadStream } from 'node:fs';
import { stat, unlink } from 'node:fs/promises';
import os from 'node:os';
import path, { basename } from 'node:path';
import { Queue } from 'src/queue';
import { BaseOptions, authenticate, crawl, sha1 } from 'src/utils';

const s = (count: number) => (count === 1 ? '' : 's');
@@ -26,9 +24,9 @@ const s = (count: number) => (count === 1 ? '' : 's');
type AssetBulkUploadCheckResults = Array<AssetBulkUploadCheckResult & { id: string }>;
type Asset = { id: string; filepath: string };

export interface UploadOptionsDto {
interface UploadOptionsDto {
  recursive?: boolean;
  ignore?: string;
  exclusionPatterns?: string[];
  dryRun?: boolean;
  skipHash?: boolean;
  delete?: boolean;
@@ -77,7 +75,7 @@ const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
  const files = await crawl({
    pathsToCrawl,
    recursive: options.recursive,
    exclusionPattern: options.ignore,
    exclusionPatterns: options.exclusionPatterns,
    includeHidden: options.includeHidden,
    extensions: [...image, ...video],
  });
@@ -85,93 +83,48 @@ const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
  return files;
};

export const checkForDuplicates = async (files: string[], { concurrency, skipHash }: UploadOptionsDto) => {
const checkForDuplicates = async (files: string[], { concurrency, skipHash }: UploadOptionsDto) => {
  if (skipHash) {
    console.log('Skipping hash check, assuming all files are new');
    return { newFiles: files, duplicates: [] };
  }

  const multiBar = new MultiBar(
    { format: '{message} | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
  const progressBar = new SingleBar(
    { format: 'Checking files | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
    Presets.shades_classic,
  );

  const hashProgressBar = multiBar.create(files.length, 0, { message: 'Hashing files ' });
  const checkProgressBar = multiBar.create(files.length, 0, { message: 'Checking for duplicates' });
  progressBar.start(files.length, 0);

  const newFiles: string[] = [];
  const duplicates: Asset[] = [];

  const checkBulkUploadQueue = new Queue<AssetBulkUploadCheckItem[], void>(
    async (assets: AssetBulkUploadCheckItem[]) => {
      const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets } });
  try {
    // TODO refactor into a queue
    for (const items of chunk(files, concurrency)) {
      const dto = await Promise.all(items.map(async (filepath) => ({ id: filepath, checksum: await sha1(filepath) })));
      const { results } = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: dto } });

      const results = response.results as AssetBulkUploadCheckResults;

      for (const { id: filepath, assetId, action } of results) {
      for (const { id: filepath, assetId, action } of results as AssetBulkUploadCheckResults) {
        if (action === Action.Accept) {
          newFiles.push(filepath);
        } else {
          // rejects are always duplicates
          duplicates.push({ id: assetId as string, filepath });
        }
        progressBar.increment();
      }

      checkProgressBar.increment(assets.length);
    },
    { concurrency, retry: 3 },
  );

  const results: { id: string; checksum: string }[] = [];
  let checkBulkUploadRequests: AssetBulkUploadCheckItem[] = [];

  const queue = new Queue<string, AssetBulkUploadCheckItem[]>(
    async (filepath: string): Promise<AssetBulkUploadCheckItem[]> => {
      const dto = { id: filepath, checksum: await sha1(filepath) };

      results.push(dto);
      checkBulkUploadRequests.push(dto);
      if (checkBulkUploadRequests.length === 5000) {
        const batch = checkBulkUploadRequests;
        checkBulkUploadRequests = [];
        void checkBulkUploadQueue.push(batch);
      }

      hashProgressBar.increment();
      return results;
    },
    { concurrency, retry: 3 },
  );

  for (const item of files) {
    void queue.push(item);
  }
  } finally {
    progressBar.stop();
  }

  await queue.drained();

  if (checkBulkUploadRequests.length > 0) {
    void checkBulkUploadQueue.push(checkBulkUploadRequests);
  }

  await checkBulkUploadQueue.drained();

  multiBar.stop();

  console.log(`Found ${newFiles.length} new files and ${duplicates.length} duplicate${s(duplicates.length)}`);

  // Report failures
  const failedTasks = queue.tasks.filter((task) => task.status === 'failed');
  if (failedTasks.length > 0) {
    console.log(`Failed to verify ${failedTasks.length} file${s(failedTasks.length)}:`);
    for (const task of failedTasks) {
      console.log(`- ${task.data} - ${task.error}`);
    }
  }

  return { newFiles, duplicates };
};

export const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptionsDto): Promise<Asset[]> => {
const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptionsDto): Promise<Asset[]> => {
  if (files.length === 0) {
    console.log('All assets were already uploaded, nothing to do.');
    return [];
@@ -188,7 +141,7 @@ export const uploadFiles = async (files: string[], { dryRun, concurrency }: Uplo

  if (dryRun) {
    console.log(`Would have uploaded ${files.length} asset${s(files.length)} (${byteSize(totalSize)})`);
    return files.map((filepath) => ({ id: '', filepath }));
    return [];
  }

  const uploadProgress = new SingleBar(
@@ -205,56 +158,41 @@ export const uploadFiles = async (files: string[], { dryRun, concurrency }: Uplo

  const newAssets: Asset[] = [];

  const queue = new Queue<string, AssetMediaResponseDto>(
    async (filepath: string) => {
      const stats = statsMap.get(filepath);
      if (!stats) {
        throw new Error(`Stats not found for ${filepath}`);
      }
  try {
    for (const items of chunk(files, concurrency)) {
      await Promise.all(
        items.map(async (filepath) => {
          const stats = statsMap.get(filepath) as Stats;
          const response = await uploadFile(filepath, stats);

      const response = await uploadFile(filepath, stats);
      newAssets.push({ id: response.id, filepath });
      if (response.status === AssetMediaStatus.Duplicate) {
        duplicateCount++;
        duplicateSize += stats.size ?? 0;
      } else {
        successCount++;
        successSize += stats.size ?? 0;
      }
          newAssets.push({ id: response.id, filepath });

      uploadProgress.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });
          if (response.duplicate) {
            duplicateCount++;
            duplicateSize += stats.size ?? 0;
          } else {
            successCount++;
            successSize += stats.size ?? 0;
          }

      return response;
    },
    { concurrency, retry: 3 },
  );
          uploadProgress.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });

  for (const item of files) {
    void queue.push(item);
          return response;
        }),
      );
    }
  } finally {
    uploadProgress.stop();
  }

  await queue.drained();

  uploadProgress.stop();

  console.log(`Successfully uploaded ${successCount} new asset${s(successCount)} (${byteSize(successSize)})`);
  if (duplicateCount > 0) {
    console.log(`Skipped ${duplicateCount} duplicate asset${s(duplicateCount)} (${byteSize(duplicateSize)})`);
  }

  // Report failures
  const failedTasks = queue.tasks.filter((task) => task.status === 'failed');
  if (failedTasks.length > 0) {
    console.log(`Failed to upload ${failedTasks.length} asset${s(failedTasks.length)}:`);
    for (const task of failedTasks) {
      console.log(`- ${task.data} - ${task.error}`);
    }
  }

  return newAssets;
};

const uploadFile = async (input: string, stats: Stats): Promise<AssetMediaResponseDto> => {
const uploadFile = async (input: string, stats: Stats): Promise<AssetFileUploadResponseDto> => {
  const { baseUrl, headers } = defaults;

  const assetPath = path.parse(input);
@@ -287,7 +225,7 @@ const uploadFile = async (input: string, stats: Stats): Promise<AssetMediaRespon
    formData.append('sidecarData', sidecarData);
  }

  const response = await fetch(`${baseUrl}/assets`, {
  const response = await fetch(`${baseUrl}/asset/upload`, {
    method: 'post',
    redirect: 'error',
    headers: headers as Record<string, string>,
@@ -306,7 +244,7 @@ const deleteFiles = async (files: string[], options: UploadOptionsDto): Promise<
  }

  if (options.dryRun) {
    console.log(`Would have deleted ${files.length} local asset${s(files.length)}`);
    console.log(`Would now have deleted assets, but skipped due to dry run`);
    return;
  }

@@ -347,7 +285,7 @@ const updateAlbums = async (assets: Asset[], options: UploadOptionsDto) => {
  if (dryRun) {
    // TODO print asset counts for new albums
    console.log(`Would have created ${newAlbums.size} new album${s(newAlbums.size)}`);
    console.log(`Would have updated albums of ${assets.length} asset${s(assets.length)}`);
    console.log(`Would have updated ${assets.length} asset${s(assets.length)}`);
    return;
  }

@@ -407,9 +345,7 @@ const updateAlbums = async (assets: Asset[], options: UploadOptionsDto) => {
  }
};

// `filepath` valid format:
// - Windows: `D:\\test\\Filename.txt` or `D:/test/Filename.txt`
// - Unix: `/test/Filename.txt`
export const getAlbumName = (filepath: string, options: UploadOptionsDto) => {
  return options.albumName ?? path.basename(path.dirname(filepath));
const getAlbumName = (filepath: string, options: UploadOptionsDto) => {
  const folderName = os.platform() === 'win32' ? filepath.split('\\').at(-2) : filepath.split('/').at(-2);
  return options.albumName ?? folderName;
};
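The queue-based `checkForDuplicates` above splits the work into two stages using the in-memory `Queue` class from `cli/src/queue.ts` later in this diff: one queue hashes files, the other submits full batches to `checkBulkUpload`. A minimal sketch of that pattern follows; the 5,000-item batch size and retry count of 3 mirror the hunk above, while the queue names and the concurrency value are illustrative, not the verbatim source.

```typescript
import { AssetBulkUploadCheckItem, checkBulkUpload } from '@immich/sdk';
import { Queue } from 'src/queue';
import { sha1 } from 'src/utils';

// Second stage: each task is one batch submitted to the server.
const checkQueue = new Queue<AssetBulkUploadCheckItem[], void>(
  async (assets) => {
    await checkBulkUpload({ assetBulkUploadCheckDto: { assets } });
  },
  { concurrency: 4, retry: 3 },
);

// First stage: hash each file and hand off a batch once it reaches 5,000 items.
let batch: AssetBulkUploadCheckItem[] = [];
const hashQueue = new Queue<string, void>(
  async (filepath) => {
    batch.push({ id: filepath, checksum: await sha1(filepath) });
    if (batch.length === 5000) {
      const full = batch;
      batch = [];
      void checkQueue.push(full); // fire-and-forget; drained() below waits for it
    }
  },
  { concurrency: 4, retry: 3 },
);

export const run = async (files: string[]) => {
  for (const file of files) {
    void hashQueue.push(file);
  }
  await hashQueue.drained();
  if (batch.length > 0) {
    void checkQueue.push(batch); // flush the final partial batch
  }
  await checkQueue.drained();
};
```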
@@ -1,4 +1,4 @@
import { getMyUser } from '@immich/sdk';
import { getMyUserInfo } from '@immich/sdk';
import { existsSync } from 'node:fs';
import { mkdir, unlink } from 'node:fs/promises';
import { BaseOptions, connect, getAuthFilePath, logError, withError, writeAuthFile } from 'src/utils';
@@ -10,13 +10,13 @@ export const login = async (url: string, key: string, options: BaseOptions) => {

  await connect(url, key);

  const [error, user] = await withError(getMyUser());
  const [error, userInfo] = await withError(getMyUserInfo());
  if (error) {
    logError(error, 'Failed to load user info');
    process.exit(1);
  }

  console.log(`Logged in as ${user.email}`);
  console.log(`Logged in as ${userInfo.email}`);

  if (!existsSync(configDir)) {
    // Create config folder if it doesn't exist
@@ -1,4 +1,4 @@
import { getAssetStatistics, getMyUser, getServerVersion, getSupportedMediaTypes } from '@immich/sdk';
import { getAssetStatistics, getMyUserInfo, getServerVersion, getSupportedMediaTypes } from '@immich/sdk';
import { BaseOptions, authenticate } from 'src/utils';

export const serverInfo = async (options: BaseOptions) => {
@@ -8,7 +8,7 @@ export const serverInfo = async (options: BaseOptions) => {
    getServerVersion(),
    getSupportedMediaTypes(),
    getAssetStatistics({}),
    getMyUser(),
    getMyUserInfo(),
  ]);

  console.log(`Server Info (via ${userInfo.email})`);
@@ -44,7 +44,7 @@ program
  .description('Upload assets')
  .usage('[paths...] [options]')
  .addOption(new Option('-r, --recursive', 'Recursive').env('IMMICH_RECURSIVE').default(false))
  .addOption(new Option('-i, --ignore <pattern>', 'Pattern to ignore').env('IMMICH_IGNORE_PATHS'))
  .addOption(new Option('-i, --ignore [paths...]', 'Paths to ignore').env('IMMICH_IGNORE_PATHS').default([]))
  .addOption(new Option('-h, --skip-hash', "Don't hash files before upload").env('IMMICH_SKIP_HASH').default(false))
  .addOption(new Option('-H, --include-hidden', 'Include hidden folders').env('IMMICH_INCLUDE_HIDDEN').default(false))
  .addOption(
@@ -60,8 +60,7 @@ program
  .addOption(
    new Option('-n, --dry-run', "Don't perform any actions, just show what will be done")
      .env('IMMICH_DRY_RUN')
      .default(false)
      .conflicts('skipHash'),
      .default(false),
  )
  .addOption(
    new Option('-c, --concurrency <number>', 'Number of assets to upload at the same time')
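For context on the `-i` change in this hunk: commander's `<pattern>` form accepts a single value, while the variadic `[paths...]` form collects one or more values into an array. A small illustrative sketch (the option wiring mirrors the hunk; the argv values are made up):

```typescript
import { Command, Option } from 'commander';

const program = new Command();
program.addOption(new Option('-i, --ignore [paths...]', 'Paths to ignore').default([]));

// `from: 'user'` tells commander the array holds only user arguments.
program.parse(['-i', '**/*.tif', '**/raw/**'], { from: 'user' });

console.log(program.opts().ignore); // ['**/*.tif', '**/raw/**']
```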
131
cli/src/queue.ts
@@ -1,131 +0,0 @@
import * as fastq from 'fastq';
import { uniqueId } from 'lodash-es';

export type Task<T, R> = {
  readonly id: string;
  status: 'idle' | 'processing' | 'succeeded' | 'failed';
  data: T;
  error: unknown | undefined;
  count: number;
  // TODO: Could be useful to adding progress property.
  // TODO: Could be useful to adding start_at/end_at/duration properties.
  result: undefined | R;
};

export type QueueOptions = {
  verbose?: boolean;
  concurrency?: number;
  retry?: number;
  // TODO: Could be useful to adding timeout property for retry.
};

export type ComputedQueueOptions = Required<QueueOptions>;

export const defaultQueueOptions = {
  concurrency: 1,
  retry: 0,
  verbose: false,
};

/**
 * An in-memory queue that processes tasks in parallel with a given concurrency.
 * @see {@link https://www.npmjs.com/package/fastq}
 * @template T - The type of the worker task data.
 * @template R - The type of the worker output data.
 */
export class Queue<T, R> {
  private readonly queue: fastq.queueAsPromised<string, Task<T, R>>;
  private readonly store = new Map<string, Task<T, R>>();
  readonly options: ComputedQueueOptions;
  readonly worker: (data: T) => Promise<R>;

  /**
   * Create a new queue.
   * @param worker - The worker function that processes the task.
   * @param options - The queue options.
   */
  constructor(worker: (data: T) => Promise<R>, options?: QueueOptions) {
    this.options = { ...defaultQueueOptions, ...options };
    this.worker = worker;
    this.store = new Map<string, Task<T, R>>();
    this.queue = this.buildQueue();
  }

  get tasks(): Task<T, R>[] {
    const tasks: Task<T, R>[] = [];
    for (const task of this.store.values()) {
      tasks.push(task);
    }
    return tasks;
  }

  getTask(id: string): Task<T, R> {
    const task = this.store.get(id);
    if (!task) {
      throw new Error(`Task with id ${id} not found`);
    }
    return task;
  }

  /**
   * Wait for the queue to be empty.
   * @returns Promise<void> - The returned Promise will be resolved when all tasks in the queue have been processed by a worker.
   * This promise could be ignored as it will not lead to a `unhandledRejection`.
   */
  drained(): Promise<void> {
    return this.queue.drained();
  }

  /**
   * Add a task at the end of the queue.
   * @see {@link https://www.npmjs.com/package/fastq}
   * @param data
   * @returns Promise<void> - A Promise that will be fulfilled (rejected) when the task is completed successfully (unsuccessfully).
   * This promise could be ignored as it will not lead to a `unhandledRejection`.
   */
  async push(data: T): Promise<Task<T, R>> {
    const id = uniqueId();
    const task: Task<T, R> = { id, status: 'idle', error: undefined, count: 0, data, result: undefined };
    this.store.set(id, task);
    return this.queue.push(id);
  }

  // TODO: Support more function delegation to fastq.

  private buildQueue(): fastq.queueAsPromised<string, Task<T, R>> {
    return fastq.promise((id: string) => {
      const task = this.getTask(id);
      return this.work(task);
    }, this.options.concurrency);
  }

  private async work(task: Task<T, R>): Promise<Task<T, R>> {
    task.count += 1;
    task.error = undefined;
    task.status = 'processing';
    if (this.options.verbose) {
      console.log('[task] processing:', task);
    }
    try {
      task.result = await this.worker(task.data);
      task.status = 'succeeded';
      if (this.options.verbose) {
        console.log('[task] succeeded:', task);
      }
      return task;
    } catch (error) {
      task.error = error;
      task.status = 'failed';
      if (this.options.verbose) {
        console.log('[task] failed:', task);
      }
      if (this.options.retry > 0 && task.count < this.options.retry) {
        if (this.options.verbose) {
          console.log('[task] retry:', task);
        }
        return this.work(task);
      }
      return task;
    }
  }
}
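A minimal usage sketch of the `Queue` class removed above, showing the push/drained/tasks lifecycle; the worker body and the option values are illustrative.

```typescript
import { Queue } from 'src/queue';

// The worker receives the task data and returns a result. A throw marks the
// task 'failed'; work() re-runs it until task.count reaches the retry option.
const queue = new Queue<string, number>(
  async (filepath) => Buffer.byteLength(filepath), // stand-in for real work
  { concurrency: 2, retry: 3 },
);

export const measure = async (filepaths: string[]) => {
  for (const filepath of filepaths) {
    void queue.push(filepath); // per-task promise; safe to ignore per the JSDoc
  }

  await queue.drained(); // resolves once every queued task has been processed

  // Tasks keep their final status, which is how the CLI reports failures.
  return queue.tasks.filter((task) => task.status === 'failed');
};
```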
@@ -1,5 +1,4 @@
import mockfs from 'mock-fs';
import { readFileSync } from 'node:fs';
import { CrawlOptions, crawl } from 'src/utils';

interface Test {
@@ -10,10 +9,6 @@ interface Test {

const cwd = process.cwd();

const readContent = (path: string) => {
  return readFileSync(path).toString();
};

const extensions = [
  '.jpg',
  '.jpeg',
@@ -71,7 +66,7 @@ const tests: Test[] = [
    test: 'should exclude by file extension',
    options: {
      pathsToCrawl: ['/photos/'],
      exclusionPattern: '**/*.tif',
      exclusionPatterns: ['**/*.tif'],
    },
    files: {
      '/photos/image.jpg': true,
@@ -82,7 +77,7 @@ const tests: Test[] = [
    test: 'should exclude by file extension without case sensitivity',
    options: {
      pathsToCrawl: ['/photos/'],
      exclusionPattern: '**/*.TIF',
      exclusionPatterns: ['**/*.TIF'],
    },
    files: {
      '/photos/image.jpg': true,
@@ -93,7 +88,7 @@ const tests: Test[] = [
    test: 'should exclude by folder',
    options: {
      pathsToCrawl: ['/photos/'],
      exclusionPattern: '**/raw/**',
      exclusionPatterns: ['**/raw/**'],
      recursive: true,
    },
    files: {
@@ -115,7 +110,17 @@ const tests: Test[] = [
      '/albums/image3.jpg': true,
    },
  },

  {
    test: 'should support globbing paths',
    options: {
      pathsToCrawl: ['/photos*'],
    },
    files: {
      '/photos1/image1.jpg': true,
      '/photos2/image2.jpg': true,
      '/images/image3.jpg': false,
    },
  },
  {
    test: 'should crawl a single path without trailing slash',
    options: {
@@ -213,7 +218,7 @@ const tests: Test[] = [
    test: 'should support ignoring full filename',
    options: {
      pathsToCrawl: ['/photos'],
      exclusionPattern: '**/image2.jpg',
      exclusionPatterns: ['**/image2.jpg'],
    },
    files: {
      '/photos/image1.jpg': true,
@@ -225,7 +230,7 @@ const tests: Test[] = [
    test: 'should support ignoring file extensions',
    options: {
      pathsToCrawl: ['/photos'],
      exclusionPattern: '**/*.png',
      exclusionPatterns: ['**/*.png'],
    },
    files: {
      '/photos/image1.jpg': true,
@@ -238,7 +243,7 @@ const tests: Test[] = [
    options: {
      pathsToCrawl: ['/photos'],
      recursive: true,
      exclusionPattern: '**/raw/**',
      exclusionPatterns: ['**/raw/**'],
    },
    files: {
      '/photos/image1.jpg': true,
@@ -251,10 +256,9 @@ const tests: Test[] = [
  {
    test: 'should support ignoring absolute paths',
    options: {
      // Currently, fast-glob has some caveat when dealing with `/`.
      pathsToCrawl: ['/*s'],
      pathsToCrawl: ['/'],
      recursive: true,
      exclusionPattern: '/images/**',
      exclusionPatterns: ['/images/**'],
    },
    files: {
      '/photos/image1.jpg': true,
@@ -272,16 +276,14 @@ describe('crawl', () => {
  describe('crawl', () => {
    for (const { test, options, files } of tests) {
      it(test, async () => {
        // The file contents is the same as the path.
        mockfs(Object.fromEntries(Object.keys(files).map((file) => [file, file])));
        mockfs(Object.fromEntries(Object.keys(files).map((file) => [file, ''])));

        const actual = await crawl({ ...options, extensions });
        const expected = Object.entries(files)
          .filter((entry) => entry[1])
          .map(([file]) => file);

        // Compare file's content instead of path since a file can be represent in multiple ways.
        expect(actual.map((path) => readContent(path)).sort()).toEqual(expected.sort());
        expect(actual.sort()).toEqual(expected.sort());
      });
    }
  });
@@ -1,10 +1,9 @@
import { getMyUser, init, isHttpError } from '@immich/sdk';
import { convertPathToPattern, glob } from 'fast-glob';
import { defaults, getMyUserInfo, isHttpError } from '@immich/sdk';
import { glob } from 'glob';
import { createHash } from 'node:crypto';
import { createReadStream } from 'node:fs';
import { readFile, stat, writeFile } from 'node:fs/promises';
import { platform } from 'node:os';
import { join, resolve } from 'node:path';
import { join } from 'node:path';
import yaml from 'yaml';

export interface BaseOptions {
@@ -47,9 +46,10 @@ export const connect = async (url: string, key: string) => {
    // noop
  }

  init({ baseUrl: url, apiKey: key });
  defaults.baseUrl = url;
  defaults.headers = { 'x-api-key': key };

  const [error] = await withError(getMyUser());
  const [error] = await withError(getMyUserInfo());
  if (isHttpError(error)) {
    logError(error, 'Failed to connect to server');
    process.exit(1);
@@ -104,16 +104,11 @@ export interface CrawlOptions {
  pathsToCrawl: string[];
  recursive?: boolean;
  includeHidden?: boolean;
  exclusionPattern?: string;
  exclusionPatterns?: string[];
  extensions: string[];
}

const convertPathToPatternOnWin = (path: string) => {
  return platform() === 'win32' ? convertPathToPattern(path) : path;
};

export const crawl = async (options: CrawlOptions): Promise<string[]> => {
  const { extensions: extensionsWithPeriod, recursive, pathsToCrawl, exclusionPattern, includeHidden } = options;
  const { extensions: extensionsWithPeriod, recursive, pathsToCrawl, exclusionPatterns, includeHidden } = options;
  const extensions = extensionsWithPeriod.map((extension) => extension.replace('.', ''));

  if (pathsToCrawl.length === 0) {
@@ -125,42 +120,45 @@ export const crawl = async (options: CrawlOptions): Promise<string[]> => {

  for await (const currentPath of pathsToCrawl) {
    try {
      const absolutePath = resolve(currentPath);
      const stats = await stat(absolutePath);
      const stats = await stat(currentPath);
      if (stats.isFile() || stats.isSymbolicLink()) {
        crawledFiles.push(absolutePath);
        crawledFiles.push(currentPath);
      } else {
        patterns.push(convertPathToPatternOnWin(absolutePath));
        patterns.push(currentPath);
      }
    } catch (error: any) {
      if (error.code === 'ENOENT') {
        patterns.push(convertPathToPatternOnWin(currentPath));
        patterns.push(currentPath);
      } else {
        throw error;
      }
    }
  }

  if (patterns.length === 0) {
  let searchPattern: string;
  if (patterns.length === 1) {
    searchPattern = patterns[0];
  } else if (patterns.length === 0) {
    return crawledFiles;
  } else {
    searchPattern = '{' + patterns.join(',') + '}';
  }

  const searchPatterns = patterns.map((pattern) => {
    let escapedPattern = pattern;
    if (recursive) {
      escapedPattern = escapedPattern + '/**';
    }
    return `${escapedPattern}/*.{${extensions.join(',')}}`;
  if (recursive) {
    searchPattern = searchPattern + '/**/';
  }

  searchPattern = `${searchPattern}/*.{${extensions.join(',')}}`;

  const globbedFiles = await glob(searchPattern, {
    absolute: true,
    nocase: true,
    nodir: true,
    dot: includeHidden,
    ignore: exclusionPatterns,
  });

  const globbedFiles = await glob(searchPatterns, {
    absolute: true,
    caseSensitiveMatch: false,
    dot: includeHidden,
    ignore: [`**/${exclusionPattern}`],
  });
  globbedFiles.push(...crawledFiles);
  return globbedFiles.sort();
  return [...crawledFiles, ...globbedFiles].sort();
};

export const sha1 = (filepath: string) => {
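The `sha1` helper is cut off at the end of this hunk. Given the `createHash` and `createReadStream` imports above and the hex digest the spec file expects, a plausible stream-based implementation would look like this (an assumption, not the verbatim source):

```typescript
import { createHash } from 'node:crypto';
import { createReadStream } from 'node:fs';

// Hash the file incrementally so large assets are never buffered in full.
export const sha1 = (filepath: string): Promise<string> =>
  new Promise((resolve, reject) => {
    const hash = createHash('sha1');
    createReadStream(filepath)
      .on('error', reject)
      .on('data', (chunk) => hash.update(chunk))
      .on('end', () => resolve(hash.digest('hex')));
  });
```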
37
cli/src/version.ts
Normal file
@@ -0,0 +1,37 @@
import { version } from '../package.json';

export interface ICliVersion {
  major: number;
  minor: number;
  patch: number;
}

export class CliVersion implements ICliVersion {
  constructor(
    public readonly major: number,
    public readonly minor: number,
    public readonly patch: number,
  ) {}

  toString() {
    return `${this.major}.${this.minor}.${this.patch}`;
  }

  toJSON() {
    const { major, minor, patch } = this;
    return { major, minor, patch };
  }

  static fromString(version: string): CliVersion {
    const regex = /v?(?<major>\d+)\.(?<minor>\d+)\.(?<patch>\d+)/i;
    const matchResult = version.match(regex);
    if (matchResult) {
      const [, major, minor, patch] = matchResult.map(Number);
      return new CliVersion(major, minor, patch);
    } else {
      throw new Error(`Invalid version format: ${version}`);
    }
  }
}

export const cliVersion = CliVersion.fromString(version);
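A quick sketch of how the new `CliVersion.fromString` behaves; the inputs are made up. Note the named groups in the regex go unused: the destructuring relies on positional capture order (`[full match, major, minor, patch]`).

```typescript
import { CliVersion } from 'src/version';

const a = CliVersion.fromString('v2.2.0'); // leading "v" is optional
console.log(a.toString()); // "2.2.0"
console.log(JSON.stringify(a)); // uses toJSON(): {"major":2,"minor":2,"patch":0}

CliVersion.fromString('not-a-version'); // throws: Invalid version format: not-a-version
```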
@@ -2,7 +2,6 @@ import { defineConfig } from 'vite';
import tsconfigPaths from 'vite-tsconfig-paths';

export default defineConfig({
  resolve: { alias: { src: '/src' } },
  build: {
    rollupOptions: {
      input: 'src/index.ts',
38
deployment/.gitignore
vendored
@@ -1,38 +0,0 @@
# OpenTofu

# Local .terraform directories
**/.terraform/*

# .tfstate files
*.tfstate
*.tfstate.*

# Crash log files
crash.log
crash.*.log

# Ignore override files as they are usually used to override resources locally and so
# are not checked in
override.tf
override.tf.json
*_override.tf
*_override.tf.json

# Include override files you do wish to add to version control using negated pattern
# !example_override.tf

# Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan
# example: *tfplan*

# Ignore CLI configuration files
.terraformrc
terraform.rc

# Terragrunt

# terragrunt cache directories
**/.terragrunt-cache/*

# Terragrunt debug output file (when using `--terragrunt-debug` option)
# See: https://terragrunt.gruntwork.io/docs/reference/cli-options/#terragrunt-debug
terragrunt-debug.tfvars.json
@@ -1,38 +0,0 @@
# This file is maintained automatically by "tofu init".
# Manual edits may be lost in future updates.

provider "registry.opentofu.org/cloudflare/cloudflare" {
  version     = "4.48.0"
  constraints = "4.48.0"
  hashes = [
    "h1:0IKUOR32xEI1suS5QCOjfxjQ2mRd058btXk8hVnaOJ4=",
    "h1:3YG6vu/bFPcYOeLdSUZhiAWiWKaFlOAR34z2o8cbE9k=",
    "h1:FvGy06/i9AMtVkSIUnCrXNv5xF6jqBqMH8oPVLyeeAg=",
    "h1:GXH7nIF0ocMqebbA41+fSGIYfM+VAM/PvTe7fJr8UrQ=",
    "h1:H0ll0ph4404vFE868W3qJ3zhOyy4jbXrOMtdkViEZsU=",
    "h1:SX42e3k73IcFcrQlZ2e/Veqt2tvCMy6fwlo5yNUktCE=",
    "h1:Uu/gjBc99GefdPdSrlBwU75DWU0ZcwGcrd3ZFyTeL0s=",
    "h1:VZw0uN41PWRmNlhg7Ze0Eh7cdoklX1oZbfNAXNYnU1I=",
    "h1:cMdV7ql6PsFa4qtb0EoZSctvTaTqV7yplBSDwcLRCLc=",
    "h1:ePGvSurmlqOCkD761vkhRmz7bsK36/EnIvx2Xy8TdXo=",
    "h1:fOYufF+1bzw2N3aHLpkLB6E8VbZ4ysXDODYQOlwhwd4=",
    "h1:qe8RbnWq0T4xhqjn9QcbO6YW5YDx47P+eJ0NUMIfwCc=",
    "h1:tRD2av6PafHDP/b9jDQsG5/aX+lHeKxpbIEHYYLBVUc=",
    "h1:zyl6Gvx/CFpwYW8pFFDesfO8Lxv+a6CopyAsIMhp54s=",
    "zh:04c0a49c2b23140b2f21cfd0d52f9798d70d3bdae3831613e156aabe519bbc6c",
    "zh:185f21b4834ba63e8df1f84aa34639d8a7e126429a4007bb5f9ad82f2602a997",
    "zh:234724f52cb4c0c3f7313d3b2697caef26d921d134f26ae14801e7afac522f7b",
    "zh:38a56fcd1b3e40706af995611c977816543b53f1e55fe2720944aae2b6828fcb",
    "zh:419938f5430fc78eff933470aefbf94a460a478f867cf7761a3dea177b4eb153",
    "zh:4b46d92bfde1deab7de7ba1a6bbf4ba7c711e4fd925341ddf09d4cc28dae03d8",
    "zh:537acd4a31c752f1bae305ba7190f60b71ad1a459f22d464f3f914336c9e919f",
    "zh:5ff36b005aad07697dd0b30d4f0c35dbcdc30dc52b41722552060792fa87ce04",
    "zh:635c5ee419daea098060f794d9d7d999275301181e49562c4e4c08f043076937",
    "zh:859277c330d61f91abe9e799389467ca11b77131bf34bedbef52f8da68b2bb49",
    "zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
    "zh:927dfdb8d9aef37ead03fceaa29e87ba076a3dd24e19b6cefdbb0efe9987ff8c",
    "zh:bbf2226f07f6b1e721877328e69ded4b64f9c196634d2e2429e3cfabbe41e532",
    "zh:daeed873d6f38604232b46ee4a5830c85d195b967f8dbcafe2fcffa98daf9c5f",
    "zh:f8f2fc4646c1ba44085612fa7f4dbb7cbcead43b4e661f2b98ddfb4f68afc758",
  ]
}
@@ -1,11 +0,0 @@
terraform {
  backend "pg" {}
  required_version = "~> 1.7"

  required_providers {
    cloudflare = {
      source  = "cloudflare/cloudflare"
      version = "4.48.0"
    }
  }
}
@@ -1,14 +0,0 @@
resource "cloudflare_pages_domain" "immich_app_release_domain" {
  account_id   = var.cloudflare_account_id
  project_name = data.terraform_remote_state.cloudflare_account.outputs.immich_app_archive_pages_project_name
  domain       = "immich.app"
}

resource "cloudflare_record" "immich_app_release_domain" {
  name    = "immich.app"
  proxied = true
  ttl     = 1
  type    = "CNAME"
  content = data.terraform_remote_state.cloudflare_immich_app_docs.outputs.immich_app_branch_pages_hostname
  zone_id = data.terraform_remote_state.cloudflare_account.outputs.immich_app_zone_id
}
@@ -1,3 +0,0 @@
provider "cloudflare" {
  api_token = data.terraform_remote_state.api_keys_state.outputs.terraform_key_cloudflare_docs
}
@@ -1,27 +0,0 @@
data "terraform_remote_state" "api_keys_state" {
  backend = "pg"

  config = {
    conn_str    = var.tf_state_postgres_conn_str
    schema_name = "prod_cloudflare_api_keys"
  }
}

data "terraform_remote_state" "cloudflare_account" {
  backend = "pg"

  config = {
    conn_str    = var.tf_state_postgres_conn_str
    schema_name = "prod_cloudflare_account"
  }
}

data "terraform_remote_state" "cloudflare_immich_app_docs" {
  backend = "pg"

  config = {
    conn_str    = var.tf_state_postgres_conn_str
    schema_name = "prod_cloudflare_immich_app_docs_${var.prefix_name}"
  }
}
@@ -1,20 +0,0 @@
terraform {
  source = "."

  extra_arguments custom_vars {
    commands = get_terraform_commands_that_need_vars()
  }
}

include {
  path = find_in_parent_folders("state.hcl")
}

remote_state {
  backend = "pg"

  config = {
    conn_str    = get_env("TF_STATE_POSTGRES_CONN_STR")
    schema_name = "prod_cloudflare_immich_app_docs_release"
  }
}
@@ -1,4 +0,0 @@
variable "cloudflare_account_id" {}
variable "tf_state_postgres_conn_str" {}

variable "prefix_name" {}
@@ -1,38 +0,0 @@
# This file is maintained automatically by "tofu init".
# Manual edits may be lost in future updates.

provider "registry.opentofu.org/cloudflare/cloudflare" {
  version     = "4.48.0"
  constraints = "4.48.0"
  hashes = [
    "h1:0IKUOR32xEI1suS5QCOjfxjQ2mRd058btXk8hVnaOJ4=",
    "h1:3YG6vu/bFPcYOeLdSUZhiAWiWKaFlOAR34z2o8cbE9k=",
    "h1:FvGy06/i9AMtVkSIUnCrXNv5xF6jqBqMH8oPVLyeeAg=",
    "h1:GXH7nIF0ocMqebbA41+fSGIYfM+VAM/PvTe7fJr8UrQ=",
    "h1:H0ll0ph4404vFE868W3qJ3zhOyy4jbXrOMtdkViEZsU=",
    "h1:SX42e3k73IcFcrQlZ2e/Veqt2tvCMy6fwlo5yNUktCE=",
    "h1:Uu/gjBc99GefdPdSrlBwU75DWU0ZcwGcrd3ZFyTeL0s=",
    "h1:VZw0uN41PWRmNlhg7Ze0Eh7cdoklX1oZbfNAXNYnU1I=",
    "h1:cMdV7ql6PsFa4qtb0EoZSctvTaTqV7yplBSDwcLRCLc=",
    "h1:ePGvSurmlqOCkD761vkhRmz7bsK36/EnIvx2Xy8TdXo=",
    "h1:fOYufF+1bzw2N3aHLpkLB6E8VbZ4ysXDODYQOlwhwd4=",
    "h1:qe8RbnWq0T4xhqjn9QcbO6YW5YDx47P+eJ0NUMIfwCc=",
    "h1:tRD2av6PafHDP/b9jDQsG5/aX+lHeKxpbIEHYYLBVUc=",
    "h1:zyl6Gvx/CFpwYW8pFFDesfO8Lxv+a6CopyAsIMhp54s=",
    "zh:04c0a49c2b23140b2f21cfd0d52f9798d70d3bdae3831613e156aabe519bbc6c",
    "zh:185f21b4834ba63e8df1f84aa34639d8a7e126429a4007bb5f9ad82f2602a997",
    "zh:234724f52cb4c0c3f7313d3b2697caef26d921d134f26ae14801e7afac522f7b",
    "zh:38a56fcd1b3e40706af995611c977816543b53f1e55fe2720944aae2b6828fcb",
    "zh:419938f5430fc78eff933470aefbf94a460a478f867cf7761a3dea177b4eb153",
    "zh:4b46d92bfde1deab7de7ba1a6bbf4ba7c711e4fd925341ddf09d4cc28dae03d8",
    "zh:537acd4a31c752f1bae305ba7190f60b71ad1a459f22d464f3f914336c9e919f",
    "zh:5ff36b005aad07697dd0b30d4f0c35dbcdc30dc52b41722552060792fa87ce04",
    "zh:635c5ee419daea098060f794d9d7d999275301181e49562c4e4c08f043076937",
    "zh:859277c330d61f91abe9e799389467ca11b77131bf34bedbef52f8da68b2bb49",
    "zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
    "zh:927dfdb8d9aef37ead03fceaa29e87ba076a3dd24e19b6cefdbb0efe9987ff8c",
    "zh:bbf2226f07f6b1e721877328e69ded4b64f9c196634d2e2429e3cfabbe41e532",
    "zh:daeed873d6f38604232b46ee4a5830c85d195b967f8dbcafe2fcffa98daf9c5f",
    "zh:f8f2fc4646c1ba44085612fa7f4dbb7cbcead43b4e661f2b98ddfb4f68afc758",
  ]
}
@@ -1,11 +0,0 @@
terraform {
  backend "pg" {}
  required_version = "~> 1.7"

  required_providers {
    cloudflare = {
      source  = "cloudflare/cloudflare"
      version = "4.48.0"
    }
  }
}
@@ -1,26 +0,0 @@
resource "cloudflare_pages_domain" "immich_app_branch_domain" {
  account_id   = var.cloudflare_account_id
  project_name = local.is_release ? data.terraform_remote_state.cloudflare_account.outputs.immich_app_archive_pages_project_name : data.terraform_remote_state.cloudflare_account.outputs.immich_app_preview_pages_project_name
  domain       = "${var.prefix_name}.${local.deploy_domain_prefix}.immich.app"
}

resource "cloudflare_record" "immich_app_branch_subdomain" {
  name    = "${var.prefix_name}.${local.deploy_domain_prefix}.immich.app"
  proxied = true
  ttl     = 1
  type    = "CNAME"
  content = "${replace(var.prefix_name, "/\\/|\\./", "-")}.${local.is_release ? data.terraform_remote_state.cloudflare_account.outputs.immich_app_archive_pages_project_subdomain : data.terraform_remote_state.cloudflare_account.outputs.immich_app_preview_pages_project_subdomain}"
  zone_id = data.terraform_remote_state.cloudflare_account.outputs.immich_app_zone_id
}

output "immich_app_branch_subdomain" {
  value = cloudflare_record.immich_app_branch_subdomain.hostname
}

output "immich_app_branch_pages_hostname" {
  value = cloudflare_record.immich_app_branch_subdomain.content
}

output "pages_project_name" {
  value = cloudflare_pages_domain.immich_app_branch_domain.project_name
}
@@ -1,7 +0,0 @@
locals {
  domain_name          = "immich.app"
  preview_prefix       = contains(["branch", "pr"], var.prefix_event_type) ? "preview" : ""
  archive_prefix       = contains(["release"], var.prefix_event_type) ? "archive" : ""
  deploy_domain_prefix = coalesce(local.preview_prefix, local.archive_prefix)
  is_release           = contains(["release"], var.prefix_event_type)
}
@@ -1,3 +0,0 @@
provider "cloudflare" {
  api_token = data.terraform_remote_state.api_keys_state.outputs.terraform_key_cloudflare_docs
}
@@ -1,17 +0,0 @@
data "terraform_remote_state" "api_keys_state" {
  backend = "pg"

  config = {
    conn_str    = var.tf_state_postgres_conn_str
    schema_name = "prod_cloudflare_api_keys"
  }
}

data "terraform_remote_state" "cloudflare_account" {
  backend = "pg"

  config = {
    conn_str    = var.tf_state_postgres_conn_str
    schema_name = "prod_cloudflare_account"
  }
}
@@ -1,24 +0,0 @@
terraform {
  source = "."

  extra_arguments custom_vars {
    commands = get_terraform_commands_that_need_vars()
  }
}

include {
  path = find_in_parent_folders("state.hcl")
}

locals {
  prefix_name = get_env("TF_VAR_prefix_name")
}

remote_state {
  backend = "pg"

  config = {
    conn_str    = get_env("TF_STATE_POSTGRES_CONN_STR")
    schema_name = "prod_cloudflare_immich_app_docs_${local.prefix_name}"
  }
}
@@ -1,5 +0,0 @@
variable "cloudflare_account_id" {}
variable "tf_state_postgres_conn_str" {}

variable "prefix_name" {}
variable "prefix_event_type" {}
@@ -1,20 +0,0 @@
locals {
  cloudflare_account_id = get_env("CLOUDFLARE_ACCOUNT_ID")
  cloudflare_api_token  = get_env("CLOUDFLARE_API_TOKEN")

  tf_state_postgres_conn_str = get_env("TF_STATE_POSTGRES_CONN_STR")
}

remote_state {
  backend = "pg"

  config = {
    conn_str = local.tf_state_postgres_conn_str
  }
}

inputs = {
  cloudflare_account_id      = local.cloudflare_account_id
  cloudflare_api_token       = local.cloudflare_api_token
  tf_state_postgres_conn_str = local.tf_state_postgres_conn_str
}
10 docker/.gitignore vendored
@@ -1,9 +1 @@
.DS_Store
node_modules
/build
/.svelte-kit
/dist
/package
.env
.env.*
!.env.example
.env
@@ -1 +0,0 @@
engine-strict=true
@@ -1 +0,0 @@
22.11.0
@@ -1,9 +0,0 @@
{
  "jsonRecursiveSort": true,
  "organizeImportsSkipDestructiveCodeActions": true,
  "plugins": ["prettier-plugin-organize-imports", "prettier-plugin-sort-json"],
  "printWidth": 120,
  "semi": true,
  "singleQuote": true,
  "trailingComma": "all"
}
@@ -4,72 +4,65 @@

name: immich-dev

x-server-build: &server-common
  image: immich-server-dev:latest
  build:
    context: ../
    dockerfile: server/Dockerfile
    target: dev
  restart: always
  volumes:
    - ../server:/usr/src/app
    - ../open-api:/usr/src/open-api
    - ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
    - ${UPLOAD_LOCATION}/photos/upload:/usr/src/app/upload/upload
    - /usr/src/app/node_modules
    - /etc/localtime:/etc/localtime:ro
  env_file:
    - .env
  ulimits:
    nofile:
      soft: 1048576
      hard: 1048576

services:
  immich-server:
    container_name: immich_server
    command: ['/usr/src/app/bin/immich-dev']
    image: immich-server-dev:latest
    # extends:
    #   file: hwaccel.transcoding.yml
    #   service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
    build:
      context: ../
      dockerfile: server/Dockerfile
      target: dev
    restart: always
    volumes:
      - ../server:/usr/src/app
      - ../open-api:/usr/src/open-api
      - ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
      - ${UPLOAD_LOCATION}/photos/upload:/usr/src/app/upload/upload
      - /usr/src/app/node_modules
      - /etc/localtime:/etc/localtime:ro
    env_file:
      - .env
    environment:
      IMMICH_REPOSITORY: immich-app/immich
      IMMICH_REPOSITORY_URL: https://github.com/immich-app/immich
      IMMICH_SOURCE_REF: local
      IMMICH_SOURCE_COMMIT: af2efbdbbddc27cd06142f22253ccbbbbeec1f55
      IMMICH_SOURCE_URL: https://github.com/immich-app/immich/commit/af2efbdbbddc27cd06142f22253ccbbbbeec1f55
      IMMICH_BUILD: '9654404849'
      IMMICH_BUILD_URL: https://github.com/immich-app/immich/actions/runs/9654404849
      IMMICH_BUILD_IMAGE: development
      IMMICH_BUILD_IMAGE_URL: https://github.com/immich-app/immich/pkgs/container/immich-server
      IMMICH_THIRD_PARTY_SOURCE_URL: https://github.com/immich-app/immich/
      IMMICH_THIRD_PARTY_BUG_FEATURE_URL: https://github.com/immich-app/immich/issues
      IMMICH_THIRD_PARTY_DOCUMENTATION_URL: https://immich.app/docs
      IMMICH_THIRD_PARTY_SUPPORT_URL: https://immich.app/docs/third-party
    ulimits:
      nofile:
        soft: 1048576
        hard: 1048576
    command: ['/usr/src/app/bin/immich-dev', 'immich']
    <<: *server-common
    ports:
      - 3001:3001
      - 9230:9230
      - 9231:9231
      - 2283:2283
    depends_on:
      - redis
      - database
    healthcheck:
      disable: false

  immich-microservices:
    container_name: immich_microservices
    command: ['/usr/src/app/bin/immich-dev', 'microservices']
    <<: *server-common
    # extends:
    #   file: hwaccel.transcoding.yml
    #   service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
    ports:
      - 9231:9230
    depends_on:
      - database
      - immich-server

  immich-web:
    container_name: immich_web
    image: immich-web-dev:latest
    # Needed for rootless docker setup, see https://github.com/moby/moby/issues/45919
    # user: 0:0
    build:
      context: ../web
    command: ['/usr/src/app/bin/immich-web']
    env_file:
      - .env
    ports:
      - 3000:3000
      - 2283:3000
      - 24678:24678
    volumes:
      - ../web:/usr/src/app
      - ../i18n:/usr/src/i18n
      - ../open-api/:/usr/src/open-api/
      - /usr/src/app/node_modules
    ulimits:
@@ -101,14 +94,10 @@ services:
    depends_on:
      - database
    restart: unless-stopped
    healthcheck:
      disable: false

  redis:
    container_name: immich_redis
    image: redis:6.2-alpine@sha256:eaba718fecd1196d88533de7ba49bf903ad33664a92debb24660a922ecd9cac8
    healthcheck:
      test: redis-cli ping || exit 1
    image: redis:6.2-alpine@sha256:84882e87b54734154586e5f8abd4dce69fe7311315e2fc6d67c29614c8de2672

  database:
    container_name: immich_postgres
@@ -119,31 +108,12 @@ services:
      POSTGRES_PASSWORD: ${DB_PASSWORD}
      POSTGRES_USER: ${DB_USERNAME}
      POSTGRES_DB: ${DB_DATABASE_NAME}
      POSTGRES_INITDB_ARGS: '--data-checksums'
    volumes:
      - ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
    ports:
      - 5432:5432
    healthcheck:
      test: >-
        pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;
        Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align
        --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";
        echo "checksum failure count is $$Chksum";
        [ "$$Chksum" = '0' ] || exit 1
      interval: 5m
      start_interval: 30s
      start_period: 5m
    command: >-
      postgres
      -c shared_preload_libraries=vectors.so
      -c 'search_path="$$user", public, vectors'
      -c logging_collector=on
      -c max_wal_size=2GB
      -c shared_buffers=512MB
      -c wal_compression=on

  # set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics

  # set IMMICH_METRICS=true in .env to enable metrics
  # immich-prometheus:
  #   container_name: immich_prometheus
  #   ports:

@@ -1,28 +1,39 @@
name: immich-prod

x-server-build: &server-common
  image: immich-server:latest
  build:
    context: ../
    dockerfile: server/Dockerfile
  volumes:
    - ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
    - /etc/localtime:/etc/localtime:ro
  env_file:
    - .env
  restart: always

services:
  immich-server:
    container_name: immich_server
    image: immich-server:latest
    # extends:
    #   file: hwaccel.transcoding.yml
    #   service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
    build:
      context: ../
      dockerfile: server/Dockerfile
    volumes:
      - ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
      - /etc/localtime:/etc/localtime:ro
    env_file:
      - .env
    command: ['start.sh', 'immich']
    <<: *server-common
    ports:
      - 2283:2283
      - 2283:3001
    depends_on:
      - redis
      - database
    restart: always
    healthcheck:
      disable: false

  immich-microservices:
    container_name: immich_microservices
    command: ['start.sh', 'microservices']
    <<: *server-common
    # extends:
    #   file: hwaccel.transcoding.yml
    #   service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
    depends_on:
      - redis
      - database
      - immich-server

  immich-machine-learning:
    container_name: immich_machine_learning
@@ -35,21 +46,15 @@ services:
    dockerfile: Dockerfile
    args:
      - DEVICE=cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
    ports:
      - 3003:3003
    volumes:
      - model-cache:/cache
    env_file:
      - .env
    restart: always
    healthcheck:
      disable: false

  redis:
    container_name: immich_redis
    image: redis:6.2-alpine@sha256:eaba718fecd1196d88533de7ba49bf903ad33664a92debb24660a922ecd9cac8
    healthcheck:
      test: redis-cli ping || exit 1
    image: redis:6.2-alpine@sha256:84882e87b54734154586e5f8abd4dce69fe7311315e2fc6d67c29614c8de2672
    restart: always

  database:
@@ -61,37 +66,17 @@ services:
      POSTGRES_PASSWORD: ${DB_PASSWORD}
      POSTGRES_USER: ${DB_USERNAME}
      POSTGRES_DB: ${DB_DATABASE_NAME}
      POSTGRES_INITDB_ARGS: '--data-checksums'
    volumes:
      - ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
    ports:
      - 5432:5432
    healthcheck:
      test: >-
        pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;
        Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align
        --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";
        echo "checksum failure count is $$Chksum";
        [ "$$Chksum" = '0' ] || exit 1
      interval: 5m
      start_interval: 30s
      start_period: 5m
    command: >-
      postgres
      -c shared_preload_libraries=vectors.so
      -c 'search_path="$$user", public, vectors'
      -c logging_collector=on
      -c max_wal_size=2GB
      -c shared_buffers=512MB
      -c wal_compression=on
    restart: always

  # set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
  # set IMMICH_METRICS=true in .env to enable metrics
  immich-prometheus:
    container_name: immich_prometheus
    ports:
      - 9090:9090
    image: prom/prometheus@sha256:565ee86501224ebbb98fc10b332fa54440b100469924003359edf49cbce374bd
    image: prom/prometheus@sha256:4f6c47e39a9064028766e8c95890ed15690c30f00c4ba14e7ce6ae1ded0295b1
    volumes:
      - ./prometheus.yml:/etc/prometheus/prometheus.yml
      - prometheus-data:/prometheus
@@ -103,7 +88,7 @@ services:
    command: ['./run.sh', '-disable-reporting']
    ports:
      - 3000:3000
    image: grafana/grafana:11.4.0-ubuntu@sha256:afccec22ba0e4815cca1d2bf3836e414322390dc78d77f1851976ffa8d61051c
    image: grafana/grafana:10.4.2-ubuntu@sha256:4f55071b556fb03f12b41423c98a185ed6695ed9ff2558e35805f0dd765fd958
    volumes:
      - grafana-data:/var/lib/grafana

@@ -12,23 +12,35 @@ services:
  immich-server:
    container_name: immich_server
    image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
    # extends:
    #   file: hwaccel.transcoding.yml
    #   service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
    command: ['start.sh', 'immich']
    volumes:
      # Do not edit the next line. If you want to change the media storage location on your system, edit the value of UPLOAD_LOCATION in the .env file
      - ${UPLOAD_LOCATION}:/usr/src/app/upload
      - /etc/localtime:/etc/localtime:ro
    env_file:
      - .env
    ports:
      - '2283:2283'
      - 2283:3001
    depends_on:
      - redis
      - database
    restart: always

  immich-microservices:
    container_name: immich_microservices
    image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
    # extends: # uncomment this section for hardware acceleration - see https://immich.app/docs/features/hardware-transcoding
    #   file: hwaccel.transcoding.yml
    #   service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
    command: ['start.sh', 'microservices']
    volumes:
      - ${UPLOAD_LOCATION}:/usr/src/app/upload
      - /etc/localtime:/etc/localtime:ro
    env_file:
      - .env
    depends_on:
      - redis
      - database
    restart: always
    healthcheck:
      disable: false

  immich-machine-learning:
    container_name: immich_machine_learning
@@ -43,45 +55,21 @@ services:
    env_file:
      - .env
    restart: always
    healthcheck:
      disable: false

  redis:
    container_name: immich_redis
    image: docker.io/redis:6.2-alpine@sha256:eaba718fecd1196d88533de7ba49bf903ad33664a92debb24660a922ecd9cac8
    healthcheck:
      test: redis-cli ping || exit 1
    image: registry.hub.docker.com/library/redis:6.2-alpine@sha256:84882e87b54734154586e5f8abd4dce69fe7311315e2fc6d67c29614c8de2672
    restart: always

  database:
    container_name: immich_postgres
    image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
    image: registry.hub.docker.com/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
    environment:
      POSTGRES_PASSWORD: ${DB_PASSWORD}
      POSTGRES_USER: ${DB_USERNAME}
      POSTGRES_DB: ${DB_DATABASE_NAME}
      POSTGRES_INITDB_ARGS: '--data-checksums'
    volumes:
      # Do not edit the next line. If you want to change the database storage location on your system, edit the value of DB_DATA_LOCATION in the .env file
      - ${DB_DATA_LOCATION}:/var/lib/postgresql/data
    healthcheck:
      test: >-
        pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;
        Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align
        --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";
        echo "checksum failure count is $$Chksum";
        [ "$$Chksum" = '0' ] || exit 1
      interval: 5m
      start_interval: 30s
      start_period: 5m
    command: >-
      postgres
      -c shared_preload_libraries=vectors.so
      -c 'search_path="$$user", public, vectors'
      -c logging_collector=on
      -c max_wal_size=2GB
      -c shared_buffers=512MB
      -c wal_compression=on
    restart: always

volumes:

@@ -1,86 +0,0 @@
import { FlatCompat } from '@eslint/eslintrc';
import js from '@eslint/js';
import typescriptEslint from '@typescript-eslint/eslint-plugin';
import tsParser from '@typescript-eslint/parser';
import globals from 'globals';
import path from 'node:path';
import { fileURLToPath } from 'node:url';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const compat = new FlatCompat({
  baseDirectory: __dirname,
  recommendedConfig: js.configs.recommended,
  allConfig: js.configs.all,
});

export default [
  {
    ignores: [
      '**/.DS_Store',
      '**/node_modules',
      'dist',
      'lib/docker-compose/types.ts',
      'build',
      'package',
      '**/.env',
      '**/.env.*',
      '!**/.env.example',
      '**/pnpm-lock.yaml',
      '**/package-lock.json',
      '**/yarn.lock',
      'eslint.config.mjs',
      'vite.config.js',
      'coverage',
    ],
  },
  ...compat.extends('eslint:recommended', 'plugin:@typescript-eslint/recommended', 'plugin:unicorn/recommended'),
  {
    ignores: ['src/**'],
    plugins: {
      '@typescript-eslint': typescriptEslint,
    },

    languageOptions: {
      globals: {
        ...globals.browser,
        ...globals.node,
        NodeJS: true,
      },

      parser: tsParser,
      ecmaVersion: 2022,
      sourceType: 'module',

      parserOptions: {
        tsconfigRootDir: __dirname,
        project: ['./tsconfig.json'],
      },
    },

    rules: {
      '@typescript-eslint/no-unused-vars': [
        'warn',
        {
          argsIgnorePattern: '^_$',
          varsIgnorePattern: '^_$',
        },
      ],

      curly: 2,
      'unicorn/no-useless-undefined': 'off',
      'unicorn/prefer-spread': 'off',
      'unicorn/no-null': 'off',
      'unicorn/prevent-abbreviations': 'off',
      'unicorn/no-nested-ternary': 'off',
      'unicorn/consistent-function-scoping': 'off',
      'unicorn/prefer-top-level-await': 'off',
      'unicorn/import-style': 'off',
      '@typescript-eslint/await-thenable': 'error',
      '@typescript-eslint/no-floating-promises': 'error',
      '@typescript-eslint/no-misused-promises': 'error',
      '@typescript-eslint/require-await': 'error',
      'object-shorthand': ['error', 'always'],
    },
  },
];
@@ -2,20 +2,18 @@

# The location where your uploaded files are stored
UPLOAD_LOCATION=./library
# The location where your database files are stored
DB_DATA_LOCATION=./postgres

# To set a timezone, uncomment the next line and change Etc/UTC to a TZ identifier from this list: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List
# TZ=Etc/UTC

# The Immich version to use. You can pin this to a specific version like "v1.71.0"
IMMICH_VERSION=release

# Connection secret for postgres. You should change it to a random password
# Please use only the characters `A-Za-z0-9`, without special characters or spaces
DB_PASSWORD=postgres

# The values below this line do not need to be changed
###################################################################################
DB_HOSTNAME=immich_postgres
DB_USERNAME=postgres
DB_DATABASE_NAME=immich
DB_DATA_LOCATION=./postgres

REDIS_HOSTNAME=immich_redis

@@ -1,7 +1,9 @@
version: "3.8"

# Configurations for hardware-accelerated machine learning

# If using Unraid or another platform that doesn't allow multiple Compose files,
# you can inline the config for a backend by copying its contents
# you can inline the config for a backend by copying its contents
# into the immich-machine-learning service in the docker-compose.yml file.

# See https://immich.app/docs/features/ml-hardware-acceleration for info on usage.
@@ -28,7 +30,7 @@ services:

  openvino:
    device_cgroup_rules:
      - 'c 189:* rmw'
      - "c 189:* rmw"
    devices:
      - /dev/dri:/dev/dri
    volumes:

@@ -1,3 +1,5 @@
version: "3.8"

# Configurations for hardware-accelerated transcoding

# If using Unraid or another platform that doesn't allow multiple Compose files,
@@ -51,4 +53,5 @@ services:
    volumes:
      - /usr/lib/wsl:/usr/lib/wsl
    environment:
      - LD_LIBRARY_PATH=/usr/lib/wsl/lib
      - LIBVA_DRIVER_NAME=d3d12

@@ -1,87 +0,0 @@
import { dump as dumpYaml } from 'js-yaml';
import { ComposeBuilder, ServiceBuilder } from 'lib/docker-compose/builder';
import { ContainerName, GeneratorOptions, ServiceName } from 'lib/types';
import { asQueryParams, getImmichEnvironment, getImmichVolumes, isExternalPostgres, isIoRedis } from 'lib/utils';

const RELEASE_VERSION = 'v1.122.0';
const postgresHealthCheck = [
  'pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;',
  `Chksum="$$(psql --dbname="$\${POSTGRES_DB}" --username="$\${POSTGRES_USER}" --tuples-only --no-align`,
  `--command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";`,
  'echo "checksum failure count is $$Chksum";',
  `[ "$$Chksum" = '0' ] || exit 1\n`,
].join(' ');
const postgresCommand = [
  `postgres`,
  `-c shared_preload_libraries=vectors.so`,
  `-c 'search_path="$$user", public, vectors'`,
  `-c logging_collector=on`,
  `-c max_wal_size=2GB`,
  `-c shared_buffers=512MB`,
  `-c wal_compression=on`,
].join(' ');

const build = (options: GeneratorOptions) => {
  const healthchecksEnabled = options.healthchecks ?? true;
  const containerNames = options.containerNames ?? true;

  const immichService = ServiceBuilder.create(ServiceName.ImmichServer)
    .setImage(`ghcr.io/immich-app/immich-server:${RELEASE_VERSION}`)
    .setContainerName(containerNames && ContainerName.ImmichServer)
    .setRestartPolicy('always')
    .setHealthcheck(healthchecksEnabled)
    .setEnvironment(getImmichEnvironment(options))
    .addExposedPort(2283)
    .addVolumes(getImmichVolumes(options));

  const machineLearningEnabled = options.machineLearning;
  const modelCacheVolume = 'model-cache';
  const machineLearningService =
    machineLearningEnabled &&
    ServiceBuilder.create(ServiceName.ImmichMachineLearning)
      .setImage(`ghcr.io/immich-app/immich-machine-learning:${RELEASE_VERSION}-cuda`)
      .setContainerName(containerNames && ContainerName.ImmichMachineLearning)
      .setRestartPolicy('always')
      .setHealthcheck(healthchecksEnabled)
      .addVolume(`${modelCacheVolume}:/cache`);

  const redisService = isIoRedis(options)
    ? false
    : ServiceBuilder.create(ServiceName.Redis)
        .setImage('docker.io/redis:6.2-alpine')
        .setContainerName(containerNames && ContainerName.Redis)
        .setRestartPolicy('always')
        .setHealthcheck(healthchecksEnabled && 'redis-cli ping || exit 1');

  const postgresService = isExternalPostgres(options)
    ? false
    : ServiceBuilder.create(ServiceName.Postgres)
        .setImage('docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0')
        .setContainerName(containerNames && ContainerName.Postgres)
        .setRestartPolicy('always')
        .setEnvironment({
          POSTGRES_PASSWORD: options.postgresPassword,
          POSTGRES_USER: options.postgresUser,
          POSTGRES_DB: options.postgresDatabase,
          POSTGRES_INITDB_ARGS: '--data-checksums',
        })
        .setHealthcheck(healthchecksEnabled && postgresHealthCheck)
        .setCommand(postgresCommand)
        .addVolume(`${options.postgresDataLocation}:/var/lib/postgresql/data`);

  const domain = 'https://get.immich.app';
  const url = `${domain}/compose?${asQueryParams(options)}`;

  return ComposeBuilder.create('immich')
    .addComment(`This docker compose file was originally generated at https://get.immich.app/compose`)
    .addComment(url)
    .addComment(`${dumpYaml({ options }, { indent: 2 })}`)
    .addService(immichService.addDependsOn(redisService).addDependsOn(postgresService))
    .addService(machineLearningService)
    .addService(redisService)
    .addService(postgresService)
    .addVolume(modelCacheVolume, machineLearningEnabled && {});
};

export const buildSpec = (options: GeneratorOptions) => build(options).asSpec();
export const buildYaml = (options: GeneratorOptions) => build(options).asYaml();
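For orientation: build() above assembles ServiceBuilder values from a GeneratorOptions object, and ComposeBuilder serializes them to YAML. A minimal usage sketch follows; the module path and the exact shape of GeneratorOptions are assumptions inferred only from the fields build() visibly reads, neither is confirmed by this diff:

// Hypothetical usage sketch of the generator above.
// The import path is an assumption; the diff does not show this file's own path.
import { buildYaml } from 'lib/docker-compose/docker-compose';
import { GeneratorOptions } from 'lib/types';

// Only fields that build() visibly reads are set here; the full
// GeneratorOptions type is not shown in this diff.
const options = {
  machineLearning: true, // adds the immich-machine-learning service and model-cache volume
  healthchecks: true, // defaulted via `options.healthchecks ?? true`
  containerNames: true, // defaulted via `options.containerNames ?? true`
  postgresUser: 'postgres',
  postgresPassword: 'postgres',
  postgresDatabase: 'immich',
  postgresDataLocation: './postgres',
} as unknown as GeneratorOptions;

console.log(buildYaml(options)); // compose YAML prefixed with the addComment() header lines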
@@ -1,208 +0,0 @@
import { dump as dumpYaml } from 'js-yaml';
import {
  Command,
  ComposeSpecification,
  DefinitionsService,
  DefinitionsVolume,
  ListOfStrings,
} from 'lib/docker-compose/types';

type ServiceNameAccessor = { getName: () => string };
type ServiceBuildAccessor = { build: () => DefinitionsService };

const withNewLines = (yaml: string) =>
  yaml.replaceAll(/(?<leading>[^:]\n)(?<key>[ ]{0,2}\S+:)$/gm, '$<leading>\n$<key>');

export class ComposeBuilder {
  private spec: ComposeSpecification = {};
  private comments: string[] = [];

  private constructor(projectName?: string) {
    if (projectName) {
      this.setProjectName(projectName);
    }
  }

  static create(projectName?: string) {
    return new ComposeBuilder(projectName);
  }

  setProjectName(name: string) {
    this.spec.name = name;
    return this;
  }

  addComment(comment: string) {
    this.comments.push(comment + '\n');

    return this;
  }

  addService(spec: false | (ServiceNameAccessor & ServiceBuildAccessor)) {
    if (!spec) {
      return this;
    }

    if (!this.spec.services) {
      this.spec.services = {};
    }

    this.spec.services[spec.getName()] = spec.build();

    return this;
  }

  addVolume(name: string, volume: false | DefinitionsVolume) {
    if (volume === false) {
      return this;
    }

    if (!this.spec.volumes) {
      this.spec.volumes = {};
    }

    this.spec.volumes[name] = volume;

    return this;
  }

  asSpec() {
    return this.spec;
  }

  asYaml() {
    let prefix = '';
    if (this.comments.length > 0) {
      const comments =
        this.comments
          .flatMap((comment) => comment.split('\n'))
          .join('\n')
          .trim()
          .split('\n')
          .map((comment) => `# ${comment}`)
          .join('\n') + '\n\n';

      prefix += comments;
    }

    const spec = withNewLines(dumpYaml(this.spec, { indent: 2, lineWidth: 140 })).trim();

    return prefix + spec;
  }
}

export class ServiceBuilder {
  private spec: DefinitionsService = {};

  private constructor(private name: string) {}

  static create(name: string) {
    return new ServiceBuilder(name);
  }

  getName() {
    return this.name;
  }

  setImage(image: string) {
    this.spec.image = image;
    return this;
  }

  setContainerName(name: false | string) {
    if (name === false) {
      return this;
    }

    this.spec.container_name = name;

    return this;
  }

  addExposedPort(port: number | { internal: number; external: number }) {
    if (typeof port === 'number') {
      port = { internal: port, external: port };
    }

    const { internal, external } = port;

    if (!this.spec.ports) {
      this.spec.ports = [];
    }

    this.spec.ports.push(`${external}:${internal}`);

    return this;
  }

  addDependsOn(service: false | string | ServiceNameAccessor) {
    if (service === false) {
      return this;
    }

    let serviceName = service as string;
    if ('getName' in (service as ServiceNameAccessor)) {
      serviceName = (service as ServiceNameAccessor).getName();
    }

    if (!this.spec.depends_on) {
      this.spec.depends_on = [];
    }

    (this.spec.depends_on as ListOfStrings).push(serviceName);

    return this;
  }

  setRestartPolicy(restart: string) {
    this.spec.restart = restart;
    return this;
  }

  setEnvironment(env: Record<string, string | number | undefined>) {
    this.spec.environment = env;

    return this;
  }

  setHealthcheck(test: boolean | string) {
    if (test === true) {
      return this;
    }

    if (test === false) {
      this.spec.healthcheck = { disable: true };
      return this;
    }

    this.spec.healthcheck = { test };
    return this;
  }

  setCommand(command: Command) {
    this.spec.command = command;

    return this;
  }

  addVolume(volume: string) {
    if (!this.spec.volumes) {
      this.spec.volumes = [];
    }
    this.spec.volumes.push(volume);

    return this;
  }

  addVolumes(volumes: string[]) {
    for (const volume of volumes) {
      this.addVolume(volume);
    }

    return this;
  }

  build() {
    return this.spec;
  }
}
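A minimal sketch of how the two builders chain together; the service names here are plain strings chosen for illustration, whereas the real generator passes ServiceName enum values:

import { ComposeBuilder, ServiceBuilder } from 'lib/docker-compose/builder';

// ServiceBuilder accumulates a DefinitionsService; every setter returns `this`.
const redis = ServiceBuilder.create('redis')
  .setImage('docker.io/redis:6.2-alpine')
  .setRestartPolicy('always')
  .setHealthcheck('redis-cli ping || exit 1'); // a string becomes { test }

const server = ServiceBuilder.create('server')
  .setImage('ghcr.io/immich-app/immich-server:release')
  .addExposedPort(2283) // a bare number maps to '2283:2283'
  .addDependsOn(redis); // accepts another builder via its getName()

// ComposeBuilder collects services and renders the comment header plus YAML.
const yaml = ComposeBuilder.create('immich')
  .addComment('generated example')
  .addService(server)
  .addService(redis)
  .asYaml();

The false-propagating signatures (setContainerName(false), addService(false), and so on) are what let build() above switch features off with plain && expressions instead of if-blocks.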
@@ -1,937 +0,0 @@
export type DefinitionsInclude =
  | string
  | {
      path?: StringOrList;
      env_file?: StringOrList;
      project_directory?: string;
    };
export type StringOrList = string | ListOfStrings;
export type ListOfStrings = string[];
export type DefinitionsDevelopment = {
  watch?: {
    ignore?: string[];
    path: string;
    action: 'rebuild' | 'sync' | 'sync+restart';
    target?: string;
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^x-".
     */
    [k: string]: unknown;
  }[];
  /**
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   *
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   */
  [k: string]: unknown;
} & Development;
export type Development = {
  watch?: {
    ignore?: string[];
    path: string;
    action: 'rebuild' | 'sync' | 'sync+restart';
    target?: string;
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^x-".
     */
    [k: string]: unknown;
  }[];
  /**
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   *
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   */
  [k: string]: unknown;
} | null;
export type DefinitionsDeployment = {
  mode?: string;
  endpoint_mode?: string;
  replicas?: number | string;
  labels?: ListOrDict;
  rollback_config?: {
    parallelism?: number | string;
    delay?: string;
    failure_action?: string;
    monitor?: string;
    max_failure_ratio?: number | string;
    order?: 'start-first' | 'stop-first';
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^x-".
     */
    [k: string]: unknown;
  };
  update_config?: {
    parallelism?: number | string;
    delay?: string;
    failure_action?: string;
    monitor?: string;
    max_failure_ratio?: number | string;
    order?: 'start-first' | 'stop-first';
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^x-".
     */
    [k: string]: unknown;
  };
  resources?: {
    limits?: {
      cpus?: number | string;
      memory?: string;
      pids?: number | string;
      /**
       * This interface was referenced by `undefined`'s JSON-Schema definition
       * via the `patternProperty` "^x-".
       */
      [k: string]: unknown;
    };
    reservations?: {
      cpus?: number | string;
      memory?: string;
      generic_resources?: DefinitionsGenericResources;
      devices?: DefinitionsDevices;
      /**
       * This interface was referenced by `undefined`'s JSON-Schema definition
       * via the `patternProperty` "^x-".
       */
      [k: string]: unknown;
    };
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^x-".
     */
    [k: string]: unknown;
  };
  restart_policy?: {
    condition?: string;
    delay?: string;
    max_attempts?: number | string;
    window?: string;
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^x-".
     */
    [k: string]: unknown;
  };
  placement?: {
    constraints?: string[];
    preferences?: {
      spread?: string;
      /**
       * This interface was referenced by `undefined`'s JSON-Schema definition
       * via the `patternProperty` "^x-".
       */
      [k: string]: unknown;
    }[];
    max_replicas_per_node?: number | string;
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^x-".
     */
    [k: string]: unknown;
  };
  /**
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   *
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   */
  [k: string]: unknown;
} & Deployment;
export type ListOrDict =
  | {
      /**
       * This interface was referenced by `undefined`'s JSON-Schema definition
       * via the `patternProperty` ".+".
       */
      [k: string]: undefined | string | number | boolean | null;
    }
  | string[];
export type DefinitionsGenericResources = {
  discrete_resource_spec?: {
    kind?: string;
    value?: number | string;
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^x-".
     */
    [k: string]: unknown;
  };
  /**
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   */
  [k: string]: unknown;
}[];
export type DefinitionsDevices = {
  capabilities: ListOfStrings;
  count?: string | number;
  device_ids?: ListOfStrings;
  driver?: string;
  options?: ListOrDict;
  /**
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   */
  [k: string]: unknown;
}[];
export type Deployment = {
  mode?: string;
  endpoint_mode?: string;
  replicas?: number | string;
  labels?: ListOrDict;
  rollback_config?: {
    parallelism?: number | string;
    delay?: string;
    failure_action?: string;
    monitor?: string;
    max_failure_ratio?: number | string;
    order?: 'start-first' | 'stop-first';
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^x-".
     */
    [k: string]: unknown;
  };
  update_config?: {
    parallelism?: number | string;
    delay?: string;
    failure_action?: string;
    monitor?: string;
    max_failure_ratio?: number | string;
    order?: 'start-first' | 'stop-first';
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^x-".
     */
    [k: string]: unknown;
  };
  resources?: {
    limits?: {
      cpus?: number | string;
      memory?: string;
      pids?: number | string;
      /**
       * This interface was referenced by `undefined`'s JSON-Schema definition
       * via the `patternProperty` "^x-".
       */
      [k: string]: unknown;
    };
    reservations?: {
      cpus?: number | string;
      memory?: string;
      generic_resources?: DefinitionsGenericResources;
      devices?: DefinitionsDevices;
      /**
       * This interface was referenced by `undefined`'s JSON-Schema definition
       * via the `patternProperty` "^x-".
       */
      [k: string]: unknown;
    };
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^x-".
     */
    [k: string]: unknown;
  };
  restart_policy?: {
    condition?: string;
    delay?: string;
    max_attempts?: number | string;
    window?: string;
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^x-".
     */
    [k: string]: unknown;
  };
  placement?: {
    constraints?: string[];
    preferences?: {
      spread?: string;
      /**
       * This interface was referenced by `undefined`'s JSON-Schema definition
       * via the `patternProperty` "^x-".
       */
      [k: string]: unknown;
    }[];
    max_replicas_per_node?: number | string;
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^x-".
     */
    [k: string]: unknown;
  };
  /**
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   *
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   */
  [k: string]: unknown;
} | null;
export type ExtraHosts = {} | string[];
export type ServiceConfigOrSecret = (
  | string
  | {
      source?: string;
      target?: string;
      uid?: string;
      gid?: string;
      mode?: number | string;
      /**
       * This interface was referenced by `undefined`'s JSON-Schema definition
       * via the `patternProperty` "^x-".
       */
      [k: string]: unknown;
    }
)[];
export type Command = null | string | string[];
export type EnvFile =
  | string
  | (
      | string
      | {
          path: string;
          format?: string;
          required?: boolean | string;
        }
    )[];
/**
 * This interface was referenced by `PropertiesNetworks`'s JSON-Schema definition
 * via the `patternProperty` "^[a-zA-Z0-9._-]+$".
 */
export type DefinitionsNetwork = {
  name?: string;
  driver?: string;
  driver_opts?: {
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^.+$".
     */
    [k: string]: string | number;
  };
  ipam?: {
    driver?: string;
    config?: {
      subnet?: string;
      ip_range?: string;
      gateway?: string;
      aux_addresses?: {
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^.+$".
         */
        [k: string]: string;
      };
      /**
       * This interface was referenced by `undefined`'s JSON-Schema definition
       * via the `patternProperty` "^x-".
       */
      [k: string]: unknown;
    }[];
    options?: {
      /**
       * This interface was referenced by `undefined`'s JSON-Schema definition
       * via the `patternProperty` "^.+$".
       */
      [k: string]: string;
    };
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^x-".
     */
    [k: string]: unknown;
  };
  external?:
    | boolean
    | string
    | {
        name?: string;
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^x-".
         */
        [k: string]: unknown;
      };
  internal?: boolean | string;
  enable_ipv6?: boolean | string;
  attachable?: boolean | string;
  labels?: ListOrDict;
  /**
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   *
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   */
  [k: string]: unknown;
} & Network;
export type Network = {
  name?: string;
  driver?: string;
  driver_opts?: {
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^.+$".
     */
    [k: string]: string | number;
  };
  ipam?: {
    driver?: string;
    config?: {
      subnet?: string;
      ip_range?: string;
      gateway?: string;
      aux_addresses?: {
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^.+$".
         */
        [k: string]: string;
      };
      /**
       * This interface was referenced by `undefined`'s JSON-Schema definition
       * via the `patternProperty` "^x-".
       */
      [k: string]: unknown;
    }[];
    options?: {
      /**
       * This interface was referenced by `undefined`'s JSON-Schema definition
       * via the `patternProperty` "^.+$".
       */
      [k: string]: string;
    };
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^x-".
     */
    [k: string]: unknown;
  };
  external?:
    | boolean
    | string
    | {
        name?: string;
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^x-".
         */
        [k: string]: unknown;
      };
  internal?: boolean | string;
  enable_ipv6?: boolean | string;
  attachable?: boolean | string;
  labels?: ListOrDict;
  /**
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   *
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   */
  [k: string]: unknown;
} | null;
/**
 * This interface was referenced by `PropertiesVolumes`'s JSON-Schema definition
 * via the `patternProperty` "^[a-zA-Z0-9._-]+$".
 */
export type DefinitionsVolume = {
  name?: string;
  driver?: string;
  driver_opts?: {
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^.+$".
     */
    [k: string]: string | number;
  };
  external?:
    | boolean
    | string
    | {
        name?: string;
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^x-".
         */
        [k: string]: unknown;
      };
  labels?: ListOrDict;
  /**
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   *
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   */
  [k: string]: unknown;
} & Volume;
export type Volume = {
  name?: string;
  driver?: string;
  driver_opts?: {
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^.+$".
     */
    [k: string]: string | number;
  };
  external?:
    | boolean
    | string
    | {
        name?: string;
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^x-".
         */
        [k: string]: unknown;
      };
  labels?: ListOrDict;
  /**
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   *
   * This interface was referenced by `undefined`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   */
  [k: string]: unknown;
} | null;

/**
 * The Compose file is a YAML file defining a multi-containers based application.
 */
export interface ComposeSpecification {
  /**
   * declared for backward compatibility, ignored.
   */
  version?: string;
  /**
   * define the Compose project name, until user defines one explicitly.
   */
  name?: string;
  /**
   * compose sub-projects to be included.
   */
  include?: DefinitionsInclude[];
  services?: PropertiesServices;
  networks?: PropertiesNetworks;
  volumes?: PropertiesVolumes;
  secrets?: PropertiesSecrets;
  configs?: PropertiesConfigs;
  /**
   * This interface was referenced by `ComposeSpecification`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   */
  [k: string]: unknown;
}
export interface PropertiesServices {
  [k: string]: DefinitionsService;
}
/**
 * This interface was referenced by `PropertiesServices`'s JSON-Schema definition
 * via the `patternProperty` "^[a-zA-Z0-9._-]+$".
 */
export interface DefinitionsService {
  develop?: DefinitionsDevelopment;
  deploy?: DefinitionsDeployment;
  annotations?: ListOrDict;
  attach?: boolean | string;
  build?:
    | string
    | {
        context?: string;
        dockerfile?: string;
        dockerfile_inline?: string;
        entitlements?: string[];
        args?: ListOrDict;
        ssh?: ListOrDict;
        labels?: ListOrDict;
        cache_from?: string[];
        cache_to?: string[];
        no_cache?: boolean | string;
        additional_contexts?: ListOrDict;
        network?: string;
        pull?: boolean | string;
        target?: string;
        shm_size?: number | string;
        extra_hosts?: ExtraHosts;
        isolation?: string;
        privileged?: boolean | string;
        secrets?: ServiceConfigOrSecret;
        tags?: string[];
        ulimits?: Ulimits;
        platforms?: string[];
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^x-".
         */
        [k: string]: unknown;
      };
  blkio_config?: {
    device_read_bps?: BlkioLimit[];
    device_read_iops?: BlkioLimit[];
    device_write_bps?: BlkioLimit[];
    device_write_iops?: BlkioLimit[];
    weight?: number | string;
    weight_device?: BlkioWeight[];
  };
  cap_add?: string[];
  cap_drop?: string[];
  cgroup?: 'host' | 'private';
  cgroup_parent?: string;
  command?: Command;
  configs?: ServiceConfigOrSecret;
  container_name?: string;
  cpu_count?: string | number;
  cpu_percent?: string | number;
  cpu_shares?: number | string;
  cpu_quota?: number | string;
  cpu_period?: number | string;
  cpu_rt_period?: number | string;
  cpu_rt_runtime?: number | string;
  cpus?: number | string;
  cpuset?: string;
  credential_spec?: {
    config?: string;
    file?: string;
    registry?: string;
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^x-".
     */
    [k: string]: unknown;
  };
  depends_on?:
    | ListOfStrings
    | {
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^[a-zA-Z0-9._-]+$".
         */
        [k: string]: {
          restart?: boolean | string;
          required?: boolean;
          condition: 'service_started' | 'service_healthy' | 'service_completed_successfully';
          /**
           * This interface was referenced by `undefined`'s JSON-Schema definition
           * via the `patternProperty` "^x-".
           */
          [k: string]: unknown;
        };
      };
  device_cgroup_rules?: ListOfStrings;
  devices?: (
    | string
    | {
        source: string;
        target?: string;
        permissions?: string;
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^x-".
         */
        [k: string]: unknown;
      }
  )[];
  dns?: StringOrList;
  dns_opt?: string[];
  dns_search?: StringOrList;
  domainname?: string;
  entrypoint?: Command;
  env_file?: EnvFile;
  environment?: ListOrDict;
  expose?: (string | number)[];
  extends?:
    | string
    | {
        service: string;
        file?: string;
      };
  external_links?: string[];
  extra_hosts?: ExtraHosts;
  group_add?: (string | number)[];
  healthcheck?: DefinitionsHealthcheck;
  hostname?: string;
  image?: string;
  init?: boolean | string;
  ipc?: string;
  isolation?: string;
  labels?: ListOrDict;
  links?: string[];
  logging?: {
    driver?: string;
    options?: {
      /**
       * This interface was referenced by `undefined`'s JSON-Schema definition
       * via the `patternProperty` "^.+$".
       */
      [k: string]: string | number | null;
    };
    /**
     * This interface was referenced by `undefined`'s JSON-Schema definition
     * via the `patternProperty` "^x-".
     */
    [k: string]: unknown;
  };
  mac_address?: string;
  mem_limit?: number | string;
  mem_reservation?: string | number;
  mem_swappiness?: number | string;
  memswap_limit?: number | string;
  network_mode?: string;
  networks?:
    | ListOfStrings
    | {
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^[a-zA-Z0-9._-]+$".
         */
        [k: string]: {
          aliases?: ListOfStrings;
          ipv4_address?: string;
          ipv6_address?: string;
          link_local_ips?: ListOfStrings;
          mac_address?: string;
          driver_opts?: {
            /**
             * This interface was referenced by `undefined`'s JSON-Schema definition
             * via the `patternProperty` "^.+$".
             */
            [k: string]: string | number;
          };
          priority?: number;
          /**
           * This interface was referenced by `undefined`'s JSON-Schema definition
           * via the `patternProperty` "^x-".
           */
          [k: string]: unknown;
        } | null;
      };
  oom_kill_disable?: boolean | string;
  oom_score_adj?: string | number;
  pid?: string | null;
  pids_limit?: number | string;
  platform?: string;
  ports?: (
    | number
    | string
    | {
        name?: string;
        mode?: string;
        host_ip?: string;
        target?: number | string;
        published?: string | number;
        protocol?: string;
        app_protocol?: string;
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^x-".
         */
        [k: string]: unknown;
      }
  )[];
  post_start?: DefinitionsServiceHook[];
  pre_stop?: DefinitionsServiceHook[];
  privileged?: boolean | string;
  profiles?: ListOfStrings;
  pull_policy?: 'always' | 'never' | 'if_not_present' | 'build' | 'missing';
  read_only?: boolean | string;
  restart?: string;
  runtime?: string;
  scale?: number | string;
  security_opt?: string[];
  shm_size?: number | string;
  secrets?: ServiceConfigOrSecret;
  sysctls?: ListOrDict;
  stdin_open?: boolean | string;
  stop_grace_period?: string;
  stop_signal?: string;
  storage_opt?: {
    [k: string]: unknown;
  };
  tmpfs?: StringOrList;
  tty?: boolean | string;
  ulimits?: Ulimits;
  user?: string;
  uts?: string;
  userns_mode?: string;
  volumes?: (
    | string
    | {
        type: string;
        source?: string;
        target?: string;
        read_only?: boolean | string;
        consistency?: string;
        bind?: {
          propagation?: string;
          create_host_path?: boolean | string;
          selinux?: 'z' | 'Z';
          /**
           * This interface was referenced by `undefined`'s JSON-Schema definition
           * via the `patternProperty` "^x-".
           */
          [k: string]: unknown;
        };
        volume?: {
          nocopy?: boolean | string;
          subpath?: string;
          /**
           * This interface was referenced by `undefined`'s JSON-Schema definition
           * via the `patternProperty` "^x-".
           */
          [k: string]: unknown;
        };
        tmpfs?: {
          size?: number | string;
          mode?: number | string;
          /**
           * This interface was referenced by `undefined`'s JSON-Schema definition
           * via the `patternProperty` "^x-".
           */
          [k: string]: unknown;
        };
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^x-".
         */
        [k: string]: unknown;
      }
  )[];
  volumes_from?: string[];
  working_dir?: string;
  /**
   * This interface was referenced by `DefinitionsService`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   */
  [k: string]: unknown;
}
export interface Ulimits {
  /**
   * This interface was referenced by `Ulimits`'s JSON-Schema definition
   * via the `patternProperty` "^[a-z]+$".
   */
  [k: string]:
    | (number | string)
    | {
        hard: number | string;
        soft: number | string;
        /**
         * This interface was referenced by `undefined`'s JSON-Schema definition
         * via the `patternProperty` "^x-".
         */
        [k: string]: unknown;
      };
}
export interface BlkioLimit {
  path?: string;
  rate?: number | string;
}
export interface BlkioWeight {
  path?: string;
  weight?: number | string;
}
export interface DefinitionsHealthcheck {
  disable?: boolean | string;
  interval?: string;
  retries?: number | string;
  test?: string | string[];
  timeout?: string;
  start_period?: string;
  start_interval?: string;
  /**
   * This interface was referenced by `DefinitionsHealthcheck`'s JSON-Schema definition
   * via the `patternProperty` "^x-".
   */
  [k: string]: unknown;
}
export interface DefinitionsServiceHook {
|
||||
command?: Command;
|
||||
user?: string;
|
||||
privileged?: boolean | string;
|
||||
working_dir?: string;
|
||||
environment?: ListOrDict;
|
||||
/**
|
||||
* This interface was referenced by `DefinitionsServiceHook`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}
|
||||
export interface PropertiesNetworks {
|
||||
[k: string]: DefinitionsNetwork;
|
||||
}
|
||||
export interface PropertiesVolumes {
|
||||
[k: string]: DefinitionsVolume;
|
||||
}
|
||||
export interface PropertiesSecrets {
|
||||
[k: string]: DefinitionsSecret;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `PropertiesSecrets`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
|
||||
*/
|
||||
export interface DefinitionsSecret {
|
||||
name?: string;
|
||||
environment?: string;
|
||||
file?: string;
|
||||
external?:
|
||||
| boolean
|
||||
| string
|
||||
| {
|
||||
name?: string;
|
||||
[k: string]: unknown;
|
||||
};
|
||||
labels?: ListOrDict;
|
||||
driver?: string;
|
||||
driver_opts?: {
|
||||
/**
|
||||
* This interface was referenced by `undefined`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^.+$".
|
||||
*/
|
||||
[k: string]: string | number;
|
||||
};
|
||||
template_driver?: string;
|
||||
/**
|
||||
* This interface was referenced by `DefinitionsSecret`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}
|
||||
export interface PropertiesConfigs {
|
||||
[k: string]: DefinitionsConfig;
|
||||
}
|
||||
/**
|
||||
* This interface was referenced by `PropertiesConfigs`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^[a-zA-Z0-9._-]+$".
|
||||
*/
|
||||
export interface DefinitionsConfig {
|
||||
name?: string;
|
||||
content?: string;
|
||||
environment?: string;
|
||||
file?: string;
|
||||
external?:
|
||||
| boolean
|
||||
| string
|
||||
| {
|
||||
name?: string;
|
||||
[k: string]: unknown;
|
||||
};
|
||||
labels?: ListOrDict;
|
||||
template_driver?: string;
|
||||
/**
|
||||
* This interface was referenced by `DefinitionsConfig`'s JSON-Schema definition
|
||||
* via the `patternProperty` "^x-".
|
||||
*/
|
||||
[k: string]: unknown;
|
||||
}
|
||||
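The generated interfaces above are easiest to read with a short consumption sketch. This is illustrative only: the module path `lib/compose-spec`, the `DefinitionsService` export, and the healthcheck endpoint are assumptions, not part of the diff:

// Sketch: a typed Immich service entry built from the generated compose-spec
// interfaces. Import path and ping URL are hypothetical.
import type { DefinitionsHealthcheck, DefinitionsService } from 'lib/compose-spec';

const healthcheck: DefinitionsHealthcheck = {
  // `test` accepts string | string[]; `interval` is a duration string.
  test: ['CMD', 'curl', '-f', 'http://localhost:2283/api/server/ping'],
  interval: '30s',
  retries: 3,
};

const server: DefinitionsService = {
  image: 'ghcr.io/immich-app/immich-server:v1.122.0',
  restart: 'always',
  ports: ['2283:2283'], // short syntax; the long object syntax is also typed above
  healthcheck,
};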
@@ -1,2 +0,0 @@
export * from 'lib/build';
export * from 'lib/types';
@@ -1,57 +0,0 @@
export enum ServiceName {
  ImmichServer = 'immich-server',
  ImmichMachineLearning = 'immich-machine-learning',
  Postgres = 'immich-postgres',
  Redis = 'immich-redis',
}

export enum ContainerName {
  ImmichServer = 'immich-server',
  ImmichMachineLearning = 'immich-machine-learning',
  Postgres = 'immich-postgres',
  Redis = 'immich-redis',
}

export type BaseOptions = {
  releaseVersion: string;
  healthchecks?: boolean;
  machineLearning: boolean;
  containerNames?: boolean;
  serverTimeZone?: string;
};

export type GeneratorOptions = BaseOptions & FolderOptions & PostgresOptions & RedisOptions;

export type FolderOptions = {
  baseLocation: string;
  encodedVideoLocation?: string;
  libraryLocation?: string;
  uploadLocation?: string;
  profileLocation?: string;
  thumbnailsLocation?: string;
  backupsLocation?: string;
};

export type PostgresOptions = InternalPostgresOptions | ExternalPostgresOptions;
export type InternalPostgresOptions = {
  postgresUser: string;
  postgresPassword: string;
  postgresDatabase: string;
  postgresDataLocation: string;
};
export type ExternalPostgresOptions = { postgresUrl: string; postgresVectorExtension?: VectorExtension };

export type RedisOptions = ExternalRedisOptions | IoRedisOptions | { redis: true };
export type ExternalRedisOptions = {
  redisHost: string;
  redisPort: number;
  redisDbIndex?: number;
  redisUsername?: string;
  redisPassword?: string;
  redisSocket?: string;
};
export type IoRedisOptions = { redisUrl: string };

export type VectorExtension = 'pgvector' | 'pgvecto.rs';

export type HardwareAccelerationPlatform = 'nvenc' | 'quicksync' | 'rkmpp' | 'vaapi' | 'vaapi-wsl';
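Since `PostgresOptions` and `RedisOptions` are structural unions with no discriminant field, a `GeneratorOptions` value selects its branches purely by which properties it includes. A minimal sketch with illustrative values:

// Sketch: this object satisfies BaseOptions & FolderOptions, takes the
// ExternalPostgresOptions branch (via postgresUrl), and the bundled-Redis
// branch (via `redis: true`). All values are illustrative.
import type { GeneratorOptions } from 'lib/types';

const options: GeneratorOptions = {
  releaseVersion: 'v1.122.0',
  machineLearning: true,
  baseLocation: '/home/immich/library',
  postgresUrl: 'postgres://immich:immich@localhost:5432/immich',
  redis: true,
};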
@@ -1,84 +0,0 @@
import {
  ExternalPostgresOptions,
  ExternalRedisOptions,
  GeneratorOptions,
  IoRedisOptions,
  PostgresOptions,
  RedisOptions,
  ServiceName,
} from 'lib/types';

export const isExternalPostgres = (options: PostgresOptions): options is ExternalPostgresOptions =>
  'postgresUrl' in options;

export const isIoRedis = (options: RedisOptions): options is IoRedisOptions => 'redisUrl' in options;
export const isExternalRedis = (options: RedisOptions): options is ExternalRedisOptions => 'redisHost' in options;

export const asQueryParams = (values: Record<string, string | number | boolean | undefined>) => {
  return new URLSearchParams(
    Object.entries(values)
      // Drop unset values. `.filter(Boolean)` would be a no-op here, since the
      // `[key, value]` tuples are always truthy.
      .filter(([, value]) => value !== undefined)
      .map(([key, value]) => [key, String(value)]),
  ).toString();
};

export const getImmichVolumes = (options: GeneratorOptions) => {
  const {
    baseLocation,
    encodedVideoLocation,
    uploadLocation,
    backupsLocation,
    profileLocation,
    libraryLocation,
    thumbnailsLocation,
  } = options;

  const internalBaseLocation = '/usr/src/app/upload';

  const volumes = [`${baseLocation}:${internalBaseLocation}`];

  for (const { override, folder } of [
    { override: encodedVideoLocation, folder: 'encoded-video' },
    { override: libraryLocation, folder: 'library' },
    { override: uploadLocation, folder: 'upload' },
    { override: profileLocation, folder: 'profile' },
    { override: thumbnailsLocation, folder: 'thumbs' },
    { override: backupsLocation, folder: 'backups' },
  ]) {
    if (override) {
      volumes.push(`${override}:${internalBaseLocation}/${folder}`);
    }
  }

  volumes.push(`/etc/localtime:/etc/localtime:ro`);

  return volumes;
};

export const getImmichEnvironment = (options: GeneratorOptions) => {
  const env: Record<string, string | number | undefined> = {};
  if (isExternalPostgres(options)) {
    env.DB_URL = options.postgresUrl;
    env.DB_VECTOR_EXTENSION = options.postgresVectorExtension;
  } else {
    const { postgresUser, postgresPassword, postgresDatabase } = options;
    env.DB_URL = `postgres://${postgresUser}:${postgresPassword}@${ServiceName.Postgres}:5432/${postgresDatabase}`;
  }

  if (isIoRedis(options)) {
    env.REDIS_URL = options.redisUrl;
  } else if (isExternalRedis(options)) {
    env.REDIS_HOSTNAME = options.redisHost;
    env.REDIS_PORT = options.redisPort;
    env.REDIS_DBINDEX = options.redisDbIndex;
    env.REDIS_USERNAME = options.redisUsername;
    env.REDIS_PASSWORD = options.redisPassword;
    env.REDIS_SOCKET = options.redisSocket;
  } else {
    env.REDIS_HOSTNAME = ServiceName.Redis;
  }

  env.TZ = options.serverTimeZone;

  return env;
};
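To make the fall-through behavior of `getImmichEnvironment` concrete, a small sketch of what it returns for the internal-Postgres, bundled-Redis case (the `lib/utils` import path is an assumption):

// Sketch: with no postgresUrl/redisUrl/redisHost present, the helper falls
// through to the internal service names from the ServiceName enum.
import { getImmichEnvironment } from 'lib/utils'; // hypothetical module path

const env = getImmichEnvironment({
  releaseVersion: 'v1.122.0',
  machineLearning: true,
  baseLocation: '/home/immich/library',
  postgresUser: 'postgres',
  postgresPassword: 'postgres',
  postgresDatabase: 'immich',
  postgresDataLocation: '/home/immich/database',
  redis: true,
  serverTimeZone: 'America/New_York',
});

// env.DB_URL         === 'postgres://postgres:postgres@immich-postgres:5432/immich'
// env.REDIS_HOSTNAME === 'immich-redis'
// env.TZ             === 'America/New_York'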
docker/package-lock.json (generated, 4555 lines)
File diff suppressed because it is too large
@@ -1,38 +0,0 @@
{
  "name": "immich-docker",
  "version": "0.0.0",
  "description": "A docker-compose generator for Immich",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1",
    "build": "vite build",
    "generate": "npx tsx src/index.ts",
    "lint": "eslint . --max-warnings 0",
    "lint:fix": "npm run lint -- --fix",
    "format": "prettier --check .",
    "format:fix": "prettier --write ."
  },
  "type": "module",
  "exports": "./dist/immich-docker.js",
  "author": "team@immich.app",
  "private": true,
  "license": "GNU Affero General Public License version 3",
  "dependencies": {
    "js-yaml": "^4.1.0"
  },
  "devDependencies": {
    "@eslint/eslintrc": "^3.2.0",
    "@eslint/js": "^9.16.0",
    "@types/js-yaml": "^4.0.9",
    "@types/node": "^22.10.2",
    "@typescript-eslint/eslint-plugin": "^8.18.0",
    "@typescript-eslint/parser": "^8.18.0",
    "eslint-plugin-unicorn": "^56.0.1",
    "globals": "^15.13.0",
    "json-schema-to-ts": "^3.1.1",
    "prettier-plugin-organize-imports": "^4.1.0",
    "prettier-plugin-sort-json": "^4.0.0",
    "vite": "^6.0.3",
    "vitest": "^2.1.8"
  }
}
@@ -1,12 +1,12 @@
global:
-  scrape_interval: 15s
+  scrape_interval: 15s
  evaluation_interval: 15s

scrape_configs:
-  - job_name: immich_api
+  - job_name: immich_server
    static_configs:
      - targets: ['immich-server:8081']

  - job_name: immich_microservices
    static_configs:
-      - targets: ['immich-server:8082']
+      - targets: ['immich-microservices:8081']
@@ -1,49 +0,0 @@
#!/bin/sh

set -eu

LOG_LEVEL="${IMMICH_LOG_LEVEL:='info'}"

logDebug() {
  if [ "$LOG_LEVEL" = "debug" ] || [ "$LOG_LEVEL" = "verbose" ]; then
    echo "DEBUG: $1" >&2
  fi
}

if [ -f /sys/fs/cgroup/cgroup.controllers ]; then
  logDebug "cgroup v2 detected."
  if [ -f /sys/fs/cgroup/cpu.max ]; then
    read -r quota period </sys/fs/cgroup/cpu.max
    if [ "$quota" = "max" ]; then
      logDebug "No CPU limits set."
      unset quota period
    fi
  else
    logDebug "/sys/fs/cgroup/cpu.max not found."
  fi
else
  logDebug "cgroup v1 detected."

  if [ -f /sys/fs/cgroup/cpu/cpu.cfs_quota_us ] && [ -f /sys/fs/cgroup/cpu/cpu.cfs_period_us ]; then
    quota=$(cat /sys/fs/cgroup/cpu/cpu.cfs_quota_us)
    period=$(cat /sys/fs/cgroup/cpu/cpu.cfs_period_us)

    if [ "$quota" = "-1" ]; then
      logDebug "No CPU limits set."
      unset quota period
    fi
  else
    logDebug "/sys/fs/cgroup/cpu/cpu.cfs_quota_us or /sys/fs/cgroup/cpu/cpu.cfs_period_us not found."
  fi
fi

if [ -n "${quota:-}" ] && [ -n "${period:-}" ]; then
  cpus=$((quota / period))
  if [ "$cpus" -eq 0 ]; then
    cpus=1
  fi
else
  cpus=$(grep -c ^processor /proc/cpuinfo)
fi

echo "$cpus"
@@ -1,60 +0,0 @@
import { mkdirSync, writeFileSync } from 'node:fs';
import { buildYaml } from '../lib/index';
import { GeneratorOptions } from '../lib/types';

const main = () => {
  const commonOptions = {
    releaseVersion: 'v1.122.0',
    baseLocation: '/home/immich/library',
    serverTimeZone: 'America/New_York',
    healthchecks: true,
    machineLearning: true,
    containerNames: true,
    // hardwareAcceleration: 'nvenc',
  };

  const postgresOptions = {
    postgresUser: 'postgres',
    postgresPassword: 'postgres',
    postgresDatabase: 'immich',
    postgresDataLocation: '/home/immich/database',
  };

  const defaultOptions: GeneratorOptions = { ...commonOptions, ...postgresOptions, redis: true };

  const samples: Array<{ name: string; options: GeneratorOptions }> = [
    { name: 'defaults', options: defaultOptions },
    { name: 'no-names', options: { ...defaultOptions, containerNames: false } },
    { name: 'no-healthchecks', options: { ...defaultOptions, healthchecks: false } },
    { name: 'external-ioredis', options: { ...defaultOptions, redisUrl: 'ioredis://<base64>' } },
    { name: 'external-redis', options: { ...defaultOptions, redisHost: '192.168.0.5', redisPort: 1234 } },
    {
      name: 'external-postgres',
      options: {
        ...defaultOptions,
        postgresUrl: 'postgres://immich:immich@localhost:5432/immich',
        postgresVectorExtension: 'pgvector',
      },
    },
    {
      name: 'split-storage',
      options: {
        ...defaultOptions,
        thumbnailsLocation: '/home/fast/thumbs',
        encodedVideoLocation: '/home/fast/encoded-videos',
      },
    },
  ];

  // TODO replace with vitest test files/scripts
  mkdirSync('./examples', { recursive: true });
  for (const { name, options } of samples) {
    const spec = buildYaml(options);

    const filename = `./examples/docker-compose.${name}.yaml`;
    writeFileSync(filename, spec);
    console.log(`Wrote ${filename}`);
  }
};

main();
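With the `generate` script from the package.json above (`npx tsx src/index.ts`), running this entry point writes one `docker-compose.<name>.yaml` per sample into `./examples/`.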
@@ -1,19 +0,0 @@
{
  "compilerOptions": {
    "allowJs": true,
    "baseUrl": "./",
    "checkJs": true,
    "esModuleInterop": true,
    "forceConsistentCasingInFileNames": true,
    "module": "es2020",
    "moduleResolution": "bundler",
    "outDir": "./dist",
    "resolveJsonModule": true,
    "skipLibCheck": true,
    "sourceMap": true,
    "strict": true,
    "target": "es2022",
    "types": []
  },
  "include": ["lib"]
}
@@ -1,25 +0,0 @@
{
  "compilerOptions": {
    "allowSyntheticDefaultImports": true,
    "baseUrl": "./",
    "declaration": true,
    "emitDecoratorMetadata": true,
    "esModuleInterop": true,
    "experimentalDecorators": true,
    "forceConsistentCasingInFileNames": true,
    "incremental": true,
    "jsx": "react",
    "lib": ["dom", "es2023"],
    "module": "node16",
    "moduleResolution": "node16",
    "outDir": "./dist",
    "preserveWatchOutput": true,
    "removeComments": true,
    "resolveJsonModule": true,
    "skipLibCheck": true,
    "sourceMap": true,
    "strict": true,
    "target": "es2022"
  },
  "include": ["src", "lib"]
}
@@ -1,20 +0,0 @@
import { resolve } from 'node:path';
import { defineConfig } from 'vite';

export default defineConfig({
  resolve: {
    alias: {
      lib: resolve('lib'),
      src: resolve('src'),
      test: resolve('test'),
    },
  },
  build: {
    lib: {
      entry: resolve(__dirname, 'lib/index.ts'),
      name: 'immich-docker',
      // the proper extensions will be added
      fileName: 'immich-docker',
    },
  },
});
@@ -1 +0,0 @@
22.12.0
@@ -5,13 +5,13 @@ This website is built using [Docusaurus](https://docusaurus.io/), a modern stati
### Installation

```
-$ npm install
+$ yarn
```

### Local Development

```
-$ npm run start
+$ yarn start
```

This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server.
@@ -19,7 +19,7 @@ This command starts a local development server and opens up a browser window. Mo
### Build

```
-$ npm run build
+$ yarn build
```

This command generates static content into the `build` directory and can be served using any static contents hosting service.
@@ -29,13 +29,13 @@ This command generates static content into the `build` directory and can be serv
Using SSH:

```
-$ USE_SSH=true npm run deploy
+$ USE_SSH=true yarn deploy
```

Not using SSH:

```
-$ GIT_USER=<Your GitHub username> npm run deploy
+$ GIT_USER=<Your GitHub username> yarn deploy
```

If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch.
@@ -94,7 +94,7 @@ Thank you, and I am asking for your support for the project. I hope to be a full
- Bitcoin: 3QVAb9dCHutquVejeNXitPqZX26Yg5kxb7
- Give a project a star - the contributors love gazing at the stars and seeing their creations shining in the sky.

-Join our friendly [Discord](https://discord.immich.app) to talk and discuss Immich, tech, or anything
+Join our friendly [Discord](https://discord.gg/D8JsnBEuKb) to talk and discuss Immich, tech, or anything

Cheer!

@@ -142,7 +142,7 @@ Thank you, and I am asking for your support for the project. I hope to be a full
- Bitcoin: 3QVAb9dCHutquVejeNXitPqZX26Yg5kxb7
- Give a project a star - the contributors love gazing at the stars and seeing their creations shining in the sky.

-Join our friendly [Discord](https://discord.immich.app) to talk and discuss Immich, tech, or anything
+Join our friendly [Discord](https://discord.gg/D8JsnBEuKb) to talk and discuss Immich, tech, or anything

Cheer!
@@ -1,75 +0,0 @@
---
title: The Immich core team goes full-time
authors: [alextran]
tags: [update, announcement, FUTO]
date: 2024-05-01T00:00
---

**Immich is joining [FUTO](https://futo.org/)!**

Since the beginning of this adventure, my goal has always been to create a better world for my children. Memories are priceless, and privacy should not be a luxury. However, building quality open source has its challenges. Over the past two years, it has taken significant dedication, time, and effort.

Recently, a company in Austin, Texas, called FUTO contacted the team. FUTO strives to develop quality and sustainable open software. They build software alternatives that focus on giving control to users. From their mission statement:

“Computers should belong to you, the people. We develop and fund technology to give them back.”

FUTO loved Immich and wanted to see if we’d consider working with them to take the project to the next level. In short, FUTO offered to:

- Pay the core team to work on Immich full-time
- Let us keep full autonomy about the project’s direction and leadership
- Continue to license Immich under AGPL
- Keep Immich’s development direction with no paywalled features
- Keep Immich “built for the people” (no ads, data mining/selling, or alternative motives)
- Provide us with financial, technical, legal, and administrative support

After careful deliberation, the team decided that FUTO’s vision closely aligns with our own: to build a better future by providing a polished, performant, and privacy-preserving open-source software solution for photo and video management delivered in a sustainable way.

Immich’s future has never looked brighter, and we look forward to realizing our vision for Immich as part of FUTO.

If you have more questions, we’ll host a Q&A live stream on May 9th at 3PM UTC (10AM CST). [You can ask questions here](https://www.live-ask.com/event/01HWP2SB99A1K8EXFBDKZ5Z9CF), and the stream will be live [here on our YouTube channel](https://youtube.com/live/cwz2iZwYpgg).

Cheers,

The Immich Team

---

## FAQs

### What is FUTO?

[https://futo.org/what-is-futo/](https://futo.org/what-is-futo/)

### Will the license change?

No. Immich will continue to be licensed under AGPL without a CLA.

### Will Immich continue to be free?

Yes. The Immich source code will remain freely available under the AGPL license.

### Is Immich getting VC funding?

No. Venture capital implies investment in a business, often with the expectation of a future payout (exit plan). Immich is neither a business that can be acquired nor comes with a money-making exit plan.

### I am currently supporting Immich through GitHub sponsors. What will happen to my donation?

Effective immediately, all donations to the Immich organization will be canceled. In the future, we will offer an optional, modest payment option instead. Thank you to everyone who donated to help us get this far!

### How is funding sustainable?

Immich and FUTO believe a sustainable future requires a model that does not rely on users-as-a-product. To this end, FUTO advocates that users pay for good, open software. In keeping with this model, we will adopt a purchase price. This means we no longer accept donations, but — _without limiting features for those who do not pay_ — we will soon allow you to purchase Immich through a modest payment. We encourage you to pay for the high-quality software you use to foster a healthy software culture where developers build great applications without hidden motives for their users.

### When does this change take effect?

This change takes effect immediately.

### What will change?

The following things will change as Immich joins FUTO:

- The brand, logo, and other Immich trademarks will be transferred to FUTO.
- We will stop all donations to the project.
- The core team can now dedicate our full attention to Immich
- Before the end of the year, we plan to have a roadmap for what it will take to get Immich to a stable release.
- Bugs will be squashed, and features will be delivered faster.
Some files were not shown because too many files have changed in this diff