Compare commits
204 Commits
| Author | SHA1 | Date |
|---|---|---|
| | fa0b7c8563 | |
| | f62678f58f | |
| | 04c783f2f0 | |
| | 660b2e908d | |
| | 91efe7f7ae | |
| | 02393126e6 | |
| | 68f52818ae | |
| | 44873b4224 | |
| | 98cee8864d | |
| | 9a2fa21b28 | |
| | b98d1bf9d3 | |
| | d4146e3e6d | |
| | f0b328fb6b | |
| | c55503496f | |
| | 6f291006e4 | |
| | 574aecc1e2 | |
| | c317feaf93 | |
| | 0350058689 | |
| | a7768cc64d | |
| | 702e91145a | |
| | 4c2befc68c | |
| | 78de4f1312 | |
| | abce82e235 | |
| | d3ff2408bc | |
| | 76b66e42e1 | |
| | 7b0104f905 | |
| | 8e2d790c2a | |
| | 9300946ff1 | |
| | 6457436d91 | |
| | 9976b2ae92 | |
| | 76bad762d7 | |
| | 7fc1954e2a | |
| | f160969894 | |
| | 3e793c582e | |
| | fff3a52e60 | |
| | ba5cca9348 | |
| | 984feafb90 | |
| | 4f021a74ed | |
| | e6c0f0e3aa | |
| | aa8c54e248 | |
| | d096caccac | |
| | 2a8cb70c98 | |
| | a09fbe5723 | |
| | 6ee9c8277f | |
| | f3d9196a7e | |
| | 9a7f987835 | |
| | 5e2aec3892 | |
| | e2666f0e74 | |
| | 20be42cec0 | |
| | bd5ae9f31e | |
| | deb1f970a8 | |
| | f4edb6c4bd | |
| | 19e9908ee2 | |
| | df4af025d7 | |
| | d12a361992 | |
| | a4f49d197e | |
| | 2439c5ab57 | |
| | a1523a9af0 | |
| | 753292956e | |
| | 29747437f6 | |
| | 4f942bc182 | |
| | 0e0a472de1 | |
| | 902977f165 | |
| | 08fcce9e90 | |
| | bf1dd36fa9 | |
| | e5786b200a | |
| | 12dc7c48c9 | |
| | 26e6602ed3 | |
| | 61b97157ed | |
| | 58bd9c0018 | |
| | 8d1287ef15 | |
| | 8d0a619e81 | |
| | 29b204de57 | |
| | bf8e2966c4 | |
| | df59b2099f | |
| | 7cc0904273 | |
| | 319ddfda53 | |
| | d095382b14 | |
| | 5a66314ead | |
| | e262298090 | |
| | 6835d4519a | |
| | 84e60ea155 | |
| | 41a32b4e6b | |
| | 0750e13d3f | |
| | 78d8783a54 | |
| | 2cc5149d0b | |
| | 8c784defa0 | |
| | 8921278447 | |
| | a233e176e5 | |
| | 56cde0438c | |
| | ad09896f58 | |
| | d3af2b1f69 | |
| | ac0cb4a96e | |
| | af32183728 | |
| | d1e16025cf | |
| | 4cf1e553d2 | |
| | 2aaf941dda | |
| | 13ba83dce6 | |
| | 4f6f79a392 | |
| | ba4a20a181 | |
| | aefd93e43a | |
| | 18f59f78e3 | |
| | f8d64be13c | |
| | 52e92e9bb4 | |
| | 7b40c20ea5 | |
| | 317adc5c28 | |
| | 1e503c3212 | |
| | 282f7803bd | |
| | 2950d84820 | |
| | ee82327d2a | |
| | 13fb32513c | |
| | 103c3ee2f1 | |
| | 40fbe81c7b | |
| | 093e61eee9 | |
| | 23b38a0474 | |
| | 27be076958 | |
| | c24d0e82bb | |
| | 9a04014f98 | |
| | a7d9e25fb0 | |
| | 014adf175a | |
| | cc7ba3c21a | |
| | 2688e05033 | |
| | 4a5b8c3770 | |
| | 7dd88c4114 | |
| | 523e7d4742 | |
| | e01c96c637 | |
| | 13390d0c87 | |
| | a5a71e6b5c | |
| | a53b2de3c4 | |
| | cb3f18bb9f | |
| | c80e37aded | |
| | 03eb5ffc5c | |
| | 5f6d09d3da | |
| | 9de557916b | |
| | 0ed89e61ec | |
| | ca9cad20bc | |
| | 2e38fa73bf | |
| | 5f6bd4ae7e | |
| | 4fff2c75aa | |
| | a1e1f11399 | |
| | fd3a1a4da8 | |
| | 27bc777581 | |
| | 8119d4bb26 | |
| | 1af27fcc47 | |
| | 59cf17e5e1 | |
| | 5596b3a6a5 | |
| | f7a78618e5 | |
| | 733fa28aa2 | |
| | c0ebc943d2 | |
| | 1d0dbdff67 | |
| | 3edc87f684 | |
| | a7889e5e11 | |
| | dea1063b17 | |
| | 17ef411b0a | |
| | 83da1c74fc | |
| | 8fdd3aaed1 | |
| | aaa7a613b2 | |
| | 45cf3291a2 | |
| | 612590feda | |
| | 19ea0ead85 | |
| | e47e25e671 | |
| | 4dd7412a86 | |
| | cc2dc12f6c | |
| | 2790a46703 | |
| | f602295bf9 | |
| | 092a23fd7f | |
| | 154292242f | |
| | 8295542941 | |
| | 4505ebc315 | |
| | 19d66296fe | |
| | 64176d2ff4 | |
| | cabc2d57dd | |
| | 5a3a2c7293 | |
| | 7e216809f3 | |
| | 81af48af7b | |
| | 4e06ccd052 | |
| | 234449f3c6 | |
| | 95a7bf7fac | |
| | 1c69dff967 | |
| | f4c5bdfa1c | |
| | b40859551b | |
| | 4e9b96ff1a | |
| | baed16dab6 | |
| | a7b4727c20 | |
| | 9834693fab | |
| | 085dc6cd93 | |
| | de1514a441 | |
| | fade8b627f | |
| | d3e1572229 | |
| | ffc31f034c | |
| | 3beeffaaf0 | |
| | b68800d45c | |
| | b520955d0e | |
| | 6e7b3d6f24 | |
| | c45e8cc170 | |
| | c6f56d9591 | |
| | 691e20521d | |
| | 27f8dd6040 | |
| | e3fa32ad23 | |
| | 08f66c2ae5 | |
| | 4f38a283b4 | |
| | 00771899da | |
| | 09402eb6d0 | |
| | d9b5adf0f7 | |
```diff
@@ -1,5 +1,5 @@
 .vscode/
-cli/
 design/
 docker/
 docs/
@@ -8,7 +8,7 @@ machine-learning/
 misc/
 mobile/
 
-server/node_modules
+server/node_modules/
 server/coverage/
 server/.reverse-geocoding-dump/
 server/upload/
@@ -18,3 +18,11 @@ web/node_modules/
 web/coverage/
 web/.svelte-kit
 web/build/
+
+cli/node_modules/
+cli/.reverse-geocoding-dump/
+cli/upload/
+cli/dist/
+
+open-api/typescript-sdk/node_modules/
+open-api/typescript-sdk/build/
```
.gitattributes (vendored): 13 changes

```diff
@@ -8,14 +8,9 @@ mobile/openapi/.openapi-generator/FILES linguist-generated=true
 mobile/lib/**/*.g.dart -diff -merge
 mobile/lib/**/*.g.dart linguist-generated=true
 
-cli/src/api/open-api/**/*.md -diff -merge
-cli/src/api/open-api/**/*.md linguist-generated=true
-cli/src/api/open-api/**/*.ts -diff -merge
-cli/src/api/open-api/**/*.ts linguist-generated=true
+open-api/typescript-sdk/client/**/*.md -diff -merge
+open-api/typescript-sdk/client/**/*.md linguist-generated=true
+open-api/typescript-sdk/client/**/*.ts -diff -merge
+open-api/typescript-sdk/client/**/*.ts linguist-generated=true
 
-web/src/api/open-api/**/*.md -diff -merge
-web/src/api/open-api/**/*.md linguist-generated=true
-web/src/api/open-api/**/*.ts -diff -merge
-web/src/api/open-api/**/*.ts linguist-generated=true
-
 *.sh text eol=lf
```
.github/ISSUE_TEMPLATE/bug_report.yaml (vendored): 2 changes

```diff
@@ -1,7 +1,5 @@
 name: Report an issue with Immich
 description: Report an issue with Immich
-labels: ["bug", "need triage"]
-title: "[BUG] <title>"
 body:
   - type: markdown
     attributes:
```
.github/release.yml (vendored): 29 changes

```diff
@@ -1,29 +1,42 @@
 changelog:
   categories:
-    - title: Breaking Changes 🛠
+    - title: ⚠️ Breaking Changes
       labels:
         - breaking-change
-    - title: Server
+    - title: 🗄️ Server
       labels:
         - 🗄️server
-    - title: Mobile
+    - title: 📱 Mobile
       labels:
         - 📱mobile
-    - title: Web
+    - title: 🖥️ Web
       labels:
         - 🖥️web
-    - title: Machine Learning
+    - title: 🧠 Machine Learning
       labels:
         - 🧠machine-learning
-    - title: CLI
+    - title: ⚡ CLI
       labels:
         - cli
-    - title: Documentation
+    - title: 📓 Documentation
       labels:
         - documentation
-    - title: Dependency updates
+    - title: 🔨 Build
       labels:
+        - deployment
+
+    - title: 🤖 Dependencies
+      labels:
+        - dependencies
         - renovate
 
     - title: Other changes
       labels:
         - "*"
```
.github/workflows/test.yml (vendored): 87 changes

```diff
@@ -10,9 +10,26 @@ concurrency:
   cancel-in-progress: true
 
 jobs:
-  e2e-tests:
-    name: Server (e2e)
-    runs-on: ubuntu-latest
+  server-e2e-api:
+    name: Server (e2e-api)
+    runs-on: mich
+    defaults:
+      run:
+        working-directory: ./server
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Run npm install
+        run: npm ci
+
+      - name: Run e2e tests
+        run: npm run e2e:api
+
+  server-e2e-jobs:
+    name: Server (e2e-jobs)
+    runs-on: mich
+
     steps:
       - name: Checkout code
@@ -21,7 +38,7 @@ jobs:
         submodules: "recursive"
 
       - name: Run e2e tests
-        run: docker compose -f ./docker/docker-compose.test.yml up --renew-anon-volumes --abort-on-container-exit --exit-code-from immich-server --remove-orphans --build
+        run: make server-e2e-jobs
 
   doc-tests:
     name: Docs
@@ -41,10 +58,6 @@
         run: npm run format
         if: ${{ !cancelled() }}
 
-      - name: Run tsc
-        run: npm run check
-        if: ${{ !cancelled() }}
-
       - name: Run build
         run: npm run build
         if: ${{ !cancelled() }}
@@ -90,9 +103,17 @@
       - name: Checkout code
         uses: actions/checkout@v4
 
-      - name: Run npm install
+      - name: Run setup typescript-sdk
+        run: npm ci && npm run build
+        working-directory: ./open-api/typescript-sdk
+
+      - name: Run npm install (cli)
         run: npm ci
 
+      - name: Run npm install (server)
+        run: npm ci
+        working-directory: ./server
+
       - name: Run linter
         run: npm run lint
         if: ${{ !cancelled() }}
@@ -109,6 +130,33 @@
         run: npm run test:cov
         if: ${{ !cancelled() }}
 
+  cli-e2e-tests:
+    name: CLI (e2e)
+    runs-on: ubuntu-latest
+    defaults:
+      run:
+        working-directory: ./cli
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          submodules: "recursive"
+
+      - name: Run setup typescript-sdk
+        run: npm ci && npm run build
+        working-directory: ./open-api/typescript-sdk
+
+      - name: Run npm install (cli)
+        run: npm ci
+
+      - name: Run npm install (server)
+        run: npm ci
+        working-directory: ./server
+
+      - name: Run e2e tests
+        run: npm run test:e2e
+
   web-unit-tests:
     name: Web
     runs-on: ubuntu-latest
@@ -120,6 +168,10 @@
       - name: Checkout code
         uses: actions/checkout@v4
 
+      - name: Run setup typescript-sdk
+        run: npm ci && npm run build
+        working-directory: ./open-api/typescript-sdk
+
       - name: Run npm install
         run: npm ci
 
@@ -139,9 +191,9 @@
         run: npm run check:typescript
         if: ${{ !cancelled() }}
 
-      # - name: Run unit tests & coverage
-      #   run: npm run test:cov
-      #   if: ${{ !cancelled() }}
+      - name: Run unit tests & coverage
+        run: npm run test:cov
+        if: ${{ !cancelled() }}
 
   mobile-unit-tests:
     name: Mobile
@@ -176,13 +228,13 @@
           poetry install --with dev
       - name: Lint with ruff
         run: |
-          poetry run ruff check --format=github app export
+          poetry run ruff check --output-format=github app export
       - name: Check black formatting
         run: |
          poetry run black --check app export
       - name: Run mypy type checking
         run: |
-          poetry run mypy --install-types --non-interactive --strict app/ export/
+          poetry run mypy --install-types --non-interactive --strict app/
       - name: Run tests and coverage
         run: |
           poetry run pytest --cov app
@@ -193,14 +245,14 @@
     steps:
       - uses: actions/checkout@v4
       - name: Run API generation
-        run: npm --prefix server run api:generate
+        run: make open-api
       - name: Find file changes
         uses: tj-actions/verify-changed-files@v13.1
         id: verify-changed-files
         with:
           files: |
             mobile/openapi
-            web/src/api/open-api
+            open-api/typescript-sdk
      - name: Verify files have not changed
        if: steps.verify-changed-files.outputs.files_changed == 'true'
        run: |
@@ -242,6 +294,9 @@
       - name: Run existing migrations
         run: npm run typeorm:migrations:run
 
+      - name: Test npm run schema:reset command works
+        run: npm run typeorm:schema:reset
+
       - name: Generate new migrations
         continue-on-error: true
         run: npm run typeorm:migrations:generate ./src/infra/migrations/TestMigration
```
.gitignore (vendored): 4 changes

```diff
@@ -1,3 +1,5 @@
+**/node_modules/**
+
 .DS_Store
 .vscode/*
 !.vscode/launch.json
@@ -12,3 +14,5 @@ mobile/gradle.properties
 mobile/openapi/pubspec.lock
 mobile/*.jks
 mobile/libisar.dylib
+
+open-api/typescript-sdk/build
```
Makefile: 15 changes

```diff
@@ -16,8 +16,8 @@ stage:
 pull-stage:
 	docker compose -f ./docker/docker-compose.staging.yml pull
 
-test-e2e:
-	docker compose -f ./docker/docker-compose.test.yml up --renew-anon-volumes --abort-on-container-exit --exit-code-from immich-server --remove-orphans --build
+server-e2e-jobs:
+	docker compose -f ./server/e2e/docker-compose.server-e2e.yml up --renew-anon-volumes --abort-on-container-exit --exit-code-from immich-server --remove-orphans --build
 
 prod:
 	docker compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans
@@ -25,8 +25,15 @@ prod:
 prod-scale:
 	docker compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans
 
-api:
-	npm --prefix server run api:generate
+.PHONY: open-api
+open-api:
+	cd ./open-api && bash ./bin/generate-open-api.sh
+
+open-api-dart:
+	cd ./open-api && bash ./bin/generate-open-api.sh dart
+
+open-api-typescript:
+	cd ./open-api && bash ./bin/generate-open-api.sh typescript
 
 sql:
 	npm --prefix server run sql:generate
```
```diff
@@ -28,6 +28,7 @@
   <a href="README_nl_NL.md">Nederlands</a>
   <a href="README_tr_TR.md">Türkçe</a>
   <a href="README_zh_CN.md">中文</a>
+  <a href="README_ru_RU.md">Русский</a>
 </p>
 
 ## Disclaimer
@@ -111,7 +112,7 @@ If you feel like this is the right cause and the app is something you are seeing
 
 - [Monthly donation](https://github.com/sponsors/alextran1502) via GitHub Sponsors
 - [One-time donation](https://github.com/sponsors/alextran1502?frequency=one-time&sponsor=alextran1502) via GitHub Sponsors
-- [Librepay](https://liberapay.com/alex.tran1502/)
+- [Liberapay](https://liberapay.com/alex.tran1502/)
 - [buymeacoffee](https://www.buymeacoffee.com/altran1502)
 - Bitcoin: 1FvEp6P6NM8EZEkpGUFAN2LqJ1gxusNxZX
 - ZCash: u1smm4wvqegcp46zss2jf5xptchgeczp4rx7a0wu3mermf2wxahm26yyz5w9mw3f2p4emwlljxjumg774kgs8rntt9yags0whnzane4n67z4c7gppq4yyvcj404ne3r769prwzd9j8ntvqp44fa6d67sf7rmcfjmds3gmeceff4u8e92rh38nd30cr96xw6vfhk6scu4ws90ldzupr3sz
```
```diff
@@ -102,7 +102,7 @@ Spec: Free-tier Oracle VM - Amsterdam - 2.4Ghz quad-core ARM64 CPU, 24GB RAM
 
 私はこのプロジェクトにコミットしてきました。ドキュメントを更新し、新しい機能を追加し、バグを修正し続けるつもりですが、私ひとりではできません。だから、続けるためのモチベーションをさらに高めてくれる皆さんの助けが必要なのです。
 
-[selfhosted.show - In the episode 'The-organization-must-いいえt-be-name is a Hostile Actor'](https://selfhosted.show/79?t=1418) のホストが言ったように、これはチームと私がやっていることの大規模な事業だ。そしていつの日か、フルタイムでこの仕事ができるようになりたいと思っています。
+[selfhosted.show - In the episode 'The-organization-must-not-be-name is a Hostile Actor'](https://selfhosted.show/79?t=1418) のホストが言ったように、これはチームと私がやっていることの大規模な事業だ。そしていつの日か、フルタイムでこの仕事ができるようになりたいと思っています。
 
 もし、あなたがこのプロジェクトに賛同し、このアプリを長く使い続けたいと思われるのであれば、以下のオプションから支援をご検討ください。
 
```
```diff
@@ -102,7 +102,7 @@ Spec: Free-tier Oracle VM - Amsterdam - 2.4Ghz quad-core ARM64 CPU, 24GB RAM
 
 Ik ben trouw aan dit project en ik zal niet stoppen. Ik zal de documenten blijven bijwerken, nieuwe functies toevoegen en bugs oplossen. Maar ik kan het niet alleen. Ik heb dus jouw hulp nodig om mij extra motivatie te geven om door te gaan.
 
-Als onze gastheren in de [selfhosted.show - In de aflevering 'The-organization-must-Neet-be-name is a Hostile Actor'](https://selfhosted.show/79?t=1418) zeiden, dit is een eNeerme onderneming van wat het team en ik doen. En ik zou dit graag fulltime willen doen, ik vraag jouw hulp om dat mogelijk te maken.
+Als onze gastheren in de [selfhosted.show - In de aflevering 'The-organization-must-Neet-be-name is a Hostile Actor'](https://selfhosted.show/79?t=1418) zeiden, dit is een enorme onderneming van wat het team en ik doen. En ik zou dit graag fulltime willen doen, ik vraag jouw hulp om dat mogelijk te maken.
 
 Als je denkt dat dit het juiste doel is en de app iets is dat je jezelf al heel lang ziet gebruiken, overweeg dan om het project te steunen met de onderstaande optie.
 
```
README_ru_RU.md (new file): 124 lines

````diff
@@ -0,0 +1,124 @@
+<p align="center">
+  <br/>
+  <a href="https://opensource.org/licenses/MIT"><img src="https://img.shields.io/badge/license-MIT-green.svg?color=3F51B5&style=for-the-badge&label=License&logoColor=000000&labelColor=ececec" alt="License: MIT"></a>
+  <a href="https://discord.gg/D8JsnBEuKb">
+    <img src="https://img.shields.io/discord/979116623879368755.svg?label=Discord&logo=Discord&style=for-the-badge&logoColor=000000&labelColor=ececec" alt="Discord"/>
+  </a>
+  <br/>
+  <br/>
+</p>
+
+<p align="center">
+<img src="design/immich-logo.svg" width="150" title="Login With Custom URL">
+</p>
+<h3 align="center">Immich - Высокопроизводительное решение для автономоного создания фото и видео архивов</h3>
+<br/>
+<a href="https://immich.app">
+<img src="design/immich-screenshots.png" title="Main Screenshot">
+</a>
+<br/>
+<p align="center">
+<a href="README_ca_ES.md">Català</a>
+<a href="README_es_ES.md">Español</a>
+<a href="README_fr_FR.md">Français</a>
+<a href="README_it_IT.md">Italiano</a>
+<a href="README_ja_JP.md">日本語</a>
+<a href="README_ko_KR.md">한국어</a>
+<a href="README_de_DE.md">Deutsch</a>
+<a href="README_nl_NL.md">Nederlands</a>
+<a href="README_tr_TR.md">Türkçe</a>
+<a href="README_zh_CN.md">中文</a>
+<a href="README_ru_RU.md">Русский</a>
+</p>
+
+## Предупреждение
+
+- ⚠️ Этот проект находится **в очень активной** разработке.
+- ⚠️ Ожидайте ошибок и критических изменение.
+- ⚠️ **Не используйте это приложение для бекапа ваших фото и видео.**
+- ⚠️ Всегда следуйте [3-2-1](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) плану резервного копирования ваших драгоценных фото и видео!
+
+## Содержание
+
+- [Официальная документация](https://immich.app/docs)
+- [План разработки](https://github.com/orgs/immich-app/projects/1)
+- [Демо](#demo)
+- [Возможности](#features)
+- [Введение](https://immich.app/docs/overview/introduction)
+- [Инсталяция](https://immich.app/docs/install/requirements)
+- [Гайд по доработке проекта](https://immich.app/docs/overview/support-the-project)
+- [Поддержки проект](#support-the-project)
+
+## Документация
+
+Вы можете найти основную документация, включая инструкции по установке по ссылке https://immich.app/.
+
+## Демо
+
+Вы можете посмотреть веб демо по ссылке https://demo.immich.app
+
+Для мобильного приложения вы можете использовать адрес `https://demo.immich.app/api` в поле `Server Endpoint URL`
+
+```bash title="Демо доступ"
+Реквизиты доступа
+логин/почта: demo@immich.app
+пароль: demo
+```
+
+```
+Spec: Free-tier Oracle VM - Amsterdam - 2.4Ghz quad-core ARM64 CPU, 24GB RAM
+```
+
+## Возможности
+
+| Возможности | Приложение | Веб |
+| --------------------------------------------------- | ---------- | --- |
+| Выгрузка на сервер и просмотр видео и фото | Да | Да |
+| Авто бекап когда приложение открыто | Да | Н/Д |
+| Выбор альбома(ов) для бекапа | Да | Н/Д |
+| загрузка с сервера фото и видео на устройство | Да | Да |
+| Поддержка нескольких пользователей | Да | Да |
+| Альбомы и общие альбомы | Да | Да |
+| Прокручиваемая/перетаскиваемая полоса прокрутки | Да | Да |
+| Поддержка формата RAW | Да | Да |
+| Просмотр метаданных (EXIF, map) | Да | Да |
+| Поиск до метаданным, объектам, лицам и CLIP | Да | Да |
+| Административные функци (управление пользователями) | Нет | Да |
+| Фоновый бекпа | Да | Н/Д |
+| Виртуальная прокрутка | Да | Да |
+| Поддержка OAuth | Да | Да |
+| Ключи API | Н/Д | Да |
+| LivePhoto/MotionPhoto бекап и воспроизведение | Да | Да |
+| Настраиваемая структура хранилища | Да | Да |
+| Публичные альбомы | Нет | Да |
+| Архив и Избранное | Да | Да |
+| Мировая карта | Да | Да |
+| Совместное использование | Да | Да |
+| Распознавание лиц и группировка по лицам | Да | Да |
+| В этот день (x лет назад) | Да | Да |
+| Работа без интернета | Да | Нет |
+| Галлереи только для просмотра | Да | Да |
+| Колллажи | Да | Да |
+
+## Поддержка проекта
+
+Я посвятил себя этому проекту и не остановлюсь. Я буду продолжать обновлять документацию, добавлять новые функции и исправлять ошибки. Но я не могу сделать это один. Поэтому мне нужна ваша помощь, чтобы дать мне дополнительную мотивацию продолжать идти дальше.
+
+Как сказали наши покровители [selfhosted.show - In the episode 'The-organization-must-not-be-name is a Hostile Actor'](https://selfhosted.show/79?t=1418), это масштабная работа, которую мы с командой делаем. И мне бы очень хотелось когда-нибудь иметь возможность заниматься этим на постоянной основе, и я прошу вашей помощи, чтобы это произошло.
+
+Если вы считаете, что это правильная причина и вы уже давно используете это приложение, рассмотрите возможность финансовой поддержки проекта, выбрав вариант ниже.
+
+### Пожертвование
+
+- [Ежемесячное пожертвование](https://github.com/sponsors/alextran1502) via GitHub Sponsors
+- [Одноразовое пожертвование](https://github.com/sponsors/alextran1502?frequency=one-time&sponsor=alextran1502) via GitHub Sponsors
+- [Librepay](https://liberapay.com/alex.tran1502/)
+- [buymeacoffee](https://www.buymeacoffee.com/altran1502)
+- Bitcoin: 1FvEp6P6NM8EZEkpGUFAN2LqJ1gxusNxZX
+- ZCash: u1smm4wvqegcp46zss2jf5xptchgeczp4rx7a0wu3mermf2wxahm26yyz5w9mw3f2p4emwlljxjumg774kgs8rntt9yags0whnzane4n67z4c7gppq4yyvcj404ne3r769prwzd9j8ntvqp44fa6d67sf7rmcfjmds3gmeceff4u8e92rh38nd30cr96xw6vfhk6scu4ws90ldzupr3sz
+
+## Авторы
+<a href="https://github.com/alextran1502/immich/graphs/contributors">
+<img src="https://contrib.rocks/image?repo=immich-app/immich" width="100%"/>
+</a>
````
```diff
@@ -17,7 +17,7 @@
 </p>
 <br/>
 <a href="https://immich.app">
-  <img src="design/immich-screenshots.png" title="Main Screenshot">
+  <img src="design/immich-screenshots.png" title="界面截图">
 </a>
 <br/>
 
@@ -32,6 +32,7 @@
   <a href="README_de_DE.md">Deutsch</a>
   <a href="README_nl_NL.md">Nederlands</a>
   <a href="README_tr_TR.md">Türkçe</a>
+  <a href="README_ru_RU.md">Русский</a>
 </p>
 
 ## 免责声明
@@ -39,7 +40,7 @@
 - ⚠️ 本项目正在 **非常活跃** 地开发中。
 - ⚠️ 可能存在 bug 或者随时有重大变更。
 - ⚠️ **不要把本软件作为您存储照片或视频的唯一方式。**
-- ⚠️ 为了您宝贵的照片与视频,始终遵守 [3-2-1](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) 备份方案!
+- ⚠️ 为了您宝贵的照片与视频,请始终遵守 [3-2-1](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) 备份方案!
 
 ## 目录
 
@@ -74,40 +75,41 @@
 
 # 功能特性
 
 | 功能特性 | 移动端 | 网页端 |
-| ------------------------------------------- | ------- | --- |
+|---------------------------------------------|--------|--------|
 | 上传并查看照片和视频 | 是 | 是 |
 | 软件运行时自动备份 | 是 | N/A |
 | 选择需要备份的相册 | 是 | N/A |
 | 下载照片和视频到本地 | 是 | 是 |
 | 多用户支持 | 是 | 是 |
-| 相册 | 是 | 是 |
-| 共享相册 | 是 | 是 |
-| 可拖动的快速导航栏 | 是 | 是 |
-| 支持RAW格式 (HEIC, HEIF, DNG, Apple ProRaw) | 是 | 是 |
-| 元数据视图(EXIF, 地图) | 是 | 是 |
-| 通过元数据、对象和标签进行搜索 | 是 | 是 |
-| 管理功能(用户管理) | 否 | 是 |
-| 后台备份 | 是 | N/A |
-| 虚拟滚动 | 是 | 是 |
-| OAuth 支持 | 是 | 是 |
-| API Keys|N/A|是|
-| 实况照片备份和查看 | 仅 iOS | 是 |
-|用户自定义存储结构|是|是|
-|公共分享|否|是|
-|归档与收藏功能|是|是|
-|全局地图|否|是|
-|好友分享|是|是|
-|人像识别与分组|是|是|
-|回忆(那年今日)|是|是|
-|离线支持|是|否|
-|只读相册|是|是|
+| 相册与共享相册 | 是 | 是 |
+| 可拖动的快速导航栏 | 是 | 是 |
+| 支持RAW格式 | 是 | 是 |
+| 元数据视图(EXIF、地图) | 是 | 是 |
+| 通过元数据、对象、人脸和标签进行搜索 | 是 | 是 |
+| 管理功能(用户管理) | 否 | 是 |
+| 后台备份 | 是 | N/A |
+| 虚拟滚动 | 是 | 是 |
+| OAuth 支持 | 是 | 是 |
+| API Keys | N/A | 是 |
+| 实况照片备份和查看 | 是 | 是 |
+| 用户自定义存储结构 | 是 | 是 |
+| 公共分享 | 否 | 是 |
+| 归档与收藏功能 | 是 | 是 |
+| 足迹地图 | 是 | 是 |
+| 好友分享 | 是 | 是 |
+| 人脸识别与分组 | 是 | 是 |
+| 回忆(那年今日) | 是 | 是 |
+| 离线支持 | 是 | 否 |
+| 只读相册 | 是 | 是 |
+| 照片堆叠 | 是 | 是 |
 
 # 支持本项目
 
-我已经致力于本项目并且将我会持续更新文档、新增功能和修复问题。但是独木不成林,我需要您给予我坚持下去的动力。
+我已经致力于本项目并且我将会持续更新文档、新增功能和修复问题。但是独木不成林,我需要您给予我坚持下去的动力。
 
 就像我在 [selfhosted.show - In the episode 'The-organization-must-not-be-name is a Hostile Actor'](https://selfhosted.show/79?t=1418) 节目里说的一样,这是我和团队的一项艰巨任务。并且我希望某一天我能够全职开发本项目,在此我请求您能够助我梦想成真。
 
 如果您使用了本项目一段时间,并且觉得上面的话有道理,那么请您考虑通过下列任一方式支持我吧。
 
@@ -118,3 +120,9 @@
 - [Librepay](https://liberapay.com/alex.tran1502/)
 - [buymeacoffee](https://www.buymeacoffee.com/altran1502)
 - 比特币: 1FvEp6P6NM8EZEkpGUFAN2LqJ1gxusNxZX
+- ZCash: u1smm4wvqegcp46zss2jf5xptchgeczp4rx7a0wu3mermf2wxahm26yyz5w9mw3f2p4emwlljxjumg774kgs8rntt9yags0whnzane4n67z4c7gppq4yyvcj404ne3r769prwzd9j8ntvqp44fa6d67sf7rmcfjmds3gmeceff4u8e92rh38nd30cr96xw6vfhk6scu4ws90ldzupr3sz
+
+## 贡献者
+<a href="https://github.com/alextran1502/immich/graphs/contributors">
+<img src="https://contrib.rocks/image?repo=immich-app/immich" width="100%"/>
+</a>
```
cli/.gitignore (vendored): 2 changes

```diff
@@ -11,3 +11,5 @@ oclif.manifest.json
 .vscode
 .idea
 /coverage/
+.reverse-geocoding-dump/
+upload/
```
```diff
@@ -1,4 +1,6 @@
 **/*.spec.js
+test/**
+upload/**
 .editorconfig
 .eslintignore
 .eslintrc.js
```
```diff
@@ -5,7 +5,6 @@ node_modules
 .env
 .env.*
 !.env.example
-src/api/open-api
 *.md
 *.json
 coverage
```
cli/Dockerfile (new file): 19 lines

```diff
@@ -0,0 +1,19 @@
+FROM ghcr.io/immich-app/base-server-dev:20240111@sha256:5acf773796f93c7a3216ffdbdb3604dc812f2b2317b84a1b57b65674826b746a as test
+
+WORKDIR /usr/src/app/server
+COPY server/package.json server/package-lock.json ./
+RUN npm ci
+COPY ./server/ .
+
+WORKDIR /usr/src/app/cli
+COPY cli/package.json cli/package-lock.json ./
+RUN npm ci
+COPY ./cli/ .
+
+FROM ghcr.io/immich-app/base-server-prod:20240111@sha256:e917605008977f68dc3b6f7879c264cae4bff6c4186b119a6e114a60f8f5a354
+
+VOLUME /usr/src/app/upload
+
+EXPOSE 3001
+
+ENTRYPOINT ["tini", "--", "/bin/sh"]
```
cli/package-lock.json (generated): 3491 changes

```diff
@@ -1,6 +1,6 @@
 {
   "name": "@immich/cli",
-  "version": "2.0.4",
+  "version": "2.0.6",
   "description": "Command Line Interface (CLI) for Immich",
   "main": "dist/index.js",
   "bin": {
@@ -12,31 +12,31 @@
     "cli"
   ],
   "dependencies": {
+    "@immich/sdk": "file:../open-api/typescript-sdk",
     "axios": "^1.6.2",
     "byte-size": "^8.1.1",
     "cli-progress": "^3.12.0",
     "commander": "^11.0.0",
     "form-data": "^4.0.0",
     "glob": "^10.3.1",
+    "graceful-fs": "^4.2.11",
     "yaml": "^2.3.1"
   },
   "devDependencies": {
+    "@testcontainers/postgresql": "^10.4.0",
     "@types/byte-size": "^8.1.0",
-    "@types/chai": "^4.3.5",
     "@types/cli-progress": "^3.11.0",
     "@types/jest": "^29.5.2",
-    "@types/js-yaml": "^4.0.5",
-    "@types/mime-types": "^2.1.1",
     "@types/mock-fs": "^4.13.1",
     "@types/node": "^20.3.1",
     "@typescript-eslint/eslint-plugin": "^6.0.0",
     "@typescript-eslint/parser": "^6.0.0",
-    "chai": "^4.3.7",
     "eslint": "^8.43.0",
     "eslint-config-prettier": "^9.0.0",
     "eslint-plugin-jest": "^27.2.2",
     "eslint-plugin-prettier": "^5.0.0",
-    "eslint-plugin-unicorn": "^49.0.0",
+    "eslint-plugin-unicorn": "^50.0.0",
+    "immich": "file:../server",
     "jest": "^29.5.0",
     "jest-extended": "^4.0.0",
     "jest-message-util": "^29.5.0",
@@ -50,13 +50,15 @@
   },
   "scripts": {
     "build": "tsc --project tsconfig.build.json",
-    "lint": "eslint \"src/**/*.ts\" --max-warnings 0",
+    "lint": "eslint \"src/**/*.ts\" \"test/**/*.ts\" --max-warnings 0",
+    "lint:fix": "npm run lint -- --fix",
     "prepack": "npm run build",
     "test": "jest",
     "test:cov": "jest --coverage",
     "format": "prettier --check .",
     "format:fix": "prettier --write .",
-    "check": "tsc --noEmit"
+    "check": "tsc --noEmit",
+    "test:e2e": "jest --config test/e2e/jest-e2e.json --runInBand"
   },
   "jest": {
     "clearMocks": true,
@@ -71,10 +73,15 @@
       "^.+\\.ts$": "ts-jest"
     },
     "collectCoverageFrom": [
-      "<rootDir>/src/**/*.(t|j)s"
+      "<rootDir>/src/**/*.(t|j)s",
+      "!**/open-api/**"
     ],
     "moduleNameMapper": {
-      "^@api(|/.*)$": "<rootDir>/src/api/$1"
+      "^@api(|/.*)$": "<rootDir>/src/api/$1",
+      "^@test(|/.*)$": "<rootDir>../server/test/$1",
+      "^@app/immich(|/.*)$": "<rootDir>../server/src/immich/$1",
+      "^@app/infra(|/.*)$": "<rootDir>../server/src/infra/$1",
+      "^@app/domain(|/.*)$": "<rootDir>../server/src/domain/$1"
     },
     "coverageDirectory": "./coverage",
     "testEnvironment": "node"
```
```diff
@@ -9,7 +9,7 @@ import {
   ServerInfoApi,
   SystemConfigApi,
   UserApi,
-} from './open-api';
+} from '@immich/sdk';
 import { ApiConfiguration } from '../cores/api-configuration';
 import FormData from 'form-data';
 
```
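The two deletions that follow remove the OpenAPI-generated helper and configuration modules that the CLI previously vendored under `cli/src/api/open-api`; as the import change above shows, the same generated classes now come from the `@immich/sdk` package. A minimal consumer-side sketch, assuming `@immich/sdk` re-exports the generator's `Configuration` and per-resource API classes (the wrapper below is illustrative, not the repository's actual `ImmichApi`):

```typescript
// Before: classes came from the vendored generated client that is deleted below.
// import { ServerInfoApi, SystemConfigApi, UserApi } from './open-api';

// After: the same generated classes are imported from the SDK package.
import { Configuration, ServerInfoApi, SystemConfigApi, UserApi } from '@immich/sdk';

// Illustrative wrapper grouping the per-resource APIs behind one object.
export class ImmichApiSketch {
  public readonly serverInfoApi: ServerInfoApi;
  public readonly systemConfigApi: SystemConfigApi;
  public readonly userApi: UserApi;

  constructor(instanceUrl: string, apiKey: string) {
    // Configuration's { basePath, apiKey } parameters match the generated
    // ConfigurationParameters interface shown in the deleted file below.
    const config = new Configuration({ basePath: instanceUrl, apiKey });
    this.serverInfoApi = new ServerInfoApi(config);
    this.systemConfigApi = new SystemConfigApi(config);
    this.userApi = new UserApi(config);
  }
}
```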
```diff
@@ -1,150 +0,0 @@
-/* tslint:disable */
-/* eslint-disable */
-/**
- * Immich
- * Immich API
- *
- * The version of the OpenAPI document: 1.91.2
- *
- *
- * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
- * https://openapi-generator.tech
- * Do not edit the class manually.
- */
-
-
-import type { Configuration } from "./configuration";
-import type { RequestArgs } from "./base";
-import type { AxiosInstance, AxiosResponse } from 'axios';
-import { RequiredError } from "./base";
-
-/**
- *
- * @export
- */
-export const DUMMY_BASE_URL = 'https://example.com'
-
-/**
- *
- * @throws {RequiredError}
- * @export
- */
-export const assertParamExists = function (functionName: string, paramName: string, paramValue: unknown) {
-    if (paramValue === null || paramValue === undefined) {
-        throw new RequiredError(paramName, `Required parameter ${paramName} was null or undefined when calling ${functionName}.`);
-    }
-}
-
-/**
- *
- * @export
- */
-export const setApiKeyToObject = async function (object: any, keyParamName: string, configuration?: Configuration) {
-    if (configuration && configuration.apiKey) {
-        const localVarApiKeyValue = typeof configuration.apiKey === 'function'
-            ? await configuration.apiKey(keyParamName)
-            : await configuration.apiKey;
-        object[keyParamName] = localVarApiKeyValue;
-    }
-}
-
-/**
- *
- * @export
- */
-export const setBasicAuthToObject = function (object: any, configuration?: Configuration) {
-    if (configuration && (configuration.username || configuration.password)) {
-        object["auth"] = { username: configuration.username, password: configuration.password };
-    }
-}
-
-/**
- *
- * @export
- */
-export const setBearerAuthToObject = async function (object: any, configuration?: Configuration) {
-    if (configuration && configuration.accessToken) {
-        const accessToken = typeof configuration.accessToken === 'function'
-            ? await configuration.accessToken()
-            : await configuration.accessToken;
-        object["Authorization"] = "Bearer " + accessToken;
-    }
-}
-
-/**
- *
- * @export
- */
-export const setOAuthToObject = async function (object: any, name: string, scopes: string[], configuration?: Configuration) {
-    if (configuration && configuration.accessToken) {
-        const localVarAccessTokenValue = typeof configuration.accessToken === 'function'
-            ? await configuration.accessToken(name, scopes)
-            : await configuration.accessToken;
-        object["Authorization"] = "Bearer " + localVarAccessTokenValue;
-    }
-}
-
-function setFlattenedQueryParams(urlSearchParams: URLSearchParams, parameter: any, key: string = ""): void {
-    if (parameter == null) return;
-    if (typeof parameter === "object") {
-        if (Array.isArray(parameter)) {
-            (parameter as any[]).forEach(item => setFlattenedQueryParams(urlSearchParams, item, key));
-        }
-        else {
-            Object.keys(parameter).forEach(currentKey =>
-                setFlattenedQueryParams(urlSearchParams, parameter[currentKey], `${key}${key !== '' ? '.' : ''}${currentKey}`)
-            );
-        }
-    }
-    else {
-        if (urlSearchParams.has(key)) {
-            urlSearchParams.append(key, parameter);
-        }
-        else {
-            urlSearchParams.set(key, parameter);
-        }
-    }
-}
-
-/**
- *
- * @export
- */
-export const setSearchParams = function (url: URL, ...objects: any[]) {
-    const searchParams = new URLSearchParams(url.search);
-    setFlattenedQueryParams(searchParams, objects);
-    url.search = searchParams.toString();
-}
-
-/**
- *
- * @export
- */
-export const serializeDataIfNeeded = function (value: any, requestOptions: any, configuration?: Configuration) {
-    const nonString = typeof value !== 'string';
-    const needsSerialization = nonString && configuration && configuration.isJsonMime
-        ? configuration.isJsonMime(requestOptions.headers['Content-Type'])
-        : nonString;
-    return needsSerialization
-        ? JSON.stringify(value !== undefined ? value : {})
-        : (value || "");
-}
-
-/**
- *
- * @export
- */
-export const toPathString = function (url: URL) {
-    return url.pathname + url.search + url.hash
-}
-
-/**
- *
- * @export
- */
-export const createRequestFunction = function (axiosArgs: RequestArgs, globalAxios: AxiosInstance, BASE_PATH: string, configuration?: Configuration) {
-    return <T = unknown, R = AxiosResponse<T>>(axios: AxiosInstance = globalAxios, basePath: string = BASE_PATH) => {
-        const axiosRequestArgs = {...axiosArgs.options, url: (configuration?.basePath || basePath) + axiosArgs.url};
-        return axios.request<T, R>(axiosRequestArgs);
-    };
-}
```
```diff
@@ -1,101 +0,0 @@
-/* tslint:disable */
-/* eslint-disable */
-/**
- * Immich
- * Immich API
- *
- * The version of the OpenAPI document: 1.91.2
- *
- *
- * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
- * https://openapi-generator.tech
- * Do not edit the class manually.
- */
-
-
-export interface ConfigurationParameters {
-    apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>);
-    username?: string;
-    password?: string;
-    accessToken?: string | Promise<string> | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise<string>);
-    basePath?: string;
-    baseOptions?: any;
-    formDataCtor?: new () => any;
-}
-
-export class Configuration {
-    /**
-     * parameter for apiKey security
-     * @param name security name
-     * @memberof Configuration
-     */
-    apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>);
-    /**
-     * parameter for basic security
-     *
-     * @type {string}
-     * @memberof Configuration
-     */
-    username?: string;
-    /**
-     * parameter for basic security
-     *
-     * @type {string}
-     * @memberof Configuration
-     */
-    password?: string;
-    /**
-     * parameter for oauth2 security
-     * @param name security name
-     * @param scopes oauth2 scope
-     * @memberof Configuration
-     */
-    accessToken?: string | Promise<string> | ((name?: string, scopes?: string[]) => string) | ((name?: string, scopes?: string[]) => Promise<string>);
-    /**
-     * override base path
-     *
-     * @type {string}
-     * @memberof Configuration
-     */
-    basePath?: string;
-    /**
-     * base options for axios calls
-     *
-     * @type {any}
-     * @memberof Configuration
-     */
-    baseOptions?: any;
-    /**
-     * The FormData constructor that will be used to create multipart form data
-     * requests. You can inject this here so that execution environments that
-     * do not support the FormData class can still run the generated client.
-     *
-     * @type {new () => FormData}
-     */
-    formDataCtor?: new () => any;
-
-    constructor(param: ConfigurationParameters = {}) {
-        this.apiKey = param.apiKey;
-        this.username = param.username;
-        this.password = param.password;
-        this.accessToken = param.accessToken;
-        this.basePath = param.basePath;
-        this.baseOptions = param.baseOptions;
-        this.formDataCtor = param.formDataCtor;
-    }
-
-    /**
-     * Check if the given MIME is a JSON MIME.
-     * JSON MIME examples:
-     *   application/json
-     *   application/json; charset=UTF8
-     *   APPLICATION/JSON
-     *   application/vnd.company+json
-     * @param mime - MIME (Multipurpose Internet Mail Extensions)
-     * @return True if the given MIME is JSON, false otherwise.
-     */
-    public isJsonMime(mime: string): boolean {
-        const jsonMime: RegExp = new RegExp('^(application\/json|[^;/ \t]+\/[^;/ \t]+[+]json)[ \t]*(;.*)?$', 'i');
-        return mime !== null && (jsonMime.test(mime) || mime.toLowerCase() === 'application/json-patch+json');
-    }
-}
```
```diff
@@ -1,10 +1,9 @@
 import { ImmichApi } from '../api/client';
-import path from 'node:path';
 import { SessionService } from '../services/session.service';
 import { LoginError } from '../cores/errors/login-error';
 import { exit } from 'node:process';
-import os from 'os';
-import { ServerVersionResponseDto, UserResponseDto } from 'src/api/open-api';
+import { ServerVersionResponseDto, UserResponseDto } from '@immich/sdk';
+import { BaseOptionsDto } from 'src/cores/dto/base-options-dto';
 
 export abstract class BaseCommand {
   protected sessionService!: SessionService;
@@ -12,14 +11,11 @@ export abstract class BaseCommand {
   protected user!: UserResponseDto;
   protected serverVersion!: ServerVersionResponseDto;
 
-  protected configDir;
-  protected authPath;
-
-  constructor() {
-    const userHomeDir = os.homedir();
-    this.configDir = path.join(userHomeDir, '.config/immich/');
-    this.sessionService = new SessionService(this.configDir);
-    this.authPath = path.join(this.configDir, 'auth.yml');
+  constructor(options: BaseOptionsDto) {
+    if (!options.config) {
+      throw new Error('Config directory is required');
+    }
+    this.sessionService = new SessionService(options.config);
   }
 
   public async connect(): Promise<void> {
```
```diff
@@ -1,6 +1,6 @@
 import { BaseCommand } from '../../cli/base-command';
 
-export default class LoginKey extends BaseCommand {
+export class LoginKey extends BaseCommand {
   public async run(instanceUrl: string, apiKey: string): Promise<void> {
     console.log('Executing API key auth flow...');
 
```
```diff
@@ -1,6 +1,6 @@
 import { BaseCommand } from '../cli/base-command';
 
-export default class Logout extends BaseCommand {
+export class Logout extends BaseCommand {
   public static readonly description = 'Logout and remove persisted credentials';
 
   public async run(): Promise<void> {
```
```diff
@@ -1,6 +1,6 @@
 import { BaseCommand } from '../cli/base-command';
 
-export default class ServerInfo extends BaseCommand {
+export class ServerInfo extends BaseCommand {
   public async run() {
     await this.connect();
     const { data: versionInfo } = await this.immichApi.serverInfoApi.getServerVersion();
```
@@ -2,21 +2,19 @@ import { Asset } from '../cores/models/asset';
|
|||||||
import { CrawlService } from '../services';
|
import { CrawlService } from '../services';
|
||||||
import { UploadOptionsDto } from '../cores/dto/upload-options-dto';
|
import { UploadOptionsDto } from '../cores/dto/upload-options-dto';
|
||||||
import { CrawlOptionsDto } from '../cores/dto/crawl-options-dto';
|
import { CrawlOptionsDto } from '../cores/dto/crawl-options-dto';
|
||||||
|
import fs from 'node:fs';
|
||||||
import cliProgress from 'cli-progress';
|
import cliProgress from 'cli-progress';
|
||||||
import byteSize from 'byte-size';
|
import byteSize from 'byte-size';
|
||||||
import { BaseCommand } from '../cli/base-command';
|
import { BaseCommand } from '../cli/base-command';
|
||||||
import axios, { AxiosRequestConfig } from 'axios';
|
import axios, { AxiosRequestConfig, AxiosResponse } from 'axios';
|
||||||
import FormData from 'form-data';
|
import FormData from 'form-data';
|
||||||
|
|
||||||
export default class Upload extends BaseCommand {
|
export class Upload extends BaseCommand {
|
||||||
uploadLength!: number;
|
uploadLength!: number;
|
||||||
|
|
||||||
public async run(paths: string[], options: UploadOptionsDto): Promise<void> {
|
public async run(paths: string[], options: UploadOptionsDto): Promise<void> {
|
||||||
await this.connect();
|
await this.connect();
|
||||||
|
|
||||||
const deviceId = 'CLI';
|
|
||||||
|
|
||||||
const formatResponse = await this.immichApi.serverInfoApi.getSupportedMediaTypes();
|
const formatResponse = await this.immichApi.serverInfoApi.getSupportedMediaTypes();
|
||||||
const crawlService = new CrawlService(formatResponse.data.image, formatResponse.data.video);
|
const crawlService = new CrawlService(formatResponse.data.image, formatResponse.data.video);
|
||||||
|
|
||||||
@@ -24,15 +22,28 @@ export default class Upload extends BaseCommand {
     crawlOptions.pathsToCrawl = paths;
     crawlOptions.recursive = options.recursive;
     crawlOptions.exclusionPatterns = options.exclusionPatterns;
+    crawlOptions.includeHidden = options.includeHidden;
+
+    const files: string[] = [];
+    for (const pathArgument of paths) {
+      const fileStat = await fs.promises.lstat(pathArgument);
+      if (fileStat.isFile()) {
+        files.push(pathArgument);
+      }
+    }
+
     const crawledFiles: string[] = await crawlService.crawl(crawlOptions);
+    crawledFiles.push(...files);
+
     if (crawledFiles.length === 0) {
       console.log('No assets found, exiting');
       return;
     }

-    const assetsToUpload = crawledFiles.map((path) => new Asset(path, deviceId));
+    const assetsToUpload = crawledFiles.map((path) => new Asset(path));

     const uploadProgress = new cliProgress.SingleBar(
       {
@@ -49,8 +60,12 @@ export default class Upload extends BaseCommand {
     for (const asset of assetsToUpload) {
       // Compute total size first
-      await asset.process();
+      await asset.prepare();
       totalSize += asset.fileSize;
+
+      if (options.albumName) {
+        asset.albumName = options.albumName;
+      }
     }

     const existingAlbums = (await this.immichApi.albumApi.getAllAlbums()).data;
@@ -65,6 +80,10 @@ export default class Upload extends BaseCommand {
       });

       let skipUpload = false;
+      let skipAsset = false;
+      let existingAssetId: string | undefined = undefined;

       if (!options.skipHash) {
         const assetBulkUploadCheckDto = { assets: [{ id: asset.path, checksum: await asset.hash() }] };
@@ -73,14 +92,26 @@ export default class Upload extends BaseCommand {
         });

         skipUpload = checkResponse.data.results[0].action === 'reject';
+
+        const isDuplicate = checkResponse.data.results[0].reason === 'duplicate';
+        if (isDuplicate) {
+          existingAssetId = checkResponse.data.results[0].assetId;
+        }
+
+        skipAsset = skipUpload && !isDuplicate;
       }

-      if (!skipUpload) {
+      if (!skipAsset) {
         if (!options.dryRun) {
-          const formData = asset.getUploadFormData();
-          const res = await this.uploadAsset(formData);
+          if (!skipUpload) {
+            const formData = asset.getUploadFormData();
+            const res = await this.uploadAsset(formData);
+            existingAssetId = res.data.id;
+            uploadCounter++;
+            totalSizeUploaded += asset.fileSize;
+          }

-          if (options.album && asset.albumName) {
+          if ((options.album || options.albumName) && asset.albumName !== undefined) {
            let album = existingAlbums.find((album) => album.albumName === asset.albumName);
            if (!album) {
              const res = await this.immichApi.albumApi.createAlbum({
@@ -90,12 +121,14 @@ export default class Upload extends BaseCommand {
              existingAlbums.push(album);
            }

-            await this.immichApi.albumApi.addAssetsToAlbum({ id: album.id, bulkIdsDto: { ids: [res.data.id] } });
+            if (existingAssetId) {
+              await this.immichApi.albumApi.addAssetsToAlbum({
+                id: album.id,
+                bulkIdsDto: { ids: [existingAssetId] },
+              });
+            }
          }
        }

-        totalSizeUploaded += asset.fileSize;
-        uploadCounter++;
      }

      sizeSoFar += asset.fileSize;
@@ -138,7 +171,7 @@ export default class Upload extends BaseCommand {
    }
  }

-  private async uploadAsset(data: FormData): Promise<axios.AxiosResponse> {
+  private async uploadAsset(data: FormData): Promise<AxiosResponse> {
    const url = this.immichApi.apiConfiguration.instanceUrl + '/asset/upload';

    const config: AxiosRequestConfig = {
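Note: the reworked loop above distinguishes between skipping the upload and skipping the asset entirely. A minimal sketch of that decision, using the `action`/`reason`/`assetId` fields the bulk-upload check returns in this diff (the response shape is abbreviated):

```ts
// Sketch of the per-asset decision in the upload loop above.
type CheckResult = { action: 'accept' | 'reject'; reason?: 'duplicate'; assetId?: string };

function decide(result: CheckResult) {
  const skipUpload = result.action === 'reject';
  const isDuplicate = result.reason === 'duplicate';
  return {
    skipUpload,
    // A duplicate is not re-uploaded, but it can still be added to an album
    // via its existing id; only non-duplicate rejections skip the asset entirely.
    skipAsset: skipUpload && !isDuplicate,
    existingAssetId: isDuplicate ? result.assetId : undefined,
  };
}
```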
cli/src/constants.ts (new file, 37 lines)
@@ -0,0 +1,37 @@
import pkg from '../package.json';

export interface ICLIVersion {
  major: number;
  minor: number;
  patch: number;
}

export class CLIVersion implements ICLIVersion {
  constructor(
    public readonly major: number,
    public readonly minor: number,
    public readonly patch: number,
  ) {}

  toString() {
    return `${this.major}.${this.minor}.${this.patch}`;
  }

  toJSON() {
    const { major, minor, patch } = this;
    return { major, minor, patch };
  }

  static fromString(version: string): CLIVersion {
    const regex = /(?:v)?(?<major>\d+)\.(?<minor>\d+)\.(?<patch>\d+)/i;
    const matchResult = version.match(regex);
    if (matchResult) {
      const [, major, minor, patch] = matchResult.map(Number);
      return new CLIVersion(major, minor, patch);
    } else {
      throw new Error(`Invalid version format: ${version}`);
    }
  }
}

export const cliVersion = CLIVersion.fromString(pkg.version);
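Note: a small usage sketch of the new version helper (values illustrative):

```ts
import { CLIVersion } from './constants';

// fromString accepts an optional leading "v" and throws on malformed input.
const v = CLIVersion.fromString('v1.2.3');
console.log(v.toString()); // "1.2.3"
console.log(JSON.stringify(v)); // {"major":1,"minor":2,"patch":3}
```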
cli/src/cores/dto/base-options-dto.ts (new file, 3 lines)
@@ -0,0 +1,3 @@
export class BaseOptionsDto {
  config?: string;
}
@@ -1,9 +1,10 @@
 export class UploadOptionsDto {
-  recursive = false;
-  exclusionPatterns!: string[];
-  dryRun = false;
-  skipHash = false;
-  delete = false;
-  readOnly = true;
-  album = false;
+  recursive? = false;
+  exclusionPatterns?: string[] = [];
+  dryRun? = false;
+  skipHash? = false;
+  delete? = false;
+  album? = false;
+  albumName? = '';
+  includeHidden? = false;
 }
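Note: every option is now optional with a default, so a partially-filled object is valid; a sketch (import path assumed):

```ts
import { UploadOptionsDto } from 'src/cores/dto/upload-options-dto'; // path assumed

// Class defaults apply for omitted flags; parsed CLI options can be layered over them.
const options: UploadOptionsDto = { ...new UploadOptionsDto(), recursive: true, albumName: 'Trip' };
```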
@@ -2,10 +2,8 @@ export class LoginError extends Error {
   constructor(message: string) {
     super(message);

-    // assign the error class name in your custom error (as a shortcut)
     this.name = this.constructor.name;

-    // capturing the stack trace keeps the reference to your error class
     Error.captureStackTrace(this, this.constructor);
   }
 }
@@ -1,56 +1,51 @@
-import * as fs from 'fs';
-import { basename } from 'node:path';
 import crypto from 'crypto';
-import Os from 'os';
 import FormData from 'form-data';
+import * as fs from 'graceful-fs';
+import { createReadStream } from 'node:fs';
+import { basename } from 'node:path';
+import Os from 'os';

 export class Asset {
   readonly path: string;
   readonly deviceId!: string;

-  assetData?: fs.ReadStream;
   deviceAssetId?: string;
   fileCreatedAt?: string;
   fileModifiedAt?: string;
-  sidecarData?: fs.ReadStream;
   sidecarPath?: string;
   fileSize!: number;
   albumName?: string;

-  constructor(path: string, deviceId: string) {
+  constructor(path: string) {
     this.path = path;
-    this.deviceId = deviceId;
   }

-  async process() {
+  async prepare() {
     const stats = await fs.promises.stat(this.path);
     this.deviceAssetId = `${basename(this.path)}-${stats.size}`.replace(/\s+/g, '');
     this.fileCreatedAt = stats.mtime.toISOString();
     this.fileModifiedAt = stats.mtime.toISOString();
     this.fileSize = stats.size;
     this.albumName = this.extractAlbumName();
-
-    this.assetData = this.getReadStream(this.path);
-
-    // TODO: doesn't xmp replace the file extension? Will need investigation
-    const sideCarPath = `${this.path}.xmp`;
-    try {
-      fs.accessSync(sideCarPath, fs.constants.R_OK);
-      this.sidecarData = this.getReadStream(sideCarPath);
-    } catch (error) {}
   }

   getUploadFormData(): FormData {
-    if (!this.assetData) throw new Error('Asset data not set');
     if (!this.deviceAssetId) throw new Error('Device asset id not set');
     if (!this.fileCreatedAt) throw new Error('File created at not set');
     if (!this.fileModifiedAt) throw new Error('File modified at not set');
-    if (!this.deviceId) throw new Error('Device id not set');

+    // TODO: doesn't xmp replace the file extension? Will need investigation
+    const sideCarPath = `${this.path}.xmp`;
+    let sidecarData: fs.ReadStream | undefined = undefined;
+    try {
+      fs.accessSync(sideCarPath, fs.constants.R_OK);
+      sidecarData = createReadStream(sideCarPath);
+    } catch (error) {}

     const data: any = {
-      assetData: this.assetData as any,
+      assetData: createReadStream(this.path),
       deviceAssetId: this.deviceAssetId,
-      deviceId: this.deviceId,
+      deviceId: 'CLI',
       fileCreatedAt: this.fileCreatedAt,
       fileModifiedAt: this.fileModifiedAt,
       isFavorite: String(false),
@@ -61,17 +56,13 @@ export class Asset {
       formData.append(prop, data[prop]);
     }

-    if (this.sidecarData) {
-      formData.append('sidecarData', this.sidecarData);
+    if (sidecarData) {
+      formData.append('sidecarData', sidecarData);
     }

     return formData;
   }

-  private getReadStream(path: string): fs.ReadStream {
-    return fs.createReadStream(path);
-  }
-
   async delete(): Promise<void> {
     return fs.promises.unlink(this.path);
   }
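Note: after this refactor, read streams are no longer held on the `Asset` instance; they are opened lazily in `getUploadFormData()`. Together with the switch to `graceful-fs`, this plausibly keeps the number of simultaneously open file handles down during large uploads (an inference, not stated in the diff). The intended call sequence, sketched:

```ts
import { Asset } from './asset'; // path assumed

const asset = new Asset('/photos/image.jpg');
await asset.prepare(); // stats the file; no read stream is opened yet
console.log(asset.fileSize, asset.deviceAssetId);

// Streams for the asset and its optional .xmp sidecar are only created here.
const formData = asset.getUploadFormData();
```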
@@ -1,13 +1,23 @@
 #! /usr/bin/env node

-import { program, Option } from 'commander';
-import Upload from './commands/upload';
-import ServerInfo from './commands/server-info';
-import LoginKey from './commands/login/key';
-import Logout from './commands/logout';
+import { Option, Command } from 'commander';
+import { Upload } from './commands/upload';
+import { ServerInfo } from './commands/server-info';
+import { LoginKey } from './commands/login/key';
+import { Logout } from './commands/logout';
 import { version } from '../package.json';
+import path from 'node:path';
+import os from 'os';

-program.name('immich').description('Immich command line interface').version(version);
+const userHomeDir = os.homedir();
+const configDir = path.join(userHomeDir, '.config/immich/');
+
+const program = new Command()
+  .name('immich')
+  .version(version)
+  .description('Command line interface for Immich')
+  .addOption(new Option('-d, --config', 'Configuration directory').env('IMMICH_CONFIG_DIR').default(configDir));

 program
   .command('upload')
@@ -16,11 +26,17 @@ program
   .addOption(new Option('-r, --recursive', 'Recursive').env('IMMICH_RECURSIVE').default(false))
   .addOption(new Option('-i, --ignore [paths...]', 'Paths to ignore').env('IMMICH_IGNORE_PATHS'))
   .addOption(new Option('-h, --skip-hash', "Don't hash files before upload").env('IMMICH_SKIP_HASH').default(false))
+  .addOption(new Option('-i, --include-hidden', 'Include hidden folders').env('IMMICH_INCLUDE_HIDDEN').default(false))
   .addOption(
     new Option('-a, --album', 'Automatically create albums based on folder name')
       .env('IMMICH_AUTO_CREATE_ALBUM')
       .default(false),
   )
+  .addOption(
+    new Option('-A, --album-name <name>', 'Add all assets to specified album')
+      .env('IMMICH_ALBUM_NAME')
+      .conflicts('album'),
+  )
   .addOption(
     new Option('-n, --dry-run', "Don't perform any actions, just show what will be done")
       .env('IMMICH_DRY_RUN')
@@ -30,14 +46,14 @@ program
   .argument('[paths...]', 'One or more paths to assets to be uploaded')
   .action(async (paths, options) => {
     options.exclusionPatterns = options.ignore;
-    await new Upload().run(paths, options);
+    await new Upload(program.opts()).run(paths, options);
   });

 program
   .command('server-info')
   .description('Display server information')
   .action(async () => {
-    await new ServerInfo().run();
+    await new ServerInfo(program.opts()).run();
   });

 program
@@ -46,14 +62,14 @@ program
   .argument('[instanceUrl]')
   .argument('[apiKey]')
   .action(async (paths, options) => {
-    await new LoginKey().run(paths, options);
+    await new LoginKey(program.opts()).run(paths, options);
   });

 program
   .command('logout')
   .description('Remove stored credentials')
   .action(async () => {
-    await new Logout().run();
+    await new Logout(program.opts()).run();
   });

 program.parse(process.argv);
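Note: the global `-d, --config` option is now forwarded to every command via `program.opts()`, e.g. `immich upload --recursive --album-name "Trip" ./photos`. `BaseCommand` itself is not part of this diff, so the following is a sketch only of what the wiring implies:

```ts
// Sketch only; BaseCommand is not shown in this diff.
import { BaseOptionsDto } from 'src/cores/dto/base-options-dto';

class BaseCommandSketch {
  constructor(protected readonly options: BaseOptionsDto) {}

  // Falls back to the default computed in index.ts when --config is omitted.
  protected get configDir(): string {
    return this.options.config ?? '/home/<user>/.config/immich/'; // default path illustrative
  }
}
```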
@@ -19,7 +19,7 @@ const tests: Test[] = [
     files: {},
   },
   {
-    test: 'should crawl a single path',
+    test: 'should crawl a single folder',
     options: {
       pathsToCrawl: ['/photos/'],
     },
@@ -27,6 +27,25 @@ const tests: Test[] = [
       '/photos/image.jpg': true,
     },
   },
+  {
+    test: 'should crawl a single file',
+    options: {
+      pathsToCrawl: ['/photos/image.jpg'],
+    },
+    files: {
+      '/photos/image.jpg': true,
+    },
+  },
+  {
+    test: 'should crawl a single file and a folder',
+    options: {
+      pathsToCrawl: ['/photos/image.jpg', '/images/'],
+    },
+    files: {
+      '/photos/image.jpg': true,
+      '/images/image2.jpg': true,
+    },
+  },
   {
     test: 'should exclude by file extension',
     options: {
@@ -54,6 +73,7 @@ const tests: Test[] = [
     options: {
       pathsToCrawl: ['/photos/'],
       exclusionPatterns: ['**/raw/**'],
+      recursive: true,
     },
     files: {
       '/photos/image.jpg': true,
@@ -98,6 +118,7 @@ const tests: Test[] = [
     test: 'should crawl a single path',
     options: {
       pathsToCrawl: ['/photos/'],
+      recursive: true,
     },
     files: {
       '/photos/image.jpg': true,
@@ -177,6 +198,58 @@ const tests: Test[] = [
       [`/photos/3.jpg`]: false,
     },
   },
+  {
+    test: 'should support ignoring full filename',
+    options: {
+      pathsToCrawl: ['/photos'],
+      exclusionPatterns: ['**/image2.jpg'],
+    },
+    files: {
+      '/photos/image1.jpg': true,
+      '/photos/image2.jpg': false,
+      '/photos/image3.jpg': true,
+    },
+  },
+  {
+    test: 'should support ignoring file extensions',
+    options: {
+      pathsToCrawl: ['/photos'],
+      exclusionPatterns: ['**/*.png'],
+    },
+    files: {
+      '/photos/image1.jpg': true,
+      '/photos/image2.png': false,
+      '/photos/image3.jpg': true,
+    },
+  },
+  {
+    test: 'should support ignoring folder names',
+    options: {
+      pathsToCrawl: ['/photos'],
+      recursive: true,
+      exclusionPatterns: ['**/raw/**'],
+    },
+    files: {
+      '/photos/image1.jpg': true,
+      '/photos/image/image1.jpg': true,
+      '/photos/raw/image2.dng': false,
+      '/photos/raw/image3.dng': false,
+      '/photos/notraw/image3.jpg': true,
+    },
+  },
+  {
+    test: 'should support ignoring absolute paths',
+    options: {
+      pathsToCrawl: ['/'],
+      recursive: true,
+      exclusionPatterns: ['/images/**'],
+    },
+    files: {
+      '/photos/image1.jpg': true,
+      '/images/image2.jpg': false,
+      '/assets/image3.jpg': true,
+    },
+  },
 ];

 describe(CrawlService.name, () => {
@@ -1,5 +1,6 @@
 import { CrawlOptionsDto } from 'src/cores/dto/crawl-options-dto';
 import { glob } from 'glob';
+import * as fs from 'fs';

 export class CrawlService {
   private readonly extensions!: string[];
@@ -8,21 +9,57 @@ export class CrawlService {
     this.extensions = image.concat(video).map((extension) => extension.replace('.', ''));
   }

-  crawl(crawlOptions: CrawlOptionsDto): Promise<string[]> {
+  async crawl(crawlOptions: CrawlOptionsDto): Promise<string[]> {
     const { pathsToCrawl, exclusionPatterns, includeHidden } = crawlOptions;
     if (!pathsToCrawl) {
       return Promise.resolve([]);
     }

-    const base = pathsToCrawl.length === 1 ? pathsToCrawl[0] : `{${pathsToCrawl.join(',')}}`;
-    const extensions = `*{${this.extensions}}`;
+    const patterns: string[] = [];
+    const crawledFiles: string[] = [];

-    return glob(`${base}/**/${extensions}`, {
+    for await (const currentPath of pathsToCrawl) {
+      try {
+        const stats = await fs.promises.stat(currentPath);
+        if (stats.isFile() || stats.isSymbolicLink()) {
+          crawledFiles.push(currentPath);
+        } else {
+          patterns.push(currentPath);
+        }
+      } catch (error: any) {
+        if (error.code === 'ENOENT') {
+          patterns.push(currentPath);
+        } else {
+          throw error;
+        }
+      }
+    }
+
+    let searchPattern: string;
+    if (patterns.length === 1) {
+      searchPattern = patterns[0];
+    } else if (patterns.length === 0) {
+      return crawledFiles;
+    } else {
+      searchPattern = '{' + patterns.join(',') + '}';
+    }
+
+    if (crawlOptions.recursive) {
+      searchPattern = searchPattern + '/**/';
+    }
+
+    searchPattern = `${searchPattern}/*.{${this.extensions.join(',')}}`;
+
+    const globbedFiles = await glob(searchPattern, {
       absolute: true,
       nocase: true,
       nodir: true,
       dot: includeHidden,
       ignore: exclusionPatterns,
     });
+
+    const returnedFiles = crawledFiles.concat(globbedFiles);
+    returnedFiles.sort();
+    return returnedFiles;
   }
 }
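Note: a worked example of the glob pattern the rewritten `crawl` builds (extension list abbreviated). Paths that stat as files are returned directly; directories and missing paths become glob roots:

```ts
// Worked example of the pattern construction above.
const extensions = ['jpg', 'png'];       // abbreviated
const patterns = ['/photos', '/media'];  // stat'ed as directories (or ENOENT)

let searchPattern = patterns.length === 1 ? patterns[0] : '{' + patterns.join(',') + '}';
searchPattern = searchPattern + '/**/';  // only when crawlOptions.recursive is set
searchPattern = `${searchPattern}/*.{${extensions.join(',')}}`;

console.log(searchPattern); // "{/photos,/media}/**//*.{jpg,png}" (note the doubled slash the concatenation produces)
```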
@@ -1,16 +1,24 @@
 import { SessionService } from './session.service';
-import mockfs from 'mock-fs';
 import fs from 'node:fs';
 import yaml from 'yaml';
 import { LoginError } from '../cores/errors/login-error';
+import {
+  TEST_AUTH_FILE,
+  TEST_CONFIG_DIR,
+  TEST_IMMICH_API_KEY,
+  TEST_IMMICH_INSTANCE_URL,
+  createTestAuthFile,
+  deleteAuthFile,
+  readTestAuthFile,
+  spyOnConsole,
+} from '../../test/cli-test-utils';

 const mockPingServer = jest.fn(() => Promise.resolve({ data: { res: 'pong' } }));
 const mockUserInfo = jest.fn(() => Promise.resolve({ data: { email: 'admin@example.com' } }));

-jest.mock('../api/open-api', () => {
+jest.mock('@immich/sdk', () => {
   return {
-    __esModule: true,
-    ...jest.requireActual('../api/open-api'),
+    ...jest.requireActual('@immich/sdk'),
     UserApi: jest.fn().mockImplementation(() => {
       return { getMyUserInfo: mockUserInfo };
     }),
@@ -22,74 +30,85 @@ jest.mock('../api/open-api', () => {

 describe('SessionService', () => {
   let sessionService: SessionService;
+  let consoleSpy: jest.SpyInstance;

   beforeAll(() => {
-    // Write a dummy output before mock-fs to prevent some annoying errors
-    console.log();
+    consoleSpy = spyOnConsole();
   });

   beforeEach(() => {
-    const configDir = '/config';
-    sessionService = new SessionService(configDir);
+    deleteAuthFile();
+    sessionService = new SessionService(TEST_CONFIG_DIR);
+  });
+
+  afterEach(() => {
+    deleteAuthFile();
   });

   it('should connect to immich', async () => {
-    mockfs({
-      '/config/auth.yml': 'apiKey: pNussssKSYo5WasdgalvKJ1n9kdvaasdfbluPg\ninstanceUrl: https://test/api',
-    });
+    await createTestAuthFile(
+      JSON.stringify({
+        apiKey: TEST_IMMICH_API_KEY,
+        instanceUrl: TEST_IMMICH_INSTANCE_URL,
+      }),
+    );

     await sessionService.connect();
     expect(mockPingServer).toHaveBeenCalledTimes(1);
   });

   it('should error if no auth file exists', async () => {
-    mockfs();
     await sessionService.connect().catch((error) => {
       expect(error.message).toEqual('No auth file exist. Please login first');
     });
   });

   it('should error if auth file is missing instance URl', async () => {
-    mockfs({
-      '/config/auth.yml': 'foo: pNussssKSYo5WasdgalvKJ1n9kdvaasdfbluPg\napiKey: https://test/api',
-    });
+    await createTestAuthFile(
+      JSON.stringify({
+        apiKey: TEST_IMMICH_API_KEY,
+      }),
+    );
     await sessionService.connect().catch((error) => {
       expect(error).toBeInstanceOf(LoginError);
-      expect(error.message).toEqual('Instance URL missing in auth config file /config/auth.yml');
+      expect(error.message).toEqual(`Instance URL missing in auth config file ${TEST_AUTH_FILE}`);
     });
   });

   it('should error if auth file is missing api key', async () => {
-    mockfs({
-      '/config/auth.yml': 'instanceUrl: pNussssKSYo5WasdgalvKJ1n9kdvaasdfbluPg\nbar: https://test/api',
-    });
-    await sessionService.connect().catch((error) => {
-      expect(error).toBeInstanceOf(LoginError);
-      expect(error.message).toEqual('API key missing in auth config file /config/auth.yml');
-    });
+    await createTestAuthFile(
+      JSON.stringify({
+        instanceUrl: TEST_IMMICH_INSTANCE_URL,
+      }),
+    );
+
+    await expect(sessionService.connect()).rejects.toThrow(
+      new LoginError(`API key missing in auth config file ${TEST_AUTH_FILE}`),
+    );
   });

-  it.skip('should create auth file when logged in', async () => {
-    mockfs();
-
-    await sessionService.keyLogin('https://test/api', 'pNussssKSYo5WasdgalvKJ1n9kdvaasdfbluPg');
-
-    const data: string = await fs.promises.readFile('/config/auth.yml', 'utf8');
+  it('should create auth file when logged in', async () => {
+    await sessionService.keyLogin(TEST_IMMICH_INSTANCE_URL, TEST_IMMICH_API_KEY);
+
+    const data: string = await readTestAuthFile();
     const authConfig = yaml.parse(data);
-    expect(authConfig.instanceUrl).toBe('https://test/api');
-    expect(authConfig.apiKey).toBe('pNussssKSYo5WasdgalvKJ1n9kdvaasdfbluPg');
+    expect(authConfig.instanceUrl).toBe(TEST_IMMICH_INSTANCE_URL);
+    expect(authConfig.apiKey).toBe(TEST_IMMICH_API_KEY);
   });

   it('should delete auth file when logging out', async () => {
-    mockfs({
-      '/config/auth.yml': 'apiKey: pNussssKSYo5WasdgalvKJ1n9kdvaasdfbluPg\ninstanceUrl: https://test/api',
-    });
+    await createTestAuthFile(
+      JSON.stringify({
+        apiKey: TEST_IMMICH_API_KEY,
+        instanceUrl: TEST_IMMICH_INSTANCE_URL,
+      }),
+    );
     await sessionService.logout();

-    await fs.promises.access('/auth.yml', fs.constants.F_OK).catch((error) => {
+    await fs.promises.access(TEST_AUTH_FILE, fs.constants.F_OK).catch((error) => {
       expect(error.message).toContain('ENOENT');
     });
-  });

-  afterEach(() => {
-    mockfs.restore();
+    expect(consoleSpy.mock.calls).toEqual([[`Removed auth file ${TEST_AUTH_FILE}`]]);
   });
 });
@@ -5,33 +5,39 @@ import { ImmichApi } from '../api/client';
 import { LoginError } from '../cores/errors/login-error';

 export class SessionService {
-  readonly configDir: string;
+  readonly configDir!: string;
   readonly authPath!: string;
   private api!: ImmichApi;

   constructor(configDir: string) {
     this.configDir = configDir;
-    this.authPath = path.join(this.configDir, 'auth.yml');
+    this.authPath = path.join(configDir, '/auth.yml');
   }

   public async connect(): Promise<ImmichApi> {
-    await fs.promises.access(this.authPath, fs.constants.F_OK).catch((error) => {
-      if (error.code === 'ENOENT') {
-        throw new LoginError('No auth file exist. Please login first');
-      }
-    });
+    let instanceUrl = process.env.IMMICH_INSTANCE_URL;
+    let apiKey = process.env.IMMICH_API_KEY;

-    const data: string = await fs.promises.readFile(this.authPath, 'utf8');
-    const parsedConfig = yaml.parse(data);
-    const instanceUrl: string = parsedConfig.instanceUrl;
-    const apiKey: string = parsedConfig.apiKey;
-
-    if (!instanceUrl) {
-      throw new LoginError('Instance URL missing in auth config file ' + this.authPath);
-    }
-
-    if (!apiKey) {
-      throw new LoginError('API key missing in auth config file ' + this.authPath);
+    if (!instanceUrl || !apiKey) {
+      await fs.promises.access(this.authPath, fs.constants.F_OK).catch((error) => {
+        if (error.code === 'ENOENT') {
+          throw new LoginError('No auth file exist. Please login first');
+        }
+      });
+
+      const data: string = await fs.promises.readFile(this.authPath, 'utf8');
+      const parsedConfig = yaml.parse(data);
+
+      instanceUrl = parsedConfig.instanceUrl;
+      apiKey = parsedConfig.apiKey;
+
+      if (!instanceUrl) {
+        throw new LoginError(`Instance URL missing in auth config file ${this.authPath}`);
+      }
+
+      if (!apiKey) {
+        throw new LoginError(`API key missing in auth config file ${this.authPath}`);
+      }
     }

     this.api = new ImmichApi(instanceUrl, apiKey);
@@ -59,10 +65,6 @@ export class SessionService {
     }
   }

-    if (!fs.existsSync(this.configDir)) {
-      console.error('waah');
-    }
-
     fs.writeFileSync(this.authPath, yaml.stringify({ instanceUrl, apiKey }));

     console.log('Wrote auth info to ' + this.authPath);
@@ -82,7 +84,7 @@ export class SessionService {
     });

     if (pingResponse.res !== 'pong') {
-      throw new Error('Unexpected ping reply');
+      throw new Error(`Could not parse response. Is Immich listening on ${this.api.apiConfiguration.instanceUrl}?`);
     }
   }
 }
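Note: `connect()` now resolves credentials env-first. One subtlety worth flagging: if only one of the two variables is set, the auth file is read and both values are taken from it, so a lone env var does not survive the file read. A sketch of the order:

```ts
// Resolution order in the new connect():
// 1. IMMICH_INSTANCE_URL and IMMICH_API_KEY win outright when BOTH are set.
// 2. Otherwise auth.yml is read and BOTH values come from the file.
process.env.IMMICH_INSTANCE_URL = 'https://immich.example.com/api'; // illustrative
process.env.IMMICH_API_KEY = 'example-key';                         // illustrative
// With both set, new SessionService(configDir).connect() never touches auth.yml.
```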
cli/test/cli-test-utils.ts (new file, 38 lines)
@@ -0,0 +1,38 @@
import { BaseOptionsDto } from 'src/cores/dto/base-options-dto';
import fs from 'node:fs';
import path from 'node:path';

export const TEST_CONFIG_DIR = '/tmp/immich/';
export const TEST_AUTH_FILE = path.join(TEST_CONFIG_DIR, 'auth.yml');
export const TEST_IMMICH_INSTANCE_URL = 'https://test/api';
export const TEST_IMMICH_API_KEY = 'pNussssKSYo5WasdgalvKJ1n9kdvaasdfbluPg';

export const CLI_BASE_OPTIONS: BaseOptionsDto = { config: TEST_CONFIG_DIR };

export const spyOnConsole = () => jest.spyOn(console, 'log').mockImplementation();

export const createTestAuthFile = async (contents: string) => {
  if (!fs.existsSync(TEST_CONFIG_DIR)) {
    // Create config folder if it doesn't exist
    const created = await fs.promises.mkdir(TEST_CONFIG_DIR, { recursive: true });
    if (!created) {
      throw new Error(`Failed to create config folder ${TEST_CONFIG_DIR}`);
    }
  }

  fs.writeFileSync(TEST_AUTH_FILE, contents);
};

export const readTestAuthFile = async (): Promise<string> => {
  return await fs.promises.readFile(TEST_AUTH_FILE, 'utf8');
};

export const deleteAuthFile = () => {
  try {
    fs.unlinkSync(TEST_AUTH_FILE);
  } catch (error: any) {
    if (error.code !== 'ENOENT') {
      throw error;
    }
  }
};
cli/test/e2e/jest-e2e.json (new file, 24 lines)
@@ -0,0 +1,24 @@
{
  "moduleFileExtensions": ["js", "json", "ts"],
  "modulePaths": ["<rootDir>"],
  "rootDir": "../..",
  "globalSetup": "<rootDir>/test/e2e/setup.ts",
  "testEnvironment": "node",
  "testRegex": ".e2e-spec.ts$",
  "testTimeout": 6000000,
  "transform": {
    "^.+\\.ts$": "ts-jest"
  },
  "collectCoverageFrom": [
    "<rootDir>/src/**/*.(t|j)s",
    "!<rootDir>/src/**/*.spec.(t|s)s",
    "!<rootDir>/src/infra/migrations/**"
  ],
  "coverageDirectory": "./coverage",
  "moduleNameMapper": {
    "^@test(|/.*)$": "<rootDir>../server/test/$1",
    "^@app/immich(|/.*)$": "<rootDir>../server/src/immich/$1",
    "^@app/infra(|/.*)$": "<rootDir>../server/src/infra/$1",
    "^@app/domain(|/.*)$": "<rootDir>/../server/src/domain/$1"
  }
}
cli/test/e2e/login-key.e2e-spec.ts (new file, 48 lines)
@@ -0,0 +1,48 @@
import { APIKeyCreateResponseDto } from '@app/domain';
import { api } from '@test/../e2e/api/client';
import { restoreTempFolder, testApp } from '@test/../e2e/jobs/utils';
import { LoginResponseDto } from '@immich/sdk';
import { LoginKey } from 'src/commands/login/key';
import { LoginError } from 'src/cores/errors/login-error';
import { CLI_BASE_OPTIONS, spyOnConsole } from 'test/cli-test-utils';

describe(`login-key (e2e)`, () => {
  let server: any;
  let admin: LoginResponseDto;
  let apiKey: APIKeyCreateResponseDto;
  let instanceUrl: string;
  spyOnConsole();

  beforeAll(async () => {
    server = (await testApp.create()).getHttpServer();
    if (!process.env.IMMICH_INSTANCE_URL) {
      throw new Error('IMMICH_INSTANCE_URL environment variable not set');
    } else {
      instanceUrl = process.env.IMMICH_INSTANCE_URL;
    }
  });

  afterAll(async () => {
    await testApp.teardown();
    await restoreTempFolder();
  });

  beforeEach(async () => {
    await testApp.reset();
    await restoreTempFolder();
    await api.authApi.adminSignUp(server);
    admin = await api.authApi.adminLogin(server);
    apiKey = await api.apiKeyApi.createApiKey(server, admin.accessToken);
    process.env.IMMICH_API_KEY = apiKey.secret;
  });

  it('should error when providing an invalid API key', async () => {
    await expect(async () => await new LoginKey(CLI_BASE_OPTIONS).run(instanceUrl, 'invalid')).rejects.toThrow(
      new LoginError(`Failed to connect to server ${instanceUrl}: Request failed with status code 401`),
    );
  });

  it('should log in when providing the correct API key', async () => {
    await new LoginKey(CLI_BASE_OPTIONS).run(instanceUrl, apiKey.secret);
  });
});
cli/test/e2e/server-info.e2e-spec.ts (new file, 42 lines)
@@ -0,0 +1,42 @@
import { APIKeyCreateResponseDto } from '@app/domain';
import { api } from '@test/../e2e/api/client';
import { restoreTempFolder, testApp } from '@test/../e2e/jobs/utils';
import { LoginResponseDto } from '@immich/sdk';
import { ServerInfo } from 'src/commands/server-info';
import { CLI_BASE_OPTIONS, spyOnConsole } from 'test/cli-test-utils';

describe(`server-info (e2e)`, () => {
  let server: any;
  let admin: LoginResponseDto;
  let apiKey: APIKeyCreateResponseDto;
  const consoleSpy = spyOnConsole();

  beforeAll(async () => {
    server = (await testApp.create()).getHttpServer();
  });

  afterAll(async () => {
    await testApp.teardown();
    await restoreTempFolder();
  });

  beforeEach(async () => {
    await testApp.reset();
    await restoreTempFolder();
    await api.authApi.adminSignUp(server);
    admin = await api.authApi.adminLogin(server);
    apiKey = await api.apiKeyApi.createApiKey(server, admin.accessToken);
    process.env.IMMICH_API_KEY = apiKey.secret;
  });

  it('should show server version', async () => {
    await new ServerInfo(CLI_BASE_OPTIONS).run();

    expect(consoleSpy.mock.calls).toEqual([
      [expect.stringMatching(new RegExp('Server is running version \\d+.\\d+.\\d+'))],
      [expect.stringMatching('Supported image types: .*')],
      [expect.stringMatching('Supported video types: .*')],
      ['Images: 0, Videos: 0, Total: 0'],
    ]);
  });
});
cli/test/e2e/setup.ts (new file, 42 lines)
@@ -0,0 +1,42 @@
import path from 'path';
import { PostgreSqlContainer } from '@testcontainers/postgresql';
import { access } from 'fs/promises';

export default async () => {
  let IMMICH_TEST_ASSET_PATH: string = '';

  if (process.env.IMMICH_TEST_ASSET_PATH === undefined) {
    IMMICH_TEST_ASSET_PATH = path.normalize(`${__dirname}/../../../server/test/assets/`);
    process.env.IMMICH_TEST_ASSET_PATH = IMMICH_TEST_ASSET_PATH;
  } else {
    IMMICH_TEST_ASSET_PATH = process.env.IMMICH_TEST_ASSET_PATH;
  }

  const directoryExists = async (dirPath: string) =>
    await access(dirPath)
      .then(() => true)
      .catch(() => false);

  if (!(await directoryExists(`${IMMICH_TEST_ASSET_PATH}/albums`))) {
    throw new Error(
      `Test assets not found. Please checkout https://github.com/immich-app/test-assets into ${IMMICH_TEST_ASSET_PATH} before testing`,
    );
  }

  if (process.env.DB_HOSTNAME === undefined) {
    // DB hostname not set which likely means we're not running e2e through docker compose. Start a local postgres container.
    const pg = await new PostgreSqlContainer('tensorchord/pgvecto-rs:pg14-v0.1.11')
      .withExposedPorts(5432)
      .withDatabase('immich')
      .withUsername('postgres')
      .withPassword('postgres')
      .withReuse()
      .start();

    process.env.DB_URL = pg.getConnectionUri();
  }

  process.env.NODE_ENV = 'development';
  process.env.IMMICH_CONFIG_FILE = path.normalize(`${__dirname}/../../../server/e2e/jobs/immich-e2e-config.json`);
  process.env.TZ = 'Z';
};
cli/test/e2e/upload.e2e-spec.ts (new file, 84 lines)
@@ -0,0 +1,84 @@
import { APIKeyCreateResponseDto } from '@app/domain';
import { api } from '@test/../e2e/api/client';
import { IMMICH_TEST_ASSET_PATH, restoreTempFolder, testApp } from '@test/../e2e/jobs/utils';
import { LoginResponseDto } from '@immich/sdk';
import { Upload } from 'src/commands/upload';
import { CLI_BASE_OPTIONS, spyOnConsole } from 'test/cli-test-utils';

describe(`upload (e2e)`, () => {
  let server: any;
  let admin: LoginResponseDto;
  let apiKey: APIKeyCreateResponseDto;
  spyOnConsole();

  beforeAll(async () => {
    server = (await testApp.create()).getHttpServer();
  });

  afterAll(async () => {
    await testApp.teardown();
    await restoreTempFolder();
  });

  beforeEach(async () => {
    await testApp.reset();
    await restoreTempFolder();
    await api.authApi.adminSignUp(server);
    admin = await api.authApi.adminLogin(server);
    apiKey = await api.apiKeyApi.createApiKey(server, admin.accessToken);
    process.env.IMMICH_API_KEY = apiKey.secret;
  });

  it('should upload a folder recursively', async () => {
    await new Upload(CLI_BASE_OPTIONS).run([`${IMMICH_TEST_ASSET_PATH}/albums/nature/`], { recursive: true });
    const assets = await api.assetApi.getAllAssets(server, admin.accessToken);
    expect(assets.length).toBeGreaterThan(4);
  });

  it('should not create a new album', async () => {
    await new Upload(CLI_BASE_OPTIONS).run([`${IMMICH_TEST_ASSET_PATH}/albums/nature/`], { recursive: true });
    const albums = await api.albumApi.getAllAlbums(server, admin.accessToken);
    expect(albums.length).toEqual(0);
  });

  it('should create album from folder name', async () => {
    await new Upload(CLI_BASE_OPTIONS).run([`${IMMICH_TEST_ASSET_PATH}/albums/nature/`], {
      recursive: true,
      album: true,
    });

    const albums = await api.albumApi.getAllAlbums(server, admin.accessToken);
    expect(albums.length).toEqual(1);
    const natureAlbum = albums[0];
    expect(natureAlbum.albumName).toEqual('nature');
  });

  it('should add existing assets to album', async () => {
    await new Upload(CLI_BASE_OPTIONS).run([`${IMMICH_TEST_ASSET_PATH}/albums/nature/`], {
      recursive: true,
    });

    // Upload again, but this time add to album
    await new Upload(CLI_BASE_OPTIONS).run([`${IMMICH_TEST_ASSET_PATH}/albums/nature/`], {
      recursive: true,
      album: true,
    });

    const albums = await api.albumApi.getAllAlbums(server, admin.accessToken);
    expect(albums.length).toEqual(1);
    const natureAlbum = albums[0];
    expect(natureAlbum.albumName).toEqual('nature');
  });

  it('should upload to the specified album name', async () => {
    await new Upload(CLI_BASE_OPTIONS).run([`${IMMICH_TEST_ASSET_PATH}/albums/nature/`], {
      recursive: true,
      albumName: 'testAlbum',
    });

    const albums = await api.albumApi.getAllAlbums(server, admin.accessToken);
    expect(albums.length).toEqual(1);
    const testAlbum = albums[0];
    expect(testAlbum.albumName).toEqual('testAlbum');
  });
});
cli/test/global-setup.js (new file, 3 lines)
@@ -0,0 +1,3 @@
module.exports = async () => {
  process.env.TZ = 'UTC';
};
@@ -1,6 +1,6 @@
 {
   "compilerOptions": {
-    "module": "Node16",
+    "module": "commonjs",
     "strict": true,
     "declaration": true,
     "removeComments": true,
@@ -8,17 +8,23 @@
     "experimentalDecorators": true,
     "allowSyntheticDefaultImports": true,
     "resolveJsonModule": true,
-    "target": "es2022",
-    "moduleResolution": "node16",
+    "target": "es2021",
     "sourceMap": true,
     "outDir": "./dist",
     "incremental": true,
     "skipLibCheck": true,
     "esModuleInterop": true,
+    "rootDirs": ["src", "../server/src"],
     "baseUrl": "./",
     "paths": {
-      "@test": ["test"],
-      "@test/*": ["test/*"]
+      "@test": ["../server/test"],
+      "@test/*": ["../server/test/*"],
+      "@app/immich": ["../server/src/immich"],
+      "@app/immich/*": ["../server/src/immich/*"],
+      "@app/infra": ["../server/src/infra"],
+      "@app/infra/*": ["../server/src/infra/*"],
+      "@app/domain": ["../server/src/domain"],
+      "@app/domain/*": ["../server/src/domain/*"]
     }
   },
   "exclude": ["dist", "node_modules", "upload"]
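Note: with these `paths` mappings the CLI compiles against server sources directly; the new e2e specs rely on exactly this, e.g.:

```ts
// Resolved via the tsconfig "paths" above to ../server/src/domain and ../server/test.
import { APIKeyCreateResponseDto } from '@app/domain';
import { api } from '@test/../e2e/api/client';
```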
@@ -12,15 +12,16 @@ x-server-build: &server-common
     context: ../
     dockerfile: server/Dockerfile
     target: dev
+  restart: always
   volumes:
     - ../server:/usr/src/app
+    - ../open-api:/usr/src/open-api
     - ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
+    - ${UPLOAD_LOCATION}/photos/upload:/usr/src/app/upload/upload
     - /usr/src/app/node_modules
     - /etc/localtime:/etc/localtime:ro
   env_file:
     - .env
-  environment:
-    - NODE_ENV=development
   ulimits:
     nofile:
       soft: 1048576
@@ -65,6 +66,7 @@ services:
       - 24678:24678
     volumes:
       - ../web:/usr/src/app
+      - ../open-api/:/usr/src/open-api/
       - /usr/src/app/node_modules
     ulimits:
       nofile:
@@ -87,15 +89,13 @@ services:
       - model-cache:/cache
     env_file:
       - .env
-    environment:
-      - NODE_ENV=development
     depends_on:
      - database
     restart: unless-stopped

   redis:
     container_name: immich_redis
-    image: redis:6.2-alpine@sha256:b6124ab2e45cc332e16398022a411d7e37181f21ff7874835e0180f56a09e82a
+    image: redis:6.2-alpine@sha256:c5a607fb6e1bb15d32bbcf14db22787d19e428d59e31a5da67511b49bb0f1ccc

   database:
     container_name: immich_postgres
@@ -51,7 +51,7 @@ services:

   redis:
     container_name: immich_redis
-    image: redis:6.2-alpine@sha256:b6124ab2e45cc332e16398022a411d7e37181f21ff7874835e0180f56a09e82a
+    image: redis:6.2-alpine@sha256:c5a607fb6e1bb15d32bbcf14db22787d19e428d59e31a5da67511b49bb0f1ccc
     restart: always

   database:
@@ -55,7 +55,7 @@ services:

   redis:
     container_name: immich_redis
-    image: redis:6.2-alpine@sha256:b6124ab2e45cc332e16398022a411d7e37181f21ff7874835e0180f56a09e82a
+    image: redis:6.2-alpine@sha256:c5a607fb6e1bb15d32bbcf14db22787d19e428d59e31a5da67511b49bb0f1ccc
     restart: always

   database:
@@ -11,7 +11,6 @@ services:
 #  volumes:
 #    - /usr/lib/wsl:/usr/lib/wsl # If using VAAPI in WSL2
 #  environment:
-#    - NVIDIA_DRIVER_CAPABILITIES=all # If using NVIDIA GPU
 #    - LD_LIBRARY_PATH=/usr/lib/wsl/lib # If using VAAPI in WSL2
 #    - LIBVA_DRIVER_NAME=d3d12 # If using VAAPI in WSL2
 #  deploy: # Uncomment this section if using NVIDIA GPU
docker/mlaccel-armnn.yml (new file, 11 lines)
@@ -0,0 +1,11 @@
version: "3.8"

# ML acceleration on supported Mali ARM GPUs using ARM-NN

services:
  mlaccel:
    devices:
      - /dev/mali0:/dev/mali0
    volumes:
      - /lib/firmware/mali_csffw.bin:/lib/firmware/mali_csffw.bin:ro # Mali firmware for your chipset (not always required depending on the driver)
      - /usr/lib/libmali.so:/usr/lib/libmali.so:ro # Mali driver for your chipset (always required)
docs/docs/FAQ.md (deleted, 126 lines)
@@ -1,126 +0,0 @@
---
sidebar_position: 7
---

# FAQ

### What is the difference between the cloud icon on the mobile app?

| Icon | Description |
| --- | --- |
|  | Asset is only available in the cloud and was uploaded from some other device (like the web client) or was deleted from this device after upload |
|  | Asset is only available locally and has not yet been backed up |
|  | Asset was uploaded from this device and is now backed up in the cloud/server and still available in original on the device |

### Can I add my existing photo library?

Yes, with an [external library](/docs/features/libraries.md).

### Why are only photos and not videos being uploaded to Immich?

This often happens when using a reverse proxy or cloudflare tunnel in front of Immich. Make sure to set your reverse proxy to allow large POST requests. In `nginx`, set `client_max_body_size 50000M;` or similar. Cloudflare tunnels are limited to 100 mb file sizes. Also check the disk space of your reverse proxy, in some cases proxies caches requests to disk before passing them on, and if disk space runs out the request fails.

### Why is Immich slow on low-memory systems like the Raspberry Pi?

Immich optionally uses machine learning for several features. However, it can be too heavy to run on a Raspberry Pi. You can [mitigate](/docs/FAQ#how-can-i-lower-immichs-cpu-usage) this or [disable](/docs/FAQ.md#how-can-i-disable-machine-learning) machine learning entirely.

### How can I lower Immich's CPU usage?

The initial backup is the most intensive due to the number of jobs running. The most CPU-intensive ones are transcoding and machine learning jobs (Tag Images, Smart Search, Recognize Faces), and to a lesser extent thumbnail generation. Here are some ways to lower their CPU usage:

- Lower the job concurrency for these jobs to 1.
- Under Settings > Transcoding Settings > Threads, set the number of threads to a low number like 1 or 2.
- Under Settings > Machine Learning Settings > Facial Recognition > Model Name, you can change the facial recognition model to `buffalo_s` instead of `buffalo_l`. The former is a smaller and faster model, albeit not as good.
  - You _must_ re-run the Recognize Faces job for all images after this for facial recognition on new images to work properly.
- If these changes are not enough, see [below](/docs/FAQ.md#how-can-i-disable-machine-learning) for how you can disable machine learning.

### How can I disable machine learning?

:::info
Disabling machine learning will result in a poor experience for searching and the 'Explore' page, as these are reliant on it to work as intended.
:::

Machine learning can be disabled under Settings > Machine Learning Settings, either entirely or by model type. For instance, you can choose to disable smart search with CLIP, but keep facial recognition enabled. This means that the machine learning service will only process the enabled jobs.

However, disabling all jobs will not disable the machine learning service itself. To prevent it from starting up at all in this case, you can comment out the `immich-machine-learning` section of the docker-compose.yml.

### I'm getting errors about models being corrupt or failing to download. What do I do?

You can delete the model cache volume, which is where models are downloaded. This will give the service a clean environment to download the model again.

### What happens to existing files after I choose a new [Storage Template](/docs/administration/storage-template.mdx)?

Template changes will only apply to new assets. To retroactively apply the template to previously uploaded assets, run the Storage Migration Job, available on the [Jobs](/docs/administration/jobs.md) page.

### In the uploads folder, why are photos stored in the wrong date?

This is fixed by running the storage migration job.

### Why is object detection not very good?

The default image tagging model is relatively small. You can change this for a larger model like `google/vit-base-patch16-224` by setting the model name under Settings > Machine Learning Settings > Image Tagging. You can then re-run the Image Tagging job to get improved tags.

### Why are there so many thumbnail generation jobs?

Immich generates three thumbnails for each asset (blurred, small, and large), as well as a thumbnail for each recognized face.

### How can I see Immich logs?

Most Immich components are typically deployed using docker. To see logs for deployed docker containers, you can use the [Docker CLI](https://docs.docker.com/engine/reference/commandline/cli/), specifically the `docker logs` command. For examples, see [Docker Help](/docs/guides/docker-help.md)

### How can I run Immich as a non-root user?

1. Set the `PUID`/`PGID` environment variables (in `.env`).
2. Set the corresponding `user` argument in `docker-compose` for each service.
3. Add an additional volume to `immich-microservices` that mounts internally to `/usr/src/app/.reverse-geocoding-dump`.

The non-root user/group needs read/write access to the volume mounts, including `UPLOAD_LOCATION`.

### How can I reset the admin password?

The admin password can be reset by running the [reset-admin-password](/docs/administration/server-commands.md) command on the immich-server.

### How can I backup data from Immich?

See [backup and restore](/docs/administration/backup-and-restore.md).

### How can I **purge** data from Immich?

Data for Immich comes in two forms:

1. **Metadata** stored in a postgres database, persisted via the `pg_data` volume
2. **Files** (originals, thumbs, profile, etc.), stored in the `UPLOAD_LOCATION` folder.

To remove the **Metadata** you can stop Immich and delete the volume.

```bash title="Remove Immich (containers and volumes)"
docker-compose down -v
```

After removing the containers and volumes, the **Files** can be cleaned up (if necessary) from the `UPLOAD_LOCATION` by simply deleting an unwanted files or folders.

### How can I move all data (photos, persons, albums) from one user to another?

This requires some database queries. You can do this on the command line (in the PostgreSQL container using the psql command), or you can add for example an [Adminer](https://www.adminer.org/) container to the `docker-compose.yml` file, so that you can use a web-interface.

:::warning
This is an advanced operation. If you can't to do it with the steps described here, this is not for you.
:::

1. **MAKE A BACKUP** - See [backup and restore](/docs/administration/backup-and-restore.md).
2. Find the id of both the 'source' and the 'destination' user (it's the id column in the users table)
3. Three tables need to be updated:

```sql
// reassign albums
update albums set "ownerId" = '<destinationId>' where "ownerId" = '<sourceId>';

// reassign people
update person set "ownerId" = '<destinationId>' where "ownerId" = '<sourceId>';

// reassign assets
update assets set "ownerId" = '<destinationId>' where "ownerId" = '<sourceId>'
and checksum not in (select checksum from assets where "ownerId" = '<destinationId>');
```

4. There might be left-over assets in the 'source' user's library if they are skipped by the last query because of duplicate checksums. These are probably duplicates anyway, and can probably be removed.
311
docs/docs/FAQ.mdx
Normal file
@@ -0,0 +1,311 @@
# FAQ

## User

### How can I reset the admin password?

The admin password can be reset by running the [reset-admin-password](/docs/administration/server-commands.md) command on the immich-server.

### How can I see a list of all users in Immich?

You can see the list of all users by running the [list-users](/docs/administration/server-commands.md) command on the immich-server.

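As a sketch (assuming the default `immich_server` container name from the example compose file), both server commands can be invoked with `docker exec`:

```bash
# Reset the admin password interactively
docker exec -it immich_server immich-admin reset-admin-password

# Print all users and their details
docker exec -it immich_server immich-admin list-users
```
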
---

## Mobile App

### What is the difference between the cloud icons on the mobile app?

| Icon                                | Description                                                                                                                                     |
| ----------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------- |
|  | Asset is only available in the cloud and was uploaded from some other device (like the web client) or was deleted from this device after upload |
|  | Asset is only available locally and has not yet been backed up                                                                            |
|  | Asset was uploaded from this device and is now backed up to the server; the original file is still on the device                             |

### I cannot log into the application after an update. What can I do?

First, verify that the mobile app and server are both running the same version (major and minor).

:::note
App store updates can sometimes take longer because the stores (Google Play Store and Apple App Store) need to approve the update first, which may take some time.
:::

If you still cannot log in to the app, try the following:

- Check the mobile logs
- Make sure login credentials are correct by logging in on the web app

---

## Assets

### Can I add my existing photo library?

Yes, with an [External Library](/docs/features/libraries.md).

### What happens to existing files after I choose a new [Storage Template](/docs/administration/storage-template.mdx)?

Template changes will only apply to _new_ assets. To retroactively apply the template to previously uploaded assets, run the Storage Migration Job, available on the [Jobs](/docs/administration/jobs.md) page.

### Why are only photos and not videos being uploaded to Immich?

This often happens when using a reverse proxy (such as nginx or a Cloudflare tunnel) in front of Immich. Videos are typically much larger than photos, so make sure your reverse proxy allows large `POST` requests. In `nginx`, set `client_max_body_size 50000M;` or similar. Also check the disk space of your reverse proxy; in some cases, proxies cache requests to disk before passing them on, and if disk space runs out, the request fails.

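A minimal nginx sketch of the relevant directive (the hostname and upstream address are placeholders; adapt them to your own server block):

```nginx
server {
    server_name immich.example.org;

    # Allow large uploads such as videos; 0 disables the size check entirely
    client_max_body_size 50000M;

    location / {
        proxy_pass http://<snip>:2283;
    }
}
```
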
### Why are some photos stored in the file system with the wrong date?

There are a few different scenarios that can lead to this situation. The solution is simply to run the storage migration job again. The job is only _automatically_ run once per asset, after upload. If metadata extraction originally failed, or the jobs were cleared/cancelled, etc., the job may not have run automatically the first time.

### How can I hide photos from the timeline?

You can _archive_ them.

### How can I backup data from Immich?

See [Backup and Restore](/docs/administration/backup-and-restore.md).

### Does Immich support reading existing face tag metadata?

No, it currently does not.

### Does Immich support filtering of NSFW images?

No, it currently does not, but there is an [open discussion about it on GitHub](https://github.com/immich-app/immich/discussions/2451). You can submit a pull request or vote for the discussion.

### Why are there so many thumbnail generation jobs?

There are three thumbnail jobs for each asset:

- Blurred (thumbhash)
- Small (webp)
- Large (jpeg)

Also, there are additional jobs for person (face) thumbnails.

### What happens if an asset exists in more than one account?

There is no requirement for assets to be unique across users. If multiple users upload the same image, each copy is processed as a distinct asset, and jobs run and thumbnails are generated accordingly.

### How can I delete transcoded videos without deleting the original?

The transcode of an asset can be deleted by setting a transcode policy that makes it unnecessary, then running a transcoding job for that asset. This can be done on a per-asset basis by starting a transcoding job for that asset (with the _Refresh encoded videos_ button in the asset viewer options), or for all assets by running transcoding jobs for all assets.

To update the transcode policy, navigate to Administration > Video Transcoding Settings > Transcoding Policy and select a policy from the drop-down. This policy determines whether an existing transcode will be deleted or overwritten in the transcoding job: if a video should be transcoded according to this policy, the existing transcode is overwritten; if not, it is deleted.

:::note
For example, say you have existing transcodes with the policy "Videos higher than normal resolution or not in the desired format" and switch to a narrower policy: "Videos not in the desired format". If an asset was only transcoded due to its resolution, then running a transcoding job for it will now delete the existing transcode. This is because resolution is no longer part of the transcode policy and the transcode is unnecessary as a result. Likewise, if you set the policy to "Don't transcode any videos" and run transcoding jobs for all assets, this will delete all existing transcodes as they are all unnecessary.
:::

### Is it possible to compress images during backup?

No. Our golden rule is that the original assets should always be untouched, so we don't think this feature is a good fit for Immich.

### How can I move all data (photos, persons, albums) from one user to another?

This is not officially supported, but can be accomplished with some database updates. You can do this on the command line (in the PostgreSQL container, using the psql command), or you can add, for example, an [Adminer](https://www.adminer.org/) container to the `docker-compose.yml` file so that you can use a web interface.

:::warning
This is an advanced operation. If you can't do it with the steps described here, this is not for you.
:::

<details>
<summary>Steps</summary>

1. **MAKE A BACKUP** - See [backup and restore](/docs/administration/backup-and-restore.md).
2. Find the id of both the 'source' and the 'destination' user (it's the id column in the users table).
3. Three tables need to be updated:

```sql
-- reassign albums
UPDATE albums SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>';

-- reassign people
UPDATE person SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>';

-- reassign assets, skipping those that would become duplicates
UPDATE assets SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>'
AND checksum NOT IN (SELECT checksum FROM assets WHERE "ownerId" = '<destinationId>');
```

4. There might be left-over assets in the 'source' user's library if they are skipped by the last query because of duplicate checksums. These are probably duplicates anyway, and can probably be removed.

</details>

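As a sketch of how to get a SQL prompt for these updates (assuming the default `immich_postgres` container and the credentials from the example `.env`):

```bash
# Open psql inside the Postgres container against the immich database
docker exec -it immich_postgres psql --username=postgres --dbname=immich
```
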

---

## Albums

### Can I keep my existing album structure while importing assets into Immich?

Yes. You can use the [Immich CLI](/docs/features/command-line-interface) along with the `--album` flag.

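A minimal sketch (flag names as documented for the CLI; authenticate first as described in the CLI guide):

```bash
# Upload a directory recursively, creating albums based on folder names
immich upload --recursive --album /path/to/photos
```
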
### Is there a way to reorder photos within an album?

No, not yet. For updates on this planned feature, follow the [GitHub discussion](https://github.com/immich-app/immich/discussions/1689).

---

## External Library

### Can I add an external library while keeping the existing album structure?

There is no official mechanism to create albums from external libraries at the moment, but there are some [workarounds from the community](https://github.com/immich-app/immich/discussions/4279) that can help you achieve that.

### What happens to duplicates in external libraries?

Duplicate checking only exists for upload libraries, using the file hash. Furthermore, duplicate checking is not global, but _per library_. Therefore, a situation where the same file appears twice in the timeline is possible, especially for external libraries.

---

## Machine Learning

### How does smart search work?

Immich uses CLIP models. For more information about CLIP and its capabilities, read about it [here](https://openai.com/research/clip).

### How does facial recognition work?

For face detection and recognition, Immich uses [InsightFace models](https://github.com/deepinsight/insightface/tree/master/model_zoo).

### How can I disable machine learning?

:::info
Disabling machine learning will result in a poor experience for searching and the 'Explore' page, as these rely on it to work as intended.
:::

Machine learning can be disabled under Administration > Settings > Machine Learning Settings, either entirely or by model type. For instance, you can choose to disable smart search with CLIP, but keep facial recognition enabled. This means that the machine learning service will only process the enabled jobs.

However, disabling all jobs will not disable the machine learning service itself. To prevent it from starting up at all in this case, you can comment out the `immich-machine-learning` section of the docker-compose.yml.

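As a quick, temporary alternative (service name as in the example compose file), you can also simply stop the container until the next `docker compose up`:

```bash
# Stop the machine learning service without editing the compose file
docker compose stop immich-machine-learning
```
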
### I'm getting errors about models being corrupt or failing to download. What do I do?

You can delete the model cache volume, which is where models are downloaded to. This will give the service a clean environment to download the model again.

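A minimal sketch, assuming the default volume name `immich_model-cache` from the example compose file:

```bash
# Stop the stack, remove the model cache volume, and start again
docker compose down
docker volume rm immich_model-cache
docker compose up -d
```
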
### Why did Immich decide to remove object detection?

The feature added keywords to images for metadata search, but wasn't used for smart search. Smart search made it unnecessary, as it isn't limited to exact keywords. Combined with it causing crashes on some devices, using many dependencies, and causing user confusion as to how search worked, it was better to remove the job altogether.

For more info, see [this PR](https://github.com/immich-app/immich/pull/5903).

### Can I use a custom CLIP model?

No, this is not supported. Only models listed in the [Huggingface collection](https://huggingface.co/immich-app) are compatible. Feel free to make a feature request if there's a model not listed there that you think should be added.

### I want to be able to search in other languages besides English. How can I do that?

You can change to one of the multilingual models listed [here](https://huggingface.co/collections/immich-app/multilingual-clip-654eb08c2382f591eeb8c2a7) by going to Administration > Machine Learning Settings > Smart Search and replacing the name of the model. Be sure to re-run Smart Search on all assets after this change. You can then search in over 100 languages.

:::note
Feel free to make a feature request if there's a model you want to use that isn't in the [Immich Huggingface list](https://huggingface.co/immich-app).
:::

### Does Immich support facial recognition for videos?

This is not currently implemented, but may be in the future.

That said, Immich does scan video thumbnails for faces, so it can perform recognition if the face is clear in the video thumbnail.

### Does Immich have animal recognition?

No.

### The `immich_model-cache` volume takes up a lot of space. What could be the problem?

If you installed several models and chose not to use some of them, it might be worth deleting the old models that are in `immich_model-cache`.

To do this, you can run:

- `docker run -it --rm -v immich_model-cache:/mnt ubuntu bash`
- `cd /mnt`
- `ls`
- and delete unused models with `rm -r <model_name>`.

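To see how much space each cached model takes before deleting anything, a one-off sketch using the same volume mount:

```bash
# Print the size of each entry in the model cache volume
docker run --rm -v immich_model-cache:/mnt ubuntu bash -c 'du -sh /mnt/*'
```
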
---

## Performance

### Why is Immich slow on low-memory systems like the Raspberry Pi?

Immich optionally uses machine learning for several features, which can be too heavy to run on a Raspberry Pi. You can [mitigate](/docs/FAQ#can-i-lower-cpu-and-ram-usage) this, host Immich's machine-learning container on a [more powerful system](/docs/guides/remote-machine-learning), or [disable](/docs/FAQ#how-can-i-disable-machine-learning) machine learning entirely.

### Can I lower CPU and RAM usage?

The initial backup is the most intensive due to the number of jobs running. The most CPU-intensive ones are transcoding and machine learning jobs (Smart Search, Face Detection), and to a lesser extent thumbnail generation. Here are some ways to lower their CPU usage:

- Lower the job concurrency for these jobs to 1.
- Under Settings > Transcoding Settings > Threads, set the number of threads to a low number like 1 or 2.
- Under Settings > Machine Learning Settings > Facial Recognition > Model Name, you can change the facial recognition model to `buffalo_s` instead of `buffalo_l`. The former is a smaller and faster model, albeit not as good.
  - You _must_ re-run the Face Detection job for all images after this for facial recognition on new images to work properly.
- If these changes are not enough, see [below](/docs/FAQ#how-can-i-disable-machine-learning) for how you can disable machine learning.

### Can I limit the amount of CPU and RAM usage?

By default, a container has no resource constraints and can use as much of a given resource as the host's kernel scheduler allows.
You can look at the [official Docker docs](https://docs.docker.com/config/containers/resource_constraints/) or use this [guide](https://www.baeldung.com/ops/docker-memory-limit) to learn how to set limits.

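As a rough sketch (the container name is an assumption based on the default compose project), limits can also be applied to a running container:

```bash
# Cap the microservices container at 2 CPUs and 1 GiB of RAM
docker update --cpus "2" --memory "1g" --memory-swap "1g" immich_microservices
```

Note that limits applied this way do not survive recreating the container; for a persistent setup, define the limits in the compose file as described in the linked guides.
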
### How can I boost machine learning speed?

:::note
This advice improves throughput, not latency. That is, it will make Smart Search jobs process more quickly, but it won't make searching faster.
:::

You can increase throughput by increasing the job concurrency for machine learning jobs (Smart Search, Face Detection). With higher concurrency, the host will work on more assets in parallel. You can do this by navigating to Administration > Settings > Job Settings and increasing concurrency as needed.

:::danger
On a normal machine, 2 or 3 concurrent jobs can probably max the CPU. If you're not hitting those maximums with, say, 30 jobs, storage speed and latency may well be the limiting factor, particularly when using HDDs.

Do not exaggerate the number of jobs, or you will probably just overload the server.

More info [here](https://discord.com/channels/979116623879368755/994044917355663450/1174711719994605708).
:::

### Why is Immich using so much of my CPU?

When a large number of assets are uploaded to Immich, it makes sense that the CPU and RAM will be heavily used due to machine learning work and thumbnail generation. Once those jobs finish, CPU usage will drop to around 3-5%.

---

## Docker

### How can I see Immich logs?

Most Immich components are typically deployed using docker. To see logs for deployed docker containers, you can use the [Docker CLI](https://docs.docker.com/engine/reference/commandline/cli/), specifically the `docker logs` command. For examples, see [Docker Help](/docs/guides/docker-help.md).

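For instance, assuming the default container names from the example compose file:

```bash
# Follow the most recent log output of the server and microservices
docker logs --tail 100 --follow immich_server
docker logs --tail 100 --follow immich_microservices
```
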
### How can I run Immich as a non-root user?

1. Set the `PUID`/`PGID` environment variables (in `.env`); see the sketch after this list.
2. Set the corresponding `user` argument in `docker-compose` for each service.
3. Add an additional volume to `immich-microservices` that mounts internally to `/usr/src/app/.reverse-geocoding-dump`.

The non-root user/group needs read/write access to the volume mounts, including `UPLOAD_LOCATION`.

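A minimal sketch of step 1, assuming your user's ids are both 1000 (check with the `id` command):

```bash title=".env"
# IDs of the non-root user/group that should run the Immich containers
PUID=1000
PGID=1000
```

For step 2, each service in the compose file would then get a `user: ${PUID}:${PGID}` entry.
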
### How can I **purge** data from Immich?

Data for Immich comes in two forms:

1. **Metadata** stored in a Postgres database, persisted via the `pg_data` volume
2. **Files** (originals, thumbs, profile, etc.), stored in the `UPLOAD_LOCATION` folder.

To remove the **Metadata**, you can stop Immich and delete the volume.

```bash title="Remove Immich (containers and volumes)"
docker compose down -v
```

After removing the containers and volumes, the **Files** can be cleaned up (if necessary) from the `UPLOAD_LOCATION` by simply deleting any unwanted files or folders.

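A sketch of the file cleanup, assuming `UPLOAD_LOCATION` is exported in your shell (the `:?` guard aborts if the variable is unset, so the command cannot accidentally wipe `/`):

```bash
# Remove all remaining originals, thumbnails, and transcodes
rm -rf "${UPLOAD_LOCATION:?}"/*
```
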
### Why does the machine learning service report workers crashing?

:::note
If the error says the worker is exiting, this is normal. It is a feature intended to reduce RAM consumption when the service isn't being used.
:::

There are a few reasons why this can happen.

If the error mentions SIGKILL or error code 137, it most likely means the service is running out of memory. Consider either increasing the server's RAM or moving the service to a server with more RAM.

If it mentions SIGILL (note the lack of a K) or error code 132, it most likely means your server's CPU is incompatible. This is unlikely to occur on version 1.92.0 or later. Consider upgrading if your version of Immich is below that.

If your version of Immich is below 1.92.0 and the crash occurs after logs about tracing or exporting a model, consider either upgrading or disabling the Tag Objects job.

@@ -1,5 +1,7 @@
# Backup and Restore

A [3-2-1 backup strategy](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) is recommended to protect your data. You should keep copies of your uploaded photos/videos as well as the Immich database for a comprehensive backup solution. This page provides an overview of how to back up the database and the location of user-uploaded pictures and videos. A template bash script that can be run as a cron job is provided [here](/docs/guides/template-backup-script.md).

## Database

:::caution

@@ -64,3 +66,34 @@ Immich stores two types of content in the filesystem: (1) original, unmodified c

1. `UPLOAD_LOCATION/library`
1. `UPLOAD_LOCATION/upload`
1. `UPLOAD_LOCATION/profile`

**1. User-Specific Folders:**

- Each user has a unique string representing them.
- The main user is "Admin" (but only for `/library/library/`).
- Other users have different string identifiers.
- You can find your user ID in Account Settings > Account > User ID.

**2. Asset Types and Storage Locations:**

- **Source Assets:**
  - Original assets uploaded through the browser interface, mobile apps, and CLI.
  - Stored in `/library/library/<userID>`.
- **Avatar Images:**
  - User profile images.
  - Stored in `/library/profile/<userID>`.
- **Thumbs Images:**
  - Preview images (blurred, small, large) for each asset and thumbnails for recognized faces.
  - Stored in `/library/thumbs/<userID>`.
- **Encoded Assets:**
  - Video assets re-encoded for wider compatibility, unless otherwise specified.
  - Stored in `/library/encoded-video/<userID>`.
- **Files in Upload Queue (Mobile):**
  - Files uploaded through the mobile apps.
  - Temporarily located in `/library/upload/<userID>`.
  - Transferred to `/library/library/<userID>` upon successful upload.

:::danger
Do not touch the files inside these folders under any circumstances except when taking a backup. Changing or removing an asset there can cause untracked and missing files.
You can think of it as the App-Which-Must-Not-Be-Named: the only access for viewing, changing, and deleting assets is through the mobile or browser interface.
:::

@@ -28,3 +28,13 @@ server {
}
}
```

### Caddy example config

As an alternative to nginx, you can also use [Caddy](https://caddyserver.com/) as a reverse proxy (with automatic HTTPS configuration). Below is an example config.

```
immich.example.org {
    reverse_proxy http://<snip>:2283
}
```

@@ -1,4 +1,4 @@
import StorageTemplate from '../partials/_storage-template.md';
import StorageTemplate from '/docs/partials/_storage-template.md';

# Storage Template

@@ -1,5 +1,5 @@
import RegisterAdminUser from '../partials/_register-admin.md';
import RegisterAdminUser from '/docs/partials/_register-admin.md';
import UserCreate from '../partials/_user-create.md';
import UserCreate from '/docs/partials/_user-create.md';

# User Management

@@ -73,7 +73,7 @@ The Immich Microservices image uses the same `Dockerfile` as the Immich Server,
- Thumbnail Generation
- Metadata Extraction
- Video Transcoding
- Object Tagging
- Smart Search
- Facial Recognition
- Storage Template Migration
- Sidecar (see [XMP Sidecars](/docs/features/xmp-sidecars.md))
@@ -107,4 +107,4 @@ See [Database Migrations](./database-migrations.md) for more information about h

### Redis

Immich uses [Redis](https://redis.com/) via [BullMQ](https://docs.bullmq.io/) to manage job queues. Some jobs trigger subsequent jobs. For example, object detection relies on thumbnail generation and automatically runs after one is generated.
Immich uses [Redis](https://redis.com/) via [BullMQ](https://docs.bullmq.io/) to manage job queues. Some jobs trigger subsequent jobs. For example, Smart Search and Facial Recognition rely on thumbnail generation and automatically run after one is generated.
@@ -1,6 +1,6 @@
# Database Migrations

After making any changes in the `server/src/infra/database/entities`, a database migration needs to run in order to register the changes in the database. Follow the steps below to create a new migration.
After making any changes in the `server/src/infra/entities`, a database migration needs to run in order to register the changes in the database. Follow the steps below to create a new migration.

1. Run the command

@@ -13,5 +13,5 @@ npm run api:generate # Run from the `server/` directory
You can find the generated client SDK in `web/src/api` for the TypeScript SDK and `mobile/openapi` for the Dart SDK.

:::tip
This can also be run via `make api` from the project root directory (not in the `server` folder)
This can also be run via `make open-api` from the project root directory (not in the `server` folder)
:::
@@ -6,7 +6,7 @@ When contributing code through a pull request, please check the following:

- [ ] `npm run lint` (linting via ESLint)
- [ ] `npm run format` (formatting via Prettier)
- [ ] `npm run check` (Type checking via SvelteKit)
- [ ] `npm run check:svelte` (Type checking via SvelteKit)
- [ ] `npm test` (Tests via Jest)

:::tip
@@ -4,6 +4,16 @@ sidebar_position: 2
# Setup

:::note
If there's a feature you're planning to work on, just give us a heads up in [Discord](https://discord.com/channels/979116623879368755/1071165397228855327) so we can:

1. Let you know if it's something we would accept into Immich
2. Provide any guidance on how something like that would ideally be implemented
3. Ensure nobody is already working on that issue/feature so we don't duplicate effort

Thanks for being interested in contributing 😊
:::

## Environment

### Server and web app
@@ -39,7 +49,7 @@ You can access the web from `http://your-machine-ip:2283` or `http://localhost:2

### Mobile app

The mobile app `(/mobile)` will require the Flutter toolchain to be installed on your system.
The mobile app `(/mobile)` will require the Flutter toolchain 3.13.x to be installed on your system.

Please refer to [Flutter's official documentation](https://flutter.dev/docs/get-started/install) for more information on setting up the toolchain on your machine.

@@ -10,8 +10,8 @@ Unit are run by calling `npm run test` from the `server` directory.

The backend has an end-to-end test suite that can be called with `npm run test:e2e` from the `server` directory. This will set up a dummy database inside a temporary container and run the tests against it. Setup and teardown are automatically taken care of. That test, however, cannot set up all prerequisites to parse file formats, as that is very complex and error-prone. As such, this test excludes some test cases like HEIC file imports. The test suite will also print a friendly warning to remind you that not all tests are being run.

Note that there is a bug in nodejs <20.8 that causes segmentation faults when running these tests. If you run into segfaults, ensure you are using at least version 20.8.
Note that there is a bug in nodejs \<20.8 that causes segmentation faults when running these tests. If you run into segfaults, ensure you are using at least version 20.8.

To perform a full e2e test, you need to run e2e tests inside docker. The easiest way to do that is to run `make test-e2e` in the root directory. This will build and start a docker-compose consisting of the server, microservices, and a postgres database. It will then perform the tests and exit.

If you manually install the dependencies (see the DOCKERFILE) on your development machine, you can also run the full e2e tests manually by setting the `IMMICH_RUN_ALL_TESTS` environment value to true, i.e. `IMMICH_RUN_ALL_TESTS=true npm run test:e2e`.
If you manually install the dependencies (see the DOCKERFILE) on your development machine, you can also run the full e2e tests manually by setting the `IMMICH_RUN_ALL_TESTS` environment value to true, i.e. `IMMICH_RUN_ALL_TESTS=true npm run e2e:jobs`.
@@ -25,6 +25,6 @@ Additional actions you can do with a detected person are:

- Merge two or more detected faces into one person
- Hide face

It can be found from the app bar when you access the detial view of a person
It can be found from the app bar when you access the detail view of a person.

<img src={require('./img/facial-recognition-4.png').default} title='Facial Recognition 4' width="70%"/>
@@ -43,12 +43,28 @@ As this is a new feature, it is still experimental and may not work on all syste

## Setup

#### Initial Setup

1. If you do not already have it, download the latest [`hwaccel.yml`][hw-file] file and ensure it's in the same folder as the `docker-compose.yml`.
2. Uncomment the lines that apply to your system and desired usage.
3. In the `docker-compose.yml` under `immich-microservices`, uncomment the lines relating to the `hwaccel.yml` file.
4. Redeploy the `immich-microservices` container with these updated settings.
5. In the Admin page under `FFmpeg settings`, change the hardware acceleration setting to the appropriate option and save.

#### All-In-One - Unraid Setup

##### NVENC - NVIDIA GPUs

- If you are using other backends, you will still need to add the [`hwaccel.yml`][hw-file] file to the `immich-microservices` service directly; please see the "Initial Setup" section above on how to do that.
- As of v1.92.0, steps 1 and 2 are no longer necessary. If your version of Immich is below that or missing the environment variables, please follow these steps. Otherwise, skip to step 3.
- Please note that `NVIDIA_DRIVER_CAPABILITIES` no longer needs to be entered as a variable.

1. Assuming you already have the Nvidia Driver Plugin installed on your Unraid server, confirm that your Nvidia GPU is showing up with its GPU ID in the Nvidia Driver Plugin. The ID will be `GPU-LONG_STRING_OF_CHARACTERS`. Copy the GPU ID.
2. In the Imagegenius/Immich Docker Container app, add two new variables: Key=`NVIDIA_VISIBLE_DEVICES` Value=`GPU-LONG_STRING_OF_CHARACTERS` and Key=`NVIDIA_DRIVER_CAPABILITIES` Value=`all`
3. While you are in the docker container app, change the Container from Basic Mode to Advanced Mode and add the following parameter to the Extra Parameters field: `--runtime=nvidia`
4. Restart the Imagegenius/Immich Docker Container app.
5. In the Admin page under FFmpeg settings, change the hardware acceleration setting to the appropriate option and save.

## Tips

- You may want to choose a slower preset than for software transcoding to maintain quality and efficiency
@@ -34,9 +34,11 @@ If you add assets from an external library to an album and then move the asset t

### Deleted External Assets

Note: Either a manual or scheduled library scan must have been performed to identify offline assets before this process will work.

In all above scan methods, Immich will check if any files are missing. This can happen if files are deleted, or if they are on a storage location that is currently unavailable, like a network drive that is not mounted, or a USB drive that has been unplugged. In order to prevent accidental deletion of assets, Immich will not immediately delete an asset from the library if the file is missing. Instead, the asset will be internally marked as offline and will still be visible in the main timeline. If the file is moved back to its original location and the library is scanned again, the asset will be restored.

Finally, files can be deleted from Immich via the `Remove Offline Files` job. Any assets marked as offline will then be removed from Immich. Run this job whenever files have been deleted from the file system and you want to remove them from Immich. Note that a library scan must be performed first to mark the assets as offline.
Finally, files can be deleted from Immich via the `Remove Offline Files` job. This job can be found in the three-dots menu for the associated external storage that was configured under user account settings > libraries (the same location described at [create external libraries](#create-external-libraries)). When this job is run, any assets marked as offline will then be removed from Immich. Run this job whenever files have been deleted from the file system and you want to remove them from Immich.
### Import Paths

@@ -51,6 +53,7 @@ Sometimes, an external library will not scan correctly. This can happen if the i
- Are the volumes identical between the `server` and `microservices` container?
- Are the import paths set correctly, and do they match the path set in the docker-compose file?
- Are the permissions set correctly?
- Are you using forward slashes everywhere? (`/`)

If all else fails, you can always start a shell inside the container and check if the path is accessible. For example, `docker exec -it immich_microservices /bin/bash` will start a bash shell. If your import path, for instance, is `/data/import/photos`, you can check if the files are accessible by running `ls /data/import/photos`. Also check the `immich_server` container in the same way.
@@ -102,6 +105,7 @@ First, we need to plan how we want to organize the libraries. The christmas trip

+ - /mnt/nas/christmas-trip:/mnt/media/christmas-trip:ro
+ - /home/user/old-pics:/mnt/media/old-pics:ro
+ - /mnt/media/videos:/mnt/media/videos:ro
+ - "C:/Users/user_name/Desktop/my media:/mnt/media/my-media:ro" # import path in Windows system.

immich-microservices:
@@ -110,6 +114,7 @@ First, we need to plan how we want to organize the libraries. The christmas trip

+ - /mnt/nas/christmas-trip:/mnt/media/christmas-trip:ro
+ - /home/user/old-pics:/mnt/media/old-pics:ro
+ - /mnt/media/videos:/mnt/media/videos:ro
+ - "C:/Users/user_name/Desktop/my media:/mnt/media/my-media:ro" # import path in Windows system.
```

:::tip
@@ -125,6 +130,14 @@ Only an admin can do this.
- Navigate to `Administration > Users` page on the web.
- Click on the user edit button.
- Set `/mnt/media` to be the external path. This folder will only contain the three folders that we want to import, so nothing else can be accessed.

:::note
Spaces in the internal path aren't currently supported.

You must import it as:
`..:/mnt/media/my-media:ro`
instead of
`..:/mnt/media/my media:ro`
:::

### Create External Libraries

@@ -1,6 +1,6 @@
import MobileAppDownload from '../partials/_mobile-app-download.md';
import MobileAppDownload from '/docs/partials/_mobile-app-download.md';
import MobileAppLogin from '../partials/_mobile-app-login.md';
import MobileAppLogin from '/docs/partials/_mobile-app-login.md';
import MobileAppBackup from '../partials/_mobile-app-backup.md';
import MobileAppBackup from '/docs/partials/_mobile-app-backup.md';

# Mobile App

@@ -6,6 +6,8 @@ Smart search is powered by the [pgvecto.rs](https://github.com/tensorchord/pgvec

Metadata search (prefixed with `m:`) can search specifically by text without the use of a model.

Archived photos are not included in search results by default. To include them, add the query parameter `withArchived=true` to the URL.

Some search examples:
<img src={require('./img/search-ex-2.webp').default} title='Search Example 1' />
40
docs/docs/guides/database-gui.md
Normal file
@@ -0,0 +1,40 @@
# Database GUI

A short guide on connecting [pgAdmin](https://www.pgadmin.org/) to Immich.

:::note

- In order to connect to the database, the `immich_postgres` container **must be running**.
- The passwords and usernames used below match the ones specified in the example `.env` file. If changed, please use the actual values instead.

:::

## 1. Install pgAdmin

Download and install [pgAdmin](https://www.pgadmin.org/download/) following the official documentation.

## 2. Add a Server

Open pgAdmin and click "Add New Server".

<img src={require('./img/add-new-server-option.png').default} width="50%" title="new server option" />

## 3. Enter Connection Details

| Name                 | Value       |
| -------------------- | ----------- |
| Host name/address    | `localhost` |
| Port                 | `5432`      |
| Maintenance database | `immich`    |
| Username             | `postgres`  |
| Password             | `postgres`  |

<img src={require('./img/Connection-Pgadmin.png').default} width="75%" title="Connection" />

## 4. Save Connection

Click on "Save" to connect to the Immich database.

:::tip
View [Database Queries](https://immich.app/docs/guides/database-queries/) for common database queries.
:::
@@ -1,6 +1,6 @@
# External Library

This guide walks you through adding an [External Library](../features/libraries#external-libraries).
This guide walks you through adding an [External Library](/docs/features/libraries#external-libraries).
This guide assumes you are running Immich in Docker and that the files you wish to access are stored
in a directory on the same machine.

@@ -16,9 +16,6 @@ Edit `docker-compose.yml` to add two new mount points under `volumes:`

Be sure to add exactly the same line to both `immich-server:` and `immich-microservices:`.

[Question for the devs: Is editing docker-compose.yml really the desirable way to solve this problem?
I assumed user changes were supposed to be kept to .env?]

Edit `.env` to define `EXTERNAL_PATH`, substituting in the correct path for your computer:

```
@@ -81,13 +78,13 @@ In the Immich web UI:

- Click \*_Add path_
  <img src={require('./img/add-path-button.png').default} width="50%" title="Add Path button" />

- Enter **.** as the path and click Add
- Enter **/usr/src/app/external** as the path and click Add
  <img src={require('./img/add-path-field.png').default} width="50%" title="Add Path field" />

- Save the new path
  <img src={require('./img/path-save.png').default} width="50%" title="Path Save button" />

- Click the three-dots menu and select **Scan New Library Files** [I'm not sure whether this is necessary]
- Click the three-dots menu and select **Scan New Library Files**
  <img src={require('./img/scan-new-library-files.png').default} width="50%" title="Scan New Library Files menu option" />

# Confirm stuff is happening

BIN
docs/docs/guides/img/Connection-Pgadmin.png
Normal file
BIN
docs/docs/guides/img/add-new-server-option.png
Normal file
@@ -1,6 +1,7 @@
# Remote Access

This page gives a few pointers on how to access your Immich instance from outside your LAN.

You can read the [full discussion in Discord](https://discord.com/channels/979116623879368755/1122615710846308484)

:::danger
Never forward port 2283 directly to the internet without additional configuration. This will expose the web interface via http to the internet, making you susceptible to [man-in-the-middle](https://en.wikipedia.org/wiki/Man-in-the-middle_attack) attacks.
@@ -41,7 +42,7 @@ If you are unable to open a port on your router for Wireguard or OpenVPN to your

A reverse proxy is a service that sits between web servers and clients. A reverse proxy can either be hosted on the server itself or remotely. Clients can connect to the reverse proxy via https, and the proxy relays data to Immich. This setup makes the most sense if you have your own domain and want to access your Immich instance just like any other website, from outside your LAN. You can also use a DDNS provider like DuckDNS or No-IP if you don't have a domain. This configuration allows the Immich Android and iPhone apps to connect to your server without a VPN or Tailscale app on the client side.

If you're hosting your own reverse proxy, [Nginx](https://docs.nginx.com/nginx/admin-guide/web-server/reverse-proxy/) is a great option. An example configuration for Nginx is provided [here](https://immich.app/docs/administration/reverse-proxy).
If you're hosting your own reverse proxy, [Nginx](https://docs.nginx.com/nginx/admin-guide/web-server/reverse-proxy/) is a great option. An example configuration for Nginx is provided [here](/docs/administration/reverse-proxy.md).

You'll also need your own certificate to authenticate https connections. If you're making Immich publicly accessible, [Let's Encrypt](https://letsencrypt.org/) can provide a free certificate for your domain and is the recommended option. Alternatively, a [self-signed certificate](https://en.wikipedia.org/wiki/Self-signed_certificate) allows you to encrypt your connection to Immich, but it raises a security warning on the client's browser.
@@ -1,6 +1,6 @@
# Remote Machine Learning

To alleviate [performance issues on low-memory systems](/docs/FAQ.md#why-is-immich-slow-on-low-memory-systems-like-the-raspberry-pi) like the Raspberry Pi, you may also host Immich's machine-learning container on a more powerful system (e.g. your laptop or desktop computer):
To alleviate [performance issues on low-memory systems](/docs/FAQ.mdx#why-is-immich-slow-on-low-memory-systems-like-the-raspberry-pi) like the Raspberry Pi, you may also host Immich's machine-learning container on a more powerful system (e.g. your laptop or desktop computer):

- Set the URL in Machine Learning Settings on the Admin Settings page to point to the designated ML system, e.g. `http://workstation:3003`.
- Copy the following `docker-compose.yml` to your ML system.
38
docs/docs/guides/remove-offline-files.md
Normal file
@@ -0,0 +1,38 @@
# Remove Offline Files

:::note
**Before running the script**, please make sure you have a [backup](/docs/administration/backup-and-restore) of your assets and database.
:::

This page is a guide to getting rid of the offline files listed on the repair page.

It works by downloading a JSON file that contains a list of all the files flagged as offline, then running a script that uses the [Immich API](/docs/api/delete-assets) to remove those offline files.

1. Create an API key under Admin User -> Account Settings -> API Keys -> New API Key -> Copy to clipboard.
2. Download the JSON file under Administration -> Repair -> Export.
3. Replace `YOUR_IP_HERE` and `YOUR_API_KEY_HERE` with your actual IP address and API key in the script.
4. Run the script in the same folder where the JSON file is located.

## Script for Linux-based systems:

```
awk -F\" '/entityId/ {print $4}' orphans.json | while read line; do curl --location --request DELETE 'http://YOUR_IP_HERE:2283/api/asset' --header 'Content-Type: application/json' --header 'x-api-key: YOUR_API_KEY_HERE' --data '{ "force": true, "ids": ["'"$line"'"]}';done
```

## Script for the Windows system (run through PowerShell):

```
Get-Content orphans.json | Select-String -Pattern 'entityId' | ForEach-Object {
    $line = $_ -split '"' | Select-Object -Index 3
    $body = [pscustomobject]@{
        'ids'   = @($line)
        'force' = (' true ' | ConvertFrom-Json)
    } | ConvertTo-Json -Depth 3
    Invoke-RestMethod -Uri 'http://YOUR_IP_HERE:2283/api/asset' -Method Delete -Headers @{
        'Content-Type' = 'application/json'
        'x-api-key'    = 'YOUR_API_KEY_HERE'
    } -Body $body
}
```

Thanks to [DooMRunneR](https://discord.com/channels/979116623879368755/1179655214870040596/1194308198413373482) for writing this script.
82
docs/docs/guides/template-backup-script.md
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|

# Backup Script

[Borg](https://www.borgbackup.org/) is a feature-rich, deduplicating archiving software with built-in versioning. We provide a template bash script that can be run daily/weekly as a [cron](https://wiki.archlinux.org/title/cron) job to back up your files and database. We encourage you to read the quick-start guide for Borg before running this script.

This script assumes you have a second hard drive connected to your server for on-site backups, and SSH access to a remote machine for your third, off-site copy. [BorgBase](https://www.borgbase.com/) is an alternative option for off-site backups with a competitive pricing structure. You may choose to skip off-site backups entirely by removing the relevant lines from the template script.

The database is saved to your Immich upload folder in the `database-backup` subdirectory. The database is then backed up and versioned with your assets by Borg. This ensures that the database backup is in sync with your assets in every snapshot.

### Prerequisites

- Borg needs to be installed on your server as well as on the remote machine. You can find instructions to install Borg [here](https://borgbackup.readthedocs.io/en/latest/installation.html).
- To run this script as a non-root user, you should [add your username to the docker group](https://docs.docker.com/engine/install/linux-postinstall/).
- To run this script non-interactively, set up [passwordless ssh](https://www.redhat.com/sysadmin/passwordless-ssh) to your remote machine from your server.

To initialize the Borg repositories, run the following commands once.

```bash title='Borg set-up'
UPLOAD_LOCATION="/path/to/immich/directory" # Immich database location, as set in your .env file
BACKUP_PATH="/path/to/local/backup/directory"

mkdir "$UPLOAD_LOCATION/database-backup"
mkdir "$BACKUP_PATH/immich-borg"

borg init --encryption=none "$BACKUP_PATH/immich-borg"

## Remote set-up
REMOTE_HOST="remote_host@IP"
REMOTE_BACKUP_PATH="/path/to/remote/backup/directory"

ssh "$REMOTE_HOST" "mkdir $REMOTE_BACKUP_PATH/immich-borg"
ssh "$REMOTE_HOST" "borg init --encryption=none $REMOTE_BACKUP_PATH/immich-borg"
```
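
To confirm the repositories were initialized correctly, you can ask Borg to describe them; a quick optional check using the variables defined above:

```bash title='Verify the repositories (optional)'
borg info "$BACKUP_PATH/immich-borg"
ssh "$REMOTE_HOST" "borg info $REMOTE_BACKUP_PATH/immich-borg"
```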

Edit the following script as necessary and add it to your crontab; an example crontab entry is shown after the template. Note that this script assumes there are no spaces in your paths. If there are spaces, enclose the paths in double quotes.

```bash title='Borg backup template'
#!/bin/sh

# Paths
UPLOAD_LOCATION="/path/to/immich/directory"
BACKUP_PATH="/path/to/local/backup/directory"
REMOTE_HOST="remote_host@IP"
REMOTE_BACKUP_PATH="/path/to/remote/backup/directory"

### Local

# Backup Immich database
docker exec -t immich_postgres pg_dumpall -c -U postgres | /usr/bin/gzip > $UPLOAD_LOCATION/database-backup/immich-database.sql.gz

### Append to local Borg repository
borg create $BACKUP_PATH/immich-borg::{now} $UPLOAD_LOCATION --exclude $UPLOAD_LOCATION/thumbs/ --exclude $UPLOAD_LOCATION/encoded-video/
borg prune --keep-weekly=4 --keep-monthly=3 $BACKUP_PATH/immich-borg
borg compact $BACKUP_PATH/immich-borg

### Append to remote Borg repository
borg create $REMOTE_HOST:$REMOTE_BACKUP_PATH/immich-borg::{now} $UPLOAD_LOCATION --exclude $UPLOAD_LOCATION/thumbs/ --exclude $UPLOAD_LOCATION/encoded-video/
borg prune --keep-weekly=4 --keep-monthly=3 $REMOTE_HOST:$REMOTE_BACKUP_PATH/immich-borg
borg compact $REMOTE_HOST:$REMOTE_BACKUP_PATH/immich-borg
```
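
As an example, a crontab entry that runs the template nightly at 02:00 might look like the following; the script path and log file here are placeholders, not part of the template:

```bash title='Example crontab entry'
# m h dom mon dow command
0 2 * * * /path/to/immich-borg-backup.sh >> /var/log/immich-borg.log 2>&1
```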

### Restoring

To restore from a backup, use the `borg mount` command.

```bash title='Restore from local backup'
BACKUP_PATH="/path/to/local/backup/directory"
mkdir /tmp/immich-mountpoint
borg mount $BACKUP_PATH/immich-borg /tmp/immich-mountpoint
cd /tmp/immich-mountpoint
```

```bash title='Restore from remote backup'
REMOTE_HOST="remote_host@IP"
REMOTE_BACKUP_PATH="/path/to/remote/backup/directory"
mkdir /tmp/immich-mountpoint
borg mount $REMOTE_HOST:$REMOTE_BACKUP_PATH/immich-borg /tmp/immich-mountpoint
cd /tmp/immich-mountpoint
```

You can find the available snapshots in separate sub-directories under `/tmp/immich-mountpoint`. Restore the files you need, then unmount the Borg repository using `borg umount /tmp/immich-mountpoint`.
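
If you only need to check which snapshots exist, `borg list` prints the archives in a repository without mounting anything, for example:

```bash title='List snapshots'
borg list $BACKUP_PATH/immich-borg
borg list $REMOTE_HOST:$REMOTE_BACKUP_PATH/immich-borg
```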

@@ -32,15 +32,12 @@ The default configuration looks like this:
   "backgroundTask": {
     "concurrency": 5
   },
-  "clipEncoding": {
+  "smartSearch": {
     "concurrency": 2
   },
   "metadataExtraction": {
     "concurrency": 5
   },
-  "objectTagging": {
-    "concurrency": 2
-  },
   "recognizeFaces": {
     "concurrency": 2
   },

@@ -66,14 +63,13 @@ The default configuration looks like this:
       "concurrency": 1
     }
   },
+  "logging": {
+    "enabled": true,
+    "level": "log"
+  },
   "machineLearning": {
     "enabled": true,
     "url": "http://immich-machine-learning:3003",
-    "classification": {
-      "enabled": true,
-      "modelName": "microsoft/resnet-50",
-      "minScore": 0.9
-    },
     "clip": {
       "enabled": true,
       "modelName": "ViT-B-32__openai"

@@ -83,12 +79,13 @@ The default configuration looks like this:
       "modelName": "buffalo_l",
       "minScore": 0.7,
       "maxDistance": 0.6,
-      "minFaces": 1
+      "minFaces": 3
     }
   },
   "map": {
     "enabled": true,
-    "tileUrl": "https://tile.openstreetmap.org/{z}/{x}/{y}.png"
+    "lightStyle": "",
+    "darkStyle": ""
   },
   "reverseGeocoding": {
     "enabled": true

@@ -133,9 +130,6 @@ The default configuration looks like this:
     "enabled": true,
     "cronExpression": "0 0 * * *"
   }
-  },
-  "stylesheets": {
-    "css": ""
   }
 }
 ```

@@ -148,4 +142,4 @@ So you can just grab it from there, paste it into a file and you're pretty much
 ### Step 2 - Specify the file location
 
 In your `.env` file, set the variable `IMMICH_CONFIG_FILE` to the path of your config.
-For more information, refer to the [Environment Variables](https://docs.immich.app/docs/install/environment-variables) section.
+For more information, refer to the [Environment Variables](/docs/install/environment-variables.md) section.
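
For illustration, the relevant `.env` entry might look like this; the path is an example, not a required location:

```bash title='.env'
# example path only; point this at wherever your config file actually lives
IMMICH_CONFIG_FILE=/data/immich/immich-config.json
```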

@@ -55,7 +55,7 @@ Optionally, you can use the [`hwaccel.yml`][hw-file] file to enable hardware acc
 ### Step 3 - Start the containers
 
-From the directory you created in Step 1, (which should now contain your customized `docker-compose.yml` and `.env` files) run `docker-compose up -d`.
+From the directory you created in Step 1, (which should now contain your customized `docker-compose.yml` and `.env` files) run `docker compose up -d`.
 
 ```bash title="Start the containers using docker compose command"
 docker compose up -d

@@ -30,15 +30,15 @@ These environment variables are used by the `docker-compose.yml` file and do **N
 ## General
 
 | Variable | Description | Default | Services |
-| :-------------------------- | :------------------------------------------- | :-----------------: | :------------------------------------------- |
+| :------------------------------ | :------------------------------------------- | :------------------: | :------------------------------------------- |
 | `TZ` | Timezone | | microservices |
 | `NODE_ENV` | Environment (production, development) | `production` | server, microservices, machine learning, web |
 | `LOG_LEVEL` | Log Level (verbose, debug, log, warn, error) | `log` | server, microservices |
 | `IMMICH_MEDIA_LOCATION` | Media Location | `./upload` | server, microservices |
-| `PUBLIC_LOGIN_PAGE_MESSAGE` | Public Login Page Message | | web |
-| `IMMICH_CONFIG_FILE` | Path to config file | | server |
-| `IMMICH_WEB_ROOT` | Path of root index.html | `/usr/src/app/www'` | server |
+| `IMMICH_CONFIG_FILE` | Path to config file | | server |
+| `IMMICH_WEB_ROOT` | Path of root index.html | `/usr/src/app/www` | server |
+| `IMMICH_REVERSE_GEOCODING_ROOT` | Path of reverse geocoding dump directory | `/usr/src/resources` | microservices |
 
 :::tip

@@ -48,12 +48,6 @@ These environment variables are used by the `docker-compose.yml` file and do **N
 :::
 
-## Geocoding
-
-| Variable | Description | Default | Services |
-| :--------------------------------- | :------------------------------- | :--------------------------: | :------------ |
-| `REVERSE_GEOCODING_DUMP_DIRECTORY` | Reverse Geocoding Dump Directory | `./.reverse-geocoding-dump/` | microservices |
-
 ## Ports
 
 | Variable | Description | Default | Services |

@@ -127,16 +121,16 @@ Redis (Sentinel) URL example JSON before encoding:
 ## Machine Learning
 
 | Variable | Description | Default | Services |
-| :----------------------------------------------- | :---------------------------------------------------------------- | :-----------------: | :--------------- |
+| :----------------------------------------------- | :----------------------------------------------------------------- | :-----------------: | :--------------- |
-| `MACHINE_LEARNING_MODEL_TTL` | Inactivity time (s) before a model is unloaded (disabled if <= 0) | `300` | machine learning |
+| `MACHINE_LEARNING_MODEL_TTL` | Inactivity time (s) before a model is unloaded (disabled if \<= 0) | `300` | machine learning |
-| `MACHINE_LEARNING_MODEL_TTL_POLL_S` | Interval (s) between checks for the model TTL (disabled if <= 0) | `10` | machine learning |
+| `MACHINE_LEARNING_MODEL_TTL_POLL_S` | Interval (s) between checks for the model TTL (disabled if \<= 0) | `10` | machine learning |
 | `MACHINE_LEARNING_CACHE_FOLDER` | Directory where models are downloaded | `/cache` | machine learning |
-| `MACHINE_LEARNING_REQUEST_THREADS`<sup>\*1</sup> | Thread count of the request thread pool (disabled if <= 0) | number of CPU cores | machine learning |
+| `MACHINE_LEARNING_REQUEST_THREADS`<sup>\*1</sup> | Thread count of the request thread pool (disabled if \<= 0) | number of CPU cores | machine learning |
 | `MACHINE_LEARNING_MODEL_INTER_OP_THREADS` | Number of parallel model operations | `1` | machine learning |
 | `MACHINE_LEARNING_MODEL_INTRA_OP_THREADS` | Number of threads for each model operation | `2` | machine learning |
 | `MACHINE_LEARNING_WORKERS`<sup>\*2</sup> | Number of worker processes to spawn | `1` | machine learning |
 | `MACHINE_LEARNING_WORKER_TIMEOUT` | Maximum time (s) of unresponsiveness before a worker is killed | `120` | machine learning |
 
 \*1: It is recommended to begin with this parameter when changing the concurrency levels of the machine learning service and then tune the other ones.

@@ -2,12 +2,12 @@
 sidebar_position: 90
 ---
 
-import RegisterAdminUser from '../partials/_register-admin.md';
+import RegisterAdminUser from '/docs/partials/_register-admin.md';
-import UserCreate from '../partials/_user-create.md';
+import UserCreate from '/docs/partials/_user-create.md';
-import StorageTemplate from '../partials/_storage-template.md';
+import StorageTemplate from '/docs/partials/_storage-template.md';
-import MobileAppDownload from '../partials/_mobile-app-download.md';
+import MobileAppDownload from '/docs/partials/_mobile-app-download.md';
-import MobileAppLogin from '../partials/_mobile-app-login.md';
+import MobileAppLogin from '/docs/partials/_mobile-app-login.md';
-import MobileAppBackup from '../partials/_mobile-app-backup.md';
+import MobileAppBackup from '/docs/partials/_mobile-app-backup.md';
 
 # Post Install Steps

docs/docs/overview/Comparison.md (new file, 12 lines)

---
sidebar_position: 2
---

# Comparison

If you're new here and came from another photo self-hosting alternative, you might want to look at a comparison between Immich and your current solution.
Here you can see a [comparison between the various OpenSource Photo Libraries](https://meichthys.github.io/foss_photo_libraries/), including Immich.

:::note
It is important to remember that Immich is under very active development. Expect bugs and changes. Do not use it as the only way to store your photos and videos!
:::

@@ -1,12 +1,12 @@
 ---
-sidebar_position: 4
+sidebar_position: 6
 ---
 
 # Help Me!
 
 Running into an issue or have a question? Try the following:
 
-1. Check the [FAQs](/docs/FAQ.md).
+1. Check the [FAQs](/docs/FAQ.mdx).
 2. Read through the [Release Notes][github-releases].
 3. Search through existing [GitHub Issues][github-issues].
 4. Open a help ticket on [Discord][discord-link].

docs/docs/overview/img/upload-button.png (new binary file, 2.5 KiB)

@@ -1,5 +1,5 @@
 ---
-sidebar_position: 2
+sidebar_position: 4
 ---
 
 # Logo

docs/docs/overview/quick-start.mdx (new file, 87 lines)

---
sidebar_position: 3
---

# Quick Start

Here is a quick, no-choices path to install Immich and take it for a test drive. Once you've tried it, perhaps you'll use one of the many other ways to install and use it.

## Requirements

Check the [requirements page](/docs/install/requirements) to get started.

## Install and launch via Docker Compose

Follow the [Docker Compose (Recommended)](/docs/install/docker-compose) instructions to install the server.

- Where random passwords are required, `pwgen` is a handy utility (see the example after this list).
- `UPLOAD_LOCATION` should be set to some new directory on the server with free space.
- You may ignore "Step 4 - Upgrading".
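
For instance, one way to generate a single 32-character, fully random password with `pwgen` (a sketch; any equivalent generator works):

```bash
# -s: fully random ("secure") characters, length 32, one password
pwgen -s 32 1
```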

## Try the Web UI

import RegisterAdminUser from '/docs/partials/_register-admin.md';

<RegisterAdminUser />

Try uploading a picture from your browser.

<img src={require('./img/upload-button.png').default} title="Upload button" />

## Try the Mobile UI

### Download the Mobile App

import MobileAppDownload from '/docs/partials/_mobile-app-download.md';

<MobileAppDownload />

### Login to the Mobile App

import MobileAppLogin from '/docs/partials/_mobile-app-login.md';

<MobileAppLogin />

In the mobile app, you should see the photo you uploaded from the web UI.

### Transfer Photos from your Mobile Device

import MobileAppBackup from '/docs/partials/_mobile-app-backup.md';

<MobileAppBackup />

Depending on how many photos are on your mobile device, this backup may take quite a while.

You can select the Jobs tab to see Immich processing your photos.

<img src={require('/docs/guides/img/jobs-tab.png').default} title="Jobs tab" />

## Set up your backups

You may want to back up the content of your Immich instance along with other parts of your server; be sure to read about [database backup](/docs/administration/backup-and-restore).

## Where to go from here?

You may decide you'd like to install the server a different way; the Install category on the left menu provides many options.

You may decide you'd like to add the _rest_ of your photos from Google Photos, even those not on your mobile device, via Google Takeout. You can use [immich-go](https://github.com/simulot/immich-go) for this.

You may want to [upload photos from your own archive](/docs/features/command-line-interface).

You may want to incorporate an immutable archive of photos from an [External Library](/docs/features/libraries#external-libraries); there's a [Guide](/docs/guides/external-library) for that.

You may want your mobile device to [back photos up to your server automatically](/docs/features/automatic-backup).

@@ -1,5 +1,5 @@
 ---
-sidebar_position: 3
+sidebar_position: 5
 ---
 
 # Support The Project

@@ -1,8 +1,7 @@
 // @ts-check
 // Note: type annotations allow type checking and IDEs autocompletion
 
-const lightCodeTheme = require('prism-react-renderer/themes/github');
-const darkCodeTheme = require('prism-react-renderer/themes/dracula');
+const prism = require('prism-react-renderer');
 
 /** @type {import('@docusaurus/types').Config} */
 const config = {

@@ -56,7 +55,7 @@ const config = {
         editUrl: 'https://github.com/immich-app/immich/tree/main/docs/',
       },
       api: {
-        path: '../server/immich-openapi-specs.json',
+        path: '../open-api/immich-openapi-specs.json',
         routeBasePath: '/docs/api',
       },
       // blog: {

@@ -165,8 +164,8 @@ const config = {
       copyright: `Immich is available as open source under the terms of the MIT License.`,
     },
     prism: {
-      theme: lightCodeTheme,
-      darkTheme: darkCodeTheme,
+      theme: prism.themes.github,
+      darkTheme: prism.themes.dracula,
       additionalLanguages: ['sql'],
     },
     image: 'overview/img/feature-panel.png',

docs/package-lock.json (generated, 19549 lines; diff collapsed)

@@ -13,32 +13,31 @@
     "clear": "docusaurus clear",
     "serve": "docusaurus serve",
     "write-translations": "docusaurus write-translations",
-    "write-heading-ids": "docusaurus write-heading-ids",
-    "check": "tsc"
+    "write-heading-ids": "docusaurus write-heading-ids"
   },
   "dependencies": {
-    "@docusaurus/core": "^2.4.3",
+    "@docusaurus/core": "^3.1.0",
-    "@docusaurus/preset-classic": "^2.4.3",
+    "@docusaurus/preset-classic": "^3.1.0",
     "@mdi/js": "^7.3.67",
     "@mdi/react": "^1.6.1",
-    "@mdx-js/react": "^1.6.22",
+    "@mdx-js/react": "^3.0.0",
-    "autoprefixer": "^10.4.13",
+    "autoprefixer": "^10.4.17",
     "classnames": "^2.3.2",
     "clsx": "^2.0.0",
-    "docusaurus-lunr-search": "^2.3.2",
+    "docusaurus-lunr-search": "^3.3.2",
-    "docusaurus-preset-openapi": "^0.6.3",
+    "docusaurus-preset-openapi": "^0.7.3",
     "postcss": "^8.4.25",
-    "prism-react-renderer": "^1.3.5",
+    "prism-react-renderer": "^2.3.1",
     "raw-loader": "^4.0.2",
-    "react": "^17.0.2",
+    "react": "^18.0.0",
-    "react-dom": "^17.0.2",
+    "react-dom": "^18.0.0",
     "tailwindcss": "^3.2.4",
     "url": "^0.11.0"
   },
   "devDependencies": {
-    "@docusaurus/module-type-aliases": "^2.4.1",
+    "@docusaurus/module-type-aliases": "^3.1.0",
-    "@tsconfig/docusaurus": "^1.0.5",
+    "@tsconfig/docusaurus": "^2.0.2",
-    "prettier": "^3.0.0",
+    "prettier": "^3.2.4",
     "typescript": "^5.1.6"
   },
   "browserslist": {

@@ -4,6 +4,6 @@
 
   "compilerOptions": {
     "baseUrl": ".",
-    "module": "Node16"
-  }
+    "module": "Node16",
+  },
 }

@@ -30,6 +30,8 @@ download:
     locale_code: pl-PL
   - file: mobile/assets/i18n/fi-FI.json
     locale_code: fi-FI
+  - file: mobile/assets/i18n/pt-PT.json
+    locale_code: pt-PT
   - file: mobile/assets/i18n/pt-BR.json
     locale_code: pt-BR
   - file: mobile/assets/i18n/cs-CZ.json

@@ -1,4 +1,4 @@
-FROM python:3.11-bookworm@sha256:ba7a7ac30c38e119c4304f98ef0e188f90f4f67a958bb6899da9defb99bfb471 as builder
+FROM python:3.11-bookworm@sha256:497c00ec2cff14316a6859c4e30fc88e7ab1f11dd254fb43b8f4b201ca657596 as builder
 
 ENV PYTHONDONTWRITEBYTECODE=1 \
     PYTHONUNBUFFERED=1 \

@@ -13,20 +13,51 @@ ENV VIRTUAL_ENV="/opt/venv" PATH="/opt/venv/bin:${PATH}"
 COPY poetry.lock pyproject.toml ./
 RUN poetry install --sync --no-interaction --no-ansi --no-root --only main
 
-FROM python:3.11-slim-bookworm@sha256:cfd7ed5c11a88ce533d69a1da2fd932d647f9eb6791c5b4ddce081aedf7f7876
+ARG TARGETPLATFORM
+ENV ARMNN_PATH=/opt/armnn
+COPY ann /opt/ann
+RUN if [ "$TARGETPLATFORM" = "linux/arm64" ]; then \
+    mkdir /opt/armnn && \
+    curl -SL "https://github.com/ARM-software/armnn/releases/download/v23.11/ArmNN-linux-aarch64.tar.gz" | tar -zx -C /opt/armnn && \
+    cd /opt/ann && \
+    sh build.sh; \
+    else \
+    mkdir /opt/armnn; \
+    fi
+
+FROM python:3.11-slim-bookworm@sha256:637774748f62b832dc11e7b286e48cd716727ed04b45a0322776c01bc526afc3
+
+ARG TARGETPLATFORM
 RUN apt-get update && apt-get install -y --no-install-recommends tini libmimalloc2.0 && rm -rf /var/lib/apt/lists/*
+
+RUN if [ "$TARGETPLATFORM" = "linux/arm64" ]; then \
+    apt-get update && apt-get install -y --no-install-recommends ocl-icd-libopencl1 mesa-opencl-icd && \
+    rm -rf /var/lib/apt/lists/* && \
+    mkdir --parents /etc/OpenCL/vendors && \
+    echo "/usr/lib/libmali.so" > /etc/OpenCL/vendors/mali.icd && \
+    mkdir /opt/armnn; \
+    fi
+
 WORKDIR /usr/src/app
 ENV NODE_ENV=production \
     TRANSFORMERS_CACHE=/cache \
     PYTHONDONTWRITEBYTECODE=1 \
     PYTHONUNBUFFERED=1 \
     PATH="/opt/venv/bin:$PATH" \
-    PYTHONPATH=/usr/src
+    PYTHONPATH=/usr/src \
+    LD_LIBRARY_PATH=/opt/armnn
+
+# prevent core dumps
+RUN echo "hard core 0" >> /etc/security/limits.conf && \
+    echo "fs.suid_dumpable 0" >> /etc/sysctl.conf && \
+    echo 'ulimit -S -c 0 > /dev/null 2>&1' >> /etc/profile
+
 COPY --from=builder /opt/venv /opt/venv
+COPY --from=builder /opt/armnn/libarmnn.so.?? /opt/armnn/libarmnnOnnxParser.so.?? /opt/armnn/libarmnnDeserializer.so.?? /opt/armnn/libarmnnTfLiteParser.so.?? /opt/armnn/libprotobuf.so.?.??.?.? /opt/ann/libann.s[o] /opt/ann/build.sh /opt/armnn
+COPY ann/ann.py /usr/src/ann/ann.py
 COPY start.sh log_conf.json ./
 COPY app .
 
 ENTRYPOINT ["tini", "--"]
 CMD ["./start.sh"]

@@ -1,6 +1,5 @@
 # Immich Machine Learning
 
-- Image classification
 - CLIP embeddings
 - Facial recognition

machine-learning/ann/__init__.py (new file, 1 line)

from .ann import Ann, is_available

machine-learning/ann/ann.cpp (new file, 281 lines)

#include <fstream>
#include <map>
#include <mutex>
#include <atomic>
#include <vector>

#include "armnn/IRuntime.hpp"
#include "armnn/INetwork.hpp"
#include "armnn/Types.hpp"
#include "armnnDeserializer/IDeserializer.hpp"
#include "armnnTfLiteParser/ITfLiteParser.hpp"
#include "armnnOnnxParser/IOnnxParser.hpp"

using namespace armnn;

struct IOInfos
{
    std::vector<BindingPointInfo> inputInfos;
    std::vector<BindingPointInfo> outputInfos;
};

// from https://rigtorp.se/spinlock/
struct SpinLock
{
    std::atomic<bool> lock_ = {false};

    void lock()
    {
        for (;;)
        {
            if (!lock_.exchange(true, std::memory_order_acquire))
            {
                break;
            }
            while (lock_.load(std::memory_order_relaxed))
                ;
        }
    }

    void unlock() { lock_.store(false, std::memory_order_release); }
};

class Ann
{

public:
    int load(const char *modelPath,
             bool fastMath,
             bool fp16,
             bool saveCachedNetwork,
             const char *cachedNetworkPath)
    {
        INetworkPtr network = loadModel(modelPath);
        IOptimizedNetworkPtr optNet = OptimizeNetwork(network.get(), fastMath, fp16, saveCachedNetwork, cachedNetworkPath);
        const IOInfos infos = getIOInfos(optNet.get());
        NetworkId netId;
        mutex.lock();
        Status status = runtime->LoadNetwork(netId, std::move(optNet));
        mutex.unlock();
        if (status != Status::Success)
        {
            return -1;
        }
        spinLock.lock();
        ioInfos[netId] = infos;
        mutexes.emplace(netId, std::make_unique<std::mutex>());
        spinLock.unlock();
        return netId;
    }

    void execute(NetworkId netId, const void **inputData, void **outputData)
    {
        spinLock.lock();
        const IOInfos *infos = &ioInfos[netId];
        auto m = mutexes[netId].get();
        spinLock.unlock();
        InputTensors inputTensors;
        inputTensors.reserve(infos->inputInfos.size());
        size_t i = 0;
        for (const BindingPointInfo &info : infos->inputInfos)
            inputTensors.emplace_back(info.first, ConstTensor(info.second, inputData[i++]));
        OutputTensors outputTensors;
        outputTensors.reserve(infos->outputInfos.size());
        i = 0;
        for (const BindingPointInfo &info : infos->outputInfos)
            outputTensors.emplace_back(info.first, Tensor(info.second, outputData[i++]));
        m->lock();
        runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
        m->unlock();
    }

    void unload(NetworkId netId)
    {
        mutex.lock();
        runtime->UnloadNetwork(netId);
        mutex.unlock();
    }

    int tensors(NetworkId netId, bool isInput = false)
    {
        spinLock.lock();
        const IOInfos *infos = &ioInfos[netId];
        spinLock.unlock();
        return (int)(isInput ? infos->inputInfos.size() : infos->outputInfos.size());
    }

    unsigned long shape(NetworkId netId, bool isInput = false, int index = 0)
    {
        spinLock.lock();
        const IOInfos *infos = &ioInfos[netId];
        spinLock.unlock();
        const TensorShape shape = (isInput ? infos->inputInfos : infos->outputInfos)[index].second.GetShape();
        unsigned long s = 0;
        for (unsigned int d = 0; d < shape.GetNumDimensions(); d++)
            s |= ((unsigned long)shape[d]) << (d * 16); // stores up to 4 16-bit values in a 64-bit value
        return s;
    }

    Ann(int tuningLevel, const char *tuningFile)
    {
        IRuntime::CreationOptions runtimeOptions;
        BackendOptions backendOptions{"GpuAcc",
                                      {
                                          {"TuningLevel", tuningLevel},
                                          {"MemoryOptimizerStrategy", "ConstantMemoryStrategy"}, // SingleAxisPriorityList or ConstantMemoryStrategy
                                      }};
        if (tuningFile)
            backendOptions.AddOption({"TuningFile", tuningFile});
        runtimeOptions.m_BackendOptions.emplace_back(backendOptions);
        runtime = IRuntime::CreateRaw(runtimeOptions);
    };
    ~Ann()
    {
        IRuntime::Destroy(runtime);
    };

private:
    INetworkPtr loadModel(const char *modelPath)
    {
        const auto path = std::string(modelPath);
        if (path.rfind(".tflite") == path.length() - 7) // endsWith()
        {
            auto parser = armnnTfLiteParser::ITfLiteParser::CreateRaw();
            return parser->CreateNetworkFromBinaryFile(modelPath);
        }
        else if (path.rfind(".onnx") == path.length() - 5) // endsWith()
        {
            auto parser = armnnOnnxParser::IOnnxParser::CreateRaw();
            return parser->CreateNetworkFromBinaryFile(modelPath);
        }
        else
        {
            std::ifstream ifs(path, std::ifstream::in | std::ifstream::binary);
            auto parser = armnnDeserializer::IDeserializer::CreateRaw();
            return parser->CreateNetworkFromBinary(ifs);
        }
    }

    static BindingPointInfo getInputTensorInfo(LayerBindingId inputBindingId, TensorInfo info)
    {
        const auto newInfo = TensorInfo{info.GetShape(), info.GetDataType(),
                                        info.GetQuantizationScale(),
                                        info.GetQuantizationOffset(),
                                        true};
        return {inputBindingId, newInfo};
    }

    IOptimizedNetworkPtr OptimizeNetwork(INetwork *network, bool fastMath, bool fp16, bool saveCachedNetwork, const char *cachedNetworkPath)
    {
        const bool allowExpandedDims = false;
        const ShapeInferenceMethod shapeInferenceMethod = ShapeInferenceMethod::ValidateOnly;

        OptimizerOptionsOpaque options;
        options.SetReduceFp32ToFp16(fp16);
        options.SetShapeInferenceMethod(shapeInferenceMethod);
        options.SetAllowExpandedDims(allowExpandedDims);

        BackendOptions gpuAcc("GpuAcc", {{"FastMathEnabled", fastMath}});
        if (cachedNetworkPath)
        {
            gpuAcc.AddOption({"SaveCachedNetwork", saveCachedNetwork});
            gpuAcc.AddOption({"CachedNetworkFilePath", cachedNetworkPath});
        }
        options.AddModelOption(gpuAcc);

        // No point in using ARMNN for CPU, use ONNX (quantized) instead.
        // BackendOptions cpuAcc("CpuAcc",
        //                       {
        //                           {"FastMathEnabled", fastMath},
        //                           {"NumberOfThreads", 0},
        //                       });
        // options.AddModelOption(cpuAcc);

        BackendOptions allowExDimOpt("AllowExpandedDims",
                                     {{"AllowExpandedDims", allowExpandedDims}});
        options.AddModelOption(allowExDimOpt);
        BackendOptions shapeInferOpt("ShapeInferenceMethod",
                                     {{"InferAndValidate", shapeInferenceMethod == ShapeInferenceMethod::InferAndValidate}});
        options.AddModelOption(shapeInferOpt);

        std::vector<BackendId> backends = {
            BackendId("GpuAcc"),
            // BackendId("CpuAcc"),
            // BackendId("CpuRef"),
        };
        return Optimize(*network, backends, runtime->GetDeviceSpec(), options);
    }

    IOInfos getIOInfos(IOptimizedNetwork *optNet)
    {
        struct InfoStrategy : IStrategy
        {
            void ExecuteStrategy(const IConnectableLayer *layer,
                                 const BaseDescriptor &descriptor,
                                 const std::vector<ConstTensor> &constants,
                                 const char *name,
                                 const LayerBindingId id = 0) override
            {
                IgnoreUnused(descriptor, constants, id);
                const LayerType lt = layer->GetType();
                if (lt == LayerType::Input)
                    ioInfos.inputInfos.push_back(getInputTensorInfo(id, layer->GetOutputSlot(0).GetTensorInfo()));
                else if (lt == LayerType::Output)
                    ioInfos.outputInfos.push_back({id, layer->GetInputSlot(0).GetTensorInfo()});
            }
            IOInfos ioInfos;
        };

        InfoStrategy infoStrategy;
        optNet->ExecuteStrategy(infoStrategy);
        return infoStrategy.ioInfos;
    }

    IRuntime *runtime;
    std::map<NetworkId, IOInfos> ioInfos;
    std::map<NetworkId, std::unique_ptr<std::mutex>> mutexes; // mutex per network so the same network is not executed concurrently
    std::mutex mutex;                                         // global mutex for load/unload calls to the runtime
    SpinLock spinLock;                                        // fast spin lock to guard access to the ioInfos and mutexes maps
};

extern "C" void *init(int logLevel, int tuningLevel, const char *tuningFile)
{
    LogSeverity level = static_cast<LogSeverity>(logLevel);
    ConfigureLogging(true, true, level);

    Ann *ann = new Ann(tuningLevel, tuningFile);
    return ann;
}

extern "C" void destroy(void *ann)
{
    delete ((Ann *)ann);
}

extern "C" int load(void *ann,
                    const char *path,
                    bool fastMath,
                    bool fp16,
                    bool saveCachedNetwork,
                    const char *cachedNetworkPath)
{
    return ((Ann *)ann)->load(path, fastMath, fp16, saveCachedNetwork, cachedNetworkPath);
}

extern "C" void unload(void *ann, NetworkId netId)
{
    ((Ann *)ann)->unload(netId);
}

extern "C" void execute(void *ann, NetworkId netId, const void **inputData, void **outputData)
{
    ((Ann *)ann)->execute(netId, inputData, outputData);
}

extern "C" unsigned long shape(void *ann, NetworkId netId, bool isInput, int index)
{
    return ((Ann *)ann)->shape(netId, isInput, index);
}

extern "C" int tensors(void *ann, NetworkId netId, bool isInput)
{
    return ((Ann *)ann)->tensors(netId, isInput);
}

machine-learning/ann/ann.py (new file, 162 lines)

from __future__ import annotations

from ctypes import CDLL, Array, c_bool, c_char_p, c_int, c_ulong, c_void_p
from os.path import exists
from typing import Any, Protocol, TypeVar

import numpy as np
from numpy.typing import NDArray

from app.config import log

try:
    CDLL("libmali.so")  # fail if libmali.so is not mounted into container
    libann = CDLL("libann.so")
    libann.init.argtypes = c_int, c_int, c_char_p
    libann.init.restype = c_void_p
    libann.load.argtypes = c_void_p, c_char_p, c_bool, c_bool, c_bool, c_char_p
    libann.load.restype = c_int
    libann.execute.argtypes = c_void_p, c_int, Array[c_void_p], Array[c_void_p]
    libann.unload.argtypes = c_void_p, c_int
    libann.destroy.argtypes = (c_void_p,)
    libann.shape.argtypes = c_void_p, c_int, c_bool, c_int
    libann.shape.restype = c_ulong
    libann.tensors.argtypes = c_void_p, c_int, c_bool
    libann.tensors.restype = c_int
    is_available = True
except OSError as e:
    log.debug("Could not load ANN shared libraries, using ONNX: %s", e)
    is_available = False


T = TypeVar("T", covariant=True)


class Newable(Protocol[T]):
    def new(self) -> None:
        ...


class _Singleton(type, Newable[T]):
    _instances: dict[_Singleton[T], Newable[T]] = {}

    def __call__(cls, *args: Any, **kwargs: Any) -> Newable[T]:
        if cls not in cls._instances:
            obj: Newable[T] = super(_Singleton, cls).__call__(*args, **kwargs)
            cls._instances[cls] = obj
        else:
            obj = cls._instances[cls]
            obj.new()
        return obj


class Ann(metaclass=_Singleton):
    def __init__(self, log_level: int = 3, tuning_level: int = 1, tuning_file: str | None = None) -> None:
        if not is_available:
            raise RuntimeError("libann is not available!")
        if tuning_file and not exists(tuning_file):
            raise ValueError("tuning_file must point to an existing (possibly empty) file!")
        if tuning_level == 0 and tuning_file is None:
            raise ValueError("tuning_level == 0 reads existing tuning information and requires a tuning_file")
        if tuning_level < 0 or tuning_level > 3:
            raise ValueError("tuning_level must be 0 (load from tuning_file), 1, 2 or 3.")
        if log_level < 0 or log_level > 5:
            raise ValueError("log_level must be 0 (trace), 1 (debug), 2 (info), 3 (warning), 4 (error) or 5 (fatal)")
        self.log_level = log_level
        self.tuning_level = tuning_level
        self.tuning_file = tuning_file
        self.output_shapes: dict[int, tuple[tuple[int], ...]] = {}
        self.input_shapes: dict[int, tuple[tuple[int], ...]] = {}
        self.ann: int | None = None
        self.new()

    def new(self) -> None:
        if self.ann is None:
            self.ann = libann.init(
                self.log_level,
                self.tuning_level,
                self.tuning_file.encode() if self.tuning_file is not None else None,
            )
            self.ref_count = 0

        self.ref_count += 1

    def destroy(self) -> None:
        self.ref_count -= 1
        if self.ref_count <= 0 and self.ann is not None:
            libann.destroy(self.ann)
            self.ann = None

    def __del__(self) -> None:
        if self.ann is not None:
            libann.destroy(self.ann)
            self.ann = None

    def load(
        self,
        model_path: str,
        fast_math: bool = True,
        fp16: bool = False,
        save_cached_network: bool = False,
        cached_network_path: str | None = None,
    ) -> int:
        if not model_path.endswith((".armnn", ".tflite", ".onnx")):
            raise ValueError("model_path must be a file with extension .armnn, .tflite or .onnx")
        if not exists(model_path):
            raise ValueError("model_path must point to an existing file!")
        if cached_network_path is not None and not exists(cached_network_path):
            raise ValueError("cached_network_path must point to an existing (possibly empty) file!")
        if save_cached_network and cached_network_path is None:
            raise ValueError("save_cached_network is True, cached_network_path must be specified!")
        net_id: int = libann.load(
            self.ann,
            model_path.encode(),
            fast_math,
            fp16,
            save_cached_network,
            cached_network_path.encode() if cached_network_path is not None else None,
        )

        self.input_shapes[net_id] = tuple(
            self.shape(net_id, input=True, index=i) for i in range(self.tensors(net_id, input=True))
        )
        self.output_shapes[net_id] = tuple(
            self.shape(net_id, input=False, index=i) for i in range(self.tensors(net_id, input=False))
        )
        return net_id

    def unload(self, network_id: int) -> None:
        libann.unload(self.ann, network_id)
        del self.output_shapes[network_id]

    def execute(self, network_id: int, input_tensors: list[NDArray[np.float32]]) -> list[NDArray[np.float32]]:
        if not isinstance(input_tensors, list):
            raise ValueError("input_tensors needs to be a list!")
        net_input_shapes = self.input_shapes[network_id]
        if len(input_tensors) != len(net_input_shapes):
            raise ValueError(f"input_tensors lengths {len(input_tensors)} != network inputs {len(net_input_shapes)}")
        for net_input_shape, input_tensor in zip(net_input_shapes, input_tensors):
            if net_input_shape != input_tensor.shape:
                raise ValueError(f"input_tensor shape {input_tensor.shape} != network input shape {net_input_shape}")
            if not input_tensor.flags.c_contiguous:
                raise ValueError("input_tensors must be c_contiguous numpy ndarrays")
        output_tensors: list[NDArray[np.float32]] = [
            np.ndarray(s, dtype=np.float32) for s in self.output_shapes[network_id]
        ]
        input_type = c_void_p * len(input_tensors)
        inputs = input_type(*[t.ctypes.data_as(c_void_p) for t in input_tensors])
        output_type = c_void_p * len(output_tensors)
        outputs = output_type(*[t.ctypes.data_as(c_void_p) for t in output_tensors])
        libann.execute(self.ann, network_id, inputs, outputs)
        return output_tensors

    def shape(self, network_id: int, input: bool = False, index: int = 0) -> tuple[int]:
        s = libann.shape(self.ann, network_id, input, index)
        a = []
        while s != 0:
            a.append(s & 0xFFFF)
            s >>= 16
        return tuple(a)

    def tensors(self, network_id: int, input: bool = False) -> int:
        tensors: int = libann.tensors(self.ann, network_id, input)
        return tensors

machine-learning/ann/build.sh (new file, 1 line)

g++ -shared -O3 -o libann.so -fuse-ld=gold -std=c++17 -I$ARMNN_PATH/include -larmnn -larmnnDeserializer -larmnnTfLiteParser -larmnnOnnxParser -L$ARMNN_PATH ann.cpp

machine-learning/ann/export/.gitignore (new file, vendored, 2 lines)

armnn*
output/

machine-learning/ann/export/build-converter.sh (new executable file, 4 lines)

#!/bin/sh

cd armnn-23.11/
g++ -o ../armnnconverter -O1 -DARMNN_ONNX_PARSER -DARMNN_SERIALIZER -DARMNN_TF_LITE_PARSER -fuse-ld=gold -std=c++17 -Iinclude -Isrc/armnnUtils -Ithird-party -larmnn -larmnnDeserializer -larmnnTfLiteParser -larmnnOnnxParser -larmnnSerializer -L../armnn src/armnnConverter/ArmnnConverter.cpp