Mirror of https://github.com/immich-app/immich.git (synced 2025-12-06 21:01:24 -08:00)
Compare commits: 401 commits, v1.108.0...sqlite-flu
@@ -22,6 +22,7 @@ open-api/typescript-sdk/node_modules/
server/coverage/
server/node_modules/
server/upload/
server/src/queries
server/dist/
server/www/

@@ -29,3 +30,4 @@ web/node_modules/
web/coverage/
web/.svelte-kit
web/build/
web/.env
@@ -1,11 +1,13 @@
title: "[Feature] <feature-name-goes-here>"
title: "[Feature] feature-name-goes-here"
labels: ["feature"]

body:
- type: markdown
attributes:
value: |
Please use this form to request new feature for Immich
Please use this form to request new feature for Immich.
Stick to only a single feature per request. If you list multiple different features at once,
your request will be closed.

- type: checkboxes
attributes:
1 .github/FUNDING.yml vendored Normal file
@@ -0,0 +1 @@
custom: ['https://buy.immich.app']
1 .github/ISSUE_TEMPLATE/bug_report.yaml vendored
@@ -83,7 +83,6 @@ body:
2.
3.
...
render: bash
validations:
required: true
40 .github/release.yml vendored
@@ -1,41 +1,29 @@
changelog:
categories:
- title: ⚠️ Breaking Changes
- title: 🚨 Breaking Changes
labels:
- breaking-change
- changelog:breaking-change

- title: 🗄️ Server
- title: 🔒 Security
labels:
- 🗄️server
- changelog:security

- title: 📱 Mobile
- title: 🚀 Features
labels:
- 📱mobile
- changelog:feature

- title: 🖥️ Web
- title: 🌟 Enhancements
labels:
- 🖥️web
- changelog:enhancement

- title: 🧠 Machine Learning
- title: 🐛 Bug fixes
labels:
- 🧠machine-learning
- changelog:bugfix

- title: ⚡ CLI
- title: 📚 Documentation
labels:
- cli
- changelog:documentation

- title: 📓 Documentation
- title: 🌐 Translations
labels:
- documentation

- title: 🔨 Maintenance
labels:
- deployment
- dependencies
- renovate
- maintenance
- tech-debt

- title: Other changes
labels:
- "*"
- changelog:translation
20 .github/workflows/build-mobile.yml vendored
@@ -16,10 +16,28 @@ concurrency:
cancel-in-progress: true

jobs:
pre-job:
runs-on: ubuntu-latest
outputs:
should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps:
- name: Checkout code
uses: actions/checkout@v4
- id: found_paths
uses: dorny/paths-filter@v3
with:
filters: |
mobile:
- 'mobile/**'
- name: Check if we should force jobs to run
id: should_force
run: echo "should_force=${{ github.event_name == 'workflow_call' || github.event_name == 'workflow_dispatch' }}" >> "$GITHUB_OUTPUT"

build-sign-android:
name: Build and sign Android
needs: pre-job
# Skip when PR from a fork
if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' }}
if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' && needs.pre-job.outputs.should_run == 'true' }}
runs-on: macos-14

steps:
6 .github/workflows/cli.yml vendored
@@ -56,10 +56,10 @@ jobs:
uses: actions/checkout@v4

- name: Set up QEMU
uses: docker/setup-qemu-action@v3.1.0
uses: docker/setup-qemu-action@v3.2.0

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3.4.0
uses: docker/setup-buildx-action@v3.6.1

- name: Login to GitHub Container Registry
uses: docker/login-action@v3
@@ -88,7 +88,7 @@ jobs:
type=raw,value=latest,enable=${{ github.event_name == 'release' }}

- name: Build and push image
uses: docker/build-push-action@v6.3.0
uses: docker/build-push-action@v6.7.0
with:
file: cli/Dockerfile
platforms: linux/amd64,linux/arm64
4 .github/workflows/docker-cleanup.yml vendored
@@ -35,7 +35,7 @@ jobs:
steps:
- name: Clean temporary images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/ephemeral@v0.7.0
uses: stumpylog/image-cleaner-action/ephemeral@v0.8.0
with:
token: "${{ env.TOKEN }}"
owner: "immich-app"
@@ -64,7 +64,7 @@ jobs:
steps:
- name: Clean untagged images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/untagged@v0.7.0
uses: stumpylog/image-cleaner-action/untagged@v0.8.0
with:
token: "${{ env.TOKEN }}"
owner: "immich-app"
168 .github/workflows/docker.yml vendored
@@ -17,56 +17,67 @@ permissions:
|
||||
packages: write
|
||||
|
||||
jobs:
|
||||
build_and_push:
|
||||
name: Build and Push
|
||||
pre-job:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@v3
|
||||
with:
|
||||
filters: |
|
||||
server:
|
||||
- 'server/**'
|
||||
- 'openapi/**'
|
||||
- 'web/**'
|
||||
machine-learning:
|
||||
- 'machine-learning/**'
|
||||
|
||||
- name: Check if we should force jobs to run
|
||||
id: should_force
|
||||
run: echo "should_force=${{ github.event_name == 'workflow_dispatch' || github.event_name == 'release' }}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
build_and_push_ml:
|
||||
name: Build and Push ML
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
image: immich-machine-learning
|
||||
context: machine-learning
|
||||
file: machine-learning/Dockerfile
|
||||
strategy:
|
||||
# Prevent a failure in one image from stopping the other builds
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- image: immich-machine-learning
|
||||
context: machine-learning
|
||||
file: machine-learning/Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
- platforms: linux/amd64,linux/arm64
|
||||
device: cpu
|
||||
|
||||
- image: immich-machine-learning
|
||||
context: machine-learning
|
||||
file: machine-learning/Dockerfile
|
||||
platforms: linux/amd64
|
||||
- platforms: linux/amd64
|
||||
device: cuda
|
||||
suffix: -cuda
|
||||
|
||||
- image: immich-machine-learning
|
||||
context: machine-learning
|
||||
file: machine-learning/Dockerfile
|
||||
platforms: linux/amd64
|
||||
- platforms: linux/amd64
|
||||
device: openvino
|
||||
suffix: -openvino
|
||||
|
||||
- image: immich-machine-learning
|
||||
context: machine-learning
|
||||
file: machine-learning/Dockerfile
|
||||
platforms: linux/arm64
|
||||
- platforms: linux/arm64
|
||||
device: armnn
|
||||
suffix: -armnn
|
||||
|
||||
- image: immich-server
|
||||
context: .
|
||||
file: server/Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
device: cpu
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3.1.0
|
||||
uses: docker/setup-qemu-action@v3.2.0
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3.4.0
|
||||
uses: docker/setup-buildx-action@v3.6.1
|
||||
|
||||
- name: Login to Docker Hub
|
||||
# Only push to Docker Hub when making a release
|
||||
@@ -93,8 +104,8 @@ jobs:
|
||||
# Disable latest tag
|
||||
latest=false
|
||||
images: |
|
||||
name=ghcr.io/${{ github.repository_owner }}/${{matrix.image}}
|
||||
name=altran1502/${{matrix.image}},enable=${{ github.event_name == 'release' }}
|
||||
name=ghcr.io/${{ github.repository_owner }}/${{env.image}}
|
||||
name=altran1502/${{env.image}},enable=${{ github.event_name == 'release' }}
|
||||
tags: |
|
||||
# Tag with branch name
|
||||
type=ref,event=branch,suffix=${{ matrix.suffix }}
|
||||
@@ -111,18 +122,109 @@ jobs:
|
||||
# Essentially just ignore the cache output (PR can't write to registry cache)
|
||||
echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "cache-to=type=registry,mode=max,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{ matrix.image }}" >> $GITHUB_OUTPUT
|
||||
echo "cache-to=type=registry,mode=max,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{ env.image }}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Build and push image
|
||||
uses: docker/build-push-action@v6.3.0
|
||||
uses: docker/build-push-action@v6.7.0
|
||||
with:
|
||||
context: ${{ matrix.context }}
|
||||
file: ${{ matrix.file }}
|
||||
context: ${{ env.context }}
|
||||
file: ${{ env.file }}
|
||||
platforms: ${{ matrix.platforms }}
|
||||
# Skip pushing when PR from a fork
|
||||
push: ${{ !github.event.pull_request.head.repo.fork }}
|
||||
cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{matrix.image}}
|
||||
cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{env.image}}
|
||||
cache-to: ${{ steps.cache-target.outputs.cache-to }}
|
||||
tags: ${{ steps.metadata.outputs.tags }}
|
||||
labels: ${{ steps.metadata.outputs.labels }}
|
||||
build-args: |
|
||||
DEVICE=${{ matrix.device }}
|
||||
BUILD_ID=${{ github.run_id }}
|
||||
BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.metadata.outputs.tags }}
|
||||
BUILD_SOURCE_REF=${{ github.ref_name }}
|
||||
BUILD_SOURCE_COMMIT=${{ github.sha }}
|
||||
|
||||
|
||||
build_and_push_server:
|
||||
name: Build and Push Server
|
||||
runs-on: ubuntu-latest
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
|
||||
env:
|
||||
image: immich-server
|
||||
context: .
|
||||
file: server/Dockerfile
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- platforms: linux/amd64,linux/arm64
|
||||
device: cpu
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3.2.0
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3.6.1
|
||||
|
||||
- name: Login to Docker Hub
|
||||
# Only push to Docker Hub when making a release
|
||||
if: ${{ github.event_name == 'release' }}
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
# Skip when PR from a fork
|
||||
if: ${{ !github.event.pull_request.head.repo.fork }}
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Generate docker image tags
|
||||
id: metadata
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
flavor: |
|
||||
# Disable latest tag
|
||||
latest=false
|
||||
images: |
|
||||
name=ghcr.io/${{ github.repository_owner }}/${{env.image}}
|
||||
name=altran1502/${{env.image}},enable=${{ github.event_name == 'release' }}
|
||||
tags: |
|
||||
# Tag with branch name
|
||||
type=ref,event=branch,suffix=${{ matrix.suffix }}
|
||||
# Tag with pr-number
|
||||
type=ref,event=pr,suffix=${{ matrix.suffix }}
|
||||
# Tag with git tag on release
|
||||
type=ref,event=tag,suffix=${{ matrix.suffix }}
|
||||
type=raw,value=release,enable=${{ github.event_name == 'release' }},suffix=${{ matrix.suffix }}
|
||||
|
||||
- name: Determine build cache output
|
||||
id: cache-target
|
||||
run: |
|
||||
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
|
||||
# Essentially just ignore the cache output (PR can't write to registry cache)
|
||||
echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "cache-to=type=registry,mode=max,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{ env.image }}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Build and push image
|
||||
uses: docker/build-push-action@v6.7.0
|
||||
with:
|
||||
context: ${{ env.context }}
|
||||
file: ${{ env.file }}
|
||||
platforms: ${{ matrix.platforms }}
|
||||
# Skip pushing when PR from a fork
|
||||
push: ${{ !github.event.pull_request.head.repo.fork }}
|
||||
cache-from: type=registry,ref=ghcr.io/${{ github.repository_owner }}/immich-build-cache:${{env.image}}
|
||||
cache-to: ${{ steps.cache-target.outputs.cache-to }}
|
||||
tags: ${{ steps.metadata.outputs.tags }}
|
||||
labels: ${{ steps.metadata.outputs.labels }}
|
||||
|
||||
23 .github/workflows/docs-build.yml vendored
@@ -2,12 +2,8 @@ name: Docs build
on:
push:
branches: [main]
paths:
- "docs/**"
pull_request:
branches: [main]
paths:
- "docs/**"
release:
types: [published]

@@ -16,7 +12,26 @@ concurrency:
cancel-in-progress: true

jobs:
pre-job:
runs-on: ubuntu-latest
outputs:
should_run: ${{ steps.found_paths.outputs.docs == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps:
- name: Checkout code
uses: actions/checkout@v4
- id: found_paths
uses: dorny/paths-filter@v3
with:
filters: |
docs:
- 'docs/**'
- name: Check if we should force jobs to run
id: should_force
run: echo "should_force=${{ github.event_name == 'release' }}" >> "$GITHUB_OUTPUT"

build:
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run == 'true' }}
runs-on: ubuntu-latest
defaults:
run:
37 .github/workflows/docs-deploy.yml vendored
@@ -10,10 +10,28 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
parameters: ${{ steps.parameters.outputs.result }}
|
||||
artifact: ${{ steps.get-artifact.outputs.result }}
|
||||
steps:
|
||||
- if: ${{ github.event.workflow_run.conclusion == 'failure' }}
|
||||
run: echo 'The triggering workflow failed' && exit 1
|
||||
|
||||
- if: ${{ github.event.workflow_run.conclusion != 'success' }}
|
||||
run: echo 'The triggering workflow did not succeed' && exit 1
|
||||
- name: Get artifact
|
||||
id: get-artifact
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
run_id: context.payload.workflow_run.id,
|
||||
});
|
||||
let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => {
|
||||
return artifact.name == "docs-build-output"
|
||||
})[0];
|
||||
if (!matchArtifact) {
|
||||
console.log("No artifact found with the name docs-build-output, build job was skipped")
|
||||
return { found: false };
|
||||
}
|
||||
return { found: true, id: matchArtifact.id };
|
||||
- name: Determine deploy parameters
|
||||
id: parameters
|
||||
uses: actions/github-script@v7
|
||||
@@ -75,7 +93,7 @@ jobs:
|
||||
deploy:
|
||||
runs-on: ubuntu-latest
|
||||
needs: checks
|
||||
if: ${{ fromJson(needs.checks.outputs.parameters).shouldDeploy }}
|
||||
if: ${{ fromJson(needs.checks.outputs.artifact).found && fromJson(needs.checks.outputs.parameters).shouldDeploy }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
@@ -98,18 +116,11 @@ jobs:
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
run_id: context.payload.workflow_run.id,
|
||||
});
|
||||
let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => {
|
||||
return artifact.name == "docs-build-output"
|
||||
})[0];
|
||||
let artifact = ${{ needs.checks.outputs.artifact }};
|
||||
let download = await github.rest.actions.downloadArtifact({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
artifact_id: matchArtifact.id,
|
||||
artifact_id: artifact.id,
|
||||
archive_format: 'zip',
|
||||
});
|
||||
let fs = require('fs');
|
||||
|
||||
21 .github/workflows/pr-label-validation.yml vendored Normal file
@@ -0,0 +1,21 @@
name: PR Label Validation

on:
pull_request_target:
types: [opened, labeled, unlabeled, synchronize]

jobs:
validate-release-label:
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: read
steps:
- name: Require PR to have a changelog label
uses: mheap/github-action-required-labels@v5
with:
mode: exactly
count: 1
use_regex: true
labels: "changelog:.*"
add_comment: true
15 .github/workflows/prepare-release.yml vendored
@@ -29,10 +29,17 @@ jobs:
ref: ${{ steps.push-tag.outputs.commit_long_sha }}

steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@v1
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

- name: Checkout
uses: actions/checkout@v4
with:
token: ${{ secrets.ORG_RELEASE_TOKEN }}
token: ${{ steps.generate-token.outputs.token }}

- name: Install Poetry
run: pipx install poetry
@@ -44,10 +51,8 @@ jobs:
id: push-tag
uses: EndBug/add-and-commit@v9
with:
author_name: Alex The Bot
author_email: alex.tran1502@gmail.com
default_author: user_info
message: 'Version ${{ env.IMMICH_VERSION }}'
default_author: github_actions
message: 'chore: version ${{ env.IMMICH_VERSION }}'
tag: ${{ env.IMMICH_VERSION }}
push: true
19 .github/workflows/static_analysis.yml vendored
@@ -10,8 +10,27 @@ concurrency:
cancel-in-progress: true

jobs:
pre-job:
runs-on: ubuntu-latest
outputs:
should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
steps:
- name: Checkout code
uses: actions/checkout@v4
- id: found_paths
uses: dorny/paths-filter@v3
with:
filters: |
mobile:
- 'mobile/**'
- name: Check if we should force jobs to run
id: should_force
run: echo "should_force=${{ github.event_name == 'release' }}" >> "$GITHUB_OUTPUT"

mobile-dart-analyze:
name: Run Dart Code Analysis
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run == 'true' }}

runs-on: ubuntu-latest
135 .github/workflows/test.yml vendored
@@ -10,8 +10,47 @@ concurrency:
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
pre-job:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
should_run_web: ${{ steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
should_run_cli: ${{ steps.found_paths.outputs.cli == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
should_run_e2e: ${{ steps.found_paths.outputs.e2e == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
should_run_mobile: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
should_run_e2e_web: ${{ steps.found_paths.outputs.e2e == 'true' || steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
should_run_e2e_server_cli: ${{ steps.found_paths.outputs.e2e == 'true' || steps.found_paths.outputs.server == 'true' || steps.found_paths.outputs.cli == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@v3
|
||||
with:
|
||||
filters: |
|
||||
web:
|
||||
- 'web/**'
|
||||
- 'open-api/typescript-sdk/**'
|
||||
server:
|
||||
- 'server/**'
|
||||
cli:
|
||||
- 'cli/**'
|
||||
- 'open-api/typescript-sdk/**'
|
||||
e2e:
|
||||
- 'e2e/**'
|
||||
mobile:
|
||||
- 'mobile/**'
|
||||
machine-learning:
|
||||
- 'machine-learning/**'
|
||||
|
||||
- name: Check if we should force jobs to run
|
||||
id: should_force
|
||||
run: echo "should_force=${{ github.event_name == 'workflow_dispatch' }}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
server-unit-tests:
|
||||
name: Server
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
@@ -47,6 +86,8 @@ jobs:
|
||||
|
||||
cli-unit-tests:
|
||||
name: CLI
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
@@ -86,6 +127,8 @@ jobs:
|
||||
|
||||
cli-unit-tests-win:
|
||||
name: CLI (Windows)
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
|
||||
runs-on: windows-latest
|
||||
defaults:
|
||||
run:
|
||||
@@ -118,6 +161,8 @@ jobs:
|
||||
|
||||
web-unit-tests:
|
||||
name: Web
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_web == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
@@ -159,13 +204,54 @@ jobs:
|
||||
run: npm run test:cov
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
e2e-tests:
|
||||
name: End-to-End Tests
|
||||
e2e-tests-lint:
|
||||
name: End-to-End Lint
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_e2e == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./e2e
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: './e2e/.nvmrc'
|
||||
|
||||
- name: Run setup typescript-sdk
|
||||
run: npm ci && npm run build
|
||||
working-directory: ./open-api/typescript-sdk
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run linter
|
||||
run: npm run lint
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run formatter
|
||||
run: npm run format
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run tsc
|
||||
run: npm run check
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
e2e-tests-server-cli:
|
||||
name: End-to-End Tests (Server & CLI)
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_e2e_server_cli == 'true' }}
|
||||
runs-on: mich
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./e2e
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
@@ -191,16 +277,41 @@ jobs:
|
||||
run: npm ci
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run linter
|
||||
run: npm run lint
|
||||
- name: Docker build
|
||||
run: docker compose build
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run formatter
|
||||
run: npm run format
|
||||
- name: Run e2e tests (api & cli)
|
||||
run: npm run test
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run tsc
|
||||
run: npm run check
|
||||
e2e-tests-web:
|
||||
name: End-to-End Tests (Web)
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_e2e_web == 'true' }}
|
||||
runs-on: mich
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./e2e
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: 'recursive'
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: './e2e/.nvmrc'
|
||||
|
||||
- name: Run setup typescript-sdk
|
||||
run: npm ci && npm run build
|
||||
working-directory: ./open-api/typescript-sdk
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Install Playwright Browsers
|
||||
@@ -211,16 +322,14 @@ jobs:
|
||||
run: docker compose build
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run e2e tests (api & cli)
|
||||
run: npm run test
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run e2e tests (web)
|
||||
run: npx playwright test
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
mobile-unit-tests:
|
||||
name: Mobile
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_mobile == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -235,6 +344,8 @@ jobs:
|
||||
|
||||
ml-unit-tests:
|
||||
name: Machine Learning
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
|
||||
2 .gitmodules vendored
@@ -1,6 +1,6 @@
[submodule "mobile/.isar"]
path = mobile/.isar
url = https://github.com/isar/isar
[submodule "server/test/assets"]
[submodule "e2e/test-assets"]
path = e2e/test-assets
url = https://github.com/immich-app/test-assets
@@ -1 +0,0 @@
/dist
@@ -1,28 +0,0 @@
module.exports = {
parser: '@typescript-eslint/parser',
parserOptions: {
project: 'tsconfig.json',
sourceType: 'module',
tsconfigRootDir: __dirname,
},
plugins: ['@typescript-eslint/eslint-plugin'],
extends: ['plugin:@typescript-eslint/recommended', 'plugin:prettier/recommended', 'plugin:unicorn/recommended'],
root: true,
env: {
node: true,
},
ignorePatterns: ['.eslintrc.js'],
rules: {
'@typescript-eslint/interface-name-prefix': 'off',
'@typescript-eslint/explicit-function-return-type': 'off',
'@typescript-eslint/explicit-module-boundary-types': 'off',
'@typescript-eslint/no-explicit-any': 'off',
'@typescript-eslint/no-floating-promises': 'error',
'unicorn/prefer-module': 'off',
'unicorn/prevent-abbreviations': 'off',
'unicorn/no-process-exit': 'off',
'unicorn/import-style': 'off',
curly: 2,
'prettier/prettier': 0,
},
};
@@ -1 +1 @@
20.15.1
20.17.0
@@ -1,4 +1,4 @@
FROM node:20.15.1-alpine3.20@sha256:34b7aa411056c85dbf71d240d26516949b3f72b318d796c26b57caaa1df5639a as core
FROM node:20.17.0-alpine3.20@sha256:1a526b97cace6b4006256570efa1a29cd1fe4b96a5301f8d48e87c5139438a45 AS core

WORKDIR /usr/src/open-api/typescript-sdk
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
@@ -4,8 +4,18 @@ Please see the [Immich CLI documentation](https://immich.app/docs/features/comma

# For developers

Before building the CLI, you must build the immich server and the open-api client. To build the server run the following in the server folder:

$ npm install
$ npm run build

Then, to build the open-api client run the following in the open-api folder:

$ ./bin/generate-open-api.sh

To run the Immich CLI from source, run the following in the cli folder:

$ npm install
$ npm run build
$ ts-node .

@@ -17,3 +27,4 @@ You can also build and install the CLI using

$ npm run build
$ npm install -g .
****
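As a quick sanity check after the global install described in the README hunk above, the CLI should respond to its help flag. The `immich` command name here is an assumption based on the binary that the `@immich/cli` package installs; the exact output depends on the installed version:

$ immich --help   # prints the CLI usage and available subcommands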
61 cli/eslint.config.mjs Normal file
@@ -0,0 +1,61 @@
|
||||
import { FlatCompat } from '@eslint/eslintrc';
|
||||
import js from '@eslint/js';
|
||||
import typescriptEslint from '@typescript-eslint/eslint-plugin';
|
||||
import tsParser from '@typescript-eslint/parser';
|
||||
import globals from 'globals';
|
||||
import path from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
const compat = new FlatCompat({
|
||||
baseDirectory: __dirname,
|
||||
recommendedConfig: js.configs.recommended,
|
||||
allConfig: js.configs.all,
|
||||
});
|
||||
|
||||
export default [
|
||||
{
|
||||
ignores: ['eslint.config.mjs', 'dist'],
|
||||
},
|
||||
...compat.extends(
|
||||
'plugin:@typescript-eslint/recommended',
|
||||
'plugin:prettier/recommended',
|
||||
'plugin:unicorn/recommended',
|
||||
),
|
||||
{
|
||||
plugins: {
|
||||
'@typescript-eslint': typescriptEslint,
|
||||
},
|
||||
|
||||
languageOptions: {
|
||||
globals: {
|
||||
...globals.node,
|
||||
},
|
||||
|
||||
parser: tsParser,
|
||||
ecmaVersion: 5,
|
||||
sourceType: 'module',
|
||||
|
||||
parserOptions: {
|
||||
project: 'tsconfig.json',
|
||||
tsconfigRootDir: __dirname,
|
||||
},
|
||||
},
|
||||
|
||||
rules: {
|
||||
'@typescript-eslint/interface-name-prefix': 'off',
|
||||
'@typescript-eslint/explicit-function-return-type': 'off',
|
||||
'@typescript-eslint/explicit-module-boundary-types': 'off',
|
||||
'@typescript-eslint/no-explicit-any': 'off',
|
||||
'@typescript-eslint/no-floating-promises': 'error',
|
||||
'unicorn/prefer-module': 'off',
|
||||
'unicorn/prevent-abbreviations': 'off',
|
||||
'unicorn/no-process-exit': 'off',
|
||||
'unicorn/import-style': 'off',
|
||||
curly: 2,
|
||||
'prettier/prettier': 0,
|
||||
'object-shorthand': ['error', 'always'],
|
||||
},
|
||||
},
|
||||
];
|
||||
1820 cli/package-lock.json generated
File diff suppressed because it is too large.
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@immich/cli",
|
||||
"version": "2.2.8",
|
||||
"version": "2.2.17",
|
||||
"description": "Command Line Interface (CLI) for Immich",
|
||||
"type": "module",
|
||||
"exports": "./dist/index.js",
|
||||
@@ -13,30 +13,33 @@
|
||||
"cli"
|
||||
],
|
||||
"devDependencies": {
|
||||
"@eslint/eslintrc": "^3.1.0",
|
||||
"@eslint/js": "^9.8.0",
|
||||
"@immich/sdk": "file:../open-api/typescript-sdk",
|
||||
"@types/byte-size": "^8.1.0",
|
||||
"@types/cli-progress": "^3.11.0",
|
||||
"@types/lodash-es": "^4.17.12",
|
||||
"@types/mock-fs": "^4.13.1",
|
||||
"@types/node": "^20.14.10",
|
||||
"@typescript-eslint/eslint-plugin": "^7.0.0",
|
||||
"@typescript-eslint/parser": "^7.0.0",
|
||||
"@vitest/coverage-v8": "^1.2.2",
|
||||
"byte-size": "^8.1.1",
|
||||
"@types/node": "^20.16.2",
|
||||
"@typescript-eslint/eslint-plugin": "^8.0.0",
|
||||
"@typescript-eslint/parser": "^8.0.0",
|
||||
"@vitest/coverage-v8": "^2.0.5",
|
||||
"byte-size": "^9.0.0",
|
||||
"cli-progress": "^3.12.0",
|
||||
"commander": "^12.0.0",
|
||||
"eslint": "^8.56.0",
|
||||
"eslint": "^9.0.0",
|
||||
"eslint-config-prettier": "^9.1.0",
|
||||
"eslint-plugin-prettier": "^5.1.3",
|
||||
"eslint-plugin-unicorn": "^54.0.0",
|
||||
"eslint-plugin-unicorn": "^55.0.0",
|
||||
"globals": "^15.9.0",
|
||||
"mock-fs": "^5.2.0",
|
||||
"prettier": "^3.2.5",
|
||||
"prettier-plugin-organize-imports": "^4.0.0",
|
||||
"typescript": "^5.3.3",
|
||||
"vite": "^5.0.12",
|
||||
"vite-tsconfig-paths": "^4.3.2",
|
||||
"vitest": "^1.2.2",
|
||||
"vitest-fetch-mock": "^0.2.2",
|
||||
"vite-tsconfig-paths": "^5.0.0",
|
||||
"vitest": "^2.0.5",
|
||||
"vitest-fetch-mock": "^0.3.0",
|
||||
"yaml": "^2.3.1"
|
||||
},
|
||||
"scripts": {
|
||||
@@ -64,6 +67,6 @@
|
||||
"lodash-es": "^4.17.21"
|
||||
},
|
||||
"volta": {
|
||||
"node": "20.15.1"
|
||||
"node": "20.17.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,37 +2,37 @@
|
||||
# Manual edits may be lost in future updates.
|
||||
|
||||
provider "registry.opentofu.org/cloudflare/cloudflare" {
|
||||
version = "4.36.0"
|
||||
constraints = "4.36.0"
|
||||
version = "4.40.0"
|
||||
constraints = "4.40.0"
|
||||
hashes = [
|
||||
"h1:00/Y+l17VV4RquGSfwDnYsGYzyf2ZmdQwUgeIzXC7eg=",
|
||||
"h1:489GpKItA/VRIUA5S4+F8MsnurGVciRvUFyIV81MJTU=",
|
||||
"h1:7cnczyKGj3+gvaJ0r5JIVWLXPbQfkHYejac76MJx+I8=",
|
||||
"h1:8rmr1PjJc14Xmor2eEvo5/WBojylt1eYdx6VbSU3Ulo=",
|
||||
"h1:HjgphNjtgny5tkcUAQoGgBdcuQ+0IyhL8yLsiBqWAP0=",
|
||||
"h1:LH3umxdBnJcAyeVoBLVn+PC0F0CzN6v9UN6lb6CqQPE=",
|
||||
"h1:Xx6WUD/zB8fM9SjkFx06Fgx2K7aGJIVvsJS2pwqALEM=",
|
||||
"h1:YizL5YN9zQ8YkSR6V/G201YrCVdnkF9EUIK4lpROWiA=",
|
||||
"h1:aPcXVGjYcCJdqvWSzc/dEjwj05LnbWZje8IanygVjcI=",
|
||||
"h1:eKCvfashdCqfDcFGXE2gq+XxAURD5SzuaQ9Brs3zLos=",
|
||||
"h1:gpKcBYkBcfn/uF1A8W7MD/OysMZW7EU4QVYvPEEnxGc=",
|
||||
"h1:kCkcxZZnkKAnMz9scUQHb19d9/l9FPOHovAyrvtA618=",
|
||||
"h1:t8mXXnICTeKqoD29uvyLFHVWMfMzTUrJuHje8lpI0zU=",
|
||||
"h1:zjzavjIdLDGRYsWd3v0HJz6ul12Cewj9RW/cqAQ4DxI=",
|
||||
"zh:02665712b3893307596b3caab99cf1f2502d5caca18e22d4b37bb535e628e102",
|
||||
"zh:1514b0d3ef62934484ac471113ee68cddec0c21e56b4f710922741fe9b6e6fdf",
|
||||
"zh:1fab4dfcecbcea13267b42e5ff05ba0692aa2dcb247b8e633fea0daf49feb156",
|
||||
"zh:24d8367295fe1f1b2be37802aecb96edf32f743364663ffe781d1bb92438395d",
|
||||
"zh:34e84e7940c99dcf65663cfd25afac22bf5c8a5ff2cd21900c67180d3a072be9",
|
||||
"zh:3d71d63204a329acf1d1de8638f2c725243cb94cf444d2d7acde54b3d1ac1696",
|
||||
"zh:57831ba88e779a762bcfa224ba9eac8bc22ef9cd70cd541d848b351e0ba6a75c",
|
||||
"zh:6407560f2e548afcb4852c91efc664627a9ee565c31a9c81fc9ea1806fca0567",
|
||||
"zh:738ddbc664d75f4859aa09444a27809bc398795a8ea8f5be8531040690287712",
|
||||
"zh:841ca2b2d78b6f8d33ec3435bc090c5e04a3a7d85c80df11227a7ea00d36f6b1",
|
||||
"h1:GP2N1tXrmpxu+qEDvFAmkfv9aeZNhag3bchyJpGpYbU=",
|
||||
"h1:HDJKZBQkVU0kQl4gViQ5L7EcFLn9hB0iuvO+ORJiDS4=",
|
||||
"h1:KrbeEsZoCJOnnX68yNI5h3QhMjc5bBCQW4yvYaEFq3s=",
|
||||
"h1:LelwnzU0OVn6g2+T9Ub9XdpC+vbheraIL/qgXhWBs/k=",
|
||||
"h1:TIq9CynfWrKgCxKL97Akj89cYlvJKn/AL4UXogd8/FM=",
|
||||
"h1:Uoy5oPdm1ipDG7yIMCUN1IXMpsTGXahPw3I0rVA/6wA=",
|
||||
"h1:Wunfpm+IZhENdoimrh4iXiakVnCsfKOHo80yJUjMQXM=",
|
||||
"h1:cRdCuahMOFrNyldnCInqGQRBT1DTkRPSfPnaf5r05iw=",
|
||||
"h1:k+zpXg8BO7gdbTIfSGyQisHhs5aVWQVbPLa5uUdr2UA=",
|
||||
"h1:kWNrzZ8Rh0OpHikexkmwJIIucD6SMZPi4oGyDsKJitw=",
|
||||
"h1:lomfTTjK78BdSEVTFcJUBQRy7IQHuGQImMaPWaYpfgQ=",
|
||||
"h1:oWcWlZe52ZRyLQciNe94RaWzhHifSTu03nlK0uL7rlM=",
|
||||
"h1:p3JJrhGEPlPQP7Uwy9FNMdvqCyD8tuT4lnXuJ+pSF/M=",
|
||||
"h1:wtB0sKxG2K/H41hWJI4uJdImWquuaP34Sip5LmfE410=",
|
||||
"zh:01742e5946f936548f8e42120287ffc757abf97e7cbbe34e25c266a438fb54fd",
|
||||
"zh:08d81f5a5aab4cc269f983b8c6b5be0e278105136aca9681740802619577371f",
|
||||
"zh:0d75131ba70902cfc94a7a5900369bdde56528b2aad6e10b164449cc97d57396",
|
||||
"zh:3890a715a012e197541daacdacb8cceec6d364814daa4640ddfe98a8ba9036cb",
|
||||
"zh:58254ce5ebe1faed4664df86210c39d660bcdc60280f17b25fe4d4dbea21ea8c",
|
||||
"zh:6b0abc1adbc2edee79368ce9f7338ebcb5d0bf941e8d7d9ac505b750f20f80a2",
|
||||
"zh:81cc415d1477174a1ca288d25fdb57e5ee488c2d7f61f265ef995b255a53b0ce",
|
||||
"zh:8680140c7fe5beaefe61c5cfa471bf88422dc0c0f05dad6d3cb482d4ffd22be4",
|
||||
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
|
||||
"zh:8b3d3d63354032ab9b2403c50728e9aa4e83c7367eaad2d18794221addeafc0f",
|
||||
"zh:9e293443fe3127e488f540229983c1b9688268185f87567bb3d18e794697acd2",
|
||||
"zh:b3a22439156e46461213db183e2e89569cd2e8d7cbcfc4b9f90469090e105807",
|
||||
"zh:f430feb5d51891e84028459e57039045dea4f1f5fcf671161d8ac2d8f28763f3",
|
||||
"zh:a491d26236122ccb83dac8cb490d2c0aa1f4d3a0b4abe99300fd49b1a624f42f",
|
||||
"zh:a70d9c469dc8d55715ba77c9d1a4ede1fdebf79e60ee18438a0844868db54e0d",
|
||||
"zh:a7fcb7d5c4222e14ec6d9a15adf8b9a083d84b102c3d0e4a0d102df5a1360b62",
|
||||
"zh:b4f9677174fabd199c8ebd2e9e5eb3528cf887e700569a4fb61eef4e070cec5e",
|
||||
"zh:c27f0f7519221d75dae4a3787a59e05acd5cc9a0d30a390eff349a77d20d52e6",
|
||||
"zh:db00d8605dbf43ca42fe1481a6c67fdcaa73debb7d2a0f613cb95ae5c5e7150e",
|
||||
]
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@ terraform {
|
||||
required_providers {
|
||||
cloudflare = {
|
||||
source = "cloudflare/cloudflare"
|
||||
version = "4.36.0"
|
||||
version = "4.40.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,6 +9,6 @@ resource "cloudflare_record" "immich_app_release_domain" {
|
||||
proxied = true
|
||||
ttl = 1
|
||||
type = "CNAME"
|
||||
value = data.terraform_remote_state.cloudflare_immich_app_docs.outputs.immich_app_branch_pages_hostname
|
||||
content = data.terraform_remote_state.cloudflare_immich_app_docs.outputs.immich_app_branch_pages_hostname
|
||||
zone_id = data.terraform_remote_state.cloudflare_account.outputs.immich_app_zone_id
|
||||
}
|
||||
|
||||
@@ -2,37 +2,37 @@
|
||||
# Manual edits may be lost in future updates.
|
||||
|
||||
provider "registry.opentofu.org/cloudflare/cloudflare" {
|
||||
version = "4.36.0"
|
||||
constraints = "4.36.0"
|
||||
version = "4.40.0"
|
||||
constraints = "4.40.0"
|
||||
hashes = [
|
||||
"h1:00/Y+l17VV4RquGSfwDnYsGYzyf2ZmdQwUgeIzXC7eg=",
|
||||
"h1:489GpKItA/VRIUA5S4+F8MsnurGVciRvUFyIV81MJTU=",
|
||||
"h1:7cnczyKGj3+gvaJ0r5JIVWLXPbQfkHYejac76MJx+I8=",
|
||||
"h1:8rmr1PjJc14Xmor2eEvo5/WBojylt1eYdx6VbSU3Ulo=",
|
||||
"h1:HjgphNjtgny5tkcUAQoGgBdcuQ+0IyhL8yLsiBqWAP0=",
|
||||
"h1:LH3umxdBnJcAyeVoBLVn+PC0F0CzN6v9UN6lb6CqQPE=",
|
||||
"h1:Xx6WUD/zB8fM9SjkFx06Fgx2K7aGJIVvsJS2pwqALEM=",
|
||||
"h1:YizL5YN9zQ8YkSR6V/G201YrCVdnkF9EUIK4lpROWiA=",
|
||||
"h1:aPcXVGjYcCJdqvWSzc/dEjwj05LnbWZje8IanygVjcI=",
|
||||
"h1:eKCvfashdCqfDcFGXE2gq+XxAURD5SzuaQ9Brs3zLos=",
|
||||
"h1:gpKcBYkBcfn/uF1A8W7MD/OysMZW7EU4QVYvPEEnxGc=",
|
||||
"h1:kCkcxZZnkKAnMz9scUQHb19d9/l9FPOHovAyrvtA618=",
|
||||
"h1:t8mXXnICTeKqoD29uvyLFHVWMfMzTUrJuHje8lpI0zU=",
|
||||
"h1:zjzavjIdLDGRYsWd3v0HJz6ul12Cewj9RW/cqAQ4DxI=",
|
||||
"zh:02665712b3893307596b3caab99cf1f2502d5caca18e22d4b37bb535e628e102",
|
||||
"zh:1514b0d3ef62934484ac471113ee68cddec0c21e56b4f710922741fe9b6e6fdf",
|
||||
"zh:1fab4dfcecbcea13267b42e5ff05ba0692aa2dcb247b8e633fea0daf49feb156",
|
||||
"zh:24d8367295fe1f1b2be37802aecb96edf32f743364663ffe781d1bb92438395d",
|
||||
"zh:34e84e7940c99dcf65663cfd25afac22bf5c8a5ff2cd21900c67180d3a072be9",
|
||||
"zh:3d71d63204a329acf1d1de8638f2c725243cb94cf444d2d7acde54b3d1ac1696",
|
||||
"zh:57831ba88e779a762bcfa224ba9eac8bc22ef9cd70cd541d848b351e0ba6a75c",
|
||||
"zh:6407560f2e548afcb4852c91efc664627a9ee565c31a9c81fc9ea1806fca0567",
|
||||
"zh:738ddbc664d75f4859aa09444a27809bc398795a8ea8f5be8531040690287712",
|
||||
"zh:841ca2b2d78b6f8d33ec3435bc090c5e04a3a7d85c80df11227a7ea00d36f6b1",
|
||||
"h1:GP2N1tXrmpxu+qEDvFAmkfv9aeZNhag3bchyJpGpYbU=",
|
||||
"h1:HDJKZBQkVU0kQl4gViQ5L7EcFLn9hB0iuvO+ORJiDS4=",
|
||||
"h1:KrbeEsZoCJOnnX68yNI5h3QhMjc5bBCQW4yvYaEFq3s=",
|
||||
"h1:LelwnzU0OVn6g2+T9Ub9XdpC+vbheraIL/qgXhWBs/k=",
|
||||
"h1:TIq9CynfWrKgCxKL97Akj89cYlvJKn/AL4UXogd8/FM=",
|
||||
"h1:Uoy5oPdm1ipDG7yIMCUN1IXMpsTGXahPw3I0rVA/6wA=",
|
||||
"h1:Wunfpm+IZhENdoimrh4iXiakVnCsfKOHo80yJUjMQXM=",
|
||||
"h1:cRdCuahMOFrNyldnCInqGQRBT1DTkRPSfPnaf5r05iw=",
|
||||
"h1:k+zpXg8BO7gdbTIfSGyQisHhs5aVWQVbPLa5uUdr2UA=",
|
||||
"h1:kWNrzZ8Rh0OpHikexkmwJIIucD6SMZPi4oGyDsKJitw=",
|
||||
"h1:lomfTTjK78BdSEVTFcJUBQRy7IQHuGQImMaPWaYpfgQ=",
|
||||
"h1:oWcWlZe52ZRyLQciNe94RaWzhHifSTu03nlK0uL7rlM=",
|
||||
"h1:p3JJrhGEPlPQP7Uwy9FNMdvqCyD8tuT4lnXuJ+pSF/M=",
|
||||
"h1:wtB0sKxG2K/H41hWJI4uJdImWquuaP34Sip5LmfE410=",
|
||||
"zh:01742e5946f936548f8e42120287ffc757abf97e7cbbe34e25c266a438fb54fd",
|
||||
"zh:08d81f5a5aab4cc269f983b8c6b5be0e278105136aca9681740802619577371f",
|
||||
"zh:0d75131ba70902cfc94a7a5900369bdde56528b2aad6e10b164449cc97d57396",
|
||||
"zh:3890a715a012e197541daacdacb8cceec6d364814daa4640ddfe98a8ba9036cb",
|
||||
"zh:58254ce5ebe1faed4664df86210c39d660bcdc60280f17b25fe4d4dbea21ea8c",
|
||||
"zh:6b0abc1adbc2edee79368ce9f7338ebcb5d0bf941e8d7d9ac505b750f20f80a2",
|
||||
"zh:81cc415d1477174a1ca288d25fdb57e5ee488c2d7f61f265ef995b255a53b0ce",
|
||||
"zh:8680140c7fe5beaefe61c5cfa471bf88422dc0c0f05dad6d3cb482d4ffd22be4",
|
||||
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
|
||||
"zh:8b3d3d63354032ab9b2403c50728e9aa4e83c7367eaad2d18794221addeafc0f",
|
||||
"zh:9e293443fe3127e488f540229983c1b9688268185f87567bb3d18e794697acd2",
|
||||
"zh:b3a22439156e46461213db183e2e89569cd2e8d7cbcfc4b9f90469090e105807",
|
||||
"zh:f430feb5d51891e84028459e57039045dea4f1f5fcf671161d8ac2d8f28763f3",
|
||||
"zh:a491d26236122ccb83dac8cb490d2c0aa1f4d3a0b4abe99300fd49b1a624f42f",
|
||||
"zh:a70d9c469dc8d55715ba77c9d1a4ede1fdebf79e60ee18438a0844868db54e0d",
|
||||
"zh:a7fcb7d5c4222e14ec6d9a15adf8b9a083d84b102c3d0e4a0d102df5a1360b62",
|
||||
"zh:b4f9677174fabd199c8ebd2e9e5eb3528cf887e700569a4fb61eef4e070cec5e",
|
||||
"zh:c27f0f7519221d75dae4a3787a59e05acd5cc9a0d30a390eff349a77d20d52e6",
|
||||
"zh:db00d8605dbf43ca42fe1481a6c67fdcaa73debb7d2a0f613cb95ae5c5e7150e",
|
||||
]
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@ terraform {
|
||||
required_providers {
|
||||
cloudflare = {
|
||||
source = "cloudflare/cloudflare"
|
||||
version = "4.36.0"
|
||||
version = "4.40.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,7 +9,7 @@ resource "cloudflare_record" "immich_app_branch_subdomain" {
|
||||
proxied = true
|
||||
ttl = 1
|
||||
type = "CNAME"
|
||||
value = "${replace(var.prefix_name, "/\\/|\\./", "-")}.${local.is_release ? data.terraform_remote_state.cloudflare_account.outputs.immich_app_archive_pages_project_subdomain : data.terraform_remote_state.cloudflare_account.outputs.immich_app_preview_pages_project_subdomain}"
|
||||
content = "${replace(var.prefix_name, "/\\/|\\./", "-")}.${local.is_release ? data.terraform_remote_state.cloudflare_account.outputs.immich_app_archive_pages_project_subdomain : data.terraform_remote_state.cloudflare_account.outputs.immich_app_preview_pages_project_subdomain}"
|
||||
zone_id = data.terraform_remote_state.cloudflare_account.outputs.immich_app_zone_id
|
||||
}
|
||||
|
||||
|
||||
@@ -46,6 +46,8 @@ services:
|
||||
depends_on:
|
||||
- redis
|
||||
- database
|
||||
healthcheck:
|
||||
disable: false
|
||||
|
||||
immich-web:
|
||||
container_name: immich_web
|
||||
@@ -91,10 +93,12 @@ services:
|
||||
depends_on:
|
||||
- database
|
||||
restart: unless-stopped
|
||||
healthcheck:
|
||||
disable: false
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: redis:6.2-alpine@sha256:328fe6a5822256d065debb36617a8169dbfbd77b797c525288e465f56c1d392b
|
||||
image: redis:6.2-alpine@sha256:e3b17ba9479deec4b7d1eeec1548a253acc5374d68d3b27937fcfe4df8d18c7e
|
||||
healthcheck:
|
||||
test: redis-cli ping || exit 1
|
||||
|
||||
|
||||
@@ -21,6 +21,8 @@ services:
|
||||
- redis
|
||||
- database
|
||||
restart: always
|
||||
healthcheck:
|
||||
disable: false
|
||||
|
||||
immich-machine-learning:
|
||||
container_name: immich_machine_learning
|
||||
@@ -33,15 +35,19 @@ services:
|
||||
dockerfile: Dockerfile
|
||||
args:
|
||||
- DEVICE=cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
|
||||
ports:
|
||||
- 3003:3003
|
||||
volumes:
|
||||
- model-cache:/cache
|
||||
env_file:
|
||||
- .env
|
||||
restart: always
|
||||
healthcheck:
|
||||
disable: false
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: redis:6.2-alpine@sha256:328fe6a5822256d065debb36617a8169dbfbd77b797c525288e465f56c1d392b
|
||||
image: redis:6.2-alpine@sha256:e3b17ba9479deec4b7d1eeec1548a253acc5374d68d3b27937fcfe4df8d18c7e
|
||||
healthcheck:
|
||||
test: redis-cli ping || exit 1
|
||||
restart: always
|
||||
@@ -65,7 +71,7 @@ services:
|
||||
interval: 5m
|
||||
start_interval: 30s
|
||||
start_period: 5m
|
||||
command: ["postgres", "-c" ,"shared_preload_libraries=vectors.so", "-c", 'search_path="$$user", public, vectors', "-c", "logging_collector=on", "-c", "max_wal_size=2GB", "-c", "shared_buffers=512MB", "-c", "wal_compression=on"]
|
||||
command: ["postgres", "-c", "shared_preload_libraries=vectors.so", "-c", 'search_path="$$user", public, vectors', "-c", "logging_collector=on", "-c", "max_wal_size=2GB", "-c", "shared_buffers=512MB", "-c", "wal_compression=on"]
|
||||
restart: always
|
||||
|
||||
# set IMMICH_METRICS=true in .env to enable metrics
|
||||
@@ -73,7 +79,7 @@ services:
|
||||
container_name: immich_prometheus
|
||||
ports:
|
||||
- 9090:9090
|
||||
image: prom/prometheus@sha256:f20d3127bf2876f4a1df76246fca576b41ddf1125ed1c546fbd8b16ea55117e6
|
||||
image: prom/prometheus@sha256:f6639335d34a77d9d9db382b92eeb7fc00934be8eae81dbc03b31cfe90411a94
|
||||
volumes:
|
||||
- ./prometheus.yml:/etc/prometheus/prometheus.yml
|
||||
- prometheus-data:/prometheus
|
||||
@@ -85,7 +91,7 @@ services:
|
||||
command: ['./run.sh', '-disable-reporting']
|
||||
ports:
|
||||
- 3000:3000
|
||||
image: grafana/grafana:11.1.0-ubuntu@sha256:c7fc29ec783d5e7fc1bdfaad6f92345a345cffbc5d21c388ca228175006fc107
|
||||
image: grafana/grafana:11.2.0-ubuntu@sha256:8e2c13739563c3da9d45de96c6bcb63ba617cac8c571c060112c7fc8ad6914e9
|
||||
volumes:
|
||||
- grafana-data:/var/lib/grafana
|
||||
|
||||
|
||||
@@ -16,6 +16,7 @@ services:
|
||||
# file: hwaccel.transcoding.yml
|
||||
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
|
||||
volumes:
|
||||
# Do not edit the next line. If you want to change the media storage location on your system, edit the value of UPLOAD_LOCATION in the .env file
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
env_file:
|
||||
@@ -26,6 +27,8 @@ services:
|
||||
- redis
|
||||
- database
|
||||
restart: always
|
||||
healthcheck:
|
||||
disable: false
|
||||
|
||||
immich-machine-learning:
|
||||
container_name: immich_machine_learning
|
||||
@@ -40,10 +43,12 @@ services:
|
||||
env_file:
|
||||
- .env
|
||||
restart: always
|
||||
healthcheck:
|
||||
disable: false
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: docker.io/redis:6.2-alpine@sha256:328fe6a5822256d065debb36617a8169dbfbd77b797c525288e465f56c1d392b
|
||||
image: docker.io/redis:6.2-alpine@sha256:e3b17ba9479deec4b7d1eeec1548a253acc5374d68d3b27937fcfe4df8d18c7e
|
||||
healthcheck:
|
||||
test: redis-cli ping || exit 1
|
||||
restart: always
|
||||
@@ -57,13 +62,14 @@ services:
|
||||
POSTGRES_DB: ${DB_DATABASE_NAME}
|
||||
POSTGRES_INITDB_ARGS: '--data-checksums'
|
||||
volumes:
|
||||
# Do not edit the next line. If you want to change the database storage location on your system, edit the value of DB_DATA_LOCATION in the .env file
|
||||
- ${DB_DATA_LOCATION}:/var/lib/postgresql/data
|
||||
healthcheck:
|
||||
test: pg_isready --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
|
||||
interval: 5m
|
||||
start_interval: 30s
|
||||
start_period: 5m
|
||||
command: ["postgres", "-c" ,"shared_preload_libraries=vectors.so", "-c", 'search_path="$$user", public, vectors', "-c", "logging_collector=on", "-c", "max_wal_size=2GB", "-c", "shared_buffers=512MB", "-c", "wal_compression=on"]
|
||||
command: ["postgres", "-c", "shared_preload_libraries=vectors.so", "-c", 'search_path="$$user", public, vectors', "-c", "logging_collector=on", "-c", "max_wal_size=2GB", "-c", "shared_buffers=512MB", "-c", "wal_compression=on"]
|
||||
restart: always
|
||||
|
||||
volumes:
|
||||
|
||||
@@ -12,6 +12,7 @@ DB_DATA_LOCATION=./postgres
IMMICH_VERSION=release

# Connection secret for postgres. You should change it to a random password
# Please use only the characters `A-Za-z0-9`, without special characters or spaces
DB_PASSWORD=postgres

# The values below this line do not need to be changed
@@ -1 +1 @@
|
||||
20.15.1
|
||||
20.17.0
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
---
|
||||
title: The Immich core team goes full-time
|
||||
authors: [alextran]
|
||||
tags: [update, announcement, futo]
|
||||
tags: [update, announcement, FUTO]
|
||||
date: 2024-05-01T00:00
|
||||
---
|
||||
|
||||
|
||||
91
docs/blog/2024/immich-licensing.mdx
Normal file
@@ -0,0 +1,91 @@
|
||||
---
|
||||
title: Licensing announcement - Purchase a license to support Immich
|
||||
authors: [alextran]
|
||||
tags: [update, announcement, FUTO]
|
||||
date: 2024-07-18T00:00
|
||||
---
|
||||
|
||||
Hello everybody,
|
||||
|
||||
Firstly, on behalf of the Immich team, I'd like to thank everybody for your continuous support of Immich since the very first day! Your contributions, encouragement, and community engagement have helped bring Immich to its current state. The team and I are forever grateful for that.
|
||||
|
||||
Since our [last announcement of the core team joining FUTO to work on Immich full-time](https://immich.app/blog/2024/immich-core-team-goes-fulltime), one of the goals of our new position is to foster a healthy relationship between the developers and the users. We believe that this enables us to create great software, establish transparent policies and build trust.
|
||||
|
||||
We want to build a great software application that brings value to you and your loved ones' lives. We are not using you as a product, i.e., selling or tracking your data. We are not putting annoying ads into our software. We respect your privacy. We want to be compensated for the hard work we put in to build Immich for you.
|
||||
|
||||
With those notes, we have enabled a way for you to financially support the continued development of Immich, ensuring the software can move forward and will be maintained, by offering a lifetime license of the software. We think if you like and use software, you should pay for it, but _we're never going to force anyone to pay or try to limit Immich for those who don't._
|
||||
|
||||
There are two types of license that you can choose to purchase: **Server License** and **Individual License**.
|
||||
|
||||
### Server License
|
||||
|
||||
This is a lifetime license costing **$99.99**. The license is applied to the whole server. You and all users that use your server are licensed.
|
||||
|
||||
### Individual License
|
||||
|
||||
This is a lifetime license costing **$24.99**. The license is applied to a single user, and can be used on any server they choose to connect to.
|
||||
|
||||
<img
|
||||
width="837"
|
||||
alt="license-social-gh"
|
||||
src="https://github.com/user-attachments/assets/241932ed-ef3b-44ec-a9e2-ee80754e0cca"
|
||||
/>
|
||||
|
||||
You can purchase the license on [our page - https://buy.immich.app](https://buy.immich.app).
|
||||
|
||||
Starting with release `v1.109.0` you can purchase and enter your purchased license key directly in the app.
|
||||
|
||||
<img
|
||||
width="1414"
|
||||
alt="license-page-gh"
|
||||
src="https://github.com/user-attachments/assets/364fc32a-f6ef-4594-9fea-28d5a26ad77c"
|
||||
/>
|
||||
|
||||
## Thank you
|
||||
|
||||
Thank you again for your support, this will help create a strong foundation and stability for the Immich team to continue developing and maintaining the project that you love to use.
|
||||
|
||||
<p align="center">
|
||||
<img
|
||||
src="https://media.giphy.com/media/v1.Y2lkPTc5MGI3NjExbjY2eWc5Y2F0ZW56MmR4aWE0dDhzZXlidXRmYWZyajl1bWZidXZpcyZlcD12MV9pbnRlcm5hbF9naWZfYnlfaWQmY3Q9Zw/87CKDqErVfMqY/giphy.gif"
|
||||
width="550"
|
||||
title="SUPPORT THE PROJECT!"
|
||||
/>
|
||||
</p>
|
||||
|
||||
<br />
|
||||
<br />
|
||||
|
||||
Cheers! 🎉
|
||||
|
||||
Immich team
|
||||
|
||||
# FAQ
|
||||
|
||||
### 1. Where can I purchase a license?
|
||||
|
||||
There are several places where you can purchase a license:
|
||||
|
||||
- [https://buy.immich.app](https://buy.immich.app)
|
||||
- [https://pay.futo.org](https://pay.futo.org/)
|
||||
- or directly from the app.
|
||||
|
||||
### 2. Do I need both _Individual License_ and _Server License_?
|
||||
|
||||
No.
|
||||
|
||||
If you are the admin and the sole user, or your instance has fewer than 4 users in total, you can buy the **Individual License** for each user.
|
||||
|
||||
If your instance has more than 4 users, it is more cost-effective to buy the **Server License**, which licenses all the users on your instance.
|
||||
|
||||
### 3. What do I do if I don't pay?
|
||||
|
||||
You can continue using Immich without any restriction.
|
||||
|
||||
### 4. Will there be any paywalled features?
|
||||
|
||||
No, there will never be any paywalled features.
|
||||
|
||||
### 5. Where can I get support regarding payment issues?
|
||||
|
||||
You can email us at `billing@futo.org` with your `orderId` and your email address, or reach out on our Discord server.
|
||||
@@ -1,6 +1,7 @@
|
||||
---
|
||||
title: Immich Update - July 2024
|
||||
authors: [alextran]
|
||||
date: 2024-07-01T00:00
|
||||
tags: [update, v1.106.0]
|
||||
---
|
||||
|
||||
|
||||
@@ -52,14 +52,25 @@ On iOS (iPhone and iPad), the operating system determines if a particular app ca
|
||||
- Disable Background App Refresh for apps that don't need background tasks to run. This will reduce the competition for background task invocation for Immich.
|
||||
- Use the Immich app more often.
|
||||
|
||||
### Why are features not working with a self-signed cert or mTLS?
|
||||
|
||||
Due to limitations in the upstream app/video library, using a self-signed TLS certificate or mutual TLS may break video playback or asset upload (both foreground and/or background).
|
||||
We recommend using a real SSL certificate from a free provider, for example [Let's Encrypt](https://letsencrypt.org/).
|
||||
|
||||
---
|
||||
|
||||
## Assets
|
||||
|
||||
### Does Immich change the file?
|
||||
|
||||
No, Immich does not touch the original file under any circumstances,
|
||||
all edited metadata are saved in the companion sidecar file and the database.
|
||||
No, Immich does not modify the original files.
|
||||
All edited metadata is saved in companion `.xmp` sidecar files and the database.
|
||||
However, Immich will delete original files that have been trashed when the trash is emptied in the Immich UI.
|
||||
|
||||
### Why do my file names appear as a random string in the file manager?
|
||||
|
||||
When the Storage Template is off (the default), Immich saves files under randomly generated names (UUIDs) to prevent duplicate file names. To retrieve the original file names, you must enable the Storage Template and then run the STORAGE TEMPLATE MIGRATION job.
|
||||
It is recommended to read about [Storage Template](https://immich.app/docs/administration/storage-template) before activation.
|
||||
|
||||
### Can I add my existing photo library?
|
||||
|
||||
@@ -157,6 +168,19 @@ We haven't implemented an official mechanism for creating albums from external l
|
||||
|
||||
Duplicate checking only exists for upload libraries, using the file hash. Furthermore, duplicate checking is not global, but _per library_. Therefore, a situation where the same file appears twice in the timeline is possible, especially for external libraries.
|
||||
|
||||
### Why are my edits to files not being saved in read-only external libraries?
|
||||
|
||||
Images in read-write external libraries (the default) can be edited as normal.
|
||||
In read-only libraries (`:ro` in the `docker-compose.yml`), Immich is unable to create the `.xmp` sidecar files to store edited file metadata.
|
||||
For this reason, the metadata (timestamp, location, description, star rating, etc.) cannot be edited for files in read-only external libraries.
|
||||
|
||||
### How are deletions of files handled in external libraries?
|
||||
|
||||
Immich will attempt to delete original files that have been trashed when the trash is emptied.
|
||||
In read-write external libraries (the default), Immich will delete the original file.
|
||||
In read-only libraries (`:ro` in the `docker-compose.yml`), files can still be trashed in the UI.
|
||||
However, when the trash is emptied, the files will re-appear in the main timeline since Immich is unable to delete the original file.
|
||||
|
||||
---
|
||||
|
||||
## Machine Learning
|
||||
@@ -167,7 +191,7 @@ Immich uses CLIP models. For more information about CLIP and its capabilities, r
|
||||
|
||||
### How does facial recognition work?
|
||||
|
||||
For face detection and recognition, Immich uses [InsightFace models](https://github.com/deepinsight/insightface/tree/master/model_zoo).
|
||||
See [How Facial Recognition Works](/docs/features/facial-recognition#How-Facial-Recognition-Works) for details.
|
||||
|
||||
### How can I disable machine learning?
|
||||
|
||||
@@ -181,19 +205,15 @@ However, disabling all jobs will not disable the machine learning service itself
|
||||
|
||||
### I'm getting errors about models being corrupt or failing to download. What do I do?
|
||||
|
||||
You can delete the model cache volume, where models are downloaded. This will give the service a clean environment to download the model again. If models are failing to download entirely, you can manually download them from [Huggingface][huggingface] and place them in the cache folder.
|
||||
You can delete the model cache volume, where models are downloaded. This will give the service a clean environment to download the model again. If models are failing to download entirely, you can manually download them from [Hugging Face][huggingface] and place them in the cache folder.
|
||||
|
||||
### Can I use a custom CLIP model?
|
||||
|
||||
No, this is not supported. Only models listed in the [Huggingface][huggingface] page are compatible. Feel free to make a feature request if there's a model not listed here that you think should be added.
|
||||
No, this is not supported. Only models listed in the [Hugging Face][huggingface] page are compatible. Feel free to make a feature request if there's a model not listed here that you think should be added.
|
||||
|
||||
### I want to be able to search in other languages besides English. How can I do that?
|
||||
|
||||
You can change to a multilingual model listed [here](https://huggingface.co/collections/immich-app/multilingual-clip-654eb08c2382f591eeb8c2a7) by going to Administration > Machine Learning Settings > Smart Search and replacing the name of the model. Be sure to re-run Smart Search on all assets after this change. You can then search in over 100 languages.
|
||||
|
||||
:::note
|
||||
Feel free to make a feature request if there's a model you want to use that isn't in [Immich Huggingface list][huggingface].
|
||||
:::
|
||||
You can change to a multilingual CLIP model. See [here](/docs/features/smart-search#CLIP-model) for instructions.
|
||||
|
||||
### Does Immich support Facial Recognition for videos?
|
||||
|
||||
@@ -234,7 +254,7 @@ ls clip/ facial-recognition/
|
||||
|
||||
### Why is Immich slow on low-memory systems like the Raspberry Pi?
|
||||
|
||||
Immich optionally uses machine learning for several features. However, it can be too heavy to run on a Raspberry Pi. You can [mitigate](/docs/FAQ#can-i-lower-cpu-and-ram-usage) this or host Immich's machine-learning container on a [more powerful system](/docs/guides/remote-machine-learning), or [disable](/docs/FAQ#how-can-i-disable-machine-learning) machine learning entirely.
|
||||
Immich optionally uses transcoding and machine learning for several features. However, it can be too heavy to run on a Raspberry Pi. You can [mitigate](/docs/FAQ#can-i-lower-cpu-and-ram-usage) this or host Immich's machine-learning container on a [more powerful system](/docs/guides/remote-machine-learning), or [disable](/docs/FAQ#how-can-i-disable-machine-learning) machine learning entirely.
|
||||
|
||||
### Can I lower CPU and RAM usage?
|
||||
|
||||
@@ -243,10 +263,12 @@ The initial backup is the most intensive due to the number of jobs running. The
|
||||
- Lower the job concurrency for these jobs to 1.
|
||||
- Under Settings > Transcoding Settings > Threads, set the number of threads to a low number like 1 or 2.
|
||||
- Under Settings > Machine Learning Settings > Facial Recognition > Model Name, you can change the facial recognition model to `buffalo_s` instead of `buffalo_l`. The former is a smaller and faster model, albeit not as good.
|
||||
- For facial recognition on new images to work properly, you must re-run the Face Detection job for all images after this.
|
||||
- For facial recognition on new images to work properly, you must re-run the Face Detection job for all images after this.
|
||||
- At the container level, you can [set resource constraints](/docs/FAQ#can-i-limit-cpu-and-ram-usage) to lower usage further.
|
||||
- It's recommended to only apply these constraints _after_ taking some of the measures here for best performance.
|
||||
- If these changes are not enough, see [below](/docs/FAQ#how-can-i-disable-machine-learning) for instructions on how to disable machine learning.
|
||||
|
||||
### Can I limit the amount of CPU and RAM usage?
|
||||
### Can I limit CPU and RAM usage?
|
||||
|
||||
By default, a container has no resource constraints and can use as much of a given resource as the host's kernel scheduler allows. To limit this, you can add the following to the `docker-compose.yml` block of any containers that you want to have limited resources.
|
||||
|
||||
@@ -266,6 +288,8 @@ deploy:
|
||||
</details>
|
||||
For more details, you can look at the [original docker docs](https://docs.docker.com/config/containers/resource_constraints/) or use this [guide](https://www.baeldung.com/ops/docker-memory-limit).
|
||||
|
||||
Note that memory constraints work by terminating the container, so this can introduce instability if set too low.
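As a rough sketch only — the exact values depend on your hardware, and the service name used here (`immich-machine-learning`) is just one example — a limits block added to a service could look like this:

```yaml
services:
  immich-machine-learning:
    # ...keep the existing configuration for the service...
    deploy:
      resources:
        limits:
          # Cap the container at roughly two CPU cores
          cpus: '2.0'
          # Exceeding this limit terminates the container, so leave headroom
          memory: 1G
```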
|
||||
|
||||
### How can I boost machine learning speed?
|
||||
|
||||
:::note
|
||||
@@ -275,21 +299,16 @@ This advice improves throughput, not latency. This is to say that it will make S
|
||||
You can increase throughput by increasing the job concurrency for machine learning jobs (Smart Search, Face Detection). With higher concurrency, the host will work on more assets in parallel. You can do this by navigating to Administration > Settings > Job Settings and increasing concurrency as needed.
|
||||
|
||||
:::danger
|
||||
On a normal machine, 2 or 3 concurrent jobs can probably max the CPU. Beyond this, note that storage speed and latency may quickly become the limiting factor; particularly when using HDDs.
|
||||
On a normal machine, 2 or 3 concurrent jobs can probably max the CPU. Storage speed and latency can quickly become the limiting factor beyond this, particularly when using HDDs.
|
||||
|
||||
Do not exaggerate with the amount of jobs because you're probably thoroughly overloading the server.
|
||||
The concurrency can be increased more comfortably with a GPU, but should still not be above 16 in most cases.
|
||||
|
||||
More details can be found [here](https://discord.com/channels/979116623879368755/994044917355663450/1174711719994605708)
|
||||
Do not set the job concurrency too high, as doing so will likely overload the server.
|
||||
:::
|
||||
|
||||
### Why is Immich using so much of my CPU?
|
||||
### My server shows Server Status Offline | Version Unknown. What can I do?
|
||||
|
||||
When a large number of assets are uploaded to Immich, it makes sense that the CPU and RAM will be heavily used for machine learning work and creating image thumbnails.
|
||||
Once this process is completed, the percentage of CPU usage will drop to around 3-5% usage
|
||||
|
||||
### My server shows Server Status Offline | Version Unknown what can I do?
|
||||
|
||||
You need to enable Websocket on your reverse proxy.
|
||||
You need to enable WebSockets on your reverse proxy.
|
||||
|
||||
---
|
||||
|
||||
@@ -299,6 +318,12 @@ You need to enable Websocket on your reverse proxy.
|
||||
|
||||
Immich components are typically deployed using docker. To see logs for deployed docker containers, you can use the [Docker CLI](https://docs.docker.com/engine/reference/commandline/cli/), specifically the `docker logs` command. For examples, see [Docker Help](/docs/guides/docker-help.md).
|
||||
|
||||
### How can I reduce the log verbosity of Redis?
|
||||
|
||||
To decrease Redis logs, you can add the following line to the `redis:` section of the `docker-compose.yml`:
|
||||
|
||||
` command: redis-server --loglevel warning`
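For reference, a minimal sketch of what the `redis:` service could look like with that line in place (the container name and image tag are taken from the compose file shown earlier and may differ in your setup):

```yaml
services:
  redis:
    container_name: immich_redis
    image: docker.io/redis:6.2-alpine
    # Log only warnings and errors instead of the default verbosity
    command: redis-server --loglevel warning
```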
|
||||
|
||||
### How can I run Immich as a non-root user?
|
||||
|
||||
You can change the user in the container by setting the `user` argument in `docker-compose.yml` for each service.
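A minimal sketch, assuming a host user and group ID of `1000:1000` (check yours with the `id` command); the same `user:` key can be added to each service you want to run unprivileged:

```yaml
services:
  immich-server:
    # ...keep the existing configuration for the service...
    # Run the container processes as UID:GID 1000:1000 instead of root
    user: "1000:1000"
```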
|
||||
|
||||
@@ -45,7 +45,7 @@ docker compose up -d # Start remainder of Immich apps
|
||||
<TabItem value="Windows system (PowerShell)" label="Windows system (PowerShell)">
|
||||
|
||||
```powershell title='Backup'
|
||||
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres > "\path\to\backup\dump.sql"
|
||||
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres | Set-Content -Encoding utf8 "C:\path\to\backup\dump.sql"
|
||||
```
|
||||
|
||||
```powershell title='Restore'
|
||||
@@ -149,9 +149,21 @@ for more info read the [release notes](https://github.com/immich-app/immich/rele
|
||||
- Preview images (small thumbnails and large previews) for each asset and thumbnails for recognized faces.
|
||||
- Stored in `UPLOAD_LOCATION/thumbs/<userID>`.
|
||||
- **Encoded Assets:**
|
||||
|
||||
- Videos that have been re-encoded from the original for wider compatibility. The original is not removed.
|
||||
- Stored in `UPLOAD_LOCATION/encoded-video/<userID>`.
|
||||
|
||||
- **Postgres**
|
||||
|
||||
- The Immich database containing all the information to allow the system to function properly.
|
||||
**Note:** This folder will only appear to users who have made the changes mentioned in [v1.102.0](https://github.com/immich-app/immich/discussions/8930) (an optional change) or who started with this version.
|
||||
- Stored in `UPLOAD_LOCATION/postgres`.
|
||||
|
||||
:::danger
|
||||
A backup of this folder does not constitute a backup of your database!
|
||||
Follow the instructions listed [here](/docs/administration/backup-and-restore#database) to learn how to perform a proper backup.
|
||||
:::
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="Storage Template On" label="Storage Template On">
|
||||
|
||||
@@ -187,8 +199,19 @@ When you turn off the storage template engine, it will leave the assets in `UPLO
|
||||
- Files uploaded through mobile apps.
|
||||
- Temporarily located in `UPLOAD_LOCATION/upload/<userID>`.
|
||||
- Transferred to `UPLOAD_LOCATION/library/<userID>` upon successful upload.
|
||||
- **Postgres**
|
||||
|
||||
- The Immich database containing all the information to allow the system to function properly.
|
||||
**Note:** This folder will only appear to users who have made the changes mentioned in [v1.102.0](https://github.com/immich-app/immich/discussions/8930) (an optional change) or who started with this version.
|
||||
- Stored in `UPLOAD_LOCATION/postgres`.
|
||||
|
||||
:::danger
|
||||
A backup of this folder does not constitute a backup of your database!
|
||||
Follow the instructions listed [here](/docs/administration/backup-and-restore#database) to learn how to perform a proper backup.
|
||||
:::
|
||||
|
||||
</TabItem>
|
||||
|
||||
</Tabs>
|
||||
|
||||
:::danger
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 1.1 MiB |
BIN
docs/docs/administration/img/admin-jobs.webp
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 79 KiB |
@@ -52,4 +52,4 @@ Additionally, some jobs run on a schedule, which is every night at midnight. Thi
|
||||
The Storage Migration job can be run after changing the [Storage Template](/docs/administration/storage-template.mdx) in order to apply the change to the existing library.
|
||||
:::
|
||||
|
||||
<img src={require('./img/admin-jobs.png').default} width="80%" title="Admin jobs" />
|
||||
<img src={require('./img/admin-jobs.webp').default} width="60%" title="Admin jobs" />
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
This page contains details about using OAuth in Immich.
|
||||
|
||||
:::tip
|
||||
Unable to set `app.immich:/` as a valid redirect URI? See [Mobile Redirect URI](#mobile-redirect-uri) for an alternative solution.
|
||||
Unable to set `app.immich:///oauth-callback` as a valid redirect URI? See [Mobile Redirect URI](#mobile-redirect-uri) for an alternative solution.
|
||||
:::
|
||||
|
||||
## Overview
|
||||
@@ -30,7 +30,7 @@ Before enabling OAuth in Immich, a new client application needs to be configured
|
||||
|
||||
The **Sign-in redirect URIs** should include:
|
||||
|
||||
- `app.immich:/` - for logging in with OAuth from the [Mobile App](/docs/features/mobile-app.mdx)
|
||||
- `app.immich:///oauth-callback` - for logging in with OAuth from the [Mobile App](/docs/features/mobile-app.mdx)
|
||||
- `http://DOMAIN:PORT/auth/login` - for logging in with OAuth from the Web Client
|
||||
- `http://DOMAIN:PORT/user-settings` - for manually linking OAuth in the Web Client
|
||||
|
||||
@@ -38,7 +38,7 @@ Before enabling OAuth in Immich, a new client application needs to be configured
|
||||
|
||||
Mobile
|
||||
|
||||
- `app.immich:/` (You **MUST** include this for iOS and Android mobile apps to work properly)
|
||||
- `app.immich:///oauth-callback` (You **MUST** include this for iOS and Android mobile apps to work properly)
|
||||
|
||||
Localhost
|
||||
|
||||
@@ -96,16 +96,16 @@ When Auto Launch is enabled, the login page will automatically redirect the user
|
||||
|
||||
## Mobile Redirect URI
|
||||
|
||||
The redirect URI for the mobile app is `app.immich:/`, which is a [Custom Scheme](https://developer.apple.com/documentation/xcode/defining-a-custom-url-scheme-for-your-app). If this custom scheme is an invalid redirect URI for your OAuth Provider, you can work around this by doing the following:
|
||||
The redirect URI for the mobile app is `app.immich:///oauth-callback`, which is a [Custom Scheme](https://developer.apple.com/documentation/xcode/defining-a-custom-url-scheme-for-your-app). If this custom scheme is an invalid redirect URI for your OAuth Provider, you can work around this by doing the following:
|
||||
|
||||
1. Configure an http(s) endpoint to forwards requests to `app.immich:/`
|
||||
1. Configure an http(s) endpoint that forwards requests to `app.immich:///oauth-callback`
|
||||
2. Whitelist the new endpoint as a valid redirect URI with your provider.
|
||||
3. Specify the new endpoint as the `Mobile Redirect URI Override`, in the OAuth settings.
|
||||
|
||||
With these steps in place, you should be able to use OAuth from the [Mobile App](/docs/features/mobile-app.mdx) without a custom scheme redirect URI.
|
||||
|
||||
:::info
|
||||
Immich has a route (`/api/oauth/mobile-redirect`) that is already configured to forward requests to `app.immich:/`, and can be used for step 1.
|
||||
Immich has a route (`/api/oauth/mobile-redirect`) that is already configured to forward requests to `app.immich:///oauth-callback`, and can be used for step 1.
|
||||
:::
|
||||
|
||||
## Example Configuration
|
||||
@@ -154,21 +154,21 @@ Configuration of Authorised redirect URIs (Google Console)
|
||||
|
||||
Configuration of OAuth in Immich System Settings
|
||||
|
||||
| Setting | Value |
|
||||
| ---------------------------- | ------------------------------------------------------------------------------------------------------ |
|
||||
| Issuer URL | [https://accounts.google.com](https://accounts.google.com) |
|
||||
| Client ID | 7\***\*\*\*\*\*\*\***\*\*\***\*\*\*\*\*\*\***vuls.apps.googleusercontent.com |
|
||||
| Client Secret | G\***\*\*\*\*\*\*\***\*\*\***\*\*\*\*\*\*\***OO |
|
||||
| Scope | openid email profile |
|
||||
| Signing Algorithm | RS256 |
|
||||
| Storage Label Claim | preferred_username |
|
||||
| Storage Quota Claim | immich_quota |
|
||||
| Default Storage Quota (GiB) | 0 (0 for unlimited quota) |
|
||||
| Button Text | Sign in with Google (optional) |
|
||||
| Auto Register | Enabled (optional) |
|
||||
| Auto Launch | Enabled |
|
||||
| Mobile Redirect URI Override | Enabled (required) |
|
||||
| Mobile Redirect URI | [https://demo.immich.app/api/oauth/mobile-redirect](https://demo.immich.app/api/oauth/mobile-redirect) |
|
||||
| Setting | Value |
|
||||
| ---------------------------- | ---------------------------------------------------------------------------- |
|
||||
| Issuer URL | `https://accounts.google.com` |
|
||||
| Client ID | 7\***\*\*\*\*\*\*\***\*\*\***\*\*\*\*\*\*\***vuls.apps.googleusercontent.com |
|
||||
| Client Secret | G\***\*\*\*\*\*\*\***\*\*\***\*\*\*\*\*\*\***OO |
|
||||
| Scope | openid email profile |
|
||||
| Signing Algorithm | RS256 |
|
||||
| Storage Label Claim | preferred_username |
|
||||
| Storage Quota Claim | immich_quota |
|
||||
| Default Storage Quota (GiB) | 0 (0 for unlimited quota) |
|
||||
| Button Text | Sign in with Google (optional) |
|
||||
| Auto Register | Enabled (optional) |
|
||||
| Auto Launch | Enabled |
|
||||
| Mobile Redirect URI Override | Enabled (required) |
|
||||
| Mobile Redirect URI | `https://example.immich.app/api/oauth/mobile-redirect` |
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
@@ -104,7 +104,7 @@ You can choose to disable a certain type of machine learning, for example smart
|
||||
|
||||
### Smart Search
|
||||
|
||||
The smart search settings are designed to allow the search tool to be used using [CLIP](https://openai.com/research/clip) models that [can be changed](/docs/FAQ#can-i-use-a-custom-clip-model), different models will necessarily give better results but may consume more processing power, when changing a model it is mandatory to re-run the
|
||||
The [smart search](/docs/features/smart-search) settings allow the search tool to use [CLIP](https://openai.com/research/clip) models that [can be changed](/docs/FAQ#can-i-use-a-custom-clip-model). Different models may give better results but can consume more processing power. When changing a model, it is mandatory to re-run the
|
||||
Smart Search job on all images to fully apply the change.
|
||||
|
||||
:::info Internet connection
|
||||
@@ -113,15 +113,23 @@ After downloading, there is no need for Immich to connect to the network
|
||||
Unless version checking has been enabled in the settings.
|
||||
:::
|
||||
|
||||
### Duplicate Detection
|
||||
|
||||
Use CLIP embeddings to find likely duplicates. The maximum detection distance can be configured to adjust how strictly two images must match before they are considered duplicates.
|
||||
|
||||
- **Maximum detection distance -** Maximum distance between two images to consider them duplicates, ranging from 0.001-0.1. Higher values will detect more duplicates, but may result in false positives.
|
||||
|
||||
### Facial Recognition
|
||||
|
||||
Under these settings, you can change the facial recognition settings
|
||||
Editable settings:
|
||||
|
||||
- **Facial Recognition Model -** Models are listed in descending order of size. Larger models are slower and use more memory, but produce better results. Note that you must re-run the Face Detection job for all images upon changing a model.
|
||||
- **Min Detection Score -** Minimum confidence score for a face to be detected from 0-1. Lower values will detect more faces but may result in false positives.
|
||||
- **Max Recognition Distance -** Maximum distance between two faces to be considered the same person, ranging from 0-2. Lowering this can prevent labeling two people as the same person, while raising it can prevent labeling the same person as two different people. Note that it is easier to merge two people than to split one person in two, so err on the side of a lower threshold when possible.
|
||||
- **Min Recognized Faces -** The minimum number of recognized faces for a person to be created (AKA: Core face). Increasing this makes Facial Recognition more precise at the cost of increasing the chance that a face is not assigned to a person.
|
||||
- **Facial Recognition Model**
|
||||
- **Min Detection Score**
|
||||
- **Max Recognition Distance**
|
||||
- **Min Recognized Faces**
|
||||
|
||||
You can learn more about these options on the [Facial Recognition page](/docs/features/facial-recognition#how-face-detection-works)
|
||||
|
||||
:::info
|
||||
When changing the values in Min Detection Score, Max Recognition Distance, and Min Recognized Faces.
|
||||
@@ -153,7 +161,7 @@ SMTP server setup, for user creation notifications, new albums, etc. More inform
|
||||
|
||||
### External Domain
|
||||
|
||||
When set, will override the domain name used when viewing and copying a shared link.
|
||||
Overrides the domain name in shared links and email notifications. The URL should not include a trailing slash.
|
||||
|
||||
### Welcome Message
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ This environment includes the services below. Additional details are available i
|
||||
- Web app - [`/web`](https://github.com/immich-app/immich/tree/main/web)
|
||||
- Machine learning - [`/machine-learning`](https://github.com/immich-app/immich/tree/main/machine-learning)
|
||||
- Redis
|
||||
- PostgreSQL development database with exposed port `5432` so you can use any database client to acess it
|
||||
- PostgreSQL development database with exposed port `5432` so you can use any database client to access it
|
||||
|
||||
All the services are packaged to run with a single Docker Compose command.
|
||||
|
||||
|
||||
@@ -4,7 +4,8 @@
|
||||
|
||||
### Unit tests
|
||||
|
||||
Unit are run by calling `npm run test` from the `server` directory.
|
||||
Unit tests are run by calling `npm run test` from the `server/` directory.
|
||||
You need to run `npm install` (in `server/`) once beforehand.
|
||||
|
||||
### End to end tests
|
||||
|
||||
@@ -14,6 +15,11 @@ The e2e tests can be run by first starting up a test production environment via:
|
||||
make e2e
|
||||
```
|
||||
|
||||
Before you can run the tests, you need to run the following commands _once_:
|
||||
|
||||
- `npm install` (in `e2e/`)
|
||||
- `make open-api` (in the project root `/`)
|
||||
|
||||
Once the test environment is running, the e2e tests can be run via:
|
||||
|
||||
```bash
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
## Overview
|
||||
|
||||
Immich recognizes faces in your photos and videos and groups them together. You can then assign names to the faces and search for them.
|
||||
Immich recognizes faces in your photos and videos and groups them together into people. You can then assign names to these people and search for them.
|
||||
|
||||
The list of people is shown in the Explore page.
|
||||
|
||||
@@ -18,13 +18,75 @@ The asset detail view will also show the faces that are recognized in the asset.
|
||||
|
||||
## Actions
|
||||
|
||||
Additional actions you can do with a detected person are:
|
||||
Additional actions you can do include:
|
||||
|
||||
- Change the feature face photo of the person
|
||||
- Set date of birth
|
||||
- Merge two or more detected faces into one person
|
||||
- Hide face
|
||||
- Changing the feature photo of the person
|
||||
- Setting a person's date of birth
|
||||
- Merging two or more detected faces into one person
|
||||
- Hiding the faces of a person from the Explore page and detail view
|
||||
- Assigning an unrecognized face to a person
|
||||
|
||||
These actions can be found in the app bar when you access the detail view of a person.
|
||||
|
||||
<img src={require('./img/facial-recognition-4.png').default} title='Facial Recognition 4' width="70%"/>
|
||||
|
||||
## How Face Detection Works
|
||||
|
||||
Face detection sends the generated preview image to the machine learning service for processing. The service checks if it has the relevant model downloaded and downloads it if not. The image is decoded, pre-processed and passed to the face detection model (with hardware acceleration if configured). The bounding boxes and scores outputted from this model are used to crop and preprocess the image once again to be passed to a facial recognition model (also accelerated if configured). The embeddings from the recognition model, together with the bounding boxes and scores from the face detection model, are then sent back to the server to be added to the database. The embeddings in particular are indexed so they can be searched quickly during facial recognition clustering.
|
||||
|
||||
## How Facial Recognition Works
|
||||
|
||||
The facial recognition algorithm we use is derived from [DBSCAN](https://www.youtube.com/watch?v=RDZUdRSDOok), a popular clustering algorithm. It essentially treats each detected face as a point in a graph and aims to group points that are close to each other.
|
||||
|
||||
:::note
|
||||
An important concept is whether something is a _core point_. A core point has a minimum number of points around it within a certain distance. A non-core point can only be assigned to a cluster if it can reach a core point; a non-core point can't be used to extend a cluster even if it's part of one. In Immich, the _Minimum Recognized Faces_ setting controls the threshold to be considered a core point.
|
||||
:::
|
||||
|
||||
For each face, it looks around it to find other faces within a certain distance. Faces within this distance are considered similar, so it then checks if any of these faces are associated with a person.
|
||||
|
||||
If there is an existing person, it assigns the person of the most similar face to the face being processed.
|
||||
|
||||
If there is none, then it has to determine something from the DBSCAN algorithm: whether the face is a _core point_. If there are a certain number of similar faces (by default 3, including the face being considered), then this face is a core point. A new person is created for this face and the face is assigned to it. When other faces are processed, if they're similar to this face, they'll see that it has an associated person and can be assigned to that person.
|
||||
|
||||
However, if there aren't enough similar faces, no new person will be created. Instead, the face will wait for all the other faces to be processed to see if any matches that previously didn't have an associated person now do. If they do, then the face will be assigned to that person. If not, this face will be considered an outlier, such as a stranger in the background of an image.
|
||||
|
||||
The algorithm has some subtle differences compared to DBSCAN:
|
||||
|
||||
- DBSCAN doesn't have a concept of incremental clustering: it clusters all points at once. In contrast, facial recognition has to evolve as more assets are added without re-clustering everything each time.
|
||||
- The algorithm described above works within a set of queued assets. Once these faces are processed and a new round of faces are detected, the behavior will not be the same as traditional DBSCAN since it preserves the clusters (people) generated from the previous round.
|
||||
- Facial recognition tries to wait for face detection and thumbnail generation to complete before starting for this reason: the larger the set of faces in the queue, the better the results will be.
|
||||
- Re-running facial recognition on all assets afterwards does behave like DBSCAN, however.
|
||||
- DBSCAN is designed for range-based searches (i.e. points within a distance), but high-dimensional vector indices are generally optimized for getting the closest K results. The recognition algorithm doesn't try to get _all_ similar faces within a distance for performance reasons. Instead, it searches for a small number of matches for each face. The end result should be very similar if not identical, but with possibly different performance characteristics.
|
||||
- Because of this, part of the recognition process is handled during a nightly job to ensure that unassigned faces with potential matches can be recognized.
|
||||
|
||||
:::tip
|
||||
If you didn't import your assets at once or if the server was able to process jobs faster than you could upload them, it's possible that the clustering was suboptimal. If you haven't put effort into the current results, it may be worth re-running facial recognition on all assets for the best starting point. If it's too late for that, you can also manually assign a selection of unassigned faces and queue _Missing_ for Facial Recognition to help it learn and assign more faces automatically.
|
||||
:::
|
||||
|
||||
## Configuration
|
||||
|
||||
Navigating to Administration > Settings > Machine Learning Settings > Facial Recognition will show the options available.
|
||||
|
||||
:::tip
|
||||
It's better to only tweak the parameters here than to set them to something very different unless you're ready to test a variety of options. If you do need to set a parameter to a strict setting, relaxing other settings can be a good option to compensate, and vice versa.
|
||||
:::
|
||||
|
||||
### Facial recognition model
|
||||
|
||||
There are a few different models available; the default is typically considered the best. On more constrained systems where the default is too intensive, you can choose a smaller model instead.
|
||||
|
||||
### Minimum detection score
|
||||
|
||||
This setting affects whether a result from the face detection model is filtered out as a false positive. It may seem tempting to set this low to detect more faces, but it can lead to false positives that are difficult to deal with and can harm facial recognition. It is strongly recommended not to go below 0.5 for this setting. Setting it to a very high number like 0.9 is also not recommended: the default is already biased toward precision, so a threshold that high leads to many undetected faces.
|
||||
|
||||
After changing this setting, it will only apply to new face detection jobs. To apply the new setting to all assets, you need to re-run face detection for all assets.
|
||||
|
||||
### Maximum recognition distance
|
||||
|
||||
The distance threshold described in How Facial Recognition Works. The default works well for most people, but it may be worth lowering it if the library has twins or otherwise very similar looking people. A threshold that's too low just means needing to merge duplicate people after facial recognition, whereas a threshold too high can produce unsalvageable results. It is strongly recommended not to go below 0.3 or above 0.7.
|
||||
|
||||
### Minimum recognized faces
|
||||
|
||||
The core point threshold described in How Facial Recognition Works. This setting has a few implications. First, it takes effect immediately in that people with fewer faces than this are hidden from view. Secondly, it makes clustering more robust as it prevents loosely-related faces from being linked to each other by requiring a certain level of density.
|
||||
|
||||
Increasing this setting is a good idea if you increase the recognition distance or reduce the minimum detection score. Setting it to 1 effectively disables the concept of core points, but can be an option if you prefer a more hands-on approach.
|
||||
|
||||
@@ -123,6 +123,7 @@ Once this is done, you can continue to step 3 of "Basic Setup".
|
||||
|
||||
- You may want to choose a slower preset than for software transcoding to maintain quality and efficiency
|
||||
- While you can use VAAPI with NVIDIA and Intel devices, prefer the more specific APIs since they're more optimized for their respective devices
|
||||
- You can confirm the device is being recognized and used by checking its utilization (via `nvtop` for NVIDIA, `intel_gpu_top` for Intel, etc.) when transcoding. A lack of error logs when transcoding also indicates that it's being used.
|
||||
|
||||
[hw-file]: https://github.com/immich-app/immich/releases/latest/download/hwaccel.transcoding.yml
|
||||
[nvct]: https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html
|
||||
|
||||
@@ -107,15 +107,17 @@ The `immich-server` container will need access to the gallery. Modify your docke
|
||||
+ - /mnt/nas/christmas-trip:/mnt/media/christmas-trip:ro
|
||||
+ - /home/user/old-pics:/mnt/media/old-pics:ro
|
||||
+ - /mnt/media/videos:/mnt/media/videos:ro
|
||||
+ - /mnt/media/videos2:/mnt/media/videos2 # the files in this folder can be deleted, as it does not end with :ro
|
||||
+ - "C:/Users/user_name/Desktop/my media:/mnt/media/my-media:ro" # import path in Windows system.
|
||||
```
|
||||
|
||||
:::tip
|
||||
The `ro` flag at the end only gives read-only access to the volumes. While Immich does not modify files, it's a good practice to mount read-only.
|
||||
The `ro` flag at the end only gives read-only access to the volumes.
|
||||
This will disallow the images from being deleted in the web UI, or adding metadata to the library ([XMP sidecars](/docs/features/xmp-sidecars)).
|
||||
:::
|
||||
|
||||
:::info
|
||||
_Remember to bring the container `docker compose down/up` to register the changes. Make sure you can see the mounted path in the container._
|
||||
_Remember to run `docker compose up -d` to register the changes. Make sure you can see the mounted path in the container._
|
||||
:::
|
||||
|
||||
### Create External Libraries
|
||||
|
||||
@@ -32,12 +32,13 @@ You do not need to redo any machine learning jobs after enabling hardware accele
|
||||
- Where and how you can get this file depends on device and vendor, but typically, the device vendor also supplies these
|
||||
- The `hwaccel.ml.yml` file assumes the path to it is `/usr/lib/libmali.so`, so update accordingly if it is elsewhere
|
||||
- The `hwaccel.ml.yml` file assumes an additional file `/lib/firmware/mali_csffw.bin`, so update accordingly if your device's driver does not require this file
|
||||
- Optional: Configure your `.env` file, see [environment variables](/docs/install/environment-variables) for ARM NN specific settings
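As a very rough sketch (not the literal contents of `hwaccel.ml.yml`), the overrides described above boil down to passing the GPU device and the driver files into the machine-learning container. The `/dev/mali0` device node is an assumption here and may be named differently on your board; the library and firmware paths are the ones stated in the bullets above:

```yaml
services:
  immich-machine-learning:
    devices:
      # Mali GPU device node (assumed name; check /dev on your board)
      - /dev/mali0:/dev/mali0
    volumes:
      # Proprietary Mali driver; adjust if libmali.so lives elsewhere
      - /usr/lib/libmali.so:/usr/lib/libmali.so
      # Firmware blob; drop this line if your driver does not need it
      - /lib/firmware/mali_csffw.bin:/lib/firmware/mali_csffw.bin
```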
|
||||
|
||||
#### CUDA
|
||||
|
||||
- The GPU must have compute capability 5.2 or greater.
|
||||
- The server must have the official NVIDIA driver installed.
|
||||
- The installed driver must be >= 535 (it must support CUDA 12.2).
|
||||
- The installed driver must be >= 545 (it must support CUDA 12.3.2).
|
||||
- On Linux (except for WSL2), you also need to have [NVIDIA Container Toolkit][nvct] installed.
|
||||
|
||||
#### OpenVINO
|
||||
|
||||
@@ -66,7 +66,7 @@ The provided file is just a starting point. There are a ton of ways to configure
|
||||
After bringing down the containers with `docker compose down` and back up with `docker compose up -d`, a Prometheus instance will now collect metrics from the immich server and microservices containers. Note that we didn't need to expose any new ports for these containers - the communication is handled in the internal Docker network.
|
||||
|
||||
:::note
|
||||
To see exactly what metrics are made available, you can additionally add `8081:8081` to the server container's ports and `8082:8081` to the microservices container's ports. Visiting the `/metrics` endpoint for these services will show the same raw data that Prometheus collects.
|
||||
To see exactly what metrics are made available, you can additionally add `8081:8081` to the server container's ports and `8082:8082` to the microservices container's ports. Visiting the `/metrics` endpoint for these services will show the same raw data that Prometheus collects.
|
||||
:::
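A hedged sketch of those additions in `docker-compose.yml` (the service names follow the guide's wording; keep your existing port mappings and remove these again once you are done inspecting the raw metrics):

```yaml
services:
  immich-server:
    ports:
      # ...existing port mappings...
      - 8081:8081 # exposes the server's /metrics endpoint
  immich-microservices:
    ports:
      - 8082:8082 # exposes the microservices' /metrics endpoint
```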
|
||||
|
||||
### Usage
|
||||
|
||||
@@ -16,7 +16,7 @@ When sharing shared albums, whats shared is:
|
||||
|
||||
- Download all assets as zip file (Web only).
|
||||
:::info Archive size limited.
|
||||
If the size of the album exceeds 4GB, the archive files will be divided into 4GB each.
|
||||
If the size of the album exceeds 4GB, the archive will by default be split into parts of 4GB each. This can be changed on the user settings page.
|
||||
:::
|
||||
- Add a description to the album (Web only).
|
||||
- Slideshow view (Web only).
|
||||
@@ -73,14 +73,14 @@ You can edit the link properties, options include;
|
||||
- **Allow public user to download -** whether to allow whoever has the link to download all the images or a certain image (optional).
|
||||
- **Allow public user to upload -** whether to allow whoever has the link to upload assets to the album (optional).
|
||||
:::info
|
||||
whoever has the link and have uploaded files cannot delete them.
|
||||
Users who upload files via the link cannot delete them afterward.
|
||||
:::
|
||||
- **Expire after -** adding an expiration date to the link (optional).
|
||||
|
||||
## Share Specific Assets
|
||||
|
||||
A user can share specific assets without linking them to a specific album.
|
||||
in order to do so;
|
||||
In order to do this:
|
||||
|
||||
1. Go to the timeline
|
||||
2. Select the assets (Shift can be used for multiple selection)
|
||||
@@ -152,7 +152,7 @@ Some of the features are not available on mobile, to understand what the full fe
|
||||
|
||||
## Sharing Between Users
|
||||
|
||||
#### Add or remove users from the album.
|
||||
#### Add or remove users from the album
|
||||
|
||||
:::info remove user(s)
|
||||
When a user is removed from the album, the photos they uploaded will still appear in the album.
|
||||
|
||||
@@ -7,29 +7,30 @@ Immich uses Postgres as its search database for both metadata and smart search.
|
||||
|
||||
Smart search is powered by the [pgvecto.rs](https://github.com/tensorchord/pgvecto.rs) extension, utilizing machine learning models like [CLIP](https://openai.com/research/clip) to provide relevant search results. This allows for freeform searches without requiring specific keywords in the image or video metadata.
|
||||
|
||||
Archived photos are not included in search results by default. To include them, mark the checkbox in [advanced search filters](/docs/features/smart-search#advanced-search-filters).
|
||||
|
||||
:::tip Alternative CLIP Models
|
||||
More powerful models can be used for more accurate search results. For more information, see the related [FAQ](/docs/FAQ#can-i-use-a-custom-clip-model).
|
||||
:::
|
||||
|
||||
:::info
|
||||
Smart Search is currently limited to 5,000 results for a single search on the web.
|
||||
:::
|
||||
|
||||
## Advanced Search Filters
|
||||
|
||||
In addition, Immich offers advanced search functionality, allowing you to find specific content using customizable search filters. These filters include location, one or more faces, specific albums, and more. You can try out the search filters on the [Demo site](https://demo.immich.app).
|
||||
|
||||
Smart search features include:
|
||||
The filters you can search by include:
|
||||
|
||||
- Search for one or more faces (with or without context search).
|
||||
- Search by Country or State or City or by all three.
|
||||
- Search by camera make and model.
|
||||
- Search by date range.
|
||||
- Search by file name.
|
||||
- Search by media types: image, video or all (**Note:** Image includes live images).
|
||||
- Search by condition: not in any album or archive or Favorite or all conditions.
|
||||
- People
|
||||
- Location
|
||||
- Country
|
||||
- State
|
||||
- City
|
||||
- Camera
|
||||
- Make
|
||||
- Model
|
||||
- Date range
|
||||
- File name or extension
|
||||
- Media type
|
||||
- Image (including live/motion photos)
|
||||
- Video
|
||||
- All
|
||||
- Condition
|
||||
- Not in any album
|
||||
- Archived
|
||||
- Favorited
|
||||
|
||||
<Tabs>
|
||||
<TabItem value="Computer" label="Computer" default>
|
||||
@@ -47,3 +48,27 @@ Some search examples:
|
||||
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
## Configuration
|
||||
|
||||
Navigating to `Administration > Settings > Machine Learning Settings > Smart Search` will show the options available.
|
||||
|
||||
### CLIP model
|
||||
|
||||
More powerful models can be used for more accurate search results, but are slower and can require more server resources. Check out the models [here][huggingface-clip] for more options!
|
||||
|
||||
[Multilingual models][huggingface-multilingual-clip] are also available so users can search in their native language. These models support over 100 languages; the `nllb` models in particular support 200.
|
||||
:::note
|
||||
Multilingual models are much slower and larger and perform slightly worse for English than English-only models. For this reason, only use them if you actually intend to search in a language besides English.
|
||||
|
||||
As a special case, the `ViT-H-14-quickgelu__dfn5b` and `ViT-H-14-378-quickgelu__dfn5b` models are excellent at many European languages despite not specifically being multilingual. They're very intensive regardless, however - especially the latter.
|
||||
:::
|
||||
|
||||
Once you've chosen a model, change this setting to the name of the model you chose. Be sure to re-run Smart Search on all assets after this change.
|
||||
|
||||
:::note
|
||||
Feel free to make a feature request if there's a model you want to use that we don't currently support.
|
||||
:::
|
||||
|
||||
[huggingface-clip]: https://huggingface.co/collections/immich-app/clip-654eaefb077425890874cd07
|
||||
[huggingface-multilingual-clip]: https://huggingface.co/collections/immich-app/multilingual-clip-654eb08c2382f591eeb8c2a7
|
||||
|
||||
@@ -13,14 +13,14 @@ In our `.env` file, we will define variables that will help us in the future whe
|
||||
|
||||
# Custom location where your uploaded, thumbnails, and transcoded video files are stored
|
||||
- UPLOAD_LOCATION=./library
|
||||
+ UPLOAD_LOCATION=/custom/location/on/your/system/immich/immich_files
|
||||
+ THUMB_LOCATION=/custom/location/on/your/system/immich/thumbs
|
||||
+ ENCODED_VIDEO_LOCATION=/custom/location/on/your/system/immich/encoded-video
|
||||
+ PROFILE_LOCATION=/custom/location/on/your/system/immich/profile
|
||||
+ UPLOAD_LOCATION=/custom/path/immich/immich_files
|
||||
+ THUMB_LOCATION=/custom/path/immich/thumbs
|
||||
+ ENCODED_VIDEO_LOCATION=/custom/path/immich/encoded-video
|
||||
+ PROFILE_LOCATION=/custom/path/immich/profile
|
||||
...
|
||||
```
|
||||
|
||||
After defining the locations for these files, we will edit the `docker-compose.yml` file accordingly and add the new variables to the `immich-server` and `immich-microservices` containers.
|
||||
After defining the locations for these files, we will edit the `docker-compose.yml` file accordingly and add the new variables to the `immich-server` container.
|
||||
|
||||
```diff title="docker-compose.yml"
|
||||
services:
|
||||
@@ -29,16 +29,6 @@ services:
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
+ - ${THUMB_LOCATION}:/usr/src/app/upload/thumbs
|
||||
+ - ${ENCODED_VIDEO_LOCATION}:/usr/src/app/upload/encoded-video
|
||||
+ - ${PROFILE_LOCATION}:/usr/src/app/upload/profile
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
|
||||
...
|
||||
|
||||
immich-microservices:
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
+ - ${THUMB_LOCATION}:/usr/src/app/upload/thumbs
|
||||
+ - ${ENCODED_VIDEO_LOCATION}:/usr/src/app/upload/encoded-video
|
||||
+ - ${PROFILE_LOCATION}:/usr/src/app/upload/profile
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
```
|
||||
@@ -46,7 +36,6 @@ services:
|
||||
Restart Immich to register the changes.
|
||||
|
||||
```
|
||||
docker compose down
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
|
||||
@@ -1,8 +1,22 @@
|
||||
# Create Custom Map Styles for Immich Using Maptiler
|
||||
# Custom Map Styles
|
||||
|
||||
You may decide that you'd like to modify the style document which is used to draw the maps in Immich. This can be done easily using Maptiler, if you do not want to write an entire JSON document by hand.
|
||||
You may decide that you'd like to modify the style document which is used to
|
||||
draw the maps in Immich. In addition to visual customization, this also allows
|
||||
you to pick your own map tile provider instead of the default one. The default
|
||||
`style.json` for [light theme](https://github.com/immich-app/immich/tree/main/server/resources/style-light.json)
|
||||
and [dark theme](https://github.com/immich-app/immich/blob/main/server/resources/style-dark.json)
|
||||
can be used as a basis for creating your own style.
|
||||
|
||||
## Steps
|
||||
There are several sources for already-made `style.json` map themes, as well as
|
||||
online generators you can use.
|
||||
|
||||
1. In **Immich**, navigate to **Administration --> Settings --> Map & GPS Settings** and expand the **Map Settings** subsection.
|
||||
2. Paste the link to your JSON style in either the **Light Style** or **Dark Style**. (You can add different styles which will help make the map style more appropriate depending on whether you set **Immich** to Light or Dark mode.)
|
||||
3. Save your selections. Reload the map, and enjoy your custom map style!
|
||||
|
||||
## Use Maptiler to build a custom style
|
||||
|
||||
Customizing the map style can be done easily using Maptiler, if you do not want to write an entire JSON document by hand.
|
||||
|
||||
1. Create a free account at https://cloud.maptiler.com
|
||||
2. Once logged in, you can either create a brand new map by clicking on **New Map**, selecting a starter map, and then clicking **Customize**, OR by selecting a **Standard Map** and customizing it from there.
|
||||
@@ -11,6 +25,3 @@ You may decide that you'd like to modify the style document which is used to dra
|
||||
5. Next, **Publish** your style using the **Publish** button at the top right. This will deploy it to production, which means it is able to be exposed over the Internet. Maptiler will present an interactive side-by-side map with the original and your changes prior to publication.<br/>
|
||||
6. Maptiler will warn you that changing the map will change it across all apps using the map. Since no apps are using the map yet, this is okay.
|
||||
7. Clicking on the name of your new map at the top left will bring you to the item's **details** page. From here, copy the link to the JSON style under **Use vector style**. This link will automatically contain your personal API key to Maptiler.
|
||||
8. In **Immich**, navigate to **Administration --> Settings --> Map & GPS Settings** and expand the **Map Settings** subsection.
|
||||
9. Paste the link to your JSON style in either the **Light Style** or **Dark Style**. (You can add different styles which will help make the map style more appropriate depending on whether you set **Immich** to Light or Dark mode.)
|
||||
10. Save your selections. Reload the map, and enjoy your custom map style!
|
||||
|
||||
@@ -5,7 +5,7 @@ Keep in mind that mucking around in the database might set the moon on fire. Avo
|
||||
:::
|
||||
|
||||
:::tip
|
||||
Run `docker exec -it immich_postgres psql immich <DB_USERNAME>` to connect to the database via the container directly.
|
||||
Run `docker exec -it immich_postgres psql --dbname=immich --username=<DB_USERNAME>` to connect to the database via the container directly.
|
||||
|
||||
(Replace `<DB_USERNAME>` with the value from your [`.env` file](/docs/install/environment-variables#database)).
|
||||
:::
|
||||
@@ -23,7 +23,7 @@ SELECT * FROM "assets" WHERE "originalFileName" LIKE '%_2023_%'; -- all files wi
|
||||
```
|
||||
|
||||
```sql title="Find by path"
|
||||
SELECT * FROM "assets" WHERE "originalPath" = 'upload/library/admin/2023/2023-09-03/PXL_20230903_232542848.jpg';
|
||||
SELECT * FROM "assets" WHERE "originalPath" = 'upload/library/admin/2023/2023-09-03/PXL_2023.jpg';
|
||||
SELECT * FROM "assets" WHERE "originalPath" LIKE 'upload/library/admin/2023/%';
|
||||
```
|
||||
|
||||
@@ -37,6 +37,12 @@ SELECT * FROM "assets" WHERE "checksum" = decode('69de19c87658c4c15d9cacb9967b8e
|
||||
SELECT * FROM "assets" WHERE "checksum" = '\x69de19c87658c4c15d9cacb9967b8e033bf74dd1'; -- alternate notation
|
||||
```
|
||||
|
||||
```sql title="Find duplicate assets with identical checksum (SHA-1) (excluding trashed files)"
|
||||
SELECT T1."checksum", array_agg(T2."id") ids FROM "assets" T1
|
||||
INNER JOIN "assets" T2 ON T1."checksum" = T2."checksum" AND T1."id" != T2."id" AND T2."deletedAt" IS NULL
|
||||
WHERE T1."deletedAt" IS NULL GROUP BY T1."checksum";
|
||||
```
|
||||
|
||||
```sql title="Live photos"
|
||||
SELECT * FROM "assets" WHERE "livePhotoVideoId" IS NOT NULL;
|
||||
```
|
||||
@@ -79,8 +85,7 @@ SELECT "assets"."type", COUNT(*) FROM "assets" GROUP BY "assets"."type";
|
||||
```sql title="Count by type (per user)"
|
||||
SELECT "users"."email", "assets"."type", COUNT(*) FROM "assets"
|
||||
JOIN "users" ON "assets"."ownerId" = "users"."id"
|
||||
GROUP BY "assets"."type", "users"."email"
|
||||
ORDER BY "users"."email";
|
||||
GROUP BY "assets"."type", "users"."email" ORDER BY "users"."email";
|
||||
```
|
||||
|
||||
```sql title="Failed file movements"
|
||||
@@ -106,3 +111,9 @@ SELECT "key", "value" FROM "system_metadata" WHERE "key" = 'system-config';
|
||||
```sql title="Delete person and unset it for the faces it was associated with"
|
||||
DELETE FROM "person" WHERE "name" = 'PersonNameHere';
|
||||
```
|
||||
|
||||
## Postgres internal
|
||||
|
||||
```sql title="Change DB_PASSWORD"
|
||||
ALTER USER <DB_USERNAME> WITH ENCRYPTED PASSWORD 'newpasswordhere';
|
||||
```
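The server reads this password from the `DB_PASSWORD` value in `.env`, so after running the statement above you will typically want to update that file and recreate the containers. A minimal sketch (the password value is a placeholder):

```bash
# keep .env in sync with the new database password, then recreate the containers
sed -i 's/^DB_PASSWORD=.*/DB_PASSWORD=newpasswordhere/' .env
docker compose up -d
```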
|
||||
|
||||
@@ -6,36 +6,18 @@ in a directory on the same machine.
|
||||
|
||||
# Mount the directory into the containers.
|
||||
|
||||
Edit `docker-compose.yml` to add two new mount points in the section `immich-server:` under `volumes:`
|
||||
Edit `docker-compose.yml` to add one or more new mount points in the section `immich-server:` under `volumes:`.
|
||||
If you want Immich to be able to delete the images in the external library or add metadata ([XMP sidecars](/docs/features/xmp-sidecars)), remove `:ro` from the end of the mount point.
|
||||
|
||||
```diff
|
||||
immich-server:
|
||||
volumes:
|
||||
+ - ${EXTERNAL_PATH}:/usr/src/app/external
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
+ - /home/user/photos1:/home/user/photos1:ro
|
||||
+ - /mnt/photos2:/mnt/photos2:ro # you can delete this line if you only have one mount point, or you can add more lines if you have more than two
|
||||
```
|
||||
|
||||
Edit `.env` to define `EXTERNAL_PATH`, substituting in the correct path for your computer:
|
||||
|
||||
```
|
||||
EXTERNAL_PATH=<your-path-here>
|
||||
```
|
||||
|
||||
On my computer, for example, I use this path:
|
||||
|
||||
```
|
||||
EXTERNAL_PATH=/home/tenino/photos
|
||||
```
|
||||
|
||||
:::info EXTERNAL_PATH design
|
||||
The design choice to put EXTERNAL_PATH into .env, rather than hard-coding two copies of the absolute path in the yml file, keeps things easier to maintain: if the same path had to be kept in sync in two places, then someday when you move the data and update only one of them, everything would break mysteriously.
|
||||
:::
|
||||
|
||||
Restart Immich.
|
||||
|
||||
```
|
||||
docker compose down
|
||||
docker compose up -d
|
||||
```
|
||||
Restart Immich by running `docker compose up -d`.
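Once the containers are back up, you can quickly confirm the new mount is visible from inside the server container; a minimal check, assuming the example path above and the default `immich_server` container name:

```bash
# should list the photos from the host directory if the bind mount is correct
docker exec immich_server ls /home/user/photos1
```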
|
||||
|
||||
# Create the library
|
||||
|
||||
|
||||
@@ -4,20 +4,20 @@ This page gives a few pointers on how to access your Immich instance from outsid
|
||||
You can read the [full discussion in Discord](https://discord.com/channels/979116623879368755/1122615710846308484)
|
||||
|
||||
:::danger
|
||||
Never forward port 2283 directly to the internet without additional configuration. This will expose the web interface via http to the internet, making you succeptible to [man in the middle](https://en.wikipedia.org/wiki/Man-in-the-middle_attack) attacks.
|
||||
Never forward port 2283 directly to the internet without additional configuration. This will expose the web interface via http to the internet, making you susceptible to [man in the middle](https://en.wikipedia.org/wiki/Man-in-the-middle_attack) attacks.
|
||||
:::
|
||||
|
||||
## Option 1: VPN to home network
|
||||
|
||||
You may use a VPN service to open an encrypted connection to your Immich instance. OpenVPN and WireGuard are two popular VPN solutions. Here is a guide on setting up VPN access to your server: [Pi-hole documentation](https://docs.pi-hole.net/guides/vpn/wireguard/overview/)
|
||||
|
||||
### Pros:
|
||||
### Pros
|
||||
|
||||
- Simple to set up and very secure.
|
||||
- Single point of potential failure, i.e., the VPN software itself. Even if there is a zero-day vulnerability in Immich, you will not be at risk.
|
||||
- Both WireGuard and OpenVPN are independently security-audited, so the risk of serious zero-day exploits is minimal.
|
||||
|
||||
### Cons:
|
||||
### Cons
|
||||
|
||||
- If you don't have a static IP address, you would need to set up a [Dynamic DNS](https://www.cloudflare.com/learning/dns/glossary/dynamic-dns/). [DuckDNS](https://www.duckdns.org/) is a free DDNS provider.
|
||||
- VPN software needs to be installed and active on both the server and the client.
|
||||
@@ -27,6 +27,10 @@ You may use a VPN service to open an encrypted connection to your Immich instanc
|
||||
|
||||
If you are unable to open a port on your router for WireGuard or OpenVPN to your server, [Tailscale](https://tailscale.com/) is a good option. Tailscale mediates a peer-to-peer WireGuard tunnel between your server and remote device, even if one or both of them are behind a [NAT firewall](https://en.wikipedia.org/wiki/Network_address_translation).
|
||||
|
||||
:::tip Video tutorial
|
||||
You can learn how to set up Tailscale together with Immich by following the [tutorial video](https://www.youtube.com/watch?v=Vt4PDUXB_fg) they created.
|
||||
:::
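As a rough idea of how small the setup is, here is a sketch of the server-side commands, assuming a Linux server (the install script URL is Tailscale's official one, and 2283 is Immich's default port):

```bash
# on the Immich server (repeat on each client device, or use the Tailscale apps)
curl -fsSL https://tailscale.com/install.sh | sh
sudo tailscale up
# then reach Immich from any device on your tailnet, e.g.
# http://<tailscale-ip-or-machine-name>:2283
```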
|
||||
|
||||
### Pros
|
||||
|
||||
- Minimal configuration needed on server and client sides.
|
||||
@@ -44,7 +48,7 @@ A reverse proxy is a service that sits between web servers and clients. A revers
|
||||
|
||||
If you're hosting your own reverse proxy, [Nginx](https://docs.nginx.com/nginx/admin-guide/web-server/reverse-proxy/) is a great option. An example configuration for Nginx is provided [here](/docs/administration/reverse-proxy.md).
|
||||
|
||||
You'll also need your own certificate to authenticate https connections. If you're making Immich publicly accesible, [Let's Encrypt](https://letsencrypt.org/) can provide a free certificate for your domain and is the recommended option. Alternatively, a [self-signed certificate](https://en.wikipedia.org/wiki/Self-signed_certificate) allows you to encrypt your connection to Immich, but it raises a security warning on the client's browser.
|
||||
You'll also need your own certificate to authenticate https connections. If you're making Immich publicly accessible, [Let's Encrypt](https://letsencrypt.org/) can provide a free certificate for your domain and is the recommended option. Alternatively, a [self-signed certificate](https://en.wikipedia.org/wiki/Self-signed_certificate) allows you to encrypt your connection to Immich, but it raises a security warning on the client's browser.
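For example, if you are using the Nginx setup linked above on a Debian/Ubuntu host, obtaining a Let's Encrypt certificate can be as simple as the following sketch (the domain is a placeholder):

```bash
# install certbot with the Nginx plugin, then request and install a certificate
sudo apt install certbot python3-certbot-nginx
sudo certbot --nginx -d photos.example.com
```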
|
||||
|
||||
A remote reverse proxy like [Cloudflare](https://www.cloudflare.com/learning/cdn/glossary/reverse-proxy/) increases security by hiding the server IP address, which makes targeted attacks like [DDoS](https://www.cloudflare.com/learning/ddos/what-is-a-ddos-attack/) harder.
|
||||
|
||||
|
||||
@@ -4,14 +4,15 @@ To alleviate [performance issues on low-memory systems](/docs/FAQ.mdx#why-is-imm
|
||||
|
||||
- Set the URL in Machine Learning Settings on the Admin Settings page to point to the designated ML system, e.g. `http://workstation:3003`.
|
||||
- Copy the following `docker-compose.yml` to your ML system.
|
||||
- If using [hardware acceleration](/docs/features/ml-hardware-acceleration), the [hwaccel.ml.yml](https://github.com/immich-app/immich/releases/latest/download/hwaccel.ml.yml) file also needs to be added.
|
||||
- Start the container by running `docker compose up -d` (these steps are condensed into the sketch below).
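Condensed into commands, the setup on the ML machine might look like this sketch (the directory name is arbitrary, and the hwaccel file is only needed for hardware acceleration):

```bash
# on the machine that will run machine learning
mkdir -p ~/immich-ml && cd ~/immich-ml
# save the docker-compose.yml shown further below into this folder, then:
wget https://github.com/immich-app/immich/releases/latest/download/hwaccel.ml.yml  # only if using hardware acceleration
docker compose up -d
```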
|
||||
|
||||
:::info
|
||||
Starting with version v1.93.0, face detection and facial recognition were split. From now on, face detection is done in the immich_machine_learning container, but facial recognition is done in the `microservices` worker.
|
||||
Smart Search and Face Detection will use this feature, but Facial Recognition is handled in the server.
|
||||
:::
|
||||
|
||||
:::note
|
||||
The [hwaccel.ml.yml](https://github.com/immich-app/immich/releases/latest/download/hwaccel.ml.yml) file also needs to be in the same folder if trying to use [hardware acceleration](/docs/features/ml-hardware-acceleration).
|
||||
:::danger
|
||||
When using remote machine learning, the thumbnails are sent to the remote machine learning container. Use this option carefully when the remote machine is a shared computer or a paid processing cloud.
|
||||
:::
|
||||
|
||||
```yaml
|
||||
@@ -37,3 +38,7 @@ volumes:
|
||||
```
|
||||
|
||||
Please note that version mismatches between the two hosts may cause instability and bugs, so make sure to always update them together.
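One way to check what each host is currently running before an update (the container names assume the default compose files):

```bash
# on the server host
docker inspect --format '{{ .Config.Image }}' immich_server
# on the machine-learning host
docker inspect --format '{{ .Config.Image }}' immich_machine_learning
```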
|
||||
|
||||
:::caution
|
||||
As an internal service, the machine learning container has no security measures whatsoever. Please be mindful of where it's deployed and who can access it.
|
||||
:::
|
||||
|
||||
@@ -56,7 +56,8 @@ Optionally, you can enable hardware acceleration for machine learning and transc
|
||||
|
||||
- Populate custom database information if necessary.
|
||||
- Populate `UPLOAD_LOCATION` with your preferred location for storing backup assets.
|
||||
- Consider changing `DB_PASSWORD` to something randomly generated
|
||||
- Consider changing `DB_PASSWORD` to a custom value. Postgres is not publicly exposed, so this password is only used for local authentication.
|
||||
To avoid issues with Docker parsing this value, it is best to use only the characters `A-Za-z0-9`; one way to generate such a value is sketched below.
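A sketch of one way to generate a 24-character alphanumeric password (any similar method works):

```bash
# base64 output is filtered down to letters and digits only
openssl rand -base64 48 | tr -dc 'A-Za-z0-9' | head -c 24; echo
```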
|
||||
|
||||
### Step 3 - Start the containers
|
||||
|
||||
@@ -108,7 +109,7 @@ Immich is currently under heavy development, which means you can expect [breakin
|
||||
[compose-file]: https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
|
||||
[env-file]: https://github.com/immich-app/immich/releases/latest/download/example.env
|
||||
[watchtower]: https://containrrr.dev/watchtower/
|
||||
[breaking]: https://github.com/immich-app/immich/discussions?discussions_q=label%3Abreaking-change+sort%3Adate_created
|
||||
[breaking]: https://github.com/immich-app/immich/discussions?discussions_q=label%3Achangelog%3Abreaking-change+sort%3Adate_created
|
||||
[container-auth]: https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry#authenticating-to-the-container-registry
|
||||
[releases]: https://github.com/immich-app/immich/releases
|
||||
[docker-repo]: https://docs.docker.com/engine/install/ubuntu/#install-using-the-repository
|
||||
|
||||
@@ -38,19 +38,23 @@ Regardless of filesystem, it is not recommended to use a network share for your
|
||||
|
||||
## General
|
||||
|
||||
| Variable | Description | Default | Containers | Workers |
|
||||
| :---------------------------------- | :---------------------------------------------- | :--------------------------: | :----------------------- | :----------------- |
|
||||
| `TZ` | Timezone | | server | microservices |
|
||||
| `IMMICH_ENV` | Environment (production, development) | `production` | server, machine learning | api, microservices |
|
||||
| `IMMICH_LOG_LEVEL` | Log Level (verbose, debug, log, warn, error) | `log` | server, machine learning | api, microservices |
|
||||
| `IMMICH_MEDIA_LOCATION` | Media Location | `./upload`<sup>\*1</sup> | server | api, microservices |
|
||||
| `IMMICH_CONFIG_FILE` | Path to config file | | server | api, microservices |
|
||||
| `NO_COLOR` | Set to `true` to disable color-coded log output | `false` | server, machine learning | |
|
||||
| `CPU_CORES` | Amount of cores available to the immich server | auto-detected cpu core count | server | |
|
||||
| `IMMICH_API_METRICS_PORT` | Port for the OTEL metrics | `8081` | server | api |
|
||||
| `IMMICH_MICROSERVICES_METRICS_PORT` | Port for the OTEL metrics | `8082` | server | microservices |
|
||||
| Variable | Description | Default | Containers | Workers |
|
||||
| :---------------------------------- | :---------------------------------------------------------------------------------------- | :--------------------------: | :----------------------- | :----------------- |
|
||||
| `TZ` | Timezone | | server | microservices |
|
||||
| `IMMICH_ENV` | Environment (production, development) | `production` | server, machine learning | api, microservices |
|
||||
| `IMMICH_LOG_LEVEL` | Log Level (verbose, debug, log, warn, error) | `log` | server, machine learning | api, microservices |
|
||||
| `IMMICH_MEDIA_LOCATION` | Media Location inside the container ⚠️**You probably shouldn't set this**<sup>\*1</sup>⚠️ | `./upload`<sup>\*2</sup> | server | api, microservices |
|
||||
| `IMMICH_CONFIG_FILE` | Path to config file | | server | api, microservices |
|
||||
| `NO_COLOR` | Set to `true` to disable color-coded log output | `false` | server, machine learning | |
|
||||
| `CPU_CORES` | Amount of cores available to the immich server | auto-detected cpu core count | server | |
|
||||
| `IMMICH_API_METRICS_PORT` | Port for the OTEL metrics | `8081` | server | api |
|
||||
| `IMMICH_MICROSERVICES_METRICS_PORT` | Port for the OTEL metrics | `8082` | server | microservices |
|
||||
| `IMMICH_PROCESS_INVALID_IMAGES` | When `true`, generate thumbnails for invalid images | | server | microservices |
|
||||
| `IMMICH_TRUSTED_PROXIES` | List of comma separated IPs set as trusted proxies | | server | api |
|
||||
|
||||
\*1: With the default `WORKDIR` of `/usr/src/app`, this path will resolve to `/usr/src/app/upload`.
|
||||
\*1: This path is where the Immich code looks for the files, and it is internal to the docker container. Setting it to a path on your host will certainly break things; use the `UPLOAD_LOCATION` variable instead.
|
||||
|
||||
\*2: With the default `WORKDIR` of `/usr/src/app`, this path will resolve to `/usr/src/app/upload`.
|
||||
It only needs to be set if the Immich deployment method changes.
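As a quick illustration of the format, a few of the general options above could be set like this (the values are examples only; how they reach the container depends on your compose setup):

```bash
# illustrative environment values for the immich-server container
TZ=Europe/Berlin
IMMICH_LOG_LEVEL=debug
IMMICH_TRUSTED_PROXIES=192.168.1.10,192.168.1.11
```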
|
||||
|
||||
:::tip
|
||||
@@ -155,23 +159,29 @@ Redis (Sentinel) URL example JSON before encoding:
|
||||
|
||||
## Machine Learning
|
||||
|
||||
| Variable | Description | Default | Containers |
|
||||
| :----------------------------------------------- | :------------------------------------------------------------------- | :-----------------: | :--------------- |
|
||||
| `MACHINE_LEARNING_MODEL_TTL` | Inactivity time (s) before a model is unloaded (disabled if \<= 0) | `300` | machine learning |
|
||||
| `MACHINE_LEARNING_MODEL_TTL_POLL_S` | Interval (s) between checks for the model TTL (disabled if \<= 0) | `10` | machine learning |
|
||||
| `MACHINE_LEARNING_CACHE_FOLDER` | Directory where models are downloaded | `/cache` | machine learning |
|
||||
| `MACHINE_LEARNING_REQUEST_THREADS`<sup>\*1</sup> | Thread count of the request thread pool (disabled if \<= 0) | number of CPU cores | machine learning |
|
||||
| `MACHINE_LEARNING_MODEL_INTER_OP_THREADS` | Number of parallel model operations | `1` | machine learning |
|
||||
| `MACHINE_LEARNING_MODEL_INTRA_OP_THREADS` | Number of threads for each model operation | `2` | machine learning |
|
||||
| `MACHINE_LEARNING_WORKERS`<sup>\*2</sup> | Number of worker processes to spawn | `1` | machine learning |
|
||||
| `MACHINE_LEARNING_WORKER_TIMEOUT` | Maximum time (s) of unresponsiveness before a worker is killed | `120` | machine learning |
|
||||
| `MACHINE_LEARNING_PRELOAD__CLIP` | Name of a CLIP model to be preloaded and kept in cache | | machine learning |
|
||||
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION` | Name of a facial recognition model to be preloaded and kept in cache | | machine learning |
|
||||
| Variable | Description | Default | Containers |
|
||||
| :-------------------------------------------------------- | :-------------------------------------------------------------------------------------------------- | :-----------------------------------: | :--------------- |
|
||||
| `MACHINE_LEARNING_MODEL_TTL` | Inactivity time (s) before a model is unloaded (disabled if \<= 0) | `300` | machine learning |
|
||||
| `MACHINE_LEARNING_MODEL_TTL_POLL_S` | Interval (s) between checks for the model TTL (disabled if \<= 0) | `10` | machine learning |
|
||||
| `MACHINE_LEARNING_CACHE_FOLDER` | Directory where models are downloaded | `/cache` | machine learning |
|
||||
| `MACHINE_LEARNING_REQUEST_THREADS`<sup>\*1</sup> | Thread count of the request thread pool (disabled if \<= 0) | number of CPU cores | machine learning |
|
||||
| `MACHINE_LEARNING_MODEL_INTER_OP_THREADS` | Number of parallel model operations | `1` | machine learning |
|
||||
| `MACHINE_LEARNING_MODEL_INTRA_OP_THREADS` | Number of threads for each model operation | `2` | machine learning |
|
||||
| `MACHINE_LEARNING_WORKERS`<sup>\*2</sup> | Number of worker processes to spawn | `1` | machine learning |
|
||||
| `MACHINE_LEARNING_HTTP_KEEPALIVE_TIMEOUT_S`<sup>\*3</sup> | HTTP Keep-alive time in seconds | `2` | machine learning |
|
||||
| `MACHINE_LEARNING_WORKER_TIMEOUT` | Maximum time (s) of unresponsiveness before a worker is killed | `120` (`300` if using OpenVINO image) | machine learning |
|
||||
| `MACHINE_LEARNING_PRELOAD__CLIP` | Name of a CLIP model to be preloaded and kept in cache | | machine learning |
|
||||
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION` | Name of a facial recognition model to be preloaded and kept in cache | | machine learning |
|
||||
| `MACHINE_LEARNING_ANN` | Enable ARM-NN hardware acceleration if supported | `True` | machine learning |
|
||||
| `MACHINE_LEARNING_ANN_FP16_TURBO` | Execute operations in FP16 precision: increasing speed, reducing precision (applies only to ARM-NN) | `False` | machine learning |
|
||||
| `MACHINE_LEARNING_ANN_TUNING_LEVEL` | ARM-NN GPU tuning level (1: rapid, 2: normal, 3: exhaustive) | `2` | machine learning |
|
||||
|
||||
\*1: It is recommended to start with this parameter when changing the concurrency levels of the machine learning service, and then tune the others.
|
||||
|
||||
\*2: Since each process duplicates models in memory, changing this is not recommended unless you have abundant memory to go around.
|
||||
|
||||
\*3: For scenarios like HPA in K8S. https://github.com/immich-app/immich/discussions/12064
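For example, the preload variables let the container download and warm a model at startup instead of on the first request. The model names below are assumptions; check the ones configured in your admin settings:

```bash
# environment for the immich-machine-learning container (model names are assumptions)
MACHINE_LEARNING_PRELOAD__CLIP=ViT-B-32__openai
MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION=buffalo_l
```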
|
||||
|
||||
:::info
|
||||
|
||||
Other machine learning parameters can be tuned from the admin UI.
|
||||
|
||||
@@ -4,7 +4,7 @@ sidebar_position: 10
|
||||
|
||||
# Requirements
|
||||
|
||||
Hardware and software requirements for Immich
|
||||
Hardware and software requirements for Immich:
|
||||
|
||||
## Software
|
||||
|
||||
|
||||
@@ -8,6 +8,10 @@ sidebar_position: 20
|
||||
This method is experimental and not currently recommended for production use. For production, please refer to installing with [Docker Compose](/docs/install/docker-compose.mdx).
|
||||
:::
|
||||
|
||||
:::note
|
||||
The install script only supports Linux operating systems and requires Docker to be already installed on the system.
|
||||
:::
|
||||
|
||||
In the shell, from a directory of your choice, run the following command:
|
||||
|
||||
```bash
|
||||
|
||||
@@ -45,7 +45,7 @@ width="70%"
|
||||
alt="Select Plugins > Compose.Manager > Add New Stack > Label it Immich"
|
||||
/>
|
||||
|
||||
3. Select the cog ⚙️ next to Immich then click "**Edit Stack**"
|
||||
3. Select the cogwheel ⚙️ next to Immich and click "**Edit Stack**"
|
||||
4. Click "**Compose File**" and then paste the entire contents of the [Immich Docker Compose](https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml) file into the Unraid editor. Remove any text that may be in the text area by default. Note that Unraid v6.12.10 uses version 24.0.9 of the Docker Engine, which does not support healthcheck `start_interval` as defined in the `database` service of the Docker compose file (version 25 or higher is needed). This parameter defines an initial waiting period before starting health checks, to give the container time to start up. Commenting out the `start_interval` and `start_period` parameters will allow the containers to start up normally. The only downside to this is that the database container will not receive an initial health check until `interval` time has passed.
|
||||
|
||||
<details>
|
||||
@@ -130,7 +130,7 @@ For more information on how to use the application once installed, please refer
|
||||
|
||||
## Updating Steps
|
||||
|
||||
Updating is extremely easy however it's important to be aware that containers managed via the Docker Compose Manager plugin do not integrate with Unraid's native dockerman ui, the label "_update ready_" will always be present on containers installed via the Docker Compose Manager.
|
||||
Updating is extremely easy; however, it's important to be aware that containers managed via the Docker Compose Manager plugin do not integrate with Unraid's native dockerman UI, so the label "_update ready_" will always be present on containers installed via the Docker Compose Manager.
|
||||
|
||||
<img
|
||||
src={require('./img/unraid09.png').default}
|
||||
|
||||
Binary file not shown.
|
Before size: 404 KiB | After size: 1.5 MiB
@@ -92,6 +92,7 @@ const config = {
|
||||
alt: 'Immich Logo',
|
||||
src: 'img/immich-logo-inline-light.png',
|
||||
srcDark: 'img/immich-logo-inline-dark.png',
|
||||
className: 'rounded-none',
|
||||
},
|
||||
items: [
|
||||
{
|
||||
@@ -144,28 +145,36 @@ const config = {
|
||||
label: 'Installation',
|
||||
to: '/docs/install/requirements',
|
||||
},
|
||||
{
|
||||
label: 'Contributing',
|
||||
to: '/docs/overview/support-the-project',
|
||||
},
|
||||
{
|
||||
label: 'Privacy Policy',
|
||||
to: '/privacy-policy',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
title: 'Community',
|
||||
title: 'Documentation',
|
||||
items: [
|
||||
{
|
||||
label: 'Discord',
|
||||
href: 'https://discord.immich.app',
|
||||
label: 'Roadmap',
|
||||
to: '/roadmap',
|
||||
},
|
||||
{
|
||||
label: 'Reddit',
|
||||
href: 'https://www.reddit.com/r/immich/',
|
||||
label: 'API',
|
||||
to: '/docs/api',
|
||||
},
|
||||
{
|
||||
label: 'Cursed Knowledge',
|
||||
to: '/cursed-knowledge',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
title: 'Links',
|
||||
items: [
|
||||
// {
|
||||
// label: "Blog",
|
||||
// to: "/blog",
|
||||
// },
|
||||
{
|
||||
label: 'GitHub',
|
||||
href: 'https://github.com/immich-app/immich',
|
||||
@@ -174,6 +183,14 @@ const config = {
|
||||
label: 'YouTube',
|
||||
href: 'https://www.youtube.com/@immich-app',
|
||||
},
|
||||
{
|
||||
label: 'Discord',
|
||||
href: 'https://discord.immich.app',
|
||||
},
|
||||
{
|
||||
label: 'Reddit',
|
||||
href: 'https://www.reddit.com/r/immich/',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
|
||||
568 docs/package-lock.json (generated)
@@ -2155,9 +2155,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/core": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/core/-/core-3.4.0.tgz",
|
||||
"integrity": "sha512-g+0wwmN2UJsBqy2fQRQ6fhXruoEa62JDeEa5d8IdTJlMoaDaEDfHh7WjwGRn4opuTQWpjAwP/fbcgyHKlE+64w==",
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/core/-/core-3.5.2.tgz",
|
||||
"integrity": "sha512-4Z1WkhCSkX4KO0Fw5m/Vuc7Q3NxBG53NE5u59Rs96fWkMPZVSrzEPP16/Nk6cWb/shK7xXPndTmalJtw7twL/w==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@babel/core": "^7.23.3",
|
||||
@@ -2170,12 +2170,12 @@
|
||||
"@babel/runtime": "^7.22.6",
|
||||
"@babel/runtime-corejs3": "^7.22.6",
|
||||
"@babel/traverse": "^7.22.8",
|
||||
"@docusaurus/cssnano-preset": "3.4.0",
|
||||
"@docusaurus/logger": "3.4.0",
|
||||
"@docusaurus/mdx-loader": "3.4.0",
|
||||
"@docusaurus/utils": "3.4.0",
|
||||
"@docusaurus/utils-common": "3.4.0",
|
||||
"@docusaurus/utils-validation": "3.4.0",
|
||||
"@docusaurus/cssnano-preset": "3.5.2",
|
||||
"@docusaurus/logger": "3.5.2",
|
||||
"@docusaurus/mdx-loader": "3.5.2",
|
||||
"@docusaurus/utils": "3.5.2",
|
||||
"@docusaurus/utils-common": "3.5.2",
|
||||
"@docusaurus/utils-validation": "3.5.2",
|
||||
"autoprefixer": "^10.4.14",
|
||||
"babel-loader": "^9.1.3",
|
||||
"babel-plugin-dynamic-import-node": "^2.3.3",
|
||||
@@ -2236,14 +2236,15 @@
|
||||
"node": ">=18.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@mdx-js/react": "^3.0.0",
|
||||
"react": "^18.0.0",
|
||||
"react-dom": "^18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/cssnano-preset": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-3.4.0.tgz",
|
||||
"integrity": "sha512-qwLFSz6v/pZHy/UP32IrprmH5ORce86BGtN0eBtG75PpzQJAzp9gefspox+s8IEOr0oZKuQ/nhzZ3xwyc3jYJQ==",
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-3.5.2.tgz",
|
||||
"integrity": "sha512-D3KiQXOMA8+O0tqORBrTOEQyQxNIfPm9jEaJoALjjSjc2M/ZAWcUfPQEnwr2JB2TadHw2gqWgpZckQmrVWkytA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"cssnano-preset-advanced": "^6.1.2",
|
||||
@@ -2256,9 +2257,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/logger": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/logger/-/logger-3.4.0.tgz",
|
||||
"integrity": "sha512-bZwkX+9SJ8lB9kVRkXw+xvHYSMGG4bpYHKGXeXFvyVc79NMeeBSGgzd4TQLHH+DYeOJoCdl8flrFJVxlZ0wo/Q==",
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/logger/-/logger-3.5.2.tgz",
|
||||
"integrity": "sha512-LHC540SGkeLfyT3RHK3gAMK6aS5TRqOD4R72BEU/DE2M/TY8WwEUAMY576UUc/oNJXv8pGhBmQB6N9p3pt8LQw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"chalk": "^4.1.2",
|
||||
@@ -2269,14 +2270,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/mdx-loader": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-3.4.0.tgz",
|
||||
"integrity": "sha512-kSSbrrk4nTjf4d+wtBA9H+FGauf2gCax89kV8SUSJu3qaTdSIKdWERlngsiHaCFgZ7laTJ8a67UFf+xlFPtuTw==",
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-3.5.2.tgz",
|
||||
"integrity": "sha512-ku3xO9vZdwpiMIVd8BzWV0DCqGEbCP5zs1iHfKX50vw6jX8vQo0ylYo1YJMZyz6e+JFJ17HYHT5FzVidz2IflA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@docusaurus/logger": "3.4.0",
|
||||
"@docusaurus/utils": "3.4.0",
|
||||
"@docusaurus/utils-validation": "3.4.0",
|
||||
"@docusaurus/logger": "3.5.2",
|
||||
"@docusaurus/utils": "3.5.2",
|
||||
"@docusaurus/utils-validation": "3.5.2",
|
||||
"@mdx-js/mdx": "^3.0.0",
|
||||
"@slorber/remark-comment": "^1.0.0",
|
||||
"escape-html": "^1.0.3",
|
||||
@@ -2308,12 +2309,12 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/module-type-aliases": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.4.0.tgz",
|
||||
"integrity": "sha512-A1AyS8WF5Bkjnb8s+guTDuYmUiwJzNrtchebBHpc0gz0PyHJNMaybUlSrmJjHVcGrya0LKI4YcR3lBDQfXRYLw==",
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.5.2.tgz",
|
||||
"integrity": "sha512-Z+Xu3+2rvKef/YKTMxZHsEXp1y92ac0ngjDiExRdqGTmEKtCUpkbNYH8v5eXo5Ls+dnW88n6WTa+Q54kLOkwPg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@docusaurus/types": "3.4.0",
|
||||
"@docusaurus/types": "3.5.2",
|
||||
"@types/history": "^4.7.11",
|
||||
"@types/react": "*",
|
||||
"@types/react-router-config": "*",
|
||||
@@ -2326,52 +2327,21 @@
|
||||
"react-dom": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/plugin-content-blog": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.4.0.tgz",
|
||||
"integrity": "sha512-vv6ZAj78ibR5Jh7XBUT4ndIjmlAxkijM3Sx5MAAzC1gyv0vupDQNhzuFg1USQmQVj3P5I6bquk12etPV3LJ+Xw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@docusaurus/core": "3.4.0",
|
||||
"@docusaurus/logger": "3.4.0",
|
||||
"@docusaurus/mdx-loader": "3.4.0",
|
||||
"@docusaurus/types": "3.4.0",
|
||||
"@docusaurus/utils": "3.4.0",
|
||||
"@docusaurus/utils-common": "3.4.0",
|
||||
"@docusaurus/utils-validation": "3.4.0",
|
||||
"cheerio": "^1.0.0-rc.12",
|
||||
"feed": "^4.2.2",
|
||||
"fs-extra": "^11.1.1",
|
||||
"lodash": "^4.17.21",
|
||||
"reading-time": "^1.5.0",
|
||||
"srcset": "^4.0.0",
|
||||
"tslib": "^2.6.0",
|
||||
"unist-util-visit": "^5.0.0",
|
||||
"utility-types": "^3.10.0",
|
||||
"webpack": "^5.88.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"react": "^18.0.0",
|
||||
"react-dom": "^18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/plugin-content-docs": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-3.4.0.tgz",
|
||||
"integrity": "sha512-HkUCZffhBo7ocYheD9oZvMcDloRnGhBMOZRyVcAQRFmZPmNqSyISlXA1tQCIxW+r478fty97XXAGjNYzBjpCsg==",
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-3.5.2.tgz",
|
||||
"integrity": "sha512-Bt+OXn/CPtVqM3Di44vHjE7rPCEsRCB/DMo2qoOuozB9f7+lsdrHvD0QCHdBs0uhz6deYJDppAr2VgqybKPlVQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@docusaurus/core": "3.4.0",
|
||||
"@docusaurus/logger": "3.4.0",
|
||||
"@docusaurus/mdx-loader": "3.4.0",
|
||||
"@docusaurus/module-type-aliases": "3.4.0",
|
||||
"@docusaurus/types": "3.4.0",
|
||||
"@docusaurus/utils": "3.4.0",
|
||||
"@docusaurus/utils-common": "3.4.0",
|
||||
"@docusaurus/utils-validation": "3.4.0",
|
||||
"@docusaurus/core": "3.5.2",
|
||||
"@docusaurus/logger": "3.5.2",
|
||||
"@docusaurus/mdx-loader": "3.5.2",
|
||||
"@docusaurus/module-type-aliases": "3.5.2",
|
||||
"@docusaurus/theme-common": "3.5.2",
|
||||
"@docusaurus/types": "3.5.2",
|
||||
"@docusaurus/utils": "3.5.2",
|
||||
"@docusaurus/utils-common": "3.5.2",
|
||||
"@docusaurus/utils-validation": "3.5.2",
|
||||
"@types/react-router-config": "^5.0.7",
|
||||
"combine-promises": "^1.1.0",
|
||||
"fs-extra": "^11.1.1",
|
||||
@@ -2389,38 +2359,15 @@
|
||||
"react-dom": "^18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/plugin-content-pages": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.4.0.tgz",
|
||||
"integrity": "sha512-h2+VN/0JjpR8fIkDEAoadNjfR3oLzB+v1qSXbIAKjQ46JAHx3X22n9nqS+BWSQnTnp1AjkjSvZyJMekmcwxzxg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@docusaurus/core": "3.4.0",
|
||||
"@docusaurus/mdx-loader": "3.4.0",
|
||||
"@docusaurus/types": "3.4.0",
|
||||
"@docusaurus/utils": "3.4.0",
|
||||
"@docusaurus/utils-validation": "3.4.0",
|
||||
"fs-extra": "^11.1.1",
|
||||
"tslib": "^2.6.0",
|
||||
"webpack": "^5.88.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"react": "^18.0.0",
|
||||
"react-dom": "^18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/plugin-debug": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-3.4.0.tgz",
|
||||
"integrity": "sha512-uV7FDUNXGyDSD3PwUaf5YijX91T5/H9SX4ErEcshzwgzWwBtK37nUWPU3ZLJfeTavX3fycTOqk9TglpOLaWkCg==",
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-3.5.2.tgz",
|
||||
"integrity": "sha512-kBK6GlN0itCkrmHuCS6aX1wmoWc5wpd5KJlqQ1FyrF0cLDnvsYSnh7+ftdwzt7G6lGBho8lrVwkkL9/iQvaSOA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@docusaurus/core": "3.4.0",
|
||||
"@docusaurus/types": "3.4.0",
|
||||
"@docusaurus/utils": "3.4.0",
|
||||
"@docusaurus/core": "3.5.2",
|
||||
"@docusaurus/types": "3.5.2",
|
||||
"@docusaurus/utils": "3.5.2",
|
||||
"fs-extra": "^11.1.1",
|
||||
"react-json-view-lite": "^1.2.0",
|
||||
"tslib": "^2.6.0"
|
||||
@@ -2434,14 +2381,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/plugin-google-analytics": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-3.4.0.tgz",
|
||||
"integrity": "sha512-mCArluxEGi3cmYHqsgpGGt3IyLCrFBxPsxNZ56Mpur0xSlInnIHoeLDH7FvVVcPJRPSQ9/MfRqLsainRw+BojA==",
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-3.5.2.tgz",
|
||||
"integrity": "sha512-rjEkJH/tJ8OXRE9bwhV2mb/WP93V441rD6XnM6MIluu7rk8qg38iSxS43ga2V2Q/2ib53PcqbDEJDG/yWQRJhQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@docusaurus/core": "3.4.0",
|
||||
"@docusaurus/types": "3.4.0",
|
||||
"@docusaurus/utils-validation": "3.4.0",
|
||||
"@docusaurus/core": "3.5.2",
|
||||
"@docusaurus/types": "3.5.2",
|
||||
"@docusaurus/utils-validation": "3.5.2",
|
||||
"tslib": "^2.6.0"
|
||||
},
|
||||
"engines": {
|
||||
@@ -2453,14 +2400,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/plugin-google-gtag": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-3.4.0.tgz",
|
||||
"integrity": "sha512-Dsgg6PLAqzZw5wZ4QjUYc8Z2KqJqXxHxq3vIoyoBWiLEEfigIs7wHR+oiWUQy3Zk9MIk6JTYj7tMoQU0Jm3nqA==",
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-3.5.2.tgz",
|
||||
"integrity": "sha512-lm8XL3xLkTPHFKKjLjEEAHUrW0SZBSHBE1I+i/tmYMBsjCcUB5UJ52geS5PSiOCFVR74tbPGcPHEV/gaaxFeSA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@docusaurus/core": "3.4.0",
|
||||
"@docusaurus/types": "3.4.0",
|
||||
"@docusaurus/utils-validation": "3.4.0",
|
||||
"@docusaurus/core": "3.5.2",
|
||||
"@docusaurus/types": "3.5.2",
|
||||
"@docusaurus/utils-validation": "3.5.2",
|
||||
"@types/gtag.js": "^0.0.12",
|
||||
"tslib": "^2.6.0"
|
||||
},
|
||||
@@ -2473,14 +2420,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/plugin-google-tag-manager": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-3.4.0.tgz",
|
||||
"integrity": "sha512-O9tX1BTwxIhgXpOLpFDueYA9DWk69WCbDRrjYoMQtFHSkTyE7RhNgyjSPREUWJb9i+YUg3OrsvrBYRl64FCPCQ==",
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-3.5.2.tgz",
|
||||
"integrity": "sha512-QkpX68PMOMu10Mvgvr5CfZAzZQFx8WLlOiUQ/Qmmcl6mjGK6H21WLT5x7xDmcpCoKA/3CegsqIqBR+nA137lQg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@docusaurus/core": "3.4.0",
|
||||
"@docusaurus/types": "3.4.0",
|
||||
"@docusaurus/utils-validation": "3.4.0",
|
||||
"@docusaurus/core": "3.5.2",
|
||||
"@docusaurus/types": "3.5.2",
|
||||
"@docusaurus/utils-validation": "3.5.2",
|
||||
"tslib": "^2.6.0"
|
||||
},
|
||||
"engines": {
|
||||
@@ -2492,17 +2439,17 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/plugin-sitemap": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-3.4.0.tgz",
|
||||
"integrity": "sha512-+0VDvx9SmNrFNgwPoeoCha+tRoAjopwT0+pYO1xAbyLcewXSemq+eLxEa46Q1/aoOaJQ0qqHELuQM7iS2gp33Q==",
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-3.5.2.tgz",
|
||||
"integrity": "sha512-DnlqYyRAdQ4NHY28TfHuVk414ft2uruP4QWCH//jzpHjqvKyXjj2fmDtI8RPUBh9K8iZKFMHRnLtzJKySPWvFA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@docusaurus/core": "3.4.0",
|
||||
"@docusaurus/logger": "3.4.0",
|
||||
"@docusaurus/types": "3.4.0",
|
||||
"@docusaurus/utils": "3.4.0",
|
||||
"@docusaurus/utils-common": "3.4.0",
|
||||
"@docusaurus/utils-validation": "3.4.0",
|
||||
"@docusaurus/core": "3.5.2",
|
||||
"@docusaurus/logger": "3.5.2",
|
||||
"@docusaurus/types": "3.5.2",
|
||||
"@docusaurus/utils": "3.5.2",
|
||||
"@docusaurus/utils-common": "3.5.2",
|
||||
"@docusaurus/utils-validation": "3.5.2",
|
||||
"fs-extra": "^11.1.1",
|
||||
"sitemap": "^7.1.1",
|
||||
"tslib": "^2.6.0"
|
||||
@@ -2516,24 +2463,81 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/preset-classic": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-3.4.0.tgz",
|
||||
"integrity": "sha512-Ohj6KB7siKqZaQhNJVMBBUzT3Nnp6eTKqO+FXO3qu/n1hJl3YLwVKTWBg28LF7MWrKu46UuYavwMRxud0VyqHg==",
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-3.5.2.tgz",
|
||||
"integrity": "sha512-3ihfXQ95aOHiLB5uCu+9PRy2gZCeSZoDcqpnDvf3B+sTrMvMTr8qRUzBvWkoIqc82yG5prCboRjk1SVILKx6sg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@docusaurus/core": "3.4.0",
|
||||
"@docusaurus/plugin-content-blog": "3.4.0",
|
||||
"@docusaurus/plugin-content-docs": "3.4.0",
|
||||
"@docusaurus/plugin-content-pages": "3.4.0",
|
||||
"@docusaurus/plugin-debug": "3.4.0",
|
||||
"@docusaurus/plugin-google-analytics": "3.4.0",
|
||||
"@docusaurus/plugin-google-gtag": "3.4.0",
|
||||
"@docusaurus/plugin-google-tag-manager": "3.4.0",
|
||||
"@docusaurus/plugin-sitemap": "3.4.0",
|
||||
"@docusaurus/theme-classic": "3.4.0",
|
||||
"@docusaurus/theme-common": "3.4.0",
|
||||
"@docusaurus/theme-search-algolia": "3.4.0",
|
||||
"@docusaurus/types": "3.4.0"
|
||||
"@docusaurus/core": "3.5.2",
|
||||
"@docusaurus/plugin-content-blog": "3.5.2",
|
||||
"@docusaurus/plugin-content-docs": "3.5.2",
|
||||
"@docusaurus/plugin-content-pages": "3.5.2",
|
||||
"@docusaurus/plugin-debug": "3.5.2",
|
||||
"@docusaurus/plugin-google-analytics": "3.5.2",
|
||||
"@docusaurus/plugin-google-gtag": "3.5.2",
|
||||
"@docusaurus/plugin-google-tag-manager": "3.5.2",
|
||||
"@docusaurus/plugin-sitemap": "3.5.2",
|
||||
"@docusaurus/theme-classic": "3.5.2",
|
||||
"@docusaurus/theme-common": "3.5.2",
|
||||
"@docusaurus/theme-search-algolia": "3.5.2",
|
||||
"@docusaurus/types": "3.5.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"react": "^18.0.0",
|
||||
"react-dom": "^18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/preset-classic/node_modules/@docusaurus/plugin-content-blog": {
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.5.2.tgz",
|
||||
"integrity": "sha512-R7ghWnMvjSf+aeNDH0K4fjyQnt5L0KzUEnUhmf1e3jZrv3wogeytZNN6n7X8yHcMsuZHPOrctQhXWnmxu+IRRg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@docusaurus/core": "3.5.2",
|
||||
"@docusaurus/logger": "3.5.2",
|
||||
"@docusaurus/mdx-loader": "3.5.2",
|
||||
"@docusaurus/theme-common": "3.5.2",
|
||||
"@docusaurus/types": "3.5.2",
|
||||
"@docusaurus/utils": "3.5.2",
|
||||
"@docusaurus/utils-common": "3.5.2",
|
||||
"@docusaurus/utils-validation": "3.5.2",
|
||||
"cheerio": "1.0.0-rc.12",
|
||||
"feed": "^4.2.2",
|
||||
"fs-extra": "^11.1.1",
|
||||
"lodash": "^4.17.21",
|
||||
"reading-time": "^1.5.0",
|
||||
"srcset": "^4.0.0",
|
||||
"tslib": "^2.6.0",
|
||||
"unist-util-visit": "^5.0.0",
|
||||
"utility-types": "^3.10.0",
|
||||
"webpack": "^5.88.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@docusaurus/plugin-content-docs": "*",
|
||||
"react": "^18.0.0",
|
||||
"react-dom": "^18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/preset-classic/node_modules/@docusaurus/plugin-content-pages": {
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.5.2.tgz",
|
||||
"integrity": "sha512-WzhHjNpoQAUz/ueO10cnundRz+VUtkjFhhaQ9jApyv1a46FPURO4cef89pyNIOMny1fjDz/NUN2z6Yi+5WUrCw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@docusaurus/core": "3.5.2",
|
||||
"@docusaurus/mdx-loader": "3.5.2",
|
||||
"@docusaurus/types": "3.5.2",
|
||||
"@docusaurus/utils": "3.5.2",
|
||||
"@docusaurus/utils-validation": "3.5.2",
|
||||
"fs-extra": "^11.1.1",
|
||||
"tslib": "^2.6.0",
|
||||
"webpack": "^5.88.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0"
|
||||
@@ -2544,27 +2548,27 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/theme-classic": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-3.4.0.tgz",
|
||||
"integrity": "sha512-0IPtmxsBYv2adr1GnZRdMkEQt1YW6tpzrUPj02YxNpvJ5+ju4E13J5tB4nfdaen/tfR1hmpSPlTFPvTf4kwy8Q==",
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-3.5.2.tgz",
|
||||
"integrity": "sha512-XRpinSix3NBv95Rk7xeMF9k4safMkwnpSgThn0UNQNumKvmcIYjfkwfh2BhwYh/BxMXQHJ/PdmNh22TQFpIaYg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@docusaurus/core": "3.4.0",
|
||||
"@docusaurus/mdx-loader": "3.4.0",
|
||||
"@docusaurus/module-type-aliases": "3.4.0",
|
||||
"@docusaurus/plugin-content-blog": "3.4.0",
|
||||
"@docusaurus/plugin-content-docs": "3.4.0",
|
||||
"@docusaurus/plugin-content-pages": "3.4.0",
|
||||
"@docusaurus/theme-common": "3.4.0",
|
||||
"@docusaurus/theme-translations": "3.4.0",
|
||||
"@docusaurus/types": "3.4.0",
|
||||
"@docusaurus/utils": "3.4.0",
|
||||
"@docusaurus/utils-common": "3.4.0",
|
||||
"@docusaurus/utils-validation": "3.4.0",
|
||||
"@docusaurus/core": "3.5.2",
|
||||
"@docusaurus/mdx-loader": "3.5.2",
|
||||
"@docusaurus/module-type-aliases": "3.5.2",
|
||||
"@docusaurus/plugin-content-blog": "3.5.2",
|
||||
"@docusaurus/plugin-content-docs": "3.5.2",
|
||||
"@docusaurus/plugin-content-pages": "3.5.2",
|
||||
"@docusaurus/theme-common": "3.5.2",
|
||||
"@docusaurus/theme-translations": "3.5.2",
|
||||
"@docusaurus/types": "3.5.2",
|
||||
"@docusaurus/utils": "3.5.2",
|
||||
"@docusaurus/utils-common": "3.5.2",
|
||||
"@docusaurus/utils-validation": "3.5.2",
|
||||
"@mdx-js/react": "^3.0.0",
|
||||
"clsx": "^2.0.0",
|
||||
"copy-text-to-clipboard": "^3.2.0",
|
||||
"infima": "0.2.0-alpha.43",
|
||||
"infima": "0.2.0-alpha.44",
|
||||
"lodash": "^4.17.21",
|
||||
"nprogress": "^0.2.0",
|
||||
"postcss": "^8.4.26",
|
||||
@@ -2583,19 +2587,73 @@
|
||||
"react-dom": "^18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/theme-common": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-3.4.0.tgz",
|
||||
"integrity": "sha512-0A27alXuv7ZdCg28oPE8nH/Iz73/IUejVaCazqu9elS4ypjiLhK3KfzdSQBnL/g7YfHSlymZKdiOHEo8fJ0qMA==",
|
||||
"node_modules/@docusaurus/theme-classic/node_modules/@docusaurus/plugin-content-blog": {
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.5.2.tgz",
|
||||
"integrity": "sha512-R7ghWnMvjSf+aeNDH0K4fjyQnt5L0KzUEnUhmf1e3jZrv3wogeytZNN6n7X8yHcMsuZHPOrctQhXWnmxu+IRRg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@docusaurus/mdx-loader": "3.4.0",
|
||||
"@docusaurus/module-type-aliases": "3.4.0",
|
||||
"@docusaurus/plugin-content-blog": "3.4.0",
|
||||
"@docusaurus/plugin-content-docs": "3.4.0",
|
||||
"@docusaurus/plugin-content-pages": "3.4.0",
|
||||
"@docusaurus/utils": "3.4.0",
|
||||
"@docusaurus/utils-common": "3.4.0",
|
||||
"@docusaurus/core": "3.5.2",
|
||||
"@docusaurus/logger": "3.5.2",
|
||||
"@docusaurus/mdx-loader": "3.5.2",
|
||||
"@docusaurus/theme-common": "3.5.2",
|
||||
"@docusaurus/types": "3.5.2",
|
||||
"@docusaurus/utils": "3.5.2",
|
||||
"@docusaurus/utils-common": "3.5.2",
|
||||
"@docusaurus/utils-validation": "3.5.2",
|
||||
"cheerio": "1.0.0-rc.12",
|
||||
"feed": "^4.2.2",
|
||||
"fs-extra": "^11.1.1",
|
||||
"lodash": "^4.17.21",
|
||||
"reading-time": "^1.5.0",
|
||||
"srcset": "^4.0.0",
|
||||
"tslib": "^2.6.0",
|
||||
"unist-util-visit": "^5.0.0",
|
||||
"utility-types": "^3.10.0",
|
||||
"webpack": "^5.88.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@docusaurus/plugin-content-docs": "*",
|
||||
"react": "^18.0.0",
|
||||
"react-dom": "^18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/theme-classic/node_modules/@docusaurus/plugin-content-pages": {
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.5.2.tgz",
|
||||
"integrity": "sha512-WzhHjNpoQAUz/ueO10cnundRz+VUtkjFhhaQ9jApyv1a46FPURO4cef89pyNIOMny1fjDz/NUN2z6Yi+5WUrCw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@docusaurus/core": "3.5.2",
|
||||
"@docusaurus/mdx-loader": "3.5.2",
|
||||
"@docusaurus/types": "3.5.2",
|
||||
"@docusaurus/utils": "3.5.2",
|
||||
"@docusaurus/utils-validation": "3.5.2",
|
||||
"fs-extra": "^11.1.1",
|
||||
"tslib": "^2.6.0",
|
||||
"webpack": "^5.88.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"react": "^18.0.0",
|
||||
"react-dom": "^18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/theme-common": {
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-3.5.2.tgz",
|
||||
"integrity": "sha512-QXqlm9S6x9Ibwjs7I2yEDgsCocp708DrCrgHgKwg2n2AY0YQ6IjU0gAK35lHRLOvAoJUfCKpQAwUykB0R7+Eew==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@docusaurus/mdx-loader": "3.5.2",
|
||||
"@docusaurus/module-type-aliases": "3.5.2",
|
||||
"@docusaurus/utils": "3.5.2",
|
||||
"@docusaurus/utils-common": "3.5.2",
|
||||
"@types/history": "^4.7.11",
|
||||
"@types/react": "*",
|
||||
"@types/react-router-config": "*",
|
||||
@@ -2609,24 +2667,25 @@
|
||||
"node": ">=18.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@docusaurus/plugin-content-docs": "*",
|
||||
"react": "^18.0.0",
|
||||
"react-dom": "^18.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/theme-search-algolia": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.4.0.tgz",
|
||||
"integrity": "sha512-aiHFx7OCw4Wck1z6IoShVdUWIjntC8FHCw9c5dR8r3q4Ynh+zkS8y2eFFunN/DL6RXPzpnvKCg3vhLQYJDmT9Q==",
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.5.2.tgz",
|
||||
"integrity": "sha512-qW53kp3VzMnEqZGjakaV90sst3iN1o32PH+nawv1uepROO8aEGxptcq2R5rsv7aBShSRbZwIobdvSYKsZ5pqvA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@docsearch/react": "^3.5.2",
|
||||
"@docusaurus/core": "3.4.0",
|
||||
"@docusaurus/logger": "3.4.0",
|
||||
"@docusaurus/plugin-content-docs": "3.4.0",
|
||||
"@docusaurus/theme-common": "3.4.0",
|
||||
"@docusaurus/theme-translations": "3.4.0",
|
||||
"@docusaurus/utils": "3.4.0",
|
||||
"@docusaurus/utils-validation": "3.4.0",
|
||||
"@docusaurus/core": "3.5.2",
|
||||
"@docusaurus/logger": "3.5.2",
|
||||
"@docusaurus/plugin-content-docs": "3.5.2",
|
||||
"@docusaurus/theme-common": "3.5.2",
|
||||
"@docusaurus/theme-translations": "3.5.2",
|
||||
"@docusaurus/utils": "3.5.2",
|
||||
"@docusaurus/utils-validation": "3.5.2",
|
||||
"algoliasearch": "^4.18.0",
|
||||
"algoliasearch-helper": "^3.13.3",
|
||||
"clsx": "^2.0.0",
|
||||
@@ -2645,9 +2704,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/theme-translations": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-3.4.0.tgz",
|
||||
"integrity": "sha512-zSxCSpmQCCdQU5Q4CnX/ID8CSUUI3fvmq4hU/GNP/XoAWtXo9SAVnM3TzpU8Gb//H3WCsT8mJcTfyOk3d9ftNg==",
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-3.5.2.tgz",
|
||||
"integrity": "sha512-GPZLcu4aT1EmqSTmbdpVrDENGR2yObFEX8ssEFYTCiAIVc0EihNSdOIBTazUvgNqwvnoU1A8vIs1xyzc3LITTw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"fs-extra": "^11.1.1",
|
||||
@@ -2658,9 +2717,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/types": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-3.4.0.tgz",
|
||||
"integrity": "sha512-4jcDO8kXi5Cf9TcyikB/yKmz14f2RZ2qTRerbHAsS+5InE9ZgSLBNLsewtFTcTOXSVcbU3FoGOzcNWAmU1TR0A==",
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-3.5.2.tgz",
|
||||
"integrity": "sha512-N6GntLXoLVUwkZw7zCxwy9QiuEXIcTVzA9AkmNw16oc0AP3SXLrMmDMMBIfgqwuKWa6Ox6epHol9kMtJqekACw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@mdx-js/mdx": "^3.0.0",
|
||||
@@ -2679,13 +2738,13 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/utils": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-3.4.0.tgz",
|
||||
"integrity": "sha512-fRwnu3L3nnWaXOgs88BVBmG1yGjcQqZNHG+vInhEa2Sz2oQB+ZjbEMO5Rh9ePFpZ0YDiDUhpaVjwmS+AU2F14g==",
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-3.5.2.tgz",
|
||||
"integrity": "sha512-33QvcNFh+Gv+C2dP9Y9xWEzMgf3JzrpL2nW9PopidiohS1nDcyknKRx2DWaFvyVTTYIkkABVSr073VTj/NITNA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@docusaurus/logger": "3.4.0",
|
||||
"@docusaurus/utils-common": "3.4.0",
|
||||
"@docusaurus/logger": "3.5.2",
|
||||
"@docusaurus/utils-common": "3.5.2",
|
||||
"@svgr/webpack": "^8.1.0",
|
||||
"escape-string-regexp": "^4.0.0",
|
||||
"file-loader": "^6.2.0",
|
||||
@@ -2718,9 +2777,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/utils-common": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-3.4.0.tgz",
|
||||
"integrity": "sha512-NVx54Wr4rCEKsjOH5QEVvxIqVvm+9kh7q8aYTU5WzUU9/Hctd6aTrcZ3G0Id4zYJ+AeaG5K5qHA4CY5Kcm2iyQ==",
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-3.5.2.tgz",
|
||||
"integrity": "sha512-i0AZjHiRgJU6d7faQngIhuHKNrszpL/SHQPgF1zH4H+Ij6E9NBYGy6pkcGWToIv7IVPbs+pQLh1P3whn0gWXVg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"tslib": "^2.6.0"
|
||||
@@ -2738,14 +2797,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@docusaurus/utils-validation": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-3.4.0.tgz",
|
||||
"integrity": "sha512-hYQ9fM+AXYVTWxJOT1EuNaRnrR2WGpRdLDQG07O8UOpsvCPWUVOeo26Rbm0JWY2sGLfzAb+tvJ62yF+8F+TV0g==",
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-3.5.2.tgz",
|
||||
"integrity": "sha512-m+Foq7augzXqB6HufdS139PFxDC5d5q2QKZy8q0qYYvGdI6nnlNsGH4cIGsgBnV7smz+mopl3g4asbSDvMV0jA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@docusaurus/logger": "3.4.0",
|
||||
"@docusaurus/utils": "3.4.0",
|
||||
"@docusaurus/utils-common": "3.4.0",
|
||||
"@docusaurus/logger": "3.5.2",
|
||||
"@docusaurus/utils": "3.5.2",
|
||||
"@docusaurus/utils-common": "3.5.2",
|
||||
"fs-extra": "^11.2.0",
|
||||
"joi": "^17.9.2",
|
||||
"js-yaml": "^4.1.0",
|
||||
@@ -4237,9 +4296,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/autoprefixer": {
|
||||
"version": "10.4.19",
|
||||
"resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.19.tgz",
|
||||
"integrity": "sha512-BaENR2+zBZ8xXhM4pUaKUxlVdxZ0EZhjvbopwnXmxRUfqDmwSpC2lAi/QXvx7NRdPCo1WKEcEF6mV64si1z4Ew==",
|
||||
"version": "10.4.20",
|
||||
"resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.20.tgz",
|
||||
"integrity": "sha512-XY25y5xSv/wEoqzDyXXME4AFfkZI0P23z6Fs3YgymDnKJkCGOnkL0iTxCa85UTqaSgfcqyf3UA6+c7wUvx/16g==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "opencollective",
|
||||
@@ -4254,12 +4313,13 @@
|
||||
"url": "https://github.com/sponsors/ai"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"browserslist": "^4.23.0",
|
||||
"caniuse-lite": "^1.0.30001599",
|
||||
"browserslist": "^4.23.3",
|
||||
"caniuse-lite": "^1.0.30001646",
|
||||
"fraction.js": "^4.3.7",
|
||||
"normalize-range": "^0.1.2",
|
||||
"picocolors": "^1.0.0",
|
||||
"picocolors": "^1.0.1",
|
||||
"postcss-value-parser": "^4.2.0"
|
||||
},
|
||||
"bin": {
|
||||
@@ -4531,9 +4591,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/browserslist": {
|
||||
"version": "4.23.0",
|
||||
"resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.0.tgz",
|
||||
"integrity": "sha512-QW8HiM1shhT2GuzkvklfjcKDiWFXHOeFCIA/huJPwHsslwcydgk7X+z2zXpEijP98UCY7HbubZt5J2Zgvf0CaQ==",
|
||||
"version": "4.23.3",
|
||||
"resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.3.tgz",
|
||||
"integrity": "sha512-btwCFJVjI4YWDNfau8RhZ+B1Q/VLoUITrm3RlP6y1tYGWIOa+InuYiRGXUBXo8nA1qKmHMyLB/iVQg5TT4eFoA==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "opencollective",
|
||||
@@ -4548,11 +4608,12 @@
|
||||
"url": "https://github.com/sponsors/ai"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"caniuse-lite": "^1.0.30001587",
|
||||
"electron-to-chromium": "^1.4.668",
|
||||
"node-releases": "^2.0.14",
|
||||
"update-browserslist-db": "^1.0.13"
|
||||
"caniuse-lite": "^1.0.30001646",
|
||||
"electron-to-chromium": "^1.5.4",
|
||||
"node-releases": "^2.0.18",
|
||||
"update-browserslist-db": "^1.1.0"
|
||||
},
|
||||
"bin": {
|
||||
"browserslist": "cli.js"
|
||||
@@ -4699,9 +4760,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/caniuse-lite": {
|
||||
"version": "1.0.30001614",
|
||||
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001614.tgz",
|
||||
"integrity": "sha512-jmZQ1VpmlRwHgdP1/uiKzgiAuGOfLEJsYFP4+GBou/QQ4U6IOJCB4NP1c+1p9RGLpwObcT94jA5/uO+F1vBbog==",
|
||||
"version": "1.0.30001651",
|
||||
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001651.tgz",
|
||||
"integrity": "sha512-9Cf+Xv1jJNe1xPZLGuUXLNkE1BoDkqRqYyFJ9TDYSqhduqA4hu4oR9HluGoWYQC/aj8WHjsGVV+bwkh0+tegRg==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "opencollective",
|
||||
@@ -4715,7 +4776,8 @@
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/ai"
|
||||
}
|
||||
]
|
||||
],
|
||||
"license": "CC-BY-4.0"
|
||||
},
|
||||
"node_modules/ccount": {
|
||||
"version": "2.0.1",
|
||||
@@ -6342,9 +6404,10 @@
|
||||
"integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="
|
||||
},
|
||||
"node_modules/electron-to-chromium": {
|
||||
"version": "1.4.751",
|
||||
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.751.tgz",
|
||||
"integrity": "sha512-2DEPi++qa89SMGRhufWTiLmzqyuGmNF3SK4+PQetW1JKiZdEpF4XQonJXJCzyuYSA6mauiMhbyVhqYAP45Hvfw=="
|
||||
"version": "1.5.6",
|
||||
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.6.tgz",
|
||||
"integrity": "sha512-jwXWsM5RPf6j9dPYzaorcBSUg6AiqocPEyMpkchkvntaH9HGfOOMZwxMJjDY/XEs3T5dM7uyH1VhRMkqUU9qVw==",
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/emoji-regex": {
|
||||
"version": "9.2.2",
|
||||
@@ -8763,9 +8826,10 @@
|
||||
}
|
||||
},
|
||||
"node_modules/infima": {
|
||||
"version": "0.2.0-alpha.43",
|
||||
"resolved": "https://registry.npmjs.org/infima/-/infima-0.2.0-alpha.43.tgz",
|
||||
"integrity": "sha512-2uw57LvUqW0rK/SWYnd/2rRfxNA5DDNOh33jxF7fy46VWoNhGxiUQyVZHbBMjQ33mQem0cjdDVwgWVAmlRfgyQ==",
|
||||
"version": "0.2.0-alpha.44",
|
||||
"resolved": "https://registry.npmjs.org/infima/-/infima-0.2.0-alpha.44.tgz",
|
||||
"integrity": "sha512-tuRkUSO/lB3rEhLJk25atwAjgLuzq070+pOW8XcvpHky/YbENnRRdPd85IBkyeTgttmOy5ah+yHYsK1HhUd4lQ==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
@@ -11958,9 +12022,10 @@
|
||||
}
|
||||
},
|
||||
"node_modules/node-releases": {
|
||||
"version": "2.0.14",
|
||||
"resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz",
|
||||
"integrity": "sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw=="
|
||||
"version": "2.0.18",
|
||||
"resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz",
|
||||
"integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/nopt": {
|
||||
"version": "1.0.10",
|
||||
@@ -12754,9 +12819,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/postcss": {
|
||||
"version": "8.4.39",
|
||||
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.39.tgz",
|
||||
"integrity": "sha512-0vzE+lAiG7hZl1/9I8yzKLx3aR9Xbof3fBHKunvMfOCYAtMhrsnccJY2iTURb9EZd5+pLuiNV9/c/GZJOHsgIw==",
|
||||
"version": "8.4.40",
|
||||
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.40.tgz",
|
||||
"integrity": "sha512-YF2kKIUzAofPMpfH6hOi2cGnv/HrUlfucspc7pDyvv7kGdqXrfj8SCl/t8owkEgKEuu8ZcRjSOxFxVLqwChZ2Q==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "opencollective",
|
||||
@@ -13600,9 +13665,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/prettier": {
|
||||
"version": "3.3.2",
|
||||
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.2.tgz",
|
||||
"integrity": "sha512-rAVeHYMcv8ATV5d508CFdn+8/pHPpXeIid1DdrPwXnaAdH7cqjVbpJaT5eq4yRAFU/lsbwYwSF/n5iNrdJHPQA==",
|
||||
"version": "3.3.3",
|
||||
"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.3.tgz",
|
||||
"integrity": "sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"bin": {
|
||||
@@ -13747,9 +13812,10 @@
|
||||
}
|
||||
},
|
||||
"node_modules/qs": {
|
||||
"version": "6.12.1",
|
||||
"resolved": "https://registry.npmjs.org/qs/-/qs-6.12.1.tgz",
|
||||
"integrity": "sha512-zWmv4RSuB9r2mYQw3zxQuHWeU+42aKi1wWig/j4ele4ygELZ7PEO6MM7rim9oAQH2A5MWfsAVf/jPvTPgCbvUQ==",
|
||||
"version": "6.13.0",
|
||||
"resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz",
|
||||
"integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==",
|
||||
"license": "BSD-3-Clause",
|
||||
"dependencies": {
|
||||
"side-channel": "^1.0.6"
|
||||
},
|
||||
@@ -16014,9 +16080,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/tailwindcss": {
|
||||
"version": "3.4.4",
|
||||
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.4.tgz",
|
||||
"integrity": "sha512-ZoyXOdJjISB7/BcLTR6SEsLgKtDStYyYZVLsUtWChO4Ps20CBad7lfJKVDiejocV4ME1hLmyY0WJE3hSDcmQ2A==",
|
||||
"version": "3.4.10",
|
||||
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.10.tgz",
|
||||
"integrity": "sha512-KWZkVPm7yJRhdu4SRSl9d4AK2wM3a50UsvgHZO7xY77NQr2V+fIrEuoDGQcbvswWvFGbS2f6e+jC/6WJm1Dl0w==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@alloc/quick-lru": "^5.2.0",
|
||||
@@ -16376,9 +16442,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/typescript": {
|
||||
"version": "5.5.3",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.3.tgz",
|
||||
"integrity": "sha512-/hreyEujaB0w76zKo6717l3L0o/qEUtRgdvUBvlkhoWeOVMjMuHNHk0BRBzikzuGDqNmPQbg5ifMEqsHLiIUcQ==",
|
||||
"version": "5.5.4",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.4.tgz",
|
||||
"integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==",
|
||||
"license": "Apache-2.0",
|
||||
"bin": {
|
||||
"tsc": "bin/tsc",
|
||||
@@ -16607,9 +16673,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/update-browserslist-db": {
|
||||
"version": "1.0.13",
|
||||
"resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz",
|
||||
"integrity": "sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg==",
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz",
|
||||
"integrity": "sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "opencollective",
|
||||
@@ -16624,9 +16690,10 @@
|
||||
"url": "https://github.com/sponsors/ai"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"escalade": "^3.1.1",
|
||||
"picocolors": "^1.0.0"
|
||||
"escalade": "^3.1.2",
|
||||
"picocolors": "^1.0.1"
|
||||
},
|
||||
"bin": {
|
||||
"update-browserslist-db": "cli.js"
|
||||
@@ -16714,12 +16781,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/url": {
|
||||
"version": "0.11.3",
|
||||
"resolved": "https://registry.npmjs.org/url/-/url-0.11.3.tgz",
|
||||
"integrity": "sha512-6hxOLGfZASQK/cijlZnZJTq8OXAkt/3YGfQX45vvMYXpZoo8NdWZcY73K108Jf759lS1Bv/8wXnHDTSz17dSRw==",
|
||||
"version": "0.11.4",
|
||||
"resolved": "https://registry.npmjs.org/url/-/url-0.11.4.tgz",
|
||||
"integrity": "sha512-oCwdVC7mTuWiPyjLUz/COz5TLk6wgp0RCsN+wHZ2Ekneac9w8uuV0njcbbie2ME+Vs+d6duwmYuR3HgQXs1fOg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"punycode": "^1.4.1",
|
||||
"qs": "^6.11.2"
|
||||
"qs": "^6.12.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/url-loader": {
|
||||
@@ -16783,7 +16854,8 @@
|
||||
"node_modules/url/node_modules/punycode": {
|
||||
"version": "1.4.1",
|
||||
"resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz",
|
||||
"integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ=="
|
||||
"integrity": "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/util": {
|
||||
"version": "0.10.4",
@@ -56,6 +56,6 @@
"node": ">=20"
},
"volta": {
"node": "20.15.1"
"node": "20.17.0"
}
}
@@ -43,6 +43,11 @@ const guides: CommunityGuidesProps[] = [
description: 'Access your local Immich installation over the internet using your own domain',
url: 'https://github.com/ppr88/immich-guides/blob/main/open-immich-custom-domain.md',
},
{
title: 'Nginx caching map server',
description: 'Increase privacy by using nginx as a caching proxy in front of a map tile server',
url: 'https://github.com/pcouy/pcouy.github.io/blob/main/_posts/2024-08-30-proxying-a-map-tile-server-for-increased-privacy.md',
},
];

function CommunityGuide({ title, description, url }: CommunityGuidesProps): JSX.Element {
@@ -28,11 +28,6 @@ const projects: CommunityProjectProps[] = [
|
||||
description: 'A simple way to remove orphaned offline assets from the Immich database',
|
||||
url: 'https://github.com/Thoroslives/immich_remove_offline_files',
|
||||
},
|
||||
{
|
||||
title: 'Create albums from folders',
|
||||
description: 'A Python script to create albums based on the folder structure of an external library.',
|
||||
url: 'https://github.com/Salvoxia/immich-folder-album-creator',
|
||||
},
|
||||
{
|
||||
title: 'Immich-Tools',
|
||||
description: 'Provides scripts for handling problems on the repair page.',
|
||||
@@ -43,6 +38,11 @@ const projects: CommunityProjectProps[] = [
|
||||
description: 'Lightroom plugin to publish photos from Lightroom collections to Immich albums.',
|
||||
url: 'https://github.com/midzelis/mi.Immich.Publisher',
|
||||
},
|
||||
{
|
||||
title: 'Lightroom Immich Plugin: lrc-immich-plugin',
|
||||
description: 'Another Lightroom plugin to publish or export photos from Lightroom to Immich.',
|
||||
url: 'https://github.com/bmachek/lrc-immich-plugin',
|
||||
},
|
||||
{
|
||||
title: 'Immich Duplicate Finder',
|
||||
description: 'Webapp that uses machine learning to identify near-duplicate images.',
|
||||
@@ -58,11 +58,31 @@ const projects: CommunityProjectProps[] = [
|
||||
description: 'Unofficial Immich Android TV app.',
|
||||
url: 'https://github.com/giejay/Immich-Android-TV',
|
||||
},
|
||||
{
|
||||
title: 'Create albums from folders',
|
||||
description: 'A Python script to create albums based on the folder structure of an external library.',
|
||||
url: 'https://github.com/Salvoxia/immich-folder-album-creator',
|
||||
},
|
||||
{
|
||||
title: 'Powershell Module PSImmich',
|
||||
description: 'Powershell Module for the Immich API',
|
||||
url: 'https://github.com/hanpq/PSImmich',
|
||||
},
|
||||
{
|
||||
title: 'Immich Distribution',
|
||||
description: 'Snap package for easy install and zero-care auto updates of Immich. Self-hosted photo management.',
|
||||
url: 'https://immich-distribution.nsg.cc',
|
||||
},
|
||||
{
|
||||
title: 'Immich Kiosk',
|
||||
description: 'Lightweight slideshow to run on kiosk devices and browsers.',
|
||||
url: 'https://github.com/damongolding/immich-kiosk',
|
||||
},
|
||||
{
|
||||
title: 'Immich Power Tools',
|
||||
description: 'Power tools for organizing your immich library.',
|
||||
url: 'https://github.com/varun-raj/immich-power-tools',
|
||||
},
|
||||
];
|
||||
|
||||
function CommunityProject({ title, description, url }: CommunityProjectProps): JSX.Element {
|
||||
|
||||
@@ -49,7 +49,7 @@ export function Timeline({ items }: Props): JSX.Element {
<div className="flex flex-col flex-grow justify-between gap-2">
<div className="flex gap-2 items-center">
{cardIcon === 'immich' ? (
<img src="img/immich-logo.svg" height="30" />
<img src="img/immich-logo.svg" height="30" className="rounded-none" />
) : (
<Icon path={cardIcon} size={1} color={item.iconColor} />
)}
@@ -1,4 +1,3 @@
import '@docusaurus/theme-classic/lib/theme/Unlisted/index';
import { useWindowSize } from '@docusaurus/theme-common';
import DropdownNavbarItem from '@theme/NavbarItem/DropdownNavbarItem';
import React, { useEffect, useState } from 'react';
@@ -1,4 +1,13 @@
import { mdiCalendarToday, mdiLeadPencil, mdiLockOutline, mdiSpeedometerSlow, mdiWeb } from '@mdi/js';
import {
mdiCalendarToday,
mdiCrosshairsOff,
mdiLeadPencil,
mdiLockOff,
mdiLockOutline,
mdiSpeedometerSlow,
mdiWeb,
mdiWrap,
} from '@mdi/js';
import Layout from '@theme/Layout';
import React from 'react';
import { Item as TimelineItem, Timeline } from '../components/timeline';
@@ -8,6 +17,41 @@ const withLanguage = (date: Date) => (language: string) => date.toLocaleDateStri
|
||||
type Item = Omit<TimelineItem, 'done' | 'getDateLabel'> & { date: Date };
|
||||
|
||||
const items: Item[] = [
|
||||
{
|
||||
icon: mdiWrap,
|
||||
iconColor: 'gray',
|
||||
title: 'Carriage returns in bash scripts are cursed',
|
||||
description: 'Git can be configured to automatically convert LF to CRLF on checkout and CRLF breaks bash scripts.',
|
||||
link: {
|
||||
url: 'https://github.com/immich-app/immich/pull/11613',
|
||||
text: '#11613',
|
||||
},
|
||||
date: new Date(2024, 7, 7),
|
||||
},
|
||||
{
|
||||
icon: mdiLockOff,
|
||||
iconColor: 'red',
|
||||
title: 'Fetch inside Cloudflare Workers is cursed',
|
||||
description:
|
||||
'Fetch requests in Cloudflare Workers use http by default, even if you explicitly specify https, which can often cause redirect loops.',
|
||||
link: {
|
||||
url: 'https://community.cloudflare.com/t/does-cloudflare-worker-allow-secure-https-connection-to-fetch-even-on-flexible-ssl/68051/5',
|
||||
text: 'Cloudflare',
|
||||
},
|
||||
date: new Date(2024, 7, 7),
|
||||
},
|
||||
{
|
||||
icon: mdiCrosshairsOff,
|
||||
iconColor: 'gray',
|
||||
title: 'GPS sharing on mobile is cursed',
|
||||
description:
|
||||
'Some phones will silently strip GPS data from images when apps without location permission try to access them.',
|
||||
link: {
|
||||
url: 'https://github.com/immich-app/immich/discussions/11268',
|
||||
text: '#11268',
|
||||
},
|
||||
date: new Date(2024, 6, 21),
|
||||
},
|
||||
{
|
||||
icon: mdiLeadPencil,
|
||||
iconColor: 'gold',
|
||||
@@ -49,7 +93,7 @@ const items: Item[] = [
|
||||
iconColor: 'greenyellow',
|
||||
title: 'JavaScript Date objects are cursed',
|
||||
description: 'JavaScript date objects are 1 indexed for years and days, but 0 indexed for months.',
|
||||
link: { url: 'https://github.com/immich-app/immich/pulls/6787', text: '#6787' },
|
||||
link: { url: 'https://github.com/immich-app/immich/pull/6787', text: '#6787' },
|
||||
date: new Date(2024, 0, 31),
|
||||
},
|
||||
];
|
||||
|
||||
@@ -10,7 +10,7 @@ function HomepageHeader() {
<section className="text-center m-6 p-12 border border-red-400 rounded-[50px] bg-slate-200 dark:bg-immich-dark-gray">
<img
src={isDarkTheme ? 'img/immich-logo-stacked-dark.svg' : 'img/immich-logo-stacked-light.svg'}
className="md:h-60 h-44 mb-2 antialiased"
className="md:h-60 h-44 mb-2 antialiased rounded-none"
alt="Immich logo"
/>
<div className="sm:text-2xl text-lg md:text-4xl mb-12 sm:leading-tight">
@@ -41,7 +41,7 @@ function HomepageHeader() {
Discord
</Link>
</div>
<img src="/img/immich-screenshots.png" alt="screenshots" width={'70%'} />
<img src="/img/immich-screenshots.webp" alt="screenshots" width={'70%'} />
<div className="flex flex-col sm:flex-row place-items-center place-content-center mt-4 gap-1">
<div className="h-24">
<a href="https://play.google.com/store/apps/details?id=app.alextran.immich">
114
docs/src/pages/privacy-policy.tsx
Normal file
@@ -0,0 +1,114 @@
|
||||
import React from 'react';
|
||||
import Link from '@docusaurus/Link';
|
||||
import Layout from '@theme/Layout';
|
||||
import { useColorMode } from '@docusaurus/theme-common';
|
||||
function HomepageHeader() {
|
||||
const { isDarkTheme } = useColorMode();
|
||||
|
||||
return (
|
||||
<header>
|
||||
<section className="max-w-[900px] m-4 p-4 md:p-6 md:m-auto md:my-12 border border-red-400 rounded-2xl bg-slate-200 dark:bg-immich-dark-gray">
|
||||
<section>
|
||||
<h1>Privacy Policy</h1>
|
||||
<p>Last updated: July 31st 2024</p>
|
||||
<p>
|
||||
Welcome to Immich. We are committed to respecting your privacy. This Privacy Policy sets out how we collect,
|
||||
use, and share information when you use our Immich app.
|
||||
</p>
|
||||
</section>
|
||||
|
||||
{/* 1. Scope of This Policy */}
|
||||
<section>
|
||||
<h2>1. Scope of This Policy</h2>
|
||||
<p>
|
||||
This Privacy Policy applies to the Immich app ("we", "our", or "us") and covers our collection, use, and
|
||||
disclosure of your information. This Policy does not cover any third-party websites, services, or
|
||||
applications that can be accessed through our app, or third-party services you may access through Immich.
|
||||
</p>
|
||||
</section>
|
||||
|
||||
{/* 2. Information We Collect */}
|
||||
<section>
|
||||
<h2>2. Information We Collect</h2>
|
||||
<div>
|
||||
<p>
|
||||
<strong>Locally Stored Data</strong>: Immich stores all your photos, albums, and settings locally on your
device. We do not have access to this data, nor do we transmit or store it on any of our servers.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<p>
|
||||
<strong>Purchase Information:</strong> When you make a purchase within the{' '}
|
||||
<a href="https://buy.immich.app">https://buy.immich.app</a>, we collect the following information for tax
|
||||
calculation purposes:
|
||||
</p>
|
||||
<ul>
|
||||
<li>Country of origin</li>
|
||||
<li>Postal code (if the user is from Canada or the United States)</li>
|
||||
</ul>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
{/* 3. Use of Your Information */}
|
||||
<section>
|
||||
<h2>3. Use of Your Information</h2>
|
||||
<p>
|
||||
<strong>Tax Calculation:</strong> The country of origin and postal code (for users from Canada or the United
|
||||
States) are collected solely for determining the applicable tax rates on your purchase.
|
||||
</p>
|
||||
</section>
|
||||
|
||||
{/* 4. Sharing of Your Information */}
|
||||
<section>
|
||||
<h2>4. Sharing of Your Information</h2>
|
||||
<ul>
|
||||
<li>
|
||||
<strong>Tax Authorities:</strong> The purchase information may be shared with tax authorities as required
|
||||
by law.
|
||||
</li>
|
||||
<li>
|
||||
<strong>Payment Providers:</strong> The purchase information may be shared with payment providers where
|
||||
required.
|
||||
</li>
|
||||
</ul>
|
||||
</section>
|
||||
|
||||
{/* 5. Changes to This Policy */}
|
||||
<section>
|
||||
<h2>5. Changes to This Policy</h2>
|
||||
<p>
|
||||
We may update our Privacy Policy from time to time. If we make any changes, we will notify you by revising
|
||||
the "Last updated" date at the top of this policy. It's encouraged that users frequently check this page for
|
||||
any changes to stay informed about how we are helping to protect the personal information we collect.
|
||||
</p>
|
||||
</section>
|
||||
|
||||
{/* 6. Contact Us */}
|
||||
<section>
|
||||
<h2>6. Contact Us</h2>
|
||||
<p>
|
||||
If you have any questions about this Privacy Policy, please contact us at{' '}
|
||||
<a href="mailto:immich@futo.org">immich@futo.org</a>
|
||||
</p>
|
||||
</section>
|
||||
</section>
|
||||
</header>
|
||||
);
|
||||
}
|
||||
|
||||
export default function Home(): JSX.Element {
|
||||
return (
|
||||
<Layout
|
||||
title="Home"
|
||||
description="immich Self-hosted photo and video backup solution directly from your mobile phone "
|
||||
noFooter={true}
|
||||
>
|
||||
<HomepageHeader />
|
||||
<div className="flex flex-col place-items-center place-content-center">
|
||||
<p>This project is available under GNU AGPL v3 license.</p>
|
||||
<p className="text-xs">Privacy should not be a luxury</p>
|
||||
</div>
|
||||
</Layout>
|
||||
);
|
||||
}
|
||||
@@ -15,6 +15,7 @@ import {
|
||||
mdiCloudUploadOutline,
|
||||
mdiCollage,
|
||||
mdiContentDuplicate,
|
||||
mdiCrop,
|
||||
mdiDevices,
|
||||
mdiEmailOutline,
|
||||
mdiExpansionCard,
|
||||
@@ -26,6 +27,7 @@ import {
|
||||
mdiFileSearch,
|
||||
mdiFlash,
|
||||
mdiFolder,
|
||||
mdiFolderMultiple,
|
||||
mdiForum,
|
||||
mdiHandshakeOutline,
|
||||
mdiHeart,
|
||||
@@ -36,6 +38,7 @@ import {
|
||||
mdiImageMultipleOutline,
|
||||
mdiImageSearch,
|
||||
mdiKeyboardSettingsOutline,
|
||||
mdiLicense,
|
||||
mdiLockOutline,
|
||||
mdiMagnify,
|
||||
mdiMagnifyScan,
|
||||
@@ -55,11 +58,14 @@ import {
|
||||
mdiScaleBalance,
|
||||
mdiSecurity,
|
||||
mdiServer,
|
||||
mdiShare,
|
||||
mdiShareAll,
|
||||
mdiShareCircle,
|
||||
mdiStar,
|
||||
mdiStarOutline,
|
||||
mdiTableKey,
|
||||
mdiTag,
|
||||
mdiTagMultiple,
|
||||
mdiText,
|
||||
mdiThemeLightDark,
|
||||
mdiTrashCanOutline,
|
||||
@@ -72,6 +78,11 @@ import React from 'react';
|
||||
import { Item, Timeline } from '../components/timeline';
|
||||
|
||||
const releases = {
|
||||
'v1.113.0': new Date(2024, 7, 30),
|
||||
'v1.112.0': new Date(2024, 7, 14),
|
||||
'v1.111.0': new Date(2024, 6, 26),
|
||||
'v1.110.0': new Date(2024, 5, 11),
|
||||
'v1.109.0': new Date(2024, 6, 18),
|
||||
'v1.106.1': new Date(2024, 5, 11),
|
||||
'v1.104.0': new Date(2024, 4, 13),
|
||||
'v1.103.0': new Date(2024, 3, 29),
|
||||
@@ -220,6 +231,67 @@ const roadmap: Item[] = [
|
||||
];
|
||||
|
||||
const milestones: Item[] = [
|
||||
withRelease({
|
||||
icon: mdiTagMultiple,
|
||||
iconColor: 'orange',
|
||||
title: 'Tags',
|
||||
description: 'Tag your photos and videos',
|
||||
release: 'v1.113.0',
|
||||
}),
|
||||
withRelease({
|
||||
icon: mdiFolderMultiple,
|
||||
iconColor: 'brown',
|
||||
title: 'Folders',
|
||||
description: 'View your photos and videos in folders',
|
||||
release: 'v1.113.0',
|
||||
}),
|
||||
withRelease({
|
||||
icon: mdiPalette,
|
||||
title: 'Theming (mobile)',
|
||||
description: 'Pick a primary color for the mobile app',
|
||||
release: 'v1.112.0',
|
||||
}),
|
||||
withRelease({
|
||||
icon: mdiStarOutline,
|
||||
iconColor: 'gold',
|
||||
title: 'Star rating',
|
||||
description: 'Rate your photos and videos',
|
||||
release: 'v1.112.0',
|
||||
}),
|
||||
withRelease({
|
||||
icon: mdiCrop,
|
||||
iconColor: 'royalblue',
|
||||
title: 'Editor (mobile)',
|
||||
description: 'Crop and rotate on mobile',
|
||||
release: 'v1.111.0',
|
||||
}),
|
||||
withRelease({
|
||||
icon: mdiMap,
|
||||
iconColor: 'green',
|
||||
title: 'Deploy tiles.immich.cloud',
|
||||
description: 'Dedicated tile server for Immich',
|
||||
release: 'v1.111.0',
|
||||
}),
|
||||
{
|
||||
icon: mdiStar,
|
||||
iconColor: 'gold',
|
||||
title: '40,000 Stars',
|
||||
description: 'Reached 40K Stars on GitHub!',
|
||||
getDateLabel: withLanguage(new Date(2024, 6, 21)),
|
||||
},
|
||||
withRelease({
|
||||
icon: mdiShare,
|
||||
title: 'Deploy my.immich.app',
|
||||
description: 'Url router for immich links',
|
||||
release: 'v1.109.0',
|
||||
}),
|
||||
withRelease({
|
||||
icon: mdiLicense,
|
||||
iconColor: 'gold',
|
||||
title: 'Supporter Badge',
|
||||
description: 'The option to buy Immich to support its development!',
|
||||
release: 'v1.109.0',
|
||||
}),
|
||||
withRelease({
|
||||
icon: mdiHistory,
|
||||
title: 'Versioned documentation',
|
||||
@@ -236,7 +308,7 @@ const milestones: Item[] = [
|
||||
withRelease({
|
||||
icon: mdiContentDuplicate,
|
||||
title: 'Similar image detection',
|
||||
description: 'Detect duplicate assets that aren’t exactly identical',
|
||||
description: "Detect duplicate assets that aren't exactly identical",
|
||||
release: 'v1.106.1',
|
||||
}),
|
||||
withRelease({
|
||||
|
||||
36
docs/static/archived-versions.json
vendored
@@ -1,4 +1,40 @@
|
||||
[
|
||||
{
|
||||
"label": "v1.113.1",
|
||||
"url": "https://v1.113.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.113.0",
|
||||
"url": "https://v1.113.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.112.1",
|
||||
"url": "https://v1.112.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.112.0",
|
||||
"url": "https://v1.112.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.111.0",
|
||||
"url": "https://v1.111.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.110.0",
|
||||
"url": "https://v1.110.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.109.2",
|
||||
"url": "https://v1.109.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.109.1",
|
||||
"url": "https://v1.109.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.109.0",
|
||||
"url": "https://v1.109.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.108.0",
|
||||
"url": "https://v1.108.0.archive.immich.app"
|
||||
|
||||
BIN
docs/static/img/immich-screenshots.webp
vendored
Normal file
Binary file not shown. (Size: 196 KiB)
@@ -4,7 +4,7 @@ module.exports = {
corePlugins: {
preflight: false, // disable Tailwind's reset
},
content: ['./src/**/*.{js,jsx,ts,tsx}', '../docs/**/*.mdx'], // my markdown stuff is in ../docs, not /src
content: ['./src/**/*.{js,jsx,ts,tsx}', './{docs,blog}/**/*.{md,mdx}'], // my markdown stuff is in ../docs, not /src
darkMode: ['class', '[data-theme="dark"]'], // hooks into docusaurus' dark mode settings
theme: {
extend: {
@@ -1,32 +0,0 @@
|
||||
module.exports = {
|
||||
parser: '@typescript-eslint/parser',
|
||||
parserOptions: {
|
||||
project: 'tsconfig.json',
|
||||
sourceType: 'module',
|
||||
tsconfigRootDir: __dirname,
|
||||
},
|
||||
plugins: ['@typescript-eslint/eslint-plugin'],
|
||||
extends: ['plugin:@typescript-eslint/recommended', 'plugin:prettier/recommended', 'plugin:unicorn/recommended'],
|
||||
root: true,
|
||||
env: {
|
||||
node: true,
|
||||
},
|
||||
ignorePatterns: ['.eslintrc.js'],
|
||||
rules: {
|
||||
'@typescript-eslint/interface-name-prefix': 'off',
|
||||
'@typescript-eslint/explicit-function-return-type': 'off',
|
||||
'@typescript-eslint/explicit-module-boundary-types': 'off',
|
||||
'@typescript-eslint/no-explicit-any': 'off',
|
||||
'@typescript-eslint/no-floating-promises': 'error',
|
||||
'unicorn/prefer-module': 'off',
|
||||
'unicorn/import-style': 'off',
|
||||
curly: 2,
|
||||
'prettier/prettier': 0,
|
||||
'unicorn/prevent-abbreviations': 'off',
|
||||
'unicorn/filename-case': 'off',
|
||||
'unicorn/no-null': 'off',
|
||||
'unicorn/prefer-top-level-await': 'off',
|
||||
'unicorn/prefer-event-target': 'off',
|
||||
'unicorn/no-thenable': 'off',
|
||||
},
|
||||
};
|
||||
@@ -1 +1 @@
20.15.1
20.17.0

@@ -1,5 +1,3 @@
version: '3.8'

name: immich-e2e

services:
@@ -26,14 +24,16 @@ services:
volumes:
- upload:/usr/src/app/upload
- ./test-assets:/test-assets
extra_hosts:
- 'auth-server:host-gateway'
depends_on:
- redis
- database
ports:
- 2283:3001
- 2285:3001

redis:
image: redis:6.2-alpine@sha256:328fe6a5822256d065debb36617a8169dbfbd77b797c525288e465f56c1d392b
image: redis:6.2-alpine@sha256:e3b17ba9479deec4b7d1eeec1548a253acc5374d68d3b27937fcfe4df8d18c7e

database:
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
@@ -43,7 +43,7 @@ services:
POSTGRES_USER: postgres
POSTGRES_DB: immich
ports:
- 5433:5432
- 5435:5432

volumes:
model-cache:
65
e2e/eslint.config.mjs
Normal file
@@ -0,0 +1,65 @@
|
||||
import { FlatCompat } from '@eslint/eslintrc';
|
||||
import js from '@eslint/js';
|
||||
import typescriptEslint from '@typescript-eslint/eslint-plugin';
|
||||
import tsParser from '@typescript-eslint/parser';
|
||||
import globals from 'globals';
|
||||
import path from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
const compat = new FlatCompat({
|
||||
baseDirectory: __dirname,
|
||||
recommendedConfig: js.configs.recommended,
|
||||
allConfig: js.configs.all,
|
||||
});
|
||||
|
||||
export default [
|
||||
{
|
||||
ignores: ['eslint.config.mjs'],
|
||||
},
|
||||
...compat.extends(
|
||||
'plugin:@typescript-eslint/recommended',
|
||||
'plugin:prettier/recommended',
|
||||
'plugin:unicorn/recommended',
|
||||
),
|
||||
{
|
||||
plugins: {
|
||||
'@typescript-eslint': typescriptEslint,
|
||||
},
|
||||
|
||||
languageOptions: {
|
||||
globals: {
|
||||
...globals.node,
|
||||
},
|
||||
|
||||
parser: tsParser,
|
||||
ecmaVersion: 5,
|
||||
sourceType: 'module',
|
||||
|
||||
parserOptions: {
|
||||
project: 'tsconfig.json',
|
||||
tsconfigRootDir: __dirname,
|
||||
},
|
||||
},
|
||||
|
||||
rules: {
|
||||
'@typescript-eslint/interface-name-prefix': 'off',
|
||||
'@typescript-eslint/explicit-function-return-type': 'off',
|
||||
'@typescript-eslint/explicit-module-boundary-types': 'off',
|
||||
'@typescript-eslint/no-explicit-any': 'off',
|
||||
'@typescript-eslint/no-floating-promises': 'error',
|
||||
'unicorn/prefer-module': 'off',
|
||||
'unicorn/import-style': 'off',
|
||||
curly: 2,
|
||||
'prettier/prettier': 0,
|
||||
'unicorn/prevent-abbreviations': 'off',
|
||||
'unicorn/filename-case': 'off',
|
||||
'unicorn/no-null': 'off',
|
||||
'unicorn/prefer-top-level-await': 'off',
|
||||
'unicorn/prefer-event-target': 'off',
|
||||
'unicorn/no-thenable': 'off',
|
||||
'object-shorthand': ['error', 'always'],
|
||||
},
|
||||
},
|
||||
];
|
||||
2868
e2e/package-lock.json
generated
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "immich-e2e",
|
||||
"version": "1.108.0",
|
||||
"version": "1.113.1",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"type": "module",
|
||||
@@ -19,23 +19,29 @@
|
||||
"author": "",
|
||||
"license": "GNU Affero General Public License version 3",
|
||||
"devDependencies": {
|
||||
"@eslint/eslintrc": "^3.1.0",
|
||||
"@eslint/js": "^9.8.0",
|
||||
"@immich/cli": "file:../cli",
|
||||
"@immich/sdk": "file:../open-api/typescript-sdk",
|
||||
"@playwright/test": "^1.44.1",
|
||||
"@types/luxon": "^3.4.2",
|
||||
"@types/node": "^20.14.10",
|
||||
"@types/node": "^20.16.2",
|
||||
"@types/oidc-provider": "^8.5.1",
|
||||
"@types/pg": "^8.11.0",
|
||||
"@types/pngjs": "^6.0.4",
|
||||
"@types/supertest": "^6.0.2",
|
||||
"@typescript-eslint/eslint-plugin": "^7.1.0",
|
||||
"@typescript-eslint/parser": "^7.1.0",
|
||||
"@vitest/coverage-v8": "^1.3.0",
|
||||
"eslint": "^8.57.0",
|
||||
"@typescript-eslint/eslint-plugin": "^8.0.0",
|
||||
"@typescript-eslint/parser": "^8.0.0",
|
||||
"@vitest/coverage-v8": "^2.0.5",
|
||||
"eslint": "^9.0.0",
|
||||
"eslint-config-prettier": "^9.1.0",
|
||||
"eslint-plugin-prettier": "^5.1.3",
|
||||
"eslint-plugin-unicorn": "^54.0.0",
|
||||
"exiftool-vendored": "^27.0.0",
|
||||
"eslint-plugin-unicorn": "^55.0.0",
|
||||
"exiftool-vendored": "^28.0.0",
|
||||
"globals": "^15.9.0",
|
||||
"jose": "^5.6.3",
|
||||
"luxon": "^3.4.4",
|
||||
"oidc-provider": "^8.5.1",
|
||||
"pg": "^8.11.3",
|
||||
"pngjs": "^7.0.0",
|
||||
"prettier": "^3.2.5",
|
||||
@@ -44,9 +50,9 @@
|
||||
"supertest": "^7.0.0",
|
||||
"typescript": "^5.3.3",
|
||||
"utimes": "^5.2.1",
|
||||
"vitest": "^1.3.0"
|
||||
"vitest": "^2.0.5"
|
||||
},
|
||||
"volta": {
|
||||
"node": "20.15.1"
|
||||
"node": "20.17.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,7 +8,7 @@ export default defineConfig({
workers: 1,
reporter: 'html',
use: {
baseURL: 'http://127.0.0.1:2283',
baseURL: 'http://127.0.0.1:2285',
trace: 'on-first-retry',
},

@@ -54,7 +54,7 @@ export default defineConfig({
/* Run your local dev server before starting the tests */
webServer: {
command: 'docker compose up --build -V --remove-orphans',
url: 'http://127.0.0.1:2283',
url: 'http://127.0.0.1:2285',
reuseExistingServer: true,
},
});
@@ -344,16 +344,16 @@ describe('/albums', () => {
});
});

describe('GET /albums/count', () => {
describe('GET /albums/statistics', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/albums/count');
const { status, body } = await request(app).get('/albums/statistics');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should return total count of albums the user has access to', async () => {
const { status, body } = await request(app)
.get('/albums/count')
.get('/albums/statistics')
.set('Authorization', `Bearer ${user1.accessToken}`);

expect(status).toBe(200);
258
e2e/src/api/specs/api-key.e2e-spec.ts
Normal file
@@ -0,0 +1,258 @@
|
||||
import { LoginResponseDto, Permission, createApiKey } from '@immich/sdk';
|
||||
import { createUserDto, uuidDto } from 'src/fixtures';
|
||||
import { errorDto } from 'src/responses';
|
||||
import { app, asBearerAuth, utils } from 'src/utils';
|
||||
import request from 'supertest';
|
||||
import { beforeAll, beforeEach, describe, expect, it } from 'vitest';
|
||||
|
||||
const create = (accessToken: string, permissions: Permission[]) =>
|
||||
createApiKey({ apiKeyCreateDto: { name: 'api key', permissions } }, { headers: asBearerAuth(accessToken) });
|
||||
|
||||
describe('/api-keys', () => {
|
||||
let admin: LoginResponseDto;
|
||||
let user: LoginResponseDto;
|
||||
|
||||
beforeAll(async () => {
|
||||
await utils.resetDatabase();
|
||||
|
||||
admin = await utils.adminSetup();
|
||||
user = await utils.userSetup(admin.accessToken, createUserDto.user1);
|
||||
});
|
||||
|
||||
beforeEach(async () => {
|
||||
await utils.resetDatabase(['api_keys']);
|
||||
});
|
||||
|
||||
describe('POST /api-keys', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).post('/api-keys').send({ name: 'API Key' });
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should not work without permission', async () => {
|
||||
const { secret } = await create(user.accessToken, [Permission.ApiKeyRead]);
|
||||
const { status, body } = await request(app).post('/api-keys').set('x-api-key', secret).send({ name: 'API Key' });
|
||||
expect(status).toBe(403);
|
||||
expect(body).toEqual(errorDto.missingPermission('apiKey.create'));
|
||||
});
|
||||
|
||||
it('should work with apiKey.create', async () => {
|
||||
const { secret } = await create(user.accessToken, [Permission.ApiKeyCreate, Permission.ApiKeyRead]);
|
||||
const { status, body } = await request(app)
|
||||
.post('/api-keys')
|
||||
.set('x-api-key', secret)
|
||||
.send({
|
||||
name: 'API Key',
|
||||
permissions: [Permission.ApiKeyRead],
|
||||
});
|
||||
expect(body).toEqual({
|
||||
secret: expect.any(String),
|
||||
apiKey: {
|
||||
id: expect.any(String),
|
||||
name: 'API Key',
|
||||
permissions: [Permission.ApiKeyRead],
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String),
|
||||
},
|
||||
});
|
||||
expect(status).toBe(201);
|
||||
});
|
||||
|
||||
it('should not create an api key with all permissions', async () => {
|
||||
const { secret } = await create(user.accessToken, [Permission.ApiKeyCreate]);
|
||||
const { status, body } = await request(app)
|
||||
.post('/api-keys')
|
||||
.set('x-api-key', secret)
|
||||
.send({ name: 'API Key', permissions: [Permission.All] });
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest('Cannot grant permissions you do not have'));
|
||||
});
|
||||
|
||||
it('should not create an api key with more permissions', async () => {
|
||||
const { secret } = await create(user.accessToken, [Permission.ApiKeyCreate]);
|
||||
const { status, body } = await request(app)
|
||||
.post('/api-keys')
|
||||
.set('x-api-key', secret)
|
||||
.send({ name: 'API Key', permissions: [Permission.ApiKeyRead] });
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest('Cannot grant permissions you do not have'));
|
||||
});
|
||||
|
||||
it('should create an api key', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/api-keys')
|
||||
.send({ name: 'API Key', permissions: [Permission.All] })
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(body).toEqual({
|
||||
apiKey: {
|
||||
id: expect.any(String),
|
||||
name: 'API Key',
|
||||
permissions: [Permission.All],
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String),
|
||||
},
|
||||
secret: expect.any(String),
|
||||
});
|
||||
expect(status).toEqual(201);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api-keys', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/api-keys');
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should start off empty', async () => {
|
||||
const { status, body } = await request(app).get('/api-keys').set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(body).toEqual([]);
|
||||
expect(status).toEqual(200);
|
||||
});
|
||||
|
||||
it('should return a list of api keys', async () => {
|
||||
const [{ apiKey: apiKey1 }, { apiKey: apiKey2 }, { apiKey: apiKey3 }] = await Promise.all([
|
||||
create(admin.accessToken, [Permission.All]),
|
||||
create(admin.accessToken, [Permission.All]),
|
||||
create(admin.accessToken, [Permission.All]),
|
||||
]);
|
||||
const { status, body } = await request(app).get('/api-keys').set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(body).toHaveLength(3);
|
||||
expect(body).toEqual(expect.arrayContaining([apiKey1, apiKey2, apiKey3]));
|
||||
expect(status).toEqual(200);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /api-keys/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get(`/api-keys/${uuidDto.notFound}`);
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require authorization', async () => {
|
||||
const { apiKey } = await create(user.accessToken, [Permission.All]);
|
||||
const { status, body } = await request(app)
|
||||
.get(`/api-keys/${apiKey.id}`)
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest('API Key not found'));
|
||||
});
|
||||
|
||||
it('should require a valid uuid', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get(`/api-keys/${uuidDto.invalid}`)
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should get api key details', async () => {
|
||||
const { apiKey } = await create(user.accessToken, [Permission.All]);
|
||||
const { status, body } = await request(app)
|
||||
.get(`/api-keys/${apiKey.id}`)
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual({
|
||||
id: expect.any(String),
|
||||
name: 'api key',
|
||||
permissions: [Permission.All],
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String),
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('PUT /api-keys/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).put(`/api-keys/${uuidDto.notFound}`).send({ name: 'new name' });
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require authorization', async () => {
|
||||
const { apiKey } = await create(user.accessToken, [Permission.All]);
|
||||
const { status, body } = await request(app)
|
||||
.put(`/api-keys/${apiKey.id}`)
|
||||
.send({ name: 'new name' })
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest('API Key not found'));
|
||||
});
|
||||
|
||||
it('should require a valid uuid', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/api-keys/${uuidDto.invalid}`)
|
||||
.send({ name: 'new name' })
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should update api key details', async () => {
|
||||
const { apiKey } = await create(user.accessToken, [Permission.All]);
|
||||
const { status, body } = await request(app)
|
||||
.put(`/api-keys/${apiKey.id}`)
|
||||
.send({ name: 'new name' })
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual({
|
||||
id: expect.any(String),
|
||||
name: 'new name',
|
||||
permissions: [Permission.All],
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String),
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('DELETE /api-keys/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).delete(`/api-keys/${uuidDto.notFound}`);
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require authorization', async () => {
|
||||
const { apiKey } = await create(user.accessToken, [Permission.All]);
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/api-keys/${apiKey.id}`)
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest('API Key not found'));
|
||||
});
|
||||
|
||||
it('should require a valid uuid', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/api-keys/${uuidDto.invalid}`)
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should delete an api key', async () => {
|
||||
const { apiKey } = await create(user.accessToken, [Permission.All]);
|
||||
const { status } = await request(app)
|
||||
.delete(`/api-keys/${apiKey.id}`)
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(204);
|
||||
});
|
||||
});
|
||||
|
||||
describe('authentication', () => {
|
||||
it('should work as a header', async () => {
|
||||
const { secret } = await create(user.accessToken, [Permission.All]);
|
||||
const { status, body } = await request(app).get('/api-keys').set('x-api-key', secret);
|
||||
expect(body).toHaveLength(1);
|
||||
expect(status).toBe(200);
|
||||
});
|
||||
|
||||
it('should work as a query param', async () => {
|
||||
const { secret } = await create(user.accessToken, [Permission.All]);
|
||||
const { status, body } = await request(app).get(`/api-keys?apiKey=${secret}`);
|
||||
expect(body).toHaveLength(1);
|
||||
expect(status).toBe(200);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -7,7 +7,6 @@ import {
|
||||
SharedLinkType,
|
||||
getAssetInfo,
|
||||
getMyUser,
|
||||
updateAssets,
|
||||
} from '@immich/sdk';
|
||||
import { exiftool } from 'exiftool-vendored';
|
||||
import { DateTime } from 'luxon';
|
||||
@@ -43,6 +42,7 @@ const makeUploadDto = (options?: { omit: string }): Record<string, any> => {
|
||||
const TEN_TIMES = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
|
||||
|
||||
const locationAssetFilepath = `${testAssetDir}/metadata/gps-position/thompson-springs.jpg`;
|
||||
const ratingAssetFilepath = `${testAssetDir}/metadata/rating/mongolels.jpg`;
|
||||
|
||||
const readTags = async (bytes: Buffer, filename: string) => {
|
||||
const filepath = join(tempDir, filename);
|
||||
@@ -66,25 +66,23 @@ describe('/asset', () => {
|
||||
let timeBucketUser: LoginResponseDto;
|
||||
let quotaUser: LoginResponseDto;
|
||||
let statsUser: LoginResponseDto;
|
||||
let stackUser: LoginResponseDto;
|
||||
|
||||
let user1Assets: AssetMediaResponseDto[];
|
||||
let user2Assets: AssetMediaResponseDto[];
|
||||
let stackAssets: AssetMediaResponseDto[];
|
||||
let locationAsset: AssetMediaResponseDto;
|
||||
let ratingAsset: AssetMediaResponseDto;
|
||||
|
||||
const setupTests = async () => {
|
||||
await utils.resetDatabase();
|
||||
admin = await utils.adminSetup({ onboarding: false });
|
||||
|
||||
[websocket, user1, user2, statsUser, quotaUser, timeBucketUser, stackUser] = await Promise.all([
|
||||
[websocket, user1, user2, statsUser, quotaUser, timeBucketUser] = await Promise.all([
|
||||
utils.connectWebsocket(admin.accessToken),
|
||||
utils.userSetup(admin.accessToken, createUserDto.create('1')),
|
||||
utils.userSetup(admin.accessToken, createUserDto.create('2')),
|
||||
utils.userSetup(admin.accessToken, createUserDto.create('stats')),
|
||||
utils.userSetup(admin.accessToken, createUserDto.userQuota),
|
||||
utils.userSetup(admin.accessToken, createUserDto.create('time-bucket')),
|
||||
utils.userSetup(admin.accessToken, createUserDto.create('stack')),
|
||||
]);
|
||||
|
||||
await utils.createPartner(user1.accessToken, user2.userId);
|
||||
@@ -99,6 +97,16 @@ describe('/asset', () => {
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: locationAsset.id });
|
||||
|
||||
// asset rating
|
||||
ratingAsset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename: 'mongolels.jpg',
|
||||
bytes: await readFile(ratingAssetFilepath),
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: ratingAsset.id });
|
||||
|
||||
user1Assets = await Promise.all([
|
||||
utils.createAsset(user1.accessToken),
|
||||
utils.createAsset(user1.accessToken),
|
||||
@@ -137,20 +145,6 @@ describe('/asset', () => {
|
||||
}),
|
||||
]);
|
||||
|
||||
// stacks
|
||||
stackAssets = await Promise.all([
|
||||
utils.createAsset(stackUser.accessToken),
|
||||
utils.createAsset(stackUser.accessToken),
|
||||
utils.createAsset(stackUser.accessToken),
|
||||
utils.createAsset(stackUser.accessToken),
|
||||
utils.createAsset(stackUser.accessToken),
|
||||
]);
|
||||
|
||||
await updateAssets(
|
||||
{ assetBulkUpdateDto: { stackParentId: stackAssets[0].id, ids: [stackAssets[1].id, stackAssets[2].id] } },
|
||||
{ headers: asBearerAuth(stackUser.accessToken) },
|
||||
);
|
||||
|
||||
const person1 = await utils.createPerson(user1.accessToken, {
|
||||
name: 'Test Person',
|
||||
});
|
||||
@@ -214,6 +208,22 @@ describe('/asset', () => {
|
||||
expect(body).toMatchObject({ id: user1Assets[0].id });
|
||||
});
|
||||
|
||||
it('should get the asset rating', async () => {
|
||||
await utils.waitForWebsocketEvent({
|
||||
event: 'assetUpload',
|
||||
id: ratingAsset.id,
|
||||
});
|
||||
|
||||
const { status, body } = await request(app)
|
||||
.get(`/assets/${ratingAsset.id}`)
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(200);
|
||||
expect(body).toMatchObject({
|
||||
id: ratingAsset.id,
|
||||
exifInfo: expect.objectContaining({ rating: 3 }),
|
||||
});
|
||||
});
|
||||
|
||||
it('should work with a shared link', async () => {
|
||||
const sharedLink = await utils.createSharedLink(user1.accessToken, {
|
||||
type: SharedLinkType.Individual,
|
||||
@@ -353,6 +363,8 @@ describe('/asset', () => {
|
||||
utils.createAsset(user1.accessToken),
|
||||
utils.createAsset(user1.accessToken),
|
||||
]);
|
||||
|
||||
await utils.waitForQueueFinish(admin.accessToken, 'thumbnailGeneration');
|
||||
});
|
||||
|
||||
it('should require authentication', async () => {
|
||||
@@ -389,17 +401,14 @@ describe('/asset', () => {
|
||||
}
|
||||
});
|
||||
|
||||
it.each(TEN_TIMES)(
|
||||
'should return 1 asset if there are 10 assets in the database but user 2 only has 1',
|
||||
async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/assets/random')
|
||||
.set('Authorization', `Bearer ${user2.accessToken}`);
|
||||
it.skip('should return 1 asset if there are 10 assets in the database but user 2 only has 1', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/assets/random')
|
||||
.set('Authorization', `Bearer ${user2.accessToken}`);
|
||||
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual([expect.objectContaining({ id: user2Assets[0].id })]);
|
||||
},
|
||||
);
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual([expect.objectContaining({ id: user2Assets[0].id })]);
|
||||
});
|
||||
|
||||
it('should return error', async () => {
|
||||
const { status } = await request(app)
|
||||
@@ -472,6 +481,44 @@ describe('/asset', () => {
|
||||
expect(status).toEqual(200);
|
||||
});
|
||||
|
||||
it('should update date time original when sidecar file contains DateTimeOriginal', async () => {
|
||||
const sidecarData = `<?xpacket begin='?' id='W5M0MpCehiHzreSzNTczkc9d'?>
|
||||
<x:xmpmeta xmlns:x='adobe:ns:meta/' x:xmptk='Image::ExifTool 12.40'>
|
||||
<rdf:RDF xmlns:rdf='http://www.w3.org/1999/02/22-rdf-syntax-ns#'>
|
||||
<rdf:Description rdf:about=''
|
||||
xmlns:exif='http://ns.adobe.com/exif/1.0/'>
|
||||
<exif:ExifVersion>0220</exif:ExifVersion> <exif:DateTimeOriginal>2024-07-11T10:32:52Z</exif:DateTimeOriginal>
|
||||
<exif:GPSVersionID>2.3.0.0</exif:GPSVersionID>
|
||||
</rdf:Description>
|
||||
</rdf:RDF>
|
||||
</x:xmpmeta>
|
||||
<?xpacket end='w'?>`;
|
||||
|
||||
const { id } = await utils.createAsset(user1.accessToken, {
|
||||
sidecarData: {
|
||||
bytes: Buffer.from(sidecarData),
|
||||
filename: 'example.xmp',
|
||||
},
|
||||
});
|
||||
await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
|
||||
|
||||
const assetInfo = await utils.getAssetInfo(user1.accessToken, id);
|
||||
expect(assetInfo.exifInfo?.dateTimeOriginal).toBe('2024-07-11T10:32:52.000Z');
|
||||
|
||||
const { status, body } = await request(app)
|
||||
.put(`/assets/${id}`)
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.send({ dateTimeOriginal: '2023-11-19T18:11:00.000-07:00' });
|
||||
|
||||
expect(body).toMatchObject({
|
||||
id,
|
||||
exifInfo: expect.objectContaining({
|
||||
dateTimeOriginal: '2023-11-20T01:11:00.000Z',
|
||||
}),
|
||||
});
|
||||
expect(status).toEqual(200);
|
||||
});
|
||||
|
||||
it('should reject invalid gps coordinates', async () => {
|
||||
for (const test of [
|
||||
{ latitude: 12 },
|
||||
@@ -507,6 +554,22 @@ describe('/asset', () => {
|
||||
expect(status).toEqual(200);
|
||||
});
|
||||
|
||||
it.skip('should geocode country from gps data in the middle of nowhere', async () => {
|
||||
const { status } = await request(app)
|
||||
.put(`/assets/${user1Assets[0].id}`)
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.send({ latitude: 42, longitude: 69 });
|
||||
expect(status).toEqual(200);
|
||||
|
||||
await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
|
||||
|
||||
const asset = await getAssetInfo({ id: user1Assets[0].id }, { headers: asBearerAuth(user1.accessToken) });
|
||||
expect(asset).toMatchObject({
|
||||
id: user1Assets[0].id,
|
||||
exifInfo: expect.objectContaining({ city: null, country: 'Kazakhstan' }),
|
||||
});
|
||||
});
|
||||
|
||||
it('should set the description', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/assets/${user1Assets[0].id}`)
|
||||
@@ -521,6 +584,31 @@ describe('/asset', () => {
|
||||
expect(status).toEqual(200);
|
||||
});
|
||||
|
||||
it('should set the rating', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/assets/${user1Assets[0].id}`)
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.send({ rating: 2 });
|
||||
expect(body).toMatchObject({
|
||||
id: user1Assets[0].id,
|
||||
exifInfo: expect.objectContaining({
|
||||
rating: 2,
|
||||
}),
|
||||
});
|
||||
expect(status).toEqual(200);
|
||||
});
|
||||
|
||||
it('should reject invalid rating', async () => {
|
||||
for (const test of [{ rating: 7 }, { rating: 3.5 }, { rating: null }]) {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/assets/${user1Assets[0].id}`)
|
||||
.send(test)
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest());
|
||||
}
|
||||
});
|
||||
|
||||
it('should return tagged people', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/assets/${user1Assets[0].id}`)
|
||||
@@ -719,145 +807,8 @@ describe('/asset', () => {
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require a valid parent id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put('/assets')
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.send({ stackParentId: uuidDto.invalid, ids: [stackAssets[0].id] });
|
||||
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['stackParentId must be a UUID']));
|
||||
});
|
||||
|
||||
it('should require access to the parent', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put('/assets')
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.send({ stackParentId: stackAssets[3].id, ids: [user1Assets[0].id] });
|
||||
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.noPermission);
|
||||
});
|
||||
|
||||
it('should add stack children', async () => {
|
||||
const { status } = await request(app)
|
||||
.put('/assets')
|
||||
.set('Authorization', `Bearer ${stackUser.accessToken}`)
|
||||
.send({ stackParentId: stackAssets[0].id, ids: [stackAssets[3].id] });
|
||||
|
||||
expect(status).toBe(204);
|
||||
|
||||
const asset = await getAssetInfo({ id: stackAssets[0].id }, { headers: asBearerAuth(stackUser.accessToken) });
|
||||
expect(asset.stack).not.toBeUndefined();
|
||||
expect(asset.stack).toEqual(expect.arrayContaining([expect.objectContaining({ id: stackAssets[3].id })]));
|
||||
});
|
||||
|
||||
it('should remove stack children', async () => {
|
||||
const { status } = await request(app)
|
||||
.put('/assets')
|
||||
.set('Authorization', `Bearer ${stackUser.accessToken}`)
|
||||
.send({ removeParent: true, ids: [stackAssets[1].id] });
|
||||
|
||||
expect(status).toBe(204);
|
||||
|
||||
const asset = await getAssetInfo({ id: stackAssets[0].id }, { headers: asBearerAuth(stackUser.accessToken) });
|
||||
expect(asset.stack).not.toBeUndefined();
|
||||
expect(asset.stack).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({ id: stackAssets[2].id }),
|
||||
expect.objectContaining({ id: stackAssets[3].id }),
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
it('should remove all stack children', async () => {
|
||||
const { status } = await request(app)
|
||||
.put('/assets')
|
||||
.set('Authorization', `Bearer ${stackUser.accessToken}`)
|
||||
.send({ removeParent: true, ids: [stackAssets[2].id, stackAssets[3].id] });
|
||||
|
||||
expect(status).toBe(204);
|
||||
|
||||
const asset = await getAssetInfo({ id: stackAssets[0].id }, { headers: asBearerAuth(stackUser.accessToken) });
|
||||
expect(asset.stack).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should merge stack children', async () => {
|
||||
// create stack after previous test removed stack children
|
||||
await updateAssets(
|
||||
{ assetBulkUpdateDto: { stackParentId: stackAssets[0].id, ids: [stackAssets[1].id, stackAssets[2].id] } },
|
||||
{ headers: asBearerAuth(stackUser.accessToken) },
|
||||
);
|
||||
|
||||
const { status } = await request(app)
|
||||
.put('/assets')
|
||||
.set('Authorization', `Bearer ${stackUser.accessToken}`)
|
||||
.send({ stackParentId: stackAssets[3].id, ids: [stackAssets[0].id] });
|
||||
|
||||
expect(status).toBe(204);
|
||||
|
||||
const asset = await getAssetInfo({ id: stackAssets[3].id }, { headers: asBearerAuth(stackUser.accessToken) });
|
||||
expect(asset.stack).not.toBeUndefined();
|
||||
expect(asset.stack).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({ id: stackAssets[0].id }),
|
||||
expect.objectContaining({ id: stackAssets[1].id }),
|
||||
expect.objectContaining({ id: stackAssets[2].id }),
|
||||
]),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('PUT /assets/stack/parent', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).put('/assets/stack/parent');
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require a valid id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put('/assets/stack/parent')
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.send({ oldParentId: uuidDto.invalid, newParentId: uuidDto.invalid });
|
||||
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest());
|
||||
});
|
||||
|
||||
it('should require access', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put('/assets/stack/parent')
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.send({ oldParentId: stackAssets[3].id, newParentId: stackAssets[0].id });
|
||||
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.noPermission);
|
||||
});
|
||||
|
||||
it('should make old parent child of new parent', async () => {
|
||||
const { status } = await request(app)
|
||||
.put('/assets/stack/parent')
|
||||
.set('Authorization', `Bearer ${stackUser.accessToken}`)
|
||||
.send({ oldParentId: stackAssets[3].id, newParentId: stackAssets[0].id });
|
||||
|
||||
expect(status).toBe(200);
|
||||
|
||||
const asset = await getAssetInfo({ id: stackAssets[0].id }, { headers: asBearerAuth(stackUser.accessToken) });
|
||||
|
||||
// new parent
|
||||
expect(asset.stack).not.toBeUndefined();
|
||||
expect(asset.stack).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({ id: stackAssets[1].id }),
|
||||
expect.objectContaining({ id: stackAssets[2].id }),
|
||||
expect.objectContaining({ id: stackAssets[3].id }),
|
||||
]),
|
||||
);
|
||||
});
|
||||
});
|
||||
describe('POST /assets', () => {
|
||||
beforeAll(setupTests, 30_000);
|
||||
|
||||
@@ -886,13 +837,12 @@ describe('/asset', () => {
|
||||
expect(body).toEqual(errorDto.badRequest());
|
||||
});
|
||||
|
||||
it.each([
|
||||
const tests = [
|
||||
{
|
||||
input: 'formats/avif/8bit-sRGB.avif',
|
||||
expected: {
|
||||
type: AssetTypeEnum.Image,
|
||||
originalFileName: '8bit-sRGB.avif',
|
||||
resized: true,
|
||||
exifInfo: {
|
||||
description: '',
|
||||
exifImageHeight: 1080,
|
||||
@@ -908,7 +858,6 @@ describe('/asset', () => {
|
||||
expected: {
|
||||
type: AssetTypeEnum.Image,
|
||||
originalFileName: 'el_torcal_rocks.jpg',
|
||||
resized: true,
|
||||
exifInfo: {
|
||||
dateTimeOriginal: '2012-08-05T11:39:59.000Z',
|
||||
exifImageWidth: 512,
|
||||
@@ -932,7 +881,6 @@ describe('/asset', () => {
|
||||
expected: {
|
||||
type: AssetTypeEnum.Image,
|
||||
originalFileName: '8bit-sRGB.jxl',
|
||||
resized: true,
|
||||
exifInfo: {
|
||||
description: '',
|
||||
exifImageHeight: 1080,
|
||||
@@ -948,7 +896,6 @@ describe('/asset', () => {
|
||||
expected: {
|
||||
type: AssetTypeEnum.Image,
|
||||
originalFileName: 'IMG_2682.heic',
|
||||
resized: true,
|
||||
fileCreatedAt: '2019-03-21T16:04:22.348Z',
|
||||
exifInfo: {
|
||||
dateTimeOriginal: '2019-03-21T16:04:22.348Z',
|
||||
@@ -973,7 +920,6 @@ describe('/asset', () => {
|
||||
expected: {
|
||||
type: AssetTypeEnum.Image,
|
||||
originalFileName: 'density_plot.png',
|
||||
resized: true,
|
||||
exifInfo: {
|
||||
exifImageWidth: 800,
|
||||
exifImageHeight: 800,
|
||||
@@ -988,7 +934,6 @@ describe('/asset', () => {
|
||||
expected: {
|
||||
type: AssetTypeEnum.Image,
|
||||
originalFileName: 'glarus.nef',
|
||||
resized: true,
|
||||
fileCreatedAt: '2010-07-20T17:27:12.000Z',
|
||||
exifInfo: {
|
||||
make: 'NIKON CORPORATION',
|
||||
@@ -1010,7 +955,6 @@ describe('/asset', () => {
|
||||
expected: {
|
||||
type: AssetTypeEnum.Image,
|
||||
originalFileName: 'philadelphia.nef',
|
||||
resized: true,
|
||||
fileCreatedAt: '2016-09-22T22:10:29.060Z',
|
||||
exifInfo: {
|
||||
make: 'NIKON CORPORATION',
|
||||
@@ -1033,7 +977,6 @@ describe('/asset', () => {
|
||||
expected: {
|
||||
type: AssetTypeEnum.Image,
|
||||
originalFileName: '4_3.rw2',
|
||||
resized: true,
|
||||
fileCreatedAt: '2018-05-10T08:42:37.842Z',
|
||||
exifInfo: {
|
||||
make: 'Panasonic',
|
||||
@@ -1057,7 +1000,6 @@ describe('/asset', () => {
|
||||
expected: {
|
||||
type: AssetTypeEnum.Image,
|
||||
originalFileName: '12bit-compressed-(3_2).arw',
|
||||
resized: true,
|
||||
fileCreatedAt: '2016-09-27T10:51:44.000Z',
|
||||
exifInfo: {
|
||||
make: 'SONY',
|
||||
@@ -1082,8 +1024,7 @@ describe('/asset', () => {
|
||||
expected: {
|
||||
type: AssetTypeEnum.Image,
|
||||
originalFileName: '14bit-uncompressed-(3_2).arw',
|
||||
resized: true,
|
||||
fileCreatedAt: '2016-01-08T15:08:01.000Z',
|
||||
fileCreatedAt: '2016-01-08T14:08:01.000Z',
|
||||
exifInfo: {
|
||||
make: 'SONY',
|
||||
model: 'ILCE-7M2',
|
||||
@@ -1095,28 +1036,39 @@ describe('/asset', () => {
|
||||
iso: 100,
|
||||
lensModel: 'E 25mm F2',
|
||||
fileSizeInByte: 49_512_448,
|
||||
dateTimeOriginal: '2016-01-08T15:08:01.000Z',
|
||||
dateTimeOriginal: '2016-01-08T14:08:01.000Z',
|
||||
latitude: null,
|
||||
longitude: null,
|
||||
orientation: '1',
|
||||
},
|
||||
},
|
||||
},
|
||||
])(`should upload and generate a thumbnail for $input`, async ({ input, expected }) => {
|
||||
const filepath = join(testAssetDir, input);
|
||||
const { id, status } = await utils.createAsset(admin.accessToken, {
|
||||
assetData: { bytes: await readFile(filepath), filename: basename(filepath) },
|
||||
});
|
||||
];
|
||||
|
||||
expect(status).toBe(AssetMediaStatus.Created);
|
||||
it(`should upload and generate a thumbnail for different file types`, async () => {
|
||||
// upload in parallel
|
||||
const assets = await Promise.all(
|
||||
tests.map(async ({ input }) => {
|
||||
const filepath = join(testAssetDir, input);
|
||||
return utils.createAsset(admin.accessToken, {
|
||||
assetData: { bytes: await readFile(filepath), filename: basename(filepath) },
|
||||
});
|
||||
}),
|
||||
);
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: id });
|
||||
for (const { id, status } of assets) {
|
||||
expect(status).toBe(AssetMediaStatus.Created);
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id });
|
||||
}
|
||||
|
||||
const asset = await utils.getAssetInfo(admin.accessToken, id);
|
||||
for (const [i, { id }] of assets.entries()) {
|
||||
const { expected } = tests[i];
|
||||
const asset = await utils.getAssetInfo(admin.accessToken, id);
|
||||
|
||||
expect(asset.exifInfo).toBeDefined();
|
||||
expect(asset.exifInfo).toMatchObject(expected.exifInfo);
|
||||
expect(asset).toMatchObject(expected);
|
||||
expect(asset.exifInfo).toBeDefined();
|
||||
expect(asset.exifInfo).toMatchObject(expected.exifInfo);
|
||||
expect(asset).toMatchObject(expected);
|
||||
}
|
||||
});

it('should handle a duplicate', async () => {
@@ -1170,17 +1122,25 @@ describe('/asset', () => {
// into the test here.
it.each([
{
filepath: 'formats/motionphoto/Samsung One UI 5.jpg',
filepath: 'formats/motionphoto/samsung-one-ui-5.jpg',
checksum: 'fr14niqCq6N20HB8rJYEvpsUVtI=',
},
{
filepath: 'formats/motionphoto/Samsung One UI 6.jpg',
filepath: 'formats/motionphoto/samsung-one-ui-6.jpg',
checksum: 'lT9Uviw/FFJYCjfIxAGPTjzAmmw=',
},
{
filepath: 'formats/motionphoto/Samsung One UI 6.heic',
filepath: 'formats/motionphoto/samsung-one-ui-6.heic',
checksum: '/ejgzywvgvzvVhUYVfvkLzFBAF0=',
},
{
filepath: 'formats/motionphoto/pixel-6-pro.jpg',
checksum: 'bFhLGbdK058PSk4FTfrSnoKWykc=',
},
{
filepath: 'formats/motionphoto/pixel-8a.jpg',
checksum: '7YdY+WF0h+CXHbiXpi0HiCMTTjs=',
},
])(`should extract motionphoto video from $filepath`, async ({ filepath, checksum }) => {
const response = await utils.createAsset(admin.accessToken, {
assetData: {

@@ -353,7 +353,7 @@ describe('/libraries', () => {
|
||||
|
||||
expect(assets.count).toBe(2);
|
||||
|
||||
utils.createImageFile(`${testAssetDir}/temp/directoryA/assetB.png`);
|
||||
utils.createImageFile(`${testAssetDir}/temp/directoryA/assetC.png`);
|
||||
|
||||
await scan(admin.accessToken, library.id);
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', total: 3 });
|
||||
@@ -361,11 +361,11 @@ describe('/libraries', () => {
|
||||
const { assets: newAssets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
|
||||
|
||||
expect(newAssets.count).toBe(3);
|
||||
utils.removeImageFile(`${testAssetDir}/temp/directoryA/assetB.png`);
|
||||
utils.removeImageFile(`${testAssetDir}/temp/directoryA/assetC.png`);
|
||||
});
|
||||
|
||||
it('should offline missing files', async () => {
|
||||
utils.createImageFile(`${testAssetDir}/temp/directoryA/assetB.png`);
|
||||
it('should offline a file missing from disk', async () => {
|
||||
utils.createImageFile(`${testAssetDir}/temp/directoryA/assetC.png`);
|
||||
const library = await utils.createLibrary(admin.accessToken, {
|
||||
ownerId: admin.userId,
|
||||
importPaths: [`${testAssetDirInternal}/temp`],
|
||||
@@ -374,7 +374,40 @@ describe('/libraries', () => {
|
||||
await scan(admin.accessToken, library.id);
|
||||
await utils.waitForQueueFinish(admin.accessToken, 'library');
|
||||
|
||||
utils.removeImageFile(`${testAssetDir}/temp/directoryA/assetB.png`);
|
||||
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
|
||||
expect(assets.count).toBe(3);
|
||||
|
||||
utils.removeImageFile(`${testAssetDir}/temp/directoryA/assetC.png`);
|
||||
|
||||
await scan(admin.accessToken, library.id);
|
||||
await utils.waitForQueueFinish(admin.accessToken, 'library');
|
||||
|
||||
const { assets: newAssets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
|
||||
expect(newAssets.count).toBe(3);
|
||||
|
||||
expect(newAssets.items).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
isOffline: true,
|
||||
originalFileName: 'assetC.png',
|
||||
}),
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
it('should offline a file outside of import paths', async () => {
|
||||
const library = await utils.createLibrary(admin.accessToken, {
|
||||
ownerId: admin.userId,
|
||||
importPaths: [`${testAssetDirInternal}/temp`],
|
||||
});
|
||||
|
||||
await scan(admin.accessToken, library.id);
|
||||
await utils.waitForQueueFinish(admin.accessToken, 'library');
|
||||
|
||||
await request(app)
|
||||
.put(`/libraries/${library.id}`)
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`)
|
||||
.send({ importPaths: [`${testAssetDirInternal}/temp/directoryA`] });
|
||||
|
||||
await scan(admin.accessToken, library.id);
|
||||
await utils.waitForQueueFinish(admin.accessToken, 'library');
|
||||
@@ -383,6 +416,45 @@ describe('/libraries', () => {
|
||||
|
||||
expect(assets.items).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
isOffline: false,
|
||||
originalFileName: 'assetA.png',
|
||||
}),
|
||||
expect.objectContaining({
|
||||
isOffline: true,
|
||||
originalFileName: 'assetB.png',
|
||||
}),
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
it('should offline a file covered by an exclusion pattern', async () => {
|
||||
const library = await utils.createLibrary(admin.accessToken, {
|
||||
ownerId: admin.userId,
|
||||
importPaths: [`${testAssetDirInternal}/temp`],
|
||||
});
|
||||
|
||||
await scan(admin.accessToken, library.id);
|
||||
await utils.waitForQueueFinish(admin.accessToken, 'library');
|
||||
|
||||
await request(app)
|
||||
.put(`/libraries/${library.id}`)
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`)
|
||||
.send({ exclusionPatterns: ['**/directoryB/**'] });
|
||||
|
||||
await scan(admin.accessToken, library.id);
|
||||
await utils.waitForQueueFinish(admin.accessToken, 'library');
|
||||
|
||||
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
|
||||
|
||||
expect(assets.count).toBe(2);
|
||||
|
||||
expect(assets.items).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
isOffline: false,
|
||||
originalFileName: 'assetA.png',
|
||||
}),
|
||||
expect.objectContaining({
|
||||
isOffline: true,
|
||||
originalFileName: 'assetB.png',
|
||||
@@ -434,6 +506,8 @@ describe('/libraries', () => {
|
||||
await utils.waitForWebsocketEvent({ event: 'assetDelete', total: 1 });
|
||||
|
||||
expect(existsSync(`${testAssetDir}/temp/offline1/assetA.png`)).toBe(true);
|
||||
|
||||
utils.removeImageFile(`${testAssetDir}/temp/offline1/assetA.png`);
|
||||
});
|
||||
|
||||
it('should scan new files', async () => {
|
||||
@@ -445,14 +519,14 @@ describe('/libraries', () => {
|
||||
await scan(admin.accessToken, library.id);
|
||||
await utils.waitForQueueFinish(admin.accessToken, 'library');
|
||||
|
||||
utils.createImageFile(`${testAssetDir}/temp/directoryA/assetC.png`);
|
||||
utils.createImageFile(`${testAssetDir}/temp/directoryC/assetC.png`);
|
||||
|
||||
await scan(admin.accessToken, library.id);
|
||||
await utils.waitForQueueFinish(admin.accessToken, 'library');
|
||||
|
||||
utils.removeImageFile(`${testAssetDir}/temp/directoryA/assetC.png`);
|
||||
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
|
||||
|
||||
expect(assets.count).toBe(3);
|
||||
expect(assets.items).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
@@ -460,6 +534,8 @@ describe('/libraries', () => {
|
||||
}),
|
||||
]),
|
||||
);
|
||||
|
||||
utils.removeImageFile(`${testAssetDir}/temp/directoryC/assetC.png`);
|
||||
});
|
||||
|
||||
describe('with refreshModifiedFiles=true', () => {
|
||||
@@ -559,10 +635,11 @@ describe('/libraries', () => {
|
||||
it('should remove offline files', async () => {
|
||||
const library = await utils.createLibrary(admin.accessToken, {
|
||||
ownerId: admin.userId,
|
||||
importPaths: [`${testAssetDirInternal}/temp/offline2`],
|
||||
importPaths: [`${testAssetDirInternal}/temp/offline`],
|
||||
});
|
||||
|
||||
utils.createImageFile(`${testAssetDir}/temp/offline2/assetA.png`);
|
||||
utils.createImageFile(`${testAssetDir}/temp/offline/online.png`);
|
||||
utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);
|
||||
|
||||
await scan(admin.accessToken, library.id);
|
||||
await utils.waitForQueueFinish(admin.accessToken, 'library');
|
||||
@@ -570,9 +647,9 @@ describe('/libraries', () => {
|
||||
const { assets: initialAssets } = await utils.metadataSearch(admin.accessToken, {
|
||||
libraryId: library.id,
|
||||
});
|
||||
expect(initialAssets.count).toBe(1);
|
||||
expect(initialAssets.count).toBe(2);
|
||||
|
||||
utils.removeImageFile(`${testAssetDir}/temp/offline2/assetA.png`);
|
||||
utils.removeImageFile(`${testAssetDir}/temp/offline/offline.png`);
|
||||
|
||||
await scan(admin.accessToken, library.id);
|
||||
await utils.waitForQueueFinish(admin.accessToken, 'library');
|
||||
@@ -593,7 +670,54 @@ describe('/libraries', () => {
|
||||
|
||||
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
|
||||
|
||||
expect(assets.count).toBe(0);
|
||||
expect(assets.count).toBe(1);
|
||||
|
||||
utils.removeImageFile(`${testAssetDir}/temp/offline/online.png`);
|
||||
});
|
||||
|
||||
it('should remove offline files from trash', async () => {
|
||||
const library = await utils.createLibrary(admin.accessToken, {
|
||||
ownerId: admin.userId,
|
||||
importPaths: [`${testAssetDirInternal}/temp/offline`],
|
||||
});
|
||||
|
||||
utils.createImageFile(`${testAssetDir}/temp/offline/online.png`);
|
||||
utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);
|
||||
|
||||
await scan(admin.accessToken, library.id);
|
||||
await utils.waitForQueueFinish(admin.accessToken, 'library');
|
||||
|
||||
const { assets: initialAssets } = await utils.metadataSearch(admin.accessToken, {
|
||||
libraryId: library.id,
|
||||
});
|
||||
|
||||
expect(initialAssets.count).toBe(2);
|
||||
utils.removeImageFile(`${testAssetDir}/temp/offline/offline.png`);
|
||||
|
||||
await scan(admin.accessToken, library.id);
|
||||
await utils.waitForQueueFinish(admin.accessToken, 'library');
|
||||
|
||||
const { assets: offlineAssets } = await utils.metadataSearch(admin.accessToken, {
|
||||
libraryId: library.id,
|
||||
isOffline: true,
|
||||
});
|
||||
expect(offlineAssets.count).toBe(1);
|
||||
|
||||
const { status } = await request(app)
|
||||
.post(`/libraries/${library.id}/removeOffline`)
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`)
|
||||
.send();
|
||||
expect(status).toBe(204);
|
||||
await utils.waitForQueueFinish(admin.accessToken, 'library');
|
||||
await utils.waitForQueueFinish(admin.accessToken, 'backgroundTask');
|
||||
|
||||
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
|
||||
|
||||
expect(assets.count).toBe(1);
|
||||
expect(assets.items[0].isOffline).toBe(false);
|
||||
expect(assets.items[0].originalPath).toEqual(`${testAssetDirInternal}/temp/offline/online.png`);
|
||||
|
||||
utils.removeImageFile(`${testAssetDir}/temp/offline/online.png`);
|
||||
});
|
||||
|
||||
it('should not remove online files', async () => {
|
||||
|
||||
@@ -159,4 +159,75 @@ describe('/map', () => {
expect(body).toEqual(expect.objectContaining({ id: 'immich-map-dark' }));
});
});

describe('GET /map/reverse-geocode', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/map/reverse-geocode');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should throw an error if a lat is not provided', async () => {
const { status, body } = await request(app)
.get('/map/reverse-geocode?lon=123')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['lat must be a number between -90 and 90']));
});

it('should throw an error if a lat is not a number', async () => {
const { status, body } = await request(app)
.get('/map/reverse-geocode?lat=abc&lon=123.456')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['lat must be a number between -90 and 90']));
});

it('should throw an error if a lat is out of range', async () => {
const { status, body } = await request(app)
.get('/map/reverse-geocode?lat=91&lon=123.456')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['lat must be a number between -90 and 90']));
});

it('should throw an error if a lon is not provided', async () => {
const { status, body } = await request(app)
.get('/map/reverse-geocode?lat=75')
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['lon must be a number between -180 and 180']));
});

const reverseGeocodeTestCases = [
{
name: 'Vaucluse',
lat: -33.858_977_058_663_13,
lon: 151.278_490_730_270_48,
results: [{ city: 'Vaucluse', state: 'New South Wales', country: 'Australia' }],
},
{
name: 'Ravenhall',
lat: -37.765_732_399_174_75,
lon: 144.752_453_164_883_3,
results: [{ city: 'Ravenhall', state: 'Victoria', country: 'Australia' }],
},
{
name: 'Scarborough',
lat: -31.894_346_156_789_997,
lon: 115.757_617_103_904_64,
results: [{ city: 'Scarborough', state: 'Western Australia', country: 'Australia' }],
},
];

it.each(reverseGeocodeTestCases)(`should resolve to $name`, async ({ lat, lon, results }) => {
const { status, body } = await request(app)
.get(`/map/reverse-geocode?lat=${lat}&lon=${lon}`)
.set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(Array.isArray(body)).toBe(true);
expect(body.length).toBe(results.length);
expect(body).toEqual(results);
});
});
});

@@ -1,12 +1,85 @@
|
||||
import {
|
||||
LoginResponseDto,
|
||||
SystemConfigOAuthDto,
|
||||
getConfigDefaults,
|
||||
getMyUser,
|
||||
startOAuth,
|
||||
updateConfig,
|
||||
} from '@immich/sdk';
|
||||
import { errorDto } from 'src/responses';
|
||||
import { app, utils } from 'src/utils';
|
||||
import { OAuthClient, OAuthUser } from 'src/setup/auth-server';
|
||||
import { app, asBearerAuth, baseUrl, utils } from 'src/utils';
|
||||
import request from 'supertest';
|
||||
import { beforeAll, describe, expect, it } from 'vitest';
|
||||
|
||||
const authServer = {
|
||||
internal: 'http://auth-server:3000',
|
||||
external: 'http://127.0.0.1:3000',
|
||||
};
|
||||
|
||||
const redirect = async (url: string, cookies?: string[]) => {
|
||||
const { headers } = await request(url)
|
||||
.get('/')
|
||||
.set('Cookie', cookies || []);
|
||||
return { cookies: (headers['set-cookie'] as unknown as string[]) || [], location: headers.location };
|
||||
};
|
||||
|
||||
const loginWithOAuth = async (sub: OAuthUser | string) => {
|
||||
const { url } = await startOAuth({ oAuthConfigDto: { redirectUri: `${baseUrl}/auth/login` } });
|
||||
|
||||
// login
|
||||
const response1 = await redirect(url.replace(authServer.internal, authServer.external));
|
||||
const response2 = await request(authServer.external + response1.location)
|
||||
.post('/')
|
||||
.set('Cookie', response1.cookies)
|
||||
.type('form')
|
||||
.send({ prompt: 'login', login: sub, password: 'password' });
|
||||
|
||||
// approve
|
||||
const response3 = await redirect(response2.header.location, response1.cookies);
|
||||
const response4 = await request(authServer.external + response3.location)
|
||||
.post('/')
|
||||
.type('form')
|
||||
.set('Cookie', response3.cookies)
|
||||
.send({ prompt: 'consent' });
|
||||
|
||||
const response5 = await redirect(response4.header.location, response3.cookies.slice(1));
|
||||
const redirectUrl = response5.location;
|
||||
|
||||
expect(redirectUrl).toBeDefined();
|
||||
const params = new URL(redirectUrl).searchParams;
|
||||
expect(params.get('code')).toBeDefined();
|
||||
expect(params.get('state')).toBeDefined();
|
||||
|
||||
return redirectUrl;
|
||||
};
|
||||
|
||||
const setupOAuth = async (token: string, dto: Partial<SystemConfigOAuthDto>) => {
|
||||
const options = { headers: asBearerAuth(token) };
|
||||
const defaults = await getConfigDefaults(options);
|
||||
const merged = {
|
||||
...defaults.oauth,
|
||||
buttonText: 'Login with Immich',
|
||||
issuerUrl: `${authServer.internal}/.well-known/openid-configuration`,
|
||||
...dto,
|
||||
};
|
||||
await updateConfig({ systemConfigDto: { ...defaults, oauth: merged } }, options);
|
||||
};
|
||||
|
||||
describe(`/oauth`, () => {
|
||||
let admin: LoginResponseDto;
|
||||
|
||||
beforeAll(async () => {
|
||||
await utils.resetDatabase();
|
||||
await utils.adminSetup();
|
||||
admin = await utils.adminSetup();
|
||||
|
||||
await setupOAuth(admin.accessToken, {
|
||||
enabled: true,
|
||||
clientId: OAuthClient.DEFAULT,
|
||||
clientSecret: OAuthClient.DEFAULT,
|
||||
buttonText: 'Login with Immich',
|
||||
storageLabelClaim: 'immich_username',
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /oauth/authorize', () => {
|
||||
@@ -15,5 +88,171 @@ describe(`/oauth`, () => {
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['redirectUri must be a string', 'redirectUri should not be empty']));
|
||||
});
|
||||
|
||||
it('should return a redirect uri', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/oauth/authorize')
|
||||
.send({ redirectUri: 'http://127.0.0.1:2285/auth/login' });
|
||||
expect(status).toBe(201);
|
||||
expect(body).toEqual({ url: expect.stringContaining(`${authServer.internal}/auth?`) });
|
||||
|
||||
const params = new URL(body.url).searchParams;
|
||||
expect(params.get('client_id')).toBe('client-default');
|
||||
expect(params.get('response_type')).toBe('code');
|
||||
expect(params.get('redirect_uri')).toBe('http://127.0.0.1:2285/auth/login');
|
||||
expect(params.get('state')).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /oauth/callback', () => {
|
||||
it(`should throw an error if a url is not provided`, async () => {
|
||||
const { status, body } = await request(app).post('/oauth/callback').send({});
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['url must be a string', 'url should not be empty']));
|
||||
});
|
||||
|
||||
it(`should throw an error if the url is empty`, async () => {
|
||||
const { status, body } = await request(app).post('/oauth/callback').send({ url: '' });
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['url should not be empty']));
|
||||
});
|
||||
|
||||
it('should auto register the user by default', async () => {
|
||||
const url = await loginWithOAuth('oauth-auto-register');
|
||||
const { status, body } = await request(app).post('/oauth/callback').send({ url });
|
||||
expect(status).toBe(201);
|
||||
expect(body).toMatchObject({
|
||||
accessToken: expect.any(String),
|
||||
isAdmin: false,
|
||||
name: 'OAuth User',
|
||||
userEmail: 'oauth-auto-register@immich.app',
|
||||
userId: expect.any(String),
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle a user without an email', async () => {
|
||||
const url = await loginWithOAuth(OAuthUser.NO_EMAIL);
|
||||
const { status, body } = await request(app).post('/oauth/callback').send({ url });
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest('OAuth profile does not have an email address'));
|
||||
});
|
||||
|
||||
it('should set the quota from a claim', async () => {
|
||||
const url = await loginWithOAuth(OAuthUser.WITH_QUOTA);
|
||||
const { status, body } = await request(app).post('/oauth/callback').send({ url });
|
||||
expect(status).toBe(201);
|
||||
expect(body).toMatchObject({
|
||||
accessToken: expect.any(String),
|
||||
userId: expect.any(String),
|
||||
userEmail: 'oauth-with-quota@immich.app',
|
||||
});
|
||||
|
||||
const user = await getMyUser({ headers: asBearerAuth(body.accessToken) });
|
||||
expect(user.quotaSizeInBytes).toBe(25 * 2 ** 30); // 25 GiB;
|
||||
});
|
||||
|
||||
it('should set the storage label from a claim', async () => {
|
||||
const url = await loginWithOAuth(OAuthUser.WITH_USERNAME);
|
||||
const { status, body } = await request(app).post('/oauth/callback').send({ url });
|
||||
expect(status).toBe(201);
|
||||
expect(body).toMatchObject({
|
||||
accessToken: expect.any(String),
|
||||
userId: expect.any(String),
|
||||
userEmail: 'oauth-with-username@immich.app',
|
||||
});
|
||||
|
||||
const user = await getMyUser({ headers: asBearerAuth(body.accessToken) });
|
||||
expect(user.storageLabel).toBe('user-username');
|
||||
});
|
||||
|
||||
it('should work with RS256 signed tokens', async () => {
|
||||
await setupOAuth(admin.accessToken, {
|
||||
enabled: true,
|
||||
clientId: OAuthClient.RS256_TOKENS,
|
||||
clientSecret: OAuthClient.RS256_TOKENS,
|
||||
autoRegister: true,
|
||||
buttonText: 'Login with Immich',
|
||||
signingAlgorithm: 'RS256',
|
||||
});
|
||||
const url = await loginWithOAuth('oauth-RS256-token');
|
||||
const { status, body } = await request(app).post('/oauth/callback').send({ url });
|
||||
expect(status).toBe(201);
|
||||
expect(body).toMatchObject({
|
||||
accessToken: expect.any(String),
|
||||
isAdmin: false,
|
||||
name: 'OAuth User',
|
||||
userEmail: 'oauth-RS256-token@immich.app',
|
||||
userId: expect.any(String),
|
||||
});
|
||||
});
|
||||
|
||||
it('should work with RS256 signed user profiles', async () => {
|
||||
await setupOAuth(admin.accessToken, {
|
||||
enabled: true,
|
||||
clientId: OAuthClient.RS256_PROFILE,
|
||||
clientSecret: OAuthClient.RS256_PROFILE,
|
||||
buttonText: 'Login with Immich',
|
||||
profileSigningAlgorithm: 'RS256',
|
||||
});
|
||||
const url = await loginWithOAuth('oauth-signed-profile');
|
||||
const { status, body } = await request(app).post('/oauth/callback').send({ url });
|
||||
expect(status).toBe(201);
|
||||
expect(body).toMatchObject({
|
||||
userId: expect.any(String),
|
||||
userEmail: 'oauth-signed-profile@immich.app',
|
||||
});
|
||||
});
|
||||
|
||||
it('should throw an error for an invalid token algorithm', async () => {
|
||||
await setupOAuth(admin.accessToken, {
|
||||
enabled: true,
|
||||
clientId: OAuthClient.DEFAULT,
|
||||
clientSecret: OAuthClient.DEFAULT,
|
||||
buttonText: 'Login with Immich',
|
||||
signingAlgorithm: 'something-that-does-not-work',
|
||||
});
|
||||
const url = await loginWithOAuth('oauth-signed-bad');
|
||||
const { status, body } = await request(app).post('/oauth/callback').send({ url });
|
||||
expect(status).toBe(500);
|
||||
expect(body).toMatchObject({
|
||||
error: 'Internal Server Error',
|
||||
message: 'Failed to finish oauth',
|
||||
statusCode: 500,
|
||||
});
|
||||
});
|
||||
|
||||
describe('autoRegister: false', () => {
|
||||
beforeAll(async () => {
|
||||
await setupOAuth(admin.accessToken, {
|
||||
enabled: true,
|
||||
clientId: OAuthClient.DEFAULT,
|
||||
clientSecret: OAuthClient.DEFAULT,
|
||||
autoRegister: false,
|
||||
buttonText: 'Login with Immich',
|
||||
});
|
||||
});
|
||||
|
||||
it('should not auto register the user', async () => {
|
||||
const url = await loginWithOAuth('oauth-no-auto-register');
|
||||
const { status, body } = await request(app).post('/oauth/callback').send({ url });
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest('User does not exist and auto registering is disabled.'));
|
||||
});
|
||||
|
||||
it('should link to an existing user by email', async () => {
|
||||
const { userId } = await utils.userSetup(admin.accessToken, {
|
||||
name: 'OAuth User 3',
|
||||
email: 'oauth-user3@immich.app',
|
||||
password: 'password',
|
||||
});
|
||||
const url = await loginWithOAuth('oauth-user3');
|
||||
const { status, body } = await request(app).post('/oauth/callback').send({ url });
|
||||
expect(status).toBe(201);
|
||||
expect(body).toMatchObject({
|
||||
userId,
|
||||
userEmail: 'oauth-user3@immich.app',
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -6,10 +6,19 @@ import request from 'supertest';
import { beforeAll, beforeEach, describe, expect, it } from 'vitest';

const invalidBirthday = [
{ birthDate: 'false', response: 'birthDate must be a date string' },
{ birthDate: '123567', response: 'birthDate must be a date string' },
{ birthDate: 123_567, response: 'birthDate must be a date string' },
{ birthDate: new Date(9999, 0, 0).toISOString(), response: ['Birth date cannot be in the future'] },
{
birthDate: 'false',
response: ['birthDate must be a string in the format yyyy-MM-dd', 'Birth date cannot be in the future'],
},
{
birthDate: '123567',
response: ['birthDate must be a string in the format yyyy-MM-dd', 'Birth date cannot be in the future'],
},
{
birthDate: 123_567,
response: ['birthDate must be a string in the format yyyy-MM-dd', 'Birth date cannot be in the future'],
},
{ birthDate: '9999-01-01', response: ['Birth date cannot be in the future'] },
];

describe('/people', () => {
@@ -65,6 +74,7 @@ describe('/people', () => {

expect(status).toBe(200);
expect(body).toEqual({
hasNextPage: false,
total: 3,
hidden: 1,
people: [
@@ -80,6 +90,7 @@ describe('/people', () => {

expect(status).toBe(200);
expect(body).toEqual({
hasNextPage: false,
total: 3,
hidden: 1,
people: [
@@ -88,6 +99,21 @@ describe('/people', () => {
],
});
});

it('should support pagination', async () => {
const { status, body } = await request(app)
.get('/people')
.set('Authorization', `Bearer ${admin.accessToken}`)
.query({ withHidden: true, page: 2, size: 1 });

expect(status).toBe(200);
expect(body).toEqual({
hasNextPage: true,
total: 3,
hidden: 1,
people: [expect.objectContaining({ name: 'visible_person' })],
});
});
});

describe('GET /people/:id', () => {
@@ -168,13 +194,13 @@ describe('/people', () => {
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({
name: 'New Person',
birthDate: '1990-01-01T05:00:00.000Z',
birthDate: '1990-01-01',
});
expect(status).toBe(201);
expect(body).toMatchObject({
id: expect.any(String),
name: 'New Person',
birthDate: '1990-01-01T05:00:00.000Z',
birthDate: '1990-01-01',
});
});
});
@@ -216,7 +242,7 @@ describe('/people', () => {
const { status, body } = await request(app)
.put(`/people/${visiblePerson.id}`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ birthDate: '1990-01-01T05:00:00.000Z' });
.send({ birthDate: '1990-01-01' });
expect(status).toBe(200);
expect(body).toMatchObject({ birthDate: '1990-01-01' });
});

@@ -1,4 +1,4 @@
|
||||
import { AssetMediaResponseDto, LoginResponseDto, deleteAssets, getMapMarkers, updateAsset } from '@immich/sdk';
|
||||
import { AssetMediaResponseDto, LoginResponseDto, deleteAssets, updateAsset } from '@immich/sdk';
|
||||
import { DateTime } from 'luxon';
|
||||
import { readFile } from 'node:fs/promises';
|
||||
import { join } from 'node:path';
|
||||
@@ -32,9 +32,6 @@ describe('/search', () => {
|
||||
let assetOneJpg5: AssetMediaResponseDto;
|
||||
let assetSprings: AssetMediaResponseDto;
|
||||
let assetLast: AssetMediaResponseDto;
|
||||
let cities: string[];
|
||||
let states: string[];
|
||||
let countries: string[];
|
||||
|
||||
beforeAll(async () => {
|
||||
await utils.resetDatabase();
|
||||
@@ -49,9 +46,9 @@ describe('/search', () => {
|
||||
{ filename: '/albums/nature/silver_fir.jpg' },
|
||||
{ filename: '/formats/heic/IMG_2682.heic' },
|
||||
{ filename: '/formats/jpg/el_torcal_rocks.jpg' },
|
||||
{ filename: '/formats/motionphoto/Samsung One UI 6.jpg' },
|
||||
{ filename: '/formats/motionphoto/Samsung One UI 6.heic' },
|
||||
{ filename: '/formats/motionphoto/Samsung One UI 5.jpg' },
|
||||
{ filename: '/formats/motionphoto/samsung-one-ui-6.jpg' },
|
||||
{ filename: '/formats/motionphoto/samsung-one-ui-6.heic' },
|
||||
{ filename: '/formats/motionphoto/samsung-one-ui-5.jpg' },
|
||||
|
||||
{ filename: '/metadata/gps-position/thompson-springs.jpg', dto: { isArchived: true } },
|
||||
|
||||
@@ -85,7 +82,7 @@ describe('/search', () => {
|
||||
// note: the coordinates here are not the actual coordinates of the images and are random for most of them
|
||||
const coordinates = [
|
||||
{ latitude: 48.853_41, longitude: 2.3488 }, // paris
|
||||
{ latitude: 63.0695, longitude: -151.0074 }, // denali
|
||||
{ latitude: 35.6895, longitude: 139.691_71 }, // tokyo
|
||||
{ latitude: 52.524_37, longitude: 13.410_53 }, // berlin
|
||||
{ latitude: 1.314_663_1, longitude: 103.845_409_3 }, // singapore
|
||||
{ latitude: 41.013_84, longitude: 28.949_66 }, // istanbul
|
||||
@@ -101,16 +98,15 @@ describe('/search', () => {
|
||||
{ latitude: 31.634_16, longitude: -7.999_94 }, // marrakesh
|
||||
{ latitude: 38.523_735_4, longitude: -78.488_619_4 }, // tanners ridge
|
||||
{ latitude: 59.938_63, longitude: 30.314_13 }, // st. petersburg
|
||||
{ latitude: 35.6895, longitude: 139.691_71 }, // tokyo
|
||||
];
|
||||
|
||||
const updates = assets.map((asset, i) =>
|
||||
updateAsset({ id: asset.id, updateAssetDto: coordinates[i] }, { headers: asBearerAuth(admin.accessToken) }),
|
||||
const updates = coordinates.map((dto, i) =>
|
||||
updateAsset({ id: assets[i].id, updateAssetDto: dto }, { headers: asBearerAuth(admin.accessToken) }),
|
||||
);
|
||||
|
||||
await Promise.all(updates);
|
||||
for (const asset of assets) {
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpdate', id: asset.id });
|
||||
for (const [i] of coordinates.entries()) {
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpdate', id: assets[i].id });
|
||||
}
|
||||
|
||||
[
|
||||
@@ -137,12 +133,6 @@ describe('/search', () => {
|
||||
assetLast = assets.at(-1) as AssetMediaResponseDto;
|
||||
|
||||
await deleteAssets({ assetBulkDeleteDto: { ids: [assetSilver.id] } }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
const mapMarkers = await getMapMarkers({}, { headers: asBearerAuth(admin.accessToken) });
|
||||
const nonTrashed = mapMarkers.filter((mark) => mark.id !== assetSilver.id);
|
||||
cities = [...new Set(nonTrashed.map((mark) => mark.city).filter((entry): entry is string => !!entry))].sort();
|
||||
states = [...new Set(nonTrashed.map((mark) => mark.state).filter((entry): entry is string => !!entry))].sort();
|
||||
countries = [...new Set(nonTrashed.map((mark) => mark.country).filter((entry): entry is string => !!entry))].sort();
|
||||
}, 30_000);
|
||||
|
||||
afterAll(async () => {
|
||||
@@ -315,29 +305,126 @@ describe('/search', () => {
|
||||
{
|
||||
should: 'should search by originalFilename with spaces',
|
||||
deferred: () => ({
|
||||
dto: { originalFileName: 'Samsung One', type: 'IMAGE' },
|
||||
dto: { originalFileName: 'samsung-one', type: 'IMAGE' },
|
||||
assets: [assetOneJpg5, assetOneJpg6, assetOneHeic6],
|
||||
}),
|
||||
},
|
||||
{
|
||||
should: 'should search by city',
|
||||
deferred: () => ({ dto: { city: 'Accra' }, assets: [assetHeic] }),
|
||||
deferred: () => ({
|
||||
dto: {
|
||||
city: 'Accra',
|
||||
includeNull: true,
|
||||
},
|
||||
assets: [assetHeic],
|
||||
}),
|
||||
},
|
||||
{
|
||||
should: "should search city ('')",
|
||||
deferred: () => ({
|
||||
dto: {
|
||||
city: '',
|
||||
isVisible: true,
|
||||
includeNull: true,
|
||||
},
|
||||
assets: [assetLast],
|
||||
}),
|
||||
},
|
||||
{
|
||||
should: 'should search city (null)',
|
||||
deferred: () => ({
|
||||
dto: {
|
||||
city: null,
|
||||
isVisible: true,
|
||||
includeNull: true,
|
||||
},
|
||||
assets: [assetLast],
|
||||
}),
|
||||
},
|
||||
{
|
||||
should: 'should search by state',
|
||||
deferred: () => ({ dto: { state: 'New York' }, assets: [assetDensity] }),
|
||||
deferred: () => ({
|
||||
dto: {
|
||||
state: 'New York',
|
||||
includeNull: true,
|
||||
},
|
||||
assets: [assetDensity],
|
||||
}),
|
||||
},
|
||||
{
|
||||
should: "should search state ('')",
|
||||
deferred: () => ({
|
||||
dto: {
|
||||
state: '',
|
||||
isVisible: true,
|
||||
withExif: true,
|
||||
includeNull: true,
|
||||
},
|
||||
assets: [assetLast, assetNotocactus],
|
||||
}),
|
||||
},
|
||||
{
|
||||
should: 'should search state (null)',
|
||||
deferred: () => ({
|
||||
dto: {
|
||||
state: null,
|
||||
isVisible: true,
|
||||
includeNull: true,
|
||||
},
|
||||
assets: [assetLast, assetNotocactus],
|
||||
}),
|
||||
},
|
||||
{
|
||||
should: 'should search by country',
|
||||
deferred: () => ({ dto: { country: 'France' }, assets: [assetFalcon] }),
|
||||
deferred: () => ({
|
||||
dto: {
|
||||
country: 'France',
|
||||
includeNull: true,
|
||||
},
|
||||
assets: [assetFalcon],
|
||||
}),
|
||||
},
|
||||
{
|
||||
should: "should search country ('')",
|
||||
deferred: () => ({
|
||||
dto: {
|
||||
country: '',
|
||||
isVisible: true,
|
||||
includeNull: true,
|
||||
},
|
||||
assets: [assetLast],
|
||||
}),
|
||||
},
|
||||
{
|
||||
should: 'should search country (null)',
|
||||
deferred: () => ({
|
||||
dto: {
|
||||
country: null,
|
||||
isVisible: true,
|
||||
includeNull: true,
|
||||
},
|
||||
assets: [assetLast],
|
||||
}),
|
||||
},
|
||||
{
|
||||
should: 'should search by make',
|
||||
deferred: () => ({ dto: { make: 'Canon' }, assets: [assetFalcon, assetDenali] }),
|
||||
deferred: () => ({
|
||||
dto: {
|
||||
make: 'Canon',
|
||||
includeNull: true,
|
||||
},
|
||||
assets: [assetFalcon, assetDenali],
|
||||
}),
|
||||
},
|
||||
{
|
||||
should: 'should search by model',
|
||||
deferred: () => ({ dto: { model: 'Canon EOS 7D' }, assets: [assetDenali] }),
|
||||
deferred: () => ({
|
||||
dto: {
|
||||
model: 'Canon EOS 7D',
|
||||
includeNull: true,
|
||||
},
|
||||
assets: [assetDenali],
|
||||
}),
|
||||
},
|
||||
{
|
||||
should: 'should allow searching the upload library (libraryId: null)',
|
||||
@@ -450,32 +537,79 @@ describe('/search', () => {
|
||||
|
||||
it('should get suggestions for country', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/search/suggestions?type=country')
|
||||
.get('/search/suggestions?type=country&includeNull=true')
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(body).toEqual(countries);
|
||||
expect(body).toEqual([
|
||||
'Cuba',
|
||||
'France',
|
||||
'Georgia',
|
||||
'Germany',
|
||||
'Ghana',
|
||||
'Japan',
|
||||
'Morocco',
|
||||
"People's Republic of China",
|
||||
'Russian Federation',
|
||||
'Singapore',
|
||||
'Spain',
|
||||
'Switzerland',
|
||||
'United States of America',
|
||||
null,
|
||||
]);
|
||||
expect(status).toBe(200);
|
||||
});
|
||||
|
||||
it('should get suggestions for state', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/search/suggestions?type=state')
|
||||
.get('/search/suggestions?type=state&includeNull=true')
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(body).toHaveLength(states.length);
|
||||
expect(body).toEqual(expect.arrayContaining(states));
|
||||
expect(body).toEqual([
|
||||
'Andalusia',
|
||||
'Berlin',
|
||||
'Glarus',
|
||||
'Greater Accra',
|
||||
'Havana',
|
||||
'Île-de-France',
|
||||
'Marrakesh-Safi',
|
||||
'Mississippi',
|
||||
'New York',
|
||||
'Shanghai',
|
||||
'St.-Petersburg',
|
||||
'Tbilisi',
|
||||
'Tokyo',
|
||||
'Virginia',
|
||||
null,
|
||||
]);
|
||||
expect(status).toBe(200);
|
||||
});
|
||||
|
||||
it('should get suggestions for city', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/search/suggestions?type=city')
|
||||
.get('/search/suggestions?type=city&includeNull=true')
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(body).toEqual(cities);
|
||||
expect(body).toEqual([
|
||||
'Accra',
|
||||
'Berlin',
|
||||
'Glarus',
|
||||
'Havana',
|
||||
'Marrakesh',
|
||||
'Montalbán de Córdoba',
|
||||
'New York City',
|
||||
'Novena',
|
||||
'Paris',
|
||||
'Philadelphia',
|
||||
'Saint Petersburg',
|
||||
'Shanghai',
|
||||
'Stanley',
|
||||
'Tbilisi',
|
||||
'Tokyo',
|
||||
null,
|
||||
]);
|
||||
expect(status).toBe(200);
|
||||
});
|
||||
|
||||
it('should get suggestions for camera make', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/search/suggestions?type=camera-make')
|
||||
.get('/search/suggestions?type=camera-make&includeNull=true')
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(body).toEqual([
|
||||
'Apple',
|
||||
@@ -485,13 +619,14 @@ describe('/search', () => {
|
||||
'PENTAX Corporation',
|
||||
'samsung',
|
||||
'SONY',
|
||||
null,
|
||||
]);
|
||||
expect(status).toBe(200);
|
||||
});
|
||||
|
||||
it('should get suggestions for camera model', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/search/suggestions?type=camera-model')
|
||||
.get('/search/suggestions?type=camera-model&includeNull=true')
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(body).toEqual([
|
||||
'Canon EOS 7D',
|
||||
@@ -506,6 +641,7 @@ describe('/search', () => {
|
||||
'SM-F711N',
|
||||
'SM-S906U',
|
||||
'SM-T970',
|
||||
null,
|
||||
]);
|
||||
expect(status).toBe(200);
|
||||
});
|
||||
|
||||
@@ -254,7 +254,7 @@ describe('/server', () => {
.set('Authorization', `Bearer ${admin.accessToken}`)
.send(serverLicense);
const { status } = await request(app).get('/server/license').set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
expect(status).toBe(404);
});
});

@@ -112,6 +112,13 @@ describe('/shared-links', () => {
expect(resp.header['content-type']).toContain('text/html');
expect(resp.text).toContain(`<meta name="description" content="1 shared photos & videos" />`);
});

it('should have fqdn og:image meta tag for shared asset', async () => {
const resp = await request(shareUrl).get(`/${linkWithAssets.key}`);
expect(resp.status).toBe(200);
expect(resp.header['content-type']).toContain('text/html');
expect(resp.text).toContain(`<meta property="og:image" content="http://`);
});
});

describe('GET /shared-links', () => {

e2e/src/api/specs/stack.e2e-spec.ts (new file, 211 lines)
@@ -0,0 +1,211 @@
|
||||
import { AssetMediaResponseDto, LoginResponseDto, searchStacks } from '@immich/sdk';
|
||||
import { createUserDto, uuidDto } from 'src/fixtures';
|
||||
import { errorDto } from 'src/responses';
|
||||
import { app, asBearerAuth, utils } from 'src/utils';
|
||||
import request from 'supertest';
|
||||
import { beforeAll, describe, expect, it } from 'vitest';
|
||||
|
||||
describe('/stacks', () => {
|
||||
let admin: LoginResponseDto;
|
||||
let user1: LoginResponseDto;
|
||||
let user2: LoginResponseDto;
|
||||
let asset: AssetMediaResponseDto;
|
||||
|
||||
beforeAll(async () => {
|
||||
await utils.resetDatabase();
|
||||
|
||||
admin = await utils.adminSetup();
|
||||
|
||||
[user1, user2] = await Promise.all([
|
||||
utils.userSetup(admin.accessToken, createUserDto.user1),
|
||||
utils.userSetup(admin.accessToken, createUserDto.user2),
|
||||
]);
|
||||
|
||||
asset = await utils.createAsset(user1.accessToken);
|
||||
});
|
||||
|
||||
describe('POST /stacks', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/stacks')
|
||||
.send({ assetIds: [asset.id] });
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require at least two assets', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/stacks')
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.send({ assetIds: [asset.id] });
|
||||
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest());
|
||||
});
|
||||
|
||||
it('should require a valid id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/stacks')
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.send({ assetIds: [uuidDto.invalid, uuidDto.invalid] });
|
||||
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest());
|
||||
});
|
||||
|
||||
it('should require access', async () => {
|
||||
const user2Asset = await utils.createAsset(user2.accessToken);
|
||||
const { status, body } = await request(app)
|
||||
.post('/stacks')
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.send({ assetIds: [asset.id, user2Asset.id] });
|
||||
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.noPermission);
|
||||
});
|
||||
|
||||
it('should create a stack', async () => {
|
||||
const [asset1, asset2] = await Promise.all([
|
||||
utils.createAsset(user1.accessToken),
|
||||
utils.createAsset(user1.accessToken),
|
||||
]);
|
||||
|
||||
const { status, body } = await request(app)
|
||||
.post('/stacks')
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.send({ assetIds: [asset1.id, asset2.id] });
|
||||
|
||||
expect(status).toBe(201);
|
||||
expect(body).toEqual({
|
||||
id: expect.any(String),
|
||||
primaryAssetId: asset1.id,
|
||||
assets: [expect.objectContaining({ id: asset1.id }), expect.objectContaining({ id: asset2.id })],
|
||||
});
|
||||
});
|
||||
|
||||
it('should merge an existing stack', async () => {
|
||||
const [asset1, asset2, asset3] = await Promise.all([
|
||||
utils.createAsset(user1.accessToken),
|
||||
utils.createAsset(user1.accessToken),
|
||||
utils.createAsset(user1.accessToken),
|
||||
]);
|
||||
|
||||
const response1 = await request(app)
|
||||
.post('/stacks')
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.send({ assetIds: [asset1.id, asset2.id] });
|
||||
|
||||
expect(response1.status).toBe(201);
|
||||
|
||||
const stacksBefore = await searchStacks({}, { headers: asBearerAuth(user1.accessToken) });
|
||||
|
||||
const { status, body } = await request(app)
|
||||
.post('/stacks')
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.send({ assetIds: [asset1.id, asset3.id] });
|
||||
|
||||
expect(status).toBe(201);
|
||||
expect(body).toEqual({
|
||||
id: expect.any(String),
|
||||
primaryAssetId: asset1.id,
|
||||
assets: expect.arrayContaining([
|
||||
expect.objectContaining({ id: asset1.id }),
|
||||
expect.objectContaining({ id: asset2.id }),
|
||||
expect.objectContaining({ id: asset3.id }),
|
||||
]),
|
||||
});
|
||||
|
||||
const stacksAfter = await searchStacks({}, { headers: asBearerAuth(user1.accessToken) });
|
||||
expect(stacksAfter.length).toBe(stacksBefore.length);
|
||||
});
|
||||
|
||||
// it('should require a valid parent id', async () => {
|
||||
// const { status, body } = await request(app)
|
||||
// .put('/assets')
|
||||
// .set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
// .send({ stackParentId: uuidDto.invalid, ids: [stackAssets[0].id] });
|
||||
|
||||
// expect(status).toBe(400);
|
||||
// expect(body).toEqual(errorDto.badRequest(['stackParentId must be a UUID']));
|
||||
// });
|
||||
});
|
||||
|
||||
// it('should require access to the parent', async () => {
|
||||
// const { status, body } = await request(app)
|
||||
// .put('/assets')
|
||||
// .set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
// .send({ stackParentId: stackAssets[3].id, ids: [user1Assets[0].id] });
|
||||
|
||||
// expect(status).toBe(400);
|
||||
// expect(body).toEqual(errorDto.noPermission);
|
||||
// });
|
||||
|
||||
// it('should add stack children', async () => {
|
||||
// const { status } = await request(app)
|
||||
// .put('/assets')
|
||||
// .set('Authorization', `Bearer ${stackUser.accessToken}`)
|
||||
// .send({ stackParentId: stackAssets[0].id, ids: [stackAssets[3].id] });
|
||||
|
||||
// expect(status).toBe(204);
|
||||
|
||||
// const asset = await getAssetInfo({ id: stackAssets[0].id }, { headers: asBearerAuth(stackUser.accessToken) });
|
||||
// expect(asset.stack).not.toBeUndefined();
|
||||
// expect(asset.stack).toEqual(expect.arrayContaining([expect.objectContaining({ id: stackAssets[3].id })]));
|
||||
// });
|
||||
|
||||
// it('should remove stack children', async () => {
|
||||
// const { status } = await request(app)
|
||||
// .put('/assets')
|
||||
// .set('Authorization', `Bearer ${stackUser.accessToken}`)
|
||||
// .send({ removeParent: true, ids: [stackAssets[1].id] });
|
||||
|
||||
// expect(status).toBe(204);
|
||||
|
||||
// const asset = await getAssetInfo({ id: stackAssets[0].id }, { headers: asBearerAuth(stackUser.accessToken) });
|
||||
// expect(asset.stack).not.toBeUndefined();
|
||||
// expect(asset.stack).toEqual(
|
||||
// expect.arrayContaining([
|
||||
// expect.objectContaining({ id: stackAssets[2].id }),
|
||||
// expect.objectContaining({ id: stackAssets[3].id }),
|
||||
// ]),
|
||||
// );
|
||||
// });
|
||||
|
||||
// it('should remove all stack children', async () => {
|
||||
// const { status } = await request(app)
|
||||
// .put('/assets')
|
||||
// .set('Authorization', `Bearer ${stackUser.accessToken}`)
|
||||
// .send({ removeParent: true, ids: [stackAssets[2].id, stackAssets[3].id] });
|
||||
|
||||
// expect(status).toBe(204);
|
||||
|
||||
// const asset = await getAssetInfo({ id: stackAssets[0].id }, { headers: asBearerAuth(stackUser.accessToken) });
|
||||
// expect(asset.stack).toBeUndefined();
|
||||
// });
|
||||
|
||||
// it('should merge stack children', async () => {
|
||||
// // create stack after previous test removed stack children
|
||||
// await updateAssets(
|
||||
// { assetBulkUpdateDto: { stackParentId: stackAssets[0].id, ids: [stackAssets[1].id, stackAssets[2].id] } },
|
||||
// { headers: asBearerAuth(stackUser.accessToken) },
|
||||
// );
|
||||
|
||||
// const { status } = await request(app)
|
||||
// .put('/assets')
|
||||
// .set('Authorization', `Bearer ${stackUser.accessToken}`)
|
||||
// .send({ stackParentId: stackAssets[3].id, ids: [stackAssets[0].id] });
|
||||
|
||||
// expect(status).toBe(204);
|
||||
|
||||
// const asset = await getAssetInfo({ id: stackAssets[3].id }, { headers: asBearerAuth(stackUser.accessToken) });
|
||||
// expect(asset.stack).not.toBeUndefined();
|
||||
// expect(asset.stack).toEqual(
|
||||
// expect.arrayContaining([
|
||||
// expect.objectContaining({ id: stackAssets[0].id }),
|
||||
// expect.objectContaining({ id: stackAssets[1].id }),
|
||||
// expect.objectContaining({ id: stackAssets[2].id }),
|
||||
// ]),
|
||||
// );
|
||||
// });
|
||||
});
|
||||
e2e/src/api/specs/tag.e2e-spec.ts (new file, 603 lines)
@@ -0,0 +1,603 @@
|
||||
import {
|
||||
AssetMediaResponseDto,
|
||||
LoginResponseDto,
|
||||
Permission,
|
||||
TagCreateDto,
|
||||
TagResponseDto,
|
||||
createTag,
|
||||
getAllTags,
|
||||
tagAssets,
|
||||
upsertTags,
|
||||
} from '@immich/sdk';
|
||||
import { createUserDto, uuidDto } from 'src/fixtures';
|
||||
import { errorDto } from 'src/responses';
|
||||
import { app, asBearerAuth, utils } from 'src/utils';
|
||||
import request from 'supertest';
|
||||
import { beforeAll, beforeEach, describe, expect, it } from 'vitest';
|
||||
|
||||
const create = (accessToken: string, dto: TagCreateDto) =>
|
||||
createTag({ tagCreateDto: dto }, { headers: asBearerAuth(accessToken) });
|
||||
|
||||
const upsert = (accessToken: string, tags: string[]) =>
|
||||
upsertTags({ tagUpsertDto: { tags } }, { headers: asBearerAuth(accessToken) });
|
||||
|
||||
describe('/tags', () => {
|
||||
let admin: LoginResponseDto;
|
||||
let user: LoginResponseDto;
|
||||
let userAsset: AssetMediaResponseDto;
|
||||
|
||||
beforeAll(async () => {
|
||||
await utils.resetDatabase();
|
||||
|
||||
admin = await utils.adminSetup();
|
||||
user = await utils.userSetup(admin.accessToken, createUserDto.user1);
|
||||
userAsset = await utils.createAsset(user.accessToken);
|
||||
});
|
||||
|
||||
beforeEach(async () => {
|
||||
// tagging assets eventually triggers metadata extraction which can impact other tests
|
||||
await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
|
||||
await utils.resetDatabase(['tags']);
|
||||
});
|
||||
|
||||
describe('POST /tags', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).post('/tags').send({ name: 'TagA' });
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require authorization (api key)', async () => {
|
||||
const { secret } = await utils.createApiKey(user.accessToken, [Permission.AssetRead]);
|
||||
const { status, body } = await request(app).post('/tags').set('x-api-key', secret).send({ name: 'TagA' });
|
||||
expect(status).toBe(403);
|
||||
expect(body).toEqual(errorDto.missingPermission('tag.create'));
|
||||
});
|
||||
|
||||
it('should work with tag.create', async () => {
|
||||
const { secret } = await utils.createApiKey(user.accessToken, [Permission.TagCreate]);
|
||||
const { status, body } = await request(app).post('/tags').set('x-api-key', secret).send({ name: 'TagA' });
|
||||
expect(body).toEqual({
|
||||
id: expect.any(String),
|
||||
name: 'TagA',
|
||||
value: 'TagA',
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String),
|
||||
});
|
||||
expect(status).toBe(201);
|
||||
});
|
||||
|
||||
it('should create a tag', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/tags')
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`)
|
||||
.send({ name: 'TagA' });
|
||||
expect(body).toEqual({
|
||||
id: expect.any(String),
|
||||
name: 'TagA',
|
||||
value: 'TagA',
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String),
|
||||
});
|
||||
expect(status).toBe(201);
|
||||
});
|
||||
|
||||
it('should allow multiple users to create tags with the same value', async () => {
|
||||
await create(admin.accessToken, { name: 'TagA' });
|
||||
const { status, body } = await request(app)
|
||||
.post('/tags')
|
||||
.set('Authorization', `Bearer ${user.accessToken}`)
|
||||
.send({ name: 'TagA' });
|
||||
expect(body).toEqual({
|
||||
id: expect.any(String),
|
||||
name: 'TagA',
|
||||
value: 'TagA',
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String),
|
||||
});
|
||||
expect(status).toBe(201);
|
||||
});
|
||||
|
||||
it('should create a nested tag', async () => {
|
||||
const parent = await create(admin.accessToken, { name: 'TagA' });
|
||||
const { status, body } = await request(app)
|
||||
.post('/tags')
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`)
|
||||
.send({ name: 'TagB', parentId: parent.id });
|
||||
expect(body).toEqual({
|
||||
id: expect.any(String),
|
||||
parentId: parent.id,
|
||||
name: 'TagB',
|
||||
value: 'TagA/TagB',
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String),
|
||||
});
|
||||
expect(status).toBe(201);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /tags', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/tags');
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require authorization (api key)', async () => {
|
||||
const { secret } = await utils.createApiKey(user.accessToken, [Permission.AssetRead]);
|
||||
const { status, body } = await request(app).get('/tags').set('x-api-key', secret);
|
||||
expect(status).toBe(403);
|
||||
expect(body).toEqual(errorDto.missingPermission('tag.read'));
|
||||
});
|
||||
|
||||
it('should start off empty', async () => {
|
||||
const { status, body } = await request(app).get('/tags').set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(body).toEqual([]);
|
||||
expect(status).toEqual(200);
|
||||
});
|
||||
|
||||
it('should return a list of tags', async () => {
|
||||
const [tagA, tagB, tagC] = await Promise.all([
|
||||
create(admin.accessToken, { name: 'TagA' }),
|
||||
create(admin.accessToken, { name: 'TagB' }),
|
||||
create(admin.accessToken, { name: 'TagC' }),
|
||||
]);
|
||||
const { status, body } = await request(app).get('/tags').set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(body).toHaveLength(3);
|
||||
expect(body).toEqual([tagA, tagB, tagC]);
|
||||
expect(status).toEqual(200);
|
||||
});
|
||||
|
||||
it('should return a nested tags', async () => {
      await upsert(admin.accessToken, ['TagA/TagB/TagC', 'TagD']);
      const { status, body } = await request(app).get('/tags').set('Authorization', `Bearer ${admin.accessToken}`);

      expect(body).toHaveLength(4);
      expect(status).toEqual(200);

      const tags = body as TagResponseDto[];
      const tagA = tags.find((tag) => tag.value === 'TagA') as TagResponseDto;
      const tagB = tags.find((tag) => tag.value === 'TagA/TagB') as TagResponseDto;
      const tagC = tags.find((tag) => tag.value === 'TagA/TagB/TagC') as TagResponseDto;
      const tagD = tags.find((tag) => tag.value === 'TagD') as TagResponseDto;

      expect(tagA).toEqual(expect.objectContaining({ name: 'TagA', value: 'TagA' }));
      expect(tagB).toEqual(expect.objectContaining({ name: 'TagB', value: 'TagA/TagB', parentId: tagA.id }));
      expect(tagC).toEqual(expect.objectContaining({ name: 'TagC', value: 'TagA/TagB/TagC', parentId: tagB.id }));
      expect(tagD).toEqual(expect.objectContaining({ name: 'TagD', value: 'TagD' }));
    });
  });

  describe('PUT /tags', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).put(`/tags`).send({ name: 'TagA/TagB' });
      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should require authorization (api key)', async () => {
      const { secret } = await utils.createApiKey(user.accessToken, [Permission.AssetRead]);
      const { status, body } = await request(app).put('/tags').set('x-api-key', secret).send({ name: 'TagA' });
      expect(status).toBe(403);
      expect(body).toEqual(errorDto.missingPermission('tag.create'));
    });

    it('should upsert tags', async () => {
      const { status, body } = await request(app)
        .put(`/tags`)
        .send({ tags: ['TagA/TagB/TagC/TagD'] })
        .set('Authorization', `Bearer ${user.accessToken}`);
      expect(status).toBe(200);
      expect(body).toEqual([expect.objectContaining({ name: 'TagD', value: 'TagA/TagB/TagC/TagD' })]);
    });

    it('should upsert tags in parallel without conflicts', async () => {
      const [[tag1], [tag2], [tag3], [tag4]] = await Promise.all([
        upsert(admin.accessToken, ['TagA/TagB/TagC/TagD']),
        upsert(admin.accessToken, ['TagA/TagB/TagC/TagD']),
        upsert(admin.accessToken, ['TagA/TagB/TagC/TagD']),
        upsert(admin.accessToken, ['TagA/TagB/TagC/TagD']),
      ]);

      const { id, parentId, createdAt } = tag1;
      for (const tag of [tag1, tag2, tag3, tag4]) {
        expect(tag).toMatchObject({
          id,
          parentId,
          createdAt,
          name: 'TagD',
          value: 'TagA/TagB/TagC/TagD',
        });
      }
    });
  });

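The `upsert` helper referenced above is likewise defined outside this excerpt. From its call sites it is assumed to wrap the PUT /tags upsert endpoint and return the upserted leaf tags; a minimal sketch under that assumption:

// Hypothetical reconstruction of the `upsert` helper: PUT /tags with a list of
// slash-delimited tag paths; the response appears to contain the leaf tag of each path.
const upsert = async (accessToken: string, tags: string[]): Promise<TagResponseDto[]> => {
  const { body } = await request(app)
    .put('/tags')
    .set('Authorization', `Bearer ${accessToken}`)
    .send({ tags });
  return body as TagResponseDto[];
};
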
  describe('PUT /tags/assets', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).put(`/tags/assets`).send({ tagIds: [], assetIds: [] });
      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should require authorization (api key)', async () => {
      const { secret } = await utils.createApiKey(user.accessToken, [Permission.AssetRead]);
      const { status, body } = await request(app)
        .put('/tags/assets')
        .set('x-api-key', secret)
        .send({ assetIds: [], tagIds: [] });
      expect(status).toBe(403);
      expect(body).toEqual(errorDto.missingPermission('tag.asset'));
    });

    it('should skip assets that are not owned by the user', async () => {
      const [tagA, tagB, tagC, assetA, assetB] = await Promise.all([
        create(user.accessToken, { name: 'TagA' }),
        create(user.accessToken, { name: 'TagB' }),
        create(user.accessToken, { name: 'TagC' }),
        utils.createAsset(user.accessToken),
        utils.createAsset(admin.accessToken),
      ]);
      const { status, body } = await request(app)
        .put(`/tags/assets`)
        .send({ tagIds: [tagA.id, tagB.id, tagC.id], assetIds: [assetA.id, assetB.id] })
        .set('Authorization', `Bearer ${user.accessToken}`);
      expect(status).toBe(200);
      expect(body).toEqual({ count: 3 });
    });

    it('should skip tags that are not owned by the user', async () => {
      const [tagA, tagB, tagC, assetA, assetB] = await Promise.all([
        create(user.accessToken, { name: 'TagA' }),
        create(user.accessToken, { name: 'TagB' }),
        create(admin.accessToken, { name: 'TagC' }),
        utils.createAsset(user.accessToken),
        utils.createAsset(user.accessToken),
      ]);
      const { status, body } = await request(app)
        .put(`/tags/assets`)
        .send({ tagIds: [tagA.id, tagB.id, tagC.id], assetIds: [assetA.id, assetB.id] })
        .set('Authorization', `Bearer ${user.accessToken}`);
      expect(status).toBe(200);
      expect(body).toEqual({ count: 4 });
    });

    it('should bulk tag assets', async () => {
      const [tagA, tagB, tagC, assetA, assetB] = await Promise.all([
        create(user.accessToken, { name: 'TagA' }),
        create(user.accessToken, { name: 'TagB' }),
        create(user.accessToken, { name: 'TagC' }),
        utils.createAsset(user.accessToken),
        utils.createAsset(user.accessToken),
      ]);
      const { status, body } = await request(app)
        .put(`/tags/assets`)
        .send({ tagIds: [tagA.id, tagB.id, tagC.id], assetIds: [assetA.id, assetB.id] })
        .set('Authorization', `Bearer ${user.accessToken}`);
      expect(status).toBe(200);
      expect(body).toEqual({ count: 6 });
    });
  });

  describe('GET /tags/:id', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).get(`/tags/${uuidDto.notFound}`);
      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should require authorization', async () => {
      const tag = await create(user.accessToken, { name: 'TagA' });
      const { status, body } = await request(app)
        .get(`/tags/${tag.id}`)
        .set('Authorization', `Bearer ${admin.accessToken}`);
      expect(status).toBe(400);
      expect(body).toEqual(errorDto.noPermission);
    });

    it('should require authorization (api key)', async () => {
      const { secret } = await utils.createApiKey(user.accessToken, [Permission.AssetRead]);
      const { status, body } = await request(app)
        .get(`/tags/${uuidDto.notFound}`)
        .set('x-api-key', secret)
        .send({ assetIds: [], tagIds: [] });
      expect(status).toBe(403);
      expect(body).toEqual(errorDto.missingPermission('tag.read'));
    });

    it('should require a valid uuid', async () => {
      const { status, body } = await request(app)
        .get(`/tags/${uuidDto.invalid}`)
        .set('Authorization', `Bearer ${admin.accessToken}`);
      expect(status).toBe(400);
      expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
    });

    it('should get tag details', async () => {
      const tag = await create(user.accessToken, { name: 'TagA' });
      const { status, body } = await request(app)
        .get(`/tags/${tag.id}`)
        .set('Authorization', `Bearer ${user.accessToken}`);
      expect(status).toBe(200);
      expect(body).toEqual({
        id: expect.any(String),
        name: 'TagA',
        value: 'TagA',
        createdAt: expect.any(String),
        updatedAt: expect.any(String),
      });
    });

    it('should get nested tag details', async () => {
      const tagA = await create(user.accessToken, { name: 'TagA' });
      const tagB = await create(user.accessToken, { name: 'TagB', parentId: tagA.id });
      const tagC = await create(user.accessToken, { name: 'TagC', parentId: tagB.id });
      const tagD = await create(user.accessToken, { name: 'TagD', parentId: tagC.id });

      const { status, body } = await request(app)
        .get(`/tags/${tagD.id}`)
        .set('Authorization', `Bearer ${user.accessToken}`);
      expect(status).toBe(200);
      expect(body).toEqual({
        id: expect.any(String),
        parentId: tagC.id,
        name: 'TagD',
        value: 'TagA/TagB/TagC/TagD',
        createdAt: expect.any(String),
        updatedAt: expect.any(String),
      });
    });
  });

  describe('PUT /tags/:id', () => {
    it('should require authentication', async () => {
      const tag = await create(user.accessToken, { name: 'TagA' });
      const { status, body } = await request(app).put(`/tags/${tag.id}`).send({ color: '#000000' });
      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should require authorization', async () => {
      const tag = await create(admin.accessToken, { name: 'tagA' });
      const { status, body } = await request(app)
        .put(`/tags/${tag.id}`)
        .send({ color: '#000000' })
        .set('Authorization', `Bearer ${user.accessToken}`);
      expect(status).toBe(400);
      expect(body).toEqual(errorDto.noPermission);
    });

    it('should require authorization (api key)', async () => {
      const tag = await create(user.accessToken, { name: 'TagA' });
      const { secret } = await utils.createApiKey(user.accessToken, [Permission.AssetRead]);
      const { status, body } = await request(app)
        .put(`/tags/${tag.id}`)
        .set('x-api-key', secret)
        .send({ color: '#000000' });
      expect(status).toBe(403);
      expect(body).toEqual(errorDto.missingPermission('tag.update'));
    });

    it('should update a tag', async () => {
      const tag = await create(user.accessToken, { name: 'tagA' });
      const { status, body } = await request(app)
        .put(`/tags/${tag.id}`)
        .send({ color: '#000000' })
        .set('Authorization', `Bearer ${user.accessToken}`);
      expect(status).toBe(200);
      expect(body).toEqual(expect.objectContaining({ color: `#000000` }));
    });

    it('should update a tag color without a # prefix', async () => {
      const tag = await create(user.accessToken, { name: 'tagA' });
      const { status, body } = await request(app)
        .put(`/tags/${tag.id}`)
        .send({ color: '000000' })
        .set('Authorization', `Bearer ${user.accessToken}`);
      expect(status).toBe(200);
      expect(body).toEqual(expect.objectContaining({ color: `#000000` }));
    });
  });

  describe('DELETE /tags/:id', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).delete(`/tags/${uuidDto.notFound}`);
      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should require authorization', async () => {
      const tag = await create(user.accessToken, { name: 'TagA' });
      const { status, body } = await request(app)
        .delete(`/tags/${tag.id}`)
        .set('Authorization', `Bearer ${admin.accessToken}`);
      expect(status).toBe(400);
      expect(body).toEqual(errorDto.noPermission);
    });

    it('should require authorization (api key)', async () => {
      const tag = await create(user.accessToken, { name: 'TagA' });
      const { secret } = await utils.createApiKey(user.accessToken, [Permission.AssetRead]);
      const { status, body } = await request(app).delete(`/tags/${tag.id}`).set('x-api-key', secret);
      expect(status).toBe(403);
      expect(body).toEqual(errorDto.missingPermission('tag.delete'));
    });

    it('should require a valid uuid', async () => {
      const { status, body } = await request(app)
        .delete(`/tags/${uuidDto.invalid}`)
        .set('Authorization', `Bearer ${admin.accessToken}`);
      expect(status).toBe(400);
      expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
    });

    it('should delete a tag', async () => {
      const tag = await create(user.accessToken, { name: 'TagA' });
      const { status } = await request(app)
        .delete(`/tags/${tag.id}`)
        .set('Authorization', `Bearer ${user.accessToken}`);
      expect(status).toBe(204);
    });

    it('should delete a nested tag (root)', async () => {
      const tagA = await create(user.accessToken, { name: 'TagA' });
      await create(user.accessToken, { name: 'TagB', parentId: tagA.id });
      const { status } = await request(app)
        .delete(`/tags/${tagA.id}`)
        .set('Authorization', `Bearer ${user.accessToken}`);
      expect(status).toBe(204);
      const tags = await getAllTags({ headers: asBearerAuth(user.accessToken) });
      expect(tags.length).toBe(0);
    });

    it('should delete a nested tag (leaf)', async () => {
      const tagA = await create(user.accessToken, { name: 'TagA' });
      const tagB = await create(user.accessToken, { name: 'TagB', parentId: tagA.id });
      const { status } = await request(app)
        .delete(`/tags/${tagB.id}`)
        .set('Authorization', `Bearer ${user.accessToken}`);
      expect(status).toBe(204);
      const tags = await getAllTags({ headers: asBearerAuth(user.accessToken) });
      expect(tags.length).toBe(1);
      expect(tags[0]).toEqual(tagA);
    });
  });

  describe('PUT /tags/:id/assets', () => {
    it('should require authentication', async () => {
      const tagA = await create(user.accessToken, { name: 'TagA' });
      const { status, body } = await request(app)
        .put(`/tags/${tagA.id}/assets`)
        .send({ ids: [userAsset.id] });
      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should require authorization', async () => {
      const tag = await create(user.accessToken, { name: 'TagA' });
      const { status, body } = await request(app)
        .put(`/tags/${tag.id}/assets`)
        .set('Authorization', `Bearer ${admin.accessToken}`)
        .send({ ids: [userAsset.id] });
      expect(status).toBe(400);
      expect(body).toEqual(errorDto.noPermission);
    });

    it('should require authorization (api key)', async () => {
      const tag = await create(user.accessToken, { name: 'TagA' });
      const { secret } = await utils.createApiKey(user.accessToken, [Permission.AssetRead]);
      const { status, body } = await request(app)
        .put(`/tags/${tag.id}/assets`)
        .set('x-api-key', secret)
        .send({ ids: [userAsset.id] });
      expect(status).toBe(403);
      expect(body).toEqual(errorDto.missingPermission('tag.asset'));
    });

    it('should be able to tag own asset', async () => {
      const tagA = await create(user.accessToken, { name: 'TagA' });
      const { status, body } = await request(app)
        .put(`/tags/${tagA.id}/assets`)
        .set('Authorization', `Bearer ${user.accessToken}`)
        .send({ ids: [userAsset.id] });

      expect(status).toBe(200);
      expect(body).toEqual([expect.objectContaining({ id: userAsset.id, success: true })]);
    });

    it("should not be able to add assets to another user's tag", async () => {
      const tagA = await create(admin.accessToken, { name: 'TagA' });
      const { status, body } = await request(app)
        .put(`/tags/${tagA.id}/assets`)
        .set('Authorization', `Bearer ${user.accessToken}`)
        .send({ ids: [userAsset.id] });

      expect(status).toBe(400);
      expect(body).toEqual(errorDto.badRequest('Not found or no tag.asset access'));
    });

    it('should add duplicate assets only once', async () => {
      const tagA = await create(user.accessToken, { name: 'TagA' });
      const { status, body } = await request(app)
        .put(`/tags/${tagA.id}/assets`)
        .set('Authorization', `Bearer ${user.accessToken}`)
        .send({ ids: [userAsset.id, userAsset.id] });

      expect(status).toBe(200);
      expect(body).toEqual([
        expect.objectContaining({ id: userAsset.id, success: true }),
        expect.objectContaining({ id: userAsset.id, success: false, error: 'duplicate' }),
      ]);
    });
  });

  describe('DELETE /tags/:id/assets', () => {
    it('should require authentication', async () => {
      const tagA = await create(admin.accessToken, { name: 'TagA' });
      const { status, body } = await request(app)
        .delete(`/tags/${tagA.id}/assets`)
        .send({ ids: [userAsset.id] });

      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should require authorization', async () => {
      const tagA = await create(user.accessToken, { name: 'TagA' });
      await tagAssets(
        { id: tagA.id, bulkIdsDto: { ids: [userAsset.id] } },
        { headers: asBearerAuth(user.accessToken) },
      );
      const { status, body } = await request(app)
        .delete(`/tags/${tagA.id}/assets`)
        .set('Authorization', `Bearer ${admin.accessToken}`)
        .send({ ids: [userAsset.id] });

      expect(status).toBe(400);
      expect(body).toEqual(errorDto.noPermission);
    });

    it('should require authorization (api key)', async () => {
      const tag = await create(user.accessToken, { name: 'TagA' });
      const { secret } = await utils.createApiKey(user.accessToken, [Permission.AssetRead]);
      const { status, body } = await request(app)
        .delete(`/tags/${tag.id}/assets`)
        .set('x-api-key', secret)
        .send({ ids: [userAsset.id] });
      expect(status).toBe(403);
      expect(body).toEqual(errorDto.missingPermission('tag.asset'));
    });

    it('should be able to remove own asset from own tag', async () => {
      const tagA = await create(user.accessToken, { name: 'TagA' });
      await tagAssets(
        { id: tagA.id, bulkIdsDto: { ids: [userAsset.id] } },
        { headers: asBearerAuth(user.accessToken) },
      );
      const { status, body } = await request(app)
        .delete(`/tags/${tagA.id}/assets`)
        .set('Authorization', `Bearer ${user.accessToken}`)
        .send({ ids: [userAsset.id] });

      expect(status).toBe(200);
      expect(body).toEqual([expect.objectContaining({ id: userAsset.id, success: true })]);
    });

    it('should remove duplicate assets only once', async () => {
      const tagA = await create(user.accessToken, { name: 'TagA' });
      await tagAssets(
        { id: tagA.id, bulkIdsDto: { ids: [userAsset.id] } },
        { headers: asBearerAuth(user.accessToken) },
      );
      const { status, body } = await request(app)
        .delete(`/tags/${tagA.id}/assets`)
        .set('Authorization', `Bearer ${user.accessToken}`)
        .send({ ids: [userAsset.id, userAsset.id] });

      expect(status).toBe(200);
      expect(body).toEqual([
        expect.objectContaining({ id: userAsset.id, success: true }),
        expect.objectContaining({ id: userAsset.id, success: false, error: 'not_found' }),
      ]);
    });
  });
});
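Of the remaining helpers used above, `getAllTags` and `tagAssets` are presumably the generated @immich/sdk client functions for the same endpoints, and `asBearerAuth` is a small e2e utility that builds the headers object those calls receive. Based purely on how it is invoked here, it is assumed to look roughly like this:

// Hypothetical shape of the asBearerAuth utility: produce the headers object
// passed to SDK calls as `{ headers: asBearerAuth(accessToken) }` in the tests above.
const asBearerAuth = (accessToken: string) => ({ Authorization: `Bearer ${accessToken}` });
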
@@ -42,6 +42,23 @@ describe('/trash', () => {
      const after = await getAssetStatistics({ isTrashed: true }, { headers: asBearerAuth(admin.accessToken) });
      expect(after.total).toBe(0);
    });

    it('should empty the trash with archived assets', async () => {
      const { id: assetId } = await utils.createAsset(admin.accessToken);
      await utils.archiveAssets(admin.accessToken, [assetId]);
      await utils.deleteAssets(admin.accessToken, [assetId]);

      const before = await getAssetInfo({ id: assetId }, { headers: asBearerAuth(admin.accessToken) });
      expect(before).toStrictEqual(expect.objectContaining({ id: assetId, isTrashed: true, isArchived: true }));

      const { status } = await request(app).post('/trash/empty').set('Authorization', `Bearer ${admin.accessToken}`);
      expect(status).toBe(204);

      await utils.waitForWebsocketEvent({ event: 'assetDelete', id: assetId });

      const after = await getAssetStatistics({ isTrashed: true }, { headers: asBearerAuth(admin.accessToken) });
      expect(after.total).toBe(0);
    });
  });

  describe('POST /trash/restore', () => {
Some files were not shown because too many files have changed in this diff.