mirror of
https://github.com/immich-app/immich.git
synced 2025-12-06 12:51:32 -08:00
Compare commits
431 Commits
update-exi
...
v1.134.0
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
58ae77ec92 | ||
|
|
4794a1a092 | ||
|
|
6abcfaef99 | ||
|
|
f6903696cb | ||
|
|
724a081bb5 | ||
|
|
4e332db2fb | ||
|
|
0712183a18 | ||
|
|
d004c03990 | ||
|
|
fff651f8a5 | ||
|
|
e2720e85bb | ||
|
|
5fdc8c9481 | ||
|
|
a3404cf420 | ||
|
|
5268dc4ee2 | ||
|
|
ef060e97b6 | ||
|
|
a9851df8d1 | ||
|
|
099a1e4210 | ||
|
|
79d760ccd7 | ||
|
|
369d3dfa38 | ||
|
|
93e53f6d74 | ||
|
|
d8f0a69dc8 | ||
|
|
09d9fa9755 | ||
|
|
118dc8cf5a | ||
|
|
9557395991 | ||
|
|
a5d63d6953 | ||
|
|
5ee4a43e74 | ||
|
|
c3aeb6c497 | ||
|
|
d22fb2d5db | ||
|
|
c4df96bd72 | ||
|
|
40e7b58ba4 | ||
|
|
4743a085f1 | ||
|
|
911c877e72 | ||
|
|
806000e671 | ||
|
|
54bafccbf9 | ||
|
|
e61c575b01 | ||
|
|
e12c67742c | ||
|
|
4878c500a5 | ||
|
|
2fa7a40996 | ||
|
|
963dd3210a | ||
|
|
4fdf75311c | ||
|
|
529359de2d | ||
|
|
2d7377a5e9 | ||
|
|
8fcf47e5cb | ||
|
|
c7dc31151d | ||
|
|
065f7c7d5d | ||
|
|
15877ddf1f | ||
|
|
1b8fa51315 | ||
|
|
1f84cbe7e5 | ||
|
|
b194aee754 | ||
|
|
91b961642a | ||
|
|
c61ea483ba | ||
|
|
c278bb0e5b | ||
|
|
bc8e08f5e8 | ||
|
|
0b8fc7b493 | ||
|
|
7bb25a5c8d | ||
|
|
58c1b92816 | ||
|
|
55adc136c8 | ||
|
|
cd288533a1 | ||
|
|
58af574241 | ||
|
|
6954b11be1 | ||
|
|
bc906f7343 | ||
|
|
760b08506a | ||
|
|
6b31e333bb | ||
|
|
493b9b7a54 | ||
|
|
188188a844 | ||
|
|
b2ef8ea7dd | ||
|
|
a6c4bd1555 | ||
|
|
a02fe89ec9 | ||
|
|
98e998e814 | ||
|
|
b83b28cd73 | ||
|
|
9771e48049 | ||
|
|
86db0aafe5 | ||
|
|
12b7a079c1 | ||
|
|
53420b7c02 | ||
|
|
c05aa445d8 | ||
|
|
bdf19ce331 | ||
|
|
895e0eacfe | ||
|
|
e7b60a9278 | ||
|
|
4e2fc9f017 | ||
|
|
d1e6682df0 | ||
|
|
965498d19b | ||
|
|
62f24a79f4 | ||
|
|
495a959879 | ||
|
|
a6a4dfcfd3 | ||
|
|
0d773af6c3 | ||
|
|
fe71894308 | ||
|
|
397808dd1a | ||
|
|
e7edbcdf04 | ||
|
|
59f666b115 | ||
|
|
dc8962f2bc | ||
|
|
00a77c2d6a | ||
|
|
c8641d24f6 | ||
|
|
2431e04a09 | ||
|
|
9e47093501 | ||
|
|
230c286b97 | ||
|
|
14970c5539 | ||
|
|
adb17c4d58 | ||
|
|
56156b97e7 | ||
|
|
c411c1472a | ||
|
|
0bbe70e6a3 | ||
|
|
a65c905621 | ||
|
|
61d784f4e7 | ||
|
|
b63d6cdcd6 | ||
|
|
fa45a26cff | ||
|
|
8f045bc602 | ||
|
|
5353658114 | ||
|
|
21880aec14 | ||
|
|
48d746d9d5 | ||
|
|
8ab5040351 | ||
|
|
1219fd82a0 | ||
|
|
28d8357cc5 | ||
|
|
a9e7d0388b | ||
|
|
86d64f3483 | ||
|
|
c1150fe7e3 | ||
|
|
ecb66fdb2c | ||
|
|
c046651f23 | ||
|
|
6117329057 | ||
|
|
585997d46f | ||
|
|
7146ec99b1 | ||
|
|
b7b0b9b6d8 | ||
|
|
4935f3e0bb | ||
|
|
709a7b70aa | ||
|
|
6a4d21205f | ||
|
|
3a0ddfb92d | ||
|
|
cd03d0c0f2 | ||
|
|
117b263887 | ||
|
|
f357f3324f | ||
|
|
7d95bad5cb | ||
|
|
77b0505006 | ||
|
|
fac1beb7d8 | ||
|
|
3944f5d73b | ||
|
|
4445288758 | ||
|
|
4efc41d5d9 | ||
|
|
c9d45eee86 | ||
|
|
b3b774cfe5 | ||
|
|
15e894b9b5 | ||
|
|
ca06d0aa83 | ||
|
|
0cd51ae9c5 | ||
|
|
68f6111b77 | ||
|
|
668288ca20 | ||
|
|
3fdc1df89c | ||
|
|
989d9dbe51 | ||
|
|
48112d84a3 | ||
|
|
80dfe7a5e9 | ||
|
|
ce90a2ec1a | ||
|
|
dccbe0b3ed | ||
|
|
c0ad12f279 | ||
|
|
9c484b23a9 | ||
|
|
eed014482d | ||
|
|
d271e6a3ae | ||
|
|
60c43081ed | ||
|
|
81d959a27e | ||
|
|
bb775110ef | ||
|
|
7280331b76 | ||
|
|
93ee6ee0a5 | ||
|
|
7544a678ec | ||
|
|
3066c8198c | ||
|
|
eb8dfa283e | ||
|
|
41a127e2ab | ||
|
|
feb475561e | ||
|
|
4c4c67f0d2 | ||
|
|
381b66bf70 | ||
|
|
a89f3ad97c | ||
|
|
c473511133 | ||
|
|
0d66a6b51f | ||
|
|
66400b2e8e | ||
|
|
87cdf0ebd9 | ||
|
|
3f719bd8d7 | ||
|
|
55af925ab3 | ||
|
|
ff63b0fa8f | ||
|
|
f21fe8716c | ||
|
|
6a69dafd31 | ||
|
|
47b1938f17 | ||
|
|
2ffcfe06f3 | ||
|
|
89551edee5 | ||
|
|
cb6c541ae1 | ||
|
|
b1e1362246 | ||
|
|
ccc2b191dd | ||
|
|
bb7010b2bb | ||
|
|
8db666bc38 | ||
|
|
eace0f716d | ||
|
|
96743b6c33 | ||
|
|
ff181cf346 | ||
|
|
0cd5960007 | ||
|
|
698592c1b0 | ||
|
|
f75d853e9a | ||
|
|
3a1e3e82e7 | ||
|
|
0beb3ac4c1 | ||
|
|
894545aeed | ||
|
|
5250269fa4 | ||
|
|
a169fb6a79 | ||
|
|
09ced9a171 | ||
|
|
a6e5e4f625 | ||
|
|
bbd8de177b | ||
|
|
867f6e64f9 | ||
|
|
ec6379b0b2 | ||
|
|
2a80251dc3 | ||
|
|
d33ce13561 | ||
|
|
016d7a6ceb | ||
|
|
8ff25a4f7a | ||
|
|
61a3eba1bd | ||
|
|
7072e48cbe | ||
|
|
ece977d9ca | ||
|
|
2af8095880 | ||
|
|
30822fcd10 | ||
|
|
c578273e7a | ||
|
|
118a3fc9db | ||
|
|
1138f6dcce | ||
|
|
33f3751b72 | ||
|
|
b8509e6411 | ||
|
|
bd43edbcd7 | ||
|
|
c8b4a7e1f1 | ||
|
|
f34f83e164 | ||
|
|
df2cf5d106 | ||
|
|
52975eadb3 | ||
|
|
12610e4a9f | ||
|
|
2b3efa02d8 | ||
|
|
a21a997f21 | ||
|
|
7d61ed7ce4 | ||
|
|
8f7baf8336 | ||
|
|
44923acfd6 | ||
|
|
ab95881ebb | ||
|
|
8801ae5821 | ||
|
|
ea9f11bf39 | ||
|
|
62fc5b3c7d | ||
|
|
15d431ba6a | ||
|
|
5d21ba3166 | ||
|
|
da7a81b752 | ||
|
|
becdc3dcf5 | ||
|
|
84b51e3cbb | ||
|
|
b845184c80 | ||
|
|
1fde02ee1e | ||
|
|
526c02297c | ||
|
|
732b06eec8 | ||
|
|
436cff72b5 | ||
|
|
be5cc2cdf5 | ||
|
|
094a41ac9a | ||
|
|
ebad6a008f | ||
|
|
0c261ffbe2 | ||
|
|
6df6103c67 | ||
|
|
8c5116bc1d | ||
|
|
e3812a0e36 | ||
|
|
4b1ced439b | ||
|
|
2e8a286540 | ||
|
|
038a82c4f1 | ||
|
|
2c2dd01bf0 | ||
|
|
ac73e163df | ||
|
|
d89e88bb3f | ||
|
|
3ce353393a | ||
|
|
0e4cf9ac57 | ||
|
|
07290580a6 | ||
|
|
d9ce74b896 | ||
|
|
4c0f79b162 | ||
|
|
9851d24628 | ||
|
|
fe6cbd93b1 | ||
|
|
df20788088 | ||
|
|
3d042cc7f1 | ||
|
|
85446c5862 | ||
|
|
fb52ac0f5b | ||
|
|
48bcbee6ed | ||
|
|
f621f8ef2c | ||
|
|
7f69abbf0d | ||
|
|
895b2bf5cd | ||
|
|
f64e6f5dc3 | ||
|
|
64e738f79d | ||
|
|
a17390a422 | ||
|
|
1b5fc9c665 | ||
|
|
23717ce981 | ||
|
|
2fd05e8447 | ||
|
|
c664d99a34 | ||
|
|
21c7d70336 | ||
|
|
ad272333db | ||
|
|
460d594791 | ||
|
|
e6c575c33e | ||
|
|
85ac0512a6 | ||
|
|
205260d31c | ||
|
|
3858973be5 | ||
|
|
02994883fe | ||
|
|
a1f8150c30 | ||
|
|
d85ef19bfc | ||
|
|
d0014bdf94 | ||
|
|
e822e3eca9 | ||
|
|
644defa4a1 | ||
|
|
1fe3c7b9b3 | ||
|
|
0d60be3d87 | ||
|
|
765da7b182 | ||
|
|
b037158028 | ||
|
|
a03902f174 | ||
|
|
1d610ad9cb | ||
|
|
dab4870fed | ||
|
|
37f5e6e2cb | ||
|
|
57d622bc43 | ||
|
|
c167e46ec7 | ||
|
|
6ce8a1deeb | ||
|
|
f659ef4b7a | ||
|
|
bb6cdc99ab | ||
|
|
830b4dadcb | ||
|
|
d2f2f8d672 | ||
|
|
be1062474b | ||
|
|
64000d9d76 | ||
|
|
59fa8fbd0e | ||
|
|
19746a8685 | ||
|
|
987e5ab76c | ||
|
|
1b5e981a45 | ||
|
|
b7a0cf2470 | ||
|
|
13d6bd67b1 | ||
|
|
1de2eae12d | ||
|
|
bc5875ba8d | ||
|
|
0426b574fe | ||
|
|
2c3658e642 | ||
|
|
a493dab294 | ||
|
|
699fdd0d1b | ||
|
|
a774153f67 | ||
|
|
ca12aff3a4 | ||
|
|
550c1c0a10 | ||
|
|
92ac1193e6 | ||
|
|
2a95eccf6a | ||
|
|
ee017803bf | ||
|
|
0986a71ce3 | ||
|
|
af36eaa61b | ||
|
|
fda68f972f | ||
|
|
a8eec92da7 | ||
|
|
ad8511c978 | ||
|
|
fe8c5e8107 | ||
|
|
c70140e707 | ||
|
|
010b144754 | ||
|
|
b71039e83c | ||
|
|
56a4aa9ffe | ||
|
|
488dc4efbd | ||
|
|
f0ff8581da | ||
|
|
c49fd2065b | ||
|
|
21a6eb30ff | ||
|
|
9e063c993c | ||
|
|
dd1fcd5be5 | ||
|
|
52ae06c119 | ||
|
|
854ea13d6a | ||
|
|
504930947d | ||
|
|
0e6ac87645 | ||
|
|
bd2deda50c | ||
|
|
160bb492a2 | ||
|
|
6474a78b8b | ||
|
|
e275f2d8b3 | ||
|
|
81ed54aa61 | ||
|
|
067338b0ed | ||
|
|
5e68f8c519 | ||
|
|
242a559e0f | ||
|
|
ed2b54527c | ||
|
|
8b38f8a58d | ||
|
|
29b30570bf | ||
|
|
586a7a173b | ||
|
|
1bbfacfc09 | ||
|
|
85c2d36d99 | ||
|
|
8cefa0b84b | ||
|
|
f50e5d006c | ||
|
|
8f8ff3adc0 | ||
|
|
c4c35ed140 | ||
|
|
be2f670d35 | ||
|
|
7efcba2b12 | ||
|
|
459c815086 | ||
|
|
36fa61c013 | ||
|
|
8da5f21fcf | ||
|
|
76db8cf604 | ||
|
|
21becbf1b0 | ||
|
|
26f0ea4cb5 | ||
|
|
19e5a6f68f | ||
|
|
78f8e23834 | ||
|
|
5bceefce75 | ||
|
|
b710ad36f3 | ||
|
|
270d178a2e | ||
|
|
309528c807 | ||
|
|
7c422363fb | ||
|
|
3eb316abea | ||
|
|
b3371e16f2 | ||
|
|
b2c903c000 | ||
|
|
17e720440d | ||
|
|
a522130122 | ||
|
|
cecd9c24a4 | ||
|
|
f189c7b101 | ||
|
|
c5f087a3ca | ||
|
|
72f6d7791e | ||
|
|
f73fce1046 | ||
|
|
f2edcde1b2 | ||
|
|
9d0dd9dff8 | ||
|
|
c3d10c5be2 | ||
|
|
bd92748ddd | ||
|
|
aad5c3bada | ||
|
|
b2753103c6 | ||
|
|
e3f3baadb0 | ||
|
|
0b69d1c147 | ||
|
|
5a51ad3622 | ||
|
|
664c99278a | ||
|
|
184e142d87 | ||
|
|
8b00578c7b | ||
|
|
7562088fac | ||
|
|
79d4ce2d6d | ||
|
|
983f656a6b | ||
|
|
ab2a7006f9 | ||
|
|
1f18fe31f0 | ||
|
|
a373034629 | ||
|
|
5dac315af7 | ||
|
|
8309b73a02 | ||
|
|
e440cbe353 | ||
|
|
5548eb0dad | ||
|
|
3bec8dc337 | ||
|
|
5bcb58c3e7 | ||
|
|
c62fc155c8 | ||
|
|
40e3322b25 | ||
|
|
25f2b9602f | ||
|
|
ae6653392e | ||
|
|
d7a782da34 | ||
|
|
08b5952c87 | ||
|
|
584e5894bf | ||
|
|
52d4b2fe57 | ||
|
|
92f0973a46 | ||
|
|
75c83cb704 | ||
|
|
0b22d3348e | ||
|
|
abde0fbe60 | ||
|
|
eaa0e07329 | ||
|
|
9fd2c5220d | ||
|
|
7fcab4b251 | ||
|
|
e3995fb5f4 | ||
|
|
6d3f3d8616 | ||
|
|
4412680679 | ||
|
|
7df2c9c905 | ||
|
|
7a1e8ce6d8 | ||
|
|
8aea07b750 | ||
|
|
94dba29298 | ||
|
|
9e49783e49 | ||
|
|
43e3075f93 | ||
|
|
d03647904b | ||
|
|
206545356d | ||
|
|
3e372500b0 |
@@ -1,10 +1,10 @@
|
||||
ARG BASEIMAGE=mcr.microsoft.com/devcontainers/typescript-node:22@sha256:b0b88ef6a5abf21194343d2c5b2829dddd9be1142f65f6a5e4390a51d5a70dd8
|
||||
ARG BASEIMAGE=mcr.microsoft.com/devcontainers/typescript-node:22@sha256:a20b8a3538313487ac9266875bbf733e544c1aa2091df2bb99ab592a6d4f7399
|
||||
FROM ${BASEIMAGE}
|
||||
|
||||
# Flutter SDK
|
||||
# https://flutter.dev/docs/development/tools/sdk/releases?tab=linux
|
||||
ENV FLUTTER_CHANNEL="stable"
|
||||
ENV FLUTTER_VERSION="3.29.1"
|
||||
ENV FLUTTER_VERSION="3.29.3"
|
||||
ENV FLUTTER_HOME=/flutter
|
||||
ENV PATH=${PATH}:${FLUTTER_HOME}/bin
|
||||
|
||||
|
||||
2
.github/.nvmrc
vendored
2
.github/.nvmrc
vendored
@@ -1 +1 @@
|
||||
22.14.0
|
||||
22.16.0
|
||||
|
||||
@@ -14,7 +14,6 @@ body:
|
||||
label: I have searched the existing feature requests, both open and closed, to make sure this is not a duplicate request.
|
||||
options:
|
||||
- label: 'Yes'
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: feature
|
||||
|
||||
1
.github/ISSUE_TEMPLATE/bug_report.yaml
vendored
1
.github/ISSUE_TEMPLATE/bug_report.yaml
vendored
@@ -6,7 +6,6 @@ body:
|
||||
label: I have searched the existing issues, both open and closed, to make sure this is not a duplicate report.
|
||||
options:
|
||||
- label: 'Yes'
|
||||
required: true
|
||||
|
||||
- type: markdown
|
||||
attributes:
|
||||
|
||||
118
.github/actions/image-build/action.yml
vendored
Normal file
118
.github/actions/image-build/action.yml
vendored
Normal file
@@ -0,0 +1,118 @@
|
||||
name: 'Single arch image build'
|
||||
description: 'Build single-arch image on platform appropriate runner'
|
||||
inputs:
|
||||
image:
|
||||
description: 'Name of the image to build'
|
||||
required: true
|
||||
ghcr-token:
|
||||
description: 'GitHub Container Registry token'
|
||||
required: true
|
||||
platform:
|
||||
description: 'Platform to build for'
|
||||
required: true
|
||||
artifact-key-base:
|
||||
description: 'Base key for artifact name'
|
||||
required: true
|
||||
context:
|
||||
description: 'Path to build context'
|
||||
required: true
|
||||
dockerfile:
|
||||
description: 'Path to Dockerfile'
|
||||
required: true
|
||||
build-args:
|
||||
description: 'Docker build arguments'
|
||||
required: false
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- name: Prepare
|
||||
id: prepare
|
||||
shell: bash
|
||||
env:
|
||||
PLATFORM: ${{ inputs.platform }}
|
||||
run: |
|
||||
echo "platform-pair=${PLATFORM//\//-}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
if: ${{ !github.event.pull_request.head.repo.fork }}
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ inputs.ghcr-token }}
|
||||
|
||||
- name: Generate cache key suffix
|
||||
id: cache-key-suffix
|
||||
shell: bash
|
||||
env:
|
||||
REF: ${{ github.ref_name }}
|
||||
run: |
|
||||
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
|
||||
echo "cache-key-suffix=pr-${{ github.event.number }}" >> $GITHUB_OUTPUT
|
||||
else
|
||||
SUFFIX=$(echo "${REF}" | sed 's/[^a-zA-Z0-9]/-/g')
|
||||
echo "suffix=${SUFFIX}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Generate cache target
|
||||
id: cache-target
|
||||
shell: bash
|
||||
env:
|
||||
BUILD_ARGS: ${{ inputs.build-args }}
|
||||
IMAGE: ${{ inputs.image }}
|
||||
SUFFIX: ${{ steps.cache-key-suffix.outputs.suffix }}
|
||||
PLATFORM_PAIR: ${{ steps.prepare.outputs.platform-pair }}
|
||||
run: |
|
||||
HASH=$(sha256sum <<< "${BUILD_ARGS}" | cut -d' ' -f1)
|
||||
CACHE_KEY="${PLATFORM_PAIR}-${HASH}"
|
||||
echo "cache-key-base=${CACHE_KEY}" >> $GITHUB_OUTPUT
|
||||
if [[ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]]; then
|
||||
# Essentially just ignore the cache output (forks can't write to registry cache)
|
||||
echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "cache-to=type=registry,ref=${IMAGE}-build-cache:${CACHE_KEY}-${SUFFIX},mode=max,compression=zstd" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Generate docker image tags
|
||||
id: meta
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
|
||||
env:
|
||||
DOCKER_METADATA_PR_HEAD_SHA: 'true'
|
||||
|
||||
- name: Build and push image
|
||||
id: build
|
||||
uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
|
||||
with:
|
||||
context: ${{ inputs.context }}
|
||||
file: ${{ inputs.dockerfile }}
|
||||
platforms: ${{ inputs.platform }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-to: ${{ steps.cache-target.outputs.cache-to }}
|
||||
cache-from: |
|
||||
type=registry,ref=${{ inputs.image }}-build-cache:${{ steps.cache-target.outputs.cache-key-base }}-${{ steps.cache-key-suffix.outputs.suffix }}
|
||||
type=registry,ref=${{ inputs.image }}-build-cache:${{ steps.cache-target.outputs.cache-key-base }}-main
|
||||
outputs: type=image,"name=${{ inputs.image }}",push-by-digest=true,name-canonical=true,push=${{ !github.event.pull_request.head.repo.fork }}
|
||||
build-args: |
|
||||
BUILD_ID=${{ github.run_id }}
|
||||
BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.meta.outputs.tags }}
|
||||
BUILD_SOURCE_REF=${{ github.ref_name }}
|
||||
BUILD_SOURCE_COMMIT=${{ github.sha }}
|
||||
${{ inputs.build-args }}
|
||||
|
||||
- name: Export digest
|
||||
shell: bash
|
||||
run: | # zizmor: ignore[template-injection]
|
||||
mkdir -p ${{ runner.temp }}/digests
|
||||
digest="${{ steps.build.outputs.digest }}"
|
||||
touch "${{ runner.temp }}/digests/${digest#sha256:}"
|
||||
|
||||
- name: Upload digest
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
||||
with:
|
||||
name: ${{ inputs.artifact-key-base }}-${{ steps.cache-target.outputs.cache-key-base }}
|
||||
path: ${{ runner.temp }}/digests/*
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
45
.github/workflows/build-mobile.yml
vendored
45
.github/workflows/build-mobile.yml
vendored
@@ -7,6 +7,15 @@ on:
|
||||
ref:
|
||||
required: false
|
||||
type: string
|
||||
secrets:
|
||||
KEY_JKS:
|
||||
required: true
|
||||
ALIAS:
|
||||
required: true
|
||||
ANDROID_KEY_PASSWORD:
|
||||
required: true
|
||||
ANDROID_STORE_PASSWORD:
|
||||
required: true
|
||||
pull_request:
|
||||
push:
|
||||
branches: [main]
|
||||
@@ -15,16 +24,23 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
pre-job:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
|
||||
with:
|
||||
filters: |
|
||||
mobile:
|
||||
@@ -38,31 +54,26 @@ jobs:
|
||||
build-sign-android:
|
||||
name: Build and sign Android
|
||||
needs: pre-job
|
||||
permissions:
|
||||
contents: read
|
||||
# Skip when PR from a fork
|
||||
if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' && needs.pre-job.outputs.should_run == 'true' }}
|
||||
runs-on: macos-14
|
||||
|
||||
steps:
|
||||
- name: Determine ref
|
||||
id: get-ref
|
||||
run: |
|
||||
input_ref="${{ inputs.ref }}"
|
||||
github_ref="${{ github.sha }}"
|
||||
ref="${input_ref:-$github_ref}"
|
||||
echo "ref=$ref" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
ref: ${{ steps.get-ref.outputs.ref }}
|
||||
ref: ${{ inputs.ref || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
- uses: actions/setup-java@3a4f6e1af504cf6a31855fa899c6aa5355ba6c12 # v4
|
||||
- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
|
||||
with:
|
||||
distribution: 'zulu'
|
||||
java-version: '17'
|
||||
cache: 'gradle'
|
||||
|
||||
- name: Setup Flutter SDK
|
||||
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2
|
||||
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2.19.0
|
||||
with:
|
||||
channel: 'stable'
|
||||
flutter-version-file: ./mobile/pubspec.yaml
|
||||
@@ -78,6 +89,10 @@ jobs:
|
||||
working-directory: ./mobile
|
||||
run: flutter pub get
|
||||
|
||||
- name: Generate translation file
|
||||
run: make translation
|
||||
working-directory: ./mobile
|
||||
|
||||
- name: Build Android App Bundle
|
||||
working-directory: ./mobile
|
||||
env:
|
||||
@@ -89,7 +104,7 @@ jobs:
|
||||
flutter build apk --release --split-per-abi --target-platform android-arm,android-arm64,android-x64
|
||||
|
||||
- name: Publish Android Artifact
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: release-apk-signed
|
||||
path: mobile/build/app/outputs/flutter-apk/*.apk
|
||||
|
||||
19
.github/workflows/cache-cleanup.yml
vendored
19
.github/workflows/cache-cleanup.yml
vendored
@@ -8,31 +8,38 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
cleanup:
|
||||
name: Cleanup
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
actions: write
|
||||
steps:
|
||||
- name: Check out code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Cleanup
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
REF: ${{ github.ref }}
|
||||
run: |
|
||||
gh extension install actions/gh-actions-cache
|
||||
|
||||
REPO=${{ github.repository }}
|
||||
BRANCH=${{ github.ref }}
|
||||
|
||||
echo "Fetching list of cache keys"
|
||||
cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )
|
||||
cacheKeysForPR=$(gh actions-cache list -R $REPO -B ${REF} -L 100 | cut -f 1 )
|
||||
|
||||
## Setting this to not fail the workflow while deleting cache keys.
|
||||
set +e
|
||||
echo "Deleting caches..."
|
||||
for cacheKey in $cacheKeysForPR
|
||||
do
|
||||
gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
|
||||
gh actions-cache delete $cacheKey -R "$REPO" -B "${REF}" --confirm
|
||||
done
|
||||
echo "Done"
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
25
.github/workflows/cli.yml
vendored
25
.github/workflows/cli.yml
vendored
@@ -16,21 +16,25 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
packages: write
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
name: CLI Publish
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./cli
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
# Setup .npmrc file to publish to npm
|
||||
- uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
|
||||
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './cli/.nvmrc'
|
||||
registry-url: 'https://registry.npmjs.org'
|
||||
@@ -48,11 +52,16 @@ jobs:
|
||||
docker:
|
||||
name: Docker
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
needs: publish
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
|
||||
@@ -61,7 +70,7 @@ jobs:
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
if: ${{ !github.event.pull_request.head.repo.fork }}
|
||||
with:
|
||||
registry: ghcr.io
|
||||
@@ -76,7 +85,7 @@ jobs:
|
||||
|
||||
- name: Generate docker image tags
|
||||
id: metadata
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
|
||||
with:
|
||||
flavor: |
|
||||
latest=false
|
||||
@@ -87,7 +96,7 @@ jobs:
|
||||
type=raw,value=latest,enable=${{ github.event_name == 'release' }}
|
||||
|
||||
- name: Build and push image
|
||||
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
|
||||
uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
|
||||
with:
|
||||
file: cli/Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
|
||||
12
.github/workflows/codeql-analysis.yml
vendored
12
.github/workflows/codeql-analysis.yml
vendored
@@ -24,6 +24,8 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
@@ -42,11 +44,13 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@45775bd8235c68ba998cffa5171334d58593da47 # v3
|
||||
uses: github/codeql-action/init@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
@@ -59,7 +63,7 @@ jobs:
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@45775bd8235c68ba998cffa5171334d58593da47 # v3
|
||||
uses: github/codeql-action/autobuild@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||
@@ -72,6 +76,6 @@ jobs:
|
||||
# ./location_of_script_within_repo/buildscript.sh
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@45775bd8235c68ba998cffa5171334d58593da47 # v3
|
||||
uses: github/codeql-action/analyze@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
|
||||
with:
|
||||
category: '/language:${{matrix.language}}'
|
||||
|
||||
440
.github/workflows/docker.yml
vendored
440
.github/workflows/docker.yml
vendored
@@ -12,20 +12,23 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
packages: write
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
pre-job:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
|
||||
with:
|
||||
filters: |
|
||||
server:
|
||||
@@ -37,6 +40,8 @@ jobs:
|
||||
- 'machine-learning/**'
|
||||
workflow:
|
||||
- '.github/workflows/docker.yml'
|
||||
- '.github/workflows/multi-runner-build.yml'
|
||||
- '.github/actions/image-build'
|
||||
|
||||
- name: Check if we should force jobs to run
|
||||
id: should_force
|
||||
@@ -45,6 +50,9 @@ jobs:
|
||||
retag_ml:
|
||||
name: Re-Tag ML
|
||||
needs: pre-job
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
if: ${{ needs.pre-job.outputs.should_run_ml == 'false' && !github.event.pull_request.head.repo.fork }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
@@ -52,24 +60,28 @@ jobs:
|
||||
suffix: ['', '-cuda', '-rocm', '-openvino', '-armnn', '-rknn']
|
||||
steps:
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Re-tag image
|
||||
env:
|
||||
REGISTRY_NAME: 'ghcr.io'
|
||||
REPOSITORY: ${{ github.repository_owner }}/immich-machine-learning
|
||||
TAG_OLD: main${{ matrix.suffix }}
|
||||
TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
|
||||
TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
|
||||
run: |
|
||||
REGISTRY_NAME="ghcr.io"
|
||||
REPOSITORY=${{ github.repository_owner }}/immich-machine-learning
|
||||
TAG_OLD=main${{ matrix.suffix }}
|
||||
TAG_PR=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
|
||||
TAG_COMMIT=commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
|
||||
docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_PR $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
|
||||
docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_COMMIT $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
|
||||
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
|
||||
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
|
||||
|
||||
retag_server:
|
||||
name: Re-Tag Server
|
||||
needs: pre-job
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
if: ${{ needs.pre-job.outputs.should_run_server == 'false' && !github.event.pull_request.head.repo.fork }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
@@ -77,376 +89,91 @@ jobs:
|
||||
suffix: ['']
|
||||
steps:
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Re-tag image
|
||||
env:
|
||||
REGISTRY_NAME: 'ghcr.io'
|
||||
REPOSITORY: ${{ github.repository_owner }}/immich-server
|
||||
TAG_OLD: main${{ matrix.suffix }}
|
||||
TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
|
||||
TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
|
||||
run: |
|
||||
REGISTRY_NAME="ghcr.io"
|
||||
REPOSITORY=${{ github.repository_owner }}/immich-server
|
||||
TAG_OLD=main${{ matrix.suffix }}
|
||||
TAG_PR=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
|
||||
TAG_COMMIT=commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
|
||||
docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_PR $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
|
||||
docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_COMMIT $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
|
||||
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
|
||||
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
|
||||
|
||||
build_and_push_ml:
|
||||
machine-learning:
|
||||
name: Build and Push ML
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
env:
|
||||
image: immich-machine-learning
|
||||
context: machine-learning
|
||||
file: machine-learning/Dockerfile
|
||||
GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-machine-learning
|
||||
strategy:
|
||||
# Prevent a failure in one image from stopping the other builds
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- platform: linux/amd64
|
||||
runner: ubuntu-latest
|
||||
device: cpu
|
||||
|
||||
- platform: linux/arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
device: cpu
|
||||
|
||||
- platform: linux/amd64
|
||||
runner: ubuntu-latest
|
||||
device: cuda
|
||||
suffix: -cuda
|
||||
|
||||
- platform: linux/amd64
|
||||
runner: mich
|
||||
device: rocm
|
||||
suffix: -rocm
|
||||
|
||||
- platform: linux/amd64
|
||||
runner: ubuntu-latest
|
||||
device: openvino
|
||||
suffix: -openvino
|
||||
|
||||
- platform: linux/arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
device: armnn
|
||||
suffix: -armnn
|
||||
|
||||
- platform: linux/arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
device: rknn
|
||||
suffix: -rknn
|
||||
|
||||
steps:
|
||||
- name: Prepare
|
||||
run: |
|
||||
platform=${{ matrix.platform }}
|
||||
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
if: ${{ !github.event.pull_request.head.repo.fork }}
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Generate cache key suffix
|
||||
run: |
|
||||
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
|
||||
echo "CACHE_KEY_SUFFIX=pr-${{ github.event.number }}" >> $GITHUB_ENV
|
||||
else
|
||||
echo "CACHE_KEY_SUFFIX=$(echo ${{ github.ref_name }} | sed 's/[^a-zA-Z0-9]/-/g')" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Generate cache target
|
||||
id: cache-target
|
||||
run: |
|
||||
if [[ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]]; then
|
||||
# Essentially just ignore the cache output (forks can't write to registry cache)
|
||||
echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "cache-to=type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ matrix.device }}-${{ env.CACHE_KEY_SUFFIX }},mode=max,compression=zstd" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Build and push image
|
||||
id: build
|
||||
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
|
||||
with:
|
||||
context: ${{ env.context }}
|
||||
file: ${{ env.file }}
|
||||
platforms: ${{ matrix.platforms }}
|
||||
labels: ${{ steps.metadata.outputs.labels }}
|
||||
cache-to: ${{ steps.cache-target.outputs.cache-to }}
|
||||
cache-from: |
|
||||
type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ matrix.device }}-${{ env.CACHE_KEY_SUFFIX }}
|
||||
type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ matrix.device }}-main
|
||||
outputs: type=image,"name=${{ env.GHCR_REPO }}",push-by-digest=true,name-canonical=true,push=${{ !github.event.pull_request.head.repo.fork }}
|
||||
build-args: |
|
||||
DEVICE=${{ matrix.device }}
|
||||
BUILD_ID=${{ github.run_id }}
|
||||
BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.metadata.outputs.tags }}
|
||||
BUILD_SOURCE_REF=${{ github.ref_name }}
|
||||
BUILD_SOURCE_COMMIT=${{ github.sha }}
|
||||
|
||||
- name: Export digest
|
||||
run: |
|
||||
mkdir -p ${{ runner.temp }}/digests
|
||||
digest="${{ steps.build.outputs.digest }}"
|
||||
touch "${{ runner.temp }}/digests/${digest#sha256:}"
|
||||
|
||||
- name: Upload digest
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
||||
with:
|
||||
name: ml-digests-${{ matrix.device }}-${{ env.PLATFORM_PAIR }}
|
||||
path: ${{ runner.temp }}/digests/*
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
merge_ml:
|
||||
name: Merge & Push ML
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' && !github.event.pull_request.head.repo.fork }}
|
||||
env:
|
||||
GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-machine-learning
|
||||
DOCKER_REPO: altran1502/immich-machine-learning
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- device: cpu
|
||||
tag-suffix: ''
|
||||
- device: cuda
|
||||
suffix: -cuda
|
||||
- device: rocm
|
||||
suffix: -rocm
|
||||
tag-suffix: '-cuda'
|
||||
platforms: linux/amd64
|
||||
- device: openvino
|
||||
suffix: -openvino
|
||||
tag-suffix: '-openvino'
|
||||
platforms: linux/amd64
|
||||
- device: armnn
|
||||
suffix: -armnn
|
||||
tag-suffix: '-armnn'
|
||||
platforms: linux/arm64
|
||||
- device: rknn
|
||||
suffix: -rknn
|
||||
needs:
|
||||
- build_and_push_ml
|
||||
steps:
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
|
||||
with:
|
||||
path: ${{ runner.temp }}/digests
|
||||
pattern: ml-digests-${{ matrix.device }}-*
|
||||
merge-multiple: true
|
||||
tag-suffix: '-rknn'
|
||||
platforms: linux/arm64
|
||||
- device: rocm
|
||||
tag-suffix: '-rocm'
|
||||
platforms: linux/amd64
|
||||
runner-mapping: '{"linux/amd64": "mich"}'
|
||||
uses: ./.github/workflows/multi-runner-build.yml
|
||||
permissions:
|
||||
contents: read
|
||||
actions: read
|
||||
packages: write
|
||||
secrets:
|
||||
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
with:
|
||||
image: immich-machine-learning
|
||||
context: machine-learning
|
||||
dockerfile: machine-learning/Dockerfile
|
||||
platforms: ${{ matrix.platforms }}
|
||||
runner-mapping: ${{ matrix.runner-mapping }}
|
||||
tag-suffix: ${{ matrix.tag-suffix }}
|
||||
dockerhub-push: ${{ github.event_name == 'release' }}
|
||||
build-args: |
|
||||
DEVICE=${{ matrix.device }}
|
||||
|
||||
- name: Login to Docker Hub
|
||||
if: ${{ github.event_name == 'release' }}
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to GHCR
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
|
||||
|
||||
- name: Generate docker image tags
|
||||
id: meta
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
|
||||
env:
|
||||
DOCKER_METADATA_PR_HEAD_SHA: 'true'
|
||||
with:
|
||||
flavor: |
|
||||
# Disable latest tag
|
||||
latest=false
|
||||
suffix=${{ matrix.suffix }}
|
||||
images: |
|
||||
name=${{ env.GHCR_REPO }}
|
||||
name=${{ env.DOCKER_REPO }},enable=${{ github.event_name == 'release' }}
|
||||
tags: |
|
||||
# Tag with branch name
|
||||
type=ref,event=branch
|
||||
# Tag with pr-number
|
||||
type=ref,event=pr
|
||||
# Tag with long commit sha hash
|
||||
type=sha,format=long,prefix=commit-
|
||||
# Tag with git tag on release
|
||||
type=ref,event=tag
|
||||
type=raw,value=release,enable=${{ github.event_name == 'release' }}
|
||||
|
||||
- name: Create manifest list and push
|
||||
working-directory: ${{ runner.temp }}/digests
|
||||
run: |
|
||||
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
$(printf '${{ env.GHCR_REPO }}@sha256:%s ' *)
|
||||
|
||||
build_and_push_server:
|
||||
server:
|
||||
name: Build and Push Server
|
||||
runs-on: ${{ matrix.runner }}
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
|
||||
env:
|
||||
uses: ./.github/workflows/multi-runner-build.yml
|
||||
permissions:
|
||||
contents: read
|
||||
actions: read
|
||||
packages: write
|
||||
secrets:
|
||||
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
with:
|
||||
image: immich-server
|
||||
context: .
|
||||
file: server/Dockerfile
|
||||
GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-server
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- platform: linux/amd64
|
||||
runner: ubuntu-latest
|
||||
- platform: linux/arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
steps:
|
||||
- name: Prepare
|
||||
run: |
|
||||
platform=${{ matrix.platform }}
|
||||
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
if: ${{ !github.event.pull_request.head.repo.fork }}
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Generate cache key suffix
|
||||
run: |
|
||||
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
|
||||
echo "CACHE_KEY_SUFFIX=pr-${{ github.event.number }}" >> $GITHUB_ENV
|
||||
else
|
||||
echo "CACHE_KEY_SUFFIX=$(echo ${{ github.ref_name }} | sed 's/[^a-zA-Z0-9]/-/g')" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Generate cache target
|
||||
id: cache-target
|
||||
run: |
|
||||
if [[ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]]; then
|
||||
# Essentially just ignore the cache output (forks can't write to registry cache)
|
||||
echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "cache-to=type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ matrix.device }}-${{ env.CACHE_KEY_SUFFIX }},mode=max,compression=zstd" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Build and push image
|
||||
id: build
|
||||
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
|
||||
with:
|
||||
context: ${{ env.context }}
|
||||
file: ${{ env.file }}
|
||||
platforms: ${{ matrix.platform }}
|
||||
labels: ${{ steps.metadata.outputs.labels }}
|
||||
cache-to: ${{ steps.cache-target.outputs.cache-to }}
|
||||
cache-from: |
|
||||
type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ env.CACHE_KEY_SUFFIX }}
|
||||
type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-main
|
||||
outputs: type=image,"name=${{ env.GHCR_REPO }}",push-by-digest=true,name-canonical=true,push=${{ !github.event.pull_request.head.repo.fork }}
|
||||
build-args: |
|
||||
DEVICE=cpu
|
||||
BUILD_ID=${{ github.run_id }}
|
||||
BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.metadata.outputs.tags }}
|
||||
BUILD_SOURCE_REF=${{ github.ref_name }}
|
||||
BUILD_SOURCE_COMMIT=${{ github.sha }}
|
||||
|
||||
- name: Export digest
|
||||
run: |
|
||||
mkdir -p ${{ runner.temp }}/digests
|
||||
digest="${{ steps.build.outputs.digest }}"
|
||||
touch "${{ runner.temp }}/digests/${digest#sha256:}"
|
||||
|
||||
- name: Upload digest
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
||||
with:
|
||||
name: server-digests-${{ env.PLATFORM_PAIR }}
|
||||
path: ${{ runner.temp }}/digests/*
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
merge_server:
|
||||
name: Merge & Push Server
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ needs.pre-job.outputs.should_run_server == 'true' && !github.event.pull_request.head.repo.fork }}
|
||||
env:
|
||||
GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-server
|
||||
DOCKER_REPO: altran1502/immich-server
|
||||
needs:
|
||||
- build_and_push_server
|
||||
steps:
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
|
||||
with:
|
||||
path: ${{ runner.temp }}/digests
|
||||
pattern: server-digests-*
|
||||
merge-multiple: true
|
||||
|
||||
- name: Login to Docker Hub
|
||||
if: ${{ github.event_name == 'release' }}
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to GHCR
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
|
||||
|
||||
- name: Generate docker image tags
|
||||
id: meta
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
|
||||
env:
|
||||
DOCKER_METADATA_PR_HEAD_SHA: 'true'
|
||||
with:
|
||||
flavor: |
|
||||
# Disable latest tag
|
||||
latest=false
|
||||
suffix=${{ matrix.suffix }}
|
||||
images: |
|
||||
name=${{ env.GHCR_REPO }}
|
||||
name=${{ env.DOCKER_REPO }},enable=${{ github.event_name == 'release' }}
|
||||
tags: |
|
||||
# Tag with branch name
|
||||
type=ref,event=branch
|
||||
# Tag with pr-number
|
||||
type=ref,event=pr
|
||||
# Tag with long commit sha hash
|
||||
type=sha,format=long,prefix=commit-
|
||||
# Tag with git tag on release
|
||||
type=ref,event=tag
|
||||
type=raw,value=release,enable=${{ github.event_name == 'release' }}
|
||||
|
||||
- name: Create manifest list and push
|
||||
working-directory: ${{ runner.temp }}/digests
|
||||
run: |
|
||||
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
$(printf '${{ env.GHCR_REPO }}@sha256:%s ' *)
|
||||
dockerfile: server/Dockerfile
|
||||
dockerhub-push: ${{ github.event_name == 'release' }}
|
||||
build-args: |
|
||||
DEVICE=cpu
|
||||
|
||||
success-check-server:
|
||||
name: Docker Build & Push Server Success
|
||||
needs: [merge_server, retag_server]
|
||||
needs: [server, retag_server]
|
||||
permissions: {}
|
||||
runs-on: ubuntu-latest
|
||||
if: always()
|
||||
steps:
|
||||
@@ -455,11 +182,13 @@ jobs:
|
||||
run: exit 1
|
||||
- name: All jobs passed or skipped
|
||||
if: ${{ !(contains(needs.*.result, 'failure')) }}
|
||||
# zizmor: ignore[template-injection]
|
||||
run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
|
||||
|
||||
success-check-ml:
|
||||
name: Docker Build & Push ML Success
|
||||
needs: [merge_ml, retag_ml]
|
||||
needs: [machine-learning, retag_ml]
|
||||
permissions: {}
|
||||
runs-on: ubuntu-latest
|
||||
if: always()
|
||||
steps:
|
||||
@@ -468,4 +197,5 @@ jobs:
|
||||
run: exit 1
|
||||
- name: All jobs passed or skipped
|
||||
if: ${{ !(contains(needs.*.result, 'failure')) }}
|
||||
# zizmor: ignore[template-injection]
|
||||
run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
|
||||
|
||||
21
.github/workflows/docs-build.yml
vendored
21
.github/workflows/docs-build.yml
vendored
@@ -10,16 +10,22 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
pre-job:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should_run: ${{ steps.found_paths.outputs.docs == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
|
||||
with:
|
||||
filters: |
|
||||
docs:
|
||||
@@ -33,6 +39,8 @@ jobs:
|
||||
build:
|
||||
name: Docs Build
|
||||
needs: pre-job
|
||||
permissions:
|
||||
contents: read
|
||||
if: ${{ needs.pre-job.outputs.should_run == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
@@ -41,10 +49,12 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './docs/.nvmrc'
|
||||
|
||||
@@ -58,8 +68,9 @@ jobs:
|
||||
run: npm run build
|
||||
|
||||
- name: Upload build output
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: docs-build-output
|
||||
path: docs/build/
|
||||
include-hidden-files: true
|
||||
retention-days: 1
|
||||
|
||||
55
.github/workflows/docs-deploy.yml
vendored
55
.github/workflows/docs-deploy.yml
vendored
@@ -1,6 +1,6 @@
|
||||
name: Docs deploy
|
||||
on:
|
||||
workflow_run:
|
||||
workflow_run: # zizmor: ignore[dangerous-triggers] no attacker inputs are used here
|
||||
workflows: ['Docs build']
|
||||
types:
|
||||
- completed
|
||||
@@ -9,6 +9,9 @@ jobs:
|
||||
checks:
|
||||
name: Docs Deploy Checks
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
actions: read
|
||||
pull-requests: read
|
||||
outputs:
|
||||
parameters: ${{ steps.parameters.outputs.result }}
|
||||
artifact: ${{ steps.get-artifact.outputs.result }}
|
||||
@@ -17,7 +20,7 @@ jobs:
|
||||
run: echo 'The triggering workflow did not succeed' && exit 1
|
||||
- name: Get artifact
|
||||
id: get-artifact
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
with:
|
||||
script: |
|
||||
let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
|
||||
@@ -35,7 +38,9 @@ jobs:
|
||||
return { found: true, id: matchArtifact.id };
|
||||
- name: Determine deploy parameters
|
||||
id: parameters
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
env:
|
||||
HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
|
||||
with:
|
||||
script: |
|
||||
const eventType = context.payload.workflow_run.event;
|
||||
@@ -57,7 +62,8 @@ jobs:
|
||||
} else if (eventType == "pull_request") {
|
||||
let pull_number = context.payload.workflow_run.pull_requests[0]?.number;
|
||||
if(!pull_number) {
|
||||
const response = await github.rest.search.issuesAndPullRequests({q: 'repo:${{ github.repository }} is:pr sha:${{ github.event.workflow_run.head_sha }}',per_page: 1,})
|
||||
const {HEAD_SHA} = process.env;
|
||||
const response = await github.rest.search.issuesAndPullRequests({q: `repo:${{ github.repository }} is:pr sha:${HEAD_SHA}`,per_page: 1,})
|
||||
const items = response.data.items
|
||||
if (items.length < 1) {
|
||||
throw new Error("No pull request found for the commit")
|
||||
@@ -95,30 +101,36 @@ jobs:
|
||||
name: Docs Deploy
|
||||
runs-on: ubuntu-latest
|
||||
needs: checks
|
||||
permissions:
|
||||
contents: read
|
||||
actions: read
|
||||
pull-requests: write
|
||||
if: ${{ fromJson(needs.checks.outputs.artifact).found && fromJson(needs.checks.outputs.parameters).shouldDeploy }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Load parameters
|
||||
id: parameters
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
env:
|
||||
PARAM_JSON: ${{ needs.checks.outputs.parameters }}
|
||||
with:
|
||||
script: |
|
||||
const json = `${{ needs.checks.outputs.parameters }}`;
|
||||
const parameters = JSON.parse(json);
|
||||
const parameters = JSON.parse(process.env.PARAM_JSON);
|
||||
core.setOutput("event", parameters.event);
|
||||
core.setOutput("name", parameters.name);
|
||||
core.setOutput("shouldDeploy", parameters.shouldDeploy);
|
||||
|
||||
- run: |
|
||||
echo "Starting docs deployment for ${{ steps.parameters.outputs.event }} ${{ steps.parameters.outputs.name }}"
|
||||
|
||||
- name: Download artifact
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
env:
|
||||
ARTIFACT_JSON: ${{ needs.checks.outputs.artifact }}
|
||||
with:
|
||||
script: |
|
||||
let artifact = ${{ needs.checks.outputs.artifact }};
|
||||
let artifact = JSON.parse(process.env.ARTIFACT_JSON);
|
||||
let download = await github.rest.actions.downloadArtifact({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
@@ -138,7 +150,7 @@ jobs:
|
||||
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
|
||||
uses: gruntwork-io/terragrunt-action@9559e51d05873b0ea467c42bbabcb5c067642ccc # v2
|
||||
uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
|
||||
with:
|
||||
tg_version: '0.58.12'
|
||||
tofu_version: '1.7.1'
|
||||
@@ -153,7 +165,7 @@ jobs:
|
||||
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
|
||||
uses: gruntwork-io/terragrunt-action@9559e51d05873b0ea467c42bbabcb5c067642ccc # v2
|
||||
uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
|
||||
with:
|
||||
tg_version: '0.58.12'
|
||||
tofu_version: '1.7.1'
|
||||
@@ -162,12 +174,15 @@ jobs:
|
||||
|
||||
- name: Output Cleaning
|
||||
id: clean
|
||||
env:
|
||||
TG_OUTPUT: ${{ steps.docs-output.outputs.tg_action_output }}
|
||||
run: |
|
||||
TG_OUT=$(echo '${{ steps.docs-output.outputs.tg_action_output }}' | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
|
||||
echo "output=$TG_OUT" >> $GITHUB_OUTPUT
|
||||
CLEANED=$(echo "$TG_OUTPUT" | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
|
||||
echo "output=$CLEANED" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Publish to Cloudflare Pages
|
||||
uses: cloudflare/pages-action@f0a1cd58cd66095dee69bfa18fa5efd1dde93bca # v1
|
||||
# TODO: Action is deprecated
|
||||
uses: cloudflare/pages-action@f0a1cd58cd66095dee69bfa18fa5efd1dde93bca # v1.5.0
|
||||
with:
|
||||
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN_PAGES_UPLOAD }}
|
||||
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||
@@ -184,7 +199,7 @@ jobs:
|
||||
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
|
||||
uses: gruntwork-io/terragrunt-action@9559e51d05873b0ea467c42bbabcb5c067642ccc # v2
|
||||
uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
|
||||
with:
|
||||
tg_version: '0.58.12'
|
||||
tofu_version: '1.7.1'
|
||||
@@ -192,7 +207,7 @@ jobs:
|
||||
tg_command: 'apply'
|
||||
|
||||
- name: Comment
|
||||
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3
|
||||
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0
|
||||
if: ${{ steps.parameters.outputs.event == 'pr' }}
|
||||
with:
|
||||
number: ${{ fromJson(needs.checks.outputs.parameters).pr_number }}
|
||||
|
||||
15
.github/workflows/docs-destroy.yml
vendored
15
.github/workflows/docs-destroy.yml
vendored
@@ -1,15 +1,22 @@
|
||||
name: Docs destroy
|
||||
on:
|
||||
pull_request_target:
|
||||
pull_request_target: # zizmor: ignore[dangerous-triggers] no attacker inputs are used here
|
||||
types: [closed]
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
name: Docs Destroy
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Destroy Docs Subdomain
|
||||
env:
|
||||
@@ -18,7 +25,7 @@ jobs:
|
||||
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
|
||||
uses: gruntwork-io/terragrunt-action@9559e51d05873b0ea467c42bbabcb5c067642ccc # v2
|
||||
uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
|
||||
with:
|
||||
tg_version: '0.58.12'
|
||||
tofu_version: '1.7.1'
|
||||
@@ -26,7 +33,7 @@ jobs:
|
||||
tg_command: 'destroy -refresh=false'
|
||||
|
||||
- name: Comment
|
||||
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3
|
||||
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0
|
||||
with:
|
||||
number: ${{ github.event.number }}
|
||||
delete: true
|
||||
|
||||
.github/workflows/fix-format.yml (14 lines changed)
@@ -4,28 +4,32 @@ on:
|
||||
pull_request:
|
||||
types: [labeled]
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
fix-formatting:
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.event.label.name == 'fix:formatting' }}
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@d72941d797fd3113feb6b93fd0dec494b13a2547 # v1
|
||||
uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: 'Checkout'
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.ref }}
|
||||
token: ${{ steps.generate-token.outputs.token }}
|
||||
persist-credentials: true
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './server/.nvmrc'
|
||||
|
||||
@@ -33,13 +37,13 @@ jobs:
|
||||
run: make install-all && make format-all
|
||||
|
||||
- name: Commit and push
|
||||
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9
|
||||
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4
|
||||
with:
|
||||
default_author: github_actions
|
||||
message: 'chore: fix formatting'
|
||||
|
||||
- name: Remove label
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
if: always()
|
||||
with:
|
||||
script: |
|
||||
|
||||
.github/workflows/multi-runner-build.yml (new file, 185 lines)
@@ -0,0 +1,185 @@
|
||||
name: 'Multi-runner container image build'
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
image:
|
||||
description: 'Name of the image'
|
||||
type: string
|
||||
required: true
|
||||
context:
|
||||
description: 'Path to build context'
|
||||
type: string
|
||||
required: true
|
||||
dockerfile:
|
||||
description: 'Path to Dockerfile'
|
||||
type: string
|
||||
required: true
|
||||
tag-suffix:
|
||||
description: 'Suffix to append to the image tag'
|
||||
type: string
|
||||
default: ''
|
||||
dockerhub-push:
|
||||
description: 'Push to Docker Hub'
|
||||
type: boolean
|
||||
default: false
|
||||
build-args:
|
||||
description: 'Docker build arguments'
|
||||
type: string
|
||||
required: false
|
||||
platforms:
|
||||
description: 'Platforms to build for'
|
||||
type: string
|
||||
runner-mapping:
|
||||
description: 'Mapping from platforms to runners'
|
||||
type: string
|
||||
secrets:
|
||||
DOCKERHUB_USERNAME:
|
||||
required: false
|
||||
DOCKERHUB_TOKEN:
|
||||
required: false
|
||||
|
||||
env:
|
||||
GHCR_IMAGE: ghcr.io/${{ github.repository_owner }}/${{ inputs.image }}
|
||||
DOCKERHUB_IMAGE: altran1502/${{ inputs.image }}
|
||||
|
||||
jobs:
|
||||
matrix:
|
||||
name: 'Generate matrix'
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
matrix: ${{ steps.matrix.outputs.matrix }}
|
||||
key: ${{ steps.artifact-key.outputs.base }}
|
||||
steps:
|
||||
- name: Generate build matrix
|
||||
id: matrix
|
||||
shell: bash
|
||||
env:
|
||||
PLATFORMS: ${{ inputs.platforms || 'linux/amd64,linux/arm64' }}
|
||||
RUNNER_MAPPING: ${{ inputs.runner-mapping || '{"linux/amd64":"ubuntu-latest","linux/arm64":"ubuntu-24.04-arm"}' }}
|
||||
run: |
|
||||
matrix=$(jq -R -c \
|
||||
--argjson runner_mapping "${RUNNER_MAPPING}" \
|
||||
'split(",") | map({platform: ., runner: $runner_mapping[.]})' \
|
||||
<<< "${PLATFORMS}")
|
||||
echo "${matrix}"
|
||||
echo "matrix=${matrix}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Determine artifact key
|
||||
id: artifact-key
|
||||
shell: bash
|
||||
env:
|
||||
IMAGE: ${{ inputs.image }}
|
||||
SUFFIX: ${{ inputs.tag-suffix }}
|
||||
run: |
|
||||
if [[ -n "${SUFFIX}" ]]; then
|
||||
base="${IMAGE}${SUFFIX}-digests"
|
||||
else
|
||||
base="${IMAGE}-digests"
|
||||
fi
|
||||
echo "${base}"
|
||||
echo "base=${base}" >> $GITHUB_OUTPUT
|
||||
|
||||
build:
|
||||
needs: matrix
|
||||
runs-on: ${{ matrix.runner }}
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include: ${{ fromJson(needs.matrix.outputs.matrix) }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- uses: ./.github/actions/image-build
|
||||
with:
|
||||
context: ${{ inputs.context }}
|
||||
dockerfile: ${{ inputs.dockerfile }}
|
||||
image: ${{ env.GHCR_IMAGE }}
|
||||
ghcr-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
platform: ${{ matrix.platform }}
|
||||
artifact-key-base: ${{ needs.matrix.outputs.key }}
|
||||
build-args: ${{ inputs.build-args }}
|
||||
|
||||
merge:
|
||||
needs: [matrix, build]
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ !github.event.pull_request.head.repo.fork }}
|
||||
permissions:
|
||||
contents: read
|
||||
actions: read
|
||||
packages: write
|
||||
steps:
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4
|
||||
with:
|
||||
path: ${{ runner.temp }}/digests
|
||||
pattern: ${{ needs.matrix.outputs.key }}-*
|
||||
merge-multiple: true
|
||||
|
||||
- name: Login to Docker Hub
|
||||
if: ${{ inputs.dockerhub-push }}
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to GHCR
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
|
||||
|
||||
- name: Generate docker image tags
|
||||
id: meta
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
|
||||
env:
|
||||
DOCKER_METADATA_PR_HEAD_SHA: 'true'
|
||||
with:
|
||||
flavor: |
|
||||
# Disable latest tag
|
||||
latest=false
|
||||
suffix=${{ inputs.tag-suffix }}
|
||||
images: |
|
||||
name=${{ env.GHCR_IMAGE }}
|
||||
name=${{ env.DOCKERHUB_IMAGE }},enable=${{ inputs.dockerhub-push }}
|
||||
tags: |
|
||||
# Tag with branch name
|
||||
type=ref,event=branch
|
||||
# Tag with pr-number
|
||||
type=ref,event=pr
|
||||
# Tag with long commit sha hash
|
||||
type=sha,format=long,prefix=commit-
|
||||
# Tag with git tag on release
|
||||
type=ref,event=tag
|
||||
type=raw,value=release,enable=${{ github.event_name == 'release' }}
|
||||
|
||||
- name: Create manifest list and push
|
||||
working-directory: ${{ runner.temp }}/digests
|
||||
run: |
|
||||
# Process annotations
|
||||
declare -a ANNOTATIONS=()
|
||||
if [[ -n "$DOCKER_METADATA_OUTPUT_JSON" ]]; then
|
||||
while IFS= read -r annotation; do
|
||||
# Extract key and value by removing the manifest: prefix
|
||||
if [[ "$annotation" =~ ^manifest:(.+)=(.+)$ ]]; then
|
||||
key="${BASH_REMATCH[1]}"
|
||||
value="${BASH_REMATCH[2]}"
|
||||
# Use array to properly handle arguments with spaces
|
||||
ANNOTATIONS+=(--annotation "index:$key=$value")
|
||||
fi
|
||||
done < <(jq -r '.annotations[]' <<< "$DOCKER_METADATA_OUTPUT_JSON")
|
||||
fi
|
||||
|
||||
TAGS=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
SOURCE_ARGS=$(printf "${GHCR_IMAGE}@sha256:%s " *)
|
||||
|
||||
docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS
|
||||
.github/workflows/pr-label-validation.yml (6 lines changed)
@@ -1,9 +1,11 @@
|
||||
name: PR Label Validation
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
pull_request_target: # zizmor: ignore[dangerous-triggers] no attacker inputs are used here
|
||||
types: [opened, labeled, unlabeled, synchronize]
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
validate-release-label:
|
||||
runs-on: ubuntu-latest
|
||||
@@ -12,7 +14,7 @@ jobs:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Require PR to have a changelog label
|
||||
uses: mheap/github-action-required-labels@388fd6af37b34cdfe5a23b37060e763217e58b03 # v5
|
||||
uses: mheap/github-action-required-labels@fb29a14a076b0f74099f6198f77750e8fc236016 # v5.5.0
|
||||
with:
|
||||
mode: exactly
|
||||
count: 1
|
||||
|
||||
.github/workflows/pr-labeler.yml (6 lines changed)
@@ -1,6 +1,8 @@
|
||||
name: 'Pull Request Labeler'
|
||||
on:
|
||||
- pull_request_target
|
||||
- pull_request_target # zizmor: ignore[dangerous-triggers] no attacker inputs are used here
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
labeler:
|
||||
@@ -9,4 +11,4 @@ jobs:
|
||||
pull-requests: write
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5
|
||||
- uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0
|
||||
|
||||
@@ -4,9 +4,13 @@ on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened, edited]
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
validate-pr-title:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: PR Conventional Commit Validation
|
||||
uses: ytanikin/PRConventionalCommits@b628c5a234cc32513014b7bfdd1e47b532124d98 # 1.3.0
|
||||
|
||||
.github/workflows/prepare-release.yml (41 lines changed)
@@ -21,35 +21,40 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}-root
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
bump_version:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
outputs:
|
||||
ref: ${{ steps.push-tag.outputs.commit_long_sha }}
|
||||
|
||||
permissions: {} # No job-level permissions are needed because it uses the app-token
|
||||
steps:
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@d72941d797fd3113feb6b93fd0dec494b13a2547 # v1
|
||||
uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
token: ${{ steps.generate-token.outputs.token }}
|
||||
persist-credentials: true
|
||||
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5
|
||||
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
|
||||
|
||||
- name: Bump version
|
||||
run: misc/release/pump-version.sh -s "${{ inputs.serverBump }}" -m "${{ inputs.mobileBump }}"
|
||||
env:
|
||||
SERVER_BUMP: ${{ inputs.serverBump }}
|
||||
MOBILE_BUMP: ${{ inputs.mobileBump }}
|
||||
run: misc/release/pump-version.sh -s "${SERVER_BUMP}" -m "${MOBILE_BUMP}"
|
||||
|
||||
- name: Commit and tag
|
||||
id: push-tag
|
||||
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9
|
||||
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4
|
||||
with:
|
||||
default_author: github_actions
|
||||
message: 'chore: version ${{ env.IMMICH_VERSION }}'
|
||||
@@ -59,37 +64,47 @@ jobs:
|
||||
build_mobile:
|
||||
uses: ./.github/workflows/build-mobile.yml
|
||||
needs: bump_version
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
secrets:
|
||||
KEY_JKS: ${{ secrets.KEY_JKS }}
|
||||
ALIAS: ${{ secrets.ALIAS }}
|
||||
ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
|
||||
ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
|
||||
with:
|
||||
ref: ${{ needs.bump_version.outputs.ref }}
|
||||
|
||||
prepare_release:
|
||||
runs-on: ubuntu-latest
|
||||
needs: build_mobile
|
||||
|
||||
permissions:
|
||||
actions: read # To download the app artifact
|
||||
# No content permissions are needed because it uses the app-token
|
||||
steps:
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@d72941d797fd3113feb6b93fd0dec494b13a2547 # v1
|
||||
uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
token: ${{ steps.generate-token.outputs.token }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Download APK
|
||||
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
|
||||
with:
|
||||
name: release-apk-signed
|
||||
|
||||
- name: Create draft release
|
||||
uses: softprops/action-gh-release@c95fe1489396fe8a9eb87c0abf8aa5b2ef267fda # v2
|
||||
uses: softprops/action-gh-release@da05d552573ad5aba039eaac05058a918a7bf631 # v2.2.2
|
||||
with:
|
||||
draft: true
|
||||
tag_name: ${{ env.IMMICH_VERSION }}
|
||||
token: ${{ steps.generate-token.outputs.token }}
|
||||
generate_release_notes: true
|
||||
body_path: misc/release/notes.tmpl
|
||||
files: |
|
||||
|
||||
.github/workflows/preview-label.yaml (6 lines changed)
@@ -4,6 +4,8 @@ on:
|
||||
pull_request:
|
||||
types: [labeled, closed]
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
comment-status:
|
||||
runs-on: ubuntu-latest
|
||||
@@ -11,7 +13,7 @@ jobs:
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2
|
||||
- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
|
||||
with:
|
||||
message-id: 'preview-status'
|
||||
message: 'Deploying preview environment to https://pr-${{ github.event.pull_request.number }}.preview.internal.immich.cloud/'
|
||||
@@ -22,7 +24,7 @@ jobs:
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
|
||||
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
with:
|
||||
script: |
|
||||
github.rest.issues.removeLabel({
|
||||
|
||||
.github/workflows/sdk.yml (12 lines changed)
@@ -4,20 +4,24 @@ on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
permissions:
|
||||
packages: write
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
name: Publish `@immich/sdk`
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./open-api/typescript-sdk
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
# Setup .npmrc file to publish to npm
|
||||
- uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
|
||||
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './open-api/typescript-sdk/.nvmrc'
|
||||
registry-url: 'https://registry.npmjs.org'
|
||||
|
||||
.github/workflows/static_analysis.yml (57 lines changed)
@@ -9,16 +9,22 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
pre-job:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
|
||||
with:
|
||||
filters: |
|
||||
mobile:
|
||||
@@ -33,15 +39,17 @@ jobs:
|
||||
name: Run Dart Code Analysis
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run == 'true' }}
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Flutter SDK
|
||||
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2
|
||||
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2.19.0
|
||||
with:
|
||||
channel: 'stable'
|
||||
flutter-version-file: ./mobile/pubspec.yaml
|
||||
@@ -50,12 +58,16 @@ jobs:
|
||||
run: dart pub get
|
||||
working-directory: ./mobile
|
||||
|
||||
- name: Generate translation file
|
||||
run: make translation; dart format lib/generated/codegen_loader.g.dart
|
||||
working-directory: ./mobile
|
||||
|
||||
- name: Run Build Runner
|
||||
run: make build
|
||||
working-directory: ./mobile
|
||||
|
||||
- name: Find file changes
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
|
||||
id: verify-changed-files
|
||||
with:
|
||||
files: |
|
||||
@@ -65,9 +77,11 @@ jobs:
|
||||
|
||||
- name: Verify files have not changed
|
||||
if: steps.verify-changed-files.outputs.files_changed == 'true'
|
||||
env:
|
||||
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
|
||||
run: |
|
||||
echo "ERROR: Generated files not up to date! Run make_build inside the mobile directory"
|
||||
echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
|
||||
echo "Changed files: ${CHANGED_FILES}"
|
||||
exit 1
|
||||
|
||||
- name: Run dart analyze
|
||||
@@ -81,3 +95,30 @@ jobs:
|
||||
- name: Run dart custom_lint
|
||||
run: dart run custom_lint
|
||||
working-directory: ./mobile
|
||||
|
||||
zizmor:
|
||||
name: zizmor
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
security-events: write
|
||||
contents: read
|
||||
actions: read
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Install the latest version of uv
|
||||
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
|
||||
|
||||
- name: Run zizmor 🌈
|
||||
run: uvx zizmor --format=sarif . > results.sarif
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Upload SARIF file
|
||||
uses: github/codeql-action/upload-sarif@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
|
||||
with:
|
||||
sarif_file: results.sarif
|
||||
category: zizmor
|
||||
|
||||
.github/workflows/test.yml (264 lines changed)
@@ -9,10 +9,15 @@ concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
pre-job:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should_run_i18n: ${{ steps.found_paths.outputs.i18n == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
should_run_web: ${{ steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
should_run_cli: ${{ steps.found_paths.outputs.cli == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
@@ -24,11 +29,16 @@ jobs:
|
||||
should_run_.github: ${{ steps.found_paths.outputs['.github'] == 'true' || steps.should_force.outputs.should_force == 'true' }} # redundant to have should_force but if someone changes the trigger then this won't have to be changed
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
|
||||
with:
|
||||
filters: |
|
||||
i18n:
|
||||
- 'i18n/**'
|
||||
web:
|
||||
- 'web/**'
|
||||
- 'i18n/**'
|
||||
@@ -58,16 +68,20 @@ jobs:
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./server
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './server/.nvmrc'
|
||||
|
||||
@@ -95,16 +109,20 @@ jobs:
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./cli
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './cli/.nvmrc'
|
||||
|
||||
@@ -136,16 +154,20 @@ jobs:
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
|
||||
runs-on: windows-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./cli
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './cli/.nvmrc'
|
||||
|
||||
@@ -165,21 +187,25 @@ jobs:
|
||||
run: npm run test:cov
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
web-unit-tests:
|
||||
name: Test & Lint Web
|
||||
web-lint:
|
||||
name: Lint Web
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_web == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: mich
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./web
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './web/.nvmrc'
|
||||
|
||||
@@ -191,7 +217,7 @@ jobs:
|
||||
run: npm ci
|
||||
|
||||
- name: Run linter
|
||||
run: npm run lint
|
||||
run: npm run lint:p
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run formatter
|
||||
@@ -202,6 +228,35 @@ jobs:
|
||||
run: npm run check:svelte
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
web-unit-tests:
|
||||
name: Test Web
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_web == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./web
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './web/.nvmrc'
|
||||
|
||||
- name: Run setup typescript-sdk
|
||||
run: npm ci && npm run build
|
||||
working-directory: ./open-api/typescript-sdk
|
||||
|
||||
- name: Run npm install
|
||||
run: npm ci
|
||||
|
||||
- name: Run tsc
|
||||
run: npm run check:typescript
|
||||
if: ${{ !cancelled() }}
|
||||
@@ -210,21 +265,65 @@ jobs:
|
||||
run: npm run test:cov
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
i18n-tests:
|
||||
name: Test i18n
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_i18n == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './web/.nvmrc'
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm --prefix=web ci
|
||||
|
||||
- name: Format
|
||||
run: npm --prefix=web run format:i18n
|
||||
|
||||
- name: Find file changes
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
|
||||
id: verify-changed-files
|
||||
with:
|
||||
files: |
|
||||
i18n/**
|
||||
|
||||
- name: Verify files have not changed
|
||||
if: steps.verify-changed-files.outputs.files_changed == 'true'
|
||||
env:
|
||||
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
|
||||
run: |
|
||||
echo "ERROR: i18n files not up to date!"
|
||||
echo "Changed files: ${CHANGED_FILES}"
|
||||
exit 1
|
||||
|
||||
e2e-tests-lint:
|
||||
name: End-to-End Lint
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_e2e == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./e2e
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './e2e/.nvmrc'
|
||||
|
||||
@@ -254,16 +353,20 @@ jobs:
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./server
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './server/.nvmrc'
|
||||
|
||||
@@ -278,19 +381,25 @@ jobs:
|
||||
name: End-to-End Tests (Server & CLI)
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_e2e_server_cli == 'true' }}
|
||||
runs-on: mich
|
||||
runs-on: ${{ matrix.runner }}
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./e2e
|
||||
strategy:
|
||||
matrix:
|
||||
runner: [ubuntu-latest, ubuntu-24.04-arm]
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: 'recursive'
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './e2e/.nvmrc'
|
||||
|
||||
@@ -320,19 +429,25 @@ jobs:
|
||||
name: End-to-End Tests (Web)
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_e2e_web == 'true' }}
|
||||
runs-on: mich
|
||||
runs-on: ${{ matrix.runner }}
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./e2e
|
||||
strategy:
|
||||
matrix:
|
||||
runner: [ubuntu-latest, ubuntu-24.04-arm]
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: 'recursive'
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './e2e/.nvmrc'
|
||||
|
||||
@@ -357,18 +472,43 @@ jobs:
|
||||
run: npx playwright test
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
success-check-e2e:
|
||||
name: End-to-End Tests Success
|
||||
needs: [e2e-tests-server-cli, e2e-tests-web]
|
||||
permissions: {}
|
||||
runs-on: ubuntu-latest
|
||||
if: always()
|
||||
steps:
|
||||
- name: Any jobs failed?
|
||||
if: ${{ contains(needs.*.result, 'failure') }}
|
||||
run: exit 1
|
||||
- name: All jobs passed or skipped
|
||||
if: ${{ !(contains(needs.*.result, 'failure')) }}
|
||||
# zizmor: ignore[template-injection]
|
||||
run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
|
||||
|
||||
mobile-unit-tests:
|
||||
name: Unit Test Mobile
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_mobile == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Flutter SDK
|
||||
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2
|
||||
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2.19.0
|
||||
with:
|
||||
channel: 'stable'
|
||||
flutter-version-file: ./mobile/pubspec.yaml
|
||||
|
||||
- name: Generate translation file
|
||||
run: make translation
|
||||
working-directory: ./mobile
|
||||
|
||||
- name: Run tests
|
||||
working-directory: ./mobile
|
||||
run: flutter test -j 1
|
||||
@@ -378,14 +518,19 @@ jobs:
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./machine-learning
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@0c5e2b8115b80b4c7c5ddf6ffdd634974642d182 # v5
|
||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5
|
||||
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
|
||||
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
# TODO: add caching when supported (https://github.com/actions/setup-python/pull/818)
|
||||
# with:
|
||||
# python-version: 3.11
|
||||
@@ -411,16 +556,20 @@ jobs:
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs['should_run_.github'] == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./.github
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './.github/.nvmrc'
|
||||
|
||||
@@ -434,25 +583,34 @@ jobs:
|
||||
shellcheck:
|
||||
name: ShellCheck
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Run ShellCheck
|
||||
uses: ludeeus/action-shellcheck@master
|
||||
with:
|
||||
ignore_paths: >-
|
||||
**/open-api/**
|
||||
**/openapi/**
|
||||
**/openapi**
|
||||
**/node_modules/**
|
||||
|
||||
generated-api-up-to-date:
|
||||
name: OpenAPI Clients
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './server/.nvmrc'
|
||||
|
||||
@@ -466,7 +624,7 @@ jobs:
|
||||
run: make open-api
|
||||
|
||||
- name: Find file changes
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
|
||||
id: verify-changed-files
|
||||
with:
|
||||
files: |
|
||||
@@ -476,17 +634,21 @@ jobs:
|
||||
|
||||
- name: Verify files have not changed
|
||||
if: steps.verify-changed-files.outputs.files_changed == 'true'
|
||||
env:
|
||||
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
|
||||
run: |
|
||||
echo "ERROR: Generated files not up to date!"
|
||||
echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
|
||||
echo "Changed files: ${CHANGED_FILES}"
|
||||
exit 1
|
||||
|
||||
generated-typeorm-migrations-up-to-date:
|
||||
name: TypeORM Checks
|
||||
sql-schema-up-to-date:
|
||||
name: SQL Schema Checks
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
services:
|
||||
postgres:
|
||||
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
|
||||
image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:1076bb152a3000df23911fdec83f14ea83f0dd0c42bc7d4e14b854e9bda1b0c9
|
||||
env:
|
||||
POSTGRES_PASSWORD: postgres
|
||||
POSTGRES_USER: postgres
|
||||
@@ -504,10 +666,12 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './server/.nvmrc'
|
||||
|
||||
@@ -521,23 +685,25 @@ jobs:
|
||||
run: npm run migrations:run
|
||||
|
||||
- name: Test npm run schema:reset command works
|
||||
run: npm run typeorm:schema:reset
|
||||
run: npm run schema:reset
|
||||
|
||||
- name: Generate new migrations
|
||||
continue-on-error: true
|
||||
run: npm run migrations:generate TestMigration
|
||||
run: npm run migrations:generate src/TestMigration
|
||||
|
||||
- name: Find file changes
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
|
||||
id: verify-changed-files
|
||||
with:
|
||||
files: |
|
||||
server/src
|
||||
- name: Verify migration files have not changed
|
||||
if: steps.verify-changed-files.outputs.files_changed == 'true'
|
||||
env:
|
||||
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
|
||||
run: |
|
||||
echo "ERROR: Generated migration files not up to date!"
|
||||
echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
|
||||
echo "Changed files: ${CHANGED_FILES}"
|
||||
cat ./src/*-TestMigration.ts
|
||||
exit 1
|
||||
|
||||
@@ -547,7 +713,7 @@ jobs:
|
||||
DB_URL: postgres://postgres:postgres@localhost:5432/immich
|
||||
|
||||
- name: Find file changes
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
|
||||
id: verify-changed-sql-files
|
||||
with:
|
||||
files: |
|
||||
@@ -555,9 +721,11 @@ jobs:
|
||||
|
||||
- name: Verify SQL files have not changed
|
||||
if: steps.verify-changed-sql-files.outputs.files_changed == 'true'
|
||||
env:
|
||||
CHANGED_FILES: ${{ steps.verify-changed-sql-files.outputs.changed_files }}
|
||||
run: |
|
||||
echo "ERROR: Generated SQL files not up to date!"
|
||||
echo "Changed files: ${{ steps.verify-changed-sql-files.outputs.changed_files }}"
|
||||
echo "Changed files: ${CHANGED_FILES}"
|
||||
exit 1
|
||||
|
||||
# mobile-integration-tests:
|
||||
|
||||
.github/workflows/weblate-lock.yml (20 lines changed)
@@ -4,30 +4,32 @@ on:
|
||||
pull_request:
|
||||
branches: [main]
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
pre-job:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should_run: ${{ steps.found_paths.outputs.i18n == 'true' && github.head_ref != 'chore/translations'}}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
|
||||
with:
|
||||
filters: |
|
||||
i18n:
|
||||
- 'i18n/!(en)**\.json'
|
||||
- name: Debug
|
||||
run: |
|
||||
echo "Should run: ${{ steps.found_paths.outputs.i18n == 'true' && github.head_ref != 'chore/translations'}}"
|
||||
echo "Found i18n paths: ${{ steps.found_paths.outputs.i18n }}"
|
||||
echo "Head ref: ${{ github.head_ref }}"
|
||||
|
||||
enforce-lock:
|
||||
name: Check Weblate Lock
|
||||
needs: [pre-job]
|
||||
runs-on: ubuntu-latest
|
||||
permissions: {}
|
||||
if: ${{ needs.pre-job.outputs.should_run == 'true' }}
|
||||
steps:
|
||||
- name: Check weblate lock
|
||||
@@ -36,7 +38,7 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
- name: Find Pull Request
|
||||
uses: juliangruber/find-pull-request-action@48b6133aa6c826f267ebd33aa2d29470f9d9e7d0 # v1
|
||||
uses: juliangruber/find-pull-request-action@48b6133aa6c826f267ebd33aa2d29470f9d9e7d0 # v1.9.0
|
||||
id: find-pr
|
||||
with:
|
||||
branch: chore/translations
|
||||
@@ -47,6 +49,7 @@ jobs:
|
||||
name: Weblate Lock Check Success
|
||||
needs: [enforce-lock]
|
||||
runs-on: ubuntu-latest
|
||||
permissions: {}
|
||||
if: always()
|
||||
steps:
|
||||
- name: Any jobs failed?
|
||||
@@ -54,4 +57,5 @@ jobs:
|
||||
run: exit 1
|
||||
- name: All jobs passed or skipped
|
||||
if: ${{ !(contains(needs.*.result, 'failure')) }}
|
||||
# zizmor: ignore[template-injection]
|
||||
run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
|
||||
|
||||
.gitignore (1 line changed)
@@ -3,6 +3,7 @@
.DS_Store
.vscode/*
!.vscode/launch.json
+!.vscode/extensions.json
.idea

docker/upload
.vscode/extensions.json (new file, 10 lines)
@@ -0,0 +1,10 @@
+{
+  "recommendations": [
+    "esbenp.prettier-vscode",
+    "svelte.svelte-vscode",
+    "dbaeumer.vscode-eslint",
+    "dart-code.flutter",
+    "dart-code.dart-code",
+    "dcmdev.dcm-vscode-extension"
+  ]
+}
.vscode/settings.json (80 lines changed)
@@ -1,45 +1,63 @@
|
||||
{
|
||||
"editor.formatOnSave": true,
|
||||
"[javascript]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.tabSize": 2,
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[typescript]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.tabSize": 2,
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[css]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.tabSize": 2,
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[svelte]": {
|
||||
"editor.defaultFormatter": "svelte.svelte-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"svelte.enable-ts-plugin": true,
|
||||
"eslint.validate": [
|
||||
"javascript",
|
||||
"svelte"
|
||||
],
|
||||
"typescript.preferences.importModuleSpecifier": "non-relative",
|
||||
"[dart]": {
|
||||
"editor.defaultFormatter": "Dart-Code.dart-code",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.selectionHighlight": false,
|
||||
"editor.suggest.snippetsPreventQuickSuggestions": false,
|
||||
"editor.suggestSelection": "first",
|
||||
"editor.tabCompletion": "onlySnippets",
|
||||
"editor.wordBasedSuggestions": "off",
|
||||
"editor.defaultFormatter": "Dart-Code.dart-code"
|
||||
"editor.wordBasedSuggestions": "off"
|
||||
},
|
||||
"cSpell.words": [
|
||||
"immich"
|
||||
],
|
||||
"[javascript]": {
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": "explicit",
|
||||
"source.removeUnusedImports": "explicit"
|
||||
},
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[json]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[jsonc]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[svelte]": {
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": "explicit",
|
||||
"source.removeUnusedImports": "explicit"
|
||||
},
|
||||
"editor.defaultFormatter": "svelte.svelte-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[typescript]": {
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": "explicit",
|
||||
"source.removeUnusedImports": "explicit"
|
||||
},
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"cSpell.words": ["immich"],
|
||||
"editor.formatOnSave": true,
|
||||
"eslint.validate": ["javascript", "svelte"],
|
||||
"explorer.fileNesting.enabled": true,
|
||||
"explorer.fileNesting.patterns": {
|
||||
"*.ts": "${capture}.spec.ts,${capture}.mock.ts",
|
||||
"*.dart": "${capture}.g.dart,${capture}.gr.dart,${capture}.drift.dart"
|
||||
}
|
||||
}
|
||||
"*.dart": "${capture}.g.dart,${capture}.gr.dart,${capture}.drift.dart",
|
||||
"*.ts": "${capture}.spec.ts,${capture}.mock.ts"
|
||||
},
|
||||
"svelte.enable-ts-plugin": true,
|
||||
"typescript.preferences.importModuleSpecifier": "non-relative"
|
||||
}
|
||||
|
||||
Makefile (3 lines changed)
@@ -17,6 +17,9 @@ e2e:
prod:
docker compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans

+prod-down:
+docker compose -f ./docker/docker-compose.prod.yml down --remove-orphans
+
prod-scale:
docker compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans

@@ -1 +1 @@
-22.14.0
+22.16.0

@@ -1,4 +1,4 @@
-FROM node:22.14.0-alpine3.20@sha256:40be979442621049f40b1d51a26b55e281246b5de4e5f51a18da7beb6e17e3f9 AS core
+FROM node:22.16.0-alpine3.20@sha256:2289fb1fba0f4633b08ec47b94a89c7e20b829fc5679f9b7b298eaa2f1ed8b7e AS core

WORKDIR /usr/src/open-api/typescript-sdk
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
cli/package-lock.json (generated, 1413 lines changed; diff suppressed because it is too large)

cli/package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@immich/cli",
|
||||
"version": "2.2.61",
|
||||
"version": "2.2.68",
|
||||
"description": "Command Line Interface (CLI) for Immich",
|
||||
"type": "module",
|
||||
"exports": "./dist/index.js",
|
||||
@@ -21,7 +21,7 @@
|
||||
"@types/lodash-es": "^4.17.12",
|
||||
"@types/micromatch": "^4.0.9",
|
||||
"@types/mock-fs": "^4.13.1",
|
||||
"@types/node": "^22.14.0",
|
||||
"@types/node": "^22.15.21",
|
||||
"@vitest/coverage-v8": "^3.0.0",
|
||||
"byte-size": "^9.0.0",
|
||||
"cli-progress": "^3.12.0",
|
||||
@@ -69,6 +69,6 @@
|
||||
"micromatch": "^4.0.8"
|
||||
},
|
||||
"volta": {
|
||||
"node": "22.14.0"
|
||||
"node": "22.16.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,7 +16,7 @@ name: immich-dev
|
||||
services:
|
||||
immich-server:
|
||||
container_name: immich_server
|
||||
command: ['/usr/src/app/bin/immich-dev']
|
||||
command: [ '/usr/src/app/bin/immich-dev' ]
|
||||
image: immich-server-dev:latest
|
||||
# extends:
|
||||
# file: hwaccel.transcoding.yml
|
||||
@@ -48,7 +48,7 @@ services:
|
||||
IMMICH_THIRD_PARTY_SOURCE_URL: https://github.com/immich-app/immich/
|
||||
IMMICH_THIRD_PARTY_BUG_FEATURE_URL: https://github.com/immich-app/immich/issues
|
||||
IMMICH_THIRD_PARTY_DOCUMENTATION_URL: https://immich.app/docs
|
||||
IMMICH_THIRD_PARTY_SUPPORT_URL: https://immich.app/docs/third-party
|
||||
IMMICH_THIRD_PARTY_SUPPORT_URL: https://immich.app/docs/community-guides
|
||||
ulimits:
|
||||
nofile:
|
||||
soft: 1048576
|
||||
@@ -70,7 +70,7 @@ services:
|
||||
# user: 0:0
|
||||
build:
|
||||
context: ../web
|
||||
command: ['/usr/src/app/bin/immich-web']
|
||||
command: [ '/usr/src/app/bin/immich-web' ]
|
||||
env_file:
|
||||
- .env
|
||||
ports:
|
||||
@@ -116,13 +116,13 @@ services:
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: docker.io/valkey/valkey:8-bookworm@sha256:42cba146593a5ea9a622002c1b7cba5da7be248650cbb64ecb9c6c33d29794b1
|
||||
image: docker.io/valkey/valkey:8-bookworm@sha256:ff21bc0f8194dc9c105b769aeabf9585fea6a8ed649c0781caeac5cb3c247884
|
||||
healthcheck:
|
||||
test: redis-cli ping || exit 1
|
||||
|
||||
database:
|
||||
container_name: immich_postgres
|
||||
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
|
||||
image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0-pgvectors0.2.0@sha256:fa4f6e0971f454cd95fec5a9aaed2ed93d8f46725cc6bc61e0698e97dba96da1
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
@@ -134,25 +134,6 @@ services:
|
||||
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
|
||||
ports:
|
||||
- 5432:5432
|
||||
healthcheck:
|
||||
test: >-
|
||||
pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;
|
||||
Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align
|
||||
--command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";
|
||||
echo "checksum failure count is $$Chksum";
|
||||
[ "$$Chksum" = '0' ] || exit 1
|
||||
interval: 5m
|
||||
start_interval: 30s
|
||||
start_period: 5m
|
||||
command: >-
|
||||
postgres
|
||||
-c shared_preload_libraries=vectors.so
|
||||
-c 'search_path="$$user", public, vectors'
|
||||
-c logging_collector=on
|
||||
-c max_wal_size=2GB
|
||||
-c shared_buffers=512MB
|
||||
-c wal_compression=on
|
||||
|
||||
# set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
|
||||
# immich-prometheus:
|
||||
# container_name: immich_prometheus
|
||||
|
||||
@@ -56,14 +56,14 @@ services:
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: docker.io/valkey/valkey:8-bookworm@sha256:42cba146593a5ea9a622002c1b7cba5da7be248650cbb64ecb9c6c33d29794b1
|
||||
image: docker.io/valkey/valkey:8-bookworm@sha256:ff21bc0f8194dc9c105b769aeabf9585fea6a8ed649c0781caeac5cb3c247884
|
||||
healthcheck:
|
||||
test: redis-cli ping || exit 1
|
||||
restart: always
|
||||
|
||||
database:
|
||||
container_name: immich_postgres
|
||||
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
|
||||
image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0-pgvectors0.2.0@sha256:fa4f6e0971f454cd95fec5a9aaed2ed93d8f46725cc6bc61e0698e97dba96da1
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
@@ -75,14 +75,6 @@ services:
|
||||
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
|
||||
ports:
|
||||
- 5432:5432
|
||||
healthcheck:
|
||||
test: >-
|
||||
pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1; Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
|
||||
interval: 5m
|
||||
start_interval: 30s
|
||||
start_period: 5m
|
||||
command: >-
|
||||
postgres -c shared_preload_libraries=vectors.so -c 'search_path="$$user", public, vectors' -c logging_collector=on -c max_wal_size=2GB -c shared_buffers=512MB -c wal_compression=on
|
||||
restart: always
|
||||
|
||||
# set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
|
||||
@@ -90,7 +82,7 @@ services:
|
||||
container_name: immich_prometheus
|
||||
ports:
|
||||
- 9090:9090
|
||||
image: prom/prometheus@sha256:502ad90314c7485892ce696cb14a99fceab9fc27af29f4b427f41bd39701a199
|
||||
image: prom/prometheus@sha256:78ed1f9050eb9eaf766af6e580230b1c4965728650e332cd1ee918c0c4699775
|
||||
volumes:
|
||||
- ./prometheus.yml:/etc/prometheus/prometheus.yml
|
||||
- prometheus-data:/prometheus
|
||||
@@ -102,7 +94,7 @@ services:
|
||||
command: [ './run.sh', '-disable-reporting' ]
|
||||
ports:
|
||||
- 3000:3000
|
||||
image: grafana/grafana:11.6.0-ubuntu@sha256:fd8fa48213c624e1a95122f1d93abbf1cf1cbe85fc73212c1e599dbd76c63ff8
|
||||
image: grafana/grafana:11.6.1-ubuntu@sha256:6fc273288470ef499dd3c6b36aeade093170d4f608f864c5dd3a7fabeae77b50
|
||||
volumes:
|
||||
- grafana-data:/var/lib/grafana
|
||||
|
||||
|
||||
@@ -49,30 +49,24 @@ services:
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: docker.io/valkey/valkey:8-bookworm@sha256:42cba146593a5ea9a622002c1b7cba5da7be248650cbb64ecb9c6c33d29794b1
|
||||
image: docker.io/valkey/valkey:8-bookworm@sha256:ff21bc0f8194dc9c105b769aeabf9585fea6a8ed649c0781caeac5cb3c247884
|
||||
healthcheck:
|
||||
test: redis-cli ping || exit 1
|
||||
restart: always
|
||||
|
||||
database:
|
||||
container_name: immich_postgres
|
||||
image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
|
||||
image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0-pgvectors0.2.0@sha256:fa4f6e0971f454cd95fec5a9aaed2ed93d8f46725cc6bc61e0698e97dba96da1
|
||||
environment:
|
||||
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
||||
POSTGRES_USER: ${DB_USERNAME}
|
||||
POSTGRES_DB: ${DB_DATABASE_NAME}
|
||||
POSTGRES_INITDB_ARGS: '--data-checksums'
|
||||
# Uncomment the DB_STORAGE_TYPE: 'HDD' var if your database isn't stored on SSDs
|
||||
# DB_STORAGE_TYPE: 'HDD'
|
||||
volumes:
|
||||
# Do not edit the next line. If you want to change the database storage location on your system, edit the value of DB_DATA_LOCATION in the .env file
|
||||
- ${DB_DATA_LOCATION}:/var/lib/postgresql/data
|
||||
healthcheck:
|
||||
test: >-
|
||||
pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1; Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
|
||||
interval: 5m
|
||||
start_interval: 30s
|
||||
start_period: 5m
|
||||
command: >-
|
||||
postgres -c shared_preload_libraries=vectors.so -c 'search_path="$$user", public, vectors' -c logging_collector=on -c max_wal_size=2GB -c shared_buffers=512MB -c wal_compression=on
|
||||
restart: always
|
||||
|
||||
volumes:
|
||||
|
||||
@@ -1 +1 @@
|
||||
22.14.0
|
||||
22.16.0
|
||||
|
||||
@@ -23,23 +23,32 @@ Refer to the official [postgres documentation](https://www.postgresql.org/docs/c
|
||||
It is not recommended to directly backup the `DB_DATA_LOCATION` folder. Doing so while the database is running can lead to a corrupted backup that cannot be restored.
|
||||
:::
|
||||
|
||||
### Automatic Database Backups
|
||||
### Automatic Database Dumps
|
||||
|
||||
For convenience, Immich will automatically create database backups by default. The backups are stored in `UPLOAD_LOCATION/backups`.
|
||||
As mentioned above, you should make your own backup of these together with the asset folders as noted below.
|
||||
You can adjust the schedule and amount of kept backups in the [admin settings](http://my.immich.app/admin/system-settings?isOpen=backup).
|
||||
By default, Immich will keep the last 14 backups and create a new backup every day at 2:00 AM.
|
||||
:::warning
|
||||
The automatic database dumps can be used to restore the database in the event of damage to the Postgres database files.
|
||||
There is no monitoring for these dumps and you will not be notified if they are unsuccessful.
|
||||
:::
|
||||
|
||||
#### Trigger Backup
|
||||
:::caution
|
||||
The database dumps do **NOT** contain any pictures or videos, only metadata. They are only usable with a copy of the other files in `UPLOAD_LOCATION` as outlined below.
|
||||
:::
|
||||
|
||||
You are able to trigger a backup in the [admin job status page](http://my.immich.app/admin/jobs-status).
|
||||
Visit the page, open the "Create job" modal from the top right, select "Backup Database" and click "Confirm".
|
||||
A job will run and trigger a backup, you can verify this worked correctly by checking the logs or the backup folder.
|
||||
This backup will count towards the last X backups that will be kept based on your settings.
|
||||
For disaster-recovery purposes, Immich will automatically create database dumps. The dumps are stored in `UPLOAD_LOCATION/backups`.
|
||||
Please be sure to make your own, independent backup of the database together with the asset folders as noted below.
|
||||
You can adjust the schedule and amount of kept database dumps in the [admin settings](http://my.immich.app/admin/system-settings?isOpen=backup).
|
||||
By default, Immich will keep the last 14 database dumps and create a new dump every day at 2:00 AM.
|
||||
|
||||
#### Trigger Dump
|
||||
|
||||
You are able to trigger a database dump in the [admin job status page](http://my.immich.app/admin/jobs-status).
|
||||
Visit the page, open the "Create job" modal from the top right, select "Create Database Dump" and click "Confirm".
|
||||
A job will run and trigger a dump; you can verify that it worked correctly by checking the logs or the `backups/` folder.
|
||||
This dump will count towards the last `X` dumps that are kept based on your settings.
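
If you want to double-check from the host that a dump actually landed on disk, listing the backups folder is usually enough. This is a minimal sketch and assumes the default layout under the `UPLOAD_LOCATION` value from your `.env` file:

```bash
# List the most recent database dumps (adjust the path to your UPLOAD_LOCATION)
ls -lht "${UPLOAD_LOCATION}/backups" | head
```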
|
||||
|
||||
#### Restoring
|
||||
|
||||
We hope to make restoring simpler in future versions, for now you can find the backups in the `UPLOAD_LOCATION/backups` folder on your host.
|
||||
We hope to make restoring simpler in future versions; for now, you can find the database dumps in the `UPLOAD_LOCATION/backups` folder on your host.
|
||||
Then please follow the steps in the following section for restoring the database.
|
||||
|
||||
### Manual Backup and Restore
|
||||
|
||||
@@ -10,12 +10,16 @@ Running with a pre-existing Postgres server can unlock powerful administrative f
|
||||
|
||||
## Prerequisites
|
||||
|
||||
You must install pgvecto.rs into your instance of Postgres using their [instructions][vectors-install]. After installation, add `shared_preload_libraries = 'vectors.so'` to your `postgresql.conf`. If you already have some `shared_preload_libraries` set, you can separate each extension with a comma. For example, `shared_preload_libraries = 'pg_stat_statements, vectors.so'`.
|
||||
You must install `pgvector` (`>= 0.7.0, < 1.0.0`), as it is a prerequisite for `vchord`.
|
||||
The easiest way to do this on Debian/Ubuntu is by adding the [PostgreSQL Apt repository][pg-apt] and then
|
||||
running `apt install postgresql-NN-pgvector`, where `NN` is your Postgres version (e.g., `16`).
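
For example, on a Debian/Ubuntu system running Postgres 16, the installation might look like the following sketch; adjust `16` to your actual Postgres major version:

```bash
# Add the PostgreSQL Apt repository first (see the link above), then:
sudo apt update
sudo apt install postgresql-16-pgvector
```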
|
||||
|
||||
You must install VectorChord into your instance of Postgres using their [instructions][vchord-install]. After installation, add `shared_preload_libraries = 'vchord.so'` to your `postgresql.conf`. If you already have some `shared_preload_libraries` set, you can separate each extension with a comma. For example, `shared_preload_libraries = 'pg_stat_statements, vchord.so'`.
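
As an illustration, the relevant line in `postgresql.conf` could be added like this and Postgres restarted afterwards. Treat this as a sketch: configuration paths and service names vary by distribution, and if other libraries are already preloaded you should merge them into a single comma-separated value instead of appending a new line.

```bash
# Append the preload setting to postgresql.conf (adjust the path for your installation)
echo "shared_preload_libraries = 'vchord.so'" | sudo tee -a /etc/postgresql/16/main/postgresql.conf

# Restart Postgres so the library is loaded
sudo systemctl restart postgresql
```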
|
||||
|
||||
:::note
|
||||
Immich is known to work with Postgres versions 14, 15, and 16. Earlier versions are unsupported. Postgres 17 is nominally compatible, but pgvecto.rs does not have prebuilt images or packages for it as of writing.
|
||||
Immich is known to work with Postgres versions `>= 14, < 18`.
|
||||
|
||||
Make sure the installed version of pgvecto.rs is compatible with your version of Immich. The current accepted range for pgvecto.rs is `>= 0.2.0, < 0.4.0`.
|
||||
Make sure the installed version of VectorChord is compatible with your version of Immich. The current accepted range for VectorChord is `>= 0.3.0, < 0.4.0`.
|
||||
:::
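
If you are unsure which VectorChord version is currently installed, a query along these lines can confirm it. This is an illustrative `psql` sketch; adjust the database name and connection options as needed, and note that it only returns a row once the extension has been created in that database:

```bash
# Show the installed VectorChord extension version in the Immich database
psql -d immich -c "SELECT extversion FROM pg_extension WHERE extname = 'vchord';"
```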
|
||||
|
||||
## Specifying the connection URL
|
||||
@@ -53,21 +57,81 @@ CREATE DATABASE <immichdatabasename>;
|
||||
\c <immichdatabasename>
|
||||
BEGIN;
|
||||
ALTER DATABASE <immichdatabasename> OWNER TO <immichdbusername>;
|
||||
CREATE EXTENSION vectors;
|
||||
CREATE EXTENSION vchord CASCADE;
|
||||
CREATE EXTENSION earthdistance CASCADE;
|
||||
ALTER DATABASE <immichdatabasename> SET search_path TO "$user", public, vectors;
|
||||
ALTER SCHEMA vectors OWNER TO <immichdbusername>;
|
||||
COMMIT;
|
||||
```
|
||||
|
||||
### Updating pgvecto.rs
|
||||
### Updating VectorChord
|
||||
|
||||
When installing a new version of pgvecto.rs, you will need to manually update the extension by connecting to the Immich database and running `ALTER EXTENSION vectors UPDATE;`.
|
||||
When installing a new version of VectorChord, you will need to manually update the extension by connecting to the Immich database and running `ALTER EXTENSION vchord UPDATE;`.
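
For example, using `psql` (a sketch; substitute your actual database name, user, and connection options):

```bash
psql -d immich -c 'ALTER EXTENSION vchord UPDATE;'
```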
|
||||
|
||||
### Common errors
|
||||
## Migrating to VectorChord
|
||||
|
||||
#### Permission denied for view
|
||||
VectorChord is the successor extension to pgvecto.rs, allowing for higher performance, lower memory usage and higher quality results for smart search and facial recognition.
|
||||
|
||||
If you get the error `driverError: error: permission denied for view pg_vector_index_stat`, you can fix this by connecting to the Immich database and running `GRANT SELECT ON TABLE pg_vector_index_stat TO <immichdbusername>;`.
|
||||
### Migrating from pgvecto.rs
|
||||
|
||||
[vectors-install]: https://docs.vectorchord.ai/getting-started/installation.html
|
||||
Support for pgvecto.rs will be dropped in a later release, so we recommend that all users currently on pgvecto.rs migrate to VectorChord at their convenience. There are two primary approaches to do so.
|
||||
|
||||
The easiest option is to have both extensions installed during the migration:
|
||||
|
||||
1. Ensure you still have pgvecto.rs installed
|
||||
2. Install `pgvector` (`>= 0.7.0, < 1.0.0`). The easiest way to do this is on Debian/Ubuntu by adding the [PostgreSQL Apt repository][pg-apt] and then running `apt install postgresql-NN-pgvector`, where `NN` is your Postgres version (e.g., `16`)
|
||||
3. [Install VectorChord][vchord-install]
|
||||
4. Add `shared_preload_libraries = 'vchord.so, vectors.so'` to your `postgresql.conf`, making sure to include _both_ `vchord.so` and `vectors.so`. You may include other libraries here as well if needed
|
||||
5. Restart the Postgres database
|
||||
6. If Immich does not have superuser permissions, run the SQL command `CREATE EXTENSION vchord CASCADE;` using psql or your choice of database client
|
||||
7. Start Immich and wait for the logs `Reindexed face_index` and `Reindexed clip_index` to be output
|
||||
8. If Immich does not have superuser permissions, run the SQL command `DROP EXTENSION vectors;`
|
||||
9. Drop the old schema by running `DROP SCHEMA vectors;`
|
||||
10. Remove the `vectors.so` entry from the `shared_preload_libraries` setting
|
||||
11. Restart the Postgres database
|
||||
12. Uninstall pgvecto.rs (e.g. `apt-get purge vectors-pg14` on Debian-based environments, replacing `pg14` as appropriate). `pgvector` must remain installed as it provides the data types used by `vchord`
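
For the SQL portions of steps 6, 8 and 9 above, a `psql` session might look like the following sketch. The database name and connection options are assumptions; only run the `DROP` statements after the reindex log lines from step 7 have appeared.

```bash
# Step 6: create the VectorChord extension (CASCADE also pulls in pgvector)
psql -d immich -c 'CREATE EXTENSION vchord CASCADE;'

# ...start Immich and wait for "Reindexed face_index" and "Reindexed clip_index" (step 7)...

# Steps 8 and 9: remove the old pgvecto.rs extension and schema
psql -d immich -c 'DROP EXTENSION vectors;'
psql -d immich -c 'DROP SCHEMA vectors;'
```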
|
||||
|
||||
If it is not possible to have both VectorChord and pgvecto.rs installed at the same time, you can perform the migration with more manual steps:
|
||||
|
||||
1. While pgvecto.rs is still installed, run the following SQL command using psql or your choice of database client. Take note of the number output by this command, as you will need it later
|
||||
|
||||
```sql
|
||||
SELECT atttypmod as dimsize
|
||||
FROM pg_attribute f
|
||||
JOIN pg_class c ON c.oid = f.attrelid
|
||||
WHERE c.relkind = 'r'::char
|
||||
AND f.attnum > 0
|
||||
AND c.relname = 'smart_search'::text
|
||||
AND f.attname = 'embedding'::text;
|
||||
```
|
||||
|
||||
2. Remove references to pgvecto.rs using the SQL commands below
|
||||
|
||||
```sql
|
||||
DROP INDEX IF EXISTS clip_index;
|
||||
DROP INDEX IF EXISTS face_index;
|
||||
ALTER TABLE smart_search ALTER COLUMN embedding SET DATA TYPE real[];
|
||||
ALTER TABLE face_search ALTER COLUMN embedding SET DATA TYPE real[];
|
||||
```
|
||||
|
||||
3. [Install VectorChord][vchord-install]
|
||||
4. Change the columns back to the appropriate vector types, replacing `<number>` with the number from step 1
|
||||
|
||||
```sql
|
||||
CREATE EXTENSION IF NOT EXISTS vchord CASCADE;
|
||||
ALTER TABLE smart_search ALTER COLUMN embedding SET DATA TYPE vector(<number>);
|
||||
ALTER TABLE face_search ALTER COLUMN embedding SET DATA TYPE vector(512);
|
||||
```
|
||||
|
||||
5. Start Immich and let it create new indices using VectorChord
|
||||
|
||||
### Migrating from pgvector
|
||||
|
||||
1. Ensure you have pgvector 0.7.0 or later installed. If it is older, upgrade it and run the SQL command `ALTER EXTENSION vector UPDATE;` using psql or your choice of database client
|
||||
2. Follow the Prerequisites to install VectorChord
|
||||
3. If Immich does not have superuser permissions, run the SQL command `CREATE EXTENSION vchord CASCADE;`
|
||||
4. Remove the `DB_VECTOR_EXTENSION=pgvector` environment variable, as Immich will continue to use pgvector if it is set
|
||||
5. Start Immich and let it create new indices using VectorChord
|
||||
|
||||
Note that VectorChord itself uses pgvector types, so you should not uninstall pgvector after following these steps.
|
||||
|
||||
[vchord-install]: https://docs.vectorchord.ai/vectorchord/getting-started/installation.html
|
||||
[pg-apt]: https://www.postgresql.org/download/linux/#generic
|
||||
|
||||
@@ -22,7 +22,7 @@ server {
|
||||
client_max_body_size 50000M;
|
||||
|
||||
# Set headers
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
# Database Migrations
|
||||
|
||||
After making any changes in the `server/src/entities`, a database migration need to run in order to register the changes in the database. Follow the steps below to create a new migration.
|
||||
After making any changes in `server/src/schema`, a database migration needs to run in order to register the changes in the database. Follow the steps below to create a new migration.
|
||||
|
||||
1. Run the command
|
||||
|
||||
```bash
|
||||
npm run typeorm:migrations:generate <migration-name>
|
||||
npm run migrations:generate <migration-name>
|
||||
```
|
||||
|
||||
2. Check if the migration file makes sense.
|
||||
3. Move the migration file to folder `./server/src/migrations` in your code editor.
|
||||
3. Move the migration file to folder `./server/src/schema/migrations` in your code editor.
|
||||
|
||||
The server will automatically detect `*.ts` file changes and restart. Part of the server start-up process includes running any new migrations, so they will be applied immediately.
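
Putting the steps together, a typical sequence might look like this. The migration name `add-example-table` is just a placeholder, and the source path depends on where the generator writes the file:

```bash
# From the server/ directory: generate a migration from the pending schema changes
npm run migrations:generate add-example-table

# Review the generated file, then move it into the migrations folder
mv <path-to-generated-migration>.ts src/schema/migrations/
```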
|
||||
|
||||
@@ -63,22 +63,41 @@ If you only want to do web development connected to an existing, remote backend,
|
||||
IMMICH_SERVER_URL=https://demo.immich.app/ npm run dev
|
||||
```
|
||||
|
||||
If you're using PowerShell on Windows you may need to set the env var separately like so:
|
||||
|
||||
```powershell
|
||||
$env:IMMICH_SERVER_URL = "https://demo.immich.app/"
|
||||
npm run dev
|
||||
```
|
||||
|
||||
#### `@immich/ui`
|
||||
|
||||
To see local changes to `@immich/ui` in Immich, do the following:
|
||||
|
||||
1. Install `@immich/ui` as a sibling to `immich/`, for example `/home/user/immich` and `/home/user/ui`
|
||||
1. Build the `@immich/ui` project via `npm run build`
|
||||
1. Uncomment the corresponding volume in web service of the `docker/docker-compose.dev.yaml` file (`../../ui:/usr/ui`)
|
||||
1. Uncomment the corresponding alias in the `web/vite.config.js` file (`'@immich/ui': path.resolve(\_\_dirname, '../../ui')`)
|
||||
1. Start up the stack via `make dev`
|
||||
1. After making changes in `@immich/ui`, rebuild it (`npm run build`)
|
||||
2. Build the `@immich/ui` project via `npm run build`
|
||||
3. Uncomment the corresponding volume in web service of the `docker/docker-compose.dev.yaml` file (`../../ui:/usr/ui`)
|
||||
4. Uncomment the corresponding alias in the `web/vite.config.js` file (`'@immich/ui': path.resolve(\_\_dirname, '../../ui')`)
|
||||
5. Uncomment the import statement in `web/src/app.css` file `@import '/usr/ui/dist/theme/default.css';` and comment out `@import '@immich/ui/theme/default.css';`
|
||||
6. Start up the stack via `make dev`
|
||||
7. After making changes in `@immich/ui`, rebuild it (`npm run build`)
|
||||
|
||||
### Mobile app
|
||||
|
||||
The mobile app `(/mobile)` will required Flutter toolchain 3.13.x and FVM to be installed on your system.
|
||||
#### Setup
|
||||
|
||||
Please refer to the [Flutter's official documentation](https://flutter.dev/docs/get-started/install) for more information on setting up the toolchain on your machine.
|
||||
1. Setup Flutter toolchain using FVM.
|
||||
2. Run `flutter pub get` to install the dependencies.
|
||||
3. Run `make translation` to generate the translation file.
|
||||
4. Run `fvm flutter run` to start the app.
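
Taken together, the setup from the list above might look like this from the `mobile/` directory. This is a sketch; it assumes the project pins a Flutter version that FVM can pick up:

```bash
fvm install            # set up the pinned Flutter toolchain via FVM
fvm flutter pub get    # install dependencies
make translation       # generate the translation file
fvm flutter run        # start the app on a connected device or emulator
```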
|
||||
|
||||
#### Translation
|
||||
|
||||
To add a new translation text, enter the key-value pair in the `i18n/en.json` in the root of the immich project. Then, from the `mobile/` directory, run
|
||||
|
||||
```bash
|
||||
make translation
|
||||
```
|
||||
|
||||
The mobile app asks you which backend to connect to. You can use the demo backend (https://demo.immich.app/) if you don't need to change server code or upload photos. Alternatively, you can run the server yourself per the instructions above.
|
||||
|
||||
@@ -96,32 +115,72 @@ Note: Activating the license is not required.
|
||||
|
||||
### VSCode
|
||||
|
||||
Install `Flutter`, `DCM`, `Prettier`, `ESLint` and `Svelte` extensions.
|
||||
Install `Flutter`, `DCM`, `Prettier`, `ESLint` and `Svelte` extensions. These extensions are listed in the `extensions.json` file under `.vscode/` and should appear as workspace recommendations.
|
||||
|
||||
in User `settings.json` (`cmd + shift + p` and search for `Open User Settings JSON`) add the following:
|
||||
Here are the settings we use; they should be active as workspace settings (`settings.json`):
|
||||
|
||||
```json title="settings.json"
|
||||
{
|
||||
"editor.formatOnSave": true,
|
||||
"[javascript][typescript][css]": {
|
||||
"[css]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.tabSize": 2,
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[svelte]": {
|
||||
"editor.defaultFormatter": "svelte.svelte-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"svelte.enable-ts-plugin": true,
|
||||
"eslint.validate": ["javascript", "svelte"],
|
||||
"[dart]": {
|
||||
"editor.defaultFormatter": "Dart-Code.dart-code",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.selectionHighlight": false,
|
||||
"editor.suggest.snippetsPreventQuickSuggestions": false,
|
||||
"editor.suggestSelection": "first",
|
||||
"editor.tabCompletion": "onlySnippets",
|
||||
"editor.wordBasedSuggestions": "off",
|
||||
"editor.defaultFormatter": "Dart-Code.dart-code"
|
||||
}
|
||||
"editor.wordBasedSuggestions": "off"
|
||||
},
|
||||
"[javascript]": {
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": "explicit",
|
||||
"source.removeUnusedImports": "explicit"
|
||||
},
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[json]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[jsonc]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[svelte]": {
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": "explicit",
|
||||
"source.removeUnusedImports": "explicit"
|
||||
},
|
||||
"editor.defaultFormatter": "svelte.svelte-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[typescript]": {
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": "explicit",
|
||||
"source.removeUnusedImports": "explicit"
|
||||
},
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"cSpell.words": ["immich"],
|
||||
"editor.formatOnSave": true,
|
||||
"eslint.validate": ["javascript", "svelte"],
|
||||
"explorer.fileNesting.enabled": true,
|
||||
"explorer.fileNesting.patterns": {
|
||||
"*.dart": "${capture}.g.dart,${capture}.gr.dart,${capture}.drift.dart",
|
||||
"*.ts": "${capture}.spec.ts,${capture}.mock.ts"
|
||||
},
|
||||
"svelte.enable-ts-plugin": true,
|
||||
"typescript.preferences.importModuleSpecifier": "non-relative"
|
||||
}
|
||||
```
|
||||
|
||||
11
docs/docs/features/casting.md
Normal file
@@ -0,0 +1,11 @@
|
||||
# Chromecast support
|
||||
|
||||
Immich supports Google's Cast protocol, so photos and videos can be cast to devices such as a Chromecast or a Nest Hub. This feature is considered experimental and has several important limitations, listed below. Currently, it is only supported by the web client; support on Android and iOS is planned for the future.
|
||||
|
||||
## Limitations
|
||||
|
||||
To use casting with Immich, there are a few prerequisites:
|
||||
|
||||
1. Your instance must be accessed via an HTTPS connection in order for the casting menu to show.
|
||||
2. Your instance must be publicly accessible via HTTPS and a DNS record for the server must be accessible via Google's DNS servers (`8.8.8.8` and `8.8.4.4`)
|
||||
3. Videos must be in a format that is compatible with Google Cast. For more info, check out [Google's documentation](https://developers.google.com/cast/docs/media)
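
To sanity-check the DNS requirement above, you can ask Google's resolvers directly for your server's record. This is an illustrative check; replace the hostname with your own:

```bash
# The record must resolve via Google's public DNS for casting to work
dig @8.8.8.8 +short photos.example.com
```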
|
||||
@@ -42,6 +42,12 @@ docker run -it -v "$(pwd)":/import:ro -e IMMICH_INSTANCE_URL=https://your-immich
|
||||
|
||||
Please modify the `IMMICH_INSTANCE_URL` and `IMMICH_API_KEY` environment variables as appropriate. You can also use a Docker env file to store your sensitive API key.
|
||||
|
||||
This `docker run` command will directly run the command `immich` inside the container. You can append the desired parameters (see under "Usage") to the command line like this:
|
||||
|
||||
```bash
|
||||
docker run -it -v "$(pwd)":/import:ro -e IMMICH_INSTANCE_URL=https://your-immich-instance/api -e IMMICH_API_KEY=your-api-key ghcr.io/immich-app/immich-cli:latest upload -a -c 5 --recursive directory/
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
<details>
|
||||
|
||||
@@ -121,6 +121,6 @@ Once this is done, you can continue to step 3 of "Basic Setup".
|
||||
|
||||
[hw-file]: https://github.com/immich-app/immich/releases/latest/download/hwaccel.transcoding.yml
|
||||
[nvct]: https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html
|
||||
[jellyfin-lp]: https://jellyfin.org/docs/general/administration/hardware-acceleration/intel/#configure-and-verify-lp-mode-on-linux
|
||||
[jellyfin-kernel-bug]: https://jellyfin.org/docs/general/administration/hardware-acceleration/intel/#known-issues-and-limitations
|
||||
[jellyfin-lp]: https://jellyfin.org/docs/general/post-install/transcoding/hardware-acceleration/intel#low-power-encoding
|
||||
[jellyfin-kernel-bug]: https://jellyfin.org/docs/general/post-install/transcoding/hardware-acceleration/intel#known-issues-and-limitations-on-linux
|
||||
[libmali-rockchip]: https://github.com/tsukumijima/libmali-rockchip/releases
|
||||
|
||||
@@ -72,7 +72,7 @@ In rare cases, the library watcher can hang, preventing Immich from starting up.
|
||||
|
||||
### Nightly job
|
||||
|
||||
There is an automatic scan job that is scheduled to run once a day. This job also cleans up any libraries stuck in deletion. It is possible to trigger the cleanup by clicking "Scan all libraries" in the library managment page.
|
||||
There is an automatic scan job that is scheduled to run once a day. This job also cleans up any libraries stuck in deletion. It is possible to trigger the cleanup by clicking "Scan all libraries" in the library management page.
|
||||
|
||||
## Usage
|
||||
|
||||
|
||||
@@ -42,7 +42,7 @@ You do not need to redo any machine learning jobs after enabling hardware accele
|
||||
|
||||
- The GPU must have compute capability 5.2 or greater.
|
||||
- The server must have the official NVIDIA driver installed.
|
||||
- The installed driver must be >= 535 (it must support CUDA 12.2).
|
||||
- The installed driver must be >= 545 (it must support CUDA 12.3).
|
||||
- On Linux (except for WSL2), you also need to have [NVIDIA Container Toolkit][nvct] installed.
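
A quick way to confirm the driver requirement is to inspect `nvidia-smi` on the host. This is illustrative; the exact output format varies by driver version:

```bash
# The header of nvidia-smi reports the driver version and the highest supported CUDA version
nvidia-smi
```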
|
||||
|
||||
#### ROCm
|
||||
|
||||
@@ -5,7 +5,7 @@ import TabItem from '@theme/TabItem';
|
||||
|
||||
Immich uses Postgres as its search database for both metadata and contextual CLIP search.
|
||||
|
||||
Contextual CLIP search is powered by the [pgvecto.rs](https://github.com/tensorchord/pgvecto.rs) extension, utilizing machine learning models like [CLIP](https://openai.com/research/clip) to provide relevant search results. This allows for freeform searches without requiring specific keywords in the image or video metadata.
|
||||
Contextual CLIP search is powered by the [VectorChord](https://github.com/tensorchord/VectorChord) extension, utilizing machine learning models like [CLIP](https://openai.com/research/clip) to provide relevant search results. This allows for freeform searches without requiring specific keywords in the image or video metadata.
|
||||
|
||||
## Advanced Search Filters
|
||||
|
||||
@@ -92,7 +92,7 @@ Memory and execution time estimates were obtained without acceleration on a 7800
|
||||
|
||||
**Execution Time (ms)**: After warming up the model with one pass, the mean execution time of 100 passes with the same input.
|
||||
|
||||
**Memory (MiB)**: The peak RSS usage of the process afer performing the above timing benchmark. Does not include image decoding, concurrent processing, the web server, etc., which are relatively constant factors.
|
||||
**Memory (MiB)**: The peak RSS usage of the process after performing the above timing benchmark. Does not include image decoding, concurrent processing, the web server, etc., which are relatively constant factors.
|
||||
|
||||
**Recall (%)**: Evaluated on Crossmodal-3600, the average of the recall@1, recall@5 and recall@10 results for zeroshot image retrieval. Chinese (Simplified), English, French, German, Italian, Japanese, Korean, Polish, Russian, Spanish and Turkish are additionally tested on XTD-10. Chinese (Simplified) and English are additionally tested on Flickr30k. The recall metrics are the average across all tested datasets.
|
||||
|
||||
|
||||
@@ -14,14 +14,14 @@ online generators you can use.
|
||||
2. Paste the link to your JSON style in either the **Light Style** or **Dark Style**. (You can add different styles which will help make the map style more appropriate depending on whether you set **Immich** to Light or Dark mode.)
|
||||
3. Save your selections. Reload the map, and enjoy your custom map style!
|
||||
|
||||
## Use Maptiler to build a custom style
|
||||
## Use MapTiler to build a custom style
|
||||
|
||||
Customizing the map style can be done easily using Maptiler, if you do not want to write an entire JSON document by hand.
|
||||
Customizing the map style can be done easily using MapTiler, if you do not want to write an entire JSON document by hand.
|
||||
|
||||
1. Create a free account at https://cloud.maptiler.com
|
||||
2. Once logged in, you can either create a brand new map by clicking on **New Map**, selecting a starter map, and then clicking **Customize**, OR by selecting a **Standard Map** and customizing it from there.
|
||||
3. The **editor** interface is self-explanatory. You can change colors, remove visible layers, or add optional layers (e.g., administrative, topo, hydro, etc.) in the composer.
|
||||
4. Once you have your map composed, click on **Save** at the top right. Give it a unique name to save it to your account.
|
||||
5. Next, **Publish** your style using the **Publish** button at the top right. This will deploy it to production, which means it is able to be exposed over the Internet. Maptiler will present an interactive side-by-side map with the original and your changes prior to publication.<br/>
|
||||
6. Maptiler will warn you that changing the map will change it across all apps using the map. Since no apps are using the map yet, this is okay.
|
||||
7. Clicking on the name of your new map at the top left will bring you to the item's **details** page. From here, copy the link to the JSON style under **Use vector style**. This link will automatically contain your personal API key to Maptiler.
|
||||
5. Next, **Publish** your style using the **Publish** button at the top right. This will deploy it to production, which means it is able to be exposed over the Internet. MapTiler will present an interactive side-by-side map with the original and your changes prior to publication.<br/>
|
||||
6. MapTiler will warn you that changing the map will change it across all apps using the map. Since no apps are using the map yet, this is okay.
|
||||
7. Clicking on the name of your new map at the top left will bring you to the item's **details** page. From here, copy the link to the JSON style under **Use vector style**. This link will automatically contain your personal API key to MapTiler.
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Database Queries
|
||||
|
||||
:::danger
|
||||
Keep in mind that mucking around in the database might set the moon on fire. Avoid modifying the database directly when possible, and always have current backups.
|
||||
Keep in mind that mucking around in the database might set the Moon on fire. Avoid modifying the database directly when possible, and always have current backups.
|
||||
:::
|
||||
|
||||
:::tip
|
||||
|
||||
@@ -2,53 +2,13 @@
|
||||
sidebar_position: 30
|
||||
---
|
||||
|
||||
import CodeBlock from '@theme/CodeBlock';
|
||||
import ExampleEnv from '!!raw-loader!../../../docker/example.env';
|
||||
|
||||
# Docker Compose [Recommended]
|
||||
|
||||
Docker Compose is the recommended method to run Immich in production. Below are the steps to deploy Immich with Docker Compose.
|
||||
|
||||
## Step 1 - Download the required files
|
||||
import DockerComposeSteps from '/docs/partials/_docker-compose-install-steps.mdx';
|
||||
|
||||
Create a directory of your choice (e.g. `./immich-app`) to hold the `docker-compose.yml` and `.env` files.
|
||||
|
||||
```bash title="Move to the directory you created"
|
||||
mkdir ./immich-app
|
||||
cd ./immich-app
|
||||
```
|
||||
|
||||
Download [`docker-compose.yml`][compose-file] and [`example.env`][env-file] by running the following commands:
|
||||
|
||||
```bash title="Get docker-compose.yml file"
|
||||
wget -O docker-compose.yml https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
|
||||
```
|
||||
|
||||
```bash title="Get .env file"
|
||||
wget -O .env https://github.com/immich-app/immich/releases/latest/download/example.env
|
||||
```
|
||||
|
||||
You can alternatively download these two files from your browser and move them to the directory that you created, in which case ensure that you rename `example.env` to `.env`.
|
||||
|
||||
## Step 2 - Populate the .env file with custom values
|
||||
|
||||
<CodeBlock language="bash" title="Default environmental variable content">
|
||||
{ExampleEnv}
|
||||
</CodeBlock>
|
||||
|
||||
- Populate `UPLOAD_LOCATION` with your preferred location for storing backup assets. It should be a new directory on the server with enough free space.
|
||||
- Consider changing `DB_PASSWORD` to a custom value. Postgres is not publicly exposed, so this password is only used for local authentication.
|
||||
To avoid issues with Docker parsing this value, it is best to use only the characters `A-Za-z0-9`. `pwgen` is a handy utility for this.
|
||||
- Set your timezone by uncommenting the `TZ=` line.
|
||||
- Populate custom database information if necessary.
|
||||
|
||||
## Step 3 - Start the containers
|
||||
|
||||
From the directory you created in Step 1 (which should now contain your customized `docker-compose.yml` and `.env` files), run the following command to start Immich as a background service:
|
||||
|
||||
```bash title="Start the containers"
|
||||
docker compose up -d
|
||||
```
|
||||
<DockerComposeSteps />
|
||||
|
||||
:::info Docker version
|
||||
If you get an error such as `unknown shorthand flag: 'd' in -d` or `open <location of your .env file>: permission denied`, you are probably running the wrong Docker version. (This happens, for example, with the docker.io package in Ubuntu 22.04.3 LTS.) You can correct the problem by following the complete [Docker Engine install](https://docs.docker.com/engine/install/) procedure for your distribution, crucially the "Uninstall old versions" and "Install using the apt/rpm repository" sections. These replace the distro's Docker packages with Docker's official ones.
|
||||
@@ -70,6 +30,3 @@ If you get an error `can't set healthcheck.start_interval as feature require Doc
|
||||
## Next Steps
|
||||
|
||||
Read the [Post Installation](/docs/install/post-install.mdx) steps and [upgrade instructions](/docs/install/upgrading.md).
|
||||
|
||||
[compose-file]: https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
|
||||
[env-file]: https://github.com/immich-app/immich/releases/latest/download/example.env
|
||||
|
||||
@@ -72,20 +72,21 @@ Information on the current workers can be found [here](/docs/administration/jobs
|
||||
|
||||
## Database
|
||||
|
||||
| Variable | Description | Default | Containers |
|
||||
| :---------------------------------- | :----------------------------------------------------------------------- | :----------: | :----------------------------- |
|
||||
| `DB_URL` | Database URL | | server |
|
||||
| `DB_HOSTNAME` | Database host | `database` | server |
|
||||
| `DB_PORT` | Database port | `5432` | server |
|
||||
| `DB_USERNAME` | Database user | `postgres` | server, database<sup>\*1</sup> |
|
||||
| `DB_PASSWORD` | Database password | `postgres` | server, database<sup>\*1</sup> |
|
||||
| `DB_DATABASE_NAME` | Database name | `immich` | server, database<sup>\*1</sup> |
|
||||
| `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database vector extension (one of [`pgvector`, `pgvecto.rs`]) | `pgvecto.rs` | server |
|
||||
| `DB_SKIP_MIGRATIONS` | Whether to skip running migrations on startup (one of [`true`, `false`]) | `false` | server |
|
||||
| Variable | Description | Default | Containers |
|
||||
| :---------------------------------- | :--------------------------------------------------------------------------- | :--------: | :----------------------------- |
|
||||
| `DB_URL` | Database URL | | server |
|
||||
| `DB_HOSTNAME` | Database host | `database` | server |
|
||||
| `DB_PORT` | Database port | `5432` | server |
|
||||
| `DB_USERNAME` | Database user | `postgres` | server, database<sup>\*1</sup> |
|
||||
| `DB_PASSWORD` | Database password | `postgres` | server, database<sup>\*1</sup> |
|
||||
| `DB_DATABASE_NAME` | Database name | `immich` | server, database<sup>\*1</sup> |
|
||||
| `DB_SSL_MODE` | Database SSL mode | | server |
|
||||
| `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database vector extension (one of [`vectorchord`, `pgvector`, `pgvecto.rs`]) | | server |
|
||||
| `DB_SKIP_MIGRATIONS` | Whether to skip running migrations on startup (one of [`true`, `false`]) | `false` | server |
|
||||
|
||||
\*1: The values of `DB_USERNAME`, `DB_PASSWORD`, and `DB_DATABASE_NAME` are passed to the Postgres container as the variables `POSTGRES_USER`, `POSTGRES_PASSWORD`, and `POSTGRES_DB` in `docker-compose.yml`.
|
||||
|
||||
\*2: This setting cannot be changed after the server has successfully started up.
|
||||
\*2: If not provided, the appropriate extension to use is auto-detected at startup by introspecting the database. When multiple extensions are installed, the order of preference is VectorChord, pgvecto.rs, pgvector.
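
If you want to force a specific extension rather than rely on auto-detection, you can set the variable explicitly in your `.env` file. This is a sketch; valid values are listed in the table above:

```bash
# Force Immich to use VectorChord instead of auto-detecting the extension
DB_VECTOR_EXTENSION=vectorchord
```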
|
||||
|
||||
:::info
|
||||
|
||||
|
||||
@@ -29,7 +29,7 @@ Download [`docker-compose.yml`](https://github.com/immich-app/immich/releases/la
|
||||
|
||||
## Step 2 - Populate the .env file with custom values
|
||||
|
||||
Follow [Step 2 in Docker Compose](./docker-compose#step-2---populate-the-env-file-with-custom-values) for instructions on customizing the `.env` file, and then return back to this guide to continue.
|
||||
Follow [Step 2 in Docker Compose](/docs/install/docker-compose#step-2---populate-the-env-file-with-custom-values) for instructions on customizing the `.env` file, and then return back to this guide to continue.
|
||||
|
||||
## Step 3 - Create a new project in Container Manager
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
sidebar_position: 80
|
||||
---
|
||||
|
||||
# TrueNAS SCALE [Community]
|
||||
# TrueNAS [Community]
|
||||
|
||||
:::note
|
||||
This is a community contribution and not officially supported by the Immich team, but included here for convenience.
|
||||
@@ -12,17 +12,17 @@ Community support can be found in the dedicated channel on the [Discord Server](
|
||||
**Please report app issues to the corresponding [Github Repository](https://github.com/truenas/charts/tree/master/community/immich).**
|
||||
:::
|
||||
|
||||
Immich can easily be installed on TrueNAS SCALE via the **Community** train application.
|
||||
Consider reviewing the TrueNAS [Apps tutorial](https://www.truenas.com/docs/scale/scaletutorials/apps/) if you have not previously configured applications on your system.
|
||||
Immich can easily be installed on TrueNAS Community Edition via the **Community** train application.
|
||||
Consider reviewing the TrueNAS [Apps resources](https://apps.truenas.com/getting-started/) if you have not previously configured applications on your system.
|
||||
|
||||
TrueNAS SCALE makes installing and updating Immich easy, but you must use the Immich web portal and mobile app to configure accounts and access libraries.
|
||||
TrueNAS Community Edition makes installing and updating Immich easy, but you must use the Immich web portal and mobile app to configure accounts and access libraries.
|
||||
|
||||
## First Steps
|
||||
|
||||
The Immich app in TrueNAS SCALE installs, completes the initial configuration, then starts the Immich web portal.
|
||||
When updates become available, SCALE alerts and provides easy updates.
|
||||
The Immich app in TrueNAS Community Edition installs, completes the initial configuration, then starts the Immich web portal.
|
||||
When updates become available, TrueNAS alerts and provides easy updates.
|
||||
|
||||
Before installing the Immich app in SCALE, review the [Environment Variables](#environment-variables) documentation to see if you want to configure any during installation.
|
||||
Before installing the Immich app in TrueNAS, review the [Environment Variables](#environment-variables) documentation to see if you want to configure any during installation.
|
||||
You may also configure environment variables at any time after deploying the application.
|
||||
|
||||
### Setting up Storage Datasets
|
||||
@@ -126,9 +126,9 @@ className="border rounded-xl"
|
||||
|
||||
Accept the default port `30041` in **WebUI Port** or enter a custom port number.
|
||||
:::info Allowed Port Numbers
|
||||
Only numbers within the range 9000-65535 may be used on SCALE versions below TrueNAS Scale 24.10 Electric Eel.
|
||||
Only numbers within the range 9000-65535 may be used on TrueNAS versions below TrueNAS Community Edition 24.10 Electric Eel.
|
||||
|
||||
Regardless of version, to avoid port conflicts, don't use [ports on this list](https://www.truenas.com/docs/references/defaultports/).
|
||||
Regardless of version, to avoid port conflicts, don't use [ports on this list](https://www.truenas.com/docs/solutions/optimizations/security/#truenas-default-ports).
|
||||
:::
|
||||
|
||||
### Storage Configuration
|
||||
@@ -173,7 +173,7 @@ className="border rounded-xl"
|
||||
|
||||
You may configure [External Libraries](/docs/features/libraries) by mounting them using **Additional Storage**.
|
||||
The **Mount Path** is the location you will need to copy and paste into the External Library settings within Immich.
|
||||
The **Host Path** is the location on the TrueNAS SCALE server where your external library is located.
|
||||
The **Host Path** is the location on the TrueNAS Community Edition server where your external library is located.
|
||||
|
||||
<!-- A section for Labels would go here but I don't know what they do. -->
|
||||
|
||||
@@ -188,17 +188,17 @@ className="border rounded-xl"
|
||||
|
||||
Accept the default **CPU** limit of `2` threads or specify the number of threads (CPUs with Multi-/Hyper-threading have 2 threads per core).
|
||||
|
||||
Accept the default **Memory** limit of `4096` MB or specify the number of MB of RAM. If you're using Machine Learning you should probably set this above 8000 MB.
|
||||
Specify the **Memory** limit in MB of RAM. Immich recommends at least 6000 MB (6GB). If you selected **Enable Machine Learning** in **Immich Configuration**, you should probably set this above 8000 MB.
|
||||
|
||||
:::info Older SCALE Versions
|
||||
Before TrueNAS SCALE version 24.10 Electric Eel:
|
||||
:::info Older TrueNAS Versions
|
||||
Before TrueNAS Community Edition version 24.10 Electric Eel:
|
||||
|
||||
The **CPU** value was specified in a different format with a default of `4000m` which is 4 threads.
|
||||
|
||||
The **Memory** value was specified in a different format with a default of `8Gi` which is 8 GiB of RAM. The value was specified in bytes or a number with a measurement suffix. Examples: `129M`, `123Mi`, `1000000000`
|
||||
:::
|
||||
|
||||
Enable **GPU Configuration** options if you have a GPU that you will use for [Hardware Transcoding](/docs/features/hardware-transcoding) and/or [Hardware-Accelerated Machine Learning](/docs/features/ml-hardware-acceleration.md). More info: [GPU Passthrough Docs for TrueNAS Apps](https://www.truenas.com/docs/truenasapps/#gpu-passthrough)
|
||||
Enable **GPU Configuration** options if you have a GPU that you will use for [Hardware Transcoding](/docs/features/hardware-transcoding) and/or [Hardware-Accelerated Machine Learning](/docs/features/ml-hardware-acceleration.md). More info: [GPU Passthrough Docs for TrueNAS Apps](https://apps.truenas.com/managing-apps/installing-apps/#gpu-passthrough)
|
||||
|
||||
### Install
|
||||
|
||||
@@ -240,7 +240,7 @@ className="border rounded-xl"
|
||||
/>
|
||||
|
||||
:::info
|
||||
Some Environment Variables are not available for the TrueNAS SCALE app. This is mainly because they can be configured through GUI options in the [Edit Immich screen](#edit-app-settings).
|
||||
Some Environment Variables are not available for the TrueNAS Community Edition app. This is mainly because they can be configured through GUI options in the [Edit Immich screen](#edit-app-settings).
|
||||
|
||||
Some examples are: `IMMICH_VERSION`, `UPLOAD_LOCATION`, `DB_DATA_LOCATION`, `TZ`, `IMMICH_LOG_LEVEL`, `DB_PASSWORD`, `REDIS_PASSWORD`.
|
||||
:::
|
||||
@@ -251,7 +251,7 @@ Some examples are: `IMMICH_VERSION`, `UPLOAD_LOCATION`, `DB_DATA_LOCATION`, `TZ`
|
||||
Make sure to read the general [upgrade instructions](/docs/install/upgrading.md).
|
||||
:::
|
||||
|
||||
When updates become available, SCALE alerts and provides easy updates.
|
||||
When updates become available, TrueNAS alerts and provides easy updates.
|
||||
To update the app to the latest version:
|
||||
|
||||
- Go to the **Installed Applications** screen and select Immich from the list of installed applications.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
sidebar_position: 2
|
||||
sidebar_position: 3
|
||||
---
|
||||
|
||||
# Comparison
|
||||
|
||||
BIN
docs/docs/overview/img/social-preview-light.webp
Normal file
Binary file not shown.
After Width: | Height: | Size: 233 KiB |
@@ -1,5 +1,5 @@
|
||||
---
|
||||
sidebar_position: 3
|
||||
sidebar_position: 2
|
||||
---
|
||||
|
||||
# Quick start
|
||||
@@ -10,11 +10,20 @@ to install and use it.
|
||||
|
||||
## Requirements
|
||||
|
||||
Check the [requirements page](/docs/install/requirements) to get started.
|
||||
- A system with at least 4GB of RAM and 2 CPU cores.
|
||||
- [Docker](https://docs.docker.com/engine/install/)
|
||||
|
||||
> For a more detailed list of requirements, see the [requirements page](/docs/install/requirements).
|
||||
|
||||
---
|
||||
|
||||
## Set up the server
|
||||
|
||||
Follow the [Docker Compose (Recommended)](/docs/install/docker-compose) instructions to install the server.
|
||||
import DockerComposeSteps from '/docs/partials/_docker-compose-install-steps.mdx';
|
||||
|
||||
<DockerComposeSteps />
|
||||
|
||||
---
|
||||
|
||||
## Try the web app
|
||||
|
||||
@@ -26,6 +35,8 @@ Try uploading a picture from your browser.
|
||||
|
||||
<img src={require('./img/upload-button.webp').default} title="Upload button" />
|
||||
|
||||
---
|
||||
|
||||
## Try the mobile app
|
||||
|
||||
### Download the Mobile App
|
||||
@@ -56,6 +67,8 @@ You can select the **Jobs** tab to see Immich processing your photos.
|
||||
|
||||
<img src={require('/docs/guides/img/jobs-tab.webp').default} title="Jobs tab" width={300} />
|
||||
|
||||
---
|
||||
|
||||
## Review the database backup and restore process
|
||||
|
||||
Immich has built-in database backups. You can refer to the
|
||||
@@ -65,6 +78,8 @@ Immich has built-in database backups. You can refer to the
|
||||
The database only contains metadata and user information. You must set up manual backups of the images and videos stored in `UPLOAD_LOCATION`.
|
||||
:::
|
||||
|
||||
---
|
||||
|
||||
## Where to go from here?
|
||||
|
||||
You may decide you'd like to install the server a different way; the Install category on the left menu provides many options.
|
||||
|
||||
@@ -2,9 +2,13 @@
|
||||
sidebar_position: 1
|
||||
---
|
||||
|
||||
# Introduction
|
||||
# Welcome to Immich
|
||||
|
||||
<img src={require('./img/feature-panel.webp').default} alt="Immich - Self-hosted photos and videos backup tool" />
|
||||
<img
|
||||
src={require('./img/social-preview-light.webp').default}
|
||||
alt="Immich - Self-hosted photos and videos backup tool"
|
||||
data-theme="light"
|
||||
/>
|
||||
|
||||
## Welcome!
|
||||
|
||||
43
docs/docs/partials/_docker-compose-install-steps.mdx
Normal file
@@ -0,0 +1,43 @@
|
||||
import CodeBlock from '@theme/CodeBlock';
|
||||
import ExampleEnv from '!!raw-loader!../../../docker/example.env';
|
||||
|
||||
### Step 1 - Download the required files
|
||||
|
||||
Create a directory of your choice (e.g. `./immich-app`) to hold the `docker-compose.yml` and `.env` files.
|
||||
|
||||
```bash title="Move to the directory you created"
|
||||
mkdir ./immich-app
|
||||
cd ./immich-app
|
||||
```
|
||||
|
||||
Download [`docker-compose.yml`](https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml) and [`example.env`](https://github.com/immich-app/immich/releases/latest/download/example.env) by running the following commands:
|
||||
|
||||
```bash title="Get docker-compose.yml file"
|
||||
wget -O docker-compose.yml https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
|
||||
```
|
||||
|
||||
```bash title="Get .env file"
|
||||
wget -O .env https://github.com/immich-app/immich/releases/latest/download/example.env
|
||||
```
|
||||
|
||||
You can alternatively download these two files from your browser and move them to the directory that you created, in which case ensure that you rename `example.env` to `.env`.
|
||||
|
||||
### Step 2 - Populate the .env file with custom values
|
||||
|
||||
<CodeBlock language="bash" title="Default environmental variable content">
|
||||
{ExampleEnv}
|
||||
</CodeBlock>
|
||||
|
||||
- Populate `UPLOAD_LOCATION` with your preferred location for storing backup assets. It should be a new directory on the server with enough free space.
|
||||
- Consider changing `DB_PASSWORD` to a custom value. Postgres is not publicly exposed, so this password is only used for local authentication.
|
||||
To avoid issues with Docker parsing this value, it is best to use only the characters `A-Za-z0-9`. `pwgen` is a handy utility for this.
|
||||
- Set your timezone by uncommenting the `TZ=` line.
|
||||
- Populate custom database information if necessary.
|
||||
|
||||
### Step 3 - Start the containers
|
||||
|
||||
From the directory you created in Step 1 (which should now contain your customized `docker-compose.yml` and `.env` files), run the following command to start Immich as a background service:
|
||||
|
||||
```bash title="Start the containers"
|
||||
docker compose up -d
|
||||
```
|
||||
@@ -95,7 +95,7 @@ const config = {
|
||||
position: 'right',
|
||||
},
|
||||
{
|
||||
to: '/docs/overview/introduction',
|
||||
to: '/docs/overview/welcome',
|
||||
position: 'right',
|
||||
label: 'Docs',
|
||||
},
|
||||
@@ -124,6 +124,12 @@ const config = {
|
||||
label: 'Discord',
|
||||
position: 'right',
|
||||
},
|
||||
{
|
||||
type: 'html',
|
||||
position: 'right',
|
||||
value:
|
||||
'<a href="https://buy.immich.app" target="_blank" class="no-underline hover:no-underline"><button class="buy-button bg-immich-primary dark:bg-immich-dark-primary text-white dark:text-black rounded-xl">Buy Immich</button></a>',
|
||||
},
|
||||
],
|
||||
},
|
||||
footer: {
|
||||
@@ -134,7 +140,7 @@ const config = {
|
||||
items: [
|
||||
{
|
||||
label: 'Welcome',
|
||||
to: '/docs/overview/introduction',
|
||||
to: '/docs/overview/welcome',
|
||||
},
|
||||
{
|
||||
label: 'Installation',
|
||||
|
||||
5324
docs/package-lock.json
generated
File diff suppressed because it is too large
@@ -57,6 +57,6 @@
|
||||
"node": ">=20"
|
||||
},
|
||||
"volta": {
|
||||
"node": "22.14.0"
|
||||
"node": "22.16.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -40,13 +40,9 @@ const projects: CommunityProjectProps[] = [
|
||||
},
|
||||
{
|
||||
title: 'Lightroom Immich Plugin: lrc-immich-plugin',
|
||||
description: 'Another Lightroom plugin to publish or export photos from Lightroom to Immich.',
|
||||
url: 'https://github.com/bmachek/lrc-immich-plugin',
|
||||
},
|
||||
{
|
||||
title: 'Immich Duplicate Finder',
|
||||
description: 'Webapp that uses machine learning to identify near-duplicate images.',
|
||||
url: 'https://github.com/vale46n1/immich_duplicate_finder',
|
||||
description:
|
||||
'Lightroom plugin to publish, export photos from Lightroom to Immich. Import from Immich to Lightroom is also supported.',
|
||||
url: 'https://blog.fokuspunk.de/lrc-immich-plugin/',
|
||||
},
|
||||
{
|
||||
title: 'Immich-Tiktok-Remover',
|
||||
|
||||
@@ -7,14 +7,22 @@
|
||||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
|
||||
@import url('https://fonts.googleapis.com/css2?family=Be+Vietnam+Pro:ital,wght@0,100;0,200;0,300;0,400;0,500;0,600;0,700;0,800;0,900;1,100;1,200;1,300;1,400;1,500;1,600;1,700;1,800;1,900&display=swap');
|
||||
|
||||
body {
|
||||
font-family: 'Be Vietnam Pro', serif;
|
||||
font-optical-sizing: auto;
|
||||
/* font-size: 1.125rem;
|
||||
@font-face {
|
||||
font-family: 'Overpass';
|
||||
src: url('/fonts/overpass/Overpass.ttf') format('truetype-variations');
|
||||
font-weight: 1 999;
|
||||
font-style: normal;
|
||||
ascent-override: 106.25%;
|
||||
size-adjust: 106.25%; */
|
||||
size-adjust: 106.25%;
|
||||
}
|
||||
|
||||
@font-face {
|
||||
font-family: 'Overpass Mono';
|
||||
src: url('/fonts/overpass/OverpassMono.ttf') format('truetype-variations');
|
||||
font-weight: 1 999;
|
||||
font-style: normal;
|
||||
ascent-override: 106.25%;
|
||||
size-adjust: 106.25%;
|
||||
}
|
||||
|
||||
.breadcrumbs__link {
|
||||
@@ -29,6 +37,7 @@ img {
|
||||
|
||||
/* You can override the default Infima variables here. */
|
||||
:root {
|
||||
font-family: 'Overpass', sans-serif;
|
||||
--ifm-color-primary: #4250af;
|
||||
--ifm-color-primary-dark: #4250af;
|
||||
--ifm-color-primary-darker: #4250af;
|
||||
@@ -59,14 +68,12 @@ div[class^='announcementBar_'] {
|
||||
}
|
||||
|
||||
.menu__link {
|
||||
padding: 10px;
|
||||
padding-left: 16px;
|
||||
padding: 10px 10px 10px 16px;
|
||||
border-radius: 24px;
|
||||
margin-right: 16px;
|
||||
}
|
||||
|
||||
.menu__list-item-collapsible {
|
||||
border-radius: 10px;
|
||||
margin-right: 16px;
|
||||
border-radius: 24px;
|
||||
}
|
||||
@@ -83,3 +90,12 @@ div[class*='navbar__items'] > li:has(a[class*='version-switcher-34ab39']) {
|
||||
code {
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
.buy-button {
|
||||
padding: 8px 14px;
|
||||
border: 1px solid transparent;
|
||||
font-family: 'Overpass', sans-serif;
|
||||
font-weight: 500;
|
||||
cursor: pointer;
|
||||
box-shadow: 0 0 5px 2px rgba(181, 206, 254, 0.4);
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ import {
|
||||
mdiBug,
|
||||
mdiCalendarToday,
|
||||
mdiCrosshairsOff,
|
||||
mdiCrop,
|
||||
mdiDatabase,
|
||||
mdiLeadPencil,
|
||||
mdiLockOff,
|
||||
@@ -22,6 +23,18 @@ const withLanguage = (date: Date) => (language: string) => date.toLocaleDateStri
|
||||
type Item = Omit<TimelineItem, 'done' | 'getDateLabel'> & { date: Date };
|
||||
|
||||
const items: Item[] = [
|
||||
{
|
||||
icon: mdiCrop,
|
||||
iconColor: 'tomato',
|
||||
title: 'Image dimensions in EXIF metadata are cursed',
|
||||
description:
|
||||
'The dimensions in EXIF metadata can be different from the actual dimensions of the image, causing issues with cropping and resizing.',
|
||||
link: {
|
||||
url: 'https://github.com/immich-app/immich/pull/17974',
|
||||
text: '#17974',
|
||||
},
|
||||
date: new Date(2025, 5, 5),
|
||||
},
|
||||
{
|
||||
icon: mdiMicrosoftWindows,
|
||||
iconColor: '#357EC7',
|
||||
|
||||
5
docs/src/pages/errors.md
Normal file
@@ -0,0 +1,5 @@
|
||||
# Errors
|
||||
|
||||
## TypeORM Upgrade
|
||||
|
||||
Upgrading to Immich `v2.x.x` requires passing through `v1.132.0` first. This means you must start the application at least once on version `1.132.0` (or later); doing so completes the database schema upgrades that are required for `v2.0.0`. After Immich has successfully booted on that version, shut the system down and try the `v2.x.x` upgrade again.
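
One way to follow this path with a Docker Compose setup is to pin the intermediate version first. This is a sketch and assumes `IMMICH_VERSION` is set in your `.env` file, as in the standard example configuration:

```bash
# 1. Set IMMICH_VERSION=v1.132.0 in .env, then start once so the schema upgrades run
docker compose pull && docker compose up -d
docker compose logs -f immich-server    # wait for a clean startup, then Ctrl+C
docker compose down

# 2. Set IMMICH_VERSION to the desired v2.x.x release in .env and start again
docker compose pull && docker compose up -d
```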
|
||||
@@ -4,6 +4,7 @@ import Layout from '@theme/Layout';
|
||||
import { discordPath, discordViewBox } from '@site/src/components/svg-paths';
|
||||
import ThemedImage from '@theme/ThemedImage';
|
||||
import Icon from '@mdi/react';
|
||||
|
||||
function HomepageHeader() {
|
||||
return (
|
||||
<header>
|
||||
@@ -12,11 +13,14 @@ function HomepageHeader() {
|
||||
<div className="w-full h-[120vh] absolute left-0 top-0 backdrop-blur-3xl bg-immich-bg/40 dark:bg-transparent"></div>
|
||||
</div>
|
||||
<section className="text-center pt-12 sm:pt-24 bg-immich-bg/50 dark:bg-immich-dark-bg/80">
|
||||
<ThemedImage
|
||||
sources={{ dark: 'img/logomark-dark.svg', light: 'img/logomark-light.svg' }}
|
||||
className="h-[115px] w-[115px] mb-2 antialiased rounded-none"
|
||||
alt="Immich logo"
|
||||
/>
|
||||
<a href="https://futo.org" target="_blank" rel="noopener noreferrer">
|
||||
<ThemedImage
|
||||
sources={{ dark: 'img/logomark-dark-with-futo.svg', light: 'img/logomark-light-with-futo.svg' }}
|
||||
className="h-[125px] w-[125px] mb-2 antialiased rounded-none"
|
||||
alt="Immich logo"
|
||||
/>
|
||||
</a>
|
||||
|
||||
<div className="mt-8">
|
||||
<p className="text-3xl md:text-5xl sm:leading-tight mb-1 font-extrabold text-black/90 dark:text-white px-4">
|
||||
Self-hosted{' '}
|
||||
@@ -27,7 +31,7 @@ function HomepageHeader() {
|
||||
solution<span className="block"></span>
|
||||
</p>
|
||||
|
||||
<p className="max-w-1/4 m-auto mt-4 px-4">
|
||||
<p className="max-w-1/4 m-auto mt-4 px-4 text-lg text-gray-700 dark:text-gray-100">
|
||||
Easily back up, organize, and manage your photos on your own server. Immich helps you
|
||||
<span className="sm:block"></span> browse, search and organize your photos and videos with ease, without
|
||||
sacrificing your privacy.
|
||||
@@ -35,27 +39,21 @@ function HomepageHeader() {
|
||||
</div>
|
||||
<div className="flex flex-col sm:flex-row place-items-center place-content-center mt-9 gap-4 ">
|
||||
<Link
|
||||
className="flex place-items-center place-content-center py-3 px-8 border bg-immich-primary dark:bg-immich-dark-primary rounded-xl no-underline hover:no-underline text-white hover:text-gray-50 dark:text-immich-dark-bg font-bold uppercase"
|
||||
to="docs/overview/introduction"
|
||||
className="flex place-items-center place-content-center py-3 px-8 border bg-immich-primary dark:bg-immich-dark-primary rounded-xl no-underline hover:no-underline text-white hover:text-gray-50 dark:text-immich-dark-bg font-bold"
|
||||
to="docs/overview/quick-start"
|
||||
>
|
||||
Get started
|
||||
Get Started
|
||||
</Link>
|
||||
|
||||
<Link
|
||||
className="flex place-items-center place-content-center py-3 px-8 border bg-immich-primary/10 dark:bg-gray-300 rounded-xl hover:no-underline text-immich-primary dark:text-immich-dark-bg font-bold uppercase"
|
||||
className="flex place-items-center place-content-center py-3 px-8 border bg-white/90 dark:bg-gray-300 rounded-xl hover:no-underline text-immich-primary dark:text-immich-dark-bg font-bold"
|
||||
to="https://demo.immich.app/"
|
||||
>
|
||||
Demo
|
||||
</Link>
|
||||
|
||||
<Link
|
||||
className="flex place-items-center place-content-center py-3 px-8 border bg-immich-primary/10 dark:bg-gray-300 rounded-xl hover:no-underline text-immich-primary dark:text-immich-dark-bg font-bold uppercase"
|
||||
to="https://immich.store"
|
||||
>
|
||||
Buy Merch
|
||||
Open Demo
|
||||
</Link>
|
||||
</div>
|
||||
<div className="my-12 flex gap-1 font-medium place-items-center place-content-center text-immich-primary dark:text-immich-dark-primary">
|
||||
|
||||
<div className="my-8 flex gap-1 font-medium place-items-center place-content-center text-immich-primary dark:text-immich-dark-primary">
|
||||
<Icon
|
||||
path={discordPath}
|
||||
viewBox={discordViewBox} /* viewBox may show an error in your IDE but it is normal. */
|
||||
@@ -88,11 +86,18 @@ function HomepageHeader() {
|
||||
<img className="h-24" alt="Get it on Google Play" src="/img/google-play-badge.png" />
|
||||
</a>
|
||||
</div>
|
||||
|
||||
<div className="h-24">
|
||||
<a href="https://apps.apple.com/sg/app/immich/id1613945652">
|
||||
<img className="h-24 sm:p-3.5 p-3" alt="Download on the App Store" src="/img/ios-app-store-badge.svg" />
|
||||
</a>
|
||||
</div>
|
||||
|
||||
<div className="h-24">
|
||||
<a href="https://github.com/immich-app/immich/releases/latest">
|
||||
<img className="h-24 sm:p-3.5 p-3" alt="Download APK" src="/img/download-apk-github.svg" />
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
<ThemedImage
|
||||
sources={{ dark: '/img/app-qr-code-dark.svg', light: '/img/app-qr-code-light.svg' }}
|
||||
@@ -111,7 +116,7 @@ export default function Home(): JSX.Element {
|
||||
<HomepageHeader />
|
||||
<div className="flex flex-col place-items-center text-center place-content-center dark:bg-immich-dark-bg py-8">
|
||||
<p>This project is available under GNU AGPL v3 license.</p>
|
||||
<p className="text-xs">Privacy should not be a luxury</p>
|
||||
<p className="text-sm">Privacy should not be a luxury</p>
|
||||
</div>
|
||||
</Layout>
|
||||
);
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import React from 'react';
|
||||
import Link from '@docusaurus/Link';
|
||||
import Layout from '@theme/Layout';
|
||||
function HomepageHeader() {
|
||||
return (
|
||||
|
||||
@@ -76,13 +76,18 @@ import {
|
||||
mdiWeb,
|
||||
mdiDatabaseOutline,
|
||||
mdiLinkEdit,
|
||||
mdiTagFaces,
|
||||
mdiMovieOpenPlayOutline,
|
||||
mdiCast,
|
||||
} from '@mdi/js';
|
||||
import Layout from '@theme/Layout';
|
||||
import React from 'react';
|
||||
import { Item, Timeline } from '../components/timeline';
|
||||
|
||||
const releases = {
|
||||
'v1.133.0': new Date(2025, 4, 21),
|
||||
'v1.130.0': new Date(2025, 2, 25),
|
||||
'v1.127.0': new Date(2025, 1, 26),
|
||||
'v1.122.0': new Date(2024, 11, 5),
|
||||
'v1.120.0': new Date(2024, 10, 6),
|
||||
'v1.114.0': new Date(2024, 8, 6),
|
||||
@@ -213,14 +218,6 @@ const roadmap: Item[] = [
|
||||
iconColor: 'indianred',
|
||||
title: 'Stable release',
|
||||
description: 'Immich goes stable',
|
||||
getDateLabel: () => 'Planned for early 2025',
|
||||
},
|
||||
{
|
||||
done: false,
|
||||
icon: mdiLockOutline,
|
||||
iconColor: 'sandybrown',
|
||||
title: 'Private/locked photos',
|
||||
description: 'Private assets with extra protections',
|
||||
getDateLabel: () => 'Planned for 2025',
|
||||
},
|
||||
{
|
||||
@@ -242,6 +239,27 @@ const roadmap: Item[] = [
|
||||
];
|
||||
|
||||
const milestones: Item[] = [
|
||||
withRelease({
|
||||
icon: mdiCast,
|
||||
iconColor: 'aqua',
|
||||
title: 'Google Cast (web)',
|
||||
description: 'Cast assets to Google Cast/Chromecast compatible devices',
|
||||
release: 'v1.133.0',
|
||||
}),
|
||||
withRelease({
|
||||
icon: mdiLockOutline,
|
||||
iconColor: 'sandybrown',
|
||||
title: 'Private/locked photos',
|
||||
description: 'Private assets with extra protections',
|
||||
release: 'v1.133.0',
|
||||
}),
|
||||
withRelease({
|
||||
icon: mdiFolderMultiple,
|
||||
iconColor: 'brown',
|
||||
title: 'Folders view in the mobile app',
|
||||
description: 'Browse your photos and videos in their folder structure inside the mobile app',
|
||||
release: 'v1.130.0',
|
||||
}),
|
||||
{
|
||||
icon: mdiStar,
|
||||
iconColor: 'gold',
|
||||
@@ -249,6 +267,14 @@ const milestones: Item[] = [
|
||||
description: 'Reached 60K Stars on GitHub!',
|
||||
getDateLabel: withLanguage(new Date(2025, 2, 4)),
|
||||
},
|
||||
withRelease({
|
||||
icon: mdiTagFaces,
|
||||
iconColor: 'teal',
|
||||
title: 'Manual face tagging',
|
||||
description:
|
||||
'Manually tag or remove faces in photos and videos, even when automatic detection misses or misidentifies them.',
|
||||
release: 'v1.127.0',
|
||||
}),
|
||||
withRelease({
|
||||
icon: mdiLinkEdit,
|
||||
iconColor: 'crimson',
|
||||
@@ -266,8 +292,8 @@ const milestones: Item[] = [
|
||||
withRelease({
|
||||
icon: mdiDatabaseOutline,
|
||||
iconColor: 'brown',
|
||||
title: 'Automatic database backups',
|
||||
description: 'Database backups are now integrated into the Immich server',
|
||||
title: 'Automatic database dumps',
|
||||
description: 'Database dumps are now integrated into the Immich server',
|
||||
release: 'v1.120.0',
|
||||
}),
|
||||
{
|
||||
@@ -300,7 +326,7 @@ const milestones: Item[] = [
|
||||
withRelease({
|
||||
icon: mdiFolderMultiple,
|
||||
iconColor: 'brown',
|
||||
title: 'Folders',
|
||||
title: 'Folders view',
|
||||
description: 'Browse your photos and videos in their folder structure',
|
||||
release: 'v1.113.0',
|
||||
}),
|
||||
|
||||
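The milestone entries above are built with withRelease(...), which presumably looks up the release date in the releases map and turns it into a localized label via withLanguage. Neither helper's body appears in this diff, so the following is only a sketch of how such helpers could be written; the field names mirror the Item shape used above, but everything not visible in the diff is an assumption.

// Sketch only: the real helpers live in the docs timeline code and are not shown in this diff.
type Item = {
  icon: string;
  iconColor: string;
  title: string;
  description: string;
  getDateLabel: (language: string) => string;
  done?: boolean;
};

const releases: Record<string, Date> = {
  'v1.133.0': new Date(2025, 4, 21),
  'v1.130.0': new Date(2025, 2, 25),
};

// Assumed behaviour: format a date in the viewer's locale.
const withLanguage = (date: Date) => (language: string) =>
  date.toLocaleDateString(language, { year: 'numeric', month: 'long', day: 'numeric' });

// Assumed behaviour: derive the date label for a milestone from the releases table.
const withRelease = (
  entry: Omit<Item, 'getDateLabel'> & { release: keyof typeof releases },
): Item => {
  const { release, ...rest } = entry;
  return {
    ...rest,
    getDateLabel: (language) => `${release} - ${withLanguage(releases[release])(language)}`,
  };
};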
5
docs/static/.well-known/security.txt
vendored
Normal file
@@ -0,0 +1,5 @@
Policy: https://github.com/immich-app/immich/blob/main/SECURITY.md
Contact: mailto:security@immich.app
Preferred-Languages: en
Expires: 2026-05-01T23:59:00.000Z
Canonical: https://immich.app/.well-known/security.txt
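The new security.txt follows the RFC 9116 layout. As a rough illustration (not part of the repository), a small script could fetch the published file and warn when the Expires timestamp is approaching; the URL and field names below come straight from the file above, everything else is hypothetical.

// Hypothetical check: warn when the published security.txt is close to its Expires date.
const checkSecurityTxt = async (): Promise<void> => {
  const res = await fetch('https://immich.app/.well-known/security.txt');
  const text = await res.text();
  const expiresLine = text.split('\n').find((line) => line.startsWith('Expires:'));
  if (!expiresLine) {
    throw new Error('security.txt has no Expires field');
  }
  const expires = new Date(expiresLine.slice('Expires:'.length).trim());
  const daysLeft = (expires.getTime() - Date.now()) / (1000 * 60 * 60 * 24);
  console.log(daysLeft < 30 ? `security.txt expires in ${Math.floor(daysLeft)} days` : 'security.txt is current');
};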
1
docs/static/_redirects
vendored
@@ -30,3 +30,4 @@
/docs/guides/api-album-sync /docs/community-projects 307
/docs/guides/remove-offline-files /docs/community-projects 307
/milestones /roadmap 307
/docs/overview/introduction /docs/overview/welcome 307
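Each _redirects rule is "source destination status"; the added line 307-redirects the old introduction page to the new welcome page. A tiny parser, shown only to document the format (this code does not exist in the repo, and the 301 fallback for a missing status is an assumption about the hosting platform's default):

// Illustration of the `source destination status` format used in docs/static/_redirects.
type Redirect = { from: string; to: string; status: number };

const parseRedirects = (contents: string): Redirect[] =>
  contents
    .split('\n')
    .map((line) => line.trim())
    .filter((line) => line.length > 0 && !line.startsWith('#'))
    .map((line) => {
      const [from, to, status = '301'] = line.split(/\s+/);
      return { from, to, status: Number(status) };
    });

// parseRedirects('/docs/overview/introduction /docs/overview/welcome 307')
// => [{ from: '/docs/overview/introduction', to: '/docs/overview/welcome', status: 307 }]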
168
docs/static/archived-versions.json
vendored
@@ -1,36 +1,28 @@
|
||||
[
|
||||
{
|
||||
"label": "v1.134.0",
|
||||
"url": "https://v1.134.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.133.1",
|
||||
"url": "https://v1.133.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.133.0",
|
||||
"url": "https://v1.133.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.132.3",
|
||||
"url": "https://v1.132.3.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.131.3",
|
||||
"url": "https://v1.131.3.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.131.2",
|
||||
"url": "https://v1.131.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.131.1",
|
||||
"url": "https://v1.131.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.131.0",
|
||||
"url": "https://v1.131.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.130.3",
|
||||
"url": "https://v1.130.3.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.130.2",
|
||||
"url": "https://v1.130.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.130.1",
|
||||
"url": "https://v1.130.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.130.0",
|
||||
"url": "https://v1.130.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.129.0",
|
||||
"url": "https://v1.129.0.archive.immich.app"
|
||||
@@ -47,46 +39,14 @@
|
||||
"label": "v1.126.1",
|
||||
"url": "https://v1.126.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.126.0",
|
||||
"url": "https://v1.126.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.125.7",
|
||||
"url": "https://v1.125.7.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.125.6",
|
||||
"url": "https://v1.125.6.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.125.5",
|
||||
"url": "https://v1.125.5.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.125.3",
|
||||
"url": "https://v1.125.3.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.125.2",
|
||||
"url": "https://v1.125.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.125.1",
|
||||
"url": "https://v1.125.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.124.2",
|
||||
"url": "https://v1.124.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.124.1",
|
||||
"url": "https://v1.124.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.124.0",
|
||||
"url": "https://v1.124.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.123.0",
|
||||
"url": "https://v1.123.0.archive.immich.app"
|
||||
@@ -95,18 +55,6 @@
|
||||
"label": "v1.122.3",
|
||||
"url": "https://v1.122.3.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.122.2",
|
||||
"url": "https://v1.122.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.122.1",
|
||||
"url": "https://v1.122.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.122.0",
|
||||
"url": "https://v1.122.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.121.0",
|
||||
"url": "https://v1.121.0.archive.immich.app"
|
||||
@@ -115,34 +63,14 @@
|
||||
"label": "v1.120.2",
|
||||
"url": "https://v1.120.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.120.1",
|
||||
"url": "https://v1.120.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.120.0",
|
||||
"url": "https://v1.120.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.119.1",
|
||||
"url": "https://v1.119.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.119.0",
|
||||
"url": "https://v1.119.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.118.2",
|
||||
"url": "https://v1.118.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.118.1",
|
||||
"url": "https://v1.118.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.118.0",
|
||||
"url": "https://v1.118.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.117.0",
|
||||
"url": "https://v1.117.0.archive.immich.app"
|
||||
@@ -151,14 +79,6 @@
|
||||
"label": "v1.116.2",
|
||||
"url": "https://v1.116.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.116.1",
|
||||
"url": "https://v1.116.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.116.0",
|
||||
"url": "https://v1.116.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.115.0",
|
||||
"url": "https://v1.115.0.archive.immich.app"
|
||||
@@ -171,18 +91,10 @@
|
||||
"label": "v1.113.1",
|
||||
"url": "https://v1.113.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.113.0",
|
||||
"url": "https://v1.113.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.112.1",
|
||||
"url": "https://v1.112.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.112.0",
|
||||
"url": "https://v1.112.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.111.0",
|
||||
"url": "https://v1.111.0.archive.immich.app"
|
||||
@@ -195,14 +107,6 @@
|
||||
"label": "v1.109.2",
|
||||
"url": "https://v1.109.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.109.1",
|
||||
"url": "https://v1.109.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.109.0",
|
||||
"url": "https://v1.109.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.108.0",
|
||||
"url": "https://v1.108.0.archive.immich.app"
|
||||
@@ -211,38 +115,14 @@
|
||||
"label": "v1.107.2",
|
||||
"url": "https://v1.107.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.107.1",
|
||||
"url": "https://v1.107.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.107.0",
|
||||
"url": "https://v1.107.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.106.4",
|
||||
"url": "https://v1.106.4.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.106.3",
|
||||
"url": "https://v1.106.3.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.106.2",
|
||||
"url": "https://v1.106.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.106.1",
|
||||
"url": "https://v1.106.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.105.1",
|
||||
"url": "https://v1.105.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.105.0",
|
||||
"url": "https://v1.105.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.104.0",
|
||||
"url": "https://v1.104.0.archive.immich.app"
|
||||
@@ -251,26 +131,10 @@
|
||||
"label": "v1.103.1",
|
||||
"url": "https://v1.103.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.103.0",
|
||||
"url": "https://v1.103.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.102.3",
|
||||
"url": "https://v1.102.3.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.102.2",
|
||||
"url": "https://v1.102.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.102.1",
|
||||
"url": "https://v1.102.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.102.0",
|
||||
"url": "https://v1.102.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.101.0",
|
||||
"url": "https://v1.101.0.archive.immich.app"
|
||||
|
||||
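archived-versions.json is a flat array of { label, url } entries, newest first, pointing at the per-version documentation archives; the diff above mostly trims intermediate patch releases. For reference, a minimal typed loader (illustrative only, and it assumes the static file is served from the docs site root):

// Illustrative only: read the version archive list that the docs site ships as a static file.
type ArchivedVersion = { label: string; url: string };

const loadArchivedVersions = async (): Promise<ArchivedVersion[]> => {
  const res = await fetch('https://immich.app/archived-versions.json');
  return (await res.json()) as ArchivedVersion[];
};

// Example: find the archive URL for a given release label.
const findArchive = (versions: ArchivedVersion[], label: string) =>
  versions.find((version) => version.label === label)?.url;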
BIN
docs/static/fonts/overpass/Overpass-Italic.ttf
vendored
Normal file
Binary file not shown.
BIN
docs/static/fonts/overpass/Overpass.ttf
vendored
Normal file
Binary file not shown.
BIN
docs/static/fonts/overpass/OverpassMono.ttf
vendored
Normal file
Binary file not shown.
13
docs/static/img/download-apk-github.svg
vendored
Normal file
File diff suppressed because one or more lines are too long
After Width: | Height: | Size: 14 KiB
22
docs/static/img/logomark-dark-with-futo.svg
vendored
Normal file
File diff suppressed because one or more lines are too long
After Width: | Height: | Size: 75 KiB
21
docs/static/img/logomark-light-with-futo.svg
vendored
Normal file
File diff suppressed because one or more lines are too long
After Width: | Height: | Size: 75 KiB
@@ -1 +1 @@
22.14.0
22.16.0
@@ -34,11 +34,11 @@ services:
- 2285:2285

redis:
image: redis:6.2-alpine@sha256:148bb5411c184abd288d9aaed139c98123eeb8824c5d3fce03cf721db58066d8
image: redis:6.2-alpine@sha256:3211c33a618c457e5d241922c975dbc4f446d0bdb2dc75694f5573ef8e2d01fa

database:
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
command: -c fsync=off -c shared_preload_libraries=vectors.so
image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:e6d1209c1c13791c6f9fbf726c41865e3320dfe2445a6b4ffb03e25f904b3b37
command: -c fsync=off -c shared_preload_libraries=vchord.so -c config_file=/var/lib/postgresql/data/postgresql.conf
environment:
POSTGRES_PASSWORD: postgres
POSTGRES_USER: postgres
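The e2e database image moves from pgvecto-rs (vectors.so) to the immich-app postgres image with VectorChord 0.3.0 (vchord.so). A quick way to confirm the container actually loads the new extension is to query pg_extension. The snippet below is only a sketch: it assumes the extension registers itself as 'vchord', that the compose credentials above are used, and that the database is reachable on localhost with a default port; adjust to the actual e2e port mapping.

// Sketch: verify the VectorChord extension inside the e2e postgres container.
import pg from 'pg';

const checkVectorExtension = async (): Promise<void> => {
  const client = new pg.Client({
    host: 'localhost',
    port: 5432, // assumption: use whatever port the e2e compose maps
    user: 'postgres',
    password: 'postgres',
    database: 'postgres',
  });
  await client.connect();
  const { rows } = await client.query(
    "SELECT extname, extversion FROM pg_extension WHERE extname IN ('vchord', 'vectors')",
  );
  console.log(rows); // expect a 'vchord' row after the image swap, and no 'vectors' row
  await client.end();
};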
2788
e2e/package-lock.json
generated
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
{
"name": "immich-e2e",
"version": "1.131.3",
"version": "1.134.0",
"description": "",
"main": "index.js",
"type": "module",
@@ -25,9 +25,9 @@
"@immich/sdk": "file:../open-api/typescript-sdk",
"@playwright/test": "^1.44.1",
"@types/luxon": "^3.4.2",
"@types/node": "^22.14.0",
"@types/node": "^22.15.21",
"@types/oidc-provider": "^8.5.1",
"@types/pg": "^8.11.0",
"@types/pg": "^8.15.1",
"@types/pngjs": "^6.0.4",
"@types/supertest": "^6.0.2",
"@vitest/coverage-v8": "^3.0.0",
@@ -52,6 +52,6 @@
"vitest": "^3.0.0"
},
"volta": {
"node": "22.14.0"
"node": "22.16.0"
}
}
|
||||
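The Node pin is bumped to 22.16.0 in two places at once: .nvmrc and the volta.node field in e2e/package.json. A tiny guard like the following (hypothetical, not part of the repo) could keep the two pins from drifting apart:

// Hypothetical consistency check between .nvmrc and package.json's volta pin.
import { readFileSync } from 'node:fs';

const nvmrc = readFileSync('.nvmrc', 'utf8').trim();
const { volta } = JSON.parse(readFileSync('e2e/package.json', 'utf8')) as { volta?: { node?: string } };

if (volta?.node && volta.node !== nvmrc) {
  throw new Error(`Node version mismatch: .nvmrc=${nvmrc} volta.node=${volta.node}`);
}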
@@ -46,38 +46,6 @@ describe('/activities', () => {
|
||||
});
|
||||
|
||||
describe('GET /activities', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/activities');
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require an albumId', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/activities')
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toEqual(400);
|
||||
expect(body).toEqual(errorDto.badRequest(expect.arrayContaining(['albumId must be a UUID'])));
|
||||
});
|
||||
|
||||
it('should reject an invalid albumId', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/activities')
|
||||
.query({ albumId: uuidDto.invalid })
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toEqual(400);
|
||||
expect(body).toEqual(errorDto.badRequest(expect.arrayContaining(['albumId must be a UUID'])));
|
||||
});
|
||||
|
||||
it('should reject an invalid assetId', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/activities')
|
||||
.query({ albumId: uuidDto.notFound, assetId: uuidDto.invalid })
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toEqual(400);
|
||||
expect(body).toEqual(errorDto.badRequest(expect.arrayContaining(['assetId must be a UUID'])));
|
||||
});
|
||||
|
||||
it('should start off empty', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/activities')
|
||||
@@ -192,30 +160,6 @@ describe('/activities', () => {
|
||||
});
|
||||
|
||||
describe('POST /activities', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).post('/activities');
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require an albumId', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/activities')
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`)
|
||||
.send({ albumId: uuidDto.invalid });
|
||||
expect(status).toEqual(400);
|
||||
expect(body).toEqual(errorDto.badRequest(expect.arrayContaining(['albumId must be a UUID'])));
|
||||
});
|
||||
|
||||
it('should require a comment when type is comment', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/activities')
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`)
|
||||
.send({ albumId: uuidDto.notFound, type: 'comment', comment: null });
|
||||
expect(status).toEqual(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['comment must be a string', 'comment should not be empty']));
|
||||
});
|
||||
|
||||
it('should add a comment to an album', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/activities')
|
||||
@@ -330,20 +274,6 @@ describe('/activities', () => {
|
||||
});
|
||||
|
||||
describe('DELETE /activities/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).delete(`/activities/${uuidDto.notFound}`);
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require a valid uuid', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/activities/${uuidDto.invalid}`)
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should remove a comment from an album', async () => {
|
||||
const reaction = await createActivity({
|
||||
albumId: album.id,
|
||||
|
||||
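Across the e2e specs in this diff, the per-endpoint "should require authentication" tests are deleted. Their replacement is not visible here, so the following is only a guess at the kind of table-driven check such a cleanup typically ends up with; the route list and the consolidated describe block are hypothetical, while request/app/errorDto come from the existing spec utilities.

// Hypothetical consolidated check; the actual replacement is not shown in this diff.
import { errorDto } from 'src/responses';
import { app } from 'src/utils';
import request from 'supertest';
import { describe, expect, it } from 'vitest';

const unauthenticatedRoutes = [
  { method: 'get', path: '/activities' },
  { method: 'post', path: '/activities' },
  { method: 'get', path: '/albums' },
  { method: 'delete', path: '/api-keys/some-id' },
] as const;

describe('authentication guard', () => {
  it.each(unauthenticatedRoutes)('should reject $method $path without a token', async ({ method, path }) => {
    const { status, body } = await request(app)[method](path);
    expect(status).toBe(401);
    expect(body).toEqual(errorDto.unauthorized);
  });
});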
@@ -9,7 +9,7 @@ import {
|
||||
LoginResponseDto,
|
||||
SharedLinkType,
|
||||
} from '@immich/sdk';
|
||||
import { createUserDto, uuidDto } from 'src/fixtures';
|
||||
import { createUserDto } from 'src/fixtures';
|
||||
import { errorDto } from 'src/responses';
|
||||
import { app, asBearerAuth, utils } from 'src/utils';
|
||||
import request from 'supertest';
|
||||
@@ -128,28 +128,6 @@ describe('/albums', () => {
|
||||
});
|
||||
|
||||
describe('GET /albums', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/albums');
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should reject an invalid shared param', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/albums?shared=invalid')
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`);
|
||||
expect(status).toEqual(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['shared must be a boolean value']));
|
||||
});
|
||||
|
||||
it('should reject an invalid assetId param', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/albums?assetId=invalid')
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`);
|
||||
expect(status).toEqual(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['assetId must be a UUID']));
|
||||
});
|
||||
|
||||
it("should not show other users' favorites", async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get(`/albums/${user1Albums[0].id}?withoutAssets=false`)
|
||||
@@ -323,12 +301,6 @@ describe('/albums', () => {
|
||||
});
|
||||
|
||||
describe('GET /albums/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get(`/albums/${user1Albums[0].id}`);
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should return album info for own album', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get(`/albums/${user1Albums[0].id}?withoutAssets=false`)
|
||||
@@ -421,12 +393,6 @@ describe('/albums', () => {
|
||||
});
|
||||
|
||||
describe('GET /albums/statistics', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/albums/statistics');
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should return total count of albums the user has access to', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/albums/statistics')
|
||||
@@ -438,12 +404,6 @@ describe('/albums', () => {
|
||||
});
|
||||
|
||||
describe('POST /albums', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).post('/albums').send({ albumName: 'New album' });
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should create an album', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/albums')
|
||||
@@ -471,12 +431,6 @@ describe('/albums', () => {
|
||||
});
|
||||
|
||||
describe('PUT /albums/:id/assets', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).put(`/albums/${user1Albums[0].id}/assets`);
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should be able to add own asset to own album', async () => {
|
||||
const asset = await utils.createAsset(user1.accessToken);
|
||||
const { status, body } = await request(app)
|
||||
@@ -526,14 +480,6 @@ describe('/albums', () => {
|
||||
});
|
||||
|
||||
describe('PATCH /albums/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.patch(`/albums/${uuidDto.notFound}`)
|
||||
.send({ albumName: 'New album name' });
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should update an album', async () => {
|
||||
const album = await utils.createAlbum(user1.accessToken, {
|
||||
albumName: 'New album',
|
||||
@@ -576,15 +522,6 @@ describe('/albums', () => {
|
||||
});
|
||||
|
||||
describe('DELETE /albums/:id/assets', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/albums/${user1Albums[0].id}/assets`)
|
||||
.send({ ids: [user1Asset1.id] });
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require authorization', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/albums/${user1Albums[1].id}/assets`)
|
||||
@@ -679,13 +616,6 @@ describe('/albums', () => {
|
||||
});
|
||||
});
|
||||
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).put(`/albums/${user1Albums[0].id}/users`).send({ sharedUserIds: [] });
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should be able to add user to own album', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/albums/${album.id}/users`)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { LoginResponseDto, Permission, createApiKey } from '@immich/sdk';
|
||||
import { createUserDto, uuidDto } from 'src/fixtures';
|
||||
import { createUserDto } from 'src/fixtures';
|
||||
import { errorDto } from 'src/responses';
|
||||
import { app, asBearerAuth, utils } from 'src/utils';
|
||||
import request from 'supertest';
|
||||
@@ -24,12 +24,6 @@ describe('/api-keys', () => {
|
||||
});
|
||||
|
||||
describe('POST /api-keys', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).post('/api-keys').send({ name: 'API Key' });
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should not work without permission', async () => {
|
||||
const { secret } = await create(user.accessToken, [Permission.ApiKeyRead]);
|
||||
const { status, body } = await request(app).post('/api-keys').set('x-api-key', secret).send({ name: 'API Key' });
|
||||
@@ -99,12 +93,6 @@ describe('/api-keys', () => {
|
||||
});
|
||||
|
||||
describe('GET /api-keys', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/api-keys');
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should start off empty', async () => {
|
||||
const { status, body } = await request(app).get('/api-keys').set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(body).toEqual([]);
|
||||
@@ -125,12 +113,6 @@ describe('/api-keys', () => {
|
||||
});
|
||||
|
||||
describe('GET /api-keys/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get(`/api-keys/${uuidDto.notFound}`);
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require authorization', async () => {
|
||||
const { apiKey } = await create(user.accessToken, [Permission.All]);
|
||||
const { status, body } = await request(app)
|
||||
@@ -140,14 +122,6 @@ describe('/api-keys', () => {
|
||||
expect(body).toEqual(errorDto.badRequest('API Key not found'));
|
||||
});
|
||||
|
||||
it('should require a valid uuid', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get(`/api-keys/${uuidDto.invalid}`)
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should get api key details', async () => {
|
||||
const { apiKey } = await create(user.accessToken, [Permission.All]);
|
||||
const { status, body } = await request(app)
|
||||
@@ -165,12 +139,6 @@ describe('/api-keys', () => {
|
||||
});
|
||||
|
||||
describe('PUT /api-keys/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).put(`/api-keys/${uuidDto.notFound}`).send({ name: 'new name' });
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require authorization', async () => {
|
||||
const { apiKey } = await create(user.accessToken, [Permission.All]);
|
||||
const { status, body } = await request(app)
|
||||
@@ -181,15 +149,6 @@ describe('/api-keys', () => {
|
||||
expect(body).toEqual(errorDto.badRequest('API Key not found'));
|
||||
});
|
||||
|
||||
it('should require a valid uuid', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/api-keys/${uuidDto.invalid}`)
|
||||
.send({ name: 'new name' })
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should update api key details', async () => {
|
||||
const { apiKey } = await create(user.accessToken, [Permission.All]);
|
||||
const { status, body } = await request(app)
|
||||
@@ -208,12 +167,6 @@ describe('/api-keys', () => {
|
||||
});
|
||||
|
||||
describe('DELETE /api-keys/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).delete(`/api-keys/${uuidDto.notFound}`);
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require authorization', async () => {
|
||||
const { apiKey } = await create(user.accessToken, [Permission.All]);
|
||||
const { status, body } = await request(app)
|
||||
@@ -223,14 +176,6 @@ describe('/api-keys', () => {
|
||||
expect(body).toEqual(errorDto.badRequest('API Key not found'));
|
||||
});
|
||||
|
||||
it('should require a valid uuid', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/api-keys/${uuidDto.invalid}`)
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should delete an api key', async () => {
|
||||
const { apiKey } = await create(user.accessToken, [Permission.All]);
|
||||
const { status } = await request(app)
|
||||
|
||||
@@ -3,6 +3,7 @@ import {
|
||||
AssetMediaStatus,
|
||||
AssetResponseDto,
|
||||
AssetTypeEnum,
|
||||
AssetVisibility,
|
||||
getAssetInfo,
|
||||
getMyUser,
|
||||
LoginResponseDto,
|
||||
@@ -22,27 +23,9 @@ import { app, asBearerAuth, tempDir, TEN_TIMES, testAssetDir, utils } from 'src/
|
||||
import request from 'supertest';
|
||||
import { afterAll, beforeAll, describe, expect, it } from 'vitest';
|
||||
|
||||
const makeUploadDto = (options?: { omit: string }): Record<string, any> => {
|
||||
const dto: Record<string, any> = {
|
||||
deviceAssetId: 'example-image',
|
||||
deviceId: 'TEST',
|
||||
fileCreatedAt: new Date().toISOString(),
|
||||
fileModifiedAt: new Date().toISOString(),
|
||||
isFavorite: 'testing',
|
||||
duration: '0:00:00.000000',
|
||||
};
|
||||
|
||||
const omit = options?.omit;
|
||||
if (omit) {
|
||||
delete dto[omit];
|
||||
}
|
||||
|
||||
return dto;
|
||||
};
|
||||
|
||||
const locationAssetFilepath = `${testAssetDir}/metadata/gps-position/thompson-springs.jpg`;
|
||||
const ratingAssetFilepath = `${testAssetDir}/metadata/rating/mongolels.jpg`;
|
||||
const facesAssetFilepath = `${testAssetDir}/metadata/faces/portrait.jpg`;
|
||||
const facesAssetDir = `${testAssetDir}/metadata/faces`;
|
||||
|
||||
const readTags = async (bytes: Buffer, filename: string) => {
|
||||
const filepath = join(tempDir, filename);
|
||||
@@ -137,9 +120,9 @@ describe('/asset', () => {
|
||||
// stats
|
||||
utils.createAsset(statsUser.accessToken),
|
||||
utils.createAsset(statsUser.accessToken, { isFavorite: true }),
|
||||
utils.createAsset(statsUser.accessToken, { isArchived: true }),
|
||||
utils.createAsset(statsUser.accessToken, { visibility: AssetVisibility.Archive }),
|
||||
utils.createAsset(statsUser.accessToken, {
|
||||
isArchived: true,
|
||||
visibility: AssetVisibility.Archive,
|
||||
isFavorite: true,
|
||||
assetData: { filename: 'example.mp4' },
|
||||
}),
|
||||
@@ -160,13 +143,6 @@ describe('/asset', () => {
|
||||
});
|
||||
|
||||
describe('GET /assets/:id/original', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get(`/assets/${uuidDto.notFound}/original`);
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should download the file', async () => {
|
||||
const response = await request(app)
|
||||
.get(`/assets/${user1Assets[0].id}/original`)
|
||||
@@ -178,20 +154,6 @@ describe('/asset', () => {
|
||||
});
|
||||
|
||||
describe('GET /assets/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get(`/assets/${uuidDto.notFound}`);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
expect(status).toBe(401);
|
||||
});
|
||||
|
||||
it('should require a valid id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get(`/assets/${uuidDto.invalid}`)
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should require access', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get(`/assets/${user2Assets[0].id}`)
|
||||
@@ -224,31 +186,22 @@ describe('/asset', () => {
|
||||
});
|
||||
});
|
||||
|
||||
it('should get the asset faces', async () => {
|
||||
const config = await utils.getSystemConfig(admin.accessToken);
|
||||
config.metadata.faces.import = true;
|
||||
await updateConfig({ systemConfigDto: config }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
// asset faces
|
||||
const facesAsset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
describe('faces', () => {
|
||||
const metadataFaceTests = [
|
||||
{
|
||||
description: 'without orientation',
|
||||
filename: 'portrait.jpg',
|
||||
bytes: await readFile(facesAssetFilepath),
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: facesAsset.id });
|
||||
|
||||
const { status, body } = await request(app)
|
||||
.get(`/assets/${facesAsset.id}`)
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(200);
|
||||
expect(body.id).toEqual(facesAsset.id);
|
||||
expect(body.people).toMatchObject([
|
||||
{
|
||||
description: 'adjusting face regions to orientation',
|
||||
filename: 'portrait-orientation-6.jpg',
|
||||
},
|
||||
];
|
||||
// should produce same resulting face region coordinates for any orientation
|
||||
const expectedFaces = [
|
||||
{
|
||||
name: 'Marie Curie',
|
||||
birthDate: null,
|
||||
thumbnailPath: '',
|
||||
isHidden: false,
|
||||
faces: [
|
||||
{
|
||||
@@ -265,7 +218,6 @@ describe('/asset', () => {
|
||||
{
|
||||
name: 'Pierre Curie',
|
||||
birthDate: null,
|
||||
thumbnailPath: '',
|
||||
isHidden: false,
|
||||
faces: [
|
||||
{
|
||||
@@ -279,7 +231,30 @@ describe('/asset', () => {
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
];
|
||||
|
||||
it.each(metadataFaceTests)('should get the asset faces from $filename $description', async ({ filename }) => {
|
||||
const config = await utils.getSystemConfig(admin.accessToken);
|
||||
config.metadata.faces.import = true;
|
||||
await updateConfig({ systemConfigDto: config }, { headers: asBearerAuth(admin.accessToken) });
|
||||
|
||||
const facesAsset = await utils.createAsset(admin.accessToken, {
|
||||
assetData: {
|
||||
filename,
|
||||
bytes: await readFile(`${facesAssetDir}/${filename}`),
|
||||
},
|
||||
});
|
||||
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: facesAsset.id });
|
||||
|
||||
const { status, body } = await request(app)
|
||||
.get(`/assets/${facesAsset.id}`)
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
|
||||
expect(status).toBe(200);
|
||||
expect(body.id).toEqual(facesAsset.id);
|
||||
expect(body.people).toMatchObject(expectedFaces);
|
||||
});
|
||||
});
|
||||
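The face-region assertions are now table-driven: it.each takes the array of cases and interpolates object properties ($filename, $description) into the test title, so each orientation variant reports as its own test while sharing one body and one expected-faces fixture. A minimal standalone example of the same vitest pattern, unrelated to the Immich code:

// Minimal illustration of vitest's it.each object-property title interpolation.
import { describe, expect, it } from 'vitest';

const cases = [
  { description: 'lowercase input', input: 'abc', expected: 'ABC' },
  { description: 'mixed case input', input: 'aBc', expected: 'ABC' },
];

describe('toUpperCase', () => {
  it.each(cases)('handles $description ($input)', ({ input, expected }) => {
    expect(input.toUpperCase()).toBe(expected);
  });
});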
|
||||
it('should work with a shared link', async () => {
|
||||
@@ -333,7 +308,7 @@ describe('/asset', () => {
|
||||
});
|
||||
|
||||
it('disallows viewing archived assets', async () => {
|
||||
const asset = await utils.createAsset(user1.accessToken, { isArchived: true });
|
||||
const asset = await utils.createAsset(user1.accessToken, { visibility: AssetVisibility.Archive });
|
||||
|
||||
const { status } = await request(app)
|
||||
.get(`/assets/${asset.id}`)
|
||||
@@ -354,13 +329,6 @@ describe('/asset', () => {
|
||||
});
|
||||
|
||||
describe('GET /assets/statistics', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/assets/statistics');
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should return stats of all assets', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/assets/statistics')
|
||||
@@ -384,7 +352,7 @@ describe('/asset', () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/assets/statistics')
|
||||
.set('Authorization', `Bearer ${statsUser.accessToken}`)
|
||||
.query({ isArchived: true });
|
||||
.query({ visibility: AssetVisibility.Archive });
|
||||
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual({ images: 1, videos: 1, total: 2 });
|
||||
@@ -394,7 +362,7 @@ describe('/asset', () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/assets/statistics')
|
||||
.set('Authorization', `Bearer ${statsUser.accessToken}`)
|
||||
.query({ isFavorite: true, isArchived: true });
|
||||
.query({ isFavorite: true, visibility: AssetVisibility.Archive });
|
||||
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual({ images: 0, videos: 1, total: 1 });
|
||||
@@ -404,7 +372,7 @@ describe('/asset', () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/assets/statistics')
|
||||
.set('Authorization', `Bearer ${statsUser.accessToken}`)
|
||||
.query({ isFavorite: false, isArchived: false });
|
||||
.query({ isFavorite: false, visibility: AssetVisibility.Timeline });
|
||||
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual({ images: 1, videos: 0, total: 1 });
|
||||
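The statistics queries switch from the boolean isArchived flag to the AssetVisibility enum: archived assets are requested with visibility=archive and timeline assets with visibility=timeline, and combined filters keep working as before. Mirroring the tests above (this fragment reuses the spec's request, app, statsUser, and AssetVisibility imports, and the counts depend on the seeded fixtures):

// Same pattern as the tests above: combine a favorite filter with the new visibility enum.
const { status, body } = await request(app)
  .get('/assets/statistics')
  .set('Authorization', `Bearer ${statsUser.accessToken}`)
  .query({ isFavorite: true, visibility: AssetVisibility.Timeline });

expect(status).toBe(200);
// body.images / body.videos / body.total reflect whatever the beforeAll fixtures created.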
@@ -425,13 +393,6 @@ describe('/asset', () => {
|
||||
await utils.waitForQueueFinish(admin.accessToken, 'thumbnailGeneration');
|
||||
});
|
||||
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/assets/random');
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it.each(TEN_TIMES)('should return 1 random assets', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/assets/random')
|
||||
@@ -467,31 +428,9 @@ describe('/asset', () => {
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual([expect.objectContaining({ id: user2Assets[0].id })]);
|
||||
});
|
||||
|
||||
it('should return error', async () => {
|
||||
const { status } = await request(app)
|
||||
.get('/assets/random?count=ABC')
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`);
|
||||
|
||||
expect(status).toBe(400);
|
||||
});
|
||||
});
|
||||
|
||||
describe('PUT /assets/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).put(`/assets/:${uuidDto.notFound}`);
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require a valid id', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/assets/${uuidDto.invalid}`)
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should require access', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/assets/${user2Assets[0].id}`)
|
||||
@@ -519,7 +458,7 @@ describe('/asset', () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/assets/${user1Assets[0].id}`)
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.send({ isArchived: true });
|
||||
.send({ visibility: AssetVisibility.Archive });
|
||||
expect(body).toMatchObject({ id: user1Assets[0].id, isArchived: true });
|
||||
expect(status).toEqual(200);
|
||||
});
|
||||
@@ -619,28 +558,6 @@ describe('/asset', () => {
|
||||
expect(status).toEqual(200);
|
||||
});
|
||||
|
||||
it('should reject invalid gps coordinates', async () => {
|
||||
for (const test of [
|
||||
{ latitude: 12 },
|
||||
{ longitude: 12 },
|
||||
{ latitude: 12, longitude: 'abc' },
|
||||
{ latitude: 'abc', longitude: 12 },
|
||||
{ latitude: null, longitude: 12 },
|
||||
{ latitude: 12, longitude: null },
|
||||
{ latitude: 91, longitude: 12 },
|
||||
{ latitude: -91, longitude: 12 },
|
||||
{ latitude: 12, longitude: -181 },
|
||||
{ latitude: 12, longitude: 181 },
|
||||
]) {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/assets/${user1Assets[0].id}`)
|
||||
.send(test)
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest());
|
||||
}
|
||||
});
|
||||
|
||||
it('should update gps data', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/assets/${user1Assets[0].id}`)
|
||||
@@ -712,17 +629,6 @@ describe('/asset', () => {
|
||||
expect(status).toEqual(200);
|
||||
});
|
||||
|
||||
it('should reject invalid rating', async () => {
|
||||
for (const test of [{ rating: 7 }, { rating: 3.5 }, { rating: null }]) {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/assets/${user1Assets[0].id}`)
|
||||
.send(test)
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest());
|
||||
}
|
||||
});
|
||||
|
||||
it('should return tagged people', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/assets/${user1Assets[0].id}`)
|
||||
@@ -746,25 +652,6 @@ describe('/asset', () => {
|
||||
});
|
||||
|
||||
describe('DELETE /assets', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/assets`)
|
||||
.send({ ids: [uuidDto.notFound] });
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require a valid uuid', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/assets`)
|
||||
.send({ ids: [uuidDto.invalid] })
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['each value in ids must be a UUID']));
|
||||
});
|
||||
|
||||
it('should throw an error when the id is not found', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/assets`)
|
||||
@@ -877,13 +764,6 @@ describe('/asset', () => {
|
||||
});
|
||||
|
||||
describe('GET /assets/:id/thumbnail', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get(`/assets/${locationAsset.id}/thumbnail`);
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should not include gps data for webp thumbnails', async () => {
|
||||
await utils.waitForWebsocketEvent({
|
||||
event: 'assetUpload',
|
||||
@@ -919,13 +799,6 @@ describe('/asset', () => {
|
||||
});
|
||||
|
||||
describe('GET /assets/:id/original', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get(`/assets/${locationAsset.id}/original`);
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should download the original', async () => {
|
||||
const { status, body, type } = await request(app)
|
||||
.get(`/assets/${locationAsset.id}/original`)
|
||||
@@ -946,43 +819,9 @@ describe('/asset', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('PUT /assets', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).put('/assets');
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /assets', () => {
|
||||
beforeAll(setupTests, 30_000);
|
||||
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).post(`/assets`);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
expect(status).toBe(401);
|
||||
});
|
||||
|
||||
it.each([
|
||||
{ should: 'require `deviceAssetId`', dto: { ...makeUploadDto({ omit: 'deviceAssetId' }) } },
|
||||
{ should: 'require `deviceId`', dto: { ...makeUploadDto({ omit: 'deviceId' }) } },
|
||||
{ should: 'require `fileCreatedAt`', dto: { ...makeUploadDto({ omit: 'fileCreatedAt' }) } },
|
||||
{ should: 'require `fileModifiedAt`', dto: { ...makeUploadDto({ omit: 'fileModifiedAt' }) } },
|
||||
{ should: 'require `duration`', dto: { ...makeUploadDto({ omit: 'duration' }) } },
|
||||
{ should: 'throw if `isFavorite` is not a boolean', dto: { ...makeUploadDto(), isFavorite: 'not-a-boolean' } },
|
||||
{ should: 'throw if `isVisible` is not a boolean', dto: { ...makeUploadDto(), isVisible: 'not-a-boolean' } },
|
||||
{ should: 'throw if `isArchived` is not a boolean', dto: { ...makeUploadDto(), isArchived: 'not-a-boolean' } },
|
||||
])('should $should', async ({ dto }) => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/assets')
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.attach('assetData', makeRandomImage(), 'example.png')
|
||||
.field(dto);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest());
|
||||
});
|
||||
|
||||
const tests = [
|
||||
{
|
||||
input: 'formats/avif/8bit-sRGB.avif',
|
||||
@@ -1141,7 +980,7 @@ describe('/asset', () => {
|
||||
fNumber: 8,
|
||||
focalLength: 97,
|
||||
iso: 100,
|
||||
lensModel: 'E PZ 18-105mm F4 G OSS',
|
||||
lensModel: 'Sony E PZ 18-105mm F4 G OSS',
|
||||
fileSizeInByte: 25_001_984,
|
||||
dateTimeOriginal: '2016-09-27T10:51:44+00:00',
|
||||
orientation: '1',
|
||||
@@ -1163,7 +1002,7 @@ describe('/asset', () => {
|
||||
fNumber: 22,
|
||||
focalLength: 25,
|
||||
iso: 100,
|
||||
lensModel: 'E 25mm F2',
|
||||
lensModel: 'Zeiss Batis 25mm F2',
|
||||
fileSizeInByte: 49_512_448,
|
||||
dateTimeOriginal: '2016-01-08T14:08:01+00:00',
|
||||
orientation: '1',
|
||||
@@ -1234,7 +1073,7 @@ describe('/asset', () => {
|
||||
focalLength: 18.3,
|
||||
iso: 100,
|
||||
latitude: 36.613_24,
|
||||
lensModel: 'GR LENS 18.3mm F2.8',
|
||||
lensModel: '18.3mm F2.8',
|
||||
longitude: -121.897_85,
|
||||
make: 'RICOH IMAGING COMPANY, LTD.',
|
||||
model: 'RICOH GR III',
|
||||
@@ -1244,31 +1083,21 @@ describe('/asset', () => {
|
||||
},
|
||||
];
|
||||
|
||||
it(`should upload and generate a thumbnail for different file types`, async () => {
|
||||
// upload in parallel
|
||||
const assets = await Promise.all(
|
||||
tests.map(async ({ input }) => {
|
||||
const filepath = join(testAssetDir, input);
|
||||
return utils.createAsset(admin.accessToken, {
|
||||
assetData: { bytes: await readFile(filepath), filename: basename(filepath) },
|
||||
});
|
||||
}),
|
||||
);
|
||||
it.each(tests)(`should upload and generate a thumbnail for different file types`, async ({ input, expected }) => {
|
||||
const filepath = join(testAssetDir, input);
|
||||
const response = await utils.createAsset(admin.accessToken, {
|
||||
assetData: { bytes: await readFile(filepath), filename: basename(filepath) },
|
||||
});
|
||||
|
||||
for (const { id, status } of assets) {
|
||||
expect(status).toBe(AssetMediaStatus.Created);
|
||||
// longer timeout as the thumbnail generation from full-size raw files can take a while
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id });
|
||||
}
|
||||
expect(response.status).toBe(AssetMediaStatus.Created);
|
||||
const id = response.id;
|
||||
// longer timeout as the thumbnail generation from full-size raw files can take a while
|
||||
await utils.waitForWebsocketEvent({ event: 'assetUpload', id });
|
||||
|
||||
for (const [i, { id }] of assets.entries()) {
|
||||
const { expected } = tests[i];
|
||||
const asset = await utils.getAssetInfo(admin.accessToken, id);
|
||||
|
||||
expect(asset.exifInfo).toBeDefined();
|
||||
expect(asset.exifInfo).toMatchObject(expected.exifInfo);
|
||||
expect(asset).toMatchObject(expected);
|
||||
}
|
||||
const asset = await utils.getAssetInfo(admin.accessToken, id);
|
||||
expect(asset.exifInfo).toBeDefined();
|
||||
expect(asset.exifInfo).toMatchObject(expected.exifInfo);
|
||||
expect(asset).toMatchObject(expected);
|
||||
});
|
||||
|
||||
it('should handle a duplicate', async () => {
|
||||
|
||||
@@ -1,43 +0,0 @@
|
||||
import { deleteAssets, getAuditFiles, updateAsset, type LoginResponseDto } from '@immich/sdk';
|
||||
import { asBearerAuth, utils } from 'src/utils';
|
||||
import { beforeAll, describe, expect, it } from 'vitest';
|
||||
|
||||
describe('/audits', () => {
|
||||
let admin: LoginResponseDto;
|
||||
|
||||
beforeAll(async () => {
|
||||
await utils.resetDatabase();
|
||||
await utils.resetFilesystem();
|
||||
|
||||
admin = await utils.adminSetup();
|
||||
});
|
||||
|
||||
// TODO: Enable these tests again once #7436 is resolved as these were flaky
|
||||
describe.skip('GET :/file-report', () => {
|
||||
it('excludes assets without issues from report', async () => {
|
||||
const [trashedAsset, archivedAsset] = await Promise.all([
|
||||
utils.createAsset(admin.accessToken),
|
||||
utils.createAsset(admin.accessToken),
|
||||
utils.createAsset(admin.accessToken),
|
||||
]);
|
||||
|
||||
await Promise.all([
|
||||
deleteAssets({ assetBulkDeleteDto: { ids: [trashedAsset.id] } }, { headers: asBearerAuth(admin.accessToken) }),
|
||||
updateAsset(
|
||||
{
|
||||
id: archivedAsset.id,
|
||||
updateAssetDto: { isArchived: true },
|
||||
},
|
||||
{ headers: asBearerAuth(admin.accessToken) },
|
||||
),
|
||||
]);
|
||||
|
||||
const body = await getAuditFiles({
|
||||
headers: asBearerAuth(admin.accessToken),
|
||||
});
|
||||
|
||||
expect(body.orphans).toHaveLength(0);
|
||||
expect(body.extras).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -5,7 +5,7 @@ import { app, utils } from 'src/utils';
|
||||
import request from 'supertest';
|
||||
import { beforeEach, describe, expect, it } from 'vitest';
|
||||
|
||||
const { name, email, password } = signupDto.admin;
|
||||
const { email, password } = signupDto.admin;
|
||||
|
||||
describe(`/auth/admin-sign-up`, () => {
|
||||
beforeEach(async () => {
|
||||
@@ -13,58 +13,12 @@ describe(`/auth/admin-sign-up`, () => {
|
||||
});
|
||||
|
||||
describe('POST /auth/admin-sign-up', () => {
|
||||
const invalid = [
|
||||
{
|
||||
should: 'require an email address',
|
||||
data: { name, password },
|
||||
},
|
||||
{
|
||||
should: 'require a password',
|
||||
data: { name, email },
|
||||
},
|
||||
{
|
||||
should: 'require a name',
|
||||
data: { email, password },
|
||||
},
|
||||
{
|
||||
should: 'require a valid email',
|
||||
data: { name, email: 'immich', password },
|
||||
},
|
||||
];
|
||||
|
||||
for (const { should, data } of invalid) {
|
||||
it(`should ${should}`, async () => {
|
||||
const { status, body } = await request(app).post('/auth/admin-sign-up').send(data);
|
||||
expect(status).toEqual(400);
|
||||
expect(body).toEqual(errorDto.badRequest());
|
||||
});
|
||||
}
|
||||
|
||||
it(`should sign up the admin`, async () => {
|
||||
const { status, body } = await request(app).post('/auth/admin-sign-up').send(signupDto.admin);
|
||||
expect(status).toBe(201);
|
||||
expect(body).toEqual(signupResponseDto.admin);
|
||||
});
|
||||
|
||||
it('should sign up the admin with a local domain', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/auth/admin-sign-up')
|
||||
.send({ ...signupDto.admin, email: 'admin@local' });
|
||||
expect(status).toEqual(201);
|
||||
expect(body).toEqual({
|
||||
...signupResponseDto.admin,
|
||||
email: 'admin@local',
|
||||
});
|
||||
});
|
||||
|
||||
it('should transform email to lower case', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/auth/admin-sign-up')
|
||||
.send({ ...signupDto.admin, email: 'aDmIn@IMMICH.cloud' });
|
||||
expect(status).toEqual(201);
|
||||
expect(body).toEqual(signupResponseDto.admin);
|
||||
});
|
||||
|
||||
it('should not allow a second admin to sign up', async () => {
|
||||
await signUpAdmin({ signUpDto: signupDto.admin });
|
||||
|
||||
@@ -92,22 +46,6 @@ describe('/auth/*', () => {
|
||||
expect(body).toEqual(errorDto.incorrectLogin);
|
||||
});
|
||||
|
||||
for (const key of Object.keys(loginDto.admin)) {
|
||||
it(`should not allow null ${key}`, async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/auth/login')
|
||||
.send({ ...loginDto.admin, [key]: null });
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest());
|
||||
});
|
||||
|
||||
it('should reject an invalid email', async () => {
|
||||
const { status, body } = await request(app).post('/auth/login').send({ email: [], password });
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.invalidEmail);
|
||||
});
|
||||
}
|
||||
|
||||
it('should accept a correct password', async () => {
|
||||
const { status, body, headers } = await request(app).post('/auth/login').send({ email, password });
|
||||
expect(status).toBe(201);
|
||||
@@ -162,14 +100,6 @@ describe('/auth/*', () => {
|
||||
});
|
||||
|
||||
describe('POST /auth/change-password', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post(`/auth/change-password`)
|
||||
.send({ password, newPassword: 'Password1234' });
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require the current password', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post(`/auth/change-password`)
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { AssetMediaResponseDto, LoginResponseDto } from '@immich/sdk';
|
||||
import { readFile, writeFile } from 'node:fs/promises';
|
||||
import { errorDto } from 'src/responses';
|
||||
import { app, tempDir, utils } from 'src/utils';
|
||||
import request from 'supertest';
|
||||
import { beforeAll, describe, expect, it } from 'vitest';
|
||||
@@ -17,15 +16,6 @@ describe('/download', () => {
|
||||
});
|
||||
|
||||
describe('POST /download/info', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post(`/download/info`)
|
||||
.send({ assetIds: [asset1.id] });
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should download info', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/download/info')
|
||||
@@ -42,15 +32,6 @@ describe('/download', () => {
|
||||
});
|
||||
|
||||
describe('POST /download/archive', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post(`/download/archive`)
|
||||
.send({ assetIds: [asset1.id, asset2.id] });
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should download an archive', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/download/archive')
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { LoginResponseDto } from '@immich/sdk';
|
||||
import { AssetVisibility, LoginResponseDto } from '@immich/sdk';
|
||||
import { readFile } from 'node:fs/promises';
|
||||
import { basename, join } from 'node:path';
|
||||
import { Socket } from 'socket.io-client';
|
||||
@@ -44,7 +44,7 @@ describe('/map', () => {
|
||||
it('should get map markers for all non-archived assets', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/map/markers')
|
||||
.query({ isArchived: false })
|
||||
.query({ visibility: AssetVisibility.Timeline })
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
|
||||
expect(status).toBe(200);
|
||||
|
||||
@@ -6,6 +6,7 @@ import {
startOAuth,
updateConfig,
} from '@immich/sdk';
import { createHash, randomBytes } from 'node:crypto';
import { errorDto } from 'src/responses';
import { OAuthClient, OAuthUser } from 'src/setup/auth-server';
import { app, asBearerAuth, baseUrl, utils } from 'src/utils';
@@ -21,18 +22,30 @@ const mobileOverrideRedirectUri = 'https://photos.immich.app/oauth/mobile-redire

const redirect = async (url: string, cookies?: string[]) => {
const { headers } = await request(url)
.get('/')
.get('')
.set('Cookie', cookies || []);
return { cookies: (headers['set-cookie'] as unknown as string[]) || [], location: headers.location };
};

// Function to generate a code challenge from the verifier
const generateCodeChallenge = async (codeVerifier: string): Promise<string> => {
const hashed = createHash('sha256').update(codeVerifier).digest();
return hashed.toString('base64url');
};

const loginWithOAuth = async (sub: OAuthUser | string, redirectUri?: string) => {
const { url } = await startOAuth({ oAuthConfigDto: { redirectUri: redirectUri ?? `${baseUrl}/auth/login` } });
const state = randomBytes(16).toString('base64url');
const codeVerifier = randomBytes(64).toString('base64url');
const codeChallenge = await generateCodeChallenge(codeVerifier);

const { url } = await startOAuth({
oAuthConfigDto: { redirectUri: redirectUri ?? `${baseUrl}/auth/login`, state, codeChallenge },
});

// login
const response1 = await redirect(url.replace(authServer.internal, authServer.external));
const response2 = await request(authServer.external + response1.location)
.post('/')
.post('')
.set('Cookie', response1.cookies)
.type('form')
.send({ prompt: 'login', login: sub, password: 'password' });
@@ -40,7 +53,7 @@ const loginWithOAuth = async (sub: OAuthUser | string, redirectUri?: string) =>
// approve
const response3 = await redirect(response2.header.location, response1.cookies);
const response4 = await request(authServer.external + response3.location)
.post('/')
.post('')
.type('form')
.set('Cookie', response3.cookies)
.send({ prompt: 'consent' });
@@ -51,9 +64,9 @@ const loginWithOAuth = async (sub: OAuthUser | string, redirectUri?: string) =>
expect(redirectUrl).toBeDefined();
const params = new URL(redirectUrl).searchParams;
expect(params.get('code')).toBeDefined();
expect(params.get('state')).toBeDefined();
expect(params.get('state')).toBe(state);

return redirectUrl;
return { url: redirectUrl, state, codeVerifier };
};

const setupOAuth = async (token: string, dto: Partial<SystemConfigOAuthDto>) => {
@@ -119,9 +132,42 @@ describe(`/oauth`, () => {
expect(body).toEqual(errorDto.badRequest(['url should not be empty']));
});

it('should auto register the user by default', async () => {
const url = await loginWithOAuth('oauth-auto-register');
it(`should throw an error if the state is not provided`, async () => {
const { url } = await loginWithOAuth('oauth-auto-register');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest('OAuth state is missing'));
});

it(`should throw an error if the state mismatches`, async () => {
const callbackParams = await loginWithOAuth('oauth-auto-register');
const { state } = await loginWithOAuth('oauth-auto-register');
const { status } = await request(app)
.post('/oauth/callback')
.send({ ...callbackParams, state });
expect(status).toBeGreaterThanOrEqual(400);
});

it(`should throw an error if the codeVerifier is not provided`, async () => {
const { url, state } = await loginWithOAuth('oauth-auto-register');
const { status, body } = await request(app).post('/oauth/callback').send({ url, state });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest('OAuth code verifier is missing'));
});

it(`should throw an error if the codeVerifier doesn't match the challenge`, async () => {
const callbackParams = await loginWithOAuth('oauth-auto-register');
const { codeVerifier } = await loginWithOAuth('oauth-auto-register');
const { status, body } = await request(app)
.post('/oauth/callback')
.send({ ...callbackParams, codeVerifier });
console.log(body);
expect(status).toBeGreaterThanOrEqual(400);
});

it('should auto register the user by default', async () => {
const callbackParams = await loginWithOAuth('oauth-auto-register');
const { status, body } = await request(app).post('/oauth/callback').send(callbackParams);
expect(status).toBe(201);
expect(body).toMatchObject({
accessToken: expect.any(String),
@@ -132,16 +178,30 @@ describe(`/oauth`, () => {
});
});

it('should allow passing state and codeVerifier via cookies', async () => {
const { url, state, codeVerifier } = await loginWithOAuth('oauth-auto-register');
const { status, body } = await request(app)
.post('/oauth/callback')
.set('Cookie', [`immich_oauth_state=${state}`, `immich_oauth_code_verifier=${codeVerifier}`])
.send({ url });
expect(status).toBe(201);
expect(body).toMatchObject({
accessToken: expect.any(String),
userId: expect.any(String),
userEmail: 'oauth-auto-register@immich.app',
});
});

it('should handle a user without an email', async () => {
const url = await loginWithOAuth(OAuthUser.NO_EMAIL);
const { status, body } = await request(app).post('/oauth/callback').send({ url });
const callbackParams = await loginWithOAuth(OAuthUser.NO_EMAIL);
const { status, body } = await request(app).post('/oauth/callback').send(callbackParams);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest('OAuth profile does not have an email address'));
});

it('should set the quota from a claim', async () => {
const url = await loginWithOAuth(OAuthUser.WITH_QUOTA);
const { status, body } = await request(app).post('/oauth/callback').send({ url });
const callbackParams = await loginWithOAuth(OAuthUser.WITH_QUOTA);
const { status, body } = await request(app).post('/oauth/callback').send(callbackParams);
expect(status).toBe(201);
expect(body).toMatchObject({
accessToken: expect.any(String),
@@ -154,8 +214,8 @@ describe(`/oauth`, () => {
});

it('should set the storage label from a claim', async () => {
const url = await loginWithOAuth(OAuthUser.WITH_USERNAME);
const { status, body } = await request(app).post('/oauth/callback').send({ url });
const callbackParams = await loginWithOAuth(OAuthUser.WITH_USERNAME);
const { status, body } = await request(app).post('/oauth/callback').send(callbackParams);
expect(status).toBe(201);
expect(body).toMatchObject({
accessToken: expect.any(String),
@@ -176,8 +236,8 @@ describe(`/oauth`, () => {
buttonText: 'Login with Immich',
signingAlgorithm: 'RS256',
});
const url = await loginWithOAuth('oauth-RS256-token');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
const callbackParams = await loginWithOAuth('oauth-RS256-token');
const { status, body } = await request(app).post('/oauth/callback').send(callbackParams);
expect(status).toBe(201);
expect(body).toMatchObject({
accessToken: expect.any(String),
@@ -196,8 +256,8 @@ describe(`/oauth`, () => {
buttonText: 'Login with Immich',
profileSigningAlgorithm: 'RS256',
});
const url = await loginWithOAuth('oauth-signed-profile');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
const callbackParams = await loginWithOAuth('oauth-signed-profile');
const { status, body } = await request(app).post('/oauth/callback').send(callbackParams);
expect(status).toBe(201);
expect(body).toMatchObject({
userId: expect.any(String),
@@ -213,8 +273,8 @@ describe(`/oauth`, () => {
buttonText: 'Login with Immich',
signingAlgorithm: 'something-that-does-not-work',
});
const url = await loginWithOAuth('oauth-signed-bad');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
const callbackParams = await loginWithOAuth('oauth-signed-bad');
const { status, body } = await request(app).post('/oauth/callback').send(callbackParams);
expect(status).toBe(500);
expect(body).toMatchObject({
error: 'Internal Server Error',
@@ -235,8 +295,8 @@ describe(`/oauth`, () => {
});

it('should not auto register the user', async () => {
const url = await loginWithOAuth('oauth-no-auto-register');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
const callbackParams = await loginWithOAuth('oauth-no-auto-register');
const { status, body } = await request(app).post('/oauth/callback').send(callbackParams);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest('User does not exist and auto registering is disabled.'));
});
@@ -247,8 +307,8 @@ describe(`/oauth`, () => {
email: 'oauth-user3@immich.app',
password: 'password',
});
const url = await loginWithOAuth('oauth-user3');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
const callbackParams = await loginWithOAuth('oauth-user3');
const { status, body } = await request(app).post('/oauth/callback').send(callbackParams);
expect(status).toBe(201);
expect(body).toMatchObject({
userId,
@@ -286,13 +346,15 @@ describe(`/oauth`, () => {
});

it('should auto register the user by default', async () => {
const url = await loginWithOAuth('oauth-mobile-override', 'app.immich:///oauth-callback');
expect(url).toEqual(expect.stringContaining(mobileOverrideRedirectUri));
const callbackParams = await loginWithOAuth('oauth-mobile-override', 'app.immich:///oauth-callback');
expect(callbackParams.url).toEqual(expect.stringContaining(mobileOverrideRedirectUri));

// simulate redirecting back to mobile app
const redirectUri = url.replace(mobileOverrideRedirectUri, 'app.immich:///oauth-callback');
const url = callbackParams.url.replace(mobileOverrideRedirectUri, 'app.immich:///oauth-callback');

const { status, body } = await request(app).post('/oauth/callback').send({ url: redirectUri });
const { status, body } = await request(app)
.post('/oauth/callback')
.send({ ...callbackParams, url });
expect(status).toBe(201);
expect(body).toMatchObject({
accessToken: expect.any(String),

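Editor's note: the OAuth changes above add a `state` parameter and PKCE to the login flow. A minimal sketch of the S256 code-challenge derivation used by `generateCodeChallenge` in the diff (SHA-256 of the verifier, base64url-encoded); the values and variable names are illustrative only:

```typescript
import { createHash, randomBytes } from 'node:crypto';

// PKCE (RFC 7636, S256): challenge = BASE64URL(SHA256(verifier)).
// The client later presents the verifier at /oauth/callback, and the
// state echoed back on the redirect must match the one sent here.
const codeVerifier = randomBytes(64).toString('base64url');
const state = randomBytes(16).toString('base64url');
const codeChallenge = createHash('sha256').update(codeVerifier).digest().toString('base64url');

console.log({ state, codeChallenge });
```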
@@ -5,22 +5,6 @@ import { app, asBearerAuth, utils } from 'src/utils';
import request from 'supertest';
import { beforeAll, beforeEach, describe, expect, it } from 'vitest';

const invalidBirthday = [
{
birthDate: 'false',
response: ['birthDate must be a string in the format yyyy-MM-dd', 'Birth date cannot be in the future'],
},
{
birthDate: '123567',
response: ['birthDate must be a string in the format yyyy-MM-dd', 'Birth date cannot be in the future'],
},
{
birthDate: 123_567,
response: ['birthDate must be a string in the format yyyy-MM-dd', 'Birth date cannot be in the future'],
},
{ birthDate: '9999-01-01', response: ['Birth date cannot be in the future'] },
];

describe('/people', () => {
let admin: LoginResponseDto;
let visiblePerson: PersonResponseDto;
@@ -58,14 +42,6 @@ describe('/people', () => {

describe('GET /people', () => {
beforeEach(async () => {});

it('should require authentication', async () => {
const { status, body } = await request(app).get('/people');

expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should return all people (including hidden)', async () => {
const { status, body } = await request(app)
.get('/people')
@@ -117,13 +93,6 @@ describe('/people', () => {
});

describe('GET /people/:id', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get(`/people/${uuidDto.notFound}`);

expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should throw error if person with id does not exist', async () => {
const { status, body } = await request(app)
.get(`/people/${uuidDto.notFound}`)
@@ -144,13 +113,6 @@ describe('/people', () => {
});

describe('GET /people/:id/statistics', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get(`/people/${multipleAssetsPerson.id}/statistics`);

expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should throw error if person with id does not exist', async () => {
const { status, body } = await request(app)
.get(`/people/${uuidDto.notFound}/statistics`)
@@ -171,23 +133,6 @@ describe('/people', () => {
});

describe('POST /people', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).post(`/people`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

for (const { birthDate, response } of invalidBirthday) {
it(`should not accept an invalid birth date [${birthDate}]`, async () => {
const { status, body } = await request(app)
.post(`/people`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ birthDate });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(response));
});
}

it('should create a person', async () => {
const { status, body } = await request(app)
.post(`/people`)
@@ -223,39 +168,6 @@ describe('/people', () => {
});

describe('PUT /people/:id', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).put(`/people/${uuidDto.notFound}`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

for (const { key, type } of [
{ key: 'name', type: 'string' },
{ key: 'featureFaceAssetId', type: 'string' },
{ key: 'isHidden', type: 'boolean value' },
{ key: 'isFavorite', type: 'boolean value' },
]) {
it(`should not allow null ${key}`, async () => {
const { status, body } = await request(app)
.put(`/people/${visiblePerson.id}`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ [key]: null });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest([`${key} must be a ${type}`]));
});
}

for (const { birthDate, response } of invalidBirthday) {
it(`should not accept an invalid birth date [${birthDate}]`, async () => {
const { status, body } = await request(app)
.put(`/people/${visiblePerson.id}`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ birthDate });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(response));
});
}

it('should update a date of birth', async () => {
const { status, body } = await request(app)
.put(`/people/${visiblePerson.id}`)
@@ -312,12 +224,6 @@ describe('/people', () => {
});

describe('POST /people/:id/merge', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).post(`/people/${uuidDto.notFound}/merge`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should not supporting merging a person into themselves', async () => {
const { status, body } = await request(app)
.post(`/people/${visiblePerson.id}/merge`)

@@ -1,9 +1,15 @@
import { AssetMediaResponseDto, AssetResponseDto, deleteAssets, LoginResponseDto, updateAsset } from '@immich/sdk';
import {
AssetMediaResponseDto,
AssetResponseDto,
AssetVisibility,
deleteAssets,
LoginResponseDto,
updateAsset,
} from '@immich/sdk';
import { DateTime } from 'luxon';
import { readFile } from 'node:fs/promises';
import { join } from 'node:path';
import { Socket } from 'socket.io-client';
import { errorDto } from 'src/responses';
import { app, asBearerAuth, TEN_TIMES, testAssetDir, utils } from 'src/utils';
import request from 'supertest';
import { afterAll, beforeAll, describe, expect, it } from 'vitest';
@@ -50,7 +56,7 @@ describe('/search', () => {
{ filename: '/formats/motionphoto/samsung-one-ui-6.heic' },
{ filename: '/formats/motionphoto/samsung-one-ui-5.jpg' },

{ filename: '/metadata/gps-position/thompson-springs.jpg', dto: { isArchived: true } },
{ filename: '/metadata/gps-position/thompson-springs.jpg', dto: { visibility: AssetVisibility.Archive } },

// used for search suggestions
{ filename: '/formats/png/density_plot.png' },
@@ -141,65 +147,6 @@ describe('/search', () => {
});

describe('POST /search/metadata', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).post('/search/metadata');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

const badTests = [
{
should: 'should reject page as a string',
dto: { page: 'abc' },
expected: ['page must not be less than 1', 'page must be an integer number'],
},
{
should: 'should reject page as a decimal',
dto: { page: 1.5 },
expected: ['page must be an integer number'],
},
{
should: 'should reject page as a negative number',
dto: { page: -10 },
expected: ['page must not be less than 1'],
},
{
should: 'should reject page as 0',
dto: { page: 0 },
expected: ['page must not be less than 1'],
},
{
should: 'should reject size as a string',
dto: { size: 'abc' },
expected: [
'size must not be greater than 1000',
'size must not be less than 1',
'size must be an integer number',
],
},
{
should: 'should reject an invalid size',
dto: { size: -1.5 },
expected: ['size must not be less than 1', 'size must be an integer number'],
},
...['isArchived', 'isFavorite', 'isEncoded', 'isOffline', 'isMotion', 'isVisible'].map((value) => ({
should: `should reject ${value} not a boolean`,
dto: { [value]: 'immich' },
expected: [`${value} must be a boolean value`],
})),
];

for (const { should, dto, expected } of badTests) {
it(should, async () => {
const { status, body } = await request(app)
.post('/search/metadata')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send(dto);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(expected));
});
}

const searchTests = [
{
should: 'should get my assets',
@@ -231,12 +178,12 @@ describe('/search', () => {
deferred: () => ({ dto: { size: 1, isFavorite: false }, assets: [assetLast] }),
},
{
should: 'should search by isArchived (true)',
deferred: () => ({ dto: { isArchived: true }, assets: [assetSprings] }),
should: 'should search by visibility (AssetVisibility.Archive)',
deferred: () => ({ dto: { visibility: AssetVisibility.Archive }, assets: [assetSprings] }),
},
{
should: 'should search by isArchived (false)',
deferred: () => ({ dto: { size: 1, isArchived: false }, assets: [assetLast] }),
should: 'should search by visibility (AssetVisibility.Timeline)',
deferred: () => ({ dto: { size: 1, visibility: AssetVisibility.Timeline }, assets: [assetLast] }),
},
{
should: 'should search by type (image)',
@@ -245,7 +192,7 @@ describe('/search', () => {
{
should: 'should search by type (video)',
deferred: () => ({
dto: { type: 'VIDEO' },
dto: { type: 'VIDEO', visibility: AssetVisibility.Hidden },
assets: [
// the three live motion photos
{ id: expect.any(String) },
@@ -289,13 +236,6 @@ describe('/search', () => {
should: 'should search by takenAfter (no results)',
deferred: () => ({ dto: { takenAfter: today.plus({ hour: 1 }).toJSDate() }, assets: [] }),
},
// {
// should: 'should search by originalPath',
// deferred: () => ({
// dto: { originalPath: asset1.originalPath },
// assets: [asset1],
// }),
// },
{
should: 'should search by originalFilename',
deferred: () => ({
@@ -325,7 +265,7 @@ describe('/search', () => {
deferred: () => ({
dto: {
city: '',
isVisible: true,
visibility: AssetVisibility.Timeline,
includeNull: true,
},
assets: [assetLast],
@@ -336,7 +276,7 @@ describe('/search', () => {
deferred: () => ({
dto: {
city: null,
isVisible: true,
visibility: AssetVisibility.Timeline,
includeNull: true,
},
assets: [assetLast],
@@ -357,7 +297,7 @@ describe('/search', () => {
deferred: () => ({
dto: {
state: '',
isVisible: true,
visibility: AssetVisibility.Timeline,
withExif: true,
includeNull: true,
},
@@ -369,7 +309,7 @@ describe('/search', () => {
deferred: () => ({
dto: {
state: null,
isVisible: true,
visibility: AssetVisibility.Timeline,
includeNull: true,
},
assets: [assetLast, assetNotocactus],
@@ -390,7 +330,7 @@ describe('/search', () => {
deferred: () => ({
dto: {
country: '',
isVisible: true,
visibility: AssetVisibility.Timeline,
includeNull: true,
},
assets: [assetLast],
@@ -401,7 +341,7 @@ describe('/search', () => {
deferred: () => ({
dto: {
country: null,
isVisible: true,
visibility: AssetVisibility.Timeline,
includeNull: true,
},
assets: [assetLast],
@@ -454,14 +394,6 @@ describe('/search', () => {
}
});

describe('POST /search/smart', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).post('/search/smart');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
});

describe('POST /search/random', () => {
beforeAll(async () => {
await Promise.all([
@@ -476,13 +408,6 @@ describe('/search', () => {
await utils.waitForQueueFinish(admin.accessToken, 'thumbnailGeneration');
});

it('should require authentication', async () => {
const { status, body } = await request(app).post('/search/random').send({ size: 1 });

expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it.each(TEN_TIMES)('should return 1 random assets', async () => {
const { status, body } = await request(app)
.post('/search/random')
@@ -512,12 +437,6 @@ describe('/search', () => {
});

describe('GET /search/explore', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/search/explore');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should get explore data', async () => {
const { status, body } = await request(app)
.get('/search/explore')
@@ -528,12 +447,6 @@ describe('/search', () => {
});

describe('GET /search/places', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/search/places');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should get relevant places', async () => {
const name = 'Paris';

@@ -552,12 +465,6 @@ describe('/search', () => {
});

describe('GET /search/cities', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/search/cities');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should get all cities', async () => {
const { status, body } = await request(app)
.get('/search/cities')
@@ -576,12 +483,6 @@ describe('/search', () => {
});

describe('GET /search/suggestions', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/search/suggestions');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should get suggestions for country (including null)', async () => {
const { status, body } = await request(app)
.get('/search/suggestions?type=country&includeNull=true')

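Editor's note: the search spec swaps the `isArchived`/`isVisible` filters for the same `visibility` enum. A minimal sketch, assuming the supertest pattern used throughout these specs, of a metadata search restricted to archived assets (helper name is hypothetical):

```typescript
import { AssetVisibility } from '@immich/sdk';
import request from 'supertest';

// Hypothetical sketch: AssetVisibility.Archive replaces isArchived: true.
const searchArchived = (app: string, accessToken: string) =>
  request(app)
    .post('/search/metadata')
    .set('Authorization', `Bearer ${accessToken}`)
    .send({ visibility: AssetVisibility.Archive });
```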
@@ -1,4 +1,10 @@
import { AssetMediaResponseDto, LoginResponseDto, SharedLinkType, TimeBucketSize } from '@immich/sdk';
import {
AssetMediaResponseDto,
AssetVisibility,
LoginResponseDto,
SharedLinkType,
TimeBucketAssetResponseDto,
} from '@immich/sdk';
import { DateTime } from 'luxon';
import { createUserDto } from 'src/fixtures';
import { errorDto } from 'src/responses';
@@ -19,7 +25,8 @@ describe('/timeline', () => {
let user: LoginResponseDto;
let timeBucketUser: LoginResponseDto;

let userAssets: AssetMediaResponseDto[];
let user1Assets: AssetMediaResponseDto[];
let user2Assets: AssetMediaResponseDto[];

beforeAll(async () => {
await utils.resetDatabase();
@@ -29,7 +36,7 @@ describe('/timeline', () => {
utils.userSetup(admin.accessToken, createUserDto.create('time-bucket')),
]);

userAssets = await Promise.all([
user1Assets = await Promise.all([
utils.createAsset(user.accessToken),
utils.createAsset(user.accessToken),
utils.createAsset(user.accessToken, {
@@ -42,17 +49,20 @@ describe('/timeline', () => {
utils.createAsset(user.accessToken),
]);

await Promise.all([
user2Assets = await Promise.all([
utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-01-01').toISOString() }),
utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-10').toISOString() }),
utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-11').toISOString() }),
utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-11').toISOString() }),
utils.createAsset(timeBucketUser.accessToken, { fileCreatedAt: new Date('1970-02-12').toISOString() }),
]);

await utils.deleteAssets(timeBucketUser.accessToken, [user2Assets[4].id]);
});

describe('GET /timeline/buckets', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/timeline/buckets').query({ size: TimeBucketSize.Month });
const { status, body } = await request(app).get('/timeline/buckets');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
@@ -60,8 +70,7 @@ describe('/timeline', () => {
it('should get time buckets by month', async () => {
const { status, body } = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ size: TimeBucketSize.Month });
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`);

expect(status).toBe(200);
expect(body).toEqual(
@@ -75,36 +84,20 @@ describe('/timeline', () => {
it('should not allow access for unrelated shared links', async () => {
const sharedLink = await utils.createSharedLink(user.accessToken, {
type: SharedLinkType.Individual,
assetIds: userAssets.map(({ id }) => id),
assetIds: user1Assets.map(({ id }) => id),
});

const { status, body } = await request(app)
.get('/timeline/buckets')
.query({ key: sharedLink.key, size: TimeBucketSize.Month });
const { status, body } = await request(app).get('/timeline/buckets').query({ key: sharedLink.key });

expect(status).toBe(400);
expect(body).toEqual(errorDto.noPermission);
});

it('should get time buckets by day', async () => {
const { status, body } = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ size: TimeBucketSize.Day });

expect(status).toBe(200);
expect(body).toEqual([
{ count: 2, timeBucket: '1970-02-11T00:00:00.000Z' },
{ count: 1, timeBucket: '1970-02-10T00:00:00.000Z' },
{ count: 1, timeBucket: '1970-01-01T00:00:00.000Z' },
]);
});

it('should return error if time bucket is requested with partners asset and archived', async () => {
const req1 = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ size: TimeBucketSize.Month, withPartners: true, isArchived: true });
.query({ withPartners: true, visibility: AssetVisibility.Archive });

expect(req1.status).toBe(400);
expect(req1.body).toEqual(errorDto.badRequest());
@@ -112,7 +105,7 @@ describe('/timeline', () => {
const req2 = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${user.accessToken}`)
.query({ size: TimeBucketSize.Month, withPartners: true, isArchived: undefined });
.query({ withPartners: true, visibility: undefined });

expect(req2.status).toBe(400);
expect(req2.body).toEqual(errorDto.badRequest());
@@ -122,7 +115,7 @@ describe('/timeline', () => {
const req1 = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ size: TimeBucketSize.Month, withPartners: true, isFavorite: true });
.query({ withPartners: true, isFavorite: true });

expect(req1.status).toBe(400);
expect(req1.body).toEqual(errorDto.badRequest());
@@ -130,7 +123,7 @@ describe('/timeline', () => {
const req2 = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ size: TimeBucketSize.Month, withPartners: true, isFavorite: false });
.query({ withPartners: true, isFavorite: false });

expect(req2.status).toBe(400);
expect(req2.body).toEqual(errorDto.badRequest());
@@ -140,7 +133,7 @@ describe('/timeline', () => {
const req = await request(app)
.get('/timeline/buckets')
.set('Authorization', `Bearer ${user.accessToken}`)
.query({ size: TimeBucketSize.Month, withPartners: true, isTrashed: true });
.query({ withPartners: true, isTrashed: true });

expect(req.status).toBe(400);
expect(req.body).toEqual(errorDto.badRequest());
@@ -150,7 +143,6 @@ describe('/timeline', () => {
describe('GET /timeline/bucket', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/timeline/bucket').query({
size: TimeBucketSize.Month,
timeBucket: '1900-01-01',
});

@@ -161,11 +153,27 @@ describe('/timeline', () => {
it('should handle 5 digit years', async () => {
const { status, body } = await request(app)
.get('/timeline/bucket')
.query({ size: TimeBucketSize.Month, timeBucket: '012345-01-01' })
.query({ timeBucket: '012345-01-01' })
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`);

expect(status).toBe(200);
expect(body).toEqual([]);
expect(body).toEqual({
city: [],
country: [],
duration: [],
id: [],
visibility: [],
isFavorite: [],
isImage: [],
isTrashed: [],
livePhotoVideoId: [],
localDateTime: [],
ownerId: [],
projectionType: [],
ratio: [],
status: [],
thumbhash: [],
});
});

// TODO enable date string validation while still accepting 5 digit years
@@ -173,7 +181,7 @@ describe('/timeline', () => {
// const { status, body } = await request(app)
// .get('/timeline/bucket')
// .set('Authorization', `Bearer ${user.accessToken}`)
// .query({ size: TimeBucketSize.Month, timeBucket: 'foo' });
// .query({ timeBucket: 'foo' });

// expect(status).toBe(400);
// expect(body).toEqual(errorDto.badRequest);
@@ -183,10 +191,38 @@ describe('/timeline', () => {
const { status, body } = await request(app)
.get('/timeline/bucket')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ size: TimeBucketSize.Month, timeBucket: '1970-02-10' });
.query({ timeBucket: '1970-02-10' });

expect(status).toBe(200);
expect(body).toEqual([]);
expect(body).toEqual({
city: [],
country: [],
duration: [],
id: [],
visibility: [],
isFavorite: [],
isImage: [],
isTrashed: [],
livePhotoVideoId: [],
localDateTime: [],
ownerId: [],
projectionType: [],
ratio: [],
status: [],
thumbhash: [],
});
});

it('should return time bucket in trash', async () => {
const { status, body } = await request(app)
.get('/timeline/bucket')
.set('Authorization', `Bearer ${timeBucketUser.accessToken}`)
.query({ timeBucket: '1970-02-01T00:00:00.000Z', isTrashed: true });

expect(status).toBe(200);

const timeBucket: TimeBucketAssetResponseDto = body;
expect(timeBucket.isTrashed).toEqual([true]);
});
});
});

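Editor's note: per the expectations above, `/timeline/bucket` now returns a columnar `TimeBucketAssetResponseDto` (parallel arrays keyed by field) instead of an array of asset objects. A minimal sketch of re-assembling row objects from that shape, assuming the field list shown in the test expectations:

```typescript
import { TimeBucketAssetResponseDto } from '@immich/sdk';

// Index i across the arrays describes asset i; only a few fields are shown.
const toRows = (bucket: TimeBucketAssetResponseDto) =>
  bucket.id.map((id, i) => ({
    id,
    isFavorite: bucket.isFavorite[i],
    isTrashed: bucket.isTrashed[i],
    visibility: bucket.visibility[i],
    localDateTime: bucket.localDateTime[i],
  }));
```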
@@ -215,6 +215,19 @@ describe('/admin/users', () => {
const user = await getMyUser({ headers: asBearerAuth(token.accessToken) });
expect(user).toMatchObject({ email: nonAdmin.userEmail });
});

it('should update the avatar color', async () => {
const { status, body } = await request(app)
.put(`/admin/users/${admin.userId}`)
.send({ avatarColor: 'orange' })
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
expect(body).toMatchObject({ avatarColor: 'orange' });

const after = await getUserAdmin({ id: admin.userId }, { headers: asBearerAuth(admin.accessToken) });
expect(after).toMatchObject({ avatarColor: 'orange' });
});
});

describe('PUT /admin/users/:id/preferences', () => {
@@ -240,19 +253,6 @@ describe('/admin/users', () => {
expect(after).toMatchObject({ memories: { enabled: false } });
});

it('should update the avatar color', async () => {
const { status, body } = await request(app)
.put(`/admin/users/${admin.userId}/preferences`)
.send({ avatar: { color: 'orange' } })
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
expect(body).toMatchObject({ avatar: { color: 'orange' } });

const after = await getUserPreferencesAdmin({ id: admin.userId }, { headers: asBearerAuth(admin.accessToken) });
expect(after).toMatchObject({ avatar: { color: 'orange' } });
});

it('should update download archive size', async () => {
const { status, body } = await request(app)
.put(`/admin/users/${admin.userId}/preferences`)

@@ -31,33 +31,7 @@ describe('/users', () => {
);
});

describe('GET /users', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/users');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should get users', async () => {
const { status, body } = await request(app).get('/users').set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toEqual(200);
expect(body).toHaveLength(2);
expect(body).toEqual(
expect.arrayContaining([
expect.objectContaining({ email: 'admin@immich.cloud' }),
expect.objectContaining({ email: 'user2@immich.cloud' }),
]),
);
});
});

describe('GET /users/me', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get(`/users/me`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should not work for shared links', async () => {
const album = await utils.createAlbum(admin.accessToken, { albumName: 'Album' });
const sharedLink = await utils.createSharedLink(admin.accessToken, {
@@ -99,24 +73,6 @@ describe('/users', () => {
});

describe('PUT /users/me', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).put(`/users/me`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

for (const key of ['email', 'name']) {
it(`should not allow null ${key}`, async () => {
const dto = { [key]: null };
const { status, body } = await request(app)
.put(`/users/me`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send(dto);
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest());
});
}

it('should update first and last name', async () => {
const before = await getMyUser({ headers: asBearerAuth(admin.accessToken) });

@@ -183,6 +139,19 @@ describe('/users', () => {
profileChangedAt: expect.anything(),
});
});

it('should update avatar color', async () => {
const { status, body } = await request(app)
.put(`/users/me`)
.send({ avatarColor: 'blue' })
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
expect(body).toMatchObject({ avatarColor: 'blue' });

const after = await getMyUser({ headers: asBearerAuth(admin.accessToken) });
expect(after).toMatchObject({ avatarColor: 'blue' });
});
});

describe('PUT /users/me/preferences', () => {
@@ -202,19 +171,6 @@ describe('/users', () => {
expect(after).toMatchObject({ memories: { enabled: false } });
});

it('should update avatar color', async () => {
const { status, body } = await request(app)
.put(`/users/me/preferences`)
.send({ avatar: { color: 'blue' } })
.set('Authorization', `Bearer ${admin.accessToken}`);

expect(status).toBe(200);
expect(body).toMatchObject({ avatar: { color: 'blue' } });

const after = await getMyPreferences({ headers: asBearerAuth(admin.accessToken) });
expect(after).toMatchObject({ avatar: { color: 'blue' } });
});

it('should require an integer for download archive size', async () => {
const { status, body } = await request(app)
.put(`/users/me/preferences`)
@@ -269,11 +225,6 @@ describe('/users', () => {
});

describe('GET /users/:id', () => {
it('should require authentication', async () => {
const { status } = await request(app).get(`/users/${admin.userId}`);
expect(status).toEqual(401);
});

it('should get the user', async () => {
const { status, body } = await request(app)
.get(`/users/${admin.userId}`)
@@ -292,12 +243,6 @@ describe('/users', () => {
});

describe('GET /server/license', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).get('/users/me/license');
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should return the user license', async () => {
await request(app)
.put('/users/me/license')
@@ -315,11 +260,6 @@ describe('/users', () => {
});

describe('PUT /users/me/license', () => {
it('should require authentication', async () => {
const { status } = await request(app).put(`/users/me/license`);
expect(status).toEqual(401);
});

it('should set the user license', async () => {
const { status, body } = await request(app)
.put(`/users/me/license`)

@@ -3,6 +3,7 @@ import {
AssetMediaCreateDto,
AssetMediaResponseDto,
AssetResponseDto,
AssetVisibility,
CheckExistingAssetsDto,
CreateAlbumDto,
CreateLibraryDto,
@@ -429,7 +430,10 @@ export const utils = {
},

archiveAssets: (accessToken: string, ids: string[]) =>
updateAssets({ assetBulkUpdateDto: { ids, isArchived: true } }, { headers: asBearerAuth(accessToken) }),
updateAssets(
{ assetBulkUpdateDto: { ids, visibility: AssetVisibility.Archive } },
{ headers: asBearerAuth(accessToken) },
),

deleteAssets: (accessToken: string, ids: string[]) =>
deleteAssets({ assetBulkDeleteDto: { ids } }, { headers: asBearerAuth(accessToken) }),

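Editor's note: archiving is now expressed as a bulk visibility update. A minimal sketch of the same idea outside the test utils, assuming the `updateAssets` SDK call shown in the diff; the inline bearer header stands in for the suite's `asBearerAuth` helper:

```typescript
import { AssetVisibility, updateAssets } from '@immich/sdk';

// Archive = set visibility to Archive; restoring would presumably use Timeline.
const archiveAssets = (accessToken: string, ids: string[]) =>
  updateAssets(
    { assetBulkUpdateDto: { ids, visibility: AssetVisibility.Archive } },
    { headers: { Authorization: `Bearer ${accessToken}` } },
  );
```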
@@ -25,7 +25,7 @@ test.describe('Registration', () => {

// login
await expect(page).toHaveTitle(/Login/);
await page.goto('/auth/login');
await page.goto('/auth/login?autoLaunch=0');
await page.getByLabel('Email').fill('admin@immich.app');
await page.getByLabel('Password').fill('password');
await page.getByRole('button', { name: 'Login' }).click();
@@ -59,7 +59,7 @@ test.describe('Registration', () => {
await context.clearCookies();

// login
await page.goto('/auth/login');
await page.goto('/auth/login?autoLaunch=0');
await page.getByLabel('Email').fill('user@immich.cloud');
await page.getByLabel('Password').fill('password');
await page.getByRole('button', { name: 'Login' }).click();
@@ -72,7 +72,7 @@ test.describe('Registration', () => {
await page.getByRole('button', { name: 'Change password' }).click();

// login with new password
await expect(page).toHaveURL('/auth/login');
await expect(page).toHaveURL('/auth/login?autoLaunch=0');
await page.getByLabel('Email').fill('user@immich.cloud');
await page.getByLabel('Password').fill('new-password');
await page.getByRole('button', { name: 'Login' }).click();

@@ -21,23 +21,9 @@ test.describe('Photo Viewer', () => {
test.beforeEach(async ({ context, page }) => {
// before each test, login as user
await utils.setAuthCookies(context, admin.accessToken);
await page.goto('/photos');
await page.waitForLoadState('networkidle');
});

test('initially shows a loading spinner', async ({ page }) => {
await page.route(`/api/assets/${asset.id}/thumbnail**`, async (route) => {
// slow down the request for thumbnail, so spinner has chance to show up
await new Promise((f) => setTimeout(f, 2000));
await route.continue();
});
await page.goto(`/photos/${asset.id}`);
await page.waitForLoadState('load');
// this is the spinner
await page.waitForSelector('svg[role=status]');
await expect(page.getByTestId('loading-spinner')).toBeVisible();
});

test('loads original photo when zoomed', async ({ page }) => {
await page.goto(`/photos/${asset.id}`);
await expect.poll(async () => await imageLocator(page).getAttribute('src')).toContain('thumbnail');

@@ -47,15 +47,13 @@ test.describe('Shared Links', () => {
await page.locator(`[data-asset-id="${asset.id}"]`).hover();
await page.waitForSelector('[data-group] svg');
await page.getByRole('checkbox').click();
await page.getByRole('button', { name: 'Download' }).click();
await page.getByText('DOWNLOADING', { exact: true }).waitFor();
await Promise.all([page.waitForEvent('download'), page.getByRole('button', { name: 'Download' }).click()]);
});

test('download all from shared link', async ({ page }) => {
await page.goto(`/share/${sharedLink.key}`);
await page.getByRole('heading', { name: 'Test Album' }).waitFor();
await page.getByRole('button', { name: 'Download' }).click();
await page.getByText('DOWNLOADING', { exact: true }).waitFor();
await Promise.all([page.waitForEvent('download'), page.getByRole('button', { name: 'Download' }).click()]);
});

test('enter password for a shared link', async ({ page }) => {

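Editor's note: the shared-link tests now wait for the browser's download event concurrently with the click instead of polling for a "DOWNLOADING" label. A minimal Playwright sketch of that pattern, assuming a page with a Download button is already open (the test name is illustrative):

```typescript
import { expect, test } from '@playwright/test';

test('waits for the download event', async ({ page }) => {
  // Start listening for the download before triggering it, so the event is not missed.
  const [download] = await Promise.all([
    page.waitForEvent('download'),
    page.getByRole('button', { name: 'Download' }).click(),
  ]);
  expect(download.suggestedFilename()).toBeTruthy();
});
```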
Submodule e2e/test-assets updated: 9e3b964b08...8885d6d01c
Some files were not shown because too many files have changed in this diff.