Mirror of https://github.com/immich-app/immich.git (synced 2025-12-14 00:30:47 -08:00)

Compare commits: chore/asyn...flutter-3. (822 commits)
822 commits are listed by SHA1 only, from adb59cf3a0 at the top to 9a98712db7 at the bottom; the Author, Date, and commit message columns were empty in the extracted page and are not reproduced here.
@@ -1,10 +1,10 @@
-ARG BASEIMAGE=mcr.microsoft.com/devcontainers/typescript-node:22@sha256:9791f4aa527774bc370c6bd2f6705ce5a686f1e6f204badd8dfaacce28c631ae
+ARG BASEIMAGE=mcr.microsoft.com/devcontainers/typescript-node:22@sha256:fb211a0ea31a6177507498c084682aae8c9c31ca27668ea122246aa16a4723a0
 FROM ${BASEIMAGE}

 # Flutter SDK
 # https://flutter.dev/docs/development/tools/sdk/releases?tab=linux
 ENV FLUTTER_CHANNEL="stable"
-ENV FLUTTER_VERSION="3.24.5"
+ENV FLUTTER_VERSION="3.29.3"
 ENV FLUTTER_HOME=/flutter
 ENV PATH=${PATH}:${FLUTTER_HOME}/bin

6  .gitattributes  (vendored)
@@ -6,6 +6,12 @@ mobile/openapi/**/*.dart linguist-generated=true
 mobile/lib/**/*.g.dart -diff -merge
 mobile/lib/**/*.g.dart linguist-generated=true

+mobile/lib/**/*.drift.dart -diff -merge
+mobile/lib/**/*.drift.dart linguist-generated=true
+
+mobile/drift_schemas/main/drift_schema_*.json -diff -merge
+mobile/drift_schemas/main/drift_schema_*.json linguist-generated=true
+
 open-api/typescript-sdk/fetch-client.ts -diff -merge
 open-api/typescript-sdk/fetch-client.ts linguist-generated=true

1  .github/.nvmrc  (vendored, new file)
@@ -0,0 +1 @@
+22.16.0
@@ -1,5 +1,5 @@
-title: "[Feature] feature-name-goes-here"
+title: '[Feature] feature-name-goes-here'
-labels: ["feature"]
+labels: ['feature']

 body:
 - type: markdown
@@ -11,10 +11,9 @@ body:

 - type: checkboxes
 attributes:
-label: I have searched the existing feature requests to make sure this is not a duplicate request.
+label: I have searched the existing feature requests, both open and closed, to make sure this is not a duplicate request.
 options:
-- label: "Yes"
+- label: 'Yes'
-required: true

 - type: textarea
 id: feature
13  .github/ISSUE_TEMPLATE/bug_report.yaml  (vendored)
@@ -1,6 +1,12 @@
 name: Report an issue with Immich
 description: Report an issue with Immich
 body:
+- type: checkboxes
+attributes:
+label: I have searched the existing issues, both open and closed, to make sure this is not a duplicate report.
+options:
+- label: 'Yes'
+
 - type: markdown
 attributes:
 value: |
@@ -77,7 +83,7 @@ body:
 id: repro
 attributes:
 label: Reproduction steps
-description: "How do you trigger this bug? Please walk us through it step by step."
+description: 'How do you trigger this bug? Please walk us through it step by step.'
 value: |
 1.
 2.
@@ -90,12 +96,13 @@ body:
 id: logs
 attributes:
 label: Relevant log output
-description: Please copy and paste any relevant logs below. (code formatting is
+description:
+Please copy and paste any relevant logs below. (code formatting is
 enabled, no need for backticks)
 render: shell
 validations:
 required: false

 - type: textarea
 attributes:
 label: Additional information
118  .github/actions/image-build/action.yml  (vendored, new file)
@@ -0,0 +1,118 @@
+name: 'Single arch image build'
+description: 'Build single-arch image on platform appropriate runner'
+inputs:
+image:
+description: 'Name of the image to build'
+required: true
+ghcr-token:
+description: 'GitHub Container Registry token'
+required: true
+platform:
+description: 'Platform to build for'
+required: true
+artifact-key-base:
+description: 'Base key for artifact name'
+required: true
+context:
+description: 'Path to build context'
+required: true
+dockerfile:
+description: 'Path to Dockerfile'
+required: true
+build-args:
+description: 'Docker build arguments'
+required: false
+runs:
+using: 'composite'
+steps:
+- name: Prepare
+id: prepare
+shell: bash
+env:
+PLATFORM: ${{ inputs.platform }}
+run: |
+echo "platform-pair=${PLATFORM//\//-}" >> $GITHUB_OUTPUT
+
+- name: Set up Docker Buildx
+uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
+
+- name: Login to GitHub Container Registry
+uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
+if: ${{ !github.event.pull_request.head.repo.fork }}
+with:
+registry: ghcr.io
+username: ${{ github.repository_owner }}
+password: ${{ inputs.ghcr-token }}
+
+- name: Generate cache key suffix
+id: cache-key-suffix
+shell: bash
+env:
+REF: ${{ github.ref_name }}
+run: |
+if [[ "${{ github.event_name }}" == "pull_request" ]]; then
+echo "cache-key-suffix=pr-${{ github.event.number }}" >> $GITHUB_OUTPUT
+else
+SUFFIX=$(echo "${REF}" | sed 's/[^a-zA-Z0-9]/-/g')
+echo "suffix=${SUFFIX}" >> $GITHUB_OUTPUT
+fi
+
+- name: Generate cache target
+id: cache-target
+shell: bash
+env:
+BUILD_ARGS: ${{ inputs.build-args }}
+IMAGE: ${{ inputs.image }}
+SUFFIX: ${{ steps.cache-key-suffix.outputs.suffix }}
+PLATFORM_PAIR: ${{ steps.prepare.outputs.platform-pair }}
+run: |
+HASH=$(sha256sum <<< "${BUILD_ARGS}" | cut -d' ' -f1)
+CACHE_KEY="${PLATFORM_PAIR}-${HASH}"
+echo "cache-key-base=${CACHE_KEY}" >> $GITHUB_OUTPUT
+if [[ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]]; then
+# Essentially just ignore the cache output (forks can't write to registry cache)
+echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
+else
+echo "cache-to=type=registry,ref=${IMAGE}-build-cache:${CACHE_KEY}-${SUFFIX},mode=max,compression=zstd" >> $GITHUB_OUTPUT
+fi
+
+- name: Generate docker image tags
+id: meta
+uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
+env:
+DOCKER_METADATA_PR_HEAD_SHA: 'true'
+
+- name: Build and push image
+id: build
+uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
+with:
+context: ${{ inputs.context }}
+file: ${{ inputs.dockerfile }}
+platforms: ${{ inputs.platform }}
+labels: ${{ steps.meta.outputs.labels }}
+cache-to: ${{ steps.cache-target.outputs.cache-to }}
+cache-from: |
+type=registry,ref=${{ inputs.image }}-build-cache:${{ steps.cache-target.outputs.cache-key-base }}-${{ steps.cache-key-suffix.outputs.suffix }}
+type=registry,ref=${{ inputs.image }}-build-cache:${{ steps.cache-target.outputs.cache-key-base }}-main
+outputs: type=image,"name=${{ inputs.image }}",push-by-digest=true,name-canonical=true,push=${{ !github.event.pull_request.head.repo.fork }}
+build-args: |
+BUILD_ID=${{ github.run_id }}
+BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.meta.outputs.tags }}
+BUILD_SOURCE_REF=${{ github.ref_name }}
+BUILD_SOURCE_COMMIT=${{ github.sha }}
+${{ inputs.build-args }}
+
+- name: Export digest
+shell: bash
+run: | # zizmor: ignore[template-injection]
+mkdir -p ${{ runner.temp }}/digests
+digest="${{ steps.build.outputs.digest }}"
+touch "${{ runner.temp }}/digests/${digest#sha256:}"
+
+- name: Upload digest
+uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
+with:
+name: ${{ inputs.artifact-key-base }}-${{ steps.cache-target.outputs.cache-key-base }}
+path: ${{ runner.temp }}/digests/*
+if-no-files-found: error
+retention-days: 1
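For orientation, the sketch below shows how a workflow job might call the composite action added above. It is a hypothetical caller, not part of this diff: the job name, image name, context, Dockerfile path, and artifact key are illustrative, while the input names come from the action's declared inputs. The repository has to be checked out first so the local action path exists.

```yaml
# Hypothetical usage of .github/actions/image-build (values are illustrative)
jobs:
  build-amd64:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    steps:
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
      - uses: ./.github/actions/image-build
        with:
          image: ghcr.io/example-owner/example-image   # illustrative image name
          context: server                              # illustrative build context
          dockerfile: server/Dockerfile                # illustrative Dockerfile path
          platform: linux/amd64
          artifact-key-base: example-digests           # illustrative artifact key
          ghcr-token: ${{ secrets.GITHUB_TOKEN }}
          build-args: |
            DEVICE=cpu
```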
28  .github/package-lock.json  (generated, vendored, new file)
@@ -0,0 +1,28 @@
+{
+"name": ".github",
+"lockfileVersion": 3,
+"requires": true,
+"packages": {
+"": {
+"devDependencies": {
+"prettier": "^3.5.3"
+}
+},
+"node_modules/prettier": {
+"version": "3.5.3",
+"resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.3.tgz",
+"integrity": "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==",
+"dev": true,
+"license": "MIT",
+"bin": {
+"prettier": "bin/prettier.cjs"
+},
+"engines": {
+"node": ">=14"
+},
+"funding": {
+"url": "https://github.com/prettier/prettier?sponsor=1"
+}
+}
+}
+}
9  .github/package.json  (vendored, new file)
@@ -0,0 +1,9 @@
+{
+"scripts": {
+"format": "prettier --check .",
+"format:fix": "prettier --write ."
+},
+"devDependencies": {
+"prettier": "^3.5.3"
+}
+}
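The two npm scripts above give the .github/ directory its own Prettier check. A CI step that runs it might look like the sketch below; the step itself is hypothetical and not part of this diff, only the `npm run format` script and the lockfile it relies on are.

```yaml
# Hypothetical CI step for the new .github/ Prettier scripts (not part of this diff)
- name: Check formatting of .github files
  working-directory: ./.github
  run: |
    npm ci          # uses the package-lock.json added above
    npm run format  # runs "prettier --check ."
```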
4  .github/pull_request_template.md  (vendored)
@@ -32,5 +32,5 @@ The `/api/something` endpoint is now `/api/something-else`
 - [ ] I have confirmed that any new dependencies are strictly necessary.
 - [ ] I have written tests for new code (if applicable)
 - [ ] I have followed naming conventions/patterns in the surrounding code
-- [ ] All code in `src/services` uses repositories implementations for database calls, filesystem operations, etc.
+- [ ] All code in `src/services/` uses repositories implementations for database calls, filesystem operations, etc.
-- [ ] All code in `src/repositories/` is pretty basic/simple and does not have any immich specific logic (that belongs in `src/services`)
+- [ ] All code in `src/repositories/` is pretty basic/simple and does not have any immich specific logic (that belongs in `src/services/`)
66  .github/release.yml  (vendored; the extracted rows show identical text on both sides, so only unchanged lines are reproduced)
@@ -1,33 +1,33 @@
 changelog:
 categories:
 - title: 🚨 Breaking Changes
 labels:
 - changelog:breaking-change

 - title: 🫥 Deprecated Changes
 labels:
 - changelog:deprecated

 - title: 🔒 Security
 labels:
 - changelog:security

 - title: 🚀 Features
 labels:
 - changelog:feature

 - title: 🌟 Enhancements
 labels:
 - changelog:enhancement

 - title: 🐛 Bug fixes
 labels:
 - changelog:bugfix

 - title: 📚 Documentation
 labels:
 - changelog:documentation

 - title: 🌐 Translations
 labels:
 - changelog:translation
49  .github/workflows/build-mobile.yml  (vendored)
@@ -7,6 +7,15 @@ on:
 ref:
 required: false
 type: string
+secrets:
+KEY_JKS:
+required: true
+ALIAS:
+required: true
+ANDROID_KEY_PASSWORD:
+required: true
+ANDROID_STORE_PASSWORD:
+required: true
 pull_request:
 push:
 branches: [main]
@@ -15,16 +24,23 @@ concurrency:
 group: ${{ github.workflow }}-${{ github.ref }}
 cancel-in-progress: true

+permissions: {}
+
 jobs:
 pre-job:
 runs-on: ubuntu-latest
+permissions:
+contents: read
 outputs:
 should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
 steps:
 - name: Checkout code
-uses: actions/checkout@v4
+uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+with:
+persist-credentials: false
+
 - id: found_paths
-uses: dorny/paths-filter@v3
+uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
 with:
 filters: |
 mobile:
@@ -38,31 +54,26 @@ jobs:
 build-sign-android:
 name: Build and sign Android
 needs: pre-job
+permissions:
+contents: read
 # Skip when PR from a fork
 if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' && needs.pre-job.outputs.should_run == 'true' }}
 runs-on: macos-14

 steps:
-- name: Determine ref
-id: get-ref
-run: |
-input_ref="${{ inputs.ref }}"
-github_ref="${{ github.sha }}"
-ref="${input_ref:-$github_ref}"
-echo "ref=$ref" >> $GITHUB_OUTPUT
-
-- uses: actions/checkout@v4
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 with:
-ref: ${{ steps.get-ref.outputs.ref }}
+ref: ${{ inputs.ref || github.sha }}
+persist-credentials: false

-- uses: actions/setup-java@v4
+- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
 with:
 distribution: 'zulu'
 java-version: '17'
 cache: 'gradle'

 - name: Setup Flutter SDK
-uses: subosito/flutter-action@v2
+uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2.19.0
 with:
 channel: 'stable'
 flutter-version-file: ./mobile/pubspec.yaml
@@ -78,6 +89,14 @@ jobs:
 working-directory: ./mobile
 run: flutter pub get

+- name: Generate translation file
+run: make translation
+working-directory: ./mobile
+
+- name: Generate platform APIs
+run: make pigeon
+working-directory: ./mobile
+
 - name: Build Android App Bundle
 working-directory: ./mobile
 env:
@@ -89,7 +108,7 @@ jobs:
 flutter build apk --release --split-per-abi --target-platform android-arm,android-arm64,android-x64

 - name: Publish Android Artifact
-uses: actions/upload-artifact@v4
+uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
 with:
 name: release-apk-signed
 path: mobile/build/app/outputs/flutter-apk/*.apk
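Because the workflow_call trigger above now declares four required secrets, any workflow that reuses build-mobile.yml has to pass them explicitly. The sketch below is a hypothetical caller: the calling job's name and the `with.ref` value are assumptions, while the secret names and the optional `ref` input come from the diff.

```yaml
# Hypothetical caller of the reusable build-mobile.yml workflow (not part of this diff)
jobs:
  build-mobile:
    uses: ./.github/workflows/build-mobile.yml
    with:
      ref: ${{ github.sha }}   # optional 'ref' input declared by the workflow
    secrets:
      KEY_JKS: ${{ secrets.KEY_JKS }}
      ALIAS: ${{ secrets.ALIAS }}
      ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
      ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
```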
19  .github/workflows/cache-cleanup.yml  (vendored)
@@ -8,31 +8,38 @@ concurrency:
 group: ${{ github.workflow }}-${{ github.ref }}
 cancel-in-progress: true

+permissions: {}
+
 jobs:
 cleanup:
 name: Cleanup
 runs-on: ubuntu-latest
+permissions:
+contents: read
+actions: write
 steps:
 - name: Check out code
-uses: actions/checkout@v4
+uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+with:
+persist-credentials: false
+
 - name: Cleanup
+env:
+GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+REF: ${{ github.ref }}
 run: |
 gh extension install actions/gh-actions-cache

 REPO=${{ github.repository }}
-BRANCH=${{ github.ref }}

 echo "Fetching list of cache keys"
-cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )
+cacheKeysForPR=$(gh actions-cache list -R $REPO -B ${REF} -L 100 | cut -f 1 )

 ## Setting this to not fail the workflow while deleting cache keys.
 set +e
 echo "Deleting caches..."
 for cacheKey in $cacheKeysForPR
 do
-gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
+gh actions-cache delete $cacheKey -R "$REPO" -B "${REF}" --confirm
 done
 echo "Done"
-env:
-GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
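The change above is one instance of a pattern applied throughout this diff: a `${{ ... }}` expression is moved into an `env:` entry and the shell then reads a quoted variable, so the expression is never spliced directly into the script text. A minimal illustration of that pattern in isolation, with an illustrative step name and command that are not taken from the diff:

```yaml
# Illustrative only: the env-indirection pattern used repeatedly in this diff
- name: Example of passing context values via env
  env:
    REF: ${{ github.ref }}   # expanded by Actions into an environment variable
  run: |
    # The shell only ever sees "$REF"; the expression is not interpolated into the script.
    echo "Operating on ${REF}"
```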
30  .github/workflows/cli.yml  (vendored)
@@ -6,7 +6,6 @@ on:
 - 'cli/**'
 - '.github/workflows/cli.yml'
 pull_request:
-branches: [main]
 paths:
 - 'cli/**'
 - '.github/workflows/cli.yml'
@@ -17,21 +16,25 @@ concurrency:
 group: ${{ github.workflow }}-${{ github.ref }}
 cancel-in-progress: true

-permissions:
-packages: write
+permissions: {}

 jobs:
 publish:
 name: CLI Publish
 runs-on: ubuntu-latest
+permissions:
+contents: read
 defaults:
 run:
 working-directory: ./cli

 steps:
-- uses: actions/checkout@v4
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+with:
+persist-credentials: false
+
 # Setup .npmrc file to publish to npm
-- uses: actions/setup-node@v4
+- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
 with:
 node-version-file: './cli/.nvmrc'
 registry-url: 'https://registry.npmjs.org'
@@ -49,20 +52,25 @@ jobs:
 docker:
 name: Docker
 runs-on: ubuntu-latest
+permissions:
+contents: read
+packages: write
 needs: publish

 steps:
 - name: Checkout
-uses: actions/checkout@v4
+uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+with:
+persist-credentials: false
+
 - name: Set up QEMU
-uses: docker/setup-qemu-action@v3.4.0
+uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0

 - name: Set up Docker Buildx
-uses: docker/setup-buildx-action@v3.9.0
+uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0

 - name: Login to GitHub Container Registry
-uses: docker/login-action@v3
+uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
 if: ${{ !github.event.pull_request.head.repo.fork }}
 with:
 registry: ghcr.io
@@ -77,7 +85,7 @@ jobs:

 - name: Generate docker image tags
 id: metadata
-uses: docker/metadata-action@v5
+uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
 with:
 flavor: |
 latest=false
@@ -88,7 +96,7 @@ jobs:
 type=raw,value=latest,enable=${{ github.event_name == 'release' }}

 - name: Build and push image
-uses: docker/build-push-action@v6.13.0
+uses: docker/build-push-action@1dc73863535b631f98b2378be8619f83b136f4a0 # v6.17.0
 with:
 file: cli/Dockerfile
 platforms: linux/amd64,linux/arm64
67  .github/workflows/codeql-analysis.yml  (vendored)
@@ -9,14 +9,14 @@
 # the `language` matrix defined below to confirm you have the correct set of
 # supported CodeQL languages.
 #
-name: "CodeQL"
+name: 'CodeQL'

 on:
 push:
-branches: [ "main" ]
+branches: ['main']
 pull_request:
 # The branches below must be a subset of the branches above
-branches: [ "main" ]
+branches: ['main']
 schedule:
 - cron: '20 13 * * 1'

@@ -24,6 +24,8 @@ concurrency:
 group: ${{ github.workflow }}-${{ github.ref }}
 cancel-in-progress: true

+permissions: {}
+
 jobs:
 analyze:
 name: Analyze
@@ -36,43 +38,44 @@ jobs:
 strategy:
 fail-fast: false
 matrix:
-language: [ 'javascript', 'python' ]
+language: ['javascript', 'python']
 # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
 # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support

 steps:
 - name: Checkout repository
-uses: actions/checkout@v4
+uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+with:
+persist-credentials: false
+
 # Initializes the CodeQL tools for scanning.
 - name: Initialize CodeQL
-uses: github/codeql-action/init@v3
+uses: github/codeql-action/init@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
 with:
 languages: ${{ matrix.language }}
 # If you wish to specify custom queries, you can do so here or in a config file.
 # By default, queries listed here will override any specified in a config file.
 # Prefix the list here with "+" to use these queries and those in the config file.

 # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
 # queries: security-extended,security-and-quality

-# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
-# If this step fails, then you should remove it and run the build manually (see below)
-- name: Autobuild
-uses: github/codeql-action/autobuild@v3
+# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
+# If this step fails, then you should remove it and run the build manually (see below)
+- name: Autobuild
+uses: github/codeql-action/autobuild@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
+
+# ℹ️ Command-line programs to run using the OS shell.
+# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun

-# ℹ️ Command-line programs to run using the OS shell.
-# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
+# If the Autobuild fails above, remove it and uncomment the following three lines.
+# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.

-# If the Autobuild fails above, remove it and uncomment the following three lines.
-# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
-# - run: |
-# echo "Run, Build Application using script"
-# ./location_of_script_within_repo/buildscript.sh
-- name: Perform CodeQL Analysis
-uses: github/codeql-action/analyze@v3
-with:
-category: "/language:${{matrix.language}}"
+# - run: |
+# echo "Run, Build Application using script"
+# ./location_of_script_within_repo/buildscript.sh
+
+- name: Perform CodeQL Analysis
+uses: github/codeql-action/analyze@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
+with:
+category: '/language:${{matrix.language}}'
463  .github/workflows/docker.yml  (vendored)
@@ -5,7 +5,6 @@ on:
 push:
 branches: [main]
 pull_request:
-branches: [main]
 release:
 types: [published]

@@ -13,20 +12,23 @@ concurrency:
 group: ${{ github.workflow }}-${{ github.ref }}
 cancel-in-progress: true

-permissions:
-packages: write
+permissions: {}

 jobs:
 pre-job:
 runs-on: ubuntu-latest
+permissions:
+contents: read
 outputs:
 should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
 should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
 steps:
 - name: Checkout code
-uses: actions/checkout@v4
+uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+with:
+persist-credentials: false
 - id: found_paths
-uses: dorny/paths-filter@v3
+uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
 with:
 filters: |
 server:
@@ -38,6 +40,8 @@ jobs:
 - 'machine-learning/**'
 workflow:
 - '.github/workflows/docker.yml'
+- '.github/workflows/multi-runner-build.yml'
+- '.github/actions/image-build'

 - name: Check if we should force jobs to run
 id: should_force
@@ -46,392 +50,130 @@ jobs:
 retag_ml:
 name: Re-Tag ML
 needs: pre-job
+permissions:
+contents: read
+packages: write
 if: ${{ needs.pre-job.outputs.should_run_ml == 'false' && !github.event.pull_request.head.repo.fork }}
 runs-on: ubuntu-latest
 strategy:
 matrix:
-suffix: ["", "-cuda", "-openvino", "-armnn"]
+suffix: ['', '-cuda', '-rocm', '-openvino', '-armnn', '-rknn']
-steps:
-- name: Login to GitHub Container Registry
-uses: docker/login-action@v3
-with:
-registry: ghcr.io
-username: ${{ github.repository_owner }}
-password: ${{ secrets.GITHUB_TOKEN }}
-- name: Re-tag image
-run: |
-REGISTRY_NAME="ghcr.io"
-REPOSITORY=${{ github.repository_owner }}/immich-machine-learning
-TAG_OLD=main${{ matrix.suffix }}
-TAG_PR=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
-TAG_COMMIT=commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
-docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_PR $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
-docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_COMMIT $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
-
-retag_server:
-name: Re-Tag Server
-needs: pre-job
-if: ${{ needs.pre-job.outputs.should_run_server == 'false' && !github.event.pull_request.head.repo.fork }}
-runs-on: ubuntu-latest
-strategy:
-matrix:
-suffix: [""]
 steps:
 - name: Login to GitHub Container Registry
-uses: docker/login-action@v3
+uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
 with:
 registry: ghcr.io
 username: ${{ github.repository_owner }}
 password: ${{ secrets.GITHUB_TOKEN }}
 - name: Re-tag image
+env:
+REGISTRY_NAME: 'ghcr.io'
+REPOSITORY: ${{ github.repository_owner }}/immich-machine-learning
+TAG_OLD: main${{ matrix.suffix }}
+TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
+TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
 run: |
-REGISTRY_NAME="ghcr.io"
-REPOSITORY=${{ github.repository_owner }}/immich-server
-TAG_OLD=main${{ matrix.suffix }}
-TAG_PR=${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
-TAG_COMMIT=commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
-docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_PR $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
-docker buildx imagetools create -t $REGISTRY_NAME/$REPOSITORY:$TAG_COMMIT $REGISTRY_NAME/$REPOSITORY:$TAG_OLD
+docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
+docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"

-build_and_push_ml:
+retag_server:
+name: Re-Tag Server
+needs: pre-job
+permissions:
+contents: read
+packages: write
+if: ${{ needs.pre-job.outputs.should_run_server == 'false' && !github.event.pull_request.head.repo.fork }}
+runs-on: ubuntu-latest
+strategy:
+matrix:
+suffix: ['']
+steps:
+- name: Login to GitHub Container Registry
+uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+with:
+registry: ghcr.io
+username: ${{ github.repository_owner }}
+password: ${{ secrets.GITHUB_TOKEN }}
+- name: Re-tag image
+env:
+REGISTRY_NAME: 'ghcr.io'
+REPOSITORY: ${{ github.repository_owner }}/immich-server
+TAG_OLD: main${{ matrix.suffix }}
+TAG_PR: ${{ github.event.number == 0 && github.ref_name || format('pr-{0}', github.event.number) }}${{ matrix.suffix }}
+TAG_COMMIT: commit-${{ github.event_name != 'pull_request' && github.sha || github.event.pull_request.head.sha }}${{ matrix.suffix }}
+run: |
+docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
+docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
+
+machine-learning:
 name: Build and Push ML
 needs: pre-job
 if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
-runs-on: ${{ matrix.runner }}
-env:
-image: immich-machine-learning
-context: machine-learning
-file: machine-learning/Dockerfile
-GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-machine-learning
 strategy:
-# Prevent a failure in one image from stopping the other builds
 fail-fast: false
-matrix:
-include:
-- platform: linux/amd64
-runner: ubuntu-latest
-device: cpu
-
-- platform: linux/arm64
-runner: ubuntu-24.04-arm
-device: cpu
-
-- platform: linux/amd64
-runner: ubuntu-latest
-device: cuda
-suffix: -cuda
-
-- platform: linux/amd64
-runner: ubuntu-latest
-device: openvino
-suffix: -openvino
-
-- platform: linux/arm64
-runner: ubuntu-24.04-arm
-device: armnn
-suffix: -armnn
-
-steps:
-- name: Prepare
-run: |
-platform=${{ matrix.platform }}
-echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
-
-- name: Checkout
-uses: actions/checkout@v4
-
-- name: Set up Docker Buildx
-uses: docker/setup-buildx-action@v3.9.0
-
-- name: Login to GitHub Container Registry
-uses: docker/login-action@v3
-if: ${{ !github.event.pull_request.head.repo.fork }}
-with:
-registry: ghcr.io
-username: ${{ github.repository_owner }}
-password: ${{ secrets.GITHUB_TOKEN }}
-
-- name: Generate cache key suffix
-run: |
-if [[ "${{ github.event_name }}" == "pull_request" ]]; then
-echo "CACHE_KEY_SUFFIX=pr-${{ github.event.number }}" >> $GITHUB_ENV
-else
-echo "CACHE_KEY_SUFFIX=$(echo ${{ github.ref_name }} | sed 's/[^a-zA-Z0-9]/-/g')" >> $GITHUB_ENV
-fi
-
-- name: Generate cache target
-id: cache-target
-run: |
-if [[ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]]; then
-# Essentially just ignore the cache output (forks can't write to registry cache)
-echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
-else
-echo "cache-to=type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ matrix.device }}-${{ env.CACHE_KEY_SUFFIX }},mode=max,compression=zstd" >> $GITHUB_OUTPUT
-fi
-
-- name: Build and push image
-id: build
-uses: docker/build-push-action@v6.13.0
-with:
-context: ${{ env.context }}
-file: ${{ env.file }}
-platforms: ${{ matrix.platforms }}
-labels: ${{ steps.metadata.outputs.labels }}
-cache-to: ${{ steps.cache-target.outputs.cache-to }}
-cache-from: |
-type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ matrix.device }}-${{ env.CACHE_KEY_SUFFIX }}
-type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ matrix.device }}-main
-outputs: type=image,"name=${{ env.GHCR_REPO }}",push-by-digest=true,name-canonical=true,push=${{ !github.event.pull_request.head.repo.fork }}
-build-args: |
-DEVICE=${{ matrix.device }}
-BUILD_ID=${{ github.run_id }}
-BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.metadata.outputs.tags }}
-BUILD_SOURCE_REF=${{ github.ref_name }}
-BUILD_SOURCE_COMMIT=${{ github.sha }}
-
-- name: Export digest
-run: |
-mkdir -p ${{ runner.temp }}/digests
-digest="${{ steps.build.outputs.digest }}"
-touch "${{ runner.temp }}/digests/${digest#sha256:}"
-
-- name: Upload digest
-uses: actions/upload-artifact@v4
-with:
-name: ml-digests-${{ matrix.device }}-${{ env.PLATFORM_PAIR }}
-path: ${{ runner.temp }}/digests/*
-if-no-files-found: error
-retention-days: 1
-
-merge_ml:
-name: Merge & Push ML
-runs-on: ubuntu-latest
-if: ${{ needs.pre-job.outputs.should_run_ml == 'true' && !github.event.pull_request.head.repo.fork }}
-env:
-GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-machine-learning
-DOCKER_REPO: altran1502/immich-machine-learning
 strategy:
 matrix:
 include:
 - device: cpu
+tag-suffix: ''
 - device: cuda
-suffix: -cuda
+tag-suffix: '-cuda'
+platforms: linux/amd64
 - device: openvino
-suffix: -openvino
+tag-suffix: '-openvino'
+platforms: linux/amd64
 - device: armnn
-suffix: -armnn
-needs:
-- build_and_push_ml
-steps:
-- name: Download digests
-uses: actions/download-artifact@v4
-with:
-path: ${{ runner.temp }}/digests
-pattern: ml-digests-${{ matrix.device }}-*
-merge-multiple: true
+tag-suffix: '-armnn'
+platforms: linux/arm64
+- device: rknn
+tag-suffix: '-rknn'
+platforms: linux/arm64
+- device: rocm
+tag-suffix: '-rocm'
+platforms: linux/amd64
+runner-mapping: '{"linux/amd64": "mich"}'
+uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@094bfb927b8cd75b343abaac27b3241be0fccfe9 # multi-runner-build-workflow-0.1.0
+permissions:
+contents: read
+actions: read
+packages: write
+secrets:
|
||||||
|
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
|
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
with:
|
||||||
|
image: immich-machine-learning
|
||||||
|
context: machine-learning
|
||||||
|
dockerfile: machine-learning/Dockerfile
|
||||||
|
platforms: ${{ matrix.platforms }}
|
||||||
|
runner-mapping: ${{ matrix.runner-mapping }}
|
||||||
|
tag-suffix: ${{ matrix.tag-suffix }}
|
||||||
|
dockerhub-push: ${{ github.event_name == 'release' }}
|
||||||
|
build-args: |
|
||||||
|
DEVICE=${{ matrix.device }}
|
||||||
|
|
||||||
- name: Login to Docker Hub
|
server:
|
||||||
if: ${{ github.event_name == 'release' }}
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Login to GHCR
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
registry: ghcr.io
|
|
||||||
username: ${{ github.repository_owner }}
|
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
uses: docker/setup-buildx-action@v3
|
|
||||||
|
|
||||||
- name: Generate docker image tags
|
|
||||||
id: meta
|
|
||||||
uses: docker/metadata-action@v5
|
|
||||||
env:
|
|
||||||
DOCKER_METADATA_PR_HEAD_SHA: "true"
|
|
||||||
with:
|
|
||||||
flavor: |
|
|
||||||
# Disable latest tag
|
|
||||||
latest=false
|
|
||||||
images: |
|
|
||||||
name=${{ env.GHCR_REPO }}
|
|
||||||
name=${{ env.DOCKER_REPO }},enable=${{ github.event_name == 'release' }}
|
|
||||||
tags: |
|
|
||||||
# Tag with branch name
|
|
||||||
type=ref,event=branch,suffix=${{ matrix.suffix }}
|
|
||||||
# Tag with pr-number
|
|
||||||
type=ref,event=pr,suffix=${{ matrix.suffix }}
|
|
||||||
# Tag with long commit sha hash
|
|
||||||
type=sha,format=long,prefix=commit-,suffix=${{ matrix.suffix }}
|
|
||||||
# Tag with git tag on release
|
|
||||||
type=ref,event=tag,suffix=${{ matrix.suffix }}
|
|
||||||
type=raw,value=release,enable=${{ github.event_name == 'release' }},suffix=${{ matrix.suffix }}
|
|
||||||
|
|
||||||
- name: Create manifest list and push
|
|
||||||
working-directory: ${{ runner.temp }}/digests
|
|
||||||
run: |
|
|
||||||
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
|
||||||
$(printf '${{ env.GHCR_REPO }}@sha256:%s ' *)
|
|
||||||
|
|
||||||
build_and_push_server:
|
|
||||||
name: Build and Push Server
|
name: Build and Push Server
|
||||||
runs-on: ${{ matrix.runner }}
|
|
||||||
needs: pre-job
|
needs: pre-job
|
||||||
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
|
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
|
||||||
env:
|
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@094bfb927b8cd75b343abaac27b3241be0fccfe9 # multi-runner-build-workflow-0.1.0
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
actions: read
|
||||||
|
packages: write
|
||||||
|
secrets:
|
||||||
|
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
|
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
with:
|
||||||
image: immich-server
|
image: immich-server
|
||||||
context: .
|
context: .
|
||||||
file: server/Dockerfile
|
dockerfile: server/Dockerfile
|
||||||
GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-server
|
dockerhub-push: ${{ github.event_name == 'release' }}
|
||||||
strategy:
|
build-args: |
|
||||||
fail-fast: false
|
DEVICE=cpu
|
||||||
matrix:
|
|
||||||
include:
|
|
||||||
- platform: linux/amd64
|
|
||||||
runner: ubuntu-latest
|
|
||||||
- platform: linux/arm64
|
|
||||||
runner: ubuntu-24.04-arm
|
|
||||||
steps:
|
|
||||||
- name: Prepare
|
|
||||||
run: |
|
|
||||||
platform=${{ matrix.platform }}
|
|
||||||
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
uses: docker/setup-buildx-action@v3
|
|
||||||
|
|
||||||
- name: Login to GitHub Container Registry
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
if: ${{ !github.event.pull_request.head.repo.fork }}
|
|
||||||
with:
|
|
||||||
registry: ghcr.io
|
|
||||||
username: ${{ github.repository_owner }}
|
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Generate cache key suffix
|
|
||||||
run: |
|
|
||||||
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
|
|
||||||
echo "CACHE_KEY_SUFFIX=pr-${{ github.event.number }}" >> $GITHUB_ENV
|
|
||||||
else
|
|
||||||
echo "CACHE_KEY_SUFFIX=$(echo ${{ github.ref_name }} | sed 's/[^a-zA-Z0-9]/-/g')" >> $GITHUB_ENV
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Generate cache target
|
|
||||||
id: cache-target
|
|
||||||
run: |
|
|
||||||
if [[ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]]; then
|
|
||||||
# Essentially just ignore the cache output (forks can't write to registry cache)
|
|
||||||
echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
|
|
||||||
else
|
|
||||||
echo "cache-to=type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ matrix.device }}-${{ env.CACHE_KEY_SUFFIX }},mode=max,compression=zstd" >> $GITHUB_OUTPUT
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Build and push image
|
|
||||||
id: build
|
|
||||||
uses: docker/build-push-action@v6.13.0
|
|
||||||
with:
|
|
||||||
context: ${{ env.context }}
|
|
||||||
file: ${{ env.file }}
|
|
||||||
platforms: ${{ matrix.platform }}
|
|
||||||
labels: ${{ steps.metadata.outputs.labels }}
|
|
||||||
cache-to: ${{ steps.cache-target.outputs.cache-to }}
|
|
||||||
cache-from: |
|
|
||||||
type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ env.CACHE_KEY_SUFFIX }}
|
|
||||||
type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-main
|
|
||||||
outputs: type=image,"name=${{ env.GHCR_REPO }}",push-by-digest=true,name-canonical=true,push=${{ !github.event.pull_request.head.repo.fork }}
|
|
||||||
build-args: |
|
|
||||||
DEVICE=cpu
|
|
||||||
BUILD_ID=${{ github.run_id }}
|
|
||||||
BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.metadata.outputs.tags }}
|
|
||||||
BUILD_SOURCE_REF=${{ github.ref_name }}
|
|
||||||
BUILD_SOURCE_COMMIT=${{ github.sha }}
|
|
||||||
|
|
||||||
- name: Export digest
|
|
||||||
run: |
|
|
||||||
mkdir -p ${{ runner.temp }}/digests
|
|
||||||
digest="${{ steps.build.outputs.digest }}"
|
|
||||||
touch "${{ runner.temp }}/digests/${digest#sha256:}"
|
|
||||||
|
|
||||||
- name: Upload digest
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: server-digests-${{ env.PLATFORM_PAIR }}
|
|
||||||
path: ${{ runner.temp }}/digests/*
|
|
||||||
if-no-files-found: error
|
|
||||||
retention-days: 1
|
|
||||||
|
|
||||||
merge_server:
|
|
||||||
name: Merge & Push Server
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
if: ${{ needs.pre-job.outputs.should_run_server == 'true' && !github.event.pull_request.head.repo.fork }}
|
|
||||||
env:
|
|
||||||
GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-server
|
|
||||||
DOCKER_REPO: altran1502/immich-server
|
|
||||||
needs:
|
|
||||||
- build_and_push_server
|
|
||||||
steps:
|
|
||||||
- name: Download digests
|
|
||||||
uses: actions/download-artifact@v4
|
|
||||||
with:
|
|
||||||
path: ${{ runner.temp }}/digests
|
|
||||||
pattern: server-digests-*
|
|
||||||
merge-multiple: true
|
|
||||||
|
|
||||||
- name: Login to Docker Hub
|
|
||||||
if: ${{ github.event_name == 'release' }}
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Login to GHCR
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
registry: ghcr.io
|
|
||||||
username: ${{ github.repository_owner }}
|
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
uses: docker/setup-buildx-action@v3
|
|
||||||
|
|
||||||
- name: Generate docker image tags
|
|
||||||
id: meta
|
|
||||||
uses: docker/metadata-action@v5
|
|
||||||
env:
|
|
||||||
DOCKER_METADATA_PR_HEAD_SHA: "true"
|
|
||||||
with:
|
|
||||||
flavor: |
|
|
||||||
# Disable latest tag
|
|
||||||
latest=false
|
|
||||||
images: |
|
|
||||||
name=${{ env.GHCR_REPO }}
|
|
||||||
name=${{ env.DOCKER_REPO }},enable=${{ github.event_name == 'release' }}
|
|
||||||
tags: |
|
|
||||||
# Tag with branch name
|
|
||||||
type=ref,event=branch,suffix=${{ matrix.suffix }}
|
|
||||||
# Tag with pr-number
|
|
||||||
type=ref,event=pr,suffix=${{ matrix.suffix }}
|
|
||||||
# Tag with long commit sha hash
|
|
||||||
type=sha,format=long,prefix=commit-,suffix=${{ matrix.suffix }}
|
|
||||||
# Tag with git tag on release
|
|
||||||
type=ref,event=tag,suffix=${{ matrix.suffix }}
|
|
||||||
type=raw,value=release,enable=${{ github.event_name == 'release' }},suffix=${{ matrix.suffix }}
|
|
||||||
|
|
||||||
- name: Create manifest list and push
|
|
||||||
working-directory: ${{ runner.temp }}/digests
|
|
||||||
run: |
|
|
||||||
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
|
||||||
$(printf '${{ env.GHCR_REPO }}@sha256:%s ' *)
|
|
||||||
|
|
||||||
success-check-server:
|
success-check-server:
|
||||||
name: Docker Build & Push Server Success
|
name: Docker Build & Push Server Success
|
||||||
needs: [merge_server, retag_server]
|
needs: [server, retag_server]
|
||||||
|
permissions: {}
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
if: always()
|
if: always()
|
||||||
steps:
|
steps:
|
||||||
@@ -440,11 +182,13 @@ jobs:
|
|||||||
run: exit 1
|
run: exit 1
|
||||||
- name: All jobs passed or skipped
|
- name: All jobs passed or skipped
|
||||||
if: ${{ !(contains(needs.*.result, 'failure')) }}
|
if: ${{ !(contains(needs.*.result, 'failure')) }}
|
||||||
|
# zizmor: ignore[template-injection]
|
||||||
run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
|
run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
|
||||||
|
|
||||||
success-check-ml:
|
success-check-ml:
|
||||||
name: Docker Build & Push ML Success
|
name: Docker Build & Push ML Success
|
||||||
needs: [merge_ml, retag_ml]
|
needs: [machine-learning, retag_ml]
|
||||||
|
permissions: {}
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
if: always()
|
if: always()
|
||||||
steps:
|
steps:
|
||||||
@@ -453,4 +197,5 @@ jobs:
|
|||||||
run: exit 1
|
run: exit 1
|
||||||
- name: All jobs passed or skipped
|
- name: All jobs passed or skipped
|
||||||
if: ${{ !(contains(needs.*.result, 'failure')) }}
|
if: ${{ !(contains(needs.*.result, 'failure')) }}
|
||||||
|
# zizmor: ignore[template-injection]
|
||||||
run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
|
run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
|
||||||
|
|||||||
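The Re-tag step above aliases an already-pushed multi-arch manifest instead of rebuilding it: `imagetools create -t NEW OLD` points a new tag at the existing manifest list. As a rough illustration only (the owner, PR number, and tag values below are hypothetical, assuming a pull-request run with no matrix suffix), the templated command expands to roughly:

    # Hypothetical expansion of the Re-tag step for a PR run (values made up).
    REGISTRY_NAME='ghcr.io'
    REPOSITORY='immich-app/immich-server'
    TAG_OLD='main'
    TAG_PR='pr-1234'

    # Re-points the pr-1234 tag at the manifest currently tagged main;
    # no layers are pulled or rebuilt.
    docker buildx imagetools create \
      -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" \
      "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"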
.github/workflows/docs-build.yml (22 changes)
@@ -3,7 +3,6 @@ on:
   push:
     branches: [main]
   pull_request:
-    branches: [main]
   release:
     types: [published]

@@ -11,16 +10,22 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

+permissions: {}
+
 jobs:
   pre-job:
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     outputs:
       should_run: ${{ steps.found_paths.outputs.docs == 'true' || steps.should_force.outputs.should_force == 'true' }}
     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false
       - id: found_paths
-        uses: dorny/paths-filter@v3
+        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
         with:
           filters: |
             docs:
@@ -34,6 +39,8 @@ jobs:
   build:
     name: Docs Build
     needs: pre-job
+    permissions:
+      contents: read
     if: ${{ needs.pre-job.outputs.should_run == 'true' }}
     runs-on: ubuntu-latest
     defaults:
@@ -42,10 +49,12 @@ jobs:

     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false

       - name: Setup Node
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
           node-version-file: './docs/.nvmrc'

@@ -59,8 +68,9 @@ jobs:
         run: npm run build

       - name: Upload build output
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: docs-build-output
           path: docs/build/
+          include-hidden-files: true
           retention-days: 1
.github/workflows/docs-deploy.yml (77 changes)
@@ -1,7 +1,7 @@
 name: Docs deploy
 on:
-  workflow_run:
+  workflow_run: # zizmor: ignore[dangerous-triggers] no attacker inputs are used here
-    workflows: ["Docs build"]
+    workflows: ['Docs build']
     types:
       - completed

@@ -9,6 +9,9 @@ jobs:
   checks:
     name: Docs Deploy Checks
     runs-on: ubuntu-latest
+    permissions:
+      actions: read
+      pull-requests: read
     outputs:
       parameters: ${{ steps.parameters.outputs.result }}
       artifact: ${{ steps.get-artifact.outputs.result }}
@@ -17,7 +20,7 @@ jobs:
         run: echo 'The triggering workflow did not succeed' && exit 1
       - name: Get artifact
         id: get-artifact
-        uses: actions/github-script@v7
+        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
         with:
           script: |
             let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
@@ -35,7 +38,9 @@ jobs:
             return { found: true, id: matchArtifact.id };
       - name: Determine deploy parameters
         id: parameters
-        uses: actions/github-script@v7
+        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
+        env:
+          HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
         with:
           script: |
             const eventType = context.payload.workflow_run.event;
@@ -57,7 +62,8 @@ jobs:
             } else if (eventType == "pull_request") {
               let pull_number = context.payload.workflow_run.pull_requests[0]?.number;
               if(!pull_number) {
-                const response = await github.rest.search.issuesAndPullRequests({q: 'repo:${{ github.repository }} is:pr sha:${{ github.event.workflow_run.head_sha }}',per_page: 1,})
+                const {HEAD_SHA} = process.env;
+                const response = await github.rest.search.issuesAndPullRequests({q: `repo:${{ github.repository }} is:pr sha:${HEAD_SHA}`,per_page: 1,})
                 const items = response.data.items
                 if (items.length < 1) {
                   throw new Error("No pull request found for the commit")
@@ -95,30 +101,36 @@ jobs:
     name: Docs Deploy
     runs-on: ubuntu-latest
     needs: checks
+    permissions:
+      contents: read
+      actions: read
+      pull-requests: write
     if: ${{ fromJson(needs.checks.outputs.artifact).found && fromJson(needs.checks.outputs.parameters).shouldDeploy }}
     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false

       - name: Load parameters
         id: parameters
-        uses: actions/github-script@v7
+        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
+        env:
+          PARAM_JSON: ${{ needs.checks.outputs.parameters }}
         with:
           script: |
-            const json = `${{ needs.checks.outputs.parameters }}`;
-            const parameters = JSON.parse(json);
+            const parameters = JSON.parse(process.env.PARAM_JSON);
             core.setOutput("event", parameters.event);
            core.setOutput("name", parameters.name);
            core.setOutput("shouldDeploy", parameters.shouldDeploy);

-      - run: |
-          echo "Starting docs deployment for ${{ steps.parameters.outputs.event }} ${{ steps.parameters.outputs.name }}"

       - name: Download artifact
-        uses: actions/github-script@v7
+        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
+        env:
+          ARTIFACT_JSON: ${{ needs.checks.outputs.artifact }}
         with:
           script: |
-            let artifact = ${{ needs.checks.outputs.artifact }};
+            let artifact = JSON.parse(process.env.ARTIFACT_JSON);
             let download = await github.rest.actions.downloadArtifact({
               owner: context.repo.owner,
               repo: context.repo.repo,
@@ -138,12 +150,12 @@ jobs:
           CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
           CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
           TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
-        uses: gruntwork-io/terragrunt-action@v2
+        uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
         with:
-          tg_version: "0.58.12"
-          tofu_version: "1.7.1"
-          tg_dir: "deployment/modules/cloudflare/docs"
-          tg_command: "apply"
+          tg_version: '0.58.12'
+          tofu_version: '1.7.1'
+          tg_dir: 'deployment/modules/cloudflare/docs'
+          tg_command: 'apply'

       - name: Deploy Docs Subdomain Output
         id: docs-output
@@ -153,27 +165,30 @@ jobs:
           CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
           CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
           TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
-        uses: gruntwork-io/terragrunt-action@v2
+        uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
         with:
-          tg_version: "0.58.12"
-          tofu_version: "1.7.1"
-          tg_dir: "deployment/modules/cloudflare/docs"
-          tg_command: "output -json"
+          tg_version: '0.58.12'
+          tofu_version: '1.7.1'
+          tg_dir: 'deployment/modules/cloudflare/docs'
+          tg_command: 'output -json'

       - name: Output Cleaning
         id: clean
+        env:
+          TG_OUTPUT: ${{ steps.docs-output.outputs.tg_action_output }}
         run: |
-          TG_OUT=$(echo '${{ steps.docs-output.outputs.tg_action_output }}' | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
-          echo "output=$TG_OUT" >> $GITHUB_OUTPUT
+          CLEANED=$(echo "$TG_OUTPUT" | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
+          echo "output=$CLEANED" >> $GITHUB_OUTPUT

       - name: Publish to Cloudflare Pages
-        uses: cloudflare/pages-action@v1
+        # TODO: Action is deprecated
+        uses: cloudflare/pages-action@f0a1cd58cd66095dee69bfa18fa5efd1dde93bca # v1.5.0
         with:
           apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN_PAGES_UPLOAD }}
           accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
           projectName: ${{ fromJson(steps.clean.outputs.output).pages_project_name.value }}
-          workingDirectory: "docs"
-          directory: "build"
+          workingDirectory: 'docs'
+          directory: 'build'
           branch: ${{ steps.parameters.outputs.name }}
           wranglerVersion: '3'

@@ -184,7 +199,7 @@ jobs:
           CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
           CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
           TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
-        uses: gruntwork-io/terragrunt-action@v2
+        uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
         with:
           tg_version: '0.58.12'
           tofu_version: '1.7.1'
@@ -192,7 +207,7 @@ jobs:
           tg_command: 'apply'

       - name: Comment
-        uses: actions-cool/maintain-one-comment@v3
+        uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0
         if: ${{ steps.parameters.outputs.event == 'pr' }}
         with:
           number: ${{ fromJson(needs.checks.outputs.parameters).pr_number }}
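The Output Cleaning step above decodes the escaped characters that the terragrunt action applies to its `output -json` result before the workflow can parse it with `fromJson`. A minimal sketch of the same pipeline run outside the workflow follows; the sample `TG_OUTPUT` value is made up, only the `pages_project_name` key is taken from the file above.

    # Sketch of the Output Cleaning step with a made-up terragrunt output string.
    # %0A is an escaped newline and %3C an escaped '<'; jq -c re-emits compact JSON.
    TG_OUTPUT='{%0A  "pages_project_name": {%0A    "value": "example-docs-project"%0A  }%0A}'
    CLEANED=$(echo "$TG_OUTPUT" | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
    echo "$CLEANED"   # -> {"pages_project_name":{"value":"example-docs-project"}}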
.github/workflows/docs-destroy.yml (27 changes)
@@ -1,32 +1,39 @@
 name: Docs destroy
 on:
-  pull_request_target:
+  pull_request_target: # zizmor: ignore[dangerous-triggers] no attacker inputs are used here
     types: [closed]

+permissions: {}
+
 jobs:
   deploy:
     name: Docs Destroy
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      pull-requests: write
     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false

       - name: Destroy Docs Subdomain
         env:
-          TF_VAR_prefix_name: "pr-${{ github.event.number }}"
-          TF_VAR_prefix_event_type: "pr"
+          TF_VAR_prefix_name: 'pr-${{ github.event.number }}'
+          TF_VAR_prefix_event_type: 'pr'
           CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
           CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
           TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
-        uses: gruntwork-io/terragrunt-action@v2
+        uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
         with:
-          tg_version: "0.58.12"
-          tofu_version: "1.7.1"
-          tg_dir: "deployment/modules/cloudflare/docs"
-          tg_command: "destroy -refresh=false"
+          tg_version: '0.58.12'
+          tofu_version: '1.7.1'
+          tg_dir: 'deployment/modules/cloudflare/docs'
+          tg_command: 'destroy -refresh=false'

       - name: Comment
-        uses: actions-cool/maintain-one-comment@v3
+        uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0
         with:
           number: ${{ github.event.number }}
           delete: true
.github/workflows/fix-format.yml (15 changes)
@@ -4,28 +4,32 @@ on:
   pull_request:
     types: [labeled]

+permissions: {}
+
 jobs:
   fix-formatting:
     runs-on: ubuntu-latest
     if: ${{ github.event.label.name == 'fix:formatting' }}
     permissions:
+      contents: write
       pull-requests: write
     steps:
       - name: Generate a token
         id: generate-token
-        uses: actions/create-github-app-token@v1
+        uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
         with:
           app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
           private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

       - name: 'Checkout'
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           ref: ${{ github.event.pull_request.head.ref }}
           token: ${{ steps.generate-token.outputs.token }}
+          persist-credentials: true

       - name: Setup Node
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
           node-version-file: './server/.nvmrc'

@@ -33,13 +37,13 @@ jobs:
         run: make install-all && make format-all

       - name: Commit and push
-        uses: EndBug/add-and-commit@v9
+        uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4
         with:
           default_author: github_actions
           message: 'chore: fix formatting'

       - name: Remove label
-        uses: actions/github-script@v7
+        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
         if: always()
         with:
           script: |
@@ -49,4 +53,3 @@ jobs:
               repo: context.repo.repo,
               name: 'fix:formatting'
             })
-
.github/workflows/multi-runner-build.yml
vendored
Normal file
185
.github/workflows/multi-runner-build.yml
vendored
Normal file
@@ -0,0 +1,185 @@
|
|||||||
|
name: 'Multi-runner container image build'
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
image:
|
||||||
|
description: 'Name of the image'
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
context:
|
||||||
|
description: 'Path to build context'
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
dockerfile:
|
||||||
|
description: 'Path to Dockerfile'
|
||||||
|
type: string
|
||||||
|
required: true
|
||||||
|
tag-suffix:
|
||||||
|
description: 'Suffix to append to the image tag'
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
dockerhub-push:
|
||||||
|
description: 'Push to Docker Hub'
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
build-args:
|
||||||
|
description: 'Docker build arguments'
|
||||||
|
type: string
|
||||||
|
required: false
|
||||||
|
platforms:
|
||||||
|
description: 'Platforms to build for'
|
||||||
|
type: string
|
||||||
|
runner-mapping:
|
||||||
|
description: 'Mapping from platforms to runners'
|
||||||
|
type: string
|
||||||
|
secrets:
|
||||||
|
DOCKERHUB_USERNAME:
|
||||||
|
required: false
|
||||||
|
DOCKERHUB_TOKEN:
|
||||||
|
required: false
|
||||||
|
|
||||||
|
env:
|
||||||
|
GHCR_IMAGE: ghcr.io/${{ github.repository_owner }}/${{ inputs.image }}
|
||||||
|
DOCKERHUB_IMAGE: altran1502/${{ inputs.image }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
matrix:
|
||||||
|
name: 'Generate matrix'
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
matrix: ${{ steps.matrix.outputs.matrix }}
|
||||||
|
key: ${{ steps.artifact-key.outputs.base }}
|
||||||
|
steps:
|
||||||
|
- name: Generate build matrix
|
||||||
|
id: matrix
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
PLATFORMS: ${{ inputs.platforms || 'linux/amd64,linux/arm64' }}
|
||||||
|
RUNNER_MAPPING: ${{ inputs.runner-mapping || '{"linux/amd64":"ubuntu-latest","linux/arm64":"ubuntu-24.04-arm"}' }}
|
||||||
|
run: |
|
||||||
|
matrix=$(jq -R -c \
|
||||||
|
--argjson runner_mapping "${RUNNER_MAPPING}" \
|
||||||
|
'split(",") | map({platform: ., runner: $runner_mapping[.]})' \
|
||||||
|
<<< "${PLATFORMS}")
|
||||||
|
echo "${matrix}"
|
||||||
|
echo "matrix=${matrix}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: Determine artifact key
|
||||||
|
id: artifact-key
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
IMAGE: ${{ inputs.image }}
|
||||||
|
SUFFIX: ${{ inputs.tag-suffix }}
|
||||||
|
run: |
|
||||||
|
if [[ -n "${SUFFIX}" ]]; then
|
||||||
|
base="${IMAGE}${SUFFIX}-digests"
|
||||||
|
else
|
||||||
|
base="${IMAGE}-digests"
|
||||||
|
fi
|
||||||
|
echo "${base}"
|
||||||
|
echo "base=${base}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
build:
|
||||||
|
needs: matrix
|
||||||
|
runs-on: ${{ matrix.runner }}
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
packages: write
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
include: ${{ fromJson(needs.matrix.outputs.matrix) }}
|
||||||
|
steps:
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||||
|
with:
|
||||||
|
persist-credentials: false
|
||||||
|
|
||||||
|
- uses: ./.github/actions/image-build
|
||||||
|
with:
|
||||||
|
context: ${{ inputs.context }}
|
||||||
|
dockerfile: ${{ inputs.dockerfile }}
|
||||||
|
image: ${{ env.GHCR_IMAGE }}
|
||||||
|
ghcr-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
platform: ${{ matrix.platform }}
|
||||||
|
artifact-key-base: ${{ needs.matrix.outputs.key }}
|
||||||
|
build-args: ${{ inputs.build-args }}
|
||||||
|
|
||||||
|
merge:
|
||||||
|
needs: [matrix, build]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: ${{ !github.event.pull_request.head.repo.fork }}
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
actions: read
|
||||||
|
packages: write
|
||||||
|
steps:
|
||||||
|
- name: Download digests
|
||||||
|
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4
|
||||||
|
with:
|
||||||
|
path: ${{ runner.temp }}/digests
|
||||||
|
pattern: ${{ needs.matrix.outputs.key }}-*
|
||||||
|
merge-multiple: true
|
||||||
|
|
||||||
|
- name: Login to Docker Hub
|
||||||
|
if: ${{ inputs.dockerhub-push }}
|
||||||
|
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||||
|
with:
|
||||||
|
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
|
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Login to GHCR
|
||||||
|
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||||
|
with:
|
||||||
|
registry: ghcr.io
|
||||||
|
username: ${{ github.repository_owner }}
|
||||||
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
|
||||||
|
|
||||||
|
- name: Generate docker image tags
|
||||||
|
id: meta
|
||||||
|
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
|
||||||
|
env:
|
||||||
|
DOCKER_METADATA_PR_HEAD_SHA: 'true'
|
||||||
|
with:
|
||||||
|
flavor: |
|
||||||
|
# Disable latest tag
|
||||||
|
latest=false
|
||||||
|
suffix=${{ inputs.tag-suffix }}
|
||||||
|
images: |
|
||||||
|
name=${{ env.GHCR_IMAGE }}
|
||||||
|
name=${{ env.DOCKERHUB_IMAGE }},enable=${{ inputs.dockerhub-push }}
|
||||||
|
tags: |
|
||||||
|
# Tag with branch name
|
||||||
|
type=ref,event=branch
|
||||||
|
# Tag with pr-number
|
||||||
|
type=ref,event=pr
|
||||||
|
# Tag with long commit sha hash
|
||||||
|
type=sha,format=long,prefix=commit-
|
||||||
|
# Tag with git tag on release
|
||||||
|
type=ref,event=tag
|
||||||
|
type=raw,value=release,enable=${{ github.event_name == 'release' }}
|
||||||
|
|
||||||
|
- name: Create manifest list and push
|
||||||
|
working-directory: ${{ runner.temp }}/digests
|
||||||
|
run: |
|
||||||
|
# Process annotations
|
||||||
|
declare -a ANNOTATIONS=()
|
||||||
|
if [[ -n "$DOCKER_METADATA_OUTPUT_JSON" ]]; then
|
||||||
|
while IFS= read -r annotation; do
|
||||||
|
# Extract key and value by removing the manifest: prefix
|
||||||
|
if [[ "$annotation" =~ ^manifest:(.+)=(.+)$ ]]; then
|
||||||
|
key="${BASH_REMATCH[1]}"
|
||||||
|
value="${BASH_REMATCH[2]}"
|
||||||
|
# Use array to properly handle arguments with spaces
|
||||||
|
ANNOTATIONS+=(--annotation "index:$key=$value")
|
||||||
|
fi
|
||||||
|
done < <(jq -r '.annotations[]' <<< "$DOCKER_METADATA_OUTPUT_JSON")
|
||||||
|
fi
|
||||||
|
|
||||||
|
TAGS=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||||
|
SOURCE_ARGS=$(printf "${GHCR_IMAGE}@sha256:%s " *)
|
||||||
|
|
||||||
|
docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS
|
||||||
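The 'Generate build matrix' step above turns the comma-separated platforms input into a JSON include list, pairing each platform with a runner taken from the mapping. Run locally with the defaults declared in the file (nothing else assumed), the same jq call behaves like this:

    # Reproduces the matrix jq call with the workflow's default inputs.
    PLATFORMS='linux/amd64,linux/arm64'
    RUNNER_MAPPING='{"linux/amd64":"ubuntu-latest","linux/arm64":"ubuntu-24.04-arm"}'

    # -R reads the input as a raw string; split(",") yields one entry per
    # platform, and $runner_mapping[.] looks up the runner for that platform.
    matrix=$(jq -R -c \
      --argjson runner_mapping "${RUNNER_MAPPING}" \
      'split(",") | map({platform: ., runner: $runner_mapping[.]})' \
      <<< "${PLATFORMS}")

    echo "${matrix}"
    # [{"platform":"linux/amd64","runner":"ubuntu-latest"},{"platform":"linux/arm64","runner":"ubuntu-24.04-arm"}]

The downstream build job then feeds this output straight into `strategy.matrix.include` via `fromJson`.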
.github/workflows/pr-label-validation.yml (10 changes)
@@ -1,9 +1,11 @@
 name: PR Label Validation

 on:
-  pull_request_target:
+  pull_request_target: # zizmor: ignore[dangerous-triggers] no attacker inputs are used here
     types: [opened, labeled, unlabeled, synchronize]

+permissions: {}
+
 jobs:
   validate-release-label:
     runs-on: ubuntu-latest
@@ -12,11 +14,11 @@ jobs:
       pull-requests: write
     steps:
       - name: Require PR to have a changelog label
-        uses: mheap/github-action-required-labels@v5
+        uses: mheap/github-action-required-labels@fb29a14a076b0f74099f6198f77750e8fc236016 # v5.5.0
         with:
           mode: exactly
           count: 1
           use_regex: true
-          labels: "changelog:.*"
+          labels: 'changelog:.*'
           add_comment: true
-          message: "Label error. Requires {{errorString}} {{count}} of: {{ provided }}. Found: {{ applied }}. A maintainer will add the required label."
+          message: 'Label error. Requires {{errorString}} {{count}} of: {{ provided }}. Found: {{ applied }}. A maintainer will add the required label.'
.github/workflows/pr-labeler.yml (8 changes)
@@ -1,6 +1,8 @@
-name: "Pull Request Labeler"
+name: 'Pull Request Labeler'
 on:
-  - pull_request_target
+  - pull_request_target # zizmor: ignore[dangerous-triggers] no attacker inputs are used here

+permissions: {}
+
 jobs:
   labeler:
@@ -9,4 +11,4 @@ jobs:
       pull-requests: write
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/labeler@v5
+      - uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0
@@ -4,12 +4,16 @@ on:
   pull_request:
     types: [opened, synchronize, reopened, edited]

+permissions: {}
+
 jobs:
   validate-pr-title:
     runs-on: ubuntu-latest
+    permissions:
+      pull-requests: write
     steps:
       - name: PR Conventional Commit Validation
-        uses: ytanikin/PRConventionalCommits@1.3.0
+        uses: ytanikin/PRConventionalCommits@b628c5a234cc32513014b7bfdd1e47b532124d98 # 1.3.0
         with:
           task_types: '["feat","fix","docs","test","ci","refactor","perf","chore","revert"]'
           add_label: 'false'
.github/workflows/prepare-release.yml (45 changes)
@@ -21,35 +21,40 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}-root
   cancel-in-progress: true

+permissions: {}
+
 jobs:
   bump_version:
     runs-on: ubuntu-latest

     outputs:
       ref: ${{ steps.push-tag.outputs.commit_long_sha }}
+    permissions: {} # No job-level permissions are needed because it uses the app-token
     steps:
       - name: Generate a token
         id: generate-token
-        uses: actions/create-github-app-token@v1
+        uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
         with:
           app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
           private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           token: ${{ steps.generate-token.outputs.token }}
+          persist-credentials: true

-      - name: Install Poetry
-        run: pipx install poetry
+      - name: Install uv
+        uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2

       - name: Bump version
-        run: misc/release/pump-version.sh -s "${{ inputs.serverBump }}" -m "${{ inputs.mobileBump }}"
+        env:
+          SERVER_BUMP: ${{ inputs.serverBump }}
+          MOBILE_BUMP: ${{ inputs.mobileBump }}
+        run: misc/release/pump-version.sh -s "${SERVER_BUMP}" -m "${MOBILE_BUMP}"

       - name: Commit and tag
         id: push-tag
-        uses: EndBug/add-and-commit@v9
+        uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4
         with:
           default_author: github_actions
           message: 'chore: version ${{ env.IMMICH_VERSION }}'
@@ -59,37 +64,47 @@ jobs:
   build_mobile:
     uses: ./.github/workflows/build-mobile.yml
     needs: bump_version
-    secrets: inherit
+    permissions:
+      contents: read
+    secrets:
+      KEY_JKS: ${{ secrets.KEY_JKS }}
+      ALIAS: ${{ secrets.ALIAS }}
+      ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
+      ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
     with:
       ref: ${{ needs.bump_version.outputs.ref }}

   prepare_release:
     runs-on: ubuntu-latest
     needs: build_mobile
+    permissions:
+      actions: read # To download the app artifact
+      # No content permissions are needed because it uses the app-token
     steps:
       - name: Generate a token
         id: generate-token
-        uses: actions/create-github-app-token@v1
+        uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
         with:
           app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
           private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           token: ${{ steps.generate-token.outputs.token }}
+          persist-credentials: false

       - name: Download APK
-        uses: actions/download-artifact@v4
+        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
         with:
           name: release-apk-signed

       - name: Create draft release
-        uses: softprops/action-gh-release@v2
+        uses: softprops/action-gh-release@da05d552573ad5aba039eaac05058a918a7bf631 # v2.2.2
         with:
           draft: true
           tag_name: ${{ env.IMMICH_VERSION }}
+          token: ${{ steps.generate-token.outputs.token }}
           generate_release_notes: true
           body_path: misc/release/notes.tmpl
           files: |
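A pattern worth noting across this diff (the version bump above, the docs search query, the terragrunt output cleaning): `${{ ... }}` expressions are moved out of `run:` and `script:` bodies into `env:`, so untrusted values reach the script as ordinary environment variables instead of being spliced into the script text, which is the template-injection class zizmor flags. A minimal sketch of the two shapes, with a placeholder value, assuming nothing beyond what the diff shows:

    # Before: the expression is substituted directly into the shell text, so a
    # crafted input value could escape the quoting.
    #   run: misc/release/pump-version.sh -s "${{ inputs.serverBump }}"
    #
    # After: the runner exports the value and the script only ever reads it as data.
    #   env:
    #     SERVER_BUMP: ${{ inputs.serverBump }}
    #   run: misc/release/pump-version.sh -s "${SERVER_BUMP}"
    SERVER_BUMP='minor'   # placeholder value for a local dry run
    echo "would bump server version: ${SERVER_BUMP}"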
.github/workflows/preview-label.yaml (12 changes)
@@ -2,7 +2,9 @@ name: Preview label

 on:
   pull_request:
-    types: [labeled]
+    types: [labeled, closed]

+permissions: {}
+
 jobs:
   comment-status:
@@ -11,10 +13,10 @@ jobs:
     permissions:
       pull-requests: write
     steps:
-      - uses: mshick/add-pr-comment@v2
+      - uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
         with:
-          message-id: "preview-status"
-          message: "Deploying preview environment to https://pr-${{ github.event.pull_request.number }}.preview.internal.immich.cloud/"
+          message-id: 'preview-status'
+          message: 'Deploying preview environment to https://pr-${{ github.event.pull_request.number }}.preview.internal.immich.cloud/'

   remove-label:
     runs-on: ubuntu-latest
@@ -22,7 +24,7 @@ jobs:
     permissions:
       pull-requests: write
     steps:
-      - uses: actions/github-script@v7
+      - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
         with:
           script: |
             github.rest.issues.removeLabel({
.github/workflows/sdk.yml (12 changes)
@@ -4,20 +4,24 @@ on:
   release:
     types: [published]

-permissions:
-  packages: write
+permissions: {}

 jobs:
   publish:
     name: Publish `@immich/sdk`
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     defaults:
       run:
         working-directory: ./open-api/typescript-sdk
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false

       # Setup .npmrc file to publish to npm
-      - uses: actions/setup-node@v4
+      - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
           node-version-file: './open-api/typescript-sdk/.nvmrc'
           registry-url: 'https://registry.npmjs.org'
.github/workflows/static_analysis.yml (80 changes)
@@ -9,16 +9,22 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

+permissions: {}
+
 jobs:
   pre-job:
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     outputs:
       should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false
       - id: found_paths
-        uses: dorny/paths-filter@v3
+        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
         with:
           filters: |
             mobile:
@@ -33,15 +39,17 @@ jobs:
     name: Run Dart Code Analysis
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run == 'true' }}

     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false

       - name: Setup Flutter SDK
-        uses: subosito/flutter-action@v2
+        uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2.19.0
         with:
           channel: 'stable'
           flutter-version-file: ./mobile/pubspec.yaml
@@ -50,6 +58,36 @@ jobs:
         run: dart pub get
         working-directory: ./mobile

+      - name: Generate translation file
+        run: make translation
+        working-directory: ./mobile
+
+      - name: Run Build Runner
+        run: make build
+        working-directory: ./mobile
+
+      - name: Generate platform API
+        run: make pigeon
+        working-directory: ./mobile
+
+      - name: Find file changes
+        uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
+        id: verify-changed-files
+        with:
+          files: |
+            mobile/**/*.g.dart
+            mobile/**/*.gr.dart
+            mobile/**/*.drift.dart
+
+      - name: Verify files have not changed
+        if: steps.verify-changed-files.outputs.files_changed == 'true'
+        env:
+          CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
+        run: |
+          echo "ERROR: Generated files not up to date! Run make_build inside the mobile directory"
+          echo "Changed files: ${CHANGED_FILES}"
+          exit 1
+
       - name: Run dart analyze
         run: dart analyze --fatal-infos
         working-directory: ./mobile
@@ -62,7 +100,29 @@ jobs:
         run: dart run custom_lint
         working-directory: ./mobile

-  # Enable after riverpod generator migration is completed
-  # - name: Run dart custom lint
-  #   run: dart run custom_lint
-  #   working-directory: ./mobile
+  zizmor:
+    name: zizmor
+    runs-on: ubuntu-latest
+    permissions:
+      security-events: write
+      contents: read
+      actions: read
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false
+
+      - name: Install the latest version of uv
+        uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
+
+      - name: Run zizmor 🌈
+        run: uvx zizmor --format=sarif . > results.sarif
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Upload SARIF file
+        uses: github/codeql-action/upload-sarif@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
+        with:
+          sarif_file: results.sarif
+          category: zizmor
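The new generated-code gate above runs the mobile code generators and fails the job if any tracked *.g.dart, *.gr.dart, or *.drift.dart files differ from what is committed. A rough local equivalent using plain git (a sketch under that assumption, not the tj-actions/verify-changed-files implementation) is:

    # Approximate local version of the "Find file changes" gate:
    # regenerate, then fail if any generated Dart files are dirty.
    cd mobile
    make translation && make build && make pigeon
    CHANGED_FILES=$(git status --porcelain -- '*.g.dart' '*.gr.dart' '*.drift.dart')
    if [[ -n "$CHANGED_FILES" ]]; then
      echo "ERROR: Generated files not up to date! Run make build inside the mobile directory"
      echo "Changed files: ${CHANGED_FILES}"
      exit 1
    fi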
311
.github/workflows/test.yml
vendored
311
.github/workflows/test.yml
vendored
@@ -9,10 +9,15 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true

+permissions: {}

 jobs:
   pre-job:
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     outputs:
+      should_run_i18n: ${{ steps.found_paths.outputs.i18n == 'true' || steps.should_force.outputs.should_force == 'true' }}
       should_run_web: ${{ steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }}
       should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
       should_run_cli: ${{ steps.found_paths.outputs.cli == 'true' || steps.should_force.outputs.should_force == 'true' }}
@@ -21,13 +26,19 @@ jobs:
       should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
       should_run_e2e_web: ${{ steps.found_paths.outputs.e2e == 'true' || steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }}
       should_run_e2e_server_cli: ${{ steps.found_paths.outputs.e2e == 'true' || steps.found_paths.outputs.server == 'true' || steps.found_paths.outputs.cli == 'true' || steps.should_force.outputs.should_force == 'true' }}
+      should_run_.github: ${{ steps.found_paths.outputs['.github'] == 'true' || steps.should_force.outputs.should_force == 'true' }} # redundant to have should_force but if someone changes the trigger then this won't have to be changed
     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false

       - id: found_paths
-        uses: dorny/paths-filter@v3
+        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
         with:
           filters: |
+            i18n:
+              - 'i18n/**'
             web:
               - 'web/**'
               - 'i18n/**'
@@ -45,6 +56,8 @@ jobs:
               - 'machine-learning/**'
             workflow:
               - '.github/workflows/test.yml'
+            .github:
+              - '.github/**'

       - name: Check if we should force jobs to run
         id: should_force
@@ -55,16 +68,20 @@ jobs:
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     defaults:
       run:
         working-directory: ./server

     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false

       - name: Setup Node
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
           node-version-file: './server/.nvmrc'

@@ -92,16 +109,20 @@ jobs:
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     defaults:
       run:
         working-directory: ./cli

     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false

       - name: Setup Node
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
           node-version-file: './cli/.nvmrc'

@@ -133,16 +154,20 @@ jobs:
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_cli == 'true' }}
     runs-on: windows-latest
+    permissions:
+      contents: read
     defaults:
       run:
         working-directory: ./cli

     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false

       - name: Setup Node
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
           node-version-file: './cli/.nvmrc'

@@ -162,21 +187,25 @@ jobs:
         run: npm run test:cov
         if: ${{ !cancelled() }}

-  web-unit-tests:
-    name: Test & Lint Web
+  web-lint:
+    name: Lint Web
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_web == 'true' }}
-    runs-on: ubuntu-latest
+    runs-on: mich
+    permissions:
+      contents: read
     defaults:
       run:
         working-directory: ./web

     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false

       - name: Setup Node
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
           node-version-file: './web/.nvmrc'

@@ -188,7 +217,7 @@ jobs:
         run: npm ci

       - name: Run linter
-        run: npm run lint
+        run: npm run lint:p
         if: ${{ !cancelled() }}

       - name: Run formatter
@@ -199,6 +228,35 @@ jobs:
         run: npm run check:svelte
         if: ${{ !cancelled() }}

+  web-unit-tests:
+    name: Test Web
+    needs: pre-job
+    if: ${{ needs.pre-job.outputs.should_run_web == 'true' }}
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+    defaults:
+      run:
+        working-directory: ./web
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false
+
+      - name: Setup Node
+        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+        with:
+          node-version-file: './web/.nvmrc'
+
+      - name: Run setup typescript-sdk
+        run: npm ci && npm run build
+        working-directory: ./open-api/typescript-sdk
+
+      - name: Run npm install
+        run: npm ci
+
       - name: Run tsc
         run: npm run check:typescript
         if: ${{ !cancelled() }}
@@ -207,21 +265,65 @@ jobs:
         run: npm run test:cov
         if: ${{ !cancelled() }}

+  i18n-tests:
+    name: Test i18n
+    needs: pre-job
+    if: ${{ needs.pre-job.outputs.should_run_i18n == 'true' }}
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false
+
+      - name: Setup Node
+        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+        with:
+          node-version-file: './web/.nvmrc'
+
+      - name: Install dependencies
+        run: npm --prefix=web ci
+
+      - name: Format
+        run: npm --prefix=web run format:i18n
+
+      - name: Find file changes
+        uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
+        id: verify-changed-files
+        with:
+          files: |
+            i18n/**
+
+      - name: Verify files have not changed
+        if: steps.verify-changed-files.outputs.files_changed == 'true'
+        env:
+          CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
+        run: |
+          echo "ERROR: i18n files not up to date!"
+          echo "Changed files: ${CHANGED_FILES}"
+          exit 1
+
   e2e-tests-lint:
     name: End-to-End Lint
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_e2e == 'true' }}
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     defaults:
       run:
         working-directory: ./e2e

     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false

       - name: Setup Node
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
           node-version-file: './e2e/.nvmrc'

@@ -251,16 +353,20 @@ jobs:
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     defaults:
       run:
         working-directory: ./server

     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false

       - name: Setup Node
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
           node-version-file: './server/.nvmrc'

@@ -275,19 +381,25 @@ jobs:
     name: End-to-End Tests (Server & CLI)
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_e2e_server_cli == 'true' }}
-    runs-on: mich
+    runs-on: ${{ matrix.runner }}
+    permissions:
+      contents: read
     defaults:
       run:
         working-directory: ./e2e
+    strategy:
+      matrix:
+        runner: [ubuntu-latest, ubuntu-24.04-arm]

     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
+          persist-credentials: false
           submodules: 'recursive'

       - name: Setup Node
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
           node-version-file: './e2e/.nvmrc'

@@ -317,19 +429,25 @@ jobs:
     name: End-to-End Tests (Web)
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_e2e_web == 'true' }}
-    runs-on: mich
+    runs-on: ${{ matrix.runner }}
+    permissions:
+      contents: read
     defaults:
       run:
         working-directory: ./e2e
+    strategy:
+      matrix:
+        runner: [ubuntu-latest, ubuntu-24.04-arm]

     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
+          persist-credentials: false
           submodules: 'recursive'

       - name: Setup Node
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
           node-version-file: './e2e/.nvmrc'

@@ -354,18 +472,43 @@ jobs:
         run: npx playwright test
         if: ${{ !cancelled() }}

+  success-check-e2e:
+    name: End-to-End Tests Success
+    needs: [e2e-tests-server-cli, e2e-tests-web]
+    permissions: {}
+    runs-on: ubuntu-latest
+    if: always()
+    steps:
+      - name: Any jobs failed?
+        if: ${{ contains(needs.*.result, 'failure') }}
+        run: exit 1
+      - name: All jobs passed or skipped
+        if: ${{ !(contains(needs.*.result, 'failure')) }}
+        # zizmor: ignore[template-injection]
+        run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
+
   mobile-unit-tests:
     name: Unit Test Mobile
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_mobile == 'true' }}
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false

       - name: Setup Flutter SDK
-        uses: subosito/flutter-action@v2
+        uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2.19.0
         with:
           channel: 'stable'
           flutter-version-file: ./mobile/pubspec.yaml

+      - name: Generate translation file
+        run: make translation
+        working-directory: ./mobile
+
       - name: Run tests
         working-directory: ./mobile
         run: flutter test -j 1
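The success-check-e2e job added in this hunk acts as a single aggregate status for the two matrixed end-to-end jobs: it always runs, fails when any needed job failed, and passes when everything else succeeded or was skipped. Below is a minimal TypeScript sketch of the rule encoded by the expression contains(needs.*.result, 'failure'); the result values are GitHub's job conclusions, and the function name is illustrative, not part of the workflow.

// Returns a process exit code for a gate job: non-zero only when a dependency failed.
type JobResult = 'success' | 'failure' | 'cancelled' | 'skipped';

function gateExitCode(results: JobResult[]): number {
  // Skipped dependencies do not block the gate, mirroring the workflow expression.
  return results.includes('failure') ? 1 : 0;
}

// Example: one needed job was skipped, the other passed.
console.log(gateExitCode(['success', 'skipped'])); // 0: gate passes
console.log(gateExitCode(['success', 'failure'])); // 1: gate fails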
@@ -375,55 +518,99 @@ jobs:
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
    runs-on: ubuntu-latest
+    permissions:
+      contents: read
     defaults:
       run:
         working-directory: ./machine-learning
     steps:
-      - uses: actions/checkout@v4
-      - name: Install poetry
-        run: pipx install poetry
-      - uses: actions/setup-python@v5
-        with:
-          python-version: 3.11
-          cache: 'poetry'
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false
+      - name: Install uv
+        uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
+      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+        # TODO: add caching when supported (https://github.com/actions/setup-python/pull/818)
+        # with:
+        #   python-version: 3.11
+        #   cache: 'uv'
       - name: Install dependencies
         run: |
-          poetry install --with dev --with cpu
+          uv sync --extra cpu
       - name: Lint with ruff
         run: |
-          poetry run ruff check --output-format=github app export
+          uv run ruff check --output-format=github immich_ml
       - name: Check black formatting
         run: |
-          poetry run black --check app export
+          uv run black --check immich_ml
       - name: Run mypy type checking
         run: |
-          poetry run mypy --install-types --non-interactive --strict app/
+          uv run mypy --strict immich_ml/
       - name: Run tests and coverage
         run: |
-          poetry run pytest app --cov=app --cov-report term-missing
+          uv run pytest --cov=immich_ml --cov-report term-missing
+
+  github-files-formatting:
+    name: .github Files Formatting
+    needs: pre-job
+    if: ${{ needs.pre-job.outputs['should_run_.github'] == 'true' }}
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+    defaults:
+      run:
+        working-directory: ./.github
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false
+
+      - name: Setup Node
+        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
+        with:
+          node-version-file: './.github/.nvmrc'
+
+      - name: Run npm install
+        run: npm ci
+
+      - name: Run formatter
+        run: npm run format
+        if: ${{ !cancelled() }}
+
   shellcheck:
     name: ShellCheck
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false

       - name: Run ShellCheck
         uses: ludeeus/action-shellcheck@master
         with:
           ignore_paths: >-
             **/open-api/**
-            **/openapi/**
+            **/openapi**
             **/node_modules/**

   generated-api-up-to-date:
     name: OpenAPI Clients
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false

       - name: Setup Node
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
           node-version-file: './server/.nvmrc'

@@ -437,7 +624,7 @@ jobs:
         run: make open-api

       - name: Find file changes
-        uses: tj-actions/verify-changed-files@v20
+        uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
         id: verify-changed-files
         with:
           files: |
@@ -447,17 +634,21 @@ jobs:

       - name: Verify files have not changed
         if: steps.verify-changed-files.outputs.files_changed == 'true'
+        env:
+          CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
         run: |
           echo "ERROR: Generated files not up to date!"
-          echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
+          echo "Changed files: ${CHANGED_FILES}"
           exit 1

-  generated-typeorm-migrations-up-to-date:
-    name: TypeORM Checks
+  sql-schema-up-to-date:
+    name: SQL Schema Checks
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
     services:
       postgres:
-        image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
+        image: ghcr.io/immich-app/postgres:14-vectorchord0.4.1
         env:
           POSTGRES_PASSWORD: postgres
           POSTGRES_USER: postgres
@@ -475,10 +666,12 @@ jobs:

     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false

       - name: Setup Node
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
         with:
           node-version-file: './server/.nvmrc'

@@ -489,27 +682,29 @@ jobs:
         run: npm run build

       - name: Run existing migrations
-        run: npm run typeorm:migrations:run
+        run: npm run migrations:run

       - name: Test npm run schema:reset command works
-        run: npm run typeorm:schema:reset
+        run: npm run schema:reset

       - name: Generate new migrations
         continue-on-error: true
-        run: npm run typeorm:migrations:generate ./src/migrations/TestMigration
+        run: npm run migrations:generate src/TestMigration

       - name: Find file changes
-        uses: tj-actions/verify-changed-files@v20
+        uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
         id: verify-changed-files
         with:
           files: |
-            server/src/migrations/
+            server/src
       - name: Verify migration files have not changed
         if: steps.verify-changed-files.outputs.files_changed == 'true'
+        env:
+          CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
         run: |
           echo "ERROR: Generated migration files not up to date!"
-          echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
-          cat ./src/migrations/*-TestMigration.ts
+          echo "Changed files: ${CHANGED_FILES}"
+          cat ./src/*-TestMigration.ts
           exit 1

       - name: Run SQL generation
@@ -518,7 +713,7 @@ jobs:
           DB_URL: postgres://postgres:postgres@localhost:5432/immich

       - name: Find file changes
-        uses: tj-actions/verify-changed-files@v20
+        uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
         id: verify-changed-sql-files
         with:
           files: |
@@ -526,9 +721,11 @@ jobs:

       - name: Verify SQL files have not changed
         if: steps.verify-changed-sql-files.outputs.files_changed == 'true'
+        env:
+          CHANGED_FILES: ${{ steps.verify-changed-sql-files.outputs.changed_files }}
         run: |
           echo "ERROR: Generated SQL files not up to date!"
-          echo "Changed files: ${{ steps.verify-changed-sql-files.outputs.changed_files }}"
+          echo "Changed files: ${CHANGED_FILES}"
           exit 1

   # mobile-integration-tests:

.github/workflows/weblate-lock.yml (new file, vendored, 61 lines)

@@ -0,0 +1,61 @@
+name: Weblate checks
+
+on:
+  pull_request:
+    branches: [main]
+
+permissions: {}
+
+jobs:
+  pre-job:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+    outputs:
+      should_run: ${{ steps.found_paths.outputs.i18n == 'true' && github.head_ref != 'chore/translations'}}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false
+      - id: found_paths
+        uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
+        with:
+          filters: |
+            i18n:
+              - 'i18n/!(en)**\.json'
+
+  enforce-lock:
+    name: Check Weblate Lock
+    needs: [pre-job]
+    runs-on: ubuntu-latest
+    permissions: {}
+    if: ${{ needs.pre-job.outputs.should_run == 'true' }}
+    steps:
+      - name: Check weblate lock
+        run: |
+          if [[ "false" = $(curl https://hosted.weblate.org/api/components/immich/immich/lock/ | jq .locked) ]]; then
+            exit 1
+          fi
+      - name: Find Pull Request
+        uses: juliangruber/find-pull-request-action@48b6133aa6c826f267ebd33aa2d29470f9d9e7d0 # v1.9.0
+        id: find-pr
+        with:
+          branch: chore/translations
+      - name: Fail if existing weblate PR
+        if: ${{ steps.find-pr.outputs.number }}
+        run: exit 1
+
+  success-check-lock:
+    name: Weblate Lock Check Success
+    needs: [enforce-lock]
+    runs-on: ubuntu-latest
+    permissions: {}
+    if: always()
+    steps:
+      - name: Any jobs failed?
+        if: ${{ contains(needs.*.result, 'failure') }}
+        run: exit 1
+      - name: All jobs passed or skipped
+        if: ${{ !(contains(needs.*.result, 'failure')) }}
+        # zizmor: ignore[template-injection]
+        run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
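The enforce-lock job in this new workflow gates translation changes on the hosted Weblate component being locked, using curl and jq against the public lock endpoint. The following is a small TypeScript sketch of the same check, assuming only the endpoint and the locked field shown in the workflow step; the function name and error handling are illustrative.

// Mirrors: curl https://hosted.weblate.org/api/components/immich/immich/lock/ | jq .locked
const WEBLATE_LOCK_URL = 'https://hosted.weblate.org/api/components/immich/immich/lock/';

async function isWeblateLocked(): Promise<boolean> {
  const response = await fetch(WEBLATE_LOCK_URL);
  if (!response.ok) {
    throw new Error(`Weblate API returned ${response.status}`);
  }
  const body = (await response.json()) as { locked: boolean };
  return body.locked;
}

// The workflow step exits 1 when the component is not locked.
if (!(await isWeblateLocked())) {
  console.error('Weblate component is not locked');
  process.exit(1);
}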

.gitignore (vendored, 1 line changed)

@@ -3,6 +3,7 @@
 .DS_Store
 .vscode/*
 !.vscode/launch.json
+!.vscode/extensions.json
 .idea

 docker/upload

.vscode/extensions.json (new file, vendored, 10 lines)

@@ -0,0 +1,10 @@
+{
+  "recommendations": [
+    "esbenp.prettier-vscode",
+    "svelte.svelte-vscode",
+    "dbaeumer.vscode-eslint",
+    "dart-code.flutter",
+    "dart-code.dart-code",
+    "dcmdev.dcm-vscode-extension"
+  ]
+}

.vscode/settings.json (vendored, 77 lines changed)

@@ -1,44 +1,63 @@
 {
-  "editor.formatOnSave": true,
-  "[javascript]": {
-    "editor.defaultFormatter": "esbenp.prettier-vscode",
-    "editor.tabSize": 2,
-    "editor.formatOnSave": true
-  },
-  "[typescript]": {
-    "editor.defaultFormatter": "esbenp.prettier-vscode",
-    "editor.tabSize": 2,
-    "editor.formatOnSave": true
-  },
   "[css]": {
     "editor.defaultFormatter": "esbenp.prettier-vscode",
-    "editor.tabSize": 2,
-    "editor.formatOnSave": true
-  },
-  "[svelte]": {
-    "editor.defaultFormatter": "svelte.svelte-vscode",
+    "editor.formatOnSave": true,
     "editor.tabSize": 2
   },
-  "svelte.enable-ts-plugin": true,
-  "eslint.validate": [
-    "javascript",
-    "svelte"
-  ],
-  "typescript.preferences.importModuleSpecifier": "non-relative",
   "[dart]": {
+    "editor.defaultFormatter": "Dart-Code.dart-code",
     "editor.formatOnSave": true,
     "editor.selectionHighlight": false,
     "editor.suggest.snippetsPreventQuickSuggestions": false,
     "editor.suggestSelection": "first",
     "editor.tabCompletion": "onlySnippets",
-    "editor.wordBasedSuggestions": "off",
-    "editor.defaultFormatter": "Dart-Code.dart-code"
+    "editor.wordBasedSuggestions": "off"
   },
-  "cSpell.words": [
-    "immich"
-  ],
+  "[javascript]": {
+    "editor.codeActionsOnSave": {
+      "source.organizeImports": "explicit",
+      "source.removeUnusedImports": "explicit"
+    },
+    "editor.defaultFormatter": "esbenp.prettier-vscode",
+    "editor.formatOnSave": true,
+    "editor.tabSize": 2
+  },
+  "[json]": {
+    "editor.defaultFormatter": "esbenp.prettier-vscode",
+    "editor.formatOnSave": true,
+    "editor.tabSize": 2
+  },
+  "[jsonc]": {
+    "editor.defaultFormatter": "esbenp.prettier-vscode",
+    "editor.formatOnSave": true,
+    "editor.tabSize": 2
+  },
+  "[svelte]": {
+    "editor.codeActionsOnSave": {
+      "source.organizeImports": "explicit",
+      "source.removeUnusedImports": "explicit"
+    },
+    "editor.defaultFormatter": "svelte.svelte-vscode",
+    "editor.formatOnSave": true,
+    "editor.tabSize": 2
+  },
+  "[typescript]": {
+    "editor.codeActionsOnSave": {
+      "source.organizeImports": "explicit",
+      "source.removeUnusedImports": "explicit"
+    },
+    "editor.defaultFormatter": "esbenp.prettier-vscode",
+    "editor.formatOnSave": true,
+    "editor.tabSize": 2
+  },
+  "cSpell.words": ["immich"],
+  "editor.formatOnSave": true,
+  "eslint.validate": ["javascript", "svelte"],
   "explorer.fileNesting.enabled": true,
   "explorer.fileNesting.patterns": {
+    "*.dart": "${capture}.g.dart,${capture}.gr.dart,${capture}.drift.dart",
     "*.ts": "${capture}.spec.ts,${capture}.mock.ts"
-  }
-}
+  },
+  "svelte.enable-ts-plugin": true,
+  "typescript.preferences.importModuleSpecifier": "non-relative"
+}

Makefile (13 lines changed)

@@ -17,6 +17,9 @@ e2e:
 prod:
 	docker compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans

+prod-down:
+	docker compose -f ./docker/docker-compose.prod.yml down --remove-orphans
+
 prod-scale:
 	docker compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans

@@ -39,7 +42,7 @@ attach-server:
 renovate:
 	LOG_LEVEL=debug npx renovate --platform=local --repository-cache=reset

-MODULES = e2e server web cli sdk docs
+MODULES = e2e server web cli sdk docs .github

 audit-%:
 	npm --prefix $(subst sdk,open-api/typescript-sdk,$*) audit fix
@@ -77,14 +80,14 @@ test-medium:
 test-medium-dev:
 	docker exec -it immich_server /bin/sh -c "npm run test:medium"

-build-all: $(foreach M,$(filter-out e2e,$(MODULES)),build-$M) ;
+build-all: $(foreach M,$(filter-out e2e .github,$(MODULES)),build-$M) ;
 install-all: $(foreach M,$(MODULES),install-$M) ;
-check-all: $(foreach M,$(filter-out sdk cli docs,$(MODULES)),check-$M) ;
+check-all: $(foreach M,$(filter-out sdk cli docs .github,$(MODULES)),check-$M) ;
-lint-all: $(foreach M,$(filter-out sdk docs,$(MODULES)),lint-$M) ;
+lint-all: $(foreach M,$(filter-out sdk docs .github,$(MODULES)),lint-$M) ;
 format-all: $(foreach M,$(filter-out sdk,$(MODULES)),format-$M) ;
 audit-all: $(foreach M,$(MODULES),audit-$M) ;
 hygiene-all: lint-all format-all check-all sql audit-all;
-test-all: $(foreach M,$(filter-out sdk docs,$(MODULES)),test-$M) ;
+test-all: $(foreach M,$(filter-out sdk docs .github,$(MODULES)),test-$M) ;

 clean:
 	find . -name "node_modules" -type d -prune -exec rm -rf '{}' +

README.md (12 lines changed)

@@ -1,11 +1,11 @@
 <p align="center">
   <br/>
   <a href="https://opensource.org/license/agpl-v3"><img src="https://img.shields.io/badge/License-AGPL_v3-blue.svg?color=3F51B5&style=for-the-badge&label=License&logoColor=000000&labelColor=ececec" alt="License: AGPLv3"></a>
   <a href="https://discord.immich.app">
     <img src="https://img.shields.io/discord/979116623879368755.svg?label=Discord&logo=Discord&style=for-the-badge&logoColor=000000&labelColor=ececec" alt="Discord"/>
   </a>
   <br/>
   <br/>
 </p>

 <p align="center">
@@ -61,9 +61,7 @@

 ## Demo

-Access the demo [here](https://demo.immich.app). The demo is running on a Free-tier Oracle VM in Amsterdam with a 2.4Ghz quad-core ARM64 CPU and 24GB RAM.
+Access the demo [here](https://demo.immich.app). For the mobile app, you can use `https://demo.immich.app` for the `Server Endpoint URL`.

-For the mobile app, you can use `https://demo.immich.app/api` for the `Server Endpoint URL`

 ### Login credentials

@@ -104,7 +102,7 @@ For the mobile app, you can use `https://demo.immich.app/api` for the `Server En
 | Read-only gallery | Yes | Yes |
 | Stacked Photos | Yes | Yes |
 | Tags | No | Yes |
-| Folder View | No | Yes |
+| Folder View | Yes | Yes |

 ## Translations


@@ -1 +1 @@
-22.14.0
+22.16.0

@@ -1,4 +1,4 @@
-FROM node:22.13.1-alpine3.20@sha256:c52e20859a92b3eccbd3a36c5e1a90adc20617d8d421d65e8a622e87b5dac963 AS core
+FROM node:22.16.0-alpine3.20@sha256:2289fb1fba0f4633b08ec47b94a89c7e20b829fc5679f9b7b298eaa2f1ed8b7e AS core

 WORKDIR /usr/src/open-api/typescript-sdk
 COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./

@@ -1,39 +1,29 @@
-import { FlatCompat } from '@eslint/eslintrc';
 import js from '@eslint/js';
-import typescriptEslint from '@typescript-eslint/eslint-plugin';
-import tsParser from '@typescript-eslint/parser';
+import eslintPluginPrettierRecommended from 'eslint-plugin-prettier/recommended';
+import eslintPluginUnicorn from 'eslint-plugin-unicorn';
 import globals from 'globals';
 import path from 'node:path';
 import { fileURLToPath } from 'node:url';
+import typescriptEslint from 'typescript-eslint';

 const __filename = fileURLToPath(import.meta.url);
 const __dirname = path.dirname(__filename);
-const compat = new FlatCompat({
-  baseDirectory: __dirname,
-  recommendedConfig: js.configs.recommended,
-  allConfig: js.configs.all,
-});

-export default [
+export default typescriptEslint.config([
+  eslintPluginUnicorn.configs.recommended,
+  eslintPluginPrettierRecommended,
+  js.configs.recommended,
+  typescriptEslint.configs.recommended,
   {
     ignores: ['eslint.config.mjs', 'dist'],
   },
-  ...compat.extends(
-    'plugin:@typescript-eslint/recommended',
-    'plugin:prettier/recommended',
-    'plugin:unicorn/recommended',
-  ),
   {
-    plugins: {
-      '@typescript-eslint': typescriptEslint,
-    },
-
     languageOptions: {
       globals: {
         ...globals.node,
       },

-      parser: tsParser,
+      parser: typescriptEslint.parser,
       ecmaVersion: 5,
       sourceType: 'module',

@@ -58,4 +48,4 @@ export default [
       'object-shorthand': ['error', 'always'],
     },
   },
-];
+]);

cli/package-lock.json (generated, 2242 lines changed; diff suppressed because it is too large)

@@ -1,6 +1,6 @@
 {
   "name": "@immich/cli",
-  "version": "2.2.51",
+  "version": "2.2.68",
   "description": "Command Line Interface (CLI) for Immich",
   "type": "module",
   "exports": "./dist/index.js",
@@ -19,10 +19,9 @@
     "@types/byte-size": "^8.1.0",
     "@types/cli-progress": "^3.11.0",
     "@types/lodash-es": "^4.17.12",
+    "@types/micromatch": "^4.0.9",
     "@types/mock-fs": "^4.13.1",
-    "@types/node": "^22.13.4",
+    "@types/node": "^22.15.21",
-    "@typescript-eslint/eslint-plugin": "^8.15.0",
-    "@typescript-eslint/parser": "^8.15.0",
     "@vitest/coverage-v8": "^3.0.0",
     "byte-size": "^9.0.0",
     "cli-progress": "^3.12.0",
@@ -30,12 +29,13 @@
     "eslint": "^9.14.0",
     "eslint-config-prettier": "^10.0.0",
     "eslint-plugin-prettier": "^5.1.3",
-    "eslint-plugin-unicorn": "^56.0.1",
+    "eslint-plugin-unicorn": "^57.0.0",
-    "globals": "^15.9.0",
+    "globals": "^16.0.0",
     "mock-fs": "^5.2.0",
     "prettier": "^3.2.5",
     "prettier-plugin-organize-imports": "^4.0.0",
     "typescript": "^5.3.3",
+    "typescript-eslint": "^8.28.0",
     "vite": "^6.0.0",
     "vite-tsconfig-paths": "^5.0.0",
     "vitest": "^3.0.0",
@@ -62,11 +62,13 @@
     "node": ">=20.0.0"
   },
   "dependencies": {
+    "chokidar": "^4.0.3",
     "fast-glob": "^3.3.2",
     "fastq": "^1.17.1",
-    "lodash-es": "^4.17.21"
+    "lodash-es": "^4.17.21",
+    "micromatch": "^4.0.8"
   },
   "volta": {
-    "node": "22.14.0"
+    "node": "22.16.0"
   }
 }

@@ -1,12 +1,13 @@
 import * as fs from 'node:fs';
 import * as os from 'node:os';
 import * as path from 'node:path';
-import { describe, expect, it, vi } from 'vitest';
+import { setTimeout as sleep } from 'node:timers/promises';
+import { describe, expect, it, MockedFunction, vi } from 'vitest';

-import { Action, checkBulkUpload, defaults, Reason } from '@immich/sdk';
+import { Action, checkBulkUpload, defaults, getSupportedMediaTypes, Reason } from '@immich/sdk';
 import createFetchMock from 'vitest-fetch-mock';

-import { checkForDuplicates, getAlbumName, uploadFiles, UploadOptionsDto } from './asset';
+import { checkForDuplicates, getAlbumName, startWatch, uploadFiles, UploadOptionsDto } from 'src/commands/asset';

 vi.mock('@immich/sdk');

@@ -199,3 +200,112 @@ describe('checkForDuplicates', () => {
     });
   });
 });
+
+describe('startWatch', () => {
+  let testFolder: string;
+  let checkBulkUploadMocked: MockedFunction<typeof checkBulkUpload>;
+
+  beforeEach(async () => {
+    vi.restoreAllMocks();
+
+    vi.mocked(getSupportedMediaTypes).mockResolvedValue({
+      image: ['.jpg'],
+      sidecar: ['.xmp'],
+      video: ['.mp4'],
+    });
+
+    testFolder = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'test-startWatch-'));
+    checkBulkUploadMocked = vi.mocked(checkBulkUpload);
+    checkBulkUploadMocked.mockResolvedValue({
+      results: [],
+    });
+  });
+
+  it('should start watching a directory and upload new files', async () => {
+    const testFilePath = path.join(testFolder, 'test.jpg');
+
+    await startWatch([testFolder], { concurrency: 1 }, { batchSize: 1, debounceTimeMs: 10 });
+    await sleep(100); // to debounce the watcher from considering the test file as a existing file
+    await fs.promises.writeFile(testFilePath, 'testjpg');
+
+    await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
+    expect(checkBulkUpload).toHaveBeenCalledWith({
+      assetBulkUploadCheckDto: {
+        assets: [
+          expect.objectContaining({
+            id: testFilePath,
+          }),
+        ],
+      },
+    });
+  });
+
+  it('should filter out unsupported files', async () => {
+    const testFilePath = path.join(testFolder, 'test.jpg');
+    const unsupportedFilePath = path.join(testFolder, 'test.txt');
+
+    await startWatch([testFolder], { concurrency: 1 }, { batchSize: 1, debounceTimeMs: 10 });
+    await sleep(100); // to debounce the watcher from considering the test file as a existing file
+    await fs.promises.writeFile(testFilePath, 'testjpg');
+    await fs.promises.writeFile(unsupportedFilePath, 'testtxt');
+
+    await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
+    expect(checkBulkUpload).toHaveBeenCalledWith({
+      assetBulkUploadCheckDto: {
+        assets: expect.arrayContaining([
+          expect.objectContaining({
+            id: testFilePath,
+          }),
+        ]),
+      },
+    });
+
+    expect(checkBulkUpload).not.toHaveBeenCalledWith({
+      assetBulkUploadCheckDto: {
+        assets: expect.arrayContaining([
+          expect.objectContaining({
+            id: unsupportedFilePath,
+          }),
+        ]),
+      },
+    });
+  });
+
+  it('should filger out ignored patterns', async () => {
+    const testFilePath = path.join(testFolder, 'test.jpg');
+    const ignoredPattern = 'ignored';
+    const ignoredFolder = path.join(testFolder, ignoredPattern);
+    await fs.promises.mkdir(ignoredFolder, { recursive: true });
+    const ignoredFilePath = path.join(ignoredFolder, 'ignored.jpg');
+
+    await startWatch([testFolder], { concurrency: 1, ignore: ignoredPattern }, { batchSize: 1, debounceTimeMs: 10 });
+    await sleep(100); // to debounce the watcher from considering the test file as a existing file
+    await fs.promises.writeFile(testFilePath, 'testjpg');
+    await fs.promises.writeFile(ignoredFilePath, 'ignoredjpg');
+
+    await vi.waitUntil(() => checkBulkUploadMocked.mock.calls.length > 0, 3000);
+    expect(checkBulkUpload).toHaveBeenCalledWith({
+      assetBulkUploadCheckDto: {
+        assets: expect.arrayContaining([
+          expect.objectContaining({
+            id: testFilePath,
+          }),
+        ]),
+      },
+    });
+
+    expect(checkBulkUpload).not.toHaveBeenCalledWith({
+      assetBulkUploadCheckDto: {
+        assets: expect.arrayContaining([
+          expect.objectContaining({
+            id: ignoredFilePath,
+          }),
+        ]),
+      },
+    });
+  });
+
+  afterEach(async () => {
+    await fs.promises.rm(testFolder, { recursive: true, force: true });
+  });
+});
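The new startWatch tests above call the watcher directly with a batch size of 1 and a 10 ms debounce so that a single file write flushes immediately. Outside the test harness the same entry point would be driven roughly as in the sketch below; the paths, option values, and the cast are illustrative, since only part of UploadOptionsDto is visible in this diff.

import { startWatch, UploadOptionsDto } from 'src/commands/asset';

// Only the fields exercised by the new watch code are set here; the cast skips
// DTO fields that are not shown in this diff.
const options = {
  concurrency: 3,
  ignore: '**/tmp/**',
  progress: false,
  watch: true,
} as UploadOptionsDto;

// The CLI defaults are 100 files per batch and a 10 s debounce
// (UPLOAD_WATCH_BATCH_SIZE / UPLOAD_WATCH_DEBOUNCE_TIME_MS); smaller values flush sooner.
await startWatch(['./photos'], options, { batchSize: 10, debounceTimeMs: 1_000 });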
@@ -12,13 +12,18 @@ import {
   getSupportedMediaTypes,
 } from '@immich/sdk';
 import byteSize from 'byte-size';
+import { Matcher, watch as watchFs } from 'chokidar';
 import { MultiBar, Presets, SingleBar } from 'cli-progress';
 import { chunk } from 'lodash-es';
+import micromatch from 'micromatch';
 import { Stats, createReadStream } from 'node:fs';
 import { stat, unlink } from 'node:fs/promises';
 import path, { basename } from 'node:path';
 import { Queue } from 'src/queue';
-import { BaseOptions, authenticate, crawl, sha1 } from 'src/utils';
+import { BaseOptions, Batcher, authenticate, crawl, sha1 } from 'src/utils';
+
+const UPLOAD_WATCH_BATCH_SIZE = 100;
+const UPLOAD_WATCH_DEBOUNCE_TIME_MS = 10_000;

 const s = (count: number) => (count === 1 ? '' : 's');

@@ -36,6 +41,8 @@ export interface UploadOptionsDto {
   albumName?: string;
   includeHidden?: boolean;
   concurrency: number;
+  progress?: boolean;
+  watch?: boolean;
 }

 class UploadFile extends File {
@@ -55,19 +62,94 @@ class UploadFile extends File {
   }
 }

+const uploadBatch = async (files: string[], options: UploadOptionsDto) => {
+  const { newFiles, duplicates } = await checkForDuplicates(files, options);
+  const newAssets = await uploadFiles(newFiles, options);
+  await updateAlbums([...newAssets, ...duplicates], options);
+  await deleteFiles(newFiles, options);
+};
+
+export const startWatch = async (
+  paths: string[],
+  options: UploadOptionsDto,
+  {
+    batchSize = UPLOAD_WATCH_BATCH_SIZE,
+    debounceTimeMs = UPLOAD_WATCH_DEBOUNCE_TIME_MS,
+  }: { batchSize?: number; debounceTimeMs?: number } = {},
+) => {
+  const watcherIgnored: Matcher[] = [];
+  const { image, video } = await getSupportedMediaTypes();
+  const extensions = new Set([...image, ...video]);
+
+  if (options.ignore) {
+    watcherIgnored.push((path) => micromatch.contains(path, `**/${options.ignore}`));
+  }
+
+  const pathsBatcher = new Batcher<string>({
+    batchSize,
+    debounceTimeMs,
+    onBatch: async (paths: string[]) => {
+      const uniquePaths = [...new Set(paths)];
+      await uploadBatch(uniquePaths, options);
+    },
+  });
+
+  const onFile = async (path: string, stats?: Stats) => {
+    if (stats?.isDirectory()) {
+      return;
+    }
+    const ext = '.' + path.split('.').pop()?.toLowerCase();
+    if (!ext || !extensions.has(ext)) {
+      return;
+    }
+
+    if (!options.progress) {
+      // logging when progress is disabled as it can cause issues with the progress bar rendering
+      console.log(`Change detected: ${path}`);
+    }
+    pathsBatcher.add(path);
+  };
+  const fsWatcher = watchFs(paths, {
+    ignoreInitial: true,
+    ignored: watcherIgnored,
+    alwaysStat: true,
+    awaitWriteFinish: true,
+    depth: options.recursive ? undefined : 1,
+    persistent: true,
+  })
+    .on('add', onFile)
+    .on('change', onFile)
+    .on('error', (error) => console.error(`Watcher error: ${error}`));
+
+  process.on('SIGINT', async () => {
+    console.log('Exiting...');
+    await fsWatcher.close();
+    process.exit();
+  });
+};
+
 export const upload = async (paths: string[], baseOptions: BaseOptions, options: UploadOptionsDto) => {
   await authenticate(baseOptions);

   const scanFiles = await scan(paths, options);

   if (scanFiles.length === 0) {
-    console.log('No files found, exiting');
-    return;
+    if (options.watch) {
+      console.log('No files found initially.');
+    } else {
+      console.log('No files found, exiting');
+      return;
+    }
   }

-  const { newFiles, duplicates } = await checkForDuplicates(scanFiles, options);
-  const newAssets = await uploadFiles(newFiles, options);
-  await updateAlbums([...newAssets, ...duplicates], options);
-  await deleteFiles(newFiles, options);
+  if (options.watch) {
+    console.log('Watching for changes...');
+    await startWatch(paths, options);
+    // watcher does not handle the initial scan
+    // as the scan() is a more efficient quick start with batched results
+  }
+
+  await uploadBatch(scanFiles, options);
 };

 const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
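startWatch above delegates debouncing and batching to a Batcher imported from src/utils, whose implementation is not part of this diff. The following is a minimal sketch of a helper with the same surface (batchSize, debounceTimeMs, onBatch, add), written as an assumption about its behaviour rather than the actual implementation.

// Collects items and flushes them to onBatch either when batchSize is reached
// or after debounceTimeMs of inactivity, matching how startWatch uses it.
export class Batcher<T> {
  private items: T[] = [];
  private timer?: ReturnType<typeof setTimeout>;

  constructor(
    private readonly options: {
      batchSize: number;
      debounceTimeMs: number;
      onBatch: (items: T[]) => Promise<void> | void;
    },
  ) {}

  add(item: T): void {
    this.items.push(item);
    if (this.items.length >= this.options.batchSize) {
      void this.flush();
      return;
    }
    // Restart the debounce window on every new item.
    if (this.timer) {
      clearTimeout(this.timer);
    }
    this.timer = setTimeout(() => void this.flush(), this.options.debounceTimeMs);
  }

  private async flush(): Promise<void> {
    if (this.timer) {
      clearTimeout(this.timer);
      this.timer = undefined;
    }
    const batch = this.items;
    this.items = [];
    if (batch.length > 0) {
      await this.options.onBatch(batch);
    }
  }
}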
@@ -85,19 +167,25 @@ const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
|
|||||||
return files;
|
return files;
|
||||||
};
|
};
|
||||||
|
|
||||||
-export const checkForDuplicates = async (files: string[], { concurrency, skipHash }: UploadOptionsDto) => {
+export const checkForDuplicates = async (files: string[], { concurrency, skipHash, progress }: UploadOptionsDto) => {
   if (skipHash) {
     console.log('Skipping hash check, assuming all files are new');
     return { newFiles: files, duplicates: [] };
   }

-  const multiBar = new MultiBar(
-    { format: '{message} | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
-    Presets.shades_classic,
-  );
+  let multiBar: MultiBar | undefined;

-  const hashProgressBar = multiBar.create(files.length, 0, { message: 'Hashing files ' });
-  const checkProgressBar = multiBar.create(files.length, 0, { message: 'Checking for duplicates' });
+  if (progress) {
+    multiBar = new MultiBar(
+      { format: '{message} | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
+      Presets.shades_classic,
+    );
+  } else {
+    console.log(`Received ${files.length} files, hashing...`);
+  }
+
+  const hashProgressBar = multiBar?.create(files.length, 0, { message: 'Hashing files ' });
+  const checkProgressBar = multiBar?.create(files.length, 0, { message: 'Checking for duplicates' });

   const newFiles: string[] = [];
   const duplicates: Asset[] = [];
@@ -117,7 +205,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
       }
     }

-    checkProgressBar.increment(assets.length);
+    checkProgressBar?.increment(assets.length);
   },
   { concurrency, retry: 3 },
 );
@@ -137,7 +225,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
       void checkBulkUploadQueue.push(batch);
     }

-    hashProgressBar.increment();
+    hashProgressBar?.increment();
     return results;
   },
   { concurrency, retry: 3 },
@@ -155,7 +243,7 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas

   await checkBulkUploadQueue.drained();

-  multiBar.stop();
+  multiBar?.stop();

   console.log(`Found ${newFiles.length} new files and ${duplicates.length} duplicate${s(duplicates.length)}`);

@@ -171,7 +259,10 @@ export const checkForDuplicates = async (files: string[], { concurrency, skipHas
   return { newFiles, duplicates };
 };

-export const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptionsDto): Promise<Asset[]> => {
+export const uploadFiles = async (
+  files: string[],
+  { dryRun, concurrency, progress }: UploadOptionsDto,
+): Promise<Asset[]> => {
   if (files.length === 0) {
     console.log('All assets were already uploaded, nothing to do.');
     return [];
@@ -191,12 +282,20 @@ export const uploadFiles = async (files: string[], { dryRun, concurrency }: Uplo
     return files.map((filepath) => ({ id: '', filepath }));
   }

-  const uploadProgress = new SingleBar(
-    { format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}' },
-    Presets.shades_classic,
-  );
-  uploadProgress.start(totalSize, 0);
-  uploadProgress.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });
+  let uploadProgress: SingleBar | undefined;
+  if (progress) {
+    uploadProgress = new SingleBar(
+      {
+        format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}',
+      },
+      Presets.shades_classic,
+    );
+  } else {
+    console.log(`Uploading ${files.length} asset${s(files.length)} (${byteSize(totalSize)})`);
+  }
+  uploadProgress?.start(totalSize, 0);
+  uploadProgress?.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });

   let duplicateCount = 0;
   let duplicateSize = 0;
@@ -222,7 +321,7 @@ export const uploadFiles = async (files: string[], { dryRun, concurrency }: Uplo
       successSize += stats.size ?? 0;
     }

-    uploadProgress.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });
+    uploadProgress?.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });

     return response;
   },
@@ -235,7 +334,7 @@ export const uploadFiles = async (files: string[], { dryRun, concurrency }: Uplo

   await queue.drained();

-  uploadProgress.stop();
+  uploadProgress?.stop();

   console.log(`Successfully uploaded ${successCount} new asset${s(successCount)} (${byteSize(successSize)})`);
   if (duplicateCount > 0) {
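The pattern used throughout these two functions is to make the `cli-progress` bar optional (`MultiBar | undefined` / `SingleBar | undefined`) and to reach it only through optional chaining, so the no-progress path needs no extra branching at each call site. A minimal standalone sketch of the idea, using the same `cli-progress` primitives (the helper name and format string are illustrative only):

```typescript
import { Presets, SingleBar } from 'cli-progress';

// Sketch: create the bar only when progress output is wanted; every call site
// goes through `?.` so it becomes a no-op when the bar was never created.
const makeProgress = (total: number, progress: boolean) => {
  let bar: SingleBar | undefined;
  if (progress) {
    bar = new SingleBar({ format: '{bar} | {value}/{total}' }, Presets.shades_classic);
    bar.start(total, 0);
  } else {
    console.log(`Processing ${total} items...`);
  }
  return {
    tick: () => bar?.increment(),
    done: () => bar?.stop(),
  };
};

// Usage: identical code path whether or not a bar exists.
const p = makeProgress(3, false);
p.tick();
p.done();
```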
@@ -69,6 +69,13 @@ program
       .default(4),
   )
   .addOption(new Option('--delete', 'Delete local assets after upload').env('IMMICH_DELETE_ASSETS'))
+  .addOption(new Option('--no-progress', 'Hide progress bars').env('IMMICH_PROGRESS_BAR').default(true))
+  .addOption(
+    new Option('--watch', 'Watch for changes and upload automatically')
+      .env('IMMICH_WATCH_CHANGES')
+      .default(false)
+      .implies({ progress: false }),
+  )
   .argument('[paths...]', 'One or more paths to assets to be uploaded')
   .action((paths, options) => upload(paths, program.opts(), options));

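The `--watch` option relies on commander's `Option.implies()` so that enabling watch mode also turns the progress bar off by default. A small sketch of that interaction, assuming stock commander behaviour (the option names mirror the ones above; the rest is illustrative):

```typescript
import { Option, program } from 'commander';

program
  .addOption(new Option('--no-progress', 'Hide progress bars').default(true))
  .addOption(new Option('--watch', 'Watch for changes').default(false).implies({ progress: false }));

// Passing --watch alone should also set progress to false via implies(),
// without the user having to type --no-progress explicitly.
program.parse(['--watch'], { from: 'user' });
console.log(program.opts()); // expected: { progress: false, watch: true }
```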
@@ -1,6 +1,7 @@
 import mockfs from 'mock-fs';
 import { readFileSync } from 'node:fs';
-import { CrawlOptions, crawl } from 'src/utils';
+import { Batcher, CrawlOptions, crawl } from 'src/utils';
+import { Mock } from 'vitest';

 interface Test {
   test: string;
@@ -303,3 +304,38 @@ describe('crawl', () => {
     }
   });
 });
+
+describe('Batcher', () => {
+  let batcher: Batcher;
+  let onBatch: Mock;
+  beforeEach(() => {
+    onBatch = vi.fn();
+    batcher = new Batcher({ batchSize: 2, onBatch });
+  });
+
+  it('should trigger onBatch() when a batch limit is reached', async () => {
+    batcher.add('a');
+    batcher.add('b');
+    batcher.add('c');
+    expect(onBatch).toHaveBeenCalledOnce();
+    expect(onBatch).toHaveBeenCalledWith(['a', 'b']);
+  });
+
+  it('should trigger onBatch() when flush() is called', async () => {
+    batcher.add('a');
+    batcher.flush();
+    expect(onBatch).toHaveBeenCalledOnce();
+    expect(onBatch).toHaveBeenCalledWith(['a']);
+  });
+
+  it('should trigger onBatch() when debounce time reached', async () => {
+    vi.useFakeTimers();
+    batcher = new Batcher({ batchSize: 2, debounceTimeMs: 100, onBatch });
+    batcher.add('a');
+    expect(onBatch).not.toHaveBeenCalled();
+    vi.advanceTimersByTime(200);
+    expect(onBatch).toHaveBeenCalledOnce();
+    expect(onBatch).toHaveBeenCalledWith(['a']);
+    vi.useRealTimers();
+  });
+});
@@ -172,3 +172,64 @@ export const sha1 = (filepath: string) => {
     rs.on('end', () => resolve(hash.digest('hex')));
   });
 };
+
+/**
+ * Batches items and calls onBatch to process them
+ * when the batch size is reached or the debounce time has passed.
+ */
+export class Batcher<T = unknown> {
+  private items: T[] = [];
+  private readonly batchSize: number;
+  private readonly debounceTimeMs?: number;
+  private readonly onBatch: (items: T[]) => void;
+  private debounceTimer?: NodeJS.Timeout;
+
+  constructor({
+    batchSize,
+    debounceTimeMs,
+    onBatch,
+  }: {
+    batchSize: number;
+    debounceTimeMs?: number;
+    onBatch: (items: T[]) => Promise<void>;
+  }) {
+    this.batchSize = batchSize;
+    this.debounceTimeMs = debounceTimeMs;
+    this.onBatch = onBatch;
+  }
+
+  private setDebounceTimer() {
+    if (this.debounceTimer) {
+      clearTimeout(this.debounceTimer);
+    }
+    if (this.debounceTimeMs) {
+      this.debounceTimer = setTimeout(() => this.flush(), this.debounceTimeMs);
+    }
+  }
+
+  private clearDebounceTimer() {
+    if (this.debounceTimer) {
+      clearTimeout(this.debounceTimer);
+      this.debounceTimer = undefined;
+    }
+  }
+
+  add(item: T) {
+    this.items.push(item);
+    this.setDebounceTimer();
+    if (this.items.length >= this.batchSize) {
+      this.flush();
+    }
+  }
+
+  flush() {
+    this.clearDebounceTimer();
+    if (this.items.length === 0) {
+      return;
+    }
+
+    this.onBatch(this.items);
+
+    this.items = [];
+  }
+}
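As a usage sketch, the watch command can feed this class with file paths from the filesystem watcher so uploads happen in chunks rather than one request per change. The batch size and debounce values below are illustrative, not the ones used by the CLI:

```typescript
// Illustrative only: group watcher events into one onBatch() call per
// 5 paths, or after 1s of quiet time, whichever comes first.
const pathsBatcher = new Batcher<string>({
  batchSize: 5,
  debounceTimeMs: 1000,
  onBatch: async (paths) => {
    console.log(`Uploading a batch of ${paths.length} changed file(s)`);
  },
});

for (const path of ['a.jpg', 'b.jpg', 'c.jpg']) {
  pathsBatcher.add(path); // each add() resets the 1s debounce timer
}
pathsBatcher.flush(); // delivers the pending ['a', 'b', 'c'] immediately
```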
@@ -16,7 +16,7 @@ name: immich-dev
 services:
   immich-server:
     container_name: immich_server
-    command: ['/usr/src/app/bin/immich-dev']
+    command: [ '/usr/src/app/bin/immich-dev' ]
     image: immich-server-dev:latest
     # extends:
     #   file: hwaccel.transcoding.yml
@@ -25,7 +25,7 @@ services:
       context: ../
       dockerfile: server/Dockerfile
       target: dev
-    restart: always
+    restart: unless-stopped
     volumes:
       - ../server:/usr/src/app
       - ../open-api:/usr/src/open-api
@@ -48,7 +48,7 @@ services:
       IMMICH_THIRD_PARTY_SOURCE_URL: https://github.com/immich-app/immich/
       IMMICH_THIRD_PARTY_BUG_FEATURE_URL: https://github.com/immich-app/immich/issues
       IMMICH_THIRD_PARTY_DOCUMENTATION_URL: https://immich.app/docs
-      IMMICH_THIRD_PARTY_SUPPORT_URL: https://immich.app/docs/third-party
+      IMMICH_THIRD_PARTY_SUPPORT_URL: https://immich.app/docs/community-guides
     ulimits:
       nofile:
         soft: 1048576
@@ -70,7 +70,7 @@ services:
     # user: 0:0
     build:
       context: ../web
-    command: ['/usr/src/app/bin/immich-web']
+    command: [ '/usr/src/app/bin/immich-web' ]
     env_file:
       - .env
     ports:
@@ -95,12 +95,12 @@ services:
     image: immich-machine-learning-dev:latest
     # extends:
    #   file: hwaccel.ml.yml
-    #   service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
+    #   service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference
     build:
       context: ../machine-learning
       dockerfile: Dockerfile
       args:
-        - DEVICE=cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
+        - DEVICE=cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference
     ports:
       - 3003:3003
     volumes:
@@ -116,13 +116,13 @@ services:

   redis:
     container_name: immich_redis
-    image: redis:6.2-alpine@sha256:148bb5411c184abd288d9aaed139c98123eeb8824c5d3fce03cf721db58066d8
+    image: docker.io/valkey/valkey:8-bookworm@sha256:ff21bc0f8194dc9c105b769aeabf9585fea6a8ed649c0781caeac5cb3c247884
     healthcheck:
       test: redis-cli ping || exit 1

   database:
     container_name: immich_postgres
-    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
+    image: ghcr.io/immich-app/postgres:14-vectorchord0.4.1-pgvectors0.2.0
     env_file:
       - .env
     environment:
@@ -134,25 +134,6 @@ services:
       - ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
     ports:
       - 5432:5432
-    healthcheck:
-      test: >-
-        pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;
-        Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align
-        --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";
-        echo "checksum failure count is $$Chksum";
-        [ "$$Chksum" = '0' ] || exit 1
-      interval: 5m
-      start_interval: 30s
-      start_period: 5m
-    command: >-
-      postgres
-      -c shared_preload_libraries=vectors.so
-      -c 'search_path="$$user", public, vectors'
-      -c logging_collector=on
-      -c max_wal_size=2GB
-      -c shared_buffers=512MB
-      -c wal_compression=on

   # set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
   # immich-prometheus:
   #   container_name: immich_prometheus
@@ -38,12 +38,12 @@ services:
     image: immich-machine-learning:latest
     # extends:
     #   file: hwaccel.ml.yml
-    #   service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
+    #   service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference
     build:
       context: ../machine-learning
       dockerfile: Dockerfile
       args:
-        - DEVICE=cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference
+        - DEVICE=cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference
     ports:
       - 3003:3003
     volumes:
@@ -56,14 +56,14 @@ services:

   redis:
     container_name: immich_redis
-    image: redis:6.2-alpine@sha256:148bb5411c184abd288d9aaed139c98123eeb8824c5d3fce03cf721db58066d8
+    image: docker.io/valkey/valkey:8-bookworm@sha256:ff21bc0f8194dc9c105b769aeabf9585fea6a8ed649c0781caeac5cb3c247884
     healthcheck:
       test: redis-cli ping || exit 1
     restart: always

   database:
     container_name: immich_postgres
-    image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
+    image: ghcr.io/immich-app/postgres:14-vectorchord0.4.1-pgvectors0.2.0
     env_file:
       - .env
     environment:
@@ -75,24 +75,6 @@ services:
       - ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
     ports:
       - 5432:5432
-    healthcheck:
-      test: >-
-        pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;
-        Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align
-        --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";
-        echo "checksum failure count is $$Chksum";
-        [ "$$Chksum" = '0' ] || exit 1
-      interval: 5m
-      start_interval: 30s
-      start_period: 5m
-    command: >-
-      postgres
-      -c shared_preload_libraries=vectors.so
-      -c 'search_path="$$user", public, vectors'
-      -c logging_collector=on
-      -c max_wal_size=2GB
-      -c shared_buffers=512MB
-      -c wal_compression=on
     restart: always

   # set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
@@ -100,7 +82,7 @@ services:
     container_name: immich_prometheus
     ports:
       - 9090:9090
-    image: prom/prometheus@sha256:5888c188cf09e3f7eebc97369c3b2ce713e844cdbd88ccf36f5047c958aea120
+    image: prom/prometheus@sha256:78ed1f9050eb9eaf766af6e580230b1c4965728650e332cd1ee918c0c4699775
     volumes:
       - ./prometheus.yml:/etc/prometheus/prometheus.yml
       - prometheus-data:/prometheus
@@ -109,10 +91,10 @@ services:
   # add data source for http://immich-prometheus:9090 to get started
   immich-grafana:
     container_name: immich_grafana
-    command: ['./run.sh', '-disable-reporting']
+    command: [ './run.sh', '-disable-reporting' ]
     ports:
       - 3000:3000
-    image: grafana/grafana:11.5.1-ubuntu@sha256:9a4ab78cec1a2ec7d1ca5dfd5aacec6412706a1bc9e971fc7184e2f6696a63f5
+    image: grafana/grafana:11.6.1-ubuntu@sha256:6fc273288470ef499dd3c6b36aeade093170d4f608f864c5dd3a7fabeae77b50
     volumes:
       - grafana-data:/var/lib/grafana

@@ -33,12 +33,12 @@ services:

   immich-machine-learning:
     container_name: immich_machine_learning
-    # For hardware acceleration, add one of -[armnn, cuda, openvino] to the image tag.
+    # For hardware acceleration, add one of -[armnn, cuda, rocm, openvino, rknn] to the image tag.
     # Example tag: ${IMMICH_VERSION:-release}-cuda
     image: ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release}
     # extends: # uncomment this section for hardware acceleration - see https://immich.app/docs/features/ml-hardware-acceleration
     #   file: hwaccel.ml.yml
-    #   service: cpu # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference - use the `-wsl` version for WSL2 where applicable
+    #   service: cpu # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference - use the `-wsl` version for WSL2 where applicable
     volumes:
       - model-cache:/cache
     env_file:
@@ -49,40 +49,24 @@ services:

   redis:
     container_name: immich_redis
-    image: docker.io/redis:6.2-alpine@sha256:148bb5411c184abd288d9aaed139c98123eeb8824c5d3fce03cf721db58066d8
+    image: docker.io/valkey/valkey:8-bookworm@sha256:ff21bc0f8194dc9c105b769aeabf9585fea6a8ed649c0781caeac5cb3c247884
     healthcheck:
       test: redis-cli ping || exit 1
     restart: always

   database:
     container_name: immich_postgres
-    image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
+    image: ghcr.io/immich-app/postgres:14-vectorchord0.4.1-pgvectors0.2.0
     environment:
       POSTGRES_PASSWORD: ${DB_PASSWORD}
       POSTGRES_USER: ${DB_USERNAME}
       POSTGRES_DB: ${DB_DATABASE_NAME}
       POSTGRES_INITDB_ARGS: '--data-checksums'
+      # Uncomment the DB_STORAGE_TYPE: 'HDD' var if your database isn't stored on SSDs
+      # DB_STORAGE_TYPE: 'HDD'
     volumes:
       # Do not edit the next line. If you want to change the database storage location on your system, edit the value of DB_DATA_LOCATION in the .env file
       - ${DB_DATA_LOCATION}:/var/lib/postgresql/data
-    healthcheck:
-      test: >-
-        pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;
-        Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align
-        --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";
-        echo "checksum failure count is $$Chksum";
-        [ "$$Chksum" = '0' ] || exit 1
-      interval: 5m
-      start_interval: 30s
-      start_period: 5m
-    command: >-
-      postgres
-      -c shared_preload_libraries=vectors.so
-      -c 'search_path="$$user", public, vectors'
-      -c logging_collector=on
-      -c max_wal_size=2GB
-      -c shared_buffers=512MB
-      -c wal_compression=on
     restart: always

 volumes:
@@ -2,7 +2,8 @@

 # The location where your uploaded files are stored
 UPLOAD_LOCATION=./library
-# The location where your database files are stored
+# The location where your database files are stored. Network shares are not supported for the database
 DB_DATA_LOCATION=./postgres

 # To set a timezone, uncomment the next line and change Etc/UTC to a TZ identifier from this list: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List
@@ -13,6 +13,13 @@ services:
     volumes:
       - /lib/firmware/mali_csffw.bin:/lib/firmware/mali_csffw.bin:ro # Mali firmware for your chipset (not always required depending on the driver)
      - /usr/lib/libmali.so:/usr/lib/libmali.so:ro # Mali driver for your chipset (always required)

+  rknn:
+    security_opt:
+      - systempaths=unconfined
+      - apparmor=unconfined
+    devices:
+      - /dev/dri:/dev/dri
+
   cpu: {}

@@ -26,6 +33,13 @@ services:
           capabilities:
             - gpu

+  rocm:
+    group_add:
+      - video
+    devices:
+      - /dev/dri:/dev/dri
+      - /dev/kfd:/dev/kfd
+
   openvino:
     device_cgroup_rules:
       - 'c 189:* rmw'
@@ -1 +1 @@
-22.14.0
+22.16.0
@@ -117,7 +117,7 @@ See [Backup and Restore](/docs/administration/backup-and-restore.md).

 ### Does Immich support reading existing face tag metadata?

-No, it currently does not. There is an [open feature request on GitHub](https://github.com/immich-app/immich/discussions/4348).
+Yes, it creates new faces and persons from the imported asset metadata. For details see the [feature request #4348](https://github.com/immich-app/immich/discussions/4348) and [PR #6455](https://github.com/immich-app/immich/pull/6455).

 ### Does Immich support the filtering of NSFW images?

@@ -262,7 +262,7 @@ No, this is not supported. Only models listed in the [Hugging Face][huggingface]

 ### I want to be able to search in other languages besides English. How can I do that?

-You can change to a multilingual CLIP model. See [here](/docs/features/searching#clip-model) for instructions.
+You can change to a multilingual CLIP model. See [here](/docs/features/searching#clip-models) for instructions.

 ### Does Immich support Facial Recognition for videos?

@@ -23,16 +23,32 @@ Refer to the official [postgres documentation](https://www.postgresql.org/docs/c
 It is not recommended to directly backup the `DB_DATA_LOCATION` folder. Doing so while the database is running can lead to a corrupted backup that cannot be restored.
 :::

-### Automatic Database Backups
+### Automatic Database Dumps

-For convenience, Immich will automatically create database backups by default. The backups are stored in `UPLOAD_LOCATION/backups`.
-As mentioned above, you should make your own backup of these together with the asset folders as noted below.
-You can adjust the schedule and amount of kept backups in the [admin settings](http://my.immich.app/admin/system-settings?isOpen=backup).
-By default, Immich will keep the last 14 backups and create a new backup every day at 2:00 AM.
+:::warning
+The automatic database dumps can be used to restore the database in the event of damage to the Postgres database files.
+There is no monitoring for these dumps and you will not be notified if they are unsuccessful.
+:::
+
+:::caution
+The database dumps do **NOT** contain any pictures or videos, only metadata. They are only usable with a copy of the other files in `UPLOAD_LOCATION` as outlined below.
+:::
+
+For disaster-recovery purposes, Immich will automatically create database dumps. The dumps are stored in `UPLOAD_LOCATION/backups`.
+Please be sure to make your own, independent backup of the database together with the asset folders as noted below.
+You can adjust the schedule and amount of kept database dumps in the [admin settings](http://my.immich.app/admin/system-settings?isOpen=backup).
+By default, Immich will keep the last 14 database dumps and create a new dump every day at 2:00 AM.
+
+#### Trigger Dump
+
+You are able to trigger a database dump in the [admin job status page](http://my.immich.app/admin/jobs-status).
+Visit the page, open the "Create job" modal from the top right, select "Create Database Dump" and click "Confirm".
+A job will run and trigger a dump, you can verify this worked correctly by checking the logs or the `backups/` folder.
+This dumps will count towards the last `X` dumps that will be kept based on your settings.

 #### Restoring

-We hope to make restoring simpler in future versions, for now you can find the backups in the `UPLOAD_LOCATION/backups` folder on your host.
+We hope to make restoring simpler in future versions, for now you can find the database dumps in the `UPLOAD_LOCATION/backups` folder on your host.
 Then please follow the steps in the following section for restoring the database.

 ### Manual Backup and Restore
@@ -53,7 +69,7 @@ docker compose create # Create Docker containers for Immich apps witho
 docker start immich_postgres # Start Postgres server
 sleep 10 # Wait for Postgres server to start up
 # Check the database user if you deviated from the default
-gunzip < "/path/to/backup/dump.sql.gz" \
+gunzip --stdout "/path/to/backup/dump.sql.gz" \
 | sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" \
 | docker exec -i immich_postgres psql --dbname=postgres --username=<DB_USERNAME> # Restore Backup
 docker compose up -d # Start remainder of Immich apps
@@ -76,8 +92,8 @@ docker compose create # Create Docker containers for
 docker start immich_postgres # Start Postgres server
 sleep 10 # Wait for Postgres server to start up
 docker exec -it immich_postgres bash # Enter the Docker shell and run the following command
-# Check the database user if you deviated from the default. If your backup ends in `.gz`, replace `cat` with `gunzip`
-cat < "/dump.sql" | sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" | psql --dbname=postgres --username=<DB_USERNAME>
+# Check the database user if you deviated from the default. If your backup ends in `.gz`, replace `cat` with `gunzip --stdout`
+cat "/dump.sql" | sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" | psql --dbname=postgres --username=<DB_USERNAME>
 exit # Exit the Docker shell
 docker compose up -d # Start remainder of Immich apps
 ```
Binary file changed (before: 12 KiB, after: 16 KiB; not shown).
@@ -10,12 +10,16 @@ Running with a pre-existing Postgres server can unlock powerful administrative f

 ## Prerequisites

-You must install pgvecto.rs into your instance of Postgres using their [instructions][vectors-install]. After installation, add `shared_preload_libraries = 'vectors.so'` to your `postgresql.conf`. If you already have some `shared_preload_libraries` set, you can separate each extension with a comma. For example, `shared_preload_libraries = 'pg_stat_statements, vectors.so'`.
+You must install `pgvector` (`>= 0.7.0, < 1.0.0`), as it is a prerequisite for `vchord`.
+The easiest way to do this on Debian/Ubuntu is by adding the [PostgreSQL Apt repository][pg-apt] and then
+running `apt install postgresql-NN-pgvector`, where `NN` is your Postgres version (e.g., `16`).
+
+You must install VectorChord into your instance of Postgres using their [instructions][vchord-install]. After installation, add `shared_preload_libraries = 'vchord.so'` to your `postgresql.conf`. If you already have some `shared_preload_libraries` set, you can separate each extension with a comma. For example, `shared_preload_libraries = 'pg_stat_statements, vchord.so'`.

 :::note
-Immich is known to work with Postgres versions 14, 15, and 16. Earlier versions are unsupported. Postgres 17 is nominally compatible, but pgvecto.rs does not have prebuilt images or packages for it as of writing.
+Immich is known to work with Postgres versions `>= 14, < 18`.

-Make sure the installed version of pgvecto.rs is compatible with your version of Immich. The current accepted range for pgvecto.rs is `>= 0.2.0, < 0.4.0`.
+Make sure the installed version of VectorChord is compatible with your version of Immich. The current accepted range for VectorChord is `>= 0.3.0, < 0.5.0`.
 :::

 ## Specifying the connection URL
@@ -53,21 +57,81 @@ CREATE DATABASE <immichdatabasename>;
 \c <immichdatabasename>
 BEGIN;
 ALTER DATABASE <immichdatabasename> OWNER TO <immichdbusername>;
-CREATE EXTENSION vectors;
+CREATE EXTENSION vchord CASCADE;
 CREATE EXTENSION earthdistance CASCADE;
-ALTER DATABASE <immichdatabasename> SET search_path TO "$user", public, vectors;
-ALTER SCHEMA vectors OWNER TO <immichdbusername>;
 COMMIT;
 ```

-### Updating pgvecto.rs
+### Updating VectorChord

-When installing a new version of pgvecto.rs, you will need to manually update the extension by connecting to the Immich database and running `ALTER EXTENSION vectors UPDATE;`.
+When installing a new version of VectorChord, you will need to manually update the extension by connecting to the Immich database and running `ALTER EXTENSION vchord UPDATE;`.

-### Common errors
+## Migrating to VectorChord

-#### Permission denied for view
+VectorChord is the successor extension to pgvecto.rs, allowing for higher performance, lower memory usage and higher quality results for smart search and facial recognition.

-If you get the error `driverError: error: permission denied for view pg_vector_index_stat`, you can fix this by connecting to the Immich database and running `GRANT SELECT ON TABLE pg_vector_index_stat TO <immichdbusername>;`.
+### Migrating from pgvecto.rs

-[vectors-install]: https://docs.vectorchord.ai/getting-started/installation.html
+Support for pgvecto.rs will be dropped in a later release, hence we recommend all users currently using pgvecto.rs to migrate to VectorChord at their convenience. There are two primary approaches to do so.
+
+The easiest option is to have both extensions installed during the migration:
+
+1. Ensure you still have pgvecto.rs installed
+2. Install `pgvector` (`>= 0.7.0, < 1.0.0`). The easiest way to do this is on Debian/Ubuntu by adding the [PostgreSQL Apt repository][pg-apt] and then running `apt install postgresql-NN-pgvector`, where `NN` is your Postgres version (e.g., `16`)
+3. [Install VectorChord][vchord-install]
+4. Add `shared_preload_libraries= 'vchord.so, vectors.so'` to your `postgresql.conf`, making sure to include _both_ `vchord.so` and `vectors.so`. You may include other libraries here as well if needed
+5. Restart the Postgres database
+6. If Immich does not have superuser permissions, run the SQL command `CREATE EXTENSION vchord CASCADE;` using psql or your choice of database client
+7. Start Immich and wait for the logs `Reindexed face_index` and `Reindexed clip_index` to be output
+8. If Immich does not have superuser permissions, run the SQL command `DROP EXTENSION vectors;`
+9. Drop the old schema by running `DROP SCHEMA vectors;`
+10. Remove the `vectors.so` entry from the `shared_preload_libraries` setting
+11. Restart the Postgres database
+12. Uninstall pgvecto.rs (e.g. `apt-get purge vectors-pg14` on Debian-based environments, replacing `pg14` as appropriate). `pgvector` must remain installed as it provides the data types used by `vchord`
+
+If it is not possible to have both VectorChord and pgvecto.rs installed at the same time, you can perform the migration with more manual steps:
+
+1. While pgvecto.rs is still installed, run the following SQL command using psql or your choice of database client. Take note of the number outputted by this command as you will need it later
+
+```sql
+SELECT atttypmod as dimsize
+FROM pg_attribute f
+JOIN pg_class c ON c.oid = f.attrelid
+WHERE c.relkind = 'r'::char
+AND f.attnum > 0
+AND c.relname = 'smart_search'::text
+AND f.attname = 'embedding'::text;
+```
+
+2. Remove references to pgvecto.rs using the below SQL commands
+
+```sql
+DROP INDEX IF EXISTS clip_index;
+DROP INDEX IF EXISTS face_index;
+ALTER TABLE smart_search ALTER COLUMN embedding SET DATA TYPE real[];
+ALTER TABLE face_search ALTER COLUMN embedding SET DATA TYPE real[];
+```
+
+3. [Install VectorChord][vchord-install]
+4. Change the columns back to the appropriate vector types, replacing `<number>` with the number from step 1
+
+```sql
+CREATE EXTENSION IF NOT EXISTS vchord CASCADE;
+ALTER TABLE smart_search ALTER COLUMN embedding SET DATA TYPE vector(<number>);
+ALTER TABLE face_search ALTER COLUMN embedding SET DATA TYPE vector(512);
+```
+
+5. Start Immich and let it create new indices using VectorChord
+
+### Migrating from pgvector
+
+1. Ensure you have at least 0.7.0 of pgvector installed. If it is below that, please upgrade it and run the SQL command `ALTER EXTENSION vector UPDATE;` using psql or your choice of database client
+2. Follow the Prerequisites to install VectorChord
+3. If Immich does not have superuser permissions, run the SQL command `CREATE EXTENSION vchord CASCADE;`
+4. Remove the `DB_VECTOR_EXTENSION=pgvector` environmental variable as it will make Immich still use pgvector if set
+5. Start Immich and let it create new indices using VectorChord
+
+Note that VectorChord itself uses pgvector types, so you should not uninstall pgvector after following these steps.
+
+[vchord-install]: https://docs.vectorchord.ai/vectorchord/getting-started/installation.html
+[pg-apt]: https://www.postgresql.org/download/linux/#generic
@@ -22,7 +22,7 @@ server {
     client_max_body_size 50000M;

     # Set headers
-    proxy_set_header Host $http_host;
+    proxy_set_header Host $host;
     proxy_set_header X-Real-IP $remote_addr;
     proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
     proxy_set_header X-Forwarded-Proto $scheme;
@@ -11,6 +11,7 @@ The `immich-server` docker image comes preinstalled with an administrative CLI (
 | `enable-oauth-login` | Enable OAuth login |
 | `disable-oauth-login` | Disable OAuth login |
 | `list-users` | List Immich users |
+| `version` | Print Immich version |

 ## How to run a command

@@ -80,3 +81,10 @@ immich-admin list-users
   }
 ]
 ```
+
+Print Immich Version
+
+```
+immich-admin version
+v1.129.0
+```
@@ -31,7 +31,7 @@ Admin can send a welcome email if the Email option is set, you can learn here ho

 Admin can specify the storage quota for the user as the instance's admin; once the limit is reached, the user won't be able to upload to the instance anymore.

-In order to select a storage quota, click on the pencil icon and enter the storage quota in GiB. You can choose an unlimited quota using the value 0 (default).
+In order to select a storage quota, click on the pencil icon and enter the storage quota in GiB. You can choose an unlimited quota by leaving it empty (default).

 :::tip
 The system administrator can see the usage quota percentage of all users in Server Stats page.
@@ -1,14 +1,14 @@
 # Database Migrations

-After making any changes in the `server/src/entities`, a database migration need to run in order to register the changes in the database. Follow the steps below to create a new migration.
+After making any changes in the `server/src/schema`, a database migration need to run in order to register the changes in the database. Follow the steps below to create a new migration.

 1. Run the command

 ```bash
-npm run typeorm:migrations:generate <migration-name>
+npm run migrations:generate <migration-name>
 ```

 2. Check if the migration file makes sense.
-3. Move the migration file to folder `./server/src/migrations` in your code editor.
+3. Move the migration file to folder `./server/src/schema/migrations` in your code editor.

 The server will automatically detect `*.ts` file changes and restart. Part of the server start-up process includes running any new migrations, so it will be applied immediately.
@@ -63,22 +63,41 @@ If you only want to do web development connected to an existing, remote backend,
 IMMICH_SERVER_URL=https://demo.immich.app/ npm run dev
 ```

+If you're using PowerShell on Windows you may need to set the env var separately like so:
+
+```powershell
+$env:IMMICH_SERVER_URL = "https://demo.immich.app/"
+npm run dev
+```
+
 #### `@immich/ui`

 To see local changes to `@immich/ui` in Immich, do the following:

 1. Install `@immich/ui` as a sibling to `immich/`, for example `/home/user/immich` and `/home/user/ui`
-1. Build the `@immich/ui` project via `npm run build`
-1. Uncomment the corresponding volume in web service of the `docker/docker-compose.dev.yaml` file (`../../ui:/usr/ui`)
-1. Uncomment the corresponding alias in the `web/vite.config.js` file (`'@immich/ui': path.resolve(\_\_dirname, '../../ui')`)
-1. Start up the stack via `make dev`
-1. After making changes in `@immich/ui`, rebuild it (`npm run build`)
+2. Build the `@immich/ui` project via `npm run build`
+3. Uncomment the corresponding volume in web service of the `docker/docker-compose.dev.yaml` file (`../../ui:/usr/ui`)
+4. Uncomment the corresponding alias in the `web/vite.config.js` file (`'@immich/ui': path.resolve(\_\_dirname, '../../ui')`)
+5. Uncomment the import statement in `web/src/app.css` file `@import '/usr/ui/dist/theme/default.css';` and comment out `@import '@immich/ui/theme/default.css';`
+6. Start up the stack via `make dev`
+7. After making changes in `@immich/ui`, rebuild it (`npm run build`)

 ### Mobile app

-The mobile app `(/mobile)` will required Flutter toolchain 3.13.x and FVM to be installed on your system.
+#### Setup

-Please refer to the [Flutter's official documentation](https://flutter.dev/docs/get-started/install) for more information on setting up the toolchain on your machine.
+1. Setup Flutter toolchain using FVM.
+2. Run `flutter pub get` to install the dependencies.
+3. Run `make translation` to generate the translation file.
+4. Run `fvm flutter run` to start the app.
+
+#### Translation
+
+To add a new translation text, enter the key-value pair in the `i18n/en.json` in the root of the immich project. Then, from the `mobile/` directory, run
+
+```bash
+make translation
+```
+
 The mobile app asks you what backend to connect to. You can utilize the demo backend (https://demo.immich.app/) if you don't need to change server code or upload photos. Alternatively, you can run the server yourself per the instructions above.

@@ -96,32 +115,72 @@ Note: Activating the license is not required.

 ### VSCode

-Install `Flutter`, `DCM`, `Prettier`, `ESLint` and `Svelte` extensions.
+Install `Flutter`, `DCM`, `Prettier`, `ESLint` and `Svelte` extensions. These extensions are listed in the `extensions.json` file under `.vscode/` and should appear as workspace recommendations.

-in User `settings.json` (`cmd + shift + p` and search for `Open User Settings JSON`) add the following:
+Here are the settings we use, they should be active as workspace settings (`settings.json`):

 ```json title="settings.json"
 {
-  "editor.formatOnSave": true,
-  "[javascript][typescript][css]": {
+  "[css]": {
     "editor.defaultFormatter": "esbenp.prettier-vscode",
-    "editor.tabSize": 2,
-    "editor.formatOnSave": true
-  },
-  "[svelte]": {
-    "editor.defaultFormatter": "svelte.svelte-vscode",
+    "editor.formatOnSave": true,
     "editor.tabSize": 2
   },
-  "svelte.enable-ts-plugin": true,
-  "eslint.validate": ["javascript", "svelte"],
   "[dart]": {
+    "editor.defaultFormatter": "Dart-Code.dart-code",
     "editor.formatOnSave": true,
     "editor.selectionHighlight": false,
     "editor.suggest.snippetsPreventQuickSuggestions": false,
     "editor.suggestSelection": "first",
     "editor.tabCompletion": "onlySnippets",
-    "editor.wordBasedSuggestions": "off",
-    "editor.defaultFormatter": "Dart-Code.dart-code"
-  }
+    "editor.wordBasedSuggestions": "off"
+  },
+  "[javascript]": {
+    "editor.codeActionsOnSave": {
+      "source.organizeImports": "explicit",
+      "source.removeUnusedImports": "explicit"
+    },
+    "editor.defaultFormatter": "esbenp.prettier-vscode",
+    "editor.formatOnSave": true,
+    "editor.tabSize": 2
+  },
+  "[json]": {
+    "editor.defaultFormatter": "esbenp.prettier-vscode",
+    "editor.formatOnSave": true,
+    "editor.tabSize": 2
+  },
+  "[jsonc]": {
+    "editor.defaultFormatter": "esbenp.prettier-vscode",
+    "editor.formatOnSave": true,
+    "editor.tabSize": 2
+  },
+  "[svelte]": {
+    "editor.codeActionsOnSave": {
+      "source.organizeImports": "explicit",
+      "source.removeUnusedImports": "explicit"
+    },
+    "editor.defaultFormatter": "svelte.svelte-vscode",
+    "editor.formatOnSave": true,
+    "editor.tabSize": 2
+  },
+  "[typescript]": {
+    "editor.codeActionsOnSave": {
+      "source.organizeImports": "explicit",
+      "source.removeUnusedImports": "explicit"
+    },
+    "editor.defaultFormatter": "esbenp.prettier-vscode",
+    "editor.formatOnSave": true,
+    "editor.tabSize": 2
+  },
+  "cSpell.words": ["immich"],
+  "editor.formatOnSave": true,
+  "eslint.validate": ["javascript", "svelte"],
+  "explorer.fileNesting.enabled": true,
+  "explorer.fileNesting.patterns": {
+    "*.dart": "${capture}.g.dart,${capture}.gr.dart,${capture}.drift.dart",
+    "*.ts": "${capture}.spec.ts,${capture}.mock.ts"
+  },
+  "svelte.enable-ts-plugin": true,
+  "typescript.preferences.importModuleSpecifier": "non-relative"
 }
 ```
docs/docs/features/casting.md (new file, 19 lines)
@@ -0,0 +1,19 @@
+# Chromecast support
+
+Immich supports the Google's Cast protocol so that photos and videos can be cast to devices such as a Chromecast and a Nest Hub. This feature is considered experimental and has several important limitations listed below. Currently, this feature is only supported by the web client, support on Android and iOS is planned for the future.
+
+## Enable Google Cast Support
+
+Google Cast support is disabled by default. The web UI uses Google-provided scripts and must retreive them from Google servers when the page loads. This is a privacy concern for some and is thus opt-in.
+
+You can enable Google Cast support through `Account Settings > Features > Cast > Google Cast`
+
+<img src={require('./img/gcast-enable.webp').default} width="70%" title='Enable Google Cast Support' />
+
+## Limitations
+
+To use casting with Immich, there are a few prerequisites:
+
+1. Your instance must be accessed via an HTTPS connection in order for the casting menu to show.
+2. Your instance must be publicly accessible via HTTPS and a DNS record for the server must be accessible via Google's DNS servers (`8.8.8.8` and `8.8.4.4`)
+3. Videos must be in a format that is compatible with Google Cast. For more info, check out [Google's documentation](https://developers.google.com/cast/docs/media)
@@ -42,6 +42,12 @@ docker run -it -v "$(pwd)":/import:ro -e IMMICH_INSTANCE_URL=https://your-immich

 Please modify the `IMMICH_INSTANCE_URL` and `IMMICH_API_KEY` environment variables as suitable. You can also use a Docker env file to store your sensitive API key.

+This `docker run` command will directly run the command `immich` inside the container. You can directly append the desired parameters (see under "usage") to the commandline like this:
+
+```bash
+docker run -it -v "$(pwd)":/import:ro -e IMMICH_INSTANCE_URL=https://your-immich-instance/api -e IMMICH_API_KEY=your-api-key ghcr.io/immich-app/immich-cli:latest upload -a -c 5 --recursive directory/
+```
+
 ## Usage

 <details>
@@ -112,7 +118,7 @@ You begin by authenticating to your Immich server. For instance:
 immich login http://192.168.1.216:2283/api HFEJ38DNSDUEG
 ```

-This will store your credentials in a `auth.yml` file in the configuration directory which defaults to `~/.config/`. The directory can be set with the `-d` option or the environment variable `IMMICH_CONFIG_DIR`. Please keep the file secure, either by performing the logout command after you are done, or deleting it manually.
+This will store your credentials in a `auth.yml` file in the configuration directory which defaults to `~/.config/immich/`. The directory can be set with the `-d` option or the environment variable `IMMICH_CONFIG_DIR`. Please keep the file secure, either by performing the logout command after you are done, or deleting it manually.

 Once you are authenticated, you can upload assets to your Immich server.

@@ -69,6 +69,8 @@ Navigating to Administration > Settings > Machine Learning Settings > Facial Rec

 :::tip
 It's better to only tweak the parameters here than to set them to something very different unless you're ready to test a variety of options. If you do need to set a parameter to a strict setting, relaxing other settings can be a good option to compensate, and vice versa.
+
+You can learn how the tune the result in this [Guide](/docs/guides/better-facial-clusters)
 :::

 ### Facial recognition model
@@ -121,6 +121,6 @@ Once this is done, you can continue to step 3 of "Basic Setup".
[hw-file]: https://github.com/immich-app/immich/releases/latest/download/hwaccel.transcoding.yml
[nvct]: https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html
[jellyfin-lp]: https://jellyfin.org/docs/general/post-install/transcoding/hardware-acceleration/intel#low-power-encoding
[jellyfin-kernel-bug]: https://jellyfin.org/docs/general/post-install/transcoding/hardware-acceleration/intel#known-issues-and-limitations-on-linux
[libmali-rockchip]: https://github.com/tsukumijima/libmali-rockchip/releases
BIN docs/docs/features/img/gcast-enable.webp Normal file (19 KiB; binary file not shown)
@@ -37,7 +37,7 @@ To validate that Immich can reach your external library, start a shell inside th
### Exclusion Patterns

By default, all files in the import paths will be added to the library. If there are files that should not be added, exclusion patterns can be used to exclude them. Exclusion patterns are glob patterns that are matched against the full file path. If a file matches an exclusion pattern, it will not be added to the library. Exclusion patterns can be added in the Scan Settings page for each library.

Some basic examples:
@@ -48,7 +48,11 @@ Some basic examples:
Special characters such as @ should be escaped, for instance:

- `**/\@eaDir/**` will exclude all files in any directory named `@eaDir`

:::info
Internally, Immich uses the [glob](https://www.npmjs.com/package/glob) package to process exclusion patterns, and sometimes those patterns are translated into [Postgres LIKE patterns](https://www.postgresql.org/docs/current/functions-matching.html). The intention is to support basic folder exclusions, but we recommend against advanced usage since such patterns can't reliably be translated to the Postgres syntax. Please refer to the [glob documentation](https://github.com/isaacs/node-glob#glob-primer) for a basic overview of glob patterns.
:::

### Automatic watching (EXPERIMENTAL)
@@ -68,7 +72,7 @@ In rare cases, the library watcher can hang, preventing Immich from starting up.
### Nightly job

There is an automatic scan job that is scheduled to run once a day. This job also cleans up any libraries stuck in deletion. It is possible to trigger the cleanup by clicking "Scan all libraries" in the library management page.

## Usage
@@ -91,7 +95,7 @@ The `immich-server` container will need access to the gallery. Modify your docke
+ - /mnt/nas/christmas-trip:/mnt/media/christmas-trip:ro
+ - /home/user/old-pics:/mnt/media/old-pics:ro
+ - /mnt/media/videos:/mnt/media/videos:ro
+ - /mnt/media/videos2:/mnt/media/videos2 # WARNING: Immich will be able to delete the files in this folder, as it does not end with :ro
+ - "C:/Users/user_name/Desktop/my media:/mnt/media/my-media:ro" # import path in Windows system.
```
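After restarting the stack, a quick way to confirm the container can actually see these mounts (assuming the default `immich_server` container name):

```bash
# Each command should list the files from the corresponding host folder
docker exec immich_server ls /mnt/media/christmas-trip
docker exec immich_server ls /mnt/media/old-pics
```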
@@ -11,7 +11,9 @@ You do not need to redo any machine learning jobs after enabling hardware accele
- ARM NN (Mali)
- CUDA (NVIDIA GPUs with [compute capability](https://developer.nvidia.com/cuda-gpus) 5.2 or higher)
- ROCm (AMD GPUs)
- OpenVINO (Intel GPUs such as Iris Xe and Arc)
- RKNN (Rockchip)

## Limitations
@@ -19,6 +21,7 @@ You do not need to redo any machine learning jobs after enabling hardware accele
- Only Linux and Windows (through WSL2) servers are supported.
- ARM NN is only supported on devices with Mali GPUs. Other Arm devices are not supported.
- Some models may not be compatible with certain backends. CUDA is the most reliable.
- Search latency isn't improved by ARM NN due to model compatibility issues preventing its use. However, smart search jobs do make use of ARM NN.

## Prerequisites
@@ -33,30 +36,47 @@ You do not need to redo any machine learning jobs after enabling hardware accele
- The `hwaccel.ml.yml` file assumes the path to it is `/usr/lib/libmali.so`, so update accordingly if it is elsewhere
- The `hwaccel.ml.yml` file assumes an additional file `/lib/firmware/mali_csffw.bin`, so update accordingly if your device's driver does not require this file
- Optional: Configure your `.env` file, see [environment variables](/docs/install/environment-variables) for ARM NN specific settings
  - In particular, the `MACHINE_LEARNING_ANN_FP16_TURBO` setting can significantly improve performance at the cost of very slightly lower accuracy
#### CUDA

- The GPU must have compute capability 5.2 or greater.
- The server must have the official NVIDIA driver installed.
- The installed driver must be >= 545 (it must support CUDA 12.3); see the check below.
- On Linux (except for WSL2), you also need to have [NVIDIA Container Toolkit][nvct] installed.
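A quick way to verify both the driver and the container toolkit (the CUDA image tag is only an example; use any tag available for your platform):

```bash
# The header of nvidia-smi shows the driver version; it should be 545 or newer
nvidia-smi

# If the NVIDIA Container Toolkit is set up correctly, the same output
# appears from inside a container
docker run --rm --gpus all nvidia/cuda:12.3.2-base-ubuntu22.04 nvidia-smi
```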
#### ROCm

- The GPU must be supported by ROCm. If it isn't officially supported, you can attempt to use the `HSA_OVERRIDE_GFX_VERSION` environment variable: `HSA_OVERRIDE_GFX_VERSION=<a supported version, e.g. 10.3.0>`. If this doesn't work, you might need to also set `HSA_USE_SVM=0`. (A quick way to check which target your GPU reports is shown below.)
- The ROCm image is quite large and requires at least 35GiB of free disk space. However, pulling later updates to the service through Docker will generally only amount to a few hundred megabytes as the rest will be cached.
- This backend is new and may experience some issues. For example, GPU power consumption can be higher than usual after running inference, even if the machine learning service is idle. In this case, it will only go back to normal after being idle for 5 minutes (configurable with the [MACHINE_LEARNING_MODEL_TTL](/docs/install/environment-variables) setting).
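If `rocminfo` is installed on the host (it ships with the ROCm packages), it can show which gfx target your GPU reports, which helps when picking an `HSA_OVERRIDE_GFX_VERSION` value:

```bash
# Look for lines like "Name: gfx1030" in the agent list
rocminfo | grep -i gfx
```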
#### OpenVINO

- Integrated GPUs are more likely to experience issues than discrete GPUs, especially for older processors or servers with low RAM.
- Ensure the server's kernel version is new enough to use the device for hardware acceleration.
- Expect higher RAM usage when using OpenVINO compared to CPU processing.
#### RKNN

- You must have a supported Rockchip SoC: only RK3566, RK3568, RK3576 and RK3588 are supported at this moment.
- Make sure you have the appropriate Linux kernel driver installed
  - This is usually pre-installed on the device vendor's Linux images
- RKNPU driver V0.9.8 or later must be available on the host server
  - You may confirm this by running `cat /sys/kernel/debug/rknpu/version` to check the version
- Optional: Configure your `.env` file, see [environment variables](/docs/install/environment-variables) for RKNN specific settings
  - In particular, setting `MACHINE_LEARNING_RKNN_THREADS` to 2 or 3 can _dramatically_ improve performance for RK3576 and RK3588 compared to the default of 1, at the expense of multiplying the amount of RAM each model uses by that amount.
## Setup

1. If you do not already have it, download the latest [`hwaccel.ml.yml`][hw-file] file and ensure it's in the same folder as the `docker-compose.yml`.
2. In the `docker-compose.yml` under `immich-machine-learning`, uncomment the `extends` section and change `cpu` to the appropriate backend.
3. Still in `immich-machine-learning`, add one of -[armnn, cuda, rocm, openvino, rknn] to the `image` section's tag at the end of the line.
4. Redeploy the `immich-machine-learning` container with these updated settings (see the example below).
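For step 4, the machine-learning service can be recreated on its own; a minimal sketch:

```bash
# Pull the backend-specific image (if the tag changed) and recreate only this service
docker compose pull immich-machine-learning
docker compose up -d immich-machine-learning
```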
### Confirming Device Usage

You can confirm the device is being recognized and used by checking its utilization. There are many tools to display this, such as `nvtop` for NVIDIA or Intel, `intel_gpu_top` for Intel, and `radeontop` for AMD.

You can also check the logs of the `immich-machine-learning` container. When a Smart Search or Face Detection job begins, or when you search with text in Immich, you should either see a log for `Available ORT providers` containing the relevant provider (e.g. `CUDAExecutionProvider` in the case of CUDA), or a `Loaded ANN model` log entry without errors in the case of ARM NN.
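A quick way to grep for those log entries (assuming the default `immich_machine_learning` container name):

```bash
# Shows which execution provider or ANN model was loaded by the ML service
docker logs immich_machine_learning 2>&1 | grep -E "Available ORT providers|Loaded ANN model"
```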
@@ -127,3 +147,12 @@ Note that you should increase job concurrencies to increase overall utilization
- If you encounter an error when a model is running, try a different model to see if the issue is model-specific.
- You may want to increase concurrency past the default for higher utilization. However, keep in mind that this will also increase VRAM consumption.
- Larger models benefit more from hardware acceleration, if you have the VRAM for them.
- Compared to ARM NN, RKNPU has:
  - Wider model support (including for search, which ARM NN does not accelerate)
  - Less heat generation
  - Very slightly lower accuracy (RKNPU always uses FP16, while ARM NN by default uses higher precision FP32 unless `MACHINE_LEARNING_ANN_FP16_TURBO` is enabled)
  - Varying speed (tested on RK3588):
    - If `MACHINE_LEARNING_RKNN_THREADS` is at the default of 1, RKNPU will have substantially lower throughput for ML jobs than ARM NN in most cases, but similar latency (such as when searching)
    - If `MACHINE_LEARNING_RKNN_THREADS` is set to 3, it will be somewhat faster than ARM NN at FP32, but somewhat slower than ARM NN if `MACHINE_LEARNING_ANN_FP16_TURBO` is enabled
    - When other tasks also use the GPU (like transcoding), RKNPU has a significant advantage over ARM NN as it uses the otherwise idle NPU instead of competing for GPU usage
  - Lower RAM usage if `MACHINE_LEARNING_RKNN_THREADS` is at the default of 1, but significantly higher if greater than 1 (which is necessary for it to fully utilize the NPU and hence be comparable in speed to ARM NN)
File diff suppressed because it is too large
@@ -18,7 +18,7 @@ For the full list, refer to the [Immich source code](https://github.com/immich-a
| `JPEG 2000` | `.jp2` | :white_check_mark: | |
| `JPEG` | `.webp` `.jpg` `.jpe` `.insp` | :white_check_mark: | |
| `JPEG XL` | `.jxl` | :white_check_mark: | |
| `PNG` | `.png` | :white_check_mark: | |
| `PSD` | `.psd` | :white_check_mark: | Adobe Photoshop |
| `RAW` | `.raw` | :white_check_mark: | |
| `RW2` | `.rw2` | :white_check_mark: | |
72 docs/docs/guides/better-facial-clusters.md Normal file
@@ -0,0 +1,72 @@
# Better Facial Recognition Clusters

## Purpose

This guide explains how to optimize facial recognition in systems with large image libraries. By following these steps, you'll achieve better clustering of faces, reducing the need for manual merging.

---

## Important Notes

- **Best Suited For:** Large image libraries after importing a significant number of images.
- **Warning:** This method deletes all previously assigned names.
- **Tip:** **Always take a [backup](/docs/administration/backup-and-restore#database) before proceeding!**

---

## Step-by-Step Instructions

### Objective

To enhance face clustering and ensure the model effectively identifies faces from high-quality initial data.

---

### Steps

#### 1. Adjust Machine Learning Settings

Navigate to:
**Admin → Administration → Settings → Machine Learning Settings**

Make the following changes:

- **Maximum recognition distance (Optional):**
  Lower this value, e.g., to **0.4**, if the library contains people with similar facial features.
- **Minimum recognized faces:**
  Set this to a **high value** (e.g., 20 for libraries with a large number of assets (~100K+), and 10 for libraries with a medium number of assets (~40K+)).
  > A high value ensures clusters only include faces that appear at least `value` times (e.g., 20) in the library, improving the initial clustering process.

---

#### 2. Run Reset Jobs

Go to:
**Admin → Administration → Settings → Jobs**

Perform the following:

1. **FACIAL RECOGNITION → Reset**

> These reset jobs rebuild the recognition model based on the new settings.

---

#### 3. Refine Recognition with Lower Thresholds

Once the reset jobs are complete, refine the recognition as follows:

- **Step 1:**
  Return to **Minimum recognized faces** in Machine Learning Settings and lower the value to **10** (in medium libraries, lower the value from 10 to 5).
  > Run the job: **FACIAL RECOGNITION → MISSING Mode**
- **Step 2:**
  Lower the value again to **3**.
  > Run the job: **FACIAL RECOGNITION → MISSING Mode**

:::tip try different values
For certain libraries with a larger or smaller number of assets, other settings will work better or worse. It is recommended to try different values **before assigning names** and see which settings work best for your library.
:::

---
@@ -14,14 +14,14 @@ online generators you can use.
2. Paste the link to your JSON style in either the **Light Style** or **Dark Style**. (You can add different styles, which will help make the map style more appropriate depending on whether you set **Immich** to Light or Dark mode.)
3. Save your selections. Reload the map, and enjoy your custom map style!

## Use MapTiler to build a custom style

Customizing the map style can be done easily using MapTiler, if you do not want to write an entire JSON document by hand.

1. Create a free account at https://cloud.maptiler.com
2. Once logged in, you can either create a brand new map by clicking on **New Map**, selecting a starter map, and then clicking **Customize**, or select a **Standard Map** and customize it from there.
3. The **editor** interface is self-explanatory. You can change colors, remove visible layers, or add optional layers (e.g., administrative, topo, hydro, etc.) in the composer.
4. Once you have your map composed, click on **Save** at the top right. Give it a unique name to save it to your account.
5. Next, **Publish** your style using the **Publish** button at the top right. This will deploy it to production, which means it is able to be exposed over the Internet. MapTiler will present an interactive side-by-side map with the original and your changes prior to publication.<br/>
6. MapTiler will warn you that changing the map will change it across all apps using the map. Since no apps are using the map yet, this is okay.
7. Clicking on the name of your new map at the top left will bring you to the item's **details** page. From here, copy the link to the JSON style under **Use vector style**. This link will automatically contain your personal API key to MapTiler.
@@ -1,7 +1,7 @@
# Database Queries

:::danger
Keep in mind that mucking around in the database might set the Moon on fire. Avoid modifying the database directly when possible, and always have current backups.
:::

:::tip
@@ -31,6 +31,10 @@ SELECT * FROM "assets" WHERE "originalPath" LIKE 'upload/library/admin/2023/%';
SELECT * FROM "assets" WHERE "id" = '9f94e60f-65b6-47b7-ae44-a4df7b57f0e9';
```

```sql title="Find by partial ID"
SELECT * FROM "assets" WHERE "id"::text LIKE '%ab431d3a%';
```

:::note
You can calculate the checksum for a particular file by using the command `sha1sum <filename>`.
:::
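If you prefer to run such queries without opening an interactive session, a sketch assuming the default `immich_postgres` container, `postgres` user, and `immich` database (adjust to your deployment):

```bash
# Runs the partial-ID query above directly through the database container
docker exec immich_postgres psql --dbname=immich --username=postgres \
  -c "SELECT \"originalPath\" FROM \"assets\" WHERE \"id\"::text LIKE '%ab431d3a%';"
```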
@@ -23,12 +23,12 @@ name: immich_remote_ml
services:
  immich-machine-learning:
    container_name: immich_machine_learning
    # For hardware acceleration, add one of -[armnn, cuda, rocm, openvino, rknn] to the image tag.
    # Example tag: ${IMMICH_VERSION:-release}-cuda
    image: ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release}
    # extends:
    #   file: hwaccel.ml.yml
    #   service: # set to one of [armnn, cuda, rocm, openvino, openvino-wsl, rknn] for accelerated inference - use the `-wsl` version for WSL2 where applicable
    volumes:
      - model-cache:/cache
    restart: always
@@ -1,3 +1,7 @@
---
sidebar_position: 100
---

# Config File

A config file can be provided as an alternative to the UI configuration.
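A rough sketch of how this can be wired up with Docker Compose (the file name and mount path below are only examples):

```bash
# Hypothetical layout: your settings live in ./immich-config.json on the host.
# In docker-compose.yml, under the immich-server service, you would add:
#   volumes:
#     - ./immich-config.json:/config/immich-config.json:ro
#   environment:
#     IMMICH_CONFIG_FILE: /config/immich-config.json
# Then recreate the server container so it picks up the file:
docker compose up -d immich-server
```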
@@ -2,53 +2,13 @@
sidebar_position: 30
---

import CodeBlock from '@theme/CodeBlock';
import ExampleEnv from '!!raw-loader!../../../docker/example.env';

# Docker Compose [Recommended]

Docker Compose is the recommended method to run Immich in production. Below are the steps to deploy Immich with Docker Compose.

import DockerComposeSteps from '/docs/partials/_docker-compose-install-steps.mdx';

<DockerComposeSteps />

## Step 1 - Download the required files

Create a directory of your choice (e.g. `./immich-app`) to hold the `docker-compose.yml` and `.env` files.

```bash title="Move to the directory you created"
mkdir ./immich-app
cd ./immich-app
```

Download [`docker-compose.yml`][compose-file] and [`example.env`][env-file] by running the following commands:

```bash title="Get docker-compose.yml file"
wget -O docker-compose.yml https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
```

```bash title="Get .env file"
wget -O .env https://github.com/immich-app/immich/releases/latest/download/example.env
```

You can alternatively download these two files from your browser and move them to the directory that you created, in which case ensure that you rename `example.env` to `.env`.

## Step 2 - Populate the .env file with custom values

<CodeBlock language="bash" title="Default environmental variable content">
{ExampleEnv}
</CodeBlock>

- Populate `UPLOAD_LOCATION` with your preferred location for storing backup assets. It should be a new directory on the server with enough free space.
- Consider changing `DB_PASSWORD` to a custom value. Postgres is not publicly exposed, so this password is only used for local authentication.
  To avoid issues with Docker parsing this value, it is best to use only the characters `A-Za-z0-9`. `pwgen` is a handy utility for this.
- Set your timezone by uncommenting the `TZ=` line.
- Populate custom database information if necessary.

## Step 3 - Start the containers

From the directory you created in Step 1 (which should now contain your customized `docker-compose.yml` and `.env` files), run the following command to start Immich as a background service:

```bash title="Start the containers"
docker compose up -d
```

:::info Docker version
If you get an error such as `unknown shorthand flag: 'd' in -d` or `open <location of your .env file>: permission denied`, you are probably running the wrong Docker version. (This happens, for example, with the docker.io package in Ubuntu 22.04.3 LTS.) You can correct the problem by following the complete [Docker Engine install](https://docs.docker.com/engine/install/) procedure for your distribution, crucially the "Uninstall old versions" and "Install using the apt/rpm repository" sections. These replace the distro's Docker packages with Docker's official ones.
@@ -69,39 +29,4 @@ If you get an error `can't set healthcheck.start_interval as feature require Doc
## Next Steps

Read the [Post Installation](/docs/install/post-install.mdx) steps and [upgrade instructions](/docs/install/upgrading.md).

### Setting up optional features

- [External Libraries](/docs/features/libraries.md): Adding your existing photo library to Immich
- [Hardware Transcoding](/docs/features/hardware-transcoding.md): Speeding up video transcoding
- [Hardware-Accelerated Machine Learning](/docs/features/ml-hardware-acceleration.md): Speeding up various machine learning tasks in Immich

### Upgrading

:::danger Read the release notes
Immich is currently under heavy development, which means you can expect [breaking changes][breaking] and bugs. Therefore, we recommend reading the release notes prior to updating and to take special care when using automated tools like [Watchtower][watchtower].

You can see versions that had breaking changes [here][breaking].
:::

If `IMMICH_VERSION` is set, it will need to be updated to the latest or desired version.

When a new version of Immich is [released][releases], the application can be upgraded and restarted with the following commands, run in the directory with the `docker-compose.yml` file:

```bash title="Upgrade and restart Immich"
docker compose pull && docker compose up -d
```

To clean up disk space, the old version's obsolete container images can be deleted with the following command:

```bash title="Clean up unused Docker images"
docker image prune
```

[compose-file]: https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
[env-file]: https://github.com/immich-app/immich/releases/latest/download/example.env
[watchtower]: https://containrrr.dev/watchtower/
[breaking]: https://github.com/immich-app/immich/discussions?discussions_q=label%3Achangelog%3Abreaking-change+sort%3Adate_created
[container-auth]: https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry#authenticating-to-the-container-registry
[releases]: https://github.com/immich-app/immich/releases
@@ -11,7 +11,7 @@ Just restarting the containers does not replace the environment within the conta
In order to recreate the container using Docker Compose, run `docker compose up -d`.
In most cases, Docker will recognize that the `.env` file has changed and recreate the affected containers.
If this does not work, try running `docker compose up -d --force-recreate`.

:::
@@ -20,8 +20,8 @@ If this should not work, try running `docker compose up -d --force-recreate`.
| Variable | Description | Default | Containers |
| :----------------- | :------------------------------ | :-------: | :----------------------- |
| `IMMICH_VERSION` | Image tags | `release` | server, machine learning |
| `UPLOAD_LOCATION` | Host path for uploads | | server |
| `DB_DATA_LOCATION` | Host path for Postgres database | | database |

:::tip
These environment variables are used by the `docker-compose.yml` file and do **NOT** affect the containers directly.
@@ -33,15 +33,15 @@ These environment variables are used by the `docker-compose.yml` file and do **N
| :---------------------------------- | :---------------------------------------------------------------------------------------- | :--------------------------: | :----------------------- | :----------------- |
| `TZ` | Timezone | <sup>\*1</sup> | server | microservices |
| `IMMICH_ENV` | Environment (production, development) | `production` | server, machine learning | api, microservices |
| `IMMICH_LOG_LEVEL` | Log level (verbose, debug, log, warn, error) | `log` | server, machine learning | api, microservices |
| `IMMICH_MEDIA_LOCATION` | Media location inside the container ⚠️**You probably shouldn't set this**<sup>\*2</sup>⚠️ | `./upload`<sup>\*3</sup> | server | api, microservices |
| `IMMICH_CONFIG_FILE` | Path to config file | | server | api, microservices |
| `NO_COLOR` | Set to `true` to disable color-coded log output | `false` | server, machine learning | |
| `CPU_CORES` | Number of cores available to the Immich server | auto-detected CPU core count | server | |
| `IMMICH_API_METRICS_PORT` | Port for the OTEL metrics | `8081` | server | api |
| `IMMICH_MICROSERVICES_METRICS_PORT` | Port for the OTEL metrics | `8082` | server | microservices |
| `IMMICH_PROCESS_INVALID_IMAGES` | When `true`, generate thumbnails for invalid images | | server | microservices |
| `IMMICH_TRUSTED_PROXIES` | List of comma-separated IPs set as trusted proxies | | server | api |
| `IMMICH_IGNORE_MOUNT_CHECK_ERRORS` | See [System Integrity](/docs/administration/system-integrity) | | server | api, microservices |

\*1: `TZ` should be set to a `TZ identifier` from [this list][tz-list]. For example, `TZ="Etc/UTC"`.
@@ -50,7 +50,7 @@ These environment variables are used by the `docker-compose.yml` file and do **N
\*2: This path is where the Immich code looks for the files, which is internal to the Docker container. Setting it to a path on your host will certainly break things; you should use the `UPLOAD_LOCATION` variable instead.

\*3: With the default `WORKDIR` of `/usr/src/app`, this path will resolve to `/usr/src/app/upload`.
It only needs to be set if the Immich deployment method is changing.

## Workers
@@ -72,20 +72,21 @@ Information on the current workers can be found [here](/docs/administration/jobs
## Database

| Variable | Description | Default | Containers |
| :---------------------------------- | :--------------------------------------------------------------------------- | :--------: | :----------------------------- |
| `DB_URL` | Database URL | | server |
| `DB_HOSTNAME` | Database host | `database` | server |
| `DB_PORT` | Database port | `5432` | server |
| `DB_USERNAME` | Database user | `postgres` | server, database<sup>\*1</sup> |
| `DB_PASSWORD` | Database password | `postgres` | server, database<sup>\*1</sup> |
| `DB_DATABASE_NAME` | Database name | `immich` | server, database<sup>\*1</sup> |
| `DB_SSL_MODE` | Database SSL mode | | server |
| `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database vector extension (one of [`vectorchord`, `pgvector`, `pgvecto.rs`]) | | server |
| `DB_SKIP_MIGRATIONS` | Whether to skip running migrations on startup (one of [`true`, `false`]) | `false` | server |

\*1: The values of `DB_USERNAME`, `DB_PASSWORD`, and `DB_DATABASE_NAME` are passed to the Postgres container as the variables `POSTGRES_USER`, `POSTGRES_PASSWORD`, and `POSTGRES_DB` in `docker-compose.yml`.

\*2: If not provided, the appropriate extension to use is auto-detected at startup by introspecting the database. When multiple extensions are installed, the order of preference is VectorChord, pgvecto.rs, pgvector.
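For reference, a `DB_URL` equivalent to the individual defaults above might look like this (a sketch; adjust the credentials, host, and database name to your deployment):

```bash
# Standard Postgres connection URL built from the default values
DB_URL=postgresql://postgres:postgres@database:5432/immich
```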
:::info
@@ -103,18 +104,18 @@ When `DB_URL` is defined, the `DB_HOSTNAME`, `DB_PORT`, `DB_USERNAME`, `DB_PASSW
| Variable | Description | Default | Containers |
| :--------------- | :------------- | :-----: | :--------- |
| `REDIS_URL` | Redis URL | | server |
| `REDIS_SOCKET` | Redis socket | | server |
| `REDIS_HOSTNAME` | Redis host | `redis` | server |
| `REDIS_PORT` | Redis port | `6379` | server |
| `REDIS_USERNAME` | Redis username | | server |
| `REDIS_PASSWORD` | Redis password | | server |
| `REDIS_DBINDEX` | Redis DB index | `0` | server |

:::info
All `REDIS_` variables must be provided to all Immich workers, including `api` and `microservices`.

`REDIS_URL` must start with `ioredis://` and then include a `base64` encoded JSON string for the configuration.
More information can be found in the upstream [ioredis] documentation.

When `REDIS_URL` or `REDIS_SOCKET` are defined, the `REDIS_HOSTNAME`, `REDIS_PORT`, `REDIS_USERNAME`, `REDIS_PASSWORD`, and `REDIS_DBINDEX` variables are ignored.
:::
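A quick way to produce such a value (the hostname and master name are only illustrative; the JSON keys follow the ioredis options format):

```bash
# Base64-encode the JSON configuration and prefix it with ioredis://
json='{"sentinels":[{"host":"redis-sentinel-1","port":26379}],"name":"mymaster"}'
echo "ioredis://$(printf '%s' "$json" | base64 | tr -d '\n')"
```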
@@ -170,6 +171,8 @@ Redis (Sentinel) URL example JSON before encoding:
| `MACHINE_LEARNING_MAX_BATCH_SIZE__FACIAL_RECOGNITION` | Set the maximum number of faces that will be processed at once by the facial recognition model | None (`1` if using OpenVINO) | machine learning |
| `MACHINE_LEARNING_PING_TIMEOUT` | How long (ms) to wait for a PING response when checking if an ML server is available | `2000` | server |
| `MACHINE_LEARNING_AVAILABILITY_BACKOFF_TIME` | How long to ignore ML servers that are offline before trying again | `30000` | server |
| `MACHINE_LEARNING_RKNN` | Enable RKNN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_RKNN_THREADS` | How many threads of the RKNN runtime should be spun up for inference | `1` | machine learning |

\*1: It is recommended to begin with this parameter when changing the concurrency levels of the machine learning service and then tune the other ones.
@@ -181,7 +184,11 @@ Redis (Sentinel) URL example JSON before encoding:
:::info
While the `textual` model is the only one required for smart search, some users may experience slow first searches due to backups triggering loading of the other models into memory, which blocks other requests until completed.
To avoid this, you can preload the other models (`visual`, `recognition`, and `detection`) if you have enough RAM to do so.

Additional machine learning parameters can be tuned from the admin UI.
:::
@@ -212,7 +219,7 @@ the `_FILE` variable should be set to the path of a file containing the variable
details on how to use Docker Secrets in the Postgres image.

\*2: See [this comment][docker-secrets-example] for an example of how
to use a Docker secret for the password in the Redis container.

[tz-list]: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List
[docker-secrets-example]: https://github.com/docker-library/redis/issues/46#issuecomment-335326234
@@ -41,3 +41,9 @@ A list of common steps to take after installing Immich include:
## Step 7 - Setup Server Backups

<ServerBackup />

## Setting up optional features

- [External Libraries](/docs/features/libraries.md): Adding your existing photo library to Immich
- [Hardware Transcoding](/docs/features/hardware-transcoding.md): Speeding up video transcoding
- [Hardware-Accelerated Machine Learning](/docs/features/ml-hardware-acceleration.md): Speeding up various machine learning tasks in Immich
@@ -29,7 +29,7 @@ Download [`docker-compose.yml`](https://github.com/immich-app/immich/releases/la
## Step 2 - Populate the .env file with custom values

Follow [Step 2 in Docker Compose](/docs/install/docker-compose#step-2---populate-the-env-file-with-custom-values) for instructions on customizing the `.env` file, and then return to this guide to continue.

## Step 3 - Create a new project in Container Manager
@@ -67,10 +67,4 @@ Click "**Edit Rules**" and add the following firewall rules:
## Next Steps

Read the [Post Installation](/docs/install/post-install.mdx) steps and [upgrade instructions](/docs/install/upgrading.md).

### Setting up optional features

- [External Libraries](/docs/features/libraries.md): Adding your existing photo library to Immich
- [Hardware Transcoding](/docs/features/hardware-transcoding.md): Speeding up video transcoding
- [Hardware-Accelerated Machine Learning](/docs/features/ml-hardware-acceleration.md): Speeding up various machine learning tasks in Immich
@@ -2,7 +2,7 @@
sidebar_position: 80
---

# TrueNAS [Community]

:::note
This is a community contribution and not officially supported by the Immich team, but included here for convenience.
@@ -12,17 +12,17 @@ Community support can be found in the dedicated channel on the [Discord Server](
**Please report app issues to the corresponding [Github Repository](https://github.com/truenas/charts/tree/master/community/immich).**
:::

Immich can easily be installed on TrueNAS Community Edition via the **Community** train application.
Consider reviewing the TrueNAS [Apps resources](https://apps.truenas.com/getting-started/) if you have not previously configured applications on your system.

TrueNAS Community Edition makes installing and updating Immich easy, but you must use the Immich web portal and mobile app to configure accounts and access libraries.

## First Steps

The Immich app in TrueNAS Community Edition installs, completes the initial configuration, then starts the Immich web portal.
When updates become available, TrueNAS alerts you and provides easy updates.

Before installing the Immich app in TrueNAS, review the [Environment Variables](#environment-variables) documentation to see if you want to configure any during installation.
You may also configure environment variables at any time after deploying the application.

### Setting up Storage Datasets
@@ -126,9 +126,9 @@ className="border rounded-xl"
Accept the default port `30041` in **WebUI Port** or enter a custom port number.
:::info Allowed Port Numbers
Only numbers within the range 9000-65535 may be used on TrueNAS versions below TrueNAS Community Edition 24.10 Electric Eel.

Regardless of version, to avoid port conflicts, don't use [ports on this list](https://www.truenas.com/docs/solutions/optimizations/security/#truenas-default-ports).
:::

### Storage Configuration
@@ -173,7 +173,7 @@ className="border rounded-xl"
You may configure [External Libraries](/docs/features/libraries) by mounting them using **Additional Storage**.
The **Mount Path** is the location you will need to copy and paste into the External Library settings within Immich.
The **Host Path** is the location on the TrueNAS Community Edition server where your external library is located.

<!-- A section for Labels would go here but I don't know what they do. -->
@@ -188,17 +188,17 @@ className="border rounded-xl"
Accept the default **CPU** limit of `2` threads or specify the number of threads (CPUs with Multi-/Hyper-threading have 2 threads per core).

Specify the **Memory** limit in MB of RAM. Immich recommends at least 6000 MB (6 GB). If you selected **Enable Machine Learning** in **Immich Configuration**, you should probably set this above 8000 MB.

:::info Older TrueNAS Versions
Before TrueNAS Community Edition version 24.10 Electric Eel:

The **CPU** value was specified in a different format with a default of `4000m`, which is 4 threads.

The **Memory** value was specified in a different format with a default of `8Gi`, which is 8 GiB of RAM. The value was specified in bytes or a number with a measurement suffix. Examples: `129M`, `123Mi`, `1000000000`
:::

Enable **GPU Configuration** options if you have a GPU that you will use for [Hardware Transcoding](/docs/features/hardware-transcoding) and/or [Hardware-Accelerated Machine Learning](/docs/features/ml-hardware-acceleration.md). More info: [GPU Passthrough Docs for TrueNAS Apps](https://apps.truenas.com/managing-apps/installing-apps/#gpu-passthrough)

### Install
@@ -240,14 +240,18 @@ className="border rounded-xl"
/>

:::info
- Some Environment Variables are not available for the TrueNAS SCALE app. This is mainly because they can be configured through GUI options in the [Edit Immich screen](#edit-app-settings).
+ Some Environment Variables are not available for the TrueNAS Community Edition app. This is mainly because they can be configured through GUI options in the [Edit Immich screen](#edit-app-settings).

Some examples are: `IMMICH_VERSION`, `UPLOAD_LOCATION`, `DB_DATA_LOCATION`, `TZ`, `IMMICH_LOG_LEVEL`, `DB_PASSWORD`, `REDIS_PASSWORD`.
:::

## Updating the App

- When updates become available, SCALE alerts and provides easy updates.
+ :::danger
+ Make sure to read the general [upgrade instructions](/docs/install/upgrading.md).
+ :::
+
+ When updates become available, TrueNAS alerts and provides easy updates.

To update the app to the latest version:

- Go to the **Installed Applications** screen and select Immich from the list of installed applications.
@@ -77,7 +77,7 @@ alt="Select Plugins > Compose.Manager > Add New Stack > Label it Immich"
7. Paste the entire contents of the [Immich example.env](https://github.com/immich-app/immich/releases/latest/download/example.env) file into the Unraid editor, then **before saving** edit the following:

- `UPLOAD_LOCATION`: Create a folder in your Images Unraid share and place the **absolute** location here > For example my _"images"_ share has a folder within it called _"immich"_. If I browse to this directory in the terminal and type `pwd` the output is `/mnt/user/images/immich`. This is the exact value I need to enter as my `UPLOAD_LOCATION`
- - `DB_DATA_LOCATION`: Change this to use an Unraid share (preferably a cache pool, e.g. `/mnt/user/appdata`). If left at default it will try to use Unraid's `/boot/config/plugins/compose.manager/projects/[stack_name]/postgres` folder which it doesn't have permissions to, resulting in this container continuously restarting.
+ - `DB_DATA_LOCATION`: Change this to use an Unraid share (preferably a cache pool, e.g. `/mnt/user/appdata/postgresql/data`). This uses the `appdata` share. Do also create the `postgresql` folder, by running `mkdir /mnt/user/{share_location}/postgresql/data`. If left at default it will try to use Unraid's `/boot/config/plugins/compose.manager/projects/[stack_name]/postgres` folder which it doesn't have permissions to, resulting in this container continuously restarting.

<img
src={require('./img/unraid05.webp').default}
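Taken together, the revised `UPLOAD_LOCATION` and `DB_DATA_LOCATION` guidance above boils down to a couple of shell commands run before starting the stack. A minimal sketch, assuming the `images` share from the example and the default `appdata` share (adjust the share names to your system):

```bash title="Prepare Unraid paths for the .env file (sketch)"
# Create the upload folder on the images share (this absolute path becomes UPLOAD_LOCATION)
mkdir -p /mnt/user/images/immich

# Create the Postgres data folder on a cache-backed share (this becomes DB_DATA_LOCATION)
mkdir -p /mnt/user/appdata/postgresql/data
```

In the pasted `.env`, `UPLOAD_LOCATION` and `DB_DATA_LOCATION` would then point at those two absolute paths.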
@@ -131,6 +131,10 @@ For more information on how to use the application once installed, please refer

## Updating Steps

+ :::danger
+ Make sure to read the general [upgrade instructions](/docs/install/upgrading.md).
+ :::
+
Updating is extremely easy however it's important to be aware that containers managed via the Docker Compose Manager plugin do not integrate with Unraid's native dockerman UI, the label "_update ready_" will always be present on containers installed via the Docker Compose Manager.

<img
docs/docs/install/upgrading.md (new file, 29 lines)
@@ -0,0 +1,29 @@
---
sidebar_position: 95
---

# Upgrading

:::danger Read the release notes
Immich is currently under heavy development, which means you can expect [breaking changes][breaking] and bugs. You should read the release notes prior to updating and take special care when using automated tools like [Watchtower][watchtower].

You can see versions that had breaking changes [here][breaking].
:::

When a new version of Immich is [released][releases], you should read the release notes and account for any breaking changes noted (as mentioned above).
If you use `IMMICH_VERSION` in your `.env` file, it will need to be updated to the latest or desired version.
After that, the application can be upgraded and restarted with the following commands, run in the directory with the `docker-compose.yml` file:

```bash title="Upgrade and restart Immich"
docker compose pull && docker compose up -d
```

To clean up disk space, the old version's obsolete container images can be deleted with the following command:

```bash title="Clean up unused Docker images"
docker image prune
```

[watchtower]: https://containrrr.dev/watchtower/
[breaking]: https://github.com/immich-app/immich/discussions?discussions_q=label%3Achangelog%3Abreaking-change+sort%3Adate_created
[releases]: https://github.com/immich-app/immich/releases
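One step the upgrade commands above do not spell out is bumping a pinned version. A minimal sketch, assuming `IMMICH_VERSION` is set (uncommented) in `.env` and using `v1.134.0` purely as a placeholder tag:

```bash title="Bump a pinned IMMICH_VERSION before pulling (sketch)"
# Point the stack at the desired release tag (placeholder value shown)
sed -i 's/^IMMICH_VERSION=.*/IMMICH_VERSION=v1.134.0/' .env

# Then pull and restart as described above
docker compose pull && docker compose up -d
```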
@@ -1,5 +1,5 @@
---
- sidebar_position: 2
+ sidebar_position: 3
---

# Comparison

docs/docs/overview/img/social-preview-light.webp (new binary file, 233 KiB; binary file not shown)
@@ -1,5 +1,5 @@
---
- sidebar_position: 3
+ sidebar_position: 2
---

# Quick start
@@ -10,11 +10,20 @@ to install and use it.

## Requirements

- Check the [requirements page](/docs/install/requirements) to get started.
+ - A system with at least 4GB of RAM and 2 CPU cores.
+ - [Docker](https://docs.docker.com/engine/install/)
+
+ > For a more detailed list of requirements, see the [requirements page](/docs/install/requirements).
+
+ ---

## Set up the server

- Follow the [Docker Compose (Recommended)](/docs/install/docker-compose) instructions to install the server.
+ import DockerComposeSteps from '/docs/partials/_docker-compose-install-steps.mdx';
+
+ <DockerComposeSteps />
+
+ ---

## Try the web app
@@ -26,6 +35,8 @@ Try uploading a picture from your browser.

<img src={require('./img/upload-button.webp').default} title="Upload button" />

+ ---
+
## Try the mobile app

### Download the Mobile App
@@ -56,6 +67,8 @@ You can select the **Jobs** tab to see Immich processing your photos.

<img src={require('/docs/guides/img/jobs-tab.webp').default} title="Jobs tab" width={300} />

+ ---
+
## Review the database backup and restore process

Immich has built-in database backups. You can refer to the
@@ -65,6 +78,8 @@ Immich has built-in database backups. You can refer to the
The database only contains metadata and user information. You must setup manual backups of the images and videos stored in `UPLOAD_LOCATION`.
:::

+ ---
+
## Where to go from here?

You may decide you'd like to install the server a different way; the Install category on the left menu provides many options.
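The note above about manually backing up `UPLOAD_LOCATION` can be as simple as a scheduled copy to separate storage. A minimal sketch, assuming `/mnt/photos/immich-upload` as the upload location and `/mnt/backup/immich-upload` as the backup target (both paths are placeholders):

```bash title="Manual backup of UPLOAD_LOCATION (sketch)"
# Copy the upload directory to the backup target, preserving attributes
rsync -a /mnt/photos/immich-upload/ /mnt/backup/immich-upload/
```

Any tool that produces verified copies works just as well; the point is that the database backup alone does not include the original image and video files.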
@@ -2,9 +2,13 @@
sidebar_position: 1
---

- # Introduction
+ # Welcome to Immich

- <img src={require('./img/feature-panel.webp').default} alt="Immich - Self-hosted photos and videos backup tool" />
+ <img
+ src={require('./img/social-preview-light.webp').default}
+ alt="Immich - Self-hosted photos and videos backup tool"
+ data-theme="light"
+ />

## Welcome!
docs/docs/partials/_docker-compose-install-steps.mdx (new file, 43 lines)
@@ -0,0 +1,43 @@
import CodeBlock from '@theme/CodeBlock';
import ExampleEnv from '!!raw-loader!../../../docker/example.env';

### Step 1 - Download the required files

Create a directory of your choice (e.g. `./immich-app`) to hold the `docker-compose.yml` and `.env` files.

```bash title="Move to the directory you created"
mkdir ./immich-app
cd ./immich-app
```

Download [`docker-compose.yml`](https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml) and [`example.env`](https://github.com/immich-app/immich/releases/latest/download/example.env) by running the following commands:

```bash title="Get docker-compose.yml file"
wget -O docker-compose.yml https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
```

```bash title="Get .env file"
wget -O .env https://github.com/immich-app/immich/releases/latest/download/example.env
```

You can alternatively download these two files from your browser and move them to the directory that you created, in which case ensure that you rename `example.env` to `.env`.

### Step 2 - Populate the .env file with custom values

<CodeBlock language="bash" title="Default environmental variable content">
{ExampleEnv}
</CodeBlock>

- Populate `UPLOAD_LOCATION` with your preferred location for storing backup assets. It should be a new directory on the server with enough free space.
- Consider changing `DB_PASSWORD` to a custom value. Postgres is not publicly exposed, so this password is only used for local authentication.
  To avoid issues with Docker parsing this value, it is best to use only the characters `A-Za-z0-9`. `pwgen` is a handy utility for this.
- Set your timezone by uncommenting the `TZ=` line.
- Populate custom database information if necessary.

### Step 3 - Start the containers

From the directory you created in Step 1 (which should now contain your customized `docker-compose.yml` and `.env` files), run the following command to start Immich as a background service:

```bash title="Start the containers"
docker compose up -d
```
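As a companion to Step 2 and Step 3 above, two quick follow-ups are worth sketching: generating a password in the allowed character set and checking that the stack actually came up. This assumes `pwgen` is installed; the service names are whatever your downloaded `docker-compose.yml` defines.

```bash title="Generate a DB_PASSWORD and verify the stack (sketch)"
# One 24-character alphanumeric password (A-Za-z0-9 only), suitable for DB_PASSWORD
pwgen -s 24 1

# After `docker compose up -d`, list the services and their state
docker compose ps

# Tail logs if a container keeps restarting
docker compose logs --follow
```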
@@ -1,2 +1,7 @@
Now that you have imported some pictures, you should setup server backups to preserve your memories.
You can do so by following our [backup guide](/docs/administration/backup-and-restore.md).
+
+ :::danger
+ Immich is still under heavy development _and_ handles very important data.
+ It is essential that you set up good backups, and test them.
+ :::
@@ -95,7 +95,7 @@ const config = {
position: 'right',
},
{
- to: '/docs/overview/introduction',
+ to: '/docs/overview/welcome',
position: 'right',
label: 'Docs',
},
@@ -124,6 +124,12 @@ const config = {
label: 'Discord',
position: 'right',
},
+ {
+ type: 'html',
+ position: 'right',
+ value:
+ '<a href="https://buy.immich.app" target="_blank" class="no-underline hover:no-underline"><button class="buy-button bg-immich-primary dark:bg-immich-dark-primary text-white dark:text-black rounded-xl">Buy Immich</button></a>',
+ },
],
},
footer: {
@@ -134,7 +140,7 @@ const config = {
items: [
{
label: 'Welcome',
- to: '/docs/overview/introduction',
+ to: '/docs/overview/welcome',
},
{
label: 'Installation',
docs/package-lock.json (generated, 5348 lines changed; file diff suppressed because it is too large)
@@ -57,6 +57,6 @@
"node": ">=20"
},
"volta": {
- "node": "22.14.0"
+ "node": "22.16.0"
}
}
@@ -40,13 +40,9 @@ const projects: CommunityProjectProps[] = [
},
{
title: 'Lightroom Immich Plugin: lrc-immich-plugin',
- description: 'Another Lightroom plugin to publish or export photos from Lightroom to Immich.',
- url: 'https://github.com/bmachek/lrc-immich-plugin',
- },
- {
- title: 'Immich Duplicate Finder',
- description: 'Webapp that uses machine learning to identify near-duplicate images.',
- url: 'https://github.com/vale46n1/immich_duplicate_finder',
+ description:
+ 'Lightroom plugin to publish, export photos from Lightroom to Immich. Import from Immich to Lightroom is also supported.',
+ url: 'https://blog.fokuspunk.de/lrc-immich-plugin/',
},
{
title: 'Immich-Tiktok-Remover',
@@ -1,2 +1,3 @@
export const discordPath =
'M 9.1367188 3.8691406 C 9.1217187 3.8691406 9.1067969 3.8700938 9.0917969 3.8710938 C 8.9647969 3.8810937 5.9534375 4.1403594 4.0234375 5.6933594 C 3.0154375 6.6253594 1 12.073203 1 16.783203 C 1 16.866203 1.0215 16.946531 1.0625 17.019531 C 2.4535 19.462531 6.2473281 20.102859 7.1113281 20.130859 L 7.1269531 20.130859 C 7.2799531 20.130859 7.4236719 20.057594 7.5136719 19.933594 L 8.3886719 18.732422 C 6.0296719 18.122422 4.8248594 17.086391 4.7558594 17.025391 C 4.5578594 16.850391 4.5378906 16.549563 4.7128906 16.351562 C 4.8068906 16.244563 4.9383125 16.189453 5.0703125 16.189453 C 5.1823125 16.189453 5.2957188 16.228594 5.3867188 16.308594 C 5.4157187 16.334594 7.6340469 18.216797 11.998047 18.216797 C 16.370047 18.216797 18.589328 16.325641 18.611328 16.306641 C 18.702328 16.227641 18.815734 16.189453 18.927734 16.189453 C 19.059734 16.189453 19.190156 16.243562 19.285156 16.351562 C 19.459156 16.549563 19.441141 16.851391 19.244141 17.025391 C 19.174141 17.087391 17.968375 18.120469 15.609375 18.730469 L 16.484375 19.933594 C 16.574375 20.057594 16.718094 20.130859 16.871094 20.130859 L 16.886719 20.130859 C 17.751719 20.103859 21.5465 19.463531 22.9375 17.019531 C 22.9785 16.947531 23 16.866203 23 16.783203 C 23 12.073203 20.984172 6.624875 19.951172 5.671875 C 18.047172 4.140875 15.036203 3.8820937 14.908203 3.8710938 C 14.895203 3.8700938 14.880188 3.8691406 14.867188 3.8691406 C 14.681188 3.8691406 14.510594 3.9793906 14.433594 4.1503906 C 14.427594 4.1623906 14.362062 4.3138281 14.289062 4.5488281 C 15.548063 4.7608281 17.094141 5.1895937 18.494141 6.0585938 C 18.718141 6.1975938 18.787437 6.4917969 18.648438 6.7167969 C 18.558438 6.8627969 18.402188 6.9433594 18.242188 6.9433594 C 18.156188 6.9433594 18.069234 6.9200937 17.990234 6.8710938 C 15.584234 5.3800938 12.578 5.3046875 12 5.3046875 C 11.422 5.3046875 8.4157187 5.3810469 6.0117188 6.8730469 C 5.9327188 6.9210469 5.8457656 6.9433594 5.7597656 6.9433594 C 5.5997656 6.9433594 5.4425625 6.86475 5.3515625 6.71875 C 5.2115625 6.49375 5.2818594 6.1985938 5.5058594 6.0585938 C 6.9058594 5.1905937 8.4528906 4.7627812 9.7128906 4.5507812 C 9.6388906 4.3147813 9.5714062 4.1643437 9.5664062 4.1523438 C 9.4894063 3.9813438 9.3217188 3.8691406 9.1367188 3.8691406 z M 12 7.3046875 C 12.296 7.3046875 14.950594 7.3403125 16.933594 8.5703125 C 17.326594 8.8143125 17.777234 8.9453125 18.240234 8.9453125 C 18.633234 8.9453125 19.010656 8.8555 19.347656 8.6875 C 19.964656 10.2405 20.690828 12.686219 20.923828 15.199219 C 20.883828 15.143219 20.840922 15.089109 20.794922 15.037109 C 20.324922 14.498109 19.644687 14.191406 18.929688 14.191406 C 18.332687 14.191406 17.754078 14.405437 17.330078 14.773438 C 17.257078 14.832437 15.505 16.21875 12 16.21875 C 8.496 16.21875 6.7450313 14.834687 6.7070312 14.804688 C 6.2540312 14.407687 5.6742656 14.189453 5.0722656 14.189453 C 4.3612656 14.189453 3.6838438 14.494391 3.2148438 15.025391 C 3.1658438 15.080391 3.1201719 15.138266 3.0761719 15.197266 C 3.3091719 12.686266 4.0344375 10.235594 4.6484375 8.6835938 C 4.9864375 8.8525938 5.3657656 8.9433594 5.7597656 8.9433594 C 6.2217656 8.9433594 6.6724531 8.8143125 7.0644531 8.5703125 C 9.0494531 7.3393125 11.704 7.3046875 12 7.3046875 z M 8.890625 10.044922 C 7.966625 10.044922 7.2167969 10.901031 7.2167969 11.957031 C 7.2167969 13.013031 7.965625 13.869141 8.890625 13.869141 C 9.815625 13.869141 10.564453 13.013031 10.564453 11.957031 C 10.564453 10.900031 9.815625 10.044922 8.890625 10.044922 z M 15.109375 10.044922 C 14.185375 10.044922 13.435547 
10.901031 13.435547 11.957031 C 13.435547 13.013031 14.184375 13.869141 15.109375 13.869141 C 16.034375 13.869141 16.783203 13.013031 16.783203 11.957031 C 16.783203 10.900031 16.033375 10.044922 15.109375 10.044922 z';
'M81.15,0c-1.2376,2.1973-2.3489,4.4704-3.3591,6.794-9.5975-1.4396-19.3718-1.4396-28.9945,0-.985-2.3236-2.1216-4.5967-3.3591-6.794-9.0166,1.5407-17.8059,4.2431-26.1405,8.0568C2.779,32.5304-1.6914,56.3725.5312,79.8863c9.6732,7.1476,20.5083,12.603,32.0505,16.0884,2.6014-3.4854,4.8998-7.1981,6.8698-11.0623-3.738-1.3891-7.3497-3.1318-10.8098-5.1523.9092-.6567,1.7932-1.3386,2.6519-1.9953,20.281,9.547,43.7696,9.547,64.0758,0,.8587.7072,1.7427,1.3891,2.6519,1.9953-3.4601,2.0457-7.0718,3.7632-10.835,5.1776,1.97,3.8642,4.2683,7.5769,6.8698,11.0623,11.5419-3.4854,22.3769-8.9156,32.0509-16.0631,2.626-27.2771-4.496-50.9172-18.817-71.8548C98.9811,4.2684,90.1918,1.5659,81.1752.0505l-.0252-.0505ZM42.2802,65.4144c-6.2383,0-11.4159-5.6575-11.4159-12.6535s4.9755-12.6788,11.3907-12.6788,11.5169,5.708,11.4159,12.6788c-.101,6.9708-5.026,12.6535-11.3907,12.6535ZM84.3576,65.4144c-6.2637,0-11.3907-5.6575-11.3907-12.6535s4.9755-12.6788,11.3907-12.6788,11.4917,5.708,11.3906,12.6788c-.101,6.9708-5.026,12.6535-11.3906,12.6535Z';
+ export const discordViewBox = '0 0 126.644 96';
@@ -4,7 +4,7 @@ import React, { useEffect, useState } from 'react';

export default function VersionSwitcher(): JSX.Element {
const [versions, setVersions] = useState([]);
- const [label, setLabel] = useState('Versions');
+ const [activeLabel, setLabel] = useState('Versions');

const windowSize = useWindowSize();
@@ -48,12 +48,13 @@ export default function VersionSwitcher(): JSX.Element {
versions.length > 0 && (
<DropdownNavbarItem
className="version-switcher-34ab39"
- label={label}
+ label={activeLabel}
mobile={windowSize === 'mobile'}
items={versions.map(({ label, url }) => ({
label,
to: new URL(location.pathname + location.search + location.hash, url).href,
target: '_self',
+ className: label === activeLabel ? 'dropdown__link--active menu__link--active' : '', // workaround because React Router `<NavLink>` only supports using URL path for checking if active: https://v5.reactrouter.com/web/api/NavLink/isactive-func
}))}
/>
)
@@ -7,14 +7,22 @@
@tailwind components;
@tailwind utilities;

- @import url('https://fonts.googleapis.com/css2?family=Be+Vietnam+Pro:ital,wght@0,100;0,200;0,300;0,400;0,500;0,600;0,700;0,800;0,900;1,100;1,200;1,300;1,400;1,500;1,600;1,700;1,800;1,900&display=swap');
- body {
- font-family: 'Be Vietnam Pro', serif;
- font-optical-sizing: auto;
- /* font-size: 1.125rem;
- ascent-override: 106.25%;
- size-adjust: 106.25%; */
- }
+ @font-face {
+ font-family: 'Overpass';
+ src: url('/fonts/overpass/Overpass.ttf') format('truetype-variations');
+ font-weight: 1 999;
+ font-style: normal;
+ ascent-override: 106.25%;
+ size-adjust: 106.25%;
+ }
+
+ @font-face {
+ font-family: 'Overpass Mono';
+ src: url('/fonts/overpass/OverpassMono.ttf') format('truetype-variations');
+ font-weight: 1 999;
+ font-style: normal;
+ ascent-override: 106.25%;
+ size-adjust: 106.25%;
+ }

.breadcrumbs__link {
@@ -29,6 +37,7 @@ img {

/* You can override the default Infima variables here. */
:root {
+ font-family: 'Overpass', sans-serif;
--ifm-color-primary: #4250af;
--ifm-color-primary-dark: #4250af;
--ifm-color-primary-darker: #4250af;
@@ -59,14 +68,12 @@ div[class^='announcementBar_'] {
}

.menu__link {
- padding: 10px;
- padding-left: 16px;
+ padding: 10px 10px 10px 16px;
border-radius: 24px;
margin-right: 16px;
}

.menu__list-item-collapsible {
- border-radius: 10px;
margin-right: 16px;
border-radius: 24px;
}
@@ -83,3 +90,12 @@ div[class*='navbar__items'] > li:has(a[class*='version-switcher-34ab39']) {
code {
font-weight: 600;
}

+ .buy-button {
+ padding: 8px 14px;
+ border: 1px solid transparent;
+ font-family: 'Overpass', sans-serif;
+ font-weight: 500;
+ cursor: pointer;
+ box-shadow: 0 0 5px 2px rgba(181, 206, 254, 0.4);
+ }
@@ -2,6 +2,7 @@ import {
mdiBug,
mdiCalendarToday,
mdiCrosshairsOff,
+ mdiCrop,
mdiDatabase,
mdiLeadPencil,
mdiLockOff,
@@ -22,6 +23,18 @@ const withLanguage = (date: Date) => (language: string) => date.toLocaleDateStri
type Item = Omit<TimelineItem, 'done' | 'getDateLabel'> & { date: Date };

const items: Item[] = [
+ {
+ icon: mdiCrop,
+ iconColor: 'tomato',
+ title: 'Image dimensions in EXIF metadata are cursed',
+ description:
+ 'The dimensions in EXIF metadata can be different from the actual dimensions of the image, causing issues with cropping and resizing.',
+ link: {
+ url: 'https://github.com/immich-app/immich/pull/17974',
+ text: '#17974',
+ },
+ date: new Date(2025, 5, 5),
+ },
{
icon: mdiMicrosoftWindows,
iconColor: '#357EC7',
docs/src/pages/errors.md (new file, 5 lines)
@@ -0,0 +1,5 @@
# Errors

## TypeORM Upgrade

The upgrade to Immich `v2.x.x` has a required upgrade path to `v1.132.0+`. This means it is required to start up the application at least once on version `1.132.0` (or later). Doing so will complete database schema upgrades that are required for `v2.0.0`. After Immich has successfully booted on this version, shut the system down and try the `v2.x.x` upgrade again.
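For a Docker Compose install, the required upgrade path above can be walked with a pinned `IMMICH_VERSION`. A minimal sketch, assuming the version is controlled from `.env` (the tag used in step 2 is a placeholder):

```bash title="Two-step upgrade path to v2 (sketch)"
# Step 1: boot at least once on v1.132.0 (or a later v1.x) so its schema migrations run
sed -i 's/^IMMICH_VERSION=.*/IMMICH_VERSION=v1.132.0/' .env
docker compose pull && docker compose up -d
# ...wait until the server has fully started, then stop it
docker compose down

# Step 2: retry the v2 upgrade (placeholder tag shown)
sed -i 's/^IMMICH_VERSION=.*/IMMICH_VERSION=v2.0.0/' .env
docker compose pull && docker compose up -d
```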
@@ -1,12 +1,11 @@
import React from 'react';
import Link from '@docusaurus/Link';
import Layout from '@theme/Layout';
- import { useColorMode } from '@docusaurus/theme-common';
- import { discordPath } from '@site/src/components/svg-paths';
+ import { discordPath, discordViewBox } from '@site/src/components/svg-paths';
+ import ThemedImage from '@theme/ThemedImage';
import Icon from '@mdi/react';
- function HomepageHeader() {
- const { isDarkTheme } = useColorMode();

+ function HomepageHeader() {
return (
<header>
<div className="top-[calc(12%)] md:top-[calc(30%)] h-screen w-full absolute -z-10">
@@ -14,11 +13,14 @@ function HomepageHeader() {
<div className="w-full h-[120vh] absolute left-0 top-0 backdrop-blur-3xl bg-immich-bg/40 dark:bg-transparent"></div>
</div>
<section className="text-center pt-12 sm:pt-24 bg-immich-bg/50 dark:bg-immich-dark-bg/80">
- <img
- src={isDarkTheme ? 'img/logomark-dark.svg' : 'img/logomark-light.svg'}
- className="h-[115px] w-[115px] mb-2 antialiased rounded-none"
- alt="Immich logo"
- />
+ <a href="https://futo.org" target="_blank" rel="noopener noreferrer">
+ <ThemedImage
+ sources={{ dark: 'img/logomark-dark-with-futo.svg', light: 'img/logomark-light-with-futo.svg' }}
+ className="h-[125px] w-[125px] mb-2 antialiased rounded-none"
+ alt="Immich logo"
+ />
+ </a>

<div className="mt-8">
<p className="text-3xl md:text-5xl sm:leading-tight mb-1 font-extrabold text-black/90 dark:text-white px-4">
Self-hosted{' '}
@@ -29,56 +31,49 @@ function HomepageHeader() {
solution<span className="block"></span>
</p>

- <p className="max-w-1/4 m-auto mt-4 px-4">
+ <p className="max-w-1/4 m-auto mt-4 px-4 text-lg text-gray-700 dark:text-gray-100">
Easily back up, organize, and manage your photos on your own server. Immich helps you
<span className="sm:block"></span> browse, search and organize your photos and videos with ease, without
sacrificing your privacy.
</p>
</div>

<div className="flex flex-col sm:flex-row place-items-center place-content-center mt-9 gap-4 ">
<Link
- className="flex place-items-center place-content-center py-3 px-8 border bg-immich-primary dark:bg-immich-dark-primary rounded-xl no-underline hover:no-underline text-white hover:text-gray-50 dark:text-immich-dark-bg font-bold uppercase"
+ className="flex place-items-center place-content-center py-3 px-8 border bg-immich-primary dark:bg-immich-dark-primary rounded-xl no-underline hover:no-underline text-white hover:text-gray-50 dark:text-immich-dark-bg font-bold"
- to="docs/overview/introduction"
+ to="docs/overview/quick-start"
>
- Get started
+ Get Started
</Link>

<Link
- className="flex place-items-center place-content-center py-3 px-8 border bg-immich-primary/10 dark:bg-gray-300 rounded-xl hover:no-underline text-immich-primary dark:text-immich-dark-bg font-bold uppercase"
+ className="flex place-items-center place-content-center py-3 px-8 border bg-white/90 dark:bg-gray-300 rounded-xl hover:no-underline text-immich-primary dark:text-immich-dark-bg font-bold"
to="https://demo.immich.app/"
>
- Demo
+ Open Demo
- </Link>
-
- <Link
- className="flex place-items-center place-content-center py-3 px-8 border bg-immich-primary/10 dark:bg-gray-300 rounded-xl hover:no-underline text-immich-primary dark:text-immich-dark-bg font-bold uppercase"
- to="https://immich.store"
- >
- Buy Merch
</Link>
</div>

- <div className="my-12 flex gap-1 font-medium place-items-center place-content-center text-immich-primary dark:text-immich-dark-primary">
+ <div className="my-8 flex gap-1 font-medium place-items-center place-content-center text-immich-primary dark:text-immich-dark-primary">
- <Icon path={discordPath} size={1} />
+ <Icon
+ path={discordPath}
+ viewBox={discordViewBox} /* viewBox may show an error in your IDE but it is normal. */
+ size={1}
+ />
<Link to="https://discord.immich.app/">Join our Discord</Link>
</div>
- <img
- src={isDarkTheme ? '/img/screenshot-dark.webp' : '/img/screenshot-light.webp'}
+ <ThemedImage
+ sources={{ dark: '/img/screenshot-dark.webp', light: '/img/screenshot-light.webp' }}
alt="screenshots"
className="w-[95%] lg:w-[85%] xl:w-[70%] 2xl:w-[60%] "
/>

<div className="mx-[25%] m-auto my-14 md:my-28">
<hr className="border bg-gray-500 dark:bg-gray-400" />
</div>
- <img
- src={isDarkTheme ? 'img/logomark-dark.svg' : 'img/logomark-light.svg'}
+ <ThemedImage
+ sources={{ dark: 'img/logomark-dark.svg', light: 'img/logomark-light.svg' }}
className="h-[115px] w-[115px] mb-2 antialiased rounded-none"
alt="Immich logo"
/>

<div>
<p className="font-bold text-2xl md:text-5xl ">Download the mobile app</p>
<p className="text-lg">
@@ -91,15 +86,21 @@
<img className="h-24" alt="Get it on Google Play" src="/img/google-play-badge.png" />
</a>
</div>

<div className="h-24">
<a href="https://apps.apple.com/sg/app/immich/id1613945652">
<img className="h-24 sm:p-3.5 p-3" alt="Download on the App Store" src="/img/ios-app-store-badge.svg" />
</a>
</div>
- </div>
- <img
- src={isDarkTheme ? '/img/app-qr-code-dark.svg' : '/img/app-qr-code-light.svg'}
+ <div className="h-24">
+ <a href="https://github.com/immich-app/immich/releases/latest">
+ <img className="h-24 sm:p-3.5 p-3" alt="Download APK" src="/img/download-apk-github.svg" />
+ </a>
+ </div>
+ </div>
+ <ThemedImage
+ sources={{ dark: '/img/app-qr-code-dark.svg', light: '/img/app-qr-code-light.svg' }}
alt="app qr code"
width={'150px'}
className="shadow-lg p-3 my-8 dark:bg-immich-dark-bg "
@@ -115,7 +116,7 @@ export default function Home(): JSX.Element {
<HomepageHeader />
<div className="flex flex-col place-items-center text-center place-content-center dark:bg-immich-dark-bg py-8">
<p>This project is available under GNU AGPL v3 license.</p>
- <p className="text-xs">Privacy should not be a luxury</p>
+ <p className="text-sm">Privacy should not be a luxury</p>
</div>
</Layout>
);
@@ -1,10 +1,6 @@
import React from 'react';
- import Link from '@docusaurus/Link';
import Layout from '@theme/Layout';
- import { useColorMode } from '@docusaurus/theme-common';
function HomepageHeader() {
- const { isDarkTheme } = useColorMode();

return (
<header>
<section className="max-w-[900px] m-4 p-4 md:p-6 md:m-auto md:my-12 border border-red-400 rounded-2xl bg-slate-200 dark:bg-immich-dark-gray">
@@ -76,13 +76,18 @@ import {
mdiWeb,
mdiDatabaseOutline,
mdiLinkEdit,
+ mdiTagFaces,
mdiMovieOpenPlayOutline,
+ mdiCast,
} from '@mdi/js';
import Layout from '@theme/Layout';
import React from 'react';
import { Item, Timeline } from '../components/timeline';

const releases = {
+ 'v1.133.0': new Date(2025, 4, 21),
+ 'v1.130.0': new Date(2025, 2, 25),
+ 'v1.127.0': new Date(2025, 1, 26),
'v1.122.0': new Date(2024, 11, 5),
'v1.120.0': new Date(2024, 10, 6),
'v1.114.0': new Date(2024, 8, 6),
@@ -213,14 +218,6 @@ const roadmap: Item[] = [
iconColor: 'indianred',
title: 'Stable release',
description: 'Immich goes stable',
- getDateLabel: () => 'Planned for early 2025',
- },
- {
- done: false,
- icon: mdiLockOutline,
- iconColor: 'sandybrown',
- title: 'Private/locked photos',
- description: 'Private assets with extra protections',
getDateLabel: () => 'Planned for 2025',
},
{
@@ -242,6 +239,42 @@ const roadmap: Item[] = [
];

const milestones: Item[] = [
+ withRelease({
+ icon: mdiCast,
+ iconColor: 'aqua',
+ title: 'Google Cast (web)',
+ description: 'Cast assets to Google Cast/Chromecast compatible devices',
+ release: 'v1.133.0',
+ }),
+ withRelease({
+ icon: mdiLockOutline,
+ iconColor: 'sandybrown',
+ title: 'Private/locked photos',
+ description: 'Private assets with extra protections',
+ release: 'v1.133.0',
+ }),
+ withRelease({
+ icon: mdiFolderMultiple,
+ iconColor: 'brown',
+ title: 'Folders view in the mobile app',
+ description: 'Browse your photos and videos in their folder structure inside the mobile app',
+ release: 'v1.130.0',
+ }),
+ {
+ icon: mdiStar,
+ iconColor: 'gold',
+ title: '60,000 Stars',
+ description: 'Reached 60K Stars on GitHub!',
+ getDateLabel: withLanguage(new Date(2025, 2, 4)),
+ },
+ withRelease({
+ icon: mdiTagFaces,
+ iconColor: 'teal',
+ title: 'Manual face tagging',
+ description:
+ 'Manually tag or remove faces in photos and videos, even when automatic detection misses or misidentifies them.',
+ release: 'v1.127.0',
+ }),
withRelease({
icon: mdiLinkEdit,
iconColor: 'crimson',
@@ -259,8 +292,8 @@ const milestones: Item[] = [
withRelease({
icon: mdiDatabaseOutline,
iconColor: 'brown',
- title: 'Automatic database backups',
+ title: 'Automatic database dumps',
- description: 'Database backups are now integrated into the Immich server',
+ description: 'Database dumps are now integrated into the Immich server',
release: 'v1.120.0',
}),
{
@@ -293,7 +326,7 @@ const milestones: Item[] = [
withRelease({
icon: mdiFolderMultiple,
iconColor: 'brown',
- title: 'Folders',
+ title: 'Folders view',
description: 'Browse your photos and videos in their folder structure',
release: 'v1.113.0',
}),
docs/static/.well-known/security.txt (new vendored file, 5 lines)
@@ -0,0 +1,5 @@
Policy: https://github.com/immich-app/immich/blob/main/SECURITY.md
Contact: mailto:security@immich.app
Preferred-Languages: en
Expires: 2026-05-01T23:59:00.000Z
Canonical: https://immich.app/.well-known/security.txt
Some files were not shown because too many files have changed in this diff.