Mirror of https://github.com/immich-app/immich.git (synced 2025-12-06 12:51:32 -08:00)

Compare commits: refactor/w ... refact/ass (605 commits)
2  .devcontainer/.gitignore  (vendored)
@@ -1,2 +0,0 @@
.env
library
@@ -1,16 +0,0 @@
ARG BASEIMAGE=mcr.microsoft.com/devcontainers/typescript-node:22@sha256:a20b8a3538313487ac9266875bbf733e544c1aa2091df2bb99ab592a6d4f7399
FROM ${BASEIMAGE}

# Flutter SDK
# https://flutter.dev/docs/development/tools/sdk/releases?tab=linux
ENV FLUTTER_CHANNEL="stable"
ENV FLUTTER_VERSION="3.29.1"
ENV FLUTTER_HOME=/flutter
ENV PATH=${PATH}:${FLUTTER_HOME}/bin

# Flutter SDK
RUN mkdir -p ${FLUTTER_HOME} \
    && curl -C - --output flutter.tar.xz https://storage.googleapis.com/flutter_infra_release/releases/${FLUTTER_CHANNEL}/linux/flutter_linux_${FLUTTER_VERSION}-${FLUTTER_CHANNEL}.tar.xz \
    && tar -xf flutter.tar.xz --strip-components=1 -C ${FLUTTER_HOME} \
    && rm flutter.tar.xz \
    && chown -R 1000:1000 ${FLUTTER_HOME}
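The removed Dockerfile pinned its base image by digest (tag `22` plus a `sha256` digest). If you ever need to reproduce that kind of pin, one general Docker technique (not something this change prescribes) is to resolve the digest behind a tag with Buildx:

```bash
# Sketch: print the manifest digest behind the typescript-node:22 tag so it
# can be pinned as image:tag@sha256:<digest> in a Dockerfile.
docker buildx imagetools inspect mcr.microsoft.com/devcontainers/typescript-node:22
```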
@@ -1,26 +1,67 @@
|
||||
{
|
||||
"name": "Immich",
|
||||
"service": "immich-devcontainer",
|
||||
"name": "Immich - Backend, Frontend and ML",
|
||||
"service": "immich-server",
|
||||
"runServices": [
|
||||
"immich-server",
|
||||
"redis",
|
||||
"database",
|
||||
"immich-machine-learning"
|
||||
],
|
||||
"dockerComposeFile": [
|
||||
"docker-compose.yml",
|
||||
"../docker/docker-compose.dev.yml"
|
||||
"../docker/docker-compose.dev.yml",
|
||||
"./server/container-compose-overrides.yml"
|
||||
],
|
||||
"customizations": {
|
||||
"vscode": {
|
||||
"extensions": [
|
||||
"Dart-Code.dart-code",
|
||||
"Dart-Code.flutter",
|
||||
"dbaeumer.vscode-eslint",
|
||||
"dcmdev.dcm-vscode-extension",
|
||||
"esbenp.prettier-vscode",
|
||||
"svelte.svelte-vscode"
|
||||
"svelte.svelte-vscode",
|
||||
"ms-vscode-remote.remote-containers",
|
||||
"foxundermoon.shell-format",
|
||||
"timonwong.shellcheck",
|
||||
"rvest.vs-code-prettier-eslint",
|
||||
"bluebrown.yamlfmt",
|
||||
"vkrishna04.cspell-sync",
|
||||
"vitest.explorer",
|
||||
"ms-playwright.playwright",
|
||||
"ms-azuretools.vscode-docker"
|
||||
]
|
||||
}
|
||||
},
|
||||
"forwardPorts": [],
|
||||
"initializeCommand": "bash .devcontainer/scripts/initializeCommand.sh",
|
||||
"onCreateCommand": "bash .devcontainer/scripts/onCreateCommand.sh",
|
||||
"forwardPorts": [3000, 9231, 9230, 2283],
|
||||
"portsAttributes": {
|
||||
"3000": {
|
||||
"label": "Immich - Frontend HTTP",
|
||||
"description": "The frontend of the Immich project",
|
||||
"onAutoForward": "openBrowserOnce"
|
||||
},
|
||||
"2283": {
|
||||
"label": "Immich - API Server - HTTP",
|
||||
"description": "The API server of the Immich project"
|
||||
},
|
||||
"9231": {
|
||||
"label": "Immich - API Server - DEBUG",
|
||||
"description": "The API server of the Immich project"
|
||||
},
|
||||
"9230": {
|
||||
"label": "Immich - Workers - DEBUG",
|
||||
"description": "The workers of the Immich project"
|
||||
}
|
||||
},
|
||||
"overrideCommand": true,
|
||||
"workspaceFolder": "/immich",
|
||||
"remoteUser": "node"
|
||||
"workspaceFolder": "/workspaces/immich",
|
||||
"remoteUser": "node",
|
||||
"userEnvProbe": "loginInteractiveShell",
|
||||
"remoteEnv": {
|
||||
// The location where your uploaded files are stored
|
||||
"UPLOAD_LOCATION": "${localEnv:UPLOAD_LOCATION:./library}",
|
||||
// Connection secret for postgres. You should change it to a random password
|
||||
// Please use only the characters `A-Za-z0-9`, without special characters or spaces
|
||||
"DB_PASSWORD": "${localEnv:DB_PASSWORD:postgres}",
|
||||
// The database username
|
||||
"DB_USERNAME": "${localEnv:DB_USERNAME:postgres}",
|
||||
// The database name
|
||||
"DB_DATABASE_NAME": "${localEnv:DB_DATABASE_NAME:immich}"
|
||||
}
|
||||
}
|
||||
|
||||
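The rewritten devcontainer.json reads `UPLOAD_LOCATION`, `DB_PASSWORD`, `DB_USERNAME`, and `DB_DATABASE_NAME` from the host environment via `${localEnv:VAR:default}`, falling back to the defaults shown above. A minimal launch sketch, assuming the Dev Containers CLI (`@devcontainers/cli`) is installed; this is illustrative, not a documented project workflow:

```bash
# Optional host-side overrides; unset values fall back to the defaults
# declared in devcontainer.json (./library, postgres, postgres, immich).
export UPLOAD_LOCATION=/data/immich-library
export DB_PASSWORD='changeMe123'   # letters and digits only, per the comment above

# Build and start the dev container defined in .devcontainer/devcontainer.json.
devcontainer up --workspace-folder .
```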
@@ -1,8 +0,0 @@
services:
  immich-devcontainer:
    build:
      dockerfile: Dockerfile
    extra_hosts:
      - 'host.docker.internal:host-gateway'
    volumes:
      - ..:/immich:cached
34  .devcontainer/mobile/container-compose-overrides.yml  (Normal file)
@@ -0,0 +1,34 @@
services:
  immich-server:
    build:
      target: dev-container-mobile
    environment:
      - IMMICH_SERVER_URL=http://127.0.0.1:2283/
    volumes: !override # bind mount host to /workspaces/immich
      - ..:/workspaces/immich
      - cli_node_modules:/workspaces/immich/cli/node_modules
      - e2e_node_modules:/workspaces/immich/e2e/node_modules
      - open_api_node_modules:/workspaces/immich/open-api/typescript-sdk/node_modules
      - server_node_modules:/workspaces/immich/server/node_modules
      - web_node_modules:/workspaces/immich/web/node_modules
      - ${UPLOAD_LOCATION}/photos:/workspaces/immich/server/upload
      - ${UPLOAD_LOCATION}/photos/upload:/workspaces/immich/server/upload/upload
      - /etc/localtime:/etc/localtime:ro

  database:
    volumes:
      - ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data

volumes:
  # Node modules for each service to avoid conflicts and ensure consistent dependencies
  cli_node_modules:
  e2e_node_modules:
  open_api_node_modules:
  server_node_modules:
  web_node_modules:

  # UPLOAD_LOCATION must be set to a absolute path or vol-upload
  vol-upload:

  # DB_DATA_LOCATION must be set to a absolute path or vol-database
  vol-database:
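The override bind-mounts the repository into /workspaces/immich but masks each package's node_modules with a named volume, so the container's dependency trees never collide with the host's. A quick way to see the effect from inside the container (illustrative commands, not part of the diff):

```bash
# The repo itself is a bind mount from the host...
findmnt -T /workspaces/immich

# ...while node_modules directories are backed by named Docker volumes,
# so `npm ci` inside the container never touches the host tree.
findmnt -T /workspaces/immich/server/node_modules
```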
52  .devcontainer/mobile/devcontainer.json  (Normal file)
@@ -0,0 +1,52 @@
|
||||
{
|
||||
"name": "Immich - Mobile",
|
||||
"service": "immich-server",
|
||||
"runServices": [
|
||||
"immich-server",
|
||||
"redis",
|
||||
"database",
|
||||
"immich-machine-learning"
|
||||
],
|
||||
"dockerComposeFile": [
|
||||
"../../docker/docker-compose.dev.yml",
|
||||
"./container-compose-overrides.yml"
|
||||
],
|
||||
"customizations": {
|
||||
"vscode": {
|
||||
"extensions": [
|
||||
"Dart-Code.dart-code",
|
||||
"Dart-Code.flutter",
|
||||
"dcmdev.dcm-vscode-extension",
|
||||
"esbenp.prettier-vscode",
|
||||
"dbaeumer.vscode-eslint",
|
||||
"esbenp.prettier-vscode",
|
||||
"svelte.svelte-vscode",
|
||||
"ms-vscode-remote.remote-containers",
|
||||
"foxundermoon.shell-format",
|
||||
"timonwong.shellcheck",
|
||||
"rvest.vs-code-prettier-eslint",
|
||||
"bluebrown.yamlfmt",
|
||||
"vkrishna04.cspell-sync",
|
||||
"vitest.explorer",
|
||||
"ms-playwright.playwright",
|
||||
"ms-azuretools.vscode-docker"
|
||||
]
|
||||
}
|
||||
},
|
||||
"forwardPorts": [],
|
||||
"overrideCommand": true,
|
||||
"workspaceFolder": "/workspaces/immich",
|
||||
"remoteUser": "node",
|
||||
"userEnvProbe": "loginInteractiveShell",
|
||||
"remoteEnv": {
|
||||
// The location where your uploaded files are stored
|
||||
"UPLOAD_LOCATION": "${localEnv:UPLOAD_LOCATION:./Library}",
|
||||
// Connection secret for postgres. You should change it to a random password
|
||||
// Please use only the characters `A-Za-z0-9`, without special characters or spaces
|
||||
"DB_PASSWORD": "${localEnv:DB_PASSWORD:postgres}",
|
||||
// The database username
|
||||
"DB_USERNAME": "${localEnv:DB_USERNAME:postgres}",
|
||||
// The database name
|
||||
"DB_DATABASE_NAME": "${localEnv:DB_DATABASE_NAME:immich}"
|
||||
}
|
||||
}
|
||||
@@ -1,6 +0,0 @@
#!/bin/bash

# If .env file does not exist, create it by copying example.env from the docker folder
if [ ! -f ".devcontainer/.env" ]; then
  cp docker/example.env .devcontainer/.env
fi
@@ -1,25 +0,0 @@
#!/bin/bash

# Enable multiarch for arm64 if necessary
if [ "$(dpkg --print-architecture)" = "arm64" ]; then
  sudo dpkg --add-architecture amd64 && \
    sudo apt-get update && \
    sudo apt-get install -y --no-install-recommends \
      qemu-user-static \
      libc6:amd64 \
      libstdc++6:amd64 \
      libgcc1:amd64
fi

# Install DCM
wget -qO- https://dcm.dev/pgp-key.public | sudo gpg --dearmor -o /usr/share/keyrings/dcm.gpg
sudo echo 'deb [signed-by=/usr/share/keyrings/dcm.gpg arch=amd64] https://dcm.dev/debian stable main' | sudo tee /etc/apt/sources.list.d/dart_stable.list

sudo apt-get update
sudo apt-get install dcm

dart --disable-analytics

# Install immich
cd /immich || exit
make install-all
82  .devcontainer/server/container-common.sh  (Executable file)
@@ -0,0 +1,82 @@
|
||||
#!/bin/bash
|
||||
export IMMICH_PORT="${DEV_SERVER_PORT:-2283}"
|
||||
export DEV_PORT="${DEV_PORT:-3000}"
|
||||
|
||||
# search for immich directory inside workspace.
|
||||
# /workspaces/immich is the bind mount, but other directories can be mounted if runing
|
||||
# Devcontainer: Clone [repository|pull request] in container volumne
|
||||
WORKSPACES_DIR="/workspaces"
|
||||
IMMICH_DIR="$WORKSPACES_DIR/immich"
|
||||
IMMICH_DEVCONTAINER_LOG="$HOME/immich-devcontainer.log"
|
||||
|
||||
log() {
|
||||
# Display command on console, log with timestamp to file
|
||||
echo "$*"
|
||||
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*" >>"$IMMICH_DEVCONTAINER_LOG"
|
||||
}
|
||||
|
||||
run_cmd() {
|
||||
# Ensure log directory exists
|
||||
mkdir -p "$(dirname "$IMMICH_DEVCONTAINER_LOG")"
|
||||
|
||||
log "$@"
|
||||
|
||||
# Execute command: display normally on console, log with timestamps to file
|
||||
"$@" 2>&1 | tee >(while IFS= read -r line; do
|
||||
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $line" >>"$IMMICH_DEVCONTAINER_LOG"
|
||||
done)
|
||||
|
||||
# Preserve exit status
|
||||
return "${PIPESTATUS[0]}"
|
||||
}
|
||||
|
||||
# Find directories excluding /workspaces/immich
|
||||
mapfile -t other_dirs < <(find "$WORKSPACES_DIR" -mindepth 1 -maxdepth 1 -type d ! -path "$IMMICH_DIR" ! -name ".*")
|
||||
|
||||
if [ ${#other_dirs[@]} -gt 1 ]; then
|
||||
log "Error: More than one directory found in $WORKSPACES_DIR other than $IMMICH_DIR."
|
||||
exit 1
|
||||
elif [ ${#other_dirs[@]} -eq 1 ]; then
|
||||
export IMMICH_WORKSPACE="${other_dirs[0]}"
|
||||
else
|
||||
export IMMICH_WORKSPACE="$IMMICH_DIR"
|
||||
fi
|
||||
|
||||
log "Found immich workspace in $IMMICH_WORKSPACE"
|
||||
log ""
|
||||
|
||||
fix_permissions() {
|
||||
|
||||
log "Fixing permissions for ${IMMICH_WORKSPACE}"
|
||||
|
||||
run_cmd sudo find "${IMMICH_WORKSPACE}/server/upload" -not -path "${IMMICH_WORKSPACE}/server/upload/postgres/*" -not -path "${IMMICH_WORKSPACE}/server/upload/postgres" -exec chown node {} +
|
||||
|
||||
# Change ownership for directories that exist
|
||||
for dir in "${IMMICH_WORKSPACE}/.vscode" \
|
||||
"${IMMICH_WORKSPACE}/cli/node_modules" \
|
||||
"${IMMICH_WORKSPACE}/e2e/node_modules" \
|
||||
"${IMMICH_WORKSPACE}/open-api/typescript-sdk/node_modules" \
|
||||
"${IMMICH_WORKSPACE}/server/node_modules" \
|
||||
"${IMMICH_WORKSPACE}/server/dist" \
|
||||
"${IMMICH_WORKSPACE}/web/node_modules" \
|
||||
"${IMMICH_WORKSPACE}/web/dist"; do
|
||||
if [ -d "$dir" ]; then
|
||||
run_cmd sudo chown node -R "$dir"
|
||||
fi
|
||||
done
|
||||
|
||||
log ""
|
||||
}
|
||||
|
||||
install_dependencies() {
|
||||
|
||||
log "Installing dependencies"
|
||||
(
|
||||
cd "${IMMICH_WORKSPACE}" || exit 1
|
||||
run_cmd make ci-server
|
||||
run_cmd make ci-sdk
|
||||
run_cmd make build-sdk
|
||||
run_cmd make ci-web
|
||||
)
|
||||
log ""
|
||||
}
|
||||
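container-common.sh pipes each command through tee into a process substitution that timestamps every line into the log file, then returns `${PIPESTATUS[0]}` so the caller sees the command's own exit code rather than tee's. A self-contained sketch of the same pattern (illustrative names, not the project's functions):

```bash
#!/bin/bash
LOG_FILE=/tmp/demo.log   # stand-in for $IMMICH_DEVCONTAINER_LOG

run_logged() {
  # Show output on the console, append timestamped copies to the log file.
  "$@" 2>&1 | tee >(while IFS= read -r line; do
    echo "[$(date '+%Y-%m-%d %H:%M:%S')] $line" >>"$LOG_FILE"
  done)
  return "${PIPESTATUS[0]}"   # exit status of "$@", not of tee
}

run_logged false || echo "caller saw failure: $?"   # prints "caller saw failure: 1"
```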
49  .devcontainer/server/container-compose-overrides.yml  (Normal file)
@@ -0,0 +1,49 @@
|
||||
services:
|
||||
immich-server:
|
||||
build:
|
||||
target: dev-container-server
|
||||
env_file: !reset []
|
||||
hostname: immich-dev
|
||||
environment:
|
||||
- IMMICH_SERVER_URL=http://127.0.0.1:2283/
|
||||
volumes: !override
|
||||
- ..:/workspaces/immich
|
||||
- cli_node_modules:/workspaces/immich/cli/node_modules
|
||||
- e2e_node_modules:/workspaces/immich/e2e/node_modules
|
||||
- open_api_node_modules:/workspaces/immich/open-api/typescript-sdk/node_modules
|
||||
- server_node_modules:/workspaces/immich/server/node_modules
|
||||
- web_node_modules:/workspaces/immich/web/node_modules
|
||||
- ${UPLOAD_LOCATION:-upload1-devcontainer-volume}${UPLOAD_LOCATION:+/photos}:/workspaces/immich/server/upload
|
||||
- ${UPLOAD_LOCATION:-upload2-devcontainer-volume}${UPLOAD_LOCATION:+/photos/upload}:/workspaces/immich/server/upload/upload
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
|
||||
immich-web:
|
||||
env_file: !reset []
|
||||
|
||||
immich-machine-learning:
|
||||
env_file: !reset []
|
||||
|
||||
database:
|
||||
env_file: !reset []
|
||||
environment: !override
|
||||
POSTGRES_PASSWORD: ${DB_PASSWORD-postgres}
|
||||
POSTGRES_USER: ${DB_USERNAME-postgres}
|
||||
POSTGRES_DB: ${DB_DATABASE_NAME-immich}
|
||||
POSTGRES_INITDB_ARGS: '--data-checksums'
|
||||
POSTGRES_HOST_AUTH_METHOD: md5
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION:-postgres-devcontainer-volume}${UPLOAD_LOCATION:+/postgres}:/var/lib/postgresql/data
|
||||
|
||||
redis:
|
||||
env_file: !reset []
|
||||
|
||||
volumes:
|
||||
# Node modules for each service to avoid conflicts and ensure consistent dependencies
|
||||
cli_node_modules:
|
||||
e2e_node_modules:
|
||||
open_api_node_modules:
|
||||
server_node_modules:
|
||||
web_node_modules:
|
||||
upload1-devcontainer-volume:
|
||||
upload2-devcontainer-volume:
|
||||
postgres-devcontainer-volume:
|
||||
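The server override leans on two Compose features: the `!reset` / `!override` YAML tags to drop or replace values merged in from docker-compose.dev.yml, and paired `${VAR:-default}` / `${VAR:+suffix}` expansions, so `${UPLOAD_LOCATION:-upload1-devcontainer-volume}${UPLOAD_LOCATION:+/photos}` resolves to a named volume when UPLOAD_LOCATION is unset and to `<path>/photos` when it is set. The expansion half is plain parameter expansion and can be checked in a shell (illustrative):

```bash
resolve() {
  # Mirrors the volume source expression used in the compose override.
  echo "${UPLOAD_LOCATION:-upload1-devcontainer-volume}${UPLOAD_LOCATION:+/photos}"
}

unset UPLOAD_LOCATION
resolve                        # -> upload1-devcontainer-volume (named volume)

UPLOAD_LOCATION=/data/immich
resolve                        # -> /data/immich/photos (bind mount path)
```

To preview the fully merged configuration, `docker compose -f docker/docker-compose.dev.yml -f .devcontainer/server/container-compose-overrides.yml config` can be run from the repository root; the `!reset` and `!override` tags require a reasonably recent Compose release.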
17  .devcontainer/server/container-start-backend.sh  (Executable file)
@@ -0,0 +1,17 @@
#!/bin/bash
# shellcheck source=common.sh
# shellcheck disable=SC1091
source /immich-devcontainer/container-common.sh

log "Starting Nest API Server"
log ""
cd "${IMMICH_WORKSPACE}/server" || (
  log "Immich workspace not found"
  exit 1
)

while true; do
  run_cmd node ./node_modules/.bin/nest start --debug "0.0.0.0:9230" --watch
  log "Nest API Server crashed with exit code $?. Respawning in 3s ..."
  sleep 3
done
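The backend script starts Nest with the inspector bound to 0.0.0.0:9230, one of the debug ports forwarded in devcontainer.json. A quick way to confirm the inspector is listening once the container is up (illustrative; assumes the 9230 port forward is active):

```bash
# The Node inspector exposes a small HTTP discovery endpoint; /json/version
# answers as soon as the debugger port is live.
curl -s http://127.0.0.1:9230/json/version
```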
22  .devcontainer/server/container-start-frontend.sh  (Executable file)
@@ -0,0 +1,22 @@
#!/bin/bash
# shellcheck source=common.sh
# shellcheck disable=SC1091
source /immich-devcontainer/container-common.sh

log "Starting Immich Web Frontend"
log ""
cd "${IMMICH_WORKSPACE}/web" || (
  log "Immich Workspace not found"
  exit 1
)

until curl --output /dev/null --silent --head --fail "http://127.0.0.1:${IMMICH_PORT}/api/server/config"; do
  log "Waiting for api server..."
  sleep 1
done

while true; do
  run_cmd node ./node_modules/.bin/vite dev --host 0.0.0.0 --port "${DEV_PORT}"
  log "Web crashed with exit code $?. Respawning in 3s ..."
  sleep 3
done
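Because container-common.sh only supplies defaults (DEV_PORT 3000; IMMICH_PORT 2283, overridable via DEV_SERVER_PORT), the frontend loop can be pointed at another port without editing the script. A usage sketch, assuming it is run inside the dev container:

```bash
# Serve the web frontend on 5173 instead of the default 3000; the script
# still waits for the API on ${IMMICH_PORT} (2283 by default) first.
DEV_PORT=5173 /immich-devcontainer/container-start-frontend.sh
```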
20  .devcontainer/server/container-start.sh  (Executable file)
@@ -0,0 +1,20 @@
#!/bin/bash
# shellcheck source=common.sh
# shellcheck disable=SC1091
source /immich-devcontainer/container-common.sh

log "Setting up Immich dev container..."
fix_permissions

log "Installing npm dependencies (node_modules)..."
install_dependencies

log "Setup complete, please wait while backend and frontend services automatically start"
log
log "If necessary, the services may be manually started using"
log
log "$ /immich-devcontainer/container-start-backend.sh"
log "$ /immich-devcontainer/container-start-frontend.sh"
log
log "From different terminal windows, as these scripts automatically restart the server"
log "on error, and will continuously run in a loop"
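As the setup script's closing message says, the two long-running loops are meant to be started from separate terminals when they are not auto-started; for example, inside the dev container:

```bash
# Terminal 1: API server (auto-respawns on crash)
/immich-devcontainer/container-start-backend.sh

# Terminal 2: web frontend (waits for the API, then runs Vite)
/immich-devcontainer/container-start-frontend.sh
```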
@@ -6,7 +6,11 @@ design/
docker/
!docker/scripts
docs/
!docs/package.json
!docs/package-lock.json
e2e/
!e2e/package.json
!e2e/package-lock.json
fastlane/
machine-learning/
misc/
3  .gitattributes  (vendored)
@@ -9,6 +9,9 @@ mobile/lib/**/*.g.dart linguist-generated=true
mobile/lib/**/*.drift.dart -diff -merge
mobile/lib/**/*.drift.dart linguist-generated=true

mobile/drift_schemas/main/drift_schema_*.json -diff -merge
mobile/drift_schemas/main/drift_schema_*.json linguist-generated=true

open-api/typescript-sdk/fetch-client.ts -diff -merge
open-api/typescript-sdk/fetch-client.ts linguist-generated=true
2  .github/.nvmrc  (vendored)
@@ -1 +1 @@
22.14.0
22.16.0
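The pinned Node version for the workflows moves from 22.14.0 to 22.16.0. Locally, a version manager that reads the pin can pick this up; for instance with nvm (an illustration, not a step this diff adds):

```bash
# Install and activate the version pinned in .github/.nvmrc
nvm install "$(cat .github/.nvmrc)"
nvm use "$(cat .github/.nvmrc)"
node --version   # expected: v22.16.0
```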
@@ -14,7 +14,6 @@ body:
|
||||
label: I have searched the existing feature requests, both open and closed, to make sure this is not a duplicate request.
|
||||
options:
|
||||
- label: 'Yes'
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: feature
|
||||
|
||||
1  .github/ISSUE_TEMPLATE/bug_report.yaml  (vendored)
@@ -6,7 +6,6 @@ body:
|
||||
label: I have searched the existing issues, both open and closed, to make sure this is not a duplicate report.
|
||||
options:
|
||||
- label: 'Yes'
|
||||
required: true
|
||||
|
||||
- type: markdown
|
||||
attributes:
|
||||
|
||||
84  .github/workflows/build-mobile.yml  (vendored)
@@ -35,12 +35,12 @@ jobs:
|
||||
should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
|
||||
with:
|
||||
filters: |
|
||||
mobile:
|
||||
@@ -58,32 +58,54 @@ jobs:
|
||||
contents: read
|
||||
# Skip when PR from a fork
|
||||
if: ${{ !github.event.pull_request.head.repo.fork && github.actor != 'dependabot[bot]' && needs.pre-job.outputs.should_run == 'true' }}
|
||||
runs-on: macos-14
|
||||
runs-on: mich
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
ref: ${{ inputs.ref || github.sha }}
|
||||
persist-credentials: false
|
||||
|
||||
- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4
|
||||
with:
|
||||
distribution: 'zulu'
|
||||
java-version: '17'
|
||||
cache: 'gradle'
|
||||
|
||||
- name: Setup Flutter SDK
|
||||
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2
|
||||
with:
|
||||
channel: 'stable'
|
||||
flutter-version-file: ./mobile/pubspec.yaml
|
||||
cache: true
|
||||
- name: Install missing deps
|
||||
run: |
|
||||
sudo add-apt-repository ppa:rmescandon/yq
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y yq xz-utils ninja-build zstd
|
||||
|
||||
- name: Create the Keystore
|
||||
env:
|
||||
KEY_JKS: ${{ secrets.KEY_JKS }}
|
||||
working-directory: ./mobile
|
||||
run: echo $KEY_JKS | base64 -d > android/key.jks
|
||||
run: printf "%s" $KEY_JKS | base64 -d > android/key.jks
|
||||
|
||||
- uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1
|
||||
with:
|
||||
distribution: 'zulu'
|
||||
java-version: '17'
|
||||
|
||||
- name: Restore Gradle Cache
|
||||
id: cache-gradle-restore
|
||||
uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4
|
||||
with:
|
||||
path: |
|
||||
~/.gradle/caches
|
||||
~/.gradle/wrapper
|
||||
~/.android/sdk
|
||||
mobile/android/.gradle
|
||||
mobile/.dart_tool
|
||||
key: build-mobile-gradle-${{ runner.os }}-main
|
||||
|
||||
- name: Setup Flutter SDK
|
||||
uses: subosito/flutter-action@395322a6cded4e9ed503aebd4cc1965625f8e59a # v2.20.0
|
||||
with:
|
||||
channel: 'stable'
|
||||
flutter-version-file: ./mobile/pubspec.yaml
|
||||
cache: true
|
||||
|
||||
- name: Setup Android SDK
|
||||
uses: android-actions/setup-android@9fc6c4e9069bf8d3d10b2204b1fb8f6ef7065407 # v3.2.2
|
||||
with:
|
||||
packages: ''
|
||||
|
||||
- name: Get Packages
|
||||
working-directory: ./mobile
|
||||
@@ -93,18 +115,40 @@ jobs:
|
||||
run: make translation
|
||||
working-directory: ./mobile
|
||||
|
||||
- name: Generate platform APIs
|
||||
run: make pigeon
|
||||
working-directory: ./mobile
|
||||
|
||||
- name: Build Android App Bundle
|
||||
working-directory: ./mobile
|
||||
env:
|
||||
ALIAS: ${{ secrets.ALIAS }}
|
||||
ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
|
||||
ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
|
||||
IS_MAIN: ${{ github.ref == 'refs/heads/main' }}
|
||||
run: |
|
||||
flutter build apk --release
|
||||
flutter build apk --release --split-per-abi --target-platform android-arm,android-arm64,android-x64
|
||||
if [[ $IS_MAIN == 'true' ]]; then
|
||||
flutter build apk --release
|
||||
flutter build apk --release --split-per-abi --target-platform android-arm,android-arm64,android-x64
|
||||
else
|
||||
flutter build apk --debug --split-per-abi --target-platform android-arm64
|
||||
fi
|
||||
|
||||
- name: Publish Android Artifact
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: release-apk-signed
|
||||
path: mobile/build/app/outputs/flutter-apk/*.apk
|
||||
|
||||
- name: Save Gradle Cache
|
||||
id: cache-gradle-save
|
||||
uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 # v4
|
||||
if: github.ref == 'refs/heads/main'
|
||||
with:
|
||||
path: |
|
||||
~/.gradle/caches
|
||||
~/.gradle/wrapper
|
||||
~/.android/sdk
|
||||
mobile/android/.gradle
|
||||
mobile/.dart_tool
|
||||
key: ${{ steps.cache-gradle-restore.outputs.cache-primary-key }}
|
||||
|
||||
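The keystore step now writes the secret with `printf "%s"` instead of `echo`, so no extra newline is appended to the encoded data before it is decoded. The matching encode side, when preparing the `KEY_JKS` secret from a local keystore, would look roughly like this (hypothetical local command using GNU coreutils, not part of the workflow):

```bash
# Encode the Android keystore as a single base64 line suitable for a
# GitHub Actions secret named KEY_JKS.
base64 -w0 android/key.jks > key.jks.b64

# CI side (as in the workflow): decode it back into place.
printf "%s" "$KEY_JKS" | base64 -d > android/key.jks
```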
2  .github/workflows/cache-cleanup.yml  (vendored)
@@ -19,7 +19,7 @@ jobs:
|
||||
actions: write
|
||||
steps:
|
||||
- name: Check out code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
|
||||
17  .github/workflows/cli.yml  (vendored)
@@ -29,15 +29,18 @@ jobs:
|
||||
working-directory: ./cli
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
# Setup .npmrc file to publish to npm
|
||||
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './cli/.nvmrc'
|
||||
registry-url: 'https://registry.npmjs.org'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: Prepare SDK
|
||||
run: npm ci --prefix ../open-api/typescript-sdk/
|
||||
- name: Build SDK
|
||||
@@ -59,7 +62,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
@@ -67,10 +70,10 @@ jobs:
|
||||
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
if: ${{ !github.event.pull_request.head.repo.fork }}
|
||||
with:
|
||||
registry: ghcr.io
|
||||
@@ -85,7 +88,7 @@ jobs:
|
||||
|
||||
- name: Generate docker image tags
|
||||
id: metadata
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
|
||||
with:
|
||||
flavor: |
|
||||
latest=false
|
||||
@@ -96,7 +99,7 @@ jobs:
|
||||
type=raw,value=latest,enable=${{ github.event_name == 'release' }}
|
||||
|
||||
- name: Build and push image
|
||||
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
file: cli/Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
|
||||
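The CLI workflow builds `cli/Dockerfile` for linux/amd64 and linux/arm64. A rough local equivalent of the build step, without the registry push, assuming Buildx and QEMU emulation are available:

```bash
# Multi-platform build of the CLI image from the repository root.
docker buildx build \
  --platform linux/amd64,linux/arm64 \
  --file cli/Dockerfile \
  --tag immich-cli:local \
  .
```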
8  .github/workflows/codeql-analysis.yml  (vendored)
@@ -44,13 +44,13 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@45775bd8235c68ba998cffa5171334d58593da47 # v3
|
||||
uses: github/codeql-action/init@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
@@ -63,7 +63,7 @@ jobs:
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@45775bd8235c68ba998cffa5171334d58593da47 # v3
|
||||
uses: github/codeql-action/autobuild@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||
@@ -76,6 +76,6 @@ jobs:
|
||||
# ./location_of_script_within_repo/buildscript.sh
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@45775bd8235c68ba998cffa5171334d58593da47 # v3
|
||||
uses: github/codeql-action/analyze@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
|
||||
with:
|
||||
category: '/language:${{matrix.language}}'
|
||||
|
||||
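A recurring change across these workflow files is that action pins keep their commit SHA while the trailing comment now records the exact release (for example `# v3.29.0` instead of `# v3`). To check which commit a given release tag points at, one option is a plain remote ref listing (a general git technique, not something the workflow runs):

```bash
# List the tag (and, for annotated tags, the peeled commit marked ^{})
# behind a release of an action repository.
git ls-remote --tags https://github.com/github/codeql-action 'v3.29.0*'
```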
483  .github/workflows/docker.yml  (vendored)
@@ -24,11 +24,11 @@ jobs:
|
||||
should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
|
||||
with:
|
||||
filters: |
|
||||
server:
|
||||
@@ -40,6 +40,8 @@ jobs:
|
||||
- 'machine-learning/**'
|
||||
workflow:
|
||||
- '.github/workflows/docker.yml'
|
||||
- '.github/workflows/multi-runner-build.yml'
|
||||
- '.github/actions/image-build'
|
||||
|
||||
- name: Check if we should force jobs to run
|
||||
id: should_force
|
||||
@@ -58,7 +60,7 @@ jobs:
|
||||
suffix: ['', '-cuda', '-rocm', '-openvino', '-armnn', '-rknn']
|
||||
steps:
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
@@ -87,7 +89,7 @@ jobs:
|
||||
suffix: ['']
|
||||
steps:
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
@@ -103,452 +105,89 @@ jobs:
|
||||
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_PR}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
|
||||
docker buildx imagetools create -t "${REGISTRY_NAME}/${REPOSITORY}:${TAG_COMMIT}" "${REGISTRY_NAME}/${REPOSITORY}:${TAG_OLD}"
|
||||
|
||||
build_and_push_ml:
|
||||
machine-learning:
|
||||
name: Build and Push ML
|
||||
needs: pre-job
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' }}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
env:
|
||||
image: immich-machine-learning
|
||||
context: machine-learning
|
||||
file: machine-learning/Dockerfile
|
||||
GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-machine-learning
|
||||
strategy:
|
||||
# Prevent a failure in one image from stopping the other builds
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- platform: linux/amd64
|
||||
runner: ubuntu-latest
|
||||
device: cpu
|
||||
|
||||
- platform: linux/arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
device: cpu
|
||||
|
||||
- platform: linux/amd64
|
||||
runner: ubuntu-latest
|
||||
device: cuda
|
||||
suffix: -cuda
|
||||
|
||||
- platform: linux/amd64
|
||||
runner: mich
|
||||
device: rocm
|
||||
suffix: -rocm
|
||||
|
||||
- platform: linux/amd64
|
||||
runner: ubuntu-latest
|
||||
device: openvino
|
||||
suffix: -openvino
|
||||
|
||||
- platform: linux/arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
device: armnn
|
||||
suffix: -armnn
|
||||
|
||||
- platform: linux/arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
device: rknn
|
||||
suffix: -rknn
|
||||
|
||||
steps:
|
||||
- name: Prepare
|
||||
run: |
|
||||
platform=${{ matrix.platform }}
|
||||
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
if: ${{ !github.event.pull_request.head.repo.fork }}
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Generate cache key suffix
|
||||
env:
|
||||
REF: ${{ github.ref_name }}
|
||||
run: |
|
||||
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
|
||||
echo "CACHE_KEY_SUFFIX=pr-${{ github.event.number }}" >> $GITHUB_ENV
|
||||
else
|
||||
SUFFIX=$(echo "${REF}" | sed 's/[^a-zA-Z0-9]/-/g')
|
||||
echo "CACHE_KEY_SUFFIX=${SUFFIX}" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Generate cache target
|
||||
id: cache-target
|
||||
run: |
|
||||
if [[ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]]; then
|
||||
# Essentially just ignore the cache output (forks can't write to registry cache)
|
||||
echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "cache-to=type=registry,ref=${GHCR_REPO}-build-cache:${PLATFORM_PAIR}-${{ matrix.device }}-${CACHE_KEY_SUFFIX},mode=max,compression=zstd" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Generate docker image tags
|
||||
id: meta
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
|
||||
env:
|
||||
DOCKER_METADATA_PR_HEAD_SHA: 'true'
|
||||
|
||||
- name: Build and push image
|
||||
id: build
|
||||
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
|
||||
with:
|
||||
context: ${{ env.context }}
|
||||
file: ${{ env.file }}
|
||||
platforms: ${{ matrix.platforms }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-to: ${{ steps.cache-target.outputs.cache-to }}
|
||||
cache-from: |
|
||||
type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ matrix.device }}-${{ env.CACHE_KEY_SUFFIX }}
|
||||
type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ matrix.device }}-main
|
||||
outputs: type=image,"name=${{ env.GHCR_REPO }}",push-by-digest=true,name-canonical=true,push=${{ !github.event.pull_request.head.repo.fork }}
|
||||
build-args: |
|
||||
DEVICE=${{ matrix.device }}
|
||||
BUILD_ID=${{ github.run_id }}
|
||||
BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.metadata.outputs.tags }}
|
||||
BUILD_SOURCE_REF=${{ github.ref_name }}
|
||||
BUILD_SOURCE_COMMIT=${{ github.sha }}
|
||||
|
||||
- name: Export digest
|
||||
run: | # zizmor: ignore[template-injection]
|
||||
mkdir -p ${{ runner.temp }}/digests
|
||||
digest="${{ steps.build.outputs.digest }}"
|
||||
touch "${{ runner.temp }}/digests/${digest#sha256:}"
|
||||
|
||||
- name: Upload digest
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
||||
with:
|
||||
name: ml-digests-${{ matrix.device }}-${{ env.PLATFORM_PAIR }}
|
||||
path: ${{ runner.temp }}/digests/*
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
merge_ml:
|
||||
name: Merge & Push ML
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
actions: read
|
||||
packages: write
|
||||
if: ${{ needs.pre-job.outputs.should_run_ml == 'true' && !github.event.pull_request.head.repo.fork }}
|
||||
env:
|
||||
GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-machine-learning
|
||||
DOCKER_REPO: altran1502/immich-machine-learning
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- device: cpu
|
||||
tag-suffix: ''
|
||||
- device: cuda
|
||||
suffix: -cuda
|
||||
- device: rocm
|
||||
suffix: -rocm
|
||||
tag-suffix: '-cuda'
|
||||
platforms: linux/amd64
|
||||
- device: openvino
|
||||
suffix: -openvino
|
||||
tag-suffix: '-openvino'
|
||||
platforms: linux/amd64
|
||||
- device: armnn
|
||||
suffix: -armnn
|
||||
tag-suffix: '-armnn'
|
||||
platforms: linux/arm64
|
||||
- device: rknn
|
||||
suffix: -rknn
|
||||
needs:
|
||||
- build_and_push_ml
|
||||
steps:
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
|
||||
with:
|
||||
path: ${{ runner.temp }}/digests
|
||||
pattern: ml-digests-${{ matrix.device }}-*
|
||||
merge-multiple: true
|
||||
|
||||
- name: Login to Docker Hub
|
||||
if: ${{ github.event_name == 'release' }}
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to GHCR
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
|
||||
|
||||
- name: Generate docker image tags
|
||||
id: meta
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
|
||||
env:
|
||||
DOCKER_METADATA_PR_HEAD_SHA: 'true'
|
||||
with:
|
||||
flavor: |
|
||||
# Disable latest tag
|
||||
latest=false
|
||||
suffix=${{ matrix.suffix }}
|
||||
images: |
|
||||
name=${{ env.GHCR_REPO }}
|
||||
name=${{ env.DOCKER_REPO }},enable=${{ github.event_name == 'release' }}
|
||||
tags: |
|
||||
# Tag with branch name
|
||||
type=ref,event=branch
|
||||
# Tag with pr-number
|
||||
type=ref,event=pr
|
||||
# Tag with long commit sha hash
|
||||
type=sha,format=long,prefix=commit-
|
||||
# Tag with git tag on release
|
||||
type=ref,event=tag
|
||||
type=raw,value=release,enable=${{ github.event_name == 'release' }}
|
||||
|
||||
- name: Create manifest list and push
|
||||
working-directory: ${{ runner.temp }}/digests
|
||||
run: |
|
||||
# Process annotations
|
||||
declare -a ANNOTATIONS=()
|
||||
if [[ -n "$DOCKER_METADATA_OUTPUT_JSON" ]]; then
|
||||
while IFS= read -r annotation; do
|
||||
# Extract key and value by removing the manifest: prefix
|
||||
if [[ "$annotation" =~ ^manifest:(.+)=(.+)$ ]]; then
|
||||
key="${BASH_REMATCH[1]}"
|
||||
value="${BASH_REMATCH[2]}"
|
||||
# Use array to properly handle arguments with spaces
|
||||
ANNOTATIONS+=(--annotation "index:$key=$value")
|
||||
fi
|
||||
done < <(jq -r '.annotations[]' <<< "$DOCKER_METADATA_OUTPUT_JSON")
|
||||
fi
|
||||
|
||||
TAGS=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
SOURCE_ARGS=$(printf "${GHCR_REPO}@sha256:%s " *)
|
||||
|
||||
docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS
|
||||
|
||||
build_and_push_server:
|
||||
name: Build and Push Server
|
||||
runs-on: ${{ matrix.runner }}
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
|
||||
env:
|
||||
image: immich-server
|
||||
context: .
|
||||
file: server/Dockerfile
|
||||
GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-server
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- platform: linux/amd64
|
||||
runner: ubuntu-latest
|
||||
- platform: linux/arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
steps:
|
||||
- name: Prepare
|
||||
run: |
|
||||
platform=${{ matrix.platform }}
|
||||
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
|
||||
if: ${{ !github.event.pull_request.head.repo.fork }}
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Generate cache key suffix
|
||||
env:
|
||||
REF: ${{ github.ref_name }}
|
||||
run: |
|
||||
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
|
||||
echo "CACHE_KEY_SUFFIX=pr-${{ github.event.number }}" >> $GITHUB_ENV
|
||||
else
|
||||
SUFFIX=$(echo "${REF}" | sed 's/[^a-zA-Z0-9]/-/g')
|
||||
echo "CACHE_KEY_SUFFIX=${SUFFIX}" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Generate cache target
|
||||
id: cache-target
|
||||
run: |
|
||||
if [[ "${{ github.event.pull_request.head.repo.fork }}" == "true" ]]; then
|
||||
# Essentially just ignore the cache output (forks can't write to registry cache)
|
||||
echo "cache-to=type=local,dest=/tmp/discard,ignore-error=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "cache-to=type=registry,ref=${GHCR_REPO}-build-cache:${PLATFORM_PAIR}-${CACHE_KEY_SUFFIX},mode=max,compression=zstd" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Generate docker image tags
|
||||
id: meta
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
|
||||
env:
|
||||
DOCKER_METADATA_PR_HEAD_SHA: 'true'
|
||||
|
||||
- name: Build and push image
|
||||
id: build
|
||||
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
|
||||
with:
|
||||
context: ${{ env.context }}
|
||||
file: ${{ env.file }}
|
||||
platforms: ${{ matrix.platform }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-to: ${{ steps.cache-target.outputs.cache-to }}
|
||||
cache-from: |
|
||||
type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-${{ env.CACHE_KEY_SUFFIX }}
|
||||
type=registry,ref=${{ env.GHCR_REPO }}-build-cache:${{ env.PLATFORM_PAIR }}-main
|
||||
outputs: type=image,"name=${{ env.GHCR_REPO }}",push-by-digest=true,name-canonical=true,push=${{ !github.event.pull_request.head.repo.fork }}
|
||||
build-args: |
|
||||
DEVICE=cpu
|
||||
BUILD_ID=${{ github.run_id }}
|
||||
BUILD_IMAGE=${{ github.event_name == 'release' && github.ref_name || steps.metadata.outputs.tags }}
|
||||
BUILD_SOURCE_REF=${{ github.ref_name }}
|
||||
BUILD_SOURCE_COMMIT=${{ github.sha }}
|
||||
|
||||
- name: Export digest
|
||||
run: | # zizmor: ignore[template-injection]
|
||||
mkdir -p ${{ runner.temp }}/digests
|
||||
digest="${{ steps.build.outputs.digest }}"
|
||||
touch "${{ runner.temp }}/digests/${digest#sha256:}"
|
||||
|
||||
- name: Upload digest
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
||||
with:
|
||||
name: server-digests-${{ env.PLATFORM_PAIR }}
|
||||
path: ${{ runner.temp }}/digests/*
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
merge_server:
|
||||
name: Merge & Push Server
|
||||
runs-on: ubuntu-latest
|
||||
tag-suffix: '-rknn'
|
||||
platforms: linux/arm64
|
||||
- device: rocm
|
||||
tag-suffix: '-rocm'
|
||||
platforms: linux/amd64
|
||||
runner-mapping: '{"linux/amd64": "mich"}'
|
||||
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@094bfb927b8cd75b343abaac27b3241be0fccfe9 # multi-runner-build-workflow-0.1.0
|
||||
permissions:
|
||||
contents: read
|
||||
actions: read
|
||||
packages: write
|
||||
if: ${{ needs.pre-job.outputs.should_run_server == 'true' && !github.event.pull_request.head.repo.fork }}
|
||||
env:
|
||||
GHCR_REPO: ghcr.io/${{ github.repository_owner }}/immich-server
|
||||
DOCKER_REPO: altran1502/immich-server
|
||||
needs:
|
||||
- build_and_push_server
|
||||
steps:
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
|
||||
with:
|
||||
path: ${{ runner.temp }}/digests
|
||||
pattern: server-digests-*
|
||||
merge-multiple: true
|
||||
secrets:
|
||||
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
with:
|
||||
image: immich-machine-learning
|
||||
context: machine-learning
|
||||
dockerfile: machine-learning/Dockerfile
|
||||
platforms: ${{ matrix.platforms }}
|
||||
runner-mapping: ${{ matrix.runner-mapping }}
|
||||
tag-suffix: ${{ matrix.tag-suffix }}
|
||||
dockerhub-push: ${{ github.event_name == 'release' }}
|
||||
build-args: |
|
||||
DEVICE=${{ matrix.device }}
|
||||
|
||||
- name: Login to Docker Hub
if: ${{ github.event_name == 'release' }}
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Login to GHCR
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3

- name: Generate docker image tags
id: meta
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5
env:
DOCKER_METADATA_PR_HEAD_SHA: 'true'
with:
flavor: |
# Disable latest tag
latest=false
suffix=${{ matrix.suffix }}
images: |
name=${{ env.GHCR_REPO }}
name=${{ env.DOCKER_REPO }},enable=${{ github.event_name == 'release' }}
tags: |
# Tag with branch name
type=ref,event=branch
# Tag with pr-number
type=ref,event=pr
# Tag with long commit sha hash
type=sha,format=long,prefix=commit-
# Tag with git tag on release
type=ref,event=tag
type=raw,value=release,enable=${{ github.event_name == 'release' }}

- name: Create manifest list and push
working-directory: ${{ runner.temp }}/digests
run: |
# Process annotations
declare -a ANNOTATIONS=()
if [[ -n "$DOCKER_METADATA_OUTPUT_JSON" ]]; then
while IFS= read -r annotation; do
# Extract key and value by removing the manifest: prefix
if [[ "$annotation" =~ ^manifest:(.+)=(.+)$ ]]; then
key="${BASH_REMATCH[1]}"
value="${BASH_REMATCH[2]}"
# Use array to properly handle arguments with spaces
ANNOTATIONS+=(--annotation "index:$key=$value")
fi
done < <(jq -r '.annotations[]' <<< "$DOCKER_METADATA_OUTPUT_JSON")
fi

TAGS=$(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
SOURCE_ARGS=$(printf "${GHCR_REPO}@sha256:%s " *)

docker buildx imagetools create $TAGS "${ANNOTATIONS[@]}" $SOURCE_ARGS
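The step above stitches the per-platform digests into one multi-arch manifest. A rough sketch of the command it ends up running; tags, digests, and the annotation key/value are placeholders, the real ones come from the metadata action and the downloaded digest artifacts:

```bash
# Placeholder values only; not the actual output of a workflow run
GHCR_REPO=ghcr.io/immich-app/immich-server
docker buildx imagetools create \
  -t "$GHCR_REPO:main" \
  -t "$GHCR_REPO:commit-<long-sha>" \
  --annotation "index:org.opencontainers.image.created=<build-date>" \
  "$GHCR_REPO@sha256:<amd64-digest>" \
  "$GHCR_REPO@sha256:<arm64-digest>"
```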
server:
name: Build and Push Server
needs: pre-job
if: ${{ needs.pre-job.outputs.should_run_server == 'true' }}
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@094bfb927b8cd75b343abaac27b3241be0fccfe9 # multi-runner-build-workflow-0.1.0
permissions:
contents: read
actions: read
packages: write
secrets:
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
with:
image: immich-server
context: .
dockerfile: server/Dockerfile
dockerhub-push: ${{ github.event_name == 'release' }}
build-args: |
DEVICE=cpu

success-check-server:
name: Docker Build & Push Server Success
needs: [merge_server, retag_server]
needs: [server, retag_server]
permissions: {}
runs-on: ubuntu-latest
if: always()
steps:
- name: Any jobs failed?
if: ${{ contains(needs.*.result, 'failure') }}
run: exit 1
- name: All jobs passed or skipped
if: ${{ !(contains(needs.*.result, 'failure')) }}
# zizmor: ignore[template-injection]
run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
with:
needs: ${{ toJSON(needs) }}

success-check-ml:
name: Docker Build & Push ML Success
needs: [merge_ml, retag_ml]
needs: [machine-learning, retag_ml]
permissions: {}
runs-on: ubuntu-latest
if: always()
steps:
- name: Any jobs failed?
if: ${{ contains(needs.*.result, 'failure') }}
run: exit 1
- name: All jobs passed or skipped
if: ${{ !(contains(needs.*.result, 'failure')) }}
# zizmor: ignore[template-injection]
run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
with:
needs: ${{ toJSON(needs) }}

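The two success-check jobs above gate the overall status on every needed job either passing or being skipped. A rough local equivalent of the `contains(needs.*.result, 'failure')` expression, assuming `NEEDS_JSON` holds a `toJSON(needs)`-shaped payload (the job names below are placeholders):

```bash
# Placeholder payload shaped like toJSON(needs)
NEEDS_JSON='{"server":{"result":"success"},"retag_server":{"result":"skipped"}}'
if jq -e '[.[].result] | index("failure")' <<< "$NEEDS_JSON" > /dev/null; then
  echo "at least one needed job failed" && exit 1
fi
echo "all needed jobs passed or were skipped"
```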
13
.github/workflows/docs-build.yml
vendored
@@ -21,11 +21,11 @@ jobs:
|
||||
should_run: ${{ steps.found_paths.outputs.docs == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
|
||||
with:
|
||||
filters: |
|
||||
docs:
|
||||
@@ -49,14 +49,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './docs/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: Run npm install
|
||||
run: npm ci
|
||||
@@ -68,8 +70,9 @@ jobs:
|
||||
run: npm run build
|
||||
|
||||
- name: Upload build output
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: docs-build-output
|
||||
path: docs/build/
|
||||
include-hidden-files: true
|
||||
retention-days: 1
|
||||
|
||||
21
.github/workflows/docs-deploy.yml
vendored
@@ -20,7 +20,7 @@ jobs:
|
||||
run: echo 'The triggering workflow did not succeed' && exit 1
|
||||
- name: Get artifact
|
||||
id: get-artifact
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
with:
|
||||
script: |
|
||||
let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
|
||||
@@ -38,7 +38,7 @@ jobs:
|
||||
return { found: true, id: matchArtifact.id };
|
||||
- name: Determine deploy parameters
|
||||
id: parameters
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
env:
|
||||
HEAD_SHA: ${{ github.event.workflow_run.head_sha }}
|
||||
with:
|
||||
@@ -108,13 +108,13 @@ jobs:
|
||||
if: ${{ fromJson(needs.checks.outputs.artifact).found && fromJson(needs.checks.outputs.parameters).shouldDeploy }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Load parameters
|
||||
id: parameters
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
env:
|
||||
PARAM_JSON: ${{ needs.checks.outputs.parameters }}
|
||||
with:
|
||||
@@ -125,7 +125,7 @@ jobs:
|
||||
core.setOutput("shouldDeploy", parameters.shouldDeploy);
|
||||
|
||||
- name: Download artifact
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
env:
|
||||
ARTIFACT_JSON: ${{ needs.checks.outputs.artifact }}
|
||||
with:
|
||||
@@ -150,7 +150,7 @@ jobs:
|
||||
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
|
||||
uses: gruntwork-io/terragrunt-action@9559e51d05873b0ea467c42bbabcb5c067642ccc # v2
|
||||
uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
|
||||
with:
|
||||
tg_version: '0.58.12'
|
||||
tofu_version: '1.7.1'
|
||||
@@ -165,7 +165,7 @@ jobs:
|
||||
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
|
||||
uses: gruntwork-io/terragrunt-action@9559e51d05873b0ea467c42bbabcb5c067642ccc # v2
|
||||
uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
|
||||
with:
|
||||
tg_version: '0.58.12'
|
||||
tofu_version: '1.7.1'
|
||||
@@ -181,7 +181,8 @@ jobs:
|
||||
echo "output=$CLEANED" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Publish to Cloudflare Pages
|
||||
uses: cloudflare/pages-action@f0a1cd58cd66095dee69bfa18fa5efd1dde93bca # v1
|
||||
# TODO: Action is deprecated
|
||||
uses: cloudflare/pages-action@f0a1cd58cd66095dee69bfa18fa5efd1dde93bca # v1.5.0
|
||||
with:
|
||||
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN_PAGES_UPLOAD }}
|
||||
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||
@@ -198,7 +199,7 @@ jobs:
|
||||
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
|
||||
uses: gruntwork-io/terragrunt-action@9559e51d05873b0ea467c42bbabcb5c067642ccc # v2
|
||||
uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
|
||||
with:
|
||||
tg_version: '0.58.12'
|
||||
tofu_version: '1.7.1'
|
||||
@@ -206,7 +207,7 @@ jobs:
|
||||
tg_command: 'apply'
|
||||
|
||||
- name: Comment
|
||||
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3
|
||||
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0
|
||||
if: ${{ steps.parameters.outputs.event == 'pr' }}
|
||||
with:
|
||||
number: ${{ fromJson(needs.checks.outputs.parameters).pr_number }}
|
||||
|
||||
6
.github/workflows/docs-destroy.yml
vendored
@@ -14,7 +14,7 @@ jobs:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
@@ -25,7 +25,7 @@ jobs:
|
||||
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
|
||||
uses: gruntwork-io/terragrunt-action@9559e51d05873b0ea467c42bbabcb5c067642ccc # v2
|
||||
uses: gruntwork-io/terragrunt-action@aee21a7df999be8b471c2a8564c6cd853cb674e1 # v2.1.8
|
||||
with:
|
||||
tg_version: '0.58.12'
|
||||
tofu_version: '1.7.1'
|
||||
@@ -33,7 +33,7 @@ jobs:
|
||||
tg_command: 'destroy -refresh=false'
|
||||
|
||||
- name: Comment
|
||||
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3
|
||||
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0
|
||||
with:
|
||||
number: ${{ github.event.number }}
|
||||
delete: true
|
||||
|
||||
12
.github/workflows/fix-format.yml
vendored
@@ -16,34 +16,36 @@ jobs:
|
||||
steps:
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@3ff1caaa28b64c9cc276ce0a02e2ff584f3900c5 # v2
|
||||
uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: 'Checkout'
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.ref }}
|
||||
token: ${{ steps.generate-token.outputs.token }}
|
||||
persist-credentials: true
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './server/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: Fix formatting
|
||||
run: make install-all && make format-all
|
||||
|
||||
- name: Commit and push
|
||||
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9
|
||||
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4
|
||||
with:
|
||||
default_author: github_actions
|
||||
message: 'chore: fix formatting'
|
||||
|
||||
- name: Remove label
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
if: always()
|
||||
with:
|
||||
script: |
|
||||
|
||||
2
.github/workflows/pr-label-validation.yml
vendored
@@ -14,7 +14,7 @@ jobs:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Require PR to have a changelog label
|
||||
uses: mheap/github-action-required-labels@388fd6af37b34cdfe5a23b37060e763217e58b03 # v5
|
||||
uses: mheap/github-action-required-labels@fb29a14a076b0f74099f6198f77750e8fc236016 # v5.5.0
|
||||
with:
|
||||
mode: exactly
|
||||
count: 1
|
||||
|
||||
2
.github/workflows/pr-labeler.yml
vendored
@@ -11,4 +11,4 @@ jobs:
|
||||
pull-requests: write
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5
|
||||
- uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0
|
||||
|
||||
16
.github/workflows/prepare-release.yml
vendored
@@ -32,19 +32,19 @@ jobs:
|
||||
steps:
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@3ff1caaa28b64c9cc276ce0a02e2ff584f3900c5 # v2
|
||||
uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
token: ${{ steps.generate-token.outputs.token }}
|
||||
persist-credentials: true
|
||||
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5
|
||||
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
|
||||
|
||||
- name: Bump version
|
||||
env:
|
||||
@@ -54,7 +54,7 @@ jobs:
|
||||
|
||||
- name: Commit and tag
|
||||
id: push-tag
|
||||
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9
|
||||
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4
|
||||
with:
|
||||
default_author: github_actions
|
||||
message: 'chore: version ${{ env.IMMICH_VERSION }}'
|
||||
@@ -83,24 +83,24 @@ jobs:
|
||||
steps:
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@3ff1caaa28b64c9cc276ce0a02e2ff584f3900c5 # v2
|
||||
uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
token: ${{ steps.generate-token.outputs.token }}
|
||||
persist-credentials: false
|
||||
|
||||
- name: Download APK
|
||||
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
|
||||
with:
|
||||
name: release-apk-signed
|
||||
|
||||
- name: Create draft release
|
||||
uses: softprops/action-gh-release@da05d552573ad5aba039eaac05058a918a7bf631 # v2
|
||||
uses: softprops/action-gh-release@72f2c25fcb47643c292f7107632f7a47c1df5cd8 # v2.3.2
|
||||
with:
|
||||
draft: true
|
||||
tag_name: ${{ env.IMMICH_VERSION }}
|
||||
|
||||
4
.github/workflows/preview-label.yaml
vendored
@@ -13,7 +13,7 @@ jobs:
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2
|
||||
- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
|
||||
with:
|
||||
message-id: 'preview-status'
|
||||
message: 'Deploying preview environment to https://pr-${{ github.event.pull_request.number }}.preview.internal.immich.cloud/'
|
||||
@@ -24,7 +24,7 @@ jobs:
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
|
||||
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
with:
|
||||
script: |
|
||||
github.rest.issues.removeLabel({
|
||||
|
||||
6
.github/workflows/sdk.yml
vendored
@@ -16,15 +16,17 @@ jobs:
|
||||
run:
|
||||
working-directory: ./open-api/typescript-sdk
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
# Setup .npmrc file to publish to npm
|
||||
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './open-api/typescript-sdk/.nvmrc'
|
||||
registry-url: 'https://registry.npmjs.org'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
- name: Install deps
|
||||
run: npm ci
|
||||
- name: Build
|
||||
|
||||
34
.github/workflows/static_analysis.yml
vendored
@@ -20,11 +20,11 @@ jobs:
|
||||
should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
|
||||
with:
|
||||
filters: |
|
||||
mobile:
|
||||
@@ -44,12 +44,12 @@ jobs:
|
||||
contents: read
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Flutter SDK
|
||||
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2
|
||||
uses: subosito/flutter-action@395322a6cded4e9ed503aebd4cc1965625f8e59a # v2.20.0
|
||||
with:
|
||||
channel: 'stable'
|
||||
flutter-version-file: ./mobile/pubspec.yaml
|
||||
@@ -58,16 +58,28 @@ jobs:
|
||||
run: dart pub get
|
||||
working-directory: ./mobile
|
||||
|
||||
- name: Install DCM
|
||||
run: |
|
||||
sudo apt-get update
|
||||
wget -qO- https://dcm.dev/pgp-key.public | sudo gpg --dearmor -o /usr/share/keyrings/dcm.gpg
|
||||
echo 'deb [signed-by=/usr/share/keyrings/dcm.gpg arch=amd64] https://dcm.dev/debian stable main' | sudo tee /etc/apt/sources.list.d/dart_stable.list
|
||||
sudo apt-get update
|
||||
sudo apt-get install dcm
|
||||
|
||||
- name: Generate translation file
|
||||
run: make translation; dart format lib/generated/codegen_loader.g.dart
|
||||
run: make translation
|
||||
working-directory: ./mobile
|
||||
|
||||
- name: Run Build Runner
|
||||
run: make build
|
||||
working-directory: ./mobile
|
||||
|
||||
- name: Generate platform API
|
||||
run: make pigeon
|
||||
working-directory: ./mobile
|
||||
|
||||
- name: Find file changes
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
|
||||
id: verify-changed-files
|
||||
with:
|
||||
files: |
|
||||
@@ -96,6 +108,10 @@ jobs:
|
||||
run: dart run custom_lint
|
||||
working-directory: ./mobile
|
||||
|
||||
- name: Run DCM
|
||||
run: dcm analyze lib --fatal-style --fatal-warnings
|
||||
working-directory: ./mobile
|
||||
|
||||
zizmor:
|
||||
name: zizmor
|
||||
runs-on: ubuntu-latest
|
||||
@@ -105,12 +121,12 @@ jobs:
|
||||
actions: read
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Install the latest version of uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
|
||||
|
||||
- name: Run zizmor 🌈
|
||||
run: uvx zizmor --format=sarif . > results.sarif
|
||||
@@ -118,7 +134,7 @@ jobs:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Upload SARIF file
|
||||
uses: github/codeql-action/upload-sarif@v3
|
||||
uses: github/codeql-action/upload-sarif@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0
|
||||
with:
|
||||
sarif_file: results.sarif
|
||||
category: zizmor
|
||||
|
||||
186
.github/workflows/test.yml
vendored
@@ -17,6 +17,7 @@ jobs:
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should_run_i18n: ${{ steps.found_paths.outputs.i18n == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
should_run_web: ${{ steps.found_paths.outputs.web == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
should_run_server: ${{ steps.found_paths.outputs.server == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
should_run_cli: ${{ steps.found_paths.outputs.cli == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
@@ -28,14 +29,16 @@ jobs:
|
||||
should_run_.github: ${{ steps.found_paths.outputs['.github'] == 'true' || steps.should_force.outputs.should_force == 'true' }} # redundant to have should_force but if someone changes the trigger then this won't have to be changed
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
|
||||
with:
|
||||
filters: |
|
||||
i18n:
|
||||
- 'i18n/**'
|
||||
web:
|
||||
- 'web/**'
|
||||
- 'i18n/**'
|
||||
@@ -73,14 +76,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './server/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: Run npm install
|
||||
run: npm ci
|
||||
@@ -98,7 +103,7 @@ jobs:
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run small tests & coverage
|
||||
run: npm run test:cov
|
||||
run: npm test
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
cli-unit-tests:
|
||||
@@ -114,14 +119,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './cli/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: Setup typescript-sdk
|
||||
run: npm ci && npm run build
|
||||
@@ -143,7 +150,7 @@ jobs:
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run unit tests & coverage
|
||||
run: npm run test:cov
|
||||
run: npm run test
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
cli-unit-tests-win:
|
||||
@@ -159,14 +166,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './cli/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: Setup typescript-sdk
|
||||
run: npm ci && npm run build
|
||||
@@ -181,7 +190,7 @@ jobs:
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run unit tests & coverage
|
||||
run: npm run test:cov
|
||||
run: npm run test
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
web-lint:
|
||||
@@ -197,14 +206,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './web/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: Run setup typescript-sdk
|
||||
run: npm ci && npm run build
|
||||
@@ -238,14 +249,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './web/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: Run setup typescript-sdk
|
||||
run: npm ci && npm run build
|
||||
@@ -259,9 +272,51 @@ jobs:
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run unit tests & coverage
|
||||
run: npm run test:cov
|
||||
run: npm run test
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
i18n-tests:
|
||||
name: Test i18n
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_i18n == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './web/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm --prefix=web ci
|
||||
|
||||
- name: Format
|
||||
run: npm --prefix=web run format:i18n
|
||||
|
||||
- name: Find file changes
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
|
||||
id: verify-changed-files
|
||||
with:
|
||||
files: |
|
||||
i18n/**
|
||||
|
||||
- name: Verify files have not changed
|
||||
if: steps.verify-changed-files.outputs.files_changed == 'true'
|
||||
env:
|
||||
CHANGED_FILES: ${{ steps.verify-changed-files.outputs.changed_files }}
|
||||
run: |
|
||||
echo "ERROR: i18n files not up to date!"
|
||||
echo "Changed files: ${CHANGED_FILES}"
|
||||
exit 1
|
||||
|
||||
e2e-tests-lint:
|
||||
name: End-to-End Lint
|
||||
needs: pre-job
|
||||
@@ -275,14 +330,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './e2e/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: Run setup typescript-sdk
|
||||
run: npm ci && npm run build
|
||||
@@ -318,14 +375,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './server/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: Run npm install
|
||||
run: npm ci
|
||||
@@ -338,24 +397,29 @@ jobs:
|
||||
name: End-to-End Tests (Server & CLI)
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_e2e_server_cli == 'true' }}
|
||||
runs-on: mich
|
||||
runs-on: ${{ matrix.runner }}
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./e2e
|
||||
strategy:
|
||||
matrix:
|
||||
runner: [ubuntu-latest, ubuntu-24.04-arm]
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: 'recursive'
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './e2e/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: Run setup typescript-sdk
|
||||
run: npm ci && npm run build
|
||||
@@ -383,24 +447,29 @@ jobs:
|
||||
name: End-to-End Tests (Web)
|
||||
needs: pre-job
|
||||
if: ${{ needs.pre-job.outputs.should_run_e2e_web == 'true' }}
|
||||
runs-on: mich
|
||||
runs-on: ${{ matrix.runner }}
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./e2e
|
||||
strategy:
|
||||
matrix:
|
||||
runner: [ubuntu-latest, ubuntu-24.04-arm]
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: 'recursive'
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './e2e/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: Run setup typescript-sdk
|
||||
run: npm ci && npm run build
|
||||
@@ -412,7 +481,7 @@ jobs:
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Install Playwright Browsers
|
||||
run: npx playwright install --with-deps chromium
|
||||
run: npx playwright install chromium --only-shell
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Docker build
|
||||
@@ -423,6 +492,17 @@ jobs:
|
||||
run: npx playwright test
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
success-check-e2e:
|
||||
name: End-to-End Tests Success
|
||||
needs: [e2e-tests-server-cli, e2e-tests-web]
|
||||
permissions: {}
|
||||
runs-on: ubuntu-latest
|
||||
if: always()
|
||||
steps:
|
||||
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
|
||||
with:
|
||||
needs: ${{ toJSON(needs) }}
|
||||
|
||||
mobile-unit-tests:
|
||||
name: Unit Test Mobile
|
||||
needs: pre-job
|
||||
@@ -431,15 +511,20 @@ jobs:
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Flutter SDK
|
||||
uses: subosito/flutter-action@e938fdf56512cc96ef2f93601a5a40bde3801046 # v2
|
||||
uses: subosito/flutter-action@395322a6cded4e9ed503aebd4cc1965625f8e59a # v2.20.0
|
||||
with:
|
||||
channel: 'stable'
|
||||
flutter-version-file: ./mobile/pubspec.yaml
|
||||
|
||||
- name: Generate translation file
|
||||
run: make translation
|
||||
working-directory: ./mobile
|
||||
|
||||
- name: Run tests
|
||||
working-directory: ./mobile
|
||||
run: flutter test -j 1
|
||||
@@ -455,13 +540,13 @@ jobs:
|
||||
run:
|
||||
working-directory: ./machine-learning
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5
|
||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5
|
||||
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
|
||||
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
# TODO: add caching when supported (https://github.com/actions/setup-python/pull/818)
|
||||
# with:
|
||||
# python-version: 3.11
|
||||
@@ -495,14 +580,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './.github/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: Run npm install
|
||||
run: npm ci
|
||||
@@ -517,12 +604,12 @@ jobs:
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Run ShellCheck
|
||||
uses: ludeeus/action-shellcheck@master
|
||||
uses: ludeeus/action-shellcheck@00cae500b08a931fb5698e11e79bfbd38e612a38 # 2.0.0
|
||||
with:
|
||||
ignore_paths: >-
|
||||
**/open-api/**
|
||||
@@ -536,14 +623,16 @@ jobs:
|
||||
contents: read
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './server/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: Install server dependencies
|
||||
run: npm --prefix=server ci
|
||||
@@ -555,7 +644,7 @@ jobs:
|
||||
run: make open-api
|
||||
|
||||
- name: Find file changes
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
|
||||
id: verify-changed-files
|
||||
with:
|
||||
files: |
|
||||
@@ -572,14 +661,14 @@ jobs:
|
||||
echo "Changed files: ${CHANGED_FILES}"
|
||||
exit 1
|
||||
|
||||
generated-typeorm-migrations-up-to-date:
|
||||
name: TypeORM Checks
|
||||
sql-schema-up-to-date:
|
||||
name: SQL Schema Checks
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
services:
|
||||
postgres:
|
||||
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
|
||||
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3@sha256:1f5583fe3397210a0fbc7f11b0cec18bacc4a99e3e8ea0548e9bd6bcf26ec37a
|
||||
env:
|
||||
POSTGRES_PASSWORD: postgres
|
||||
POSTGRES_USER: postgres
|
||||
@@ -597,14 +686,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './server/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: Install server dependencies
|
||||
run: npm ci
|
||||
@@ -620,10 +711,10 @@ jobs:
|
||||
|
||||
- name: Generate new migrations
|
||||
continue-on-error: true
|
||||
run: npm run migrations:generate TestMigration
|
||||
run: npm run migrations:generate src/TestMigration
|
||||
|
||||
- name: Find file changes
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
|
||||
id: verify-changed-files
|
||||
with:
|
||||
files: |
|
||||
@@ -644,7 +735,7 @@ jobs:
|
||||
DB_URL: postgres://postgres:postgres@localhost:5432/immich
|
||||
|
||||
- name: Find file changes
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
|
||||
id: verify-changed-sql-files
|
||||
with:
|
||||
files: |
|
||||
@@ -657,6 +748,7 @@ jobs:
|
||||
run: |
|
||||
echo "ERROR: Generated SQL files not up to date!"
|
||||
echo "Changed files: ${CHANGED_FILES}"
|
||||
git diff
|
||||
exit 1
|
||||
|
||||
# mobile-integration-tests:
|
||||
|
||||
16
.github/workflows/weblate-lock.yml
vendored
@@ -15,11 +15,11 @@ jobs:
|
||||
should_run: ${{ steps.found_paths.outputs.i18n == 'true' && github.head_ref != 'chore/translations'}}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
|
||||
with:
|
||||
filters: |
|
||||
i18n:
|
||||
@@ -38,7 +38,7 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
- name: Find Pull Request
|
||||
uses: juliangruber/find-pull-request-action@48b6133aa6c826f267ebd33aa2d29470f9d9e7d0 # v1
|
||||
uses: juliangruber/find-pull-request-action@48b6133aa6c826f267ebd33aa2d29470f9d9e7d0 # v1.9.0
|
||||
id: find-pr
|
||||
with:
|
||||
branch: chore/translations
|
||||
@@ -52,10 +52,6 @@ jobs:
|
||||
permissions: {}
|
||||
if: always()
|
||||
steps:
|
||||
- name: Any jobs failed?
|
||||
if: ${{ contains(needs.*.result, 'failure') }}
|
||||
run: exit 1
|
||||
- name: All jobs passed or skipped
|
||||
if: ${{ !(contains(needs.*.result, 'failure')) }}
|
||||
# zizmor: ignore[template-injection]
|
||||
run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
|
||||
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
|
||||
with:
|
||||
needs: ${{ toJSON(needs) }}
|
||||
|
||||
1
.gitignore
vendored
@@ -3,6 +3,7 @@
|
||||
.DS_Store
|
||||
.vscode/*
|
||||
!.vscode/launch.json
|
||||
!.vscode/extensions.json
|
||||
.idea
|
||||
|
||||
docker/upload
|
||||
|
||||
10
.vscode/extensions.json
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
{
|
||||
"recommendations": [
|
||||
"esbenp.prettier-vscode",
|
||||
"svelte.svelte-vscode",
|
||||
"dbaeumer.vscode-eslint",
|
||||
"dart-code.flutter",
|
||||
"dart-code.dart-code",
|
||||
"dcmdev.dcm-vscode-extension"
|
||||
]
|
||||
}
|
||||
80
.vscode/settings.json
vendored
@@ -1,45 +1,63 @@
|
||||
{
|
||||
"editor.formatOnSave": true,
|
||||
"[javascript]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.tabSize": 2,
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[typescript]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.tabSize": 2,
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[css]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.tabSize": 2,
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[svelte]": {
|
||||
"editor.defaultFormatter": "svelte.svelte-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"svelte.enable-ts-plugin": true,
|
||||
"eslint.validate": [
|
||||
"javascript",
|
||||
"svelte"
|
||||
],
|
||||
"typescript.preferences.importModuleSpecifier": "non-relative",
|
||||
"[dart]": {
|
||||
"editor.defaultFormatter": "Dart-Code.dart-code",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.selectionHighlight": false,
|
||||
"editor.suggest.snippetsPreventQuickSuggestions": false,
|
||||
"editor.suggestSelection": "first",
|
||||
"editor.tabCompletion": "onlySnippets",
|
||||
"editor.wordBasedSuggestions": "off",
|
||||
"editor.defaultFormatter": "Dart-Code.dart-code"
|
||||
"editor.wordBasedSuggestions": "off"
|
||||
},
|
||||
"cSpell.words": [
|
||||
"immich"
|
||||
],
|
||||
"[javascript]": {
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": "explicit",
|
||||
"source.removeUnusedImports": "explicit"
|
||||
},
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[json]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[jsonc]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[svelte]": {
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": "explicit",
|
||||
"source.removeUnusedImports": "explicit"
|
||||
},
|
||||
"editor.defaultFormatter": "svelte.svelte-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[typescript]": {
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": "explicit",
|
||||
"source.removeUnusedImports": "explicit"
|
||||
},
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"cSpell.words": ["immich"],
|
||||
"editor.formatOnSave": true,
|
||||
"eslint.validate": ["javascript", "svelte"],
|
||||
"explorer.fileNesting.enabled": true,
|
||||
"explorer.fileNesting.patterns": {
|
||||
"*.ts": "${capture}.spec.ts,${capture}.mock.ts",
|
||||
"*.dart": "${capture}.g.dart,${capture}.gr.dart,${capture}.drift.dart"
|
||||
}
|
||||
}
|
||||
"*.dart": "${capture}.g.dart,${capture}.gr.dart,${capture}.drift.dart",
|
||||
"*.ts": "${capture}.spec.ts,${capture}.mock.ts"
|
||||
},
|
||||
"svelte.enable-ts-plugin": true,
|
||||
"typescript.preferences.importModuleSpecifier": "non-relative"
|
||||
}
|
||||
|
||||
72
.vscode/tasks.json
vendored
Normal file
@@ -0,0 +1,72 @@
|
||||
{
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "Fix Permissions, Install Dependencies",
|
||||
"type": "shell",
|
||||
"command": "[ -f /immich-devcontainer/container-start.sh ] && /immich-devcontainer/container-start.sh || exit 0",
|
||||
"presentation": {
|
||||
"echo": true,
|
||||
"reveal": "always",
|
||||
"focus": false,
|
||||
"panel": "dedicated",
|
||||
"showReuseMessage": true,
|
||||
"clear": false,
|
||||
"group": "Devcontainer tasks",
|
||||
"close": true
|
||||
},
|
||||
"runOptions": {
|
||||
"runOn": "default"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Immich API Server (Nest)",
|
||||
"dependsOn": ["Fix Permissions, Install Dependencies"],
|
||||
"type": "shell",
|
||||
"command": "[ -f /immich-devcontainer/container-start-backend.sh ] && /immich-devcontainer/container-start-backend.sh || exit 0",
|
||||
"presentation": {
|
||||
"echo": true,
|
||||
"reveal": "always",
|
||||
"focus": false,
|
||||
"panel": "dedicated",
|
||||
"showReuseMessage": true,
|
||||
"clear": false,
|
||||
"group": "Devcontainer tasks",
|
||||
"close": true
|
||||
},
|
||||
"runOptions": {
|
||||
"runOn": "default"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Immich Web Server (Vite)",
|
||||
"dependsOn": ["Fix Permissions, Install Dependencies"],
|
||||
"type": "shell",
|
||||
"command": "[ -f /immich-devcontainer/container-start-frontend.sh ] && /immich-devcontainer/container-start-frontend.sh || exit 0",
|
||||
"presentation": {
|
||||
"echo": true,
|
||||
"reveal": "always",
|
||||
"focus": false,
|
||||
"panel": "dedicated",
|
||||
"showReuseMessage": true,
|
||||
"clear": false,
|
||||
"group": "Devcontainer tasks",
|
||||
"close": true
|
||||
},
|
||||
"runOptions": {
|
||||
"runOn": "default"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Immich Server and Web",
|
||||
"dependsOn": ["Immich Web Server (Vite)", "Immich API Server (Nest)"],
|
||||
"runOptions": {
|
||||
"runOn": "folderOpen"
|
||||
},
|
||||
"problemMatcher": []
|
||||
}
|
||||
]
|
||||
}
|
||||
8
Makefile
@@ -17,6 +17,9 @@ e2e:
prod:
docker compose -f ./docker/docker-compose.prod.yml up --build -V --remove-orphans

prod-down:
docker compose -f ./docker/docker-compose.prod.yml down --remove-orphans

prod-scale:
docker compose -f ./docker/docker-compose.prod.yml up --build -V --scale immich-server=3 --scale immich-microservices=3 --remove-orphans

@@ -45,6 +48,8 @@ audit-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) audit fix
install-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) i
ci-%:
npm --prefix $(subst sdk,open-api/typescript-sdk,$*) ci
build-cli: build-sdk
build-web: build-sdk
build-%: install-%
@@ -78,7 +83,8 @@ test-medium-dev:
docker exec -it immich_server /bin/sh -c "npm run test:medium"

build-all: $(foreach M,$(filter-out e2e .github,$(MODULES)),build-$M) ;
install-all: $(foreach M,$(MODULES),install-$M) ;
install-all: $(foreach M,$(MODULES),install-$M) ;
ci-all: $(foreach M,$(filter-out .github,$(MODULES)),ci-$M) ;
check-all: $(foreach M,$(filter-out sdk cli docs .github,$(MODULES)),check-$M) ;
lint-all: $(foreach M,$(filter-out sdk docs .github,$(MODULES)),lint-$M) ;
format-all: $(foreach M,$(filter-out sdk,$(MODULES)),format-$M) ;

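The `install-%`/`ci-%` pattern rules above route each module to npm, with the `sdk` module rewritten to its real path. A rough sketch of how a few targets expand when invoked from the repo root (hypothetical invocations; module names assume the layout shown in this diff):

```bash
# Each comment shows the command the pattern rule expands to
make install-web   # npm --prefix web i
make ci-sdk        # 'sdk' is substituted, so: npm --prefix open-api/typescript-sdk ci
make install-all   # runs install-<module> for every module listed in $(MODULES)
```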
@@ -1 +1 @@
|
||||
22.14.0
|
||||
22.16.0
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
FROM node:22.14.0-alpine3.20@sha256:40be979442621049f40b1d51a26b55e281246b5de4e5f51a18da7beb6e17e3f9 AS core
|
||||
FROM node:22.16.0-alpine3.20@sha256:2289fb1fba0f4633b08ec47b94a89c7e20b829fc5679f9b7b298eaa2f1ed8b7e AS core
|
||||
|
||||
WORKDIR /usr/src/open-api/typescript-sdk
|
||||
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
|
||||
|
||||
608
cli/package-lock.json
generated
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@immich/cli",
|
||||
"version": "2.2.65",
|
||||
"version": "2.2.72",
|
||||
"description": "Command Line Interface (CLI) for Immich",
|
||||
"type": "module",
|
||||
"exports": "./dist/index.js",
|
||||
@@ -21,7 +21,7 @@
|
||||
"@types/lodash-es": "^4.17.12",
|
||||
"@types/micromatch": "^4.0.9",
|
||||
"@types/mock-fs": "^4.13.1",
|
||||
"@types/node": "^22.14.1",
|
||||
"@types/node": "^22.15.32",
|
||||
"@vitest/coverage-v8": "^3.0.0",
|
||||
"byte-size": "^9.0.0",
|
||||
"cli-progress": "^3.12.0",
|
||||
@@ -29,7 +29,7 @@
|
||||
"eslint": "^9.14.0",
|
||||
"eslint-config-prettier": "^10.0.0",
|
||||
"eslint-plugin-prettier": "^5.1.3",
|
||||
"eslint-plugin-unicorn": "^57.0.0",
|
||||
"eslint-plugin-unicorn": "^59.0.0",
|
||||
"globals": "^16.0.0",
|
||||
"mock-fs": "^5.2.0",
|
||||
"prettier": "^3.2.5",
|
||||
@@ -69,6 +69,6 @@
|
||||
"micromatch": "^4.0.8"
|
||||
},
|
||||
"volta": {
|
||||
"node": "22.14.0"
|
||||
"node": "22.16.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -43,6 +43,7 @@ export interface UploadOptionsDto {
|
||||
concurrency: number;
|
||||
progress?: boolean;
|
||||
watch?: boolean;
|
||||
jsonOutput?: boolean;
|
||||
}
|
||||
|
||||
class UploadFile extends File {
|
||||
@@ -65,8 +66,14 @@ class UploadFile extends File {
|
||||
const uploadBatch = async (files: string[], options: UploadOptionsDto) => {
|
||||
const { newFiles, duplicates } = await checkForDuplicates(files, options);
|
||||
const newAssets = await uploadFiles(newFiles, options);
|
||||
if (options.jsonOutput) {
|
||||
console.log(JSON.stringify({ newFiles, duplicates, newAssets }, undefined, 4));
|
||||
}
|
||||
await updateAlbums([...newAssets, ...duplicates], options);
|
||||
await deleteFiles(newFiles, options);
|
||||
await deleteFiles(
|
||||
newAssets.map(({ filepath }) => filepath),
|
||||
options,
|
||||
);
|
||||
};
|
||||
|
||||
export const startWatch = async (
|
||||
|
||||
@@ -68,6 +68,11 @@ program
|
||||
.env('IMMICH_UPLOAD_CONCURRENCY')
|
||||
.default(4),
|
||||
)
|
||||
.addOption(
|
||||
new Option('-j, --json-output', 'Output detailed information in json format')
|
||||
.env('IMMICH_JSON_OUTPUT')
|
||||
.default(false),
|
||||
)
|
||||
.addOption(new Option('--delete', 'Delete local assets after upload').env('IMMICH_DELETE_ASSETS'))
|
||||
.addOption(new Option('--no-progress', 'Hide progress bars').env('IMMICH_PROGRESS_BAR').default(true))
|
||||
.addOption(
|
||||
|
||||
@@ -48,7 +48,7 @@ services:
|
||||
IMMICH_THIRD_PARTY_SOURCE_URL: https://github.com/immich-app/immich/
|
||||
IMMICH_THIRD_PARTY_BUG_FEATURE_URL: https://github.com/immich-app/immich/issues
|
||||
IMMICH_THIRD_PARTY_DOCUMENTATION_URL: https://immich.app/docs
|
||||
IMMICH_THIRD_PARTY_SUPPORT_URL: https://immich.app/docs/third-party
|
||||
IMMICH_THIRD_PARTY_SUPPORT_URL: https://immich.app/docs/community-guides
|
||||
ulimits:
|
||||
nofile:
|
||||
soft: 1048576
|
||||
@@ -116,13 +116,13 @@ services:
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: docker.io/valkey/valkey:8-bookworm@sha256:42cba146593a5ea9a622002c1b7cba5da7be248650cbb64ecb9c6c33d29794b1
|
||||
image: docker.io/valkey/valkey:8-bookworm@sha256:fec42f399876eb6faf9e008570597741c87ff7662a54185593e74b09ce83d177
|
||||
healthcheck:
|
||||
test: redis-cli ping || exit 1
|
||||
|
||||
database:
|
||||
container_name: immich_postgres
|
||||
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
|
||||
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:5f6a838e4e44c8e0e019d0ebfe3ee8952b69afc2809b2c25f7b0119641978e91
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
@@ -134,25 +134,7 @@ services:
|
||||
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
|
||||
ports:
|
||||
- 5432:5432
|
||||
healthcheck:
|
||||
test: >-
|
||||
pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;
|
||||
Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align
|
||||
--command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";
|
||||
echo "checksum failure count is $$Chksum";
|
||||
[ "$$Chksum" = '0' ] || exit 1
|
||||
interval: 5m
|
||||
start_interval: 30s
|
||||
start_period: 5m
|
||||
command: >-
|
||||
postgres
|
||||
-c shared_preload_libraries=vectors.so
|
||||
-c 'search_path="$$user", public, vectors'
|
||||
-c logging_collector=on
|
||||
-c max_wal_size=2GB
|
||||
-c shared_buffers=512MB
|
||||
-c wal_compression=on
|
||||
|
||||
shm_size: 128mb
|
||||
# set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
|
||||
# immich-prometheus:
|
||||
# container_name: immich_prometheus
|
||||
|
||||
@@ -56,14 +56,14 @@ services:
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: docker.io/valkey/valkey:8-bookworm@sha256:42cba146593a5ea9a622002c1b7cba5da7be248650cbb64ecb9c6c33d29794b1
|
||||
image: docker.io/valkey/valkey:8-bookworm@sha256:fec42f399876eb6faf9e008570597741c87ff7662a54185593e74b09ce83d177
|
||||
healthcheck:
|
||||
test: redis-cli ping || exit 1
|
||||
restart: always
|
||||
|
||||
database:
|
||||
container_name: immich_postgres
|
||||
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
|
||||
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:5f6a838e4e44c8e0e019d0ebfe3ee8952b69afc2809b2c25f7b0119641978e91
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
@@ -75,14 +75,7 @@ services:
|
||||
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
|
||||
ports:
|
||||
- 5432:5432
|
||||
healthcheck:
|
||||
test: >-
|
||||
pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1; Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
|
||||
interval: 5m
|
||||
start_interval: 30s
|
||||
start_period: 5m
|
||||
command: >-
|
||||
postgres -c shared_preload_libraries=vectors.so -c 'search_path="$$user", public, vectors' -c logging_collector=on -c max_wal_size=2GB -c shared_buffers=512MB -c wal_compression=on
|
||||
shm_size: 128mb
|
||||
restart: always
|
||||
|
||||
# set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
|
||||
@@ -90,7 +83,7 @@ services:
|
||||
container_name: immich_prometheus
|
||||
ports:
|
||||
- 9090:9090
|
||||
image: prom/prometheus@sha256:339ce86a59413be18d0e445472891d022725b4803fab609069110205e79fb2f1
|
||||
image: prom/prometheus@sha256:9abc6cf6aea7710d163dbb28d8eeb7dc5baef01e38fa4cd146a406dd9f07f70d
|
||||
volumes:
|
||||
- ./prometheus.yml:/etc/prometheus/prometheus.yml
|
||||
- prometheus-data:/prometheus
|
||||
@@ -99,10 +92,10 @@ services:
|
||||
# add data source for http://immich-prometheus:9090 to get started
|
||||
immich-grafana:
|
||||
container_name: immich_grafana
|
||||
command: [ './run.sh', '-disable-reporting' ]
|
||||
command: ['./run.sh', '-disable-reporting']
|
||||
ports:
|
||||
- 3000:3000
|
||||
image: grafana/grafana:11.6.0-ubuntu@sha256:fd8fa48213c624e1a95122f1d93abbf1cf1cbe85fc73212c1e599dbd76c63ff8
|
||||
image: grafana/grafana:12.0.2-ubuntu@sha256:0512d81cdeaaff0e370a9aa66027b465d1f1f04379c3a9c801a905fabbdbc7a5
|
||||
volumes:
|
||||
- grafana-data:/var/lib/grafana
|
||||
|
||||
|
||||
@@ -49,30 +49,25 @@ services:
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: docker.io/valkey/valkey:8-bookworm@sha256:42cba146593a5ea9a622002c1b7cba5da7be248650cbb64ecb9c6c33d29794b1
|
||||
image: docker.io/valkey/valkey:8-bookworm@sha256:fec42f399876eb6faf9e008570597741c87ff7662a54185593e74b09ce83d177
|
||||
healthcheck:
|
||||
test: redis-cli ping || exit 1
|
||||
restart: always
|
||||
|
||||
database:
|
||||
container_name: immich_postgres
|
||||
image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
|
||||
image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0@sha256:5f6a838e4e44c8e0e019d0ebfe3ee8952b69afc2809b2c25f7b0119641978e91
|
||||
environment:
|
||||
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
||||
POSTGRES_USER: ${DB_USERNAME}
|
||||
POSTGRES_DB: ${DB_DATABASE_NAME}
|
||||
POSTGRES_INITDB_ARGS: '--data-checksums'
|
||||
# Uncomment the DB_STORAGE_TYPE: 'HDD' var if your database isn't stored on SSDs
|
||||
# DB_STORAGE_TYPE: 'HDD'
|
||||
volumes:
|
||||
# Do not edit the next line. If you want to change the database storage location on your system, edit the value of DB_DATA_LOCATION in the .env file
|
||||
- ${DB_DATA_LOCATION}:/var/lib/postgresql/data
|
||||
healthcheck:
|
||||
test: >-
|
||||
pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1; Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
|
||||
interval: 5m
|
||||
start_interval: 30s
|
||||
start_period: 5m
|
||||
command: >-
|
||||
postgres -c shared_preload_libraries=vectors.so -c 'search_path="$$user", public, vectors' -c logging_collector=on -c max_wal_size=2GB -c shared_buffers=512MB -c wal_compression=on
|
||||
shm_size: 128mb
|
||||
restart: always
|
||||
|
||||
volumes:
|
||||
|
||||
@@ -1 +1 @@
|
||||
22.14.0
|
||||
22.16.0
|
||||
|
||||
@@ -490,7 +490,7 @@ You can also scan the Postgres database file structure for errors:
|
||||
<details>
|
||||
<summary>Scan for file structure errors</summary>
|
||||
```bash
|
||||
docker exec -it immich_postgres pg_amcheck --username=postgres --heapallindexed --parent-check --rootdescend --progress --all --install-missing
|
||||
docker exec -it immich_postgres pg_amcheck --username=<DB_USERNAME> --heapallindexed --parent-check --rootdescend --progress --all --install-missing
|
||||
```
|
||||
|
||||
A normal result will end something like this and return with an exit code of `0`:
|
||||
|
||||
@@ -57,7 +57,7 @@ Then please follow the steps in the following section for restoring the database
|
||||
<TabItem value="Linux system" label="Linux system" default>
|
||||
|
||||
```bash title='Backup'
|
||||
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres | gzip > "/path/to/backup/dump.sql.gz"
|
||||
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME> | gzip > "/path/to/backup/dump.sql.gz"
|
||||
```
|
||||
|
||||
```bash title='Restore'
|
||||
@@ -79,7 +79,7 @@ docker compose up -d # Start remainder of Immich apps
|
||||
<TabItem value="Windows system (PowerShell)" label="Windows system (PowerShell)">
|
||||
|
||||
```powershell title='Backup'
|
||||
[System.IO.File]::WriteAllLines("C:\absolute\path\to\backup\dump.sql", (docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres))
|
||||
[System.IO.File]::WriteAllLines("C:\absolute\path\to\backup\dump.sql", (docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME>))
|
||||
```
|
||||
|
||||
```powershell title='Restore'
|
||||
@@ -219,3 +219,10 @@ When you turn off the storage template engine, it will leave the assets in `UPLO
|
||||
Do not touch the files inside these folders under any circumstances except taking a backup. Changing or removing an asset can cause untracked and missing files.
|
||||
You can think of it as App-Which-Must-Not-Be-Named, the only access to viewing, changing and deleting assets is only through the mobile or browser interface.
|
||||
:::
|
||||
|
||||
## Backup ordering
|
||||
|
||||
A backup of Immich should contain both the database and the asset files. When backing these up it's possible for them to get out of sync, potentially resulting in broken assets after you restore.
|
||||
The best way of dealing with this is to stop the immich-server container while you take a backup. If nothing is changing then the backup will always be in sync.
|
||||
|
||||
If stopping the container is not an option, then the recommended order is to back up the database first, and the filesystem second. This way, the worst case scenario is that there are files on the filesystem that the database doesn't know about. If necessary, these can be (re)uploaded manually after a restore. If the backup is done the other way around, with the filesystem first and the database second, it's possible for the restored database to reference files that aren't in the filesystem backup, thus resulting in broken assets.
|
||||
|
||||
@@ -93,6 +93,7 @@ The `.well-known/openid-configuration` part of the url is optional and will be a
|
||||
## Auto Launch
|
||||
|
||||
When Auto Launch is enabled, the login page will automatically redirect the user to the OAuth authorization url, to login with OAuth. To access the login screen again, use the browser's back button, or navigate directly to `/auth/login?autoLaunch=0`.
|
||||
Auto Launch can also be enabled on a per-request basis by navigating to `/auth/login?authLaunch=1`, this can be useful in situations where Immich is called from e.g. Nextcloud using the _External sites_ app and the _oidc_ app so as to enable users to directly interact with a logged-in instance of Immich.
|
||||
|
||||
## Mobile Redirect URI
|
||||
|
||||
|
||||
@@ -10,12 +10,16 @@ Running with a pre-existing Postgres server can unlock powerful administrative f
|
||||
|
||||
## Prerequisites
|
||||
|
||||
You must install pgvecto.rs into your instance of Postgres using their [instructions][vectors-install]. After installation, add `shared_preload_libraries = 'vectors.so'` to your `postgresql.conf`. If you already have some `shared_preload_libraries` set, you can separate each extension with a comma. For example, `shared_preload_libraries = 'pg_stat_statements, vectors.so'`.
|
||||
You must install `pgvector` (`>= 0.7.0, < 1.0.0`), as it is a prerequisite for `vchord`.
|
||||
The easiest way to do this on Debian/Ubuntu is by adding the [PostgreSQL Apt repository][pg-apt] and then
|
||||
running `apt install postgresql-NN-pgvector`, where `NN` is your Postgres version (e.g., `16`).
|
||||
|
||||
You must install VectorChord into your instance of Postgres using their [instructions][vchord-install]. After installation, add `shared_preload_libraries = 'vchord.so'` to your `postgresql.conf`. If you already have some `shared_preload_libraries` set, you can separate each extension with a comma. For example, `shared_preload_libraries = 'pg_stat_statements, vchord.so'`.
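To verify the preload setting took effect, you could run something like the following sketch against your database (the connection details here are assumptions; substitute your own user and database names):

```bash
# Minimal sanity check, assuming psql can reach your Postgres instance.
psql --username=postgres --dbname=immich --command="SHOW shared_preload_libraries;"
# Confirm both pgvector and VectorChord are available to CREATE EXTENSION.
psql --username=postgres --dbname=immich \
  --command="SELECT name, default_version, installed_version FROM pg_available_extensions WHERE name IN ('vector', 'vchord');"
```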
|
||||
|
||||
:::note
|
||||
Immich is known to work with Postgres versions 14, 15, and 16. Earlier versions are unsupported. Postgres 17 is nominally compatible, but pgvecto.rs does not have prebuilt images or packages for it as of writing.
|
||||
Immich is known to work with Postgres versions `>= 14, < 18`.
|
||||
|
||||
Make sure the installed version of pgvecto.rs is compatible with your version of Immich. The current accepted range for pgvecto.rs is `>= 0.2.0, < 0.4.0`.
|
||||
Make sure the installed version of VectorChord is compatible with your version of Immich. The current accepted range for VectorChord is `>= 0.3.0, < 0.5.0`.
|
||||
:::
|
||||
|
||||
## Specifying the connection URL
|
||||
@@ -53,21 +57,99 @@ CREATE DATABASE <immichdatabasename>;
|
||||
\c <immichdatabasename>
|
||||
BEGIN;
|
||||
ALTER DATABASE <immichdatabasename> OWNER TO <immichdbusername>;
|
||||
CREATE EXTENSION vectors;
|
||||
CREATE EXTENSION vchord CASCADE;
|
||||
CREATE EXTENSION earthdistance CASCADE;
|
||||
ALTER DATABASE <immichdatabasename> SET search_path TO "$user", public, vectors;
|
||||
ALTER SCHEMA vectors OWNER TO <immichdbusername>;
|
||||
COMMIT;
|
||||
```
|
||||
|
||||
### Updating pgvecto.rs
|
||||
### Updating VectorChord
|
||||
|
||||
When installing a new version of pgvecto.rs, you will need to manually update the extension by connecting to the Immich database and running `ALTER EXTENSION vectors UPDATE;`.
|
||||
When installing a new version of VectorChord, you will need to manually update the extension and reindex by connecting to the Immich database and running:
|
||||
|
||||
### Common errors
|
||||
```
|
||||
ALTER EXTENSION vchord UPDATE;
|
||||
REINDEX INDEX face_index;
|
||||
REINDEX INDEX clip_index;
|
||||
```
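If your database runs in the standard Immich compose setup, one way to execute these statements is through `psql` inside the container. The container name and credentials below are assumptions taken from the default setup; adjust them to yours:

```bash
docker exec -it immich_postgres psql --username=postgres --dbname=immich \
  --command="ALTER EXTENSION vchord UPDATE;" \
  --command="REINDEX INDEX face_index;" \
  --command="REINDEX INDEX clip_index;"
```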
|
||||
|
||||
#### Permission denied for view
|
||||
## Migrating to VectorChord
|
||||
|
||||
If you get the error `driverError: error: permission denied for view pg_vector_index_stat`, you can fix this by connecting to the Immich database and running `GRANT SELECT ON TABLE pg_vector_index_stat TO <immichdbusername>;`.
|
||||
VectorChord is the successor extension to pgvecto.rs, allowing for higher performance, lower memory usage and higher quality results for smart search and facial recognition.
|
||||
|
||||
[vectors-install]: https://docs.vectorchord.ai/getting-started/installation.html
|
||||
### Migrating from pgvecto.rs
|
||||
|
||||
Support for pgvecto.rs will be dropped in a later release, so we recommend that all users currently on pgvecto.rs migrate to VectorChord at their convenience. There are two primary approaches to do so.
|
||||
|
||||
The easiest option is to have both extensions installed during the migration:
|
||||
|
||||
<details>
|
||||
<summary>Migration steps (automatic)</summary>
|
||||
1. Ensure you still have pgvecto.rs installed
|
||||
2. Install `pgvector` (`>= 0.7.0, < 1.0.0`). The easiest way to do this on Debian/Ubuntu is by adding the [PostgreSQL Apt repository][pg-apt] and then running `apt install postgresql-NN-pgvector`, where `NN` is your Postgres version (e.g., `16`)
|
||||
3. [Install VectorChord][vchord-install]
|
||||
4. Add `shared_preload_libraries = 'vchord.so, vectors.so'` to your `postgresql.conf`, making sure to include _both_ `vchord.so` and `vectors.so`. You may include other libraries here as well if needed
|
||||
5. Restart the Postgres database
|
||||
6. If Immich does not have superuser permissions, run the SQL command `CREATE EXTENSION vchord CASCADE;` using psql or your choice of database client
|
||||
7. Start Immich and wait for the logs `Reindexed face_index` and `Reindexed clip_index` to be output
|
||||
8. If Immich does not have superuser permissions, run the SQL command `DROP EXTENSION vectors;`
|
||||
9. Drop the old schema by running `DROP SCHEMA vectors;`
|
||||
10. Remove the `vectors.so` entry from the `shared_preload_libraries` setting
|
||||
11. Restart the Postgres database
|
||||
12. Uninstall pgvecto.rs (e.g. `apt-get purge vectors-pg14` on Debian-based environments, replacing `pg14` as appropriate). `pgvector` must remain installed as it provides the data types used by `vchord`
|
||||
|
||||
</details>
|
||||
|
||||
If it is not possible to have both VectorChord and pgvecto.rs installed at the same time, you can perform the migration with more manual steps:
|
||||
|
||||
<details>
|
||||
<summary>Migration steps (manual)</summary>
|
||||
1. While pgvecto.rs is still installed, run the following SQL command using psql or your choice of database client. Take note of the number it outputs, as you will need it later
|
||||
|
||||
```sql
|
||||
SELECT atttypmod as dimsize
|
||||
FROM pg_attribute f
|
||||
JOIN pg_class c ON c.oid = f.attrelid
|
||||
WHERE c.relkind = 'r'::char
|
||||
AND f.attnum > 0
|
||||
AND c.relname = 'smart_search'::text
|
||||
AND f.attname = 'embedding'::text;
|
||||
```
|
||||
|
||||
2. Remove references to pgvecto.rs using the below SQL commands
|
||||
|
||||
```sql
|
||||
DROP INDEX IF EXISTS clip_index;
|
||||
DROP INDEX IF EXISTS face_index;
|
||||
ALTER TABLE smart_search ALTER COLUMN embedding SET DATA TYPE real[];
|
||||
ALTER TABLE face_search ALTER COLUMN embedding SET DATA TYPE real[];
|
||||
```
|
||||
|
||||
3. [Install VectorChord][vchord-install]
|
||||
4. Change the columns back to the appropriate vector types, replacing `<number>` with the number from step 1
|
||||
|
||||
```sql
|
||||
CREATE EXTENSION IF NOT EXISTS vchord CASCADE;
|
||||
ALTER TABLE smart_search ALTER COLUMN embedding SET DATA TYPE vector(<number>);
|
||||
ALTER TABLE face_search ALTER COLUMN embedding SET DATA TYPE vector(512);
|
||||
```
|
||||
|
||||
5. Start Immich and let it create new indices using VectorChord
|
||||
|
||||
</details>
|
||||
|
||||
### Migrating from pgvector
|
||||
|
||||
<details>
|
||||
<summary>Migration steps</summary>
|
||||
1. Ensure you have pgvector 0.7.0 or later installed. If it is older, upgrade it and run the SQL command `ALTER EXTENSION vector UPDATE;` using psql or your choice of database client
|
||||
2. Follow the Prerequisites to install VectorChord
|
||||
3. If Immich does not have superuser permissions, run the SQL command `CREATE EXTENSION vchord CASCADE;`
|
||||
4. Remove the `DB_VECTOR_EXTENSION=pgvector` environment variable, as Immich will continue to use pgvector if it is set
|
||||
5. Start Immich and let it create new indices using VectorChord
|
||||
|
||||
</details>
|
||||
|
||||
Note that VectorChord itself uses pgvector types, so you should not uninstall pgvector after following these steps.
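To double-check which vector-related extensions are currently installed after a migration, a sketch like the following can help (again assuming the default container name and credentials):

```bash
docker exec -it immich_postgres psql --username=postgres --dbname=immich \
  --command="SELECT extname, extversion FROM pg_extension WHERE extname IN ('vector', 'vchord', 'vectors');"
```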
|
||||
|
||||
[vchord-install]: https://docs.vectorchord.ai/vectorchord/getting-started/installation.html
|
||||
[pg-apt]: https://www.postgresql.org/download/linux/#generic
|
||||
|
||||
@@ -22,7 +22,7 @@ server {
|
||||
client_max_body_size 50000M;
|
||||
|
||||
# Set headers
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
|
||||
docs/docs/developer/devcontainers.md (new file, 481 lines)
@@ -0,0 +1,481 @@
|
||||
---
|
||||
title: Devcontainers
|
||||
sidebar_position: 3
|
||||
---
|
||||
|
||||
# Development with Dev Containers
|
||||
|
||||
Dev Containers provide a consistent, reproducible development environment using Docker containers. With a single click, you can get started with an Immich development environment on Mac, Linux, Windows, or in the cloud using GitHub Codespaces.
|
||||
|
||||
[](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/immich-app/immich/)
|
||||
|
||||
[](https://codespaces.new/immich-app/immich/)
|
||||
|
||||
[Learn more about Dev Containers](https://docs.github.com/en/codespaces/setting-up-your-project-for-codespaces/adding-a-dev-container-configuration/introduction-to-dev-containers)
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Before getting started, ensure you have:
|
||||
|
||||
- **Docker Desktop** (latest version)
|
||||
- [Mac](https://docs.docker.com/desktop/install/mac-install/)
|
||||
- [Windows](https://docs.docker.com/desktop/install/windows-install/) (with WSL2 backend recommended)
|
||||
- [Linux](https://docs.docker.com/desktop/install/linux-install/)
|
||||
- **Visual Studio Code** with the [Dev Containers extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers)
|
||||
- **Git** for cloning the repository
|
||||
- At least **8GB of RAM** (16GB recommended)
|
||||
- **20GB of free disk space**
|
||||
|
||||
:::tip Alternative Development Environments
|
||||
While this guide focuses on VS Code, you have many options for Dev Container development:
|
||||
|
||||
**Local Editors:**
|
||||
|
||||
- [IntelliJ IDEA](https://www.jetbrains.com/help/idea/connect-to-devcontainer.html) - Full JetBrains IDE support
|
||||
- [neovim](https://github.com/jamestthompson3/nvim-remote-containers) - Lightweight terminal-based editor
|
||||
- [Emacs](https://github.com/emacs-lsp/lsp-docker) - Extensible text editor
|
||||
- [DevContainer CLI](https://github.com/devcontainers/cli) - Command-line interface
|
||||
|
||||
**Cloud-Based Solutions:**
|
||||
|
||||
- [GitHub Codespaces](https://github.com/features/codespaces) - Fully integrated with GitHub, excellent devcontainer.json support
|
||||
- [GitPod](https://www.gitpod.io) - SaaS platform with recent Dev Container support (historically used gitpod.yml)
|
||||
|
||||
**Self-Hostable Options:**
|
||||
|
||||
- [Coder](https://coder.com) - Enterprise-focused, requires Terraform knowledge, self-managed
|
||||
- [DevPod](https://devpod.sh) - Client-only tool with excellent devcontainer.json support, works with any provider (local, cloud, or on-premise)
|
||||
:::
|
||||
|
||||
## Dev Container Services
|
||||
|
||||
The Dev Container environment consists of the following services:
|
||||
|
||||
| Service | Container Name | Description | Ports |
|
||||
| ---------------- | ------------------------- | --------------------------------------------------------- | ----------------------------------------------------------------------- |
|
||||
| Server & Web | `immich-server` | Runs both API server and web frontend in development mode | 2283 (API)<br/>3000 (Web)<br/>9230 (Workers Debug)<br/>9231 (API Debug) |
|
||||
| Database | `database` | PostgreSQL database | 5432 |
|
||||
| Cache | `redis` | Valkey cache server | 6379 |
|
||||
| Machine Learning | `immich-machine-learning` | Immich ML model inference server | 3003 |
|
||||
|
||||
## Getting Started
|
||||
|
||||
### Step 1: Clone the Repository
|
||||
|
||||
```bash
|
||||
git clone https://github.com/immich-app/immich.git
|
||||
cd immich
|
||||
```
|
||||
|
||||
### Step 2: Configure Environment Variables
|
||||
|
||||
The Immich Dev Containers read environment variables from your shell environment, not from `.env` files. This allows them to work in cloud environments without pre-configuration.
|
||||
|
||||
:::important Required Configuration
|
||||
When running locally, if you want to create (or reuse) a database and/or photo storage folder, you must set the `UPLOAD_LOCATION` variable in your shell environment before launching the Dev Container. This determines where uploaded files are stored and also where the database stores its data.
|
||||
|
||||
```bash
|
||||
# Set temporarily for current session
|
||||
export UPLOAD_LOCATION=/opt/dev_upload_folder
|
||||
|
||||
# Or add to your shell profile for persistence
|
||||
# (~/.bashrc, ~/.zshrc, ~/.bash_profile, etc.)
|
||||
echo 'export UPLOAD_LOCATION=/opt/dev_upload_folder' >> ~/.bashrc
|
||||
source ~/.bashrc
|
||||
```
|
||||
|
||||
:::
|
||||
|
||||
### Step 3: Launch the Dev Container
|
||||
|
||||
#### Using VS Code UI:
|
||||
|
||||
1. Open the cloned repository in VS Code
|
||||
2. Press `F1` or `Ctrl/Cmd+Shift+P` to open the command palette
|
||||
3. Type and select "Dev Containers: Rebuild and Reopen in Container"
|
||||
4. Select "Immich - Backend, Frontend and ML" from the list
|
||||
5. Wait for the container to build and start (this may take several minutes on first run)
|
||||
|
||||
#### Using VS Code Quick Actions:
|
||||
|
||||
1. Open the repository in VS Code
|
||||
2. You should see a popup asking if you want to reopen in a container
|
||||
3. Click "Reopen in Container"
|
||||
|
||||
#### Using Command Line:
|
||||
|
||||
```bash
|
||||
# Using the DevContainer CLI
|
||||
devcontainer up --workspace-folder .
|
||||
```
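Once the container is up, the same CLI can run commands inside it; for example, to open a shell (a sketch based on the DevContainer CLI's documented `exec` subcommand):

```bash
devcontainer exec --workspace-folder . bash
```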
|
||||
|
||||
## Environment Variable Details
|
||||
|
||||
### How Dev Containers Handle Environment Variables
|
||||
|
||||
Unlike the Immich developer setup based on Docker Compose which uses `.env` files, Immich Dev Containers read environment variables from your shell environment. This is configured in `.devcontainer/devcontainer.json`:
|
||||
|
||||
```json
|
||||
"remoteEnv": {
|
||||
"UPLOAD_LOCATION": "${localEnv:UPLOAD_LOCATION:./Library}",
|
||||
"DB_PASSWORD": "${localEnv:DB_PASSWORD:postgres}",
|
||||
"DB_USERNAME": "${localEnv:DB_USERNAME:postgres}",
|
||||
"DB_DATABASE_NAME": "${localEnv:DB_DATABASE_NAME:immich}"
|
||||
}
|
||||
```
|
||||
|
||||
The `${localEnv:VARIABLE:default}` syntax reads from your shell environment with optional defaults.
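As a rough illustration of how this resolves (a sketch, not an exhaustive description of the spec):

```bash
# With the mapping above:
export DB_PASSWORD=supersecret   # the container sees DB_PASSWORD=supersecret
unset DB_DATABASE_NAME           # the container falls back to the default "immich"
```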
|
||||
|
||||
### Upload Location Path Resolution
|
||||
|
||||
The `UPLOAD_LOCATION` environment variable controls where files are stored:
|
||||
|
||||
**Default:** `./Library` (relative to the `docker` directory)
|
||||
**Resolved to:** `<immich-root>/docker/Library`
|
||||
|
||||
**Bind Mounts Created:**
|
||||
|
||||
```yaml
|
||||
# From .devcontainer/server/container-compose-overrides.yml
|
||||
- ${UPLOAD_LOCATION-./Library}/photos:/workspaces/immich/server/upload
|
||||
- ${UPLOAD_LOCATION-./Library}/postgres:/var/lib/postgresql/data
|
||||
```
|
||||
|
||||
### Database Configuration
|
||||
|
||||
These variables have sensible defaults (for development) but can be customized:
|
||||
|
||||
| Variable | Default | Description |
|
||||
| ------------------ | ---------- | ------------------- |
|
||||
| `DB_PASSWORD` | `postgres` | PostgreSQL password |
|
||||
| `DB_USERNAME` | `postgres` | PostgreSQL username |
|
||||
| `DB_DATABASE_NAME` | `immich` | Database name |
|
||||
|
||||
### Setting Environment Variables
|
||||
|
||||
Add these to your shell profile (`~/.bashrc`, `~/.zshrc`, `~/.bash_profile`, etc.):
|
||||
|
||||
```bash
|
||||
# Required
|
||||
export UPLOAD_LOCATION=./Library # or absolute path
|
||||
|
||||
# Optional (only if using non-default values)
|
||||
export DB_PASSWORD=your_password
|
||||
export DB_USERNAME=your_username
|
||||
export DB_DATABASE_NAME=your_database
|
||||
```
|
||||
|
||||
Remember to reload your shell configuration:
|
||||
|
||||
```bash
|
||||
source ~/.bashrc # or ~/.zshrc, etc.
|
||||
```
|
||||
|
||||
## Git Configuration
|
||||
|
||||
### SSH Keys and Authentication
|
||||
|
||||
To use your SSH keys for GitHub access inside the Dev Container:
|
||||
|
||||
1. **Start SSH Agent** on your host machine:
|
||||
|
||||
```bash
|
||||
eval "$(ssh-agent -s)"
|
||||
ssh-add ~/.ssh/id_rsa # or your key path
|
||||
```
|
||||
|
||||
2. **VS Code automatically forwards your SSH agent** to the container
|
||||
|
||||
For detailed instructions, see the [VS Code guide on sharing Git credentials](https://code.visualstudio.com/remote/advancedcontainers/sharing-git-credentials).
|
||||
|
||||
### Commit Signing
|
||||
|
||||
To use your SSH key for commit signing, see the [GitHub guide on SSH commit signing](https://docs.github.com/en/authentication/managing-commit-signature-verification/telling-git-about-your-signing-key#telling-git-about-your-ssh-key).
|
||||
|
||||
## Development Workflow
|
||||
|
||||
### Automatic Setup
|
||||
|
||||
When the Dev Container starts, it automatically:
|
||||
|
||||
1. **Runs post-create script** (`container-server-post-create.sh`):
|
||||
|
||||
- Adjusts file permissions for the `node` user
|
||||
- Installs dependencies: `npm install` in all packages
|
||||
- Builds TypeScript SDK: `npm run build` in `open-api/typescript-sdk`
|
||||
|
||||
2. **Starts development servers** via VS Code tasks:
|
||||
|
||||
- `Immich API Server (Nest)` - API server with hot-reloading on port 2283
|
||||
- `Immich Web Server (Vite)` - Web frontend with hot-reloading on port 3000
|
||||
- Both servers watch for file changes and recompile automatically
|
||||
|
||||
3. **Configures port forwarding**:
|
||||
- Web UI: http://localhost:3000 (opens automatically)
|
||||
- API: http://localhost:2283
|
||||
- Debug ports: 9230 (workers), 9231 (API)
|
||||
|
||||
:::info
|
||||
The Dev Container setup replaces the `make dev` command from the traditional setup. All services start automatically when you open the container.
|
||||
:::
|
||||
|
||||
### Accessing Services
|
||||
|
||||
Once running, you can access:
|
||||
|
||||
| Service | URL | Description |
|
||||
| -------- | --------------------- | ---------------------------------------------------------------------------------------------- |
|
||||
| Web UI | http://localhost:3000 | Main web interface |
|
||||
| API | http://localhost:2283 | REST API endpoints (Not used directly, web UI will expose this over http://localhost:3000/api) |
|
||||
| Database | localhost:5432 | PostgreSQL (username: `postgres`) (Not used directly) |
|
||||
|
||||
### Connecting Mobile Apps
|
||||
|
||||
To connect the mobile app to your Dev Container:
|
||||
|
||||
1. Find your machine's IP address
|
||||
2. In the mobile app, use: `http://YOUR_IP:3000/api`
|
||||
3. Ensure your firewall allows connections on port 3000 (or 2283 if you connect to the API server directly)
|
||||
|
||||
### Making Code Changes
|
||||
|
||||
- **Server code** (`/server`): Changes trigger automatic restart
|
||||
- **Web code** (`/web`): Changes trigger hot module replacement
|
||||
- **Database migrations**: Run `npm run sync:sql` in the server directory
|
||||
- **API changes**: Regenerate TypeScript SDK with `make open-api`
|
||||
|
||||
## Testing
|
||||
|
||||
### Running Tests
|
||||
|
||||
The Dev Container supports multiple ways to run tests:
|
||||
|
||||
#### Using Make Commands (Recommended)
|
||||
|
||||
```bash
|
||||
# Run tests for specific components
|
||||
make test-server # Server unit tests
|
||||
make test-web # Web unit tests
|
||||
make test-e2e # End-to-end tests
|
||||
make test-cli # CLI tests
|
||||
|
||||
# Run all tests
|
||||
make test-all # Runs tests for all components
|
||||
|
||||
# Medium tests (integration tests)
|
||||
make test-medium-dev # Medium (integration) tests
|
||||
```
|
||||
|
||||
#### Using NPM Directly
|
||||
|
||||
```bash
|
||||
# Server tests
|
||||
cd /workspaces/immich/server
|
||||
npm test # Run all tests
|
||||
npm run test:watch # Watch mode
|
||||
npm run test:cov # Coverage report
|
||||
|
||||
# Web tests
|
||||
cd /workspaces/immich/web
|
||||
npm test # Run all tests
|
||||
npm run test:watch # Watch mode
|
||||
|
||||
# E2E tests
|
||||
cd /workspaces/immich/e2e
|
||||
npm run test # Run API tests
|
||||
npm run test:web # Run web UI tests
|
||||
```
|
||||
|
||||
### Code Quality Commands
|
||||
|
||||
```bash
|
||||
# Linting
|
||||
make lint-server # Lint server code
|
||||
make lint-web # Lint web code
|
||||
make lint-all # Lint all components
|
||||
|
||||
# Formatting
|
||||
make format-server # Format server code
|
||||
make format-web # Format web code
|
||||
make format-all # Format all code
|
||||
|
||||
# Type checking
|
||||
make check-server # Type check server
|
||||
make check-web # Type check web
|
||||
make check-all # Check all components
|
||||
|
||||
# Complete hygiene check
|
||||
make hygiene-all # Runs lint, format, check, SQL sync, and audit
|
||||
```
|
||||
|
||||
### Additional Make Commands
|
||||
|
||||
```bash
|
||||
# Build commands
|
||||
make build-server # Build server
|
||||
make build-web # Build web app
|
||||
make build-all # Build everything
|
||||
|
||||
# API generation
|
||||
make open-api # Generate OpenAPI specs
|
||||
make open-api-typescript # Generate TypeScript SDK
|
||||
make open-api-dart # Generate Dart SDK
|
||||
|
||||
# Database
|
||||
make sql # Sync database schema
|
||||
|
||||
# Dependencies
|
||||
make install-server # Install server dependencies
|
||||
make install-web # Install web dependencies
|
||||
make install-all # Install all dependencies
|
||||
```
|
||||
|
||||
### Debugging
|
||||
|
||||
The Dev Container is pre-configured for debugging:
|
||||
|
||||
1. **API Server Debugging**:
|
||||
|
||||
- Set breakpoints in VS Code
|
||||
- Press `F5` or use "Run and Debug" panel
|
||||
- Select "Attach to Server" configuration
|
||||
- Debug port: 9231
|
||||
|
||||
2. **Worker Debugging**:
|
||||
|
||||
- Use "Attach to Workers" configuration
|
||||
- Debug port: 9230
|
||||
|
||||
3. **Web Debugging**:
|
||||
- Use browser DevTools
|
||||
- VS Code debugger for Chrome/Edge extensions supported
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
#### Permission Errors
|
||||
|
||||
**Problem**: `EACCES` or permission denied errors
|
||||
**Solution**:
|
||||
|
||||
- The Dev Container runs as the `node` user (UID 1000)
|
||||
- If your host UID differs, you may see permission issues
|
||||
- Try rebuilding the container: "Dev Containers: Rebuild Container"
|
||||
|
||||
#### Container Won't Start
|
||||
|
||||
**Problem**: Dev Container fails to start or build
|
||||
**Solution**:
|
||||
|
||||
1. Check Docker is running: `docker ps`
|
||||
2. Clean Docker resources: `docker system prune -a`
|
||||
3. Check available disk space
|
||||
4. Review Docker Desktop resource limits
|
||||
|
||||
#### Port Already in Use
|
||||
|
||||
**Problem**: "Port 3000/2283 is already in use"
|
||||
**Solution**:
|
||||
|
||||
1. Check for conflicting services: `lsof -i :3000` (macOS/Linux)
|
||||
2. Stop conflicting services or change port mappings
|
||||
3. Restart Docker Desktop
|
||||
|
||||
#### Upload Location Not Set
|
||||
|
||||
**Problem**: Errors about missing UPLOAD_LOCATION
|
||||
**Solution**:
|
||||
|
||||
1. Set the environment variable: `export UPLOAD_LOCATION=./Library`
|
||||
2. Add to your shell profile for persistence
|
||||
3. Restart your terminal and VS Code
|
||||
|
||||
#### Database Connection Failed
|
||||
|
||||
**Problem**: Cannot connect to PostgreSQL
|
||||
**Solution**:
|
||||
|
||||
1. Ensure all containers are running: `docker ps`
|
||||
2. Check logs: "Dev Containers: Show Container Log"
|
||||
3. Verify database credentials match environment variables
|
||||
|
||||
### Getting Help
|
||||
|
||||
If you encounter issues:
|
||||
|
||||
1. Check container logs: View → Output → Select "Dev Containers"
|
||||
2. Rebuild without cache: "Dev Containers: Rebuild Container Without Cache"
|
||||
3. Review [common Docker issues](https://docs.docker.com/desktop/troubleshoot/)
|
||||
4. Ask in [Discord](https://discord.immich.app) `#help-desk-support` channel
|
||||
|
||||
## Mobile Development
|
||||
|
||||
While the Dev Container focuses on server and web development, you can connect mobile apps for testing:
|
||||
|
||||
### Connecting iOS/Android Apps
|
||||
|
||||
1. **Ensure API is accessible**:
|
||||
|
||||
```bash
|
||||
# Find your machine's IP
|
||||
# macOS
|
||||
ipconfig getifaddr en0
|
||||
# Linux
|
||||
hostname -I
|
||||
# Windows (in WSL2)
|
||||
ip addr show eth0
|
||||
```
|
||||
|
||||
2. **Configure mobile app**:
|
||||
|
||||
- Server URL: `http://YOUR_IP:2283/api`
|
||||
- Ensure firewall allows port 2283
|
||||
|
||||
3. **For full mobile development**, see the [mobile development guide](/docs/developer/setup) which covers:
|
||||
- Flutter setup
|
||||
- Running on simulators/devices
|
||||
- Mobile-specific debugging
|
||||
|
||||
## Advanced Configuration
|
||||
|
||||
### Custom VS Code Extensions
|
||||
|
||||
Add extensions to `.devcontainer/devcontainer.json`:
|
||||
|
||||
```json
|
||||
"customizations": {
|
||||
"vscode": {
|
||||
"extensions": [
|
||||
"your.extension-id"
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Additional Services
|
||||
|
||||
To add services (e.g., Redis Commander), modify:
|
||||
|
||||
1. `/docker/docker-compose.dev.yml` - Add service definition
|
||||
2. `/.devcontainer/server/container-compose-overrides.yml` - Add overrides if needed
|
||||
|
||||
### Resource Limits
|
||||
|
||||
Adjust Docker Desktop resources:
|
||||
|
||||
- **macOS/Windows**: Docker Desktop → Settings → Resources
|
||||
- **Linux**: Modify Docker daemon configuration
|
||||
|
||||
Recommended minimums:
|
||||
|
||||
- CPU: 4 cores
|
||||
- Memory: 8GB
|
||||
- Disk: 20GB
|
||||
|
||||
## Next Steps
|
||||
|
||||
- Read the [architecture overview](/docs/developer/architecture)
|
||||
- Learn about [database migrations](/docs/developer/database-migrations)
|
||||
- Explore [API documentation](/docs/api)
|
||||
- Join `#immich` on [Discord](https://discord.immich.app)
|
||||
@@ -75,11 +75,12 @@ npm run dev
|
||||
To see local changes to `@immich/ui` in Immich, do the following:
|
||||
|
||||
1. Install `@immich/ui` as a sibling to `immich/`, for example `/home/user/immich` and `/home/user/ui`
|
||||
1. Build the `@immich/ui` project via `npm run build`
|
||||
1. Uncomment the corresponding volume in web service of the `docker/docker-compose.dev.yaml` file (`../../ui:/usr/ui`)
|
||||
1. Uncomment the corresponding alias in the `web/vite.config.js` file (`'@immich/ui': path.resolve(\_\_dirname, '../../ui')`)
|
||||
1. Start up the stack via `make dev`
|
||||
1. After making changes in `@immich/ui`, rebuild it (`npm run build`)
|
||||
2. Build the `@immich/ui` project via `npm run build`
|
||||
3. Uncomment the corresponding volume in web service of the `docker/docker-compose.dev.yaml` file (`../../ui:/usr/ui`)
|
||||
4. Uncomment the corresponding alias in the `web/vite.config.js` file (`'@immich/ui': path.resolve(\_\_dirname, '../../ui')`)
|
||||
5. Uncomment the import statement in `web/src/app.css` file `@import '/usr/ui/dist/theme/default.css';` and comment out `@import '@immich/ui/theme/default.css';`
|
||||
6. Start up the stack via `make dev`
|
||||
7. After making changes in `@immich/ui`, rebuild it (`npm run build`)
|
||||
|
||||
### Mobile app
|
||||
|
||||
@@ -114,32 +115,72 @@ Note: Activating the license is not required.
|
||||
|
||||
### VSCode
|
||||
|
||||
Install `Flutter`, `DCM`, `Prettier`, `ESLint` and `Svelte` extensions.
|
||||
Install `Flutter`, `DCM`, `Prettier`, `ESLint` and `Svelte` extensions. These extensions are listed in the `extensions.json` file under `.vscode/` and should appear as workspace recommendations.
|
||||
|
||||
in User `settings.json` (`cmd + shift + p` and search for `Open User Settings JSON`) add the following:
|
||||
Here are the settings we use; they should be active as workspace settings (`settings.json`):
|
||||
|
||||
```json title="settings.json"
|
||||
{
|
||||
"editor.formatOnSave": true,
|
||||
"[javascript][typescript][css]": {
|
||||
"[css]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.tabSize": 2,
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[svelte]": {
|
||||
"editor.defaultFormatter": "svelte.svelte-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"svelte.enable-ts-plugin": true,
|
||||
"eslint.validate": ["javascript", "svelte"],
|
||||
"[dart]": {
|
||||
"editor.defaultFormatter": "Dart-Code.dart-code",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.selectionHighlight": false,
|
||||
"editor.suggest.snippetsPreventQuickSuggestions": false,
|
||||
"editor.suggestSelection": "first",
|
||||
"editor.tabCompletion": "onlySnippets",
|
||||
"editor.wordBasedSuggestions": "off",
|
||||
"editor.defaultFormatter": "Dart-Code.dart-code"
|
||||
}
|
||||
"editor.wordBasedSuggestions": "off"
|
||||
},
|
||||
"[javascript]": {
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": "explicit",
|
||||
"source.removeUnusedImports": "explicit"
|
||||
},
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[json]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[jsonc]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[svelte]": {
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": "explicit",
|
||||
"source.removeUnusedImports": "explicit"
|
||||
},
|
||||
"editor.defaultFormatter": "svelte.svelte-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"[typescript]": {
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": "explicit",
|
||||
"source.removeUnusedImports": "explicit"
|
||||
},
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"cSpell.words": ["immich"],
|
||||
"editor.formatOnSave": true,
|
||||
"eslint.validate": ["javascript", "svelte"],
|
||||
"explorer.fileNesting.enabled": true,
|
||||
"explorer.fileNesting.patterns": {
|
||||
"*.dart": "${capture}.g.dart,${capture}.gr.dart,${capture}.drift.dart",
|
||||
"*.ts": "${capture}.spec.ts,${capture}.mock.ts"
|
||||
},
|
||||
"svelte.enable-ts-plugin": true,
|
||||
"typescript.preferences.importModuleSpecifier": "non-relative"
|
||||
}
|
||||
```
|
||||
|
||||
docs/docs/features/casting.md (new file, 19 lines)
@@ -0,0 +1,19 @@
|
||||
# Chromecast support
|
||||
|
||||
Immich supports Google's Cast protocol, so photos and videos can be cast to devices such as a Chromecast or a Nest Hub. This feature is considered experimental and has several important limitations, listed below. Currently, it is only supported by the web client; support on Android and iOS is planned for the future.
|
||||
|
||||
## Enable Google Cast Support
|
||||
|
||||
Google Cast support is disabled by default. The web UI uses Google-provided scripts and must retrieve them from Google servers when the page loads. This is a privacy concern for some users and is therefore opt-in.
|
||||
|
||||
You can enable Google Cast support through `Account Settings > Features > Cast > Google Cast`
|
||||
|
||||
<img src={require('./img/gcast-enable.webp').default} width="70%" title='Enable Google Cast Support' />
|
||||
|
||||
## Limitations
|
||||
|
||||
To use casting with Immich, there are a few prerequisites:
|
||||
|
||||
1. Your instance must be accessed via an HTTPS connection in order for the casting menu to show.
|
||||
2. Your instance must be publicly accessible via HTTPS, and a DNS record for the server must be resolvable by Google's DNS servers (`8.8.8.8` and `8.8.4.4`)
|
||||
3. Videos must be in a format that is compatible with Google Cast. For more info, check out [Google's documentation](https://developers.google.com/cast/docs/media)
|
||||
@@ -90,19 +90,22 @@ Usage: immich upload [paths...] [options]
|
||||
Upload assets
|
||||
|
||||
Arguments:
|
||||
paths One or more paths to assets to be uploaded
|
||||
paths One or more paths to assets to be uploaded
|
||||
|
||||
Options:
|
||||
-r, --recursive Recursive (default: false, env: IMMICH_RECURSIVE)
|
||||
-i, --ignore [paths...] Paths to ignore (default: [], env: IMMICH_IGNORE_PATHS)
|
||||
-h, --skip-hash Don't hash files before upload (default: false, env: IMMICH_SKIP_HASH)
|
||||
-H, --include-hidden Include hidden folders (default: false, env: IMMICH_INCLUDE_HIDDEN)
|
||||
-a, --album Automatically create albums based on folder name (default: false, env: IMMICH_AUTO_CREATE_ALBUM)
|
||||
-A, --album-name <name> Add all assets to specified album (env: IMMICH_ALBUM_NAME)
|
||||
-n, --dry-run Don't perform any actions, just show what will be done (default: false, env: IMMICH_DRY_RUN)
|
||||
-c, --concurrency <number> Number of assets to upload at the same time (default: 4, env: IMMICH_UPLOAD_CONCURRENCY)
|
||||
--delete Delete local assets after upload (env: IMMICH_DELETE_ASSETS)
|
||||
--help display help for command
|
||||
-r, --recursive Recursive (default: false, env: IMMICH_RECURSIVE)
|
||||
-i, --ignore <pattern> Pattern to ignore (env: IMMICH_IGNORE_PATHS)
|
||||
-h, --skip-hash Don't hash files before upload (default: false, env: IMMICH_SKIP_HASH)
|
||||
-H, --include-hidden Include hidden folders (default: false, env: IMMICH_INCLUDE_HIDDEN)
|
||||
-a, --album Automatically create albums based on folder name (default: false, env: IMMICH_AUTO_CREATE_ALBUM)
|
||||
-A, --album-name <name> Add all assets to specified album (env: IMMICH_ALBUM_NAME)
|
||||
-n, --dry-run Don't perform any actions, just show what will be done (default: false, env: IMMICH_DRY_RUN)
|
||||
-c, --concurrency <number> Number of assets to upload at the same time (default: 4, env: IMMICH_UPLOAD_CONCURRENCY)
|
||||
-j, --json-output Output detailed information in json format (default: false, env: IMMICH_JSON_OUTPUT)
|
||||
--delete Delete local assets after upload (env: IMMICH_DELETE_ASSETS)
|
||||
--no-progress Hide progress bars (env: IMMICH_PROGRESS_BAR)
|
||||
--watch Watch for changes and upload automatically (default: false, env: IMMICH_WATCH_CHANGES)
|
||||
--help display help for command
|
||||
```
|
||||
|
||||
</details>
|
||||
@@ -172,6 +175,16 @@ By default, hidden files are skipped. If you want to include hidden files, use t
|
||||
immich upload --include-hidden --recursive directory/
|
||||
```
|
||||
|
||||
You can use the `--json-output` option to print a JSON object with three keys: `newFiles`, `duplicates`, and `newAssets`. Because some logging precedes the JSON, you will need to strip the first three lines of output to get valid JSON. For example, to get a list of files that would be uploaded for further processing:
|
||||
|
||||
```bash
|
||||
immich upload --dry-run . | tail -n +4 | jq .newFiles[]
|
||||
```
|
||||
|
||||
### Obtain the API Key
|
||||
|
||||
The API key can be obtained in the user setting panel on the web interface.
|
||||
|
||||
@@ -121,6 +121,6 @@ Once this is done, you can continue to step 3 of "Basic Setup".
|
||||
|
||||
[hw-file]: https://github.com/immich-app/immich/releases/latest/download/hwaccel.transcoding.yml
|
||||
[nvct]: https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html
|
||||
[jellyfin-lp]: https://jellyfin.org/docs/general/administration/hardware-acceleration/intel/#configure-and-verify-lp-mode-on-linux
|
||||
[jellyfin-kernel-bug]: https://jellyfin.org/docs/general/administration/hardware-acceleration/intel/#known-issues-and-limitations
|
||||
[jellyfin-lp]: https://jellyfin.org/docs/general/post-install/transcoding/hardware-acceleration/intel#low-power-encoding
|
||||
[jellyfin-kernel-bug]: https://jellyfin.org/docs/general/post-install/transcoding/hardware-acceleration/intel#known-issues-and-limitations-on-linux
|
||||
[libmali-rockchip]: https://github.com/tsukumijima/libmali-rockchip/releases
|
||||
|
||||
docs/docs/features/img/gcast-enable.webp (new binary file, 19 KiB; not shown)
@@ -72,7 +72,7 @@ In rare cases, the library watcher can hang, preventing Immich from starting up.
|
||||
|
||||
### Nightly job
|
||||
|
||||
There is an automatic scan job that is scheduled to run once a day. This job also cleans up any libraries stuck in deletion. It is possible to trigger the cleanup by clicking "Scan all libraries" in the library managment page.
|
||||
There is an automatic scan job that is scheduled to run once a day. This job also cleans up any libraries stuck in deletion. It is possible to trigger the cleanup by clicking "Scan all libraries" in the library management page.
|
||||
|
||||
## Usage
|
||||
|
||||
@@ -112,12 +112,15 @@ _Remember to run `docker compose up -d` to register the changes. Make sure you c
|
||||
|
||||
These actions must be performed by the Immich administrator.
|
||||
|
||||
- Click on Administration -> Libraries
|
||||
- Click on Create External Library
|
||||
- Click on your avatar on the upper right corner
|
||||
- Click on Administration -> External Libraries
|
||||
- Click on Create an external library…
|
||||
- Select which user owns the library; this cannot be changed later
|
||||
- Enter `/mnt/media/christmas-trip` then click Add
|
||||
- Click on Save
|
||||
- Click the drop-down menu on the newly created library
|
||||
- Click on Scan
|
||||
- Click the drop-down menu on the newly created library
|
||||
- Click on Rename Library and rename it to "Christmas Trip"
|
||||
|
||||
NOTE: We have to use the `/mnt/media/christmas-trip` path and not the `/mnt/nas/christmas-trip` path since all paths have to be what the Docker containers see.
|
||||
|
||||
@@ -5,7 +5,7 @@ import TabItem from '@theme/TabItem';
|
||||
|
||||
Immich uses Postgres as its search database for both metadata and contextual CLIP search.
|
||||
|
||||
Contextual CLIP search is powered by the [pgvecto.rs](https://github.com/tensorchord/pgvecto.rs) extension, utilizing machine learning models like [CLIP](https://openai.com/research/clip) to provide relevant search results. This allows for freeform searches without requiring specific keywords in the image or video metadata.
|
||||
Contextual CLIP search is powered by the [VectorChord](https://github.com/tensorchord/VectorChord) extension, utilizing machine learning models like [CLIP](https://openai.com/research/clip) to provide relevant search results. This allows for freeform searches without requiring specific keywords in the image or video metadata.
|
||||
|
||||
## Advanced Search Filters
|
||||
|
||||
@@ -92,7 +92,7 @@ Memory and execution time estimates were obtained without acceleration on a 7800
|
||||
|
||||
**Execution Time (ms)**: After warming up the model with one pass, the mean execution time of 100 passes with the same input.
|
||||
|
||||
**Memory (MiB)**: The peak RSS usage of the process afer performing the above timing benchmark. Does not include image decoding, concurrent processing, the web server, etc., which are relatively constant factors.
|
||||
**Memory (MiB)**: The peak RSS usage of the process after performing the above timing benchmark. Does not include image decoding, concurrent processing, the web server, etc., which are relatively constant factors.
|
||||
|
||||
**Recall (%)**: Evaluated on Crossmodal-3600, the average of the recall@1, recall@5 and recall@10 results for zeroshot image retrieval. Chinese (Simplified), English, French, German, Italian, Japanese, Korean, Polish, Russian, Spanish and Turkish are additionally tested on XTD-10. Chinese (Simplified) and English are additionally tested on Flickr30k. The recall metrics are the average across all tested datasets.
|
||||
|
||||
|
||||
(binary image updated from 2.3 KiB to 10 KiB; not shown)
@@ -52,9 +52,9 @@ REMOTE_BACKUP_PATH="/path/to/remote/backup/directory"
|
||||
### Local
|
||||
|
||||
# Backup Immich database
|
||||
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres > "$UPLOAD_LOCATION"/database-backup/immich-database.sql
|
||||
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME> > "$UPLOAD_LOCATION"/database-backup/immich-database.sql
|
||||
# For deduplicating backup programs such as Borg or Restic, compressing the content can increase backup size by making it harder to deduplicate. If you are using a different program or still prefer to compress, you can use the following command instead:
|
||||
# docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres | /usr/bin/gzip --rsyncable > "$UPLOAD_LOCATION"/database-backup/immich-database.sql.gz
|
||||
# docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=<DB_USERNAME> | /usr/bin/gzip --rsyncable > "$UPLOAD_LOCATION"/database-backup/immich-database.sql.gz
|
||||
|
||||
### Append to local Borg repository
|
||||
borg create "$BACKUP_PATH/immich-borg::{now}" "$UPLOAD_LOCATION" --exclude "$UPLOAD_LOCATION"/thumbs/ --exclude "$UPLOAD_LOCATION"/encoded-video/
|
||||
|
||||
@@ -123,7 +123,7 @@ The default configuration looks like this:
|
||||
"buttonText": "Login with OAuth",
|
||||
"clientId": "",
|
||||
"clientSecret": "",
|
||||
"defaultStorageQuota": 0,
|
||||
"defaultStorageQuota": null,
|
||||
"enabled": false,
|
||||
"issuerUrl": "",
|
||||
"mobileOverrideEnabled": false,
|
||||
|
||||
@@ -2,53 +2,13 @@
|
||||
sidebar_position: 30
|
||||
---
|
||||
|
||||
import CodeBlock from '@theme/CodeBlock';
|
||||
import ExampleEnv from '!!raw-loader!../../../docker/example.env';
|
||||
|
||||
# Docker Compose [Recommended]
|
||||
|
||||
Docker Compose is the recommended method to run Immich in production. Below are the steps to deploy Immich with Docker Compose.
|
||||
|
||||
## Step 1 - Download the required files
|
||||
import DockerComposeSteps from '/docs/partials/_docker-compose-install-steps.mdx';
|
||||
|
||||
Create a directory of your choice (e.g. `./immich-app`) to hold the `docker-compose.yml` and `.env` files.
|
||||
|
||||
```bash title="Move to the directory you created"
|
||||
mkdir ./immich-app
|
||||
cd ./immich-app
|
||||
```
|
||||
|
||||
Download [`docker-compose.yml`][compose-file] and [`example.env`][env-file] by running the following commands:
|
||||
|
||||
```bash title="Get docker-compose.yml file"
|
||||
wget -O docker-compose.yml https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
|
||||
```
|
||||
|
||||
```bash title="Get .env file"
|
||||
wget -O .env https://github.com/immich-app/immich/releases/latest/download/example.env
|
||||
```
|
||||
|
||||
You can alternatively download these two files from your browser and move them to the directory that you created, in which case ensure that you rename `example.env` to `.env`.
|
||||
|
||||
## Step 2 - Populate the .env file with custom values
|
||||
|
||||
<CodeBlock language="bash" title="Default environmental variable content">
|
||||
{ExampleEnv}
|
||||
</CodeBlock>
|
||||
|
||||
- Populate `UPLOAD_LOCATION` with your preferred location for storing backup assets. It should be a new directory on the server with enough free space.
|
||||
- Consider changing `DB_PASSWORD` to a custom value. Postgres is not publicly exposed, so this password is only used for local authentication.
|
||||
To avoid issues with Docker parsing this value, it is best to use only the characters `A-Za-z0-9`. `pwgen` is a handy utility for this.
|
||||
- Set your timezone by uncommenting the `TZ=` line.
|
||||
- Populate custom database information if necessary.
|
||||
|
||||
## Step 3 - Start the containers
|
||||
|
||||
From the directory you created in Step 1 (which should now contain your customized `docker-compose.yml` and `.env` files), run the following command to start Immich as a background service:
|
||||
|
||||
```bash title="Start the containers"
|
||||
docker compose up -d
|
||||
```
|
||||
<DockerComposeSteps />
|
||||
|
||||
:::info Docker version
|
||||
If you get an error such as `unknown shorthand flag: 'd' in -d` or `open <location of your .env file>: permission denied`, you are probably running the wrong Docker version. (This happens, for example, with the docker.io package in Ubuntu 22.04.3 LTS.) You can correct the problem by following the complete [Docker Engine install](https://docs.docker.com/engine/install/) procedure for your distribution, crucially the "Uninstall old versions" and "Install using the apt/rpm repository" sections. These replace the distro's Docker packages with Docker's official ones.
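A quick way to check what you are actually running before reinstalling (a sketch; both commands are standard Docker CLI):

```bash
docker --version
docker compose version   # the compose plugin may be missing on older distro packages
```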
|
||||
@@ -70,6 +30,3 @@ If you get an error `can't set healthcheck.start_interval as feature require Doc
|
||||
## Next Steps
|
||||
|
||||
Read the [Post Installation](/docs/install/post-install.mdx) steps and [upgrade instructions](/docs/install/upgrading.md).
|
||||
|
||||
[compose-file]: https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
|
||||
[env-file]: https://github.com/immich-app/immich/releases/latest/download/example.env
|
||||
|
||||
@@ -72,20 +72,21 @@ Information on the current workers can be found [here](/docs/administration/jobs
|
||||
|
||||
## Database
|
||||
|
||||
| Variable | Description | Default | Containers |
|
||||
| :---------------------------------- | :----------------------------------------------------------------------- | :----------: | :----------------------------- |
|
||||
| `DB_URL` | Database URL | | server |
|
||||
| `DB_HOSTNAME` | Database host | `database` | server |
|
||||
| `DB_PORT` | Database port | `5432` | server |
|
||||
| `DB_USERNAME` | Database user | `postgres` | server, database<sup>\*1</sup> |
|
||||
| `DB_PASSWORD` | Database password | `postgres` | server, database<sup>\*1</sup> |
|
||||
| `DB_DATABASE_NAME` | Database name | `immich` | server, database<sup>\*1</sup> |
|
||||
| `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database vector extension (one of [`pgvector`, `pgvecto.rs`]) | `pgvecto.rs` | server |
|
||||
| `DB_SKIP_MIGRATIONS` | Whether to skip running migrations on startup (one of [`true`, `false`]) | `false` | server |
|
||||
| Variable | Description | Default | Containers |
|
||||
| :---------------------------------- | :--------------------------------------------------------------------------- | :--------: | :----------------------------- |
|
||||
| `DB_URL` | Database URL | | server |
|
||||
| `DB_HOSTNAME` | Database host | `database` | server |
|
||||
| `DB_PORT` | Database port | `5432` | server |
|
||||
| `DB_USERNAME` | Database user | `postgres` | server, database<sup>\*1</sup> |
|
||||
| `DB_PASSWORD` | Database password | `postgres` | server, database<sup>\*1</sup> |
|
||||
| `DB_DATABASE_NAME` | Database name | `immich` | server, database<sup>\*1</sup> |
|
||||
| `DB_SSL_MODE` | Database SSL mode | | server |
|
||||
| `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database vector extension (one of [`vectorchord`, `pgvector`, `pgvecto.rs`]) | | server |
|
||||
| `DB_SKIP_MIGRATIONS` | Whether to skip running migrations on startup (one of [`true`, `false`]) | `false` | server |
|
||||
|
||||
\*1: The values of `DB_USERNAME`, `DB_PASSWORD`, and `DB_DATABASE_NAME` are passed to the Postgres container as the variables `POSTGRES_USER`, `POSTGRES_PASSWORD`, and `POSTGRES_DB` in `docker-compose.yml`.
|
||||
|
||||
\*2: This setting cannot be changed after the server has successfully started up.
|
||||
\*2: If not provided, the appropriate extension to use is auto-detected at startup by introspecting the database. When multiple extensions are installed, the order of preference is VectorChord, pgvecto.rs, pgvector.
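For example, to pin the extension explicitly instead of relying on auto-detection, you could set it in your `.env` file (a sketch; the value must correspond to an extension that is actually installed):

```bash
# .env
DB_VECTOR_EXTENSION=vectorchord
```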
|
||||
|
||||
:::info
|
||||
|
||||
|
||||
@@ -39,8 +39,8 @@ alt="Dot Env Example"
|
||||
/>
|
||||
|
||||
- Change the default `DB_PASSWORD`, and add custom database connection information if necessary.
|
||||
- Change `DB_DATA_LOCATION` to a folder where the database will be saved to disk.
|
||||
- Change `UPLOAD_LOCATION` to a folder where media (uploaded and generated) will be stored.
|
||||
- Change `DB_DATA_LOCATION` to a folder (absolute path) where the database will be saved to disk.
|
||||
- Change `UPLOAD_LOCATION` to a folder (absolute path) where media (uploaded and generated) will be stored.
|
||||
|
||||
11. Click on "**Deploy the stack**".
|
||||
|
||||
|
||||
@@ -25,11 +25,11 @@ When you're all done, you should have the following:
|
||||
- `./docker/immich-app/postgres`
|
||||
- `./docker/immich-app/library`
|
||||
|
||||
Download [`docker-compose.yml`](https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml) and [`example.env`](https://github.com/immich-app/immich/releases/latest/download/example.env) to your computer. Upload the files to the `./docker/immich-app` directory.
|
||||
Download [`docker-compose.yml`](https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml) and [`example.env`](https://github.com/immich-app/immich/releases/latest/download/example.env) to your computer. Upload the files to the `./docker/immich-app` directory, and rename `example.env` to `.env`.
|
||||
|
||||
## Step 2 - Populate the .env file with custom values
|
||||
|
||||
Follow [Step 2 in Docker Compose](./docker-compose#step-2---populate-the-env-file-with-custom-values) for instructions on customizing the `.env` file, and then return back to this guide to continue.
|
||||
Follow [Step 2 in Docker Compose](/docs/install/docker-compose#step-2---populate-the-env-file-with-custom-values) for instructions on customizing the `.env` file, and then return back to this guide to continue.
|
||||
|
||||
## Step 3 - Create a new project in Container Manager
|
||||
|
||||
|
||||
@@ -2,27 +2,27 @@
|
||||
sidebar_position: 80
|
||||
---
|
||||
|
||||
# TrueNAS SCALE [Community]
|
||||
# TrueNAS [Community]
|
||||
|
||||
:::note
|
||||
This is a community contribution and not officially supported by the Immich team, but included here for convenience.
|
||||
|
||||
Community support can be found in the dedicated channel on the [Discord Server](https://discord.immich.app/).
|
||||
|
||||
**Please report app issues to the corresponding [Github Repository](https://github.com/truenas/charts/tree/master/community/immich).**
|
||||
**Please report app issues to the corresponding [Github Repository](https://github.com/truenas/apps/tree/master/trains/community/immich).**
|
||||
:::
|
||||
|
||||
Immich can easily be installed on TrueNAS SCALE via the **Community** train application.
|
||||
Consider reviewing the TrueNAS [Apps tutorial](https://www.truenas.com/docs/scale/scaletutorials/apps/) if you have not previously configured applications on your system.
|
||||
Immich can easily be installed on TrueNAS Community Edition via the **Community** train application.
|
||||
Consider reviewing the TrueNAS [Apps resources](https://apps.truenas.com/getting-started/) if you have not previously configured applications on your system.
|
||||
|
||||
TrueNAS SCALE makes installing and updating Immich easy, but you must use the Immich web portal and mobile app to configure accounts and access libraries.
|
||||
TrueNAS Community Edition makes installing and updating Immich easy, but you must use the Immich web portal and mobile app to configure accounts and access libraries.
|
||||
|
||||
## First Steps
|
||||
|
||||
The Immich app in TrueNAS SCALE installs, completes the initial configuration, then starts the Immich web portal.
|
||||
When updates become available, SCALE alerts and provides easy updates.
|
||||
The Immich app in TrueNAS Community Edition installs, completes the initial configuration, then starts the Immich web portal.
|
||||
When updates become available, TrueNAS alerts and provides easy updates.
|
||||
|
||||
Before installing the Immich app in SCALE, review the [Environment Variables](#environment-variables) documentation to see if you want to configure any during installation.
|
||||
Before installing the Immich app in TrueNAS, review the [Environment Variables](#environment-variables) documentation to see if you want to configure any during installation.
|
||||
You may also configure environment variables at any time after deploying the application.
|
||||
|
||||
### Setting up Storage Datasets
|
||||
@@ -126,9 +126,9 @@ className="border rounded-xl"
|
||||
|
||||
Accept the default port `30041` in **WebUI Port** or enter a custom port number.
|
||||
:::info Allowed Port Numbers
|
||||
Only numbers within the range 9000-65535 may be used on SCALE versions below TrueNAS Scale 24.10 Electric Eel.
|
||||
Only numbers within the range 9000-65535 may be used on TrueNAS versions below TrueNAS Community Edition 24.10 Electric Eel.
|
||||
|
||||
Regardless of version, to avoid port conflicts, don't use [ports on this list](https://www.truenas.com/docs/references/defaultports/).
|
||||
Regardless of version, to avoid port conflicts, don't use [ports on this list](https://www.truenas.com/docs/solutions/optimizations/security/#truenas-default-ports).
|
||||
:::
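As a quick sanity check before deploying (a sketch; replace `30041` with the port you chose), confirm nothing on the TrueNAS host is already listening on that port:

```bash title="Check for a port conflict (sketch)"
# No output means the port is free
ss -tlnp | grep ':30041'
```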
### Storage Configuration
|
||||
@@ -173,7 +173,7 @@ className="border rounded-xl"
|
||||
|
||||
You may configure [External Libraries](/docs/features/libraries) by mounting them using **Additional Storage**.
|
||||
The **Mount Path** is the location you will need to copy and paste into the External Library settings within Immich.
|
||||
The **Host Path** is the location on the TrueNAS SCALE server where your external library is located.
|
||||
The **Host Path** is the location on the TrueNAS Community Edition server where your external library is located.
|
||||
|
||||
<!-- A section for Labels would go here but I don't know what they do. -->
|
||||
|
||||
@@ -188,17 +188,17 @@ className="border rounded-xl"
|
||||
|
||||
Accept the default **CPU** limit of `2` threads or specify the number of threads (CPUs with Multi-/Hyper-threading have 2 threads per core).
|
||||
|
||||
Accept the default **Memory** limit of `4096` MB or specify the number of MB of RAM. If you're using Machine Learning you should probably set this above 8000 MB.
|
||||
Specify the **Memory** limit in MB of RAM. Immich recommends at least 6000 MB (6GB). If you selected **Enable Machine Learning** in **Immich Configuration**, you should probably set this above 8000 MB.
|
||||
|
||||
:::info Older SCALE Versions
|
||||
Before TrueNAS SCALE version 24.10 Electric Eel:
|
||||
:::info Older TrueNAS Versions
|
||||
Before TrueNAS Community Edition version 24.10 Electric Eel:
|
||||
|
||||
The **CPU** value was specified in a different format with a default of `4000m` which is 4 threads.
|
||||
|
||||
The **Memory** value was specified in a different format with a default of `8Gi` which is 8 GiB of RAM. The value was specified in bytes or a number with a measurement suffix. Examples: `129M`, `123Mi`, `1000000000`
|
||||
:::
|
||||
|
||||
Enable **GPU Configuration** options if you have a GPU that you will use for [Hardware Transcoding](/docs/features/hardware-transcoding) and/or [Hardware-Accelerated Machine Learning](/docs/features/ml-hardware-acceleration.md). More info: [GPU Passthrough Docs for TrueNAS Apps](https://www.truenas.com/docs/truenasapps/#gpu-passthrough)
|
||||
Enable **GPU Configuration** options if you have a GPU that you will use for [Hardware Transcoding](/docs/features/hardware-transcoding) and/or [Hardware-Accelerated Machine Learning](/docs/features/ml-hardware-acceleration.md). More info: [GPU Passthrough Docs for TrueNAS Apps](https://apps.truenas.com/managing-apps/installing-apps/#gpu-passthrough)
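If you pass through an NVIDIA GPU, a quick sanity check from the TrueNAS shell is to confirm the driver sees the card before assigning it to the app (a sketch; Intel/AMD setups use different tooling):

```bash title="Verify the GPU is visible (sketch, NVIDIA only)"
nvidia-smi
```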
### Install
|
||||
|
||||
@@ -240,7 +240,7 @@ className="border rounded-xl"
|
||||
/>
|
||||
|
||||
:::info
|
||||
Some Environment Variables are not available for the TrueNAS SCALE app. This is mainly because they can be configured through GUI options in the [Edit Immich screen](#edit-app-settings).
|
||||
Some Environment Variables are not available for the TrueNAS Community Edition app. This is mainly because they can be configured through GUI options in the [Edit Immich screen](#edit-app-settings).
|
||||
|
||||
Some examples are: `IMMICH_VERSION`, `UPLOAD_LOCATION`, `DB_DATA_LOCATION`, `TZ`, `IMMICH_LOG_LEVEL`, `DB_PASSWORD`, `REDIS_PASSWORD`.
|
||||
:::
|
||||
@@ -251,7 +251,7 @@ Some examples are: `IMMICH_VERSION`, `UPLOAD_LOCATION`, `DB_DATA_LOCATION`, `TZ`
|
||||
Make sure to read the general [upgrade instructions](/docs/install/upgrading.md).
|
||||
:::
|
||||
|
||||
When updates become available, SCALE alerts and provides easy updates.
|
||||
When updates become available, TrueNAS alerts and provides easy updates.
|
||||
To update the app to the latest version:
|
||||
|
||||
- Go to the **Installed Applications** screen and select Immich from the list of installed applications.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
sidebar_position: 2
|
||||
sidebar_position: 3
|
||||
---
|
||||
|
||||
# Comparison
|
||||
|
||||
BIN
docs/docs/overview/img/social-preview-light.webp
Normal file
BIN
docs/docs/overview/img/social-preview-light.webp
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 233 KiB |
@@ -1,5 +1,5 @@
|
||||
---
|
||||
sidebar_position: 3
|
||||
sidebar_position: 2
|
||||
---
|
||||
|
||||
# Quick start
|
||||
@@ -10,11 +10,20 @@ to install and use it.
|
||||
|
||||
## Requirements
|
||||
|
||||
Check the [requirements page](/docs/install/requirements) to get started.
|
||||
- A system with at least 4GB of RAM and 2 CPU cores.
|
||||
- [Docker](https://docs.docker.com/engine/install/)
|
||||
|
||||
> For a more detailed list of requirements, see the [requirements page](/docs/install/requirements).
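A quick way to sanity-check the host against these requirements (a sketch; exact commands vary by distribution):

```bash title="Check host requirements (sketch)"
docker --version   # Docker Engine is installed
nproc              # CPU cores available (2 or more recommended)
free -h            # RAM available (4 GB or more recommended)
```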
---
|
||||
|
||||
## Set up the server
|
||||
|
||||
Follow the [Docker Compose (Recommended)](/docs/install/docker-compose) instructions to install the server.
|
||||
import DockerComposeSteps from '/docs/partials/_docker-compose-install-steps.mdx';
|
||||
|
||||
<DockerComposeSteps />
|
||||
|
||||
---
|
||||
|
||||
## Try the web app
|
||||
|
||||
@@ -26,6 +35,8 @@ Try uploading a picture from your browser.
|
||||
|
||||
<img src={require('./img/upload-button.webp').default} title="Upload button" />
|
||||
|
||||
---
|
||||
|
||||
## Try the mobile app
|
||||
|
||||
### Download the Mobile App
|
||||
@@ -56,6 +67,8 @@ You can select the **Jobs** tab to see Immich processing your photos.
|
||||
|
||||
<img src={require('/docs/guides/img/jobs-tab.webp').default} title="Jobs tab" width={300} />
|
||||
|
||||
---
|
||||
|
||||
## Review the database backup and restore process
|
||||
|
||||
Immich has built-in database backups. You can refer to the
|
||||
@@ -65,6 +78,8 @@ Immich has built-in database backups. You can refer to the
|
||||
The database only contains metadata and user information. You must set up manual backups of the images and videos stored in `UPLOAD_LOCATION`.
|
||||
:::
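A minimal sketch of such a backup (assuming `UPLOAD_LOCATION=/srv/immich/library` and a mounted backup target at `/mnt/backup`; adapt to your own paths and tooling):

```bash title="Back up the media folder (sketch)"
# Mirror uploaded and generated media to the backup target
rsync -a --delete /srv/immich/library/ /mnt/backup/immich-library/
```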
---
|
||||
|
||||
## Where to go from here?
|
||||
|
||||
You may decide you'd like to install the server a different way; the Install category on the left menu provides many options.
|
||||
|
||||
@@ -2,9 +2,13 @@
|
||||
sidebar_position: 1
|
||||
---
|
||||
|
||||
# Introduction
|
||||
# Welcome to Immich
|
||||
|
||||
<img src={require('./img/feature-panel.webp').default} alt="Immich - Self-hosted photos and videos backup tool" />
|
||||
<img
|
||||
src={require('./img/social-preview-light.webp').default}
|
||||
alt="Immich - Self-hosted photos and videos backup tool"
|
||||
data-theme="light"
|
||||
/>
|
||||
|
||||
## Welcome!
|
||||
|
||||
43
docs/docs/partials/_docker-compose-install-steps.mdx
Normal file
43
docs/docs/partials/_docker-compose-install-steps.mdx
Normal file
@@ -0,0 +1,43 @@
|
||||
import CodeBlock from '@theme/CodeBlock';
|
||||
import ExampleEnv from '!!raw-loader!../../../docker/example.env';
|
||||
|
||||
### Step 1 - Download the required files
|
||||
|
||||
Create a directory of your choice (e.g. `./immich-app`) to hold the `docker-compose.yml` and `.env` files.
|
||||
|
||||
```bash title="Move to the directory you created"
|
||||
mkdir ./immich-app
|
||||
cd ./immich-app
|
||||
```
|
||||
|
||||
Download [`docker-compose.yml`](https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml) and [`example.env`](https://github.com/immich-app/immich/releases/latest/download/example.env) by running the following commands:
|
||||
|
||||
```bash title="Get docker-compose.yml file"
|
||||
wget -O docker-compose.yml https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
|
||||
```
|
||||
|
||||
```bash title="Get .env file"
|
||||
wget -O .env https://github.com/immich-app/immich/releases/latest/download/example.env
|
||||
```
|
||||
|
||||
You can alternatively download these two files from your browser and move them to the directory that you created, in which case ensure that you rename `example.env` to `.env`.
|
||||
|
||||
### Step 2 - Populate the .env file with custom values
|
||||
|
||||
<CodeBlock language="bash" title="Default environmental variable content">
|
||||
{ExampleEnv}
|
||||
</CodeBlock>
|
||||
|
||||
- Populate `UPLOAD_LOCATION` with your preferred location for storing backup assets. It should be a new directory on the server with enough free space.
|
||||
- Consider changing `DB_PASSWORD` to a custom value. Postgres is not publicly exposed, so this password is only used for local authentication.
|
||||
To avoid issues with Docker parsing this value, it is best to use only the characters `A-Za-z0-9`. `pwgen` is a handy utility for this.
|
||||
- Set your timezone by uncommenting the `TZ=` line.
|
||||
- Populate custom database information if necessary.
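Putting those points together, a sketch of a customized `.env` (all values are placeholders):

```bash title="Customized .env (illustrative)"
# A new directory on the server with enough free space for your media
UPLOAD_LOCATION=/srv/immich/library
# Example value only; generate your own, e.g. with: pwgen -s 32 1
DB_PASSWORD=kD8fj2LqP0xYwN4tBv7RcH1sGmZaQeU3
# Uncommented and set to your timezone
TZ=Etc/UTC
```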
### Step 3 - Start the containers
|
||||
|
||||
From the directory you created in Step 1 (which should now contain your customized `docker-compose.yml` and `.env` files), run the following command to start Immich as a background service:
|
||||
|
||||
```bash title="Start the containers"
|
||||
docker compose up -d
|
||||
```
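Once the stack is up, a couple of optional checks (standard Docker Compose commands; the `immich-server` service name is assumed from the default `docker-compose.yml`):

```bash title="Check the running containers (optional)"
docker compose ps                             # all services should be running / healthy
docker compose logs --follow immich-server    # tail the server logs
```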
@@ -95,7 +95,7 @@ const config = {
|
||||
position: 'right',
|
||||
},
|
||||
{
|
||||
to: '/docs/overview/introduction',
|
||||
to: '/docs/overview/welcome',
|
||||
position: 'right',
|
||||
label: 'Docs',
|
||||
},
|
||||
@@ -124,6 +124,12 @@ const config = {
|
||||
label: 'Discord',
|
||||
position: 'right',
|
||||
},
|
||||
{
|
||||
type: 'html',
|
||||
position: 'right',
|
||||
value:
|
||||
'<a href="https://buy.immich.app" target="_blank" class="no-underline hover:no-underline"><button class="buy-button bg-immich-primary dark:bg-immich-dark-primary text-white dark:text-black rounded-xl">Buy Immich</button></a>',
|
||||
},
|
||||
],
|
||||
},
|
||||
footer: {
|
||||
@@ -134,7 +140,7 @@ const config = {
|
||||
items: [
|
||||
{
|
||||
label: 'Welcome',
|
||||
to: '/docs/overview/introduction',
|
||||
to: '/docs/overview/welcome',
|
||||
},
|
||||
{
|
||||
label: 'Installation',
|
||||
|
||||
@@ -57,6 +57,6 @@
|
||||
"node": ">=20"
|
||||
},
|
||||
"volta": {
|
||||
"node": "22.14.0"
|
||||
"node": "22.16.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -44,11 +44,6 @@ const projects: CommunityProjectProps[] = [
|
||||
'Lightroom plugin to publish, export photos from Lightroom to Immich. Import from Immich to Lightroom is also supported.',
|
||||
url: 'https://blog.fokuspunk.de/lrc-immich-plugin/',
|
||||
},
|
||||
{
|
||||
title: 'Immich Duplicate Finder',
|
||||
description: 'Webapp that uses machine learning to identify near-duplicate images.',
|
||||
url: 'https://github.com/vale46n1/immich_duplicate_finder',
|
||||
},
|
||||
{
|
||||
title: 'Immich-Tiktok-Remover',
|
||||
description: 'Script to search for and remove TikTok videos from your Immich library.',
|
||||
|
||||
@@ -7,14 +7,22 @@
|
||||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
|
||||
@import url('https://fonts.googleapis.com/css2?family=Be+Vietnam+Pro:ital,wght@0,100;0,200;0,300;0,400;0,500;0,600;0,700;0,800;0,900;1,100;1,200;1,300;1,400;1,500;1,600;1,700;1,800;1,900&display=swap');
|
||||
|
||||
body {
|
||||
font-family: 'Be Vietnam Pro', serif;
|
||||
font-optical-sizing: auto;
|
||||
/* font-size: 1.125rem;
|
||||
@font-face {
|
||||
font-family: 'Overpass';
|
||||
src: url('/fonts/overpass/Overpass.ttf') format('truetype-variations');
|
||||
font-weight: 1 999;
|
||||
font-style: normal;
|
||||
ascent-override: 106.25%;
|
||||
size-adjust: 106.25%; */
|
||||
size-adjust: 106.25%;
|
||||
}
|
||||
|
||||
@font-face {
|
||||
font-family: 'Overpass Mono';
|
||||
src: url('/fonts/overpass/OverpassMono.ttf') format('truetype-variations');
|
||||
font-weight: 1 999;
|
||||
font-style: normal;
|
||||
ascent-override: 106.25%;
|
||||
size-adjust: 106.25%;
|
||||
}
|
||||
|
||||
.breadcrumbs__link {
|
||||
@@ -29,6 +37,7 @@ img {
|
||||
|
||||
/* You can override the default Infima variables here. */
|
||||
:root {
|
||||
font-family: 'Overpass', sans-serif;
|
||||
--ifm-color-primary: #4250af;
|
||||
--ifm-color-primary-dark: #4250af;
|
||||
--ifm-color-primary-darker: #4250af;
|
||||
@@ -59,14 +68,12 @@ div[class^='announcementBar_'] {
|
||||
}
|
||||
|
||||
.menu__link {
|
||||
padding: 10px;
|
||||
padding-left: 16px;
|
||||
padding: 10px 10px 10px 16px;
|
||||
border-radius: 24px;
|
||||
margin-right: 16px;
|
||||
}
|
||||
|
||||
.menu__list-item-collapsible {
|
||||
border-radius: 10px;
|
||||
margin-right: 16px;
|
||||
border-radius: 24px;
|
||||
}
|
||||
@@ -83,3 +90,12 @@ div[class*='navbar__items'] > li:has(a[class*='version-switcher-34ab39']) {
|
||||
code {
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
.buy-button {
|
||||
padding: 8px 14px;
|
||||
border: 1px solid transparent;
|
||||
font-family: 'Overpass', sans-serif;
|
||||
font-weight: 500;
|
||||
cursor: pointer;
|
||||
box-shadow: 0 0 5px 2px rgba(181, 206, 254, 0.4);
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ import {
|
||||
mdiBug,
|
||||
mdiCalendarToday,
|
||||
mdiCrosshairsOff,
|
||||
mdiCrop,
|
||||
mdiDatabase,
|
||||
mdiLeadPencil,
|
||||
mdiLockOff,
|
||||
@@ -12,6 +13,9 @@ import {
|
||||
mdiTrashCan,
|
||||
mdiWeb,
|
||||
mdiWrap,
|
||||
mdiCloudKeyOutline,
|
||||
mdiRegex,
|
||||
mdiCodeJson,
|
||||
} from '@mdi/js';
|
||||
import Layout from '@theme/Layout';
|
||||
import React from 'react';
|
||||
@@ -22,6 +26,53 @@ const withLanguage = (date: Date) => (language: string) => date.toLocaleDateStri
|
||||
type Item = Omit<TimelineItem, 'done' | 'getDateLabel'> & { date: Date };
|
||||
|
||||
const items: Item[] = [
|
||||
{
|
||||
icon: mdiRegex,
|
||||
iconColor: 'purple',
|
||||
title: 'Zitadel Actions are cursed',
|
||||
description:
|
||||
"Zitadel is cursed because its custom scripting feature is executed with a JS engine that doesn't support regex named capture groups.",
|
||||
link: {
|
||||
url: 'https://github.com/dop251/goja',
|
||||
text: 'Go JS engine',
|
||||
},
|
||||
date: new Date(2025, 5, 4),
|
||||
},
|
||||
{
|
||||
icon: mdiCloudKeyOutline,
|
||||
iconColor: '#0078d4',
|
||||
title: 'Entra is cursed',
|
||||
description:
|
||||
"Microsoft Entra supports PKCE, but doesn't include it in its OpenID discovery document. This leads to clients thinking PKCE isn't available.",
|
||||
link: {
|
||||
url: 'https://github.com/immich-app/immich/pull/18725',
|
||||
text: '#18725',
|
||||
},
|
||||
date: new Date(2025, 4, 30),
|
||||
},
|
||||
{
|
||||
icon: mdiCrop,
|
||||
iconColor: 'tomato',
|
||||
title: 'Image dimensions in EXIF metadata are cursed',
|
||||
description:
|
||||
'The dimensions in EXIF metadata can be different from the actual dimensions of the image, causing issues with cropping and resizing.',
|
||||
link: {
|
||||
url: 'https://github.com/immich-app/immich/pull/17974',
|
||||
text: '#17974',
|
||||
},
|
||||
date: new Date(2025, 4, 5),
|
||||
},
|
||||
{
|
||||
icon: mdiCodeJson,
|
||||
iconColor: 'yellow',
|
||||
title: 'YAML whitespace is cursed',
|
||||
description: 'YAML whitespaces are often handled in unintuitive ways.',
|
||||
link: {
|
||||
url: 'https://github.com/immich-app/immich/pull/17309',
|
||||
text: '#17309',
|
||||
},
|
||||
date: new Date(2025, 3, 1),
|
||||
},
|
||||
{
|
||||
icon: mdiMicrosoftWindows,
|
||||
iconColor: '#357EC7',
|
||||
|
||||
@@ -4,7 +4,7 @@ import Layout from '@theme/Layout';
|
||||
import { discordPath, discordViewBox } from '@site/src/components/svg-paths';
|
||||
import ThemedImage from '@theme/ThemedImage';
|
||||
import Icon from '@mdi/react';
|
||||
import { mdiAndroid } from '@mdi/js';
|
||||
|
||||
function HomepageHeader() {
|
||||
return (
|
||||
<header>
|
||||
@@ -13,11 +13,14 @@ function HomepageHeader() {
|
||||
<div className="w-full h-[120vh] absolute left-0 top-0 backdrop-blur-3xl bg-immich-bg/40 dark:bg-transparent"></div>
|
||||
</div>
|
||||
<section className="text-center pt-12 sm:pt-24 bg-immich-bg/50 dark:bg-immich-dark-bg/80">
|
||||
<ThemedImage
|
||||
sources={{ dark: 'img/logomark-dark.svg', light: 'img/logomark-light.svg' }}
|
||||
className="h-[115px] w-[115px] mb-2 antialiased rounded-none"
|
||||
alt="Immich logo"
|
||||
/>
|
||||
<a href="https://futo.org" target="_blank" rel="noopener noreferrer">
|
||||
<ThemedImage
|
||||
sources={{ dark: 'img/logomark-dark-with-futo.svg', light: 'img/logomark-light-with-futo.svg' }}
|
||||
className="h-[125px] w-[125px] mb-2 antialiased rounded-none"
|
||||
alt="Immich logo"
|
||||
/>
|
||||
</a>
|
||||
|
||||
<div className="mt-8">
|
||||
<p className="text-3xl md:text-5xl sm:leading-tight mb-1 font-extrabold text-black/90 dark:text-white px-4">
|
||||
Self-hosted{' '}
|
||||
@@ -28,7 +31,7 @@ function HomepageHeader() {
|
||||
solution<span className="block"></span>
|
||||
</p>
|
||||
|
||||
<p className="max-w-1/4 m-auto mt-4 px-4">
|
||||
<p className="max-w-1/4 m-auto mt-4 px-4 text-lg text-gray-700 dark:text-gray-100">
|
||||
Easily back up, organize, and manage your photos on your own server. Immich helps you
|
||||
<span className="sm:block"></span> browse, search and organize your photos and videos with ease, without
|
||||
sacrificing your privacy.
|
||||
@@ -36,27 +39,21 @@ function HomepageHeader() {
|
||||
</div>
|
||||
<div className="flex flex-col sm:flex-row place-items-center place-content-center mt-9 gap-4 ">
|
||||
<Link
|
||||
className="flex place-items-center place-content-center py-3 px-8 border bg-immich-primary dark:bg-immich-dark-primary rounded-xl no-underline hover:no-underline text-white hover:text-gray-50 dark:text-immich-dark-bg font-bold uppercase"
|
||||
to="docs/overview/introduction"
|
||||
className="flex place-items-center place-content-center py-3 px-8 border bg-immich-primary dark:bg-immich-dark-primary rounded-xl no-underline hover:no-underline text-white hover:text-gray-50 dark:text-immich-dark-bg font-bold"
|
||||
to="docs/overview/quick-start"
|
||||
>
|
||||
Get started
|
||||
Get Started
|
||||
</Link>
|
||||
|
||||
<Link
|
||||
className="flex place-items-center place-content-center py-3 px-8 border bg-immich-primary/10 dark:bg-gray-300 rounded-xl hover:no-underline text-immich-primary dark:text-immich-dark-bg font-bold uppercase"
|
||||
className="flex place-items-center place-content-center py-3 px-8 border bg-white/90 dark:bg-gray-300 rounded-xl hover:no-underline text-immich-primary dark:text-immich-dark-bg font-bold"
|
||||
to="https://demo.immich.app/"
|
||||
>
|
||||
Demo
|
||||
</Link>
|
||||
|
||||
<Link
|
||||
className="flex place-items-center place-content-center py-3 px-8 border bg-immich-primary/10 dark:bg-gray-300 rounded-xl hover:no-underline text-immich-primary dark:text-immich-dark-bg font-bold uppercase"
|
||||
to="https://immich.store"
|
||||
>
|
||||
Buy Merch
|
||||
Open Demo
|
||||
</Link>
|
||||
</div>
|
||||
<div className="my-12 flex gap-1 font-medium place-items-center place-content-center text-immich-primary dark:text-immich-dark-primary">
|
||||
|
||||
<div className="my-8 flex gap-1 font-medium place-items-center place-content-center text-immich-primary dark:text-immich-dark-primary">
|
||||
<Icon
|
||||
path={discordPath}
|
||||
viewBox={discordViewBox} /* viewBox may show an error in your IDE but it is normal. */
|
||||
@@ -119,7 +116,7 @@ export default function Home(): JSX.Element {
|
||||
<HomepageHeader />
|
||||
<div className="flex flex-col place-items-center text-center place-content-center dark:bg-immich-dark-bg py-8">
|
||||
<p>This project is available under GNU AGPL v3 license.</p>
|
||||
<p className="text-xs">Privacy should not be a luxury</p>
|
||||
<p className="text-sm">Privacy should not be a luxury</p>
|
||||
</div>
|
||||
</Layout>
|
||||
);
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import React from 'react';
|
||||
import Link from '@docusaurus/Link';
|
||||
import Layout from '@theme/Layout';
|
||||
function HomepageHeader() {
|
||||
return (
|
||||
|
||||
@@ -78,12 +78,14 @@ import {
|
||||
mdiLinkEdit,
|
||||
mdiTagFaces,
|
||||
mdiMovieOpenPlayOutline,
|
||||
mdiCast,
|
||||
} from '@mdi/js';
|
||||
import Layout from '@theme/Layout';
|
||||
import React from 'react';
|
||||
import { Item, Timeline } from '../components/timeline';
|
||||
|
||||
const releases = {
|
||||
'v1.133.0': new Date(2025, 4, 21),
|
||||
'v1.130.0': new Date(2025, 2, 25),
|
||||
'v1.127.0': new Date(2025, 1, 26),
|
||||
'v1.122.0': new Date(2024, 11, 5),
|
||||
@@ -216,14 +218,6 @@ const roadmap: Item[] = [
|
||||
iconColor: 'indianred',
|
||||
title: 'Stable release',
|
||||
description: 'Immich goes stable',
|
||||
getDateLabel: () => 'Planned for early 2025',
|
||||
},
|
||||
{
|
||||
done: false,
|
||||
icon: mdiLockOutline,
|
||||
iconColor: 'sandybrown',
|
||||
title: 'Private/locked photos',
|
||||
description: 'Private assets with extra protections',
|
||||
getDateLabel: () => 'Planned for 2025',
|
||||
},
|
||||
{
|
||||
@@ -245,6 +239,20 @@ const roadmap: Item[] = [
|
||||
];
|
||||
|
||||
const milestones: Item[] = [
|
||||
withRelease({
|
||||
icon: mdiCast,
|
||||
iconColor: 'aqua',
|
||||
title: 'Google Cast (web)',
|
||||
description: 'Cast assets to Google Cast/Chromecast compatible devices',
|
||||
release: 'v1.133.0',
|
||||
}),
|
||||
withRelease({
|
||||
icon: mdiLockOutline,
|
||||
iconColor: 'sandybrown',
|
||||
title: 'Private/locked photos',
|
||||
description: 'Private assets with extra protections',
|
||||
release: 'v1.133.0',
|
||||
}),
|
||||
withRelease({
|
||||
icon: mdiFolderMultiple,
|
||||
iconColor: 'brown',
|
||||
|
||||
5
docs/static/.well-known/security.txt
vendored
Normal file
5
docs/static/.well-known/security.txt
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
Policy: https://github.com/immich-app/immich/blob/main/SECURITY.md
|
||||
Contact: mailto:security@immich.app
|
||||
Preferred-Languages: en
|
||||
Expires: 2026-05-01T23:59:00.000Z
|
||||
Canonical: https://immich.app/.well-known/security.txt
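For reference, the published file can be fetched and checked with a plain HTTP client:

```bash title="Fetch the published security.txt"
curl -s https://immich.app/.well-known/security.txt
```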
1
docs/static/_redirects
vendored
1
docs/static/_redirects
vendored
@@ -30,3 +30,4 @@
|
||||
/docs/guides/api-album-sync /docs/community-projects 307
|
||||
/docs/guides/remove-offline-files /docs/community-projects 307
|
||||
/milestones /roadmap 307
|
||||
/docs/overview/introduction /docs/overview/welcome 307
|
docs/static/archived-versions.json
vendored
192
docs/static/archived-versions.json
vendored
@@ -1,52 +1,44 @@
|
||||
[
|
||||
{
|
||||
"label": "v1.135.3",
|
||||
"url": "https://v1.135.3.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.135.2",
|
||||
"url": "https://v1.135.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.135.1",
|
||||
"url": "https://v1.135.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.135.0",
|
||||
"url": "https://v1.135.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.134.0",
|
||||
"url": "https://v1.134.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.133.1",
|
||||
"url": "https://v1.133.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.133.0",
|
||||
"url": "https://v1.133.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.132.3",
|
||||
"url": "https://v1.132.3.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.132.2",
|
||||
"url": "https://v1.132.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.132.1",
|
||||
"url": "https://v1.132.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.132.0",
|
||||
"url": "https://v1.132.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.131.3",
|
||||
"url": "https://v1.131.3.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.131.2",
|
||||
"url": "https://v1.131.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.131.1",
|
||||
"url": "https://v1.131.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.131.0",
|
||||
"url": "https://v1.131.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.130.3",
|
||||
"url": "https://v1.130.3.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.130.2",
|
||||
"url": "https://v1.130.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.130.1",
|
||||
"url": "https://v1.130.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.130.0",
|
||||
"url": "https://v1.130.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.129.0",
|
||||
"url": "https://v1.129.0.archive.immich.app"
|
||||
@@ -63,46 +55,14 @@
|
||||
"label": "v1.126.1",
|
||||
"url": "https://v1.126.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.126.0",
|
||||
"url": "https://v1.126.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.125.7",
|
||||
"url": "https://v1.125.7.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.125.6",
|
||||
"url": "https://v1.125.6.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.125.5",
|
||||
"url": "https://v1.125.5.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.125.3",
|
||||
"url": "https://v1.125.3.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.125.2",
|
||||
"url": "https://v1.125.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.125.1",
|
||||
"url": "https://v1.125.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.124.2",
|
||||
"url": "https://v1.124.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.124.1",
|
||||
"url": "https://v1.124.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.124.0",
|
||||
"url": "https://v1.124.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.123.0",
|
||||
"url": "https://v1.123.0.archive.immich.app"
|
||||
@@ -111,18 +71,6 @@
|
||||
"label": "v1.122.3",
|
||||
"url": "https://v1.122.3.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.122.2",
|
||||
"url": "https://v1.122.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.122.1",
|
||||
"url": "https://v1.122.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.122.0",
|
||||
"url": "https://v1.122.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.121.0",
|
||||
"url": "https://v1.121.0.archive.immich.app"
|
||||
@@ -131,34 +79,14 @@
|
||||
"label": "v1.120.2",
|
||||
"url": "https://v1.120.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.120.1",
|
||||
"url": "https://v1.120.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.120.0",
|
||||
"url": "https://v1.120.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.119.1",
|
||||
"url": "https://v1.119.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.119.0",
|
||||
"url": "https://v1.119.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.118.2",
|
||||
"url": "https://v1.118.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.118.1",
|
||||
"url": "https://v1.118.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.118.0",
|
||||
"url": "https://v1.118.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.117.0",
|
||||
"url": "https://v1.117.0.archive.immich.app"
|
||||
@@ -167,14 +95,6 @@
|
||||
"label": "v1.116.2",
|
||||
"url": "https://v1.116.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.116.1",
|
||||
"url": "https://v1.116.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.116.0",
|
||||
"url": "https://v1.116.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.115.0",
|
||||
"url": "https://v1.115.0.archive.immich.app"
|
||||
@@ -187,18 +107,10 @@
|
||||
"label": "v1.113.1",
|
||||
"url": "https://v1.113.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.113.0",
|
||||
"url": "https://v1.113.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.112.1",
|
||||
"url": "https://v1.112.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.112.0",
|
||||
"url": "https://v1.112.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.111.0",
|
||||
"url": "https://v1.111.0.archive.immich.app"
|
||||
@@ -211,14 +123,6 @@
|
||||
"label": "v1.109.2",
|
||||
"url": "https://v1.109.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.109.1",
|
||||
"url": "https://v1.109.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.109.0",
|
||||
"url": "https://v1.109.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.108.0",
|
||||
"url": "https://v1.108.0.archive.immich.app"
|
||||
@@ -227,38 +131,14 @@
|
||||
"label": "v1.107.2",
|
||||
"url": "https://v1.107.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.107.1",
|
||||
"url": "https://v1.107.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.107.0",
|
||||
"url": "https://v1.107.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.106.4",
|
||||
"url": "https://v1.106.4.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.106.3",
|
||||
"url": "https://v1.106.3.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.106.2",
|
||||
"url": "https://v1.106.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.106.1",
|
||||
"url": "https://v1.106.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.105.1",
|
||||
"url": "https://v1.105.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.105.0",
|
||||
"url": "https://v1.105.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.104.0",
|
||||
"url": "https://v1.104.0.archive.immich.app"
|
||||
@@ -267,26 +147,10 @@
|
||||
"label": "v1.103.1",
|
||||
"url": "https://v1.103.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.103.0",
|
||||
"url": "https://v1.103.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.102.3",
|
||||
"url": "https://v1.102.3.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.102.2",
|
||||
"url": "https://v1.102.2.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.102.1",
|
||||
"url": "https://v1.102.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.102.0",
|
||||
"url": "https://v1.102.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.101.0",
|
||||
"url": "https://v1.101.0.archive.immich.app"
|
||||
|
||||
BIN
docs/static/fonts/overpass/Overpass-Italic.ttf
vendored
Normal file
BIN
docs/static/fonts/overpass/Overpass-Italic.ttf
vendored
Normal file
Binary file not shown.
BIN
docs/static/fonts/overpass/Overpass.ttf
vendored
Normal file
BIN
docs/static/fonts/overpass/Overpass.ttf
vendored
Normal file
Binary file not shown.
BIN
docs/static/fonts/overpass/OverpassMono.ttf
vendored
Normal file
BIN
docs/static/fonts/overpass/OverpassMono.ttf
vendored
Normal file
Binary file not shown.
22
docs/static/img/logomark-dark-with-futo.svg
vendored
Normal file
22
docs/static/img/logomark-dark-with-futo.svg
vendored
Normal file
File diff suppressed because one or more lines are too long
|
After Width: | Height: | Size: 75 KiB |
21
docs/static/img/logomark-light-with-futo.svg
vendored
Normal file
21
docs/static/img/logomark-light-with-futo.svg
vendored
Normal file
File diff suppressed because one or more lines are too long
|
After Width: | Height: | Size: 75 KiB |
@@ -1 +1 @@
|
||||
22.14.0
|
||||
22.16.0
|
||||
|
||||
@@ -28,20 +28,28 @@ services:
|
||||
extra_hosts:
|
||||
- 'auth-server:host-gateway'
|
||||
depends_on:
|
||||
- redis
|
||||
- database
|
||||
redis:
|
||||
condition: service_started
|
||||
database:
|
||||
condition: service_healthy
|
||||
ports:
|
||||
- 2285:2285
|
||||
|
||||
redis:
|
||||
image: redis:6.2-alpine@sha256:148bb5411c184abd288d9aaed139c98123eeb8824c5d3fce03cf721db58066d8
|
||||
image: redis:6.2-alpine@sha256:3211c33a618c457e5d241922c975dbc4f446d0bdb2dc75694f5573ef8e2d01fa
|
||||
|
||||
database:
|
||||
image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
|
||||
command: -c fsync=off -c shared_preload_libraries=vectors.so
|
||||
image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:3aef84a0a4fabbda17ef115c3019ba0c914ec73e9f6e59203674322d858b8eea
|
||||
command: -c fsync=off -c shared_preload_libraries=vchord.so -c config_file=/var/lib/postgresql/data/postgresql.conf
|
||||
environment:
|
||||
POSTGRES_PASSWORD: postgres
|
||||
POSTGRES_USER: postgres
|
||||
POSTGRES_DB: immich
|
||||
ports:
|
||||
- 5435:5432
|
||||
healthcheck:
|
||||
test: ['CMD-SHELL', 'pg_isready -U postgres -d immich']
|
||||
interval: 1s
|
||||
timeout: 5s
|
||||
retries: 30
|
||||
start_period: 10s
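With the e2e stack running, the same checks the compose file relies on can be run by hand (a sketch, assuming the `database` service name above):

```bash title="Verify the e2e database is ready (sketch)"
docker compose exec database pg_isready -U postgres -d immich
docker compose exec database psql -U postgres -d immich -c 'SHOW shared_preload_libraries;'
```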
2132
e2e/package-lock.json
generated
2132
e2e/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "immich-e2e",
|
||||
"version": "1.132.3",
|
||||
"version": "1.135.3",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"type": "module",
|
||||
@@ -25,25 +25,26 @@
|
||||
"@immich/sdk": "file:../open-api/typescript-sdk",
|
||||
"@playwright/test": "^1.44.1",
|
||||
"@types/luxon": "^3.4.2",
|
||||
"@types/node": "^22.14.1",
|
||||
"@types/oidc-provider": "^8.5.1",
|
||||
"@types/pg": "^8.11.0",
|
||||
"@types/node": "^22.15.32",
|
||||
"@types/oidc-provider": "^9.0.0",
|
||||
"@types/pg": "^8.15.1",
|
||||
"@types/pngjs": "^6.0.4",
|
||||
"@types/supertest": "^6.0.2",
|
||||
"@vitest/coverage-v8": "^3.0.0",
|
||||
"eslint": "^9.14.0",
|
||||
"eslint-config-prettier": "^10.0.0",
|
||||
"eslint-plugin-prettier": "^5.1.3",
|
||||
"eslint-plugin-unicorn": "^57.0.0",
|
||||
"eslint-plugin-unicorn": "^59.0.0",
|
||||
"exiftool-vendored": "^28.3.1",
|
||||
"globals": "^16.0.0",
|
||||
"jose": "^5.6.3",
|
||||
"luxon": "^3.4.4",
|
||||
"oidc-provider": "^8.5.1",
|
||||
"oidc-provider": "^9.0.0",
|
||||
"pg": "^8.11.3",
|
||||
"pngjs": "^7.0.0",
|
||||
"prettier": "^3.2.5",
|
||||
"prettier-plugin-organize-imports": "^4.0.0",
|
||||
"sharp": "^0.33.5",
|
||||
"socket.io-client": "^4.7.4",
|
||||
"supertest": "^7.0.0",
|
||||
"typescript": "^5.3.3",
|
||||
@@ -52,6 +53,6 @@
|
||||
"vitest": "^3.0.0"
|
||||
},
|
||||
"volta": {
|
||||
"node": "22.14.0"
|
||||
"node": "22.16.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -46,38 +46,6 @@ describe('/activities', () => {
|
||||
});
|
||||
|
||||
describe('GET /activities', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/activities');
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require an albumId', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/activities')
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toEqual(400);
|
||||
expect(body).toEqual(errorDto.badRequest(expect.arrayContaining(['albumId must be a UUID'])));
|
||||
});
|
||||
|
||||
it('should reject an invalid albumId', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/activities')
|
||||
.query({ albumId: uuidDto.invalid })
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toEqual(400);
|
||||
expect(body).toEqual(errorDto.badRequest(expect.arrayContaining(['albumId must be a UUID'])));
|
||||
});
|
||||
|
||||
it('should reject an invalid assetId', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/activities')
|
||||
.query({ albumId: uuidDto.notFound, assetId: uuidDto.invalid })
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toEqual(400);
|
||||
expect(body).toEqual(errorDto.badRequest(expect.arrayContaining(['assetId must be a UUID'])));
|
||||
});
|
||||
|
||||
it('should start off empty', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/activities')
|
||||
@@ -192,30 +160,6 @@ describe('/activities', () => {
|
||||
});
|
||||
|
||||
describe('POST /activities', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).post('/activities');
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require an albumId', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/activities')
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`)
|
||||
.send({ albumId: uuidDto.invalid });
|
||||
expect(status).toEqual(400);
|
||||
expect(body).toEqual(errorDto.badRequest(expect.arrayContaining(['albumId must be a UUID'])));
|
||||
});
|
||||
|
||||
it('should require a comment when type is comment', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/activities')
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`)
|
||||
.send({ albumId: uuidDto.notFound, type: 'comment', comment: null });
|
||||
expect(status).toEqual(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['comment must be a string', 'comment should not be empty']));
|
||||
});
|
||||
|
||||
it('should add a comment to an album', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/activities')
|
||||
@@ -330,20 +274,6 @@ describe('/activities', () => {
|
||||
});
|
||||
|
||||
describe('DELETE /activities/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).delete(`/activities/${uuidDto.notFound}`);
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require a valid uuid', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/activities/${uuidDto.invalid}`)
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should remove a comment from an album', async () => {
|
||||
const reaction = await createActivity({
|
||||
albumId: album.id,
|
||||
|
||||
@@ -9,7 +9,7 @@ import {
|
||||
LoginResponseDto,
|
||||
SharedLinkType,
|
||||
} from '@immich/sdk';
|
||||
import { createUserDto, uuidDto } from 'src/fixtures';
|
||||
import { createUserDto } from 'src/fixtures';
|
||||
import { errorDto } from 'src/responses';
|
||||
import { app, asBearerAuth, utils } from 'src/utils';
|
||||
import request from 'supertest';
|
||||
@@ -128,28 +128,6 @@ describe('/albums', () => {
|
||||
});
|
||||
|
||||
describe('GET /albums', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/albums');
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should reject an invalid shared param', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/albums?shared=invalid')
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`);
|
||||
expect(status).toEqual(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['shared must be a boolean value']));
|
||||
});
|
||||
|
||||
it('should reject an invalid assetId param', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/albums?assetId=invalid')
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`);
|
||||
expect(status).toEqual(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['assetId must be a UUID']));
|
||||
});
|
||||
|
||||
it("should not show other users' favorites", async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get(`/albums/${user1Albums[0].id}?withoutAssets=false`)
|
||||
@@ -323,12 +301,6 @@ describe('/albums', () => {
|
||||
});
|
||||
|
||||
describe('GET /albums/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get(`/albums/${user1Albums[0].id}`);
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should return album info for own album', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get(`/albums/${user1Albums[0].id}?withoutAssets=false`)
|
||||
@@ -421,12 +393,6 @@ describe('/albums', () => {
|
||||
});
|
||||
|
||||
describe('GET /albums/statistics', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/albums/statistics');
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should return total count of albums the user has access to', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get('/albums/statistics')
|
||||
@@ -438,12 +404,6 @@ describe('/albums', () => {
|
||||
});
|
||||
|
||||
describe('POST /albums', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).post('/albums').send({ albumName: 'New album' });
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should create an album', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/albums')
|
||||
@@ -468,15 +428,18 @@ describe('/albums', () => {
|
||||
order: AssetOrder.Desc,
|
||||
});
|
||||
});
|
||||
|
||||
it('should not be able to share album with owner', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.post('/albums')
|
||||
.send({ albumName: 'New album', albumUsers: [{ role: AlbumUserRole.Editor, userId: user1.userId }] })
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest('Cannot share album with owner'));
|
||||
});
|
||||
});
|
||||
|
||||
describe('PUT /albums/:id/assets', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).put(`/albums/${user1Albums[0].id}/assets`);
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should be able to add own asset to own album', async () => {
|
||||
const asset = await utils.createAsset(user1.accessToken);
|
||||
const { status, body } = await request(app)
|
||||
@@ -526,14 +489,6 @@ describe('/albums', () => {
|
||||
});
|
||||
|
||||
describe('PATCH /albums/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.patch(`/albums/${uuidDto.notFound}`)
|
||||
.send({ albumName: 'New album name' });
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should update an album', async () => {
|
||||
const album = await utils.createAlbum(user1.accessToken, {
|
||||
albumName: 'New album',
|
||||
@@ -576,15 +531,6 @@ describe('/albums', () => {
|
||||
});
|
||||
|
||||
describe('DELETE /albums/:id/assets', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/albums/${user1Albums[0].id}/assets`)
|
||||
.send({ ids: [user1Asset1.id] });
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require authorization', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/albums/${user1Albums[1].id}/assets`)
|
||||
@@ -679,13 +625,6 @@ describe('/albums', () => {
|
||||
});
|
||||
});
|
||||
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).put(`/albums/${user1Albums[0].id}/users`).send({ sharedUserIds: [] });
|
||||
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should be able to add user to own album', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/albums/${album.id}/users`)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { LoginResponseDto, Permission, createApiKey } from '@immich/sdk';
|
||||
import { createUserDto, uuidDto } from 'src/fixtures';
|
||||
import { createUserDto } from 'src/fixtures';
|
||||
import { errorDto } from 'src/responses';
|
||||
import { app, asBearerAuth, utils } from 'src/utils';
|
||||
import request from 'supertest';
|
||||
@@ -24,12 +24,6 @@ describe('/api-keys', () => {
|
||||
});
|
||||
|
||||
describe('POST /api-keys', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).post('/api-keys').send({ name: 'API Key' });
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should not work without permission', async () => {
|
||||
const { secret } = await create(user.accessToken, [Permission.ApiKeyRead]);
|
||||
const { status, body } = await request(app).post('/api-keys').set('x-api-key', secret).send({ name: 'API Key' });
|
||||
@@ -99,12 +93,6 @@ describe('/api-keys', () => {
|
||||
});
|
||||
|
||||
describe('GET /api-keys', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/api-keys');
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should start off empty', async () => {
|
||||
const { status, body } = await request(app).get('/api-keys').set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(body).toEqual([]);
|
||||
@@ -125,12 +113,6 @@ describe('/api-keys', () => {
|
||||
});
|
||||
|
||||
describe('GET /api-keys/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get(`/api-keys/${uuidDto.notFound}`);
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require authorization', async () => {
|
||||
const { apiKey } = await create(user.accessToken, [Permission.All]);
|
||||
const { status, body } = await request(app)
|
||||
@@ -140,14 +122,6 @@ describe('/api-keys', () => {
|
||||
expect(body).toEqual(errorDto.badRequest('API Key not found'));
|
||||
});
|
||||
|
||||
it('should require a valid uuid', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.get(`/api-keys/${uuidDto.invalid}`)
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should get api key details', async () => {
|
||||
const { apiKey } = await create(user.accessToken, [Permission.All]);
|
||||
const { status, body } = await request(app)
|
||||
@@ -165,42 +139,30 @@ describe('/api-keys', () => {
|
||||
});
|
||||
|
||||
describe('PUT /api-keys/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).put(`/api-keys/${uuidDto.notFound}`).send({ name: 'new name' });
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require authorization', async () => {
|
||||
const { apiKey } = await create(user.accessToken, [Permission.All]);
|
||||
const { status, body } = await request(app)
|
||||
.put(`/api-keys/${apiKey.id}`)
|
||||
.send({ name: 'new name' })
|
||||
.send({ name: 'new name', permissions: [Permission.All] })
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest('API Key not found'));
|
||||
});
|
||||
|
||||
it('should require a valid uuid', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/api-keys/${uuidDto.invalid}`)
|
||||
.send({ name: 'new name' })
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should update api key details', async () => {
|
||||
const { apiKey } = await create(user.accessToken, [Permission.All]);
|
||||
const { status, body } = await request(app)
|
||||
.put(`/api-keys/${apiKey.id}`)
|
||||
.send({ name: 'new name' })
|
||||
.send({
|
||||
name: 'new name',
|
||||
permissions: [Permission.ActivityCreate, Permission.ActivityRead, Permission.ActivityUpdate],
|
||||
})
|
||||
.set('Authorization', `Bearer ${user.accessToken}`);
|
||||
expect(status).toBe(200);
|
||||
expect(body).toEqual({
|
||||
id: expect.any(String),
|
||||
name: 'new name',
|
||||
permissions: [Permission.All],
|
||||
permissions: [Permission.ActivityCreate, Permission.ActivityRead, Permission.ActivityUpdate],
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String),
|
||||
});
|
||||
@@ -208,12 +170,6 @@ describe('/api-keys', () => {
|
||||
});
|
||||
|
||||
describe('DELETE /api-keys/:id', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).delete(`/api-keys/${uuidDto.notFound}`);
|
||||
expect(status).toBe(401);
|
||||
expect(body).toEqual(errorDto.unauthorized);
|
||||
});
|
||||
|
||||
it('should require authorization', async () => {
|
||||
const { apiKey } = await create(user.accessToken, [Permission.All]);
|
||||
const { status, body } = await request(app)
|
||||
@@ -223,14 +179,6 @@ describe('/api-keys', () => {
|
||||
expect(body).toEqual(errorDto.badRequest('API Key not found'));
|
||||
});
|
||||
|
||||
it('should require a valid uuid', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.delete(`/api-keys/${uuidDto.invalid}`)
|
||||
.set('Authorization', `Bearer ${admin.accessToken}`);
|
||||
expect(status).toBe(400);
|
||||
expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
|
||||
});
|
||||
|
||||
it('should delete an api key', async () => {
|
||||
const { apiKey } = await create(user.accessToken, [Permission.All]);
|
||||
const { status } = await request(app)
|
||||
|
||||
@@ -3,6 +3,7 @@ import {
|
||||
AssetMediaStatus,
|
||||
AssetResponseDto,
|
||||
AssetTypeEnum,
|
||||
AssetVisibility,
|
||||
getAssetInfo,
|
||||
getMyUser,
|
||||
LoginResponseDto,
|
||||
@@ -14,6 +15,7 @@ import { DateTime } from 'luxon';
|
||||
import { randomBytes } from 'node:crypto';
|
||||
import { readFile, writeFile } from 'node:fs/promises';
|
||||
import { basename, join } from 'node:path';
|
||||
import sharp from 'sharp';
|
||||
import { Socket } from 'socket.io-client';
|
||||
import { createUserDto, uuidDto } from 'src/fixtures';
|
||||
import { makeRandomImage } from 'src/generators';
|
||||
@@ -22,27 +24,9 @@ import { app, asBearerAuth, tempDir, TEN_TIMES, testAssetDir, utils } from 'src/
|
||||
import request from 'supertest';
|
||||
import { afterAll, beforeAll, describe, expect, it } from 'vitest';
|
||||
|
||||
const makeUploadDto = (options?: { omit: string }): Record<string, any> => {
|
||||
const dto: Record<string, any> = {
|
||||
deviceAssetId: 'example-image',
|
||||
deviceId: 'TEST',
|
||||
fileCreatedAt: new Date().toISOString(),
|
||||
fileModifiedAt: new Date().toISOString(),
|
||||
isFavorite: 'testing',
|
||||
duration: '0:00:00.000000',
|
||||
};
|
||||
|
||||
const omit = options?.omit;
|
||||
if (omit) {
|
||||
delete dto[omit];
|
||||
}
|
||||
|
||||
return dto;
|
||||
};
|
||||
|
||||
const locationAssetFilepath = `${testAssetDir}/metadata/gps-position/thompson-springs.jpg`;
|
||||
const ratingAssetFilepath = `${testAssetDir}/metadata/rating/mongolels.jpg`;
|
||||
const facesAssetFilepath = `${testAssetDir}/metadata/faces/portrait.jpg`;
|
||||
const facesAssetDir = `${testAssetDir}/metadata/faces`;
|
||||
|
||||
const readTags = async (bytes: Buffer, filename: string) => {
|
||||
const filepath = join(tempDir, filename);
|
||||
@@ -57,6 +41,40 @@ const today = DateTime.fromObject({
|
||||
}) as DateTime<true>;
|
||||
const yesterday = today.minus({ days: 1 });
|
||||
|
||||
const createTestImageWithExif = async (filename: string, exifData: Record<string, any>) => {
|
||||
// Generate unique color to ensure different checksums for each image
|
||||
const r = Math.floor(Math.random() * 256);
|
||||
const g = Math.floor(Math.random() * 256);
|
||||
const b = Math.floor(Math.random() * 256);
|
||||
|
||||
// Create a 100x100 solid color JPEG using Sharp
|
||||
const imageBytes = await sharp({
|
||||
create: {
|
||||
width: 100,
|
||||
height: 100,
|
||||
channels: 3,
|
||||
background: { r, g, b },
|
||||
},
|
||||
})
|
||||
.jpeg({ quality: 90 })
|
||||
.toBuffer();
|
||||
|
||||
// Add random suffix to filename to avoid collisions
|
||||
const uniqueFilename = filename.replace('.jpg', `-${randomBytes(4).toString('hex')}.jpg`);
|
||||
const filepath = join(tempDir, uniqueFilename);
|
||||
await writeFile(filepath, imageBytes);
|
||||
|
||||
// Filter out undefined values before writing EXIF
|
||||
const cleanExifData = Object.fromEntries(Object.entries(exifData).filter(([, value]) => value !== undefined));
|
||||
|
||||
await exiftool.write(filepath, cleanExifData);
|
||||
|
||||
// Re-read the image bytes after EXIF has been written
|
||||
const finalImageBytes = await readFile(filepath);
|
||||
|
||||
return { filepath, imageBytes: finalImageBytes, filename: uniqueFilename };
|
||||
};
|
||||
|
||||
describe('/asset', () => {
  let admin: LoginResponseDto;
  let websocket: Socket;
@@ -137,9 +155,9 @@ describe('/asset', () => {
      // stats
      utils.createAsset(statsUser.accessToken),
      utils.createAsset(statsUser.accessToken, { isFavorite: true }),
      utils.createAsset(statsUser.accessToken, { isArchived: true }),
      utils.createAsset(statsUser.accessToken, { visibility: AssetVisibility.Archive }),
      utils.createAsset(statsUser.accessToken, {
        isArchived: true,
        visibility: AssetVisibility.Archive,
        isFavorite: true,
        assetData: { filename: 'example.mp4' },
      }),
@@ -160,13 +178,6 @@ describe('/asset', () => {
  });

  describe('GET /assets/:id/original', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).get(`/assets/${uuidDto.notFound}/original`);

      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should download the file', async () => {
      const response = await request(app)
        .get(`/assets/${user1Assets[0].id}/original`)
@@ -178,20 +189,6 @@ describe('/asset', () => {
    });

  describe('GET /assets/:id', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).get(`/assets/${uuidDto.notFound}`);
      expect(body).toEqual(errorDto.unauthorized);
      expect(status).toBe(401);
    });

    it('should require a valid id', async () => {
      const { status, body } = await request(app)
        .get(`/assets/${uuidDto.invalid}`)
        .set('Authorization', `Bearer ${user1.accessToken}`);
      expect(status).toBe(400);
      expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
    });

    it('should require access', async () => {
      const { status, body } = await request(app)
        .get(`/assets/${user2Assets[0].id}`)
@@ -224,31 +221,22 @@ describe('/asset', () => {
      });
    });

    it('should get the asset faces', async () => {
      const config = await utils.getSystemConfig(admin.accessToken);
      config.metadata.faces.import = true;
      await updateConfig({ systemConfigDto: config }, { headers: asBearerAuth(admin.accessToken) });

      // asset faces
      const facesAsset = await utils.createAsset(admin.accessToken, {
        assetData: {
    describe('faces', () => {
      const metadataFaceTests = [
        {
          description: 'without orientation',
          filename: 'portrait.jpg',
          bytes: await readFile(facesAssetFilepath),
        },
      });

      await utils.waitForWebsocketEvent({ event: 'assetUpload', id: facesAsset.id });

      const { status, body } = await request(app)
        .get(`/assets/${facesAsset.id}`)
        .set('Authorization', `Bearer ${admin.accessToken}`);
      expect(status).toBe(200);
      expect(body.id).toEqual(facesAsset.id);
      expect(body.people).toMatchObject([
        {
          description: 'adjusting face regions to orientation',
          filename: 'portrait-orientation-6.jpg',
        },
      ];
      // should produce same resulting face region coordinates for any orientation
      const expectedFaces = [
        {
          name: 'Marie Curie',
          birthDate: null,
          thumbnailPath: '',
          isHidden: false,
          faces: [
            {
@@ -265,7 +253,6 @@ describe('/asset', () => {
        {
          name: 'Pierre Curie',
          birthDate: null,
          thumbnailPath: '',
          isHidden: false,
          faces: [
            {
@@ -279,7 +266,30 @@ describe('/asset', () => {
          },
        ],
      },
      ]);
      ];

      it.each(metadataFaceTests)('should get the asset faces from $filename $description', async ({ filename }) => {
        const config = await utils.getSystemConfig(admin.accessToken);
        config.metadata.faces.import = true;
        await updateConfig({ systemConfigDto: config }, { headers: asBearerAuth(admin.accessToken) });

        const facesAsset = await utils.createAsset(admin.accessToken, {
          assetData: {
            filename,
            bytes: await readFile(`${facesAssetDir}/${filename}`),
          },
        });

        await utils.waitForWebsocketEvent({ event: 'assetUpload', id: facesAsset.id });

        const { status, body } = await request(app)
          .get(`/assets/${facesAsset.id}`)
          .set('Authorization', `Bearer ${admin.accessToken}`);

        expect(status).toBe(200);
        expect(body.id).toEqual(facesAsset.id);
        expect(body.people).toMatchObject(expectedFaces);
      });
    });

    it('should work with a shared link', async () => {
@@ -333,7 +343,7 @@ describe('/asset', () => {
    });

    it('disallows viewing archived assets', async () => {
      const asset = await utils.createAsset(user1.accessToken, { isArchived: true });
      const asset = await utils.createAsset(user1.accessToken, { visibility: AssetVisibility.Archive });

      const { status } = await request(app)
        .get(`/assets/${asset.id}`)
@@ -354,13 +364,6 @@ describe('/asset', () => {
  });

  describe('GET /assets/statistics', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).get('/assets/statistics');

      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should return stats of all assets', async () => {
      const { status, body } = await request(app)
        .get('/assets/statistics')
@@ -384,7 +387,7 @@ describe('/asset', () => {
      const { status, body } = await request(app)
        .get('/assets/statistics')
        .set('Authorization', `Bearer ${statsUser.accessToken}`)
        .query({ isArchived: true });
        .query({ visibility: AssetVisibility.Archive });

      expect(status).toBe(200);
      expect(body).toEqual({ images: 1, videos: 1, total: 2 });
@@ -394,7 +397,7 @@ describe('/asset', () => {
      const { status, body } = await request(app)
        .get('/assets/statistics')
        .set('Authorization', `Bearer ${statsUser.accessToken}`)
        .query({ isFavorite: true, isArchived: true });
        .query({ isFavorite: true, visibility: AssetVisibility.Archive });

      expect(status).toBe(200);
      expect(body).toEqual({ images: 0, videos: 1, total: 1 });
@@ -404,7 +407,7 @@ describe('/asset', () => {
      const { status, body } = await request(app)
        .get('/assets/statistics')
        .set('Authorization', `Bearer ${statsUser.accessToken}`)
        .query({ isFavorite: false, isArchived: false });
        .query({ isFavorite: false, visibility: AssetVisibility.Timeline });

      expect(status).toBe(200);
      expect(body).toEqual({ images: 1, videos: 0, total: 1 });
@@ -425,13 +428,6 @@ describe('/asset', () => {
      await utils.waitForQueueFinish(admin.accessToken, 'thumbnailGeneration');
    });

    it('should require authentication', async () => {
      const { status, body } = await request(app).get('/assets/random');

      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it.each(TEN_TIMES)('should return 1 random assets', async () => {
      const { status, body } = await request(app)
        .get('/assets/random')
@@ -467,31 +463,9 @@ describe('/asset', () => {
      expect(status).toBe(200);
      expect(body).toEqual([expect.objectContaining({ id: user2Assets[0].id })]);
    });

    it('should return error', async () => {
      const { status } = await request(app)
        .get('/assets/random?count=ABC')
        .set('Authorization', `Bearer ${user1.accessToken}`);

      expect(status).toBe(400);
    });
  });

  describe('PUT /assets/:id', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).put(`/assets/:${uuidDto.notFound}`);
      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should require a valid id', async () => {
      const { status, body } = await request(app)
        .put(`/assets/${uuidDto.invalid}`)
        .set('Authorization', `Bearer ${user1.accessToken}`);
      expect(status).toBe(400);
      expect(body).toEqual(errorDto.badRequest(['id must be a UUID']));
    });

    it('should require access', async () => {
      const { status, body } = await request(app)
        .put(`/assets/${user2Assets[0].id}`)
@@ -519,7 +493,7 @@ describe('/asset', () => {
      const { status, body } = await request(app)
        .put(`/assets/${user1Assets[0].id}`)
        .set('Authorization', `Bearer ${user1.accessToken}`)
        .send({ isArchived: true });
        .send({ visibility: AssetVisibility.Archive });
      expect(body).toMatchObject({ id: user1Assets[0].id, isArchived: true });
      expect(status).toEqual(200);
    });
@@ -619,28 +593,6 @@ describe('/asset', () => {
      expect(status).toEqual(200);
    });

    it('should reject invalid gps coordinates', async () => {
      for (const test of [
        { latitude: 12 },
        { longitude: 12 },
        { latitude: 12, longitude: 'abc' },
        { latitude: 'abc', longitude: 12 },
        { latitude: null, longitude: 12 },
        { latitude: 12, longitude: null },
        { latitude: 91, longitude: 12 },
        { latitude: -91, longitude: 12 },
        { latitude: 12, longitude: -181 },
        { latitude: 12, longitude: 181 },
      ]) {
        const { status, body } = await request(app)
          .put(`/assets/${user1Assets[0].id}`)
          .send(test)
          .set('Authorization', `Bearer ${user1.accessToken}`);
        expect(status).toBe(400);
        expect(body).toEqual(errorDto.badRequest());
      }
    });

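    // Minimal sketch of the range check the cases above exercise (a hypothetical helper, not the
    // server-side validator): both coordinates must be supplied as finite numbers, with latitude
    // in [-90, 90] and longitude in [-180, 180]; anything else is expected to produce a 400.
    const isValidGpsPair = (latitude: unknown, longitude: unknown): boolean =>
      typeof latitude === 'number' &&
      typeof longitude === 'number' &&
      Number.isFinite(latitude) &&
      Number.isFinite(longitude) &&
      Math.abs(latitude) <= 90 &&
      Math.abs(longitude) <= 180;
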
    it('should update gps data', async () => {
      const { status, body } = await request(app)
        .put(`/assets/${user1Assets[0].id}`)
@@ -712,17 +664,6 @@ describe('/asset', () => {
      expect(status).toEqual(200);
    });

    it('should reject invalid rating', async () => {
      for (const test of [{ rating: 7 }, { rating: 3.5 }, { rating: null }]) {
        const { status, body } = await request(app)
          .put(`/assets/${user1Assets[0].id}`)
          .send(test)
          .set('Authorization', `Bearer ${user1.accessToken}`);
        expect(status).toBe(400);
        expect(body).toEqual(errorDto.badRequest());
      }
    });

    it('should return tagged people', async () => {
      const { status, body } = await request(app)
        .put(`/assets/${user1Assets[0].id}`)
@@ -746,25 +687,6 @@ describe('/asset', () => {
  });

  describe('DELETE /assets', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app)
        .delete(`/assets`)
        .send({ ids: [uuidDto.notFound] });

      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should require a valid uuid', async () => {
      const { status, body } = await request(app)
        .delete(`/assets`)
        .send({ ids: [uuidDto.invalid] })
        .set('Authorization', `Bearer ${admin.accessToken}`);

      expect(status).toBe(400);
      expect(body).toEqual(errorDto.badRequest(['each value in ids must be a UUID']));
    });

    it('should throw an error when the id is not found', async () => {
      const { status, body } = await request(app)
        .delete(`/assets`)
@@ -877,13 +799,6 @@ describe('/asset', () => {
  });

  describe('GET /assets/:id/thumbnail', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).get(`/assets/${locationAsset.id}/thumbnail`);

      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should not include gps data for webp thumbnails', async () => {
      await utils.waitForWebsocketEvent({
        event: 'assetUpload',
@@ -919,13 +834,6 @@ describe('/asset', () => {
    });

  describe('GET /assets/:id/original', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).get(`/assets/${locationAsset.id}/original`);

      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });

    it('should download the original', async () => {
      const { status, body, type } = await request(app)
        .get(`/assets/${locationAsset.id}/original`)
@@ -946,43 +854,9 @@ describe('/asset', () => {
    });
  });

  describe('PUT /assets', () => {
    it('should require authentication', async () => {
      const { status, body } = await request(app).put('/assets');

      expect(status).toBe(401);
      expect(body).toEqual(errorDto.unauthorized);
    });
  });

  describe('POST /assets', () => {
    beforeAll(setupTests, 30_000);

    it('should require authentication', async () => {
      const { status, body } = await request(app).post(`/assets`);
      expect(body).toEqual(errorDto.unauthorized);
      expect(status).toBe(401);
    });

    it.each([
      { should: 'require `deviceAssetId`', dto: { ...makeUploadDto({ omit: 'deviceAssetId' }) } },
      { should: 'require `deviceId`', dto: { ...makeUploadDto({ omit: 'deviceId' }) } },
      { should: 'require `fileCreatedAt`', dto: { ...makeUploadDto({ omit: 'fileCreatedAt' }) } },
      { should: 'require `fileModifiedAt`', dto: { ...makeUploadDto({ omit: 'fileModifiedAt' }) } },
      { should: 'require `duration`', dto: { ...makeUploadDto({ omit: 'duration' }) } },
      { should: 'throw if `isFavorite` is not a boolean', dto: { ...makeUploadDto(), isFavorite: 'not-a-boolean' } },
      { should: 'throw if `isVisible` is not a boolean', dto: { ...makeUploadDto(), isVisible: 'not-a-boolean' } },
      { should: 'throw if `isArchived` is not a boolean', dto: { ...makeUploadDto(), isArchived: 'not-a-boolean' } },
    ])('should $should', async ({ dto }) => {
      const { status, body } = await request(app)
        .post('/assets')
        .set('Authorization', `Bearer ${user1.accessToken}`)
        .attach('assetData', makeRandomImage(), 'example.png')
        .field(dto);
      expect(status).toBe(400);
      expect(body).toEqual(errorDto.badRequest());
    });

    const tests = [
      {
        input: 'formats/avif/8bit-sRGB.avif',
@@ -1244,31 +1118,21 @@ describe('/asset', () => {
      },
    ];

    it(`should upload and generate a thumbnail for different file types`, async () => {
      // upload in parallel
      const assets = await Promise.all(
        tests.map(async ({ input }) => {
          const filepath = join(testAssetDir, input);
          return utils.createAsset(admin.accessToken, {
            assetData: { bytes: await readFile(filepath), filename: basename(filepath) },
          });
        }),
      );
    it.each(tests)(`should upload and generate a thumbnail for different file types`, async ({ input, expected }) => {
      const filepath = join(testAssetDir, input);
      const response = await utils.createAsset(admin.accessToken, {
        assetData: { bytes: await readFile(filepath), filename: basename(filepath) },
      });

      for (const { id, status } of assets) {
        expect(status).toBe(AssetMediaStatus.Created);
        // longer timeout as the thumbnail generation from full-size raw files can take a while
        await utils.waitForWebsocketEvent({ event: 'assetUpload', id });
      }
      expect(response.status).toBe(AssetMediaStatus.Created);
      const id = response.id;
      // longer timeout as the thumbnail generation from full-size raw files can take a while
      await utils.waitForWebsocketEvent({ event: 'assetUpload', id });

      for (const [i, { id }] of assets.entries()) {
        const { expected } = tests[i];
        const asset = await utils.getAssetInfo(admin.accessToken, id);

        expect(asset.exifInfo).toBeDefined();
        expect(asset.exifInfo).toMatchObject(expected.exifInfo);
        expect(asset).toMatchObject(expected);
      }
      const asset = await utils.getAssetInfo(admin.accessToken, id);
      expect(asset.exifInfo).toBeDefined();
      expect(asset.exifInfo).toMatchObject(expected.exifInfo);
      expect(asset).toMatchObject(expected);
    });

    it('should handle a duplicate', async () => {
@@ -1361,6 +1225,411 @@ describe('/asset', () => {
    });
  });

  describe('EXIF metadata extraction', () => {
    describe('Additional date tag extraction', () => {
      describe('Date-time vs time-only tag handling', () => {
        it('should fall back to file timestamps when only time-only tags are available', async () => {
          const { imageBytes, filename } = await createTestImageWithExif('time-only-fallback.jpg', {
            TimeCreated: '2023:11:15 14:30:00', // Time-only tag, should not be used for dateTimeOriginal
            // Exclude all date-time tags to force fallback to file timestamps
            SubSecDateTimeOriginal: undefined,
            DateTimeOriginal: undefined,
            SubSecCreateDate: undefined,
            SubSecMediaCreateDate: undefined,
            CreateDate: undefined,
            MediaCreateDate: undefined,
            CreationDate: undefined,
            DateTimeCreated: undefined,
            GPSDateTime: undefined,
            DateTimeUTC: undefined,
            SonyDateTime2: undefined,
            GPSDateStamp: undefined,
          });

          const oldDate = new Date('2020-01-01T00:00:00.000Z');
          const asset = await utils.createAsset(admin.accessToken, {
            assetData: {
              filename,
              bytes: imageBytes,
            },
            fileCreatedAt: oldDate.toISOString(),
            fileModifiedAt: oldDate.toISOString(),
          });

          await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

          const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

          expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
          // Should fall back to file timestamps, which we set to 2020-01-01
          expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
            new Date('2020-01-01T00:00:00.000Z').getTime(),
          );
        });

        it('should prefer DateTimeOriginal over time-only tags', async () => {
          const { imageBytes, filename } = await createTestImageWithExif('datetime-over-time.jpg', {
            DateTimeOriginal: '2023:10:10 10:00:00', // Should be preferred
            TimeCreated: '2023:11:15 14:30:00', // Should be ignored (time-only)
          });

          const asset = await utils.createAsset(admin.accessToken, {
            assetData: {
              filename,
              bytes: imageBytes,
            },
          });

          await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

          const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

          expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
          // Should use DateTimeOriginal, not TimeCreated
          expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
            new Date('2023-10-10T10:00:00.000Z').getTime(),
          );
        });
      });

      describe('GPSDateTime tag extraction', () => {
        it('should extract GPSDateTime with GPS coordinates', async () => {
          const { imageBytes, filename } = await createTestImageWithExif('gps-datetime.jpg', {
            GPSDateTime: '2023:11:15 12:30:00Z',
            GPSLatitude: 37.7749,
            GPSLongitude: -122.4194,
            // Exclude other date tags
            SubSecDateTimeOriginal: undefined,
            DateTimeOriginal: undefined,
            SubSecCreateDate: undefined,
            SubSecMediaCreateDate: undefined,
            CreateDate: undefined,
            MediaCreateDate: undefined,
            CreationDate: undefined,
            DateTimeCreated: undefined,
            TimeCreated: undefined,
          });

          const asset = await utils.createAsset(admin.accessToken, {
            assetData: {
              filename,
              bytes: imageBytes,
            },
          });

          await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

          const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

          expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
          expect(assetInfo.exifInfo?.latitude).toBeCloseTo(37.7749, 4);
          expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-122.4194, 4);
          expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
            new Date('2023-11-15T12:30:00.000Z').getTime(),
          );
        });
      });

      describe('CreateDate tag extraction', () => {
        it('should extract CreateDate when available', async () => {
          const { imageBytes, filename } = await createTestImageWithExif('create-date.jpg', {
            CreateDate: '2023:11:15 10:30:00',
            // Exclude other higher priority date tags
            SubSecDateTimeOriginal: undefined,
            DateTimeOriginal: undefined,
            SubSecCreateDate: undefined,
            SubSecMediaCreateDate: undefined,
            MediaCreateDate: undefined,
            CreationDate: undefined,
            DateTimeCreated: undefined,
            TimeCreated: undefined,
            GPSDateTime: undefined,
          });

          const asset = await utils.createAsset(admin.accessToken, {
            assetData: {
              filename,
              bytes: imageBytes,
            },
          });

          await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

          const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

          expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
          expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
            new Date('2023-11-15T10:30:00.000Z').getTime(),
          );
        });
      });

      describe('GPSDateStamp tag extraction', () => {
        it('should fall back to file timestamps when only date-only tags are available', async () => {
          const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp.jpg', {
            GPSDateStamp: '2023:11:15', // Date-only tag, should not be used for dateTimeOriginal
            // Note: NOT including GPSTimeStamp to avoid automatic GPSDateTime creation
            GPSLatitude: 51.5074,
            GPSLongitude: -0.1278,
            // Explicitly exclude all testable date-time tags to force fallback to file timestamps
            DateTimeOriginal: undefined,
            CreateDate: undefined,
            CreationDate: undefined,
            GPSDateTime: undefined,
          });

          const oldDate = new Date('2020-01-01T00:00:00.000Z');
          const asset = await utils.createAsset(admin.accessToken, {
            assetData: {
              filename,
              bytes: imageBytes,
            },
            fileCreatedAt: oldDate.toISOString(),
            fileModifiedAt: oldDate.toISOString(),
          });

          await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

          const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

          expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
          expect(assetInfo.exifInfo?.latitude).toBeCloseTo(51.5074, 4);
          expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-0.1278, 4);
          // Should fall back to file timestamps, which we set to 2020-01-01
          expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
            new Date('2020-01-01T00:00:00.000Z').getTime(),
          );
        });
      });

      /*
       * NOTE: The following EXIF date tags are NOT effectively usable with JPEG test files:
       *
       * NOT WRITABLE to JPEG:
       * - MediaCreateDate: Can be read from video files but not written to JPEG
       * - DateTimeCreated: Read-only tag in JPEG format
       * - DateTimeUTC: Cannot be written to JPEG files
       * - SonyDateTime2: Proprietary Sony tag, not writable to JPEG
       * - SubSecMediaCreateDate: Tag not defined for JPEG format
       * - SourceImageCreateTime: Non-standard insta360 tag, not writable to JPEG
       *
       * WRITABLE but NOT READABLE from JPEG:
       * - SubSecDateTimeOriginal: Can be written but not read back from JPEG
       * - SubSecCreateDate: Can be written but not read back from JPEG
       *
       * EFFECTIVELY TESTABLE TAGS (writable and readable):
       * - DateTimeOriginal ✓
       * - CreateDate ✓
       * - CreationDate ✓
       * - GPSDateTime ✓
       *
       * The metadata service correctly handles non-readable tags and will fall back to
       * file timestamps when only non-readable tags are present.
       */

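      // Illustrative sketch only: this hypothetical helper is not the metadata service's
      // implementation, it simply restates the behaviour the surrounding tests exercise - walk the
      // date tags in priority order, skip values that do not parse, and fall back to the file
      // timestamp when nothing usable remains. The name and the tag list are assumptions.
      const resolveDateTimeOriginalSketch = (tags: Record<string, string | undefined>, fileCreatedAt: Date): Date => {
        for (const tag of ['DateTimeOriginal', 'CreateDate', 'CreationDate', 'GPSDateTime']) {
          const value = tags[tag];
          if (!value) {
            continue;
          }
          // EXIF dates look like '2023:10:10 10:00:00'; normalize the date part before parsing
          const normalized = value.replace(/^(\d{4}):(\d{2}):(\d{2}) /, '$1-$2-$3T');
          const parsed = new Date(normalized.endsWith('Z') ? normalized : `${normalized}Z`);
          if (!Number.isNaN(parsed.getTime())) {
            return parsed; // an unparsable value such as '2023:13:13 13:00:00' is skipped here
          }
        }
        return fileCreatedAt;
      };
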
      describe('Date tag priority order', () => {
        it('should respect the complete date tag priority order', async () => {
          // Test cases using only EFFECTIVELY TESTABLE tags (writable AND readable from JPEG)
          const testCases = [
            {
              name: 'DateTimeOriginal has highest priority among testable tags',
              exifData: {
                DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
                CreateDate: '2023:05:05 05:00:00', // TESTABLE
                CreationDate: '2023:07:07 07:00:00', // TESTABLE
                GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
              },
              expectedDate: '2023-04-04T04:00:00.000Z',
            },
            {
              name: 'CreateDate when DateTimeOriginal missing',
              exifData: {
                CreateDate: '2023:05:05 05:00:00', // TESTABLE
                CreationDate: '2023:07:07 07:00:00', // TESTABLE
                GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
              },
              expectedDate: '2023-05-05T05:00:00.000Z',
            },
            {
              name: 'CreationDate when standard EXIF tags missing',
              exifData: {
                CreationDate: '2023:07:07 07:00:00', // TESTABLE
                GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
              },
              expectedDate: '2023-07-07T07:00:00.000Z',
            },
            {
              name: 'GPSDateTime when no other testable date tags present',
              exifData: {
                GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
                Make: 'SONY',
              },
              expectedDate: '2023-10-10T10:00:00.000Z',
            },
          ];

          for (const testCase of testCases) {
            const { imageBytes, filename } = await createTestImageWithExif(
              `${testCase.name.replaceAll(/\s+/g, '-').toLowerCase()}.jpg`,
              testCase.exifData,
            );

            const asset = await utils.createAsset(admin.accessToken, {
              assetData: {
                filename,
                bytes: imageBytes,
              },
            });

            await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

            const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

            expect(assetInfo.exifInfo?.dateTimeOriginal, `Failed for: ${testCase.name}`).toBeDefined();
            expect(
              new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime(),
              `Date mismatch for: ${testCase.name}`,
            ).toBe(new Date(testCase.expectedDate).getTime());
          }
        });
      });

      describe('Edge cases for date tag handling', () => {
        it('should fall back to file timestamps with GPSDateStamp alone', async () => {
          const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp-only.jpg', {
            GPSDateStamp: '2023:08:08', // Date-only tag, should not be used for dateTimeOriginal
            // Intentionally no GPSTimeStamp
            // Exclude all other date tags
            SubSecDateTimeOriginal: undefined,
            DateTimeOriginal: undefined,
            SubSecCreateDate: undefined,
            SubSecMediaCreateDate: undefined,
            CreateDate: undefined,
            MediaCreateDate: undefined,
            CreationDate: undefined,
            DateTimeCreated: undefined,
            TimeCreated: undefined,
            GPSDateTime: undefined,
            DateTimeUTC: undefined,
          });

          const oldDate = new Date('2020-01-01T00:00:00.000Z');
          const asset = await utils.createAsset(admin.accessToken, {
            assetData: {
              filename,
              bytes: imageBytes,
            },
            fileCreatedAt: oldDate.toISOString(),
            fileModifiedAt: oldDate.toISOString(),
          });

          await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

          const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

          expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
          // Should fall back to file timestamps, which we set to 2020-01-01
          expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
            new Date('2020-01-01T00:00:00.000Z').getTime(),
          );
        });

        it('should handle all testable date tags present to verify complete priority order', async () => {
          const { imageBytes, filename } = await createTestImageWithExif('all-testable-date-tags.jpg', {
            // All TESTABLE date tags to JPEG format (writable AND readable)
            DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
            CreateDate: '2023:05:05 05:00:00', // TESTABLE
            CreationDate: '2023:07:07 07:00:00', // TESTABLE
            GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
            // Note: Excluded non-testable tags:
            // SubSec tags: writable but not readable from JPEG
            // Non-writable tags: MediaCreateDate, DateTimeCreated, DateTimeUTC, SonyDateTime2, etc.
            // Time-only/date-only tags: already excluded from EXIF_DATE_TAGS
          });

          const asset = await utils.createAsset(admin.accessToken, {
            assetData: {
              filename,
              bytes: imageBytes,
            },
          });

          await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

          const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

          expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
          // Should use DateTimeOriginal as it has the highest priority among testable tags
          expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
            new Date('2023-04-04T04:00:00.000Z').getTime(),
          );
        });

        it('should use CreationDate when SubSec tags are missing', async () => {
          const { imageBytes, filename } = await createTestImageWithExif('creation-date-priority.jpg', {
            CreationDate: '2023:07:07 07:00:00', // WRITABLE
            GPSDateTime: '2023:10:10 10:00:00', // WRITABLE
            // Note: DateTimeCreated, DateTimeUTC, SonyDateTime2 are NOT writable to JPEG
            // Note: TimeCreated and GPSDateStamp are excluded from EXIF_DATE_TAGS (time-only/date-only)
            // Exclude SubSec and standard EXIF tags
            SubSecDateTimeOriginal: undefined,
            DateTimeOriginal: undefined,
            SubSecCreateDate: undefined,
            CreateDate: undefined,
          });

          const asset = await utils.createAsset(admin.accessToken, {
            assetData: {
              filename,
              bytes: imageBytes,
            },
          });

          await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

          const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

          expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
          // Should use CreationDate when available
          expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
            new Date('2023-07-07T07:00:00.000Z').getTime(),
          );
        });

        it('should skip invalid date formats and use next valid tag', async () => {
          const { imageBytes, filename } = await createTestImageWithExif('invalid-date-handling.jpg', {
            // Note: Testing invalid date handling with only WRITABLE tags
            GPSDateTime: '2023:10:10 10:00:00', // WRITABLE - Valid date
            CreationDate: '2023:13:13 13:00:00', // WRITABLE - Invalid date (month 13), should be skipped
            // Note: TimeCreated excluded (time-only), DateTimeCreated not writable to JPEG
            // Exclude other date tags
            SubSecDateTimeOriginal: undefined,
            DateTimeOriginal: undefined,
            SubSecCreateDate: undefined,
            CreateDate: undefined,
          });

          const asset = await utils.createAsset(admin.accessToken, {
            assetData: {
              filename,
              bytes: imageBytes,
            },
          });

          await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

          const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

          expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
          // Should skip invalid dates and use the first valid one (GPSDateTime)
          expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
            new Date('2023-10-10T10:00:00.000Z').getTime(),
          );
        });
      });
    });
  });

  describe('POST /assets/exist', () => {
    it('ignores invalid deviceAssetIds', async () => {
      const response = await utils.checkExistingAssets(user1.accessToken, {

Some files were not shown because too many files have changed in this diff.