Compare commits
647 Commits
v1.99.0
...
feat/serve
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f48318168b | ||
|
|
dda736840b | ||
|
|
4c7347d653 | ||
|
|
dca420ef70 | ||
|
|
21bd20fd75 | ||
|
|
351dd647a9 | ||
|
|
298370b7be | ||
|
|
e3d39837d0 | ||
|
|
38f4a02a14 | ||
|
|
fc5615eff6 | ||
|
|
6879bcb7a4 | ||
|
|
11152f9b3d | ||
|
|
75830a4878 | ||
|
|
e7c8501930 | ||
|
|
50f9b2d44e | ||
|
|
9628ea2d24 | ||
|
|
4d4bb8b6a7 | ||
|
|
99f0aa868a | ||
|
|
459fee9ee4 | ||
|
|
871f3ea468 | ||
|
|
98c4c683ae | ||
|
|
8a7b0f66a4 | ||
|
|
9e71256191 | ||
|
|
d5cf8e4bfe | ||
|
|
95012dc19b | ||
|
|
f197f5d530 | ||
|
|
7168707395 | ||
|
|
847cb90038 | ||
|
|
602f0a3499 | ||
|
|
fdaa0e5413 | ||
|
|
39d2c4f37b | ||
|
|
1f5d82e9d9 | ||
|
|
e98744f222 | ||
|
|
56ea07bcba | ||
|
|
b3b258f32f | ||
|
|
69b5eb005f | ||
|
|
3f44a33eac | ||
|
|
b2a0422efb | ||
|
|
562c43b6f5 | ||
|
|
4f21f6a2e1 | ||
|
|
76fdcc9863 | ||
|
|
57d94bce68 | ||
|
|
832d728940 | ||
|
|
8bfa6769a5 | ||
|
|
e7aa50425c | ||
|
|
a5e8b451b2 | ||
|
|
13cbdf6851 | ||
|
|
967d195a05 | ||
|
|
8f37784eae | ||
|
|
ecd018a826 | ||
|
|
202745f14b | ||
|
|
6a4c2e97c0 | ||
|
|
f6f82a5662 | ||
|
|
ae21781442 | ||
|
|
06ce8247cc | ||
|
|
a4887bfa7e | ||
|
|
27a02c75dc | ||
|
|
f8ee977b9e | ||
|
|
a3e7e8cc31 | ||
|
|
a341ab0050 | ||
|
|
61b850f0ce | ||
|
|
a3489d604b | ||
|
|
00b5ad3421 | ||
|
|
2ada4a8426 | ||
|
|
924e9f08cd | ||
|
|
91b835cfeb | ||
|
|
bb79df655d | ||
|
|
02e755bd92 | ||
|
|
0963a32a95 | ||
|
|
6119618ae2 | ||
|
|
4d044658a0 | ||
|
|
67fa598f44 | ||
|
|
9222b9d130 | ||
|
|
d6757fc2dd | ||
|
|
4f838eabbe | ||
|
|
143b9d6828 | ||
|
|
1df7be8436 | ||
|
|
5f25f28c42 | ||
|
|
ae92422df7 | ||
|
|
84d824d6a7 | ||
|
|
4353153fe6 | ||
|
|
c37bf9d5d0 | ||
|
|
5c8c0b2f5b | ||
|
|
39129721fa | ||
|
|
451416ec88 | ||
|
|
e39ee8a16f | ||
|
|
1e56352b04 | ||
|
|
470c95d614 | ||
|
|
1ad04f0b17 | ||
|
|
60427f18ce | ||
|
|
9aed736911 | ||
|
|
6b369e84fc | ||
|
|
136bb69bd0 | ||
|
|
975f2351ec | ||
|
|
2e62c7b417 | ||
|
|
2689178a35 | ||
|
|
d61418886f | ||
|
|
c03981ac1d | ||
|
|
4807fc40a6 | ||
|
|
101bc290f9 | ||
|
|
df42352f84 | ||
|
|
c8aa6a62c2 | ||
|
|
85aca2bb54 | ||
|
|
ff52300624 | ||
|
|
936a46b4ed | ||
|
|
d8eca168ca | ||
|
|
64636c0618 | ||
|
|
673e97e71d | ||
|
|
984aa8fb41 | ||
|
|
7f0f016f2e | ||
|
|
0589575154 | ||
|
|
73bf8f343a | ||
|
|
581b467b4b | ||
|
|
efb844c6cd | ||
|
|
88d4338348 | ||
|
|
ce7bbe88f9 | ||
|
|
d62e90424e | ||
|
|
0f129cae4a | ||
|
|
5583635947 | ||
|
|
b7715305b3 | ||
|
|
6c008176c9 | ||
|
|
72b1d582ba | ||
|
|
7b1112f3e3 | ||
|
|
42d0fc85ca | ||
|
|
d32b621750 | ||
|
|
d551003311 | ||
|
|
dd64379a4e | ||
|
|
c1cdda0ac4 | ||
|
|
596ab39293 | ||
|
|
31e57d27a7 | ||
|
|
f28b4e7c99 | ||
|
|
f01cf63c70 | ||
|
|
e55c5091d9 | ||
|
|
e8cdf1c300 | ||
|
|
116043b2b4 | ||
|
|
acc611a3d9 | ||
|
|
4d7aa7effd | ||
|
|
77b8c2f330 | ||
|
|
09e9e91b6a | ||
|
|
ad915ccd64 | ||
|
|
1ea55d642e | ||
|
|
3d5e55bdaa | ||
|
|
46868b3336 | ||
|
|
b1ca5455b5 | ||
|
|
462f0f76a4 | ||
|
|
3d4ae9c210 | ||
|
|
f2be310aec | ||
|
|
6fd6a8ba15 | ||
|
|
e479e556bc | ||
|
|
bf036f2f58 | ||
|
|
6d575e692b | ||
|
|
4e6aeeda4d | ||
|
|
a05c990718 | ||
|
|
844f5a16a1 | ||
|
|
1bebc7368c | ||
|
|
b7ebf3152f | ||
|
|
5985f72643 | ||
|
|
a6f557c24c | ||
|
|
1045957add | ||
|
|
540e568e9d | ||
|
|
9c5a2b97bf | ||
|
|
06402aa9fb | ||
|
|
45316f985b | ||
|
|
48927f5fb9 | ||
|
|
d121903b38 | ||
|
|
e2a31323bb | ||
|
|
aa1dc68867 | ||
|
|
2ae44022c2 | ||
|
|
a97e0caeb9 | ||
|
|
f667c9597b | ||
|
|
8fdcabaf70 | ||
|
|
55a7e54011 | ||
|
|
6f82f220b8 | ||
|
|
dd8d7732de | ||
|
|
bb4843747b | ||
|
|
f3fbb9b588 | ||
|
|
35ef82ab6b | ||
|
|
fa4cd74dfd | ||
|
|
fed8d11fb8 | ||
|
|
3a68190b99 | ||
|
|
757840c2fd | ||
|
|
a8abf2753e | ||
|
|
8743e17528 | ||
|
|
34d8879d32 | ||
|
|
55031cc117 | ||
|
|
f9dc870166 | ||
|
|
87053c8c0d | ||
|
|
57429ddc76 | ||
|
|
13a62715e4 | ||
|
|
f3407860a1 | ||
|
|
004074b25a | ||
|
|
98f87b275b | ||
|
|
8e9895df27 | ||
|
|
1167f0f2b7 | ||
|
|
81e4b69caf | ||
|
|
e4b777ecef | ||
|
|
4e7966c8e8 | ||
|
|
f10fb0723d | ||
|
|
ae08abde24 | ||
|
|
3790d8fcbc | ||
|
|
535c7a8618 | ||
|
|
725d594027 | ||
|
|
8c56c1c22b | ||
|
|
6a5bc156a6 | ||
|
|
5eaf489ecf | ||
|
|
8c54c13307 | ||
|
|
675fdc1d93 | ||
|
|
e79d1b1ec2 | ||
|
|
e9f99673b9 | ||
|
|
ee6995783f | ||
|
|
78b5990d2a | ||
|
|
f13100e261 | ||
|
|
21fe829a2c | ||
|
|
ad404d79d4 | ||
|
|
5806a3ce25 | ||
|
|
7520ffd6c3 | ||
|
|
bbb9453e1a | ||
|
|
8ff397527f | ||
|
|
a2c5eaa73e | ||
|
|
065f1410f8 | ||
|
|
bc31404909 | ||
|
|
f08e9a4447 | ||
|
|
cf79bc9ed7 | ||
|
|
090592e5ae | ||
|
|
398d99a052 | ||
|
|
c18beddef8 | ||
|
|
dc4f7bef69 | ||
|
|
48b490f5e9 | ||
|
|
5b87abb021 | ||
|
|
d26ac431b8 | ||
|
|
d7f53d93a6 | ||
|
|
ec4e6a143e | ||
|
|
7961d00e56 | ||
|
|
c1253663b7 | ||
|
|
ec4eb7cd19 | ||
|
|
16706f7f49 | ||
|
|
bf100dcde1 | ||
|
|
c2a525170e | ||
|
|
0bf923feb4 | ||
|
|
42f03af2dc | ||
|
|
9bce3417e9 | ||
|
|
4b86c7a298 | ||
|
|
a2c040a47f | ||
|
|
959b3f05d2 | ||
|
|
a0c43a2b5a | ||
|
|
b7d0bc16bb | ||
|
|
ef09fc4157 | ||
|
|
84d638645d | ||
|
|
64e9791a3f | ||
|
|
0aa3b29eeb | ||
|
|
0a598ae1b8 | ||
|
|
c0495ca23f | ||
|
|
f057fe045e | ||
|
|
eba42b245d | ||
|
|
2cf63eeeab | ||
|
|
926de96ce6 | ||
|
|
374a9b557b | ||
|
|
9e79a23bbe | ||
|
|
3212069eec | ||
|
|
c1636ef7ab | ||
|
|
0b0ab99016 | ||
|
|
4f097d9106 | ||
|
|
461f2595b5 | ||
|
|
5722c830ff | ||
|
|
72ce81f0c2 | ||
|
|
0c9bf2835d | ||
|
|
ba18776fc2 | ||
|
|
fdae0dcbe5 | ||
|
|
25262b644f | ||
|
|
4291b38769 | ||
|
|
ee4877b090 | ||
|
|
b4f6184aa6 | ||
|
|
87de809e3d | ||
|
|
59caf1fce4 | ||
|
|
6eb5d2e95e | ||
|
|
32e7cfea3d | ||
|
|
fc2e709ad4 | ||
|
|
3c7b8d560f | ||
|
|
4bb7d2df49 | ||
|
|
a2cf8c7fc7 | ||
|
|
56a42dad17 | ||
|
|
13e093b3c2 | ||
|
|
2648032163 | ||
|
|
19aa97da02 | ||
|
|
034c928d9e | ||
|
|
953896a35a | ||
|
|
833a78181b | ||
|
|
cf01ec1eb0 | ||
|
|
90882a9b26 | ||
|
|
0b68cc2da6 | ||
|
|
7ea539b753 | ||
|
|
1e34b01986 | ||
|
|
0d8a04b43c | ||
|
|
5a49de5592 | ||
|
|
0c60aaf557 | ||
|
|
0571901288 | ||
|
|
557f9d8e5f | ||
|
|
8d5729c3b2 | ||
|
|
0ff0b891a7 | ||
|
|
53d571d29e | ||
|
|
00d186ec52 | ||
|
|
69f8bfe874 | ||
|
|
1d15cfb5f3 | ||
|
|
3e03f5348f | ||
|
|
59537f8f1b | ||
|
|
f1083a4c73 | ||
|
|
52bcb46b42 | ||
|
|
d52ed51aab | ||
|
|
c9dcb5c624 | ||
|
|
40899f6137 | ||
|
|
912d0c4d74 | ||
|
|
adb607c3ee | ||
|
|
c14a2eda5d | ||
|
|
a0d03925e0 | ||
|
|
732bd1e652 | ||
|
|
2943f93098 | ||
|
|
0b3373c552 | ||
|
|
a90138e42e | ||
|
|
466451abc9 | ||
|
|
f0f9053115 | ||
|
|
dc9b51ad02 | ||
|
|
0dbe44cb78 | ||
|
|
a78260296c | ||
|
|
1e004611e4 | ||
|
|
2593110219 | ||
|
|
56ce58c718 | ||
|
|
aac789f788 | ||
|
|
99ccf28bc6 | ||
|
|
48b0b7e8bd | ||
|
|
70c78a09a4 | ||
|
|
4458cc4370 | ||
|
|
0466da03ae | ||
|
|
7a2c1bab23 | ||
|
|
0435de50f8 | ||
|
|
661540c886 | ||
|
|
f5cf057e84 | ||
|
|
a91fd772e4 | ||
|
|
7f1651df71 | ||
|
|
c9a079201a | ||
|
|
be4a783845 | ||
|
|
c30cd3b378 | ||
|
|
0d3cc28f45 | ||
|
|
f004487be0 | ||
|
|
21231d53a5 | ||
|
|
a99862120d | ||
|
|
776023b149 | ||
|
|
7d4187962a | ||
|
|
a93534fc3c | ||
|
|
cef84f6ced | ||
|
|
a2180a467d | ||
|
|
1e3dceea4d | ||
|
|
fd4514711f | ||
|
|
2dd7c13b88 | ||
|
|
40931b5668 | ||
|
|
25549b87c9 | ||
|
|
7ec62f12b5 | ||
|
|
caf76f0713 | ||
|
|
6778653825 | ||
|
|
c858b43717 | ||
|
|
6eb1b82541 | ||
|
|
71b6d8b569 | ||
|
|
3abfe3c99e | ||
|
|
171b6bb0a6 | ||
|
|
78c7ff855d | ||
|
|
57be9182d4 | ||
|
|
886e07604e | ||
|
|
431ffebddd | ||
|
|
74c921148b | ||
|
|
eaf9e5e477 | ||
|
|
4478e524f8 | ||
|
|
e72e41a7aa | ||
|
|
efd8f0d648 | ||
|
|
d2b5cc6a4a | ||
|
|
596c35dc00 | ||
|
|
112d6d60ec | ||
|
|
c50241369a | ||
|
|
b74f8273c2 | ||
|
|
8573c84605 | ||
|
|
a4f805e99b | ||
|
|
7db07bbe61 | ||
|
|
3a9df6dae8 | ||
|
|
c227f9893e | ||
|
|
a3feca2580 | ||
|
|
b21566c2fc | ||
|
|
1071396a4a | ||
|
|
f58886514d | ||
|
|
17dc12cf7d | ||
|
|
6d4d0f86cf | ||
|
|
14b1425e98 | ||
|
|
c70d9f9055 | ||
|
|
18fa6018c0 | ||
|
|
47fb9bd213 | ||
|
|
6e6deec40c | ||
|
|
877207a2e6 | ||
|
|
64cfd017b4 | ||
|
|
4c4ebf769f | ||
|
|
28d081338b | ||
|
|
50c9bc0336 | ||
|
|
ed2e4e5217 | ||
|
|
1aa8707b8a | ||
|
|
103cb60a57 | ||
|
|
58e516c766 | ||
|
|
bcdec25843 | ||
|
|
28f591d01b | ||
|
|
dba365634a | ||
|
|
1c1e461936 | ||
|
|
2db76034b1 | ||
|
|
95e67a7b1d | ||
|
|
3deaaf14c0 | ||
|
|
084a97a77a | ||
|
|
ed74213c63 | ||
|
|
7ce1662b05 | ||
|
|
f959f2de85 | ||
|
|
07716bbff7 | ||
|
|
0f74b17000 | ||
|
|
3c7f70ec30 | ||
|
|
85df3f1e99 | ||
|
|
a903898781 | ||
|
|
25e1887939 | ||
|
|
9c696e4c28 | ||
|
|
87a36846f4 | ||
|
|
ded01401f8 | ||
|
|
8aff392275 | ||
|
|
14b798fcc4 | ||
|
|
97c099e26d | ||
|
|
3eb61a9d53 | ||
|
|
e65b3a8ea0 | ||
|
|
1fdbc949d6 | ||
|
|
605da89425 | ||
|
|
0d062b32a8 | ||
|
|
a4267ed60f | ||
|
|
58346465aa | ||
|
|
ec76e5ef23 | ||
|
|
37eea2d353 | ||
|
|
8c9a092561 | ||
|
|
e421fe9860 | ||
|
|
e13d4c9c13 | ||
|
|
640f53fe0a | ||
|
|
1bca1b8bde | ||
|
|
c902c93082 | ||
|
|
f1ca1794a1 | ||
|
|
ad5d115abe | ||
|
|
56079527ef | ||
|
|
c77b9f359f | ||
|
|
7f504ec5fc | ||
|
|
b1bcd67f5a | ||
|
|
2a26574808 | ||
|
|
1d427d0581 | ||
|
|
321868963d | ||
|
|
1529b67e41 | ||
|
|
190e4b55eb | ||
|
|
9e122764e7 | ||
|
|
327b9bd59c | ||
|
|
301c217303 | ||
|
|
9883473376 | ||
|
|
6631e6eedc | ||
|
|
933b6b67f5 | ||
|
|
56e0e5d6ad | ||
|
|
369bd17c8b | ||
|
|
9681f5b360 | ||
|
|
b107894976 | ||
|
|
796c933fb8 | ||
|
|
d43daaee81 | ||
|
|
b6cdffa509 | ||
|
|
7b1562c050 | ||
|
|
20583d5334 | ||
|
|
2d03d7c373 | ||
|
|
dd15d33bce | ||
|
|
c5e8f38e1e | ||
|
|
db45ec7434 | ||
|
|
4f4bceec94 | ||
|
|
7a16233584 | ||
|
|
fff12e3d78 | ||
|
|
da750ed838 | ||
|
|
e49512896f | ||
|
|
0075243ed5 | ||
|
|
29e47dd7c1 | ||
|
|
105a74caca | ||
|
|
55b9acca78 | ||
|
|
0d130b8957 | ||
|
|
0aa5d3daeb | ||
|
|
4b622e6cfa | ||
|
|
82aeb3292a | ||
|
|
335c03d0b8 | ||
|
|
4681ff88d0 | ||
|
|
33fd27f113 | ||
|
|
527fd7d472 | ||
|
|
3a69e5e819 | ||
|
|
7e611fa99c | ||
|
|
71d346207d | ||
|
|
56d27bc1b4 | ||
|
|
e1f8e96e28 | ||
|
|
ab97f03cb5 | ||
|
|
a2e38270e4 | ||
|
|
8f981b6052 | ||
|
|
939e91f9ed | ||
|
|
22c3d26604 | ||
|
|
7aaf48cb0c | ||
|
|
afd7815420 | ||
|
|
e5fe68cbf6 | ||
|
|
3b0fff3b3d | ||
|
|
ec7015be88 | ||
|
|
19fafd8c10 | ||
|
|
e47a89b274 | ||
|
|
66650f5944 | ||
|
|
0529076ed7 | ||
|
|
7f854432ae | ||
|
|
15a2e6feeb | ||
|
|
4ed68cf673 | ||
|
|
8337da183c | ||
|
|
6dfa9e1146 | ||
|
|
282bccaca5 | ||
|
|
62d307321a | ||
|
|
f7afc0334e | ||
|
|
28e8e539f6 | ||
|
|
7cc19b50fc | ||
|
|
97c340b8a4 | ||
|
|
7b1d4a6787 | ||
|
|
0714d119d7 | ||
|
|
700622e521 | ||
|
|
3682e76dee | ||
|
|
cd0e537e3e | ||
|
|
0849dbd1af | ||
|
|
4ab4a35eba | ||
|
|
e5d9372708 | ||
|
|
8edc2fb46f | ||
|
|
e520c0d1f5 | ||
|
|
506f9f6fb9 | ||
|
|
3cb8f54307 | ||
|
|
ee4d9fff16 | ||
|
|
27be813011 | ||
|
|
861b72ef04 | ||
|
|
fd83280b70 | ||
|
|
169d9d18b0 | ||
|
|
245535ee04 | ||
|
|
5bc9158724 | ||
|
|
6a4bc777a2 | ||
|
|
34cbb18ecd | ||
|
|
e2d5a8c0bb | ||
|
|
94cd806675 | ||
|
|
b6af7788e1 | ||
|
|
c4bb9f49ff | ||
|
|
8e5695f06d | ||
|
|
395c28f5fa | ||
|
|
3e5183606c | ||
|
|
6a36bbd1d1 | ||
|
|
4b39d37cae | ||
|
|
25c9b779e4 | ||
|
|
fcc3b81745 | ||
|
|
6f677b4fae | ||
|
|
78f202603c | ||
|
|
b8c5363a15 | ||
|
|
b8b3c487d4 | ||
|
|
ec48fccb30 | ||
|
|
3f61019ca1 | ||
|
|
16513b4a6e | ||
|
|
9b705e4450 | ||
|
|
e1c2135850 | ||
|
|
9fe80c25eb | ||
|
|
13b11a39a9 | ||
|
|
8bf571bf48 | ||
|
|
613b544bf0 | ||
|
|
916603d2d4 | ||
|
|
e30eecba2c | ||
|
|
3a94be0212 | ||
|
|
6295edcdb7 | ||
|
|
335b4937ed | ||
|
|
06da0469c4 | ||
|
|
1ad893ded4 | ||
|
|
636f5fb933 | ||
|
|
c45e28ab53 | ||
|
|
c56c04a82b | ||
|
|
d431d37454 | ||
|
|
1694dd146e | ||
|
|
4a6a0aa142 | ||
|
|
c788160532 | ||
|
|
cc66159f04 | ||
|
|
c58a70ac8f | ||
|
|
1855aaea99 | ||
|
|
3901b5da44 | ||
|
|
5dc59b591d | ||
|
|
96a5710932 | ||
|
|
d36d32d07b | ||
|
|
727b3b9f53 | ||
|
|
c85563da50 | ||
|
|
a771c563ba | ||
|
|
3cc800f93a | ||
|
|
b449feb3e1 | ||
|
|
b07a565e34 | ||
|
|
787eebcf1e | ||
|
|
604b8ff17c | ||
|
|
6e93ddf2f1 | ||
|
|
b6e4be72f0 | ||
|
|
75aa8e6621 | ||
|
|
5b7417bf64 | ||
|
|
db744f500b | ||
|
|
a56cf35d8c | ||
|
|
d1e6843f3e | ||
|
|
d18868873e | ||
|
|
827014fa4b | ||
|
|
944b33983c | ||
|
|
2641185af2 | ||
|
|
64aac239f0 | ||
|
|
d6823b128c | ||
|
|
508f32c08a | ||
|
|
8ed6ed4d2b | ||
|
|
1abb0bdae8 | ||
|
|
5ef6215546 | ||
|
|
95fb9c4365 | ||
|
|
fa0a5107c2 | ||
|
|
dc3c329431 | ||
|
|
2a9f2b4515 | ||
|
|
793049388b | ||
|
|
382b63954c | ||
|
|
87ccba7f9d | ||
|
|
e21c96c0ef | ||
|
|
4de0b2f44e | ||
|
|
b588a87d4a | ||
|
|
44ed1f0919 | ||
|
|
16d0df796c | ||
|
|
9fd5d2ad9c | ||
|
|
28ad004b01 | ||
|
|
ef4a492cb1 | ||
|
|
6d9e7694b1 | ||
|
|
0c13c63bb6 | ||
|
|
907eb869bc | ||
|
|
c1402eee8e | ||
|
|
84f7ca855a | ||
|
|
2dcce03352 | ||
|
|
96a22ec3c1 | ||
|
|
4b29bccc7c | ||
|
|
40e079a247 | ||
|
|
81f0265095 | ||
|
|
92cc647cf6 | ||
|
|
048d437b0b | ||
|
|
ec9a6bca14 | ||
|
|
bd5952b943 | ||
|
|
3f0d54c752 | ||
|
|
dab4595a4e | ||
|
|
6d9ca82b19 | ||
|
|
373a03e819 | ||
|
|
d97b0259fa | ||
|
|
2267ca1949 | ||
|
|
29be53e70d | ||
|
|
851fe4a49f | ||
|
|
30f499cf2e | ||
|
|
591a641d8d |
@@ -23,7 +23,6 @@ server/node_modules/
|
||||
server/upload/
|
||||
server/dist/
|
||||
server/www/
|
||||
server/test/assets/
|
||||
|
||||
web/node_modules/
|
||||
web/coverage/
|
||||
|
||||
2
.gitattributes
vendored
@@ -2,8 +2,6 @@ mobile/openapi/**/*.md -diff -merge
|
||||
mobile/openapi/**/*.md linguist-generated=true
|
||||
mobile/openapi/**/*.dart -diff -merge
|
||||
mobile/openapi/**/*.dart linguist-generated=true
|
||||
mobile/openapi/.openapi-generator/FILES -diff -merge
|
||||
mobile/openapi/.openapi-generator/FILES linguist-generated=true
|
||||
|
||||
mobile/lib/**/*.g.dart -diff -merge
|
||||
mobile/lib/**/*.g.dart linguist-generated=true
|
||||
|
||||
@@ -6,6 +6,14 @@ body:
|
||||
attributes:
|
||||
value: |
|
||||
Please use this form to request new feature for Immich
|
||||
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: I have searched the existing feature requests to make sure this is not a duplicate request.
|
||||
options:
|
||||
- label: "Yes"
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: feature
|
||||
attributes:
|
||||
|
||||
5
.github/FUNDING.yml
vendored
@@ -1,5 +0,0 @@
|
||||
# These are supported funding model platforms
|
||||
|
||||
github: immich-app
|
||||
liberapay: alex.tran1502
|
||||
custom: https://www.buymeacoffee.com/altran1502
|
||||
10
.github/ISSUE_TEMPLATE/bug_report.yaml
vendored
@@ -87,6 +87,16 @@ body:
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Relevant log output
|
||||
description: Please copy and paste any relevant logs below. (code formatting is
|
||||
enabled, no need for backticks)
|
||||
render: shell
|
||||
validations:
|
||||
required: false
|
||||
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Additional information
|
||||
|
||||
10
.github/workflows/build-mobile.yml
vendored
@@ -37,15 +37,15 @@ jobs:
|
||||
|
||||
- uses: actions/setup-java@v4
|
||||
with:
|
||||
distribution: "zulu"
|
||||
java-version: "11.0.21+9"
|
||||
cache: "gradle"
|
||||
distribution: 'zulu'
|
||||
java-version: '17'
|
||||
cache: 'gradle'
|
||||
|
||||
- name: Setup Flutter SDK
|
||||
uses: subosito/flutter-action@v2
|
||||
with:
|
||||
channel: "stable"
|
||||
flutter-version: "3.16.9"
|
||||
channel: 'stable'
|
||||
flutter-version: '3.22.0'
|
||||
cache: true
|
||||
|
||||
- name: Create the Keystore
|
||||
|
||||
2
.github/workflows/cli.yml
vendored
@@ -58,7 +58,7 @@ jobs:
|
||||
uses: docker/setup-qemu-action@v3.0.0
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3.2.0
|
||||
uses: docker/setup-buildx-action@v3.3.0
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
|
||||
4
.github/workflows/docker-cleanup.yml
vendored
@@ -35,7 +35,7 @@ jobs:
|
||||
steps:
|
||||
- name: Clean temporary images
|
||||
if: "${{ env.TOKEN != '' }}"
|
||||
uses: stumpylog/image-cleaner-action/ephemeral@v0.5.0
|
||||
uses: stumpylog/image-cleaner-action/ephemeral@v0.6.0
|
||||
with:
|
||||
token: "${{ env.TOKEN }}"
|
||||
owner: "immich-app"
|
||||
@@ -64,7 +64,7 @@ jobs:
|
||||
steps:
|
||||
- name: Clean untagged images
|
||||
if: "${{ env.TOKEN != '' }}"
|
||||
uses: stumpylog/image-cleaner-action/untagged@v0.5.0
|
||||
uses: stumpylog/image-cleaner-action/untagged@v0.6.0
|
||||
with:
|
||||
token: "${{ env.TOKEN }}"
|
||||
owner: "immich-app"
|
||||
|
||||
2
.github/workflows/docker.yml
vendored
@@ -66,7 +66,7 @@ jobs:
|
||||
uses: docker/setup-qemu-action@v3.0.0
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3.2.0
|
||||
uses: docker/setup-buildx-action@v3.3.0
|
||||
|
||||
- name: Login to Docker Hub
|
||||
# Only push to Docker Hub when making a release
|
||||
|
||||
15
.github/workflows/pr-require-conventional-commit.yml
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
name: PR Conventional Commit Validation
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened, edited]
|
||||
|
||||
jobs:
|
||||
validate-pr-title:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: PR Conventional Commit Validation
|
||||
uses: ytanikin/PRConventionalCommits@1.2.0
|
||||
with:
|
||||
task_types: '["feat","fix","docs","test","ci","refactor","perf","chore","revert"]'
|
||||
add_label: 'false'
|
||||
4
.github/workflows/static_analysis.yml
vendored
@@ -22,8 +22,8 @@ jobs:
|
||||
- name: Setup Flutter SDK
|
||||
uses: subosito/flutter-action@v2
|
||||
with:
|
||||
channel: "stable"
|
||||
flutter-version: "3.16.9"
|
||||
channel: 'stable'
|
||||
flutter-version: '3.22.0'
|
||||
|
||||
- name: Install dependencies
|
||||
run: dart pub get
|
||||
|
||||
42
.github/workflows/test.yml
vendored
@@ -10,19 +10,6 @@ concurrency:
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
server-e2e-jobs:
|
||||
name: Server (e2e-jobs)
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: 'recursive'
|
||||
|
||||
- name: Run e2e tests
|
||||
run: make server-e2e-jobs
|
||||
|
||||
doc-tests:
|
||||
name: Docs
|
||||
runs-on: ubuntu-latest
|
||||
@@ -221,7 +208,7 @@ jobs:
|
||||
uses: subosito/flutter-action@v2
|
||||
with:
|
||||
channel: 'stable'
|
||||
flutter-version: '3.16.9'
|
||||
flutter-version: '3.22.0'
|
||||
- name: Run tests
|
||||
working-directory: ./mobile
|
||||
run: flutter test -j 1
|
||||
@@ -273,16 +260,27 @@ jobs:
|
||||
name: OpenAPI Clients
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install server dependencies
|
||||
run: npm --prefix=server ci
|
||||
|
||||
- name: Build the app
|
||||
run: npm --prefix=server run build
|
||||
|
||||
- name: Run API generation
|
||||
run: make open-api
|
||||
|
||||
- name: Find file changes
|
||||
uses: tj-actions/verify-changed-files@v19
|
||||
uses: tj-actions/verify-changed-files@v20
|
||||
id: verify-changed-files
|
||||
with:
|
||||
files: |
|
||||
mobile/openapi
|
||||
open-api/typescript-sdk
|
||||
open-api/immich-openapi-specs.json
|
||||
|
||||
- name: Verify files have not changed
|
||||
if: steps.verify-changed-files.outputs.files_changed == 'true'
|
||||
run: |
|
||||
@@ -329,14 +327,14 @@ jobs:
|
||||
|
||||
- name: Generate new migrations
|
||||
continue-on-error: true
|
||||
run: npm run typeorm:migrations:generate ./src/infra/migrations/TestMigration
|
||||
run: npm run typeorm:migrations:generate ./src/migrations/TestMigration
|
||||
|
||||
- name: Find file changes
|
||||
uses: tj-actions/verify-changed-files@v19
|
||||
uses: tj-actions/verify-changed-files@v20
|
||||
id: verify-changed-files
|
||||
with:
|
||||
files: |
|
||||
server/src/infra/migrations/
|
||||
server/src/migrations/
|
||||
- name: Verify migration files have not changed
|
||||
if: steps.verify-changed-files.outputs.files_changed == 'true'
|
||||
run: |
|
||||
@@ -345,16 +343,16 @@ jobs:
|
||||
exit 1
|
||||
|
||||
- name: Run SQL generation
|
||||
run: npm run sql:generate
|
||||
run: npm run sync:sql
|
||||
env:
|
||||
DB_URL: postgres://postgres:postgres@localhost:5432/immich
|
||||
|
||||
- name: Find file changes
|
||||
uses: tj-actions/verify-changed-files@v19
|
||||
uses: tj-actions/verify-changed-files@v20
|
||||
id: verify-changed-sql-files
|
||||
with:
|
||||
files: |
|
||||
server/src/infra/sql
|
||||
server/src/queries
|
||||
|
||||
- name: Verify SQL files have not changed
|
||||
if: steps.verify-changed-sql-files.outputs.files_changed == 'true'
|
||||
|
||||
5
.gitignore
vendored
@@ -14,7 +14,10 @@ mobile/gradle.properties
|
||||
mobile/openapi/pubspec.lock
|
||||
mobile/*.jks
|
||||
mobile/libisar.dylib
|
||||
mobile/openapi/test
|
||||
mobile/openapi/doc
|
||||
mobile/openapi/.openapi-generator/FILES
|
||||
|
||||
open-api/typescript-sdk/build
|
||||
mobile/android/fastlane/report.xml
|
||||
mobile/ios/fastlane/report.xml
|
||||
mobile/ios/fastlane/report.xml
|
||||
|
||||
2
.gitmodules
vendored
@@ -2,5 +2,5 @@
|
||||
path = mobile/.isar
|
||||
url = https://github.com/isar/isar
|
||||
[submodule "server/test/assets"]
|
||||
path = server/test/assets
|
||||
path = e2e/test-assets
|
||||
url = https://github.com/immich-app/test-assets
|
||||
|
||||
44
.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,44 @@
|
||||
{
|
||||
"editor.formatOnSave": true,
|
||||
"[javascript]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.tabSize": 2,
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[typescript]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.tabSize": 2,
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[css]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.tabSize": 2,
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[svelte]": {
|
||||
"editor.defaultFormatter": "svelte.svelte-vscode",
|
||||
"editor.tabSize": 2
|
||||
},
|
||||
"svelte.enable-ts-plugin": true,
|
||||
"eslint.validate": [
|
||||
"javascript",
|
||||
"svelte"
|
||||
],
|
||||
"typescript.preferences.importModuleSpecifier": "non-relative",
|
||||
"[dart]": {
|
||||
"editor.formatOnSave": true,
|
||||
"editor.selectionHighlight": false,
|
||||
"editor.suggest.snippetsPreventQuickSuggestions": false,
|
||||
"editor.suggestSelection": "first",
|
||||
"editor.tabCompletion": "onlySnippets",
|
||||
"editor.wordBasedSuggestions": "off",
|
||||
"editor.defaultFormatter": "Dart-Code.dart-code"
|
||||
},
|
||||
"cSpell.words": [
|
||||
"immich"
|
||||
],
|
||||
"explorer.fileNesting.enabled": true,
|
||||
"explorer.fileNesting.patterns": {
|
||||
"*.ts": "${capture}.spec.ts,${capture}.mock.ts"
|
||||
}
|
||||
}
|
||||
5
CODEOWNERS
Normal file
@@ -0,0 +1,5 @@
|
||||
/.github/ @bo0tzz
|
||||
/docker/ @bo0tzz
|
||||
/server/ @danieldietzler
|
||||
/machine-learning/ @mertalev
|
||||
/e2e/ @danieldietzler
|
||||
5
Makefile
@@ -16,9 +16,6 @@ stage:
|
||||
pull-stage:
|
||||
docker compose -f ./docker/docker-compose.staging.yml pull
|
||||
|
||||
server-e2e-jobs:
|
||||
docker compose -f ./server/e2e/docker-compose.server-e2e.yml up --renew-anon-volumes --abort-on-container-exit --exit-code-from immich-server --remove-orphans --build
|
||||
|
||||
.PHONY: e2e
|
||||
e2e:
|
||||
docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans
|
||||
@@ -40,7 +37,7 @@ open-api-typescript:
|
||||
cd ./open-api && bash ./bin/generate-open-api.sh typescript
|
||||
|
||||
sql:
|
||||
npm --prefix server run sql:generate
|
||||
npm --prefix server run sync:sql
|
||||
|
||||
attach-server:
|
||||
docker exec -it docker_immich-server_1 sh
|
||||
|
||||
52
README.md
@@ -18,17 +18,20 @@
|
||||
</a>
|
||||
<br/>
|
||||
<p align="center">
|
||||
<a href="README_ca_ES.md">Català</a>
|
||||
<a href="README_es_ES.md">Español</a>
|
||||
<a href="README_fr_FR.md">Français</a>
|
||||
<a href="README_it_IT.md">Italiano</a>
|
||||
<a href="README_ja_JP.md">日本語</a>
|
||||
<a href="README_ko_KR.md">한국어</a>
|
||||
<a href="README_de_DE.md">Deutsch</a>
|
||||
<a href="README_nl_NL.md">Nederlands</a>
|
||||
<a href="README_tr_TR.md">Türkçe</a>
|
||||
<a href="README_zh_CN.md">中文</a>
|
||||
<a href="README_ru_RU.md">Русский</a>
|
||||
|
||||
<a href="readme_i18n/README_ca_ES.md">Català</a>
|
||||
<a href="readme_i18n/README_es_ES.md">Español</a>
|
||||
<a href="readme_i18n/README_fr_FR.md">Français</a>
|
||||
<a href="readme_i18n/README_it_IT.md">Italiano</a>
|
||||
<a href="readme_i18n/README_ja_JP.md">日本語</a>
|
||||
<a href="readme_i18n/README_ko_KR.md">한국어</a>
|
||||
<a href="readme_i18n/README_de_DE.md">Deutsch</a>
|
||||
<a href="readme_i18n/README_nl_NL.md">Nederlands</a>
|
||||
<a href="readme_i18n/README_tr_TR.md">Türkçe</a>
|
||||
<a href="readme_i18n/README_zh_CN.md">中文</a>
|
||||
<a href="readme_i18n/README_ru_RU.md">Русский</a>
|
||||
<a href="readme_i18n/README_pt_BR.md">Português Brasileiro</a>
|
||||
<a href="readme_i18n/README_ar_JO.md">العربية</a>
|
||||
</p>
|
||||
|
||||
## Disclaimer
|
||||
@@ -47,7 +50,6 @@
|
||||
- [Introduction](https://immich.app/docs/overview/introduction)
|
||||
- [Installation](https://immich.app/docs/install/requirements)
|
||||
- [Contribution Guidelines](https://immich.app/docs/overview/support-the-project)
|
||||
- [Support The Project](#support-the-project)
|
||||
|
||||
## Documentation
|
||||
|
||||
@@ -70,6 +72,7 @@ Spec: Free-tier Oracle VM - Amsterdam - 2.4Ghz quad-core ARM64 CPU, 24GB RAM
|
||||
```
|
||||
|
||||
## Activities
|
||||
|
||||

|
||||
|
||||
## Features
|
||||
@@ -106,23 +109,6 @@ Spec: Free-tier Oracle VM - Amsterdam - 2.4Ghz quad-core ARM64 CPU, 24GB RAM
|
||||
| Read-only gallery | Yes | Yes |
|
||||
| Stacked Photos | Yes | Yes |
|
||||
|
||||
## Support the project
|
||||
|
||||
I've committed to this project, and I will not stop. I will keep updating the docs, adding new features, and fixing bugs. But I can't do it alone. So I need your help to give me additional motivation to keep going.
|
||||
|
||||
As our hosts in the [selfhosted.show - In the episode 'The-organization-must-not-be-name is a Hostile Actor'](https://selfhosted.show/79?t=1418) said, this is a massive undertaking of what the team and I are doing. And I would love to someday be able to do this full-time, and I am asking for your help to make that happen.
|
||||
|
||||
If you feel like this is the right cause and the app is something you are seeing yourself using for a long time, please consider supporting the project with the option below.
|
||||
|
||||
### Donation
|
||||
|
||||
- [Monthly donation](https://github.com/sponsors/immich-app) via GitHub Sponsors
|
||||
- [One-time donation](https://github.com/sponsors/immich-app?frequency=one-time&sponsor=alextran1502) via GitHub Sponsors
|
||||
- [Liberapay](https://liberapay.com/alex.tran1502/)
|
||||
- [buymeacoffee](https://www.buymeacoffee.com/altran1502)
|
||||
- Bitcoin: 3QVAb9dCHutquVejeNXitPqZX26Yg5kxb7
|
||||
- ZCash: u1smm4wvqegcp46zss2jf5xptchgeczp4rx7a0wu3mermf2wxahm26yyz5w9mw3f2p4emwlljxjumg774kgs8rntt9yags0whnzane4n67z4c7gppq4yyvcj404ne3r769prwzd9j8ntvqp44fa6d67sf7rmcfjmds3gmeceff4u8e92rh38nd30cr96xw6vfhk6scu4ws90ldzupr3sz
|
||||
|
||||
## Contributors
|
||||
|
||||
<a href="https://github.com/alextran1502/immich/graphs/contributors">
|
||||
@@ -131,6 +117,10 @@ If you feel like this is the right cause and the app is something you are seeing
|
||||
|
||||
## Star History
|
||||
|
||||
<a href="https://star-history.com/#immich-app/immich">
|
||||
<img src="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" alt="Star History Chart" width="100%" />
|
||||
<a href="https://star-history.com/#immich-app/immich&Date">
|
||||
<picture>
|
||||
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=immich-app/immich&type=Date&theme=dark" />
|
||||
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" />
|
||||
<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" width="100%" />
|
||||
</picture>
|
||||
</a>
|
||||
|
||||
124
README_ru_RU.md
@@ -1,124 +0,0 @@
|
||||
<p align="center">
|
||||
<br/>
|
||||
<a href="https://opensource.org/license/agpl-v3"><img src="https://img.shields.io/badge/License-AGPL_v3-blue.svg?color=3F51B5&style=for-the-badge&label=License&logoColor=000000&labelColor=ececec" alt="License: AGPLv3"></a>
|
||||
<a href="https://discord.gg/D8JsnBEuKb">
|
||||
<img src="https://img.shields.io/discord/979116623879368755.svg?label=Discord&logo=Discord&style=for-the-badge&logoColor=000000&labelColor=ececec" alt="Discord"/>
|
||||
</a>
|
||||
<br/>
|
||||
<br/>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<img src="design/immich-logo-stacked-light.svg" width="300" title="Login With Custom URL">
|
||||
</p>
|
||||
<h3 align="center">Immich - Высокопроизводительное решение для автономоного создания фото и видео архивов</h3>
|
||||
<br/>
|
||||
<a href="https://immich.app">
|
||||
<img src="design/immich-screenshots.png" title="Main Screenshot">
|
||||
</a>
|
||||
<br/>
|
||||
<p align="center">
|
||||
<a href="README_ca_ES.md">Català</a>
|
||||
<a href="README_es_ES.md">Español</a>
|
||||
<a href="README_fr_FR.md">Français</a>
|
||||
<a href="README_it_IT.md">Italiano</a>
|
||||
<a href="README_ja_JP.md">日本語</a>
|
||||
<a href="README_ko_KR.md">한국어</a>
|
||||
<a href="README_de_DE.md">Deutsch</a>
|
||||
<a href="README_nl_NL.md">Nederlands</a>
|
||||
<a href="README_tr_TR.md">Türkçe</a>
|
||||
<a href="README_zh_CN.md">中文</a>
|
||||
<a href="README_ru_RU.md">Русский</a>
|
||||
</p>
|
||||
|
||||
## Предупреждение
|
||||
|
||||
- ⚠️ Этот проект находится **в очень активной** разработке.
|
||||
- ⚠️ Ожидайте ошибок и критических изменение.
|
||||
- ⚠️ **Не используйте это приложение для бекапа ваших фото и видео.**
|
||||
- ⚠️ Всегда следуйте [3-2-1](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) плану резервного копирования ваших драгоценных фото и видео!
|
||||
|
||||
## Содержание
|
||||
|
||||
- [Официальная документация](https://immich.app/docs)
|
||||
- [План разработки](https://github.com/orgs/immich-app/projects/1)
|
||||
- [Демо](#demo)
|
||||
- [Возможности](#features)
|
||||
- [Введение](https://immich.app/docs/overview/introduction)
|
||||
- [Инсталяция](https://immich.app/docs/install/requirements)
|
||||
- [Гайд по доработке проекта](https://immich.app/docs/overview/support-the-project)
|
||||
- [Поддержки проект](#support-the-project)
|
||||
|
||||
## Документация
|
||||
|
||||
Вы можете найти основную документация, включая инструкции по установке по ссылке https://immich.app/.
|
||||
|
||||
## Демо
|
||||
|
||||
Вы можете посмотреть веб демо по ссылке https://demo.immich.app
|
||||
|
||||
Для мобильного приложения вы можете использовать адрес `https://demo.immich.app/api` в поле `Server Endpoint URL`
|
||||
|
||||
```bash title="Демо доступ"
|
||||
Реквизиты доступа
|
||||
логин/почта: demo@immich.app
|
||||
пароль: demo
|
||||
```
|
||||
|
||||
```
|
||||
Spec: Free-tier Oracle VM - Amsterdam - 2.4Ghz quad-core ARM64 CPU, 24GB RAM
|
||||
```
|
||||
|
||||
## Возможности
|
||||
|
||||
| Возможности | Приложение | Веб |
|
||||
| --------------------------------------------------- | ---------- | --- |
|
||||
| Выгрузка на сервер и просмотр видео и фото | Да | Да |
|
||||
| Авто бекап когда приложение открыто | Да | Н/Д |
|
||||
| Выбор альбома(ов) для бекапа | Да | Н/Д |
|
||||
| загрузка с сервера фото и видео на устройство | Да | Да |
|
||||
| Поддержка нескольких пользователей | Да | Да |
|
||||
| Альбомы и общие альбомы | Да | Да |
|
||||
| Прокручиваемая/перетаскиваемая полоса прокрутки | Да | Да |
|
||||
| Поддержка формата RAW | Да | Да |
|
||||
| Просмотр метаданных (EXIF, map) | Да | Да |
|
||||
| Поиск до метаданным, объектам, лицам и CLIP | Да | Да |
|
||||
| Административные функци (управление пользователями) | Нет | Да |
|
||||
| Фоновый бекпа | Да | Н/Д |
|
||||
| Виртуальная прокрутка | Да | Да |
|
||||
| Поддержка OAuth | Да | Да |
|
||||
| Ключи API | Н/Д | Да |
|
||||
| LivePhoto/MotionPhoto бекап и воспроизведение | Да | Да |
|
||||
| Настраиваемая структура хранилища | Да | Да |
|
||||
| Публичные альбомы | Нет | Да |
|
||||
| Архив и Избранное | Да | Да |
|
||||
| Мировая карта | Да | Да |
|
||||
| Совместное использование | Да | Да |
|
||||
| Распознавание лиц и группировка по лицам | Да | Да |
|
||||
| В этот день (x лет назад) | Да | Да |
|
||||
| Работа без интернета | Да | Нет |
|
||||
| Галлереи только для просмотра | Да | Да |
|
||||
| Колллажи | Да | Да |
|
||||
|
||||
## Поддержка проекта
|
||||
|
||||
Я посвятил себя этому проекту и не остановлюсь. Я буду продолжать обновлять документацию, добавлять новые функции и исправлять ошибки. Но я не могу сделать это один. Поэтому мне нужна ваша помощь, чтобы дать мне дополнительную мотивацию продолжать идти дальше.
|
||||
|
||||
Как сказали наши покровители [selfhosted.show - In the episode 'The-organization-must-not-be-name is a Hostile Actor'](https://selfhosted.show/79?t=1418), это масштабная работа, которую мы с командой делаем. И мне бы очень хотелось когда-нибудь иметь возможность заниматься этим на постоянной основе, и я прошу вашей помощи, чтобы это произошло.
|
||||
|
||||
|
||||
Если вы считаете, что это правильная причина и вы уже давно используете это приложение, рассмотрите возможность финансовой поддержки проекта, выбрав вариант ниже.
|
||||
|
||||
### Пожертвование
|
||||
|
||||
- [Ежемесячное пожертвование](https://github.com/sponsors/immich-app) via GitHub Sponsors
|
||||
- [Одноразовое пожертвование](https://github.com/sponsors/immich-app?frequency=one-time&sponsor=alextran1502) via GitHub Sponsors
|
||||
- [Librepay](https://liberapay.com/alex.tran1502/)
|
||||
- [buymeacoffee](https://www.buymeacoffee.com/altran1502)
|
||||
- Bitcoin: 3QVAb9dCHutquVejeNXitPqZX26Yg5kxb7
|
||||
- ZCash: u1smm4wvqegcp46zss2jf5xptchgeczp4rx7a0wu3mermf2wxahm26yyz5w9mw3f2p4emwlljxjumg774kgs8rntt9yags0whnzane4n67z4c7gppq4yyvcj404ne3r769prwzd9j8ntvqp44fa6d67sf7rmcfjmds3gmeceff4u8e92rh38nd30cr96xw6vfhk6scu4ws90ldzupr3sz
|
||||
|
||||
## Авторы
|
||||
<a href="https://github.com/alextran1502/immich/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=immich-app/immich" width="100%"/>
|
||||
</a>
|
||||
@@ -21,6 +21,7 @@ module.exports = {
|
||||
'unicorn/prefer-module': 'off',
|
||||
'unicorn/prevent-abbreviations': 'off',
|
||||
'unicorn/no-process-exit': 'off',
|
||||
'unicorn/import-style': 'off',
|
||||
curly: 2,
|
||||
'prettier/prettier': 0,
|
||||
},
|
||||
|
||||
1
cli/.nvmrc
Normal file
@@ -0,0 +1 @@
|
||||
20.13
|
||||
@@ -1,4 +1,4 @@
|
||||
FROM node:20-alpine3.19@sha256:c0a3badbd8a0a760de903e00cedbca94588e609299820557e72cba2a53dbaa2c as core
|
||||
FROM node:20-alpine3.19@sha256:291e84d956f1aff38454bbd3da38941461ad569a185c20aa289f71f37ea08e23 as core
|
||||
|
||||
WORKDIR /usr/src/open-api/typescript-sdk
|
||||
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
|
||||
|
||||
997
cli/package-lock.json
generated
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@immich/cli",
|
||||
"version": "2.1.0",
|
||||
"version": "2.2.0",
|
||||
"description": "Command Line Interface (CLI) for Immich",
|
||||
"type": "module",
|
||||
"exports": "./dist/index.js",
|
||||
@@ -28,8 +28,7 @@
|
||||
"eslint": "^8.56.0",
|
||||
"eslint-config-prettier": "^9.1.0",
|
||||
"eslint-plugin-prettier": "^5.1.3",
|
||||
"eslint-plugin-unicorn": "^51.0.0",
|
||||
"glob": "^10.3.1",
|
||||
"eslint-plugin-unicorn": "^53.0.0",
|
||||
"mock-fs": "^5.2.0",
|
||||
"prettier": "^3.2.5",
|
||||
"prettier-plugin-organize-imports": "^3.2.4",
|
||||
@@ -59,6 +58,10 @@
|
||||
"node": ">=20.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"fast-glob": "^3.3.2",
|
||||
"lodash-es": "^4.17.21"
|
||||
},
|
||||
"volta": {
|
||||
"node": "20.13.1"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
import {
|
||||
Action,
|
||||
AssetBulkUploadCheckResult,
|
||||
AssetFileUploadResponseDto,
|
||||
addAssetsToAlbum,
|
||||
checkBulkUpload,
|
||||
createAlbum,
|
||||
@@ -8,445 +10,342 @@ import {
|
||||
getSupportedMediaTypes,
|
||||
} from '@immich/sdk';
|
||||
import byteSize from 'byte-size';
|
||||
import cliProgress from 'cli-progress';
|
||||
import { chunk, zip } from 'lodash-es';
|
||||
import { createHash } from 'node:crypto';
|
||||
import fs, { createReadStream } from 'node:fs';
|
||||
import { access, constants, stat, unlink } from 'node:fs/promises';
|
||||
import { Presets, SingleBar } from 'cli-progress';
|
||||
import { chunk } from 'lodash-es';
|
||||
import { Stats, createReadStream } from 'node:fs';
|
||||
import { stat, unlink } from 'node:fs/promises';
|
||||
import os from 'node:os';
|
||||
import { basename } from 'node:path';
|
||||
import { CrawlService } from 'src/services/crawl.service';
|
||||
import { BaseOptions, authenticate } from 'src/utils';
|
||||
import path, { basename } from 'node:path';
|
||||
import { BaseOptions, authenticate, crawl, sha1 } from 'src/utils';
|
||||
|
||||
const zipDefined = zip as <T, U>(a: T[], b: U[]) => [T, U][];
|
||||
const s = (count: number) => (count === 1 ? '' : 's');
|
||||
|
||||
enum CheckResponseStatus {
|
||||
ACCEPT = 'accept',
|
||||
REJECT = 'reject',
|
||||
DUPLICATE = 'duplicate',
|
||||
}
|
||||
// TODO figure out why `id` is missing
|
||||
type AssetBulkUploadCheckResults = Array<AssetBulkUploadCheckResult & { id: string }>;
|
||||
type Asset = { id: string; filepath: string };
|
||||
|
||||
class Asset {
|
||||
readonly path: string;
|
||||
|
||||
id?: string;
|
||||
deviceAssetId?: string;
|
||||
fileCreatedAt?: Date;
|
||||
fileModifiedAt?: Date;
|
||||
sidecarPath?: string;
|
||||
fileSize?: number;
|
||||
interface UploadOptionsDto {
|
||||
recursive?: boolean;
|
||||
ignore?: string;
|
||||
dryRun?: boolean;
|
||||
skipHash?: boolean;
|
||||
delete?: boolean;
|
||||
album?: boolean;
|
||||
albumName?: string;
|
||||
includeHidden?: boolean;
|
||||
concurrency: number;
|
||||
}
|
||||
|
||||
constructor(path: string) {
|
||||
this.path = path;
|
||||
class UploadFile extends File {
|
||||
constructor(
|
||||
private filepath: string,
|
||||
private _size: number,
|
||||
) {
|
||||
super([], basename(filepath));
|
||||
}
|
||||
|
||||
async prepare() {
|
||||
const stats = await stat(this.path);
|
||||
this.deviceAssetId = `${basename(this.path)}-${stats.size}`.replaceAll(/\s+/g, '');
|
||||
this.fileCreatedAt = stats.mtime;
|
||||
this.fileModifiedAt = stats.mtime;
|
||||
this.fileSize = stats.size;
|
||||
this.albumName = this.extractAlbumName();
|
||||
get size() {
|
||||
return this._size;
|
||||
}
|
||||
|
||||
async getUploadFormData(): Promise<FormData> {
|
||||
if (!this.deviceAssetId) {
|
||||
throw new Error('Device asset id not set');
|
||||
}
|
||||
if (!this.fileCreatedAt) {
|
||||
throw new Error('File created at not set');
|
||||
}
|
||||
if (!this.fileModifiedAt) {
|
||||
throw new Error('File modified at not set');
|
||||
}
|
||||
|
||||
// TODO: doesn't xmp replace the file extension? Will need investigation
|
||||
const sideCarPath = `${this.path}.xmp`;
|
||||
let sidecarData: Blob | undefined = undefined;
|
||||
try {
|
||||
await access(sideCarPath, constants.R_OK);
|
||||
sidecarData = new File([await fs.openAsBlob(sideCarPath)], basename(sideCarPath));
|
||||
} catch {}
|
||||
|
||||
const data: any = {
|
||||
assetData: new File([await fs.openAsBlob(this.path)], basename(this.path)),
|
||||
deviceAssetId: this.deviceAssetId,
|
||||
deviceId: 'CLI',
|
||||
fileCreatedAt: this.fileCreatedAt.toISOString(),
|
||||
fileModifiedAt: this.fileModifiedAt.toISOString(),
|
||||
isFavorite: String(false),
|
||||
};
|
||||
const formData = new FormData();
|
||||
|
||||
for (const property in data) {
|
||||
formData.append(property, data[property]);
|
||||
}
|
||||
|
||||
if (sidecarData) {
|
||||
formData.append('sidecarData', sidecarData);
|
||||
}
|
||||
|
||||
return formData;
|
||||
}
|
||||
|
||||
async delete(): Promise<void> {
|
||||
return unlink(this.path);
|
||||
}
|
||||
|
||||
public async hash(): Promise<string> {
|
||||
const sha1 = (filePath: string) => {
|
||||
const hash = createHash('sha1');
|
||||
return new Promise<string>((resolve, reject) => {
|
||||
const rs = createReadStream(filePath);
|
||||
rs.on('error', reject);
|
||||
rs.on('data', (chunk) => hash.update(chunk));
|
||||
rs.on('end', () => resolve(hash.digest('hex')));
|
||||
});
|
||||
};
|
||||
|
||||
return await sha1(this.path);
|
||||
}
|
||||
|
||||
private extractAlbumName(): string | undefined {
|
||||
return os.platform() === 'win32' ? this.path.split('\\').at(-2) : this.path.split('/').at(-2);
|
||||
stream() {
|
||||
return createReadStream(this.filepath) as any;
|
||||
}
|
||||
}
|
||||
|
||||
class UploadOptionsDto {
|
||||
recursive? = false;
|
||||
exclusionPatterns?: string[] = [];
|
||||
dryRun? = false;
|
||||
skipHash? = false;
|
||||
delete? = false;
|
||||
album? = false;
|
||||
albumName? = '';
|
||||
includeHidden? = false;
|
||||
concurrency? = 4;
|
||||
}
|
||||
export const upload = async (paths: string[], baseOptions: BaseOptions, options: UploadOptionsDto) => {
|
||||
await authenticate(baseOptions);
|
||||
|
||||
export const upload = (paths: string[], baseOptions: BaseOptions, uploadOptions: UploadOptionsDto) =>
|
||||
new UploadCommand().run(paths, baseOptions, uploadOptions);
|
||||
const scanFiles = await scan(paths, options);
|
||||
if (scanFiles.length === 0) {
|
||||
console.log('No files found, exiting');
|
||||
return;
|
||||
}
|
||||
|
||||
// TODO refactor this
|
||||
class UploadCommand {
|
||||
public async run(paths: string[], baseOptions: BaseOptions, options: UploadOptionsDto): Promise<void> {
|
||||
await authenticate(baseOptions);
|
||||
const { newFiles, duplicates } = await checkForDuplicates(scanFiles, options);
|
||||
const newAssets = await uploadFiles(newFiles, options);
|
||||
await updateAlbums([...newAssets, ...duplicates], options);
|
||||
await deleteFiles(newFiles, options);
|
||||
};
|
||||
|
||||
console.log('Crawling for assets...');
|
||||
const files = await this.getFiles(paths, options);
|
||||
const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
|
||||
const { image, video } = await getSupportedMediaTypes();
|
||||
|
||||
if (files.length === 0) {
|
||||
console.log('No assets found, exiting');
|
||||
return;
|
||||
console.log('Crawling for assets...');
|
||||
const files = await crawl({
|
||||
pathsToCrawl,
|
||||
recursive: options.recursive,
|
||||
exclusionPattern: options.ignore,
|
||||
includeHidden: options.includeHidden,
|
||||
extensions: [...image, ...video],
|
||||
});
|
||||
|
||||
return files;
|
||||
};
|
||||
|
||||
const checkForDuplicates = async (files: string[], { concurrency, skipHash }: UploadOptionsDto) => {
|
||||
if (skipHash) {
|
||||
console.log('Skipping hash check, assuming all files are new');
|
||||
return { newFiles: files, duplicates: [] };
|
||||
}
|
||||
|
||||
const progressBar = new SingleBar(
|
||||
{ format: 'Checking files | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
|
||||
Presets.shades_classic,
|
||||
);
|
||||
|
||||
progressBar.start(files.length, 0);
|
||||
|
||||
const newFiles: string[] = [];
|
||||
const duplicates: Asset[] = [];
|
||||
|
||||
try {
|
||||
// TODO refactor into a queue
|
||||
for (const items of chunk(files, concurrency)) {
|
||||
const dto = await Promise.all(items.map(async (filepath) => ({ id: filepath, checksum: await sha1(filepath) })));
|
||||
const { results } = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: dto } });
|
||||
|
||||
for (const { id: filepath, assetId, action } of results as AssetBulkUploadCheckResults) {
|
||||
if (action === Action.Accept) {
|
||||
newFiles.push(filepath);
|
||||
} else {
|
||||
// rejects are always duplicates
|
||||
duplicates.push({ id: assetId as string, filepath });
|
||||
}
|
||||
progressBar.increment();
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
progressBar.stop();
|
||||
}
|
||||
|
||||
const assetsToCheck = files.map((path) => new Asset(path));
|
||||
console.log(`Found ${newFiles.length} new files and ${duplicates.length} duplicate${s(duplicates.length)}`);
|
||||
|
||||
const { newAssets, duplicateAssets } = await this.checkAssets(assetsToCheck, options.concurrency ?? 4);
|
||||
return { newFiles, duplicates };
|
||||
};
|
||||
|
||||
const totalSizeUploaded = await this.upload(newAssets, options);
|
||||
const messageStart = options.dryRun ? 'Would have' : 'Successfully';
|
||||
if (newAssets.length === 0) {
|
||||
console.log('All assets were already uploaded, nothing to do.');
|
||||
} else {
|
||||
console.log(
|
||||
`${messageStart} uploaded ${newAssets.length} asset${newAssets.length === 1 ? '' : 's'} (${byteSize(totalSizeUploaded)})`,
|
||||
const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptionsDto): Promise<Asset[]> => {
|
||||
if (files.length === 0) {
|
||||
console.log('All assets were already uploaded, nothing to do.');
|
||||
return [];
|
||||
}
|
||||
|
||||
// Compute total size first
|
||||
let totalSize = 0;
|
||||
const statsMap = new Map<string, Stats>();
|
||||
for (const filepath of files) {
|
||||
const stats = await stat(filepath);
|
||||
statsMap.set(filepath, stats);
|
||||
totalSize += stats.size;
|
||||
}
|
||||
|
||||
if (dryRun) {
|
||||
console.log(`Would have uploaded ${files.length} asset${s(files.length)} (${byteSize(totalSize)})`);
|
||||
return files.map((filepath) => ({ id: '', filepath }));
|
||||
}
|
||||
|
||||
const uploadProgress = new SingleBar(
|
||||
{ format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}' },
|
||||
Presets.shades_classic,
|
||||
);
|
||||
uploadProgress.start(totalSize, 0);
|
||||
uploadProgress.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });
|
||||
|
||||
let duplicateCount = 0;
|
||||
let duplicateSize = 0;
|
||||
let successCount = 0;
|
||||
let successSize = 0;
|
||||
|
||||
const newAssets: Asset[] = [];
|
||||
|
||||
try {
|
||||
for (const items of chunk(files, concurrency)) {
|
||||
await Promise.all(
|
||||
items.map(async (filepath) => {
|
||||
const stats = statsMap.get(filepath) as Stats;
|
||||
const response = await uploadFile(filepath, stats);
|
||||
|
||||
newAssets.push({ id: response.id, filepath });
|
||||
|
||||
if (response.duplicate) {
|
||||
duplicateCount++;
|
||||
duplicateSize += stats.size ?? 0;
|
||||
} else {
|
||||
successCount++;
|
||||
successSize += stats.size ?? 0;
|
||||
}
|
||||
|
||||
uploadProgress.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });
|
||||
|
||||
return response;
|
||||
}),
|
||||
);
|
||||
}
|
||||
} finally {
|
||||
uploadProgress.stop();
|
||||
}
|
||||
|
||||
if (options.album || options.albumName) {
|
||||
const { createdAlbumCount, updatedAssetCount } = await this.updateAlbums(
|
||||
[...newAssets, ...duplicateAssets],
|
||||
options,
|
||||
console.log(`Successfully uploaded ${successCount} new asset${s(successCount)} (${byteSize(successSize)})`);
|
||||
if (duplicateCount > 0) {
|
||||
console.log(`Skipped ${duplicateCount} duplicate asset${s(duplicateCount)} (${byteSize(duplicateSize)})`);
|
||||
}
|
||||
return newAssets;
|
||||
};
|
||||
|
||||
const uploadFile = async (input: string, stats: Stats): Promise<AssetFileUploadResponseDto> => {
|
||||
const { baseUrl, headers } = defaults;
|
||||
|
||||
const assetPath = path.parse(input);
|
||||
const noExtension = path.join(assetPath.dir, assetPath.name);
|
||||
|
||||
const sidecarsFiles = await Promise.all(
|
||||
// XMP sidecars can come in two filename formats. For a photo named photo.ext, the filenames are photo.ext.xmp and photo.xmp
|
||||
[`${noExtension}.xmp`, `${input}.xmp`].map(async (sidecarPath) => {
|
||||
try {
|
||||
const stats = await stat(sidecarPath);
|
||||
return new UploadFile(sidecarPath, stats.size);
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
const sidecarData = sidecarsFiles.find((file): file is UploadFile => file !== false);
|
||||
|
||||
const formData = new FormData();
|
||||
formData.append('deviceAssetId', `${basename(input)}-${stats.size}`.replaceAll(/\s+/g, ''));
|
||||
formData.append('deviceId', 'CLI');
|
||||
formData.append('fileCreatedAt', stats.mtime.toISOString());
|
||||
formData.append('fileModifiedAt', stats.mtime.toISOString());
|
||||
formData.append('fileSize', String(stats.size));
|
||||
formData.append('isFavorite', 'false');
|
||||
formData.append('assetData', new UploadFile(input, stats.size));
|
||||
|
||||
if (sidecarData) {
|
||||
formData.append('sidecarData', sidecarData);
|
||||
}
|
||||
|
||||
const response = await fetch(`${baseUrl}/asset/upload`, {
|
||||
method: 'post',
|
||||
redirect: 'error',
|
||||
headers: headers as Record<string, string>,
|
||||
body: formData,
|
||||
});
|
||||
if (response.status !== 200 && response.status !== 201) {
|
||||
throw new Error(await response.text());
|
||||
}
|
||||
|
||||
return response.json();
|
||||
};
|
||||
|
||||
const deleteFiles = async (files: string[], options: UploadOptionsDto): Promise<void> => {
|
||||
if (!options.delete) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (options.dryRun) {
|
||||
console.log(`Would have deleted ${files.length} local asset${s(files.length)}`);
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('Deleting assets that have been uploaded...');
|
||||
|
||||
const deletionProgress = new SingleBar(
|
||||
{ format: 'Deleting local assets | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
|
||||
Presets.shades_classic,
|
||||
);
|
||||
deletionProgress.start(files.length, 0);
|
||||
|
||||
try {
|
||||
for (const assetBatch of chunk(files, options.concurrency)) {
|
||||
await Promise.all(assetBatch.map((input: string) => unlink(input)));
|
||||
deletionProgress.update(assetBatch.length);
|
||||
}
|
||||
} finally {
|
||||
deletionProgress.stop();
|
||||
}
|
||||
};
|
||||
|
||||
const updateAlbums = async (assets: Asset[], options: UploadOptionsDto) => {
|
||||
if (!options.album && !options.albumName) {
|
||||
return;
|
||||
}
|
||||
const { dryRun, concurrency } = options;
|
||||
|
||||
const albums = await getAllAlbums({});
|
||||
const existingAlbums = new Map(albums.map((album) => [album.albumName, album.id]));
|
||||
const newAlbums: Set<string> = new Set();
|
||||
for (const { filepath } of assets) {
|
||||
const albumName = getAlbumName(filepath, options);
|
||||
if (albumName && !existingAlbums.has(albumName)) {
|
||||
newAlbums.add(albumName);
|
||||
}
|
||||
}
|
||||
|
||||
if (dryRun) {
|
||||
// TODO print asset counts for new albums
|
||||
console.log(`Would have created ${newAlbums.size} new album${s(newAlbums.size)}`);
|
||||
console.log(`Would have updated albums of ${assets.length} asset${s(assets.length)}`);
|
||||
return;
|
||||
}
|
||||
|
||||
const progressBar = new SingleBar(
|
||||
{ format: 'Creating albums | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} albums' },
|
||||
Presets.shades_classic,
|
||||
);
|
||||
progressBar.start(newAlbums.size, 0);
|
||||
|
||||
try {
|
||||
for (const albumNames of chunk([...newAlbums], concurrency)) {
|
||||
const items = await Promise.all(
|
||||
albumNames.map((albumName: string) => createAlbum({ createAlbumDto: { albumName } })),
|
||||
);
|
||||
console.log(`${messageStart} created ${createdAlbumCount} new album${createdAlbumCount === 1 ? '' : 's'}`);
|
||||
console.log(`${messageStart} updated ${updatedAssetCount} asset${updatedAssetCount === 1 ? '' : 's'}`);
|
||||
}
|
||||
|
||||
if (!options.delete) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (options.dryRun) {
|
||||
console.log(`Would now have deleted assets, but skipped due to dry run`);
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('Deleting assets that have been uploaded...');
|
||||
|
||||
await this.deleteAssets(newAssets, options);
|
||||
}
|
||||
|
||||
public async checkAssets(
|
||||
assetsToCheck: Asset[],
|
||||
concurrency: number,
|
||||
): Promise<{ newAssets: Asset[]; duplicateAssets: Asset[]; rejectedAssets: Asset[] }> {
|
||||
for (const assets of chunk(assetsToCheck, concurrency)) {
|
||||
await Promise.all(assets.map((asset: Asset) => asset.prepare()));
|
||||
}
|
||||
|
||||
const checkProgress = new cliProgress.SingleBar(
|
||||
{ format: 'Checking assets | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
|
||||
cliProgress.Presets.shades_classic,
|
||||
);
|
||||
checkProgress.start(assetsToCheck.length, 0);
|
||||
|
||||
const newAssets = [];
|
||||
const duplicateAssets = [];
|
||||
const rejectedAssets = [];
|
||||
try {
|
||||
for (const assets of chunk(assetsToCheck, concurrency)) {
|
||||
const checkedAssets = await this.getStatus(assets);
|
||||
for (const checked of checkedAssets) {
|
||||
if (checked.status === CheckResponseStatus.ACCEPT) {
|
||||
newAssets.push(checked.asset);
|
||||
} else if (checked.status === CheckResponseStatus.DUPLICATE) {
|
||||
duplicateAssets.push(checked.asset);
|
||||
} else {
|
||||
rejectedAssets.push(checked.asset);
|
||||
}
|
||||
checkProgress.increment();
|
||||
}
|
||||
for (const { id, albumName } of items) {
|
||||
existingAlbums.set(albumName, id);
|
||||
}
|
||||
} finally {
|
||||
checkProgress.stop();
|
||||
progressBar.increment(albumNames.length);
|
||||
}
|
||||
|
||||
return { newAssets, duplicateAssets, rejectedAssets };
|
||||
} finally {
|
||||
progressBar.stop();
|
||||
}
|
||||
|
||||
public async upload(assetsToUpload: Asset[], options: UploadOptionsDto): Promise<number> {
|
||||
let totalSize = 0;
|
||||
console.log(`Successfully created ${newAlbums.size} new album${s(newAlbums.size)}`);
|
||||
console.log(`Successfully updated ${assets.length} asset${s(assets.length)}`);
|
||||
|
||||
// Compute total size first
|
||||
for (const asset of assetsToUpload) {
|
||||
totalSize += asset.fileSize ?? 0;
|
||||
const albumToAssets = new Map<string, string[]>();
|
||||
for (const asset of assets) {
|
||||
const albumName = getAlbumName(asset.filepath, options);
|
||||
if (!albumName) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (options.dryRun) {
|
||||
return totalSize;
|
||||
}
|
||||
|
||||
const uploadProgress = new cliProgress.SingleBar(
|
||||
{
|
||||
format: 'Uploading assets | {bar} | {percentage}% | ETA: {eta_formatted} | {value_formatted}/{total_formatted}',
|
||||
},
|
||||
cliProgress.Presets.shades_classic,
|
||||
);
|
||||
uploadProgress.start(totalSize, 0);
|
||||
uploadProgress.update({ value_formatted: 0, total_formatted: byteSize(totalSize) });
|
||||
|
||||
let totalSizeUploaded = 0;
|
||||
try {
|
||||
for (const assets of chunk(assetsToUpload, options.concurrency)) {
|
||||
const ids = await this.uploadAssets(assets);
|
||||
for (const [asset, id] of zipDefined(assets, ids)) {
|
||||
asset.id = id;
|
||||
if (asset.fileSize) {
|
||||
totalSizeUploaded += asset.fileSize ?? 0;
|
||||
} else {
|
||||
console.log(`Could not determine file size for ${asset.path}`);
|
||||
}
|
||||
}
|
||||
uploadProgress.update(totalSizeUploaded, { value_formatted: byteSize(totalSizeUploaded) });
|
||||
const albumId = existingAlbums.get(albumName);
|
||||
if (albumId) {
|
||||
if (!albumToAssets.has(albumId)) {
|
||||
albumToAssets.set(albumId, []);
|
||||
}
|
||||
} finally {
|
||||
uploadProgress.stop();
|
||||
albumToAssets.get(albumId)?.push(asset.id);
|
||||
}
|
||||
|
||||
return totalSizeUploaded;
|
||||
}
|
||||
|
||||
public async getFiles(paths: string[], options: UploadOptionsDto): Promise<string[]> {
|
||||
const inputFiles: string[] = [];
|
||||
for (const pathArgument of paths) {
|
||||
const fileStat = await fs.promises.lstat(pathArgument);
|
||||
if (fileStat.isFile()) {
|
||||
inputFiles.push(pathArgument);
|
||||
const albumUpdateProgress = new SingleBar(
|
||||
{ format: 'Adding assets to albums | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
|
||||
Presets.shades_classic,
|
||||
);
|
||||
albumUpdateProgress.start(assets.length, 0);
|
||||
|
||||
try {
|
||||
for (const [albumId, assets] of albumToAssets.entries()) {
|
||||
for (const assetBatch of chunk(assets, Math.min(1000 * concurrency, 65_000))) {
|
||||
await addAssetsToAlbum({ id: albumId, bulkIdsDto: { ids: assetBatch } });
|
||||
albumUpdateProgress.increment(assetBatch.length);
|
||||
}
|
||||
}
|
||||
|
||||
const files: string[] = await this.crawl(paths, options);
|
||||
files.push(...inputFiles);
|
||||
return files;
|
||||
} finally {
|
||||
albumUpdateProgress.stop();
|
||||
}
|
||||
};
|
||||
|
||||
public async getAlbums(): Promise<Map<string, string>> {
|
||||
const existingAlbums = await getAllAlbums({});
|
||||
|
||||
const albumMapping = new Map<string, string>();
|
||||
for (const album of existingAlbums) {
|
||||
albumMapping.set(album.albumName, album.id);
|
||||
}
|
||||
|
||||
return albumMapping;
|
||||
}
|
||||
|
||||
public async updateAlbums(
|
||||
assets: Asset[],
|
||||
options: UploadOptionsDto,
|
||||
): Promise<{ createdAlbumCount: number; updatedAssetCount: number }> {
|
||||
if (options.albumName) {
|
||||
for (const asset of assets) {
|
||||
asset.albumName = options.albumName;
|
||||
}
|
||||
}
|
||||
|
||||
const existingAlbums = await this.getAlbums();
|
||||
const assetsToUpdate = assets.filter(
|
||||
(asset): asset is Asset & { albumName: string; id: string } => !!(asset.albumName && asset.id),
|
||||
);
|
||||
|
||||
const newAlbumsSet: Set<string> = new Set();
|
||||
for (const asset of assetsToUpdate) {
|
||||
if (!existingAlbums.has(asset.albumName)) {
|
||||
newAlbumsSet.add(asset.albumName);
|
||||
}
|
||||
}
|
||||
|
||||
const newAlbums = [...newAlbumsSet];
|
||||
|
||||
if (options.dryRun) {
|
||||
return { createdAlbumCount: newAlbums.length, updatedAssetCount: assetsToUpdate.length };
|
||||
}
|
||||
|
||||
const albumCreationProgress = new cliProgress.SingleBar(
|
||||
{
|
||||
format: 'Creating albums | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} albums',
|
||||
},
|
||||
cliProgress.Presets.shades_classic,
|
||||
);
|
||||
albumCreationProgress.start(newAlbums.length, 0);
|
||||
|
||||
try {
|
||||
for (const albumNames of chunk(newAlbums, options.concurrency)) {
|
||||
const newAlbumIds = await Promise.all(
|
||||
albumNames.map((albumName: string) => createAlbum({ createAlbumDto: { albumName } }).then((r) => r.id)),
|
||||
);
|
||||
|
||||
for (const [albumName, albumId] of zipDefined(albumNames, newAlbumIds)) {
|
||||
existingAlbums.set(albumName, albumId);
|
||||
}
|
||||
|
||||
albumCreationProgress.increment(albumNames.length);
|
||||
}
|
||||
} finally {
|
||||
albumCreationProgress.stop();
|
||||
}
|
||||
|
||||
const albumToAssets = new Map<string, string[]>();
|
||||
for (const asset of assetsToUpdate) {
|
||||
const albumId = existingAlbums.get(asset.albumName);
|
||||
if (albumId) {
|
||||
if (!albumToAssets.has(albumId)) {
|
||||
albumToAssets.set(albumId, []);
|
||||
}
|
||||
albumToAssets.get(albumId)?.push(asset.id);
|
||||
}
|
||||
}
|
||||
|
||||
const albumUpdateProgress = new cliProgress.SingleBar(
|
||||
{
|
||||
format: 'Adding assets to albums | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets',
|
||||
},
|
||||
cliProgress.Presets.shades_classic,
|
||||
);
|
||||
albumUpdateProgress.start(assetsToUpdate.length, 0);
|
||||
|
||||
try {
|
||||
for (const [albumId, assets] of albumToAssets.entries()) {
|
||||
for (const assetBatch of chunk(assets, Math.min(1000 * (options.concurrency ?? 4), 65_000))) {
|
||||
await addAssetsToAlbum({ id: albumId, bulkIdsDto: { ids: assetBatch } });
|
||||
albumUpdateProgress.increment(assetBatch.length);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
albumUpdateProgress.stop();
|
||||
}
|
||||
|
||||
return { createdAlbumCount: newAlbums.length, updatedAssetCount: assetsToUpdate.length };
|
||||
}
|
||||
|
||||
public async deleteAssets(assets: Asset[], options: UploadOptionsDto): Promise<void> {
|
||||
const deletionProgress = new cliProgress.SingleBar(
|
||||
{
|
||||
format: 'Deleting local assets | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets',
|
||||
},
|
||||
cliProgress.Presets.shades_classic,
|
||||
);
|
||||
deletionProgress.start(assets.length, 0);
|
||||
|
||||
try {
|
||||
for (const assetBatch of chunk(assets, options.concurrency)) {
|
||||
await Promise.all(assetBatch.map((asset: Asset) => asset.delete()));
|
||||
deletionProgress.update(assetBatch.length);
|
||||
}
|
||||
} finally {
|
||||
deletionProgress.stop();
|
||||
}
|
||||
}
|
||||
|
||||
private async getStatus(assets: Asset[]): Promise<{ asset: Asset; status: CheckResponseStatus }[]> {
|
||||
const checkResponse = await this.checkHashes(assets);
|
||||
|
||||
const responses = [];
|
||||
for (const [check, asset] of zipDefined(checkResponse, assets)) {
|
||||
if (check.assetId) {
|
||||
asset.id = check.assetId;
|
||||
}
|
||||
|
||||
if (check.action === 'accept') {
|
||||
responses.push({ asset, status: CheckResponseStatus.ACCEPT });
|
||||
} else if (check.reason === 'duplicate') {
|
||||
responses.push({ asset, status: CheckResponseStatus.DUPLICATE });
|
||||
} else {
|
||||
responses.push({ asset, status: CheckResponseStatus.REJECT });
|
||||
}
|
||||
}
|
||||
|
||||
return responses;
|
||||
}
|
||||
|
||||
private async checkHashes(assetsToCheck: Asset[]): Promise<AssetBulkUploadCheckResult[]> {
|
||||
const checksums = await Promise.all(assetsToCheck.map((asset) => asset.hash()));
|
||||
const assetBulkUploadCheckDto = {
|
||||
assets: zipDefined(assetsToCheck, checksums).map(([asset, checksum]) => ({ id: asset.path, checksum })),
|
||||
};
|
||||
const checkResponse = await checkBulkUpload({ assetBulkUploadCheckDto });
|
||||
return checkResponse.results;
|
||||
}
|
||||
|
||||
private async uploadAssets(assets: Asset[]): Promise<string[]> {
|
||||
const fileRequests = await Promise.all(assets.map((asset) => asset.getUploadFormData()));
|
||||
const results = await Promise.all(fileRequests.map((request) => this.uploadAsset(request)));
|
||||
return results.map((response) => response.id);
|
||||
}
|
||||
|
||||
private async crawl(paths: string[], options: UploadOptionsDto): Promise<string[]> {
|
||||
const formatResponse = await getSupportedMediaTypes();
|
||||
const crawlService = new CrawlService(formatResponse.image, formatResponse.video);
|
||||
|
||||
return crawlService.crawl({
|
||||
pathsToCrawl: paths,
|
||||
recursive: options.recursive,
|
||||
exclusionPatterns: options.exclusionPatterns,
|
||||
includeHidden: options.includeHidden,
|
||||
});
|
||||
}
|
||||
|
||||
private async uploadAsset(data: FormData): Promise<{ id: string }> {
|
||||
const { baseUrl, headers } = defaults;
|
||||
|
||||
const response = await fetch(`${baseUrl}/asset/upload`, {
|
||||
method: 'post',
|
||||
redirect: 'error',
|
||||
headers: headers as Record<string, string>,
|
||||
body: data,
|
||||
});
|
||||
if (response.status !== 200 && response.status !== 201) {
|
||||
throw new Error(await response.text());
|
||||
}
|
||||
return response.json();
|
||||
}
|
||||
}
|
||||
const getAlbumName = (filepath: string, options: UploadOptionsDto) => {
|
||||
const folderName = os.platform() === 'win32' ? filepath.split('\\').at(-2) : filepath.split('/').at(-2);
|
||||
return options.albumName ?? folderName;
|
||||
};
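A minimal, self-contained sketch of the fallback behaviour that getAlbumName implements above: an explicit --album-name wins, otherwise the parent folder of the file becomes the album name. The helper name and paths below are illustrative only and are not part of the diff.

```typescript
// Standalone sketch (hypothetical helper mirroring getAlbumName above):
// an explicit album name wins, otherwise the parent folder name is used.
const folderAlbumName = (filepath: string, albumName?: string): string | undefined =>
  albumName ?? filepath.split('/').at(-2);

console.log(folderAlbumName('/photos/Vacation 2023/IMG_0001.jpg')); // "Vacation 2023"
console.log(folderAlbumName('/photos/Vacation 2023/IMG_0001.jpg', 'Family')); // "Family"
```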
|
||||
|
||||
@@ -1,22 +1,22 @@
|
||||
import { getMyUserInfo } from '@immich/sdk';
|
||||
import { getMyUser } from '@immich/sdk';
|
||||
import { existsSync } from 'node:fs';
|
||||
import { mkdir, unlink } from 'node:fs/promises';
|
||||
import { BaseOptions, connect, getAuthFilePath, logError, withError, writeAuthFile } from 'src/utils';
|
||||
|
||||
export const login = async (instanceUrl: string, apiKey: string, options: BaseOptions) => {
|
||||
console.log(`Logging in to ${instanceUrl}`);
|
||||
export const login = async (url: string, key: string, options: BaseOptions) => {
|
||||
console.log(`Logging in to ${url}`);
|
||||
|
||||
const { configDirectory: configDir } = options;
|
||||
|
||||
await connect(instanceUrl, apiKey);
|
||||
await connect(url, key);
|
||||
|
||||
const [error, userInfo] = await withError(getMyUserInfo());
|
||||
const [error, user] = await withError(getMyUser());
|
||||
if (error) {
|
||||
logError(error, 'Failed to load user info');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
console.log(`Logged in as ${userInfo.email}`);
|
||||
console.log(`Logged in as ${user.email}`);
|
||||
|
||||
if (!existsSync(configDir)) {
|
||||
// Create config folder if it doesn't exist
|
||||
@@ -27,7 +27,7 @@ export const login = async (instanceUrl: string, apiKey: string, options: BaseOp
|
||||
}
|
||||
}
|
||||
|
||||
await writeAuthFile(configDir, { instanceUrl, apiKey });
|
||||
await writeAuthFile(configDir, { url, key });
|
||||
|
||||
console.log(`Wrote auth info to ${getAuthFilePath(configDir)}`);
|
||||
};
|
||||
|
||||
@@ -1,15 +1,24 @@
|
||||
import { getAssetStatistics, getServerVersion, getSupportedMediaTypes } from '@immich/sdk';
|
||||
import { getAssetStatistics, getMyUser, getServerVersion, getSupportedMediaTypes } from '@immich/sdk';
|
||||
import { BaseOptions, authenticate } from 'src/utils';
|
||||
|
||||
export const serverInfo = async (options: BaseOptions) => {
|
||||
await authenticate(options);
|
||||
const { url } = await authenticate(options);
|
||||
|
||||
const versionInfo = await getServerVersion();
|
||||
const mediaTypes = await getSupportedMediaTypes();
|
||||
const stats = await getAssetStatistics({});
|
||||
const [versionInfo, mediaTypes, stats, userInfo] = await Promise.all([
|
||||
getServerVersion(),
|
||||
getSupportedMediaTypes(),
|
||||
getAssetStatistics({}),
|
||||
getMyUser(),
|
||||
]);
|
||||
|
||||
console.log(`Server Version: ${versionInfo.major}.${versionInfo.minor}.${versionInfo.patch}`);
|
||||
console.log(`Image Types: ${mediaTypes.image.map((extension) => extension.replace('.', ''))}`);
|
||||
console.log(`Video Types: ${mediaTypes.video.map((extension) => extension.replace('.', ''))}`);
|
||||
console.log(`Statistics:\n Images: ${stats.images}\n Videos: ${stats.videos}\n Total: ${stats.total}`);
|
||||
console.log(`Server Info (via ${userInfo.email})`);
|
||||
console.log(` Url: ${url}`);
|
||||
console.log(` Version: ${versionInfo.major}.${versionInfo.minor}.${versionInfo.patch}`);
|
||||
console.log(` Formats:`);
|
||||
console.log(` Images: ${mediaTypes.image.map((extension) => extension.replace('.', ''))}`);
|
||||
console.log(` Videos: ${mediaTypes.video.map((extension) => extension.replace('.', ''))}`);
|
||||
console.log(` Statistics:`);
|
||||
console.log(` Images: ${stats.images}`);
|
||||
console.log(` Videos: ${stats.videos}`);
|
||||
console.log(` Total: ${stats.total}`);
|
||||
};
|
||||
|
||||
@@ -19,7 +19,7 @@ const program = new Command()
|
||||
.default(defaultConfigDirectory),
|
||||
)
|
||||
.addOption(new Option('-u, --url [url]', 'Immich server URL').env('IMMICH_INSTANCE_URL'))
|
||||
.addOption(new Option('-k, --key [apiKey]', 'Immich API key').env('IMMICH_API_KEY'));
|
||||
.addOption(new Option('-k, --key [key]', 'Immich API key').env('IMMICH_API_KEY'));
|
||||
|
||||
program
|
||||
.command('login')
|
||||
@@ -44,7 +44,7 @@ program
|
||||
.description('Upload assets')
|
||||
.usage('[paths...] [options]')
|
||||
.addOption(new Option('-r, --recursive', 'Recursive').env('IMMICH_RECURSIVE').default(false))
|
||||
.addOption(new Option('-i, --ignore [paths...]', 'Paths to ignore').env('IMMICH_IGNORE_PATHS').default([]))
|
||||
.addOption(new Option('-i, --ignore <pattern>', 'Pattern to ignore').env('IMMICH_IGNORE_PATHS'))
|
||||
.addOption(new Option('-h, --skip-hash', "Don't hash files before upload").env('IMMICH_SKIP_HASH').default(false))
|
||||
.addOption(new Option('-H, --include-hidden', 'Include hidden folders').env('IMMICH_INCLUDE_HIDDEN').default(false))
|
||||
.addOption(
|
||||
@@ -60,7 +60,8 @@ program
|
||||
.addOption(
|
||||
new Option('-n, --dry-run', "Don't perform any actions, just show what will be done")
|
||||
.env('IMMICH_DRY_RUN')
|
||||
.default(false),
|
||||
.default(false)
|
||||
.conflicts('skipHash'),
|
||||
)
|
||||
.addOption(
|
||||
new Option('-c, --concurrency <number>', 'Number of assets to upload at the same time')
|
||||
|
||||
@@ -1,70 +0,0 @@
|
||||
import { glob } from 'glob';
|
||||
import * as fs from 'node:fs';
|
||||
|
||||
export class CrawlOptions {
|
||||
pathsToCrawl!: string[];
|
||||
recursive? = false;
|
||||
includeHidden? = false;
|
||||
exclusionPatterns?: string[];
|
||||
}
|
||||
|
||||
export class CrawlService {
|
||||
private readonly extensions!: string[];
|
||||
|
||||
constructor(image: string[], video: string[]) {
|
||||
this.extensions = [...image, ...video].map((extension) => extension.replace('.', ''));
|
||||
}
|
||||
|
||||
async crawl(options: CrawlOptions): Promise<string[]> {
|
||||
const { recursive, pathsToCrawl, exclusionPatterns, includeHidden } = options;
|
||||
|
||||
if (!pathsToCrawl) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const patterns: string[] = [];
|
||||
const crawledFiles: string[] = [];
|
||||
|
||||
for await (const currentPath of pathsToCrawl) {
|
||||
try {
|
||||
const stats = await fs.promises.stat(currentPath);
|
||||
if (stats.isFile() || stats.isSymbolicLink()) {
|
||||
crawledFiles.push(currentPath);
|
||||
} else {
|
||||
patterns.push(currentPath);
|
||||
}
|
||||
} catch (error: any) {
|
||||
if (error.code === 'ENOENT') {
|
||||
patterns.push(currentPath);
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let searchPattern: string;
|
||||
if (patterns.length === 1) {
|
||||
searchPattern = patterns[0];
|
||||
} else if (patterns.length === 0) {
|
||||
return crawledFiles;
|
||||
} else {
|
||||
searchPattern = '{' + patterns.join(',') + '}';
|
||||
}
|
||||
|
||||
if (recursive) {
|
||||
searchPattern = searchPattern + '/**/';
|
||||
}
|
||||
|
||||
searchPattern = `${searchPattern}/*.{${this.extensions.join(',')}}`;
|
||||
|
||||
const globbedFiles = await glob(searchPattern, {
|
||||
absolute: true,
|
||||
nocase: true,
|
||||
nodir: true,
|
||||
dot: includeHidden,
|
||||
ignore: exclusionPatterns,
|
||||
});
|
||||
|
||||
return [...crawledFiles, ...globbedFiles].sort();
|
||||
}
|
||||
}
|
||||
@@ -1,14 +1,31 @@
|
||||
import mockfs from 'mock-fs';
|
||||
import { CrawlOptions, CrawlService } from './crawl.service';
|
||||
import { CrawlOptions, crawl } from 'src/utils';
|
||||
|
||||
interface Test {
|
||||
test: string;
|
||||
options: CrawlOptions;
|
||||
options: Omit<CrawlOptions, 'extensions'>;
|
||||
files: Record<string, boolean>;
|
||||
}
|
||||
|
||||
const cwd = process.cwd();
|
||||
|
||||
const extensions = [
|
||||
'.jpg',
|
||||
'.jpeg',
|
||||
'.png',
|
||||
'.heif',
|
||||
'.heic',
|
||||
'.tif',
|
||||
'.nef',
|
||||
'.webp',
|
||||
'.tiff',
|
||||
'.dng',
|
||||
'.gif',
|
||||
'.mov',
|
||||
'.mp4',
|
||||
'.webm',
|
||||
];
|
||||
|
||||
const tests: Test[] = [
|
||||
{
|
||||
test: 'should return empty when crawling an empty path list',
|
||||
@@ -49,7 +66,7 @@ const tests: Test[] = [
|
||||
test: 'should exclude by file extension',
|
||||
options: {
|
||||
pathsToCrawl: ['/photos/'],
|
||||
exclusionPatterns: ['**/*.tif'],
|
||||
exclusionPattern: '**/*.tif',
|
||||
},
|
||||
files: {
|
||||
'/photos/image.jpg': true,
|
||||
@@ -60,7 +77,7 @@ const tests: Test[] = [
|
||||
test: 'should exclude by file extension without case sensitivity',
|
||||
options: {
|
||||
pathsToCrawl: ['/photos/'],
|
||||
exclusionPatterns: ['**/*.TIF'],
|
||||
exclusionPattern: '**/*.TIF',
|
||||
},
|
||||
files: {
|
||||
'/photos/image.jpg': true,
|
||||
@@ -71,7 +88,7 @@ const tests: Test[] = [
|
||||
test: 'should exclude by folder',
|
||||
options: {
|
||||
pathsToCrawl: ['/photos/'],
|
||||
exclusionPatterns: ['**/raw/**'],
|
||||
exclusionPattern: '**/raw/**',
|
||||
recursive: true,
|
||||
},
|
||||
files: {
|
||||
@@ -201,7 +218,7 @@ const tests: Test[] = [
|
||||
test: 'should support ignoring full filename',
|
||||
options: {
|
||||
pathsToCrawl: ['/photos'],
|
||||
exclusionPatterns: ['**/image2.jpg'],
|
||||
exclusionPattern: '**/image2.jpg',
|
||||
},
|
||||
files: {
|
||||
'/photos/image1.jpg': true,
|
||||
@@ -213,7 +230,7 @@ const tests: Test[] = [
|
||||
test: 'should support ignoring file extensions',
|
||||
options: {
|
||||
pathsToCrawl: ['/photos'],
|
||||
exclusionPatterns: ['**/*.png'],
|
||||
exclusionPattern: '**/*.png',
|
||||
},
|
||||
files: {
|
||||
'/photos/image1.jpg': true,
|
||||
@@ -226,7 +243,7 @@ const tests: Test[] = [
|
||||
options: {
|
||||
pathsToCrawl: ['/photos'],
|
||||
recursive: true,
|
||||
exclusionPatterns: ['**/raw/**'],
|
||||
exclusionPattern: '**/raw/**',
|
||||
},
|
||||
files: {
|
||||
'/photos/image1.jpg': true,
|
||||
@@ -241,7 +258,7 @@ const tests: Test[] = [
|
||||
options: {
|
||||
pathsToCrawl: ['/'],
|
||||
recursive: true,
|
||||
exclusionPatterns: ['/images/**'],
|
||||
exclusionPattern: '/images/**',
|
||||
},
|
||||
files: {
|
||||
'/photos/image1.jpg': true,
|
||||
@@ -251,12 +268,7 @@ const tests: Test[] = [
|
||||
},
|
||||
];
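For illustration only, a hypothetical extra entry (not present in the diff) showing the shape the updated Test interface expects, with the singular exclusionPattern field replacing the old exclusionPatterns array:

```typescript
// Hypothetical test case illustrating the new `exclusionPattern` option shape.
const exampleTest: Test = {
  test: 'should ignore a single exclusion pattern while crawling recursively',
  options: {
    pathsToCrawl: ['/photos'],
    recursive: true,
    exclusionPattern: '**/raw/**',
  },
  files: {
    '/photos/image1.jpg': true,
    '/photos/raw/image2.dng': false,
  },
};
```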
|
||||
|
||||
describe(CrawlService.name, () => {
|
||||
const sut = new CrawlService(
|
||||
['.jpg', '.jpeg', '.png', '.heif', '.heic', '.tif', '.nef', '.webp', '.tiff', '.dng', '.gif'],
|
||||
['.mov', '.mp4', '.webm'],
|
||||
);
|
||||
|
||||
describe('crawl', () => {
|
||||
afterEach(() => {
|
||||
mockfs.restore();
|
||||
});
|
||||
@@ -266,7 +278,7 @@ describe(CrawlService.name, () => {
|
||||
it(test, async () => {
|
||||
mockfs(Object.fromEntries(Object.keys(files).map((file) => [file, ''])));
|
||||
|
||||
const actual = await sut.crawl(options);
|
||||
const actual = await crawl({ ...options, extensions });
|
||||
const expected = Object.entries(files)
|
||||
.filter((entry) => entry[1])
|
||||
.map(([file]) => file);
|
||||
133
cli/src/utils.ts
@@ -1,54 +1,60 @@
|
||||
import { defaults, getMyUserInfo, isHttpError } from '@immich/sdk';
|
||||
import { readFile, writeFile } from 'node:fs/promises';
|
||||
import { join } from 'node:path';
|
||||
import { getMyUser, init, isHttpError } from '@immich/sdk';
|
||||
import { glob } from 'fast-glob';
|
||||
import { createHash } from 'node:crypto';
|
||||
import { createReadStream } from 'node:fs';
|
||||
import { readFile, stat, writeFile } from 'node:fs/promises';
|
||||
import { join, resolve } from 'node:path';
|
||||
import yaml from 'yaml';
|
||||
|
||||
export interface BaseOptions {
|
||||
configDirectory: string;
|
||||
apiKey?: string;
|
||||
instanceUrl?: string;
|
||||
key?: string;
|
||||
url?: string;
|
||||
}
|
||||
|
||||
export interface AuthDto {
|
||||
instanceUrl: string;
|
||||
apiKey: string;
|
||||
}
|
||||
export type AuthDto = { url: string; key: string };
|
||||
type OldAuthDto = { instanceUrl: string; apiKey: string };
|
||||
|
||||
export const authenticate = async (options: BaseOptions): Promise<void> => {
|
||||
const { configDirectory: configDir, instanceUrl, apiKey } = options;
|
||||
export const authenticate = async (options: BaseOptions): Promise<AuthDto> => {
|
||||
const { configDirectory: configDir, url, key } = options;
|
||||
|
||||
// provided in command
|
||||
if (instanceUrl && apiKey) {
|
||||
await connect(instanceUrl, apiKey);
|
||||
return;
|
||||
if (url && key) {
|
||||
return connect(url, key);
|
||||
}
|
||||
|
||||
// fallback to file
|
||||
// fallback to auth file
|
||||
const config = await readAuthFile(configDir);
|
||||
await connect(config.instanceUrl, config.apiKey);
|
||||
const auth = await connect(config.url, config.key);
|
||||
if (auth.url !== config.url) {
|
||||
await writeAuthFile(configDir, auth);
|
||||
}
|
||||
|
||||
return auth;
|
||||
};
|
||||
|
||||
export const connect = async (instanceUrl: string, apiKey: string): Promise<void> => {
|
||||
const wellKnownUrl = new URL('.well-known/immich', instanceUrl);
|
||||
export const connect = async (url: string, key: string) => {
|
||||
const wellKnownUrl = new URL('.well-known/immich', url);
|
||||
try {
|
||||
const wellKnown = await fetch(wellKnownUrl).then((response) => response.json());
|
||||
const endpoint = new URL(wellKnown.api.endpoint, instanceUrl).toString();
|
||||
if (endpoint !== instanceUrl) {
|
||||
const endpoint = new URL(wellKnown.api.endpoint, url).toString();
|
||||
if (endpoint !== url) {
|
||||
console.debug(`Discovered API at ${endpoint}`);
|
||||
}
|
||||
instanceUrl = endpoint;
|
||||
url = endpoint;
|
||||
} catch {
|
||||
// noop
|
||||
}
|
||||
|
||||
defaults.baseUrl = instanceUrl;
|
||||
defaults.headers = { 'x-api-key': apiKey };
|
||||
init({ baseUrl: url, apiKey: key });
|
||||
|
||||
const [error] = await withError(getMyUserInfo());
|
||||
const [error] = await withError(getMyUser());
|
||||
if (isHttpError(error)) {
|
||||
logError(error, 'Failed to connect to server');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
return { url, key };
|
||||
};
|
||||
|
||||
export const logError = (error: unknown, message: string) => {
|
||||
@@ -66,7 +72,12 @@ export const readAuthFile = async (dir: string) => {
|
||||
try {
|
||||
const data = await readFile(getAuthFilePath(dir));
|
||||
// TODO add class-transform/validation
|
||||
return yaml.parse(data.toString()) as AuthDto;
|
||||
const auth = yaml.parse(data.toString()) as AuthDto | OldAuthDto;
|
||||
const { instanceUrl, apiKey } = auth as OldAuthDto;
|
||||
if (instanceUrl && apiKey) {
|
||||
return { url: instanceUrl, key: apiKey };
|
||||
}
|
||||
return auth as AuthDto;
|
||||
} catch (error: Error | any) {
|
||||
if (error.code === 'ENOENT' || error.code === 'ENOTDIR') {
|
||||
console.log('No auth file exists. Please login first.');
|
||||
@@ -87,3 +98,75 @@ export const withError = async <T>(promise: Promise<T>): Promise<[Error, undefin
|
||||
return [error, undefined];
|
||||
}
|
||||
};
|
||||
|
||||
export interface CrawlOptions {
|
||||
pathsToCrawl: string[];
|
||||
recursive?: boolean;
|
||||
includeHidden?: boolean;
|
||||
exclusionPattern?: string;
|
||||
extensions: string[];
|
||||
}
|
||||
export const crawl = async (options: CrawlOptions): Promise<string[]> => {
|
||||
const { extensions: extensionsWithPeriod, recursive, pathsToCrawl, exclusionPattern, includeHidden } = options;
|
||||
const extensions = extensionsWithPeriod.map((extension) => extension.replace('.', ''));
|
||||
|
||||
if (pathsToCrawl.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const patterns: string[] = [];
|
||||
const crawledFiles: string[] = [];
|
||||
|
||||
for await (const currentPath of pathsToCrawl) {
|
||||
try {
|
||||
const absolutePath = resolve(currentPath);
|
||||
const stats = await stat(absolutePath);
|
||||
if (stats.isFile() || stats.isSymbolicLink()) {
|
||||
crawledFiles.push(absolutePath);
|
||||
} else {
|
||||
patterns.push(absolutePath);
|
||||
}
|
||||
} catch (error: any) {
|
||||
if (error.code === 'ENOENT') {
|
||||
patterns.push(currentPath);
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let searchPattern: string;
|
||||
if (patterns.length === 1) {
|
||||
searchPattern = patterns[0];
|
||||
} else if (patterns.length === 0) {
|
||||
return crawledFiles;
|
||||
} else {
|
||||
searchPattern = '{' + patterns.join(',') + '}';
|
||||
}
|
||||
|
||||
if (recursive) {
|
||||
searchPattern = searchPattern + '/**/';
|
||||
}
|
||||
|
||||
searchPattern = `${searchPattern}/*.{${extensions.join(',')}}`;
|
||||
|
||||
const globbedFiles = await glob(searchPattern, {
|
||||
absolute: true,
|
||||
caseSensitiveMatch: false,
|
||||
onlyFiles: true,
|
||||
dot: includeHidden,
|
||||
ignore: [`**/${exclusionPattern}`],
|
||||
});
|
||||
globbedFiles.push(...crawledFiles);
|
||||
return globbedFiles.sort();
|
||||
};
|
||||
|
||||
export const sha1 = (filepath: string) => {
|
||||
const hash = createHash('sha1');
|
||||
return new Promise<string>((resolve, reject) => {
|
||||
const rs = createReadStream(filepath);
|
||||
rs.on('error', reject);
|
||||
rs.on('data', (chunk) => hash.update(chunk));
|
||||
rs.on('end', () => resolve(hash.digest('hex')));
|
||||
});
|
||||
};
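A minimal usage sketch of the two helpers above. The paths, pattern, and extension list are placeholders; the import path matches the one used elsewhere in this diff.

```typescript
// Hypothetical invocation of the new crawl() and sha1() helpers from src/utils.
import { crawl, sha1 } from 'src/utils';

const files = await crawl({
  pathsToCrawl: ['/photos'],
  recursive: true,
  includeHidden: false,
  exclusionPattern: '**/*.tif',
  extensions: ['.jpg', '.jpeg', '.png', '.mp4'],
});

for (const file of files) {
  console.log(`${await sha1(file)}  ${file}`); // SHA-1 checksum followed by the absolute path
}
```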
|
||||
|
||||
|
Binary image file updated (1.7 MiB before, 1.8 MiB after).
@@ -2,36 +2,34 @@
|
||||
# - https://immich.app/docs/developer/setup
|
||||
# - https://immich.app/docs/developer/troubleshooting
|
||||
|
||||
version: '3.8'
|
||||
|
||||
name: immich-dev
|
||||
|
||||
x-server-build: &server-common
|
||||
image: immich-server-dev:latest
|
||||
build:
|
||||
context: ../
|
||||
dockerfile: server/Dockerfile
|
||||
target: dev
|
||||
restart: always
|
||||
volumes:
|
||||
- ../server:/usr/src/app
|
||||
- ../open-api:/usr/src/open-api
|
||||
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
|
||||
- ${UPLOAD_LOCATION}/photos/upload:/usr/src/app/upload/upload
|
||||
- /usr/src/app/node_modules
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
env_file:
|
||||
- .env
|
||||
ulimits:
|
||||
nofile:
|
||||
soft: 1048576
|
||||
hard: 1048576
|
||||
|
||||
services:
|
||||
immich-server:
|
||||
container_name: immich_server
|
||||
command: ['/usr/src/app/bin/immich-dev', 'immich']
|
||||
<<: *server-common
|
||||
command: ['/usr/src/app/bin/immich-dev']
|
||||
image: immich-server-dev:latest
|
||||
# extends:
|
||||
# file: hwaccel.transcoding.yml
|
||||
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
|
||||
build:
|
||||
context: ../
|
||||
dockerfile: server/Dockerfile
|
||||
target: dev
|
||||
restart: always
|
||||
volumes:
|
||||
- ../server:/usr/src/app
|
||||
- ../open-api:/usr/src/open-api
|
||||
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
|
||||
- ${UPLOAD_LOCATION}/photos/upload:/usr/src/app/upload/upload
|
||||
- /usr/src/app/node_modules
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
env_file:
|
||||
- .env
|
||||
ulimits:
|
||||
nofile:
|
||||
soft: 1048576
|
||||
hard: 1048576
|
||||
ports:
|
||||
- 3001:3001
|
||||
- 9230:9230
|
||||
@@ -39,19 +37,6 @@ services:
|
||||
- redis
|
||||
- database
|
||||
|
||||
immich-microservices:
|
||||
container_name: immich_microservices
|
||||
command: ['/usr/src/app/bin/immich-dev', 'microservices']
|
||||
<<: *server-common
|
||||
# extends:
|
||||
# file: hwaccel.transcoding.yml
|
||||
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
|
||||
ports:
|
||||
- 9231:9230
|
||||
depends_on:
|
||||
- database
|
||||
- immich-server
|
||||
|
||||
immich-web:
|
||||
container_name: immich_web
|
||||
image: immich-web-dev:latest
|
||||
@@ -99,7 +84,9 @@ services:
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: redis:6.2-alpine@sha256:51d6c56749a4243096327e3fb964a48ed92254357108449cb6e23999c37773c5
|
||||
image: redis:6.2-alpine@sha256:e31ca60b18f7e9b78b573d156702471d4eda038803c0b8e6f01559f350031e93
|
||||
healthcheck:
|
||||
test: redis-cli ping || exit 1
|
||||
|
||||
database:
|
||||
container_name: immich_postgres
|
||||
@@ -110,11 +97,18 @@ services:
|
||||
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
||||
POSTGRES_USER: ${DB_USERNAME}
|
||||
POSTGRES_DB: ${DB_DATABASE_NAME}
|
||||
POSTGRES_INITDB_ARGS: '--data-checksums'
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
|
||||
ports:
|
||||
- 5432:5432
|
||||
|
||||
healthcheck:
|
||||
test: pg_isready --dbname='${DB_DATABASE_NAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT SUM(checksum_failures) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
|
||||
interval: 5m
|
||||
start_interval: 30s
|
||||
start_period: 5m
|
||||
command: ["postgres", "-c" ,"shared_preload_libraries=vectors.so", "-c", 'search_path="$$user", public, vectors', "-c", "logging_collector=on", "-c", "max_wal_size=2GB", "-c", "shared_buffers=512MB", "-c", "wal_compression=on"]
|
||||
|
||||
# set IMMICH_METRICS=true in .env to enable metrics
|
||||
# immich-prometheus:
|
||||
# container_name: immich_prometheus
|
||||
|
||||
@@ -1,41 +1,26 @@
|
||||
version: '3.8'
|
||||
|
||||
name: immich-prod
|
||||
|
||||
x-server-build: &server-common
|
||||
image: immich-server:latest
|
||||
build:
|
||||
context: ../
|
||||
dockerfile: server/Dockerfile
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
env_file:
|
||||
- .env
|
||||
restart: always
|
||||
|
||||
services:
|
||||
immich-server:
|
||||
container_name: immich_server
|
||||
command: ['start.sh', 'immich']
|
||||
<<: *server-common
|
||||
image: immich-server:latest
|
||||
# extends:
|
||||
# file: hwaccel.transcoding.yml
|
||||
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
|
||||
build:
|
||||
context: ../
|
||||
dockerfile: server/Dockerfile
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}/photos:/usr/src/app/upload
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
env_file:
|
||||
- .env
|
||||
ports:
|
||||
- 2283:3001
|
||||
depends_on:
|
||||
- redis
|
||||
- database
|
||||
|
||||
immich-microservices:
|
||||
container_name: immich_microservices
|
||||
command: ['start.sh', 'microservices']
|
||||
<<: *server-common
|
||||
# extends:
|
||||
# file: hwaccel.transcoding.yml
|
||||
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
|
||||
depends_on:
|
||||
- redis
|
||||
- database
|
||||
- immich-server
|
||||
restart: always
|
||||
|
||||
immich-machine-learning:
|
||||
container_name: immich_machine_learning
|
||||
@@ -56,7 +41,9 @@ services:
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: redis:6.2-alpine@sha256:51d6c56749a4243096327e3fb964a48ed92254357108449cb6e23999c37773c5
|
||||
image: redis:6.2-alpine@sha256:e31ca60b18f7e9b78b573d156702471d4eda038803c0b8e6f01559f350031e93
|
||||
healthcheck:
|
||||
test: redis-cli ping || exit 1
|
||||
restart: always
|
||||
|
||||
database:
|
||||
@@ -68,17 +55,25 @@ services:
|
||||
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
||||
POSTGRES_USER: ${DB_USERNAME}
|
||||
POSTGRES_DB: ${DB_DATABASE_NAME}
|
||||
POSTGRES_INITDB_ARGS: '--data-checksums'
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}/postgres:/var/lib/postgresql/data
|
||||
ports:
|
||||
- 5432:5432
|
||||
healthcheck:
|
||||
test: pg_isready --dbname='${DB_DATABASE_NAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT SUM(checksum_failures) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
|
||||
interval: 5m
|
||||
start_interval: 30s
|
||||
start_period: 5m
|
||||
command: ["postgres", "-c" ,"shared_preload_libraries=vectors.so", "-c", 'search_path="$$user", public, vectors', "-c", "logging_collector=on", "-c", "max_wal_size=2GB", "-c", "shared_buffers=512MB", "-c", "wal_compression=on"]
|
||||
restart: always
|
||||
|
||||
# set IMMICH_METRICS=true in .env to enable metrics
|
||||
immich-prometheus:
|
||||
container_name: immich_prometheus
|
||||
ports:
|
||||
- 9090:9090
|
||||
image: prom/prometheus@sha256:bc1794e85c9e00293351b967efa267ce6af1c824ac875a9d0c7ac84700a8b53e
|
||||
image: prom/prometheus@sha256:5c435642ca4d8427ca26f4901c11114023004709037880cd7860d5b7176aa731
|
||||
volumes:
|
||||
- ./prometheus.yml:/etc/prometheus/prometheus.yml
|
||||
- prometheus-data:/prometheus
|
||||
@@ -90,7 +85,7 @@ services:
|
||||
command: ['./run.sh', '-disable-reporting']
|
||||
ports:
|
||||
- 3000:3000
|
||||
image: grafana/grafana:10.4.0-ubuntu@sha256:c1f582b7cc4c1b9805d187b5600ce7879550a12ef6d29571da133c3d3fc67a9c
|
||||
image: grafana/grafana:11.0.0-ubuntu@sha256:02e99d1ee0b52dc9d3000c7b5314e7a07e0dfd69cc49bb3f8ce323491ed3406b
|
||||
volumes:
|
||||
- grafana-data:/var/lib/grafana
|
||||
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
version: '3.8'
|
||||
|
||||
#
|
||||
# WARNING: Make sure to use the docker-compose.yml of the current release:
|
||||
#
|
||||
@@ -14,7 +12,9 @@ services:
|
||||
immich-server:
|
||||
container_name: immich_server
|
||||
image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
|
||||
command: ['start.sh', 'immich']
|
||||
# extends:
|
||||
# file: hwaccel.transcoding.yml
|
||||
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
@@ -27,23 +27,6 @@ services:
|
||||
- database
|
||||
restart: always
|
||||
|
||||
immich-microservices:
|
||||
container_name: immich_microservices
|
||||
image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
|
||||
# extends: # uncomment this section for hardware acceleration - see https://immich.app/docs/features/hardware-transcoding
|
||||
# file: hwaccel.transcoding.yml
|
||||
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
|
||||
command: ['start.sh', 'microservices']
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
env_file:
|
||||
- .env
|
||||
depends_on:
|
||||
- redis
|
||||
- database
|
||||
restart: always
|
||||
|
||||
immich-machine-learning:
|
||||
container_name: immich_machine_learning
|
||||
# For hardware acceleration, add one of -[armnn, cuda, openvino] to the image tag.
|
||||
@@ -60,20 +43,28 @@ services:
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: registry.hub.docker.com/library/redis:6.2-alpine@sha256:51d6c56749a4243096327e3fb964a48ed92254357108449cb6e23999c37773c5
|
||||
image: docker.io/redis:6.2-alpine@sha256:e31ca60b18f7e9b78b573d156702471d4eda038803c0b8e6f01559f350031e93
|
||||
healthcheck:
|
||||
test: redis-cli ping || exit 1
|
||||
restart: always
|
||||
|
||||
database:
|
||||
container_name: immich_postgres
|
||||
image: registry.hub.docker.com/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
|
||||
image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:90724186f0a3517cf6914295b5ab410db9ce23190a2d9d0b9dd6463e3fa298f0
|
||||
environment:
|
||||
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
||||
POSTGRES_USER: ${DB_USERNAME}
|
||||
POSTGRES_DB: ${DB_DATABASE_NAME}
|
||||
POSTGRES_INITDB_ARGS: '--data-checksums'
|
||||
volumes:
|
||||
- pgdata:/var/lib/postgresql/data
|
||||
- ${DB_DATA_LOCATION}:/var/lib/postgresql/data
|
||||
healthcheck:
|
||||
test: pg_isready --dbname='${DB_DATABASE_NAME}' || exit 1; Chksum="$$(psql --dbname='${DB_DATABASE_NAME}' --username='${DB_USERNAME}' --tuples-only --no-align --command='SELECT SUM(checksum_failures) FROM pg_stat_database')"; echo "checksum failure count is $$Chksum"; [ "$$Chksum" = '0' ] || exit 1
|
||||
interval: 5m
|
||||
start_interval: 30s
|
||||
start_period: 5m
|
||||
command: ["postgres", "-c" ,"shared_preload_libraries=vectors.so", "-c", 'search_path="$$user", public, vectors', "-c", "logging_collector=on", "-c", "max_wal_size=2GB", "-c", "shared_buffers=512MB", "-c", "wal_compression=on"]
|
||||
restart: always
|
||||
|
||||
volumes:
|
||||
pgdata:
|
||||
model-cache:
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
|
||||
# The location where your uploaded files are stored
|
||||
UPLOAD_LOCATION=./library
|
||||
# The location where your database files are stored
|
||||
DB_DATA_LOCATION=./postgres
|
||||
|
||||
# The Immich version to use. You can pin this to a specific version like "v1.71.0"
|
||||
IMMICH_VERSION=release
|
||||
@@ -11,8 +13,5 @@ DB_PASSWORD=postgres
|
||||
|
||||
# The values below this line do not need to be changed
|
||||
###################################################################################
|
||||
DB_HOSTNAME=immich_postgres
|
||||
DB_USERNAME=postgres
|
||||
DB_DATABASE_NAME=immich
|
||||
|
||||
REDIS_HOSTNAME=immich_redis
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
version: "3.8"
|
||||
|
||||
# Configurations for hardware-accelerated machine learning
|
||||
|
||||
# If using Unraid or another platform that doesn't allow multiple Compose files,
|
||||
# you can inline the config for a backend by copying its contents
|
||||
# you can inline the config for a backend by copying its contents
|
||||
# into the immich-machine-learning service in the docker-compose.yml file.
|
||||
|
||||
# See https://immich.app/docs/features/ml-hardware-acceleration for info on usage.
|
||||
@@ -27,12 +25,10 @@ services:
|
||||
count: 1
|
||||
capabilities:
|
||||
- gpu
|
||||
- compute
|
||||
- video
|
||||
|
||||
openvino:
|
||||
device_cgroup_rules:
|
||||
- "c 189:* rmw"
|
||||
- 'c 189:* rmw'
|
||||
devices:
|
||||
- /dev/dri:/dev/dri
|
||||
volumes:
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
version: "3.8"
|
||||
|
||||
# Configurations for hardware-accelerated transcoding
|
||||
|
||||
# If using Unraid or another platform that doesn't allow multiple Compose files,
|
||||
|
||||
1
docs/.nvmrc
Normal file
@@ -0,0 +1 @@
|
||||
20.13
|
||||
@@ -1,17 +1,17 @@
|
||||
# Website
|
||||
|
||||
This website is built using [Docusaurus 2](https://docusaurus.io/), a modern static website generator.
|
||||
This website is built using [Docusaurus](https://docusaurus.io/), a modern static website generator.
|
||||
|
||||
### Installation
|
||||
|
||||
```
|
||||
$ yarn
|
||||
$ npm install
|
||||
```
|
||||
|
||||
### Local Development
|
||||
|
||||
```
|
||||
$ yarn start
|
||||
$ npm run start
|
||||
```
|
||||
|
||||
This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server.
|
||||
@@ -19,7 +19,7 @@ This command starts a local development server and opens up a browser window. Mo
|
||||
### Build
|
||||
|
||||
```
|
||||
$ yarn build
|
||||
$ npm run build
|
||||
```
|
||||
|
||||
This command generates static content into the `build` directory and can be served using any static contents hosting service.
|
||||
@@ -29,13 +29,13 @@ This command generates static content into the `build` directory and can be serv
|
||||
Using SSH:
|
||||
|
||||
```
|
||||
$ USE_SSH=true yarn deploy
|
||||
$ USE_SSH=true npm run deploy
|
||||
```
|
||||
|
||||
Not using SSH:
|
||||
|
||||
```
|
||||
$ GIT_USER=<Your GitHub username> yarn deploy
|
||||
$ GIT_USER=<Your GitHub username> npm run deploy
|
||||
```
|
||||
|
||||
If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch.
|
||||
|
||||
@@ -10,8 +10,8 @@ Hello everyone, it is my pleasure to deliver the new release of Immich to you. T
|
||||
|
||||
Some notable features are:
|
||||
|
||||
- [OAuth integration](#livephoto-ios-support-)
|
||||
- [LivePhoto support on iOS](#oauth-integration-)
|
||||
- OAuth integration
|
||||
- LivePhoto support on iOS
|
||||
- User config system
|
||||
|
||||
<!--truncate-->
|
||||
|
||||
75
docs/blog/2024/immich-core-team-goes-fulltime.mdx
Normal file
@@ -0,0 +1,75 @@
|
||||
---
|
||||
title: The Immich core team goes full-time
|
||||
authors: [alextran]
|
||||
tags: [update, announcement, futo]
|
||||
date: 2024-05-01T00:00
|
||||
---
|
||||
|
||||
**Immich is joining [FUTO](https://futo.org/)!**
|
||||
|
||||
Since the beginning of this adventure, my goal has always been to create a better world for my children. Memories are priceless, and privacy should not be a luxury. However, building quality open source has its challenges. Over the past two years, it has taken significant dedication, time, and effort.
|
||||
|
||||
Recently, a company in Austin, Texas, called FUTO contacted the team. FUTO strives to develop quality and sustainable open software. They build software alternatives that focus on giving control to users. From their mission statement:
|
||||
|
||||
“Computers should belong to you, the people. We develop and fund technology to give them back.”
|
||||
|
||||
FUTO loved Immich and wanted to see if we’d consider working with them to take the project to the next level. In short, FUTO offered to:
|
||||
|
||||
- Pay the core team to work on Immich full-time
|
||||
- Let us keep full autonomy about the project’s direction and leadership
|
||||
- Continue to license Immich under AGPL
|
||||
- Keep Immich’s development direction with no paywalled features
|
||||
- Keep Immich “built for the people” (no ads, data mining/selling, or alternative motives)
|
||||
- Provide us with financial, technical, legal, and administrative support
|
||||
|
||||
After careful deliberation, the team decided that FUTO’s vision closely aligns with our own: to build a better future by providing a polished, performant, and privacy-preserving open-source software solution for photo and video management delivered in a sustainable way.
|
||||
|
||||
Immich’s future has never looked brighter, and we look forward to realizing our vision for Immich as part of FUTO.
|
||||
|
||||
If you have more questions, we’ll host a Q&A live stream on May 9th at 3PM UTC (10AM CST). [You can ask questions here](https://www.live-ask.com/event/01HWP2SB99A1K8EXFBDKZ5Z9CF), and the stream will be live [here on our YouTube channel](https://youtube.com/live/cwz2iZwYpgg).
|
||||
|
||||
Cheers,
|
||||
|
||||
The Immich Team
|
||||
|
||||
---
|
||||
|
||||
## FAQs
|
||||
|
||||
### What is FUTO?
|
||||
|
||||
[https://futo.org/what-is-futo/](https://futo.org/what-is-futo/)
|
||||
|
||||
### Will the license change?
|
||||
|
||||
No. Immich will continue to be licensed under AGPL without a CLA.
|
||||
|
||||
### Will Immich continue to be free?
|
||||
|
||||
Yes. The Immich source code will remain freely available under the AGPL license.
|
||||
|
||||
### Is Immich getting VC funding?
|
||||
|
||||
No. Venture capital implies investment in a business, often with the expectation of a future payout (exit plan). Immich is neither a business that can be acquired nor comes with a money-making exit plan.
|
||||
|
||||
### I am currently supporting Immich through GitHub sponsors. What will happen to my donation?
|
||||
|
||||
Effective immediately, all donations to the Immich organization will be canceled. In the future, we will offer an optional, modest payment option instead. Thank you to everyone who donated to help us get this far!
|
||||
|
||||
### How is funding sustainable?
|
||||
|
||||
Immich and FUTO believe a sustainable future requires a model that does not rely on users-as-a-product. To this end, FUTO advocates that users pay for good, open software. In keeping with this model, we will adopt a purchase price. This means we no longer accept donations, but — _without limiting features for those who do not pay_ — we will soon allow you to purchase Immich through a modest payment. We encourage you to pay for the high-quality software you use to foster a healthy software culture where developers build great applications without hidden motives for their users.
|
||||
|
||||
### When does this change take effect?
|
||||
|
||||
This change takes effect immediately.
|
||||
|
||||
### What will change?
|
||||
|
||||
The following things will change as Immich joins FUTO:
|
||||
|
||||
- The brand, logo, and other Immich trademarks will be transferred to FUTO.
|
||||
- We will stop all donations to the project.
|
||||
- The core team can now dedicate our full attention to Immich
|
||||
- Before the end of the year, we plan to have a roadmap for what it will take to get Immich to a stable release.
|
||||
- Bugs will be squashed, and features will be delivered faster.
|
||||
@@ -6,7 +6,7 @@
|
||||
|
||||
The admin password can be reset by running the [reset-admin-password](/docs/administration/server-commands.md) command on the immich-server.
|
||||
|
||||
### How can I see list of all users in Immich?
|
||||
### How can I see a list of all users in Immich?
|
||||
|
||||
You can see the list of all users by running [list-users](/docs/administration/server-commands.md) Command on the Immich-server.
|
||||
|
||||
@@ -24,22 +24,43 @@ You can see the list of all users by running [list-users](/docs/administration/s
|
||||
|
||||
### I cannot log into the application after an update. What can I do?
|
||||
|
||||
First, verify that the mobile app and server are both running the same version (major and minor).
|
||||
Verify that the mobile app and server are both running the same version (major and minor).
|
||||
|
||||
:::note
|
||||
App store updates sometimes take longer because the stores (Google play store and Apple app store)
|
||||
need to approve the update first which may take some time.
|
||||
App store updates sometimes take longer because the stores (Google Play Store and Apple App Store)
|
||||
need to approve the update first, and it can take some time.
|
||||
:::
|
||||
|
||||
If you still cannot login to the app, try the following:
|
||||
If you still cannot log in to the app, try the following:
|
||||
|
||||
- Check the mobile logs
|
||||
- Make sure login credentials are correct by logging in on the web app
|
||||
|
||||
### Why does foreground backup stop when I navigate away from the app? Shouldn't it transfer the job to background backup?
|
||||
|
||||
Foreground backup and background backup are two separate mechanisms. They don't communicate or interact with each other.
|
||||
|
||||
Foreground backup is controlled by the user's action, while background backup is controlled by your device's operating system. When the app is put in the background, the invocation of background tasks is delegated to the device's operating system scheduler. It decides when the background task can run and how long it can run.
|
||||
|
||||
The behaviors differ based on your device manufacturer and operating system, but most are related to battery-saving policies.
|
||||
|
||||
### Why is background backup on iOS not working?
|
||||
|
||||
On iOS (iPhone and iPad), the operating system determines if a particular app can invoke background tasks based on multiple factors, most of which the Immich app has no control over. To increase the likelihood that the background backup task is run, follow the steps below:
|
||||
|
||||
- Enable Background App Refresh for Immich in the iOS settings at `Settings > General > Background App Refresh`.
|
||||
- Disable Background App Refresh for apps that don't need background tasks to run. This will reduce the competition for background task invocation for Immich.
|
||||
- Use the Immich app more often.
|
||||
|
||||
---
|
||||
|
||||
## Assets
|
||||
|
||||
### Does Immich change the file?
|
||||
|
||||
No. Immich never touches the original file under any circumstances; all edited metadata is saved in the companion sidecar file and in the database.
|
||||
|
||||
### Can I add my existing photo library?
|
||||
|
||||
Yes, with an [External Library](/docs/features/libraries.md).
|
||||
@@ -50,11 +71,11 @@ Template changes will only apply to _new_ assets. To retroactively apply the tem
|
||||
|
||||
### Why are only photos and not videos being uploaded to Immich?
|
||||
|
||||
This often happens when using a reverse proxy (such as nginx or Cloudflare tunnel) in front of Immich. Make sure to set your reverse proxy to allow large `POST` requests. In `nginx`, set `client_max_body_size 50000M;` or similar. Also check the disk space of your reverse proxy, in some cases proxies cache requests to disk before passing them on, and if disk space runs out the request fails.
|
||||
This often happens when using a reverse proxy (such as Nginx or Cloudflare tunnel) in front of Immich. Make sure to set your reverse proxy to allow large `POST` requests. In `nginx`, set `client_max_body_size 50000M;` or similar. Also, check the disk space of your reverse proxy. In some cases, proxies cache requests to disk before passing them on, and if disk space runs out, the request fails.
|
||||
|
||||
### Why are some photos stored in the file system with the wrong date?
|
||||
|
||||
There are a few different scenarios that can lead to this situation. The solution is to run the storage migration job again. The job is only _automatically_ run once per asset, after upload. If metadata extraction originally failed, the jobs were cleared/cancelled, etc. the job may not have run automatically the first time.
|
||||
There are a few different scenarios that can lead to this situation. The solution is to rerun the storage migration job. The job is only automatically run once per asset after upload. If metadata extraction originally failed, the jobs were cleared/canceled, etc., the job may not have run automatically the first time.
|
||||
|
||||
### How can I hide photos from the timeline?
|
||||
|
||||
@@ -68,23 +89,27 @@ See [Backup and Restore](/docs/administration/backup-and-restore.md).
|
||||
|
||||
No, it currently does not. There is an [open feature request on GitHub](https://github.com/immich-app/immich/discussions/4348).
|
||||
|
||||
### Does Immich support filtering of NSFW images?
|
||||
### Does Immich support the filtering of NSFW images?
|
||||
|
||||
No, it currently does not. There is an [open feature request on GitHub](https://github.com/immich-app/immich/discussions/2451).
|
||||
|
||||
### Why are there so many thumbnail generation jobs?
|
||||
|
||||
There are three thubmanil jobs for each asset:
|
||||
There are three thumbnail jobs for each asset:
|
||||
|
||||
- Blurred (thumbhash)
|
||||
- Small (webp)
|
||||
- Large (jpeg)
|
||||
- Preview (Webp)
|
||||
- Thumbnail (Jpeg)
|
||||
|
||||
Also, there are additional jobs for person (face) thumbnails.
|
||||
|
||||
### Why do files from WhatsApp not appear with the correct date?
|
||||
|
||||
Files sent over WhatsApp are stripped of their metadata. Therefore, when files are uploaded from WhatsApp, Immich has no way of knowing the original date of the file, only its time of arrival on the device. [See #3527](https://github.com/immich-app/immich/issues/3527).
|
||||
|
||||
### What happens if an asset exists in more than one account?
|
||||
|
||||
There are no requirements for assets to be unique across users. If multiple users upload the same image they are processed as if they were distinct assets and jobs run and thumbnails are generated accordingly.
|
||||
There are no requirements for assets to be unique across users. If multiple users upload the same image, it is processed as if it were a distinct asset, and jobs run and thumbnails are generated accordingly.
|
||||
|
||||
### Why do HDR videos appear pale in Immich player but look normal after download?
|
||||
|
||||
@@ -96,45 +121,46 @@ Immich always keeps your original files. Alongside that, it generates a transcod
|
||||
|
||||
### How can I delete transcoded videos without deleting the original?
|
||||
|
||||
The transcoded version of an asset can be deleted by setting a transcode policy that makes it unnecessary, then running a transcoding job for that asset. This can be done on a per-asset basis by starting a transcoding job for a single asset with the _Refresh encoded videos_ button in the asset viewer options, or for all assets by running transcoding jobs for all assets from the administration page.
|
||||
The transcoded version of an asset can be deleted by setting a transcode policy that makes it unnecessary and then running a transcoding job for that asset. This can be done on a per-asset basis by starting a transcoding job for a single asset with the _Refresh encoded videos_ button in the asset viewer options or for all assets by running transcoding jobs for all assets from the administration page.
|
||||
|
||||
To update the transcode policy, navigate to Administration > Video Transcoding Settings > Transcoding Policy and select a policy from the drop-down. This policy will determine whether an existing transcode will be deleted or overwritten in the transcoding job. If a video should be transcoded according to this policy, an existing transcode is overwritten. If not, then it is deleted.
|
||||
|
||||
:::note
|
||||
For example, say you have existing transcodes with the policy "Videos higher than normal resolution or not in the desired format" and switch to a narrower policy: "Videos not in the desired format". If an asset was only transcoded due to its resolution, then running a transcoding job for it will now delete the existing transcode. This is because resolution is no longer part of the transcode policy and the transcode is unnecessary as a result. Likewise, if you set the policy to "Don't transcode any videos" and run transcoding jobs for all assets, this will delete all existing transcodes as they are all unnecessary.
|
||||
For example, say you have existing transcodes with the policy "Videos higher than normal resolution or not in the desired format" and switch to a narrower policy: "Videos not in the desired format." If an asset was only transcoded due to its resolution, running a transcoding job for it will delete the existing transcode. This is because resolution is no longer part of the transcode policy and the transcode is unnecessary. Likewise, if you set the policy to "Don't transcode any videos" and run transcoding jobs for all assets, this will delete all existing transcodes as they are unnecessary.
|
||||
:::
|
||||
|
||||
### Is it possible to compress images during backup?
|
||||
|
||||
No. Our golden rule is that the original assets should always be untouched, so we don't think this feature is a good fit for Immich.
|
||||
No. Our design principle is that the original assets should always be untouched.
|
||||
|
||||
### How can I move all data (photos, persons, albums) from one user to another?
|
||||
### How can I move all data (photos, persons, albums, libraries) from one user to another?
|
||||
|
||||
This is not officially supported, but can be accomplished with some database updates. You can do this on the command line (in the PostgreSQL container using the psql command), or you can add for example an [Adminer](https://www.adminer.org/) container to the `docker-compose.yml` file, so that you can use a web-interface.
|
||||
|
||||
:::warning
|
||||
This is an advanced operation. If you can't do it with the steps described here, this is not for you.
|
||||
:::
|
||||
This is not officially supported but can be accomplished with some database updates. You can do this on the command line (in the PostgreSQL container using the `psql` command), or you can add, for example, an [Adminer](https://www.adminer.org/) container to the `docker-compose.yml` file so that you can use a web interface.
|
||||
|
||||
<details>
|
||||
<summary>Steps</summary>
|
||||
|
||||
1. **MAKE A BACKUP** - See [backup and restore](/docs/administration/backup-and-restore.md).
|
||||
|
||||
2. Find the id of both the 'source' and the 'destination' user (it's the id column in the users table)
|
||||
2. Find the ID of both the 'source' and the 'destination' user (it's the id column in the `users` table)
|
||||
|
||||
3. Three tables need to be updated:
|
||||
3. Four tables need to be updated:
|
||||
|
||||
```sql
|
||||
// reassign albums
|
||||
BEGIN;
|
||||
-- reassign albums
|
||||
UPDATE albums SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>';
|
||||
|
||||
// reassign people
|
||||
-- reassign people
|
||||
UPDATE person SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>';
|
||||
|
||||
// reassign assets
|
||||
-- reassign assets
|
||||
UPDATE assets SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>'
|
||||
AND CHECKSUM NOT IN (SELECT CHECKSUM FROM assets WHERE "ownerId" = '<destinationId>');
|
||||
|
||||
-- reassign external libraries
|
||||
UPDATE libraries SET "ownerId" = '<destinationId>' WHERE "ownerId" = '<sourceId>';
|
||||
COMMIT;
|
||||
```
|
||||
|
||||
4. There might be leftover assets in the 'source' user's library if they were skipped by the last query because of duplicate checksums. These are most likely duplicates and can usually be removed safely.
|
||||
@@ -159,7 +185,7 @@ No, not yet. For updates on this planned feature, follow the [GitHub discussion]
|
||||
|
||||
### Can I add an external library while keeping the existing album structure?
|
||||
|
||||
We haven't put in an official mechanism to create albums from external libraries at the moment, but there are some [workarounds from the community](https://github.com/immich-app/immich/discussions/4279) to help you achieve that.
|
||||
We haven't implemented an official mechanism for creating albums from external libraries, but there are some [workarounds from the community](https://github.com/immich-app/immich/discussions/4279) to help you achieve that.
|
||||
|
||||
### What happens to duplicates in external libraries?
|
||||
|
||||
@@ -171,7 +197,7 @@ Duplicate checking only exists for upload libraries, using the file hash. Furthe
|
||||
|
||||
### How does smart search work?
|
||||
|
||||
Immich uses CLIP models, for more information about CLIP and its capabilities read about it [here](https://openai.com/research/clip).
|
||||
Immich uses CLIP models. For more information about CLIP and its capabilities, read about it [here](https://openai.com/research/clip).
|
||||
|
||||
### How does facial recognition work?
|
||||
|
||||
@@ -189,33 +215,31 @@ However, disabling all jobs will not disable the machine learning service itself
|
||||
|
||||
### I'm getting errors about models being corrupt or failing to download. What do I do?
|
||||
|
||||
You can delete the model cache volume, which is where models are downloaded to. This will give the service a clean environment to download the model again. If models are failing to download entirely, you can manually download them from [Huggingface](https://huggingface.co/immich-app) and place them in the cache folder.
|
||||
|
||||
### Why did Immich decide to remove object detection?
|
||||
|
||||
The feature added keywords to images for metadata search, but wasn't used for smart search. Smart search made it unnecessary as it isn't limited to exact keywords. Combined with it causing crashes on some devices, using many dependencies and causing user confusion as to how search worked, it was better to remove the job altogether.
|
||||
For more info see [here](https://github.com/immich-app/immich/pull/5903)
|
||||
You can delete the model cache volume, where models are downloaded. This will give the service a clean environment to download the model again. If models are failing to download entirely, you can manually download them from [Huggingface][huggingface] and place them in the cache folder.
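For example, with the default volume and service names from the example compose file, resetting the cache might look like this sketch:

```bash
# stop and remove the machine learning container so the volume is no longer in use
docker compose rm --stop --force immich-machine-learning
# delete the cached models
docker volume rm immich_model-cache
# recreate the container; models are re-downloaded on demand
docker compose up -d immich-machine-learning
```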
|
||||
|
||||
### Can I use a custom CLIP model?
|
||||
|
||||
No, this is not supported. Only models listed in the [Huggingface](https://huggingface.co/immich-app) page are compatible. Feel free to make a feature request if there's a model not listed here that you think should be added.
|
||||
No, this is not supported. Only models listed in the [Huggingface][huggingface] page are compatible. Feel free to make a feature request if there's a model not listed here that you think should be added.
|
||||
|
||||
### I want to be able to search in other languages besides English. How can I do that?
|
||||
|
||||
You can change to a multilingual model listed [here](https://huggingface.co/collections/immich-app/multilingual-clip-654eb08c2382f591eeb8c2a7) by going to Administration > Machine Learning Settings > Smart Search and replacing the name of the model. Be sure to re-run Smart Search on all assets after this change. You can then search in over 100 languages.
|
||||
|
||||
:::note
|
||||
Feel free to make a feature request if there's a model you want to use that isn't in [Immich Huggingface list](https://huggingface.co/immich-app).
|
||||
Feel free to make a feature request if there's a model you want to use that isn't in [Immich Huggingface list][huggingface].
|
||||
:::
|
||||
|
||||
### Does Immich support Facial Recognition for videos ?
|
||||
### Does Immich support Facial Recognition for videos?
|
||||
|
||||
Immich's machine learning feature operate on the generated thumbnail. If a face is visible in the video's thumbnail it will be picked up by facial recognition.
|
||||
Immich's machine learning feature operates on the generated thumbnail. If a face is visible in the video's thumbnail it will be picked up by facial recognition.
|
||||
Scanning the entire video for faces may be implemented in the future.
|
||||
|
||||
### Does Immich have animal recognition?
|
||||
|
||||
No.
|
||||
:::tip
|
||||
You can use [Smart Search](/docs/features/smart-search.md) for this to some extent. For example, if you have a Golden Retriever and a Chihuahua, type these words in the smart search and watch the results.
|
||||
:::
|
||||
|
||||
### I'm getting a lot of "faces" that aren't faces, what can I do?
|
||||
|
||||
@@ -224,14 +248,19 @@ to increase the bar for what the algorithm considers a "core face" for that pers
|
||||
|
||||
### The immich_model-cache volume takes up a lot of space, what could be the problem?
|
||||
|
||||
If you installed several models and chose not to use some of them, it might be worth deleting the old models that are in immich_model-cache.
|
||||
If you installed several models and chose not to use some of them, it might be worth deleting the old models that are in immich_model-cache. To do this you can mount the model cache and remove the undesired models.
|
||||
|
||||
To do this you can run:
|
||||
<details>
|
||||
<summary>Steps</summary>
|
||||
|
||||
- `docker run -it --rm -v immich_model-cache:/mnt ubuntu bash`
|
||||
- `cd mnt`
|
||||
- `ls`
|
||||
- and delete unused models with `rm -r <model_name>`.
|
||||
```bash
|
||||
docker run -it --rm -v immich_model-cache:/mnt-models alpine sh
|
||||
cd /mnt-models
|
||||
ls clip/ facial-recognition/
|
||||
# rm -r clip/ABC facial-recognition/DEF # delete unused models
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
---
|
||||
|
||||
@@ -248,13 +277,28 @@ The initial backup is the most intensive due to the number of jobs running. The
|
||||
- Lower the job concurrency for these jobs to 1.
|
||||
- Under Settings > Transcoding Settings > Threads, set the number of threads to a low number like 1 or 2.
|
||||
- Under Settings > Machine Learning Settings > Facial Recognition > Model Name, you can change the facial recognition model to `buffalo_s` instead of `buffalo_l`. The former is a smaller and faster model, albeit not as good.
|
||||
- You _must_ re-run the Face Detection job for all images after this for facial recognition on new images to work properly.
|
||||
- If these changes are not enough, see [below](/docs/FAQ#how-can-i-disable-machine-learning) for how you can disable machine learning.
|
||||
  - For facial recognition on new images to work properly, you must re-run the Face Detection job for all images after this.
|
||||
- If these changes are not enough, see [below](/docs/FAQ#how-can-i-disable-machine-learning) for instructions on how to disable machine learning.
|
||||
|
||||
### Can I limit the amount of CPU and RAM usage?
|
||||
|
||||
By default, a container has no resource constraints and can use as much of a given resource as the host's kernel scheduler allows.
|
||||
You can look at the [original docker docs](https://docs.docker.com/config/containers/resource_constraints/) or use this [guide](https://www.baeldung.com/ops/docker-memory-limit) to learn how to limit this.
|
||||
By default, a container has no resource constraints and can use as much of a given resource as the host's kernel scheduler allows. To limit this, you can add the following to the `docker-compose.yml` block of any containers that you want to have limited resources.
|
||||
|
||||
<details>
|
||||
<summary>docker-compose.yml</summary>
|
||||
|
||||
```yaml
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
# Number of CPU threads
|
||||
cpus: '1.00'
|
||||
# Gigabytes of memory
|
||||
memory: '1G'
|
||||
```
|
||||
|
||||
</details>
|
||||
For more details, you can look at the [original docker docs](https://docs.docker.com/config/containers/resource_constraints/) or use this [guide](https://www.baeldung.com/ops/docker-memory-limit).
|
||||
|
||||
### How can I boost machine learning speed?
|
||||
|
||||
@@ -269,13 +313,17 @@ On a normal machine, 2 or 3 concurrent jobs can probably max the CPU. Beyond thi
|
||||
|
||||
Do not set the job concurrency too high, or you will likely overload the server.
|
||||
|
||||
More detail can be found [here](https://discord.com/channels/979116623879368755/994044917355663450/1174711719994605708)
|
||||
More details can be found [here](https://discord.com/channels/979116623879368755/994044917355663450/1174711719994605708)
|
||||
:::
|
||||
|
||||
### Why is Immich using so much of my CPU?
|
||||
|
||||
When a large amount of assets are uploaded to Immich it makes sense that the CPU and RAM will be heavily used due to machine learning work and creating image thumbnails.
|
||||
Once this process completes, the percentage of CPU usage will drop to around 3-5% usage
|
||||
When a large number of assets are uploaded to Immich, it makes sense that the CPU and RAM will be heavily used for machine learning work and creating image thumbnails.
|
||||
Once this process is completed, CPU usage will drop to around 3-5%.
|
||||
|
||||
### My server shows Server Status Offline | Version Unknown. What can I do?
|
||||
|
||||
You need to enable Websocket on your reverse proxy.
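For nginx, the directives involved are the websocket settings shown in the reverse proxy documentation; a minimal excerpt:

```nginx
location / {
    proxy_pass http://<backend_url>:2283;

    # enable websockets
    proxy_http_version 1.1;
    proxy_set_header Upgrade $http_upgrade;
    proxy_set_header Connection "upgrade";
}
```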
|
||||
|
||||
---
|
||||
|
||||
@@ -288,19 +336,37 @@ Immich components are typically deployed using docker. To see logs for deployed
|
||||
### How can I run Immich as a non-root user?
|
||||
|
||||
You can change the user in the container by setting the `user` argument in `docker-compose.yml` for each service.
|
||||
You may need to add an additional volume to `immich-microservices` that mounts internally to `/usr/src/app/.reverse-geocoding-dump`.
|
||||
You may need to add mount points or docker volumes for the following internal container paths:
|
||||
|
||||
- `immich-machine-learning:/.config`
|
||||
- `immich-machine-learning:/.cache`
|
||||
- `redis:/data`
|
||||
|
||||
The non-root user/group needs read/write access to the volume mounts, including `UPLOAD_LOCATION`.
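As a sketch, assuming uid/gid 1000 and example host paths (adjust both to your setup, and repeat the `user` key for the other services):

```yaml
services:
  immich-machine-learning:
    user: '1000:1000'
    volumes:
      - /home/immich/ml-config:/.config
      - /home/immich/ml-cache:/.cache
  redis:
    user: '1000:1000'
    volumes:
      - /home/immich/redis-data:/data
```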
|
||||
|
||||
### How can I **purge** data from Immich?
|
||||
For a further hardened system, you can add the following block to every container except for `immich_postgres`.
|
||||
|
||||
<details>
|
||||
<summary>docker-compose.yml</summary>
|
||||
|
||||
```yaml
|
||||
security_opt:
|
||||
# Prevent escalation of privileges after the container is started
|
||||
- no-new-privileges:true
|
||||
cap_drop:
|
||||
# Prevent access to raw network traffic
|
||||
- NET_RAW
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### How can I purge data from Immich?
|
||||
|
||||
Data for Immich comes in two forms:
|
||||
|
||||
1. **Metadata** stored in a postgres database, persisted via the `pg_data` volume
|
||||
1. **Metadata**, stored in a Postgres database in the `DB_DATA_LOCATION` folder (previously the `pg_data` Docker volume).
|
||||
2. **Files** (originals, thumbs, profile, etc.), stored in the `UPLOAD_LOCATION` folder; see [asset types and storage locations](/docs/administration/backup-and-restore#asset-types-and-storage-locations) for more info.
|
||||
|
||||
To remove the **Metadata** you can stop Immich and delete the volume.
|
||||
|
||||
:::warning
|
||||
This will destroy your database and reset your instance, meaning that you start from scratch.
|
||||
:::
|
||||
@@ -309,13 +375,16 @@ This will destroy your database and reset your instance, meaning that you start
|
||||
docker compose down -v
|
||||
```
|
||||
|
||||
After removing the containers and volumes, there are a few directories that need to be deleted to reset Immich to a new installation. Once they are deleted, Immich can be started back up and will be a fresh installation.
|
||||
|
||||
- `DB_DATA_LOCATION` contains the database, media info, and settings.
|
||||
- `UPLOAD_LOCATION` contains all the media uploaded to Immich.
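For example (paths are illustrative; substitute the values from your `.env` file and make sure you really intend to erase everything):

```bash
# CAUTION! Permanently deletes all Immich data.
rm -rf ./postgres   # your DB_DATA_LOCATION
rm -rf ./library    # your UPLOAD_LOCATION
```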
|
||||
|
||||
:::note Portainer
|
||||
If you use portainer, bring down the stack in portainer. Go into the volumes section
|
||||
and remove all the volumes related to immcih then restart the stack.
|
||||
and remove all the volumes related to immich then restart the stack.
|
||||
:::
|
||||
|
||||
After removing the containers and volumes, the **Files** should be removed from the `UPLOAD_LOCATION` to provide a clean start.
|
||||
|
||||
### Why does the machine learning service report workers crashing?
|
||||
|
||||
:::note
|
||||
@@ -330,6 +399,47 @@ If it mentions SIGILL (note the lack of a K) or error code 132, it most likely m
|
||||
|
||||
If your version of Immich is below 1.92.0 and the crash occurs after logs about tracing or exporting a model, consider either upgrading or disabling the Tag Objects job.
|
||||
|
||||
### Why does Immich log migration errors on startup?
|
||||
## Database
|
||||
|
||||
Sometimes Immich logs errors such as "duplicate key value violates unique constraint" or "column (...) of relation (...) already exists". Because of Immich's container structure, this error can be seen when both immich and immich-microservices start at the same time and attempt to migrate or create the database structure. Since the database migration is run sequentially and inside of transactions, this error message does not cause harm to your installation of Immich and can safely be ignored. If needed, you can manually restart Immich by running `docker restart immich immich-microservices`.
|
||||
### Why am I getting database ownership errors?
|
||||
|
||||
If you get database errors such as `FATAL: data directory "/var/lib/postgresql/data" has wrong ownership` upon database startup, this is likely due to an issue with your filesystem.
|
||||
NTFS, exFAT, and FAT32 filesystems are not supported. See [here](/docs/install/environment-variables#supported-filesystems) for more details.
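To check which filesystem actually backs the database directory, something like this can help (the path is an example; use your `DB_DATA_LOCATION`):

```bash
# prints the filesystem type backing the database directory
df -T /path/to/DB_DATA_LOCATION
```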
|
||||
|
||||
### How can I verify the integrity of my database?
|
||||
|
||||
If you installed Immich using v1.104.0 or later, you likely have database checksums enabled by default. You can check this by running the following command.
|
||||
A result of `on` means that checksums are enabled.
|
||||
|
||||
<details>
|
||||
<summary>Check if checksums are enabled</summary>
|
||||
|
||||
```bash
|
||||
docker exec -it immich_postgres psql --dbname=immich --username=<DB_USERNAME> --command="show data_checksums"
|
||||
data_checksums
|
||||
----------------
|
||||
on
|
||||
(1 row)
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
If checksums are enabled, you can check the status of the database with the following command. A normal result is all zeroes.
|
||||
|
||||
<details>
|
||||
<summary>Check for database corruption</summary>
|
||||
|
||||
```bash
|
||||
docker exec -it immich_postgres psql --dbname=immich --username=<DB_USERNAME> --command="SELECT datname, checksum_failures, checksum_last_failure FROM pg_stat_database WHERE datname IS NOT NULL"
|
||||
datname | checksum_failures | checksum_last_failure
|
||||
-----------+-------------------+-----------------------
|
||||
postgres | 0 |
|
||||
immich | 0 |
|
||||
template1 | 0 |
|
||||
template0 | 0 |
|
||||
(4 rows)
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
[huggingface]: https://huggingface.co/immich-app
|
||||
|
||||
@@ -15,39 +15,47 @@ Immich saves [file paths in the database](https://github.com/immich-app/immich/d
|
||||
Refer to the official [postgres documentation](https://www.postgresql.org/docs/current/backup.html) for details about backing up and restoring a postgres database.
|
||||
:::
|
||||
|
||||
The recommended way to backup and restore the Immich database is to use the `pg_dumpall` command.
|
||||
The recommended way to backup and restore the Immich database is to use the `pg_dumpall` command. When restoring, you need to delete the `DB_DATA_LOCATION` folder (if it exists) to reset the database.
|
||||
|
||||
:::caution
|
||||
It is not recommended to directly backup the `DB_DATA_LOCATION` folder. Doing so while the database is running can lead to a corrupted backup that cannot be restored.
|
||||
:::
|
||||
|
||||
<Tabs>
|
||||
<TabItem value="Linux system based Backup" label="Linux system based Backup" default>
|
||||
<TabItem value="Linux system" label="Linux system" default>
|
||||
|
||||
```bash title='Bash'
|
||||
docker exec -t immich_postgres pg_dumpall -c -U postgres | gzip > "/path/to/backup/dump.sql.gz"
|
||||
```bash title='Backup'
|
||||
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres | gzip > "/path/to/backup/dump.sql.gz"
|
||||
```
|
||||
|
||||
```bash title='Restore'
|
||||
docker compose down -v # CAUTION! Deletes all Immich data to start from scratch.
|
||||
# rm -rf DB_DATA_LOCATION # CAUTION! Deletes all Immich data to start from scratch.
|
||||
docker compose pull # Update to latest version of Immich (if desired)
|
||||
docker compose create # Create Docker containers for Immich apps without running them.
|
||||
docker start immich_postgres # Start Postgres server
|
||||
sleep 10 # Wait for Postgres server to start up
|
||||
gunzip < "/path/to/backup/dump.sql.gz" | docker exec -i immich_postgres psql -U postgres -d immich # Restore Backup
|
||||
gunzip < "/path/to/backup/dump.sql.gz" \
|
||||
| sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" \
|
||||
| docker exec -i immich_postgres psql --username=postgres # Restore Backup
|
||||
docker compose up -d # Start remainder of Immich apps
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="Windows system based Backup" label="Windows system based Backup">
|
||||
<TabItem value="Windows system (PowerShell)" label="Windows system (PowerShell)">
|
||||
|
||||
```powershell title='Backup'
|
||||
docker exec -t immich_postgres pg_dumpall -c -U postgres > "\path\to\backup\dump.sql"
|
||||
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres > "\path\to\backup\dump.sql"
|
||||
```
|
||||
|
||||
```powershell title='Restore'
|
||||
docker compose down -v # CAUTION! Deletes all Immich data to start from scratch.
|
||||
# Remove-Item -Recurse -Force DB_DATA_LOCATION # CAUTION! Deletes all Immich data to start from scratch.
|
||||
docker compose pull # Update to latest version of Immich (if desired)
|
||||
docker compose create # Create Docker containers for Immich apps without running them.
|
||||
docker start immich_postgres # Start Postgres server
|
||||
sleep 10 # Wait for Postgres server to start up
|
||||
gc "C:\path\to\backup\dump.sql" | docker exec -i immich_postgres psql -U postgres -d immich # Restore Backup
|
||||
gc "C:\path\to\backup\dump.sql" | docker exec -i immich_postgres psql --username=postgres # Restore Backup
|
||||
docker compose up -d # Start remainder of Immich apps
|
||||
```
|
||||
|
||||
@@ -56,6 +64,10 @@ docker compose up -d # Start remainder of Immich apps
|
||||
|
||||
Note that for the database restore to proceed properly, it requires a completely fresh install (i.e. the Immich server has never run since creating the Docker containers). If the Immich app has run, Postgres conflicts may be encountered upon database restoration (relation already exists, violated foreign key constraints, multiple primary keys, etc.).
|
||||
|
||||
:::tip
|
||||
Some deployment methods make it difficult to start the database without also starting the server or microservices. In these cases, you may set the environmental variable `DB_SKIP_MIGRATIONS=true` before starting the services. This will prevent the server from running migrations that interfere with the restore process. Note that both the server and microservices must have this variable set to prevent the migrations from running. Be sure to remove this variable and restart the services after the database is restored.
|
||||
:::
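One way to set this, assuming the default service names from the compose file (remember to remove the variable again after the restore):

```yaml
services:
  immich-server:
    environment:
      - DB_SKIP_MIGRATIONS=true
  immich-microservices:
    environment:
      - DB_SKIP_MIGRATIONS=true
```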
|
||||
|
||||
The database dumps can also be automated (using [this image](https://github.com/prodrigestivill/docker-postgres-backup-local)) by editing the docker compose file to match the following:
|
||||
|
||||
```yaml
|
||||
@@ -63,15 +75,17 @@ services:
|
||||
...
|
||||
backup:
|
||||
container_name: immich_db_dumper
|
||||
image: prodrigestivill/postgres-backup-local
|
||||
image: prodrigestivill/postgres-backup-local:14
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
POSTGRES_HOST: database
|
||||
POSTGRES_DB: ${DB_DATABASE_NAME}
|
||||
POSTGRES_CLUSTER: 'TRUE'
|
||||
POSTGRES_USER: ${DB_USERNAME}
|
||||
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
||||
POSTGRES_DB: ${DB_DATABASE_NAME}
|
||||
SCHEDULE: "@daily"
|
||||
POSTGRES_EXTRA_OPTS: '--clean --if-exists'
|
||||
BACKUP_DIR: /db_dumps
|
||||
volumes:
|
||||
- ./db_dumps:/db_dumps
|
||||
@@ -82,17 +96,29 @@ services:
|
||||
Then you can restore with the same command but pointed at the latest dump.
|
||||
|
||||
```bash title='Automated Restore'
|
||||
gunzip < db_dumps/last/immich-latest.sql.gz | docker exec -i immich_postgres psql -U postgres -d immich
|
||||
gunzip < db_dumps/last/immich-latest.sql.gz \
|
||||
| sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" \
|
||||
| docker exec -i immich_postgres psql --username=postgres
|
||||
```
|
||||
|
||||
:::note
|
||||
If you see the error `ERROR: type "earth" does not exist`, or you have problems with Reverse Geocoding after a restore, add the following `sed` fragment to your restore command.
|
||||
|
||||
Example: `gunzip < "/path/to/backup/dump.sql.gz" | sed "s/SELECT pg_catalog.set_config('search_path', '', false);/SELECT pg_catalog.set_config('search_path', 'public, pg_catalog', true);/g" | docker exec -i immich_postgres psql --username=postgres`
|
||||
:::
|
||||
|
||||
## Filesystem
|
||||
|
||||
Immich stores two types of content in the filesystem: (1) original, unmodified content, and (2) generated content. Only the original content needs to be backed-up, which includes the following folders:
|
||||
Immich stores two types of content in the filesystem: (1) original, unmodified assets (photos and videos), and (2) generated content. Only the original content needs to be backed-up, which is stored in the following folders:
|
||||
|
||||
1. `UPLOAD_LOCATION/library`
|
||||
2. `UPLOAD_LOCATION/upload`
|
||||
3. `UPLOAD_LOCATION/profile`
|
||||
|
||||
:::caution
|
||||
If you moved some of these folders onto a different storage device, such as `profile/`, make sure to adjust the backup paths to match your setup.
|
||||
:::
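As an illustration (paths are examples only), an incremental copy of just those folders with rsync could look like:

```bash
# copy only the original content; --relative keeps the folder layout after the /./ marker
rsync -a --relative \
  /path/to/UPLOAD_LOCATION/./library \
  /path/to/UPLOAD_LOCATION/./upload \
  /path/to/UPLOAD_LOCATION/./profile \
  /path/to/backup/immich-files/
```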
|
||||
|
||||
### Asset Types and Storage Locations
|
||||
|
||||
Some storage locations are impacted by the Storage Template. See below for more details.
|
||||
@@ -101,7 +127,8 @@ Some storage locations are impacted by the Storage Template. See below for more
|
||||
<TabItem value="Storage Template Off (Default)." label="Storage Template Off (Default)." default>
|
||||
|
||||
:::note
|
||||
`UPLOAD_LOCATION/library` folder is not used by default on new machines running version 1.92.0. These are if the system administrator activated the storage template engine, for [more info](https://github.com/immich-app/immich/releases/tag/v1.92.0#:~:text=the%20partner%E2%80%99s%20assets.-,Hardening%20storage%20template).
|
||||
The `UPLOAD_LOCATION/library` folder is not used by default on new machines running version 1.92.0. It is used only if the system administrator has activated the storage template engine;
|
||||
for more info, read the [release notes](https://github.com/immich-app/immich/releases/tag/v1.92.0#:~:text=the%20partner%E2%80%99s%20assets.-,Hardening%20storage%20template).
|
||||
:::
|
||||
|
||||
**1. User-Specific Folders:**
|
||||
@@ -113,16 +140,16 @@ Some storage locations are impacted by the Storage Template. See below for more
|
||||
|
||||
- **Source Assets:**
|
||||
- Original assets uploaded through the browser interface & mobile & CLI.
|
||||
- Stored in `/library/upload/<userID>`.
|
||||
- Stored in `UPLOAD_LOCATION/upload/<userID>`.
|
||||
- **Avatar Images:**
|
||||
- User profile images.
|
||||
- Stored in `/library/profile/<userID>`.
|
||||
- Stored in `UPLOAD_LOCATION/profile/<userID>`.
|
||||
- **Thumbs Images:**
|
||||
- Preview images (blurred, small, large) for each asset and thumbnails for recognized faces.
|
||||
- Stored in `/library/thumbs/<userID>`.
|
||||
- Preview images (small thumbnails and large previews) for each asset and thumbnails for recognized faces.
|
||||
- Stored in `UPLOAD_LOCATION/thumbs/<userID>`.
|
||||
- **Encoded Assets:**
|
||||
- By default, unless otherwise specified re-encoded video assets for wider compatibility.
|
||||
- Stored in `/library/encoded-video/<userID>`.
|
||||
- Videos that have been re-encoded from the original for wider compatibility. The original is not removed.
|
||||
- Stored in `UPLOAD_LOCATION/encoded-video/<userID>`.
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="Storage Template On" label="Storage Template On">
|
||||
@@ -130,34 +157,34 @@ Some storage locations are impacted by the Storage Template. See below for more
|
||||
:::note
|
||||
If you choose to activate the storage template engine, it will move all assets to `UPLOAD_LOCATION/library/<userID>`.
|
||||
|
||||
When you turn off the storage template engine, it will leave the assets in `UPLOAD_LOCATION/library/<userID>` and will not return them to `/library/upload`.
|
||||
**New assets** will be saved to `/library/upload`.
|
||||
When you turn off the storage template engine, it will leave the assets in `UPLOAD_LOCATION/library/<userID>` and will not return them to `UPLOAD_LOCATION/upload`.
|
||||
**New assets** will be saved to `UPLOAD_LOCATION/upload`.
|
||||
:::
|
||||
|
||||
**1. User-Specific Folders:**
|
||||
|
||||
- Each user has a unique string representing them.
|
||||
- The main user is "Admin" (but only for `UPLOAD_LOCATION/library`)
|
||||
- Other users have different string identifiers.
|
||||
- You can find your user ID in Account Account Settings -> Account -> User ID.
|
||||
- The administrator can set a Storage Label for a user, which will be used instead of `<userID>` for the `library/` folder.
|
||||
- The Admin has a default storage label of `admin`.
|
||||
- You can find your user ID and Storage Label in Account Settings -> Account -> User ID.
|
||||
|
||||
**2. Asset Types and Storage Locations:**
|
||||
|
||||
- **Source Assets:**
|
||||
- Original assets uploaded through the browser interface & mobile & CLI.
|
||||
- Original assets uploaded through the browser interface, mobile, and CLI.
|
||||
- Stored in `UPLOAD_LOCATION/library/<userID>`.
|
||||
- **Avatar Images:**
|
||||
- User profile images.
|
||||
- Stored in `/library/profile/<userID>`.
|
||||
- Stored in `UPLOAD_LOCATION/profile/<userID>`.
|
||||
- **Thumbs Images:**
|
||||
- Preview images (blurred, small, large) for each asset and thumbnails for recognized faces.
|
||||
- Stored in `/library/thumbs/<userID>`.
|
||||
  - Stored in `UPLOAD_LOCATION/thumbs/<userID>`.
|
||||
- **Encoded Assets:**
|
||||
- By default, unless otherwise specified re-encoded video assets for wider compatibility .
|
||||
- Stored in `/library/encoded-video/<userID>`.
|
||||
- Videos that have been re-encoded from the original for wider compatibility. The original is not removed.
|
||||
- Stored in `UPLOAD_LOCATION/encoded-video/<userID>`.
|
||||
- **Files in Upload Queue (Mobile):**
|
||||
- Files uploaded through mobile apps.
|
||||
- Temporarily located in `/library/upload/<userID>`.
|
||||
- Temporarily located in `UPLOAD_LOCATION/upload/<userID>`.
|
||||
- Transferred to `UPLOAD_LOCATION/library/<userID>` upon successful upload.
|
||||
|
||||
</TabItem>
|
||||
|
||||
BIN
docs/docs/administration/img/google-example.webp
Normal file
|
After Width: | Height: | Size: 11 KiB |
BIN
docs/docs/administration/img/immich-google-example.webp
Normal file
|
After Width: | Height: | Size: 113 KiB |
BIN
docs/docs/administration/img/repair-page-1.png
Normal file
|
After Width: | Height: | Size: 68 KiB |
BIN
docs/docs/administration/img/repair-page.png
Normal file
|
After Width: | Height: | Size: 54 KiB |
BIN
docs/docs/administration/img/server-stats.png
Normal file
|
After Width: | Height: | Size: 60 KiB |
@@ -1,9 +1,13 @@
|
||||
# Jobs
|
||||
|
||||
Several Immich functionalities are implemented as jobs, which run in the background. To view the status of a job navigate to the Administration Screen, and then the `Jobs` page.
|
||||
The `immich-server` responds to API requests for data and files for the web and mobile app. To do this quickly and reliably, it offloads most other work to `immich-microservices` in the form of _jobs_. Simply put, a job is a request to process data in the background. Jobs are picked up automatically by microservices containers.
|
||||
|
||||

|
||||
When a new asset is uploaded it kicks off a series of jobs, which include metadata extraction, thumbnail generation, machine learning tasks, and storage template migration, if enabled. To view the status of a job navigate to the Administration -> Jobs page.
|
||||
|
||||
Additionally, some jobs run on a schedule, which is every night at midnight. This schedule, with the exception of [External Libraries](/docs/features/libraries) scanning, cannot be changed.
|
||||
|
||||
:::info
|
||||
Storage Migration job can be run after changing the [Storage Template](/docs/administration/storage-template.mdx), in order to apply the change to the existing library.
|
||||
:::
|
||||
|
||||
<img src={require('./img/admin-jobs.png').default} width="80%" title="Admin jobs" />
|
||||
|
||||
@@ -52,8 +52,8 @@ Before enabling OAuth in Immich, a new client application needs to be configured
|
||||
|
||||
Hostname
|
||||
|
||||
- `https://immich.example.com/auth/login`)
|
||||
- `https://immich.example.com/user-settings`)
|
||||
- `https://immich.example.com/auth/login`
|
||||
- `https://immich.example.com/user-settings`
|
||||
|
||||
## Enable OAuth
|
||||
|
||||
@@ -67,14 +67,20 @@ Once you have a new OAuth client application configured, Immich can be configure
|
||||
| Client Secret | string | (required) | Required. Client Secret (previous step) |
|
||||
| Scope | string | openid email profile | Full list of scopes to send with the request (space delimited) |
|
||||
| Signing Algorithm | string | RS256 | The algorithm used to sign the id token (examples: RS256, HS256) |
|
||||
| Storage Label Claim | string | preferred_username | Claim mapping for the user's storage label |
|
||||
| Storage Quota Claim | string | immich_quota | Claim mapping for the user's storage |
|
||||
| Storage Label Claim | string | preferred_username | Claim mapping for the user's storage label**¹** |
|
||||
| Storage Quota Claim | string | immich_quota | Claim mapping for the user's storage**¹** |
|
||||
| Default Storage Quota (GiB) | number | 0 | Default quota for user without storage quota claim (Enter 0 for unlimited quota) |
|
||||
| Button Text | string | Login with OAuth | Text for the OAuth button on the web |
|
||||
| Auto Register | boolean | true | When true, will automatically register a user the first time they sign in |
|
||||
| [Auto Launch](#auto-launch) | boolean | false | When true, will skip the login page and automatically start the OAuth login process |
|
||||
| [Mobile Redirect URI Override](#mobile-redirect-uri) | URL | (empty) | Http(s) alternative mobile redirect URI |
|
||||
|
||||
:::note Claim Options [1]
|
||||
|
||||
Claim is only used on user creation and not synchronized after that.
|
||||
|
||||
:::
|
||||
|
||||
:::info
|
||||
The Issuer URL should look something like the following, and return a valid json document.
|
||||
|
||||
@@ -104,8 +110,44 @@ Immich has a route (`/api/oauth/mobile-redirect`) that is already configured to
|
||||
|
||||
## Example Configuration
|
||||
|
||||
<details>
|
||||
<summary>Authentik Example</summary>
|
||||
|
||||
### Authentik Example
|
||||
|
||||
Here's an example of OAuth configured for Authentik:
|
||||
|
||||

|
||||
<img src={require('./img/oauth-settings.png').default} title="OAuth settings" />
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Google Example</summary>
|
||||
|
||||
### Google Example
|
||||
|
||||
Configuration of Authorised redirect URIs (Google Console)
|
||||
|
||||
<img src={require('./img/google-example.webp').default} width='50%' title="Authorised redirect URIs" />
|
||||
|
||||
Configuration of OAuth in System Settings
|
||||
|
||||
| Setting | Value |
|
||||
| ---------------------------- | ------------------------------------------------------------------------------------------------------ |
|
||||
| Issuer URL | [https://accounts.google.com](https://accounts.google.com) |
|
||||
| Client ID | 7\***\*\*\*\*\*\*\***\*\*\***\*\*\*\*\*\*\***vuls.apps.googleusercontent.com |
|
||||
| Client Secret | G\***\*\*\*\*\*\*\***\*\*\***\*\*\*\*\*\*\***OO |
|
||||
| Scope | openid email profile |
|
||||
| Signing Algorithm | RS256 |
|
||||
| Storage Label Claim | preferred_username |
|
||||
| Storage Quota Claim | immich_quota |
|
||||
| Default Storage Quota (GiB) | 0 (0 for unlimited quota) |
|
||||
| Button Text | Sign in with Google (optional) |
|
||||
| Auto Register | Enabled (optional) |
|
||||
| Auto Launch | Enabled |
|
||||
| Mobile Redirect URI Override | Enabled (required) |
|
||||
| Mobile Redirect URI | [https://demo.immich.app/api/oauth/mobile-redirect](https://demo.immich.app/api/oauth/mobile-redirect) |
|
||||
|
||||
</details>
|
||||
|
||||
[oidc]: https://openid.net/connect/
|
||||
|
||||
@@ -1,32 +0,0 @@
|
||||
# Password Login
|
||||
|
||||
An overview of password login and related settings for Immich.
|
||||
|
||||
## Enable/Disable
|
||||
|
||||
Immich supports password login, which is enabled by default. The preferred way to disable it is via the [Administration Page](#administration-page), although it can also be changed via a [Server Command](#server-command) as well.
|
||||
|
||||
### Administration Page
|
||||
|
||||
To toggle the password login setting via the web, navigate to the "Administration", expand "Password Authentication", toggle the "Enabled" switch, and press "Save".
|
||||
|
||||

|
||||
|
||||
### Server Command
|
||||
|
||||
There are two [Server Commands](/docs/administration/server-commands.md) for password login:
|
||||
|
||||
1. `enable-password-login`
|
||||
2. `disable-password-login`
|
||||
|
||||
See [Server Commands](/docs/administration/server-commands.md) for more details about how to run them.
|
||||
|
||||
## Password Reset
|
||||
|
||||
### Admin
|
||||
|
||||
To reset the administrator password, use the `reset-admin-password` [Server Command](/docs/administration/server-commands.md).
|
||||
|
||||
### User
|
||||
|
||||
Immich does not currently support self-service password reset. However, the administration can reset passwords for other users. See [User Management: Password Reset](/docs/administration/user-management.mdx#password-reset) for more information about how to do this.
|
||||
69
docs/docs/administration/postgres-standalone.md
Normal file
@@ -0,0 +1,69 @@
|
||||
# Pre-existing Postgres
|
||||
|
||||
While not officially recommended, it is possible to run Immich using a pre-existing Postgres server. To use this setup, you should have a baseline level of familiarity with Postgres and the Linux command line. If you do not have these, we recommend using the default setup with a dedicated Postgres container.
|
||||
|
||||
By default, Immich expects superuser permission on the Postgres database and requires certain extensions to be installed. This guide outlines the steps required to prepare a pre-existing Postgres server to be used by Immich.
|
||||
|
||||
:::tip
|
||||
Running with a pre-existing Postgres server can unlock powerful administrative features, including logical replication and streaming write-ahead log backups using programs like pgBackRest or Barman.
|
||||
:::
|
||||
|
||||
## Prerequisites
|
||||
|
||||
You must install pgvecto.rs into your instance of Postgres using their [instructions][vectors-install]. After installation, add `shared_preload_libraries = 'vectors.so'` to your `postgresql.conf`. If you already have some `shared_preload_libraries` set, you can separate each extension with a comma. For example, `shared_preload_libraries = 'pg_stat_statements, vectors.so'`.
|
||||
|
||||
:::note
|
||||
Immich is known to work with Postgres versions 14, 15, and 16. Earlier versions are unsupported.
|
||||
|
||||
Make sure the installed version of pgvecto.rs is compatible with your version of Immich. For example, if your Immich version uses the dedicated database image `tensorchord/pgvecto-rs:pg14-v0.2.1`, you must install pgvecto.rs `>= 0.2.1, < 0.3.0`.
|
||||
:::
|
||||
|
||||
## Specifying the connection URL
|
||||
|
||||
You can connect to your pre-existing Postgres server by setting the `DB_URL` environment variable in the `.env` file.
|
||||
|
||||
```
|
||||
DB_URL='postgresql://immichdbusername:immichdbpassword@postgreshost:postgresport/immichdatabasename'
|
||||
|
||||
# require a SSL connection to Postgres
|
||||
# DB_URL='postgresql://immichdbusername:immichdbpassword@postgreshost:postgresport/immichdatabasename?sslmode=require'
|
||||
|
||||
# require a SSL connection, but don't enforce checking the certificate name
|
||||
# DB_URL='postgresql://immichdbusername:immichdbpassword@postgreshost:postgresport/immichdatabasename?sslmode=require&sslmode=no-verify'
|
||||
```
|
||||
|
||||
## With superuser permission
|
||||
|
||||
Typically Immich expects superuser permission in the database, which you can grant by running `ALTER USER <immichdbusername> WITH SUPERUSER;` at the `psql` console. If you prefer not to grant superuser permissions, follow the instructions in the next section.
|
||||
|
||||
## Without superuser permission
|
||||
|
||||
:::caution
|
||||
This method is recommended for **advanced users only** and often requires manual intervention when updating Immich.
|
||||
:::
|
||||
|
||||
Immich can run without superuser permissions by following the below instructions at the `psql` prompt to prepare the database.
|
||||
|
||||
```sql title="Set up Postgres for Immich"
|
||||
CREATE DATABASE <immichdatabasename>;
|
||||
\c <immichdatabasename>
|
||||
BEGIN;
|
||||
ALTER DATABASE <immichdatabasename> OWNER TO <immichdbusername>;
|
||||
CREATE EXTENSION vectors;
|
||||
CREATE EXTENSION earthdistance CASCADE;
|
||||
ALTER DATABASE <immichdatabasename> SET search_path TO "$user", public, vectors;
|
||||
ALTER SCHEMA vectors OWNER TO <immichdbusername>;
|
||||
COMMIT;
|
||||
```
|
||||
|
||||
### Updating pgvecto.rs
|
||||
|
||||
When installing a new version of pgvecto.rs, you will need to manually update the extension by connecting to the Immich database and running `ALTER EXTENSION vectors UPDATE;`.
|
||||
|
||||
### Common errors
|
||||
|
||||
#### Permission denied for view
|
||||
|
||||
If you get the error `driverError: error: permission denied for view pg_vector_index_stat`, you can fix this by connecting to the Immich database and running `GRANT SELECT ON TABLE pg_vector_index_stat TO <immichdbusername>;`.
|
||||
|
||||
[vectors-install]: https://docs.pgvecto.rs/getting-started/installation.html
|
||||
27
docs/docs/administration/repair-page.md
Normal file
@@ -0,0 +1,27 @@
|
||||
# Repair Page
|
||||
|
||||
The repair page is designed to give the system administrator information about files that are not tracked and about offline paths.
|
||||
|
||||
## Natural State
|
||||
|
||||
In this situation, everything is in its place and there is no problem that the system administrator should be aware of.
|
||||
|
||||
<img src={require('./img/repair-page.png').default} title="server statistic" />
|
||||
|
||||
## Any Other Situation
|
||||
|
||||
:::note RAM Usage
|
||||
Several users report a situation where the page fails to load. To solve this problem, try allocating more RAM to Immich; if the problem continues, bypass the reverse proxy while loading the page.
|
||||
:::
|
||||
|
||||
In any other situation, there are 3 different options that can appear:
|
||||
|
||||
- MATCHES - These files are matched by their checksums.
|
||||
|
||||
- OFFLINE PATHS - These files are the result of manually deleting files from Immich or of a failed file move in the past (losing track of a file).
|
||||
|
||||
- UNTRACKED FILES - These files are not tracked by the application. They can be the result of failed moves or interrupted uploads, or they may have been left behind due to a bug.
|
||||
|
||||
In addition, you can download the information from the page, select everything (to re-check the hashes), and correct the problem if a matching checksum is found.
|
||||
|
||||
<img src={require('./img/repair-page-1.png').default} title="server statistic" />
|
||||
@@ -1,29 +1,41 @@
|
||||
# Reverse Proxy
|
||||
|
||||
Users can deploy a custom reverse proxy that forwards requests to Immich. This way, the reverse proxy can handle TLS termination, load balancing, or other advanced features. All reverse proxies between Immich and the user must forward all headers and set the `Host`, `X-Forwarded-Host`, `X-Forwarded-Proto` and `X-Forwarded-For` headers to their appropriate values. Additionally, your reverse proxy should allow for big enough uploads. By following these practices, you ensure that all custom reverse proxies are fully compatible with Immich.
|
||||
Users can deploy a custom reverse proxy that forwards requests to Immich. This way, the reverse proxy can handle TLS termination, load balancing, or other advanced features. All reverse proxies between Immich and the user must forward all headers and set the `Host`, `X-Real-IP`, `X-Forwarded-Proto` and `X-Forwarded-For` headers to their appropriate values. Additionally, your reverse proxy should allow for big enough uploads. By following these practices, you ensure that all custom reverse proxies are fully compatible with Immich.
|
||||
|
||||
:::note
|
||||
The Repair page can take a long time to load. To avoid server timeouts or errors, we recommend specifying a timeout of at least 10 minutes on your proxy server.
|
||||
:::
|
||||
|
||||
### Nginx example config
|
||||
|
||||
Below is an example config for nginx. Make sure to include `client_max_body_size 50000M;` also in a `http` block in `/etc/nginx/nginx.conf`.
|
||||
Below is an example config for nginx. Make sure to set `public_url` to the front-facing URL of your instance, and `backend_url` to the path of the Immich server.
|
||||
|
||||
```nginx
|
||||
server {
|
||||
server_name <snip>
|
||||
server_name <public_url>;
|
||||
|
||||
# allow large file uploads
|
||||
client_max_body_size 50000M;
|
||||
|
||||
location / {
|
||||
proxy_pass http://<snip>:2283;
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
# Set headers
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
|
||||
# http://nginx.org/en/docs/http/websocket.html
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
proxy_redirect off;
|
||||
# enable websockets: http://nginx.org/en/docs/http/websocket.html
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection "upgrade";
|
||||
proxy_redirect off;
|
||||
|
||||
# set timeout
|
||||
proxy_read_timeout 600s;
|
||||
proxy_send_timeout 600s;
|
||||
send_timeout 600s;
|
||||
|
||||
location / {
|
||||
proxy_pass http://<backend_url>:2283;
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -42,15 +54,13 @@ immich.example.org {
|
||||
|
||||
Below is an example Apache2 site configuration.
|
||||
|
||||
```
|
||||
```ApacheConf
|
||||
<VirtualHost *:80>
|
||||
ServerName <snip>
|
||||
ProxyRequests Off
|
||||
# set timeout in seconds
|
||||
ProxyPass / http://127.0.0.1:2283/ timeout=600 upgrade=websocket
|
||||
ProxyPassReverse / http://127.0.0.1:2283/
|
||||
ProxyPreserveHost On
|
||||
|
||||
</VirtualHost>
|
||||
```
|
||||
|
||||
**timeout:** is measured in seconds, and is particularly useful when long operations are triggered (e.g. Repair), so the server doesn't return an error.
|
||||
|
||||
13
docs/docs/administration/server-stats.md
Normal file
@@ -0,0 +1,13 @@
|
||||
# Server Stats
|
||||
|
||||
Server statistics show the total number of videos and photos, as well as storage usage per user.
|
||||
|
||||
:::info
|
||||
If a storage quota has been defined for the user, the usage number will be displayed as a percentage of the total storage quota allocated to them.
|
||||
:::
|
||||
|
||||
:::info External library
|
||||
External libraries are not included in the storage quota.
|
||||
:::
|
||||
|
||||
<img src={require('./img/server-stats.png').default} title="server statistic" />
|
||||
171
docs/docs/administration/system-settings.md
Normal file
@@ -0,0 +1,171 @@
|
||||
# System Settings
|
||||
|
||||
On the system settings page, the administrator can manage global settings for the Immich instance.
|
||||
|
||||
:::note
|
||||
Viewing and modifying the system settings is restricted to the Administrator.
|
||||
:::
|
||||
|
||||
:::tip
|
||||
You can always return to the default settings by clicking the `Reset to default` button.
|
||||
:::
|
||||
|
||||
## Job Settings
|
||||
|
||||
Using these settings, you can determine the amount of work that will run concurrently for each task in microservices. Some tasks can be set to higher values on computers with powerful hardware and storage with good I/O capabilities.
|
||||
|
||||
With higher concurrency, the host will work on more assets in parallel.
|
||||
This improves throughput, not latency; for example, it will make Smart Search jobs process more quickly, but it won't make searching faster.
|
||||
|
||||
Keep in mind that jobs like Smart Search, Face Detection, Facial Recognition, and Transcode Videos require a **lot** of processing power, so do not set the concurrency too high or you will likely overload the server.
|
||||
|
||||
:::info Facial Recognition Concurrency
|
||||
The Facial Recognition Concurrency value cannot be changed because
|
||||
[DBSCAN](https://www.youtube.com/watch?v=RDZUdRSDOok) is traditionally a sequential algorithm; parallel implementations exist, but Immich's implementation isn't parallel.
|
||||
:::
|
||||
|
||||
## External Library
|
||||
|
||||
### Library watching (EXPERIMENTAL)
|
||||
|
||||
External libraries can automatically import changed files without a full rescan. Immich will import a file whenever the operating system reports a change to it. If your photos are mounted over the network, this does not work.
|
||||
|
||||
### Periodic Scanning
|
||||
|
||||
You can define a custom interval to trigger an external library rescan under Administration -> Settings -> Library.
|
||||
You can set the scanning interval using a preset or the cron format. For more information, refer to e.g. [Crontab Guru](https://crontab.guru/).
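For example, these are valid cron expressions for the rescan interval (the values are arbitrary illustrations):

```
0 2 * * *      # every day at 02:00
0 */6 * * *    # every 6 hours
0 3 * * 1      # every Monday at 03:00
```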
|
||||
|
||||
## Logging
|
||||
|
||||
The default Immich log level is `Log` (commonly known as `Info`). The Immich administrator can choose a higher or lower log level according to personal preference or as requested by the Immich support team.
|
||||
|
||||
## Machine Learning Settings
|
||||
|
||||
Through these settings, you can manage everything related to machine learning in Immich, from remote machine learning to the models and their parameters.
|
||||
You can choose to disable a certain type of machine learning, for example smart search or facial recognition.
|
||||
|
||||
### Smart Search
|
||||
|
||||
The smart search settings let you choose which [CLIP](https://openai.com/research/clip) model the search tool uses; the model [can be changed](/docs/FAQ#can-i-use-a-custom-clip-model). Different models may give better results but may also consume more processing power. When changing a model, you must re-run the
|
||||
Smart Search job on all images to fully apply the change.
|
||||
|
||||
:::info Internet connection
|
||||
Changing models requires a connection to the Internet to download the model.
|
||||
After downloading, there is no need for Immich to connect to the network,
|
||||
unless version checking has been enabled in the settings.
|
||||
:::
|
||||
|
||||
### Facial Recognition
|
||||
|
||||
Under these settings, you can change the facial recognition settings.
|
||||
Editable settings:
|
||||
|
||||
- **Facial Recognition Model -** Models are listed in descending order of size. Larger models are slower and use more memory, but produce better results. Note that you must re-run the Face Detection job for all images upon changing a model.
|
||||
- **Min Detection Score -** Minimum confidence score for a face to be detected from 0-1. Lower values will detect more faces but may result in false positives.
|
||||
- **Max Recognition Distance -** Maximum distance between two faces to be considered the same person, ranging from 0-2. Lowering this can prevent labeling two people as the same person, while raising it can prevent labeling the same person as two different people. Note that it is easier to merge two people than to split one person in two, so err on the side of a lower threshold when possible.
|
||||
- **Min Recognized Faces -** The minimum number of recognized faces for a person to be created (AKA: Core face). Increasing this makes Facial Recognition more precise at the cost of increasing the chance that a face is not assigned to a person.
|
||||
|
||||
:::info
|
||||
When changing the values of Min Detection Score, Max Recognition Distance, or Min Recognized Faces,
|
||||
you will have to re-run **only** the FACIAL RECOGNITION - ALL job.
|
||||
|
||||
If you replace the Facial Recognition Model, you will have to run the job FACE DETECTION - ALL.
|
||||
:::
|
||||
|
||||
:::tip identical twins
|
||||
If you have twins, you might want to lower the Max Recognition Distance value; decreasing it a **bit** can help Immich distinguish between them.
|
||||
:::
|
||||
|
||||
## Map & GPS Settings
|
||||
|
||||
### Map Settings
|
||||
|
||||
In these settings, you can change the appearance of the map in night and day modes according to your personal preference, within the supported options.
|
||||
The map style can be adjusted via [OpenMapTiles](https://openmaptiles.org/styles/), for example.
|
||||
|
||||
### Reverse Geocoding Settings
|
||||
|
||||
Immich supports [Reverse Geocoding](/docs/features/reverse-geocoding) using data from the [GeoNames](https://www.geonames.org/) geographical database.
|
||||
|
||||
## OAuth Authentication
|
||||
|
||||
Immich supports OAuth Authentication. Read more about this feature and its configuration [here](/docs/administration/oauth).
|
||||
|
||||
## Password Authentication
|
||||
|
||||
The administrator can choose to disable login with username and password for the entire instance. This means that **no one**, including the system administrator, will be able to log in using this method. If [OAuth Authentication](/docs/administration/oauth) is also disabled, no users will be able to log in using **any** method. Changing this setting does not affect existing sessions, just new login attempts.
|
||||
|
||||
:::tip
|
||||
You can always use the [Server CLI](/docs/administration/server-commands) to re-enable password login.
|
||||
:::
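As a sketch, with the default Docker setup this would be run inside the server container (the container name and the `immich-admin` entrypoint are assumed from the Server Commands page):

```bash
docker exec -it immich_server immich-admin enable-password-login
```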
|
||||
|
||||
## Server Settings
|
||||
|
||||
### External Domain
|
||||
|
||||
When set, this domain will override the domain name used when viewing and copying a shared link.
|
||||
|
||||
### Welcome Message
|
||||
|
||||
The administrator can set a custom message on the login screen (the message will be displayed to all users).
|
||||
|
||||
## Storage Template
|
||||
|
||||
Immich supports a custom [Storage Template](/docs/administration/storage-template). Learn more about this feature and its configuration [here](/docs/administration/storage-template).
|
||||
|
||||
## Theme Settings
|
||||
|
||||
You can write custom CSS that will get loaded in the web application for all users. This enables administrators to change fonts, colors, and other styles.
|
||||
|
||||
For example:
|
||||
|
||||
```css title='Custom CSS'
|
||||
p {
|
||||
color: green;
|
||||
}
|
||||
```
|
||||
|
||||
## Thumbnail Settings
|
||||
|
||||
By default, Immich creates three thumbnails for each asset:
|
||||
Blurred (thumbhash), Small (webp), and Large (jpeg). Using these settings, you can change the quality of the thumbnail files that are created.
|
||||
|
||||
**Small thumbnail resolution**
|
||||
Used when viewing groups of photos (main timeline, album view, etc.). Higher resolutions can preserve more detail but take longer to encode, have larger file sizes, and can reduce app responsiveness.
|
||||
|
||||
**Large thumbnail resolution**
|
||||
Used when viewing a single photo and for machine learning. Higher resolutions can preserve more detail but take longer to encode, have larger file sizes, and can reduce app responsiveness.
|
||||
|
||||
**Quality**
|
||||
Thumbnail quality from 1-100. Higher is better for quality but produces larger files.
|
||||
|
||||
**Prefer wide gamut**
|
||||
Use Display P3 for thumbnails. This better preserves the vibrance of images with wide color spaces, but images may appear different on old devices with an old browser version. sRGB images are kept as sRGB to avoid color shifts.
|
||||
|
||||
:::tip
|
||||
The default resolution for Large thumbnails can be lowered from 1440p (default) to 1080p or 720p to save storage space.
|
||||
:::
|
||||
|
||||
## Trash Settings
|
||||
|
||||
The system administrator can enable a trash for deleted files. Deleted files remain in the trash for 30 days (default), or for the period defined by the system administrator, before they are permanently removed.
|
||||
|
||||
The trash can be disabled; however, this is not recommended, since files deleted in the future will then be permanently deleted immediately.
|
||||
|
||||
:::tip Keyboard shortcut for permanent deletion
|
||||
You can select assets and press Ctrl + Del from the timeline for quick permanent deletion without the trash option.
|
||||
:::
|
||||
|
||||
## User Settings
|
||||
|
||||
### Delete delay
|
||||
|
||||
The system administrator can delete users through the administration panel, either immediately or after a delay (7 days by default). This action permanently deletes a user's account and assets. The user deletion job runs at midnight to check for users that are ready for deletion. Changes to this setting will be evaluated at the next execution.
|
||||
|
||||
## Version Check
|
||||
|
||||
When this option is enabled, the `immich-server` will periodically make requests to GitHub to check for new releases.
|
||||
|
||||
## Video Transcoding Settings
|
||||
|
||||
The system administrator can define parameters according to which video files will be converted to different formats (depending on the settings). The settings can be changed in depth; to learn more about the terminology used here, refer to the FFmpeg documentation for the [H.264](https://trac.ffmpeg.org/wiki/Encode/H.264), [HEVC](https://trac.ffmpeg.org/wiki/Encode/H.265), and [VP9](https://trac.ffmpeg.org/wiki/Encode/VP9) codecs.
|
||||
12
docs/docs/community-guides.mdx
Normal file
@@ -0,0 +1,12 @@
|
||||
# Community Guides
|
||||
|
||||
This page lists community guides that are written around Immich, but not officially supported by the development team.
|
||||
|
||||
:::warning
|
||||
This list comes with no guarantees about security, performance, reliability, or accuracy. Use at your own risk.
|
||||
:::
|
||||
|
||||
import CommunityGuides from '../src/components/community-guides.tsx';
|
||||
import React from 'react';
|
||||
|
||||
<CommunityGuides />
|
||||
12
docs/docs/community-projects.mdx
Normal file
@@ -0,0 +1,12 @@
|
||||
# Community Projects
|
||||
|
||||
This page lists community projects that are built around Immich, but not officially supported by the development team.
|
||||
|
||||
:::warning
|
||||
This list comes with no guarantees about security, performance, reliability, or accuracy. Use at your own risk.
|
||||
:::
|
||||
|
||||
import CommunityProjects from '../src/components/community-projects.tsx';
|
||||
import React from 'react';
|
||||
|
||||
<CommunityProjects />
|
||||
@@ -1,14 +1,14 @@
|
||||
# Database Migrations
|
||||
|
||||
After making any changes in the `server/src/infra/entities`, a database migration need to run in order to register the changes in the database. Follow the steps below to create a new migration.
|
||||
After making any changes in `server/src/entities`, a database migration needs to run in order to register the changes in the database. Follow the steps below to create a new migration.
|
||||
|
||||
1. Run the command
|
||||
|
||||
```bash
|
||||
npm run typeorm:migrations:generate ./src/infra/<migration-name>
|
||||
npm run typeorm:migrations:generate <migration-name>
|
||||
```
|
||||
|
||||
2. Check if the migration file makes sense.
|
||||
3. Move the migration file to folder `./server/src/infra/migrations` in your code editor.
|
||||
3. Move the migration file to folder `./server/src/migrations` in your code editor.
|
||||
|
||||
The server will automatically detect `*.ts` file changes and restart. Part of the server start-up process includes running any new migrations, so it will be applied immediately.
|
||||
|
||||
@@ -9,7 +9,12 @@ When contributing code through a pull request, please check the following:
|
||||
- [ ] `npm run check:svelte` (Type checking via SvelteKit)
|
||||
- [ ] `npm test` (unit tests)
|
||||
|
||||
:::tip
|
||||
## Documentation
|
||||
|
||||
- [ ] `npm run format` (formatting via Prettier)
|
||||
- [ ] Update the `_redirects` file if you have renamed a page or removed it from the documentation.
|
||||
|
||||
:::tip AIO
|
||||
Run all web checks with `npm run check:all`
|
||||
:::
|
||||
|
||||
@@ -20,10 +25,14 @@ Run all web checks with `npm run check:all`
|
||||
- [ ] `npm run check` (Type checking via `tsc`)
|
||||
- [ ] `npm test` (unit tests)
|
||||
|
||||
:::tip
|
||||
:::tip AIO
|
||||
Run all server checks with `npm run check:all`
|
||||
:::
|
||||
|
||||
:::info Auto Fix
|
||||
You can use `npm run __:fix` to potentially correct some issues automatically for `npm run format` and `lint`.
|
||||
:::
|
||||
|
||||
## OpenAPI
|
||||
|
||||
The OpenAPI client libraries need to be regenerated whenever there are changes to the `immich-openapi-specs.json` file. Note that you should not modify this file directly as it is auto-generated. See [OpenAPI](/docs/developer/open-api.md) for more details.
|
||||
|
||||
@@ -16,20 +16,19 @@ Thanks for being interested in contributing 😊
|
||||
|
||||
## Environment
|
||||
|
||||
### Server and web app
|
||||
### Services
|
||||
|
||||
This environment includes the following services:
|
||||
This environment includes the services below. Additional details are available in each service's README.
|
||||
|
||||
- Core server - `/server/src/immich`
|
||||
- Machine learning - `/machine-learning`
|
||||
- Microservices - `/server/src/microservicess`
|
||||
- Web app - `/web`
|
||||
- Server - [`/server`](https://github.com/immich-app/immich/tree/main/server)
|
||||
- Web app - [`/web`](https://github.com/immich-app/immich/tree/main/web)
|
||||
- Machine learning - [`/machine-learning`](https://github.com/immich-app/immich/tree/main/machine-learning)
|
||||
- Redis
|
||||
- PostgreSQL development database with exposed port `5432` so you can use any database client to access it
|
||||
|
||||
All the services are packaged to run with a single Docker Compose command.
|
||||
|
||||
### Instructions
|
||||
### Server and web apps
|
||||
|
||||
1. Clone the project repo.
|
||||
2. Run `cp docker/example.env docker/.env`.
|
||||
@@ -48,13 +47,7 @@ You can access the web from `http://your-machine-ip:2283` or `http://localhost:2
|
||||
|
||||
**Note:** the "web" development container runs with uid 1000. If that uid does not have read/write permissions on the mounted volumes, you may encounter errors
|
||||
|
||||
### Mobile app
|
||||
|
||||
The mobile app `(/mobile)` will required Flutter toolchain 3.13.x to be installed on your system.
|
||||
|
||||
Please refer to the [Flutter's official documentation](https://flutter.dev/docs/get-started/install) for more information on setting up the toolchain on your machine.
|
||||
|
||||
### Connect to a remote backend
|
||||
#### Connect web to a remote backend
|
||||
|
||||
If you only want to do web development connected to an existing, remote backend, follow these steps:
|
||||
|
||||
@@ -67,13 +60,21 @@ If you only want to do web development connected to an existing, remote backend,
|
||||
IMMICH_SERVER_URL=https://demo.immich.app/ npm run dev
|
||||
```
|
||||
|
||||
### Mobile app
|
||||
|
||||
The mobile app (`/mobile`) requires the Flutter toolchain 3.13.x to be installed on your system.
|
||||
|
||||
Please refer to [Flutter's official documentation](https://flutter.dev/docs/get-started/install) for more information on setting up the toolchain on your machine.
|
||||
|
||||
The mobile app asks you what backend to connect to. You can utilize the demo backend (https://demo.immich.app/) if you don't need to change server code or upload photos. Alternatively, you can run the server yourself per the instructions above.
|
||||
|
||||
## IDE setup
|
||||
|
||||
### Lint / format extensions
|
||||
|
||||
Setting these up in the IDE gives a better developer experience, auto-formatting code on save and providing instant feedback on lint issues.
|
||||
|
||||
### Dart Code Metris
|
||||
### Dart Code Metrics
|
||||
|
||||
The mobile app uses DCM (Dart Code Metrics) for linting and metrics calculation. Please refer to the [Getting Started](https://dcm.dev/docs/getting-started/#installation) page for more information on setting up DCM.
|
||||
|
||||
|
||||
@@ -8,15 +8,24 @@ Unit are run by calling `npm run test` from the `server` directory.
|
||||
|
||||
### End to end tests
|
||||
|
||||
The backend has two end-to-end test suites that can be called with the following two commands from the project root directory:
|
||||
The e2e tests can be run by first starting up a test production environment via:
|
||||
|
||||
- `make server-e2e-api`
|
||||
- `make server-e2e-jobs`
|
||||
```bash
|
||||
make e2e
|
||||
```
|
||||
|
||||
#### API (e2e)
|
||||
Once the test environment is running, the e2e tests can be run via:
|
||||
|
||||
The API e2e tests spin up a test database and execute http requests against the server, validating the expected response codes and functionality for API endpoints.
|
||||
```bash
|
||||
cd e2e/
|
||||
npm test
|
||||
```
|
||||
|
||||
#### Jobs (e2e)
|
||||
The tests check various things including:
|
||||
|
||||
The Jobs e2e tests spin up a docker test environment where thumbnail generation, library scanning, and other _job_ workflows are validated.
|
||||
- Authentication and authorization
|
||||
- Query param, body, and url validation
|
||||
- Response codes
|
||||
- Thumbnail generation
|
||||
- Metadata extraction
|
||||
- Library scanning
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# The Immich CLI
|
||||
|
||||
Immich has a CLI that allows you to perform certain actions from the command line. This CLI replaces the [legacy CLI](https://github.com/immich-app/CLI) that was previously available. The CLI is hosted in the [cli folder of the the main Immich github repository](https://github.com/immich-app/immich/tree/main/cli).
|
||||
Immich has a command line interface (CLI) that allows you to perform certain actions from the command line.
|
||||
|
||||
## Features
|
||||
|
||||
@@ -44,66 +44,72 @@ Please modify the `IMMICH_INSTANCE_URL` and `IMMICH_API_KEY` environment variabl
|
||||
|
||||
## Usage
|
||||
|
||||
```
|
||||
immich
|
||||
```
|
||||
<details>
|
||||
<summary>Usage</summary>
|
||||
|
||||
```
|
||||
$ immich
|
||||
Usage: immich [options] [command]
|
||||
|
||||
Command line interface for Immich
|
||||
|
||||
Options:
|
||||
-V, --version output the version number
|
||||
-d, --config Configuration directory (env: IMMICH_CONFIG_DIR)
|
||||
-h, --help display help for command
|
||||
-V, --version output the version number
|
||||
-d, --config-directory <directory> Configuration directory where auth.yml will be stored (default: "~/.config/immich/", env:
|
||||
IMMICH_CONFIG_DIR)
|
||||
-u, --url [url] Immich server URL (env: IMMICH_INSTANCE_URL)
|
||||
-k, --key [key] Immich API key (env: IMMICH_API_KEY)
|
||||
-h, --help display help for command
|
||||
|
||||
Commands:
|
||||
upload [options] [paths...] Upload assets
|
||||
server-info Display server information
|
||||
login-key [instanceUrl] [apiKey] Login using an API key
|
||||
logout Remove stored credentials
|
||||
help [command] display help for command
|
||||
login|login-key <url> <key> Login using an API key
|
||||
logout Remove stored credentials
|
||||
server-info Display server information
|
||||
upload [options] [paths...] Upload assets
|
||||
help [command] display help for command
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
## Commands
|
||||
|
||||
The upload command supports the following options:
|
||||
|
||||
<details>
|
||||
<summary>Options</summary>
|
||||
|
||||
```
|
||||
Usage: immich upload [options] [paths...]
|
||||
Usage: immich upload [paths...] [options]
|
||||
|
||||
Upload assets
|
||||
|
||||
Arguments:
|
||||
paths One or more paths to assets to be uploaded
|
||||
paths One or more paths to assets to be uploaded
|
||||
|
||||
Options:
|
||||
-r, --recursive Recursive (default: false, env: IMMICH_RECURSIVE)
|
||||
-i, --ignore [paths...] Paths to ignore (env: IMMICH_IGNORE_PATHS)
|
||||
-h, --skip-hash Don't hash files before upload (default: false, env: IMMICH_SKIP_HASH)
|
||||
-H, --include-hidden Include hidden folders (default: false, env: IMMICH_INCLUDE_HIDDEN)
|
||||
-a, --album Automatically create albums based on folder name (default: false, env: IMMICH_AUTO_CREATE_ALBUM)
|
||||
-A, --album-name <name> Add all assets to specified album (env: IMMICH_ALBUM_NAME)
|
||||
-n, --dry-run Don't perform any actions, just show what will be done (default: false, env: IMMICH_DRY_RUN)
|
||||
--delete Delete local assets after upload (env: IMMICH_DELETE_ASSETS)
|
||||
--help display help for command
|
||||
-r, --recursive Recursive (default: false, env: IMMICH_RECURSIVE)
|
||||
-i, --ignore [paths...] Paths to ignore (default: [], env: IMMICH_IGNORE_PATHS)
|
||||
-h, --skip-hash Don't hash files before upload (default: false, env: IMMICH_SKIP_HASH)
|
||||
-H, --include-hidden Include hidden folders (default: false, env: IMMICH_INCLUDE_HIDDEN)
|
||||
-a, --album Automatically create albums based on folder name (default: false, env: IMMICH_AUTO_CREATE_ALBUM)
|
||||
-A, --album-name <name> Add all assets to specified album (env: IMMICH_ALBUM_NAME)
|
||||
-n, --dry-run Don't perform any actions, just show what will be done (default: false, env: IMMICH_DRY_RUN)
|
||||
-c, --concurrency <number> Number of assets to upload at the same time (default: 4, env: IMMICH_UPLOAD_CONCURRENCY)
|
||||
--delete Delete local assets after upload (env: IMMICH_DELETE_ASSETS)
|
||||
--help display help for command
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
Note that the above options can also be set via environment variables.
|
||||
|
||||
## Quick Start
|
||||
|
||||
You begin by authenticating to your Immich server.
|
||||
You begin by authenticating to your Immich server. For instance:
|
||||
|
||||
```bash
|
||||
immich login-key [instanceUrl] [apiKey]
|
||||
```
|
||||
|
||||
For instance,
|
||||
|
||||
```bash
|
||||
immich login-key http://192.168.1.216:2283/api HFEJ38DNSDUEG
|
||||
# immich login [url] [key]
|
||||
immich login http://192.168.1.216:2283/api HFEJ38DNSDUEG
|
||||
```
|
||||
|
||||
This will store your credentials in an `auth.yml` file in the configuration directory, which defaults to `~/.config/`. The directory can be set with the `-d` option or the `IMMICH_CONFIG_DIR` environment variable. Please keep the file secure, either by running the logout command after you are done or by deleting it manually.
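Once logged in, a typical upload might look like this (the path and album name are purely illustrative, using the upload options listed above):

```bash
# Preview what would be uploaded without changing anything
immich upload --dry-run --recursive ~/Pictures/2023

# Upload recursively and add everything to a named album
immich upload --recursive --album-name "Trip 2023" ~/Pictures/2023
```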
|
||||
|
||||
@@ -22,7 +22,8 @@ You do not need to redo any transcoding jobs after enabling hardware acceleratio
|
||||
- WSL2 does not support Quick Sync.
|
||||
- Raspberry Pi is currently not supported.
|
||||
- Two-pass mode is only supported for NVENC. Other APIs will ignore this setting.
|
||||
- Only encoding is currently hardware accelerated, so the CPU is still used for software decoding and tone-mapping.
|
||||
- By default, only encoding is currently hardware accelerated. This means the CPU is still used for software decoding and tone-mapping.
|
||||
- NVENC and RKMPP can be fully accelerated by enabling hardware decoding in the video transcoding settings.
|
||||
- Hardware dependent
|
||||
- Codec support varies, but H.264 and HEVC are usually supported.
|
||||
- Notably, NVIDIA and AMD GPUs do not support VP9 encoding.
|
||||
@@ -33,7 +34,7 @@ You do not need to redo any transcoding jobs after enabling hardware acceleratio
|
||||
#### NVENC
|
||||
|
||||
- You must have the official NVIDIA driver installed on the server.
|
||||
- On Linux (except for WSL2), you also need to have [NVIDIA Container Runtime][nvcr] installed.
|
||||
- On Linux (except for WSL2), you also need to have [NVIDIA Container Toolkit][nvct] installed.
|
||||
|
||||
#### QSV
|
||||
|
||||
@@ -65,6 +66,7 @@ For RKMPP to work:
|
||||
|
||||
3. Redeploy the `immich-microservices` container with these updated settings.
|
||||
4. In the Admin page under `Video transcoding settings`, change the hardware acceleration setting to the appropriate option and save.
|
||||
5. (Optional) If using a compatible backend, you may enable hardware decoding for optimal performance.
|
||||
|
||||
#### Single Compose File
|
||||
|
||||
@@ -102,7 +104,14 @@ Once this is done, you can continue to step 3 of "Basic Setup".
|
||||
|
||||
#### All-In-One - Unraid Setup
|
||||
|
||||
##### NVENC - NVIDIA GPUs
|
||||
##### QSV
|
||||
|
||||
1. Unraid > Docker > (Stop) Immich container > Edit
|
||||
2. Scroll down and select `Add another Path, Port, Variable, Label or Device`
|
||||
3. In the drop-down menu, select `Device` and add an entry with any name and the value `/dev/dri`.
|
||||
4. Continue to step 4 of "Basic Setup".
|
||||
|
||||
##### NVENC
|
||||
|
||||
1. In the container app, add this environmental variable: Key=`NVIDIA_VISIBLE_DEVICES` Value=`all`
|
||||
2. While still in the container app, change the container from Basic Mode to Advanced Mode and add the following parameter to the Extra Parameters field: `--runtime=nvidia`
|
||||
@@ -115,7 +124,7 @@ Once this is done, you can continue to step 3 of "Basic Setup".
|
||||
- While you can use VAAPI with NVIDIA and Intel devices, prefer the more specific APIs since they're more optimized for their respective devices.
|
||||
|
||||
[hw-file]: https://github.com/immich-app/immich/releases/latest/download/hwaccel.transcoding.yml
|
||||
[nvcr]: https://github.com/NVIDIA/nvidia-container-runtime/
|
||||
[nvct]: https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html
|
||||
[jellyfin-lp]: https://jellyfin.org/docs/general/administration/hardware-acceleration/intel/#configure-and-verify-lp-mode-on-linux
|
||||
[jellyfin-kernel-bug]: https://jellyfin.org/docs/general/administration/hardware-acceleration/intel/#known-issues-and-limitations
|
||||
[libmali-rockchip]: https://github.com/tsukumijima/libmali-rockchip/releases
|
||||
|
||||
BIN
docs/docs/features/img/advanced-search-filters.webp
Normal file
|
After Width: | Height: | Size: 1.9 MiB |
BIN
docs/docs/features/img/library-custom-scan-interval.png
Normal file
|
After Width: | Height: | Size: 31 KiB |
BIN
docs/docs/features/img/moblie-smart-serach.webp
Normal file
|
After Width: | Height: | Size: 4.9 MiB |
|
Before Width: | Height: | Size: 84 KiB After Width: | Height: | Size: 43 KiB |
|
Before Width: | Height: | Size: 56 KiB After Width: | Height: | Size: 32 KiB |
|
Before Width: | Height: | Size: 183 KiB After Width: | Height: | Size: 236 KiB |
BIN
docs/docs/features/img/partner-sharing-4.png
Normal file
|
After Width: | Height: | Size: 30 KiB |
BIN
docs/docs/features/img/partner-sharing-5.png
Normal file
|
After Width: | Height: | Size: 28 KiB |
BIN
docs/docs/features/img/partner-sharing-6.png
Normal file
|
After Width: | Height: | Size: 1.6 MiB |
BIN
docs/docs/features/img/partner-sharing-7.png
Normal file
|
After Width: | Height: | Size: 37 KiB |
BIN
docs/docs/features/img/search-ex-1.png
Normal file
|
After Width: | Height: | Size: 2.2 MiB |
|
Before Width: | Height: | Size: 162 KiB |
|
Before Width: | Height: | Size: 59 KiB |
@@ -4,13 +4,9 @@
|
||||
|
||||
Immich supports the creation of libraries, which are top-level asset containers. Currently, there are two types of libraries: traditional upload libraries that can sync with a mobile device, and external libraries that keep up to date with files on disk. Libraries are different from albums in that an asset can belong to multiple albums but only one library, and deleting a library deletes all assets contained within. As of August 2023, this is a new feature and libraries have a lot of potential for future development beyond what is documented here. This document attempts to describe the current state of libraries.
|
||||
|
||||
## The Upload Library
|
||||
|
||||
Immich comes preconfigured with an upload library for each user. All assets uploaded to Immich are added to this library. This library can be renamed, but not deleted. The upload library is the only library that can be synced with a mobile device. No items in an upload library is allowed to have the same sha1 hash as another item in the same library in order to prevent duplicates.
|
||||
|
||||
## External Libraries
|
||||
|
||||
External libraries tracks assets stored outside of immich, i.e. in the file system. Immich will only read data from the files, and will not modify them in any way. Therefore, the delete button is disabled for external assets. When the external library is scanned, immich will read the metadata from the file and create an asset in the library for each image or video file. These items will then be shown in the main timeline, and they will look and behave like any other asset, including viewing on the map, adding to albums, etc.
|
||||
External libraries track assets stored outside of Immich, i.e. in the file system. When the external library is scanned, Immich will read the metadata from the file and create an asset in the library for each image or video file. These items will then be shown in the main timeline, and they will look and behave like any other asset, including viewing on the map, adding to albums, etc.
|
||||
|
||||
If a file is modified outside of Immich, the changes will not be reflected in Immich until the library is scanned again. There are different ways to scan a library depending on the use case:
|
||||
|
||||
@@ -161,7 +157,7 @@ The christmas trip library will now be scanned in the background. In the meantim
|
||||
|
||||
- Click on Create External Library.
|
||||
|
||||
:::info Note
|
||||
:::note
|
||||
If you get an error here, please rename the other external library to something else. This is a bug that will be fixed in a future release.
|
||||
:::
|
||||
|
||||
@@ -175,3 +171,14 @@ If you get an error here, please rename the other external library to something
|
||||
- Click on Scan Library Files
|
||||
|
||||
Within seconds, the assets from the old-pics and videos folders should show up in the main timeline.
|
||||
|
||||
### Set Custom Scan Interval
|
||||
|
||||
:::note
|
||||
Only an admin can do this.
|
||||
:::
|
||||
|
||||
You can define a custom interval for triggering the external library rescan under Administration -> Settings -> Library.
|
||||
You can set the scanning interval using a preset or cron format; for example, the cron expression `0 2 * * *` scans every day at 02:00. For more information, you can refer to [Crontab Guru](https://crontab.guru/).
|
||||
|
||||
<img src={require('./img/library-custom-scan-interval.png').default} width="75%" title='Set custom scan interval for external library' />
|
||||
|
||||
@@ -10,15 +10,14 @@ You do not need to redo any machine learning jobs after enabling hardware accele
|
||||
## Supported Backends
|
||||
|
||||
- ARM NN (Mali)
|
||||
- CUDA (NVIDIA)
|
||||
- OpenVINO (Intel)
|
||||
- CUDA (NVIDIA GPUs with [compute capability](https://developer.nvidia.com/cuda-gpus) 5.2 or higher)
|
||||
- OpenVINO (Intel discrete GPUs such as Iris Xe and Arc)
|
||||
|
||||
## Limitations
|
||||
|
||||
- The instructions and configurations here are specific to Docker Compose. Other container engines may require different configuration.
|
||||
- Only Linux and Windows (through WSL2) servers are supported.
|
||||
- ARM NN is only supported on devices with Mali GPUs. Other Arm devices are not supported.
|
||||
- There is currently an upstream issue with OpenVINO, so whether it will work is device-dependent.
|
||||
- Some models may not be compatible with certain backends. CUDA is the most reliable.
|
||||
|
||||
## Prerequisites
|
||||
@@ -36,14 +35,22 @@ You do not need to redo any machine learning jobs after enabling hardware accele
|
||||
|
||||
#### CUDA
|
||||
|
||||
- You must have the official NVIDIA driver installed on the server.
|
||||
- On Linux (except for WSL2), you also need to have [NVIDIA Container Runtime][nvcr] installed.
|
||||
- The GPU must have compute capability 5.2 or greater.
|
||||
- The server must have the official NVIDIA driver installed.
|
||||
- The installed driver must be >= 535 (it must support CUDA 12.2); a quick check is shown after this list.
|
||||
- On Linux (except for WSL2), you also need to have [NVIDIA Container Toolkit][nvct] installed.
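A quick way to verify the driver version and compute capability requirements above from the host, assuming the NVIDIA driver (and therefore `nvidia-smi`) is already installed; on recent drivers, `compute_cap` is available as a query field:

```bash
# Driver version should be >= 535 and compute capability >= 5.2
nvidia-smi --query-gpu=name,driver_version,compute_cap --format=csv
```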
|
||||
|
||||
#### OpenVINO
|
||||
|
||||
- The server must have a discrete GPU, i.e. Iris Xe or Arc. Expect issues when attempting to use integrated graphics.
|
||||
- Ensure the server's kernel version is new enough to use the device for hardware acceleration.
|
||||
|
||||
## Setup
|
||||
|
||||
1. If you do not already have it, download the latest [`hwaccel.ml.yml`][hw-file] file and ensure it's in the same folder as the `docker-compose.yml`.
|
||||
2. In the `docker-compose.yml` under `immich-machine-learning`, uncomment the `extends` section and change `cpu` to the appropriate backend.
|
||||
3. Redeploy the `immich-machine-learning` container with these updated settings.
|
||||
3. Still in `immich-machine-learning`, add one of `-armnn`, `-cuda`, or `-openvino` to the end of the `image` section's tag.
|
||||
4. Redeploy the `immich-machine-learning` container with these updated settings.
|
||||
|
||||
#### Single Compose File
|
||||
|
||||
@@ -60,8 +67,6 @@ deploy:
|
||||
count: 1
|
||||
capabilities:
|
||||
- gpu
|
||||
- compute
|
||||
- video
|
||||
```
|
||||
|
||||
You can add this to the `immich-machine-learning` service instead of extending from `hwaccel.ml.yml`:
|
||||
@@ -69,6 +74,7 @@ You can add this to the `immich-machine-learning` service instead of extending f
|
||||
```yaml
|
||||
immich-machine-learning:
|
||||
container_name: immich_machine_learning
|
||||
# Note the `-cuda` at the end
|
||||
image: ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release}-cuda
|
||||
# Note the lack of an `extends` section
|
||||
deploy:
|
||||
@@ -79,8 +85,6 @@ immich-machine-learning:
|
||||
count: 1
|
||||
capabilities:
|
||||
- gpu
|
||||
- compute
|
||||
- video
|
||||
volumes:
|
||||
- model-cache:/cache
|
||||
env_file:
|
||||
@@ -91,11 +95,11 @@ immich-machine-learning:
|
||||
Once this is done, you can redeploy the `immich-machine-learning` container.
|
||||
|
||||
:::info
|
||||
You can confirm the device is being recognized and used by checking its utilization (via `nvtop` for CUDA, `intel_gpu_top` for OpenVINO, etc.). You can also enable debug logging by setting `LOG_LEVEL=debug` in the `.env` file and restarting the `immich-machine-learning` container. When a Smart Search or Face Detection job begins, you should see a log for `Available ORT providers` containing the relevant provider. In the case of ARM NN, the absence of a `Could not load ANN shared libraries` log entry means it loaded successfully.
|
||||
You can confirm the device is being recognized and used by checking its utilization (via `nvtop` for CUDA, `intel_gpu_top` for OpenVINO, etc.). You can also enable debug logging by setting `IMMICH_LOG_LEVEL=debug` in the `.env` file and restarting the `immich-machine-learning` container. When a Smart Search or Face Detection job begins, you should see a log for `Available ORT providers` containing the relevant provider. In the case of ARM NN, the absence of a `Could not load ANN shared libraries` log entry means it loaded successfully.
|
||||
:::
|
||||
|
||||
[hw-file]: https://github.com/immich-app/immich/releases/latest/download/hwaccel.ml.yml
|
||||
[nvcr]: https://github.com/NVIDIA/nvidia-container-runtime/
|
||||
[nvct]: https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html
|
||||
|
||||
## Tips
|
||||
|
||||
|
||||
@@ -27,8 +27,8 @@ The metrics in immich are grouped into API (endpoint calls and response times),
|
||||
|
||||
Immich will not expose an endpoint for metrics by default. To enable this endpoint, you can add the `IMMICH_METRICS=true` environmental variable to your `.env` file. Note that only the server and microservices containers currently use this variable.
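As a quick sanity check once the variable is set and the containers are restarted (the port below assumes the default server metrics port of `8081`; adjust it if your deployment overrides it):

```bash
# Fetch the first few exposed Prometheus metrics from the server container
curl -s http://localhost:8081/metrics | head
```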
|
||||
|
||||
:::note
|
||||
`IMMICH_METRICS` is equivalent to enabling the following three environmental variables: `IMMICH_API_METRICS`, `IMMICH_HOST_METRICS`, and `IMMICH_IO_METRICS`. If you would like to only expose certain kinds of metrics, you can set only those environmental variables to `true`. Explicitly setting the environmental variable for a metric group overrides `IMMICH_METRICS` for that group.
|
||||
:::tip
|
||||
`IMMICH_METRICS` enables all metrics, but there are also [environmental variables](/docs/install/environment-variables.md#prometheus) to toggle specific metric groups. If you'd like to only expose certain kinds of metrics, you can set only those environmental variables to `true`. Explicitly setting the environmental variable for a metric group overrides `IMMICH_METRICS` for that group. For example, setting `IMMICH_METRICS=true` and `IMMICH_API_METRICS=false` will enable all metrics except API metrics.
|
||||
:::
|
||||
|
||||
The next step is to configure a new or existing Prometheus instance to scrape this endpoint. The following steps assume that you do not have an existing Prometheus instance, but the steps will be similar either way.
|
||||
|
||||
@@ -1,17 +1,57 @@
|
||||
# Partner Sharing
|
||||
|
||||
Immich allows you to share your library with other users. They can then view your library and download the assets.
|
||||
|
||||
You can manage one or multiple users to have access to your library from the [User Settings](docs/features/user-settings.md) page.
|
||||
|
||||
<img src={require('./img/partner-sharing-1.png').default} title='Partner Sharing 1' />
|
||||
|
||||
<img src={require('./img/partner-sharing-2.png').default} title='Partner Sharing 2' />
|
||||
|
||||
Accessing the shared library can be done from the Sharing page.
|
||||
|
||||
<img src={require('./img/partner-sharing-3.png').default} title='Partner Sharing 3' />
|
||||
|
||||
:::tip Sharing specific assets
|
||||
For sharing a specific set of assets, you can use the shared album feature of Immich.
|
||||
:::
|
||||
|
||||
Immich allows you to share your library with other users. They can then view your library and download the assets. You can manage Partner Sharing from the [User Settings](docs/features/user-settings.md) page on the web.
|
||||
|
||||
Partner Sharing includes:
|
||||
|
||||
- Access to all photos and videos that are not archived or trashed.
|
||||
- Access to all metadata, including GPS information.
|
||||
- Access to share assets via shared links, albums, etc.
|
||||
|
||||
:::info
|
||||
Partner sharing is one-way. To view your partner's assets, they must also share them with you.
|
||||
:::
|
||||
|
||||
## Sharing with a Partner
|
||||
|
||||
:::note Duplicates
|
||||
Partner sharing may result in displaying duplicate assets on the main timeline.
|
||||
:::
|
||||
|
||||
<img src={require('./img/partner-sharing-1.png').default} width="70%" title='Add Partner 1' />
|
||||
|
||||
<img src={require('./img/partner-sharing-2.png').default} width="70%" title='Add Partner 2' />
|
||||
|
||||
<img src={require('./img/partner-sharing-4.png').default} width="70%" title='Add Partner 4' />
|
||||
|
||||
## Viewing Partner Assets
|
||||
|
||||
Access partner assets via the Sharing page.
|
||||
|
||||
<img src={require('./img/partner-sharing-3.png').default} width="70%" title='Access to the Shared Library' />
|
||||
|
||||
## Timeline Integration
|
||||
|
||||
Partner shared photos can be displayed in the main timeline. This feature can be enabled on a per-partner basis and can be viewed and updated on both the web and mobile app.
|
||||
|
||||
### Web:
|
||||
|
||||
Account Settings -> Sharing -> Show in timeline
|
||||
|
||||
<img src={require('./img/partner-sharing-5.png').default} width="70%" title='Partner Sharing for the web interface' />
|
||||
|
||||
### Mobile App:
|
||||
|
||||
From the partner’s view, on the top right corner of the app bar
|
||||
|
||||
<img src={require('./img/partner-sharing-6.png').default} width="30%" title='Partner Sharing for the mobile app' />
|
||||
|
||||
## Removing Access
|
||||
|
||||
In order to remove a partner, you can go to User -> Account Settings -> Sharing and click on the X button.
|
||||
|
||||
<img src={require('./img/partner-sharing-7.png').default} width="70%" title='Remove Partner' />
|
||||
|
||||
@@ -8,7 +8,7 @@ During Exif Extraction, assets with latitudes and longitudes are reverse geocode
|
||||
|
||||
## Usage
|
||||
|
||||
Data from a reverse geocode is displayed in the image details, and used in [Search](/docs/features/search.md).
|
||||
Data from a reverse geocode is displayed in the image details, and used in [Smart Search](/docs/features/smart-search.md).
|
||||
|
||||
<img src={require('./img/reverse-geocoding-mobile1.png').default} width='33%' title='Reverse Geocoding' />
|
||||
<img src={require('./img/reverse-geocoding-mobile2.png').default} width='33%' title='Reverse Geocoding' />
|
||||
|
||||
@@ -1,14 +0,0 @@
|
||||
# Search
|
||||
|
||||
Immich uses Postgres as its search database for both metadata and smart search.
|
||||
|
||||
Smart search is powered by the [pgvecto.rs](https://github.com/tensorchord/pgvecto.rs) extension, utilizing machine learning models like CLIP to provide relevant search results. This allows for freeform searches without requiring specific keywords in the image or video metadata.
|
||||
|
||||
Metadata search (prefixed with `m:`) can search specifically by text without the use of a model.
|
||||
|
||||
Archived photos are not included in search results by default. To include them, add the query parameter `withArchived=true` to the url.
|
||||
|
||||
Some search examples:
|
||||
<img src={require('./img/search-ex-2.webp').default} title='Search Example 1' />
|
||||
|
||||
<img src={require('./img/search-ex-3.webp').default} title='Search Example 2' />
|
||||
252
docs/docs/features/shared-albums.md
Normal file
@@ -0,0 +1,252 @@
|
||||
# Shared Albums & Assets
|
||||
|
||||
## Shared Albums
|
||||
|
||||
import Tabs from '@theme/Tabs';
|
||||
import TabItem from '@theme/TabItem';
|
||||
|
||||
Album sharing allows you to share assets with other users or with people from around the world via a link or invitation (for system users).
|
||||
|
||||
When sharing albums, what's shared is:
|
||||
|
||||
- The selected assets.
|
||||
- Metadata of the assets (this can be hidden when sharing via a link).
|
||||
|
||||
## Shared Album Features
|
||||
|
||||
- Download all assets as zip file (Web only).
|
||||
:::info Archive size limited.
|
||||
If the size of the album exceeds 4GB, the download will be split into multiple archive files of 4GB each.
|
||||
:::
|
||||
- Add a description to the album (Web only).
|
||||
- Slideshow view (Web only).
|
||||
- Add or remove photos to an album.
|
||||
- Comment on the album (for system users only).
|
||||
|
||||
:::info
|
||||
When you create an album and it is not shared with anyone, it will be added to the Albums category.
|
||||
:::
|
||||
|
||||
<Tabs>
|
||||
<TabItem value="Computer" label="Computer" default>
|
||||
|
||||
### Create a Shared Album
|
||||
|
||||
1. Select the assets (Shift can be used for multiple selection).
|
||||
2. Click on the + on the top right -> add to a shared album.
|
||||
3. Name the new album and add the album.
|
||||
|
||||
## Sharing Between Users
|
||||
|
||||
### Shared Album Options
|
||||
|
||||
- Add or remove users from the album.
|
||||
:::info remove user(s)
|
||||
When a user is removed from the album, the photos they uploaded will still appear in the album.
|
||||
:::
|
||||
- Enable or disable comments & likes.
|
||||
- Replace the album cover.
|
||||
- Display order (newest first / oldest first).
|
||||
|
||||
To change these settings, click on the 3 dots on the right -> options.
|
||||
|
||||
:::info Known bug
|
||||
Currently it is not possible to remove people through the options.
|
||||
This is a [known problem and it has a temporary solution](https://github.com/immich-app/immich/issues/7954)
|
||||
:::
|
||||
|
||||
## Share Album via Link
|
||||
|
||||
:::info
|
||||
When you want to share with people outside the home network via a link, Immich needs to be exposed to the world wide web; read more to [learn the ways to do this](/docs/guides/remote-access.md).
|
||||
:::
|
||||
|
||||
1. Enter the shared album.
|
||||
2. Click on the share icon.
|
||||
3. Click on Create link.
|
||||
|
||||
You can edit the link properties; options include:
|
||||
|
||||
- **Description -** adding a description to the album (optional)
|
||||
- **Password -** adding a password to the album (optional); Require password must be activated for it to take effect.
|
||||
- **Show metadata -** whether to show metadata to whoever the link will be shared with (optional).
|
||||
- **Allow public user to download -** whether to allow whoever has the link to download all the images or a certain image (optional).
|
||||
- **Allow public user to upload -** whether to allow whoever has the link to upload assets to the album (optional).
|
||||
:::info
|
||||
Users who have the link and have uploaded files cannot delete them.
|
||||
:::
|
||||
- **Expire after -** adding an expiration date to the link (optional).
|
||||
|
||||
## Share Specific Assets
|
||||
|
||||
A user can share specific assets without linking them to a specific album.
|
||||
In order to do so:
|
||||
|
||||
1. Go to the timeline
|
||||
2. Select the assets (Shift can be used for multiple selection)
|
||||
3. Click the share button
|
||||
|
||||
:::info
|
||||
Assets shared in this way will not be displayed in the Sharing category. To remove or change the link settings of assets shared in this way, you must use the Manage generated links option.
|
||||
:::
|
||||
|
||||
:::tip
|
||||
For temporary sharing, you can add an expiration date to assets shared this way.
|
||||
:::
|
||||
|
||||
## Manage generated links
|
||||
|
||||
A user can copy, delete, and change the link settings they created for specific albums or assets. In order to do so:
|
||||
|
||||
1. Go to the Immich home page.
|
||||
2. Select the Sharing category.
|
||||
3. On the top right, select Shared links.
|
||||
|
||||
:::info remove links/users.
|
||||
When making a shared album private, the added photos will **still** be saved in the album.
|
||||
:::
|
||||
|
||||
## Activity
|
||||
|
||||
:::info
|
||||
Activity is not visible when sharing an album via external link.
|
||||
New users added to the album will be able to see previous activity.
|
||||
:::
|
||||
|
||||
### Add a Comment or Like to the Album
|
||||
|
||||
1. Enter the shared album.
|
||||
2. From the bottom right you can add comment(s) or delete old comments.
|
||||
3. To add a like (heart) to the album, click the heart button to the left of the "say something" button.
|
||||
|
||||
#### Add a Comment or Like to a Specific Photo
|
||||
|
||||
:::info Activity
|
||||
Activity of a comment or heart on a specific photo will appear in the general activity of the album.
|
||||
:::
|
||||
|
||||
1. Enter the shared album.
|
||||
2. Enter the picture.
|
||||
3. From the bottom right you can add comment(s) or delete old comments.
|
||||
4. To add a like (heart) to the album, click the heart button to the left of the "say something" button.
|
||||
|
||||
### Viewing Activity in the Album
|
||||
|
||||
To view album activity on comments or likes:
|
||||
|
||||
1. Enter the shared album
|
||||
2. On the bottom right click on the message icon
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="Mobile" label="Mobile">
|
||||
|
||||
:::note mobile app
|
||||
Some of the features are not available on mobile. To understand the full set of shared album features, it is recommended to also read the explanation for the computer version.
|
||||
:::
|
||||
|
||||
### Create a Shared Album
|
||||
|
||||
1. Select the assets.
|
||||
2. Swipe up and click on Create new album.
|
||||
3. Name the new album and add the album.
|
||||
|
||||
## Sharing Between Users
|
||||
|
||||
#### Add or remove users from the album.
|
||||
|
||||
:::info remove user(s)
|
||||
When a user is removed from the album, the photos they uploaded will still appear in the album.
|
||||
:::
|
||||
|
||||
After creating the album, click the Add User button and select the user you want to share with.
|
||||
|
||||
### Shared Album Options
|
||||
|
||||
- Enable or disable comments & likes.
|
||||
- Add or remove users
|
||||
|
||||
To change these settings, click on the 3 dots on the top right -> options.
|
||||
|
||||
## Share Album via Link
|
||||
|
||||
:::info
|
||||
When you want to share with people outside the home network via a link, Immich needs to be exposed to the world wide web; read more to [learn the ways to do this](/docs/guides/remote-access.md).
|
||||
:::
|
||||
|
||||
1. Enter the shared album.
|
||||
2. Click 3 dots on the top right.
|
||||
3. Click on Share.
|
||||
|
||||
You can edit the link properties; options include:
|
||||
|
||||
- **Description -** adding a description to the album (optional)
|
||||
- **Password -** adding a password to the album (optional)
|
||||
- **Show metadata -** whether to show metadata to whoever the link will be shared with (optional).
|
||||
- **Allow public user to download -** whether to allow whoever has the link to download all the images or a certain image (optional).
|
||||
- **Allow public user to upload -** whether to allow whoever has the link to upload assets to the album (optional).
|
||||
:::info
|
||||
Users who have the link and have uploaded files cannot delete them.
|
||||
:::
|
||||
- **Expire after -** adding an expiration date to the link (optional).
|
||||
|
||||
## Share Specific Assets
|
||||
|
||||
A user can share specific assets without linking them to a specific album.
|
||||
In order to do so:
|
||||
|
||||
1. Go to the timeline
|
||||
2. Select the assets.
|
||||
3. Click the share button
|
||||
|
||||
:::info
|
||||
Assets shared in this way will not be displayed in the Sharing category. To remove or change the link settings of assets shared in this way, you must use the Manage generated links option.
|
||||
:::
|
||||
|
||||
:::tip
|
||||
For temporary sharing, you can add an expiration date to assets shared this way.
|
||||
:::
|
||||
|
||||
## Manage generated links
|
||||
|
||||
A user can copy, delete, and change the link settings they created for specific albums or assets. In order to do so:
|
||||
|
||||
1. Go to Sharing category.
|
||||
2. Select Shared links at the top right.
|
||||
|
||||
:::info remove links/users.
|
||||
When making a shared album private, the added photos will **still** be saved in the album.
|
||||
:::
|
||||
|
||||
## Activity
|
||||
|
||||
:::info
|
||||
Activity is not visible when sharing an album via external link.
|
||||
New users added to the album will be able to see previous activity.
|
||||
:::
|
||||
|
||||
### Add a Comment or Like to the Album
|
||||
|
||||
1. Enter the shared album.
|
||||
2. From the top right you can add comment(s) or delete old comments.
|
||||
3. To add a like (heart) to the album, click the heart button to the right of the "say something" button.
|
||||
|
||||
#### Add a Comment or Like to a Specific Photo
|
||||
|
||||
:::info Activity
|
||||
Activity of a comment or heart on a specific photo will appear in the general activity of the album.
|
||||
:::
|
||||
|
||||
1. Enter the shared album.
|
||||
2. Enter the picture.
|
||||
3. From the top right you can add comment(s) or delete old comments.
|
||||
4. To add a like (heart) to the album, click the heart button to the right of the "say something" button.
|
||||
|
||||
### Viewing Activity in the Album
|
||||
|
||||
To view album activity on comments or likes:
|
||||
|
||||
1. Enter the shared album
|
||||
2. On the top right click on the message icon
|
||||
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
49
docs/docs/features/smart-search.md
Normal file
@@ -0,0 +1,49 @@
|
||||
import Tabs from '@theme/Tabs';
|
||||
import TabItem from '@theme/TabItem';
|
||||
|
||||
# Smart Search
|
||||
|
||||
Immich uses Postgres as its search database for both metadata and smart search.
|
||||
|
||||
Smart search is powered by the [pgvecto.rs](https://github.com/tensorchord/pgvecto.rs) extension, utilizing machine learning models like [CLIP](https://openai.com/research/clip) to provide relevant search results. This allows for freeform searches without requiring specific keywords in the image or video metadata.
|
||||
|
||||
Archived photos are not included in search results by default. To include them, mark the checkbox in [advanced search filters](/docs/features/smart-search#advanced-search-filters).
|
||||
|
||||
:::tip Alternative CLIP Models
|
||||
More powerful models can be used for more accurate search results. For more information, see the related [FAQ](/docs/FAQ#can-i-use-a-custom-clip-model).
|
||||
:::
|
||||
|
||||
:::info
|
||||
Smart Search is currently limited to 5,000 results for a single search on the web.
|
||||
:::
|
||||
|
||||
## Advanced Search Filters
|
||||
|
||||
In addition, Immich offers advanced search functionality, allowing you to find specific content using customizable search filters. These filters include location, one or more faces, specific albums, and more. You can try out the search filters on the [Demo site](https://demo.immich.app).
|
||||
|
||||
Smart search features include:
|
||||
|
||||
- Search for one or more faces (with or without context search).
|
||||
- Search by country, state, or city, or by all three.
|
||||
- Search by camera make and model.
|
||||
- Search by date range.
|
||||
- Search by file name.
|
||||
- Search by media types: image, video or all (**Note:** Image includes live images).
|
||||
- Search by condition: not in any album or archive or Favorite or all conditions.
|
||||
|
||||
<Tabs>
|
||||
<TabItem value="Computer" label="Computer" default>
|
||||
|
||||
Some search examples:
|
||||
|
||||
<img src={require('./img/advanced-search-filters.webp').default} width="70%" title='Advanced search filters' />
|
||||
|
||||
<img src={require('./img/search-ex-1.png').default} width="70%" title='Search Example 1' />
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="Mobile" label="Mobile">
|
||||
|
||||
<img src={require('./img/moblie-smart-serach.webp').default} width="30%" title='Smart search on mobile' />
|
||||
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
42
docs/docs/features/supported-formats.md
Normal file
@@ -0,0 +1,42 @@
|
||||
# Supported formats
|
||||
|
||||
Immich supports a number of image and video formats, the most common of which are outlined here.
|
||||
|
||||
:::note
|
||||
For the full list, refer to the [Immich source code](https://github.com/immich-app/immich/blob/main/server/src/utils/mime-types.ts).
|
||||
:::
|
||||
|
||||
## Image formats
|
||||
|
||||
| Format | Extension(s) | Supported? | Notes |
|
||||
| :-------- | :---------------------------- | :----------------: | :-------------- |
|
||||
| `AVIF` | `.avif` | :white_check_mark: | |
|
||||
| `BMP` | `.bmp` | :white_check_mark: | |
|
||||
| `GIF` | `.gif` | :white_check_mark: | |
|
||||
| `HEIC` | `.heic` | :white_check_mark: | |
|
||||
| `HEIF` | `.heif` | :white_check_mark: | |
|
||||
| `JPEG` | `.jpeg` `.jpg` `.jpe` `.insp` | :white_check_mark: | |
|
||||
| `JPEG XL` | `.jxl` | :white_check_mark: | |
|
||||
| `PNG` | `.png` | :white_check_mark: | |
|
||||
| `PSD` | `.psd` | :white_check_mark: | Adobe Photoshop |
|
||||
| `RAW` | `.raw` | :white_check_mark: | |
|
||||
| `RW2` | `.rw2` | :white_check_mark: | |
|
||||
| `SVG` | `.svg` | :white_check_mark: | |
|
||||
| `TIFF` | `.tif` `.tiff` | :white_check_mark: | |
|
||||
| `WEBP` | `.webp` | :white_check_mark: | |
|
||||
|
||||
## Video formats
|
||||
|
||||
| Format | Extension(s) | Supported? | Notes |
|
||||
| :---------- | :-------------------- | :----------------: | :---- |
|
||||
| `3GPP` | `.3gp` `.3gpp` | :white_check_mark: | |
|
||||
| `AVI` | `.avi` | :white_check_mark: | |
|
||||
| `FLV` | `.flv` | :white_check_mark: | |
|
||||
| `M4V` | `.m4v` | :white_check_mark: | |
|
||||
| `MATROSKA` | `.mkv` | :white_check_mark: | |
|
||||
| `MP2T` | `.mts` `.m2ts` | :white_check_mark: | |
|
||||
| `MP4` | `.mp4` `.insv` | :white_check_mark: | |
|
||||
| `MPEG` | `.mpg` `.mpe` `.mpeg` | :white_check_mark: | |
|
||||
| `QUICKTIME` | `.mov` | :white_check_mark: | |
|
||||
| `WEBM` | `.webm` | :white_check_mark: | |
|
||||
| `WMV` | `.wmv` | :white_check_mark: | |
|
||||
@@ -6,7 +6,7 @@ Immich can ingest XMP sidecars on file upload (via the CLI) as well as detect ne
|
||||
|
||||
XMP sidecars are external XML files that contain metadata related to media files. Many applications read and write these files either exclusively or in addition to the metadata written to image files. They can be a powerful tool for editing and storing metadata of a media file without modifying the media file itself. When Immich receives or detects an XMP sidecar for a media file, it will attempt to extract the metadata from both the sidecar as well as the media file. It will prioritize the metadata for fields in the sidecar but will fall back and use the metadata in the media file if necessary.
|
||||
|
||||
When importing files via the CLI bulk uploader, Immich will automatically detect XMP sidecar files as files that exist next to the original media file and have the exact same name with an additional `.xmp` file extension (i.e., `PXL_20230401_203352928.MP.jpg` and `PXL_20230401_203352928.MP.jpg.xmp`).
|
||||
When importing files via the CLI bulk uploader or parsing photo metadata for external libraries, Immich will automatically detect XMP sidecar files as files that exist next to the original media file. Immich will look for files that have the same name as the photo, but with the `.xmp` file extension. The sidecar name can either include or omit the photo's file extension. For example, for a photo named `PXL_20230401_203352928.MP.jpg`, Immich will look for an XMP file named either `PXL_20230401_203352928.MP.jpg.xmp` or `PXL_20230401_203352928.MP.xmp`. If both `PXL_20230401_203352928.MP.jpg.xmp` and `PXL_20230401_203352928.MP.xmp` are present, Immich will prefer `PXL_20230401_203352928.MP.jpg.xmp`.
|
||||
|
||||
There are 2 administrator jobs associated with sidecar files: `SYNC` and `DISCOVER`. The sync job will re-scan all media with existing sidecar files and queue them for a metadata refresh. This is a great use case when third-party applications are used to modify the metadata of media. The discover job will attempt to scan the filesystem for new sidecar files for all media that does not currently have a sidecar file associated with it.
|
||||
|
||||
|
||||
@@ -1,130 +0,0 @@
|
||||
# API Album Sync (Python Script)
|
||||
|
||||
This is an example of a python script for syncing an album to a local folder. This was used for a digital photoframe so the displayed photos could be managed from the immich web or app UI.
|
||||
|
||||
The script is copied below in it's current form. A repository is hosted [here](https://git.orenit.solutions/open/immichalbumpull).
|
||||
|
||||
:::danger
|
||||
This guide uses a generated API key. This key gives the same access to your immich instance as the user it is attached to, so be careful how the config file is stored and transferred.
|
||||
:::
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Python 3.7+
|
||||
- [requests library](https://pypi.org/project/requests/)
|
||||
|
||||
### Installing
|
||||
|
||||
Copy the contents of 'pull.py' (shown below) to your chosen location or clone the repository:
|
||||
|
||||
```bash
|
||||
git clone https://git.orenit.solutions/open/immichalbumpull
|
||||
```
|
||||
|
||||
Edit or create the 'config.ini' file in the same directory as the script with the necessary details:
|
||||
|
||||
```ini title='config.ini'
|
||||
[immich]
|
||||
# URL of target immich instance
|
||||
url = https://photo.example.com
|
||||
# API key from Account Settings -> API Keys
|
||||
apikey = xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
|
||||
# Full local path to target directory
|
||||
destination = /home/photo/photos
|
||||
# immich album name
|
||||
album = Photoframe
|
||||
```
|
||||
|
||||
### Usage
|
||||
|
||||
Run the script directly:
|
||||
|
||||
```bash
|
||||
./pull.py
|
||||
```
|
||||
|
||||
Or from cron (every 5 minutes):
|
||||
|
||||
```bash
|
||||
*/5 * * * * /usr/bin/python /home/user/immichalbumpull/pull.py
|
||||
```
|
||||
|
||||
### Python Script
|
||||
|
||||
```python title='pull.py'
|
||||
#!/usr/bin/env python
|
||||
|
||||
import requests
|
||||
import configparser
|
||||
import os
|
||||
import shutil
|
||||
|
||||
# Read config file
|
||||
config = configparser.ConfigParser()
|
||||
config.read('config.ini')
|
||||
|
||||
url = config['immich']['url']
|
||||
apikey = config['immich']['apikey']
|
||||
photodir = config['immich']['destination']
|
||||
albumname = config['immich']['album']
|
||||
|
||||
headers = {
|
||||
'Accept': 'application/json',
|
||||
'x-api-key': apikey
|
||||
}
|
||||
|
||||
# Set up the directory for the downloaded images
|
||||
os.makedirs(photodir, exist_ok=True)
|
||||
|
||||
# Get the list of albums from the API
|
||||
response = requests.get(url + "/api/album", headers=headers)
|
||||
|
||||
# Parse the JSON response
|
||||
data = response.json()
|
||||
|
||||
# Find the chosen album id
|
||||
for item in data:
|
||||
if item['albumName'] == albumname:
|
||||
albumid = item['id']
|
||||
|
||||
# Get the list of photos from the API using the albumid
|
||||
response = requests.get(url + "/api/album/" + albumid, headers=headers)
|
||||
|
||||
# Parse the JSON response and extract the URLs of the images
|
||||
data = response.json()
|
||||
image_urls = data['assets']
|
||||
|
||||
# Download each image from the URL and save it to the directory
|
||||
headers = {
|
||||
'Accept': 'application/octet-stream',
|
||||
'x-api-key': apikey
|
||||
}
|
||||
|
||||
photolist = []
|
||||
|
||||
for id in image_urls:
|
||||
# Query asset info endpoint for correct extension
|
||||
assetinfourl = url + "/api/asset/" + str(id['id'])
|
||||
response = requests.get(assetinfourl, headers=headers)
|
||||
assetinfo = response.json()
|
||||
ext = os.path.splitext(assetinfo['originalFileName'])
|
||||
|
||||
asseturl = url + "/api/download/asset/" + str(id['id'])
|
||||
response = requests.post(asseturl, headers=headers, stream=True)
|
||||
|
||||
# Build current photo list for deletions below
|
||||
photo = os.path.basename(asseturl) + ext[1]
|
||||
photolist.append(photo)
|
||||
|
||||
photofullpath = photodir + '/' + os.path.basename(asseturl) + ext[1]
|
||||
# Only download file if it doesn't already exist
|
||||
if not os.path.exists(photofullpath):
|
||||
with open(photofullpath, 'wb') as f:
|
||||
for chunk in response.iter_content(1024):
|
||||
f.write(chunk)
|
||||
|
||||
# Delete old photos removed from album
|
||||
for filename in os.listdir(photodir):
|
||||
if filename not in photolist:
|
||||
os.unlink(os.path.join(photodir, filename))
|
||||
```
|
||||
63
docs/docs/guides/custom-locations.md
Normal file
@@ -0,0 +1,63 @@
|
||||
# Files Custom Locations
|
||||
|
||||
This guide explains how to store generated and raw files in different locations using Docker's volume mounts.
|
||||
|
||||
:::caution Backup
|
||||
If you use automatic backup tools, it is important to remember to update their settings after following this guide so that the new paths, especially `profile/`, are backed up.
|
||||
:::
|
||||
|
||||
In our `.env` file, we will define variables that will help us in the future when we want to move to a more advanced server.
|
||||
|
||||
```diff title=".env"
|
||||
# You can find documentation for all the supported env variables at https://immich.app/docs/install/environment-variables
|
||||
|
||||
# Custom location where your uploaded, thumbnails, and transcoded video files are stored
|
||||
- UPLOAD_LOCATION=./library
|
||||
+ UPLOAD_LOCATION=/custom/location/on/your/system/immich/immich_files
|
||||
+ THUMB_LOCATION=/custom/location/on/your/system/immich/thumbs
|
||||
+ ENCODED_VIDEO_LOCATION=/custom/location/on/your/system/immich/encoded-video
|
||||
+ PROFILE_LOCATION=/custom/location/on/your/system/immich/profile
|
||||
...
|
||||
```
|
||||
|
||||
After defining the locations for these files, we will edit the `docker-compose.yml` file accordingly and add the new variables to the `immich-server` and `immich-microservices` containers.
|
||||
|
||||
```diff title="docker-compose.yml"
|
||||
services:
|
||||
immich-server:
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
+ - ${THUMB_LOCATION}:/usr/src/app/upload/thumbs
|
||||
+ - ${ENCODED_VIDEO_LOCATION}:/usr/src/app/upload/encoded-video
|
||||
+ - ${PROFILE_LOCATION}:/usr/src/app/upload/profile
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
|
||||
...
|
||||
|
||||
immich-microservices:
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
+ - ${THUMB_LOCATION}:/usr/src/app/upload/thumbs
|
||||
+ - ${ENCODED_VIDEO_LOCATION}:/usr/src/app/upload/encoded-video
|
||||
+ - ${PROFILE_LOCATION}:/usr/src/app/upload/profile
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
```
|
||||
|
||||
Restart Immich to register the changes.
|
||||
|
||||
```
|
||||
docker compose down
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
:::note
|
||||
Because of the underlying properties of docker bind mounts, it is not recommended to mount the `upload/` and `library/` folders as separate bind mounts if they are on the same device.
|
||||
For this reason, we mount the HDD or network storage to `/usr/src/app/upload` and then mount the folders we want quick access to below this folder.
|
||||
|
||||
The `thumbs/` folder contains both the small thumbnails shown in the timeline, and the larger previews shown when clicking into an image. These cannot be split up.
|
||||
|
||||
The storage metrics of the Immich server will track the storage available at `UPLOAD_LOCATION`,
|
||||
so the administrator should set up some kind of monitoring to make sure the SSD does not run out of space (a simple manual check is shown after this note). The `profile/` folder is much smaller, typically less than 1 MB.
|
||||
:::
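One low-tech way to check the space backing `UPLOAD_LOCATION` (the path below is the example value used earlier in this guide):

```bash
# Show free space on the filesystem that holds UPLOAD_LOCATION
df -h /custom/location/on/your/system/immich/immich_files
```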
|
||||
|
||||
Thanks to [Jrasm91](https://github.com/immich-app/immich/discussions/2110#discussioncomment-5477767) for writing the guide.
|
||||
16
docs/docs/guides/custom-map-styles.md
Normal file
@@ -0,0 +1,16 @@
|
||||
# Create Custom Map Styles for Immich Using Maptiler
|
||||
|
||||
You may decide that you'd like to modify the style document which is used to draw the maps in Immich. This can be done easily using Maptiler, if you do not want to write an entire JSON document by hand.
|
||||
|
||||
## Steps
|
||||
|
||||
1. Create a free account at https://cloud.maptiler.com
|
||||
2. Once logged in, you can either create a brand new map by clicking on **New Map**, selecting a starter map, and then clicking **Customize**, OR by selecting a **Standard Map** and customizing it from there.
|
||||
3. The **editor** interface is self-explanatory. You can change colors, remove visible layers, or add optional layers (e.g., administrative, topo, hydro, etc.) in the composer.
|
||||
4. Once you have your map composed, click on **Save** at the top right. Give it a unique name to save it to your account.
|
||||
5. Next, **Publish** your style using the **Publish** button at the top right. This will deploy it to production, which means it is able to be exposed over the Internet. Maptiler will present an interactive side-by-side map with the original and your changes prior to publication.<br/>
|
||||
6. Maptiler will warn you that changing the map will change it across all apps using the map. Since no apps are using the map yet, this is okay.
|
||||
7. Clicking on the name of your new map at the top left will bring you to the item's **details** page. From here, copy the link to the JSON style under **Use vector style**. This link will automatically contain your personal API key to Maptiler.
|
||||
8. In **Immich**, navigate to **Administration --> Settings --> Map & GPS Settings** and expand the **Map Settings** subsection.
|
||||
9. Paste the link to your JSON style in either the **Light Style** or **Dark Style**. (You can add different styles, which will help make the map style more appropriate depending on whether you set **Immich** to Light or Dark mode.)
|
||||
10. Save your selections. Reload the map, and enjoy your custom map style!
|
||||
@@ -2,48 +2,52 @@
|
||||
|
||||
A short guide on connecting [pgAdmin](https://www.pgadmin.org/) to Immich.
|
||||
|
||||
:::note
|
||||
|
||||
In order to connect to the database, the `immich_postgres` container **must be running**.
|
||||
|
||||
The passwords and usernames used below match the ones specified in the example `.env` file. If changed, please use actual values instead.
|
||||
|
||||
**Optional:** To connect to the database **outside** of your Docker's network:
|
||||
|
||||
- Expose port 5432 in your `docker-compose.yml` file.
|
||||
- Edit the PostgreSQL [`pg_hba.conf`](https://www.postgresql.org/docs/current/auth-pg-hba-conf.html) file.
|
||||
- Make sure your firewall does not block access to port 5432.
|
||||
Note that exposing the database port increases the risk of getting attacked by hackers.
|
||||
Make sure to remove the port binding after you have finished working with the database.

:::

## 1. Install pgAdmin

Download and install [pgAdmin](https://www.pgadmin.org/download/) following the official documentation.
Add a file `docker-compose-pgadmin.yml` next to your `docker-compose.yml` with the following content:

```
name: immich

services:
  pgadmin:
    image: dpage/pgadmin4
    container_name: pgadmin4_container
    restart: always
    ports:
      - "8888:80"
    environment:
      PGADMIN_DEFAULT_EMAIL: user-name@domain-name.com
      PGADMIN_DEFAULT_PASSWORD: strong-password
    volumes:
      - pgadmin-data:/var/lib/pgadmin

volumes:
  pgadmin-data:
```

Change the values of `PGADMIN_DEFAULT_EMAIL` and `PGADMIN_DEFAULT_PASSWORD` in this file.

Run `docker compose -f docker-compose.yml -f docker-compose-pgadmin.yml up` to start Immich along with `pgAdmin`.

## 2. Add a Server

Open pgAdmin and click "Add New Server".
Open [localhost:8888](http://localhost:8888) and log in with the default credentials from above.

<img src={require('./img/add-new-server-option.png').default} width="50%" title="new server option" />
Right-click on `Servers` and click `Register > Server...`, then enter the values below in the `Connection` tab.

## 3. Enter Connection Details
<img src={require('./img/pgadmin-add-new-server.png').default} width="50%" title="new server option" />

| Name                 | Value       |
| -------------------- | ----------- |
| Host name/address    | `localhost` |
| Port                 | `5432`      |
| Maintenance database | `immich`    |
| Username             | `postgres`  |
| Password             | `postgres`  |

:::note
The parameters used here match those specified in the example `.env` file. If you have changed your `.env` file, you'll need to adjust accordingly.
:::

<img src={require('./img/Connection-Pgadmin.png').default} width="75%" title="Connection" />

## 4. Save Connection

| Name                 | Value             |
| -------------------- | ----------------- |
| Host name/address    | `immich_postgres` |
| Port                 | `5432`            |
| Maintenance database | `immich`          |
| Username             | `postgres`        |
| Password             | `postgres`        |

Click on "Save" to connect to the Immich database.

:::tip
View [Database Queries](https://immich.app/docs/guides/database-queries/) for common database queries.
:::

@@ -17,7 +17,7 @@ The `"originalFileName"` column is the name of the file at time of upload, inclu
:::

```sql title="Find by original filename"
SELECT * FROM "assets" WHERE "originalFileName" = 'PXL_20230903_232542848';
SELECT * FROM "assets" WHERE "originalFileName" = 'PXL_20230903_232542848.jpg';
SELECT * FROM "assets" WHERE "originalFileName" LIKE 'PXL_%'; -- all files starting with PXL_
SELECT * FROM "assets" WHERE "originalFileName" LIKE '%_2023_%'; -- all files with _2023_ in the middle
```
@@ -27,25 +27,44 @@ SELECT * FROM "assets" WHERE "originalPath" = 'upload/library/admin/2023/2023-09
SELECT * FROM "assets" WHERE "originalPath" LIKE 'upload/library/admin/2023/%';
```

```sql title="Find by checksum" (sha1)
:::note
You can calculate the checksum for a particular file by using the command `sha1sum <filename>` (see the example after the following queries).
:::

```sql title="Find by checksum (SHA-1)"
SELECT encode("checksum", 'hex') FROM "assets";
SELECT * FROM "assets" WHERE "checksum" = decode('69de19c87658c4c15d9cacb9967b8e033bf74dd1', 'hex');
SELECT * FROM "assets" WHERE "checksum" = '\x69de19c87658c4c15d9cacb9967b8e033bf74dd1'; -- alternate notation
```
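
To get the hex digest used in `decode(...)` above, run `sha1sum` on the file; the filename and digest here are just placeholders:

```bash
# Prints "<sha1-digest>  <filename>"; copy the first column into the query
sha1sum PXL_20230903_232542848.jpg
# 69de19c87658c4c15d9cacb9967b8e033bf74dd1  PXL_20230903_232542848.jpg
```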

```sql title="Live photos"
SELECT * FROM "assets" where "livePhotoVideoId" IS NOT NULL;
SELECT * FROM "assets" WHERE "livePhotoVideoId" IS NOT NULL;
```

```sql title="By description"
SELECT "assets".*, "exif"."description" FROM "exif"
JOIN "assets" ON "assets"."id" = "exif"."assetId"
WHERE TRIM("exif"."description") <> ''; -- all files with a description
SELECT "assets".*, "exif"."description" FROM "exif"
JOIN "assets" ON "assets"."id" = "exif"."assetId"
WHERE "exif"."description" ILIKE '%string to match%'; -- search by string
```

```sql title="Without metadata"
SELECT "assets".* FROM "exif" LEFT JOIN "assets" ON "assets"."id" = "exif"."assetId" WHERE "exif"."assetId" IS NULL;
SELECT "assets".* FROM "exif"
LEFT JOIN "assets" ON "assets"."id" = "exif"."assetId"
WHERE "exif"."assetId" IS NULL;
```

```sql title="size < 100,000 bytes, smallest to largest"
SELECT * FROM "assets" JOIN "exif" ON "assets"."id" = "exif"."assetId" WHERE "exif"."fileSizeInByte" < 100000 ORDER BY "exif"."fileSizeInByte" ASC;
SELECT * FROM "assets"
JOIN "exif" ON "assets"."id" = "exif"."assetId"
WHERE "exif"."fileSizeInByte" < 100000
ORDER BY "exif"."fileSizeInByte" ASC;
```

```sql title="Without thumbnails"
SELECT * FROM "assets" WHERE "assets"."resizePath" IS NULL OR "assets"."webpPath" IS NULL;
SELECT * FROM "assets" WHERE "assets"."previewPath" IS NULL OR "assets"."thumbnailPath" IS NULL;
```

```sql title="By type"
@@ -54,20 +73,14 @@ SELECT * FROM "assets" WHERE "assets"."type" = 'IMAGE';
```

```sql title="Count by type"
SELECT "assets"."type", count(*) FROM "assets" GROUP BY "assets"."type";
SELECT "assets"."type", COUNT(*) FROM "assets" GROUP BY "assets"."type";
```

```sql title="Count by type (per user)"
SELECT
  "users"."email", "assets"."type", COUNT(*)
FROM
  "assets"
JOIN
  "users" ON "assets"."ownerId" = "users"."id"
GROUP BY
  "assets"."type", "users"."email"
ORDER BY
  "users"."email";
SELECT "users"."email", "assets"."type", COUNT(*) FROM "assets"
JOIN "users" ON "assets"."ownerId" = "users"."id"
GROUP BY "assets"."type", "users"."email"
ORDER BY "users"."email";
```

```sql title="Failed file movements"
@@ -76,14 +89,20 @@ SELECT * FROM "move_history";

## Users

```sql title="List"
```sql title="List all users"
SELECT * FROM "users";
```

## System Config

```sql title="Custom settings"
SELECT "key", "value" FROM "system_config";
SELECT "key", "value" FROM "system_metadata" WHERE "key" = 'system-config';
```

(Only used when not using the [config file](/docs/install/config-file))

## Persons

```sql title="Delete person and unset it for the faces it was associated with"
DELETE FROM "person" WHERE "name" = 'PersonNameHere';
```

@@ -6,21 +6,19 @@ in a directory on the same machine.

# Mount the directory into the containers.

Edit `docker-compose.yml` to add two new mount points under `volumes:`
Edit `docker-compose.yml` to add two new mount points in the sections `immich-server:` and `immich-microservices:` under `volumes:`

```
immich-server:
```diff
immich-server:
  volumes:
    - ${EXTERNAL_PATH}:/usr/src/app/external
```
+    - ${EXTERNAL_PATH}:/usr/src/app/external

```
immich-microservices:
immich-microservices:
  volumes:
    - ${EXTERNAL_PATH}:/usr/src/app/external
+    - ${EXTERNAL_PATH}:/usr/src/app/external
```

Be sure to add exactly the same line to both `immich-server:` and `immich-microservices:`.
Be sure to add exactly the same path to both services.

Edit `.env` to define `EXTERNAL_PATH`, substituting in the correct path for your computer:
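
For illustration only (the path below is a placeholder; use the directory that actually contains your external library):

```
EXTERNAL_PATH=/mnt/media/external-library
```
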
Before Width: | Height: | Size: 36 KiB |
Before Width: | Height: | Size: 39 KiB |
BIN
docs/docs/guides/img/immich_map_styles_publish.png
Normal file
After Width: | Height: | Size: 2.4 MiB |
BIN
docs/docs/guides/img/pgadmin-add-new-server.png
Normal file
After Width: | Height: | Size: 68 KiB |
@@ -56,4 +56,4 @@ A remote reverse proxy like [Cloudflare](https://www.cloudflare.com/learning/cdn
### Cons

- Complex configuration
- Depending on your configuration, both the Immich web interface and API may be exposed to the internet. Immich is under very active developement and the existence of severe security vulnerabilities cannot be ruled out.
- Depending on your configuration, both the Immich web interface and API may be exposed to the internet. Immich is under very active development and the existence of severe security vulnerabilities cannot be ruled out.

@@ -4,19 +4,28 @@ To alleviate [performance issues on low-memory systems](/docs/FAQ.mdx#why-is-imm

- Set the URL in Machine Learning Settings on the Admin Settings page to point to the designated ML system, e.g. `http://workstation:3003`.
- Copy the following `docker-compose.yml` to your ML system.
- Start the container by running `docker-compose up -d` or `docker compose up -d` (depending on your Docker version).
- Start the container by running `docker compose up -d`.

:::note Info
:::info
Starting with version v1.93.0, face detection and facial recognition were split. Face detection is now done in the `immich_machine_learning` service, while facial recognition is done in the `immich_microservices` service.
:::

:::note
The [hwaccel.ml.yml](https://github.com/immich-app/immich/releases/latest/download/hwaccel.ml.yml) file also needs to be in the same folder if trying to use [hardware acceleration](/docs/features/ml-hardware-acceleration).
:::

```yaml
version: '3.8'
name: immich_remote_ml

services:
  immich-machine-learning:
    container_name: immich_machine_learning
    # For hardware acceleration, add one of -[armnn, cuda, openvino] to the image tag.
    # Example tag: ${IMMICH_VERSION:-release}-cuda
    image: ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release}
    # extends:
    #   file: hwaccel.ml.yml
    #   service: # set to one of [armnn, cuda, openvino, openvino-wsl] for accelerated inference - use the `-wsl` version for WSL2 where applicable
    volumes:
      - model-cache:/cache
    restart: always