Compare commits
501 Commits
.github/PULL_REQUEST_TEMPLATE/config.yml (new file, +2)
@@ -0,0 +1,2 @@
blank_issues_enabled: false
blank_pull_request_template_enabled: false
.github/PULL_REQUEST_TEMPLATE/pull_request_template.md (new file, +22)
@@ -0,0 +1,22 @@
## Description
<!--- Describe your changes in detail -->
<!--- Why is this change required? What problem does it solve? -->
<!--- If it fixes an open issue, please link to the issue here. -->

Fixes # (issue)


## How Has This Been Tested?

<!-- Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration -->

- [ ] Test A
- [ ] Test B

## Screenshots (if appropriate):


## Checklist:

- [ ] I have performed a self-review of my own code
- [ ] I have made corresponding changes to the documentation if applicable
.github/workflows/build-mobile.yml (2 lines changed)
@@ -45,7 +45,7 @@ jobs:
uses: subosito/flutter-action@v2
with:
channel: "stable"
flutter-version: "3.7.3"
flutter-version: "3.10.0"
cache: true

- name: Create the Keystore
.github/workflows/docker-cleanup.yml (new file, +79)
@@ -0,0 +1,79 @@
# This workflow runs on certain conditions to check for and potentially
# delete container images from the GHCR which no longer have an associated
# code branch.
# Requires a PAT with the correct scope set in the secrets.
#
# This workflow will not trigger runs on forked repos.

name: Cleanup Old Docker Images

on:
pull_request:
types:
- "closed"
push:
paths:
- ".github/workflows/docker-cleanup.yml"

concurrency:
group: registry-tags-cleanup
cancel-in-progress: false

jobs:
cleanup-images:
name: Cleanup Stale Images Tags for ${{ matrix.primary-name }}
runs-on: ubuntu-22.04
strategy:
fail-fast: false
matrix:
include:
- primary-name: "immich-server"
- primary-name: "immich-machine-learning"
- primary-name: "immich-web"
- primary-name: "immich-proxy"
env:
# Requires a personal access token with the OAuth scope delete:packages
TOKEN: ${{ secrets.PACKAGE_DELETE_TOKEN }}
steps:
-
name: Clean temporary images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/ephemeral@v0.1.0
with:
token: "${{ env.TOKEN }}"
owner: "immich-app"
is_org: "true"
do_delete: "true"
package_name: "${{ matrix.primary-name }}"
scheme: "pull_request"
repo_name: "immich"
match_regex: '^pr-(\d+)$|^(\d+)$'

cleanup-untagged-images:
name: Cleanup Untagged Images Tags for ${{ matrix.primary-name }}
runs-on: ubuntu-22.04
needs:
- cleanup-images
strategy:
fail-fast: false
matrix:
include:
- primary-name: "immich-server"
- primary-name: "immich-machine-learning"
- primary-name: "immich-web"
- primary-name: "immich-proxy"
- primary-name: "immich-build-cache"
env:
# Requires a personal access token with the OAuth scope delete:packages
TOKEN: ${{ secrets.PACKAGE_DELETE_TOKEN }}
steps:
-
name: Clean untagged images
if: "${{ env.TOKEN != '' }}"
uses: stumpylog/image-cleaner-action/untagged@v0.1.0
with:
token: "${{ env.TOKEN }}"
owner: "immich-app"
do_delete: "true"
is_org: "true"
package_name: "${{ matrix.primary-name }}"
.github/workflows/docker.yml (9 lines changed)
@@ -13,6 +13,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

permissions:
packages: write

jobs:
build_and_push:
runs-on: ubuntu-latest
@@ -39,10 +42,10 @@ jobs:
uses: actions/checkout@v3

- name: Set up QEMU
uses: docker/setup-qemu-action@v2.1.0
uses: docker/setup-qemu-action@v2.2.0

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2.4.1
uses: docker/setup-buildx-action@v2.7.0
# Workaround to fix error:
# failed to push: failed to copy: io: read/write on closed pipe
# See https://github.com/docker/build-push-action/issues/761
@@ -97,7 +100,7 @@ jobs:
fi

- name: Build and push image
uses: docker/build-push-action@v4.0.0
uses: docker/build-push-action@v4.1.1
with:
context: ${{ matrix.context }}
platforms: ${{ matrix.platforms }}
.github/workflows/github-repo-stats.yml (deleted, -23)
@@ -1,23 +0,0 @@
name: github-repo-stats

on:
schedule:
# Run this once per day, towards the end of the day for keeping the most
# recent data point most meaningful (hours are interpreted in UTC).
- cron: "0 23 * * *"
workflow_dispatch: # Allow for running this manually.

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

jobs:
j1:
name: github-repo-stats
runs-on: ubuntu-latest
steps:
- name: run-ghrs
# Use latest release.
uses: jgehrcke/github-repo-stats@RELEASE
with:
ghtoken: ${{ secrets.GHRS_GITHUB_API_TOKEN }}
.github/workflows/prepare-release.yml (5 lines changed)
@@ -41,8 +41,9 @@ jobs:
id: push-tag
uses: EndBug/add-and-commit@v9
with:
author_name: Immich Release Bot
author_email: bot@immich.app
author_name: Alex The Bot
author_email: alex.tran1502@gmail.com
default_author: user_info
message: "Version ${{ env.IMMICH_VERSION }}"
tag: ${{ env.IMMICH_VERSION }}
push: true
.github/workflows/static_analysis.yml (5 lines changed)
@@ -22,8 +22,8 @@ jobs:
- name: Setup Flutter SDK
uses: subosito/flutter-action@v2
with:
channel: 'stable'
flutter-version: '3.7.3'
channel: "stable"
flutter-version: "3.10.0"

- name: Install dependencies
run: dart pub get
@@ -32,4 +32,3 @@ jobs:
- name: Run dart analyze
run: dart analyze --fatal-infos
working-directory: ./mobile
247
.github/workflows/test.yml
vendored
@@ -12,7 +12,6 @@ concurrency:
|
||||
jobs:
|
||||
e2e-tests:
|
||||
name: Run end-to-end test suites
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
@@ -22,27 +21,91 @@ jobs:
|
||||
- name: Run Immich Server E2E Test
|
||||
run: docker-compose -f ./docker/docker-compose.test.yml --env-file ./docker/.env.test up --abort-on-container-exit --exit-code-from immich-server-test
|
||||
|
||||
server-unit-tests:
|
||||
name: Run server unit test suites and checks
|
||||
doc-tests:
|
||||
name: Run documentation checks
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./docs
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Run tests
|
||||
run: cd server && npm ci && npm run check:all
|
||||
- name: Run npm install
|
||||
run: npm ci
|
||||
|
||||
- name: Run formatter
|
||||
run: npm run format
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run tsc
|
||||
run: npm run check
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
server-unit-tests:
|
||||
name: Run server unit test suites and checks
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./server
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Run npm install
|
||||
run: npm ci
|
||||
|
||||
- name: Run linter
|
||||
run: npm run lint
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run formatter
|
||||
run: npm run format
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run tsc
|
||||
run: npm run check
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run unit tests & coverage
|
||||
run: npm run test:cov
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
web-unit-tests:
|
||||
name: Run web unit test suites and checks
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./web
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Run tests
|
||||
run: cd web && npm ci && npm run check:all
|
||||
- name: Run npm install
|
||||
run: npm ci
|
||||
|
||||
- name: Run linter
|
||||
run: npm run lint
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run formatter
|
||||
run: npm run format
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run svelte checks
|
||||
run: npm run check:svelte
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run tsc
|
||||
run: npm run check:typescript
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run unit tests & coverage
|
||||
run: npm run test:cov
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
mobile-unit-tests:
|
||||
name: Run mobile unit tests
|
||||
@@ -52,11 +115,11 @@ jobs:
|
||||
- name: Setup Flutter SDK
|
||||
uses: subosito/flutter-action@v2
|
||||
with:
|
||||
channel: 'stable'
|
||||
flutter-version: '3.7.3'
|
||||
channel: "stable"
|
||||
flutter-version: "3.10.0"
|
||||
- name: Run tests
|
||||
working-directory: ./mobile
|
||||
run: flutter test
|
||||
run: flutter test -j 1
|
||||
|
||||
generated-api-up-to-date:
|
||||
name: Check generated files are up-to-date
|
||||
@@ -64,7 +127,7 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Run API generation
|
||||
run: cd server && npm ci && npm run api:generate
|
||||
run: npm --prefix server run api:generate
|
||||
- name: Find file changes
|
||||
uses: tj-actions/verify-changed-files@v13.1
|
||||
id: verify-changed-files
|
||||
@@ -99,24 +162,18 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install server dependencies
|
||||
run: |
|
||||
cd server
|
||||
npm ci
|
||||
run: npm --prefix server ci
|
||||
- name: Run existing migrations
|
||||
run: |
|
||||
cd server
|
||||
npm run typeorm:migrations:run
|
||||
run: npm --prefix server run typeorm:migrations:run
|
||||
- name: Generate new migrations
|
||||
continue-on-error: true
|
||||
run: |
|
||||
cd server
|
||||
npm run typeorm:migrations:generate ./libs/infra/src/db/migrations/TestMigration
|
||||
run: npm --prefix server run typeorm:migrations:generate ./src/infra/migrations/TestMigration
|
||||
- name: Find file changes
|
||||
uses: tj-actions/verify-changed-files@v13.1
|
||||
id: verify-changed-files
|
||||
with:
|
||||
files: |
|
||||
server/libs/infra/src/db/migrations/
|
||||
server/src/infra/migrations/
|
||||
- name: Verify files have not changed
|
||||
if: steps.verify-changed-files.outputs.files_changed == 'true'
|
||||
run: |
|
||||
@@ -124,77 +181,77 @@ jobs:
|
||||
echo "Changed files: ${{ steps.verify-changed-files.outputs.changed_files }}"
|
||||
exit 1
|
||||
|
||||
mobile-integration-tests:
|
||||
name: Run mobile end-to-end integration tests
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-java@v3
|
||||
with:
|
||||
distribution: 'zulu'
|
||||
java-version: '12.x'
|
||||
cache: 'gradle'
|
||||
- name: Cache android SDK
|
||||
uses: actions/cache@v3
|
||||
id: android-sdk
|
||||
with:
|
||||
key: android-sdk
|
||||
path: |
|
||||
/usr/local/lib/android/
|
||||
~/.android
|
||||
- name: Cache Gradle
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: |
|
||||
./mobile/build/
|
||||
./mobile/android/.gradle/
|
||||
key: ${{ runner.os }}-flutter-${{ hashFiles('**/*.gradle*', 'pubspec.lock') }}
|
||||
- name: Setup Android SDK
|
||||
if: steps.android-sdk.outputs.cache-hit != 'true'
|
||||
uses: android-actions/setup-android@v2
|
||||
- name: AVD cache
|
||||
uses: actions/cache@v3
|
||||
id: avd-cache
|
||||
with:
|
||||
path: |
|
||||
~/.android/avd/*
|
||||
~/.android/adb*
|
||||
key: avd-29
|
||||
- name: create AVD and generate snapshot for caching
|
||||
if: steps.avd-cache.outputs.cache-hit != 'true'
|
||||
uses: reactivecircus/android-emulator-runner@v2.27.0
|
||||
with:
|
||||
working-directory: ./mobile
|
||||
cores: 2
|
||||
api-level: 29
|
||||
arch: x86_64
|
||||
profile: pixel
|
||||
target: default
|
||||
force-avd-creation: false
|
||||
emulator-options: -no-window -gpu swiftshader_indirect -noaudio -no-boot-anim -camera-back none
|
||||
disable-animations: false
|
||||
script: echo "Generated AVD snapshot for caching."
|
||||
- name: Setup Flutter SDK
|
||||
uses: subosito/flutter-action@v2
|
||||
with:
|
||||
channel: 'stable'
|
||||
flutter-version: '3.7.3'
|
||||
cache: true
|
||||
- name: Run integration tests
|
||||
uses: Wandalen/wretry.action@master
|
||||
with:
|
||||
action: reactivecircus/android-emulator-runner@v2.27.0
|
||||
with: |
|
||||
working-directory: ./mobile
|
||||
cores: 2
|
||||
api-level: 29
|
||||
arch: x86_64
|
||||
profile: pixel
|
||||
target: default
|
||||
force-avd-creation: false
|
||||
emulator-options: -no-snapshot-save -no-window -gpu swiftshader_indirect -noaudio -no-boot-anim -camera-back none
|
||||
disable-animations: true
|
||||
script: |
|
||||
flutter pub get
|
||||
flutter test integration_test
|
||||
attempt_limit: 3
|
||||
# mobile-integration-tests:
|
||||
# name: Run mobile end-to-end integration tests
|
||||
# runs-on: macos-latest
|
||||
# steps:
|
||||
# - uses: actions/checkout@v3
|
||||
# - uses: actions/setup-java@v3
|
||||
# with:
|
||||
# distribution: 'zulu'
|
||||
# java-version: '12.x'
|
||||
# cache: 'gradle'
|
||||
# - name: Cache android SDK
|
||||
# uses: actions/cache@v3
|
||||
# id: android-sdk
|
||||
# with:
|
||||
# key: android-sdk
|
||||
# path: |
|
||||
# /usr/local/lib/android/
|
||||
# ~/.android
|
||||
# - name: Cache Gradle
|
||||
# uses: actions/cache@v3
|
||||
# with:
|
||||
# path: |
|
||||
# ./mobile/build/
|
||||
# ./mobile/android/.gradle/
|
||||
# key: ${{ runner.os }}-flutter-${{ hashFiles('**/*.gradle*', 'pubspec.lock') }}
|
||||
# - name: Setup Android SDK
|
||||
# if: steps.android-sdk.outputs.cache-hit != 'true'
|
||||
# uses: android-actions/setup-android@v2
|
||||
# - name: AVD cache
|
||||
# uses: actions/cache@v3
|
||||
# id: avd-cache
|
||||
# with:
|
||||
# path: |
|
||||
# ~/.android/avd/*
|
||||
# ~/.android/adb*
|
||||
# key: avd-29
|
||||
# - name: create AVD and generate snapshot for caching
|
||||
# if: steps.avd-cache.outputs.cache-hit != 'true'
|
||||
# uses: reactivecircus/android-emulator-runner@v2.27.0
|
||||
# with:
|
||||
# working-directory: ./mobile
|
||||
# cores: 2
|
||||
# api-level: 29
|
||||
# arch: x86_64
|
||||
# profile: pixel
|
||||
# target: default
|
||||
# force-avd-creation: false
|
||||
# emulator-options: -no-window -gpu swiftshader_indirect -noaudio -no-boot-anim -camera-back none
|
||||
# disable-animations: false
|
||||
# script: echo "Generated AVD snapshot for caching."
|
||||
# - name: Setup Flutter SDK
|
||||
# uses: subosito/flutter-action@v2
|
||||
# with:
|
||||
# channel: 'stable'
|
||||
# flutter-version: '3.7.3'
|
||||
# cache: true
|
||||
# - name: Run integration tests
|
||||
# uses: Wandalen/wretry.action@master
|
||||
# with:
|
||||
# action: reactivecircus/android-emulator-runner@v2.27.0
|
||||
# with: |
|
||||
# working-directory: ./mobile
|
||||
# cores: 2
|
||||
# api-level: 29
|
||||
# arch: x86_64
|
||||
# profile: pixel
|
||||
# target: default
|
||||
# force-avd-creation: false
|
||||
# emulator-options: -no-snapshot-save -no-window -gpu swiftshader_indirect -noaudio -no-boot-anim -camera-back none
|
||||
# disable-animations: true
|
||||
# script: |
|
||||
# flutter pub get
|
||||
# flutter test integration_test
|
||||
# attempt_limit: 3
|
||||
|
||||
.gitmodules (new file, +3)
@@ -0,0 +1,3 @@
[submodule "mobile/.isar"]
path = mobile/.isar
url = https://github.com/isar/isar
.vscode/launch.json (9 lines changed)
@@ -9,6 +9,15 @@
"name": "Immich Server",
"remoteRoot": "/usr/src/app",
"localRoot": "${workspaceFolder}/server"
},
{
"type": "node",
"request": "attach",
"restart": true,
"port": 9231,
"name": "Immich Microservices",
"remoteRoot": "/usr/src/app",
"localRoot": "${workspaceFolder}/server"
}
]
}
Makefile (10 lines changed)
@@ -1,17 +1,17 @@
dev:
rm -rf ./server/dist && docker-compose -f ./docker/docker-compose.dev.yml up --remove-orphans
docker-compose -f ./docker/docker-compose.dev.yml up --remove-orphans

dev-new:
rm -rf ./server/dist && docker compose -f ./docker/docker-compose.dev.yml up --remove-orphans
docker compose -f ./docker/docker-compose.dev.yml up --remove-orphans

dev-new-update:
rm -rf ./server/dist && docker compose -f ./docker/docker-compose.dev.yml up --build -V --remove-orphans
docker compose -f ./docker/docker-compose.dev.yml up --build -V --remove-orphans

dev-update:
rm -rf ./server/dist && docker-compose -f ./docker/docker-compose.dev.yml up --build -V --remove-orphans
docker-compose -f ./docker/docker-compose.dev.yml up --build -V --remove-orphans

dev-scale:
rm -rf ./server/dist && docker-compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans
docker-compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans

stage:
docker-compose -f ./docker/docker-compose.staging.yml up --build -V --remove-orphans
@@ -1,17 +0,0 @@ (deleted file)
# Deployment checklist for iOS/Android/Server

[ ] Up version in [mobile/pubspec.yml](/mobile/pubspec.yaml)

[ ] Up version in [docker/docker-compose.yml](/docker/docker-compose.yml) for `immich_server` service

[ ] Up version in [docker/docker-compose.gpu.yml](/docker/docker-compose.gpu.yml) for `immich_server` service

[ ] Up version in [docker/docker-compose.dev.yml](/docker/docker-compose.dev.yml) for `immich_server` service

[ ] Up version in [server/src/constants/server_version.constant.ts](/server/src/constants/server_version.constant.ts)

[ ] Up version in iOS Fastlane [/mobile/ios/fastlane/Fastfile](/mobile/ios/fastlane/Fastfile)

[ ] Add changelog to [Android Fastlane F-droid folder](/mobile/android/fastlane/metadata/android/en-US/changelogs)

All of the version should be the same.
README.md (53 lines changed)
@@ -37,7 +37,6 @@
- [Installation](https://immich.app/docs/install/requirements)
- [Contribution Guidelines](https://immich.app/docs/overview/support-the-project)
- [Support The Project](#support-the-project)
- [Known Issues](#known-issues)

## Documentation

@@ -61,25 +60,31 @@ Spec: Free-tier Oracle VM - Amsterdam - 2.4Ghz quad-core ARM64 CPU, 24GB RAM

# Features

| Features | Mobile | Web |
| ------------------------------------------- | ------ | --- |
| Upload and view videos and photos | Yes | Yes |
| Auto backup when the app is opened | Yes | N/A |
| Selective album(s) for backup | Yes | N/A |
| Download photos and videos to local device | Yes | Yes |
| Multi-user support | Yes | Yes |
| Album and Shared albums | Yes | Yes |
| Scrubbable/draggable scrollbar | Yes | Yes |
| Support RAW (HEIC, HEIF, DNG, Apple ProRaw) | Yes | Yes |
| Metadata view (EXIF, map) | Yes | Yes |
| Search by metadata, objects and image tags | Yes | No |
| Administrative functions (user management) | N/A | Yes |
| Background backup | Yes | N/A |
| Virtual scroll | Yes | Yes |
| OAuth support | Yes | Yes |
| LivePhoto backup and playback | iOS | Yes |
| User-defined storage structure | Yes | Yes |
| Public Sharing | N/A | Yes |
| Features | Mobile | Web |
| -------------------------------------------- | ------ | --- |
| Upload and view videos and photos | Yes | Yes |
| Auto backup when the app is opened | Yes | N/A |
| Selective album(s) for backup | Yes | N/A |
| Download photos and videos to local device | Yes | Yes |
| Multi-user support | Yes | Yes |
| Album and Shared albums | Yes | Yes |
| Scrubbable/draggable scrollbar | Yes | Yes |
| Support RAW (HEIC, HEIF, DNG, Apple ProRaw) | Yes | Yes |
| Metadata view (EXIF, map) | Yes | Yes |
| Search by metadata, objects, faces, and CLIP | Yes | Yes |
| Administrative functions (user management) | No | Yes |
| Background backup | Yes | N/A |
| Virtual scroll | Yes | Yes |
| OAuth support | Yes | Yes |
| API Keys | N/A | Yes |
| LivePhoto backup and playback | iOS | Yes |
| User-defined storage structure | Yes | Yes |
| Public Sharing | No | Yes |
| Archive and Favorites | Yes | Yes |
| Global Map | No | Yes |
| Partner Sharing | Yes | Yes |
| Facial recognition and clustering | No | Yes |
| Offline support | Yes | No |

# Support the project

@@ -96,11 +101,3 @@ If you feel like this is the right cause and the app is something you are seeing
- [Librepay](https://liberapay.com/alex.tran1502/)
- [buymeacoffee](https://www.buymeacoffee.com/altran1502)
- Bitcoin: 1FvEp6P6NM8EZEkpGUFAN2LqJ1gxusNxZX

# Known Issues

## immich-machine-learning fails to start

Symptoms: the container logs `illegal instruction core dump` and restarts

Solution: https://immich.app/docs/install/requirements#hardware
Image changed (before: 1.7 MiB, after: 1.7 MiB)
dev-setup.md (deleted, -32)
@@ -1,32 +0,0 @@
# Development Setup

## Lint / format extensions

Setting these in the IDE give a better developer experience auto-formatting code on save and providing instant feedback on lint issues.

### VSCode
Install Prettier, ESLint and Svelte extensions.

in User `settings.json` (`cmd + shift + p` and search for Open User Settings JSON) add the following:

```json
{
"editor.formatOnSave": true,
"[javascript][typescript][css]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.tabSize": 2,
"editor.formatOnSave": true
},
"[svelte]": {
"editor.defaultFormatter": "svelte.svelte-vscode",
"editor.tabSize": 2
},
"svelte.enable-ts-plugin": true,
"eslint.validate": ["javascript", "svelte"]
}
```

## Running tests / checks

In both server and web:
`npm run check:all`
@@ -10,10 +10,7 @@ REDIS_HOSTNAME=immich-redis-test
# Upload File Config
UPLOAD_LOCATION=./upload

# MAPBOX
## ENABLE_MAPBOX is either true of false -> if true, you have to provide MAPBOX_KEY
ENABLE_MAPBOX=false

# WEB
MAPBOX_KEY=
VITE_SERVER_ENDPOINT=http://localhost:2283/api

TYPESENSE_ENABLED=false
@@ -23,6 +23,7 @@ services:
|
||||
depends_on:
|
||||
- redis
|
||||
- database
|
||||
- typesense
|
||||
|
||||
immich-machine-learning:
|
||||
container_name: immich_machine_learning
|
||||
@@ -34,8 +35,7 @@ services:
|
||||
ports:
|
||||
- 3003:3003
|
||||
volumes:
|
||||
- ../machine-learning/src:/usr/src/app
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
- ../machine-learning/app:/usr/src/app
|
||||
- model-cache:/cache
|
||||
env_file:
|
||||
- .env
|
||||
@@ -52,18 +52,21 @@ services:
|
||||
context: ../server
|
||||
dockerfile: Dockerfile
|
||||
target: builder
|
||||
command: npm run start:dev microservices
|
||||
command: npm run start:debug microservices
|
||||
volumes:
|
||||
- ../server:/usr/src/app
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
- /usr/src/app/node_modules
|
||||
env_file:
|
||||
- .env
|
||||
ports:
|
||||
- 9231:9230
|
||||
environment:
|
||||
- NODE_ENV=development
|
||||
depends_on:
|
||||
- database
|
||||
- immich-server
|
||||
- typesense
|
||||
|
||||
immich-web:
|
||||
container_name: immich_web
|
||||
@@ -89,13 +92,24 @@ services:
|
||||
depends_on:
|
||||
- immich-server
|
||||
|
||||
typesense:
|
||||
container_name: immich_typesense
|
||||
image: typesense/typesense:0.24.1@sha256:9bcff2b829f12074426ca044b56160ca9d777a0c488303469143dd9f8259d4dd
|
||||
environment:
|
||||
- TYPESENSE_API_KEY=${TYPESENSE_API_KEY}
|
||||
- TYPESENSE_DATA_DIR=/data
|
||||
logging:
|
||||
driver: none
|
||||
volumes:
|
||||
- tsdata:/data
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: redis:6.2
|
||||
image: redis:6.2-alpine@sha256:70a7a5b641117670beae0d80658430853896b5ef269ccf00d1827427e3263fa3
|
||||
|
||||
database:
|
||||
container_name: immich_postgres
|
||||
image: postgres:14
|
||||
image: postgres:14-alpine@sha256:28407a9961e76f2d285dc6991e8e48893503cc3836a4755bbc2d40bcc272a441
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
@@ -120,12 +134,12 @@ services:
|
||||
dockerfile: Dockerfile
|
||||
ports:
|
||||
- 2283:8080
|
||||
logging:
|
||||
driver: none
|
||||
depends_on:
|
||||
- immich-server
|
||||
- immich-web
|
||||
restart: always
|
||||
|
||||
volumes:
|
||||
pgdata:
|
||||
model-cache:
|
||||
tsdata:
|
||||
|
||||
@@ -1,4 +1,4 @@
version: '3.8'
version: "3.8"

services:
immich-server-test:
@@ -9,7 +9,7 @@ services:
target: builder
command: npm run test:e2e
expose:
- '3000'
- "3000"
volumes:
- ../server:/usr/src/app
- /usr/src/app/node_modules
@@ -17,6 +17,7 @@ services:
- .env.test
environment:
- NODE_ENV=development
- TYPESENSE_ENABLED=false
depends_on:
- immich-redis-test
- immich-database-test
@@ -24,12 +25,12 @@ services:
- immich-test-network
immich-redis-test:
container_name: immich-redis-test
image: redis:6.2
image: redis:6.2-alpine@sha256:70a7a5b641117670beae0d80658430853896b5ef269ccf00d1827427e3263fa3
networks:
- immich-test-network
immich-database-test:
container_name: immich-database-test
image: postgres:14
image: postgres:14-alpine@sha256:28407a9961e76f2d285dc6991e8e48893503cc3836a4755bbc2d40bcc272a441
env_file:
- .env.test
environment:
@@ -3,62 +3,68 @@ version: "3.8"
|
||||
services:
|
||||
immich-server:
|
||||
container_name: immich_server
|
||||
image: altran1502/immich-server:release
|
||||
entrypoint: [ "/bin/sh", "./start-server.sh" ]
|
||||
image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
|
||||
command: [ "start.sh", "immich" ]
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
- NODE_ENV=production
|
||||
depends_on:
|
||||
- redis
|
||||
- database
|
||||
- typesense
|
||||
restart: always
|
||||
|
||||
immich-microservices:
|
||||
container_name: immich_microservices
|
||||
image: altran1502/immich-server:release
|
||||
entrypoint: [ "/bin/sh", "./start-microservices.sh" ]
|
||||
image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
|
||||
command: [ "start.sh", "microservices" ]
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
- NODE_ENV=production
|
||||
depends_on:
|
||||
- redis
|
||||
- database
|
||||
- typesense
|
||||
restart: always
|
||||
|
||||
immich-machine-learning:
|
||||
container_name: immich_machine_learning
|
||||
image: altran1502/immich-machine-learning:release
|
||||
image: ghcr.io/immich-app/immich-machine-learning:${IMMICH_VERSION:-release}
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
- model-cache:/cache
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
- NODE_ENV=production
|
||||
restart: always
|
||||
|
||||
immich-web:
|
||||
container_name: immich_web
|
||||
image: altran1502/immich-web:release
|
||||
entrypoint: [ "/bin/sh", "./entrypoint.sh" ]
|
||||
image: ghcr.io/immich-app/immich-web:${IMMICH_VERSION:-release}
|
||||
env_file:
|
||||
- .env
|
||||
restart: always
|
||||
|
||||
typesense:
|
||||
container_name: immich_typesense
|
||||
image: typesense/typesense:0.24.1@sha256:9bcff2b829f12074426ca044b56160ca9d777a0c488303469143dd9f8259d4dd
|
||||
environment:
|
||||
- TYPESENSE_API_KEY=${TYPESENSE_API_KEY}
|
||||
- TYPESENSE_DATA_DIR=/data
|
||||
logging:
|
||||
driver: none
|
||||
volumes:
|
||||
- tsdata:/data
|
||||
restart: always
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: redis:6.2
|
||||
image: redis:6.2-alpine@sha256:70a7a5b641117670beae0d80658430853896b5ef269ccf00d1827427e3263fa3
|
||||
restart: always
|
||||
|
||||
database:
|
||||
container_name: immich_postgres
|
||||
image: postgres:14
|
||||
image: postgres:14-alpine@sha256:28407a9961e76f2d285dc6991e8e48893503cc3836a4755bbc2d40bcc272a441
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
@@ -72,19 +78,19 @@ services:
|
||||
|
||||
immich-proxy:
|
||||
container_name: immich_proxy
|
||||
image: altran1502/immich-proxy:release
|
||||
image: ghcr.io/immich-app/immich-proxy:${IMMICH_VERSION:-release}
|
||||
environment:
|
||||
# Make sure these values get passed through from the env file
|
||||
- IMMICH_SERVER_URL
|
||||
- IMMICH_WEB_URL
|
||||
ports:
|
||||
- 2283:8080
|
||||
logging:
|
||||
driver: none
|
||||
depends_on:
|
||||
- immich-server
|
||||
- immich-web
|
||||
restart: always
|
||||
|
||||
volumes:
|
||||
pgdata:
|
||||
model-cache:
|
||||
tsdata:
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
# Database
|
||||
###################################################################################
|
||||
|
||||
# NOTE: The following four database variables support Docker secrets by adding a *_FILE suffix to the variable name
|
||||
# See the docker-compose documentation on secrets for additional details: https://docs.docker.com/compose/compose-file/compose-file-v3/#secrets
|
||||
DB_HOSTNAME=immich_postgres
|
||||
DB_USERNAME=postgres
|
||||
DB_PASSWORD=postgres
|
||||
@@ -16,9 +18,20 @@ DB_DATABASE_NAME=immich
|
||||
|
||||
REDIS_HOSTNAME=immich_redis
|
||||
|
||||
# REDIS_URL will be used to pass custom options to ioredis.
|
||||
# Example for Sentinel
|
||||
# {"sentinels":[{"host":"redis-sentinel-node-0","port":26379},{"host":"redis-sentinel-node-1","port":26379},{"host":"redis-sentinel-node-2","port":26379}],"name":"redis-sentinel"}
|
||||
# REDIS_URL=ioredis://eyJzZW50aW5lbHMiOlt7Imhvc3QiOiJyZWRpcy1zZW50aW5lbDEiLCJwb3J0IjoyNjM3OX0seyJob3N0IjoicmVkaXMtc2VudGluZWwyIiwicG9ydCI6MjYzNzl9XSwibmFtZSI6Im15bWFzdGVyIn0=
|
||||
|
||||
# Optional Redis settings:
|
||||
|
||||
# Note: these parameters are not automatically passed to the Redis Container
|
||||
# to do so, please edit the docker-compose.yml file as well. Redis is not configured
|
||||
# via environment variables, only redis.conf or the command line
|
||||
|
||||
# REDIS_PORT=6379
|
||||
# REDIS_DBINDEX=0
|
||||
# REDIS_USERNAME=
|
||||
# REDIS_PASSWORD=
|
||||
# REDIS_SOCKET=
|
||||
|
||||
@@ -30,6 +43,21 @@ REDIS_HOSTNAME=immich_redis
|
||||
|
||||
UPLOAD_LOCATION=absolute_location_on_your_machine_where_you_want_to_store_the_backup
|
||||
|
||||
|
||||
###################################################################################
|
||||
# Typesense
|
||||
###################################################################################
|
||||
TYPESENSE_API_KEY=some-random-text
|
||||
# TYPESENSE_ENABLED=false
|
||||
# TYPESENSE_URL uses base64 encoding for the nodes json.
|
||||
# Example JSON that was used:
|
||||
# [
|
||||
# { "host": "typesense-1.example.net", "port": "443", "protocol": "https" },
|
||||
# { "host": "typesense-2.example.net", "port": "443", "protocol": "https" },
|
||||
# { "host": "typesense-3.example.net", "port": "443", "protocol": "https" },
|
||||
# ]
|
||||
# TYPESENSE_URL=ha://WwogIHsgImhvc3QiOiAidHlwZXNlbnNlLTEuZXhhbXBsZS5uZXQiLCAicG9ydCI6ICI0NDMiLCAicHJvdG9jb2wiOiAiaHR0cHMiIH0sCiAgeyAiaG9zdCI6ICJ0eXBlc2Vuc2UtMi5leGFtcGxlLm5ldCIsICJwb3J0IjogIjQ0MyIsICJwcm90b2NvbCI6ICJodHRwcyIgfSwKICB7ICJob3N0IjogInR5cGVzZW5zZS0zLmV4YW1wbGUubmV0IiwgInBvcnQiOiAiNDQzIiwgInByb3RvY29sIjogImh0dHBzIiB9Cl0=
|
||||
|
||||
###################################################################################
|
||||
# Reverse Geocoding
|
||||
#
|
||||
@@ -76,4 +104,13 @@ IMMICH_MACHINE_LEARNING_URL=http://immich-machine-learning:3003
|
||||
# Examples: http://localhost:3001, http://immich-api.example.com, etc
|
||||
####################################################################################
|
||||
|
||||
#IMMICH_API_URL_EXTERNAL=http://localhost:3001
|
||||
#IMMICH_API_URL_EXTERNAL=http://localhost:3001
|
||||
|
||||
###################################################################################
|
||||
# Immich Version - Optional
|
||||
#
|
||||
# This allows all immich docker images to be pinned to a specific version. By default,
|
||||
# the version is "release" but could be a specific version, like "v1.59.0".
|
||||
###################################################################################
|
||||
|
||||
#IMMICH_VERSION=
|
||||
|
||||
docs/.prettierignore (new file, +2)
@@ -0,0 +1,2 @@
build/
.docusaurus/
docs/.prettierrc (new file, +6)
@@ -0,0 +1,6 @@
{
"singleQuote": true,
"trailingComma": "all",
"printWidth": 120,
"semi": true
}
@@ -33,9 +33,8 @@ The motion part will now be uploaded and can be played on the mobile app and the
|
||||
src="https://media.giphy.com/media/fTrGceZd7t1ewi8ESc/giphy.gif"
|
||||
width="100%"
|
||||
style={{
|
||||
borderRadius: "10px",
|
||||
boxShadow:
|
||||
"rgba(9, 30, 66, 0.25) 0px 1px 1px, rgba(9, 30, 66, 0.13) 0px 0px 1px 1px",
|
||||
borderRadius: '10px',
|
||||
boxShadow: 'rgba(9, 30, 66, 0.25) 0px 1px 1px, rgba(9, 30, 66, 0.13) 0px 0px 1px 1px',
|
||||
}}
|
||||
title="LivePhoto playback on the web"
|
||||
/>
|
||||
@@ -73,9 +72,8 @@ The web will have the option to sign in with OAuth.
|
||||
width="50%"
|
||||
title="Web Sign in with OAuth"
|
||||
style={{
|
||||
borderRadius: "10px",
|
||||
boxShadow:
|
||||
"rgba(9, 30, 66, 0.25) 0px 1px 1px, rgba(9, 30, 66, 0.13) 0px 0px 1px 1px",
|
||||
borderRadius: '10px',
|
||||
boxShadow: 'rgba(9, 30, 66, 0.25) 0px 1px 1px, rgba(9, 30, 66, 0.13) 0px 0px 1px 1px',
|
||||
}}
|
||||
/>
|
||||
|
||||
@@ -86,9 +84,8 @@ sign-in button.
|
||||
src="https://media.giphy.com/media/3iy3SaNkVYtlkEiw06/giphy.gif"
|
||||
title="Mobile sign in with OAuth"
|
||||
style={{
|
||||
borderRadius: "10px",
|
||||
boxShadow:
|
||||
"rgba(9, 30, 66, 0.25) 0px 1px 1px, rgba(9, 30, 66, 0.13) 0px 0px 1px 1px",
|
||||
borderRadius: '10px',
|
||||
boxShadow: 'rgba(9, 30, 66, 0.25) 0px 1px 1px, rgba(9, 30, 66, 0.13) 0px 0px 1px 1px',
|
||||
}}
|
||||
/>
|
||||
|
||||
@@ -98,9 +95,8 @@ sign-in button.
|
||||
src="https://media.giphy.com/media/LStqgGESXW8XnuCv5y/giphy.gif"
|
||||
width="300"
|
||||
style={{
|
||||
borderRadius: "10px",
|
||||
boxShadow:
|
||||
"rgba(9, 30, 66, 0.25) 0px 1px 1px, rgba(9, 30, 66, 0.13) 0px 0px 1px 1px",
|
||||
borderRadius: '10px',
|
||||
boxShadow: 'rgba(9, 30, 66, 0.25) 0px 1px 1px, rgba(9, 30, 66, 0.13) 0px 0px 1px 1px',
|
||||
}}
|
||||
title="Support the project"
|
||||
/>
|
||||
|
||||
@@ -14,16 +14,49 @@ sidebar_position: 7

### How can I sync an existing directory with Immich's server?

Immich doesn't have the mechanism to sync an existing directory with the server. There is however, a helper CLI tool to help you bulk upload the existing photos and videos to the server. You can find the guide to use the CLI tool [here](/docs/features/bulk-upload.md).
Immich doesn't have two-way synchronization ([yet](https://github.com/immich-app/immich/discussions/1006)), but the [command line tool](/docs/features/bulk-upload.md) can bulk upload items from a directory to Immich.

### Why doesn't Immich watch an existing photo gallery directory?

The initial approach of Immich is to become a backup tool, primarily for mobile device usage. Thus, all the assets must be uploaded from the mobile client. The app was architectured to perform that job well.

### Why does my uploaded photo show up with the wrong date or time in Immich?

When a photo is initially uploaded Immich uses the create date of the file to determine where it belongs in the timeline. After that, background jobs will run that extract [exif metadata](https://en.wikipedia.org/wiki/Exif), including the CreateDate, to provide a more accurate date for the photo. If that is not available it will fallback to the modified date. If you want to ensure your photo has the right date, check the exif metadata before uploading.

If the timezone is incorrect in an uploaded photo, check the `DateTimeOriginal` exif field of the uploaded file. Immich uses the very competent library [exiftool-vendored.js](https://github.com/photostructure/exiftool-vendored.js#dates) to handle timezone parsing, but in some cases (like photos taken with DSLR cameras) it has to fallback on the local timezone. If you are using docker, this fallback will be UTC. (Note that even the photo backup app that can't be named [has the same bug!](https://photo.stackexchange.com/a/126978)) In Immich, it is possible to change this assumed fallback timezone system-wide by setting the timezone in the microservices docker container. You might need to run the "Extract Metadata" job after to effect the change.

As an example, the following modification of `docker-compose.yml` will set the timezone of the microservices container to be `Europe/Stockholm`

```
environment:
- TZ=Europe/Stockholm # <---- Add this line in the microservices config
```

### Why are only photos and not videos being uploaded to Immich?

This often happens when using a reverse proxy or cloudflare tunnel in front of Immich. Make sure to set your reverse proxy to allow large POST requests. In `nginx`, set `client_max_body_size 50000M;` or similar. Cloudflare tunnels are limited to 100 mb file sizes.

### Why is Immich slow on low-memory systems like the Raspberry Pi?

Immich uses optional machine-learning features to enhance search results. This feature, however, can be too heavy to run on a Raspberry Pi. To disable machine learning, comment out the `immich-machine-learning` section of your docker-compose.yml and set `IMMICH_MACHINE_LEARNING_URL=false` in your .env file.

### How to disable machine-learning and TypeSense?

:::warning
Disabling both will result in poor search experience and typesense utilizes CLIP embeddings which are generated by machine-learning.
:::

These features can be disabled by commenting out `immich-typesense` and `immich-machine-learning` sections of the docker-compose.yml and setting `IMMICH_MACHINE_LEARNING_URL=false` & `TYPESENSE_ENABLED=false` in your .env file.

### What happens to existing files after I choose a new [Storage Template](/docs/administration/storage-template.mdx)?

Template changes will only apply to new assets. To retroactively apply the template to previously uploaded assets, run the Storage Migration Job, available on the [Jobs](/docs/administration/jobs.md) page.

### In the uploads folder, why are photos stored in the wrong date?

This is fixed by running the storage migration job.

### Why is object detection not very good?

The model we used for machine learning is a prebuilt model, so the accuracy is not very good. It will hopefully be replaced with a better solution in the future.
@@ -44,6 +77,10 @@ The non-root user/group needs read/write access to the volume mounts, including

The admin password can be reset by running the [reset-admin-password](/docs/administration/server-commands.md) command on the immich-server.

### How can I backup data from Immich?

See [backup and restore](/docs/administration/backup-and-restore.md).

### How can I **purge** data from Immich?

Data for Immich comes in two forms:
@@ -58,3 +95,7 @@ docker-compose down -v
```

After removing the the containers and volumes, the **Files** can be cleaned up (if necessary) from the `UPLOAD_LOCATION` by simply deleting an unwanted files or folders.

### Why iOS app shows duplicate photos on the timeline while the web doesn't?

If you are using `My Photo Stream`, the Photos app temporarily creates duplicates of photos taken in the last 30 days. These photos are included in the `Recents` album and thus shown up twice. To fix this, you can disable `My Photo Stream` in the native Photos app or choose a different album in the backup screen in Immich.
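For reference, the two `.env` settings the FAQ above names would look like the sketch below. This is illustrative only and not part of this diff; the variable names and values are taken directly from the FAQ text.

```
# .env (sketch): disable the optional machine-learning and Typesense features
IMMICH_MACHINE_LEARNING_URL=false
TYPESENSE_ENABLED=false
```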
@@ -1,5 +1,4 @@
{
"label": "Administration",
"position": 4
}

"label": "Administration",
"position": 4
}
docs/docs/administration/backup-and-restore.md (new file, +63)
@@ -0,0 +1,63 @@
# Backup and Restore

## Database

:::info
Refer to the official [postgres documentation](https://www.postgresql.org/docs/current/backup.html) for details about backing up and restoring a postgres database.
:::

The recommended way to backup and restore the Immich database is to use the `pg_dumpall` command.

```bash title='Backup'
docker exec -t immich_postgres pg_dumpall -c -U postgres | gzip > "/path/to/backup/dump.sql.gz"
```

```bash title='Restore'
docker-compose down -v # CAUTION! Deletes all Immich data to start from scratch.
docker-compose pull # Update to latest version of Immich (if desired)
docker-compose create # Create Docker containers for Immich apps without running them.
docker start immich_postgres # Start Postgres server
sleep 10 # Wait for Postgres server to start up
gunzip < "/path/to/backup/dump.sql.gz" | docker exec -i immich_postgres psql -U postgres -d immich # Restore Backup
docker-compose up -d # Start remainder of Immich apps
```

Note that for the database restore to proceed properly, it requires a completely fresh install (i.e. the Immich server has never run since creating the Docker containers). If the Immich app has run, Postgres conflicts may be encountered upon database restoration (relation already exists, violated foreign key constraints, multiple primary keys, etc.).

The database dumps can also be automated (using [this image](https://github.com/prodrigestivill/docker-postgres-backup-local)) by editing the docker compose file to match the following:

```yaml
services:
...
backup:
container_name: immich_db_dumper
image: prodrigestivill/postgres-backup-local
env_file:
- .env
environment:
POSTGRES_HOST: database
POSTGRES_DB: ${DB_DATABASE_NAME}
POSTGRES_USER: ${DB_USERNAME}
POSTGRES_PASSWORD: ${DB_PASSWORD}
SCHEDULE: "@daily"
BACKUP_NUM_KEEP: 7
BACKUP_DIR: /db_dumps
volumes:
- ./db_dumps:/db_dumps
depends_on:
- database
```

Then you can restore with the same command but pointed at the latest dump.

```bash title='Automated Restore'
gunzip < db_dumps/last/immich-latest.sql.gz | docker exec -i immich_postgres psql -U postgres -d immich
```

## Filesystem

Immich stores two types of content in the filesystem: (1) original, unmodified content, and (2) generated content. Only the original content needs to be backed-up, which includes the following folders:

1. `UPLOAD_LOCATION/library`
1. `UPLOAD_LOCATION/upload`
1. `UPLOAD_LOCATION/profile`
docs/docs/administration/reverse-proxy.md (new file, +22)
@@ -0,0 +1,22 @@
# Reverse Proxy

When deploying Immich it is important to understand that a reverse proxy is required in front of the server and web container. The reverse proxy acts as an intermediary between the user and container, forwarding requests to the correct container based on the URL path.

## Default Reverse Proxy

Immich provides a default nginx reverse proxy preconfigured to perform the correct routing and set the necessary headers for the server and web container to use. These headers are crucial to redirect to the correct URL and determine the client's IP address.

## Using a Different Reverse Proxy

While the reverse proxy provided by Immich works well for basic deployments, some users may want to use a different reverse proxy. Fortunately, Immich is flexible enough to accommodate different reverse proxies. Users can either:

1. Add another reverse proxy on top of Immich's reverse proxy
2. Completely replace the default reverse proxy

## Adding a Custom Reverse Proxy

Users can deploy a custom reverse proxy that forwards requests to Immich's reverse proxy. This way, the new reverse proxy can handle TLS termination, load balancing, or other advanced features, while still delegating routing decisions to Immich's reverse proxy. All reverse proxies between Immich and the user must forward all headers and set the `Host`, `X-Forwarded-Host`, `X-Forwarded-Proto` and `X-Forwarded-For` headers to their appropriate values. By following these practices, you ensure that all custom reverse proxies are fully compatible with Immich.

## Replacing the Default Reverse Proxy

Replacing Immich's default reverse proxy is an advanced deployment and support may be limited. When replacing Immich's default proxy it is important to ensure that requests to `/api/*` are routed to the server container and all other requests to the web container. Additionally, the previously mentioned headers should be configured accordingly. You may find our [nginx configuration file](https://github.com/immich-app/immich/blob/main/nginx/templates/default.conf.template) a helpful reference.
@@ -20,7 +20,7 @@ Immich is a full-stack [TypeScript](https://www.typescriptlang.org/) application
|
||||
### Web
|
||||
|
||||
- [SvelteKit](https://kit.svelte.dev/)
|
||||
- [tailwindcss](https://tailwindcss.com/)
|
||||
- [Tailwindcss](https://tailwindcss.com/)
|
||||
|
||||
### Server
|
||||
|
||||
@@ -34,6 +34,7 @@ Immich is a full-stack [TypeScript](https://www.typescriptlang.org/) application
|
||||
|
||||
- [PostgreSQL](https://www.postgresql.org/)
|
||||
- [Redis](https://redis.io/) for job queuing.
|
||||
- [Typesense](https://typesense.org/) for search.
|
||||
|
||||
### Web Server
|
||||
|
||||
|
||||
14
docs/docs/developer/database-migrations.md
Normal file
@@ -0,0 +1,14 @@
|
||||
# Database Migrations
|
||||
|
||||
After making any changes in `server/src/infra/database/entities`, a database migration needs to run in order to register the changes in the database. Follow the steps below to create a new migration.
|
||||
|
||||
1. Run the command
|
||||
|
||||
```bash
|
||||
npm run typeorm:migrations:generate ./src/infra/<migration-name>
|
||||
```
|
||||
|
||||
2. Check if the migration file makes sense.
|
||||
3. Move the migration file to folder `./src/infra/database/migrations` in your code editor.
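
For example (the generated file name below is hypothetical), step 3 amounts to something like:

```bash
# Assuming step 1 produced ./src/infra/1690000000000-AddExampleColumn.ts (hypothetical name)
mv ./src/infra/1690000000000-AddExampleColumn.ts ./src/infra/database/migrations/
```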
|
||||
|
||||
The server will automatically detect `*.ts` file changes and restart. Part of the server start-up process includes running any new migrations, so they will be applied immediately.
|
||||
|
Before Width: | Height: | Size: 242 KiB After Width: | Height: | Size: 570 KiB |
@@ -1,7 +1,17 @@
|
||||
---
|
||||
sidebar_position: 5
|
||||
---
|
||||
|
||||
# Open API
|
||||
|
||||
Immich uses the [Open API](https://swagger.io/specification/) standard to generate API documentation. To view the published docs see [here](/docs/api).
|
||||
|
||||
## Generator
|
||||
|
||||
OpenAPI is used to generate the client (TypeScript, Dart) SDK. `openapi-generator-cli` can be installed [here](https://openapi-generator.tech/docs/installation/). When you add a new endpoint or modify an existing one, you must run the command below to update the client SDK.
|
||||
|
||||
```bash
|
||||
npm run api:generate # Run from the `server/` directory
|
||||
```
|
||||
|
||||
You can find the generated client SDK in `web/src/api` for the TypeScript SDK and `mobile/openapi` for the Dart SDK.
|
||||
|
||||
:::tip
|
||||
This can also be run via `make api` from the project root directory (not in the `server` folder)
|
||||
:::
|
||||
|
||||
@@ -1,16 +1,8 @@
|
||||
---
|
||||
sidebar_position: 3
|
||||
---
|
||||
|
||||
# Contributing
|
||||
|
||||
Contributions are welcome!
|
||||
|
||||
## PR Checklist
|
||||
# PR Checklist
|
||||
|
||||
When contributing code through a pull request, please check the following:
|
||||
|
||||
### Web Checks
|
||||
## Web Checks
|
||||
|
||||
- [ ] `npm run lint` (linting via ESLint)
|
||||
- [ ] `npm run format` (formatting via Prettier)
|
||||
@@ -21,7 +13,7 @@ When contributing code through a pull request, please check the following:
|
||||
Run all web checks with `npm run check:all`
|
||||
:::
|
||||
|
||||
### Server Checks
|
||||
## Server Checks
|
||||
|
||||
- [ ] `npm run lint` (linting via ESLint)
|
||||
- [ ] `npm run format` (formatting via Prettier)
|
||||
@@ -32,12 +24,10 @@ Run all web checks with `npm run check:all`
|
||||
Run all server checks with `npm run check:all`
|
||||
:::
|
||||
|
||||
### Open API
|
||||
## Open API
|
||||
|
||||
The Open API client libraries need to be regenerated whenever there are changes to the `immich-openapi-specs.json` file.
|
||||
The Open API client libraries need to be regenerated whenever there are changes to the `immich-openapi-specs.json` file. See [Open API](/docs/developer/open-api.md) for more details.
|
||||
|
||||
- [ ] `npm run api:generate`
|
||||
## Database Migrations
|
||||
|
||||
:::tip
|
||||
This can also be run via `make api` from the project root directory (not in the `server` folder)
|
||||
:::
|
||||
A database migration needs to be generated whenever there are changes to `server/src/infra/src/entities`. See [Database Migration](/docs/developer/database-migrations.md) for more details.
|
||||
@@ -10,9 +10,9 @@ sidebar_position: 2
|
||||
|
||||
This environment includes the following services:
|
||||
|
||||
- Core server - `/server/apps/immich`
|
||||
- Core server - `/server/src/immich`
|
||||
- Machine learning - `/machine-learning`
|
||||
- Microservices - `/server/apps/microservicess`
|
||||
- Microservices - `/server/src/microservices`
|
||||
- Web app - `/web`
|
||||
- Redis
|
||||
- PostgreSQL development database with exposed port `5432` so you can use any database client to access it
|
||||
@@ -43,6 +43,18 @@ The mobile app `(/mobile)` will required Flutter toolchain to be installed on yo
|
||||
|
||||
Please refer to the [Flutter's official documentation](https://flutter.dev/docs/get-started/install) for more information on setting up the toolchain on your machine.
|
||||
|
||||
### Connect to a remote backend
|
||||
|
||||
If you only want to do web development connected to an existing, remote backend, follow these steps:
|
||||
|
||||
1. Enter the web directory - `cd web/`
|
||||
2. Install web dependencies - `npm i`
|
||||
3. Start the web development server
|
||||
|
||||
```
|
||||
PUBLIC_IMMICH_SERVER_URL=https://demo.immich.app/api npm run dev
|
||||
```
|
||||
|
||||
## IDE setup
|
||||
|
||||
### Lint / format extensions
|
||||
@@ -80,27 +92,3 @@ in User `settings.json` (`cmd + shift + p` and search for `Open User Settings JS
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## OpenAPI generator
|
||||
|
||||
OpenAPI is used to generate the client (Typescript, Dart) SDK. `openapi-generator-cli` can be installed [here](https://openapi-generator.tech/docs/installation/). When you add a new or modify an existing endpoint, you must run the command below to update the client SDK.
|
||||
|
||||
```bash
|
||||
npm run api:generate # Run from the `server` directory
|
||||
```
|
||||
|
||||
You can find the generated client SDK in the `web/src/api` for Typescript SDK and `mobile/openapi` for Dart SDK.
|
||||
|
||||
## Database migrations
|
||||
|
||||
After making any changes in the `server/libs/database/src/entities`, a database migration need to run in order to register the changes in the database. Follow the steps below to create a new migration.
|
||||
|
||||
1. Attached to the server container shell.
|
||||
2. Run
|
||||
|
||||
```bash
|
||||
npm run typeorm -- migration:generate ./libs/infra/src/db/<migration-name> -d ./libs/infra/src/db/config/database.config.ts
|
||||
```
|
||||
|
||||
3. Check if the migration file makes sense.
|
||||
4. Move the migration file to folder `server/libs/database/src/migrations` in your code editor.
|
||||
|
||||
@@ -14,17 +14,19 @@ Background backup is available thanks to the contribution effort of [@zoodyy](ht
|
||||
|
||||
If background backup is enabled, the app will periodically check if there are any new photos or videos in the selected album(s) to be uploaded to the cloud. If there are, it will upload them in the background.
|
||||
|
||||
|
||||
:::info Note
|
||||
|
||||
#### General
|
||||
|
||||
- The app must be in the background for the backup worker to start running.
|
||||
- If you reopen the app and the first page you see is the backup page, the counts will not reflect the background uploaded result. You have to navigate out of the page and come back to see the updated counts.
|
||||
|
||||
#### Android
|
||||
|
||||
- It is a well-known problem that some Android models are very strict with battery optimization settings, which can cause a problem with the background worker. Please visit [Don't kill my app](https://dontkillmyapp.com/) for a guide on disabling this setting on your phone.
|
||||
|
||||
#### iOS
|
||||
|
||||
- You must enable **Background App Refresh** for the app to work in the background. You can enable it in the Settings app under General > Background App Refresh.
|
||||
|
||||
<div style={{textAlign: 'center'}}>
|
||||
|
||||
@@ -17,25 +17,32 @@ npm i -g immich
|
||||
|
||||
## Quick Start
|
||||
|
||||
Specify user's credentials, Immich's server address and port, and the directory you would like to upload videos/photos from.
|
||||
Specify the user's credentials, Immich's server address and port, and the directory you would like to upload videos/photos from.
|
||||
|
||||
```bash
|
||||
immich upload --key HFEJ38DNSDUEG --server http://192.168.1.216:2283/api -d your/target/directory
|
||||
```
|
||||
immich upload --key HFEJ38DNSDUEG --server http://192.168.1.216:2283/api file1.jpg file2.jpg
|
||||
```
|
||||
|
||||
By default, subfolders are not included. To upload a directory including its subfolders, use the `--recursive` option:
|
||||
|
||||
```
|
||||
immich upload --key HFEJ38DNSDUEG --server http://192.168.1.216:2283/api --recursive directory/
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Parameters
|
||||
### Options
|
||||
|
||||
| Parameter | Description |
|
||||
| ---------------- | ------------------------------------------------------------------- |
|
||||
| --yes / -y | Assume yes on all interactive prompts |
|
||||
| --recursive / -r | Include subfolders |
|
||||
| --delete / -da | Delete local assets after upload |
|
||||
| --key / -k | User's API key |
|
||||
| --server / -s | Immich's server address |
|
||||
| --directory / -d | Directory to upload from |
|
||||
| --threads / -t | Number of threads to use (Default 5) |
|
||||
| --album / -al | Create albums for assets based on the parent folder or a given name |
|
||||
| --import / -i | Import gallery |
|
||||
|
||||
### Obtain the API Key
|
||||
|
||||
@@ -43,7 +50,6 @@ The API key can be obtained in the user setting panel on the web interface.
|
||||
|
||||

|
||||
|
||||
|
||||
### Run via Docker
|
||||
|
||||
You can run the CLI inside a Docker container to avoid needing to install anything.
|
||||
@@ -54,27 +60,29 @@ Be aware that as this runs inside a container, you need to mount the folder from
|
||||
|
||||
```bash title="Upload current directory"
|
||||
cd /DIRECTORY/WITH/IMAGES
|
||||
docker run -it --rm -v $(pwd):/import ghcr.io/immich-app/immich-cli:latest upload --key HFEJ38DNSDUEG --server http://192.168.1.216:2283/api
|
||||
docker run -it --rm -v "$(pwd):/import" ghcr.io/immich-app/immich-cli:latest upload --key HFEJ38DNSDUEG --server http://192.168.1.216:2283/api
|
||||
```
|
||||
|
||||
```bash title="Upload target directory"
|
||||
docker run -it --rm -v /DIRECTORY/WITH/IMAGES:/import ghcr.io/immich-app/immich-cli:latest upload --key HFEJ38DNSDUEG --server http://192.168.1.216:2283/api
|
||||
docker run -it --rm -v "/DIRECTORY/WITH/IMAGES:/import" ghcr.io/immich-app/immich-cli:latest upload --key HFEJ38DNSDUEG --server http://192.168.1.216:2283/api
|
||||
```
|
||||
|
||||
```bash title="Create an alias"
|
||||
alias immich="docker run -it --rm -v $(pwd):/import ghcr.io/immich-app/immich-cli:latest"
|
||||
alias immich='docker run -it --rm -v "$(pwd):/import" ghcr.io/immich-app/immich-cli:latest'
|
||||
immich upload --key HFEJ38DNSDUEG --server http://192.168.1.216:2283/api
|
||||
```
|
||||
|
||||
:::tip Internal networking
|
||||
If you are running the CLI container on the same machine as your Immich server, you may not be able to reach the external address. In that case, try the following steps:
|
||||
|
||||
1. Find the internal Docker network used by Immich via `docker network ls`.
|
||||
2. Adapt the above command to pass the `--network <immich_network>` argument to `docker run`, substituting `<immich_network>` with the result from step 1.
|
||||
3. Use `--server http://immich-server:3001/` for the upload command instead of the external address.
|
||||
3. Use `--server http://immich-server:3001` for the upload command instead of the external address.
|
||||
|
||||
```bash title="Upload to internal address"
|
||||
docker run --network immich_default -it --rm -v $(pwd):/import ghcr.io/immich-app/immich-cli:latest upload --key HFEJ38DNSDUEG --server http://immich-server:3001/
|
||||
docker run --network immich_default -it --rm -v "$(pwd):/import" ghcr.io/immich-app/immich-cli:latest upload --key HFEJ38DNSDUEG --server http://immich-server:3001
|
||||
```
|
||||
|
||||
:::
|
||||
|
||||
### Run from source
|
||||
@@ -92,5 +100,5 @@ npm run build
|
||||
```
|
||||
|
||||
```bash title="Run the command"
|
||||
node bin/index.js upload --key HFEJ38DNSDUEG --server http://192.168.1.216:2283/api -d your/target/directory
|
||||
node bin/index.js upload --key HFEJ38DNSDUEG --server http://192.168.1.216:2283/api --recursive your/asset/directory
|
||||
```
|
||||
|
||||
15
docs/docs/features/facial-recognition.md
Normal file
@@ -0,0 +1,15 @@
|
||||
# Facial Recognition
|
||||
|
||||
Immich recognizes faces in your photos and videos and groups them together. You can then assign names to the faces and search for them.
|
||||
|
||||
The list of people is shown in the Explore page.
|
||||
|
||||
<img src={require('./img/facial-recognition-1.png').default} title='Facial Recognition 1' />
|
||||
|
||||
Upon clicking on a person, a list of assets that contain their face will be shown.
|
||||
|
||||
<img src={require('./img/facial-recognition-2.png').default} title='Facial Recognition 2' />
|
||||
|
||||
The asset detail view will also show the faces that are recognized in the asset.
|
||||
|
||||
<img src={require('./img/facial-recognition-3.png').default} title='Facial Recognition 3' />
|
||||
BIN
docs/docs/features/img/facial-recognition-1.png
Normal file
|
After Width: | Height: | Size: 524 KiB |
BIN
docs/docs/features/img/facial-recognition-2.png
Normal file
|
After Width: | Height: | Size: 2.6 MiB |
BIN
docs/docs/features/img/facial-recognition-3.png
Normal file
|
After Width: | Height: | Size: 1.6 MiB |
BIN
docs/docs/features/img/me.png
Normal file
|
After Width: | Height: | Size: 64 KiB |
BIN
docs/docs/features/img/my-wife.png
Normal file
|
After Width: | Height: | Size: 64 KiB |
BIN
docs/docs/features/img/partner-sharing-1.png
Normal file
|
After Width: | Height: | Size: 84 KiB |
BIN
docs/docs/features/img/partner-sharing-2.png
Normal file
|
After Width: | Height: | Size: 56 KiB |
BIN
docs/docs/features/img/partner-sharing-3.png
Normal file
|
After Width: | Height: | Size: 183 KiB |
BIN
docs/docs/features/img/search-ex-2.webp
Normal file
|
After Width: | Height: | Size: 162 KiB |
BIN
docs/docs/features/img/search-ex-3.webp
Normal file
|
After Width: | Height: | Size: 59 KiB |
BIN
docs/docs/features/img/sidecar-jobs.png
Normal file
|
After Width: | Height: | Size: 16 KiB |
BIN
docs/docs/features/img/xmp-sidecars.png
Normal file
|
After Width: | Height: | Size: 8.5 KiB |
17
docs/docs/features/partner-sharing.md
Normal file
@@ -0,0 +1,17 @@
|
||||
# Partner Sharing
|
||||
|
||||
Immich allows you to share your library with other users. They can then view your library and download the assets.
|
||||
|
||||
You can give one or multiple users access to your library from the [User Settings](docs/features/user-settings.md) page.
|
||||
|
||||
<img src={require('./img/partner-sharing-1.png').default} title='Partner Sharing 1' />
|
||||
|
||||
<img src={require('./img/partner-sharing-2.png').default} title='Partner Sharing 2' />
|
||||
|
||||
Accessing the shared library can be done from the Sharing page.
|
||||
|
||||
<img src={require('./img/partner-sharing-3.png').default} title='Partner Sharing 3' />
|
||||
|
||||
:::tip Sharing specific assets
|
||||
For sharing a specific set of assets, you can use the shared album feature of Immich.
|
||||
:::
|
||||
103
docs/docs/features/read-only-gallery.md
Normal file
@@ -0,0 +1,103 @@
|
||||
# Read-only Gallery [Experimental]
|
||||
|
||||
## Overview
|
||||
|
||||
This feature enables users to use an existing gallery without uploading the assets to Immich.
|
||||
|
||||
Once the file information is synced, Immich will read it to generate the supported files.
|
||||
|
||||
:::caution
|
||||
|
||||
This feature is still in an experimental stage. It is an initial implementation and will receive improvements in the future.
|
||||
|
||||
The current limitations of this feature are:
|
||||
|
||||
- Assets are not automatically synced and must instead be manually synced with the CLI tool.
|
||||
- Only new files that are added to the gallery will be detected.
|
||||
- Deleted and moved files will not be detected.
|
||||
|
||||
:::
|
||||
|
||||
## Usage
|
||||
|
||||
:::tip Example scenario
|
||||
|
||||
On the VM/system where Immich is running, I have two galleries that I want to use with Immich.
|
||||
|
||||
- My gallery is stored at `/mnt/media/precious-memory`
|
||||
- My wife's gallery is stored at `/mnt/media/childhood-memory`
|
||||
|
||||
We will use those values in the steps below.
|
||||
|
||||
:::
|
||||
|
||||
### Mount the gallery to the containers.
|
||||
|
||||
The `immich-server` and `immich-microservices` containers will need access to the gallery. Mount the directory path as in the example below.
|
||||
|
||||
```diff title="docker-compose.yml"
|
||||
immich-server:
|
||||
container_name: immich_server
|
||||
image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
|
||||
command: [ "start.sh", "immich" ]
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
+ - /mnt/media/precious-memory:/mnt/media/precious-memory
|
||||
+ - /mnt/media/childhood-memory:/mnt/media/childhood-memory
|
||||
env_file:
|
||||
- .env
|
||||
depends_on:
|
||||
- redis
|
||||
- database
|
||||
- typesense
|
||||
restart: always
|
||||
|
||||
immich-microservices:
|
||||
container_name: immich_microservices
|
||||
image: ghcr.io/immich-app/immich-server:${IMMICH_VERSION:-release}
|
||||
command: [ "start.sh", "microservices" ]
|
||||
volumes:
|
||||
- ${UPLOAD_LOCATION}:/usr/src/app/upload
|
||||
+ - /mnt/media/precious-memory:/mnt/media/precious-memory
|
||||
+ - /mnt/media/childhood-memory:/mnt/media/childhood-memory
|
||||
env_file:
|
||||
- .env
|
||||
depends_on:
|
||||
- redis
|
||||
- database
|
||||
- typesense
|
||||
restart: always
|
||||
```
|
||||
|
||||
:::tip
|
||||
The internal and external paths have to be identical.
|
||||
:::
|
||||
|
||||
_Remember to bring the container down/up to register the changes. Make sure you can see the mounted path in the container._
|
||||
|
||||
### Register the path for the user.
|
||||
|
||||
This action is done by the admin of the instance.
|
||||
|
||||
- Navigate to `Administration > Users` page on the web.
|
||||
- Click on the user edit button.
|
||||
- Add the gallery path to the `External Path` field for the corresponding user and confirm the changes.
|
||||
|
||||
<img src={require('./img/me.png').default} width='33%' title='My Account Storage Path' />
|
||||
|
||||
<img src={require('./img/my-wife.png').default} width='33%' title='My Wifes Account Storage Path' />
|
||||
|
||||
### Sync with the CLI tool.
|
||||
|
||||
- Install or update the [CLI Tool](/docs/features/bulk-upload.md). The import feature is supported from version `v0.39.0` of the CLI.
|
||||
- Run the command below to sync the gallery with Immich.
|
||||
|
||||
```bash title="Import my gallery"
|
||||
immich upload --key <my-api-key> --server http://my-server-ip:2283/api /mnt/media/precious-memory --recursive --import
|
||||
```
|
||||
|
||||
```bash title="Import my wife gallery"
|
||||
immich upload --key <my-wife-api-key> --server http://my-server-ip:2283/api /mnt/media/childhood-memory --recursive --import
|
||||
```
|
||||
|
||||
The `--import` flag will tell Immich to import the files by path instead of uploading them.
|
||||
@@ -4,7 +4,7 @@ Immich supports [Reverse Geocoding](https://en.wikipedia.org/wiki/Reverse_geocod
|
||||
|
||||
## Extraction
|
||||
|
||||
During Exif Extraction, assets with latitudes and longitudes are reverse geocoded to determine their City, State, and Country.
|
||||
During Exif Extraction, assets with latitudes and longitudes are reverse geocoded to determine their City, State, and Country.
|
||||
|
||||
## Usage
|
||||
|
||||
|
||||
@@ -1,16 +1,20 @@
|
||||
# Search
|
||||
|
||||
:::warning Work In Progress
|
||||
Search is work-in-progress and subject to change. Stay tuned!
|
||||
:::
|
||||
Immich uses Typesense as the primary search database to enable a high-performance search mechanism.
|
||||
|
||||
## Search by Place
|
||||
Typesense is a powerful search engine that can be integrated with popular natural language processing (NLP) models like CLIP and SBERT to provide highly accurate and relevant search results. Here are some benefits of using Typesense integrated search for CLIP and SBERT:
|
||||
|
||||
:::info
|
||||
Searching is currently only implemented in the [Mobile App](/docs/features/mobile-app.mdx)
|
||||
:::
|
||||
Improved Search Accuracy: Typesense uses a combination of indexing, querying, and ranking algorithms to quickly and accurately retrieve relevant search results. When integrated with CLIP and SBERT, Typesense can leverage the semantic understanding and deep learning capabilities of these models to further improve the accuracy of search results.
|
||||
|
||||
Searching by the name of a city, state, or country is possible for assets with geolocation data and successful [Reverse Geocoding](/docs/features/reverse-geocoding.md).
|
||||
Faster Search Response Times: Typesense is optimized for lightning-fast search response times, making it ideal for applications that require near-instantaneous search results. By integrating with CLIP and SBERT, Typesense can reduce the time required to process complex search queries, making it even faster and more efficient.
|
||||
|
||||
<img src={require('./img/reverse-geocoding-mobile1.png').default} width='33%' title='Reverse Geocoding' />
|
||||
<img src={require('./img/reverse-geocoding-mobile2.png').default} width='33%' title='Reverse Geocoding' />
|
||||
Enhanced Semantic Search Capabilities: CLIP and SBERT are powerful NLP models that can extract the semantic meaning from text, enabling more nuanced search queries. By integrating with Typesense, these models can help to improve the accuracy of semantic search, enabling users to find the most relevant results based on the true meaning of their query.
|
||||
|
||||
Greater Search Flexibility: Typesense provides flexible search capabilities, including fuzzy search and partial search, enabling users to find the information they need quickly and easily. When integrated with CLIP and SBERT, Typesense can offer even greater flexibility, allowing users to refine their search queries using natural language and providing more accurate and relevant results.
|
||||
|
||||
(Generated by Chat-GPT4)
|
||||
|
||||
Some search examples:
|
||||
<img src={require('./img/search-ex-2.webp').default} title='Search Example 1' />
|
||||
|
||||
<img src={require('./img/search-ex-3.webp').default} title='Search Example 2' />
|
||||
|
||||
@@ -10,7 +10,7 @@ View your User ID and email, and update your first and last name.
|
||||
|
||||
## Change Password
|
||||
|
||||
Users can change their own passwords.
|
||||
Users can change their own passwords.
|
||||
|
||||

|
||||
|
||||
|
||||
13
docs/docs/features/xmp-sidecars.md
Normal file
@@ -0,0 +1,13 @@
|
||||
# XMP Sidecars
|
||||
|
||||
Immich can ingest XMP sidecars on file upload (via the CLI) as well as detect new sidecars that are placed in the filesystem for existing images.
|
||||
|
||||
<img src={require('./img/xmp-sidecars.png').default} title='XMP sidecars' />
|
||||
|
||||
XMP sidecars are external XML files that contain metadata related to media files. Many applications read and write these files either exclusively or in addition to the metadata written to image files. They can be a powerful tool for editing and storing metadata of a media file without modifying the media file itself. When Immich receives or detects an XMP sidecar for a media file, it will attempt to extract the metadata from both the sidecar as well as the media file. It will prioritize the metadata for fields in the sidecar but will fall back and use the metadata in the media file if necessary.
|
||||
|
||||
When importing files via the CLI bulk uploader, Immich will automatically detect XMP sidecar files as files that exist next to the original media file and have the exact same name with an additional `.xmp` file extension (i.e., `PXL_20230401_203352928.MP.jpg` and `PXL_20230401_203352928.MP.jpg.xmp`).
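
As a quick illustration of the naming convention (run in a folder of your own media; a sketch, not an Immich command), this lists the JPEGs that already have a matching sidecar:

```bash
# Print each .jpg in the current folder that has a sidecar named <file>.jpg.xmp next to it
for f in *.jpg; do
  [ -e "$f.xmp" ] && echo "$f -> $f.xmp"
done
```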
|
||||
|
||||
There are 2 administrator jobs associated with sidecar files: `SYNC` and `DISCOVER`. The sync job will re-scan all media with existing sidecar files and queue them for a metadata refresh. This is a great use case when third-party applications are used to modify the metadata of media. The discover job will attempt to scan the filesystem for new sidecar files for all media that does not currently have a sidecar file associated with it.
|
||||
|
||||
<img src={require('./img/sidecar-jobs.png').default} title='Sidecar Administrator Jobs' />
|
||||
@@ -8,9 +8,14 @@ Docker Compose is the recommended method to run Immich in production. Below are
|
||||
|
||||
### Step 1 - Download the required files
|
||||
|
||||
Download [`docker-compose.yml`][compose-file] [`example.env`][env-file].
|
||||
Create a directory of your choice (e.g. `./immich-app`) to hold the `docker-compose.yml` and `.env` files.
|
||||
|
||||
From a directory of your choice (e.g. `./immich-app`) run the following commands:
|
||||
```bash title="Move to the directory you created"
|
||||
mkdir ./immich-app
|
||||
cd ./immich-app
|
||||
```
|
||||
|
||||
Download [`docker-compose.yml`][compose-file] and [`example.env`][env-file], either by running the following commands:
|
||||
|
||||
```bash title="Get docker-compose.yml file"
|
||||
wget https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml
|
||||
@@ -20,6 +25,10 @@ wget https://github.com/immich-app/immich/releases/latest/download/docker-compos
|
||||
wget -O .env https://github.com/immich-app/immich/releases/latest/download/example.env
|
||||
```
|
||||
|
||||
or by downloading from your browser and moving the files to the directory that you created.
|
||||
|
||||
Note: If you downloaded the files from your browser, also ensure that you rename `example.env` to `.env`.
|
||||
|
||||
### Step 2 - Populate the .env file with custom values
|
||||
|
||||
<details>
|
||||
@@ -46,29 +55,43 @@ DB_DATABASE_NAME=immich
|
||||
REDIS_HOSTNAME=immich_redis
|
||||
|
||||
# Optional Redis settings:
|
||||
|
||||
# Note: these parameters are not automatically passed to the Redis Container
|
||||
# to do so, please edit the docker-compose.yml file as well. Redis is not configured
|
||||
# via environment variables, only redis.conf or the command line
|
||||
|
||||
# REDIS_PORT=6379
|
||||
# REDIS_DBINDEX=0
|
||||
# REDIS_PASSWORD=
|
||||
# REDIS_SOCKET=
|
||||
|
||||
###################################################################################
|
||||
# Upload File Config
|
||||
# Upload File Location
|
||||
#
|
||||
# This is the location where uploaded files are stored.
|
||||
###################################################################################
|
||||
|
||||
UPLOAD_LOCATION=absolute_location_on_your_machine_where_you_want_to_store_the_backup
|
||||
|
||||
|
||||
###################################################################################
|
||||
# Log message level - [simple|verbose]
|
||||
###################################################################################
|
||||
|
||||
LOG_LEVEL=simple
|
||||
|
||||
###################################################################################
|
||||
# Typesense
|
||||
###################################################################################
|
||||
# TYPESENSE_ENABLED=false
|
||||
TYPESENSE_API_KEY=some-random-text
|
||||
# TYPESENSE_HOST: typesense
|
||||
# TYPESENSE_PORT: 8108
|
||||
# TYPESENSE_PROTOCOL: http
|
||||
|
||||
###################################################################################
|
||||
# Reverse Geocoding
|
||||
####################################################################################
|
||||
|
||||
# DISABLE_REVERSE_GEOCODING=false
|
||||
|
||||
#
|
||||
# Reverse geocoding is done locally which has a small impact on memory usage
|
||||
# This memory usage can be altered by changing the REVERSE_GEOCODING_PRECISION variable
|
||||
# This ranges from 0-3 with 3 being the most precise
|
||||
@@ -76,28 +99,67 @@ LOG_LEVEL=simple
|
||||
# 2 - Cities > 1000 population: ~150MB RAM
|
||||
# 1 - Cities > 5000 population: ~80MB RAM
|
||||
# 0 - Cities > 15000 population: ~40MB RAM
|
||||
####################################################################################
|
||||
|
||||
# DISABLE_REVERSE_GEOCODING=false
|
||||
# REVERSE_GEOCODING_PRECISION=3
|
||||
|
||||
####################################################################################
|
||||
# WEB - Optional
|
||||
#
|
||||
# Custom message on the login page, should be written in HTML form.
|
||||
# For example:
|
||||
# PUBLIC_LOGIN_PAGE_MESSAGE="This is a demo instance of Immich.<br><br>Email: <i>demo@demo.de</i><br>Password: <i>demo</i>"
|
||||
####################################################################################
|
||||
|
||||
# Custom message on the login page, should be written in HTML form.
|
||||
# For example PUBLIC_LOGIN_PAGE_MESSAGE="This is a demo instance of Immich.<br><br>Email: <i>demo@demo.de</i><br>Password: <i>demo</i>"
|
||||
|
||||
PUBLIC_LOGIN_PAGE_MESSAGE="My Family Photos and Videos Backup Server"
|
||||
|
||||
####################################################################################
|
||||
# Alternative Service Addresses - Optional
|
||||
#
|
||||
# This is an advanced feature for users who may be running their immich services on different hosts.
|
||||
# It will not change which address or port that services bind to within their containers, but it will change where other services look for their peers.
|
||||
# Note: immich-microservices is bound to 3002, but no references are made
|
||||
####################################################################################
|
||||
|
||||
IMMICH_WEB_URL=http://immich-web:3000
|
||||
IMMICH_SERVER_URL=http://immich-server:3001
|
||||
IMMICH_MACHINE_LEARNING_URL=http://immich-machine-learning:3003
|
||||
|
||||
####################################################################################
|
||||
# Alternative API's External Address - Optional
|
||||
#
|
||||
# This is an advanced feature used to control the public server endpoint returned to clients during Well-known discovery.
|
||||
# You should only use this if you want mobile apps to access the immich API over a custom URL. Do not include trailing slash.
|
||||
# NOTE: At this time, the web app will not be affected by this setting and will continue to use the relative path: /api
|
||||
# Examples: http://localhost:3001, http://immich-api.example.com, etc
|
||||
####################################################################################
|
||||
|
||||
#IMMICH_API_URL_EXTERNAL=http://localhost:3001
|
||||
|
||||
###################################################################################
|
||||
# Immich Version - Optional
|
||||
#
|
||||
# This allows all immich docker images to be pinned to a specific version. By default,
|
||||
# the version is "release" but could be a specific version, like "v1.59.0".
|
||||
###################################################################################
|
||||
|
||||
#IMMICH_VERSION=
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
- Populate custom database information if necessary.
|
||||
- Populate `UPLOAD_LOCATION` with your preferred location for storing backup assets.
|
||||
- Consider changing `DB_PASSWORD` to something randomly generated
|
||||
- Consider changing `TYPESENSE_API_KEY` to something randomly generated
|
||||
|
||||
### Step 3 - Start the containers
|
||||
|
||||
From the directory you created in Step 1 (which should now contain your customized `docker-compose.yml` and `.env` files), run `docker-compose up -d`.
|
||||
|
||||
```bash title="Start the containers using docker compose command"
|
||||
docker-compose up -d # or `docker compose up -d` based on your docker-compose version
|
||||
docker-compose up -d # or `docker compose up -d` based on your docker-compose version
|
||||
```
|
||||
|
||||
:::tip
|
||||
@@ -106,10 +168,12 @@ For more information on how to use the application, please refer to the [Post In
|
||||
|
||||
### Step 4 - Upgrading
|
||||
|
||||
If `IMMICH_VERSION` is set, it will need to be updated to the latest or desired version.
|
||||
|
||||
When a new version of Immich is [released](https://github.com/immich-app/immich/releases), the application can be upgraded with the following commands, run in the directory with the `docker-compose.yml` file:
|
||||
|
||||
```bash title="Upgrade Immich"
|
||||
docker-compose pull && docker-compose up -d # Or `docker compose`
|
||||
docker-compose pull && docker-compose up -d # Or `docker compose up -d`
|
||||
```
|
||||
|
||||
:::caution Automatic Updates
|
||||
|
||||
186
docs/docs/install/environment-variables.md
Normal file
@@ -0,0 +1,186 @@
|
||||
# Environment Variables
|
||||
|
||||
## Docker Compose
|
||||
|
||||
| Variable | Description | Default | Services |
|
||||
| :---------------- | :-------------------- | :-------: | :------------------------------------------------------------- |
|
||||
| `IMMICH_VERSION` | Image tags | `release` | server, microservices, machine learning, web, proxy, typesense |
|
||||
| `UPLOAD_LOCATION` | Host Path for uploads | | server, microservices |
|
||||
|
||||
:::tip
|
||||
|
||||
These environment variables are used by the `docker-compose.yml` file and do **NOT** affect the containers directly.
|
||||
|
||||
:::
|
||||
|
||||
## General
|
||||
|
||||
| Variable | Description | Default | Services |
|
||||
| :-------------------------- | :------------------------------------------- | :----------: | :------------------------------------------- |
|
||||
| `TZ` | Timezone | | microservices |
|
||||
| `NODE_ENV` | Environment (production, development) | `production` | server, microservices, machine learning, web |
|
||||
| `LOG_LEVEL` | Log Level (verbose, debug, log, warn, error) | `log` | server, microservices |
|
||||
| `IMMICH_MEDIA_LOCATION` | Media Location | `./upload` | server, microservices |
|
||||
| `PUBLIC_LOGIN_PAGE_MESSAGE` | Public Login Page Message | | web |
|
||||
|
||||
:::tip
|
||||
|
||||
`TZ` is only used by the `exiftool` as a fallback in case the timezone cannot be determined from the image metadata.
|
||||
|
||||
`exiftool` is only present in the microservices container.
|
||||
|
||||
:::
|
||||
|
||||
## Geocoding
|
||||
|
||||
| Variable | Description | Default | Services |
|
||||
| :--------------------------------- | :---------------------------------- | :--------------------------: | :------------ |
|
||||
| `DISABLE_REVERSE_GEOCODING` | Disable Reverse Geocoding Precision | `false` | microservices |
|
||||
| `REVERSE_GEOCODING_PRECISION` | Reverse Geocoding Precision | `3` | microservices |
|
||||
| `REVERSE_GEOCODING_DUMP_DIRECTORY` | Reverse Geocoding Dump Directory | `./.reverse-geocoding-dump/` | microservices |
|
||||
|
||||
## Ports
|
||||
|
||||
| Variable | Description | Default | Services |
|
||||
| :---------------------- | :-------------------- | :-----: | :--------------- |
|
||||
| `PORT` | Web Port | `3000` | web |
|
||||
| `SERVER_PORT` | Server Port | `3001` | server |
|
||||
| `MICROSERVICES_PORT` | Microservices Port | `3002` | microservices |
|
||||
| `MACHINE_LEARNING_PORT` | Machine Learning Port | `3003` | machine learning |
|
||||
|
||||
## URLs
|
||||
|
||||
| Variable | Description | Default | Services |
|
||||
| :---------------------------- | :------------------------------------------------------- | :-----------------------------------: | :-------------------- |
|
||||
| `IMMICH_WEB_URL` | Immich Web URL | `http://immich-web:3000` | proxy |
|
||||
| `IMMICH_SERVER_URL` | Immich Server URL | `http://immich-server:3001` | web, proxy |
|
||||
| `IMMICH_MACHINE_LEARNING_URL` | Immich Machine Learning URL, set `"false"` to disable ML | `http://immich-machine-learning:3003` | server, microservices |
|
||||
| `PUBLIC_IMMICH_SERVER_URL` | Public Immich URL | `http://immich-server:3001` | web |
|
||||
| `IMMICH_API_URL_EXTERNAL` | Immich API URL External | `/api` | web |
|
||||
|
||||
:::info
|
||||
|
||||
The above paths are modifying the internal paths of the containers.
|
||||
|
||||
:::
|
||||
|
||||
## Database
|
||||
|
||||
| Variable | Description | Default | Services |
|
||||
| :------------ | :---------------- | :---------: | :-------------------- |
|
||||
| `DB_URL` | Database URL | | server, microservices |
|
||||
| `DB_HOSTNAME` | Database Host | `localhost` | server, microservices |
|
||||
| `DB_PORT` | Database Port | `5432` | server, microservices |
|
||||
| `DB_USERNAME` | Database User | `postgres` | server, microservices |
|
||||
| `DB_PASSWORD` | Database Password | `postgres` | server, microservices |
|
||||
| `DB_DATABASE` | Database Name | `immich` | server, microservices |
|
||||
|
||||
:::info
|
||||
|
||||
When `DB_URL` is defined, the other database (`DB_*`) variables are ignored.
|
||||
|
||||
:::
|
||||
|
||||
## Redis
|
||||
|
||||
| Variable | Description | Default | Services |
|
||||
| :--------------- | :------------- | :------------: | :-------------------- |
|
||||
| `REDIS_URL` | Redis URL | | server, microservices |
|
||||
| `REDIS_HOSTNAME` | Redis Host | `immich_redis` | server, microservices |
|
||||
| `REDIS_PORT` | Redis Port | `6379` | server, microservices |
|
||||
| `REDIS_DBINDEX` | Redis DB Index | `0` | server, microservices |
|
||||
| `REDIS_USERNAME` | Redis Username | | server, microservices |
|
||||
| `REDIS_PASSWORD` | Redis Password | | server, microservices |
|
||||
| `REDIS_SOCKET` | Redis Socket | | server, microservices |
|
||||
|
||||
:::info
|
||||
|
||||
`REDIS_URL` must start with `ioredis://` and then include a `base64` encoded JSON string for the configuration.
|
||||
More info can be found in the upstream [ioredis](https://ioredis.readthedocs.io/en/latest/API/) documentation.
|
||||
|
||||
- When `REDIS_URL` is defined, the other redis (`REDIS_*`) variables are ignored.
|
||||
- When `REDIS_SOCKET` is defined, the other redis (`REDIS_*`) variables are ignored.
|
||||
|
||||
:::
|
||||
|
||||
Redis (Sentinel) URL example JSON before encoding:
|
||||
|
||||
```json
|
||||
{
|
||||
"sentinels": [
|
||||
{
|
||||
"host": "redis-sentinel-node-0",
|
||||
"port": 26379
|
||||
},
|
||||
{
|
||||
"host": "redis-sentinel-node-1",
|
||||
"port": 26379
|
||||
},
|
||||
{
|
||||
"host": "redis-sentinel-node-2",
|
||||
"port": 26379
|
||||
}
|
||||
],
|
||||
"name": "redis-sentinel"
|
||||
}
|
||||
```
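
As a sketch of how such a value could be produced (this assumes the value is simply the base64-encoded JSON appended to the `ioredis://` prefix; verify against your Immich version):

```bash
# Encode the sentinel JSON from above and prepend the ioredis:// scheme
REDIS_JSON='{"sentinels":[{"host":"redis-sentinel-node-0","port":26379},{"host":"redis-sentinel-node-1","port":26379},{"host":"redis-sentinel-node-2","port":26379}],"name":"redis-sentinel"}'
echo "REDIS_URL=ioredis://$(echo -n "$REDIS_JSON" | base64 | tr -d '\n')"
```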
|
||||
|
||||
## Typesense
|
||||
|
||||
| Variable | Description | Default | Services |
|
||||
| :------------------- | :----------------------- | :---------: | :------------------------------- |
|
||||
| `TYPESENSE_ENABLED` | Enable Typesense | | server, microservices |
|
||||
| `TYPESENSE_URL` | Typesense URL | | server, microservices |
|
||||
| `TYPESENSE_HOST` | Typesense Host | `typesense` | server, microservices |
|
||||
| `TYPESENSE_PORT` | Typesense Port | `8108` | server, microservices |
|
||||
| `TYPESENSE_PROTOCOL` | Typesense Protocol | `http` | server, microservices |
|
||||
| `TYPESENSE_API_KEY` | Typesense API Key | | server, microservices, typesense |
|
||||
| `TYPESENSE_DATA_DIR` | Typesense Data Directory | `/data` | typesense |
|
||||
|
||||
:::info
|
||||
|
||||
`TYPESENSE_URL` must start with `ha://` and then include a `base64` encoded JSON string for the configuration.
|
||||
|
||||
`TYPESENSE_ENABLED`: Anything other than `false` behaves as `true`.
|
||||
Even undefined is treated as `true`.
|
||||
|
||||
- When `TYPESENSE_URL` is defined, the other typesense (`TYPESENSE_*`) variables are ignored.
|
||||
|
||||
:::
|
||||
|
||||
Typesense URL example JSON before encoding:
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"host": "typesense-1.example.net",
|
||||
"port": "443",
|
||||
"protocol": "https"
|
||||
},
|
||||
{
|
||||
"host": "typesense-2.example.net",
|
||||
"port": "443",
|
||||
"protocol": "https"
|
||||
},
|
||||
{
|
||||
"host": "typesense-3.example.net",
|
||||
"port": "443",
|
||||
"protocol": "https"
|
||||
}
|
||||
]
|
||||
```
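
The same encoding idea applies to `TYPESENSE_URL` with the `ha://` prefix (again an assumption to verify against your version; the node list is shortened here):

```bash
# Encode a (shortened) node list into a TYPESENSE_URL value
TYPESENSE_JSON='[{"host":"typesense-1.example.net","port":"443","protocol":"https"}]'
echo "TYPESENSE_URL=ha://$(echo -n "$TYPESENSE_JSON" | base64 | tr -d '\n')"
```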
|
||||
|
||||
## Machine Learning
|
||||
|
||||
| Variable | Description | Default | Services |
|
||||
| :------------------------------------------ | :----------------------------- | :-------------------: | :--------------- |
|
||||
| `MACHINE_LEARNING_MIN_FACE_SCORE` | Minimum Face Score | `0.7` | machine learning |
|
||||
| `MACHINE_LEARNING_MODEL_TTL` | Model TTL | `300` | machine learning |
|
||||
| `MACHINE_LEARNING_EAGER_STARTUP` | Eager Startup | `true` | machine learning |
|
||||
| `MACHINE_LEARNING_MIN_TAG_SCORE` | Minimum Tag Score | `0.9` | machine learning |
|
||||
| `MACHINE_LEARNING_FACIAL_RECOGNITION_MODEL` | Facial Recognition Model | `buffalo_l` | machine learning |
|
||||
| `MACHINE_LEARNING_CLIP_TEXT_MODEL` | Clip Text Model | `clip-ViT-B-32` | machine learning |
|
||||
| `MACHINE_LEARNING_CLIP_IMAGE_MODEL` | Clip Image Model | `clip-ViT-B-32` | machine learning |
|
||||
| `MACHINE_LEARNING_CLASSIFICATION_MODEL` | Classification Model | `microsoft/resnet-50` | machine learning |
|
||||
| `MACHINE_LEARNING_CACHE_FOLDER` | ML Cache Location | `/cache` | machine learning |
|
||||
| `TRANSFORMERS_CACHE` | ML Transformers Cache Location | `/cache` | machine learning |
|
||||
@@ -4,12 +4,12 @@ sidebar_position: 40
|
||||
|
||||
# Kubernetes
|
||||
|
||||
You can deploy Immich on Kubernetes using [the official Helm chart](https://github.com/immich-app/immich-charts/tree/main/charts/apps/immich).
|
||||
You can deploy Immich on Kubernetes using [the official Helm chart](https://github.com/immich-app/immich-charts/tree/main/charts/immich).
|
||||
|
||||
If you want examples of how other people run Immich on Kubernetes, using the official chart or otherwise, you can find them at https://nanne.dev/k8s-at-home-search/#/immich.
|
||||
|
||||
:::caution DNS in Alpine containers
|
||||
Immich makes use of Alpine container images. These can encounter [a DNS resolution bug](https://stackoverflow.com/a/65593511) on Kubernetes clusters if the host
|
||||
Immich makes use of Alpine container images. These can encounter [a DNS resolution bug](https://stackoverflow.com/a/65593511) on Kubernetes clusters if the host
|
||||
nodes have a search domain set, like:
|
||||
|
||||
```
|
||||
@@ -18,7 +18,7 @@ search home.lan
|
||||
nameserver 192.168.1.1
|
||||
```
|
||||
|
||||
When you encounter this bug, it will cause the immich-microservices to crash on startup because it cannot download
|
||||
the geocoder data. This can be solved in one of two ways: Either reconfigure your nodes to remove the searchdomain from
|
||||
When you encounter this bug, it will cause the immich-microservices to crash on startup because it cannot download
|
||||
the geocoder data. This can be solved in one of two ways: Either reconfigure your nodes to remove the searchdomain from
|
||||
`resolv.conf`, or set the `DISABLE_REVERSE_GEOCODING` environment variable for Immich to `true` to disable the geocoder.
|
||||
:::
|
||||
:::
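
If you choose the environment-variable route, one possible sketch for an already-running cluster is shown below (the deployment name `immich-microservices` is an assumption; adjust it to whatever your chart created, and prefer setting the value through the chart's values so it survives upgrades):

```bash
# Disable the geocoder on the microservices workload (deployment name is hypothetical)
kubectl set env deployment/immich-microservices DISABLE_REVERSE_GEOCODING=true
```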
|
||||
|
||||
@@ -13,37 +13,36 @@ Install Immich using Portainer's Stack feature.
|
||||
5. Replace `.env` with `stack.env` for all containers that need to use environment variables in the web editor.
|
||||
|
||||
<img
|
||||
src={require('./img/dot-env.png').default}
|
||||
width="50%"
|
||||
style={{border: '1px solid #ddd'}}
|
||||
alt="Dot Env Example"
|
||||
src={require('./img/dot-env.png').default}
|
||||
width="50%"
|
||||
style={{border: '1px solid #ddd'}}
|
||||
alt="Dot Env Example"
|
||||
/>
|
||||
|
||||
8. Click on "**Advanced Mode**" in the **Environment Variables** section.
|
||||
|
||||
<img
|
||||
src={require('./img/env-1.png').default}
|
||||
width="50%"
|
||||
style={{border: '1px solid #ddd'}}
|
||||
alt="Dot Env Example"
|
||||
src={require('./img/env-1.png').default}
|
||||
width="50%"
|
||||
style={{border: '1px solid #ddd'}}
|
||||
alt="Dot Env Example"
|
||||
/>
|
||||
|
||||
9. Copy the content of the `example.env` file from the [GitHub repository](https://github.com/immich-app/immich/releases/latest/download/example.env) and paste into the editor.
|
||||
10. Switch back to "**Simple Mode**".
|
||||
|
||||
<img
|
||||
src={require('./img/env-2.png').default}
|
||||
width="50%"
|
||||
style={{border: '1px solid #ddd'}}
|
||||
alt="Dot Env Example"
|
||||
src={require('./img/env-2.png').default}
|
||||
width="50%"
|
||||
style={{border: '1px solid #ddd'}}
|
||||
alt="Dot Env Example"
|
||||
/>
|
||||
|
||||
* Populate custom database information if necessary.
|
||||
* Populate `UPLOAD_LOCATION` with your preferred location for storing backup assets.
|
||||
- Populate custom database information if necessary.
|
||||
- Populate `UPLOAD_LOCATION` with your preferred location for storing backup assets.
|
||||
|
||||
11. Click on "**Deploy the stack**".
|
||||
|
||||
|
||||
:::tip
|
||||
For more information on how to use the application, please refer to the [Post Installation](/docs/install/post-install.mdx) guide.
|
||||
:::
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
sidebar_position: 10
|
||||
---
|
||||
|
||||
|
||||
# Requirements
|
||||
|
||||
Hardware and software requirements for Immich
|
||||
|
||||
## Software
|
||||
@@ -18,5 +18,5 @@ You can also use Podman to run the application. However, additional configuratio
|
||||
## Hardware
|
||||
|
||||
- **OS**: Preferred unix-based operating system (Ubuntu, Debian, MacOS, etc). Windows works too, with [Docker Desktop on Windows](https://docs.docker.com/desktop/install/windows-install/)
|
||||
- **RAM**: At least 2GB, preferred 4GB.
|
||||
- **RAM**: At least 4GB, preferred 6GB.
|
||||
- **CPU**: At least 2 cores, preferred 4 cores.
|
||||
|
||||
@@ -5,6 +5,7 @@ sidebar_position: 60
|
||||
# Unraid
|
||||
|
||||
Immich can easily be installed and updated on Unraid via:
|
||||
|
||||
1. [Docker Compose Manager](https://forums.unraid.net/topic/114415-plugin-docker-compose-manager/) plugin from the Unraid Community Apps
|
||||
2. Community made template on the Unraid Community Apps
|
||||
|
||||
@@ -20,7 +21,7 @@ In order to install Immich from the Unraid CA, you will need an existing Redis a
|
||||
|
||||
Once you have Redis and PostgreSQL running, search for Immich on the Unraid CA, choose either of the templates listed and fill out the example variables.
|
||||
|
||||
For more information about setting up the community image see [here](https://github.com/imagegenius/docker-immich#application-setup)
|
||||
For more information about setting up the community image see [here](https://github.com/imagegenius/docker-immich#application-setup)
|
||||
|
||||
## Docker-Compose Method (Official)
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ sidebar_position: 1
|
||||
|
||||
# Introduction
|
||||
|
||||
<img src={require('./img/feature-panel.png').default} alt='Immich' />
|
||||
<img src={require('./img/feature-panel.png').default} alt="Immich" />
|
||||
|
||||
## Welcome!
|
||||
|
||||
|
||||
@@ -18,7 +18,6 @@ If you feel like this is the right cause and the app is something you see yourse
|
||||
- [buymeacoffee](https://www.buymeacoffee.com/altran1502)
|
||||
- Bitcoin: 1FvEp6P6NM8EZEkpGUFAN2LqJ1gxusNxZX
|
||||
|
||||
|
||||
## Contributing
|
||||
|
||||
There are lots of non-monetary ways to contribute to Immich as well.
|
||||
|
||||
@@ -1,70 +1,70 @@
|
||||
// @ts-check
|
||||
// Note: type annotations allow type checking and IDEs autocompletion
|
||||
|
||||
const lightCodeTheme = require("prism-react-renderer/themes/github");
|
||||
const darkCodeTheme = require("prism-react-renderer/themes/dracula");
|
||||
const lightCodeTheme = require('prism-react-renderer/themes/github');
|
||||
const darkCodeTheme = require('prism-react-renderer/themes/dracula');
|
||||
|
||||
/** @type {import('@docusaurus/types').Config} */
|
||||
const config = {
|
||||
title: "Immich",
|
||||
tagline:
|
||||
"High performance self-hosted photo and video backup solution directly from your mobile phone",
|
||||
url: "https://documentation.immich.app",
|
||||
baseUrl: "/",
|
||||
onBrokenLinks: "throw",
|
||||
onBrokenMarkdownLinks: "warn",
|
||||
favicon: "img/favicon.png",
|
||||
title: 'Immich',
|
||||
tagline: 'High performance self-hosted photo and video backup solution directly from your mobile phone',
|
||||
url: 'https://documentation.immich.app',
|
||||
baseUrl: '/',
|
||||
onBrokenLinks: 'throw',
|
||||
onBrokenMarkdownLinks: 'warn',
|
||||
favicon: 'img/favicon.png',
|
||||
|
||||
// GitHub pages deployment config.
|
||||
// If you aren't using GitHub pages, you don't need these.
|
||||
organizationName: "immich-app", // Usually your GitHub org/user name.
|
||||
projectName: "immich", // Usually your repo name.
|
||||
deploymentBranch: "main",
|
||||
organizationName: 'immich-app', // Usually your GitHub org/user name.
|
||||
projectName: 'immich', // Usually your repo name.
|
||||
deploymentBranch: 'main',
|
||||
// Even if you don't use internalization, you can use this field to set useful
|
||||
// metadata like html lang. For example, if your site is Chinese, you may want
|
||||
// to replace "en" with "zh-Hans".
|
||||
i18n: {
|
||||
defaultLocale: "en",
|
||||
locales: ["en"],
|
||||
defaultLocale: 'en',
|
||||
locales: ['en'],
|
||||
},
|
||||
|
||||
plugins: [
|
||||
async function myPlugin(context, options) {
|
||||
return {
|
||||
name: "docusaurus-tailwindcss",
|
||||
name: 'docusaurus-tailwindcss',
|
||||
configurePostCss(postcssOptions) {
|
||||
// Appends TailwindCSS and AutoPrefixer.
|
||||
postcssOptions.plugins.push(require("tailwindcss"));
|
||||
postcssOptions.plugins.push(require("autoprefixer"));
|
||||
postcssOptions.plugins.push(require('tailwindcss'));
|
||||
postcssOptions.plugins.push(require('autoprefixer'));
|
||||
return postcssOptions;
|
||||
},
|
||||
};
|
||||
},
|
||||
require.resolve('docusaurus-lunr-search'),
|
||||
],
|
||||
presets: [
|
||||
[
|
||||
"docusaurus-preset-openapi",
|
||||
'docusaurus-preset-openapi',
|
||||
/** @type {import('docusaurus-preset-openapi').Options} */
|
||||
({
|
||||
docs: {
|
||||
showLastUpdateAuthor: true,
|
||||
showLastUpdateTime: true,
|
||||
|
||||
sidebarPath: require.resolve("./sidebars.js"),
|
||||
sidebarPath: require.resolve('./sidebars.js'),
|
||||
// Please change this to your repo.
|
||||
// Remove this to remove the "edit this page" links.
|
||||
editUrl: "https://github.com/immich-app/immich/tree/main/docs/",
|
||||
editUrl: 'https://github.com/immich-app/immich/tree/main/docs/',
|
||||
},
|
||||
api: {
|
||||
path: "../server/immich-openapi-specs.json",
|
||||
routeBasePath: "/docs/api",
|
||||
path: '../server/immich-openapi-specs.json',
|
||||
routeBasePath: '/docs/api',
|
||||
},
|
||||
// blog: {
|
||||
// showReadingTime: true,
|
||||
// editUrl: "https://github.com/immich-app/immich/tree/main/docs/",
|
||||
// },
|
||||
theme: {
|
||||
customCss: require.resolve("./src/css/custom.css"),
|
||||
customCss: require.resolve('./src/css/custom.css'),
|
||||
},
|
||||
}),
|
||||
],
|
||||
@@ -74,13 +74,13 @@ const config = {
|
||||
/** @type {import('@docusaurus/preset-classic').ThemeConfig} */
|
||||
({
|
||||
colorMode: {
|
||||
defaultMode: "dark",
|
||||
defaultMode: 'dark',
|
||||
},
|
||||
announcementBar: {
|
||||
id: "site_announcement_immich",
|
||||
id: 'site_announcement_immich',
|
||||
content: `⚠️ The project is under <strong>very active</strong> development. Expect bugs and changes. Do not use it as <strong>the only way</strong> to store your photos and videos!`,
|
||||
backgroundColor: "#593f00",
|
||||
textColor: "#ffefc9",
|
||||
backgroundColor: '#593f00',
|
||||
textColor: '#ffefc9',
|
||||
isCloseable: false,
|
||||
},
|
||||
docs: {
|
||||
@@ -90,72 +90,63 @@ const config = {
|
||||
},
|
||||
navbar: {
|
||||
logo: {
|
||||
alt: "Immich University Logo",
|
||||
src: "img/color-logo.png",
|
||||
srcDark: "img/logo.png",
|
||||
alt: 'Immich University Logo',
|
||||
src: 'img/color-logo.png',
|
||||
srcDark: 'img/logo.png',
|
||||
},
|
||||
items: [
|
||||
{
|
||||
to: "/docs/overview/introduction",
|
||||
position: "right",
|
||||
label: "Docs",
|
||||
to: '/docs/overview/introduction',
|
||||
position: 'right',
|
||||
label: 'Docs',
|
||||
},
|
||||
{
|
||||
to: "/docs/api",
|
||||
position: "right",
|
||||
label: "API",
|
||||
to: '/docs/api',
|
||||
position: 'right',
|
||||
label: 'API',
|
||||
},
|
||||
{
|
||||
href: "https://github.com/immich-app/immich",
|
||||
label: "GitHub",
|
||||
position: "right",
|
||||
},
|
||||
{
|
||||
href: "https://github.com/orgs/immich-app/projects/1",
|
||||
label: "Roadmap",
|
||||
position: "right",
|
||||
href: 'https://github.com/immich-app/immich',
|
||||
label: 'GitHub',
|
||||
position: 'right',
|
||||
},
|
||||
],
|
||||
},
|
||||
footer: {
|
||||
style: "light",
|
||||
style: 'light',
|
||||
links: [
|
||||
{
|
||||
title: "Overview",
|
||||
title: 'Overview',
|
||||
items: [
|
||||
{
|
||||
label: "Welcome",
|
||||
to: "/docs/overview/introduction",
|
||||
label: 'Welcome',
|
||||
to: '/docs/overview/introduction',
|
||||
},
|
||||
{
|
||||
label: "Installation",
|
||||
to: "/docs/install/requirements",
|
||||
label: 'Installation',
|
||||
to: '/docs/install/requirements',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
title: "Community",
|
||||
title: 'Community',
|
||||
items: [
|
||||
{
|
||||
label: "Discord",
|
||||
href: "https://discord.com/invite/D8JsnBEuKb",
|
||||
label: 'Discord',
|
||||
href: 'https://discord.com/invite/D8JsnBEuKb',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
title: "Links",
|
||||
title: 'Links',
|
||||
items: [
|
||||
// {
|
||||
// label: "Blog",
|
||||
// to: "/blog",
|
||||
// },
|
||||
{
|
||||
label: "GitHub",
|
||||
href: "https://github.com/immich-app/immich",
|
||||
},
|
||||
{
|
||||
label: "Roadmap",
|
||||
href: "https://github.com/orgs/immich-app/projects/1",
|
||||
label: 'GitHub',
|
||||
href: 'https://github.com/immich-app/immich',
|
||||
},
|
||||
],
|
||||
},
|
||||
@@ -166,7 +157,7 @@ const config = {
|
||||
theme: lightCodeTheme,
|
||||
darkTheme: darkCodeTheme,
|
||||
},
|
||||
image: "overview/img/feature-panel.png",
|
||||
image: 'overview/img/feature-panel.png',
|
||||
}),
|
||||
};
|
||||
|
||||
|
||||
1391
docs/package-lock.json
generated
@@ -4,6 +4,8 @@
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"docusaurus": "docusaurus",
|
||||
"format": "prettier --check .",
|
||||
"format:fix": "prettier --write .",
|
||||
"start": "docusaurus start",
|
||||
"build": "docusaurus build",
|
||||
"swizzle": "docusaurus swizzle",
|
||||
@@ -12,7 +14,7 @@
|
||||
"serve": "docusaurus serve",
|
||||
"write-translations": "docusaurus write-translations",
|
||||
"write-heading-ids": "docusaurus write-heading-ids",
|
||||
"typecheck": "tsc"
|
||||
"check": "tsc"
|
||||
},
|
||||
"dependencies": {
|
||||
"@docusaurus/core": "2.1.0",
|
||||
@@ -20,6 +22,7 @@
|
||||
"@mdx-js/react": "^1.6.22",
|
||||
"autoprefixer": "^10.4.13",
|
||||
"clsx": "^1.2.1",
|
||||
"docusaurus-lunr-search": "^2.3.2",
|
||||
"docusaurus-preset-openapi": "^0.6.3",
|
||||
"postcss": "^8.4.20",
|
||||
"prism-react-renderer": "^1.3.5",
|
||||
@@ -31,7 +34,8 @@
|
||||
"devDependencies": {
|
||||
"@docusaurus/module-type-aliases": "2.1.0",
|
||||
"@tsconfig/docusaurus": "^1.0.5",
|
||||
"typescript": "^4.7.4"
|
||||
"prettier": "^2.8.8",
|
||||
"typescript": "^5.0.0"
|
||||
},
|
||||
"browserslist": {
|
||||
"production": [
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */
|
||||
const sidebars = {
|
||||
// By default, Docusaurus generates a sidebar from the docs folder structure
|
||||
tutorialSidebar: [{type: 'autogenerated', dirName: '.'}],
|
||||
tutorialSidebar: [{ type: 'autogenerated', dirName: '.' }],
|
||||
|
||||
// But you can create a sidebar manually
|
||||
/*
|
||||
|
||||
@@ -14,8 +14,8 @@ const FeatureList: FeatureItem[] = [
|
||||
Svg: require('@site/static/img/undraw_docusaurus_mountain.svg').default,
|
||||
description: (
|
||||
<>
|
||||
Docusaurus was designed from the ground up to be easily installed and
|
||||
used to get your website up and running quickly.
|
||||
Docusaurus was designed from the ground up to be easily installed and used to get your website up and running
|
||||
quickly.
|
||||
</>
|
||||
),
|
||||
},
|
||||
@@ -24,8 +24,8 @@ const FeatureList: FeatureItem[] = [
|
||||
Svg: require('@site/static/img/undraw_docusaurus_tree.svg').default,
|
||||
description: (
|
||||
<>
|
||||
Docusaurus lets you focus on your docs, and we'll do the chores. Go
|
||||
ahead and move your docs into the <code>docs</code> directory.
|
||||
Docusaurus lets you focus on your docs, and we'll do the chores. Go ahead and move your docs into the{' '}
|
||||
<code>docs</code> directory.
|
||||
</>
|
||||
),
|
||||
},
|
||||
@@ -34,14 +34,14 @@ const FeatureList: FeatureItem[] = [
|
||||
Svg: require('@site/static/img/undraw_docusaurus_react.svg').default,
|
||||
description: (
|
||||
<>
|
||||
Extend or customize your website layout by reusing React. Docusaurus can
|
||||
be extended while reusing the same header and footer.
|
||||
Extend or customize your website layout by reusing React. Docusaurus can be extended while reusing the same
|
||||
header and footer.
|
||||
</>
|
||||
),
|
||||
},
|
||||
];
|
||||
|
||||
function Feature({title, Svg, description}: FeatureItem) {
|
||||
function Feature({ title, Svg, description }: FeatureItem) {
|
||||
return (
|
||||
<div className={clsx('col col--4')}>
|
||||
<div className="text--center">
|
||||
|
||||
@@ -7,12 +7,12 @@
|
||||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
|
||||
@import url("https://fonts.googleapis.com/css2?family=Overpass:ital,wght@0,300;0,400;0,500;0,600;0,700;1,300;1,400;1,500;1,600;1,700&display=swap");
|
||||
@import url("https://fonts.googleapis.com/css2?family=Snowburst+One&display=swap");
|
||||
@import url('https://fonts.googleapis.com/css2?family=Overpass:ital,wght@0,300;0,400;0,500;0,600;0,700;1,300;1,400;1,500;1,600;1,700&display=swap');
|
||||
@import url('https://fonts.googleapis.com/css2?family=Snowburst+One&display=swap');
|
||||
|
||||
html,
|
||||
button {
|
||||
font-family: "Overpass", sans-serif;
|
||||
font-family: 'Overpass', sans-serif;
|
||||
}
|
||||
|
||||
/* You can override the default Infima variables here. */
|
||||
@@ -29,7 +29,7 @@ button {
|
||||
}
|
||||
|
||||
/* For readability concerns, you should choose a lighter palette in dark mode. */
|
||||
[data-theme="dark"] {
|
||||
[data-theme='dark'] {
|
||||
--ifm-color-primary: #adcbfa;
|
||||
--ifm-color-primary-dark: #85b2f8;
|
||||
--ifm-color-primary-darker: #71a5f6;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import React from "react";
|
||||
import Link from "@docusaurus/Link";
|
||||
import Layout from "@theme/Layout";
|
||||
import React from 'react';
|
||||
import Link from '@docusaurus/Link';
|
||||
import Layout from '@theme/Layout';
|
||||
|
||||
function HomepageHeader() {
|
||||
return (
|
||||
@@ -31,7 +31,7 @@ function HomepageHeader() {
|
||||
</Link>
|
||||
</div>
|
||||
|
||||
<img src="/img/immich-screenshots.webp" alt="logo" />
|
||||
<img src="/img/immich-screenshots.png" alt="logo" />
|
||||
</section>
|
||||
</header>
|
||||
);
|
||||
|
||||
256
docs/src/theme/SearchBar/DocSearch.js
Normal file
@@ -0,0 +1,256 @@
|
||||
import Hogan from 'hogan.js';
|
||||
import LunrSearchAdapter from './lunar-search';
|
||||
import autocomplete from 'autocomplete.js';
|
||||
import templates from './templates';
|
||||
import utils from './utils';
|
||||
import $ from 'autocomplete.js/zepto';
|
||||
|
||||
class DocSearch {
|
||||
constructor({
|
||||
searchDocs,
|
||||
searchIndex,
|
||||
inputSelector,
|
||||
debug = false,
|
||||
baseUrl = '/',
|
||||
queryDataCallback = null,
|
||||
autocompleteOptions = {
|
||||
debug: false,
|
||||
hint: false,
|
||||
autoselect: true,
|
||||
},
|
||||
transformData = false,
|
||||
queryHook = false,
|
||||
handleSelected = false,
|
||||
enhancedSearchInput = false,
|
||||
layout = 'columns',
|
||||
}) {
|
||||
this.input = DocSearch.getInputFromSelector(inputSelector);
|
||||
this.queryDataCallback = queryDataCallback || null;
|
||||
const autocompleteOptionsDebug =
|
||||
autocompleteOptions && autocompleteOptions.debug ? autocompleteOptions.debug : false;
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
autocompleteOptions.debug = debug || autocompleteOptionsDebug;
|
||||
this.autocompleteOptions = autocompleteOptions;
|
||||
this.autocompleteOptions.cssClasses = this.autocompleteOptions.cssClasses || {};
|
||||
this.autocompleteOptions.cssClasses.prefix = this.autocompleteOptions.cssClasses.prefix || 'ds';
|
||||
const inputAriaLabel = this.input && typeof this.input.attr === 'function' && this.input.attr('aria-label');
|
||||
this.autocompleteOptions.ariaLabel = this.autocompleteOptions.ariaLabel || inputAriaLabel || 'search input';
|
||||
|
||||
this.isSimpleLayout = layout === 'simple';
|
||||
|
||||
this.client = new LunrSearchAdapter(searchDocs, searchIndex, baseUrl);
|
||||
|
||||
if (enhancedSearchInput) {
|
||||
this.input = DocSearch.injectSearchBox(this.input);
|
||||
}
|
||||
this.autocomplete = autocomplete(this.input, autocompleteOptions, [
|
||||
{
|
||||
source: this.getAutocompleteSource(transformData, queryHook),
|
||||
templates: {
|
||||
suggestion: DocSearch.getSuggestionTemplate(this.isSimpleLayout),
|
||||
footer: templates.footer,
|
||||
empty: DocSearch.getEmptyTemplate(),
|
||||
},
|
||||
},
|
||||
]);
|
||||
|
||||
const customHandleSelected = handleSelected;
|
||||
this.handleSelected = customHandleSelected || this.handleSelected;
|
||||
|
||||
// We prevent default link clicking if a custom handleSelected is defined
|
||||
if (customHandleSelected) {
|
||||
$('.algolia-autocomplete').on('click', '.ds-suggestions a', (event) => {
|
||||
event.preventDefault();
|
||||
});
|
||||
}
|
||||
|
||||
this.autocomplete.on('autocomplete:selected', this.handleSelected.bind(null, this.autocomplete.autocomplete));
|
||||
|
||||
this.autocomplete.on('autocomplete:shown', this.handleShown.bind(null, this.input));
|
||||
|
||||
if (enhancedSearchInput) {
|
||||
DocSearch.bindSearchBoxEvent();
|
||||
}
|
||||
}
|
||||
|
||||
static injectSearchBox(input) {
|
||||
input.before(templates.searchBox);
|
||||
const newInput = input.prev().prev().find('input');
|
||||
input.remove();
|
||||
return newInput;
|
||||
}
|
||||
|
||||
static bindSearchBoxEvent() {
|
||||
$('.searchbox [type="reset"]').on('click', function () {
|
||||
$('input#docsearch').focus();
|
||||
$(this).addClass('hide');
|
||||
autocomplete.autocomplete.setVal('');
|
||||
});
|
||||
|
||||
$('input#docsearch').on('keyup', () => {
|
||||
const searchbox = document.querySelector('input#docsearch');
|
||||
const reset = document.querySelector('.searchbox [type="reset"]');
|
||||
reset.className = 'searchbox__reset';
|
||||
if (searchbox.value.length === 0) {
|
||||
reset.className += ' hide';
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the matching input from a CSS selector, null if none matches
|
||||
* @function getInputFromSelector
|
||||
* @param {string} selector CSS selector that matches the search
|
||||
* input of the page
|
||||
* @returns {void}
|
||||
*/
|
||||
static getInputFromSelector(selector) {
|
||||
const input = $(selector).filter('input');
|
||||
return input.length ? $(input[0]) : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the `source` method to be passed to autocomplete.js. It will query
|
||||
* the Algolia index and call the callbacks with the formatted hits.
|
||||
* @function getAutocompleteSource
|
||||
* @param {function} transformData An optional function to transform the hits
|
||||
* @param {function} queryHook An optional function to transform the query
|
||||
* @returns {function} Method to be passed as the `source` option of
|
||||
* autocomplete
|
||||
*/
|
||||
getAutocompleteSource(transformData, queryHook) {
|
||||
return (query, callback) => {
|
||||
if (queryHook) {
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
query = queryHook(query) || query;
|
||||
}
|
||||
this.client.search(query).then((hits) => {
|
||||
if (this.queryDataCallback && typeof this.queryDataCallback == 'function') {
|
||||
this.queryDataCallback(hits);
|
||||
}
|
||||
if (transformData) {
|
||||
hits = transformData(hits) || hits;
|
||||
}
|
||||
callback(DocSearch.formatHits(hits));
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
// Given a list of hits returned by the API, will reformat them to be used in
|
||||
// a Hogan template
|
||||
static formatHits(receivedHits) {
|
||||
const clonedHits = utils.deepClone(receivedHits);
|
||||
const hits = clonedHits.map((hit) => {
|
||||
if (hit._highlightResult) {
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
hit._highlightResult = utils.mergeKeyWithParent(hit._highlightResult, 'hierarchy');
|
||||
}
|
||||
return utils.mergeKeyWithParent(hit, 'hierarchy');
|
||||
});
|
||||
|
||||
// Group hits by category / subcategory
|
||||
let groupedHits = utils.groupBy(hits, 'lvl0');
|
||||
$.each(groupedHits, (level, collection) => {
|
||||
const groupedHitsByLvl1 = utils.groupBy(collection, 'lvl1');
|
||||
const flattenedHits = utils.flattenAndFlagFirst(groupedHitsByLvl1, 'isSubCategoryHeader');
|
||||
groupedHits[level] = flattenedHits;
|
||||
});
|
||||
groupedHits = utils.flattenAndFlagFirst(groupedHits, 'isCategoryHeader');
|
||||
|
||||
// Translate hits into smaller objects to be sent to the template
|
||||
return groupedHits.map((hit) => {
|
||||
const url = DocSearch.formatURL(hit);
|
||||
const category = utils.getHighlightedValue(hit, 'lvl0');
|
||||
const subcategory = utils.getHighlightedValue(hit, 'lvl1') || category;
|
||||
const displayTitle = utils
|
||||
.compact([
|
||||
utils.getHighlightedValue(hit, 'lvl2') || subcategory,
|
||||
utils.getHighlightedValue(hit, 'lvl3'),
|
||||
utils.getHighlightedValue(hit, 'lvl4'),
|
||||
utils.getHighlightedValue(hit, 'lvl5'),
|
||||
utils.getHighlightedValue(hit, 'lvl6'),
|
||||
])
|
||||
.join('<span class="aa-suggestion-title-separator" aria-hidden="true"> › </span>');
|
||||
const text = utils.getSnippetedValue(hit, 'content');
|
||||
const isTextOrSubcategoryNonEmpty = (subcategory && subcategory !== '') || (displayTitle && displayTitle !== '');
|
||||
const isLvl1EmptyOrDuplicate = !subcategory || subcategory === '' || subcategory === category;
|
||||
const isLvl2 = displayTitle && displayTitle !== '' && displayTitle !== subcategory;
|
||||
const isLvl1 = !isLvl2 && subcategory && subcategory !== '' && subcategory !== category;
|
||||
const isLvl0 = !isLvl1 && !isLvl2;
|
||||
|
||||
return {
|
||||
isLvl0,
|
||||
isLvl1,
|
||||
isLvl2,
|
||||
isLvl1EmptyOrDuplicate,
|
||||
isCategoryHeader: hit.isCategoryHeader,
|
||||
isSubCategoryHeader: hit.isSubCategoryHeader,
|
||||
isTextOrSubcategoryNonEmpty,
|
||||
category,
|
||||
subcategory,
|
||||
title: displayTitle,
|
||||
text,
|
||||
url,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
static formatURL(hit) {
|
||||
const { url, anchor } = hit;
|
||||
if (url) {
|
||||
const containsAnchor = url.indexOf('#') !== -1;
|
||||
if (containsAnchor) return url;
|
||||
else if (anchor) return `${hit.url}#${hit.anchor}`;
|
||||
return url;
|
||||
} else if (anchor) return `#${hit.anchor}`;
|
||||
/* eslint-disable */
|
||||
console.warn('no anchor or url for: ', JSON.stringify(hit));
|
||||
/* eslint-enable */
|
||||
return null;
|
||||
}
|
||||
|
||||
static getEmptyTemplate() {
|
||||
return (args) => Hogan.compile(templates.empty).render(args);
|
||||
}
|
||||
|
||||
static getSuggestionTemplate(isSimpleLayout) {
|
||||
const stringTemplate = isSimpleLayout ? templates.suggestionSimple : templates.suggestion;
|
||||
const template = Hogan.compile(stringTemplate);
|
||||
return (suggestion) => template.render(suggestion);
|
||||
}
|
||||
|
||||
handleSelected(input, event, suggestion, datasetNumber, context = {}) {
|
||||
// Do nothing if click on the suggestion, as it's already a <a href>, the
|
||||
// browser will take care of it. This allows Ctrl-Clicking on results and not
|
||||
// having the main window being redirected as well
|
||||
if (context.selectionMethod === 'click') {
|
||||
return;
|
||||
}
|
||||
|
||||
input.setVal('');
|
||||
window.location.assign(suggestion.url);
|
||||
}
|
||||
|
||||
handleShown(input) {
|
||||
const middleOfInput = input.offset().left + input.width() / 2;
|
||||
let middleOfWindow = $(document).width() / 2;
|
||||
|
||||
if (isNaN(middleOfWindow)) {
|
||||
middleOfWindow = 900;
|
||||
}
|
||||
|
||||
const alignClass = middleOfInput - middleOfWindow >= 0 ? 'algolia-autocomplete-right' : 'algolia-autocomplete-left';
|
||||
const otherAlignClass =
|
||||
middleOfInput - middleOfWindow < 0 ? 'algolia-autocomplete-right' : 'algolia-autocomplete-left';
|
||||
const autocompleteWrapper = $('.algolia-autocomplete');
|
||||
if (!autocompleteWrapper.hasClass(alignClass)) {
|
||||
autocompleteWrapper.addClass(alignClass);
|
||||
}
|
||||
|
||||
if (autocompleteWrapper.hasClass(otherAlignClass)) {
|
||||
autocompleteWrapper.removeClass(otherAlignClass);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default DocSearch;
|
||||
514
docs/src/theme/SearchBar/algolia.css
Normal file
111
docs/src/theme/SearchBar/index.js
Normal file
@@ -0,0 +1,111 @@
|
||||
import React, { useRef, useCallback, useState } from 'react';
|
||||
import classnames from 'classnames';
|
||||
import { useHistory } from '@docusaurus/router';
|
||||
import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
|
||||
import { usePluginData } from '@docusaurus/useGlobalData';
|
||||
import useIsBrowser from '@docusaurus/useIsBrowser';
|
||||
const Search = (props) => {
|
||||
const initialized = useRef(false);
|
||||
const searchBarRef = useRef(null);
|
||||
const [indexReady, setIndexReady] = useState(false);
|
||||
const history = useHistory();
|
||||
const { siteConfig = {} } = useDocusaurusContext();
|
||||
const isBrowser = useIsBrowser();
|
||||
const { baseUrl } = siteConfig;
|
||||
const initAlgolia = (searchDocs, searchIndex, DocSearch) => {
|
||||
new DocSearch({
|
||||
searchDocs,
|
||||
searchIndex,
|
||||
baseUrl,
|
||||
inputSelector: '#search_input_react',
|
||||
// Override algolia's default selection event, allowing us to do client-side
|
||||
// navigation and avoiding a full page refresh.
|
||||
handleSelected: (_input, _event, suggestion) => {
|
||||
const url = suggestion.url || '/';
|
||||
// Use an anchor tag to parse the absolute url into a relative url
|
||||
// Alternatively, we can use new URL(suggestion.url) but it's not supported in IE
|
||||
const a = document.createElement('a');
|
||||
a.href = url;
|
||||
// Algolia uses the closest parent element id #__docusaurus when an h1 page title does not have an id
|
||||
// So, we can safely remove it. See https://github.com/facebook/docusaurus/issues/1828 for more details.
|
||||
|
||||
history.push(url);
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
const pluginData = usePluginData('docusaurus-lunr-search');
|
||||
const getSearchDoc = () =>
|
||||
process.env.NODE_ENV === 'production'
|
||||
? fetch(`${baseUrl}${pluginData.fileNames.searchDoc}`).then((content) => content.json())
|
||||
: Promise.resolve([]);
|
||||
|
||||
const getLunrIndex = () =>
|
||||
process.env.NODE_ENV === 'production'
|
||||
? fetch(`${baseUrl}${pluginData.fileNames.lunrIndex}`).then((content) => content.json())
|
||||
: Promise.resolve([]);
|
||||
|
||||
const loadAlgolia = () => {
|
||||
if (!initialized.current) {
|
||||
Promise.all([getSearchDoc(), getLunrIndex(), import('./DocSearch'), import('./algolia.css')]).then(
|
||||
([searchDocs, searchIndex, { default: DocSearch }]) => {
|
||||
if (searchDocs.length === 0) {
|
||||
return;
|
||||
}
|
||||
initAlgolia(searchDocs, searchIndex, DocSearch);
|
||||
setIndexReady(true);
|
||||
},
|
||||
);
|
||||
initialized.current = true;
|
||||
}
|
||||
};
|
||||
|
||||
const toggleSearchIconClick = useCallback(
|
||||
(e) => {
|
||||
if (!searchBarRef.current.contains(e.target)) {
|
||||
searchBarRef.current.focus();
|
||||
}
|
||||
|
||||
props.handleSearchBarToggle && props.handleSearchBarToggle(!props.isSearchBarExpanded);
|
||||
},
|
||||
[props.isSearchBarExpanded],
|
||||
);
|
||||
|
||||
if (isBrowser) {
|
||||
loadAlgolia();
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="navbar__search" key="search-box">
|
||||
<span
|
||||
aria-label="expand searchbar"
|
||||
role="button"
|
||||
className={classnames('search-icon', {
|
||||
'search-icon-hidden': props.isSearchBarExpanded,
|
||||
})}
|
||||
onClick={toggleSearchIconClick}
|
||||
onKeyDown={toggleSearchIconClick}
|
||||
tabIndex={0}
|
||||
/>
|
||||
<input
|
||||
id="search_input_react"
|
||||
type="search"
|
||||
placeholder={indexReady ? 'Search' : 'Loading...'}
|
||||
aria-label="Search"
|
||||
className={classnames(
|
||||
'navbar__search-input',
|
||||
{ 'search-bar-expanded': props.isSearchBarExpanded },
|
||||
{ 'search-bar': !props.isSearchBarExpanded },
|
||||
)}
|
||||
onClick={loadAlgolia}
|
||||
onMouseOver={loadAlgolia}
|
||||
onFocus={toggleSearchIconClick}
|
||||
onBlur={toggleSearchIconClick}
|
||||
ref={searchBarRef}
|
||||
disabled={!indexReady}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default Search;
|
||||
161
docs/src/theme/SearchBar/lunar-search.js
Normal file
@@ -0,0 +1,161 @@
|
||||
import lunr from '@generated/lunr.client';
|
||||
lunr.tokenizer.separator = /[\s\-/]+/;
|
||||
|
||||
class LunrSearchAdapter {
|
||||
constructor(searchDocs, searchIndex, baseUrl = '/') {
|
||||
this.searchDocs = searchDocs;
|
||||
this.lunrIndex = lunr.Index.load(searchIndex);
|
||||
this.baseUrl = baseUrl;
|
||||
}
|
||||
|
||||
getLunrResult(input) {
|
||||
return this.lunrIndex.query(function (query) {
|
||||
const tokens = lunr.tokenizer(input);
|
||||
query.term(tokens, {
|
||||
boost: 10,
|
||||
});
|
||||
query.term(tokens, {
|
||||
wildcard: lunr.Query.wildcard.TRAILING,
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
getHit(doc, formattedTitle, formattedContent) {
|
||||
return {
|
||||
hierarchy: {
|
||||
lvl0: doc.pageTitle || doc.title,
|
||||
lvl1: doc.type === 0 ? null : doc.title,
|
||||
},
|
||||
url: doc.url,
|
||||
_snippetResult: formattedContent
|
||||
? {
|
||||
content: {
|
||||
value: formattedContent,
|
||||
matchLevel: 'full',
|
||||
},
|
||||
}
|
||||
: null,
|
||||
_highlightResult: {
|
||||
hierarchy: {
|
||||
lvl0: {
|
||||
value: doc.type === 0 ? formattedTitle || doc.title : doc.pageTitle,
|
||||
},
|
||||
lvl1:
|
||||
doc.type === 0
|
||||
? null
|
||||
: {
|
||||
value: formattedTitle || doc.title,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
getTitleHit(doc, position, length) {
|
||||
const start = position[0];
|
||||
const end = position[0] + length;
|
||||
let formattedTitle =
|
||||
doc.title.substring(0, start) +
|
||||
'<span class="algolia-docsearch-suggestion--highlight">' +
|
||||
doc.title.substring(start, end) +
|
||||
'</span>' +
|
||||
doc.title.substring(end, doc.title.length);
|
||||
return this.getHit(doc, formattedTitle);
|
||||
}
|
||||
|
||||
getKeywordHit(doc, position, length) {
|
||||
const start = position[0];
|
||||
const end = position[0] + length;
|
||||
let formattedTitle =
|
||||
doc.title +
|
||||
'<br /><i>Keywords: ' +
|
||||
doc.keywords.substring(0, start) +
|
||||
'<span class="algolia-docsearch-suggestion--highlight">' +
|
||||
doc.keywords.substring(start, end) +
|
||||
'</span>' +
|
||||
doc.keywords.substring(end, doc.keywords.length) +
|
||||
'</i>';
|
||||
return this.getHit(doc, formattedTitle);
|
||||
}
|
||||
|
||||
getContentHit(doc, position) {
|
||||
const start = position[0];
|
||||
const end = position[0] + position[1];
|
||||
let previewStart = start;
|
||||
let previewEnd = end;
|
||||
let ellipsesBefore = true;
|
||||
let ellipsesAfter = true;
|
||||
for (let k = 0; k < 3; k++) {
|
||||
const nextSpace = doc.content.lastIndexOf(' ', previewStart - 2);
|
||||
const nextDot = doc.content.lastIndexOf('.', previewStart - 2);
|
||||
if (nextDot > 0 && nextDot > nextSpace) {
|
||||
previewStart = nextDot + 1;
|
||||
ellipsesBefore = false;
|
||||
break;
|
||||
}
|
||||
if (nextSpace < 0) {
|
||||
previewStart = 0;
|
||||
ellipsesBefore = false;
|
||||
break;
|
||||
}
|
||||
previewStart = nextSpace + 1;
|
||||
}
|
||||
for (let k = 0; k < 10; k++) {
|
||||
const nextSpace = doc.content.indexOf(' ', previewEnd + 1);
|
||||
const nextDot = doc.content.indexOf('.', previewEnd + 1);
|
||||
if (nextDot > 0 && nextDot < nextSpace) {
|
||||
previewEnd = nextDot;
|
||||
ellipsesAfter = false;
|
||||
break;
|
||||
}
|
||||
if (nextSpace < 0) {
|
||||
previewEnd = doc.content.length;
|
||||
ellipsesAfter = false;
|
||||
break;
|
||||
}
|
||||
previewEnd = nextSpace;
|
||||
}
|
||||
let preview = doc.content.substring(previewStart, start);
|
||||
if (ellipsesBefore) {
|
||||
preview = '... ' + preview;
|
||||
}
|
||||
preview += '<span class="algolia-docsearch-suggestion--highlight">' + doc.content.substring(start, end) + '</span>';
|
||||
preview += doc.content.substring(end, previewEnd);
|
||||
if (ellipsesAfter) {
|
||||
preview += ' ...';
|
||||
}
|
||||
return this.getHit(doc, null, preview);
|
||||
}
|
||||
search(input) {
|
||||
return new Promise((resolve, rej) => {
|
||||
const results = this.getLunrResult(input);
|
||||
const hits = [];
|
||||
results.length > 5 && (results.length = 5);
|
||||
this.titleHitsRes = [];
|
||||
this.contentHitsRes = [];
|
||||
results.forEach((result) => {
|
||||
const doc = this.searchDocs[result.ref];
|
||||
const { metadata } = result.matchData;
|
||||
for (let i in metadata) {
|
||||
if (metadata[i].title) {
|
||||
if (!this.titleHitsRes.includes(result.ref)) {
|
||||
const position = metadata[i].title.position[0];
|
||||
hits.push(this.getTitleHit(doc, position, input.length));
|
||||
this.titleHitsRes.push(result.ref);
|
||||
}
|
||||
} else if (metadata[i].content) {
|
||||
const position = metadata[i].content.position[0];
|
||||
hits.push(this.getContentHit(doc, position));
|
||||
} else if (metadata[i].keywords) {
|
||||
const position = metadata[i].keywords.position[0];
|
||||
hits.push(this.getKeywordHit(doc, position, input.length));
|
||||
this.titleHitsRes.push(result.ref);
|
||||
}
|
||||
}
|
||||
});
|
||||
hits.length > 5 && (hits.length = 5);
|
||||
resolve(hits);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export default LunrSearchAdapter;
|
||||
33
docs/src/theme/SearchBar/styles.css
Normal file
@@ -0,0 +1,33 @@
|
||||
.search-icon {
|
||||
background-image: var(--ifm-navbar-search-input-icon);
|
||||
height: auto;
|
||||
width: 24px;
|
||||
cursor: pointer;
|
||||
padding: 8px;
|
||||
line-height: 32px;
|
||||
background-repeat: no-repeat;
|
||||
background-position: center;
|
||||
display: none;
|
||||
}
|
||||
|
||||
.search-icon-hidden {
|
||||
visibility: hidden;
|
||||
}
|
||||
|
||||
@media (max-width: 360px) {
|
||||
.search-bar {
|
||||
width: 0 !important;
|
||||
background: none !important;
|
||||
padding: 0 !important;
|
||||
transition: none !important;
|
||||
}
|
||||
|
||||
.search-bar-expanded {
|
||||
width: 9rem !important;
|
||||
}
|
||||
|
||||
.search-icon {
|
||||
display: inline;
|
||||
vertical-align: sub;
|
||||
}
|
||||
}
|
||||
112
docs/src/theme/SearchBar/templates.js
Normal file
@@ -0,0 +1,112 @@
|
||||
const prefix = 'algolia-docsearch';
|
||||
const suggestionPrefix = `${prefix}-suggestion`;
|
||||
const footerPrefix = `${prefix}-footer`;
|
||||
|
||||
const templates = {
|
||||
suggestion: `
|
||||
<a class="${suggestionPrefix}
|
||||
{{#isCategoryHeader}}${suggestionPrefix}__main{{/isCategoryHeader}}
|
||||
{{#isSubCategoryHeader}}${suggestionPrefix}__secondary{{/isSubCategoryHeader}}
|
||||
"
|
||||
aria-label="Link to the result"
|
||||
href="{{{url}}}"
|
||||
>
|
||||
<div class="${suggestionPrefix}--category-header">
|
||||
<span class="${suggestionPrefix}--category-header-lvl0">{{{category}}}</span>
|
||||
</div>
|
||||
<div class="${suggestionPrefix}--wrapper">
|
||||
<div class="${suggestionPrefix}--subcategory-column">
|
||||
<span class="${suggestionPrefix}--subcategory-column-text">{{{subcategory}}}</span>
|
||||
</div>
|
||||
{{#isTextOrSubcategoryNonEmpty}}
|
||||
<div class="${suggestionPrefix}--content">
|
||||
<div class="${suggestionPrefix}--subcategory-inline">{{{subcategory}}}</div>
|
||||
<div class="${suggestionPrefix}--title">{{{title}}}</div>
|
||||
{{#text}}<div class="${suggestionPrefix}--text">{{{text}}}</div>{{/text}}
|
||||
</div>
|
||||
{{/isTextOrSubcategoryNonEmpty}}
|
||||
</div>
|
||||
</a>
|
||||
`,
|
||||
suggestionSimple: `
|
||||
<div class="${suggestionPrefix}
|
||||
{{#isCategoryHeader}}${suggestionPrefix}__main{{/isCategoryHeader}}
|
||||
{{#isSubCategoryHeader}}${suggestionPrefix}__secondary{{/isSubCategoryHeader}}
|
||||
suggestion-layout-simple
|
||||
">
|
||||
<div class="${suggestionPrefix}--category-header">
|
||||
{{^isLvl0}}
|
||||
<span class="${suggestionPrefix}--category-header-lvl0 ${suggestionPrefix}--category-header-item">{{{category}}}</span>
|
||||
{{^isLvl1}}
|
||||
{{^isLvl1EmptyOrDuplicate}}
|
||||
<span class="${suggestionPrefix}--category-header-lvl1 ${suggestionPrefix}--category-header-item">
|
||||
{{{subcategory}}}
|
||||
</span>
|
||||
{{/isLvl1EmptyOrDuplicate}}
|
||||
{{/isLvl1}}
|
||||
{{/isLvl0}}
|
||||
<div class="${suggestionPrefix}--title ${suggestionPrefix}--category-header-item">
|
||||
{{#isLvl2}}
|
||||
{{{title}}}
|
||||
{{/isLvl2}}
|
||||
{{#isLvl1}}
|
||||
{{{subcategory}}}
|
||||
{{/isLvl1}}
|
||||
{{#isLvl0}}
|
||||
{{{category}}}
|
||||
{{/isLvl0}}
|
||||
</div>
|
||||
</div>
|
||||
<div class="${suggestionPrefix}--wrapper">
|
||||
{{#text}}
|
||||
<div class="${suggestionPrefix}--content">
|
||||
<div class="${suggestionPrefix}--text">{{{text}}}</div>
|
||||
</div>
|
||||
{{/text}}
|
||||
</div>
|
||||
</div>
|
||||
`,
|
||||
footer: `
|
||||
<div class="${footerPrefix}">
|
||||
</div>
|
||||
`,
|
||||
empty: `
|
||||
<div class="${suggestionPrefix}">
|
||||
<div class="${suggestionPrefix}--wrapper">
|
||||
<div class="${suggestionPrefix}--content ${suggestionPrefix}--no-results">
|
||||
<div class="${suggestionPrefix}--title">
|
||||
<div class="${suggestionPrefix}--text">
|
||||
No results found for query <b>"{{query}}"</b>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
`,
|
||||
searchBox: `
|
||||
<form novalidate="novalidate" onsubmit="return false;" class="searchbox">
|
||||
<div role="search" class="searchbox__wrapper">
|
||||
<input id="docsearch" type="search" name="search" placeholder="Search the docs" autocomplete="off" required="required" class="searchbox__input"/>
|
||||
<button type="submit" title="Submit your search query." class="searchbox__submit" >
|
||||
<svg width=12 height=12 role="img" aria-label="Search">
|
||||
<use xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="#sbx-icon-search-13"></use>
|
||||
</svg>
|
||||
</button>
|
||||
<button type="reset" title="Clear the search query." class="searchbox__reset hide">
|
||||
<svg width=12 height=12 role="img" aria-label="Reset">
|
||||
<use xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="#sbx-icon-clear-3"></use>
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
</form>
|
||||
|
||||
<div class="svg-icons" style="height: 0; width: 0; position: absolute; visibility: hidden">
|
||||
<svg xmlns="http://www.w3.org/2000/svg">
|
||||
<symbol id="sbx-icon-clear-3" viewBox="0 0 40 40"><path d="M16.228 20L1.886 5.657 0 3.772 3.772 0l1.885 1.886L20 16.228 34.343 1.886 36.228 0 40 3.772l-1.886 1.885L23.772 20l14.342 14.343L40 36.228 36.228 40l-1.885-1.886L20 23.772 5.657 38.114 3.772 40 0 36.228l1.886-1.885L16.228 20z" fill-rule="evenodd"></symbol>
|
||||
<symbol id="sbx-icon-search-13" viewBox="0 0 40 40"><path d="M26.806 29.012a16.312 16.312 0 0 1-10.427 3.746C7.332 32.758 0 25.425 0 16.378 0 7.334 7.333 0 16.38 0c9.045 0 16.378 7.333 16.378 16.38 0 3.96-1.406 7.593-3.746 10.426L39.547 37.34c.607.608.61 1.59-.004 2.203a1.56 1.56 0 0 1-2.202.004L26.807 29.012zm-10.427.627c7.322 0 13.26-5.938 13.26-13.26 0-7.324-5.938-13.26-13.26-13.26-7.324 0-13.26 5.936-13.26 13.26 0 7.322 5.936 13.26 13.26 13.26z" fill-rule="evenodd"></symbol>
|
||||
</svg>
|
||||
</div>
|
||||
`,
|
||||
};
|
||||
|
||||
export default templates;
|
||||
266
docs/src/theme/SearchBar/utils.js
Normal file
@@ -0,0 +1,266 @@
|
||||
import $ from 'autocomplete.js/zepto';
|
||||
|
||||
const utils = {
|
||||
/*
|
||||
* Move the content of an object key one level higher.
|
||||
* eg.
|
||||
* {
|
||||
* name: 'My name',
|
||||
* hierarchy: {
|
||||
* lvl0: 'Foo',
|
||||
* lvl1: 'Bar'
|
||||
* }
|
||||
* }
|
||||
* Will be converted to
|
||||
* {
|
||||
* name: 'My name',
|
||||
* lvl0: 'Foo',
|
||||
* lvl1: 'Bar'
|
||||
* }
|
||||
* @param {Object} object Main object
|
||||
* @param {String} property Main object key to move up
|
||||
* @return {Object}
|
||||
* @throws Error when key is not an attribute of Object or is not an object itself
|
||||
*/
|
||||
mergeKeyWithParent(object, property) {
|
||||
if (object[property] === undefined) {
|
||||
return object;
|
||||
}
|
||||
if (typeof object[property] !== 'object') {
|
||||
return object;
|
||||
}
|
||||
const newObject = $.extend({}, object, object[property]);
|
||||
delete newObject[property];
|
||||
return newObject;
|
||||
},
|
||||
/*
|
||||
* Group all objects of a collection by the value of the specified attribute
|
||||
* If the attribute is a string, use the lowercase form.
|
||||
*
|
||||
* eg.
|
||||
* groupBy([
|
||||
* {name: 'Tim', category: 'dev'},
|
||||
* {name: 'Vincent', category: 'dev'},
|
||||
* {name: 'Ben', category: 'sales'},
|
||||
* {name: 'Jeremy', category: 'sales'},
|
||||
* {name: 'AlexS', category: 'dev'},
|
||||
* {name: 'AlexK', category: 'sales'}
|
||||
* ], 'category');
|
||||
* =>
|
||||
* {
|
||||
* 'dev': [
|
||||
* {name: 'Tim', category: 'dev'},
|
||||
* {name: 'Vincent', category: 'dev'},
|
||||
* {name: 'AlexS', category: 'dev'}
|
||||
* ],
|
||||
* 'sales': [
|
||||
* {name: 'Ben', category: 'sales'},
|
||||
* {name: 'Jeremy', category: 'sales'},
|
||||
* {name: 'AlexK', category: 'sales'}
|
||||
* ]
|
||||
* }
|
||||
* @param {array} collection Array of objects to group
|
||||
* @param {String} property The attribute on which to apply the grouping
|
||||
* @return {array}
|
||||
* @throws Error when one of the elements does not have the specified property
|
||||
*/
|
||||
groupBy(collection, property) {
|
||||
const newCollection = {};
|
||||
$.each(collection, (index, item) => {
|
||||
if (item[property] === undefined) {
|
||||
throw new Error(`[groupBy]: Object has no key ${property}`);
|
||||
}
|
||||
let key = item[property];
|
||||
if (typeof key === 'string') {
|
||||
key = key.toLowerCase();
|
||||
}
|
||||
// fix #171 the given data type of docsearch hits might conflict with the properties of the native Object,
|
||||
// such as the constructor, so we need to do this check.
|
||||
if (!Object.prototype.hasOwnProperty.call(newCollection, key)) {
|
||||
newCollection[key] = [];
|
||||
}
|
||||
newCollection[key].push(item);
|
||||
});
|
||||
return newCollection;
|
||||
},
|
||||
/*
|
||||
* Return an array of all the values of the specified object
|
||||
* eg.
|
||||
* values({
|
||||
* foo: 42,
|
||||
* bar: true,
|
||||
* baz: 'yep'
|
||||
* })
|
||||
* =>
|
||||
* [42, true, 'yep']
|
||||
* @param {object} object Object to extract values from
|
||||
* @return {array}
|
||||
*/
|
||||
values(object) {
|
||||
return Object.keys(object).map((key) => object[key]);
|
||||
},
|
||||
/*
|
||||
* Flattens an array
|
||||
* eg.
|
||||
* flatten([1, 2, [3, 4], [5, 6]])
|
||||
* =>
|
||||
* [1, 2, 3, 4, 5, 6]
|
||||
* @param {array} array Array to flatten
|
||||
* @return {array}
|
||||
*/
|
||||
flatten(array) {
|
||||
const results = [];
|
||||
array.forEach((value) => {
|
||||
if (!Array.isArray(value)) {
|
||||
results.push(value);
|
||||
return;
|
||||
}
|
||||
value.forEach((subvalue) => {
|
||||
results.push(subvalue);
|
||||
});
|
||||
});
|
||||
return results;
|
||||
},
|
||||
/*
|
||||
* Flatten all values of an object into an array, marking each first element of
|
||||
* each group with a specific flag
|
||||
* eg.
|
||||
* flattenAndFlagFirst({
|
||||
* 'devs': [
|
||||
* {name: 'Tim', category: 'dev'},
|
||||
* {name: 'Vincent', category: 'dev'},
|
||||
* {name: 'AlexS', category: 'dev'}
|
||||
* ],
|
||||
* 'sales': [
|
||||
* {name: 'Ben', category: 'sales'},
|
||||
* {name: 'Jeremy', category: 'sales'},
|
||||
* {name: 'AlexK', category: 'sales'}
|
||||
* ]
|
||||
* , 'isTop');
|
||||
* =>
|
||||
* [
|
||||
* {name: 'Tim', category: 'dev', isTop: true},
|
||||
* {name: 'Vincent', category: 'dev', isTop: false},
|
||||
* {name: 'AlexS', category: 'dev', isTop: false},
|
||||
* {name: 'Ben', category: 'sales', isTop: true},
|
||||
* {name: 'Jeremy', category: 'sales', isTop: false},
|
||||
* {name: 'AlexK', category: 'sales', isTop: false}
|
||||
* ]
|
||||
* @param {object} object Object to flatten
|
||||
* @param {string} flag Flag to set to true on first element of each group
|
||||
* @return {array}
|
||||
*/
|
||||
flattenAndFlagFirst(object, flag) {
|
||||
const values = this.values(object).map((collection) =>
|
||||
collection.map((item, index) => {
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
item[flag] = index === 0;
|
||||
return item;
|
||||
}),
|
||||
);
|
||||
return this.flatten(values);
|
||||
},
|
||||
/*
|
||||
* Removes all empty strings, null, false and undefined elements from the array
|
||||
* eg.
|
||||
* compact([42, false, null, undefined, '', [], 'foo']);
|
||||
* =>
|
||||
* [42, [], 'foo']
|
||||
* @param {array} array Array to compact
|
||||
* @return {array}
|
||||
*/
|
||||
compact(array) {
|
||||
const results = [];
|
||||
array.forEach((value) => {
|
||||
if (!value) {
|
||||
return;
|
||||
}
|
||||
results.push(value);
|
||||
});
|
||||
return results;
|
||||
},
|
||||
/*
|
||||
* Returns the highlighted value of the specified key in the specified object.
|
||||
* If no highlighted value is available, will return the key value directly
|
||||
* eg.
|
||||
* getHighlightedValue({
|
||||
* _highlightResult: {
|
||||
* text: {
|
||||
* value: '<mark>foo</mark>'
|
||||
* }
|
||||
* },
|
||||
* text: 'foo'
|
||||
* }, 'text');
|
||||
* =>
|
||||
* '<mark>foo</mark>'
|
||||
* @param {object} object Hit object returned by the Algolia API
|
||||
* @param {string} property Object key to look for
|
||||
* @return {string}
|
||||
**/
|
||||
getHighlightedValue(object, property) {
|
||||
if (
|
||||
object._highlightResult &&
|
||||
object._highlightResult.hierarchy_camel &&
|
||||
object._highlightResult.hierarchy_camel[property] &&
|
||||
object._highlightResult.hierarchy_camel[property].matchLevel &&
|
||||
object._highlightResult.hierarchy_camel[property].matchLevel !== 'none' &&
|
||||
object._highlightResult.hierarchy_camel[property].value
|
||||
) {
|
||||
return object._highlightResult.hierarchy_camel[property].value;
|
||||
}
|
||||
if (
|
||||
object._highlightResult &&
|
||||
object._highlightResult &&
|
||||
object._highlightResult[property] &&
|
||||
object._highlightResult[property].value
|
||||
) {
|
||||
return object._highlightResult[property].value;
|
||||
}
|
||||
return object[property];
|
||||
},
|
||||
/*
|
||||
* Returns the snippeted value of the specified key in the specified object.
|
||||
* If no highlighted value is available, will return the key value directly.
|
||||
* Will add starting and ending ellipsis (…) if we detect that a sentence is
|
||||
* incomplete
|
||||
* eg.
|
||||
* getSnippetedValue({
|
||||
* _snippetResult: {
|
||||
* text: {
|
||||
* value: '<mark>This is an unfinished sentence</mark>'
|
||||
* }
|
||||
* },
|
||||
* text: 'This is an unfinished sentence'
|
||||
* }, 'text');
|
||||
* =>
|
||||
* '<mark>This is an unfinished sentence</mark>…'
|
||||
* @param {object} object Hit object returned by the Algolia API
|
||||
* @param {string} property Object key to look for
|
||||
* @return {string}
|
||||
**/
|
||||
getSnippetedValue(object, property) {
|
||||
if (!object._snippetResult || !object._snippetResult[property] || !object._snippetResult[property].value) {
|
||||
return object[property];
|
||||
}
|
||||
let snippet = object._snippetResult[property].value;
|
||||
|
||||
if (snippet[0] !== snippet[0].toUpperCase()) {
|
||||
snippet = `…${snippet}`;
|
||||
}
|
||||
if (['.', '!', '?'].indexOf(snippet[snippet.length - 1]) === -1) {
|
||||
snippet = `${snippet}…`;
|
||||
}
|
||||
return snippet;
|
||||
},
|
||||
/*
|
||||
* Deep clone an object.
|
||||
* Note: This will not clone functions and dates
|
||||
* @param {object} object Object to clone
|
||||
* @return {object}
|
||||
*/
|
||||
deepClone(object) {
|
||||
return JSON.parse(JSON.stringify(object));
|
||||
},
|
||||
};
|
||||
|
||||
export default utils;
|
||||
BIN
docs/static/img/immich-screenshots.png
vendored
Normal file
|
After Width: | Height: | Size: 1.6 MiB |
BIN
docs/static/img/immich-screenshots.webp
vendored
|
Before Width: | Height: | Size: 162 KiB |
@@ -4,25 +4,25 @@ module.exports = {
|
||||
corePlugins: {
|
||||
preflight: false, // disable Tailwind's reset
|
||||
},
|
||||
content: ["./src/**/*.{js,jsx,ts,tsx}", "../docs/**/*.mdx"], // my markdown stuff is in ../docs, not /src
|
||||
darkMode: ["class", '[data-theme="dark"]'], // hooks into docusaurus' dark mode settigns
|
||||
content: ['./src/**/*.{js,jsx,ts,tsx}', '../docs/**/*.mdx'], // my markdown stuff is in ../docs, not /src
|
||||
darkMode: ['class', '[data-theme="dark"]'], // hooks into docusaurus' dark mode settings
|
||||
theme: {
|
||||
extend: {
|
||||
colors: {
|
||||
// Light Theme
|
||||
"immich-primary": "#4250af",
|
||||
"immich-bg": "white",
|
||||
"immich-fg": "black",
|
||||
"immich-gray": "#F6F6F4",
|
||||
'immich-primary': '#4250af',
|
||||
'immich-bg': 'white',
|
||||
'immich-fg': 'black',
|
||||
'immich-gray': '#F6F6F4',
|
||||
|
||||
// Dark Theme
|
||||
"immich-dark-primary": "#adcbfa",
|
||||
"immich-dark-bg": "black",
|
||||
"immich-dark-fg": "#e5e7eb",
|
||||
"immich-dark-gray": "#212121",
|
||||
'immich-dark-primary': '#adcbfa',
|
||||
'immich-dark-bg': 'black',
|
||||
'immich-dark-fg': '#e5e7eb',
|
||||
'immich-dark-gray': '#212121',
|
||||
},
|
||||
fontFamily: {
|
||||
"immich-title": ["Snowburst One", "cursive"],
|
||||
'immich-title': ['Snowburst One', 'cursive'],
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
@@ -22,6 +22,7 @@
|
||||
{ "source": "/docs/features/password-login", "destination": "/docs/administration/password-login" },
|
||||
{ "source": "/docs/features/server-commands", "destination": "/docs/administration/server-commands" },
|
||||
{ "source": "/docs/features/storage-template", "destination": "/docs/administration/storage-template" },
|
||||
{ "source": "/docs/features/user-management", "destination": "/docs/administration/user-management" }
|
||||
{ "source": "/docs/features/user-management", "destination": "/docs/administration/user-management" },
|
||||
{ "source": "/docs/developer/contributing", "destination": "/docs/developer/pr-checklist" }
|
||||
]
|
||||
}
|
||||
|
||||
15
install.sh
@@ -2,19 +2,10 @@ echo "Starting Immich installation..."
|
||||
|
||||
ip_address=$(hostname -I | awk '{print $1}')
|
||||
|
||||
release_version=$(curl --silent "https://api.github.com/repos/immich-app/immich/releases/latest" |
|
||||
grep '"tag_name":' |
|
||||
sed -E 's/.*"([^"]+)".*/\1/')
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
NC='\033[0m' # No Color
|
||||
|
||||
get_release_version() {
|
||||
curl --silent "https://api.github.com/repos/immich-app/immich/releases/latest" | # Get latest release from GitHub api
|
||||
grep '"tag_name":' | # Get tag line
|
||||
sed -E 's/.*"([^"]+)".*/\1/' # Pluck JSON value
|
||||
}
|
||||
|
||||
create_immich_directory() {
|
||||
echo "Creating Immich directory..."
|
||||
mkdir -p ./immich-app/immich-data
|
||||
@@ -23,12 +14,12 @@ create_immich_directory() {
|
||||
|
||||
download_docker_compose_file() {
|
||||
echo "Downloading docker-compose.yml..."
|
||||
curl -L https://raw.githubusercontent.com/immich-app/immich/$release_version/docker/docker-compose.yml -o ./docker-compose.yml >/dev/null 2>&1
|
||||
curl -L https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml -o ./docker-compose.yml >/dev/null 2>&1
|
||||
}
|
||||
|
||||
download_dot_env_file() {
|
||||
echo "Downloading .env file..."
|
||||
curl -L https://raw.githubusercontent.com/immich-app/immich/$release_version/docker/example.env -o ./.env >/dev/null 2>&1
|
||||
curl -L https://github.com/immich-app/immich/releases/latest/download/example.env -o ./.env >/dev/null 2>&1
|
||||
}
|
||||
|
||||
replace_env_value() {
|
||||
@@ -69,7 +60,7 @@ start_docker_compose() {
|
||||
show_friendly_message() {
|
||||
echo "Succesfully deployed Immich!"
|
||||
echo "You can access the website at http://$ip_address:2283 and the server URL for the mobile app is http://$ip_address:2283/api"
|
||||
echo "The backup (or upload) location is $upload_location"
|
||||
echo "The library location is $upload_location"
|
||||
echo "---------------------------------------------------"
|
||||
echo "If you want to configure custom information of the server, including the database, Redis information, or the backup (or upload) location, etc.
|
||||
|
||||
|
||||
@@ -44,5 +44,5 @@ download:
|
||||
locale_code: ru-RU
|
||||
- file: mobile/assets/i18n/cs-CZ.json
|
||||
locale_code: cs-CZ
|
||||
- file: mobile/assets/i18n/no-NO.json
|
||||
locale_code: no-NO
|
||||
- file: mobile/assets/i18n/nb-NO.json
|
||||
locale_code: nb-NO
|
||||
|
||||
@@ -1 +1,3 @@
|
||||
venv/
|
||||
venv/
|
||||
*.zip
|
||||
*.onnx
|
||||
170
machine-learning/.gitignore
vendored
@@ -1,4 +1,172 @@
|
||||
*.zip
|
||||
*.onnx
|
||||
upload/
|
||||
venv/
|
||||
__pycache__/
|
||||
model-cache/
|
||||
model-cache/
|
||||
|
||||
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# poetry
|
||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||
#poetry.lock
|
||||
|
||||
# pdm
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||
#pdm.lock
|
||||
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||
# in version control.
|
||||
# https://pdm.fming.dev/#use-with-ide
|
||||
.pdm.toml
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# PyCharm
|
||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
.idea/
|
||||
|
||||
|
||||
*.onnx
|
||||
*.zip
|
||||
@@ -1,19 +1,28 @@
|
||||
FROM python:3.10
|
||||
FROM python:3.11.4-bullseye@sha256:5b401676aff858495a5c9c726c60b8b73fe52833e9e16eccdb59e93d52741727 as builder
|
||||
|
||||
ENV TRANSFORMERS_CACHE=/cache \
|
||||
PYTHONDONTWRITEBYTECODE=1 \
|
||||
PYTHONUNBUFFERED=1 \
|
||||
PIP_NO_CACHE_DIR=true
|
||||
ENV PYTHONDONTWRITEBYTECODE=1 \
|
||||
PYTHONUNBUFFERED=1 \
|
||||
PIP_NO_CACHE_DIR=true
|
||||
|
||||
RUN pip install --upgrade pip && pip install poetry
|
||||
RUN poetry config installer.max-workers 10 && \
|
||||
poetry config virtualenvs.create false
|
||||
RUN python -m venv /opt/venv
|
||||
ENV VIRTUAL_ENV="/opt/venv" PATH="/opt/venv/bin:${PATH}"
|
||||
|
||||
COPY poetry.lock pyproject.toml ./
|
||||
RUN poetry install --sync --no-interaction --no-ansi --no-root --only main
|
||||
|
||||
FROM python:3.11.4-slim-bullseye@sha256:91d194f58f50594cda71dcd2e8fdefd90e7ecc57d07823813b67c8521e565dcd
|
||||
|
||||
WORKDIR /usr/src/app
|
||||
ENV NODE_ENV=production \
|
||||
TRANSFORMERS_CACHE=/cache \
|
||||
PYTHONDONTWRITEBYTECODE=1 \
|
||||
PYTHONUNBUFFERED=1 \
|
||||
PATH="/opt/venv/bin:$PATH" \
|
||||
PYTHONPATH=`pwd`
|
||||
|
||||
RUN python -m venv /opt/venv
|
||||
ENV PATH="/opt/venv/bin:$PATH"
|
||||
|
||||
RUN pip install --pre torch -f https://download.pytorch.org/whl/nightly/cpu/torch_nightly.html
|
||||
RUN pip install transformers tqdm numpy scikit-learn scipy nltk sentencepiece flask Pillow
|
||||
RUN pip install --no-deps sentence-transformers
|
||||
|
||||
COPY . .
|
||||
|
||||
CMD ["python", "src/main.py"]
|
||||
COPY --from=builder /opt/venv /opt/venv
|
||||
COPY app .
|
||||
ENTRYPOINT ["python", "main.py"]
|
||||
|
||||
@@ -1,5 +1,13 @@
|
||||
|
||||
# Immich Machine Learning
|
||||
|
||||
- Object Detection
|
||||
- Image Classification
|
||||
- Image classification
|
||||
- CLIP embeddings
|
||||
- Facial recognition
|
||||
|
||||
# Setup
|
||||
|
||||
This project uses [Poetry](https://python-poetry.org/docs/#installation), so be sure to install it first.
|
||||
Running `poetry install --no-root --with dev` will install everything you need in an isolated virtual environment.
|
||||
|
||||
To add or remove dependencies, you can use the commands `poetry add $PACKAGE_NAME` and `poetry remove $PACKAGE_NAME`, respectively.
|
||||
Be sure to commit the `poetry.lock` and `pyproject.toml` files to reflect any changes in dependencies.
|
||||
|
||||
84
machine-learning/app/cache.py
Normal file
@@ -0,0 +1,84 @@
|
||||
from aiocache.plugins import TimingPlugin, BasePlugin
|
||||
from aiocache.backends.memory import SimpleMemoryCache
|
||||
from aiocache.lock import OptimisticLock
|
||||
from typing import Any
|
||||
from models import get_model
|
||||
|
||||
|
||||
class ModelCache:
|
||||
"""Fetches a model from an in-memory cache, instantiating it if it's missing."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
ttl: int | None = None,
|
||||
revalidate: bool = False,
|
||||
timeout: int | None = None,
|
||||
profiling: bool = False,
|
||||
):
|
||||
"""
|
||||
Args:
|
||||
ttl: Unloads model after this duration. Disabled if None. Defaults to None.
|
||||
revalidate: Resets TTL on cache hit. Useful to keep models in memory while active. Defaults to False.
|
||||
timeout: Maximum allowed time for model to load. Disabled if None. Defaults to None.
|
||||
profiling: Collects metrics for cache operations, adding slight overhead. Defaults to False.
|
||||
"""
|
||||
|
||||
self.ttl = ttl
|
||||
plugins = []
|
||||
|
||||
if revalidate:
|
||||
plugins.append(RevalidationPlugin())
|
||||
if profiling:
|
||||
plugins.append(TimingPlugin())
|
||||
|
||||
self.cache = SimpleMemoryCache(
|
||||
ttl=ttl, timeout=timeout, plugins=plugins, namespace=None
|
||||
)
|
||||
|
||||
async def get_cached_model(
|
||||
self, model_name: str, model_type: str, **model_kwargs
|
||||
) -> Any:
|
||||
"""
|
||||
Args:
|
||||
model_name: Name of model in the model hub used for the task.
|
||||
model_type: Model type or task, which determines which model zoo is used.
|
||||
|
||||
Returns:
|
||||
model: The requested model.
|
||||
"""
|
||||
|
||||
key = self.cache.build_key(model_name, model_type)
|
||||
model = await self.cache.get(key)
|
||||
if model is None:
|
||||
async with OptimisticLock(self.cache, key) as lock:
|
||||
model = get_model(model_name, model_type, **model_kwargs)
|
||||
await lock.cas(model, ttl=self.ttl)
|
||||
return model
|
||||
|
||||
async def get_profiling(self) -> dict[str, float] | None:
|
||||
if not hasattr(self.cache, "profiling"):
|
||||
return None
|
||||
|
||||
return self.cache.profiling # type: ignore
|
||||
|
||||
|
||||
class RevalidationPlugin(BasePlugin):
|
||||
"""Revalidates cache item's TTL after cache hit."""
|
||||
|
||||
async def post_get(self, client, key, ret=None, namespace=None, **kwargs):
|
||||
if ret is None:
|
||||
return
|
||||
if namespace is not None:
|
||||
key = client.build_key(key, namespace)
|
||||
if key in client._handlers:
|
||||
await client.expire(key, client.ttl)
|
||||
|
||||
async def post_multi_get(self, client, keys, ret=None, namespace=None, **kwargs):
|
||||
if ret is None:
|
||||
return
|
||||
|
||||
for key, val in zip(keys, ret):
|
||||
if namespace is not None:
|
||||
key = client.build_key(key, namespace)
|
||||
if val is not None and key in client._handlers:
|
||||
await client.expire(key, client.ttl)
|
||||
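ModelCache above wraps aiocache's SimpleMemoryCache so each model is loaded once and then reused until its TTL expires. A minimal usage sketch, assuming the cache.py and config.py modules introduced in this diff and the default CLIP model name:

# Illustrative sketch only -- not part of the diff; the model download happens on first use.
import asyncio

from cache import ModelCache


async def demo() -> None:
    cache = ModelCache(ttl=300, revalidate=True)
    # The first call instantiates the model via get_model(); the second call hits the cache.
    clip = await cache.get_cached_model("clip-ViT-B-32", "clip")
    clip_again = await cache.get_cached_model("clip-ViT-B-32", "clip")
    assert clip is clip_again


asyncio.run(demo())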
22
machine-learning/app/config.py
Normal file
@@ -0,0 +1,22 @@
|
||||
from pydantic import BaseSettings
|
||||
|
||||
class Settings(BaseSettings):
|
||||
cache_folder: str = "/cache"
|
||||
classification_model: str = "microsoft/resnet-50"
|
||||
clip_image_model: str = "clip-ViT-B-32"
|
||||
clip_text_model: str = "clip-ViT-B-32"
|
||||
facial_recognition_model: str = "buffalo_l"
|
||||
min_tag_score: float = 0.9
|
||||
eager_startup: bool = True
|
||||
model_ttl: int = 300
|
||||
host: str = "0.0.0.0"
|
||||
port: int = 3003
|
||||
workers: int = 1
|
||||
min_face_score: float = 0.7
|
||||
|
||||
class Config(BaseSettings.Config):
|
||||
env_prefix = 'MACHINE_LEARNING_'
|
||||
case_sensitive = False
|
||||
|
||||
|
||||
settings = Settings()
|
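Settings subclasses pydantic's BaseSettings with the MACHINE_LEARNING_ env prefix and case-insensitive matching, so every field can be overridden from the environment. A hypothetical override sketch (variable names are just prefix plus field name):

# Hypothetical example -- not part of the diff; relies on standard pydantic BaseSettings behavior.
import os

from config import Settings

os.environ["MACHINE_LEARNING_PORT"] = "3004"          # overrides the default 3003
os.environ["MACHINE_LEARNING_MIN_TAG_SCORE"] = "0.8"  # parsed to float by pydantic

settings = Settings()
print(settings.port, settings.min_tag_score)  # -> 3004 0.8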
||||
133
machine-learning/app/main.py
Normal file
@@ -0,0 +1,133 @@
|
||||
import os
|
||||
import io
|
||||
from typing import Any
|
||||
|
||||
from cache import ModelCache
|
||||
from schemas import (
|
||||
EmbeddingResponse,
|
||||
FaceResponse,
|
||||
TagResponse,
|
||||
MessageResponse,
|
||||
TextModelRequest,
|
||||
TextResponse,
|
||||
)
|
||||
import uvicorn
|
||||
from PIL import Image
|
||||
from fastapi import FastAPI, HTTPException, Depends, Body
|
||||
from models import get_model, run_classification, run_facial_recognition
|
||||
from config import settings
|
||||
|
||||
_model_cache = None
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
|
||||
@app.on_event("startup")
|
||||
async def startup_event() -> None:
|
||||
global _model_cache
|
||||
_model_cache = ModelCache(ttl=settings.model_ttl, revalidate=True)
|
||||
models = [
|
||||
(settings.classification_model, "image-classification"),
|
||||
(settings.clip_image_model, "clip"),
|
||||
(settings.clip_text_model, "clip"),
|
||||
(settings.facial_recognition_model, "facial-recognition"),
|
||||
]
|
||||
|
||||
# Get all models
|
||||
for model_name, model_type in models:
|
||||
if settings.eager_startup:
|
||||
await _model_cache.get_cached_model(model_name, model_type)
|
||||
else:
|
||||
get_model(model_name, model_type)
|
||||
|
||||
|
||||
def dep_model_cache():
|
||||
if _model_cache is None:
|
||||
raise HTTPException(status_code=500, detail="Unable to load model.")
|
||||
|
||||
def dep_input_image(image: bytes = Body(...)) -> Image:
|
||||
return Image.open(io.BytesIO(image))
|
||||
|
||||
@app.get("/", response_model=MessageResponse)
|
||||
async def root() -> dict[str, str]:
|
||||
return {"message": "Immich ML"}
|
||||
|
||||
|
||||
@app.get("/ping", response_model=TextResponse)
|
||||
def ping() -> str:
|
||||
return "pong"
|
||||
|
||||
|
||||
@app.post(
|
||||
"/image-classifier/tag-image",
|
||||
response_model=TagResponse,
|
||||
status_code=200,
|
||||
dependencies=[Depends(dep_model_cache)],
|
||||
)
|
||||
async def image_classification(
|
||||
image: Image = Depends(dep_input_image)
|
||||
) -> list[str]:
|
||||
try:
|
||||
model = await _model_cache.get_cached_model(
|
||||
settings.classification_model, "image-classification"
|
||||
)
|
||||
labels = run_classification(model, image, settings.min_tag_score)
|
||||
except Exception as ex:
|
||||
raise HTTPException(status_code=500, detail=str(ex))
|
||||
else:
|
||||
return labels
|
||||
|
||||
|
||||
@app.post(
|
||||
"/sentence-transformer/encode-image",
|
||||
response_model=EmbeddingResponse,
|
||||
status_code=200,
|
||||
dependencies=[Depends(dep_model_cache)],
|
||||
)
|
||||
async def clip_encode_image(
|
||||
image: Image = Depends(dep_input_image)
|
||||
) -> list[float]:
|
||||
model = await _model_cache.get_cached_model(settings.clip_image_model, "clip")
|
||||
embedding = model.encode(image).tolist()
|
||||
return embedding
|
||||
|
||||
|
||||
@app.post(
|
||||
"/sentence-transformer/encode-text",
|
||||
response_model=EmbeddingResponse,
|
||||
status_code=200,
|
||||
dependencies=[Depends(dep_model_cache)],
|
||||
)
|
||||
async def clip_encode_text(
|
||||
payload: TextModelRequest
|
||||
) -> list[float]:
|
||||
model = await _model_cache.get_cached_model(settings.clip_text_model, "clip")
|
||||
embedding = model.encode(payload.text).tolist()
|
||||
return embedding
|
||||
|
||||
|
||||
@app.post(
|
||||
"/facial-recognition/detect-faces",
|
||||
response_model=FaceResponse,
|
||||
status_code=200,
|
||||
dependencies=[Depends(dep_model_cache)],
|
||||
)
|
||||
async def facial_recognition(
|
||||
image: bytes = Body(...),
|
||||
) -> list[dict[str, Any]]:
|
||||
model = await _model_cache.get_cached_model(
|
||||
settings.facial_recognition_model, "facial-recognition"
|
||||
)
|
||||
faces = run_facial_recognition(model, image)
|
||||
return faces
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
is_dev = os.getenv("NODE_ENV") == "development"
|
||||
uvicorn.run(
|
||||
"main:app",
|
||||
host=settings.host,
|
||||
port=settings.port,
|
||||
reload=is_dev,
|
||||
workers=settings.workers,
|
||||
)
|
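The service exposes its classifier at /image-classifier/tag-image and reads the raw image bytes from the request body. A hedged client-side sketch; the requests library, the photo.jpg path, and a local instance on the default port 3003 are assumptions:

# Illustrative client call -- not part of the diff.
import requests

with open("photo.jpg", "rb") as f:
    image_bytes = f.read()

resp = requests.post("http://localhost:3003/image-classifier/tag-image", data=image_bytes)
print(resp.json())  # a JSON list of tags once the classification model has loaded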
||||
119
machine-learning/app/models.py
Normal file
@@ -0,0 +1,119 @@
import torch
from insightface.app import FaceAnalysis
from pathlib import Path

from transformers import pipeline, Pipeline
from sentence_transformers import SentenceTransformer
from typing import Any, BinaryIO
import cv2 as cv
import numpy as np
from PIL import Image
from config import settings

device = "cuda" if torch.cuda.is_available() else "cpu"


def get_model(model_name: str, model_type: str, **model_kwargs):
    """
    Instantiates the specified model.

    Args:
        model_name: Name of model in the model hub used for the task.
        model_type: Model type or task, which determines which model zoo is used.
            `facial-recognition` uses Insightface, while all other models use the HF Model Hub.

            Options:
                `image-classification`, `clip`, `facial-recognition`, `tokenizer`, `processor`

    Returns:
        model: The requested model.
    """

    cache_dir = _get_cache_dir(model_name, model_type)
    match model_type:
        case "facial-recognition":
            model = _load_facial_recognition(
                model_name, cache_dir=cache_dir, **model_kwargs
            )
        case "clip":
            model = SentenceTransformer(
                model_name, cache_folder=cache_dir, **model_kwargs
            )
        case _:
            model = pipeline(
                model_type,
                model_name,
                model_kwargs={"cache_dir": cache_dir, **model_kwargs},
            )

    return model


def run_classification(
    model: Pipeline, image: Image, min_score: float | None = None
):
    predictions: list[dict[str, Any]] = model(image)  # type: ignore
    result = {
        tag
        for pred in predictions
        for tag in pred["label"].split(", ")
        if min_score is None or pred["score"] >= min_score
    }

    return list(result)


def run_facial_recognition(
    model: FaceAnalysis, image: bytes
) -> list[dict[str, Any]]:
    file_bytes = np.frombuffer(image, dtype=np.uint8)
    img = cv.imdecode(file_bytes, cv.IMREAD_COLOR)
    height, width, _ = img.shape
    results = []
    faces = model.get(img)

    for face in faces:
        x1, y1, x2, y2 = face.bbox

        results.append(
            {
                "imageWidth": width,
                "imageHeight": height,
                "boundingBox": {
                    "x1": round(x1),
                    "y1": round(y1),
                    "x2": round(x2),
                    "y2": round(y2),
                },
                "score": face.det_score.item(),
                "embedding": face.normed_embedding.tolist(),
            }
        )
    return results


def _load_facial_recognition(
    model_name: str,
    min_face_score: float | None = None,
    cache_dir: Path | str | None = None,
    **model_kwargs,
):
    if cache_dir is None:
        cache_dir = _get_cache_dir(model_name, "facial-recognition")
    if isinstance(cache_dir, Path):
        cache_dir = cache_dir.as_posix()
    if min_face_score is None:
        min_face_score = settings.min_face_score

    model = FaceAnalysis(
        name=model_name,
        root=cache_dir,
        allowed_modules=["detection", "recognition"],
        **model_kwargs,
    )
    model.prepare(ctx_id=0, det_thresh=min_face_score, det_size=(640, 640))
    return model


def _get_cache_dir(model_name: str, model_type: str) -> Path:
    return Path(settings.cache_folder, device, model_type, model_name)
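As a quick illustration of the tag logic above (not part of the diff, and using made-up prediction values), `run_classification` splits each comma-separated label into individual tags, drops predictions below the score threshold, and deduplicates via a set:

```python
predictions = [
    {"label": "tabby, tabby cat", "score": 0.92},  # hypothetical pipeline output
    {"label": "Egyptian cat", "score": 0.15},      # hypothetical pipeline output
]
min_score = 0.9

# Same set comprehension as run_classification.
tags = {
    tag
    for pred in predictions
    for tag in pred["label"].split(", ")
    if min_score is None or pred["score"] >= min_score
}
print(sorted(tags))  # ['tabby', 'tabby cat'] -- the low-score prediction is filtered out
```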
56
machine-learning/app/schemas.py
Normal file
@@ -0,0 +1,56 @@
from pydantic import BaseModel


def to_lower_camel(string: str) -> str:
    tokens = [
        token.capitalize() if i > 0 else token
        for i, token in enumerate(string.split("_"))
    ]
    return "".join(tokens)


class TextModelRequest(BaseModel):
    text: str


class TextResponse(BaseModel):
    __root__: str


class MessageResponse(BaseModel):
    message: str


class TagResponse(BaseModel):
    __root__: list[str]


class Embedding(BaseModel):
    __root__: list[float]


class EmbeddingResponse(BaseModel):
    __root__: Embedding


class BoundingBox(BaseModel):
    x1: int
    y1: int
    x2: int
    y2: int


class Face(BaseModel):
    image_width: int
    image_height: int
    bounding_box: BoundingBox
    score: float
    embedding: Embedding

    class Config:
        alias_generator = to_lower_camel
        allow_population_by_field_name = True


class FaceResponse(BaseModel):
    __root__: list[Face]
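A small illustration (not part of the diff) of what `to_lower_camel` produces for the `Face` fields; the `alias_generator` plus `allow_population_by_field_name` is what lets the camelCase dicts built in `run_facial_recognition` validate against this response model while the Python code keeps snake_case field names:

```python
for field in ("image_width", "image_height", "bounding_box", "score", "embedding"):
    print(field, "->", to_lower_camel(field))
# image_width -> imageWidth
# image_height -> imageHeight
# bounding_box -> boundingBox
# score -> score
# embedding -> embedding
```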
2472
machine-learning/poetry.lock
generated
Normal file
57
machine-learning/pyproject.toml
Normal file
@@ -0,0 +1,57 @@
[tool.poetry]
name = "machine-learning"
version = "1.59.1"
description = ""
authors = ["Hau Tran <alex.tran1502@gmail.com>"]
readme = "README.md"
packages = [{include = "app"}]

[tool.poetry.dependencies]
python = "^3.11"
torch = [
    {markers = "platform_machine == 'arm64' or platform_machine == 'aarch64'", version = "=2.0.1", source = "pypi"},
    {markers = "platform_machine == 'amd64' or platform_machine == 'x86_64'", version = "=2.0.1", source = "pytorch-cpu"}
]
transformers = "^4.29.2"
sentence-transformers = "^2.2.2"
onnxruntime = "^1.15.0"
insightface = "^0.7.3"
opencv-python-headless = "^4.7.0.72"
pillow = "^9.5.0"
fastapi = "^0.95.2"
uvicorn = {extras = ["standard"], version = "^0.22.0"}
pydantic = "^1.10.8"
aiocache = "^0.12.1"

[tool.poetry.group.dev.dependencies]
mypy = "^1.3.0"
black = "^23.3.0"
pytest = "^7.3.1"

[[tool.poetry.source]]
name = "pytorch-cpu"
url = "https://download.pytorch.org/whl/cpu"
priority = "explicit"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

[tool.flake8]
max-line-length = 120

[tool.mypy]
python_version = "3.11"
plugins = "pydantic.mypy"
follow_imports = "silent"
warn_redundant_casts = true
disallow_any_generics = true
check_untyped_defs = true
no_implicit_reexport = true
disallow_untyped_defs = true

[tool.pydantic-mypy]
init_forbid_extra = true
init_typed = true
warn_required_dynamic_aliases = true
warn_untyped_fields = true
@@ -1,58 +0,0 @@
import os
from flask import Flask, request
from transformers import pipeline


server = Flask(__name__)


classifier = pipeline(
    task="image-classification",
    model="microsoft/resnet-50"
)

detector = pipeline(
    task="object-detection",
    model="hustvl/yolos-tiny"
)


# Environment resolver
is_dev = os.getenv('NODE_ENV') == 'development'
server_port = os.getenv('MACHINE_LEARNING_PORT') or 3003


@server.route("/ping")
def ping():
    return "pong"


@server.route("/object-detection/detect-object", methods=['POST'])
def object_detection():
    assetPath = request.json['thumbnailPath']
    return run_engine(detector, assetPath), 201


@server.route("/image-classifier/tag-image", methods=['POST'])
def image_classification():
    assetPath = request.json['thumbnailPath']
    return run_engine(classifier, assetPath), 201


def run_engine(engine, path):
    result = []
    predictions = engine(path)

    for index, pred in enumerate(predictions):
        tags = pred['label'].split(', ')
        if (pred['score'] > 0.9):
            result = [*result, *tags]

    if (len(result) > 1):
        result = list(set(result))

    return result


if __name__ == "__main__":
    server.run(debug=is_dev, host='0.0.0.0', port=server_port)
@@ -61,26 +61,18 @@ fi
if [ "$CURRENT_SERVER" != "$NEXT_SERVER" ]; then
  echo "Pumping Server: $CURRENT_SERVER => $NEXT_SERVER"
  npm --prefix server version $SERVER_PUMP
  npm --prefix server run api:generate
  poetry --directory machine-learning version $SERVER_PUMP
fi

if [ "$CURRENT_MOBILE" != "$NEXT_MOBILE" ]; then
  echo "Pumping Mobile: $CURRENT_MOBILE => $NEXT_MOBILE"
fi

sed -i "s/^ \"version\": \"$CURRENT_SERVER\",$/ \"version\": \"$NEXT_SERVER\",/" server/package.json
sed -i "s/^ \"version\": \"$CURRENT_SERVER\",$/ \"version\": \"$NEXT_SERVER\",/" server/package-lock.json
sed -i "s/\"version\": \"$CURRENT_SERVER\",$/\"version\": \"$NEXT_SERVER\",/" server/immich-openapi-specs.json
sed -i "s/\"android\.injected\.version\.name\" => \"$CURRENT_SERVER\",/\"android\.injected\.version\.name\" => \"$NEXT_SERVER\",/" mobile/android/fastlane/Fastfile
sed -i "s/version_number: \"$CURRENT_SERVER\"$/version_number: \"$NEXT_SERVER\"/" mobile/ios/fastlane/Fastfile
sed -i "s/\"android\.injected\.version\.code\" => $CURRENT_MOBILE,/\"android\.injected\.version\.code\" => $NEXT_MOBILE,/" mobile/android/fastlane/Fastfile
sed -i "s/^version: $CURRENT_SERVER+$CURRENT_MOBILE$/version: $NEXT_SERVER+$NEXT_MOBILE/" mobile/pubspec.yaml

# OpenApi Generated Files
sed -i "s/API version: $CURRENT_SERVER,$/API version: $NEXT_SERVER/" mobile/openapi/README.md
sed -i "s/OpenAPI document: $CURRENT_SERVER,$/OpenAPI document: $NEXT_SERVER/" web/src/api/open-api/api.ts
sed -i "s/OpenAPI document: $CURRENT_SERVER,$/OpenAPI document: $NEXT_SERVER/" web/src/api/open-api/base.ts
sed -i "s/OpenAPI document: $CURRENT_SERVER,$/OpenAPI document: $NEXT_SERVER/" web/src/api/open-api/common.ts
sed -i "s/OpenAPI document: $CURRENT_SERVER,$/OpenAPI document: $NEXT_SERVER/" web/src/api/open-api/configuration.ts
sed -i "s/OpenAPI document: $CURRENT_SERVER,$/OpenAPI document: $NEXT_SERVER/" web/src/api/open-api/index.ts

echo "IMMICH_VERSION=v$NEXT_SERVER" >>$GITHUB_ENV
@@ -1,4 +1,4 @@
{
  "flutterSdkVersion": "3.7.0",
  "flutterSdkVersion": "3.10.0",
  "flavors": {}
}
5
mobile/.gitignore
vendored
@@ -49,3 +49,8 @@ app.*.map.json

# Fastlane
ios/fastlane/report.xml

# Isar
default.isar
default.isar.lock
libisar.so
1
mobile/.isar
Submodule
@@ -23,6 +23,7 @@ if (flutterVersionName == null) {

apply plugin: 'com.android.application'
apply plugin: 'kotlin-android'
apply plugin: 'kotlin-kapt'
apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle"

def keystoreProperties = new Properties()
@@ -71,6 +72,11 @@ android {
    }

    buildTypes {
        debug {
            applicationIdSuffix '.debug'
            versionNameSuffix '-DEBUG'
        }

        release {
            signingConfig signingConfigs.release
        }
@@ -83,7 +89,10 @@ flutter {

dependencies {
    implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlin_version"
    implementation "org.jetbrains.kotlinx:kotlinx-coroutines-android:$kotlin_coroutines_version"
    implementation "androidx.work:work-runtime-ktx:$work_version"
    implementation "androidx.concurrent:concurrent-futures:$concurrent_version"
    implementation "com.google.guava:guava:$guava_version"
    implementation "com.github.bumptech.glide:glide:$glide_version"
    kapt "com.github.bumptech.glide:compiler:$glide_version"
}