Mirror of https://github.com/immich-app/immich.git (synced 2026-01-26 11:24:44 -08:00)

Compare commits: v1.107.0...feat/ml-ar (95 commits)
| SHA1 |
|---|
| 72269ab58c |
| 3db69b94ed |
| b5acb71b05 |
| b39cca1b43 |
| 3d62011ae3 |
| 1ad348c407 |
| 5dae920ac6 |
| 956480ab2c |
| 5748f50c1f |
| 1b3a7feb67 |
| d68bd876c1 |
| c50ac55892 |
| b2dd4e1c2b |
| ff2ba240c9 |
| 96084355f0 |
| 25a380d023 |
| 3cb42de931 |
| 8dd1d95913 |
| 0ee2390c7f |
| 52db9558b3 |
| 0fbfbc86d2 |
| c7432834d0 |
| a971fae81f |
| a58a2eec53 |
| f43721ec92 |
| 59aa347912 |
| 1dd1d36120 |
| 545b206076 |
| cf77487c00 |
| 9d8b755c07 |
| bd88b079ea |
| 27b13b82f5 |
| 79c8412660 |
| a078dde241 |
| 7e4e96c440 |
| 94f129d632 |
| 678111ed3b |
| c1036d6f88 |
| e8af0e859e |
| a0f6d7444a |
| eb89208abb |
| af94f0f979 |
| 025a54c462 |
| 334a709cc6 |
| 5f25e2ce82 |
| 04d0f575b7 |
| e9683b326a |
| cb40db9555 |
| 39221c8d1f |
| a5467d60ea |
| d582ec02b1 |
| 59cdbdc492 |
| 01706ccf5c |
| 6c49a4ba34 |
| e1f25b44d2 |
| f6cafa3290 |
| 53d4a5268b |
| cf88f4b6f8 |
| ac8d8d91f7 |
| 842291124c |
| 6f5b3c47b0 |
| b25642b889 |
| 7bde19d842 |
| eb1ba11d60 |
| 23b3073687 |
| 3cd187dced |
| 6791af8c2c |
| e566fbb009 |
| e5c92912fc |
| f33d5b0a38 |
| df10618a7e |
| 6030349a6f |
| 6629bf50ae |
| e32ce82179 |
| 10ea894186 |
| 81d12c0586 |
| 0b88bef157 |
| 2b8942026c |
| f5937a5a9b |
| 04f0ac1aad |
| 4a481acca6 |
| de62bd3ba5 |
| ab2ea28ed9 |
| 96f29cefeb |
| 6f950ea45d |
| 99c45bd4d2 |
| 312030f275 |
| bed9ccadbc |
| d55499eba0 |
| 910b75c6cc |
| 6a11464d60 |
| aa29f5d69c |
| 1ee10ee2d6 |
| f23401d911 |
| 14d94df1b8 |
.github/workflows/cli.yml (vendored, 6 lines changed)

@@ -56,10 +56,10 @@ jobs:
         uses: actions/checkout@v4

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v3.1.0

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3.3.0
+        uses: docker/setup-buildx-action@v3.4.0

       - name: Login to GitHub Container Registry
         uses: docker/login-action@v3
@@ -88,7 +88,7 @@ jobs:
             type=raw,value=latest,enable=${{ github.event_name == 'release' }}

       - name: Build and push image
-        uses: docker/build-push-action@v6.2.0
+        uses: docker/build-push-action@v6.3.0
         with:
           file: cli/Dockerfile
           platforms: linux/amd64,linux/arm64
.github/workflows/docker.yml (vendored, 6 lines changed)

@@ -63,10 +63,10 @@ jobs:
         uses: actions/checkout@v4

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3.0.0
+        uses: docker/setup-qemu-action@v3.1.0

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3.3.0
+        uses: docker/setup-buildx-action@v3.4.0

       - name: Login to Docker Hub
         # Only push to Docker Hub when making a release
@@ -115,7 +115,7 @@ jobs:
           fi

       - name: Build and push image
-        uses: docker/build-push-action@v6.2.0
+        uses: docker/build-push-action@v6.3.0
         with:
           context: ${{ matrix.context }}
           file: ${{ matrix.file }}
@@ -131,4 +131,4 @@ conduct enforcement ladder](https://github.com/mozilla/diversity).

 For answers to common questions about this code of conduct, see the
 FAQ at https://www.contributor-covenant.org/faq. Translations are
-available at https://www.contributor-covenant.org/translations.
+available at https://www.contributor-covenant.org/translations.
@@ -54,7 +54,7 @@
 - [Roadmap](https://immich.app/roadmap)
 - [Demo](#demo)
 - [Features](#features)
-- [Translations](https://immich.app/docs/developer/tranlations)
+- [Translations](https://immich.app/docs/developer/translations)
 - [Contributing](https://immich.app/docs/overview/support-the-project)

 ## Demo
@@ -1 +1 @@
-20.15
+20.15.1
@@ -1,4 +1,4 @@
-FROM node:20.15.0-alpine3.20@sha256:df01469346db2bf1cfc1f7261aeab86b2960efa840fe2bd46d83ff339f463665 as core
+FROM node:20.15.1-alpine3.20@sha256:34b7aa411056c85dbf71d240d26516949b3f72b318d796c26b57caaa1df5639a as core

 WORKDIR /usr/src/open-api/typescript-sdk
 COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
cli/package-lock.json (generated, 219 lines changed)
@@ -1,15 +1,16 @@
 {
   "name": "@immich/cli",
-  "version": "2.2.5",
+  "version": "2.2.8",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@immich/cli",
-      "version": "2.2.5",
+      "version": "2.2.8",
       "license": "GNU Affero General Public License version 3",
       "dependencies": {
         "fast-glob": "^3.3.2",
+        "fastq": "^1.17.1",
         "lodash-es": "^4.17.21"
       },
       "bin": {
@@ -21,7 +22,7 @@
       "@types/cli-progress": "^3.11.0",
       "@types/lodash-es": "^4.17.12",
       "@types/mock-fs": "^4.13.1",
-      "@types/node": "^20.14.9",
+      "@types/node": "^20.14.10",
       "@typescript-eslint/eslint-plugin": "^7.0.0",
       "@typescript-eslint/parser": "^7.0.0",
       "@vitest/coverage-v8": "^1.2.2",
@@ -34,11 +35,12 @@
       "eslint-plugin-unicorn": "^54.0.0",
       "mock-fs": "^5.2.0",
       "prettier": "^3.2.5",
-      "prettier-plugin-organize-imports": "^3.2.4",
+      "prettier-plugin-organize-imports": "^4.0.0",
       "typescript": "^5.3.3",
       "vite": "^5.0.12",
       "vite-tsconfig-paths": "^4.3.2",
       "vitest": "^1.2.2",
+      "vitest-fetch-mock": "^0.2.2",
       "yaml": "^2.3.1"
     },
     "engines": {
@@ -47,14 +49,14 @@
     },
     "../open-api/typescript-sdk": {
       "name": "@immich/sdk",
-      "version": "1.107.0",
+      "version": "1.108.0",
       "dev": true,
       "license": "GNU Affero General Public License version 3",
       "dependencies": {
         "@oazapfts/runtime": "^1.0.2"
       },
       "devDependencies": {
-        "@types/node": "^20.14.9",
+        "@types/node": "^20.14.10",
         "typescript": "^5.3.3"
       }
     },
@@ -1161,9 +1163,9 @@
       }
     },
     "node_modules/@types/node": {
-      "version": "20.14.9",
-      "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.9.tgz",
-      "integrity": "sha512-06OCtnTXtWOZBJlRApleWndH4JsRVs1pDCc8dLSQp+7PpUpX3ePdHyeNSFTeSe7FtKyQkrlPvHwJOW3SLd8Oyg==",
+      "version": "20.14.10",
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.10.tgz",
+      "integrity": "sha512-MdiXf+nDuMvY0gJKxyfZ7/6UFsETO7mGKF54MVD/ekJS6HdFtpZFBgrh6Pseu64XTb2MLyFPlbW6hj8HYRQNOQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1177,17 +1179,17 @@
       "dev": true
     },
     "node_modules/@typescript-eslint/eslint-plugin": {
-      "version": "7.14.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.14.1.tgz",
-      "integrity": "sha512-aAJd6bIf2vvQRjUG3ZkNXkmBpN+J7Wd0mfQiiVCJMu9Z5GcZZdcc0j8XwN/BM97Fl7e3SkTXODSk4VehUv7CGw==",
+      "version": "7.15.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.15.0.tgz",
+      "integrity": "sha512-uiNHpyjZtFrLwLDpHnzaDlP3Tt6sGMqTCiqmxaN4n4RP0EfYZDODJyddiFDF44Hjwxr5xAcaYxVKm9QKQFJFLA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
         "@eslint-community/regexpp": "^4.10.0",
-        "@typescript-eslint/scope-manager": "7.14.1",
-        "@typescript-eslint/type-utils": "7.14.1",
-        "@typescript-eslint/utils": "7.14.1",
-        "@typescript-eslint/visitor-keys": "7.14.1",
+        "@typescript-eslint/scope-manager": "7.15.0",
+        "@typescript-eslint/type-utils": "7.15.0",
+        "@typescript-eslint/utils": "7.15.0",
+        "@typescript-eslint/visitor-keys": "7.15.0",
         "graphemer": "^1.4.0",
         "ignore": "^5.3.1",
         "natural-compare": "^1.4.0",
@@ -1211,16 +1213,16 @@
       }
     },
     "node_modules/@typescript-eslint/parser": {
-      "version": "7.14.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.14.1.tgz",
-      "integrity": "sha512-8lKUOebNLcR0D7RvlcloOacTOWzOqemWEWkKSVpMZVF/XVcwjPR+3MD08QzbW9TCGJ+DwIc6zUSGZ9vd8cO1IA==",
+      "version": "7.15.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.15.0.tgz",
+      "integrity": "sha512-k9fYuQNnypLFcqORNClRykkGOMOj+pV6V91R4GO/l1FDGwpqmSwoOQrOHo3cGaH63e+D3ZiCAOsuS/D2c99j/A==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
-        "@typescript-eslint/scope-manager": "7.14.1",
-        "@typescript-eslint/types": "7.14.1",
-        "@typescript-eslint/typescript-estree": "7.14.1",
-        "@typescript-eslint/visitor-keys": "7.14.1",
+        "@typescript-eslint/scope-manager": "7.15.0",
+        "@typescript-eslint/types": "7.15.0",
+        "@typescript-eslint/typescript-estree": "7.15.0",
+        "@typescript-eslint/visitor-keys": "7.15.0",
         "debug": "^4.3.4"
       },
       "engines": {
@@ -1240,14 +1242,14 @@
       }
     },
     "node_modules/@typescript-eslint/scope-manager": {
-      "version": "7.14.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.14.1.tgz",
-      "integrity": "sha512-gPrFSsoYcsffYXTOZ+hT7fyJr95rdVe4kGVX1ps/dJ+DfmlnjFN/GcMxXcVkeHDKqsq6uAcVaQaIi3cFffmAbA==",
+      "version": "7.15.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.15.0.tgz",
+      "integrity": "sha512-Q/1yrF/XbxOTvttNVPihxh1b9fxamjEoz2Os/Pe38OHwxC24CyCqXxGTOdpb4lt6HYtqw9HetA/Rf6gDGaMPlw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@typescript-eslint/types": "7.14.1",
-        "@typescript-eslint/visitor-keys": "7.14.1"
+        "@typescript-eslint/types": "7.15.0",
+        "@typescript-eslint/visitor-keys": "7.15.0"
       },
       "engines": {
         "node": "^18.18.0 || >=20.0.0"
@@ -1258,14 +1260,14 @@
       }
     },
     "node_modules/@typescript-eslint/type-utils": {
-      "version": "7.14.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.14.1.tgz",
-      "integrity": "sha512-/MzmgNd3nnbDbOi3LfasXWWe292+iuo+umJ0bCCMCPc1jLO/z2BQmWUUUXvXLbrQey/JgzdF/OV+I5bzEGwJkQ==",
+      "version": "7.15.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.15.0.tgz",
+      "integrity": "sha512-SkgriaeV6PDvpA6253PDVep0qCqgbO1IOBiycjnXsszNTVQe5flN5wR5jiczoEoDEnAqYFSFFc9al9BSGVltkg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@typescript-eslint/typescript-estree": "7.14.1",
-        "@typescript-eslint/utils": "7.14.1",
+        "@typescript-eslint/typescript-estree": "7.15.0",
+        "@typescript-eslint/utils": "7.15.0",
         "debug": "^4.3.4",
         "ts-api-utils": "^1.3.0"
       },
@@ -1286,9 +1288,9 @@
       }
     },
     "node_modules/@typescript-eslint/types": {
-      "version": "7.14.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.14.1.tgz",
-      "integrity": "sha512-mL7zNEOQybo5R3AavY+Am7KLv8BorIv7HCYS5rKoNZKQD9tsfGUpO4KdAn3sSUvTiS4PQkr2+K0KJbxj8H9NDg==",
+      "version": "7.15.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.15.0.tgz",
+      "integrity": "sha512-aV1+B1+ySXbQH0pLK0rx66I3IkiZNidYobyfn0WFsdGhSXw+P3YOqeTq5GED458SfB24tg+ux3S+9g118hjlTw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -1300,14 +1302,14 @@
       }
     },
     "node_modules/@typescript-eslint/typescript-estree": {
-      "version": "7.14.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.14.1.tgz",
-      "integrity": "sha512-k5d0VuxViE2ulIO6FbxxSZaxqDVUyMbXcidC8rHvii0I56XZPv8cq+EhMns+d/EVIL41sMXqRbK3D10Oza1bbA==",
+      "version": "7.15.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.15.0.tgz",
+      "integrity": "sha512-gjyB/rHAopL/XxfmYThQbXbzRMGhZzGw6KpcMbfe8Q3nNQKStpxnUKeXb0KiN/fFDR42Z43szs6rY7eHk0zdGQ==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
-        "@typescript-eslint/types": "7.14.1",
-        "@typescript-eslint/visitor-keys": "7.14.1",
+        "@typescript-eslint/types": "7.15.0",
+        "@typescript-eslint/visitor-keys": "7.15.0",
         "debug": "^4.3.4",
         "globby": "^11.1.0",
         "is-glob": "^4.0.3",
@@ -1329,16 +1331,16 @@
       }
     },
     "node_modules/@typescript-eslint/utils": {
-      "version": "7.14.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.14.1.tgz",
-      "integrity": "sha512-CMmVVELns3nak3cpJhZosDkm63n+DwBlDX8g0k4QUa9BMnF+lH2lr3d130M1Zt1xxmB3LLk3NV7KQCq86ZBBhQ==",
+      "version": "7.15.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.15.0.tgz",
+      "integrity": "sha512-hfDMDqaqOqsUVGiEPSMLR/AjTSCsmJwjpKkYQRo1FNbmW4tBwBspYDwO9eh7sKSTwMQgBw9/T4DHudPaqshRWA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
         "@eslint-community/eslint-utils": "^4.4.0",
-        "@typescript-eslint/scope-manager": "7.14.1",
-        "@typescript-eslint/types": "7.14.1",
-        "@typescript-eslint/typescript-estree": "7.14.1"
+        "@typescript-eslint/scope-manager": "7.15.0",
+        "@typescript-eslint/types": "7.15.0",
+        "@typescript-eslint/typescript-estree": "7.15.0"
       },
       "engines": {
         "node": "^18.18.0 || >=20.0.0"
@@ -1352,13 +1354,13 @@
       }
     },
     "node_modules/@typescript-eslint/visitor-keys": {
-      "version": "7.14.1",
-      "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.14.1.tgz",
-      "integrity": "sha512-Crb+F75U1JAEtBeQGxSKwI60hZmmzaqA3z9sYsVm8X7W5cwLEm5bRe0/uXS6+MR/y8CVpKSR/ontIAIEPFcEkA==",
+      "version": "7.15.0",
+      "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.15.0.tgz",
+      "integrity": "sha512-Hqgy/ETgpt2L5xueA/zHHIl4fJI2O4XUE9l4+OIfbJIRSnTJb/QscncdqqZzofQegIJugRIF57OJea1khw2SDw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@typescript-eslint/types": "7.14.1",
+        "@typescript-eslint/types": "7.15.0",
         "eslint-visitor-keys": "^3.4.3"
       },
       "engines": {
@@ -1853,6 +1855,15 @@
         "url": "https://opencollective.com/core-js"
       }
     },
+    "node_modules/cross-fetch": {
+      "version": "3.1.8",
+      "resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-3.1.8.tgz",
+      "integrity": "sha512-cvA+JwZoU0Xq+h6WkMvAUqPEYy92Obet6UdKLfW60qn99ftItKjB5T+BkyWOFWe2pUyfQ+IJHmpOTznqk1M6Kg==",
+      "dev": true,
+      "dependencies": {
+        "node-fetch": "^2.6.12"
+      }
+    },
     "node_modules/cross-spawn": {
       "version": "7.0.3",
       "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
@@ -3146,6 +3157,26 @@
       "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==",
       "dev": true
     },
+    "node_modules/node-fetch": {
+      "version": "2.7.0",
+      "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz",
+      "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==",
+      "dev": true,
+      "dependencies": {
+        "whatwg-url": "^5.0.0"
+      },
+      "engines": {
+        "node": "4.x || >=6.0.0"
+      },
+      "peerDependencies": {
+        "encoding": "^0.1.0"
+      },
+      "peerDependenciesMeta": {
+        "encoding": {
+          "optional": true
+        }
+      }
+    },
     "node_modules/node-releases": {
       "version": "2.0.14",
       "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz",
@@ -3369,10 +3400,11 @@
       }
     },
     "node_modules/picocolors": {
-      "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz",
-      "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==",
-      "dev": true
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz",
+      "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==",
+      "dev": true,
+      "license": "ISC"
     },
     "node_modules/picomatch": {
       "version": "2.3.1",
@@ -3406,9 +3438,9 @@
       }
     },
     "node_modules/postcss": {
-      "version": "8.4.38",
-      "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.38.tgz",
-      "integrity": "sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==",
+      "version": "8.4.39",
+      "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.39.tgz",
+      "integrity": "sha512-0vzE+lAiG7hZl1/9I8yzKLx3aR9Xbof3fBHKunvMfOCYAtMhrsnccJY2iTURb9EZd5+pLuiNV9/c/GZJOHsgIw==",
       "dev": true,
       "funding": [
         {
@@ -3424,9 +3456,10 @@
           "url": "https://github.com/sponsors/ai"
         }
       ],
+      "license": "MIT",
       "dependencies": {
         "nanoid": "^3.3.7",
-        "picocolors": "^1.0.0",
+        "picocolors": "^1.0.1",
         "source-map-js": "^1.2.0"
       },
       "engines": {
@@ -3471,21 +3504,22 @@
       }
     },
     "node_modules/prettier-plugin-organize-imports": {
-      "version": "3.2.4",
-      "resolved": "https://registry.npmjs.org/prettier-plugin-organize-imports/-/prettier-plugin-organize-imports-3.2.4.tgz",
-      "integrity": "sha512-6m8WBhIp0dfwu0SkgfOxJqh+HpdyfqSSLfKKRZSFbDuEQXDDndb8fTpRWkUrX/uBenkex3MgnVk0J3b3Y5byog==",
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/prettier-plugin-organize-imports/-/prettier-plugin-organize-imports-4.0.0.tgz",
+      "integrity": "sha512-vnKSdgv9aOlqKeEFGhf9SCBsTyzDSyScy1k7E0R1Uo4L0cTcOV7c1XQaT7jfXIOc/p08WLBfN2QUQA9zDSZMxA==",
       "dev": true,
+      "license": "MIT",
       "peerDependencies": {
-        "@volar/vue-language-plugin-pug": "^1.0.4",
-        "@volar/vue-typescript": "^1.0.4",
+        "@vue/language-plugin-pug": "^2.0.24",
         "prettier": ">=2.0",
-        "typescript": ">=2.9"
+        "typescript": ">=2.9",
+        "vue-tsc": "^2.0.24"
       },
       "peerDependenciesMeta": {
-        "@volar/vue-language-plugin-pug": {
+        "@vue/language-plugin-pug": {
           "optional": true
         },
-        "@volar/vue-typescript": {
+        "vue-tsc": {
           "optional": true
         }
       }
@@ -4168,6 +4202,12 @@
         "node": ">=8.0"
       }
     },
+    "node_modules/tr46": {
+      "version": "0.0.3",
+      "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
+      "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==",
+      "dev": true
+    },
     "node_modules/ts-api-utils": {
       "version": "1.3.0",
       "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.3.0.tgz",
@@ -4240,9 +4280,9 @@
       }
     },
     "node_modules/typescript": {
-      "version": "5.5.2",
-      "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.2.tgz",
-      "integrity": "sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew==",
+      "version": "5.5.3",
+      "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.3.tgz",
+      "integrity": "sha512-/hreyEujaB0w76zKo6717l3L0o/qEUtRgdvUBvlkhoWeOVMjMuHNHk0BRBzikzuGDqNmPQbg5ifMEqsHLiIUcQ==",
       "dev": true,
       "license": "Apache-2.0",
       "bin": {
@@ -4315,14 +4355,14 @@
       }
     },
     "node_modules/vite": {
-      "version": "5.3.1",
-      "resolved": "https://registry.npmjs.org/vite/-/vite-5.3.1.tgz",
-      "integrity": "sha512-XBmSKRLXLxiaPYamLv3/hnP/KXDai1NDexN0FpkTaZXTfycHvkRHoenpgl/fvuK/kPbB6xAgoyiryAhQNxYmAQ==",
+      "version": "5.3.3",
+      "resolved": "https://registry.npmjs.org/vite/-/vite-5.3.3.tgz",
+      "integrity": "sha512-NPQdeCU0Dv2z5fu+ULotpuq5yfCS1BzKUIPhNbP3YBfAMGJXbt2nS+sbTFu+qchaqWTD+H3JK++nRwr6XIcp6A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
         "esbuild": "^0.21.3",
-        "postcss": "^8.4.38",
+        "postcss": "^8.4.39",
         "rollup": "^4.13.0"
       },
       "bin": {
@@ -4476,6 +4516,37 @@
         }
       }
     },
+    "node_modules/vitest-fetch-mock": {
+      "version": "0.2.2",
+      "resolved": "https://registry.npmjs.org/vitest-fetch-mock/-/vitest-fetch-mock-0.2.2.tgz",
+      "integrity": "sha512-XmH6QgTSjCWrqXoPREIdbj40T7i1xnGmAsTAgfckoO75W1IEHKR8hcPCQ7SO16RsdW1t85oUm6pcQRLeBgjVYQ==",
+      "dev": true,
+      "dependencies": {
+        "cross-fetch": "^3.0.6"
+      },
+      "engines": {
+        "node": ">=14.14.0"
+      },
+      "peerDependencies": {
+        "vitest": ">=0.16.0"
+      }
+    },
+    "node_modules/webidl-conversions": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
+      "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==",
+      "dev": true
+    },
+    "node_modules/whatwg-url": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
+      "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
+      "dev": true,
+      "dependencies": {
+        "tr46": "~0.0.3",
+        "webidl-conversions": "^3.0.0"
+      }
+    },
     "node_modules/which": {
       "version": "2.0.2",
       "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
@@ -1,6 +1,6 @@
 {
   "name": "@immich/cli",
-  "version": "2.2.5",
+  "version": "2.2.8",
   "description": "Command Line Interface (CLI) for Immich",
   "type": "module",
   "exports": "./dist/index.js",
@@ -18,7 +18,7 @@
     "@types/cli-progress": "^3.11.0",
     "@types/lodash-es": "^4.17.12",
     "@types/mock-fs": "^4.13.1",
-    "@types/node": "^20.14.9",
+    "@types/node": "^20.14.10",
    "@typescript-eslint/eslint-plugin": "^7.0.0",
    "@typescript-eslint/parser": "^7.0.0",
    "@vitest/coverage-v8": "^1.2.2",
@@ -31,11 +31,12 @@
     "eslint-plugin-unicorn": "^54.0.0",
     "mock-fs": "^5.2.0",
     "prettier": "^3.2.5",
-    "prettier-plugin-organize-imports": "^3.2.4",
+    "prettier-plugin-organize-imports": "^4.0.0",
     "typescript": "^5.3.3",
     "vite": "^5.0.12",
     "vite-tsconfig-paths": "^4.3.2",
     "vitest": "^1.2.2",
+    "vitest-fetch-mock": "^0.2.2",
     "yaml": "^2.3.1"
   },
   "scripts": {
@@ -59,9 +60,10 @@
   },
   "dependencies": {
     "fast-glob": "^3.3.2",
+    "fastq": "^1.17.1",
     "lodash-es": "^4.17.21"
   },
   "volta": {
-    "node": "20.15.0"
+    "node": "20.15.1"
   }
 }
@@ -1,10 +1,18 @@
-import { platform } from 'node:os';
-import { UploadOptionsDto, getAlbumName } from 'src/commands/asset';
-import { describe, expect, it } from 'vitest';
+import { Action, checkBulkUpload, defaults, Reason } from '@immich/sdk';
+import * as fs from 'node:fs';
+import * as os from 'node:os';
+import * as path from 'node:path';
+import { beforeEach, describe, expect, it, vi } from 'vitest';
+import createFetchMock from 'vitest-fetch-mock';
+
+import { checkForDuplicates, getAlbumName, uploadFiles, UploadOptionsDto } from './asset';
+
+vi.mock('@immich/sdk');

-describe('Unit function tests', () => {
+describe('getAlbumName', () => {
   it('should return a non-undefined value', () => {
-    if (platform() === 'win32') {
+    if (os.platform() === 'win32') {
       // This is meaningless for Unix systems.
       expect(getAlbumName(String.raw`D:\test\Filename.txt`, {} as UploadOptionsDto)).toBe('test');
     }
@@ -17,3 +25,177 @@ describe('Unit function tests', () => {
     );
   });
 });
+
+describe('uploadFiles', () => {
+  const testDir = fs.mkdtempSync(path.join(os.tmpdir(), 'test-'));
+  const testFilePath = path.join(testDir, 'test.png');
+  const testFileData = 'test';
+  const baseUrl = 'http://example.com';
+  const apiKey = 'key';
+  const retry = 3;
+
+  const fetchMocker = createFetchMock(vi);
+
+  beforeEach(() => {
+    // Create a test file
+    fs.writeFileSync(testFilePath, testFileData);
+
+    // Defaults
+    vi.mocked(defaults).baseUrl = baseUrl;
+    vi.mocked(defaults).headers = { 'x-api-key': apiKey };
+
+    fetchMocker.enableMocks();
+    fetchMocker.resetMocks();
+  });
+
+  it('returns new assets when upload file is successful', async () => {
+    fetchMocker.doMockIf(new RegExp(`${baseUrl}/assets$`), () => {
+      return {
+        status: 200,
+        body: JSON.stringify({ id: 'fc5621b1-86f6-44a1-9905-403e607df9f5', status: 'created' }),
+      };
+    });
+
+    await expect(uploadFiles([testFilePath], { concurrency: 1 })).resolves.toEqual([
+      {
+        filepath: testFilePath,
+        id: 'fc5621b1-86f6-44a1-9905-403e607df9f5',
+      },
+    ]);
+  });
+
+  it('returns new assets when upload file retry is successful', async () => {
+    let counter = 0;
+    fetchMocker.doMockIf(new RegExp(`${baseUrl}/assets$`), () => {
+      counter++;
+      if (counter < retry) {
+        throw new Error('Network error');
+      }
+
+      return {
+        status: 200,
+        body: JSON.stringify({ id: 'fc5621b1-86f6-44a1-9905-403e607df9f5', status: 'created' }),
+      };
+    });
+
+    await expect(uploadFiles([testFilePath], { concurrency: 1 })).resolves.toEqual([
+      {
+        filepath: testFilePath,
+        id: 'fc5621b1-86f6-44a1-9905-403e607df9f5',
+      },
+    ]);
+  });
+
+  it('returns new assets when upload file retry is failed', async () => {
+    fetchMocker.doMockIf(new RegExp(`${baseUrl}/assets$`), () => {
+      throw new Error('Network error');
+    });
+
+    await expect(uploadFiles([testFilePath], { concurrency: 1 })).resolves.toEqual([]);
+  });
+});
+
+describe('checkForDuplicates', () => {
+  const testDir = fs.mkdtempSync(path.join(os.tmpdir(), 'test-'));
+  const testFilePath = path.join(testDir, 'test.png');
+  const testFileData = 'test';
+  const testFileChecksum = 'a94a8fe5ccb19ba61c4c0873d391e987982fbbd3'; // SHA1
+  const retry = 3;
+
+  beforeEach(() => {
+    // Create a test file
+    fs.writeFileSync(testFilePath, testFileData);
+  });
+
+  it('checks duplicates', async () => {
+    vi.mocked(checkBulkUpload).mockResolvedValue({
+      results: [
+        {
+          action: Action.Accept,
+          id: testFilePath,
+        },
+      ],
+    });
+
+    await checkForDuplicates([testFilePath], { concurrency: 1 });
+
+    expect(checkBulkUpload).toHaveBeenCalledWith({
+      assetBulkUploadCheckDto: {
+        assets: [
+          {
+            checksum: testFileChecksum,
+            id: testFilePath,
+          },
+        ],
+      },
+    });
+  });
+
+  it('returns duplicates when check duplicates is rejected', async () => {
+    vi.mocked(checkBulkUpload).mockResolvedValue({
+      results: [
+        {
+          action: Action.Reject,
+          id: testFilePath,
+          assetId: 'fc5621b1-86f6-44a1-9905-403e607df9f5',
+          reason: Reason.Duplicate,
+        },
+      ],
+    });
+
+    await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).resolves.toEqual({
+      duplicates: [
+        {
+          filepath: testFilePath,
+          id: 'fc5621b1-86f6-44a1-9905-403e607df9f5',
+        },
+      ],
+      newFiles: [],
+    });
+  });
+
+  it('returns new assets when check duplicates is accepted', async () => {
+    vi.mocked(checkBulkUpload).mockResolvedValue({
+      results: [
+        {
+          action: Action.Accept,
+          id: testFilePath,
+        },
+      ],
+    });
+
+    await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).resolves.toEqual({
+      duplicates: [],
+      newFiles: [testFilePath],
+    });
+  });
+
+  it('returns results when check duplicates retry is successful', async () => {
+    let mocked = vi.mocked(checkBulkUpload);
+    for (let i = 1; i < retry; i++) {
+      mocked = mocked.mockRejectedValueOnce(new Error('Network error'));
+    }
+    mocked.mockResolvedValue({
+      results: [
+        {
+          action: Action.Accept,
+          id: testFilePath,
+        },
+      ],
+    });
+
+    await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).resolves.toEqual({
+      duplicates: [],
+      newFiles: [testFilePath],
+    });
+  });
+
+  it('returns results when check duplicates retry is failed', async () => {
+    vi.mocked(checkBulkUpload).mockRejectedValue(new Error('Network error'));
+
+    await expect(checkForDuplicates([testFilePath], { concurrency: 1 })).resolves.toEqual({
+      duplicates: [],
+      newFiles: [],
+    });
+  });
+});
@@ -16,6 +16,7 @@ import { chunk } from 'lodash-es';
 import { Stats, createReadStream } from 'node:fs';
 import { stat, unlink } from 'node:fs/promises';
 import path, { basename } from 'node:path';
+import { Queue } from 'src/queue';
 import { BaseOptions, authenticate, crawl, sha1 } from 'src/utils';

 const s = (count: number) => (count === 1 ? '' : 's');
@@ -83,7 +84,7 @@ const scan = async (pathsToCrawl: string[], options: UploadOptionsDto) => {
   return files;
 };

-const checkForDuplicates = async (files: string[], { concurrency, skipHash }: UploadOptionsDto) => {
+export const checkForDuplicates = async (files: string[], { concurrency, skipHash }: UploadOptionsDto) => {
   if (skipHash) {
     console.log('Skipping hash check, assuming all files are new');
     return { newFiles: files, duplicates: [] };
@@ -99,32 +100,50 @@ const checkForDuplicates = async (files: string[], { concurrency, skipHash }: Up
   const newFiles: string[] = [];
   const duplicates: Asset[] = [];

-  try {
-    // TODO refactor into a queue
-    for (const items of chunk(files, concurrency)) {
-      const dto = await Promise.all(items.map(async (filepath) => ({ id: filepath, checksum: await sha1(filepath) })));
-      const { results } = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: dto } });
-
-      for (const { id: filepath, assetId, action } of results as AssetBulkUploadCheckResults) {
-        if (action === Action.Accept) {
-          newFiles.push(filepath);
-        } else {
-          // rejects are always duplicates
-          duplicates.push({ id: assetId as string, filepath });
-        }
-        progressBar.increment();
+  const queue = new Queue<string[], AssetBulkUploadCheckResults>(
+    async (filepaths: string[]) => {
+      const dto = await Promise.all(
+        filepaths.map(async (filepath) => ({ id: filepath, checksum: await sha1(filepath) })),
+      );
+      const response = await checkBulkUpload({ assetBulkUploadCheckDto: { assets: dto } });
+      const results = response.results as AssetBulkUploadCheckResults;
+      for (const { id: filepath, assetId, action } of results) {
+        if (action === Action.Accept) {
+          newFiles.push(filepath);
+        } else {
+          // rejects are always duplicates
+          duplicates.push({ id: assetId as string, filepath });
+        }
       }
-    }
-  } finally {
-    progressBar.stop();
+      progressBar.increment(filepaths.length);
+      return results;
+    },
+    { concurrency, retry: 3 },
+  );
+
+  for (const items of chunk(files, concurrency)) {
+    await queue.push(items);
   }

+  await queue.drained();
+
+  progressBar.stop();
+
   console.log(`Found ${newFiles.length} new files and ${duplicates.length} duplicate${s(duplicates.length)}`);

+  // Report failures
+  const failedTasks = queue.tasks.filter((task) => task.status === 'failed');
+  if (failedTasks.length > 0) {
+    console.log(`Failed to verify ${failedTasks.length} file${s(failedTasks.length)}:`);
+    for (const task of failedTasks) {
+      console.log(`- ${task.data} - ${task.error}`);
+    }
+  }
+
   return { newFiles, duplicates };
 };

-const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptionsDto): Promise<Asset[]> => {
+export const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptionsDto): Promise<Asset[]> => {
   if (files.length === 0) {
     console.log('All assets were already uploaded, nothing to do.');
     return [];
@@ -158,37 +177,52 @@ const uploadFiles = async (files: string[], { dryRun, concurrency }: UploadOptio

   const newAssets: Asset[] = [];

-  try {
-    for (const items of chunk(files, concurrency)) {
-      await Promise.all(
-        items.map(async (filepath) => {
-          const stats = statsMap.get(filepath) as Stats;
-          const response = await uploadFile(filepath, stats);
-
-          newAssets.push({ id: response.id, filepath });
-
-          if (response.status === AssetMediaStatus.Duplicate) {
-            duplicateCount++;
-            duplicateSize += stats.size ?? 0;
-          } else {
-            successCount++;
-            successSize += stats.size ?? 0;
-          }
-
-          uploadProgress.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });
-
-          return response;
-        }),
-      );
-    }
-  } finally {
-    uploadProgress.stop();
-  }
+  const queue = new Queue<string, AssetMediaResponseDto>(
+    async (filepath: string) => {
+      const stats = statsMap.get(filepath);
+      if (!stats) {
+        throw new Error(`Stats not found for ${filepath}`);
+      }
+
+      const response = await uploadFile(filepath, stats);
+      newAssets.push({ id: response.id, filepath });
+      if (response.status === AssetMediaStatus.Duplicate) {
+        duplicateCount++;
+        duplicateSize += stats.size ?? 0;
+      } else {
+        successCount++;
+        successSize += stats.size ?? 0;
+      }
+      uploadProgress.update(successSize, { value_formatted: byteSize(successSize + duplicateSize) });
+      return response;
+    },
+    { concurrency, retry: 3 },
+  );
+
+  for (const filepath of files) {
+    await queue.push(filepath);
+  }
+
+  await queue.drained();
+
+  uploadProgress.stop();

   console.log(`Successfully uploaded ${successCount} new asset${s(successCount)} (${byteSize(successSize)})`);
   if (duplicateCount > 0) {
     console.log(`Skipped ${duplicateCount} duplicate asset${s(duplicateCount)} (${byteSize(duplicateSize)})`);
   }
+
+  // Report failures
+  const failedTasks = queue.tasks.filter((task) => task.status === 'failed');
+  if (failedTasks.length > 0) {
+    console.log(`Failed to upload ${failedTasks.length} asset${s(failedTasks.length)}:`);
+    for (const task of failedTasks) {
+      console.log(`- ${task.data} - ${task.error}`);
+    }
+  }
+
   return newAssets;
 };
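Both refactored functions above hand concurrency and retry off to the new Queue class (cli/src/queue.ts, shown next), which wraps the fastq package. For readers unfamiliar with fastq, here is a minimal sketch of its promise API; the worker and values are illustrative, not taken from the Immich codebase:

import * as fastq from 'fastq';

// Illustrative worker: double a number asynchronously.
const double = async (n: number): Promise<number> => n * 2;

// A queue that runs at most 3 workers concurrently.
const queue: fastq.queueAsPromised<number, number> = fastq.promise(double, 3);

const main = async () => {
  // push() resolves with the worker's result once that task completes.
  const result = await queue.push(21);
  console.log(result); // 42

  // drained() resolves once every queued task has been processed.
  await queue.drained();
};

void main();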
cli/src/queue.ts (new file, 131 lines)

@@ -0,0 +1,131 @@
+import * as fastq from 'fastq';
+import { uniqueId } from 'lodash-es';
+
+export type Task<T, R> = {
+  readonly id: string;
+  status: 'idle' | 'processing' | 'succeeded' | 'failed';
+  data: T;
+  error: unknown | undefined;
+  count: number;
+  // TODO: Could be useful to adding progress property.
+  // TODO: Could be useful to adding start_at/end_at/duration properties.
+  result: undefined | R;
+};
+
+export type QueueOptions = {
+  verbose?: boolean;
+  concurrency?: number;
+  retry?: number;
+  // TODO: Could be useful to adding timeout property for retry.
+};
+
+export type ComputedQueueOptions = Required<QueueOptions>;
+
+export const defaultQueueOptions = {
+  concurrency: 1,
+  retry: 0,
+  verbose: false,
+};
+
+/**
+ * An in-memory queue that processes tasks in parallel with a given concurrency.
+ * @see {@link https://www.npmjs.com/package/fastq}
+ * @template T - The type of the worker task data.
+ * @template R - The type of the worker output data.
+ */
+export class Queue<T, R> {
+  private readonly queue: fastq.queueAsPromised<string, Task<T, R>>;
+  private readonly store = new Map<string, Task<T, R>>();
+  readonly options: ComputedQueueOptions;
+  readonly worker: (data: T) => Promise<R>;
+
+  /**
+   * Create a new queue.
+   * @param worker - The worker function that processes the task.
+   * @param options - The queue options.
+   */
+  constructor(worker: (data: T) => Promise<R>, options?: QueueOptions) {
+    this.options = { ...defaultQueueOptions, ...options };
+    this.worker = worker;
+    this.store = new Map<string, Task<T, R>>();
+    this.queue = this.buildQueue();
+  }
+
+  get tasks(): Task<T, R>[] {
+    const tasks: Task<T, R>[] = [];
+    for (const task of this.store.values()) {
+      tasks.push(task);
+    }
+    return tasks;
+  }
+
+  getTask(id: string): Task<T, R> {
+    const task = this.store.get(id);
+    if (!task) {
+      throw new Error(`Task with id ${id} not found`);
+    }
+    return task;
+  }
+
+  /**
+   * Wait for the queue to be empty.
+   * @returns Promise<void> - The returned Promise will be resolved when all tasks in the queue have been processed by a worker.
+   * This promise could be ignored as it will not lead to a `unhandledRejection`.
+   */
+  async drained(): Promise<void> {
+    await this.queue.drain();
+  }
+
+  /**
+   * Add a task at the end of the queue.
+   * @see {@link https://www.npmjs.com/package/fastq}
+   * @param data
+   * @returns Promise<void> - A Promise that will be fulfilled (rejected) when the task is completed successfully (unsuccessfully).
+   * This promise could be ignored as it will not lead to a `unhandledRejection`.
+   */
+  async push(data: T): Promise<Task<T, R>> {
+    const id = uniqueId();
+    const task: Task<T, R> = { id, status: 'idle', error: undefined, count: 0, data, result: undefined };
+    this.store.set(id, task);
+    return this.queue.push(id);
+  }
+
+  // TODO: Support more function delegation to fastq.
+
+  private buildQueue(): fastq.queueAsPromised<string, Task<T, R>> {
+    return fastq.promise((id: string) => {
+      const task = this.getTask(id);
+      return this.work(task);
+    }, this.options.concurrency);
+  }
+
+  private async work(task: Task<T, R>): Promise<Task<T, R>> {
+    task.count += 1;
+    task.error = undefined;
+    task.status = 'processing';
+    if (this.options.verbose) {
+      console.log('[task] processing:', task);
+    }
+    try {
+      task.result = await this.worker(task.data);
+      task.status = 'succeeded';
+      if (this.options.verbose) {
+        console.log('[task] succeeded:', task);
+      }
+      return task;
+    } catch (error) {
+      task.error = error;
+      task.status = 'failed';
+      if (this.options.verbose) {
+        console.log('[task] failed:', task);
+      }
+      if (this.options.retry > 0 && task.count < this.options.retry) {
+        if (this.options.verbose) {
+          console.log('[task] retry:', task);
+        }
+        return this.work(task);
+      }
+      return task;
+    }
+  }
+}
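A minimal usage sketch of this Queue; the worker, data, and option values below are illustrative assumptions, not code from the repository:

import { Queue } from 'src/queue';

const main = async () => {
  // With retry: 3, a task that keeps throwing is attempted 3 times in
  // total before it is left in the 'failed' state.
  const queue = new Queue<string, number>(async (name: string) => name.length, {
    concurrency: 2,
    retry: 3,
  });

  // push() resolves when the task completes, so we ignore the per-task
  // promises here and wait for the whole queue to drain instead.
  void queue.push('alpha');
  void queue.push('beta');
  await queue.drained();

  // After draining, tasks can be inspected for results and failures.
  const failed = queue.tasks.filter((task) => task.status === 'failed');
  console.log(`processed ${queue.tasks.length} task(s), ${failed.length} failed`);
};

void main();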
@@ -73,7 +73,7 @@ services:
     container_name: immich_prometheus
     ports:
       - 9090:9090
-    image: prom/prometheus@sha256:075b1ba2c4ebb04bc3a6ab86c06ec8d8099f8fda1c96ef6d104d9bb1def1d8bc
+    image: prom/prometheus@sha256:f20d3127bf2876f4a1df76246fca576b41ddf1125ed1c546fbd8b16ea55117e6
     volumes:
       - ./prometheus.yml:/etc/prometheus/prometheus.yml
       - prometheus-data:/prometheus
@@ -1 +1 @@
-20.15
+20.15.1
@@ -4,7 +4,7 @@ This page gives a few pointers on how to access your Immich instance from outsid
 You can read the [full discussion in Discord](https://discord.com/channels/979116623879368755/1122615710846308484)

 :::danger
-Never forward port 2283 directly to the internet without additional configuration. This will expose the web interface via http to the internet, making you succeptible to [man in the middle](https://en.wikipedia.org/wiki/Man-in-the-middle_attack) attacks.
+Never forward port 2283 directly to the internet without additional configuration. This will expose the web interface via http to the internet, making you susceptible to [man in the middle](https://en.wikipedia.org/wiki/Man-in-the-middle_attack) attacks.
 :::

 ## Option 1: VPN to home network
|
||||
| `IMMICH_LOG_LEVEL` | Log Level (verbose, debug, log, warn, error) | `log` | server, machine learning | api, microservices |
|
||||
| `IMMICH_MEDIA_LOCATION` | Media Location | `./upload`<sup>\*1</sup> | server | api, microservices |
|
||||
| `IMMICH_CONFIG_FILE` | Path to config file | | server | api, microservices |
|
||||
| `IMMICH_WEB_ROOT` | Path of root index.html | `/usr/src/app/www` | server | api |
|
||||
| `IMMICH_REVERSE_GEOCODING_ROOT` | Path of reverse geocoding dump directory | `/usr/src/resources` | server | microservices |
|
||||
| `NO_COLOR` | Set to `true` to disable color-coded log output | `false` | server, machine learning | |
|
||||
| `CPU_CORES` | Amount of cores available to the immich server | auto-detected cpu core count | server | |
|
||||
| `IMMICH_API_METRICS_PORT` | Port for the OTEL metrics | `8081` | server | api |
|
||||
|
||||
@@ -92,6 +92,7 @@ const config = {
         alt: 'Immich Logo',
         src: 'img/immich-logo-inline-light.png',
         srcDark: 'img/immich-logo-inline-dark.png',
+        className: 'rounded-none',
       },
       items: [
         {
docs/package-lock.json (generated, 22 lines changed)
@@ -12640,9 +12640,10 @@
       }
     },
     "node_modules/picocolors": {
-      "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz",
-      "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ=="
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz",
+      "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==",
+      "license": "ISC"
     },
     "node_modules/picomatch": {
       "version": "2.3.1",
@@ -12753,9 +12754,9 @@
       }
     },
     "node_modules/postcss": {
-      "version": "8.4.38",
-      "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.38.tgz",
-      "integrity": "sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==",
+      "version": "8.4.39",
+      "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.39.tgz",
+      "integrity": "sha512-0vzE+lAiG7hZl1/9I8yzKLx3aR9Xbof3fBHKunvMfOCYAtMhrsnccJY2iTURb9EZd5+pLuiNV9/c/GZJOHsgIw==",
       "funding": [
         {
           "type": "opencollective",
@@ -12770,9 +12771,10 @@
           "url": "https://github.com/sponsors/ai"
         }
       ],
+      "license": "MIT",
       "dependencies": {
         "nanoid": "^3.3.7",
-        "picocolors": "^1.0.0",
+        "picocolors": "^1.0.1",
         "source-map-js": "^1.2.0"
       },
       "engines": {
@@ -16374,9 +16376,9 @@
       }
     },
     "node_modules/typescript": {
-      "version": "5.5.2",
-      "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.2.tgz",
-      "integrity": "sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew==",
+      "version": "5.5.3",
+      "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.3.tgz",
+      "integrity": "sha512-/hreyEujaB0w76zKo6717l3L0o/qEUtRgdvUBvlkhoWeOVMjMuHNHk0BRBzikzuGDqNmPQbg5ifMEqsHLiIUcQ==",
       "license": "Apache-2.0",
       "bin": {
         "tsc": "bin/tsc",
@@ -56,6 +56,6 @@
     "node": ">=20"
   },
   "volta": {
-    "node": "20.15.0"
+    "node": "20.15.1"
   }
 }
@@ -49,7 +49,7 @@ export function Timeline({ items }: Props): JSX.Element {
         <div className="flex flex-col flex-grow justify-between gap-2">
           <div className="flex gap-2 items-center">
             {cardIcon === 'immich' ? (
-              <img src="img/immich-logo.svg" height="30" />
+              <img src="img/immich-logo.svg" height="30" className="rounded-none" />
             ) : (
               <Icon path={cardIcon} size={1} color={item.iconColor} />
             )}
@@ -8,7 +8,6 @@
 @tailwind utilities;

 @import url('https://fonts.googleapis.com/css2?family=Overpass:ital,wght@0,300;0,400;0,500;0,600;0,700;1,300;1,400;1,500;1,600;1,700&display=swap');
-@import url('https://fonts.googleapis.com/css2?family=Snowburst+One&display=swap');

 html,
 button {
@@ -48,7 +47,3 @@ img {
 div[class^='announcementBar_'] {
   min-height: 2rem;
 }
-
-.navbar__brand .navbar__title {
-  @apply font-immich-title text-2xl font-normal text-immich-primary dark:text-immich-dark-primary;
-}
docs/src/pages/cursed-knowledge.tsx (new file, 77 lines)
@@ -0,0 +1,77 @@
+import { mdiCalendarToday, mdiLeadPencil, mdiLockOutline, mdiSpeedometerSlow, mdiWeb } from '@mdi/js';
+import Layout from '@theme/Layout';
+import React from 'react';
+import { Item as TimelineItem, Timeline } from '../components/timeline';
+
+const withLanguage = (date: Date) => (language: string) => date.toLocaleDateString(language);
+
+type Item = Omit<TimelineItem, 'done' | 'getDateLabel'> & { date: Date };
+
+const items: Item[] = [
+  {
+    icon: mdiLeadPencil,
+    iconColor: 'gold',
+    title: 'PostgreSQL NOTIFY is cursed',
+    description:
+      'PostgreSQL does everything in a transaction, including NOTIFY. This means using the socket.io postgres-adapter writes to WAL every 5 seconds.',
+    link: { url: 'https://github.com/immich-app/immich/pull/10801', text: '#10801' },
+    date: new Date(2024, 6, 3),
+  },
+  {
+    icon: mdiWeb,
+    iconColor: 'lightskyblue',
+    title: 'npm scripts are cursed',
+    description:
+      'npm scripts make a http call to the npm registry each time they run, which means they are a terrible way to execute a health check.',
+    link: { url: 'https://github.com/immich-app/immich/issues/10796', text: '#10796' },
+    date: new Date(2024, 6, 3),
+  },
+  {
+    icon: mdiSpeedometerSlow,
+    iconColor: 'brown',
+    title: '50 extra packages are cursed',
+    description:
+      'There is a user in the JavaScript community who goes around adding "backwards compatibility" to projects. They do this by adding 50 extra package dependencies to your project, which are maintained by them.',
+    link: { url: 'https://github.com/immich-app/immich/pull/10690', text: '#10690' },
+    date: new Date(2024, 5, 28),
+  },
+  {
+    icon: mdiLockOutline,
+    iconColor: 'gold',
+    title: 'Long passwords are cursed',
+    description:
+      'The bcrypt implementation only uses the first 72 bytes of a string. Any characters after that are ignored.',
+    // link: GHSA-4p64-9f7h-3432
+    date: new Date(2024, 5, 25),
+  },
+  {
+    icon: mdiCalendarToday,
+    iconColor: 'greenyellow',
+    title: 'JavaScript Date objects are cursed',
+    description: 'JavaScript date objects are 1 indexed for years and days, but 0 indexed for months.',
+    link: { url: 'https://github.com/immich-app/immich/pull/6787', text: '#6787' },
+    date: new Date(2024, 0, 31),
+  },
+];
+
+export default function CursedKnowledgePage(): JSX.Element {
+  return (
+    <Layout title="Cursed Knowledge" description="Things we wish we didn't know">
+      <section className="my-8">
+        <h1 className="md:text-6xl text-center mb-10 text-immich-primary dark:text-immich-dark-primary px-2">
+          Cursed Knowledge
+        </h1>
+        <p className="text-center text-xl px-2">
+          Cursed knowledge we have learned as a result of building Immich that we wish we never knew.
+        </p>
+        <div className="flex justify-around mt-8 w-full max-w-full">
+          <Timeline
+            items={items
+              .sort((a, b) => b.date.getTime() - a.date.getTime())
+              .map((item) => ({ ...item, getDateLabel: withLanguage(item.date) }))}
+          />
+        </div>
+      </section>
+    </Layout>
+  );
+}
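The JavaScript Date entry above is easy to reproduce; a small standalone illustration (not Immich code):

// Years and days of the month are 1-indexed, but months are 0-indexed,
// so month 0 is January. new Date(2024, 0, 31) is 31 January 2024.
const date = new Date(2024, 0, 31);
console.log(date.getFullYear()); // 2024
console.log(date.getMonth()); // 0 (January)
console.log(date.getDate()); // 31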
@@ -10,7 +10,7 @@ function HomepageHeader() {
     <section className="text-center m-6 p-12 border border-red-400 rounded-[50px] bg-slate-200 dark:bg-immich-dark-gray">
       <img
         src={isDarkTheme ? 'img/immich-logo-stacked-dark.svg' : 'img/immich-logo-stacked-light.svg'}
-        className="md:h-60 h-44 mb-2 antialiased"
+        className="md:h-60 h-44 mb-2 antialiased rounded-none"
         alt="Immich logo"
       />
       <div className="sm:text-2xl text-lg md:text-4xl mb-12 sm:leading-tight">
docs/static/archived-versions.json (vendored, 12 lines changed)
@@ -1,4 +1,16 @@
 [
+  {
+    "label": "v1.108.0",
+    "url": "https://v1.108.0.archive.immich.app"
+  },
+  {
+    "label": "v1.107.2",
+    "url": "https://v1.107.2.archive.immich.app"
+  },
+  {
+    "label": "v1.107.1",
+    "url": "https://v1.107.1.archive.immich.app"
+  },
   {
     "label": "v1.107.0",
     "url": "https://v1.107.0.archive.immich.app"
@@ -21,9 +21,6 @@ module.exports = {
         'immich-dark-fg': '#e5e7eb',
         'immich-dark-gray': '#212121',
       },
-      fontFamily: {
-        'immich-title': ['Snowburst One', 'cursive'],
-      },
     },
   },
   plugins: [],
@@ -1 +1 @@
-20.15
+20.15.1
@@ -26,6 +26,8 @@ services:
     volumes:
       - upload:/usr/src/app/upload
       - ./test-assets:/test-assets
+    extra_hosts:
+      - 'auth-server:host-gateway'
     depends_on:
       - redis
       - database
e2e/package-lock.json (generated, 1178 lines changed)
File diff suppressed because it is too large.
@@ -1,6 +1,6 @@
 {
   "name": "immich-e2e",
-  "version": "1.107.0",
+  "version": "1.108.0",
   "description": "",
   "main": "index.js",
   "type": "module",
@@ -23,7 +23,8 @@
     "@immich/sdk": "file:../open-api/typescript-sdk",
     "@playwright/test": "^1.44.1",
     "@types/luxon": "^3.4.2",
-    "@types/node": "^20.14.9",
+    "@types/node": "^20.14.10",
+    "@types/oidc-provider": "^8.5.1",
     "@types/pg": "^8.11.0",
     "@types/pngjs": "^6.0.4",
     "@types/supertest": "^6.0.2",
@@ -35,11 +36,13 @@
     "eslint-plugin-prettier": "^5.1.3",
     "eslint-plugin-unicorn": "^54.0.0",
     "exiftool-vendored": "^27.0.0",
+    "jose": "^5.6.3",
     "luxon": "^3.4.4",
+    "oidc-provider": "^8.5.1",
     "pg": "^8.11.3",
     "pngjs": "^7.0.0",
     "prettier": "^3.2.5",
-    "prettier-plugin-organize-imports": "^3.2.4",
+    "prettier-plugin-organize-imports": "^4.0.0",
     "socket.io-client": "^4.7.4",
     "supertest": "^7.0.0",
     "typescript": "^5.3.3",
@@ -47,6 +50,6 @@
     "vitest": "^1.3.0"
   },
   "volta": {
-    "node": "20.15.0"
+    "node": "20.15.1"
   }
 }
@@ -100,6 +100,12 @@ describe('/auth/*', () => {
       expect(status).toBe(400);
       expect(body).toEqual(errorDto.badRequest());
     });
+
+    it('should reject an invalid email', async () => {
+      const { status, body } = await request(app).post('/auth/login').send({ email: [], password });
+      expect(status).toBe(400);
+      expect(body).toEqual(errorDto.invalidEmail);
+    });
   }

   it('should accept a correct password', async () => {
@@ -1,12 +1,85 @@
|
||||
import {
|
||||
LoginResponseDto,
|
||||
SystemConfigOAuthDto,
|
||||
getConfigDefaults,
|
||||
getMyUser,
|
||||
startOAuth,
|
||||
updateConfig,
|
||||
} from '@immich/sdk';
|
||||
import { errorDto } from 'src/responses';
|
||||
import { app, utils } from 'src/utils';
|
||||
import { OAuthClient, OAuthUser } from 'src/setup/auth-server';
|
||||
import { app, asBearerAuth, baseUrl, utils } from 'src/utils';
|
||||
import request from 'supertest';
|
||||
import { beforeAll, describe, expect, it } from 'vitest';
|
||||
|
||||
const authServer = {
|
||||
internal: 'http://auth-server:3000',
|
||||
external: 'http://127.0.0.1:3000',
|
||||
};
|
||||
|
||||
const redirect = async (url: string, cookies?: string[]) => {
|
||||
const { headers } = await request(url)
|
||||
.get('/')
|
||||
.set('Cookie', cookies || []);
|
||||
return { cookies: (headers['set-cookie'] as unknown as string[]) || [], location: headers.location };
|
||||
};
|
||||
|
const loginWithOAuth = async (sub: OAuthUser | string) => {
const { url } = await startOAuth({ oAuthConfigDto: { redirectUri: `${baseUrl}/auth/login` } });

// login
const response1 = await redirect(url.replace(authServer.internal, authServer.external));
const response2 = await request(authServer.external + response1.location)
.post('/')
.set('Cookie', response1.cookies)
.type('form')
.send({ prompt: 'login', login: sub, password: 'password' });

// approve
const response3 = await redirect(response2.header.location, response1.cookies);
const response4 = await request(authServer.external + response3.location)
.post('/')
.type('form')
.set('Cookie', response3.cookies)
.send({ prompt: 'consent' });

const response5 = await redirect(response4.header.location, response3.cookies.slice(1));
const redirectUrl = response5.location;

expect(redirectUrl).toBeDefined();
const params = new URL(redirectUrl).searchParams;
expect(params.get('code')).toBeDefined();
expect(params.get('state')).toBeDefined();

return redirectUrl;
};

const setupOAuth = async (token: string, dto: Partial<SystemConfigOAuthDto>) => {
const options = { headers: asBearerAuth(token) };
const defaults = await getConfigDefaults(options);
const merged = {
...defaults.oauth,
buttonText: 'Login with Immich',
issuerUrl: `${authServer.internal}/.well-known/openid-configuration`,
...dto,
};
await updateConfig({ systemConfigDto: { ...defaults, oauth: merged } }, options);
};

describe(`/oauth`, () => {
let admin: LoginResponseDto;

beforeAll(async () => {
await utils.resetDatabase();
await utils.adminSetup();
admin = await utils.adminSetup();

await setupOAuth(admin.accessToken, {
enabled: true,
clientId: OAuthClient.DEFAULT,
clientSecret: OAuthClient.DEFAULT,
buttonText: 'Login with Immich',
storageLabelClaim: 'immich_username',
});
});

describe('POST /oauth/authorize', () => {
@@ -15,5 +88,171 @@ describe(`/oauth`, () => {
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['redirectUri must be a string', 'redirectUri should not be empty']));
});

it('should return a redirect uri', async () => {
const { status, body } = await request(app)
.post('/oauth/authorize')
.send({ redirectUri: 'http://127.0.0.1:2283/auth/login' });
expect(status).toBe(201);
expect(body).toEqual({ url: expect.stringContaining(`${authServer.internal}/auth?`) });

const params = new URL(body.url).searchParams;
expect(params.get('client_id')).toBe('client-default');
expect(params.get('response_type')).toBe('code');
expect(params.get('redirect_uri')).toBe('http://127.0.0.1:2283/auth/login');
expect(params.get('state')).toBeDefined();
});
});

describe('POST /oauth/callback', () => {
it(`should throw an error if a url is not provided`, async () => {
const { status, body } = await request(app).post('/oauth/callback').send({});
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['url must be a string', 'url should not be empty']));
});

it(`should throw an error if the url is empty`, async () => {
const { status, body } = await request(app).post('/oauth/callback').send({ url: '' });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest(['url should not be empty']));
});

it('should auto register the user by default', async () => {
const url = await loginWithOAuth('oauth-auto-register');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(201);
expect(body).toMatchObject({
accessToken: expect.any(String),
isAdmin: false,
name: 'OAuth User',
userEmail: 'oauth-auto-register@immich.app',
userId: expect.any(String),
});
});

it('should handle a user without an email', async () => {
const url = await loginWithOAuth(OAuthUser.NO_EMAIL);
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest('OAuth profile does not have an email address'));
});

it('should set the quota from a claim', async () => {
const url = await loginWithOAuth(OAuthUser.WITH_QUOTA);
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(201);
expect(body).toMatchObject({
accessToken: expect.any(String),
userId: expect.any(String),
userEmail: 'oauth-with-quota@immich.app',
});

const user = await getMyUser({ headers: asBearerAuth(body.accessToken) });
expect(user.quotaSizeInBytes).toBe(25 * 2 ** 30); // 25 GiB
});

it('should set the storage label from a claim', async () => {
const url = await loginWithOAuth(OAuthUser.WITH_USERNAME);
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(201);
expect(body).toMatchObject({
accessToken: expect.any(String),
userId: expect.any(String),
userEmail: 'oauth-with-username@immich.app',
});

const user = await getMyUser({ headers: asBearerAuth(body.accessToken) });
expect(user.storageLabel).toBe('user-username');
});

it('should work with RS256 signed tokens', async () => {
await setupOAuth(admin.accessToken, {
enabled: true,
clientId: OAuthClient.RS256_TOKENS,
clientSecret: OAuthClient.RS256_TOKENS,
autoRegister: true,
buttonText: 'Login with Immich',
signingAlgorithm: 'RS256',
});
const url = await loginWithOAuth('oauth-RS256-token');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(201);
expect(body).toMatchObject({
accessToken: expect.any(String),
isAdmin: false,
name: 'OAuth User',
userEmail: 'oauth-RS256-token@immich.app',
userId: expect.any(String),
});
});

it('should work with RS256 signed user profiles', async () => {
await setupOAuth(admin.accessToken, {
enabled: true,
clientId: OAuthClient.RS256_PROFILE,
clientSecret: OAuthClient.RS256_PROFILE,
buttonText: 'Login with Immich',
profileSigningAlgorithm: 'RS256',
});
const url = await loginWithOAuth('oauth-signed-profile');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(201);
expect(body).toMatchObject({
userId: expect.any(String),
userEmail: 'oauth-signed-profile@immich.app',
});
});

it('should throw an error for an invalid token algorithm', async () => {
await setupOAuth(admin.accessToken, {
enabled: true,
clientId: OAuthClient.DEFAULT,
clientSecret: OAuthClient.DEFAULT,
buttonText: 'Login with Immich',
signingAlgorithm: 'something-that-does-not-work',
});
const url = await loginWithOAuth('oauth-signed-bad');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(500);
expect(body).toMatchObject({
error: 'Internal Server Error',
message: 'Failed to finish oauth',
statusCode: 500,
});
});

describe('autoRegister: false', () => {
beforeAll(async () => {
await setupOAuth(admin.accessToken, {
enabled: true,
clientId: OAuthClient.DEFAULT,
clientSecret: OAuthClient.DEFAULT,
autoRegister: false,
buttonText: 'Login with Immich',
});
});

it('should not auto register the user', async () => {
const url = await loginWithOAuth('oauth-no-auto-register');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest('User does not exist and auto registering is disabled.'));
});

it('should link to an existing user by email', async () => {
const { userId } = await utils.userSetup(admin.accessToken, {
name: 'OAuth User 3',
email: 'oauth-user3@immich.app',
password: 'password',
});
const url = await loginWithOAuth('oauth-user3');
const { status, body } = await request(app).post('/oauth/callback').send({ url });
expect(status).toBe(201);
expect(body).toMatchObject({
userId,
userEmail: 'oauth-user3@immich.app',
});
});
});
});
});

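// Claim mapping exercised by the tests above (claim values live in src/setup/auth-server.ts):
// the configured storageLabelClaim ('immich_username') populates user.storageLabel, while
// 'immich_quota' is read as GiB, hence quotaSizeInBytes === 25 * 2 ** 30 for a quota of 25.
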
@@ -230,4 +230,21 @@ describe('/people', () => {
expect(body).toMatchObject({ birthDate: null });
});
});

describe('POST /people/:id/merge', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).post(`/people/${uuidDto.notFound}/merge`);
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});

it('should not support merging a person into themselves', async () => {
const { status, body } = await request(app)
.post(`/people/${visiblePerson.id}/merge`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ ids: [visiblePerson.id] });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest('Cannot merge a person into themselves'));
});
});
});

@@ -9,11 +9,30 @@ describe(`immich-admin`, () => {

describe('list-users', () => {
it('should list the admin user', async () => {
const { stdout, stderr, exitCode } = await immichAdmin(['list-users']);
const { stdout, stderr, exitCode } = await immichAdmin(['list-users']).promise;
expect(exitCode).toBe(0);
expect(stderr).toBe('');
expect(stdout).toContain("email: 'admin@immich.cloud'");
expect(stdout).toContain("name: 'Immich Admin'");
});
});

describe('reset-admin-password', () => {
it('should reset admin password', async () => {
const { child, promise } = immichAdmin(['reset-admin-password']);

let data = '';
child.stdout.on('data', (chunk) => {
data += chunk;
if (data.includes('Please choose a new password (optional)')) {
child.stdin.end('\n');
}
});

const { stderr, stdout, exitCode } = await promise;
expect(exitCode).toBe(0);
expect(stderr).toBe('');
expect(stdout).toContain('The admin password has been updated to:');
});
});
});

@@ -61,6 +61,12 @@ export const errorDto = {
message: 'The server already has an admin',
correlationId: expect.any(String),
},
invalidEmail: {
error: 'Bad Request',
statusCode: 400,
message: ['email must be an email'],
correlationId: expect.any(String),
},
};

export const signupResponseDto = {

117
e2e/src/setup/auth-server.ts
Normal file
@@ -0,0 +1,117 @@
import { exportJWK, generateKeyPair } from 'jose';
import Provider from 'oidc-provider';

export enum OAuthClient {
DEFAULT = 'client-default',
RS256_TOKENS = 'client-RS256-tokens',
RS256_PROFILE = 'client-RS256-profile',
}

export enum OAuthUser {
NO_EMAIL = 'no-email',
NO_NAME = 'no-name',
WITH_QUOTA = 'with-quota',
WITH_USERNAME = 'with-username',
}

const claims = [
{ sub: OAuthUser.NO_EMAIL },
{
sub: OAuthUser.NO_NAME,
email: 'oauth-no-name@immich.app',
email_verified: true,
},
{
sub: OAuthUser.WITH_USERNAME,
email: 'oauth-with-username@immich.app',
email_verified: true,
immich_username: 'user-username',
},
{
sub: OAuthUser.WITH_QUOTA,
email: 'oauth-with-quota@immich.app',
email_verified: true,
preferred_username: 'user-quota',
immich_quota: 25,
},
];

const withDefaultClaims = (sub: string) => ({
sub,
email: `${sub}@immich.app`,
name: 'OAuth User',
given_name: `OAuth`,
family_name: 'User',
email_verified: true,
});

const getClaims = (sub: string) => claims.find((user) => user.sub === sub) || withDefaultClaims(sub);

const setup = async () => {
const { privateKey, publicKey } = await generateKeyPair('RS256');

const port = 3000;
const host = '0.0.0.0';
const oidc = new Provider(`http://${host}:${port}`, {
renderError: async (ctx, out, error) => {
console.error(out);
console.error(error);
ctx.body = 'Internal Server Error';
},
findAccount: (ctx, sub) => ({ accountId: sub, claims: () => getClaims(sub) }),
scopes: ['openid', 'email', 'profile'],
claims: {
openid: ['sub'],
email: ['email', 'email_verified'],
profile: ['name', 'given_name', 'family_name', 'preferred_username', 'immich_quota', 'immich_username'],
},
features: {
jwtUserinfo: {
enabled: true,
},
},
cookies: {
names: {
session: 'oidc.session',
interaction: 'oidc.interaction',
resume: 'oidc.resume',
state: 'oidc.state',
},
},
pkce: {
required: () => false,
},
jwks: { keys: [await exportJWK(privateKey)] },
clients: [
{
client_id: OAuthClient.DEFAULT,
client_secret: OAuthClient.DEFAULT,
redirect_uris: ['http://127.0.0.1:2283/auth/login'],
grant_types: ['authorization_code'],
response_types: ['code'],
},
{
client_id: OAuthClient.RS256_TOKENS,
client_secret: OAuthClient.RS256_TOKENS,
redirect_uris: ['http://127.0.0.1:2283/auth/login'],
grant_types: ['authorization_code'],
id_token_signed_response_alg: 'RS256',
jwks: { keys: [await exportJWK(publicKey)] },
},
{
client_id: OAuthClient.RS256_PROFILE,
client_secret: OAuthClient.RS256_PROFILE,
redirect_uris: ['http://127.0.0.1:2283/auth/login'],
grant_types: ['authorization_code'],
userinfo_signed_response_alg: 'RS256',
jwks: { keys: [await exportJWK(publicKey)] },
},
],
});

const onStart = () => console.log(`[auth-server] http://${host}:${port}/.well-known/openid-configuration`);
const app = oidc.listen(port, host, onStart);
return () => app.close();
};

export default setup;
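// vitest invokes each globalSetup module's default export once before the suite runs; when
// that function returns another function, as setup() does with `return () => app.close();`,
// vitest calls the returned function as teardown after all tests finish.
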
@@ -53,8 +53,7 @@ type AdminSetupOptions = { onboarding?: boolean };
type AssetData = { bytes?: Buffer; filename: string };

const dbUrl = 'postgres://postgres:postgres@127.0.0.1:5433/immich';
const baseUrl = 'http://127.0.0.1:2283';

export const baseUrl = 'http://127.0.0.1:2283';
export const shareUrl = `${baseUrl}/share`;
export const app = `${baseUrl}/api`;
// TODO move test assets into e2e/assets
@@ -64,13 +63,13 @@ export const tempDir = tmpdir();
export const asBearerAuth = (accessToken: string) => ({ Authorization: `Bearer ${accessToken}` });
export const asKeyAuth = (key: string) => ({ 'x-api-key': key });
export const immichCli = (args: string[]) =>
executeCommand('node', ['node_modules/.bin/immich', '-d', `/${tempDir}/immich/`, ...args]);
executeCommand('node', ['node_modules/.bin/immich', '-d', `/${tempDir}/immich/`, ...args]).promise;
export const immichAdmin = (args: string[]) =>
executeCommand('docker', ['exec', '-i', 'immich-e2e-server', '/bin/bash', '-c', `immich-admin ${args.join(' ')}`]);

const executeCommand = (command: string, args: string[]) => {
let _resolve: (value: CommandResponse) => void;
const deferred = new Promise<CommandResponse>((resolve) => (_resolve = resolve));
const promise = new Promise<CommandResponse>((resolve) => (_resolve = resolve));
const child = spawn(command, args, { stdio: 'pipe' });

let stdout = '';
@@ -86,7 +85,7 @@ const executeCommand = (command: string, args: string[]) => {
});
});

return deferred;
return { promise, child };
};

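// executeCommand now hands back both the spawned child and a deferred promise, so callers
// can answer interactive prompts over child.stdin while still awaiting the final result.
// A minimal sketch (command and prompt are illustrative):
//   const { child, promise } = executeCommand('some-cli', ['ask-me']);
//   child.stdin.end('answer\n');
//   const { exitCode, stdout } = await promise;
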
let client: pg.Client | null = null;

@@ -152,10 +151,6 @@ export const utils = {

const sql: string[] = [];

if (tables.includes('asset_stack')) {
sql.push('UPDATE "assets" SET "stackId" = NULL;');
}

for (const table of tables) {
if (table === 'system_metadata') {
// prevent reverse geocoder from being re-initialized

@@ -51,6 +51,13 @@ test.describe('Shared Links', () => {
await page.getByText('DOWNLOADING', { exact: true }).waitFor();
});

test('download all from shared link', async ({ page }) => {
await page.goto(`/share/${sharedLink.key}`);
await page.getByRole('heading', { name: 'Test Album' }).waitFor();
await page.getByRole('button', { name: 'Download' }).click();
await page.getByText('DOWNLOADING', { exact: true }).waitFor();
});

test('enter password for a shared link', async ({ page }) => {
await page.goto(`/share/${sharedLinkPassword.key}`);
await page.getByPlaceholder('Password').fill('test-password');

@@ -1,11 +1,11 @@
import { defineConfig } from 'vitest/config';

// skip `docker compose up` if `make e2e` was already run
const globalSetup: string[] = [];
const globalSetup: string[] = ['src/setup/auth-server.ts'];
try {
await fetch('http://127.0.0.1:2283/api/server-info/ping');
} catch {
globalSetup.push('src/setup.ts');
globalSetup.push('src/setup/docker-compose.ts');
}

export default defineConfig({

@@ -1,3 +0,0 @@
#!/usr/bin/env sh

g++ -shared -O3 -o libann.so -fuse-ld=gold -std=c++17 -I"$ARMNN_PATH"/include -larmnn -larmnnDeserializer -larmnnTfLiteParser -larmnnOnnxParser -L"$ARMNN_PATH" ann.cpp
@@ -1,4 +0,0 @@
#!/usr/bin/env sh

cd armnn-23.11/ || exit
g++ -o ../armnnconverter -O1 -DARMNN_ONNX_PARSER -DARMNN_SERIALIZER -DARMNN_TF_LITE_PARSER -fuse-ld=gold -std=c++17 -Iinclude -Isrc/armnnUtils -Ithird-party -larmnn -larmnnDeserializer -larmnnTfLiteParser -larmnnOnnxParser -larmnnSerializer -L../armnn src/armnnConverter/ArmnnConverter.cpp
@@ -1,201 +0,0 @@
name: annexport
channels:
- pytorch
- nvidia
- conda-forge
dependencies:
- _libgcc_mutex=0.1=conda_forge
- _openmp_mutex=4.5=2_kmp_llvm
- aiohttp=3.9.1=py310h2372a71_0
- aiosignal=1.3.1=pyhd8ed1ab_0
- arpack=3.8.0=nompi_h0baa96a_101
- async-timeout=4.0.3=pyhd8ed1ab_0
- attrs=23.1.0=pyh71513ae_1
- aws-c-auth=0.7.3=h28f7589_1
- aws-c-cal=0.6.1=hc309b26_1
- aws-c-common=0.9.0=hd590300_0
- aws-c-compression=0.2.17=h4d4d85c_2
- aws-c-event-stream=0.3.1=h2e3709c_4
- aws-c-http=0.7.11=h00aa349_4
- aws-c-io=0.13.32=he9a53bd_1
- aws-c-mqtt=0.9.3=hb447be9_1
- aws-c-s3=0.3.14=hf3aad02_1
- aws-c-sdkutils=0.1.12=h4d4d85c_1
- aws-checksums=0.1.17=h4d4d85c_1
- aws-crt-cpp=0.21.0=hb942446_5
- aws-sdk-cpp=1.10.57=h85b1a90_19
- blas=2.120=openblas
- blas-devel=3.9.0=20_linux64_openblas
- brotli-python=1.0.9=py310hd8f1fbe_9
- bzip2=1.0.8=hd590300_5
- c-ares=1.23.0=hd590300_0
- ca-certificates=2023.11.17=hbcca054_0
- certifi=2023.11.17=pyhd8ed1ab_0
- charset-normalizer=3.3.2=pyhd8ed1ab_0
- click=8.1.7=unix_pyh707e725_0
- colorama=0.4.6=pyhd8ed1ab_0
- coloredlogs=15.0.1=pyhd8ed1ab_3
- cuda-cudart=11.7.99=0
- cuda-cupti=11.7.101=0
- cuda-libraries=11.7.1=0
- cuda-nvrtc=11.7.99=0
- cuda-nvtx=11.7.91=0
- cuda-runtime=11.7.1=0
- dataclasses=0.8=pyhc8e2a94_3
- datasets=2.14.7=pyhd8ed1ab_0
- dill=0.3.7=pyhd8ed1ab_0
- filelock=3.13.1=pyhd8ed1ab_0
- flatbuffers=23.5.26=h59595ed_1
- freetype=2.12.1=h267a509_2
- frozenlist=1.4.0=py310h2372a71_1
- fsspec=2023.10.0=pyhca7485f_0
- ftfy=6.1.3=pyhd8ed1ab_0
- gflags=2.2.2=he1b5a44_1004
- glog=0.6.0=h6f12383_0
- glpk=5.0=h445213a_0
- gmp=6.3.0=h59595ed_0
- gmpy2=2.1.2=py310h3ec546c_1
- huggingface_hub=0.17.3=pyhd8ed1ab_0
- humanfriendly=10.0=pyhd8ed1ab_6
- icu=73.2=h59595ed_0
- idna=3.6=pyhd8ed1ab_0
- importlib-metadata=7.0.0=pyha770c72_0
- importlib_metadata=7.0.0=hd8ed1ab_0
- joblib=1.3.2=pyhd8ed1ab_0
- keyutils=1.6.1=h166bdaf_0
- krb5=1.21.2=h659d440_0
- lcms2=2.15=h7f713cb_2
- ld_impl_linux-64=2.40=h41732ed_0
- lerc=4.0.0=h27087fc_0
- libabseil=20230125.3=cxx17_h59595ed_0
- libarrow=12.0.1=hb87d912_8_cpu
- libblas=3.9.0=20_linux64_openblas
- libbrotlicommon=1.0.9=h166bdaf_9
- libbrotlidec=1.0.9=h166bdaf_9
- libbrotlienc=1.0.9=h166bdaf_9
- libcblas=3.9.0=20_linux64_openblas
- libcrc32c=1.1.2=h9c3ff4c_0
- libcublas=11.10.3.66=0
- libcufft=10.7.2.124=h4fbf590_0
- libcufile=1.8.1.2=0
- libcurand=10.3.4.101=0
- libcurl=8.5.0=hca28451_0
- libcusolver=11.4.0.1=0
- libcusparse=11.7.4.91=0
- libdeflate=1.19=hd590300_0
- libedit=3.1.20191231=he28a2e2_2
- libev=4.33=hd590300_2
- libevent=2.1.12=hf998b51_1
- libffi=3.4.2=h7f98852_5
- libgcc-ng=13.2.0=h807b86a_3
- libgfortran-ng=13.2.0=h69a702a_3
- libgfortran5=13.2.0=ha4646dd_3
- libgoogle-cloud=2.12.0=hac9eb74_1
- libgrpc=1.54.3=hb20ce57_0
- libhwloc=2.9.3=default_h554bfaf_1009
- libiconv=1.17=hd590300_1
- libjpeg-turbo=2.1.5.1=hd590300_1
- liblapack=3.9.0=20_linux64_openblas
- liblapacke=3.9.0=20_linux64_openblas
- libnghttp2=1.58.0=h47da74e_1
- libnpp=11.7.4.75=0
- libnsl=2.0.1=hd590300_0
- libnuma=2.0.16=h0b41bf4_1
- libnvjpeg=11.8.0.2=0
- libopenblas=0.3.25=pthreads_h413a1c8_0
- libpng=1.6.39=h753d276_0
- libprotobuf=3.21.12=hfc55251_2
- libsentencepiece=0.1.99=h180e1df_0
- libsqlite=3.44.2=h2797004_0
- libssh2=1.11.0=h0841786_0
- libstdcxx-ng=13.2.0=h7e041cc_3
- libthrift=0.18.1=h8fd135c_2
- libtiff=4.6.0=h29866fb_1
- libutf8proc=2.8.0=h166bdaf_0
- libuuid=2.38.1=h0b41bf4_0
- libwebp-base=1.3.2=hd590300_0
- libxcb=1.15=h0b41bf4_0
- libxml2=2.11.6=h232c23b_0
- libzlib=1.2.13=hd590300_5
- llvm-openmp=17.0.6=h4dfa4b3_0
- lz4-c=1.9.4=hcb278e6_0
- mkl=2022.2.1=h84fe81f_16997
- mkl-devel=2022.2.1=ha770c72_16998
- mkl-include=2022.2.1=h84fe81f_16997
- mpc=1.3.1=hfe3b2da_0
- mpfr=4.2.1=h9458935_0
- mpmath=1.3.0=pyhd8ed1ab_0
- multidict=6.0.4=py310h2372a71_1
- multiprocess=0.70.15=py310h2372a71_1
- ncurses=6.4=h59595ed_2
- numpy=1.26.2=py310hb13e2d6_0
- onnx=1.14.0=py310ha3deec4_1
- onnx2torch=1.5.13=pyhd8ed1ab_0
- onnxruntime=1.16.3=py310hd4b7fbc_1_cpu
- open-clip-torch=2.23.0=pyhd8ed1ab_1
- openblas=0.3.25=pthreads_h7a3da1a_0
- openjpeg=2.5.0=h488ebb8_3
- openssl=3.2.0=hd590300_1
- orc=1.9.0=h2f23424_1
- packaging=23.2=pyhd8ed1ab_0
- pandas=2.1.4=py310hcc13569_0
- pillow=10.0.1=py310h29da1c1_1
- pip=23.3.1=pyhd8ed1ab_0
- protobuf=4.21.12=py310heca2aa9_0
- pthread-stubs=0.4=h36c2ea0_1001
- pyarrow=12.0.1=py310h0576679_8_cpu
- pyarrow-hotfix=0.6=pyhd8ed1ab_0
- pysocks=1.7.1=pyha2e5f31_6
- python=3.10.13=hd12c33a_0_cpython
- python-dateutil=2.8.2=pyhd8ed1ab_0
- python-flatbuffers=23.5.26=pyhd8ed1ab_0
- python-tzdata=2023.3=pyhd8ed1ab_0
- python-xxhash=3.4.1=py310h2372a71_0
- python_abi=3.10=4_cp310
- pytorch=1.13.1=cpu_py310hd11e9c7_1
- pytorch-cuda=11.7=h778d358_5
- pytorch-mutex=1.0=cuda
- pytz=2023.3.post1=pyhd8ed1ab_0
- pyyaml=6.0.1=py310h2372a71_1
- rdma-core=28.9=h59595ed_1
- re2=2023.03.02=h8c504da_0
- readline=8.2=h8228510_1
- regex=2023.10.3=py310h2372a71_0
- requests=2.31.0=pyhd8ed1ab_0
- s2n=1.3.49=h06160fa_0
- sacremoses=0.0.53=pyhd8ed1ab_0
- safetensors=0.3.3=py310hcb5633a_1
- sentencepiece=0.1.99=hff52083_0
- sentencepiece-python=0.1.99=py310hebdb9f0_0
- sentencepiece-spm=0.1.99=h180e1df_0
- setuptools=68.2.2=pyhd8ed1ab_0
- six=1.16.0=pyh6c4a22f_0
- sleef=3.5.1=h9b69904_2
- snappy=1.1.10=h9fff704_0
- sympy=1.12=pypyh9d50eac_103
- tbb=2021.11.0=h00ab1b0_0
- texttable=1.7.0=pyhd8ed1ab_0
- timm=0.9.12=pyhd8ed1ab_0
- tk=8.6.13=noxft_h4845f30_101
- tokenizers=0.14.1=py310h320607d_2
- torchvision=0.14.1=cpu_py310hd3d2ac3_1
- tqdm=4.66.1=pyhd8ed1ab_0
- transformers=4.35.2=pyhd8ed1ab_0
- typing-extensions=4.9.0=hd8ed1ab_0
- typing_extensions=4.9.0=pyha770c72_0
- tzdata=2023c=h71feb2d_0
- ucx=1.14.1=h64cca9d_5
- urllib3=2.1.0=pyhd8ed1ab_0
- wcwidth=0.2.12=pyhd8ed1ab_0
- wheel=0.42.0=pyhd8ed1ab_0
- xorg-libxau=1.0.11=hd590300_0
- xorg-libxdmcp=1.1.3=h7f98852_0
- xxhash=0.8.2=hd590300_0
- xz=5.2.6=h166bdaf_0
- yaml=0.2.5=h7f98852_2
- yarl=1.9.3=py310h2372a71_0
- zipp=3.17.0=pyhd8ed1ab_0
- zlib=1.2.13=hd590300_5
- zstd=1.5.5=hfc55251_0
- pip:
- git+https://github.com/fyfrey/TinyNeuralNetwork.git
@@ -1,157 +0,0 @@
import logging
import os
import platform
import subprocess
from abc import abstractmethod

import onnx
import open_clip
import torch
from onnx2torch import convert
from onnxruntime.tools.onnx_model_utils import fix_output_shapes, make_input_shape_fixed
from tinynn.converter import TFLiteConverter


class ExportBase(torch.nn.Module):
input_shape: tuple[int, ...]

def __init__(self, device: torch.device, name: str):
super().__init__()
self.device = device
self.name = name
self.optimize = 5
self.nchw_transpose = False

@abstractmethod
def forward(self, input_tensor: torch.Tensor) -> torch.Tensor | tuple[torch.Tensor]:
pass

def dummy_input(self) -> torch.FloatTensor:
return torch.rand((1, 3, 224, 224), device=self.device)


class ArcFace(ExportBase):
input_shape = (1, 3, 112, 112)

def __init__(self, onnx_model_path: str, device: torch.device):
name, _ = os.path.splitext(os.path.basename(onnx_model_path))
super().__init__(device, name)
onnx_model = onnx.load_model(onnx_model_path)
make_input_shape_fixed(onnx_model.graph, onnx_model.graph.input[0].name, self.input_shape)
fix_output_shapes(onnx_model)
self.model = convert(onnx_model).to(device)
if self.device.type == "cuda":
self.model = self.model.half()

def forward(self, input_tensor: torch.Tensor) -> torch.FloatTensor:
embedding: torch.FloatTensor = self.model(
input_tensor.half() if self.device.type == "cuda" else input_tensor
).float()
assert isinstance(embedding, torch.FloatTensor)
return embedding

def dummy_input(self) -> torch.FloatTensor:
return torch.rand(self.input_shape, device=self.device)


class RetinaFace(ExportBase):
input_shape = (1, 3, 640, 640)

def __init__(self, onnx_model_path: str, device: torch.device):
name, _ = os.path.splitext(os.path.basename(onnx_model_path))
super().__init__(device, name)
self.optimize = 3
self.model = convert(onnx_model_path).eval().to(device)
if self.device.type == "cuda":
self.model = self.model.half()

def forward(self, input_tensor: torch.Tensor) -> tuple[torch.FloatTensor]:
out: torch.Tensor = self.model(input_tensor.half() if self.device.type == "cuda" else input_tensor)
return tuple(o.float() for o in out)

def dummy_input(self) -> torch.FloatTensor:
return torch.rand(self.input_shape, device=self.device)


class ClipVision(ExportBase):
input_shape = (1, 3, 224, 224)

def __init__(self, model_name: str, weights: str, device: torch.device):
super().__init__(device, model_name + "__" + weights)
self.model = open_clip.create_model(
model_name,
weights,
precision="fp16" if device.type == "cuda" else "fp32",
jit=False,
require_pretrained=True,
device=device,
)

def forward(self, input_tensor: torch.Tensor) -> torch.FloatTensor:
embedding: torch.Tensor = self.model.encode_image(
input_tensor.half() if self.device.type == "cuda" else input_tensor,
normalize=True,
).float()
return embedding


def export(model: ExportBase) -> None:
model.eval()
for param in model.parameters():
param.requires_grad = False
dummy_input = model.dummy_input()
model(dummy_input)
jit = torch.jit.trace(model, dummy_input)  # type: ignore[no-untyped-call,attr-defined]
tflite_model_path = f"output/{model.name}.tflite"
os.makedirs("output", exist_ok=True)

converter = TFLiteConverter(
jit,
dummy_input,
tflite_model_path,
optimize=model.optimize,
nchw_transpose=model.nchw_transpose,
)
# segfaults on ARM, must run on x86_64 / AMD64
converter.convert()

armnn_model_path = f"output/{model.name}.armnn"
os.environ["LD_LIBRARY_PATH"] = "armnn"
subprocess.run(
[
"./armnnconverter",
"-f",
"tflite-binary",
"-m",
tflite_model_path,
"-i",
"input_tensor",
"-o",
"output_tensor",
"-p",
armnn_model_path,
]
)


def main() -> None:
if platform.machine() not in ("x86_64", "AMD64"):
raise RuntimeError(f"Can only run on x86_64 / AMD64, not {platform.machine()}")

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
if device.type != "cuda":
logging.warning(
"No CUDA available, cannot create fp16 model! proceeding to create a fp32 model (use only for testing)"
)
models = [
ClipVision("ViT-B-32", "openai", device),
ArcFace("buffalo_l_rec.onnx", device),
RetinaFace("buffalo_l_det.onnx", device),
]
for model in models:
export(model)


if __name__ == "__main__":
with torch.no_grad():
main()
@@ -168,6 +168,12 @@ def warning() -> Iterator[mock.Mock]:
yield mocked


@pytest.fixture(scope="function")
def exception() -> Iterator[mock.Mock]:
with mock.patch.object(log, "exception") as mocked:
yield mocked


@pytest.fixture(scope="function")
def snapshot_download() -> Iterator[mock.Mock]:
with mock.patch("app.models.base.snapshot_download") as mocked:

@@ -29,6 +29,7 @@ from .schemas import (
InferenceEntry,
InferenceResponse,
MessageResponse,
ModelFormat,
ModelIdentity,
ModelTask,
ModelType,
@@ -195,7 +196,17 @@ async def load(model: InferenceModel) -> InferenceModel:
if model.load_attempts > 1:
raise HTTPException(500, f"Failed to load model '{model.model_name}'")
with lock:
model.load()
try:
model.load()
except FileNotFoundError as e:
if model.model_format == ModelFormat.ONNX:
raise e
log.exception(e)
log.warning(
f"{model.model_format.upper()} is available, but model '{model.model_name}' does not support it."
)
model.model_format = ModelFormat.ONNX
model.load()
return model

try:

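# The fallback above assumes a FileNotFoundError from load() means the preferred non-ONNX
# variant (e.g. model.armnn) is missing from the model directory; when the format is already
# ONNX the error is re-raised instead. Roughly:
#   ARMNN requested but model.armnn absent -> warn, set model_format to ONNX, load again
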
@@ -23,7 +23,7 @@ class InferenceModel(ABC):
self,
model_name: str,
cache_dir: Path | str | None = None,
preferred_format: ModelFormat | None = None,
model_format: ModelFormat | None = None,
session: ModelSession | None = None,
**model_kwargs: Any,
) -> None:
@@ -31,7 +31,7 @@ class InferenceModel(ABC):
self.load_attempts = 0
self.model_name = clean_name(model_name)
self.cache_dir = Path(cache_dir) if cache_dir is not None else self._cache_dir_default
self.model_format = preferred_format if preferred_format is not None else self._model_format_default
self.model_format = model_format if model_format is not None else self._model_format_default
if session is not None:
self.session = session

@@ -48,7 +48,7 @@ class InferenceModel(ABC):
self.load_attempts += 1

self.download()
attempt = f"Attempt #{self.load_attempts + 1} to load" if self.load_attempts else "Loading"
attempt = f"Attempt #{self.load_attempts} to load" if self.load_attempts > 1 else "Loading"
log.info(f"{attempt} {self.model_type.replace('-', ' ')} model '{self.model_name}' to memory")
self.session = self._load()
self.loaded = True
@@ -101,6 +101,9 @@ class InferenceModel(ABC):
self.cache_dir.mkdir(parents=True, exist_ok=True)

def _make_session(self, model_path: Path) -> ModelSession:
if not model_path.is_file():
raise FileNotFoundError(f"Model file not found: {model_path}")

match model_path.suffix:
case ".armnn":
session: ModelSession = AnnSession(model_path)
@@ -144,17 +147,13 @@ class InferenceModel(ABC):

@property
def model_format(self) -> ModelFormat:
return self._preferred_format
return self._model_format

@model_format.setter
def model_format(self, preferred_format: ModelFormat) -> None:
log.debug(f"Setting preferred format to {preferred_format}")
self._preferred_format = preferred_format
def model_format(self, model_format: ModelFormat) -> None:
log.debug(f"Setting model format to {model_format}")
self._model_format = model_format

@property
def _model_format_default(self) -> ModelFormat:
prefer_ann = ann.ann.is_available and settings.ann
ann_exists = (self.model_dir / "model.armnn").is_file()
if prefer_ann and not ann_exists:
log.warning(f"ARM NN is available, but '{self.model_name}' does not support ARM NN. Falling back to ONNX.")
return ModelFormat.ARMNN if prefer_ann and ann_exists else ModelFormat.ONNX
return ModelFormat.ARMNN if ann.ann.is_available and settings.ann else ModelFormat.ONNX

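# _model_format_default now keys off runtime availability alone (ann.ann.is_available and
# settings.ann); whether this particular model actually ships a model.armnn file is only
# discovered at load time and is handled by the ONNX fallback in load() above.
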
@@ -22,11 +22,12 @@ class BaseCLIPTextualEncoder(InferenceModel):
return res

def _load(self) -> ModelSession:
session = super()._load()
log.debug(f"Loading tokenizer for CLIP model '{self.model_name}'")
self.tokenizer = self._load_tokenizer()
log.debug(f"Loaded tokenizer for CLIP model '{self.model_name}'")

return super()._load()
return session

@abstractmethod
def _load_tokenizer(self) -> Tokenizer:

@@ -1,4 +1,3 @@
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import numpy as np
|
||||
@@ -14,15 +13,9 @@ class FaceDetector(InferenceModel):
|
||||
depends = []
|
||||
identity = (ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
model_name: str,
|
||||
min_score: float = 0.7,
|
||||
cache_dir: Path | str | None = None,
|
||||
**model_kwargs: Any,
|
||||
) -> None:
|
||||
def __init__(self, model_name: str, min_score: float = 0.7, **model_kwargs: Any) -> None:
|
||||
self.min_score = model_kwargs.pop("minScore", min_score)
|
||||
super().__init__(model_name, cache_dir, **model_kwargs)
|
||||
super().__init__(model_name, **model_kwargs)
|
||||
|
||||
def _load(self) -> ModelSession:
|
||||
session = self._make_session(self.model_path)
|
||||
|
@@ -9,7 +9,7 @@ from numpy.typing import NDArray
from onnx.tools.update_model_dims import update_inputs_outputs_dims
from PIL import Image

from app.config import clean_name, log
from app.config import log
from app.models.base import InferenceModel
from app.models.transforms import decode_cv2
from app.schemas import FaceDetectionOutput, FacialRecognitionOutput, ModelFormat, ModelSession, ModelTask, ModelType
@@ -20,20 +20,14 @@ class FaceRecognizer(InferenceModel):
depends = [(ModelType.DETECTION, ModelTask.FACIAL_RECOGNITION)]
identity = (ModelType.RECOGNITION, ModelTask.FACIAL_RECOGNITION)

def __init__(
self,
model_name: str,
min_score: float = 0.7,
cache_dir: Path | str | None = None,
**model_kwargs: Any,
) -> None:
super().__init__(clean_name(model_name), cache_dir, **model_kwargs)
def __init__(self, model_name: str, min_score: float = 0.7, **model_kwargs: Any) -> None:
super().__init__(model_name, **model_kwargs)
self.min_score = model_kwargs.pop("minScore", min_score)
self.batch = self.model_format == ModelFormat.ONNX

def _load(self) -> ModelSession:
session = self._make_session(self.model_path)
if self.model_format == ModelFormat.ONNX and not has_batch_axis(session):
if self.batch and not has_batch_axis(session):
self._add_batch_axis(self.model_path)
session = self._make_session(self.model_path)
self.model = ArcFaceONNX(

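# Batched inference is only enabled for ONNX sessions: ARM NN models keep their fixed input
# shapes, so the has_batch_axis/_add_batch_axis rewrite is skipped for .armnn files (see
# test_recognition_does_not_add_batch_axis_for_armnn below).
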
@@ -43,7 +43,7 @@ class TestBase:

assert encoder.cache_dir == cache_dir

def test_sets_default_preferred_format(self, mocker: MockerFixture) -> None:
def test_sets_default_model_format(self, mocker: MockerFixture) -> None:
mocker.patch.object(settings, "ann", True)
mocker.patch("ann.ann.is_available", False)

@@ -51,7 +51,7 @@ class TestBase:

assert encoder.model_format == ModelFormat.ONNX

def test_sets_default_preferred_format_to_armnn_if_available(self, path: mock.Mock, mocker: MockerFixture) -> None:
def test_sets_default_model_format_to_armnn_if_available(self, path: mock.Mock, mocker: MockerFixture) -> None:
mocker.patch.object(settings, "ann", True)
mocker.patch("ann.ann.is_available", True)
path.suffix = ".armnn"
@@ -60,11 +60,11 @@ class TestBase:

assert encoder.model_format == ModelFormat.ARMNN

def test_sets_preferred_format_kwarg(self, mocker: MockerFixture) -> None:
def test_sets_model_format_kwarg(self, mocker: MockerFixture) -> None:
mocker.patch.object(settings, "ann", False)
mocker.patch("ann.ann.is_available", False)

encoder = OpenClipTextualEncoder("ViT-B-32__openai", preferred_format=ModelFormat.ARMNN)
encoder = OpenClipTextualEncoder("ViT-B-32__openai", model_format=ModelFormat.ARMNN)

assert encoder.model_format == ModelFormat.ARMNN

@@ -129,7 +129,7 @@ class TestBase:
)

def test_download_downloads_armnn_if_preferred_format(self, snapshot_download: mock.Mock) -> None:
encoder = OpenClipTextualEncoder("ViT-B-32__openai", preferred_format=ModelFormat.ARMNN)
encoder = OpenClipTextualEncoder("ViT-B-32__openai", model_format=ModelFormat.ARMNN)
encoder.download()

snapshot_download.assert_called_once_with(
@@ -140,6 +140,19 @@ class TestBase:
ignore_patterns=[],
)

def test_throws_exception_if_model_path_does_not_exist(
self, snapshot_download: mock.Mock, ort_session: mock.Mock, path: mock.Mock
) -> None:
path.return_value.__truediv__.return_value.__truediv__.return_value.is_file.return_value = False

encoder = OpenClipTextualEncoder("ViT-B-32__openai", cache_dir=path)

with pytest.raises(FileNotFoundError):
encoder.load()

snapshot_download.assert_called_once()
ort_session.assert_not_called()


@pytest.mark.usefixtures("ort_session")
class TestOrtSession:
@@ -467,16 +480,18 @@ class TestFaceRecognition:
assert isinstance(call_args[0][0], np.ndarray)
assert call_args[0][0].shape == (112, 112, 3)

def test_recognition_adds_batch_axis_for_ort(self, ort_session: mock.Mock, mocker: MockerFixture) -> None:
def test_recognition_adds_batch_axis_for_ort(
self, ort_session: mock.Mock, path: mock.Mock, mocker: MockerFixture
) -> None:
onnx = mocker.patch("app.models.facial_recognition.recognition.onnx", autospec=True)
update_dims = mocker.patch(
"app.models.facial_recognition.recognition.update_inputs_outputs_dims", autospec=True
)
mocker.patch("app.models.base.InferenceModel.download")
mocker.patch("app.models.facial_recognition.recognition.ArcFaceONNX")

ort_session.return_value.get_inputs.return_value = [SimpleNamespace(name="input.1", shape=(1, 3, 224, 224))]
ort_session.return_value.get_outputs.return_value = [SimpleNamespace(name="output.1", shape=(1, 800))]
path.return_value.__truediv__.return_value.__truediv__.return_value.suffix = ".onnx"

proto = mock.Mock()

@@ -492,27 +507,30 @@ class TestFaceRecognition:

onnx.load.return_value = proto

face_recognizer = FaceRecognizer("buffalo_s")
face_recognizer = FaceRecognizer("buffalo_s", cache_dir=path)
face_recognizer.load()

assert face_recognizer.batch is True
update_dims.assert_called_once_with(proto, {"input.1": ["batch", 3, 224, 224]}, {"output.1": ["batch", 800]})
onnx.save.assert_called_once_with(update_dims.return_value, face_recognizer.model_path)

def test_recognition_does_not_add_batch_axis_if_exists(self, ort_session: mock.Mock, mocker: MockerFixture) -> None:
def test_recognition_does_not_add_batch_axis_if_exists(
self, ort_session: mock.Mock, path: mock.Mock, mocker: MockerFixture
) -> None:
onnx = mocker.patch("app.models.facial_recognition.recognition.onnx", autospec=True)
update_dims = mocker.patch(
"app.models.facial_recognition.recognition.update_inputs_outputs_dims", autospec=True
)
mocker.patch("app.models.base.InferenceModel.download")
mocker.patch("app.models.facial_recognition.recognition.ArcFaceONNX")
path.return_value.__truediv__.return_value.__truediv__.return_value.suffix = ".onnx"

inputs = [SimpleNamespace(name="input.1", shape=("batch", 3, 224, 224))]
outputs = [SimpleNamespace(name="output.1", shape=("batch", 800))]
ort_session.return_value.get_inputs.return_value = inputs
ort_session.return_value.get_outputs.return_value = outputs

face_recognizer = FaceRecognizer("buffalo_s")
face_recognizer = FaceRecognizer("buffalo_s", cache_dir=path)
face_recognizer.load()

assert face_recognizer.batch is True
@@ -520,6 +538,30 @@ class TestFaceRecognition:
onnx.load.assert_not_called()
onnx.save.assert_not_called()

def test_recognition_does_not_add_batch_axis_for_armnn(
self, ann_session: mock.Mock, path: mock.Mock, mocker: MockerFixture
) -> None:
onnx = mocker.patch("app.models.facial_recognition.recognition.onnx", autospec=True)
update_dims = mocker.patch(
"app.models.facial_recognition.recognition.update_inputs_outputs_dims", autospec=True
)
mocker.patch("app.models.base.InferenceModel.download")
mocker.patch("app.models.facial_recognition.recognition.ArcFaceONNX")
path.return_value.__truediv__.return_value.__truediv__.return_value.suffix = ".armnn"

inputs = [SimpleNamespace(name="input.1", shape=("batch", 3, 224, 224))]
outputs = [SimpleNamespace(name="output.1", shape=("batch", 800))]
ann_session.return_value.get_inputs.return_value = inputs
ann_session.return_value.get_outputs.return_value = outputs

face_recognizer = FaceRecognizer("buffalo_s", model_format=ModelFormat.ARMNN, cache_dir=path)
face_recognizer.load()

assert face_recognizer.batch is False
update_dims.assert_not_called()
onnx.load.assert_not_called()
onnx.save.assert_not_called()


@pytest.mark.asyncio
class TestCache:
@@ -693,7 +735,7 @@ class TestLoad:
mock_model.clear_cache.assert_called_once()
assert mock_model.load.call_count == 2

async def test_load_clears_cache_and_raises_if_os_error_and_already_retried(self) -> None:
async def test_load_raises_if_os_error_and_already_retried(self) -> None:
mock_model = mock.Mock(spec=InferenceModel)
mock_model.model_name = "test_model_name"
mock_model.model_type = ModelType.VISUAL
@@ -707,6 +749,27 @@ class TestLoad:
mock_model.clear_cache.assert_not_called()
mock_model.load.assert_not_called()

async def test_falls_back_to_onnx_if_other_format_does_not_exist(
self, exception: mock.Mock, warning: mock.Mock
) -> None:
mock_model = mock.Mock(spec=InferenceModel)
mock_model.model_name = "test_model_name"
mock_model.model_type = ModelType.VISUAL
mock_model.model_task = ModelTask.SEARCH
mock_model.model_format = ModelFormat.ARMNN
mock_model.loaded = False
mock_model.load_attempts = 0
error = FileNotFoundError()
mock_model.load.side_effect = [error, None]

await load(mock_model)

mock_model.clear_cache.assert_not_called()
assert mock_model.load.call_count == 2
exception.assert_called_once_with(error)
warning.assert_called_once_with("ARMNN is available, but model 'test_model_name' does not support it.")
assert mock_model.model_format == ModelFormat.ONNX


@pytest.mark.skipif(
not settings.test_full,

35
machine-learning/export/ann/Dockerfile
Normal file
@@ -0,0 +1,35 @@
FROM mambaorg/micromamba:bookworm-slim@sha256:333f7598ff2c2400fb10bfe057709c68b7daab5d847143af85abcf224a07271a as builder

USER root
RUN apt-get update && apt-get install -y --no-install-recommends \
build-essential \
cmake \
curl \
git
USER $MAMBA_USER

WORKDIR /home/mambauser
ENV ARMNN_PATH=armnn
COPY --chown=$MAMBA_USER:$MAMBA_USER scripts/* .
RUN ./download-armnn.sh && \
./build-converter.sh && \
./build.sh

COPY --chown=$MAMBA_USER:$MAMBA_USER conda-lock.yml .
RUN micromamba create -y -p /home/mambauser/venv -f conda-lock.yml && \
micromamba clean --all --yes
ENV PATH="/home/mambauser/venv/bin:${PATH}"

FROM gcr.io/distroless/base-debian12
# FROM mambaorg/micromamba:bookworm-slim@sha256:333f7598ff2c2400fb10bfe057709c68b7daab5d847143af85abcf224a07271a

WORKDIR /export/ann
ENV PYTHONDONTWRITEBYTECODE=1 \
LD_LIBRARY_PATH=/export/ann/armnn \
PATH="/opt/venv/bin:${PATH}"

COPY --from=builder /home/mambauser/armnnconverter /home/mambauser/armnn ./
COPY --from=builder /home/mambauser/venv /opt/venv
COPY --chown=$MAMBA_USER:$MAMBA_USER onnx2ann onnx2ann

ENTRYPOINT ["python", "-m", "onnx2ann"]
1600
machine-learning/export/ann/conda-lock.yml
Normal file
File diff suppressed because it is too large
21
machine-learning/export/ann/env.yaml
Normal file
@@ -0,0 +1,21 @@
name: onnx2ann
channels:
- conda-forge
dependencies:
- python>=3.11,<4.0
- onnx>=1.16.1
# - onnxruntime>=1.18.1 # conda only has gpu version
- psutil>=6.0.0
- flatbuffers>=24.3.25
- ml_dtypes>=0.3.1
- typer-slim>=0.12.3
- huggingface_hub>=0.23.4
- pip
- pip:
- onnxruntime>=1.18.1 # conda only has gpu version
- onnxsim>=0.4.36
- onnx2tf>=1.24.1
- onnx_graphsurgeon>=0.5.2
- simple_onnx_processing_tools>=1.1.32
- tf_keras>=2.16.0
- git+https://github.com/microsoft/onnxconverter-common.git
99
machine-learning/export/ann/onnx2ann/__main__.py
Normal file
@@ -0,0 +1,99 @@
import os
import platform
from typing import Annotated, Optional

import typer

from onnx2ann.export import Exporter, ModelType, Precision

app = typer.Typer(add_completion=False, pretty_exceptions_show_locals=False)


@app.command()
def export(
model_name: Annotated[
str, typer.Argument(..., help="The name of the model to be exported as it exists in Hugging Face.")
],
model_type: Annotated[ModelType, typer.Option(..., "--type", "-t", help="The type of model to be exported.")],
input_shapes: Annotated[
list[str],
typer.Option(
...,
"--input-shape",
"-s",
help="The shape of an input tensor to the model, each dimension separated by commas. "
"Multiple shapes can be provided for multiple inputs.",
),
],
precision: Annotated[
Precision,
typer.Option(
...,
"--precision",
"-p",
help="The precision of the exported model. `float16` requires a GPU.",
),
] = Precision.FLOAT32,
cache_dir: Annotated[
str,
typer.Option(
...,
"--cache-dir",
"-c",
help="Directory where pre-export models will be stored.",
envvar="CACHE_DIR",
show_envvar=True,
),
] = "~/.cache/huggingface",
output_dir: Annotated[
str,
typer.Option(
...,
"--output-dir",
"-o",
help="Directory where exported models will be stored.",
),
] = "output",
auth_token: Annotated[
Optional[str],
typer.Option(
...,
"--auth-token",
"-t",
help="If uploading models to Hugging Face, the auth token of the user or organisation.",
envvar="HF_AUTH_TOKEN",
show_envvar=True,
),
] = None,
force_export: Annotated[
bool,
typer.Option(
...,
"--force-export",
"-f",
help="Export the model even if an exported model already exists in the output directory.",
),
] = False,
) -> None:
if platform.machine() not in ("x86_64", "AMD64"):
msg = f"Can only run on x86_64 / AMD64, not {platform.machine()}"
raise RuntimeError(msg)
os.environ.setdefault("LD_LIBRARY_PATH", "armnn")
parsed_input_shapes = [tuple(map(int, shape.split(","))) for shape in input_shapes]
model = Exporter(
model_name, model_type, input_shapes=parsed_input_shapes, cache_dir=cache_dir, force_export=force_export
)
model_dir = os.path.join("output", model_name)
output_dir = os.path.join(model_dir, model_type)
armnn_model = model.to_armnn(output_dir, precision)

if not auth_token:
return

from huggingface_hub import upload_file

relative_path = os.path.relpath(armnn_model, start=model_dir)
upload_file(path_or_fileobj=armnn_model, path_in_repo=relative_path, repo_id=model.repo_name, token=auth_token)


app()
129
machine-learning/export/ann/onnx2ann/export.py
Normal file
@@ -0,0 +1,129 @@
import os
import subprocess
from enum import StrEnum

from onnx2ann.helpers import onnx_make_armnn_compatible, onnx_make_inputs_fixed


class ModelType(StrEnum):
VISUAL = "visual"
TEXTUAL = "textual"
RECOGNITION = "recognition"
DETECTION = "detection"


class Precision(StrEnum):
FLOAT16 = "float16"
FLOAT32 = "float32"


class Exporter:
def __init__(
self,
model_name: str,
model_type: str,
input_shapes: list[tuple[int, ...]],
optimization_level: int = 5,
cache_dir: str = os.environ.get("CACHE_DIR", "~/.cache/huggingface"),
force_export: bool = False,
):
self.model_name = model_name.split("/")[-1]
self.model_type = model_type
self.optimize = optimization_level
self.input_shapes = input_shapes
self.cache_dir = os.path.join(cache_dir, self.repo_name)
self.force_export = force_export

def download(self) -> str:
model_path = os.path.join(self.cache_dir, self.model_type, "model.onnx")
if os.path.isfile(model_path):
print(f"Model is already downloaded at {model_path}")
return model_path
from huggingface_hub import snapshot_download

snapshot_download(
self.repo_name, cache_dir=self.cache_dir, local_dir=self.cache_dir, local_dir_use_symlinks=False
)
return model_path

def to_onnx_static(self, precision: Precision) -> str:
import onnx
from onnxconverter_common import float16
onnx_path_original = self.download()
static_dir = os.path.join(self.cache_dir, self.model_type, "static")

static_path = os.path.join(static_dir, "model.onnx")
if self.force_export or not os.path.isfile(static_path):
print(f"Making {self} static")
os.makedirs(static_dir, exist_ok=True)
onnx_make_inputs_fixed(onnx_path_original, static_path, self.input_shapes)
onnx_make_armnn_compatible(static_path)
print(f"Finished making {self} static")

model = onnx.load(static_path)
self.inputs = [input_.name for input_ in model.graph.input]
self.outputs = [output_.name for output_ in model.graph.output]
if precision == Precision.FLOAT16:
static_path = os.path.join(static_dir, f"model_{precision}.onnx")
print(f"Converting {self} to {precision} precision")
model = float16.convert_float_to_float16(model, keep_io_types=True, disable_shape_infer=True)
onnx.save(model, static_path)
print(f"Finished converting {self} to {precision} precision")
# self.inputs, self.outputs = onnx_get_inputs_outputs(static_path)
return static_path

def to_tflite(self, output_dir: str, precision: Precision) -> str:
onnx_model = self.to_onnx_static(precision)
tflite_dir = os.path.join(output_dir, precision)
tflite_model = os.path.join(tflite_dir, f"model_{precision}.tflite")
if self.force_export or not os.path.isfile(tflite_model):
import onnx2tf

print(f"Exporting {self} to TFLite with {precision} precision (this might take a few minutes)")
onnx2tf.convert(
input_onnx_file_path=onnx_model,
output_folder_path=tflite_dir,
keep_shape_absolutely_input_names=self.inputs,
# verbosity="warn",
copy_onnx_input_output_names_to_tflite=True,
output_signaturedefs=True,
not_use_onnxsim=True,
)
print(f"Finished exporting {self} to TFLite with {precision} precision")

return tflite_model

def to_armnn(self, output_dir: str, precision: Precision) -> str:
armnn_model = os.path.join(output_dir, "model.armnn")
if not self.force_export and os.path.isfile(armnn_model):
return armnn_model

tflite_model_dir = os.path.join(output_dir, "tflite")
tflite_model = self.to_tflite(tflite_model_dir, precision)

args = ["./armnnconverter", "-f", "tflite-binary", "-m", tflite_model, "-p", armnn_model]
args.append("-i")
args.extend(self.inputs)
args.append("-o")
args.extend(self.outputs)

print(f"Exporting {self} to ARM NN with {precision} precision")
try:
if stdout := subprocess.check_output(args, stderr=subprocess.STDOUT).decode():
print(stdout)
print(f"Finished exporting {self} to ARM NN with {precision} precision")
except subprocess.CalledProcessError as e:
print(e.output.decode())
try:
from shutil import rmtree

rmtree(tflite_model_dir, ignore_errors=True)
finally:
raise e

return armnn_model

@property
def repo_name(self) -> str:
return f"immich-app/{self.model_name}"

def __repr__(self) -> str:
return f"{self.model_name} ({self.model_type})"
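# The pipeline above is: Hugging Face ONNX -> fixed-shape, ARM NN-compatible ONNX (optionally
# fp16) -> TFLite via onnx2tf -> ARM NN via the prebuilt armnnconverter binary. A hedged usage
# sketch, with flags as declared in __main__.py (the input shape is illustrative):
#   python -m onnx2ann ViT-B-32__openai -t textual -s 1,77
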
260
machine-learning/export/ann/onnx2ann/helpers.py
Normal file
@@ -0,0 +1,260 @@
from typing import Any


def onnx_make_armnn_compatible(model_path: str) -> None:
"""
i can explain
armnn only supports up to 4d transposes, but the model has a 5d transpose due to a redundant unsqueeze
this function folds the unsqueeze+transpose+squeeze into a single 4d transpose
it also switches from gather ops to slices since armnn has different dimension semantics for gathers
also fixes batch normalization being in training mode
"""

||||
import numpy as np
|
||||
import onnx
|
||||
from onnx_graphsurgeon import Constant, Node, Variable, export_onnx, import_onnx
|
||||
|
||||
proto = onnx.load(model_path)
|
||||
graph = import_onnx(proto)
|
||||
|
||||
gather_idx = 1
|
||||
squeeze_idx = 1
|
||||
    for node in graph.nodes:
        for link1 in node.outputs:
            if "Unsqueeze" in link1.name:
                for node1 in link1.outputs:
                    for link2 in node1.outputs:
                        if "Transpose" in link2.name:
                            for node2 in link2.outputs:
                                if node2.attrs.get("perm") == [3, 1, 2, 0, 4]:
                                    node2.attrs["perm"] = [2, 0, 1, 3]
                                    link2.shape = link1.shape
                                    for link3 in node2.outputs:
                                        if "Squeeze" in link3.name:
                                            link3.shape = [link3.shape[x] for x in [0, 1, 2, 4]]
                                            for node3 in link3.outputs:
                                                for link4 in node3.outputs:
                                                    link4.shape = link3.shape
                                                    try:
                                                        idx = link2.inputs.index(node1)
                                                        link2.inputs[idx] = node
                                                    except ValueError:
                                                        pass

                                                    node.outputs = [link2]
                                                    if "Gather" in link4.name:
                                                        for node4 in link4.outputs:
                                                            axis = node1.attrs.get("axis", 0)
                                                            index = node4.inputs[1].values
                                                            slice_link = Variable(
                                                                f"onnx::Slice_123{gather_idx}",
                                                                dtype=link4.dtype,
                                                                shape=[1] + link3.shape[1:],
                                                            )
                                                            slice_node = Node(
                                                                op="Slice",
                                                                inputs=[
                                                                    link3,
                                                                    Constant(
                                                                        f"SliceStart_123{gather_idx}",
                                                                        np.array([index]),
                                                                    ),
                                                                    Constant(
                                                                        f"SliceEnd_123{gather_idx}",
                                                                        np.array([index + 1]),
                                                                    ),
                                                                    Constant(
                                                                        f"SliceAxis_123{gather_idx}",
                                                                        np.array([axis]),
                                                                    ),
                                                                ],
                                                                outputs=[slice_link],
                                                                name=f"Slice_123{gather_idx}",
                                                            )
                                                            graph.nodes.append(slice_node)
                                                            gather_idx += 1

                                                            for link5 in node4.outputs:
                                                                for node5 in link5.outputs:
                                                                    try:
                                                                        idx = node5.inputs.index(link5)
                                                                        node5.inputs[idx] = slice_link
                                                                    except ValueError:
                                                                        pass
            elif node.op == "LayerNormalization":
                for node1 in link1.outputs:
                    if node1.op == "Gather":
                        for link2 in node1.outputs:
                            for node2 in link2.outputs:
                                axis = node1.attrs.get("axis", 0)
                                index = node1.inputs[1].values
                                slice_link = Variable(
                                    f"onnx::Slice_123{gather_idx}",
                                    dtype=link2.dtype,
                                    shape=[1, *link2.shape],
                                )
                                slice_node = Node(
                                    op="Slice",
                                    inputs=[
                                        node1.inputs[0],
                                        Constant(
                                            f"SliceStart_123{gather_idx}",
                                            np.array([index]),
                                        ),
                                        Constant(
                                            f"SliceEnd_123{gather_idx}",
                                            np.array([index + 1]),
                                        ),
                                        Constant(
                                            f"SliceAxis_123{gather_idx}",
                                            np.array([axis]),
                                        ),
                                    ],
                                    outputs=[slice_link],
                                    name=f"Slice_123{gather_idx}",
                                )
                                graph.nodes.append(slice_node)
                                gather_idx += 1

                                squeeze_link = Variable(
                                    f"onnx::Squeeze_123{squeeze_idx}",
                                    dtype=link2.dtype,
                                    shape=link2.shape,
                                )
                                squeeze_node = Node(
                                    op="Squeeze",
                                    inputs=[
                                        slice_link,
                                        Constant(
                                            f"SqueezeAxis_123{squeeze_idx}",
                                            np.array([0]),
                                        ),
                                    ],
                                    outputs=[squeeze_link],
                                    name=f"Squeeze_123{squeeze_idx}",
                                )
                                graph.nodes.append(squeeze_node)
                                squeeze_idx += 1
                                try:
                                    idx = node2.inputs.index(link2)
                                    node2.inputs[idx] = squeeze_link
                                except ValueError:
                                    pass
            elif node.op == "Reshape":
                for node1 in link1.outputs:
                    if node1.op == "Gather":
                        node2s = [n for link in node1.outputs for n in link.outputs]
                        if any(n.op == "Abs" for n in node2s):
                            axis = node1.attrs.get("axis", 0)
                            index = node1.inputs[1].values
                            slice_link = Variable(
                                f"onnx::Slice_123{gather_idx}",
                                dtype=node1.outputs[0].dtype,
                                shape=[1, *node1.outputs[0].shape],
                            )
                            slice_node = Node(
                                op="Slice",
                                inputs=[
                                    node1.inputs[0],
                                    Constant(
                                        f"SliceStart_123{gather_idx}",
                                        np.array([index]),
                                    ),
                                    Constant(
                                        f"SliceEnd_123{gather_idx}",
                                        np.array([index + 1]),
                                    ),
                                    Constant(
                                        f"SliceAxis_123{gather_idx}",
                                        np.array([axis]),
                                    ),
                                ],
                                outputs=[slice_link],
                                name=f"Slice_123{gather_idx}",
                            )
                            graph.nodes.append(slice_node)
                            gather_idx += 1

                            squeeze_link = Variable(
                                f"onnx::Squeeze_123{squeeze_idx}",
                                dtype=node1.outputs[0].dtype,
                                shape=node1.outputs[0].shape,
                            )
                            squeeze_node = Node(
                                op="Squeeze",
                                inputs=[
                                    slice_link,
                                    Constant(
                                        f"SqueezeAxis_123{squeeze_idx}",
                                        np.array([0]),
                                    ),
                                ],
                                outputs=[squeeze_link],
                                name=f"Squeeze_123{squeeze_idx}",
                            )
                            graph.nodes.append(squeeze_node)
                            squeeze_idx += 1
                            for node2 in node2s:
                                node2.inputs[0] = squeeze_link
            elif node.op == "BatchNormalization" and node.attrs.get("training_mode") == 1:
                node.attrs["training_mode"] = 0
                node.outputs = node.outputs[:1]

    graph.cleanup(remove_unused_node_outputs=True, recurse_subgraphs=True, recurse_functions=True)
    graph.toposort()
    graph.fold_constants()
    updated = export_onnx(graph)
    onnx_save(updated, model_path)

    # for some reason, reloading the model is necessary to apply the correct shape
    proto = onnx.load(model_path)
    graph = import_onnx(proto)
    for node in graph.nodes:
        if node.op == "Slice":
            for link in node.outputs:
                if "Slice_123" in link.name and link.shape[0] == 3:  # noqa: PLR2004
                    link.shape[0] = 1

    graph.cleanup(remove_unused_node_outputs=True, recurse_subgraphs=True, recurse_functions=True)
    graph.toposort()
    graph.fold_constants()
    updated = export_onnx(graph)
    onnx_save(updated, model_path)
    onnx.shape_inference.infer_shapes_path(model_path, check_type=True, strict_mode=True, data_prop=True)

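The two graph rewrites above rest on simple tensor identities, which can be sanity-checked with plain numpy; a standalone sketch (shapes arbitrary, not part of the export code):

    import numpy as np

    x = np.random.rand(2, 3, 4, 5)

    # folding unsqueeze + 5D transpose + squeeze into one 4D transpose:
    # perm [3, 1, 2, 0, 4] on the unsqueezed tensor equals perm [2, 0, 1, 3] on x
    a = np.transpose(x[None], (3, 1, 2, 0, 4)).squeeze(3)
    b = np.transpose(x, (2, 0, 1, 3))
    assert np.array_equal(a, b)

    # Gather with a scalar index equals Slice + Squeeze on the same axis
    axis, index = 1, 2
    g = np.take(x, index, axis=axis)                  # Gather drops the axis
    s = np.take(x, [index], axis=axis).squeeze(axis)  # Slice keeps it; Squeeze removes it
    assert np.array_equal(g, s)
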
def onnx_make_inputs_fixed(input_path: str, output_path: str, input_shapes: list[tuple[int, ...]]) -> None:
    import onnx
    import onnxsim
    from onnxruntime.tools.onnx_model_utils import fix_output_shapes, make_input_shape_fixed

    model, success = onnxsim.simplify(input_path)
    if not success:
        msg = f"Failed to simplify {input_path}"
        raise RuntimeError(msg)
    onnx_save(model, output_path)
    onnx.shape_inference.infer_shapes_path(output_path, check_type=True, strict_mode=True, data_prop=True)
    model = onnx.load_model(output_path)
    for input_node, shape in zip(model.graph.input, input_shapes, strict=False):
        make_input_shape_fixed(model.graph, input_node.name, shape)
    fix_output_shapes(model)
    onnx_save(model, output_path)
    onnx.shape_inference.infer_shapes_path(output_path, check_type=True, strict_mode=True, data_prop=True)

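A usage sketch of the helper above, pinning a dynamic-batch vision encoder to a fixed batch-1 input (the paths and the 224×224 shape are hypothetical):

    onnx_make_inputs_fixed(
        "visual/model.onnx",          # source model with dynamic axes
        "visual/static/model.onnx",   # destination for the fixed-shape model
        [(1, 3, 224, 224)],           # one shape per graph input, in order
    )
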
def onnx_get_inputs_outputs(model_path: str) -> tuple[list[str], list[str]]:
    import onnx

    model = onnx.load(model_path)
    inputs = [input_.name for input_ in model.graph.input]
    outputs = [output_.name for output_ in model.graph.output]
    return inputs, outputs


def onnx_save(model: Any, output_path: str) -> None:
    import onnx

    try:
        onnx.save(model, output_path)
    except ValueError:
        # the model exceeds the 2 GiB protobuf limit; spill large tensors to external data files
        onnx.save(model, output_path, save_as_external_data=True, all_tensors_to_one_file=False, size_threshold=1_000_000)
56	machine-learning/export/ann/pyproject.toml	Normal file
@@ -0,0 +1,56 @@
[project]
name = "onnx2ann"
version = "1.107.2"
dependencies = [
    "onnx>=1.16.1",
    "psutil>=6.0.0",
    "flatbuffers>=24.3.25",
    "ml_dtypes>=0.3.1,<1.0.0",
    "typer-slim>=0.12.3,<1.0.0",
    "huggingface_hub>=0.23.4,<1.0.0",
    "onnxruntime>=1.18.1",
    "onnxsim>=0.4.36,<1.0.0",
    "onnx2tf>=1.24.0",
    "onnx_graphsurgeon>=0.5.2,<1.0.0",
    "simple_onnx_processing_tools>=1.1.32",
    "tf_keras>=2.16.0",
    "onnxconverter-common @ git+https://github.com/microsoft/onnxconverter-common"
]
requires-python = ">=3.11"

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.hatch.build.targets.sdist]
only-include = ["onnx2ann"]

[tool.hatch.metadata]
allow-direct-references = true

[tool.mypy]
python_version = "3.12"
follow_imports = "silent"
warn_redundant_casts = true
disallow_any_generics = true
check_untyped_defs = true
disallow_untyped_defs = true
ignore_missing_imports = true

[tool.pydantic-mypy]
init_forbid_extra = true
init_typed = true
warn_required_dynamic_aliases = true
warn_untyped_fields = true

[tool.ruff]
line-length = 120
target-version = "py312"

[tool.ruff.lint]
extend-select = ["E", "F", "I"]
extend-ignore = ["FBT001", "FBT002"]

[tool.black]
line-length = 120
target-version = ['py312']
281	machine-learning/export/ann/scripts/ann.cpp	Normal file
@@ -0,0 +1,281 @@
#include <fstream>
#include <mutex>
#include <atomic>

#include "armnn/IRuntime.hpp"
#include "armnn/INetwork.hpp"
#include "armnn/Types.hpp"
#include "armnnDeserializer/IDeserializer.hpp"
#include "armnnTfLiteParser/ITfLiteParser.hpp"
#include "armnnOnnxParser/IOnnxParser.hpp"

using namespace armnn;

struct IOInfos
{
    std::vector<BindingPointInfo> inputInfos;
    std::vector<BindingPointInfo> outputInfos;
};

// from https://rigtorp.se/spinlock/
struct SpinLock
{
    std::atomic<bool> lock_ = {false};

    void lock()
    {
        for (;;)
        {
            if (!lock_.exchange(true, std::memory_order_acquire))
            {
                break;
            }
            while (lock_.load(std::memory_order_relaxed))
                ;
        }
    }

    void unlock() { lock_.store(false, std::memory_order_release); }
};

class Ann
{

public:
    int load(const char *modelPath,
             bool fastMath,
             bool fp16,
             bool saveCachedNetwork,
             const char *cachedNetworkPath)
    {
        INetworkPtr network = loadModel(modelPath);
        IOptimizedNetworkPtr optNet = OptimizeNetwork(network.get(), fastMath, fp16, saveCachedNetwork, cachedNetworkPath);
        const IOInfos infos = getIOInfos(optNet.get());
        NetworkId netId;
        mutex.lock();
        Status status = runtime->LoadNetwork(netId, std::move(optNet));
        mutex.unlock();
        if (status != Status::Success)
        {
            return -1;
        }
        spinLock.lock();
        ioInfos[netId] = infos;
        mutexes.emplace(netId, std::make_unique<std::mutex>());
        spinLock.unlock();
        return netId;
    }

    void execute(NetworkId netId, const void **inputData, void **outputData)
    {
        spinLock.lock();
        const IOInfos *infos = &ioInfos[netId];
        auto m = mutexes[netId].get();
        spinLock.unlock();
        InputTensors inputTensors;
        inputTensors.reserve(infos->inputInfos.size());
        size_t i = 0;
        for (const BindingPointInfo &info : infos->inputInfos)
            inputTensors.emplace_back(info.first, ConstTensor(info.second, inputData[i++]));
        OutputTensors outputTensors;
        outputTensors.reserve(infos->outputInfos.size());
        i = 0;
        for (const BindingPointInfo &info : infos->outputInfos)
            outputTensors.emplace_back(info.first, Tensor(info.second, outputData[i++]));
        m->lock();
        runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
        m->unlock();
    }

    void unload(NetworkId netId)
    {
        mutex.lock();
        runtime->UnloadNetwork(netId);
        mutex.unlock();
    }

    int tensors(NetworkId netId, bool isInput = false)
    {
        spinLock.lock();
        const IOInfos *infos = &ioInfos[netId];
        spinLock.unlock();
        return (int)(isInput ? infos->inputInfos.size() : infos->outputInfos.size());
    }

    unsigned long shape(NetworkId netId, bool isInput = false, int index = 0)
    {
        spinLock.lock();
        const IOInfos *infos = &ioInfos[netId];
        spinLock.unlock();
        const TensorShape shape = (isInput ? infos->inputInfos : infos->outputInfos)[index].second.GetShape();
        unsigned long s = 0;
        for (unsigned int d = 0; d < shape.GetNumDimensions(); d++)
            s |= ((unsigned long)shape[d]) << (d * 16); // stores up to 4 16-bit values in a 64-bit value
        return s;
    }

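The packed shape value returned by shape() can be decoded on the caller's side with a few bit operations; a minimal Python sketch (assumes every dimension fits in 16 bits, as the packing requires):

    def unpack_shape(packed: int) -> list[int]:
        # each dimension occupies 16 bits, lowest dimension in the least significant bits
        dims = [(packed >> (16 * d)) & 0xFFFF for d in range(4)]
        # trailing zeroes are unused dimension slots
        while dims and dims[-1] == 0:
            dims.pop()
        return dims

    assert unpack_shape((224 << 16) | 1) == [1, 224]
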
    Ann(int tuningLevel, const char *tuningFile)
    {
        IRuntime::CreationOptions runtimeOptions;
        BackendOptions backendOptions{"GpuAcc",
                                      {
                                          {"TuningLevel", tuningLevel},
                                          {"MemoryOptimizerStrategy", "ConstantMemoryStrategy"}, // SingleAxisPriorityList or ConstantMemoryStrategy
                                      }};
        if (tuningFile)
            backendOptions.AddOption({"TuningFile", tuningFile});
        runtimeOptions.m_BackendOptions.emplace_back(backendOptions);
        runtime = IRuntime::CreateRaw(runtimeOptions);
    }

    ~Ann()
    {
        IRuntime::Destroy(runtime);
    }

private:
    INetworkPtr loadModel(const char *modelPath)
    {
        const auto path = std::string(modelPath);
        if (path.rfind(".tflite") == path.length() - 7) // endsWith()
        {
            auto parser = armnnTfLiteParser::ITfLiteParser::CreateRaw();
            return parser->CreateNetworkFromBinaryFile(modelPath);
        }
        else if (path.rfind(".onnx") == path.length() - 5) // endsWith()
        {
            auto parser = armnnOnnxParser::IOnnxParser::CreateRaw();
            return parser->CreateNetworkFromBinaryFile(modelPath);
        }
        else
        {
            std::ifstream ifs(path, std::ifstream::in | std::ifstream::binary);
            auto parser = armnnDeserializer::IDeserializer::CreateRaw();
            return parser->CreateNetworkFromBinary(ifs);
        }
    }

    static BindingPointInfo getInputTensorInfo(LayerBindingId inputBindingId, TensorInfo info)
    {
        const auto newInfo = TensorInfo{info.GetShape(), info.GetDataType(),
                                        info.GetQuantizationScale(),
                                        info.GetQuantizationOffset(),
                                        true};
        return {inputBindingId, newInfo};
    }

    IOptimizedNetworkPtr OptimizeNetwork(INetwork *network, bool fastMath, bool fp16, bool saveCachedNetwork, const char *cachedNetworkPath)
    {
        const bool allowExpandedDims = false;
        const ShapeInferenceMethod shapeInferenceMethod = ShapeInferenceMethod::ValidateOnly;

        OptimizerOptionsOpaque options;
        options.SetReduceFp32ToFp16(fp16);
        options.SetShapeInferenceMethod(shapeInferenceMethod);
        options.SetAllowExpandedDims(allowExpandedDims);

        BackendOptions gpuAcc("GpuAcc", {{"FastMathEnabled", fastMath}});
        if (cachedNetworkPath)
        {
            gpuAcc.AddOption({"SaveCachedNetwork", saveCachedNetwork});
            gpuAcc.AddOption({"CachedNetworkFilePath", cachedNetworkPath});
        }
        options.AddModelOption(gpuAcc);

        // No point in using ARM NN for CPU; use ONNX (quantized) instead.
        // BackendOptions cpuAcc("CpuAcc",
        //                       {
        //                           {"FastMathEnabled", fastMath},
        //                           {"NumberOfThreads", 0},
        //                       });
        // options.AddModelOption(cpuAcc);

        BackendOptions allowExDimOpt("AllowExpandedDims",
                                     {{"AllowExpandedDims", allowExpandedDims}});
        options.AddModelOption(allowExDimOpt);
        BackendOptions shapeInferOpt("ShapeInferenceMethod",
                                     {{"InferAndValidate", shapeInferenceMethod == ShapeInferenceMethod::InferAndValidate}});
        options.AddModelOption(shapeInferOpt);

        std::vector<BackendId> backends = {
            BackendId("GpuAcc"),
            // BackendId("CpuAcc"),
            // BackendId("CpuRef"),
        };
        return Optimize(*network, backends, runtime->GetDeviceSpec(), options);
    }

    IOInfos getIOInfos(IOptimizedNetwork *optNet)
    {
        struct InfoStrategy : IStrategy
        {
            void ExecuteStrategy(const IConnectableLayer *layer,
                                 const BaseDescriptor &descriptor,
                                 const std::vector<ConstTensor> &constants,
                                 const char *name,
                                 const LayerBindingId id = 0) override
            {
                IgnoreUnused(descriptor, constants, id);
                const LayerType lt = layer->GetType();
                if (lt == LayerType::Input)
                    ioInfos.inputInfos.push_back(getInputTensorInfo(id, layer->GetOutputSlot(0).GetTensorInfo()));
                else if (lt == LayerType::Output)
                    ioInfos.outputInfos.push_back({id, layer->GetInputSlot(0).GetTensorInfo()});
            }
            IOInfos ioInfos;
        };

        InfoStrategy infoStrategy;
        optNet->ExecuteStrategy(infoStrategy);
        return infoStrategy.ioInfos;
    }

    IRuntime *runtime;
    std::map<NetworkId, IOInfos> ioInfos;
    std::map<NetworkId, std::unique_ptr<std::mutex>> mutexes; // mutex per network to avoid executing the same network concurrently
    std::mutex mutex;                                         // global mutex for load/unload calls to the runtime
    SpinLock spinLock;                                        // fast spin lock to guard access to the ioInfos and mutexes maps
};

extern "C" void *init(int logLevel, int tuningLevel, const char *tuningFile)
{
    LogSeverity level = static_cast<LogSeverity>(logLevel);
    ConfigureLogging(true, true, level);

    Ann *ann = new Ann(tuningLevel, tuningFile);
    return ann;
}

extern "C" void destroy(void *ann)
{
    delete ((Ann *)ann);
}

extern "C" int load(void *ann,
                    const char *path,
                    bool fastMath,
                    bool fp16,
                    bool saveCachedNetwork,
                    const char *cachedNetworkPath)
{
    return ((Ann *)ann)->load(path, fastMath, fp16, saveCachedNetwork, cachedNetworkPath);
}

extern "C" void unload(void *ann, NetworkId netId)
{
    ((Ann *)ann)->unload(netId);
}

extern "C" void execute(void *ann, NetworkId netId, const void **inputData, void **outputData)
{
    ((Ann *)ann)->execute(netId, inputData, outputData);
}

extern "C" unsigned long shape(void *ann, NetworkId netId, bool isInput, int index)
{
    return ((Ann *)ann)->shape(netId, isInput, index);
}

extern "C" int tensors(void *ann, NetworkId netId, bool isInput)
{
    return ((Ann *)ann)->tensors(netId, isInput);
}
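
The extern "C" surface above maps directly onto ctypes; a minimal loading sketch (the library path, log level, and model path are hypothetical values chosen for illustration):

    import ctypes

    lib = ctypes.CDLL("./libann.so")
    lib.init.restype = ctypes.c_void_p
    lib.init.argtypes = [ctypes.c_int, ctypes.c_int, ctypes.c_char_p]
    lib.load.restype = ctypes.c_int
    lib.load.argtypes = [ctypes.c_void_p, ctypes.c_char_p, ctypes.c_bool,
                         ctypes.c_bool, ctypes.c_bool, ctypes.c_char_p]

    ann = lib.init(3, 0, None)  # logLevel, tuningLevel, no tuning file
    net_id = lib.load(ann, b"model.armnn", True, False, False, None)
    assert net_id != -1, "LoadNetwork failed"
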
4	machine-learning/export/ann/scripts/build-converter.sh	Executable file
@@ -0,0 +1,4 @@
#!/usr/bin/env sh

cd armnn-23.11/ || exit
g++ -o ../armnnconverter -fPIC -O1 -DARMNN_ONNX_PARSER -DARMNN_SERIALIZER -DARMNN_TF_LITE_PARSER -fuse-ld=gold -std=c++17 -Iinclude -Isrc/armnnUtils -Ithird-party -larmnn -larmnnDeserializer -larmnnTfLiteParser -larmnnOnnxParser -larmnnSerializer -L../armnn src/armnnConverter/ArmnnConverter.cpp
3	machine-learning/export/ann/scripts/build.sh	Executable file
@@ -0,0 +1,3 @@
#!/usr/bin/env sh

g++ -shared -O3 -fPIC -o libann.so -fuse-ld=gold -std=c++17 -I"$ARMNN_PATH"/include -larmnn -larmnnDeserializer -larmnnTfLiteParser -larmnnOnnxParser -L"$ARMNN_PATH" ann.cpp
0	machine-learning/export/ort/models/__init__.py	Normal file
@@ -19,37 +19,44 @@ _MCLIP_TO_OPENCLIP = {
}


def forward(self: MultilingualCLIP, input_ids: torch.Tensor, attention_mask: torch.Tensor) -> torch.Tensor:
    embs = self.transformer(input_ids, attention_mask)[0]
    embs = (embs * attention_mask.unsqueeze(2)).sum(dim=1) / attention_mask.sum(dim=1)[:, None]
    embs = self.LinearTransformation(embs)
    return torch.nn.functional.normalize(embs, dim=-1)

# unfortunately need to monkeypatch for tracing to work here
# otherwise it hits the 2GiB protobuf serialization limit
MultilingualCLIP.forward = forward


def to_torchscript(model_name: str) -> torch.jit.ScriptModule:
    with tempfile.TemporaryDirectory() as tmpdir:
        model = MultilingualCLIP.from_pretrained(model_name, cache_dir=tmpdir)

    model.eval()
    for param in model.parameters():
        param.requires_grad_(False)

    return model


def to_onnx(
    model_name: str,
    output_dir_visual: Path | str,
    output_dir_textual: Path | str,
) -> None:
    textual_path = get_model_path(output_dir_textual)
    with tempfile.TemporaryDirectory() as tmpdir:
        model = MultilingualCLIP.from_pretrained(model_name, cache_dir=tmpdir)
        AutoTokenizer.from_pretrained(model_name).save_pretrained(output_dir_textual)
    model = to_torchscript(model_name)
    AutoTokenizer.from_pretrained(model_name).save_pretrained(output_dir_textual)

        for param in model.parameters():
            param.requires_grad_(False)

        export_text_encoder(model, textual_path)
        openclip_to_onnx(_MCLIP_TO_OPENCLIP[model_name], output_dir_visual)
        optimize(textual_path)
    _text_encoder_to_onnx(model, textual_path)
    openclip_to_onnx(_MCLIP_TO_OPENCLIP[model_name], output_dir_visual)
    optimize(textual_path)


def export_text_encoder(model: MultilingualCLIP, output_path: Path | str) -> None:
def _text_encoder_to_onnx(model: MultilingualCLIP, output_path: Path | str) -> None:
    output_path = Path(output_path)

    def forward(self: MultilingualCLIP, input_ids: torch.Tensor, attention_mask: torch.Tensor) -> torch.Tensor:
        embs = self.transformer(input_ids, attention_mask)[0]
        embs = (embs * attention_mask.unsqueeze(2)).sum(dim=1) / attention_mask.sum(dim=1)[:, None]
        embs = self.LinearTransformation(embs)
        return torch.nn.functional.normalize(embs, dim=-1)

    # unfortunately need to monkeypatch for tracing to work here
    # otherwise it hits the 2GiB protobuf serialization limit
    MultilingualCLIP.forward = forward

    args = (torch.ones(1, 77, dtype=torch.int32), torch.ones(1, 77, dtype=torch.int32))
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", UserWarning)

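The patched forward mean-pools the token embeddings under the attention mask before the linear projection; a standalone torch sketch of that pooling step (shapes assumed for illustration):

    import torch

    embs = torch.randn(2, 77, 768)                 # per-token embeddings
    mask = torch.ones(2, 77, dtype=torch.int32)
    mask[:, 40:] = 0                               # remaining positions are padding

    pooled = (embs * mask.unsqueeze(2)).sum(dim=1) / mask.sum(dim=1)[:, None]

    # equivalent to averaging only the unmasked tokens
    assert torch.allclose(pooled[0], embs[0, :40].mean(dim=0), atol=1e-5)
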
@@ -26,6 +26,17 @@ class OpenCLIPModelConfig:
        self.sequence_length = open_clip_cfg["text_cfg"]["context_length"]


def to_torchscript(model_name: str) -> torch.jit.ScriptModule:
    with tempfile.TemporaryDirectory() as tmpdir:
        model = MultilingualCLIP.from_pretrained(model_name, cache_dir=tmpdir)

    model.eval()
    for param in model.parameters():
        param.requires_grad_(False)

    return model


def to_onnx(
    model_cfg: OpenCLIPModelConfig,
    output_dir_visual: Path | str | None = None,
@@ -51,7 +62,7 @@ def to_onnx(

    save_config(open_clip.get_model_preprocess_cfg(model), output_dir_visual / "preprocess_cfg.json")
    save_config(text_vision_cfg, output_dir_visual.parent / "config.json")
    export_image_encoder(model, model_cfg, visual_path)
    _image_encoder_to_onnx(model, model_cfg, visual_path)

    optimize(visual_path)

@@ -61,11 +72,11 @@ def to_onnx(

    tokenizer_name = text_vision_cfg["text_cfg"].get("hf_tokenizer_name", "openai/clip-vit-base-patch32")
    AutoTokenizer.from_pretrained(tokenizer_name).save_pretrained(output_dir_textual)
    export_text_encoder(model, model_cfg, textual_path)
    _text_encoder_to_onnx(model, model_cfg, textual_path)
    optimize(textual_path)


def export_image_encoder(model: open_clip.CLIP, model_cfg: OpenCLIPModelConfig, output_path: Path | str) -> None:
def _image_encoder_to_onnx(model: open_clip.CLIP, model_cfg: OpenCLIPModelConfig, output_path: Path | str) -> None:
    output_path = Path(output_path)

    def encode_image(image: torch.Tensor) -> torch.Tensor:
@@ -89,7 +100,7 @@ def export_image_encoder(model: open_clip.CLIP, model_cfg: OpenCLIPModelConfig,
    )


def export_text_encoder(model: open_clip.CLIP, model_cfg: OpenCLIPModelConfig, output_path: Path | str) -> None:
def _text_encoder_to_onnx(model: open_clip.CLIP, model_cfg: OpenCLIPModelConfig, output_path: Path | str) -> None:
    output_path = Path(output_path)

    def encode_text(text: torch.Tensor) -> torch.Tensor:
@@ -1,6 +1,6 @@
[tool.poetry]
name = "machine-learning"
version = "1.107.0"
version = "1.108.0"
description = ""
authors = ["Hau Tran <alex.tran1502@gmail.com>"]
readme = "README.md"

@@ -1,7 +1,7 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="app.alextran.immich"
    xmlns:tools="http://schemas.android.com/tools">
    <application android:label="Immich" android:name=".ImmichApp" android:usesCleartextTraffic="true"
        android:icon="@mipmap/ic_launcher" android:requestLegacyExternalStorage="true">
        android:icon="@mipmap/ic_launcher" android:requestLegacyExternalStorage="true" android:largeHeap="true">

        <meta-data
            android:name="io.flutter.embedding.android.EnableImpeller"

@@ -35,8 +35,8 @@ platform :android do
        task: 'bundle',
        build_type: 'Release',
        properties: {
          "android.injected.version.code" => 145,
          "android.injected.version.name" => "1.107.0",
          "android.injected.version.code" => 148,
          "android.injected.version.name" => "1.108.0",
        }
      )
      upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')

@@ -1,23 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites>
  <testsuite name="fastlane.lanes">

    <testcase classname="fastlane.lanes" name="0: default_platform" time="0.000381">
    </testcase>

    <testcase classname="fastlane.lanes" name="1: bundleRelease" time="52.832426">
    </testcase>

    <testcase classname="fastlane.lanes" name="2: upload_to_play_store" time="27.616558">
    </testcase>

  </testsuite>
</testsuites>
@@ -17,9 +17,9 @@ PODS:
  - fluttertoast (0.0.2):
    - Flutter
    - Toast
  - FMDB (2.7.11):
    - FMDB/standard (= 2.7.11)
  - FMDB/standard (2.7.11)
  - FMDB (2.7.5):
    - FMDB/standard (= 2.7.5)
  - FMDB/standard (2.7.5)
  - geolocator_apple (1.2.0):
    - Flutter
  - image_picker_ios (0.0.1):
@@ -28,10 +28,10 @@ PODS:
    - Flutter
  - isar_flutter_libs (1.0.0):
    - Flutter
  - MapLibre (6.5.0)
  - MapLibre (5.14.0-pre3)
  - maplibre_gl (0.0.1):
    - Flutter
    - MapLibre (= 6.5.0)
    - MapLibre (= 5.14.0-pre3)
  - package_info_plus (0.4.5):
    - Flutter
  - path_provider_foundation (0.0.1):
@@ -44,7 +44,7 @@ PODS:
  - photo_manager (2.0.0):
    - Flutter
    - FlutterMacOS
  - ReachabilitySwift (5.2.3)
  - ReachabilitySwift (5.0.0)
  - SAMKeychain (1.5.3)
  - share_plus (0.0.1):
    - Flutter
@@ -54,7 +54,7 @@ PODS:
  - sqflite (0.0.3):
    - Flutter
    - FMDB (>= 2.7.5)
  - Toast (4.1.1)
  - Toast (4.0.0)
  - url_launcher_ios (0.0.1):
    - Flutter
  - video_player_avfoundation (0.0.1):
@@ -156,24 +156,24 @@ SPEC CHECKSUMS:
  flutter_udid: a2482c67a61b9c806ef59dd82ed8d007f1b7ac04
  flutter_web_auth: c25208760459cec375a3c39f6a8759165ca0fa4d
  fluttertoast: 31b00dabfa7fb7bacd9e7dbee580d7a2ff4bf265
  FMDB: 57486c1117fd8e0e6b947b2f54c3f42bf8e57a4e
  FMDB: 2ce00b547f966261cd18927a3ddb07cb6f3db82a
  geolocator_apple: 9157311f654584b9bb72686c55fc02a97b73f461
  image_picker_ios: 4a8aadfbb6dc30ad5141a2ce3832af9214a705b5
  integration_test: ce0a3ffa1de96d1a89ca0ac26fca7ea18a749ef4
  isar_flutter_libs: b69f437aeab9c521821c3f376198c4371fa21073
  MapLibre: 0ebfa9329d313cec8bf0a5ba5a336a1dc903785e
  maplibre_gl: 943a491fffb3337c1b9de57fd377206d18c16e34
  MapLibre: 620fc933c1d6029b33738c905c1490d024e5d4ef
  maplibre_gl: a2efec727dd340e4c65e26d2b03b584f14881fd9
  package_info_plus: 115f4ad11e0698c8c1c5d8a689390df880f47e85
  path_provider_foundation: 29f094ae23ebbca9d3d0cec13889cd9060c0e943
  path_provider_ios: 14f3d2fd28c4fdb42f44e0f751d12861c43cee02
  permission_handler_apple: e76247795d700c14ea09e3a2d8855d41ee80a2e6
  photo_manager: 4f6810b7dfc4feb03b461ac1a70dacf91fba7604
  ReachabilitySwift: 7f151ff156cea1481a8411701195ac6a984f4979
  photo_manager: ff695c7a1dd5bc379974953a2b5c0a293f7c4c8a
  ReachabilitySwift: 985039c6f7b23a1da463388634119492ff86c825
  SAMKeychain: 483e1c9f32984d50ca961e26818a534283b4cd5c
  share_plus: c3fef564749587fc939ef86ffb283ceac0baf9f5
  shared_preferences_foundation: 5b919d13b803cadd15ed2dc053125c68730e5126
  sqflite: 31f7eba61e3074736dff8807a9b41581e4f7f15a
  Toast: 1f5ea13423a1e6674c4abdac5be53587ae481c4e
  Toast: 91b396c56ee72a5790816f40d3a94dd357abc196
  url_launcher_ios: bbd758c6e7f9fd7b5b1d4cde34d2b95fcce5e812
  video_player_avfoundation: 02011213dab73ae3687df27ce441fbbcc82b5579
  wakelock_plus: 8b09852c8876491e4b6d179e17dfe2a0b5f60d47

@@ -44,7 +44,7 @@
  886774DBDDE6B35BF2B4F2CD /* Pods_Runner.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_Runner.framework; sourceTree = BUILT_PRODUCTS_DIR; };
  9740EEB21CF90195004384FC /* Debug.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Debug.xcconfig; path = Flutter/Debug.xcconfig; sourceTree = "<group>"; };
  9740EEB31CF90195004384FC /* Generated.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; name = Generated.xcconfig; path = Flutter/Generated.xcconfig; sourceTree = "<group>"; };
  97C146EE1CF9000F007C117D /* Runner.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Runner.app; sourceTree = BUILT_PRODUCTS_DIR; };
  97C146EE1CF9000F007C117D /* Immich-Debug.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "Immich-Debug.app"; sourceTree = BUILT_PRODUCTS_DIR; };
  97C146FB1CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
  97C146FD1CF9000F007C117D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
  97C147001CF9000F007C117D /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
@@ -118,7 +118,7 @@
  97C146EF1CF9000F007C117D /* Products */ = {
    isa = PBXGroup;
    children = (
      97C146EE1CF9000F007C117D /* Runner.app */,
      97C146EE1CF9000F007C117D /* Immich-Debug.app */,
    );
    name = Products;
    sourceTree = "<group>";
@@ -162,7 +162,7 @@
    );
    name = Runner;
    productName = Runner;
    productReference = 97C146EE1CF9000F007C117D /* Runner.app */;
    productReference = 97C146EE1CF9000F007C117D /* Immich-Debug.app */;
    productType = "com.apple.product-type.application";
  };
/* End PBXNativeTarget section */
@@ -265,7 +265,7 @@
    );
    runOnlyForDeploymentPostprocessing = 0;
    shellPath = /bin/sh;
    shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build";
    shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build\n";
  };
  D218A34AEE62BC1EF119F5B0 /* [CP] Embed Pods Frameworks */ = {
    isa = PBXShellScriptBuildPhase;
@@ -383,7 +383,7 @@
    CODE_SIGN_ENTITLEMENTS = Runner/RunnerProfile.entitlements;
    CODE_SIGN_IDENTITY = "Apple Development";
    CODE_SIGN_STYLE = Automatic;
    CURRENT_PROJECT_VERSION = 160;
    CURRENT_PROJECT_VERSION = 163;
    DEVELOPMENT_TEAM = 2F67MQ8R79;
    ENABLE_BITCODE = NO;
    INFOPLIST_FILE = Runner/Info.plist;
@@ -392,9 +392,9 @@
      "$(inherited)",
      "@executable_path/Frameworks",
    );
    MARKETING_VERSION = "$(FLUTTER_BUILD_NAME)";
    PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich;
    PRODUCT_NAME = "$(TARGET_NAME)";
    MARKETING_VERSION = 1.107.0;
    PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich.profile;
    PRODUCT_NAME = "Immich-Profile";
    PROVISIONING_PROFILE_SPECIFIER = "";
    SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
    SWIFT_VERSION = 5.0;
@@ -525,7 +525,7 @@
    CLANG_ENABLE_MODULES = YES;
    CODE_SIGN_IDENTITY = "Apple Development";
    CODE_SIGN_STYLE = Automatic;
    CURRENT_PROJECT_VERSION = 160;
    CURRENT_PROJECT_VERSION = 163;
    DEVELOPMENT_TEAM = 2F67MQ8R79;
    ENABLE_BITCODE = NO;
    INFOPLIST_FILE = Runner/Info.plist;
@@ -534,9 +534,9 @@
      "$(inherited)",
      "@executable_path/Frameworks",
    );
    MARKETING_VERSION = "$(FLUTTER_BUILD_NAME)";
    PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich;
    PRODUCT_NAME = "$(TARGET_NAME)";
    MARKETING_VERSION = 1.107.0;
    PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich.debug;
    PRODUCT_NAME = "Immich-Debug";
    PROVISIONING_PROFILE_SPECIFIER = "";
    SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
    SWIFT_OPTIMIZATION_LEVEL = "-Onone";
@@ -553,7 +553,7 @@
    CLANG_ENABLE_MODULES = YES;
    CODE_SIGN_IDENTITY = "Apple Development";
    CODE_SIGN_STYLE = Automatic;
    CURRENT_PROJECT_VERSION = 160;
    CURRENT_PROJECT_VERSION = 163;
    DEVELOPMENT_TEAM = 2F67MQ8R79;
    ENABLE_BITCODE = NO;
    INFOPLIST_FILE = Runner/Info.plist;
@@ -562,9 +562,9 @@
      "$(inherited)",
      "@executable_path/Frameworks",
    );
    MARKETING_VERSION = "$(FLUTTER_BUILD_NAME)";
    MARKETING_VERSION = 1.107.0;
    PRODUCT_BUNDLE_IDENTIFIER = app.alextran.immich;
    PRODUCT_NAME = "$(TARGET_NAME)";
    PRODUCT_NAME = Immich;
    PROVISIONING_PROFILE_SPECIFIER = "";
    SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
    SWIFT_VERSION = 5.0;

@@ -15,7 +15,7 @@
  <BuildableReference
    BuildableIdentifier = "primary"
    BlueprintIdentifier = "97C146ED1CF9000F007C117D"
    BuildableName = "Runner.app"
    BuildableName = "Immich.app"
    BlueprintName = "Runner"
    ReferencedContainer = "container:Runner.xcodeproj">
  </BuildableReference>
@@ -31,7 +31,7 @@
  <BuildableReference
    BuildableIdentifier = "primary"
    BlueprintIdentifier = "97C146ED1CF9000F007C117D"
    BuildableName = "Runner.app"
    BuildableName = "Immich.app"
    BlueprintName = "Runner"
    ReferencedContainer = "container:Runner.xcodeproj">
  </BuildableReference>
@@ -56,7 +56,7 @@
  <BuildableReference
    BuildableIdentifier = "primary"
    BlueprintIdentifier = "97C146ED1CF9000F007C117D"
    BuildableName = "Runner.app"
    BuildableName = "Immich.app"
    BlueprintName = "Runner"
    ReferencedContainer = "container:Runner.xcodeproj">
  </BuildableReference>
@@ -73,7 +73,7 @@
  <BuildableReference
    BuildableIdentifier = "primary"
    BlueprintIdentifier = "97C146ED1CF9000F007C117D"
    BuildableName = "Runner.app"
    BuildableName = "Immich.app"
    BlueprintName = "Runner"
    ReferencedContainer = "container:Runner.xcodeproj">
  </BuildableReference>

@@ -1,124 +1,124 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
  <key>BGTaskSchedulerPermittedIdentifiers</key>
  <array>
    <string>app.alextran.immich.backgroundFetch</string>
    <string>app.alextran.immich.backgroundProcessing</string>
  </array>
  <key>CADisableMinimumFrameDurationOnPhone</key>
  <true />
  <key>CFBundleDevelopmentRegion</key>
  <string>$(DEVELOPMENT_LANGUAGE)</string>
  <key>CFBundleDisplayName</key>
  <string>Immich</string>
  <key>CFBundleExecutable</key>
  <string>$(EXECUTABLE_NAME)</string>
  <key>CFBundleIdentifier</key>
  <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
  <key>CFBundleInfoDictionaryVersion</key>
  <string>6.0</string>
  <key>CFBundleLocalizations</key>
  <array>
    <string>en</string>
    <string>ar</string>
    <string>ca</string>
    <string>cs</string>
    <string>da</string>
    <string>de</string>
    <string>es</string>
    <string>fi</string>
    <string>fr</string>
    <string>he</string>
    <string>hi</string>
    <string>hu</string>
    <string>it</string>
    <string>ja</string>
    <string>ko</string>
    <string>lv</string>
    <string>mn</string>
    <string>nb</string>
    <string>nl</string>
    <string>pl</string>
    <string>pt</string>
    <string>ro</string>
    <string>ru</string>
    <string>sk</string>
    <string>sl</string>
    <string>sr</string>
    <string>sv</string>
    <string>th</string>
    <string>uk</string>
    <string>vi</string>
    <string>zh</string>
  </array>
  <key>CFBundleName</key>
  <string>immich_mobile</string>
  <key>CFBundlePackageType</key>
  <string>APPL</string>
  <key>CFBundleShortVersionString</key>
  <string>1.106.3</string>
  <key>CFBundleSignature</key>
  <string>????</string>
  <key>CFBundleVersion</key>
  <string>160</string>
  <key>FLTEnableImpeller</key>
  <true />
  <key>ITSAppUsesNonExemptEncryption</key>
  <false />
  <key>LSApplicationQueriesSchemes</key>
  <array>
    <string>https</string>
  </array>
  <key>LSRequiresIPhoneOS</key>
  <true />
  <key>MGLMapboxMetricsEnabledSettingShownInApp</key>
  <true />
  <key>NSAppTransportSecurity</key>
  <dict>
    <key>NSAllowsArbitraryLoads</key>
    <true />
  </dict>
  <key>NSCameraUsageDescription</key>
  <string>We need to access the camera to let you take beautiful video using this app</string>
  <key>NSLocationWhenInUseUsageDescription</key>
  <string>Enable location setting to show position of assets on map</string>
  <key>NSMicrophoneUsageDescription</key>
  <string>We need to access the microphone to let you take beautiful video using this app</string>
  <key>NSPhotoLibraryAddUsageDescription</key>
  <string>We need to manage backup your photos album</string>
  <key>NSPhotoLibraryUsageDescription</key>
  <string>We need to manage backup your photos album</string>
  <key>UIApplicationSupportsIndirectInputEvents</key>
  <true />
  <key>UIBackgroundModes</key>
  <array>
    <string>fetch</string>
    <string>processing</string>
  </array>
  <key>UILaunchStoryboardName</key>
  <string>LaunchScreen</string>
  <key>UIMainStoryboardFile</key>
  <string>Main</string>
  <key>UIStatusBarHidden</key>
  <false />
  <key>UISupportedInterfaceOrientations</key>
  <array>
    <string>UIInterfaceOrientationPortrait</string>
    <string>UIInterfaceOrientationLandscapeLeft</string>
    <string>UIInterfaceOrientationLandscapeRight</string>
  </array>
  <key>UISupportedInterfaceOrientations~ipad</key>
  <array>
    <string>UIInterfaceOrientationPortrait</string>
    <string>UIInterfaceOrientationPortraitUpsideDown</string>
    <string>UIInterfaceOrientationLandscapeLeft</string>
    <string>UIInterfaceOrientationLandscapeRight</string>
  </array>
  <key>UIViewControllerBasedStatusBarAppearance</key>
  <true />
  <key>io.flutter.embedded_views_preview</key>
  <true />
</dict>
</plist>
<dict>
  <key>BGTaskSchedulerPermittedIdentifiers</key>
  <array>
    <string>app.alextran.immich.backgroundFetch</string>
    <string>app.alextran.immich.backgroundProcessing</string>
  </array>
  <key>CADisableMinimumFrameDurationOnPhone</key>
  <true/>
  <key>CFBundleDevelopmentRegion</key>
  <string>$(DEVELOPMENT_LANGUAGE)</string>
  <key>CFBundleDisplayName</key>
  <string>${PRODUCT_NAME}</string>
  <key>CFBundleExecutable</key>
  <string>$(EXECUTABLE_NAME)</string>
  <key>CFBundleIdentifier</key>
  <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
  <key>CFBundleInfoDictionaryVersion</key>
  <string>6.0</string>
  <key>CFBundleLocalizations</key>
  <array>
    <string>en</string>
    <string>ar</string>
    <string>ca</string>
    <string>cs</string>
    <string>da</string>
    <string>de</string>
    <string>es</string>
    <string>fi</string>
    <string>fr</string>
    <string>he</string>
    <string>hi</string>
    <string>hu</string>
    <string>it</string>
    <string>ja</string>
    <string>ko</string>
    <string>lv</string>
    <string>mn</string>
    <string>nb</string>
    <string>nl</string>
    <string>pl</string>
    <string>pt</string>
    <string>ro</string>
    <string>ru</string>
    <string>sk</string>
    <string>sl</string>
    <string>sr</string>
    <string>sv</string>
    <string>th</string>
    <string>uk</string>
    <string>vi</string>
    <string>zh</string>
  </array>
  <key>CFBundleName</key>
  <string>immich_mobile</string>
  <key>CFBundlePackageType</key>
  <string>APPL</string>
  <key>CFBundleShortVersionString</key>
  <string>1.108.0</string>
  <key>CFBundleSignature</key>
  <string>????</string>
  <key>CFBundleVersion</key>
  <string>163</string>
  <key>FLTEnableImpeller</key>
  <true/>
  <key>ITSAppUsesNonExemptEncryption</key>
  <false/>
  <key>LSApplicationQueriesSchemes</key>
  <array>
    <string>https</string>
  </array>
  <key>LSRequiresIPhoneOS</key>
  <true/>
  <key>MGLMapboxMetricsEnabledSettingShownInApp</key>
  <true/>
  <key>NSAppTransportSecurity</key>
  <dict>
    <key>NSAllowsArbitraryLoads</key>
    <true/>
  </dict>
  <key>NSCameraUsageDescription</key>
  <string>We need to access the camera to let you take beautiful video using this app</string>
  <key>NSLocationWhenInUseUsageDescription</key>
  <string>Enable location setting to show position of assets on map</string>
  <key>NSMicrophoneUsageDescription</key>
  <string>We need to access the microphone to let you take beautiful video using this app</string>
  <key>NSPhotoLibraryAddUsageDescription</key>
  <string>We need to manage backup your photos album</string>
  <key>NSPhotoLibraryUsageDescription</key>
  <string>We need to manage backup your photos album</string>
  <key>UIApplicationSupportsIndirectInputEvents</key>
  <true/>
  <key>UIBackgroundModes</key>
  <array>
    <string>fetch</string>
    <string>processing</string>
  </array>
  <key>UILaunchStoryboardName</key>
  <string>LaunchScreen</string>
  <key>UIMainStoryboardFile</key>
  <string>Main</string>
  <key>UIStatusBarHidden</key>
  <false/>
  <key>UISupportedInterfaceOrientations</key>
  <array>
    <string>UIInterfaceOrientationPortrait</string>
    <string>UIInterfaceOrientationLandscapeLeft</string>
    <string>UIInterfaceOrientationLandscapeRight</string>
  </array>
  <key>UISupportedInterfaceOrientations~ipad</key>
  <array>
    <string>UIInterfaceOrientationPortrait</string>
    <string>UIInterfaceOrientationPortraitUpsideDown</string>
    <string>UIInterfaceOrientationLandscapeLeft</string>
    <string>UIInterfaceOrientationLandscapeRight</string>
  </array>
  <key>UIViewControllerBasedStatusBarAppearance</key>
  <true/>
  <key>io.flutter.embedded_views_preview</key>
  <true/>
</dict>
</plist>

@@ -16,10 +16,10 @@
default_platform(:ios)

platform :ios do
  desc "iOS Beta"
  lane :beta do
  desc "iOS Release"
  lane :release do
    increment_version_number(
      version_number: "1.107.0"
      version_number: "1.108.0"
    )
    increment_build_number(
      build_number: latest_testflight_build_number + 1,

@@ -1,38 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites>
  <testsuite name="fastlane.lanes">

    <testcase classname="fastlane.lanes" name="0: default_platform" time="0.000491">
    </testcase>

    <testcase classname="fastlane.lanes" name="1: increment_version_number" time="39.414297">
    </testcase>

    <testcase classname="fastlane.lanes" name="2: latest_testflight_build_number" time="32.521647">
    </testcase>

    <testcase classname="fastlane.lanes" name="3: increment_build_number" time="0.511733">
    </testcase>

    <testcase classname="fastlane.lanes" name="4: build_app" time="202.628277">
    </testcase>

    <testcase classname="fastlane.lanes" name="5: upload_to_testflight" time="73.861852">
    </testcase>

  </testsuite>
</testsuites>
@@ -6,7 +6,7 @@ import 'package:immich_mobile/models/map/map_marker.model.dart';
import 'package:immich_mobile/utils/map_utils.dart';
import 'package:maplibre_gl/maplibre_gl.dart';

extension MapMarkers on MapLibreMapController {
extension MapMarkers on MaplibreMapController {
  static var _completer = Completer()..complete();

  Future<void> addGeoJSONSourceForMarkers(List<MapMarker> markers) async {

@@ -192,7 +192,7 @@ class ImmichAppState extends ConsumerState<ImmichApp>
      localizationsDelegates: context.localizationDelegates,
      supportedLocales: context.supportedLocales,
      locale: context.locale,
      debugShowCheckedModeBanner: false,
      debugShowCheckedModeBanner: true,
      home: MaterialApp.router(
        title: 'Immich',
        debugShowCheckedModeBanner: false,

@@ -166,7 +166,7 @@ class HeaderKeyValueSettings extends StatelessWidget {
          child: TextFormField(
            controller: valueController,
            decoration: InputDecoration(
              labelText: 'header_settings_header_name_input'.tr(),
              labelText: 'header_settings_header_value_input'.tr(),
              border: const OutlineInputBorder(),
            ),
            autocorrect: false,

@@ -36,7 +36,7 @@ class MapPage extends HookConsumerWidget {

  @override
  Widget build(BuildContext context, WidgetRef ref) {
    final mapController = useRef<MapLibreMapController?>(null);
    final mapController = useRef<MaplibreMapController?>(null);
    final markers = useRef<List<MapMarker>>([]);
    final markersInBounds = useRef<List<MapMarker>>([]);
    final bottomSheetStreamController = useStreamController<MapEvent>();
@@ -156,7 +156,7 @@ class MapPage extends HookConsumerWidget {
      }
    }

    void onMapCreated(MapLibreMapController controller) async {
    void onMapCreated(MaplibreMapController controller) async {
      mapController.value = controller;
      controller.addListener(() {
        if (controller.isCameraMoving && selectedMarker.value != null) {
@@ -379,7 +379,7 @@ class _MapWithMarker extends StatelessWidget {
        child: Stack(
          children: [
            style.widgetWhen(
              onData: (style) => MapLibreMap(
              onData: (style) => MaplibreMap(
                initialCameraPosition:
                    const CameraPosition(target: LatLng(0, 0)),
                styleString: style,
@@ -393,7 +393,7 @@ class _MapWithMarker extends StatelessWidget {
                tiltGesturesEnabled: false,
                dragEnabled: false,
                myLocationEnabled: false,
                attributionButtonPosition: AttributionButtonPosition.topRight,
                attributionButtonPosition: AttributionButtonPosition.TopRight,
                rotateGesturesEnabled: false,
              ),
            ),

@@ -24,7 +24,7 @@ class MapLocationPickerPage extends HookConsumerWidget {
  @override
  Widget build(BuildContext context, WidgetRef ref) {
    final selectedLatLng = useValueNotifier<LatLng>(initialLatLng);
    final controller = useRef<MapLibreMapController?>(null);
    final controller = useRef<MaplibreMapController?>(null);
    final marker = useRef<Symbol?>(null);

    Future<void> onStyleLoaded() async {
@@ -74,7 +74,7 @@ class MapLocationPickerPage extends HookConsumerWidget {
                bottomRight: Radius.circular(40),
              ),
            ),
            child: MapLibreMap(
            child: MaplibreMap(
              initialCameraPosition:
                  CameraPosition(target: initialLatLng, zoom: 12),
              styleString: style,

@@ -199,9 +199,12 @@ class SearchInputPage extends HookConsumerWidget {
          padding: EdgeInsets.only(
            bottom: MediaQuery.of(context).viewInsets.bottom,
          ),
          child: LocationPicker(
            onSelected: handleOnSelect,
            filter: filter.value.location,
          child: Padding(
            padding: const EdgeInsets.symmetric(horizontal: 16.0),
            child: LocationPicker(
              onSelected: handleOnSelect,
              filter: filter.value.location,
            ),
          ),
        ),
      ),
@@ -242,7 +245,7 @@ class SearchInputPage extends HookConsumerWidget {
        onSearch: search,
        onClear: handleClear,
        child: Padding(
          padding: const EdgeInsets.symmetric(vertical: 16.0),
          padding: const EdgeInsets.all(16.0),
          child: CameraPicker(
            onSelect: handleOnSelect,
            filter: filter.value.camera,

@@ -41,7 +41,7 @@ final getAllPlacesProvider =
  final curatedContent = assetPlaces
      .map(
        (data) => SearchCuratedContent(
          label: data.exifInfo?.city ?? '',
          label: data.exifInfo!.city!,
          id: data.id,
        ),
      )

@@ -1,8 +1,13 @@
import 'dart:async';
import 'dart:collection';
import 'dart:io';

import 'package:collection/collection.dart';
import 'package:flutter/foundation.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/models/albums/album_add_asset_response.model.dart';
import 'package:immich_mobile/entities/backup_album.entity.dart';
import 'package:immich_mobile/services/backup.service.dart';
import 'package:immich_mobile/entities/album.entity.dart';
import 'package:immich_mobile/entities/asset.entity.dart';
import 'package:immich_mobile/entities/store.entity.dart';
@@ -23,6 +28,7 @@ final albumServiceProvider = Provider(
    ref.watch(userServiceProvider),
    ref.watch(syncServiceProvider),
    ref.watch(dbProvider),
    ref.watch(backupServiceProvider),
  ),
);

@@ -31,6 +37,7 @@ class AlbumService {
  final UserService _userService;
  final SyncService _syncService;
  final Isar _db;
  final BackupService _backupService;
  final Logger _log = Logger('AlbumService');
  Completer<bool> _localCompleter = Completer()..complete(false);
  Completer<bool> _remoteCompleter = Completer()..complete(false);
@@ -40,6 +47,7 @@ class AlbumService {
    this._userService,
    this._syncService,
    this._db,
    this._backupService,
  );

  /// Checks all selected device albums for changes of albums and their assets
@@ -54,14 +62,60 @@ class AlbumService {
    final Stopwatch sw = Stopwatch()..start();
    bool changes = false;
    try {
      final List<String> excludedIds =
          await _backupService.excludedAlbumsQuery().idProperty().findAll();
      final List<String> selectedIds =
          await _backupService.selectedAlbumsQuery().idProperty().findAll();
      if (selectedIds.isEmpty) {
        final numLocal = await _db.albums.where().localIdIsNotNull().count();
        if (numLocal > 0) {
          _syncService.removeAllLocalAlbumsAndAssets();
        }
        return false;
      }
      final List<AssetPathEntity> onDevice =
          await PhotoManager.getAssetPathList(
        hasAll: true,
        filterOption: FilterOptionGroup(containsPathModified: true),
      );
      _log.info("Found ${onDevice.length} device albums");

      changes = await _syncService.syncLocalAlbumAssetsToDb(onDevice);
      Set<String>? excludedAssets;
      if (excludedIds.isNotEmpty) {
        if (Platform.isIOS) {
          // iOS and Android device album working principles differ significantly:
          // on iOS, an asset can be in multiple albums;
          // on Android, an asset can only be in exactly one album (folder!) at the same time.
          // Thus, on Android, excluding an album can be done by ignoring that album.
          // However, on iOS, it is necessary to load the assets from all excluded
          // albums and check every asset from any selected album against the set
          // of excluded assets.
          excludedAssets = await _loadExcludedAssetIds(onDevice, excludedIds);
          _log.info("Found ${excludedAssets.length} assets to exclude");
        }
        // remove all excluded albums
        onDevice.removeWhere((e) => excludedIds.contains(e.id));
        _log.info(
          "Ignoring ${excludedIds.length} excluded albums resulting in ${onDevice.length} device albums",
        );
      }
      final hasAll = selectedIds
          .map((id) => onDevice.firstWhereOrNull((a) => a.id == id))
          .whereNotNull()
          .any((a) => a.isAll);
      if (hasAll) {
        if (Platform.isAndroid) {
          // remove the virtual "Recent" album and keep all individual albums;
          // on Android, the virtual "Recent" `lastModified` value is always null
          onDevice.removeWhere((e) => e.isAll);
          _log.info("'Recents' is selected, keeping all individual albums");
        }
      } else {
        // keep only the explicitly selected albums
        onDevice.removeWhere((e) => !selectedIds.contains(e.id));
        _log.info("'Recents' is not selected, keeping only selected albums");
      }
      changes =
          await _syncService.syncLocalAlbumAssetsToDb(onDevice, excludedAssets);
      _log.info("Syncing completed. Changes: $changes");
    } finally {
      _localCompleter.complete(changes);
@@ -70,6 +124,21 @@ class AlbumService {
    return changes;
  }

  Future<Set<String>> _loadExcludedAssetIds(
    List<AssetPathEntity> albums,
    List<String> excludedAlbumIds,
  ) async {
    final Set<String> result = HashSet<String>();
    for (AssetPathEntity a in albums) {
      if (excludedAlbumIds.contains(a.id)) {
        final List<AssetEntity> assets =
            await a.getAssetListRange(start: 0, end: 0x7fffffffffffffff);
        result.addAll(assets.map((e) => e.id));
      }
    }
    return result;
  }

/// Checks remote albums (owned if `isShared` is false) for changes,
|
||||
/// updates the local database and returns `true` if there were any changes
|
||||
Future<bool> refreshRemoteAlbums({required bool isShared}) async {
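
The hunk above introduces the exclusion-set approach described in the comments. Below is a minimal, self-contained sketch of that strategy; `DeviceAlbum` and `DeviceAsset` are hypothetical stand-ins for the photo_manager types `AssetPathEntity` and `AssetEntity`, which require a real device to query:

```dart
// Hedged sketch of the iOS exclusion strategy: collect the IDs of every
// asset in an excluded album, then drop those assets from selected albums.
class DeviceAsset {
  final String id;
  const DeviceAsset(this.id);
}

class DeviceAlbum {
  final String id;
  final List<DeviceAsset> assets;
  const DeviceAlbum(this.id, this.assets);
}

/// Collects the IDs of all assets contained in any excluded album.
Set<String> loadExcludedAssetIds(
  List<DeviceAlbum> albums,
  List<String> excludedAlbumIds,
) {
  final result = <String>{};
  for (final album in albums) {
    if (excludedAlbumIds.contains(album.id)) {
      result.addAll(album.assets.map((a) => a.id));
    }
  }
  return result;
}

void main() {
  final albums = [
    DeviceAlbum('camera', [DeviceAsset('1'), DeviceAsset('2')]),
    DeviceAlbum('screenshots', [DeviceAsset('2'), DeviceAsset('3')]),
  ];
  // Asset '2' is excluded everywhere, even though it is also in the
  // selected 'camera' album: on iOS one asset can live in several albums.
  print(loadExcludedAssetIds(albums, ['screenshots'])); // {2, 3}
}
```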
@@ -24,9 +24,13 @@ class HashService
    AssetPathEntity album, {
    int start = 0,
    int end = 0x7fffffffffffffff,
    Set<String>? excludedAssets,
  }) async {
    final entities = await album.getAssetListRange(start: start, end: end);
    return _hashAssets(entities);
    final filtered = excludedAssets == null
        ? entities
        : entities.where((e) => !excludedAssets.contains(e.id)).toList();
    return _hashAssets(filtered);
  }

  /// Converts a list of [AssetEntity]s to [Asset]s including only those
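
This change replaces an unconditional hash of all entities with a filter-then-hash. A standalone sketch of the pattern under illustrative names (not the service's real API), where a null set means "no exclusions" and skips the copy:

```dart
/// Filters [items] down to those whose id is not in [excluded].
/// A null or empty [excluded] set returns the original list unchanged.
List<T> applyExclusions<T>(
  List<T> items,
  Set<String>? excluded,
  String Function(T) idOf,
) {
  if (excluded == null || excluded.isEmpty) return items;
  return items.where((e) => !excluded.contains(idOf(e))).toList();
}

void main() {
  final ids = ['a', 'b', 'c'];
  print(applyExclusions(ids, {'b'}, (e) => e)); // [a, c]
  print(applyExclusions(ids, null, (e) => e)); // [a, b, c] (same list object)
}
```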
@@ -14,17 +14,6 @@ final partnerServiceProvider = Provider(
  ),
);

enum PartnerDirection {
  sharedWith("shared-with"),
  sharedBy("shared-by");

  const PartnerDirection(
    this._value,
  );

  final String _value;
}

class PartnerService {
  final ApiService _apiService;
  final Isar _db;
@@ -34,8 +23,7 @@ class PartnerService

  Future<List<User>?> getPartners(PartnerDirection direction) async {
    try {
      final userDtos =
          await _apiService.partnersApi.getPartners(direction._value);
      final userDtos = await _apiService.partnersApi.getPartners(direction);
      if (userDtos != null) {
        return userDtos.map((u) => User.fromPartnerDto(u)).toList();
      }
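
This hunk deletes the hand-rolled `PartnerDirection` enum in favor of the OpenAPI-generated one, so call sites now pass the enum straight through instead of unwrapping a private wire value. For reference, a minimal sketch of the enhanced-enum pattern being removed (the enum name here is illustrative, not the real class):

```dart
// The deleted pattern: an enhanced enum carrying its wire value in a field,
// forcing every call site to unwrap the raw string. The generated enum
// handles serialization inside the API client instead.
enum LocalPartnerDirection {
  sharedWith('shared-with'),
  sharedBy('shared-by');

  const LocalPartnerDirection(this.value);
  final String value;
}

void main() {
  // Before the change, call sites had to reach for the raw string:
  print(LocalPartnerDirection.sharedBy.value); // shared-by
}
```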
@@ -68,9 +68,10 @@ class SyncService
  /// Syncs all device albums and their assets to the database
  /// Returns `true` if there were any changes
  Future<bool> syncLocalAlbumAssetsToDb(
    List<AssetPathEntity> onDevice,
  ) =>
      _lock.run(() => _syncLocalAlbumAssetsToDb(onDevice));
    List<AssetPathEntity> onDevice, [
    Set<String>? excludedAssets,
  ]) =>
      _lock.run(() => _syncLocalAlbumAssetsToDb(onDevice, excludedAssets));

  /// returns all Asset IDs that are not contained in the existing list
  List<int> sharedAssetsToRemove(
@@ -491,8 +492,9 @@ class SyncService
  /// Syncs all device albums and their assets to the database
  /// Returns `true` if there were any changes
  Future<bool> _syncLocalAlbumAssetsToDb(
    List<AssetPathEntity> onDevice,
  ) async {
    List<AssetPathEntity> onDevice, [
    Set<String>? excludedAssets,
  ]) async {
    onDevice.sort((a, b) => a.id.compareTo(b.id));
    final inDb =
        await _db.albums.where().localIdIsNotNull().sortByLocalId().findAll();
@@ -508,8 +510,10 @@ class SyncService
        album,
        deleteCandidates,
        existing,
        excludedAssets,
      ),
      onlyFirst: (AssetPathEntity ape) => _addAlbumFromDevice(ape, existing),
      onlyFirst: (AssetPathEntity ape) =>
          _addAlbumFromDevice(ape, existing, excludedAssets),
      onlySecond: (Album a) => _removeAlbumFromDb(a, deleteCandidates),
    );
    _log.fine(
@@ -541,13 +545,16 @@ class SyncService
    Album album,
    List<Asset> deleteCandidates,
    List<Asset> existing, [
    Set<String>? excludedAssets,
    bool forceRefresh = false,
  ]) async {
    if (!forceRefresh && !await _hasAssetPathEntityChanged(ape, album)) {
      _log.fine("Local album ${ape.name} has not changed. Skipping sync.");
      return false;
    }
    if (!forceRefresh && await _syncDeviceAlbumFast(ape, album)) {
    if (!forceRefresh &&
        excludedAssets == null &&
        await _syncDeviceAlbumFast(ape, album)) {
      return true;
    }

@@ -559,7 +566,8 @@ class SyncService
        .findAll();
    assert(inDb.isSorted(Asset.compareByChecksum), "inDb not sorted!");
    final int assetCountOnDevice = await ape.assetCountAsync;
    final List<Asset> onDevice = await _hashService.getHashedAssets(ape);
    final List<Asset> onDevice =
        await _hashService.getHashedAssets(ape, excludedAssets: excludedAssets);
    _removeDuplicates(onDevice);
    // _removeDuplicates sorts `onDevice` by checksum
    final (toAdd, toUpdate, toDelete) = _diffAssets(onDevice, inDb);
@@ -670,11 +678,13 @@ class SyncService
  /// assets already existing in the database to the list of `existing` assets
  Future<void> _addAlbumFromDevice(
    AssetPathEntity ape,
    List<Asset> existing,
  ) async {
    List<Asset> existing, [
    Set<String>? excludedAssets,
  ]) async {
    _log.info("Syncing a new local album to DB: ${ape.name}");
    final Album a = Album.local(ape);
    final assets = await _hashService.getHashedAssets(ape);
    final assets =
        await _hashService.getHashedAssets(ape, excludedAssets: excludedAssets);
    _removeDuplicates(assets);
    final (existingInDb, updated) = await _linkWithExistingFromDb(assets);
    _log.info(
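
Throughout this hunk the exclusion set is threaded through as an optional positional parameter, `[Set<String>? excludedAssets]`, so every existing call site keeps compiling unchanged; the `_syncDeviceAlbumFast` shortcut is also skipped whenever exclusions are active, since a count-based fast path cannot know which assets were filtered out. A trivial sketch of the Dart idiom (names illustrative, not the real SyncService API):

```dart
// Optional positional parameter: callers that don't care about exclusions
// keep their old one-argument call; new callers can pass a set.
Future<bool> syncAlbums(List<String> albums, [Set<String>? excluded]) async {
  final kept = excluded == null
      ? albums
      : albums.where((a) => !excluded.contains(a)).toList();
  return kept.isNotEmpty; // stand-in for "were there any changes?"
}

Future<void> main() async {
  print(await syncAlbums(['camera'])); // old call site: true
  print(await syncAlbums(['camera'], {'camera'})); // new call site: false
}
```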
@@ -73,9 +73,9 @@ class UserService
  Future<List<User>?> getUsersFromServer() async {
    final List<User>? users = await _getAllUsers();
    final List<User>? sharedBy =
        await _partnerService.getPartners(PartnerDirection.sharedBy);
        await _partnerService.getPartners(PartnerDirection.by);
    final List<User>? sharedWith =
        await _partnerService.getPartners(PartnerDirection.sharedWith);
        await _partnerService.getPartners(PartnerDirection.with_);

    if (users == null || sharedBy == null || sharedWith == null) {
      _log.warning("Failed to refresh users");
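
The generated enum values are `by` and `with_` rather than `sharedBy`/`sharedWith`; the trailing underscore is presumably the generator's way of avoiding Dart's reserved word `with`. A tiny illustration (this enum is a stand-in, not the generated `PartnerDirection`):

```dart
// `with` is a reserved word in Dart, so an enum value cannot be named `with`;
// code generators typically append an underscore to sidestep the collision.
enum Direction { by, with_ }

void main() {
  print(Direction.with_.name); // with_
}
```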
@@ -194,7 +194,7 @@ class ExifBottomSheet extends HookConsumerWidget
              exifInfo.mm != null ||
              exifInfo.iso != null
          ? Text(
              "ƒ/${exifInfo.fNumber} ${exifInfo.exposureTime} s ${exifInfo.focalLength} mm ISO ${exifInfo.iso ?? ''} ",
              "ƒ/${exifInfo.fNumber} ${exifInfo.exposureTime} ${exifInfo.focalLength} mm ISO ${exifInfo.iso ?? ''} ",
              style: context.textTheme.bodySmall,
            )
          : null,
@@ -49,7 +49,7 @@ class ExifDetail extends StatelessWidget
              exifInfo?.mm != null ||
              exifInfo?.iso != null
          ? Text(
              "ƒ/${exifInfo?.fNumber} ${exifInfo?.exposureTime} s ${exifInfo?.focalLength} mm ISO ${exifInfo?.iso ?? ''} ",
              "ƒ/${exifInfo?.fNumber} ${exifInfo?.exposureTime} ${exifInfo?.focalLength} mm ISO ${exifInfo?.iso ?? ''} ",
              style: context.textTheme.bodySmall,
            )
          : null,
@@ -41,10 +41,10 @@ class MapThumbnail extends HookConsumerWidget
  @override
  Widget build(BuildContext context, WidgetRef ref) {
    final offsettedCentre = LatLng(centre.latitude + 0.002, centre.longitude);
    final controller = useRef<MapLibreMapController?>(null);
    final controller = useRef<MaplibreMapController?>(null);
    final position = useValueNotifier<Point<num>?>(null);

    Future<void> onMapCreated(MapLibreMapController mapController) async {
    Future<void> onMapCreated(MaplibreMapController mapController) async {
      controller.value = mapController;
      if (assetMarkerRemoteId != null) {
        // The iOS impl returns wrong toScreenLocation without the delay
@@ -73,7 +73,7 @@ class MapThumbnail extends HookConsumerWidget
      alignment: Alignment.center,
      children: [
        style.widgetWhen(
          onData: (style) => MapLibreMap(
          onData: (style) => MaplibreMap(
            initialCameraPosition:
                CameraPosition(target: offsettedCentre, zoom: zoom),
            styleString: style,
@@ -38,10 +38,7 @@ class FilterBottomSheetScaffold extends StatelessWidget
            style: context.textTheme.headlineSmall,
          ),
        ),
        Padding(
          padding: const EdgeInsets.symmetric(horizontal: 16),
          child: buildChildWidget(),
        ),
        buildChildWidget(),
        Padding(
          padding: const EdgeInsets.all(8.0),
          child: Row(
3
mobile/openapi/README.md
generated
@@ -3,7 +3,7 @@ Immich API

This Dart package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:

- API version: 1.107.0
- API version: 1.108.0
- Generator version: 7.5.0
- Build package: org.openapitools.codegen.languages.DartClientCodegen

@@ -353,6 +353,7 @@ Class | Method | HTTP request | Description
- [OAuthCallbackDto](doc//OAuthCallbackDto.md)
- [OAuthConfigDto](doc//OAuthConfigDto.md)
- [OnThisDayDto](doc//OnThisDayDto.md)
- [PartnerDirection](doc//PartnerDirection.md)
- [PartnerResponseDto](doc//PartnerResponseDto.md)
- [PathEntityType](doc//PathEntityType.md)
- [PathType](doc//PathType.md)
1
mobile/openapi/lib/api.dart
generated
@@ -166,6 +166,7 @@ part 'model/o_auth_authorize_response_dto.dart';
part 'model/o_auth_callback_dto.dart';
part 'model/o_auth_config_dto.dart';
part 'model/on_this_day_dto.dart';
part 'model/partner_direction.dart';
part 'model/partner_response_dto.dart';
part 'model/path_entity_type.dart';
part 'model/path_type.dart';
Some files were not shown because too many files have changed in this diff