Compare commits: fix/failed...v1.139.0 (144 commits)

| Author | SHA1 | Date |
|---|---|---|
| | b69470c69e | |
| | 34255453b1 | |
| | 4e03b06ff7 | |
| | 9bb211f56f | |
| | 6f4f79d8cc | |
| | ed3997d844 | |
| | fb59fa343d | |
| | ab2849781a | |
| | 66c657ca8a | |
| | c245208106 | |
| | 99d6673503 | |
| | 9ff37b6870 | |
| | 0e7816130b | |
| | a1beb0a87d | |
| | c4ac2e345f | |
| | f422b341d1 | |
| | 90538d2535 | |
| | abc7bfa0ba | |
| | 60a809d7b4 | |
| | cda7249a6a | |
| | 47566c1a4a | |
| | f08002d48f | |
| | 7186914531 | |
| | d38ab93484 | |
| | 845b0f2073 | |
| | acb1e513a7 | |
| | 4d4e54967d | |
| | e2dcebfe6c | |
| | d4f2b43f64 | |
| | f343b0e58f | |
| | a8b4a5e856 | |
| | e7e030279b | |
| | 9ff664ed36 | |
| | e00556a34a | |
| | a313e4338e | |
| | 257b0c74af | |
| | 3d515f5072 | |
| | ec01db5c8b | |
| | cd6d8fcdfe | |
| | 1198311d64 | |
| | 1a4eab9655 | |
| | 1926c90780 | |
| | 4d5975b717 | |
| | 8cbd6b29c4 | |
| | 8c1b630a2b | |
| | c961d2aaf7 | |
| | 41c75dc93e | |
| | f92247c99b | |
| | 53f9fc2d1c | |
| | bede19a3ca | |
| | aefa62b234 | |
| | b3fb831994 | |
| | 0d60199514 | |
| | 54960157c0 | |
| | 244d097d01 | |
| | adb55f3726 | |
| | 5d2777a5c6 | |
| | 24db881c14 | |
| | f09bed9ad2 | |
| | e29cc66361 | |
| | 669b765662 | |
| | e7060dc292 | |
| | 03a8b6cb38 | |
| | f317cbe221 | |
| | d6d31c6695 | |
| | 4b9019e762 | |
| | 13563fc507 | |
| | 2ce4f8dd3b | |
| | 538d5c81ea | |
| | 9ecaa3fa9d | |
| | b1aacfdbd9 | |
| | cfbc24579d | |
| | 1d4d8e7a9a | |
| | 7b83b7b2d5 | |
| | a896c5a4dd | |
| | c74989d304 | |
| | 1283491cc2 | |
| | 89522daaac | |
| | 011a667314 | |
| | df2525ee08 | |
| | 01a9f735c8 | |
| | af10c3bc2f | |
| | 395f2e155d | |
| | 10cbed55c4 | |
| | 325d5f7ba9 | |
| | 746252fe39 | |
| | f36efd128b | |
| | f1c494ef97 | |
| | 9c8c52874a | |
| | 68b617130a | |
| | 89292fecb4 | |
| | 1193a23282 | |
| | bbfff64927 | |
| | c5c9a522c1 | |
| | 3cd7f5ab90 | |
| | f2067221c5 | |
| | 89598cf0be | |
| | 0121043d7d | |
| | 1ca46fbd98 | |
| | 6ddef3a7e4 | |
| | 0d9ebdc46a | |
| | fa26d0de33 | |
| | a5760129f0 | |
| | d430b869ac | |
| | 4179c8a17d | |
| | 0a9cbf01d2 | |
| | 9567a2a560 | |
| | 58dd6f094c | |
| | 02381343ff | |
| | 09a5963eee | |
| | a573a23c83 | |
| | 7118dca559 | |
| | 13d43e193e | |
| | 7a7843467c | |
| | 9e6fee4064 | |
| | 9680f1290d | |
| | ce2ea98926 | |
| | 5c76cc34e1 | |
| | eb2f4c866e | |
| | 2a370087e8 | |
| | 272c8a5812 | |
| | 08fe549ed8 | |
| | ae15efdf2a | |
| | 8e003f95db | |
| | 3e92e837f1 | |
| | 081307ced2 | |
| | a91bb399f0 | |
| | 42b78c59b5 | |
| | 750d21aeba | |
| | 990d9ba9a8 | |
| | 4d0c9172e5 | |
| | 094e3a2757 | |
| | 278668b8c5 | |
| | 10141504a2 | |
| | 67736c8fce | |
| | b56a272f64 | |
| | 5901c2e963 | |
| | be85832b20 | |
| | c8f9a72d3e | |
| | 3d633a81c4 | |
| | 4efbf36d82 | |
| | e2c3c39597 | |
| | 007ba1d9ef | |
| | 4d5cd1a6b5 | |
@@ -49,10 +49,11 @@ fix_permissions() {

  log "Fixing permissions for ${IMMICH_WORKSPACE}"

  run_cmd sudo find "${IMMICH_WORKSPACE}/server/upload" -not -path "${IMMICH_WORKSPACE}/server/upload/postgres/*" -not -path "${IMMICH_WORKSPACE}/server/upload/postgres" -exec chown node {} +

  # Change ownership for directories that exist
  for dir in "${IMMICH_WORKSPACE}/.vscode" \
    "${IMMICH_WORKSPACE}/server/upload" \
    "${IMMICH_WORKSPACE}/.pnpm-store" \
    "${IMMICH_WORKSPACE}/.github/node_modules" \
    "${IMMICH_WORKSPACE}/cli/node_modules" \
    "${IMMICH_WORKSPACE}/e2e/node_modules" \
    "${IMMICH_WORKSPACE}/open-api/typescript-sdk/node_modules" \
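The `find ... -exec chown {} +` line in the hunk above fixes ownership of everything under the upload directory while skipping the Postgres data directory. A minimal sketch of the same exclusion pattern, using placeholder paths rather than the repository's variables:

```bash
# Sketch: chown everything under ./upload except ./upload/postgres (placeholder paths).
# The two -not -path tests prune the directory itself and anything inside it;
# "-exec chown node {} +" batches many paths into a single chown invocation,
# which is faster than running chown once per file with "{} \;".
sudo find ./upload \
  -not -path "./upload/postgres" \
  -not -path "./upload/postgres/*" \
  -exec chown node {} +
```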
@@ -8,21 +8,13 @@ services:
      - IMMICH_SERVER_URL=http://127.0.0.1:2283/
    volumes: !override
      - ..:/workspaces/immich
      - cli_node_modules:/workspaces/immich/cli/node_modules
      - e2e_node_modules:/workspaces/immich/e2e/node_modules
      - open_api_node_modules:/workspaces/immich/open-api/typescript-sdk/node_modules
      - server_node_modules:/workspaces/immich/server/node_modules
      - web_node_modules:/workspaces/immich/web/node_modules
      - ${UPLOAD_LOCATION:-upload1-devcontainer-volume}${UPLOAD_LOCATION:+/photos}:/data
      - ${UPLOAD_LOCATION:-upload2-devcontainer-volume}${UPLOAD_LOCATION:+/photos/upload}:/data/upload
      - /etc/localtime:/etc/localtime:ro

  immich-web:
    env_file: !reset []

  immich-machine-learning:
    env_file: !reset []

  database:
    env_file: !reset []
    environment: !override
@@ -33,17 +25,10 @@ services:
      POSTGRES_HOST_AUTH_METHOD: md5
    volumes:
      - ${UPLOAD_LOCATION:-postgres-devcontainer-volume}${UPLOAD_LOCATION:+/postgres}:/var/lib/postgresql/data

  redis:
    env_file: !reset []

volumes:
  # Node modules for each service to avoid conflicts and ensure consistent dependencies
  cli_node_modules:
  e2e_node_modules:
  open_api_node_modules:
  server_node_modules:
  web_node_modules:
  upload1-devcontainer-volume:
  upload2-devcontainer-volume:
  postgres-devcontainer-volume:
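The `${UPLOAD_LOCATION:-...}${UPLOAD_LOCATION:+/photos}` mount sources above combine two shell parameter expansions: `:-` falls back to a named Docker volume when `UPLOAD_LOCATION` is unset, and `:+` appends a sub-path only when it is set. A small shell sketch of how the two cases resolve (the host path is illustrative):

```bash
# Unset: the named volume is used and no suffix is appended.
unset UPLOAD_LOCATION
echo "${UPLOAD_LOCATION:-upload1-devcontainer-volume}${UPLOAD_LOCATION:+/photos}"
# -> upload1-devcontainer-volume

# Set: the host path is used and /photos is appended.
UPLOAD_LOCATION=/mnt/media
echo "${UPLOAD_LOCATION:-upload1-devcontainer-volume}${UPLOAD_LOCATION:+/photos}"
# -> /mnt/media/photos
```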
@@ -3,15 +3,20 @@
# shellcheck disable=SC1091
source /immich-devcontainer/container-common.sh

log "Preparing Immich Nest API Server"
log ""
export CI=1
run_cmd pnpm --filter immich install

log "Starting Nest API Server"
log ""
cd "${IMMICH_WORKSPACE}/server" || (
  log "Immich workspace not found"
  exit 1
)

while true; do
  run_cmd node ./node_modules/.bin/nest start --debug "0.0.0.0:9230" --watch
  run_cmd pnpm --filter immich exec nest start --debug "0.0.0.0:9230" --watch
  log "Nest API Server crashed with exit code $?. Respawning in 3s ..."
  sleep 3
done
@@ -3,6 +3,13 @@
# shellcheck disable=SC1091
source /immich-devcontainer/container-common.sh

export CI=1
log "Preparing Immich Web Frontend"
log ""
run_cmd pnpm --filter @immich/sdk install
run_cmd pnpm --filter @immich/sdk build
run_cmd pnpm --filter immich-web install

log "Starting Immich Web Frontend"
log ""
cd "${IMMICH_WORKSPACE}/web" || (
@@ -16,7 +23,7 @@ until curl --output /dev/null --silent --head --fail "http://127.0.0.1:${IMMICH_
done

while true; do
  run_cmd node ./node_modules/.bin/vite dev --host 0.0.0.0 --port "${DEV_PORT}"
  run_cmd pnpm --filter immich-web exec vite dev --host 0.0.0.0 --port "${DEV_PORT}"
  log "Web crashed with exit code $?. Respawning in 3s ..."
  sleep 3
done
@@ -6,9 +6,6 @@ source /immich-devcontainer/container-common.sh
log "Setting up Immich dev container..."
fix_permissions

log "Installing npm dependencies (node_modules)..."
install_dependencies

log "Setup complete, please wait while backend and frontend services automatically start"
log
log "If necessary, the services may be manually started using"
2  .github/.nvmrc  vendored
@@ -1 +1 @@
22.17.1
22.18.0
5  .github/ISSUE_TEMPLATE/bug_report.yaml  vendored
@@ -64,6 +64,11 @@ body:
        - label: Web
        - label: Mobile

  - type: input
    attributes:
      label: Device make and model
      placeholder: Samsung S25 Android 16

  - type: textarea
    validations:
      required: true
28  .github/package-lock.json  generated  vendored
@@ -1,28 +0,0 @@
{
  "name": ".github",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "devDependencies": {
        "prettier": "^3.5.3"
      }
    },
    "node_modules/prettier": {
      "version": "3.6.2",
      "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.6.2.tgz",
      "integrity": "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==",
      "dev": true,
      "license": "MIT",
      "bin": {
        "prettier": "bin/prettier.cjs"
      },
      "engines": {
        "node": ">=14"
      },
      "funding": {
        "url": "https://github.com/prettier/prettier?sponsor=1"
      }
    }
  }
}
10
.github/workflows/build-mobile.yml
vendored
@@ -35,7 +35,7 @@ jobs:
|
||||
should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
@@ -61,7 +61,7 @@ jobs:
|
||||
runs-on: mich
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
ref: ${{ inputs.ref || github.sha }}
|
||||
persist-credentials: false
|
||||
@@ -79,7 +79,7 @@ jobs:
|
||||
|
||||
- name: Restore Gradle Cache
|
||||
id: cache-gradle-restore
|
||||
uses: actions/cache/restore@5a3ec84eff668545956fd18022155c47e93e2684 # v4
|
||||
uses: actions/cache/restore@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
|
||||
with:
|
||||
path: |
|
||||
~/.gradle/caches
|
||||
@@ -106,7 +106,7 @@ jobs:
|
||||
run: flutter pub get
|
||||
|
||||
- name: Generate translation file
|
||||
run: make translation
|
||||
run: dart run easy_localization:generate -S ../i18n && dart run bin/generate_keys.dart
|
||||
working-directory: ./mobile
|
||||
|
||||
- name: Generate platform APIs
|
||||
@@ -136,7 +136,7 @@ jobs:
|
||||
|
||||
- name: Save Gradle Cache
|
||||
id: cache-gradle-save
|
||||
uses: actions/cache/save@5a3ec84eff668545956fd18022155c47e93e2684 # v4
|
||||
uses: actions/cache/save@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
|
||||
if: github.ref == 'refs/heads/main'
|
||||
with:
|
||||
path: |
|
||||
|
||||
2
.github/workflows/cache-cleanup.yml
vendored
@@ -19,7 +19,7 @@ jobs:
|
||||
actions: write
|
||||
steps:
|
||||
- name: Check out code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
|
||||
33
.github/workflows/cli.yml
vendored
@@ -29,25 +29,28 @@ jobs:
|
||||
working-directory: ./cli
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
# Setup .npmrc file to publish to npm
|
||||
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './cli/.nvmrc'
|
||||
registry-url: 'https://registry.npmjs.org'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: '**/pnpm-lock.yaml'
|
||||
|
||||
- name: Prepare SDK
|
||||
run: npm ci --prefix ../open-api/typescript-sdk/
|
||||
- name: Build SDK
|
||||
run: npm run build --prefix ../open-api/typescript-sdk/
|
||||
- run: npm ci
|
||||
- run: npm run build
|
||||
- run: npm publish
|
||||
- name: Setup typescript-sdk
|
||||
run: pnpm install && pnpm run build
|
||||
working-directory: ./open-api/typescript-sdk
|
||||
|
||||
- run: pnpm install --frozen-lockfile
|
||||
- run: pnpm build
|
||||
- run: pnpm publish
|
||||
if: ${{ github.event_name == 'release' }}
|
||||
env:
|
||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
@@ -62,7 +65,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
@@ -73,7 +76,7 @@ jobs:
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
|
||||
if: ${{ !github.event.pull_request.head.repo.fork }}
|
||||
with:
|
||||
registry: ghcr.io
|
||||
@@ -88,7 +91,7 @@ jobs:
|
||||
|
||||
- name: Generate docker image tags
|
||||
id: metadata
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
|
||||
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
|
||||
with:
|
||||
flavor: |
|
||||
latest=false
|
||||
|
||||
96  .github/workflows/close-duplicates.yml  vendored  Normal file
@@ -0,0 +1,96 @@
on:
  issues:
    types: [opened]
  discussion:
    types: [created]

name: Close likely duplicates
permissions: {}

jobs:
  get_body:
    runs-on: ubuntu-latest
    env:
      EVENT: ${{ toJSON(github.event) }}
    outputs:
      body: ${{ steps.get_body.outputs.body }}
    steps:
      - id: get_body
        run: |
          BODY=$(echo """$EVENT""" | jq -r '.issue // .discussion | .body' | base64 -w 0)
          echo "body=$BODY" >> $GITHUB_OUTPUT

  get_checkbox_json:
    runs-on: ubuntu-latest
    needs: get_body
    container:
      image: yshavit/mdq:0.8.0@sha256:c69224d34224a0043d9a3ee46679ba4a2a25afaac445f293d92afe13cd47fcea
    outputs:
      json: ${{ steps.get_checkbox.outputs.json }}
    steps:
      - id: get_checkbox
        env:
          BODY: ${{ needs.get_body.outputs.body }}
        run: |
          JSON=$(echo "$BODY" | base64 -d | /mdq --output json '# I have searched | - [?] Yes')
          echo "json=$JSON" >> $GITHUB_OUTPUT

  close_and_comment:
    runs-on: ubuntu-latest
    needs: get_checkbox_json
    if: ${{ !fromJSON(needs.get_checkbox_json.outputs.json).items[0].list[0].checked }}
    permissions:
      issues: write
      discussions: write
    steps:
      - name: Close issue
        if: ${{ github.event_name == 'issues' }}
        env:
          GH_TOKEN: ${{ github.token }}
          NODE_ID: ${{ github.event.issue.node_id }}
        run: |
          gh api graphql \
            -f issueId="$NODE_ID" \
            -f body="This issue has automatically been closed as it is likely a duplicate. We get a lot of duplicate threads each day, which is why we ask you in the template to confirm that you searched for duplicates before opening one. If you're sure this is not a duplicate, please leave a comment and we will reopen the thread if necessary." \
            -f query='
              mutation CommentAndCloseIssue($issueId: ID!, $body: String!) {
                addComment(input: {
                  subjectId: $issueId,
                  body: $body
                }) {
                  __typename
                }

                closeIssue(input: {
                  issueId: $issueId,
                  stateReason: DUPLICATE
                }) {
                  __typename
                }
              }'

      - name: Close discussion
        if: ${{ github.event_name == 'discussion' && github.event.discussion.category.name == 'Feature Request' }}
        env:
          GH_TOKEN: ${{ github.token }}
          NODE_ID: ${{ github.event.discussion.node_id }}
        run: |
          gh api graphql \
            -f discussionId="$NODE_ID" \
            -f body="This discussion has automatically been closed as it is likely a duplicate. We get a lot of duplicate threads each day, which is why we ask you in the template to confirm that you searched for duplicates before opening one. If you're sure this is not a duplicate, please leave a comment and we will reopen the thread if necessary." \
            -f query='
              mutation CommentAndCloseDiscussion($discussionId: ID!, $body: String!) {
                addDiscussionComment(input: {
                  discussionId: $discussionId,
                  body: $body
                }) {
                  __typename
                }

                closeDiscussion(input: {
                  discussionId: $discussionId,
                  reason: DUPLICATE
                }) {
                  __typename
                }
              }'
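The workflow above hands the issue or discussion body from `get_body` to `get_checkbox_json` as a single base64 line so that newlines and quotes survive `$GITHUB_OUTPUT`, then lets `mdq` pull the "I have searched" checkbox out of the Markdown. A rough local approximation of that pipeline, assuming `mdq` is on `PATH`; the sample payload and heading text are illustrative, not the real issue-template wording:

```bash
# Stand-in for the github.event payload; the body text is only an example.
EVENT='{"issue": {"body": "# I have searched the existing issues\n\n- [ ] Yes"}}'

# get_body: extract the body and base64-encode it into one line that is safe
# to write to $GITHUB_OUTPUT.
BODY=$(echo "$EVENT" | jq -r '.issue // .discussion | .body' | base64 -w 0)

# get_checkbox_json: decode and query the checkbox state; the mdq selector is
# the same one the workflow runs inside the yshavit/mdq container.
echo "$BODY" | base64 -d | mdq --output json '# I have searched | - [?] Yes'
```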
8
.github/workflows/codeql-analysis.yml
vendored
@@ -44,13 +44,13 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@4e828ff8d448a8a6e532957b1811f387a63867e8 # v3.29.4
|
||||
uses: github/codeql-action/init@df559355d593797519d70b90fc8edd5db049e7a2 # v3.29.9
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
@@ -63,7 +63,7 @@ jobs:
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@4e828ff8d448a8a6e532957b1811f387a63867e8 # v3.29.4
|
||||
uses: github/codeql-action/autobuild@df559355d593797519d70b90fc8edd5db049e7a2 # v3.29.9
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||
@@ -76,6 +76,6 @@ jobs:
|
||||
# ./location_of_script_within_repo/buildscript.sh
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@4e828ff8d448a8a6e532957b1811f387a63867e8 # v3.29.4
|
||||
uses: github/codeql-action/analyze@df559355d593797519d70b90fc8edd5db049e7a2 # v3.29.9
|
||||
with:
|
||||
category: '/language:${{matrix.language}}'
|
||||
|
||||
6
.github/workflows/docker.yml
vendored
@@ -24,7 +24,7 @@ jobs:
|
||||
should_run_ml: ${{ steps.found_paths.outputs.machine-learning == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- id: found_paths
|
||||
@@ -60,7 +60,7 @@ jobs:
|
||||
suffix: ['', '-cuda', '-rocm', '-openvino', '-armnn', '-rknn']
|
||||
steps:
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
@@ -89,7 +89,7 @@ jobs:
|
||||
suffix: ['']
|
||||
steps:
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
|
||||
19
.github/workflows/docs-build.yml
vendored
@@ -21,7 +21,7 @@ jobs:
|
||||
should_run: ${{ steps.found_paths.outputs.docs == 'true' || steps.found_paths.outputs.open-api == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- id: found_paths
|
||||
@@ -51,25 +51,28 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './docs/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: '**/pnpm-lock.yaml'
|
||||
|
||||
- name: Run npm install
|
||||
run: npm ci
|
||||
- name: Run install
|
||||
run: pnpm install
|
||||
|
||||
- name: Check formatting
|
||||
run: npm run format
|
||||
run: pnpm format
|
||||
|
||||
- name: Run build
|
||||
run: npm run build
|
||||
run: pnpm build
|
||||
|
||||
- name: Upload build output
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
|
||||
2
.github/workflows/docs-deploy.yml
vendored
@@ -108,7 +108,7 @@ jobs:
|
||||
if: ${{ fromJson(needs.checks.outputs.artifact).found && fromJson(needs.checks.outputs.parameters).shouldDeploy }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
|
||||
2
.github/workflows/docs-destroy.yml
vendored
@@ -14,7 +14,7 @@ jobs:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
|
||||
8
.github/workflows/fix-format.yml
vendored
@@ -16,13 +16,13 @@ jobs:
|
||||
steps:
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
|
||||
uses: actions/create-github-app-token@a8d616148505b5069dccd32f177bb87d7f39123b # v2.1.1
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: 'Checkout'
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.ref }}
|
||||
token: ${{ steps.generate-token.outputs.token }}
|
||||
@@ -32,8 +32,8 @@ jobs:
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './server/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: '**/pnpm-lock.yaml'
|
||||
|
||||
- name: Fix formatting
|
||||
run: make install-all && make format-all
|
||||
|
||||
8
.github/workflows/prepare-release.yml
vendored
@@ -32,13 +32,13 @@ jobs:
|
||||
steps:
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
|
||||
uses: actions/create-github-app-token@a8d616148505b5069dccd32f177bb87d7f39123b # v2.1.1
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
token: ${{ steps.generate-token.outputs.token }}
|
||||
persist-credentials: true
|
||||
@@ -83,13 +83,13 @@ jobs:
|
||||
steps:
|
||||
- name: Generate a token
|
||||
id: generate-token
|
||||
uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
|
||||
uses: actions/create-github-app-token@a8d616148505b5069dccd32f177bb87d7f39123b # v2.1.1
|
||||
with:
|
||||
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
|
||||
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
token: ${{ steps.generate-token.outputs.token }}
|
||||
persist-credentials: false
|
||||
|
||||
14
.github/workflows/preview-label.yaml
vendored
@@ -20,7 +20,7 @@ jobs:
|
||||
|
||||
remove-label:
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.event.action == 'closed' && contains(github.event.pull_request.labels.*.name, 'preview') }}
|
||||
if: ${{ (github.event.action == 'closed' || github.event.pull_request.head.repo.fork) && contains(github.event.pull_request.labels.*.name, 'preview') }}
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
@@ -33,3 +33,15 @@ jobs:
|
||||
repo: context.repo.repo,
|
||||
name: 'preview'
|
||||
})
|
||||
|
||||
- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
|
||||
if: ${{ github.event.pull_request.head.repo.fork }}
|
||||
with:
|
||||
message-id: 'preview-status'
|
||||
message: 'PRs from forks cannot have preview environments.'
|
||||
|
||||
- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
|
||||
if: ${{ !github.event.pull_request.head.repo.fork }}
|
||||
with:
|
||||
message-id: 'preview-status'
|
||||
message: 'Preview environment has been removed.'
|
||||
|
||||
15
.github/workflows/sdk.yml
vendored
@@ -16,22 +16,25 @@ jobs:
|
||||
run:
|
||||
working-directory: ./open-api/typescript-sdk
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
|
||||
# Setup .npmrc file to publish to npm
|
||||
- uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './open-api/typescript-sdk/.nvmrc'
|
||||
registry-url: 'https://registry.npmjs.org'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: '**/pnpm-lock.yaml'
|
||||
- name: Install deps
|
||||
run: npm ci
|
||||
run: pnpm install --frozen-lockfile
|
||||
- name: Build
|
||||
run: npm run build
|
||||
run: pnpm build
|
||||
- name: Publish
|
||||
run: npm publish
|
||||
run: pnpm publish
|
||||
env:
|
||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
|
||||
10
.github/workflows/static_analysis.yml
vendored
@@ -20,7 +20,7 @@ jobs:
|
||||
should_run: ${{ steps.found_paths.outputs.mobile == 'true' || steps.should_force.outputs.should_force == 'true' }}
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- id: found_paths
|
||||
@@ -47,7 +47,7 @@ jobs:
|
||||
working-directory: ./mobile
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
@@ -68,7 +68,7 @@ jobs:
|
||||
working-directory: ./mobile
|
||||
|
||||
- name: Generate translation file
|
||||
run: make translation
|
||||
run: dart run easy_localization:generate -S ../i18n && dart run bin/generate_keys.dart
|
||||
|
||||
- name: Run Build Runner
|
||||
run: make build
|
||||
@@ -116,7 +116,7 @@ jobs:
|
||||
actions: read
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
@@ -129,7 +129,7 @@ jobs:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Upload SARIF file
|
||||
uses: github/codeql-action/upload-sarif@4e828ff8d448a8a6e532957b1811f387a63867e8 # v3.29.4
|
||||
uses: github/codeql-action/upload-sarif@df559355d593797519d70b90fc8edd5db049e7a2 # v3.29.9
|
||||
with:
|
||||
sarif_file: results.sarif
|
||||
category: zizmor
|
||||
|
||||
493
.github/workflows/test.yml
vendored
@@ -4,13 +4,10 @@ on:
|
||||
pull_request:
|
||||
push:
|
||||
branches: [main]
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
pre-job:
|
||||
runs-on: ubuntu-latest
|
||||
@@ -29,10 +26,9 @@ jobs:
|
||||
should_run_.github: ${{ steps.found_paths.outputs['.github'] == 'true' || steps.should_force.outputs.should_force == 'true' }} # redundant to have should_force but if someone changes the trigger then this won't have to be changed
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- id: found_paths
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
|
||||
with:
|
||||
@@ -58,11 +54,9 @@ jobs:
|
||||
- '.github/workflows/test.yml'
|
||||
.github:
|
||||
- '.github/**'
|
||||
|
||||
- name: Check if we should force jobs to run
|
||||
id: should_force
|
||||
run: echo "should_force=${{ steps.found_paths.outputs.workflow == 'true' || github.event_name == 'workflow_dispatch' }}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
server-unit-tests:
|
||||
name: Test & Lint Server
|
||||
needs: pre-job
|
||||
@@ -73,39 +67,33 @@ jobs:
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./server
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './server/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: Run npm install
|
||||
run: npm ci
|
||||
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: '**/pnpm-lock.yaml'
|
||||
- name: Run package manager install
|
||||
run: pnpm install
|
||||
- name: Run linter
|
||||
run: npm run lint
|
||||
run: pnpm lint
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run formatter
|
||||
run: npm run format
|
||||
run: pnpm format
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run tsc
|
||||
run: npm run check
|
||||
run: pnpm check
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run small tests & coverage
|
||||
run: npm test
|
||||
run: pnpm test
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
cli-unit-tests:
|
||||
name: Unit Test CLI
|
||||
needs: pre-job
|
||||
@@ -116,43 +104,36 @@ jobs:
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./cli
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './cli/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: '**/pnpm-lock.yaml'
|
||||
- name: Setup typescript-sdk
|
||||
run: npm ci && npm run build
|
||||
run: pnpm install && pnpm run build
|
||||
working-directory: ./open-api/typescript-sdk
|
||||
|
||||
- name: Install deps
|
||||
run: npm ci
|
||||
|
||||
run: pnpm install
|
||||
- name: Run linter
|
||||
run: npm run lint
|
||||
run: pnpm lint
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run formatter
|
||||
run: npm run format
|
||||
run: pnpm format
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run tsc
|
||||
run: npm run check
|
||||
run: pnpm check
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run unit tests & coverage
|
||||
run: npm run test
|
||||
run: pnpm test
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
cli-unit-tests-win:
|
||||
name: Unit Test CLI (Windows)
|
||||
needs: pre-job
|
||||
@@ -163,36 +144,31 @@ jobs:
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./cli
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './cli/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: '**/pnpm-lock.yaml'
|
||||
- name: Setup typescript-sdk
|
||||
run: npm ci && npm run build
|
||||
run: pnpm install --frozen-lockfile && pnpm build
|
||||
working-directory: ./open-api/typescript-sdk
|
||||
|
||||
- name: Install deps
|
||||
run: npm ci
|
||||
|
||||
run: pnpm install --frozen-lockfile
|
||||
# Skip linter & formatter in Windows test.
|
||||
- name: Run tsc
|
||||
run: npm run check
|
||||
run: pnpm check
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run unit tests & coverage
|
||||
run: npm run test
|
||||
run: pnpm test
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
web-lint:
|
||||
name: Lint Web
|
||||
needs: pre-job
|
||||
@@ -203,39 +179,33 @@ jobs:
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./web
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './web/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: '**/pnpm-lock.yaml'
|
||||
- name: Run setup typescript-sdk
|
||||
run: npm ci && npm run build
|
||||
run: pnpm install --frozen-lockfile && pnpm build
|
||||
working-directory: ./open-api/typescript-sdk
|
||||
|
||||
- name: Run npm install
|
||||
run: npm ci
|
||||
|
||||
- name: Run pnpm install
|
||||
run: pnpm rebuild && pnpm install --frozen-lockfile
|
||||
- name: Run linter
|
||||
run: npm run lint:p
|
||||
run: pnpm lint:p
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run formatter
|
||||
run: npm run format
|
||||
run: pnpm format
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run svelte checks
|
||||
run: npm run check:svelte
|
||||
run: pnpm check:svelte
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
web-unit-tests:
|
||||
name: Test Web
|
||||
needs: pre-job
|
||||
@@ -246,35 +216,30 @@ jobs:
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./web
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './web/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: '**/pnpm-lock.yaml'
|
||||
- name: Run setup typescript-sdk
|
||||
run: npm ci && npm run build
|
||||
run: pnpm install --frozen-lockfile && pnpm build
|
||||
working-directory: ./open-api/typescript-sdk
|
||||
|
||||
- name: Run npm install
|
||||
run: npm ci
|
||||
|
||||
run: pnpm install --frozen-lockfile
|
||||
- name: Run tsc
|
||||
run: npm run check:typescript
|
||||
run: pnpm check:typescript
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run unit tests & coverage
|
||||
run: npm run test
|
||||
run: pnpm test
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
i18n-tests:
|
||||
name: Test i18n
|
||||
needs: pre-job
|
||||
@@ -284,30 +249,27 @@ jobs:
|
||||
contents: read
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './web/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: '**/pnpm-lock.yaml'
|
||||
- name: Install dependencies
|
||||
run: npm --prefix=web ci
|
||||
|
||||
run: pnpm --filter=immich-web install --frozen-lockfile
|
||||
- name: Format
|
||||
run: npm --prefix=web run format:i18n
|
||||
|
||||
run: pnpm --filter=immich-web format:i18n
|
||||
- name: Find file changes
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
|
||||
id: verify-changed-files
|
||||
with:
|
||||
files: |
|
||||
i18n/**
|
||||
|
||||
- name: Verify files have not changed
|
||||
if: steps.verify-changed-files.outputs.files_changed == 'true'
|
||||
env:
|
||||
@@ -316,7 +278,6 @@ jobs:
|
||||
echo "ERROR: i18n files not up to date!"
|
||||
echo "Changed files: ${CHANGED_FILES}"
|
||||
exit 1
|
||||
|
||||
e2e-tests-lint:
|
||||
name: End-to-End Lint
|
||||
needs: pre-job
|
||||
@@ -327,41 +288,35 @@ jobs:
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./e2e
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './e2e/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: '**/pnpm-lock.yaml'
|
||||
- name: Run setup typescript-sdk
|
||||
run: npm ci && npm run build
|
||||
run: pnpm install --frozen-lockfile && pnpm build
|
||||
working-directory: ./open-api/typescript-sdk
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
run: pnpm install --frozen-lockfile
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run linter
|
||||
run: npm run lint
|
||||
run: pnpm lint
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run formatter
|
||||
run: npm run format
|
||||
run: pnpm format
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run tsc
|
||||
run: npm run check
|
||||
run: pnpm check
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
server-medium-tests:
|
||||
name: Medium Tests (Server)
|
||||
needs: pre-job
|
||||
@@ -372,27 +327,24 @@ jobs:
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./server
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './server/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: Run npm install
|
||||
run: npm ci
|
||||
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: '**/pnpm-lock.yaml'
|
||||
- name: Run pnpm install
|
||||
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm install --frozen-lockfile
|
||||
- name: Run medium tests
|
||||
run: npm run test:medium
|
||||
run: pnpm test:medium
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
e2e-tests-server-cli:
|
||||
name: End-to-End Tests (Server & CLI)
|
||||
needs: pre-job
|
||||
@@ -406,43 +358,41 @@ jobs:
|
||||
strategy:
|
||||
matrix:
|
||||
runner: [ubuntu-latest, ubuntu-24.04-arm]
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: 'recursive'
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './e2e/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: '**/pnpm-lock.yaml'
|
||||
- name: Run setup typescript-sdk
|
||||
run: npm ci && npm run build
|
||||
run: pnpm install --frozen-lockfile && pnpm build
|
||||
working-directory: ./open-api/typescript-sdk
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run setup web
|
||||
run: pnpm install --frozen-lockfile && pnpm exec svelte-kit sync
|
||||
working-directory: ./web
|
||||
if: ${{ !cancelled() }}
|
||||
- name: Run setup cli
|
||||
run: npm ci && npm run build
|
||||
run: pnpm install --frozen-lockfile && pnpm build
|
||||
working-directory: ./cli
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
run: pnpm install --frozen-lockfile
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Docker build
|
||||
run: docker compose build
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run e2e tests (api & cli)
|
||||
run: npm run test
|
||||
run: pnpm test
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
e2e-tests-web:
|
||||
name: End-to-End Tests (Web)
|
||||
needs: pre-job
|
||||
@@ -456,42 +406,36 @@ jobs:
|
||||
strategy:
|
||||
matrix:
|
||||
runner: [ubuntu-latest, ubuntu-24.04-arm]
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: 'recursive'
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './e2e/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: '**/pnpm-lock.yaml'
|
||||
- name: Run setup typescript-sdk
|
||||
run: npm ci && npm run build
|
||||
run: pnpm install --frozen-lockfile && pnpm build
|
||||
working-directory: ./open-api/typescript-sdk
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
run: pnpm install --frozen-lockfile
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Install Playwright Browsers
|
||||
run: npx playwright install chromium --only-shell
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Docker build
|
||||
run: docker compose build
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
- name: Run e2e tests (web)
|
||||
run: npx playwright test
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
success-check-e2e:
|
||||
name: End-to-End Tests Success
|
||||
needs: [e2e-tests-server-cli, e2e-tests-web]
|
||||
@@ -502,7 +446,6 @@ jobs:
|
||||
- uses: immich-app/devtools/actions/success-check@68f10eb389bb02a3cf9d1156111964c549eb421b # 0.0.4
|
||||
with:
|
||||
needs: ${{ toJSON(needs) }}
|
||||
|
||||
mobile-unit-tests:
|
||||
name: Unit Test Mobile
|
||||
needs: pre-job
|
||||
@@ -511,24 +454,20 @@ jobs:
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup Flutter SDK
|
||||
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
|
||||
with:
|
||||
channel: 'stable'
|
||||
flutter-version-file: ./mobile/pubspec.yaml
|
||||
|
||||
- name: Generate translation file
|
||||
run: make translation
|
||||
run: dart run easy_localization:generate -S ../i18n && dart run bin/generate_keys.dart
|
||||
working-directory: ./mobile
|
||||
|
||||
- name: Run tests
|
||||
working-directory: ./mobile
|
||||
run: flutter test -j 1
|
||||
|
||||
ml-unit-tests:
|
||||
name: Unit Test ML
|
||||
needs: pre-job
|
||||
@@ -540,10 +479,9 @@ jobs:
|
||||
run:
|
||||
working-directory: ./machine-learning
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
|
||||
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
@@ -566,7 +504,6 @@ jobs:
|
||||
- name: Run tests and coverage
|
||||
run: |
|
||||
uv run pytest --cov=immich_ml --cov-report term-missing
|
||||
|
||||
github-files-formatting:
|
||||
name: .github Files Formatting
|
||||
needs: pre-job
|
||||
@@ -577,45 +514,38 @@ jobs:
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./.github
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './.github/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
- name: Run npm install
|
||||
run: npm ci
|
||||
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: '**/pnpm-lock.yaml'
|
||||
- name: Run pnpm install
|
||||
run: pnpm install --frozen-lockfile
|
||||
- name: Run formatter
|
||||
run: npm run format
|
||||
run: pnpm format
|
||||
if: ${{ !cancelled() }}
|
||||
|
||||
shellcheck:
|
||||
name: ShellCheck
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Run ShellCheck
|
||||
uses: ludeeus/action-shellcheck@00cae500b08a931fb5698e11e79bfbd38e612a38 # 2.0.0
|
||||
with:
|
||||
ignore_paths: >-
|
||||
**/open-api/**
|
||||
**/openapi**
|
||||
**/node_modules/**
|
||||
|
||||
**/open-api/** **/openapi** **/node_modules/**
|
||||
generated-api-up-to-date:
|
||||
name: OpenAPI Clients
|
||||
runs-on: ubuntu-latest
|
||||
@@ -623,26 +553,23 @@ jobs:
|
||||
contents: read
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './server/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: '**/pnpm-lock.yaml'
|
||||
- name: Install server dependencies
|
||||
run: npm --prefix=server ci
|
||||
|
||||
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm --filter immich install --frozen-lockfile
|
||||
- name: Build the app
|
||||
run: npm --prefix=server run build
|
||||
|
||||
run: pnpm --filter immich build
|
||||
- name: Run API generation
|
||||
run: make open-api
|
||||
|
||||
- name: Find file changes
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
|
||||
id: verify-changed-files
|
||||
@@ -651,7 +578,6 @@ jobs:
|
||||
mobile/openapi
|
||||
open-api/typescript-sdk
|
||||
open-api/immich-openapi-specs.json
|
||||
|
||||
- name: Verify files have not changed
|
||||
if: steps.verify-changed-files.outputs.files_changed == 'true'
|
||||
env:
|
||||
@@ -660,7 +586,6 @@ jobs:
|
||||
echo "ERROR: Generated files not up to date!"
|
||||
echo "Changed files: ${CHANGED_FILES}"
|
||||
exit 1
|
||||
|
||||
sql-schema-up-to-date:
|
||||
name: SQL Schema Checks
|
||||
runs-on: ubuntu-latest
|
||||
@@ -674,45 +599,36 @@ jobs:
|
||||
POSTGRES_USER: postgres
|
||||
POSTGRES_DB: immich
|
||||
options: >-
|
||||
--health-cmd pg_isready
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
--health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
|
||||
ports:
|
||||
- 5432:5432
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./server
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
with:
|
||||
node-version-file: './server/.nvmrc'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: '**/package-lock.json'
|
||||
|
||||
cache: 'pnpm'
|
||||
cache-dependency-path: '**/pnpm-lock.yaml'
|
||||
- name: Install server dependencies
|
||||
run: npm ci
|
||||
|
||||
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm install --frozen-lockfile
|
||||
- name: Build the app
|
||||
run: npm run build
|
||||
|
||||
run: pnpm build
|
||||
- name: Run existing migrations
|
||||
run: npm run migrations:run
|
||||
|
||||
run: pnpm migrations:run
|
||||
- name: Test npm run schema:reset command works
|
||||
run: npm run schema:reset
|
||||
|
||||
run: pnpm schema:reset
|
||||
- name: Generate new migrations
|
||||
continue-on-error: true
|
||||
run: npm run migrations:generate src/TestMigration
|
||||
|
||||
run: pnpm migrations:generate src/TestMigration
|
||||
- name: Find file changes
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
|
||||
id: verify-changed-files
|
||||
@@ -728,19 +644,16 @@ jobs:
|
||||
echo "Changed files: ${CHANGED_FILES}"
|
||||
cat ./src/*-TestMigration.ts
|
||||
exit 1
|
||||
|
||||
- name: Run SQL generation
|
||||
run: npm run sync:sql
|
||||
run: pnpm sync:sql
|
||||
env:
|
||||
DB_URL: postgres://postgres:postgres@localhost:5432/immich
|
||||
|
||||
- name: Find file changes
|
||||
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
|
||||
id: verify-changed-sql-files
|
||||
with:
|
||||
files: |
|
||||
server/src/queries
|
||||
|
||||
- name: Verify SQL files have not changed
|
||||
if: steps.verify-changed-sql-files.outputs.files_changed == 'true'
|
||||
env:
|
||||
@@ -751,77 +664,77 @@ jobs:
|
||||
git diff
|
||||
exit 1
|
||||
|
||||
# mobile-integration-tests:
|
||||
# name: Run mobile end-to-end integration tests
|
||||
# runs-on: macos-latest
|
||||
# steps:
|
||||
# - uses: actions/checkout@v4
|
||||
# - uses: actions/setup-java@v3
|
||||
# with:
|
||||
# distribution: 'zulu'
|
||||
# java-version: '12.x'
|
||||
# cache: 'gradle'
|
||||
# - name: Cache android SDK
|
||||
# uses: actions/cache@v3
|
||||
# id: android-sdk
|
||||
# with:
|
||||
# key: android-sdk
|
||||
# path: |
|
||||
# /usr/local/lib/android/
|
||||
# ~/.android
|
||||
# - name: Cache Gradle
|
||||
# uses: actions/cache@v3
|
||||
# with:
|
||||
# path: |
|
||||
# ./mobile/build/
|
||||
# ./mobile/android/.gradle/
|
||||
# key: ${{ runner.os }}-flutter-${{ hashFiles('**/*.gradle*', 'pubspec.lock') }}
|
||||
# - name: Setup Android SDK
|
||||
# if: steps.android-sdk.outputs.cache-hit != 'true'
|
||||
# uses: android-actions/setup-android@v2
|
||||
# - name: AVD cache
|
||||
# uses: actions/cache@v3
|
||||
# id: avd-cache
|
||||
# with:
|
||||
# path: |
|
||||
# ~/.android/avd/*
|
||||
# ~/.android/adb*
|
||||
# key: avd-29
|
||||
# - name: create AVD and generate snapshot for caching
|
||||
# if: steps.avd-cache.outputs.cache-hit != 'true'
|
||||
# uses: reactivecircus/android-emulator-runner@v2.27.0
|
||||
# with:
|
||||
# working-directory: ./mobile
|
||||
# cores: 2
|
||||
# api-level: 29
|
||||
# arch: x86_64
|
||||
# profile: pixel
|
||||
# target: default
|
||||
# force-avd-creation: false
|
||||
# emulator-options: -no-window -gpu swiftshader_indirect -noaudio -no-boot-anim -camera-back none
|
||||
# disable-animations: false
|
||||
# script: echo "Generated AVD snapshot for caching."
|
||||
# - name: Setup Flutter SDK
|
||||
# uses: subosito/flutter-action@v2
|
||||
# with:
|
||||
# channel: 'stable'
|
||||
# flutter-version: '3.7.3'
|
||||
# cache: true
|
||||
# - name: Run integration tests
|
||||
# uses: Wandalen/wretry.action@master
|
||||
# with:
|
||||
# action: reactivecircus/android-emulator-runner@v2.27.0
|
||||
# with: |
|
||||
# working-directory: ./mobile
|
||||
# cores: 2
|
||||
# api-level: 29
|
||||
# arch: x86_64
|
||||
# profile: pixel
|
||||
# target: default
|
||||
# force-avd-creation: false
|
||||
# emulator-options: -no-snapshot-save -no-window -gpu swiftshader_indirect -noaudio -no-boot-anim -camera-back none
|
||||
# disable-animations: true
|
||||
# script: |
|
||||
# flutter pub get
|
||||
# flutter test integration_test
|
||||
# attempt_limit: 3
|
||||
2 .github/workflows/weblate-lock.yml vendored
@@ -15,7 +15,7 @@ jobs:
should_run: ${{ steps.found_paths.outputs.i18n == 'true' && github.head_ref != 'chore/translations'}}
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
with:
persist-credentials: false
- id: found_paths
39 .pnpmfile.cjs Normal file
@@ -0,0 +1,39 @@
module.exports = {
  hooks: {
    readPackage: (pkg) => {
      if (!pkg.name) {
        return pkg;
      }
      switch (pkg.name) {
        case "exiftool-vendored":
          if (pkg.optionalDependencies["exiftool-vendored.pl"]) {
            // make exiftool-vendored.pl a regular dependency
            pkg.dependencies["exiftool-vendored.pl"] =
              pkg.optionalDependencies["exiftool-vendored.pl"];
            delete pkg.optionalDependencies["exiftool-vendored.pl"];
          }
          break;
        case "sharp":
          const optionalDeps = Object.keys(pkg.optionalDependencies).filter(
            (dep) => dep.startsWith("@img")
          );
          for (const dep of optionalDeps) {
            // remove all optionalDependencies from sharp (they will be compiled from source), except:
            // include the precompiled musl version of sharp, for web
            // include precompiled linux-x64 version of sharp, for server (stage: web-prod)
            // include precompiled linux-arm64 version of sharp, for server (stage: web-prod)
            if (
              dep.includes("musl") ||
              dep.includes("linux-x64") ||
              dep.includes("linux-arm64")
            ) {
              continue;
            }
            delete pkg.optionalDependencies[dep];
          }
          break;
      }
      return pkg;
    },
  },
};
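pnpm only applies `.pnpmfile.cjs` hooks while it resolves and installs packages, so the effect of the `readPackage` hook above is easiest to confirm right after a fresh install. A minimal sanity check, as an illustration only (run from the workspace root):

```bash
# re-resolve the dependency graph so the readPackage hook runs
pnpm install

# exiftool-vendored.pl should now appear as a regular (non-optional) dependency
pnpm -r why exiftool-vendored.pl
```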
3 .vscode/settings.json vendored
@@ -56,7 +56,8 @@
"explorer.fileNesting.enabled": true,
"explorer.fileNesting.patterns": {
  "*.dart": "${capture}.g.dart,${capture}.gr.dart,${capture}.drift.dart",
  "*.ts": "${capture}.spec.ts,${capture}.mock.ts"
  "*.ts": "${capture}.spec.ts,${capture}.mock.ts",
  "package.json": "package-lock.json, yarn.lock, pnpm-lock.yaml, bun.lockb, bun.lock, pnpm-workspace.yaml, .pnpmfile.cjs"
},
"svelte.enable-ts-plugin": true,
"typescript.preferences.importModuleSpecifier": "non-relative"
77 Makefile
@@ -8,7 +8,10 @@ dev-update:
    @trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --build -V --remove-orphans

dev-scale:
    @trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans
    @trap 'make dev-down' EXIT; COMPOSE_BAKE=true docker compose -f ./docker/docker-compose.dev.yml up --build -V --scale immich-server=3 --remove-orphans

dev-docs:
    npm --prefix docs run start

.PHONY: e2e
e2e:
@@ -40,7 +43,7 @@ open-api-typescript:
    cd ./open-api && bash ./bin/generate-open-api.sh typescript

sql:
    npm --prefix server run sync:sql
    pnpm --filter immich run sync:sql

attach-server:
    docker exec -it docker_immich-server_1 sh
@@ -50,31 +53,40 @@ renovate:

MODULES = e2e server web cli sdk docs .github

# directory to package name mapping function
# cli = @immich/cli
# docs = documentation
# e2e = immich-e2e
# open-api/typescript-sdk = @immich/sdk
# server = immich
# web = immich-web
map-package = $(subst sdk,@immich/sdk,$(subst cli,@immich/cli,$(subst docs,documentation,$(subst e2e,immich-e2e,$(subst server,immich,$(subst web,immich-web,$1))))))

audit-%:
    npm --prefix $(subst sdk,open-api/typescript-sdk,$*) audit fix
    pnpm --filter $(call map-package,$*) audit fix
install-%:
    npm --prefix $(subst sdk,open-api/typescript-sdk,$*) i
ci-%:
    npm --prefix $(subst sdk,open-api/typescript-sdk,$*) ci
    pnpm --filter $(call map-package,$*) install $(if $(FROZEN),--frozen-lockfile) $(if $(OFFLINE),--offline)
build-cli: build-sdk
build-web: build-sdk
build-%: install-%
    npm --prefix $(subst sdk,open-api/typescript-sdk,$*) run build
    pnpm --filter $(call map-package,$*) run build
format-%:
    npm --prefix $* run format:fix
    pnpm --filter $(call map-package,$*) run format:fix
lint-%:
    npm --prefix $* run lint:fix
    pnpm --filter $(call map-package,$*) run lint:fix
lint-web:
    pnpm --filter $(call map-package,$*) run lint:p
check-%:
    npm --prefix $* run check
    pnpm --filter $(call map-package,$*) run check
check-web:
    npm --prefix web run check:typescript
    npm --prefix web run check:svelte
    pnpm --filter immich-web run check:typescript
    pnpm --filter immich-web run check:svelte
test-%:
    npm --prefix $* run test
    pnpm --filter $(call map-package,$*) run test
test-e2e:
    docker compose -f ./e2e/docker-compose.yml build
    npm --prefix e2e run test
    npm --prefix e2e run test:web
    pnpm --filter immich-e2e run test
    pnpm --filter immich-e2e run test:web
test-medium:
    docker run \
        --rm \
@@ -84,25 +96,36 @@ test-medium:
        -v ./server/tsconfig.json:/usr/src/app/tsconfig.json \
        -e NODE_ENV=development \
        immich-server:latest \
        -c "npm ci && npm run test:medium -- --run"
        -c "pnpm test:medium -- --run"
test-medium-dev:
    docker exec -it immich_server /bin/sh -c "npm run test:medium"
    docker exec -it immich_server /bin/sh -c "pnpm run test:medium"

build-all: $(foreach M,$(filter-out e2e .github,$(MODULES)),build-$M) ;
install-all: $(foreach M,$(MODULES),install-$M) ;
ci-all: $(foreach M,$(filter-out .github,$(MODULES)),ci-$M) ;
check-all: $(foreach M,$(filter-out sdk cli docs .github,$(MODULES)),check-$M) ;
lint-all: $(foreach M,$(filter-out sdk docs .github,$(MODULES)),lint-$M) ;
format-all: $(foreach M,$(filter-out sdk,$(MODULES)),format-$M) ;
audit-all: $(foreach M,$(MODULES),audit-$M) ;
hygiene-all: lint-all format-all check-all sql audit-all;
test-all: $(foreach M,$(filter-out sdk docs .github,$(MODULES)),test-$M) ;
install-all:
    pnpm -r --filter '!documentation' install

build-all: $(foreach M,$(filter-out e2e docs .github,$(MODULES)),build-$M) ;

check-all:
    pnpm -r --filter '!documentation' run "/^(check|check\:svelte|check\:typescript)$/"
lint-all:
    pnpm -r --filter '!documentation' run lint:fix
format-all:
    pnpm -r --filter '!documentation' run format:fix
audit-all:
    pnpm -r --filter '!documentation' audit fix
hygiene-all: audit-all
    pnpm -r --filter '!documentation' run "/(format:fix|check|check:svelte|check:typescript|sql)/"

test-all:
    pnpm -r --filter '!documentation' run "/^test/"

clean:
    find . -name "node_modules" -type d -prune -exec rm -rf {} +
    find . -name "dist" -type d -prune -exec rm -rf '{}' +
    find . -name "build" -type d -prune -exec rm -rf '{}' +
    find . -name "svelte-kit" -type d -prune -exec rm -rf '{}' +
    find . -name ".svelte-kit" -type d -prune -exec rm -rf '{}' +
    find . -name "coverage" -type d -prune -exec rm -rf '{}' +
    find . -name ".pnpm-store" -type d -prune -exec rm -rf '{}' +
    command -v docker >/dev/null 2>&1 && docker compose -f ./docker/docker-compose.dev.yml rm -v -f || true
    command -v docker >/dev/null 2>&1 && docker compose -f ./e2e/docker-compose.yml rm -v -f || true
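For readers skimming the Makefile change above: `map-package` only rewrites a module directory name into its pnpm workspace package name so the pattern rules can call `pnpm --filter`. Using nothing beyond the mapping and recipes shown in the hunk, a couple of targets expand roughly like this (illustrative, not additional targets):

```bash
# `make check-web` effectively runs:
pnpm --filter immich-web run check:typescript
pnpm --filter immich-web run check:svelte

# `make test-server` (pattern rule test-%, server -> immich) effectively runs:
pnpm --filter immich run test
```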
@@ -1 +1 @@
22.17.1
22.18.0
@@ -1,19 +1,14 @@
FROM node:22.16.0-alpine3.20@sha256:2289fb1fba0f4633b08ec47b94a89c7e20b829fc5679f9b7b298eaa2f1ed8b7e AS core

WORKDIR /usr/src/open-api/typescript-sdk
COPY open-api/typescript-sdk/package*.json open-api/typescript-sdk/tsconfig*.json ./
RUN npm ci
COPY open-api/typescript-sdk/ ./
RUN npm run build

WORKDIR /usr/src/app

COPY cli/package.json cli/package-lock.json ./
RUN npm ci

COPY cli .
RUN npm run build
COPY package* pnpm* .pnpmfile.cjs ./
COPY ./cli ./cli/
COPY ./open-api/typescript-sdk ./open-api/typescript-sdk/
RUN corepack enable pnpm && \
    pnpm install --filter @immich/sdk --filter @immich/cli --frozen-lockfile && \
    pnpm --filter @immich/sdk build && \
    pnpm --filter @immich/cli build

WORKDIR /import

ENTRYPOINT ["node", "/usr/src/app/dist"]
ENTRYPOINT ["node", "/usr/src/app/cli/dist"]
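The hunk above does not show an image name or build context, so here is a hedged sketch of how the resulting CLI image might be built and smoke-tested from the repository root (the `immich-cli` tag is an arbitrary example, not something defined in this diff; the root context matches the `COPY package* pnpm* .pnpmfile.cjs ./` paths):

```bash
# build the CLI image from the repo root
docker build -f cli/Dockerfile -t immich-cli .

# the entrypoint is `node /usr/src/app/cli/dist`, so arguments go straight to the CLI
docker run --rm -v "$(pwd)":/import immich-cli --help
```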
938
cli/package-lock.json
generated
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@immich/cli",
|
||||
"version": "2.2.76",
|
||||
"version": "2.2.80",
|
||||
"description": "Command Line Interface (CLI) for Immich",
|
||||
"type": "module",
|
||||
"exports": "./dist/index.js",
|
||||
@@ -21,7 +21,7 @@
|
||||
"@types/lodash-es": "^4.17.12",
|
||||
"@types/micromatch": "^4.0.9",
|
||||
"@types/mock-fs": "^4.13.1",
|
||||
"@types/node": "^22.16.5",
|
||||
"@types/node": "^22.17.1",
|
||||
"@vitest/coverage-v8": "^3.0.0",
|
||||
"byte-size": "^9.0.0",
|
||||
"cli-progress": "^3.12.0",
|
||||
@@ -29,7 +29,7 @@
|
||||
"eslint": "^9.14.0",
|
||||
"eslint-config-prettier": "^10.1.8",
|
||||
"eslint-plugin-prettier": "^5.1.3",
|
||||
"eslint-plugin-unicorn": "^59.0.0",
|
||||
"eslint-plugin-unicorn": "^60.0.0",
|
||||
"globals": "^16.0.0",
|
||||
"mock-fs": "^5.2.0",
|
||||
"prettier": "^3.2.5",
|
||||
@@ -69,6 +69,6 @@
|
||||
"micromatch": "^4.0.8"
|
||||
},
|
||||
"volta": {
|
||||
"node": "22.17.1"
|
||||
"node": "22.18.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,17 +21,16 @@ services:
|
||||
# extends:
|
||||
# file: hwaccel.transcoding.yml
|
||||
# service: cpu # set to one of [nvenc, quicksync, rkmpp, vaapi, vaapi-wsl] for accelerated transcoding
|
||||
user: '${UID:-1000}:${GID:-1000}'
|
||||
build:
|
||||
context: ../
|
||||
dockerfile: server/Dockerfile
|
||||
target: dev
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- ../server:/usr/src/app/server
|
||||
- ../open-api:/usr/src/app/open-api
|
||||
- ..:/usr/src/app
|
||||
- ${UPLOAD_LOCATION}/photos:/data
|
||||
- ${UPLOAD_LOCATION}/photos/upload:/data/upload
|
||||
- /usr/src/app/server/node_modules
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
env_file:
|
||||
- .env
|
||||
@@ -58,8 +57,12 @@ services:
|
||||
- 9231:9231
|
||||
- 2283:2283
|
||||
depends_on:
|
||||
- redis
|
||||
- database
|
||||
redis:
|
||||
condition: service_started
|
||||
database:
|
||||
condition: service_started
|
||||
init:
|
||||
condition: service_completed_successfully
|
||||
healthcheck:
|
||||
disable: false
|
||||
|
||||
@@ -68,9 +71,11 @@ services:
|
||||
image: immich-web-dev:latest
|
||||
# Needed for rootless docker setup, see https://github.com/moby/moby/issues/45919
|
||||
# user: 0:0
|
||||
user: '${UID:-1000}:${GID:-1000}'
|
||||
build:
|
||||
context: ../
|
||||
dockerfile: web/Dockerfile
|
||||
dockerfile: server/Dockerfile
|
||||
target: dev
|
||||
command: ['immich-web']
|
||||
env_file:
|
||||
- .env
|
||||
@@ -78,18 +83,17 @@ services:
|
||||
- 3000:3000
|
||||
- 24678:24678
|
||||
volumes:
|
||||
- ../web:/usr/src/app/web
|
||||
- ../i18n:/usr/src/app/i18n
|
||||
- ../open-api/:/usr/src/app/open-api/
|
||||
# - ../../ui:/usr/ui
|
||||
- /usr/src/app/web/node_modules
|
||||
- ..:/usr/src/app
|
||||
ulimits:
|
||||
nofile:
|
||||
soft: 1048576
|
||||
hard: 1048576
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- immich-server
|
||||
immich-server:
|
||||
condition: service_started
|
||||
init:
|
||||
condition: service_completed_successfully
|
||||
|
||||
immich-machine-learning:
|
||||
container_name: immich_machine_learning
|
||||
@@ -117,7 +121,7 @@ services:
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: docker.io/valkey/valkey:8-bookworm@sha256:facc1d2c3462975c34e10fccb167bfa92b0e0dbd992fc282c29a61c3243afb11
|
||||
image: docker.io/valkey/valkey:8-bookworm@sha256:a137a2b60aca1a75130022d6bb96af423fefae4eb55faf395732db3544803280
|
||||
healthcheck:
|
||||
test: redis-cli ping || exit 1
|
||||
|
||||
@@ -157,6 +161,14 @@ services:
|
||||
# volumes:
|
||||
# - grafana-data:/var/lib/grafana
|
||||
|
||||
init:
  container_name: init
  image: busybox
  env_file:
    - .env
  user: 0:0
  command: sh -c 'for path in /usr/src/app/.pnpm-store /usr/src/app/server/node_modules /usr/src/app/.github/node_modules /usr/src/app/cli/node_modules /usr/src/app/docs/node_modules /usr/src/app/e2e/node_modules /usr/src/app/open-api/typescript-sdk/node_modules /usr/src/app/web/.svelte-kit /usr/src/app/web/coverage /usr/src/app/node_modules /usr/src/app/web/node_modules; do [ -e "$$path" ] && chown -R ${UID:-1000}:${GID:-1000} "$$path" || true; done'
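That single-quoted `command` is dense; unrolled into ordinary shell it reads roughly as follows (abridged path list, shown only for readability — inside the compose file the `$$` escaping is required):

```bash
# chown each bind-mount-shadowed directory to the dev user, skipping paths that do not exist
for path in /usr/src/app/.pnpm-store /usr/src/app/server/node_modules \
            /usr/src/app/web/node_modules /usr/src/app/node_modules; do
  [ -e "$path" ] && chown -R "${UID:-1000}:${GID:-1000}" "$path" || true
done
```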
|
||||
|
||||
volumes:
|
||||
model-cache:
|
||||
prometheus-data:
|
||||
|
||||
@@ -56,7 +56,7 @@ services:
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: docker.io/valkey/valkey:8-bookworm@sha256:facc1d2c3462975c34e10fccb167bfa92b0e0dbd992fc282c29a61c3243afb11
|
||||
image: docker.io/valkey/valkey:8-bookworm@sha256:a137a2b60aca1a75130022d6bb96af423fefae4eb55faf395732db3544803280
|
||||
healthcheck:
|
||||
test: redis-cli ping || exit 1
|
||||
restart: always
|
||||
@@ -95,7 +95,7 @@ services:
|
||||
command: ['./run.sh', '-disable-reporting']
|
||||
ports:
|
||||
- 3000:3000
|
||||
image: grafana/grafana:12.0.2-ubuntu@sha256:0512d81cdeaaff0e370a9aa66027b465d1f1f04379c3a9c801a905fabbdbc7a5
|
||||
image: grafana/grafana:12.1.1-ubuntu@sha256:d1da838234ff2de93e0065ee1bf0e66d38f948dcc5d718c25fa6237e14b4424a
|
||||
volumes:
|
||||
- grafana-data:/var/lib/grafana
|
||||
|
||||
|
||||
@@ -49,7 +49,7 @@ services:
|
||||
|
||||
redis:
|
||||
container_name: immich_redis
|
||||
image: docker.io/valkey/valkey:8-bookworm@sha256:facc1d2c3462975c34e10fccb167bfa92b0e0dbd992fc282c29a61c3243afb11
|
||||
image: docker.io/valkey/valkey:8-bookworm@sha256:a137a2b60aca1a75130022d6bb96af423fefae4eb55faf395732db3544803280
|
||||
healthcheck:
|
||||
test: redis-cli ping || exit 1
|
||||
restart: always
|
||||
|
||||
4 docs/.gitignore vendored
@@ -18,4 +18,6 @@
npm-debug.log*
yarn-debug.log*
yarn-error.log*
yarn.lock
yarn.lock

/static/openapi.json
@@ -1 +1 @@
22.17.1
22.18.0
@@ -5,13 +5,13 @@ This website is built using [Docusaurus](https://docusaurus.io/), a modern stati
### Installation

```
$ npm install
$ pnpm install
```

### Local Development

```
$ npm run start
$ pnpm run start
```

This command starts a local development server and opens up a browser window. Most changes are reflected live without having to restart the server.
@@ -19,7 +19,7 @@ This command starts a local development server and opens up a browser window. Mo
### Build

```
$ npm run build
$ pnpm run build
```

This command generates static content into the `build` directory and can be served using any static contents hosting service.
@@ -29,13 +29,13 @@ This command generates static content into the `build` directory and can be serv
Using SSH:

```
$ USE_SSH=true npm run deploy
$ USE_SSH=true pnpm run deploy
```

Not using SSH:

```
$ GIT_USER=<Your GitHub username> npm run deploy
$ GIT_USER=<Your GitHub username> pnpm run deploy
```

If you are using GitHub pages for hosting, this command is a convenient way to build the website and push to the `gh-pages` branch.
@@ -2,10 +2,6 @@

Users can deploy a custom reverse proxy that forwards requests to Immich. This way, the reverse proxy can handle TLS termination, load balancing, or other advanced features. All reverse proxies between Immich and the user must forward all headers and set the `Host`, `X-Real-IP`, `X-Forwarded-Proto` and `X-Forwarded-For` headers to their appropriate values. Additionally, your reverse proxy should allow for big enough uploads. By following these practices, you ensure that all custom reverse proxies are fully compatible with Immich.

:::note
The Repair page can take a long time to load. To avoid server timeouts or errors, we recommend specifying a timeout of at least 10 minutes on your proxy server.
:::

:::caution
Immich does not support being served on a sub-path such as `location /immich {`. It has to be served on the root path of a (sub)domain.
:::
@@ -5,7 +5,7 @@ After making any changes in the `server/src/schema`, a database migration need t
1. Run the command

```bash
npm run migrations:generate <migration-name>
pnpm run migrations:generate <migration-name>
```

2. Check if the migration file makes sense.
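As a concrete illustration of step 1 above, the migration name is just a descriptive slug you choose yourself; the one below is made up, and the command is assumed to be run from the `server/` directory (or prefixed with `pnpm --filter immich run`):

```bash
# hypothetical migration name; pick something that describes your schema change
pnpm run migrations:generate add-asset-favorite-index
```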
@@ -204,8 +204,8 @@ When the Dev Container starts, it automatically:

1. **Runs post-create script** (`container-server-post-create.sh`):
   - Adjusts file permissions for the `node` user
   - Installs dependencies: `npm install` in all packages
   - Builds TypeScript SDK: `npm run build` in `open-api/typescript-sdk`
   - Installs dependencies: `pnpm install` in all packages
   - Builds TypeScript SDK: `pnpm run build` in `open-api/typescript-sdk`

2. **Starts development servers** via VS Code tasks:
   - `Immich API Server (Nest)` - API server with hot-reloading on port 2283
@@ -243,7 +243,7 @@ To connect the mobile app to your Dev Container:

- **Server code** (`/server`): Changes trigger automatic restart
- **Web code** (`/web`): Changes trigger hot module replacement
- **Database migrations**: Run `npm run sync:sql` in the server directory
- **Database migrations**: Run `pnpm run sync:sql` in the server directory
- **API changes**: Regenerate TypeScript SDK with `make open-api`

## Testing

@@ -273,19 +273,19 @@ make test-medium-dev # End-to-end tests
```bash
# Server tests
cd /workspaces/immich/server
npm test # Run all tests
npm run test:watch # Watch mode
npm run test:cov # Coverage report
pnpm test # Run all tests
pnpm run test:watch # Watch mode
pnpm run test:cov # Coverage report

# Web tests
cd /workspaces/immich/web
npm test # Run all tests
npm run test:watch # Watch mode
pnpm test # Run all tests
pnpm run test:watch # Watch mode

# E2E tests
cd /workspaces/immich/e2e
npm run test # Run API tests
npm run test:web # Run web UI tests
pnpm run test # Run API tests
pnpm run test:web # Run web UI tests
```

### Code Quality Commands
@@ -8,34 +8,34 @@ When contributing code through a pull request, please check the following:

## Web Checks

- [ ] `npm run lint` (linting via ESLint)
- [ ] `npm run format` (formatting via Prettier)
- [ ] `npm run check:svelte` (Type checking via SvelteKit)
- [ ] `npm run check:typescript` (check typescript)
- [ ] `npm test` (unit tests)
- [ ] `pnpm run lint` (linting via ESLint)
- [ ] `pnpm run format` (formatting via Prettier)
- [ ] `pnpm run check:svelte` (Type checking via SvelteKit)
- [ ] `pnpm run check:typescript` (check typescript)
- [ ] `pnpm test` (unit tests)

## Documentation

- [ ] `npm run format` (formatting via Prettier)
- [ ] `pnpm run format` (formatting via Prettier)
- [ ] Update the `_redirects` file if you have renamed a page or removed it from the documentation.

:::tip AIO
Run all web checks with `npm run check:all`
Run all web checks with `pnpm run check:all`
:::

## Server Checks

- [ ] `npm run lint` (linting via ESLint)
- [ ] `npm run format` (formatting via Prettier)
- [ ] `npm run check` (Type checking via `tsc`)
- [ ] `npm test` (unit tests)
- [ ] `pnpm run lint` (linting via ESLint)
- [ ] `pnpm run format` (formatting via Prettier)
- [ ] `pnpm run check` (Type checking via `tsc`)
- [ ] `pnpm test` (unit tests)

:::tip AIO
Run all server checks with `npm run check:all`
Run all server checks with `pnpm run check:all`
:::

:::info Auto Fix
You can use `npm run __:fix` to potentially correct some issues automatically for `npm run format` and `lint`.
You can use `pnpm run __:fix` to potentially correct some issues automatically for `pnpm run format` and `lint`.
:::

## Mobile Checks

@@ -54,20 +54,20 @@ You can access the web from `http://your-machine-ip:3000` or `http://localhost:3

If you only want to do web development connected to an existing, remote backend, follow these steps:

1. Build the Immich SDK - `cd open-api/typescript-sdk && npm i && npm run build && cd -`
1. Build the Immich SDK - `cd open-api/typescript-sdk && pnpm i && pnpm run build && cd -`
2. Enter the web directory - `cd web/`
3. Install web dependencies - `npm i`
3. Install web dependencies - `pnpm i`
4. Start the web development server

```bash
IMMICH_SERVER_URL=https://demo.immich.app/ npm run dev
IMMICH_SERVER_URL=https://demo.immich.app/ pnpm run dev
```

If you're using PowerShell on Windows you may need to set the env var separately like so:

```powershell
$env:IMMICH_SERVER_URL = "https://demo.immich.app/"
npm run dev
pnpm run dev
```

#### `@immich/ui`

@@ -75,12 +75,12 @@ npm run dev

To see local changes to `@immich/ui` in Immich, do the following:

1. Install `@immich/ui` as a sibling to `immich/`, for example `/home/user/immich` and `/home/user/ui`
2. Build the `@immich/ui` project via `npm run build`
2. Build the `@immich/ui` project via `pnpm run build`
3. Uncomment the corresponding volume in web service of the `docker/docker-compose.dev.yaml` file (`../../ui:/usr/ui`)
4. Uncomment the corresponding alias in the `web/vite.config.js` file (`'@immich/ui': path.resolve(\_\_dirname, '../../ui')`)
5. Uncomment the import statement in `web/src/app.css` file `@import '/usr/ui/dist/theme/default.css';` and comment out `@import '@immich/ui/theme/default.css';`
6. Start up the stack via `make dev`
7. After making changes in `@immich/ui`, rebuild it (`npm run build`)
7. After making changes in `@immich/ui`, rebuild it (`pnpm run build`)

### Mobile app

@@ -4,8 +4,8 @@

### Unit tests

Unit are run by calling `npm run test` from the `server/` directory.
You need to run `npm install` (in `server/`) before _once_.
Unit tests are run by calling `pnpm run test` from the `server/` directory.
You need to run `pnpm install` (in `server/`) once beforehand.

### End to end tests

@@ -17,14 +17,14 @@ make e2e

Before you can run the tests, you need to run the following commands _once_:

- `npm install` (in `e2e/`)
- `pnpm install` (in `e2e/`)
- `make open-api` (in the project root `/`)

Once the test environment is running, the e2e tests can be run via:

```bash
cd e2e/
npm test
pnpm test
```

The tests check various things including:

Binary image changes (docs/docs/install/img/truenas/):
- New files: truenas00.webp (2.6 KiB), truenas04.webp (101 KiB), truenas06.webp (5.1 KiB), truenas07.webp (49 KiB), truenas08.webp (26 KiB), truenas09.webp (24 KiB), truenas10.webp (17 KiB).
- Several existing screenshots were moved or otherwise touched without a size change (4.7 KiB, 48 KiB, 12 KiB, 1.6 KiB, 4.0 KiB, 35 KiB), and six old images were removed (8.9 KiB, 1.6 KiB, 19 KiB, 9.7 KiB, 6.7 KiB, 4.8 KiB); the original filenames are not preserved in this view.
@@ -2,6 +2,9 @@
|
||||
sidebar_position: 80
|
||||
---
|
||||
|
||||
import Tabs from '@theme/Tabs';
|
||||
import TabItem from '@theme/TabItem';
|
||||
|
||||
# TrueNAS [Community]
|
||||
|
||||
:::note
|
||||
@@ -9,211 +12,324 @@ This is a community contribution and not officially supported by the Immich team
|
||||
|
||||
Community support can be found in the dedicated channel on the [Discord Server](https://discord.immich.app/).
|
||||
|
||||
**Please report app issues to the corresponding [Github Repository](https://github.com/truenas/apps/tree/master/trains/community/immich).**
|
||||
**Please report app issues to the corresponding [GitHub Repository](https://github.com/truenas/apps/tree/master/trains/community/immich).**
|
||||
:::
|
||||
|
||||
:::warning
|
||||
This guide covers the installation of Immich on TrueNAS Community Edition 24.10.2.2 (Electric Eel) and later.
|
||||
|
||||
We recommend keeping TrueNAS Community Edition and Immich relatively up to date with the latest versions to avoid any issues.
|
||||
|
||||
If you are using an older version of TrueNAS, we ask that you upgrade to the latest version before installing Immich. Check the [TrueNAS Community Edition Release Notes](https://www.truenas.com/docs/softwarereleases/) for more information on breaking changes, new features, and how to upgrade your system.
|
||||
:::
|
||||
|
||||
Immich can easily be installed on TrueNAS Community Edition via the **Community** train application.
|
||||
Consider reviewing the TrueNAS [Apps resources](https://apps.truenas.com/getting-started/) if you have not previously configured applications on your system.
|
||||
|
||||
TrueNAS Community Edition makes installing and updating Immich easy, but you must use the Immich web portal and mobile app to configure accounts and access libraries.
|
||||
|
||||
## First Steps
|
||||
|
||||
The Immich app in TrueNAS Community Edition installs, completes the initial configuration, then starts the Immich web portal.
|
||||
When updates become available, TrueNAS alerts and provides easy updates.
|
||||
|
||||
Before installing the Immich app in TrueNAS, review the [Environment Variables](#environment-variables) documentation to see if you want to configure any during installation.
|
||||
You may also configure environment variables at any time after deploying the application.
|
||||
|
||||
### Setting up Storage Datasets
|
||||
|
||||
Before beginning app installation, [create the datasets](https://www.truenas.com/docs/scale/scaletutorials/storage/datasets/datasetsscale/) to use in the **Storage Configuration** section during installation.
|
||||
Immich requires seven datasets: `library`, `upload`, `thumbs`, `profile`, `video`, `backups`, and `pgData`.
|
||||
You can organize these as one parent with seven child datasets, for example `/mnt/tank/immich/library`, `/mnt/tank/immich/upload`, and so on.
|
||||
|
||||
In TrueNAS, Immich requires 2 datasets for the application to function correctly: `data` and `pgData`. You can set the datasets to any names to match your naming conventions or preferences.
|
||||
You can organize these as one parent with two child datasets, for example `/mnt/tank/immich/data` and `/mnt/tank/immich/pgData`.
|
||||
|
||||
<img
|
||||
src={require('./img/truenas12.webp').default}
|
||||
width="30%"
|
||||
src={require('./img/truenas/truenas00.webp').default}
|
||||
width="40%"
|
||||
alt="Immich App Widget"
|
||||
className="border rounded-xl"
|
||||
/>
|
||||
|
||||
:::info Permissions
|
||||
The **pgData** dataset must be owned by the user `netdata` (UID 999) for postgres to start. The other datasets must be owned by the user `root` (UID 0) or a group that includes the user `root` (UID 0) for immich to have the necessary permissions.
|
||||
:::info Datasets Permissions
|
||||
|
||||
If the **library** dataset uses ACL it must have [ACL mode](https://www.truenas.com/docs/core/coretutorials/storage/pools/permissions/#access-control-lists) set to `Passthrough` if you plan on using a [storage template](/docs/administration/storage-template.mdx) and the dataset is configured for network sharing (its ACL type is set to `SMB/NFSv4`). When the template is applied and files need to be moved from **upload** to **library**, Immich performs `chmod` internally and needs to be allowed to execute the command. [More info.](https://github.com/immich-app/immich/pull/13017)
|
||||
The **pgData** dataset must be owned by the user `netdata` (UID 999) for Postgres to start.
|
||||
|
||||
The `data` dataset must grant the **_modify_** permission to the user who will run Immich.
|
||||
|
||||
Since TrueNAS Community Edition 24.10.2.2, Immich can run as any user and group, the default being the `apps` user (UID 568) and the `apps` group (GID 568). This user, whether `apps` or another user you choose, must have **_modify_** permissions on the **data** dataset.
|
||||
|
||||
For an easy setup:
|
||||
|
||||
- Create the parent dataset `immich` keeping the default **Generic** preset.
|
||||
- Select `Dataset Preset` **Apps** instead of **Generic** when creating the `data` dataset. This will automatically give the correct permissions to the dataset. If you want to use another user for Immich, you can keep the **Generic** preset, but you will need to give the **_modify_** permission to that other user.
|
||||
- For the `pgData` dataset, you can keep the default preset **Generic** as permissions can be set during the installation of the Immich app (See [Storage Configuration](#storage-configuration) section).
|
||||
:::
|
||||
|
||||
:::tip
|
||||
To improve performance, Immich recommends using SSDs for the database. If you have a pool made of SSDs, you can create the `pgData` dataset on that pool.
|
||||
|
||||
Thumbnails can also be stored on the SSDs for faster access. This is an advanced option and not required for Immich to run. More information on how you can use multiple datasets to manage Immich storage in a finer-grained manner can be found in the [Advanced: Multiple Datasets for Immich Storage](#advanced-multiple-datasets-for-immich-storage) section below.
|
||||
:::
|
||||
|
||||
:::warning
|
||||
If you just created the datasets using the **Apps** preset, you can skip this warning section.
|
||||
|
||||
If the **data** dataset uses ACL it must have [ACL mode](https://www.truenas.com/docs/scale/scaletutorials/datasets/permissionsscale/) set to `Passthrough` if you plan on using a [storage template](/docs/administration/storage-template.mdx) and the dataset is configured for network sharing (its ACL type is set to `SMB/NFSv4`). When the template is applied and files need to be moved from **upload** to **library** (internal folder created by Immich within the **data** dataset), Immich performs `chmod` internally and must be allowed to execute the command. [More info.](https://github.com/immich-app/immich/pull/13017)
|
||||
|
||||
To change or verify the ACL mode, go to the **Datasets** screen, select the **library** dataset, click on the **Edit** button next to **Dataset Details**, then click on the **Advanced Options** tab, scroll down to the **ACL Mode** section, and select `Passthrough` from the dropdown menu. Click **Save** to apply the changes. If the option is greyed out, set the **ACL Type** to `SMB/NFSv4` first, then you can change the **ACL Mode** to `Passthrough`.
|
||||
:::
|
||||
|
||||
## Installing the Immich Application
|
||||
|
||||
To install the **Immich** application, go to **Apps**, click **Discover Apps**, either begin typing Immich into the search field or scroll down to locate the **Immich** application widget.
|
||||
To install the **Immich** application, go to **Apps**, click **Discover Apps**, and either begin typing Immich into the search field or scroll down to locate the **Immich** application widget.
|
||||
|
||||
<div style={{ marginBottom: '2rem', border: '1px solid #ccc', padding: '1rem', borderRadius: '8px' }}>
|
||||
|
||||
Click on the widget to open the **Immich** application details screen.
|
||||
<img
|
||||
src={require('./img/truenas01.webp').default}
|
||||
src={require('./img/truenas/truenas01.webp').default}
|
||||
width="50%"
|
||||
alt="Immich App Widget"
|
||||
className="border rounded-xl"
|
||||
/>
|
||||
|
||||
Click on the widget to open the **Immich** application details screen.
|
||||
</div>
|
||||
|
||||
<br/><br/>
|
||||
<div style={{ marginBottom: '2rem', border: '1px solid #ccc', padding: '1rem', borderRadius: '8px' }}>
|
||||
|
||||
Click **Install** to open the Immich application configuration screen.
|
||||
<img
|
||||
src={require('./img/truenas02.webp').default}
|
||||
src={require('./img/truenas/truenas02.webp').default}
|
||||
width="100%"
|
||||
alt="Immich App Details Screen"
|
||||
className="border rounded-xl"
|
||||
/>
|
||||
|
||||
Click **Install** to open the Immich application configuration screen.
|
||||
|
||||
<br/><br/>
|
||||
</div>
|
||||
|
||||
Application configuration settings are presented in several sections, each explained below.
|
||||
To find specific fields click in the **Search Input Fields** search field, scroll down to a particular section or click on the section heading on the navigation area in the upper-right corner.
|
||||
To find specific fields, click in the **Search Input Fields** search field, scroll down to a particular section, or click on the section heading on the navigation area in the upper-right corner.
|
||||
|
||||
### Application Name and Version
|
||||
|
||||
<img
|
||||
src={require('./img/truenas03.webp').default}
|
||||
src={require('./img/truenas/truenas03.webp').default}
|
||||
width="100%"
|
||||
alt="Install Immich Screen"
|
||||
className="border rounded-xl"
|
||||
className="border rounded-xl mb-4"
|
||||
/>
|
||||
|
||||
Accept the default value or enter a name in **Application Name** field.
|
||||
In most cases use the default name, but if adding a second deployment of the application you must change this name.
|
||||
Keep the default value or enter a name in the **Application Name** field.
|
||||
Change it if you’re deploying a second instance.
|
||||
|
||||
Accept the default version number in **Version**.
|
||||
When a new version becomes available, the application has an update badge.
|
||||
The **Installed Applications** screen shows the option to update applications.
|
||||
This is the Immich app version within the TrueNAS catalog (different from the Immich release version).
|
||||
|
||||
### Immich Configuration
|
||||
|
||||
<img
|
||||
src={require('./img/truenas05.webp').default}
|
||||
src={require('./img/truenas/truenas04.webp').default}
|
||||
width="40%"
|
||||
alt="Configuration Settings"
|
||||
className="border rounded-xl mb-4"
|
||||
/>
|
||||
|
||||
The **Timezone** is set to the system default, which usually matches your local timezone. You can change it to another timezone if you prefer.
|
||||
|
||||
**Enable Machine Learning** is enabled by default. It allows Immich to use machine learning features such as face recognition, image search, and smart duplicate detection. Untick this option if you do not want to use these features.
|
||||
|
||||
Select the **Machine Learning Image Type** based on the hardware you have. More details here: [Hardware-Accelerated Machine Learning](/docs/features/ml-hardware-acceleration.md)
|
||||
|
||||
**Database Password** should be set to a custom value using only the characters `A-Za-z0-9`. This password is used to secure the Postgres database.
|
||||
|
||||
**Redis Password** should be set to a custom value using only the characters `A-Za-z0-9`. Preferably, use a different password from the database password.
|
||||
|
||||
Keep the **Log Level** to the default `Log` value.
|
||||
|
||||
Leave **Hugging Face Endpoint** blank. (This is used to download ML models from a different source.)
|
||||
|
||||
Set **Database Storage Type** to the type of storage (**HDD** or **SSD**) that the pool where the **pgData** dataset is located uses.
|
||||
|
||||
**Additional Environment Variables** can be left blank.
|
||||
|
||||
<details>
|
||||
<summary>Advanced users: Adding Environment Variables</summary>
|
||||
|
||||
Environment variables can be set by clicking the **Add** button and filling in the **Name** and **Value** fields.
|
||||
|
||||
<img
|
||||
src={require('./img/truenas/truenas05.webp').default}
|
||||
width="40%"
|
||||
alt="Environment Variables"
|
||||
className="border rounded-xl"
|
||||
/>
|
||||
|
||||
Accept the default value in **Timezone** or change to match your local timezone.
|
||||
**Timezone** is only used by the Immich `exiftool` microservice if it cannot be determined from the image metadata.
|
||||
These are used to add custom configuration options or to enable specific features.
|
||||
More information on available environment variables can be found in the **[environment variables documentation](/docs/install/environment-variables/)**.
|
||||
|
||||
Untick **Enable Machine Learning** if you will not use face recognition, image search, and smart duplicate detection.
|
||||
:::info
|
||||
Some environment variables are not available for the TrueNAS Community Edition app as they can be configured through GUI options in the [Edit Immich screen](#edit-app-settings).
|
||||
|
||||
Accept the default option or select the **Machine Learning Image Type** for your hardware based on the [Hardware-Accelerated Machine Learning Supported Backends](/docs/features/ml-hardware-acceleration.md#supported-backends).
|
||||
Some examples are: `IMMICH_VERSION`, `UPLOAD_LOCATION`, `DB_DATA_LOCATION`, `TZ`, `IMMICH_LOG_LEVEL`, `DB_PASSWORD`, `REDIS_PASSWORD`.
|
||||
:::
|
||||
|
||||
Immich's default is `postgres` but you should consider setting the **Database Password** to a custom value using only the characters `A-Za-z0-9`.
|
||||
</details>
|
||||
|
||||
The **Redis Password** should be set to a custom value using only the characters `A-Za-z0-9`.
|
||||
### User and Group Configuration
|
||||
|
||||
Accept the **Log Level** default of **Log**.
|
||||
Applications in TrueNAS run as a specific user and group. Immich uses the default user `apps` (UID 568) and the default group `apps` (GID 568).
|
||||
|
||||
Leave **Hugging Face Endpoint** blank. (This is for downloading ML models from a different source.)
|
||||
<img
|
||||
src={require('./img/truenas/truenas06.webp').default}
|
||||
width="40%"
|
||||
alt="User and Group Configuration"
|
||||
className="border rounded-xl"
|
||||
/>
|
||||
|
||||
Leave **Additional Environment Variables** blank or see [Environment Variables](#environment-variables) to set before installing.
|
||||
- **User ID**: Keep the default value `apps` (UID 568) or define a different one if needed.
|
||||
|
||||
- **Group ID**: Keep the default value `apps` (GID 568) or define a different one if needed.
|
||||
|
||||
:::warning
|
||||
If you change the user or group, make sure that the datasets you created for Immich data storage have the correct permissions set for that user and group as specified in the [Setting up Storage Datasets](#setting-up-storage-datasets) section above.
|
||||
:::
|
||||
|
||||
### Network Configuration
|
||||
|
||||
<img
|
||||
src={require('./img/truenas06.webp').default}
|
||||
src={require('./img/truenas/truenas07.webp').default}
|
||||
width="40%"
|
||||
alt="Networking Settings"
|
||||
className="border rounded-xl"
|
||||
/>
|
||||
|
||||
Accept the default port `30041` in **WebUI Port** or enter a custom port number.
|
||||
:::info Allowed Port Numbers
|
||||
Only numbers within the range 9000-65535 may be used on TrueNAS versions below TrueNAS Community Edition 24.10 Electric Eel.
|
||||
- **Port Bind Mode**: This lets you expose the port to the host system, allowing you to access Immich from outside the TrueNAS system. Keep the default **_Publish port on the host for external access_** value unless you have a specific reason to change it.
|
||||
|
||||
Regardless of version, to avoid port conflicts, don't use [ports on this list](https://www.truenas.com/docs/solutions/optimizations/security/#truenas-default-ports).
|
||||
:::
|
||||
- **Port Number**: Keep the default port `30041` or enter a custom port number.
|
||||
|
||||
- **Host IPs**: Leave the default blank value.
|
||||
|
||||
### Storage Configuration
|
||||
|
||||
Immich requires seven storage datasets.
|
||||
|
||||
<img
|
||||
src={require('./img/truenas07.webp').default}
|
||||
width="20%"
|
||||
alt="Configure Storage ixVolumes"
|
||||
className="border rounded-xl"
|
||||
/>
|
||||
|
||||
:::note Default Setting (Not recommended)
|
||||
The default setting for datasets is **ixVolume (dataset created automatically by the system)** but this results in your data being harder to access manually and can result in data loss if you delete the immich app. (Not recommended)
|
||||
:::danger Default Settings (Not recommended)
|
||||
The default setting for datasets is **ixVolume (dataset created automatically by the system)**. This is not recommended because it makes your data harder to access manually, can lead to data loss if you delete the Immich app, and makes snapshots and replication tasks harder to manage. Use the **Host Path (Path that already exists on the system)** option instead.
|
||||
:::
|
||||
|
||||
For each Storage option select **Host Path (Path that already exists on the system)** and then select the matching dataset [created before installing the app](#setting-up-storage-datasets): **Immich Library Storage**: `library`, **Immich Uploads Storage**: `upload`, **Immich Thumbs Storage**: `thumbs`, **Immich Profile Storage**: `profile`, **Immich Video Storage**: `video`, **Immich Backups Storage**: `backups`, **Postgres Data Storage**: `pgData`.
|
||||
The storage configuration section allows you to set up the storage locations for Immich data. You can select the datasets created in the previous step.
|
||||
|
||||
<img
|
||||
src={require('./img/truenas08.webp').default}
|
||||
src={require('./img/truenas/truenas08.webp').default}
|
||||
width="40%"
|
||||
alt="Configure Storage Host Paths"
|
||||
alt="Configure Storage Volumes"
|
||||
className="border rounded-xl"
|
||||
/>
|
||||
The image above has example values.
|
||||
|
||||
<br/>
|
||||
For the Data Storage, select **Host Path (Path that already exists on the system)** and then select the dataset you created for Immich data storage, for example, `data`.
|
||||
|
||||
### Additional Storage [(External Libraries)](/docs/features/libraries)
|
||||
The Machine Learning cache can be left with default _Temporary_
|
||||
|
||||
For the Postgres Data Storage, select **Host Path (Path that already exists on the system)** and then select the dataset you created for Postgres data storage, for example, `pgData`.
|
||||
|
||||
:::info
|
||||
**Postgres Data Storage**
|
||||
Once **Host Path** is selected, a checkbox appears with **_Automatic Permissions_**. If you have not set the ownership of the **pgData** dataset to `netdata` (UID 999), tick this box as it will set the user ownership to `netdata` (UID 999) and the group ownership to `docker` (GID 999) automatically. If you have set the ownership of the **pgData** dataset to `netdata` (UID 999), you can leave this box unticked.
|
||||
:::
|
||||
|
||||
### Additional Storage (Advanced Users)
|
||||
|
||||
<details>
|
||||
<summary>External Libraries</summary>
|
||||
|
||||
:::danger Advanced Users Only
|
||||
This feature should only be used by advanced users. If this is your first time installing Immich, then DO NOT mount an external library until you have a working setup. Also, your mount path MUST be something unique and should NOT be your library or upload location or a Linux directory like `/lib`. The picture below shows a valid example.
|
||||
This feature should only be used by advanced users. If this is your first time installing Immich, then DO NOT mount an external library until you have a working setup.
|
||||
:::
|
||||
|
||||
<img
|
||||
src={require('./img/truenas10.webp').default}
|
||||
src={require('./img/truenas/truenas09.webp').default}
|
||||
width="40%"
|
||||
alt="Configure Storage Host Paths"
|
||||
alt="Add External Libraries with Additional Storage"
|
||||
className="border rounded-xl"
|
||||
/>
|
||||
|
||||
You may configure [External Libraries](/docs/features/libraries) by mounting them using **Additional Storage**.
|
||||
The **Mount Path** is the location you will need to copy and paste into the External Library settings within Immich.
|
||||
The **Host Path** is the location on the TrueNAS Community Edition server where your external library is located.
|
||||
You may configure [external libraries](/docs/features/libraries) by mounting them using **Additional Storage**.
|
||||
|
||||
<!-- A section for Labels would go here but I don't know what they do. -->
|
||||
The dataset that contains your external library files must at least give **read** access to the user running Immich (Default: `apps` (UID 568), `apps` (GID 568)).
|
||||
If you want to be able to delete files or edit metadata in the external library using Immich, you will need to give the **modify** permission to the user running Immich.
|
||||
|
||||
- **Mount Path** is the location you will need to copy and paste into the external library settings within Immich.
|
||||
- **Host Path** is the location on the TrueNAS Community Edition server where your external library is located.
|
||||
- **Read Only** is a checkbox that you can tick if you want to prevent Immich from modifying the files in the external library. This is useful if you want to use Immich to view and search your external library without modifying it.
|
||||
|
||||
:::warning
|
||||
Each mount path MUST be something unique and should NOT be your library or upload location or a Linux directory like `/lib`.
|
||||
|
||||
A general recommendation is to mount any external libraries to a path beginning with `/mnt` or `/media` followed by a unique name, such as `/mnt/external-libraries` or `/media/my-external-libraries`. If you plan to mount multiple external libraries, you can use paths like `/mnt/external-libraries/library1`, `/mnt/external-libraries/library2`, etc.
|
||||
:::
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>Multiple Datasets for Immich Storage</summary>
|
||||
|
||||
:::danger Advanced Users Only
|
||||
This feature should only be used by advanced users.
|
||||
:::
|
||||
|
||||
Immich can use multiple datasets for its storage, allowing you to manage your data more granularly, similar to the old storage configuration. This is useful if you want to separate your data into different datasets for performance or organizational reasons. There is a general guide for this [here](/docs/guides/custom-locations), but read on for the TrueNAS guide.
|
||||
|
||||
Each additional dataset must grant the **_modify_** permission to the user who will run Immich (default: `apps` (UID 568), `apps` (GID 568)).
|
||||
As described in the [Setting up Storage Datasets](#setting-up-storage-datasets) section above, you have to create the datasets with the **Apps** preset to ensure the correct permissions are set, or you can set the permissions manually after creating the datasets.
|
||||
|
||||
Immich uses 6 folders for its storage: `library`, `upload`, `thumbs`, `profile`, `encoded-video`, and `backups`. You can create a dataset for each of these folders or only for some of them.
|
||||
|
||||
To mount these datasets:
|
||||
|
||||
1. Add an **Additional Storage** entry for each dataset you want to use.
|
||||
2. Select **Type** as **Host Path (Path that already exists on the system)**.
|
||||
3. Enter the **Mount Path** with `/data/<folder-name>`. The `<folder-name>` is the name of the folder you want to mount, for example, `library`, `upload`, `thumbs`, `profile`, `encoded-video`, or `backups`.
|
||||
:::danger Important
|
||||
You have to write the full path, including `/data/`, as Immich expects the data to be in that location.
|
||||
If you do not include this path, Immich will not be able to find the data and will not write the data to the location you specified.
|
||||
:::
|
||||
4. Select the **Host Path** as the dataset you created for that folder, for example, `/mnt/tank/immich/library`, `/mnt/tank/immich/upload`, etc.
|
||||
|
||||
<img
|
||||
src={require('./img/truenas/truenas10.webp').default}
|
||||
width="40%"
|
||||
alt="Use Multiple Datasets for Immich Storage with Additional Storage"
|
||||
className="border rounded-xl"
|
||||
/>
|
||||
|
||||
</details>
|
||||
|
||||
<!-- A section for Labels could be added, but I don't think it is needed as they are of no use for Immich. -->
|
||||
|
||||
### Resources Configuration
|
||||
|
||||
<img
|
||||
src={require('./img/truenas09.webp').default}
|
||||
src={require('./img/truenas/truenas11.webp').default}
|
||||
width="40%"
|
||||
alt="Resource Limits"
|
||||
className="border rounded-xl"
|
||||
/>
|
||||
|
||||
Accept the default **CPU** limit of `2` threads or specify the number of threads (CPUs with Multi-/Hyper-threading have 2 threads per core).
|
||||
- **CPU**: Depending on your system resources, you can keep the default value of `2` threads or specify a different number. Immich recommends at least `8` threads.
|
||||
|
||||
Specify the **Memory** limit in MB of RAM. Immich recommends at least 6000 MB (6GB). If you selected **Enable Machine Learning** in **Immich Configuration**, you should probably set this above 8000 MB.
|
||||
- **Memory**: Limit in MB of RAM. Immich recommends at least 6000 MB (6GB). If you selected **Enable Machine Learning** in **Immich Configuration**, you should probably set this above 8000 MB.
|
||||
|
||||
:::info Older TrueNAS Versions
|
||||
Before TrueNAS Community Edition version 24.10 Electric Eel:
|
||||
Both **CPU** and **Memory** are limits, not reservations. This means that Immich can use up to the specified amount of CPU threads and RAM, but it will not reserve that amount of resources at all times. The system will allocate resources as needed, and Immich will use less than the specified amount most of the time.
|
||||
|
||||
The **CPU** value was specified in a different format with a default of `4000m` which is 4 threads.
|
||||
- Enable **GPU Configuration** options if you have a GPU or CPU with integrated graphics that you will use for [Hardware Transcoding](/docs/features/hardware-transcoding) and/or [Hardware-Accelerated Machine Learning](/docs/features/ml-hardware-acceleration.md).
|
||||
|
||||
The **Memory** value was specified in a different format with a default of `8Gi` which is 8 GiB of RAM. The value was specified in bytes or a number with a measurement suffix. Examples: `129M`, `123Mi`, `1000000000`
|
||||
:::
|
||||
|
||||
Enable **GPU Configuration** options if you have a GPU that you will use for [Hardware Transcoding](/docs/features/hardware-transcoding) and/or [Hardware-Accelerated Machine Learning](/docs/features/ml-hardware-acceleration.md). More info: [GPU Passthrough Docs for TrueNAS Apps](https://apps.truenas.com/managing-apps/installing-apps/#gpu-passthrough)
|
||||
The process for NVIDIA GPU passthrough requires additional steps.
|
||||
More details here: [GPU Passthrough Docs for TrueNAS Apps](https://apps.truenas.com/managing-apps/installing-apps/#gpu-passthrough)
|
||||
|
||||
### Install
|
||||
|
||||
Finally, click **Install**.
|
||||
The system opens the **Installed Applications** screen with the Immich app in the **Deploying** state.
|
||||
When the installation completes it changes to **Running**.
|
||||
When the installation completes, it changes to **Running**.
|
||||
|
||||
<img
|
||||
src={require('./img/truenas04.webp').default}
|
||||
src={require('./img/truenas/truenas12.webp').default}
|
||||
width="100%"
|
||||
alt="Immich Installed"
|
||||
className="border rounded-xl"
|
||||
/>
|
||||
|
||||
Click **Web Portal** on the **Application Info** widget to open the Immich web interface to set up your account and begin uploading photos.
|
||||
Click **Web Portal** on the **Application Info** widget, or go to the URL `http://<your-truenas-ip>:30041` in your web browser to open the Immich web interface. This will show you the onboarding process to set up your first user account, which will be an administrator account.
|
||||
|
||||
After that, you can start using Immich to upload and manage your photos and videos.
|
||||
|
||||
:::tip
|
||||
For more information on how to use the application once installed, please refer to the [Post Install](/docs/install/post-install.mdx) guide.
|
||||
@@ -228,23 +344,6 @@ For more information on how to use the application once installed, please refer
|
||||
- Click **Update** at the very bottom of the page to save changes.
|
||||
- TrueNAS automatically updates, recreates, and redeploys the Immich container with the updated settings.
|
||||
|
||||
## Environment Variables
|
||||
|
||||
You can set [Environment Variables](/docs/install/environment-variables) by clicking **Add** on the **Additional Environment Variables** option and filling in the **Name** and **Value**.
|
||||
|
||||
<img
|
||||
src={require('./img/truenas11.webp').default}
|
||||
width="40%"
|
||||
alt="Environment Variables"
|
||||
className="border rounded-xl"
|
||||
/>
|
||||
|
||||
:::info
|
||||
Some Environment Variables are not available for the TrueNAS Community Edition app. This is mainly because they can be configured through GUI options in the [Edit Immich screen](#edit-app-settings).
|
||||
|
||||
Some examples are: `IMMICH_VERSION`, `UPLOAD_LOCATION`, `DB_DATA_LOCATION`, `TZ`, `IMMICH_LOG_LEVEL`, `DB_PASSWORD`, `REDIS_PASSWORD`.
|
||||
:::
|
||||
|
||||
## Updating the App
|
||||
|
||||
:::danger
|
||||
@@ -261,3 +360,116 @@ To update the app to the latest version:
|
||||
- You may view the Changelog.
|
||||
- Click **Upgrade** to begin the process; this opens a dialog that shows the upgrade progress.
|
||||
- When complete, the update badge and buttons disappear and the application Update state on the Installed screen changes from **Update Available** to **Up to date**.
|
||||
|
||||
## Migration
|
||||
|
||||
:::danger
|
||||
Perform a backup of your Immich data before proceeding with the migration steps below. This is crucial to prevent any data loss if something goes wrong during the migration process.
|
||||
|
||||
The migration should also be performed when the Immich app is not running to ensure no data is being written while you are copying the data.
|
||||
:::
|
||||
|
||||
### Migration from Old Storage Configuration
|
||||
|
||||
There are two ways to migrate from the old storage configuration to the new one, depending on whether you want to keep the old multiple datasets or move to a two-dataset configuration: a single dataset for Immich data storage and a single dataset for Postgres data storage.
|
||||
|
||||
:::note Old TrueNAS Versions Permissions
|
||||
If you were using an older version of TrueNAS (before 24.10.2.2), the datasets, except the one for **pgData**, only had to be owned by the `root` user (UID 0). You might have to add the **modify** permission for the `apps` user (UID 568), or the user you want to run Immich as, to all of them except **pgData**. The steps to add or change ACL permissions are described in the [TrueNAS documentation](https://www.truenas.com/docs/scale/scaletutorials/datasets/permissionsscale/).
|
||||
:::
|
||||
|
||||
<Tabs groupId="truenas-migration-tabs">
|
||||
<TabItem value="migrate-new-dataset" label="Migrate data to a new dataset (recommended)" default>
|
||||
|
||||
To migrate from the old storage configuration to the new one, you will need to create a new dataset for the Immich data storage and copy the data from the old datasets to the new ones. The steps are as follows:
|
||||
|
||||
1. **Stop the Immich app** from the TrueNAS web interface to ensure no data is being written while you are copying the data.
|
||||
2. **Create a new dataset** for the Immich data storage, for example, `data`. As described in the [Setting up Storage Datasets](#setting-up-storage-datasets) section above, create the dataset with the **Apps** preset to ensure the correct permissions are set.
|
||||
3. **Copy the data** from the old datasets to the new dataset. We advise using the `rsync` command to copy the data, as it will preserve the permissions and ownership of the files. The following commands are examples:
|
||||
|
||||
```bash
|
||||
rsync -av /mnt/tank/immich/library/ /mnt/tank/immich/data/library/
|
||||
rsync -av /mnt/tank/immich/upload/ /mnt/tank/immich/data/upload/
|
||||
rsync -av /mnt/tank/immich/thumbs/ /mnt/tank/immich/data/thumbs/
|
||||
rsync -av /mnt/tank/immich/profile/ /mnt/tank/immich/data/profile/
|
||||
rsync -av /mnt/tank/immich/video/ /mnt/tank/immich/data/encoded-video/
|
||||
rsync -av /mnt/tank/immich/backups/ /mnt/tank/immich/data/backups/
|
||||
```
|
||||
|
||||
Make sure to replace `/mnt/tank/immich/` with the correct path to your old datasets and `/mnt/tank/immich/data/` with the correct path to your new dataset.
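If you want to preview what `rsync` would transfer before copying anything, a dry run is a safe first step (a sketch using the same example paths as above):

```bash
# -n (dry run) lists what would be copied without writing anything;
# drop -n to perform the actual copy
rsync -avn /mnt/tank/immich/library/ /mnt/tank/immich/data/library/
```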
|
||||
|
||||
:::tip
|
||||
If you were using **ixVolume (dataset created automatically by the system)** for Immich data storage, the path to the data should be `/mnt/.ix-apps/app_mounts/immich/`. Use this path instead of `/mnt/tank/immich/` in the `rsync` commands above, for example:
|
||||
|
||||
```bash
|
||||
rsync -av /mnt/.ix-apps/app_mounts/immich/library/ /mnt/tank/immich/data/library/
|
||||
```
|
||||
|
||||
If you were also using an ixVolume for Postgres data storage, you should first create the pgData dataset, as described in the [Setting up Storage Datasets](#setting-up-storage-datasets) section above, and then use the following command to copy the Postgres data:
|
||||
|
||||
```bash
|
||||
rsync -av /mnt/.ix-apps/app_mounts/immich/pgData/ /mnt/tank/immich/pgData/
|
||||
```
|
||||
|
||||
:::
|
||||
|
||||
:::warning
|
||||
Make sure that for each folder, the `.immich` file is copied as well, as it contains important metadata for Immich. If for some reason the `.immich` file is not copied, you can copy it manually with the `rsync` command, for example:
|
||||
|
||||
```bash
|
||||
rsync -av /mnt/tank/immich/library/.immich /mnt/tank/immich/data/library/
|
||||
```
|
||||
|
||||
Replace `library` with the name of the folder where you are copying the file.
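A quick way to confirm the marker files made it across is to list them in the new dataset (a sketch; adjust the path to your new dataset):

```bash
# Lists the .immich marker file in each copied folder; a missing folder
# is reported as "No such file or directory"
ls -la /mnt/tank/immich/data/*/.immich
```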
|
||||
:::
|
||||
|
||||
4. **Update the permissions**. Because the copied data keeps its original permissions and ownership, you need to ensure that the `apps` user (UID 568) has the correct permissions on all of it. If you just created the dataset with the **Apps** preset, go to the **Datasets** screen in the TrueNAS web interface, select the **data** dataset, click the **Edit** button next to **Permissions**, tick the "Apply permissions recursively" checkbox, and click **Save**. This applies the correct permissions to all the copied data.
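If you prefer doing this from a shell instead of the web interface, a recursive ownership change achieves the same result (a sketch; the path and the default `apps` UID/GID of 568 are assumptions to adjust to your setup):

```bash
# Give the apps user (UID/GID 568) ownership of everything that was copied
sudo chown -R 568:568 /mnt/tank/immich/data
```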
|
||||
5. **Update the Immich app** to use the new dataset:
|
||||
- Go to the **Installed Applications** screen and select Immich from the list of installed applications.
|
||||
- Click **Edit** on the **Application Info** widget.
|
||||
- In the **Storage Configuration** section, untick the **Use Old Storage Configuration (Deprecated)** checkbox.
|
||||
- For the **Data Storage**, select **Host Path (Path that already exists on the system)** and then select the new dataset you created for Immich data storage, for example, `data`.
|
||||
- For the **Postgres Data Storage**, verify that it is still set to the dataset you created for Postgres data storage, for example, `pgData`.
|
||||
- Click **Update** at the bottom of the page to save changes.
|
||||
|
||||
6. **Start the Immich app** from the TrueNAS web interface.
|
||||
|
||||
This will recreate the Immich container with the new storage configuration and start the app.
|
||||
|
||||
If everything went well, you should now be able to access Immich with the new storage configuration. You can verify that the data has been copied correctly by checking the Immich web interface and ensuring that all your photos and videos are still available. You may delete the old datasets, if you no longer need them, using the TrueNAS web interface.
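Before deleting anything, an optional sanity check is to compare file counts between each old dataset and its copy (a sketch using the example paths from this guide):

```bash
# The two counts should match for each folder you copied
find /mnt/tank/immich/library -type f | wc -l
find /mnt/tank/immich/data/library -type f | wc -l
```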
|
||||
|
||||
If you were using **ixVolume (dataset created automatically by the system)** or folders for Immich data storage, you can delete the old datasets using the following commands:
|
||||
|
||||
```bash
|
||||
rm -r /mnt/.ix-apps/app_mounts/immich/library
|
||||
rm -r /mnt/.ix-apps/app_mounts/immich/upload
|
||||
rm -r /mnt/.ix-apps/app_mounts/immich/thumbs
|
||||
rm -r /mnt/.ix-apps/app_mounts/immich/profile
|
||||
rm -r /mnt/.ix-apps/app_mounts/immich/video
|
||||
rm -r /mnt/.ix-apps/app_mounts/immich/backups
|
||||
```
|
||||
|
||||
</TabItem>
|
||||
<TabItem value="migrate-old-dataset" label="Keep the existing datasets">
|
||||
|
||||
To migrate from the old storage configuration to the new one without creating new datasets, follow these steps:
|
||||
1. **Stop the Immich app** from the TrueNAS web interface to ensure no data is being written while you are updating the app.
|
||||
2. **Update the datasets permissions**: Ensure that the datasets used for Immich data storage (`library`, `upload`, `thumbs`, `profile`, `video`, `backups`) have the correct permissions set for the user who will run Immich. The user should have ***modify*** permissions on these datasets. The default user for Immich is `apps` (UID 568) and the default group is `apps` (GID 568). If you are using a different user, make sure to set the permissions accordingly. You can do this from the TrueNAS web interface by going to the **Datasets** screen, selecting each dataset, clicking on the **Edit** button next to **Permissions**, and adding the user with ***modify*** permissions.
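To verify from a shell that the user can actually write to a dataset, a check along these lines can help (a sketch; the example path and the default UID 568 are assumptions):

```bash
# -n prints numeric UID/GID so you can confirm ownership of the dataset directly
ls -ldn /mnt/tank/immich/library

# Run a write test as UID 568 (the default apps user)
sudo -u '#568' test -w /mnt/tank/immich/library && echo writable || echo not-writable
```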
|
||||
3. **Update the Immich app** to use the existing datasets:
|
||||
- Go to the **Installed Applications** screen and select Immich from the list of installed applications.
|
||||
- Click **Edit** on the **Application Info** widget.
|
||||
- In the **Storage Configuration** section, untick the **Use Old Storage Configuration (Deprecated)** checkbox.
|
||||
- For the **Data Storage**, you can keep the **ixVolume (dataset created automatically by the system)**, as no data will be written directly to it. However, we recommend selecting **Host Path (Path that already exists on the system)** and then selecting a **new** dataset you created for Immich data storage, for example, `data`.
|
||||
- For the **Postgres Data Storage**, keep **Host Path (Path that already exists on the system)** and then select the existing dataset you used for Postgres data storage, for example, `pgData`.
|
||||
- Following the instructions in the [Multiple Datasets for Immich Storage](#additional-storage-advanced-users) section, you can add, **for each old dataset**, a new Additional Storage with the following settings:
|
||||
- **Type**: `Host Path (Path that already exists on the system)`
|
||||
- **Mount Path**: `/data/<folder-name>` (e.g. `/data/library`)
|
||||
- **Host Path**: `/mnt/<your-pool-name>/<dataset-name>` (e.g. `/mnt/tank/immich/library`)
|
||||
:::danger Ensure you use the correct path names
|
||||
Make sure to replace `<folder-name>` with the actual name of the folder used by Immich: `library`, `upload`, `thumbs`, `profile`, `encoded-video`, and `backups`. Also, replace `<your-pool-name>` and `<dataset-name>` with the actual names of your pool and dataset.
|
||||
:::
|
||||
- **Read Only**: Keep it unticked as Immich needs to write to these datasets.
|
||||
- Click **Update** at the bottom of the page to save changes.
|
||||
4. **Start the Immich app** from the TrueNAS web interface. This will recreate the Immich container with the new storage configuration and start the app. If everything went well, you should now be able to access Immich with the new storage configuration. You can verify that the data is still available by checking the Immich web interface and ensuring that all your photos and videos are still accessible.
|
||||
|
||||
</TabItem>
|
||||
</Tabs>
|
||||
|
||||
@@ -27,3 +27,102 @@ docker image prune
|
||||
[watchtower]: https://containrrr.dev/watchtower/
|
||||
[breaking]: https://github.com/immich-app/immich/discussions?discussions_q=label%3Achangelog%3Abreaking-change+sort%3Adate_created
|
||||
[releases]: https://github.com/immich-app/immich/releases
|
||||
|
||||
## Migrating to VectorChord
|
||||
|
||||
:::info
|
||||
If you deploy Immich using Docker Compose, your `docker-compose.yml` file references `ghcr.io/immich-app/postgres`, and you have not explicitly set the `DB_VECTOR_EXTENSION` environment variable, your Immich database is already using VectorChord and this section does not apply to you.
|
||||
:::
|
||||
|
||||
:::important
|
||||
If you do not deploy Immich using Docker Compose and see a deprecation warning for pgvecto.rs on server startup, you should refer to the maintainers of the Immich distribution for guidance (if using a turnkey solution) or adapt the instructions for your specific setup.
|
||||
:::
|
||||
|
||||
Immich has migrated from the deprecated pgvecto.rs database extension to its successor, [VectorChord](https://github.com/tensorchord/VectorChord), which brings performance improvements in almost every aspect. This section will guide you through making this change in a Docker Compose setup.
|
||||
|
||||
Before making any changes, please [back up your database](/docs/administration/backup-and-restore). While every effort has been made to make this migration as smooth as possible, there’s always a chance that something can go wrong.
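If you want an extra manual dump in addition to your regular backups, the approach from the backup guide also works here (a sketch assuming the default Compose container name `immich_postgres` and the `postgres` database user; adjust both to your setup):

```bash
# Dump the entire database to a compressed SQL file before changing the image
docker exec -t immich_postgres pg_dumpall --clean --if-exists --username=postgres | gzip > immich-db-before-vectorchord.sql.gz
```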
|
||||
|
||||
After making a backup, please apply the following changes to your `docker-compose.yml` file.
|
||||
|
||||
```diff
|
||||
[...]
|
||||
|
||||
database:
|
||||
container_name: immich_postgres
|
||||
- image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
|
||||
+ image: ghcr.io/immich-app/postgres:14-vectorchord0.4.3-pgvectors0.2.0
|
||||
environment:
|
||||
POSTGRES_PASSWORD: ${DB_PASSWORD}
|
||||
POSTGRES_USER: ${DB_USERNAME}
|
||||
POSTGRES_DB: ${DB_DATABASE_NAME}
|
||||
POSTGRES_INITDB_ARGS: '--data-checksums'
|
||||
+ # Uncomment the DB_STORAGE_TYPE: 'HDD' var if your database isn't stored on SSDs
|
||||
+ # DB_STORAGE_TYPE: 'HDD'
|
||||
volumes:
|
||||
# Do not edit the next line. If you want to change the database storage location on your system, edit the value of DB_DATA_LOCATION in the .env file
|
||||
- ${DB_DATA_LOCATION}:/var/lib/postgresql/data
|
||||
- healthcheck:
|
||||
- test: >-
|
||||
- pg_isready --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" || exit 1;
|
||||
- Chksum="$$(psql --dbname="$${POSTGRES_DB}" --username="$${POSTGRES_USER}" --tuples-only --no-align
|
||||
- --command='SELECT COALESCE(SUM(checksum_failures), 0) FROM pg_stat_database')";
|
||||
- echo "checksum failure count is $$Chksum";
|
||||
- [ "$$Chksum" = '0' ] || exit 1
|
||||
- interval: 5m
|
||||
- start_interval: 30s
|
||||
- start_period: 5m
|
||||
- command: >-
|
||||
- postgres
|
||||
- -c shared_preload_libraries=vectors.so
|
||||
- -c 'search_path="$$user", public, vectors'
|
||||
- -c logging_collector=on
|
||||
- -c max_wal_size=2GB
|
||||
- -c shared_buffers=512MB
|
||||
- -c wal_compression=on
|
||||
+ shm_size: 128mb
|
||||
restart: always
|
||||
|
||||
[...]
|
||||
```
|
||||
|
||||
:::important
|
||||
If you deviated from the defaults of pg14 or pgvectors0.2.0, you must adjust the pg major version and pgvecto.rs version. If you are still using the default `docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0` image, you can just follow the changes above. For example, if the previous image is `docker.io/tensorchord/pgvecto-rs:pg16-v0.3.0`, the new image should be `ghcr.io/immich-app/postgres:16-vectorchord0.3.0-pgvectors0.3.0` instead of the image specified in the diff.
|
||||
:::
|
||||
|
||||
After making these changes, you can start Immich as normal. Immich will make some changes to the DB during startup, which can take anywhere from seconds to minutes to finish, depending on hardware and library size. In particular, it's normal for the server logs to appear stuck at `Reindexing clip_index` and `Reindexing face_index` for some time if you have over 100k assets in Immich and/or Immich is running on a relatively weak server. If you see these logs and there are no errors, just give it time.
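If you want to keep an eye on that phase, tailing the server logs is enough (a sketch assuming the default Compose container name `immich_server`):

```bash
# Follow the server logs and highlight the reindexing messages
docker logs -f immich_server 2>&1 | grep -i reindex
```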
|
||||
|
||||
:::danger
|
||||
After switching to VectorChord, you should not downgrade Immich below 1.133.0.
|
||||
:::
|
||||
|
||||
Please don’t hesitate to contact us on [GitHub](https://github.com/immich-app/immich/discussions) or [Discord](https://discord.immich.app/) if you encounter migration issues.
|
||||
|
||||
### VectorChord FAQ
|
||||
|
||||
#### I have a separate PostgreSQL instance shared with multiple services. How can I switch to VectorChord?
|
||||
|
||||
Please see the [standalone PostgreSQL documentation](/docs/administration/postgres-standalone#migrating-to-vectorchord) for migration instructions. The migration path will be different depending on whether you’re currently using pgvecto.rs or pgvector, as well as whether Immich has superuser DB permissions.
|
||||
|
||||
#### Why are so many lines removed from the `docker-compose.yml` file? Does this mean the health check is removed?
|
||||
|
||||
These lines are now incorporated into the image itself along with some additional tuning.
|
||||
|
||||
#### What does this change mean for my existing DB backups?
|
||||
|
||||
The new DB image includes pgvector and pgvecto.rs in addition to VectorChord, so you can use this image to restore from existing backups that used either of these extensions. However, backups made after switching to VectorChord require an image containing VectorChord to restore successfully.
|
||||
|
||||
#### Do I still need pgvecto.rs installed after migrating to VectorChord?
|
||||
|
||||
pgvecto.rs only needs to be available during the migration, or if you need to restore from a backup that used pgvecto.rs. For a leaner DB and a smaller image, you can optionally switch to an image variant that doesn’t have pgvecto.rs installed after you’ve performed the migration and started Immich: `ghcr.io/immich-app/postgres:14-vectorchord0.4.3`, changing the PostgreSQL version as appropriate.
|
||||
|
||||
#### Why does it matter whether my database is on an SSD or an HDD?
|
||||
|
||||
These storage mediums have different performance characteristics. As a result, the optimal settings for an SSD are not the same as those for an HDD. Either configuration is compatible with SSD and HDD, but using the right configuration will make Immich snappier. As a general tip, we recommend users store the database on an SSD whenever possible.
|
||||
|
||||
#### Can I use the new database image as a general PostgreSQL image outside of Immich?
|
||||
|
||||
It’s a standard PostgreSQL container image that additionally contains the VectorChord, pgvector, and (optionally) pgvecto.rs extensions. If you were using the previous pgvecto.rs image for other purposes, you can similarly do so with this image.
|
||||
|
||||
#### If pgvecto.rs and pgvector still work, why should I switch to VectorChord?
|
||||
|
||||
VectorChord is faster, more stable, uses less RAM, and (with the settings Immich uses) offers higher-quality results than pgvector and pgvecto.rs. This translates to better search and facial recognition experiences. In addition, pgvecto.rs support will be dropped in the future, so switching sooner will avoid disruption.
|
||||
|
||||
20545
docs/package-lock.json
generated
@@ -7,7 +7,8 @@
|
||||
"format": "prettier --check .",
|
||||
"format:fix": "prettier --write .",
|
||||
"start": "docusaurus start --port 3005",
|
||||
"build": "docusaurus build",
|
||||
"copy:openapi": "jq -c < ../open-api/immich-openapi-specs.json > ./static/openapi.json || exit 0",
|
||||
"build": "npm run copy:openapi && docusaurus build",
|
||||
"swizzle": "docusaurus swizzle",
|
||||
"deploy": "docusaurus deploy",
|
||||
"clear": "docusaurus clear",
|
||||
@@ -59,6 +60,6 @@
|
||||
"node": ">=20"
|
||||
},
|
||||
"volta": {
|
||||
"node": "22.17.1"
|
||||
"node": "22.18.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,6 +16,9 @@ import {
|
||||
mdiCloudKeyOutline,
|
||||
mdiRegex,
|
||||
mdiCodeJson,
|
||||
mdiClockOutline,
|
||||
mdiAccountOutline,
|
||||
mdiRestart,
|
||||
} from '@mdi/js';
|
||||
import Layout from '@theme/Layout';
|
||||
import React from 'react';
|
||||
@@ -26,6 +29,42 @@ const withLanguage = (date: Date) => (language: string) => date.toLocaleDateStri
|
||||
type Item = Omit<TimelineItem, 'done' | 'getDateLabel'> & { date: Date };
|
||||
|
||||
const items: Item[] = [
|
||||
{
|
||||
icon: mdiClockOutline,
|
||||
iconColor: 'gray',
|
||||
title: 'setTimeout is cursed',
|
||||
description:
|
||||
'The setTimeout method in JavaScript is cursed when used with small values because the implementation may or may not actually wait the specified time.',
|
||||
link: {
|
||||
url: 'https://github.com/immich-app/immich/pull/20655',
|
||||
text: '#20655',
|
||||
},
|
||||
date: new Date(2025, 7, 4),
|
||||
},
|
||||
{
|
||||
icon: mdiAccountOutline,
|
||||
iconColor: '#DAB1DA',
|
||||
title: 'PostgreSQL USER is cursed',
|
||||
description:
|
||||
'The USER keyword in PostgreSQL is cursed because you can select from it like a table, which leads to confusion if you have a table name user as well.',
|
||||
link: {
|
||||
url: 'https://github.com/immich-app/immich/pull/19891',
|
||||
text: '#19891',
|
||||
},
|
||||
date: new Date(2025, 7, 4),
|
||||
},
|
||||
{
|
||||
icon: mdiRestart,
|
||||
iconColor: '#8395e3',
|
||||
title: 'PostgreSQL RESET is cursed',
|
||||
description:
|
||||
'PostgreSQL RESET is cursed because it is impossible to RESET a PostgreSQL extension parameter if the extension has been uninstalled.',
|
||||
link: {
|
||||
url: 'https://github.com/immich-app/immich/pull/19363',
|
||||
text: '#19363',
|
||||
},
|
||||
date: new Date(2025, 5, 20),
|
||||
},
|
||||
{
|
||||
icon: mdiRegex,
|
||||
iconColor: 'purple',
|
||||
|
||||
16
docs/static/archived-versions.json
vendored
@@ -1,4 +1,20 @@
|
||||
[
|
||||
{
|
||||
"label": "v1.139.0",
|
||||
"url": "https://v1.139.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.138.1",
|
||||
"url": "https://v1.138.1.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.138.0",
|
||||
"url": "https://v1.138.0.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.137.3",
|
||||
"url": "https://v1.137.3.archive.immich.app"
|
||||
},
|
||||
{
|
||||
"label": "v1.137.2",
|
||||
"url": "https://v1.137.2.archive.immich.app"
|
||||
|
||||
1
e2e/.gitignore
vendored
@@ -3,3 +3,4 @@ node_modules/
|
||||
/playwright-report/
|
||||
/blob-report/
|
||||
/playwright/.cache/
|
||||
/dist
|
||||
|
||||
@@ -1 +1 @@
|
||||
22.17.1
|
||||
22.18.0
|
||||
|
||||
1178
e2e/package-lock.json
generated
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "immich-e2e",
|
||||
"version": "1.137.2",
|
||||
"version": "1.139.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"type": "module",
|
||||
@@ -26,7 +26,7 @@
|
||||
"@playwright/test": "^1.44.1",
|
||||
"@socket.io/component-emitter": "^3.1.2",
|
||||
"@types/luxon": "^3.4.2",
|
||||
"@types/node": "^22.16.5",
|
||||
"@types/node": "^22.17.1",
|
||||
"@types/oidc-provider": "^9.0.0",
|
||||
"@types/pg": "^8.15.1",
|
||||
"@types/pngjs": "^6.0.4",
|
||||
@@ -35,7 +35,7 @@
|
||||
"eslint": "^9.14.0",
|
||||
"eslint-config-prettier": "^10.1.8",
|
||||
"eslint-plugin-prettier": "^5.1.3",
|
||||
"eslint-plugin-unicorn": "^59.0.0",
|
||||
"eslint-plugin-unicorn": "^60.0.0",
|
||||
"exiftool-vendored": "^28.3.1",
|
||||
"globals": "^16.0.0",
|
||||
"jose": "^5.6.3",
|
||||
@@ -54,6 +54,6 @@
|
||||
"vitest": "^3.0.0"
|
||||
},
|
||||
"volta": {
|
||||
"node": "22.17.1"
|
||||
"node": "22.18.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -683,7 +683,7 @@ describe('/albums', () => {
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.send({ role: AlbumUserRole.Editor });
|
||||
|
||||
expect(status).toBe(200);
|
||||
expect(status).toBe(204);
|
||||
|
||||
// Get album to verify the role change
|
||||
const { body } = await request(app)
|
||||
|
||||
@@ -555,7 +555,7 @@ describe('/asset', () => {
|
||||
expect(body).toMatchObject({ id: user1Assets[0].id, livePhotoVideoId: null });
|
||||
});
|
||||
|
||||
it('should update date time original when sidecar file contains DateTimeOriginal', async () => {
|
||||
it.skip('should update date time original when sidecar file contains DateTimeOriginal', async () => {
|
||||
const sidecarData = `<?xpacket begin='?' id='W5M0MpCehiHzreSzNTczkc9d'?>
|
||||
<x:xmpmeta xmlns:x='adobe:ns:meta/' x:xmptk='Image::ExifTool 12.40'>
|
||||
<rdf:RDF xmlns:rdf='http://www.w3.org/1999/02/22-rdf-syntax-ns#'>
|
||||
@@ -854,6 +854,30 @@ describe('/asset', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('PUT /assets', () => {
|
||||
it('should update date time original relatively', async () => {
|
||||
const { status, body } = await request(app)
|
||||
.put(`/assets/`)
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.send({ ids: [user1Assets[0].id], dateTimeRelative: -1441 });
|
||||
|
||||
expect(body).toEqual({});
|
||||
expect(status).toEqual(204);
|
||||
|
||||
const result = await request(app)
|
||||
.get(`/assets/${user1Assets[0].id}`)
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`)
|
||||
.send();
|
||||
|
||||
expect(result.body).toMatchObject({
|
||||
id: user1Assets[0].id,
|
||||
exifInfo: expect.objectContaining({
|
||||
dateTimeOriginal: '2023-11-19T01:10:00+00:00',
|
||||
}),
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('POST /assets', () => {
|
||||
beforeAll(setupTests, 30_000);
|
||||
|
||||
|
||||
@@ -116,7 +116,7 @@ describe('/partners', () => {
|
||||
.delete(`/partners/${user3.userId}`)
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`);
|
||||
|
||||
expect(status).toBe(200);
|
||||
expect(status).toBe(204);
|
||||
});
|
||||
|
||||
it('should throw a bad request if partner not found', async () => {
|
||||
|
||||
@@ -9,7 +9,7 @@ import {
|
||||
} from '@immich/sdk';
|
||||
import { createUserDto, uuidDto } from 'src/fixtures';
|
||||
import { errorDto } from 'src/responses';
|
||||
import { app, asBearerAuth, shareUrl, utils } from 'src/utils';
|
||||
import { app, asBearerAuth, baseUrl, shareUrl, utils } from 'src/utils';
|
||||
import request from 'supertest';
|
||||
import { beforeAll, describe, expect, it } from 'vitest';
|
||||
|
||||
@@ -78,6 +78,7 @@ describe('/shared-links', () => {
|
||||
type: SharedLinkType.Album,
|
||||
albumId: metadataAlbum.id,
|
||||
showMetadata: true,
|
||||
slug: 'metadata-album',
|
||||
}),
|
||||
utils.createSharedLink(user1.accessToken, {
|
||||
type: SharedLinkType.Album,
|
||||
@@ -138,6 +139,17 @@ describe('/shared-links', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /s/:slug', () => {
|
||||
it('should work for slug auth', async () => {
|
||||
const resp = await request(baseUrl).get(`/s/${linkWithMetadata.slug}`);
|
||||
expect(resp.status).toBe(200);
|
||||
expect(resp.header['content-type']).toContain('text/html');
|
||||
expect(resp.text).toContain(
|
||||
`<meta name="description" content="${metadataAlbum.assets.length} shared photos & videos" />`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('GET /shared-links', () => {
|
||||
it('should require authentication', async () => {
|
||||
const { status, body } = await request(app).get('/shared-links');
|
||||
@@ -473,7 +485,7 @@ describe('/shared-links', () => {
|
||||
.delete(`/shared-links/${linkWithAlbum.id}`)
|
||||
.set('Authorization', `Bearer ${user1.accessToken}`);
|
||||
|
||||
expect(status).toBe(200);
|
||||
expect(status).toBe(204);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -304,7 +304,7 @@ describe('/users', () => {
|
||||
const { status } = await request(app)
|
||||
.delete(`/users/me/license`)
|
||||
.set('Authorization', `Bearer ${nonAdmin.accessToken}`);
|
||||
expect(status).toBe(200);
|
||||
expect(status).toBe(204);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -79,7 +79,7 @@ export const tempDir = tmpdir();
|
||||
export const asBearerAuth = (accessToken: string) => ({ Authorization: `Bearer ${accessToken}` });
|
||||
export const asKeyAuth = (key: string) => ({ 'x-api-key': key });
|
||||
export const immichCli = (args: string[]) =>
|
||||
executeCommand('node', ['node_modules/.bin/immich', '-d', `/${tempDir}/immich/`, ...args]).promise;
|
||||
executeCommand('pnpm', ['exec', 'immich', '-d', `/${tempDir}/immich/`, ...args], { cwd: '../cli' }).promise;
|
||||
export const immichAdmin = (args: string[]) =>
|
||||
executeCommand('docker', ['exec', '-i', 'immich-e2e-server', '/bin/bash', '-c', `immich-admin ${args.join(' ')}`]);
|
||||
export const specialCharStrings = ["'", '"', ',', '{', '}', '*'];
|
||||
|
||||
38
i18n/en.json
@@ -28,6 +28,9 @@
|
||||
"add_to_album": "Add to album",
|
||||
"add_to_album_bottom_sheet_added": "Added to {album}",
|
||||
"add_to_album_bottom_sheet_already_exists": "Already in {album}",
|
||||
"add_to_album_toggle": "Toggle selection for {album}",
|
||||
"add_to_albums": "Add to albums",
|
||||
"add_to_albums_count": "Add to albums ({count})",
|
||||
"add_to_shared_album": "Add to shared album",
|
||||
"add_url": "Add URL",
|
||||
"added_to_archive": "Added to archive",
|
||||
@@ -355,6 +358,9 @@
|
||||
"trash_number_of_days_description": "Number of days to keep the assets in trash before permanently removing them",
|
||||
"trash_settings": "Trash Settings",
|
||||
"trash_settings_description": "Manage trash settings",
|
||||
"unlink_all_oauth_accounts": "Unlink all OAuth accounts",
|
||||
"unlink_all_oauth_accounts_description": "Remember to unlink all OAuth accounts before migrating to a new provider.",
|
||||
"unlink_all_oauth_accounts_prompt": "Are you sure you want to unlink all OAuth accounts? This will reset the OAuth ID for each user and cannot be undone.",
|
||||
"user_cleanup_job": "User cleanup",
|
||||
"user_delete_delay": "<b>{user}</b>'s account and assets will be scheduled for permanent deletion in {delay, plural, one {# day} other {# days}}.",
|
||||
"user_delete_delay_settings": "Delete delay",
|
||||
@@ -494,7 +500,9 @@
|
||||
"assets": "Assets",
|
||||
"assets_added_count": "Added {count, plural, one {# asset} other {# assets}}",
|
||||
"assets_added_to_album_count": "Added {count, plural, one {# asset} other {# assets}} to the album",
|
||||
"assets_added_to_albums_count": "Added {assetTotal, plural, one {# asset} other {# assets}} to {albumTotal} albums",
|
||||
"assets_cannot_be_added_to_album_count": "{count, plural, one {Asset} other {Assets}} cannot be added to the album",
|
||||
"assets_cannot_be_added_to_albums": "{count, plural, one {Asset} other {Assets}} cannot be added to any of the albums",
|
||||
"assets_count": "{count, plural, one {# asset} other {# assets}}",
|
||||
"assets_deleted_permanently": "{count} asset(s) deleted permanently",
|
||||
"assets_deleted_permanently_from_server": "{count} asset(s) deleted permanently from the Immich server",
|
||||
@@ -511,6 +519,7 @@
|
||||
"assets_trashed_count": "Trashed {count, plural, one {# asset} other {# assets}}",
|
||||
"assets_trashed_from_server": "{count} asset(s) trashed from the Immich server",
|
||||
"assets_were_part_of_album_count": "{count, plural, one {Asset was} other {Assets were}} already part of the album",
|
||||
"assets_were_part_of_albums_count": "{count, plural, one {Asset was} other {Assets were}} already part of the albums",
|
||||
"authorized_devices": "Authorized Devices",
|
||||
"automatic_endpoint_switching_subtitle": "Connect locally over designated Wi-Fi when available and use alternative connections elsewhere",
|
||||
"automatic_endpoint_switching_title": "Automatic URL switching",
|
||||
@@ -724,6 +733,7 @@
|
||||
"create_new_user": "Create new user",
|
||||
"create_shared_album_page_share_add_assets": "ADD ASSETS",
|
||||
"create_shared_album_page_share_select_photos": "Select Photos",
|
||||
"create_shared_link": "Create shared link",
|
||||
"create_tag": "Create tag",
|
||||
"create_tag_description": "Create a new tag. For nested tags, please enter the full path of the tag including forward slashes.",
|
||||
"create_user": "Create user",
|
||||
@@ -748,6 +758,7 @@
|
||||
"date_of_birth_saved": "Date of birth saved successfully",
|
||||
"date_range": "Date range",
|
||||
"day": "Day",
|
||||
"days": "Days",
|
||||
"deduplicate_all": "Deduplicate All",
|
||||
"deduplication_criteria_1": "Image size in bytes",
|
||||
"deduplication_criteria_2": "Count of EXIF data",
|
||||
@@ -832,10 +843,12 @@
|
||||
"edit": "Edit",
|
||||
"edit_album": "Edit album",
|
||||
"edit_avatar": "Edit avatar",
|
||||
"edit_birthday": "Edit Birthday",
|
||||
"edit_birthday": "Edit birthday",
|
||||
"edit_date": "Edit date",
|
||||
"edit_date_and_time": "Edit date and time",
|
||||
"edit_date_and_time_action_prompt": "{count} date and time edited",
|
||||
"edit_date_and_time_by_offset": "Change date by offset",
|
||||
"edit_date_and_time_by_offset_interval": "New date range: {from} - {to}",
|
||||
"edit_description": "Edit description",
|
||||
"edit_description_prompt": "Please select a new description:",
|
||||
"edit_exclusion_pattern": "Edit exclusion pattern",
|
||||
@@ -908,6 +921,7 @@
|
||||
"failed_to_load_notifications": "Failed to load notifications",
|
||||
"failed_to_load_people": "Failed to load people",
|
||||
"failed_to_remove_product_key": "Failed to remove product key",
|
||||
"failed_to_reset_pin_code": "Failed to reset PIN code",
|
||||
"failed_to_stack_assets": "Failed to stack assets",
|
||||
"failed_to_unstack_assets": "Failed to un-stack assets",
|
||||
"failed_to_update_notification_status": "Failed to update notification status",
|
||||
@@ -916,6 +930,7 @@
|
||||
"paths_validation_failed": "{paths, plural, one {# path} other {# paths}} failed validation",
|
||||
"profile_picture_transparent_pixels": "Profile pictures cannot have transparent pixels. Please zoom in and/or move the image.",
|
||||
"quota_higher_than_disk_size": "You set a quota higher than the disk size",
|
||||
"something_went_wrong": "Something went wrong",
|
||||
"unable_to_add_album_users": "Unable to add users to album",
|
||||
"unable_to_add_assets_to_shared_link": "Unable to add assets to shared link",
|
||||
"unable_to_add_comment": "Unable to add comment",
|
||||
@@ -1006,9 +1021,6 @@
|
||||
"exif_bottom_sheet_location": "LOCATION",
|
||||
"exif_bottom_sheet_people": "PEOPLE",
|
||||
"exif_bottom_sheet_person_add_person": "Add name",
|
||||
"exif_bottom_sheet_person_age_months": "Age {months} months",
|
||||
"exif_bottom_sheet_person_age_year_months": "Age 1 year, {months} months",
|
||||
"exif_bottom_sheet_person_age_years": "Age {years}",
|
||||
"exit_slideshow": "Exit Slideshow",
|
||||
"expand_all": "Expand all",
|
||||
"experimental_settings_new_asset_list_subtitle": "Work in progress",
|
||||
@@ -1050,11 +1062,13 @@
|
||||
"filter_people": "Filter people",
|
||||
"filter_places": "Filter places",
|
||||
"find_them_fast": "Find them fast by name with search",
|
||||
"first": "First",
|
||||
"fix_incorrect_match": "Fix incorrect match",
|
||||
"folder": "Folder",
|
||||
"folder_not_found": "Folder not found",
|
||||
"folders": "Folders",
|
||||
"folders_feature_description": "Browsing the folder view for the photos and videos on the file system",
|
||||
"forgot_pin_code_question": "Forgot your PIN?",
|
||||
"forward": "Forward",
|
||||
"gcast_enabled": "Google Cast",
|
||||
"gcast_enabled_description": "This feature loads external resources from Google in order to work.",
|
||||
@@ -1109,6 +1123,7 @@
|
||||
"home_page_upload_err_limit": "Can only upload a maximum of 30 assets at a time, skipping",
|
||||
"host": "Host",
|
||||
"hour": "Hour",
|
||||
"hours": "Hours",
|
||||
"id": "ID",
|
||||
"idle": "Idle",
|
||||
"ignore_icloud_photos": "Ignore iCloud photos",
|
||||
@@ -1169,10 +1184,12 @@
|
||||
"language_search_hint": "Search languages...",
|
||||
"language_setting_description": "Select your preferred language",
|
||||
"large_files": "Large Files",
|
||||
"last": "Last",
|
||||
"last_seen": "Last seen",
|
||||
"latest_version": "Latest Version",
|
||||
"latitude": "Latitude",
|
||||
"leave": "Leave",
|
||||
"leave_album": "Leave album",
|
||||
"lens_model": "Lens model",
|
||||
"let_others_respond": "Let others respond",
|
||||
"level": "Level",
|
||||
@@ -1186,6 +1203,7 @@
|
||||
"library_page_sort_title": "Album title",
|
||||
"licenses": "Licenses",
|
||||
"light": "Light",
|
||||
"like": "Like",
|
||||
"like_deleted": "Like deleted",
|
||||
"link_motion_video": "Link motion video",
|
||||
"link_to_oauth": "Link to OAuth",
|
||||
@@ -1252,7 +1270,7 @@
|
||||
"manage_your_devices": "Manage your logged-in devices",
|
||||
"manage_your_oauth_connection": "Manage your OAuth connection",
|
||||
"map": "Map",
|
||||
"map_assets_in_bounds": "{count, plural, one {# photo} other {# photos}}",
|
||||
"map_assets_in_bounds": "{count, plural, =0 {No photos in this area} one {# photo} other {# photos}}",
|
||||
"map_cannot_get_user_location": "Cannot get user's location",
|
||||
"map_location_dialog_yes": "Yes",
|
||||
"map_location_picker_page_use_location": "Use this location",
|
||||
@@ -1260,7 +1278,6 @@
|
||||
"map_location_service_disabled_title": "Location Service disabled",
|
||||
"map_marker_for_images": "Map marker for images taken in {city}, {country}",
|
||||
"map_marker_with_image": "Map marker with image",
|
||||
"map_no_assets_in_bounds": "No photos in this area",
|
||||
"map_no_location_permission_content": "Location permission is needed to display assets from your current location. Do you want to allow it now?",
|
||||
"map_no_location_permission_title": "Location Permission denied",
|
||||
"map_settings": "Map settings",
|
||||
@@ -1297,6 +1314,7 @@
|
||||
"merged_people_count": "Merged {count, plural, one {# person} other {# people}}",
|
||||
"minimize": "Minimize",
|
||||
"minute": "Minute",
|
||||
"minutes": "Minutes",
|
||||
"missing": "Missing",
|
||||
"model": "Model",
|
||||
"month": "Month",
|
||||
@@ -1370,6 +1388,7 @@
|
||||
"oauth": "OAuth",
|
||||
"official_immich_resources": "Official Immich Resources",
|
||||
"offline": "Offline",
|
||||
"offset": "Offset",
|
||||
"ok": "Ok",
|
||||
"oldest_first": "Oldest first",
|
||||
"on_this_device": "On this device",
|
||||
@@ -1447,6 +1466,9 @@
|
||||
"permission_onboarding_permission_limited": "Permission limited. To let Immich backup and manage your entire gallery collection, grant photo and video permissions in Settings.",
|
||||
"permission_onboarding_request": "Immich requires permission to view your photos and videos.",
|
||||
"person": "Person",
|
||||
"person_age_months": "{months, plural, one {# month} other {# months}} old",
|
||||
"person_age_year_months": "1 year, {months, plural, one {# month} other {# months}} old",
|
||||
"person_age_years": "{years, plural, other {# years}} old",
|
||||
"person_birthdate": "Born on {date}",
|
||||
"person_hidden": "{name}{hidden, select, true { (hidden)} other {}}",
|
||||
"photo_shared_all_users": "Looks like you shared your photos with all users or you don't have any user to share with.",
|
||||
@@ -1592,6 +1614,9 @@
|
||||
"reset_password": "Reset password",
|
||||
"reset_people_visibility": "Reset people visibility",
|
||||
"reset_pin_code": "Reset PIN code",
|
||||
"reset_pin_code_description": "If you forgot your PIN code, you can contact the server administrator to reset it",
|
||||
"reset_pin_code_success": "Successfully reset PIN code",
|
||||
"reset_pin_code_with_password": "You can always reset your PIN code with your password",
|
||||
"reset_sqlite": "Reset SQLite Database",
|
||||
"reset_sqlite_confirmation": "Are you sure you want to reset the SQLite database? You will need to log out and log in again to resync the data",
|
||||
"reset_sqlite_success": "Successfully reset the SQLite database",
|
||||
@@ -1840,6 +1865,7 @@
|
||||
"sort_created": "Date created",
|
||||
"sort_items": "Number of items",
|
||||
"sort_modified": "Date modified",
|
||||
"sort_newest": "Newest photo",
|
||||
"sort_oldest": "Oldest photo",
|
||||
"sort_people_by_similarity": "Sort people by similarity",
|
||||
"sort_recent": "Most recent photo",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
ARG DEVICE=cpu
|
||||
|
||||
FROM python:3.11-bookworm@sha256:ce3b954c9285a7a145cba620bae03db836ab890b6b9e0d05a3ca522ea00dfbc9 AS builder-cpu
|
||||
FROM python:3.11-bookworm@sha256:c642d5dfaf9115a12086785f23008558ae2e13bcd0c4794536340bcb777a4381 AS builder-cpu
|
||||
|
||||
FROM builder-cpu AS builder-openvino
|
||||
|
||||
@@ -59,7 +59,7 @@ ENV PYTHONDONTWRITEBYTECODE=1 \
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends g++
|
||||
|
||||
COPY --from=ghcr.io/astral-sh/uv:latest@sha256:9653efd4380d5a0e5511e337dcfc3b8ba5bc4e6ea7fa3be7716598261d5503fa /uv /uvx /bin/
|
||||
COPY --from=ghcr.io/astral-sh/uv:latest@sha256:f64ad69940b634e75d2e4d799eb5238066c5eeda49f76e782d4873c3d014ea33 /uv /uvx /bin/
|
||||
RUN --mount=type=cache,target=/root/.cache/uv \
|
||||
--mount=type=bind,source=uv.lock,target=uv.lock \
|
||||
--mount=type=bind,source=pyproject.toml,target=pyproject.toml \
|
||||
@@ -68,11 +68,11 @@ RUN if [ "$DEVICE" = "rocm" ]; then \
|
||||
uv pip install /opt/onnxruntime_rocm-*.whl; \
|
||||
fi
|
||||
|
||||
FROM python:3.11-slim-bookworm@sha256:9e1912aab0a30bbd9488eb79063f68f42a68ab0946cbe98fecf197fe5b085506 AS prod-cpu
|
||||
FROM python:3.11-slim-bookworm@sha256:838ff46ae6c481e85e369706fa3dea5166953824124735639f3c9f52af85f319 AS prod-cpu
|
||||
|
||||
ENV LD_PRELOAD=/usr/lib/libmimalloc.so.2
|
||||
|
||||
FROM python:3.11-slim-bookworm@sha256:9e1912aab0a30bbd9488eb79063f68f42a68ab0946cbe98fecf197fe5b085506 AS prod-openvino
|
||||
FROM python:3.11-slim-bookworm@sha256:838ff46ae6c481e85e369706fa3dea5166953824124735639f3c9f52af85f319 AS prod-openvino
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install --no-install-recommends -yqq ocl-icd-libopencl1 wget && \
|
||||
|
||||
@@ -36,7 +36,7 @@ def to_numpy(img: Image.Image) -> NDArray[np.float32]:
|
||||
def normalize(
|
||||
img: NDArray[np.float32], mean: float | NDArray[np.float32], std: float | NDArray[np.float32]
|
||||
) -> NDArray[np.float32]:
|
||||
return np.divide(img - mean, std, dtype=np.float32)
|
||||
return (img - mean) / std
|
||||
|
||||
|
||||
def get_pil_resampling(resample: str) -> Image.Resampling:
|
||||
@@ -58,11 +58,13 @@ def decode_pil(image_bytes: bytes | IO[bytes] | Image.Image) -> Image.Image:
|
||||
|
||||
|
||||
def decode_cv2(image_bytes: NDArray[np.uint8] | bytes | Image.Image) -> NDArray[np.uint8]:
|
||||
if isinstance(image_bytes, bytes):
|
||||
image_bytes = decode_pil(image_bytes) # pillow is much faster than cv2
|
||||
if isinstance(image_bytes, Image.Image):
|
||||
return pil_to_cv2(image_bytes)
|
||||
return image_bytes
|
||||
match image_bytes:
|
||||
case bytes() | memoryview() | bytearray():
|
||||
return pil_to_cv2(decode_pil(image_bytes)) # pillow is much faster than cv2
|
||||
case Image.Image():
|
||||
return pil_to_cv2(image_bytes)
|
||||
case _:
|
||||
return image_bytes
|
||||
|
||||
|
||||
def clean_text(text: str, canonicalize: bool = False) -> str:
|
||||
|
||||
@@ -112,8 +112,4 @@ def has_profiling(obj: Any) -> TypeGuard[HasProfiling]:
|
||||
return hasattr(obj, "profiling") and isinstance(obj.profiling, dict)
|
||||
|
||||
|
||||
def is_ndarray(obj: Any, dtype: "type[np._DTypeScalar_co]") -> "TypeGuard[npt.NDArray[np._DTypeScalar_co]]":
|
||||
return isinstance(obj, np.ndarray) and obj.dtype == dtype
|
||||
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
@@ -12,6 +12,7 @@ dependencies = [
|
||||
"gunicorn>=21.1.0",
|
||||
"huggingface-hub>=0.20.1,<1.0",
|
||||
"insightface>=0.7.3,<1.0",
|
||||
"numpy<2",
|
||||
"opencv-python-headless>=4.7.0.72,<5.0",
|
||||
"orjson>=3.9.5",
|
||||
"pillow>=9.5.0,<11.0",
|
||||
|
||||
773
machine-learning/uv.lock
generated
10
mobile/android/app/CMakeLists.txt
Normal file
@@ -0,0 +1,10 @@
|
||||
cmake_minimum_required(VERSION 3.12)
|
||||
|
||||
set(CMAKE_C_STANDARD 17)
|
||||
set(CMAKE_C_STANDARD_REQUIRED ON)
|
||||
|
||||
project(native_buffer LANGUAGES C)
|
||||
|
||||
add_library(native_buffer SHARED
|
||||
src/main/cpp/native_buffer.c
|
||||
)
|
||||
@@ -83,6 +83,12 @@ android {
|
||||
}
|
||||
}
|
||||
namespace 'app.alextran.immich'
|
||||
|
||||
externalNativeBuild {
|
||||
cmake {
|
||||
path "CMakeLists.txt"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
flutter {
|
||||
|
||||
@@ -27,7 +27,7 @@
|
||||
|
||||
<application android:label="Immich" android:name=".ImmichApp" android:usesCleartextTraffic="true"
|
||||
android:icon="@mipmap/ic_launcher" android:requestLegacyExternalStorage="true"
|
||||
android:largeHeap="true" android:enableOnBackInvokedCallback="false">
|
||||
android:largeHeap="true" android:enableOnBackInvokedCallback="false" android:allowBackup="false">
|
||||
|
||||
<service
|
||||
android:name="androidx.work.impl.foreground.SystemForegroundService"
|
||||
|
||||
40
mobile/android/app/src/main/cpp/native_buffer.c
Normal file
@@ -0,0 +1,40 @@
|
||||
#include <jni.h>
|
||||
#include <stdlib.h>
|
||||
|
||||
JNIEXPORT jlong JNICALL
|
||||
Java_app_alextran_immich_images_ThumbnailsImpl_00024Companion_allocateNative(
|
||||
JNIEnv *env, jclass clazz, jint size) {
|
||||
void *ptr = malloc(size);
|
||||
return (jlong) ptr;
|
||||
}
|
||||
|
||||
JNIEXPORT jlong JNICALL
|
||||
Java_app_alextran_immich_images_ThumbnailsImpl_allocateNative(
|
||||
JNIEnv *env, jclass clazz, jint size) {
|
||||
void *ptr = malloc(size);
|
||||
return (jlong) ptr;
|
||||
}
|
||||
|
||||
JNIEXPORT void JNICALL
|
||||
Java_app_alextran_immich_images_ThumbnailsImpl_00024Companion_freeNative(
|
||||
JNIEnv *env, jclass clazz, jlong address) {
|
||||
free((void *) address);
|
||||
}
|
||||
|
||||
JNIEXPORT void JNICALL
|
||||
Java_app_alextran_immich_images_ThumbnailsImpl_freeNative(
|
||||
JNIEnv *env, jclass clazz, jlong address) {
|
||||
free((void *) address);
|
||||
}
|
||||
|
||||
JNIEXPORT jobject JNICALL
|
||||
Java_app_alextran_immich_images_ThumbnailsImpl_00024Companion_wrapAsBuffer(
|
||||
JNIEnv *env, jclass clazz, jlong address, jint capacity) {
|
||||
return (*env)->NewDirectByteBuffer(env, (void *) address, capacity);
|
||||
}
|
||||
|
||||
JNIEXPORT jobject JNICALL
|
||||
Java_app_alextran_immich_images_ThumbnailsImpl_wrapAsBuffer(
|
||||
JNIEnv *env, jclass clazz, jlong address, jint capacity) {
|
||||
return (*env)->NewDirectByteBuffer(env, (void *) address, capacity);
|
||||
}
|
||||
@@ -1,7 +1,20 @@
|
||||
package app.alextran.immich
|
||||
|
||||
import android.content.Context
|
||||
import com.bumptech.glide.GlideBuilder
|
||||
import com.bumptech.glide.annotation.GlideModule
|
||||
import com.bumptech.glide.load.engine.bitmap_recycle.BitmapPoolAdapter
|
||||
import com.bumptech.glide.load.engine.cache.DiskCacheAdapter
|
||||
import com.bumptech.glide.load.engine.cache.MemoryCacheAdapter
|
||||
import com.bumptech.glide.module.AppGlideModule
|
||||
|
||||
@GlideModule
|
||||
class AppGlideModule : AppGlideModule()
|
||||
class AppGlideModule : AppGlideModule() {
|
||||
override fun applyOptions(context: Context, builder: GlideBuilder) {
|
||||
super.applyOptions(context, builder)
|
||||
// disable caching as this is already done on the Flutter side
|
||||
builder.setMemoryCache(MemoryCacheAdapter())
|
||||
builder.setDiskCache(DiskCacheAdapter.Factory())
|
||||
builder.setBitmapPool(BitmapPoolAdapter())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,6 +3,8 @@ package app.alextran.immich
|
||||
import android.os.Build
|
||||
import android.os.ext.SdkExtensions
|
||||
import androidx.annotation.NonNull
|
||||
import app.alextran.immich.images.ThumbnailApi
|
||||
import app.alextran.immich.images.ThumbnailsImpl
|
||||
import app.alextran.immich.sync.NativeSyncApi
|
||||
import app.alextran.immich.sync.NativeSyncApiImpl26
|
||||
import app.alextran.immich.sync.NativeSyncApiImpl30
|
||||
@@ -22,6 +24,7 @@ class MainActivity : FlutterFragmentActivity() {
|
||||
} else {
|
||||
NativeSyncApiImpl30(this)
|
||||
}
|
||||
NativeSyncApi.setUp(flutterEngine.dartExecutor.binaryMessenger, nativeSyncApiImpl)
|
||||
NativeSyncApi.setUp(flutterEngine.dartExecutor.binaryMessenger, nativeSyncApiImpl)
|
||||
ThumbnailApi.setUp(flutterEngine.dartExecutor.binaryMessenger, ThumbnailsImpl(this))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,164 @@
|
||||
package app.alextran.immich.images;
|
||||
|
||||
// Copyright (c) 2023 Evan Wallace
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
// modified to use native allocations
|
||||
public final class ThumbHash {
|
||||
/**
|
||||
* Decodes a ThumbHash to an RGBA image. RGB is not be premultiplied by A.
|
||||
*
|
||||
* @param hash The bytes of the ThumbHash.
|
||||
* @return The width, height, and pixels of the rendered placeholder image.
|
||||
*/
|
||||
public static Image thumbHashToRGBA(byte[] hash) {
|
||||
// Read the constants
|
||||
int header24 = (hash[0] & 255) | ((hash[1] & 255) << 8) | ((hash[2] & 255) << 16);
|
||||
int header16 = (hash[3] & 255) | ((hash[4] & 255) << 8);
|
||||
float l_dc = (float) (header24 & 63) / 63.0f;
|
||||
float p_dc = (float) ((header24 >> 6) & 63) / 31.5f - 1.0f;
|
||||
float q_dc = (float) ((header24 >> 12) & 63) / 31.5f - 1.0f;
|
||||
float l_scale = (float) ((header24 >> 18) & 31) / 31.0f;
|
||||
boolean hasAlpha = (header24 >> 23) != 0;
|
||||
float p_scale = (float) ((header16 >> 3) & 63) / 63.0f;
|
||||
float q_scale = (float) ((header16 >> 9) & 63) / 63.0f;
|
||||
boolean isLandscape = (header16 >> 15) != 0;
|
||||
int lx = Math.max(3, isLandscape ? hasAlpha ? 5 : 7 : header16 & 7);
|
||||
int ly = Math.max(3, isLandscape ? header16 & 7 : hasAlpha ? 5 : 7);
|
||||
float a_dc = hasAlpha ? (float) (hash[5] & 15) / 15.0f : 1.0f;
|
||||
float a_scale = (float) ((hash[5] >> 4) & 15) / 15.0f;
|
||||
|
||||
// Read the varying factors (boost saturation by 1.25x to compensate for quantization)
|
||||
int ac_start = hasAlpha ? 6 : 5;
|
||||
int ac_index = 0;
|
||||
Channel l_channel = new Channel(lx, ly);
|
||||
Channel p_channel = new Channel(3, 3);
|
||||
Channel q_channel = new Channel(3, 3);
|
||||
Channel a_channel = null;
|
||||
ac_index = l_channel.decode(hash, ac_start, ac_index, l_scale);
|
||||
ac_index = p_channel.decode(hash, ac_start, ac_index, p_scale * 1.25f);
|
||||
ac_index = q_channel.decode(hash, ac_start, ac_index, q_scale * 1.25f);
|
||||
if (hasAlpha) {
|
||||
a_channel = new Channel(5, 5);
|
||||
a_channel.decode(hash, ac_start, ac_index, a_scale);
|
||||
}
|
||||
float[] l_ac = l_channel.ac;
|
||||
float[] p_ac = p_channel.ac;
|
||||
float[] q_ac = q_channel.ac;
|
||||
float[] a_ac = hasAlpha ? a_channel.ac : null;
|
||||
|
||||
// Decode using the DCT into RGB
|
||||
float ratio = thumbHashToApproximateAspectRatio(hash);
|
||||
int w = Math.round(ratio > 1.0f ? 32.0f : 32.0f * ratio);
|
||||
int h = Math.round(ratio > 1.0f ? 32.0f / ratio : 32.0f);
|
||||
int size = w * h * 4;
|
||||
long pointer = ThumbnailsImpl.allocateNative(size);
|
||||
ByteBuffer rgba = ThumbnailsImpl.wrapAsBuffer(pointer, size);
|
||||
int cx_stop = Math.max(lx, hasAlpha ? 5 : 3);
|
||||
int cy_stop = Math.max(ly, hasAlpha ? 5 : 3);
|
||||
float[] fx = new float[cx_stop];
|
||||
float[] fy = new float[cy_stop];
|
||||
for (int y = 0, i = 0; y < h; y++) {
|
||||
for (int x = 0; x < w; x++, i += 4) {
|
||||
float l = l_dc, p = p_dc, q = q_dc, a = a_dc;
|
||||
|
||||
// Precompute the coefficients
|
||||
for (int cx = 0; cx < cx_stop; cx++)
|
||||
fx[cx] = (float) Math.cos(Math.PI / w * (x + 0.5f) * cx);
|
||||
for (int cy = 0; cy < cy_stop; cy++)
|
||||
fy[cy] = (float) Math.cos(Math.PI / h * (y + 0.5f) * cy);
|
||||
|
||||
// Decode L
|
||||
for (int cy = 0, j = 0; cy < ly; cy++) {
|
||||
float fy2 = fy[cy] * 2.0f;
|
||||
for (int cx = cy > 0 ? 0 : 1; cx * ly < lx * (ly - cy); cx++, j++)
|
||||
l += l_ac[j] * fx[cx] * fy2;
|
||||
}
|
||||
|
||||
// Decode P and Q
|
||||
for (int cy = 0, j = 0; cy < 3; cy++) {
|
||||
float fy2 = fy[cy] * 2.0f;
|
||||
for (int cx = cy > 0 ? 0 : 1; cx < 3 - cy; cx++, j++) {
|
||||
float f = fx[cx] * fy2;
|
||||
p += p_ac[j] * f;
|
||||
q += q_ac[j] * f;
|
||||
}
|
||||
}
|
||||
|
||||
// Decode A
|
||||
if (hasAlpha)
|
||||
for (int cy = 0, j = 0; cy < 5; cy++) {
|
||||
float fy2 = fy[cy] * 2.0f;
|
||||
for (int cx = cy > 0 ? 0 : 1; cx < 5 - cy; cx++, j++)
|
||||
a += a_ac[j] * fx[cx] * fy2;
|
||||
}
|
||||
|
||||
// Convert to RGB
|
||||
float b = l - 2.0f / 3.0f * p;
|
||||
float r = (3.0f * l - b + q) / 2.0f;
|
||||
float g = r - q;
|
||||
rgba.put(i, (byte) Math.max(0, Math.round(255.0f * Math.min(1, r))));
|
||||
rgba.put(i + 1, (byte) Math.max(0, Math.round(255.0f * Math.min(1, g))));
|
||||
rgba.put(i + 2, (byte) Math.max(0, Math.round(255.0f * Math.min(1, b))));
|
||||
rgba.put(i + 3, (byte) Math.max(0, Math.round(255.0f * Math.min(1, a))));
|
||||
}
|
||||
}
|
||||
return new Image(w, h, pointer);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the approximate aspect ratio of the original image.
|
||||
*
|
||||
* @param hash The bytes of the ThumbHash.
|
||||
* @return The approximate aspect ratio (i.e. width / height).
|
||||
*/
|
||||
public static float thumbHashToApproximateAspectRatio(byte[] hash) {
|
||||
byte header = hash[3];
|
||||
boolean hasAlpha = (hash[2] & 0x80) != 0;
|
||||
boolean isLandscape = (hash[4] & 0x80) != 0;
|
||||
int lx = isLandscape ? hasAlpha ? 5 : 7 : header & 7;
|
||||
int ly = isLandscape ? header & 7 : hasAlpha ? 5 : 7;
|
||||
return (float) lx / (float) ly;
|
||||
}
|
||||
|
||||
public static final class Image {
|
||||
public int width;
|
||||
public int height;
|
||||
public long pointer;
|
||||
|
||||
public Image(int width, int height, long pointer) {
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
this.pointer = pointer;
|
||||
}
|
||||
}
|
||||
|
||||
private static final class Channel {
|
||||
int nx;
|
||||
int ny;
|
||||
float[] ac;
|
||||
|
||||
Channel(int nx, int ny) {
|
||||
this.nx = nx;
|
||||
this.ny = ny;
|
||||
int n = 0;
|
||||
for (int cy = 0; cy < ny; cy++)
|
||||
for (int cx = cy > 0 ? 0 : 1; cx * ny < nx * (ny - cy); cx++)
|
||||
n++;
|
||||
ac = new float[n];
|
||||
}
|
||||
|
||||
int decode(byte[] hash, int start, int index, float scale) {
|
||||
for (int i = 0; i < ac.length; i++) {
|
||||
int data = hash[start + (index >> 1)] >> ((index & 1) << 2);
|
||||
ac[i] = ((float) (data & 15) / 7.5f - 1.0f) * scale;
|
||||
index++;
|
||||
}
|
||||
return index;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,139 @@
// Autogenerated from Pigeon (v26.0.0), do not edit directly.
// See also: https://pub.dev/packages/pigeon
@file:Suppress("UNCHECKED_CAST", "ArrayInDataClass")

package app.alextran.immich.images

import android.util.Log
import io.flutter.plugin.common.BasicMessageChannel
import io.flutter.plugin.common.BinaryMessenger
import io.flutter.plugin.common.EventChannel
import io.flutter.plugin.common.MessageCodec
import io.flutter.plugin.common.StandardMethodCodec
import io.flutter.plugin.common.StandardMessageCodec
import java.io.ByteArrayOutputStream
import java.nio.ByteBuffer

private object ThumbnailsPigeonUtils {

  fun wrapResult(result: Any?): List<Any?> {
    return listOf(result)
  }

  fun wrapError(exception: Throwable): List<Any?> {
    return if (exception is FlutterError) {
      listOf(
        exception.code,
        exception.message,
        exception.details
      )
    } else {
      listOf(
        exception.javaClass.simpleName,
        exception.toString(),
        "Cause: " + exception.cause + ", Stacktrace: " + Log.getStackTraceString(exception)
      )
    }
  }
}

/**
 * Error class for passing custom error details to Flutter via a thrown PlatformException.
 * @property code The error code.
 * @property message The error message.
 * @property details The error details. Must be a datatype supported by the api codec.
 */
class FlutterError (
  val code: String,
  override val message: String? = null,
  val details: Any? = null
) : Throwable()

private open class ThumbnailsPigeonCodec : StandardMessageCodec() {
  override fun readValueOfType(type: Byte, buffer: ByteBuffer): Any? {
    return super.readValueOfType(type, buffer)
  }
  override fun writeValue(stream: ByteArrayOutputStream, value: Any?) {
    super.writeValue(stream, value)
  }
}

/** Generated interface from Pigeon that represents a handler of messages from Flutter. */
interface ThumbnailApi {
  fun requestImage(assetId: String, requestId: Long, width: Long, height: Long, isVideo: Boolean, callback: (Result<Map<String, Long>>) -> Unit)
  fun cancelImageRequest(requestId: Long)
  fun getThumbhash(thumbhash: String, callback: (Result<Map<String, Long>>) -> Unit)

  companion object {
    /** The codec used by ThumbnailApi. */
    val codec: MessageCodec<Any?> by lazy {
      ThumbnailsPigeonCodec()
    }
    /** Sets up an instance of `ThumbnailApi` to handle messages through the `binaryMessenger`. */
    @JvmOverloads
    fun setUp(binaryMessenger: BinaryMessenger, api: ThumbnailApi?, messageChannelSuffix: String = "") {
      val separatedMessageChannelSuffix = if (messageChannelSuffix.isNotEmpty()) ".$messageChannelSuffix" else ""
      run {
        val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.ThumbnailApi.requestImage$separatedMessageChannelSuffix", codec)
        if (api != null) {
          channel.setMessageHandler { message, reply ->
            val args = message as List<Any?>
            val assetIdArg = args[0] as String
            val requestIdArg = args[1] as Long
            val widthArg = args[2] as Long
            val heightArg = args[3] as Long
            val isVideoArg = args[4] as Boolean
            api.requestImage(assetIdArg, requestIdArg, widthArg, heightArg, isVideoArg) { result: Result<Map<String, Long>> ->
              val error = result.exceptionOrNull()
              if (error != null) {
                reply.reply(ThumbnailsPigeonUtils.wrapError(error))
              } else {
                val data = result.getOrNull()
                reply.reply(ThumbnailsPigeonUtils.wrapResult(data))
              }
            }
          }
        } else {
          channel.setMessageHandler(null)
        }
      }
      run {
        val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.ThumbnailApi.cancelImageRequest$separatedMessageChannelSuffix", codec)
        if (api != null) {
          channel.setMessageHandler { message, reply ->
            val args = message as List<Any?>
            val requestIdArg = args[0] as Long
            val wrapped: List<Any?> = try {
              api.cancelImageRequest(requestIdArg)
              listOf(null)
            } catch (exception: Throwable) {
              ThumbnailsPigeonUtils.wrapError(exception)
            }
            reply.reply(wrapped)
          }
        } else {
          channel.setMessageHandler(null)
        }
      }
      run {
        val channel = BasicMessageChannel<Any?>(binaryMessenger, "dev.flutter.pigeon.immich_mobile.ThumbnailApi.getThumbhash$separatedMessageChannelSuffix", codec)
        if (api != null) {
          channel.setMessageHandler { message, reply ->
            val args = message as List<Any?>
            val thumbhashArg = args[0] as String
            api.getThumbhash(thumbhashArg) { result: Result<Map<String, Long>> ->
              val error = result.exceptionOrNull()
              if (error != null) {
                reply.reply(ThumbnailsPigeonUtils.wrapError(error))
              } else {
                val data = result.getOrNull()
                reply.reply(ThumbnailsPigeonUtils.wrapResult(data))
              }
            }
          }
        } else {
          channel.setMessageHandler(null)
        }
      }
    }
  }
}
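The generated file does not register anything by itself; the host side has to call `setUp`. A minimal sketch of that wiring, assuming a standard FlutterPlugin lifecycle (the class name `ThumbnailsPlugin` is hypothetical; `ThumbnailsImpl` is the handler defined in the next file of this diff):

```kotlin
import io.flutter.embedding.engine.plugins.FlutterPlugin

class ThumbnailsPlugin : FlutterPlugin {
  override fun onAttachedToEngine(binding: FlutterPlugin.FlutterPluginBinding) {
    // Wire the Kotlin handler to the Dart-side Pigeon client.
    ThumbnailApi.setUp(binding.binaryMessenger, ThumbnailsImpl(binding.applicationContext))
  }

  override fun onDetachedFromEngine(binding: FlutterPlugin.FlutterPluginBinding) {
    // Passing null clears the message handlers installed by setUp.
    ThumbnailApi.setUp(binding.binaryMessenger, null)
  }
}
```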
@@ -0,0 +1,230 @@
package app.alextran.immich.images

import android.content.ContentResolver
import android.content.ContentUris
import android.content.Context
import android.graphics.*
import android.net.Uri
import android.os.Build
import android.os.CancellationSignal
import android.os.OperationCanceledException
import android.provider.MediaStore
import android.provider.MediaStore.Images
import android.provider.MediaStore.Video
import android.util.Size
import java.nio.ByteBuffer
import kotlin.math.*
import java.util.concurrent.Executors
import com.bumptech.glide.Glide
import com.bumptech.glide.Priority
import com.bumptech.glide.load.DecodeFormat
import java.util.Base64
import java.util.HashMap
import java.util.concurrent.CancellationException
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.Future

data class Request(
  val taskFuture: Future<*>,
  val cancellationSignal: CancellationSignal,
  val callback: (Result<Map<String, Long>>) -> Unit
)

class ThumbnailsImpl(context: Context) : ThumbnailApi {
  private val ctx: Context = context.applicationContext
  private val resolver: ContentResolver = ctx.contentResolver
  private val requestThread = Executors.newSingleThreadExecutor()
  private val threadPool =
    Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() / 2 + 1)
  private val requestMap = ConcurrentHashMap<Long, Request>()

  companion object {
    val CANCELLED = Result.success<Map<String, Long>>(mapOf())
    val OPTIONS = BitmapFactory.Options().apply { inPreferredConfig = Bitmap.Config.ARGB_8888 }

    init {
      System.loadLibrary("native_buffer")
    }

    @JvmStatic
    external fun allocateNative(size: Int): Long

    @JvmStatic
    external fun freeNative(pointer: Long)

    @JvmStatic
    external fun wrapAsBuffer(address: Long, capacity: Int): ByteBuffer
  }

  override fun getThumbhash(thumbhash: String, callback: (Result<Map<String, Long>>) -> Unit) {
    threadPool.execute {
      try {
        val bytes = Base64.getDecoder().decode(thumbhash)
        val image = ThumbHash.thumbHashToRGBA(bytes)
        val res = mapOf(
          "pointer" to image.pointer,
          "width" to image.width.toLong(),
          "height" to image.height.toLong()
        )
        callback(Result.success(res))
      } catch (e: Exception) {
        callback(Result.failure(e))
      }
    }
  }

  override fun requestImage(
    assetId: String,
    requestId: Long,
    width: Long,
    height: Long,
    isVideo: Boolean,
    callback: (Result<Map<String, Long>>) -> Unit
  ) {
    val signal = CancellationSignal()
    val task = threadPool.submit {
      try {
        getThumbnailBufferInternal(assetId, width, height, isVideo, callback, signal)
      } catch (e: Exception) {
        when (e) {
          is OperationCanceledException -> callback(CANCELLED)
          is CancellationException -> callback(CANCELLED)
          else -> callback(Result.failure(e))
        }
      } finally {
        requestMap.remove(requestId)
      }
    }
    val request = Request(task, signal, callback)
    requestMap[requestId] = request
  }

  override fun cancelImageRequest(requestId: Long) {
    val request = requestMap.remove(requestId) ?: return
    request.taskFuture.cancel(false)
    request.cancellationSignal.cancel()
    if (request.taskFuture.isCancelled) {
      requestThread.execute {
        try {
          request.callback(CANCELLED)
        } catch (_: Exception) {
        }
      }
    }
  }

  private fun getThumbnailBufferInternal(
    assetId: String,
    width: Long,
    height: Long,
    isVideo: Boolean,
    callback: (Result<Map<String, Long>>) -> Unit,
    signal: CancellationSignal
  ) {
    signal.throwIfCanceled()
    val targetWidth = width.toInt()
    val targetHeight = height.toInt()
    val id = assetId.toLong()

    signal.throwIfCanceled()
    val bitmap = if (isVideo) {
      decodeVideoThumbnail(id, targetWidth, targetHeight, signal)
    } else {
      decodeImage(id, targetWidth, targetHeight, signal)
    }

    processBitmap(bitmap, callback, signal)
  }

  private fun processBitmap(
    bitmap: Bitmap, callback: (Result<Map<String, Long>>) -> Unit, signal: CancellationSignal
  ) {
    signal.throwIfCanceled()
    val actualWidth = bitmap.width
    val actualHeight = bitmap.height

    val size = actualWidth * actualHeight * 4
    val pointer = allocateNative(size)

    try {
      signal.throwIfCanceled()
      val buffer = wrapAsBuffer(pointer, size)
      bitmap.copyPixelsToBuffer(buffer)
      bitmap.recycle()
      signal.throwIfCanceled()
      val res = mapOf(
        "pointer" to pointer,
        "width" to actualWidth.toLong(),
        "height" to actualHeight.toLong()
      )
      callback(Result.success(res))
    } catch (e: Exception) {
      freeNative(pointer)
      callback(if (e is OperationCanceledException) CANCELLED else Result.failure(e))
    }
  }

  private fun decodeImage(
    id: Long, targetWidth: Int, targetHeight: Int, signal: CancellationSignal
  ): Bitmap {
    signal.throwIfCanceled()
    val uri = ContentUris.withAppendedId(Images.Media.EXTERNAL_CONTENT_URI, id)
    if (targetHeight > 768 || targetWidth > 768) {
      return decodeSource(uri, targetWidth, targetHeight, signal)
    }

    return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
      resolver.loadThumbnail(uri, Size(targetWidth, targetHeight), signal)
    } else {
      signal.setOnCancelListener { Images.Thumbnails.cancelThumbnailRequest(resolver, id) }
      Images.Thumbnails.getThumbnail(resolver, id, Images.Thumbnails.MINI_KIND, OPTIONS)
    }
  }

  private fun decodeVideoThumbnail(
    id: Long, targetWidth: Int, targetHeight: Int, signal: CancellationSignal
  ): Bitmap {
    signal.throwIfCanceled()
    return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
      val uri = ContentUris.withAppendedId(Video.Media.EXTERNAL_CONTENT_URI, id)
      resolver.loadThumbnail(uri, Size(targetWidth, targetHeight), signal)
    } else {
      signal.setOnCancelListener { Video.Thumbnails.cancelThumbnailRequest(resolver, id) }
      Video.Thumbnails.getThumbnail(resolver, id, Video.Thumbnails.MINI_KIND, OPTIONS)
    }
  }

  private fun decodeSource(
    uri: Uri, targetWidth: Int, targetHeight: Int, signal: CancellationSignal
  ): Bitmap {
    signal.throwIfCanceled()
    return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
      val source = ImageDecoder.createSource(resolver, uri)
      signal.throwIfCanceled()
      ImageDecoder.decodeBitmap(source) { decoder, info, _ ->
        val sampleSize =
          getSampleSize(info.size.width, info.size.height, targetWidth, targetHeight)
        decoder.setTargetSampleSize(sampleSize)
        decoder.allocator = ImageDecoder.ALLOCATOR_SOFTWARE
        decoder.setTargetColorSpace(ColorSpace.get(ColorSpace.Named.SRGB))
      }
    } else {
      val ref = Glide.with(ctx).asBitmap().priority(Priority.IMMEDIATE).load(uri)
        .disallowHardwareConfig().format(DecodeFormat.PREFER_ARGB_8888)
        .submit(targetWidth, targetHeight)
      signal.setOnCancelListener { Glide.with(ctx).clear(ref) }
      ref.get()
    }
  }

  private fun getSampleSize(fullWidth: Int, fullHeight: Int, reqWidth: Int, reqHeight: Int): Int {
    return 1 shl max(
      0, floor(
        min(
          log2(fullWidth / reqWidth.toDouble()),
          log2(fullHeight / reqHeight.toDouble()),
        )
      ).toInt()
    )
  }
}
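For a sense of how `getSampleSize` behaves, here is the same arithmetic unrolled for one illustrative input. The dimensions are made up; note that only requests above the 768 px threshold reach `decodeSource` (and therefore this computation) at all.

```kotlin
import kotlin.math.floor
import kotlin.math.log2
import kotlin.math.max
import kotlin.math.min

fun main() {
  val fullWidth = 4032; val fullHeight = 3024 // a typical 12 MP photo (illustrative)
  val reqWidth = 1080; val reqHeight = 1080   // hypothetical request above the 768 px cutoff
  val sampleSize = 1 shl max(
    0, floor(
      min(
        log2(fullWidth / reqWidth.toDouble()),  // ≈ 1.90
        log2(fullHeight / reqHeight.toDouble()) // ≈ 1.49
      )
    ).toInt()                                   // floor(1.49) = 1
  )
  println(sampleSize) // 2 → the decoder yields roughly a 2016x1512 bitmap, never smaller than the request
}
```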
@@ -1,4 +1,4 @@
// Autogenerated from Pigeon (v25.3.2), do not edit directly.
// Autogenerated from Pigeon (v26.0.0), do not edit directly.
// See also: https://pub.dev/packages/pigeon
@file:Suppress("UNCHECKED_CAST", "ArrayInDataClass")

@@ -24,14 +24,23 @@ class ImmichAPI(cfg: ServerConfig) {
      val serverURL = prefs.getString("widget_server_url", "") ?: ""
      val sessionKey = prefs.getString("widget_auth_token", "") ?: ""
      val customHeadersJSON = prefs.getString("widget_custom_headers", "") ?: ""

      if (serverURL.isBlank() || sessionKey.isBlank()) {
        return null
      }

      var customHeaders: Map<String, String> = HashMap<String, String>()

      if (customHeadersJSON.isNotBlank()) {
        val stringMapType = object : TypeToken<Map<String, String>>() {}.type
        customHeaders = Gson().fromJson(customHeadersJSON, stringMapType)
      }

      return ServerConfig(
        serverURL,
        sessionKey
        sessionKey,
        customHeaders
      )
    }
  }
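The widget reads any user-defined custom headers as a JSON object string and deserializes it with Gson before it is attached to the ServerConfig. A minimal standalone sketch of that parse step; the header names and values here are invented for illustration.

```kotlin
import com.google.gson.Gson
import com.google.gson.reflect.TypeToken

fun main() {
  // Example of what "widget_custom_headers" could contain (hypothetical values).
  val customHeadersJSON = """{"x-immich-example":"demo","accept-language":"en"}"""
  val stringMapType = object : TypeToken<Map<String, String>>() {}.type
  val customHeaders: Map<String, String> = Gson().fromJson(customHeadersJSON, stringMapType)
  println(customHeaders) // {x-immich-example=demo, accept-language=en}
}
```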
@@ -50,11 +59,19 @@ class ImmichAPI(cfg: ServerConfig) {
    return URL(urlString.toString())
  }

  private fun HttpURLConnection.applyCustomHeaders() {
    serverConfig.customHeaders.forEach { (key, value) ->
      setRequestProperty(key, value)
    }
  }

  suspend fun fetchSearchResults(filters: SearchFilters): List<Asset> = withContext(Dispatchers.IO) {
    val url = buildRequestURL("/search/random")
    val connection = (url.openConnection() as HttpURLConnection).apply {
      requestMethod = "POST"
      setRequestProperty("Content-Type", "application/json")
      applyCustomHeaders()

      doOutput = true
    }

@@ -75,6 +92,7 @@ class ImmichAPI(cfg: ServerConfig) {
    val url = buildRequestURL("/memories", listOf("for" to iso8601))
    val connection = (url.openConnection() as HttpURLConnection).apply {
      requestMethod = "GET"
      applyCustomHeaders()
    }

    val response = connection.inputStream.bufferedReader().readText()
@@ -94,6 +112,7 @@ class ImmichAPI(cfg: ServerConfig) {
    val url = buildRequestURL("/albums")
    val connection = (url.openConnection() as HttpURLConnection).apply {
      requestMethod = "GET"
      applyCustomHeaders()
    }

    val response = connection.inputStream.bufferedReader().readText()

@@ -55,7 +55,11 @@ data class WidgetEntry (
  val deeplink: String?
)

data class ServerConfig(val serverEndpoint: String, val sessionKey: String)
data class ServerConfig(
  val serverEndpoint: String,
  val sessionKey: String,
  val customHeaders: Map<String, String>
)

// MARK: Widget State Keys
val kImageUUID = stringPreferencesKey("uuid")

@@ -35,8 +35,8 @@ platform :android do
      task: 'bundle',
      build_type: 'Release',
      properties: {
        "android.injected.version.code" => 3002,
        "android.injected.version.name" => "1.137.2",
        "android.injected.version.code" => 3005,
        "android.injected.version.name" => "1.139.0",
      }
    )
    upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')

@@ -19,6 +19,7 @@ targets:
          - lib/infrastructure/entities/*.dart
          - lib/infrastructure/entities/*.drift
          - lib/infrastructure/repositories/db.repository.dart
          - lib/infrastructure/repositories/logger_db.repository.dart
      drift_dev:modular:
        enabled: true
        options: *drift_options