Compare commits

..

22 Commits

Author SHA1 Message Date
bwees
fc3fff097a chore: sync sql 2026-03-24 19:20:41 -05:00
bwees
a68ced542f fix: incorrect asset face sync 2026-03-24 18:23:45 -05:00
Mert
a9666d2cef fix(mobile): remove upload timeout (#27237)
remove timeout
2026-03-24 14:40:48 -04:00
renovate[bot]
4af9edc20b chore(deps): update github-actions (#27215)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2026-03-24 14:31:00 +01:00
renovate[bot]
c975fe5bc7 chore(deps): update github-actions (major) (#27225)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2026-03-24 12:40:10 +00:00
renovate[bot]
12a4d8e2ee chore(deps): update ghcr.io/jdx/mise docker tag to v2026.3.12 (#27224)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2026-03-24 12:06:19 +00:00
github-actions
ce9b32a61a chore: version v2.6.2 2026-03-24 02:51:55 +00:00
Yaros
4ddc288cd1 fix(mobile/web): album cover buttons consistency (#27213)
* fix(mobile/web): album cover buttons consistency

* test: adjust test
2026-03-23 21:40:17 -05:00
Yaros
94b15b8678 fix(server): album permissions for editors (#27214)
* fix(server): album permissions for editors

* test: adjust e2e test

* test: fix test
2026-03-23 21:39:30 -05:00
Daniel Dietzler
ff9ae24219 fix: album picker show all albums (#27211) 2026-03-23 19:08:57 -05:00
Matthew Momjian
b456f78771 fix(docs): clarify ML CPU architecture (#27187)
* ML architecture

* format

* clarify amd/arm
2026-03-23 18:29:58 -04:00
Mert
1506776891 fix(mobile): add cookie for auxiliary url (#27209)
add cookie before validating
2026-03-23 16:22:46 -05:00
Yaros
0e93aa74cf fix(mobile): add keys to people list (#27112)
mobile(fix): add keys to people list
2026-03-23 10:50:56 -05:00
Yaros
e95ad9d2eb fix(mobile): option padding on search dropdowns (#27154)
* fix(mobile): option padding on search dropdowns

* chore: prevent height fill up screen and block the bottom menu entry

---------

Co-authored-by: Alex <alex.tran1502@gmail.com>
2026-03-23 15:03:07 +00:00
Nicolas-micuda-becker
b98a227bbd fix(web): update upload summary when removing items (#27035) (#27139) 2026-03-23 10:02:09 -05:00
Michel Heusschen
2dd785e3e2 fix(web): restore duplicate viewer arrow key navigation (#27176) 2026-03-23 10:01:15 -05:00
Daniel Dietzler
7e754125cd fix: download original stale cache when edited (#27195) 2026-03-23 10:00:32 -05:00
Yaros
e2eb03d3a4 fix(mobile): star rating always defaults to 0 (#27157) 2026-03-23 09:56:27 -05:00
Yaros
bf065a834f fix(mobile): no results before applying filter (#27155) 2026-03-23 09:41:13 -05:00
Daniel Dietzler
db79173b5b chore: vite 8 (#26913) 2026-03-23 15:39:46 +01:00
Yaros
33666ccd21 fix(mobile): view similar photos from search (#27149)
* fix(mobile): view similar photos from search

* clean up

---------

Co-authored-by: Alex Tran <alex.tran1502@gmail.com>
2026-03-23 09:36:42 -05:00
bo0tzz
be93b9040c feat: consolidate auto-close workflows (#27172) 2026-03-23 11:22:44 +01:00
91 changed files with 1457 additions and 2500 deletions

143
.github/workflows/auto-close.yml vendored Normal file
View File

@@ -0,0 +1,143 @@
name: Auto-close PRs
on:
pull_request_target: # zizmor: ignore[dangerous-triggers]
types: [opened, edited, labeled]
permissions: {}
jobs:
parse_template:
runs-on: ubuntu-latest
if: ${{ github.event.action != 'labeled' && github.event.pull_request.head.repo.fork == true }}
permissions:
contents: read
outputs:
uses_template: ${{ steps.check.outputs.uses_template }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
sparse-checkout: .github/pull_request_template.md
sparse-checkout-cone-mode: false
persist-credentials: false
- name: Check required sections
id: check
env:
BODY: ${{ github.event.pull_request.body }}
run: |
OK=true
while IFS= read -r header; do
printf '%s\n' "$BODY" | grep -qF "$header" || OK=false
done < <(sed '/<!--/,/-->/d' .github/pull_request_template.md | grep "^## ")
echo "uses_template=$OK" >> "$GITHUB_OUTPUT"
close_template:
runs-on: ubuntu-latest
needs: parse_template
if: ${{ needs.parse_template.outputs.uses_template == 'false' && github.event.pull_request.state != 'closed' }}
permissions:
pull-requests: write
steps:
- name: Comment and close
env:
GH_TOKEN: ${{ github.token }}
NODE_ID: ${{ github.event.pull_request.node_id }}
run: |
gh api graphql \
-f prId="$NODE_ID" \
-f body="This PR has been automatically closed as the description doesn't follow our template. After you edit it to match the template, the PR will automatically be reopened." \
-f query='
mutation CommentAndClosePR($prId: ID!, $body: String!) {
addComment(input: {
subjectId: $prId,
body: $body
}) {
__typename
}
closePullRequest(input: {
pullRequestId: $prId
}) {
__typename
}
}'
- name: Add label
env:
GH_TOKEN: ${{ github.token }}
PR_NUMBER: ${{ github.event.pull_request.number }}
run: gh pr edit "$PR_NUMBER" --add-label "auto-closed:template"
close_llm:
runs-on: ubuntu-latest
if: ${{ github.event.action == 'labeled' && github.event.label.name == 'auto-closed:llm' }}
permissions:
pull-requests: write
steps:
- name: Comment and close
env:
GH_TOKEN: ${{ github.token }}
NODE_ID: ${{ github.event.pull_request.node_id }}
run: |
gh api graphql \
-f prId="$NODE_ID" \
-f body="Thank you for your interest in contributing to Immich! Unfortunately this PR looks like it was generated using an LLM. As noted in our [CONTRIBUTING.md](https://github.com/immich-app/immich/blob/main/CONTRIBUTING.md#use-of-generative-ai), we request that you don't use LLMs to generate PRs as those are not a good use of maintainer time." \
-f query='
mutation CommentAndClosePR($prId: ID!, $body: String!) {
addComment(input: {
subjectId: $prId,
body: $body
}) {
__typename
}
closePullRequest(input: {
pullRequestId: $prId
}) {
__typename
}
}'
reopen:
runs-on: ubuntu-latest
needs: parse_template
if: >-
${{
needs.parse_template.outputs.uses_template == 'true'
&& github.event.pull_request.state == 'closed'
&& contains(github.event.pull_request.labels.*.name, 'auto-closed:template')
}}
permissions:
pull-requests: write
steps:
- name: Remove template label
env:
GH_TOKEN: ${{ github.token }}
PR_NUMBER: ${{ github.event.pull_request.number }}
run: gh pr edit "$PR_NUMBER" --remove-label "auto-closed:template" || true
- name: Check for remaining auto-closed labels
id: check_labels
env:
GH_TOKEN: ${{ github.token }}
PR_NUMBER: ${{ github.event.pull_request.number }}
run: |
REMAINING=$(gh pr view "$PR_NUMBER" --json labels \
--jq '[.labels[].name | select(startswith("auto-closed:"))] | length')
echo "remaining=$REMAINING" >> "$GITHUB_OUTPUT"
- name: Reopen PR
if: ${{ steps.check_labels.outputs.remaining == '0' }}
env:
GH_TOKEN: ${{ github.token }}
NODE_ID: ${{ github.event.pull_request.node_id }}
run: |
gh api graphql \
-f prId="$NODE_ID" \
-f query='
mutation ReopenPR($prId: ID!) {
reopenPullRequest(input: {
pullRequestId: $prId
}) {
__typename
}
}'

View File

@@ -51,14 +51,14 @@ jobs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Check what should run
id: check
uses: immich-app/devtools/actions/pre-job@eed0f8b8165ffcb951f2ba854b2dd031935e1d73 # pre-job-action-v2.0.2
uses: immich-app/devtools/actions/pre-job@f50e3b600b6ac1763ddb8f3dfc69093512b967a1 # pre-job-action-v2.0.3
with:
github-token: ${{ steps.token.outputs.token }}
filters: |
@@ -79,7 +79,7 @@ jobs:
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -103,7 +103,7 @@ jobs:
- name: Restore Gradle Cache
id: cache-gradle-restore
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
with:
path: |
~/.gradle/caches
@@ -114,7 +114,7 @@ jobs:
key: build-mobile-gradle-${{ runner.os }}-main
- name: Setup Flutter SDK
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
uses: subosito/flutter-action@0ca7a949e71ae44c8e688a51c5e7e93b2c87e295 # v2.22.0
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
@@ -153,14 +153,14 @@ jobs:
fi
- name: Publish Android Artifact
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: release-apk-signed
path: mobile/build/app/outputs/flutter-apk/*.apk
- name: Save Gradle Cache
id: cache-gradle-save
uses: actions/cache/save@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
if: github.ref == 'refs/heads/main'
with:
path: |
@@ -185,13 +185,13 @@ jobs:
run: sudo xcode-select -s /Applications/Xcode_26.2.app/Contents/Developer
- name: Checkout code
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ inputs.ref || github.sha }}
persist-credentials: false
- name: Setup Flutter SDK
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2
uses: subosito/flutter-action@0ca7a949e71ae44c8e688a51c5e7e93b2c87e295 # v2.22.0
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
@@ -210,7 +210,7 @@ jobs:
working-directory: ./mobile
- name: Setup Ruby
uses: ruby/setup-ruby@v1
uses: ruby/setup-ruby@319994f95fa847cf3fb3cd3dbe89f6dcde9f178f # v1.295.0
with:
ruby-version: '3.3'
bundler-cache: true
@@ -291,7 +291,7 @@ jobs:
security delete-keychain build.keychain || true
- name: Upload IPA artifact
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: ios-release-ipa
path: mobile/ios/Runner.ipa

View File

@@ -19,7 +19,7 @@ jobs:
actions: write
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

View File

@@ -24,7 +24,7 @@ jobs:
persist-credentials: false
- name: Check for breaking API changes
uses: oasdiff/oasdiff-action/breaking@748daafaf3aac877a36307f842a48d55db938ac8 # v0.0.31
uses: oasdiff/oasdiff-action/breaking@2a37bc82462349c03a533b8b608bebbaf57b3e60 # v0.0.33
with:
base: https://raw.githubusercontent.com/${{ github.repository }}/main/open-api/immich-openapi-specs.json
revision: open-api/immich-openapi-specs.json

View File

@@ -1,97 +0,0 @@
name: Check PR Template
on:
pull_request_target: # zizmor: ignore[dangerous-triggers]
types: [opened, edited]
permissions: {}
env:
LABEL_ID: 'LA_kwDOGyI-8M8AAAACcAeOfg' # auto-closed:template
jobs:
parse:
runs-on: ubuntu-latest
if: ${{ github.event.pull_request.head.repo.fork == true }}
permissions:
contents: read
outputs:
uses_template: ${{ steps.check.outputs.uses_template }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
sparse-checkout: .github/pull_request_template.md
sparse-checkout-cone-mode: false
persist-credentials: false
- name: Check required sections
id: check
env:
BODY: ${{ github.event.pull_request.body }}
run: |
OK=true
while IFS= read -r header; do
printf '%s\n' "$BODY" | grep -qF "$header" || OK=false
done < <(sed '/<!--/,/-->/d' .github/pull_request_template.md | grep "^## ")
echo "uses_template=$OK" >> "$GITHUB_OUTPUT"
act:
runs-on: ubuntu-latest
needs: parse
permissions:
pull-requests: write
steps:
- name: Close PR
if: ${{ needs.parse.outputs.uses_template == 'false' && github.event.pull_request.state != 'closed' }}
env:
GH_TOKEN: ${{ github.token }}
NODE_ID: ${{ github.event.pull_request.node_id }}
run: |
gh api graphql \
-f prId="$NODE_ID" \
-f labelId="$LABEL_ID" \
-f body="This PR has been automatically closed as the description doesn't follow our template. After you edit it to match the template, the PR will automatically be reopened." \
-f query='
mutation CommentAndClosePR($prId: ID!, $body: String!, $labelId: ID!) {
addComment(input: {
subjectId: $prId,
body: $body
}) {
__typename
}
closePullRequest(input: {
pullRequestId: $prId
}) {
__typename
}
addLabelsToLabelable(input: {
labelableId: $prId,
labelIds: [$labelId]
}) {
__typename
}
}'
- name: Reopen PR (sections now present, PR was auto-closed)
if: ${{ needs.parse.outputs.uses_template == 'true' && github.event.pull_request.state == 'closed' && contains(github.event.pull_request.labels.*.node_id, env.LABEL_ID) }}
env:
GH_TOKEN: ${{ github.token }}
NODE_ID: ${{ github.event.pull_request.node_id }}
run: |
gh api graphql \
-f prId="$NODE_ID" \
-f labelId="$LABEL_ID" \
-f query='
mutation ReopenPR($prId: ID!, $labelId: ID!) {
reopenPullRequest(input: {
pullRequestId: $prId
}) {
__typename
}
removeLabelsFromLabelable(input: {
labelableId: $prId,
labelIds: [$labelId]
}) {
__typename
}
}'

View File

@@ -31,7 +31,7 @@ jobs:
working-directory: ./cli
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -42,7 +42,7 @@ jobs:
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@b906affcce14559ad1aafd4ab0e942779e9f58b1 # v4.3.0
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- name: Setup Node
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
@@ -71,7 +71,7 @@ jobs:
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -83,13 +83,13 @@ jobs:
token: ${{ steps.token.outputs.token }}
- name: Set up QEMU
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
- name: Login to GitHub Container Registry
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
if: ${{ !github.event.pull_request.head.repo.fork }}
with:
registry: ghcr.io
@@ -104,7 +104,7 @@ jobs:
- name: Generate docker image tags
id: metadata
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # v6.0.0
with:
flavor: |
latest=false
@@ -115,7 +115,7 @@ jobs:
type=raw,value=latest,enable=${{ github.event_name == 'release' }}
- name: Build and push image
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
with:
file: cli/Dockerfile
platforms: linux/amd64,linux/arm64

View File

@@ -35,7 +35,7 @@ jobs:
needs: [get_body, should_run]
if: ${{ needs.should_run.outputs.should_run == 'true' }}
container:
image: ghcr.io/immich-app/mdq:main@sha256:4f9860d04c88f7f87861f8ee84bfeedaec15ed7ca5ca87bc7db44b036f81645f
image: ghcr.io/immich-app/mdq:main@sha256:df7188ba88abb0800d73cc97d3633280f0c0c3d4c441d678225067bf154150fb
outputs:
checked: ${{ steps.get_checkbox.outputs.checked }}
steps:

View File

@@ -1,38 +0,0 @@
name: Close LLM-generated PRs
on:
pull_request_target:
types: [labeled]
permissions: {}
jobs:
comment_and_close:
runs-on: ubuntu-latest
if: ${{ github.event.label.name == 'llm-generated' }}
permissions:
pull-requests: write
steps:
- name: Comment and close
env:
GH_TOKEN: ${{ github.token }}
NODE_ID: ${{ github.event.pull_request.node_id }}
run: |
gh api graphql \
-f prId="$NODE_ID" \
-f body="Thank you for your interest in contributing to Immich! Unfortunately this PR looks like it was generated using an LLM. As noted in our [CONTRIBUTING.md](https://github.com/immich-app/immich/blob/main/CONTRIBUTING.md#use-of-generative-ai), we request that you don't use LLMs to generate PRs as those are not a good use of maintainer time." \
-f query='
mutation CommentAndClosePR($prId: ID!, $body: String!) {
addComment(input: {
subjectId: $prId,
body: $body
}) {
__typename
}
closePullRequest(input: {
pullRequestId: $prId
}) {
__typename
}
}'

View File

@@ -44,7 +44,7 @@ jobs:
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -57,7 +57,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6
uses: github/codeql-action/init@b1bff81932f5cdfc8695c7752dcee935dcd061c8 # v4.33.0
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -70,7 +70,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6
uses: github/codeql-action/autobuild@b1bff81932f5cdfc8695c7752dcee935dcd061c8 # v4.33.0
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -83,6 +83,6 @@ jobs:
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@0d579ffd059c29b07949a3cce3983f0780820c98 # v4.32.6
uses: github/codeql-action/analyze@b1bff81932f5cdfc8695c7752dcee935dcd061c8 # v4.33.0
with:
category: '/language:${{matrix.language}}'

View File

@@ -23,14 +23,14 @@ jobs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Check what should run
id: check
uses: immich-app/devtools/actions/pre-job@eed0f8b8165ffcb951f2ba854b2dd031935e1d73 # pre-job-action-v2.0.2
uses: immich-app/devtools/actions/pre-job@f50e3b600b6ac1763ddb8f3dfc69093512b967a1 # pre-job-action-v2.0.3
with:
github-token: ${{ steps.token.outputs.token }}
filters: |
@@ -60,7 +60,7 @@ jobs:
suffix: ['', '-cuda', '-rocm', '-openvino', '-armnn', '-rknn']
steps:
- name: Login to GitHub Container Registry
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -90,7 +90,7 @@ jobs:
suffix: ['']
steps:
- name: Login to GitHub Container Registry
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -132,7 +132,7 @@ jobs:
suffixes: '-rocm'
platforms: linux/amd64
runner-mapping: '{"linux/amd64": "pokedex-large"}'
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@bd49ed7a5a6022149f79b6564df48177476a822b # multi-runner-build-workflow-v2.2.1
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@61a0fc2b41524edcc7c9fffb8bb178e6b0ccf21d # multi-runner-build-workflow-v2.3.0
permissions:
contents: read
actions: read
@@ -155,7 +155,7 @@ jobs:
name: Build and Push Server
needs: pre-job
if: ${{ fromJSON(needs.pre-job.outputs.should_run).server == true }}
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@bd49ed7a5a6022149f79b6564df48177476a822b # multi-runner-build-workflow-v2.2.1
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@61a0fc2b41524edcc7c9fffb8bb178e6b0ccf21d # multi-runner-build-workflow-v2.3.0
permissions:
contents: read
actions: read

View File

@@ -21,14 +21,14 @@ jobs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Check what should run
id: check
uses: immich-app/devtools/actions/pre-job@eed0f8b8165ffcb951f2ba854b2dd031935e1d73 # pre-job-action-v2.0.2
uses: immich-app/devtools/actions/pre-job@f50e3b600b6ac1763ddb8f3dfc69093512b967a1 # pre-job-action-v2.0.3
with:
github-token: ${{ steps.token.outputs.token }}
filters: |
@@ -54,7 +54,7 @@ jobs:
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -67,7 +67,7 @@ jobs:
fetch-depth: 0
- name: Setup pnpm
uses: pnpm/action-setup@b906affcce14559ad1aafd4ab0e942779e9f58b1 # v4.3.0
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- name: Setup Node
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
@@ -86,7 +86,7 @@ jobs:
run: pnpm build
- name: Upload build output
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: docs-build-output
path: docs/build/

View File

@@ -20,7 +20,7 @@ jobs:
artifact: ${{ steps.get-artifact.outputs.result }}
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -119,7 +119,7 @@ jobs:
if: ${{ fromJson(needs.checks.outputs.artifact).found && fromJson(needs.checks.outputs.parameters).shouldDeploy }}
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -131,7 +131,7 @@ jobs:
token: ${{ steps.token.outputs.token }}
- name: Setup Mise
uses: immich-app/devtools/actions/use-mise@dab18118da6476e8237ac94080fd937983fecd42 # use-mise-action-v1.1.2
uses: immich-app/devtools/actions/use-mise@035e80a7d4355d5f087ffb95db9e4a0944c04e56 # use-mise-action-v1.1.3
- name: Load parameters
id: parameters

View File

@@ -17,7 +17,7 @@ jobs:
pull-requests: write
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -29,7 +29,7 @@ jobs:
token: ${{ steps.token.outputs.token }}
- name: Setup Mise
uses: immich-app/devtools/actions/use-mise@dab18118da6476e8237ac94080fd937983fecd42 # use-mise-action-v1.1.2
uses: immich-app/devtools/actions/use-mise@035e80a7d4355d5f087ffb95db9e4a0944c04e56 # use-mise-action-v1.1.3
- name: Destroy Docs Subdomain
env:

View File

@@ -16,7 +16,7 @@ jobs:
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v3.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -29,7 +29,7 @@ jobs:
persist-credentials: true
- name: Setup pnpm
uses: pnpm/action-setup@b906affcce14559ad1aafd4ab0e942779e9f58b1 # v4.3.0
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- name: Setup Node
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0

View File

@@ -31,7 +31,7 @@ jobs:
- name: Generate a token
id: generate_token
if: ${{ inputs.skip != true }}
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v3.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

View File

@@ -14,13 +14,13 @@ jobs:
pull-requests: write
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Require PR to have a changelog label
uses: mheap/github-action-required-labels@8afbe8ae6ab7647d0c9f0cfa7c2f939650d22509 # v5.5.1
uses: mheap/github-action-required-labels@0ac283b4e65c1fb28ce6079dea5546ceca98ccbe # v5.5.2
with:
token: ${{ steps.token.outputs.token }}
mode: exactly

View File

@@ -12,7 +12,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

View File

@@ -50,7 +50,7 @@ jobs:
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v3.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -63,10 +63,10 @@ jobs:
ref: main
- name: Install uv
uses: astral-sh/setup-uv@6ee6290f1cbc4156c0bdd66691b2c144ef8df19a # v7.4.0
uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7.6.0
- name: Setup pnpm
uses: pnpm/action-setup@b906affcce14559ad1aafd4ab0e942779e9f58b1 # v4.3.0
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- name: Setup Node
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
@@ -124,7 +124,7 @@ jobs:
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
uses: actions/create-github-app-token@f8d387b68d61c58ab83c6c016672934102569859 # v3.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -136,13 +136,13 @@ jobs:
persist-credentials: false
- name: Download APK
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
name: release-apk-signed
github-token: ${{ steps.generate-token.outputs.token }}
- name: Create draft release
uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2.5.0
uses: softprops/action-gh-release@153bb8e04406b158c6c84fc1615b65b24149a1fe # v2.6.1
with:
draft: true
tag_name: ${{ needs.bump_version.outputs.version }}

View File

@@ -14,12 +14,12 @@ jobs:
pull-requests: write
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
- uses: mshick/add-pr-comment@ffd016c7e151d97d69d21a843022fd4cd5b96fe5 # v3.9.0
with:
github-token: ${{ steps.token.outputs.token }}
message-id: 'preview-status'
@@ -32,7 +32,7 @@ jobs:
pull-requests: write
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -48,14 +48,14 @@ jobs:
name: 'preview'
})
- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
- uses: mshick/add-pr-comment@ffd016c7e151d97d69d21a843022fd4cd5b96fe5 # v3.9.0
if: ${{ github.event.pull_request.head.repo.fork }}
with:
github-token: ${{ steps.token.outputs.token }}
message-id: 'preview-status'
message: 'PRs from forks cannot have preview environments.'
- uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2.8.2
- uses: mshick/add-pr-comment@ffd016c7e151d97d69d21a843022fd4cd5b96fe5 # v3.9.0
if: ${{ !github.event.pull_request.head.repo.fork }}
with:
github-token: ${{ steps.token.outputs.token }}

View File

@@ -19,7 +19,7 @@ jobs:
working-directory: ./open-api/typescript-sdk
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -30,7 +30,7 @@ jobs:
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@b906affcce14559ad1aafd4ab0e942779e9f58b1 # v4.3.0
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0

View File

@@ -20,14 +20,14 @@ jobs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Check what should run
id: check
uses: immich-app/devtools/actions/pre-job@eed0f8b8165ffcb951f2ba854b2dd031935e1d73 # pre-job-action-v2.0.2
uses: immich-app/devtools/actions/pre-job@f50e3b600b6ac1763ddb8f3dfc69093512b967a1 # pre-job-action-v2.0.3
with:
github-token: ${{ steps.token.outputs.token }}
filters: |
@@ -49,7 +49,7 @@ jobs:
working-directory: ./mobile
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -61,7 +61,7 @@ jobs:
token: ${{ steps.token.outputs.token }}
- name: Setup Flutter SDK
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
uses: subosito/flutter-action@0ca7a949e71ae44c8e688a51c5e7e93b2c87e295 # v2.22.0
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml

View File

@@ -17,14 +17,14 @@ jobs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Check what should run
id: check
uses: immich-app/devtools/actions/pre-job@eed0f8b8165ffcb951f2ba854b2dd031935e1d73 # pre-job-action-v2.0.2
uses: immich-app/devtools/actions/pre-job@f50e3b600b6ac1763ddb8f3dfc69093512b967a1 # pre-job-action-v2.0.3
with:
github-token: ${{ steps.token.outputs.token }}
filters: |
@@ -63,7 +63,7 @@ jobs:
working-directory: ./server
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -75,7 +75,7 @@ jobs:
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@b906affcce14559ad1aafd4ab0e942779e9f58b1 # v4.3.0
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- name: Setup Node
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
with:
@@ -108,7 +108,7 @@ jobs:
working-directory: ./cli
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -119,7 +119,7 @@ jobs:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@b906affcce14559ad1aafd4ab0e942779e9f58b1 # v4.3.0
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- name: Setup Node
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
with:
@@ -155,7 +155,7 @@ jobs:
working-directory: ./cli
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -166,7 +166,7 @@ jobs:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@b906affcce14559ad1aafd4ab0e942779e9f58b1 # v4.3.0
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- name: Setup Node
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
with:
@@ -197,7 +197,7 @@ jobs:
working-directory: ./web
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -208,7 +208,7 @@ jobs:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@b906affcce14559ad1aafd4ab0e942779e9f58b1 # v4.3.0
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- name: Setup Node
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
with:
@@ -241,7 +241,7 @@ jobs:
working-directory: ./web
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -252,7 +252,7 @@ jobs:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@b906affcce14559ad1aafd4ab0e942779e9f58b1 # v4.3.0
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- name: Setup Node
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
with:
@@ -279,7 +279,7 @@ jobs:
contents: read
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -290,7 +290,7 @@ jobs:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@b906affcce14559ad1aafd4ab0e942779e9f58b1 # v4.3.0
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- name: Setup Node
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
with:
@@ -327,7 +327,7 @@ jobs:
working-directory: ./e2e
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -338,7 +338,7 @@ jobs:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@b906affcce14559ad1aafd4ab0e942779e9f58b1 # v4.3.0
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- name: Setup Node
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
with:
@@ -373,7 +373,7 @@ jobs:
working-directory: ./server
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -385,7 +385,7 @@ jobs:
submodules: 'recursive'
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@b906affcce14559ad1aafd4ab0e942779e9f58b1 # v4.3.0
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- name: Setup Node
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
with:
@@ -412,7 +412,7 @@ jobs:
runner: [ubuntu-latest, ubuntu-24.04-arm]
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -424,7 +424,7 @@ jobs:
submodules: 'recursive'
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@b906affcce14559ad1aafd4ab0e942779e9f58b1 # v4.3.0
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- name: Setup Node
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
with:
@@ -464,7 +464,7 @@ jobs:
run: docker compose logs --no-color > docker-compose-logs.txt
working-directory: ./e2e
- name: Archive Docker logs
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
if: always()
with:
name: e2e-server-docker-logs-${{ matrix.runner }}
@@ -484,7 +484,7 @@ jobs:
runner: [ubuntu-latest, ubuntu-24.04-arm]
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -496,7 +496,7 @@ jobs:
submodules: 'recursive'
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@b906affcce14559ad1aafd4ab0e942779e9f58b1 # v4.3.0
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- name: Setup Node
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
with:
@@ -522,7 +522,7 @@ jobs:
run: pnpm test:web
if: ${{ !cancelled() }}
- name: Archive e2e test (web) results
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
if: success() || failure()
with:
name: e2e-web-test-results-${{ matrix.runner }}
@@ -533,7 +533,7 @@ jobs:
run: pnpm test:web:ui
if: ${{ !cancelled() }}
- name: Archive ui test (web) results
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
if: success() || failure()
with:
name: e2e-ui-test-results-${{ matrix.runner }}
@@ -544,7 +544,7 @@ jobs:
run: pnpm test:web:maintenance
if: ${{ !cancelled() }}
- name: Archive maintenance tests (web) results
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
if: success() || failure()
with:
name: e2e-maintenance-isolated-test-results-${{ matrix.runner }}
@@ -554,7 +554,7 @@ jobs:
run: docker compose logs --no-color > docker-compose-logs.txt
working-directory: ./e2e
- name: Archive Docker logs
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
if: always()
with:
name: e2e-web-docker-logs-${{ matrix.runner }}
@@ -578,7 +578,7 @@ jobs:
contents: read
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -588,7 +588,7 @@ jobs:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Setup Flutter SDK
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2.21.0
uses: subosito/flutter-action@0ca7a949e71ae44c8e688a51c5e7e93b2c87e295 # v2.22.0
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
@@ -610,7 +610,7 @@ jobs:
working-directory: ./machine-learning
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -620,7 +620,7 @@ jobs:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Install uv
uses: astral-sh/setup-uv@6ee6290f1cbc4156c0bdd66691b2c144ef8df19a # v7.4.0
uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7.6.0
with:
python-version: 3.11
- name: Install dependencies
@@ -650,7 +650,7 @@ jobs:
working-directory: ./.github
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -661,7 +661,7 @@ jobs:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@b906affcce14559ad1aafd4ab0e942779e9f58b1 # v4.3.0
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- name: Setup Node
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
with:
@@ -680,7 +680,7 @@ jobs:
contents: read
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -701,7 +701,7 @@ jobs:
contents: read
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -712,7 +712,7 @@ jobs:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@b906affcce14559ad1aafd4ab0e942779e9f58b1 # v4.3.0
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- name: Setup Node
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
with:
@@ -763,7 +763,7 @@ jobs:
working-directory: ./server
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
@@ -774,7 +774,7 @@ jobs:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@b906affcce14559ad1aafd4ab0e942779e9f58b1 # v4.3.0
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0
- name: Setup Node
uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
with:

View File

@@ -24,14 +24,14 @@ jobs:
should_run: ${{ steps.check.outputs.should_run }}
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Check what should run
id: check
uses: immich-app/devtools/actions/pre-job@eed0f8b8165ffcb951f2ba854b2dd031935e1d73 # pre-job-action-v2.0.2
uses: immich-app/devtools/actions/pre-job@f50e3b600b6ac1763ddb8f3dfc69093512b967a1 # pre-job-action-v2.0.3
with:
github-token: ${{ steps.token.outputs.token }}
filters: |
@@ -47,7 +47,7 @@ jobs:
if: ${{ fromJSON(needs.pre-job.outputs.should_run).i18n == true }}
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@05e16407c0a5492138bb38139c9d9bf067b40886 # create-workflow-token-action-v1.0.1
uses: immich-app/devtools/actions/create-workflow-token@57ff6ebfd507b045514442683ff06ff1b2f6efbd # create-workflow-token-action-v1.0.2
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}

View File

@@ -1,6 +1,6 @@
{
"name": "@immich/cli",
"version": "2.6.1",
"version": "2.6.2",
"description": "Command Line Interface (CLI) for Immich",
"type": "module",
"exports": "./dist/index.js",
@@ -35,8 +35,7 @@
"prettier-plugin-organize-imports": "^4.0.0",
"typescript": "^5.3.3",
"typescript-eslint": "^8.28.0",
"vite": "^7.0.0",
"vite-tsconfig-paths": "^6.0.0",
"vite": "^8.0.0",
"vitest": "^4.0.0",
"vitest-fetch-mock": "^0.4.0",
"yaml": "^2.3.1"

View File

@@ -1,10 +1,12 @@
import { defineConfig, UserConfig } from 'vite';
import tsconfigPaths from 'vite-tsconfig-paths';
export default defineConfig({
resolve: { alias: { src: '/src' } },
resolve: {
alias: { src: '/src' },
tsconfigPaths: true,
},
build: {
rollupOptions: {
rolldownOptions: {
input: 'src/index.ts',
output: {
dir: 'dist',
@@ -16,7 +18,6 @@ export default defineConfig({
// bundle everything except for Node built-ins
noExternal: /^(?!node:).*$/,
},
plugins: [tsconfigPaths()],
test: {
name: 'cli:unit',
globals: true,

View File

@@ -8,7 +8,7 @@ Hardware and software requirements for Immich:
## Hardware
- **OS**: Recommended Linux or \*nix operating system (Ubuntu, Debian, etc).
- **OS**: Recommended Linux or \*nix 64-bit operating system (Ubuntu, Debian, etc).
- Non-Linux OSes tend to provide a poor Docker experience and are strongly discouraged.
Our ability to assist with setup or troubleshooting on non-Linux OSes will be severely reduced.
If you still want to try to use a non-Linux OS, you can set it up as follows:
@@ -19,6 +19,10 @@ Hardware and software requirements for Immich:
If you have issues, we recommend that you switch to a supported VM deployment.
- **RAM**: Minimum 6GB, recommended 8GB.
- **CPU**: Minimum 2 cores, recommended 4 cores.
- Immich runs on the `amd64` and `arm64` platforms.
Since `v2.6`, the machine learning container on `amd64` requires the `>= x86-64-v2` [microarchitecture level](https://en.wikipedia.org/wiki/X86-64#Microarchitecture_levels).
Most CPUs released since ~2012 support this microarchitecture.
If you are using a virtual machine, ensure you have selected a [supported microarchitecture](https://pve.proxmox.com/pve-docs/chapter-qm.html#_qemu_cpu_types).
- **Storage**: Recommended Unix-compatible filesystem (EXT4, ZFS, APFS, etc.) with support for user/group ownership and permissions.
- The generation of thumbnails and transcoded video can increase the size of the photo library by 10-20% on average.

View File

@@ -1,7 +1,7 @@
[
{
"label": "v2.6.1",
"url": "https://docs.v2.6.1.archive.immich.app"
"label": "v2.6.2",
"url": "https://docs.v2.6.2.archive.immich.app"
},
{
"label": "v2.5.6",

View File

@@ -1,6 +1,6 @@
{
"name": "immich-e2e",
"version": "2.6.1",
"version": "2.6.2",
"description": "",
"main": "index.js",
"type": "module",

View File

@@ -524,14 +524,19 @@ describe('/albums', () => {
expect(body).toEqual(errorDto.badRequest('Not found or no album.update access'));
});
it('should not be able to update as an editor', async () => {
it('should be able to update as an editor', async () => {
const { status, body } = await request(app)
.patch(`/albums/${user1Albums[0].id}`)
.set('Authorization', `Bearer ${user2.accessToken}`)
.send({ albumName: 'New album name' });
expect(status).toBe(400);
expect(body).toEqual(errorDto.badRequest('Not found or no album.update access'));
expect(status).toBe(200);
expect(body).toEqual(
expect.objectContaining({
id: user1Albums[0].id,
albumName: 'New album name',
}),
);
});
});

View File

@@ -0,0 +1,51 @@
import { AssetMediaResponseDto, LoginResponseDto, updateAssets } from '@immich/sdk';
import { expect, test } from '@playwright/test';
import crypto from 'node:crypto';
import { asBearerAuth, utils } from 'src/utils';
test.describe('Duplicates Utility', () => {
let admin: LoginResponseDto;
let firstAsset: AssetMediaResponseDto;
let secondAsset: AssetMediaResponseDto;
test.beforeAll(async () => {
utils.initSdk();
await utils.resetDatabase();
admin = await utils.adminSetup();
});
test.beforeEach(async ({ context }) => {
[firstAsset, secondAsset] = await Promise.all([
utils.createAsset(admin.accessToken, { deviceAssetId: 'duplicate-a' }),
utils.createAsset(admin.accessToken, { deviceAssetId: 'duplicate-b' }),
]);
await updateAssets(
{
assetBulkUpdateDto: {
ids: [firstAsset.id, secondAsset.id],
duplicateId: crypto.randomUUID(),
},
},
{ headers: asBearerAuth(admin.accessToken) },
);
await utils.setAuthCookies(context, admin.accessToken);
});
test('navigates with arrow keys between duplicate preview assets', async ({ page }) => {
await page.goto('/utilities/duplicates');
await page.getByRole('button', { name: 'View' }).first().click();
await page.waitForSelector('#immich-asset-viewer');
const getViewedAssetId = () => new URL(page.url()).pathname.split('/').at(-1) ?? '';
const initialAssetId = getViewedAssetId();
expect([firstAsset.id, secondAsset.id]).toContain(initialAssetId);
await page.keyboard.press('ArrowRight');
await expect.poll(getViewedAssetId).not.toBe(initialAssetId);
await page.keyboard.press('ArrowLeft');
await expect.poll(getViewedAssetId).toBe(initialAssetId);
});
});

View File

@@ -20,7 +20,7 @@ export {
toColumnarFormat,
} from './timeline/rest-response';
export type { Changes, FaceData } from './timeline/rest-response';
export type { Changes } from './timeline/rest-response';
export { randomImage, randomImageFromString, randomPreview, randomThumbnail } from './timeline/images';

View File

@@ -7,10 +7,8 @@ import {
AssetVisibility,
UserAvatarColor,
type AlbumResponseDto,
type AssetFaceWithoutPersonResponseDto,
type AssetResponseDto,
type ExifResponseDto,
type PersonWithFacesResponseDto,
type TimeBucketAssetResponseDto,
type TimeBucketsResponseDto,
type UserResponseDto,
@@ -286,16 +284,7 @@ const createDefaultOwner = (ownerId: string) => {
* Convert a TimelineAssetConfig to a full AssetResponseDto
* This matches the response from GET /api/assets/:id
*/
export type FaceData = {
people: PersonWithFacesResponseDto[];
unassignedFaces: AssetFaceWithoutPersonResponseDto[];
};
export function toAssetResponseDto(
asset: MockTimelineAsset,
owner?: UserResponseDto,
faceData?: FaceData,
): AssetResponseDto {
export function toAssetResponseDto(asset: MockTimelineAsset, owner?: UserResponseDto): AssetResponseDto {
const now = new Date().toISOString();
// Default owner if not provided
@@ -349,8 +338,8 @@ export function toAssetResponseDto(
exifInfo,
livePhotoVideoId: asset.livePhotoVideoId,
tags: [],
people: faceData?.people ?? [],
unassignedFaces: faceData?.unassignedFaces ?? [],
people: [],
unassignedFaces: [],
stack: asset.stack,
isOffline: false,
hasMetadata: true,

View File

@@ -1,6 +1,5 @@
import type { AssetFaceResponseDto, AssetResponseDto, PersonWithFacesResponseDto, SourceType } from '@immich/sdk';
import { BrowserContext } from '@playwright/test';
import { type FaceData, randomThumbnail } from 'src/ui/generators/timeline';
import { randomThumbnail } from 'src/ui/generators/timeline';
// Minimal valid H.264 MP4 (8x8px, 1 frame) that browsers can decode to get videoWidth/videoHeight
const MINIMAL_MP4_BASE64 =
@@ -126,84 +125,3 @@ export const setupFaceEditorMockApiRoutes = async (
});
});
};
export type MockFaceSpec = {
personId: string;
personName: string;
faceId: string;
boundingBoxX1: number;
boundingBoxY1: number;
boundingBoxX2: number;
boundingBoxY2: number;
};
const toPersonResponseDto = (spec: MockFaceSpec) => ({
id: spec.personId,
name: spec.personName,
birthDate: null,
isHidden: false,
thumbnailPath: `/upload/thumbs/${spec.personId}.jpeg`,
updatedAt: '2025-01-01T00:00:00.000Z',
});
const toBoundingBox = (spec: MockFaceSpec, imageWidth: number, imageHeight: number) => ({
id: spec.faceId,
imageWidth,
imageHeight,
boundingBoxX1: spec.boundingBoxX1,
boundingBoxY1: spec.boundingBoxY1,
boundingBoxX2: spec.boundingBoxX2,
boundingBoxY2: spec.boundingBoxY2,
});
export const createMockFaceData = (specs: MockFaceSpec[], imageWidth: number, imageHeight: number): FaceData => {
const people: PersonWithFacesResponseDto[] = specs.map((spec) => ({
...toPersonResponseDto(spec),
faces: [toBoundingBox(spec, imageWidth, imageHeight)],
}));
return { people, unassignedFaces: [] };
};
export const createMockAssetFaces = (
specs: MockFaceSpec[],
imageWidth: number,
imageHeight: number,
): AssetFaceResponseDto[] => {
return specs.map((spec) => ({
...toBoundingBox(spec, imageWidth, imageHeight),
person: toPersonResponseDto(spec),
sourceType: 'machine-learning' as SourceType,
}));
};
export const setupGetFacesMockApiRoute = async (context: BrowserContext, faces: AssetFaceResponseDto[]) => {
await context.route('**/api/faces?*', async (route, request) => {
if (request.method() !== 'GET') {
return route.fallback();
}
return route.fulfill({
status: 200,
contentType: 'application/json',
json: faces,
});
});
};
export const setupFaceOverlayMockApiRoutes = async (context: BrowserContext, assetDto: AssetResponseDto) => {
await context.route('**/api/assets/*', async (route, request) => {
if (request.method() !== 'GET') {
return route.fallback();
}
const url = new URL(request.url());
const assetId = url.pathname.split('/').at(-1);
if (assetId !== assetDto.id) {
return route.fallback();
}
return route.fulfill({
status: 200,
contentType: 'application/json',
json: assetDto,
});
});
};

View File

@@ -1,55 +0,0 @@
import { faker } from '@faker-js/faker';
import type { AssetOcrResponseDto } from '@immich/sdk';
import { BrowserContext } from '@playwright/test';
export type MockOcrBox = {
text: string;
x1: number;
y1: number;
x2: number;
y2: number;
x3: number;
y3: number;
x4: number;
y4: number;
};
export const createMockOcrData = (assetId: string, boxes: MockOcrBox[]): AssetOcrResponseDto[] => {
return boxes.map((box) => ({
id: faker.string.uuid(),
assetId,
x1: box.x1,
y1: box.y1,
x2: box.x2,
y2: box.y2,
x3: box.x3,
y3: box.y3,
x4: box.x4,
y4: box.y4,
boxScore: 0.95,
textScore: 0.9,
text: box.text,
}));
};
export const setupOcrMockApiRoutes = async (
context: BrowserContext,
ocrDataByAssetId: Map<string, AssetOcrResponseDto[]>,
) => {
await context.route('**/assets/*/ocr', async (route, request) => {
if (request.method() !== 'GET') {
return route.fallback();
}
const url = new URL(request.url());
const segments = url.pathname.split('/');
const assetIdIndex = segments.indexOf('assets') + 1;
const assetId = segments[assetIdIndex];
const ocrData = ocrDataByAssetId.get(assetId) ?? [];
return route.fulfill({
status: 200,
contentType: 'application/json',
json: ocrData,
});
});
};

View File

@@ -10,21 +10,16 @@ import { assetViewerUtils } from '../timeline/utils';
import { setupAssetViewerFixture } from './utils';
const waitForSelectorTransition = async (page: Page) => {
await expect(page.locator('#face-editor-data')).toHaveAttribute('data-face-width', /^[1-9]/, { timeout: 10_000 });
await page.locator('#face-selector').evaluate(
(el) =>
new Promise<void>((resolve) => {
requestAnimationFrame(() =>
requestAnimationFrame(() => {
const animations = el.getAnimations();
if (animations.length === 0) {
resolve();
return;
}
void Promise.all(animations.map((a) => a.finished)).then(() => resolve());
}),
);
}),
await page.waitForFunction(
() => {
const selector = document.querySelector('#face-selector') as HTMLElement | null;
if (!selector) {
return false;
}
return selector.getAnimations({ subtree: false }).every((animation) => animation.playState === 'finished');
},
undefined,
{ timeout: 1000, polling: 50 },
);
};
@@ -100,7 +95,7 @@ test.describe('face-editor', () => {
await page.mouse.down();
await page.mouse.move(centerX + deltaX, centerY + deltaY, { steps: 5 });
await page.mouse.up();
await waitForSelectorTransition(page);
await page.waitForTimeout(300);
};
test('Face editor opens with person list', async ({ page }) => {
@@ -154,7 +149,7 @@ test.describe('face-editor', () => {
await expect(page.getByRole('dialog')).toBeVisible();
});
test('Confirming tag calls createFace API with valid coordinates and closes editor', async ({ page }) => {
test('Confirming tag calls createFace API and closes editor', async ({ page }) => {
const asset = selectRandom(fixture.assets, rng);
await openFaceEditor(page, asset);
@@ -168,15 +163,8 @@ test.describe('face-editor', () => {
await expect(page.locator('#face-editor')).toBeHidden();
expect(faceCreateCapture.requests).toHaveLength(1);
const request = faceCreateCapture.requests[0];
expect(request.assetId).toBe(asset.id);
expect(request.personId).toBe(personToTag.id);
expect(request.x).toBeGreaterThanOrEqual(0);
expect(request.y).toBeGreaterThanOrEqual(0);
expect(request.width).toBeGreaterThan(0);
expect(request.height).toBeGreaterThan(0);
expect(request.x + request.width).toBeLessThanOrEqual(request.imageWidth);
expect(request.y + request.height).toBeLessThanOrEqual(request.imageHeight);
expect(faceCreateCapture.requests[0].assetId).toBe(asset.id);
expect(faceCreateCapture.requests[0].personId).toBe(personToTag.id);
});
test('Cancel button closes face editor', async ({ page }) => {
@@ -294,39 +282,4 @@ test.describe('face-editor', () => {
expect(afterDrag.left).toBeGreaterThan(beforeDrag.left + 50);
expect(afterDrag.top).toBeGreaterThan(beforeDrag.top + 20);
});
test('Cancel on confirmation dialog keeps face editor open', async ({ page }) => {
const asset = selectRandom(fixture.assets, rng);
await openFaceEditor(page, asset);
const personToTag = mockPeople[0];
await page.locator('#face-selector').getByText(personToTag.name).click();
await expect(page.getByRole('dialog')).toBeVisible();
await page
.getByRole('dialog')
.getByRole('button', { name: /cancel/i })
.click();
await expect(page.getByRole('dialog')).toBeHidden();
await expect(page.locator('#face-selector')).toBeVisible();
await expect(page.locator('#face-editor')).toBeVisible();
expect(faceCreateCapture.requests).toHaveLength(0);
});
test('Clicking on face rect center does not reposition it', async ({ page }) => {
const asset = selectRandom(fixture.assets, rng);
await openFaceEditor(page, asset);
const beforeClick = await getFaceBoxRect(page);
const centerX = beforeClick.left + beforeClick.width / 2;
const centerY = beforeClick.top + beforeClick.height / 2;
await page.mouse.click(centerX, centerY);
await waitForSelectorTransition(page);
const afterClick = await getFaceBoxRect(page);
expect(Math.abs(afterClick.left - beforeClick.left)).toBeLessThan(3);
expect(Math.abs(afterClick.top - beforeClick.top)).toBeLessThan(3);
});
});

View File

@@ -1,340 +0,0 @@
import { expect, test } from '@playwright/test';
import { toAssetResponseDto } from 'src/ui/generators/timeline';
import {
createMockAssetFaces,
createMockFaceData,
createMockPeople,
type MockFaceSpec,
setupFaceEditorMockApiRoutes,
setupFaceOverlayMockApiRoutes,
setupGetFacesMockApiRoute,
} from 'src/ui/mock-network/face-editor-network';
import { assetViewerUtils } from '../timeline/utils';
import { ensureDetailPanelVisible, setupAssetViewerFixture } from './utils';
// All tests in this spec are independent; run them in parallel workers.
test.describe.configure({ mode: 'parallel' });
// Two mock faces with pixel-space bounding boxes. x1/y1 and x2/y2 are
// presumably the top-left and bottom-right corners (x1 < x2, y1 < y2 here) —
// confirm against MockFaceSpec. Coordinates fit the 3000x4000 fallback canvas
// used by setupFaceMocks below.
const FACE_SPECS: MockFaceSpec[] = [
{
personId: 'person-alice',
personName: 'Alice Johnson',
faceId: 'face-alice',
boundingBoxX1: 1000,
boundingBoxY1: 500,
boundingBoxX2: 1500,
boundingBoxY2: 1200,
},
{
personId: 'person-bob',
personName: 'Bob Smith',
faceId: 'face-bob',
boundingBoxX1: 2000,
boundingBoxY1: 800,
boundingBoxX2: 2400,
boundingBoxY2: 1600,
},
];
/**
 * Installs the mock network routes backing the face overlay and the face
 * editor for one asset-viewer fixture: four mock people for the person picker,
 * plus face data derived from FACE_SPECS attached to the fixture's primary asset.
 */
const setupFaceMocks = async (
  context: import('@playwright/test').BrowserContext,
  fixture: ReturnType<typeof setupAssetViewerFixture>,
) => {
  const people = createMockPeople(4);
  // Fall back to a 3000x4000 canvas when the fixture DTO lacks dimensions.
  const canvasWidth = fixture.primaryAssetDto.width ?? 3000;
  const canvasHeight = fixture.primaryAssetDto.height ?? 4000;
  const faces = createMockFaceData(FACE_SPECS, canvasWidth, canvasHeight);
  const dtoWithFaces = toAssetResponseDto(fixture.primaryAsset, undefined, faces);
  await setupFaceOverlayMockApiRoutes(context, dtoWithFaces);
  // Fresh capture object per call so createFace requests don't leak across tests.
  await setupFaceEditorMockApiRoutes(context, people, { requests: [] });
};
// Static rendering and hover behavior of the face overlay boxes drawn on top
// of the photo in the asset viewer.
test.describe('face overlay bounding boxes', () => {
const fixture = setupAssetViewerFixture(901);
test.beforeEach(async ({ context }) => {
await setupFaceMocks(context, fixture);
});
test('face overlay divs render with correct aria labels', async ({ page }) => {
await page.goto(`/photos/${fixture.primaryAsset.id}`);
await assetViewerUtils.waitForViewerLoad(page, fixture.primaryAsset);
const aliceOverlay = page.getByLabel('Person: Alice Johnson');
const bobOverlay = page.getByLabel('Person: Bob Smith');
await expect(aliceOverlay).toBeVisible();
await expect(bobOverlay).toBeVisible();
});
test('face overlay shows border on hover', async ({ page }) => {
await page.goto(`/photos/${fixture.primaryAsset.id}`);
await assetViewerUtils.waitForViewerLoad(page, fixture.primaryAsset);
const aliceOverlay = page.getByLabel('Person: Alice Johnson');
await expect(aliceOverlay).toBeVisible();
// The hover highlight is a 3px solid white border element inside the viewer;
// it must not exist before hover and appear exactly once after.
const activeBorder = page.locator('[data-viewer-content] .border-solid.border-white.border-3');
await expect(activeBorder).toHaveCount(0);
await aliceOverlay.hover();
await expect(activeBorder).toHaveCount(1);
});
test('face name tooltip appears on hover', async ({ page }) => {
await page.goto(`/photos/${fixture.primaryAsset.id}`);
await assetViewerUtils.waitForViewerLoad(page, fixture.primaryAsset);
const aliceOverlay = page.getByLabel('Person: Alice Johnson');
await expect(aliceOverlay).toBeVisible();
await aliceOverlay.hover();
const nameTooltip = page.locator('[data-viewer-content]').getByText('Alice Johnson');
await expect(nameTooltip).toBeVisible();
});
test('face overlays hidden in face edit mode', async ({ page }) => {
await page.goto(`/photos/${fixture.primaryAsset.id}`);
await assetViewerUtils.waitForViewerLoad(page, fixture.primaryAsset);
const aliceOverlay = page.getByLabel('Person: Alice Johnson');
await expect(aliceOverlay).toBeVisible();
await ensureDetailPanelVisible(page);
// Entering tag mode opens the face selector; passive overlays should hide.
await page.getByLabel('Tag people').click();
await page.locator('#face-selector').waitFor({ state: 'visible' });
await expect(aliceOverlay).toBeHidden();
});
test('face overlay hover works after exiting face edit mode', async ({ page }) => {
await page.goto(`/photos/${fixture.primaryAsset.id}`);
await assetViewerUtils.waitForViewerLoad(page, fixture.primaryAsset);
const aliceOverlay = page.getByLabel('Person: Alice Johnson');
await expect(aliceOverlay).toBeVisible();
await ensureDetailPanelVisible(page);
await page.getByLabel('Tag people').click();
await page.locator('#face-selector').waitFor({ state: 'visible' });
await expect(aliceOverlay).toBeHidden();
// Leave edit mode via Cancel; overlays and hover highlighting must recover.
await page.getByRole('button', { name: /cancel/i }).click();
await expect(page.locator('#face-selector')).toBeHidden();
await expect(aliceOverlay).toBeVisible();
const activeBorder = page.locator('[data-viewer-content] .border-solid.border-white.border-3');
await expect(activeBorder).toHaveCount(0);
await aliceOverlay.hover();
await expect(activeBorder).toHaveCount(1);
});
});
// Interaction between the viewer's zoom/pan transform and face edit mode.
test.describe('zoom and face editor interaction', () => {
const fixture = setupAssetViewerFixture(902);
test.beforeEach(async ({ context }) => {
await setupFaceMocks(context, fixture);
});
test('zoom is preserved when entering face edit mode', async ({ page }) => {
await page.goto(`/photos/${fixture.primaryAsset.id}`);
await assetViewerUtils.waitForViewerLoad(page, fixture.primaryAsset);
const { width, height } = page.viewportSize()!;
// Wheel-up over the viewport center zooms in.
await page.mouse.move(width / 2, height / 2);
await page.mouse.wheel(0, -1);
const imgLocator = page.locator('[data-viewer-content] img[data-testid="preview"]');
// Poll until a zoom transform has been applied. The transform lives on the
// nearest ancestor carrying an inline transform style, or the img itself.
await expect(async () => {
const transform = await imgLocator.evaluate((element) => {
return getComputedStyle(element.closest('[style*="transform"]') ?? element).transform;
});
expect(transform).not.toBe('none');
expect(transform).not.toBe('');
}).toPass({ timeout: 2000 });
await ensureDetailPanelVisible(page);
await page.getByLabel('Tag people').click();
await page.locator('#face-selector').waitFor({ state: 'visible' });
await expect(page.locator('#face-editor')).toBeVisible();
// Entering edit mode must not reset the zoom transform back to 'none'.
const afterTransform = await imgLocator.evaluate((element) => {
return getComputedStyle(element.closest('[style*="transform"]') ?? element).transform;
});
expect(afterTransform).not.toBe('none');
});
test('modifier+drag pans zoomed image without repositioning face rect', async ({ page }) => {
await page.goto(`/photos/${fixture.primaryAsset.id}`);
await assetViewerUtils.waitForViewerLoad(page, fixture.primaryAsset);
const { width, height } = page.viewportSize()!;
await page.mouse.move(width / 2, height / 2);
// Zoom in hard so a pan produces a clearly measurable translation.
for (let i = 0; i < 10; i++) {
await page.mouse.wheel(0, -3);
}
const imgLocator = page.locator('[data-viewer-content] img[data-testid="preview"]');
await expect(async () => {
const transform = await imgLocator.evaluate((element) => {
return getComputedStyle(element.closest('[style*="transform"]') ?? element).transform;
});
expect(transform).not.toBe('none');
}).toPass({ timeout: 2000 });
await ensureDetailPanelVisible(page);
await page.getByLabel('Tag people').click();
await page.locator('#face-selector').waitFor({ state: 'visible' });
// #face-editor-data exposes the face rect's screen geometry via data-* attributes.
const dataEl = page.locator('#face-editor-data');
await expect(dataEl).toHaveAttribute('data-face-width', /^[1-9]/);
const beforeLeft = Number(await dataEl.getAttribute('data-face-left'));
const beforeTop = Number(await dataEl.getAttribute('data-face-top'));
const transformBefore = await imgLocator.evaluate((element) => {
return getComputedStyle(element.closest('[style*="transform"]') ?? element).transform;
});
// The pan modifier key is platform dependent: Meta on Apple, Control elsewhere.
const panModifier = await page.evaluate(() =>
/Mac|iPhone|iPad|iPod/.test(navigator.userAgent) ? 'Meta' : 'Control',
);
await page.keyboard.down(panModifier);
// Verify face editor becomes transparent to pointer events
await expect(async () => {
const pe = await dataEl.evaluate((el) => getComputedStyle(el).pointerEvents);
expect(pe).toBe('none');
}).toPass({ timeout: 2000 });
await page.mouse.move(width / 2, height / 2);
await page.mouse.down();
await page.mouse.move(width / 2 + 100, height / 2 + 50, { steps: 5 });
await page.mouse.up();
await page.keyboard.up(panModifier);
const transformAfter = await imgLocator.evaluate((element) => {
return getComputedStyle(element.closest('[style*="transform"]') ?? element).transform;
});
expect(transformAfter).not.toBe(transformBefore);
// Extract translate values from matrix(a, b, c, d, tx, ty)
const parseTranslate = (matrix: string) => {
const values =
matrix
.match(/matrix\((.+)\)/)?.[1]
.split(',')
.map(Number) ?? [];
return { tx: values[4], ty: values[5] };
};
const panBefore = parseTranslate(transformBefore);
const panAfter = parseTranslate(transformAfter);
const panDeltaX = panAfter.tx - panBefore.tx;
const panDeltaY = panAfter.ty - panBefore.ty;
// Face rect screen position should have moved by the same amount as the pan
// (it follows the image), NOT been repositioned by a click
const afterLeft = Number(await dataEl.getAttribute('data-face-left'));
const afterTop = Number(await dataEl.getAttribute('data-face-top'));
const faceDeltaX = afterLeft - beforeLeft;
const faceDeltaY = afterTop - beforeTop;
// 3px tolerance absorbs sub-pixel rounding of the CSS matrix values.
expect(Math.abs(faceDeltaX - panDeltaX)).toBeLessThan(3);
expect(Math.abs(faceDeltaY - panDeltaY)).toBeLessThan(3);
});
});
// Hovering/touching a person entry in the detail panel should highlight the
// matching face overlay in the viewer.
test.describe('face overlay via detail panel interaction', () => {
const fixture = setupAssetViewerFixture(903);
test.beforeEach(async ({ context }) => {
await setupFaceMocks(context, fixture);
});
test('hovering person in detail panel shows face overlay border', async ({ page }) => {
await page.goto(`/photos/${fixture.primaryAsset.id}`);
await assetViewerUtils.waitForViewerLoad(page, fixture.primaryAsset);
await ensureDetailPanelVisible(page);
const personLink = page.locator('#detail-panel a').filter({ hasText: 'Alice Johnson' });
await expect(personLink).toBeVisible();
// The highlight is a 3px solid white border element inside the viewer.
const activeBorder = page.locator('[data-viewer-content] .border-solid.border-white.border-3');
await expect(activeBorder).toHaveCount(0);
await personLink.hover();
await expect(activeBorder).toHaveCount(1);
});
test('touch pointer on person in detail panel shows face overlay border', async ({ page }) => {
await page.goto(`/photos/${fixture.primaryAsset.id}`);
await assetViewerUtils.waitForViewerLoad(page, fixture.primaryAsset);
await ensureDetailPanelVisible(page);
const personLink = page.locator('#detail-panel a').filter({ hasText: 'Alice Johnson' });
await expect(personLink).toBeVisible();
const activeBorder = page.locator('[data-viewer-content] .border-solid.border-white.border-3');
await expect(activeBorder).toHaveCount(0);
// Simulate a touch-type pointerover (the fix changed from onmouseover to onpointerover,
// which fires for touch pointers unlike mouseover)
await personLink.dispatchEvent('pointerover', { pointerType: 'touch' });
await expect(activeBorder).toHaveCount(1);
});
test('hovering person in detail panel works after exiting face edit mode', async ({ page }) => {
await page.goto(`/photos/${fixture.primaryAsset.id}`);
await assetViewerUtils.waitForViewerLoad(page, fixture.primaryAsset);
await ensureDetailPanelVisible(page);
// Round-trip through face edit mode, then verify hover highlighting survives.
await page.getByLabel('Tag people').click();
await page.locator('#face-selector').waitFor({ state: 'visible' });
await page.getByRole('button', { name: /cancel/i }).click();
await expect(page.locator('#face-selector')).toBeHidden();
const personLink = page.locator('#detail-panel a').filter({ hasText: 'Alice Johnson' });
await expect(personLink).toBeVisible();
const activeBorder = page.locator('[data-viewer-content] .border-solid.border-white.border-3');
await personLink.hover();
await expect(activeBorder).toHaveCount(1);
});
});
// Hovering a face thumbnail in the "Edit people" side panel should highlight
// the matching overlay; needs the get-faces endpoint mocked in addition to the
// standard face mocks.
test.describe('face overlay via edit faces side panel', () => {
const fixture = setupAssetViewerFixture(904);
test.beforeEach(async ({ context }) => {
await setupFaceMocks(context, fixture);
const assetFaces = createMockAssetFaces(
FACE_SPECS,
fixture.primaryAssetDto.width ?? 3000,
fixture.primaryAssetDto.height ?? 4000,
);
await setupGetFacesMockApiRoute(context, assetFaces);
});
test('hovering person in edit faces panel shows face overlay border', async ({ page }) => {
await page.goto(`/photos/${fixture.primaryAsset.id}`);
await assetViewerUtils.waitForViewerLoad(page, fixture.primaryAsset);
await ensureDetailPanelVisible(page);
await page.getByLabel('Edit people').click();
const faceThumbnail = page.getByTestId('face-thumbnail').first();
await expect(faceThumbnail).toBeVisible();
const activeBorder = page.locator('[data-viewer-content] .border-solid.border-white.border-3');
await expect(activeBorder).toHaveCount(0);
await faceThumbnail.hover();
await expect(activeBorder).toHaveCount(1);
});
});

View File

@@ -1,300 +0,0 @@
import type { AssetOcrResponseDto, AssetResponseDto } from '@immich/sdk';
import { expect, test } from '@playwright/test';
import { toAssetResponseDto } from 'src/ui/generators/timeline';
import {
createMockStack,
createMockStackAsset,
MockStack,
setupBrokenAssetMockApiRoutes,
} from 'src/ui/mock-network/broken-asset-network';
import { createMockOcrData, setupOcrMockApiRoutes } from 'src/ui/mock-network/ocr-network';
import { assetViewerUtils } from '../timeline/utils';
import { setupAssetViewerFixture } from './utils';
// All tests in this spec are independent; run them in parallel workers.
test.describe.configure({ mode: 'parallel' });
// OCR boxes expressed as fractions of the image size. x1..x4/y1..y4 are
// presumably the four corners of the text quad — confirm against createMockOcrData.
const PRIMARY_OCR_BOXES = [
{ text: 'Hello World', x1: 0.1, y1: 0.1, x2: 0.4, y2: 0.1, x3: 0.4, y3: 0.15, x4: 0.1, y4: 0.15 },
{ text: 'Immich Photo', x1: 0.2, y1: 0.3, x2: 0.6, y2: 0.3, x3: 0.6, y3: 0.36, x4: 0.2, y4: 0.36 },
];
// Distinct payload for the second asset in the stacked-assets suite.
const SECONDARY_OCR_BOXES = [
{ text: 'Second Asset Text', x1: 0.15, y1: 0.2, x2: 0.55, y2: 0.2, x3: 0.55, y3: 0.26, x4: 0.15, y4: 0.26 },
];
// Toggling the OCR overlay on a single asset.
test.describe('OCR bounding boxes', () => {
const fixture = setupAssetViewerFixture(920);
test.beforeEach(async ({ context }) => {
const primaryAssetDto = toAssetResponseDto(fixture.primaryAsset);
const ocrDataByAssetId = new Map<string, AssetOcrResponseDto[]>([
[primaryAssetDto.id, createMockOcrData(primaryAssetDto.id, PRIMARY_OCR_BOXES)],
]);
await setupOcrMockApiRoutes(context, ocrDataByAssetId);
});
test('OCR bounding boxes appear when clicking OCR button', async ({ page }) => {
await page.goto(`/photos/${fixture.primaryAsset.id}`);
await assetViewerUtils.waitForViewerLoad(page, fixture.primaryAsset);
const ocrButton = page.getByLabel('Text recognition');
await expect(ocrButton).toBeVisible();
await ocrButton.click();
const ocrBoxes = page.locator('[data-viewer-content] [data-testid="ocr-box"]');
// One rendered box per entry in PRIMARY_OCR_BOXES, in order.
await expect(ocrBoxes).toHaveCount(2);
await expect(ocrBoxes.nth(0)).toContainText('Hello World');
await expect(ocrBoxes.nth(1)).toContainText('Immich Photo');
});
test('OCR bounding boxes toggle off on second click', async ({ page }) => {
await page.goto(`/photos/${fixture.primaryAsset.id}`);
await assetViewerUtils.waitForViewerLoad(page, fixture.primaryAsset);
const ocrButton = page.getByLabel('Text recognition');
await ocrButton.click();
await expect(page.locator('[data-viewer-content] [data-testid="ocr-box"]').first()).toBeVisible();
// Second click hides the overlay entirely.
await ocrButton.click();
await expect(page.locator('[data-viewer-content] [data-testid="ocr-box"]')).toHaveCount(0);
});
});
// OCR overlay when the asset belongs to a stack: each stacked asset has its
// own OCR payload, and switching assets must swap the boxes.
test.describe('OCR with stacked assets', () => {
const fixture = setupAssetViewerFixture(921);
let mockStack: MockStack;
let primaryAssetDto: AssetResponseDto;
let secondAssetDto: AssetResponseDto;
test.beforeAll(async () => {
primaryAssetDto = toAssetResponseDto(fixture.primaryAsset);
secondAssetDto = createMockStackAsset(fixture.adminUserId);
secondAssetDto.originalFileName = 'second-ocr-asset.jpg';
mockStack = createMockStack(primaryAssetDto, [secondAssetDto], new Set());
});
test.beforeEach(async ({ context }) => {
await setupBrokenAssetMockApiRoutes(context, mockStack);
// Route OCR responses per asset id so each stack member gets its own boxes.
const ocrDataByAssetId = new Map<string, AssetOcrResponseDto[]>([
[primaryAssetDto.id, createMockOcrData(primaryAssetDto.id, PRIMARY_OCR_BOXES)],
[secondAssetDto.id, createMockOcrData(secondAssetDto.id, SECONDARY_OCR_BOXES)],
]);
await setupOcrMockApiRoutes(context, ocrDataByAssetId);
});
test('different OCR boxes shown for different stacked assets', async ({ page }) => {
await page.goto(`/photos/${fixture.primaryAsset.id}`);
await assetViewerUtils.waitForViewerLoad(page, fixture.primaryAsset);
const ocrButton = page.getByLabel('Text recognition');
await expect(ocrButton).toBeVisible();
await ocrButton.click();
const ocrBoxes = page.locator('[data-viewer-content] [data-testid="ocr-box"]');
await expect(ocrBoxes).toHaveCount(2);
await expect(ocrBoxes.nth(0)).toContainText('Hello World');
const stackThumbnails = page.locator('#stack-slideshow [data-asset]');
await expect(stackThumbnails).toHaveCount(2);
await stackThumbnails.nth(1).click();
// refreshOcr() clears showOverlay when switching assets, so re-enable it
await expect(ocrBoxes).toHaveCount(0);
await expect(ocrButton).toBeVisible();
await ocrButton.click();
await expect(ocrBoxes).toHaveCount(1);
await expect(ocrBoxes.first()).toContainText('Second Asset Text');
});
});
// OCR overlay geometry must track the viewer's zoom transform.
test.describe('OCR boxes and zoom', () => {
const fixture = setupAssetViewerFixture(922);
test.beforeEach(async ({ context }) => {
const primaryAssetDto = toAssetResponseDto(fixture.primaryAsset);
const ocrDataByAssetId = new Map<string, AssetOcrResponseDto[]>([
[primaryAssetDto.id, createMockOcrData(primaryAssetDto.id, PRIMARY_OCR_BOXES)],
]);
await setupOcrMockApiRoutes(context, ocrDataByAssetId);
});
test('OCR boxes scale with zoom', async ({ page }) => {
await page.goto(`/photos/${fixture.primaryAsset.id}`);
await assetViewerUtils.waitForViewerLoad(page, fixture.primaryAsset);
const ocrButton = page.getByLabel('Text recognition');
await expect(ocrButton).toBeVisible();
await ocrButton.click();
const ocrBox = page.locator('[data-viewer-content] [data-testid="ocr-box"]').first();
await expect(ocrBox).toBeVisible();
const initialBox = await ocrBox.boundingBox();
expect(initialBox).toBeTruthy();
const { width, height } = page.viewportSize()!;
// Wheel-up over the viewport center zooms in.
await page.mouse.move(width / 2, height / 2);
await page.mouse.wheel(0, -3);
// Poll until the zoom has been applied and the box has grown on both axes.
await expect(async () => {
const zoomedBox = await ocrBox.boundingBox();
expect(zoomedBox).toBeTruthy();
expect(zoomedBox!.width).toBeGreaterThan(initialBox!.width);
expect(zoomedBox!.height).toBeGreaterThan(initialBox!.height);
}).toPass({ timeout: 2000 });
});
});
// Pointer and touch interaction with the selectable OCR text boxes.
test.describe('OCR text interaction', () => {
const fixture = setupAssetViewerFixture(923);
test.beforeEach(async ({ context }) => {
const primaryAssetDto = toAssetResponseDto(fixture.primaryAsset);
const ocrDataByAssetId = new Map<string, AssetOcrResponseDto[]>([
[primaryAssetDto.id, createMockOcrData(primaryAssetDto.id, PRIMARY_OCR_BOXES)],
]);
await setupOcrMockApiRoutes(context, ocrDataByAssetId);
});
test('OCR text box has data-overlay-interactive attribute', async ({ page }) => {
await page.goto(`/photos/${fixture.primaryAsset.id}`);
await assetViewerUtils.waitForViewerLoad(page, fixture.primaryAsset);
await page.getByLabel('Text recognition').click();
const ocrBox = page.locator('[data-viewer-content] [data-testid="ocr-box"]').first();
await expect(ocrBox).toBeVisible();
// NOTE(review): data-overlay-interactive presumably marks the box as exempt
// from the viewer's pan/drag handling — confirm in the overlay component.
await expect(ocrBox).toHaveAttribute('data-overlay-interactive');
});
test('OCR text box receives focus on click', async ({ page }) => {
await page.goto(`/photos/${fixture.primaryAsset.id}`);
await assetViewerUtils.waitForViewerLoad(page, fixture.primaryAsset);
await page.getByLabel('Text recognition').click();
const ocrBox = page.locator('[data-viewer-content] [data-testid="ocr-box"]').first();
await expect(ocrBox).toBeVisible();
await ocrBox.click();
await expect(ocrBox).toBeFocused();
});
test('dragging on OCR text box does not trigger image pan', async ({ page }) => {
await page.goto(`/photos/${fixture.primaryAsset.id}`);
await assetViewerUtils.waitForViewerLoad(page, fixture.primaryAsset);
await page.getByLabel('Text recognition').click();
const ocrBox = page.locator('[data-viewer-content] [data-testid="ocr-box"]').first();
await expect(ocrBox).toBeVisible();
const imgLocator = page.locator('[data-viewer-content] img[draggable="false"]');
const initialTransform = await imgLocator.evaluate((element) => {
return getComputedStyle(element.closest('[style*="transform"]') ?? element).transform;
});
const box = await ocrBox.boundingBox();
expect(box).toBeTruthy();
const centerX = box!.x + box!.width / 2;
const centerY = box!.y + box!.height / 2;
// Drag starting inside the OCR box must not pan the image (transform unchanged).
await page.mouse.move(centerX, centerY);
await page.mouse.down();
await page.mouse.move(centerX + 50, centerY + 30, { steps: 5 });
await page.mouse.up();
const afterTransform = await imgLocator.evaluate((element) => {
return getComputedStyle(element.closest('[style*="transform"]') ?? element).transform;
});
expect(afterTransform).toBe(initialTransform);
});
test('split touch gesture across zoom container does not trigger zoom', async ({ page }) => {
await page.goto(`/photos/${fixture.primaryAsset.id}`);
await assetViewerUtils.waitForViewerLoad(page, fixture.primaryAsset);
await page.getByLabel('Text recognition').click();
const ocrBox = page.locator('[data-viewer-content] [data-testid="ocr-box"]').first();
await expect(ocrBox).toBeVisible();
const imgLocator = page.locator('[data-viewer-content] img[draggable="false"]');
const initialTransform = await imgLocator.evaluate((element) => {
return getComputedStyle(element.closest('[style*="transform"]') ?? element).transform;
});
const viewerContent = page.locator('[data-viewer-content]');
const viewerBox = await viewerContent.boundingBox();
expect(viewerBox).toBeTruthy();
// Dispatch a synthetic split gesture: one touch inside the viewer, one outside
await page.evaluate(
({ viewerCenterX, viewerCenterY, outsideY }) => {
const viewer = document.querySelector('[data-viewer-content]');
if (!viewer) {
return;
}
// Touch() requires a target; both touches target the viewer element even
// though the second touch's coordinates fall outside its bounds.
const createTouch = (id: number, x: number, y: number) => {
return new Touch({
identifier: id,
target: viewer,
clientX: x,
clientY: y,
});
};
const insideTouch = createTouch(0, viewerCenterX, viewerCenterY);
const outsideTouch = createTouch(1, viewerCenterX, outsideY);
const touchStartEvent = new TouchEvent('touchstart', {
touches: [insideTouch, outsideTouch],
targetTouches: [insideTouch],
changedTouches: [insideTouch, outsideTouch],
bubbles: true,
cancelable: true,
});
// Move the touches apart, mimicking a pinch that spans the viewer boundary.
const touchMoveEvent = new TouchEvent('touchmove', {
touches: [createTouch(0, viewerCenterX, viewerCenterY - 30), createTouch(1, viewerCenterX, outsideY + 30)],
targetTouches: [createTouch(0, viewerCenterX, viewerCenterY - 30)],
changedTouches: [
createTouch(0, viewerCenterX, viewerCenterY - 30),
createTouch(1, viewerCenterX, outsideY + 30),
],
bubbles: true,
cancelable: true,
});
const touchEndEvent = new TouchEvent('touchend', {
touches: [],
targetTouches: [],
changedTouches: [insideTouch, outsideTouch],
bubbles: true,
cancelable: true,
});
viewer.dispatchEvent(touchStartEvent);
viewer.dispatchEvent(touchMoveEvent);
viewer.dispatchEvent(touchEndEvent);
},
{
viewerCenterX: viewerBox!.x + viewerBox!.width / 2,
viewerCenterY: viewerBox!.y + viewerBox!.height / 2,
outsideY: 10, // near the top of the page, outside the viewer
},
);
// The split gesture must not have changed the zoom/pan transform.
const afterTransform = await imgLocator.evaluate((element) => {
return getComputedStyle(element.closest('[style*="transform"]') ?? element).transform;
});
expect(afterTransform).toBe(initialTransform);
});
});

View File

@@ -1275,7 +1275,6 @@
"hide_schema": "Hide schema",
"hide_text_recognition": "Hide text recognition",
"hide_unnamed_people": "Hide unnamed people",
"hold_key_to_pan": "Hold {key} to pan",
"home_page_add_to_album_conflicts": "Added {added} assets to album {album}. {failed} assets are already in the album.",
"home_page_add_to_album_err_local": "Can not add local assets to albums yet, skipping",
"home_page_add_to_album_success": "Added {added} assets to album {album}.",

View File

@@ -1,6 +1,6 @@
{
"name": "immich-i18n",
"version": "2.6.1",
"version": "2.6.2",
"private": true,
"scripts": {
"format": "prettier --cache --check .",

View File

@@ -1,6 +1,6 @@
[project]
name = "immich-ml"
version = "2.6.1"
version = "2.6.2"
description = ""
authors = [{ name = "Hau Tran", email = "alex.tran1502@gmail.com" }]
requires-python = ">=3.11,<4.0"

View File

@@ -898,7 +898,7 @@ wheels = [
[[package]]
name = "immich-ml"
version = "2.6.1"
version = "2.6.2"
source = { editable = "." }
dependencies = [
{ name = "aiocache" },

View File

@@ -35,8 +35,8 @@ platform :android do
task: 'bundle',
build_type: 'Release',
properties: {
"android.injected.version.code" => 3039,
"android.injected.version.name" => "2.6.1",
"android.injected.version.code" => 3040,
"android.injected.version.name" => "2.6.2",
}
)
upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')

View File

@@ -150,7 +150,6 @@ class URLSessionManager: NSObject {
config.httpCookieStorage = cookieStorage
config.httpMaximumConnectionsPerHost = 64
config.timeoutIntervalForRequest = 60
config.timeoutIntervalForResource = 300
var headers = UserDefaults.group.dictionary(forKey: HEADERS_KEY) as? [String: String] ?? [:]
headers["User-Agent"] = headers["User-Agent"] ?? userAgent

View File

@@ -80,7 +80,7 @@
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>2.6.1</string>
<string>2.6.2</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleURLTypes</key>

View File

@@ -79,6 +79,7 @@ class _DriftPeopleCollectionPageState extends ConsumerState<DriftPeopleCollectio
final person = people[index];
return Column(
key: ValueKey(person.id),
children: [
GestureDetector(
onTap: () {
@@ -88,6 +89,7 @@ class _DriftPeopleCollectionPageState extends ConsumerState<DriftPeopleCollectio
shape: const CircleBorder(side: BorderSide.none),
elevation: 3,
child: CircleAvatar(
key: ValueKey('avatar-${person.id}'),
maxRadius: isTablet ? 100 / 2 : 96 / 2,
backgroundImage: RemoteImageProvider(url: getFaceThumbnailUrl(person.id)),
),

View File

@@ -69,6 +69,7 @@ class DriftSearchPage extends HookConsumerWidget {
);
final previousFilter = useState<SearchFilter?>(null);
final hasRequestedSearch = useState<bool>(false);
final dateInputFilter = useState<DateFilterInputModel?>(null);
final peopleCurrentFilterWidget = useState<Widget?>(null);
@@ -91,9 +92,11 @@ class DriftSearchPage extends HookConsumerWidget {
if (filter.isEmpty) {
previousFilter.value = null;
hasRequestedSearch.value = false;
return;
}
hasRequestedSearch.value = true;
unawaited(ref.read(paginatedSearchProvider.notifier).search(filter));
previousFilter.value = filter;
}
@@ -107,6 +110,8 @@ class DriftSearchPage extends HookConsumerWidget {
searchPreFilter() {
if (preFilter != null) {
Future.delayed(Duration.zero, () {
filter.value = preFilter;
textSearchController.clear();
searchFilter(preFilter);
if (preFilter.location.city != null) {
@@ -719,7 +724,7 @@ class DriftSearchPage extends HookConsumerWidget {
),
),
),
if (filter.value.isEmpty)
if (!hasRequestedSearch.value)
const _SearchSuggestions()
else
_SearchResultGrid(onScrollEnd: loadMoreSearchResults),

View File

@@ -24,20 +24,22 @@ class SimilarPhotosActionButton extends ConsumerWidget {
}
ref.invalidate(assetViewerProvider);
ref
.read(searchPreFilterProvider.notifier)
.setFilter(
SearchFilter(
assetId: assetId,
people: {},
location: SearchLocationFilter(),
camera: SearchCameraFilter(),
date: SearchDateFilter(),
display: SearchDisplayFilters(isNotInAlbum: false, isArchive: false, isFavorite: false),
rating: SearchRatingFilter(),
mediaType: AssetType.image,
),
);
ref.invalidate(paginatedSearchProvider);
ref.read(searchPreFilterProvider.notifier)
..clear()
..setFilter(
SearchFilter(
assetId: assetId,
people: {},
location: SearchLocationFilter(),
camera: SearchCameraFilter(),
date: SearchDateFilter(),
display: SearchDisplayFilters(isNotInAlbum: false, isArchive: false, isFavorite: false),
rating: SearchRatingFilter(),
mediaType: AssetType.image,
),
);
unawaited(context.navigateTo(const DriftSearchRoute()));
}

View File

@@ -39,6 +39,16 @@ class _RatingBarState extends State<RatingBar> {
_currentRating = widget.initialRating;
}
@override
void didUpdateWidget(covariant RatingBar oldWidget) {
super.didUpdateWidget(oldWidget);
if (oldWidget.initialRating != widget.initialRating && _currentRating != widget.initialRating) {
setState(() {
_currentRating = widget.initialRating;
});
}
}
void _updateRating(Offset localPosition, bool isRTL, {bool isTap = false}) {
final totalWidth = widget.itemCount * widget.itemSize + (widget.itemCount - 1) * widget.starPadding;
double dx = localPosition.dx;

View File

@@ -67,6 +67,9 @@ class AuthService {
bool isValid = false;
try {
final urls = ApiService.getServerUrls();
urls.add(url);
await NetworkRepository.setHeaders(ApiService.getRequestHeaders(), urls);
final uri = Uri.parse('$url/users/me');
final response = await NetworkRepository.client.get(uri);
if (response.statusCode == 200) {

View File

@@ -143,8 +143,7 @@ enum ActionButtonType {
!context.isInLockedView && //
context.currentAlbum != null,
ActionButtonType.setAlbumCover =>
context.isOwner && //
!context.isInLockedView && //
!context.isInLockedView && //
context.currentAlbum != null && //
context.selectedCount == 1,
ActionButtonType.unstack =>

View File

@@ -16,9 +16,15 @@ class SearchDropdown<T> extends StatelessWidget {
final Widget? label;
final Widget? leadingIcon;
static const WidgetStatePropertyAll<EdgeInsetsGeometry> _optionPadding = WidgetStatePropertyAll<EdgeInsetsGeometry>(
EdgeInsetsDirectional.fromSTEB(16, 0, 16, 0),
);
@override
Widget build(BuildContext context) {
final menuStyle = const MenuStyle(
final mediaQuery = MediaQuery.of(context);
final maxMenuHeight = mediaQuery.size.height * 0.5 - mediaQuery.viewPadding.bottom;
const menuStyle = MenuStyle(
shape: WidgetStatePropertyAll<OutlinedBorder>(
RoundedRectangleBorder(borderRadius: BorderRadius.all(Radius.circular(15))),
),
@@ -26,11 +32,26 @@ class SearchDropdown<T> extends StatelessWidget {
return LayoutBuilder(
builder: (context, constraints) {
final styledEntries = dropdownMenuEntries
.map(
(entry) => DropdownMenuEntry<T>(
value: entry.value,
label: entry.label,
labelWidget: entry.labelWidget,
enabled: entry.enabled,
leadingIcon: entry.leadingIcon,
trailingIcon: entry.trailingIcon,
style: (entry.style ?? const ButtonStyle()).copyWith(padding: _optionPadding),
),
)
.toList(growable: false);
return DropdownMenu(
controller: controller,
leadingIcon: leadingIcon,
width: constraints.maxWidth,
dropdownMenuEntries: dropdownMenuEntries,
menuHeight: maxMenuHeight,
dropdownMenuEntries: styledEntries,
label: label,
menuStyle: menuStyle,
trailingIcon: const Icon(Icons.arrow_drop_down_rounded),

View File

@@ -3,7 +3,7 @@ Immich API
This Dart package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:
- API version: 2.6.1
- API version: 2.6.2
- Generator version: 7.8.0
- Build package: org.openapitools.codegen.languages.DartClientCodegen

View File

@@ -2,7 +2,7 @@ name: immich_mobile
description: Immich - selfhosted backup media file on mobile phone
publish_to: 'none'
version: 2.6.1+3039
version: 2.6.2+3040
environment:
sdk: '>=3.8.0 <4.0.0'

View File

@@ -727,7 +727,7 @@ void main() {
expect(ActionButtonType.setAlbumCover.shouldShow(context), isTrue);
});
test('should not show when not owner', () {
test('should show when not owner', () {
final album = createRemoteAlbum();
final context = ActionButtonContext(
asset: mergedAsset,
@@ -742,7 +742,7 @@ void main() {
selectedCount: 1,
);
expect(ActionButtonType.setAlbumCover.shouldShow(context), isFalse);
expect(ActionButtonType.setAlbumCover.shouldShow(context), isTrue);
});
test('should not show when in locked view', () {

View File

@@ -15166,7 +15166,7 @@
"info": {
"title": "Immich",
"description": "Immich API",
"version": "2.6.1",
"version": "2.6.2",
"contact": {}
},
"tags": [

View File

@@ -1,6 +1,6 @@
{
"name": "@immich/sdk",
"version": "2.6.1",
"version": "2.6.2",
"description": "Auto-generated TypeScript SDK for the Immich API",
"type": "module",
"main": "./build/index.js",

View File

@@ -1,6 +1,6 @@
/**
* Immich
* 2.6.1
* 2.6.2
* DO NOT MODIFY - This file has been generated using oazapfts.
* See https://www.npmjs.com/package/oazapfts
*/

View File

@@ -1,6 +1,6 @@
{
"name": "immich-monorepo",
"version": "2.6.1",
"version": "2.6.2",
"description": "Monorepo for Immich",
"private": true,
"packageManager": "pnpm@10.30.3+sha512.c961d1e0a2d8e354ecaa5166b822516668b7f44cb5bd95122d590dd81922f606f5473b6d23ec4a5be05e7fcd18e8488d47d978bbe981872f1145d06e9a740017",

781
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -52,7 +52,7 @@ FROM builder AS plugins
ARG TARGETPLATFORM
COPY --from=ghcr.io/jdx/mise:2026.1.1@sha256:a55c391f7582f34c58bce1a85090cd526596402ba77fc32b06c49b8404ef9c14 /usr/local/bin/mise /usr/local/bin/mise
COPY --from=ghcr.io/jdx/mise:2026.3.12@sha256:0210678cbf58413806531a27adb2c7daf1c37238e56e8f7ea381d73521571775 /usr/local/bin/mise /usr/local/bin/mise
WORKDIR /usr/src/app
COPY ./plugins/mise.toml ./plugins/

View File

@@ -1,6 +1,6 @@
{
"name": "immich",
"version": "2.6.1",
"version": "2.6.2",
"description": "",
"author": "",
"private": true,

View File

@@ -582,7 +582,6 @@ where
"asset_face"."updateId" < $1
and "asset_face"."updateId" > $2
and "asset"."ownerId" = $3
and "asset_face"."isVisible" = $4
order by
"asset_face"."updateId" asc

View File

@@ -487,7 +487,6 @@ class AssetFaceSync extends BaseSync {
])
.leftJoin('asset', 'asset.id', 'asset_face.assetId')
.where('asset.ownerId', '=', options.userId)
.where('asset_face.isVisible', '=', true)
.stream();
}
}

View File

@@ -0,0 +1,10 @@
import { Kysely, sql } from 'kysely';
export async function up(db: Kysely<any>): Promise<void> {
  // Servers <= 2.6.2 synced asset faces with an incorrect query, so any existing
  // face-sync checkpoints may be stale. Drop them to force clients to re-sync faces.
  const resetFaceCheckpoints = sql`DELETE FROM session_sync_checkpoint WHERE type in ('AssetFaceV1', 'AssetFaceV2')`;
  await resetFaceCheckpoints.execute(db);
}
export async function down(): Promise<void> {
  // Not implemented — presumably because deleting sync checkpoints is not
  // reversible (the removed rows cannot be reconstructed), so rollback is a no-op.
}

View File

@@ -190,7 +190,13 @@ const checkOtherAccess = async (access: AccessRepository, request: OtherAccessRe
}
case Permission.AlbumUpdate: {
return await access.album.checkOwnerAccess(auth.user.id, ids);
const isOwner = await access.album.checkOwnerAccess(auth.user.id, ids);
const isShared = await access.album.checkSharedAlbumAccess(
auth.user.id,
setDifference(ids, isOwner),
AlbumUserRole.Editor,
);
return setUnion(isOwner, isShared);
}
case Permission.AlbumDelete: {
@@ -198,7 +204,13 @@ const checkOtherAccess = async (access: AccessRepository, request: OtherAccessRe
}
case Permission.AlbumShare: {
return await access.album.checkOwnerAccess(auth.user.id, ids);
const isOwner = await access.album.checkOwnerAccess(auth.user.id, ids);
const isShared = await access.album.checkSharedAlbumAccess(
auth.user.id,
setDifference(ids, isOwner),
AlbumUserRole.Editor,
);
return setUnion(isOwner, isShared);
}
case Permission.AlbumDownload: {

View File

@@ -1,4 +0,0 @@
{
"status": "failed",
"failedTests": []
}

View File

@@ -1,6 +1,6 @@
{
"name": "immich-web",
"version": "2.6.1",
"version": "2.6.2",
"license": "GNU Affero General Public License version 3",
"type": "module",
"scripts": {
@@ -72,10 +72,10 @@
"@koddsson/eslint-plugin-tscompat": "^0.2.0",
"@socket.io/component-emitter": "^3.1.0",
"@sveltejs/adapter-static": "^3.0.8",
"@sveltejs/enhanced-img": "^0.10.0",
"@sveltejs/enhanced-img": "^0.10.4",
"@sveltejs/kit": "^2.27.1",
"@sveltejs/vite-plugin-svelte": "6.2.4",
"@tailwindcss/vite": "^4.1.7",
"@sveltejs/vite-plugin-svelte": "7.0.0",
"@tailwindcss/vite": "^4.2.2",
"@testing-library/jest-dom": "^6.4.2",
"@testing-library/svelte": "^5.2.8",
"@testing-library/user-event": "^14.5.2",
@@ -103,10 +103,10 @@
"svelte": "5.53.13",
"svelte-check": "^4.1.5",
"svelte-eslint-parser": "^1.3.3",
"tailwindcss": "^4.1.7",
"tailwindcss": "^4.2.2",
"typescript": "^5.8.3",
"typescript-eslint": "^8.45.0",
"vite": "^7.1.2",
"vite": "^8.0.0",
"vitest": "^4.0.0"
},
"volta": {

View File

@@ -1,18 +1,11 @@
import { assetViewerManager } from '$lib/managers/asset-viewer-manager.svelte';
import { createZoomImageWheel } from '@zoom-image/core';
// Minimal touch shape — avoids importing DOM TouchEvent which isn't available in all TS targets.
type TouchEventLike = {
touches: Iterable<{ clientX: number; clientY: number }> & { length: number };
targetTouches: ArrayLike<unknown>;
};
const asTouchEvent = (event: Event) => event as unknown as TouchEventLike;
export const zoomImageAction = (node: HTMLElement, options?: { zoomTarget?: HTMLElement }) => {
export const zoomImageAction = (node: HTMLElement, options?: { disabled?: boolean }) => {
const zoomInstance = createZoomImageWheel(node, {
maxZoom: 10,
initialState: assetViewerManager.zoomState,
zoomTarget: options?.zoomTarget,
zoomTarget: null,
});
const unsubscribes = [
@@ -20,130 +13,47 @@ export const zoomImageAction = (node: HTMLElement, options?: { zoomTarget?: HTML
zoomInstance.subscribe(({ state }) => assetViewerManager.onZoomChange(state)),
];
const controller = new AbortController();
const { signal } = controller;
node.addEventListener('pointerdown', () => assetViewerManager.cancelZoomAnimation(), { capture: true, signal });
// Intercept events in capture phase to prevent zoom-image from seeing interactions on
// overlay elements (e.g. OCR text boxes), preserving browser defaults like text selection.
const isOverlayEvent = (event: Event) => !!(event.target as HTMLElement).closest('[data-overlay-interactive]');
const isOverlayAtPoint = (x: number, y: number) =>
!!document.elementFromPoint(x, y)?.closest('[data-overlay-interactive]');
// Pointer event interception: track pointers that start on overlays and intercept the entire gesture.
const overlayPointers = new Set<number>();
const interceptedPointers = new Set<number>();
const interceptOverlayPointerDown = (event: PointerEvent) => {
if (isOverlayEvent(event) || isOverlayAtPoint(event.clientX, event.clientY)) {
overlayPointers.add(event.pointerId);
interceptedPointers.add(event.pointerId);
event.stopPropagation();
} else if (overlayPointers.size > 0) {
// Split gesture (e.g. pinch with one finger on overlay) — intercept entirely.
interceptedPointers.add(event.pointerId);
event.stopPropagation();
const onInteractionStart = (event: Event) => {
if (options?.disabled) {
event.stopImmediatePropagation();
}
assetViewerManager.cancelZoomAnimation();
};
const interceptOverlayPointerEvent = (event: PointerEvent) => {
if (interceptedPointers.has(event.pointerId)) {
event.stopPropagation();
}
};
const interceptOverlayPointerEnd = (event: PointerEvent) => {
overlayPointers.delete(event.pointerId);
if (interceptedPointers.delete(event.pointerId)) {
event.stopPropagation();
}
};
node.addEventListener('pointerdown', interceptOverlayPointerDown, { capture: true, signal });
node.addEventListener('pointermove', interceptOverlayPointerEvent, { capture: true, signal });
node.addEventListener('pointerup', interceptOverlayPointerEnd, { capture: true, signal });
node.addEventListener('pointerleave', interceptOverlayPointerEnd, { capture: true, signal });
// Touch event interception for overlay touches or split gestures (pinch across container boundary).
// Once intercepted, stays intercepted until all fingers are lifted.
let touchGestureIntercepted = false;
const interceptOverlayTouchEvent = (event: Event) => {
if (touchGestureIntercepted) {
event.stopPropagation();
return;
}
const { touches, targetTouches } = asTouchEvent(event);
if (touches && targetTouches) {
if (touches.length > targetTouches.length) {
touchGestureIntercepted = true;
event.stopPropagation();
return;
}
for (const touch of touches) {
if (isOverlayAtPoint(touch.clientX, touch.clientY)) {
touchGestureIntercepted = true;
event.stopPropagation();
return;
}
}
} else if (isOverlayEvent(event)) {
event.stopPropagation();
}
};
const resetTouchGesture = (event: Event) => {
const { touches } = asTouchEvent(event);
if (touches.length === 0) {
touchGestureIntercepted = false;
}
};
node.addEventListener('touchstart', interceptOverlayTouchEvent, { capture: true, signal });
node.addEventListener('touchmove', interceptOverlayTouchEvent, { capture: true, signal });
node.addEventListener('touchend', resetTouchGesture, { capture: true, signal });
node.addEventListener('wheel', onInteractionStart, { capture: true });
node.addEventListener('pointerdown', onInteractionStart, { capture: true });
// Wheel and dblclick interception on overlay elements.
// Dblclick also intercepted for all touch double-taps (Safari fires synthetic dblclick
// on double-tap, which conflicts with zoom-image's touch zoom handler).
// Suppress Safari's synthetic dblclick on double-tap. Without this, zoom-image's touchstart
// handler zooms to maxZoom (10x), then Safari's synthetic dblclick triggers photo-viewer's
// handler which conflicts. Chrome does not fire synthetic dblclick on touch.
let lastPointerWasTouch = false;
node.addEventListener('pointerdown', (event) => (lastPointerWasTouch = event.pointerType === 'touch'), {
capture: true,
signal,
});
node.addEventListener(
'wheel',
(event) => {
if (isOverlayEvent(event)) {
event.stopPropagation();
}
},
{ capture: true, signal },
);
node.addEventListener(
'dblclick',
(event) => {
if (lastPointerWasTouch || isOverlayEvent(event)) {
event.stopImmediatePropagation();
}
},
{ capture: true, signal },
);
const trackPointerType = (event: PointerEvent) => {
lastPointerWasTouch = event.pointerType === 'touch';
};
const suppressTouchDblClick = (event: MouseEvent) => {
if (lastPointerWasTouch) {
event.stopImmediatePropagation();
}
};
node.addEventListener('pointerdown', trackPointerType, { capture: true });
node.addEventListener('dblclick', suppressTouchDblClick, { capture: true });
if (options?.zoomTarget) {
options.zoomTarget.style.willChange = 'transform';
}
// Allow zoomed content to render outside the container bounds
node.style.overflow = 'visible';
// Prevent browser handling of touch gestures so zoom-image can manage them
node.style.touchAction = 'none';
return {
update(newOptions?: { zoomTarget?: HTMLElement }) {
update(newOptions?: { disabled?: boolean }) {
options = newOptions;
if (newOptions?.zoomTarget !== undefined) {
zoomInstance.setState({ zoomTarget: newOptions.zoomTarget });
}
},
destroy() {
controller.abort();
if (options?.zoomTarget) {
options.zoomTarget.style.willChange = '';
}
for (const unsubscribe of unsubscribes) {
unsubscribe();
}
node.removeEventListener('wheel', onInteractionStart, { capture: true });
node.removeEventListener('pointerdown', onInteractionStart, { capture: true });
node.removeEventListener('pointerdown', trackPointerType, { capture: true });
node.removeEventListener('dblclick', suppressTouchDblClick, { capture: true });
zoomInstance.cleanup();
},
};

View File

@@ -7,7 +7,7 @@
import { assetViewerManager } from '$lib/managers/asset-viewer-manager.svelte';
import { getAssetUrls } from '$lib/utils';
import { AdaptiveImageLoader, type QualityList } from '$lib/utils/adaptive-image-loader.svelte';
import { scaleToCover, scaleToFit, type Size } from '$lib/utils/container-utils';
import { scaleToCover, scaleToFit } from '$lib/utils/container-utils';
import { getAltText } from '$lib/utils/thumbnail-util';
import { toTimelineAsset } from '$lib/utils/timeline-util';
import type { AssetResponseDto, SharedLinkResponseDto } from '@immich/sdk';
@@ -17,14 +17,15 @@
asset: AssetResponseDto;
sharedLink?: SharedLinkResponseDto;
objectFit?: 'contain' | 'cover';
container: Size;
container: {
width: number;
height: number;
};
onUrlChange?: (url: string) => void;
onImageReady?: () => void;
onError?: () => void;
ref?: HTMLDivElement;
imgRef?: HTMLImageElement;
imgNaturalSize?: Size;
imgScaledSize?: Size;
backdrop?: Snippet;
overlays?: Snippet;
};
@@ -33,10 +34,6 @@
ref = $bindable(),
// eslint-disable-next-line no-useless-assignment
imgRef = $bindable(),
// eslint-disable-next-line no-useless-assignment
imgNaturalSize = $bindable(),
// eslint-disable-next-line no-useless-assignment
imgScaledSize = $bindable(),
asset,
sharedLink,
objectFit = 'contain',
@@ -104,21 +101,9 @@
return { width: 1, height: 1 };
});
$effect(() => {
imgNaturalSize = imageDimensions;
});
const scaledDimensions = $derived.by(() => {
const scaleFn = objectFit === 'cover' ? scaleToCover : scaleToFit;
return scaleFn(imageDimensions, container);
});
$effect(() => {
imgScaledSize = scaledDimensions;
});
const { width, height, left, top } = $derived.by(() => {
const { width, height } = scaledDimensions;
const scaleFn = objectFit === 'cover' ? scaleToCover : scaleToFit;
const { width, height } = scaleFn(imageDimensions, container);
return {
width: width + 'px',
height: height + 'px',
@@ -164,66 +149,81 @@
(quality.preview === 'success' ? previewElement : undefined) ??
(quality.thumbnail === 'success' ? thumbnailElement : undefined);
});
const zoomTransform = $derived.by(() => {
const { currentZoom, currentPositionX, currentPositionY } = assetViewerManager.zoomState;
if (currentZoom === 1 && currentPositionX === 0 && currentPositionY === 0) {
return undefined;
}
return `translate(${currentPositionX}px, ${currentPositionY}px) scale(${currentZoom})`;
});
</script>
<div class="relative h-full w-full overflow-hidden will-change-transform" bind:this={ref}>
{@render backdrop?.()}
<div class="absolute inset-0 pointer-events-none" style:left style:top style:width style:height>
{#if show.alphaBackground}
<AlphaBackground />
{/if}
{#if show.thumbhash}
{#if asset.thumbhash}
<!-- Thumbhash / spinner layer -->
<canvas use:thumbhash={{ base64ThumbHash: asset.thumbhash }} class="h-full w-full absolute"></canvas>
{:else if show.spinner}
<DelayedLoadingSpinner />
<!-- pointer-events-none so events pass through to the container where zoom-image listens -->
<div
class="absolute inset-0 pointer-events-none"
style:transform={zoomTransform}
style:transform-origin={zoomTransform ? '0 0' : undefined}
>
<div class="absolute" style:left style:top style:width style:height>
{#if show.alphaBackground}
<AlphaBackground />
{/if}
{/if}
{#if show.thumbnail}
<ImageLayer
{adaptiveImageLoader}
{width}
{height}
quality="thumbnail"
src={status.urls.thumbnail}
alt=""
role="presentation"
bind:ref={thumbnailElement}
/>
{/if}
{#if show.thumbhash}
{#if asset.thumbhash}
<!-- Thumbhash / spinner layer -->
<canvas use:thumbhash={{ base64ThumbHash: asset.thumbhash }} class="h-full w-full absolute"></canvas>
{:else if show.spinner}
<DelayedLoadingSpinner />
{/if}
{/if}
{#if show.brokenAsset}
<BrokenAsset class="text-xl h-full w-full absolute" />
{/if}
{#if show.thumbnail}
<ImageLayer
{adaptiveImageLoader}
{width}
{height}
quality="thumbnail"
src={status.urls.thumbnail}
alt=""
role="presentation"
bind:ref={thumbnailElement}
/>
{/if}
{#if show.preview}
<ImageLayer
{adaptiveImageLoader}
{alt}
{width}
{height}
{overlays}
quality="preview"
src={status.urls.preview}
bind:ref={previewElement}
/>
{/if}
{#if show.brokenAsset}
<BrokenAsset class="text-xl h-full w-full absolute" />
{/if}
{#if show.original}
<ImageLayer
{adaptiveImageLoader}
{alt}
{width}
{height}
{overlays}
quality="original"
src={status.urls.original}
bind:ref={originalElement}
/>
{/if}
{#if show.preview}
<ImageLayer
{adaptiveImageLoader}
{alt}
{width}
{height}
{overlays}
quality="preview"
src={status.urls.preview}
bind:ref={previewElement}
/>
{/if}
{#if show.original}
<ImageLayer
{adaptiveImageLoader}
{alt}
{width}
{height}
{overlays}
quality="original"
src={status.urls.original}
bind:ref={originalElement}
/>
{/if}
</div>
</div>
</div>

View File

@@ -59,7 +59,7 @@
previousAsset?: AssetResponseDto;
};
type Props = {
interface Props {
cursor: AssetCursor;
showNavigation?: boolean;
withStacked?: boolean;
@@ -72,7 +72,7 @@
onUndoDelete?: OnUndoDelete;
onClose?: (asset: AssetResponseDto) => void;
onRandom?: () => Promise<{ id: string } | undefined>;
};
}
let {
cursor,
@@ -176,7 +176,6 @@
onDestroy(() => {
activityManager.reset();
assetViewerManager.closeEditor();
isFaceEditMode.value = false;
syncAssetViewerOpenClass(false);
preloadManager.destroy();
});
@@ -291,9 +290,6 @@
const handleStackedAssetMouseEvent = (isMouseOver: boolean, stackedAsset: AssetResponseDto) => {
previewStackedAsset = isMouseOver ? stackedAsset : undefined;
if (isMouseOver) {
isFaceEditMode.value = false;
}
};
const handlePreAction = (action: Action) => {
@@ -362,18 +358,15 @@
}
};
const refreshOcr = async () => {
ocrManager.clear();
if (sharedLink) {
return;
}
await ocrManager.getAssetOcr(asset.id);
};
const refresh = async () => {
await refreshStack();
await refreshOcr();
ocrManager.clear();
if (!sharedLink) {
if (previewStackedAsset) {
await ocrManager.getAssetOcr(previewStackedAsset.id);
}
await ocrManager.getAssetOcr(asset.id);
}
};
$effect(() => {
@@ -382,12 +375,6 @@
untrack(() => handlePromiseError(refresh()));
});
$effect(() => {
// eslint-disable-next-line @typescript-eslint/no-unused-expressions
previewStackedAsset;
untrack(() => ocrManager.clear());
});
let lastCursor = $state<AssetCursor>();
$effect(() => {
@@ -473,7 +460,7 @@
<section
id="immich-asset-viewer"
class="fixed start-0 top-0 grid size-full grid-cols-4 grid-rows-[64px_1fr] overflow-hidden bg-black touch-none"
class="fixed start-0 top-0 grid size-full grid-cols-4 grid-rows-[64px_1fr] overflow-hidden bg-black"
use:focusTrap
bind:this={assetViewerHtmlElement}
>
@@ -625,7 +612,6 @@
onClick={() => {
cursor.current = stackedAsset;
previewStackedAsset = undefined;
isFaceEditMode.value = false;
}}
onMouseEvent={({ isMouseOver }) => handleStackedAssetMouseEvent(isMouseOver, stackedAsset)}
readonly

View File

@@ -7,11 +7,10 @@
import { timeToLoadTheMap } from '$lib/constants';
import { assetViewerManager } from '$lib/managers/asset-viewer-manager.svelte';
import { authManager } from '$lib/managers/auth-manager.svelte';
import { eventManager } from '$lib/managers/event-manager.svelte';
import { featureFlagsManager } from '$lib/managers/feature-flags-manager.svelte';
import AssetChangeDateModal from '$lib/modals/AssetChangeDateModal.svelte';
import { Route } from '$lib/route';
import { isEditFacesPanelOpen, isFaceEditMode } from '$lib/stores/face-edit.svelte';
import { isFaceEditMode } from '$lib/stores/face-edit.svelte';
import { boundingBoxesArray } from '$lib/stores/people.store';
import { locale } from '$lib/stores/preferences.store';
import { preferences, user } from '$lib/stores/user.store';
@@ -50,15 +49,15 @@
import UserAvatar from '../shared-components/user-avatar.svelte';
import AlbumListItemDetails from './album-list-item-details.svelte';
type Props = {
interface Props {
asset: AssetResponseDto;
currentAlbum?: AlbumResponseDto | null;
};
}
let { asset, currentAlbum = null }: Props = $props();
let showAssetPath = $state(false);
let showEditFaces = $derived(isEditFacesPanelOpen.value);
let showEditFaces = $state(false);
let isOwner = $derived($user?.id === asset.ownerId);
let people = $derived(asset.people || []);
let unassignedFaces = $derived(asset.unassignedFaces || []);
@@ -107,7 +106,7 @@
return;
}
isEditFacesPanelOpen.value = false;
showEditFaces = false;
previousId = asset.id;
});
@@ -123,8 +122,7 @@
const handleRefreshPeople = async () => {
asset = await getAssetInfo({ id: asset.id });
eventManager.emit('AssetUpdate', asset);
isEditFacesPanelOpen.value = false;
showEditFaces = false;
};
const getAssetFolderHref = (asset: AssetResponseDto) => {
@@ -221,7 +219,7 @@
shape="round"
color="secondary"
variant="ghost"
onclick={() => (isEditFacesPanelOpen.value = true)}
onclick={() => (showEditFaces = true)}
/>
{/if}
</div>
@@ -230,14 +228,13 @@
<div class="mt-2 flex flex-wrap gap-2">
{#each people as person, index (person.id)}
{#if showingHiddenPeople || !person.isHidden}
{@const isHighlighted = people[index].faces.some((f) => $boundingBoxesArray.some((b) => b.id === f.id))}
<a
class="group w-22 outline-none"
class="w-22"
href={Route.viewPerson(person, { previousRoute })}
onfocus={() => ($boundingBoxesArray = people[index].faces)}
onblur={() => ($boundingBoxesArray = [])}
onpointerover={() => ($boundingBoxesArray = people[index].faces)}
onpointerleave={() => ($boundingBoxesArray = [])}
onmouseover={() => ($boundingBoxesArray = people[index].faces)}
onmouseleave={() => ($boundingBoxesArray = [])}
>
<div class="relative">
<ImageThumbnail
@@ -249,8 +246,6 @@
widthStyle="90px"
heightStyle="90px"
hidden={person.isHidden}
highlighted={isHighlighted}
class="group-focus-visible:outline-2 group-focus-visible:outline-offset-2 group-focus-visible:outline-immich-primary dark:group-focus-visible:outline-immich-dark-primary"
/>
</div>
<p class="mt-1 truncate font-medium" title={person.name}>{person.name}</p>
@@ -579,7 +574,7 @@
<PersonSidePanel
assetId={asset.id}
assetType={asset.type}
onClose={() => (isEditFacesPanelOpen.value = false)}
onClose={() => (showEditFaces = false)}
onRefresh={handleRefreshPeople}
/>
{/if}

View File

@@ -1,12 +1,10 @@
<script lang="ts">
import ImageThumbnail from '$lib/components/assets/thumbnail/image-thumbnail.svelte';
import { assetViewerManager } from '$lib/managers/asset-viewer-manager.svelte';
import { assetViewingStore } from '$lib/stores/asset-viewing.store';
import { isFaceEditMode } from '$lib/stores/face-edit.svelte';
import { getPeopleThumbnailUrl } from '$lib/utils';
import { computeContentMetrics, mapContentRectToNatural, type Size } from '$lib/utils/container-utils';
import { getNaturalSize, scaleToFit } from '$lib/utils/container-utils';
import { handleError } from '$lib/utils/handle-error';
import { scaleFaceRectOnResize, type ResizeContext } from '$lib/utils/people-utils';
import { createFace, getAllPeople, type PersonResponseDto } from '@immich/sdk';
import { shortcut } from '$lib/actions/shortcut';
import { Button, Input, modalManager, toastManager } from '@immich/ui';
@@ -14,19 +12,17 @@
import { clamp } from 'lodash-es';
import { onMount } from 'svelte';
import { t } from 'svelte-i18n';
import { fade } from 'svelte/transition';
type Props = {
imageSize: Size;
interface Props {
htmlElement: HTMLImageElement | HTMLVideoElement;
containerWidth: number;
containerHeight: number;
assetId: string;
};
}
let { imageSize, containerWidth, containerHeight, assetId }: Props = $props();
let { htmlElement, containerWidth, containerHeight, assetId }: Props = $props();
let canvasEl: HTMLCanvasElement | undefined = $state();
let containerEl: HTMLDivElement | undefined = $state();
let canvas: Canvas | undefined = $state();
let faceRect: Rect | undefined = $state();
let faceSelectorEl: HTMLDivElement | undefined = $state();
@@ -36,9 +32,6 @@
let searchTerm = $state('');
let faceBoxPosition = $state({ left: 0, top: 0, width: 0, height: 0 });
let userMovedRect = false;
let previousMetrics: ResizeContext | null = null;
let panModifierHeld = $state(false);
let filteredCandidates = $derived(
searchTerm
@@ -60,12 +53,11 @@
};
const setupCanvas = () => {
if (!canvasEl) {
if (!canvasEl || !htmlElement) {
return;
}
canvas = new Canvas(canvasEl, { width: containerWidth, height: containerHeight });
canvas.selection = false;
canvas = new Canvas(canvasEl);
configureControlStyle();
// eslint-disable-next-line tscompat/tscompat
@@ -83,104 +75,67 @@
canvas.add(faceRect);
canvas.setActiveObject(faceRect);
setDefaultFaceRectanglePosition(faceRect);
};
onMount(() => {
void getPeople();
onMount(async () => {
setupCanvas();
await getPeople();
});
const imageContentMetrics = $derived.by(() => {
const natural = getNaturalSize(htmlElement);
const container = { width: containerWidth, height: containerHeight };
const { width: contentWidth, height: contentHeight } = scaleToFit(natural, container);
return {
contentWidth,
contentHeight,
offsetX: (containerWidth - contentWidth) / 2,
offsetY: (containerHeight - contentHeight) / 2,
};
});
const setDefaultFaceRectanglePosition = (faceRect: Rect) => {
const { offsetX, offsetY } = imageContentMetrics;
faceRect.set({
top: offsetY + 200,
left: offsetX + 200,
});
faceRect.setCoords();
positionFaceSelector();
};
$effect(() => {
if (!canvas) {
return;
}
const upperCanvas = canvas.upperCanvasEl;
const controller = new AbortController();
const { signal } = controller;
const stopIfOnTarget = (event: PointerEvent) => {
if (canvas?.findTarget(event).target) {
event.stopPropagation();
}
};
const handlePointerDown = (event: PointerEvent) => {
if (!canvas) {
return;
}
if (canvas.findTarget(event).target) {
event.stopPropagation();
return;
}
if (faceRect) {
event.stopPropagation();
const pointer = canvas.getScenePoint(event);
faceRect.set({ left: pointer.x, top: pointer.y });
faceRect.setCoords();
userMovedRect = true;
canvas.renderAll();
positionFaceSelector();
}
};
upperCanvas.addEventListener('pointerdown', handlePointerDown, { signal });
upperCanvas.addEventListener('pointermove', stopIfOnTarget, { signal });
upperCanvas.addEventListener('pointerup', stopIfOnTarget, { signal });
return () => {
controller.abort();
};
});
const imageContentMetrics = $derived.by(() => {
if (imageSize.width === 0 || imageSize.height === 0) {
return { contentWidth: 0, contentHeight: 0, offsetX: 0, offsetY: 0 };
}
return computeContentMetrics(imageSize, { width: containerWidth, height: containerHeight });
});
const setDefaultFaceRectanglePosition = (faceRect: Rect) => {
const { offsetX, offsetY, contentWidth, contentHeight } = imageContentMetrics;
faceRect.set({
top: offsetY + contentHeight / 2 - 56,
left: offsetX + contentWidth / 2 - 56,
canvas.setDimensions({
width: containerWidth,
height: containerHeight,
});
};
$effect(() => {
const { offsetX, offsetY, contentWidth } = imageContentMetrics;
if (contentWidth === 0) {
if (!faceRect) {
return;
}
const isFirstRun = previousMetrics === null;
if (isFirstRun && !canvas) {
setupCanvas();
}
if (!canvas || !faceRect) {
return;
}
if (!isFirstRun) {
canvas.setDimensions({ width: containerWidth, height: containerHeight });
}
if (!isFirstRun && userMovedRect && previousMetrics) {
faceRect.set(scaleFaceRectOnResize(faceRect, previousMetrics, { contentWidth, offsetX, offsetY }));
} else {
if (!isFaceRectIntersectingCanvas(faceRect, canvas)) {
setDefaultFaceRectanglePosition(faceRect);
}
faceRect.setCoords();
previousMetrics = { contentWidth, offsetX, offsetY };
canvas.renderAll();
positionFaceSelector();
});
const isFaceRectIntersectingCanvas = (faceRect: Rect, canvas: Canvas) => {
const faceBox = faceRect.getBoundingRect();
return !(
0 > faceBox.left + faceBox.width ||
0 > faceBox.top + faceBox.height ||
canvas.width < faceBox.left ||
canvas.height < faceBox.top
);
};
const cancel = () => {
isFaceEditMode.value = false;
};
@@ -209,15 +164,11 @@
const gap = 15;
const padding = faceRect.padding ?? 0;
const rawBox = faceRect.getBoundingRect();
if (Number.isNaN(rawBox.left) || Number.isNaN(rawBox.width)) {
return;
}
const { currentZoom, currentPositionX, currentPositionY } = assetViewerManager.zoomState;
const faceBox = {
left: (rawBox.left - padding) * currentZoom + currentPositionX,
top: (rawBox.top - padding) * currentZoom + currentPositionY,
width: (rawBox.width + padding * 2) * currentZoom,
height: (rawBox.height + padding * 2) * currentZoom,
left: rawBox.left - padding,
top: rawBox.top - padding,
width: rawBox.width + padding * 2,
height: rawBox.height + padding * 2,
};
const selectorWidth = faceSelectorEl.offsetWidth;
const chromeHeight = faceSelectorEl.offsetHeight - scrollableListEl.offsetHeight;
@@ -227,21 +178,20 @@
const clampTop = (top: number) => clamp(top, gap, containerHeight - selectorHeight - gap);
const clampLeft = (left: number) => clamp(left, gap, containerWidth - selectorWidth - gap);
const faceRight = faceBox.left + faceBox.width;
const faceBottom = faceBox.top + faceBox.height;
const overlapArea = (position: { top: number; left: number }) => {
const overlapX = Math.max(
0,
Math.min(position.left + selectorWidth, faceRight) - Math.max(position.left, faceBox.left),
);
const overlapY = Math.max(
0,
Math.min(position.top + selectorHeight, faceBottom) - Math.max(position.top, faceBox.top),
);
const selectorRight = position.left + selectorWidth;
const selectorBottom = position.top + selectorHeight;
const faceRight = faceBox.left + faceBox.width;
const faceBottom = faceBox.top + faceBox.height;
const overlapX = Math.max(0, Math.min(selectorRight, faceRight) - Math.max(position.left, faceBox.left));
const overlapY = Math.max(0, Math.min(selectorBottom, faceBottom) - Math.max(position.top, faceBox.top));
return overlapX * overlapY;
};
const faceBottom = faceBox.top + faceBox.height;
const faceRight = faceBox.left + faceBox.width;
const positions = [
{ top: clampTop(faceBottom + gap), left: clampLeft(faceBox.left) },
{ top: clampTop(faceBox.top - selectorHeight - gap), left: clampLeft(faceBox.left) },
@@ -263,139 +213,45 @@
}
}
const containerRect = containerEl?.getBoundingClientRect();
const offsetTop = containerRect?.top ?? 0;
const offsetLeft = containerRect?.left ?? 0;
faceSelectorEl.style.top = `${bestPosition.top + offsetTop}px`;
faceSelectorEl.style.left = `${bestPosition.left + offsetLeft}px`;
faceSelectorEl.style.top = `${bestPosition.top}px`;
faceSelectorEl.style.left = `${bestPosition.left}px`;
scrollableListEl.style.height = `${listHeight}px`;
faceBoxPosition = faceBox;
faceBoxPosition = { left: faceBox.left, top: faceBox.top, width: faceBox.width, height: faceBox.height };
};
$effect(() => {
if (!canvas) {
return;
}
const { currentZoom, currentPositionX, currentPositionY } = assetViewerManager.zoomState;
canvas.setViewportTransform([currentZoom, 0, 0, currentZoom, currentPositionX, currentPositionY]);
canvas.renderAll();
positionFaceSelector();
});
$effect(() => {
const rect = faceRect;
if (rect) {
const onUserMove = () => {
userMovedRect = true;
positionFaceSelector();
};
rect.on('moving', onUserMove);
rect.on('scaling', onUserMove);
rect.on('moving', positionFaceSelector);
rect.on('scaling', positionFaceSelector);
return () => {
rect.off('moving', onUserMove);
rect.off('scaling', onUserMove);
rect.off('moving', positionFaceSelector);
rect.off('scaling', positionFaceSelector);
};
}
});
const isMac = typeof navigator !== 'undefined' && /Mac|iPhone|iPad|iPod/.test(navigator.userAgent);
const panModifierKey = isMac ? 'Meta' : 'Control';
const panModifierLabel = isMac ? '⌘' : 'Ctrl';
const isZoomed = $derived(assetViewerManager.zoom > 1);
$effect(() => {
if (!containerEl) {
return;
}
const element = containerEl;
const parent = element.parentElement;
const activate = () => {
panModifierHeld = true;
element.style.pointerEvents = 'none';
if (parent) {
parent.style.cursor = 'move';
}
};
const deactivate = () => {
panModifierHeld = false;
element.style.pointerEvents = '';
if (parent) {
parent.style.cursor = '';
}
};
const onKeyDown = (event: KeyboardEvent) => {
if (event.key === panModifierKey) {
activate();
}
};
const onKeyUp = (event: KeyboardEvent) => {
if (event.key === panModifierKey) {
deactivate();
}
};
document.addEventListener('keydown', onKeyDown);
document.addEventListener('keyup', onKeyUp);
window.addEventListener('blur', deactivate);
return () => {
document.removeEventListener('keydown', onKeyDown);
document.removeEventListener('keyup', onKeyUp);
window.removeEventListener('blur', deactivate);
deactivate();
};
});
const trapEvents = (node: HTMLElement) => {
const stop = (e: Event) => e.stopPropagation();
const eventTypes = ['keydown', 'pointerdown', 'pointermove', 'pointerup'] as const;
for (const type of eventTypes) {
node.addEventListener(type, stop);
}
// Move to body so the selector isn't affected by the zoom transform on the container
document.body.append(node);
return {
destroy() {
for (const type of eventTypes) {
node.removeEventListener(type, stop);
}
node.remove();
},
};
};
const getFaceCroppedCoordinates = () => {
if (!faceRect || imageSize.width === 0 || imageSize.height === 0) {
if (!faceRect || !htmlElement) {
return;
}
const scaledWidth = faceRect.getScaledWidth();
const scaledHeight = faceRect.getScaledHeight();
const { left, top, width, height } = faceRect.getBoundingRect();
const { offsetX, offsetY, contentWidth, contentHeight } = imageContentMetrics;
const natural = getNaturalSize(htmlElement);
const imageRect = mapContentRectToNatural(
{
left: faceRect.left - scaledWidth / 2,
top: faceRect.top - scaledHeight / 2,
width: scaledWidth,
height: scaledHeight,
},
imageContentMetrics,
imageSize,
);
const scaleX = natural.width / contentWidth;
const scaleY = natural.height / contentHeight;
const imageX = (left - offsetX) * scaleX;
const imageY = (top - offsetY) * scaleY;
return {
imageWidth: imageSize.width,
imageHeight: imageSize.height,
x: Math.floor(imageRect.left),
y: Math.floor(imageRect.top),
width: Math.floor(imageRect.width),
height: Math.floor(imageRect.height),
imageWidth: natural.width,
imageHeight: natural.height,
x: Math.floor(imageX),
y: Math.floor(imageY),
width: Math.floor(width * scaleX),
height: Math.floor(height * scaleY),
};
};
@@ -426,9 +282,10 @@
});
await assetViewingStore.setAssetId(assetId);
isFaceEditMode.value = false;
} catch (error) {
handleError(error, 'Error tagging face');
} finally {
isFaceEditMode.value = false;
}
};
</script>
@@ -437,7 +294,6 @@
<div
id="face-editor-data"
bind:this={containerEl}
class="absolute start-0 top-0 z-5 h-full w-full overflow-hidden"
data-face-left={faceBoxPosition.left}
data-face-top={faceBoxPosition.top}
@@ -449,9 +305,7 @@
<div
id="face-selector"
bind:this={faceSelectorEl}
class="fixed z-20 w-[min(200px,45vw)] min-w-48 bg-white dark:bg-immich-dark-gray dark:text-immich-dark-fg backdrop-blur-sm px-2 py-4 rounded-xl border border-gray-200 dark:border-gray-800 transition-[top,left] duration-200 ease-out"
use:trapEvents
onwheel={(e) => e.stopPropagation()}
class="absolute top-[calc(50%-250px)] start-[calc(50%-125px)] max-w-[250px] w-[250px] bg-white dark:bg-immich-dark-gray dark:text-immich-dark-fg backdrop-blur-sm px-2 py-4 rounded-xl border border-gray-200 dark:border-gray-800 transition-[top,left] duration-200 ease-out"
>
<p class="text-center text-sm">{$t('select_person_to_tag')}</p>
@@ -492,15 +346,4 @@
<Button size="small" fullWidth onclick={cancel} color="danger" class="mt-2">{$t('cancel')}</Button>
</div>
{#if isZoomed && !panModifierHeld}
<div
transition:fade={{ duration: 200 }}
class="absolute bottom-4 inset-s-1/2 -translate-x-1/2 pointer-events-none z-10"
>
<p class="bg-black/60 text-white text-xs px-3 py-1.5 rounded-full whitespace-nowrap">
{$t('hold_key_to_pan', { values: { key: panModifierLabel } })}
</p>
</div>
{/if}
</div>

View File

@@ -1,5 +1,4 @@
<script lang="ts">
import { mediaQueryManager } from '$lib/stores/media-query-manager.svelte';
import type { OcrBox } from '$lib/utils/ocr-utils';
import { calculateBoundingBoxMatrix, calculateFittedFontSize } from '$lib/utils/ocr-utils';
@@ -9,7 +8,6 @@
let { ocrBox }: Props = $props();
const isTouch = $derived(mediaQueryManager.pointerCoarse);
const dimensions = $derived(calculateBoundingBoxMatrix(ocrBox.points));
const transform = $derived(`matrix3d(${dimensions.matrix.join(',')})`);
@@ -17,23 +15,13 @@
calculateFittedFontSize(ocrBox.text, dimensions.width, dimensions.height, ocrBox.verticalMode) + 'px',
);
/**
 * On touch devices, expand a starting text selection to the whole OCR box.
 * Deferred one frame so the native selection exists before it is replaced.
 */
const handleSelectStart = (event: Event) => {
  const target = event.currentTarget as HTMLElement;
  requestAnimationFrame(() => {
    const selection = globalThis.getSelection();
    if (!selection) {
      return;
    }
    selection.selectAllChildren(target);
  });
};
const verticalStyle = $derived.by(() => {
switch (ocrBox.verticalMode) {
case 'cjk': {
return 'writing-mode: vertical-rl;';
return ' writing-mode: vertical-rl;';
}
case 'rotated': {
return 'writing-mode: vertical-rl; text-orientation: sideways;';
return ' writing-mode: vertical-rl; text-orientation: sideways;';
}
default: {
return '';
@@ -42,23 +30,17 @@
});
</script>
<div
class={[
'absolute left-0 top-0 flex items-center justify-center',
'border-2 border-blue-500 pointer-events-auto cursor-text',
'focus:z-1 focus:border-blue-600 focus:border-3 focus:outline-none',
isTouch
? 'text-white bg-black/60 select-all'
: 'select-text text-transparent bg-blue-500/10 transition-colors hover:z-1 hover:text-white hover:bg-black/60 hover:border-blue-600 hover:border-3',
ocrBox.verticalMode === 'none' ? 'px-2 py-1 whitespace-nowrap' : 'px-1 py-2',
]}
style="font-size: {fontSize}; width: {dimensions.width}px; height: {dimensions.height}px; transform: {transform}; transform-origin: 0 0; touch-action: none; {verticalStyle}"
data-testid="ocr-box"
data-overlay-interactive
tabindex="0"
role="button"
aria-label={ocrBox.text}
onselectstart={isTouch ? handleSelectStart : undefined}
>
{ocrBox.text}
<div class="absolute left-0 top-0">
<div
class="absolute flex items-center justify-center text-transparent border-2 border-blue-500 bg-blue-500/10 pointer-events-auto cursor-text select-text transition-colors hover:z-1 hover:text-white hover:bg-black/60 hover:border-blue-600 hover:border-3 focus:z-1 focus:text-white focus:bg-black/60 focus:border-blue-600 focus:border-3 focus:outline-none {ocrBox.verticalMode ===
'none'
? 'px-2 py-1 whitespace-nowrap'
: 'px-1 py-2'}"
style="font-size: {fontSize}; width: {dimensions.width}px; height: {dimensions.height}px; transform: {transform}; transform-origin: 0 0;{verticalStyle}"
tabindex="0"
role="button"
aria-label={ocrBox.text}
>
{ocrBox.text}
</div>
</div>

View File

@@ -128,8 +128,10 @@
}
const boxes = getOcrBoundingBoxes(ocrData, {
width: viewer.state.textureData.panoData.croppedWidth,
height: viewer.state.textureData.panoData.croppedHeight,
contentWidth: viewer.state.textureData.panoData.croppedWidth,
contentHeight: viewer.state.textureData.panoData.croppedHeight,
offsetX: 0,
offsetY: 0,
});
for (const [index, box] of boxes.entries()) {

View File

@@ -8,13 +8,13 @@
import AssetViewerEvents from '$lib/components/AssetViewerEvents.svelte';
import { assetViewerManager } from '$lib/managers/asset-viewer-manager.svelte';
import { castManager } from '$lib/managers/cast-manager.svelte';
import { isEditFacesPanelOpen, isFaceEditMode } from '$lib/stores/face-edit.svelte';
import { isFaceEditMode } from '$lib/stores/face-edit.svelte';
import { ocrManager } from '$lib/stores/ocr.svelte';
import { boundingBoxesArray, type Faces } from '$lib/stores/people.store';
import { SlideshowLook, SlideshowState, slideshowStore } from '$lib/stores/slideshow.store';
import { handlePromiseError } from '$lib/utils';
import { canCopyImageToClipboard, copyImageToClipboard } from '$lib/utils/asset-utils';
import type { Size } from '$lib/utils/container-utils';
import { getNaturalSize, scaleToFit, type ContentMetrics } from '$lib/utils/container-utils';
import { handleError } from '$lib/utils/handle-error';
import { getOcrBoundingBoxes } from '$lib/utils/ocr-utils';
import { getBoundingBox } from '$lib/utils/people-utils';
@@ -25,14 +25,14 @@
import { t } from 'svelte-i18n';
import type { AssetCursor } from './asset-viewer.svelte';
type Props = {
interface Props {
cursor: AssetCursor;
element?: HTMLDivElement;
sharedLink?: SharedLinkResponseDto;
onReady?: () => void;
onError?: () => void;
onSwipe?: (event: SwipeCustomEvent) => void;
};
}
let { cursor, element = $bindable(), sharedLink, onReady, onError, onSwipe }: Props = $props();
@@ -67,12 +67,23 @@
height: containerHeight,
});
let imageDimensions = $state<Size>({ width: 0, height: 0 });
let scaledDimensions = $state<Size>({ width: 0, height: 0 });
const overlayMetrics = $derived.by((): ContentMetrics => {
if (!assetViewerManager.imgRef || !visibleImageReady) {
return { contentWidth: 0, contentHeight: 0, offsetX: 0, offsetY: 0 };
}
const overlaySize = $derived(visibleImageReady ? scaledDimensions : { width: 0, height: 0 });
const natural = getNaturalSize(assetViewerManager.imgRef);
const scaled = scaleToFit(natural, { width: containerWidth, height: containerHeight });
const ocrBoxes = $derived(ocrManager.showOverlay ? getOcrBoundingBoxes(ocrManager.data, overlaySize) : []);
return {
contentWidth: scaled.width,
contentHeight: scaled.height,
offsetX: 0,
offsetY: 0,
};
});
const ocrBoxes = $derived(ocrManager.showOverlay ? getOcrBoundingBoxes(ocrManager.data, overlayMetrics) : []);
const onCopy = async () => {
if (!canCopyImageToClipboard() || !assetViewerManager.imgRef) {
@@ -94,6 +105,12 @@
const onPlaySlideshow = () => ($slideshowState = SlideshowState.PlaySlideshow);
$effect(() => {
if (isFaceEditMode.value && assetViewerManager.zoom > 1) {
onZoom();
}
});
// TODO move to action + command palette
const onCopyShortcut = (event: KeyboardEvent) => {
if (globalThis.getSelection()?.type === 'Range') {
@@ -134,26 +151,48 @@
$slideshowState !== SlideshowState.None && $slideshowLook === SlideshowLook.BlurredBackground && !!asset.thumbhash,
);
let adaptiveImage = $state<HTMLDivElement | undefined>();
const faceToNameMap = $derived.by(() => {
// eslint-disable-next-line svelte/prefer-svelte-reactivity
const map = new Map<Faces, string | undefined>();
const map = new Map<Faces, string>();
for (const person of asset.people ?? []) {
for (const face of person.faces ?? []) {
map.set(face, person.name);
}
}
for (const face of asset.unassignedFaces ?? []) {
map.set(face, undefined);
}
return map;
});
// Array needed for indexed access in the template (faces[index])
const faces = $derived(Array.from(faceToNameMap.keys()));
const boundingBoxes = $derived(getBoundingBox(faces, overlaySize));
const activeBoundingBoxes = $derived(boundingBoxes.filter((box) => $boundingBoxesArray.some((f) => f.id === box.id)));
// Highlight the face box(es) under the cursor by publishing them to $boundingBoxesArray.
// Converts the mouse position from client space into unzoomed content space, then
// hit-tests against the face bounding boxes.
const handleImageMouseMove = (event: MouseEvent) => {
// Clear any previous highlight; repopulated below when the cursor is over a face.
$boundingBoxesArray = [];
// No hover highlighting while editing faces or while the OCR overlay is shown.
if (!assetViewerManager.imgRef || !element || isFaceEditMode.value || ocrManager.showOverlay) {
return;
}
const natural = getNaturalSize(assetViewerManager.imgRef);
const scaled = scaleToFit(natural, container);
const { currentZoom, currentPositionX, currentPositionY } = assetViewerManager.zoomState;
// Letterbox offsets: how far the fitted image is inset from the container edges.
const contentOffsetX = (container.width - scaled.width) / 2;
const contentOffsetY = (container.height - scaled.height) / 2;
const containerRect = element.getBoundingClientRect();
// Undo the zoom translation/scale so mouseX/mouseY are in pre-zoom content pixels.
const mouseX = (event.clientX - containerRect.left - contentOffsetX * currentZoom - currentPositionX) / currentZoom;
const mouseY = (event.clientY - containerRect.top - contentOffsetY * currentZoom - currentPositionY) / currentZoom;
const faceBoxes = getBoundingBox(faces, overlayMetrics);
for (const [index, box] of faceBoxes.entries()) {
if (mouseX >= box.left && mouseX <= box.left + box.width && mouseY >= box.top && mouseY <= box.top + box.height) {
// NOTE(review): pushing into the store value without reassignment may not notify
// Svelte store subscribers — confirm this triggers the overlay update.
$boundingBoxesArray.push(faces[index]);
}
}
};
// Clear the face highlight when the cursor leaves the image.
const handleImageMouseLeave = () => {
$boundingBoxesArray = [];
};
</script>
<AssetViewerEvents {onCopy} {onZoom} />
@@ -174,7 +213,9 @@
bind:clientHeight={containerHeight}
role="presentation"
ondblclick={onZoom}
use:zoomImageAction={{ zoomTarget: adaptiveImage }}
onmousemove={handleImageMouseMove}
onmouseleave={handleImageMouseLeave}
use:zoomImageAction={{ disabled: isFaceEditMode.value || ocrManager.showOverlay }}
{...useSwipe((event) => onSwipe?.(event))}
>
<AdaptiveImage
@@ -192,9 +233,6 @@
onReady?.();
}}
bind:imgRef={assetViewerManager.imgRef}
bind:imgNaturalSize={imageDimensions}
bind:imgScaledSize={scaledDimensions}
bind:ref={adaptiveImage}
>
{#snippet backdrop()}
{#if blurredSlideshow}
@@ -205,40 +243,20 @@
{/if}
{/snippet}
{#snippet overlays()}
{#if !isFaceEditMode.value}
{#each boundingBoxes as boundingbox, index (boundingbox.id)}
{@const face = faces[index]}
{@const name = faceToNameMap.get(face)}
{#if name !== undefined || isEditFacesPanelOpen.value}
<!-- svelte-ignore a11y_no_static_element_interactions -->
<div
class="absolute pointer-events-auto outline-none rounded-lg"
style="top: {boundingbox.top}px; left: {boundingbox.left}px; height: {boundingbox.height}px; width: {boundingbox.width}px;"
aria-label="{$t('person')}: {name ?? $t('unknown')}"
onpointerenter={() => ($boundingBoxesArray = [face])}
onpointerleave={() => ($boundingBoxesArray = [])}
></div>
{/if}
{/each}
{/if}
{#each activeBoundingBoxes as boundingbox (boundingbox.id)}
{@const face = faces.find((f) => f.id === boundingbox.id)}
{@const name = face ? faceToNameMap.get(face) : undefined}
{#each getBoundingBox($boundingBoxesArray, overlayMetrics) as boundingbox, index (boundingbox.id)}
<div
class="absolute border-solid border-white border-3 rounded-lg pointer-events-none"
class="absolute border-solid border-white border-3 rounded-lg"
style="top: {boundingbox.top}px; left: {boundingbox.left}px; height: {boundingbox.height}px; width: {boundingbox.width}px;"
>
{#if name}
<div
aria-hidden="true"
class="absolute bg-white/90 text-black px-2 py-1 rounded text-sm font-medium whitespace-nowrap shadow-lg"
style="top: {boundingbox.height + 4}px; right: 0;"
>
{name}
</div>
{/if}
</div>
></div>
{#if faceToNameMap.get($boundingBoxesArray[index])}
<div
class="absolute bg-white/90 text-black px-2 py-1 rounded text-sm font-medium whitespace-nowrap pointer-events-none shadow-lg"
style="top: {boundingbox.top + boundingbox.height + 4}px; left: {boundingbox.left +
boundingbox.width}px; transform: translateX(-100%);"
>
{faceToNameMap.get($boundingBoxesArray[index])}
</div>
{/if}
{/each}
{#each ocrBoxes as ocrBox (ocrBox.id)}
@@ -248,6 +266,6 @@
</AdaptiveImage>
{#if isFaceEditMode.value && assetViewerManager.imgRef}
<FaceEditor imageSize={imageDimensions} {containerWidth} {containerHeight} assetId={asset.id} />
<FaceEditor htmlElement={assetViewerManager.imgRef} {containerWidth} {containerHeight} assetId={asset.id} />
{/if}
</div>

View File

@@ -11,16 +11,14 @@
videoViewerVolume,
} from '$lib/stores/preferences.store';
import { getAssetMediaUrl, getAssetPlaybackUrl } from '$lib/utils';
import type { Size } from '$lib/utils/container-utils';
import { AssetMediaSize } from '@immich/sdk';
import { LoadingSpinner } from '@immich/ui';
import { onDestroy, onMount } from 'svelte';
import { useSwipe, type SwipeCustomEvent } from 'svelte-gestures';
import { fade } from 'svelte/transition';
type Props = {
interface Props {
assetId: string;
imageSize: Size;
loopVideo: boolean;
cacheKey: string | null;
playOriginalVideo: boolean;
@@ -29,11 +27,10 @@
onVideoEnded?: () => void;
onVideoStarted?: () => void;
onClose?: () => void;
};
}
let {
assetId,
imageSize,
loopVideo,
cacheKey,
playOriginalVideo,
@@ -176,7 +173,7 @@
{/if}
{#if isFaceEditMode.value}
<FaceEditor {imageSize} {containerWidth} {containerHeight} {assetId} />
<FaceEditor htmlElement={videoPlayer} {containerWidth} {containerHeight} {assetId} />
{/if}
{/if}
</div>

View File

@@ -4,7 +4,7 @@
import { ProjectionType } from '$lib/constants';
import type { AssetResponseDto } from '@immich/sdk';
type Props = {
interface Props {
asset: AssetResponseDto;
assetId?: string;
projectionType: string | null | undefined;
@@ -16,7 +16,7 @@
onNextAsset?: () => void;
onVideoEnded?: () => void;
onVideoStarted?: () => void;
};
}
let {
asset,
@@ -42,7 +42,6 @@
{loopVideo}
{cacheKey}
assetId={effectiveAssetId}
imageSize={{ width: asset.width ?? 1, height: asset.height ?? 1 }}
{playOriginalVideo}
{onPreviousAsset}
{onNextAsset}

View File

@@ -16,7 +16,6 @@
circle?: boolean;
hidden?: boolean;
border?: boolean;
highlighted?: boolean;
hiddenIconClass?: string;
class?: ClassValue;
brokenAssetClass?: ClassValue;
@@ -35,7 +34,6 @@
circle = false,
hidden = false,
border = false,
highlighted = false,
hiddenIconClass = 'text-white',
onComplete = undefined,
class: imageClass = '',
@@ -85,10 +83,6 @@
/>
{/if}
{#if highlighted}
<span class={['absolute inset-0 pointer-events-none border-2 border-white', sharedClasses]} {style}></span>
{/if}
{#if hidden}
<div class="absolute start-1/2 top-1/2 translate-x-[-50%] translate-y-[-50%] transform">
<!-- TODO fix `title` type -->

View File

@@ -27,12 +27,12 @@
import ImageThumbnail from '../assets/thumbnail/image-thumbnail.svelte';
import AssignFaceSidePanel from './assign-face-side-panel.svelte';
type Props = {
interface Props {
assetId: string;
assetType: AssetTypeEnum;
onClose: () => void;
onRefresh: () => void;
};
}
let { assetId, assetType, onClose, onRefresh }: Props = $props();
@@ -58,8 +58,6 @@
let automaticRefreshTimeout: ReturnType<typeof setTimeout>;
const thumbnailWidth = '90px';
const focusHighlightClass =
'group-focus-visible:outline-2 group-focus-visible:outline-offset-2 group-focus-visible:outline-immich-primary dark:group-focus-visible:outline-immich-dark-primary';
async function loadPeople() {
const timeout = setTimeout(() => (isShowLoadingPeople = true), timeBeforeShowLoadingSpinner);
@@ -228,16 +226,14 @@
{:else}
{#each peopleWithFaces as face, index (face.id)}
{@const personName = face.person ? face.person?.name : $t('face_unassigned')}
{@const isHighlighted = $boundingBoxesArray.some((f) => f.id === face.id)}
<div class="relative h-29 w-24">
<div
role="button"
tabindex={index}
data-testid="face-thumbnail"
class="group absolute inset-s-0 top-0 h-22.5 w-22.5 cursor-default outline-none"
class="absolute start-0 top-0 h-22.5 w-22.5 cursor-default"
onfocus={() => ($boundingBoxesArray = [peopleWithFaces[index]])}
onpointerover={() => ($boundingBoxesArray = [peopleWithFaces[index]])}
onpointerleave={() => ($boundingBoxesArray = [])}
onmouseover={() => ($boundingBoxesArray = [peopleWithFaces[index]])}
onmouseleave={() => ($boundingBoxesArray = [])}
>
<div class="relative">
{#if selectedPersonToCreate[face.id]}
@@ -249,8 +245,6 @@
title={$t('new_person')}
widthStyle={thumbnailWidth}
heightStyle={thumbnailWidth}
highlighted={isHighlighted}
class={focusHighlightClass}
/>
{:else if selectedPersonToReassign[face.id]}
<ImageThumbnail
@@ -265,8 +259,6 @@
widthStyle={thumbnailWidth}
heightStyle={thumbnailWidth}
hidden={selectedPersonToReassign[face.id].isHidden}
highlighted={isHighlighted}
class={focusHighlightClass}
/>
{:else if face.person}
<ImageThumbnail
@@ -278,8 +270,6 @@
widthStyle={thumbnailWidth}
heightStyle={thumbnailWidth}
hidden={face.person.isHidden}
highlighted={isHighlighted}
class={focusHighlightClass}
/>
{:else}
{#await zoomImageToBase64(face, assetId, assetType, assetViewerManager.imgRef)}
@@ -291,8 +281,6 @@
title={$t('face_unassigned')}
widthStyle="90px"
heightStyle="90px"
highlighted={isHighlighted}
class={focusHighlightClass}
/>
{:then data}
<ImageThumbnail
@@ -303,8 +291,6 @@
title={$t('face_unassigned')}
widthStyle="90px"
heightStyle="90px"
highlighted={isHighlighted}
class={focusHighlightClass}
/>
{/await}
{/if}

View File

@@ -28,7 +28,10 @@
let { onClose }: Props = $props();
// Load all albums (owned + shared) once, then derive the three most recently
// updated ones for the "recent" shortcut list.
// The stale duplicate `getAllAlbums({})` call left over from the diff merge is
// removed, and the two remaining fetches run in parallel.
onMount(async () => {
  // TODO the server should *really* just return all albums (paginated ideally)
  const [ownedAlbums, sharedAlbums] = await Promise.all([
    getAllAlbums({ shared: false }),
    getAllAlbums({ shared: true }),
  ]);
  albums = [...ownedAlbums, ...sharedAlbums];
  // Note: sort() mutates `albums` in place (same as before); newest first.
  recentAlbums = albums.sort((a, b) => (new Date(a.updatedAt) > new Date(b.updatedAt) ? -1 : 1)).slice(0, 3);
  loading = false;
});

View File

@@ -8,17 +8,16 @@ import SharedLinkCreateModal from '$lib/modals/SharedLinkCreateModal.svelte';
import { isFaceEditMode } from '$lib/stores/face-edit.svelte';
import { user as authUser, preferences } from '$lib/stores/user.store';
import type { AssetControlContext } from '$lib/types';
import { getSharedLink, sleep } from '$lib/utils';
import { getAssetMediaUrl, getSharedLink, sleep } from '$lib/utils';
import { downloadUrl } from '$lib/utils/asset-utils';
import { handleError } from '$lib/utils/handle-error';
import { getFormatter } from '$lib/utils/i18n';
import { asQueryString } from '$lib/utils/shared-links';
import {
AssetJobName,
AssetMediaSize,
AssetTypeEnum,
AssetVisibility,
getAssetInfo,
getBaseUrl,
runAssetJobs,
updateAsset,
type AssetJobsDto,
@@ -308,6 +307,7 @@ export const handleDownloadAsset = async (asset: AssetResponseDto, { edited }: {
{
filename: asset.originalFileName,
id: asset.id,
cacheKey: asset.thumbhash,
},
];
@@ -321,13 +321,12 @@ export const handleDownloadAsset = async (asset: AssetResponseDto, { edited }: {
assets.push({
filename: motionAsset.originalFileName,
id: asset.livePhotoVideoId,
cacheKey: motionAsset.thumbhash,
});
}
}
const queryParams = asQueryString(authManager.params);
for (const [i, { filename, id }] of assets.entries()) {
for (const [i, { filename, id, cacheKey }] of assets.entries()) {
if (i !== 0) {
// play nice with Safari
await sleep(500);
@@ -335,12 +334,7 @@ export const handleDownloadAsset = async (asset: AssetResponseDto, { edited }: {
try {
toastManager.primary($t('downloading_asset_filename', { values: { filename } }));
downloadUrl(
getBaseUrl() +
`/assets/${id}/original` +
(queryParams ? `?${queryParams}&edited=${edited}` : `?edited=${edited}`),
filename,
);
downloadUrl(getAssetMediaUrl({ id, size: AssetMediaSize.Original, edited, cacheKey }), filename);
} catch (error) {
handleError(error, $t('errors.error_downloading', { values: { filename } }));
}

View File

@@ -1,2 +1 @@
export const isFaceEditMode = $state({ value: false });
export const isEditFacesPanelOpen = $state({ value: false });

View File

@@ -80,7 +80,34 @@ function createUploadStore() {
};
/**
 * Remove an upload entry by id and keep the aggregate stats in sync with the
 * removed asset's final state.
 *
 * Bug fixed: a stale leading `uploadAssets.update(... filter ...)` (diff-merge
 * residue) removed the asset in a FIRST update, so the second update's `find`
 * never saw it and the stats counters were never decremented.
 */
const removeItem = (id: string) => {
  uploadAssets.update((uploadingAsset) => {
    const assetToRemove = uploadingAsset.find((a) => a.id === id);
    if (assetToRemove) {
      stats.update((current) => {
        // Only terminal states contribute to a per-state counter.
        switch (assetToRemove.state) {
          case UploadState.DONE: {
            current.success--;
            break;
          }
          case UploadState.DUPLICATED: {
            current.duplicates--;
            break;
          }
          case UploadState.ERROR: {
            current.errors--;
            break;
          }
        }
        current.total--;
        return current;
      });
    }
    return uploadingAsset.filter((a) => a.id !== id);
  });
};
const dismissErrors = () =>

View File

@@ -1,14 +1,4 @@
import {
computeContentMetrics,
getContentMetrics,
getNaturalSize,
mapContentRectToNatural,
mapContentToNatural,
mapNormalizedRectToContent,
mapNormalizedToContent,
scaleToCover,
scaleToFit,
} from '$lib/utils/container-utils';
import { getContentMetrics, getNaturalSize, scaleToFit } from '$lib/utils/container-utils';
const mockImage = (props: {
naturalWidth: number;
@@ -102,178 +92,3 @@ describe('getNaturalSize', () => {
expect(getNaturalSize(video)).toEqual({ width: 1920, height: 1080 });
});
});
describe('scaleToCover', () => {
it('should scale up to cover container when image is smaller', () => {
expect(scaleToCover({ width: 400, height: 300 }, { width: 800, height: 600 })).toEqual({
width: 800,
height: 600,
});
});
it('should use height scale when image is wider than container', () => {
expect(scaleToCover({ width: 2000, height: 1000 }, { width: 800, height: 600 })).toEqual({
width: 1200,
height: 600,
});
});
it('should use width scale when image is taller than container', () => {
expect(scaleToCover({ width: 1000, height: 2000 }, { width: 800, height: 600 })).toEqual({
width: 800,
height: 1600,
});
});
});
describe('computeContentMetrics', () => {
it('should compute metrics with scaleToFit by default', () => {
expect(computeContentMetrics({ width: 2000, height: 1000 }, { width: 800, height: 600 })).toEqual({
contentWidth: 800,
contentHeight: 400,
offsetX: 0,
offsetY: 100,
});
});
it('should accept scaleToCover as scale function', () => {
expect(computeContentMetrics({ width: 2000, height: 1000 }, { width: 800, height: 600 }, scaleToCover)).toEqual({
contentWidth: 1200,
contentHeight: 600,
offsetX: -200,
offsetY: 0,
});
});
it('should compute zero offsets when aspect ratios match', () => {
expect(computeContentMetrics({ width: 1600, height: 900 }, { width: 800, height: 450 })).toEqual({
contentWidth: 800,
contentHeight: 450,
offsetX: 0,
offsetY: 0,
});
});
});
// Coordinate space glossary:
//
// "Normalized" coordinates: values in the 0–1 range, where (0,0) is the top-left
// of the image and (1,1) is the bottom-right. Resolution-independent.
//
// "Content" coordinates: pixel positions within the container, after the image
// has been scaled (scaleToFit/scaleToCover) and offset (centered). This is what
// CSS and DOM layout use for positioning overlays like face boxes and OCR text.
//
// "Natural" coordinates: pixel positions in the original image file at its full
// resolution (e.g. 4000×3000). Used when cropping or drawing on the source image.
//
// "Metadata pixel space": the coordinate system used by face detection / OCR
// models, where positions are in pixels relative to the image dimensions stored
// in metadata (face.imageWidth/imageHeight). These may differ from the natural
// dimensions if the image was resized. To convert to normalized, divide by
// the metadata dimensions (e.g. face.boundingBoxX1 / face.imageWidth).
describe('mapNormalizedToContent', () => {
const metrics = { contentWidth: 800, contentHeight: 400, offsetX: 0, offsetY: 100 };
it('should map top-left corner', () => {
expect(mapNormalizedToContent({ x: 0, y: 0 }, metrics)).toEqual({ x: 0, y: 100 });
});
it('should map bottom-right corner', () => {
expect(mapNormalizedToContent({ x: 1, y: 1 }, metrics)).toEqual({ x: 800, y: 500 });
});
it('should map center point', () => {
expect(mapNormalizedToContent({ x: 0.5, y: 0.5 }, metrics)).toEqual({ x: 400, y: 300 });
});
it('should apply offsets correctly for letterboxed content', () => {
const letterboxed = { contentWidth: 300, contentHeight: 600, offsetX: 250, offsetY: 0 };
expect(mapNormalizedToContent({ x: 0, y: 0 }, letterboxed)).toEqual({ x: 250, y: 0 });
expect(mapNormalizedToContent({ x: 1, y: 1 }, letterboxed)).toEqual({ x: 550, y: 600 });
});
it('should accept Size (zero offsets)', () => {
const size = { width: 800, height: 400 };
expect(mapNormalizedToContent({ x: 0, y: 0 }, size)).toEqual({ x: 0, y: 0 });
expect(mapNormalizedToContent({ x: 1, y: 1 }, size)).toEqual({ x: 800, y: 400 });
expect(mapNormalizedToContent({ x: 0.5, y: 0.5 }, size)).toEqual({ x: 400, y: 200 });
});
});
describe('mapContentToNatural', () => {
const metrics = { contentWidth: 800, contentHeight: 400, offsetX: 0, offsetY: 100 };
const natural = { width: 4000, height: 2000 };
it('should map content origin to natural origin', () => {
expect(mapContentToNatural({ x: 0, y: 100 }, metrics, natural)).toEqual({ x: 0, y: 0 });
});
it('should map content bottom-right to natural bottom-right', () => {
expect(mapContentToNatural({ x: 800, y: 500 }, metrics, natural)).toEqual({ x: 4000, y: 2000 });
});
it('should map content center to natural center', () => {
expect(mapContentToNatural({ x: 400, y: 300 }, metrics, natural)).toEqual({ x: 2000, y: 1000 });
});
it('should be the inverse of mapNormalizedToContent', () => {
const normalized = { x: 0.3, y: 0.7 };
const contentPoint = mapNormalizedToContent(normalized, metrics);
const naturalPoint = mapContentToNatural(contentPoint, metrics, natural);
expect(naturalPoint.x).toBeCloseTo(normalized.x * natural.width);
expect(naturalPoint.y).toBeCloseTo(normalized.y * natural.height);
});
});
describe('mapNormalizedRectToContent', () => {
const metrics = { contentWidth: 800, contentHeight: 400, offsetX: 0, offsetY: 100 };
it('should map a normalized rect to content pixel coordinates', () => {
const rect = mapNormalizedRectToContent({ x: 0.25, y: 0.25 }, { x: 0.75, y: 0.75 }, metrics);
expect(rect).toEqual({ left: 200, top: 200, width: 400, height: 200 });
});
it('should map full image rect', () => {
const rect = mapNormalizedRectToContent({ x: 0, y: 0 }, { x: 1, y: 1 }, metrics);
expect(rect).toEqual({ left: 0, top: 100, width: 800, height: 400 });
});
it('should handle letterboxed content with horizontal offsets', () => {
const letterboxed = { contentWidth: 300, contentHeight: 600, offsetX: 250, offsetY: 0 };
const rect = mapNormalizedRectToContent({ x: 0, y: 0 }, { x: 1, y: 1 }, letterboxed);
expect(rect).toEqual({ left: 250, top: 0, width: 300, height: 600 });
});
it('should accept Size (zero offsets)', () => {
const size = { width: 800, height: 400 };
const rect = mapNormalizedRectToContent({ x: 0.25, y: 0.25 }, { x: 0.75, y: 0.75 }, size);
expect(rect).toEqual({ left: 200, top: 100, width: 400, height: 200 });
});
});
describe('mapContentRectToNatural', () => {
const metrics = { contentWidth: 800, contentHeight: 400, offsetX: 0, offsetY: 100 };
const natural = { width: 4000, height: 2000 };
it('should map a content rect to natural image coordinates', () => {
const rect = mapContentRectToNatural({ left: 200, top: 200, width: 400, height: 200 }, metrics, natural);
expect(rect).toEqual({ left: 1000, top: 500, width: 2000, height: 1000 });
});
it('should map full content rect to full natural dimensions', () => {
const rect = mapContentRectToNatural({ left: 0, top: 100, width: 800, height: 400 }, metrics, natural);
expect(rect).toEqual({ left: 0, top: 0, width: 4000, height: 2000 });
});
it('should be the inverse of mapNormalizedRectToContent', () => {
const normalized = { topLeft: { x: 0.2, y: 0.3 }, bottomRight: { x: 0.8, y: 0.9 } };
const contentRect = mapNormalizedRectToContent(normalized.topLeft, normalized.bottomRight, metrics);
const naturalRect = mapContentRectToNatural(contentRect, metrics, natural);
expect(naturalRect.left).toBeCloseTo(normalized.topLeft.x * natural.width);
expect(naturalRect.top).toBeCloseTo(normalized.topLeft.y * natural.height);
expect(naturalRect.width).toBeCloseTo((normalized.bottomRight.x - normalized.topLeft.x) * natural.width);
expect(naturalRect.height).toBeCloseTo((normalized.bottomRight.y - normalized.topLeft.y) * natural.height);
});
});

View File

@@ -1,35 +1,14 @@
// Coordinate spaces used throughout the viewer:
//
// "Normalized": 0–1 range, (0,0) = top-left, (1,1) = bottom-right. Resolution-independent.
// Example: OCR coordinates, or face coords after dividing by metadata dimensions.
//
// "Content": pixel position within the container after scaling (scaleToFit/scaleToCover)
// and centering. Used for DOM overlay positioning (face boxes, OCR text).
//
// "Natural": pixel position in the original full-resolution image file (e.g. 4000×3000).
// Used when cropping or drawing on the source image.
//
// "Metadata pixel space": coordinates from face detection / OCR models, in pixels relative
// to face.imageWidth/imageHeight. Divide by those dimensions to get normalized coords.
// 2D point; the meaning of x/y depends on the coordinate space in use
// (normalized 0–1, content pixels, or natural image pixels).
export type Point = {
x: number;
y: number;
};
// Width/height pair (pixels unless stated otherwise by the caller).
export type Size = {
width: number;
height: number;
};
export type ContentMetrics = {
export interface ContentMetrics {
contentWidth: number;
contentHeight: number;
offsetX: number;
offsetY: number;
};
}
export const scaleToCover = (dimensions: Size, container: Size): Size => {
export const scaleToCover = (
dimensions: { width: number; height: number },
container: { width: number; height: number },
): { width: number; height: number } => {
const scaleX = container.width / dimensions.width;
const scaleY = container.height / dimensions.height;
const scale = Math.max(scaleX, scaleY);
@@ -39,7 +18,10 @@ export const scaleToCover = (dimensions: Size, container: Size): Size => {
};
};
export const scaleToFit = (dimensions: Size, container: Size): Size => {
export const scaleToFit = (
dimensions: { width: number; height: number },
container: { width: number; height: number },
): { width: number; height: number } => {
const scaleX = container.width / dimensions.width;
const scaleY = container.height / dimensions.height;
const scale = Math.min(scaleX, scaleY);
@@ -49,93 +31,28 @@ export const scaleToFit = (dimensions: Size, container: Size): Size => {
};
};
const getElementSize = (element: HTMLImageElement | HTMLVideoElement): Size => {
const getElementSize = (element: HTMLImageElement | HTMLVideoElement): { width: number; height: number } => {
if (element instanceof HTMLVideoElement) {
return { width: element.clientWidth, height: element.clientHeight };
}
return { width: element.width, height: element.height };
};
export const getNaturalSize = (element: HTMLImageElement | HTMLVideoElement): Size => {
export const getNaturalSize = (element: HTMLImageElement | HTMLVideoElement): { width: number; height: number } => {
if (element instanceof HTMLVideoElement) {
return { width: element.videoWidth, height: element.videoHeight };
}
return { width: element.naturalWidth, height: element.naturalHeight };
};
/**
 * Compute the fitted content size and centering offsets for an image placed
 * inside a container, using the given scale function (letterbox fit by default).
 */
export function computeContentMetrics(
  imageSize: Size,
  containerSize: Size,
  scaleFn: (dimensions: Size, container: Size) => Size = scaleToFit,
) {
  const scaled = scaleFn(imageSize, containerSize);
  const offsetX = (containerSize.width - scaled.width) / 2;
  const offsetY = (containerSize.height - scaled.height) / 2;
  return { contentWidth: scaled.width, contentHeight: scaled.height, offsetX, offsetY };
}
// Measure how a media element's natural content fits (letterboxed, centered)
// within its rendered layout box.
// Defect fixed: the span contained the old one-line `return computeContentMetrics(...)`
// FOLLOWED by the new-side body (unreachable code, diff-merge residue) — only the
// new-side inline implementation is kept.
export const getContentMetrics = (element: HTMLImageElement | HTMLVideoElement): ContentMetrics => {
  const natural = getNaturalSize(element);
  const client = getElementSize(element);
  const { width: contentWidth, height: contentHeight } = scaleToFit(natural, client);
  return {
    contentWidth,
    contentHeight,
    offsetX: (client.width - contentWidth) / 2,
    offsetY: (client.height - contentHeight) / 2,
  };
};
export function mapNormalizedToContent(point: Point, sizeOrMetrics: Size | ContentMetrics): Point {
if ('contentWidth' in sizeOrMetrics) {
return {
x: point.x * sizeOrMetrics.contentWidth + sizeOrMetrics.offsetX,
y: point.y * sizeOrMetrics.contentHeight + sizeOrMetrics.offsetY,
};
}
return {
x: point.x * sizeOrMetrics.width,
y: point.y * sizeOrMetrics.height,
};
}
export function mapContentToNatural(point: Point, metrics: ContentMetrics, naturalSize: Size): Point {
return {
x: ((point.x - metrics.offsetX) / metrics.contentWidth) * naturalSize.width,
y: ((point.y - metrics.offsetY) / metrics.contentHeight) * naturalSize.height,
};
}
export type Rect = {
top: number;
left: number;
width: number;
height: number;
};
export function mapNormalizedRectToContent(
topLeft: Point,
bottomRight: Point,
sizeOrMetrics: Size | ContentMetrics,
): Rect {
const tl = mapNormalizedToContent(topLeft, sizeOrMetrics);
const br = mapNormalizedToContent(bottomRight, sizeOrMetrics);
return {
top: tl.y,
left: tl.x,
width: br.x - tl.x,
height: br.y - tl.y,
};
}
export function mapContentRectToNatural(rect: Rect, metrics: ContentMetrics, naturalSize: Size): Rect {
const topLeft = mapContentToNatural({ x: rect.left, y: rect.top }, metrics, naturalSize);
const bottomRight = mapContentToNatural(
{ x: rect.left + rect.width, y: rect.top + rect.height },
metrics,
naturalSize,
);
return {
top: topLeft.y,
left: topLeft.x,
width: bottomRight.x - topLeft.x,
height: bottomRight.y - topLeft.y,
};
}

View File

@@ -1,5 +1,5 @@
import type { OcrBoundingBox } from '$lib/stores/ocr.svelte';
import type { Size } from '$lib/utils/container-utils';
import type { ContentMetrics } from '$lib/utils/container-utils';
import { getOcrBoundingBoxes } from '$lib/utils/ocr-utils';
describe('getOcrBoundingBoxes', () => {
@@ -21,9 +21,9 @@ describe('getOcrBoundingBoxes', () => {
text: 'hello',
},
];
const imageSize: Size = { width: 1000, height: 500 };
const metrics: ContentMetrics = { contentWidth: 1000, contentHeight: 500, offsetX: 0, offsetY: 0 };
const boxes = getOcrBoundingBoxes(ocrData, imageSize);
const boxes = getOcrBoundingBoxes(ocrData, metrics);
expect(boxes).toHaveLength(1);
expect(boxes[0].id).toBe('box1');
@@ -37,7 +37,7 @@ describe('getOcrBoundingBoxes', () => {
]);
});
it('should map full-image box to full display area', () => {
it('should apply offsets for letterboxed images', () => {
const ocrData: OcrBoundingBox[] = [
{
id: 'box1',
@@ -55,20 +55,21 @@ describe('getOcrBoundingBoxes', () => {
text: 'test',
},
];
const imageSize: Size = { width: 600, height: 400 };
const metrics: ContentMetrics = { contentWidth: 600, contentHeight: 400, offsetX: 100, offsetY: 50 };
const boxes = getOcrBoundingBoxes(ocrData, imageSize);
const boxes = getOcrBoundingBoxes(ocrData, metrics);
expect(boxes[0].points).toEqual([
{ x: 0, y: 0 },
{ x: 600, y: 0 },
{ x: 600, y: 400 },
{ x: 0, y: 400 },
{ x: 100, y: 50 },
{ x: 700, y: 50 },
{ x: 700, y: 450 },
{ x: 100, y: 450 },
]);
});
it('should return empty array for empty input', () => {
expect(getOcrBoundingBoxes([], { width: 800, height: 600 })).toEqual([]);
const metrics: ContentMetrics = { contentWidth: 800, contentHeight: 600, offsetX: 0, offsetY: 0 };
expect(getOcrBoundingBoxes([], metrics)).toEqual([]);
});
it('should handle multiple boxes', () => {
@@ -104,9 +105,9 @@ describe('getOcrBoundingBoxes', () => {
text: 'second',
},
];
const imageSize: Size = { width: 200, height: 200 };
const metrics: ContentMetrics = { contentWidth: 200, contentHeight: 200, offsetX: 0, offsetY: 0 };
const boxes = getOcrBoundingBoxes(ocrData, imageSize);
const boxes = getOcrBoundingBoxes(ocrData, metrics);
expect(boxes).toHaveLength(2);
expect(boxes[0].text).toBe('first');

View File

@@ -1,19 +1,23 @@
import type { OcrBoundingBox } from '$lib/stores/ocr.svelte';
import { mapNormalizedToContent, type Point, type Size } from '$lib/utils/container-utils';
import type { ContentMetrics } from '$lib/utils/container-utils';
import { clamp } from 'lodash-es';
export type { Point } from '$lib/utils/container-utils';
export type Point = {
x: number;
y: number;
};
const distance = (p1: Point, p2: Point) => Math.hypot(p2.x - p1.x, p2.y - p1.y);
export type VerticalMode = 'none' | 'cjk' | 'rotated';
export type OcrBox = {
export interface OcrBox {
id: string;
points: Point[];
text: string;
confidence: number;
verticalMode: VerticalMode;
};
}
const CJK_PATTERN =
/[\u3000-\u303F\u3040-\u309F\u30A0-\u30FF\u3400-\u4DBF\u4E00-\u9FFF\uF900-\uFAFF\uAC00-\uD7AF\uFF00-\uFFEF]/;
@@ -34,7 +38,7 @@ const getVerticalMode = (width: number, height: number, text: string): VerticalM
* @param points - Array of 4 corner points of the bounding box
* @returns 4x4 matrix to transform the div with text onto the polygon defined by the corner points, and size to set on the source div.
*/
export const calculateBoundingBoxMatrix = (points: Point[]): Size & { matrix: number[] } => {
export const calculateBoundingBoxMatrix = (points: Point[]): { matrix: number[]; width: number; height: number } => {
const [topLeft, topRight, bottomRight, bottomLeft] = points;
const width = Math.max(distance(topLeft, topRight), distance(bottomLeft, bottomRight));
@@ -159,7 +163,7 @@ export const calculateFittedFontSize = (
return clamp(Math.min(scaleFromWidth, scaleFromHeight), MIN_FONT_SIZE, MAX_FONT_SIZE);
};
export const getOcrBoundingBoxes = (ocrData: OcrBoundingBox[], imageSize: Size): OcrBox[] => {
export const getOcrBoundingBoxes = (ocrData: OcrBoundingBox[], metrics: ContentMetrics): OcrBox[] => {
const boxes: OcrBox[] = [];
for (const ocr of ocrData) {
const points = [
@@ -167,7 +171,10 @@ export const getOcrBoundingBoxes = (ocrData: OcrBoundingBox[], imageSize: Size):
{ x: ocr.x2, y: ocr.y2 },
{ x: ocr.x3, y: ocr.y3 },
{ x: ocr.x4, y: ocr.y4 },
].map((point) => mapNormalizedToContent(point, imageSize));
].map((point) => ({
x: point.x * metrics.contentWidth + metrics.offsetX,
y: point.y * metrics.contentHeight + metrics.offsetY,
}));
const boxWidth = Math.max(distance(points[0], points[1]), distance(points[3], points[2]));
const boxHeight = Math.max(distance(points[0], points[3]), distance(points[1], points[2]));
@@ -181,7 +188,7 @@ export const getOcrBoundingBoxes = (ocrData: OcrBoundingBox[], imageSize: Size):
});
}
const rowThreshold = imageSize.height * 0.02;
const rowThreshold = metrics.contentHeight * 0.02;
boxes.sort((a, b) => {
const yDifference = a.points[0].y - b.points[0].y;
if (Math.abs(yDifference) < rowThreshold) {

View File

@@ -1,6 +1,6 @@
import type { Faces } from '$lib/stores/people.store';
import type { Size } from '$lib/utils/container-utils';
import { getBoundingBox, scaleFaceRectOnResize, type FaceRectState, type ResizeContext } from '$lib/utils/people-utils';
import type { ContentMetrics } from '$lib/utils/container-utils';
import { getBoundingBox } from '$lib/utils/people-utils';
const makeFace = (overrides: Partial<Faces> = {}): Faces => ({
id: 'face-1',
@@ -16,21 +16,21 @@ const makeFace = (overrides: Partial<Faces> = {}): Faces => ({
describe('getBoundingBox', () => {
it('should scale face coordinates to display dimensions', () => {
const face = makeFace();
const imageSize: Size = { width: 800, height: 600 };
const metrics: ContentMetrics = { contentWidth: 800, contentHeight: 600, offsetX: 0, offsetY: 0 };
const boxes = getBoundingBox([face], imageSize);
const boxes = getBoundingBox([face], metrics);
expect(boxes).toHaveLength(1);
expect(boxes[0]).toEqual({
id: 'face-1',
top: 600 * (750 / 3000),
left: 800 * (1000 / 4000),
width: 800 * (2000 / 4000) - 800 * (1000 / 4000),
height: 600 * (1500 / 3000) - 600 * (750 / 3000),
top: Math.round(600 * (750 / 3000)),
left: Math.round(800 * (1000 / 4000)),
width: Math.round(800 * (2000 / 4000) - 800 * (1000 / 4000)),
height: Math.round(600 * (1500 / 3000) - 600 * (750 / 3000)),
});
});
it('should map full-image face to full display area', () => {
it('should apply offsets for letterboxed display', () => {
const face = makeFace({
imageWidth: 1000,
imageHeight: 1000,
@@ -39,21 +39,49 @@ describe('getBoundingBox', () => {
boundingBoxX2: 1000,
boundingBoxY2: 1000,
});
const imageSize: Size = { width: 600, height: 600 };
const metrics: ContentMetrics = { contentWidth: 600, contentHeight: 600, offsetX: 100, offsetY: 0 };
const boxes = getBoundingBox([face], imageSize);
const boxes = getBoundingBox([face], metrics);
expect(boxes[0]).toEqual({
id: 'face-1',
top: 0,
left: 0,
left: 100,
width: 600,
height: 600,
});
});
it('should handle zoom by pre-scaled metrics', () => {
const face = makeFace({
imageWidth: 1000,
imageHeight: 1000,
boundingBoxX1: 0,
boundingBoxY1: 0,
boundingBoxX2: 500,
boundingBoxY2: 500,
});
const metrics: ContentMetrics = {
contentWidth: 1600,
contentHeight: 1200,
offsetX: -200,
offsetY: -100,
};
const boxes = getBoundingBox([face], metrics);
expect(boxes[0]).toEqual({
id: 'face-1',
top: -100,
left: -200,
width: 800,
height: 600,
});
});
it('should return empty array for empty faces', () => {
expect(getBoundingBox([], { width: 800, height: 600 })).toEqual([]);
const metrics: ContentMetrics = { contentWidth: 800, contentHeight: 600, offsetX: 0, offsetY: 0 };
expect(getBoundingBox([], metrics)).toEqual([]);
});
it('should handle multiple faces', () => {
@@ -61,103 +89,11 @@ describe('getBoundingBox', () => {
makeFace({ id: 'face-1', boundingBoxX1: 0, boundingBoxY1: 0, boundingBoxX2: 1000, boundingBoxY2: 1000 }),
makeFace({ id: 'face-2', boundingBoxX1: 2000, boundingBoxY1: 1500, boundingBoxX2: 3000, boundingBoxY2: 2500 }),
];
const metrics: ContentMetrics = { contentWidth: 800, contentHeight: 600, offsetX: 0, offsetY: 0 };
const boxes = getBoundingBox(faces, { width: 800, height: 600 });
const boxes = getBoundingBox(faces, metrics);
expect(boxes).toHaveLength(2);
expect(boxes[0].left).toBeLessThan(boxes[1].left);
});
});
describe('scaleFaceRectOnResize', () => {
const makeRect = (overrides: Partial<FaceRectState> = {}): FaceRectState => ({
left: 300,
top: 400,
scaleX: 1,
scaleY: 1,
...overrides,
});
const makePrevious = (overrides: Partial<ResizeContext> = {}): ResizeContext => ({
offsetX: 100,
offsetY: 50,
contentWidth: 800,
...overrides,
});
it('should preserve relative position when container doubles in size', () => {
const rect = makeRect({ left: 300, top: 250 });
const previous = makePrevious({ offsetX: 100, offsetY: 50, contentWidth: 800 });
const result = scaleFaceRectOnResize(rect, previous, { offsetX: 200, offsetY: 100, contentWidth: 1600 });
// imageRelLeft = (300 - 100) * 2 = 400, new left = 200 + 400 = 600
// imageRelTop = (250 - 50) * 2 = 400, new top = 100 + 400 = 500
expect(result.left).toBe(600);
expect(result.top).toBe(500);
expect(result.scaleX).toBe(2);
expect(result.scaleY).toBe(2);
});
it('should preserve relative position when container halves in size', () => {
const rect = makeRect({ left: 300, top: 250 });
const previous = makePrevious({ offsetX: 100, offsetY: 50, contentWidth: 800 });
const result = scaleFaceRectOnResize(rect, previous, { offsetX: 50, offsetY: 25, contentWidth: 400 });
// imageRelLeft = (300 - 100) * 0.5 = 100, new left = 50 + 100 = 150
// imageRelTop = (250 - 50) * 0.5 = 100, new top = 25 + 100 = 125
expect(result.left).toBe(150);
expect(result.top).toBe(125);
expect(result.scaleX).toBe(0.5);
expect(result.scaleY).toBe(0.5);
});
it('should handle no change in dimensions', () => {
const rect = makeRect({ left: 300, top: 250, scaleX: 1.5, scaleY: 1.5 });
const previous = makePrevious({ offsetX: 100, offsetY: 50, contentWidth: 800 });
const result = scaleFaceRectOnResize(rect, previous, { offsetX: 100, offsetY: 50, contentWidth: 800 });
expect(result.left).toBe(300);
expect(result.top).toBe(250);
expect(result.scaleX).toBe(1.5);
expect(result.scaleY).toBe(1.5);
});
it('should handle offset changes without content width change', () => {
const rect = makeRect({ left: 300, top: 250 });
const previous = makePrevious({ offsetX: 100, offsetY: 50, contentWidth: 800 });
const result = scaleFaceRectOnResize(rect, previous, { offsetX: 150, offsetY: 75, contentWidth: 800 });
// scale = 1, imageRelLeft = 200, imageRelTop = 200
// new left = 150 + 200 = 350, new top = 75 + 200 = 275
expect(result.left).toBe(350);
expect(result.top).toBe(275);
expect(result.scaleX).toBe(1);
expect(result.scaleY).toBe(1);
});
it('should compound existing scale factors', () => {
const rect = makeRect({ left: 300, top: 250, scaleX: 2, scaleY: 3 });
const previous = makePrevious({ contentWidth: 800 });
const result = scaleFaceRectOnResize(rect, previous, { ...previous, contentWidth: 1600 });
expect(result.scaleX).toBe(4);
expect(result.scaleY).toBe(6);
});
it('should handle rect at image origin (top-left of content area)', () => {
const rect = makeRect({ left: 100, top: 50 });
const previous = makePrevious({ offsetX: 100, offsetY: 50, contentWidth: 800 });
const result = scaleFaceRectOnResize(rect, previous, { offsetX: 200, offsetY: 100, contentWidth: 1600 });
// imageRelLeft = (100 - 100) * 2 = 0, new left = 200
// imageRelTop = (50 - 50) * 2 = 0, new top = 100
expect(result.left).toBe(200);
expect(result.top).toBe(100);
});
});

View File

@@ -1,52 +1,42 @@
import type { Faces } from '$lib/stores/people.store';
import { getAssetMediaUrl } from '$lib/utils';
import { mapNormalizedRectToContent, type ContentMetrics, type Rect, type Size } from '$lib/utils/container-utils';
import type { ContentMetrics } from '$lib/utils/container-utils';
import { AssetTypeEnum, type AssetFaceResponseDto } from '@immich/sdk';
export type BoundingBox = Rect & { id: string };
export interface BoundingBox {
id: string;
top: number;
left: number;
width: number;
height: number;
}
export const getBoundingBox = (faces: Faces[], imageSize: Size): BoundingBox[] => {
export const getBoundingBox = (faces: Faces[], metrics: ContentMetrics): BoundingBox[] => {
const boxes: BoundingBox[] = [];
for (const face of faces) {
const rect = mapNormalizedRectToContent(
{ x: face.boundingBoxX1 / face.imageWidth, y: face.boundingBoxY1 / face.imageHeight },
{ x: face.boundingBoxX2 / face.imageWidth, y: face.boundingBoxY2 / face.imageHeight },
imageSize,
);
const scaleX = metrics.contentWidth / face.imageWidth;
const scaleY = metrics.contentHeight / face.imageHeight;
boxes.push({ id: face.id, ...rect });
const coordinates = {
x1: scaleX * face.boundingBoxX1 + metrics.offsetX,
x2: scaleX * face.boundingBoxX2 + metrics.offsetX,
y1: scaleY * face.boundingBoxY1 + metrics.offsetY,
y2: scaleY * face.boundingBoxY2 + metrics.offsetY,
};
boxes.push({
id: face.id,
top: Math.round(coordinates.y1),
left: Math.round(coordinates.x1),
width: Math.round(coordinates.x2 - coordinates.x1),
height: Math.round(coordinates.y2 - coordinates.y1),
});
}
return boxes;
};
export type FaceRectState = {
left: number;
top: number;
scaleX: number;
scaleY: number;
};
export type ResizeContext = Pick<ContentMetrics, 'contentWidth' | 'offsetX' | 'offsetY'>;
export const scaleFaceRectOnResize = (
faceRect: FaceRectState,
previous: ResizeContext,
current: ResizeContext,
): FaceRectState => {
const scale = current.contentWidth / previous.contentWidth;
const imageRelativeLeft = (faceRect.left - previous.offsetX) * scale;
const imageRelativeTop = (faceRect.top - previous.offsetY) * scale;
return {
left: current.offsetX + imageRelativeLeft,
top: current.offsetY + imageRelativeTop,
scaleX: faceRect.scaleX * scale,
scaleY: faceRect.scaleY * scale,
};
};
export const zoomImageToBase64 = async (
face: AssetFaceResponseDto,
assetId: string,

View File

@@ -476,13 +476,6 @@
<ChangeDate menuItem />
<ChangeDescription menuItem />
<ChangeLocation menuItem />
{#if assetInteraction.selectedAssets.length === 1}
<MenuOption
text={$t('set_as_album_cover')}
icon={mdiImageOutline}
onClick={() => updateThumbnailUsingCurrentSelection()}
/>
{/if}
<ArchiveAction
menuItem
unarchive={assetInteraction.isAllArchived}
@@ -490,6 +483,13 @@
/>
<SetVisibilityAction menuItem onVisibilitySet={handleSetVisibility} />
{/if}
{#if assetInteraction.selectedAssets.length === 1}
<MenuOption
text={$t('set_as_album_cover')}
icon={mdiImageOutline}
onClick={() => updateThumbnailUsingCurrentSelection()}
/>
{/if}
{#if $preferences.tags.enabled && assetInteraction.isAllUserOwned}
<TagAction menuItem />

View File

@@ -178,19 +178,7 @@
const handleFirst = () => navigateToIndex(0);
const handlePrevious = () => navigateToIndex(Math.max(duplicatesIndex - 1, 0));
const handlePreviousShortcut = async () => {
if ($showAssetViewer) {
return;
}
await handlePrevious();
};
const handleNext = async () => navigateToIndex(Math.min(duplicatesIndex + 1, duplicates.length - 1));
const handleNextShortcut = async () => {
if ($showAssetViewer) {
return;
}
await handleNext();
};
const handleLast = () => navigateToIndex(duplicates.length - 1);
const navigateToIndex = async (index: number) =>
@@ -198,10 +186,12 @@
</script>
<svelte:document
use:shortcuts={[
{ shortcut: { key: 'ArrowLeft' }, onShortcut: handlePreviousShortcut },
{ shortcut: { key: 'ArrowRight' }, onShortcut: handleNextShortcut },
]}
use:shortcuts={$showAssetViewer
? []
: [
{ shortcut: { key: 'ArrowLeft' }, onShortcut: handlePrevious },
{ shortcut: { key: 'ArrowRight' }, onShortcut: handleNext },
]}
/>
<UserPageLayout title={data.meta.title + ` (${duplicates.length.toLocaleString($locale)})`} scrollbar={true}>