Compare commits

..

1 Commit

Author SHA1 Message Date
Yaros
8e9bec75ac fix(mobile): map unresponsive after viewing asset 2026-03-19 12:41:56 +01:00
72 changed files with 941 additions and 1601 deletions

View File

@@ -1,143 +0,0 @@
# Auto-closes fork PRs whose description does not follow the repository's PR
# template, labels them, closes PRs flagged as LLM-generated, and reopens a PR
# once its description is fixed. Indentation restored to canonical GitHub
# Actions structure; all trigger names, expressions, shell commands, GraphQL
# queries, and user-facing messages are reproduced verbatim from the source.
name: Auto-close PRs

on:
  pull_request_target: # zizmor: ignore[dangerous-triggers]
    types: [opened, edited, labeled]

# Default to no token permissions; each job requests only what it needs.
permissions: {}

jobs:
  # Checks whether the PR body contains every "## " section header from the
  # repository's PR template (HTML comments stripped first).
  parse_template:
    runs-on: ubuntu-latest
    # Only runs for fork PRs on open/edit events, not label events.
    if: ${{ github.event.action != 'labeled' && github.event.pull_request.head.repo.fork == true }}
    permissions:
      contents: read
    outputs:
      uses_template: ${{ steps.check.outputs.uses_template }}
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          # Only the template file is needed, not the full tree.
          sparse-checkout: .github/pull_request_template.md
          sparse-checkout-cone-mode: false
          persist-credentials: false
      - name: Check required sections
        id: check
        env:
          # Passed via env (not inline interpolation) to avoid shell injection
          # from attacker-controlled PR bodies.
          BODY: ${{ github.event.pull_request.body }}
        run: |
          OK=true
          while IFS= read -r header; do
            printf '%s\n' "$BODY" | grep -qF "$header" || OK=false
          done < <(sed '/<!--/,/-->/d' .github/pull_request_template.md | grep "^## ")
          echo "uses_template=$OK" >> "$GITHUB_OUTPUT"

  # Comments, closes, and labels a PR that doesn't follow the template.
  close_template:
    runs-on: ubuntu-latest
    needs: parse_template
    if: ${{ needs.parse_template.outputs.uses_template == 'false' && github.event.pull_request.state != 'closed' }}
    permissions:
      pull-requests: write
    steps:
      - name: Comment and close
        env:
          GH_TOKEN: ${{ github.token }}
          NODE_ID: ${{ github.event.pull_request.node_id }}
        run: |
          gh api graphql \
            -f prId="$NODE_ID" \
            -f body="This PR has been automatically closed as the description doesn't follow our template. After you edit it to match the template, the PR will automatically be reopened." \
            -f query='
              mutation CommentAndClosePR($prId: ID!, $body: String!) {
                addComment(input: {
                  subjectId: $prId,
                  body: $body
                }) {
                  __typename
                }
                closePullRequest(input: {
                  pullRequestId: $prId
                }) {
                  __typename
                }
              }'
      - name: Add label
        env:
          GH_TOKEN: ${{ github.token }}
          PR_NUMBER: ${{ github.event.pull_request.number }}
        run: gh pr edit "$PR_NUMBER" --add-label "auto-closed:template"

  # Comments and closes a PR when a maintainer applies the auto-closed:llm label.
  close_llm:
    runs-on: ubuntu-latest
    if: ${{ github.event.action == 'labeled' && github.event.label.name == 'auto-closed:llm' }}
    permissions:
      pull-requests: write
    steps:
      - name: Comment and close
        env:
          GH_TOKEN: ${{ github.token }}
          NODE_ID: ${{ github.event.pull_request.node_id }}
        run: |
          gh api graphql \
            -f prId="$NODE_ID" \
            -f body="Thank you for your interest in contributing to Immich! Unfortunately this PR looks like it was generated using an LLM. As noted in our [CONTRIBUTING.md](https://github.com/immich-app/immich/blob/main/CONTRIBUTING.md#use-of-generative-ai), we request that you don't use LLMs to generate PRs as those are not a good use of maintainer time." \
            -f query='
              mutation CommentAndClosePR($prId: ID!, $body: String!) {
                addComment(input: {
                  subjectId: $prId,
                  body: $body
                }) {
                  __typename
                }
                closePullRequest(input: {
                  pullRequestId: $prId
                }) {
                  __typename
                }
              }'

  # Reopens a template-closed PR once the description is fixed, provided no
  # other auto-closed:* label remains.
  reopen:
    runs-on: ubuntu-latest
    needs: parse_template
    if: >-
      ${{
        needs.parse_template.outputs.uses_template == 'true'
        && github.event.pull_request.state == 'closed'
        && contains(github.event.pull_request.labels.*.name, 'auto-closed:template')
      }}
    permissions:
      pull-requests: write
    steps:
      - name: Remove template label
        env:
          GH_TOKEN: ${{ github.token }}
          PR_NUMBER: ${{ github.event.pull_request.number }}
        # "|| true": label may already be gone; that should not fail the job.
        run: gh pr edit "$PR_NUMBER" --remove-label "auto-closed:template" || true
      - name: Check for remaining auto-closed labels
        id: check_labels
        env:
          GH_TOKEN: ${{ github.token }}
          PR_NUMBER: ${{ github.event.pull_request.number }}
        run: |
          REMAINING=$(gh pr view "$PR_NUMBER" --json labels \
            --jq '[.labels[].name | select(startswith("auto-closed:"))] | length')
          echo "remaining=$REMAINING" >> "$GITHUB_OUTPUT"
      - name: Reopen PR
        if: ${{ steps.check_labels.outputs.remaining == '0' }}
        env:
          GH_TOKEN: ${{ github.token }}
          NODE_ID: ${{ github.event.pull_request.node_id }}
        run: |
          gh api graphql \
            -f prId="$NODE_ID" \
            -f query='
              mutation ReopenPR($prId: ID!) {
                reopenPullRequest(input: {
                  pullRequestId: $prId
                }) {
                  __typename
                }
              }'

80
.github/workflows/check-pr-template.yml vendored Normal file
View File

@@ -0,0 +1,80 @@
# .github/workflows/check-pr-template.yml
# Closes fork PRs whose description lacks the required template sections and
# reopens them once the sections are added. Indentation restored to canonical
# GitHub Actions structure; all expressions, shell commands, GraphQL queries,
# and user-facing messages are reproduced verbatim from the source.
name: Check PR Template

on:
  pull_request_target: # zizmor: ignore[dangerous-triggers]
    types: [opened, edited]

# Default to no token permissions; each job requests only what it needs.
permissions: {}

jobs:
  # Checks whether the PR body contains every "## " section header from the
  # repository's PR template (HTML comments stripped first).
  parse:
    runs-on: ubuntu-latest
    # Only applies to PRs from forks.
    if: ${{ github.event.pull_request.head.repo.fork == true }}
    permissions:
      contents: read
    outputs:
      uses_template: ${{ steps.check.outputs.uses_template }}
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          # Only the template file is needed, not the full tree.
          sparse-checkout: .github/pull_request_template.md
          sparse-checkout-cone-mode: false
          persist-credentials: false
      - name: Check required sections
        id: check
        env:
          # Passed via env (not inline interpolation) to avoid shell injection
          # from attacker-controlled PR bodies.
          BODY: ${{ github.event.pull_request.body }}
        run: |
          OK=true
          while IFS= read -r header; do
            printf '%s\n' "$BODY" | grep -qF "$header" || OK=false
          done < <(sed '/<!--/,/-->/d' .github/pull_request_template.md | grep "^## ")
          echo "uses_template=$OK" >> "$GITHUB_OUTPUT"

  # Acts on the parse result: close non-conforming open PRs, reopen closed
  # PRs whose description now conforms.
  act:
    runs-on: ubuntu-latest
    needs: parse
    permissions:
      pull-requests: write
    steps:
      - name: Close PR
        if: ${{ needs.parse.outputs.uses_template == 'false' && github.event.pull_request.state != 'closed' }}
        env:
          GH_TOKEN: ${{ github.token }}
          NODE_ID: ${{ github.event.pull_request.node_id }}
        run: |
          gh api graphql \
            -f prId="$NODE_ID" \
            -f body="This PR has been automatically closed as the description doesn't follow our template. After you edit it to match the template, the PR will automatically be reopened." \
            -f query='
              mutation CommentAndClosePR($prId: ID!, $body: String!) {
                addComment(input: {
                  subjectId: $prId,
                  body: $body
                }) {
                  __typename
                }
                closePullRequest(input: {
                  pullRequestId: $prId
                }) {
                  __typename
                }
              }'
      - name: Reopen PR (sections now present, PR closed)
        if: ${{ needs.parse.outputs.uses_template == 'true' && github.event.pull_request.state == 'closed' }}
        env:
          GH_TOKEN: ${{ github.token }}
          NODE_ID: ${{ github.event.pull_request.node_id }}
        run: |
          gh api graphql \
            -f prId="$NODE_ID" \
            -f query='
              mutation ReopenPR($prId: ID!) {
                reopenPullRequest(input: {
                  pullRequestId: $prId
                }) {
                  __typename
                }
              }'

38
.github/workflows/close-llm-pr.yml vendored Normal file
View File

@@ -0,0 +1,38 @@
# .github/workflows/close-llm-pr.yml
# Comments on and closes a PR when a maintainer applies the llm-generated
# label. Indentation restored to canonical GitHub Actions structure; all
# expressions, the GraphQL mutation, and the user-facing message are
# reproduced verbatim from the source.
name: Close LLM-generated PRs

on:
  # NOTE(review): sibling workflows annotate this trigger with
  # "zizmor: ignore[dangerous-triggers]" — confirm whether this file should too.
  pull_request_target:
    types: [labeled]

# Default to no token permissions; the job requests only what it needs.
permissions: {}

jobs:
  comment_and_close:
    runs-on: ubuntu-latest
    # Only fires for the specific maintainer-applied label.
    if: ${{ github.event.label.name == 'llm-generated' }}
    permissions:
      pull-requests: write
    steps:
      - name: Comment and close
        env:
          GH_TOKEN: ${{ github.token }}
          NODE_ID: ${{ github.event.pull_request.node_id }}
        run: |
          gh api graphql \
            -f prId="$NODE_ID" \
            -f body="Thank you for your interest in contributing to Immich! Unfortunately this PR looks like it was generated using an LLM. As noted in our [CONTRIBUTING.md](https://github.com/immich-app/immich/blob/main/CONTRIBUTING.md#use-of-generative-ai), we request that you don't use LLMs to generate PRs as those are not a good use of maintainer time." \
            -f query='
              mutation CommentAndClosePR($prId: ID!, $body: String!) {
                addComment(input: {
                  subjectId: $prId,
                  body: $body
                }) {
                  __typename
                }
                closePullRequest(input: {
                  pullRequestId: $prId
                }) {
                  __typename
                }
              }'

View File

@@ -151,7 +151,6 @@ jobs:
body_path: misc/release/notes.tmpl
files: |
docker/docker-compose.yml
docker/docker-compose.rootless.yml
docker/example.env
docker/hwaccel.ml.yml
docker/hwaccel.transcoding.yml

View File

@@ -1,6 +1,6 @@
{
"name": "@immich/cli",
"version": "2.6.1",
"version": "2.6.0",
"description": "Command Line Interface (CLI) for Immich",
"type": "module",
"exports": "./dist/index.js",
@@ -35,7 +35,8 @@
"prettier-plugin-organize-imports": "^4.0.0",
"typescript": "^5.3.3",
"typescript-eslint": "^8.28.0",
"vite": "^8.0.0",
"vite": "^7.0.0",
"vite-tsconfig-paths": "^6.0.0",
"vitest": "^4.0.0",
"vitest-fetch-mock": "^0.4.0",
"yaml": "^2.3.1"

View File

@@ -1,12 +1,10 @@
import { defineConfig, UserConfig } from 'vite';
import tsconfigPaths from 'vite-tsconfig-paths';
export default defineConfig({
resolve: {
alias: { src: '/src' },
tsconfigPaths: true,
},
resolve: { alias: { src: '/src' } },
build: {
rolldownOptions: {
rollupOptions: {
input: 'src/index.ts',
output: {
dir: 'dist',
@@ -18,6 +16,7 @@ export default defineConfig({
// bundle everything except for Node built-ins
noExternal: /^(?!node:).*$/,
},
plugins: [tsconfigPaths()],
test: {
name: 'cli:unit',
globals: true,

View File

@@ -1,7 +1,7 @@
[
{
"label": "v2.6.1",
"url": "https://docs.v2.6.1.archive.immich.app"
"label": "v2.6.0",
"url": "https://docs.v2.6.0.archive.immich.app"
},
{
"label": "v2.5.6",

View File

@@ -1,6 +1,6 @@
{
"name": "immich-e2e",
"version": "2.6.1",
"version": "2.6.0",
"description": "",
"main": "index.js",
"type": "module",

View File

@@ -1,51 +0,0 @@
import { AssetMediaResponseDto, LoginResponseDto, updateAssets } from '@immich/sdk';
import { expect, test } from '@playwright/test';
import crypto from 'node:crypto';
import { asBearerAuth, utils } from 'src/utils';
test.describe('Duplicates Utility', () => {
let admin: LoginResponseDto;
let firstAsset: AssetMediaResponseDto;
let secondAsset: AssetMediaResponseDto;
test.beforeAll(async () => {
utils.initSdk();
await utils.resetDatabase();
admin = await utils.adminSetup();
});
test.beforeEach(async ({ context }) => {
[firstAsset, secondAsset] = await Promise.all([
utils.createAsset(admin.accessToken, { deviceAssetId: 'duplicate-a' }),
utils.createAsset(admin.accessToken, { deviceAssetId: 'duplicate-b' }),
]);
await updateAssets(
{
assetBulkUpdateDto: {
ids: [firstAsset.id, secondAsset.id],
duplicateId: crypto.randomUUID(),
},
},
{ headers: asBearerAuth(admin.accessToken) },
);
await utils.setAuthCookies(context, admin.accessToken);
});
test('navigates with arrow keys between duplicate preview assets', async ({ page }) => {
await page.goto('/utilities/duplicates');
await page.getByRole('button', { name: 'View' }).first().click();
await page.waitForSelector('#immich-asset-viewer');
const getViewedAssetId = () => new URL(page.url()).pathname.split('/').at(-1) ?? '';
const initialAssetId = getViewedAssetId();
expect([firstAsset.id, secondAsset.id]).toContain(initialAssetId);
await page.keyboard.press('ArrowRight');
await expect.poll(getViewedAssetId).not.toBe(initialAssetId);
await page.keyboard.press('ArrowLeft');
await expect.poll(getViewedAssetId).toBe(initialAssetId);
});
});

View File

@@ -1,6 +1,6 @@
{
"name": "immich-i18n",
"version": "2.6.1",
"version": "2.6.0",
"private": true,
"scripts": {
"format": "prettier --cache --check .",

View File

@@ -1,6 +1,6 @@
[project]
name = "immich-ml"
version = "2.6.1"
version = "2.6.0"
description = ""
authors = [{ name = "Hau Tran", email = "alex.tran1502@gmail.com" }]
requires-python = ">=3.11,<4.0"

View File

@@ -898,7 +898,7 @@ wheels = [
[[package]]
name = "immich-ml"
version = "2.6.1"
version = "2.6.0"
source = { editable = "." }
dependencies = [
{ name = "aiocache" },

View File

@@ -23,18 +23,10 @@ import okhttp3.HttpUrl
import okhttp3.HttpUrl.Companion.toHttpUrlOrNull
import okhttp3.OkHttpClient
import org.chromium.net.CronetEngine
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.withContext
import kotlinx.serialization.Serializable
import kotlinx.serialization.json.Json
import java.io.ByteArrayInputStream
import java.io.File
import java.io.IOException
import java.nio.file.FileVisitResult
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.SimpleFileVisitor
import java.nio.file.attribute.BasicFileAttributes
import java.net.Authenticator
import java.net.CookieHandler
import java.net.PasswordAuthentication
@@ -285,13 +277,10 @@ object HttpClientManager {
return result
}
suspend fun rebuildCronetEngine(): Result<Long> {
return runCatching {
cronetEngine?.shutdown()
val deletionResult = deleteFolderAndGetSize(cronetStoragePath.toPath())
cronetEngine = buildCronetEngine()
deletionResult
}
fun rebuildCronetEngine(): CronetEngine {
val old = cronetEngine!!
cronetEngine = buildCronetEngine()
return old
}
val cronetStoragePath: File get() = cronetStorageDir
@@ -312,7 +301,7 @@ object HttpClientManager {
}
}
fun buildCronetEngine(): CronetEngine {
private fun buildCronetEngine(): CronetEngine {
return CronetEngine.Builder(appContext)
.enableHttp2(true)
.enableQuic(true)
@@ -323,27 +312,6 @@ object HttpClientManager {
.build()
}
private suspend fun deleteFolderAndGetSize(root: Path): Long = withContext(Dispatchers.IO) {
var totalSize = 0L
Files.walkFileTree(root, object : SimpleFileVisitor<Path>() {
override fun visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult {
totalSize += attrs.size()
Files.delete(file)
return FileVisitResult.CONTINUE
}
override fun postVisitDirectory(dir: Path, exc: IOException?): FileVisitResult {
if (dir != root) {
Files.delete(dir)
}
return FileVisitResult.CONTINUE
}
})
totalSize
}
private fun build(cacheDir: File): OkHttpClient {
val connectionPool = ConnectionPool(
maxIdleConnections = KEEP_ALIVE_CONNECTIONS,

View File

@@ -21,6 +21,11 @@ import java.io.EOFException
import java.io.File
import java.io.IOException
import java.nio.ByteBuffer
import java.nio.file.FileVisitResult
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.SimpleFileVisitor
import java.nio.file.attribute.BasicFileAttributes
import java.util.concurrent.ConcurrentHashMap
private class RemoteRequest(val cancellationSignal: CancellationSignal)
@@ -200,15 +205,18 @@ private class CronetImageFetcher : ImageFetcher {
private fun onDrained() {
val onCacheCleared = synchronized(stateLock) {
val onCacheCleared = this.onCacheCleared
val onCacheCleared = onCacheCleared
this.onCacheCleared = null
onCacheCleared
} ?: return
CoroutineScope(Dispatchers.IO).launch {
val result = HttpClientManager.rebuildCronetEngine()
synchronized(stateLock) { draining = false }
onCacheCleared(result)
}
if (onCacheCleared != null) {
val oldEngine = HttpClientManager.rebuildCronetEngine()
oldEngine.shutdown()
CoroutineScope(Dispatchers.IO).launch {
val result = runCatching { deleteFolderAndGetSize(HttpClientManager.cronetStoragePath.toPath()) }
synchronized(stateLock) { draining = false }
onCacheCleared(result)
}
}
}
@@ -298,6 +306,26 @@ private class CronetImageFetcher : ImageFetcher {
}
}
suspend fun deleteFolderAndGetSize(root: Path): Long = withContext(Dispatchers.IO) {
var totalSize = 0L
Files.walkFileTree(root, object : SimpleFileVisitor<Path>() {
override fun visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult {
totalSize += attrs.size()
Files.delete(file)
return FileVisitResult.CONTINUE
}
override fun postVisitDirectory(dir: Path, exc: IOException?): FileVisitResult {
if (dir != root) {
Files.delete(dir)
}
return FileVisitResult.CONTINUE
}
})
totalSize
}
}
private class OkHttpImageFetcher private constructor(

View File

@@ -35,8 +35,8 @@ platform :android do
task: 'bundle',
build_type: 'Release',
properties: {
"android.injected.version.code" => 3039,
"android.injected.version.name" => "2.6.1",
"android.injected.version.code" => 3038,
"android.injected.version.name" => "2.6.0",
}
)
upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')

View File

@@ -80,7 +80,7 @@
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>2.6.1</string>
<string>2.6.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleURLTypes</key>

View File

@@ -7,7 +7,7 @@ const Map<String, Locale> locales = {
'Arabic (ar)': Locale('ar'),
'Bulgarian (bg)': Locale('bg'),
'Catalan (ca)': Locale('ca'),
'Chinese Simplified (zh_CN)': Locale.fromSubtags(languageCode: 'zh', scriptCode: 'Hans'),
'Chinese Simplified (zh_CN)': Locale.fromSubtags(languageCode: 'zh', scriptCode: 'SIMPLIFIED'),
'Chinese Traditional (zh_TW)': Locale.fromSubtags(languageCode: 'zh', scriptCode: 'Hant'),
'Croatian (hr)': Locale('hr'),
'Czech (cs)': Locale('cs'),

View File

@@ -79,7 +79,6 @@ class _DriftPeopleCollectionPageState extends ConsumerState<DriftPeopleCollectio
final person = people[index];
return Column(
key: ValueKey(person.id),
children: [
GestureDetector(
onTap: () {
@@ -89,7 +88,6 @@ class _DriftPeopleCollectionPageState extends ConsumerState<DriftPeopleCollectio
shape: const CircleBorder(side: BorderSide.none),
elevation: 3,
child: CircleAvatar(
key: ValueKey('avatar-${person.id}'),
maxRadius: isTablet ? 100 / 2 : 96 / 2,
backgroundImage: RemoteImageProvider(url: getFaceThumbnailUrl(person.id)),
),

View File

@@ -69,7 +69,6 @@ class DriftSearchPage extends HookConsumerWidget {
);
final previousFilter = useState<SearchFilter?>(null);
final hasRequestedSearch = useState<bool>(false);
final dateInputFilter = useState<DateFilterInputModel?>(null);
final peopleCurrentFilterWidget = useState<Widget?>(null);
@@ -92,11 +91,9 @@ class DriftSearchPage extends HookConsumerWidget {
if (filter.isEmpty) {
previousFilter.value = null;
hasRequestedSearch.value = false;
return;
}
hasRequestedSearch.value = true;
unawaited(ref.read(paginatedSearchProvider.notifier).search(filter));
previousFilter.value = filter;
}
@@ -110,8 +107,6 @@ class DriftSearchPage extends HookConsumerWidget {
searchPreFilter() {
if (preFilter != null) {
Future.delayed(Duration.zero, () {
filter.value = preFilter;
textSearchController.clear();
searchFilter(preFilter);
if (preFilter.location.city != null) {
@@ -724,7 +719,7 @@ class DriftSearchPage extends HookConsumerWidget {
),
),
),
if (!hasRequestedSearch.value)
if (filter.value.isEmpty)
const _SearchSuggestions()
else
_SearchResultGrid(onScrollEnd: loadMoreSearchResults),

View File

@@ -24,22 +24,20 @@ class SimilarPhotosActionButton extends ConsumerWidget {
}
ref.invalidate(assetViewerProvider);
ref.invalidate(paginatedSearchProvider);
ref.read(searchPreFilterProvider.notifier)
..clear()
..setFilter(
SearchFilter(
assetId: assetId,
people: {},
location: SearchLocationFilter(),
camera: SearchCameraFilter(),
date: SearchDateFilter(),
display: SearchDisplayFilters(isNotInAlbum: false, isArchive: false, isFavorite: false),
rating: SearchRatingFilter(),
mediaType: AssetType.image,
),
);
ref
.read(searchPreFilterProvider.notifier)
.setFilter(
SearchFilter(
assetId: assetId,
people: {},
location: SearchLocationFilter(),
camera: SearchCameraFilter(),
date: SearchDateFilter(),
display: SearchDisplayFilters(isNotInAlbum: false, isArchive: false, isFavorite: false),
rating: SearchRatingFilter(),
mediaType: AssetType.image,
),
);
unawaited(context.navigateTo(const DriftSearchRoute()));
}

View File

@@ -39,16 +39,6 @@ class _RatingBarState extends State<RatingBar> {
_currentRating = widget.initialRating;
}
@override
void didUpdateWidget(covariant RatingBar oldWidget) {
super.didUpdateWidget(oldWidget);
if (oldWidget.initialRating != widget.initialRating && _currentRating != widget.initialRating) {
setState(() {
_currentRating = widget.initialRating;
});
}
}
void _updateRating(Offset localPosition, bool isRTL, {bool isTap = false}) {
final totalWidth = widget.itemCount * widget.itemSize + (widget.itemCount - 1) * widget.starPadding;
double dx = localPosition.dx;

View File

@@ -28,6 +28,17 @@ class AppNavigationObserver extends AutoRouterObserver {
});
}
@override
void didPop(Route route, Route? previousRoute) {
_handleLockedViewState(previousRoute ?? route, null);
_handleDriftLockedFolderState(previousRoute ?? route, null);
Future(() {
ref.read(currentRouteNameProvider.notifier).state = previousRoute?.settings.name;
ref.read(previousRouteNameProvider.notifier).state = ref.read(previousRouteNameProvider);
ref.read(previousRouteDataProvider.notifier).state = previousRoute?.settings;
});
}
_handleLockedViewState(Route route, Route? previousRoute) {
final isInLockedView = ref.read(inLockedViewProvider);
final isFromLockedViewToDetailView =

View File

@@ -6,7 +6,6 @@ import 'package:device_info_plus/device_info_plus.dart';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/infrastructure/repositories/network.repository.dart';
import 'package:immich_mobile/models/auth/auxilary_endpoint.model.dart';
import 'package:immich_mobile/utils/debug_print.dart';
import 'package:immich_mobile/utils/url_helper.dart';
import 'package:logging/logging.dart';
@@ -185,8 +184,8 @@ class ApiService {
if (externalJson != null) {
final List<dynamic> list = jsonDecode(externalJson);
for (final entry in list) {
final url = AuxilaryEndpoint.fromJson(entry).url;
if (url.isNotEmpty) urls.add(url);
final url = entry['url'] as String?;
if (url != null && url.isNotEmpty) urls.add(url);
}
}
return urls;

View File

@@ -20,7 +20,7 @@ final class CustomImageCache implements ImageCache {
set maximumSize(int value) => _small.maximumSize = value;
@override
set maximumSizeBytes(int value) => _small.maximumSizeBytes = value;
set maximumSizeBytes(int value) => _small.maximumSize = value;
@override
void clear() {

View File

@@ -16,15 +16,9 @@ class SearchDropdown<T> extends StatelessWidget {
final Widget? label;
final Widget? leadingIcon;
static const WidgetStatePropertyAll<EdgeInsetsGeometry> _optionPadding = WidgetStatePropertyAll<EdgeInsetsGeometry>(
EdgeInsetsDirectional.fromSTEB(16, 0, 16, 0),
);
@override
Widget build(BuildContext context) {
final mediaQuery = MediaQuery.of(context);
final maxMenuHeight = mediaQuery.size.height * 0.5 - mediaQuery.viewPadding.bottom;
const menuStyle = MenuStyle(
final menuStyle = const MenuStyle(
shape: WidgetStatePropertyAll<OutlinedBorder>(
RoundedRectangleBorder(borderRadius: BorderRadius.all(Radius.circular(15))),
),
@@ -32,26 +26,11 @@ class SearchDropdown<T> extends StatelessWidget {
return LayoutBuilder(
builder: (context, constraints) {
final styledEntries = dropdownMenuEntries
.map(
(entry) => DropdownMenuEntry<T>(
value: entry.value,
label: entry.label,
labelWidget: entry.labelWidget,
enabled: entry.enabled,
leadingIcon: entry.leadingIcon,
trailingIcon: entry.trailingIcon,
style: (entry.style ?? const ButtonStyle()).copyWith(padding: _optionPadding),
),
)
.toList(growable: false);
return DropdownMenu(
controller: controller,
leadingIcon: leadingIcon,
width: constraints.maxWidth,
menuHeight: maxMenuHeight,
dropdownMenuEntries: styledEntries,
dropdownMenuEntries: dropdownMenuEntries,
label: label,
menuStyle: menuStyle,
trailingIcon: const Icon(Icons.arrow_drop_down_rounded),

View File

@@ -3,7 +3,7 @@ Immich API
This Dart package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:
- API version: 2.6.1
- API version: 2.6.0
- Generator version: 7.8.0
- Build package: org.openapitools.codegen.languages.DartClientCodegen

View File

@@ -2,7 +2,7 @@ name: immich_mobile
description: Immich - selfhosted backup media file on mobile phone
publish_to: 'none'
version: 2.6.1+3039
version: 2.6.0+3038
environment:
sdk: '>=3.8.0 <4.0.0'

View File

@@ -15166,7 +15166,7 @@
"info": {
"title": "Immich",
"description": "Immich API",
"version": "2.6.1",
"version": "2.6.0",
"contact": {}
},
"tags": [

View File

@@ -1,6 +1,6 @@
{
"name": "@immich/sdk",
"version": "2.6.1",
"version": "2.6.0",
"description": "Auto-generated TypeScript SDK for the Immich API",
"type": "module",
"main": "./build/index.js",

View File

@@ -1,6 +1,6 @@
/**
* Immich
* 2.6.1
* 2.6.0
* DO NOT MODIFY - This file has been generated using oazapfts.
* See https://www.npmjs.com/package/oazapfts
*/

View File

@@ -1,6 +1,6 @@
{
"name": "immich-monorepo",
"version": "2.6.1",
"version": "2.6.0",
"description": "Monorepo for Immich",
"private": true,
"packageManager": "pnpm@10.30.3+sha512.c961d1e0a2d8e354ecaa5166b822516668b7f44cb5bd95122d590dd81922f606f5473b6d23ec4a5be05e7fcd18e8488d47d978bbe981872f1145d06e9a740017",

896
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
{
"name": "immich",
"version": "2.6.1",
"version": "2.6.0",
"description": "",
"author": "",
"private": true,
@@ -24,7 +24,7 @@
"typeorm": "typeorm",
"migrations:debug": "sql-tools -u ${DB_URL:-postgres://postgres:postgres@localhost:5432/immich} migrations generate --debug",
"migrations:generate": "sql-tools -u ${DB_URL:-postgres://postgres:postgres@localhost:5432/immich} migrations generate",
"migrations:create": "sql-tools -u ${DB_URL:-postgres://postgres:postgres@localhost:5432/immich} migrations create",
"migrations:create": "sql-tools -u ${DB_URL:-postgres://postgres:postgres@localhost:5432/immich} migrations generate",
"migrations:run": "sql-tools -u ${DB_URL:-postgres://postgres:postgres@localhost:5432/immich} migrations run",
"migrations:revert": "sql-tools -u ${DB_URL:-postgres://postgres:postgres@localhost:5432/immich} migrations revert",
"schema:drop": "sql-tools -u ${DB_URL:-postgres://postgres:postgres@localhost:5432/immich} query 'DROP schema public cascade; CREATE schema public;'",

View File

@@ -169,7 +169,6 @@ export type AuthSharedLink = {
id: string;
expiresAt: Date | null;
userId: string;
albumId: string | null;
showExif: boolean;
allowUpload: boolean;
allowDownload: boolean;
@@ -358,6 +357,15 @@ export const columns = {
authUser: ['user.id', 'user.name', 'user.email', 'user.isAdmin', 'user.quotaUsageInBytes', 'user.quotaSizeInBytes'],
authApiKey: ['api_key.id', 'api_key.permissions'],
authSession: ['session.id', 'session.updatedAt', 'session.pinExpiresAt', 'session.appVersion'],
authSharedLink: [
'shared_link.id',
'shared_link.userId',
'shared_link.expiresAt',
'shared_link.showExif',
'shared_link.allowUpload',
'shared_link.allowDownload',
'shared_link.password',
],
user: userColumns,
userWithPrefix: userWithPrefixColumns,
userAdmin: [

View File

@@ -173,7 +173,6 @@ order by
select
"shared_link"."id",
"shared_link"."userId",
"shared_link"."albumId",
"shared_link"."expiresAt",
"shared_link"."showExif",
"shared_link"."allowUpload",
@@ -212,7 +211,6 @@ where
select
"shared_link"."id",
"shared_link"."userId",
"shared_link"."albumId",
"shared_link"."expiresAt",
"shared_link"."showExif",
"shared_link"."allowUpload",

View File

@@ -330,7 +330,6 @@ export class AlbumRepository {
await db
.insertInto('album_asset')
.values(assetIds.map((assetId) => ({ albumId, assetId })))
.onConflict((oc) => oc.doNothing())
.execute();
}

View File

@@ -119,12 +119,8 @@ export class MetadataRepository {
}
async writeTags(path: string, tags: Partial<Tags>): Promise<void> {
// If exiftool assigns a field with ^= instead of =, empty values will be written too.
// Since exiftool-vendored doesn't support an option for this, we append the ^ to the name of the tag instead.
// https://exiftool.org/exiftool_pod.html#:~:text=is%20used%20to%20write%20an%20empty%20string
const tagsToWrite = Object.fromEntries(Object.entries(tags).map(([key, value]) => [`${key}^`, value]));
try {
await this.exiftool.write(path, tagsToWrite);
await this.exiftool.write(path, tags);
} catch (error) {
this.logger.warn(`Error writing exif data (${path}): ${error}`);
}

View File

@@ -202,14 +202,7 @@ export class SharedLinkRepository {
.leftJoin('album', 'album.id', 'shared_link.albumId')
.where('album.deletedAt', 'is', null)
.select((eb) => [
'shared_link.id',
'shared_link.userId',
'shared_link.albumId',
'shared_link.expiresAt',
'shared_link.showExif',
'shared_link.allowUpload',
'shared_link.allowDownload',
'shared_link.password',
...columns.authSharedLink,
jsonObjectFrom(
eb.selectFrom('user').select(columns.authUser).whereRef('user.id', '=', 'shared_link.userId'),
).as('user'),

View File

@@ -1,13 +0,0 @@
import { Kysely, sql } from 'kysely';
export async function up(db: Kysely<any>): Promise<void> {
await sql`
DELETE FROM "shared_link_asset"
USING "shared_link"
WHERE "shared_link_asset"."sharedLinkId" = "shared_link"."id" AND "shared_link"."type" = 'ALBUM';
`.execute(db);
}
export async function down(): Promise<void> {
// noop
}

View File

@@ -64,9 +64,8 @@ export class UserTable {
@Column({ unique: true, nullable: true, default: null })
storageLabel!: string | null;
// TODO remove default, make nullable, and convert empty spaces to null
@Column({ default: '' })
name!: string;
name!: Generated<string>;
@Column({ type: 'bigint', nullable: true })
quotaSizeInBytes!: ColumnType<number> | null;

View File

@@ -165,12 +165,6 @@ export class AlbumService extends BaseService {
}
async addAssets(auth: AuthDto, id: string, dto: BulkIdsDto): Promise<BulkIdResponseDto[]> {
if (auth.sharedLink) {
this.logger.deprecate(
'Assets uploaded to a shared link are automatically added and calling this endpoint is no longer necessary. It will be removed in the next major release.',
);
}
const album = await this.findOrFail(id, { withAssets: false });
await this.requireAccess({ auth, permission: Permission.AlbumAssetCreate, ids: [id] });
@@ -201,12 +195,6 @@ export class AlbumService extends BaseService {
}
async addAssetsToAlbums(auth: AuthDto, dto: AlbumsAddAssetsDto): Promise<AlbumsAddAssetsResponseDto> {
if (auth.sharedLink) {
this.logger.deprecate(
'Assets uploaded to a shared link are automatically added and calling this endpoint is no longer necessary. It will be removed in the next major release.',
);
}
const results: AlbumsAddAssetsResponseDto = {
success: false,
error: BulkIdErrorReason.DUPLICATE,

View File

@@ -2,7 +2,7 @@ import { BadRequestException, Injectable, InternalServerErrorException, NotFound
import { extname } from 'node:path';
import sanitize from 'sanitize-filename';
import { StorageCore } from 'src/cores/storage.core';
import { Asset, AuthSharedLink } from 'src/database';
import { Asset } from 'src/database';
import {
AssetBulkUploadCheckResponseDto,
AssetMediaResponseDto,
@@ -152,7 +152,7 @@ export class AssetMediaService extends BaseService {
const asset = await this.create(auth.user.id, dto, file, sidecarFile);
if (auth.sharedLink) {
await this.addToSharedLink(auth.sharedLink, asset.id);
await this.sharedLinkRepository.addAssets(auth.sharedLink.id, [asset.id]);
}
await this.userRepository.updateUsage(auth.user.id, file.size);
@@ -326,12 +326,6 @@ export class AssetMediaService extends BaseService {
};
}
private async addToSharedLink(sharedLink: AuthSharedLink, assetId: string) {
await (sharedLink.albumId
? this.albumRepository.addAssetIds(sharedLink.albumId, [assetId])
: this.sharedLinkRepository.addAssets(sharedLink.id, [assetId]));
}
private async handleUploadError(
error: any,
auth: AuthDto,
@@ -353,7 +347,7 @@ export class AssetMediaService extends BaseService {
}
if (auth.sharedLink) {
await this.addToSharedLink(auth.sharedLink, duplicateId);
await this.sharedLinkRepository.addAssets(auth.sharedLink.id, [duplicateId]);
}
return { status: AssetMediaStatus.DUPLICATE, id: duplicateId };

View File

@@ -8,7 +8,6 @@ import { AuthService } from 'src/services/auth.service';
import { UserMetadataItem } from 'src/types';
import { ApiKeyFactory } from 'test/factories/api-key.factory';
import { AuthFactory } from 'test/factories/auth.factory';
import { OAuthProfileFactory } from 'test/factories/oauth-profile.factory';
import { SessionFactory } from 'test/factories/session.factory';
import { UserFactory } from 'test/factories/user.factory';
import { sharedLinkStub } from 'test/fixtures/shared-link.stub';
@@ -16,7 +15,31 @@ import { systemConfigStub } from 'test/fixtures/system-config.stub';
import { newUuid } from 'test/small.factory';
import { newTestService, ServiceMocks } from 'test/utils';
const oauthResponse = ({
id,
email,
name,
profileImagePath,
}: {
id: string;
email: string;
name: string;
profileImagePath?: string;
}) => ({
accessToken: 'cmFuZG9tLWJ5dGVz',
userId: id,
userEmail: email,
name,
profileImagePath,
isAdmin: false,
isOnboarded: false,
shouldChangePassword: false,
});
// const token = Buffer.from('my-api-key', 'utf8').toString('base64');
const email = 'test@immich.com';
const sub = 'my-auth-user-sub';
const loginDetails = {
isSecure: true,
clientIp: '127.0.0.1',
@@ -25,9 +48,11 @@ const loginDetails = {
appVersion: null,
};
const dto = {
email,
password: 'password',
const fixtures = {
login: {
email,
password: 'password',
},
};
describe(AuthService.name, () => {
@@ -38,6 +63,7 @@ describe(AuthService.name, () => {
({ sut, mocks } = newTestService(AuthService));
mocks.oauth.authorize.mockResolvedValue({ url: 'http://test', state: 'state', codeVerifier: 'codeVerifier' });
mocks.oauth.getProfile.mockResolvedValue({ sub, email });
mocks.oauth.getLogoutEndpoint.mockResolvedValue('http://end-session-endpoint');
});
@@ -49,13 +75,13 @@ describe(AuthService.name, () => {
it('should throw an error if password login is disabled', async () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.disabled);
await expect(sut.login(dto, loginDetails)).rejects.toBeInstanceOf(UnauthorizedException);
await expect(sut.login(fixtures.login, loginDetails)).rejects.toBeInstanceOf(UnauthorizedException);
});
it('should check the user exists', async () => {
mocks.user.getByEmail.mockResolvedValue(void 0);
await expect(sut.login(dto, loginDetails)).rejects.toBeInstanceOf(UnauthorizedException);
await expect(sut.login(fixtures.login, loginDetails)).rejects.toBeInstanceOf(UnauthorizedException);
expect(mocks.user.getByEmail).toHaveBeenCalledTimes(1);
});
@@ -63,7 +89,7 @@ describe(AuthService.name, () => {
it('should check the user has a password', async () => {
mocks.user.getByEmail.mockResolvedValue({} as UserAdmin);
await expect(sut.login(dto, loginDetails)).rejects.toBeInstanceOf(UnauthorizedException);
await expect(sut.login(fixtures.login, loginDetails)).rejects.toBeInstanceOf(UnauthorizedException);
expect(mocks.user.getByEmail).toHaveBeenCalledTimes(1);
});
@@ -74,7 +100,7 @@ describe(AuthService.name, () => {
mocks.user.getByEmail.mockResolvedValue(user);
mocks.session.create.mockResolvedValue(session);
await expect(sut.login(dto, loginDetails)).resolves.toEqual({
await expect(sut.login(fixtures.login, loginDetails)).resolves.toEqual({
accessToken: 'cmFuZG9tLWJ5dGVz',
userId: user.id,
userEmail: user.email,
@@ -598,7 +624,6 @@ describe(AuthService.name, () => {
it('should not allow auto registering', async () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.oauthEnabled);
mocks.user.getByEmail.mockResolvedValue(void 0);
mocks.oauth.getProfile.mockResolvedValue(OAuthProfileFactory.create());
await expect(
sut.callback(
@@ -613,31 +638,31 @@ describe(AuthService.name, () => {
it('should link an existing user', async () => {
const user = UserFactory.create();
const profile = OAuthProfileFactory.create();
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.oauthEnabled);
mocks.oauth.getProfile.mockResolvedValue(profile);
mocks.user.getByEmail.mockResolvedValue(user);
mocks.user.update.mockResolvedValue(user);
mocks.session.create.mockResolvedValue(SessionFactory.create());
await sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foobar' },
{},
loginDetails,
);
await expect(
sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foobar' },
{},
loginDetails,
),
).resolves.toEqual(oauthResponse(user));
expect(mocks.user.getByEmail).toHaveBeenCalledTimes(1);
expect(mocks.user.update).toHaveBeenCalledWith(user.id, { oauthId: profile.sub });
expect(mocks.user.update).toHaveBeenCalledWith(user.id, { oauthId: sub });
});
it('should not link to a user with a different oauth sub', async () => {
const user = UserFactory.create({ oauthId: 'existing-sub' });
const user = UserFactory.create({ isAdmin: true, oauthId: 'existing-sub' });
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.oauthWithAutoRegister);
mocks.oauth.getProfile.mockResolvedValue(OAuthProfileFactory.create());
mocks.user.getByEmail.mockResolvedValueOnce(user);
mocks.user.getAdmin.mockResolvedValue(UserFactory.create({ isAdmin: true }));
mocks.user.getAdmin.mockResolvedValue(user);
mocks.user.create.mockResolvedValue(user);
await expect(
sut.callback(
@@ -652,30 +677,35 @@ describe(AuthService.name, () => {
});
it('should allow auto registering by default', async () => {
const user = UserFactory.create({ oauthId: 'oauth-id' });
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.enabled);
mocks.user.getByEmail.mockResolvedValue(void 0);
mocks.user.getAdmin.mockResolvedValue(UserFactory.create({ isAdmin: true }));
mocks.user.create.mockResolvedValue(UserFactory.create({ oauthId: 'oauth-id' }));
mocks.oauth.getProfile.mockResolvedValue(OAuthProfileFactory.create());
mocks.user.create.mockResolvedValue(user);
mocks.session.create.mockResolvedValue(SessionFactory.create());
await sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foobar' },
{},
loginDetails,
);
await expect(
sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foobar' },
{},
loginDetails,
),
).resolves.toEqual(oauthResponse(user));
expect(mocks.user.getByEmail).toHaveBeenCalledTimes(2); // second call is for domain check before create
expect(mocks.user.create).toHaveBeenCalledTimes(1);
});
it('should throw an error if user should be auto registered but the email claim does not exist', async () => {
const user = UserFactory.create({ isAdmin: true });
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.enabled);
mocks.user.getByEmail.mockResolvedValue(void 0);
mocks.user.getAdmin.mockResolvedValue(UserFactory.create({ isAdmin: true }));
mocks.user.create.mockResolvedValue(UserFactory.create());
mocks.user.getAdmin.mockResolvedValue(user);
mocks.user.create.mockResolvedValue(user);
mocks.session.create.mockResolvedValue(SessionFactory.create());
mocks.oauth.getProfile.mockResolvedValue({ sub: 'sub' });
mocks.oauth.getProfile.mockResolvedValue({ sub, email: undefined });
await expect(
sut.callback(
@@ -695,9 +725,10 @@ describe(AuthService.name, () => {
'app.immich:///oauth-callback?code=abc123',
]) {
it(`should use the mobile redirect override for a url of ${url}`, async () => {
const user = UserFactory.create();
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.oauthWithMobileOverride);
mocks.user.getByOAuthId.mockResolvedValue(UserFactory.create());
mocks.oauth.getProfile.mockResolvedValue(OAuthProfileFactory.create());
mocks.user.getByOAuthId.mockResolvedValue(user);
mocks.session.create.mockResolvedValue(SessionFactory.create());
await sut.callback({ url, state: 'xyz789', codeVerifier: 'foo' }, {}, loginDetails);
@@ -712,136 +743,135 @@ describe(AuthService.name, () => {
}
it('should use the default quota', async () => {
const user = UserFactory.create({ oauthId: 'oauth-id' });
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.oauthWithStorageQuota);
mocks.user.getByEmail.mockResolvedValue(void 0);
mocks.user.getAdmin.mockResolvedValue(UserFactory.create({ isAdmin: true }));
mocks.oauth.getProfile.mockResolvedValue(OAuthProfileFactory.create());
mocks.user.create.mockResolvedValue(UserFactory.create({ oauthId: 'oauth-id' }));
mocks.user.create.mockResolvedValue(user);
mocks.session.create.mockResolvedValue(SessionFactory.create());
await sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foo' },
{},
loginDetails,
);
await expect(
sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foo' },
{},
loginDetails,
),
).resolves.toEqual(oauthResponse(user));
expect(mocks.user.create).toHaveBeenCalledWith(expect.objectContaining({ quotaSizeInBytes: 1_073_741_824 }));
});
it('should infer name from given and family names', async () => {
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.enabled);
mocks.oauth.getProfile.mockResolvedValue(
OAuthProfileFactory.create({ name: undefined, given_name: 'Given', family_name: 'Family' }),
);
mocks.user.getByEmail.mockResolvedValue(void 0);
mocks.user.getAdmin.mockResolvedValue(UserFactory.create({ isAdmin: true }));
mocks.user.create.mockResolvedValue(UserFactory.create());
mocks.session.create.mockResolvedValue(SessionFactory.create());
await sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foo' },
{},
loginDetails,
);
expect(mocks.user.create).toHaveBeenCalledWith(expect.objectContaining({ name: 'Given Family' }));
});
it('should fallback to email when no username is provided', async () => {
const profile = OAuthProfileFactory.create({ name: undefined, given_name: undefined, family_name: undefined });
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.enabled);
mocks.oauth.getProfile.mockResolvedValue(profile);
mocks.user.getByEmail.mockResolvedValue(void 0);
mocks.user.getAdmin.mockResolvedValue(UserFactory.create({ isAdmin: true }));
mocks.user.create.mockResolvedValue(UserFactory.create());
mocks.session.create.mockResolvedValue(SessionFactory.create());
await sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foo' },
{},
loginDetails,
);
expect(mocks.user.create).toHaveBeenCalledWith(expect.objectContaining({ name: profile.email }));
});
it('should ignore an invalid storage quota', async () => {
const user = UserFactory.create({ oauthId: 'oauth-id' });
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.oauthWithStorageQuota);
mocks.oauth.getProfile.mockResolvedValue(OAuthProfileFactory.create({ immich_quota: 'abc' }));
mocks.oauth.getProfile.mockResolvedValue({ sub: user.oauthId, email: user.email, immich_quota: 'abc' });
mocks.user.getAdmin.mockResolvedValue(UserFactory.create({ isAdmin: true }));
mocks.user.getByEmail.mockResolvedValue(void 0);
mocks.user.create.mockResolvedValue(UserFactory.create({ oauthId: 'oauth-id' }));
mocks.user.create.mockResolvedValue(user);
mocks.session.create.mockResolvedValue(SessionFactory.create());
await sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foo' },
{},
loginDetails,
);
await expect(
sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foo' },
{},
loginDetails,
),
).resolves.toEqual(oauthResponse(user));
expect(mocks.user.create).toHaveBeenCalledWith(expect.objectContaining({ quotaSizeInBytes: 1_073_741_824 }));
});
it('should ignore a negative quota', async () => {
const user = UserFactory.create({ oauthId: 'oauth-id' });
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.oauthWithStorageQuota);
mocks.oauth.getProfile.mockResolvedValue(OAuthProfileFactory.create({ immich_quota: -5 }));
mocks.user.getAdmin.mockResolvedValue(UserFactory.create({ isAdmin: true }));
mocks.oauth.getProfile.mockResolvedValue({ sub: user.oauthId, email: user.email, immich_quota: -5 });
mocks.user.getAdmin.mockResolvedValue(user);
mocks.user.getByEmail.mockResolvedValue(void 0);
mocks.user.create.mockResolvedValue(UserFactory.create({ oauthId: 'oauth-id' }));
mocks.user.create.mockResolvedValue(user);
mocks.session.create.mockResolvedValue(SessionFactory.create());
await sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foo' },
{},
loginDetails,
);
await expect(
sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foo' },
{},
loginDetails,
),
).resolves.toEqual(oauthResponse(user));
expect(mocks.user.create).toHaveBeenCalledWith(expect.objectContaining({ quotaSizeInBytes: 1_073_741_824 }));
});
it('should set quota for 0 quota', async () => {
const user = UserFactory.create({ oauthId: 'oauth-id' });
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.oauthWithStorageQuota);
mocks.oauth.getProfile.mockResolvedValue(OAuthProfileFactory.create({ immich_quota: 0 }));
mocks.oauth.getProfile.mockResolvedValue({ sub: user.oauthId, email: user.email, immich_quota: 0 });
mocks.user.getAdmin.mockResolvedValue(UserFactory.create({ isAdmin: true }));
mocks.user.getByEmail.mockResolvedValue(void 0);
mocks.user.create.mockResolvedValue(UserFactory.create({ oauthId: 'oauth-id' }));
mocks.user.create.mockResolvedValue(user);
mocks.session.create.mockResolvedValue(SessionFactory.create());
await sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foo' },
{},
loginDetails,
);
await expect(
sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foo' },
{},
loginDetails,
),
).resolves.toEqual(oauthResponse(user));
expect(mocks.user.create).toHaveBeenCalledWith(expect.objectContaining({ quotaSizeInBytes: 0 }));
expect(mocks.user.create).toHaveBeenCalledWith({
email: user.email,
isAdmin: false,
name: ' ',
oauthId: user.oauthId,
quotaSizeInBytes: 0,
storageLabel: null,
});
});
it('should use a valid storage quota', async () => {
const user = UserFactory.create({ oauthId: 'oauth-id' });
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.oauthWithStorageQuota);
mocks.oauth.getProfile.mockResolvedValue(OAuthProfileFactory.create({ immich_quota: 5 }));
mocks.oauth.getProfile.mockResolvedValue({ sub: user.oauthId, email: user.email, immich_quota: 5 });
mocks.user.getByEmail.mockResolvedValue(void 0);
mocks.user.getAdmin.mockResolvedValue(UserFactory.create({ isAdmin: true }));
mocks.user.getByOAuthId.mockResolvedValue(void 0);
mocks.user.create.mockResolvedValue(UserFactory.create({ oauthId: 'oauth-id' }));
mocks.user.create.mockResolvedValue(user);
mocks.session.create.mockResolvedValue(SessionFactory.create());
await sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foo' },
{},
loginDetails,
);
await expect(
sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foo' },
{},
loginDetails,
),
).resolves.toEqual(oauthResponse(user));
expect(mocks.user.create).toHaveBeenCalledWith(expect.objectContaining({ quotaSizeInBytes: 5_368_709_120 }));
expect(mocks.user.create).toHaveBeenCalledWith({
email: user.email,
isAdmin: false,
name: ' ',
oauthId: user.oauthId,
quotaSizeInBytes: 5_368_709_120,
storageLabel: null,
});
});
it('should sync the profile picture', async () => {
const fileId = newUuid();
const user = UserFactory.create({ oauthId: 'oauth-id' });
const profile = OAuthProfileFactory.create({ picture: 'https://auth.immich.cloud/profiles/1.jpg' });
const pictureUrl = 'https://auth.immich.cloud/profiles/1.jpg';
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.oauthEnabled);
mocks.oauth.getProfile.mockResolvedValue(profile);
mocks.oauth.getProfile.mockResolvedValue({
sub: user.oauthId,
email: user.email,
picture: pictureUrl,
});
mocks.user.getByOAuthId.mockResolvedValue(user);
mocks.crypto.randomUUID.mockReturnValue(fileId);
mocks.oauth.getProfilePicture.mockResolvedValue({
@@ -851,96 +881,131 @@ describe(AuthService.name, () => {
mocks.user.update.mockResolvedValue(user);
mocks.session.create.mockResolvedValue(SessionFactory.create());
await sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foo' },
{},
loginDetails,
);
await expect(
sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foo' },
{},
loginDetails,
),
).resolves.toEqual(oauthResponse(user));
expect(mocks.user.update).toHaveBeenCalledWith(user.id, {
profileImagePath: expect.stringContaining(`/data/profile/${user.id}/${fileId}.jpg`),
profileChangedAt: expect.any(Date),
});
expect(mocks.oauth.getProfilePicture).toHaveBeenCalledWith(profile.picture);
expect(mocks.oauth.getProfilePicture).toHaveBeenCalledWith(pictureUrl);
});
it('should not sync the profile picture if the user already has one', async () => {
const user = UserFactory.create({ oauthId: 'oauth-id', profileImagePath: 'not-empty' });
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.oauthEnabled);
mocks.oauth.getProfile.mockResolvedValue(
OAuthProfileFactory.create({
sub: user.oauthId,
email: user.email,
picture: 'https://auth.immich.cloud/profiles/1.jpg',
}),
);
mocks.oauth.getProfile.mockResolvedValue({
sub: user.oauthId,
email: user.email,
picture: 'https://auth.immich.cloud/profiles/1.jpg',
});
mocks.user.getByOAuthId.mockResolvedValue(user);
mocks.user.update.mockResolvedValue(user);
mocks.session.create.mockResolvedValue(SessionFactory.create());
await sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foo' },
{},
loginDetails,
);
await expect(
sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foo' },
{},
loginDetails,
),
).resolves.toEqual(oauthResponse(user));
expect(mocks.user.update).not.toHaveBeenCalled();
expect(mocks.oauth.getProfilePicture).not.toHaveBeenCalled();
});
it('should only allow "admin" and "user" for the role claim', async () => {
const user = UserFactory.create({ oauthId: 'oauth-id' });
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.oauthWithAutoRegister);
mocks.oauth.getProfile.mockResolvedValue(OAuthProfileFactory.create({ immich_role: 'foo' }));
mocks.oauth.getProfile.mockResolvedValue({ sub: user.oauthId, email: user.email, immich_role: 'foo' });
mocks.user.getByEmail.mockResolvedValue(void 0);
mocks.user.getAdmin.mockResolvedValue(UserFactory.create({ isAdmin: true }));
mocks.user.getByOAuthId.mockResolvedValue(void 0);
mocks.user.create.mockResolvedValue(UserFactory.create({ oauthId: 'oauth-id' }));
mocks.user.create.mockResolvedValue(user);
mocks.session.create.mockResolvedValue(SessionFactory.create());
await sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foo' },
{},
loginDetails,
);
await expect(
sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foo' },
{},
loginDetails,
),
).resolves.toEqual(oauthResponse(user));
expect(mocks.user.create).toHaveBeenCalledWith(expect.objectContaining({ isAdmin: false }));
expect(mocks.user.create).toHaveBeenCalledWith({
email: user.email,
name: ' ',
oauthId: user.oauthId,
quotaSizeInBytes: null,
storageLabel: null,
isAdmin: false,
});
});
it('should create an admin user if the role claim is set to admin', async () => {
const user = UserFactory.create({ oauthId: 'oauth-id' });
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.oauthWithAutoRegister);
mocks.oauth.getProfile.mockResolvedValue(OAuthProfileFactory.create({ immich_role: 'admin' }));
mocks.oauth.getProfile.mockResolvedValue({ sub: user.oauthId, email: user.email, immich_role: 'admin' });
mocks.user.getByEmail.mockResolvedValue(void 0);
mocks.user.getByOAuthId.mockResolvedValue(void 0);
mocks.user.create.mockResolvedValue(UserFactory.create({ oauthId: 'oauth-id' }));
mocks.user.create.mockResolvedValue(user);
mocks.session.create.mockResolvedValue(SessionFactory.create());
await sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foo' },
{},
loginDetails,
);
await expect(
sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foo' },
{},
loginDetails,
),
).resolves.toEqual(oauthResponse(user));
expect(mocks.user.create).toHaveBeenCalledWith(expect.objectContaining({ isAdmin: true }));
expect(mocks.user.create).toHaveBeenCalledWith({
email: user.email,
name: ' ',
oauthId: user.oauthId,
quotaSizeInBytes: null,
storageLabel: null,
isAdmin: true,
});
});
it('should accept a custom role claim', async () => {
const user = UserFactory.create({ oauthId: 'oauth-id' });
mocks.systemMetadata.get.mockResolvedValue({
oauth: { ...systemConfigStub.oauthWithAutoRegister.oauth, roleClaim: 'my_role' },
oauth: { ...systemConfigStub.oauthWithAutoRegister, roleClaim: 'my_role' },
});
mocks.oauth.getProfile.mockResolvedValue(OAuthProfileFactory.create({ my_role: 'admin' }));
mocks.oauth.getProfile.mockResolvedValue({ sub: user.oauthId, email: user.email, my_role: 'admin' });
mocks.user.getByEmail.mockResolvedValue(void 0);
mocks.user.getByOAuthId.mockResolvedValue(void 0);
mocks.user.create.mockResolvedValue(UserFactory.create({ oauthId: 'oauth-id' }));
mocks.user.create.mockResolvedValue(user);
mocks.session.create.mockResolvedValue(SessionFactory.create());
await sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foo' },
{},
loginDetails,
);
await expect(
sut.callback(
{ url: 'http://immich/auth/login?code=abc123', state: 'xyz789', codeVerifier: 'foo' },
{},
loginDetails,
),
).resolves.toEqual(oauthResponse(user));
expect(mocks.user.create).toHaveBeenCalledWith(expect.objectContaining({ isAdmin: true }));
expect(mocks.user.create).toHaveBeenCalledWith({
email: user.email,
name: ' ',
oauthId: user.oauthId,
quotaSizeInBytes: null,
storageLabel: null,
isAdmin: true,
});
});
});
@@ -948,10 +1013,8 @@ describe(AuthService.name, () => {
it('should link an account', async () => {
const user = UserFactory.create();
const auth = AuthFactory.from(user).apiKey({ permissions: [] }).build();
const profile = OAuthProfileFactory.create();
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.enabled);
mocks.oauth.getProfile.mockResolvedValue(profile);
mocks.user.update.mockResolvedValue(user);
await sut.link(
@@ -960,7 +1023,7 @@ describe(AuthService.name, () => {
{},
);
expect(mocks.user.update).toHaveBeenCalledWith(auth.user.id, { oauthId: profile.sub });
expect(mocks.user.update).toHaveBeenCalledWith(auth.user.id, { oauthId: sub });
});
it('should not link an already linked oauth.sub', async () => {
@@ -969,7 +1032,6 @@ describe(AuthService.name, () => {
const auth = { user: authUser, apiKey: authApiKey };
mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.enabled);
mocks.oauth.getProfile.mockResolvedValue(OAuthProfileFactory.create());
mocks.user.getByOAuthId.mockResolvedValue({ id: 'other-user' } as UserAdmin);
await expect(

View File

@@ -261,11 +261,6 @@ export class AuthService extends BaseService {
}
async callback(dto: OAuthCallbackDto, headers: IncomingHttpHeaders, loginDetails: LoginDetails) {
const { oauth } = await this.getConfig({ withCache: false });
if (!oauth.enabled) {
throw new BadRequestException('OAuth is not enabled');
}
const expectedState = dto.state ?? this.getCookieOauthState(headers);
if (!expectedState?.length) {
throw new BadRequestException('OAuth state is missing');
@@ -276,6 +271,7 @@ export class AuthService extends BaseService {
throw new BadRequestException('OAuth code verifier is missing');
}
const { oauth } = await this.getConfig({ withCache: false });
const url = this.resolveRedirectUri(oauth, dto.url);
const profile = await this.oauthRepository.getProfile(oauth, url, expectedState, codeVerifier);
const { autoRegister, defaultStorageQuota, storageLabelClaim, storageQuotaClaim, roleClaim } = oauth;
@@ -302,8 +298,7 @@ export class AuthService extends BaseService {
throw new BadRequestException(`User does not exist and auto registering is disabled.`);
}
const email = profile.email;
if (!email) {
if (!profile.email) {
throw new BadRequestException('OAuth profile does not have an email address');
}
@@ -325,13 +320,10 @@ export class AuthService extends BaseService {
isValid: (value: unknown) => isString(value) && ['admin', 'user'].includes(value),
});
const userName = profile.name ?? `${profile.given_name || ''} ${profile.family_name || ''}`;
user = await this.createUser({
name:
profile.name ||
`${profile.given_name || ''} ${profile.family_name || ''}`.trim() ||
profile.preferred_username ||
email,
email,
name: userName,
email: profile.email,
oauthId: profile.sub,
quotaSizeInBytes: storageQuota === null ? null : storageQuota * HumanReadableSize.GiB,
storageLabel: storageLabel || null,

View File

@@ -467,7 +467,7 @@ export class MetadataService extends BaseService {
GPSLatitude: latitude,
GPSLongitude: longitude,
Rating: rating,
TagsList: tags,
TagsList: tags?.length ? tags : undefined,
},
_.isUndefined,
);

View File

@@ -1,6 +1,5 @@
import { DateTime } from 'luxon';
import { SemVer } from 'semver';
import { defaults } from 'src/config';
import { serverVersion } from 'src/constants';
import { ImmichEnvironment, JobName, JobStatus, SystemMetadataKey } from 'src/enum';
import { VersionService } from 'src/services/version.service';
@@ -131,32 +130,6 @@ describe(VersionService.name, () => {
});
});
describe('onConfigUpdate', () => {
it('should queue a version check job when newVersionCheck is enabled', async () => {
await sut.onConfigUpdate({
oldConfig: { ...defaults, newVersionCheck: { enabled: false } },
newConfig: { ...defaults, newVersionCheck: { enabled: true } },
});
expect(mocks.job.queue).toHaveBeenCalledWith({ name: JobName.VersionCheck, data: {} });
});
it('should not queue a version check job when newVersionCheck is disabled', async () => {
await sut.onConfigUpdate({
oldConfig: { ...defaults, newVersionCheck: { enabled: true } },
newConfig: { ...defaults, newVersionCheck: { enabled: false } },
});
expect(mocks.job.queue).not.toHaveBeenCalled();
});
it('should not queue a version check job when newVersionCheck was already enabled', async () => {
await sut.onConfigUpdate({
oldConfig: { ...defaults, newVersionCheck: { enabled: true } },
newConfig: { ...defaults, newVersionCheck: { enabled: true } },
});
expect(mocks.job.queue).not.toHaveBeenCalled();
});
});
describe('onWebsocketConnection', () => {
it('should send on_server_version client event', async () => {
await sut.onWebsocketConnection({ userId: '42' });

View File

@@ -55,13 +55,6 @@ export class VersionService extends BaseService {
return this.versionRepository.getAll();
}
@OnEvent({ name: 'ConfigUpdate' })
async onConfigUpdate({ oldConfig, newConfig }: ArgOf<'ConfigUpdate'>) {
if (!oldConfig.newVersionCheck.enabled && newConfig.newVersionCheck.enabled) {
await this.handleQueueVersionCheck();
}
}
async handleQueueVersionCheck() {
await this.jobRepository.queue({ name: JobName.VersionCheck, data: {} });
}

View File

@@ -1,28 +0,0 @@
import { OAuthProfile } from 'src/repositories/oauth.repository';
import { OAuthProfileLike } from 'test/factories/types';
import { newUuid } from 'test/small.factory';
export class OAuthProfileFactory {
private constructor(private value: OAuthProfile) {}
static create(dto: OAuthProfileLike = {}) {
return OAuthProfileFactory.from(dto).build();
}
static from(dto: OAuthProfileLike = {}) {
const sub = newUuid();
return new OAuthProfileFactory({
sub,
name: 'Name',
given_name: 'Given',
family_name: 'Family',
email: `oauth-${sub}@immich.cloud`,
email_verified: true,
...dto,
});
}
build() {
return { ...this.value };
}
}

View File

@@ -1,5 +1,4 @@
import { Selectable } from 'kysely';
import { OAuthProfile } from 'src/repositories/oauth.repository';
import { ActivityTable } from 'src/schema/tables/activity.table';
import { AlbumUserTable } from 'src/schema/tables/album-user.table';
import { AlbumTable } from 'src/schema/tables/album.table';
@@ -35,4 +34,3 @@ export type PartnerLike = Partial<Selectable<PartnerTable>>;
export type ActivityLike = Partial<Selectable<ActivityTable>>;
export type ApiKeyLike = Partial<Selectable<ApiKeyTable>>;
export type SessionLike = Partial<Selectable<SessionTable>>;
export type OAuthProfileLike = Partial<OAuthProfile>;

View File

@@ -48,7 +48,6 @@ export const authStub = {
showExif: true,
allowDownload: true,
allowUpload: true,
albumId: null,
expiresAt: null,
password: null,
userId: '42',

View File

@@ -220,9 +220,9 @@ export class MediumTestContext<S extends BaseService = BaseService> {
return { result };
}
async newAlbum(dto: Insertable<AlbumTable>, assetIds?: string[]) {
async newAlbum(dto: Insertable<AlbumTable>) {
const album = mediumFactory.albumInsert(dto);
const result = await this.get(AlbumRepository).create(album, assetIds ?? [], []);
const result = await this.get(AlbumRepository).create(album, [], []);
return { album, result };
}

View File

@@ -1,75 +0,0 @@
import { Kysely } from 'kysely';
import { mkdtempSync, readFileSync } from 'node:fs';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { MetadataRepository } from 'src/repositories/metadata.repository';
import { DB } from 'src/schema';
import { BaseService } from 'src/services/base.service';
import { newMediumService } from 'test/medium.factory';
import { newDate } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
let database: Kysely<DB>;
const setup = () => {
const { ctx } = newMediumService(BaseService, {
database,
real: [],
mock: [LoggingRepository],
});
return { ctx, sut: ctx.get(MetadataRepository) };
};
beforeAll(async () => {
database = await getKyselyDB();
});
describe(MetadataRepository.name, () => {
describe('writeTags', () => {
it('should write an empty description', async () => {
const { sut } = setup();
const dir = mkdtempSync(join(tmpdir(), 'metadata-medium-write-tags'));
const sidecarFile = join(dir, 'sidecar.xmp');
await sut.writeTags(sidecarFile, { Description: '' });
expect(readFileSync(sidecarFile).toString()).toEqual(expect.stringContaining('rdf:Description'));
});
it('should write an empty tags list', async () => {
const { sut } = setup();
const dir = mkdtempSync(join(tmpdir(), 'metadata-medium-write-tags'));
const sidecarFile = join(dir, 'sidecar.xmp');
await sut.writeTags(sidecarFile, { TagsList: [] });
const fileContent = readFileSync(sidecarFile).toString();
expect(fileContent).toEqual(expect.stringContaining('digiKam:TagsList'));
expect(fileContent).toEqual(expect.stringContaining('<rdf:li/>'));
});
});
it('should write tags', async () => {
const { sut } = setup();
const dir = mkdtempSync(join(tmpdir(), 'metadata-medium-write-tags'));
const sidecarFile = join(dir, 'sidecar.xmp');
await sut.writeTags(sidecarFile, {
Description: 'my-description',
ImageDescription: 'my-image-description',
DateTimeOriginal: newDate().toISOString(),
GPSLatitude: 42,
GPSLongitude: 69,
Rating: 3,
TagsList: ['tagA'],
});
const fileContent = readFileSync(sidecarFile).toString();
expect(fileContent).toEqual(expect.stringContaining('my-description'));
expect(fileContent).toEqual(expect.stringContaining('my-image-description'));
expect(fileContent).toEqual(expect.stringContaining('exif:DateTimeOriginal'));
expect(fileContent).toEqual(expect.stringContaining('<exif:GPSLatitude>42,0.0N</exif:GPSLatitude>'));
expect(fileContent).toEqual(expect.stringContaining('<exif:GPSLongitude>69,0.0E</exif:GPSLongitude>'));
expect(fileContent).toEqual(expect.stringContaining('<xmp:Rating>3</xmp:Rating>'));
expect(fileContent).toEqual(expect.stringContaining('tagA'));
});
});

View File

@@ -1,15 +1,12 @@
import { Kysely } from 'kysely';
import { randomBytes } from 'node:crypto';
import { AssetMediaStatus } from 'src/dtos/asset-media-response.dto';
import { AssetMediaSize } from 'src/dtos/asset-media.dto';
import { AssetFileType, SharedLinkType } from 'src/enum';
import { AssetFileType } from 'src/enum';
import { AccessRepository } from 'src/repositories/access.repository';
import { AlbumRepository } from 'src/repositories/album.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { SharedLinkRepository } from 'src/repositories/shared-link.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { DB } from 'src/schema';
@@ -25,7 +22,7 @@ let defaultDatabase: Kysely<DB>;
const setup = (db?: Kysely<DB>) => {
return newMediumService(AssetMediaService, {
database: db || defaultDatabase,
real: [AccessRepository, AlbumRepository, AssetRepository, SharedLinkRepository, UserRepository],
real: [AccessRepository, AssetRepository, UserRepository],
mock: [EventRepository, LoggingRepository, JobRepository, StorageRepository],
});
};
@@ -47,6 +44,7 @@ describe(AssetService.name, () => {
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: asset.id, fileSizeInByte: 12_345 });
const auth = factory.auth({ user: { id: user.id } });
const file = mediumFactory.uploadFile();
await expect(
sut.uploadAsset(
@@ -58,7 +56,7 @@ describe(AssetService.name, () => {
fileCreatedAt: new Date(),
assetData: Buffer.from('some data'),
},
mediumFactory.uploadFile(),
file,
),
).resolves.toEqual({
id: expect.any(String),
@@ -101,168 +99,6 @@ describe(AssetService.name, () => {
status: AssetMediaStatus.CREATED,
});
});
it('should add to a shared link', async () => {
const { sut, ctx } = setup();
const sharedLinkRepo = ctx.get(SharedLinkRepository);
ctx.getMock(StorageRepository).utimes.mockResolvedValue();
ctx.getMock(EventRepository).emit.mockResolvedValue();
ctx.getMock(JobRepository).queue.mockResolvedValue();
const { user } = await ctx.newUser();
const sharedLink = await sharedLinkRepo.create({
key: randomBytes(50),
type: SharedLinkType.Individual,
description: 'Shared link description',
userId: user.id,
allowDownload: true,
allowUpload: true,
});
const auth = factory.auth({ user: { id: user.id }, sharedLink });
const file = mediumFactory.uploadFile();
const uploadDto = {
deviceId: 'some-id',
deviceAssetId: 'some-id',
fileModifiedAt: new Date(),
fileCreatedAt: new Date(),
assetData: Buffer.from('some data'),
};
const response = await sut.uploadAsset(auth, uploadDto, file);
expect(response).toEqual({ id: expect.any(String), status: AssetMediaStatus.CREATED });
const update = await sharedLinkRepo.get(user.id, sharedLink.id);
const assets = update!.assets;
expect(assets).toHaveLength(1);
expect(assets[0]).toMatchObject({ id: response.id });
});
it('should handle adding a duplicate asset to a shared link', async () => {
const { sut, ctx } = setup();
ctx.getMock(StorageRepository).utimes.mockResolvedValue();
ctx.getMock(EventRepository).emit.mockResolvedValue();
ctx.getMock(JobRepository).queue.mockResolvedValue();
const sharedLinkRepo = ctx.get(SharedLinkRepository);
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: asset.id, fileSizeInByte: 12_345 });
const sharedLink = await sharedLinkRepo.create({
key: randomBytes(50),
type: SharedLinkType.Individual,
description: 'Shared link description',
userId: user.id,
allowDownload: true,
allowUpload: true,
assetIds: [asset.id],
});
const auth = factory.auth({ user: { id: user.id }, sharedLink });
const uploadDto = {
deviceId: 'some-id',
deviceAssetId: 'some-id',
fileModifiedAt: new Date(),
fileCreatedAt: new Date(),
assetData: Buffer.from('some data'),
};
const response = await sut.uploadAsset(auth, uploadDto, mediumFactory.uploadFile({ checksum: asset.checksum }));
expect(response).toEqual({ id: expect.any(String), status: AssetMediaStatus.DUPLICATE });
const update = await sharedLinkRepo.get(user.id, sharedLink.id);
const assets = update!.assets;
expect(assets).toHaveLength(1);
expect(assets[0]).toMatchObject({ id: response.id });
});
it('should add to an album shared link', async () => {
const { sut, ctx } = setup();
const sharedLinkRepo = ctx.get(SharedLinkRepository);
ctx.getMock(StorageRepository).utimes.mockResolvedValue();
ctx.getMock(EventRepository).emit.mockResolvedValue();
ctx.getMock(JobRepository).queue.mockResolvedValue();
const { user } = await ctx.newUser();
const { album } = await ctx.newAlbum({ ownerId: user.id });
const sharedLink = await sharedLinkRepo.create({
key: randomBytes(50),
type: SharedLinkType.Album,
albumId: album.id,
description: 'Shared link description',
userId: user.id,
allowDownload: true,
allowUpload: true,
});
const auth = factory.auth({ user: { id: user.id }, sharedLink });
const uploadDto = {
deviceId: 'some-id',
deviceAssetId: 'some-id',
fileModifiedAt: new Date(),
fileCreatedAt: new Date(),
assetData: Buffer.from('some data'),
};
const response = await sut.uploadAsset(auth, uploadDto, mediumFactory.uploadFile());
expect(response).toEqual({ id: expect.any(String), status: AssetMediaStatus.CREATED });
const result = await ctx.get(AlbumRepository).getAssetIds(album.id, [response.id]);
const assets = [...result];
expect(assets).toHaveLength(1);
expect(assets[0]).toEqual(response.id);
});
it('should handle adding a duplicate asset to an album shared link', async () => {
const { sut, ctx } = setup();
const sharedLinkRepo = ctx.get(SharedLinkRepository);
ctx.getMock(StorageRepository).utimes.mockResolvedValue();
ctx.getMock(EventRepository).emit.mockResolvedValue();
ctx.getMock(JobRepository).queue.mockResolvedValue();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
const { album } = await ctx.newAlbum({ ownerId: user.id }, [asset.id]);
// await ctx.newExif({ assetId: asset.id, fileSizeInByte: 12_345 });
const sharedLink = await sharedLinkRepo.create({
key: randomBytes(50),
type: SharedLinkType.Album,
albumId: album.id,
description: 'Shared link description',
userId: user.id,
allowDownload: true,
allowUpload: true,
});
const auth = factory.auth({ user: { id: user.id }, sharedLink });
const uploadDto = {
deviceId: 'some-id',
deviceAssetId: 'some-id',
fileModifiedAt: new Date(),
fileCreatedAt: new Date(),
assetData: Buffer.from('some data'),
};
const response = await sut.uploadAsset(auth, uploadDto, mediumFactory.uploadFile({ checksum: asset.checksum }));
expect(response).toEqual({ id: expect.any(String), status: AssetMediaStatus.DUPLICATE });
const result = await ctx.get(AlbumRepository).getAssetIds(album.id, [response.id]);
const assets = [...result];
expect(assets).toHaveLength(1);
expect(assets[0]).toEqual(response.id);
});
});
describe('viewThumbnail', () => {

View File

@@ -47,15 +47,15 @@ describe(UserService.name, () => {
const { sut, ctx } = setup();
ctx.getMock(EventRepository).emit.mockResolvedValue();
const user = mediumFactory.userInsert();
await expect(sut.createUser({ name: 'Test', email: user.email })).resolves.toMatchObject({ email: user.email });
await expect(sut.createUser({ name: 'Test', email: user.email })).rejects.toThrow('User exists');
await expect(sut.createUser({ email: user.email })).resolves.toMatchObject({ email: user.email });
await expect(sut.createUser({ email: user.email })).rejects.toThrow('User exists');
});
it('should not return password', async () => {
const { sut, ctx } = setup();
ctx.getMock(EventRepository).emit.mockResolvedValue();
const dto = mediumFactory.userInsert({ password: 'password' });
const user = await sut.createUser({ name: 'Test', email: dto.email, password: 'password' });
const user = await sut.createUser({ email: dto.email, password: 'password' });
expect((user as any).password).toBeUndefined();
});
});

View File

@@ -63,22 +63,12 @@ const authSharedLinkFactory = (sharedLink: Partial<AuthSharedLink> = {}) => {
expiresAt = null,
userId = newUuid(),
showExif = true,
albumId = null,
allowUpload = false,
allowDownload = true,
password = null,
} = sharedLink;
return {
id,
albumId,
expiresAt,
userId,
showExif,
allowUpload,
allowDownload,
password,
};
return { id, expiresAt, userId, showExif, allowUpload, allowDownload, password };
};
const authApiKeyFactory = (apiKey: Partial<AuthApiKey> = {}) => ({

View File

@@ -1,6 +1,6 @@
{
"name": "immich-web",
"version": "2.6.1",
"version": "2.6.0",
"license": "GNU Affero General Public License version 3",
"type": "module",
"scripts": {
@@ -27,7 +27,7 @@
"@formatjs/icu-messageformat-parser": "^3.0.0",
"@immich/justified-layout-wasm": "^0.4.3",
"@immich/sdk": "workspace:*",
"@immich/ui": "^0.67.2",
"@immich/ui": "^0.64.0",
"@mapbox/mapbox-gl-rtl-text": "0.3.0",
"@mdi/js": "^7.4.47",
"@photo-sphere-viewer/core": "^5.14.0",
@@ -72,10 +72,10 @@
"@koddsson/eslint-plugin-tscompat": "^0.2.0",
"@socket.io/component-emitter": "^3.1.0",
"@sveltejs/adapter-static": "^3.0.8",
"@sveltejs/enhanced-img": "^0.10.4",
"@sveltejs/enhanced-img": "^0.10.0",
"@sveltejs/kit": "^2.27.1",
"@sveltejs/vite-plugin-svelte": "7.0.0",
"@tailwindcss/vite": "^4.2.2",
"@sveltejs/vite-plugin-svelte": "6.2.4",
"@tailwindcss/vite": "^4.1.7",
"@testing-library/jest-dom": "^6.4.2",
"@testing-library/svelte": "^5.2.8",
"@testing-library/user-event": "^14.5.2",
@@ -100,13 +100,13 @@
"prettier-plugin-sort-json": "^4.1.1",
"prettier-plugin-svelte": "^3.3.3",
"rollup-plugin-visualizer": "^6.0.0",
"svelte": "5.53.13",
"svelte": "5.53.7",
"svelte-check": "^4.1.5",
"svelte-eslint-parser": "^1.3.3",
"tailwindcss": "^4.2.2",
"tailwindcss": "^4.1.7",
"typescript": "^5.8.3",
"typescript-eslint": "^8.45.0",
"vite": "^8.0.0",
"vite": "^7.1.2",
"vitest": "^4.0.0"
},
"volta": {

View File

@@ -0,0 +1,33 @@
<script lang="ts">
import { Button, ToastContainer, ToastContent, type Color, type IconLike } from '@immich/ui';
type Props = {
onClose?: () => void;
color?: Color;
title: string;
icon?: IconLike | false;
description: string;
button?: {
text: string;
color?: Color;
onClick: () => void;
};
};
const { onClose, title, description, color, icon, button }: Props = $props();
const onClick = () => {
button?.onClick();
onClose?.();
};
</script>
<ToastContainer {color}>
<ToastContent {color} {title} {description} {onClose} {icon}>
{#if button}
<div class="flex justify-end gap-2 px-2 pb-2 me-3 mt-2">
<Button color={button.color ?? 'secondary'} size="small" onclick={onClick}>{button.text}</Button>
</div>
{/if}
</ToastContent>
</ToastContainer>

View File

@@ -11,7 +11,7 @@
import { user } from '$lib/stores/user.store';
import { handleError } from '$lib/utils/handle-error';
import { sendTestEmailAdmin } from '@immich/sdk';
import { Button, toastManager } from '@immich/ui';
import { Button, LoadingSpinner, toastManager } from '@immich/ui';
import { t } from 'svelte-i18n';
import { fade } from 'svelte/transition';
@@ -142,7 +142,6 @@
<Button
size="small"
shape="round"
loading={isSending}
disabled={!configToEdit.notifications.smtp.enabled}
onclick={handleSendTestEmail}
>
@@ -152,6 +151,9 @@
{$t('admin.notification_email_sent_test_email_button')}
{/if}
</Button>
{#if isSending}
<LoadingSpinner />
{/if}
</div>
</div>
</SettingAccordion>

View File

@@ -33,11 +33,13 @@
{#if isOwned}
<Textarea
bind:value={description}
variant="ghost"
class="outline-none border-b max-h-32 border-transparent pl-0 bg-transparent ring-0 focus:ring-0 resize-none focus:border-b-2 focus:border-immich-primary dark:focus:border-immich-dark-primary dark:bg-transparent"
rows={1}
grow
shape="rectangle"
onfocusout={handleFocusOut}
placeholder={$t('add_a_description')}
data-testid="autogrow-textarea"
class="max-h-32"
{@attach fromAction(shortcut, () => ({
shortcut: { key: 'Enter', ctrl: true },
onShortcut: (e) => e.currentTarget.blur(),

View File

@@ -3,56 +3,59 @@
import { eventManager } from '$lib/managers/event-manager.svelte';
import { handleError } from '$lib/utils/handle-error';
import { updateAlbumInfo } from '@immich/sdk';
import { Textarea } from '@immich/ui';
import { t } from 'svelte-i18n';
import { fromAction } from 'svelte/attachments';
import { tv } from 'tailwind-variants';
type Props = {
interface Props {
id: string;
albumName: string;
isOwned: boolean;
onUpdate: (albumName: string) => void;
};
}
let { id, albumName = $bindable(), isOwned, onUpdate }: Props = $props();
let newAlbumName = $derived(albumName);
const handleUpdate = async () => {
newAlbumName = newAlbumName.replaceAll('\n', ' ').trim();
const handleUpdateName = async () => {
if (newAlbumName === albumName) {
return;
}
try {
const response = await updateAlbumInfo({ id, updateAlbumDto: { albumName: newAlbumName } });
const response = await updateAlbumInfo({
id,
updateAlbumDto: {
albumName: newAlbumName,
},
});
({ albumName } = response);
eventManager.emit('AlbumUpdate', response);
onUpdate(albumName);
} catch (error) {
handleError(error, $t('errors.unable_to_save_album'));
return;
}
};
const textClasses = 'text-2xl lg:text-6xl text-primary';
const styles = tv({
base: 'w-[99%] mb-2 border-b-2 border-transparent text-2xl md:text-4xl lg:text-6xl text-primary outline-none transition-all focus:border-b-2 focus:border-immich-primary focus:outline-none bg-light dark:focus:border-immich-dark-primary dark:focus:bg-immich-dark-gray placeholder:text-primary/90',
variants: {
isOwned: {
true: 'hover:border-gray-400',
false: 'hover:border-transparent',
},
},
});
</script>
<div class="mb-2">
{#if isOwned}
<Textarea
bind:value={newAlbumName}
variant="ghost"
title={$t('edit_title')}
onblur={handleUpdate}
placeholder={$t('add_a_title')}
class={textClasses}
{@attach fromAction(shortcut, () => ({
shortcut: { key: 'Enter' },
onShortcut: (event) => event.currentTarget.blur(),
}))}
/>
{:else}
<div class={textClasses}>{newAlbumName}</div>
{/if}
</div>
<input
use:shortcut={{ shortcut: { key: 'Enter' }, onShortcut: (e) => e.currentTarget.blur() }}
onblur={handleUpdateName}
class={styles({ isOwned })}
type="text"
bind:value={newAlbumName}
disabled={!isOwned}
title={$t('edit_title')}
placeholder={$t('add_a_title')}
/>

View File

@@ -212,12 +212,12 @@
bottom: `${rootHeight - top}px`,
left: `${left}px`,
width: `${boundary.width}px`,
maxHeight: maxHeight(boundary.top - dropdownOffset),
maxHeight: maxHeight(top - dropdownOffset),
};
}
const viewportHeight = visualViewport?.height || window.innerHeight;
const availableHeight = viewportHeight - boundary.bottom;
const viewportHeight = visualViewport?.height || rootHeight;
const availableHeight = modalBounds ? rootHeight - bottom : viewportHeight - boundary.bottom;
return {
top: `${bottom}px`,
left: `${left}px`,

View File

@@ -1,6 +1,5 @@
import { eventManager } from '$lib/managers/event-manager.svelte';
import { uploadAssetsStore } from '$lib/stores/upload';
import { cancelUploadRequests } from '$lib/utils';
import { getSupportedMediaTypes, type ServerMediaTypesResponseDto } from '@immich/sdk';
class UploadManager {
@@ -14,7 +13,6 @@ class UploadManager {
}
reset() {
cancelUploadRequests();
uploadAssetsStore.reset();
}

View File

@@ -1,4 +1,5 @@
import { goto } from '$app/navigation';
import ToastAction from '$lib/components/ToastAction.svelte';
import { authManager } from '$lib/managers/auth-manager.svelte';
import { eventManager } from '$lib/managers/event-manager.svelte';
import type { TimelineAsset } from '$lib/managers/timeline-manager/types';
@@ -137,8 +138,16 @@ const notifyAddToAlbum = ($t: MessageFormatter, albumId: string, assetIds: strin
description = $t('assets_were_part_of_album_count', { values: { count: duplicateCount } });
}
toastManager.primary(
{ description, button: { label: $t('view_album'), onclick: () => goto(Route.viewAlbum({ id: albumId })) } },
toastManager.custom(
{
component: ToastAction,
props: {
title: $t('info'),
color: 'primary',
description,
button: { text: $t('view_album'), color: 'primary', onClick: () => goto(Route.viewAlbum({ id: albumId })) },
},
},
{ timeout: 5000 },
);
};
@@ -220,9 +229,18 @@ export const handleUpdateAlbum = async ({ id }: { id: string }, dto: UpdateAlbum
try {
const response = await updateAlbumInfo({ id, updateAlbumDto: dto });
eventManager.emit('AlbumUpdate', response);
toastManager.primary({
description: $t('album_info_updated'),
button: { label: $t('view_album'), onclick: () => goto(Route.viewAlbum({ id })) },
toastManager.custom({
component: ToastAction,
props: {
color: 'primary',
title: $t('success'),
description: $t('album_info_updated'),
button: {
text: $t('view_album'),
color: 'primary',
onClick: () => goto(Route.viewAlbum({ id })),
},
},
});
return true;
@@ -255,7 +273,7 @@ export const handleDeleteAlbum = async (album: AlbumResponseDto, options?: { pro
}
return true;
} catch (error) {
handleError(error, $t('errors.unable_to_delete_album'), { notify });
handleError(error, $t('errors.unable_to_delete_album'));
return false;
}
};

View File

@@ -8,16 +8,17 @@ import SharedLinkCreateModal from '$lib/modals/SharedLinkCreateModal.svelte';
import { isFaceEditMode } from '$lib/stores/face-edit.svelte';
import { user as authUser, preferences } from '$lib/stores/user.store';
import type { AssetControlContext } from '$lib/types';
import { getAssetMediaUrl, getSharedLink, sleep } from '$lib/utils';
import { getSharedLink, sleep } from '$lib/utils';
import { downloadUrl } from '$lib/utils/asset-utils';
import { handleError } from '$lib/utils/handle-error';
import { getFormatter } from '$lib/utils/i18n';
import { asQueryString } from '$lib/utils/shared-links';
import {
AssetJobName,
AssetMediaSize,
AssetTypeEnum,
AssetVisibility,
getAssetInfo,
getBaseUrl,
runAssetJobs,
updateAsset,
type AssetJobsDto,
@@ -307,7 +308,6 @@ export const handleDownloadAsset = async (asset: AssetResponseDto, { edited }: {
{
filename: asset.originalFileName,
id: asset.id,
cacheKey: asset.thumbhash,
},
];
@@ -321,12 +321,13 @@ export const handleDownloadAsset = async (asset: AssetResponseDto, { edited }: {
assets.push({
filename: motionAsset.originalFileName,
id: asset.livePhotoVideoId,
cacheKey: motionAsset.thumbhash,
});
}
}
for (const [i, { filename, id, cacheKey }] of assets.entries()) {
const queryParams = asQueryString(authManager.params);
for (const [i, { filename, id }] of assets.entries()) {
if (i !== 0) {
// play nice with Safari
await sleep(500);
@@ -334,7 +335,12 @@ export const handleDownloadAsset = async (asset: AssetResponseDto, { edited }: {
try {
toastManager.primary($t('downloading_asset_filename', { values: { filename } }));
downloadUrl(getAssetMediaUrl({ id, size: AssetMediaSize.Original, edited, cacheKey }), filename);
downloadUrl(
getBaseUrl() +
`/assets/${id}/original` +
(queryParams ? `?${queryParams}&edited=${edited}` : `?edited=${edited}`),
filename,
);
} catch (error) {
handleError(error, $t('errors.error_downloading', { values: { filename } }));
}

View File

@@ -80,34 +80,7 @@ function createUploadStore() {
};
const removeItem = (id: string) => {
uploadAssets.update((uploadingAsset) => {
const assetToRemove = uploadingAsset.find((a) => a.id === id);
if (assetToRemove) {
stats.update((stats) => {
switch (assetToRemove.state) {
case UploadState.DONE: {
stats.success--;
break;
}
case UploadState.DUPLICATED: {
stats.duplicates--;
break;
}
case UploadState.ERROR: {
stats.errors--;
break;
}
}
stats.total--;
return stats;
});
}
return uploadingAsset.filter((a) => a.id != id);
});
uploadAssets.update((uploadingAsset) => uploadingAsset.filter((a) => a.id != id));
};
const dismissErrors = () =>

View File

@@ -78,40 +78,17 @@ export const sleep = (ms: number) => {
return new Promise((resolve) => setTimeout(resolve, ms));
};
let unsubscribeId = 0;
const uploads: Record<number, () => void> = {};
const trackUpload = (unsubscribe: () => void) => {
const id = unsubscribeId++;
uploads[id] = unsubscribe;
return () => {
delete uploads[id];
};
};
export const cancelUploadRequests = () => {
for (const unsubscribe of Object.values(uploads)) {
unsubscribe();
}
};
export const uploadRequest = async <T>(options: UploadRequestOptions): Promise<{ data: T; status: number }> => {
const { onUploadProgress: onProgress, data, url } = options;
return new Promise((resolve, reject) => {
const xhr = new XMLHttpRequest();
const unsubscribe = trackUpload(() => xhr.abort());
xhr.addEventListener('error', (error) => {
unsubscribe();
reject(error);
});
xhr.addEventListener('error', (error) => reject(error));
xhr.addEventListener('load', () => {
if (xhr.readyState === 4 && xhr.status >= 200 && xhr.status < 300) {
unsubscribe();
resolve({ data: xhr.response as T, status: xhr.status });
} else {
unsubscribe();
reject(new ApiError(xhr.statusText, xhr.status, xhr.response));
}
});

View File

@@ -1,3 +1,4 @@
import ToastAction from '$lib/components/ToastAction.svelte';
import { TimelineManager } from '$lib/managers/timeline-manager/timeline-manager.svelte';
import type { TimelineAsset } from '$lib/managers/timeline-manager/types';
import type { StackResponse } from '$lib/utils/asset-utils';
@@ -31,15 +32,24 @@ export const deleteAssets = async (
await deleteBulk({ assetBulkDeleteDto: { ids, force } });
onAssetDelete(ids);
toastManager.primary(
toastManager.custom(
{
description: force
? $t('assets_permanently_deleted_count', { values: { count: ids.length } })
: $t('assets_trashed_count', { values: { count: ids.length } }),
button:
onUndoDelete && !force
? { label: $t('undo'), color: 'secondary', onclick: () => undoDeleteAssets(onUndoDelete, assets) }
: undefined,
component: ToastAction,
props: {
title: $t('success'),
description: force
? $t('assets_permanently_deleted_count', { values: { count: ids.length } })
: $t('assets_trashed_count', { values: { count: ids.length } }),
color: 'success',
button:
onUndoDelete && !force
? {
color: 'secondary',
text: $t('undo'),
onClick: () => undoDeleteAssets(onUndoDelete, assets),
}
: undefined,
},
},
{ timeout: 5000 },
);

View File

@@ -1,3 +1,4 @@
import ToastAction from '$lib/components/ToastAction.svelte';
import { authManager } from '$lib/managers/auth-manager.svelte';
import { downloadManager } from '$lib/managers/download-manager.svelte';
import { TimelineManager } from '$lib/managers/timeline-manager/timeline-manager.svelte';
@@ -325,11 +326,16 @@ export const stackAssets = async (assets: { id: string }[], showNotification = t
try {
const stack = await createStack({ stackCreateDto: { assetIds: assets.map(({ id }) => id) } });
if (showNotification) {
toastManager.primary({
description: $t('stacked_assets_count', { values: { count: stack.assets.length } }),
button: {
label: $t('view_stack'),
onclick: () => navigate({ targetRoute: 'current', assetId: stack.primaryAssetId }),
toastManager.custom({
component: ToastAction,
props: {
title: $t('success'),
description: $t('stacked_assets_count', { values: { count: stack.assets.length } }),
color: 'success',
button: {
text: $t('view_stack'),
onClick: () => navigate({ targetRoute: 'current', assetId: stack.primaryAssetId }),
},
},
});
}

View File

@@ -216,7 +216,7 @@ async function fileUploader({
uploadAssetsStore.track('success');
}
if (albumId && !authManager.isSharedLink) {
if (albumId) {
uploadAssetsStore.updateItem(deviceAssetId, { message: $t('asset_adding_to_album') });
await addAssetsToAlbums([albumId], [responseData.id], { notify: false });
uploadAssetsStore.updateItem(deviceAssetId, { message: $t('asset_added_to_album') });

View File

@@ -23,8 +23,7 @@ export function standardizeError(error: unknown) {
return error instanceof Error ? error : new Error(String(error));
}
export function handleError(error: unknown, localizedMessage: string, options?: { notify?: boolean }) {
const { notify = true } = options ?? {};
export function handleError(error: unknown, localizedMessage: string) {
const standardizedError = standardizeError(error);
if (standardizedError.name === 'AbortError') {
return;
@@ -40,9 +39,7 @@ export function handleError(error: unknown, localizedMessage: string, options?:
const errorMessage = serverMessage || localizedMessage;
if (notify) {
toastManager.danger(errorMessage);
}
toastManager.danger(errorMessage);
return errorMessage;
} catch (error) {

View File

@@ -287,11 +287,7 @@
}
};
const onAlbumAddAssets = async ({ albumIds }: { albumIds: string[] }) => {
if (!albumIds.includes(album.id)) {
return;
}
const onAlbumAddAssets = async () => {
await refreshAlbum();
timelineInteraction.clearMultiselect();
await setModeToView();

View File

@@ -178,7 +178,19 @@
const handleFirst = () => navigateToIndex(0);
const handlePrevious = () => navigateToIndex(Math.max(duplicatesIndex - 1, 0));
const handlePreviousShortcut = async () => {
if ($showAssetViewer) {
return;
}
await handlePrevious();
};
const handleNext = async () => navigateToIndex(Math.min(duplicatesIndex + 1, duplicates.length - 1));
const handleNextShortcut = async () => {
if ($showAssetViewer) {
return;
}
await handleNext();
};
const handleLast = () => navigateToIndex(duplicates.length - 1);
const navigateToIndex = async (index: number) =>
@@ -186,12 +198,10 @@
</script>
<svelte:document
use:shortcuts={$showAssetViewer
? []
: [
{ shortcut: { key: 'ArrowLeft' }, onShortcut: handlePrevious },
{ shortcut: { key: 'ArrowRight' }, onShortcut: handleNext },
]}
use:shortcuts={[
{ shortcut: { key: 'ArrowLeft' }, onShortcut: handlePreviousShortcut },
{ shortcut: { key: 'ArrowRight' }, onShortcut: handleNextShortcut },
]}
/>
<UserPageLayout title={data.meta.title + ` (${duplicates.length.toLocaleString($locale)})`} scrollbar={true}>