Compare commits

...

22 Commits

Author SHA1 Message Date
github-actions
7c15e11efc chore: version v1.116.1 2024-09-27 15:32:16 +00:00
Alex
03aa346020 fix(mobile): incorrect filename is retrieved during upload (#12990)
* fix(mobile): incorrect filename is retrieved during upload

* use the same convention to get local id

* revert previous change

* pr feedback
2024-09-27 22:28:31 +07:00
martin
3a37fc8bfd feat: no slideshow transition (#12989) 2024-09-27 15:05:07 +00:00
Jason Rasmussen
36ee72cd87 refactor(server): access env via repository (#12987) 2024-09-27 10:28:56 -04:00
Jason Rasmussen
12da250028 refactor: enums (#12988) 2024-09-27 10:28:42 -04:00
Ryan Ribeiro
5b282733fe chore(Brazilian README): fix broken image links and update translation (#12980) 2024-09-27 08:15:25 -04:00
Alex
971ba63447 fix(mobile): uninitialized provider prevents logging in (#12970)
fix(mobile): use uninitialized provider
2024-09-27 09:40:55 +07:00
KD-MM2
d5ee823fbc refactor(docs): fix heading tag, update Vietnamese translation for image alt, formatting features table (#12971)
* feat(readme): add Vietnamese translation

* feat(readme): add Vietnamese translation

* refactor(readme): update Vietnamese translation section

* Update README_vi_VN.md

* refactor(docs): fix heading tag, update Vietnamese translation for image alt, formatting features table

---------

Co-authored-by: tdcaot <cao@sohobb.jp>
2024-09-27 02:40:00 +00:00
KD-MM2
26f33652e1 feat(docs): add Vietnamese translation (#12967)
* feat(readme): add Vietnamese translation

* feat(readme): add Vietnamese translation

* refactor(readme): update Vietnamese translation section

---------

Co-authored-by: tdcaot <cao@sohobb.jp>
2024-09-27 01:57:26 +00:00
Spencer Fasulo
c86fa81e47 docs(web): JSDoc comments for svelte actions (#12963)
* Web: JSDoc comments for Actions

* Remove comment
2024-09-27 01:41:22 +00:00
Lauritz Tieste
42ad3e6bb0 fix(mobile): navigation panel overlaps with right rotate (#12950)
fix: navigation panel overlaps with right rotate
2024-09-27 08:40:07 +07:00
Alex
a6e703ed6b chore(mobile): post release task (#12955) 2024-09-27 08:11:22 +07:00
Jason Rasmussen
b6f871786c fix(server): handle numeric hierarchical subject values (#12949) 2024-09-26 14:32:10 -04:00
Gus Price
62a490eca2 docs: add clarity to non root user section (#12956)
* clarity

* prettier
2024-09-26 17:34:01 +00:00
github-actions
60679a6369 chore: version v1.116.0 2024-09-26 14:51:27 +00:00
Alex
63ad3c8373 chore(mobile): invalidate api repository for new sign in instance (#12940)
* chore(mobile): invalidate api repository for new sign in instance

* add comments
2024-09-26 19:45:39 +07:00
Jason Rasmussen
ad0dbf0315 fix(web): delete non-empty album (#12937) 2024-09-25 20:54:42 +00:00
Jonathan Jogenfors
b2f2be3485 refactor(server): library syncing (#12220)
* refactor: library scanning

fix tests

remove offline files step

cleanup library service

improve tests

cleanup tests

add db migration

fix e2e

cleanup openapi

fix tests

fix tests

update docs

update docs

update mobile code

fix formatting

don't remove assets from library with invalid import path

use trash for offline files

add migration

simplify scan endpoint

cleanup library panel

fix library tests

e2e lint

fix e2e

trash e2e

fix lint

add asset trash tests

add more tests

ensure thumbs are generated

cleanup svelte

cleanup queue names

fix tests

fix lint

add warning due to trash

fix trash tests

fix lint

fix tests

Admin message for offline asset

fix comments

Update web/src/lib/components/asset-viewer/asset-viewer-nav-bar.svelte

Co-authored-by: Daniel Dietzler <36593685+danieldietzler@users.noreply.github.com>

add permission to library scan endpoint

revert asset interface sort

add trash reason to shared link stub

improve path view in offline

update docs

improve trash performance

fix comments

remove stray comment

* refactor: add back isOffline and remove trashReason from asset, change sync job flow

* chore(server): drop coverage to 80% for functions

* chore: rebase and generated files

---------

Co-authored-by: Zack Pollard <zackpollard@ymail.com>
2024-09-25 18:26:19 +01:00
Jason Rasmussen
1ef2834603 docs: hidden files cursed knowledge (#12929) 2024-09-25 16:30:01 +00:00
Weblate (bot)
35e03c1d6f chore(web): update translations (#12737)
Co-authored-by: -J- <heyj0e@tuta.io>
Co-authored-by: Albert Stoynov <albertstoynov@aol.com>
Co-authored-by: Benjamin Gynther <koti.gynther@gmail.com>
Co-authored-by: Bezruchenko Simon <worcposj44@gmail.com>
Co-authored-by: CanbiZ <mickey.leskowitz@gmail.com>
Co-authored-by: David Abner Ciuhan <dciuhan@gmail.com>
Co-authored-by: Dean Cvjetanović <forteee@gmail.com>
Co-authored-by: Denis Pacquier <denis.pacquier@gmail.com>
Co-authored-by: Fjuro <fjuro@alius.cz>
Co-authored-by: Florian Ostertag <florian.kuepper@gmail.com>
Co-authored-by: Hary <sys.hary@gmail.com>
Co-authored-by: Hurricane-32 <rodrigorimo@hotmail.com>
Co-authored-by: Indrek Haav <IndrekHaav@users.noreply.hosted.weblate.org>
Co-authored-by: João Gonçalves <jpcg89@gmail.com>
Co-authored-by: Michel Heusschen <59014050+michelheusschen@users.noreply.github.com>
Co-authored-by: Miki Mrvos <medolino2009@gmail.com>
Co-authored-by: Mārtiņš Bruņenieks <martinsb@gmail.com>
Co-authored-by: Petri Hämäläinen <petri.hamalainen@mailbox.org>
Co-authored-by: Shawn <xiaxinx@gmail.com>
Co-authored-by: Xo <xocodokie@users.noreply.hosted.weblate.org>
Co-authored-by: btpv <villeriusborro+weblate@gmail.com>
Co-authored-by: chapvic <victor@chapaev.org>
Co-authored-by: dvbthien <dvbthien@dvbthien.onmicrosoft.com>
Co-authored-by: fmis13 <fmis13@disroot.org>
Co-authored-by: gallegonovato <fran-carro@hotmail.es>
Co-authored-by: phewi <phewnix@gmail.com>
Co-authored-by: pyccl <changcongliang@163.com>
Co-authored-by: pyorot <FMasic@hotmail.co.uk>
Co-authored-by: rrole <roger.sole.v@gmail.com>
Co-authored-by: 李奕寯 <eugenelego88@gmail.com>
2024-09-25 16:19:10 +00:00
Jason Rasmussen
005528ab5e fix(server): http error parsing on endpoints without a default response (#12927) 2024-09-25 12:05:03 -04:00
Ben
8d515adac5 feat(web): fixed combobox positioning (#12848)
* fix(web): modal sticky bottom scrolling

* chore: minor styling tweaks

* wip: add portal so modals show on Safari in detail panel

* feat: fixed position dropdown menu

* chore: refactoring and cleanup

* feat: zooming and virtual keyboard working for iPadOS/Safari

* Revert "feat: zooming and virtual keyboard working for iPadOS/Safari"

This reverts commit cac29bac0d.

* wip: minor code cleanup

* wip: recover from visual viewport changes

* wip: ease in a little more visualviewport magic

* wip: code cleanup

* fix: only show dropdown above when viewport is zoomed out

* fix: code review suggestions for code style

Co-authored-by: Daniel Dietzler <36593685+danieldietzler@users.noreply.github.com>

* fix: better variable naming

* chore: better documentation for the bottom breakpoint

---------

Co-authored-by: Daniel Dietzler <36593685+danieldietzler@users.noreply.github.com>
2024-09-25 12:04:53 -04:00
192 changed files with 3738 additions and 3402 deletions


@@ -33,6 +33,7 @@
<a href="readme_i18n/README_pt_BR.md">Português Brasileiro</a>
<a href="readme_i18n/README_sv_SE.md">Svenska</a>
<a href="readme_i18n/README_ar_JO.md">العربية</a>
<a href="readme_i18n/README_vi_VN.md">Tiếng Việt</a>
</p>

cli/package-lock.json generated

@@ -1,12 +1,12 @@
{
"name": "@immich/cli",
"version": "2.2.19",
"version": "2.2.21",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@immich/cli",
"version": "2.2.19",
"version": "2.2.21",
"license": "GNU Affero General Public License version 3",
"dependencies": {
"fast-glob": "^3.3.2",
@@ -52,7 +52,7 @@
},
"../open-api/typescript-sdk": {
"name": "@immich/sdk",
"version": "1.115.0",
"version": "1.116.1",
"dev": true,
"license": "GNU Affero General Public License version 3",
"dependencies": {


@@ -1,6 +1,6 @@
{
"name": "@immich/cli",
"version": "2.2.19",
"version": "2.2.21",
"description": "Command Line Interface (CLI) for Immich",
"type": "module",
"exports": "./dist/index.js",


@@ -333,7 +333,11 @@ You may need to add mount points or docker volumes for the following internal co
- `immich-machine-learning:/.cache`
- `redis:/data`
The non-root user/group needs read/write access to the volume mounts, including `UPLOAD_LOCATION`.
The non-root user/group needs read/write access to the volume mounts, including `UPLOAD_LOCATION` and `/cache` for machine-learning.
:::note Docker Compose Volumes
The Docker Compose top-level volume element does not support non-root access; all of the above volumes must be local volume mounts.
:::
For a further hardened system, you can add the following block to every container except for `immich_postgres`.


@@ -1,18 +1,14 @@
# Libraries
# External Libraries
## Overview
External libraries track assets stored in the filesystem outside of Immich. When the external library is scanned, Immich will load videos and photos from disk and create the corresponding assets. These assets will then be shown in the main timeline, and they will look and behave like any other asset, including viewing on the map, adding to albums, etc. Later, if a file is modified outside of Immich, you need to scan the library for the changes to show up.
Immich supports the creation of libraries which is a top-level asset container. Currently, there are two types of libraries: traditional upload libraries that can sync with a mobile device, and external libraries, that keeps up to date with files on disk. Libraries are different from albums in that an asset can belong to multiple albums but only one library, and deleting a library deletes all assets contained within. As of August 2023, this is a new feature and libraries have a lot of potential for future development beyond what is documented here. This document attempts to describe the current state of libraries.
If an external asset is deleted from disk, Immich will move it to trash on rescan. To restore the asset, you need to restore the original file. After 30 days the file will be removed from trash, and any changes to metadata within Immich will be lost.
## External Libraries
:::caution
External libraries track assets stored outside of Immich, i.e. in the file system. When the external library is scanned, Immich will read the metadata from the file and create an asset in the library for each image or video file. These items will then be shown in the main timeline, and they will look and behave like any other asset, including viewing on the map, adding to albums, etc.
If you add metadata to an external asset in any way (i.e. add it to an album or edit the description), that metadata is only stored inside Immich and will not be persisted to the external asset file. If you move an asset to another location within the library all such metadata will be lost upon rescan. This is because the asset is considered a new asset after the move. This is a known issue and will be fixed in a future release.
If a file is modified outside of Immich, the changes will not be reflected in Immich until the library is scanned again. There are different ways to scan a library depending on the use case:
- Scan Library Files: This is the default scan method and also the quickest. It will scan all files in the library and add new files to the library. It will notice if any files are missing (see below) but not check existing assets
- Scan All Library Files: Same as above, but will check each existing asset to see if the modification time has changed. If it has, the asset will be updated. Since it has to check each asset, this is slower than Scan Library Files.
- Force Scan All Library Files: Same as above, but will read each asset from disk no matter the modification time. This is useful in some cases where an asset has been modified externally but the modification time has not changed. This is the slowest way to scan because it reads each asset from disk.
:::
:::caution
@@ -20,22 +16,6 @@ Due to aggressive caching it can take some time for a refreshed asset to appear
:::
In external libraries, the file path is used for duplicate detection. This means that if a file is moved to a different location, it will be added as a new asset. If the file is moved back to its original location, it will be added as a new asset. In contrast to upload libraries, two identical files can be uploaded if they are in different locations. This is a deliberate design choice to make Immich reflect the file system as closely as possible. Remember that duplication detection is only done within the same library, so if you have multiple external libraries, the same file can be added to multiple libraries.
:::caution
If you add assets from an external library to an album and then move the asset to another location within the library, the asset will be removed from the album upon rescan. This is because the asset is considered a new asset after the move. This is a known issue and will be fixed in a future release.
:::
### Deleted External Assets
Note: Either a manual or scheduled library scan must have been performed to identify offline assets before this process will work.
In all above scan methods, Immich will check if any files are missing. This can happen if files are deleted, or if they are on a storage location that is currently unavailable, like a network drive that is not mounted, or a USB drive that has been unplugged. In order to prevent accidental deletion of assets, Immich will not immediately delete an asset from the library if the file is missing. Instead, the asset will be internally marked as offline and will still be visible in the main timeline. If the file is moved back to its original location and the library is scanned again, the asset will be restored.
Finally, files can be deleted from Immich via the `Remove Offline Files` job. This job can be found by the three dots menu for the associated external storage that was configured under Administration > Libraries (the same location described at [create external libraries](#create-external-libraries)). When this job is run, any assets marked as offline will then be removed from Immich. Run this job whenever files have been deleted from the file system and you want to remove them from Immich.
### Import Paths
External libraries use import paths to determine which files to scan. Each library can have multiple import paths so that files from different locations can be added to the same library. Import paths are scanned recursively, and if a file is in multiple import paths, it will only be added once. Each import path must be a readable directory that exists on the filesystem; the import path dialog will alert you of any paths that are not accessible.
@@ -66,9 +46,13 @@ Some basic examples:
- `**/Raw/**` will exclude all files in any directory named `Raw`
- `**/*.{tif,jpg}` will exclude all files with the extension `.tif` or `.jpg`
Special characters such as @ should be escaped, for instance:
- `**/\@eadir/**` will exclude all files in any directory named `@eadir`
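To make the pattern semantics concrete, here is a small, hypothetical TypeScript check of the exclusion patterns above. It assumes a `picomatch`-style glob matcher (this diff does not show which matcher Immich actually uses), and the sample paths are made up.

```typescript
import picomatch from 'picomatch';

// Hypothetical check of the exclusion patterns documented above
// (assumes a picomatch-style matcher; sample paths are made up).
const isExcluded = picomatch(['**/Raw/**', '**/*.{tif,jpg}', '**/\\@eadir/**']);

console.log(isExcluded('photos/2023/Raw/IMG_0001.CR2')); // true  - inside a "Raw" directory
console.log(isExcluded('photos/2023/IMG_0001.jpg'));     // true  - matches the .jpg extension
console.log(isExcluded('nas/@eadir/thumb.png'));         // true  - escaped @ matches a literal "@eadir" directory
console.log(isExcluded('photos/2023/IMG_0001.heic'));    // false - not covered by any pattern
```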
### Automatic watching (EXPERIMENTAL)
This feature - currently hidden in the config file - is considered experimental and for advanced users only. If enabled, it will allow automatic watching of the filesystem which means new assets are automatically imported to Immich without needing to rescan. Deleted assets are, as always, marked as offline and can be removed with the "Remove offline files" button.
This feature - currently hidden in the config file - is considered experimental and for advanced users only. If enabled, it will allow automatic watching of the filesystem which means new assets are automatically imported to Immich without needing to rescan.
If your photos are on a network drive, automatic file watching likely won't work. In that case, you will have to rely on a periodic library refresh to pull in your changes.
@@ -84,7 +68,7 @@ In rare cases, the library watcher can hang, preventing Immich from starting up.
### Nightly job
There is an automatic job that's run once a day and refreshes all modified files in all libraries as well as cleans up any libraries stuck in deletion.
There is an automatic scan job that is scheduled to run once a day. This job also cleans up any libraries stuck in deletion.
## Usage
@@ -120,7 +104,7 @@ This will disallow the images from being deleted in the web UI, or adding metada
_Remember to run `docker compose up -d` to register the changes. Make sure you can see the mounted path in the container._
:::
### Create External Libraries
### Create A New Library
These actions must be performed by the Immich administrator.
@@ -144,7 +128,7 @@ Next, we'll add an exclusion pattern to filter out raw files.
- Enter `**/Raw/**` and click save.
- Click save
- Click the drop-down menu on the newly created library
- Click on Scan Library Files
- Click on Scan
The christmas trip library will now be scanned in the background. In the meantime, let's add the videos and old photos to another library.
@@ -161,7 +145,7 @@ If you get an error here, please rename the other external library to something
- Click on Add Path
- Enter `/mnt/media/videos` then click Add
- Click Save
- Click on Scan Library Files
- Click on Scan
Within seconds, the assets from the old-pics and videos folders should show up in the main timeline.
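As a rough TypeScript sketch of what clicking "Scan" does over HTTP, the snippet below queues a library scan the same way the updated e2e tests later in this diff do. The helper name, base URL handling, and token variable are assumptions, not part of this change.

```typescript
// Queue a scan of an external library via POST /libraries/:id/scan,
// mirroring the e2e tests in this diff. Names and parameters are assumptions.
async function scanExternalLibrary(
  baseUrl: string,      // your Immich API base URL
  accessToken: string,  // bearer token of an administrator
  libraryId: string,
  refreshModifiedFiles = false, // re-read files whose modification time changed
): Promise<void> {
  const response = await fetch(`${baseUrl}/libraries/${libraryId}/scan`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${accessToken}`,
    },
    body: JSON.stringify({ refreshModifiedFiles }),
  });

  // The e2e tests in this diff expect 204 No Content once the scan job is queued.
  if (response.status !== 204) {
    throw new Error(`Scan request failed with status ${response.status}`);
  }
}

// Usage: queue a quick scan, then one that also re-reads modified files.
// await scanExternalLibrary(baseUrl, token, libraryId);
// await scanExternalLibrary(baseUrl, token, libraryId, true);
```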


@@ -6,6 +6,7 @@ import {
mdiLeadPencil,
mdiLockOff,
mdiLockOutline,
mdiMicrosoftWindows,
mdiSecurity,
mdiSpeedometerSlow,
mdiTrashCan,
@@ -21,6 +22,18 @@ const withLanguage = (date: Date) => (language: string) => date.toLocaleDateStri
type Item = Omit<TimelineItem, 'done' | 'getDateLabel'> & { date: Date };
const items: Item[] = [
{
icon: mdiMicrosoftWindows,
iconColor: '#357EC7',
title: 'Hidden files in Windows are cursed',
description:
'Hidden files in Windows cannot be opened with the "w" flag. That, combined with SMB option "hide dot files" leads to a lot of confusion.',
link: {
url: 'https://github.com/immich-app/immich/pull/12812',
text: '#12812',
},
date: new Date(2024, 8, 20),
},
{
icon: mdiWrap,
iconColor: 'gray',
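A hedged illustration of the gotcha behind the new "cursed knowledge" entry above (not code from the linked PR): on Windows, an existing hidden file cannot be re-created via the `'w'` flag, but opening it with `'r+'` and truncating it sidesteps the problem. The path in the usage comment is hypothetical.

```typescript
import { closeSync, ftruncateSync, openSync, writeSync } from 'node:fs';

// Sketch of the Windows behaviour described in the entry above; not code from the PR.
// Assumes `path` points to an existing file that has the "hidden" attribute set.
function overwriteHiddenFile(path: string, contents: string): void {
  // openSync(path, 'w') would throw EPERM here, because the 'w' flag tries to
  // re-create the file without the hidden attribute, which Windows rejects.
  // Opening the existing file with 'r+' and truncating it avoids the problem.
  const fd = openSync(path, 'r+');
  try {
    ftruncateSync(fd);
    writeSync(fd, contents);
  } finally {
    closeSync(fd);
  }
}

// Example (path is hypothetical): overwriteHiddenFile('\\\\nas\\share\\.immich', 'ok');
```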


@@ -1,4 +1,12 @@
[
{
"label": "v1.116.1",
"url": "https://v1.116.1.archive.immich.app"
},
{
"label": "v1.116.0",
"url": "https://v1.116.0.archive.immich.app"
},
{
"label": "v1.115.0",
"url": "https://v1.115.0.archive.immich.app"

e2e/package-lock.json generated

@@ -1,12 +1,12 @@
{
"name": "immich-e2e",
"version": "1.115.0",
"version": "1.116.1",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "immich-e2e",
"version": "1.115.0",
"version": "1.116.1",
"license": "GNU Affero General Public License version 3",
"devDependencies": {
"@eslint/eslintrc": "^3.1.0",
@@ -45,7 +45,7 @@
},
"../cli": {
"name": "@immich/cli",
"version": "2.2.19",
"version": "2.2.21",
"dev": true,
"license": "GNU Affero General Public License version 3",
"dependencies": {
@@ -92,7 +92,7 @@
},
"../open-api/typescript-sdk": {
"name": "@immich/sdk",
"version": "1.115.0",
"version": "1.116.1",
"dev": true,
"license": "GNU Affero General Public License version 3",
"dependencies": {


@@ -1,6 +1,6 @@
{
"name": "immich-e2e",
"version": "1.115.0",
"version": "1.116.1",
"description": "",
"main": "index.js",
"type": "module",


@@ -1,11 +1,4 @@
import {
LibraryResponseDto,
LoginResponseDto,
ScanLibraryDto,
getAllLibraries,
removeOfflineFiles,
scanLibrary,
} from '@immich/sdk';
import { LibraryResponseDto, LoginResponseDto, getAllLibraries, scanLibrary } from '@immich/sdk';
import { cpSync, existsSync } from 'node:fs';
import { Socket } from 'socket.io-client';
import { userDto, uuidDto } from 'src/fixtures';
@@ -15,8 +8,7 @@ import request from 'supertest';
import { utimes } from 'utimes';
import { afterAll, beforeAll, beforeEach, describe, expect, it } from 'vitest';
const scan = async (accessToken: string, id: string, dto: ScanLibraryDto = {}) =>
scanLibrary({ id, scanLibraryDto: dto }, { headers: asBearerAuth(accessToken) });
const scan = async (accessToken: string, id: string) => scanLibrary({ id }, { headers: asBearerAuth(accessToken) });
describe('/libraries', () => {
let admin: LoginResponseDto;
@@ -293,14 +285,19 @@ describe('/libraries', () => {
expect(body).toEqual(errorDto.unauthorized);
});
it('should scan external library', async () => {
it('should import new asset when scanning external library', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp/directoryA`],
});
await scan(admin.accessToken, library.id);
await utils.waitForWebsocketEvent({ event: 'assetUpload', total: 1 });
const { status } = await request(app)
.post(`/libraries/${library.id}/scan`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(204);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets } = await utils.metadataSearch(admin.accessToken, {
originalPath: `${testAssetDirInternal}/temp/directoryA/assetA.png`,
@@ -315,8 +312,13 @@ describe('/libraries', () => {
exclusionPatterns: ['**/directoryA'],
});
await scan(admin.accessToken, library.id);
await utils.waitForWebsocketEvent({ event: 'assetUpload', total: 1 });
const { status } = await request(app)
.post(`/libraries/${library.id}/scan`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(204);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
@@ -330,8 +332,13 @@ describe('/libraries', () => {
importPaths: [`${testAssetDirInternal}/temp/directoryA`, `${testAssetDirInternal}/temp/directoryB`],
});
await scan(admin.accessToken, library.id);
await utils.waitForWebsocketEvent({ event: 'assetUpload', total: 2 });
const { status } = await request(app)
.post(`/libraries/${library.id}/scan`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(204);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
@@ -340,95 +347,144 @@ describe('/libraries', () => {
expect(assets.items.find((asset) => asset.originalPath.includes('directoryB'))).toBeDefined();
});
it('should pick up new files', async () => {
it('should reimport a modified file', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp`],
});
await scan(admin.accessToken, library.id);
await utils.waitForWebsocketEvent({ event: 'assetUpload', total: 2 });
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
expect(assets.count).toBe(2);
utils.createImageFile(`${testAssetDir}/temp/directoryA/assetC.png`);
utils.createImageFile(`${testAssetDir}/temp/directoryA/assetB.jpg`);
await utimes(`${testAssetDir}/temp/directoryA/assetB.jpg`, 447_775_200_000);
await scan(admin.accessToken, library.id);
await utils.waitForWebsocketEvent({ event: 'assetUpload', total: 3 });
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets: newAssets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
cpSync(`${testAssetDir}/albums/nature/tanners_ridge.jpg`, `${testAssetDir}/temp/directoryA/assetB.jpg`);
await utimes(`${testAssetDir}/temp/directoryA/assetB.jpg`, 447_775_200_001);
expect(newAssets.count).toBe(3);
utils.removeImageFile(`${testAssetDir}/temp/directoryA/assetC.png`);
const { status } = await request(app)
.post(`/libraries/${library.id}/scan`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ refreshModifiedFiles: true });
expect(status).toBe(204);
await utils.waitForQueueFinish(admin.accessToken, 'library');
await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
utils.removeImageFile(`${testAssetDir}/temp/directoryA/assetB.jpg`);
const { assets } = await utils.metadataSearch(admin.accessToken, {
libraryId: library.id,
model: 'NIKON D750',
});
expect(assets.count).toBe(1);
});
it('should offline a file missing from disk', async () => {
utils.createImageFile(`${testAssetDir}/temp/directoryA/assetC.png`);
it('should not reimport unmodified files', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp`],
});
utils.createImageFile(`${testAssetDir}/temp/directoryA/assetB.jpg`);
await utimes(`${testAssetDir}/temp/directoryA/assetB.jpg`, 447_775_200_000);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
cpSync(`${testAssetDir}/albums/nature/tanners_ridge.jpg`, `${testAssetDir}/temp/directoryA/assetB.jpg`);
await utimes(`${testAssetDir}/temp/directoryA/assetB.jpg`, 447_775_200_000);
const { status } = await request(app)
.post(`/libraries/${library.id}/scan`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ refreshModifiedFiles: true });
expect(status).toBe(204);
await utils.waitForQueueFinish(admin.accessToken, 'library');
await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
utils.removeImageFile(`${testAssetDir}/temp/directoryA/assetB.jpg`);
const { assets } = await utils.metadataSearch(admin.accessToken, {
libraryId: library.id,
model: 'NIKON D750',
});
expect(assets.count).toBe(0);
});
it('should set an asset offline if its file is missing', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp/offline`],
});
utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
expect(assets.count).toBe(3);
expect(assets.count).toBe(1);
utils.removeImageFile(`${testAssetDir}/temp/directoryA/assetC.png`);
utils.removeImageFile(`${testAssetDir}/temp/offline/offline.png`);
const { status } = await request(app)
.post(`/libraries/${library.id}/scan`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(204);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const trashedAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id);
expect(trashedAsset.originalPath).toBe(`${testAssetDirInternal}/temp/offline/offline.png`);
expect(trashedAsset.isOffline).toEqual(true);
const { assets: newAssets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
expect(newAssets.count).toBe(3);
expect(newAssets.items).toEqual(
expect.arrayContaining([
expect.objectContaining({
isOffline: true,
originalFileName: 'assetC.png',
}),
]),
);
expect(newAssets.items).toEqual([]);
});
it('should offline a file outside of import paths', async () => {
it('should set an asset offline if its file is not in any import path', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp`],
importPaths: [`${testAssetDirInternal}/temp/offline`],
});
utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
expect(assets.count).toBe(1);
utils.createDirectory(`${testAssetDir}/temp/another-path/`);
await request(app)
.put(`/libraries/${library.id}`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ importPaths: [`${testAssetDirInternal}/temp/directoryA`] });
.send({ importPaths: [`${testAssetDirInternal}/temp/another-path/`] });
const { status } = await request(app)
.post(`/libraries/${library.id}/scan`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(204);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
const trashedAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id);
expect(trashedAsset.originalPath).toBe(`${testAssetDirInternal}/temp/offline/offline.png`);
expect(trashedAsset.isOffline).toBe(true);
expect(assets.items).toEqual(
expect.arrayContaining([
expect.objectContaining({
isOffline: false,
originalFileName: 'assetA.png',
}),
expect.objectContaining({
isOffline: true,
originalFileName: 'assetB.png',
}),
]),
);
const { assets: newAssets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
expect(newAssets.items).toEqual([]);
utils.removeImageFile(`${testAssetDir}/temp/offline/offline.png`);
utils.removeDirectory(`${testAssetDir}/temp/another-path/`);
});
it('should offline a file covered by an exclusion pattern', async () => {
it('should set an asset offline if its file is covered by an exclusion pattern', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp`],
@@ -437,6 +493,12 @@ describe('/libraries', () => {
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets } = await utils.metadataSearch(admin.accessToken, {
libraryId: library.id,
originalFileName: 'assetB.png',
});
expect(assets.count).toBe(1);
await request(app)
.put(`/libraries/${library.id}`)
.set('Authorization', `Bearer ${admin.accessToken}`)
@@ -445,282 +507,21 @@ describe('/libraries', () => {
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
const trashedAsset = await utils.getAssetInfo(admin.accessToken, assets.items[0].id);
expect(trashedAsset.isTrashed).toBe(true);
expect(trashedAsset.originalPath).toBe(`${testAssetDirInternal}/temp/directoryB/assetB.png`);
expect(trashedAsset.isOffline).toBe(true);
expect(assets.count).toBe(2);
const { assets: newAssets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
expect(assets.items).toEqual(
expect.arrayContaining([
expect.objectContaining({
isOffline: false,
originalFileName: 'assetA.png',
}),
expect.objectContaining({
isOffline: true,
originalFileName: 'assetB.png',
}),
]),
);
expect(newAssets.items).toEqual([
expect.objectContaining({
originalFileName: 'assetA.png',
}),
]);
});
it('should not try to delete offline files', async () => {
utils.createImageFile(`${testAssetDir}/temp/offline1/assetA.png`);
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp/offline1`],
});
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets: initialAssets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
expect(initialAssets).toEqual({
count: 1,
total: 1,
facets: [],
items: [expect.objectContaining({ originalFileName: 'assetA.png' })],
nextPage: null,
});
utils.removeImageFile(`${testAssetDir}/temp/offline1/assetA.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets: offlineAssets } = await utils.metadataSearch(admin.accessToken, {
libraryId: library.id,
isOffline: true,
});
expect(offlineAssets).toEqual({
count: 1,
total: 1,
facets: [],
items: [expect.objectContaining({ originalFileName: 'assetA.png' })],
nextPage: null,
});
utils.createImageFile(`${testAssetDir}/temp/offline1/assetA.png`);
await removeOfflineFiles({ id: library.id }, { headers: asBearerAuth(admin.accessToken) });
await utils.waitForQueueFinish(admin.accessToken, 'library');
await utils.waitForWebsocketEvent({ event: 'assetDelete', total: 1 });
expect(existsSync(`${testAssetDir}/temp/offline1/assetA.png`)).toBe(true);
utils.removeImageFile(`${testAssetDir}/temp/offline1/assetA.png`);
});
it('should scan new files', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp`],
});
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
utils.createImageFile(`${testAssetDir}/temp/directoryC/assetC.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
expect(assets.count).toBe(3);
expect(assets.items).toEqual(
expect.arrayContaining([
expect.objectContaining({
originalFileName: 'assetC.png',
}),
]),
);
utils.removeImageFile(`${testAssetDir}/temp/directoryC/assetC.png`);
});
describe('with refreshModifiedFiles=true', () => {
it('should reimport modified files', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp`],
});
utils.createImageFile(`${testAssetDir}/temp/directoryA/assetB.jpg`);
await utimes(`${testAssetDir}/temp/directoryA/assetB.jpg`, 447_775_200_000);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
cpSync(`${testAssetDir}/albums/nature/tanners_ridge.jpg`, `${testAssetDir}/temp/directoryA/assetB.jpg`);
await utimes(`${testAssetDir}/temp/directoryA/assetB.jpg`, 447_775_200_001);
await scan(admin.accessToken, library.id, { refreshModifiedFiles: true });
await utils.waitForQueueFinish(admin.accessToken, 'library');
await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
utils.removeImageFile(`${testAssetDir}/temp/directoryA/assetB.jpg`);
const { assets } = await utils.metadataSearch(admin.accessToken, {
libraryId: library.id,
model: 'NIKON D750',
});
expect(assets.count).toBe(1);
});
it('should not reimport unmodified files', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp`],
});
utils.createImageFile(`${testAssetDir}/temp/directoryA/assetB.jpg`);
await utimes(`${testAssetDir}/temp/directoryA/assetB.jpg`, 447_775_200_000);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
cpSync(`${testAssetDir}/albums/nature/tanners_ridge.jpg`, `${testAssetDir}/temp/directoryA/assetB.jpg`);
await utimes(`${testAssetDir}/temp/directoryA/assetB.jpg`, 447_775_200_000);
await scan(admin.accessToken, library.id, { refreshModifiedFiles: true });
await utils.waitForQueueFinish(admin.accessToken, 'library');
await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
utils.removeImageFile(`${testAssetDir}/temp/directoryA/assetB.jpg`);
const { assets } = await utils.metadataSearch(admin.accessToken, {
libraryId: library.id,
model: 'NIKON D750',
});
expect(assets.count).toBe(0);
});
});
describe('with refreshAllFiles=true', () => {
it('should reimport all files', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp`],
});
utils.createImageFile(`${testAssetDir}/temp/directoryA/assetB.jpg`);
await utimes(`${testAssetDir}/temp/directoryA/assetB.jpg`, 447_775_200_000);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
cpSync(`${testAssetDir}/albums/nature/tanners_ridge.jpg`, `${testAssetDir}/temp/directoryA/assetB.jpg`);
await utimes(`${testAssetDir}/temp/directoryA/assetB.jpg`, 447_775_200_000);
await scan(admin.accessToken, library.id, { refreshAllFiles: true });
await utils.waitForQueueFinish(admin.accessToken, 'library');
await utils.waitForQueueFinish(admin.accessToken, 'metadataExtraction');
utils.removeImageFile(`${testAssetDir}/temp/directoryA/assetB.jpg`);
const { assets } = await utils.metadataSearch(admin.accessToken, {
libraryId: library.id,
model: 'NIKON D750',
});
expect(assets.count).toBe(1);
});
});
});
describe('POST /libraries/:id/removeOffline', () => {
it('should require authentication', async () => {
const { status, body } = await request(app).post(`/libraries/${uuidDto.notFound}/removeOffline`).send({});
expect(status).toBe(401);
expect(body).toEqual(errorDto.unauthorized);
});
it('should remove offline files', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp/offline`],
});
utils.createImageFile(`${testAssetDir}/temp/offline/online.png`);
utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets: initialAssets } = await utils.metadataSearch(admin.accessToken, {
libraryId: library.id,
});
expect(initialAssets.count).toBe(2);
utils.removeImageFile(`${testAssetDir}/temp/offline/offline.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets: offlineAssets } = await utils.metadataSearch(admin.accessToken, {
libraryId: library.id,
isOffline: true,
});
expect(offlineAssets.count).toBe(1);
const { status } = await request(app)
.post(`/libraries/${library.id}/removeOffline`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(204);
await utils.waitForQueueFinish(admin.accessToken, 'library');
await utils.waitForQueueFinish(admin.accessToken, 'backgroundTask');
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
expect(assets.count).toBe(1);
utils.removeImageFile(`${testAssetDir}/temp/offline/online.png`);
});
it('should remove offline files from trash', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp/offline`],
});
utils.createImageFile(`${testAssetDir}/temp/offline/online.png`);
utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets: initialAssets } = await utils.metadataSearch(admin.accessToken, {
libraryId: library.id,
});
expect(initialAssets.count).toBe(2);
utils.removeImageFile(`${testAssetDir}/temp/offline/offline.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets: offlineAssets } = await utils.metadataSearch(admin.accessToken, {
libraryId: library.id,
isOffline: true,
});
expect(offlineAssets.count).toBe(1);
const { status } = await request(app)
.post(`/libraries/${library.id}/removeOffline`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(204);
await utils.waitForQueueFinish(admin.accessToken, 'library');
await utils.waitForQueueFinish(admin.accessToken, 'backgroundTask');
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
expect(assets.count).toBe(1);
expect(assets.items[0].isOffline).toBe(false);
expect(assets.items[0].originalPath).toEqual(`${testAssetDirInternal}/temp/offline/online.png`);
utils.removeImageFile(`${testAssetDir}/temp/offline/online.png`);
});
it('should not remove online files', async () => {
it('should not trash an online asset', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp`],
@@ -733,10 +534,11 @@ describe('/libraries', () => {
expect(assetsBefore.count).toBeGreaterThan(1);
const { status } = await request(app)
.post(`/libraries/${library.id}/removeOffline`)
.post(`/libraries/${library.id}/scan`)
.set('Authorization', `Bearer ${admin.accessToken}`)
.send();
expect(status).toBe(204);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
@@ -828,7 +630,7 @@ describe('/libraries', () => {
});
await scan(admin.accessToken, library.id);
await utils.waitForWebsocketEvent({ event: 'assetUpload', total: 2 });
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { status, body } = await request(app)
.delete(`/libraries/${library.id}`)


@@ -181,7 +181,7 @@ describe('/search', () => {
dto: { size: -1.5 },
expected: ['size must not be less than 1', 'size must be an integer number'],
},
...['isArchived', 'isFavorite', 'isEncoded', 'isMotion', 'isOffline', 'isVisible'].map((value) => ({
...['isArchived', 'isFavorite', 'isEncoded', 'isOffline', 'isMotion', 'isVisible'].map((value) => ({
should: `should reject ${value} not a boolean`,
dto: { [value]: 'immich' },
expected: [`${value} must be a boolean value`],


@@ -1,10 +1,13 @@
import { LoginResponseDto, getAssetInfo, getAssetStatistics } from '@immich/sdk';
import { LoginResponseDto, getAssetInfo, getAssetStatistics, scanLibrary } from '@immich/sdk';
import { existsSync } from 'node:fs';
import { Socket } from 'socket.io-client';
import { errorDto } from 'src/responses';
import { app, asBearerAuth, utils } from 'src/utils';
import { app, asBearerAuth, testAssetDir, testAssetDirInternal, utils } from 'src/utils';
import request from 'supertest';
import { afterAll, beforeAll, describe, expect, it } from 'vitest';
const scan = async (accessToken: string, id: string) => scanLibrary({ id }, { headers: asBearerAuth(accessToken) });
describe('/trash', () => {
let admin: LoginResponseDto;
let ws: Socket;
@@ -44,6 +47,8 @@ describe('/trash', () => {
const after = await getAssetStatistics({ isTrashed: true }, { headers: asBearerAuth(admin.accessToken) });
expect(after.total).toBe(0);
expect(existsSync(before.originalPath)).toBe(false);
});
it('should empty the trash with archived assets', async () => {
@@ -64,6 +69,46 @@ describe('/trash', () => {
const after = await getAssetStatistics({ isTrashed: true }, { headers: asBearerAuth(admin.accessToken) });
expect(after.total).toBe(0);
expect(existsSync(before.originalPath)).toBe(false);
});
it('should not delete offline-trashed assets from disk', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp/offline`],
});
utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
expect(assets.items.length).toBe(1);
const asset = assets.items[0];
utils.removeImageFile(`${testAssetDir}/temp/offline/offline.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const assetBefore = await utils.getAssetInfo(admin.accessToken, asset.id);
expect(assetBefore).toMatchObject({ isTrashed: true, isOffline: true });
utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);
const { status } = await request(app).post('/trash/empty').set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
await utils.waitForQueueFinish(admin.accessToken, 'backgroundTask');
const assetAfter = await utils.getAssetInfo(admin.accessToken, asset.id);
expect(assetAfter).toMatchObject({ isTrashed: true, isOffline: true });
expect(existsSync(`${testAssetDir}/temp/offline/offline.png`)).toBe(true);
utils.removeImageFile(`${testAssetDir}/temp/offline/offline.png`);
});
});
@@ -91,6 +136,37 @@ describe('/trash', () => {
const after = await getAssetInfo({ id: assetId }, { headers: asBearerAuth(admin.accessToken) });
expect(after).toStrictEqual(expect.objectContaining({ id: assetId, isTrashed: false }));
});
it('should not restore offline-trashed assets', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp/offline`],
});
utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
expect(assets.count).toBe(1);
const assetId = assets.items[0].id;
utils.removeImageFile(`${testAssetDir}/temp/offline/offline.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const before = await getAssetInfo({ id: assetId }, { headers: asBearerAuth(admin.accessToken) });
expect(before).toStrictEqual(expect.objectContaining({ id: assetId, isOffline: true }));
const { status } = await request(app).post('/trash/restore').set('Authorization', `Bearer ${admin.accessToken}`);
expect(status).toBe(200);
const after = await getAssetInfo({ id: assetId }, { headers: asBearerAuth(admin.accessToken) });
expect(after).toStrictEqual(expect.objectContaining({ id: assetId, isOffline: true }));
});
});
describe('POST /trash/restore/assets', () => {
@@ -118,5 +194,38 @@ describe('/trash', () => {
const after = await utils.getAssetInfo(admin.accessToken, assetId);
expect(after.isTrashed).toBe(false);
});
it('should not restore an offline-trashed asset', async () => {
const library = await utils.createLibrary(admin.accessToken, {
ownerId: admin.userId,
importPaths: [`${testAssetDirInternal}/temp/offline`],
});
utils.createImageFile(`${testAssetDir}/temp/offline/offline.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const { assets } = await utils.metadataSearch(admin.accessToken, { libraryId: library.id });
expect(assets.count).toBe(1);
const assetId = assets.items[0].id;
utils.removeImageFile(`${testAssetDir}/temp/offline/offline.png`);
await scan(admin.accessToken, library.id);
await utils.waitForQueueFinish(admin.accessToken, 'library');
const before = await utils.getAssetInfo(admin.accessToken, assetId);
expect(before.isTrashed).toBe(true);
const { status } = await request(app)
.post('/trash/restore/assets')
.set('Authorization', `Bearer ${admin.accessToken}`)
.send({ ids: [assetId] });
expect(status).toBe(200);
const after = await utils.getAssetInfo(admin.accessToken, assetId);
expect(after.isTrashed).toBe(true);
});
});
});


@@ -372,6 +372,12 @@ export const utils = {
writeFileSync(path, makeRandomImage());
},
createDirectory: (path: string) => {
if (!existsSync(dirname(path))) {
mkdirSync(dirname(path), { recursive: true });
}
},
removeImageFile: (path: string) => {
if (!existsSync(path)) {
return;
@@ -380,6 +386,14 @@ export const utils = {
rmSync(path);
},
removeDirectory: (path: string) => {
if (!existsSync(path)) {
return;
}
rmSync(path);
},
getAssetInfo: (accessToken: string, id: string) => getAssetInfo({ id }, { headers: asBearerAuth(accessToken) }),
checkExistingAssets: (accessToken: string, checkExistingAssetsDto: CheckExistingAssetsDto) =>


@@ -1,6 +1,6 @@
[tool.poetry]
name = "machine-learning"
version = "1.115.0"
version = "1.116.1"
description = ""
authors = ["Hau Tran <alex.tran1502@gmail.com>"]
readme = "README.md"


@@ -35,8 +35,8 @@ platform :android do
task: 'bundle',
build_type: 'Release',
properties: {
"android.injected.version.code" => 159,
"android.injected.version.name" => "1.115.0",
"android.injected.version.code" => 161,
"android.injected.version.name" => "1.116.1",
}
)
upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')


@@ -401,7 +401,7 @@
CODE_SIGN_ENTITLEMENTS = Runner/RunnerProfile.entitlements;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 175;
CURRENT_PROJECT_VERSION = 176;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
@@ -543,7 +543,7 @@
CLANG_ENABLE_MODULES = YES;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 175;
CURRENT_PROJECT_VERSION = 176;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;
@@ -571,7 +571,7 @@
CLANG_ENABLE_MODULES = YES;
CODE_SIGN_IDENTITY = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 175;
CURRENT_PROJECT_VERSION = 176;
DEVELOPMENT_TEAM = 2F67MQ8R79;
ENABLE_BITCODE = NO;
INFOPLIST_FILE = Runner/Info.plist;


@@ -58,11 +58,11 @@
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.115.0</string>
<string>1.116.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>175</string>
<string>176</string>
<key>FLTEnableImpeller</key>
<true/>
<key>ITSAppUsesNonExemptEncryption</key>


@@ -19,7 +19,7 @@ platform :ios do
desc "iOS Release"
lane :release do
increment_version_number(
version_number: "1.115.0"
version_number: "1.116.1"
)
increment_build_number(
build_number: latest_testflight_build_number + 1,


@@ -57,69 +57,64 @@ const AssetSchema = CollectionSchema(
name: r'isFavorite',
type: IsarType.bool,
),
r'isOffline': PropertySchema(
id: 8,
name: r'isOffline',
type: IsarType.bool,
),
r'isTrashed': PropertySchema(
id: 9,
id: 8,
name: r'isTrashed',
type: IsarType.bool,
),
r'livePhotoVideoId': PropertySchema(
id: 10,
id: 9,
name: r'livePhotoVideoId',
type: IsarType.string,
),
r'localId': PropertySchema(
id: 11,
id: 10,
name: r'localId',
type: IsarType.string,
),
r'ownerId': PropertySchema(
id: 12,
id: 11,
name: r'ownerId',
type: IsarType.long,
),
r'remoteId': PropertySchema(
id: 13,
id: 12,
name: r'remoteId',
type: IsarType.string,
),
r'stackCount': PropertySchema(
id: 14,
id: 13,
name: r'stackCount',
type: IsarType.long,
),
r'stackId': PropertySchema(
id: 15,
id: 14,
name: r'stackId',
type: IsarType.string,
),
r'stackPrimaryAssetId': PropertySchema(
id: 16,
id: 15,
name: r'stackPrimaryAssetId',
type: IsarType.string,
),
r'thumbhash': PropertySchema(
id: 17,
id: 16,
name: r'thumbhash',
type: IsarType.string,
),
r'type': PropertySchema(
id: 18,
id: 17,
name: r'type',
type: IsarType.byte,
enumMap: _AssettypeEnumValueMap,
),
r'updatedAt': PropertySchema(
id: 19,
id: 18,
name: r'updatedAt',
type: IsarType.dateTime,
),
r'width': PropertySchema(
id: 20,
id: 19,
name: r'width',
type: IsarType.int,
)
@@ -244,19 +239,18 @@ void _assetSerialize(
writer.writeInt(offsets[5], object.height);
writer.writeBool(offsets[6], object.isArchived);
writer.writeBool(offsets[7], object.isFavorite);
writer.writeBool(offsets[8], object.isOffline);
writer.writeBool(offsets[9], object.isTrashed);
writer.writeString(offsets[10], object.livePhotoVideoId);
writer.writeString(offsets[11], object.localId);
writer.writeLong(offsets[12], object.ownerId);
writer.writeString(offsets[13], object.remoteId);
writer.writeLong(offsets[14], object.stackCount);
writer.writeString(offsets[15], object.stackId);
writer.writeString(offsets[16], object.stackPrimaryAssetId);
writer.writeString(offsets[17], object.thumbhash);
writer.writeByte(offsets[18], object.type.index);
writer.writeDateTime(offsets[19], object.updatedAt);
writer.writeInt(offsets[20], object.width);
writer.writeBool(offsets[8], object.isTrashed);
writer.writeString(offsets[9], object.livePhotoVideoId);
writer.writeString(offsets[10], object.localId);
writer.writeLong(offsets[11], object.ownerId);
writer.writeString(offsets[12], object.remoteId);
writer.writeLong(offsets[13], object.stackCount);
writer.writeString(offsets[14], object.stackId);
writer.writeString(offsets[15], object.stackPrimaryAssetId);
writer.writeString(offsets[16], object.thumbhash);
writer.writeByte(offsets[17], object.type.index);
writer.writeDateTime(offsets[18], object.updatedAt);
writer.writeInt(offsets[19], object.width);
}
Asset _assetDeserialize(
@@ -275,20 +269,19 @@ Asset _assetDeserialize(
id: id,
isArchived: reader.readBoolOrNull(offsets[6]) ?? false,
isFavorite: reader.readBoolOrNull(offsets[7]) ?? false,
isOffline: reader.readBoolOrNull(offsets[8]) ?? false,
isTrashed: reader.readBoolOrNull(offsets[9]) ?? false,
livePhotoVideoId: reader.readStringOrNull(offsets[10]),
localId: reader.readStringOrNull(offsets[11]),
ownerId: reader.readLong(offsets[12]),
remoteId: reader.readStringOrNull(offsets[13]),
stackCount: reader.readLongOrNull(offsets[14]) ?? 0,
stackId: reader.readStringOrNull(offsets[15]),
stackPrimaryAssetId: reader.readStringOrNull(offsets[16]),
thumbhash: reader.readStringOrNull(offsets[17]),
type: _AssettypeValueEnumMap[reader.readByteOrNull(offsets[18])] ??
isTrashed: reader.readBoolOrNull(offsets[8]) ?? false,
livePhotoVideoId: reader.readStringOrNull(offsets[9]),
localId: reader.readStringOrNull(offsets[10]),
ownerId: reader.readLong(offsets[11]),
remoteId: reader.readStringOrNull(offsets[12]),
stackCount: reader.readLongOrNull(offsets[13]) ?? 0,
stackId: reader.readStringOrNull(offsets[14]),
stackPrimaryAssetId: reader.readStringOrNull(offsets[15]),
thumbhash: reader.readStringOrNull(offsets[16]),
type: _AssettypeValueEnumMap[reader.readByteOrNull(offsets[17])] ??
AssetType.other,
updatedAt: reader.readDateTime(offsets[19]),
width: reader.readIntOrNull(offsets[20]),
updatedAt: reader.readDateTime(offsets[18]),
width: reader.readIntOrNull(offsets[19]),
);
return object;
}
@@ -319,29 +312,27 @@ P _assetDeserializeProp<P>(
case 8:
return (reader.readBoolOrNull(offset) ?? false) as P;
case 9:
return (reader.readBoolOrNull(offset) ?? false) as P;
return (reader.readStringOrNull(offset)) as P;
case 10:
return (reader.readStringOrNull(offset)) as P;
case 11:
return (reader.readStringOrNull(offset)) as P;
case 12:
return (reader.readLong(offset)) as P;
case 13:
case 12:
return (reader.readStringOrNull(offset)) as P;
case 14:
case 13:
return (reader.readLongOrNull(offset) ?? 0) as P;
case 14:
return (reader.readStringOrNull(offset)) as P;
case 15:
return (reader.readStringOrNull(offset)) as P;
case 16:
return (reader.readStringOrNull(offset)) as P;
case 17:
return (reader.readStringOrNull(offset)) as P;
case 18:
return (_AssettypeValueEnumMap[reader.readByteOrNull(offset)] ??
AssetType.other) as P;
case 19:
case 18:
return (reader.readDateTime(offset)) as P;
case 20:
case 19:
return (reader.readIntOrNull(offset)) as P;
default:
throw IsarError('Unknown property with id $propertyId');
@@ -1362,16 +1353,6 @@ extension AssetQueryFilter on QueryBuilder<Asset, Asset, QFilterCondition> {
});
}
QueryBuilder<Asset, Asset, QAfterFilterCondition> isOfflineEqualTo(
bool value) {
return QueryBuilder.apply(this, (query) {
return query.addFilterCondition(FilterCondition.equalTo(
property: r'isOffline',
value: value,
));
});
}
QueryBuilder<Asset, Asset, QAfterFilterCondition> isTrashedEqualTo(
bool value) {
return QueryBuilder.apply(this, (query) {
@@ -2647,18 +2628,6 @@ extension AssetQuerySortBy on QueryBuilder<Asset, Asset, QSortBy> {
});
}
QueryBuilder<Asset, Asset, QAfterSortBy> sortByIsOffline() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'isOffline', Sort.asc);
});
}
QueryBuilder<Asset, Asset, QAfterSortBy> sortByIsOfflineDesc() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'isOffline', Sort.desc);
});
}
QueryBuilder<Asset, Asset, QAfterSortBy> sortByIsTrashed() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'isTrashed', Sort.asc);
@@ -2913,18 +2882,6 @@ extension AssetQuerySortThenBy on QueryBuilder<Asset, Asset, QSortThenBy> {
});
}
QueryBuilder<Asset, Asset, QAfterSortBy> thenByIsOffline() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'isOffline', Sort.asc);
});
}
QueryBuilder<Asset, Asset, QAfterSortBy> thenByIsOfflineDesc() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'isOffline', Sort.desc);
});
}
QueryBuilder<Asset, Asset, QAfterSortBy> thenByIsTrashed() {
return QueryBuilder.apply(this, (query) {
return query.addSortBy(r'isTrashed', Sort.asc);
@@ -3121,12 +3078,6 @@ extension AssetQueryWhereDistinct on QueryBuilder<Asset, Asset, QDistinct> {
});
}
QueryBuilder<Asset, Asset, QDistinct> distinctByIsOffline() {
return QueryBuilder.apply(this, (query) {
return query.addDistinctBy(r'isOffline');
});
}
QueryBuilder<Asset, Asset, QDistinct> distinctByIsTrashed() {
return QueryBuilder.apply(this, (query) {
return query.addDistinctBy(r'isTrashed');
@@ -3263,12 +3214,6 @@ extension AssetQueryProperty on QueryBuilder<Asset, Asset, QQueryProperty> {
});
}
QueryBuilder<Asset, bool, QQueryOperations> isOfflineProperty() {
return QueryBuilder.apply(this, (query) {
return query.addPropertyName(r'isOffline');
});
}
QueryBuilder<Asset, bool, QQueryOperations> isTrashedProperty() {
return QueryBuilder.apply(this, (query) {
return query.addPropertyName(r'isTrashed');

View File
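The generated Isar code above shifts every property index after `isOffline` down by one and drops the `isOfflineEqualTo` filter, the `sortByIsOffline`/`thenByIsOffline` sorts, and the `distinctByIsOffline` helper. A minimal sketch of a query over the remaining columns after the change; the `isar.assets` accessor name and the entity import path are assumed to match what Isar generates for this schema, so treat this as illustrative rather than the app's actual query:

```dart
import 'package:immich_mobile/entities/asset.entity.dart'; // assumed path
import 'package:isar/isar.dart';

/// Fetches non-trashed assets; a previous `.isOfflineEqualTo(false)`
/// clause simply disappears because the column no longer exists.
Future<List<Asset>> loadVisibleAssets(Isar isar) {
  return isar.assets.filter().isTrashedEqualTo(false).findAll();
}
```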

@@ -72,13 +72,14 @@ extension AssetListExtension on Iterable<Asset> {
}
/// Filters out offline assets and returns those that are still accessible by the Immich server
/// TODO: isOffline is removed from Immich, so this method is not useful anymore
Iterable<Asset> nonOfflineOnly({
void Function()? errorCallback,
}) {
final bool onlyLive = every((e) => !e.isOffline);
final bool onlyLive = every((e) => false);
if (!onlyLive) {
if (errorCallback != null) errorCallback();
return where((a) => !a.isOffline);
return where((a) => false);
}
return this;
}
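Per the TODO above, `nonOfflineOnly` keeps compiling for existing call sites while it waits to be removed. A hedged sketch of the remaining call shape; the import paths and the logging callback are illustrative, not the app's real wiring:

```dart
import 'package:flutter/foundation.dart' show debugPrint;
import 'package:immich_mobile/entities/asset.entity.dart'; // assumed path
import 'package:immich_mobile/extensions/collection_extensions.dart'; // assumed path

Iterable<Asset> pickShareable(Iterable<Asset> assets) {
  // errorCallback is invoked when the extension reports inaccessible assets.
  return assets.nonOfflineOnly(
    errorCallback: () => debugPrint('some selected assets are unavailable'),
  );
}
```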

View File

@@ -4,4 +4,7 @@ abstract interface class IAssetMediaRepository {
Future<List<String>> deleteAll(List<String> ids);
Future<Asset?> get(String id);
/// Obtaining the correct original filename of the asset
Future<String?> getOriginalFilename(String id);
}

View File

@@ -51,107 +51,109 @@ class CropImagePage extends HookWidget {
],
),
backgroundColor: context.scaffoldBackgroundColor,
body: LayoutBuilder(
builder: (BuildContext context, BoxConstraints constraints) {
return Column(
children: [
Container(
padding: const EdgeInsets.only(top: 20),
width: constraints.maxWidth * 0.9,
height: constraints.maxHeight * 0.6,
child: CropImage(
controller: cropController,
image: image,
gridColor: Colors.white,
),
),
Expanded(
child: Container(
width: double.infinity,
decoration: BoxDecoration(
color: context.scaffoldBackgroundColor,
borderRadius: const BorderRadius.only(
topLeft: Radius.circular(20),
topRight: Radius.circular(20),
),
body: SafeArea(
child: LayoutBuilder(
builder: (BuildContext context, BoxConstraints constraints) {
return Column(
children: [
Container(
padding: const EdgeInsets.only(top: 20),
width: constraints.maxWidth * 0.9,
height: constraints.maxHeight * 0.6,
child: CropImage(
controller: cropController,
image: image,
gridColor: Colors.white,
),
child: Center(
child: Column(
mainAxisAlignment: MainAxisAlignment.center,
children: [
Padding(
padding: const EdgeInsets.only(
left: 20,
right: 20,
bottom: 10,
),
Expanded(
child: Container(
width: double.infinity,
decoration: BoxDecoration(
color: context.scaffoldBackgroundColor,
borderRadius: const BorderRadius.only(
topLeft: Radius.circular(20),
topRight: Radius.circular(20),
),
),
child: Center(
child: Column(
mainAxisAlignment: MainAxisAlignment.center,
children: [
Padding(
padding: const EdgeInsets.only(
left: 20,
right: 20,
bottom: 10,
),
child: Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
IconButton(
icon: Icon(
Icons.rotate_left,
color: Theme.of(context).iconTheme.color,
),
onPressed: () {
cropController.rotateLeft();
},
),
IconButton(
icon: Icon(
Icons.rotate_right,
color: Theme.of(context).iconTheme.color,
),
onPressed: () {
cropController.rotateRight();
},
),
],
),
),
child: Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
IconButton(
icon: Icon(
Icons.rotate_left,
color: Theme.of(context).iconTheme.color,
),
onPressed: () {
cropController.rotateLeft();
},
Row(
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
children: <Widget>[
_AspectRatioButton(
cropController: cropController,
aspectRatio: aspectRatio,
ratio: null,
label: 'Free',
),
IconButton(
icon: Icon(
Icons.rotate_right,
color: Theme.of(context).iconTheme.color,
),
onPressed: () {
cropController.rotateRight();
},
_AspectRatioButton(
cropController: cropController,
aspectRatio: aspectRatio,
ratio: 1.0,
label: '1:1',
),
_AspectRatioButton(
cropController: cropController,
aspectRatio: aspectRatio,
ratio: 16.0 / 9.0,
label: '16:9',
),
_AspectRatioButton(
cropController: cropController,
aspectRatio: aspectRatio,
ratio: 3.0 / 2.0,
label: '3:2',
),
_AspectRatioButton(
cropController: cropController,
aspectRatio: aspectRatio,
ratio: 7.0 / 5.0,
label: '7:5',
),
],
),
),
Row(
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
children: <Widget>[
_AspectRatioButton(
cropController: cropController,
aspectRatio: aspectRatio,
ratio: null,
label: 'Free',
),
_AspectRatioButton(
cropController: cropController,
aspectRatio: aspectRatio,
ratio: 1.0,
label: '1:1',
),
_AspectRatioButton(
cropController: cropController,
aspectRatio: aspectRatio,
ratio: 16.0 / 9.0,
label: '16:9',
),
_AspectRatioButton(
cropController: cropController,
aspectRatio: aspectRatio,
ratio: 3.0 / 2.0,
label: '3:2',
),
_AspectRatioButton(
cropController: cropController,
aspectRatio: aspectRatio,
ratio: 7.0 / 5.0,
label: '7:5',
),
],
),
],
],
),
),
),
),
),
],
);
},
],
);
},
),
),
);
}
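The crop page change above only wraps the existing layout in a `SafeArea`, so the rotate and aspect-ratio controls no longer sit underneath system UI such as the Android navigation bar. A minimal standalone sketch of the same pattern (widget names here are illustrative):

```dart
import 'package:flutter/material.dart';

/// Insets the whole body so bottom controls stay clear of system UI.
class CropScaffoldSketch extends StatelessWidget {
  const CropScaffoldSketch({super.key, required this.child});

  final Widget child;

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      body: SafeArea(
        child: LayoutBuilder(
          builder: (context, constraints) => child,
        ),
      ),
    );
  }
}
```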

View File

@@ -86,12 +86,16 @@ class AppLifeCycleNotifier extends StateNotifier<AppLifeCycleEnum> {
void handleAppPause() {
state = AppLifeCycleEnum.paused;
_wasPaused = true;
// Do not cancel backup if manual upload is in progress
if (_ref.read(backupProvider.notifier).backupProgress !=
BackUpProgressEnum.manualInProgress) {
_ref.read(backupProvider.notifier).cancelBackup();
if (_ref.read(authenticationProvider).isAuthenticated) {
// Do not cancel backup if manual upload is in progress
if (_ref.read(backupProvider.notifier).backupProgress !=
BackUpProgressEnum.manualInProgress) {
_ref.read(backupProvider.notifier).cancelBackup();
}
_ref.read(websocketProvider.notifier).disconnect();
}
_ref.read(websocketProvider.notifier).disconnect();
ImmichLogger().flush();
}
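The lifecycle change above only cancels backup work and disconnects the websocket when a user is actually authenticated. A minimal sketch of that guard with the Riverpod providers reduced to plain values and callbacks (illustrative stand-ins, not the app's types):

```dart
/// Illustrative stand-ins for the app's providers.
class SessionState {
  const SessionState({
    required this.isAuthenticated,
    required this.manualUploadInProgress,
  });

  final bool isAuthenticated;
  final bool manualUploadInProgress;
}

void onAppPause(
  SessionState session, {
  required void Function() cancelBackup,
  required void Function() disconnectWebsocket,
}) {
  if (session.isAuthenticated) {
    // Keep a manual upload running; only cancel automatic backups.
    if (!session.manualUploadInProgress) {
      cancelBackup();
    }
    disconnectWebsocket();
  }
  // Log flushing happens regardless of auth state.
}
```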

View File

@@ -43,4 +43,17 @@ class AssetMediaRepository implements IAssetMediaRepository {
asset.local = local;
return asset;
}
@override
Future<String?> getOriginalFilename(String id) async {
final entity = await AssetEntity.fromId(id);
if (entity == null) {
return null;
}
// titleAsync gets the correct original filename for some assets on iOS
// otherwise using the `entity.title` would return a random GUID
return await entity.titleAsync;
}
}
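As the comment above notes, `entity.titleAsync` is needed because `entity.title` can come back as a random GUID for some iOS assets. The backup service (further down in this diff) calls the new method and falls back to the cached `asset.fileName`; a hedged sketch of that call pattern, with the helper name being illustrative:

```dart
import 'package:immich_mobile/interfaces/asset_media.interface.dart';

/// Resolves the filename to send with an upload, preferring the value
/// reported by the platform and falling back to the cached name.
Future<String> resolveUploadFilename(
  IAssetMediaRepository repository, {
  required String localId,
  required String cachedFileName,
}) async {
  final original = await repository.getOriginalFilename(localId);
  return original ?? cachedFileName;
}
```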

View File

@@ -15,6 +15,7 @@ import 'package:immich_mobile/models/backup/success_upload_asset.model.dart';
import 'package:immich_mobile/repositories/album.repository.dart';
import 'package:immich_mobile/repositories/album_api.repository.dart';
import 'package:immich_mobile/repositories/asset.repository.dart';
import 'package:immich_mobile/repositories/asset_media.repository.dart';
import 'package:immich_mobile/repositories/backup.repository.dart';
import 'package:immich_mobile/repositories/album_media.repository.dart';
import 'package:immich_mobile/repositories/file_media.repository.dart';
@@ -368,6 +369,7 @@ class BackgroundService {
BackupRepository backupAlbumRepository = BackupRepository(db);
AlbumMediaRepository albumMediaRepository = AlbumMediaRepository();
FileMediaRepository fileMediaRepository = FileMediaRepository();
AssetMediaRepository assetMediaRepository = AssetMediaRepository();
UserRepository userRepository = UserRepository(db);
UserApiRepository userApiRepository =
UserApiRepository(apiService.usersApi);
@@ -409,6 +411,7 @@ class BackgroundService {
albumService,
albumMediaRepository,
fileMediaRepository,
assetMediaRepository,
);
final selectedAlbums = backupService.selectedAlbumsQuery().findAllSync();

View File

@@ -12,6 +12,7 @@ import 'package:immich_mobile/entities/backup_album.entity.dart';
import 'package:immich_mobile/entities/duplicated_asset.entity.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/interfaces/album_media.interface.dart';
import 'package:immich_mobile/interfaces/asset_media.interface.dart';
import 'package:immich_mobile/interfaces/file_media.interface.dart';
import 'package:immich_mobile/models/backup/backup_candidate.model.dart';
import 'package:immich_mobile/models/backup/current_upload_asset.model.dart';
@@ -21,6 +22,7 @@ import 'package:immich_mobile/providers/api.provider.dart';
import 'package:immich_mobile/providers/app_settings.provider.dart';
import 'package:immich_mobile/providers/db.provider.dart';
import 'package:immich_mobile/repositories/album_media.repository.dart';
import 'package:immich_mobile/repositories/asset_media.repository.dart';
import 'package:immich_mobile/repositories/file_media.repository.dart';
import 'package:immich_mobile/services/album.service.dart';
import 'package:immich_mobile/services/api.service.dart';
@@ -40,6 +42,7 @@ final backupServiceProvider = Provider(
ref.watch(albumServiceProvider),
ref.watch(albumMediaRepositoryProvider),
ref.watch(fileMediaRepositoryProvider),
ref.watch(assetMediaRepositoryProvider),
),
);
@@ -52,6 +55,7 @@ class BackupService {
final AlbumService _albumService;
final IAlbumMediaRepository _albumMediaRepository;
final IFileMediaRepository _fileMediaRepository;
final IAssetMediaRepository _assetMediaRepository;
BackupService(
this._apiService,
@@ -60,6 +64,7 @@ class BackupService {
this._albumService,
this._albumMediaRepository,
this._fileMediaRepository,
this._assetMediaRepository,
);
Future<List<String>?> getDeviceBackupAsset() async {
@@ -329,7 +334,9 @@ class BackupService {
}
if (file != null) {
String originalFileName = asset.fileName;
String? originalFileName =
await _assetMediaRepository.getOriginalFilename(asset.localId!);
originalFileName ??= asset.fileName;
if (asset.local!.isLivePhoto) {
if (livePhotoFile == null) {

View File

@@ -0,0 +1,16 @@
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/repositories/activity_api.repository.dart';
import 'package:immich_mobile/repositories/album_api.repository.dart';
import 'package:immich_mobile/repositories/asset_api.repository.dart';
import 'package:immich_mobile/repositories/partner_api.repository.dart';
import 'package:immich_mobile/repositories/person_api.repository.dart';
import 'package:immich_mobile/repositories/user_api.repository.dart';
void invalidateAllApiRepositoryProviders(WidgetRef ref) {
ref.invalidate(userApiRepositoryProvider);
ref.invalidate(activityApiRepositoryProvider);
ref.invalidate(partnerApiRepositoryProvider);
ref.invalidate(albumApiRepositoryProvider);
ref.invalidate(personApiRepositoryProvider);
ref.invalidate(assetApiRepositoryProvider);
}
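The helper above is called from the login form (see the diff further down) right before authenticating, so every API-backed repository is rebuilt with the new access token. A minimal sketch of that call order; the function name is illustrative and the surrounding login logic is elided:

```dart
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/providers/asset.provider.dart';
import 'package:immich_mobile/utils/provider_utils.dart';

void prepareForNewSignIn(WidgetRef ref) {
  // Drop cached asset state left over from a previously signed-in user.
  ref.read(assetProvider.notifier).clearAllAsset();
  // Rebuild every API-backed repository so it picks up the new access token.
  invalidateAllApiRepositoryProviders(ref);
}
```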

View File

@@ -172,29 +172,12 @@ class BottomGalleryBar extends ConsumerWidget {
}
shareAsset() {
if (asset.isOffline) {
ImmichToast.show(
durationInSecond: 1,
context: context,
msg: 'asset_action_share_err_offline'.tr(),
gravity: ToastGravity.BOTTOM,
);
return;
}
ref.read(imageViewerStateProvider.notifier).shareAsset(asset, context);
}
void handleEdit() async {
final image = Image(image: ImmichImage.imageProvider(asset: asset));
if (asset.isOffline) {
ImmichToast.show(
durationInSecond: 1,
context: context,
msg: 'asset_action_edit_err_offline'.tr(),
gravity: ToastGravity.BOTTOM,
);
return;
}
Navigator.of(context).push(
MaterialPageRoute(
builder: (context) => EditImagePage(
@@ -219,16 +202,6 @@ class BottomGalleryBar extends ConsumerWidget {
if (asset.isLocal) {
return;
}
if (asset.isOffline) {
ImmichToast.show(
durationInSecond: 1,
context: context,
msg: 'asset_action_share_err_offline'.tr(),
gravity: ToastGravity.BOTTOM,
);
return;
}
ref.read(imageViewerStateProvider.notifier).downloadAsset(
asset,
context,

View File

@@ -183,8 +183,7 @@ class TopControlAppBar extends HookConsumerWidget {
if (asset.isRemote && isOwner) buildFavoriteButton(a),
if (asset.livePhotoVideoId != null) buildLivePhotoButton(),
if (asset.isLocal && !asset.isRemote) buildUploadButton(),
if (asset.isRemote && !asset.isLocal && !asset.isOffline && isOwner)
buildDownloadButton(),
if (asset.isRemote && !asset.isLocal && isOwner) buildDownloadButton(),
if (asset.isRemote && (isOwner || isPartner) && !asset.isTrashed)
buildAddToAlbumButton(),
if (asset.isTrashed) buildRestoreButton(),

View File

@@ -16,6 +16,7 @@ import 'package:immich_mobile/providers/asset.provider.dart';
import 'package:immich_mobile/providers/authentication.provider.dart';
import 'package:immich_mobile/providers/backup/backup.provider.dart';
import 'package:immich_mobile/providers/server_info.provider.dart';
import 'package:immich_mobile/utils/provider_utils.dart';
import 'package:immich_mobile/utils/version_compatibility.dart';
import 'package:immich_mobile/widgets/common/immich_logo.dart';
import 'package:immich_mobile/widgets/common/immich_title_text.dart';
@@ -186,6 +187,9 @@ class LoginForm extends HookConsumerWidget {
// This will remove current cache asset state of previous user login.
ref.read(assetProvider.notifier).clearAllAsset();
// Invalidate all api repository provider instance to take into account new access token
invalidateAllApiRepositoryProviders(ref);
try {
final isAuthenticated =
await ref.read(authenticationProvider.notifier).login(

View File

@@ -3,7 +3,7 @@ Immich API
This Dart package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:
- API version: 1.115.0
- API version: 1.116.1
- Generator version: 7.8.0
- Build package: org.openapitools.codegen.languages.DartClientCodegen
@@ -133,7 +133,6 @@ Class | Method | HTTP request | Description
*LibrariesApi* | [**getAllLibraries**](doc//LibrariesApi.md#getalllibraries) | **GET** /libraries |
*LibrariesApi* | [**getLibrary**](doc//LibrariesApi.md#getlibrary) | **GET** /libraries/{id} |
*LibrariesApi* | [**getLibraryStatistics**](doc//LibrariesApi.md#getlibrarystatistics) | **GET** /libraries/{id}/statistics |
*LibrariesApi* | [**removeOfflineFiles**](doc//LibrariesApi.md#removeofflinefiles) | **POST** /libraries/{id}/removeOffline |
*LibrariesApi* | [**scanLibrary**](doc//LibrariesApi.md#scanlibrary) | **POST** /libraries/{id}/scan |
*LibrariesApi* | [**updateLibrary**](doc//LibrariesApi.md#updatelibrary) | **PUT** /libraries/{id} |
*LibrariesApi* | [**validate**](doc//LibrariesApi.md#validate) | **POST** /libraries/{id}/validate |
@@ -385,7 +384,6 @@ Class | Method | HTTP request | Description
- [ReactionLevel](doc//ReactionLevel.md)
- [ReactionType](doc//ReactionType.md)
- [ReverseGeocodingStateResponseDto](doc//ReverseGeocodingStateResponseDto.md)
- [ScanLibraryDto](doc//ScanLibraryDto.md)
- [SearchAlbumResponseDto](doc//SearchAlbumResponseDto.md)
- [SearchAssetResponseDto](doc//SearchAssetResponseDto.md)
- [SearchExploreItem](doc//SearchExploreItem.md)
@@ -448,6 +446,7 @@ Class | Method | HTTP request | Description
- [TagUpsertDto](doc//TagUpsertDto.md)
- [TagsResponse](doc//TagsResponse.md)
- [TagsUpdate](doc//TagsUpdate.md)
- [TestEmailResponseDto](doc//TestEmailResponseDto.md)
- [TimeBucketResponseDto](doc//TimeBucketResponseDto.md)
- [TimeBucketSize](doc//TimeBucketSize.md)
- [ToneMapping](doc//ToneMapping.md)

View File

@@ -197,7 +197,6 @@ part 'model/ratings_update.dart';
part 'model/reaction_level.dart';
part 'model/reaction_type.dart';
part 'model/reverse_geocoding_state_response_dto.dart';
part 'model/scan_library_dto.dart';
part 'model/search_album_response_dto.dart';
part 'model/search_asset_response_dto.dart';
part 'model/search_explore_item.dart';
@@ -260,6 +259,7 @@ part 'model/tag_update_dto.dart';
part 'model/tag_upsert_dto.dart';
part 'model/tags_response.dart';
part 'model/tags_update.dart';
part 'model/test_email_response_dto.dart';
part 'model/time_bucket_response_dto.dart';
part 'model/time_bucket_size.dart';
part 'model/tone_mapping.dart';

View File

@@ -833,14 +833,12 @@ class AssetsApi {
///
/// * [bool] isFavorite:
///
/// * [bool] isOffline:
///
/// * [bool] isVisible:
///
/// * [String] livePhotoVideoId:
///
/// * [MultipartFile] sidecarData:
Future<Response> uploadAssetWithHttpInfo(MultipartFile assetData, String deviceAssetId, String deviceId, DateTime fileCreatedAt, DateTime fileModifiedAt, { String? key, String? xImmichChecksum, String? duration, bool? isArchived, bool? isFavorite, bool? isOffline, bool? isVisible, String? livePhotoVideoId, MultipartFile? sidecarData, }) async {
Future<Response> uploadAssetWithHttpInfo(MultipartFile assetData, String deviceAssetId, String deviceId, DateTime fileCreatedAt, DateTime fileModifiedAt, { String? key, String? xImmichChecksum, String? duration, bool? isArchived, bool? isFavorite, bool? isVisible, String? livePhotoVideoId, MultipartFile? sidecarData, }) async {
// ignore: prefer_const_declarations
final path = r'/assets';
@@ -896,10 +894,6 @@ class AssetsApi {
hasFields = true;
mp.fields[r'isFavorite'] = parameterToString(isFavorite);
}
if (isOffline != null) {
hasFields = true;
mp.fields[r'isOffline'] = parameterToString(isOffline);
}
if (isVisible != null) {
hasFields = true;
mp.fields[r'isVisible'] = parameterToString(isVisible);
@@ -951,15 +945,13 @@ class AssetsApi {
///
/// * [bool] isFavorite:
///
/// * [bool] isOffline:
///
/// * [bool] isVisible:
///
/// * [String] livePhotoVideoId:
///
/// * [MultipartFile] sidecarData:
Future<AssetMediaResponseDto?> uploadAsset(MultipartFile assetData, String deviceAssetId, String deviceId, DateTime fileCreatedAt, DateTime fileModifiedAt, { String? key, String? xImmichChecksum, String? duration, bool? isArchived, bool? isFavorite, bool? isOffline, bool? isVisible, String? livePhotoVideoId, MultipartFile? sidecarData, }) async {
final response = await uploadAssetWithHttpInfo(assetData, deviceAssetId, deviceId, fileCreatedAt, fileModifiedAt, key: key, xImmichChecksum: xImmichChecksum, duration: duration, isArchived: isArchived, isFavorite: isFavorite, isOffline: isOffline, isVisible: isVisible, livePhotoVideoId: livePhotoVideoId, sidecarData: sidecarData, );
Future<AssetMediaResponseDto?> uploadAsset(MultipartFile assetData, String deviceAssetId, String deviceId, DateTime fileCreatedAt, DateTime fileModifiedAt, { String? key, String? xImmichChecksum, String? duration, bool? isArchived, bool? isFavorite, bool? isVisible, String? livePhotoVideoId, MultipartFile? sidecarData, }) async {
final response = await uploadAssetWithHttpInfo(assetData, deviceAssetId, deviceId, fileCreatedAt, fileModifiedAt, key: key, xImmichChecksum: xImmichChecksum, duration: duration, isArchived: isArchived, isFavorite: isFavorite, isVisible: isVisible, livePhotoVideoId: livePhotoVideoId, sidecarData: sidecarData, );
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
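The regenerated client above simply loses the `isOffline` multipart field; callers that never passed it are unaffected. A hedged sketch of an upload call against the new signature; the `MultipartFile.fromBytes` construction, field name, and filename are illustrative:

```dart
import 'package:http/http.dart' show MultipartFile;
import 'package:openapi/api.dart';

Future<AssetMediaResponseDto?> uploadOne(
  AssetsApi api, {
  required List<int> bytes,
  required String deviceAssetId,
  required String deviceId,
}) {
  final now = DateTime.now();
  return api.uploadAsset(
    MultipartFile.fromBytes('assetData', bytes, filename: 'photo.jpg'),
    deviceAssetId,
    deviceId,
    now, // fileCreatedAt
    now, // fileModifiedAt
    isFavorite: false,
    // isOffline no longer exists on this endpoint.
  );
}
```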

View File

@@ -243,13 +243,13 @@ class LibrariesApi {
return null;
}
/// Performs an HTTP 'POST /libraries/{id}/removeOffline' operation and returns the [Response].
/// Performs an HTTP 'POST /libraries/{id}/scan' operation and returns the [Response].
/// Parameters:
///
/// * [String] id (required):
Future<Response> removeOfflineFilesWithHttpInfo(String id,) async {
Future<Response> scanLibraryWithHttpInfo(String id,) async {
// ignore: prefer_const_declarations
final path = r'/libraries/{id}/removeOffline'
final path = r'/libraries/{id}/scan'
.replaceAll('{id}', id);
// ignore: prefer_final_locals
@@ -276,52 +276,8 @@ class LibrariesApi {
/// Parameters:
///
/// * [String] id (required):
Future<void> removeOfflineFiles(String id,) async {
final response = await removeOfflineFilesWithHttpInfo(id,);
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
}
/// Performs an HTTP 'POST /libraries/{id}/scan' operation and returns the [Response].
/// Parameters:
///
/// * [String] id (required):
///
/// * [ScanLibraryDto] scanLibraryDto (required):
Future<Response> scanLibraryWithHttpInfo(String id, ScanLibraryDto scanLibraryDto,) async {
// ignore: prefer_const_declarations
final path = r'/libraries/{id}/scan'
.replaceAll('{id}', id);
// ignore: prefer_final_locals
Object? postBody = scanLibraryDto;
final queryParams = <QueryParam>[];
final headerParams = <String, String>{};
final formParams = <String, String>{};
const contentTypes = <String>['application/json'];
return apiClient.invokeAPI(
path,
'POST',
queryParams,
postBody,
headerParams,
formParams,
contentTypes.isEmpty ? null : contentTypes.first,
);
}
/// Parameters:
///
/// * [String] id (required):
///
/// * [ScanLibraryDto] scanLibraryDto (required):
Future<void> scanLibrary(String id, ScanLibraryDto scanLibraryDto,) async {
final response = await scanLibraryWithHttpInfo(id, scanLibraryDto,);
Future<void> scanLibrary(String id,) async {
final response = await scanLibraryWithHttpInfo(id,);
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
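With `removeOfflineFiles` gone and `ScanLibraryDto` dropped, a library scan is now a single parameterless POST per library. A hedged sketch against the regenerated client; the helper name is illustrative:

```dart
import 'package:openapi/api.dart';

/// Queues a scan for each library id; the endpoint no longer takes a body.
Future<void> rescanLibraries(LibrariesApi api, Iterable<String> libraryIds) async {
  for (final id in libraryIds) {
    await api.scanLibrary(id);
  }
}
```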

View File

@@ -48,10 +48,18 @@ class NotificationsApi {
/// Parameters:
///
/// * [SystemConfigSmtpDto] systemConfigSmtpDto (required):
Future<void> sendTestEmail(SystemConfigSmtpDto systemConfigSmtpDto,) async {
Future<TestEmailResponseDto?> sendTestEmail(SystemConfigSmtpDto systemConfigSmtpDto,) async {
final response = await sendTestEmailWithHttpInfo(systemConfigSmtpDto,);
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
}
// When a remote server returns no body with a status of 204, we shall not decode it.
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
// FormatException when trying to decode an empty string.
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'TestEmailResponseDto',) as TestEmailResponseDto;
}
return null;
}
}
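`sendTestEmail` now deserializes a `TestEmailResponseDto` instead of returning void, so callers can surface the SMTP `messageId`. A minimal sketch:

```dart
import 'package:openapi/api.dart';

Future<String?> sendTestAndReport(
  NotificationsApi api,
  SystemConfigSmtpDto smtp,
) async {
  final result = await api.sendTestEmail(smtp);
  // Null when the server answers with an empty body; otherwise carries the id.
  return result?.messageId;
}
```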

View File

@@ -448,8 +448,6 @@ class ApiClient {
return ReactionTypeTypeTransformer().decode(value);
case 'ReverseGeocodingStateResponseDto':
return ReverseGeocodingStateResponseDto.fromJson(value);
case 'ScanLibraryDto':
return ScanLibraryDto.fromJson(value);
case 'SearchAlbumResponseDto':
return SearchAlbumResponseDto.fromJson(value);
case 'SearchAssetResponseDto':
@@ -574,6 +572,8 @@ class ApiClient {
return TagsResponse.fromJson(value);
case 'TagsUpdate':
return TagsUpdate.fromJson(value);
case 'TestEmailResponseDto':
return TestEmailResponseDto.fromJson(value);
case 'TimeBucketResponseDto':
return TimeBucketResponseDto.fromJson(value);
case 'TimeBucketSize':

View File

@@ -1,125 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class ScanLibraryDto {
/// Returns a new [ScanLibraryDto] instance.
ScanLibraryDto({
this.refreshAllFiles,
this.refreshModifiedFiles,
});
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
bool? refreshAllFiles;
///
/// Please note: This property should have been non-nullable! Since the specification file
/// does not include a default value (using the "default:" property), however, the generated
/// source code must fall back to having a nullable type.
/// Consider adding a "default:" property in the specification file to hide this note.
///
bool? refreshModifiedFiles;
@override
bool operator ==(Object other) => identical(this, other) || other is ScanLibraryDto &&
other.refreshAllFiles == refreshAllFiles &&
other.refreshModifiedFiles == refreshModifiedFiles;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(refreshAllFiles == null ? 0 : refreshAllFiles!.hashCode) +
(refreshModifiedFiles == null ? 0 : refreshModifiedFiles!.hashCode);
@override
String toString() => 'ScanLibraryDto[refreshAllFiles=$refreshAllFiles, refreshModifiedFiles=$refreshModifiedFiles]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
if (this.refreshAllFiles != null) {
json[r'refreshAllFiles'] = this.refreshAllFiles;
} else {
// json[r'refreshAllFiles'] = null;
}
if (this.refreshModifiedFiles != null) {
json[r'refreshModifiedFiles'] = this.refreshModifiedFiles;
} else {
// json[r'refreshModifiedFiles'] = null;
}
return json;
}
/// Returns a new [ScanLibraryDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static ScanLibraryDto? fromJson(dynamic value) {
upgradeDto(value, "ScanLibraryDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return ScanLibraryDto(
refreshAllFiles: mapValueOfType<bool>(json, r'refreshAllFiles'),
refreshModifiedFiles: mapValueOfType<bool>(json, r'refreshModifiedFiles'),
);
}
return null;
}
static List<ScanLibraryDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <ScanLibraryDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = ScanLibraryDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, ScanLibraryDto> mapFromJson(dynamic json) {
final map = <String, ScanLibraryDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = ScanLibraryDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of ScanLibraryDto-objects as value to a dart map
static Map<String, List<ScanLibraryDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<ScanLibraryDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = ScanLibraryDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
};
}

View File

@@ -0,0 +1,99 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18
// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars
part of openapi.api;
class TestEmailResponseDto {
/// Returns a new [TestEmailResponseDto] instance.
TestEmailResponseDto({
required this.messageId,
});
String messageId;
@override
bool operator ==(Object other) => identical(this, other) || other is TestEmailResponseDto &&
other.messageId == messageId;
@override
int get hashCode =>
// ignore: unnecessary_parenthesis
(messageId.hashCode);
@override
String toString() => 'TestEmailResponseDto[messageId=$messageId]';
Map<String, dynamic> toJson() {
final json = <String, dynamic>{};
json[r'messageId'] = this.messageId;
return json;
}
/// Returns a new [TestEmailResponseDto] instance and imports its values from
/// [value] if it's a [Map], null otherwise.
// ignore: prefer_constructors_over_static_methods
static TestEmailResponseDto? fromJson(dynamic value) {
upgradeDto(value, "TestEmailResponseDto");
if (value is Map) {
final json = value.cast<String, dynamic>();
return TestEmailResponseDto(
messageId: mapValueOfType<String>(json, r'messageId')!,
);
}
return null;
}
static List<TestEmailResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
final result = <TestEmailResponseDto>[];
if (json is List && json.isNotEmpty) {
for (final row in json) {
final value = TestEmailResponseDto.fromJson(row);
if (value != null) {
result.add(value);
}
}
}
return result.toList(growable: growable);
}
static Map<String, TestEmailResponseDto> mapFromJson(dynamic json) {
final map = <String, TestEmailResponseDto>{};
if (json is Map && json.isNotEmpty) {
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
for (final entry in json.entries) {
final value = TestEmailResponseDto.fromJson(entry.value);
if (value != null) {
map[entry.key] = value;
}
}
}
return map;
}
// maps a json object with a list of TestEmailResponseDto-objects as value to a dart map
static Map<String, List<TestEmailResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
final map = <String, List<TestEmailResponseDto>>{};
if (json is Map && json.isNotEmpty) {
// ignore: parameter_assignments
json = json.cast<String, dynamic>();
for (final entry in json.entries) {
map[entry.key] = TestEmailResponseDto.listFromJson(entry.value, growable: growable,);
}
}
return map;
}
/// The list of required keys that must be present in a JSON.
static const requiredKeys = <String>{
'messageId',
};
}

View File

@@ -2,7 +2,7 @@ name: immich_mobile
description: Immich - selfhosted backup media file on mobile phone
publish_to: 'none'
version: 1.115.0+159
version: 1.116.1+161
environment:
sdk: '>=3.3.0 <4.0.0'

View File

@@ -2853,41 +2853,6 @@
]
}
},
"/libraries/{id}/removeOffline": {
"post": {
"operationId": "removeOfflineFiles",
"parameters": [
{
"name": "id",
"required": true,
"in": "path",
"schema": {
"format": "uuid",
"type": "string"
}
}
],
"responses": {
"204": {
"description": ""
}
},
"security": [
{
"bearer": []
},
{
"cookie": []
},
{
"api_key": []
}
],
"tags": [
"Libraries"
]
}
},
"/libraries/{id}/scan": {
"post": {
"operationId": "scanLibrary",
@@ -2902,16 +2867,6 @@
}
}
],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/ScanLibraryDto"
}
}
},
"required": true
},
"responses": {
"204": {
"description": ""
@@ -3491,6 +3446,13 @@
},
"responses": {
"200": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/TestEmailResponseDto"
}
}
},
"description": ""
}
},
@@ -7447,7 +7409,7 @@
"info": {
"title": "Immich",
"description": "Immich API",
"version": "1.115.0",
"version": "1.116.1",
"contact": {}
},
"tags": [],
@@ -8280,9 +8242,6 @@
"isFavorite": {
"type": "boolean"
},
"isOffline": {
"type": "boolean"
},
"isVisible": {
"type": "boolean"
},
@@ -10621,17 +10580,6 @@
],
"type": "object"
},
"ScanLibraryDto": {
"properties": {
"refreshAllFiles": {
"type": "boolean"
},
"refreshModifiedFiles": {
"type": "boolean"
}
},
"type": "object"
},
"SearchAlbumResponseDto": {
"properties": {
"count": {
@@ -12348,6 +12296,17 @@
},
"type": "object"
},
"TestEmailResponseDto": {
"properties": {
"messageId": {
"type": "string"
}
},
"required": [
"messageId"
],
"type": "object"
},
"TimeBucketResponseDto": {
"properties": {
"count": {

View File

@@ -1,12 +1,12 @@
{
"name": "@immich/sdk",
"version": "1.115.0",
"version": "1.116.1",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@immich/sdk",
"version": "1.115.0",
"version": "1.116.1",
"license": "GNU Affero General Public License version 3",
"dependencies": {
"@oazapfts/runtime": "^1.0.2"

View File

@@ -1,6 +1,6 @@
{
"name": "@immich/sdk",
"version": "1.115.0",
"version": "1.116.1",
"description": "Auto-generated TypeScript SDK for the Immich API",
"type": "module",
"main": "./build/index.js",

View File

@@ -1,6 +1,6 @@
/**
* Immich
* 1.115.0
* 1.116.1
* DO NOT MODIFY - This file has been generated using oazapfts.
* See https://www.npmjs.com/package/oazapfts
*/
@@ -366,7 +366,6 @@ export type AssetMediaCreateDto = {
fileModifiedAt: string;
isArchived?: boolean;
isFavorite?: boolean;
isOffline?: boolean;
isVisible?: boolean;
livePhotoVideoId?: string;
sidecarData?: Blob;
@@ -579,10 +578,6 @@ export type UpdateLibraryDto = {
importPaths?: string[];
name?: string;
};
export type ScanLibraryDto = {
refreshAllFiles?: boolean;
refreshModifiedFiles?: boolean;
};
export type LibraryStatsResponseDto = {
photos: number;
total: number;
@@ -656,6 +651,9 @@ export type SystemConfigSmtpDto = {
replyTo: string;
transport: SystemConfigSmtpTransportDto;
};
export type TestEmailResponseDto = {
messageId: string;
};
export type OAuthConfigDto = {
redirectUri: string;
};
@@ -2063,24 +2061,14 @@ export function updateLibrary({ id, updateLibraryDto }: {
body: updateLibraryDto
})));
}
export function removeOfflineFiles({ id }: {
export function scanLibrary({ id }: {
id: string;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchText(`/libraries/${encodeURIComponent(id)}/removeOffline`, {
return oazapfts.ok(oazapfts.fetchText(`/libraries/${encodeURIComponent(id)}/scan`, {
...opts,
method: "POST"
}));
}
export function scanLibrary({ id, scanLibraryDto }: {
id: string;
scanLibraryDto: ScanLibraryDto;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchText(`/libraries/${encodeURIComponent(id)}/scan`, oazapfts.json({
...opts,
method: "POST",
body: scanLibraryDto
})));
}
export function getLibraryStatistics({ id }: {
id: string;
}, opts?: Oazapfts.RequestOpts) {
@@ -2220,7 +2208,10 @@ export function addMemoryAssets({ id, bulkIdsDto }: {
export function sendTestEmail({ systemConfigSmtpDto }: {
systemConfigSmtpDto: SystemConfigSmtpDto;
}, opts?: Oazapfts.RequestOpts) {
return oazapfts.ok(oazapfts.fetchText("/notifications/test-email", oazapfts.json({
return oazapfts.ok(oazapfts.fetchJson<{
status: 200;
data: TestEmailResponseDto;
}>("/notifications/test-email", oazapfts.json({
...opts,
method: "POST",
body: systemConfigSmtpDto

View File

@@ -1,84 +1,90 @@
<p align="center">
<br/>
<p align="center">
<br/>
<a href="https://opensource.org/license/agpl-v3"><img src="https://img.shields.io/badge/License-AGPL_v3-blue.svg?color=3F51B5&style=for-the-badge&label=License&logoColor=000000&labelColor=ececec" alt="Licença: AGPLv3"></a>
<a href="https://discord.immich.app">
<img src="https://img.shields.io/discord/979116623879368755.svg?label=Discord&logo=Discord&style=for-the-badge&logoColor=000000&labelColor=ececec" alt="Discord"/>
</a>
<br/>
<br/>
<br/>
<br/>
</p>
<p align="center">
<img src="design/immich-logo.svg" width="150" title="Login com URL customizada">
<img src="../design/immich-logo-stacked-light.svg" width="150" title="Immich Logo">
</p>
<h3 align="center">Immich - Solução self-hosted de alta performance para backup de fotos e vídeos</h3>
<h3 align="center">Solução self-hosted de alta performance para backup de fotos e vídeos</h3>
<br/>
<a href="https://immich.app">
<img src="design/immich-screenshots.png" title="Captura de tela princial">
<img src="../design/immich-screenshots.png" title="Captura de tela princial">
</a>
<br/>
<p align="center">
<a href="../README.md">English</a>
<a href="README_ca_ES.md">Català</a>
<a href="README_es_ES.md">Español</a>
<a href="README_fr_FR.md">Français</a>
<a href="README_it_IT.md">Italiano</a>
<a href="README_ja_JP.md">日本語</a>
<a href="README_ko_KR.md">한국어</a>
<a href="README_de_DE.md">Deutsch</a>
<a href="README_nl_NL.md">Nederlands</a>
<a href="README_tr_TR.md">Türkçe</a>
<a href="README_zh_CN.md">中文</a>
<a href="README_ru_RU.md">Русский</a>
<a href="README_sv_SE.md">Svenska</a>
<a href="README_ar_JO.md">العربية</a>
<a href="../README.md">English</a>
<a href="README_ca_ES.md">Català</a>
<a href="README_es_ES.md">Español</a>
<a href="README_fr_FR.md">Français</a>
<a href="README_it_IT.md">Italiano</a>
<a href="README_ja_JP.md">日本語</a>
<a href="README_ko_KR.md">한국어</a>
<a href="README_de_DE.md">Deutsch</a>
<a href="README_nl_NL.md">Nederlands</a>
<a href="README_tr_TR.md">Türkçe</a>
<a href="README_zh_CN.md">中文</a>
<a href="README_ru_RU.md">Русский</a>
<a href="README_sv_SE.md">Svenska</a>
<a href="README_ar_JO.md">العربية</a>
<a href="README_vi_VN.md">Tiếng Việt</a>
</p>
## Avisos
- ⚠️ Este projeto está sob **desenvolvimento constante**.
- ⚠️ Podem ocorrer bugs e _breaking changes_ (alterações que quebram a compatibilidade com versões anteriores).
- ⚠️ **Não use esta solução como a única forma de fazer backup das suas fotos e vídeos.**
- ⚠️ Sempre siga o plano [3-2-1](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) de backup para as suas mídias preciosas!
- ⚠️ Podem ocorrer bugs e _breaking changes_ (alterações que quebram a
compatibilidade com versões anteriores).
- ⚠️ **Não use esta solução como a única forma de fazer backup das suas fotos e
vídeos.**
- ⚠️ Sempre siga o plano
[3-2-1](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) de backup
para as suas mídias preciosas!
## Conteúdo
> [!NOTE]
> Você pode encontrar a documentação principal, incluindo guias de instalação, em https://immich.app/.
- [Documentação Oficial](https://immich.app/docs)
- [Roadmap](https://github.com/orgs/immich-app/projects/1)
- [Demonstração](#demo)
- [Recursos](#features)
- [Introdução](https://immich.app/docs/overview/introduction)
## Links
- [Documentação](https://immich.app/docs)
- [Sobre](https://immich.app/docs/overview/introduction)
- [Instalação](https://immich.app/docs/install/requirements)
- [Roadmap](https://github.com/orgs/immich-app/projects/1)
- [Demonstração](#demonstração)
- [Funcionalidades](#funcionalidades)
- [Traduções](https://immich.app/docs/developer/translations)
- [Diretrizes de Contribuição](https://immich.app/docs/overview/support-the-project)
## Documentação
Você pode encontrar a documentação principal, incluindo guias de instalação, em https://immich.app/.
## Demonstração
Você pode acessar a demonstração web em https://demo.immich.app
Acesse a demonstração [aqui](https://demo.immich.app). A demonstração está
hospedada no Nível Gratuito da Oracle VM em Amsterdam com um processador 2.4Ghz
quad-core ARM64 e 24GB de RAM.
No aplicativo para dispositivos móveis, você pode usar `https://demo.immich.app/api` no campo `Server Endpoint URL`
No aplicativo para dispositivos móveis, você pode usar
`https://demo.immich.app/api` no campo `Server Endpoint URL`
```bash title="Credenciais de Demonstração"
Credenciais de Demonstração
email: demo@immich.app
senha: demo
```
### Credenciais de login
```
Especificações: Nível Gratuito da Oracle VM - Amsterdam - 2.4Ghz quad-core ARM64 CPU, 24GB RAM
```
| Email | Senha |
| --------------- | ----- |
| demo@immich.app | demo |
## Atividades
![Atividades](https://repobeats.axiom.co/api/embed/9e86d9dc3ddd137161f2f6d2e758d7863b1789cb.svg "Imagem de Analytics do Repobeats")
## Recursos
## Funcionalidades
| Recursos | Aplicativo Móvel | Web |
|:----------------------------------------------------|------------------|-----|
| Funcionalidades | Aplicativo Móvel | Web |
| :-------------------------------------------------- | ---------------- | --- |
| Fazer upload e visualizar fotos e vídeos | Sim | Sim |
| Backup automático ao abrir o aplicativo | Sim | N/A |
| Prevenir a duplicação de arquivos | Sim | Sim |
@@ -88,17 +94,17 @@ Especificações: Nível Gratuito da Oracle VM - Amsterdam - 2.4Ghz quad-core AR
| Criação de álbuns e álbuns compartilhados | Sim | Sim |
| Barra de rolagem arrastável | Sim | Sim |
| Suporta formatos RAW | Sim | Sim |
| Visualização de metadados (EXIF, map) | Sim | Sim |
| Pesquisar por metadados, objetos, rostos, and CLIP | Sim | Sim |
| Visualização de metadados (EXIF, mapa) | Sim | Sim |
| Pesquisar por metadados, objetos, rostos, e CLIP | Sim | Sim |
| Funções administrativas (gerenciamento de usuários) | Não | Sim |
| Backup em segundo plano | Sim | N/A |
| Virtual scroll | Sim | Sim |
| Rolagem virtual | Sim | Sim |
| Suporte OAuth | Sim | Sim |
| Chaves de API | N/A | Sim |
| Backup e visualização de LivePhoto/MotionPhoto | Sim | Sim |
| Backup e reprodução de LivePhoto/MotionPhoto | Sim | Sim |
| Visualização de imagens 360º | Não | Sim |
| Estrutura de armazenamento definida pelo usuário | Sim | Sim |
| Compartilhar com o público | Não | Sim |
| Compartilhar com o público | Sim | Sim |
| Arquivo e Favoritos | Sim | Sim |
| Mapa Global | Sim | Sim |
| Compartilhamento com parceiro | Sim | Sim |
@@ -108,6 +114,29 @@ Especificações: Nível Gratuito da Oracle VM - Amsterdam - 2.4Ghz quad-core AR
| Galeria em modo apenas leitura | Sim | Sim |
| Empilhamento de fotos | Sim | Sim |
## Traduções
Leia mais sobre as traduções
[aqui](https://immich.app/docs/developer/translations).
<a href="https://hosted.weblate.org/engage/immich/">
<img src="https://hosted.weblate.org/widget/immich/immich/multi-auto.svg" alt="Status da tradução" />
</a>
## Atividade do repositório
![Activities](https://repobeats.axiom.co/api/embed/9e86d9dc3ddd137161f2f6d2e758d7863b1789cb.svg "Imagem de análise de atividade Repobeats")
## Histórico de estrelas
<a href="https://star-history.com/#immich-app/immich&Date">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=immich-app/immich&type=Date&theme=dark" />
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" />
<img alt="Gráfico de histórico de estrelas" src="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" width="100%" />
</picture>
</a>
## Contribuidores
<a href="https://github.com/alextran1502/immich/graphs/contributors">

readme_i18n/README_vi_VN.md (new file, 133 lines)
View File

@@ -0,0 +1,133 @@
<p align="center">
<br/>
<a href="https://opensource.org/license/agpl-v3"><img src="https://img.shields.io/badge/License-AGPL_v3-blue.svg?color=3F51B5&style=for-the-badge&label=License&logoColor=000000&labelColor=ececec" alt="Giấy phép: AGPLv3"></a>
<a href="https://discord.immich.app">
<img src="https://img.shields.io/discord/979116623879368755.svg?label=Discord&logo=Discord&style=for-the-badge&logoColor=000000&labelColor=ececec" alt="Discord"/>
</a>
<br/>
<br/>
</p>
<p align="center">
<img src="../design/immich-logo-stacked-light.svg" width="300" title="Đăng nhập bằng URL Tuỳ chỉnh">
</p>
<h3 align="center">Giải pháp quản lý ảnh và video tự lưu trữ hiệu suất cao</h3>
<br/>
<a href="https://immich.app">
<img src="../design/immich-screenshots.png" title="Ảnh chụp màn hình chính">
</a>
<br/>
<p align="center">
<a href="../README.md">English</a>
<a href="README_ca_ES.md">Català</a>
<a href="README_es_ES.md">Español</a>
<a href="README_fr_FR.md">Français</a>
<a href="README_it_IT.md">Italiano</a>
<a href="README_ja_JP.md">日本語</a>
<a href="README_ko_KR.md">한국어</a>
<a href="README_de_DE.md">Deutsch</a>
<a href="README_nl_NL.md">Nederlands</a>
<a href="README_tr_TR.md">Türkçe</a>
<a href="README_zh_CN.md">中文</a>
<a href="README_ru_RU.md">Русский</a>
<a href="README_pt_BR.md">Português Brasileiro</a>
<a href="README_sv_SE.md">Svenska</a>
<a href="README_ar_JO.md">العربية</a>
<a href="README_vi_VN.md">Tiếng Việt</a>
</p>
## Tuyên bố miễn trừ trách nhiệm
- ⚠️ Dự án đang được phát triển **rất tích cực**.
- ⚠️ Dự kiến sẽ có lỗi và thay đổi đột ngột.
- ⚠️ **Không sử dụng ứng dụng như là cách duy nhất để lưu trữ ảnh và video của bạn.**
- ⚠️ Luôn tuân thủ kế hoạch sao lưu [3-2-1](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) cho những bức ảnh và video quý giá của bạn!
> [!NOTE]
> Bạn có thể tìm thấy tài liệu chính, bao gồm hướng dẫn cài đặt, tại https://immich.app/.
## Liên kết
- [Tài liệu](https://immich.app/docs)
- [Giới thiệu](https://immich.app/docs/overview/introduction)
- [Cài đặt](https://immich.app/docs/install/requirements)
- [Lộ trình](https://immich.app/roadmap)
- [Demo](#demo)
- [Tính năng](#Tính-năng)
- [Dịch thuật](https://immich.app/docs/developer/translations)
- [Đóng góp](https://immich.app/docs/overview/support-the-project)
## Demo
Truy cập bản demo [tại đây](https://demo.immich.app). Bản demo đang chạy trên máy ảo Oracle Free-tier ở Amsterdam với CPU ARM64 lõi tứ 2,4 GHz và RAM 24 GB.
Đối với ứng dụng di động, bạn có thể sử dụng `https://demo.immich.app/api` cho `Server Endpoint URL`
### Thông tin đăng nhập
| Email | Mật khẩu |
| --------------- | -------- |
| demo@immich.app | demo |
## Tính năng
| Tính năng | Mobile | Web |
| :--------------------------------------------------- | ------ | ----- |
| Tải lên và xem video, ảnh | Có | Có |
| Tự động sao lưu khi ứng dụng được mở | Có | N/A |
| Ngăn chặn sự trùng lặp nội dung | Có | Có |
| Album được chọn để sao lưu | Có | N/A |
| Tải ảnh và video xuống thiết bị cục bộ | Có | Có |
| Hỗ trợ nhiều người dùng | Có | Có |
| Album và Album được chia sẻ | Có | Có |
| Thanh cuộn có thể chà / kéo | Có | Có |
| Hỗ trợ định dạng raw | Có | Có |
| Xem metadata (EXIF, bản đồ) | Có | Có |
| Tìm kiếm theo metadata, đối tượng, khuôn mặt và CLIP | Có | Có |
| Chức năng quản trị (quản lý người dùng) | Không | Có |
| Sao lưu trong nền | Có | N/A |
| Cuộn ảo | Có | Có |
| Hỗ trợ OAuth | Có | Có |
| API Keys | N/A | Có |
| Sao lưu và phát lại Live Photo/Motion Photo | Có | Có |
| Hỗ trợ hiển thị hình ảnh 360 độ | Không | Có |
| Cấu trúc lưu trữ do người dùng xác định | Có | Có |
| Chia sẻ công khai | Có | Có |
| Lưu trữ và Yêu thích | Có | Có |
| Bản đồ toàn cầu | Có | Có |
| Chia sẻ đối tác | Có | Có |
| Nhận dạng khuôn mặt và phân cụm | Có | Có |
| Kỷ niệm (x năm trước) | Có | Có |
| Hỗ trợ ngoại tuyến | Có | Không |
| Thư viện chỉ đọc | Có | Có |
| Ảnh xếp chồng | Có | Có |
## Dịch thuật
Đọc thêm về dịch thuật [tại đây](https://immich.app/docs/developer/translations).
<a href="https://hosted.weblate.org/engage/immich/">
<img src="https://hosted.weblate.org/widget/immich/immich/multi-auto.svg" alt="Tình trạng dịch thuật" />
</a>
## Hoạt động của repository
![Hoạt động](https://repobeats.axiom.co/api/embed/9e86d9dc3ddd137161f2f6d2e758d7863b1789cb.svg "Hình ảnh phân tích Repobeats")
## Lịch sử Đánh dấu sao
<a href="https://star-history.com/#immich-app/immich&Date">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=immich-app/immich&type=Date&theme=dark" />
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" />
<img alt="Biểu đồ Lịch sử Đánh dấu" src="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" width="100%" />
</picture>
</a>
## Người đóng góp
<a href="https://github.com/alextran1502/immich/graphs/contributors">
<img src="https://contrib.rocks/image?repo=immich-app/immich" width="100%"/>
</a>

View File

@@ -1,12 +1,12 @@
{
"name": "immich",
"version": "1.115.0",
"version": "1.116.1",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "immich",
"version": "1.115.0",
"version": "1.116.1",
"license": "GNU Affero General Public License version 3",
"dependencies": {
"@nestjs/bullmq": "^10.0.1",

View File

@@ -1,6 +1,6 @@
{
"name": "immich",
"version": "1.115.0",
"version": "1.116.1",
"description": "",
"author": "",
"private": true,

View File

@@ -7,83 +7,20 @@ import { RedisOptions } from 'ioredis';
import Joi, { Root } from 'joi';
import { CLS_ID, ClsModuleOptions } from 'nestjs-cls';
import { ImmichHeader } from 'src/dtos/auth.dto';
import {
AudioCodec,
Colorspace,
CQMode,
ImageFormat,
LogLevel,
ToneMapping,
TranscodeHWAccel,
TranscodePolicy,
VideoCodec,
VideoContainer,
} from 'src/enum';
import { ConcurrentQueueName, QueueName } from 'src/interfaces/job.interface';
export enum TranscodePolicy {
ALL = 'all',
OPTIMAL = 'optimal',
BITRATE = 'bitrate',
REQUIRED = 'required',
DISABLED = 'disabled',
}
export enum TranscodeTarget {
NONE,
AUDIO,
VIDEO,
ALL,
}
export enum VideoCodec {
H264 = 'h264',
HEVC = 'hevc',
VP9 = 'vp9',
AV1 = 'av1',
}
export enum AudioCodec {
MP3 = 'mp3',
AAC = 'aac',
LIBOPUS = 'libopus',
}
export enum VideoContainer {
MOV = 'mov',
MP4 = 'mp4',
OGG = 'ogg',
WEBM = 'webm',
}
export enum TranscodeHWAccel {
NVENC = 'nvenc',
QSV = 'qsv',
VAAPI = 'vaapi',
RKMPP = 'rkmpp',
DISABLED = 'disabled',
}
export enum ToneMapping {
HABLE = 'hable',
MOBIUS = 'mobius',
REINHARD = 'reinhard',
DISABLED = 'disabled',
}
export enum CQMode {
AUTO = 'auto',
CQP = 'cqp',
ICQ = 'icq',
}
export enum Colorspace {
SRGB = 'srgb',
P3 = 'p3',
}
export enum ImageFormat {
JPEG = 'jpeg',
WEBP = 'webp',
}
export enum LogLevel {
VERBOSE = 'verbose',
DEBUG = 'debug',
LOG = 'log',
WARN = 'warn',
ERROR = 'error',
FATAL = 'fatal',
}
export interface SystemConfig {
ffmpeg: {
crf: number;

View File

@@ -54,11 +54,6 @@ export const resourcePaths = {
export const MOBILE_REDIRECT = 'app.immich:///oauth-callback';
export const LOGIN_URL = '/auth/login?autoLaunch=0';
export enum AuthType {
PASSWORD = 'password',
OAUTH = 'oauth',
}
export const excludePaths = ['/.well-known/immich', '/custom.css', '/favicon.ico'];
export const FACE_THUMBNAIL_SIZE = 250;

View File

@@ -33,16 +33,17 @@ import {
UploadFieldName,
} from 'src/dtos/asset-media.dto';
import { AuthDto, ImmichHeader } from 'src/dtos/auth.dto';
import { RouteKey } from 'src/enum';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
import { AssetUploadInterceptor } from 'src/middleware/asset-upload.interceptor';
import { Auth, Authenticated, FileResponse } from 'src/middleware/auth.guard';
import { FileUploadInterceptor, Route, UploadFiles, getFiles } from 'src/middleware/file-upload.interceptor';
import { FileUploadInterceptor, UploadFiles, getFiles } from 'src/middleware/file-upload.interceptor';
import { AssetMediaService } from 'src/services/asset-media.service';
import { sendFile } from 'src/utils/file';
import { FileNotEmptyValidator, UUIDParamDto } from 'src/validation';
@ApiTags('Assets')
@Controller(Route.ASSET)
@Controller(RouteKey.ASSET)
export class AssetMediaController {
constructor(
@Inject(ILoggerRepository) private logger: ILoggerRepository,

View File

@@ -14,13 +14,13 @@ import {
} from 'src/dtos/asset.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { MemoryLaneDto } from 'src/dtos/search.dto';
import { RouteKey } from 'src/enum';
import { Auth, Authenticated } from 'src/middleware/auth.guard';
import { Route } from 'src/middleware/file-upload.interceptor';
import { AssetService } from 'src/services/asset.service';
import { UUIDParamDto } from 'src/validation';
@ApiTags('Assets')
@Controller(Route.ASSET)
@Controller(RouteKey.ASSET)
export class AssetController {
constructor(private service: AssetService) {}

View File

@@ -1,7 +1,6 @@
import { Body, Controller, HttpCode, HttpStatus, Post, Req, Res } from '@nestjs/common';
import { ApiTags } from '@nestjs/swagger';
import { Request, Response } from 'express';
import { AuthType } from 'src/constants';
import {
AuthDto,
ChangePasswordDto,
@@ -13,6 +12,7 @@ import {
ValidateAccessTokenResponseDto,
} from 'src/dtos/auth.dto';
import { UserAdminResponseDto } from 'src/dtos/user.dto';
import { AuthType } from 'src/enum';
import { Auth, Authenticated, GetLoginDetails } from 'src/middleware/auth.guard';
import { AuthService, LoginDetails } from 'src/services/auth.service';
import { respondWithCookie, respondWithoutCookie } from 'src/utils/response';

View File

@@ -4,7 +4,6 @@ import {
CreateLibraryDto,
LibraryResponseDto,
LibraryStatsResponseDto,
ScanLibraryDto,
UpdateLibraryDto,
ValidateLibraryDto,
ValidateLibraryResponseDto,
@@ -43,6 +42,13 @@ export class LibraryController {
return this.service.update(id, dto);
}
@Delete(':id')
@HttpCode(HttpStatus.NO_CONTENT)
@Authenticated({ permission: Permission.LIBRARY_DELETE, admin: true })
deleteLibrary(@Param() { id }: UUIDParamDto): Promise<void> {
return this.service.delete(id);
}
@Post(':id/validate')
@HttpCode(200)
@Authenticated({ admin: true })
@@ -51,13 +57,6 @@ export class LibraryController {
return this.service.validate(id, dto);
}
@Delete(':id')
@HttpCode(HttpStatus.NO_CONTENT)
@Authenticated({ permission: Permission.LIBRARY_DELETE, admin: true })
deleteLibrary(@Param() { id }: UUIDParamDto): Promise<void> {
return this.service.delete(id);
}
@Get(':id/statistics')
@Authenticated({ permission: Permission.LIBRARY_STATISTICS, admin: true })
getLibraryStatistics(@Param() { id }: UUIDParamDto): Promise<LibraryStatsResponseDto> {
@@ -66,15 +65,8 @@ export class LibraryController {
@Post(':id/scan')
@HttpCode(HttpStatus.NO_CONTENT)
@Authenticated({ admin: true })
scanLibrary(@Param() { id }: UUIDParamDto, @Body() dto: ScanLibraryDto) {
return this.service.queueScan(id, dto);
}
@Post(':id/removeOffline')
@HttpCode(HttpStatus.NO_CONTENT)
@Authenticated({ admin: true })
removeOfflineFiles(@Param() { id }: UUIDParamDto) {
return this.service.queueRemoveOffline(id);
@Authenticated({ permission: Permission.LIBRARY_UPDATE, admin: true })
scanLibrary(@Param() { id }: UUIDParamDto) {
return this.service.queueScan(id);
}
}

View File

@@ -1,6 +1,7 @@
import { Body, Controller, HttpCode, HttpStatus, Post } from '@nestjs/common';
import { ApiTags } from '@nestjs/swagger';
import { AuthDto } from 'src/dtos/auth.dto';
import { TestEmailResponseDto } from 'src/dtos/notification.dto';
import { SystemConfigSmtpDto } from 'src/dtos/system-config.dto';
import { Auth, Authenticated } from 'src/middleware/auth.guard';
import { NotificationService } from 'src/services/notification.service';
@@ -13,7 +14,7 @@ export class NotificationController {
@Post('test-email')
@HttpCode(HttpStatus.OK)
@Authenticated({ admin: true })
sendTestEmail(@Auth() auth: AuthDto, @Body() dto: SystemConfigSmtpDto) {
sendTestEmail(@Auth() auth: AuthDto, @Body() dto: SystemConfigSmtpDto): Promise<TestEmailResponseDto> {
return this.service.sendTestEmail(auth.user.id, dto);
}
}

View File

@@ -1,7 +1,6 @@
import { Body, Controller, Get, HttpCode, HttpStatus, Post, Redirect, Req, Res } from '@nestjs/common';
import { ApiTags } from '@nestjs/swagger';
import { Request, Response } from 'express';
import { AuthType } from 'src/constants';
import {
AuthDto,
ImmichCookie,
@@ -11,6 +10,7 @@ import {
OAuthConfigDto,
} from 'src/dtos/auth.dto';
import { UserAdminResponseDto } from 'src/dtos/user.dto';
import { AuthType } from 'src/enum';
import { Auth, Authenticated, GetLoginDetails } from 'src/middleware/auth.guard';
import { AuthService, LoginDetails } from 'src/services/auth.service';
import { respondWithCookie } from 'src/utils/response';

View File

@@ -21,15 +21,16 @@ import { LicenseKeyDto, LicenseResponseDto } from 'src/dtos/license.dto';
import { UserPreferencesResponseDto, UserPreferencesUpdateDto } from 'src/dtos/user-preferences.dto';
import { CreateProfileImageDto, CreateProfileImageResponseDto } from 'src/dtos/user-profile.dto';
import { UserAdminResponseDto, UserResponseDto, UserUpdateMeDto } from 'src/dtos/user.dto';
import { RouteKey } from 'src/enum';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
import { Auth, Authenticated, FileResponse } from 'src/middleware/auth.guard';
import { FileUploadInterceptor, Route } from 'src/middleware/file-upload.interceptor';
import { FileUploadInterceptor } from 'src/middleware/file-upload.interceptor';
import { UserService } from 'src/services/user.service';
import { sendFile } from 'src/utils/file';
import { UUIDParamDto } from 'src/validation';
@ApiTags('Users')
@Controller(Route.USER)
@Controller(RouteKey.USER)
export class UserController {
constructor(
private service: UserService,

View File

@@ -1,12 +1,10 @@
import { randomUUID } from 'node:crypto';
import { dirname, join, resolve } from 'node:path';
import { ImageFormat } from 'src/config';
import { APP_MEDIA_LOCATION } from 'src/constants';
import { SystemConfigCore } from 'src/cores/system-config.core';
import { AssetEntity } from 'src/entities/asset.entity';
import { AssetPathType, PathType, PersonPathType } from 'src/entities/move.entity';
import { PersonEntity } from 'src/entities/person.entity';
import { AssetFileType } from 'src/enum';
import { AssetFileType, AssetPathType, ImageFormat, PathType, PersonPathType, StorageFolder } from 'src/enum';
import { IAssetRepository } from 'src/interfaces/asset.interface';
import { ICryptoRepository } from 'src/interfaces/crypto.interface';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
@@ -16,14 +14,6 @@ import { IStorageRepository } from 'src/interfaces/storage.interface';
import { ISystemMetadataRepository } from 'src/interfaces/system-metadata.interface';
import { getAssetFiles } from 'src/utils/asset.util';
export enum StorageFolder {
ENCODED_VIDEO = 'encoded-video',
LIBRARY = 'library',
UPLOAD = 'upload',
PROFILE = 'profile',
THUMBNAILS = 'thumbs',
}
export const THUMBNAIL_DIR = resolve(join(APP_MEDIA_LOCATION, StorageFolder.THUMBNAILS));
export const ENCODED_VIDEO_DIR = resolve(join(APP_MEDIA_LOCATION, StorageFolder.ENCODED_VIDEO));

View File

@@ -4,8 +4,8 @@ import { OnEventOptions } from '@nestjs/event-emitter/dist/interfaces';
import { ApiExtension, ApiOperation, ApiProperty, ApiTags } from '@nestjs/swagger';
import _ from 'lodash';
import { ADDED_IN_PREFIX, DEPRECATED_IN_PREFIX, LIFECYCLE_EXTENSION } from 'src/constants';
import { MetadataKey } from 'src/enum';
import { EmitEvent, ServerEvent } from 'src/interfaces/event.interface';
import { Metadata } from 'src/middleware/auth.guard';
import { setUnion } from 'src/utils/set';
// PostgreSQL uses a 16-bit integer to indicate the number of bound parameters. This means that the
@@ -141,7 +141,7 @@ export type EmitConfig = {
/** lower value has higher priority, defaults to 0 */
priority?: number;
};
export const OnEmit = (config: EmitConfig) => SetMetadata(Metadata.ON_EMIT_CONFIG, config);
export const OnEmit = (config: EmitConfig) => SetMetadata(MetadataKey.ON_EMIT_CONFIG, config);
type LifecycleRelease = 'NEXT_RELEASE' | string;
type LifecycleMetadata = {

View File

@@ -56,9 +56,6 @@ export class AssetMediaCreateDto extends AssetMediaBase {
@ValidateBoolean({ optional: true })
isVisible?: boolean;
@ValidateBoolean({ optional: true })
isOffline?: boolean;
@ValidateUUID({ optional: true })
livePhotoVideoId?: string;

View File

@@ -1,8 +1,7 @@
import { ApiProperty } from '@nestjs/swagger';
import { Type } from 'class-transformer';
import { IsArray, IsEnum, IsString, IsUUID, ValidateNested } from 'class-validator';
import { AssetPathType, PathType, PersonPathType, UserPathType } from 'src/entities/move.entity';
import { EntityType } from 'src/enum';
import { AssetPathType, EntityType, PathType, PersonPathType, UserPathType } from 'src/enum';
import { Optional, ValidateDate, ValidateUUID } from 'src/validation';
const PathEnum = Object.values({ ...AssetPathType, ...PersonPathType, ...UserPathType });

View File

@@ -1,7 +1,7 @@
import { ApiProperty } from '@nestjs/swagger';
import { ArrayMaxSize, ArrayUnique, IsNotEmpty, IsString } from 'class-validator';
import { LibraryEntity } from 'src/entities/library.entity';
import { Optional, ValidateBoolean, ValidateUUID } from 'src/validation';
import { Optional, ValidateUUID } from 'src/validation';
export class CreateLibraryDto {
@ValidateUUID()
@@ -89,14 +89,6 @@ export class LibrarySearchDto {
userId?: string;
}
export class ScanLibraryDto {
@ValidateBoolean({ optional: true })
refreshModifiedFiles?: boolean;
@ValidateBoolean({ optional: true })
refreshAllFiles?: boolean;
}
export class LibraryResponseDto {
id!: string;
ownerId!: string;

View File

@@ -0,0 +1,3 @@
export class TestEmailResponseDto {
messageId!: string;
}

View File

@@ -18,20 +18,20 @@ import {
ValidatorConstraint,
ValidatorConstraintInterface,
} from 'class-validator';
import { SystemConfig } from 'src/config';
import { CLIPConfig, DuplicateDetectionConfig, FacialRecognitionConfig } from 'src/dtos/model-config.dto';
import {
AudioCodec,
CQMode,
Colorspace,
ImageFormat,
LogLevel,
SystemConfig,
ToneMapping,
TranscodeHWAccel,
TranscodePolicy,
VideoCodec,
VideoContainer,
} from 'src/config';
import { CLIPConfig, DuplicateDetectionConfig, FacialRecognitionConfig } from 'src/dtos/model-config.dto';
} from 'src/enum';
import { ConcurrentQueueName, QueueName } from 'src/interfaces/job.interface';
import { ValidateBoolean, validateCronExpression } from 'src/validation';

View File

@@ -1,3 +1,4 @@
import { PathType } from 'src/enum';
import { Column, Entity, PrimaryGeneratedColumn, Unique } from 'typeorm';
@Entity('move_history')
@@ -21,21 +22,3 @@ export class MoveEntity {
@Column({ type: 'varchar' })
newPath!: string;
}
export enum AssetPathType {
ORIGINAL = 'original',
PREVIEW = 'preview',
THUMBNAIL = 'thumbnail',
ENCODED_VIDEO = 'encoded_video',
SIDECAR = 'sidecar',
}
export enum PersonPathType {
FACE = 'face',
}
export enum UserPathType {
PROFILE = 'profile',
}
export type PathType = AssetPathType | PersonPathType | UserPathType;

View File

@@ -1,3 +1,8 @@
export enum AuthType {
PASSWORD = 'password',
OAUTH = 'oauth',
}
export enum AssetType {
IMAGE = 'IMAGE',
VIDEO = 'VIDEO',
@@ -148,6 +153,14 @@ export enum SharedLinkType {
INDIVIDUAL = 'INDIVIDUAL',
}
export enum StorageFolder {
ENCODED_VIDEO = 'encoded-video',
LIBRARY = 'library',
UPLOAD = 'upload',
PROFILE = 'profile',
THUMBNAILS = 'thumbs',
}
export enum SystemMetadataKey {
REVERSE_GEOCODING_STATE = 'reverse-geocoding-state',
FACIAL_RECOGNITION_STATE = 'facial-recognition-state',
@@ -198,3 +211,120 @@ export enum ManualJobName {
TAG_CLEANUP = 'tag-cleanup',
USER_CLEANUP = 'user-cleanup',
}
export enum AssetPathType {
ORIGINAL = 'original',
PREVIEW = 'preview',
THUMBNAIL = 'thumbnail',
ENCODED_VIDEO = 'encoded_video',
SIDECAR = 'sidecar',
}
export enum PersonPathType {
FACE = 'face',
}
export enum UserPathType {
PROFILE = 'profile',
}
export type PathType = AssetPathType | PersonPathType | UserPathType;
export enum TranscodePolicy {
ALL = 'all',
OPTIMAL = 'optimal',
BITRATE = 'bitrate',
REQUIRED = 'required',
DISABLED = 'disabled',
}
export enum TranscodeTarget {
NONE,
AUDIO,
VIDEO,
ALL,
}
export enum VideoCodec {
H264 = 'h264',
HEVC = 'hevc',
VP9 = 'vp9',
AV1 = 'av1',
}
export enum AudioCodec {
MP3 = 'mp3',
AAC = 'aac',
LIBOPUS = 'libopus',
}
export enum VideoContainer {
MOV = 'mov',
MP4 = 'mp4',
OGG = 'ogg',
WEBM = 'webm',
}
export enum TranscodeHWAccel {
NVENC = 'nvenc',
QSV = 'qsv',
VAAPI = 'vaapi',
RKMPP = 'rkmpp',
DISABLED = 'disabled',
}
export enum ToneMapping {
HABLE = 'hable',
MOBIUS = 'mobius',
REINHARD = 'reinhard',
DISABLED = 'disabled',
}
export enum CQMode {
AUTO = 'auto',
CQP = 'cqp',
ICQ = 'icq',
}
export enum Colorspace {
SRGB = 'srgb',
P3 = 'p3',
}
export enum ImageFormat {
JPEG = 'jpeg',
WEBP = 'webp',
}
export enum LogLevel {
VERBOSE = 'verbose',
DEBUG = 'debug',
LOG = 'log',
WARN = 'warn',
ERROR = 'error',
FATAL = 'fatal',
}
export enum MetadataKey {
AUTH_ROUTE = 'auth_route',
ADMIN_ROUTE = 'admin_route',
SHARED_ROUTE = 'shared_route',
API_KEY_SECURITY = 'api_key',
ON_EMIT_CONFIG = 'on_emit_config',
}
export enum RouteKey {
ASSET = 'assets',
USER = 'users',
}
export enum CacheControl {
PRIVATE_WITH_CACHE = 'private_with_cache',
PRIVATE_WITHOUT_CACHE = 'private_without_cache',
NONE = 'none',
}
export enum PaginationMode {
LIMIT_OFFSET = 'limit-offset',
SKIP_TAKE = 'skip-take',
}

View File

@@ -36,8 +36,6 @@ export enum WithoutProperty {
export enum WithProperty {
SIDECAR = 'sidecar',
IS_ONLINE = 'isOnline',
IS_OFFLINE = 'isOffline',
}
export enum TimeBucketSize {
@@ -176,7 +174,6 @@ export interface IAssetRepository {
): Paginated<AssetEntity>;
getRandom(userIds: string[], count: number): Promise<AssetEntity[]>;
getLastUpdatedAssetForAlbumId(albumId: string): Promise<AssetEntity | null>;
getExternalLibraryAssetPaths(pagination: PaginationOptions, libraryId: string): Paginated<AssetPathEntity>;
getByLibraryIdAndOriginalPath(libraryId: string, originalPath: string): Promise<AssetEntity | null>;
deleteAll(ownerId: string): Promise<void>;
getAll(pagination: PaginationOptions, options?: AssetSearchOptions): Paginated<AssetEntity>;

View File

@@ -0,0 +1,14 @@
import { VectorExtension } from 'src/interfaces/database.interface';
export const IConfigRepository = 'IConfigRepository';
export interface EnvData {
database: {
skipMigrations: boolean;
vectorExtension: VectorExtension;
};
}
export interface IConfigRepository {
getEnv(): EnvData;
}
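// Minimal sketch, not part of the diff: a consumer reading environment-derived
// settings through the new repository instead of process.env, mirroring the
// DatabaseService change further down in this compare. ExampleConsumer and
// shouldRunMigrations are hypothetical names.
import { Inject, Injectable } from '@nestjs/common';
import { IConfigRepository } from 'src/interfaces/config.interface';

@Injectable()
export class ExampleConsumer {
  constructor(@Inject(IConfigRepository) private configRepository: IConfigRepository) {}

  shouldRunMigrations(): boolean {
    const { database } = this.configRepository.getEnv();
    return !database.skipMigrations;
  }
}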

View File

@@ -76,12 +76,12 @@ export enum JobName {
FACIAL_RECOGNITION = 'facial-recognition',
// library management
LIBRARY_SCAN = 'library-refresh',
LIBRARY_SCAN_ASSET = 'library-refresh-asset',
LIBRARY_REMOVE_OFFLINE = 'library-remove-offline',
LIBRARY_CHECK_OFFLINE = 'library-check-offline',
LIBRARY_QUEUE_SYNC_FILES = 'library-queue-sync-files',
LIBRARY_QUEUE_SYNC_ASSETS = 'library-queue-sync-assets',
LIBRARY_SYNC_FILE = 'library-sync-file',
LIBRARY_SYNC_ASSET = 'library-sync-asset',
LIBRARY_DELETE = 'library-delete',
LIBRARY_QUEUE_SCAN_ALL = 'library-queue-all-refresh',
LIBRARY_QUEUE_SYNC_ALL = 'library-queue-sync-all',
LIBRARY_QUEUE_CLEANUP = 'library-queue-cleanup',
// cleanup
@@ -137,16 +137,11 @@ export interface ILibraryFileJob extends IEntityJob {
assetPath: string;
}
export interface ILibraryOfflineJob extends IEntityJob {
export interface ILibraryAssetJob extends IEntityJob {
importPaths: string[];
exclusionPatterns: string[];
}
export interface ILibraryRefreshJob extends IEntityJob {
refreshModifiedFiles: boolean;
refreshAllFiles: boolean;
}
export interface IBulkEntityJob extends IBaseJob {
ids: string[];
}
@@ -277,12 +272,12 @@ export type JobItem =
| { name: JobName.ASSET_DELETION_CHECK; data?: IBaseJob }
// Library Management
| { name: JobName.LIBRARY_SCAN_ASSET; data: ILibraryFileJob }
| { name: JobName.LIBRARY_SCAN; data: ILibraryRefreshJob }
| { name: JobName.LIBRARY_REMOVE_OFFLINE; data: IEntityJob }
| { name: JobName.LIBRARY_SYNC_FILE; data: ILibraryFileJob }
| { name: JobName.LIBRARY_QUEUE_SYNC_FILES; data: IEntityJob }
| { name: JobName.LIBRARY_QUEUE_SYNC_ASSETS; data: IEntityJob }
| { name: JobName.LIBRARY_SYNC_ASSET; data: IEntityJob }
| { name: JobName.LIBRARY_DELETE; data: IEntityJob }
| { name: JobName.LIBRARY_QUEUE_SCAN_ALL; data: IBaseJob }
| { name: JobName.LIBRARY_CHECK_OFFLINE; data: IEntityJob }
| { name: JobName.LIBRARY_QUEUE_SYNC_ALL; data?: IBaseJob }
| { name: JobName.LIBRARY_QUEUE_CLEANUP; data: IBaseJob }
// Notification

View File

@@ -1,4 +1,4 @@
import { LogLevel } from 'src/config';
import { LogLevel } from 'src/enum';
export const ILoggerRepository = 'ILoggerRepository';

View File

@@ -1,5 +1,5 @@
import { Writable } from 'node:stream';
import { ImageFormat, TranscodeTarget, VideoCodec } from 'src/config';
import { ImageFormat, TranscodeTarget, VideoCodec } from 'src/enum';
export const IMediaRepository = 'IMediaRepository';

View File

@@ -7,7 +7,18 @@ export interface ExifDuration {
Scale?: number;
}
type TagsWithWrongTypes = 'FocalLength' | 'Duration' | 'Description' | 'ImageDescription' | 'RegionInfo';
type StringOrNumber = string | number;
type TagsWithWrongTypes =
| 'FocalLength'
| 'Duration'
| 'Description'
| 'ImageDescription'
| 'RegionInfo'
| 'TagsList'
| 'Keywords'
| 'HierarchicalSubject'
| 'ISO';
export interface ImmichTags extends Omit<Tags, TagsWithWrongTypes> {
ContentIdentifier?: string;
MotionPhoto?: number;
@@ -20,10 +31,14 @@ export interface ImmichTags extends Omit<Tags, TagsWithWrongTypes> {
EmbeddedVideoType?: string;
EmbeddedVideoFile?: BinaryField;
MotionPhotoVideo?: BinaryField;
TagsList?: StringOrNumber[];
HierarchicalSubject?: StringOrNumber[];
Keywords?: StringOrNumber | StringOrNumber[];
ISO?: number | number[];
// Type is wrong, can also be number.
Description?: string | number;
ImageDescription?: string | number;
Description?: StringOrNumber;
ImageDescription?: StringOrNumber;
// Extended properties for image regions, such as faces
RegionInfo?: {

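// Hedged sketch, not part of the diff: Keywords, TagsList, and HierarchicalSubject
// may now hold numbers as well as strings, so a consumer is assumed to coerce them
// before use. toStringArray is a hypothetical helper, not an Immich utility.
type StringOrNumber = string | number;

const toStringArray = (value?: StringOrNumber | StringOrNumber[]): string[] => {
  if (value === undefined) {
    return [];
  }
  return (Array.isArray(value) ? value : [value]).map((item) => String(item));
};

// e.g. toStringArray(tags.HierarchicalSubject) turns [2024, 'Travel|Japan'] into ['2024', 'Travel|Japan']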
View File

@@ -1,4 +1,5 @@
import { MoveEntity, PathType } from 'src/entities/move.entity';
import { MoveEntity } from 'src/entities/move.entity';
import { PathType } from 'src/enum';
export const IMoveRepository = 'IMoveRepository';

View File

@@ -2,7 +2,7 @@ import { CommandFactory } from 'nest-commander';
import { fork } from 'node:child_process';
import { Worker } from 'node:worker_threads';
import { ImmichAdminModule } from 'src/app.module';
import { LogLevel } from 'src/config';
import { LogLevel } from 'src/enum';
import { getWorkers } from 'src/utils/workers';
const immichApp = process.argv[2] || process.env.IMMICH_APP;

View File

@@ -11,19 +11,11 @@ import { Reflector } from '@nestjs/core';
import { ApiBearerAuth, ApiCookieAuth, ApiOkResponse, ApiQuery, ApiSecurity } from '@nestjs/swagger';
import { Request } from 'express';
import { AuthDto, ImmichQuery } from 'src/dtos/auth.dto';
import { Permission } from 'src/enum';
import { MetadataKey, Permission } from 'src/enum';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
import { AuthService, LoginDetails } from 'src/services/auth.service';
import { UAParser } from 'ua-parser-js';
export enum Metadata {
AUTH_ROUTE = 'auth_route',
ADMIN_ROUTE = 'admin_route',
SHARED_ROUTE = 'shared_route',
API_KEY_SECURITY = 'api_key',
ON_EMIT_CONFIG = 'on_emit_config',
}
type AdminRoute = { admin?: true };
type SharedLinkRoute = { sharedLink?: true };
type AuthenticatedOptions = { permission?: Permission } & (AdminRoute | SharedLinkRoute);
@@ -32,8 +24,8 @@ export const Authenticated = (options?: AuthenticatedOptions): MethodDecorator =
const decorators: MethodDecorator[] = [
ApiBearerAuth(),
ApiCookieAuth(),
ApiSecurity(Metadata.API_KEY_SECURITY),
SetMetadata(Metadata.AUTH_ROUTE, options || {}),
ApiSecurity(MetadataKey.API_KEY_SECURITY),
SetMetadata(MetadataKey.AUTH_ROUTE, options || {}),
];
if ((options as SharedLinkRoute)?.sharedLink) {
@@ -85,7 +77,7 @@ export class AuthGuard implements CanActivate {
async canActivate(context: ExecutionContext): Promise<boolean> {
const targets = [context.getHandler()];
const options = this.reflector.getAllAndOverride<AuthenticatedOptions | undefined>(Metadata.AUTH_ROUTE, targets);
const options = this.reflector.getAllAndOverride<AuthenticatedOptions | undefined>(MetadataKey.AUTH_ROUTE, targets);
if (!options) {
return true;
}

View File

@@ -7,6 +7,7 @@ import multer, { StorageEngine, diskStorage } from 'multer';
import { createHash, randomUUID } from 'node:crypto';
import { Observable } from 'rxjs';
import { UploadFieldName } from 'src/dtos/asset-media.dto';
import { RouteKey } from 'src/enum';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
import { AuthRequest } from 'src/middleware/auth.guard';
import { AssetMediaService, UploadFile } from 'src/services/asset-media.service';
@@ -28,11 +29,6 @@ export function getFiles(files: UploadFiles) {
};
}
export enum Route {
ASSET = 'assets',
USER = 'users',
}
export interface ImmichFile extends Express.Multer.File {
/** sha1 hash of file */
uuid: string;
@@ -115,7 +111,7 @@ export class FileUploadInterceptor implements NestInterceptor {
const context_ = context.switchToHttp();
const route = this.reflect.get<string>(PATH_METADATA, context.getClass());
const handler: RequestHandler | null = this.getHandler(route as Route);
const handler: RequestHandler | null = this.getHandler(route as RouteKey);
if (handler) {
await new Promise<void>((resolve, reject) => {
const next: NextFunction = (error) => (error ? reject(transformException(error)) : resolve());
@@ -176,13 +172,13 @@ export class FileUploadInterceptor implements NestInterceptor {
return false;
}
private getHandler(route: Route) {
private getHandler(route: RouteKey) {
switch (route) {
case Route.ASSET: {
case RouteKey.ASSET: {
return this.handlers.assetUpload;
}
case Route.USER: {
case RouteKey.USER: {
return this.handlers.userProfile;
}

View File

@@ -268,35 +268,6 @@ DELETE FROM "assets"
WHERE
"ownerId" = $1
-- AssetRepository.getExternalLibraryAssetPaths
SELECT DISTINCT
"distinctAlias"."AssetEntity_id" AS "ids_AssetEntity_id"
FROM
(
SELECT
"AssetEntity"."id" AS "AssetEntity_id",
"AssetEntity"."originalPath" AS "AssetEntity_originalPath",
"AssetEntity"."isOffline" AS "AssetEntity_isOffline"
FROM
"assets" "AssetEntity"
LEFT JOIN "libraries" "AssetEntity__AssetEntity_library" ON "AssetEntity__AssetEntity_library"."id" = "AssetEntity"."libraryId"
AND (
"AssetEntity__AssetEntity_library"."deletedAt" IS NULL
)
WHERE
(
(
((("AssetEntity__AssetEntity_library"."id" = $1)))
AND ("AssetEntity"."isExternal" = $2)
)
)
AND ("AssetEntity"."deletedAt" IS NULL)
) "distinctAlias"
ORDER BY
"AssetEntity_id" ASC
LIMIT
2
-- AssetRepository.getByLibraryIdAndOriginalPath
SELECT DISTINCT
"distinctAlias"."AssetEntity_id" AS "ids_AssetEntity_id"
@@ -366,18 +337,6 @@ WHERE
AND "originalPath" = path
);
-- AssetRepository.updateOfflineLibraryAssets
UPDATE "assets"
SET
"isOffline" = $1,
"updatedAt" = CURRENT_TIMESTAMP
WHERE
(
"libraryId" = $2
AND NOT ("originalPath" IN ($3))
AND "isOffline" = $4
)
-- AssetRepository.getAllByDeviceId
SELECT
"AssetEntity"."deviceAssetId" AS "AssetEntity_deviceAssetId",

View File

@@ -6,14 +6,13 @@ import { AssetJobStatusEntity } from 'src/entities/asset-job-status.entity';
import { AssetEntity } from 'src/entities/asset.entity';
import { ExifEntity } from 'src/entities/exif.entity';
import { SmartInfoEntity } from 'src/entities/smart-info.entity';
import { AssetFileType, AssetOrder, AssetStatus, AssetType } from 'src/enum';
import { AssetFileType, AssetOrder, AssetStatus, AssetType, PaginationMode } from 'src/enum';
import {
AssetBuilderOptions,
AssetCreate,
AssetDeltaSyncOptions,
AssetExploreFieldOptions,
AssetFullSyncOptions,
AssetPathEntity,
AssetStats,
AssetStatsOptions,
AssetUpdateAllOptions,
@@ -31,7 +30,7 @@ import {
import { AssetSearchOptions, SearchExploreItem } from 'src/interfaces/search.interface';
import { searchAssetBuilder } from 'src/utils/database';
import { Instrumentation } from 'src/utils/instrumentation';
import { Paginated, PaginationMode, PaginationOptions, paginate, paginatedBuilder } from 'src/utils/pagination';
import { Paginated, PaginationOptions, paginate, paginatedBuilder } from 'src/utils/pagination';
import {
Brackets,
FindOptionsOrder,
@@ -177,14 +176,6 @@ export class AssetRepository implements IAssetRepository {
return this.getAll(pagination, { ...options, userIds: [userId] });
}
@GenerateSql({ params: [{ take: 1, skip: 0 }, DummyValue.UUID] })
getExternalLibraryAssetPaths(pagination: PaginationOptions, libraryId: string): Paginated<AssetPathEntity> {
return paginate(this.repository, pagination, {
select: { id: true, originalPath: true, isOffline: true },
where: { library: { id: libraryId }, isExternal: true },
});
}
@GenerateSql({ params: [DummyValue.UUID, DummyValue.STRING] })
getByLibraryIdAndOriginalPath(libraryId: string, originalPath: string): Promise<AssetEntity | null> {
return this.repository.findOne({
@@ -198,24 +189,16 @@ export class AssetRepository implements IAssetRepository {
async getPathsNotInLibrary(libraryId: string, originalPaths: string[]): Promise<string[]> {
const result = await this.repository.query(
`
WITH paths AS (SELECT unnest($2::text[]) AS path)
SELECT path FROM paths
WHERE NOT EXISTS (SELECT 1 FROM assets WHERE "libraryId" = $1 AND "originalPath" = path);
`,
WITH paths AS (SELECT unnest($2::text[]) AS path)
SELECT path
FROM paths
WHERE NOT EXISTS (SELECT 1 FROM assets WHERE "libraryId" = $1 AND "originalPath" = path);
`,
[libraryId, originalPaths],
);
return result.map((row: { path: string }) => row.path);
}
@GenerateSql({ params: [DummyValue.UUID, [DummyValue.STRING]] })
@ChunkedArray({ paramIndex: 1 })
async updateOfflineLibraryAssets(libraryId: string, originalPaths: string[]): Promise<void> {
await this.repository.update(
{ library: { id: libraryId }, originalPath: Not(In(originalPaths)), isOffline: false },
{ isOffline: true },
);
}
getAll(pagination: PaginationOptions, options: AssetSearchOptions = {}): Paginated<AssetEntity> {
let builder = this.repository.createQueryBuilder('asset').leftJoinAndSelect('asset.files', 'files');
builder = searchAssetBuilder(builder, options);
@@ -373,12 +356,10 @@ export class AssetRepository implements IAssetRepository {
}
@GenerateSql(
...Object.values(WithProperty)
.filter((property) => property !== WithProperty.IS_OFFLINE && property !== WithProperty.IS_ONLINE)
.map((property) => ({
name: property,
params: [DummyValue.PAGINATION, property],
})),
...Object.values(WithProperty).map((property) => ({
name: property,
params: [DummyValue.PAGINATION, property],
})),
)
getWithout(pagination: PaginationOptions, property: WithoutProperty): Paginated<AssetEntity> {
let relations: FindOptionsRelations<AssetEntity> = {};
@@ -531,26 +512,16 @@ export class AssetRepository implements IAssetRepository {
where = [{ sidecarPath: Not(IsNull()), isVisible: true }];
break;
}
case WithProperty.IS_OFFLINE: {
if (!libraryId) {
throw new Error('Library id is required when finding offline assets');
}
where = [{ isOffline: true, libraryId }];
break;
}
case WithProperty.IS_ONLINE: {
if (!libraryId) {
throw new Error('Library id is required when finding online assets');
}
where = [{ isOffline: false, libraryId }];
break;
}
default: {
throw new Error(`Invalid getWith property: ${property}`);
}
}
if (libraryId) {
where = [{ ...where, libraryId }];
}
return paginate(this.repository, pagination, {
where,
withDeleted,
@@ -750,7 +721,10 @@ export class AssetRepository implements IAssetRepository {
builder.andWhere(`asset.deletedAt ${options.isTrashed ? 'IS NOT NULL' : 'IS NULL'}`).withDeleted();
if (options.isTrashed) {
builder.andWhere('asset.status = :status', { status: AssetStatus.TRASHED });
// TODO: Temporarily inverted to support showing offline assets in the trash queries.
// Once offline assets are handled in a separate screen, this should be set back to status = TRASHED
// and the offline screens should use a separate isOffline = true parameter in the timeline query.
builder.andWhere('asset.status != :status', { status: AssetStatus.DELETED });
}
}

View File

@@ -0,0 +1,15 @@
import { Injectable } from '@nestjs/common';
import { getVectorExtension } from 'src/database.config';
import { EnvData, IConfigRepository } from 'src/interfaces/config.interface';
@Injectable()
export class ConfigRepository implements IConfigRepository {
getEnv(): EnvData {
return {
database: {
skipMigrations: process.env.DB_SKIP_MIGRATIONS === 'true',
vectorExtension: getVectorExtension(),
},
};
}
}

View File

@@ -5,6 +5,7 @@ import { IAlbumRepository } from 'src/interfaces/album.interface';
import { IKeyRepository } from 'src/interfaces/api-key.interface';
import { IAssetRepository } from 'src/interfaces/asset.interface';
import { IAuditRepository } from 'src/interfaces/audit.interface';
import { IConfigRepository } from 'src/interfaces/config.interface';
import { ICryptoRepository } from 'src/interfaces/crypto.interface';
import { IDatabaseRepository } from 'src/interfaces/database.interface';
import { IEventRepository } from 'src/interfaces/event.interface';
@@ -39,6 +40,7 @@ import { AlbumRepository } from 'src/repositories/album.repository';
import { ApiKeyRepository } from 'src/repositories/api-key.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { AuditRepository } from 'src/repositories/audit.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { CryptoRepository } from 'src/repositories/crypto.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { EventRepository } from 'src/repositories/event.repository';
@@ -74,6 +76,7 @@ export const repositories = [
{ provide: IAlbumUserRepository, useClass: AlbumUserRepository },
{ provide: IAssetRepository, useClass: AssetRepository },
{ provide: IAuditRepository, useClass: AuditRepository },
{ provide: IConfigRepository, useClass: ConfigRepository },
{ provide: ICryptoRepository, useClass: CryptoRepository },
{ provide: IDatabaseRepository, useClass: DatabaseRepository },
{ provide: IEventRepository, useClass: EventRepository },

View File

@@ -79,12 +79,12 @@ export const JOBS_TO_QUEUE: Record<JobName, QueueName> = {
[JobName.SIDECAR_WRITE]: QueueName.SIDECAR,
// Library management
[JobName.LIBRARY_SCAN_ASSET]: QueueName.LIBRARY,
[JobName.LIBRARY_SCAN]: QueueName.LIBRARY,
[JobName.LIBRARY_SYNC_FILE]: QueueName.LIBRARY,
[JobName.LIBRARY_QUEUE_SYNC_FILES]: QueueName.LIBRARY,
[JobName.LIBRARY_QUEUE_SYNC_ASSETS]: QueueName.LIBRARY,
[JobName.LIBRARY_DELETE]: QueueName.LIBRARY,
[JobName.LIBRARY_CHECK_OFFLINE]: QueueName.LIBRARY,
[JobName.LIBRARY_REMOVE_OFFLINE]: QueueName.LIBRARY,
[JobName.LIBRARY_QUEUE_SCAN_ALL]: QueueName.LIBRARY,
[JobName.LIBRARY_SYNC_ASSET]: QueueName.LIBRARY,
[JobName.LIBRARY_QUEUE_SYNC_ALL]: QueueName.LIBRARY,
[JobName.LIBRARY_QUEUE_CLEANUP]: QueueName.LIBRARY,
// Notification

View File

@@ -1,7 +1,7 @@
import { ConsoleLogger, Injectable, Scope } from '@nestjs/common';
import { isLogLevelEnabled } from '@nestjs/common/services/utils/is-log-level-enabled.util';
import { ClsService } from 'nestjs-cls';
import { LogLevel } from 'src/config';
import { LogLevel } from 'src/enum';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
import { LogColor } from 'src/utils/logger';

View File

@@ -5,7 +5,7 @@ import fs from 'node:fs/promises';
import { Writable } from 'node:stream';
import { promisify } from 'node:util';
import sharp from 'sharp';
import { Colorspace } from 'src/config';
import { Colorspace } from 'src/enum';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
import {
IMediaRepository,

View File

@@ -1,7 +1,8 @@
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { DummyValue, GenerateSql } from 'src/decorators';
import { MoveEntity, PathType } from 'src/entities/move.entity';
import { MoveEntity } from 'src/entities/move.entity';
import { PathType } from 'src/enum';
import { IMoveRepository, MoveCreate } from 'src/interfaces/move.interface';
import { Instrumentation } from 'src/utils/instrumentation';
import { Repository } from 'typeorm';

View File

@@ -6,7 +6,7 @@ import { AssetFaceEntity } from 'src/entities/asset-face.entity';
import { AssetJobStatusEntity } from 'src/entities/asset-job-status.entity';
import { AssetEntity } from 'src/entities/asset.entity';
import { PersonEntity } from 'src/entities/person.entity';
import { SourceType } from 'src/enum';
import { PaginationMode, SourceType } from 'src/enum';
import {
AssetFaceId,
DeleteAllFacesOptions,
@@ -19,7 +19,7 @@ import {
UpdateFacesData,
} from 'src/interfaces/person.interface';
import { Instrumentation } from 'src/utils/instrumentation';
import { Paginated, PaginationMode, PaginationOptions, paginate, paginatedBuilder } from 'src/utils/pagination';
import { Paginated, PaginationOptions, paginate, paginatedBuilder } from 'src/utils/pagination';
import { DataSource, FindManyOptions, FindOptionsRelations, FindOptionsSelect, In, Repository } from 'typeorm';
@Instrumentation()

View File

@@ -8,7 +8,7 @@ import { ExifEntity } from 'src/entities/exif.entity';
import { GeodataPlacesEntity } from 'src/entities/geodata-places.entity';
import { SmartInfoEntity } from 'src/entities/smart-info.entity';
import { SmartSearchEntity } from 'src/entities/smart-search.entity';
import { AssetType } from 'src/enum';
import { AssetType, PaginationMode } from 'src/enum';
import { DatabaseExtension } from 'src/interfaces/database.interface';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
import {
@@ -23,7 +23,7 @@ import {
} from 'src/interfaces/search.interface';
import { asVector, searchAssetBuilder } from 'src/utils/database';
import { Instrumentation } from 'src/utils/instrumentation';
import { Paginated, PaginationMode, PaginationResult, paginatedBuilder } from 'src/utils/pagination';
import { Paginated, PaginationResult, paginatedBuilder } from 'src/utils/pagination';
import { isValidInteger } from 'src/validation';
import { Repository, SelectQueryBuilder } from 'typeorm';

View File

@@ -3,7 +3,7 @@ import { AssetEntity } from 'src/entities/asset.entity';
import { AssetStatus } from 'src/enum';
import { ITrashRepository } from 'src/interfaces/trash.interface';
import { Paginated, paginatedBuilder, PaginationOptions } from 'src/utils/pagination';
import { In, IsNull, Not, Repository } from 'typeorm';
import { In, Repository } from 'typeorm';
export class TrashRepository implements ITrashRepository {
constructor(@InjectRepository(AssetEntity) private assetRepository: Repository<AssetEntity>) {}
@@ -26,7 +26,7 @@ export class TrashRepository implements ITrashRepository {
async restore(userId: string): Promise<number> {
const result = await this.assetRepository.update(
{ ownerId: userId, deletedAt: Not(IsNull()) },
{ ownerId: userId, status: AssetStatus.TRASHED },
{ status: AssetStatus.ACTIVE, deletedAt: null },
);
@@ -35,7 +35,7 @@ export class TrashRepository implements ITrashRepository {
async empty(userId: string): Promise<number> {
const result = await this.assetRepository.update(
{ ownerId: userId, deletedAt: Not(IsNull()), status: AssetStatus.TRASHED },
{ ownerId: userId, status: AssetStatus.TRASHED },
{ status: AssetStatus.DELETED },
);
@@ -43,7 +43,10 @@ export class TrashRepository implements ITrashRepository {
}
async restoreAll(ids: string[]): Promise<number> {
const result = await this.assetRepository.update({ id: In(ids) }, { status: AssetStatus.ACTIVE, deletedAt: null });
const result = await this.assetRepository.update(
{ id: In(ids), status: AssetStatus.TRASHED },
{ status: AssetStatus.ACTIVE, deletedAt: null },
);
return result.affected ?? 0;
}
}

View File

@@ -4,7 +4,7 @@ import { AssetMediaStatus, AssetRejectReason, AssetUploadAction } from 'src/dtos
import { AssetMediaCreateDto, AssetMediaReplaceDto, UploadFieldName } from 'src/dtos/asset-media.dto';
import { AssetFileEntity } from 'src/entities/asset-files.entity';
import { ASSET_CHECKSUM_CONSTRAINT, AssetEntity } from 'src/entities/asset.entity';
import { AssetStatus, AssetType } from 'src/enum';
import { AssetStatus, AssetType, CacheControl } from 'src/enum';
import { IAssetRepository } from 'src/interfaces/asset.interface';
import { IEventRepository } from 'src/interfaces/event.interface';
import { IJobRepository, JobName } from 'src/interfaces/job.interface';
@@ -12,7 +12,7 @@ import { ILoggerRepository } from 'src/interfaces/logger.interface';
import { IStorageRepository } from 'src/interfaces/storage.interface';
import { IUserRepository } from 'src/interfaces/user.interface';
import { AssetMediaService } from 'src/services/asset-media.service';
import { CacheControl, ImmichFileResponse } from 'src/utils/file';
import { ImmichFileResponse } from 'src/utils/file';
import { assetStub } from 'test/fixtures/asset.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { fileStub } from 'test/fixtures/file.stub';

View File

@@ -7,7 +7,7 @@ import {
} from '@nestjs/common';
import { extname } from 'node:path';
import sanitize from 'sanitize-filename';
import { StorageCore, StorageFolder } from 'src/cores/storage.core';
import { StorageCore } from 'src/cores/storage.core';
import {
AssetBulkUploadCheckResponseDto,
AssetMediaResponseDto,
@@ -27,7 +27,7 @@ import {
} from 'src/dtos/asset-media.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { ASSET_CHECKSUM_CONSTRAINT, AssetEntity } from 'src/entities/asset.entity';
import { AssetStatus, AssetType, Permission } from 'src/enum';
import { AssetStatus, AssetType, CacheControl, Permission, StorageFolder } from 'src/enum';
import { IAccessRepository } from 'src/interfaces/access.interface';
import { IAssetRepository } from 'src/interfaces/asset.interface';
import { IEventRepository } from 'src/interfaces/event.interface';
@@ -37,7 +37,7 @@ import { IStorageRepository } from 'src/interfaces/storage.interface';
import { IUserRepository } from 'src/interfaces/user.interface';
import { requireAccess, requireUploadAccess } from 'src/utils/access';
import { getAssetFiles, onBeforeLink } from 'src/utils/asset.util';
import { CacheControl, ImmichFileResponse } from 'src/utils/file';
import { ImmichFileResponse } from 'src/utils/file';
import { mimeTypes } from 'src/utils/mime-types';
import { fromChecksum } from 'src/utils/request';
import { QueryFailedError } from 'typeorm';
@@ -427,7 +427,6 @@ export class AssetMediaService {
livePhotoVideoId: dto.livePhotoVideoId,
originalFileName: file.originalName,
sidecarPath: sidecarFile?.originalPath,
isOffline: dto.isOffline ?? false,
});
if (sidecarFile) {

View File

@@ -2,7 +2,7 @@ import { BadRequestException, Inject, Injectable } from '@nestjs/common';
import { DateTime } from 'luxon';
import { resolve } from 'node:path';
import { AUDIT_LOG_MAX_DURATION } from 'src/constants';
import { StorageCore, StorageFolder } from 'src/cores/storage.core';
import { StorageCore } from 'src/cores/storage.core';
import {
AuditDeletesDto,
AuditDeletesResponseDto,
@@ -12,8 +12,15 @@ import {
PathEntityType,
} from 'src/dtos/audit.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetPathType, PersonPathType, UserPathType } from 'src/entities/move.entity';
import { AssetFileType, DatabaseAction, Permission } from 'src/enum';
import {
AssetFileType,
AssetPathType,
DatabaseAction,
Permission,
PersonPathType,
StorageFolder,
UserPathType,
} from 'src/enum';
import { IAccessRepository } from 'src/interfaces/access.interface';
import { IAssetRepository } from 'src/interfaces/asset.interface';
import { IAuditRepository } from 'src/interfaces/audit.interface';

View File

@@ -1,9 +1,9 @@
import { BadRequestException, ForbiddenException, UnauthorizedException } from '@nestjs/common';
import { Issuer, generators } from 'openid-client';
import { AuthType } from 'src/constants';
import { AuthDto, SignUpDto } from 'src/dtos/auth.dto';
import { UserMetadataEntity } from 'src/entities/user-metadata.entity';
import { UserEntity } from 'src/entities/user.entity';
import { AuthType } from 'src/enum';
import { IKeyRepository } from 'src/interfaces/api-key.interface';
import { ICryptoRepository } from 'src/interfaces/crypto.interface';
import { IEventRepository } from 'src/interfaces/event.interface';

View File

@@ -12,7 +12,7 @@ import { DateTime } from 'luxon';
import { IncomingHttpHeaders } from 'node:http';
import { Issuer, UserinfoResponse, custom, generators } from 'openid-client';
import { SystemConfig } from 'src/config';
import { AuthType, LOGIN_URL, MOBILE_REDIRECT, SALT_ROUNDS } from 'src/constants';
import { LOGIN_URL, MOBILE_REDIRECT, SALT_ROUNDS } from 'src/constants';
import { SystemConfigCore } from 'src/cores/system-config.core';
import { UserCore } from 'src/cores/user.core';
import {
@@ -31,7 +31,7 @@ import {
} from 'src/dtos/auth.dto';
import { UserAdminResponseDto, mapUserAdmin } from 'src/dtos/user.dto';
import { UserEntity } from 'src/entities/user.entity';
import { Permission } from 'src/enum';
import { AuthType, Permission } from 'src/enum';
import { IKeyRepository } from 'src/interfaces/api-key.interface';
import { ICryptoRepository } from 'src/interfaces/crypto.interface';
import { IEventRepository } from 'src/interfaces/event.interface';

View File

@@ -1,12 +1,20 @@
import { DatabaseExtension, EXTENSION_NAMES, IDatabaseRepository } from 'src/interfaces/database.interface';
import { IConfigRepository } from 'src/interfaces/config.interface';
import {
DatabaseExtension,
EXTENSION_NAMES,
IDatabaseRepository,
VectorExtension,
} from 'src/interfaces/database.interface';
import { ILoggerRepository } from 'src/interfaces/logger.interface';
import { DatabaseService } from 'src/services/database.service';
import { newConfigRepositoryMock } from 'test/repositories/config.repository.mock';
import { newDatabaseRepositoryMock } from 'test/repositories/database.repository.mock';
import { newLoggerRepositoryMock } from 'test/repositories/logger.repository.mock';
import { Mocked } from 'vitest';
describe(DatabaseService.name, () => {
let sut: DatabaseService;
let configMock: Mocked<IConfigRepository>;
let databaseMock: Mocked<IDatabaseRepository>;
let loggerMock: Mocked<ILoggerRepository>;
let extensionRange: string;
@@ -16,9 +24,11 @@ describe(DatabaseService.name, () => {
let versionAboveRange: string;
beforeEach(() => {
configMock = newConfigRepositoryMock();
databaseMock = newDatabaseRepositoryMock();
loggerMock = newLoggerRepositoryMock();
sut = new DatabaseService(databaseMock, loggerMock);
sut = new DatabaseService(configMock, databaseMock, loggerMock);
extensionRange = '0.2.x';
databaseMock.getExtensionVersionRange.mockReturnValue(extensionRange);
@@ -33,11 +43,6 @@ describe(DatabaseService.name, () => {
});
});
afterEach(() => {
delete process.env.DB_SKIP_MIGRATIONS;
delete process.env.DB_VECTOR_EXTENSION;
});
it('should work', () => {
expect(sut).toBeDefined();
});
@@ -50,12 +55,12 @@ describe(DatabaseService.name, () => {
expect(databaseMock.getPostgresVersion).toHaveBeenCalledTimes(1);
});
describe.each([
describe.each(<Array<{ extension: VectorExtension; extensionName: string }>>[
{ extension: DatabaseExtension.VECTOR, extensionName: EXTENSION_NAMES[DatabaseExtension.VECTOR] },
{ extension: DatabaseExtension.VECTORS, extensionName: EXTENSION_NAMES[DatabaseExtension.VECTORS] },
])('should work with $extensionName', ({ extension, extensionName }) => {
beforeEach(() => {
process.env.DB_VECTOR_EXTENSION = extensionName;
configMock.getEnv.mockReturnValue({ database: { skipMigrations: false, vectorExtension: extension } });
});
it(`should start up successfully with ${extension}`, async () => {
@@ -236,18 +241,28 @@ describe(DatabaseService.name, () => {
expect(databaseMock.runMigrations).toHaveBeenCalledTimes(1);
expect(loggerMock.fatal).not.toHaveBeenCalled();
});
});
it('should skip migrations if DB_SKIP_MIGRATIONS=true', async () => {
process.env.DB_SKIP_MIGRATIONS = 'true';
await expect(sut.onBootstrap()).resolves.toBeUndefined();
expect(databaseMock.runMigrations).not.toHaveBeenCalled();
it('should skip migrations if DB_SKIP_MIGRATIONS=true', async () => {
configMock.getEnv.mockReturnValue({
database: {
skipMigrations: true,
vectorExtension: DatabaseExtension.VECTORS,
},
});
await expect(sut.onBootstrap()).resolves.toBeUndefined();
expect(databaseMock.runMigrations).not.toHaveBeenCalled();
});
it(`should throw error if pgvector extension could not be created`, async () => {
process.env.DB_VECTOR_EXTENSION = 'pgvector';
configMock.getEnv.mockReturnValue({
database: {
skipMigrations: true,
vectorExtension: DatabaseExtension.VECTOR,
},
});
databaseMock.getExtensionVersion.mockResolvedValue({
installedVersion: null,
availableVersion: minVersionInRange,

View File

@@ -1,8 +1,8 @@
import { Inject, Injectable } from '@nestjs/common';
import { Duration } from 'luxon';
import semver from 'semver';
import { getVectorExtension } from 'src/database.config';
import { OnEmit } from 'src/decorators';
import { IConfigRepository } from 'src/interfaces/config.interface';
import {
DatabaseExtension,
DatabaseLock,
@@ -67,6 +67,7 @@ export class DatabaseService {
private reconnection?: NodeJS.Timeout;
constructor(
@Inject(IConfigRepository) private configRepository: IConfigRepository,
@Inject(IDatabaseRepository) private databaseRepository: IDatabaseRepository,
@Inject(ILoggerRepository) private logger: ILoggerRepository,
) {
@@ -85,7 +86,8 @@ export class DatabaseService {
}
await this.databaseRepository.withLock(DatabaseLock.Migrations, async () => {
const extension = getVectorExtension();
const envData = this.configRepository.getEnv();
const extension = envData.database.vectorExtension;
const name = EXTENSION_NAMES[extension];
const extensionRange = this.databaseRepository.getExtensionVersionRange(extension);
@@ -116,7 +118,8 @@ export class DatabaseService {
await this.checkReindexing();
if (process.env.DB_SKIP_MIGRATIONS !== 'true') {
const { database } = this.configRepository.getEnv();
if (!database.skipMigrations) {
await this.databaseRepository.runMigrations();
}
});

View File

@@ -164,7 +164,7 @@ export class JobService {
}
case QueueName.LIBRARY: {
return this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SCAN_ALL, data: { force } });
return this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SYNC_ALL, data: { force } });
}
default: {

View File

@@ -10,9 +10,8 @@ import { ICryptoRepository } from 'src/interfaces/crypto.interface';
import { IDatabaseRepository } from 'src/interfaces/database.interface';
import {
IJobRepository,
ILibraryAssetJob,
ILibraryFileJob,
ILibraryOfflineJob,
ILibraryRefreshJob,
JobName,
JOBS_LIBRARY_PAGINATION_SIZE,
JobStatus,
@@ -37,6 +36,10 @@ import { makeMockWatcher, newStorageRepositoryMock } from 'test/repositories/sto
import { newSystemMetadataRepositoryMock } from 'test/repositories/system-metadata.repository.mock';
import { Mocked, vitest } from 'vitest';
async function* mockWalk() {
yield await Promise.resolve(['/data/user1/photo.jpg']);
}
describe(LibraryService.name, () => {
let sut: LibraryService;
@@ -91,7 +94,7 @@ describe(LibraryService.name, () => {
enabled: true,
cronExpression: '0 1 * * *',
},
watch: { enabled: false },
watch: { enabled: true },
},
} as SystemConfig);
@@ -163,102 +166,29 @@ describe(LibraryService.name, () => {
describe('handleQueueAssetRefresh', () => {
it('should queue refresh of a new asset', async () => {
const mockLibraryJob: ILibraryRefreshJob = {
id: libraryStub.externalLibrary1.id,
refreshModifiedFiles: false,
refreshAllFiles: false,
};
assetMock.getWith.mockResolvedValue({ items: [], hasNextPage: false });
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
// eslint-disable-next-line @typescript-eslint/require-await
storageMock.walk.mockImplementation(async function* generator() {
yield ['/data/user1/photo.jpg'];
});
assetMock.getExternalLibraryAssetPaths.mockResolvedValue({ items: [], hasNextPage: false });
storageMock.walk.mockImplementation(mockWalk);
await sut.handleQueueAssetRefresh(mockLibraryJob);
await sut.handleQueueSyncFiles({ id: libraryStub.externalLibrary1.id });
expect(jobMock.queueAll).toHaveBeenCalledWith([
{
name: JobName.LIBRARY_SCAN_ASSET,
name: JobName.LIBRARY_SYNC_FILE,
data: {
id: libraryStub.externalLibrary1.id,
ownerId: libraryStub.externalLibrary1.owner.id,
assetPath: '/data/user1/photo.jpg',
force: false,
},
},
]);
});
it('should queue offline check of existing online assets', async () => {
const mockLibraryJob: ILibraryRefreshJob = {
id: libraryStub.externalLibrary1.id,
refreshModifiedFiles: false,
refreshAllFiles: false,
};
assetMock.getWith.mockResolvedValue({ items: [], hasNextPage: false });
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
storageMock.walk.mockImplementation(async function* generator() {});
assetMock.getWith.mockResolvedValue({ items: [assetStub.external], hasNextPage: false });
await sut.handleQueueAssetRefresh(mockLibraryJob);
expect(jobMock.queueAll).toHaveBeenCalledWith([
{
name: JobName.LIBRARY_CHECK_OFFLINE,
data: {
id: assetStub.external.id,
importPaths: libraryStub.externalLibrary1.importPaths,
exclusionPatterns: [],
},
},
]);
});
it("should fail when library can't be found", async () => {
const mockLibraryJob: ILibraryRefreshJob = {
id: libraryStub.externalLibrary1.id,
refreshModifiedFiles: false,
refreshAllFiles: false,
};
libraryMock.get.mockResolvedValue(null);
await expect(sut.handleQueueAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.SKIPPED);
});
it('should force queue new assets', async () => {
const mockLibraryJob: ILibraryRefreshJob = {
id: libraryStub.externalLibrary1.id,
refreshModifiedFiles: false,
refreshAllFiles: true,
};
assetMock.getWith.mockResolvedValue({ items: [], hasNextPage: false });
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
// eslint-disable-next-line @typescript-eslint/require-await
storageMock.walk.mockImplementation(async function* generator() {
yield ['/data/user1/photo.jpg'];
});
assetMock.getExternalLibraryAssetPaths.mockResolvedValue({ items: [], hasNextPage: false });
await sut.handleQueueAssetRefresh(mockLibraryJob);
expect(jobMock.queueAll).toHaveBeenCalledWith([
{
name: JobName.LIBRARY_SCAN_ASSET,
data: {
id: libraryStub.externalLibrary1.id,
ownerId: libraryStub.externalLibrary1.owner.id,
assetPath: '/data/user1/photo.jpg',
force: true,
},
},
]);
await expect(sut.handleQueueSyncFiles({ id: libraryStub.externalLibrary1.id })).resolves.toBe(JobStatus.SKIPPED);
});
it('should ignore import paths that do not exist', async () => {
@@ -276,16 +206,9 @@ describe(LibraryService.name, () => {
assetMock.getWith.mockResolvedValue({ items: [], hasNextPage: false });
const mockLibraryJob: ILibraryRefreshJob = {
id: libraryStub.externalLibraryWithImportPaths1.id,
refreshModifiedFiles: false,
refreshAllFiles: false,
};
libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
assetMock.getExternalLibraryAssetPaths.mockResolvedValue({ items: [], hasNextPage: false });
await sut.handleQueueAssetRefresh(mockLibraryJob);
await sut.handleQueueSyncFiles({ id: libraryStub.externalLibraryWithImportPaths1.id });
expect(storageMock.walk).toHaveBeenCalledWith({
pathsToCrawl: [libraryStub.externalLibraryWithImportPaths1.importPaths[1]],
@@ -296,9 +219,36 @@ describe(LibraryService.name, () => {
});
});
describe('handleOfflineCheck', () => {
describe('handleQueueRemoveDeleted', () => {
it('should queue online check of existing assets', async () => {
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
storageMock.walk.mockImplementation(async function* generator() {});
assetMock.getAll.mockResolvedValue({ items: [assetStub.external], hasNextPage: false });
await sut.handleQueueSyncAssets({ id: libraryStub.externalLibrary1.id });
expect(jobMock.queueAll).toHaveBeenCalledWith([
{
name: JobName.LIBRARY_SYNC_ASSET,
data: {
id: assetStub.external.id,
importPaths: libraryStub.externalLibrary1.importPaths,
exclusionPatterns: [],
},
},
]);
});
it("should fail when library can't be found", async () => {
libraryMock.get.mockResolvedValue(null);
await expect(sut.handleQueueSyncAssets({ id: libraryStub.externalLibrary1.id })).resolves.toBe(JobStatus.SKIPPED);
});
});
describe('handleSyncAsset', () => {
it('should skip missing assets', async () => {
const mockAssetJob: ILibraryOfflineJob = {
const mockAssetJob: ILibraryAssetJob = {
id: assetStub.external.id,
importPaths: ['/'],
exclusionPatterns: [],
@@ -306,41 +256,31 @@ describe(LibraryService.name, () => {
assetMock.getById.mockResolvedValue(null);
await expect(sut.handleOfflineCheck(mockAssetJob)).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SKIPPED);
expect(assetMock.update).not.toHaveBeenCalled();
});
it('should do nothing with already-offline assets', async () => {
const mockAssetJob: ILibraryOfflineJob = {
id: assetStub.external.id,
importPaths: ['/'],
exclusionPatterns: [],
};
assetMock.getById.mockResolvedValue(assetStub.offline);
await expect(sut.handleOfflineCheck(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.update).not.toHaveBeenCalled();
expect(assetMock.remove).not.toHaveBeenCalled();
});
it('should offline assets no longer on disk', async () => {
const mockAssetJob: ILibraryOfflineJob = {
const mockAssetJob: ILibraryAssetJob = {
id: assetStub.external.id,
importPaths: ['/'],
exclusionPatterns: [],
};
assetMock.getById.mockResolvedValue(assetStub.external);
storageMock.stat.mockRejectedValue(new Error('ENOENT, no such file or directory'));
await expect(sut.handleOfflineCheck(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.update).toHaveBeenCalledWith({ id: assetStub.external.id, isOffline: true });
expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.external.id], {
isOffline: true,
deletedAt: expect.any(Date),
});
});
it('should offline assets matching an exclusion pattern', async () => {
const mockAssetJob: ILibraryOfflineJob = {
const mockAssetJob: ILibraryAssetJob = {
id: assetStub.external.id,
importPaths: ['/'],
exclusionPatterns: ['**/user1/**'],
@@ -348,13 +288,15 @@ describe(LibraryService.name, () => {
assetMock.getById.mockResolvedValue(assetStub.external);
await expect(sut.handleOfflineCheck(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.update).toHaveBeenCalledWith({ id: assetStub.external.id, isOffline: true });
await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.external.id], {
isOffline: true,
deletedAt: expect.any(Date),
});
});
it('should set assets outside of import paths as offline', async () => {
const mockAssetJob: ILibraryOfflineJob = {
const mockAssetJob: ILibraryAssetJob = {
id: assetStub.external.id,
importPaths: ['/data/user2'],
exclusionPatterns: [],
@@ -363,28 +305,74 @@ describe(LibraryService.name, () => {
assetMock.getById.mockResolvedValue(assetStub.external);
storageMock.checkFileExists.mockResolvedValue(true);
await expect(sut.handleOfflineCheck(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.update).toHaveBeenCalledWith({ id: assetStub.external.id, isOffline: true });
expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.external.id], {
isOffline: true,
deletedAt: expect.any(Date),
});
});
it('should do nothing with online assets', async () => {
const mockAssetJob: ILibraryOfflineJob = {
const mockAssetJob: ILibraryAssetJob = {
id: assetStub.external.id,
importPaths: ['/'],
exclusionPatterns: [],
};
assetMock.getById.mockResolvedValue(assetStub.external);
storageMock.checkFileExists.mockResolvedValue(true);
storageMock.stat.mockResolvedValue({ mtime: assetStub.external.fileModifiedAt } as Stats);
await expect(sut.handleOfflineCheck(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.update).not.toHaveBeenCalled();
expect(assetMock.updateAll).not.toHaveBeenCalled();
});
it('should un-trash an asset previously marked as offline', async () => {
const mockAssetJob: ILibraryAssetJob = {
id: assetStub.external.id,
importPaths: ['/'],
exclusionPatterns: [],
};
assetMock.getById.mockResolvedValue(assetStub.trashedOffline);
storageMock.stat.mockResolvedValue({ mtime: assetStub.trashedOffline.fileModifiedAt } as Stats);
await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.trashedOffline.id], {
deletedAt: null,
fileCreatedAt: assetStub.trashedOffline.fileModifiedAt,
fileModifiedAt: assetStub.trashedOffline.fileModifiedAt,
isOffline: false,
originalFileName: 'path.jpg',
});
});
});
describe('handleAssetRefresh', () => {
it('should update file when mtime has changed', async () => {
const mockAssetJob: ILibraryAssetJob = {
id: assetStub.external.id,
importPaths: ['/'],
exclusionPatterns: [],
};
const newMTime = new Date();
assetMock.getById.mockResolvedValue(assetStub.external);
storageMock.stat.mockResolvedValue({ mtime: newMTime } as Stats);
await expect(sut.handleSyncAsset(mockAssetJob)).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.external.id], {
fileModifiedAt: newMTime,
fileCreatedAt: newMTime,
isOffline: false,
originalFileName: 'photo.jpg',
deletedAt: null,
});
});
describe('handleSyncFile', () => {
let mockUser: UserEntity;
beforeEach(() => {
@@ -397,42 +385,18 @@ describe(LibraryService.name, () => {
} as Stats);
});
it('should reject an unknown file extension', async () => {
const mockLibraryJob: ILibraryFileJob = {
id: libraryStub.externalLibrary1.id,
ownerId: mockUser.id,
assetPath: '/data/user1/file.xyz',
force: false,
};
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(null);
await expect(sut.handleAssetRefresh(mockLibraryJob)).rejects.toBeInstanceOf(BadRequestException);
});
it('should reject an unknown file type', async () => {
const mockLibraryJob: ILibraryFileJob = {
id: libraryStub.externalLibrary1.id,
ownerId: mockUser.id,
assetPath: '/data/user1/file.xyz',
force: false,
};
await expect(sut.handleAssetRefresh(mockLibraryJob)).rejects.toBeInstanceOf(BadRequestException);
});
it('should add a new image', async () => {
it('should import a new asset', async () => {
const mockLibraryJob: ILibraryFileJob = {
id: libraryStub.externalLibrary1.id,
ownerId: mockUser.id,
assetPath: '/data/user1/photo.jpg',
force: false,
};
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(null);
assetMock.create.mockResolvedValue(assetStub.image);
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.create.mock.calls).toEqual([
[
@@ -467,19 +431,19 @@ describe(LibraryService.name, () => {
]);
});
it('should add a new image with sidecar', async () => {
it('should import a new asset with sidecar', async () => {
const mockLibraryJob: ILibraryFileJob = {
id: libraryStub.externalLibrary1.id,
ownerId: mockUser.id,
assetPath: '/data/user1/photo.jpg',
force: false,
};
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(null);
assetMock.create.mockResolvedValue(assetStub.image);
storageMock.checkFileExists.mockResolvedValue(true);
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.create.mock.calls).toEqual([
[
@@ -514,18 +478,18 @@ describe(LibraryService.name, () => {
]);
});
it('should add a new video', async () => {
it('should import a new video', async () => {
const mockLibraryJob: ILibraryFileJob = {
id: libraryStub.externalLibrary1.id,
ownerId: mockUser.id,
assetPath: '/data/user1/video.mp4',
force: false,
};
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(null);
assetMock.create.mockResolvedValue(assetStub.video);
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.create.mock.calls).toEqual([
[
@@ -568,29 +532,27 @@ describe(LibraryService.name, () => {
]);
});
it('should not add an image to a soft deleted library', async () => {
it('should not import an asset to a soft deleted library', async () => {
const mockLibraryJob: ILibraryFileJob = {
id: libraryStub.externalLibrary1.id,
ownerId: mockUser.id,
assetPath: '/data/user1/photo.jpg',
force: false,
};
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(null);
assetMock.create.mockResolvedValue(assetStub.image);
libraryMock.get.mockResolvedValue({ ...libraryStub.externalLibrary1, deletedAt: new Date() });
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.FAILED);
await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.FAILED);
expect(assetMock.create.mock.calls).toEqual([]);
});
it('should not import an asset when mtime matches db asset', async () => {
it('should not refresh a file whose mtime matches existing asset', async () => {
const mockLibraryJob: ILibraryFileJob = {
id: libraryStub.externalLibrary1.id,
ownerId: mockUser.id,
assetPath: assetStub.hasFileExtension.originalPath,
force: false,
};
storageMock.stat.mockResolvedValue({
@@ -601,190 +563,52 @@ describe(LibraryService.name, () => {
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.hasFileExtension);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.SKIPPED);
await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SKIPPED);
expect(jobMock.queue).not.toHaveBeenCalled();
expect(jobMock.queueAll).not.toHaveBeenCalled();
});
it('should import an asset when mtime differs from db asset', async () => {
it('should skip existing asset', async () => {
const mockLibraryJob: ILibraryFileJob = {
id: libraryStub.externalLibrary1.id,
ownerId: mockUser.id,
assetPath: '/data/user1/photo.jpg',
force: false,
};
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.image);
assetMock.create.mockResolvedValue(assetStub.image);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
expect(jobMock.queue).toHaveBeenCalledWith({
name: JobName.METADATA_EXTRACTION,
data: {
id: assetStub.image.id,
source: 'upload',
},
});
expect(jobMock.queue).not.toHaveBeenCalledWith({
name: JobName.VIDEO_CONVERSION,
data: {
id: assetStub.image.id,
},
});
await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SKIPPED);
});
it('should import an asset that is missing a file extension', async () => {
// This tests for the case where the file extension is missing from the asset path.
// This happened in previous versions of Immich
it('should not refresh an asset trashed by user', async () => {
const mockLibraryJob: ILibraryFileJob = {
id: libraryStub.externalLibrary1.id,
ownerId: mockUser.id,
assetPath: assetStub.missingFileExtension.originalPath,
force: false,
};
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.missingFileExtension);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.updateAll).toHaveBeenCalledWith(
[assetStub.missingFileExtension.id],
expect.objectContaining({ originalFileName: 'photo.jpg' }),
);
});
it('should set a missing asset to offline', async () => {
storageMock.stat.mockRejectedValue(new Error('Path not found'));
const mockLibraryJob: ILibraryFileJob = {
id: assetStub.image.id,
ownerId: mockUser.id,
assetPath: '/data/user1/photo.jpg',
force: false,
};
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.image);
assetMock.create.mockResolvedValue(assetStub.image);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.update).toHaveBeenCalledWith({ id: assetStub.image.id, isOffline: true });
expect(jobMock.queue).not.toHaveBeenCalled();
expect(jobMock.queueAll).not.toHaveBeenCalled();
});
it('should online a previously-offline asset', async () => {
const mockLibraryJob: ILibraryFileJob = {
id: assetStub.offline.id,
ownerId: mockUser.id,
assetPath: '/data/user1/photo.jpg',
force: false,
};
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.offline);
assetMock.create.mockResolvedValue(assetStub.offline);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.update).toHaveBeenCalledWith({ id: assetStub.offline.id, isOffline: false });
expect(jobMock.queue).toHaveBeenCalledWith({
name: JobName.METADATA_EXTRACTION,
data: {
id: assetStub.offline.id,
source: 'upload',
},
});
expect(jobMock.queue).not.toHaveBeenCalledWith({
name: JobName.VIDEO_CONVERSION,
data: {
id: assetStub.offline.id,
},
});
});
it('should do nothing when mtime matches existing asset', async () => {
const mockLibraryJob: ILibraryFileJob = {
id: assetStub.image.id,
ownerId: assetStub.image.ownerId,
assetPath: '/data/user1/photo.jpg',
force: false,
};
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.image);
assetMock.create.mockResolvedValue(assetStub.image);
expect(assetMock.update).not.toHaveBeenCalled();
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
});
it('should refresh an existing asset if forced', async () => {
const mockLibraryJob: ILibraryFileJob = {
id: assetStub.image.id,
ownerId: assetStub.hasFileExtension.ownerId,
assetPath: assetStub.hasFileExtension.originalPath,
force: true,
};
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.hasFileExtension);
assetMock.create.mockResolvedValue(assetStub.hasFileExtension);
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.trashed);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.SKIPPED);
expect(assetMock.updateAll).toHaveBeenCalledWith([assetStub.hasFileExtension.id], {
fileCreatedAt: new Date('2023-01-01'),
fileModifiedAt: new Date('2023-01-01'),
originalFileName: assetStub.hasFileExtension.originalFileName,
});
expect(jobMock.queue).not.toHaveBeenCalled();
expect(jobMock.queueAll).not.toHaveBeenCalled();
});
it('should refresh an existing asset with modified mtime', async () => {
const filemtime = new Date();
filemtime.setSeconds(assetStub.image.fileModifiedAt.getSeconds() + 10);
const mockLibraryJob: ILibraryFileJob = {
id: libraryStub.externalLibrary1.id,
ownerId: userStub.admin.id,
assetPath: '/data/user1/photo.jpg',
force: false,
};
storageMock.stat.mockResolvedValue({
size: 100,
mtime: filemtime,
ctime: new Date('2023-01-01'),
} as Stats);
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(null);
assetMock.create.mockResolvedValue(assetStub.image);
await expect(sut.handleAssetRefresh(mockLibraryJob)).resolves.toBe(JobStatus.SUCCESS);
expect(assetMock.create).toHaveBeenCalled();
const createdAsset = assetMock.create.mock.calls[0][0];
expect(createdAsset.fileModifiedAt).toEqual(filemtime);
});
it('should throw error when asset does not exist', async () => {
it('should throw BadRequestException when asset does not exist', async () => {
storageMock.stat.mockRejectedValue(new Error("ENOENT, no such file or directory '/data/user1/photo.jpg'"));
const mockLibraryJob: ILibraryFileJob = {
id: libraryStub.externalLibrary1.id,
ownerId: userStub.admin.id,
assetPath: '/data/user1/photo.jpg',
force: false,
};
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(null);
assetMock.create.mockResolvedValue(assetStub.image);
await expect(sut.handleAssetRefresh(mockLibraryJob)).rejects.toBeInstanceOf(BadRequestException);
await expect(sut.handleSyncFile(mockLibraryJob)).resolves.toBe(JobStatus.FAILED);
});
});
@@ -857,7 +681,6 @@ describe(LibraryService.name, () => {
describe('getStatistics', () => {
it('should return library statistics', async () => {
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
libraryMock.getStatistics.mockResolvedValue({ photos: 10, videos: 0, total: 10, usage: 1337 });
await expect(sut.getStatistics(libraryStub.externalLibrary1.id)).resolves.toEqual({
photos: 10,
@@ -1092,12 +915,11 @@ describe(LibraryService.name, () => {
expect(jobMock.queueAll).toHaveBeenCalledWith([
{
name: JobName.LIBRARY_SCAN_ASSET,
name: JobName.LIBRARY_SYNC_FILE,
data: {
id: libraryStub.externalLibraryWithImportPaths1.id,
assetPath: '/foo/photo.jpg',
ownerId: libraryStub.externalLibraryWithImportPaths1.owner.id,
force: false,
},
},
]);
@@ -1114,30 +936,16 @@ describe(LibraryService.name, () => {
expect(jobMock.queueAll).toHaveBeenCalledWith([
{
name: JobName.LIBRARY_SCAN_ASSET,
name: JobName.LIBRARY_SYNC_FILE,
data: {
id: libraryStub.externalLibraryWithImportPaths1.id,
assetPath: '/foo/photo.jpg',
ownerId: libraryStub.externalLibraryWithImportPaths1.owner.id,
force: false,
},
},
]);
});
it('should handle a file unlink event', async () => {
libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
libraryMock.getAll.mockResolvedValue([libraryStub.externalLibraryWithImportPaths1]);
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.external);
storageMock.watch.mockImplementation(
makeMockWatcher({ items: [{ event: 'unlink', value: '/foo/photo.jpg' }] }),
);
await sut.watchAll();
expect(assetMock.update).toHaveBeenCalledWith({ id: assetStub.external.id, isOffline: true });
});
it('should handle an error event', async () => {
libraryMock.get.mockResolvedValue(libraryStub.externalLibraryWithImportPaths1);
assetMock.getByLibraryIdAndOriginalPath.mockResolvedValue(assetStub.external);
@@ -1232,72 +1040,23 @@ describe(LibraryService.name, () => {
});
describe('queueScan', () => {
it('should queue a library scan of external library', async () => {
it('should queue a library scan', async () => {
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
await sut.queueScan(libraryStub.externalLibrary1.id, {});
await sut.queueScan(libraryStub.externalLibrary1.id);
expect(jobMock.queue.mock.calls).toEqual([
[
{
name: JobName.LIBRARY_SCAN,
name: JobName.LIBRARY_QUEUE_SYNC_FILES,
data: {
id: libraryStub.externalLibrary1.id,
refreshModifiedFiles: false,
refreshAllFiles: false,
},
},
],
]);
});
it('should queue a library scan of all modified assets', async () => {
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
await sut.queueScan(libraryStub.externalLibrary1.id, { refreshModifiedFiles: true });
expect(jobMock.queue.mock.calls).toEqual([
[
{
name: JobName.LIBRARY_SCAN,
data: {
id: libraryStub.externalLibrary1.id,
refreshModifiedFiles: true,
refreshAllFiles: false,
},
},
],
]);
});
it('should queue a forced library scan', async () => {
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
await sut.queueScan(libraryStub.externalLibrary1.id, { refreshAllFiles: true });
expect(jobMock.queue.mock.calls).toEqual([
[
{
name: JobName.LIBRARY_SCAN,
data: {
id: libraryStub.externalLibrary1.id,
refreshModifiedFiles: false,
refreshAllFiles: true,
},
},
],
]);
});
});
describe('queueEmptyTrash', () => {
it('should queue the trash job', async () => {
await sut.queueRemoveOffline(libraryStub.externalLibrary1.id);
expect(jobMock.queue.mock.calls).toEqual([
[
{
name: JobName.LIBRARY_REMOVE_OFFLINE,
name: JobName.LIBRARY_QUEUE_SYNC_ASSETS,
data: {
id: libraryStub.externalLibrary1.id,
},
@@ -1311,7 +1070,7 @@ describe(LibraryService.name, () => {
it('should queue the refresh job', async () => {
libraryMock.getAll.mockResolvedValue([libraryStub.externalLibrary1]);
await expect(sut.handleQueueAllScan({})).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleQueueSyncAll()).resolves.toBe(JobStatus.SUCCESS);
expect(jobMock.queue.mock.calls).toEqual([
[
@@ -1323,48 +1082,32 @@ describe(LibraryService.name, () => {
]);
expect(jobMock.queueAll).toHaveBeenCalledWith([
{
name: JobName.LIBRARY_SCAN,
name: JobName.LIBRARY_QUEUE_SYNC_FILES,
data: {
id: libraryStub.externalLibrary1.id,
refreshModifiedFiles: true,
refreshAllFiles: false,
},
},
]);
});
it('should queue the force refresh job', async () => {
libraryMock.getAll.mockResolvedValue([libraryStub.externalLibrary1]);
await expect(sut.handleQueueAllScan({ force: true })).resolves.toBe(JobStatus.SUCCESS);
expect(jobMock.queue).toHaveBeenCalledWith({
name: JobName.LIBRARY_QUEUE_CLEANUP,
data: {},
});
expect(jobMock.queueAll).toHaveBeenCalledWith([
{
name: JobName.LIBRARY_SCAN,
data: {
id: libraryStub.externalLibrary1.id,
refreshModifiedFiles: false,
refreshAllFiles: true,
},
},
]);
});
});
describe('handleRemoveOfflineFiles', () => {
it('should queue trash deletion jobs', async () => {
assetMock.getWith.mockResolvedValue({ items: [assetStub.image1], hasNextPage: false });
describe('handleQueueAssetOfflineCheck', () => {
it('should queue removal jobs', async () => {
libraryMock.get.mockResolvedValue(libraryStub.externalLibrary1);
assetMock.getAll.mockResolvedValue({ items: [assetStub.image1], hasNextPage: false });
assetMock.getById.mockResolvedValue(assetStub.image1);
await expect(sut.handleRemoveOffline({ id: libraryStub.externalLibrary1.id })).resolves.toBe(JobStatus.SUCCESS);
await expect(sut.handleQueueSyncAssets({ id: libraryStub.externalLibrary1.id })).resolves.toBe(JobStatus.SUCCESS);
expect(jobMock.queueAll).toHaveBeenCalledWith([
{ name: JobName.ASSET_DELETION, data: { id: assetStub.image1.id, deleteOnDisk: false } },
{
name: JobName.LIBRARY_SYNC_ASSET,
data: {
id: assetStub.image1.id,
importPaths: libraryStub.externalLibrary1.importPaths,
exclusionPatterns: libraryStub.externalLibrary1.exclusionPatterns,
},
},
]);
});
});
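For orientation while reading the assertions above, here is a minimal sketch of the two job payload shapes these tests construct. Field names are inferred from the fixtures in this spec; the authoritative definitions live in src/interfaces/job.interface.ts and may differ.

// Sketch only: shapes inferred from the test fixtures above.
interface ILibraryFileJob {
  id: string; // library id
  ownerId: string;
  assetPath: string;
  force: boolean;
}

interface ILibraryAssetJob {
  id: string; // asset id
  importPaths: string[];
  exclusionPatterns: string[];
}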

View File

@@ -1,6 +1,5 @@
import { BadRequestException, Inject, Injectable } from '@nestjs/common';
import { R_OK } from 'node:constants';
import { Stats } from 'node:fs';
import path, { basename, parse } from 'node:path';
import picomatch from 'picomatch';
import { StorageCore } from 'src/cores/storage.core';
@@ -10,27 +9,26 @@ import {
CreateLibraryDto,
LibraryResponseDto,
LibraryStatsResponseDto,
ScanLibraryDto,
mapLibrary,
UpdateLibraryDto,
ValidateLibraryDto,
ValidateLibraryImportPathResponseDto,
ValidateLibraryResponseDto,
mapLibrary,
} from 'src/dtos/library.dto';
import { AssetEntity } from 'src/entities/asset.entity';
import { LibraryEntity } from 'src/entities/library.entity';
import { AssetType } from 'src/enum';
import { IAssetRepository, WithProperty } from 'src/interfaces/asset.interface';
import { IAssetRepository } from 'src/interfaces/asset.interface';
import { ICryptoRepository } from 'src/interfaces/crypto.interface';
import { DatabaseLock, IDatabaseRepository } from 'src/interfaces/database.interface';
import { ArgOf } from 'src/interfaces/event.interface';
import {
IBaseJob,
IEntityJob,
IJobRepository,
ILibraryAssetJob,
ILibraryFileJob,
ILibraryOfflineJob,
ILibraryRefreshJob,
JOBS_LIBRARY_PAGINATION_SIZE,
JobName,
JOBS_LIBRARY_PAGINATION_SIZE,
JobStatus,
} from 'src/interfaces/job.interface';
import { ILibraryRepository } from 'src/interfaces/library.interface';
@@ -78,11 +76,7 @@ export class LibraryService {
this.jobRepository.addCronJob(
'libraryScan',
scan.cronExpression,
() =>
handlePromiseError(
this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SCAN_ALL, data: { force: false } }),
this.logger,
),
() => handlePromiseError(this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SYNC_ALL }), this.logger),
scan.enabled,
);
@@ -143,7 +137,7 @@ export class LibraryService {
const handler = async () => {
this.logger.debug(`File add event received for ${path} in library ${library.id}`);
if (matcher(path)) {
await this.scanAssets(library.id, [path], library.ownerId, false);
await this.syncFiles(library, [path]);
}
};
return handlePromiseError(handler(), this.logger);
@@ -151,9 +145,13 @@ export class LibraryService {
onChange: (path) => {
const handler = async () => {
this.logger.debug(`Detected file change for ${path} in library ${library.id}`);
const asset = await this.assetRepository.getByLibraryIdAndOriginalPath(library.id, path);
if (asset) {
await this.syncAssets(library, [asset.id]);
}
if (matcher(path)) {
// Note: if the changed file was not previously imported, it will be imported now.
await this.scanAssets(library.id, [path], library.ownerId, false);
await this.syncFiles(library, [path]);
}
};
return handlePromiseError(handler(), this.logger);
@@ -162,8 +160,8 @@ export class LibraryService {
const handler = async () => {
this.logger.debug(`Detected deleted file at ${path} in library ${library.id}`);
const asset = await this.assetRepository.getByLibraryIdAndOriginalPath(library.id, path);
if (asset && matcher(path)) {
await this.assetRepository.update({ id: asset.id, isOffline: true });
if (asset) {
await this.syncAssets(library, [asset.id]);
}
};
return handlePromiseError(handler(), this.logger);
@@ -216,7 +214,7 @@ export class LibraryService {
async getStatistics(id: string): Promise<LibraryStatsResponseDto> {
const statistics = await this.repository.getStatistics(id);
if (!statistics) {
throw new BadRequestException('Library not found');
throw new BadRequestException(`Library ${id} not found`);
}
return statistics;
}
@@ -250,20 +248,28 @@ export class LibraryService {
return mapLibrary(library);
}
private async scanAssets(libraryId: string, assetPaths: string[], ownerId: string, force = false) {
private async syncFiles({ id, ownerId }: LibraryEntity, assetPaths: string[]) {
await this.jobRepository.queueAll(
assetPaths.map((assetPath) => ({
name: JobName.LIBRARY_SCAN_ASSET,
name: JobName.LIBRARY_SYNC_FILE,
data: {
id: libraryId,
id,
assetPath,
ownerId,
force,
},
})),
);
}
private async syncAssets({ importPaths, exclusionPatterns }: LibraryEntity, assetIds: string[]) {
await this.jobRepository.queueAll(
assetIds.map((assetId) => ({
name: JobName.LIBRARY_SYNC_ASSET,
data: { id: assetId, importPaths, exclusionPatterns },
})),
);
}
private async validateImportPath(importPath: string): Promise<ValidateLibraryImportPathResponseDto> {
const validation = new ValidateLibraryImportPathResponseDto();
validation.importPath = importPath;
@@ -366,258 +372,182 @@ export class LibraryService {
return JobStatus.SUCCESS;
}
async handleAssetRefresh(job: ILibraryFileJob): Promise<JobStatus> {
async handleSyncFile(job: ILibraryFileJob): Promise<JobStatus> {
// Only needs to handle new assets
const assetPath = path.normalize(job.assetPath);
const existingAssetEntity = await this.assetRepository.getByLibraryIdAndOriginalPath(job.id, assetPath);
let stats: Stats;
try {
stats = await this.storageRepository.stat(assetPath);
} catch (error: Error | any) {
// Can't access file, probably offline
if (existingAssetEntity) {
// Mark asset as offline
this.logger.debug(`Marking asset as offline: ${assetPath}`);
await this.assetRepository.update({ id: existingAssetEntity.id, isOffline: true });
return JobStatus.SUCCESS;
} else {
// File can't be accessed and does not already exist in db
throw new BadRequestException('Cannot access file', { cause: error });
}
}
let doImport = false;
let doRefresh = false;
if (job.force) {
doRefresh = true;
}
const originalFileName = parse(assetPath).base;
if (!existingAssetEntity) {
// This asset is new to us, read it from disk
this.logger.debug(`Importing new asset: ${assetPath}`);
doImport = true;
} else if (stats.mtime.toISOString() !== existingAssetEntity.fileModifiedAt.toISOString()) {
// File modification time has changed since last time we checked, re-read from disk
this.logger.debug(
`File modification time has changed, re-importing asset: ${assetPath}. Old mtime: ${existingAssetEntity.fileModifiedAt}. New mtime: ${stats.mtime}`,
);
doRefresh = true;
} else if (existingAssetEntity.originalFileName !== originalFileName) {
// TODO: We can likely remove this check in the second half of 2024 when all assets have likely been re-imported by all users
this.logger.debug(
`Asset is missing file extension, re-importing: ${assetPath}. Current incorrect filename: ${existingAssetEntity.originalFileName}.`,
);
doRefresh = true;
} else if (!job.force && stats && !existingAssetEntity.isOffline) {
// Asset exists on disk and in db and mtime has not changed. Also, we are not forcing refresh. Therefore, do nothing
this.logger.debug(`Asset already exists in database and on disk, will not import: ${assetPath}`);
}
if (stats && existingAssetEntity?.isOffline) {
// File was previously offline but is now online
this.logger.debug(`Marking previously-offline asset as online: ${assetPath}`);
await this.assetRepository.update({ id: existingAssetEntity.id, isOffline: false });
doRefresh = true;
}
if (!doImport && !doRefresh) {
// If we don't import, exit here
let asset = await this.assetRepository.getByLibraryIdAndOriginalPath(job.id, assetPath);
if (asset) {
return JobStatus.SKIPPED;
}
let assetType: AssetType;
if (mimeTypes.isImage(assetPath)) {
assetType = AssetType.IMAGE;
} else if (mimeTypes.isVideo(assetPath)) {
assetType = AssetType.VIDEO;
} else {
throw new BadRequestException(`Unsupported file type ${assetPath}`);
let stat;
try {
stat = await this.storageRepository.stat(assetPath);
} catch (error: any) {
if (error.code === 'ENOENT') {
this.logger.error(`File not found: ${assetPath}`);
return JobStatus.SKIPPED;
}
this.logger.error(`Error reading file: ${assetPath}. Error: ${error}`);
return JobStatus.FAILED;
}
this.logger.log(`Importing new library asset: ${assetPath}`);
const library = await this.repository.get(job.id, true);
if (!library || library.deletedAt) {
this.logger.error('Cannot import asset into deleted library');
return JobStatus.FAILED;
}
// TODO: device asset id is deprecated, remove it
const deviceAssetId = `${basename(assetPath)}`.replaceAll(/\s+/g, '');
const pathHash = this.cryptoRepository.hashSha1(`path:${assetPath}`);
// TODO: doesn't xmp replace the file extension? Will need investigation
let sidecarPath: string | null = null;
if (await this.storageRepository.checkFileExists(`${assetPath}.xmp`, R_OK)) {
sidecarPath = `${assetPath}.xmp`;
}
// TODO: device asset id is deprecated, remove it
const deviceAssetId = `${basename(assetPath)}`.replaceAll(/\s+/g, '');
const assetType = mimeTypes.isVideo(assetPath) ? AssetType.VIDEO : AssetType.IMAGE;
let assetId;
if (doImport) {
const library = await this.repository.get(job.id, true);
if (library?.deletedAt) {
this.logger.error('Cannot import asset into deleted library');
return JobStatus.FAILED;
}
const mtime = stat.mtime;
const pathHash = this.cryptoRepository.hashSha1(`path:${assetPath}`);
asset = await this.assetRepository.create({
ownerId: job.ownerId,
libraryId: job.id,
checksum: pathHash,
originalPath: assetPath,
deviceAssetId,
deviceId: 'Library Import',
fileCreatedAt: mtime,
fileModifiedAt: mtime,
localDateTime: mtime,
type: assetType,
originalFileName: parse(assetPath).base,
// TODO: Pending a refactor of the domain asset service, this function is written out manually for now
const addedAsset = await this.assetRepository.create({
ownerId: job.ownerId,
libraryId: job.id,
checksum: pathHash,
originalPath: assetPath,
deviceAssetId,
deviceId: 'Library Import',
fileCreatedAt: stats.mtime,
fileModifiedAt: stats.mtime,
localDateTime: stats.mtime,
type: assetType,
originalFileName,
sidecarPath,
isExternal: true,
});
assetId = addedAsset.id;
} else if (doRefresh && existingAssetEntity) {
assetId = existingAssetEntity.id;
await this.assetRepository.updateAll([existingAssetEntity.id], {
fileCreatedAt: stats.mtime,
fileModifiedAt: stats.mtime,
originalFileName,
});
} else {
// Not importing and not refreshing, do nothing
return JobStatus.SKIPPED;
}
sidecarPath,
isExternal: true,
});
this.logger.debug(`Queueing metadata extraction for: ${assetPath}`);
await this.jobRepository.queue({ name: JobName.METADATA_EXTRACTION, data: { id: assetId, source: 'upload' } });
if (assetType === AssetType.VIDEO) {
await this.jobRepository.queue({ name: JobName.VIDEO_CONVERSION, data: { id: assetId } });
}
await this.queuePostSyncJobs(asset);
return JobStatus.SUCCESS;
}
async queueScan(id: string, dto: ScanLibraryDto) {
async queuePostSyncJobs(asset: AssetEntity) {
this.logger.debug(`Queueing metadata extraction for: ${asset.originalPath}`);
await this.jobRepository.queue({ name: JobName.METADATA_EXTRACTION, data: { id: asset.id, source: 'upload' } });
if (asset.type === AssetType.VIDEO) {
await this.jobRepository.queue({ name: JobName.VIDEO_CONVERSION, data: { id: asset.id } });
}
}
async queueScan(id: string) {
await this.findOrFail(id);
await this.jobRepository.queue({
name: JobName.LIBRARY_SCAN,
name: JobName.LIBRARY_QUEUE_SYNC_FILES,
data: {
id,
refreshModifiedFiles: dto.refreshModifiedFiles ?? false,
refreshAllFiles: dto.refreshAllFiles ?? false,
},
});
await this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_SYNC_ASSETS, data: { id } });
}
async queueRemoveOffline(id: string) {
this.logger.verbose(`Queueing offline file removal from library ${id}`);
await this.jobRepository.queue({ name: JobName.LIBRARY_REMOVE_OFFLINE, data: { id } });
}
async handleQueueAllScan(job: IBaseJob): Promise<JobStatus> {
this.logger.debug(`Refreshing all external libraries: force=${job.force}`);
async handleQueueSyncAll(): Promise<JobStatus> {
this.logger.debug(`Refreshing all external libraries`);
await this.jobRepository.queue({ name: JobName.LIBRARY_QUEUE_CLEANUP, data: {} });
const libraries = await this.repository.getAll(true);
await this.jobRepository.queueAll(
libraries.map((library) => ({
name: JobName.LIBRARY_SCAN,
name: JobName.LIBRARY_QUEUE_SYNC_FILES,
data: {
id: library.id,
},
})),
);
await this.jobRepository.queueAll(
libraries.map((library) => ({
name: JobName.LIBRARY_QUEUE_SYNC_ASSETS,
data: {
id: library.id,
refreshModifiedFiles: !job.force,
refreshAllFiles: job.force ?? false,
},
})),
);
return JobStatus.SUCCESS;
}
async handleOfflineCheck(job: ILibraryOfflineJob): Promise<JobStatus> {
async handleSyncAsset(job: ILibraryAssetJob): Promise<JobStatus> {
const asset = await this.assetRepository.getById(job.id);
if (!asset) {
// Asset is no longer in the database, skip
return JobStatus.SKIPPED;
}
if (asset.isOffline) {
this.logger.verbose(`Asset is already offline: ${asset.originalPath}`);
return JobStatus.SUCCESS;
}
const markOffline = async (explanation: string) => {
if (!asset.isOffline) {
this.logger.debug(`${explanation}, removing: ${asset.originalPath}`);
await this.assetRepository.updateAll([asset.id], { isOffline: true, deletedAt: new Date() });
}
};
const isInPath = job.importPaths.find((path) => asset.originalPath.startsWith(path));
if (!isInPath) {
this.logger.debug(`Asset is no longer in an import path, marking offline: ${asset.originalPath}`);
await this.assetRepository.update({ id: asset.id, isOffline: true });
await markOffline('Asset is no longer in an import path');
return JobStatus.SUCCESS;
}
const isExcluded = job.exclusionPatterns.some((pattern) => picomatch.isMatch(asset.originalPath, pattern));
if (isExcluded) {
this.logger.debug(`Asset is covered by an exclusion pattern, marking offline: ${asset.originalPath}`);
await this.assetRepository.update({ id: asset.id, isOffline: true });
await markOffline('Asset is covered by an exclusion pattern');
return JobStatus.SUCCESS;
}
const fileExists = await this.storageRepository.checkFileExists(asset.originalPath, R_OK);
if (!fileExists) {
this.logger.debug(`Asset is no longer found on disk, marking offline: ${asset.originalPath}`);
await this.assetRepository.update({ id: asset.id, isOffline: true });
let stat;
try {
stat = await this.storageRepository.stat(asset.originalPath);
} catch {
await markOffline('Asset is no longer on disk or is inaccessible because of permissions');
return JobStatus.SUCCESS;
}
this.logger.verbose(
`Asset is found on disk, not covered by an exclusion pattern, and is in an import path, keeping online: ${asset.originalPath}`,
);
const mtime = stat.mtime;
const isAssetModified = mtime.toISOString() !== asset.fileModifiedAt.toISOString();
if (asset.isOffline || isAssetModified) {
this.logger.debug(`Asset was offline or modified, updating asset record ${asset.originalPath}`);
// TODO: When we have asset status, we need to leave deletedAt as is when status is trashed
await this.assetRepository.updateAll([asset.id], {
isOffline: false,
deletedAt: null,
fileCreatedAt: mtime,
fileModifiedAt: mtime,
originalFileName: parse(asset.originalPath).base,
});
}
if (isAssetModified) {
this.logger.debug(`Asset was modified, queuing metadata extraction for: ${asset.originalPath}`);
await this.queuePostSyncJobs(asset);
}
return JobStatus.SUCCESS;
}
async handleRemoveOffline(job: IEntityJob): Promise<JobStatus> {
this.logger.debug(`Removing offline assets for library ${job.id}`);
const assetPagination = usePagination(JOBS_LIBRARY_PAGINATION_SIZE, (pagination) =>
this.assetRepository.getWith(pagination, WithProperty.IS_OFFLINE, job.id, true),
);
let offlineAssets = 0;
for await (const assets of assetPagination) {
offlineAssets += assets.length;
if (assets.length > 0) {
this.logger.debug(`Discovered ${offlineAssets} offline assets in library ${job.id}`);
await this.jobRepository.queueAll(
assets.map((asset) => ({
name: JobName.ASSET_DELETION,
data: {
id: asset.id,
deleteOnDisk: false,
},
})),
);
this.logger.verbose(`Queued deletion of ${assets.length} offline assets in library ${job.id}`);
}
}
if (offlineAssets) {
this.logger.debug(`Finished queueing deletion of ${offlineAssets} offline assets for library ${job.id}`);
} else {
this.logger.debug(`Found no offline assets to delete from library ${job.id}`);
}
return JobStatus.SUCCESS;
}
async handleQueueAssetRefresh(job: ILibraryRefreshJob): Promise<JobStatus> {
async handleQueueSyncFiles(job: IEntityJob): Promise<JobStatus> {
const library = await this.repository.get(job.id);
if (!library) {
this.logger.debug(`Library ${job.id} not found, skipping refresh`);
return JobStatus.SKIPPED;
}
this.logger.log(`Refreshing library ${library.id}`);
this.logger.log(`Refreshing library ${library.id} for new assets`);
const validImportPaths: string[] = [];
@@ -630,55 +560,66 @@ export class LibraryService {
}
}
if (validImportPaths.length === 0) {
if (validImportPaths.length > 0) {
const assetsOnDisk = this.storageRepository.walk({
pathsToCrawl: validImportPaths,
includeHidden: false,
exclusionPatterns: library.exclusionPatterns,
take: JOBS_LIBRARY_PAGINATION_SIZE,
});
let count = 0;
for await (const assetBatch of assetsOnDisk) {
count += assetBatch.length;
this.logger.debug(`Discovered ${count} asset(s) on disk for library ${library.id}...`);
await this.syncFiles(library, assetBatch);
this.logger.verbose(`Queued scan of ${assetBatch.length} crawled asset(s) in library ${library.id}...`);
}
if (count > 0) {
this.logger.debug(`Finished queueing scan of ${count} assets on disk for library ${library.id}`);
} else {
this.logger.debug(`No non-excluded assets found in any import path for library ${library.id}`);
}
} else {
this.logger.warn(`No valid import paths found for library ${library.id}`);
}
const assetsOnDisk = this.storageRepository.walk({
pathsToCrawl: validImportPaths,
includeHidden: false,
exclusionPatterns: library.exclusionPatterns,
take: JOBS_LIBRARY_PAGINATION_SIZE,
});
await this.repository.update({ id: job.id, refreshedAt: new Date() });
let crawledAssets = 0;
return JobStatus.SUCCESS;
}
for await (const assetBatch of assetsOnDisk) {
crawledAssets += assetBatch.length;
this.logger.debug(`Discovered ${crawledAssets} asset(s) on disk for library ${library.id}...`);
await this.scanAssets(job.id, assetBatch, library.ownerId, job.refreshAllFiles ?? false);
this.logger.verbose(`Queued scan of ${assetBatch.length} crawled asset(s) in library ${library.id}...`);
async handleQueueSyncAssets(job: IEntityJob): Promise<JobStatus> {
const library = await this.repository.get(job.id);
if (!library) {
return JobStatus.SKIPPED;
}
if (crawledAssets) {
this.logger.debug(`Finished queueing scan of ${crawledAssets} assets on disk for library ${library.id}`);
} else {
this.logger.debug(`No non-excluded assets found in any import path for library ${library.id}`);
}
this.logger.log(`Scanning library ${library.id} for removed assets`);
const onlineAssets = usePagination(JOBS_LIBRARY_PAGINATION_SIZE, (pagination) =>
this.assetRepository.getWith(pagination, WithProperty.IS_ONLINE, job.id),
this.assetRepository.getAll(pagination, { libraryId: job.id }),
);
let onlineAssetCount = 0;
let assetCount = 0;
for await (const assets of onlineAssets) {
onlineAssetCount += assets.length;
this.logger.debug(`Discovered ${onlineAssetCount} asset(s) in library ${library.id}...`);
assetCount += assets.length;
this.logger.debug(`Discovered ${assetCount} asset(s) in library ${library.id}...`);
await this.jobRepository.queueAll(
assets.map((asset) => ({
name: JobName.LIBRARY_CHECK_OFFLINE,
data: { id: asset.id, importPaths: validImportPaths, exclusionPatterns: library.exclusionPatterns },
name: JobName.LIBRARY_SYNC_ASSET,
data: { id: asset.id, importPaths: library.importPaths, exclusionPatterns: library.exclusionPatterns },
})),
);
this.logger.debug(`Queued online check of ${assets.length} asset(s) in library ${library.id}...`);
this.logger.debug(`Queued check of ${assets.length} asset(s) in library ${library.id}...`);
}
if (onlineAssetCount) {
this.logger.log(`Finished queueing online check of ${onlineAssetCount} assets for library ${library.id}`);
if (assetCount) {
this.logger.log(`Finished queueing check of ${assetCount} assets for library ${library.id}`);
}
await this.repository.update({ id: job.id, refreshedAt: new Date() });
return JobStatus.SUCCESS;
}
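The heart of handleSyncAsset above is a three-step check: is the asset still under an import path, is it matched by an exclusion pattern, and can it still be read from disk. A condensed sketch of that decision as a standalone helper (the function name and the use of fs access instead of storageRepository.stat are illustrative assumptions):

import { access } from 'node:fs/promises';
import { R_OK } from 'node:constants';
import picomatch from 'picomatch';

// Hypothetical helper mirroring the checks in handleSyncAsset: an asset is
// treated as offline when it has left every import path, matches an exclusion
// pattern, or can no longer be read from disk. The service additionally sets
// deletedAt and re-queues metadata extraction when the mtime changed.
async function isAssetOffline(
  originalPath: string,
  importPaths: string[],
  exclusionPatterns: string[],
): Promise<boolean> {
  if (!importPaths.some((importPath) => originalPath.startsWith(importPath))) {
    return true; // no longer inside any import path
  }
  if (exclusionPatterns.some((pattern) => picomatch.isMatch(originalPath, pattern))) {
    return true; // covered by an exclusion pattern
  }
  try {
    await access(originalPath, R_OK); // handleSyncAsset uses storageRepository.stat here
    return false;
  } catch {
    return true; // missing or unreadable on disk
  }
}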

View File

@@ -1,5 +1,8 @@
import { Stats } from 'node:fs';
import { ExifEntity } from 'src/entities/exif.entity';
import {
AssetFileType,
AssetType,
AudioCodec,
Colorspace,
ImageFormat,
@@ -7,9 +10,7 @@ import {
TranscodeHWAccel,
TranscodePolicy,
VideoCodec,
} from 'src/config';
import { ExifEntity } from 'src/entities/exif.entity';
import { AssetFileType, AssetType } from 'src/enum';
} from 'src/enum';
import { IAssetRepository, WithoutProperty } from 'src/interfaces/asset.interface';
import { ICryptoRepository } from 'src/interfaces/crypto.interface';
import { IJobRepository, JobName, JobStatus } from 'src/interfaces/job.interface';

View File

@@ -1,21 +1,23 @@
import { Inject, Injectable, UnsupportedMediaTypeException } from '@nestjs/common';
import { dirname } from 'node:path';
import { GeneratedImageType, StorageCore } from 'src/cores/storage.core';
import { SystemConfigCore } from 'src/cores/system-config.core';
import { SystemConfigFFmpegDto } from 'src/dtos/system-config.dto';
import { AssetEntity } from 'src/entities/asset.entity';
import {
AssetFileType,
AssetPathType,
AssetType,
AudioCodec,
Colorspace,
ImageFormat,
StorageFolder,
TranscodeHWAccel,
TranscodePolicy,
TranscodeTarget,
VideoCodec,
VideoContainer,
} from 'src/config';
import { GeneratedImageType, StorageCore, StorageFolder } from 'src/cores/storage.core';
import { SystemConfigCore } from 'src/cores/system-config.core';
import { SystemConfigFFmpegDto } from 'src/dtos/system-config.dto';
import { AssetEntity } from 'src/entities/asset.entity';
import { AssetPathType } from 'src/entities/move.entity';
import { AssetFileType, AssetType } from 'src/enum';
} from 'src/enum';
import { IAssetRepository, WithoutProperty } from 'src/interfaces/asset.interface';
import { ICryptoRepository } from 'src/interfaces/crypto.interface';
import {

View File

@@ -316,7 +316,7 @@ describe(MetadataService.name, () => {
it('should handle lists of numbers', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({ ISO: [160] as any });
metadataMock.readTags.mockResolvedValue({ ISO: [160] });
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
@@ -411,7 +411,7 @@ describe(MetadataService.name, () => {
it('should extract tags from Keywords as a list with a number', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({ Keywords: ['Parent', 2024] as any[] });
metadataMock.readTags.mockResolvedValue({ Keywords: ['Parent', 2024] });
tagMock.upsertValue.mockResolvedValue(tagStub.parent);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
@@ -467,6 +467,17 @@ describe(MetadataService.name, () => {
expect(tagMock.upsertValue).toHaveBeenNthCalledWith(3, { userId: 'user-id', value: 'TagA', parent: undefined });
});
it('should extract tags from HierarchicalSubject as a list with a number', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({ HierarchicalSubject: ['Parent', 2024] });
tagMock.upsertValue.mockResolvedValue(tagStub.parent);
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(tagMock.upsertValue).toHaveBeenCalledWith({ userId: 'user-id', value: 'Parent', parent: undefined });
expect(tagMock.upsertValue).toHaveBeenCalledWith({ userId: 'user-id', value: '2024', parent: undefined });
});
it('should ignore / characters in a HierarchicalSubject tag', async () => {
assetMock.getByIds.mockResolvedValue([assetStub.image]);
metadataMock.readTags.mockResolvedValue({ HierarchicalSubject: ['Mom/Dad'] });

View File

@@ -11,6 +11,7 @@ import { SystemConfigCore } from 'src/cores/system-config.core';
import { OnEmit } from 'src/decorators';
import { AssetFaceEntity } from 'src/entities/asset-face.entity';
import { AssetEntity } from 'src/entities/asset.entity';
import { ExifEntity } from 'src/entities/exif.entity';
import { PersonEntity } from 'src/entities/person.entity';
import { AssetType, SourceType } from 'src/enum';
import { IAlbumRepository } from 'src/interfaces/album.interface';
@@ -236,7 +237,7 @@ export class MetadataService {
const { dateTimeOriginal, localDateTime, timeZone, modifyDate } = this.getDates(asset, exifTags);
const { latitude, longitude, country, state, city } = await this.getGeo(exifTags, reverseGeocoding);
const exifData = {
const exifData: Partial<ExifEntity> = {
assetId: asset.id,
// dates
@@ -264,7 +265,7 @@ export class MetadataService {
make: exifTags.Make ?? null,
model: exifTags.Model ?? null,
fps: validate(Number.parseFloat(exifTags.VideoFrameRate!)),
iso: validate(exifTags.ISO),
iso: validate(exifTags.ISO) as number,
exposureTime: exifTags.ExposureTime ?? null,
lensModel: exifTags.LensModel ?? null,
fNumber: validate(exifTags.FNumber),
@@ -395,13 +396,13 @@ export class MetadataService {
}
private async applyTagList(asset: AssetEntity, exifTags: ImmichTags) {
const tags: Array<string | number> = [];
const tags: string[] = [];
if (exifTags.TagsList) {
tags.push(...exifTags.TagsList);
tags.push(...exifTags.TagsList.map(String));
} else if (exifTags.HierarchicalSubject) {
tags.push(
...exifTags.HierarchicalSubject.map((tag) =>
tag
String(tag)
// convert | to /
.replaceAll('/', '<PLACEHOLDER>')
.replaceAll('|', '/')
@@ -413,10 +414,10 @@ export class MetadataService {
if (!Array.isArray(keywords)) {
keywords = [keywords];
}
tags.push(...keywords);
tags.push(...keywords.map(String));
}
const results = await upsertTags(this.tagRepository, { userId: asset.ownerId, tags: tags.map(String) });
const results = await upsertTags(this.tagRepository, { userId: asset.ownerId, tags });
await this.tagRepository.upsertAssetTags({ assetId: asset.id, tagIds: results.map((tag) => tag.id) });
}
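To make the coercion above concrete, a small standalone illustration using the same values as the new HierarchicalSubject spec (numbers become string tags; '|' is the XMP hierarchy delimiter and is swapped for '/', while literal '/' is parked in a placeholder first, the restore step falling outside the hunk shown above):

// Values as in the spec: HierarchicalSubject: ['Parent', 2024]
const hierarchicalSubject: Array<string | number> = ['Parent', 2024];
const tags = hierarchicalSubject.map((tag) =>
  String(tag)
    .replaceAll('/', '<PLACEHOLDER>') // protect literal slashes
    .replaceAll('|', '/'), // '|' separates hierarchy levels in XMP
);
// tags === ['Parent', '2024'], matching the upsertValue assertions in the spec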

Some files were not shown because too many files have changed in this diff.