Compare commits

27 Commits

Author SHA1 Message Date
shenlong-tanwen
c8e6080ddc replace trigger with manual pruning 2025-11-08 01:23:13 +05:30
shenlong-tanwen
70300ad3d4 ignore failed uploads from being retried on timer 2025-11-06 21:18:35 +05:30
shenlong-tanwen
91fe3c8d96 fetch failed uploads from local asset upload entity 2025-11-06 20:54:09 +05:30
Alex
3b6e23fed7 Merge branch 'main' into fix/periodic-batch-enqueue 2025-11-05 13:29:28 -06:00
shenlong
365abd8906 fix: check if unmetered instead of wifi (#23380)
Co-authored-by: shenlong-tanwen <139912620+shalong-tanwen@users.noreply.github.com>
2025-11-05 13:27:38 -06:00
Alex
25fb43bbe3 fix: fully sync local library on app restart (#23323) 2025-11-05 14:09:50 +00:00
bo0tzz
125e8cee01 chore: update config.json example (#23471)
* chore: update config.json example

closes #23465

* fix: format, for real this time
2025-11-05 08:05:53 -06:00
Arnaud Wery
c15e9bfa72 fix(web): "select all" button in trash and permanently deleted count (#23594) 2025-11-05 14:05:48 +00:00
Dmitry
35e188e6e7 docs: sync ru docs with main README.md (#23627) 2025-11-05 08:05:03 -06:00
Sergey Katsubo
3cc9dd126c fix(web): fix timezone dropdown for timestamps lacking milliseconds (#23615)
Fix timezone selector for timestamps without milliseconds
2025-11-05 08:03:55 -06:00
shenlong-tanwen
d37cae5dcd track asset upload status and sort upload queue 2025-11-05 18:00:37 +05:30
shenlong-tanwen
94044c98bf fix: enqueue assets in batches for uploads 2025-11-05 18:00:37 +05:30
Jason Rasmussen
aa69d89b9f fix: bad merge (#23610) 2025-11-04 16:22:45 -05:00
Jason Rasmussen
29c14a3f58 refactor: database column names (#23356) 2025-11-04 16:03:21 -05:00
Jason Rasmussen
0df70365d7 feat: exif medium tests (#23561) 2025-11-04 16:03:02 -05:00
Mees Frensel
c34be73d81 fix(web): consistently use mdiMotionPauseOutline icon (#23595) 2025-11-04 12:12:47 +01:00
renovate[bot]
f396e9e374 chore(deps): update prom/prometheus docker digest to 4921475 (#23578)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-11-04 11:49:12 +01:00
renovate[bot]
821a9d4691 chore(deps): update redis:6.2-alpine docker digest to 37e0024 (#23579)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-11-04 11:48:21 +01:00
renovate[bot]
cad654586f chore(deps): update dependency @types/node to ^22.18.13 (#23581)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-11-04 11:47:54 +01:00
github-actions
28eb1bc13c chore: version v2.2.3 2025-11-04 03:14:34 +00:00
Brandon Wees
1e4779cf48 fix(mobile): ignore patch releases for app version alerts (#23565)
* fix(mobile): ignore patch releases for app version alerts

* chore: make difference type nullable to indicate when versions match

* chore: add error handling for semver parsing

* chore: tests
2025-11-03 21:09:32 -06:00
Sergey Katsubo
0647c22956 fix(mobile): handle empty original filename (#23469)
* Handle empty original filename

* Handle TypeError from photo_manager titleAsync

* More compact exception log
2025-11-03 21:09:18 -06:00
Alex
b8087b4fa2 chore: ios prod build with correct argument, get version number from pubspec (#23554)
* chore: ios prod build with correct argument, get version number from pubspec

* Update mobile/ios/fastlane/Fastfile

Co-authored-by: bo0tzz <git@bo0tzz.me>

---------

Co-authored-by: bo0tzz <git@bo0tzz.me>
2025-11-03 10:11:11 -06:00
Jonathan S
d94cb9641b chore: correct hosted isar paths in fdroid_build_isar.sh (#23529)
This should hopefully unblock F-Droid builds, which are a few versions behind.

Based on the suggestion in https://github.com/immich-app/immich/pull/22757#issuecomment-3404516987
2025-11-03 08:35:56 -06:00
Daniel Dietzler
517c3e1d4c fix: exif gps parsing of malformed data (#23551)
* fix: exif gps parsing of malformed data

* chore: e2e test
2025-11-03 09:02:41 -05:00
Ben
619de2a5e4 fix(web): search bar accessibility (#23550)
* fix: always show search type when search bar is focused

* fix: indicate search type to screen reader users
2025-11-03 08:31:57 -05:00
Mert
79d0e3e1ed fix(ml): ocr inputs not resized correctly (#23541)
* fix resizing, use pillow

* unused import

* linting

* lanczos

* optimizations

fused operations

unused import
2025-11-03 07:21:30 +00:00
106 changed files with 10346 additions and 1394 deletions

View File

@@ -382,6 +382,7 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
submodules: 'recursive'
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0

View File

@@ -1,6 +1,6 @@
{
"name": "@immich/cli",
"version": "2.2.100",
"version": "2.2.101",
"description": "Command Line Interface (CLI) for Immich",
"type": "module",
"exports": "./dist/index.js",
@@ -20,7 +20,7 @@
"@types/lodash-es": "^4.17.12",
"@types/micromatch": "^4.0.9",
"@types/mock-fs": "^4.13.1",
"@types/node": "^22.18.12",
"@types/node": "^22.18.13",
"@vitest/coverage-v8": "^3.0.0",
"byte-size": "^9.0.0",
"cli-progress": "^3.12.0",

View File

@@ -83,7 +83,7 @@ services:
container_name: immich_prometheus
ports:
- 9090:9090
image: prom/prometheus@sha256:23031bfe0e74a13004252caaa74eccd0d62b6c6e7a04711d5b8bf5b7e113adc7
image: prom/prometheus@sha256:49214755b6153f90a597adcbff0252cc61069f8ab69ce8411285cd4a560e8038
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml
- prometheus-data:/prometheus

View File

@@ -106,14 +106,14 @@ SELECT "user"."email", "asset"."type", COUNT(*) FROM "asset"
```sql title="Count by tag"
SELECT "t"."value" AS "tag_name", COUNT(*) AS "number_assets" FROM "tag" "t"
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagsId" JOIN "asset" "a" ON "ta"."assetsId" = "a"."id"
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagId" JOIN "asset" "a" ON "ta"."assetId" = "a"."id"
WHERE "a"."visibility" != 'hidden'
GROUP BY "t"."value" ORDER BY "number_assets" DESC;
```
```sql title="Count by tag (per user)"
SELECT "t"."value" AS "tag_name", "u"."email" as "user_email", COUNT(*) AS "number_assets" FROM "tag" "t"
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagsId" JOIN "asset" "a" ON "ta"."assetsId" = "a"."id" JOIN "user" "u" ON "a"."ownerId" = "u"."id"
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagId" JOIN "asset" "a" ON "ta"."assetId" = "a"."id" JOIN "user" "u" ON "a"."ownerId" = "u"."id"
WHERE "a"."visibility" != 'hidden'
GROUP BY "t"."value", "u"."email" ORDER BY "number_assets" DESC;
```

View File

@@ -16,48 +16,76 @@ The default configuration looks like this:
```json
{
"ffmpeg": {
"crf": 23,
"threads": 0,
"preset": "ultrafast",
"targetVideoCodec": "h264",
"acceptedVideoCodecs": ["h264"],
"targetAudioCodec": "aac",
"acceptedAudioCodecs": ["aac", "mp3", "libopus", "pcm_s16le"],
"acceptedContainers": ["mov", "ogg", "webm"],
"targetResolution": "720",
"maxBitrate": "0",
"bframes": -1,
"refs": 0,
"gopSize": 0,
"temporalAQ": false,
"cqMode": "auto",
"twoPass": false,
"preferredHwDevice": "auto",
"transcode": "required",
"tonemap": "hable",
"accel": "disabled",
"accelDecode": false
},
"backup": {
"database": {
"enabled": true,
"cronExpression": "0 02 * * *",
"enabled": true,
"keepLastAmount": 14
}
},
"ffmpeg": {
"accel": "disabled",
"accelDecode": false,
"acceptedAudioCodecs": ["aac", "mp3", "libopus"],
"acceptedContainers": ["mov", "ogg", "webm"],
"acceptedVideoCodecs": ["h264"],
"bframes": -1,
"cqMode": "auto",
"crf": 23,
"gopSize": 0,
"maxBitrate": "0",
"preferredHwDevice": "auto",
"preset": "ultrafast",
"refs": 0,
"targetAudioCodec": "aac",
"targetResolution": "720",
"targetVideoCodec": "h264",
"temporalAQ": false,
"threads": 0,
"tonemap": "hable",
"transcode": "required",
"twoPass": false
},
"image": {
"colorspace": "p3",
"extractEmbedded": false,
"fullsize": {
"enabled": false,
"format": "jpeg",
"quality": 80
},
"preview": {
"format": "jpeg",
"quality": 80,
"size": 1440
},
"thumbnail": {
"format": "webp",
"quality": 80,
"size": 250
}
},
"job": {
"backgroundTask": {
"concurrency": 5
},
"smartSearch": {
"faceDetection": {
"concurrency": 2
},
"library": {
"concurrency": 5
},
"metadataExtraction": {
"concurrency": 5
},
"faceDetection": {
"concurrency": 2
"migration": {
"concurrency": 5
},
"notifications": {
"concurrency": 5
},
"ocr": {
"concurrency": 1
},
"search": {
"concurrency": 5
@@ -65,20 +93,23 @@ The default configuration looks like this:
"sidecar": {
"concurrency": 5
},
"library": {
"concurrency": 5
},
"migration": {
"concurrency": 5
"smartSearch": {
"concurrency": 2
},
"thumbnailGeneration": {
"concurrency": 3
},
"videoConversion": {
"concurrency": 1
}
},
"library": {
"scan": {
"cronExpression": "0 0 * * *",
"enabled": true
},
"notifications": {
"concurrency": 5
"watch": {
"enabled": false
}
},
"logging": {
@@ -86,8 +117,11 @@ The default configuration looks like this:
"level": "log"
},
"machineLearning": {
"enabled": true,
"urls": ["http://immich-machine-learning:3003"],
"availabilityChecks": {
"enabled": true,
"interval": 30000,
"timeout": 2000
},
"clip": {
"enabled": true,
"modelName": "ViT-B-32__openai"
@@ -96,27 +130,59 @@ The default configuration looks like this:
"enabled": true,
"maxDistance": 0.01
},
"enabled": true,
"facialRecognition": {
"enabled": true,
"modelName": "buffalo_l",
"minScore": 0.7,
"maxDistance": 0.5,
"minFaces": 3
}
"minFaces": 3,
"minScore": 0.7,
"modelName": "buffalo_l"
},
"ocr": {
"enabled": true,
"maxResolution": 736,
"minDetectionScore": 0.5,
"minRecognitionScore": 0.8,
"modelName": "PP-OCRv5_mobile"
},
"urls": ["http://immich-machine-learning:3003"]
},
"map": {
"darkStyle": "https://tiles.immich.cloud/v1/style/dark.json",
"enabled": true,
"lightStyle": "https://tiles.immich.cloud/v1/style/light.json",
"darkStyle": "https://tiles.immich.cloud/v1/style/dark.json"
},
"reverseGeocoding": {
"enabled": true
"lightStyle": "https://tiles.immich.cloud/v1/style/light.json"
},
"metadata": {
"faces": {
"import": false
}
},
"newVersionCheck": {
"enabled": true
},
"nightlyTasks": {
"clusterNewFaces": true,
"databaseCleanup": true,
"generateMemories": true,
"missingThumbnails": true,
"startTime": "00:00",
"syncQuotaUsage": true
},
"notifications": {
"smtp": {
"enabled": false,
"from": "",
"replyTo": "",
"transport": {
"host": "",
"ignoreCert": false,
"password": "",
"port": 587,
"secure": false,
"username": ""
}
}
},
"oauth": {
"autoLaunch": false,
"autoRegister": true,
@@ -128,70 +194,44 @@ The default configuration looks like this:
"issuerUrl": "",
"mobileOverrideEnabled": false,
"mobileRedirectUri": "",
"profileSigningAlgorithm": "none",
"roleClaim": "immich_role",
"scope": "openid email profile",
"signingAlgorithm": "RS256",
"profileSigningAlgorithm": "none",
"storageLabelClaim": "preferred_username",
"storageQuotaClaim": "immich_quota"
"storageQuotaClaim": "immich_quota",
"timeout": 30000,
"tokenEndpointAuthMethod": "client_secret_post"
},
"passwordLogin": {
"enabled": true
},
"reverseGeocoding": {
"enabled": true
},
"server": {
"externalDomain": "",
"loginPageMessage": "",
"publicUsers": true
},
"storageTemplate": {
"enabled": false,
"hashVerificationEnabled": true,
"template": "{{y}}/{{y}}-{{MM}}-{{dd}}/{{filename}}"
},
"image": {
"thumbnail": {
"format": "webp",
"size": 250,
"quality": 80
},
"preview": {
"format": "jpeg",
"size": 1440,
"quality": 80
},
"colorspace": "p3",
"extractEmbedded": false
},
"newVersionCheck": {
"enabled": true
},
"trash": {
"enabled": true,
"days": 30
"templates": {
"email": {
"albumInviteTemplate": "",
"albumUpdateTemplate": "",
"welcomeTemplate": ""
}
},
"theme": {
"customCss": ""
},
"library": {
"scan": {
"enabled": true,
"cronExpression": "0 0 * * *"
},
"watch": {
"enabled": false
}
},
"server": {
"externalDomain": "",
"loginPageMessage": ""
},
"notifications": {
"smtp": {
"enabled": false,
"from": "",
"replyTo": "",
"transport": {
"ignoreCert": false,
"host": "",
"port": 587,
"username": "",
"password": ""
}
}
"trash": {
"days": 30,
"enabled": true
},
"user": {
"deleteDelay": 7

View File

@@ -1,4 +1,8 @@
[
{
"label": "v2.2.3",
"url": "https://docs.v2.2.3.archive.immich.app"
},
{
"label": "v2.2.2",
"url": "https://docs.v2.2.2.archive.immich.app"

View File

@@ -35,7 +35,7 @@ services:
- 2285:2285
redis:
image: redis:6.2-alpine@sha256:77697a75da9f94e9357b61fcaf8345f69e3d9d32e9d15032c8415c21263977dc
image: redis:6.2-alpine@sha256:37e002448575b32a599109664107e374c8709546905c372a34d64919043b9ceb
database:
image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:6f3e9d2c2177af16c2988ff71425d79d89ca630ec2f9c8db03209ab716542338

View File

@@ -1,6 +1,6 @@
{
"name": "immich-e2e",
"version": "2.2.2",
"version": "2.2.3",
"description": "",
"main": "index.js",
"type": "module",
@@ -25,7 +25,7 @@
"@playwright/test": "^1.44.1",
"@socket.io/component-emitter": "^3.1.2",
"@types/luxon": "^3.4.2",
"@types/node": "^22.18.12",
"@types/node": "^22.18.13",
"@types/oidc-provider": "^9.0.0",
"@types/pg": "^8.15.1",
"@types/pngjs": "^6.0.4",

View File

@@ -15,7 +15,6 @@ import { DateTime } from 'luxon';
import { randomBytes } from 'node:crypto';
import { readFile, writeFile } from 'node:fs/promises';
import { basename, join } from 'node:path';
import sharp from 'sharp';
import { Socket } from 'socket.io-client';
import { createUserDto, uuidDto } from 'src/fixtures';
import { makeRandomImage } from 'src/generators';
@@ -41,40 +40,6 @@ const today = DateTime.fromObject({
}) as DateTime<true>;
const yesterday = today.minus({ days: 1 });
const createTestImageWithExif = async (filename: string, exifData: Record<string, any>) => {
// Generate unique color to ensure different checksums for each image
const r = Math.floor(Math.random() * 256);
const g = Math.floor(Math.random() * 256);
const b = Math.floor(Math.random() * 256);
// Create a 100x100 solid color JPEG using Sharp
const imageBytes = await sharp({
create: {
width: 100,
height: 100,
channels: 3,
background: { r, g, b },
},
})
.jpeg({ quality: 90 })
.toBuffer();
// Add random suffix to filename to avoid collisions
const uniqueFilename = filename.replace('.jpg', `-${randomBytes(4).toString('hex')}.jpg`);
const filepath = join(tempDir, uniqueFilename);
await writeFile(filepath, imageBytes);
// Filter out undefined values before writing EXIF
const cleanExifData = Object.fromEntries(Object.entries(exifData).filter(([, value]) => value !== undefined));
await exiftool.write(filepath, cleanExifData);
// Re-read the image bytes after EXIF has been written
const finalImageBytes = await readFile(filepath);
return { filepath, imageBytes: finalImageBytes, filename: uniqueFilename };
};
describe('/asset', () => {
let admin: LoginResponseDto;
let websocket: Socket;
@@ -1249,411 +1214,6 @@ describe('/asset', () => {
});
});
describe('EXIF metadata extraction', () => {
describe('Additional date tag extraction', () => {
describe('Date-time vs time-only tag handling', () => {
it('should fall back to file timestamps when only time-only tags are available', async () => {
const { imageBytes, filename } = await createTestImageWithExif('time-only-fallback.jpg', {
TimeCreated: '2023:11:15 14:30:00', // Time-only tag, should not be used for dateTimeOriginal
// Exclude all date-time tags to force fallback to file timestamps
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
SubSecMediaCreateDate: undefined,
CreateDate: undefined,
MediaCreateDate: undefined,
CreationDate: undefined,
DateTimeCreated: undefined,
GPSDateTime: undefined,
DateTimeUTC: undefined,
SonyDateTime2: undefined,
GPSDateStamp: undefined,
});
const oldDate = new Date('2020-01-01T00:00:00.000Z');
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
fileCreatedAt: oldDate.toISOString(),
fileModifiedAt: oldDate.toISOString(),
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should fall back to file timestamps, which we set to 2020-01-01
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2020-01-01T00:00:00.000Z').getTime(),
);
});
it('should prefer DateTimeOriginal over time-only tags', async () => {
const { imageBytes, filename } = await createTestImageWithExif('datetime-over-time.jpg', {
DateTimeOriginal: '2023:10:10 10:00:00', // Should be preferred
TimeCreated: '2023:11:15 14:30:00', // Should be ignored (time-only)
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should use DateTimeOriginal, not TimeCreated
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-10-10T10:00:00.000Z').getTime(),
);
});
});
describe('GPSDateTime tag extraction', () => {
it('should extract GPSDateTime with GPS coordinates', async () => {
const { imageBytes, filename } = await createTestImageWithExif('gps-datetime.jpg', {
GPSDateTime: '2023:11:15 12:30:00Z',
GPSLatitude: 37.7749,
GPSLongitude: -122.4194,
// Exclude other date tags
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
SubSecMediaCreateDate: undefined,
CreateDate: undefined,
MediaCreateDate: undefined,
CreationDate: undefined,
DateTimeCreated: undefined,
TimeCreated: undefined,
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
expect(assetInfo.exifInfo?.latitude).toBeCloseTo(37.7749, 4);
expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-122.4194, 4);
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-11-15T12:30:00.000Z').getTime(),
);
});
});
describe('CreateDate tag extraction', () => {
it('should extract CreateDate when available', async () => {
const { imageBytes, filename } = await createTestImageWithExif('create-date.jpg', {
CreateDate: '2023:11:15 10:30:00',
// Exclude other higher priority date tags
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
SubSecMediaCreateDate: undefined,
MediaCreateDate: undefined,
CreationDate: undefined,
DateTimeCreated: undefined,
TimeCreated: undefined,
GPSDateTime: undefined,
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-11-15T10:30:00.000Z').getTime(),
);
});
});
describe('GPSDateStamp tag extraction', () => {
it('should fall back to file timestamps when only date-only tags are available', async () => {
const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp.jpg', {
GPSDateStamp: '2023:11:15', // Date-only tag, should not be used for dateTimeOriginal
// Note: NOT including GPSTimeStamp to avoid automatic GPSDateTime creation
GPSLatitude: 51.5074,
GPSLongitude: -0.1278,
// Explicitly exclude all testable date-time tags to force fallback to file timestamps
DateTimeOriginal: undefined,
CreateDate: undefined,
CreationDate: undefined,
GPSDateTime: undefined,
});
const oldDate = new Date('2020-01-01T00:00:00.000Z');
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
fileCreatedAt: oldDate.toISOString(),
fileModifiedAt: oldDate.toISOString(),
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
expect(assetInfo.exifInfo?.latitude).toBeCloseTo(51.5074, 4);
expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-0.1278, 4);
// Should fall back to file timestamps, which we set to 2020-01-01
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2020-01-01T00:00:00.000Z').getTime(),
);
});
});
/*
* NOTE: The following EXIF date tags are NOT effectively usable with JPEG test files:
*
* NOT WRITABLE to JPEG:
* - MediaCreateDate: Can be read from video files but not written to JPEG
* - DateTimeCreated: Read-only tag in JPEG format
* - DateTimeUTC: Cannot be written to JPEG files
* - SonyDateTime2: Proprietary Sony tag, not writable to JPEG
* - SubSecMediaCreateDate: Tag not defined for JPEG format
* - SourceImageCreateTime: Non-standard insta360 tag, not writable to JPEG
*
* WRITABLE but NOT READABLE from JPEG:
* - SubSecDateTimeOriginal: Can be written but not read back from JPEG
* - SubSecCreateDate: Can be written but not read back from JPEG
*
* EFFECTIVELY TESTABLE TAGS (writable and readable):
* - DateTimeOriginal ✓
* - CreateDate ✓
* - CreationDate ✓
* - GPSDateTime ✓
*
* The metadata service correctly handles non-readable tags and will fall back to
* file timestamps when only non-readable tags are present.
*/
describe('Date tag priority order', () => {
it('should respect the complete date tag priority order', async () => {
// Test cases using only EFFECTIVELY TESTABLE tags (writable AND readable from JPEG)
const testCases = [
{
name: 'DateTimeOriginal has highest priority among testable tags',
exifData: {
DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
CreateDate: '2023:05:05 05:00:00', // TESTABLE
CreationDate: '2023:07:07 07:00:00', // TESTABLE
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
},
expectedDate: '2023-04-04T04:00:00.000Z',
},
{
name: 'CreationDate when DateTimeOriginal missing',
exifData: {
CreationDate: '2023:05:05 05:00:00', // TESTABLE
CreateDate: '2023:07:07 07:00:00', // TESTABLE
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
},
expectedDate: '2023-05-05T05:00:00.000Z',
},
{
name: 'CreationDate when standard EXIF tags missing',
exifData: {
CreationDate: '2023:07:07 07:00:00', // TESTABLE
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
},
expectedDate: '2023-07-07T07:00:00.000Z',
},
{
name: 'GPSDateTime when no other testable date tags present',
exifData: {
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
Make: 'SONY',
},
expectedDate: '2023-10-10T10:00:00.000Z',
},
];
for (const testCase of testCases) {
const { imageBytes, filename } = await createTestImageWithExif(
`${testCase.name.replaceAll(/\s+/g, '-').toLowerCase()}.jpg`,
testCase.exifData,
);
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal, `Failed for: ${testCase.name}`).toBeDefined();
expect(
new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime(),
`Date mismatch for: ${testCase.name}`,
).toBe(new Date(testCase.expectedDate).getTime());
}
});
});
describe('Edge cases for date tag handling', () => {
it('should fall back to file timestamps with GPSDateStamp alone', async () => {
const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp-only.jpg', {
GPSDateStamp: '2023:08:08', // Date-only tag, should not be used for dateTimeOriginal
// Intentionally no GPSTimeStamp
// Exclude all other date tags
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
SubSecMediaCreateDate: undefined,
CreateDate: undefined,
MediaCreateDate: undefined,
CreationDate: undefined,
DateTimeCreated: undefined,
TimeCreated: undefined,
GPSDateTime: undefined,
DateTimeUTC: undefined,
});
const oldDate = new Date('2020-01-01T00:00:00.000Z');
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
fileCreatedAt: oldDate.toISOString(),
fileModifiedAt: oldDate.toISOString(),
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should fall back to file timestamps, which we set to 2020-01-01
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2020-01-01T00:00:00.000Z').getTime(),
);
});
it('should handle all testable date tags present to verify complete priority order', async () => {
const { imageBytes, filename } = await createTestImageWithExif('all-testable-date-tags.jpg', {
// All TESTABLE date tags to JPEG format (writable AND readable)
DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
CreateDate: '2023:05:05 05:00:00', // TESTABLE
CreationDate: '2023:07:07 07:00:00', // TESTABLE
GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
// Note: Excluded non-testable tags:
// SubSec tags: writable but not readable from JPEG
// Non-writable tags: MediaCreateDate, DateTimeCreated, DateTimeUTC, SonyDateTime2, etc.
// Time-only/date-only tags: already excluded from EXIF_DATE_TAGS
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should use DateTimeOriginal as it has the highest priority among testable tags
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-04-04T04:00:00.000Z').getTime(),
);
});
it('should use CreationDate when SubSec tags are missing', async () => {
const { imageBytes, filename } = await createTestImageWithExif('creation-date-priority.jpg', {
CreationDate: '2023:07:07 07:00:00', // WRITABLE
GPSDateTime: '2023:10:10 10:00:00', // WRITABLE
// Note: DateTimeCreated, DateTimeUTC, SonyDateTime2 are NOT writable to JPEG
// Note: TimeCreated and GPSDateStamp are excluded from EXIF_DATE_TAGS (time-only/date-only)
// Exclude SubSec and standard EXIF tags
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
CreateDate: undefined,
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should use CreationDate when available
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-07-07T07:00:00.000Z').getTime(),
);
});
it('should skip invalid date formats and use next valid tag', async () => {
const { imageBytes, filename } = await createTestImageWithExif('invalid-date-handling.jpg', {
// Note: Testing invalid date handling with only WRITABLE tags
GPSDateTime: '2023:10:10 10:00:00', // WRITABLE - Valid date
CreationDate: '2023:13:13 13:00:00', // WRITABLE - Valid date
// Note: TimeCreated excluded (time-only), DateTimeCreated not writable to JPEG
// Exclude other date tags
SubSecDateTimeOriginal: undefined,
DateTimeOriginal: undefined,
SubSecCreateDate: undefined,
CreateDate: undefined,
});
const asset = await utils.createAsset(admin.accessToken, {
assetData: {
filename,
bytes: imageBytes,
},
});
await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });
const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });
expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
// Should skip invalid dates and use the first valid one (GPSDateTime)
expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
new Date('2023-10-10T10:00:00.000Z').getTime(),
);
});
});
});
});
describe('POST /assets/exist', () => {
it('ignores invalid deviceAssetIds', async () => {
const response = await utils.checkExistingAssets(user1.accessToken, {

View File

@@ -1,178 +0,0 @@
#!/usr/bin/env node
/**
* Script to generate test images with additional EXIF date tags
* This creates actual JPEG images with embedded metadata for testing
* Images are generated into e2e/test-assets/metadata/dates/
*/
import { execSync } from 'node:child_process';
import { writeFileSync } from 'node:fs';
import { dirname, join } from 'node:path';
import { fileURLToPath } from 'node:url';
import sharp from 'sharp';
interface TestImage {
filename: string;
description: string;
exifTags: Record<string, string>;
}
const testImages: TestImage[] = [
{
filename: 'time-created.jpg',
description: 'Image with TimeCreated tag',
exifTags: {
TimeCreated: '2023:11:15 14:30:00',
Make: 'Canon',
Model: 'EOS R5',
},
},
{
filename: 'gps-datetime.jpg',
description: 'Image with GPSDateTime and coordinates',
exifTags: {
GPSDateTime: '2023:11:15 12:30:00Z',
GPSLatitude: '37.7749',
GPSLongitude: '-122.4194',
GPSLatitudeRef: 'N',
GPSLongitudeRef: 'W',
},
},
{
filename: 'datetime-utc.jpg',
description: 'Image with DateTimeUTC tag',
exifTags: {
DateTimeUTC: '2023:11:15 10:30:00',
Make: 'Nikon',
Model: 'D850',
},
},
{
filename: 'gps-datestamp.jpg',
description: 'Image with GPSDateStamp and GPSTimeStamp',
exifTags: {
GPSDateStamp: '2023:11:15',
GPSTimeStamp: '08:30:00',
GPSLatitude: '51.5074',
GPSLongitude: '-0.1278',
GPSLatitudeRef: 'N',
GPSLongitudeRef: 'W',
},
},
{
filename: 'sony-datetime2.jpg',
description: 'Sony camera image with SonyDateTime2 tag',
exifTags: {
SonyDateTime2: '2023:11:15 06:30:00',
Make: 'SONY',
Model: 'ILCE-7RM5',
},
},
{
filename: 'date-priority-test.jpg',
description: 'Image with multiple date tags to test priority',
exifTags: {
SubSecDateTimeOriginal: '2023:01:01 01:00:00',
DateTimeOriginal: '2023:02:02 02:00:00',
SubSecCreateDate: '2023:03:03 03:00:00',
CreateDate: '2023:04:04 04:00:00',
CreationDate: '2023:05:05 05:00:00',
DateTimeCreated: '2023:06:06 06:00:00',
TimeCreated: '2023:07:07 07:00:00',
GPSDateTime: '2023:08:08 08:00:00',
DateTimeUTC: '2023:09:09 09:00:00',
GPSDateStamp: '2023:10:10',
SonyDateTime2: '2023:11:11 11:00:00',
},
},
{
filename: 'new-tags-only.jpg',
description: 'Image with only additional date tags (no standard tags)',
exifTags: {
TimeCreated: '2023:12:01 15:45:30',
GPSDateTime: '2023:12:01 13:45:30Z',
DateTimeUTC: '2023:12:01 13:45:30',
GPSDateStamp: '2023:12:01',
SonyDateTime2: '2023:12:01 08:45:30',
GPSLatitude: '40.7128',
GPSLongitude: '-74.0060',
GPSLatitudeRef: 'N',
GPSLongitudeRef: 'W',
},
},
];
const generateTestImages = async (): Promise<void> => {
// Target directory: e2e/test-assets/metadata/dates/
// Current file is in: e2e/src/
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const targetDir = join(__dirname, '..', 'test-assets', 'metadata', 'dates');
console.log('Generating test images with additional EXIF date tags...');
console.log(`Target directory: ${targetDir}`);
for (const image of testImages) {
try {
const imagePath = join(targetDir, image.filename);
// Create unique JPEG file using Sharp
const r = Math.floor(Math.random() * 256);
const g = Math.floor(Math.random() * 256);
const b = Math.floor(Math.random() * 256);
const jpegData = await sharp({
create: {
width: 100,
height: 100,
channels: 3,
background: { r, g, b },
},
})
.jpeg({ quality: 90 })
.toBuffer();
writeFileSync(imagePath, jpegData);
// Build exiftool command to add EXIF data
const exifArgs = Object.entries(image.exifTags)
.map(([tag, value]) => `-${tag}="${value}"`)
.join(' ');
const command = `exiftool ${exifArgs} -overwrite_original "${imagePath}"`;
console.log(`Creating ${image.filename}: ${image.description}`);
execSync(command, { stdio: 'pipe' });
// Verify the tags were written
const verifyCommand = `exiftool -json "${imagePath}"`;
const result = execSync(verifyCommand, { encoding: 'utf8' });
const metadata = JSON.parse(result)[0];
console.log(` ✓ Created with ${Object.keys(image.exifTags).length} EXIF tags`);
// Log first date tag found for verification
const firstDateTag = Object.keys(image.exifTags).find(
(tag) => tag.includes('Date') || tag.includes('Time') || tag.includes('Created'),
);
if (firstDateTag && metadata[firstDateTag]) {
console.log(` ✓ Verified ${firstDateTag}: ${metadata[firstDateTag]}`);
}
} catch (error) {
console.error(`Failed to create ${image.filename}:`, (error as Error).message);
}
}
console.log('\nTest image generation complete!');
console.log('Files created in:', targetDir);
console.log('\nTo test these images:');
console.log(`cd ${targetDir} && exiftool -time:all -gps:all *.jpg`);
};
export { generateTestImages };
// Run the generator if this file is executed directly
if (import.meta.url === `file://${process.argv[1]}`) {
generateTestImages().catch(console.error);
}

View File

@@ -1,8 +1,10 @@
from typing import Any
import cv2
import numpy as np
from numpy.typing import NDArray
from PIL import Image
from rapidocr.ch_ppocr_det import TextDetector as RapidTextDetector
from rapidocr.ch_ppocr_det.utils import DBPostProcess
from rapidocr.inference_engine.base import FileInfo, InferSession
from rapidocr.utils import DownloadFile, DownloadFileInput
from rapidocr.utils.typings import EngineType, LangDet, OCRVersion, TaskType
@@ -10,11 +12,10 @@ from rapidocr.utils.typings import ModelType as RapidModelType
from immich_ml.config import log
from immich_ml.models.base import InferenceModel
from immich_ml.models.transforms import decode_cv2
from immich_ml.schemas import ModelFormat, ModelSession, ModelTask, ModelType
from immich_ml.sessions.ort import OrtSession
from .schemas import OcrOptions, TextDetectionOutput
from .schemas import TextDetectionOutput
class TextDetector(InferenceModel):
@@ -24,13 +25,20 @@ class TextDetector(InferenceModel):
def __init__(self, model_name: str, **model_kwargs: Any) -> None:
super().__init__(model_name, **model_kwargs, model_format=ModelFormat.ONNX)
self.max_resolution = 736
self.min_score = 0.5
self.score_mode = "fast"
self.mean = np.array([0.5, 0.5, 0.5], dtype=np.float32)
self.std_inv = np.float32(1.0) / (np.array([0.5, 0.5, 0.5], dtype=np.float32) * 255.0)
self._empty: TextDetectionOutput = {
"image": np.empty(0, dtype=np.float32),
"boxes": np.empty(0, dtype=np.float32),
"scores": np.empty(0, dtype=np.float32),
}
self.postprocess = DBPostProcess(
thresh=0.3,
box_thresh=model_kwargs.get("minScore", 0.5),
max_candidates=1000,
unclip_ratio=1.6,
use_dilation=True,
score_mode="fast",
)
def _download(self) -> None:
model_info = InferSession.get_model_url(
@@ -52,35 +60,65 @@ class TextDetector(InferenceModel):
def _load(self) -> ModelSession:
# TODO: support other runtime sessions
session = OrtSession(self.model_path)
self.model = RapidTextDetector(
OcrOptions(
session=session.session,
limit_side_len=self.max_resolution,
limit_type="min",
box_thresh=self.min_score,
score_mode=self.score_mode,
)
)
return session
return OrtSession(self.model_path)
def _predict(self, inputs: bytes | Image.Image) -> TextDetectionOutput:
results = self.model(decode_cv2(inputs))
if results.boxes is None or results.scores is None or results.img is None:
# partly adapted from RapidOCR
def _predict(self, inputs: Image.Image) -> TextDetectionOutput:
w, h = inputs.size
if w < 32 or h < 32:
return self._empty
out = self.session.run(None, {"x": self._transform(inputs)})[0]
boxes, scores = self.postprocess(out, (h, w))
if len(boxes) == 0:
return self._empty
return {
"image": results.img,
"boxes": np.array(results.boxes, dtype=np.float32),
"scores": np.array(results.scores, dtype=np.float32),
"boxes": self.sorted_boxes(boxes),
"scores": np.array(scores, dtype=np.float32),
}
# adapted from RapidOCR
def _transform(self, img: Image.Image) -> NDArray[np.float32]:
if img.height < img.width:
ratio = float(self.max_resolution) / img.height
else:
ratio = float(self.max_resolution) / img.width
resize_h = int(img.height * ratio)
resize_w = int(img.width * ratio)
resize_h = int(round(resize_h / 32) * 32)
resize_w = int(round(resize_w / 32) * 32)
resized_img = img.resize((int(resize_w), int(resize_h)), resample=Image.Resampling.LANCZOS)
img_np: NDArray[np.float32] = cv2.cvtColor(np.array(resized_img, dtype=np.float32), cv2.COLOR_RGB2BGR) # type: ignore
img_np -= self.mean
img_np *= self.std_inv
img_np = np.transpose(img_np, (2, 0, 1))
return np.expand_dims(img_np, axis=0)
def sorted_boxes(self, dt_boxes: NDArray[np.float32]) -> NDArray[np.float32]:
if len(dt_boxes) == 0:
return dt_boxes
# Sort by y, then identify lines, then sort by (line, x)
y_order = np.argsort(dt_boxes[:, 0, 1], kind="stable")
sorted_y = dt_boxes[y_order, 0, 1]
line_ids = np.empty(len(dt_boxes), dtype=np.int32)
line_ids[0] = 0
np.cumsum(np.abs(np.diff(sorted_y)) >= 10, out=line_ids[1:])
# Create composite sort key for final ordering
# Shift line_ids by large factor, add x for tie-breaking
sort_key = line_ids[y_order] * 1e6 + dt_boxes[y_order, 0, 0]
final_order = np.argsort(sort_key, kind="stable")
sorted_boxes: NDArray[np.float32] = dt_boxes[y_order[final_order]]
return sorted_boxes
def configure(self, **kwargs: Any) -> None:
if (max_resolution := kwargs.get("maxResolution")) is not None:
self.max_resolution = max_resolution
self.model.limit_side_len = max_resolution
if (min_score := kwargs.get("minScore")) is not None:
self.min_score = min_score
self.model.postprocess_op.box_thresh = min_score
self.postprocess.box_thresh = min_score
if (score_mode := kwargs.get("scoreMode")) is not None:
self.score_mode = score_mode
self.model.postprocess_op.score_mode = score_mode
self.postprocess.score_mode = score_mode
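
The vectorized `sorted_boxes` above encodes a reading-order sort without a Python loop: boxes are sorted by y, split into text lines wherever consecutive y values jump by 10 px or more, then ordered left-to-right within each line via a composite key. A minimal standalone sketch of the same technique, with made-up sample data (not Immich code; each box is reduced to its top-left corner for brevity):

```python
import numpy as np

# Hypothetical sample data: one top-left corner per detected box.
boxes = np.array(
    [[[50.0, 12.0]], [[5.0, 10.0]], [[8.0, 40.0]], [[90.0, 41.0]]],
    dtype=np.float32,
)

# 1. Sort by y; consecutive boxes whose y values differ by < 10 px
#    share a line id (line_ids ends up in y-sorted order).
y_order = np.argsort(boxes[:, 0, 1], kind="stable")
sorted_y = boxes[y_order, 0, 1]
line_ids = np.empty(len(boxes), dtype=np.int32)
line_ids[0] = 0
np.cumsum(np.abs(np.diff(sorted_y)) >= 10, out=line_ids[1:])

# 2. Composite key over the y-sorted boxes: the line id dominates via a
#    large factor, x breaks ties inside a line, so the final order is
#    top-to-bottom, then left-to-right.
sort_key = line_ids * 1e6 + boxes[y_order, 0, 0]
reading_order = boxes[y_order[np.argsort(sort_key, kind="stable")]]
print(reading_order[:, 0, :])  # [[5 10] [50 12] [8 40] [90 41]]
```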

View File

@@ -1,9 +1,8 @@
from typing import Any
import cv2
import numpy as np
from numpy.typing import NDArray
from PIL.Image import Image
from PIL import Image
from rapidocr.ch_ppocr_rec import TextRecInput
from rapidocr.ch_ppocr_rec import TextRecognizer as RapidTextRecognizer
from rapidocr.inference_engine.base import FileInfo, InferSession
@@ -14,6 +13,7 @@ from rapidocr.utils.vis_res import VisRes
from immich_ml.config import log, settings
from immich_ml.models.base import InferenceModel
from immich_ml.models.transforms import pil_to_cv2
from immich_ml.schemas import ModelFormat, ModelSession, ModelTask, ModelType
from immich_ml.sessions.ort import OrtSession
@@ -65,17 +65,16 @@ class TextRecognizer(InferenceModel):
)
return session
def _predict(self, _: Image, texts: TextDetectionOutput) -> TextRecognitionOutput:
boxes, img, box_scores = texts["boxes"], texts["image"], texts["scores"]
def _predict(self, img: Image.Image, texts: TextDetectionOutput) -> TextRecognitionOutput:
boxes, box_scores = texts["boxes"], texts["scores"]
if boxes.shape[0] == 0:
return self._empty
rec = self.model(TextRecInput(img=self.get_crop_img_list(img, boxes)))
if rec.txts is None:
return self._empty
height, width = img.shape[0:2]
boxes[:, :, 0] /= width
boxes[:, :, 1] /= height
boxes[:, :, 0] /= img.width
boxes[:, :, 1] /= img.height
text_scores = np.array(rec.scores)
valid_text_score_idx = text_scores > self.min_score
@@ -87,7 +86,7 @@ class TextRecognizer(InferenceModel):
"textScore": text_scores[valid_text_score_idx],
}
def get_crop_img_list(self, img: NDArray[np.float32], boxes: NDArray[np.float32]) -> list[NDArray[np.float32]]:
def get_crop_img_list(self, img: Image.Image, boxes: NDArray[np.float32]) -> list[NDArray[np.uint8]]:
img_crop_width = np.maximum(
np.linalg.norm(boxes[:, 1] - boxes[:, 0], axis=1), np.linalg.norm(boxes[:, 2] - boxes[:, 3], axis=1)
).astype(np.int32)
@@ -98,22 +97,55 @@ class TextRecognizer(InferenceModel):
pts_std[:, 1:3, 0] = img_crop_width[:, None]
pts_std[:, 2:4, 1] = img_crop_height[:, None]
img_crop_sizes = np.stack([img_crop_width, img_crop_height], axis=1).tolist()
imgs: list[NDArray[np.float32]] = []
for box, pts_std, dst_size in zip(list(boxes), list(pts_std), img_crop_sizes):
M = cv2.getPerspectiveTransform(box, pts_std)
dst_img: NDArray[np.float32] = cv2.warpPerspective(
img,
M,
dst_size,
borderMode=cv2.BORDER_REPLICATE,
flags=cv2.INTER_CUBIC,
) # type: ignore
dst_height, dst_width = dst_img.shape[0:2]
img_crop_sizes = np.stack([img_crop_width, img_crop_height], axis=1)
all_coeffs = self._get_perspective_transform(pts_std, boxes)
imgs: list[NDArray[np.uint8]] = []
for coeffs, dst_size in zip(all_coeffs, img_crop_sizes):
dst_img = img.transform(
size=tuple(dst_size),
method=Image.Transform.PERSPECTIVE,
data=tuple(coeffs),
resample=Image.Resampling.BICUBIC,
)
dst_width, dst_height = dst_img.size
if dst_height * 1.0 / dst_width >= 1.5:
dst_img = np.rot90(dst_img)
imgs.append(dst_img)
dst_img = dst_img.rotate(90, expand=True)
imgs.append(pil_to_cv2(dst_img))
return imgs
def _get_perspective_transform(self, src: NDArray[np.float32], dst: NDArray[np.float32]) -> NDArray[np.float32]:
N = src.shape[0]
x, y = src[:, :, 0], src[:, :, 1]
u, v = dst[:, :, 0], dst[:, :, 1]
A = np.zeros((N, 8, 9), dtype=np.float32)
# Fill even rows (0, 2, 4, 6): [x, y, 1, 0, 0, 0, -u*x, -u*y, -u]
A[:, ::2, 0] = x
A[:, ::2, 1] = y
A[:, ::2, 2] = 1
A[:, ::2, 6] = -u * x
A[:, ::2, 7] = -u * y
A[:, ::2, 8] = -u
# Fill odd rows (1, 3, 5, 7): [0, 0, 0, x, y, 1, -v*x, -v*y, -v]
A[:, 1::2, 3] = x
A[:, 1::2, 4] = y
A[:, 1::2, 5] = 1
A[:, 1::2, 6] = -v * x
A[:, 1::2, 7] = -v * y
A[:, 1::2, 8] = -v
# Solve using SVD for all matrices at once
_, _, Vt = np.linalg.svd(A)
H = Vt[:, -1, :].reshape(N, 3, 3)
H = H / H[:, 2:3, 2:3]
# Extract the 8 coefficients for each transformation
return np.column_stack(
[H[:, 0, 0], H[:, 0, 1], H[:, 0, 2], H[:, 1, 0], H[:, 1, 1], H[:, 1, 2], H[:, 2, 0], H[:, 2, 1]]
) # pyright: ignore[reportReturnType]
def configure(self, **kwargs: Any) -> None:
self.min_score = kwargs.get("minScore", self.min_score)
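
`_get_perspective_transform` above is a batched direct linear transform (DLT): each 4-point correspondence yields an 8×9 homogeneous system whose null vector, recovered from the SVD, is the homography up to scale. A single-box sketch of the same derivation, with made-up sample points and cross-checked against OpenCV's solver (`homography` is a hypothetical helper name, not part of the code above):

```python
import cv2
import numpy as np

def homography(src: np.ndarray, dst: np.ndarray) -> np.ndarray:
    """Solve the 4-point homography (x, y) -> (u, v) via SVD (DLT)."""
    x, y = src[:, 0], src[:, 1]
    u, v = dst[:, 0], dst[:, 1]
    A = np.zeros((8, 9), dtype=np.float64)
    # Even rows: [x, y, 1, 0, 0, 0, -u*x, -u*y, -u]
    A[::2, 0], A[::2, 1], A[::2, 2] = x, y, 1
    A[::2, 6], A[::2, 7], A[::2, 8] = -u * x, -u * y, -u
    # Odd rows: [0, 0, 0, x, y, 1, -v*x, -v*y, -v]
    A[1::2, 3], A[1::2, 4], A[1::2, 5] = x, y, 1
    A[1::2, 6], A[1::2, 7], A[1::2, 8] = -v * x, -v * y, -v
    _, _, Vt = np.linalg.svd(A)  # null vector = last right-singular vector
    H = Vt[-1].reshape(3, 3)
    return H / H[2, 2]           # normalize so H[2, 2] == 1

src = np.array([[0, 0], [100, 0], [100, 50], [0, 50]], dtype=np.float32)
dst = np.array([[10, 5], [120, 0], [130, 60], [0, 55]], dtype=np.float32)
assert np.allclose(homography(src, dst), cv2.getPerspectiveTransform(src, dst), atol=1e-6)
```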

View File

@@ -7,7 +7,6 @@ from typing_extensions import TypedDict
class TextDetectionOutput(TypedDict):
image: npt.NDArray[np.float32]
boxes: npt.NDArray[np.float32]
scores: npt.NDArray[np.float32]

View File

@@ -1,6 +1,6 @@
[project]
name = "immich-ml"
version = "2.2.2"
version = "2.2.3"
description = ""
authors = [{ name = "Hau Tran", email = "alex.tran1502@gmail.com" }]
requires-python = ">=3.10,<4.0"

View File

@@ -88,7 +88,6 @@ if [ "$CURRENT_MOBILE" != "$NEXT_MOBILE" ]; then
fi
sed -i "s/\"android\.injected\.version\.name\" => \"$CURRENT_SERVER\",/\"android\.injected\.version\.name\" => \"$NEXT_SERVER\",/" mobile/android/fastlane/Fastfile
sed -i "s/version_number: \"$CURRENT_SERVER\"$/version_number: \"$NEXT_SERVER\"/" mobile/ios/fastlane/Fastfile
sed -i "s/\"android\.injected\.version\.code\" => $CURRENT_MOBILE,/\"android\.injected\.version\.code\" => $NEXT_MOBILE,/" mobile/android/fastlane/Fastfile
sed -i "s/^version: $CURRENT_SERVER+$CURRENT_MOBILE$/version: $NEXT_SERVER+$NEXT_MOBILE/" mobile/pubspec.yaml

View File

@@ -35,8 +35,8 @@ platform :android do
task: 'bundle',
build_type: 'Release',
properties: {
"android.injected.version.code" => 3025,
"android.injected.version.name" => "2.2.2",
"android.injected.version.code" => 3026,
"android.injected.version.name" => "2.2.3",
}
)
upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')

File diff suppressed because one or more lines are too long

View File

@@ -32,6 +32,17 @@ platform :ios do
)
end
# Helper method to get version from pubspec.yaml
def get_version_from_pubspec
require 'yaml'
pubspec_path = File.join(Dir.pwd, "../..", "pubspec.yaml")
pubspec = YAML.load_file(pubspec_path)
version_string = pubspec['version']
version_string ? version_string.split('+').first : nil
end
# Helper method to configure code signing for all targets
def configure_code_signing(bundle_id_suffix: "")
bundle_suffix = bundle_id_suffix.empty? ? "" : ".#{bundle_id_suffix}"
@@ -158,7 +169,8 @@ platform :ios do
# Build and upload with version number
build_and_upload(
api_key: api_key,
version_number: "2.1.0"
version_number: get_version_from_pubspec,
distribute_external: false,
)
end
@@ -168,8 +180,9 @@ platform :ios do
path: "./Runner.xcodeproj",
targets: ["Runner", "ShareExtension", "WidgetExtension"]
)
increment_version_number(
version_number: "2.2.2"
version_number: get_version_from_pubspec
)
increment_build_number(
build_number: latest_testflight_build_number + 1,

View File

@@ -7,3 +7,5 @@ enum AssetVisibilityEnum { timeline, hidden, archive, locked }
enum SortUserBy { id }
enum ActionSource { timeline, viewer }
enum UploadErrorType { none, network, client, server, unknown }

View File

@@ -239,7 +239,7 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
final networkCapabilities = await _ref?.read(connectivityApiProvider).getCapabilities() ?? [];
return _ref
?.read(uploadServiceProvider)
.startBackupWithHttpClient(currentUser.id, networkCapabilities.hasWifi, _cancellationToken);
.startBackupWithHttpClient(currentUser.id, networkCapabilities.isUnmetered, _cancellationToken);
},
(error, stack) {
dPrint(() => "Error in backup zone $error, $stack");

View File

@@ -0,0 +1,18 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/constants/enums.dart';
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.dart';
import 'package:immich_mobile/infrastructure/utils/drift_default.mixin.dart';
class LocalAssetUploadEntity extends Table with DriftDefaultsMixin {
const LocalAssetUploadEntity();
TextColumn get assetId => text().references(LocalAssetEntity, #id, onDelete: KeyAction.cascade)();
IntColumn get numberOfAttempts => integer().withDefault(const Constant(0))();
DateTimeColumn get lastAttemptAt => dateTime().withDefault(currentDateAndTime)();
IntColumn get errorType => intEnum<UploadErrorType>().withDefault(const Constant(0))();
TextColumn get errorMessage => text().nullable()();
@override
Set<Column> get primaryKey => {assetId};
}

View File

@@ -0,0 +1,783 @@
// dart format width=80
// ignore_for_file: type=lint
import 'package:drift/drift.dart' as i0;
import 'package:immich_mobile/infrastructure/entities/local_asset_upload_entity.drift.dart'
as i1;
import 'package:immich_mobile/constants/enums.dart' as i2;
import 'package:immich_mobile/infrastructure/entities/local_asset_upload_entity.dart'
as i3;
import 'package:drift/src/runtime/query_builder/query_builder.dart' as i4;
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.drift.dart'
as i5;
import 'package:drift/internal/modular.dart' as i6;
typedef $$LocalAssetUploadEntityTableCreateCompanionBuilder =
i1.LocalAssetUploadEntityCompanion Function({
required String assetId,
i0.Value<int> numberOfAttempts,
i0.Value<DateTime> lastAttemptAt,
i0.Value<i2.UploadErrorType> errorType,
i0.Value<String?> errorMessage,
});
typedef $$LocalAssetUploadEntityTableUpdateCompanionBuilder =
i1.LocalAssetUploadEntityCompanion Function({
i0.Value<String> assetId,
i0.Value<int> numberOfAttempts,
i0.Value<DateTime> lastAttemptAt,
i0.Value<i2.UploadErrorType> errorType,
i0.Value<String?> errorMessage,
});
final class $$LocalAssetUploadEntityTableReferences
extends
i0.BaseReferences<
i0.GeneratedDatabase,
i1.$LocalAssetUploadEntityTable,
i1.LocalAssetUploadEntityData
> {
$$LocalAssetUploadEntityTableReferences(
super.$_db,
super.$_table,
super.$_typedResult,
);
static i5.$LocalAssetEntityTable _assetIdTable(i0.GeneratedDatabase db) =>
i6.ReadDatabaseContainer(db)
.resultSet<i5.$LocalAssetEntityTable>('local_asset_entity')
.createAlias(
i0.$_aliasNameGenerator(
i6.ReadDatabaseContainer(db)
.resultSet<i1.$LocalAssetUploadEntityTable>(
'local_asset_upload_entity',
)
.assetId,
i6.ReadDatabaseContainer(
db,
).resultSet<i5.$LocalAssetEntityTable>('local_asset_entity').id,
),
);
i5.$$LocalAssetEntityTableProcessedTableManager get assetId {
final $_column = $_itemColumn<String>('asset_id')!;
final manager = i5
.$$LocalAssetEntityTableTableManager(
$_db,
i6.ReadDatabaseContainer(
$_db,
).resultSet<i5.$LocalAssetEntityTable>('local_asset_entity'),
)
.filter((f) => f.id.sqlEquals($_column));
final item = $_typedResult.readTableOrNull(_assetIdTable($_db));
if (item == null) return manager;
return i0.ProcessedTableManager(
manager.$state.copyWith(prefetchedData: [item]),
);
}
}
class $$LocalAssetUploadEntityTableFilterComposer
extends i0.Composer<i0.GeneratedDatabase, i1.$LocalAssetUploadEntityTable> {
$$LocalAssetUploadEntityTableFilterComposer({
required super.$db,
required super.$table,
super.joinBuilder,
super.$addJoinBuilderToRootComposer,
super.$removeJoinBuilderFromRootComposer,
});
i0.ColumnFilters<int> get numberOfAttempts => $composableBuilder(
column: $table.numberOfAttempts,
builder: (column) => i0.ColumnFilters(column),
);
i0.ColumnFilters<DateTime> get lastAttemptAt => $composableBuilder(
column: $table.lastAttemptAt,
builder: (column) => i0.ColumnFilters(column),
);
i0.ColumnWithTypeConverterFilters<i2.UploadErrorType, i2.UploadErrorType, int>
get errorType => $composableBuilder(
column: $table.errorType,
builder: (column) => i0.ColumnWithTypeConverterFilters(column),
);
i0.ColumnFilters<String> get errorMessage => $composableBuilder(
column: $table.errorMessage,
builder: (column) => i0.ColumnFilters(column),
);
i5.$$LocalAssetEntityTableFilterComposer get assetId {
final i5.$$LocalAssetEntityTableFilterComposer composer = $composerBuilder(
composer: this,
getCurrentColumn: (t) => t.assetId,
referencedTable: i6.ReadDatabaseContainer(
$db,
).resultSet<i5.$LocalAssetEntityTable>('local_asset_entity'),
getReferencedColumn: (t) => t.id,
builder:
(
joinBuilder, {
$addJoinBuilderToRootComposer,
$removeJoinBuilderFromRootComposer,
}) => i5.$$LocalAssetEntityTableFilterComposer(
$db: $db,
$table: i6.ReadDatabaseContainer(
$db,
).resultSet<i5.$LocalAssetEntityTable>('local_asset_entity'),
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
joinBuilder: joinBuilder,
$removeJoinBuilderFromRootComposer:
$removeJoinBuilderFromRootComposer,
),
);
return composer;
}
}
class $$LocalAssetUploadEntityTableOrderingComposer
extends i0.Composer<i0.GeneratedDatabase, i1.$LocalAssetUploadEntityTable> {
$$LocalAssetUploadEntityTableOrderingComposer({
required super.$db,
required super.$table,
super.joinBuilder,
super.$addJoinBuilderToRootComposer,
super.$removeJoinBuilderFromRootComposer,
});
i0.ColumnOrderings<int> get numberOfAttempts => $composableBuilder(
column: $table.numberOfAttempts,
builder: (column) => i0.ColumnOrderings(column),
);
i0.ColumnOrderings<DateTime> get lastAttemptAt => $composableBuilder(
column: $table.lastAttemptAt,
builder: (column) => i0.ColumnOrderings(column),
);
i0.ColumnOrderings<int> get errorType => $composableBuilder(
column: $table.errorType,
builder: (column) => i0.ColumnOrderings(column),
);
i0.ColumnOrderings<String> get errorMessage => $composableBuilder(
column: $table.errorMessage,
builder: (column) => i0.ColumnOrderings(column),
);
i5.$$LocalAssetEntityTableOrderingComposer get assetId {
final i5.$$LocalAssetEntityTableOrderingComposer composer =
$composerBuilder(
composer: this,
getCurrentColumn: (t) => t.assetId,
referencedTable: i6.ReadDatabaseContainer(
$db,
).resultSet<i5.$LocalAssetEntityTable>('local_asset_entity'),
getReferencedColumn: (t) => t.id,
builder:
(
joinBuilder, {
$addJoinBuilderToRootComposer,
$removeJoinBuilderFromRootComposer,
}) => i5.$$LocalAssetEntityTableOrderingComposer(
$db: $db,
$table: i6.ReadDatabaseContainer(
$db,
).resultSet<i5.$LocalAssetEntityTable>('local_asset_entity'),
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
joinBuilder: joinBuilder,
$removeJoinBuilderFromRootComposer:
$removeJoinBuilderFromRootComposer,
),
);
return composer;
}
}
class $$LocalAssetUploadEntityTableAnnotationComposer
extends i0.Composer<i0.GeneratedDatabase, i1.$LocalAssetUploadEntityTable> {
$$LocalAssetUploadEntityTableAnnotationComposer({
required super.$db,
required super.$table,
super.joinBuilder,
super.$addJoinBuilderToRootComposer,
super.$removeJoinBuilderFromRootComposer,
});
i0.GeneratedColumn<int> get numberOfAttempts => $composableBuilder(
column: $table.numberOfAttempts,
builder: (column) => column,
);
i0.GeneratedColumn<DateTime> get lastAttemptAt => $composableBuilder(
column: $table.lastAttemptAt,
builder: (column) => column,
);
i0.GeneratedColumnWithTypeConverter<i2.UploadErrorType, int> get errorType =>
$composableBuilder(column: $table.errorType, builder: (column) => column);
i0.GeneratedColumn<String> get errorMessage => $composableBuilder(
column: $table.errorMessage,
builder: (column) => column,
);
i5.$$LocalAssetEntityTableAnnotationComposer get assetId {
final i5.$$LocalAssetEntityTableAnnotationComposer composer =
$composerBuilder(
composer: this,
getCurrentColumn: (t) => t.assetId,
referencedTable: i6.ReadDatabaseContainer(
$db,
).resultSet<i5.$LocalAssetEntityTable>('local_asset_entity'),
getReferencedColumn: (t) => t.id,
builder:
(
joinBuilder, {
$addJoinBuilderToRootComposer,
$removeJoinBuilderFromRootComposer,
}) => i5.$$LocalAssetEntityTableAnnotationComposer(
$db: $db,
$table: i6.ReadDatabaseContainer(
$db,
).resultSet<i5.$LocalAssetEntityTable>('local_asset_entity'),
$addJoinBuilderToRootComposer: $addJoinBuilderToRootComposer,
joinBuilder: joinBuilder,
$removeJoinBuilderFromRootComposer:
$removeJoinBuilderFromRootComposer,
),
);
return composer;
}
}
class $$LocalAssetUploadEntityTableTableManager
extends
i0.RootTableManager<
i0.GeneratedDatabase,
i1.$LocalAssetUploadEntityTable,
i1.LocalAssetUploadEntityData,
i1.$$LocalAssetUploadEntityTableFilterComposer,
i1.$$LocalAssetUploadEntityTableOrderingComposer,
i1.$$LocalAssetUploadEntityTableAnnotationComposer,
$$LocalAssetUploadEntityTableCreateCompanionBuilder,
$$LocalAssetUploadEntityTableUpdateCompanionBuilder,
(
i1.LocalAssetUploadEntityData,
i1.$$LocalAssetUploadEntityTableReferences,
),
i1.LocalAssetUploadEntityData,
i0.PrefetchHooks Function({bool assetId})
> {
$$LocalAssetUploadEntityTableTableManager(
i0.GeneratedDatabase db,
i1.$LocalAssetUploadEntityTable table,
) : super(
i0.TableManagerState(
db: db,
table: table,
createFilteringComposer: () =>
i1.$$LocalAssetUploadEntityTableFilterComposer(
$db: db,
$table: table,
),
createOrderingComposer: () =>
i1.$$LocalAssetUploadEntityTableOrderingComposer(
$db: db,
$table: table,
),
createComputedFieldComposer: () =>
i1.$$LocalAssetUploadEntityTableAnnotationComposer(
$db: db,
$table: table,
),
updateCompanionCallback:
({
i0.Value<String> assetId = const i0.Value.absent(),
i0.Value<int> numberOfAttempts = const i0.Value.absent(),
i0.Value<DateTime> lastAttemptAt = const i0.Value.absent(),
i0.Value<i2.UploadErrorType> errorType =
const i0.Value.absent(),
i0.Value<String?> errorMessage = const i0.Value.absent(),
}) => i1.LocalAssetUploadEntityCompanion(
assetId: assetId,
numberOfAttempts: numberOfAttempts,
lastAttemptAt: lastAttemptAt,
errorType: errorType,
errorMessage: errorMessage,
),
createCompanionCallback:
({
required String assetId,
i0.Value<int> numberOfAttempts = const i0.Value.absent(),
i0.Value<DateTime> lastAttemptAt = const i0.Value.absent(),
i0.Value<i2.UploadErrorType> errorType =
const i0.Value.absent(),
i0.Value<String?> errorMessage = const i0.Value.absent(),
}) => i1.LocalAssetUploadEntityCompanion.insert(
assetId: assetId,
numberOfAttempts: numberOfAttempts,
lastAttemptAt: lastAttemptAt,
errorType: errorType,
errorMessage: errorMessage,
),
withReferenceMapper: (p0) => p0
.map(
(e) => (
e.readTable(table),
i1.$$LocalAssetUploadEntityTableReferences(db, table, e),
),
)
.toList(),
prefetchHooksCallback: ({assetId = false}) {
return i0.PrefetchHooks(
db: db,
explicitlyWatchedTables: [],
addJoins:
<
T extends i0.TableManagerState<
dynamic,
dynamic,
dynamic,
dynamic,
dynamic,
dynamic,
dynamic,
dynamic,
dynamic,
dynamic,
dynamic
>
>(state) {
if (assetId) {
state =
state.withJoin(
currentTable: table,
currentColumn: table.assetId,
referencedTable: i1
.$$LocalAssetUploadEntityTableReferences
._assetIdTable(db),
referencedColumn: i1
.$$LocalAssetUploadEntityTableReferences
._assetIdTable(db)
.id,
)
as T;
}
return state;
},
getPrefetchedDataCallback: (items) async {
return [];
},
);
},
),
);
}
typedef $$LocalAssetUploadEntityTableProcessedTableManager =
i0.ProcessedTableManager<
i0.GeneratedDatabase,
i1.$LocalAssetUploadEntityTable,
i1.LocalAssetUploadEntityData,
i1.$$LocalAssetUploadEntityTableFilterComposer,
i1.$$LocalAssetUploadEntityTableOrderingComposer,
i1.$$LocalAssetUploadEntityTableAnnotationComposer,
$$LocalAssetUploadEntityTableCreateCompanionBuilder,
$$LocalAssetUploadEntityTableUpdateCompanionBuilder,
(
i1.LocalAssetUploadEntityData,
i1.$$LocalAssetUploadEntityTableReferences,
),
i1.LocalAssetUploadEntityData,
i0.PrefetchHooks Function({bool assetId})
>;
class $LocalAssetUploadEntityTable extends i3.LocalAssetUploadEntity
with
i0.TableInfo<
$LocalAssetUploadEntityTable,
i1.LocalAssetUploadEntityData
> {
@override
final i0.GeneratedDatabase attachedDatabase;
final String? _alias;
$LocalAssetUploadEntityTable(this.attachedDatabase, [this._alias]);
static const i0.VerificationMeta _assetIdMeta = const i0.VerificationMeta(
'assetId',
);
@override
late final i0.GeneratedColumn<String> assetId = i0.GeneratedColumn<String>(
'asset_id',
aliasedName,
false,
type: i0.DriftSqlType.string,
requiredDuringInsert: true,
defaultConstraints: i0.GeneratedColumn.constraintIsAlways(
'REFERENCES local_asset_entity (id) ON DELETE CASCADE',
),
);
static const i0.VerificationMeta _numberOfAttemptsMeta =
const i0.VerificationMeta('numberOfAttempts');
@override
late final i0.GeneratedColumn<int> numberOfAttempts = i0.GeneratedColumn<int>(
'number_of_attempts',
aliasedName,
false,
type: i0.DriftSqlType.int,
requiredDuringInsert: false,
defaultValue: const i4.Constant(0),
);
static const i0.VerificationMeta _lastAttemptAtMeta =
const i0.VerificationMeta('lastAttemptAt');
@override
late final i0.GeneratedColumn<DateTime> lastAttemptAt =
i0.GeneratedColumn<DateTime>(
'last_attempt_at',
aliasedName,
false,
type: i0.DriftSqlType.dateTime,
requiredDuringInsert: false,
defaultValue: i4.currentDateAndTime,
);
@override
late final i0.GeneratedColumnWithTypeConverter<i2.UploadErrorType, int>
errorType =
i0.GeneratedColumn<int>(
'error_type',
aliasedName,
false,
type: i0.DriftSqlType.int,
requiredDuringInsert: false,
defaultValue: const i4.Constant(0),
).withConverter<i2.UploadErrorType>(
i1.$LocalAssetUploadEntityTable.$convertererrorType,
);
static const i0.VerificationMeta _errorMessageMeta =
const i0.VerificationMeta('errorMessage');
@override
late final i0.GeneratedColumn<String> errorMessage =
i0.GeneratedColumn<String>(
'error_message',
aliasedName,
true,
type: i0.DriftSqlType.string,
requiredDuringInsert: false,
);
@override
List<i0.GeneratedColumn> get $columns => [
assetId,
numberOfAttempts,
lastAttemptAt,
errorType,
errorMessage,
];
@override
String get aliasedName => _alias ?? actualTableName;
@override
String get actualTableName => $name;
static const String $name = 'local_asset_upload_entity';
@override
i0.VerificationContext validateIntegrity(
i0.Insertable<i1.LocalAssetUploadEntityData> instance, {
bool isInserting = false,
}) {
final context = i0.VerificationContext();
final data = instance.toColumns(true);
if (data.containsKey('asset_id')) {
context.handle(
_assetIdMeta,
assetId.isAcceptableOrUnknown(data['asset_id']!, _assetIdMeta),
);
} else if (isInserting) {
context.missing(_assetIdMeta);
}
if (data.containsKey('number_of_attempts')) {
context.handle(
_numberOfAttemptsMeta,
numberOfAttempts.isAcceptableOrUnknown(
data['number_of_attempts']!,
_numberOfAttemptsMeta,
),
);
}
if (data.containsKey('last_attempt_at')) {
context.handle(
_lastAttemptAtMeta,
lastAttemptAt.isAcceptableOrUnknown(
data['last_attempt_at']!,
_lastAttemptAtMeta,
),
);
}
if (data.containsKey('error_message')) {
context.handle(
_errorMessageMeta,
errorMessage.isAcceptableOrUnknown(
data['error_message']!,
_errorMessageMeta,
),
);
}
return context;
}
@override
Set<i0.GeneratedColumn> get $primaryKey => {assetId};
@override
i1.LocalAssetUploadEntityData map(
Map<String, dynamic> data, {
String? tablePrefix,
}) {
final effectivePrefix = tablePrefix != null ? '$tablePrefix.' : '';
return i1.LocalAssetUploadEntityData(
assetId: attachedDatabase.typeMapping.read(
i0.DriftSqlType.string,
data['${effectivePrefix}asset_id'],
)!,
numberOfAttempts: attachedDatabase.typeMapping.read(
i0.DriftSqlType.int,
data['${effectivePrefix}number_of_attempts'],
)!,
lastAttemptAt: attachedDatabase.typeMapping.read(
i0.DriftSqlType.dateTime,
data['${effectivePrefix}last_attempt_at'],
)!,
errorType: i1.$LocalAssetUploadEntityTable.$convertererrorType.fromSql(
attachedDatabase.typeMapping.read(
i0.DriftSqlType.int,
data['${effectivePrefix}error_type'],
)!,
),
errorMessage: attachedDatabase.typeMapping.read(
i0.DriftSqlType.string,
data['${effectivePrefix}error_message'],
),
);
}
@override
$LocalAssetUploadEntityTable createAlias(String alias) {
return $LocalAssetUploadEntityTable(attachedDatabase, alias);
}
static i0.JsonTypeConverter2<i2.UploadErrorType, int, int>
$convertererrorType = const i0.EnumIndexConverter<i2.UploadErrorType>(
i2.UploadErrorType.values,
);
@override
bool get withoutRowId => true;
@override
bool get isStrict => true;
}
class LocalAssetUploadEntityData extends i0.DataClass
implements i0.Insertable<i1.LocalAssetUploadEntityData> {
final String assetId;
final int numberOfAttempts;
final DateTime lastAttemptAt;
final i2.UploadErrorType errorType;
final String? errorMessage;
const LocalAssetUploadEntityData({
required this.assetId,
required this.numberOfAttempts,
required this.lastAttemptAt,
required this.errorType,
this.errorMessage,
});
@override
Map<String, i0.Expression> toColumns(bool nullToAbsent) {
final map = <String, i0.Expression>{};
map['asset_id'] = i0.Variable<String>(assetId);
map['number_of_attempts'] = i0.Variable<int>(numberOfAttempts);
map['last_attempt_at'] = i0.Variable<DateTime>(lastAttemptAt);
{
map['error_type'] = i0.Variable<int>(
i1.$LocalAssetUploadEntityTable.$convertererrorType.toSql(errorType),
);
}
if (!nullToAbsent || errorMessage != null) {
map['error_message'] = i0.Variable<String>(errorMessage);
}
return map;
}
factory LocalAssetUploadEntityData.fromJson(
Map<String, dynamic> json, {
i0.ValueSerializer? serializer,
}) {
serializer ??= i0.driftRuntimeOptions.defaultSerializer;
return LocalAssetUploadEntityData(
assetId: serializer.fromJson<String>(json['assetId']),
numberOfAttempts: serializer.fromJson<int>(json['numberOfAttempts']),
lastAttemptAt: serializer.fromJson<DateTime>(json['lastAttemptAt']),
errorType: i1.$LocalAssetUploadEntityTable.$convertererrorType.fromJson(
serializer.fromJson<int>(json['errorType']),
),
errorMessage: serializer.fromJson<String?>(json['errorMessage']),
);
}
@override
Map<String, dynamic> toJson({i0.ValueSerializer? serializer}) {
serializer ??= i0.driftRuntimeOptions.defaultSerializer;
return <String, dynamic>{
'assetId': serializer.toJson<String>(assetId),
'numberOfAttempts': serializer.toJson<int>(numberOfAttempts),
'lastAttemptAt': serializer.toJson<DateTime>(lastAttemptAt),
'errorType': serializer.toJson<int>(
i1.$LocalAssetUploadEntityTable.$convertererrorType.toJson(errorType),
),
'errorMessage': serializer.toJson<String?>(errorMessage),
};
}
i1.LocalAssetUploadEntityData copyWith({
String? assetId,
int? numberOfAttempts,
DateTime? lastAttemptAt,
i2.UploadErrorType? errorType,
i0.Value<String?> errorMessage = const i0.Value.absent(),
}) => i1.LocalAssetUploadEntityData(
assetId: assetId ?? this.assetId,
numberOfAttempts: numberOfAttempts ?? this.numberOfAttempts,
lastAttemptAt: lastAttemptAt ?? this.lastAttemptAt,
errorType: errorType ?? this.errorType,
errorMessage: errorMessage.present ? errorMessage.value : this.errorMessage,
);
LocalAssetUploadEntityData copyWithCompanion(
i1.LocalAssetUploadEntityCompanion data,
) {
return LocalAssetUploadEntityData(
assetId: data.assetId.present ? data.assetId.value : this.assetId,
numberOfAttempts: data.numberOfAttempts.present
? data.numberOfAttempts.value
: this.numberOfAttempts,
lastAttemptAt: data.lastAttemptAt.present
? data.lastAttemptAt.value
: this.lastAttemptAt,
errorType: data.errorType.present ? data.errorType.value : this.errorType,
errorMessage: data.errorMessage.present
? data.errorMessage.value
: this.errorMessage,
);
}
@override
String toString() {
return (StringBuffer('LocalAssetUploadEntityData(')
..write('assetId: $assetId, ')
..write('numberOfAttempts: $numberOfAttempts, ')
..write('lastAttemptAt: $lastAttemptAt, ')
..write('errorType: $errorType, ')
..write('errorMessage: $errorMessage')
..write(')'))
.toString();
}
@override
int get hashCode => Object.hash(
assetId,
numberOfAttempts,
lastAttemptAt,
errorType,
errorMessage,
);
@override
bool operator ==(Object other) =>
identical(this, other) ||
(other is i1.LocalAssetUploadEntityData &&
other.assetId == this.assetId &&
other.numberOfAttempts == this.numberOfAttempts &&
other.lastAttemptAt == this.lastAttemptAt &&
other.errorType == this.errorType &&
other.errorMessage == this.errorMessage);
}
class LocalAssetUploadEntityCompanion
extends i0.UpdateCompanion<i1.LocalAssetUploadEntityData> {
final i0.Value<String> assetId;
final i0.Value<int> numberOfAttempts;
final i0.Value<DateTime> lastAttemptAt;
final i0.Value<i2.UploadErrorType> errorType;
final i0.Value<String?> errorMessage;
const LocalAssetUploadEntityCompanion({
this.assetId = const i0.Value.absent(),
this.numberOfAttempts = const i0.Value.absent(),
this.lastAttemptAt = const i0.Value.absent(),
this.errorType = const i0.Value.absent(),
this.errorMessage = const i0.Value.absent(),
});
LocalAssetUploadEntityCompanion.insert({
required String assetId,
this.numberOfAttempts = const i0.Value.absent(),
this.lastAttemptAt = const i0.Value.absent(),
this.errorType = const i0.Value.absent(),
this.errorMessage = const i0.Value.absent(),
}) : assetId = i0.Value(assetId);
static i0.Insertable<i1.LocalAssetUploadEntityData> custom({
i0.Expression<String>? assetId,
i0.Expression<int>? numberOfAttempts,
i0.Expression<DateTime>? lastAttemptAt,
i0.Expression<int>? errorType,
i0.Expression<String>? errorMessage,
}) {
return i0.RawValuesInsertable({
if (assetId != null) 'asset_id': assetId,
if (numberOfAttempts != null) 'number_of_attempts': numberOfAttempts,
if (lastAttemptAt != null) 'last_attempt_at': lastAttemptAt,
if (errorType != null) 'error_type': errorType,
if (errorMessage != null) 'error_message': errorMessage,
});
}
i1.LocalAssetUploadEntityCompanion copyWith({
i0.Value<String>? assetId,
i0.Value<int>? numberOfAttempts,
i0.Value<DateTime>? lastAttemptAt,
i0.Value<i2.UploadErrorType>? errorType,
i0.Value<String?>? errorMessage,
}) {
return i1.LocalAssetUploadEntityCompanion(
assetId: assetId ?? this.assetId,
numberOfAttempts: numberOfAttempts ?? this.numberOfAttempts,
lastAttemptAt: lastAttemptAt ?? this.lastAttemptAt,
errorType: errorType ?? this.errorType,
errorMessage: errorMessage ?? this.errorMessage,
);
}
@override
Map<String, i0.Expression> toColumns(bool nullToAbsent) {
final map = <String, i0.Expression>{};
if (assetId.present) {
map['asset_id'] = i0.Variable<String>(assetId.value);
}
if (numberOfAttempts.present) {
map['number_of_attempts'] = i0.Variable<int>(numberOfAttempts.value);
}
if (lastAttemptAt.present) {
map['last_attempt_at'] = i0.Variable<DateTime>(lastAttemptAt.value);
}
if (errorType.present) {
map['error_type'] = i0.Variable<int>(
i1.$LocalAssetUploadEntityTable.$convertererrorType.toSql(
errorType.value,
),
);
}
if (errorMessage.present) {
map['error_message'] = i0.Variable<String>(errorMessage.value);
}
return map;
}
@override
String toString() {
return (StringBuffer('LocalAssetUploadEntityCompanion(')
..write('assetId: $assetId, ')
..write('numberOfAttempts: $numberOfAttempts, ')
..write('lastAttemptAt: $lastAttemptAt, ')
..write('errorType: $errorType, ')
..write('errorMessage: $errorMessage')
..write(')'))
.toString();
}
}

View File

@@ -8,6 +8,8 @@ import 'package:immich_mobile/infrastructure/entities/local_asset.entity.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:immich_mobile/providers/infrastructure/db.provider.dart';
enum SortCandidatesBy { createdAt, attemptCount }
final backupRepositoryProvider = Provider<DriftBackupRepository>(
(ref) => DriftBackupRepository(ref.watch(driftProvider)),
);
@@ -81,37 +83,67 @@ class DriftBackupRepository extends DriftDatabaseRepository {
);
}
Future<List<LocalAsset>> getCandidates(String userId, {bool onlyHashed = true}) async {
Future<List<LocalAsset>> getCandidates(
String userId, {
bool onlyHashed = true,
bool ignoreFailed = false,
int? limit,
SortCandidatesBy sortBy = SortCandidatesBy.createdAt,
}) async {
final selectedAlbumIds = _db.localAlbumEntity.selectOnly(distinct: true)
..addColumns([_db.localAlbumEntity.id])
..where(_db.localAlbumEntity.backupSelection.equalsValue(BackupSelection.selected));
final query = _db.localAssetEntity.select()
..where(
(lae) =>
existsQuery(
_db.localAlbumAssetEntity.selectOnly()
..addColumns([_db.localAlbumAssetEntity.assetId])
..where(
_db.localAlbumAssetEntity.albumId.isInQuery(selectedAlbumIds) &
_db.localAlbumAssetEntity.assetId.equalsExp(lae.id),
),
) &
notExistsQuery(
_db.remoteAssetEntity.selectOnly()
..addColumns([_db.remoteAssetEntity.checksum])
..where(
_db.remoteAssetEntity.checksum.equalsExp(lae.checksum) & _db.remoteAssetEntity.ownerId.equals(userId),
),
) &
lae.id.isNotInQuery(_getExcludedSubquery()),
)
..orderBy([(localAsset) => OrderingTerm.desc(localAsset.createdAt)]);
final query =
_db.localAssetEntity.select().join([
leftOuterJoin(
_db.localAssetUploadEntity,
_db.localAssetEntity.id.equalsExp(_db.localAssetUploadEntity.assetId),
useColumns: false,
),
])..where(
existsQuery(
_db.localAlbumAssetEntity.selectOnly()
..addColumns([_db.localAlbumAssetEntity.assetId])
..where(
_db.localAlbumAssetEntity.albumId.isInQuery(selectedAlbumIds) &
_db.localAlbumAssetEntity.assetId.equalsExp(_db.localAssetEntity.id),
),
) &
notExistsQuery(
_db.remoteAssetEntity.selectOnly()
..addColumns([_db.remoteAssetEntity.checksum])
..where(
_db.remoteAssetEntity.checksum.equalsExp(_db.localAssetEntity.checksum) &
_db.remoteAssetEntity.ownerId.equals(userId),
),
) &
_db.localAssetEntity.id.isNotInQuery(_getExcludedSubquery()),
);
if (onlyHashed) {
query.where((lae) => lae.checksum.isNotNull());
switch (sortBy) {
case SortCandidatesBy.createdAt:
query.orderBy([OrderingTerm.asc(_db.localAssetEntity.createdAt)]);
case SortCandidatesBy.attemptCount:
query.orderBy([
OrderingTerm.asc(_db.localAssetUploadEntity.numberOfAttempts, nulls: NullsOrder.first),
OrderingTerm.asc(_db.localAssetUploadEntity.lastAttemptAt, nulls: NullsOrder.first),
OrderingTerm.asc(_db.localAssetEntity.createdAt),
]);
}
return query.map((localAsset) => localAsset.toDto()).get();
if (onlyHashed) {
query.where(_db.localAssetEntity.checksum.isNotNull());
}
if (ignoreFailed) {
query.where(_db.localAssetUploadEntity.assetId.isNull());
}
if (limit != null) {
query.limit(limit);
}
return query.map((row) => row.readTable(_db.localAssetEntity).toDto()).get();
}
}
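
A minimal usage sketch (not part of the diff; `db` and `userId` are assumed to exist) mirroring how the upload service below calls the reworked query — up to 100 candidates, least-attempted first, skipping assets that already have a failed-upload row:

// Hypothetical sketch of the new getCandidates parameters in use.
final repository = DriftBackupRepository(db); // `db` is an assumed Drift instance
final candidates = await repository.getCandidates(
  userId, // assumed current user's id
  ignoreFailed: true,
  limit: 100,
  sortBy: SortCandidatesBy.attemptCount,
);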

View File

@@ -10,6 +10,7 @@ import 'package:immich_mobile/infrastructure/entities/exif.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_album.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_album_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/local_asset_upload_entity.dart';
import 'package:immich_mobile/infrastructure/entities/memory.entity.dart';
import 'package:immich_mobile/infrastructure/entities/memory_asset.entity.dart';
import 'package:immich_mobile/infrastructure/entities/partner.entity.dart';
@@ -50,6 +51,7 @@ class IsarDatabaseRepository implements IDatabaseRepository {
PartnerEntity,
LocalAlbumEntity,
LocalAssetEntity,
LocalAssetUploadEntity,
LocalAlbumAssetEntity,
RemoteAssetEntity,
RemoteExifEntity,
@@ -93,7 +95,7 @@ class Drift extends $Drift implements IDatabaseRepository {
}
@override
int get schemaVersion => 12;
int get schemaVersion => 13;
@override
MigrationStrategy get migration => MigrationStrategy(
@@ -178,6 +180,9 @@ class Drift extends $Drift implements IDatabaseRepository {
);
}
},
from12To13: (m, v13) async {
await m.createTable(v13.localAssetUploadEntity);
},
),
);

View File

@@ -21,25 +21,27 @@ import 'package:immich_mobile/infrastructure/entities/user_metadata.entity.drift
as i9;
import 'package:immich_mobile/infrastructure/entities/partner.entity.drift.dart'
as i10;
import 'package:immich_mobile/infrastructure/entities/exif.entity.drift.dart'
import 'package:immich_mobile/infrastructure/entities/local_asset_upload_entity.drift.dart'
as i11;
import 'package:immich_mobile/infrastructure/entities/remote_album_asset.entity.drift.dart'
import 'package:immich_mobile/infrastructure/entities/exif.entity.drift.dart'
as i12;
import 'package:immich_mobile/infrastructure/entities/remote_album_user.entity.drift.dart'
import 'package:immich_mobile/infrastructure/entities/remote_album_asset.entity.drift.dart'
as i13;
import 'package:immich_mobile/infrastructure/entities/memory.entity.drift.dart'
import 'package:immich_mobile/infrastructure/entities/remote_album_user.entity.drift.dart'
as i14;
import 'package:immich_mobile/infrastructure/entities/memory_asset.entity.drift.dart'
import 'package:immich_mobile/infrastructure/entities/memory.entity.drift.dart'
as i15;
import 'package:immich_mobile/infrastructure/entities/person.entity.drift.dart'
import 'package:immich_mobile/infrastructure/entities/memory_asset.entity.drift.dart'
as i16;
import 'package:immich_mobile/infrastructure/entities/asset_face.entity.drift.dart'
import 'package:immich_mobile/infrastructure/entities/person.entity.drift.dart'
as i17;
import 'package:immich_mobile/infrastructure/entities/store.entity.drift.dart'
import 'package:immich_mobile/infrastructure/entities/asset_face.entity.drift.dart'
as i18;
import 'package:immich_mobile/infrastructure/entities/merged_asset.drift.dart'
import 'package:immich_mobile/infrastructure/entities/store.entity.drift.dart'
as i19;
import 'package:drift/internal/modular.dart' as i20;
import 'package:immich_mobile/infrastructure/entities/merged_asset.drift.dart'
as i20;
import 'package:drift/internal/modular.dart' as i21;
abstract class $Drift extends i0.GeneratedDatabase {
$Drift(i0.QueryExecutor e) : super(e);
@@ -64,22 +66,24 @@ abstract class $Drift extends i0.GeneratedDatabase {
late final i10.$PartnerEntityTable partnerEntity = i10.$PartnerEntityTable(
this,
);
late final i11.$RemoteExifEntityTable remoteExifEntity = i11
late final i11.$LocalAssetUploadEntityTable localAssetUploadEntity = i11
.$LocalAssetUploadEntityTable(this);
late final i12.$RemoteExifEntityTable remoteExifEntity = i12
.$RemoteExifEntityTable(this);
late final i12.$RemoteAlbumAssetEntityTable remoteAlbumAssetEntity = i12
late final i13.$RemoteAlbumAssetEntityTable remoteAlbumAssetEntity = i13
.$RemoteAlbumAssetEntityTable(this);
late final i13.$RemoteAlbumUserEntityTable remoteAlbumUserEntity = i13
late final i14.$RemoteAlbumUserEntityTable remoteAlbumUserEntity = i14
.$RemoteAlbumUserEntityTable(this);
late final i14.$MemoryEntityTable memoryEntity = i14.$MemoryEntityTable(this);
late final i15.$MemoryAssetEntityTable memoryAssetEntity = i15
late final i15.$MemoryEntityTable memoryEntity = i15.$MemoryEntityTable(this);
late final i16.$MemoryAssetEntityTable memoryAssetEntity = i16
.$MemoryAssetEntityTable(this);
late final i16.$PersonEntityTable personEntity = i16.$PersonEntityTable(this);
late final i17.$AssetFaceEntityTable assetFaceEntity = i17
late final i17.$PersonEntityTable personEntity = i17.$PersonEntityTable(this);
late final i18.$AssetFaceEntityTable assetFaceEntity = i18
.$AssetFaceEntityTable(this);
late final i18.$StoreEntityTable storeEntity = i18.$StoreEntityTable(this);
i19.MergedAssetDrift get mergedAssetDrift => i20.ReadDatabaseContainer(
late final i19.$StoreEntityTable storeEntity = i19.$StoreEntityTable(this);
i20.MergedAssetDrift get mergedAssetDrift => i21.ReadDatabaseContainer(
this,
).accessor<i19.MergedAssetDrift>(i19.MergedAssetDrift.new);
).accessor<i20.MergedAssetDrift>(i20.MergedAssetDrift.new);
@override
Iterable<i0.TableInfo<i0.Table, Object?>> get allTables =>
allSchemaEntities.whereType<i0.TableInfo<i0.Table, Object?>>();
@@ -100,6 +104,7 @@ abstract class $Drift extends i0.GeneratedDatabase {
authUserEntity,
userMetadataEntity,
partnerEntity,
localAssetUploadEntity,
remoteExifEntity,
remoteAlbumAssetEntity,
remoteAlbumUserEntity,
@@ -108,7 +113,7 @@ abstract class $Drift extends i0.GeneratedDatabase {
personEntity,
assetFaceEntity,
storeEntity,
i11.idxLatLng,
i12.idxLatLng,
];
@override
i0.StreamQueryUpdateRules
@@ -197,6 +202,15 @@ abstract class $Drift extends i0.GeneratedDatabase {
),
result: [i0.TableUpdate('partner_entity', kind: i0.UpdateKind.delete)],
),
i0.WritePropagation(
on: i0.TableUpdateQuery.onTableName(
'local_asset_entity',
limitUpdateKind: i0.UpdateKind.delete,
),
result: [
i0.TableUpdate('local_asset_upload_entity', kind: i0.UpdateKind.delete),
],
),
i0.WritePropagation(
on: i0.TableUpdateQuery.onTableName(
'remote_asset_entity',
@@ -317,23 +331,28 @@ class $DriftManager {
i9.$$UserMetadataEntityTableTableManager(_db, _db.userMetadataEntity);
i10.$$PartnerEntityTableTableManager get partnerEntity =>
i10.$$PartnerEntityTableTableManager(_db, _db.partnerEntity);
i11.$$RemoteExifEntityTableTableManager get remoteExifEntity =>
i11.$$RemoteExifEntityTableTableManager(_db, _db.remoteExifEntity);
i12.$$RemoteAlbumAssetEntityTableTableManager get remoteAlbumAssetEntity =>
i12.$$RemoteAlbumAssetEntityTableTableManager(
i11.$$LocalAssetUploadEntityTableTableManager get localAssetUploadEntity =>
i11.$$LocalAssetUploadEntityTableTableManager(
_db,
_db.localAssetUploadEntity,
);
i12.$$RemoteExifEntityTableTableManager get remoteExifEntity =>
i12.$$RemoteExifEntityTableTableManager(_db, _db.remoteExifEntity);
i13.$$RemoteAlbumAssetEntityTableTableManager get remoteAlbumAssetEntity =>
i13.$$RemoteAlbumAssetEntityTableTableManager(
_db,
_db.remoteAlbumAssetEntity,
);
i13.$$RemoteAlbumUserEntityTableTableManager get remoteAlbumUserEntity => i13
i14.$$RemoteAlbumUserEntityTableTableManager get remoteAlbumUserEntity => i14
.$$RemoteAlbumUserEntityTableTableManager(_db, _db.remoteAlbumUserEntity);
i14.$$MemoryEntityTableTableManager get memoryEntity =>
i14.$$MemoryEntityTableTableManager(_db, _db.memoryEntity);
i15.$$MemoryAssetEntityTableTableManager get memoryAssetEntity =>
i15.$$MemoryAssetEntityTableTableManager(_db, _db.memoryAssetEntity);
i16.$$PersonEntityTableTableManager get personEntity =>
i16.$$PersonEntityTableTableManager(_db, _db.personEntity);
i17.$$AssetFaceEntityTableTableManager get assetFaceEntity =>
i17.$$AssetFaceEntityTableTableManager(_db, _db.assetFaceEntity);
i18.$$StoreEntityTableTableManager get storeEntity =>
i18.$$StoreEntityTableTableManager(_db, _db.storeEntity);
i15.$$MemoryEntityTableTableManager get memoryEntity =>
i15.$$MemoryEntityTableTableManager(_db, _db.memoryEntity);
i16.$$MemoryAssetEntityTableTableManager get memoryAssetEntity =>
i16.$$MemoryAssetEntityTableTableManager(_db, _db.memoryAssetEntity);
i17.$$PersonEntityTableTableManager get personEntity =>
i17.$$PersonEntityTableTableManager(_db, _db.personEntity);
i18.$$AssetFaceEntityTableTableManager get assetFaceEntity =>
i18.$$AssetFaceEntityTableTableManager(_db, _db.assetFaceEntity);
i19.$$StoreEntityTableTableManager get storeEntity =>
i19.$$StoreEntityTableTableManager(_db, _db.storeEntity);
}

View File

@@ -5037,6 +5037,441 @@ final class Schema12 extends i0.VersionedSchema {
);
}
final class Schema13 extends i0.VersionedSchema {
Schema13({required super.database}) : super(version: 13);
@override
late final List<i1.DatabaseSchemaEntity> entities = [
userEntity,
remoteAssetEntity,
stackEntity,
localAssetEntity,
remoteAlbumEntity,
localAlbumEntity,
localAlbumAssetEntity,
idxLocalAssetChecksum,
idxRemoteAssetOwnerChecksum,
uQRemoteAssetsOwnerChecksum,
uQRemoteAssetsOwnerLibraryChecksum,
idxRemoteAssetChecksum,
authUserEntity,
userMetadataEntity,
partnerEntity,
localAssetUploadEntity,
remoteExifEntity,
remoteAlbumAssetEntity,
remoteAlbumUserEntity,
memoryEntity,
memoryAssetEntity,
personEntity,
assetFaceEntity,
storeEntity,
idxLatLng,
];
late final Shape20 userEntity = Shape20(
source: i0.VersionedTable(
entityName: 'user_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_0,
_column_1,
_column_3,
_column_84,
_column_85,
_column_91,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape17 remoteAssetEntity = Shape17(
source: i0.VersionedTable(
entityName: 'remote_asset_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_1,
_column_8,
_column_9,
_column_5,
_column_10,
_column_11,
_column_12,
_column_0,
_column_13,
_column_14,
_column_15,
_column_16,
_column_17,
_column_18,
_column_19,
_column_20,
_column_21,
_column_86,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape3 stackEntity = Shape3(
source: i0.VersionedTable(
entityName: 'stack_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [_column_0, _column_9, _column_5, _column_15, _column_75],
attachedDatabase: database,
),
alias: null,
);
late final Shape2 localAssetEntity = Shape2(
source: i0.VersionedTable(
entityName: 'local_asset_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_1,
_column_8,
_column_9,
_column_5,
_column_10,
_column_11,
_column_12,
_column_0,
_column_22,
_column_14,
_column_23,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape9 remoteAlbumEntity = Shape9(
source: i0.VersionedTable(
entityName: 'remote_album_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_0,
_column_1,
_column_56,
_column_9,
_column_5,
_column_15,
_column_57,
_column_58,
_column_59,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape19 localAlbumEntity = Shape19(
source: i0.VersionedTable(
entityName: 'local_album_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_0,
_column_1,
_column_5,
_column_31,
_column_32,
_column_90,
_column_33,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape22 localAlbumAssetEntity = Shape22(
source: i0.VersionedTable(
entityName: 'local_album_asset_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(asset_id, album_id)'],
columns: [_column_34, _column_35, _column_33],
attachedDatabase: database,
),
alias: null,
);
final i1.Index idxLocalAssetChecksum = i1.Index(
'idx_local_asset_checksum',
'CREATE INDEX IF NOT EXISTS idx_local_asset_checksum ON local_asset_entity (checksum)',
);
final i1.Index idxRemoteAssetOwnerChecksum = i1.Index(
'idx_remote_asset_owner_checksum',
'CREATE INDEX IF NOT EXISTS idx_remote_asset_owner_checksum ON remote_asset_entity (owner_id, checksum)',
);
final i1.Index uQRemoteAssetsOwnerChecksum = i1.Index(
'UQ_remote_assets_owner_checksum',
'CREATE UNIQUE INDEX IF NOT EXISTS UQ_remote_assets_owner_checksum ON remote_asset_entity (owner_id, checksum) WHERE(library_id IS NULL)',
);
final i1.Index uQRemoteAssetsOwnerLibraryChecksum = i1.Index(
'UQ_remote_assets_owner_library_checksum',
'CREATE UNIQUE INDEX IF NOT EXISTS UQ_remote_assets_owner_library_checksum ON remote_asset_entity (owner_id, library_id, checksum) WHERE(library_id IS NOT NULL)',
);
final i1.Index idxRemoteAssetChecksum = i1.Index(
'idx_remote_asset_checksum',
'CREATE INDEX IF NOT EXISTS idx_remote_asset_checksum ON remote_asset_entity (checksum)',
);
late final Shape21 authUserEntity = Shape21(
source: i0.VersionedTable(
entityName: 'auth_user_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_0,
_column_1,
_column_3,
_column_2,
_column_84,
_column_85,
_column_92,
_column_93,
_column_7,
_column_94,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape4 userMetadataEntity = Shape4(
source: i0.VersionedTable(
entityName: 'user_metadata_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(user_id, "key")'],
columns: [_column_25, _column_26, _column_27],
attachedDatabase: database,
),
alias: null,
);
late final Shape5 partnerEntity = Shape5(
source: i0.VersionedTable(
entityName: 'partner_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(shared_by_id, shared_with_id)'],
columns: [_column_28, _column_29, _column_30],
attachedDatabase: database,
),
alias: null,
);
late final Shape23 localAssetUploadEntity = Shape23(
source: i0.VersionedTable(
entityName: 'local_asset_upload_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(asset_id)'],
columns: [_column_34, _column_95, _column_96, _column_97, _column_98],
attachedDatabase: database,
),
alias: null,
);
late final Shape8 remoteExifEntity = Shape8(
source: i0.VersionedTable(
entityName: 'remote_exif_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(asset_id)'],
columns: [
_column_36,
_column_37,
_column_38,
_column_39,
_column_40,
_column_41,
_column_11,
_column_10,
_column_42,
_column_43,
_column_44,
_column_45,
_column_46,
_column_47,
_column_48,
_column_49,
_column_50,
_column_51,
_column_52,
_column_53,
_column_54,
_column_55,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape7 remoteAlbumAssetEntity = Shape7(
source: i0.VersionedTable(
entityName: 'remote_album_asset_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(asset_id, album_id)'],
columns: [_column_36, _column_60],
attachedDatabase: database,
),
alias: null,
);
late final Shape10 remoteAlbumUserEntity = Shape10(
source: i0.VersionedTable(
entityName: 'remote_album_user_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(album_id, user_id)'],
columns: [_column_60, _column_25, _column_61],
attachedDatabase: database,
),
alias: null,
);
late final Shape11 memoryEntity = Shape11(
source: i0.VersionedTable(
entityName: 'memory_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_0,
_column_9,
_column_5,
_column_18,
_column_15,
_column_8,
_column_62,
_column_63,
_column_64,
_column_65,
_column_66,
_column_67,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape12 memoryAssetEntity = Shape12(
source: i0.VersionedTable(
entityName: 'memory_asset_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(asset_id, memory_id)'],
columns: [_column_36, _column_68],
attachedDatabase: database,
),
alias: null,
);
late final Shape14 personEntity = Shape14(
source: i0.VersionedTable(
entityName: 'person_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_0,
_column_9,
_column_5,
_column_15,
_column_1,
_column_69,
_column_71,
_column_72,
_column_73,
_column_74,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape15 assetFaceEntity = Shape15(
source: i0.VersionedTable(
entityName: 'asset_face_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [
_column_0,
_column_36,
_column_76,
_column_77,
_column_78,
_column_79,
_column_80,
_column_81,
_column_82,
_column_83,
],
attachedDatabase: database,
),
alias: null,
);
late final Shape18 storeEntity = Shape18(
source: i0.VersionedTable(
entityName: 'store_entity',
withoutRowId: true,
isStrict: true,
tableConstraints: ['PRIMARY KEY(id)'],
columns: [_column_87, _column_88, _column_89],
attachedDatabase: database,
),
alias: null,
);
final i1.Index idxLatLng = i1.Index(
'idx_lat_lng',
'CREATE INDEX IF NOT EXISTS idx_lat_lng ON remote_exif_entity (latitude, longitude)',
);
}
class Shape23 extends i0.VersionedTable {
Shape23({required super.source, required super.alias}) : super.aliased();
i1.GeneratedColumn<String> get assetId =>
columnsByName['asset_id']! as i1.GeneratedColumn<String>;
i1.GeneratedColumn<int> get numberOfAttempts =>
columnsByName['number_of_attempts']! as i1.GeneratedColumn<int>;
i1.GeneratedColumn<DateTime> get lastAttemptAt =>
columnsByName['last_attempt_at']! as i1.GeneratedColumn<DateTime>;
i1.GeneratedColumn<int> get errorType =>
columnsByName['error_type']! as i1.GeneratedColumn<int>;
i1.GeneratedColumn<String> get errorMessage =>
columnsByName['error_message']! as i1.GeneratedColumn<String>;
}
i1.GeneratedColumn<int> _column_95(String aliasedName) =>
i1.GeneratedColumn<int>(
'number_of_attempts',
aliasedName,
false,
type: i1.DriftSqlType.int,
defaultValue: const CustomExpression('0'),
);
i1.GeneratedColumn<DateTime> _column_96(String aliasedName) =>
i1.GeneratedColumn<DateTime>(
'last_attempt_at',
aliasedName,
false,
type: i1.DriftSqlType.dateTime,
defaultValue: const CustomExpression('CURRENT_TIMESTAMP'),
);
i1.GeneratedColumn<int> _column_97(String aliasedName) =>
i1.GeneratedColumn<int>(
'error_type',
aliasedName,
false,
type: i1.DriftSqlType.int,
defaultValue: const CustomExpression('0'),
);
i1.GeneratedColumn<String> _column_98(String aliasedName) =>
i1.GeneratedColumn<String>(
'error_message',
aliasedName,
true,
type: i1.DriftSqlType.string,
);
i0.MigrationStepWithVersion migrationSteps({
required Future<void> Function(i1.Migrator m, Schema2 schema) from1To2,
required Future<void> Function(i1.Migrator m, Schema3 schema) from2To3,
@@ -5049,6 +5484,7 @@ i0.MigrationStepWithVersion migrationSteps({
required Future<void> Function(i1.Migrator m, Schema10 schema) from9To10,
required Future<void> Function(i1.Migrator m, Schema11 schema) from10To11,
required Future<void> Function(i1.Migrator m, Schema12 schema) from11To12,
required Future<void> Function(i1.Migrator m, Schema13 schema) from12To13,
}) {
return (currentVersion, database) async {
switch (currentVersion) {
@@ -5107,6 +5543,11 @@ i0.MigrationStepWithVersion migrationSteps({
final migrator = i1.Migrator(database, schema);
await from11To12(migrator, schema);
return 12;
case 12:
final schema = Schema13(database: database);
final migrator = i1.Migrator(database, schema);
await from12To13(migrator, schema);
return 13;
default:
throw ArgumentError.value('Unknown migration from $currentVersion');
}
@@ -5125,6 +5566,7 @@ i1.OnUpgrade stepByStep({
required Future<void> Function(i1.Migrator m, Schema10 schema) from9To10,
required Future<void> Function(i1.Migrator m, Schema11 schema) from10To11,
required Future<void> Function(i1.Migrator m, Schema12 schema) from11To12,
required Future<void> Function(i1.Migrator m, Schema13 schema) from12To13,
}) => i0.VersionedSchema.stepByStepHelper(
step: migrationSteps(
from1To2: from1To2,
@@ -5138,5 +5580,6 @@ i1.OnUpgrade stepByStep({
from9To10: from9To10,
from10To11: from10To11,
from11To12: from11To12,
from12To13: from12To13,
),
);

View File

@@ -0,0 +1,70 @@
import 'package:drift/drift.dart';
import 'package:immich_mobile/constants/enums.dart';
import 'package:immich_mobile/infrastructure/entities/local_asset_upload_entity.drift.dart';
import 'package:immich_mobile/infrastructure/repositories/db.repository.dart';
import 'package:immich_mobile/providers/backup/drift_backup.provider.dart';
class DriftLocalAssetUploadRepository extends DriftDatabaseRepository {
final Drift _db;
const DriftLocalAssetUploadRepository(this._db) : super(_db);
Stream<List<DriftUploadStatus>> watchAll() {
final query = _db.localAssetUploadEntity.select().join([
leftOuterJoin(
_db.localAssetEntity,
_db.localAssetEntity.id.equalsExp(_db.localAssetUploadEntity.assetId),
useColumns: false,
),
])..addColumns([_db.localAssetEntity.name]);
return query.map((row) {
final upload = row.readTable(_db.localAssetUploadEntity);
final assetName = row.read(_db.localAssetEntity.name)!;
return DriftUploadStatus(taskId: upload.assetId, filename: assetName, error: upload.errorMessage, isFailed: true);
}).watch();
}
Future<void> upsert(String assetId, UploadErrorType errorType, String error) {
return _db
.into(_db.localAssetUploadEntity)
.insert(
LocalAssetUploadEntityCompanion(
assetId: Value(assetId),
numberOfAttempts: const Value(1),
lastAttemptAt: Value(DateTime.now()),
errorType: Value(errorType),
errorMessage: Value(error),
),
onConflict: DoUpdate(
(old) => LocalAssetUploadEntityCompanion.custom(
numberOfAttempts: (old.numberOfAttempts + const Constant(1)),
lastAttemptAt: currentDateAndTime,
errorType: Variable.withInt(errorType.index),
errorMessage: Variable.withString(error),
),
),
);
}
Future<void> delete(String assetId) async {
await _db.managers.localAssetUploadEntity.filter((row) => row.assetId.id.equals(assetId)).delete();
}
Future<void> prune() async {
final query =
_db.localAssetUploadEntity.selectOnly().join([
leftOuterJoin(
_db.localAssetEntity,
_db.localAssetUploadEntity.assetId.equalsExp(_db.localAssetEntity.id),
useColumns: false,
),
leftOuterJoin(
_db.remoteAssetEntity,
_db.remoteAssetEntity.checksum.equalsExp(_db.localAssetEntity.checksum),
useColumns: false,
),
])
..where(_db.remoteAssetEntity.checksum.isNotNull())
..addColumns([_db.localAssetUploadEntity.assetId]);
await _db.localAssetUploadEntity.deleteWhere((row) => row.assetId.isInQuery(query));
}
}
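
A brief sketch (assumed names, not from the diff) of the upsert semantics: the DoUpdate clause turns a repeat failure for the same asset into an attempt-counter increment rather than a new row, and delete clears the row again once the asset uploads:

// Hypothetical flow for one asset id; `db` is an assumed Drift instance.
final uploads = DriftLocalAssetUploadRepository(db);
await uploads.upsert('asset-1', UploadErrorType.network, 'connection timed out'); // attempts = 1
await uploads.upsert('asset-1', UploadErrorType.server, 'HTTP 500');              // attempts = 2
await uploads.delete('asset-1'); // removed after a successful retry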

View File

@@ -25,6 +25,7 @@ import 'package:immich_mobile/providers/asset_viewer/share_intent_upload.provide
import 'package:immich_mobile/providers/db.provider.dart';
import 'package:immich_mobile/providers/infrastructure/db.provider.dart';
import 'package:immich_mobile/providers/infrastructure/platform.provider.dart';
import 'package:immich_mobile/providers/infrastructure/upload.provider.dart';
import 'package:immich_mobile/providers/locale_provider.dart';
import 'package:immich_mobile/providers/routes.provider.dart';
import 'package:immich_mobile/providers/theme.provider.dart';
@@ -211,6 +212,8 @@ class ImmichAppState extends ConsumerState<ImmichApp> with WidgetsBindingObserve
WidgetsBinding.instance.addPostFrameCallback((_) {
// needs to be delayed so that EasyLocalization is working
if (Store.isBetaTimelineEnabled) {
// Start upload timer
ref.read(uploadTimerProvider.notifier).start();
ref.read(backgroundServiceProvider).disableService();
ref.read(backgroundWorkerFgServiceProvider).enable();
if (Platform.isAndroid) {

View File

@@ -1,4 +1,5 @@
import 'package:auto_route/auto_route.dart';
import 'package:collection/collection.dart';
import 'package:flutter/material.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
@@ -7,6 +8,8 @@ import 'package:immich_mobile/extensions/translate_extensions.dart';
import 'package:immich_mobile/presentation/widgets/images/thumbnail.widget.dart';
import 'package:immich_mobile/providers/backup/drift_backup.provider.dart';
import 'package:immich_mobile/providers/infrastructure/asset.provider.dart';
import 'package:immich_mobile/providers/infrastructure/upload.provider.dart';
import 'package:immich_mobile/services/upload.service.dart';
import 'package:immich_mobile/utils/bytes_units.dart';
import 'package:path/path.dart' as path;
@@ -16,7 +19,10 @@ class DriftUploadDetailPage extends ConsumerWidget {
@override
Widget build(BuildContext context, WidgetRef ref) {
final uploadItems = ref.watch(driftBackupProvider.select((state) => state.uploadItems));
final inProgressUploads = ref.watch(driftBackupProvider.select((state) => state.uploadItems));
final failedUploads = ref.watch(failedUploadStatusProvider).valueOrNull ?? {};
// In-progress uploads take precedence over failed uploads with the same key
final uploadItems = {...failedUploads, ...inProgressUploads};
return Scaffold(
appBar: AppBar(
@@ -46,21 +52,21 @@ class DriftUploadDetailPage extends ConsumerWidget {
}
Widget _buildUploadList(Map<String, DriftUploadStatus> uploadItems) {
final sortedKeys = uploadItems.keys.sorted();
return ListView.separated(
addAutomaticKeepAlives: true,
padding: const EdgeInsets.all(16),
itemCount: uploadItems.length,
itemCount: sortedKeys.length,
separatorBuilder: (context, index) => const SizedBox(height: 4),
itemBuilder: (context, index) {
final item = uploadItems.values.elementAt(index);
final item = uploadItems[sortedKeys[index]]!;
return _buildUploadCard(context, item);
},
);
}
Widget _buildUploadCard(BuildContext context, DriftUploadStatus item) {
final isCompleted = item.progress >= 1.0;
final double progressPercentage = (item.progress * 100).clamp(0, 100);
final progress = item.progress;
return Card(
elevation: 0,
@@ -108,13 +114,16 @@ class DriftUploadDetailPage extends ConsumerWidget {
],
),
),
_buildProgressIndicator(
context,
item.progress,
progressPercentage,
isCompleted,
item.networkSpeedAsString,
),
if (item.isFailed == true)
_buildRetryButton(item)
else if (progress != null)
_buildProgressIndicator(
context,
progress,
(progress * 100).clamp(0, 100),
progress >= 1.0,
item.networkSpeedAsString,
),
],
),
],
@@ -129,7 +138,7 @@ class DriftUploadDetailPage extends ConsumerWidget {
double progress,
double percentage,
bool isCompleted,
String networkSpeedAsString,
String? networkSpeedAsString,
) {
return Column(
children: [
@@ -159,17 +168,38 @@ class DriftUploadDetailPage extends ConsumerWidget {
),
],
),
Text(
networkSpeedAsString,
style: context.textTheme.labelSmall?.copyWith(
color: context.colorScheme.onSurface.withValues(alpha: 0.6),
fontSize: 10,
if (networkSpeedAsString != null)
Text(
networkSpeedAsString,
style: context.textTheme.labelSmall?.copyWith(
color: context.colorScheme.onSurface.withValues(alpha: 0.6),
fontSize: 10,
),
),
),
],
);
}
Widget _buildRetryButton(DriftUploadStatus item) {
return Consumer(
builder: (context, ref, child) {
return IconButton(
onPressed: () => _retryFailedUpload(ref, item),
icon: const Icon(Icons.refresh_rounded),
iconSize: 24,
color: context.colorScheme.onErrorContainer,
tooltip: "retry_upload".t(context: context),
);
},
);
}
Future<void> _retryFailedUpload(WidgetRef ref, DriftUploadStatus item) async {
await ref.read(uploadServiceProvider).clearError(item.taskId);
ref.invalidate(failedUploadStatusProvider);
await ref.read(uploadServiceProvider).manualBackupId(item.taskId);
}
Future<void> _showFileDetailDialog(BuildContext context, DriftUploadStatus item) {
return showDialog(
context: context,
@@ -241,7 +271,8 @@ class FileDetailDialog extends ConsumerWidget {
_buildInfoSection(context, [
_buildInfoRow(context, "Filename", path.basename(uploadStatus.filename)),
_buildInfoRow(context, "Local ID", asset.id),
_buildInfoRow(context, "File Size", formatHumanReadableBytes(uploadStatus.fileSize, 2)),
if (uploadStatus.fileSize != null)
_buildInfoRow(context, "File Size", formatHumanReadableBytes(uploadStatus.fileSize!, 2)),
if (asset.width != null) _buildInfoRow(context, "Width", "${asset.width}px"),
if (asset.height != null) _buildInfoRow(context, "Height", "${asset.height}px"),
_buildInfoRow(context, "Created At", asset.createdAt.toString()),

View File

@@ -65,7 +65,7 @@ class SplashScreenPageState extends ConsumerState<SplashScreenPage> {
if (Store.isBetaTimelineEnabled) {
bool syncSuccess = false;
await Future.wait([
backgroundManager.syncLocal(),
backgroundManager.syncLocal(full: true),
backgroundManager.syncRemote().then((success) => syncSuccess = success),
]);

View File

@@ -16,6 +16,7 @@ import 'package:immich_mobile/providers/backup/ios_background_settings.provider.
import 'package:immich_mobile/providers/backup/manual_upload.provider.dart';
import 'package:immich_mobile/providers/gallery_permission.provider.dart';
import 'package:immich_mobile/providers/infrastructure/platform.provider.dart';
import 'package:immich_mobile/providers/infrastructure/upload.provider.dart';
import 'package:immich_mobile/providers/memory.provider.dart';
import 'package:immich_mobile/providers/notification_permission.provider.dart';
import 'package:immich_mobile/providers/server_info.provider.dart';
@@ -139,6 +140,7 @@ class AppLifeCycleNotifier extends StateNotifier<AppLifeCycleEnum> {
Future<void> _handleBetaTimelineResume() async {
_ref.read(backupProvider.notifier).cancelBackup();
_ref.read(uploadTimerProvider.notifier).start();
unawaited(_ref.read(backgroundWorkerLockServiceProvider).lock());
// Give isolates time to complete any ongoing database transactions
@@ -216,6 +218,7 @@ class AppLifeCycleNotifier extends StateNotifier<AppLifeCycleEnum> {
try {
if (Store.isBetaTimelineEnabled) {
_ref.read(uploadTimerProvider.notifier).stop();
unawaited(_ref.read(backgroundWorkerLockServiceProvider).unlock());
}
await _performPause();
@@ -250,6 +253,7 @@ class AppLifeCycleNotifier extends StateNotifier<AppLifeCycleEnum> {
state = AppLifeCycleEnum.detached;
if (Store.isBetaTimelineEnabled) {
_ref.read(uploadTimerProvider.notifier).stop();
unawaited(_ref.read(backgroundWorkerLockServiceProvider).unlock());
}

View File

@@ -5,6 +5,7 @@ import 'package:background_downloader/background_downloader.dart';
import 'package:collection/collection.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/constants/constants.dart';
import 'package:immich_mobile/constants/enums.dart';
import 'package:immich_mobile/domain/models/album/local_album.model.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/extensions/string_extensions.dart';
@@ -32,18 +33,18 @@ class EnqueueStatus {
class DriftUploadStatus {
final String taskId;
final String filename;
final double progress;
final int fileSize;
final String networkSpeedAsString;
final double? progress;
final int? fileSize;
final String? networkSpeedAsString;
final bool? isFailed;
final String? error;
const DriftUploadStatus({
required this.taskId,
required this.filename,
required this.progress,
required this.fileSize,
required this.networkSpeedAsString,
this.progress,
this.fileSize,
this.networkSpeedAsString,
this.isFailed,
this.error,
});
@@ -231,7 +232,7 @@ class DriftBackupNotifier extends StateNotifier<DriftBackupState> {
}
}
void _handleTaskStatusUpdate(TaskStatusUpdate update) {
void _handleTaskStatusUpdate(TaskStatusUpdate update) async {
final taskId = update.task.taskId;
switch (update.status) {
@@ -249,35 +250,36 @@ class DriftBackupNotifier extends StateNotifier<DriftBackupState> {
});
}
await _uploadService.clearError(taskId);
case TaskStatus.failed:
_removeUploadItem(taskId);
// Ignore retry errors to avoid confusing users
if (update.exception?.description == 'Delayed or retried enqueue failed') {
_removeUploadItem(taskId);
return;
}
final currentItem = state.uploadItems[taskId];
if (currentItem == null) {
return;
}
String? error;
final exception = update.exception;
if (exception != null && exception is TaskHttpException) {
final message = tryJsonDecode(exception.description)?['message'] as String?;
if (message != null) {
final responseCode = exception.httpResponseCode;
error = "${exception.exceptionType}, response code $responseCode: $message";
}
}
error ??= update.exception?.toString();
if (exception != null) {
final errorType = switch (exception) {
TaskConnectionException() => UploadErrorType.network,
TaskFileSystemException() || TaskUrlException() => UploadErrorType.client,
TaskHttpException() => UploadErrorType.server,
_ => UploadErrorType.unknown,
};
state = state.copyWith(
uploadItems: {
...state.uploadItems,
taskId: currentItem.copyWith(isFailed: true, error: error),
},
);
if (exception is TaskHttpException) {
final message = tryJsonDecode(exception.description)?['message'] as String?;
if (message != null) {
final responseCode = exception.httpResponseCode;
error = "${exception.exceptionType}, response code $responseCode: $message";
}
}
error ??= exception.toString();
await _uploadService.updateError(taskId, errorType, error);
}
_logger.fine("Upload failed for taskId: $taskId, exception: ${update.exception}");
break;
@@ -350,9 +352,9 @@ class DriftBackupNotifier extends StateNotifier<DriftBackupState> {
state = state.copyWith(isSyncing: isSyncing);
}
Future<void> startBackup(String userId) {
Future<void> startBackup(String userId, {bool ignoreFailed = false}) {
state = state.copyWith(error: BackupError.none);
return _uploadService.startBackup(userId, _updateEnqueueCount);
return _uploadService.startBackup(userId, _updateEnqueueCount, ignoreFailed: ignoreFailed);
}
void _updateEnqueueCount(EnqueueStatus status) {

View File

@@ -0,0 +1,91 @@
import 'dart:async';
import 'package:background_downloader/background_downloader.dart';
import 'package:immich_mobile/constants/constants.dart';
import 'package:immich_mobile/infrastructure/repositories/local_asset_upload.repository.dart';
import 'package:immich_mobile/providers/app_settings.provider.dart';
import 'package:immich_mobile/providers/backup/drift_backup.provider.dart';
import 'package:immich_mobile/providers/infrastructure/db.provider.dart';
import 'package:immich_mobile/providers/user.provider.dart';
import 'package:immich_mobile/services/app_settings.service.dart';
import 'package:logging/logging.dart';
import 'package:riverpod_annotation/riverpod_annotation.dart';
class UploadTimerNotifier extends Notifier<bool> {
Timer? _timer;
final _timerLogger = Logger('UploadTimer');
static const _refreshDuration = Duration(seconds: 45);
void start() {
if (state) {
return;
}
state = true;
_schedule();
}
void stop() {
if (!state) {
return;
}
_timer?.cancel();
_timer = null;
state = false;
}
void _schedule() {
_timer?.cancel();
_timer = Timer(_refreshDuration, () async {
if (!state) {
return;
}
await _backup();
if (state) {
_schedule();
}
});
}
Future<void> _backup() async {
final isBackupEnabled = ref.read(appSettingsServiceProvider).getSetting(AppSettingsEnum.enableBackup);
if (!isBackupEnabled) {
_timerLogger.fine("UploadTimer: Backup is disabled, skipping backup start.");
return;
}
final tasks = await FileDownloader().allTasks(group: kBackupGroup);
final currentUserId = ref.read(currentUserProvider)?.id;
if (tasks.isEmpty && currentUserId != null) {
await ref.read(driftBackupProvider.notifier).startBackup(currentUserId, ignoreFailed: true);
} else {
_timerLogger.fine("UploadTimer: There are still active upload tasks - ${tasks.length}, skipping backup start.");
}
}
@override
bool build() {
Future.microtask(start);
ref.onDispose(() {
_timer?.cancel();
});
// Timer is not running yet
return false;
}
}
final uploadTimerProvider = NotifierProvider<UploadTimerNotifier, bool>(UploadTimerNotifier.new);
final assetUploadRepositoryProvider = Provider<DriftLocalAssetUploadRepository>(
(ref) => DriftLocalAssetUploadRepository(ref.watch(driftProvider)),
);
final failedUploadStatusProvider = StreamProvider.autoDispose<Map<String, DriftUploadStatus>>((ref) {
return ref.watch(assetUploadRepositoryProvider).watchAll().map((uploads) {
return uploads.fold<Map<String, DriftUploadStatus>>({}, (acc, upload) {
acc[upload.taskId] = upload;
return acc;
});
});
});

View File

@@ -67,7 +67,7 @@ class ServerInfoNotifier extends StateNotifier<ServerInfo> {
return;
}
if (clientVersion < serverVersion) {
if (clientVersion < serverVersion && clientVersion.differenceType(serverVersion) != SemVerType.patch) {
state = state.copyWith(versionStatus: VersionStatus.clientOutOfDate);
return;
}

View File

@@ -89,9 +89,16 @@ class AssetMediaRepository {
return null;
}
// titleAsync gets the correct original filename for some assets on iOS
// otherwise using the `entity.title` would return a random GUID
return await entity.titleAsync;
try {
// titleAsync gets the correct original filename for some assets on iOS
// otherwise using the `entity.title` would return a random GUID
final originalFilename = await entity.titleAsync;
// treat empty filename as missing
return originalFilename.isNotEmpty ? originalFilename : null;
} catch (e) {
_log.warning("Failed to get original filename for asset: $id. Error: $e");
return null;
}
}
// TODO: make this more efficient

View File

@@ -7,17 +7,20 @@ import 'package:cancellation_token_http/http.dart';
import 'package:flutter/foundation.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/constants/constants.dart';
import 'package:immich_mobile/constants/enums.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/domain/models/store.model.dart';
import 'package:immich_mobile/entities/store.entity.dart';
import 'package:immich_mobile/extensions/platform_extensions.dart';
import 'package:immich_mobile/infrastructure/repositories/backup.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/local_asset.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/local_asset_upload.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/storage.repository.dart';
import 'package:immich_mobile/providers/app_settings.provider.dart';
import 'package:immich_mobile/providers/backup/drift_backup.provider.dart';
import 'package:immich_mobile/providers/infrastructure/asset.provider.dart';
import 'package:immich_mobile/providers/infrastructure/storage.provider.dart';
import 'package:immich_mobile/providers/infrastructure/upload.provider.dart';
import 'package:immich_mobile/repositories/asset_media.repository.dart';
import 'package:immich_mobile/repositories/upload.repository.dart';
import 'package:immich_mobile/services/api.service.dart';
@@ -34,6 +37,7 @@ final uploadServiceProvider = Provider((ref) {
ref.watch(localAssetRepository),
ref.watch(appSettingsServiceProvider),
ref.watch(assetMediaRepositoryProvider),
ref.watch(assetUploadRepositoryProvider),
);
ref.onDispose(service.dispose);
@@ -48,6 +52,7 @@ class UploadService {
this._localAssetRepository,
this._appSettingsService,
this._assetMediaRepository,
this._assetUploadRepository,
) {
_uploadRepository.onUploadStatus = _onUploadCallback;
_uploadRepository.onTaskProgress = _onTaskProgressCallback;
@@ -59,6 +64,7 @@ class UploadService {
final DriftLocalAssetRepository _localAssetRepository;
final AppSettingsService _appSettingsService;
final AssetMediaRepository _assetMediaRepository;
final DriftLocalAssetUploadRepository _assetUploadRepository;
final Logger _logger = Logger('UploadService');
final StreamController<TaskStatusUpdate> _taskStatusController = StreamController<TaskStatusUpdate>.broadcast();
@@ -87,6 +93,14 @@ class UploadService {
_taskProgressController.close();
}
Future<void> updateError(String assetId, UploadErrorType errorType, String error) {
return _assetUploadRepository.upsert(assetId, errorType, error);
}
Future<void> clearError(String assetId) {
return _assetUploadRepository.delete(assetId);
}
Future<List<bool>> enqueueTasks(List<UploadTask> tasks) {
return _uploadRepository.enqueueBackgroundAll(tasks);
}
@@ -99,6 +113,15 @@ class UploadService {
return _backupRepository.getAllCounts(userId);
}
Future<void> manualBackupId(String localId) async {
final localAsset = await _localAssetRepository.get(localId);
if (localAsset == null) {
_logger.warning('Local asset with id $localId not found for manual backup');
return;
}
await manualBackup([localAsset]);
}
Future<void> manualBackup(List<LocalAsset> localAssets) async {
await _storageRepository.clearCache();
List<UploadTask> tasks = [];
@@ -121,47 +144,41 @@ class UploadService {
/// Find backup candidates
/// Build the upload tasks
/// Enqueue the tasks
Future<void> startBackup(String userId, void Function(EnqueueStatus status) onEnqueueTasks) async {
Future<void> startBackup(
String userId,
void Function(EnqueueStatus status) onEnqueueTasks, {
bool ignoreFailed = false,
}) async {
await _storageRepository.clearCache();
await _assetUploadRepository.prune();
shouldAbortQueuingTasks = false;
final candidates = await _backupRepository.getCandidates(userId);
final candidates = await _backupRepository.getCandidates(
userId,
limit: 100,
ignoreFailed: ignoreFailed,
sortBy: SortCandidatesBy.attemptCount,
);
if (candidates.isEmpty) {
return;
}
const batchSize = 100;
int count = 0;
for (int i = 0; i < candidates.length; i += batchSize) {
if (shouldAbortQueuingTasks) {
break;
}
final tasks = (await Future.wait(candidates.map((asset) => getUploadTask(asset)))).nonNulls.toList();
if (tasks.isNotEmpty && !shouldAbortQueuingTasks) {
await enqueueTasks(tasks);
final batch = candidates.skip(i).take(batchSize).toList();
List<UploadTask> tasks = [];
for (final asset in batch) {
final task = await getUploadTask(asset);
if (task != null) {
tasks.add(task);
}
}
if (tasks.isNotEmpty && !shouldAbortQueuingTasks) {
count += tasks.length;
await enqueueTasks(tasks);
onEnqueueTasks(EnqueueStatus(enqueueCount: count, totalCount: candidates.length));
}
onEnqueueTasks(EnqueueStatus(enqueueCount: tasks.length, totalCount: candidates.length));
}
}
Future<void> startBackupWithHttpClient(String userId, bool hasWifi, CancellationToken token) async {
await _storageRepository.clearCache();
await _assetUploadRepository.prune();
shouldAbortQueuingTasks = false;
final candidates = await _backupRepository.getCandidates(userId);
final candidates = await _backupRepository.getCandidates(userId, sortBy: SortCandidatesBy.attemptCount);
if (candidates.isEmpty) {
return;
}
@@ -200,6 +217,7 @@ class UploadService {
shouldAbortQueuingTasks = true;
await _storageRepository.clearCache();
await _assetUploadRepository.prune();
await _uploadRepository.reset(kBackupGroup);
await _uploadRepository.deleteDatabaseRecords(kBackupGroup);

View File

@@ -1,3 +1,5 @@
enum SemVerType { major, minor, patch }
class SemVer {
final int major;
final int minor;
@@ -15,8 +17,20 @@ class SemVer {
}
factory SemVer.fromString(String version) {
if (version.toLowerCase().startsWith("v")) {
version = version.substring(1);
}
final parts = version.split("-")[0].split('.');
return SemVer(major: int.parse(parts[0]), minor: int.parse(parts[1]), patch: int.parse(parts[2]));
if (parts.length != 3) {
throw FormatException('Invalid semantic version string: $version');
}
try {
return SemVer(major: int.parse(parts[0]), minor: int.parse(parts[1]), patch: int.parse(parts[2]));
} catch (e) {
throw FormatException('Invalid semantic version string: $version');
}
}
bool operator >(SemVer other) {
@@ -54,6 +68,20 @@ class SemVer {
return other is SemVer && other.major == major && other.minor == minor && other.patch == patch;
}
SemVerType? differenceType(SemVer other) {
if (major != other.major) {
return SemVerType.major;
}
if (minor != other.minor) {
return SemVerType.minor;
}
if (patch != other.patch) {
return SemVerType.patch;
}
return null;
}
@override
int get hashCode => major.hashCode ^ minor.hashCode ^ patch.hashCode;
}
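For illustration, a minimal sketch of the parsing and comparison behavior defined above (the version strings are hypothetical): fromString throws FormatException on malformed input, and differenceType returns null when the two versions match.
// Hypothetical values for illustration only.
final current = SemVer.fromString('v2.2.2'); // leading "v" is stripped
final latest = SemVer.fromString('2.2.3');
final diff = current.differenceType(latest); // SemVerType.patch
if (diff == null || diff == SemVerType.patch) {
  // Equal versions or a patch-level difference: nothing significant to report.
} else {
  // Major or minor difference between the two versions.
}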

View File

@@ -3,7 +3,7 @@ Immich API
This Dart package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:
- API version: 2.2.2
- API version: 2.2.3
- Generator version: 7.8.0
- Build package: org.openapitools.codegen.languages.DartClientCodegen

View File

@@ -2,7 +2,7 @@ name: immich_mobile
description: Immich - selfhosted backup media file on mobile phone
publish_to: 'none'
version: 2.2.2+3025
version: 2.2.3+3026
environment:
sdk: '>=3.8.0 <4.0.0'

View File

@@ -8,11 +8,11 @@ bash tool/build_android.sh x64
bash tool/build_android.sh armv7
bash tool/build_android.sh arm64
mv libisar_android_arm64.so libisar.so
mv libisar.so ../.pub-cache/hosted/pub.isar-community.dev/isar_flutter_libs-*/android/src/main/jniLibs/arm64-v8a/
mv libisar.so ../.pub-cache/hosted/pub.dev/isar_community_flutter_libs-*/android/src/main/jniLibs/arm64-v8a/
mv libisar_android_armv7.so libisar.so
mv libisar.so ../.pub-cache/hosted/pub.isar-community.dev/isar_flutter_libs-*/android/src/main/jniLibs/armeabi-v7a/
mv libisar.so ../.pub-cache/hosted/pub.dev/isar_community_flutter_libs-*/android/src/main/jniLibs/armeabi-v7a/
mv libisar_android_x64.so libisar.so
mv libisar.so ../.pub-cache/hosted/pub.isar-community.dev/isar_flutter_libs-*/android/src/main/jniLibs/x86_64/
mv libisar.so ../.pub-cache/hosted/pub.dev/isar_community_flutter_libs-*/android/src/main/jniLibs/x86_64/
mv libisar_android_x86.so libisar.so
mv libisar.so ../.pub-cache/hosted/pub.isar-community.dev/isar_flutter_libs-*/android/src/main/jniLibs/x86/
)
mv libisar.so ../.pub-cache/hosted/pub.dev/isar_community_flutter_libs-*/android/src/main/jniLibs/x86/
)

View File

@@ -15,6 +15,7 @@ import 'schema_v9.dart' as v9;
import 'schema_v10.dart' as v10;
import 'schema_v11.dart' as v11;
import 'schema_v12.dart' as v12;
import 'schema_v13.dart' as v13;
class GeneratedHelper implements SchemaInstantiationHelper {
@override
@@ -44,10 +45,12 @@ class GeneratedHelper implements SchemaInstantiationHelper {
return v11.DatabaseAtV11(db);
case 12:
return v12.DatabaseAtV12(db);
case 13:
return v13.DatabaseAtV13(db);
default:
throw MissingSchemaException(version, versions);
}
}
static const versions = const [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12];
static const versions = const [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13];
}

File diff suppressed because it is too large

View File

@@ -1,18 +1,19 @@
import 'package:immich_mobile/infrastructure/repositories/exif.repository.dart';
import 'package:immich_mobile/repositories/partner_api.repository.dart';
import 'package:immich_mobile/repositories/album_media.repository.dart';
import 'package:immich_mobile/infrastructure/repositories/local_asset_upload.repository.dart';
import 'package:immich_mobile/repositories/album.repository.dart';
import 'package:immich_mobile/repositories/album_api.repository.dart';
import 'package:immich_mobile/repositories/partner.repository.dart';
import 'package:immich_mobile/repositories/etag.repository.dart';
import 'package:immich_mobile/repositories/local_files_manager.repository.dart';
import 'package:immich_mobile/repositories/file_media.repository.dart';
import 'package:immich_mobile/repositories/backup.repository.dart';
import 'package:immich_mobile/repositories/album_media.repository.dart';
import 'package:immich_mobile/repositories/asset.repository.dart';
import 'package:immich_mobile/repositories/asset_api.repository.dart';
import 'package:immich_mobile/repositories/asset_media.repository.dart';
import 'package:immich_mobile/repositories/auth.repository.dart';
import 'package:immich_mobile/repositories/auth_api.repository.dart';
import 'package:immich_mobile/repositories/asset.repository.dart';
import 'package:immich_mobile/repositories/asset_media.repository.dart';
import 'package:immich_mobile/repositories/album.repository.dart';
import 'package:immich_mobile/repositories/asset_api.repository.dart';
import 'package:immich_mobile/repositories/backup.repository.dart';
import 'package:immich_mobile/repositories/etag.repository.dart';
import 'package:immich_mobile/repositories/file_media.repository.dart';
import 'package:immich_mobile/repositories/local_files_manager.repository.dart';
import 'package:immich_mobile/repositories/partner.repository.dart';
import 'package:immich_mobile/repositories/partner_api.repository.dart';
import 'package:mocktail/mocktail.dart';
class MockAlbumRepository extends Mock implements AlbumRepository {}
@@ -46,3 +47,5 @@ class MockPartnerRepository extends Mock implements PartnerRepository {}
class MockPartnerApiRepository extends Mock implements PartnerApiRepository {}
class MockLocalFilesManagerRepository extends Mock implements LocalFilesManagerRepository {}
class MockLocalAssetUploadRepository extends Mock implements DriftLocalAssetUploadRepository {}

View File

@@ -29,6 +29,7 @@ void main() {
late MockDriftLocalAssetRepository mockLocalAssetRepository;
late MockAppSettingsService mockAppSettingsService;
late MockAssetMediaRepository mockAssetMediaRepository;
late MockLocalAssetUploadRepository mockLocalAssetUploadRepository;
late Drift db;
setUpAll(() async {
@@ -53,6 +54,7 @@ void main() {
mockLocalAssetRepository = MockDriftLocalAssetRepository();
mockAppSettingsService = MockAppSettingsService();
mockAssetMediaRepository = MockAssetMediaRepository();
mockLocalAssetUploadRepository = MockLocalAssetUploadRepository();
when(() => mockAppSettingsService.getSetting(AppSettingsEnum.useCellularForUploadVideos)).thenReturn(false);
when(() => mockAppSettingsService.getSetting(AppSettingsEnum.useCellularForUploadPhotos)).thenReturn(false);
@@ -64,6 +66,7 @@ void main() {
mockLocalAssetRepository,
mockAppSettingsService,
mockAssetMediaRepository,
mockLocalAssetUploadRepository,
);
mockUploadRepository.onUploadStatus = (_) {};

View File

@@ -0,0 +1,92 @@
import 'package:flutter_test/flutter_test.dart';
import 'package:immich_mobile/utils/semver.dart';
void main() {
group('SemVer', () {
test('Parses valid semantic version strings correctly', () {
final version = SemVer.fromString('1.2.3');
expect(version.major, 1);
expect(version.minor, 2);
expect(version.patch, 3);
});
test('Throws FormatException for invalid version strings', () {
expect(() => SemVer.fromString('1.2'), throwsFormatException);
expect(() => SemVer.fromString('a.b.c'), throwsFormatException);
expect(() => SemVer.fromString('1.2.3.4'), throwsFormatException);
});
test('Compares equal versions correctly', () {
final v1 = SemVer.fromString('1.2.3');
final v2 = SemVer.fromString('1.2.3');
expect(v1 == v2, isTrue);
expect(v1 > v2, isFalse);
expect(v1 < v2, isFalse);
});
test('Compares major version correctly', () {
final v1 = SemVer.fromString('2.0.0');
final v2 = SemVer.fromString('1.9.9');
expect(v1 == v2, isFalse);
expect(v1 > v2, isTrue);
expect(v1 < v2, isFalse);
});
test('Compares minor version correctly', () {
final v1 = SemVer.fromString('1.3.0');
final v2 = SemVer.fromString('1.2.9');
expect(v1 == v2, isFalse);
expect(v1 > v2, isTrue);
expect(v1 < v2, isFalse);
});
test('Compares patch version correctly', () {
final v1 = SemVer.fromString('1.2.4');
final v2 = SemVer.fromString('1.2.3');
expect(v1 == v2, isFalse);
expect(v1 > v2, isTrue);
expect(v1 < v2, isFalse);
});
test('Gives correct major difference type', () {
final v1 = SemVer.fromString('2.0.0');
final v2 = SemVer.fromString('1.9.9');
expect(v1.differenceType(v2), SemVerType.major);
});
test('Gives correct minor difference type', () {
final v1 = SemVer.fromString('1.3.0');
final v2 = SemVer.fromString('1.2.9');
expect(v1.differenceType(v2), SemVerType.minor);
});
test('Gives correct patch difference type', () {
final v1 = SemVer.fromString('1.2.4');
final v2 = SemVer.fromString('1.2.3');
expect(v1.differenceType(v2), SemVerType.patch);
});
test('Gives null difference type for equal versions', () {
final v1 = SemVer.fromString('1.2.3');
final v2 = SemVer.fromString('1.2.3');
expect(v1.differenceType(v2), isNull);
});
test('toString returns correct format', () {
final version = SemVer.fromString('1.2.3');
expect(version.toString(), '1.2.3');
});
test('Parses versions with leading v correctly', () {
final version1 = SemVer.fromString('v1.2.3');
expect(version1.major, 1);
expect(version1.minor, 2);
expect(version1.patch, 3);
final version2 = SemVer.fromString('V1.2.3');
expect(version2.major, 1);
expect(version2.minor, 2);
expect(version2.patch, 3);
});
});
}

View File

@@ -10006,7 +10006,7 @@
"info": {
"title": "Immich",
"description": "Immich API",
"version": "2.2.2",
"version": "2.2.3",
"contact": {}
},
"tags": [],

View File

@@ -1,6 +1,6 @@
{
"name": "@immich/sdk",
"version": "2.2.2",
"version": "2.2.3",
"description": "Auto-generated TypeScript SDK for the Immich API",
"type": "module",
"main": "./build/index.js",
@@ -19,7 +19,7 @@
"@oazapfts/runtime": "^1.0.2"
},
"devDependencies": {
"@types/node": "^22.18.12",
"@types/node": "^22.18.13",
"typescript": "^5.3.3"
},
"repository": {

View File

@@ -1,6 +1,6 @@
/**
* Immich
* 2.2.2
* 2.2.3
* DO NOT MODIFY - This file has been generated using oazapfts.
* See https://www.npmjs.com/package/oazapfts
*/

pnpm-lock.yaml (generated)
View File

@@ -63,7 +63,7 @@ importers:
specifier: ^4.13.1
version: 4.13.4
'@types/node':
specifier: ^22.18.12
specifier: ^22.18.13
version: 22.18.13
'@vitest/coverage-v8':
specifier: ^3.0.0
@@ -211,7 +211,7 @@ importers:
specifier: ^3.4.2
version: 3.7.1
'@types/node':
specifier: ^22.18.12
specifier: ^22.18.13
version: 22.18.13
'@types/oidc-provider':
specifier: ^9.0.0
@@ -293,7 +293,7 @@ importers:
version: 1.0.4
devDependencies:
'@types/node':
specifier: ^22.18.12
specifier: ^22.18.13
version: 22.18.13
typescript:
specifier: ^5.3.3
@@ -582,7 +582,7 @@ importers:
specifier: ^2.0.0
version: 2.0.0
'@types/node':
specifier: ^22.18.12
specifier: ^22.18.13
version: 22.18.13
'@types/nodemailer':
specifier: ^7.0.0

View File

@@ -94,7 +94,7 @@
| LivePhoto/MotionPhoto playback and backup | Yes | Yes |
| 360° image display | No | Yes |
| Configurable storage structure | Yes | Yes |
| Content sharing | No | Yes |
| Content sharing | Yes | Yes |
| Archive and favorites | Yes | Yes |
| Global map | Yes | Yes |
| Partner sharing | Yes | Yes |
@@ -104,7 +104,7 @@
| Read-only galleries | Yes | Yes |
| Collages | Yes | Yes |
| Tags | No | Yes |
| Folder view | No | Yes |
| Folder view | Yes | Yes |
## Translation

View File

@@ -1,6 +1,6 @@
{
"name": "immich",
"version": "2.2.2",
"version": "2.2.3",
"description": "",
"author": "",
"private": true,
@@ -129,7 +129,7 @@
"@types/luxon": "^3.6.2",
"@types/mock-fs": "^4.13.1",
"@types/multer": "^2.0.0",
"@types/node": "^22.18.12",
"@types/node": "^22.18.13",
"@types/nodemailer": "^7.0.0",
"@types/picomatch": "^4.0.0",
"@types/pngjs": "^6.0.5",

View File

@@ -356,7 +356,7 @@ export const columns = {
'asset.stackId',
'asset.libraryId',
],
syncAlbumUser: ['album_user.albumsId as albumId', 'album_user.usersId as userId', 'album_user.role'],
syncAlbumUser: ['album_user.albumId as albumId', 'album_user.userId as userId', 'album_user.role'],
syncStack: ['stack.id', 'stack.createdAt', 'stack.updatedAt', 'stack.primaryAssetId', 'stack.ownerId'],
syncUser: ['id', 'name', 'email', 'avatarColor', 'deletedAt', 'updateId', 'profileImagePath', 'profileChangedAt'],
stack: ['stack.id', 'stack.primaryAssetId', 'ownerId'],

View File

@@ -25,8 +25,8 @@ select
"album"."id"
from
"album"
left join "album_user" as "albumUsers" on "albumUsers"."albumsId" = "album"."id"
left join "user" on "user"."id" = "albumUsers"."usersId"
left join "album_user" as "albumUsers" on "albumUsers"."albumId" = "album"."id"
left join "user" on "user"."id" = "albumUsers"."userId"
and "user"."deletedAt" is null
where
"album"."id" in ($1)
@@ -52,8 +52,8 @@ select
"album"."id"
from
"album"
left join "album_user" on "album_user"."albumsId" = "album"."id"
left join "user" on "user"."id" = "album_user"."usersId"
left join "album_user" on "album_user"."albumId" = "album"."id"
left join "user" on "user"."id" = "album_user"."userId"
and "user"."deletedAt" is null
where
"album"."id" in ($1)
@@ -81,11 +81,11 @@ select
"asset"."livePhotoVideoId"
from
"album"
inner join "album_asset" as "albumAssets" on "album"."id" = "albumAssets"."albumsId"
inner join "asset" on "asset"."id" = "albumAssets"."assetsId"
inner join "album_asset" as "albumAssets" on "album"."id" = "albumAssets"."albumId"
inner join "asset" on "asset"."id" = "albumAssets"."assetId"
and "asset"."deletedAt" is null
left join "album_user" as "albumUsers" on "albumUsers"."albumsId" = "album"."id"
left join "user" on "user"."id" = "albumUsers"."usersId"
left join "album_user" as "albumUsers" on "albumUsers"."albumId" = "album"."id"
left join "user" on "user"."id" = "albumUsers"."userId"
and "user"."deletedAt" is null
cross join "target"
where
@@ -136,11 +136,11 @@ from
"shared_link"
left join "album" on "album"."id" = "shared_link"."albumId"
and "album"."deletedAt" is null
left join "shared_link_asset" on "shared_link_asset"."sharedLinksId" = "shared_link"."id"
left join "asset" on "asset"."id" = "shared_link_asset"."assetsId"
left join "shared_link_asset" on "shared_link_asset"."sharedLinkId" = "shared_link"."id"
left join "asset" on "asset"."id" = "shared_link_asset"."assetId"
and "asset"."deletedAt" is null
left join "album_asset" on "album_asset"."albumsId" = "album"."id"
left join "asset" as "albumAssets" on "albumAssets"."id" = "album_asset"."assetsId"
left join "album_asset" on "album_asset"."albumId" = "album"."id"
left join "asset" as "albumAssets" on "albumAssets"."id" = "album_asset"."assetId"
and "albumAssets"."deletedAt" is null
where
"shared_link"."id" = $1

View File

@@ -43,13 +43,13 @@ select
from
"user"
where
"user"."id" = "album_user"."usersId"
"user"."id" = "album_user"."userId"
) as obj
) as "user"
from
"album_user"
where
"album_user"."albumsId" = "album"."id"
"album_user"."albumId" = "album"."id"
) as agg
) as "albumUsers",
(
@@ -76,9 +76,9 @@ select
from
"asset"
left join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
inner join "album_asset" on "album_asset"."assetsId" = "asset"."id"
inner join "album_asset" on "album_asset"."assetId" = "asset"."id"
where
"album_asset"."albumsId" = "album"."id"
"album_asset"."albumId" = "album"."id"
and "asset"."deletedAt" is null
and "asset"."visibility" in ('archive', 'timeline')
order by
@@ -134,18 +134,18 @@ select
from
"user"
where
"user"."id" = "album_user"."usersId"
"user"."id" = "album_user"."userId"
) as obj
) as "user"
from
"album_user"
where
"album_user"."albumsId" = "album"."id"
"album_user"."albumId" = "album"."id"
) as agg
) as "albumUsers"
from
"album"
inner join "album_asset" on "album_asset"."albumsId" = "album"."id"
inner join "album_asset" on "album_asset"."albumId" = "album"."id"
where
(
"album"."ownerId" = $1
@@ -154,11 +154,11 @@ where
from
"album_user"
where
"album_user"."albumsId" = "album"."id"
and "album_user"."usersId" = $2
"album_user"."albumId" = "album"."id"
and "album_user"."userId" = $2
)
)
and "album_asset"."assetsId" = $3
and "album_asset"."assetId" = $3
and "album"."deletedAt" is null
order by
"album"."createdAt" desc,
@@ -166,7 +166,7 @@ order by
-- AlbumRepository.getMetadataForIds
select
"album_asset"."albumsId" as "albumId",
"album_asset"."albumId" as "albumId",
min(
("asset"."localDateTime" AT TIME ZONE 'UTC'::text)::date
) as "startDate",
@@ -177,13 +177,13 @@ select
count("asset"."id")::int as "assetCount"
from
"asset"
inner join "album_asset" on "album_asset"."assetsId" = "asset"."id"
inner join "album_asset" on "album_asset"."assetId" = "asset"."id"
where
"asset"."visibility" in ('archive', 'timeline')
and "album_asset"."albumsId" in ($1)
and "album_asset"."albumId" in ($1)
and "asset"."deletedAt" is null
group by
"album_asset"."albumsId"
"album_asset"."albumId"
-- AlbumRepository.getOwned
select
@@ -228,13 +228,13 @@ select
from
"user"
where
"user"."id" = "album_user"."usersId"
"user"."id" = "album_user"."userId"
) as obj
) as "user"
from
"album_user"
where
"album_user"."albumsId" = "album"."id"
"album_user"."albumId" = "album"."id"
) as agg
) as "albumUsers",
(
@@ -283,13 +283,13 @@ select
from
"user"
where
"user"."id" = "album_user"."usersId"
"user"."id" = "album_user"."userId"
) as obj
) as "user"
from
"album_user"
where
"album_user"."albumsId" = "album"."id"
"album_user"."albumId" = "album"."id"
) as agg
) as "albumUsers",
(
@@ -332,10 +332,10 @@ where
from
"album_user"
where
"album_user"."albumsId" = "album"."id"
"album_user"."albumId" = "album"."id"
and (
"album"."ownerId" = $1
or "album_user"."usersId" = $2
or "album_user"."userId" = $2
)
)
or exists (
@@ -382,7 +382,7 @@ where
from
"album_user"
where
"album_user"."albumsId" = "album"."id"
"album_user"."albumId" = "album"."id"
)
and not exists (
select
@@ -397,7 +397,7 @@ order by
-- AlbumRepository.removeAssetsFromAll
delete from "album_asset"
where
"album_asset"."assetsId" in ($1)
"album_asset"."assetId" in ($1)
-- AlbumRepository.getAssetIds
select
@@ -405,8 +405,8 @@ select
from
"album_asset"
where
"album_asset"."albumsId" = $1
and "album_asset"."assetsId" in ($2)
"album_asset"."albumId" = $1
and "album_asset"."assetId" in ($2)
-- AlbumRepository.getContributorCounts
select
@@ -414,10 +414,10 @@ select
count(*) as "assetCount"
from
"album_asset"
inner join "asset" on "asset"."id" = "assetsId"
inner join "asset" on "asset"."id" = "assetId"
where
"asset"."deletedAt" is null
and "album_asset"."albumsId" = $1
and "album_asset"."albumId" = $1
group by
"asset"."ownerId"
order by
@@ -427,10 +427,10 @@ order by
insert into
"album_asset"
select
"album_asset"."albumsId",
$1 as "assetsId"
"album_asset"."albumId",
$1 as "assetId"
from
"album_asset"
where
"album_asset"."assetsId" = $2
"album_asset"."assetId" = $2
on conflict do nothing

View File

@@ -2,12 +2,12 @@
-- AlbumUserRepository.create
insert into
"album_user" ("usersId", "albumsId")
"album_user" ("userId", "albumId")
values
($1, $2)
returning
"usersId",
"albumsId",
"userId",
"albumId",
"role"
-- AlbumUserRepository.update
@@ -15,13 +15,13 @@ update "album_user"
set
"role" = $1
where
"usersId" = $2
and "albumsId" = $3
"userId" = $2
and "albumId" = $3
returning
*
-- AlbumUserRepository.delete
delete from "album_user"
where
"usersId" = $1
and "albumsId" = $2
"userId" = $1
and "albumId" = $2

View File

@@ -31,9 +31,9 @@ select
"tag"."value"
from
"tag"
inner join "tag_asset" on "tag"."id" = "tag_asset"."tagsId"
inner join "tag_asset" on "tag"."id" = "tag_asset"."tagId"
where
"asset"."id" = "tag_asset"."assetsId"
"asset"."id" = "tag_asset"."assetId"
) as agg
) as "tags"
from

View File

@@ -160,9 +160,9 @@ select
"tag"."parentId"
from
"tag"
inner join "tag_asset" on "tag"."id" = "tag_asset"."tagsId"
inner join "tag_asset" on "tag"."id" = "tag_asset"."tagId"
where
"asset"."id" = "tag_asset"."assetsId"
"asset"."id" = "tag_asset"."assetId"
) as agg
) as "tags",
to_json("asset_exif") as "exifInfo"

View File

@@ -23,8 +23,8 @@ where
from
"album_asset"
where
"asset"."id" = "album_asset"."assetsId"
and "album_asset"."albumsId" in ($3)
"asset"."id" = "album_asset"."assetId"
and "album_asset"."albumId" in ($3)
)
)
order by

View File

@@ -37,7 +37,7 @@ select
"asset".*
from
"asset"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetsId"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetId"
where
"memory_asset"."memoriesId" = "memory"."id"
and "asset"."visibility" = 'timeline'
@@ -66,7 +66,7 @@ select
"asset".*
from
"asset"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetsId"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetId"
where
"memory_asset"."memoriesId" = "memory"."id"
and "asset"."visibility" = 'timeline'
@@ -104,7 +104,7 @@ select
"asset".*
from
"asset"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetsId"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetId"
where
"memory_asset"."memoriesId" = "memory"."id"
and "asset"."visibility" = 'timeline'
@@ -137,7 +137,7 @@ select
"asset".*
from
"asset"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetsId"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetId"
where
"memory_asset"."memoriesId" = "memory"."id"
and "asset"."visibility" = 'timeline'
@@ -159,15 +159,15 @@ where
-- MemoryRepository.getAssetIds
select
"assetsId"
"assetId"
from
"memory_asset"
where
"memoriesId" = $1
and "assetsId" in ($2)
and "assetId" in ($2)
-- MemoryRepository.addAssetIds
insert into
"memory_asset" ("memoriesId", "assetsId")
"memory_asset" ("memoriesId", "assetId")
values
($1, $2)

View File

@@ -4,10 +4,10 @@
insert into
"shared_link_asset"
select
$1 as "assetsId",
"shared_link_asset"."sharedLinksId"
$1 as "assetId",
"shared_link_asset"."sharedLinkId"
from
"shared_link_asset"
where
"shared_link_asset"."assetsId" = $2
"shared_link_asset"."assetId" = $2
on conflict do nothing

View File

@@ -19,7 +19,7 @@ from
to_json("exifInfo") as "exifInfo"
from
"shared_link_asset"
inner join "asset" on "asset"."id" = "shared_link_asset"."assetsId"
inner join "asset" on "asset"."id" = "shared_link_asset"."assetId"
inner join lateral (
select
"asset_exif".*
@@ -29,7 +29,7 @@ from
"asset_exif"."assetId" = "asset"."id"
) as "exifInfo" on true
where
"shared_link"."id" = "shared_link_asset"."sharedLinksId"
"shared_link"."id" = "shared_link_asset"."sharedLinkId"
and "asset"."deletedAt" is null
order by
"asset"."fileCreatedAt" asc
@@ -51,7 +51,7 @@ from
to_json("owner") as "owner"
from
"album"
left join "album_asset" on "album_asset"."albumsId" = "album"."id"
left join "album_asset" on "album_asset"."albumId" = "album"."id"
left join lateral (
select
"asset".*,
@@ -67,7 +67,7 @@ from
"asset_exif"."assetId" = "asset"."id"
) as "exifInfo" on true
where
"album_asset"."assetsId" = "asset"."id"
"album_asset"."assetId" = "asset"."id"
and "asset"."deletedAt" is null
order by
"asset"."fileCreatedAt" asc
@@ -108,14 +108,14 @@ select distinct
to_json("album") as "album"
from
"shared_link"
left join "shared_link_asset" on "shared_link_asset"."sharedLinksId" = "shared_link"."id"
left join "shared_link_asset" on "shared_link_asset"."sharedLinkId" = "shared_link"."id"
left join lateral (
select
json_agg("asset") as "assets"
from
"asset"
where
"asset"."id" = "shared_link_asset"."assetsId"
"asset"."id" = "shared_link_asset"."assetId"
and "asset"."deletedAt" is null
) as "assets" on true
left join lateral (

View File

@@ -89,9 +89,9 @@ select
"tag"."parentId"
from
"tag"
inner join "tag_asset" on "tag"."id" = "tag_asset"."tagsId"
inner join "tag_asset" on "tag"."id" = "tag_asset"."tagId"
where
"tag_asset"."assetsId" = "asset"."id"
"tag_asset"."assetId" = "asset"."id"
) as agg
) as "tags",
to_json("exifInfo") as "exifInfo"

View File

@@ -2,12 +2,12 @@
-- SyncRepository.album.getCreatedAfter
select
"albumsId" as "id",
"albumId" as "id",
"createId"
from
"album_user"
where
"usersId" = $1
"userId" = $1
and "createId" >= $2
and "createId" < $3
order by
@@ -40,13 +40,13 @@ select distinct
"album"."updateId"
from
"album" as "album"
left join "album_user" as "album_users" on "album"."id" = "album_users"."albumsId"
left join "album_user" as "album_users" on "album"."id" = "album_users"."albumId"
where
"album"."updateId" < $1
and "album"."updateId" > $2
and (
"album"."ownerId" = $3
or "album_users"."usersId" = $4
or "album_users"."userId" = $4
)
order by
"album"."updateId" asc
@@ -72,12 +72,12 @@ select
"album_asset"."updateId"
from
"album_asset" as "album_asset"
inner join "asset" on "asset"."id" = "album_asset"."assetsId"
inner join "asset" on "asset"."id" = "album_asset"."assetId"
where
"album_asset"."updateId" < $1
and "album_asset"."updateId" <= $2
and "album_asset"."updateId" >= $3
and "album_asset"."albumsId" = $4
and "album_asset"."albumId" = $4
order by
"album_asset"."updateId" asc
@@ -102,16 +102,16 @@ select
"asset"."updateId"
from
"asset" as "asset"
inner join "album_asset" on "album_asset"."assetsId" = "asset"."id"
inner join "album" on "album"."id" = "album_asset"."albumsId"
left join "album_user" on "album_user"."albumsId" = "album_asset"."albumsId"
inner join "album_asset" on "album_asset"."assetId" = "asset"."id"
inner join "album" on "album"."id" = "album_asset"."albumId"
left join "album_user" on "album_user"."albumId" = "album_asset"."albumId"
where
"asset"."updateId" < $1
and "asset"."updateId" > $2
and "album_asset"."updateId" <= $3
and (
"album"."ownerId" = $4
or "album_user"."usersId" = $5
or "album_user"."userId" = $5
)
order by
"asset"."updateId" asc
@@ -137,15 +137,15 @@ select
"asset"."libraryId"
from
"album_asset" as "album_asset"
inner join "asset" on "asset"."id" = "album_asset"."assetsId"
inner join "album" on "album"."id" = "album_asset"."albumsId"
left join "album_user" on "album_user"."albumsId" = "album_asset"."albumsId"
inner join "asset" on "asset"."id" = "album_asset"."assetId"
inner join "album" on "album"."id" = "album_asset"."albumId"
left join "album_user" on "album_user"."albumId" = "album_asset"."albumId"
where
"album_asset"."updateId" < $1
and "album_asset"."updateId" > $2
and (
"album"."ownerId" = $3
or "album_user"."usersId" = $4
or "album_user"."userId" = $4
)
order by
"album_asset"."updateId" asc
@@ -180,12 +180,12 @@ select
"album_asset"."updateId"
from
"album_asset" as "album_asset"
inner join "asset_exif" on "asset_exif"."assetId" = "album_asset"."assetsId"
inner join "asset_exif" on "asset_exif"."assetId" = "album_asset"."assetId"
where
"album_asset"."updateId" < $1
and "album_asset"."updateId" <= $2
and "album_asset"."updateId" >= $3
and "album_asset"."albumsId" = $4
and "album_asset"."albumId" = $4
order by
"album_asset"."updateId" asc
@@ -219,16 +219,16 @@ select
"asset_exif"."updateId"
from
"asset_exif" as "asset_exif"
inner join "album_asset" on "album_asset"."assetsId" = "asset_exif"."assetId"
inner join "album" on "album"."id" = "album_asset"."albumsId"
left join "album_user" on "album_user"."albumsId" = "album_asset"."albumsId"
inner join "album_asset" on "album_asset"."assetId" = "asset_exif"."assetId"
inner join "album" on "album"."id" = "album_asset"."albumId"
left join "album_user" on "album_user"."albumId" = "album_asset"."albumId"
where
"asset_exif"."updateId" < $1
and "asset_exif"."updateId" > $2
and "album_asset"."updateId" <= $3
and (
"album"."ownerId" = $4
or "album_user"."usersId" = $5
or "album_user"."userId" = $5
)
order by
"asset_exif"."updateId" asc
@@ -263,23 +263,23 @@ select
"asset_exif"."fps"
from
"album_asset" as "album_asset"
inner join "asset_exif" on "asset_exif"."assetId" = "album_asset"."assetsId"
inner join "album" on "album"."id" = "album_asset"."albumsId"
left join "album_user" on "album_user"."albumsId" = "album_asset"."albumsId"
inner join "asset_exif" on "asset_exif"."assetId" = "album_asset"."assetId"
inner join "album" on "album"."id" = "album_asset"."albumId"
left join "album_user" on "album_user"."albumId" = "album_asset"."albumId"
where
"album_asset"."updateId" < $1
and "album_asset"."updateId" > $2
and (
"album"."ownerId" = $3
or "album_user"."usersId" = $4
or "album_user"."userId" = $4
)
order by
"album_asset"."updateId" asc
-- SyncRepository.albumToAsset.getBackfill
select
"album_asset"."assetsId" as "assetId",
"album_asset"."albumsId" as "albumId",
"album_asset"."assetId" as "assetId",
"album_asset"."albumId" as "albumId",
"album_asset"."updateId"
from
"album_asset" as "album_asset"
@@ -287,7 +287,7 @@ where
"album_asset"."updateId" < $1
and "album_asset"."updateId" <= $2
and "album_asset"."updateId" >= $3
and "album_asset"."albumsId" = $4
and "album_asset"."albumId" = $4
order by
"album_asset"."updateId" asc
@@ -311,11 +311,11 @@ where
union
(
select
"album_user"."albumsId" as "id"
"album_user"."albumId" as "id"
from
"album_user"
where
"album_user"."usersId" = $4
"album_user"."userId" = $4
)
)
order by
@@ -323,27 +323,27 @@ order by
-- SyncRepository.albumToAsset.getUpserts
select
"album_asset"."assetsId" as "assetId",
"album_asset"."albumsId" as "albumId",
"album_asset"."assetId" as "assetId",
"album_asset"."albumId" as "albumId",
"album_asset"."updateId"
from
"album_asset" as "album_asset"
inner join "album" on "album"."id" = "album_asset"."albumsId"
left join "album_user" on "album_user"."albumsId" = "album_asset"."albumsId"
inner join "album" on "album"."id" = "album_asset"."albumId"
left join "album_user" on "album_user"."albumId" = "album_asset"."albumId"
where
"album_asset"."updateId" < $1
and "album_asset"."updateId" > $2
and (
"album"."ownerId" = $3
or "album_user"."usersId" = $4
or "album_user"."userId" = $4
)
order by
"album_asset"."updateId" asc
-- SyncRepository.albumUser.getBackfill
select
"album_user"."albumsId" as "albumId",
"album_user"."usersId" as "userId",
"album_user"."albumId" as "albumId",
"album_user"."userId" as "userId",
"album_user"."role",
"album_user"."updateId"
from
@@ -352,7 +352,7 @@ where
"album_user"."updateId" < $1
and "album_user"."updateId" <= $2
and "album_user"."updateId" >= $3
and "albumsId" = $4
and "albumId" = $4
order by
"album_user"."updateId" asc
@@ -376,11 +376,11 @@ where
union
(
select
"album_user"."albumsId" as "id"
"album_user"."albumId" as "id"
from
"album_user"
where
"album_user"."usersId" = $4
"album_user"."userId" = $4
)
)
order by
@@ -388,8 +388,8 @@ order by
-- SyncRepository.albumUser.getUpserts
select
"album_user"."albumsId" as "albumId",
"album_user"."usersId" as "userId",
"album_user"."albumId" as "albumId",
"album_user"."userId" as "userId",
"album_user"."role",
"album_user"."updateId"
from
@@ -397,7 +397,7 @@ from
where
"album_user"."updateId" < $1
and "album_user"."updateId" > $2
and "album_user"."albumsId" in (
and "album_user"."albumId" in (
select
"id"
from
@@ -407,11 +407,11 @@ where
union
(
select
"albumUsers"."albumsId" as "id"
"albumUsers"."albumId" as "id"
from
"album_user" as "albumUsers"
where
"albumUsers"."usersId" = $4
"albumUsers"."userId" = $4
)
)
order by
@@ -656,7 +656,7 @@ order by
-- SyncRepository.memoryToAsset.getUpserts
select
"memoriesId" as "memoryId",
"assetsId" as "assetId",
"assetId" as "assetId",
"updateId"
from
"memory_asset" as "memory_asset"

View File

@@ -84,19 +84,19 @@ where
-- TagRepository.addAssetIds
insert into
"tag_asset" ("tagsId", "assetsId")
"tag_asset" ("tagId", "assetId")
values
($1, $2)
-- TagRepository.removeAssetIds
delete from "tag_asset"
where
"tagsId" = $1
and "assetsId" in ($2)
"tagId" = $1
and "assetId" in ($2)
-- TagRepository.upsertAssetIds
insert into
"tag_asset" ("assetId", "tagsIds")
"tag_asset" ("assetId", "tagIds")
values
($1, $2)
on conflict do nothing
@@ -107,9 +107,9 @@ returning
begin
delete from "tag_asset"
where
"assetsId" = $1
"assetId" = $1
insert into
"tag_asset" ("tagsId", "assetsId")
"tag_asset" ("tagId", "assetId")
values
($1, $2)
on conflict do nothing

View File

@@ -52,8 +52,8 @@ class ActivityAccess {
return this.db
.selectFrom('album')
.select('album.id')
.leftJoin('album_user as albumUsers', 'albumUsers.albumsId', 'album.id')
.leftJoin('user', (join) => join.onRef('user.id', '=', 'albumUsers.usersId').on('user.deletedAt', 'is', null))
.leftJoin('album_user as albumUsers', 'albumUsers.albumId', 'album.id')
.leftJoin('user', (join) => join.onRef('user.id', '=', 'albumUsers.userId').on('user.deletedAt', 'is', null))
.where('album.id', 'in', [...albumIds])
.where('album.isActivityEnabled', '=', true)
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('user.id', '=', userId)]))
@@ -96,8 +96,8 @@ class AlbumAccess {
return this.db
.selectFrom('album')
.select('album.id')
.leftJoin('album_user', 'album_user.albumsId', 'album.id')
.leftJoin('user', (join) => join.onRef('user.id', '=', 'album_user.usersId').on('user.deletedAt', 'is', null))
.leftJoin('album_user', 'album_user.albumId', 'album.id')
.leftJoin('user', (join) => join.onRef('user.id', '=', 'album_user.userId').on('user.deletedAt', 'is', null))
.where('album.id', 'in', [...albumIds])
.where('album.deletedAt', 'is', null)
.where('user.id', '=', userId)
@@ -138,12 +138,12 @@ class AssetAccess {
return this.db
.with('target', (qb) => qb.selectNoFrom(sql`array[${sql.join([...assetIds])}]::uuid[]`.as('ids')))
.selectFrom('album')
.innerJoin('album_asset as albumAssets', 'album.id', 'albumAssets.albumsId')
.innerJoin('album_asset as albumAssets', 'album.id', 'albumAssets.albumId')
.innerJoin('asset', (join) =>
join.onRef('asset.id', '=', 'albumAssets.assetsId').on('asset.deletedAt', 'is', null),
join.onRef('asset.id', '=', 'albumAssets.assetId').on('asset.deletedAt', 'is', null),
)
.leftJoin('album_user as albumUsers', 'albumUsers.albumsId', 'album.id')
.leftJoin('user', (join) => join.onRef('user.id', '=', 'albumUsers.usersId').on('user.deletedAt', 'is', null))
.leftJoin('album_user as albumUsers', 'albumUsers.albumId', 'album.id')
.leftJoin('user', (join) => join.onRef('user.id', '=', 'albumUsers.userId').on('user.deletedAt', 'is', null))
.crossJoin('target')
.select(['asset.id', 'asset.livePhotoVideoId'])
.where((eb) =>
@@ -223,13 +223,13 @@ class AssetAccess {
return this.db
.selectFrom('shared_link')
.leftJoin('album', (join) => join.onRef('album.id', '=', 'shared_link.albumId').on('album.deletedAt', 'is', null))
.leftJoin('shared_link_asset', 'shared_link_asset.sharedLinksId', 'shared_link.id')
.leftJoin('shared_link_asset', 'shared_link_asset.sharedLinkId', 'shared_link.id')
.leftJoin('asset', (join) =>
join.onRef('asset.id', '=', 'shared_link_asset.assetsId').on('asset.deletedAt', 'is', null),
join.onRef('asset.id', '=', 'shared_link_asset.assetId').on('asset.deletedAt', 'is', null),
)
.leftJoin('album_asset', 'album_asset.albumsId', 'album.id')
.leftJoin('album_asset', 'album_asset.albumId', 'album.id')
.leftJoin('asset as albumAssets', (join) =>
join.onRef('albumAssets.id', '=', 'album_asset.assetsId').on('albumAssets.deletedAt', 'is', null),
join.onRef('albumAssets.id', '=', 'album_asset.assetId').on('albumAssets.deletedAt', 'is', null),
)
.select([
'asset.id as assetId',

View File

@@ -7,36 +7,36 @@ import { DB } from 'src/schema';
import { AlbumUserTable } from 'src/schema/tables/album-user.table';
export type AlbumPermissionId = {
albumsId: string;
usersId: string;
albumId: string;
userId: string;
};
@Injectable()
export class AlbumUserRepository {
constructor(@InjectKysely() private db: Kysely<DB>) {}
@GenerateSql({ params: [{ usersId: DummyValue.UUID, albumsId: DummyValue.UUID }] })
@GenerateSql({ params: [{ userId: DummyValue.UUID, albumId: DummyValue.UUID }] })
create(albumUser: Insertable<AlbumUserTable>) {
return this.db
.insertInto('album_user')
.values(albumUser)
.returning(['usersId', 'albumsId', 'role'])
.returning(['userId', 'albumId', 'role'])
.executeTakeFirstOrThrow();
}
@GenerateSql({ params: [{ usersId: DummyValue.UUID, albumsId: DummyValue.UUID }, { role: AlbumUserRole.Viewer }] })
update({ usersId, albumsId }: AlbumPermissionId, dto: Updateable<AlbumUserTable>) {
@GenerateSql({ params: [{ userId: DummyValue.UUID, albumId: DummyValue.UUID }, { role: AlbumUserRole.Viewer }] })
update({ userId, albumId }: AlbumPermissionId, dto: Updateable<AlbumUserTable>) {
return this.db
.updateTable('album_user')
.set(dto)
.where('usersId', '=', usersId)
.where('albumsId', '=', albumsId)
.where('userId', '=', userId)
.where('albumId', '=', albumId)
.returningAll()
.executeTakeFirstOrThrow();
}
@GenerateSql({ params: [{ usersId: DummyValue.UUID, albumsId: DummyValue.UUID }] })
async delete({ usersId, albumsId }: AlbumPermissionId): Promise<void> {
await this.db.deleteFrom('album_user').where('usersId', '=', usersId).where('albumsId', '=', albumsId).execute();
@GenerateSql({ params: [{ userId: DummyValue.UUID, albumId: DummyValue.UUID }] })
async delete({ userId, albumId }: AlbumPermissionId): Promise<void> {
await this.db.deleteFrom('album_user').where('userId', '=', userId).where('albumId', '=', albumId).execute();
}
}

View File

@@ -33,11 +33,11 @@ const withAlbumUsers = (eb: ExpressionBuilder<DB, 'album'>) => {
.selectFrom('album_user')
.select('album_user.role')
.select((eb) =>
jsonObjectFrom(eb.selectFrom('user').select(columns.user).whereRef('user.id', '=', 'album_user.usersId'))
jsonObjectFrom(eb.selectFrom('user').select(columns.user).whereRef('user.id', '=', 'album_user.userId'))
.$notNull()
.as('user'),
)
.whereRef('album_user.albumsId', '=', 'album.id'),
.whereRef('album_user.albumId', '=', 'album.id'),
)
.$notNull()
.as('albumUsers');
@@ -57,8 +57,8 @@ const withAssets = (eb: ExpressionBuilder<DB, 'album'>) => {
.selectAll('asset')
.leftJoin('asset_exif', 'asset.id', 'asset_exif.assetId')
.select((eb) => eb.table('asset_exif').$castTo<Exif>().as('exifInfo'))
.innerJoin('album_asset', 'album_asset.assetsId', 'asset.id')
.whereRef('album_asset.albumsId', '=', 'album.id')
.innerJoin('album_asset', 'album_asset.assetId', 'asset.id')
.whereRef('album_asset.albumId', '=', 'album.id')
.where('asset.deletedAt', 'is', null)
.$call(withDefaultVisibility)
.orderBy('asset.fileCreatedAt', 'desc')
@@ -92,19 +92,19 @@ export class AlbumRepository {
return this.db
.selectFrom('album')
.selectAll('album')
.innerJoin('album_asset', 'album_asset.albumsId', 'album.id')
.innerJoin('album_asset', 'album_asset.albumId', 'album.id')
.where((eb) =>
eb.or([
eb('album.ownerId', '=', ownerId),
eb.exists(
eb
.selectFrom('album_user')
.whereRef('album_user.albumsId', '=', 'album.id')
.where('album_user.usersId', '=', ownerId),
.whereRef('album_user.albumId', '=', 'album.id')
.where('album_user.userId', '=', ownerId),
),
]),
)
.where('album_asset.assetsId', '=', assetId)
.where('album_asset.assetId', '=', assetId)
.where('album.deletedAt', 'is', null)
.orderBy('album.createdAt', 'desc')
.select(withOwner)
@@ -125,16 +125,16 @@ export class AlbumRepository {
this.db
.selectFrom('asset')
.$call(withDefaultVisibility)
.innerJoin('album_asset', 'album_asset.assetsId', 'asset.id')
.select('album_asset.albumsId as albumId')
.innerJoin('album_asset', 'album_asset.assetId', 'asset.id')
.select('album_asset.albumId as albumId')
.select((eb) => eb.fn.min(sql<Date>`("asset"."localDateTime" AT TIME ZONE 'UTC'::text)::date`).as('startDate'))
.select((eb) => eb.fn.max(sql<Date>`("asset"."localDateTime" AT TIME ZONE 'UTC'::text)::date`).as('endDate'))
// lastModifiedAssetTimestamp is only used in the mobile app; please remove if not needed
.select((eb) => eb.fn.max('asset.updatedAt').as('lastModifiedAssetTimestamp'))
.select((eb) => sql<number>`${eb.fn.count('asset.id')}::int`.as('assetCount'))
.where('album_asset.albumsId', 'in', ids)
.where('album_asset.albumId', 'in', ids)
.where('asset.deletedAt', 'is', null)
.groupBy('album_asset.albumsId')
.groupBy('album_asset.albumId')
.execute()
);
}
@@ -166,8 +166,8 @@ export class AlbumRepository {
eb.exists(
eb
.selectFrom('album_user')
.whereRef('album_user.albumsId', '=', 'album.id')
.where((eb) => eb.or([eb('album.ownerId', '=', ownerId), eb('album_user.usersId', '=', ownerId)])),
.whereRef('album_user.albumId', '=', 'album.id')
.where((eb) => eb.or([eb('album.ownerId', '=', ownerId), eb('album_user.userId', '=', ownerId)])),
),
eb.exists(
eb
@@ -195,7 +195,7 @@ export class AlbumRepository {
.selectAll('album')
.where('album.ownerId', '=', ownerId)
.where('album.deletedAt', 'is', null)
.where((eb) => eb.not(eb.exists(eb.selectFrom('album_user').whereRef('album_user.albumsId', '=', 'album.id'))))
.where((eb) => eb.not(eb.exists(eb.selectFrom('album_user').whereRef('album_user.albumId', '=', 'album.id'))))
.where((eb) => eb.not(eb.exists(eb.selectFrom('shared_link').whereRef('shared_link.albumId', '=', 'album.id'))))
.select(withOwner)
.orderBy('album.createdAt', 'desc')
@@ -217,7 +217,7 @@ export class AlbumRepository {
@GenerateSql({ params: [[DummyValue.UUID]] })
@Chunked()
async removeAssetsFromAll(assetIds: string[]): Promise<void> {
await this.db.deleteFrom('album_asset').where('album_asset.assetsId', 'in', assetIds).execute();
await this.db.deleteFrom('album_asset').where('album_asset.assetId', 'in', assetIds).execute();
}
@Chunked({ paramIndex: 1 })
@@ -228,8 +228,8 @@ export class AlbumRepository {
await this.db
.deleteFrom('album_asset')
.where('album_asset.albumsId', '=', albumId)
.where('album_asset.assetsId', 'in', assetIds)
.where('album_asset.albumId', '=', albumId)
.where('album_asset.assetId', 'in', assetIds)
.execute();
}
@@ -250,10 +250,10 @@ export class AlbumRepository {
return this.db
.selectFrom('album_asset')
.selectAll()
.where('album_asset.albumsId', '=', albumId)
.where('album_asset.assetsId', 'in', assetIds)
.where('album_asset.albumId', '=', albumId)
.where('album_asset.assetId', 'in', assetIds)
.execute()
.then((results) => new Set(results.map(({ assetsId }) => assetsId)));
.then((results) => new Set(results.map(({ assetId }) => assetId)));
}
async addAssetIds(albumId: string, assetIds: string[]): Promise<void> {
@@ -276,7 +276,7 @@ export class AlbumRepository {
await tx
.insertInto('album_user')
.values(
albumUsers.map((albumUser) => ({ albumsId: newAlbum.id, usersId: albumUser.userId, role: albumUser.role })),
albumUsers.map((albumUser) => ({ albumId: newAlbum.id, userId: albumUser.userId, role: albumUser.role })),
)
.execute();
}
@@ -317,12 +317,12 @@ export class AlbumRepository {
await db
.insertInto('album_asset')
.values(assetIds.map((assetId) => ({ albumsId: albumId, assetsId: assetId })))
.values(assetIds.map((assetId) => ({ albumId, assetId })))
.execute();
}
@Chunked({ chunkSize: 30_000 })
async addAssetIdsToAlbums(values: { albumsId: string; assetsId: string }[]): Promise<void> {
async addAssetIdsToAlbums(values: { albumId: string; assetId: string }[]): Promise<void> {
if (values.length === 0) {
return;
}
@@ -344,7 +344,7 @@ export class AlbumRepository {
.updateTable('album')
.set((eb) => ({
albumThumbnailAssetId: this.updateThumbnailBuilder(eb)
.select('album_asset.assetsId')
.select('album_asset.assetId')
.orderBy('asset.fileCreatedAt', 'desc')
.limit(1),
}))
@@ -360,7 +360,7 @@ export class AlbumRepository {
eb.exists(
this.updateThumbnailBuilder(eb)
.select(sql`1`.as('1'))
.whereRef('album.albumThumbnailAssetId', '=', 'album_asset.assetsId'), // Has invalid assets
.whereRef('album.albumThumbnailAssetId', '=', 'album_asset.assetId'), // Has invalid assets
),
),
]),
@@ -375,9 +375,9 @@ export class AlbumRepository {
return eb
.selectFrom('album_asset')
.innerJoin('asset', (join) =>
join.onRef('album_asset.assetsId', '=', 'asset.id').on('asset.deletedAt', 'is', null),
join.onRef('album_asset.assetId', '=', 'asset.id').on('asset.deletedAt', 'is', null),
)
.whereRef('album_asset.albumsId', '=', 'album.id');
.whereRef('album_asset.albumId', '=', 'album.id');
}
/**
@@ -388,9 +388,9 @@ export class AlbumRepository {
getContributorCounts(id: string) {
return this.db
.selectFrom('album_asset')
.innerJoin('asset', 'asset.id', 'assetsId')
.innerJoin('asset', 'asset.id', 'assetId')
.where('asset.deletedAt', 'is', sql.lit(null))
.where('album_asset.albumsId', '=', id)
.where('album_asset.albumId', '=', id)
.select('asset.ownerId as userId')
.select((eb) => eb.fn.countAll<number>().as('assetCount'))
.groupBy('asset.ownerId')
@@ -405,8 +405,8 @@ export class AlbumRepository {
.expression((eb) =>
eb
.selectFrom('album_asset')
.select((eb) => ['album_asset.albumsId', eb.val(targetAssetId).as('assetsId')])
.where('album_asset.assetsId', '=', sourceAssetId),
.select((eb) => ['album_asset.albumId', eb.val(targetAssetId).as('assetId')])
.where('album_asset.assetId', '=', sourceAssetId),
)
.onConflict((oc) => oc.doNothing())
.execute();

View File

@@ -46,8 +46,8 @@ export class AssetJobRepository {
eb
.selectFrom('tag')
.select(['tag.value'])
.innerJoin('tag_asset', 'tag.id', 'tag_asset.tagsId')
.whereRef('asset.id', '=', 'tag_asset.assetsId'),
.innerJoin('tag_asset', 'tag.id', 'tag_asset.tagId')
.whereRef('asset.id', '=', 'tag_asset.assetId'),
).as('tags'),
)
.limit(1)

View File

@@ -563,8 +563,8 @@ export class AssetRepository {
.$if(!!options.visibility, (qb) => qb.where('asset.visibility', '=', options.visibility!))
.$if(!!options.albumId, (qb) =>
qb
.innerJoin('album_asset', 'asset.id', 'album_asset.assetsId')
.where('album_asset.albumsId', '=', asUuid(options.albumId!)),
.innerJoin('album_asset', 'asset.id', 'album_asset.assetId')
.where('album_asset.albumId', '=', asUuid(options.albumId!)),
)
.$if(!!options.personId, (qb) => hasPeople(qb, [options.personId!]))
.$if(!!options.withStacked, (qb) =>
@@ -641,8 +641,8 @@ export class AssetRepository {
eb.exists(
eb
.selectFrom('album_asset')
.whereRef('album_asset.assetsId', '=', 'asset.id')
.where('album_asset.albumsId', '=', asUuid(options.albumId!)),
.whereRef('album_asset.assetId', '=', 'asset.id')
.where('album_asset.albumId', '=', asUuid(options.albumId!)),
),
),
)

View File

@@ -26,8 +26,8 @@ export class DownloadRepository {
downloadAlbumId(albumId: string) {
return builder(this.db)
.innerJoin('album_asset', 'asset.id', 'album_asset.assetsId')
.where('album_asset.albumsId', '=', albumId)
.innerJoin('album_asset', 'asset.id', 'album_asset.assetId')
.where('album_asset.albumId', '=', albumId)
.stream();
}

View File

@@ -126,8 +126,8 @@ export class MapRepository {
eb.exists((eb) =>
eb
.selectFrom('album_asset')
.whereRef('asset.id', '=', 'album_asset.assetsId')
.where('album_asset.albumsId', 'in', albumIds),
.whereRef('asset.id', '=', 'album_asset.assetId')
.where('album_asset.albumId', 'in', albumIds),
),
);
}

View File

@@ -18,7 +18,7 @@ export class MemoryRepository implements IBulkAsset {
await this.db
.deleteFrom('memory_asset')
.using('asset')
.whereRef('memory_asset.assetsId', '=', 'asset.id')
.whereRef('memory_asset.assetId', '=', 'asset.id')
.where('asset.visibility', '!=', AssetVisibility.Timeline)
.execute();
@@ -64,7 +64,7 @@ export class MemoryRepository implements IBulkAsset {
eb
.selectFrom('asset')
.selectAll('asset')
.innerJoin('memory_asset', 'asset.id', 'memory_asset.assetsId')
.innerJoin('memory_asset', 'asset.id', 'memory_asset.assetId')
.whereRef('memory_asset.memoriesId', '=', 'memory.id')
.orderBy('asset.fileCreatedAt', 'asc')
.where('asset.visibility', '=', sql.lit(AssetVisibility.Timeline))
@@ -86,7 +86,7 @@ export class MemoryRepository implements IBulkAsset {
const { id } = await tx.insertInto('memory').values(memory).returning('id').executeTakeFirstOrThrow();
if (assetIds.size > 0) {
const values = [...assetIds].map((assetId) => ({ memoriesId: id, assetsId: assetId }));
const values = [...assetIds].map((assetId) => ({ memoriesId: id, assetId }));
await tx.insertInto('memory_asset').values(values).execute();
}
@@ -116,12 +116,12 @@ export class MemoryRepository implements IBulkAsset {
const results = await this.db
.selectFrom('memory_asset')
.select(['assetsId'])
.select(['assetId'])
.where('memoriesId', '=', id)
.where('assetsId', 'in', assetIds)
.where('assetId', 'in', assetIds)
.execute();
return new Set(results.map(({ assetsId }) => assetsId));
return new Set(results.map(({ assetId }) => assetId));
}
@GenerateSql({ params: [DummyValue.UUID, [DummyValue.UUID]] })
@@ -132,7 +132,7 @@ export class MemoryRepository implements IBulkAsset {
await this.db
.insertInto('memory_asset')
.values(assetIds.map((assetId) => ({ memoriesId: id, assetsId: assetId })))
.values(assetIds.map((assetId) => ({ memoriesId: id, assetId })))
.execute();
}
@@ -143,7 +143,7 @@ export class MemoryRepository implements IBulkAsset {
return;
}
await this.db.deleteFrom('memory_asset').where('memoriesId', '=', id).where('assetsId', 'in', assetIds).execute();
await this.db.deleteFrom('memory_asset').where('memoriesId', '=', id).where('assetId', 'in', assetIds).execute();
}
private getByIdBuilder(id: string) {
@@ -155,7 +155,7 @@ export class MemoryRepository implements IBulkAsset {
eb
.selectFrom('asset')
.selectAll('asset')
.innerJoin('memory_asset', 'asset.id', 'memory_asset.assetsId')
.innerJoin('memory_asset', 'asset.id', 'memory_asset.assetId')
.whereRef('memory_asset.memoriesId', '=', 'memory.id')
.orderBy('asset.fileCreatedAt', 'asc')
.where('asset.visibility', '=', sql.lit(AssetVisibility.Timeline))

View File

@@ -6,15 +6,15 @@ import { DB } from 'src/schema';
export class SharedLinkAssetRepository {
constructor(@InjectKysely() private db: Kysely<DB>) {}
async remove(sharedLinkId: string, assetsId: string[]) {
async remove(sharedLinkId: string, assetId: string[]) {
const deleted = await this.db
.deleteFrom('shared_link_asset')
.where('shared_link_asset.sharedLinksId', '=', sharedLinkId)
.where('shared_link_asset.assetsId', 'in', assetsId)
.returning('assetsId')
.where('shared_link_asset.sharedLinkId', '=', sharedLinkId)
.where('shared_link_asset.assetId', 'in', assetId)
.returning('assetId')
.execute();
return deleted.map((row) => row.assetsId);
return deleted.map((row) => row.assetId);
}
@GenerateSql({ params: [{ sourceAssetId: DummyValue.UUID, targetAssetId: DummyValue.UUID }] })
@@ -24,8 +24,8 @@ export class SharedLinkAssetRepository {
.expression((eb) =>
eb
.selectFrom('shared_link_asset')
.select((eb) => [eb.val(targetAssetId).as('assetsId'), 'shared_link_asset.sharedLinksId'])
.where('shared_link_asset.assetsId', '=', sourceAssetId),
.select((eb) => [eb.val(targetAssetId).as('assetId'), 'shared_link_asset.sharedLinkId'])
.where('shared_link_asset.assetId', '=', sourceAssetId),
)
.onConflict((oc) => oc.doNothing())
.execute();

View File

@@ -28,8 +28,8 @@ export class SharedLinkRepository {
(eb) =>
eb
.selectFrom('shared_link_asset')
.whereRef('shared_link.id', '=', 'shared_link_asset.sharedLinksId')
.innerJoin('asset', 'asset.id', 'shared_link_asset.assetsId')
.whereRef('shared_link.id', '=', 'shared_link_asset.sharedLinkId')
.innerJoin('asset', 'asset.id', 'shared_link_asset.assetId')
.where('asset.deletedAt', 'is', null)
.selectAll('asset')
.innerJoinLateral(
@@ -53,13 +53,13 @@ export class SharedLinkRepository {
.selectAll('album')
.whereRef('album.id', '=', 'shared_link.albumId')
.where('album.deletedAt', 'is', null)
.leftJoin('album_asset', 'album_asset.albumsId', 'album.id')
.leftJoin('album_asset', 'album_asset.albumId', 'album.id')
.leftJoinLateral(
(eb) =>
eb
.selectFrom('asset')
.selectAll('asset')
.whereRef('album_asset.assetsId', '=', 'asset.id')
.whereRef('album_asset.assetId', '=', 'asset.id')
.where('asset.deletedAt', 'is', null)
.innerJoinLateral(
(eb) =>
@@ -123,13 +123,13 @@ export class SharedLinkRepository {
.selectFrom('shared_link')
.selectAll('shared_link')
.where('shared_link.userId', '=', userId)
.leftJoin('shared_link_asset', 'shared_link_asset.sharedLinksId', 'shared_link.id')
.leftJoin('shared_link_asset', 'shared_link_asset.sharedLinkId', 'shared_link.id')
.leftJoinLateral(
(eb) =>
eb
.selectFrom('asset')
.select((eb) => eb.fn.jsonAgg('asset').as('assets'))
.whereRef('asset.id', '=', 'shared_link_asset.assetsId')
.whereRef('asset.id', '=', 'shared_link_asset.assetId')
.where('asset.deletedAt', 'is', null)
.as('assets'),
(join) => join.onTrue(),
@@ -215,7 +215,7 @@ export class SharedLinkRepository {
if (entity.assetIds && entity.assetIds.length > 0) {
await this.db
.insertInto('shared_link_asset')
.values(entity.assetIds!.map((assetsId) => ({ assetsId, sharedLinksId: id })))
.values(entity.assetIds!.map((assetId) => ({ assetId, sharedLinkId: id })))
.execute();
}
@@ -233,7 +233,7 @@ export class SharedLinkRepository {
if (entity.assetIds && entity.assetIds.length > 0) {
await this.db
.insertInto('shared_link_asset')
.values(entity.assetIds!.map((assetsId) => ({ assetsId, sharedLinksId: id })))
.values(entity.assetIds!.map((assetId) => ({ assetId, sharedLinkId: id })))
.execute();
}
@@ -249,12 +249,12 @@ export class SharedLinkRepository {
.selectFrom('shared_link')
.selectAll('shared_link')
.where('shared_link.id', '=', id)
.leftJoin('shared_link_asset', 'shared_link_asset.sharedLinksId', 'shared_link.id')
.leftJoin('shared_link_asset', 'shared_link_asset.sharedLinkId', 'shared_link.id')
.leftJoinLateral(
(eb) =>
eb
.selectFrom('asset')
.whereRef('asset.id', '=', 'shared_link_asset.assetsId')
.whereRef('asset.id', '=', 'shared_link_asset.assetId')
.selectAll('asset')
.innerJoinLateral(
(eb) =>

View File

@@ -33,8 +33,8 @@ const withAssets = (eb: ExpressionBuilder<DB, 'stack'>, withTags = false) => {
eb
.selectFrom('tag')
.select(columns.tag)
.innerJoin('tag_asset', 'tag.id', 'tag_asset.tagsId')
.whereRef('tag_asset.assetsId', '=', 'asset.id'),
.innerJoin('tag_asset', 'tag.id', 'tag_asset.tagId')
.whereRef('tag_asset.assetId', '=', 'asset.id'),
).as('tags'),
),
)

View File

@@ -143,8 +143,8 @@ class AlbumSync extends BaseSync {
getCreatedAfter({ nowId, userId, afterCreateId }: SyncCreatedAfterOptions) {
return this.db
.selectFrom('album_user')
.select(['albumsId as id', 'createId'])
.where('usersId', '=', userId)
.select(['albumId as id', 'createId'])
.where('userId', '=', userId)
.$if(!!afterCreateId, (qb) => qb.where('createId', '>=', afterCreateId!))
.where('createId', '<', nowId)
.orderBy('createId', 'asc')
@@ -168,8 +168,8 @@ class AlbumSync extends BaseSync {
const userId = options.userId;
return this.upsertQuery('album', options)
.distinctOn(['album.id', 'album.updateId'])
.leftJoin('album_user as album_users', 'album.id', 'album_users.albumsId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_users.usersId', '=', userId)]))
.leftJoin('album_user as album_users', 'album.id', 'album_users.albumId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_users.userId', '=', userId)]))
.select([
'album.id',
'album.ownerId',
@@ -190,10 +190,10 @@ class AlbumAssetSync extends BaseSync {
@GenerateSql({ params: [dummyBackfillOptions, DummyValue.UUID], stream: true })
getBackfill(options: SyncBackfillOptions, albumId: string) {
return this.backfillQuery('album_asset', options)
.innerJoin('asset', 'asset.id', 'album_asset.assetsId')
.innerJoin('asset', 'asset.id', 'album_asset.assetId')
.select(columns.syncAsset)
.select('album_asset.updateId')
.where('album_asset.albumsId', '=', albumId)
.where('album_asset.albumId', '=', albumId)
.stream();
}
@@ -201,13 +201,13 @@ class AlbumAssetSync extends BaseSync {
getUpdates(options: SyncQueryOptions, albumToAssetAck: SyncAck) {
const userId = options.userId;
return this.upsertQuery('asset', options)
.innerJoin('album_asset', 'album_asset.assetsId', 'asset.id')
.innerJoin('album_asset', 'album_asset.assetId', 'asset.id')
.select(columns.syncAsset)
.select('asset.updateId')
.where('album_asset.updateId', '<=', albumToAssetAck.updateId) // Ensure we only send updates for assets that the client already knows about
.innerJoin('album', 'album.id', 'album_asset.albumsId')
.leftJoin('album_user', 'album_user.albumsId', 'album_asset.albumsId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.usersId', '=', userId)]))
.innerJoin('album', 'album.id', 'album_asset.albumId')
.leftJoin('album_user', 'album_user.albumId', 'album_asset.albumId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.userId', '=', userId)]))
.stream();
}
@@ -216,11 +216,11 @@ class AlbumAssetSync extends BaseSync {
const userId = options.userId;
return this.upsertQuery('album_asset', options)
.select('album_asset.updateId')
.innerJoin('asset', 'asset.id', 'album_asset.assetsId')
.innerJoin('asset', 'asset.id', 'album_asset.assetId')
.select(columns.syncAsset)
.innerJoin('album', 'album.id', 'album_asset.albumsId')
.leftJoin('album_user', 'album_user.albumsId', 'album_asset.albumsId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.usersId', '=', userId)]))
.innerJoin('album', 'album.id', 'album_asset.albumId')
.leftJoin('album_user', 'album_user.albumId', 'album_asset.albumId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.userId', '=', userId)]))
.stream();
}
}
@@ -229,10 +229,10 @@ class AlbumAssetExifSync extends BaseSync {
@GenerateSql({ params: [dummyBackfillOptions, DummyValue.UUID], stream: true })
getBackfill(options: SyncBackfillOptions, albumId: string) {
return this.backfillQuery('album_asset', options)
.innerJoin('asset_exif', 'asset_exif.assetId', 'album_asset.assetsId')
.innerJoin('asset_exif', 'asset_exif.assetId', 'album_asset.assetId')
.select(columns.syncAssetExif)
.select('album_asset.updateId')
.where('album_asset.albumsId', '=', albumId)
.where('album_asset.albumId', '=', albumId)
.stream();
}
@@ -240,13 +240,13 @@ class AlbumAssetExifSync extends BaseSync {
getUpdates(options: SyncQueryOptions, albumToAssetAck: SyncAck) {
const userId = options.userId;
return this.upsertQuery('asset_exif', options)
.innerJoin('album_asset', 'album_asset.assetsId', 'asset_exif.assetId')
.innerJoin('album_asset', 'album_asset.assetId', 'asset_exif.assetId')
.select(columns.syncAssetExif)
.select('asset_exif.updateId')
.where('album_asset.updateId', '<=', albumToAssetAck.updateId) // Ensure we only send exif updates for assets that the client already knows about
.innerJoin('album', 'album.id', 'album_asset.albumsId')
.leftJoin('album_user', 'album_user.albumsId', 'album_asset.albumsId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.usersId', '=', userId)]))
.innerJoin('album', 'album.id', 'album_asset.albumId')
.leftJoin('album_user', 'album_user.albumId', 'album_asset.albumId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.userId', '=', userId)]))
.stream();
}
@@ -255,11 +255,11 @@ class AlbumAssetExifSync extends BaseSync {
const userId = options.userId;
return this.upsertQuery('album_asset', options)
.select('album_asset.updateId')
.innerJoin('asset_exif', 'asset_exif.assetId', 'album_asset.assetsId')
.innerJoin('asset_exif', 'asset_exif.assetId', 'album_asset.assetId')
.select(columns.syncAssetExif)
.innerJoin('album', 'album.id', 'album_asset.albumsId')
.leftJoin('album_user', 'album_user.albumsId', 'album_asset.albumsId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.usersId', '=', userId)]))
.innerJoin('album', 'album.id', 'album_asset.albumId')
.leftJoin('album_user', 'album_user.albumId', 'album_asset.albumId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.userId', '=', userId)]))
.stream();
}
}
@@ -268,8 +268,8 @@ class AlbumToAssetSync extends BaseSync {
@GenerateSql({ params: [dummyBackfillOptions, DummyValue.UUID], stream: true })
getBackfill(options: SyncBackfillOptions, albumId: string) {
return this.backfillQuery('album_asset', options)
.select(['album_asset.assetsId as assetId', 'album_asset.albumsId as albumId', 'album_asset.updateId'])
.where('album_asset.albumsId', '=', albumId)
.select(['album_asset.assetId as assetId', 'album_asset.albumId as albumId', 'album_asset.updateId'])
.where('album_asset.albumId', '=', albumId)
.stream();
}
@@ -290,8 +290,8 @@ class AlbumToAssetSync extends BaseSync {
eb.parens(
eb
.selectFrom('album_user')
.select(['album_user.albumsId as id'])
.where('album_user.usersId', '=', userId),
.select(['album_user.albumId as id'])
.where('album_user.userId', '=', userId),
),
),
),
@@ -307,10 +307,10 @@ class AlbumToAssetSync extends BaseSync {
getUpserts(options: SyncQueryOptions) {
const userId = options.userId;
return this.upsertQuery('album_asset', options)
.select(['album_asset.assetsId as assetId', 'album_asset.albumsId as albumId', 'album_asset.updateId'])
.innerJoin('album', 'album.id', 'album_asset.albumsId')
.leftJoin('album_user', 'album_user.albumsId', 'album_asset.albumsId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.usersId', '=', userId)]))
.select(['album_asset.assetId as assetId', 'album_asset.albumId as albumId', 'album_asset.updateId'])
.innerJoin('album', 'album.id', 'album_asset.albumId')
.leftJoin('album_user', 'album_user.albumId', 'album_asset.albumId')
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.userId', '=', userId)]))
.stream();
}
}
@@ -321,7 +321,7 @@ class AlbumUserSync extends BaseSync {
return this.backfillQuery('album_user', options)
.select(columns.syncAlbumUser)
.select('album_user.updateId')
.where('albumsId', '=', albumId)
.where('albumId', '=', albumId)
.stream();
}
@@ -342,8 +342,8 @@ class AlbumUserSync extends BaseSync {
eb.parens(
eb
.selectFrom('album_user')
.select(['album_user.albumsId as id'])
.where('album_user.usersId', '=', userId),
.select(['album_user.albumId as id'])
.where('album_user.userId', '=', userId),
),
),
),
@@ -363,7 +363,7 @@ class AlbumUserSync extends BaseSync {
.select('album_user.updateId')
.where((eb) =>
eb(
'album_user.albumsId',
'album_user.albumId',
'in',
eb
.selectFrom('album')
@@ -373,8 +373,8 @@ class AlbumUserSync extends BaseSync {
eb.parens(
eb
.selectFrom('album_user as albumUsers')
.select(['albumUsers.albumsId as id'])
.where('albumUsers.usersId', '=', userId),
.select(['albumUsers.albumId as id'])
.where('albumUsers.userId', '=', userId),
),
),
),
@@ -550,7 +550,7 @@ class MemoryToAssetSync extends BaseSync {
@GenerateSql({ params: [dummyQueryOptions], stream: true })
getUpserts(options: SyncQueryOptions) {
return this.upsertQuery('memory_asset', options)
.select(['memoriesId as memoryId', 'assetsId as assetId'])
.select(['memoriesId as memoryId', 'assetId as assetId'])
.select('updateId')
.where('memoriesId', 'in', (eb) => eb.selectFrom('memory').select('id').where('ownerId', '=', options.userId))
.stream();
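A pattern worth naming recurs across the album-scoped sync queries above: join album for the ownership check, left-join album_user for shared access, and accept a row when either side matches. A minimal sketch of that predicate as a standalone helper (the helper name and the loosened types are illustrative; the production queries inline these calls instead):

import { type SelectQueryBuilder } from 'kysely';

// Hypothetical helper distilling the recurring access filter; types loosened for brevity.
function withAlbumAccess(qb: SelectQueryBuilder<any, any, any>, userId: string) {
  return qb
    .innerJoin('album', 'album.id', 'album_asset.albumId')
    .leftJoin('album_user', 'album_user.albumId', 'album_asset.albumId')
    .where((eb: any) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.userId', '=', userId)]));
}

The left join can fan out when an album is shared with several users, which is why some of the queries above pair it with distinctOn.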

View File

@@ -97,9 +97,9 @@ export class TagRepository {
const results = await this.db
.selectFrom('tag_asset')
.select(['assetsId as assetId'])
.where('tagsId', '=', tagId)
.where('assetsId', 'in', assetIds)
.select(['assetId as assetId'])
.where('tagId', '=', tagId)
.where('assetId', 'in', assetIds)
.execute();
return new Set(results.map(({ assetId }) => assetId));
@@ -114,7 +114,7 @@ export class TagRepository {
await this.db
.insertInto('tag_asset')
.values(assetIds.map((assetId) => ({ tagsId: tagId, assetsId: assetId })))
.values(assetIds.map((assetId) => ({ tagId, assetId })))
.execute();
}
@@ -125,10 +125,10 @@ export class TagRepository {
return;
}
await this.db.deleteFrom('tag_asset').where('tagsId', '=', tagId).where('assetsId', 'in', assetIds).execute();
await this.db.deleteFrom('tag_asset').where('tagId', '=', tagId).where('assetId', 'in', assetIds).execute();
}
@GenerateSql({ params: [[{ assetId: DummyValue.UUID, tagsIds: [DummyValue.UUID] }]] })
@GenerateSql({ params: [[{ assetId: DummyValue.UUID, tagIds: DummyValue.UUID }]] })
@Chunked()
upsertAssetIds(items: Insertable<TagAssetTable>[]) {
if (items.length === 0) {
@@ -147,7 +147,7 @@ export class TagRepository {
@Chunked({ paramIndex: 1 })
replaceAssetTags(assetId: string, tagIds: string[]) {
return this.db.transaction().execute(async (tx) => {
await tx.deleteFrom('tag_asset').where('assetsId', '=', assetId).execute();
await tx.deleteFrom('tag_asset').where('assetId', '=', assetId).execute();
if (tagIds.length === 0) {
return;
@@ -155,7 +155,7 @@ export class TagRepository {
return tx
.insertInto('tag_asset')
.values(tagIds.map((tagId) => ({ tagsId: tagId, assetsId: assetId })))
.values(tagIds.map((tagId) => ({ tagId, assetId })))
.onConflict((oc) => oc.doNothing())
.returningAll()
.execute();
@@ -170,7 +170,7 @@ export class TagRepository {
exists(
selectFrom('tag_closure')
.whereRef('tag.id', '=', 'tag_closure.id_ancestor')
.innerJoin('tag_asset', 'tag_closure.id_descendant', 'tag_asset.tagsId'),
.innerJoin('tag_asset', 'tag_closure.id_descendant', 'tag_asset.tagId'),
),
),
)
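The tag_closure join at the end is a closure-table lookup: the table holds one (id_ancestor, id_descendant) row for every ancestor/descendant pair in the tag tree, conventionally including each tag's self-pair, so a single join answers "tagged with T or anything nested under T". A sketch of the core query shape, assuming that self-pair convention (the diff itself does not spell it out) and a Kysely instance named db:

// Assets carrying a given tag or any of its descendant tags (sketch).
const assetIdsForSubtree = (tagId: string) =>
  db
    .selectFrom('tag_asset')
    .innerJoin('tag_closure', 'tag_closure.id_descendant', 'tag_asset.tagId')
    .where('tag_closure.id_ancestor', '=', tagId)
    .select('tag_asset.assetId');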

View File

@@ -29,7 +29,7 @@ export const album_user_after_insert = registerFunction({
body: `
BEGIN
UPDATE album SET "updatedAt" = clock_timestamp(), "updateId" = immich_uuid_v7(clock_timestamp())
WHERE "id" IN (SELECT DISTINCT "albumsId" FROM inserted_rows);
WHERE "id" IN (SELECT DISTINCT "albumId" FROM inserted_rows);
RETURN NULL;
END`,
});
@@ -139,8 +139,8 @@ export const album_asset_delete_audit = registerFunction({
body: `
BEGIN
INSERT INTO album_asset_audit ("albumId", "assetId")
SELECT "albumsId", "assetsId" FROM OLD
WHERE "albumsId" IN (SELECT "id" FROM album WHERE "id" IN (SELECT "albumsId" FROM OLD));
SELECT "albumId", "assetId" FROM OLD
WHERE "albumId" IN (SELECT "id" FROM album WHERE "id" IN (SELECT "albumId" FROM OLD));
RETURN NULL;
END`,
});
@@ -152,12 +152,12 @@ export const album_user_delete_audit = registerFunction({
body: `
BEGIN
INSERT INTO album_audit ("albumId", "userId")
SELECT "albumsId", "usersId"
SELECT "albumId", "userId"
FROM OLD;
IF pg_trigger_depth() = 1 THEN
INSERT INTO album_user_audit ("albumId", "userId")
SELECT "albumsId", "usersId"
SELECT "albumId", "userId"
FROM OLD;
END IF;
@@ -185,7 +185,7 @@ export const memory_asset_delete_audit = registerFunction({
body: `
BEGIN
INSERT INTO memory_asset_audit ("memoryId", "assetId")
SELECT "memoriesId", "assetsId" FROM OLD
SELECT "memoriesId", "assetId" FROM OLD
WHERE "memoriesId" IN (SELECT "id" FROM memory WHERE "id" IN (SELECT "memoriesId" FROM OLD));
RETURN NULL;
END`,
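These function bodies select FROM OLD, which only works when the function is attached as a statement-level trigger that exposes the deleted rows as a transition table. The attachment itself happens elsewhere in the codebase; a sketch of what it looks like in plain PostgreSQL (trigger name and wiring are illustrative, not taken from this diff):

await sql`CREATE TRIGGER album_asset_delete_audit
  AFTER DELETE ON album_asset
  REFERENCING OLD TABLE AS "OLD"
  FOR EACH STATEMENT
  EXECUTE FUNCTION album_asset_delete_audit();`.execute(db);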

View File

@@ -0,0 +1,99 @@
import { Kysely, sql } from 'kysely';
export async function up(db: Kysely<any>): Promise<void> {
// rename columns
await sql`ALTER TABLE "album_asset" RENAME COLUMN "albumsId" TO "albumId";`.execute(db);
await sql`ALTER TABLE "album_asset" RENAME COLUMN "assetsId" TO "assetId";`.execute(db);
await sql`ALTER TABLE "album_user" RENAME COLUMN "albumsId" TO "albumId";`.execute(db);
await sql`ALTER TABLE "album_user" RENAME COLUMN "usersId" TO "userId";`.execute(db);
await sql`ALTER TABLE "memory_asset" RENAME COLUMN "assetsId" TO "assetId";`.execute(db);
await sql`ALTER TABLE "shared_link_asset" RENAME COLUMN "assetsId" TO "assetId";`.execute(db);
await sql`ALTER TABLE "shared_link_asset" RENAME COLUMN "sharedLinksId" TO "sharedLinkId";`.execute(db);
await sql`ALTER TABLE "tag_asset" RENAME COLUMN "assetsId" TO "assetId";`.execute(db);
await sql`ALTER TABLE "tag_asset" RENAME COLUMN "tagsId" TO "tagId";`.execute(db);
// rename constraints
await sql`ALTER TABLE "album_asset" RENAME CONSTRAINT "album_asset_albumsId_fkey" TO "album_asset_albumId_fkey";`.execute(db);
await sql`ALTER TABLE "album_asset" RENAME CONSTRAINT "album_asset_assetsId_fkey" TO "album_asset_assetId_fkey";`.execute(db);
await sql`ALTER TABLE "album_user" RENAME CONSTRAINT "album_user_albumsId_fkey" TO "album_user_albumId_fkey";`.execute(db);
await sql`ALTER TABLE "album_user" RENAME CONSTRAINT "album_user_usersId_fkey" TO "album_user_userId_fkey";`.execute(db);
await sql`ALTER TABLE "memory_asset" RENAME CONSTRAINT "memory_asset_assetsId_fkey" TO "memory_asset_assetId_fkey";`.execute(db);
await sql`ALTER TABLE "shared_link_asset" RENAME CONSTRAINT "shared_link_asset_assetsId_fkey" TO "shared_link_asset_assetId_fkey";`.execute(db);
await sql`ALTER TABLE "shared_link_asset" RENAME CONSTRAINT "shared_link_asset_sharedLinksId_fkey" TO "shared_link_asset_sharedLinkId_fkey";`.execute(db);
await sql`ALTER TABLE "tag_asset" RENAME CONSTRAINT "tag_asset_assetsId_fkey" TO "tag_asset_assetId_fkey";`.execute(db);
await sql`ALTER TABLE "tag_asset" RENAME CONSTRAINT "tag_asset_tagsId_fkey" TO "tag_asset_tagId_fkey";`.execute(db);
// rename indexes
await sql`ALTER INDEX "album_asset_albumsId_idx" RENAME TO "album_asset_albumId_idx";`.execute(db);
await sql`ALTER INDEX "album_asset_assetsId_idx" RENAME TO "album_asset_assetId_idx";`.execute(db);
await sql`ALTER INDEX "album_user_usersId_idx" RENAME TO "album_user_userId_idx";`.execute(db);
await sql`ALTER INDEX "album_user_albumsId_idx" RENAME TO "album_user_albumId_idx";`.execute(db);
await sql`ALTER INDEX "memory_asset_assetsId_idx" RENAME TO "memory_asset_assetId_idx";`.execute(db);
await sql`ALTER INDEX "shared_link_asset_sharedLinksId_idx" RENAME TO "shared_link_asset_sharedLinkId_idx";`.execute(db);
await sql`ALTER INDEX "shared_link_asset_assetsId_idx" RENAME TO "shared_link_asset_assetId_idx";`.execute(db);
await sql`ALTER INDEX "tag_asset_assetsId_idx" RENAME TO "tag_asset_assetId_idx";`.execute(db);
await sql`ALTER INDEX "tag_asset_tagsId_idx" RENAME TO "tag_asset_tagId_idx";`.execute(db);
await sql`ALTER INDEX "tag_asset_assetsId_tagsId_idx" RENAME TO "tag_asset_assetId_tagId_idx";`.execute(db);
// update triggers and functions
await sql`CREATE OR REPLACE FUNCTION album_user_after_insert()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
BEGIN
UPDATE album SET "updatedAt" = clock_timestamp(), "updateId" = immich_uuid_v7(clock_timestamp())
WHERE "id" IN (SELECT DISTINCT "albumId" FROM inserted_rows);
RETURN NULL;
END
$$;`.execute(db);
await sql`CREATE OR REPLACE FUNCTION album_asset_delete_audit()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
BEGIN
INSERT INTO album_asset_audit ("albumId", "assetId")
SELECT "albumId", "assetId" FROM OLD
WHERE "albumId" IN (SELECT "id" FROM album WHERE "id" IN (SELECT "albumId" FROM OLD));
RETURN NULL;
END
$$;`.execute(db);
await sql`CREATE OR REPLACE FUNCTION album_user_delete_audit()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
BEGIN
INSERT INTO album_audit ("albumId", "userId")
SELECT "albumId", "userId"
FROM OLD;
IF pg_trigger_depth() = 1 THEN
INSERT INTO album_user_audit ("albumId", "userId")
SELECT "albumId", "userId"
FROM OLD;
END IF;
RETURN NULL;
END
$$;`.execute(db);
await sql`CREATE OR REPLACE FUNCTION memory_asset_delete_audit()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
BEGIN
INSERT INTO memory_asset_audit ("memoryId", "assetId")
SELECT "memoriesId", "assetId" FROM OLD
WHERE "memoriesId" IN (SELECT "id" FROM memory WHERE "id" IN (SELECT "memoriesId" FROM OLD));
RETURN NULL;
END
$$;`.execute(db);
// update overrides
await sql`UPDATE "migration_overrides" SET "value" = '{"type":"function","name":"album_user_after_insert","sql":"CREATE OR REPLACE FUNCTION album_user_after_insert()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n UPDATE album SET \\"updatedAt\\" = clock_timestamp(), \\"updateId\\" = immich_uuid_v7(clock_timestamp())\\n WHERE \\"id\\" IN (SELECT DISTINCT \\"albumId\\" FROM inserted_rows);\\n RETURN NULL;\\n END\\n $$;"}'::jsonb WHERE "name" = 'function_album_user_after_insert';`.execute(db);
await sql`UPDATE "migration_overrides" SET "value" = '{"type":"function","name":"album_asset_delete_audit","sql":"CREATE OR REPLACE FUNCTION album_asset_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO album_asset_audit (\\"albumId\\", \\"assetId\\")\\n SELECT \\"albumId\\", \\"assetId\\" FROM OLD\\n WHERE \\"albumId\\" IN (SELECT \\"id\\" FROM album WHERE \\"id\\" IN (SELECT \\"albumId\\" FROM OLD));\\n RETURN NULL;\\n END\\n $$;"}'::jsonb WHERE "name" = 'function_album_asset_delete_audit';`.execute(db);
await sql`UPDATE "migration_overrides" SET "value" = '{"type":"function","name":"album_user_delete_audit","sql":"CREATE OR REPLACE FUNCTION album_user_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO album_audit (\\"albumId\\", \\"userId\\")\\n SELECT \\"albumId\\", \\"userId\\"\\n FROM OLD;\\n\\n IF pg_trigger_depth() = 1 THEN\\n INSERT INTO album_user_audit (\\"albumId\\", \\"userId\\")\\n SELECT \\"albumId\\", \\"userId\\"\\n FROM OLD;\\n END IF;\\n\\n RETURN NULL;\\n END\\n $$;"}'::jsonb WHERE "name" = 'function_album_user_delete_audit';`.execute(db);
await sql`UPDATE "migration_overrides" SET "value" = '{"type":"function","name":"memory_asset_delete_audit","sql":"CREATE OR REPLACE FUNCTION memory_asset_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO memory_asset_audit (\\"memoryId\\", \\"assetId\\")\\n SELECT \\"memoriesId\\", \\"assetId\\" FROM OLD\\n WHERE \\"memoriesId\\" IN (SELECT \\"id\\" FROM memory WHERE \\"id\\" IN (SELECT \\"memoriesId\\" FROM OLD));\\n RETURN NULL;\\n END\\n $$;"}'::jsonb WHERE "name" = 'function_memory_asset_delete_audit';`.execute(db);
}
export function down() {
// not implemented
}
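down() is deliberately a no-op. A rollback, if one were ever written, would simply mirror up() with each rename reversed; a sketch of the first few statements (the same pattern continues through the remaining tables, constraints, indexes, and trigger functions):

export async function down(db: Kysely<any>): Promise<void> {
  await sql`ALTER TABLE "album_asset" RENAME COLUMN "albumId" TO "albumsId";`.execute(db);
  await sql`ALTER TABLE "album_asset" RENAME COLUMN "assetId" TO "assetsId";`.execute(db);
  await sql`ALTER TABLE "album_user" RENAME COLUMN "albumId" TO "albumsId";`.execute(db);
  // ...and so on for album_user.userId, memory_asset, shared_link_asset, tag_asset,
  // then the constraint and index renames in reverse.
}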

View File

@@ -32,7 +32,7 @@ import {
@ForeignKeyConstraint({
columns: ['albumId', 'assetId'],
referenceTable: () => AlbumAssetTable,
referenceColumns: ['albumsId', 'assetsId'],
referenceColumns: ['albumId', 'assetId'],
onUpdate: 'NO ACTION',
onDelete: 'CASCADE',
})

View File

@@ -22,10 +22,10 @@ import {
})
export class AlbumAssetTable {
@ForeignKeyColumn(() => AlbumTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE', nullable: false, primary: true })
albumsId!: string;
albumId!: string;
@ForeignKeyColumn(() => AssetTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE', nullable: false, primary: true })
assetsId!: string;
assetId!: string;
@CreateDateColumn()
createdAt!: Generated<Timestamp>;

View File

@@ -37,7 +37,7 @@ export class AlbumUserTable {
nullable: false,
primary: true,
})
albumsId!: string;
albumId!: string;
@ForeignKeyColumn(() => UserTable, {
onDelete: 'CASCADE',
@@ -45,7 +45,7 @@ export class AlbumUserTable {
nullable: false,
primary: true,
})
usersId!: string;
userId!: string;
@Column({ type: 'character varying', default: AlbumUserRole.Editor })
role!: Generated<AlbumUserRole>;

View File

@@ -25,7 +25,7 @@ export class MemoryAssetTable {
memoriesId!: string;
@ForeignKeyColumn(() => AssetTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true })
assetsId!: string;
assetId!: string;
@CreateDateColumn()
createdAt!: Generated<Timestamp>;

View File

@@ -5,8 +5,8 @@ import { ForeignKeyColumn, Table } from 'src/sql-tools';
@Table('shared_link_asset')
export class SharedLinkAssetTable {
@ForeignKeyColumn(() => AssetTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true })
assetsId!: string;
assetId!: string;
@ForeignKeyColumn(() => SharedLinkTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true })
sharedLinksId!: string;
sharedLinkId!: string;
}

View File

@@ -2,12 +2,12 @@ import { AssetTable } from 'src/schema/tables/asset.table';
import { TagTable } from 'src/schema/tables/tag.table';
import { ForeignKeyColumn, Index, Table } from 'src/sql-tools';
@Index({ columns: ['assetsId', 'tagsId'] })
@Index({ columns: ['assetId', 'tagId'] })
@Table('tag_asset')
export class TagAssetTable {
@ForeignKeyColumn(() => AssetTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true, index: true })
assetsId!: string;
assetId!: string;
@ForeignKeyColumn(() => TagTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true, index: true })
tagsId!: string;
tagId!: string;
}

View File

@@ -402,16 +402,16 @@ describe(AlbumService.name, () => {
mocks.album.update.mockResolvedValue(albumStub.sharedWithAdmin);
mocks.user.get.mockResolvedValue(userStub.user2);
mocks.albumUser.create.mockResolvedValue({
usersId: userStub.user2.id,
albumsId: albumStub.sharedWithAdmin.id,
userId: userStub.user2.id,
albumId: albumStub.sharedWithAdmin.id,
role: AlbumUserRole.Editor,
});
await sut.addUsers(authStub.user1, albumStub.sharedWithAdmin.id, {
albumUsers: [{ userId: authStub.user2.user.id }],
});
expect(mocks.albumUser.create).toHaveBeenCalledWith({
usersId: authStub.user2.user.id,
albumsId: albumStub.sharedWithAdmin.id,
userId: authStub.user2.user.id,
albumId: albumStub.sharedWithAdmin.id,
});
expect(mocks.event.emit).toHaveBeenCalledWith('AlbumInvite', {
id: albumStub.sharedWithAdmin.id,
@@ -439,8 +439,8 @@ describe(AlbumService.name, () => {
expect(mocks.albumUser.delete).toHaveBeenCalledTimes(1);
expect(mocks.albumUser.delete).toHaveBeenCalledWith({
albumsId: albumStub.sharedWithUser.id,
usersId: userStub.user1.id,
albumId: albumStub.sharedWithUser.id,
userId: userStub.user1.id,
});
expect(mocks.album.getById).toHaveBeenCalledWith(albumStub.sharedWithUser.id, { withAssets: false });
});
@@ -467,8 +467,8 @@ describe(AlbumService.name, () => {
expect(mocks.albumUser.delete).toHaveBeenCalledTimes(1);
expect(mocks.albumUser.delete).toHaveBeenCalledWith({
albumsId: albumStub.sharedWithUser.id,
usersId: authStub.user1.user.id,
albumId: albumStub.sharedWithUser.id,
userId: authStub.user1.user.id,
});
});
@@ -480,8 +480,8 @@ describe(AlbumService.name, () => {
expect(mocks.albumUser.delete).toHaveBeenCalledTimes(1);
expect(mocks.albumUser.delete).toHaveBeenCalledWith({
albumsId: albumStub.sharedWithUser.id,
usersId: authStub.user1.user.id,
albumId: albumStub.sharedWithUser.id,
userId: authStub.user1.user.id,
});
});
@@ -515,7 +515,7 @@ describe(AlbumService.name, () => {
role: AlbumUserRole.Editor,
});
expect(mocks.albumUser.update).toHaveBeenCalledWith(
{ albumsId: albumStub.sharedWithAdmin.id, usersId: userStub.admin.id },
{ albumId: albumStub.sharedWithAdmin.id, userId: userStub.admin.id },
{ role: AlbumUserRole.Editor },
);
});
@@ -804,12 +804,12 @@ describe(AlbumService.name, () => {
albumThumbnailAssetId: 'asset-1',
});
expect(mocks.album.addAssetIdsToAlbums).toHaveBeenCalledWith([
{ albumsId: 'album-123', assetsId: 'asset-1' },
{ albumsId: 'album-123', assetsId: 'asset-2' },
{ albumsId: 'album-123', assetsId: 'asset-3' },
{ albumsId: 'album-321', assetsId: 'asset-1' },
{ albumsId: 'album-321', assetsId: 'asset-2' },
{ albumsId: 'album-321', assetsId: 'asset-3' },
{ albumId: 'album-123', assetId: 'asset-1' },
{ albumId: 'album-123', assetId: 'asset-2' },
{ albumId: 'album-123', assetId: 'asset-3' },
{ albumId: 'album-321', assetId: 'asset-1' },
{ albumId: 'album-321', assetId: 'asset-2' },
{ albumId: 'album-321', assetId: 'asset-3' },
]);
});
@@ -840,12 +840,12 @@ describe(AlbumService.name, () => {
albumThumbnailAssetId: 'asset-id',
});
expect(mocks.album.addAssetIdsToAlbums).toHaveBeenCalledWith([
{ albumsId: 'album-123', assetsId: 'asset-1' },
{ albumsId: 'album-123', assetsId: 'asset-2' },
{ albumsId: 'album-123', assetsId: 'asset-3' },
{ albumsId: 'album-321', assetsId: 'asset-1' },
{ albumsId: 'album-321', assetsId: 'asset-2' },
{ albumsId: 'album-321', assetsId: 'asset-3' },
{ albumId: 'album-123', assetId: 'asset-1' },
{ albumId: 'album-123', assetId: 'asset-2' },
{ albumId: 'album-123', assetId: 'asset-3' },
{ albumId: 'album-321', assetId: 'asset-1' },
{ albumId: 'album-321', assetId: 'asset-2' },
{ albumId: 'album-321', assetId: 'asset-3' },
]);
});
@@ -876,12 +876,12 @@ describe(AlbumService.name, () => {
albumThumbnailAssetId: 'asset-1',
});
expect(mocks.album.addAssetIdsToAlbums).toHaveBeenCalledWith([
{ albumsId: 'album-123', assetsId: 'asset-1' },
{ albumsId: 'album-123', assetsId: 'asset-2' },
{ albumsId: 'album-123', assetsId: 'asset-3' },
{ albumsId: 'album-321', assetsId: 'asset-1' },
{ albumsId: 'album-321', assetsId: 'asset-2' },
{ albumsId: 'album-321', assetsId: 'asset-3' },
{ albumId: 'album-123', assetId: 'asset-1' },
{ albumId: 'album-123', assetId: 'asset-2' },
{ albumId: 'album-123', assetId: 'asset-3' },
{ albumId: 'album-321', assetId: 'asset-1' },
{ albumId: 'album-321', assetId: 'asset-2' },
{ albumId: 'album-321', assetId: 'asset-3' },
]);
expect(mocks.event.emit).toHaveBeenCalledWith('AlbumUpdate', {
id: 'album-123',
@@ -936,9 +936,9 @@ describe(AlbumService.name, () => {
albumThumbnailAssetId: 'asset-1',
});
expect(mocks.album.addAssetIdsToAlbums).toHaveBeenCalledWith([
{ albumsId: 'album-123', assetsId: 'asset-1' },
{ albumsId: 'album-123', assetsId: 'asset-2' },
{ albumsId: 'album-123', assetsId: 'asset-3' },
{ albumId: 'album-123', assetId: 'asset-1' },
{ albumId: 'album-123', assetId: 'asset-2' },
{ albumId: 'album-123', assetId: 'asset-3' },
]);
expect(mocks.event.emit).toHaveBeenCalledWith('AlbumUpdate', {
id: 'album-123',
@@ -977,12 +977,12 @@ describe(AlbumService.name, () => {
albumThumbnailAssetId: 'asset-1',
});
expect(mocks.album.addAssetIdsToAlbums).toHaveBeenCalledWith([
{ albumsId: 'album-123', assetsId: 'asset-1' },
{ albumsId: 'album-123', assetsId: 'asset-2' },
{ albumsId: 'album-123', assetsId: 'asset-3' },
{ albumsId: 'album-321', assetsId: 'asset-1' },
{ albumsId: 'album-321', assetsId: 'asset-2' },
{ albumsId: 'album-321', assetsId: 'asset-3' },
{ albumId: 'album-123', assetId: 'asset-1' },
{ albumId: 'album-123', assetId: 'asset-2' },
{ albumId: 'album-123', assetId: 'asset-3' },
{ albumId: 'album-321', assetId: 'asset-1' },
{ albumId: 'album-321', assetId: 'asset-2' },
{ albumId: 'album-321', assetId: 'asset-3' },
]);
expect(mocks.access.asset.checkPartnerAccess).toHaveBeenCalledWith(
authStub.admin.user.id,
@@ -1014,9 +1014,9 @@ describe(AlbumService.name, () => {
albumThumbnailAssetId: 'asset-1',
});
expect(mocks.album.addAssetIdsToAlbums).toHaveBeenCalledWith([
{ albumsId: 'album-321', assetsId: 'asset-1' },
{ albumsId: 'album-321', assetsId: 'asset-2' },
{ albumsId: 'album-321', assetsId: 'asset-3' },
{ albumId: 'album-321', assetId: 'asset-1' },
{ albumId: 'album-321', assetId: 'asset-2' },
{ albumId: 'album-321', assetId: 'asset-3' },
]);
});

View File

@@ -215,7 +215,7 @@ export class AlbumService extends BaseService {
return results;
}
const albumAssetValues: { albumsId: string; assetsId: string }[] = [];
const albumAssetValues: { albumId: string; assetId: string }[] = [];
const events: { id: string; recipients: string[] }[] = [];
for (const albumId of allowedAlbumIds) {
const existingAssetIds = await this.albumRepository.getAssetIds(albumId, [...allowedAssetIds]);
@@ -228,7 +228,7 @@ export class AlbumService extends BaseService {
results.success = true;
for (const assetId of notPresentAssetIds) {
albumAssetValues.push({ albumsId: albumId, assetsId: assetId });
albumAssetValues.push({ albumId, assetId });
}
await this.albumRepository.update(albumId, {
id: albumId,
@@ -289,7 +289,7 @@ export class AlbumService extends BaseService {
throw new BadRequestException('User not found');
}
await this.albumUserRepository.create({ usersId: userId, albumsId: id, role });
await this.albumUserRepository.create({ userId, albumId: id, role });
await this.eventRepository.emit('AlbumInvite', { id, userId });
}
@@ -317,12 +317,12 @@ export class AlbumService extends BaseService {
await this.requireAccess({ auth, permission: Permission.AlbumShare, ids: [id] });
}
await this.albumUserRepository.delete({ albumsId: id, usersId: userId });
await this.albumUserRepository.delete({ albumId: id, userId });
}
async updateUser(auth: AuthDto, id: string, userId: string, dto: UpdateAlbumUserDto): Promise<void> {
await this.requireAccess({ auth, permission: Permission.AlbumShare, ids: [id] });
await this.albumUserRepository.update({ albumsId: id, usersId: userId }, { role: dto.role });
await this.albumUserRepository.update({ albumId: id, userId }, { role: dto.role });
}
private async findOrFail(id: string, options: AlbumInfoOptions) {

View File

@@ -1,4 +1,5 @@
import { BinaryField, ExifDateTime } from 'exiftool-vendored';
import { DateTime } from 'luxon';
import { randomBytes } from 'node:crypto';
import { Stats } from 'node:fs';
import { defaults } from 'src/config';
@@ -231,7 +232,7 @@ describe(MetadataService.name, () => {
});
});
it('should account for the server being in a non-UTC timezone', async () => {
it('should determine dateTimeOriginal regardless of the server time zone', async () => {
process.env.TZ = 'America/Los_Angeles';
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.sidecar);
mockReadTags({ DateTimeOriginal: '2022:01:01 00:00:00' });
@@ -239,7 +240,7 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: assetStub.image.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
dateTimeOriginal: new Date('2022-01-01T08:00:00.000Z'),
dateTimeOriginal: new Date('2022-01-01T00:00:00.000Z'),
}),
);
@@ -856,6 +857,7 @@ describe(MetadataService.name, () => {
tz: 'UTC-11:30',
Rating: 3,
};
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
mockReadTags(tags);
@@ -897,7 +899,7 @@ describe(MetadataService.name, () => {
id: assetStub.image.id,
duration: null,
fileCreatedAt: dateForTest,
localDateTime: dateForTest,
localDateTime: DateTime.fromISO('1970-01-01T00:00:00.000Z').toJSDate(),
}),
);
});
@@ -1595,7 +1597,7 @@ describe(MetadataService.name, () => {
const result = firstDateTime(tags);
expect(result?.tag).toBe('SonyDateTime2');
expect(result?.dateTime?.toDate()?.toISOString()).toBe('2023-07-07T07:00:00.000Z');
expect(result?.dateTime?.toISOString()).toBe('2023-07-07T07:00:00');
});
it('should respect full priority order with all date tags present', () => {
@@ -1624,7 +1626,7 @@ describe(MetadataService.name, () => {
const result = firstDateTime(tags);
// Should use SubSecDateTimeOriginal as it has highest priority
expect(result?.tag).toBe('SubSecDateTimeOriginal');
expect(result?.dateTime?.toDate()?.toISOString()).toBe('2023-01-01T01:00:00.000Z');
expect(result?.dateTime?.toISOString()).toBe('2023-01-01T01:00:00');
});
it('should handle missing SubSec tags and use available date tags', () => {
@@ -1644,7 +1646,7 @@ describe(MetadataService.name, () => {
const result = firstDateTime(tags);
// Should use CreationDate when available
expect(result?.tag).toBe('CreationDate');
expect(result?.dateTime?.toDate()?.toISOString()).toBe('2023-07-07T07:00:00.000Z');
expect(result?.dateTime?.toISOString()).toBe('2023-07-07T07:00:00');
});
it('should handle invalid date formats gracefully', () => {
@@ -1658,7 +1660,7 @@ describe(MetadataService.name, () => {
const result = firstDateTime(tags);
// Should skip invalid dates and use the first valid one
expect(result?.tag).toBe('GPSDateTime');
expect(result?.dateTime?.toDate()?.toISOString()).toBe('2023-10-10T10:00:00.000Z');
expect(result?.dateTime?.toISOString()).toBe('2023-10-10T10:00:00');
});
it('should prefer CreationDate over CreateDate', () => {
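The assertion change in this file, from dateTime?.toDate()?.toISOString() to dateTime?.toISOString(), is what removes the host-timezone dependence: an ExifDateTime parsed from a zone-less tag serializes its wall-clock value directly, whereas building a JS Date first forces a zone choice and, absent one in the tag, picks up the process timezone. A minimal illustration (assuming ExifDateTime.fromEXIF as the parsing entry point):

import { ExifDateTime } from 'exiftool-vendored';

const dt = ExifDateTime.fromEXIF('2023:07:07 07:00:00'); // no zone in the tag
dt?.toISOString();          // '2023-07-07T07:00:00' (stable on any host)
dt?.toDate().toISOString(); // shifts with process.env.TZ, the old source of flakiness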

View File

@@ -2,7 +2,7 @@ import { Injectable } from '@nestjs/common';
import { ContainerDirectoryItem, ExifDateTime, Tags } from 'exiftool-vendored';
import { Insertable } from 'kysely';
import _ from 'lodash';
import { Duration } from 'luxon';
import { DateTime, Duration } from 'luxon';
import { Stats } from 'node:fs';
import { constants } from 'node:fs/promises';
import { join, parse } from 'node:path';
@@ -236,8 +236,8 @@ export class MetadataService extends BaseService {
latitude: number | null = null,
longitude: number | null = null;
if (this.hasGeo(exifTags)) {
latitude = exifTags.GPSLatitude;
longitude = exifTags.GPSLongitude;
latitude = Number(exifTags.GPSLatitude);
longitude = Number(exifTags.GPSLongitude);
if (reverseGeocoding.enabled) {
geo = await this.mapRepository.reverseGeocode({ latitude, longitude });
}
@@ -866,40 +866,47 @@ export class MetadataService extends BaseService {
this.logger.debug(`No timezone information found for asset ${asset.id}: ${asset.originalPath}`);
}
let dateTimeOriginal = dateTime?.toDate();
let localDateTime = dateTime?.toDateTime().setZone('UTC', { keepLocalTime: true }).toJSDate();
let dateTimeOriginal = dateTime?.toDateTime();
// do not let JavaScript use local timezone
if (dateTimeOriginal && !dateTime?.hasZone) {
dateTimeOriginal = dateTimeOriginal.setZone('UTC', { keepLocalTime: true });
}
// align with whatever timeZone we chose
dateTimeOriginal = dateTimeOriginal?.setZone(timeZone ?? 'UTC');
// store as "local time"
let localDateTime = dateTimeOriginal?.setZone('UTC', { keepLocalTime: true });
if (!localDateTime || !dateTimeOriginal) {
// FileCreateDate is not available on linux, likely because exiftool hasn't integrated the statx syscall yet
// birthtime is not available in Docker on macOS, so it appears as 0
const earliestDate = new Date(
const earliestDate = DateTime.fromMillis(
Math.min(
asset.fileCreatedAt.getTime(),
stats.birthtimeMs ? Math.min(stats.mtimeMs, stats.birthtimeMs) : stats.mtime.getTime(),
),
);
this.logger.debug(
`No exif date time found, falling back on ${earliestDate.toISOString()}, earliest of file creation and modification for asset ${asset.id}: ${asset.originalPath}`,
`No exif date time found, falling back on ${earliestDate.toISO()}, earliest of file creation and modification for asset ${asset.id}: ${asset.originalPath}`,
);
dateTimeOriginal = localDateTime = earliestDate;
}
this.logger.verbose(
`Found local date time ${localDateTime.toISOString()} for asset ${asset.id}: ${asset.originalPath}`,
);
this.logger.verbose(`Found local date time ${localDateTime.toISO()} for asset ${asset.id}: ${asset.originalPath}`);
return {
dateTimeOriginal,
timeZone,
localDateTime,
localDateTime: localDateTime.toJSDate(),
dateTimeOriginal: dateTimeOriginal.toJSDate(),
};
}
private hasGeo(tags: ImmichTags): tags is ImmichTags & { GPSLatitude: number; GPSLongitude: number } {
return (
tags.GPSLatitude !== undefined &&
tags.GPSLongitude !== undefined &&
(tags.GPSLatitude !== 0 || tags.GPSLatitude !== 0)
);
private hasGeo(tags: ImmichTags) {
const lat = Number(tags.GPSLatitude);
const lng = Number(tags.GPSLongitude);
return !Number.isNaN(lat) && !Number.isNaN(lng) && (lat !== 0 || lng !== 0);
}
private getAutoStackId(tags: ImmichTags | null): string | null {
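The date handling above hinges on Luxon's keepLocalTime option: setZone('UTC', { keepLocalTime: true }) re-labels a wall-clock time rather than converting it. That is how a zone-less EXIF timestamp stays pinned to its literal digits regardless of the server timezone (exactly what the updated spec now expects), and how localDateTime is derived from the zoned dateTimeOriginal. A minimal illustration:

import { DateTime } from 'luxon';

// A zone-less EXIF value '2022:01:01 00:00:00' read on, say, a Los Angeles server:
const wall = DateTime.fromISO('2022-01-01T00:00:00');  // zone defaults to the host zone
wall.setZone('UTC', { keepLocalTime: true }).toISO();  // '2022-01-01T00:00:00.000Z' (digits kept, zone swapped)
wall.setZone('UTC').toISO();                           // host-dependent: '2022-01-01T08:00:00.000Z' on that server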

View File

@@ -192,12 +192,12 @@ describe(TagService.name, () => {
mocks.access.tag.checkOwnerAccess.mockResolvedValue(new Set(['tag-1', 'tag-2']));
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set(['asset-1', 'asset-2', 'asset-3']));
mocks.tag.upsertAssetIds.mockResolvedValue([
{ tagsId: 'tag-1', assetsId: 'asset-1' },
{ tagsId: 'tag-1', assetsId: 'asset-2' },
{ tagsId: 'tag-1', assetsId: 'asset-3' },
{ tagsId: 'tag-2', assetsId: 'asset-1' },
{ tagsId: 'tag-2', assetsId: 'asset-2' },
{ tagsId: 'tag-2', assetsId: 'asset-3' },
{ tagId: 'tag-1', assetId: 'asset-1' },
{ tagId: 'tag-1', assetId: 'asset-2' },
{ tagId: 'tag-1', assetId: 'asset-3' },
{ tagId: 'tag-2', assetId: 'asset-1' },
{ tagId: 'tag-2', assetId: 'asset-2' },
{ tagId: 'tag-2', assetId: 'asset-3' },
]);
await expect(
sut.bulkTagAssets(authStub.admin, { tagIds: ['tag-1', 'tag-2'], assetIds: ['asset-1', 'asset-2', 'asset-3'] }),
@@ -205,12 +205,12 @@ describe(TagService.name, () => {
count: 6,
});
expect(mocks.tag.upsertAssetIds).toHaveBeenCalledWith([
{ tagsId: 'tag-1', assetsId: 'asset-1' },
{ tagsId: 'tag-1', assetsId: 'asset-2' },
{ tagsId: 'tag-1', assetsId: 'asset-3' },
{ tagsId: 'tag-2', assetsId: 'asset-1' },
{ tagsId: 'tag-2', assetsId: 'asset-2' },
{ tagsId: 'tag-2', assetsId: 'asset-3' },
{ tagId: 'tag-1', assetId: 'asset-1' },
{ tagId: 'tag-1', assetId: 'asset-2' },
{ tagId: 'tag-1', assetId: 'asset-3' },
{ tagId: 'tag-2', assetId: 'asset-1' },
{ tagId: 'tag-2', assetId: 'asset-2' },
{ tagId: 'tag-2', assetId: 'asset-3' },
]);
});
});

View File

@@ -82,14 +82,14 @@ export class TagService extends BaseService {
]);
const items: Insertable<TagAssetTable>[] = [];
for (const tagsId of tagIds) {
for (const assetsId of assetIds) {
items.push({ tagsId, assetsId });
for (const tagId of tagIds) {
for (const assetId of assetIds) {
items.push({ tagId, assetId });
}
}
const results = await this.tagRepository.upsertAssetIds(items);
for (const assetId of new Set(results.map((item) => item.assetsId))) {
for (const assetId of new Set(results.map((item) => item.assetId))) {
await this.eventRepository.emit('AssetTag', { assetId });
}

View File

@@ -244,12 +244,12 @@ export function inAlbums<O>(qb: SelectQueryBuilder<DB, 'asset', O>, albumIds: st
(eb) =>
eb
.selectFrom('album_asset')
.select('assetsId')
.where('albumsId', '=', anyUuid(albumIds!))
.groupBy('assetsId')
.having((eb) => eb.fn.count('albumsId').distinct(), '=', albumIds.length)
.select('assetId')
.where('albumId', '=', anyUuid(albumIds!))
.groupBy('assetId')
.having((eb) => eb.fn.count('albumId').distinct(), '=', albumIds.length)
.as('has_album'),
(join) => join.onRef('has_album.assetsId', '=', 'asset.id'),
(join) => join.onRef('has_album.assetId', '=', 'asset.id'),
);
}
@@ -258,13 +258,13 @@ export function hasTags<O>(qb: SelectQueryBuilder<DB, 'asset', O>, tagIds: strin
(eb) =>
eb
.selectFrom('tag_asset')
.select('assetsId')
.innerJoin('tag_closure', 'tag_asset.tagsId', 'tag_closure.id_descendant')
.select('assetId')
.innerJoin('tag_closure', 'tag_asset.tagId', 'tag_closure.id_descendant')
.where('tag_closure.id_ancestor', '=', anyUuid(tagIds))
.groupBy('assetsId')
.groupBy('assetId')
.having((eb) => eb.fn.count('tag_closure.id_ancestor').distinct(), '>=', tagIds.length)
.as('has_tags'),
(join) => join.onRef('has_tags.assetsId', '=', 'asset.id'),
(join) => join.onRef('has_tags.assetId', '=', 'asset.id'),
);
}
@@ -285,8 +285,8 @@ export function withTags(eb: ExpressionBuilder<DB, 'asset'>) {
eb
.selectFrom('tag')
.select(columns.tag)
.innerJoin('tag_asset', 'tag.id', 'tag_asset.tagsId')
.whereRef('asset.id', '=', 'tag_asset.assetsId'),
.innerJoin('tag_asset', 'tag.id', 'tag_asset.tagId')
.whereRef('asset.id', '=', 'tag_asset.assetId'),
).as('tags');
}
@@ -299,8 +299,8 @@ export function withTagId<O>(qb: SelectQueryBuilder<DB, 'asset', O>, tagId: stri
eb.exists(
eb
.selectFrom('tag_closure')
.innerJoin('tag_asset', 'tag_asset.tagsId', 'tag_closure.id_descendant')
.whereRef('tag_asset.assetsId', '=', 'asset.id')
.innerJoin('tag_asset', 'tag_asset.tagId', 'tag_closure.id_descendant')
.whereRef('tag_asset.assetId', '=', 'asset.id')
.where('tag_closure.id_ancestor', '=', tagId),
),
);
@@ -320,7 +320,7 @@ export function searchAssetBuilder(kysely: Kysely<DB>, options: AssetSearchBuild
.$if(!!options.albumIds && options.albumIds.length > 0, (qb) => inAlbums(qb, options.albumIds!))
.$if(!!options.tagIds && options.tagIds.length > 0, (qb) => hasTags(qb, options.tagIds!))
.$if(options.tagIds === null, (qb) =>
qb.where((eb) => eb.not(eb.exists((eb) => eb.selectFrom('tag_asset').whereRef('assetsId', '=', 'asset.id')))),
qb.where((eb) => eb.not(eb.exists((eb) => eb.selectFrom('tag_asset').whereRef('assetId', '=', 'asset.id')))),
)
.$if(!!options.personIds && options.personIds.length > 0, (qb) => hasPeople(qb, options.personIds!))
.$if(!!options.createdBefore, (qb) => qb.where('asset.createdAt', '<=', options.createdBefore!))
@@ -403,7 +403,7 @@ export function searchAssetBuilder(kysely: Kysely<DB>, options: AssetSearchBuild
qb.where('asset.livePhotoVideoId', options.isMotion ? 'is not' : 'is', null),
)
.$if(!!options.isNotInAlbum && (!options.albumIds || options.albumIds.length === 0), (qb) =>
qb.where((eb) => eb.not(eb.exists((eb) => eb.selectFrom('album_asset').whereRef('assetsId', '=', 'asset.id')))),
qb.where((eb) => eb.not(eb.exists((eb) => eb.selectFrom('album_asset').whereRef('assetId', '=', 'asset.id')))),
)
.$if(!!options.withExif, withExifInner)
.$if(!!(options.withFaces || options.withPeople || options.personIds), (qb) => qb.select(withFacesAndPeople))
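inAlbums and hasTags above share a relational-division trick: group the junction rows per asset and require the distinct count to reach the number of requested ids, i.e. membership in all of them rather than any. Roughly the subquery inAlbums builds, sketched with Kysely's sql tag (binding names illustrative):

sql`
  SELECT "assetId"
  FROM album_asset
  WHERE "albumId" = ANY(${albumIds})
  GROUP BY "assetId"
  HAVING COUNT(DISTINCT "albumId") = ${albumIds.length}
`

hasTags applies the same shape through tag_closure, counting distinct ancestors so that matching several descendants of one requested tag is not over-counted.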

View File

@@ -2,6 +2,7 @@
import { Insertable, Kysely } from 'kysely';
import { DateTime } from 'luxon';
import { createHash, randomBytes } from 'node:crypto';
import { Stats } from 'node:fs';
import { Writable } from 'node:stream';
import { AssetFace } from 'src/database';
import { AuthDto, LoginResponseDto } from 'src/dtos/auth.dto';
@@ -28,7 +29,9 @@ import { EventRepository } from 'src/repositories/event.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { MachineLearningRepository } from 'src/repositories/machine-learning.repository';
import { MapRepository } from 'src/repositories/map.repository';
import { MemoryRepository } from 'src/repositories/memory.repository';
import { MetadataRepository } from 'src/repositories/metadata.repository';
import { NotificationRepository } from 'src/repositories/notification.repository';
import { OcrRepository } from 'src/repositories/ocr.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
@@ -61,7 +64,9 @@ import { TagAssetTable } from 'src/schema/tables/tag-asset.table';
import { TagTable } from 'src/schema/tables/tag.table';
import { UserTable } from 'src/schema/tables/user.table';
import { BASE_SERVICE_DEPENDENCIES, BaseService } from 'src/services/base.service';
import { MetadataService } from 'src/services/metadata.service';
import { SyncService } from 'src/services/sync.service';
import { mockEnvData } from 'test/repositories/config.repository.mock';
import { newTelemetryRepositoryMock } from 'test/repositories/telemetry.repository.mock';
import { factory, newDate, newEmbedding, newUuid } from 'test/small.factory';
import { automock, wait } from 'test/utils';
@@ -212,7 +217,7 @@ export class MediumTestContext<S extends BaseService = BaseService> {
async newAlbumUser(dto: { albumId: string; userId: string; role?: AlbumUserRole }) {
const { albumId, userId, role = AlbumUserRole.Editor } = dto;
const result = await this.get(AlbumUserRepository).create({ albumsId: albumId, usersId: userId, role });
const result = await this.get(AlbumUserRepository).create({ albumId, userId, role });
return { albumUser: { albumId, userId, role }, result };
}
@@ -255,9 +260,9 @@ export class MediumTestContext<S extends BaseService = BaseService> {
async newTagAsset(tagBulkAssets: { tagIds: string[]; assetIds: string[] }) {
const tagsAssets: Insertable<TagAssetTable>[] = [];
for (const tagsId of tagBulkAssets.tagIds) {
for (const assetsId of tagBulkAssets.assetIds) {
tagsAssets.push({ tagsId, assetsId });
for (const tagId of tagBulkAssets.tagIds) {
for (const assetId of tagBulkAssets.assetIds) {
tagsAssets.push({ tagId, assetId });
}
}
@@ -305,6 +310,63 @@ export class SyncTestContext extends MediumTestContext<SyncService> {
}
}
const mockDate = new Date('2024-06-01T12:00:00.000Z');
const mockStats = {
mtime: mockDate,
atime: mockDate,
ctime: mockDate,
birthtime: mockDate,
atimeMs: 0,
mtimeMs: 0,
ctimeMs: 0,
birthtimeMs: 0,
};
export class ExifTestContext extends MediumTestContext<MetadataService> {
constructor(database: Kysely<DB>) {
super(MetadataService, {
database,
real: [AssetRepository, AssetJobRepository, MetadataRepository, SystemMetadataRepository, TagRepository],
mock: [ConfigRepository, EventRepository, LoggingRepository, MapRepository, StorageRepository],
});
this.getMock(ConfigRepository).getEnv.mockReturnValue(mockEnvData({}));
this.getMock(EventRepository).emit.mockResolvedValue();
this.getMock(MapRepository).reverseGeocode.mockResolvedValue({ country: null, state: null, city: null });
this.getMock(StorageRepository).stat.mockResolvedValue(mockStats as Stats);
}
getMockStats() {
return mockStats;
}
getGps(assetId: string) {
return this.database
.selectFrom('asset_exif')
.select(['latitude', 'longitude'])
.where('assetId', '=', assetId)
.executeTakeFirstOrThrow();
}
getTags(assetId: string) {
return this.database
.selectFrom('tag')
.innerJoin('tag_asset', 'tag.id', 'tag_asset.tagId')
.where('tag_asset.assetId', '=', assetId)
.selectAll()
.execute();
}
getDates(assetId: string) {
return this.database
.selectFrom('asset')
.innerJoin('asset_exif', 'asset.id', 'asset_exif.assetId')
.where('id', '=', assetId)
.select(['asset.fileCreatedAt', 'asset.localDateTime', 'asset_exif.dateTimeOriginal', 'asset_exif.timeZone'])
.executeTakeFirstOrThrow();
}
}
const newRealRepository = <T>(key: ClassConstructor<T>, db: Kysely<DB>): T => {
switch (key) {
case AccessRepository:
@@ -344,6 +406,14 @@ const newRealRepository = <T>(key: ClassConstructor<T>, db: Kysely<DB>): T => {
return new key(LoggingRepository.create());
}
case MetadataRepository: {
return new key(LoggingRepository.create());
}
case StorageRepository: {
return new key(LoggingRepository.create());
}
case TagRepository: {
return new key(db, LoggingRepository.create());
}
@@ -381,6 +451,10 @@ const newMockRepository = <T>(key: ClassConstructor<T>) => {
return automock(key);
}
case MapRepository: {
return automock(MapRepository, { args: [undefined, undefined, { setContext: () => {} }] });
}
case TelemetryRepository: {
return newTelemetryRepositoryMock();
}

View File

@@ -0,0 +1,65 @@
import { Kysely } from 'kysely';
import { DateTime } from 'luxon';
import { resolve } from 'node:path';
import { DB } from 'src/schema';
import { ExifTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
let database: Kysely<DB>;
const setup = async (testAssetPath: string) => {
const ctx = new ExifTestContext(database);
const { user } = await ctx.newUser();
const originalPath = resolve(`../e2e/test-assets/${testAssetPath}`);
const { asset } = await ctx.newAsset({ ownerId: user.id, originalPath });
return { ctx, sut: ctx.sut, asset };
};
beforeAll(async () => {
database = await getKyselyDB();
});
describe('exif date time', () => {
it('should prioritize DateTimeOriginal', async () => {
const { ctx, sut, asset } = await setup('metadata/dates/date-priority-test.jpg');
await sut.handleMetadataExtraction({ id: asset.id });
await expect(ctx.getDates(asset.id)).resolves.toEqual({
timeZone: null,
dateTimeOriginal: DateTime.fromISO('2023-02-02T02:00:00.000Z').toJSDate(),
localDateTime: DateTime.fromISO('2023-02-02T02:00:00.000Z').toJSDate(),
fileCreatedAt: DateTime.fromISO('2023-02-02T02:00:00.000Z').toJSDate(),
});
});
it('should extract GPSDateTime with GPS coordinates', async () => {

const { ctx, sut, asset } = await setup('metadata/dates/gps-datetime.jpg');
await sut.handleMetadataExtraction({ id: asset.id });
await expect(ctx.getDates(asset.id)).resolves.toEqual({
timeZone: 'America/Los_Angeles',
dateTimeOriginal: DateTime.fromISO('2023-11-15T12:30:00.000Z').toJSDate(),
localDateTime: DateTime.fromISO('2023-11-15T04:30:00.000Z').toJSDate(),
fileCreatedAt: DateTime.fromISO('2023-11-15T12:30:00.000Z').toJSDate(),
});
});
it('should ignore the TimeCreated tag', async () => {
const { ctx, sut, asset } = await setup('metadata/dates/time-created.jpg');
await sut.handleMetadataExtraction({ id: asset.id });
const stats = ctx.getMockStats();
await expect(ctx.getDates(asset.id)).resolves.toEqual({
timeZone: null,
dateTimeOriginal: stats.mtime,
localDateTime: stats.mtime,
fileCreatedAt: stats.mtime,
});
});
});

View File

@@ -0,0 +1,31 @@
import { Kysely } from 'kysely';
import { resolve } from 'node:path';
import { DB } from 'src/schema';
import { ExifTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
let database: Kysely<DB>;
const setup = async (testAssetPath: string) => {
const ctx = new ExifTestContext(database);
const { user } = await ctx.newUser();
const originalPath = resolve(`../e2e/test-assets/${testAssetPath}`);
const { asset } = await ctx.newAsset({ ownerId: user.id, originalPath });
return { ctx, sut: ctx.sut, asset };
};
beforeAll(async () => {
database = await getKyselyDB();
});
describe('exif gps', () => {
it('should handle empty strings', async () => {
const { ctx, sut, asset } = await setup('metadata/gps-position/empty_gps.jpg');
await sut.handleMetadataExtraction({ id: asset.id });
await expect(ctx.getGps(asset.id)).resolves.toEqual({ latitude: null, longitude: null });
});
});
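This case is exactly what the Number() coercion in the reworked hasGeo guards: an empty GPS tag coerces to 0 rather than NaN, so it fails the (lat !== 0 || lng !== 0) check, while a missing tag coerces to NaN and fails Number.isNaN; either way no coordinates are written. The coercion behavior being relied on:

Number('12.5');    // 12.5, a valid coordinate string
Number('');        // 0, so an empty tag is filtered by the zero/zero check
Number(undefined); // NaN, so a missing tag is filtered by Number.isNaN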

View File

@@ -0,0 +1,34 @@
import { Kysely } from 'kysely';
import { resolve } from 'node:path';
import { DB } from 'src/schema';
import { ExifTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
let database: Kysely<DB>;
const setup = async (testAssetPath: string) => {
const ctx = new ExifTestContext(database);
const { user } = await ctx.newUser();
const originalPath = resolve(`../e2e/test-assets/${testAssetPath}`);
const { asset } = await ctx.newAsset({ ownerId: user.id, originalPath });
return { ctx, sut: ctx.sut, asset };
};
beforeAll(async () => {
database = await getKyselyDB();
});
describe('exif tags', () => {
it('should detect regular tags', async () => {
const { ctx, sut, asset } = await setup('metadata/tags/picasa.jpg');
await sut.handleMetadataExtraction({ id: asset.id });
await expect(ctx.getTags(asset.id)).resolves.toEqual([
expect.objectContaining({ assetId: asset.id, value: 'Frost', parentId: null }),
expect.objectContaining({ assetId: asset.id, value: 'Yard', parentId: null }),
]);
});
});

View File

@@ -65,42 +65,6 @@ describe(MetadataService.name, () => {
timeZone: null,
},
},
{
description: 'should handle no time zone information and server behind UTC',
serverTimeZone: 'America/Los_Angeles',
exifData: {
DateTimeOriginal: '2022:01:01 00:00:00',
},
expected: {
localDateTime: '2022-01-01T00:00:00.000Z',
dateTimeOriginal: '2022-01-01T08:00:00.000Z',
timeZone: null,
},
},
{
description: 'should handle no time zone information and server ahead of UTC',
serverTimeZone: 'Europe/Brussels',
exifData: {
DateTimeOriginal: '2022:01:01 00:00:00',
},
expected: {
localDateTime: '2022-01-01T00:00:00.000Z',
dateTimeOriginal: '2021-12-31T23:00:00.000Z',
timeZone: null,
},
},
{
description: 'should handle no time zone information and server ahead of UTC in the summer',
serverTimeZone: 'Europe/Brussels',
exifData: {
DateTimeOriginal: '2022:06:01 00:00:00',
},
expected: {
localDateTime: '2022-06-01T00:00:00.000Z',
dateTimeOriginal: '2022-05-31T22:00:00.000Z',
timeZone: null,
},
},
{
description: 'should handle a +13:00 time zone',
exifData: {

View File

@@ -74,7 +74,7 @@ describe(SyncRequestType.AlbumUsersV1, () => {
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await albumUserRepo.update({ albumsId: album.id, usersId: user1.id }, { role: AlbumUserRole.Viewer });
await albumUserRepo.update({ albumId: album.id, userId: user1.id }, { role: AlbumUserRole.Viewer });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(newResponse).toEqual([
{
@@ -104,7 +104,7 @@ describe(SyncRequestType.AlbumUsersV1, () => {
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await albumUserRepo.delete({ albumsId: album.id, usersId: user1.id });
await albumUserRepo.delete({ albumId: album.id, userId: user1.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(newResponse).toEqual([
{
@@ -171,7 +171,7 @@ describe(SyncRequestType.AlbumUsersV1, () => {
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await albumUserRepo.update({ albumsId: album.id, usersId: user.id }, { role: AlbumUserRole.Viewer });
await albumUserRepo.update({ albumId: album.id, userId: user.id }, { role: AlbumUserRole.Viewer });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(newResponse).toEqual([
{
@@ -208,7 +208,7 @@ describe(SyncRequestType.AlbumUsersV1, () => {
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await albumUserRepo.delete({ albumsId: album.id, usersId: user.id });
await albumUserRepo.delete({ albumId: album.id, userId: user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(newResponse).toEqual([

View File

@@ -217,7 +217,7 @@ describe(SyncRequestType.AlbumsV1, () => {
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
await albumUserRepo.delete({ albumsId: album.id, usersId: auth.user.id });
await albumUserRepo.delete({ albumId: album.id, userId: auth.user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
expect(newResponse).toEqual([
{

Some files were not shown because too many files have changed in this diff.