Mirror of https://github.com/immich-app/immich.git (synced 2026-01-24 10:24:39 -08:00)

Compare commits: feat/serve...refactor/t (7 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 12a59f8c68 | |
| | de84e46f62 | |
| | 6d3dda7e2e | |
| | 4ca76b24e9 | |
| | 9a5e8c07ab | |
| | 9bcbf003e6 | |
| | 8e97c584cf | |
.github/workflows/build-mobile.yml (vendored, 24 changes)
@@ -20,30 +20,6 @@ on:
         required: true
       ANDROID_STORE_PASSWORD:
         required: true
-      APP_STORE_CONNECT_API_KEY_ID:
-        required: true
-      APP_STORE_CONNECT_API_KEY_ISSUER_ID:
-        required: true
-      APP_STORE_CONNECT_API_KEY:
-        required: true
-      IOS_CERTIFICATE_P12:
-        required: true
-      IOS_CERTIFICATE_PASSWORD:
-        required: true
-      IOS_PROVISIONING_PROFILE:
-        required: true
-      IOS_PROVISIONING_PROFILE_SHARE_EXTENSION:
-        required: true
-      IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION:
-        required: true
-      IOS_DEVELOPMENT_PROVISIONING_PROFILE:
-        required: true
-      IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION:
-        required: true
-      IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION:
-        required: true
-      FASTLANE_TEAM_ID:
-        required: true
   pull_request:
   push:
     branches: [main]
.github/workflows/prepare-release.yml (vendored, 14 changes)
@@ -99,20 +99,6 @@ jobs:
       ALIAS: ${{ secrets.ALIAS }}
       ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
       ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
-      # iOS secrets
-      APP_STORE_CONNECT_API_KEY_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ID }}
-      APP_STORE_CONNECT_API_KEY_ISSUER_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ISSUER_ID }}
-      APP_STORE_CONNECT_API_KEY: ${{ secrets.APP_STORE_CONNECT_API_KEY }}
-      IOS_CERTIFICATE_P12: ${{ secrets.IOS_CERTIFICATE_P12 }}
-      IOS_CERTIFICATE_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
-      IOS_PROVISIONING_PROFILE: ${{ secrets.IOS_PROVISIONING_PROFILE }}
-      IOS_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_SHARE_EXTENSION }}
-      IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
-      IOS_DEVELOPMENT_PROVISIONING_PROFILE: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE }}
-      IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION }}
-      IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
-      FASTLANE_TEAM_ID: ${{ secrets.FASTLANE_TEAM_ID }}
-
     with:
       ref: ${{ needs.bump_version.outputs.ref }}
       environment: production
.github/workflows/test.yml (vendored, 1 change)
@@ -382,7 +382,6 @@ jobs:
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
           submodules: 'recursive'
           token: ${{ steps.token.outputs.token }}
       - name: Setup pnpm
         uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
@@ -1,6 +1,6 @@
 {
   "name": "@immich/cli",
-  "version": "2.2.101",
+  "version": "2.2.99",
   "description": "Command Line Interface (CLI) for Immich",
   "type": "module",
   "exports": "./dist/index.js",
@@ -20,7 +20,7 @@
   "@types/lodash-es": "^4.17.12",
   "@types/micromatch": "^4.0.9",
   "@types/mock-fs": "^4.13.1",
-  "@types/node": "^22.18.13",
+  "@types/node": "^22.18.12",
   "@vitest/coverage-v8": "^3.0.0",
   "byte-size": "^9.0.0",
   "cli-progress": "^3.12.0",
@@ -83,7 +83,7 @@ services:
     container_name: immich_prometheus
     ports:
       - 9090:9090
-    image: prom/prometheus@sha256:49214755b6153f90a597adcbff0252cc61069f8ab69ce8411285cd4a560e8038
+    image: prom/prometheus@sha256:23031bfe0e74a13004252caaa74eccd0d62b6c6e7a04711d5b8bf5b7e113adc7
     volumes:
       - ./prometheus.yml:/etc/prometheus/prometheus.yml
       - prometheus-data:/prometheus
@@ -106,14 +106,14 @@ SELECT "user"."email", "asset"."type", COUNT(*) FROM "asset"

 ```sql title="Count by tag"
 SELECT "t"."value" AS "tag_name", COUNT(*) AS "number_assets" FROM "tag" "t"
-JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagId" JOIN "asset" "a" ON "ta"."assetId" = "a"."id"
+JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagsId" JOIN "asset" "a" ON "ta"."assetsId" = "a"."id"
 WHERE "a"."visibility" != 'hidden'
 GROUP BY "t"."value" ORDER BY "number_assets" DESC;
 ```

 ```sql title="Count by tag (per user)"
 SELECT "t"."value" AS "tag_name", "u"."email" as "user_email", COUNT(*) AS "number_assets" FROM "tag" "t"
-JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagId" JOIN "asset" "a" ON "ta"."assetId" = "a"."id" JOIN "user" "u" ON "a"."ownerId" = "u"."id"
+JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagsId" JOIN "asset" "a" ON "ta"."assetsId" = "a"."id" JOIN "user" "u" ON "a"."ownerId" = "u"."id"
 WHERE "a"."visibility" != 'hidden'
 GROUP BY "t"."value", "u"."email" ORDER BY "number_assets" DESC;
 ```

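If you prefer running these tag queries from a script instead of a psql shell, here is a minimal Python sketch. The connection values follow the documented docker-compose defaults and are assumptions; adjust them to your deployment.

```python
import psycopg2  # pip install psycopg2-binary

# Host, port, and credentials are assumptions based on the default docker-compose setup.
conn = psycopg2.connect(host="localhost", port=5432, dbname="immich", user="postgres", password="postgres")
with conn, conn.cursor() as cur:
    cur.execute("""
        SELECT "t"."value" AS "tag_name", COUNT(*) AS "number_assets" FROM "tag" "t"
        JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagsId" JOIN "asset" "a" ON "ta"."assetsId" = "a"."id"
        WHERE "a"."visibility" != 'hidden'
        GROUP BY "t"."value" ORDER BY "number_assets" DESC;
    """)
    for tag_name, number_assets in cur.fetchall():
        print(f"{tag_name}: {number_assets}")
conn.close()
```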
@@ -16,76 +16,48 @@ The default configuration looks like this:

```json
{
  "ffmpeg": {
    "crf": 23,
    "threads": 0,
    "preset": "ultrafast",
    "targetVideoCodec": "h264",
    "acceptedVideoCodecs": ["h264"],
    "targetAudioCodec": "aac",
    "acceptedAudioCodecs": ["aac", "mp3", "libopus", "pcm_s16le"],
    "acceptedContainers": ["mov", "ogg", "webm"],
    "targetResolution": "720",
    "maxBitrate": "0",
    "bframes": -1,
    "refs": 0,
    "gopSize": 0,
    "temporalAQ": false,
    "cqMode": "auto",
    "twoPass": false,
    "preferredHwDevice": "auto",
    "transcode": "required",
    "tonemap": "hable",
    "accel": "disabled",
    "accelDecode": false
  },
  "backup": {
    "database": {
      "cronExpression": "0 02 * * *",
      "enabled": true,
      "cronExpression": "0 02 * * *",
      "keepLastAmount": 14
    }
  },
  "ffmpeg": {
    "accel": "disabled",
    "accelDecode": false,
    "acceptedAudioCodecs": ["aac", "mp3", "libopus"],
    "acceptedContainers": ["mov", "ogg", "webm"],
    "acceptedVideoCodecs": ["h264"],
    "bframes": -1,
    "cqMode": "auto",
    "crf": 23,
    "gopSize": 0,
    "maxBitrate": "0",
    "preferredHwDevice": "auto",
    "preset": "ultrafast",
    "refs": 0,
    "targetAudioCodec": "aac",
    "targetResolution": "720",
    "targetVideoCodec": "h264",
    "temporalAQ": false,
    "threads": 0,
    "tonemap": "hable",
    "transcode": "required",
    "twoPass": false
  },
  "image": {
    "colorspace": "p3",
    "extractEmbedded": false,
    "fullsize": {
      "enabled": false,
      "format": "jpeg",
      "quality": 80
    },
    "preview": {
      "format": "jpeg",
      "quality": 80,
      "size": 1440
    },
    "thumbnail": {
      "format": "webp",
      "quality": 80,
      "size": 250
    }
  },
  "job": {
    "backgroundTask": {
      "concurrency": 5
    },
    "faceDetection": {
    "smartSearch": {
      "concurrency": 2
    },
    "library": {
      "concurrency": 5
    },
    "metadataExtraction": {
      "concurrency": 5
    },
    "migration": {
      "concurrency": 5
    },
    "notifications": {
      "concurrency": 5
    },
    "ocr": {
      "concurrency": 1
    "faceDetection": {
      "concurrency": 2
    },
    "search": {
      "concurrency": 5
@@ -93,23 +65,20 @@ The default configuration looks like this:
    "sidecar": {
      "concurrency": 5
    },
    "smartSearch": {
      "concurrency": 2
    "library": {
      "concurrency": 5
    },
    "migration": {
      "concurrency": 5
    },
    "thumbnailGeneration": {
      "concurrency": 3
    },
    "videoConversion": {
      "concurrency": 1
    }
  },
  "library": {
    "scan": {
      "cronExpression": "0 0 * * *",
      "enabled": true
    },
    "watch": {
      "enabled": false
    "notifications": {
      "concurrency": 5
    }
  },
  "logging": {
@@ -117,11 +86,8 @@ The default configuration looks like this:
    "level": "log"
  },
  "machineLearning": {
    "availabilityChecks": {
      "enabled": true,
      "interval": 30000,
      "timeout": 2000
    },
    "enabled": true,
    "urls": ["http://immich-machine-learning:3003"],
    "clip": {
      "enabled": true,
      "modelName": "ViT-B-32__openai"
@@ -130,59 +96,27 @@ The default configuration looks like this:
      "enabled": true,
      "maxDistance": 0.01
    },
    "enabled": true,
    "facialRecognition": {
      "enabled": true,
      "maxDistance": 0.5,
      "minFaces": 3,
      "modelName": "buffalo_l",
      "minScore": 0.7,
      "modelName": "buffalo_l"
    },
    "ocr": {
      "enabled": true,
      "maxResolution": 736,
      "minDetectionScore": 0.5,
      "minRecognitionScore": 0.8,
      "modelName": "PP-OCRv5_mobile"
    },
    "urls": ["http://immich-machine-learning:3003"]
      "maxDistance": 0.5,
      "minFaces": 3
    }
  },
  "map": {
    "darkStyle": "https://tiles.immich.cloud/v1/style/dark.json",
    "enabled": true,
    "lightStyle": "https://tiles.immich.cloud/v1/style/light.json"
    "lightStyle": "https://tiles.immich.cloud/v1/style/light.json",
    "darkStyle": "https://tiles.immich.cloud/v1/style/dark.json"
  },
  "reverseGeocoding": {
    "enabled": true
  },
  "metadata": {
    "faces": {
      "import": false
    }
  },
  "newVersionCheck": {
    "enabled": true
  },
  "nightlyTasks": {
    "clusterNewFaces": true,
    "databaseCleanup": true,
    "generateMemories": true,
    "missingThumbnails": true,
    "startTime": "00:00",
    "syncQuotaUsage": true
  },
  "notifications": {
    "smtp": {
      "enabled": false,
      "from": "",
      "replyTo": "",
      "transport": {
        "host": "",
        "ignoreCert": false,
        "password": "",
        "port": 587,
        "secure": false,
        "username": ""
      }
    }
  },
  "oauth": {
    "autoLaunch": false,
    "autoRegister": true,
@@ -194,44 +128,70 @@ The default configuration looks like this:
    "issuerUrl": "",
    "mobileOverrideEnabled": false,
    "mobileRedirectUri": "",
    "profileSigningAlgorithm": "none",
    "roleClaim": "immich_role",
    "scope": "openid email profile",
    "signingAlgorithm": "RS256",
    "profileSigningAlgorithm": "none",
    "storageLabelClaim": "preferred_username",
    "storageQuotaClaim": "immich_quota",
    "timeout": 30000,
    "tokenEndpointAuthMethod": "client_secret_post"
    "storageQuotaClaim": "immich_quota"
  },
  "passwordLogin": {
    "enabled": true
  },
  "reverseGeocoding": {
    "enabled": true
  },
  "server": {
    "externalDomain": "",
    "loginPageMessage": "",
    "publicUsers": true
  },
  "storageTemplate": {
    "enabled": false,
    "hashVerificationEnabled": true,
    "template": "{{y}}/{{y}}-{{MM}}-{{dd}}/{{filename}}"
  },
  "templates": {
    "email": {
      "albumInviteTemplate": "",
      "albumUpdateTemplate": "",
      "welcomeTemplate": ""
    }
  "image": {
    "thumbnail": {
      "format": "webp",
      "size": 250,
      "quality": 80
    },
    "preview": {
      "format": "jpeg",
      "size": 1440,
      "quality": 80
    },
    "colorspace": "p3",
    "extractEmbedded": false
  },
  "newVersionCheck": {
    "enabled": true
  },
  "trash": {
    "enabled": true,
    "days": 30
  },
  "theme": {
    "customCss": ""
  },
  "trash": {
    "days": 30,
    "enabled": true
  "library": {
    "scan": {
      "enabled": true,
      "cronExpression": "0 0 * * *"
    },
    "watch": {
      "enabled": false
    }
  },
  "server": {
    "externalDomain": "",
    "loginPageMessage": ""
  },
  "notifications": {
    "smtp": {
      "enabled": false,
      "from": "",
      "replyTo": "",
      "transport": {
        "ignoreCert": false,
        "host": "",
        "port": 587,
        "username": "",
        "password": ""
      }
    }
  },
  "user": {
    "deleteDelay": 7

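Immich can be pointed at a JSON file like this through the IMMICH_CONFIG_FILE environment variable. Before doing so, a minimal Python check that the candidate file parses (the filename below is a placeholder, not part of the diff):

```python
import json

# "immich-config.json" is a hypothetical path; use wherever your config actually lives.
with open("immich-config.json") as f:
    config = json.load(f)

print(sorted(config))                                  # top-level sections, e.g. ['backup', 'ffmpeg', ...]
print(config["backup"]["database"]["cronExpression"])  # "0 02 * * *" runs the backup daily at 02:00
```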
docs/static/archived-versions.json (vendored, 8 changes)
@@ -1,12 +1,4 @@
 [
-  {
-    "label": "v2.2.3",
-    "url": "https://docs.v2.2.3.archive.immich.app"
-  },
-  {
-    "label": "v2.2.2",
-    "url": "https://docs.v2.2.2.archive.immich.app"
-  },
   {
     "label": "v2.2.1",
     "url": "https://docs.v2.2.1.archive.immich.app"
@@ -35,7 +35,7 @@ services:
       - 2285:2285

   redis:
-    image: redis:6.2-alpine@sha256:37e002448575b32a599109664107e374c8709546905c372a34d64919043b9ceb
+    image: redis:6.2-alpine@sha256:77697a75da9f94e9357b61fcaf8345f69e3d9d32e9d15032c8415c21263977dc

   database:
     image: ghcr.io/immich-app/postgres:14-vectorchord0.3.0@sha256:6f3e9d2c2177af16c2988ff71425d79d89ca630ec2f9c8db03209ab716542338
@@ -1,6 +1,6 @@
 {
   "name": "immich-e2e",
-  "version": "2.2.3",
+  "version": "2.2.1",
   "description": "",
   "main": "index.js",
   "type": "module",
@@ -25,7 +25,7 @@
   "@playwright/test": "^1.44.1",
   "@socket.io/component-emitter": "^3.1.2",
   "@types/luxon": "^3.4.2",
-  "@types/node": "^22.18.13",
+  "@types/node": "^22.18.12",
   "@types/oidc-provider": "^9.0.0",
   "@types/pg": "^8.15.1",
   "@types/pngjs": "^6.0.4",
@@ -53,8 +53,5 @@
   },
-  "volta": {
-    "node": "24.11.0"
-  },
   "dependencies": {
     "structured-headers": "^2.0.2"
   }
 }

File diff suppressed because it is too large
@@ -15,6 +15,7 @@ import { DateTime } from 'luxon';
 import { randomBytes } from 'node:crypto';
 import { readFile, writeFile } from 'node:fs/promises';
 import { basename, join } from 'node:path';
+import sharp from 'sharp';
 import { Socket } from 'socket.io-client';
 import { createUserDto, uuidDto } from 'src/fixtures';
 import { makeRandomImage } from 'src/generators';
@@ -40,6 +41,40 @@ const today = DateTime.fromObject({
 }) as DateTime<true>;
 const yesterday = today.minus({ days: 1 });

+const createTestImageWithExif = async (filename: string, exifData: Record<string, any>) => {
+  // Generate unique color to ensure different checksums for each image
+  const r = Math.floor(Math.random() * 256);
+  const g = Math.floor(Math.random() * 256);
+  const b = Math.floor(Math.random() * 256);
+
+  // Create a 100x100 solid color JPEG using Sharp
+  const imageBytes = await sharp({
+    create: {
+      width: 100,
+      height: 100,
+      channels: 3,
+      background: { r, g, b },
+    },
+  })
+    .jpeg({ quality: 90 })
+    .toBuffer();
+
+  // Add random suffix to filename to avoid collisions
+  const uniqueFilename = filename.replace('.jpg', `-${randomBytes(4).toString('hex')}.jpg`);
+  const filepath = join(tempDir, uniqueFilename);
+  await writeFile(filepath, imageBytes);
+
+  // Filter out undefined values before writing EXIF
+  const cleanExifData = Object.fromEntries(Object.entries(exifData).filter(([, value]) => value !== undefined));
+
+  await exiftool.write(filepath, cleanExifData);
+
+  // Re-read the image bytes after EXIF has been written
+  const finalImageBytes = await readFile(filepath);
+
+  return { filepath, imageBytes: finalImageBytes, filename: uniqueFilename };
+};
+
 describe('/asset', () => {
   let admin: LoginResponseDto;
   let websocket: Socket;
@@ -1214,6 +1249,411 @@ describe('/asset', () => {
    });
  });

  describe('EXIF metadata extraction', () => {
    describe('Additional date tag extraction', () => {
      describe('Date-time vs time-only tag handling', () => {
        it('should fall back to file timestamps when only time-only tags are available', async () => {
          const { imageBytes, filename } = await createTestImageWithExif('time-only-fallback.jpg', {
            TimeCreated: '2023:11:15 14:30:00', // Time-only tag, should not be used for dateTimeOriginal
            // Exclude all date-time tags to force fallback to file timestamps
            SubSecDateTimeOriginal: undefined,
            DateTimeOriginal: undefined,
            SubSecCreateDate: undefined,
            SubSecMediaCreateDate: undefined,
            CreateDate: undefined,
            MediaCreateDate: undefined,
            CreationDate: undefined,
            DateTimeCreated: undefined,
            GPSDateTime: undefined,
            DateTimeUTC: undefined,
            SonyDateTime2: undefined,
            GPSDateStamp: undefined,
          });

          const oldDate = new Date('2020-01-01T00:00:00.000Z');
          const asset = await utils.createAsset(admin.accessToken, {
            assetData: {
              filename,
              bytes: imageBytes,
            },
            fileCreatedAt: oldDate.toISOString(),
            fileModifiedAt: oldDate.toISOString(),
          });

          await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

          const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

          expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
          // Should fall back to file timestamps, which we set to 2020-01-01
          expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
            new Date('2020-01-01T00:00:00.000Z').getTime(),
          );
        });

        it('should prefer DateTimeOriginal over time-only tags', async () => {
          const { imageBytes, filename } = await createTestImageWithExif('datetime-over-time.jpg', {
            DateTimeOriginal: '2023:10:10 10:00:00', // Should be preferred
            TimeCreated: '2023:11:15 14:30:00', // Should be ignored (time-only)
          });

          const asset = await utils.createAsset(admin.accessToken, {
            assetData: {
              filename,
              bytes: imageBytes,
            },
          });

          await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

          const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

          expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
          // Should use DateTimeOriginal, not TimeCreated
          expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
            new Date('2023-10-10T10:00:00.000Z').getTime(),
          );
        });
      });

      describe('GPSDateTime tag extraction', () => {
        it('should extract GPSDateTime with GPS coordinates', async () => {
          const { imageBytes, filename } = await createTestImageWithExif('gps-datetime.jpg', {
            GPSDateTime: '2023:11:15 12:30:00Z',
            GPSLatitude: 37.7749,
            GPSLongitude: -122.4194,
            // Exclude other date tags
            SubSecDateTimeOriginal: undefined,
            DateTimeOriginal: undefined,
            SubSecCreateDate: undefined,
            SubSecMediaCreateDate: undefined,
            CreateDate: undefined,
            MediaCreateDate: undefined,
            CreationDate: undefined,
            DateTimeCreated: undefined,
            TimeCreated: undefined,
          });

          const asset = await utils.createAsset(admin.accessToken, {
            assetData: {
              filename,
              bytes: imageBytes,
            },
          });

          await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

          const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

          expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
          expect(assetInfo.exifInfo?.latitude).toBeCloseTo(37.7749, 4);
          expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-122.4194, 4);
          expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
            new Date('2023-11-15T12:30:00.000Z').getTime(),
          );
        });
      });

      describe('CreateDate tag extraction', () => {
        it('should extract CreateDate when available', async () => {
          const { imageBytes, filename } = await createTestImageWithExif('create-date.jpg', {
            CreateDate: '2023:11:15 10:30:00',
            // Exclude other higher priority date tags
            SubSecDateTimeOriginal: undefined,
            DateTimeOriginal: undefined,
            SubSecCreateDate: undefined,
            SubSecMediaCreateDate: undefined,
            MediaCreateDate: undefined,
            CreationDate: undefined,
            DateTimeCreated: undefined,
            TimeCreated: undefined,
            GPSDateTime: undefined,
          });

          const asset = await utils.createAsset(admin.accessToken, {
            assetData: {
              filename,
              bytes: imageBytes,
            },
          });

          await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

          const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

          expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
          expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
            new Date('2023-11-15T10:30:00.000Z').getTime(),
          );
        });
      });

      describe('GPSDateStamp tag extraction', () => {
        it('should fall back to file timestamps when only date-only tags are available', async () => {
          const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp.jpg', {
            GPSDateStamp: '2023:11:15', // Date-only tag, should not be used for dateTimeOriginal
            // Note: NOT including GPSTimeStamp to avoid automatic GPSDateTime creation
            GPSLatitude: 51.5074,
            GPSLongitude: -0.1278,
            // Explicitly exclude all testable date-time tags to force fallback to file timestamps
            DateTimeOriginal: undefined,
            CreateDate: undefined,
            CreationDate: undefined,
            GPSDateTime: undefined,
          });

          const oldDate = new Date('2020-01-01T00:00:00.000Z');
          const asset = await utils.createAsset(admin.accessToken, {
            assetData: {
              filename,
              bytes: imageBytes,
            },
            fileCreatedAt: oldDate.toISOString(),
            fileModifiedAt: oldDate.toISOString(),
          });

          await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

          const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

          expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
          expect(assetInfo.exifInfo?.latitude).toBeCloseTo(51.5074, 4);
          expect(assetInfo.exifInfo?.longitude).toBeCloseTo(-0.1278, 4);
          // Should fall back to file timestamps, which we set to 2020-01-01
          expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
            new Date('2020-01-01T00:00:00.000Z').getTime(),
          );
        });
      });

      /*
       * NOTE: The following EXIF date tags are NOT effectively usable with JPEG test files:
       *
       * NOT WRITABLE to JPEG:
       * - MediaCreateDate: Can be read from video files but not written to JPEG
       * - DateTimeCreated: Read-only tag in JPEG format
       * - DateTimeUTC: Cannot be written to JPEG files
       * - SonyDateTime2: Proprietary Sony tag, not writable to JPEG
       * - SubSecMediaCreateDate: Tag not defined for JPEG format
       * - SourceImageCreateTime: Non-standard insta360 tag, not writable to JPEG
       *
       * WRITABLE but NOT READABLE from JPEG:
       * - SubSecDateTimeOriginal: Can be written but not read back from JPEG
       * - SubSecCreateDate: Can be written but not read back from JPEG
       *
       * EFFECTIVELY TESTABLE TAGS (writable and readable):
       * - DateTimeOriginal ✓
       * - CreateDate ✓
       * - CreationDate ✓
       * - GPSDateTime ✓
       *
       * The metadata service correctly handles non-readable tags and will fall back to
       * file timestamps when only non-readable tags are present.
       */

      describe('Date tag priority order', () => {
        it('should respect the complete date tag priority order', async () => {
          // Test cases using only EFFECTIVELY TESTABLE tags (writable AND readable from JPEG)
          const testCases = [
            {
              name: 'DateTimeOriginal has highest priority among testable tags',
              exifData: {
                DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
                CreateDate: '2023:05:05 05:00:00', // TESTABLE
                CreationDate: '2023:07:07 07:00:00', // TESTABLE
                GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
              },
              expectedDate: '2023-04-04T04:00:00.000Z',
            },
            {
              name: 'CreationDate when DateTimeOriginal missing',
              exifData: {
                CreationDate: '2023:05:05 05:00:00', // TESTABLE
                CreateDate: '2023:07:07 07:00:00', // TESTABLE
                GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
              },
              expectedDate: '2023-05-05T05:00:00.000Z',
            },
            {
              name: 'CreationDate when standard EXIF tags missing',
              exifData: {
                CreationDate: '2023:07:07 07:00:00', // TESTABLE
                GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
              },
              expectedDate: '2023-07-07T07:00:00.000Z',
            },
            {
              name: 'GPSDateTime when no other testable date tags present',
              exifData: {
                GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
                Make: 'SONY',
              },
              expectedDate: '2023-10-10T10:00:00.000Z',
            },
          ];

          for (const testCase of testCases) {
            const { imageBytes, filename } = await createTestImageWithExif(
              `${testCase.name.replaceAll(/\s+/g, '-').toLowerCase()}.jpg`,
              testCase.exifData,
            );

            const asset = await utils.createAsset(admin.accessToken, {
              assetData: {
                filename,
                bytes: imageBytes,
              },
            });

            await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

            const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

            expect(assetInfo.exifInfo?.dateTimeOriginal, `Failed for: ${testCase.name}`).toBeDefined();
            expect(
              new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime(),
              `Date mismatch for: ${testCase.name}`,
            ).toBe(new Date(testCase.expectedDate).getTime());
          }
        });
      });

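Read together, these cases pin down the fallback chain the metadata service walks when choosing dateTimeOriginal. A minimal sketch of that chain, written in Python for brevity; the tag order and the parse format are assumptions drawn from the tests above, not from the service code itself:

```python
from datetime import datetime, timezone

# Priority order implied by the test cases above (testable tags only).
EXIF_DATE_TAG_PRIORITY = ["DateTimeOriginal", "CreationDate", "CreateDate", "GPSDateTime"]

def pick_date_time_original(tags: dict, file_created_at: datetime) -> datetime:
    for tag in EXIF_DATE_TAG_PRIORITY:
        value = tags.get(tag)
        if not value:
            continue
        try:
            # EXIF-style timestamp, e.g. "2023:10:10 10:00:00" (a trailing "Z" is stripped)
            parsed = datetime.strptime(value.rstrip("Z"), "%Y:%m:%d %H:%M:%S")
            return parsed.replace(tzinfo=timezone.utc)
        except ValueError:
            continue  # invalid dates (e.g. month 13) fall through to the next tag
    return file_created_at  # no usable tag: fall back to the file timestamps

# Mirrors the "skip invalid date formats" case below: CreationDate is malformed, GPSDateTime wins.
tags = {"CreationDate": "2023:13:13 13:00:00", "GPSDateTime": "2023:10:10 10:00:00"}
print(pick_date_time_original(tags, datetime(2020, 1, 1, tzinfo=timezone.utc)))
# -> 2023-10-10 10:00:00+00:00
```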
      describe('Edge cases for date tag handling', () => {
        it('should fall back to file timestamps with GPSDateStamp alone', async () => {
          const { imageBytes, filename } = await createTestImageWithExif('gps-datestamp-only.jpg', {
            GPSDateStamp: '2023:08:08', // Date-only tag, should not be used for dateTimeOriginal
            // Intentionally no GPSTimeStamp
            // Exclude all other date tags
            SubSecDateTimeOriginal: undefined,
            DateTimeOriginal: undefined,
            SubSecCreateDate: undefined,
            SubSecMediaCreateDate: undefined,
            CreateDate: undefined,
            MediaCreateDate: undefined,
            CreationDate: undefined,
            DateTimeCreated: undefined,
            TimeCreated: undefined,
            GPSDateTime: undefined,
            DateTimeUTC: undefined,
          });

          const oldDate = new Date('2020-01-01T00:00:00.000Z');
          const asset = await utils.createAsset(admin.accessToken, {
            assetData: {
              filename,
              bytes: imageBytes,
            },
            fileCreatedAt: oldDate.toISOString(),
            fileModifiedAt: oldDate.toISOString(),
          });

          await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

          const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

          expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
          // Should fall back to file timestamps, which we set to 2020-01-01
          expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
            new Date('2020-01-01T00:00:00.000Z').getTime(),
          );
        });

        it('should handle all testable date tags present to verify complete priority order', async () => {
          const { imageBytes, filename } = await createTestImageWithExif('all-testable-date-tags.jpg', {
            // All TESTABLE date tags to JPEG format (writable AND readable)
            DateTimeOriginal: '2023:04:04 04:00:00', // TESTABLE - highest priority among readable tags
            CreateDate: '2023:05:05 05:00:00', // TESTABLE
            CreationDate: '2023:07:07 07:00:00', // TESTABLE
            GPSDateTime: '2023:10:10 10:00:00', // TESTABLE
            // Note: Excluded non-testable tags:
            // SubSec tags: writable but not readable from JPEG
            // Non-writable tags: MediaCreateDate, DateTimeCreated, DateTimeUTC, SonyDateTime2, etc.
            // Time-only/date-only tags: already excluded from EXIF_DATE_TAGS
          });

          const asset = await utils.createAsset(admin.accessToken, {
            assetData: {
              filename,
              bytes: imageBytes,
            },
          });

          await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

          const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

          expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
          // Should use DateTimeOriginal as it has the highest priority among testable tags
          expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
            new Date('2023-04-04T04:00:00.000Z').getTime(),
          );
        });

        it('should use CreationDate when SubSec tags are missing', async () => {
          const { imageBytes, filename } = await createTestImageWithExif('creation-date-priority.jpg', {
            CreationDate: '2023:07:07 07:00:00', // WRITABLE
            GPSDateTime: '2023:10:10 10:00:00', // WRITABLE
            // Note: DateTimeCreated, DateTimeUTC, SonyDateTime2 are NOT writable to JPEG
            // Note: TimeCreated and GPSDateStamp are excluded from EXIF_DATE_TAGS (time-only/date-only)
            // Exclude SubSec and standard EXIF tags
            SubSecDateTimeOriginal: undefined,
            DateTimeOriginal: undefined,
            SubSecCreateDate: undefined,
            CreateDate: undefined,
          });

          const asset = await utils.createAsset(admin.accessToken, {
            assetData: {
              filename,
              bytes: imageBytes,
            },
          });

          await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

          const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

          expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
          // Should use CreationDate when available
          expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
            new Date('2023-07-07T07:00:00.000Z').getTime(),
          );
        });

        it('should skip invalid date formats and use next valid tag', async () => {
          const { imageBytes, filename } = await createTestImageWithExif('invalid-date-handling.jpg', {
            // Note: Testing invalid date handling with only WRITABLE tags
            GPSDateTime: '2023:10:10 10:00:00', // WRITABLE - Valid date
            CreationDate: '2023:13:13 13:00:00', // WRITABLE - Invalid date (month 13), should be skipped
            // Note: TimeCreated excluded (time-only), DateTimeCreated not writable to JPEG
            // Exclude other date tags
            SubSecDateTimeOriginal: undefined,
            DateTimeOriginal: undefined,
            SubSecCreateDate: undefined,
            CreateDate: undefined,
          });

          const asset = await utils.createAsset(admin.accessToken, {
            assetData: {
              filename,
              bytes: imageBytes,
            },
          });

          await utils.waitForWebsocketEvent({ event: 'assetUpload', id: asset.id });

          const assetInfo = await getAssetInfo({ id: asset.id }, { headers: asBearerAuth(admin.accessToken) });

          expect(assetInfo.exifInfo?.dateTimeOriginal).toBeDefined();
          // Should skip invalid dates and use the first valid one (GPSDateTime)
          expect(new Date(assetInfo.exifInfo!.dateTimeOriginal!).getTime()).toBe(
            new Date('2023-10-10T10:00:00.000Z').getTime(),
          );
        });
      });
    });
  });

  describe('POST /assets/exist', () => {
    it('ignores invalid deviceAssetIds', async () => {
      const response = await utils.checkExistingAssets(user1.accessToken, {
e2e/src/generate-date-tag-test-images.ts (new file, 178 lines)
@@ -0,0 +1,178 @@
#!/usr/bin/env node

/**
 * Script to generate test images with additional EXIF date tags
 * This creates actual JPEG images with embedded metadata for testing
 * Images are generated into e2e/test-assets/metadata/dates/
 */

import { execSync } from 'node:child_process';
import { writeFileSync } from 'node:fs';
import { dirname, join } from 'node:path';
import { fileURLToPath } from 'node:url';
import sharp from 'sharp';

interface TestImage {
  filename: string;
  description: string;
  exifTags: Record<string, string>;
}

const testImages: TestImage[] = [
  {
    filename: 'time-created.jpg',
    description: 'Image with TimeCreated tag',
    exifTags: {
      TimeCreated: '2023:11:15 14:30:00',
      Make: 'Canon',
      Model: 'EOS R5',
    },
  },
  {
    filename: 'gps-datetime.jpg',
    description: 'Image with GPSDateTime and coordinates',
    exifTags: {
      GPSDateTime: '2023:11:15 12:30:00Z',
      GPSLatitude: '37.7749',
      GPSLongitude: '-122.4194',
      GPSLatitudeRef: 'N',
      GPSLongitudeRef: 'W',
    },
  },
  {
    filename: 'datetime-utc.jpg',
    description: 'Image with DateTimeUTC tag',
    exifTags: {
      DateTimeUTC: '2023:11:15 10:30:00',
      Make: 'Nikon',
      Model: 'D850',
    },
  },
  {
    filename: 'gps-datestamp.jpg',
    description: 'Image with GPSDateStamp and GPSTimeStamp',
    exifTags: {
      GPSDateStamp: '2023:11:15',
      GPSTimeStamp: '08:30:00',
      GPSLatitude: '51.5074',
      GPSLongitude: '-0.1278',
      GPSLatitudeRef: 'N',
      GPSLongitudeRef: 'W',
    },
  },
  {
    filename: 'sony-datetime2.jpg',
    description: 'Sony camera image with SonyDateTime2 tag',
    exifTags: {
      SonyDateTime2: '2023:11:15 06:30:00',
      Make: 'SONY',
      Model: 'ILCE-7RM5',
    },
  },
  {
    filename: 'date-priority-test.jpg',
    description: 'Image with multiple date tags to test priority',
    exifTags: {
      SubSecDateTimeOriginal: '2023:01:01 01:00:00',
      DateTimeOriginal: '2023:02:02 02:00:00',
      SubSecCreateDate: '2023:03:03 03:00:00',
      CreateDate: '2023:04:04 04:00:00',
      CreationDate: '2023:05:05 05:00:00',
      DateTimeCreated: '2023:06:06 06:00:00',
      TimeCreated: '2023:07:07 07:00:00',
      GPSDateTime: '2023:08:08 08:00:00',
      DateTimeUTC: '2023:09:09 09:00:00',
      GPSDateStamp: '2023:10:10',
      SonyDateTime2: '2023:11:11 11:00:00',
    },
  },
  {
    filename: 'new-tags-only.jpg',
    description: 'Image with only additional date tags (no standard tags)',
    exifTags: {
      TimeCreated: '2023:12:01 15:45:30',
      GPSDateTime: '2023:12:01 13:45:30Z',
      DateTimeUTC: '2023:12:01 13:45:30',
      GPSDateStamp: '2023:12:01',
      SonyDateTime2: '2023:12:01 08:45:30',
      GPSLatitude: '40.7128',
      GPSLongitude: '-74.0060',
      GPSLatitudeRef: 'N',
      GPSLongitudeRef: 'W',
    },
  },
];

const generateTestImages = async (): Promise<void> => {
  // Target directory: e2e/test-assets/metadata/dates/
  // Current file is in: e2e/src/
  const __filename = fileURLToPath(import.meta.url);
  const __dirname = dirname(__filename);
  const targetDir = join(__dirname, '..', 'test-assets', 'metadata', 'dates');

  console.log('Generating test images with additional EXIF date tags...');
  console.log(`Target directory: ${targetDir}`);

  for (const image of testImages) {
    try {
      const imagePath = join(targetDir, image.filename);

      // Create unique JPEG file using Sharp
      const r = Math.floor(Math.random() * 256);
      const g = Math.floor(Math.random() * 256);
      const b = Math.floor(Math.random() * 256);

      const jpegData = await sharp({
        create: {
          width: 100,
          height: 100,
          channels: 3,
          background: { r, g, b },
        },
      })
        .jpeg({ quality: 90 })
        .toBuffer();

      writeFileSync(imagePath, jpegData);

      // Build exiftool command to add EXIF data
      const exifArgs = Object.entries(image.exifTags)
        .map(([tag, value]) => `-${tag}="${value}"`)
        .join(' ');

      const command = `exiftool ${exifArgs} -overwrite_original "${imagePath}"`;

      console.log(`Creating ${image.filename}: ${image.description}`);
      execSync(command, { stdio: 'pipe' });

      // Verify the tags were written
      const verifyCommand = `exiftool -json "${imagePath}"`;
      const result = execSync(verifyCommand, { encoding: 'utf8' });
      const metadata = JSON.parse(result)[0];

      console.log(`  ✓ Created with ${Object.keys(image.exifTags).length} EXIF tags`);

      // Log first date tag found for verification
      const firstDateTag = Object.keys(image.exifTags).find(
        (tag) => tag.includes('Date') || tag.includes('Time') || tag.includes('Created'),
      );
      if (firstDateTag && metadata[firstDateTag]) {
        console.log(`  ✓ Verified ${firstDateTag}: ${metadata[firstDateTag]}`);
      }
    } catch (error) {
      console.error(`Failed to create ${image.filename}:`, (error as Error).message);
    }
  }

  console.log('\nTest image generation complete!');
  console.log('Files created in:', targetDir);
  console.log('\nTo test these images:');
  console.log(`cd ${targetDir} && exiftool -time:all -gps:all *.jpg`);
};

export { generateTestImages };

// Run the generator if this file is executed directly
if (import.meta.url === `file://${process.argv[1]}`) {
  generateTestImages().catch(console.error);
}
@@ -561,16 +561,6 @@ export const utils = {
     await utils.waitForQueueFinish(accessToken, 'sidecar');
     await utils.waitForQueueFinish(accessToken, 'metadataExtraction');
   },
-
-  downloadAsset: async (accessToken: string, id: string) => {
-    const downloadedRes = await fetch(`${baseUrl}/api/assets/${id}/original`, {
-      headers: asBearerAuth(accessToken),
-    });
-    if (!downloadedRes.ok) {
-      throw new Error(`Failed to download asset ${id}: ${downloadedRes.status} ${await downloadedRes.text()}`);
-    }
-    return await downloadedRes.blob();
-  },
 };

 utils.initSdk();

Submodule e2e/test-assets updated: 163c251744...37f60ea537
@@ -1,10 +1,8 @@
 from typing import Any

-import cv2
 import numpy as np
-from numpy.typing import NDArray
 from PIL import Image
-from rapidocr.ch_ppocr_det.utils import DBPostProcess
+from rapidocr.ch_ppocr_det import TextDetector as RapidTextDetector
 from rapidocr.inference_engine.base import FileInfo, InferSession
 from rapidocr.utils import DownloadFile, DownloadFileInput
 from rapidocr.utils.typings import EngineType, LangDet, OCRVersion, TaskType
@@ -12,10 +10,11 @@ from rapidocr.utils.typings import ModelType as RapidModelType

 from immich_ml.config import log
 from immich_ml.models.base import InferenceModel
+from immich_ml.models.transforms import decode_cv2
 from immich_ml.schemas import ModelFormat, ModelSession, ModelTask, ModelType
 from immich_ml.sessions.ort import OrtSession

-from .schemas import TextDetectionOutput
+from .schemas import OcrOptions, TextDetectionOutput


 class TextDetector(InferenceModel):
@@ -25,20 +24,13 @@ class TextDetector(InferenceModel):
     def __init__(self, model_name: str, **model_kwargs: Any) -> None:
         super().__init__(model_name, **model_kwargs, model_format=ModelFormat.ONNX)
         self.max_resolution = 736
-        self.mean = np.array([0.5, 0.5, 0.5], dtype=np.float32)
-        self.std_inv = np.float32(1.0) / (np.array([0.5, 0.5, 0.5], dtype=np.float32) * 255.0)
+        self.min_score = 0.5
+        self.score_mode = "fast"
         self._empty: TextDetectionOutput = {
+            "image": np.empty(0, dtype=np.float32),
             "boxes": np.empty(0, dtype=np.float32),
             "scores": np.empty(0, dtype=np.float32),
         }
-        self.postprocess = DBPostProcess(
-            thresh=0.3,
-            box_thresh=model_kwargs.get("minScore", 0.5),
-            max_candidates=1000,
-            unclip_ratio=1.6,
-            use_dilation=True,
-            score_mode="fast",
-        )

     def _download(self) -> None:
         model_info = InferSession.get_model_url(
@@ -60,65 +52,35 @@ class TextDetector(InferenceModel):

     def _load(self) -> ModelSession:
         # TODO: support other runtime sessions
-        return OrtSession(self.model_path)
+        session = OrtSession(self.model_path)
+        self.model = RapidTextDetector(
+            OcrOptions(
+                session=session.session,
+                limit_side_len=self.max_resolution,
+                limit_type="min",
+                box_thresh=self.min_score,
+                score_mode=self.score_mode,
+            )
+        )
+        return session

     # partly adapted from RapidOCR
-    def _predict(self, inputs: Image.Image) -> TextDetectionOutput:
-        w, h = inputs.size
-        if w < 32 or h < 32:
-            return self._empty
-        out = self.session.run(None, {"x": self._transform(inputs)})[0]
-        boxes, scores = self.postprocess(out, (h, w))
-        if len(boxes) == 0:
+    def _predict(self, inputs: bytes | Image.Image) -> TextDetectionOutput:
+        results = self.model(decode_cv2(inputs))
+        if results.boxes is None or results.scores is None or results.img is None:
             return self._empty
         return {
-            "boxes": self.sorted_boxes(boxes),
-            "scores": np.array(scores, dtype=np.float32),
+            "image": results.img,
+            "boxes": np.array(results.boxes, dtype=np.float32),
+            "scores": np.array(results.scores, dtype=np.float32),
         }

-    # adapted from RapidOCR
-    def _transform(self, img: Image.Image) -> NDArray[np.float32]:
-        if img.height < img.width:
-            ratio = float(self.max_resolution) / img.height
-        else:
-            ratio = float(self.max_resolution) / img.width
-
-        resize_h = int(img.height * ratio)
-        resize_w = int(img.width * ratio)
-
-        resize_h = int(round(resize_h / 32) * 32)
-        resize_w = int(round(resize_w / 32) * 32)
-        resized_img = img.resize((int(resize_w), int(resize_h)), resample=Image.Resampling.LANCZOS)
-
-        img_np: NDArray[np.float32] = cv2.cvtColor(np.array(resized_img, dtype=np.float32), cv2.COLOR_RGB2BGR)  # type: ignore
-        img_np -= self.mean
-        img_np *= self.std_inv
-        img_np = np.transpose(img_np, (2, 0, 1))
-        return np.expand_dims(img_np, axis=0)
-
-    def sorted_boxes(self, dt_boxes: NDArray[np.float32]) -> NDArray[np.float32]:
-        if len(dt_boxes) == 0:
-            return dt_boxes
-
-        # Sort by y, then identify lines, then sort by (line, x)
-        y_order = np.argsort(dt_boxes[:, 0, 1], kind="stable")
-        sorted_y = dt_boxes[y_order, 0, 1]
-
-        line_ids = np.empty(len(dt_boxes), dtype=np.int32)
-        line_ids[0] = 0
-        np.cumsum(np.abs(np.diff(sorted_y)) >= 10, out=line_ids[1:])
-
-        # Create composite sort key for final ordering
-        # Shift line_ids by large factor, add x for tie-breaking
-        sort_key = line_ids[y_order] * 1e6 + dt_boxes[y_order, 0, 0]
-        final_order = np.argsort(sort_key, kind="stable")
-        sorted_boxes: NDArray[np.float32] = dt_boxes[y_order[final_order]]
-        return sorted_boxes

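The sorted_boxes helper above implements a reading-order sort: boxes are bucketed into text lines wherever consecutive top-edge y values differ by 10 px or more, then ordered left to right within each line. A standalone sketch of the same idea on toy data (illustration only, not Immich code):

```python
import numpy as np

# Four 4-point boxes (only the top-left corner matters for the sort):
# two on one text line (y near 0), two on the next (y near 40).
boxes = np.array(
    [
        [[50.0, 2.0], [60, 2], [60, 10], [50, 10]],
        [[10.0, 40.0], [20, 40], [20, 48], [10, 48]],
        [[5.0, 0.0], [15, 0], [15, 8], [5, 8]],
        [[60.0, 42.0], [70, 42], [70, 50], [60, 50]],
    ],
    dtype=np.float32,
)

y = boxes[:, 0, 1]                    # top-left y of each box
order = np.argsort(y, kind="stable")  # coarse top-to-bottom order
line_ids = np.concatenate([[0], np.cumsum(np.abs(np.diff(y[order])) >= 10)])
# Within a line, sort by x; across lines, keep the line order.
key = line_ids * 1e6 + boxes[order, 0, 0]
result = boxes[order][np.argsort(key, kind="stable")]
print(result[:, 0])  # top-left corners come out line by line, left to right
```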
     def configure(self, **kwargs: Any) -> None:
         if (max_resolution := kwargs.get("maxResolution")) is not None:
             self.max_resolution = max_resolution
+            self.model.limit_side_len = max_resolution
         if (min_score := kwargs.get("minScore")) is not None:
-            self.postprocess.box_thresh = min_score
+            self.min_score = min_score
+            self.model.postprocess_op.box_thresh = min_score
         if (score_mode := kwargs.get("scoreMode")) is not None:
-            self.postprocess.score_mode = score_mode
+            self.score_mode = score_mode
+            self.model.postprocess_op.score_mode = score_mode

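On both sides of this change, configure() is the bridge between the server's machineLearning.ocr settings shown earlier (maxResolution, detection score, score mode) and the loaded model. A hypothetical call, assuming an already-constructed TextDetector bound to the name detector:

```python
# Hypothetical usage; the kwarg names mirror the keys handled in configure() above.
detector.configure(maxResolution=960, minScore=0.6, scoreMode="slow")
```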
@@ -1,8 +1,9 @@
 from typing import Any

+import cv2
 import numpy as np
 from numpy.typing import NDArray
-from PIL import Image
+from PIL.Image import Image
 from rapidocr.ch_ppocr_rec import TextRecInput
 from rapidocr.ch_ppocr_rec import TextRecognizer as RapidTextRecognizer
 from rapidocr.inference_engine.base import FileInfo, InferSession
@@ -13,7 +14,6 @@ from rapidocr.utils.vis_res import VisRes

 from immich_ml.config import log, settings
 from immich_ml.models.base import InferenceModel
-from immich_ml.models.transforms import pil_to_cv2
 from immich_ml.schemas import ModelFormat, ModelSession, ModelTask, ModelType
 from immich_ml.sessions.ort import OrtSession

@@ -65,16 +65,17 @@ class TextRecognizer(InferenceModel):
         )
         return session

-    def _predict(self, img: Image.Image, texts: TextDetectionOutput) -> TextRecognitionOutput:
-        boxes, box_scores = texts["boxes"], texts["scores"]
+    def _predict(self, _: Image, texts: TextDetectionOutput) -> TextRecognitionOutput:
+        boxes, img, box_scores = texts["boxes"], texts["image"], texts["scores"]
         if boxes.shape[0] == 0:
             return self._empty
         rec = self.model(TextRecInput(img=self.get_crop_img_list(img, boxes)))
         if rec.txts is None:
             return self._empty

-        boxes[:, :, 0] /= img.width
-        boxes[:, :, 1] /= img.height
+        height, width = img.shape[0:2]
+        boxes[:, :, 0] /= width
+        boxes[:, :, 1] /= height

         text_scores = np.array(rec.scores)
         valid_text_score_idx = text_scores > self.min_score
@@ -86,7 +87,7 @@ class TextRecognizer(InferenceModel):
             "textScore": text_scores[valid_text_score_idx],
         }

-    def get_crop_img_list(self, img: Image.Image, boxes: NDArray[np.float32]) -> list[NDArray[np.uint8]]:
+    def get_crop_img_list(self, img: NDArray[np.float32], boxes: NDArray[np.float32]) -> list[NDArray[np.float32]]:
         img_crop_width = np.maximum(
             np.linalg.norm(boxes[:, 1] - boxes[:, 0], axis=1), np.linalg.norm(boxes[:, 2] - boxes[:, 3], axis=1)
         ).astype(np.int32)
@@ -97,55 +98,22 @@ class TextRecognizer(InferenceModel):
         pts_std[:, 1:3, 0] = img_crop_width[:, None]
         pts_std[:, 2:4, 1] = img_crop_height[:, None]

-        img_crop_sizes = np.stack([img_crop_width, img_crop_height], axis=1)
-        all_coeffs = self._get_perspective_transform(pts_std, boxes)
-        imgs: list[NDArray[np.uint8]] = []
-        for coeffs, dst_size in zip(all_coeffs, img_crop_sizes):
-            dst_img = img.transform(
-                size=tuple(dst_size),
-                method=Image.Transform.PERSPECTIVE,
-                data=tuple(coeffs),
-                resample=Image.Resampling.BICUBIC,
-            )
-
-            dst_width, dst_height = dst_img.size
+        img_crop_sizes = np.stack([img_crop_width, img_crop_height], axis=1).tolist()
+        imgs: list[NDArray[np.float32]] = []
+        for box, pts_std, dst_size in zip(list(boxes), list(pts_std), img_crop_sizes):
+            M = cv2.getPerspectiveTransform(box, pts_std)
+            dst_img: NDArray[np.float32] = cv2.warpPerspective(
+                img,
+                M,
+                dst_size,
+                borderMode=cv2.BORDER_REPLICATE,
+                flags=cv2.INTER_CUBIC,
+            )  # type: ignore
+            dst_height, dst_width = dst_img.shape[0:2]
             if dst_height * 1.0 / dst_width >= 1.5:
-                dst_img = dst_img.rotate(90, expand=True)
-            imgs.append(pil_to_cv2(dst_img))
+                dst_img = np.rot90(dst_img)
+            imgs.append(dst_img)
         return imgs

-    def _get_perspective_transform(self, src: NDArray[np.float32], dst: NDArray[np.float32]) -> NDArray[np.float32]:
-        N = src.shape[0]
-        x, y = src[:, :, 0], src[:, :, 1]
-        u, v = dst[:, :, 0], dst[:, :, 1]
-        A = np.zeros((N, 8, 9), dtype=np.float32)
-
-        # Fill even rows (0, 2, 4, 6): [x, y, 1, 0, 0, 0, -u*x, -u*y, -u]
-        A[:, ::2, 0] = x
-        A[:, ::2, 1] = y
-        A[:, ::2, 2] = 1
-        A[:, ::2, 6] = -u * x
-        A[:, ::2, 7] = -u * y
-        A[:, ::2, 8] = -u
-
-        # Fill odd rows (1, 3, 5, 7): [0, 0, 0, x, y, 1, -v*x, -v*y, -v]
-        A[:, 1::2, 3] = x
-        A[:, 1::2, 4] = y
-        A[:, 1::2, 5] = 1
-        A[:, 1::2, 6] = -v * x
-        A[:, 1::2, 7] = -v * y
-        A[:, 1::2, 8] = -v
-
-        # Solve using SVD for all matrices at once
-        _, _, Vt = np.linalg.svd(A)
-        H = Vt[:, -1, :].reshape(N, 3, 3)
-        H = H / H[:, 2:3, 2:3]
-
-        # Extract the 8 coefficients for each transformation
-        return np.column_stack(
-            [H[:, 0, 0], H[:, 0, 1], H[:, 0, 2], H[:, 1, 0], H[:, 1, 1], H[:, 1, 2], H[:, 2, 0], H[:, 2, 1]]
-        )  # pyright: ignore[reportReturnType]
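For reference, _get_perspective_transform batches the standard direct linear transform: each corner correspondence (x, y) to (u, v) under the homography H satisfies

$$
u = \frac{h_{11}x + h_{12}y + h_{13}}{h_{31}x + h_{32}y + h_{33}},
\qquad
v = \frac{h_{21}x + h_{22}y + h_{23}}{h_{31}x + h_{32}y + h_{33}},
$$

which rearranges into the two homogeneous rows built above,

$$
\begin{aligned}
h_{11}x + h_{12}y + h_{13} - u x\,h_{31} - u y\,h_{32} - u\,h_{33} &= 0,\\
h_{21}x + h_{22}y + h_{23} - v x\,h_{31} - v y\,h_{32} - v\,h_{33} &= 0.
\end{aligned}
$$

Four corners give an 8x9 system per box; its null vector (the last right-singular vector from the SVD) is H up to scale, and dividing by h33 normalizes it so the first eight entries are exactly the coefficients returned.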

     def configure(self, **kwargs: Any) -> None:
         self.min_score = kwargs.get("minScore", self.min_score)

@@ -7,6 +7,7 @@ from typing_extensions import TypedDict


 class TextDetectionOutput(TypedDict):
+    image: npt.NDArray[np.float32]
     boxes: npt.NDArray[np.float32]
     scores: npt.NDArray[np.float32]

@@ -1,6 +1,6 @@
 [project]
 name = "immich-ml"
-version = "2.2.3"
+version = "2.2.1"
 description = ""
 authors = [{ name = "Hau Tran", email = "alex.tran1502@gmail.com" }]
 requires-python = ">=3.10,<4.0"
@@ -88,6 +88,7 @@ if [ "$CURRENT_MOBILE" != "$NEXT_MOBILE" ]; then
 fi

 sed -i "s/\"android\.injected\.version\.name\" => \"$CURRENT_SERVER\",/\"android\.injected\.version\.name\" => \"$NEXT_SERVER\",/" mobile/android/fastlane/Fastfile
+sed -i "s/version_number: \"$CURRENT_SERVER\"$/version_number: \"$NEXT_SERVER\"/" mobile/ios/fastlane/Fastfile
 sed -i "s/\"android\.injected\.version\.code\" => $CURRENT_MOBILE,/\"android\.injected\.version\.code\" => $NEXT_MOBILE,/" mobile/android/fastlane/Fastfile
 sed -i "s/^version: $CURRENT_SERVER+$CURRENT_MOBILE$/version: $NEXT_SERVER+$NEXT_MOBILE/" mobile/pubspec.yaml

@@ -35,8 +35,8 @@ platform :android do
       task: 'bundle',
       build_type: 'Release',
       properties: {
-        "android.injected.version.code" => 3026,
-        "android.injected.version.name" => "2.2.3",
+        "android.injected.version.code" => 3024,
+        "android.injected.version.name" => "2.2.1",
       }
     )
     upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')
@@ -32,17 +32,6 @@ platform :ios do
     )
   end

-  # Helper method to get version from pubspec.yaml
-  def get_version_from_pubspec
-    require 'yaml'
-
-    pubspec_path = File.join(Dir.pwd, "../..", "pubspec.yaml")
-    pubspec = YAML.load_file(pubspec_path)
-
-    version_string = pubspec['version']
-    version_string ? version_string.split('+').first : nil
-  end
-
   # Helper method to configure code signing for all targets
   def configure_code_signing(bundle_id_suffix: "")
     bundle_suffix = bundle_id_suffix.empty? ? "" : ".#{bundle_id_suffix}"
@@ -169,8 +158,7 @@
     # Build and upload with version number
     build_and_upload(
       api_key: api_key,
-      version_number: get_version_from_pubspec,
       distribute_external: false,
+      version_number: "2.1.0"
     )
   end

@@ -180,9 +168,8 @@
       path: "./Runner.xcodeproj",
       targets: ["Runner", "ShareExtension", "WidgetExtension"]
     )

     increment_version_number(
-      version_number: get_version_from_pubspec
+      version_number: "2.2.1"
     )
     increment_build_number(
       build_number: latest_testflight_build_number + 1,
@@ -239,7 +239,7 @@ class BackgroundWorkerBgService extends BackgroundWorkerFlutterApi {
       final networkCapabilities = await _ref?.read(connectivityApiProvider).getCapabilities() ?? [];
       return _ref
           ?.read(uploadServiceProvider)
-          .startBackupWithHttpClient(currentUser.id, networkCapabilities.isUnmetered, _cancellationToken);
+          .startBackupWithHttpClient(currentUser.id, networkCapabilities.hasWifi, _cancellationToken);
     },
     (error, stack) {
       dPrint(() => "Error in backup zone $error, $stack");

@@ -132,8 +132,7 @@ class SyncStreamService {
         return;
       // SyncCompleteV1 is used to signal the completion of the sync process. Cleanup stale assets and signal completion
       case SyncEntityType.syncCompleteV1:
-        return;
-      // return _syncStreamRepository.pruneAssets();
+        return _syncStreamRepository.pruneAssets();
      // Request to reset the client state. Clear everything related to remote entities
      case SyncEntityType.syncResetV1:
        return _syncStreamRepository.reset();

@@ -65,7 +65,7 @@ class SplashScreenPageState extends ConsumerState<SplashScreenPage> {
     if (Store.isBetaTimelineEnabled) {
       bool syncSuccess = false;
       await Future.wait([
-        backgroundManager.syncLocal(full: true),
+        backgroundManager.syncLocal(),
         backgroundManager.syncRemote().then((success) => syncSuccess = success),
       ]);

@@ -67,7 +67,7 @@ class ServerInfoNotifier extends StateNotifier<ServerInfo> {
       return;
     }

-    if (clientVersion < serverVersion && clientVersion.differenceType(serverVersion) != SemVerType.patch) {
+    if (clientVersion < serverVersion) {
       state = state.copyWith(versionStatus: VersionStatus.clientOutOfDate);
       return;
     }

@@ -89,16 +89,9 @@ class AssetMediaRepository {
       return null;
     }

-    try {
-      // titleAsync gets the correct original filename for some assets on iOS
-      // otherwise using the `entity.title` would return a random GUID
-      final originalFilename = await entity.titleAsync;
-      // treat empty filename as missing
-      return originalFilename.isNotEmpty ? originalFilename : null;
-    } catch (e) {
-      _log.warning("Failed to get original filename for asset: $id. Error: $e");
-      return null;
-    }
+    // titleAsync gets the correct original filename for some assets on iOS
+    // otherwise using the `entity.title` would return a random GUID
+    return await entity.titleAsync;
   }

   // TODO: make this more efficient

@@ -1,5 +1,3 @@
-enum SemVerType { major, minor, patch }
-
 class SemVer {
   final int major;
   final int minor;
@@ -17,20 +15,8 @@ class SemVer {
   }

   factory SemVer.fromString(String version) {
-    if (version.toLowerCase().startsWith("v")) {
-      version = version.substring(1);
-    }
-
     final parts = version.split("-")[0].split('.');
-    if (parts.length != 3) {
-      throw FormatException('Invalid semantic version string: $version');
-    }
-
-    try {
-      return SemVer(major: int.parse(parts[0]), minor: int.parse(parts[1]), patch: int.parse(parts[2]));
-    } catch (e) {
-      throw FormatException('Invalid semantic version string: $version');
-    }
+    return SemVer(major: int.parse(parts[0]), minor: int.parse(parts[1]), patch: int.parse(parts[2]));
   }

   bool operator >(SemVer other) {
@@ -68,20 +54,6 @@ class SemVer {
|
||||
return other is SemVer && other.major == major && other.minor == minor && other.patch == patch;
|
||||
}
|
||||
|
||||
SemVerType? differenceType(SemVer other) {
|
||||
if (major != other.major) {
|
||||
return SemVerType.major;
|
||||
}
|
||||
if (minor != other.minor) {
|
||||
return SemVerType.minor;
|
||||
}
|
||||
if (patch != other.patch) {
|
||||
return SemVerType.patch;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
@override
|
||||
int get hashCode => major.hashCode ^ minor.hashCode ^ patch.hashCode;
|
||||
}
|
||||
|
||||
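For context on the two hunks above: `SemVer.differenceType` reports the most significant component in which two versions differ, and the `ServerInfoNotifier` hunk removes the guard that tolerated patch-level drift between client and server. A minimal TypeScript sketch of that gating rule (illustrative names, not repo code):

```ts
type SemVerType = 'major' | 'minor' | 'patch';

interface SemVer {
  major: number;
  minor: number;
  patch: number;
}

// Most significant component in which the two versions differ, or null when
// they are identical, mirroring the Dart SemVer.differenceType above.
function differenceType(a: SemVer, b: SemVer): SemVerType | null {
  if (a.major !== b.major) return 'major';
  if (a.minor !== b.minor) return 'minor';
  if (a.patch !== b.patch) return 'patch';
  return null;
}

function isOlder(a: SemVer, b: SemVer): boolean {
  if (a.major !== b.major) return a.major < b.major;
  if (a.minor !== b.minor) return a.minor < b.minor;
  return a.patch < b.patch;
}

// Old guard: only warn when the client trails the server by more than a patch
// release. The hunk above replaces this with a plain `client < server` check.
function clientOutOfDate(client: SemVer, server: SemVer): boolean {
  return isOlder(client, server) && differenceType(client, server) !== 'patch';
}
```

Under the old rule, a 2.2.1 client against a 2.2.3 server was not flagged (patch-only difference); under the new rule it is.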
9 mobile/openapi/README.md generated
@@ -3,7 +3,7 @@ Immich API

This Dart package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:

- API version: 2.2.3
- API version: 2.2.1
- Generator version: 7.8.0
- Build package: org.openapitools.codegen.languages.DartClientCodegen

@@ -265,11 +265,6 @@ Class | Method | HTTP request | Description
*TrashApi* | [**emptyTrash**](doc//TrashApi.md#emptytrash) | **POST** /trash/empty |
*TrashApi* | [**restoreAssets**](doc//TrashApi.md#restoreassets) | **POST** /trash/restore/assets |
*TrashApi* | [**restoreTrash**](doc//TrashApi.md#restoretrash) | **POST** /trash/restore |
*UploadApi* | [**cancelUpload**](doc//UploadApi.md#cancelupload) | **DELETE** /upload/{id} |
*UploadApi* | [**getUploadOptions**](doc//UploadApi.md#getuploadoptions) | **OPTIONS** /upload |
*UploadApi* | [**getUploadStatus**](doc//UploadApi.md#getuploadstatus) | **HEAD** /upload/{id} |
*UploadApi* | [**resumeUpload**](doc//UploadApi.md#resumeupload) | **PATCH** /upload/{id} |
*UploadApi* | [**startUpload**](doc//UploadApi.md#startupload) | **POST** /upload |
*UsersApi* | [**createProfileImage**](doc//UsersApi.md#createprofileimage) | **POST** /users/profile-image |
*UsersApi* | [**deleteProfileImage**](doc//UsersApi.md#deleteprofileimage) | **DELETE** /users/profile-image |
*UsersApi* | [**deleteUserLicense**](doc//UsersApi.md#deleteuserlicense) | **DELETE** /users/me/license |
@@ -584,8 +579,6 @@ Class | Method | HTTP request | Description
- [UpdateAlbumUserDto](doc//UpdateAlbumUserDto.md)
- [UpdateAssetDto](doc//UpdateAssetDto.md)
- [UpdateLibraryDto](doc//UpdateLibraryDto.md)
- [UploadBackupConfig](doc//UploadBackupConfig.md)
- [UploadOkDto](doc//UploadOkDto.md)
- [UsageByUserDto](doc//UsageByUserDto.md)
- [UserAdminCreateDto](doc//UserAdminCreateDto.md)
- [UserAdminDeleteDto](doc//UserAdminDeleteDto.md)

3 mobile/openapi/lib/api.dart generated
@@ -60,7 +60,6 @@ part 'api/system_metadata_api.dart';
part 'api/tags_api.dart';
part 'api/timeline_api.dart';
part 'api/trash_api.dart';
part 'api/upload_api.dart';
part 'api/users_api.dart';
part 'api/users_admin_api.dart';
part 'api/view_api.dart';
@@ -348,8 +347,6 @@ part 'model/update_album_dto.dart';
part 'model/update_album_user_dto.dart';
part 'model/update_asset_dto.dart';
part 'model/update_library_dto.dart';
part 'model/upload_backup_config.dart';
part 'model/upload_ok_dto.dart';
part 'model/usage_by_user_dto.dart';
part 'model/user_admin_create_dto.dart';
part 'model/user_admin_delete_dto.dart';
379 mobile/openapi/lib/api/upload_api.dart generated
@@ -1,379 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18

// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars

part of openapi.api;


class UploadApi {
  UploadApi([ApiClient? apiClient]) : apiClient = apiClient ?? defaultApiClient;

  final ApiClient apiClient;

  /// This endpoint requires the `asset.upload` permission.
  ///
  /// Note: This method returns the HTTP [Response].
  ///
  /// Parameters:
  ///
  /// * [String] id (required):
  ///
  /// * [String] key:
  ///
  /// * [String] slug:
  Future<Response> cancelUploadWithHttpInfo(String id, { String? key, String? slug, }) async {
    // ignore: prefer_const_declarations
    final apiPath = r'/upload/{id}'
      .replaceAll('{id}', id);

    // ignore: prefer_final_locals
    Object? postBody;

    final queryParams = <QueryParam>[];
    final headerParams = <String, String>{};
    final formParams = <String, String>{};

    if (key != null) {
      queryParams.addAll(_queryParams('', 'key', key));
    }
    if (slug != null) {
      queryParams.addAll(_queryParams('', 'slug', slug));
    }

    const contentTypes = <String>[];


    return apiClient.invokeAPI(
      apiPath,
      'DELETE',
      queryParams,
      postBody,
      headerParams,
      formParams,
      contentTypes.isEmpty ? null : contentTypes.first,
    );
  }

  /// This endpoint requires the `asset.upload` permission.
  ///
  /// Parameters:
  ///
  /// * [String] id (required):
  ///
  /// * [String] key:
  ///
  /// * [String] slug:
  Future<void> cancelUpload(String id, { String? key, String? slug, }) async {
    final response = await cancelUploadWithHttpInfo(id, key: key, slug: slug, );
    if (response.statusCode >= HttpStatus.badRequest) {
      throw ApiException(response.statusCode, await _decodeBodyBytes(response));
    }
  }

  /// Performs an HTTP 'OPTIONS /upload' operation and returns the [Response].
  Future<Response> getUploadOptionsWithHttpInfo() async {
    // ignore: prefer_const_declarations
    final apiPath = r'/upload';

    // ignore: prefer_final_locals
    Object? postBody;

    final queryParams = <QueryParam>[];
    final headerParams = <String, String>{};
    final formParams = <String, String>{};

    const contentTypes = <String>[];


    return apiClient.invokeAPI(
      apiPath,
      'OPTIONS',
      queryParams,
      postBody,
      headerParams,
      formParams,
      contentTypes.isEmpty ? null : contentTypes.first,
    );
  }

  Future<void> getUploadOptions() async {
    final response = await getUploadOptionsWithHttpInfo();
    if (response.statusCode >= HttpStatus.badRequest) {
      throw ApiException(response.statusCode, await _decodeBodyBytes(response));
    }
  }

  /// This endpoint requires the `asset.upload` permission.
  ///
  /// Note: This method returns the HTTP [Response].
  ///
  /// Parameters:
  ///
  /// * [String] id (required):
  ///
  /// * [String] uploadDraftInteropVersion (required):
  ///   Indicates the version of the RUFH protocol supported by the client.
  ///
  /// * [String] key:
  ///
  /// * [String] slug:
  Future<Response> getUploadStatusWithHttpInfo(String id, String uploadDraftInteropVersion, { String? key, String? slug, }) async {
    // ignore: prefer_const_declarations
    final apiPath = r'/upload/{id}'
      .replaceAll('{id}', id);

    // ignore: prefer_final_locals
    Object? postBody;

    final queryParams = <QueryParam>[];
    final headerParams = <String, String>{};
    final formParams = <String, String>{};

    if (key != null) {
      queryParams.addAll(_queryParams('', 'key', key));
    }
    if (slug != null) {
      queryParams.addAll(_queryParams('', 'slug', slug));
    }

    headerParams[r'upload-draft-interop-version'] = parameterToString(uploadDraftInteropVersion);

    const contentTypes = <String>[];


    return apiClient.invokeAPI(
      apiPath,
      'HEAD',
      queryParams,
      postBody,
      headerParams,
      formParams,
      contentTypes.isEmpty ? null : contentTypes.first,
    );
  }

  /// This endpoint requires the `asset.upload` permission.
  ///
  /// Parameters:
  ///
  /// * [String] id (required):
  ///
  /// * [String] uploadDraftInteropVersion (required):
  ///   Indicates the version of the RUFH protocol supported by the client.
  ///
  /// * [String] key:
  ///
  /// * [String] slug:
  Future<void> getUploadStatus(String id, String uploadDraftInteropVersion, { String? key, String? slug, }) async {
    final response = await getUploadStatusWithHttpInfo(id, uploadDraftInteropVersion, key: key, slug: slug, );
    if (response.statusCode >= HttpStatus.badRequest) {
      throw ApiException(response.statusCode, await _decodeBodyBytes(response));
    }
  }

  /// This endpoint requires the `asset.upload` permission.
  ///
  /// Note: This method returns the HTTP [Response].
  ///
  /// Parameters:
  ///
  /// * [String] contentLength (required):
  ///   Non-negative size of the request body in bytes.
  ///
  /// * [String] id (required):
  ///
  /// * [String] uploadComplete (required):
  ///   Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.
  ///
  /// * [String] uploadDraftInteropVersion (required):
  ///   Indicates the version of the RUFH protocol supported by the client.
  ///
  /// * [String] uploadOffset (required):
  ///   Non-negative byte offset indicating the starting position of the data in the request body within the entire file.
  ///
  /// * [String] key:
  ///
  /// * [String] slug:
  Future<Response> resumeUploadWithHttpInfo(String contentLength, String id, String uploadComplete, String uploadDraftInteropVersion, String uploadOffset, { String? key, String? slug, }) async {
    // ignore: prefer_const_declarations
    final apiPath = r'/upload/{id}'
      .replaceAll('{id}', id);

    // ignore: prefer_final_locals
    Object? postBody;

    final queryParams = <QueryParam>[];
    final headerParams = <String, String>{};
    final formParams = <String, String>{};

    if (key != null) {
      queryParams.addAll(_queryParams('', 'key', key));
    }
    if (slug != null) {
      queryParams.addAll(_queryParams('', 'slug', slug));
    }

    headerParams[r'content-length'] = parameterToString(contentLength);
    headerParams[r'upload-complete'] = parameterToString(uploadComplete);
    headerParams[r'upload-draft-interop-version'] = parameterToString(uploadDraftInteropVersion);
    headerParams[r'upload-offset'] = parameterToString(uploadOffset);

    const contentTypes = <String>[];


    return apiClient.invokeAPI(
      apiPath,
      'PATCH',
      queryParams,
      postBody,
      headerParams,
      formParams,
      contentTypes.isEmpty ? null : contentTypes.first,
    );
  }

  /// This endpoint requires the `asset.upload` permission.
  ///
  /// Parameters:
  ///
  /// * [String] contentLength (required):
  ///   Non-negative size of the request body in bytes.
  ///
  /// * [String] id (required):
  ///
  /// * [String] uploadComplete (required):
  ///   Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.
  ///
  /// * [String] uploadDraftInteropVersion (required):
  ///   Indicates the version of the RUFH protocol supported by the client.
  ///
  /// * [String] uploadOffset (required):
  ///   Non-negative byte offset indicating the starting position of the data in the request body within the entire file.
  ///
  /// * [String] key:
  ///
  /// * [String] slug:
  Future<UploadOkDto?> resumeUpload(String contentLength, String id, String uploadComplete, String uploadDraftInteropVersion, String uploadOffset, { String? key, String? slug, }) async {
    final response = await resumeUploadWithHttpInfo(contentLength, id, uploadComplete, uploadDraftInteropVersion, uploadOffset, key: key, slug: slug, );
    if (response.statusCode >= HttpStatus.badRequest) {
      throw ApiException(response.statusCode, await _decodeBodyBytes(response));
    }
    // When a remote server returns no body with a status of 204, we shall not decode it.
    // At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
    // FormatException when trying to decode an empty string.
    if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
      return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'UploadOkDto',) as UploadOkDto;

    }
    return null;
  }

  /// This endpoint requires the `asset.upload` permission.
  ///
  /// Note: This method returns the HTTP [Response].
  ///
  /// Parameters:
  ///
  /// * [String] contentLength (required):
  ///   Non-negative size of the request body in bytes.
  ///
  /// * [String] reprDigest (required):
  ///   RFC 9651 structured dictionary containing an `sha` (bytesequence) checksum used to detect duplicate files and validate data integrity.
  ///
  /// * [String] xImmichAssetData (required):
  ///   RFC 9651 structured dictionary containing asset metadata with the following keys: - device-asset-id (string, required): Unique device asset identifier - device-id (string, required): Device identifier - file-created-at (string/date, required): ISO 8601 date string or Unix timestamp - file-modified-at (string/date, required): ISO 8601 date string or Unix timestamp - filename (string, required): Original filename - is-favorite (boolean, optional): Favorite status - live-photo-video-id (string, optional): Live photo ID for assets from iOS devices - icloud-id (string, optional): iCloud identifier for assets from iOS devices
  ///
  /// * [String] key:
  ///
  /// * [String] slug:
  ///
  /// * [String] uploadComplete:
  ///   Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.
  ///
  /// * [String] uploadDraftInteropVersion:
  ///   Indicates the version of the RUFH protocol supported by the client.
  Future<Response> startUploadWithHttpInfo(String contentLength, String reprDigest, String xImmichAssetData, { String? key, String? slug, String? uploadComplete, String? uploadDraftInteropVersion, }) async {
    // ignore: prefer_const_declarations
    final apiPath = r'/upload';

    // ignore: prefer_final_locals
    Object? postBody;

    final queryParams = <QueryParam>[];
    final headerParams = <String, String>{};
    final formParams = <String, String>{};

    if (key != null) {
      queryParams.addAll(_queryParams('', 'key', key));
    }
    if (slug != null) {
      queryParams.addAll(_queryParams('', 'slug', slug));
    }

    headerParams[r'content-length'] = parameterToString(contentLength);
    headerParams[r'repr-digest'] = parameterToString(reprDigest);
    if (uploadComplete != null) {
      headerParams[r'upload-complete'] = parameterToString(uploadComplete);
    }
    if (uploadDraftInteropVersion != null) {
      headerParams[r'upload-draft-interop-version'] = parameterToString(uploadDraftInteropVersion);
    }
    headerParams[r'x-immich-asset-data'] = parameterToString(xImmichAssetData);

    const contentTypes = <String>[];


    return apiClient.invokeAPI(
      apiPath,
      'POST',
      queryParams,
      postBody,
      headerParams,
      formParams,
      contentTypes.isEmpty ? null : contentTypes.first,
    );
  }

  /// This endpoint requires the `asset.upload` permission.
  ///
  /// Parameters:
  ///
  /// * [String] contentLength (required):
  ///   Non-negative size of the request body in bytes.
  ///
  /// * [String] reprDigest (required):
  ///   RFC 9651 structured dictionary containing an `sha` (bytesequence) checksum used to detect duplicate files and validate data integrity.
  ///
  /// * [String] xImmichAssetData (required):
  ///   RFC 9651 structured dictionary containing asset metadata with the following keys: - device-asset-id (string, required): Unique device asset identifier - device-id (string, required): Device identifier - file-created-at (string/date, required): ISO 8601 date string or Unix timestamp - file-modified-at (string/date, required): ISO 8601 date string or Unix timestamp - filename (string, required): Original filename - is-favorite (boolean, optional): Favorite status - live-photo-video-id (string, optional): Live photo ID for assets from iOS devices - icloud-id (string, optional): iCloud identifier for assets from iOS devices
  ///
  /// * [String] key:
  ///
  /// * [String] slug:
  ///
  /// * [String] uploadComplete:
  ///   Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.
  ///
  /// * [String] uploadDraftInteropVersion:
  ///   Indicates the version of the RUFH protocol supported by the client.
  Future<UploadOkDto?> startUpload(String contentLength, String reprDigest, String xImmichAssetData, { String? key, String? slug, String? uploadComplete, String? uploadDraftInteropVersion, }) async {
    final response = await startUploadWithHttpInfo(contentLength, reprDigest, xImmichAssetData, key: key, slug: slug, uploadComplete: uploadComplete, uploadDraftInteropVersion: uploadDraftInteropVersion, );
    if (response.statusCode >= HttpStatus.badRequest) {
      throw ApiException(response.statusCode, await _decodeBodyBytes(response));
    }
    // When a remote server returns no body with a status of 204, we shall not decode it.
    // At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
    // FormatException when trying to decode an empty string.
    if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
      return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'UploadOkDto',) as UploadOkDto;

    }
    return null;
  }
}
4 mobile/openapi/lib/api_client.dart generated
@@ -748,10 +748,6 @@ class ApiClient {
          return UpdateAssetDto.fromJson(value);
        case 'UpdateLibraryDto':
          return UpdateLibraryDto.fromJson(value);
        case 'UploadBackupConfig':
          return UploadBackupConfig.fromJson(value);
        case 'UploadOkDto':
          return UploadOkDto.fromJson(value);
        case 'UsageByUserDto':
          return UsageByUserDto.fromJson(value);
        case 'UserAdminCreateDto':

@@ -14,31 +14,25 @@ class SystemConfigBackupsDto {
  /// Returns a new [SystemConfigBackupsDto] instance.
  SystemConfigBackupsDto({
    required this.database,
    required this.upload,
  });

  DatabaseBackupConfig database;

  UploadBackupConfig upload;

  @override
  bool operator ==(Object other) => identical(this, other) || other is SystemConfigBackupsDto &&
    other.database == database &&
    other.upload == upload;
    other.database == database;

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (database.hashCode) +
    (upload.hashCode);
    (database.hashCode);

  @override
  String toString() => 'SystemConfigBackupsDto[database=$database, upload=$upload]';
  String toString() => 'SystemConfigBackupsDto[database=$database]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
    json[r'database'] = this.database;
    json[r'upload'] = this.upload;
    return json;
  }

@@ -52,7 +46,6 @@ class SystemConfigBackupsDto {

      return SystemConfigBackupsDto(
        database: DatabaseBackupConfig.fromJson(json[r'database'])!,
        upload: UploadBackupConfig.fromJson(json[r'upload'])!,
      );
    }
    return null;
@@ -101,7 +94,6 @@ class SystemConfigBackupsDto {
  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'database',
    'upload',
  };
}

@@ -17,7 +17,6 @@ class SystemConfigNightlyTasksDto {
    required this.databaseCleanup,
    required this.generateMemories,
    required this.missingThumbnails,
    required this.removeStaleUploads,
    required this.startTime,
    required this.syncQuotaUsage,
  });
@@ -30,8 +29,6 @@ class SystemConfigNightlyTasksDto {

  bool missingThumbnails;

  bool removeStaleUploads;

  String startTime;

  bool syncQuotaUsage;
@@ -42,7 +39,6 @@ class SystemConfigNightlyTasksDto {
    other.databaseCleanup == databaseCleanup &&
    other.generateMemories == generateMemories &&
    other.missingThumbnails == missingThumbnails &&
    other.removeStaleUploads == removeStaleUploads &&
    other.startTime == startTime &&
    other.syncQuotaUsage == syncQuotaUsage;

@@ -53,12 +49,11 @@ class SystemConfigNightlyTasksDto {
    (databaseCleanup.hashCode) +
    (generateMemories.hashCode) +
    (missingThumbnails.hashCode) +
    (removeStaleUploads.hashCode) +
    (startTime.hashCode) +
    (syncQuotaUsage.hashCode);

  @override
  String toString() => 'SystemConfigNightlyTasksDto[clusterNewFaces=$clusterNewFaces, databaseCleanup=$databaseCleanup, generateMemories=$generateMemories, missingThumbnails=$missingThumbnails, removeStaleUploads=$removeStaleUploads, startTime=$startTime, syncQuotaUsage=$syncQuotaUsage]';
  String toString() => 'SystemConfigNightlyTasksDto[clusterNewFaces=$clusterNewFaces, databaseCleanup=$databaseCleanup, generateMemories=$generateMemories, missingThumbnails=$missingThumbnails, startTime=$startTime, syncQuotaUsage=$syncQuotaUsage]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
@@ -66,7 +61,6 @@ class SystemConfigNightlyTasksDto {
    json[r'databaseCleanup'] = this.databaseCleanup;
    json[r'generateMemories'] = this.generateMemories;
    json[r'missingThumbnails'] = this.missingThumbnails;
    json[r'removeStaleUploads'] = this.removeStaleUploads;
    json[r'startTime'] = this.startTime;
    json[r'syncQuotaUsage'] = this.syncQuotaUsage;
    return json;
@@ -85,7 +79,6 @@ class SystemConfigNightlyTasksDto {
      databaseCleanup: mapValueOfType<bool>(json, r'databaseCleanup')!,
      generateMemories: mapValueOfType<bool>(json, r'generateMemories')!,
      missingThumbnails: mapValueOfType<bool>(json, r'missingThumbnails')!,
      removeStaleUploads: mapValueOfType<bool>(json, r'removeStaleUploads')!,
      startTime: mapValueOfType<String>(json, r'startTime')!,
      syncQuotaUsage: mapValueOfType<bool>(json, r'syncQuotaUsage')!,
    );
@@ -139,7 +132,6 @@ class SystemConfigNightlyTasksDto {
    'databaseCleanup',
    'generateMemories',
    'missingThumbnails',
    'removeStaleUploads',
    'startTime',
    'syncQuotaUsage',
  };
100 mobile/openapi/lib/model/upload_backup_config.dart generated
@@ -1,100 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18

// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars

part of openapi.api;

class UploadBackupConfig {
  /// Returns a new [UploadBackupConfig] instance.
  UploadBackupConfig({
    required this.maxAgeHours,
  });

  /// Minimum value: 1
  num maxAgeHours;

  @override
  bool operator ==(Object other) => identical(this, other) || other is UploadBackupConfig &&
    other.maxAgeHours == maxAgeHours;

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (maxAgeHours.hashCode);

  @override
  String toString() => 'UploadBackupConfig[maxAgeHours=$maxAgeHours]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
    json[r'maxAgeHours'] = this.maxAgeHours;
    return json;
  }

  /// Returns a new [UploadBackupConfig] instance and imports its values from
  /// [value] if it's a [Map], null otherwise.
  // ignore: prefer_constructors_over_static_methods
  static UploadBackupConfig? fromJson(dynamic value) {
    upgradeDto(value, "UploadBackupConfig");
    if (value is Map) {
      final json = value.cast<String, dynamic>();

      return UploadBackupConfig(
        maxAgeHours: num.parse('${json[r'maxAgeHours']}'),
      );
    }
    return null;
  }

  static List<UploadBackupConfig> listFromJson(dynamic json, {bool growable = false,}) {
    final result = <UploadBackupConfig>[];
    if (json is List && json.isNotEmpty) {
      for (final row in json) {
        final value = UploadBackupConfig.fromJson(row);
        if (value != null) {
          result.add(value);
        }
      }
    }
    return result.toList(growable: growable);
  }

  static Map<String, UploadBackupConfig> mapFromJson(dynamic json) {
    final map = <String, UploadBackupConfig>{};
    if (json is Map && json.isNotEmpty) {
      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
      for (final entry in json.entries) {
        final value = UploadBackupConfig.fromJson(entry.value);
        if (value != null) {
          map[entry.key] = value;
        }
      }
    }
    return map;
  }

  // maps a json object with a list of UploadBackupConfig-objects as value to a dart map
  static Map<String, List<UploadBackupConfig>> mapListFromJson(dynamic json, {bool growable = false,}) {
    final map = <String, List<UploadBackupConfig>>{};
    if (json is Map && json.isNotEmpty) {
      // ignore: parameter_assignments
      json = json.cast<String, dynamic>();
      for (final entry in json.entries) {
        map[entry.key] = UploadBackupConfig.listFromJson(entry.value, growable: growable,);
      }
    }
    return map;
  }

  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'maxAgeHours',
  };
}
99 mobile/openapi/lib/model/upload_ok_dto.dart generated
@@ -1,99 +0,0 @@
//
// AUTO-GENERATED FILE, DO NOT MODIFY!
//
// @dart=2.18

// ignore_for_file: unused_element, unused_import
// ignore_for_file: always_put_required_named_parameters_first
// ignore_for_file: constant_identifier_names
// ignore_for_file: lines_longer_than_80_chars

part of openapi.api;

class UploadOkDto {
  /// Returns a new [UploadOkDto] instance.
  UploadOkDto({
    required this.id,
  });

  String id;

  @override
  bool operator ==(Object other) => identical(this, other) || other is UploadOkDto &&
    other.id == id;

  @override
  int get hashCode =>
    // ignore: unnecessary_parenthesis
    (id.hashCode);

  @override
  String toString() => 'UploadOkDto[id=$id]';

  Map<String, dynamic> toJson() {
    final json = <String, dynamic>{};
    json[r'id'] = this.id;
    return json;
  }

  /// Returns a new [UploadOkDto] instance and imports its values from
  /// [value] if it's a [Map], null otherwise.
  // ignore: prefer_constructors_over_static_methods
  static UploadOkDto? fromJson(dynamic value) {
    upgradeDto(value, "UploadOkDto");
    if (value is Map) {
      final json = value.cast<String, dynamic>();

      return UploadOkDto(
        id: mapValueOfType<String>(json, r'id')!,
      );
    }
    return null;
  }

  static List<UploadOkDto> listFromJson(dynamic json, {bool growable = false,}) {
    final result = <UploadOkDto>[];
    if (json is List && json.isNotEmpty) {
      for (final row in json) {
        final value = UploadOkDto.fromJson(row);
        if (value != null) {
          result.add(value);
        }
      }
    }
    return result.toList(growable: growable);
  }

  static Map<String, UploadOkDto> mapFromJson(dynamic json) {
    final map = <String, UploadOkDto>{};
    if (json is Map && json.isNotEmpty) {
      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
      for (final entry in json.entries) {
        final value = UploadOkDto.fromJson(entry.value);
        if (value != null) {
          map[entry.key] = value;
        }
      }
    }
    return map;
  }

  // maps a json object with a list of UploadOkDto-objects as value to a dart map
  static Map<String, List<UploadOkDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
    final map = <String, List<UploadOkDto>>{};
    if (json is Map && json.isNotEmpty) {
      // ignore: parameter_assignments
      json = json.cast<String, dynamic>();
      for (final entry in json.entries) {
        map[entry.key] = UploadOkDto.listFromJson(entry.value, growable: growable,);
      }
    }
    return map;
  }

  /// The list of required keys that must be present in a JSON.
  static const requiredKeys = <String>{
    'id',
  };
}
@@ -2,7 +2,7 @@ name: immich_mobile
description: Immich - selfhosted backup media file on mobile phone

publish_to: 'none'
version: 2.2.3+3026
version: 2.2.1+3024

environment:
  sdk: '>=3.8.0 <4.0.0'

@@ -8,11 +8,11 @@ bash tool/build_android.sh x64
bash tool/build_android.sh armv7
bash tool/build_android.sh arm64
mv libisar_android_arm64.so libisar.so
mv libisar.so ../.pub-cache/hosted/pub.dev/isar_community_flutter_libs-*/android/src/main/jniLibs/arm64-v8a/
mv libisar.so ../.pub-cache/hosted/pub.isar-community.dev/isar_flutter_libs-*/android/src/main/jniLibs/arm64-v8a/
mv libisar_android_armv7.so libisar.so
mv libisar.so ../.pub-cache/hosted/pub.dev/isar_community_flutter_libs-*/android/src/main/jniLibs/armeabi-v7a/
mv libisar.so ../.pub-cache/hosted/pub.isar-community.dev/isar_flutter_libs-*/android/src/main/jniLibs/armeabi-v7a/
mv libisar_android_x64.so libisar.so
mv libisar.so ../.pub-cache/hosted/pub.dev/isar_community_flutter_libs-*/android/src/main/jniLibs/x86_64/
mv libisar.so ../.pub-cache/hosted/pub.isar-community.dev/isar_flutter_libs-*/android/src/main/jniLibs/x86_64/
mv libisar_android_x86.so libisar.so
mv libisar.so ../.pub-cache/hosted/pub.dev/isar_community_flutter_libs-*/android/src/main/jniLibs/x86/
)
mv libisar.so ../.pub-cache/hosted/pub.isar-community.dev/isar_flutter_libs-*/android/src/main/jniLibs/x86/
)
@@ -1,92 +0,0 @@
import 'package:flutter_test/flutter_test.dart';
import 'package:immich_mobile/utils/semver.dart';

void main() {
  group('SemVer', () {
    test('Parses valid semantic version strings correctly', () {
      final version = SemVer.fromString('1.2.3');
      expect(version.major, 1);
      expect(version.minor, 2);
      expect(version.patch, 3);
    });

    test('Throws FormatException for invalid version strings', () {
      expect(() => SemVer.fromString('1.2'), throwsFormatException);
      expect(() => SemVer.fromString('a.b.c'), throwsFormatException);
      expect(() => SemVer.fromString('1.2.3.4'), throwsFormatException);
    });

    test('Compares equal versions correctly', () {
      final v1 = SemVer.fromString('1.2.3');
      final v2 = SemVer.fromString('1.2.3');
      expect(v1 == v2, isTrue);
      expect(v1 > v2, isFalse);
      expect(v1 < v2, isFalse);
    });

    test('Compares major version correctly', () {
      final v1 = SemVer.fromString('2.0.0');
      final v2 = SemVer.fromString('1.9.9');
      expect(v1 == v2, isFalse);
      expect(v1 > v2, isTrue);
      expect(v1 < v2, isFalse);
    });

    test('Compares minor version correctly', () {
      final v1 = SemVer.fromString('1.3.0');
      final v2 = SemVer.fromString('1.2.9');
      expect(v1 == v2, isFalse);
      expect(v1 > v2, isTrue);
      expect(v1 < v2, isFalse);
    });

    test('Compares patch version correctly', () {
      final v1 = SemVer.fromString('1.2.4');
      final v2 = SemVer.fromString('1.2.3');
      expect(v1 == v2, isFalse);
      expect(v1 > v2, isTrue);
      expect(v1 < v2, isFalse);
    });

    test('Gives correct major difference type', () {
      final v1 = SemVer.fromString('2.0.0');
      final v2 = SemVer.fromString('1.9.9');
      expect(v1.differenceType(v2), SemVerType.major);
    });

    test('Gives correct minor difference type', () {
      final v1 = SemVer.fromString('1.3.0');
      final v2 = SemVer.fromString('1.2.9');
      expect(v1.differenceType(v2), SemVerType.minor);
    });

    test('Gives correct patch difference type', () {
      final v1 = SemVer.fromString('1.2.4');
      final v2 = SemVer.fromString('1.2.3');
      expect(v1.differenceType(v2), SemVerType.patch);
    });

    test('Gives null difference type for equal versions', () {
      final v1 = SemVer.fromString('1.2.3');
      final v2 = SemVer.fromString('1.2.3');
      expect(v1.differenceType(v2), isNull);
    });

    test('toString returns correct format', () {
      final version = SemVer.fromString('1.2.3');
      expect(version.toString(), '1.2.3');
    });

    test('Parses versions with leading v correctly', () {
      final version1 = SemVer.fromString('v1.2.3');
      expect(version1.major, 1);
      expect(version1.minor, 2);
      expect(version1.patch, 3);

      final version2 = SemVer.fromString('V1.2.3');
      expect(version2.major, 1);
      expect(version2.minor, 2);
      expect(version2.patch, 3);
    });
  });
}
@@ -9373,324 +9373,6 @@
        "description": "This endpoint requires the `asset.delete` permission."
      }
    },
    "/upload": {
      "options": {
        "operationId": "getUploadOptions",
        "parameters": [],
        "responses": {
          "204": {
            "description": ""
          }
        },
        "tags": [
          "Upload"
        ]
      },
      "post": {
        "operationId": "startUpload",
        "parameters": [
          {
            "name": "content-length",
            "in": "header",
            "description": "Non-negative size of the request body in bytes.",
            "required": true,
            "schema": {
              "type": "string"
            }
          },
          {
            "name": "key",
            "required": false,
            "in": "query",
            "schema": {
              "type": "string"
            }
          },
          {
            "name": "repr-digest",
            "in": "header",
            "description": "RFC 9651 structured dictionary containing an `sha` (bytesequence) checksum used to detect duplicate files and validate data integrity.",
            "required": true,
            "schema": {
              "type": "string"
            }
          },
          {
            "name": "slug",
            "required": false,
            "in": "query",
            "schema": {
              "type": "string"
            }
          },
          {
            "name": "upload-complete",
            "in": "header",
            "description": "Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.",
            "required": false,
            "schema": {
              "type": "string"
            }
          },
          {
            "name": "upload-draft-interop-version",
            "in": "header",
            "description": "Indicates the version of the RUFH protocol supported by the client.",
            "required": false,
            "schema": {
              "type": "string"
            }
          },
          {
            "name": "x-immich-asset-data",
            "in": "header",
            "description": "RFC 9651 structured dictionary containing asset metadata with the following keys:\n- device-asset-id (string, required): Unique device asset identifier\n- device-id (string, required): Device identifier\n- file-created-at (string/date, required): ISO 8601 date string or Unix timestamp\n- file-modified-at (string/date, required): ISO 8601 date string or Unix timestamp\n- filename (string, required): Original filename\n- is-favorite (boolean, optional): Favorite status\n- live-photo-video-id (string, optional): Live photo ID for assets from iOS devices\n- icloud-id (string, optional): iCloud identifier for assets from iOS devices",
            "required": true,
            "schema": {
              "type": "string"
            }
          }
        ],
        "responses": {
          "200": {
            "content": {
              "application/json": {
                "schema": {
                  "$ref": "#/components/schemas/UploadOkDto"
                }
              }
            },
            "description": ""
          },
          "201": {
            "description": ""
          }
        },
        "security": [
          {
            "bearer": []
          },
          {
            "cookie": []
          },
          {
            "api_key": []
          }
        ],
        "tags": [
          "Upload"
        ],
        "x-immich-permission": "asset.upload",
        "description": "This endpoint requires the `asset.upload` permission."
      }
    },
    "/upload/{id}": {
      "delete": {
        "operationId": "cancelUpload",
        "parameters": [
          {
            "name": "id",
            "required": true,
            "in": "path",
            "schema": {
              "format": "uuid",
              "type": "string"
            }
          },
          {
            "name": "key",
            "required": false,
            "in": "query",
            "schema": {
              "type": "string"
            }
          },
          {
            "name": "slug",
            "required": false,
            "in": "query",
            "schema": {
              "type": "string"
            }
          }
        ],
        "responses": {
          "200": {
            "description": ""
          }
        },
        "security": [
          {
            "bearer": []
          },
          {
            "cookie": []
          },
          {
            "api_key": []
          }
        ],
        "tags": [
          "Upload"
        ],
        "x-immich-permission": "asset.upload",
        "description": "This endpoint requires the `asset.upload` permission."
      },
      "head": {
        "operationId": "getUploadStatus",
        "parameters": [
          {
            "name": "id",
            "required": true,
            "in": "path",
            "schema": {
              "format": "uuid",
              "type": "string"
            }
          },
          {
            "name": "key",
            "required": false,
            "in": "query",
            "schema": {
              "type": "string"
            }
          },
          {
            "name": "slug",
            "required": false,
            "in": "query",
            "schema": {
              "type": "string"
            }
          },
          {
            "name": "upload-draft-interop-version",
            "in": "header",
            "description": "Indicates the version of the RUFH protocol supported by the client.",
            "required": true,
            "schema": {
              "type": "string"
            }
          }
        ],
        "responses": {
          "200": {
            "description": ""
          }
        },
        "security": [
          {
            "bearer": []
          },
          {
            "cookie": []
          },
          {
            "api_key": []
          }
        ],
        "tags": [
          "Upload"
        ],
        "x-immich-permission": "asset.upload",
        "description": "This endpoint requires the `asset.upload` permission."
      },
      "patch": {
        "operationId": "resumeUpload",
        "parameters": [
          {
            "name": "content-length",
            "in": "header",
            "description": "Non-negative size of the request body in bytes.",
            "required": true,
            "schema": {
              "type": "string"
            }
          },
          {
            "name": "id",
            "required": true,
            "in": "path",
            "schema": {
              "format": "uuid",
              "type": "string"
            }
          },
          {
            "name": "key",
            "required": false,
            "in": "query",
            "schema": {
              "type": "string"
            }
          },
          {
            "name": "slug",
            "required": false,
            "in": "query",
            "schema": {
              "type": "string"
            }
          },
          {
            "name": "upload-complete",
            "in": "header",
            "description": "Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.",
            "required": true,
            "schema": {
              "type": "string"
            }
          },
          {
            "name": "upload-draft-interop-version",
            "in": "header",
            "description": "Indicates the version of the RUFH protocol supported by the client.",
            "required": true,
            "schema": {
              "type": "string"
            }
          },
          {
            "name": "upload-offset",
            "in": "header",
            "description": "Non-negative byte offset indicating the starting position of the data in the request body within the entire file.",
            "required": true,
            "schema": {
              "type": "string"
            }
          }
        ],
        "responses": {
          "200": {
            "content": {
              "application/json": {
                "schema": {
                  "$ref": "#/components/schemas/UploadOkDto"
                }
              }
            },
            "description": ""
          }
        },
        "security": [
          {
            "bearer": []
          },
          {
            "cookie": []
          },
          {
            "api_key": []
          }
        ],
        "tags": [
          "Upload"
        ],
        "x-immich-permission": "asset.upload",
        "description": "This endpoint requires the `asset.upload` permission."
      }
    },
    "/users": {
      "get": {
        "operationId": "searchUsers",
@@ -10324,7 +10006,7 @@
  "info": {
    "title": "Immich",
    "description": "Immich API",
    "version": "2.2.3",
    "version": "2.2.1",
    "contact": {}
  },
  "tags": [],
@@ -16658,14 +16340,10 @@
      "properties": {
        "database": {
          "$ref": "#/components/schemas/DatabaseBackupConfig"
        },
        "upload": {
          "$ref": "#/components/schemas/UploadBackupConfig"
        }
      },
      "required": [
        "database",
        "upload"
        "database"
      ],
      "type": "object"
    },
@@ -17198,9 +16876,6 @@
        "missingThumbnails": {
          "type": "boolean"
        },
        "removeStaleUploads": {
          "type": "boolean"
        },
        "startTime": {
          "type": "string"
        },
@@ -17213,7 +16888,6 @@
        "databaseCleanup",
        "generateMemories",
        "missingThumbnails",
        "removeStaleUploads",
        "startTime",
        "syncQuotaUsage"
      ],
@@ -18066,29 +17740,6 @@
      },
      "type": "object"
    },
    "UploadBackupConfig": {
      "properties": {
        "maxAgeHours": {
          "minimum": 1,
          "type": "number"
        }
      },
      "required": [
        "maxAgeHours"
      ],
      "type": "object"
    },
    "UploadOkDto": {
      "properties": {
        "id": {
          "type": "string"
        }
      },
      "required": [
        "id"
      ],
      "type": "object"
    },
    "UsageByUserDto": {
      "properties": {
        "photos": {
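The `startUpload` operation above is driven entirely by headers rather than a multipart body. A sketch of how a client could build the two RFC 9651 structured headers it requires, using the `structured-headers` package and the `sha` byte-sequence form that the controller tests later in this diff use (the helper and its literal values are illustrative, not repo code):

```ts
import { createHash } from 'node:crypto';
import { serializeDictionary } from 'structured-headers';

// Illustrative helper: builds the header values that POST /upload expects.
// Key names come from the x-immich-asset-data description above; the `sha`
// byte-sequence form matches the controller tests further down in this diff.
export function buildUploadHeaders(file: Buffer, filename: string) {
  const reprDigest = `sha=:${createHash('sha1').update(file).digest('base64')}:`;
  const assetData = serializeDictionary({
    filename,
    'device-asset-id': 'example-asset-id', // assumed value
    'device-id': 'example-device', // assumed value
    'file-created-at': new Date().toISOString(),
    'file-modified-at': new Date().toISOString(),
    'is-favorite': false,
  });
  return { 'repr-digest': reprDigest, 'x-immich-asset-data': assetData };
}
```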
@@ -1,6 +1,6 @@
{
  "name": "@immich/sdk",
  "version": "2.2.3",
  "version": "2.2.1",
  "description": "Auto-generated TypeScript SDK for the Immich API",
  "type": "module",
  "main": "./build/index.js",
@@ -19,7 +19,7 @@
    "@oazapfts/runtime": "^1.0.2"
  },
  "devDependencies": {
    "@types/node": "^22.18.13",
    "@types/node": "^22.18.12",
    "typescript": "^5.3.3"
  },
  "repository": {
@@ -1,6 +1,6 @@
/**
 * Immich
 * 2.2.3
 * 2.2.1
 * DO NOT MODIFY - This file has been generated using oazapfts.
 * See https://www.npmjs.com/package/oazapfts
 */
@@ -1359,12 +1359,8 @@ export type DatabaseBackupConfig = {
    enabled: boolean;
    keepLastAmount: number;
};
export type UploadBackupConfig = {
    maxAgeHours: number;
};
export type SystemConfigBackupsDto = {
    database: DatabaseBackupConfig;
    upload: UploadBackupConfig;
};
export type SystemConfigFFmpegDto = {
    accel: TranscodeHWAccel;
@@ -1493,7 +1489,6 @@ export type SystemConfigNightlyTasksDto = {
    databaseCleanup: boolean;
    generateMemories: boolean;
    missingThumbnails: boolean;
    removeStaleUploads: boolean;
    startTime: string;
    syncQuotaUsage: boolean;
};
@@ -1659,9 +1654,6 @@ export type TimeBucketsResponseDto = {
export type TrashResponseDto = {
    count: number;
};
export type UploadOkDto = {
    id: string;
};
export type UserUpdateMeDto = {
    avatarColor?: (UserAvatarColor) | null;
    email?: string;
@@ -4526,109 +4518,6 @@ export function restoreAssets({ bulkIdsDto }: {
        body: bulkIdsDto
    })));
}
export function getUploadOptions(opts?: Oazapfts.RequestOpts) {
    return oazapfts.ok(oazapfts.fetchText("/upload", {
        ...opts,
        method: "OPTIONS"
    }));
}
/**
 * This endpoint requires the `asset.upload` permission.
 */
export function startUpload({ contentLength, key, reprDigest, slug, uploadComplete, uploadDraftInteropVersion, xImmichAssetData }: {
    contentLength: string;
    key?: string;
    reprDigest: string;
    slug?: string;
    uploadComplete?: string;
    uploadDraftInteropVersion?: string;
    xImmichAssetData: string;
}, opts?: Oazapfts.RequestOpts) {
    return oazapfts.ok(oazapfts.fetchJson<{
        status: 200;
        data: UploadOkDto;
    } | {
        status: 201;
    }>(`/upload${QS.query(QS.explode({
        key,
        slug
    }))}`, {
        ...opts,
        method: "POST",
        headers: oazapfts.mergeHeaders(opts?.headers, {
            "content-length": contentLength,
            "repr-digest": reprDigest,
            "upload-complete": uploadComplete,
            "upload-draft-interop-version": uploadDraftInteropVersion,
            "x-immich-asset-data": xImmichAssetData
        })
    }));
}
/**
 * This endpoint requires the `asset.upload` permission.
 */
export function cancelUpload({ id, key, slug }: {
    id: string;
    key?: string;
    slug?: string;
}, opts?: Oazapfts.RequestOpts) {
    return oazapfts.ok(oazapfts.fetchText(`/upload/${encodeURIComponent(id)}${QS.query(QS.explode({
        key,
        slug
    }))}`, {
        ...opts,
        method: "DELETE"
    }));
}
/**
 * This endpoint requires the `asset.upload` permission.
 */
export function getUploadStatus({ id, key, slug, uploadDraftInteropVersion }: {
    id: string;
    key?: string;
    slug?: string;
    uploadDraftInteropVersion: string;
}, opts?: Oazapfts.RequestOpts) {
    return oazapfts.ok(oazapfts.fetchText(`/upload/${encodeURIComponent(id)}${QS.query(QS.explode({
        key,
        slug
    }))}`, {
        ...opts,
        method: "HEAD",
        headers: oazapfts.mergeHeaders(opts?.headers, {
            "upload-draft-interop-version": uploadDraftInteropVersion
        })
    }));
}
/**
 * This endpoint requires the `asset.upload` permission.
 */
export function resumeUpload({ contentLength, id, key, slug, uploadComplete, uploadDraftInteropVersion, uploadOffset }: {
    contentLength: string;
    id: string;
    key?: string;
    slug?: string;
    uploadComplete: string;
    uploadDraftInteropVersion: string;
    uploadOffset: string;
}, opts?: Oazapfts.RequestOpts) {
    return oazapfts.ok(oazapfts.fetchJson<{
        status: 200;
        data: UploadOkDto;
    }>(`/upload/${encodeURIComponent(id)}${QS.query(QS.explode({
        key,
        slug
    }))}`, {
        ...opts,
        method: "PATCH",
        headers: oazapfts.mergeHeaders(opts?.headers, {
            "content-length": contentLength,
            "upload-complete": uploadComplete,
            "upload-draft-interop-version": uploadDraftInteropVersion,
            "upload-offset": uploadOffset
        })
    }));
}
/**
 * This endpoint requires the `user.read` permission.
 */
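Taken together, these four removed SDK wrappers map onto a RUFH-style resumable flow: POST starts an upload (optionally incomplete), HEAD reports progress, PATCH appends bytes, and DELETE cancels. Note the generated wrappers carry headers but no request body, so a sketch of the full flow below uses plain `fetch`; the server URL, API-key auth, chunk size, and reading `upload-offset` from the HEAD response are assumptions, not repo code:

```ts
// Sketch of the resumable flow against the endpoints above, assuming
// buildUploadHeaders from the earlier sketch is in scope.
async function uploadResumable(serverUrl: string, apiKey: string, file: Buffer, filename: string) {
  const common = { 'x-api-key': apiKey, 'upload-draft-interop-version': '8' };

  // 1. Start an incomplete upload (Upload-Complete: ?0) with the first chunk;
  //    Upload-Length announces the total size, as the controller tests require.
  const firstChunk = file.subarray(0, 1024 * 1024);
  const started = await fetch(`${serverUrl}/upload`, {
    method: 'POST',
    headers: {
      ...common,
      ...buildUploadHeaders(file, filename),
      'upload-complete': '?0',
      'upload-length': String(file.byteLength),
    },
    body: firstChunk,
  });
  const { id } = (await started.json()) as { id: string };

  // 2. After an interruption, ask the server how many bytes it already has.
  const status = await fetch(`${serverUrl}/upload/${id}`, { method: 'HEAD', headers: common });
  const offset = Number(status.headers.get('upload-offset') ?? firstChunk.byteLength);

  // 3. Send the remaining bytes and mark the upload complete (?1).
  await fetch(`${serverUrl}/upload/${id}`, {
    method: 'PATCH',
    headers: { ...common, 'upload-offset': String(offset), 'upload-complete': '?1' },
    body: file.subarray(offset),
  });
}
```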
21 pnpm-lock.yaml generated
@@ -63,7 +63,7 @@ importers:
      specifier: ^4.13.1
      version: 4.13.4
    '@types/node':
      specifier: ^22.18.13
      specifier: ^22.18.12
      version: 22.18.13
    '@vitest/coverage-v8':
      specifier: ^3.0.0
@@ -191,10 +191,6 @@ importers:
        version: 5.9.3

  e2e:
    dependencies:
      structured-headers:
        specifier: ^2.0.2
        version: 2.0.2
    devDependencies:
      '@eslint/js':
        specifier: ^9.8.0
@@ -215,7 +211,7 @@ importers:
        specifier: ^3.4.2
        version: 3.7.1
      '@types/node':
        specifier: ^22.18.13
        specifier: ^22.18.12
        version: 22.18.13
      '@types/oidc-provider':
        specifier: ^9.0.0
@@ -297,7 +293,7 @@ importers:
        version: 1.0.4
    devDependencies:
      '@types/node':
        specifier: ^22.18.13
        specifier: ^22.18.12
        version: 22.18.13
      typescript:
        specifier: ^5.3.3
@@ -515,9 +511,6 @@ importers:
      socket.io:
        specifier: ^4.8.1
        version: 4.8.1
      structured-headers:
        specifier: ^2.0.2
        version: 2.0.2
      tailwindcss-preset-email:
        specifier: ^1.4.0
        version: 1.4.1(tailwindcss@3.4.18(yaml@2.8.1))
@@ -589,7 +582,7 @@ importers:
      specifier: ^2.0.0
      version: 2.0.0
    '@types/node':
      specifier: ^22.18.13
      specifier: ^22.18.12
      version: 22.18.13
    '@types/nodemailer':
      specifier: ^7.0.0
@@ -10414,10 +10407,6 @@ packages:
    resolution: {integrity: sha512-KIy5nylvC5le1OdaaoCJ07L+8iQzJHGH6pWDuzS+d07Cu7n1MZ2x26P8ZKIWfbK02+XIL8Mp4RkWeqdUCrDMfg==}
    engines: {node: '>=18'}

  structured-headers@2.0.2:
    resolution: {integrity: sha512-IUul56vVHuMg2UxWhwDj9zVJE6ztYEQQkynr1FQ/NydPhivtk5+Qb2N1RS36owEFk2fNUriTguJ2R7htRObcdA==}
    engines: {node: '>=18', npm: '>=6'}

  style-to-js@1.1.18:
    resolution: {integrity: sha512-JFPn62D4kJaPTnhFUI244MThx+FEGbi+9dw1b9yBBQ+1CZpV7QAT8kUtJ7b7EUNdHajjF/0x8fT+16oLJoojLg==}
@@ -23408,8 +23397,6 @@ snapshots:
    dependencies:
      '@tokenizer/token': 0.3.0

  structured-headers@2.0.2: {}

  style-to-js@1.1.18:
    dependencies:
      style-to-object: 1.0.11
@@ -94,7 +94,7 @@
| LivePhoto/MotionPhoto воспроизведение и бекап | Да | Да |
| Отображение 360° изображений | Нет | Да |
| Настраиваемая структура хранилища | Да | Да |
| Общий доступ к контенту | Да | Да |
| Общий доступ к контенту | Нет | Да |
| Архив и избранное | Да | Да |
| Мировая карта | Да | Да |
| Совместное использование | Да | Да |
@@ -104,7 +104,7 @@
| Галереи только для просмотра | Да | Да |
| Коллажи | Да | Да |
| Метки (теги) | Нет | Да |
| Просмотр папкой | Да | Да |
| Просмотр папкой | Нет | Да |

## Перевод
@@ -1,6 +1,6 @@
{
  "name": "immich",
  "version": "2.2.3",
  "version": "2.2.1",
  "description": "",
  "author": "",
  "private": true,
@@ -104,7 +104,6 @@
    "sharp": "^0.34.4",
    "sirv": "^3.0.0",
    "socket.io": "^4.8.1",
    "structured-headers": "^2.0.2",
    "tailwindcss-preset-email": "^1.4.0",
    "thumbhash": "^0.1.1",
    "ua-parser-js": "^2.0.0",
@@ -130,7 +129,7 @@
    "@types/luxon": "^3.6.2",
    "@types/mock-fs": "^4.13.1",
    "@types/multer": "^2.0.0",
    "@types/node": "^22.18.13",
    "@types/node": "^22.18.12",
    "@types/nodemailer": "^7.0.0",
    "@types/picomatch": "^4.0.0",
    "@types/pngjs": "^6.0.5",
@@ -22,9 +22,6 @@ export interface SystemConfig {
      cronExpression: string;
      keepLastAmount: number;
    };
    upload: {
      maxAgeHours: number;
    };
  };
  ffmpeg: {
    crf: number;
@@ -143,7 +140,6 @@ export interface SystemConfig {
    clusterNewFaces: boolean;
    generateMemories: boolean;
    syncQuotaUsage: boolean;
    removeStaleUploads: boolean;
  };
  trash: {
    enabled: boolean;
@@ -202,9 +198,6 @@ export const defaults = Object.freeze<SystemConfig>({
      cronExpression: CronExpression.EVERY_DAY_AT_2AM,
      keepLastAmount: 14,
    },
    upload: {
      maxAgeHours: 72,
    },
  },
  ffmpeg: {
    crf: 23,
@@ -348,7 +341,6 @@ export const defaults = Object.freeze<SystemConfig>({
    syncQuotaUsage: true,
    missingThumbnails: true,
    clusterNewFaces: true,
    removeStaleUploads: true,
  },
  trash: {
    enabled: true,
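The removed defaults pair `nightlyTasks.removeStaleUploads` with `backups.upload.maxAgeHours: 72`, which suggests the nightly task pruned partial uploads older than the configured age. A hypothetical sketch of that cutoff computation (all names here are assumptions, not repo code):

```ts
// Hypothetical sketch: how a nightly removeStaleUploads task would presumably
// apply backups.upload.maxAgeHours (default 72) to in-progress uploads.
interface PartialUpload {
  id: string;
  createdAt: Date;
}

function findStaleUploads(uploads: PartialUpload[], maxAgeHours: number, now = new Date()): PartialUpload[] {
  const cutoff = now.getTime() - maxAgeHours * 60 * 60 * 1000;
  return uploads.filter((upload) => upload.createdAt.getTime() < cutoff);
}
```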
@@ -1,445 +0,0 @@
import { createHash, randomUUID } from 'node:crypto';
import { AssetUploadController } from 'src/controllers/asset-upload.controller';
import { AssetUploadService } from 'src/services/asset-upload.service';
import { serializeDictionary } from 'structured-headers';
import request from 'supertest';
import { factory } from 'test/small.factory';
import { ControllerContext, controllerSetup, mockBaseService } from 'test/utils';

const makeAssetData = (overrides?: Partial<any>): string => {
  return serializeDictionary({
    filename: 'test-image.jpg',
    'device-asset-id': 'test-asset-id',
    'device-id': 'test-device',
    'file-created-at': new Date('2025-01-02T00:00:00Z').toISOString(),
    'file-modified-at': new Date('2025-01-01T00:00:00Z').toISOString(),
    'is-favorite': false,
    ...overrides,
  });
};

describe(AssetUploadController.name, () => {
  let ctx: ControllerContext;
  let buffer: Buffer;
  let checksum: string;
  const service = mockBaseService(AssetUploadService);

  beforeAll(async () => {
    ctx = await controllerSetup(AssetUploadController, [{ provide: AssetUploadService, useValue: service }]);
    return () => ctx.close();
  });

  beforeEach(() => {
    service.resetAllMocks();
    service.startUpload.mockImplementation((_, __, res, ___) => {
      res.send();
      return Promise.resolve();
    });
    service.resumeUpload.mockImplementation((_, __, res, ___, ____) => {
      res.send();
      return Promise.resolve();
    });
    service.cancelUpload.mockImplementation((_, __, res) => {
      res.send();
      return Promise.resolve();
    });
    service.getUploadStatus.mockImplementation((_, res, __, ___) => {
      res.send();
      return Promise.resolve();
    });
    ctx.reset();

    buffer = Buffer.from(randomUUID());
    checksum = `sha=:${createHash('sha1').update(buffer).digest('base64')}:`;
  });

  describe('POST /upload', () => {
    it('should be an authenticated route', async () => {
      await request(ctx.getHttpServer()).post('/upload');
      expect(ctx.authenticate).toHaveBeenCalled();
    });

    it('should require at least version 3 of Upload-Draft-Interop-Version header if provided', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('X-Immich-Asset-Data', makeAssetData())
        .set('Upload-Draft-Interop-Version', '2')
        .set('Repr-Digest', checksum)
        .set('Upload-Complete', '?1')
        .set('Upload-Length', '1024')
        .send(buffer);

      expect(status).toBe(400);
      expect(body).toEqual(
        expect.objectContaining({
          message: expect.arrayContaining(['version must not be less than 3']),
        }),
      );
    });

    it('should require X-Immich-Asset-Data header', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('Upload-Draft-Interop-Version', '8')
        .set('Repr-Digest', checksum)
        .set('Upload-Complete', '?1')
        .set('Upload-Length', '1024')
        .send(buffer);

      expect(status).toBe(400);
      expect(body).toEqual(expect.objectContaining({ message: 'x-immich-asset-data header is required' }));
    });

    it('should require Repr-Digest header', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('Upload-Draft-Interop-Version', '8')
        .set('X-Immich-Asset-Data', makeAssetData())
        .set('Upload-Complete', '?1')
        .set('Upload-Length', '1024')
        .send(buffer);

      expect(status).toBe(400);
      expect(body).toEqual(expect.objectContaining({ message: 'Missing repr-digest header' }));
    });

    it('should allow conventional upload without Upload-Complete header', async () => {
      const { status } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('X-Immich-Asset-Data', makeAssetData())
        .set('Repr-Digest', checksum)
        .set('Upload-Length', '1024')
        .send(buffer);

      expect(status).toBe(201);
    });

    it('should require Upload-Length header for incomplete upload', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('Upload-Draft-Interop-Version', '8')
        .set('X-Immich-Asset-Data', makeAssetData())
        .set('Repr-Digest', checksum)
        .set('Upload-Complete', '?0')
        .send(buffer);

      expect(status).toBe(400);
      expect(body).toEqual(expect.objectContaining({ message: 'Missing upload-length header' }));
    });

    it('should infer upload length from content length if complete upload', async () => {
      const { status } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('Upload-Draft-Interop-Version', '8')
        .set('X-Immich-Asset-Data', makeAssetData())
        .set('Repr-Digest', checksum)
        .set('Upload-Complete', '?1')
        .send(buffer);

      expect(status).toBe(201);
    });

    it('should reject invalid Repr-Digest format', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('Upload-Draft-Interop-Version', '8')
        .set('X-Immich-Asset-Data', checksum)
        .set('Repr-Digest', 'invalid-format')
        .set('Upload-Complete', '?1')
        .set('Upload-Length', '1024')
        .send(buffer);

      expect(status).toBe(400);
      expect(body).toEqual(expect.objectContaining({ message: 'Invalid repr-digest header' }));
    });

    it('should validate device-asset-id is required in asset data', async () => {
      const assetData = serializeDictionary({
        filename: 'test.jpg',
        'device-id': 'test-device',
        'file-created-at': new Date().toISOString(),
        'file-modified-at': new Date().toISOString(),
      });

      const { status, body } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('Upload-Draft-Interop-Version', '8')
        .set('X-Immich-Asset-Data', assetData)
        .set('Repr-Digest', checksum)
        .set('Upload-Complete', '?1')
        .set('Upload-Length', '1024')
        .send(buffer);

      expect(status).toBe(400);
      expect(body).toEqual(
        expect.objectContaining({
          message: expect.arrayContaining([expect.stringContaining('deviceAssetId')]),
        }),
      );
    });

    it('should validate device-id is required in asset data', async () => {
      const assetData = serializeDictionary({
        filename: 'test.jpg',
        'device-asset-id': 'test-asset',
        'file-created-at': new Date().toISOString(),
        'file-modified-at': new Date().toISOString(),
      });

      const { status, body } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('Upload-Draft-Interop-Version', '8')
        .set('X-Immich-Asset-Data', assetData)
        .set('Repr-Digest', checksum)
        .set('Upload-Complete', '?1')
        .set('Upload-Length', '1024')
        .send(buffer);

      expect(status).toBe(400);
      expect(body).toEqual(
        expect.objectContaining({
          message: expect.arrayContaining([expect.stringContaining('deviceId')]),
        }),
      );
    });

    it('should validate filename is required in asset data', async () => {
      const assetData = serializeDictionary({
        'device-asset-id': 'test-asset',
        'device-id': 'test-device',
        'file-created-at': new Date().toISOString(),
        'file-modified-at': new Date().toISOString(),
      });

      const { status, body } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('Upload-Draft-Interop-Version', '8')
        .set('X-Immich-Asset-Data', assetData)
        .set('Repr-Digest', checksum)
        .set('Upload-Complete', '?1')
        .set('Upload-Length', '1024')
        .send(buffer);

      expect(status).toBe(400);
      expect(body).toEqual(
        expect.objectContaining({
          message: expect.arrayContaining([expect.stringContaining('filename')]),
        }),
      );
    });

    it('should accept Upload-Incomplete header for version 3', async () => {
      const { body, status } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('Upload-Draft-Interop-Version', '3')
        .set('X-Immich-Asset-Data', makeAssetData())
        .set('Repr-Digest', checksum)
        .set('Upload-Incomplete', '?0')
        .set('Upload-Complete', '?1')
        .set('Upload-Length', '1024')
        .send(buffer);

      expect(body).toEqual({});
      expect(status).not.toBe(400);
    });

    it('should validate Upload-Complete is a boolean structured field', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('Upload-Draft-Interop-Version', '8')
        .set('X-Immich-Asset-Data', makeAssetData())
        .set('Repr-Digest', checksum)
        .set('Upload-Complete', 'true')
        .set('Upload-Length', '1024')
        .send(buffer);

      expect(status).toBe(400);
      expect(body).toEqual(expect.objectContaining({ message: 'upload-complete must be a structured boolean value' }));
    });

    it('should validate Upload-Length is a positive integer', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .post('/upload')
        .set('Upload-Draft-Interop-Version', '8')
        .set('X-Immich-Asset-Data', makeAssetData())
        .set('Repr-Digest', checksum)
        .set('Upload-Complete', '?1')
        .set('Upload-Length', '-100')
        .send(buffer);

      expect(status).toBe(400);
      expect(body).toEqual(
        expect.objectContaining({
          message: expect.arrayContaining(['uploadLength must not be less than 1']),
        }),
      );
    });
  });

  describe('PATCH /upload/:id', () => {
    const uploadId = factory.uuid();

    it('should be an authenticated route', async () => {
      await request(ctx.getHttpServer()).patch(`/upload/${uploadId}`);
      expect(ctx.authenticate).toHaveBeenCalled();
    });

    it('should require Upload-Draft-Interop-Version header', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .patch(`/upload/${uploadId}`)
        .set('Upload-Offset', '0')
        .set('Upload-Complete', '?1')
        .send(Buffer.from('test'));

      expect(status).toBe(400);
      expect(body).toEqual(
        expect.objectContaining({
          message: expect.arrayContaining(['version must be an integer number', 'version must not be less than 3']),
        }),
      );
    });

    it('should require Upload-Offset header', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .patch(`/upload/${uploadId}`)
        .set('Upload-Draft-Interop-Version', '8')
        .set('Upload-Complete', '?1')
        .send(Buffer.from('test'));

      expect(status).toBe(400);
      expect(body).toEqual(
        expect.objectContaining({
          message: expect.arrayContaining([
            'uploadOffset must be an integer number',
            'uploadOffset must not be less than 0',
          ]),
        }),
      );
    });

    it('should require Upload-Complete header', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .patch(`/upload/${uploadId}`)
        .set('Upload-Draft-Interop-Version', '8')
        .set('Upload-Offset', '0')
        .set('Content-Type', 'application/partial-upload')
        .send(Buffer.from('test'));

      expect(status).toBe(400);
      expect(body).toEqual(expect.objectContaining({ message: ['uploadComplete must be a boolean value'] }));
    });

    it('should validate UUID parameter', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .patch('/upload/invalid-uuid')
        .set('Upload-Draft-Interop-Version', '8')
        .set('Upload-Offset', '0')
        .set('Upload-Complete', '?0')
        .send(Buffer.from('test'));

      expect(status).toBe(400);
      expect(body).toEqual(expect.objectContaining({ message: ['id must be a UUID'] }));
    });

    it('should validate Upload-Offset is a non-negative integer', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .patch(`/upload/${uploadId}`)
        .set('Upload-Draft-Interop-Version', '8')
        .set('Upload-Offset', '-50')
        .set('Upload-Complete', '?0')
        .send(Buffer.from('test'));

      expect(status).toBe(400);
      expect(body).toEqual(
        expect.objectContaining({
          message: expect.arrayContaining(['uploadOffset must not be less than 0']),
        }),
      );
    });

    it('should require Content-Type: application/partial-upload for version >= 6', async () => {
      const { status, body } = await request(ctx.getHttpServer())
        .patch(`/upload/${uploadId}`)
        .set('Upload-Draft-Interop-Version', '6')
        .set('Upload-Offset', '0')
        .set('Upload-Complete', '?0')
        .set('Content-Type', 'application/octet-stream')
        .send(Buffer.from('test'));

      expect(status).toBe(400);
      expect(body).toEqual(
        expect.objectContaining({
          message: ['contentType must be equal to application/partial-upload'],
        }),
      );
    });

    it('should allow other Content-Type for version < 6', async () => {
      const { body } = await request(ctx.getHttpServer())
        .patch(`/upload/${uploadId}`)
        .set('Upload-Draft-Interop-Version', '3')
        .set('Upload-Offset', '0')
        .set('Upload-Incomplete', '?1')
        .set('Content-Type', 'application/octet-stream')
        .send();

      // Will fail for other reasons, but not content-type validation
      expect(body).not.toEqual(
        expect.objectContaining({
          message: expect.arrayContaining([expect.stringContaining('contentType')]),
        }),
      );
    });

    it('should accept Upload-Incomplete header for version 3', async () => {
      const { status } = await request(ctx.getHttpServer())
        .patch(`/upload/${uploadId}`)
        .set('Upload-Draft-Interop-Version', '3')
        .set('Upload-Offset', '0')
        .set('Upload-Incomplete', '?1')
        .send();

      // Should not fail validation
      expect(status).not.toBe(400);
    });
  });

  describe('DELETE /upload/:id', () => {
    const uploadId = factory.uuid();

    it('should be an authenticated route', async () => {
      await request(ctx.getHttpServer()).delete(`/upload/${uploadId}`);
      expect(ctx.authenticate).toHaveBeenCalled();
    });

    it('should validate UUID parameter', async () => {
      const { status, body } = await request(ctx.getHttpServer()).delete('/upload/invalid-uuid');

      expect(status).toBe(400);
      expect(body).toEqual(expect.objectContaining({ message: ['id must be a UUID'] }));
    });
  });

  describe('HEAD /upload/:id', () => {
    const uploadId = factory.uuid();

    it('should be an authenticated route', async () => {
      await request(ctx.getHttpServer()).head(`/upload/${uploadId}`);
      expect(ctx.authenticate).toHaveBeenCalled();
    });

    it('should require Upload-Draft-Interop-Version header', async () => {
      const { status } = await request(ctx.getHttpServer()).head(`/upload/${uploadId}`);

      expect(status).toBe(400);
    });

    it('should validate UUID parameter', async () => {
      const { status } = await request(ctx.getHttpServer())
        .head('/upload/invalid-uuid')
        .set('Upload-Draft-Interop-Version', '8');

      expect(status).toBe(400);
    });
  });
});
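The deleted spec exercised the draft resumable-upload (RUFH) headers end to end. For reference, a minimal sketch of how a client could assemble the same headers outside the test harness; `serializeDictionary` and the `sha=:…:` digest shape come straight from the spec above, while the device id and file path are placeholders:

```typescript
import { createHash } from 'node:crypto';
import { readFileSync } from 'node:fs';
import { serializeDictionary } from 'structured-headers';

// Sketch: build the headers the upload endpoint validates.
const buildUploadHeaders = (data: Buffer, filename: string) => ({
  'X-Immich-Asset-Data': serializeDictionary({
    filename,
    'device-asset-id': `${filename}-${data.length}`,
    'device-id': 'cli-example', // hypothetical device id
    'file-created-at': new Date().toISOString(),
    'file-modified-at': new Date().toISOString(),
  }),
  // Structured byte-sequence digest: sha=:<base64 of the 20-byte SHA-1>:
  'Repr-Digest': `sha=:${createHash('sha1').update(data).digest('base64')}:`,
  'Upload-Draft-Interop-Version': '8',
  'Upload-Complete': '?1', // structured boolean: ?1 complete, ?0 partial
  'Upload-Length': String(data.length),
});

const data = readFileSync('photo.jpg'); // placeholder path
console.log(buildUploadHeaders(data, 'photo.jpg'));
```
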
@@ -1,108 +0,0 @@
import { Controller, Delete, Head, HttpCode, HttpStatus, Options, Param, Patch, Post, Req, Res } from '@nestjs/common';
import { ApiHeader, ApiOkResponse, ApiTags } from '@nestjs/swagger';
import { Request, Response } from 'express';
import { GetUploadStatusDto, Header, ResumeUploadDto, StartUploadDto, UploadOkDto } from 'src/dtos/asset-upload.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { ImmichHeader, Permission } from 'src/enum';
import { Auth, Authenticated } from 'src/middleware/auth.guard';
import { AssetUploadService } from 'src/services/asset-upload.service';
import { validateSyncOrReject } from 'src/utils/request';
import { UUIDParamDto } from 'src/validation';

const apiInteropVersion = {
  name: Header.InteropVersion,
  description: `Indicates the version of the RUFH protocol supported by the client.`,
  required: true,
};

const apiUploadComplete = {
  name: Header.UploadComplete,
  description:
    'Structured boolean indicating whether this request completes the file. Use Upload-Incomplete instead for version <= 3.',
  required: true,
};

const apiContentLength = {
  name: Header.ContentLength,
  description: 'Non-negative size of the request body in bytes.',
  required: true,
};

// This is important to let go of the asset lock for an inactive request
const SOCKET_TIMEOUT_MS = 30_000;

@ApiTags('Upload')
@Controller('upload')
export class AssetUploadController {
  constructor(private service: AssetUploadService) {}

  @Post()
  @Authenticated({ sharedLink: true, permission: Permission.AssetUpload })
  @ApiHeader({
    name: ImmichHeader.AssetData,
    description: `RFC 9651 structured dictionary containing asset metadata with the following keys:
- device-asset-id (string, required): Unique device asset identifier
- device-id (string, required): Device identifier
- file-created-at (string/date, required): ISO 8601 date string or Unix timestamp
- file-modified-at (string/date, required): ISO 8601 date string or Unix timestamp
- filename (string, required): Original filename
- is-favorite (boolean, optional): Favorite status
- live-photo-video-id (string, optional): Live photo ID for assets from iOS devices
- icloud-id (string, optional): iCloud identifier for assets from iOS devices`,
    required: true,
    example:
      'device-asset-id="abc123", device-id="phone1", filename="photo.jpg", file-created-at="2024-01-01T00:00:00Z", file-modified-at="2024-01-01T00:00:00Z"',
  })
  @ApiHeader({
    name: Header.ReprDigest,
    description:
      'RFC 9651 structured dictionary containing an `sha` (bytesequence) checksum used to detect duplicate files and validate data integrity.',
    required: true,
  })
  @ApiHeader({ ...apiInteropVersion, required: false })
  @ApiHeader({ ...apiUploadComplete, required: false })
  @ApiHeader(apiContentLength)
  @ApiOkResponse({ type: UploadOkDto })
  startUpload(@Auth() auth: AuthDto, @Req() req: Request, @Res() res: Response): Promise<void> {
    res.setTimeout(SOCKET_TIMEOUT_MS);
    return this.service.startUpload(auth, req, res, validateSyncOrReject(StartUploadDto, req.headers));
  }

  @Patch(':id')
  @Authenticated({ sharedLink: true, permission: Permission.AssetUpload })
  @ApiHeader({
    name: Header.UploadOffset,
    description:
      'Non-negative byte offset indicating the starting position of the data in the request body within the entire file.',
    required: true,
  })
  @ApiHeader(apiInteropVersion)
  @ApiHeader(apiUploadComplete)
  @ApiHeader(apiContentLength)
  @ApiOkResponse({ type: UploadOkDto })
  resumeUpload(@Auth() auth: AuthDto, @Req() req: Request, @Res() res: Response, @Param() { id }: UUIDParamDto) {
    res.setTimeout(SOCKET_TIMEOUT_MS);
    return this.service.resumeUpload(auth, req, res, id, validateSyncOrReject(ResumeUploadDto, req.headers));
  }

  @Delete(':id')
  @Authenticated({ sharedLink: true, permission: Permission.AssetUpload })
  cancelUpload(@Auth() auth: AuthDto, @Res() res: Response, @Param() { id }: UUIDParamDto) {
    res.setTimeout(SOCKET_TIMEOUT_MS);
    return this.service.cancelUpload(auth, id, res);
  }

  @Head(':id')
  @Authenticated({ sharedLink: true, permission: Permission.AssetUpload })
  @ApiHeader(apiInteropVersion)
  getUploadStatus(@Auth() auth: AuthDto, @Req() req: Request, @Res() res: Response, @Param() { id }: UUIDParamDto) {
    res.setTimeout(SOCKET_TIMEOUT_MS);
    return this.service.getUploadStatus(auth, res, id, validateSyncOrReject(GetUploadStatusDto, req.headers));
  }

  @Options()
  @HttpCode(HttpStatus.NO_CONTENT)
  getUploadOptions(@Res() res: Response) {
    return this.service.getUploadOptions(res);
  }
}
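The controller wires POST/PATCH/HEAD/DELETE into one resumable flow. A hedged sketch of how a client might drive that flow against these routes; the base URL and auth header are placeholders, error handling is omitted, and the `Location` and `Upload-Offset` response headers are assumptions taken from the RUFH draft rather than from this diff:

```typescript
// Sketch of the resumable flow exposed by the controller above.
const BASE = 'http://immich.example/api/upload'; // hypothetical base URL
const AUTH = { 'x-api-key': '<key>' }; // hypothetical auth header

async function resumableUpload(headers: Record<string, string>, data: Uint8Array) {
  // 1. Start an incomplete upload; assumed: the response Location header
  //    carries the URL of the new upload resource (RUFH draft behavior).
  const start = await fetch(BASE, {
    method: 'POST',
    headers: { ...AUTH, ...headers, 'Upload-Complete': '?0', 'Upload-Length': String(data.length) },
    body: data.slice(0, 1024),
  });
  const location = start.headers.get('location')!;

  // 2. After an interruption, ask where the server left off.
  const status = await fetch(location, {
    method: 'HEAD',
    headers: { ...AUTH, 'Upload-Draft-Interop-Version': '8' },
  });
  const offset = Number(status.headers.get('upload-offset') ?? 0);

  // 3. Send the remainder and mark the upload complete.
  await fetch(location, {
    method: 'PATCH',
    headers: {
      ...AUTH,
      'Upload-Draft-Interop-Version': '8',
      'Upload-Offset': String(offset),
      'Upload-Complete': '?1',
      'Content-Type': 'application/partial-upload',
    },
    body: data.slice(offset),
  });
}
```
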
@@ -3,7 +3,6 @@ import { AlbumController } from 'src/controllers/album.controller';
import { ApiKeyController } from 'src/controllers/api-key.controller';
import { AppController } from 'src/controllers/app.controller';
import { AssetMediaController } from 'src/controllers/asset-media.controller';
import { AssetUploadController } from 'src/controllers/asset-upload.controller';
import { AssetController } from 'src/controllers/asset.controller';
import { AuthAdminController } from 'src/controllers/auth-admin.controller';
import { AuthController } from 'src/controllers/auth.controller';
@@ -41,7 +40,6 @@ export const controllers = [
  AppController,
  AssetController,
  AssetMediaController,
  AssetUploadController,
  AuthController,
  AuthAdminController,
  DownloadController,
@@ -356,7 +356,7 @@ export const columns = {
    'asset.stackId',
    'asset.libraryId',
  ],
  syncAlbumUser: ['album_user.albumId as albumId', 'album_user.userId as userId', 'album_user.role'],
  syncAlbumUser: ['album_user.albumsId as albumId', 'album_user.usersId as userId', 'album_user.role'],
  syncStack: ['stack.id', 'stack.createdAt', 'stack.updatedAt', 'stack.primaryAssetId', 'stack.ownerId'],
  syncUser: ['id', 'name', 'email', 'avatarColor', 'deletedAt', 'updateId', 'profileImagePath', 'profileChangedAt'],
  stack: ['stack.id', 'stack.primaryAssetId', 'ownerId'],
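The `syncAlbumUser` change shows the pattern used throughout this branch: the physical join-table columns move to `albumsId`/`usersId`, and a select-time alias keeps the API shape unchanged. A minimal Kysely-style sketch of the same pattern (immich uses Kysely; the schema type is elided, so `any` stands in for the generated DB type):

```typescript
import { Kysely } from 'kysely';

// Assumed: `db` is the app's Kysely instance over the immich schema.
const getAlbumUsers = (db: Kysely<any>, userId: string) =>
  db
    .selectFrom('album_user')
    .select([
      'album_user.albumsId as albumId', // physical column is now albumsId...
      'album_user.usersId as userId', // ...aliases preserve the DTO shape
      'album_user.role',
    ])
    .where('album_user.usersId', '=', userId)
    .execute();
```
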
@@ -1,196 +0,0 @@
import { BadRequestException } from '@nestjs/common';
import { ApiProperty } from '@nestjs/swagger';
import { Expose, plainToInstance, Transform, Type } from 'class-transformer';
import { Equals, IsBoolean, IsInt, IsNotEmpty, IsString, Min, ValidateIf, ValidateNested } from 'class-validator';
import { ImmichHeader } from 'src/enum';
import { Optional, ValidateBoolean, ValidateDate } from 'src/validation';
import { parseDictionary } from 'structured-headers';

export enum Header {
  ContentLength = 'content-length',
  ContentType = 'content-type',
  InteropVersion = 'upload-draft-interop-version',
  ReprDigest = 'repr-digest',
  UploadComplete = 'upload-complete',
  UploadIncomplete = 'upload-incomplete',
  UploadLength = 'upload-length',
  UploadOffset = 'upload-offset',
}

export class UploadAssetDataDto {
  @IsNotEmpty()
  @IsString()
  deviceAssetId!: string;

  @IsNotEmpty()
  @IsString()
  deviceId!: string;

  @ValidateDate()
  fileCreatedAt!: Date;

  @ValidateDate()
  fileModifiedAt!: Date;

  @IsString()
  @IsNotEmpty()
  filename!: string;

  @ValidateBoolean({ optional: true })
  isFavorite?: boolean;

  @Optional()
  @IsString()
  @IsNotEmpty()
  livePhotoVideoId?: string;

  @Optional()
  @IsString()
  @IsNotEmpty()
  iCloudId?: string;
}

export class BaseUploadHeadersDto {
  @Expose({ name: Header.ContentLength })
  @Min(0)
  @IsInt()
  @Type(() => Number)
  contentLength!: number;
}

export class StartUploadDto extends BaseUploadHeadersDto {
  @Expose({ name: Header.InteropVersion })
  @Optional()
  @Min(3)
  @IsInt()
  @Type(() => Number)
  version?: number;

  @Expose({ name: ImmichHeader.AssetData })
  @ValidateNested()
  @Transform(({ value }) => {
    if (!value) {
      throw new BadRequestException(`${ImmichHeader.AssetData} header is required`);
    }

    try {
      const dict = parseDictionary(value);
      return plainToInstance(UploadAssetDataDto, {
        deviceAssetId: dict.get('device-asset-id')?.[0],
        deviceId: dict.get('device-id')?.[0],
        filename: dict.get('filename')?.[0],
        duration: dict.get('duration')?.[0],
        fileCreatedAt: dict.get('file-created-at')?.[0],
        fileModifiedAt: dict.get('file-modified-at')?.[0],
        isFavorite: dict.get('is-favorite')?.[0],
        livePhotoVideoId: dict.get('live-photo-video-id')?.[0],
        iCloudId: dict.get('icloud-id')?.[0],
      });
    } catch {
      throw new BadRequestException(`${ImmichHeader.AssetData} must be a valid structured dictionary`);
    }
  })
  assetData!: UploadAssetDataDto;

  @Expose({ name: Header.ReprDigest })
  @Transform(({ value }) => {
    if (!value) {
      throw new BadRequestException(`Missing ${Header.ReprDigest} header`);
    }

    const checksum = parseDictionary(value).get('sha')?.[0];
    if (checksum instanceof ArrayBuffer && checksum.byteLength === 20) {
      return Buffer.from(checksum);
    }
    throw new BadRequestException(`Invalid ${Header.ReprDigest} header`);
  })
  checksum!: Buffer;

  @Expose()
  @Min(1)
  @IsInt()
  @Transform(({ obj }) => {
    const uploadLength = obj[Header.UploadLength];
    if (uploadLength != undefined) {
      return Number(uploadLength);
    }

    const contentLength = obj[Header.ContentLength];
    if (contentLength && isUploadComplete(obj) !== false) {
      return Number(contentLength);
    }
    throw new BadRequestException(`Missing ${Header.UploadLength} header`);
  })
  uploadLength!: number;

  @Expose()
  @Transform(({ obj }) => isUploadComplete(obj))
  uploadComplete?: boolean;
}

export class ResumeUploadDto extends BaseUploadHeadersDto {
  @Expose({ name: Header.InteropVersion })
  @Min(3)
  @IsInt()
  @Type(() => Number)
  version!: number;

  @Expose({ name: Header.ContentType })
  @ValidateIf((o) => o.version && o.version >= 6)
  @Equals('application/partial-upload')
  contentType!: string;

  @Expose({ name: Header.UploadLength })
  @Min(1)
  @IsInt()
  @Type(() => Number)
  @Optional()
  uploadLength?: number;

  @Expose({ name: Header.UploadOffset })
  @Min(0)
  @IsInt()
  @Type(() => Number)
  uploadOffset!: number;

  @Expose()
  @IsBoolean()
  @Transform(({ obj }) => isUploadComplete(obj))
  uploadComplete!: boolean;
}

export class GetUploadStatusDto {
  @Expose({ name: Header.InteropVersion })
  @Min(3)
  @IsInt()
  @Type(() => Number)
  version!: number;
}

export class UploadOkDto {
  @ApiProperty()
  id!: string;
}

const STRUCTURED_TRUE = '?1';
const STRUCTURED_FALSE = '?0';

function isUploadComplete(obj: any) {
  const uploadComplete = obj[Header.UploadComplete];
  if (uploadComplete === STRUCTURED_TRUE) {
    return true;
  } else if (uploadComplete === STRUCTURED_FALSE) {
    return false;
  } else if (uploadComplete !== undefined) {
    throw new BadRequestException('upload-complete must be a structured boolean value');
  }

  const uploadIncomplete = obj[Header.UploadIncomplete];
  if (uploadIncomplete === STRUCTURED_TRUE) {
    return false;
  } else if (uploadIncomplete === STRUCTURED_FALSE) {
    return true;
  } else if (uploadIncomplete !== undefined) {
    throw new BadRequestException('upload-incomplete must be a structured boolean value');
  }
}
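For reference, the semantics `isUploadComplete` resolves from the two header generations, summarized as a small truth table derived directly from the function above:

```typescript
// upload-complete takes precedence; upload-incomplete is its legacy inverse
// (version <= 3); absence of both means a conventional single-request upload.
const examples: Array<[Record<string, string>, boolean | undefined]> = [
  [{ 'upload-complete': '?1' }, true],
  [{ 'upload-complete': '?0' }, false],
  [{ 'upload-incomplete': '?1' }, false], // inverse of upload-complete
  [{ 'upload-incomplete': '?0' }, true],
  [{}, undefined], // neither header: treated as a conventional upload
];
// Any other value for either header yields a 400 BadRequestException.
```
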
@@ -55,23 +55,11 @@ export class DatabaseBackupConfig {
  keepLastAmount!: number;
}

export class UploadBackupConfig {
  @IsInt()
  @IsPositive()
  @IsNotEmpty()
  maxAgeHours!: number;
}

export class SystemConfigBackupsDto {
  @Type(() => DatabaseBackupConfig)
  @ValidateNested()
  @IsObject()
  database!: DatabaseBackupConfig;

  @Type(() => UploadBackupConfig)
  @ValidateNested()
  @IsObject()
  upload!: UploadBackupConfig;
}

export class SystemConfigFFmpegDto {
@@ -367,9 +355,6 @@ class SystemConfigNightlyTasksDto {

  @ValidateBoolean()
  syncQuotaUsage!: boolean;

  @ValidateBoolean()
  removeStaleUploads!: boolean;
}

class SystemConfigOAuthDto {
@@ -20,7 +20,6 @@ export enum ImmichHeader {
  SharedLinkSlug = 'x-immich-share-slug',
  Checksum = 'x-immich-checksum',
  Cid = 'x-immich-cid',
  AssetData = 'x-immich-asset-data',
}

export enum ImmichQuery {
@@ -307,7 +306,6 @@ export enum AssetStatus {
  Active = 'active',
  Trashed = 'trashed',
  Deleted = 'deleted',
  Partial = 'partial',
}

export enum SourceType {
@@ -498,7 +496,6 @@ export enum BootstrapEventPriority {
  JobService = -190,
  // Initialise config after other bootstrap services, stop other services from using config on bootstrap
  SystemConfig = 100,
  UploadService = 200,
}

export enum QueueName {
@@ -535,8 +532,6 @@ export enum JobName {
  AssetFileMigration = 'AssetFileMigration',
  AssetGenerateThumbnailsQueueAll = 'AssetGenerateThumbnailsQueueAll',
  AssetGenerateThumbnails = 'AssetGenerateThumbnails',
  PartialAssetCleanup = 'PartialAssetCleanup',
  PartialAssetCleanupQueueAll = 'PartialAssetCleanupQueueAll',

  AuditLogCleanup = 'AuditLogCleanup',
  AuditTableCleanup = 'AuditTableCleanup',
@@ -25,8 +25,8 @@ select
"album"."id"
from
"album"
left join "album_user" as "albumUsers" on "albumUsers"."albumId" = "album"."id"
left join "user" on "user"."id" = "albumUsers"."userId"
left join "album_user" as "albumUsers" on "albumUsers"."albumsId" = "album"."id"
left join "user" on "user"."id" = "albumUsers"."usersId"
and "user"."deletedAt" is null
where
"album"."id" in ($1)
@@ -52,8 +52,8 @@ select
"album"."id"
from
"album"
left join "album_user" on "album_user"."albumId" = "album"."id"
left join "user" on "user"."id" = "album_user"."userId"
left join "album_user" on "album_user"."albumsId" = "album"."id"
left join "user" on "user"."id" = "album_user"."usersId"
and "user"."deletedAt" is null
where
"album"."id" in ($1)
@@ -81,11 +81,11 @@ select
"asset"."livePhotoVideoId"
from
"album"
inner join "album_asset" as "albumAssets" on "album"."id" = "albumAssets"."albumId"
inner join "asset" on "asset"."id" = "albumAssets"."assetId"
inner join "album_asset" as "albumAssets" on "album"."id" = "albumAssets"."albumsId"
inner join "asset" on "asset"."id" = "albumAssets"."assetsId"
and "asset"."deletedAt" is null
left join "album_user" as "albumUsers" on "albumUsers"."albumId" = "album"."id"
left join "user" on "user"."id" = "albumUsers"."userId"
left join "album_user" as "albumUsers" on "albumUsers"."albumsId" = "album"."id"
left join "user" on "user"."id" = "albumUsers"."usersId"
and "user"."deletedAt" is null
cross join "target"
where
@@ -136,11 +136,11 @@ from
"shared_link"
left join "album" on "album"."id" = "shared_link"."albumId"
and "album"."deletedAt" is null
left join "shared_link_asset" on "shared_link_asset"."sharedLinkId" = "shared_link"."id"
left join "asset" on "asset"."id" = "shared_link_asset"."assetId"
left join "shared_link_asset" on "shared_link_asset"."sharedLinksId" = "shared_link"."id"
left join "asset" on "asset"."id" = "shared_link_asset"."assetsId"
and "asset"."deletedAt" is null
left join "album_asset" on "album_asset"."albumId" = "album"."id"
left join "asset" as "albumAssets" on "albumAssets"."id" = "album_asset"."assetId"
left join "album_asset" on "album_asset"."albumsId" = "album"."id"
left join "asset" as "albumAssets" on "albumAssets"."id" = "album_asset"."assetsId"
and "albumAssets"."deletedAt" is null
where
"shared_link"."id" = $1
@@ -43,13 +43,13 @@ select
from
"user"
where
"user"."id" = "album_user"."userId"
"user"."id" = "album_user"."usersId"
) as obj
) as "user"
from
"album_user"
where
"album_user"."albumId" = "album"."id"
"album_user"."albumsId" = "album"."id"
) as agg
) as "albumUsers",
(
@@ -76,9 +76,9 @@ select
from
"asset"
left join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
inner join "album_asset" on "album_asset"."assetId" = "asset"."id"
inner join "album_asset" on "album_asset"."assetsId" = "asset"."id"
where
"album_asset"."albumId" = "album"."id"
"album_asset"."albumsId" = "album"."id"
and "asset"."deletedAt" is null
and "asset"."visibility" in ('archive', 'timeline')
order by
@@ -134,18 +134,18 @@ select
from
"user"
where
"user"."id" = "album_user"."userId"
"user"."id" = "album_user"."usersId"
) as obj
) as "user"
from
"album_user"
where
"album_user"."albumId" = "album"."id"
"album_user"."albumsId" = "album"."id"
) as agg
) as "albumUsers"
from
"album"
inner join "album_asset" on "album_asset"."albumId" = "album"."id"
inner join "album_asset" on "album_asset"."albumsId" = "album"."id"
where
(
"album"."ownerId" = $1
@@ -154,11 +154,11 @@ where
from
"album_user"
where
"album_user"."albumId" = "album"."id"
and "album_user"."userId" = $2
"album_user"."albumsId" = "album"."id"
and "album_user"."usersId" = $2
)
)
and "album_asset"."assetId" = $3
and "album_asset"."assetsId" = $3
and "album"."deletedAt" is null
order by
"album"."createdAt" desc,
@@ -166,7 +166,7 @@ order by

-- AlbumRepository.getMetadataForIds
select
"album_asset"."albumId" as "albumId",
"album_asset"."albumsId" as "albumId",
min(
("asset"."localDateTime" AT TIME ZONE 'UTC'::text)::date
) as "startDate",
@@ -177,13 +177,13 @@ select
count("asset"."id")::int as "assetCount"
from
"asset"
inner join "album_asset" on "album_asset"."assetId" = "asset"."id"
inner join "album_asset" on "album_asset"."assetsId" = "asset"."id"
where
"asset"."visibility" in ('archive', 'timeline')
and "album_asset"."albumId" in ($1)
and "album_asset"."albumsId" in ($1)
and "asset"."deletedAt" is null
group by
"album_asset"."albumId"
"album_asset"."albumsId"

-- AlbumRepository.getOwned
select
@@ -228,13 +228,13 @@ select
from
"user"
where
"user"."id" = "album_user"."userId"
"user"."id" = "album_user"."usersId"
) as obj
) as "user"
from
"album_user"
where
"album_user"."albumId" = "album"."id"
"album_user"."albumsId" = "album"."id"
) as agg
) as "albumUsers",
(
@@ -283,13 +283,13 @@ select
from
"user"
where
"user"."id" = "album_user"."userId"
"user"."id" = "album_user"."usersId"
) as obj
) as "user"
from
"album_user"
where
"album_user"."albumId" = "album"."id"
"album_user"."albumsId" = "album"."id"
) as agg
) as "albumUsers",
(
@@ -332,10 +332,10 @@ where
from
"album_user"
where
"album_user"."albumId" = "album"."id"
"album_user"."albumsId" = "album"."id"
and (
"album"."ownerId" = $1
or "album_user"."userId" = $2
or "album_user"."usersId" = $2
)
)
or exists (
@@ -382,7 +382,7 @@ where
from
"album_user"
where
"album_user"."albumId" = "album"."id"
"album_user"."albumsId" = "album"."id"
)
and not exists (
select
@@ -397,7 +397,7 @@ order by

-- AlbumRepository.removeAssetsFromAll
delete from "album_asset"
where
"album_asset"."assetId" in ($1)
"album_asset"."assetsId" in ($1)

-- AlbumRepository.getAssetIds
select
@@ -405,8 +405,8 @@ select
from
"album_asset"
where
"album_asset"."albumId" = $1
and "album_asset"."assetId" in ($2)
"album_asset"."albumsId" = $1
and "album_asset"."assetsId" in ($2)

-- AlbumRepository.getContributorCounts
select
@@ -414,10 +414,10 @@ select
count(*) as "assetCount"
from
"album_asset"
inner join "asset" on "asset"."id" = "assetId"
inner join "asset" on "asset"."id" = "assetsId"
where
"asset"."deletedAt" is null
and "album_asset"."albumId" = $1
and "album_asset"."albumsId" = $1
group by
"asset"."ownerId"
order by
@@ -427,10 +427,10 @@ order by
insert into
"album_asset"
select
"album_asset"."albumId",
$1 as "assetId"
"album_asset"."albumsId",
$1 as "assetsId"
from
"album_asset"
where
"album_asset"."assetId" = $2
"album_asset"."assetsId" = $2
on conflict do nothing
@@ -2,12 +2,12 @@

-- AlbumUserRepository.create
insert into
"album_user" ("userId", "albumId")
"album_user" ("usersId", "albumsId")
values
($1, $2)
returning
"userId",
"albumId",
"usersId",
"albumsId",
"role"

-- AlbumUserRepository.update
@@ -15,13 +15,13 @@ update "album_user"
set
"role" = $1
where
"userId" = $2
and "albumId" = $3
"usersId" = $2
and "albumsId" = $3
returning
*

-- AlbumUserRepository.delete
delete from "album_user"
where
"userId" = $1
and "albumId" = $2
"usersId" = $1
and "albumsId" = $2
@@ -14,7 +14,6 @@ from
left join "smart_search" on "asset"."id" = "smart_search"."assetId"
where
"asset"."id" = $1::uuid
and "asset"."status" != 'partial'
limit
$2

@@ -32,16 +31,15 @@ select
"tag"."value"
from
"tag"
inner join "tag_asset" on "tag"."id" = "tag_asset"."tagId"
inner join "tag_asset" on "tag"."id" = "tag_asset"."tagsId"
where
"asset"."id" = "tag_asset"."assetId"
"asset"."id" = "tag_asset"."assetsId"
) as agg
) as "tags"
from
"asset"
where
"asset"."id" = $1::uuid
and "asset"."status" != 'partial'
limit
$2

@@ -54,7 +52,6 @@ from
"asset"
where
"asset"."id" = $1::uuid
and "asset"."status" != 'partial'
limit
$2

@@ -81,8 +78,7 @@ from
"asset"
inner join "asset_job_status" on "asset_job_status"."assetId" = "asset"."id"
where
"asset"."status" != 'partial'
and "asset"."deletedAt" is null
"asset"."deletedAt" is null
and "asset"."visibility" != $1
and (
"asset_job_status"."previewAt" is null
@@ -114,7 +110,6 @@ from
"asset"
where
"asset"."id" = $1
and "asset"."status" != 'partial'

-- AssetJobRepository.getForGenerateThumbnailJob
select
@@ -146,7 +141,6 @@ from
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where
"asset"."id" = $1
and "asset"."status" != 'partial'

-- AssetJobRepository.getForMetadataExtraction
select
@@ -184,7 +178,6 @@ from
"asset"
where
"asset"."id" = $1
and "asset"."status" != 'partial'

-- AssetJobRepository.getAlbumThumbnailFiles
select
@@ -205,8 +198,7 @@ from
inner join "smart_search" on "asset"."id" = "smart_search"."assetId"
inner join "asset_job_status" as "job_status" on "job_status"."assetId" = "asset"."id"
where
"asset"."status" != 'partial'
and "asset"."deletedAt" is null
"asset"."deletedAt" is null
and "asset"."visibility" in ('archive', 'timeline')
and "job_status"."duplicatesDetectedAt" is null

@@ -218,7 +210,6 @@ from
inner join "asset_job_status" as "job_status" on "assetId" = "asset"."id"
where
"asset"."visibility" != $1
and "asset"."status" != 'partial'
and "asset"."deletedAt" is null
and "job_status"."previewAt" is not null
and not exists (
@@ -253,7 +244,6 @@ from
"asset"
where
"asset"."id" = $2
and "asset"."status" != 'partial'

-- AssetJobRepository.getForDetectFacesJob
select
@@ -294,7 +284,6 @@ from
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where
"asset"."id" = $2
and "asset"."status" != 'partial'

-- AssetJobRepository.getForOcr
select
@@ -396,7 +385,6 @@ from
) as "stacked_assets" on "stack"."id" is not null
where
"asset"."id" = $2
and "asset"."status" != 'partial'

-- AssetJobRepository.streamForVideoConversion
select
@@ -410,7 +398,6 @@ where
or "asset"."encodedVideoPath" = $2
)
and "asset"."visibility" != $3
and "asset"."status" != 'partial'
and "asset"."deletedAt" is null

-- AssetJobRepository.getForVideoConversion
@@ -424,7 +411,6 @@ from
where
"asset"."id" = $1
and "asset"."type" = $2
and "asset"."status" != 'partial'

-- AssetJobRepository.streamForMetadataExtraction
select
@@ -437,7 +423,6 @@ where
"asset_job_status"."metadataExtractedAt" is null
or "asset_job_status"."assetId" is null
)
and "asset"."status" != 'partial'
and "asset"."deletedAt" is null

-- AssetJobRepository.getForStorageTemplateJob
@@ -458,8 +443,7 @@ from
"asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where
"asset"."status" != 'partial'
and "asset"."deletedAt" is null
"asset"."deletedAt" is null
and "asset"."id" = $1

-- AssetJobRepository.streamForStorageTemplateJob
@@ -480,8 +464,7 @@ from
"asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where
"asset"."status" != 'partial'
and "asset"."deletedAt" is null
"asset"."deletedAt" is null

-- AssetJobRepository.streamForDeletedJob
select
@@ -491,7 +474,6 @@ from
"asset"
where
"asset"."deletedAt" <= $1
and "asset"."status" != 'partial'

-- AssetJobRepository.streamForSidecar
select
@@ -504,7 +486,6 @@ where
or "asset"."sidecarPath" is null
)
and "asset"."visibility" != $2
and "asset"."status" != 'partial'

-- AssetJobRepository.streamForDetectFacesJob
select
@@ -514,10 +495,8 @@ from
inner join "asset_job_status" as "job_status" on "assetId" = "asset"."id"
where
"asset"."visibility" != $1
and "asset"."status" != 'partial'
and "asset"."deletedAt" is null
and "job_status"."previewAt" is not null
and "asset"."status" != 'partial'
order by
"asset"."fileCreatedAt" desc
@@ -538,14 +517,4 @@ select
from
"asset"
where
"asset"."status" != 'partial'
and "asset"."deletedAt" is null

-- AssetJobRepository.streamForPartialAssetCleanupJob
select
"id"
from
"asset"
where
"asset"."status" = 'partial'
and "asset"."createdAt" < $1
"asset"."deletedAt" is null
@@ -46,68 +46,6 @@ where
"assetId" = $1
and "key" = $2

-- AssetRepository.getCompletionMetadata
select
"originalPath" as "path",
"status",
"fileModifiedAt",
"createdAt",
"checksum",
"fileSizeInByte" as "size"
from
"asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
where
"id" = $1
and "ownerId" = $2

-- AssetRepository.setComplete
update "asset" as "complete_asset"
set
"status" = 'active',
"visibility" = case
when (
"complete_asset"."type" = 'VIDEO'
and exists (
select
from
"asset"
where
"complete_asset"."id" = "asset"."livePhotoVideoId"
)
) then 'hidden'::asset_visibility_enum
else 'timeline'::asset_visibility_enum
end
where
"id" = $1
and "status" = 'partial'

-- AssetRepository.removeAndDecrementQuota
with
"asset_exif" as (
select
"fileSizeInByte"
from
"asset_exif"
where
"assetId" = $1
),
"asset" as (
delete from "asset"
where
"id" = $2
returning
"ownerId"
)
update "user"
set
"quotaUsageInBytes" = "quotaUsageInBytes" - "fileSizeInByte"
from
"asset_exif",
"asset"
where
"user"."id" = "asset"."ownerId"

-- AssetRepository.getByDayOfYear
with
"res" as (
@@ -222,9 +160,9 @@ select
"tag"."parentId"
from
"tag"
inner join "tag_asset" on "tag"."id" = "tag_asset"."tagId"
inner join "tag_asset" on "tag"."id" = "tag_asset"."tagsId"
where
"asset"."id" = "tag_asset"."assetId"
"asset"."id" = "tag_asset"."assetsId"
) as agg
) as "tags",
to_json("asset_exif") as "exifInfo"
@@ -320,9 +258,7 @@ where

-- AssetRepository.getUploadAssetIdByChecksum
select
"id",
"status",
"createdAt"
"id"
from
"asset"
where
@@ -23,8 +23,8 @@ where
from
"album_asset"
where
"asset"."id" = "album_asset"."assetId"
and "album_asset"."albumId" in ($3)
"asset"."id" = "album_asset"."assetsId"
and "album_asset"."albumsId" in ($3)
)
)
order by

@@ -37,7 +37,7 @@ select
"asset".*
from
"asset"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetId"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetsId"
where
"memory_asset"."memoriesId" = "memory"."id"
and "asset"."visibility" = 'timeline'
@@ -66,7 +66,7 @@ select
"asset".*
from
"asset"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetId"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetsId"
where
"memory_asset"."memoriesId" = "memory"."id"
and "asset"."visibility" = 'timeline'
@@ -104,7 +104,7 @@ select
"asset".*
from
"asset"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetId"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetsId"
where
"memory_asset"."memoriesId" = "memory"."id"
and "asset"."visibility" = 'timeline'
@@ -137,7 +137,7 @@ select
"asset".*
from
"asset"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetId"
inner join "memory_asset" on "asset"."id" = "memory_asset"."assetsId"
where
"memory_asset"."memoriesId" = "memory"."id"
and "asset"."visibility" = 'timeline'
@@ -159,15 +159,15 @@ where

-- MemoryRepository.getAssetIds
select
"assetId"
"assetsId"
from
"memory_asset"
where
"memoriesId" = $1
and "assetId" in ($2)
and "assetsId" in ($2)

-- MemoryRepository.addAssetIds
insert into
"memory_asset" ("memoriesId", "assetId")
"memory_asset" ("memoriesId", "assetsId")
values
($1, $2)
@@ -4,10 +4,10 @@
insert into
"shared_link_asset"
select
$1 as "assetId",
"shared_link_asset"."sharedLinkId"
$1 as "assetsId",
"shared_link_asset"."sharedLinksId"
from
"shared_link_asset"
where
"shared_link_asset"."assetId" = $2
"shared_link_asset"."assetsId" = $2
on conflict do nothing

@@ -19,7 +19,7 @@ from
to_json("exifInfo") as "exifInfo"
from
"shared_link_asset"
inner join "asset" on "asset"."id" = "shared_link_asset"."assetId"
inner join "asset" on "asset"."id" = "shared_link_asset"."assetsId"
inner join lateral (
select
"asset_exif".*
@@ -29,7 +29,7 @@ from
"asset_exif"."assetId" = "asset"."id"
) as "exifInfo" on true
where
"shared_link"."id" = "shared_link_asset"."sharedLinkId"
"shared_link"."id" = "shared_link_asset"."sharedLinksId"
and "asset"."deletedAt" is null
order by
"asset"."fileCreatedAt" asc
@@ -51,7 +51,7 @@ from
to_json("owner") as "owner"
from
"album"
left join "album_asset" on "album_asset"."albumId" = "album"."id"
left join "album_asset" on "album_asset"."albumsId" = "album"."id"
left join lateral (
select
"asset".*,
@@ -67,7 +67,7 @@ from
"asset_exif"."assetId" = "asset"."id"
) as "exifInfo" on true
where
"album_asset"."assetId" = "asset"."id"
"album_asset"."assetsId" = "asset"."id"
and "asset"."deletedAt" is null
order by
"asset"."fileCreatedAt" asc
@@ -108,14 +108,14 @@ select distinct
to_json("album") as "album"
from
"shared_link"
left join "shared_link_asset" on "shared_link_asset"."sharedLinkId" = "shared_link"."id"
left join "shared_link_asset" on "shared_link_asset"."sharedLinksId" = "shared_link"."id"
left join lateral (
select
json_agg("asset") as "assets"
from
"asset"
where
"asset"."id" = "shared_link_asset"."assetId"
"asset"."id" = "shared_link_asset"."assetsId"
and "asset"."deletedAt" is null
) as "assets" on true
left join lateral (

@@ -89,9 +89,9 @@ select
"tag"."parentId"
from
"tag"
inner join "tag_asset" on "tag"."id" = "tag_asset"."tagId"
inner join "tag_asset" on "tag"."id" = "tag_asset"."tagsId"
where
"tag_asset"."assetId" = "asset"."id"
"tag_asset"."assetsId" = "asset"."id"
) as agg
) as "tags",
to_json("exifInfo") as "exifInfo"
@@ -2,12 +2,12 @@

-- SyncRepository.album.getCreatedAfter
select
"albumId" as "id",
"albumsId" as "id",
"createId"
from
"album_user"
where
"userId" = $1
"usersId" = $1
and "createId" >= $2
and "createId" < $3
order by
@@ -40,13 +40,13 @@ select distinct
"album"."updateId"
from
"album" as "album"
left join "album_user" as "album_users" on "album"."id" = "album_users"."albumId"
left join "album_user" as "album_users" on "album"."id" = "album_users"."albumsId"
where
"album"."updateId" < $1
and "album"."updateId" > $2
and (
"album"."ownerId" = $3
or "album_users"."userId" = $4
or "album_users"."usersId" = $4
)
order by
"album"."updateId" asc
@@ -72,12 +72,12 @@ select
"album_asset"."updateId"
from
"album_asset" as "album_asset"
inner join "asset" on "asset"."id" = "album_asset"."assetId"
inner join "asset" on "asset"."id" = "album_asset"."assetsId"
where
"album_asset"."updateId" < $1
and "album_asset"."updateId" <= $2
and "album_asset"."updateId" >= $3
and "album_asset"."albumId" = $4
and "album_asset"."albumsId" = $4
order by
"album_asset"."updateId" asc

@@ -102,16 +102,16 @@ select
"asset"."updateId"
from
"asset" as "asset"
inner join "album_asset" on "album_asset"."assetId" = "asset"."id"
inner join "album" on "album"."id" = "album_asset"."albumId"
left join "album_user" on "album_user"."albumId" = "album_asset"."albumId"
inner join "album_asset" on "album_asset"."assetsId" = "asset"."id"
inner join "album" on "album"."id" = "album_asset"."albumsId"
left join "album_user" on "album_user"."albumsId" = "album_asset"."albumsId"
where
"asset"."updateId" < $1
and "asset"."updateId" > $2
and "album_asset"."updateId" <= $3
and (
"album"."ownerId" = $4
or "album_user"."userId" = $5
or "album_user"."usersId" = $5
)
order by
"asset"."updateId" asc
@@ -137,15 +137,15 @@ select
"asset"."libraryId"
from
"album_asset" as "album_asset"
inner join "asset" on "asset"."id" = "album_asset"."assetId"
inner join "album" on "album"."id" = "album_asset"."albumId"
left join "album_user" on "album_user"."albumId" = "album_asset"."albumId"
inner join "asset" on "asset"."id" = "album_asset"."assetsId"
inner join "album" on "album"."id" = "album_asset"."albumsId"
left join "album_user" on "album_user"."albumsId" = "album_asset"."albumsId"
where
"album_asset"."updateId" < $1
and "album_asset"."updateId" > $2
and (
"album"."ownerId" = $3
or "album_user"."userId" = $4
or "album_user"."usersId" = $4
)
order by
"album_asset"."updateId" asc
@@ -180,12 +180,12 @@ select
|
||||
"album_asset"."updateId"
|
||||
from
|
||||
"album_asset" as "album_asset"
|
||||
inner join "asset_exif" on "asset_exif"."assetId" = "album_asset"."assetId"
|
||||
inner join "asset_exif" on "asset_exif"."assetId" = "album_asset"."assetsId"
|
||||
where
|
||||
"album_asset"."updateId" < $1
|
||||
and "album_asset"."updateId" <= $2
|
||||
and "album_asset"."updateId" >= $3
|
||||
and "album_asset"."albumId" = $4
|
||||
and "album_asset"."albumsId" = $4
|
||||
order by
|
||||
"album_asset"."updateId" asc
|
||||
|
||||
@@ -219,16 +219,16 @@ select
|
||||
"asset_exif"."updateId"
|
||||
from
|
||||
"asset_exif" as "asset_exif"
|
||||
inner join "album_asset" on "album_asset"."assetId" = "asset_exif"."assetId"
|
||||
inner join "album" on "album"."id" = "album_asset"."albumId"
|
||||
left join "album_user" on "album_user"."albumId" = "album_asset"."albumId"
|
||||
inner join "album_asset" on "album_asset"."assetsId" = "asset_exif"."assetId"
|
||||
inner join "album" on "album"."id" = "album_asset"."albumsId"
|
||||
left join "album_user" on "album_user"."albumsId" = "album_asset"."albumsId"
|
||||
where
|
||||
"asset_exif"."updateId" < $1
|
||||
and "asset_exif"."updateId" > $2
|
||||
and "album_asset"."updateId" <= $3
|
||||
and (
|
||||
"album"."ownerId" = $4
|
||||
or "album_user"."userId" = $5
|
||||
or "album_user"."usersId" = $5
|
||||
)
|
||||
order by
|
||||
"asset_exif"."updateId" asc
|
||||
@@ -263,23 +263,23 @@ select
|
||||
"asset_exif"."fps"
|
||||
from
|
||||
"album_asset" as "album_asset"
|
||||
inner join "asset_exif" on "asset_exif"."assetId" = "album_asset"."assetId"
|
||||
inner join "album" on "album"."id" = "album_asset"."albumId"
|
||||
left join "album_user" on "album_user"."albumId" = "album_asset"."albumId"
|
||||
inner join "asset_exif" on "asset_exif"."assetId" = "album_asset"."assetsId"
|
||||
inner join "album" on "album"."id" = "album_asset"."albumsId"
|
||||
left join "album_user" on "album_user"."albumsId" = "album_asset"."albumsId"
|
||||
where
|
||||
"album_asset"."updateId" < $1
|
||||
and "album_asset"."updateId" > $2
|
||||
and (
|
||||
"album"."ownerId" = $3
|
||||
or "album_user"."userId" = $4
|
||||
or "album_user"."usersId" = $4
|
||||
)
|
||||
order by
|
||||
"album_asset"."updateId" asc
|
||||
|
||||
-- SyncRepository.albumToAsset.getBackfill
select
"album_asset"."assetId" as "assetId",
"album_asset"."albumId" as "albumId",
"album_asset"."assetsId" as "assetId",
"album_asset"."albumsId" as "albumId",
"album_asset"."updateId"
from
"album_asset" as "album_asset"
@@ -287,7 +287,7 @@ where
"album_asset"."updateId" < $1
and "album_asset"."updateId" <= $2
and "album_asset"."updateId" >= $3
and "album_asset"."albumId" = $4
and "album_asset"."albumsId" = $4
order by
"album_asset"."updateId" asc

@@ -311,11 +311,11 @@ where
union
(
select
"album_user"."albumId" as "id"
"album_user"."albumsId" as "id"
from
"album_user"
where
"album_user"."userId" = $4
"album_user"."usersId" = $4
)
)
order by
@@ -323,27 +323,27 @@ order by

-- SyncRepository.albumToAsset.getUpserts
select
"album_asset"."assetId" as "assetId",
"album_asset"."albumId" as "albumId",
"album_asset"."assetsId" as "assetId",
"album_asset"."albumsId" as "albumId",
"album_asset"."updateId"
from
"album_asset" as "album_asset"
inner join "album" on "album"."id" = "album_asset"."albumId"
left join "album_user" on "album_user"."albumId" = "album_asset"."albumId"
inner join "album" on "album"."id" = "album_asset"."albumsId"
left join "album_user" on "album_user"."albumsId" = "album_asset"."albumsId"
where
"album_asset"."updateId" < $1
and "album_asset"."updateId" > $2
and (
"album"."ownerId" = $3
or "album_user"."userId" = $4
or "album_user"."usersId" = $4
)
order by
"album_asset"."updateId" asc

-- SyncRepository.albumUser.getBackfill
select
"album_user"."albumId" as "albumId",
"album_user"."userId" as "userId",
"album_user"."albumsId" as "albumId",
"album_user"."usersId" as "userId",
"album_user"."role",
"album_user"."updateId"
from
@@ -352,7 +352,7 @@ where
"album_user"."updateId" < $1
and "album_user"."updateId" <= $2
and "album_user"."updateId" >= $3
and "albumId" = $4
and "albumsId" = $4
order by
"album_user"."updateId" asc

@@ -376,11 +376,11 @@ where
union
(
select
"album_user"."albumId" as "id"
"album_user"."albumsId" as "id"
from
"album_user"
where
"album_user"."userId" = $4
"album_user"."usersId" = $4
)
)
order by
@@ -388,8 +388,8 @@ order by

-- SyncRepository.albumUser.getUpserts
select
"album_user"."albumId" as "albumId",
"album_user"."userId" as "userId",
"album_user"."albumsId" as "albumId",
"album_user"."usersId" as "userId",
"album_user"."role",
"album_user"."updateId"
from
@@ -397,7 +397,7 @@ from
where
"album_user"."updateId" < $1
and "album_user"."updateId" > $2
and "album_user"."albumId" in (
and "album_user"."albumsId" in (
select
"id"
from
@@ -407,11 +407,11 @@ where
union
(
select
"albumUsers"."albumId" as "id"
"albumUsers"."albumsId" as "id"
from
"album_user" as "albumUsers"
where
"albumUsers"."userId" = $4
"albumUsers"."usersId" = $4
)
)
order by
@@ -656,7 +656,7 @@ order by
-- SyncRepository.memoryToAsset.getUpserts
select
"memoriesId" as "memoryId",
"assetId" as "assetId",
"assetsId" as "assetId",
"updateId"
from
"memory_asset" as "memory_asset"

@@ -84,19 +84,19 @@ where

-- TagRepository.addAssetIds
insert into
"tag_asset" ("tagId", "assetId")
"tag_asset" ("tagsId", "assetsId")
values
($1, $2)

-- TagRepository.removeAssetIds
delete from "tag_asset"
where
"tagId" = $1
and "assetId" in ($2)
"tagsId" = $1
and "assetsId" in ($2)

-- TagRepository.upsertAssetIds
insert into
"tag_asset" ("assetId", "tagIds")
"tag_asset" ("assetId", "tagsIds")
values
($1, $2)
on conflict do nothing
@@ -107,9 +107,9 @@ returning
begin
delete from "tag_asset"
where
"assetId" = $1
"assetsId" = $1
insert into
"tag_asset" ("tagId", "assetId")
"tag_asset" ("tagsId", "assetsId")
values
($1, $2)
on conflict do nothing

@@ -52,8 +52,8 @@ class ActivityAccess {
return this.db
.selectFrom('album')
.select('album.id')
.leftJoin('album_user as albumUsers', 'albumUsers.albumId', 'album.id')
.leftJoin('user', (join) => join.onRef('user.id', '=', 'albumUsers.userId').on('user.deletedAt', 'is', null))
.leftJoin('album_user as albumUsers', 'albumUsers.albumsId', 'album.id')
.leftJoin('user', (join) => join.onRef('user.id', '=', 'albumUsers.usersId').on('user.deletedAt', 'is', null))
.where('album.id', 'in', [...albumIds])
.where('album.isActivityEnabled', '=', true)
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('user.id', '=', userId)]))
@@ -96,8 +96,8 @@ class AlbumAccess {
return this.db
.selectFrom('album')
.select('album.id')
.leftJoin('album_user', 'album_user.albumId', 'album.id')
.leftJoin('user', (join) => join.onRef('user.id', '=', 'album_user.userId').on('user.deletedAt', 'is', null))
.leftJoin('album_user', 'album_user.albumsId', 'album.id')
.leftJoin('user', (join) => join.onRef('user.id', '=', 'album_user.usersId').on('user.deletedAt', 'is', null))
.where('album.id', 'in', [...albumIds])
.where('album.deletedAt', 'is', null)
.where('user.id', '=', userId)
@@ -138,12 +138,12 @@ class AssetAccess {
return this.db
.with('target', (qb) => qb.selectNoFrom(sql`array[${sql.join([...assetIds])}]::uuid[]`.as('ids')))
.selectFrom('album')
.innerJoin('album_asset as albumAssets', 'album.id', 'albumAssets.albumId')
.innerJoin('album_asset as albumAssets', 'album.id', 'albumAssets.albumsId')
.innerJoin('asset', (join) =>
join.onRef('asset.id', '=', 'albumAssets.assetId').on('asset.deletedAt', 'is', null),
join.onRef('asset.id', '=', 'albumAssets.assetsId').on('asset.deletedAt', 'is', null),
)
.leftJoin('album_user as albumUsers', 'albumUsers.albumId', 'album.id')
.leftJoin('user', (join) => join.onRef('user.id', '=', 'albumUsers.userId').on('user.deletedAt', 'is', null))
.leftJoin('album_user as albumUsers', 'albumUsers.albumsId', 'album.id')
.leftJoin('user', (join) => join.onRef('user.id', '=', 'albumUsers.usersId').on('user.deletedAt', 'is', null))
.crossJoin('target')
.select(['asset.id', 'asset.livePhotoVideoId'])
.where((eb) =>
@@ -223,13 +223,13 @@ class AssetAccess {
return this.db
.selectFrom('shared_link')
.leftJoin('album', (join) => join.onRef('album.id', '=', 'shared_link.albumId').on('album.deletedAt', 'is', null))
.leftJoin('shared_link_asset', 'shared_link_asset.sharedLinkId', 'shared_link.id')
.leftJoin('shared_link_asset', 'shared_link_asset.sharedLinksId', 'shared_link.id')
.leftJoin('asset', (join) =>
join.onRef('asset.id', '=', 'shared_link_asset.assetId').on('asset.deletedAt', 'is', null),
join.onRef('asset.id', '=', 'shared_link_asset.assetsId').on('asset.deletedAt', 'is', null),
)
.leftJoin('album_asset', 'album_asset.albumId', 'album.id')
.leftJoin('album_asset', 'album_asset.albumsId', 'album.id')
.leftJoin('asset as albumAssets', (join) =>
join.onRef('albumAssets.id', '=', 'album_asset.assetId').on('albumAssets.deletedAt', 'is', null),
join.onRef('albumAssets.id', '=', 'album_asset.assetsId').on('albumAssets.deletedAt', 'is', null),
)
.select([
'asset.id as assetId',

@@ -7,36 +7,36 @@ import { DB } from 'src/schema';
import { AlbumUserTable } from 'src/schema/tables/album-user.table';

export type AlbumPermissionId = {
albumId: string;
userId: string;
albumsId: string;
usersId: string;
};

@Injectable()
export class AlbumUserRepository {
constructor(@InjectKysely() private db: Kysely<DB>) {}

@GenerateSql({ params: [{ userId: DummyValue.UUID, albumId: DummyValue.UUID }] })
@GenerateSql({ params: [{ usersId: DummyValue.UUID, albumsId: DummyValue.UUID }] })
create(albumUser: Insertable<AlbumUserTable>) {
return this.db
.insertInto('album_user')
.values(albumUser)
.returning(['userId', 'albumId', 'role'])
.returning(['usersId', 'albumsId', 'role'])
.executeTakeFirstOrThrow();
}

@GenerateSql({ params: [{ userId: DummyValue.UUID, albumId: DummyValue.UUID }, { role: AlbumUserRole.Viewer }] })
update({ userId, albumId }: AlbumPermissionId, dto: Updateable<AlbumUserTable>) {
@GenerateSql({ params: [{ usersId: DummyValue.UUID, albumsId: DummyValue.UUID }, { role: AlbumUserRole.Viewer }] })
update({ usersId, albumsId }: AlbumPermissionId, dto: Updateable<AlbumUserTable>) {
return this.db
.updateTable('album_user')
.set(dto)
.where('userId', '=', userId)
.where('albumId', '=', albumId)
.where('usersId', '=', usersId)
.where('albumsId', '=', albumsId)
.returningAll()
.executeTakeFirstOrThrow();
}

@GenerateSql({ params: [{ userId: DummyValue.UUID, albumId: DummyValue.UUID }] })
async delete({ userId, albumId }: AlbumPermissionId): Promise<void> {
await this.db.deleteFrom('album_user').where('userId', '=', userId).where('albumId', '=', albumId).execute();
@GenerateSql({ params: [{ usersId: DummyValue.UUID, albumsId: DummyValue.UUID }] })
async delete({ usersId, albumsId }: AlbumPermissionId): Promise<void> {
await this.db.deleteFrom('album_user').where('usersId', '=', usersId).where('albumsId', '=', albumsId).execute();
}
}

@@ -33,11 +33,11 @@ const withAlbumUsers = (eb: ExpressionBuilder<DB, 'album'>) => {
.selectFrom('album_user')
.select('album_user.role')
.select((eb) =>
jsonObjectFrom(eb.selectFrom('user').select(columns.user).whereRef('user.id', '=', 'album_user.userId'))
jsonObjectFrom(eb.selectFrom('user').select(columns.user).whereRef('user.id', '=', 'album_user.usersId'))
.$notNull()
.as('user'),
)
.whereRef('album_user.albumId', '=', 'album.id'),
.whereRef('album_user.albumsId', '=', 'album.id'),
)
.$notNull()
.as('albumUsers');
@@ -57,8 +57,8 @@ const withAssets = (eb: ExpressionBuilder<DB, 'album'>) => {
.selectAll('asset')
.leftJoin('asset_exif', 'asset.id', 'asset_exif.assetId')
.select((eb) => eb.table('asset_exif').$castTo<Exif>().as('exifInfo'))
.innerJoin('album_asset', 'album_asset.assetId', 'asset.id')
.whereRef('album_asset.albumId', '=', 'album.id')
.innerJoin('album_asset', 'album_asset.assetsId', 'asset.id')
.whereRef('album_asset.albumsId', '=', 'album.id')
.where('asset.deletedAt', 'is', null)
.$call(withDefaultVisibility)
.orderBy('asset.fileCreatedAt', 'desc')
@@ -92,19 +92,19 @@ export class AlbumRepository {
return this.db
.selectFrom('album')
.selectAll('album')
.innerJoin('album_asset', 'album_asset.albumId', 'album.id')
.innerJoin('album_asset', 'album_asset.albumsId', 'album.id')
.where((eb) =>
eb.or([
eb('album.ownerId', '=', ownerId),
eb.exists(
eb
.selectFrom('album_user')
.whereRef('album_user.albumId', '=', 'album.id')
.where('album_user.userId', '=', ownerId),
.whereRef('album_user.albumsId', '=', 'album.id')
.where('album_user.usersId', '=', ownerId),
),
]),
)
.where('album_asset.assetId', '=', assetId)
.where('album_asset.assetsId', '=', assetId)
.where('album.deletedAt', 'is', null)
.orderBy('album.createdAt', 'desc')
.select(withOwner)
@@ -125,16 +125,16 @@ export class AlbumRepository {
this.db
.selectFrom('asset')
.$call(withDefaultVisibility)
.innerJoin('album_asset', 'album_asset.assetId', 'asset.id')
.select('album_asset.albumId as albumId')
.innerJoin('album_asset', 'album_asset.assetsId', 'asset.id')
.select('album_asset.albumsId as albumId')
.select((eb) => eb.fn.min(sql<Date>`("asset"."localDateTime" AT TIME ZONE 'UTC'::text)::date`).as('startDate'))
.select((eb) => eb.fn.max(sql<Date>`("asset"."localDateTime" AT TIME ZONE 'UTC'::text)::date`).as('endDate'))
// lastModifiedAssetTimestamp is only used in mobile app, please remove if not need
.select((eb) => eb.fn.max('asset.updatedAt').as('lastModifiedAssetTimestamp'))
.select((eb) => sql<number>`${eb.fn.count('asset.id')}::int`.as('assetCount'))
.where('album_asset.albumId', 'in', ids)
.where('album_asset.albumsId', 'in', ids)
.where('asset.deletedAt', 'is', null)
.groupBy('album_asset.albumId')
.groupBy('album_asset.albumsId')
.execute()
);
}
@@ -166,8 +166,8 @@ export class AlbumRepository {
eb.exists(
eb
.selectFrom('album_user')
.whereRef('album_user.albumId', '=', 'album.id')
.where((eb) => eb.or([eb('album.ownerId', '=', ownerId), eb('album_user.userId', '=', ownerId)])),
.whereRef('album_user.albumsId', '=', 'album.id')
.where((eb) => eb.or([eb('album.ownerId', '=', ownerId), eb('album_user.usersId', '=', ownerId)])),
),
eb.exists(
eb
@@ -195,7 +195,7 @@ export class AlbumRepository {
.selectAll('album')
.where('album.ownerId', '=', ownerId)
.where('album.deletedAt', 'is', null)
.where((eb) => eb.not(eb.exists(eb.selectFrom('album_user').whereRef('album_user.albumId', '=', 'album.id'))))
.where((eb) => eb.not(eb.exists(eb.selectFrom('album_user').whereRef('album_user.albumsId', '=', 'album.id'))))
.where((eb) => eb.not(eb.exists(eb.selectFrom('shared_link').whereRef('shared_link.albumId', '=', 'album.id'))))
.select(withOwner)
.orderBy('album.createdAt', 'desc')
@@ -217,7 +217,7 @@ export class AlbumRepository {
@GenerateSql({ params: [[DummyValue.UUID]] })
@Chunked()
async removeAssetsFromAll(assetIds: string[]): Promise<void> {
await this.db.deleteFrom('album_asset').where('album_asset.assetId', 'in', assetIds).execute();
await this.db.deleteFrom('album_asset').where('album_asset.assetsId', 'in', assetIds).execute();
}

@Chunked({ paramIndex: 1 })
@@ -228,8 +228,8 @@ export class AlbumRepository {

await this.db
.deleteFrom('album_asset')
.where('album_asset.albumId', '=', albumId)
.where('album_asset.assetId', 'in', assetIds)
.where('album_asset.albumsId', '=', albumId)
.where('album_asset.assetsId', 'in', assetIds)
.execute();
}

@@ -250,10 +250,10 @@ export class AlbumRepository {
return this.db
.selectFrom('album_asset')
.selectAll()
.where('album_asset.albumId', '=', albumId)
.where('album_asset.assetId', 'in', assetIds)
.where('album_asset.albumsId', '=', albumId)
.where('album_asset.assetsId', 'in', assetIds)
.execute()
.then((results) => new Set(results.map(({ assetId }) => assetId)));
.then((results) => new Set(results.map(({ assetsId }) => assetsId)));
}

async addAssetIds(albumId: string, assetIds: string[]): Promise<void> {
@@ -276,7 +276,7 @@ export class AlbumRepository {
await tx
.insertInto('album_user')
.values(
albumUsers.map((albumUser) => ({ albumId: newAlbum.id, userId: albumUser.userId, role: albumUser.role })),
albumUsers.map((albumUser) => ({ albumsId: newAlbum.id, usersId: albumUser.userId, role: albumUser.role })),
)
.execute();
}
@@ -317,12 +317,12 @@ export class AlbumRepository {

await db
.insertInto('album_asset')
.values(assetIds.map((assetId) => ({ albumId, assetId })))
.values(assetIds.map((assetId) => ({ albumsId: albumId, assetsId: assetId })))
.execute();
}

@Chunked({ chunkSize: 30_000 })
async addAssetIdsToAlbums(values: { albumId: string; assetId: string }[]): Promise<void> {
async addAssetIdsToAlbums(values: { albumsId: string; assetsId: string }[]): Promise<void> {
if (values.length === 0) {
return;
}
@@ -344,7 +344,7 @@ export class AlbumRepository {
.updateTable('album')
.set((eb) => ({
albumThumbnailAssetId: this.updateThumbnailBuilder(eb)
.select('album_asset.assetId')
.select('album_asset.assetsId')
.orderBy('asset.fileCreatedAt', 'desc')
.limit(1),
}))
@@ -360,7 +360,7 @@ export class AlbumRepository {
eb.exists(
this.updateThumbnailBuilder(eb)
.select(sql`1`.as('1'))
.whereRef('album.albumThumbnailAssetId', '=', 'album_asset.assetId'), // Has invalid assets
.whereRef('album.albumThumbnailAssetId', '=', 'album_asset.assetsId'), // Has invalid assets
),
),
]),
@@ -375,9 +375,9 @@ export class AlbumRepository {
return eb
.selectFrom('album_asset')
.innerJoin('asset', (join) =>
join.onRef('album_asset.assetId', '=', 'asset.id').on('asset.deletedAt', 'is', null),
join.onRef('album_asset.assetsId', '=', 'asset.id').on('asset.deletedAt', 'is', null),
)
.whereRef('album_asset.albumId', '=', 'album.id');
.whereRef('album_asset.albumsId', '=', 'album.id');
}

/**
@@ -388,9 +388,9 @@ export class AlbumRepository {
getContributorCounts(id: string) {
return this.db
.selectFrom('album_asset')
.innerJoin('asset', 'asset.id', 'assetId')
.innerJoin('asset', 'asset.id', 'assetsId')
.where('asset.deletedAt', 'is', sql.lit(null))
.where('album_asset.albumId', '=', id)
.where('album_asset.albumsId', '=', id)
.select('asset.ownerId as userId')
.select((eb) => eb.fn.countAll<number>().as('assetCount'))
.groupBy('asset.ownerId')
@@ -405,8 +405,8 @@ export class AlbumRepository {
.expression((eb) =>
eb
.selectFrom('album_asset')
.select((eb) => ['album_asset.albumId', eb.val(targetAssetId).as('assetId')])
.where('album_asset.assetId', '=', sourceAssetId),
.select((eb) => ['album_asset.albumsId', eb.val(targetAssetId).as('assetsId')])
.where('album_asset.assetsId', '=', sourceAssetId),
)
.onConflict((oc) => oc.doNothing())
.execute();

@@ -1,10 +1,10 @@
import { Injectable } from '@nestjs/common';
import { Kysely, sql } from 'kysely';
import { Kysely } from 'kysely';
import { jsonArrayFrom } from 'kysely/helpers/postgres';
import { InjectKysely } from 'nestjs-kysely';
import { Asset, columns } from 'src/database';
import { DummyValue, GenerateSql } from 'src/decorators';
import { AssetFileType, AssetStatus, AssetType, AssetVisibility } from 'src/enum';
import { AssetFileType, AssetType, AssetVisibility } from 'src/enum';
import { DB } from 'src/schema';
import { StorageAsset } from 'src/types';
import {
@@ -29,7 +29,6 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.where('asset.id', '=', asUuid(id))
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.leftJoin('smart_search', 'asset.id', 'smart_search.assetId')
.select(['id', 'type', 'ownerId', 'duplicateId', 'stackId', 'visibility', 'smart_search.embedding'])
.limit(1)
@@ -41,15 +40,14 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.where('asset.id', '=', asUuid(id))
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.select(['id', 'sidecarPath', 'originalPath'])
.select((eb) =>
jsonArrayFrom(
eb
.selectFrom('tag')
.select(['tag.value'])
.innerJoin('tag_asset', 'tag.id', 'tag_asset.tagId')
.whereRef('asset.id', '=', 'tag_asset.assetId'),
.innerJoin('tag_asset', 'tag.id', 'tag_asset.tagsId')
.whereRef('asset.id', '=', 'tag_asset.assetsId'),
).as('tags'),
)
.limit(1)
@@ -61,7 +59,6 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.where('asset.id', '=', asUuid(id))
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.select(['id', 'sidecarPath', 'originalPath'])
.limit(1)
.executeTakeFirst();
@@ -73,7 +70,6 @@ export class AssetJobRepository {
.selectFrom('asset')
.select(['asset.id', 'asset.thumbhash'])
.select(withFiles)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.where('asset.deletedAt', 'is', null)
.where('asset.visibility', '!=', AssetVisibility.Hidden)
.$if(!force, (qb) =>
@@ -98,7 +94,6 @@ export class AssetJobRepository {
.select(['asset.id', 'asset.ownerId', 'asset.encodedVideoPath'])
.select(withFiles)
.where('asset.id', '=', id)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}

@@ -118,7 +113,6 @@ export class AssetJobRepository {
.select(withFiles)
.$call(withExifInner)
.where('asset.id', '=', id)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}

@@ -129,7 +123,6 @@ export class AssetJobRepository {
.select(columns.asset)
.select(withFaces)
.where('asset.id', '=', id)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}

@@ -147,7 +140,6 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.where('asset.visibility', '!=', AssetVisibility.Hidden)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.where('asset.deletedAt', 'is', null)
.innerJoin('asset_job_status as job_status', 'assetId', 'asset.id')
.where('job_status.previewAt', 'is not', null);
@@ -158,7 +150,6 @@ export class AssetJobRepository {
return this.db
.selectFrom('asset')
.select(['asset.id'])
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.where('asset.deletedAt', 'is', null)
.innerJoin('smart_search', 'asset.id', 'smart_search.assetId')
.$call(withDefaultVisibility)
@@ -187,7 +178,6 @@ export class AssetJobRepository {
.select(['asset.id', 'asset.visibility'])
.select((eb) => withFiles(eb, AssetFileType.Preview))
.where('asset.id', '=', id)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}

@@ -200,7 +190,6 @@ export class AssetJobRepository {
.select((eb) => withFaces(eb, true))
.select((eb) => withFiles(eb, AssetFileType.Preview))
.where('asset.id', '=', id)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}

@@ -262,7 +251,6 @@ export class AssetJobRepository {
)
.select((eb) => toJson(eb, 'stacked_assets').as('stack'))
.where('asset.id', '=', id)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}

@@ -277,7 +265,6 @@ export class AssetJobRepository {
.where((eb) => eb.or([eb('asset.encodedVideoPath', 'is', null), eb('asset.encodedVideoPath', '=', '')]))
.where('asset.visibility', '!=', AssetVisibility.Hidden),
)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.where('asset.deletedAt', 'is', null)
.stream();
}
@@ -289,7 +276,6 @@ export class AssetJobRepository {
.select(['asset.id', 'asset.ownerId', 'asset.originalPath', 'asset.encodedVideoPath'])
.where('asset.id', '=', id)
.where('asset.type', '=', AssetType.Video)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}

@@ -305,7 +291,6 @@ export class AssetJobRepository {
eb.or([eb('asset_job_status.metadataExtractedAt', 'is', null), eb('asset_job_status.assetId', 'is', null)]),
),
)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.where('asset.deletedAt', 'is', null)
.stream();
}
@@ -328,7 +313,6 @@ export class AssetJobRepository {
'asset_exif.timeZone',
'asset_exif.fileSizeInByte',
])
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.where('asset.deletedAt', 'is', null);
}

@@ -350,7 +334,6 @@ export class AssetJobRepository {
.selectFrom('asset')
.select(['id', 'isOffline'])
.where('asset.deletedAt', '<=', trashedBefore)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.stream();
}

@@ -363,7 +346,6 @@ export class AssetJobRepository {
qb.where((eb) => eb.or([eb('asset.sidecarPath', '=', ''), eb('asset.sidecarPath', 'is', null)])),
)
.where('asset.visibility', '!=', AssetVisibility.Hidden)
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.stream();
}

@@ -372,7 +354,6 @@ export class AssetJobRepository {
return this.assetsWithPreviews()
.$if(force === false, (qb) => qb.where('job_status.facesRecognizedAt', 'is', null))
.select(['asset.id'])
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.orderBy('asset.fileCreatedAt', 'desc')
.stream();
}
@@ -394,31 +375,6 @@ export class AssetJobRepository {

@GenerateSql({ params: [DummyValue.DATE], stream: true })
streamForMigrationJob() {
return this.db
.selectFrom('asset')
.select(['id'])
.where('asset.status', '!=', sql.lit(AssetStatus.Partial))
.where('asset.deletedAt', 'is', null)
.stream();
}

getForPartialAssetCleanupJob(assetId: string) {
return this.db
.selectFrom('asset')
.innerJoin('asset_exif', 'asset.id', 'asset_exif.assetId')
.select(['originalPath as path', 'fileSizeInByte as size', 'checksum', 'fileModifiedAt'])
.where('id', '=', assetId)
.where('status', '=', sql.lit(AssetStatus.Partial))
.executeTakeFirst();
}

@GenerateSql({ params: [DummyValue.DATE], stream: true })
streamForPartialAssetCleanupJob(createdBefore: Date) {
return this.db
.selectFrom('asset')
.select(['id'])
.where('asset.status', '=', sql.lit(AssetStatus.Partial))
.where('asset.createdAt', '<', createdBefore)
.stream();
return this.db.selectFrom('asset').select(['id']).where('asset.deletedAt', 'is', null).stream();
}
}

@@ -255,96 +255,6 @@ export class AssetRepository {
return this.db.insertInto('asset').values(asset).returningAll().executeTakeFirstOrThrow();
}

createWithMetadata(asset: Insertable<AssetTable> & { id: string }, size: number, metadata?: AssetMetadataItem[]) {
let query = this.db;
if (asset.livePhotoVideoId) {
(query as any) = query.with('motion_asset', (qb) =>
qb
.updateTable('asset')
.set({ visibility: AssetVisibility.Hidden })
.where('id', '=', asset.livePhotoVideoId!)
.where('type', '=', sql.lit(AssetType.Video))
.where('ownerId', '=', asset.ownerId)
.returning('id'),
);
}

(query as any) = query
.with('asset', (qb) =>
qb
.insertInto('asset')
.values(
asset.livePhotoVideoId ? { ...asset, livePhotoVideoId: sql<string>`(select id from motion_asset)` } : asset,
)
.returning(['id', 'ownerId']),
)
.with('exif', (qb) =>
qb
.insertInto('asset_exif')
.columns(['assetId', 'fileSizeInByte'])
.expression((eb) => eb.selectFrom('asset').select(['asset.id', eb.val(size).as('fileSizeInByte')])),
);

if (metadata && metadata.length > 0) {
(query as any) = query.with('metadata', (qb) =>
qb.insertInto('asset_metadata').values(metadata.map(({ key, value }) => ({ assetId: asset.id, key, value }))),
);
}

return query
.updateTable('user')
.from('asset')
.set({ quotaUsageInBytes: sql`"quotaUsageInBytes" + ${size}` })
.whereRef('user.id', '=', 'asset.ownerId')
.execute();
}

@GenerateSql({ params: [DummyValue.UUID, DummyValue.UUID] })
getCompletionMetadata(assetId: string, ownerId: string) {
return this.db
.selectFrom('asset')
.innerJoin('asset_exif', 'asset.id', 'asset_exif.assetId')
.select(['originalPath as path', 'status', 'fileModifiedAt', 'createdAt', 'checksum', 'fileSizeInByte as size'])
.where('id', '=', assetId)
.where('ownerId', '=', ownerId)
.executeTakeFirst();
}

@GenerateSql({ params: [DummyValue.UUID] })
async setComplete(assetId: string) {
await this.db
.updateTable('asset as complete_asset')
.set((eb) => ({
status: sql.lit(AssetStatus.Active),
visibility: eb
.case()
.when(
eb.and([
eb('complete_asset.type', '=', sql.lit(AssetType.Video)),
eb.exists(eb.selectFrom('asset').whereRef('complete_asset.id', '=', 'asset.livePhotoVideoId')),
]),
)
.then(sql<AssetVisibility>`'hidden'::asset_visibility_enum`)
.else(sql<AssetVisibility>`'timeline'::asset_visibility_enum`)
.end(),
}))
.where('id', '=', assetId)
.where('status', '=', sql.lit(AssetStatus.Partial))
.execute();
}

@GenerateSql({ params: [DummyValue.UUID] })
async removeAndDecrementQuota(id: string): Promise<void> {
await this.db
.with('asset_exif', (qb) => qb.selectFrom('asset_exif').where('assetId', '=', id).select('fileSizeInByte'))
.with('asset', (qb) => qb.deleteFrom('asset').where('id', '=', id).returning('ownerId'))
.updateTable('user')
.from(['asset_exif', 'asset'])
.set({ quotaUsageInBytes: sql`"quotaUsageInBytes" - "fileSizeInByte"` })
.whereRef('user.id', '=', 'asset.ownerId')
.execute();
}

createAll(assets: Insertable<AssetTable>[]) {
return this.db.insertInto('asset').values(assets).returningAll().execute();
}
@@ -584,15 +494,17 @@ export class AssetRepository {
}

@GenerateSql({ params: [DummyValue.UUID, DummyValue.BUFFER] })
getUploadAssetIdByChecksum(ownerId: string, checksum: Buffer) {
return this.db
async getUploadAssetIdByChecksum(ownerId: string, checksum: Buffer): Promise<string | undefined> {
const asset = await this.db
.selectFrom('asset')
.select(['id', 'status', 'createdAt'])
.select('id')
.where('ownerId', '=', asUuid(ownerId))
.where('checksum', '=', checksum)
.where('libraryId', 'is', null)
.limit(1)
.executeTakeFirst();

return asset?.id;
}

findLivePhotoMatch(options: LivePhotoSearchOptions) {
@@ -651,8 +563,8 @@ export class AssetRepository {
.$if(!!options.visibility, (qb) => qb.where('asset.visibility', '=', options.visibility!))
.$if(!!options.albumId, (qb) =>
qb
.innerJoin('album_asset', 'asset.id', 'album_asset.assetId')
.where('album_asset.albumId', '=', asUuid(options.albumId!)),
.innerJoin('album_asset', 'asset.id', 'album_asset.assetsId')
.where('album_asset.albumsId', '=', asUuid(options.albumId!)),
)
.$if(!!options.personId, (qb) => hasPeople(qb, [options.personId!]))
.$if(!!options.withStacked, (qb) =>
@@ -729,8 +641,8 @@ export class AssetRepository {
eb.exists(
eb
.selectFrom('album_asset')
.whereRef('album_asset.assetId', '=', 'asset.id')
.where('album_asset.albumId', '=', asUuid(options.albumId!)),
.whereRef('album_asset.assetsId', '=', 'asset.id')
.where('album_asset.albumsId', '=', asUuid(options.albumId!)),
),
),
)

@@ -451,20 +451,6 @@ export class DatabaseRepository {
return res as R;
}

async withUuidLock<R>(uuid: string, callback: () => Promise<R>): Promise<R> {
let res;
await this.db.connection().execute(async (connection) => {
try {
await this.acquireUuidLock(uuid, connection);
res = await callback();
} finally {
await this.releaseUuidLock(uuid, connection);
}
});

return res as R;
}

tryLock(lock: DatabaseLock): Promise<boolean> {
return this.db.connection().execute(async (connection) => this.acquireTryLock(lock, connection));
}
@@ -481,10 +467,6 @@ export class DatabaseRepository {
await sql`SELECT pg_advisory_lock(${lock})`.execute(connection);
}

private async acquireUuidLock(uuid: string, connection: Kysely<DB>): Promise<void> {
await sql`SELECT pg_advisory_lock(uuid_hash_extended(${uuid}, 0))`.execute(connection);
}

private async acquireTryLock(lock: DatabaseLock, connection: Kysely<DB>): Promise<boolean> {
const { rows } = await sql<{
pg_try_advisory_lock: boolean;
@@ -495,8 +477,4 @@ export class DatabaseRepository {
private async releaseLock(lock: DatabaseLock, connection: Kysely<DB>): Promise<void> {
await sql`SELECT pg_advisory_unlock(${lock})`.execute(connection);
}

private async releaseUuidLock(uuid: string, connection: Kysely<DB>): Promise<void> {
await sql`SELECT pg_advisory_unlock(uuid_hash_extended(${uuid}, 0))`.execute(connection);
}
}

@@ -26,8 +26,8 @@ export class DownloadRepository {

downloadAlbumId(albumId: string) {
return builder(this.db)
.innerJoin('album_asset', 'asset.id', 'album_asset.assetId')
.where('album_asset.albumId', '=', albumId)
.innerJoin('album_asset', 'asset.id', 'album_asset.assetsId')
.where('album_asset.albumsId', '=', albumId)
.stream();
}


@@ -79,9 +79,6 @@ type EventMap = {
// stack bulk events
StackDeleteAll: [{ stackIds: string[]; userId: string }];

// upload events
UploadAbort: [{ assetId: string; abortTime: Date }];

// user events
UserSignup: [{ notify: boolean; id: string; password?: string }];
UserCreate: [UserEvent];

@@ -126,8 +126,8 @@ export class MapRepository {
eb.exists((eb) =>
eb
.selectFrom('album_asset')
.whereRef('asset.id', '=', 'album_asset.assetId')
.where('album_asset.albumId', 'in', albumIds),
.whereRef('asset.id', '=', 'album_asset.assetsId')
.where('album_asset.albumsId', 'in', albumIds),
),
);
}

@@ -18,7 +18,7 @@ export class MemoryRepository implements IBulkAsset {
await this.db
.deleteFrom('memory_asset')
.using('asset')
.whereRef('memory_asset.assetId', '=', 'asset.id')
.whereRef('memory_asset.assetsId', '=', 'asset.id')
.where('asset.visibility', '!=', AssetVisibility.Timeline)
.execute();

@@ -64,7 +64,7 @@ export class MemoryRepository implements IBulkAsset {
eb
.selectFrom('asset')
.selectAll('asset')
.innerJoin('memory_asset', 'asset.id', 'memory_asset.assetId')
.innerJoin('memory_asset', 'asset.id', 'memory_asset.assetsId')
.whereRef('memory_asset.memoriesId', '=', 'memory.id')
.orderBy('asset.fileCreatedAt', 'asc')
.where('asset.visibility', '=', sql.lit(AssetVisibility.Timeline))
@@ -86,7 +86,7 @@ export class MemoryRepository implements IBulkAsset {
const { id } = await tx.insertInto('memory').values(memory).returning('id').executeTakeFirstOrThrow();

if (assetIds.size > 0) {
const values = [...assetIds].map((assetId) => ({ memoriesId: id, assetId }));
const values = [...assetIds].map((assetId) => ({ memoriesId: id, assetsId: assetId }));
await tx.insertInto('memory_asset').values(values).execute();
}

@@ -116,12 +116,12 @@ export class MemoryRepository implements IBulkAsset {

const results = await this.db
.selectFrom('memory_asset')
.select(['assetId'])
.select(['assetsId'])
.where('memoriesId', '=', id)
.where('assetId', 'in', assetIds)
.where('assetsId', 'in', assetIds)
.execute();

return new Set(results.map(({ assetId }) => assetId));
return new Set(results.map(({ assetsId }) => assetsId));
}

@GenerateSql({ params: [DummyValue.UUID, [DummyValue.UUID]] })
@@ -132,7 +132,7 @@ export class MemoryRepository implements IBulkAsset {

await this.db
.insertInto('memory_asset')
.values(assetIds.map((assetId) => ({ memoriesId: id, assetId })))
.values(assetIds.map((assetId) => ({ memoriesId: id, assetsId: assetId })))
.execute();
}

@@ -143,7 +143,7 @@ export class MemoryRepository implements IBulkAsset {
return;
}

await this.db.deleteFrom('memory_asset').where('memoriesId', '=', id).where('assetId', 'in', assetIds).execute();
await this.db.deleteFrom('memory_asset').where('memoriesId', '=', id).where('assetsId', 'in', assetIds).execute();
}

private getByIdBuilder(id: string) {
@@ -155,7 +155,7 @@ export class MemoryRepository implements IBulkAsset {
eb
.selectFrom('asset')
.selectAll('asset')
.innerJoin('memory_asset', 'asset.id', 'memory_asset.assetId')
.innerJoin('memory_asset', 'asset.id', 'memory_asset.assetsId')
.whereRef('memory_asset.memoriesId', '=', 'memory.id')
.orderBy('asset.fileCreatedAt', 'asc')
.where('asset.visibility', '=', sql.lit(AssetVisibility.Timeline))

@@ -6,15 +6,15 @@ import { DB } from 'src/schema';
export class SharedLinkAssetRepository {
constructor(@InjectKysely() private db: Kysely<DB>) {}

async remove(sharedLinkId: string, assetId: string[]) {
async remove(sharedLinkId: string, assetsId: string[]) {
const deleted = await this.db
.deleteFrom('shared_link_asset')
.where('shared_link_asset.sharedLinkId', '=', sharedLinkId)
.where('shared_link_asset.assetId', 'in', assetId)
.returning('assetId')
.where('shared_link_asset.sharedLinksId', '=', sharedLinkId)
.where('shared_link_asset.assetsId', 'in', assetsId)
.returning('assetsId')
.execute();

return deleted.map((row) => row.assetId);
return deleted.map((row) => row.assetsId);
}

@GenerateSql({ params: [{ sourceAssetId: DummyValue.UUID, targetAssetId: DummyValue.UUID }] })
@@ -24,8 +24,8 @@ export class SharedLinkAssetRepository {
.expression((eb) =>
eb
.selectFrom('shared_link_asset')
.select((eb) => [eb.val(targetAssetId).as('assetId'), 'shared_link_asset.sharedLinkId'])
.where('shared_link_asset.assetId', '=', sourceAssetId),
.select((eb) => [eb.val(targetAssetId).as('assetsId'), 'shared_link_asset.sharedLinksId'])
.where('shared_link_asset.assetsId', '=', sourceAssetId),
)
.onConflict((oc) => oc.doNothing())
.execute();

@@ -28,8 +28,8 @@ export class SharedLinkRepository {
(eb) =>
eb
.selectFrom('shared_link_asset')
.whereRef('shared_link.id', '=', 'shared_link_asset.sharedLinkId')
.innerJoin('asset', 'asset.id', 'shared_link_asset.assetId')
.whereRef('shared_link.id', '=', 'shared_link_asset.sharedLinksId')
.innerJoin('asset', 'asset.id', 'shared_link_asset.assetsId')
.where('asset.deletedAt', 'is', null)
.selectAll('asset')
.innerJoinLateral(
@@ -53,13 +53,13 @@ export class SharedLinkRepository {
.selectAll('album')
.whereRef('album.id', '=', 'shared_link.albumId')
.where('album.deletedAt', 'is', null)
.leftJoin('album_asset', 'album_asset.albumId', 'album.id')
.leftJoin('album_asset', 'album_asset.albumsId', 'album.id')
.leftJoinLateral(
(eb) =>
eb
.selectFrom('asset')
.selectAll('asset')
.whereRef('album_asset.assetId', '=', 'asset.id')
.whereRef('album_asset.assetsId', '=', 'asset.id')
.where('asset.deletedAt', 'is', null)
.innerJoinLateral(
(eb) =>
@@ -123,13 +123,13 @@ export class SharedLinkRepository {
.selectFrom('shared_link')
.selectAll('shared_link')
.where('shared_link.userId', '=', userId)
.leftJoin('shared_link_asset', 'shared_link_asset.sharedLinkId', 'shared_link.id')
.leftJoin('shared_link_asset', 'shared_link_asset.sharedLinksId', 'shared_link.id')
.leftJoinLateral(
(eb) =>
eb
.selectFrom('asset')
.select((eb) => eb.fn.jsonAgg('asset').as('assets'))
.whereRef('asset.id', '=', 'shared_link_asset.assetId')
.whereRef('asset.id', '=', 'shared_link_asset.assetsId')
.where('asset.deletedAt', 'is', null)
.as('assets'),
(join) => join.onTrue(),
@@ -215,7 +215,7 @@ export class SharedLinkRepository {
if (entity.assetIds && entity.assetIds.length > 0) {
await this.db
.insertInto('shared_link_asset')
.values(entity.assetIds!.map((assetId) => ({ assetId, sharedLinkId: id })))
.values(entity.assetIds!.map((assetsId) => ({ assetsId, sharedLinksId: id })))
.execute();
}

@@ -233,7 +233,7 @@ export class SharedLinkRepository {
if (entity.assetIds && entity.assetIds.length > 0) {
await this.db
.insertInto('shared_link_asset')
.values(entity.assetIds!.map((assetId) => ({ assetId, sharedLinkId: id })))
.values(entity.assetIds!.map((assetsId) => ({ assetsId, sharedLinksId: id })))
.execute();
}

@@ -249,12 +249,12 @@ export class SharedLinkRepository {
.selectFrom('shared_link')
.selectAll('shared_link')
.where('shared_link.id', '=', id)
.leftJoin('shared_link_asset', 'shared_link_asset.sharedLinkId', 'shared_link.id')
.leftJoin('shared_link_asset', 'shared_link_asset.sharedLinksId', 'shared_link.id')
.leftJoinLateral(
(eb) =>
eb
.selectFrom('asset')
.whereRef('asset.id', '=', 'shared_link_asset.assetId')
.whereRef('asset.id', '=', 'shared_link_asset.assetsId')
.selectAll('asset')
.innerJoinLateral(
(eb) =>

@@ -33,8 +33,8 @@ const withAssets = (eb: ExpressionBuilder<DB, 'stack'>, withTags = false) => {
eb
.selectFrom('tag')
.select(columns.tag)
.innerJoin('tag_asset', 'tag.id', 'tag_asset.tagId')
.whereRef('tag_asset.assetId', '=', 'asset.id'),
.innerJoin('tag_asset', 'tag.id', 'tag_asset.tagsId')
.whereRef('tag_asset.assetsId', '=', 'asset.id'),
).as('tags'),
),
)

@@ -62,11 +62,7 @@ export class StorageRepository {
}

createWriteStream(filepath: string): Writable {
return createWriteStream(filepath, { flags: 'w', highWaterMark: 1024 * 1024 });
}

createOrAppendWriteStream(filepath: string): Writable {
return createWriteStream(filepath, { flags: 'a', highWaterMark: 1024 * 1024 });
return createWriteStream(filepath, { flags: 'w' });
}

createOrOverwriteFile(filepath: string, buffer: Buffer) {
@@ -160,13 +156,10 @@ export class StorageRepository {
}
}

mkdir(filepath: string): Promise<string | undefined> {
return fs.mkdir(filepath, { recursive: true });
}

mkdirSync(filepath: string): void {
// does not throw an error if the folder already exists
mkdirSync(filepath, { recursive: true });
if (!existsSync(filepath)) {
mkdirSync(filepath, { recursive: true });
}
}

existsSync(filepath: string) {

@@ -143,8 +143,8 @@ class AlbumSync extends BaseSync {
|
||||
getCreatedAfter({ nowId, userId, afterCreateId }: SyncCreatedAfterOptions) {
|
||||
return this.db
|
||||
.selectFrom('album_user')
|
||||
.select(['albumId as id', 'createId'])
|
||||
.where('userId', '=', userId)
|
||||
.select(['albumsId as id', 'createId'])
|
||||
.where('usersId', '=', userId)
|
||||
.$if(!!afterCreateId, (qb) => qb.where('createId', '>=', afterCreateId!))
|
||||
.where('createId', '<', nowId)
|
||||
.orderBy('createId', 'asc')
|
||||
@@ -168,8 +168,8 @@ class AlbumSync extends BaseSync {
|
||||
const userId = options.userId;
|
||||
return this.upsertQuery('album', options)
|
||||
.distinctOn(['album.id', 'album.updateId'])
|
||||
.leftJoin('album_user as album_users', 'album.id', 'album_users.albumId')
|
||||
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_users.userId', '=', userId)]))
|
||||
.leftJoin('album_user as album_users', 'album.id', 'album_users.albumsId')
|
||||
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_users.usersId', '=', userId)]))
|
||||
.select([
|
||||
'album.id',
|
||||
'album.ownerId',
|
||||
@@ -190,10 +190,10 @@ class AlbumAssetSync extends BaseSync {
|
||||
@GenerateSql({ params: [dummyBackfillOptions, DummyValue.UUID], stream: true })
|
||||
getBackfill(options: SyncBackfillOptions, albumId: string) {
|
||||
return this.backfillQuery('album_asset', options)
|
||||
.innerJoin('asset', 'asset.id', 'album_asset.assetId')
|
||||
.innerJoin('asset', 'asset.id', 'album_asset.assetsId')
|
||||
.select(columns.syncAsset)
|
||||
.select('album_asset.updateId')
|
||||
.where('album_asset.albumId', '=', albumId)
|
||||
.where('album_asset.albumsId', '=', albumId)
|
||||
.stream();
|
||||
}
|
||||
|
||||
@@ -201,13 +201,13 @@ class AlbumAssetSync extends BaseSync {
|
||||
getUpdates(options: SyncQueryOptions, albumToAssetAck: SyncAck) {
|
||||
const userId = options.userId;
|
||||
return this.upsertQuery('asset', options)
|
||||
.innerJoin('album_asset', 'album_asset.assetId', 'asset.id')
|
||||
.innerJoin('album_asset', 'album_asset.assetsId', 'asset.id')
|
||||
.select(columns.syncAsset)
|
||||
.select('asset.updateId')
|
||||
.where('album_asset.updateId', '<=', albumToAssetAck.updateId) // Ensure we only send updates for assets that the client already knows about
|
||||
.innerJoin('album', 'album.id', 'album_asset.albumId')
|
||||
.leftJoin('album_user', 'album_user.albumId', 'album_asset.albumId')
|
||||
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.userId', '=', userId)]))
|
||||
.innerJoin('album', 'album.id', 'album_asset.albumsId')
|
||||
.leftJoin('album_user', 'album_user.albumsId', 'album_asset.albumsId')
|
||||
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.usersId', '=', userId)]))
|
||||
.stream();
|
||||
}
|
||||
|
||||
@@ -216,11 +216,11 @@ class AlbumAssetSync extends BaseSync {
|
||||
const userId = options.userId;
|
||||
return this.upsertQuery('album_asset', options)
|
||||
.select('album_asset.updateId')
|
||||
.innerJoin('asset', 'asset.id', 'album_asset.assetId')
|
||||
.innerJoin('asset', 'asset.id', 'album_asset.assetsId')
|
||||
.select(columns.syncAsset)
|
||||
.innerJoin('album', 'album.id', 'album_asset.albumId')
|
||||
.leftJoin('album_user', 'album_user.albumId', 'album_asset.albumId')
|
||||
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.userId', '=', userId)]))
|
||||
.innerJoin('album', 'album.id', 'album_asset.albumsId')
|
||||
.leftJoin('album_user', 'album_user.albumsId', 'album_asset.albumsId')
|
||||
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.usersId', '=', userId)]))
|
||||
.stream();
|
||||
}
|
||||
}
|
||||
@@ -229,10 +229,10 @@ class AlbumAssetExifSync extends BaseSync {
|
||||
@GenerateSql({ params: [dummyBackfillOptions, DummyValue.UUID], stream: true })
|
||||
getBackfill(options: SyncBackfillOptions, albumId: string) {
|
||||
return this.backfillQuery('album_asset', options)
|
||||
.innerJoin('asset_exif', 'asset_exif.assetId', 'album_asset.assetId')
|
||||
.innerJoin('asset_exif', 'asset_exif.assetId', 'album_asset.assetsId')
|
||||
.select(columns.syncAssetExif)
|
||||
.select('album_asset.updateId')
|
||||
.where('album_asset.albumId', '=', albumId)
|
||||
.where('album_asset.albumsId', '=', albumId)
|
||||
.stream();
|
||||
}
|
||||
|
||||
@@ -240,13 +240,13 @@ class AlbumAssetExifSync extends BaseSync {
|
||||
getUpdates(options: SyncQueryOptions, albumToAssetAck: SyncAck) {
|
||||
const userId = options.userId;
|
||||
return this.upsertQuery('asset_exif', options)
|
||||
.innerJoin('album_asset', 'album_asset.assetId', 'asset_exif.assetId')
|
||||
.innerJoin('album_asset', 'album_asset.assetsId', 'asset_exif.assetId')
|
||||
.select(columns.syncAssetExif)
|
||||
.select('asset_exif.updateId')
|
||||
.where('album_asset.updateId', '<=', albumToAssetAck.updateId) // Ensure we only send exif updates for assets that the client already knows about
|
||||
.innerJoin('album', 'album.id', 'album_asset.albumId')
|
||||
.leftJoin('album_user', 'album_user.albumId', 'album_asset.albumId')
|
||||
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.userId', '=', userId)]))
|
||||
.innerJoin('album', 'album.id', 'album_asset.albumsId')
|
||||
.leftJoin('album_user', 'album_user.albumsId', 'album_asset.albumsId')
|
||||
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.usersId', '=', userId)]))
|
||||
.stream();
|
||||
}
|
||||
|
||||
@@ -255,11 +255,11 @@ class AlbumAssetExifSync extends BaseSync {
|
||||
const userId = options.userId;
|
||||
return this.upsertQuery('album_asset', options)
|
||||
.select('album_asset.updateId')
|
||||
.innerJoin('asset_exif', 'asset_exif.assetId', 'album_asset.assetId')
|
||||
.innerJoin('asset_exif', 'asset_exif.assetId', 'album_asset.assetsId')
|
||||
.select(columns.syncAssetExif)
|
||||
.innerJoin('album', 'album.id', 'album_asset.albumId')
|
||||
.leftJoin('album_user', 'album_user.albumId', 'album_asset.albumId')
|
||||
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.userId', '=', userId)]))
|
||||
.innerJoin('album', 'album.id', 'album_asset.albumsId')
|
||||
.leftJoin('album_user', 'album_user.albumsId', 'album_asset.albumsId')
|
||||
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.usersId', '=', userId)]))
|
||||
.stream();
|
||||
}
|
||||
}
|
||||
@@ -268,8 +268,8 @@ class AlbumToAssetSync extends BaseSync {
|
||||
@GenerateSql({ params: [dummyBackfillOptions, DummyValue.UUID], stream: true })
|
||||
getBackfill(options: SyncBackfillOptions, albumId: string) {
|
||||
return this.backfillQuery('album_asset', options)
|
||||
.select(['album_asset.assetId as assetId', 'album_asset.albumId as albumId', 'album_asset.updateId'])
|
||||
.where('album_asset.albumId', '=', albumId)
|
||||
.select(['album_asset.assetsId as assetId', 'album_asset.albumsId as albumId', 'album_asset.updateId'])
|
||||
.where('album_asset.albumsId', '=', albumId)
|
||||
.stream();
|
||||
}
|
||||
|
||||
@@ -290,8 +290,8 @@ class AlbumToAssetSync extends BaseSync {
|
||||
eb.parens(
|
||||
eb
|
||||
.selectFrom('album_user')
|
||||
.select(['album_user.albumId as id'])
|
||||
.where('album_user.userId', '=', userId),
|
||||
.select(['album_user.albumsId as id'])
|
||||
.where('album_user.usersId', '=', userId),
|
||||
),
|
||||
),
|
||||
),
|
||||
@@ -307,10 +307,10 @@ class AlbumToAssetSync extends BaseSync {
|
||||
getUpserts(options: SyncQueryOptions) {
|
||||
const userId = options.userId;
|
||||
return this.upsertQuery('album_asset', options)
|
||||
.select(['album_asset.assetId as assetId', 'album_asset.albumId as albumId', 'album_asset.updateId'])
|
||||
.innerJoin('album', 'album.id', 'album_asset.albumId')
|
||||
.leftJoin('album_user', 'album_user.albumId', 'album_asset.albumId')
|
||||
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.userId', '=', userId)]))
|
||||
.select(['album_asset.assetsId as assetId', 'album_asset.albumsId as albumId', 'album_asset.updateId'])
|
||||
.innerJoin('album', 'album.id', 'album_asset.albumsId')
|
||||
.leftJoin('album_user', 'album_user.albumsId', 'album_asset.albumsId')
|
||||
.where((eb) => eb.or([eb('album.ownerId', '=', userId), eb('album_user.usersId', '=', userId)]))
|
||||
.stream();
|
||||
}
|
||||
}
|
||||
@@ -321,7 +321,7 @@ class AlbumUserSync extends BaseSync {
|
||||
return this.backfillQuery('album_user', options)
|
||||
.select(columns.syncAlbumUser)
|
||||
.select('album_user.updateId')
|
||||
.where('albumId', '=', albumId)
|
||||
.where('albumsId', '=', albumId)
|
||||
.stream();
|
||||
}
|
||||
|
||||
@@ -342,8 +342,8 @@ class AlbumUserSync extends BaseSync {
|
||||
eb.parens(
|
||||
eb
|
||||
.selectFrom('album_user')
|
||||
.select(['album_user.albumId as id'])
|
||||
.where('album_user.userId', '=', userId),
|
||||
.select(['album_user.albumsId as id'])
|
||||
.where('album_user.usersId', '=', userId),
|
||||
),
|
||||
),
|
||||
),
|
||||
@@ -363,7 +363,7 @@ class AlbumUserSync extends BaseSync {
|
||||
.select('album_user.updateId')
|
||||
.where((eb) =>
|
||||
eb(
|
||||
'album_user.albumId',
|
||||
'album_user.albumsId',
|
||||
'in',
|
||||
eb
|
||||
.selectFrom('album')
|
||||
@@ -373,8 +373,8 @@ class AlbumUserSync extends BaseSync {
|
||||
eb.parens(
|
||||
eb
|
||||
.selectFrom('album_user as albumUsers')
|
||||
.select(['albumUsers.albumId as id'])
|
||||
.where('albumUsers.userId', '=', userId),
|
||||
.select(['albumUsers.albumsId as id'])
|
||||
.where('albumUsers.usersId', '=', userId),
|
||||
),
|
||||
),
|
||||
),
|
||||
@@ -550,7 +550,7 @@ class MemoryToAssetSync extends BaseSync {
|
||||
@GenerateSql({ params: [dummyQueryOptions], stream: true })
|
||||
getUpserts(options: SyncQueryOptions) {
|
||||
return this.upsertQuery('memory_asset', options)
|
||||
.select(['memoriesId as memoryId', 'assetId as assetId'])
|
||||
.select(['memoriesId as memoryId', 'assetsId as assetId'])
|
||||
.select('updateId')
|
||||
.where('memoriesId', 'in', (eb) => eb.selectFrom('memory').select('id').where('ownerId', '=', options.userId))
|
||||
.stream();
|
||||
|
||||
@@ -97,9 +97,9 @@ export class TagRepository {

const results = await this.db
.selectFrom('tag_asset')
.select(['assetId as assetId'])
.where('tagId', '=', tagId)
.where('assetId', 'in', assetIds)
.select(['assetsId as assetId'])
.where('tagsId', '=', tagId)
.where('assetsId', 'in', assetIds)
.execute();

return new Set(results.map(({ assetId }) => assetId));
@@ -114,7 +114,7 @@ export class TagRepository {

await this.db
.insertInto('tag_asset')
.values(assetIds.map((assetId) => ({ tagId, assetId })))
.values(assetIds.map((assetId) => ({ tagsId: tagId, assetsId: assetId })))
.execute();
}

@@ -125,10 +125,10 @@ export class TagRepository {
return;
}

await this.db.deleteFrom('tag_asset').where('tagId', '=', tagId).where('assetId', 'in', assetIds).execute();
await this.db.deleteFrom('tag_asset').where('tagsId', '=', tagId).where('assetsId', 'in', assetIds).execute();
}

@GenerateSql({ params: [[{ assetId: DummyValue.UUID, tagIds: DummyValue.UUID }]] })
@GenerateSql({ params: [[{ assetId: DummyValue.UUID, tagsIds: [DummyValue.UUID] }]] })
@Chunked()
upsertAssetIds(items: Insertable<TagAssetTable>[]) {
if (items.length === 0) {
@@ -147,7 +147,7 @@ export class TagRepository {
@Chunked({ paramIndex: 1 })
replaceAssetTags(assetId: string, tagIds: string[]) {
return this.db.transaction().execute(async (tx) => {
await tx.deleteFrom('tag_asset').where('assetId', '=', assetId).execute();
await tx.deleteFrom('tag_asset').where('assetsId', '=', assetId).execute();

if (tagIds.length === 0) {
return;
@@ -155,7 +155,7 @@ export class TagRepository {

return tx
.insertInto('tag_asset')
.values(tagIds.map((tagId) => ({ tagId, assetId })))
.values(tagIds.map((tagId) => ({ tagsId: tagId, assetsId: assetId })))
.onConflict((oc) => oc.doNothing())
.returningAll()
.execute();
@@ -170,7 +170,7 @@ export class TagRepository {
exists(
selectFrom('tag_closure')
.whereRef('tag.id', '=', 'tag_closure.id_ancestor')
.innerJoin('tag_asset', 'tag_closure.id_descendant', 'tag_asset.tagId'),
.innerJoin('tag_asset', 'tag_closure.id_descendant', 'tag_asset.tagsId'),
),
),
)

@@ -16,7 +16,7 @@ import { ArgsOf, EventRepository } from 'src/repositories/event.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { handlePromiseError } from 'src/utils/misc';

export const serverEvents = ['ConfigUpdate', 'UploadAbort'] as const;
export const serverEvents = ['ConfigUpdate'] as const;
export type ServerEvents = (typeof serverEvents)[number];

export interface ClientEventMap {

@@ -29,7 +29,7 @@ export const album_user_after_insert = registerFunction({
body: `
BEGIN
UPDATE album SET "updatedAt" = clock_timestamp(), "updateId" = immich_uuid_v7(clock_timestamp())
WHERE "id" IN (SELECT DISTINCT "albumId" FROM inserted_rows);
WHERE "id" IN (SELECT DISTINCT "albumsId" FROM inserted_rows);
RETURN NULL;
END`,
});
@@ -139,8 +139,8 @@ export const album_asset_delete_audit = registerFunction({
body: `
BEGIN
INSERT INTO album_asset_audit ("albumId", "assetId")
SELECT "albumId", "assetId" FROM OLD
WHERE "albumId" IN (SELECT "id" FROM album WHERE "id" IN (SELECT "albumId" FROM OLD));
SELECT "albumsId", "assetsId" FROM OLD
WHERE "albumsId" IN (SELECT "id" FROM album WHERE "id" IN (SELECT "albumsId" FROM OLD));
RETURN NULL;
END`,
});
@@ -152,12 +152,12 @@ export const album_user_delete_audit = registerFunction({
body: `
BEGIN
INSERT INTO album_audit ("albumId", "userId")
SELECT "albumId", "userId"
SELECT "albumsId", "usersId"
FROM OLD;

IF pg_trigger_depth() = 1 THEN
INSERT INTO album_user_audit ("albumId", "userId")
SELECT "albumId", "userId"
SELECT "albumsId", "usersId"
FROM OLD;
END IF;

@@ -185,7 +185,7 @@ export const memory_asset_delete_audit = registerFunction({
body: `
BEGIN
INSERT INTO memory_asset_audit ("memoryId", "assetId")
SELECT "memoriesId", "assetId" FROM OLD
SELECT "memoriesId", "assetsId" FROM OLD
WHERE "memoriesId" IN (SELECT "id" FROM memory WHERE "id" IN (SELECT "memoriesId" FROM OLD));
RETURN NULL;
END`,

@@ -1,9 +0,0 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
await sql`ALTER TYPE "assets_status_enum" ADD VALUE IF NOT EXISTS 'partial'`.execute(db);
}

export async function down(): Promise<void> {
// Cannot remove enum values in PostgreSQL
}
@@ -1,99 +0,0 @@
import { Kysely, sql } from 'kysely';

export async function up(db: Kysely<any>): Promise<void> {
// rename columns
await sql`ALTER TABLE "album_asset" RENAME COLUMN "albumsId" TO "albumId";`.execute(db);
await sql`ALTER TABLE "album_asset" RENAME COLUMN "assetsId" TO "assetId";`.execute(db);
await sql`ALTER TABLE "album_user" RENAME COLUMN "albumsId" TO "albumId";`.execute(db);
await sql`ALTER TABLE "album_user" RENAME COLUMN "usersId" TO "userId";`.execute(db);
await sql`ALTER TABLE "memory_asset" RENAME COLUMN "assetsId" TO "assetId";`.execute(db);
await sql`ALTER TABLE "shared_link_asset" RENAME COLUMN "assetsId" TO "assetId";`.execute(db);
await sql`ALTER TABLE "shared_link_asset" RENAME COLUMN "sharedLinksId" TO "sharedLinkId";`.execute(db);
await sql`ALTER TABLE "tag_asset" RENAME COLUMN "assetsId" TO "assetId";`.execute(db);
await sql`ALTER TABLE "tag_asset" RENAME COLUMN "tagsId" TO "tagId";`.execute(db);

// rename constraints
await sql`ALTER TABLE "album_asset" RENAME CONSTRAINT "album_asset_albumsId_fkey" TO "album_asset_albumId_fkey";`.execute(db);
await sql`ALTER TABLE "album_asset" RENAME CONSTRAINT "album_asset_assetsId_fkey" TO "album_asset_assetId_fkey";`.execute(db);
await sql`ALTER TABLE "album_user" RENAME CONSTRAINT "album_user_albumsId_fkey" TO "album_user_albumId_fkey";`.execute(db);
await sql`ALTER TABLE "album_user" RENAME CONSTRAINT "album_user_usersId_fkey" TO "album_user_userId_fkey";`.execute(db);
await sql`ALTER TABLE "memory_asset" RENAME CONSTRAINT "memory_asset_assetsId_fkey" TO "memory_asset_assetId_fkey";`.execute(db);
await sql`ALTER TABLE "shared_link_asset" RENAME CONSTRAINT "shared_link_asset_assetsId_fkey" TO "shared_link_asset_assetId_fkey";`.execute(db);
await sql`ALTER TABLE "shared_link_asset" RENAME CONSTRAINT "shared_link_asset_sharedLinksId_fkey" TO "shared_link_asset_sharedLinkId_fkey";`.execute(db);
await sql`ALTER TABLE "tag_asset" RENAME CONSTRAINT "tag_asset_assetsId_fkey" TO "tag_asset_assetId_fkey";`.execute(db);
await sql`ALTER TABLE "tag_asset" RENAME CONSTRAINT "tag_asset_tagsId_fkey" TO "tag_asset_tagId_fkey";`.execute(db);

// rename indexes
await sql`ALTER INDEX "album_asset_albumsId_idx" RENAME TO "album_asset_albumId_idx";`.execute(db);
await sql`ALTER INDEX "album_asset_assetsId_idx" RENAME TO "album_asset_assetId_idx";`.execute(db);
await sql`ALTER INDEX "album_user_usersId_idx" RENAME TO "album_user_userId_idx";`.execute(db);
await sql`ALTER INDEX "album_user_albumsId_idx" RENAME TO "album_user_albumId_idx";`.execute(db);
await sql`ALTER INDEX "memory_asset_assetsId_idx" RENAME TO "memory_asset_assetId_idx";`.execute(db);
await sql`ALTER INDEX "shared_link_asset_sharedLinksId_idx" RENAME TO "shared_link_asset_sharedLinkId_idx";`.execute(db);
await sql`ALTER INDEX "shared_link_asset_assetsId_idx" RENAME TO "shared_link_asset_assetId_idx";`.execute(db);
await sql`ALTER INDEX "tag_asset_assetsId_idx" RENAME TO "tag_asset_assetId_idx";`.execute(db);
await sql`ALTER INDEX "tag_asset_tagsId_idx" RENAME TO "tag_asset_tagId_idx";`.execute(db);
await sql`ALTER INDEX "tag_asset_assetsId_tagsId_idx" RENAME TO "tag_asset_assetId_tagId_idx";`.execute(db);

// update triggers and functions
await sql`CREATE OR REPLACE FUNCTION album_user_after_insert()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
BEGIN
UPDATE album SET "updatedAt" = clock_timestamp(), "updateId" = immich_uuid_v7(clock_timestamp())
WHERE "id" IN (SELECT DISTINCT "albumId" FROM inserted_rows);
RETURN NULL;
END
$$;`.execute(db);
await sql`CREATE OR REPLACE FUNCTION album_asset_delete_audit()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
BEGIN
INSERT INTO album_asset_audit ("albumId", "assetId")
SELECT "albumId", "assetId" FROM OLD
WHERE "albumId" IN (SELECT "id" FROM album WHERE "id" IN (SELECT "albumId" FROM OLD));
RETURN NULL;
END
$$;`.execute(db);
await sql`CREATE OR REPLACE FUNCTION album_user_delete_audit()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
BEGIN
INSERT INTO album_audit ("albumId", "userId")
SELECT "albumId", "userId"
FROM OLD;

IF pg_trigger_depth() = 1 THEN
INSERT INTO album_user_audit ("albumId", "userId")
SELECT "albumId", "userId"
FROM OLD;
END IF;

RETURN NULL;
END
$$;`.execute(db);
await sql`CREATE OR REPLACE FUNCTION memory_asset_delete_audit()
RETURNS TRIGGER
LANGUAGE PLPGSQL
AS $$
BEGIN
INSERT INTO memory_asset_audit ("memoryId", "assetId")
SELECT "memoriesId", "assetId" FROM OLD
WHERE "memoriesId" IN (SELECT "id" FROM memory WHERE "id" IN (SELECT "memoriesId" FROM OLD));
RETURN NULL;
END
$$;`.execute(db);

// update overrides
await sql`UPDATE "migration_overrides" SET "value" = '{"type":"function","name":"album_user_after_insert","sql":"CREATE OR REPLACE FUNCTION album_user_after_insert()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n UPDATE album SET \\"updatedAt\\" = clock_timestamp(), \\"updateId\\" = immich_uuid_v7(clock_timestamp())\\n WHERE \\"id\\" IN (SELECT DISTINCT \\"albumId\\" FROM inserted_rows);\\n RETURN NULL;\\n END\\n $$;"}'::jsonb WHERE "name" = 'function_album_user_after_insert';`.execute(db);
await sql`UPDATE "migration_overrides" SET "value" = '{"type":"function","name":"album_asset_delete_audit","sql":"CREATE OR REPLACE FUNCTION album_asset_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO album_asset_audit (\\"albumId\\", \\"assetId\\")\\n SELECT \\"albumId\\", \\"assetId\\" FROM OLD\\n WHERE \\"albumId\\" IN (SELECT \\"id\\" FROM album WHERE \\"id\\" IN (SELECT \\"albumId\\" FROM OLD));\\n RETURN NULL;\\n END\\n $$;"}'::jsonb WHERE "name" = 'function_album_asset_delete_audit';`.execute(db);
await sql`UPDATE "migration_overrides" SET "value" = '{"type":"function","name":"album_user_delete_audit","sql":"CREATE OR REPLACE FUNCTION album_user_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO album_audit (\\"albumId\\", \\"userId\\")\\n SELECT \\"albumId\\", \\"userId\\"\\n FROM OLD;\\n\\n IF pg_trigger_depth() = 1 THEN\\n INSERT INTO album_user_audit (\\"albumId\\", \\"userId\\")\\n SELECT \\"albumId\\", \\"userId\\"\\n FROM OLD;\\n END IF;\\n\\n RETURN NULL;\\n END\\n $$;"}'::jsonb WHERE "name" = 'function_album_user_delete_audit';`.execute(db);
await sql`UPDATE "migration_overrides" SET "value" = '{"type":"function","name":"memory_asset_delete_audit","sql":"CREATE OR REPLACE FUNCTION memory_asset_delete_audit()\\n RETURNS TRIGGER\\n LANGUAGE PLPGSQL\\n AS $$\\n BEGIN\\n INSERT INTO memory_asset_audit (\\"memoryId\\", \\"assetId\\")\\n SELECT \\"memoriesId\\", \\"assetId\\" FROM OLD\\n WHERE \\"memoriesId\\" IN (SELECT \\"id\\" FROM memory WHERE \\"id\\" IN (SELECT \\"memoriesId\\" FROM OLD));\\n RETURN NULL;\\n END\\n $$;"}'::jsonb WHERE "name" = 'function_memory_asset_delete_audit';`.execute(db);
}

export function down() {
// not implemented
}
@@ -32,7 +32,7 @@ import {
@ForeignKeyConstraint({
columns: ['albumId', 'assetId'],
referenceTable: () => AlbumAssetTable,
referenceColumns: ['albumId', 'assetId'],
referenceColumns: ['albumsId', 'assetsId'],
onUpdate: 'NO ACTION',
onDelete: 'CASCADE',
})

@@ -22,10 +22,10 @@ import {
})
export class AlbumAssetTable {
@ForeignKeyColumn(() => AlbumTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE', nullable: false, primary: true })
albumId!: string;
albumsId!: string;

@ForeignKeyColumn(() => AssetTable, { onDelete: 'CASCADE', onUpdate: 'CASCADE', nullable: false, primary: true })
assetId!: string;
assetsId!: string;

@CreateDateColumn()
createdAt!: Generated<Timestamp>;

@@ -37,7 +37,7 @@ export class AlbumUserTable {
nullable: false,
primary: true,
})
albumId!: string;
albumsId!: string;

@ForeignKeyColumn(() => UserTable, {
onDelete: 'CASCADE',
@@ -45,7 +45,7 @@ export class AlbumUserTable {
nullable: false,
primary: true,
})
userId!: string;
usersId!: string;

@Column({ type: 'character varying', default: AlbumUserRole.Editor })
role!: Generated<AlbumUserRole>;

@@ -25,7 +25,7 @@ export class MemoryAssetTable {
memoriesId!: string;

@ForeignKeyColumn(() => AssetTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true })
assetId!: string;
assetsId!: string;

@CreateDateColumn()
createdAt!: Generated<Timestamp>;

@@ -5,8 +5,8 @@ import { ForeignKeyColumn, Table } from 'src/sql-tools';
@Table('shared_link_asset')
export class SharedLinkAssetTable {
@ForeignKeyColumn(() => AssetTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true })
assetId!: string;
assetsId!: string;

@ForeignKeyColumn(() => SharedLinkTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true })
sharedLinkId!: string;
sharedLinksId!: string;
}

@@ -2,12 +2,12 @@ import { AssetTable } from 'src/schema/tables/asset.table';
import { TagTable } from 'src/schema/tables/tag.table';
import { ForeignKeyColumn, Index, Table } from 'src/sql-tools';

@Index({ columns: ['assetId', 'tagId'] })
@Index({ columns: ['assetsId', 'tagsId'] })
@Table('tag_asset')
export class TagAssetTable {
@ForeignKeyColumn(() => AssetTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true, index: true })
assetId!: string;
assetsId!: string;

@ForeignKeyColumn(() => TagTable, { onUpdate: 'CASCADE', onDelete: 'CASCADE', primary: true, index: true })
tagId!: string;
tagsId!: string;
}

@@ -402,16 +402,16 @@ describe(AlbumService.name, () => {
mocks.album.update.mockResolvedValue(albumStub.sharedWithAdmin);
mocks.user.get.mockResolvedValue(userStub.user2);
mocks.albumUser.create.mockResolvedValue({
userId: userStub.user2.id,
albumId: albumStub.sharedWithAdmin.id,
usersId: userStub.user2.id,
albumsId: albumStub.sharedWithAdmin.id,
role: AlbumUserRole.Editor,
});
await sut.addUsers(authStub.user1, albumStub.sharedWithAdmin.id, {
albumUsers: [{ userId: authStub.user2.user.id }],
});
expect(mocks.albumUser.create).toHaveBeenCalledWith({
userId: authStub.user2.user.id,
albumId: albumStub.sharedWithAdmin.id,
usersId: authStub.user2.user.id,
albumsId: albumStub.sharedWithAdmin.id,
});
expect(mocks.event.emit).toHaveBeenCalledWith('AlbumInvite', {
id: albumStub.sharedWithAdmin.id,
@@ -439,8 +439,8 @@ describe(AlbumService.name, () => {

expect(mocks.albumUser.delete).toHaveBeenCalledTimes(1);
expect(mocks.albumUser.delete).toHaveBeenCalledWith({
albumId: albumStub.sharedWithUser.id,
userId: userStub.user1.id,
albumsId: albumStub.sharedWithUser.id,
usersId: userStub.user1.id,
});
expect(mocks.album.getById).toHaveBeenCalledWith(albumStub.sharedWithUser.id, { withAssets: false });
});
@@ -467,8 +467,8 @@ describe(AlbumService.name, () => {

expect(mocks.albumUser.delete).toHaveBeenCalledTimes(1);
expect(mocks.albumUser.delete).toHaveBeenCalledWith({
albumId: albumStub.sharedWithUser.id,
userId: authStub.user1.user.id,
albumsId: albumStub.sharedWithUser.id,
usersId: authStub.user1.user.id,
});
});

@@ -480,8 +480,8 @@ describe(AlbumService.name, () => {

expect(mocks.albumUser.delete).toHaveBeenCalledTimes(1);
expect(mocks.albumUser.delete).toHaveBeenCalledWith({
albumId: albumStub.sharedWithUser.id,
userId: authStub.user1.user.id,
albumsId: albumStub.sharedWithUser.id,
usersId: authStub.user1.user.id,
});
});

@@ -515,7 +515,7 @@ describe(AlbumService.name, () => {
role: AlbumUserRole.Editor,
});
expect(mocks.albumUser.update).toHaveBeenCalledWith(
{ albumId: albumStub.sharedWithAdmin.id, userId: userStub.admin.id },
{ albumsId: albumStub.sharedWithAdmin.id, usersId: userStub.admin.id },
{ role: AlbumUserRole.Editor },
);
});
@@ -804,12 +804,12 @@ describe(AlbumService.name, () => {
albumThumbnailAssetId: 'asset-1',
});
expect(mocks.album.addAssetIdsToAlbums).toHaveBeenCalledWith([
{ albumId: 'album-123', assetId: 'asset-1' },
{ albumId: 'album-123', assetId: 'asset-2' },
{ albumId: 'album-123', assetId: 'asset-3' },
{ albumId: 'album-321', assetId: 'asset-1' },
{ albumId: 'album-321', assetId: 'asset-2' },
{ albumId: 'album-321', assetId: 'asset-3' },
{ albumsId: 'album-123', assetsId: 'asset-1' },
{ albumsId: 'album-123', assetsId: 'asset-2' },
{ albumsId: 'album-123', assetsId: 'asset-3' },
{ albumsId: 'album-321', assetsId: 'asset-1' },
{ albumsId: 'album-321', assetsId: 'asset-2' },
{ albumsId: 'album-321', assetsId: 'asset-3' },
]);
});

@@ -840,12 +840,12 @@ describe(AlbumService.name, () => {
albumThumbnailAssetId: 'asset-id',
});
expect(mocks.album.addAssetIdsToAlbums).toHaveBeenCalledWith([
{ albumId: 'album-123', assetId: 'asset-1' },
{ albumId: 'album-123', assetId: 'asset-2' },
{ albumId: 'album-123', assetId: 'asset-3' },
{ albumId: 'album-321', assetId: 'asset-1' },
{ albumId: 'album-321', assetId: 'asset-2' },
{ albumId: 'album-321', assetId: 'asset-3' },
{ albumsId: 'album-123', assetsId: 'asset-1' },
{ albumsId: 'album-123', assetsId: 'asset-2' },
{ albumsId: 'album-123', assetsId: 'asset-3' },
{ albumsId: 'album-321', assetsId: 'asset-1' },
{ albumsId: 'album-321', assetsId: 'asset-2' },
{ albumsId: 'album-321', assetsId: 'asset-3' },
]);
});

@@ -876,12 +876,12 @@ describe(AlbumService.name, () => {
albumThumbnailAssetId: 'asset-1',
});
expect(mocks.album.addAssetIdsToAlbums).toHaveBeenCalledWith([
{ albumId: 'album-123', assetId: 'asset-1' },
{ albumId: 'album-123', assetId: 'asset-2' },
{ albumId: 'album-123', assetId: 'asset-3' },
{ albumId: 'album-321', assetId: 'asset-1' },
{ albumId: 'album-321', assetId: 'asset-2' },
{ albumId: 'album-321', assetId: 'asset-3' },
{ albumsId: 'album-123', assetsId: 'asset-1' },
{ albumsId: 'album-123', assetsId: 'asset-2' },
{ albumsId: 'album-123', assetsId: 'asset-3' },
{ albumsId: 'album-321', assetsId: 'asset-1' },
{ albumsId: 'album-321', assetsId: 'asset-2' },
{ albumsId: 'album-321', assetsId: 'asset-3' },
]);
expect(mocks.event.emit).toHaveBeenCalledWith('AlbumUpdate', {
id: 'album-123',
@@ -936,9 +936,9 @@ describe(AlbumService.name, () => {
albumThumbnailAssetId: 'asset-1',
});
expect(mocks.album.addAssetIdsToAlbums).toHaveBeenCalledWith([
{ albumId: 'album-123', assetId: 'asset-1' },
{ albumId: 'album-123', assetId: 'asset-2' },
{ albumId: 'album-123', assetId: 'asset-3' },
{ albumsId: 'album-123', assetsId: 'asset-1' },
{ albumsId: 'album-123', assetsId: 'asset-2' },
{ albumsId: 'album-123', assetsId: 'asset-3' },
]);
expect(mocks.event.emit).toHaveBeenCalledWith('AlbumUpdate', {
id: 'album-123',
@@ -977,12 +977,12 @@ describe(AlbumService.name, () => {
albumThumbnailAssetId: 'asset-1',
});
expect(mocks.album.addAssetIdsToAlbums).toHaveBeenCalledWith([
{ albumId: 'album-123', assetId: 'asset-1' },
{ albumId: 'album-123', assetId: 'asset-2' },
{ albumId: 'album-123', assetId: 'asset-3' },
{ albumId: 'album-321', assetId: 'asset-1' },
{ albumId: 'album-321', assetId: 'asset-2' },
{ albumId: 'album-321', assetId: 'asset-3' },
{ albumsId: 'album-123', assetsId: 'asset-1' },
{ albumsId: 'album-123', assetsId: 'asset-2' },
{ albumsId: 'album-123', assetsId: 'asset-3' },
{ albumsId: 'album-321', assetsId: 'asset-1' },
{ albumsId: 'album-321', assetsId: 'asset-2' },
{ albumsId: 'album-321', assetsId: 'asset-3' },
]);
expect(mocks.access.asset.checkPartnerAccess).toHaveBeenCalledWith(
authStub.admin.user.id,
@@ -1014,9 +1014,9 @@ describe(AlbumService.name, () => {
albumThumbnailAssetId: 'asset-1',
});
expect(mocks.album.addAssetIdsToAlbums).toHaveBeenCalledWith([
{ albumId: 'album-321', assetId: 'asset-1' },
{ albumId: 'album-321', assetId: 'asset-2' },
{ albumId: 'album-321', assetId: 'asset-3' },
{ albumsId: 'album-321', assetsId: 'asset-1' },
{ albumsId: 'album-321', assetsId: 'asset-2' },
{ albumsId: 'album-321', assetsId: 'asset-3' },
]);
});


@@ -215,7 +215,7 @@ export class AlbumService extends BaseService {
return results;
}

const albumAssetValues: { albumId: string; assetId: string }[] = [];
const albumAssetValues: { albumsId: string; assetsId: string }[] = [];
const events: { id: string; recipients: string[] }[] = [];
for (const albumId of allowedAlbumIds) {
const existingAssetIds = await this.albumRepository.getAssetIds(albumId, [...allowedAssetIds]);
@@ -228,7 +228,7 @@ export class AlbumService extends BaseService {
results.success = true;

for (const assetId of notPresentAssetIds) {
albumAssetValues.push({ albumId, assetId });
albumAssetValues.push({ albumsId: albumId, assetsId: assetId });
}
await this.albumRepository.update(albumId, {
id: albumId,
@@ -289,7 +289,7 @@ export class AlbumService extends BaseService {
throw new BadRequestException('User not found');
}

await this.albumUserRepository.create({ userId, albumId: id, role });
await this.albumUserRepository.create({ usersId: userId, albumsId: id, role });
await this.eventRepository.emit('AlbumInvite', { id, userId });
}

@@ -317,12 +317,12 @@ export class AlbumService extends BaseService {
await this.requireAccess({ auth, permission: Permission.AlbumShare, ids: [id] });
}

await this.albumUserRepository.delete({ albumId: id, userId });
await this.albumUserRepository.delete({ albumsId: id, usersId: userId });
}

async updateUser(auth: AuthDto, id: string, userId: string, dto: UpdateAlbumUserDto): Promise<void> {
await this.requireAccess({ auth, permission: Permission.AlbumShare, ids: [id] });
await this.albumUserRepository.update({ albumId: id, userId }, { role: dto.role });
await this.albumUserRepository.update({ albumsId: id, usersId: userId }, { role: dto.role });
}

private async findOrFail(id: string, options: AlbumInfoOptions) {

@@ -215,11 +215,7 @@ describe(AssetMediaService.name, () => {
});

it('should find an existing asset', async () => {
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue({
id: 'asset-id',
createdAt: new Date(),
status: AssetStatus.Active,
});
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue('asset-id');
await expect(sut.getUploadAssetIdByChecksum(authStub.admin, file1.toString('hex'))).resolves.toEqual({
id: 'asset-id',
status: AssetMediaStatus.DUPLICATE,
@@ -228,11 +224,7 @@ describe(AssetMediaService.name, () => {
});

it('should find an existing asset by base64', async () => {
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue({
id: 'asset-id',
createdAt: new Date(),
status: AssetStatus.Active,
});
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue('asset-id');
await expect(sut.getUploadAssetIdByChecksum(authStub.admin, file1.toString('base64'))).resolves.toEqual({
id: 'asset-id',
status: AssetMediaStatus.DUPLICATE,
@@ -386,11 +378,7 @@ describe(AssetMediaService.name, () => {
(error as any).constraint_name = ASSET_CHECKSUM_CONSTRAINT;

mocks.asset.create.mockRejectedValue(error);
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue({
id: assetEntity.id,
createdAt: new Date(),
status: AssetStatus.Active,
});
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue(assetEntity.id);

await expect(sut.uploadAsset(authStub.user1, createDto, file)).resolves.toEqual({
id: 'id_1',
@@ -815,11 +803,7 @@ describe(AssetMediaService.name, () => {

mocks.asset.update.mockRejectedValue(error);
mocks.asset.getById.mockResolvedValueOnce(sidecarAsset);
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue({
id: sidecarAsset.id,
createdAt: new Date(),
status: AssetStatus.Active,
});
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue(sidecarAsset.id);
mocks.access.asset.checkOwnerAccess.mockResolvedValue(new Set([sidecarAsset.id]));
// this is the original file size
mocks.storage.stat.mockResolvedValue({ size: 0 } as Stats);

@@ -43,12 +43,12 @@ export class AssetMediaService extends BaseService {
return;
}

const asset = await this.assetRepository.getUploadAssetIdByChecksum(auth.user.id, fromChecksum(checksum));
if (!asset) {
const assetId = await this.assetRepository.getUploadAssetIdByChecksum(auth.user.id, fromChecksum(checksum));
if (!assetId) {
return;
}

return { id: asset.id, status: AssetMediaStatus.DUPLICATE };
return { id: assetId, status: AssetMediaStatus.DUPLICATE };
}

canUploadFile({ auth, fieldName, file }: UploadRequest): true {
@@ -165,10 +165,6 @@ export class AssetMediaService extends BaseService {
throw new Error('Asset not found');
}

if (asset.status === AssetStatus.Partial) {
throw new BadRequestException('Cannot replace a partial asset');
}

this.requireQuota(auth, file.size);

await this.replaceFileData(asset.id, dto, file, sidecarFile?.originalPath);
@@ -317,12 +313,12 @@ export class AssetMediaService extends BaseService {

// handle duplicates with a success response
if (isAssetChecksumConstraint(error)) {
const duplicate = await this.assetRepository.getUploadAssetIdByChecksum(auth.user.id, file.checksum);
if (!duplicate) {
const duplicateId = await this.assetRepository.getUploadAssetIdByChecksum(auth.user.id, file.checksum);
if (!duplicateId) {
this.logger.error(`Error locating duplicate for checksum constraint`);
throw new InternalServerErrorException();
}
return { status: AssetMediaStatus.DUPLICATE, id: duplicate.id };
return { status: AssetMediaStatus.DUPLICATE, id: duplicateId };
}

this.logger.error(`Error uploading file ${error}`, error?.stack);

@@ -1,456 +0,0 @@
import { BadRequestException, InternalServerErrorException } from '@nestjs/common';
import { AssetMetadataKey, AssetStatus, AssetType, AssetVisibility, JobName, JobStatus } from 'src/enum';
import { AssetUploadService } from 'src/services/asset-upload.service';
import { ASSET_CHECKSUM_CONSTRAINT } from 'src/utils/database';
import { authStub } from 'test/fixtures/auth.stub';
import { factory } from 'test/small.factory';
import { newTestService, ServiceMocks } from 'test/utils';

describe(AssetUploadService.name, () => {
let sut: AssetUploadService;
let mocks: ServiceMocks;

beforeEach(() => {
({ sut, mocks } = newTestService(AssetUploadService));
});

describe('onStart', () => {
const mockDto = {
assetData: {
filename: 'test.jpg',
deviceAssetId: 'device-asset-1',
deviceId: 'device-1',
fileCreatedAt: new Date('2025-01-01T00:00:00Z'),
fileModifiedAt: new Date('2025-01-01T12:00:00Z'),
isFavorite: false,
iCloudId: '',
},
checksum: Buffer.from('checksum'),
uploadLength: 1024,
uploadComplete: true,
contentLength: 1024,
isComplete: true,
version: 8,
};

it('should create a new asset and return upload metadata', async () => {
const assetId = factory.uuid();
mocks.crypto.randomUUID.mockReturnValue(assetId);

const result = await sut.onStart(authStub.user1, mockDto);

expect(result).toEqual({
id: assetId,
path: expect.stringContaining(assetId),
status: AssetStatus.Partial,
isDuplicate: false,
});

expect(mocks.asset.createWithMetadata).toHaveBeenCalledWith(
expect.objectContaining({
id: assetId,
ownerId: authStub.user1.user.id,
checksum: mockDto.checksum,
deviceAssetId: mockDto.assetData.deviceAssetId,
deviceId: mockDto.assetData.deviceId,
fileCreatedAt: mockDto.assetData.fileCreatedAt,
fileModifiedAt: mockDto.assetData.fileModifiedAt,
type: AssetType.Image,
isFavorite: false,
status: AssetStatus.Partial,
visibility: AssetVisibility.Hidden,
originalFileName: 'test.jpg',
}),
1024,
undefined,
);
});

it('should determine asset type from filename extension', async () => {
const videoDto = { ...mockDto, assetData: { ...mockDto.assetData, filename: 'video.mp4' } };
mocks.crypto.randomUUID.mockReturnValue(factory.uuid());

await sut.onStart(authStub.user1, videoDto);

expect(mocks.asset.createWithMetadata).toHaveBeenCalledWith(
expect.objectContaining({
type: AssetType.Video,
}),
expect.anything(),
undefined,
);
});

it('should throw BadRequestException for unsupported file types', async () => {
const unsupportedDto = { ...mockDto, assetData: { ...mockDto.assetData, filename: 'document.xyz' } };

await expect(sut.onStart(authStub.user1, unsupportedDto)).rejects.toThrow(BadRequestException);
await expect(sut.onStart(authStub.user1, unsupportedDto)).rejects.toThrow('unsupported file type');
});

it('should validate quota before creating asset', async () => {
const authWithQuota = {
...authStub.user1,
user: {
...authStub.user1.user,
quotaSizeInBytes: 2000,
quotaUsageInBytes: 1500,
},
};

await expect(sut.onStart(authWithQuota, mockDto)).rejects.toThrow(BadRequestException);
await expect(sut.onStart(authWithQuota, mockDto)).rejects.toThrow('Quota has been exceeded');
});

it('should allow upload when quota is null (unlimited)', async () => {
const authWithUnlimitedQuota = {
...authStub.user1,
user: {
...authStub.user1.user,
quotaSizeInBytes: null,
quotaUsageInBytes: 1000,
},
};

mocks.crypto.randomUUID.mockReturnValue(factory.uuid());

await expect(sut.onStart(authWithUnlimitedQuota, mockDto)).resolves.toBeDefined();
});

it('should allow upload when within quota', async () => {
const authWithQuota = {
...authStub.user1,
user: {
...authStub.user1.user,
quotaSizeInBytes: 5000,
quotaUsageInBytes: 1000,
},
};

mocks.crypto.randomUUID.mockReturnValue(factory.uuid());

const result = await sut.onStart(authWithQuota, mockDto);

expect(result.isDuplicate).toBe(false);
});

it('should handle duplicate detection via checksum constraint', async () => {
const existingAssetId = factory.uuid();
const checksumError = new Error('duplicate key value violates unique constraint');
(checksumError as any).constraint_name = ASSET_CHECKSUM_CONSTRAINT;

mocks.asset.createWithMetadata.mockRejectedValue(checksumError);
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue({
id: existingAssetId,
status: AssetStatus.Partial,
createdAt: new Date(),
});

const result = await sut.onStart(authStub.user1, mockDto);

expect(result).toEqual({
id: existingAssetId,
path: expect.any(String),
status: AssetStatus.Partial,
isDuplicate: true,
});

expect(mocks.asset.getUploadAssetIdByChecksum).toHaveBeenCalledWith(authStub.user1.user.id, mockDto.checksum);
});

it('should throw InternalServerErrorException if duplicate lookup fails', async () => {
const checksumError = new Error('duplicate key value violates unique constraint');
(checksumError as any).constraint_name = ASSET_CHECKSUM_CONSTRAINT;

mocks.asset.createWithMetadata.mockRejectedValue(checksumError);
// eslint-disable-next-line unicorn/no-useless-undefined
mocks.asset.getUploadAssetIdByChecksum.mockResolvedValue(undefined);

await expect(sut.onStart(authStub.user1, mockDto)).rejects.toThrow(InternalServerErrorException);
});

it('should throw InternalServerErrorException for non-checksum errors', async () => {
const genericError = new Error('database connection failed');
mocks.asset.createWithMetadata.mockRejectedValue(genericError);

await expect(sut.onStart(authStub.user1, mockDto)).rejects.toThrow(InternalServerErrorException);
});

it('should include iCloud metadata when provided', async () => {
const dtoWithICloud = {
...mockDto,
assetData: {
...mockDto.assetData,
iCloudId: 'icloud-123',
},
};

mocks.crypto.randomUUID.mockReturnValue(factory.uuid());

await sut.onStart(authStub.user1, dtoWithICloud);

expect(mocks.asset.createWithMetadata).toHaveBeenCalledWith(expect.anything(), expect.anything(), [
{ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'icloud-123' } },
]);
});

it('should set isFavorite when true', async () => {
const favoriteDto = {
...mockDto,
assetData: {
...mockDto.assetData,
isFavorite: true,
},
};

mocks.crypto.randomUUID.mockReturnValue(factory.uuid());

await sut.onStart(authStub.user1, favoriteDto);

expect(mocks.asset.createWithMetadata).toHaveBeenCalledWith(
expect.objectContaining({
isFavorite: true,
}),
expect.anything(),
undefined,
);
});
});

describe('onComplete', () => {
const assetId = factory.uuid();
const path = `/upload/${assetId}/file.jpg`;
const fileModifiedAt = new Date('2025-01-01T12:00:00Z');

it('should mark asset as complete and queue metadata extraction job', async () => {
await sut.onComplete({ id: assetId, path, fileModifiedAt });

expect(mocks.asset.setComplete).toHaveBeenCalledWith(assetId);
expect(mocks.job.queue).toHaveBeenCalledWith({
name: JobName.AssetExtractMetadata,
data: { id: assetId, source: 'upload' },
});
});

it('should update file modification time', async () => {
await sut.onComplete({ id: assetId, path, fileModifiedAt });

expect(mocks.storage.utimes).toHaveBeenCalledWith(path, expect.any(Date), fileModifiedAt);
});

it('should handle utimes failure gracefully', async () => {
mocks.storage.utimes.mockRejectedValue(new Error('Permission denied'));

await expect(sut.onComplete({ id: assetId, path, fileModifiedAt })).resolves.toBeUndefined();

// Should still complete asset and queue job
expect(mocks.asset.setComplete).toHaveBeenCalled();
expect(mocks.job.queue).toHaveBeenCalled();
});

it('should retry setComplete on transient failures', async () => {
mocks.asset.setComplete
.mockRejectedValueOnce(new Error('Transient error'))
.mockRejectedValueOnce(new Error('Transient error'))
.mockResolvedValue();

await sut.onComplete({ id: assetId, path, fileModifiedAt });

expect(mocks.asset.setComplete).toHaveBeenCalledTimes(3);
});

it('should retry job queueing on transient failures', async () => {
mocks.job.queue.mockRejectedValueOnce(new Error('Transient error')).mockResolvedValue();

await sut.onComplete({ id: assetId, path, fileModifiedAt });

expect(mocks.job.queue).toHaveBeenCalledTimes(2);
});
});

describe('onCancel', () => {
const assetId = factory.uuid();
const path = `/upload/${assetId}/file.jpg`;

it('should delete file and remove asset record', async () => {
await sut.onCancel(assetId, path);

expect(mocks.storage.unlink).toHaveBeenCalledWith(path);
expect(mocks.asset.removeAndDecrementQuota).toHaveBeenCalledWith(assetId);
});

it('should retry unlink on transient failures', async () => {
mocks.storage.unlink.mockRejectedValueOnce(new Error('Transient error')).mockResolvedValue();

await sut.onCancel(assetId, path);

expect(mocks.storage.unlink).toHaveBeenCalledTimes(2);
});

it('should retry removeAndDecrementQuota on transient failures', async () => {
mocks.asset.removeAndDecrementQuota.mockRejectedValueOnce(new Error('Transient error')).mockResolvedValue();

await sut.onCancel(assetId, path);

expect(mocks.asset.removeAndDecrementQuota).toHaveBeenCalledTimes(2);
});
});

describe('removeStaleUploads', () => {
it('should queue cleanup jobs for stale partial assets', async () => {
const staleAssets = [{ id: factory.uuid() }, { id: factory.uuid() }, { id: factory.uuid() }];

mocks.assetJob.streamForPartialAssetCleanupJob.mockReturnValue(
// eslint-disable-next-line @typescript-eslint/require-await
(async function* () {
for (const asset of staleAssets) {
yield asset;
}
})(),
);

await sut.removeStaleUploads();

expect(mocks.assetJob.streamForPartialAssetCleanupJob).toHaveBeenCalledWith(expect.any(Date));

expect(mocks.job.queueAll).toHaveBeenCalledWith([
{ name: JobName.PartialAssetCleanup, data: staleAssets[0] },
{ name: JobName.PartialAssetCleanup, data: staleAssets[1] },
{ name: JobName.PartialAssetCleanup, data: staleAssets[2] },
]);
});

it('should batch cleanup jobs', async () => {
const assets = Array.from({ length: 1500 }, () => ({ id: factory.uuid() }));

mocks.assetJob.streamForPartialAssetCleanupJob.mockReturnValue(
// eslint-disable-next-line @typescript-eslint/require-await
(async function* () {
for (const asset of assets) {
yield asset;
}
})(),
);

await sut.removeStaleUploads();

// Should be called twice: once for 1000, once for 500
expect(mocks.job.queueAll).toHaveBeenCalledTimes(2);
});

it('should handle empty stream', async () => {
mocks.assetJob.streamForPartialAssetCleanupJob.mockReturnValue((async function* () {})());

await sut.removeStaleUploads();

expect(mocks.job.queueAll).toHaveBeenCalledWith([]);
});
});

describe('removeStaleUpload', () => {
const assetId = factory.uuid();
const path = `/upload/${assetId}/file.jpg`;

it('should skip if asset not found', async () => {
// eslint-disable-next-line unicorn/no-useless-undefined
mocks.assetJob.getForPartialAssetCleanupJob.mockResolvedValue(undefined);

const result = await sut.removeStaleUpload({ id: assetId });

expect(result).toBe(JobStatus.Skipped);
expect(mocks.storage.stat).not.toHaveBeenCalled();
});

it('should complete asset if file matches expected state', async () => {
const checksum = Buffer.from('checksum');
const fileModifiedAt = new Date();

mocks.assetJob.getForPartialAssetCleanupJob.mockResolvedValue({
path,
checksum,
fileModifiedAt,
size: 1024,
});

mocks.storage.stat.mockResolvedValue({ size: 1024 } as any);
mocks.crypto.hashFile.mockResolvedValue(checksum);

const result = await sut.removeStaleUpload({ id: assetId });

expect(result).toBe(JobStatus.Success);
expect(mocks.asset.setComplete).toHaveBeenCalledWith(assetId);
expect(mocks.storage.unlink).not.toHaveBeenCalled();
});

it('should cancel asset if file size does not match', async () => {
mocks.assetJob.getForPartialAssetCleanupJob.mockResolvedValue({
path,
checksum: Buffer.from('checksum'),
fileModifiedAt: new Date(),
size: 1024,
});

mocks.storage.stat.mockResolvedValue({ size: 512 } as any);

const result = await sut.removeStaleUpload({ id: assetId });

expect(result).toBe(JobStatus.Success);
expect(mocks.storage.unlink).toHaveBeenCalledWith(path);
expect(mocks.asset.removeAndDecrementQuota).toHaveBeenCalledWith(assetId);
});

it('should cancel asset if checksum does not match', async () => {
mocks.assetJob.getForPartialAssetCleanupJob.mockResolvedValue({
path,
checksum: Buffer.from('expected-checksum'),
fileModifiedAt: new Date(),
size: 1024,
});

mocks.storage.stat.mockResolvedValue({ size: 1024 } as any);
mocks.crypto.hashFile.mockResolvedValue(Buffer.from('actual-checksum'));

const result = await sut.removeStaleUpload({ id: assetId });

expect(result).toBe(JobStatus.Success);
expect(mocks.storage.unlink).toHaveBeenCalledWith(path);
expect(mocks.asset.removeAndDecrementQuota).toHaveBeenCalledWith(assetId);
});

it('should cancel asset if file does not exist', async () => {
mocks.assetJob.getForPartialAssetCleanupJob.mockResolvedValue({
path,
checksum: Buffer.from('checksum'),
fileModifiedAt: new Date(),
size: 1024,
});

const error = new Error('File not found') as NodeJS.ErrnoException;
error.code = 'ENOENT';
mocks.storage.stat.mockRejectedValue(error);

const result = await sut.removeStaleUpload({ id: assetId });

expect(result).toBe(JobStatus.Success);
expect(mocks.asset.removeAndDecrementQuota).toHaveBeenCalledWith(assetId);
});

it('should cancel asset if stat fails with permission error', async () => {
mocks.assetJob.getForPartialAssetCleanupJob.mockResolvedValue({
path,
checksum: Buffer.from('checksum'),
fileModifiedAt: new Date(),
size: 1024,
});

const error = new Error('Permission denied') as NodeJS.ErrnoException;
error.code = 'EACCES';
mocks.storage.stat.mockRejectedValue(error);

const result = await sut.removeStaleUpload({ id: assetId });

expect(result).toBe(JobStatus.Success);
expect(mocks.asset.removeAndDecrementQuota).toHaveBeenCalledWith(assetId);
});
});
});
@@ -1,454 +0,0 @@
import { BadRequestException, Injectable, InternalServerErrorException } from '@nestjs/common';
import { Response } from 'express';
import { DateTime } from 'luxon';
import { createHash } from 'node:crypto';
import { dirname, extname, join } from 'node:path';
import { Readable, Writable } from 'node:stream';
import { SystemConfig } from 'src/config';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { StorageCore } from 'src/cores/storage.core';
import { OnEvent, OnJob } from 'src/decorators';
import { GetUploadStatusDto, ResumeUploadDto, StartUploadDto } from 'src/dtos/asset-upload.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import {
AssetMetadataKey,
AssetStatus,
AssetType,
AssetVisibility,
ImmichWorker,
JobName,
JobStatus,
QueueName,
StorageFolder,
} from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import { JobItem, JobOf } from 'src/types';
import { isAssetChecksumConstraint } from 'src/utils/database';
import { mimeTypes } from 'src/utils/mime-types';
import { withRetry } from 'src/utils/misc';

export const MAX_RUFH_INTEROP_VERSION = 8;

@Injectable()
export class AssetUploadService extends BaseService {
// This is used to proactively abort previous requests for the same asset
// when a new one arrives. The previous request still holds the asset lock
// and will prevent the new request from proceeding until the previous one
// times out. As normal client behavior will not have concurrent requests,
// we can assume the previous request has already failed on the client end.
private activeRequests = new Map<string, { req: Readable; startTime: Date }>();

@OnEvent({ name: 'UploadAbort', workers: [ImmichWorker.Api], server: true })
onUploadAbort({ assetId, abortTime }: ArgOf<'UploadAbort'>) {
const entry = this.activeRequests.get(assetId);
if (!entry) {
return false;
}
if (abortTime > entry.startTime) {
entry.req.destroy();
this.activeRequests.delete(assetId);
}
return true;
}

async startUpload(auth: AuthDto, req: Readable, res: Response, dto: StartUploadDto): Promise<void> {
this.logger.verboseFn(() => `Starting upload: ${JSON.stringify(dto)}`);
const { uploadComplete, assetData, uploadLength, contentLength, version } = dto;
const isComplete = uploadComplete !== false;
const isResumable = version && uploadComplete !== undefined;
const { backup } = await this.getConfig({ withCache: true });

const asset = await this.onStart(auth, dto);
if (asset.isDuplicate) {
if (asset.status !== AssetStatus.Partial) {
return this.sendAlreadyCompleted(res);
}

const location = `/api/upload/${asset.id}`;
if (isResumable) {
this.sendInterimResponse(res, location, version, this.getUploadLimits(backup));
// this is a 5xx to indicate the client should do offset retrieval and resume
res.status(500).send('Incomplete asset already exists');
return;
}
}

if (isComplete && uploadLength !== contentLength) {
return this.sendInconsistentLength(res);
}

const location = `/api/upload/${asset.id}`;
if (isResumable) {
this.sendInterimResponse(res, location, version, this.getUploadLimits(backup));
}

this.addRequest(asset.id, req);
await this.databaseRepository.withUuidLock(asset.id, async () => {
// conventional upload, check status again with lock acquired before overwriting
if (asset.isDuplicate) {
const existingAsset = await this.assetRepository.getCompletionMetadata(asset.id, auth.user.id);
if (existingAsset?.status !== AssetStatus.Partial) {
return this.sendAlreadyCompleted(res);
}
}
await this.storageRepository.mkdir(dirname(asset.path));

let checksumBuffer: Buffer | undefined;
const writeStream = asset.isDuplicate
? this.storageRepository.createWriteStream(asset.path)
: this.storageRepository.createOrAppendWriteStream(asset.path);
this.pipe(req, writeStream, contentLength);
if (isComplete) {
const hash = createHash('sha1');
req.on('data', (data: Buffer) => hash.update(data));
writeStream.on('finish', () => (checksumBuffer = hash.digest()));
}
await new Promise((resolve, reject) => writeStream.on('close', resolve).on('error', reject));
if (isResumable) {
this.setCompleteHeader(res, version, uploadComplete);
}
if (!isComplete) {
res.status(201).set('Location', location).setHeader('Upload-Limit', this.getUploadLimits(backup)).send();
return;
}
if (dto.checksum.compare(checksumBuffer!) !== 0) {
return await this.sendChecksumMismatch(res, asset.id, asset.path);
}

await this.onComplete({ id: asset.id, path: asset.path, fileModifiedAt: assetData.fileModifiedAt });
res.status(200).send({ id: asset.id });
});
}

resumeUpload(auth: AuthDto, req: Readable, res: Response, id: string, dto: ResumeUploadDto): Promise<void> {
|
||||
this.logger.verboseFn(() => `Resuming upload for ${id}: ${JSON.stringify(dto)}`);
|
||||
const { uploadComplete, uploadLength, uploadOffset, contentLength, version } = dto;
|
||||
this.setCompleteHeader(res, version, false);
|
||||
this.addRequest(id, req);
|
||||
return this.databaseRepository.withUuidLock(id, async () => {
|
||||
const completionData = await this.assetRepository.getCompletionMetadata(id, auth.user.id);
|
||||
if (!completionData) {
|
||||
res.status(404).send('Asset not found');
|
||||
return;
|
||||
}
|
||||
const { fileModifiedAt, path, status, checksum: providedChecksum, size } = completionData;
|
||||
|
||||
if (status !== AssetStatus.Partial) {
|
||||
return this.sendAlreadyCompleted(res);
|
||||
}
|
||||
|
||||
if (uploadLength && size && size !== uploadLength) {
|
||||
return this.sendInconsistentLength(res);
|
||||
}
|
||||
|
||||
const expectedOffset = await this.getCurrentOffset(path);
|
||||
if (expectedOffset !== uploadOffset) {
|
||||
return this.sendOffsetMismatch(res, expectedOffset, uploadOffset);
|
||||
}
|
||||
|
||||
const newLength = uploadOffset + contentLength;
|
||||
if (uploadLength !== undefined && newLength > uploadLength) {
|
||||
res.status(400).send('Upload would exceed declared length');
|
||||
return;
|
||||
}
|
||||
|
||||
if (contentLength === 0 && !uploadComplete) {
|
||||
res.status(204).setHeader('Upload-Offset', expectedOffset.toString()).send();
|
||||
return;
|
||||
}
|
||||
|
||||
const writeStream = this.storageRepository.createOrAppendWriteStream(path);
|
||||
this.pipe(req, writeStream, contentLength);
|
||||
await new Promise((resolve, reject) => writeStream.on('close', resolve).on('error', reject));
|
||||
this.setCompleteHeader(res, version, uploadComplete);
|
||||
if (!uploadComplete) {
|
||||
try {
|
||||
const offset = await this.getCurrentOffset(path);
|
||||
res.status(204).setHeader('Upload-Offset', offset.toString()).send();
|
||||
} catch {
|
||||
this.logger.error(`Failed to get current offset for ${path} after write`);
|
||||
res.status(500).send();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
const checksum = await this.cryptoRepository.hashFile(path);
|
||||
if (providedChecksum.compare(checksum) !== 0) {
|
||||
return await this.sendChecksumMismatch(res, id, path);
|
||||
}
|
||||
|
||||
await this.onComplete({ id, path, fileModifiedAt });
|
||||
res.status(200).send({ id });
|
||||
});
|
||||
}
|
||||
|
||||
cancelUpload(auth: AuthDto, assetId: string, res: Response): Promise<void> {
|
||||
this.abortExistingRequest(assetId);
|
||||
return this.databaseRepository.withUuidLock(assetId, async () => {
|
||||
const asset = await this.assetRepository.getCompletionMetadata(assetId, auth.user.id);
|
||||
if (!asset) {
|
||||
res.status(404).send('Asset not found');
|
||||
return;
|
||||
}
|
||||
if (asset.status !== AssetStatus.Partial) {
|
||||
return this.sendAlreadyCompleted(res);
|
||||
}
|
||||
await this.onCancel(assetId, asset.path);
|
||||
res.status(204).send();
|
||||
});
|
||||
}
|
||||
|
||||
async getUploadStatus(auth: AuthDto, res: Response, id: string, { version }: GetUploadStatusDto): Promise<void> {
|
||||
this.logger.verboseFn(() => `Getting upload status for ${id} with version ${version}`);
|
||||
const { backup } = await this.getConfig({ withCache: true });
|
||||
this.abortExistingRequest(id);
|
||||
return this.databaseRepository.withUuidLock(id, async () => {
|
||||
const asset = await this.assetRepository.getCompletionMetadata(id, auth.user.id);
|
||||
if (!asset) {
|
||||
res.status(404).send('Asset not found');
|
||||
return;
|
||||
}
|
||||
|
||||
const offset = await this.getCurrentOffset(asset.path);
|
||||
this.setCompleteHeader(res, version, asset.status !== AssetStatus.Partial);
|
||||
res
|
||||
.status(204)
|
||||
.setHeader('Upload-Offset', offset.toString())
|
||||
.setHeader('Cache-Control', 'no-store')
|
||||
.setHeader('Upload-Limit', this.getUploadLimits(backup))
|
||||
.send();
|
||||
});
|
||||
}
|
||||
|
||||
async getUploadOptions(res: Response): Promise<void> {
|
||||
const { backup } = await this.getConfig({ withCache: true });
|
||||
res.status(204).setHeader('Upload-Limit', this.getUploadLimits(backup)).send();
|
||||
}
|
||||
|
||||
@OnJob({ name: JobName.PartialAssetCleanupQueueAll, queue: QueueName.BackgroundTask })
|
||||
async removeStaleUploads(): Promise<void> {
|
||||
const config = await this.getConfig({ withCache: false });
|
||||
const createdBefore = DateTime.now().minus({ hours: config.backup.upload.maxAgeHours }).toJSDate();
|
||||
let jobs: JobItem[] = [];
|
||||
const assets = this.assetJobRepository.streamForPartialAssetCleanupJob(createdBefore);
|
||||
for await (const asset of assets) {
|
||||
jobs.push({ name: JobName.PartialAssetCleanup, data: asset });
|
||||
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
|
||||
await this.jobRepository.queueAll(jobs);
|
||||
jobs = [];
|
||||
}
|
||||
}
|
||||
await this.jobRepository.queueAll(jobs);
|
||||
}
|
||||
|
||||
  @OnJob({ name: JobName.PartialAssetCleanup, queue: QueueName.BackgroundTask })
  removeStaleUpload({ id }: JobOf<JobName.PartialAssetCleanup>): Promise<JobStatus> {
    return this.databaseRepository.withUuidLock(id, async () => {
      const asset = await this.assetJobRepository.getForPartialAssetCleanupJob(id);
      if (!asset) {
        return JobStatus.Skipped;
      }
      const { checksum, fileModifiedAt, path, size } = asset;
      try {
        const stat = await this.storageRepository.stat(path);
        if (size === stat.size && checksum === (await this.cryptoRepository.hashFile(path))) {
          await this.onComplete({ id, path, fileModifiedAt });
          return JobStatus.Success;
        }
      } catch (error: any) {
        this.logger.debugFn(() => `Failed to check upload file ${path}: ${error.message}`);
      }
      await this.onCancel(id, path);
      return JobStatus.Success;
    });
  }

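  // Creates the Partial asset record and reserves its on-disk path. A checksum
  // unique-constraint violation is treated as an interrupted or duplicate upload
  // of the same file: the existing asset is looked up by checksum and returned
  // with isDuplicate set, instead of failing the request.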
  async onStart(
    auth: AuthDto,
    { assetData, checksum, uploadLength }: StartUploadDto,
  ): Promise<{ id: string; path: string; status: AssetStatus; isDuplicate: boolean }> {
    const assetId = this.cryptoRepository.randomUUID();
    const folder = StorageCore.getNestedFolder(StorageFolder.Upload, auth.user.id, assetId);
    const extension = extname(assetData.filename);
    const path = join(folder, `${assetId}${extension}`);
    const type = mimeTypes.assetType(path);

    if (type === AssetType.Other) {
      throw new BadRequestException(`${assetData.filename} is an unsupported file type`);
    }

    this.validateQuota(auth, uploadLength);

    try {
      await this.assetRepository.createWithMetadata(
        {
          id: assetId,
          ownerId: auth.user.id,
          libraryId: null,
          checksum,
          originalPath: path,
          deviceAssetId: assetData.deviceAssetId,
          deviceId: assetData.deviceId,
          fileCreatedAt: assetData.fileCreatedAt,
          fileModifiedAt: assetData.fileModifiedAt,
          localDateTime: assetData.fileCreatedAt,
          type,
          isFavorite: assetData.isFavorite,
          livePhotoVideoId: assetData.livePhotoVideoId,
          visibility: AssetVisibility.Hidden,
          originalFileName: assetData.filename,
          status: AssetStatus.Partial,
        },
        uploadLength,
        assetData.iCloudId ? [{ key: AssetMetadataKey.MobileApp, value: { iCloudId: assetData.iCloudId } }] : undefined,
      );
    } catch (error: any) {
      if (!isAssetChecksumConstraint(error)) {
        this.logger.error(`Error creating upload asset record: ${error.message}`);
        throw new InternalServerErrorException('Error creating asset');
      }

      const duplicate = await this.assetRepository.getUploadAssetIdByChecksum(auth.user.id, checksum);
      if (!duplicate) {
        throw new InternalServerErrorException('Error locating duplicate for checksum constraint');
      }

      return { id: duplicate.id, path, status: duplicate.status, isDuplicate: true };
    }

    return { id: assetId, path, status: AssetStatus.Partial, isDuplicate: false };
  }

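  // Finalizes a fully received upload: flips the asset out of Partial status,
  // restores the original file modification time, and queues metadata extraction.
  // Each step is wrapped in withRetry so a transient failure doesn't strand the asset.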
  async onComplete({ id, path, fileModifiedAt }: { id: string; path: string; fileModifiedAt: Date }) {
    this.logger.log('Completing upload for asset', id);
    const jobData = { name: JobName.AssetExtractMetadata, data: { id, source: 'upload' } } as const;
    await withRetry(() => this.assetRepository.setComplete(id));
    try {
      await withRetry(() => this.storageRepository.utimes(path, new Date(), fileModifiedAt));
    } catch (error: any) {
      this.logger.error(`Failed to update times for ${path}: ${error.message}`);
    }
    await withRetry(() => this.jobRepository.queue(jobData));
  }

  async onCancel(assetId: string, path: string): Promise<void> {
    this.logger.log('Cancelling upload for asset', assetId);
    await withRetry(() => this.storageRepository.unlink(path));
    await withRetry(() => this.assetRepository.removeAndDecrementQuota(assetId));
  }

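  // Tracks the single active request per asset so a newer request can preempt an
  // older one; the map entry is removed on socket close, but only if it still
  // points at this request (a replacement may already have been registered).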
  private addRequest(assetId: string, req: Readable) {
    const addTime = new Date();
    const activeRequest = { req, startTime: addTime };
    this.abortExistingRequest(assetId, addTime);
    this.activeRequests.set(assetId, activeRequest);
    req.on('close', () => {
      if (this.activeRequests.get(assetId)?.req === req) {
        this.activeRequests.delete(assetId);
      }
    });
  }

  private abortExistingRequest(assetId: string, abortTime = new Date()) {
    const abortEvent = { assetId, abortTime };
    // only emit if we didn't just abort it ourselves
    if (!this.onUploadAbort(abortEvent)) {
      this.websocketRepository.serverSend('UploadAbort', abortEvent);
    }
  }

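  // Manual pipe with backpressure: pause the request stream whenever the write
  // buffer is full and resume on 'drain'. A client disconnect before `size` bytes
  // have arrived surfaces as an error on the write stream.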
  private pipe(req: Readable, writeStream: Writable, size: number) {
    let receivedLength = 0;
    req.on('data', (data: Buffer) => {
      receivedLength += data.length;
      if (!writeStream.write(data)) {
        req.pause();
        writeStream.once('drain', () => req.resume());
      }
    });

    req.on('close', () => {
      if (receivedLength < size) {
        writeStream.emit('error', new Error('Request closed before all data received'));
      }
      writeStream.end();
    });
  }

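  // Writes a `104 Upload Resumption Supported` interim response directly to the
  // underlying socket, since Express itself cannot emit 1xx interim responses.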
  private sendInterimResponse({ socket }: Response, location: string, interopVersion: number, limits: string): void {
    if (socket && !socket.destroyed) {
      // Express doesn't understand interim responses, so write directly to socket
      socket.write(
        'HTTP/1.1 104 Upload Resumption Supported\r\n' +
          `Location: ${location}\r\n` +
          `Upload-Limit: ${limits}\r\n` +
          `Upload-Draft-Interop-Version: ${interopVersion}\r\n\r\n`,
      );
    }
  }

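  // The 400/409 failure responses below use RFC 9457 problem+json bodies with the
  // problem types registered by the IETF resumable-uploads draft.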
  private sendInconsistentLength(res: Response): void {
    res.status(400).contentType('application/problem+json').send({
      type: 'https://iana.org/assignments/http-problem-types#inconsistent-upload-length',
      title: 'inconsistent length values for upload',
    });
  }

  private sendAlreadyCompleted(res: Response): void {
    res.status(400).contentType('application/problem+json').send({
      type: 'https://iana.org/assignments/http-problem-types#completed-upload',
      title: 'upload is already completed',
    });
  }

  private sendOffsetMismatch(res: Response, expected: number, actual: number): void {
    res.status(409).contentType('application/problem+json').setHeader('Upload-Offset', expected.toString()).send({
      type: 'https://iana.org/assignments/http-problem-types#mismatching-upload-offset',
      title: 'offset from request does not match offset of resource',
      'expected-offset': expected,
      'provided-offset': actual,
    });
  }

  private sendChecksumMismatch(res: Response, assetId: string, path: string) {
    this.logger.warn(`Removing upload asset ${assetId} due to checksum mismatch`);
    res.status(460).send('File on server does not match provided checksum');
    return this.onCancel(assetId, path);
  }

  private validateQuota(auth: AuthDto, size: number): void {
    const { quotaSizeInBytes: quotaLimit, quotaUsageInBytes: currentUsage } = auth.user;
    if (quotaLimit === null) {
      return;
    }

    if (quotaLimit < currentUsage + size) {
      throw new BadRequestException('Quota has been exceeded!');
    }
  }

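  // The resumable-upload offset is simply the number of bytes already on disk;
  // a missing file (ENOENT) means nothing has been written yet, i.e. offset 0.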
  private async getCurrentOffset(path: string): Promise<number> {
    try {
      const stat = await this.storageRepository.stat(path);
      return stat.size;
    } catch (error: any) {
      if ((error as NodeJS.ErrnoException)?.code === 'ENOENT') {
        return 0;
      }
      throw error;
    }
  }

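  // Interop versions <= 3 of the draft spec used an inverted Upload-Incomplete
  // header; later versions use Upload-Complete. Both encode structured-field
  // booleans as ?1 / ?0.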
  private setCompleteHeader(res: Response, interopVersion: number | undefined, isComplete: boolean): void {
    if (interopVersion === undefined || interopVersion > 3) {
      res.setHeader('Upload-Complete', isComplete ? '?1' : '?0');
    } else {
      res.setHeader('Upload-Incomplete', isComplete ? '?0' : '?1');
    }
  }

  private getUploadLimits({ upload }: SystemConfig['backup']) {
    return `min-size=1, max-age=${upload.maxAgeHours * 3600}`;
  }
}

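For orientation, the client-side flow against these endpoints looks roughly like the sketch below. This is a minimal illustration, not code from this diff: the `/api/upload/:id` route, the interop version value, and the `resumeUpload` helper are assumptions for the example, while the header names and the 409 offset-mismatch handling mirror what the service above implements.

// Hypothetical client-side resume flow; route and interop version are assumptions.
async function resumeUpload(baseUrl: string, id: string, file: Blob, token: string): Promise<void> {
  const common = { Authorization: `Bearer ${token}`, 'Upload-Draft-Interop-Version': '8' };

  // 1. Ask the server how many bytes it already has (handled by getUploadStatus above).
  const status = await fetch(`${baseUrl}/api/upload/${id}`, { method: 'HEAD', headers: common });
  if (status.headers.get('Upload-Complete') === '?1') {
    return; // nothing left to send
  }
  const offset = Number(status.headers.get('Upload-Offset') ?? 0);

  // 2. Send only the remaining bytes, declaring the offset they start at.
  const append = await fetch(`${baseUrl}/api/upload/${id}`, {
    method: 'PATCH',
    headers: {
      ...common,
      'Upload-Offset': String(offset),
      'Upload-Complete': '?1', // this request carries the rest of the file
      'Content-Type': 'application/partial-upload',
    },
    body: file.slice(offset),
  });

  // On a 409 the server reports its own Upload-Offset (see sendOffsetMismatch);
  // a real client would re-read that offset and retry rather than give up.
  if (!append.ok) {
    throw new Error(`append failed with status ${append.status}`);
  }
}
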
@@ -3,7 +3,6 @@ import { AlbumService } from 'src/services/album.service';
import { ApiKeyService } from 'src/services/api-key.service';
import { ApiService } from 'src/services/api.service';
import { AssetMediaService } from 'src/services/asset-media.service';
import { AssetUploadService } from 'src/services/asset-upload.service';
import { AssetService } from 'src/services/asset.service';
import { AuditService } from 'src/services/audit.service';
import { AuthAdminService } from 'src/services/auth-admin.service';
@@ -50,7 +49,6 @@ export const services = [
  AlbumService,
  ApiService,
  AssetMediaService,
  AssetUploadService,
  AssetService,
  AuditService,
  AuthService,

@@ -48,7 +48,6 @@ describe(JobService.name, () => {
        { name: JobName.UserSyncUsage },
        { name: JobName.AssetGenerateThumbnailsQueueAll, data: { force: false } },
        { name: JobName.FacialRecognitionQueueAll, data: { force: false, nightly: true } },
        { name: JobName.PartialAssetCleanupQueueAll },
      ]);
    });
  });

@@ -303,10 +303,6 @@ export class JobService extends BaseService {
      jobs.push({ name: JobName.FacialRecognitionQueueAll, data: { force: false, nightly: true } });
    }

    if (config.nightlyTasks.removeStaleUploads) {
      jobs.push({ name: JobName.PartialAssetCleanupQueueAll });
    }

    await this.jobRepository.queueAll(jobs);
  }
Some files were not shown because too many files have changed in this diff.