Compare commits

...

16 Commits

Author SHA1 Message Date
mertalev
87e0e9c6ce linting 2026-04-27 20:10:59 -04:00
mertalev
2f75d323e3 make timeBase non-nullable 2026-04-27 19:24:43 -04:00
mertalev
80ae5b4994 update test-assets commit 2026-04-23 19:58:15 -04:00
mertalev
3ec35c7aae install noble 2026-04-23 19:30:28 -04:00
mertalev
47b5384cd4 install ffmpeg 2026-04-23 19:19:24 -04:00
mertalev
2dd9eefafe medium tests 2026-04-23 18:21:10 -04:00
mertalev
09d878639f use totalDuration instead of format.duration 2026-04-23 16:00:03 -04:00
mertalev
4120463e05 simplify tests 2026-04-22 18:01:53 -04:00
mertalev
510f33c8eb redundant variable 2026-04-22 17:41:08 -04:00
mertalev
02c717a14f add comments 2026-04-22 17:38:15 -04:00
mertalev
de66598087 linting 2026-04-22 17:05:20 -04:00
mertalev
8f05ce663b formatting 2026-04-22 16:37:42 -04:00
mertalev
215528bf81 duplicate constant 2026-04-22 16:08:59 -04:00
mertalev
6d81bb60b4 revert colorspace change 2026-04-22 16:08:32 -04:00
mertalev
246df2ad16 earlier duration check 2026-04-22 16:08:32 -04:00
mertalev
f6db3ade4b track video metadata 2026-04-22 16:08:32 -04:00
30 changed files with 1720 additions and 389 deletions

View File

@@ -392,6 +392,18 @@ jobs:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Install ffmpeg
run: |
version=7.1.3-5
sha256=2df2f182a4b8361f26dcbaf711514d770cf51c6282ddbbf8e1eae584b7e55b4e
deb="jellyfin-ffmpeg7_${version}-noble_amd64.deb"
curl -fsSL -o "/tmp/${deb}" "https://github.com/jellyfin/jellyfin-ffmpeg/releases/download/v${version}/${deb}"
echo "${sha256} /tmp/${deb}" | sha256sum -c -
sudo apt-get update
sudo apt-get install -y "/tmp/${deb}"
sudo ln -sf /usr/lib/jellyfin-ffmpeg/ffmpeg /usr/local/bin/ffmpeg
sudo ln -sf /usr/lib/jellyfin-ffmpeg/ffprobe /usr/local/bin/ffprobe
ffprobe -version | head -1
- name: Run pnpm install
run: SHARP_IGNORE_GLOBAL_LIBVIPS=true pnpm install --frozen-lockfile
- name: Run medium tests

View File

@@ -48,6 +48,7 @@ export default typescriptEslint.config([
'unicorn/import-style': 'off',
'unicorn/prefer-structured-clone': 'off',
'unicorn/no-for-loop': 'off',
'unicorn/no-array-sort': 'off',
'@typescript-eslint/await-thenable': 'error',
'@typescript-eslint/no-misused-promises': 'error',
'require-await': 'off',

View File

@@ -597,6 +597,133 @@ export enum ExifOrientation {
Rotate270CW = 8,
}
/**
 * ITU-T H.273 colour primaries codes.
 * Member values mirror the spec's code points, so gaps (e.g. 3) are intentional.
 */
export enum ColorPrimaries {
Reserved = 0,
Bt709 = 1, // HDTV / sRGB primaries
Unknown = 2,
Bt470M = 4,
Bt470Bg = 5, // PAL/SECAM
Smpte170M = 6, // NTSC
Smpte240M = 7,
Film = 8,
Bt2020 = 9, // UHDTV wide gamut
Smpte428 = 10,
Smpte431 = 11, // DCI-P3
Smpte432 = 12, // Display P3
Ebu3213 = 22,
}
/**
 * ITU-T H.273 transfer characteristics codes.
 * Member values mirror the spec's code points, so gaps (e.g. 3) are intentional.
 */
export enum ColorTransfer {
Reserved = 0,
Bt709 = 1,
Unknown = 2,
Bt470M = 4,
Bt470Bg = 5,
Smpte170M = 6,
Smpte240M = 7,
Linear = 8,
Log100 = 9,
Log316 = 10,
Iec6196624 = 11,
Bt1361E = 12,
Iec6196621 = 13, // sRGB-like transfer
Bt202010 = 14,
Bt202012 = 15,
Smpte2084 = 16, // PQ — HDR10 transfer
Smpte428 = 17,
AribStdB67 = 18, // HLG — HDR transfer
}
/**
 * ITU-T H.273 matrix coefficients codes.
 * Member values mirror the spec's code points.
 */
export enum ColorMatrix {
Gbr = 0, // identity / RGB
Bt709 = 1,
Unknown = 2,
Reserved = 3,
Fcc = 4,
Bt470Bg = 5,
Smpte170M = 6,
Smpte240M = 7,
Ycgco = 8,
Bt2020Nc = 9, // BT.2020 non-constant luminance
Bt2020C = 10, // BT.2020 constant luminance
Smpte2085 = 11,
ChromaDerivedNc = 12,
ChromaDerivedC = 13,
Ictcp = 14,
}
/** H.264 `profile_idc` values. */
// H.264 has a few profiles that have the same value but different names, included so lookup by name works
// (the variants sharing a value are distinguished in the bitstream by constraint flags, not by profile_idc)
export enum H264Profile {
ConstrainedBaseline = 66,
// eslint-disable-next-line @typescript-eslint/no-duplicate-enum-values
Baseline = 66,
Main = 77,
Extended = 88,
ConstrainedHigh = 100,
// eslint-disable-next-line @typescript-eslint/no-duplicate-enum-values
ProgressiveHigh = 100,
// eslint-disable-next-line @typescript-eslint/no-duplicate-enum-values
High = 100,
High10 = 110,
High422 = 122,
High444Predictive = 244,
}
/** HEVC `profile_idc` values. */
export enum HevcProfile {
Main = 1,
Main10 = 2, // 10-bit; typical for HDR content
MainStillPicture = 3,
Rext = 4, // range extensions
}
/** AV1 `seq_profile` values. */
export enum Av1Profile {
Main = 0,
High = 1,
Professional = 2,
}
/**
 * MPEG-4 Audio Object Type values for AAC.
 * Values are the AOT code points, so gaps are intentional.
 */
export enum AacProfile {
Main = 1,
Lc = 2, // Low Complexity — the common case
Ssr = 3,
Ltp = 4,
HeAac = 5, // AAC + SBR
Ld = 23,
HeAacv2 = 29, // AAC + SBR + PS
Eld = 39,
XheAac = 42,
}
/**
 * Dolby Vision bitstream profile numbers from the DOVI configuration record.
 * `Dvhe*` profiles carry an HEVC base layer, `Dvav09` an AVC base layer, `Dav110` an AV1 base layer.
 */
export enum DvProfile {
Dvhe03 = 3,
Dvhe04 = 4,
Dvhe05 = 5, // single-layer, proprietary IPT — not backward compatible
Dvhe07 = 7,
Dvhe08 = 8, // single-layer with cross-compatible base (see DvSignalCompatibility)
Dvav09 = 9,
Dav110 = 10,
}
/**
 * Dolby Vision base-layer signal-compatibility ID from the DOVI configuration record.
 * Identifies what the base HEVC/AVC layer renders as on a non-DV decoder.
 */
export enum DvSignalCompatibility {
None = 0, // base layer is not independently viewable
Hdr10 = 1,
Sdr709 = 2,
Hlg = 4,
Sdr2020 = 6,
}
export enum DatabaseExtension {
Cube = 'cube',
EarthDistance = 'earthdistance',

View File

@@ -239,10 +239,71 @@ select
"asset_edit"."assetId" = "asset"."id"
) as agg
) as "edits",
to_json("asset_exif") as "exifInfo"
to_json("asset_exif") as "exifInfo",
(
select
to_json(obj)
from
(
select
"asset_video"."index",
"asset_video"."codecName",
"asset_video"."profile",
"asset_video"."level",
"asset_video"."bitrate",
"asset_exif"."exifImageWidth" as "width",
"asset_exif"."exifImageHeight" as "height",
"asset_video"."pixelFormat",
"asset_video"."frameCount",
"asset_exif"."fps" as "frameRate",
"asset_video"."timeBase",
case
when "asset_exif"."orientation" = '6' then -90
when "asset_exif"."orientation" = '8' then 90
when "asset_exif"."orientation" = '3' then 180
else 0
end as "rotation",
"asset_video"."colorPrimaries",
"asset_video"."colorMatrix",
"asset_video"."colorTransfer",
"asset_video"."dvProfile",
"asset_video"."dvLevel",
"asset_video"."dvBlSignalCompatibilityId"
from
(
select
1
) as "dummy"
where
"asset_video"."assetId" is not null
) as obj
) as "videoStream",
(
select
to_json(obj)
from
(
select
"asset_video"."formatName",
"asset_video"."formatLongName",
case
when "asset"."duration" ~ '^\d{2}:\d{2}:\d{2}\.\d{3}$' then substr(asset.duration, 1, 2)::int * 3600000 + substr(asset.duration, 4, 2)::int * 60000 + substr(asset.duration, 7, 2)::int * 1000 + substr(asset.duration, 10, 3)::int
else 0
end as "duration",
"asset_video"."bitrate"
from
(
select
1
) as "dummy"
where
"asset_video"."assetId" is not null
) as obj
) as "format"
from
"asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
left join "asset_video" on "asset_video"."assetId" = "asset"."id"
where
"asset"."id" = $4
@@ -554,9 +615,91 @@ select
where
"asset_file"."assetId" = "asset"."id"
) as agg
) as "files"
) as "files",
(
select
to_json(obj)
from
(
select
"asset_audio"."index",
"asset_audio"."codecName",
"asset_audio"."profile",
"asset_audio"."bitrate"
from
(
select
1
) as "dummy"
where
"asset_audio"."assetId" is not null
) as obj
) as "audioStream",
(
select
to_json(obj)
from
(
select
"asset_video"."index",
"asset_video"."codecName",
"asset_video"."profile",
"asset_video"."level",
"asset_video"."bitrate",
"asset_exif"."exifImageWidth" as "width",
"asset_exif"."exifImageHeight" as "height",
"asset_video"."pixelFormat",
"asset_video"."frameCount",
"asset_exif"."fps" as "frameRate",
"asset_video"."timeBase",
case
when "asset_exif"."orientation" = '6' then -90
when "asset_exif"."orientation" = '8' then 90
when "asset_exif"."orientation" = '3' then 180
else 0
end as "rotation",
"asset_video"."colorPrimaries",
"asset_video"."colorMatrix",
"asset_video"."colorTransfer",
"asset_video"."dvProfile",
"asset_video"."dvLevel",
"asset_video"."dvBlSignalCompatibilityId"
from
(
select
1
) as "dummy"
where
"asset_video"."assetId" is not null
) as obj
) as "videoStream",
(
select
to_json(obj)
from
(
select
"asset_video"."formatName",
"asset_video"."formatLongName",
case
when "asset"."duration" ~ '^\d{2}:\d{2}:\d{2}\.\d{3}$' then substr(asset.duration, 1, 2)::int * 3600000 + substr(asset.duration, 4, 2)::int * 60000 + substr(asset.duration, 7, 2)::int * 1000 + substr(asset.duration, 10, 3)::int
else 0
end as "duration",
"asset_video"."bitrate"
from
(
select
1
) as "dummy"
where
"asset_video"."assetId" is not null
) as obj
) as "format"
from
"asset"
inner join "asset_exif" on "asset"."id" = "asset_exif"."assetId"
inner join "asset_video" on "asset_video"."assetId" = "asset"."id"
left join "asset_audio" on "asset_audio"."assetId" = "asset"."id"
where
"asset"."id" = $1
and "asset"."type" = 'VIDEO'

View File

@@ -9,6 +9,7 @@ import { DB } from 'src/schema';
import {
anyUuid,
asUuid,
withAudioStream,
withDefaultVisibility,
withEdits,
withExif,
@@ -16,6 +17,8 @@ import {
withFaces,
withFilePath,
withFiles,
withVideoFormat,
withVideoStream,
} from 'src/utils/database';
import { mimeTypes } from 'src/utils/mime-types';
@@ -134,6 +137,9 @@ export class AssetJobRepository {
)
.select(withEdits)
.$call(withExifInner)
.leftJoin('asset_video', 'asset_video.assetId', 'asset.id')
.select((eb) => withVideoStream(eb).as('videoStream'))
.select((eb) => withVideoFormat(eb).as('format'))
.where('asset.id', '=', id)
.executeTakeFirst();
}
@@ -333,8 +339,14 @@ export class AssetJobRepository {
getForVideoConversion(id: string) {
return this.db
.selectFrom('asset')
.innerJoin('asset_exif', 'asset.id', 'asset_exif.assetId')
.innerJoin('asset_video', 'asset_video.assetId', 'asset.id')
.leftJoin('asset_audio', 'asset_audio.assetId', 'asset.id')
.select(['asset.id', 'asset.ownerId', 'asset.originalPath'])
.select(withFiles)
.select((eb) => withAudioStream(eb).as('audioStream'))
.select((eb) => withVideoStream(eb).$notNull().as('videoStream'))
.select((eb) => withVideoFormat(eb).$notNull().as('format'))
.where('asset.id', '=', id)
.where('asset.type', '=', sql.lit(AssetType.Video))
.executeTakeFirst();

View File

@@ -19,6 +19,7 @@ import { Chunked, ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetFileType, AssetOrder, AssetStatus, AssetType, AssetVisibility } from 'src/enum';
import { DB } from 'src/schema';
import { AssetAudioTable, AssetKeyframeTable, AssetVideoTable } from 'src/schema/tables/asset-av.table';
import { AssetExifTable } from 'src/schema/tables/asset-exif.table';
import { AssetFileTable } from 'src/schema/tables/asset-file.table';
import { AssetJobStatusTable } from 'src/schema/tables/asset-job-status.table';
@@ -124,6 +125,14 @@ interface GetByIdsRelations {
edits?: boolean;
}
type UpsertExifOptions = {
exif: Insertable<AssetExifTable>;
audio?: Insertable<AssetAudioTable>;
video?: Insertable<AssetVideoTable>;
keyframes?: Insertable<AssetKeyframeTable>;
lockedPropertiesBehavior: 'override' | 'append' | 'skip';
};
const distinctLocked = <T extends LockableProperty[] | null>(eb: ExpressionBuilder<DB, 'asset_exif'>, columns: T) =>
sql<T>`nullif(array(select distinct unnest(${eb.ref('asset_exif.lockedProperties')} || ${columns})), '{}')`;
@@ -161,15 +170,76 @@ export class AssetRepository {
@GenerateSql({
params: [
{ dateTimeOriginal: DummyValue.DATE, lockedProperties: ['dateTimeOriginal'] },
{ lockedPropertiesBehavior: 'append' },
{
exif: { dateTimeOriginal: DummyValue.DATE, lockedProperties: ['dateTimeOriginal'] },
lockedPropertiesBehavior: 'append',
},
],
})
async upsertExif(
exif: Insertable<AssetExifTable>,
{ lockedPropertiesBehavior }: { lockedPropertiesBehavior: 'override' | 'append' | 'skip' },
): Promise<void> {
await this.db
async upsertExif({ exif, audio, video, keyframes, lockedPropertiesBehavior }: UpsertExifOptions): Promise<void> {
let query = this.db;
if (audio) {
(query as any) = this.db.with('audio', (qb) =>
qb
.insertInto('asset_audio')
.values(audio)
.onConflict((oc) =>
oc.column('assetId').doUpdateSet(({ ref }) => ({
bitrate: ref('asset_audio.bitrate'),
index: ref('asset_audio.index'),
profile: ref('asset_audio.profile'),
codecName: ref('asset_audio.codecName'),
})),
),
);
}
if (video) {
(query as any) = query.with('video', (qb) =>
qb
.insertInto('asset_video')
.values(video)
.onConflict((oc) =>
oc.column('assetId').doUpdateSet(({ ref }) => ({
bitrate: ref('asset_video.bitrate'),
timeBase: ref('asset_video.timeBase'),
index: ref('asset_video.index'),
profile: ref('asset_video.profile'),
level: ref('asset_video.level'),
colorPrimaries: ref('asset_video.colorPrimaries'),
colorTransfer: ref('asset_video.colorTransfer'),
colorMatrix: ref('asset_video.colorMatrix'),
dvProfile: ref('asset_video.dvProfile'),
dvLevel: ref('asset_video.dvLevel'),
dvBlSignalCompatibilityId: ref('asset_video.dvBlSignalCompatibilityId'),
codecName: ref('asset_video.codecName'),
formatName: ref('asset_video.formatName'),
formatLongName: ref('asset_video.formatLongName'),
pixelFormat: ref('asset_video.pixelFormat'),
})),
),
);
}
if (keyframes) {
(query as any) = query.with('keyframe', (qb) =>
qb
.insertInto('asset_keyframe')
.values(keyframes)
.onConflict((oc) =>
oc.column('assetId').doUpdateSet(({ ref }) => ({
pts: ref('asset_keyframe.pts'),
accDuration: ref('asset_keyframe.accDuration'),
ownDuration: ref('asset_keyframe.ownDuration'),
totalDuration: ref('asset_keyframe.totalDuration'),
packetCount: ref('asset_keyframe.packetCount'),
outputFrames: ref('asset_keyframe.outputFrames'),
})),
),
);
}
await query
.insertInto('asset_exif')
.values(exif)
.onConflict((oc) =>

View File

@@ -1,14 +1,30 @@
import { Injectable } from '@nestjs/common';
import { ExifDateTime, exiftool, WriteTags } from 'exiftool-vendored';
import ffmpeg, { FfprobeData } from 'fluent-ffmpeg';
import ffmpeg, { FfprobeData, FfprobeStream } from 'fluent-ffmpeg';
import _ from 'lodash';
import { Duration } from 'luxon';
import { execFile as execFileCb } from 'node:child_process';
import fs from 'node:fs/promises';
import { Writable } from 'node:stream';
import { promisify } from 'node:util';
import sharp from 'sharp';
import { ORIENTATION_TO_SHARP_ROTATION } from 'src/constants';
import { Exif } from 'src/database';
import { AssetEditActionItem } from 'src/dtos/editing.dto';
import { Colorspace, LogLevel, RawExtractedFormat } from 'src/enum';
import {
AacProfile,
Av1Profile,
ColorMatrix,
ColorPrimaries,
Colorspace,
ColorTransfer,
DvProfile,
DvSignalCompatibility,
H264Profile,
HevcProfile,
LogLevel,
RawExtractedFormat,
} from 'src/enum';
import { LoggingRepository } from 'src/repositories/logging.repository';
import {
DecodeToBufferOptions,
@@ -18,6 +34,7 @@ import {
ProbeOptions,
TranscodeCommand,
VideoInfo,
VideoPacketInfo,
} from 'src/types';
import { handlePromiseError } from 'src/utils/misc';
import { createAffineMatrix } from 'src/utils/transform';
@@ -26,9 +43,14 @@ const probe = (input: string, options: string[]): Promise<FfprobeData> =>
new Promise((resolve, reject) =>
ffmpeg.ffprobe(input, options, (error, data) => (error ? reject(error) : resolve(data))),
);
// Promise-returning wrapper around child_process.execFile, used for direct ffprobe invocations below
const execFile = promisify(execFileCb);
// NOTE(review): concurrency(0) presumably lets sharp pick its default thread count — confirm against sharp docs
sharp.concurrency(0);
// Disable sharp's open-file caching so file handles are not held between operations
sharp.cache({ files: 0 });
// Normalize probe strings into PascalCase enum keys, e.g. "smpte170m" -> "Smpte170M", "arib-std-b67" -> "AribStdB67"
const pascalCase = (str: string) => _.upperFirst(_.camelCase(str.toLowerCase()));
type ProgressEvent = {
frames: number;
currentFps: number;
@@ -244,6 +266,7 @@ export class MediaRepository {
},
videoStreams: results.streams
.filter((stream) => stream.codec_type === 'video' && !stream.disposition?.attached_pic)
.sort((a, b) => this.compareStreams(a, b))
.map((stream) => {
const height = this.parseInt(stream.height);
const dar = this.getDar(stream.display_aspect_ratio);
@@ -252,28 +275,98 @@ export class MediaRepository {
height,
width: dar ? Math.round(height * dar) : this.parseInt(stream.width),
codecName: stream.codec_name === 'h265' ? 'hevc' : stream.codec_name,
codecType: stream.codec_type,
profile: this.parseVideoProfile(stream.codec_name, stream.profile as string | undefined),
level: this.parseOptionalInt(stream.level),
frameCount: this.parseInt(options?.countFrames ? stream.nb_read_packets : stream.nb_frames),
frameRate: this.parseFrameRate(stream.r_frame_rate ?? stream.avg_frame_rate),
timeBase: this.parseRational(stream.time_base)?.den,
rotation: this.parseInt(stream.rotation),
isHDR: stream.color_transfer === 'smpte2084' || stream.color_transfer === 'arib-std-b67',
bitrate: this.parseInt(stream.bit_rate),
pixelFormat: stream.pix_fmt || 'yuv420p',
colorPrimaries: stream.color_primaries,
colorSpace: stream.color_space,
colorTransfer: stream.color_transfer,
colorPrimaries: this.parseEnum(ColorPrimaries, stream.color_primaries) ?? ColorPrimaries.Unknown,
colorMatrix: this.parseEnum(ColorMatrix, stream.color_space) ?? ColorMatrix.Unknown,
colorTransfer: this.parseEnum(ColorTransfer, stream.color_transfer) ?? ColorTransfer.Unknown,
dvProfile: this.parseOptionalInt(stream.dv_profile) as DvProfile | undefined,
dvLevel: this.parseOptionalInt(stream.dv_level),
dvBlSignalCompatibilityId: this.parseOptionalInt(stream.dv_bl_signal_compatibility_id) as
| DvSignalCompatibility
| undefined,
};
}),
audioStreams: results.streams
.filter((stream) => stream.codec_type === 'audio')
.sort((a, b) => this.compareStreams(a, b))
.map((stream) => ({
index: stream.index,
codecType: stream.codec_type,
codecName: stream.codec_name,
profile:
stream.codec_name === 'aac' ? this.parseEnum(AacProfile, stream.profile as string | undefined) : undefined,
bitrate: this.parseInt(stream.bit_rate),
})),
};
}
/**
 * Needed for accurate segments, especially when remuxing, seeking and/or VFR is involved.
 * Scanning packets for keyframes in JS is much faster than -skip_frame nokey since it avoids decoding the video.
 *
 * @param input path of the media file to probe
 * @param streamIndex index of the stream whose packets are scanned
 * @returns aggregated packet/keyframe timing info, or null when no packets survive the discard filter
 */
async probePackets(input: string, streamIndex: number): Promise<VideoPacketInfo | null> {
// Ask ffprobe for one CSV row per packet: "pts,duration,flags"
const { stdout } = await execFile('ffprobe', [
'-v',
'error',
'-select_streams',
String(streamIndex),
'-show_entries',
'packet=pts,duration,flags',
'-of',
'csv=p=0',
input,
]);
let totalDuration = 0;
const keyframePts: number[] = [];
const keyframeAccDuration: number[] = [];
const keyframeOwnDuration: number[] = [];
// Packets that will actually be emitted (i.e. not flagged as discarded)
const postDiscard: { pts: number; duration: number }[] = [];
for (const line of stdout.split('\n')) {
if (!line) {
continue;
}
const [ptsStr, durationStr, flags] = line.split(',');
const pts = Number.parseInt(ptsStr);
const duration = Number.parseInt(durationStr);
// Skip rows with unparsable timing (e.g. "N/A" values)
if (Number.isNaN(pts) || Number.isNaN(duration)) {
continue;
}
// Discarded packets don't contribute to packet count, but still contribute to video duration
totalDuration += duration;
// ffprobe flag string: second char 'D' marks a discarded packet
if (flags[1] !== 'D') {
postDiscard.push({ pts, duration });
}
// First char 'K' marks a keyframe
if (flags[0] === 'K') {
keyframePts.push(pts);
keyframeAccDuration.push(totalDuration);
// VFR content can have variable duration keyframes,
// so we need to track their duration separately for accurate segment boundaries.
// Non-keyframes are accounted for in totalDuration.
keyframeOwnDuration.push(duration);
}
}
if (postDiscard.length === 0) {
return null;
}
return {
totalDuration,
packetCount: postDiscard.length,
// packetCount / totalDuration is the average output frames per PTS tick
outputFrames: this.cfrOutputFrames(postDiscard, postDiscard.length / totalDuration),
keyframePts,
keyframeAccDuration,
keyframeOwnDuration,
};
}
transcode(input: string, output: string | Writable, options: TranscodeCommand): Promise<void> {
if (!options.twoPass) {
return new Promise((resolve, reject) => {
@@ -356,6 +449,31 @@ export class MediaRepository {
return Number.parseFloat(value as string) || 0;
}
/** Parse an optional integer value, yielding undefined (never NaN) when absent or unparsable. */
private parseOptionalInt(value: string | number | undefined): number | undefined {
const parsed = Number.parseInt(value as string);
if (Number.isNaN(parsed)) {
return undefined;
}
return parsed;
}
/** Look up a numeric enum member by a probe string (normalized to PascalCase); undefined when missing or unmatched. */
private parseEnum<E extends Record<string, number | string>>(enumObj: E, value?: string) {
if (!value) {
return undefined;
}
return enumObj[pascalCase(value)] as Extract<E[keyof E], number> | undefined;
}
/**
 * Parse a rational like "60000/1001" or "1/600" into `{ num, den }`.
 * A missing denominator defaults to 1; zero or unparsable parts yield undefined.
 */
private parseRational(value: string | undefined): { num: number; den: number } | undefined {
if (!value) {
return;
}
const parts = value.split('/');
const num = Number(parts[0]);
const den = parts.length > 1 ? Number(parts[1]) : 1;
if (!num || !den) {
return;
}
return { num, den };
}
/** Convert a frame-rate rational string (e.g. "30000/1001") into frames per second. */
private parseFrameRate(value: string | undefined): number | undefined {
const rational = this.parseRational(value);
if (!rational) {
return undefined;
}
return rational.num / rational.den;
}
private getDar(dar: string | undefined): number {
if (dar) {
const [darW, darH] = dar.split(':').map(Number);
@@ -366,4 +484,42 @@ export class MediaRepository {
return 0;
}
/** Map a codec-specific profile string onto its numeric profile enum; undefined for codecs without a tracked profile. */
private parseVideoProfile(codec?: string, profile?: string) {
if (codec === 'h264') {
return this.parseEnum(H264Profile, profile);
}
if (codec === 'hevc' || codec === 'h265') {
return this.parseEnum(HevcProfile, profile);
}
if (codec === 'av1') {
return this.parseEnum(Av1Profile, profile);
}
}
/** Order streams so the container's default stream sorts first, then by descending bitrate. */
private compareStreams(a: FfprobeStream, b: FfprobeStream): number {
const aDefault = a.disposition?.default ?? 0;
const bDefault = b.disposition?.default ?? 0;
if (aDefault !== bDefault) {
return bDefault - aDefault;
}
return this.parseInt(b.bit_rate) - this.parseInt(a.bit_rate);
}
/**
 * Estimate how many frames a constant-frame-rate output would emit for these packets:
 * large PTS gaps duplicate frames, packets arriving too close together are dropped,
 * otherwise each packet yields exactly one frame.
 *
 * @param packets packet timing info, possibly out of PTS order (not mutated)
 * @param slotsPerTick average output frames per PTS tick (packetCount / totalDuration)
 * @returns estimated output frame count; 0 for an empty packet list
 */
private cfrOutputFrames(packets: { pts: number; duration: number }[], slotsPerTick: number) {
// Guard the empty case so ordered[0] below is always defined
if (packets.length === 0) {
return 0;
}
// Packets may be out of PTS order due to B-frames; sort a copy so the caller's array is untouched
const ordered = [...packets].sort((a, b) => a.pts - b.pts);
const firstPts = ordered[0].pts;
let outputFrames = 0;
let nextPts = 0;
for (const pkt of ordered) {
// How far this packet's slot (plus its duration) is ahead of the CFR clock
const delta = (pkt.pts - firstPts) * slotsPerTick - nextPts + pkt.duration * slotsPerTick;
// Within ~1 slot: one frame; far behind: drop (0); far ahead: duplicate (round up the gap)
const nb = delta < -1.1 ? 0 : delta > 1.1 ? Math.round(delta) : 1;
outputFrames += nb;
nextPts += nb;
}
return outputFrames;
}
}

View File

@@ -33,6 +33,7 @@ import { AlbumUserTable } from 'src/schema/tables/album-user.table';
import { AlbumTable } from 'src/schema/tables/album.table';
import { ApiKeyTable } from 'src/schema/tables/api-key.table';
import { AssetAuditTable } from 'src/schema/tables/asset-audit.table';
import { AssetAudioTable, AssetKeyframeTable, AssetVideoTable } from 'src/schema/tables/asset-av.table';
import { AssetEditAuditTable } from 'src/schema/tables/asset-edit-audit.table';
import { AssetEditTable } from 'src/schema/tables/asset-edit.table';
import { AssetExifTable } from 'src/schema/tables/asset-exif.table';
@@ -196,6 +197,9 @@ export interface DB {
asset_metadata_audit: AssetMetadataAuditTable;
asset_job_status: AssetJobStatusTable;
asset_ocr: AssetOcrTable;
asset_audio: AssetAudioTable;
asset_video: AssetVideoTable;
asset_keyframe: AssetKeyframeTable;
ocr_search: OcrSearchTable;
face_search: FaceSearchTable;

View File

@@ -0,0 +1,51 @@
import { Kysely, sql } from 'kysely';
/**
 * Adds per-asset AV metadata tables: primary audio stream, primary video stream
 * (with container format info), and keyframe timing arrays. Each table is 1:1
 * with an asset (primary key = assetId) and is removed when the asset is deleted.
 */
export async function up(db: Kysely<any>): Promise<void> {
// Primary audio stream of the asset
await sql`CREATE TABLE "asset_audio" (
  "assetId" uuid NOT NULL,
  "bitrate" integer NOT NULL,
  "index" smallint NOT NULL,
  "profile" smallint,
  "codecName" text NOT NULL,
  CONSTRAINT "asset_audio_assetId_fkey" FOREIGN KEY ("assetId") REFERENCES "asset" ("id") ON UPDATE NO ACTION ON DELETE CASCADE,
  CONSTRAINT "asset_audio_pkey" PRIMARY KEY ("assetId")
);`.execute(db);
// Primary video stream plus container-level format columns
await sql`CREATE TABLE "asset_video" (
  "assetId" uuid NOT NULL,
  "bitrate" integer NOT NULL,
  "frameCount" integer NOT NULL,
  "timeBase" integer NOT NULL,
  "index" smallint NOT NULL,
  "profile" smallint,
  "level" smallint,
  "colorPrimaries" smallint NOT NULL,
  "colorTransfer" smallint NOT NULL,
  "colorMatrix" smallint NOT NULL,
  "dvProfile" smallint,
  "dvLevel" smallint,
  "dvBlSignalCompatibilityId" smallint,
  "codecName" text NOT NULL,
  "formatName" text NOT NULL,
  "formatLongName" text NOT NULL,
  "pixelFormat" text NOT NULL,
  CONSTRAINT "asset_video_assetId_fkey" FOREIGN KEY ("assetId") REFERENCES "asset" ("id") ON UPDATE NO ACTION ON DELETE CASCADE,
  CONSTRAINT "asset_video_pkey" PRIMARY KEY ("assetId")
);`.execute(db);
// Keyframe/packet scan results (parallel arrays indexed by keyframe)
await sql`CREATE TABLE "asset_keyframe" (
  "assetId" uuid NOT NULL,
  "pts" integer[] NOT NULL,
  "accDuration" integer[] NOT NULL,
  "ownDuration" integer[] NOT NULL,
  "totalDuration" integer NOT NULL,
  "packetCount" integer NOT NULL,
  "outputFrames" integer NOT NULL,
  CONSTRAINT "asset_keyframe_assetId_fkey" FOREIGN KEY ("assetId") REFERENCES "asset" ("id") ON UPDATE NO ACTION ON DELETE CASCADE,
  CONSTRAINT "asset_keyframe_pkey" PRIMARY KEY ("assetId")
);`.execute(db);
}
/** Reverts the migration by dropping the three AV metadata tables. */
export async function down(db: Kysely<any>): Promise<void> {
await sql`DROP TABLE "asset_audio";`.execute(db);
await sql`DROP TABLE "asset_video";`.execute(db);
await sql`DROP TABLE "asset_keyframe";`.execute(db);
}

View File

@@ -0,0 +1,100 @@
import { Column, ForeignKeyColumn, Table } from '@immich/sql-tools';
import { AssetTable } from 'src/schema/tables/asset.table';
const smallint = 'smallint' as 'integer';
/** Primary audio stream metadata for an asset (1:1, removed with the asset). */
@Table('asset_audio')
export class AssetAudioTable {
@ForeignKeyColumn(() => AssetTable, { onDelete: 'CASCADE', primary: true })
assetId!: string;
// Stream bitrate as reported by the probe — assumed bits per second; TODO confirm
@Column({ type: 'integer' })
bitrate!: number;
// Stream index within the container
@Column({ type: smallint })
index!: number;
// Numeric profile code (AacProfile for AAC); null when no profile is tracked for the codec
@Column({ type: smallint, nullable: true })
profile!: number | null;
@Column({ type: 'text' })
codecName!: string;
}
/**
 * Primary video stream metadata for an asset (1:1, removed with the asset).
 * Also carries container-level format columns (formatName/formatLongName).
 */
@Table('asset_video')
export class AssetVideoTable {
@ForeignKeyColumn(() => AssetTable, { onDelete: 'CASCADE', primary: true })
assetId!: string;
// Stream bitrate as reported by the probe — assumed bits per second; TODO confirm
@Column({ type: 'integer' })
bitrate!: number;
@Column({ type: 'integer' })
frameCount!: number;
// Denominator of the stream time base rational (e.g. 600 for "1/600")
@Column({ type: 'integer' })
timeBase!: number;
// Stream index within the container
@Column({ type: smallint })
index!: number;
// Codec-specific numeric profile (H264Profile / HevcProfile / Av1Profile); null when untracked
@Column({ type: smallint, nullable: true })
profile!: number | null;
@Column({ type: smallint, nullable: true })
level!: number | null;
// ITU-T H.273 code points (ColorPrimaries / ColorTransfer / ColorMatrix enums)
@Column({ type: smallint })
colorPrimaries!: number;
@Column({ type: smallint })
colorTransfer!: number;
@Column({ type: smallint })
colorMatrix!: number;
// Dolby Vision configuration record values; all null for non-DV streams
@Column({ type: smallint, nullable: true })
dvProfile!: number | null;
@Column({ type: smallint, nullable: true })
dvLevel!: number | null;
@Column({ type: smallint, nullable: true })
dvBlSignalCompatibilityId!: number | null;
@Column({ type: 'text' })
codecName!: string;
@Column({ type: 'text' })
formatName!: string;
@Column({ type: 'text' })
formatLongName!: string;
@Column({ type: 'text' })
pixelFormat!: string;
}
/**
 * Keyframe/packet scan results for an asset's video stream (1:1, removed with the asset).
 * pts/accDuration/ownDuration are parallel arrays — one entry per keyframe.
 */
@Table('asset_keyframe')
export class AssetKeyframeTable {
@ForeignKeyColumn(() => AssetTable, { onDelete: 'CASCADE', primary: true })
assetId!: string;
// PTS of each keyframe
@Column({ type: 'integer', array: true })
pts!: number[];
// Cumulative stream duration up to and including each keyframe's packet
@Column({ type: 'integer', array: true })
accDuration!: number[];
// Each keyframe's own packet duration (varies under VFR)
@Column({ type: 'integer', array: true })
ownDuration!: number[];
// Sum of all packet durations, including discarded packets
@Column({ type: 'integer' })
totalDuration!: number;
// Number of packets after discard filtering
@Column({ type: 'integer' })
packetCount!: number;
// Estimated frame count of a constant-frame-rate output
@Column({ type: 'integer' })
outputFrames!: number;
}

View File

@@ -351,10 +351,10 @@ export class AssetMediaService extends BaseService {
await this.storageRepository.utimes(sidecarFile.originalPath, new Date(), new Date(dto.fileModifiedAt));
}
await this.storageRepository.utimes(file.originalPath, new Date(), new Date(dto.fileModifiedAt));
await this.assetRepository.upsertExif(
{ assetId: asset.id, fileSizeInByte: file.size },
{ lockedPropertiesBehavior: 'override' },
);
await this.assetRepository.upsertExif({
exif: { assetId: asset.id, fileSizeInByte: file.size },
lockedPropertiesBehavior: 'override',
});
await this.eventRepository.emit('AssetCreate', { asset });

View File

@@ -187,8 +187,10 @@ describe(AssetService.name, () => {
await sut.update(authStub.admin, asset.id, { description: 'Test description' });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
{ assetId: asset.id, description: 'Test description', lockedProperties: ['description'] },
{ lockedPropertiesBehavior: 'append' },
expect.objectContaining({
exif: { assetId: asset.id, description: 'Test description', lockedProperties: ['description'] },
lockedPropertiesBehavior: 'append',
}),
);
});
@@ -201,12 +203,14 @@ describe(AssetService.name, () => {
await sut.update(authStub.admin, asset.id, { rating: 3 });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
{
assetId: asset.id,
rating: 3,
lockedProperties: ['rating'],
},
{ lockedPropertiesBehavior: 'append' },
expect.objectContaining({
exif: {
assetId: asset.id,
rating: 3,
lockedProperties: ['rating'],
},
lockedPropertiesBehavior: 'append',
}),
);
});

View File

@@ -517,13 +517,13 @@ export class AssetService extends BaseService {
);
if (Object.keys(writes).length > 0) {
await this.assetRepository.upsertExif(
updateLockedColumns({
await this.assetRepository.upsertExif({
exif: updateLockedColumns({
assetId: id,
...writes,
}),
{ lockedPropertiesBehavior: 'append' },
);
lockedPropertiesBehavior: 'append',
});
await this.jobRepository.queue({ name: JobName.SidecarWrite, data: { id } });
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -15,7 +15,6 @@ import {
ImageFormat,
JobName,
JobStatus,
LogLevel,
QueueName,
RawExtractedFormat,
StorageFolder,
@@ -506,10 +505,7 @@ export class MediaService extends BaseService {
};
}
private async generateVideoThumbnails(
asset: ThumbnailPathEntity & { originalPath: string },
{ ffmpeg, image }: SystemConfig,
) {
private async generateVideoThumbnails(asset: ThumbnailAsset, { ffmpeg, image }: SystemConfig) {
const previewFile = this.getImageFile(asset, {
fileType: AssetFileType.Preview,
format: image.preview.format,
@@ -526,22 +522,15 @@ export class MediaService extends BaseService {
});
this.storageCore.ensureFolders(previewFile.path);
const { format, audioStreams, videoStreams } = await this.mediaRepository.probe(asset.originalPath);
const mainVideoStream = this.getMainStream(videoStreams);
if (!mainVideoStream) {
throw new Error(`No video streams found for asset ${asset.id}`);
const { videoStream, format } = asset;
if (!videoStream || !format) {
throw new Error(`Missing video metadata for asset ${asset.id}`);
}
const mainAudioStream = this.getMainStream(audioStreams);
const previewConfig = ThumbnailConfig.create({ ...ffmpeg, targetResolution: image.preview.size.toString() });
const thumbnailConfig = ThumbnailConfig.create({ ...ffmpeg, targetResolution: image.thumbnail.size.toString() });
const previewOptions = previewConfig.getCommand(TranscodeTarget.Video, mainVideoStream, mainAudioStream, format);
const thumbnailOptions = thumbnailConfig.getCommand(
TranscodeTarget.Video,
mainVideoStream,
mainAudioStream,
format,
);
const thumbConfig = ThumbnailConfig.create({ ...ffmpeg, targetResolution: image.thumbnail.size.toString() });
const previewOptions = previewConfig.getCommand(TranscodeTarget.Video, videoStream, undefined, format ?? undefined);
const thumbnailOptions = thumbConfig.getCommand(TranscodeTarget.Video, videoStream, undefined, format ?? undefined);
await this.mediaRepository.transcode(asset.originalPath, previewFile.path, previewOptions);
await this.mediaRepository.transcode(asset.originalPath, thumbnailFile.path, thumbnailOptions);
@@ -554,7 +543,7 @@ export class MediaService extends BaseService {
return {
files: [previewFile, thumbnailFile],
thumbhash,
fullsizeDimensions: { width: mainVideoStream.width, height: mainVideoStream.height },
fullsizeDimensions: { width: videoStream.width, height: videoStream.height },
};
}
@@ -588,17 +577,14 @@ export class MediaService extends BaseService {
const output = StorageCore.getEncodedVideoPath(asset);
this.storageCore.ensureFolders(output);
const { videoStreams, audioStreams, format } = await this.mediaRepository.probe(input, {
countFrames: this.logger.isLevelEnabled(LogLevel.Debug), // makes frame count more reliable for progress logs
});
const videoStream = this.getMainStream(videoStreams);
const audioStream = this.getMainStream(audioStreams);
if (!videoStream || !format.formatName) {
const { videoStream, format } = asset;
const audioStream = asset.audioStream ?? undefined;
if (!videoStream || !format) {
this.logger.warn(`Skipped transcoding for asset ${asset.id}: missing metadata; re-run extraction first`);
return JobStatus.Failed;
}
if (!videoStream.height || !videoStream.width) {
this.logger.warn(`Skipped transcoding for asset ${asset.id}: no video streams found`);
this.logger.warn(`Skipped transcoding for asset ${asset.id}: no video dimensions`);
return JobStatus.Failed;
}
@@ -667,12 +653,6 @@ export class MediaService extends BaseService {
return JobStatus.Success;
}
private getMainStream<T extends VideoStreamInfo | AudioStreamInfo>(streams: T[]): T {
return streams
.filter((stream) => stream.codecName !== 'unknown')
.toSorted((stream1, stream2) => stream2.bitrate - stream1.bitrate)[0];
}
private getTranscodeTarget(
config: SystemConfigFFmpegDto,
videoStream: VideoStreamInfo,

View File

@@ -18,7 +18,7 @@ import { ImmichTags } from 'src/repositories/metadata.repository';
import { firstDateTime, MetadataService } from 'src/services/metadata.service';
import { AssetFactory } from 'test/factories/asset.factory';
import { PersonFactory } from 'test/factories/person.factory';
import { probeStub } from 'test/fixtures/media.stub';
import { videoInfoStub } from 'test/fixtures/media.stub';
import { tagStub } from 'test/fixtures/tag.stub';
import { getForMetadataExtraction, getForSidecarWrite } from 'test/mappers';
import { factory } from 'test/small.factory';
@@ -59,6 +59,15 @@ const makeFaceTags = (face: Partial<{ Name: string }> = {}, orientation?: Immich
},
});
/** VideoPacketInfo stub for probePackets mocks: a probe that found no packets or keyframes. */
const emptyPackets = {
  keyframePts: [],
  keyframeAccDuration: [],
  keyframeOwnDuration: [],
  totalDuration: 0,
  packetCount: 0,
  outputFrames: 0,
};
describe(MetadataService.name, () => {
let sut: MetadataService;
let mocks: ServiceMocks;
@@ -183,9 +192,12 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(expect.objectContaining({ dateTimeOriginal: sidecarDate }), {
lockedPropertiesBehavior: 'skip',
});
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
exif: expect.objectContaining({ dateTimeOriginal: sidecarDate }),
lockedPropertiesBehavior: 'skip',
}),
);
expect(mocks.asset.update).toHaveBeenCalledWith(
expect.objectContaining({
id: asset.id,
@@ -212,8 +224,10 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ dateTimeOriginal: fileModifiedAt }),
{ lockedPropertiesBehavior: 'skip' },
expect.objectContaining({
exif: expect.objectContaining({ dateTimeOriginal: fileModifiedAt }),
lockedPropertiesBehavior: 'skip',
}),
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: asset.id,
@@ -242,8 +256,10 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ dateTimeOriginal: fileCreatedAt }),
{ lockedPropertiesBehavior: 'skip' },
expect.objectContaining({
exif: expect.objectContaining({ dateTimeOriginal: fileCreatedAt }),
lockedPropertiesBehavior: 'skip',
}),
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: asset.id,
@@ -265,9 +281,11 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
dateTimeOriginal: new Date('2022-01-01T00:00:00.000Z'),
exif: expect.objectContaining({
dateTimeOriginal: new Date('2022-01-01T00:00:00.000Z'),
}),
lockedPropertiesBehavior: 'skip',
}),
{ lockedPropertiesBehavior: 'skip' },
);
expect(mocks.asset.update).toHaveBeenCalledWith(
@@ -290,9 +308,12 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(expect.objectContaining({ iso: 160 }), {
lockedPropertiesBehavior: 'skip',
});
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
exif: expect.objectContaining({ iso: 160 }),
lockedPropertiesBehavior: 'skip',
}),
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: asset.id,
duration: null,
@@ -323,8 +344,10 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ city: null, state: null, country: null }),
{ lockedPropertiesBehavior: 'skip' },
expect.objectContaining({
exif: expect.objectContaining({ city: null, state: null, country: null }),
lockedPropertiesBehavior: 'skip',
}),
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: asset.id,
@@ -353,8 +376,10 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ city: 'City', state: 'State', country: 'Country' }),
{ lockedPropertiesBehavior: 'skip' },
expect.objectContaining({
exif: expect.objectContaining({ city: 'City', state: 'State', country: 'Country' }),
lockedPropertiesBehavior: 'skip',
}),
);
expect(mocks.asset.update).toHaveBeenCalledWith({
id: asset.id,
@@ -378,8 +403,10 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ latitude: null, longitude: null }),
{ lockedPropertiesBehavior: 'skip' },
expect.objectContaining({
exif: expect.objectContaining({ latitude: null, longitude: null }),
lockedPropertiesBehavior: 'skip',
}),
);
});
@@ -585,7 +612,7 @@ describe(MetadataService.name, () => {
it('should not apply motion photos if asset is video', async () => {
const asset = AssetFactory.create({ type: AssetType.Video });
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mocks.media.probe.mockResolvedValue(probeStub.matroskaContainer);
mocks.media.probe.mockResolvedValue(videoInfoStub.matroskaContainer);
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
@@ -611,15 +638,144 @@ describe(MetadataService.name, () => {
it('should extract the correct video orientation', async () => {
const asset = AssetFactory.create({ type: AssetType.Video });
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVertical2160p);
mocks.media.probe.mockResolvedValue(videoInfoStub.videoStreamVertical2160p);
mockReadTags({});
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({ orientation: ExifOrientation.Rotate270CW.toString() }),
{ lockedPropertiesBehavior: 'skip' },
expect.objectContaining({
exif: expect.objectContaining({ orientation: ExifOrientation.Rotate270CW.toString() }),
lockedPropertiesBehavior: 'skip',
}),
);
});
it('should persist CICP smallints and profile/level for HDR10 video', async () => {
const asset = AssetFactory.create({ type: AssetType.Video });
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mocks.media.probe.mockResolvedValue(videoInfoStub.videoStreamHDR10);
mocks.media.probePackets.mockResolvedValue(emptyPackets);
mockReadTags({});
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
exif: expect.objectContaining({ fps: 59.94 }),
video: expect.objectContaining({
codecName: 'hevc',
profile: 2,
level: 153,
pixelFormat: 'yuv420p10le',
colorPrimaries: 9,
colorTransfer: 16,
colorMatrix: 9,
dvProfile: undefined,
}),
}),
);
});
it('should persist Dolby Vision fields', async () => {
const asset = AssetFactory.create({ type: AssetType.Video });
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mocks.media.probe.mockResolvedValue(videoInfoStub.videoStreamDolbyVision);
mocks.media.probePackets.mockResolvedValue(emptyPackets);
mockReadTags({});
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
video: expect.objectContaining({
dvProfile: 8,
dvLevel: 10,
dvBlSignalCompatibilityId: 4,
colorTransfer: 18, // ARIB_STD_B67
}),
}),
);
});
it('should persist packet-derived HLS fields', async () => {
const asset = AssetFactory.create({ type: AssetType.Video });
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mocks.media.probe.mockResolvedValue(videoInfoStub.videoStreamHDR10);
mocks.media.probePackets.mockResolvedValue({
totalDuration: 12_080,
packetCount: 1148,
outputFrames: 1149,
keyframePts: [-590, 10, 611, 1211],
keyframeAccDuration: [10, 610, 6110, 12_080],
keyframeOwnDuration: [10, 10, 10, 10],
});
mockReadTags({});
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
video: expect.objectContaining({ timeBase: 600 }),
keyframes: expect.objectContaining({
totalDuration: 12_080,
packetCount: 1148,
outputFrames: 1149,
pts: [-590, 10, 611, 1211],
accDuration: [10, 610, 6110, 12_080],
ownDuration: [10, 10, 10, 10],
}),
}),
);
});
it('should omit the keyframe row when the probe returns no keyframes', async () => {
const asset = AssetFactory.create({ type: AssetType.Video });
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mocks.media.probe.mockResolvedValue(videoInfoStub.videoStreamHDR10);
mocks.media.probePackets.mockResolvedValue(emptyPackets);
mockReadTags({});
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.not.objectContaining({ keyframes: expect.anything() }),
);
});
it('should prefer ffprobe frameRate over exiftool VideoFrameRate', async () => {
const asset = AssetFactory.create({ type: AssetType.Video });
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mocks.media.probe.mockResolvedValue(videoInfoStub.videoStreamHDR10);
mocks.media.probePackets.mockResolvedValue(emptyPackets);
mockReadTags({ VideoFrameRate: '30' });
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
exif: expect.objectContaining({ fps: 59.94 }),
lockedPropertiesBehavior: 'skip',
}),
);
});
it('should not insert audio/video/keyframe rows for image assets', async () => {
const asset = AssetFactory.create({ type: AssetType.Image });
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mockReadTags({});
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.media.probe).not.toHaveBeenCalled();
expect(mocks.media.probePackets).not.toHaveBeenCalled();
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.not.objectContaining({
audio: expect.anything(),
video: expect.anything(),
keyframes: expect.anything(),
}),
);
});
@@ -909,39 +1065,41 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
{
assetId: asset.id,
bitsPerSample: expect.any(Number),
autoStackId: null,
colorspace: tags.ColorSpace,
dateTimeOriginal: dateForTest,
description: tags.ImageDescription,
exifImageHeight: null,
exifImageWidth: null,
exposureTime: tags.ExposureTime,
fNumber: null,
fileSizeInByte: 123_456,
focalLength: tags.FocalLength,
fps: null,
iso: tags.ISO,
latitude: null,
lensModel: tags.LensModel,
livePhotoCID: tags.MediaGroupUUID,
longitude: null,
make: tags.Make,
model: tags.Model,
modifyDate: expect.any(Date),
orientation: tags.Orientation?.toString(),
profileDescription: tags.ProfileDescription,
projectionType: 'EQUIRECTANGULAR',
timeZone: tags.zone,
rating: tags.Rating,
country: null,
state: null,
city: null,
tags: ['parent/child'],
},
{ lockedPropertiesBehavior: 'skip' },
expect.objectContaining({
exif: {
assetId: asset.id,
bitsPerSample: expect.any(Number),
autoStackId: null,
colorspace: tags.ColorSpace,
dateTimeOriginal: dateForTest,
description: tags.ImageDescription,
exifImageHeight: null,
exifImageWidth: null,
exposureTime: tags.ExposureTime,
fNumber: null,
fileSizeInByte: 123_456,
focalLength: tags.FocalLength,
fps: null,
iso: tags.ISO,
latitude: null,
lensModel: tags.LensModel,
livePhotoCID: tags.MediaGroupUUID,
longitude: null,
make: tags.Make,
model: tags.Model,
modifyDate: expect.any(Date),
orientation: tags.Orientation?.toString(),
profileDescription: tags.ProfileDescription,
projectionType: 'EQUIRECTANGULAR',
timeZone: tags.zone,
rating: tags.Rating,
country: null,
state: null,
city: null,
tags: ['parent/child'],
},
lockedPropertiesBehavior: 'skip',
}),
);
expect(mocks.asset.update).toHaveBeenCalledWith(
expect.objectContaining({
@@ -975,9 +1133,11 @@ describe(MetadataService.name, () => {
expect(mocks.assetJob.getForMetadataExtraction).toHaveBeenCalledWith(asset.id);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
timeZone: 'UTC+0',
exif: expect.objectContaining({
timeZone: 'UTC+0',
}),
lockedPropertiesBehavior: 'skip',
}),
{ lockedPropertiesBehavior: 'skip' },
);
});
@@ -985,9 +1145,9 @@ describe(MetadataService.name, () => {
const asset = AssetFactory.create({ type: AssetType.Video });
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mocks.media.probe.mockResolvedValue({
...probeStub.videoStreamH264,
...videoInfoStub.videoStreamH264,
format: {
...probeStub.videoStreamH264.format,
...videoInfoStub.videoStreamH264.format,
duration: 6.21,
},
});
@@ -1008,9 +1168,9 @@ describe(MetadataService.name, () => {
const asset = AssetFactory.create();
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mocks.media.probe.mockResolvedValue({
...probeStub.videoStreamH264,
...videoInfoStub.videoStreamH264,
format: {
...probeStub.videoStreamH264.format,
...videoInfoStub.videoStreamH264.format,
duration: 6.21,
},
});
@@ -1030,9 +1190,9 @@ describe(MetadataService.name, () => {
const asset = AssetFactory.create({ type: AssetType.Video });
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mocks.media.probe.mockResolvedValue({
...probeStub.videoStreamH264,
...videoInfoStub.videoStreamH264,
format: {
...probeStub.videoStreamH264.format,
...videoInfoStub.videoStreamH264.format,
duration: 0,
},
});
@@ -1053,9 +1213,9 @@ describe(MetadataService.name, () => {
const asset = AssetFactory.create({ type: AssetType.Video });
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mocks.media.probe.mockResolvedValue({
...probeStub.videoStreamH264,
...videoInfoStub.videoStreamH264,
format: {
...probeStub.videoStreamH264.format,
...videoInfoStub.videoStreamH264.format,
duration: 604_800,
},
});
@@ -1111,9 +1271,9 @@ describe(MetadataService.name, () => {
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mockReadTags({ Duration: 123 }, {});
mocks.media.probe.mockResolvedValue({
...probeStub.videoStreamH264,
...videoInfoStub.videoStreamH264,
format: {
...probeStub.videoStreamH264.format,
...videoInfoStub.videoStreamH264.format,
duration: 456,
},
});
@@ -1132,18 +1292,22 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
description: '',
exif: expect.objectContaining({
description: '',
}),
lockedPropertiesBehavior: 'skip',
}),
{ lockedPropertiesBehavior: 'skip' },
);
mockReadTags({ ImageDescription: ' my\n description' });
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
description: 'my\n description',
exif: expect.objectContaining({
description: 'my\n description',
}),
lockedPropertiesBehavior: 'skip',
}),
{ lockedPropertiesBehavior: 'skip' },
);
});
@@ -1155,9 +1319,11 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
description: '1000',
exif: expect.objectContaining({
description: '1000',
}),
lockedPropertiesBehavior: 'skip',
}),
{ lockedPropertiesBehavior: 'skip' },
);
});
@@ -1388,9 +1554,11 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
modifyDate: expect.any(Date),
exif: expect.objectContaining({
modifyDate: expect.any(Date),
}),
lockedPropertiesBehavior: 'skip',
}),
{ lockedPropertiesBehavior: 'skip' },
);
});
@@ -1402,9 +1570,11 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
rating: null,
exif: expect.objectContaining({
rating: null,
}),
lockedPropertiesBehavior: 'skip',
}),
{ lockedPropertiesBehavior: 'skip' },
);
});
@@ -1416,9 +1586,11 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
rating: 5,
exif: expect.objectContaining({
rating: 5,
}),
lockedPropertiesBehavior: 'skip',
}),
{ lockedPropertiesBehavior: 'skip' },
);
});
@@ -1430,9 +1602,11 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
rating: null,
exif: expect.objectContaining({
rating: null,
}),
lockedPropertiesBehavior: 'skip',
}),
{ lockedPropertiesBehavior: 'skip' },
);
});
@@ -1444,9 +1618,11 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
rating: -1,
exif: expect.objectContaining({
rating: -1,
}),
lockedPropertiesBehavior: 'skip',
}),
{ lockedPropertiesBehavior: 'skip' },
);
});
@@ -1466,7 +1642,7 @@ describe(MetadataService.name, () => {
it('should handle not finding a match', async () => {
const asset = AssetFactory.create({ type: AssetType.Video });
mocks.media.probe.mockResolvedValue(probeStub.videoStreamVertical2160p);
mocks.media.probe.mockResolvedValue(videoInfoStub.videoStreamVertical2160p);
mocks.assetJob.getForMetadataExtraction.mockResolvedValue(getForMetadataExtraction(asset));
mockReadTags({ ContentIdentifier: 'CID' });
@@ -1578,9 +1754,12 @@ describe(MetadataService.name, () => {
mockReadTags(exif);
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(expect.objectContaining(expected), {
lockedPropertiesBehavior: 'skip',
});
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
exif: expect.objectContaining(expected),
lockedPropertiesBehavior: 'skip',
}),
);
});
it.each([
@@ -1605,9 +1784,11 @@ describe(MetadataService.name, () => {
await sut.handleMetadataExtraction({ id: asset.id });
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
expect.objectContaining({
lensModel: expected,
exif: expect.objectContaining({
lensModel: expected,
}),
lockedPropertiesBehavior: 'skip',
}),
{ lockedPropertiesBehavior: 'skip' },
);
});

View File

@@ -243,10 +243,11 @@ export class MetadataService extends BaseService {
return;
}
const [exifTags, stats] = await Promise.all([
const [exifResult, stats] = await Promise.all([
this.getExifTags(asset),
this.storageRepository.stat(asset.originalPath),
]);
const { tags: exifTags, audio, video, packets, format } = exifResult;
this.logger.verbose('Exif Tags', exifTags);
const dates = this.getDates(asset, exifTags, stats);
@@ -294,7 +295,7 @@ export class MetadataService extends BaseService {
exifTags.Make ?? exifTags.Device?.Manufacturer ?? exifTags.AndroidMake ?? (exifTags.DeviceManufacturer || null),
model:
exifTags.Model ?? exifTags.Device?.ModelName ?? exifTags.AndroidModel ?? (exifTags.DeviceModelName || null),
fps: validate(Number.parseFloat(exifTags.VideoFrameRate!)),
fps: video?.frameRate ?? validate(Number.parseFloat(exifTags.VideoFrameRate!)),
iso: validate(exifTags.ISO) as number,
exposureTime: exifTags.ExposureTime ?? null,
lensModel: getLensModel(exifTags),
@@ -313,6 +314,53 @@ export class MetadataService extends BaseService {
tags: tags.length > 0 ? tags : null,
};
const audioData =
format && audio?.codecName
? {
assetId: asset.id,
bitrate: audio.bitrate,
index: audio.index,
profile: audio.profile,
codecName: audio.codecName,
}
: undefined;
const videoData =
format?.formatName && format?.formatLongName && video?.codecName && video?.timeBase
? {
assetId: asset.id,
bitrate: video.bitrate,
frameCount: video.frameCount,
timeBase: video.timeBase,
index: video.index,
profile: video.profile,
level: video.level,
colorPrimaries: video.colorPrimaries,
colorTransfer: video.colorTransfer,
colorMatrix: video.colorMatrix,
dvProfile: video.dvProfile,
dvLevel: video.dvLevel,
dvBlSignalCompatibilityId: video.dvBlSignalCompatibilityId,
codecName: video.codecName,
formatName: format.formatName,
formatLongName: format.formatLongName,
pixelFormat: video.pixelFormat,
}
: undefined;
const keyframeData =
packets && packets.keyframePts.length > 0
? {
assetId: asset.id,
totalDuration: packets.totalDuration,
packetCount: packets.packetCount,
outputFrames: packets.outputFrames,
pts: packets.keyframePts,
accDuration: packets.keyframeAccDuration,
ownDuration: packets.keyframeOwnDuration,
}
: undefined;
const isSidewards = exifTags.Orientation && this.isOrientationSidewards(exifTags.Orientation);
const assetWidth = isSidewards ? validate(height) : validate(width);
const assetHeight = isSidewards ? validate(width) : validate(height);
@@ -333,7 +381,13 @@ export class MetadataService extends BaseService {
height: !asset.isEdited || asset.height == null ? assetHeight : undefined,
}),
async () => {
await this.assetRepository.upsertExif(exifData, { lockedPropertiesBehavior: 'skip' });
await this.assetRepository.upsertExif({
exif: exifData,
audio: audioData,
video: videoData,
keyframes: keyframeData,
lockedPropertiesBehavior: 'skip',
});
await this.applyTagList(asset);
},
);
@@ -523,13 +577,14 @@ export class MetadataService extends BaseService {
return { width, height };
}
private async getExifTags(asset: { originalPath: string; files: AssetFile[]; type: AssetType }): Promise<ImmichTags> {
private async getExifTags(asset: { originalPath: string; files: AssetFile[]; type: AssetType }) {
const { sidecarFile } = getAssetFiles(asset.files);
const shouldProbe = asset.type === AssetType.Video || asset.originalPath.toLowerCase().endsWith('.gif');
const [mediaTags, sidecarTags, videoTags] = await Promise.all([
const [mediaTags, sidecarTags, videoResult] = await Promise.all([
this.metadataRepository.readTags(asset.originalPath),
sidecarFile ? this.metadataRepository.readTags(sidecarFile.path) : null,
asset.type === AssetType.Video ? this.getVideoTags(asset.originalPath) : null,
shouldProbe ? this.getVideoTags(asset.originalPath) : null,
]);
// prefer dates from sidecar tags
@@ -554,14 +609,20 @@ export class MetadataService extends BaseService {
// prefer duration from video tags
// don't save duration if asset is definitely not an animated image (see e.g. CR3 with Duration: 1s)
if (videoTags || !mimeTypes.isPossiblyAnimatedImage(asset.originalPath)) {
if (videoResult || !mimeTypes.isPossiblyAnimatedImage(asset.originalPath)) {
delete mediaTags.Duration;
}
// never use duration from sidecar
delete sidecarTags?.Duration;
return { ...mediaTags, ...videoTags, ...sidecarTags };
return {
tags: { ...mediaTags, ...videoResult?.tags, ...sidecarTags },
audio: videoResult?.audio,
video: videoResult?.video,
packets: videoResult?.packets,
format: videoResult?.format ?? null,
};
}
private getTagList(exifTags: ImmichTags): string[] {
@@ -1016,20 +1077,22 @@ export class MetadataService extends BaseService {
}
private async getVideoTags(originalPath: string) {
const { videoStreams, format } = await this.mediaRepository.probe(originalPath);
const { videoStreams, audioStreams, format } = await this.mediaRepository.probe(originalPath);
const video = videoStreams[0];
const audio = audioStreams[0];
const packets = video?.timeBase ? await this.mediaRepository.probePackets(originalPath, video.index) : null;
const tags: Pick<ImmichTags, 'Duration' | 'Orientation' | 'ImageWidth' | 'ImageHeight'> = {};
if (videoStreams[0]) {
// Set video dimensions
if (videoStreams[0].width) {
tags.ImageWidth = videoStreams[0].width;
if (video) {
if (video.width) {
tags.ImageWidth = video.width;
}
if (videoStreams[0].height) {
tags.ImageHeight = videoStreams[0].height;
if (video.height) {
tags.ImageHeight = video.height;
}
switch (videoStreams[0].rotation) {
switch (video.rotation) {
case -90: {
tags.Orientation = ExifOrientation.Rotate90CW;
break;
@@ -1053,6 +1116,6 @@ export class MetadataService extends BaseService {
tags.Duration = format.duration;
}
return tags;
return { tags, audio, video, packets, format };
}
}

View File

@@ -206,16 +206,22 @@ describe(TagService.name, () => {
count: 6,
});
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
{ assetId: 'asset-1', lockedProperties: ['tags'], tags: ['tag-1', 'tag-2'] },
{ lockedPropertiesBehavior: 'append' },
expect.objectContaining({
exif: { assetId: 'asset-1', lockedProperties: ['tags'], tags: ['tag-1', 'tag-2'] },
lockedPropertiesBehavior: 'append',
}),
);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
{ assetId: 'asset-2', lockedProperties: ['tags'], tags: ['tag-1', 'tag-2'] },
{ lockedPropertiesBehavior: 'append' },
expect.objectContaining({
exif: { assetId: 'asset-2', lockedProperties: ['tags'], tags: ['tag-1', 'tag-2'] },
lockedPropertiesBehavior: 'append',
}),
);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
{ assetId: 'asset-3', lockedProperties: ['tags'], tags: ['tag-1', 'tag-2'] },
{ lockedPropertiesBehavior: 'append' },
expect.objectContaining({
exif: { assetId: 'asset-3', lockedProperties: ['tags'], tags: ['tag-1', 'tag-2'] },
lockedPropertiesBehavior: 'append',
}),
);
expect(mocks.tag.upsertAssetIds).toHaveBeenCalledWith([
{ tagId: 'tag-1', assetId: 'asset-1' },
@@ -255,12 +261,16 @@ describe(TagService.name, () => {
]);
expect(mocks.asset.upsertExif).not.toHaveBeenCalledWith(
{ assetId: 'asset-1', lockedProperties: ['tags'], tags: ['tag-1'] },
{ lockedPropertiesBehavior: 'append' },
expect.objectContaining({
exif: { assetId: 'asset-1', lockedProperties: ['tags'], tags: ['tag-1'] },
lockedPropertiesBehavior: 'append',
}),
);
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
{ assetId: 'asset-2', lockedProperties: ['tags'], tags: ['tag-1'] },
{ lockedPropertiesBehavior: 'append' },
expect.objectContaining({
exif: { assetId: 'asset-2', lockedProperties: ['tags'], tags: ['tag-1'] },
lockedPropertiesBehavior: 'append',
}),
);
expect(mocks.tag.getAssetIds).toHaveBeenCalledWith('tag-1', ['asset-1', 'asset-2']);
expect(mocks.tag.addAssetIds).toHaveBeenCalledWith('tag-1', ['asset-2']);

View File

@@ -152,7 +152,8 @@ export class TagService extends BaseService {
private async updateTags(assetId: string) {
const { tags } = await this.assetRepository.getForUpdateTags(assetId);
await this.assetRepository.upsertExif(updateLockedColumns({ assetId, tags: tags.map(({ value }) => value) }), {
await this.assetRepository.upsertExif({
exif: updateLockedColumns({ assetId, tags: tags.map(({ value }) => value) }),
lockedPropertiesBehavior: 'append',
});
}

View File

@@ -7,9 +7,18 @@ import { AuthDto } from 'src/dtos/auth.dto';
import { AssetEditActionItem } from 'src/dtos/editing.dto';
import { SetMaintenanceModeDto } from 'src/dtos/maintenance.dto';
import {
AacProfile,
AssetOrder,
AssetType,
Av1Profile,
ColorMatrix,
ColorPrimaries,
ColorTransfer,
DvProfile,
DvSignalCompatibility,
ExifOrientation,
H264Profile,
HevcProfile,
ImageFormat,
JobName,
MemoryType,
@@ -81,21 +90,44 @@ export interface VideoStreamInfo {
width: number;
rotation: number;
codecName?: string;
profile?: H264Profile | HevcProfile | Av1Profile;
level?: number;
frameCount: number;
isHDR: boolean;
frameRate?: number;
timeBase?: number;
bitrate: number;
pixelFormat: string;
colorPrimaries?: string;
colorSpace?: string;
colorTransfer?: string;
colorPrimaries: ColorPrimaries;
colorMatrix: ColorMatrix;
colorTransfer: ColorTransfer;
dvProfile?: DvProfile;
dvLevel?: number;
dvBlSignalCompatibilityId?: DvSignalCompatibility;
}
export interface AudioStreamInfo {
index: number;
codecName?: string;
profile?: AacProfile;
bitrate: number;
}
/** Packet-derived video data needed for accurate HLS playlists. */
export interface VideoPacketInfo {
  /** Sum of source packet duration across all packets (includes discard). */
  totalDuration: number;
  /** Post-discard packet count. */
  packetCount: number;
  /** Output CFR frame count at `packetCount / format.duration`. */
  outputFrames: number;
  /** All keyframe PTS in source ticks, including pre-roll discard keyframes. */
  keyframePts: number[];
  /** Cumulative packet duration through each keyframe, inclusive; parallel to `keyframePts`. */
  keyframeAccDuration: number[];
  /** Each keyframe's own packet duration (needed for VFR); parallel to `keyframePts`. */
  keyframeOwnDuration: number[];
}
export interface VideoFormat {
formatName?: string;
formatLongName?: string;
@@ -144,7 +176,7 @@ export interface VideoCodecSWConfig {
getCommand(
target: TranscodeTarget,
videoStream: VideoStreamInfo,
audioStream: AudioStreamInfo,
audioStream?: AudioStreamInfo,
format?: VideoFormat,
): TranscodeCommand;
}

View File

@@ -17,11 +17,11 @@ import { jsonArrayFrom, jsonObjectFrom } from 'kysely/helpers/postgres';
import { Notice, PostgresError } from 'postgres';
import { columns, lockableProperties, LockableProperty, Person } from 'src/database';
import { AssetEditActionItem } from 'src/dtos/editing.dto';
import { AssetFileType, AssetVisibility, DatabaseExtension } from 'src/enum';
import { AssetFileType, AssetVisibility, DatabaseExtension, ExifOrientation } from 'src/enum';
import { AssetSearchBuilderOptions } from 'src/repositories/search.repository';
import { DB } from 'src/schema';
import { AssetExifTable } from 'src/schema/tables/asset-exif.table';
import { VectorExtension } from 'src/types';
import { AudioStreamInfo, VectorExtension, VideoFormat, VideoStreamInfo } from 'src/types';
export const getKyselyConfig = (connection: DatabaseConnectionParams): KyselyConfig => {
return {
@@ -99,6 +99,79 @@ export function withExifInner<O>(qb: SelectQueryBuilder<DB, 'asset', O>) {
.$narrowType<{ exifInfo: NotNull }>();
}
export const dummy = sql`(select 1)`.as('dummy');
/**
 * Builds a JSON-object subquery exposing the joined `asset_audio` row as an
 * {@link AudioStreamInfo} (index, codecName, profile, bitrate).
 * The `assetId is not null` guard makes the subquery yield no row — and thus
 * JSON `null` — for assets without an audio row.
 * NOTE(review): assumes the outer query has already left-joined `asset_audio`;
 * confirm at call sites.
 */
export function withAudioStream(eb: ExpressionBuilder<DB, 'asset_exif' | 'asset_audio'>) {
  return jsonObjectFrom(
    eb
      .selectFrom(dummy)
      .select(['asset_audio.index', 'asset_audio.codecName', 'asset_audio.profile', 'asset_audio.bitrate'])
      .where('asset_audio.assetId', 'is not', sql.lit(null))
      .$castTo<AudioStreamInfo | null>(),
  );
}
/**
 * Builds a JSON-object subquery that reassembles a {@link VideoStreamInfo}
 * from the joined `asset_video` and `asset_exif` rows; yields JSON `null`
 * when the asset has no `asset_video` row (via the `assetId is not null`
 * guard). Width/height/fps come from the EXIF columns rather than the video
 * table, so they reflect whatever metadata extraction stored there.
 */
export function withVideoStream(eb: ExpressionBuilder<DB, 'asset_exif' | 'asset_video'>) {
  return jsonObjectFrom(
    eb
      .selectFrom(dummy)
      .select((eb) => [
        'asset_video.index',
        'asset_video.codecName',
        'asset_video.profile',
        'asset_video.level',
        'asset_video.bitrate',
        'asset_exif.exifImageWidth as width',
        'asset_exif.exifImageHeight as height',
        'asset_video.pixelFormat',
        'asset_video.frameCount',
        'asset_exif.fps as frameRate',
        'asset_video.timeBase',
        // Map the stored EXIF orientation string back to a rotation angle:
        // Rotate90CW -> -90, Rotate270CW -> 90, Rotate180 -> 180, else 0.
        eb
          .case()
          .when('asset_exif.orientation', '=', sql.lit(ExifOrientation.Rotate90CW.toString()))
          .then(sql.lit(-90))
          .when('asset_exif.orientation', '=', sql.lit(ExifOrientation.Rotate270CW.toString()))
          .then(sql.lit(90))
          .when('asset_exif.orientation', '=', sql.lit(ExifOrientation.Rotate180.toString()))
          .then(sql.lit(180))
          .else(0)
          .end()
          .as('rotation'),
        'asset_video.colorPrimaries',
        'asset_video.colorMatrix',
        'asset_video.colorTransfer',
        'asset_video.dvProfile',
        'asset_video.dvLevel',
        'asset_video.dvBlSignalCompatibilityId',
      ])
      // Collapse to JSON null when there is no video row for this asset.
      .where('asset_video.assetId', 'is not', sql.lit(null)),
    // timeBase is non-nullable whenever the row exists, hence the narrowed cast.
  ).$castTo<(VideoStreamInfo & { timeBase: NotNull }) | null>();
}
/**
 * Builds a JSON-object subquery exposing container-level info as a
 * {@link VideoFormat} (formatName, formatLongName, duration in ms, bitrate);
 * yields JSON `null` when the asset has no `asset_video` row.
 * `asset.duration` is stored as an `HH:MM:SS.mmm` string, so it is parsed in
 * SQL into milliseconds; anything not matching that exact pattern becomes 0.
 */
export function withVideoFormat(eb: ExpressionBuilder<DB, 'asset' | 'asset_video'>) {
  return jsonObjectFrom(
    eb
      .selectFrom(dummy)
      .select([
        'asset_video.formatName',
        'asset_video.formatLongName',
        // TODO: simplify after https://github.com/immich-app/immich/pull/28003
        eb
          .case()
          // Only parse durations that match HH:MM:SS.mmm exactly.
          .when('asset.duration', '~', sql<string>`'^\\d{2}:\\d{2}:\\d{2}\\.\\d{3}$'`)
          .then(
            // hours*3_600_000 + minutes*60_000 + seconds*1000 + millis
            sql<number>`substr(asset.duration, 1, 2)::int * 3600000 + substr(asset.duration, 4, 2)::int * 60000 + substr(asset.duration, 7, 2)::int * 1000 + substr(asset.duration, 10, 3)::int`,
          )
          .else(sql.lit(0))
          .end()
          .as('duration'),
        'asset_video.bitrate',
      ])
      // Collapse to JSON null when there is no video row for this asset.
      .where('asset_video.assetId', 'is not', sql.lit(null)),
  ).$castTo<VideoFormat | null>();
}
export function withSmartSearch<O>(qb: SelectQueryBuilder<DB, 'asset', O>) {
return qb
.leftJoin('smart_search', 'asset.id', 'smart_search.assetId')
@@ -455,5 +528,3 @@ export const updateLockedColumns = <T extends Record<string, unknown> & { locked
exif.lockedProperties = lockableProperties.filter((property) => property in exif);
return exif;
};
export const dummy = sql`(select 1)`.as('dummy');

View File

@@ -1,6 +1,15 @@
import { AUDIO_ENCODER } from 'src/constants';
import { SystemConfigFFmpegDto } from 'src/dtos/system-config.dto';
import { CQMode, ToneMapping, TranscodeHardwareAcceleration, TranscodeTarget, VideoCodec } from 'src/enum';
import {
ColorMatrix,
ColorPrimaries,
ColorTransfer,
CQMode,
ToneMapping,
TranscodeHardwareAcceleration,
TranscodeTarget,
VideoCodec,
} from 'src/enum';
import {
AudioStreamInfo,
BitrateDistribution,
@@ -255,7 +264,10 @@ export class BaseConfig implements VideoCodecSWConfig {
}
shouldToneMap(videoStream: VideoStreamInfo) {
return videoStream.isHDR && this.config.tonemap !== ToneMapping.Disabled;
return (
this.config.tonemap !== ToneMapping.Disabled &&
(videoStream.colorTransfer === ColorTransfer.Smpte2084 || videoStream.colorTransfer === ColorTransfer.AribStdB67)
);
}
getScaling(videoStream: VideoStreamInfo, mult = 2) {
@@ -409,15 +421,15 @@ export class ThumbnailConfig extends BaseConfig {
: ['-skip_frame', 'nointra', '-sws_flags', 'accurate_rnd+full_chroma_int'];
const metadataOverrides = [];
if (videoStream.colorPrimaries === 'reserved') {
if (videoStream.colorPrimaries === ColorPrimaries.Reserved) {
metadataOverrides.push('colour_primaries=1');
}
if (videoStream.colorSpace === 'reserved') {
if (videoStream.colorMatrix === ColorMatrix.Reserved) {
metadataOverrides.push('matrix_coefficients=1');
}
if (videoStream.colorTransfer === 'reserved') {
if (videoStream.colorTransfer === ColorTransfer.Reserved) {
metadataOverrides.push('transfer_characteristics=1');
}

View File

@@ -1,3 +1,5 @@
import { NotNull } from 'kysely';
import { ColorMatrix, ColorPrimaries, ColorTransfer, DvProfile, DvSignalCompatibility } from 'src/enum';
import { AudioStreamInfo, VideoFormat, VideoInfo, VideoStreamInfo } from 'src/types';
const probeStubDefaultFormat: VideoFormat = {
@@ -15,9 +17,12 @@ const probeStubDefaultVideoStream: VideoStreamInfo[] = [
codecName: 'hevc',
frameCount: 100,
rotation: 0,
isHDR: false,
bitrate: 0,
colorPrimaries: ColorPrimaries.Bt709,
colorTransfer: ColorTransfer.Bt709,
colorMatrix: ColorMatrix.Bt709,
pixelFormat: 'yuv420p',
timeBase: 600,
},
];
@@ -29,23 +34,13 @@ const probeStubDefault: VideoInfo = {
audioStreams: probeStubDefaultAudioStream,
};
export const probeStub = {
/** Fixtures in the shape `mediaRepository.probe()` returns (arrays of streams, raw ffprobe format). */
export const videoInfoStub = {
noVideoStreams: Object.freeze<VideoInfo>({ ...probeStubDefault, videoStreams: [] }),
noAudioStreams: Object.freeze<VideoInfo>({ ...probeStubDefault, audioStreams: [] }),
multipleVideoStreams: Object.freeze<VideoInfo>({
...probeStubDefault,
videoStreams: [
{
index: 0,
height: 1080,
width: 400,
codecName: 'hevc',
frameCount: 1,
rotation: 0,
isHDR: false,
bitrate: 100,
pixelFormat: 'yuv420p',
},
{
index: 1,
height: 1080,
@@ -53,9 +48,26 @@ export const probeStub = {
codecName: 'hevc',
frameCount: 2,
rotation: 0,
isHDR: false,
bitrate: 101,
colorPrimaries: ColorPrimaries.Bt709,
colorTransfer: ColorTransfer.Bt709,
colorMatrix: ColorMatrix.Bt709,
pixelFormat: 'yuv420p',
timeBase: 600,
},
{
index: 0,
height: 1080,
width: 400,
codecName: 'hevc',
frameCount: 1,
rotation: 0,
bitrate: 100,
colorPrimaries: ColorPrimaries.Bt709,
colorTransfer: ColorTransfer.Bt709,
colorMatrix: ColorMatrix.Bt709,
pixelFormat: 'yuv420p',
timeBase: 600,
},
{
index: 2,
@@ -64,18 +76,21 @@ export const probeStub = {
codecName: 'h7000',
frameCount: 3,
rotation: 0,
isHDR: false,
bitrate: 99,
colorPrimaries: ColorPrimaries.Bt709,
colorTransfer: ColorTransfer.Bt709,
colorMatrix: ColorMatrix.Bt709,
pixelFormat: 'yuv420p',
timeBase: 600,
},
],
}),
multipleAudioStreams: Object.freeze<VideoInfo>({
...probeStubDefault,
audioStreams: [
{ index: 0, codecName: 'mp3', bitrate: 100 },
{ index: 1, codecName: 'mp3', bitrate: 101 },
{ index: 2, codecName: 'mp3', bitrate: 102 },
{ index: 1, codecName: 'mp3', bitrate: 101 },
{ index: 0, codecName: 'mp3', bitrate: 100 },
],
}),
noHeight: Object.freeze<VideoInfo>({
@@ -88,9 +103,12 @@ export const probeStub = {
codecName: 'hevc',
frameCount: 100,
rotation: 0,
isHDR: false,
bitrate: 0,
colorPrimaries: ColorPrimaries.Bt709,
colorTransfer: ColorTransfer.Bt709,
colorMatrix: ColorMatrix.Bt709,
pixelFormat: 'yuv420p',
timeBase: 600,
},
],
}),
@@ -104,9 +122,12 @@ export const probeStub = {
codecName: 'h264',
frameCount: 100,
rotation: 0,
isHDR: false,
bitrate: 0,
colorPrimaries: ColorPrimaries.Bt709,
colorTransfer: ColorTransfer.Bt709,
colorMatrix: ColorMatrix.Bt709,
pixelFormat: 'yuv420p',
timeBase: 600,
},
],
}),
@@ -117,8 +138,10 @@ export const probeStub = {
videoStreamMTS: Object.freeze<VideoInfo>({
...probeStubDefault,
format: {
...probeStubDefaultFormat,
formatName: 'mpegts',
formatLongName: 'MPEG-TS (MPEG-2 Transport Stream)',
duration: 0,
bitrate: 0,
},
}),
videoStreamHDR: Object.freeze<VideoInfo>({
@@ -131,9 +154,12 @@ export const probeStub = {
codecName: 'h264',
frameCount: 100,
rotation: 0,
isHDR: true,
colorPrimaries: ColorPrimaries.Bt2020,
colorMatrix: ColorMatrix.Bt2020Nc,
colorTransfer: ColorTransfer.Smpte2084,
bitrate: 0,
pixelFormat: 'yuv420p10le',
timeBase: 600,
},
],
}),
@@ -147,9 +173,12 @@ export const probeStub = {
codecName: 'h264',
frameCount: 100,
rotation: 0,
isHDR: false,
bitrate: 0,
colorPrimaries: ColorPrimaries.Bt709,
colorTransfer: ColorTransfer.Bt709,
colorMatrix: ColorMatrix.Bt709,
pixelFormat: 'yuv420p10le',
timeBase: 600,
},
],
}),
@@ -163,9 +192,12 @@ export const probeStub = {
codecName: 'h264',
frameCount: 100,
rotation: 0,
isHDR: false,
bitrate: 0,
colorPrimaries: ColorPrimaries.Bt709,
colorTransfer: ColorTransfer.Bt709,
colorMatrix: ColorMatrix.Bt709,
pixelFormat: 'yuv420p10le',
timeBase: 600,
},
],
}),
@@ -179,9 +211,12 @@ export const probeStub = {
codecName: 'h264',
frameCount: 100,
rotation: 90,
isHDR: false,
bitrate: 0,
colorPrimaries: ColorPrimaries.Bt709,
colorTransfer: ColorTransfer.Bt709,
colorMatrix: ColorMatrix.Bt709,
pixelFormat: 'yuv420p',
timeBase: 600,
},
],
}),
@@ -195,9 +230,12 @@ export const probeStub = {
codecName: 'h264',
frameCount: 100,
rotation: 0,
isHDR: false,
bitrate: 0,
colorPrimaries: ColorPrimaries.Bt709,
colorTransfer: ColorTransfer.Bt709,
colorMatrix: ColorMatrix.Bt709,
pixelFormat: 'yuv420p',
timeBase: 600,
},
],
}),
@@ -211,9 +249,12 @@ export const probeStub = {
codecName: 'h264',
frameCount: 100,
rotation: 0,
isHDR: false,
bitrate: 0,
colorPrimaries: ColorPrimaries.Bt709,
colorTransfer: ColorTransfer.Bt709,
colorMatrix: ColorMatrix.Bt709,
pixelFormat: 'yuv420p',
timeBase: 600,
},
],
}),
@@ -274,10 +315,95 @@ export const probeStub = {
videoStreams: [
{
...probeStubDefaultVideoStream[0],
colorPrimaries: 'reserved',
colorSpace: 'reserved',
colorTransfer: 'reserved',
colorPrimaries: ColorPrimaries.Reserved,
colorMatrix: ColorMatrix.Reserved,
colorTransfer: ColorTransfer.Reserved,
},
],
}),
videoStreamHDR10: Object.freeze<VideoInfo>({
...probeStubDefault,
videoStreams: [
{
index: 0,
height: 2160,
width: 3840,
codecName: 'hevc',
profile: 2,
level: 153,
frameCount: 1208,
frameRate: 59.94,
rotation: 0,
bitrate: 64_000_000,
pixelFormat: 'yuv420p10le',
colorPrimaries: ColorPrimaries.Bt2020,
colorMatrix: ColorMatrix.Bt2020Nc,
colorTransfer: ColorTransfer.Smpte2084,
timeBase: 600,
},
],
}),
videoStreamDolbyVision: Object.freeze<VideoInfo>({
...probeStubDefault,
videoStreams: [
{
index: 0,
height: 2160,
width: 3840,
codecName: 'hevc',
profile: 2,
level: 153,
frameCount: 1299,
frameRate: 59.94,
rotation: 0,
bitrate: 53_500_000,
pixelFormat: 'yuv420p10le',
colorPrimaries: ColorPrimaries.Bt2020,
colorMatrix: ColorMatrix.Bt2020Nc,
colorTransfer: ColorTransfer.AribStdB67,
dvProfile: DvProfile.Dvhe08,
dvLevel: 10,
dvBlSignalCompatibilityId: DvSignalCompatibility.Hlg,
timeBase: 600,
},
],
}),
videoStreamWithProfileLevel: Object.freeze<VideoInfo>({
...probeStubDefault,
videoStreams: [
{
...probeStubDefaultVideoStream[0],
codecName: 'h264',
profile: 100,
level: 40,
},
],
}),
audioStreamAAC: Object.freeze<VideoInfo>({
...probeStubDefault,
audioStreams: [
{
index: 1,
codecName: 'aac',
profile: 2,
bitrate: 128_000,
},
],
}),
};
/** The single-stream shape consumed by services, as opposed to the raw multi-stream probe output. */
interface SelectedStreams {
  videoStream: VideoStreamInfo & { timeBase: NotNull };
  audioStream: AudioStreamInfo | null;
  format: VideoFormat;
}

// Picks the first stream of each kind from a probe fixture.
// NOTE(review): videoStreams[0] ?? null can be null (e.g. the noVideoStreams fixture),
// but the `as Record<..., SelectedStreams>` cast below presents videoStream as
// non-nullable — confirm tests using those fixtures expect this.
const toSelectedStreams = (info: VideoInfo) => ({
  videoStream: info.videoStreams[0] ?? null,
  audioStream: info.audioStreams[0] ?? null,
  format: info.format,
});

/** Stream-level fixtures derived from videoInfoStub, keyed by the same fixture names. */
export const probeStub = Object.fromEntries(
  Object.entries(videoInfoStub).map(([key, info]) => [key, toSelectedStreams(info)]),
) as Record<keyof typeof videoInfoStub, SelectedStreams>;

View File

@@ -1,9 +1,10 @@
import { Selectable, ShallowDehydrateObject } from 'kysely';
import { NotNull, Selectable, ShallowDehydrateObject } from 'kysely';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AssetEditActionItem } from 'src/dtos/editing.dto';
import { ActivityTable } from 'src/schema/tables/activity.table';
import { AssetTable } from 'src/schema/tables/asset.table';
import { PartnerTable } from 'src/schema/tables/partner.table';
import { AudioStreamInfo, VideoFormat, VideoStreamInfo } from 'src/types';
import { AlbumFactory } from 'test/factories/album.factory';
import { AssetFaceFactory } from 'test/factories/asset-face.factory';
import { AssetFactory } from 'test/factories/asset.factory';
@@ -155,6 +156,9 @@ export const getForGenerateThumbnail = (asset: ReturnType<AssetFactory['build']>
files: asset.files.map((file) => getDehydrated(file)),
exifInfo: getDehydrated(asset.exifInfo),
edits: asset.edits.map(({ action, parameters }) => ({ action, parameters })) as AssetEditActionItem[],
videoStream: null as (VideoStreamInfo & { timeBase: NotNull }) | null,
audioStream: null as AudioStreamInfo | null,
format: null as VideoFormat | null,
});
export const getForAssetFace = (face: ReturnType<AssetFaceFactory['build']>) => ({

View File

@@ -35,6 +35,7 @@ import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { MachineLearningRepository } from 'src/repositories/machine-learning.repository';
import { MapRepository } from 'src/repositories/map.repository';
import { MediaRepository } from 'src/repositories/media.repository';
import { MemoryRepository } from 'src/repositories/memory.repository';
import { MetadataRepository } from 'src/repositories/metadata.repository';
import { NotificationRepository } from 'src/repositories/notification.repository';
@@ -218,7 +219,7 @@ export class MediumTestContext<S extends BaseService = BaseService> {
}
async newExif(dto: Insertable<AssetExifTable>) {
const result = await this.get(AssetRepository).upsertExif(dto, { lockedPropertiesBehavior: 'override' });
const result = await this.get(AssetRepository).upsertExif({ exif: dto, lockedPropertiesBehavior: 'override' });
return { result };
}
@@ -362,7 +363,14 @@ export class ExifTestContext extends MediumTestContext<MetadataService> {
constructor(database: Kysely<DB>) {
super(MetadataService, {
database,
real: [AssetRepository, AssetJobRepository, MetadataRepository, SystemMetadataRepository, TagRepository],
real: [
AssetRepository,
AssetJobRepository,
MediaRepository,
MetadataRepository,
SystemMetadataRepository,
TagRepository,
],
mock: [ConfigRepository, EventRepository, LoggingRepository, MapRepository, StorageRepository],
});
@@ -445,6 +453,7 @@ const newRealRepository = <T>(key: ClassConstructor<T>, db: Kysely<DB>): T => {
return new key(LoggingRepository.create());
}
case MediaRepository:
case MetadataRepository: {
return new key(LoggingRepository.create());
}

View File

@@ -0,0 +1,142 @@
import { Kysely } from 'kysely';
import { resolve } from 'node:path';
import {
AacProfile,
AssetType,
ColorMatrix,
ColorPrimaries,
ColorTransfer,
DvProfile,
DvSignalCompatibility,
H264Profile,
HevcProfile,
} from 'src/enum';
import { DB } from 'src/schema';
import { ExifTestContext, testAssetsDir } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
// Shared database connection for every test in this file; opened once up front.
let database: Kysely<DB>;
beforeAll(async () => {
  database = await getKyselyDB();
});
// Expected database rows (asset_video, asset_audio, asset_keyframe) for each test
// asset under test-assets/videos. Values were captured from ffprobe output for the
// pinned test-assets commit; they must be updated together with those files.
const fixtures = [
  // SDR H.264 in an MP4 container with AAC-LC audio.
  {
    file: 'eiffel-tower.mp4',
    video: {
      codecName: 'h264',
      formatName: 'mov,mp4,m4a,3gp,3g2,mj2',
      formatLongName: 'QuickTime / MOV',
      pixelFormat: 'yuv420p',
      bitrate: 5_128_622,
      frameCount: 557,
      timeBase: 90_000,
      index: 0,
      profile: H264Profile.High,
      level: 40,
      colorPrimaries: ColorPrimaries.Smpte170M,
      colorTransfer: ColorTransfer.Smpte170M,
      colorMatrix: ColorMatrix.Smpte170M,
      dvProfile: null,
      dvLevel: null,
      dvBlSignalCompatibilityId: null,
    },
    audio: { codecName: 'aac', bitrate: 125_629, index: 1, profile: AacProfile.Lc },
    keyframes: {
      totalDuration: 2_012_441,
      packetCount: 557,
      outputFrames: 557,
      pts: [0, 462_502, 925_004, 1_210_454, 1_387_506, 1_542_878, 1_850_008],
      accDuration: [3613, 466_077, 928_541, 1_213_968, 1_391_005, 1_546_364, 1_853_469],
      ownDuration: [3613, 3613, 3613, 3613, 3613, 3613, 3613],
    },
  },
  // 8-bit HEVC (Main) with full-range yuvj420p; audio stream has no profile.
  {
    file: 'waterfall.mp4',
    video: {
      codecName: 'hevc',
      formatName: 'mov,mp4,m4a,3gp,3g2,mj2',
      formatLongName: 'QuickTime / MOV',
      pixelFormat: 'yuvj420p',
      bitrate: 43_363_499,
      frameCount: 309,
      timeBase: 90_000,
      index: 2,
      profile: HevcProfile.Main,
      level: 156,
      colorPrimaries: ColorPrimaries.Bt709,
      colorTransfer: ColorTransfer.Bt709,
      colorMatrix: ColorMatrix.Bt709,
      dvProfile: null,
      dvLevel: null,
      dvBlSignalCompatibilityId: null,
    },
    audio: { codecName: 'aac', bitrate: 191_878, index: 1, profile: null },
    keyframes: {
      totalDuration: 932_286,
      packetCount: 309,
      outputFrames: 309,
      pts: [0, 89_987, 179_974, 269_961, 359_948, 449_936, 539_923, 629_910, 725_166, 815_273, 905_295],
      accDuration: [2999, 92_987, 182_974, 272_961, 362_948, 452_934, 542_922, 632_909, 728_175, 818_274, 908_296],
      ownDuration: [2999, 3000, 3000, 3000, 3000, 2998, 2999, 2999, 3009, 3001, 3001],
    },
  },
  // 10-bit HLG HEVC (Main 10) with Dolby Vision profile 8 metadata; note
  // outputFrames differs from packetCount for this file.
  {
    file: 'train.mov',
    video: {
      codecName: 'hevc',
      formatName: 'mov,mp4,m4a,3gp,3g2,mj2',
      formatLongName: 'QuickTime / MOV',
      pixelFormat: 'yuv420p10le',
      bitrate: 12_595_191,
      frameCount: 1229,
      timeBase: 600,
      index: 0,
      profile: HevcProfile.Main10,
      level: 123,
      colorPrimaries: ColorPrimaries.Bt2020,
      colorTransfer: ColorTransfer.AribStdB67,
      colorMatrix: ColorMatrix.Bt2020Nc,
      dvProfile: DvProfile.Dvhe08,
      dvLevel: 5,
      dvBlSignalCompatibilityId: DvSignalCompatibility.Hlg,
    },
    audio: { codecName: 'aac', bitrate: 175_477, index: 1, profile: AacProfile.Lc },
    keyframes: {
      totalDuration: 12_290,
      packetCount: 1229,
      outputFrames: 1303,
      pts: [
        0, 601, 1201, 1802, 2402, 3003, 3604, 4204, 4805, 5405, 6006, 6607, 7207, 7808, 8408, 9009, 9609, 10_210,
        10_811, 11_411, 12_062, 12_703,
      ],
      accDuration: [
        10, 580, 1180, 1780, 2380, 2980, 3580, 4180, 4780, 5380, 5980, 6580, 7180, 7780, 8380, 8980, 9580, 10_180,
        10_780, 11_380, 11_780, 12_100,
      ],
      ownDuration: [10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10],
    },
  },
];
// Asserts that table `name` holds exactly one row for `id` matching `expected`
// (the assetId column is filled in automatically).
const isExpected = async <T extends keyof DB>(name: T, id: string, expected: Omit<DB[T], 'assetId'>) => {
  const { table, ref } = database.dynamic;
  const row = await database
    .selectFrom(table(name).as('t'))
    .selectAll()
    .where(ref('assetId'), '=', id)
    .executeTakeFirst();
  expect(row).toEqual({ ...expected, assetId: id });
};
describe('video metadata extraction', () => {
  // End-to-end over real files: register an asset pointing at a test video, run the
  // metadata-extraction job, then verify the persisted audio/video/keyframe rows.
  it.each(fixtures)('$file', async ({ file, video, audio, keyframes }) => {
    const ctx = new ExifTestContext(database);
    const { user } = await ctx.newUser();
    const originalPath = resolve(testAssetsDir, 'videos', file);
    const { asset } = await ctx.newAsset({ ownerId: user.id, originalPath, type: AssetType.Video });
    await ctx.sut.handleMetadataExtraction({ id: asset.id });
    await isExpected('asset_audio', asset.id, audio);
    await isExpected('asset_video', asset.id, video);
    await isExpected('asset_keyframe', asset.id, keyframes);
  });
});

View File

@@ -98,10 +98,10 @@ describe(AssetRepository.name, () => {
.executeTakeFirstOrThrow(),
).resolves.toEqual({ lockedProperties: ['dateTimeOriginal'] });
await sut.upsertExif(
{ assetId: asset.id, lockedProperties: ['description'] },
{ lockedPropertiesBehavior: 'append' },
);
await sut.upsertExif({
exif: { assetId: asset.id, lockedProperties: ['description'] },
lockedPropertiesBehavior: 'append',
});
await expect(
ctx.database
@@ -130,10 +130,10 @@ describe(AssetRepository.name, () => {
.executeTakeFirstOrThrow(),
).resolves.toEqual({ lockedProperties: ['dateTimeOriginal', 'description'] });
await sut.upsertExif(
{ assetId: asset.id, lockedProperties: ['description'] },
{ lockedPropertiesBehavior: 'append' },
);
await sut.upsertExif({
exif: { assetId: asset.id, lockedProperties: ['description'] },
lockedPropertiesBehavior: 'append',
});
await expect(
ctx.database

View File

@@ -289,13 +289,13 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
// update the asset
const assetRepository = ctx.get(AssetRepository);
await assetRepository.upsertExif(
updateLockedColumns({
await assetRepository.upsertExif({
exif: updateLockedColumns({
assetId: asset.id,
city: 'New City',
}),
{ lockedPropertiesBehavior: 'append' },
);
lockedPropertiesBehavior: 'append',
});
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([
{
@@ -350,13 +350,13 @@ describe(SyncRequestType.AlbumAssetExifsV1, () => {
// update the asset
const assetRepository = ctx.get(AssetRepository);
await assetRepository.upsertExif(
updateLockedColumns({
await assetRepository.upsertExif({
exif: updateLockedColumns({
assetId: assetDelayedExif.id,
city: 'Delayed Exif',
}),
{ lockedPropertiesBehavior: 'append' },
);
lockedPropertiesBehavior: 'append',
});
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([
{

View File

@@ -11,6 +11,14 @@ export const newMediaRepositoryMock = (): Mocked<RepositoryInterface<MediaReposi
decodeImage: vitest.fn().mockResolvedValue({ data: Buffer.from(''), info: {} }),
extract: vitest.fn().mockResolvedValue(null),
probe: vitest.fn(),
probePackets: vitest.fn().mockResolvedValue({
totalDuration: 0,
packetCount: 0,
outputFrames: 0,
keyframePts: [],
keyframeAccDuration: [],
keyframeOwnDuration: [],
}),
transcode: vitest.fn(),
getImageMetadata: vitest.fn(),
};