import { Injectable } from '@nestjs/common';
import { SystemConfig } from 'src/config';
import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
import { OnEvent, OnJob } from 'src/decorators';
import { DatabaseLock, ImmichWorker, JobName, JobStatus, QueueName } from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import { JobItem, JobOf } from 'src/types';
import { getCLIPModelInfo, isSmartSearchEnabled } from 'src/utils/misc';

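// SmartInfoService keeps CLIP-based smart search healthy: it validates the configured CLIP
// model, keeps the database embedding dimension in sync with that model, and runs the jobs
// that queue and generate CLIP embeddings for assets.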
@Injectable()
export class SmartInfoService extends BaseService {
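  // Runs once in the microservices worker when the system config is first loaded.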
  @OnEvent({ name: 'config.init', workers: [ImmichWorker.MICROSERVICES] })
  async onConfigInit({ newConfig }: ArgOf<'config.init'>) {
    await this.init(newConfig);
  }

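  // Re-runs initialization whenever the config changes, passing both the old and new
  // values so a model change can be detected.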
  @OnEvent({ name: 'config.update', workers: [ImmichWorker.MICROSERVICES], server: true })
  async onConfigUpdate({ oldConfig, newConfig }: ArgOf<'config.update'>) {
    await this.init(newConfig, oldConfig);
  }

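  // Rejects a config update that names a CLIP model getCLIPModelInfo does not recognize.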
  @OnEvent({ name: 'config.validate' })
  onConfigValidate({ newConfig }: ArgOf<'config.validate'>) {
    try {
      getCLIPModelInfo(newConfig.machineLearning.clip.modelName);
    } catch {
      throw new Error(
        `Unknown CLIP model: ${newConfig.machineLearning.clip.modelName}. Please check the model name for typos and confirm this is a supported model.`,
      );
    }
  }

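  // Ensures the database embedding column matches the configured CLIP model. If the model
  // changed but its dimension did not, existing embeddings are deleted instead of resized.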
  private async init(newConfig: SystemConfig, oldConfig?: SystemConfig) {
    if (!isSmartSearchEnabled(newConfig.machineLearning)) {
      return;
    }

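    // Take the CLIPDimSize lock so only one worker at a time inspects or changes the
    // embedding dimension; handleEncodeClip waits on this lock before writing embeddings.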
    await this.databaseRepository.withLock(DatabaseLock.CLIPDimSize, async () => {
      const { dimSize } = getCLIPModelInfo(newConfig.machineLearning.clip.modelName);
      const dbDimSize = await this.searchRepository.getDimensionSize();
      this.logger.verbose(`Current database CLIP dimension size is ${dbDimSize}`);

      const modelChange =
        oldConfig && oldConfig.machineLearning.clip.modelName !== newConfig.machineLearning.clip.modelName;
      const dimSizeChange = dbDimSize !== dimSize;
      if (!modelChange && !dimSizeChange) {
        return;
      }

      if (dimSizeChange) {
        this.logger.log(
          `Dimension size of model ${newConfig.machineLearning.clip.modelName} is ${dimSize}, but database expects ${dbDimSize}.`,
        );
        this.logger.log(`Updating database CLIP dimension size to ${dimSize}.`);
        await this.searchRepository.setDimensionSize(dimSize);
        this.logger.log(`Successfully updated database CLIP dimension size from ${dbDimSize} to ${dimSize}.`);
      } else {
        await this.searchRepository.deleteAllSearchEmbeddings();
      }

      // TODO: A job to reindex all assets should be scheduled, though user
      // confirmation should probably be requested before doing that.
    });
  }

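  // Queues a SMART_SEARCH job for every asset that needs a CLIP embedding, submitting the
  // jobs in batches rather than holding the whole set in memory.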
  @OnJob({ name: JobName.QUEUE_SMART_SEARCH, queue: QueueName.SMART_SEARCH })
  async handleQueueEncodeClip({ force }: JobOf<JobName.QUEUE_SMART_SEARCH>): Promise<JobStatus> {
    const { machineLearning } = await this.getConfig({ withCache: false });
    if (!isSmartSearchEnabled(machineLearning)) {
      return JobStatus.SKIPPED;
    }

    if (force) {
      const { dimSize } = getCLIPModelInfo(machineLearning.clip.modelName);
      // in addition to deleting embeddings, update the dimension size in case it failed earlier
      await this.searchRepository.setDimensionSize(dimSize);
    }

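    // Stream candidate assets and enqueue them in batches of JOBS_ASSET_PAGINATION_SIZE.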
    let queue: JobItem[] = [];
    const assets = this.assetJobRepository.streamForEncodeClip(force);
    for await (const asset of assets) {
      queue.push({ name: JobName.SMART_SEARCH, data: { id: asset.id } });
      if (queue.length >= JOBS_ASSET_PAGINATION_SIZE) {
        await this.jobRepository.queueAll(queue);
        queue = [];
      }
    }

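    // Flush any remaining jobs that did not fill a complete batch.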
    await this.jobRepository.queueAll(queue);

    return JobStatus.SUCCESS;
  }

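  // Generates a CLIP embedding for a single asset via the machine learning service and
  // stores it in the search index.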
  @OnJob({ name: JobName.SMART_SEARCH, queue: QueueName.SMART_SEARCH })
  async handleEncodeClip({ id }: JobOf<JobName.SMART_SEARCH>): Promise<JobStatus> {
    const { machineLearning } = await this.getConfig({ withCache: true });
    if (!isSmartSearchEnabled(machineLearning)) {
      return JobStatus.SKIPPED;
    }

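    // The asset must exist, have exactly one file to encode, and be visible.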
    const asset = await this.assetJobRepository.getForClipEncoding(id);
    if (!asset || asset.files.length !== 1) {
      return JobStatus.FAILED;
    }

    if (!asset.isVisible) {
      return JobStatus.SKIPPED;
    }

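    // Send the asset's file to the machine learning service to compute the CLIP embedding.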
    const embedding = await this.machineLearningRepository.encodeImage(
      machineLearning.urls,
      asset.files[0].path,
      machineLearning.clip,
    );

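    // If another worker is currently changing the embedding dimension (see init), wait for
    // it to finish so the embedding is not written against a stale dimension.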
    if (this.databaseRepository.isBusy(DatabaseLock.CLIPDimSize)) {
      this.logger.verbose(`Waiting for CLIP dimension size to be updated`);
      await this.databaseRepository.wait(DatabaseLock.CLIPDimSize);
    }

    const newConfig = await this.getConfig({ withCache: true });
    if (machineLearning.clip.modelName !== newConfig.machineLearning.clip.modelName) {
      // Skip the job if the model has changed since the embedding was generated.
      return JobStatus.SKIPPED;
    }

    await this.searchRepository.upsert(asset.id, embedding);

    return JobStatus.SUCCESS;
  }
}