Mirror of https://github.com/immich-app/immich.git (synced 2025-12-06 04:41:40 -08:00)
Compare commits
22 Commits
| Author | SHA1 | Date |
| --- | --- | --- |
|  | 6fe214a784 |  |
|  | e18a9f84a4 |  |
|  | 59bb727636 |  |
|  | 20e0c03b39 |  |
|  | 6d1567cf44 |  |
|  | dc3f53a973 |  |
|  | dad7cf47b4 |  |
|  | 165b91b068 |  |
|  | 8211afb726 |  |
|  | 2cccef174a |  |
|  | 9bbef4a97b |  |
|  | 10c2bda3a9 |  |
|  | cf9e04c8ec |  |
|  | d6887117ac |  |
|  | 3b11be2859 |  |
|  | d7f52739e8 |  |
|  | 71ea46d95e |  |
|  | e2afc43506 |  |
|  | 6aed1180e7 |  |
|  | 476b735e3c |  |
|  | 7ad12c7f33 |  |
|  | 60729a091a |  |
.github/workflows/test.yml (vendored, 1 line changed)

@@ -171,6 +171,7 @@ jobs:
      - name: Install dependencies
        run: |
          poetry install --with dev
          poetry run pip install --no-deps -r requirements.txt
      - name: Lint with ruff
        run: |
          poetry run ruff check --format=github app
cli/src/api/open-api/api.ts (generated, 377 lines changed)

@@ -4,7 +4,7 @@
* Immich
* Immich API
*
* The version of the OpenAPI document: 1.74.0
* The version of the OpenAPI document: 1.75.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

@@ -752,6 +752,25 @@ export const AudioCodec = {
export type AudioCodec = typeof AudioCodec[keyof typeof AudioCodec];

/**
*
* @export
* @interface AuditDeletesResponseDto
*/
export interface AuditDeletesResponseDto {
/**
*
* @type {Array<string>}
* @memberof AuditDeletesResponseDto
*/
'ids': Array<string>;
/**
*
* @type {boolean}
* @memberof AuditDeletesResponseDto
*/
'needsFullSync': boolean;
}
/**
*
* @export

@@ -1243,6 +1262,20 @@ export interface DownloadResponseDto {
*/
'totalSize': number;
}
/**
*
* @export
* @enum {string}
*/
export const EntityType = {
Asset: 'ASSET',
Album: 'ALBUM'
} as const;

export type EntityType = typeof EntityType[keyof typeof EntityType];

/**
*
* @export

@@ -2033,19 +2066,6 @@ export interface SearchAssetResponseDto {
*/
'total': number;
}
/**
*
* @export
* @interface SearchConfigResponseDto
*/
export interface SearchConfigResponseDto {
/**
*
* @type {boolean}
* @memberof SearchConfigResponseDto
*/
'enabled': boolean;
}
/**
*
* @export

@@ -2152,7 +2172,19 @@ export interface ServerFeaturesDto {
* @type {boolean}
* @memberof ServerFeaturesDto
*/
'machineLearning': boolean;
'clipEncode': boolean;
/**
*
* @type {boolean}
* @memberof ServerFeaturesDto
*/
'configFile': boolean;
/**
*
* @type {boolean}
* @memberof ServerFeaturesDto
*/
'facialRecognition': boolean;
/**
*
* @type {boolean}

@@ -2177,6 +2209,18 @@ export interface ServerFeaturesDto {
* @memberof ServerFeaturesDto
*/
'search': boolean;
/**
*
* @type {boolean}
* @memberof ServerFeaturesDto
*/
'sidecar': boolean;
/**
*
* @type {boolean}
* @memberof ServerFeaturesDto
*/
'tagImage': boolean;
}
/**
*

@@ -2578,6 +2622,12 @@ export interface SystemConfigDto {
* @memberof SystemConfigDto
*/
'job': SystemConfigJobDto;
/**
*
* @type {SystemConfigMachineLearningDto}
* @memberof SystemConfigDto
*/
'machineLearning': SystemConfigMachineLearningDto;
/**
*
* @type {SystemConfigOAuthDto}

@@ -2745,6 +2795,43 @@ export interface SystemConfigJobDto {
*/
'videoConversion': JobSettingsDto;
}
/**
*
* @export
* @interface SystemConfigMachineLearningDto
*/
export interface SystemConfigMachineLearningDto {
/**
*
* @type {boolean}
* @memberof SystemConfigMachineLearningDto
*/
'clipEncodeEnabled': boolean;
/**
*
* @type {boolean}
* @memberof SystemConfigMachineLearningDto
*/
'enabled': boolean;
/**
*
* @type {boolean}
* @memberof SystemConfigMachineLearningDto
*/
'facialRecognitionEnabled': boolean;
/**
*
* @type {boolean}
* @memberof SystemConfigMachineLearningDto
*/
'tagImageEnabled': boolean;
/**
*
* @type {string}
* @memberof SystemConfigMachineLearningDto
*/
'url': string;
}
/**
*
* @export
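The new SystemConfigMachineLearningDto adds per-feature toggles (CLIP encoding, facial recognition, image tagging) on top of a single enabled switch and the service URL. A minimal sketch of how such a payload might be applied through the REST API; the field names come from the DTO above, while the base URL, API key, and the assumption that the admin config endpoint is mounted at /api/system-config are not shown in this diff and are placeholders:

```python
import requests

base_url = "http://localhost:2283/api"   # hypothetical server address
headers = {"x-api-key": "YOUR_API_KEY"}  # hypothetical credentials

# Field names follow SystemConfigMachineLearningDto above.
machine_learning = {
    "enabled": True,
    "url": "http://immich-machine-learning:3003",
    "clipEncodeEnabled": True,
    "facialRecognitionEnabled": True,
    "tagImageEnabled": False,
}

# Read-modify-write the whole system config (endpoint path is an assumption).
config = requests.get(f"{base_url}/system-config", headers=headers).json()
config["machineLearning"] = machine_learning
requests.put(f"{base_url}/system-config", headers=headers, json=config).raise_for_status()
```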
@@ -5118,13 +5205,13 @@ export const AssetApiAxiosParamCreator = function (configuration?: Configuration
* @param {string} [userId]
* @param {boolean} [isFavorite]
* @param {boolean} [isArchived]
* @param {boolean} [withoutThumbs] Include assets without thumbnails
* @param {number} [skip]
* @param {string} [updatedAfter]
* @param {string} [ifNoneMatch] ETag of data already cached on the client
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
getAllAssets: async (userId?: string, isFavorite?: boolean, isArchived?: boolean, withoutThumbs?: boolean, skip?: number, ifNoneMatch?: string, options: AxiosRequestConfig = {}): Promise<RequestArgs> => {
getAllAssets: async (userId?: string, isFavorite?: boolean, isArchived?: boolean, skip?: number, updatedAfter?: string, ifNoneMatch?: string, options: AxiosRequestConfig = {}): Promise<RequestArgs> => {
const localVarPath = `/asset`;
// use dummy base URL string because the URL constructor only accepts absolute URLs.
const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);

@@ -5158,14 +5245,16 @@ export const AssetApiAxiosParamCreator = function (configuration?: Configuration
localVarQueryParameter['isArchived'] = isArchived;
}

if (withoutThumbs !== undefined) {
localVarQueryParameter['withoutThumbs'] = withoutThumbs;
}

if (skip !== undefined) {
localVarQueryParameter['skip'] = skip;
}

if (updatedAfter !== undefined) {
localVarQueryParameter['updatedAfter'] = (updatedAfter as any instanceof Date) ?
(updatedAfter as any).toISOString() :
updatedAfter;
}

if (ifNoneMatch != null) {
localVarHeaderParameter['if-none-match'] = String(ifNoneMatch);
}

@@ -6272,14 +6361,14 @@ export const AssetApiFp = function(configuration?: Configuration) {
* @param {string} [userId]
* @param {boolean} [isFavorite]
* @param {boolean} [isArchived]
* @param {boolean} [withoutThumbs] Include assets without thumbnails
* @param {number} [skip]
* @param {string} [updatedAfter]
* @param {string} [ifNoneMatch] ETag of data already cached on the client
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
async getAllAssets(userId?: string, isFavorite?: boolean, isArchived?: boolean, withoutThumbs?: boolean, skip?: number, ifNoneMatch?: string, options?: AxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<AssetResponseDto>>> {
const localVarAxiosArgs = await localVarAxiosParamCreator.getAllAssets(userId, isFavorite, isArchived, withoutThumbs, skip, ifNoneMatch, options);
async getAllAssets(userId?: string, isFavorite?: boolean, isArchived?: boolean, skip?: number, updatedAfter?: string, ifNoneMatch?: string, options?: AxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<Array<AssetResponseDto>>> {
const localVarAxiosArgs = await localVarAxiosParamCreator.getAllAssets(userId, isFavorite, isArchived, skip, updatedAfter, ifNoneMatch, options);
return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
},
/**

@@ -6576,7 +6665,7 @@ export const AssetApiFactory = function (configuration?: Configuration, basePath
* @throws {RequiredError}
*/
getAllAssets(requestParameters: AssetApiGetAllAssetsRequest = {}, options?: AxiosRequestConfig): AxiosPromise<Array<AssetResponseDto>> {
return localVarFp.getAllAssets(requestParameters.userId, requestParameters.isFavorite, requestParameters.isArchived, requestParameters.withoutThumbs, requestParameters.skip, requestParameters.ifNoneMatch, options).then((request) => request(axios, basePath));
return localVarFp.getAllAssets(requestParameters.userId, requestParameters.isFavorite, requestParameters.isArchived, requestParameters.skip, requestParameters.updatedAfter, requestParameters.ifNoneMatch, options).then((request) => request(axios, basePath));
},
/**
* Get a single asset\'s information

@@ -6881,13 +6970,6 @@ export interface AssetApiGetAllAssetsRequest {
*/
readonly isArchived?: boolean

/**
* Include assets without thumbnails
* @type {boolean}
* @memberof AssetApiGetAllAssets
*/
readonly withoutThumbs?: boolean

/**
*
* @type {number}

@@ -6895,6 +6977,13 @@ export interface AssetApiGetAllAssetsRequest {
*/
readonly skip?: number

/**
*
* @type {string}
* @memberof AssetApiGetAllAssets
*/
readonly updatedAfter?: string

/**
* ETag of data already cached on the client
* @type {string}
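The getAllAssets operation drops withoutThumbs and gains updatedAfter, so clients can ask only for assets changed since a given instant and combine that with the existing if-none-match ETag header. For clients that do not use the generated SDK, a minimal sketch of the same call against the raw endpoint; the base URL and API key are placeholders, only the query parameter and header names come from the generated code above:

```python
import requests

base_url = "http://localhost:2283/api"   # hypothetical server address
headers = {
    "x-api-key": "YOUR_API_KEY",          # hypothetical credentials
    "if-none-match": '"previous-etag"',   # ETag cached from an earlier response
}
params = {"updatedAfter": "2023-08-01T00:00:00.000Z"}  # only assets changed after this instant

response = requests.get(f"{base_url}/asset", headers=headers, params=params)
if response.status_code == 304:
    print("Nothing changed since the cached ETag")
else:
    assets = response.json()
    print(f"{len(assets)} assets updated after the given timestamp")
```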
@@ -7460,7 +7549,7 @@ export class AssetApi extends BaseAPI {
* @memberof AssetApi
*/
public getAllAssets(requestParameters: AssetApiGetAllAssetsRequest = {}, options?: AxiosRequestConfig) {
return AssetApiFp(this.configuration).getAllAssets(requestParameters.userId, requestParameters.isFavorite, requestParameters.isArchived, requestParameters.withoutThumbs, requestParameters.skip, requestParameters.ifNoneMatch, options).then((request) => request(this.axios, this.basePath));
return AssetApiFp(this.configuration).getAllAssets(requestParameters.userId, requestParameters.isFavorite, requestParameters.isArchived, requestParameters.skip, requestParameters.updatedAfter, requestParameters.ifNoneMatch, options).then((request) => request(this.axios, this.basePath));
}

/**

@@ -7671,6 +7760,163 @@ export class AssetApi extends BaseAPI {
}

/**
* AuditApi - axios parameter creator
* @export
*/
export const AuditApiAxiosParamCreator = function (configuration?: Configuration) {
return {
/**
*
* @param {EntityType} entityType
* @param {string} after
* @param {string} [userId]
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
getAuditDeletes: async (entityType: EntityType, after: string, userId?: string, options: AxiosRequestConfig = {}): Promise<RequestArgs> => {
// verify required parameter 'entityType' is not null or undefined
assertParamExists('getAuditDeletes', 'entityType', entityType)
// verify required parameter 'after' is not null or undefined
assertParamExists('getAuditDeletes', 'after', after)
const localVarPath = `/audit/deletes`;
// use dummy base URL string because the URL constructor only accepts absolute URLs.
const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
let baseOptions;
if (configuration) {
baseOptions = configuration.baseOptions;
}

const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
const localVarHeaderParameter = {} as any;
const localVarQueryParameter = {} as any;

// authentication cookie required

// authentication api_key required
await setApiKeyToObject(localVarHeaderParameter, "x-api-key", configuration)

// authentication bearer required
// http bearer authentication required
await setBearerAuthToObject(localVarHeaderParameter, configuration)

if (entityType !== undefined) {
localVarQueryParameter['entityType'] = entityType;
}

if (userId !== undefined) {
localVarQueryParameter['userId'] = userId;
}

if (after !== undefined) {
localVarQueryParameter['after'] = (after as any instanceof Date) ?
(after as any).toISOString() :
after;
}

setSearchParams(localVarUrlObj, localVarQueryParameter);
let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};

return {
url: toPathString(localVarUrlObj),
options: localVarRequestOptions,
};
},
}
};

/**
* AuditApi - functional programming interface
* @export
*/
export const AuditApiFp = function(configuration?: Configuration) {
const localVarAxiosParamCreator = AuditApiAxiosParamCreator(configuration)
return {
/**
*
* @param {EntityType} entityType
* @param {string} after
* @param {string} [userId]
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
async getAuditDeletes(entityType: EntityType, after: string, userId?: string, options?: AxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<AuditDeletesResponseDto>> {
const localVarAxiosArgs = await localVarAxiosParamCreator.getAuditDeletes(entityType, after, userId, options);
return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
},
}
};

/**
* AuditApi - factory interface
* @export
*/
export const AuditApiFactory = function (configuration?: Configuration, basePath?: string, axios?: AxiosInstance) {
const localVarFp = AuditApiFp(configuration)
return {
/**
*
* @param {AuditApiGetAuditDeletesRequest} requestParameters Request parameters.
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
getAuditDeletes(requestParameters: AuditApiGetAuditDeletesRequest, options?: AxiosRequestConfig): AxiosPromise<AuditDeletesResponseDto> {
return localVarFp.getAuditDeletes(requestParameters.entityType, requestParameters.after, requestParameters.userId, options).then((request) => request(axios, basePath));
},
};
};

/**
* Request parameters for getAuditDeletes operation in AuditApi.
* @export
* @interface AuditApiGetAuditDeletesRequest
*/
export interface AuditApiGetAuditDeletesRequest {
/**
*
* @type {EntityType}
* @memberof AuditApiGetAuditDeletes
*/
readonly entityType: EntityType

/**
*
* @type {string}
* @memberof AuditApiGetAuditDeletes
*/
readonly after: string

/**
*
* @type {string}
* @memberof AuditApiGetAuditDeletes
*/
readonly userId?: string
}

/**
* AuditApi - object-oriented interface
* @export
* @class AuditApi
* @extends {BaseAPI}
*/
export class AuditApi extends BaseAPI {
/**
*
* @param {AuditApiGetAuditDeletesRequest} requestParameters Request parameters.
* @param {*} [options] Override http request option.
* @throws {RequiredError}
* @memberof AuditApi
*/
public getAuditDeletes(requestParameters: AuditApiGetAuditDeletesRequest, options?: AxiosRequestConfig) {
return AuditApiFp(this.configuration).getAuditDeletes(requestParameters.entityType, requestParameters.after, requestParameters.userId, options).then((request) => request(this.axios, this.basePath));
}
}

/**
* AuthenticationApi - axios parameter creator
* @export
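The new /audit/deletes endpoint complements updatedAfter-based fetching: a client asks which ASSET or ALBUM ids were deleted after a given time and falls back to a full sync when needsFullSync is true. A hedged sketch of that flow outside the generated SDK; the endpoint, parameters, and response fields follow the generated code above, while the base URL and key are placeholders:

```python
import requests

base_url = "http://localhost:2283/api"   # hypothetical server address
headers = {"x-api-key": "YOUR_API_KEY"}  # hypothetical credentials

resp = requests.get(
    f"{base_url}/audit/deletes",
    headers=headers,
    params={"entityType": "ASSET", "after": "2023-08-01T00:00:00.000Z"},
)
resp.raise_for_status()
# Shape follows AuditDeletesResponseDto: {"ids": [...], "needsFullSync": bool}
deletes = resp.json()

if deletes["needsFullSync"]:
    print("Audit window exceeded; refetch the full asset list instead")
else:
    print(f"Remove {len(deletes['ids'])} locally cached assets")
```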
@@ -9914,44 +10160,6 @@ export const SearchApiAxiosParamCreator = function (configuration?: Configuratio

setSearchParams(localVarUrlObj, localVarQueryParameter);
let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};

return {
url: toPathString(localVarUrlObj),
options: localVarRequestOptions,
};
},
/**
*
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
getSearchConfig: async (options: AxiosRequestConfig = {}): Promise<RequestArgs> => {
const localVarPath = `/search/config`;
// use dummy base URL string because the URL constructor only accepts absolute URLs.
const localVarUrlObj = new URL(localVarPath, DUMMY_BASE_URL);
let baseOptions;
if (configuration) {
baseOptions = configuration.baseOptions;
}

const localVarRequestOptions = { method: 'GET', ...baseOptions, ...options};
const localVarHeaderParameter = {} as any;
const localVarQueryParameter = {} as any;

// authentication cookie required

// authentication api_key required
await setApiKeyToObject(localVarHeaderParameter, "x-api-key", configuration)

// authentication bearer required
// http bearer authentication required
await setBearerAuthToObject(localVarHeaderParameter, configuration)

setSearchParams(localVarUrlObj, localVarQueryParameter);
let headersFromBaseOptions = baseOptions && baseOptions.headers ? baseOptions.headers : {};
localVarRequestOptions.headers = {...localVarHeaderParameter, ...headersFromBaseOptions, ...options.headers};

@@ -10098,15 +10306,6 @@ export const SearchApiFp = function(configuration?: Configuration) {
const localVarAxiosArgs = await localVarAxiosParamCreator.getExploreData(options);
return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
},
/**
*
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
async getSearchConfig(options?: AxiosRequestConfig): Promise<(axios?: AxiosInstance, basePath?: string) => AxiosPromise<SearchConfigResponseDto>> {
const localVarAxiosArgs = await localVarAxiosParamCreator.getSearchConfig(options);
return createRequestFunction(localVarAxiosArgs, globalAxios, BASE_PATH, configuration);
},
/**
*
* @param {string} [q]

@@ -10150,14 +10349,6 @@ export const SearchApiFactory = function (configuration?: Configuration, basePat
getExploreData(options?: AxiosRequestConfig): AxiosPromise<Array<SearchExploreResponseDto>> {
return localVarFp.getExploreData(options).then((request) => request(axios, basePath));
},
/**
*
* @param {*} [options] Override http request option.
* @throws {RequiredError}
*/
getSearchConfig(options?: AxiosRequestConfig): AxiosPromise<SearchConfigResponseDto> {
return localVarFp.getSearchConfig(options).then((request) => request(axios, basePath));
},
/**
*
* @param {SearchApiSearchRequest} requestParameters Request parameters.

@@ -10306,16 +10497,6 @@ export class SearchApi extends BaseAPI {
return SearchApiFp(this.configuration).getExploreData(options).then((request) => request(this.axios, this.basePath));
}

/**
*
* @param {*} [options] Override http request option.
* @throws {RequiredError}
* @memberof SearchApi
*/
public getSearchConfig(options?: AxiosRequestConfig) {
return SearchApiFp(this.configuration).getSearchConfig(options).then((request) => request(this.axios, this.basePath));
}

/**
*
* @param {SearchApiSearchRequest} requestParameters Request parameters.
cli/src/api/open-api/base.ts (generated, 2 lines changed)

@@ -4,7 +4,7 @@
* Immich
* Immich API
*
* The version of the OpenAPI document: 1.74.0
* The version of the OpenAPI document: 1.75.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

cli/src/api/open-api/common.ts (generated, 2 lines changed)

@@ -4,7 +4,7 @@
* Immich
* Immich API
*
* The version of the OpenAPI document: 1.74.0
* The version of the OpenAPI document: 1.75.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

cli/src/api/open-api/configuration.ts (generated, 2 lines changed)

@@ -4,7 +4,7 @@
* Immich
* Immich API
*
* The version of the OpenAPI document: 1.74.0
* The version of the OpenAPI document: 1.75.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

cli/src/api/open-api/index.ts (generated, 2 lines changed)

@@ -4,7 +4,7 @@
* Immich
* Immich API
*
* The version of the OpenAPI document: 1.74.0
* The version of the OpenAPI document: 1.75.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
@@ -39,7 +39,7 @@ This often happens when using a reverse proxy or cloudflare tunnel in front of I

### Why is Immich slow on low-memory systems like the Raspberry Pi?

Immich uses optional machine-learning features to enhance search results. This feature, however, can be too heavy to run on a Raspberry Pi. To disable machine learning, comment out the `immich-machine-learning` section of your docker-compose.yml and set `IMMICH_MACHINE_LEARNING_URL=false` in your .env file.
Immich uses optional machine-learning features to enhance search results. This feature, however, can be too heavy to run on a Raspberry Pi. To disable machine learning, comment out the `immich-machine-learning` section of your docker-compose.yml and set `IMMICH_MACHINE_LEARNING_ENABLED=false` in your .env file.

### How to disable machine-learning and TypeSense?

@@ -47,7 +47,7 @@ Immich uses optional machine-learning features to enhance search results. This f
Disabling both will result in poor search experience and typesense utilizes CLIP embeddings which are generated by machine-learning.
:::

These features can be disabled by commenting out `immich-typesense` and `immich-machine-learning` sections of the docker-compose.yml and setting `IMMICH_MACHINE_LEARNING_URL=false` & `TYPESENSE_ENABLED=false` in your .env file.
These features can be disabled by commenting out `immich-typesense` and `immich-machine-learning` sections of the docker-compose.yml and setting `IMMICH_MACHINE_LEARNING_ENABLED=false` & `TYPESENSE_ENABLED=false` in your .env file.

### What happens to existing files after I choose a new [Storage Template](/docs/administration/storage-template.mdx)?
docs/docs/install/config-file.md (new file, 91 lines)

@@ -0,0 +1,91 @@
# Config File

A config file can be provided as an alternative to the UI configuration.

### Step 1 - Create a new config file

In JSON format, create a new config file (e.g. `immich.config`) and put it in a location that can be accessed by Immich.
The default configuration looks like this:

```json
{
  "ffmpeg": {
    "crf": 23,
    "threads": 0,
    "preset": "ultrafast",
    "targetVideoCodec": "h264",
    "targetAudioCodec": "aac",
    "targetResolution": "720",
    "maxBitrate": "0",
    "twoPass": false,
    "transcode": "required",
    "tonemap": "hable",
    "accel": "disabled"
  },
  "job": {
    "backgroundTask": {
      "concurrency": 5
    },
    "clipEncoding": {
      "concurrency": 2
    },
    "metadataExtraction": {
      "concurrency": 5
    },
    "objectTagging": {
      "concurrency": 2
    },
    "recognizeFaces": {
      "concurrency": 2
    },
    "search": {
      "concurrency": 5
    },
    "sidecar": {
      "concurrency": 5
    },
    "storageTemplateMigration": {
      "concurrency": 5
    },
    "thumbnailGeneration": {
      "concurrency": 5
    },
    "videoConversion": {
      "concurrency": 1
    }
  },
  "oauth": {
    "enabled": false,
    "issuerUrl": "",
    "clientId": "",
    "clientSecret": "",
    "mobileOverrideEnabled": false,
    "mobileRedirectUri": "",
    "scope": "openid email profile",
    "storageLabelClaim": "preferred_username",
    "buttonText": "Login with OAuth",
    "autoRegister": true,
    "autoLaunch": false
  },
  "passwordLogin": {
    "enabled": true
  },
  "storageTemplate": {
    "template": "{{y}}/{{y}}-{{MM}}-{{dd}}/{{filename}}"
  },
  "thumbnail": {
    "webpSize": 250,
    "jpegSize": 1440
  }
}
```

:::tip
In Administration > Settings is a button to copy the current configuration to your clipboard.
So you can just grab it from there, paste it into a file and you're pretty much good to go.
:::

### Step 2 - Specify the file location

In your `.env` file, set the variable `IMMICH_CONFIG_FILE` to the path of your config.
For more information, refer to the [Environment Variables](https://docs.immich.app/docs/install/environment-variables) section.
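Before pointing `IMMICH_CONFIG_FILE` at the file, it is worth confirming that the JSON actually parses, since a malformed file is an easy mistake to rule out. A small sketch; the path is a placeholder for wherever you mounted the file:

```python
import json
from pathlib import Path

config_path = Path("/config/immich.config")  # placeholder path

with config_path.open() as f:
    config = json.load(f)  # raises json.JSONDecodeError if the JSON is malformed

print("Top-level sections:", sorted(config))  # e.g. ffmpeg, job, oauth, ...
```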
@@ -132,7 +132,6 @@ PUBLIC_LOGIN_PAGE_MESSAGE="My Family Photos and Videos Backup Server"

IMMICH_WEB_URL=http://immich-web:3000
IMMICH_SERVER_URL=http://immich-server:3001
IMMICH_MACHINE_LEARNING_URL=http://immich-machine-learning:3003

####################################################################################
# Alternative API's External Address - Optional
@@ -1,3 +1,7 @@
---
sidebar_position: 90
---

# Environment Variables

## Docker Compose

@@ -22,6 +26,7 @@ These environment variables are used by the `docker-compose.yml` file and do **N
| `LOG_LEVEL` | Log Level (verbose, debug, log, warn, error) | `log` | server, microservices |
| `IMMICH_MEDIA_LOCATION` | Media Location | `./upload` | server, microservices |
| `PUBLIC_LOGIN_PAGE_MESSAGE` | Public Login Page Message | | web |
| `IMMICH_CONFIG_FILE` | Path to config file | | server |

:::tip
@@ -50,13 +55,14 @@ These environment variables are used by the `docker-compose.yml` file and do **N

## URLs

| Variable | Description | Default | Services |
| :--- | :--- | :---: | :--- |
| `IMMICH_WEB_URL` | Immich Web URL | `http://immich-web:3000` | proxy |
| `IMMICH_SERVER_URL` | Immich Server URL | `http://immich-server:3001` | web, proxy |
| `IMMICH_MACHINE_LEARNING_URL` | Immich Machine Learning URL, set `"false"` to disable ML | `http://immich-machine-learning:3003` | server, microservices |
| `PUBLIC_IMMICH_SERVER_URL` | Public Immich URL | `http://immich-server:3001` | web |
| `IMMICH_API_URL_EXTERNAL` | Immich API URL External | `/api` | web |

| Variable | Description | Default | Services |
| :--- | :--- | :---: | :--- |
| `IMMICH_WEB_URL` | Immich Web URL | `http://immich-web:3000` | proxy |
| `IMMICH_SERVER_URL` | Immich Server URL | `http://immich-server:3001` | web, proxy |
| `IMMICH_MACHINE_LEARNING_ENABLED` | Enabled machine learning | `true` | server, microservices |
| `IMMICH_MACHINE_LEARNING_URL` | Immich Machine Learning URL, | `http://immich-machine-learning:3003` | server, microservices |
| `PUBLIC_IMMICH_SERVER_URL` | Public Immich URL | `http://immich-server:3001` | web |
| `IMMICH_API_URL_EXTERNAL` | Immich API URL External | `/api` | web |

:::info
@@ -1,5 +1,5 @@
---
sidebar_position: 100
sidebar_position: 80
---

import RegisterAdminUser from '../partials/_register-admin.md';
@@ -10,8 +10,9 @@ RUN poetry config installer.max-workers 10 && \
RUN python -m venv /opt/venv
ENV VIRTUAL_ENV="/opt/venv" PATH="/opt/venv/bin:${PATH}"

COPY poetry.lock pyproject.toml ./
COPY poetry.lock pyproject.toml requirements.txt ./
RUN poetry install --sync --no-interaction --no-ansi --no-root --only main
RUN pip install --no-deps -r requirements.txt

FROM python:3.11.4-slim-bullseye@sha256:91d194f58f50594cda71dcd2e8fdefd90e7ecc57d07823813b67c8521e565dcd
@@ -1,3 +1,4 @@
import os
from pathlib import Path

from pydantic import BaseSettings

@@ -8,25 +9,31 @@ from .schemas import ModelType
class Settings(BaseSettings):
cache_folder: str = "/cache"
classification_model: str = "microsoft/resnet-50"
clip_image_model: str = "clip-ViT-B-32"
clip_text_model: str = "clip-ViT-B-32"
clip_image_model: str = "ViT-B-32::openai"
clip_text_model: str = "ViT-B-32::openai"
facial_recognition_model: str = "buffalo_l"
min_tag_score: float = 0.9
eager_startup: bool = True
eager_startup: bool = False
model_ttl: int = 0
host: str = "0.0.0.0"
port: int = 3003
workers: int = 1
min_face_score: float = 0.7
test_full: bool = False
request_threads: int = os.cpu_count() or 4
model_inter_op_threads: int = 1
model_intra_op_threads: int = 2

class Config:
env_prefix = "MACHINE_LEARNING_"
case_sensitive = False

_clean_name = str.maketrans(":\\/", "___", ".")

def get_cache_dir(model_name: str, model_type: ModelType) -> Path:
return Path(settings.cache_folder, model_type.value, model_name)
return Path(settings.cache_folder) / model_type.value / model_name.translate(_clean_name)

settings = Settings()
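Two behaviours of the updated config are easy to miss: every Settings field can be overridden through a `MACHINE_LEARNING_`-prefixed, case-insensitive environment variable, and get_cache_dir now sanitises model names such as ViT-B-32::openai before building the cache path. A small illustrative sketch; the "clip" subdirectory name is an assumption about ModelType.CLIP.value rather than something shown in this diff:

```python
import os
from pathlib import Path

# Picked up by pydantic's BaseSettings through env_prefix="MACHINE_LEARNING_",
# so Settings().model_ttl would become 300.
os.environ["MACHINE_LEARNING_MODEL_TTL"] = "300"

# Same sanitisation table as config.py: ':', '\' and '/' become '_', '.' is dropped.
_clean_name = str.maketrans(":\\/", "___", ".")

def cache_dir(cache_folder: str, model_type: str, model_name: str) -> Path:
    return Path(cache_folder) / model_type / model_name.translate(_clean_name)

print(cache_dir("/cache", "clip", "ViT-B-32::openai"))
# /cache/clip/ViT-B-32__openai
```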
@@ -1,4 +1,6 @@
import asyncio
import os
from concurrent.futures import ThreadPoolExecutor
from io import BytesIO
from typing import Any

@@ -8,6 +10,8 @@ import uvicorn
from fastapi import Body, Depends, FastAPI
from PIL import Image

from app.models.base import InferenceModel

from .config import settings
from .models.cache import ModelCache
from .schemas import (

@@ -25,19 +29,21 @@ app = FastAPI()

def init_state() -> None:
app.state.model_cache = ModelCache(ttl=settings.model_ttl, revalidate=settings.model_ttl > 0)
# asyncio is a huge bottleneck for performance, so we use a thread pool to run blocking code
app.state.thread_pool = ThreadPoolExecutor(settings.request_threads)

async def load_models() -> None:
models = [
(settings.classification_model, ModelType.IMAGE_CLASSIFICATION),
(settings.clip_image_model, ModelType.CLIP),
(settings.clip_text_model, ModelType.CLIP),
(settings.facial_recognition_model, ModelType.FACIAL_RECOGNITION),
models: list[tuple[str, ModelType, dict[str, Any]]] = [
(settings.classification_model, ModelType.IMAGE_CLASSIFICATION, {}),
(settings.clip_image_model, ModelType.CLIP, {"mode": "vision"}),
(settings.clip_text_model, ModelType.CLIP, {"mode": "text"}),
(settings.facial_recognition_model, ModelType.FACIAL_RECOGNITION, {}),
]

# Get all models
for model_name, model_type in models:
await app.state.model_cache.get(model_name, model_type, eager=settings.eager_startup)
for model_name, model_type, model_kwargs in models:
await app.state.model_cache.get(model_name, model_type, eager=settings.eager_startup, **model_kwargs)

@app.on_event("startup")

@@ -46,11 +52,16 @@ async def startup_event() -> None:
await load_models()

@app.on_event("shutdown")
async def shutdown_event() -> None:
app.state.thread_pool.shutdown()

def dep_pil_image(byte_image: bytes = Body(...)) -> Image.Image:
return Image.open(BytesIO(byte_image))

def dep_cv_image(byte_image: bytes = Body(...)) -> cv2.Mat:
def dep_cv_image(byte_image: bytes = Body(...)) -> np.ndarray[int, np.dtype[Any]]:
byte_image_np = np.frombuffer(byte_image, np.uint8)
return cv2.imdecode(byte_image_np, cv2.IMREAD_COLOR)

@@ -74,7 +85,7 @@ async def image_classification(
image: Image.Image = Depends(dep_pil_image),
) -> list[str]:
model = await app.state.model_cache.get(settings.classification_model, ModelType.IMAGE_CLASSIFICATION)
labels = model.predict(image)
labels = await predict(model, image)
return labels

@@ -86,8 +97,8 @@ async def image_classification(
async def clip_encode_image(
image: Image.Image = Depends(dep_pil_image),
) -> list[float]:
model = await app.state.model_cache.get(settings.clip_image_model, ModelType.CLIP)
embedding = model.predict(image)
model = await app.state.model_cache.get(settings.clip_image_model, ModelType.CLIP, mode="vision")
embedding = await predict(model, image)
return embedding

@@ -97,8 +108,8 @@ async def clip_encode_image(
status_code=200,
)
async def clip_encode_text(payload: TextModelRequest) -> list[float]:
model = await app.state.model_cache.get(settings.clip_text_model, ModelType.CLIP)
embedding = model.predict(payload.text)
model = await app.state.model_cache.get(settings.clip_text_model, ModelType.CLIP, mode="text")
embedding = await predict(model, payload.text)
return embedding

@@ -111,10 +122,14 @@ async def facial_recognition(
image: cv2.Mat = Depends(dep_cv_image),
) -> list[dict[str, Any]]:
model = await app.state.model_cache.get(settings.facial_recognition_model, ModelType.FACIAL_RECOGNITION)
faces = model.predict(image)
faces = await predict(model, image)
return faces

async def predict(model: InferenceModel, inputs: Any) -> Any:
return await asyncio.get_running_loop().run_in_executor(app.state.thread_pool, model.predict, inputs)

if __name__ == "__main__":
is_dev = os.getenv("NODE_ENV") == "development"
uvicorn.run(
@@ -1,3 +1,3 @@
from .clip import CLIPSTEncoder
from .clip import CLIPEncoder
from .facial_recognition import FaceRecognizer
from .image_classification import ImageClassifier
@@ -1,14 +1,17 @@
from __future__ import annotations

import os
import pickle
from abc import ABC, abstractmethod
from pathlib import Path
from shutil import rmtree
from typing import Any
from zipfile import BadZipFile

import onnxruntime as ort
from onnxruntime.capi.onnxruntime_pybind11_state import InvalidProtobuf  # type: ignore

from ..config import get_cache_dir
from ..config import get_cache_dir, settings
from ..schemas import ModelType

@@ -16,12 +19,31 @@ class InferenceModel(ABC):
_model_type: ModelType

def __init__(
self, model_name: str, cache_dir: Path | str | None = None, eager: bool = True, **model_kwargs: Any
self,
model_name: str,
cache_dir: Path | str | None = None,
eager: bool = True,
inter_op_num_threads: int = settings.model_inter_op_threads,
intra_op_num_threads: int = settings.model_intra_op_threads,
**model_kwargs: Any,
) -> None:
self.model_name = model_name
self._loaded = False
self._cache_dir = Path(cache_dir) if cache_dir is not None else get_cache_dir(model_name, self.model_type)
loader = self.load if eager else self.download

self.providers = model_kwargs.pop("providers", ["CPUExecutionProvider"])
# don't pre-allocate more memory than needed
self.provider_options = model_kwargs.pop(
"provider_options", [{"arena_extend_strategy": "kSameAsRequested"}] * len(self.providers)
)
self.sess_options = PicklableSessionOptions()
# avoid thread contention between models
if inter_op_num_threads > 1:
self.sess_options.execution_mode = ort.ExecutionMode.ORT_PARALLEL
self.sess_options.inter_op_num_threads = inter_op_num_threads
self.sess_options.intra_op_num_threads = intra_op_num_threads

try:
loader(**model_kwargs)
except (OSError, InvalidProtobuf, BadZipFile):

@@ -30,6 +52,7 @@ class InferenceModel(ABC):

def download(self, **model_kwargs: Any) -> None:
if not self.cached:
print(f"Downloading {self.model_type.value.replace('_', ' ')} model. This may take a while...")
self._download(**model_kwargs)

def load(self, **model_kwargs: Any) -> None:

@@ -39,6 +62,7 @@ class InferenceModel(ABC):

def predict(self, inputs: Any) -> Any:
if not self._loaded:
print(f"Loading {self.model_type.value.replace('_', ' ')} model...")
self.load()
return self._predict(inputs)

@@ -89,3 +113,14 @@ class InferenceModel(ABC):
else:
self.cache_dir.unlink()
self.cache_dir.mkdir(parents=True, exist_ok=True)

# HF deep copies configs, so we need to make session options picklable
class PicklableSessionOptions(ort.SessionOptions):
def __getstate__(self) -> bytes:
return pickle.dumps([(attr, getattr(self, attr)) for attr in dir(self) if not callable(getattr(self, attr))])

def __setstate__(self, state: Any) -> None:
self.__init__()  # type: ignore
for attr, val in pickle.loads(state):
setattr(self, attr, val)
@@ -46,7 +46,7 @@ class ModelCache:
model: The requested model.
"""

key = self.cache.build_key(model_name, model_type.value)
key = f"{model_name}{model_type.value}{model_kwargs.get('mode', '')}"
async with OptimisticLock(self.cache, key) as lock:
model = await self.cache.get(key)
if model is None:
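Because the CLIP text and vision halves are now loaded as separate models, the cache key has to include the mode keyword; otherwise a second request for the same model name could return the wrong half. A quick illustration of the keys produced by the new f-string (the "clip" type value is the same assumption as above):

```python
def cache_key(model_name: str, model_type_value: str, **model_kwargs) -> str:
    # Mirrors the key expression used in ModelCache.get above.
    return f"{model_name}{model_type_value}{model_kwargs.get('mode', '')}"

print(cache_key("ViT-B-32::openai", "clip", mode="vision"))  # ViT-B-32::openaiclipvision
print(cache_key("ViT-B-32::openai", "clip", mode="text"))    # ViT-B-32::openaicliptext
```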
@@ -1,31 +1,141 @@
from typing import Any
import os
import zipfile
from typing import Any, Literal

import onnxruntime as ort
import torch
from clip_server.model.clip import BICUBIC, _convert_image_to_rgb
from clip_server.model.clip_onnx import _MODELS, _S3_BUCKET_V2, CLIPOnnxModel, download_model
from clip_server.model.pretrained_models import _VISUAL_MODEL_IMAGE_SIZE
from clip_server.model.tokenization import Tokenizer
from PIL.Image import Image
from sentence_transformers import SentenceTransformer
from sentence_transformers.util import snapshot_download
from torchvision.transforms import CenterCrop, Compose, Normalize, Resize, ToTensor

from ..schemas import ModelType
from .base import InferenceModel

_ST_TO_JINA_MODEL_NAME = {
"clip-ViT-B-16": "ViT-B-16::openai",
"clip-ViT-B-32": "ViT-B-32::openai",
"clip-ViT-B-32-multilingual-v1": "M-CLIP/XLM-Roberta-Large-Vit-B-32",
"clip-ViT-L-14": "ViT-L-14::openai",
}

class CLIPSTEncoder(InferenceModel):

class CLIPEncoder(InferenceModel):
_model_type = ModelType.CLIP

def __init__(
self,
model_name: str,
cache_dir: str | None = None,
mode: Literal["text", "vision"] | None = None,
**model_kwargs: Any,
) -> None:
if mode is not None and mode not in ("text", "vision"):
raise ValueError(f"Mode must be 'text', 'vision', or omitted; got '{mode}'")
if "vit-b" not in model_name.lower():
raise ValueError(f"Only ViT-B models are currently supported; got '{model_name}'")
self.mode = mode
jina_model_name = self._get_jina_model_name(model_name)
super().__init__(jina_model_name, cache_dir, **model_kwargs)

def _download(self, **model_kwargs: Any) -> None:
repo_id = self.model_name if "/" in self.model_name else f"sentence-transformers/{self.model_name}"
snapshot_download(
cache_dir=self.cache_dir,
repo_id=repo_id,
library_name="sentence-transformers",
ignore_files=["flax_model.msgpack", "rust_model.ot", "tf_model.h5"],
)
models: tuple[tuple[str, str], tuple[str, str]] = _MODELS[self.model_name]
text_onnx_path = self.cache_dir / "textual.onnx"
vision_onnx_path = self.cache_dir / "visual.onnx"

if not text_onnx_path.is_file():
self._download_model(*models[0])

if not vision_onnx_path.is_file():
self._download_model(*models[1])

def _load(self, **model_kwargs: Any) -> None:
self.model = SentenceTransformer(
self.model_name,
cache_folder=self.cache_dir.as_posix(),
**model_kwargs,
)
if self.mode == "text" or self.mode is None:
self.text_model = ort.InferenceSession(
self.cache_dir / "textual.onnx",
sess_options=self.sess_options,
providers=self.providers,
provider_options=self.provider_options,
)
self.text_outputs = [output.name for output in self.text_model.get_outputs()]
self.tokenizer = Tokenizer(self.model_name)

if self.mode == "vision" or self.mode is None:
self.vision_model = ort.InferenceSession(
self.cache_dir / "visual.onnx",
sess_options=self.sess_options,
providers=self.providers,
provider_options=self.provider_options,
)
self.vision_outputs = [output.name for output in self.vision_model.get_outputs()]

image_size = _VISUAL_MODEL_IMAGE_SIZE[CLIPOnnxModel.get_model_name(self.model_name)]
self.transform = _transform_pil_image(image_size)

def _predict(self, image_or_text: Image | str) -> list[float]:
return self.model.encode(image_or_text).tolist()
match image_or_text:
case Image():
if self.mode == "text":
raise TypeError("Cannot encode image as text-only model")
pixel_values = self.transform(image_or_text)
assert isinstance(pixel_values, torch.Tensor)
pixel_values = torch.unsqueeze(pixel_values, 0).numpy()
outputs = self.vision_model.run(self.vision_outputs, {"pixel_values": pixel_values})
case str():
if self.mode == "vision":
raise TypeError("Cannot encode text as vision-only model")
text_inputs: dict[str, torch.Tensor] = self.tokenizer(image_or_text)
inputs = {
"input_ids": text_inputs["input_ids"].int().numpy(),
"attention_mask": text_inputs["attention_mask"].int().numpy(),
}
outputs = self.text_model.run(self.text_outputs, inputs)
case _:
raise TypeError(f"Expected Image or str, but got: {type(image_or_text)}")

return outputs[0][0].tolist()

def _get_jina_model_name(self, model_name: str) -> str:
if model_name in _MODELS:
return model_name
elif model_name in _ST_TO_JINA_MODEL_NAME:
print(
(f"Warning: Sentence-Transformer model names such as '{model_name}' are no longer supported."),
(f"Using '{_ST_TO_JINA_MODEL_NAME[model_name]}' instead as it is the best match for '{model_name}'."),
)
return _ST_TO_JINA_MODEL_NAME[model_name]
else:
raise ValueError(f"Unknown model name {model_name}.")

def _download_model(self, model_name: str, model_md5: str) -> bool:
# downloading logic is adapted from clip-server's CLIPOnnxModel class
download_model(
url=_S3_BUCKET_V2 + model_name,
target_folder=self.cache_dir.as_posix(),
md5sum=model_md5,
with_resume=True,
)
file = self.cache_dir / model_name.split("/")[1]
if file.suffix == ".zip":
with zipfile.ZipFile(file, "r") as zip_ref:
zip_ref.extractall(self.cache_dir)
os.remove(file)
return True

# same as `_transform_blob` without `_blob2image`
def _transform_pil_image(n_px: int) -> Compose:
return Compose(
[
Resize(n_px, interpolation=BICUBIC),
CenterCrop(n_px),
_convert_image_to_rgb,
ToTensor(),
Normalize(
(0.48145466, 0.4578275, 0.40821073),
(0.26862954, 0.26130258, 0.27577711),
),
]
)
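A side effect of this rewrite is that the old Sentence-Transformers model names stay accepted but are translated to their clip-server equivalents, so existing deployments keep working without config changes. A simplified sketch of that fallback in isolation; the real code checks membership in clip-server's _MODELS table, which is represented here by a plain known_models set:

```python
_ST_TO_JINA_MODEL_NAME = {
    "clip-ViT-B-16": "ViT-B-16::openai",
    "clip-ViT-B-32": "ViT-B-32::openai",
    "clip-ViT-B-32-multilingual-v1": "M-CLIP/XLM-Roberta-Large-Vit-B-32",
    "clip-ViT-L-14": "ViT-L-14::openai",
}

def resolve_clip_name(model_name: str, known_models: set[str]) -> str:
    # Native clip-server names pass through; legacy names are translated; anything else is rejected.
    if model_name in known_models:
        return model_name
    if model_name in _ST_TO_JINA_MODEL_NAME:
        print(f"Warning: '{model_name}' is a legacy Sentence-Transformers name.")
        return _ST_TO_JINA_MODEL_NAME[model_name]
    raise ValueError(f"Unknown model name {model_name}.")

print(resolve_clip_name("clip-ViT-B-32", {"ViT-B-32::openai"}))  # ViT-B-32::openai
```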
@@ -4,6 +4,7 @@ from typing import Any

import cv2
import numpy as np
import onnxruntime as ort
from insightface.model_zoo import ArcFaceONNX, RetinaFace
from insightface.utils.face_align import norm_crop
from insightface.utils.storage import BASE_REPO_URL, download_file

@@ -42,15 +43,31 @@ class FaceRecognizer(InferenceModel):
rec_file = next(self.cache_dir.glob("w600k_*.onnx"))
except StopIteration:
raise FileNotFoundError("Facial recognition models not found in cache directory")
self.det_model = RetinaFace(det_file.as_posix())
self.rec_model = ArcFaceONNX(rec_file.as_posix())

self.det_model = RetinaFace(
session=ort.InferenceSession(
det_file.as_posix(),
sess_options=self.sess_options,
providers=self.providers,
provider_options=self.provider_options,
),
)
self.rec_model = ArcFaceONNX(
rec_file.as_posix(),
session=ort.InferenceSession(
rec_file.as_posix(),
sess_options=self.sess_options,
providers=self.providers,
provider_options=self.provider_options,
),
)

self.det_model.prepare(
ctx_id=-1,
ctx_id=0,
det_thresh=self.min_score,
input_size=(640, 640),
)
self.rec_model.prepare(ctx_id=-1)
self.rec_model.prepare(ctx_id=0)

def _predict(self, image: cv2.Mat) -> list[dict[str, Any]]:
bboxes, kpss = self.det_model.detect(image)
@@ -2,8 +2,10 @@ from pathlib import Path
from typing import Any

from huggingface_hub import snapshot_download
from optimum.onnxruntime import ORTModelForImageClassification
from optimum.pipelines import pipeline
from PIL.Image import Image
from transformers.pipelines import pipeline
from transformers import AutoImageProcessor

from ..config import settings
from ..schemas import ModelType

@@ -25,15 +27,34 @@ class ImageClassifier(InferenceModel):

def _download(self, **model_kwargs: Any) -> None:
snapshot_download(
cache_dir=self.cache_dir, repo_id=self.model_name, allow_patterns=["*.bin", "*.json", "*.txt"]
cache_dir=self.cache_dir,
repo_id=self.model_name,
allow_patterns=["*.bin", "*.json", "*.txt"],
local_dir=self.cache_dir,
local_dir_use_symlinks=True,
)

def _load(self, **model_kwargs: Any) -> None:
self.model = pipeline(
self.model_type.value,
self.model_name,
model_kwargs={"cache_dir": self.cache_dir, **model_kwargs},
)
processor = AutoImageProcessor.from_pretrained(self.cache_dir)
model_kwargs |= {
"cache_dir": self.cache_dir,
"provider": self.providers[0],
"provider_options": self.provider_options[0],
"session_options": self.sess_options,
}
model_path = self.cache_dir / "model.onnx"

if model_path.exists():
model = ORTModelForImageClassification.from_pretrained(self.cache_dir, **model_kwargs)
self.model = pipeline(self.model_type.value, model, feature_extractor=processor)
else:
self.sess_options.optimized_model_filepath = model_path.as_posix()
self.model = pipeline(
self.model_type.value,
self.model_name,
model_kwargs=model_kwargs,
feature_extractor=processor,
)

def _predict(self, image: Image) -> list[str]:
predictions: list[dict[str, Any]] = self.model(image)  # type: ignore
@@ -1,17 +1,20 @@
import pickle
from io import BytesIO
from typing import TypeAlias
from unittest import mock

import cv2
import numpy as np
import onnxruntime as ort
import pytest
from fastapi.testclient import TestClient
from PIL import Image
from pytest_mock import MockerFixture

from .config import settings
from .models.base import PicklableSessionOptions
from .models.cache import ModelCache
from .models.clip import CLIPSTEncoder
from .models.clip import CLIPEncoder
from .models.facial_recognition import FaceRecognizer
from .models.image_classification import ImageClassifier
from .schemas import ModelType

@@ -72,45 +75,47 @@ class TestCLIP:
embedding = np.random.rand(512).astype(np.float32)

def test_eager_init(self, mocker: MockerFixture) -> None:
mocker.patch.object(CLIPSTEncoder, "download")
mock_load = mocker.patch.object(CLIPSTEncoder, "load")
clip_model = CLIPSTEncoder("test_model_name", cache_dir="test_cache", eager=True, test_arg="test_arg")
mocker.patch.object(CLIPEncoder, "download")
mock_load = mocker.patch.object(CLIPEncoder, "load")
clip_model = CLIPEncoder("ViT-B-32::openai", cache_dir="test_cache", eager=True, test_arg="test_arg")

assert clip_model.model_name == "test_model_name"
assert clip_model.model_name == "ViT-B-32::openai"
mock_load.assert_called_once_with(test_arg="test_arg")

def test_lazy_init(self, mocker: MockerFixture) -> None:
mock_download = mocker.patch.object(CLIPSTEncoder, "download")
mock_load = mocker.patch.object(CLIPSTEncoder, "load")
clip_model = CLIPSTEncoder("test_model_name", cache_dir="test_cache", eager=False, test_arg="test_arg")
mock_download = mocker.patch.object(CLIPEncoder, "download")
mock_load = mocker.patch.object(CLIPEncoder, "load")
clip_model = CLIPEncoder("ViT-B-32::openai", cache_dir="test_cache", eager=False, test_arg="test_arg")

assert clip_model.model_name == "test_model_name"
assert clip_model.model_name == "ViT-B-32::openai"
mock_download.assert_called_once_with(test_arg="test_arg")
mock_load.assert_not_called()

def test_basic_image(self, pil_image: Image.Image, mocker: MockerFixture) -> None:
mocker.patch.object(CLIPSTEncoder, "load")
clip_encoder = CLIPSTEncoder("test_model_name", cache_dir="test_cache")
clip_encoder.model = mock.Mock()
clip_encoder.model.encode.return_value = self.embedding
mocker.patch.object(CLIPEncoder, "download")
mocked = mocker.patch("app.models.clip.ort.InferenceSession", autospec=True)
mocked.return_value.run.return_value = [[self.embedding]]
clip_encoder = CLIPEncoder("ViT-B-32::openai", cache_dir="test_cache", mode="vision")
assert clip_encoder.mode == "vision"
embedding = clip_encoder.predict(pil_image)

assert isinstance(embedding, list)
assert len(embedding) == 512
assert all([isinstance(num, float) for num in embedding])
clip_encoder.model.encode.assert_called_once()
clip_encoder.vision_model.run.assert_called_once()

def test_basic_text(self, mocker: MockerFixture) -> None:
mocker.patch.object(CLIPSTEncoder, "load")
clip_encoder = CLIPSTEncoder("test_model_name", cache_dir="test_cache")
clip_encoder.model = mock.Mock()
clip_encoder.model.encode.return_value = self.embedding
mocker.patch.object(CLIPEncoder, "download")
mocked = mocker.patch("app.models.clip.ort.InferenceSession", autospec=True)
mocked.return_value.run.return_value = [[self.embedding]]
clip_encoder = CLIPEncoder("ViT-B-32::openai", cache_dir="test_cache", mode="text")
assert clip_encoder.mode == "text"
embedding = clip_encoder.predict("test search query")

assert isinstance(embedding, list)
assert len(embedding) == 512
assert all([isinstance(num, float) for num in embedding])
clip_encoder.model.encode.assert_called_once()
clip_encoder.text_model.run.assert_called_once()

class TestFaceRecognition:

@@ -254,3 +259,13 @@ class TestEndpoints:
headers=headers,
)
assert response.status_code == 200

def test_sess_options() -> None:
sess_options = PicklableSessionOptions()
sess_options.intra_op_num_threads = 1
sess_options.inter_op_num_threads = 1
pickled = pickle.dumps(sess_options)
unpickled = pickle.loads(pickled)
assert unpickled.intra_op_num_threads == 1
assert unpickled.inter_op_num_threads == 1
machine-learning/poetry.lock (generated, 1739 lines changed): file diff suppressed because it is too large.
@@ -1,6 +1,6 @@
[tool.poetry]
name = "machine-learning"
version = "1.74.0"
version = "1.75.0"
description = ""
authors = ["Hau Tran <alex.tran1502@gmail.com>"]
readme = "README.md"

@@ -13,7 +13,6 @@ torch = [
{markers = "platform_machine == 'amd64' or platform_machine == 'x86_64'", version = "=2.0.1", source = "pytorch-cpu"}
]
transformers = "^4.29.2"
sentence-transformers = "^2.2.2"
onnxruntime = "^1.15.0"
insightface = "^0.7.3"
opencv-python-headless = "^4.7.0.72"

@@ -22,6 +21,15 @@ fastapi = "^0.95.2"
uvicorn = {extras = ["standard"], version = "^0.22.0"}
pydantic = "^1.10.8"
aiocache = "^0.12.1"
optimum = "^1.9.1"
torchvision = [
{markers = "platform_machine == 'arm64' or platform_machine == 'aarch64'", version = "=0.15.2", source = "pypi"},
{markers = "platform_machine == 'amd64' or platform_machine == 'x86_64'", version = "=0.15.2", source = "pytorch-cpu"}
]
rich = "^13.4.2"
ftfy = "^6.1.1"
setuptools = "^68.0.0"
open-clip-torch = "^2.20.0"

[tool.poetry.group.dev.dependencies]
mypy = "^1.3.0"

@@ -62,13 +70,20 @@ warn_untyped_fields = true
[[tool.mypy.overrides]]
module = [
"huggingface_hub",
"transformers.pipelines",
"transformers",
"cv2",
"insightface.model_zoo",
"insightface.utils.face_align",
"insightface.utils.storage",
"sentence_transformers",
"sentence_transformers.util",
"onnxruntime",
"optimum",
"optimum.pipelines",
"optimum.onnxruntime",
"clip_server.model.clip",
"clip_server.model.clip_onnx",
"clip_server.model.pretrained_models",
"clip_server.model.tokenization",
"torchvision.transforms",
"aiocache.backends.memory",
"aiocache.lock",
"aiocache.plugins"
machine-learning/requirements.txt (new file, 2 lines)
@@ -0,0 +1,2 @@
# requirements to be installed with `--no-deps` flag
clip-server==0.8.*
@@ -35,8 +35,8 @@ platform :android do
task: 'bundle',
build_type: 'Release',
properties: {
"android.injected.version.code" => 97,
"android.injected.version.name" => "1.74.0",
"android.injected.version.code" => 98,
"android.injected.version.name" => "1.75.0",
}
)
upload_to_play_store(skip_upload_apk: true, skip_upload_images: true, skip_upload_screenshots: true, aab: '../build/app/outputs/bundle/release/app-release.aab')

@@ -19,7 +19,7 @@ platform :ios do
desc "iOS Beta"
lane :beta do
increment_version_number(
version_number: "1.74.0"
version_number: "1.75.0"
)
increment_build_number(
build_number: latest_testflight_build_number + 1,
@@ -248,9 +248,9 @@ class BackupService {

req.fields['deviceAssetId'] = entity.id;
req.fields['deviceId'] = deviceId;
req.fields['fileCreatedAt'] = entity.createDateTime.toIso8601String();
req.fields['fileCreatedAt'] = entity.createDateTime.toUtc().toIso8601String();
req.fields['fileModifiedAt'] =
entity.modifiedDateTime.toIso8601String();
entity.modifiedDateTime.toUtc().toIso8601String();
req.fields['isFavorite'] = entity.isFavorite.toString();
req.fields['duration'] = entity.videoDuration.toString();

@@ -69,7 +69,6 @@ class AssetService {
await _apiService.assetApi.getAllAssetsWithETag(
eTag: etag,
userId: user.id,
withoutThumbs: true,
);
if (assets == null) {
return null;
@@ -17,14 +17,12 @@ extension WithETag on AssetApi {
String? userId,
bool? isFavorite,
bool? isArchived,
bool? withoutThumbs,
}) async {
final response = await getAllAssetsWithHttpInfo(
ifNoneMatch: eTag,
userId: userId,
isFavorite: isFavorite,
isArchived: isArchived,
withoutThumbs: withoutThumbs,
);
if (response.statusCode >= HttpStatus.badRequest) {
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
mobile/openapi/.openapi-generator/FILES (generated, 15 lines changed)
@@ -29,6 +29,8 @@ doc/AssetResponseDto.md
|
||||
doc/AssetStatsResponseDto.md
|
||||
doc/AssetTypeEnum.md
|
||||
doc/AudioCodec.md
|
||||
doc/AuditApi.md
|
||||
doc/AuditDeletesResponseDto.md
|
||||
doc/AuthDeviceResponseDto.md
|
||||
doc/AuthenticationApi.md
|
||||
doc/BulkIdResponseDto.md
|
||||
@@ -50,6 +52,7 @@ doc/DeleteAssetStatus.md
|
||||
doc/DownloadArchiveInfo.md
|
||||
doc/DownloadInfoDto.md
|
||||
doc/DownloadResponseDto.md
|
||||
doc/EntityType.md
|
||||
doc/ExifResponseDto.md
|
||||
doc/ImportAssetDto.md
|
||||
doc/JobApi.md
|
||||
@@ -81,7 +84,6 @@ doc/SearchAlbumResponseDto.md
|
||||
doc/SearchApi.md
|
||||
doc/SearchAssetDto.md
|
||||
doc/SearchAssetResponseDto.md
|
||||
doc/SearchConfigResponseDto.md
|
||||
doc/SearchExploreItem.md
|
||||
doc/SearchExploreResponseDto.md
|
||||
doc/SearchFacetCountResponseDto.md
|
||||
@@ -105,6 +107,7 @@ doc/SystemConfigApi.md
|
||||
doc/SystemConfigDto.md
|
||||
doc/SystemConfigFFmpegDto.md
|
||||
doc/SystemConfigJobDto.md
|
||||
doc/SystemConfigMachineLearningDto.md
|
||||
doc/SystemConfigOAuthDto.md
|
||||
doc/SystemConfigPasswordLoginDto.md
|
||||
doc/SystemConfigStorageTemplateDto.md
|
||||
@@ -134,6 +137,7 @@ lib/api.dart
|
||||
lib/api/album_api.dart
|
||||
lib/api/api_key_api.dart
|
||||
lib/api/asset_api.dart
|
||||
lib/api/audit_api.dart
|
||||
lib/api/authentication_api.dart
|
||||
lib/api/job_api.dart
|
||||
lib/api/o_auth_api.dart
|
||||
@@ -176,6 +180,7 @@ lib/model/asset_response_dto.dart
|
||||
lib/model/asset_stats_response_dto.dart
|
||||
lib/model/asset_type_enum.dart
|
||||
lib/model/audio_codec.dart
|
||||
lib/model/audit_deletes_response_dto.dart
|
||||
lib/model/auth_device_response_dto.dart
|
||||
lib/model/bulk_id_response_dto.dart
|
||||
lib/model/bulk_ids_dto.dart
|
||||
@@ -196,6 +201,7 @@ lib/model/delete_asset_status.dart
|
||||
lib/model/download_archive_info.dart
|
||||
lib/model/download_info_dto.dart
|
||||
lib/model/download_response_dto.dart
|
||||
lib/model/entity_type.dart
|
||||
lib/model/exif_response_dto.dart
|
||||
lib/model/import_asset_dto.dart
|
||||
lib/model/job_command.dart
|
||||
@@ -222,7 +228,6 @@ lib/model/queue_status_dto.dart
|
||||
lib/model/search_album_response_dto.dart
|
||||
lib/model/search_asset_dto.dart
|
||||
lib/model/search_asset_response_dto.dart
|
||||
lib/model/search_config_response_dto.dart
|
||||
lib/model/search_explore_item.dart
|
||||
lib/model/search_explore_response_dto.dart
|
||||
lib/model/search_facet_count_response_dto.dart
|
||||
@@ -243,6 +248,7 @@ lib/model/smart_info_response_dto.dart
|
||||
lib/model/system_config_dto.dart
|
||||
lib/model/system_config_f_fmpeg_dto.dart
|
||||
lib/model/system_config_job_dto.dart
|
||||
lib/model/system_config_machine_learning_dto.dart
|
||||
lib/model/system_config_o_auth_dto.dart
|
||||
lib/model/system_config_password_login_dto.dart
|
||||
lib/model/system_config_storage_template_dto.dart
|
||||
@@ -292,6 +298,8 @@ test/asset_response_dto_test.dart
|
||||
test/asset_stats_response_dto_test.dart
|
||||
test/asset_type_enum_test.dart
|
||||
test/audio_codec_test.dart
|
||||
test/audit_api_test.dart
|
||||
test/audit_deletes_response_dto_test.dart
|
||||
test/auth_device_response_dto_test.dart
|
||||
test/authentication_api_test.dart
|
||||
test/bulk_id_response_dto_test.dart
|
||||
@@ -313,6 +321,7 @@ test/delete_asset_status_test.dart
|
||||
test/download_archive_info_test.dart
|
||||
test/download_info_dto_test.dart
|
||||
test/download_response_dto_test.dart
|
||||
test/entity_type_test.dart
|
||||
test/exif_response_dto_test.dart
|
||||
test/import_asset_dto_test.dart
|
||||
test/job_api_test.dart
|
||||
@@ -344,7 +353,6 @@ test/search_album_response_dto_test.dart
|
||||
test/search_api_test.dart
|
||||
test/search_asset_dto_test.dart
|
||||
test/search_asset_response_dto_test.dart
|
||||
test/search_config_response_dto_test.dart
|
||||
test/search_explore_item_test.dart
|
||||
test/search_explore_response_dto_test.dart
|
||||
test/search_facet_count_response_dto_test.dart
|
||||
@@ -368,6 +376,7 @@ test/system_config_api_test.dart
|
||||
test/system_config_dto_test.dart
|
||||
test/system_config_f_fmpeg_dto_test.dart
|
||||
test/system_config_job_dto_test.dart
|
||||
test/system_config_machine_learning_dto_test.dart
|
||||
test/system_config_o_auth_dto_test.dart
|
||||
test/system_config_password_login_dto_test.dart
|
||||
test/system_config_storage_template_dto_test.dart
|
||||
|
||||
mobile/openapi/README.md (generated, 8 lines changed)
@@ -3,7 +3,7 @@ Immich API
|
||||
|
||||
This Dart package is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:
|
||||
|
||||
- API version: 1.74.0
|
||||
- API version: 1.75.0
|
||||
- Build package: org.openapitools.codegen.languages.DartClientCodegen
|
||||
|
||||
## Requirements
|
||||
@@ -113,6 +113,7 @@ Class | Method | HTTP request | Description
|
||||
*AssetApi* | [**updateAsset**](doc//AssetApi.md#updateasset) | **PUT** /asset/{id} |
|
||||
*AssetApi* | [**updateAssets**](doc//AssetApi.md#updateassets) | **PUT** /asset |
|
||||
*AssetApi* | [**uploadFile**](doc//AssetApi.md#uploadfile) | **POST** /asset/upload |
|
||||
*AuditApi* | [**getAuditDeletes**](doc//AuditApi.md#getauditdeletes) | **GET** /audit/deletes |
|
||||
*AuthenticationApi* | [**adminSignUp**](doc//AuthenticationApi.md#adminsignup) | **POST** /auth/admin-sign-up |
|
||||
*AuthenticationApi* | [**changePassword**](doc//AuthenticationApi.md#changepassword) | **POST** /auth/change-password |
|
||||
*AuthenticationApi* | [**getAuthDevices**](doc//AuthenticationApi.md#getauthdevices) | **GET** /auth/devices |
|
||||
@@ -139,7 +140,6 @@ Class | Method | HTTP request | Description
|
||||
*PersonApi* | [**updatePeople**](doc//PersonApi.md#updatepeople) | **PUT** /person |
|
||||
*PersonApi* | [**updatePerson**](doc//PersonApi.md#updateperson) | **PUT** /person/{id} |
|
||||
*SearchApi* | [**getExploreData**](doc//SearchApi.md#getexploredata) | **GET** /search/explore |
|
||||
*SearchApi* | [**getSearchConfig**](doc//SearchApi.md#getsearchconfig) | **GET** /search/config |
|
||||
*SearchApi* | [**search**](doc//SearchApi.md#search) | **GET** /search |
|
||||
*ServerInfoApi* | [**getServerFeatures**](doc//ServerInfoApi.md#getserverfeatures) | **GET** /server-info/features |
|
||||
*ServerInfoApi* | [**getServerInfo**](doc//ServerInfoApi.md#getserverinfo) | **GET** /server-info |
|
||||
@@ -204,6 +204,7 @@ Class | Method | HTTP request | Description
|
||||
- [AssetStatsResponseDto](doc//AssetStatsResponseDto.md)
|
||||
- [AssetTypeEnum](doc//AssetTypeEnum.md)
|
||||
- [AudioCodec](doc//AudioCodec.md)
|
||||
- [AuditDeletesResponseDto](doc//AuditDeletesResponseDto.md)
|
||||
- [AuthDeviceResponseDto](doc//AuthDeviceResponseDto.md)
|
||||
- [BulkIdResponseDto](doc//BulkIdResponseDto.md)
|
||||
- [BulkIdsDto](doc//BulkIdsDto.md)
|
||||
@@ -224,6 +225,7 @@ Class | Method | HTTP request | Description
|
||||
- [DownloadArchiveInfo](doc//DownloadArchiveInfo.md)
|
||||
- [DownloadInfoDto](doc//DownloadInfoDto.md)
|
||||
- [DownloadResponseDto](doc//DownloadResponseDto.md)
|
||||
- [EntityType](doc//EntityType.md)
|
||||
- [ExifResponseDto](doc//ExifResponseDto.md)
|
||||
- [ImportAssetDto](doc//ImportAssetDto.md)
|
||||
- [JobCommand](doc//JobCommand.md)
|
||||
@@ -250,7 +252,6 @@ Class | Method | HTTP request | Description
|
||||
- [SearchAlbumResponseDto](doc//SearchAlbumResponseDto.md)
|
||||
- [SearchAssetDto](doc//SearchAssetDto.md)
|
||||
- [SearchAssetResponseDto](doc//SearchAssetResponseDto.md)
|
||||
- [SearchConfigResponseDto](doc//SearchConfigResponseDto.md)
|
||||
- [SearchExploreItem](doc//SearchExploreItem.md)
|
||||
- [SearchExploreResponseDto](doc//SearchExploreResponseDto.md)
|
||||
- [SearchFacetCountResponseDto](doc//SearchFacetCountResponseDto.md)
|
||||
@@ -271,6 +272,7 @@ Class | Method | HTTP request | Description
|
||||
- [SystemConfigDto](doc//SystemConfigDto.md)
|
||||
- [SystemConfigFFmpegDto](doc//SystemConfigFFmpegDto.md)
|
||||
- [SystemConfigJobDto](doc//SystemConfigJobDto.md)
|
||||
- [SystemConfigMachineLearningDto](doc//SystemConfigMachineLearningDto.md)
|
||||
- [SystemConfigOAuthDto](doc//SystemConfigOAuthDto.md)
|
||||
- [SystemConfigPasswordLoginDto](doc//SystemConfigPasswordLoginDto.md)
|
||||
- [SystemConfigStorageTemplateDto](doc//SystemConfigStorageTemplateDto.md)
|
||||
|
||||
mobile/openapi/doc/AssetApi.md (generated, 8 lines changed)
@@ -380,7 +380,7 @@ Name | Type | Description | Notes
[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)

# **getAllAssets**
> List<AssetResponseDto> getAllAssets(userId, isFavorite, isArchived, withoutThumbs, skip, ifNoneMatch)
> List<AssetResponseDto> getAllAssets(userId, isFavorite, isArchived, skip, updatedAfter, ifNoneMatch)

@@ -408,12 +408,12 @@ final api_instance = AssetApi();
final userId = 38400000-8cf0-11bd-b23e-10b96e4ef00d; // String |
final isFavorite = true; // bool |
final isArchived = true; // bool |
final withoutThumbs = true; // bool | Include assets without thumbnails
final skip = 8.14; // num |
final updatedAfter = 2013-10-20T19:20:30+01:00; // DateTime |
final ifNoneMatch = ifNoneMatch_example; // String | ETag of data already cached on the client

try {
final result = api_instance.getAllAssets(userId, isFavorite, isArchived, withoutThumbs, skip, ifNoneMatch);
final result = api_instance.getAllAssets(userId, isFavorite, isArchived, skip, updatedAfter, ifNoneMatch);
print(result);
} catch (e) {
print('Exception when calling AssetApi->getAllAssets: $e\n');
@@ -427,8 +427,8 @@ Name | Type | Description | Notes
**userId** | **String**| | [optional]
**isFavorite** | **bool**| | [optional]
**isArchived** | **bool**| | [optional]
**withoutThumbs** | **bool**| Include assets without thumbnails | [optional]
**skip** | **num**| | [optional]
**updatedAfter** | **DateTime**| | [optional]
**ifNoneMatch** | **String**| ETag of data already cached on the client | [optional]

### Return type
mobile/openapi/doc/AuditApi.md (generated, new file, 73 lines)
@@ -0,0 +1,73 @@
# openapi.api.AuditApi

## Load the API package
```dart
import 'package:openapi/api.dart';
```

All URIs are relative to */api*

Method | HTTP request | Description
------------- | ------------- | -------------
[**getAuditDeletes**](AuditApi.md#getauditdeletes) | **GET** /audit/deletes |


# **getAuditDeletes**
> AuditDeletesResponseDto getAuditDeletes(entityType, after, userId)

### Example
```dart
import 'package:openapi/api.dart';
// TODO Configure API key authorization: cookie
//defaultApiClient.getAuthentication<ApiKeyAuth>('cookie').apiKey = 'YOUR_API_KEY';
// uncomment below to setup prefix (e.g. Bearer) for API key, if needed
//defaultApiClient.getAuthentication<ApiKeyAuth>('cookie').apiKeyPrefix = 'Bearer';
// TODO Configure API key authorization: api_key
//defaultApiClient.getAuthentication<ApiKeyAuth>('api_key').apiKey = 'YOUR_API_KEY';
// uncomment below to setup prefix (e.g. Bearer) for API key, if needed
//defaultApiClient.getAuthentication<ApiKeyAuth>('api_key').apiKeyPrefix = 'Bearer';
// TODO Configure HTTP Bearer authorization: bearer
// Case 1. Use String Token
//defaultApiClient.getAuthentication<HttpBearerAuth>('bearer').setAccessToken('YOUR_ACCESS_TOKEN');
// Case 2. Use Function which generate token.
// String yourTokenGeneratorFunction() { ... }
//defaultApiClient.getAuthentication<HttpBearerAuth>('bearer').setAccessToken(yourTokenGeneratorFunction);

final api_instance = AuditApi();
final entityType = ; // EntityType |
final after = 2013-10-20T19:20:30+01:00; // DateTime |
final userId = 38400000-8cf0-11bd-b23e-10b96e4ef00d; // String |

try {
final result = api_instance.getAuditDeletes(entityType, after, userId);
print(result);
} catch (e) {
print('Exception when calling AuditApi->getAuditDeletes: $e\n');
}
```

### Parameters

Name | Type | Description | Notes
------------- | ------------- | ------------- | -------------
**entityType** | [**EntityType**](.md)| |
**after** | **DateTime**| |
**userId** | **String**| | [optional]

### Return type

[**AuditDeletesResponseDto**](AuditDeletesResponseDto.md)

### Authorization

[cookie](../README.md#cookie), [api_key](../README.md#api_key), [bearer](../README.md#bearer)

### HTTP request headers

- **Content-Type**: Not defined
- **Accept**: application/json

[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
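A minimal sketch of how a client could use the new `GET /audit/deletes` endpoint for incremental sync, based on the parameters and the `AuditDeletesResponseDto` shape documented above. The base URL handling and the `x-api-key` header are assumptions, and the fallback when `needsFullSync` is true is an inference from the field name rather than documented behaviour:

```python
# Hedged sketch; the endpoint path, query parameters, and response fields are
# taken from the generated documentation above, everything else is an assumption.
import requests


def fetch_deleted_asset_ids(base_url: str, api_key: str, after_iso: str) -> tuple[list[str], bool]:
    response = requests.get(
        f"{base_url}/audit/deletes",
        params={"entityType": "ASSET", "after": after_iso},
        headers={"x-api-key": api_key},
        timeout=30,
    )
    response.raise_for_status()
    payload = response.json()  # {"ids": [...], "needsFullSync": bool}
    # when needsFullSync is true, a caller would presumably refetch everything
    return payload["ids"], payload["needsFullSync"]
```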
mobile/openapi/doc/AuditDeletesResponseDto.md (generated, new file, 16 lines)
@@ -0,0 +1,16 @@
# openapi.model.AuditDeletesResponseDto

## Load the model package
```dart
import 'package:openapi/api.dart';
```

## Properties
Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**ids** | **List<String>** | | [default to const []]
**needsFullSync** | **bool** | |

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
@@ -1,4 +1,4 @@
# openapi.model.SearchConfigResponseDto
# openapi.model.EntityType

## Load the model package
```dart
@@ -8,7 +8,6 @@ import 'package:openapi/api.dart';
## Properties
Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**enabled** | **bool** | |

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
mobile/openapi/doc/SearchApi.md (generated, 52 lines changed)
@@ -10,7 +10,6 @@ All URIs are relative to */api*
|
||||
Method | HTTP request | Description
|
||||
------------- | ------------- | -------------
|
||||
[**getExploreData**](SearchApi.md#getexploredata) | **GET** /search/explore |
|
||||
[**getSearchConfig**](SearchApi.md#getsearchconfig) | **GET** /search/config |
|
||||
[**search**](SearchApi.md#search) | **GET** /search |
|
||||
|
||||
|
||||
@@ -65,57 +64,6 @@ This endpoint does not need any parameter.
|
||||
|
||||
[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
|
||||
|
||||
# **getSearchConfig**
|
||||
> SearchConfigResponseDto getSearchConfig()
|
||||
|
||||
|
||||
|
||||
### Example
|
||||
```dart
|
||||
import 'package:openapi/api.dart';
|
||||
// TODO Configure API key authorization: cookie
|
||||
//defaultApiClient.getAuthentication<ApiKeyAuth>('cookie').apiKey = 'YOUR_API_KEY';
|
||||
// uncomment below to setup prefix (e.g. Bearer) for API key, if needed
|
||||
//defaultApiClient.getAuthentication<ApiKeyAuth>('cookie').apiKeyPrefix = 'Bearer';
|
||||
// TODO Configure API key authorization: api_key
|
||||
//defaultApiClient.getAuthentication<ApiKeyAuth>('api_key').apiKey = 'YOUR_API_KEY';
|
||||
// uncomment below to setup prefix (e.g. Bearer) for API key, if needed
|
||||
//defaultApiClient.getAuthentication<ApiKeyAuth>('api_key').apiKeyPrefix = 'Bearer';
|
||||
// TODO Configure HTTP Bearer authorization: bearer
|
||||
// Case 1. Use String Token
|
||||
//defaultApiClient.getAuthentication<HttpBearerAuth>('bearer').setAccessToken('YOUR_ACCESS_TOKEN');
|
||||
// Case 2. Use Function which generate token.
|
||||
// String yourTokenGeneratorFunction() { ... }
|
||||
//defaultApiClient.getAuthentication<HttpBearerAuth>('bearer').setAccessToken(yourTokenGeneratorFunction);
|
||||
|
||||
final api_instance = SearchApi();
|
||||
|
||||
try {
|
||||
final result = api_instance.getSearchConfig();
|
||||
print(result);
|
||||
} catch (e) {
|
||||
print('Exception when calling SearchApi->getSearchConfig: $e\n');
|
||||
}
|
||||
```
|
||||
|
||||
### Parameters
|
||||
This endpoint does not need any parameter.
|
||||
|
||||
### Return type
|
||||
|
||||
[**SearchConfigResponseDto**](SearchConfigResponseDto.md)
|
||||
|
||||
### Authorization
|
||||
|
||||
[cookie](../README.md#cookie), [api_key](../README.md#api_key), [bearer](../README.md#bearer)
|
||||
|
||||
### HTTP request headers
|
||||
|
||||
- **Content-Type**: Not defined
|
||||
- **Accept**: application/json
|
||||
|
||||
[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
|
||||
|
||||
# **search**
|
||||
> SearchResponseDto search(q, query, clip, type, isFavorite, isArchived, exifInfoPeriodCity, exifInfoPeriodState, exifInfoPeriodCountry, exifInfoPeriodMake, exifInfoPeriodModel, exifInfoPeriodProjectionType, smartInfoPeriodObjects, smartInfoPeriodTags, recent, motion)
|
||||
|
||||
|
||||
mobile/openapi/doc/ServerFeaturesDto.md (generated, 6 lines changed)
@@ -8,11 +8,15 @@ import 'package:openapi/api.dart';
## Properties
Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**machineLearning** | **bool** | |
**clipEncode** | **bool** | |
**configFile** | **bool** | |
**facialRecognition** | **bool** | |
**oauth** | **bool** | |
**oauthAutoLaunch** | **bool** | |
**passwordLogin** | **bool** | |
**search** | **bool** | |
**sidecar** | **bool** | |
**tagImage** | **bool** | |

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
mobile/openapi/doc/SystemConfigDto.md (generated, 1 line changed)
@@ -10,6 +10,7 @@ Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**ffmpeg** | [**SystemConfigFFmpegDto**](SystemConfigFFmpegDto.md) | |
**job** | [**SystemConfigJobDto**](SystemConfigJobDto.md) | |
**machineLearning** | [**SystemConfigMachineLearningDto**](SystemConfigMachineLearningDto.md) | |
**oauth** | [**SystemConfigOAuthDto**](SystemConfigOAuthDto.md) | |
**passwordLogin** | [**SystemConfigPasswordLoginDto**](SystemConfigPasswordLoginDto.md) | |
**storageTemplate** | [**SystemConfigStorageTemplateDto**](SystemConfigStorageTemplateDto.md) | |
mobile/openapi/doc/SystemConfigMachineLearningDto.md (generated, new file, 19 lines)
@@ -0,0 +1,19 @@
# openapi.model.SystemConfigMachineLearningDto

## Load the model package
```dart
import 'package:openapi/api.dart';
```

## Properties
Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**clipEncodeEnabled** | **bool** | |
**enabled** | **bool** | |
**facialRecognitionEnabled** | **bool** | |
**tagImageEnabled** | **bool** | |
**url** | **String** | |

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
mobile/openapi/lib/api.dart (generated, 5 lines changed)
@@ -31,6 +31,7 @@ part 'auth/http_bearer_auth.dart';
|
||||
part 'api/api_key_api.dart';
|
||||
part 'api/album_api.dart';
|
||||
part 'api/asset_api.dart';
|
||||
part 'api/audit_api.dart';
|
||||
part 'api/authentication_api.dart';
|
||||
part 'api/job_api.dart';
|
||||
part 'api/o_auth_api.dart';
|
||||
@@ -66,6 +67,7 @@ part 'model/asset_response_dto.dart';
|
||||
part 'model/asset_stats_response_dto.dart';
|
||||
part 'model/asset_type_enum.dart';
|
||||
part 'model/audio_codec.dart';
|
||||
part 'model/audit_deletes_response_dto.dart';
|
||||
part 'model/auth_device_response_dto.dart';
|
||||
part 'model/bulk_id_response_dto.dart';
|
||||
part 'model/bulk_ids_dto.dart';
|
||||
@@ -86,6 +88,7 @@ part 'model/delete_asset_status.dart';
|
||||
part 'model/download_archive_info.dart';
|
||||
part 'model/download_info_dto.dart';
|
||||
part 'model/download_response_dto.dart';
|
||||
part 'model/entity_type.dart';
|
||||
part 'model/exif_response_dto.dart';
|
||||
part 'model/import_asset_dto.dart';
|
||||
part 'model/job_command.dart';
|
||||
@@ -112,7 +115,6 @@ part 'model/queue_status_dto.dart';
|
||||
part 'model/search_album_response_dto.dart';
|
||||
part 'model/search_asset_dto.dart';
|
||||
part 'model/search_asset_response_dto.dart';
|
||||
part 'model/search_config_response_dto.dart';
|
||||
part 'model/search_explore_item.dart';
|
||||
part 'model/search_explore_response_dto.dart';
|
||||
part 'model/search_facet_count_response_dto.dart';
|
||||
@@ -133,6 +135,7 @@ part 'model/smart_info_response_dto.dart';
|
||||
part 'model/system_config_dto.dart';
|
||||
part 'model/system_config_f_fmpeg_dto.dart';
|
||||
part 'model/system_config_job_dto.dart';
|
||||
part 'model/system_config_machine_learning_dto.dart';
|
||||
part 'model/system_config_o_auth_dto.dart';
|
||||
part 'model/system_config_password_login_dto.dart';
|
||||
part 'model/system_config_storage_template_dto.dart';
|
||||
|
||||
mobile/openapi/lib/api/asset_api.dart (generated, 22 lines changed)
@@ -353,14 +353,13 @@ class AssetApi {
|
||||
///
|
||||
/// * [bool] isArchived:
|
||||
///
|
||||
/// * [bool] withoutThumbs:
|
||||
/// Include assets without thumbnails
|
||||
///
|
||||
/// * [num] skip:
|
||||
///
|
||||
/// * [DateTime] updatedAfter:
|
||||
///
|
||||
/// * [String] ifNoneMatch:
|
||||
/// ETag of data already cached on the client
|
||||
Future<Response> getAllAssetsWithHttpInfo({ String? userId, bool? isFavorite, bool? isArchived, bool? withoutThumbs, num? skip, String? ifNoneMatch, }) async {
|
||||
Future<Response> getAllAssetsWithHttpInfo({ String? userId, bool? isFavorite, bool? isArchived, num? skip, DateTime? updatedAfter, String? ifNoneMatch, }) async {
|
||||
// ignore: prefer_const_declarations
|
||||
final path = r'/asset';
|
||||
|
||||
@@ -380,12 +379,12 @@ class AssetApi {
|
||||
if (isArchived != null) {
|
||||
queryParams.addAll(_queryParams('', 'isArchived', isArchived));
|
||||
}
|
||||
if (withoutThumbs != null) {
|
||||
queryParams.addAll(_queryParams('', 'withoutThumbs', withoutThumbs));
|
||||
}
|
||||
if (skip != null) {
|
||||
queryParams.addAll(_queryParams('', 'skip', skip));
|
||||
}
|
||||
if (updatedAfter != null) {
|
||||
queryParams.addAll(_queryParams('', 'updatedAfter', updatedAfter));
|
||||
}
|
||||
|
||||
if (ifNoneMatch != null) {
|
||||
headerParams[r'if-none-match'] = parameterToString(ifNoneMatch);
|
||||
@@ -415,15 +414,14 @@ class AssetApi {
|
||||
///
|
||||
/// * [bool] isArchived:
|
||||
///
|
||||
/// * [bool] withoutThumbs:
|
||||
/// Include assets without thumbnails
|
||||
///
|
||||
/// * [num] skip:
|
||||
///
|
||||
/// * [DateTime] updatedAfter:
|
||||
///
|
||||
/// * [String] ifNoneMatch:
|
||||
/// ETag of data already cached on the client
|
||||
Future<List<AssetResponseDto>?> getAllAssets({ String? userId, bool? isFavorite, bool? isArchived, bool? withoutThumbs, num? skip, String? ifNoneMatch, }) async {
|
||||
final response = await getAllAssetsWithHttpInfo( userId: userId, isFavorite: isFavorite, isArchived: isArchived, withoutThumbs: withoutThumbs, skip: skip, ifNoneMatch: ifNoneMatch, );
|
||||
Future<List<AssetResponseDto>?> getAllAssets({ String? userId, bool? isFavorite, bool? isArchived, num? skip, DateTime? updatedAfter, String? ifNoneMatch, }) async {
|
||||
final response = await getAllAssetsWithHttpInfo( userId: userId, isFavorite: isFavorite, isArchived: isArchived, skip: skip, updatedAfter: updatedAfter, ifNoneMatch: ifNoneMatch, );
|
||||
if (response.statusCode >= HttpStatus.badRequest) {
|
||||
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
||||
}
|
||||
|
||||
mobile/openapi/lib/api/audit_api.dart (generated, new file, 79 lines)
@@ -0,0 +1,79 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.12
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
part of openapi.api;
|
||||
|
||||
|
||||
class AuditApi {
|
||||
AuditApi([ApiClient? apiClient]) : apiClient = apiClient ?? defaultApiClient;
|
||||
|
||||
final ApiClient apiClient;
|
||||
|
||||
/// Performs an HTTP 'GET /audit/deletes' operation and returns the [Response].
|
||||
/// Parameters:
|
||||
///
|
||||
/// * [EntityType] entityType (required):
|
||||
///
|
||||
/// * [DateTime] after (required):
|
||||
///
|
||||
/// * [String] userId:
|
||||
Future<Response> getAuditDeletesWithHttpInfo(EntityType entityType, DateTime after, { String? userId, }) async {
|
||||
// ignore: prefer_const_declarations
|
||||
final path = r'/audit/deletes';
|
||||
|
||||
// ignore: prefer_final_locals
|
||||
Object? postBody;
|
||||
|
||||
final queryParams = <QueryParam>[];
|
||||
final headerParams = <String, String>{};
|
||||
final formParams = <String, String>{};
|
||||
|
||||
queryParams.addAll(_queryParams('', 'entityType', entityType));
|
||||
if (userId != null) {
|
||||
queryParams.addAll(_queryParams('', 'userId', userId));
|
||||
}
|
||||
queryParams.addAll(_queryParams('', 'after', after));
|
||||
|
||||
const contentTypes = <String>[];
|
||||
|
||||
|
||||
return apiClient.invokeAPI(
|
||||
path,
|
||||
'GET',
|
||||
queryParams,
|
||||
postBody,
|
||||
headerParams,
|
||||
formParams,
|
||||
contentTypes.isEmpty ? null : contentTypes.first,
|
||||
);
|
||||
}
|
||||
|
||||
/// Parameters:
|
||||
///
|
||||
/// * [EntityType] entityType (required):
|
||||
///
|
||||
/// * [DateTime] after (required):
|
||||
///
|
||||
/// * [String] userId:
|
||||
Future<AuditDeletesResponseDto?> getAuditDeletes(EntityType entityType, DateTime after, { String? userId, }) async {
|
||||
final response = await getAuditDeletesWithHttpInfo(entityType, after, userId: userId, );
|
||||
if (response.statusCode >= HttpStatus.badRequest) {
|
||||
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
||||
}
|
||||
// When a remote server returns no body with a status of 204, we shall not decode it.
|
||||
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
|
||||
// FormatException when trying to decode an empty string.
|
||||
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
|
||||
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'AuditDeletesResponseDto',) as AuditDeletesResponseDto;
|
||||
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
mobile/openapi/lib/api/search_api.dart (generated, 41 lines changed)
@@ -60,47 +60,6 @@ class SearchApi {
|
||||
return null;
|
||||
}
|
||||
|
||||
/// Performs an HTTP 'GET /search/config' operation and returns the [Response].
|
||||
Future<Response> getSearchConfigWithHttpInfo() async {
|
||||
// ignore: prefer_const_declarations
|
||||
final path = r'/search/config';
|
||||
|
||||
// ignore: prefer_final_locals
|
||||
Object? postBody;
|
||||
|
||||
final queryParams = <QueryParam>[];
|
||||
final headerParams = <String, String>{};
|
||||
final formParams = <String, String>{};
|
||||
|
||||
const contentTypes = <String>[];
|
||||
|
||||
|
||||
return apiClient.invokeAPI(
|
||||
path,
|
||||
'GET',
|
||||
queryParams,
|
||||
postBody,
|
||||
headerParams,
|
||||
formParams,
|
||||
contentTypes.isEmpty ? null : contentTypes.first,
|
||||
);
|
||||
}
|
||||
|
||||
Future<SearchConfigResponseDto?> getSearchConfig() async {
|
||||
final response = await getSearchConfigWithHttpInfo();
|
||||
if (response.statusCode >= HttpStatus.badRequest) {
|
||||
throw ApiException(response.statusCode, await _decodeBodyBytes(response));
|
||||
}
|
||||
// When a remote server returns no body with a status of 204, we shall not decode it.
|
||||
// At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
|
||||
// FormatException when trying to decode an empty string.
|
||||
if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
|
||||
return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'SearchConfigResponseDto',) as SearchConfigResponseDto;
|
||||
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/// Performs an HTTP 'GET /search' operation and returns the [Response].
|
||||
/// Parameters:
|
||||
///
|
||||
|
||||
mobile/openapi/lib/api_client.dart (generated, 8 lines changed)
@@ -227,6 +227,8 @@ class ApiClient {
|
||||
return AssetTypeEnumTypeTransformer().decode(value);
|
||||
case 'AudioCodec':
|
||||
return AudioCodecTypeTransformer().decode(value);
|
||||
case 'AuditDeletesResponseDto':
|
||||
return AuditDeletesResponseDto.fromJson(value);
|
||||
case 'AuthDeviceResponseDto':
|
||||
return AuthDeviceResponseDto.fromJson(value);
|
||||
case 'BulkIdResponseDto':
|
||||
@@ -267,6 +269,8 @@ class ApiClient {
|
||||
return DownloadInfoDto.fromJson(value);
|
||||
case 'DownloadResponseDto':
|
||||
return DownloadResponseDto.fromJson(value);
|
||||
case 'EntityType':
|
||||
return EntityTypeTypeTransformer().decode(value);
|
||||
case 'ExifResponseDto':
|
||||
return ExifResponseDto.fromJson(value);
|
||||
case 'ImportAssetDto':
|
||||
@@ -319,8 +323,6 @@ class ApiClient {
|
||||
return SearchAssetDto.fromJson(value);
|
||||
case 'SearchAssetResponseDto':
|
||||
return SearchAssetResponseDto.fromJson(value);
|
||||
case 'SearchConfigResponseDto':
|
||||
return SearchConfigResponseDto.fromJson(value);
|
||||
case 'SearchExploreItem':
|
||||
return SearchExploreItem.fromJson(value);
|
||||
case 'SearchExploreResponseDto':
|
||||
@@ -361,6 +363,8 @@ class ApiClient {
|
||||
return SystemConfigFFmpegDto.fromJson(value);
|
||||
case 'SystemConfigJobDto':
|
||||
return SystemConfigJobDto.fromJson(value);
|
||||
case 'SystemConfigMachineLearningDto':
|
||||
return SystemConfigMachineLearningDto.fromJson(value);
|
||||
case 'SystemConfigOAuthDto':
|
||||
return SystemConfigOAuthDto.fromJson(value);
|
||||
case 'SystemConfigPasswordLoginDto':
|
||||
|
||||
mobile/openapi/lib/api_helper.dart (generated, 3 lines changed)
@@ -67,6 +67,9 @@ String parameterToString(dynamic value) {
|
||||
if (value is DeleteAssetStatus) {
|
||||
return DeleteAssetStatusTypeTransformer().encode(value).toString();
|
||||
}
|
||||
if (value is EntityType) {
|
||||
return EntityTypeTypeTransformer().encode(value).toString();
|
||||
}
|
||||
if (value is JobCommand) {
|
||||
return JobCommandTypeTransformer().encode(value).toString();
|
||||
}
|
||||
|
||||
@@ -10,51 +10,60 @@
|
||||
|
||||
part of openapi.api;
|
||||
|
||||
class SearchConfigResponseDto {
|
||||
/// Returns a new [SearchConfigResponseDto] instance.
|
||||
SearchConfigResponseDto({
|
||||
required this.enabled,
|
||||
class AuditDeletesResponseDto {
|
||||
/// Returns a new [AuditDeletesResponseDto] instance.
|
||||
AuditDeletesResponseDto({
|
||||
this.ids = const [],
|
||||
required this.needsFullSync,
|
||||
});
|
||||
|
||||
bool enabled;
|
||||
List<String> ids;
|
||||
|
||||
bool needsFullSync;
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) => identical(this, other) || other is SearchConfigResponseDto &&
|
||||
other.enabled == enabled;
|
||||
bool operator ==(Object other) => identical(this, other) || other is AuditDeletesResponseDto &&
|
||||
other.ids == ids &&
|
||||
other.needsFullSync == needsFullSync;
|
||||
|
||||
@override
|
||||
int get hashCode =>
|
||||
// ignore: unnecessary_parenthesis
|
||||
(enabled.hashCode);
|
||||
(ids.hashCode) +
|
||||
(needsFullSync.hashCode);
|
||||
|
||||
@override
|
||||
String toString() => 'SearchConfigResponseDto[enabled=$enabled]';
|
||||
String toString() => 'AuditDeletesResponseDto[ids=$ids, needsFullSync=$needsFullSync]';
|
||||
|
||||
Map<String, dynamic> toJson() {
|
||||
final json = <String, dynamic>{};
|
||||
json[r'enabled'] = this.enabled;
|
||||
json[r'ids'] = this.ids;
|
||||
json[r'needsFullSync'] = this.needsFullSync;
|
||||
return json;
|
||||
}
|
||||
|
||||
/// Returns a new [SearchConfigResponseDto] instance and imports its values from
|
||||
/// Returns a new [AuditDeletesResponseDto] instance and imports its values from
|
||||
/// [value] if it's a [Map], null otherwise.
|
||||
// ignore: prefer_constructors_over_static_methods
|
||||
static SearchConfigResponseDto? fromJson(dynamic value) {
|
||||
static AuditDeletesResponseDto? fromJson(dynamic value) {
|
||||
if (value is Map) {
|
||||
final json = value.cast<String, dynamic>();
|
||||
|
||||
return SearchConfigResponseDto(
|
||||
enabled: mapValueOfType<bool>(json, r'enabled')!,
|
||||
return AuditDeletesResponseDto(
|
||||
ids: json[r'ids'] is List
|
||||
? (json[r'ids'] as List).cast<String>()
|
||||
: const [],
|
||||
needsFullSync: mapValueOfType<bool>(json, r'needsFullSync')!,
|
||||
);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
static List<SearchConfigResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
|
||||
final result = <SearchConfigResponseDto>[];
|
||||
static List<AuditDeletesResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
|
||||
final result = <AuditDeletesResponseDto>[];
|
||||
if (json is List && json.isNotEmpty) {
|
||||
for (final row in json) {
|
||||
final value = SearchConfigResponseDto.fromJson(row);
|
||||
final value = AuditDeletesResponseDto.fromJson(row);
|
||||
if (value != null) {
|
||||
result.add(value);
|
||||
}
|
||||
@@ -63,12 +72,12 @@ class SearchConfigResponseDto {
|
||||
return result.toList(growable: growable);
|
||||
}
|
||||
|
||||
static Map<String, SearchConfigResponseDto> mapFromJson(dynamic json) {
|
||||
final map = <String, SearchConfigResponseDto>{};
|
||||
static Map<String, AuditDeletesResponseDto> mapFromJson(dynamic json) {
|
||||
final map = <String, AuditDeletesResponseDto>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
|
||||
for (final entry in json.entries) {
|
||||
final value = SearchConfigResponseDto.fromJson(entry.value);
|
||||
final value = AuditDeletesResponseDto.fromJson(entry.value);
|
||||
if (value != null) {
|
||||
map[entry.key] = value;
|
||||
}
|
||||
@@ -77,14 +86,14 @@ class SearchConfigResponseDto {
|
||||
return map;
|
||||
}
|
||||
|
||||
// maps a json object with a list of SearchConfigResponseDto-objects as value to a dart map
|
||||
static Map<String, List<SearchConfigResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
|
||||
final map = <String, List<SearchConfigResponseDto>>{};
|
||||
// maps a json object with a list of AuditDeletesResponseDto-objects as value to a dart map
|
||||
static Map<String, List<AuditDeletesResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
|
||||
final map = <String, List<AuditDeletesResponseDto>>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
// ignore: parameter_assignments
|
||||
json = json.cast<String, dynamic>();
|
||||
for (final entry in json.entries) {
|
||||
map[entry.key] = SearchConfigResponseDto.listFromJson(entry.value, growable: growable,);
|
||||
map[entry.key] = AuditDeletesResponseDto.listFromJson(entry.value, growable: growable,);
|
||||
}
|
||||
}
|
||||
return map;
|
||||
@@ -92,7 +101,8 @@ class SearchConfigResponseDto {
|
||||
|
||||
/// The list of required keys that must be present in a JSON.
|
||||
static const requiredKeys = <String>{
|
||||
'enabled',
|
||||
'ids',
|
||||
'needsFullSync',
|
||||
};
|
||||
}
|
||||
|
||||
mobile/openapi/lib/model/entity_type.dart (generated, new file, 85 lines)
@@ -0,0 +1,85 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.12
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
part of openapi.api;
|
||||
|
||||
|
||||
class EntityType {
|
||||
/// Instantiate a new enum with the provided [value].
|
||||
const EntityType._(this.value);
|
||||
|
||||
/// The underlying value of this enum member.
|
||||
final String value;
|
||||
|
||||
@override
|
||||
String toString() => value;
|
||||
|
||||
String toJson() => value;
|
||||
|
||||
static const ASSET = EntityType._(r'ASSET');
|
||||
static const ALBUM = EntityType._(r'ALBUM');
|
||||
|
||||
/// List of all possible values in this [enum][EntityType].
|
||||
static const values = <EntityType>[
|
||||
ASSET,
|
||||
ALBUM,
|
||||
];
|
||||
|
||||
static EntityType? fromJson(dynamic value) => EntityTypeTypeTransformer().decode(value);
|
||||
|
||||
static List<EntityType>? listFromJson(dynamic json, {bool growable = false,}) {
|
||||
final result = <EntityType>[];
|
||||
if (json is List && json.isNotEmpty) {
|
||||
for (final row in json) {
|
||||
final value = EntityType.fromJson(row);
|
||||
if (value != null) {
|
||||
result.add(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result.toList(growable: growable);
|
||||
}
|
||||
}
|
||||
|
||||
/// Transformation class that can [encode] an instance of [EntityType] to String,
|
||||
/// and [decode] dynamic data back to [EntityType].
|
||||
class EntityTypeTypeTransformer {
|
||||
factory EntityTypeTypeTransformer() => _instance ??= const EntityTypeTypeTransformer._();
|
||||
|
||||
const EntityTypeTypeTransformer._();
|
||||
|
||||
String encode(EntityType data) => data.value;
|
||||
|
||||
/// Decodes a [dynamic value][data] to a EntityType.
|
||||
///
|
||||
/// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
|
||||
/// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
|
||||
/// cannot be decoded successfully, then an [UnimplementedError] is thrown.
|
||||
///
|
||||
/// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
|
||||
/// and users are still using an old app with the old code.
|
||||
EntityType? decode(dynamic data, {bool allowNull = true}) {
|
||||
if (data != null) {
|
||||
switch (data) {
|
||||
case r'ASSET': return EntityType.ASSET;
|
||||
case r'ALBUM': return EntityType.ALBUM;
|
||||
default:
|
||||
if (!allowNull) {
|
||||
throw ArgumentError('Unknown enum value to decode: $data');
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/// Singleton [EntityTypeTypeTransformer] instance.
|
||||
static EntityTypeTypeTransformer? _instance;
|
||||
}
|
||||
|
||||
mobile/openapi/lib/model/server_features_dto.dart (generated, 52 lines changed)
@@ -13,14 +13,22 @@ part of openapi.api;
|
||||
class ServerFeaturesDto {
|
||||
/// Returns a new [ServerFeaturesDto] instance.
|
||||
ServerFeaturesDto({
|
||||
required this.machineLearning,
|
||||
required this.clipEncode,
|
||||
required this.configFile,
|
||||
required this.facialRecognition,
|
||||
required this.oauth,
|
||||
required this.oauthAutoLaunch,
|
||||
required this.passwordLogin,
|
||||
required this.search,
|
||||
required this.sidecar,
|
||||
required this.tagImage,
|
||||
});
|
||||
|
||||
bool machineLearning;
|
||||
bool clipEncode;
|
||||
|
||||
bool configFile;
|
||||
|
||||
bool facialRecognition;
|
||||
|
||||
bool oauth;
|
||||
|
||||
@@ -30,33 +38,49 @@ class ServerFeaturesDto {
|
||||
|
||||
bool search;
|
||||
|
||||
bool sidecar;
|
||||
|
||||
bool tagImage;
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) => identical(this, other) || other is ServerFeaturesDto &&
|
||||
other.machineLearning == machineLearning &&
|
||||
other.clipEncode == clipEncode &&
|
||||
other.configFile == configFile &&
|
||||
other.facialRecognition == facialRecognition &&
|
||||
other.oauth == oauth &&
|
||||
other.oauthAutoLaunch == oauthAutoLaunch &&
|
||||
other.passwordLogin == passwordLogin &&
|
||||
other.search == search;
|
||||
other.search == search &&
|
||||
other.sidecar == sidecar &&
|
||||
other.tagImage == tagImage;
|
||||
|
||||
@override
|
||||
int get hashCode =>
|
||||
// ignore: unnecessary_parenthesis
|
||||
(machineLearning.hashCode) +
|
||||
(clipEncode.hashCode) +
|
||||
(configFile.hashCode) +
|
||||
(facialRecognition.hashCode) +
|
||||
(oauth.hashCode) +
|
||||
(oauthAutoLaunch.hashCode) +
|
||||
(passwordLogin.hashCode) +
|
||||
(search.hashCode);
|
||||
(search.hashCode) +
|
||||
(sidecar.hashCode) +
|
||||
(tagImage.hashCode);
|
||||
|
||||
@override
|
||||
String toString() => 'ServerFeaturesDto[machineLearning=$machineLearning, oauth=$oauth, oauthAutoLaunch=$oauthAutoLaunch, passwordLogin=$passwordLogin, search=$search]';
|
||||
String toString() => 'ServerFeaturesDto[clipEncode=$clipEncode, configFile=$configFile, facialRecognition=$facialRecognition, oauth=$oauth, oauthAutoLaunch=$oauthAutoLaunch, passwordLogin=$passwordLogin, search=$search, sidecar=$sidecar, tagImage=$tagImage]';
|
||||
|
||||
Map<String, dynamic> toJson() {
|
||||
final json = <String, dynamic>{};
|
||||
json[r'machineLearning'] = this.machineLearning;
|
||||
json[r'clipEncode'] = this.clipEncode;
|
||||
json[r'configFile'] = this.configFile;
|
||||
json[r'facialRecognition'] = this.facialRecognition;
|
||||
json[r'oauth'] = this.oauth;
|
||||
json[r'oauthAutoLaunch'] = this.oauthAutoLaunch;
|
||||
json[r'passwordLogin'] = this.passwordLogin;
|
||||
json[r'search'] = this.search;
|
||||
json[r'sidecar'] = this.sidecar;
|
||||
json[r'tagImage'] = this.tagImage;
|
||||
return json;
|
||||
}
|
||||
|
||||
@@ -68,11 +92,15 @@ class ServerFeaturesDto {
|
||||
final json = value.cast<String, dynamic>();
|
||||
|
||||
return ServerFeaturesDto(
|
||||
machineLearning: mapValueOfType<bool>(json, r'machineLearning')!,
|
||||
clipEncode: mapValueOfType<bool>(json, r'clipEncode')!,
|
||||
configFile: mapValueOfType<bool>(json, r'configFile')!,
|
||||
facialRecognition: mapValueOfType<bool>(json, r'facialRecognition')!,
|
||||
oauth: mapValueOfType<bool>(json, r'oauth')!,
|
||||
oauthAutoLaunch: mapValueOfType<bool>(json, r'oauthAutoLaunch')!,
|
||||
passwordLogin: mapValueOfType<bool>(json, r'passwordLogin')!,
|
||||
search: mapValueOfType<bool>(json, r'search')!,
|
||||
sidecar: mapValueOfType<bool>(json, r'sidecar')!,
|
||||
tagImage: mapValueOfType<bool>(json, r'tagImage')!,
|
||||
);
|
||||
}
|
||||
return null;
|
||||
@@ -120,11 +148,15 @@ class ServerFeaturesDto {
|
||||
|
||||
/// The list of required keys that must be present in a JSON.
|
||||
static const requiredKeys = <String>{
|
||||
'machineLearning',
|
||||
'clipEncode',
|
||||
'configFile',
|
||||
'facialRecognition',
|
||||
'oauth',
|
||||
'oauthAutoLaunch',
|
||||
'passwordLogin',
|
||||
'search',
|
||||
'sidecar',
|
||||
'tagImage',
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
mobile/openapi/lib/model/system_config_dto.dart (generated, 10 lines changed)
@@ -15,6 +15,7 @@ class SystemConfigDto {
|
||||
SystemConfigDto({
|
||||
required this.ffmpeg,
|
||||
required this.job,
|
||||
required this.machineLearning,
|
||||
required this.oauth,
|
||||
required this.passwordLogin,
|
||||
required this.storageTemplate,
|
||||
@@ -25,6 +26,8 @@ class SystemConfigDto {
|
||||
|
||||
SystemConfigJobDto job;
|
||||
|
||||
SystemConfigMachineLearningDto machineLearning;
|
||||
|
||||
SystemConfigOAuthDto oauth;
|
||||
|
||||
SystemConfigPasswordLoginDto passwordLogin;
|
||||
@@ -37,6 +40,7 @@ class SystemConfigDto {
|
||||
bool operator ==(Object other) => identical(this, other) || other is SystemConfigDto &&
|
||||
other.ffmpeg == ffmpeg &&
|
||||
other.job == job &&
|
||||
other.machineLearning == machineLearning &&
|
||||
other.oauth == oauth &&
|
||||
other.passwordLogin == passwordLogin &&
|
||||
other.storageTemplate == storageTemplate &&
|
||||
@@ -47,18 +51,20 @@ class SystemConfigDto {
|
||||
// ignore: unnecessary_parenthesis
|
||||
(ffmpeg.hashCode) +
|
||||
(job.hashCode) +
|
||||
(machineLearning.hashCode) +
|
||||
(oauth.hashCode) +
|
||||
(passwordLogin.hashCode) +
|
||||
(storageTemplate.hashCode) +
|
||||
(thumbnail.hashCode);
|
||||
|
||||
@override
|
||||
String toString() => 'SystemConfigDto[ffmpeg=$ffmpeg, job=$job, oauth=$oauth, passwordLogin=$passwordLogin, storageTemplate=$storageTemplate, thumbnail=$thumbnail]';
|
||||
String toString() => 'SystemConfigDto[ffmpeg=$ffmpeg, job=$job, machineLearning=$machineLearning, oauth=$oauth, passwordLogin=$passwordLogin, storageTemplate=$storageTemplate, thumbnail=$thumbnail]';
|
||||
|
||||
Map<String, dynamic> toJson() {
|
||||
final json = <String, dynamic>{};
|
||||
json[r'ffmpeg'] = this.ffmpeg;
|
||||
json[r'job'] = this.job;
|
||||
json[r'machineLearning'] = this.machineLearning;
|
||||
json[r'oauth'] = this.oauth;
|
||||
json[r'passwordLogin'] = this.passwordLogin;
|
||||
json[r'storageTemplate'] = this.storageTemplate;
|
||||
@@ -76,6 +82,7 @@ class SystemConfigDto {
|
||||
return SystemConfigDto(
|
||||
ffmpeg: SystemConfigFFmpegDto.fromJson(json[r'ffmpeg'])!,
|
||||
job: SystemConfigJobDto.fromJson(json[r'job'])!,
|
||||
machineLearning: SystemConfigMachineLearningDto.fromJson(json[r'machineLearning'])!,
|
||||
oauth: SystemConfigOAuthDto.fromJson(json[r'oauth'])!,
|
||||
passwordLogin: SystemConfigPasswordLoginDto.fromJson(json[r'passwordLogin'])!,
|
||||
storageTemplate: SystemConfigStorageTemplateDto.fromJson(json[r'storageTemplate'])!,
|
||||
@@ -129,6 +136,7 @@ class SystemConfigDto {
|
||||
static const requiredKeys = <String>{
|
||||
'ffmpeg',
|
||||
'job',
|
||||
'machineLearning',
|
||||
'oauth',
|
||||
'passwordLogin',
|
||||
'storageTemplate',
|
||||
|
||||
mobile/openapi/lib/model/system_config_machine_learning_dto.dart (generated, new file, 130 lines)
@@ -0,0 +1,130 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.12
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
part of openapi.api;
|
||||
|
||||
class SystemConfigMachineLearningDto {
|
||||
/// Returns a new [SystemConfigMachineLearningDto] instance.
|
||||
SystemConfigMachineLearningDto({
|
||||
required this.clipEncodeEnabled,
|
||||
required this.enabled,
|
||||
required this.facialRecognitionEnabled,
|
||||
required this.tagImageEnabled,
|
||||
required this.url,
|
||||
});
|
||||
|
||||
bool clipEncodeEnabled;
|
||||
|
||||
bool enabled;
|
||||
|
||||
bool facialRecognitionEnabled;
|
||||
|
||||
bool tagImageEnabled;
|
||||
|
||||
String url;
|
||||
|
||||
@override
|
||||
bool operator ==(Object other) => identical(this, other) || other is SystemConfigMachineLearningDto &&
|
||||
other.clipEncodeEnabled == clipEncodeEnabled &&
|
||||
other.enabled == enabled &&
|
||||
other.facialRecognitionEnabled == facialRecognitionEnabled &&
|
||||
other.tagImageEnabled == tagImageEnabled &&
|
||||
other.url == url;
|
||||
|
||||
@override
|
||||
int get hashCode =>
|
||||
// ignore: unnecessary_parenthesis
|
||||
(clipEncodeEnabled.hashCode) +
|
||||
(enabled.hashCode) +
|
||||
(facialRecognitionEnabled.hashCode) +
|
||||
(tagImageEnabled.hashCode) +
|
||||
(url.hashCode);
|
||||
|
||||
@override
|
||||
String toString() => 'SystemConfigMachineLearningDto[clipEncodeEnabled=$clipEncodeEnabled, enabled=$enabled, facialRecognitionEnabled=$facialRecognitionEnabled, tagImageEnabled=$tagImageEnabled, url=$url]';
|
||||
|
||||
Map<String, dynamic> toJson() {
|
||||
final json = <String, dynamic>{};
|
||||
json[r'clipEncodeEnabled'] = this.clipEncodeEnabled;
|
||||
json[r'enabled'] = this.enabled;
|
||||
json[r'facialRecognitionEnabled'] = this.facialRecognitionEnabled;
|
||||
json[r'tagImageEnabled'] = this.tagImageEnabled;
|
||||
json[r'url'] = this.url;
|
||||
return json;
|
||||
}
|
||||
|
||||
/// Returns a new [SystemConfigMachineLearningDto] instance and imports its values from
|
||||
/// [value] if it's a [Map], null otherwise.
|
||||
// ignore: prefer_constructors_over_static_methods
|
||||
static SystemConfigMachineLearningDto? fromJson(dynamic value) {
|
||||
if (value is Map) {
|
||||
final json = value.cast<String, dynamic>();
|
||||
|
||||
return SystemConfigMachineLearningDto(
|
||||
clipEncodeEnabled: mapValueOfType<bool>(json, r'clipEncodeEnabled')!,
|
||||
enabled: mapValueOfType<bool>(json, r'enabled')!,
|
||||
facialRecognitionEnabled: mapValueOfType<bool>(json, r'facialRecognitionEnabled')!,
|
||||
tagImageEnabled: mapValueOfType<bool>(json, r'tagImageEnabled')!,
|
||||
url: mapValueOfType<String>(json, r'url')!,
|
||||
);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
static List<SystemConfigMachineLearningDto> listFromJson(dynamic json, {bool growable = false,}) {
|
||||
final result = <SystemConfigMachineLearningDto>[];
|
||||
if (json is List && json.isNotEmpty) {
|
||||
for (final row in json) {
|
||||
final value = SystemConfigMachineLearningDto.fromJson(row);
|
||||
if (value != null) {
|
||||
result.add(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result.toList(growable: growable);
|
||||
}
|
||||
|
||||
static Map<String, SystemConfigMachineLearningDto> mapFromJson(dynamic json) {
|
||||
final map = <String, SystemConfigMachineLearningDto>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
json = json.cast<String, dynamic>(); // ignore: parameter_assignments
|
||||
for (final entry in json.entries) {
|
||||
final value = SystemConfigMachineLearningDto.fromJson(entry.value);
|
||||
if (value != null) {
|
||||
map[entry.key] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
// maps a json object with a list of SystemConfigMachineLearningDto-objects as value to a dart map
|
||||
static Map<String, List<SystemConfigMachineLearningDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
|
||||
final map = <String, List<SystemConfigMachineLearningDto>>{};
|
||||
if (json is Map && json.isNotEmpty) {
|
||||
// ignore: parameter_assignments
|
||||
json = json.cast<String, dynamic>();
|
||||
for (final entry in json.entries) {
|
||||
map[entry.key] = SystemConfigMachineLearningDto.listFromJson(entry.value, growable: growable,);
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
/// The list of required keys that must be present in a JSON.
|
||||
static const requiredKeys = <String>{
|
||||
'clipEncodeEnabled',
|
||||
'enabled',
|
||||
'facialRecognitionEnabled',
|
||||
'tagImageEnabled',
|
||||
'url',
|
||||
};
|
||||
}
|
||||
|
||||
mobile/openapi/test/asset_api_test.dart (generated, 2 lines changed)
@@ -55,7 +55,7 @@ void main() {
|
||||
|
||||
// Get all AssetEntity belong to the user
|
||||
//
|
||||
//Future<List<AssetResponseDto>> getAllAssets({ String userId, bool isFavorite, bool isArchived, bool withoutThumbs, num skip, String ifNoneMatch }) async
|
||||
//Future<List<AssetResponseDto>> getAllAssets({ String userId, bool isFavorite, bool isArchived, num skip, DateTime updatedAfter, String ifNoneMatch }) async
|
||||
test('test getAllAssets', () async {
|
||||
// TODO
|
||||
});
|
||||
|
||||
mobile/openapi/test/audit_api_test.dart (generated, new file, 26 lines)
@@ -0,0 +1,26 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.12
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
import 'package:openapi/api.dart';
|
||||
import 'package:test/test.dart';
|
||||
|
||||
|
||||
/// tests for AuditApi
|
||||
void main() {
|
||||
// final instance = AuditApi();
|
||||
|
||||
group('tests for AuditApi', () {
|
||||
//Future<AuditDeletesResponseDto> getAuditDeletes(EntityType entityType, DateTime after, { String userId }) async
|
||||
test('test getAuditDeletes', () async {
|
||||
// TODO
|
||||
});
|
||||
|
||||
});
|
||||
}
|
||||
32
mobile/openapi/test/audit_deletes_response_dto_test.dart
generated
Normal file
@@ -0,0 +1,32 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.12
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
import 'package:openapi/api.dart';
|
||||
import 'package:test/test.dart';
|
||||
|
||||
// tests for AuditDeletesResponseDto
|
||||
void main() {
|
||||
// final instance = AuditDeletesResponseDto();
|
||||
|
||||
group('test AuditDeletesResponseDto', () {
|
||||
// List<String> ids (default value: const [])
|
||||
test('to test the property `ids`', () async {
|
||||
// TODO
|
||||
});
|
||||
|
||||
// bool needsFullSync
|
||||
test('to test the property `needsFullSync`', () async {
|
||||
// TODO
|
||||
});
|
||||
|
||||
|
||||
});
|
||||
|
||||
}
|
||||
@@ -11,16 +11,10 @@
|
||||
import 'package:openapi/api.dart';
|
||||
import 'package:test/test.dart';
|
||||
|
||||
// tests for SearchConfigResponseDto
|
||||
// tests for EntityType
|
||||
void main() {
|
||||
// final instance = SearchConfigResponseDto();
|
||||
|
||||
group('test SearchConfigResponseDto', () {
|
||||
// bool enabled
|
||||
test('to test the property `enabled`', () async {
|
||||
// TODO
|
||||
});
|
||||
|
||||
group('test EntityType', () {
|
||||
|
||||
});
|
||||
|
||||
5
mobile/openapi/test/search_api_test.dart
generated
@@ -22,11 +22,6 @@ void main() {
|
||||
// TODO
|
||||
});
|
||||
|
||||
//Future<SearchConfigResponseDto> getSearchConfig() async
|
||||
test('test getSearchConfig', () async {
|
||||
// TODO
|
||||
});
|
||||
|
||||
//Future<SearchResponseDto> search({ String q, String query, bool clip, String type, bool isFavorite, bool isArchived, String exifInfoPeriodCity, String exifInfoPeriodState, String exifInfoPeriodCountry, String exifInfoPeriodMake, String exifInfoPeriodModel, String exifInfoPeriodProjectionType, List<String> smartInfoPeriodObjects, List<String> smartInfoPeriodTags, bool recent, bool motion }) async
|
||||
test('test search', () async {
|
||||
// TODO
|
||||
|
||||
24
mobile/openapi/test/server_features_dto_test.dart
generated
@@ -16,8 +16,18 @@ void main() {
|
||||
// final instance = ServerFeaturesDto();
|
||||
|
||||
group('test ServerFeaturesDto', () {
|
||||
// bool machineLearning
|
||||
test('to test the property `machineLearning`', () async {
|
||||
// bool clipEncode
|
||||
test('to test the property `clipEncode`', () async {
|
||||
// TODO
|
||||
});
|
||||
|
||||
// bool configFile
|
||||
test('to test the property `configFile`', () async {
|
||||
// TODO
|
||||
});
|
||||
|
||||
// bool facialRecognition
|
||||
test('to test the property `facialRecognition`', () async {
|
||||
// TODO
|
||||
});
|
||||
|
||||
@@ -41,6 +51,16 @@ void main() {
|
||||
// TODO
|
||||
});
|
||||
|
||||
// bool sidecar
|
||||
test('to test the property `sidecar`', () async {
|
||||
// TODO
|
||||
});
|
||||
|
||||
// bool tagImage
|
||||
test('to test the property `tagImage`', () async {
|
||||
// TODO
|
||||
});
|
||||
|
||||
|
||||
});
|
||||
|
||||
|
||||
5
mobile/openapi/test/system_config_dto_test.dart
generated
@@ -26,6 +26,11 @@ void main() {
|
||||
// TODO
|
||||
});
|
||||
|
||||
// SystemConfigMachineLearningDto machineLearning
|
||||
test('to test the property `machineLearning`', () async {
|
||||
// TODO
|
||||
});
|
||||
|
||||
// SystemConfigOAuthDto oauth
|
||||
test('to test the property `oauth`', () async {
|
||||
// TODO
|
||||
|
||||
47
mobile/openapi/test/system_config_machine_learning_dto_test.dart
generated
Normal file
@@ -0,0 +1,47 @@
|
||||
//
|
||||
// AUTO-GENERATED FILE, DO NOT MODIFY!
|
||||
//
|
||||
// @dart=2.12
|
||||
|
||||
// ignore_for_file: unused_element, unused_import
|
||||
// ignore_for_file: always_put_required_named_parameters_first
|
||||
// ignore_for_file: constant_identifier_names
|
||||
// ignore_for_file: lines_longer_than_80_chars
|
||||
|
||||
import 'package:openapi/api.dart';
|
||||
import 'package:test/test.dart';
|
||||
|
||||
// tests for SystemConfigMachineLearningDto
|
||||
void main() {
|
||||
// final instance = SystemConfigMachineLearningDto();
|
||||
|
||||
group('test SystemConfigMachineLearningDto', () {
|
||||
// bool clipEncodeEnabled
|
||||
test('to test the property `clipEncodeEnabled`', () async {
|
||||
// TODO
|
||||
});
|
||||
|
||||
// bool enabled
|
||||
test('to test the property `enabled`', () async {
|
||||
// TODO
|
||||
});
|
||||
|
||||
// bool facialRecognitionEnabled
|
||||
test('to test the property `facialRecognitionEnabled`', () async {
|
||||
// TODO
|
||||
});
|
||||
|
||||
// bool tagImageEnabled
|
||||
test('to test the property `tagImageEnabled`', () async {
|
||||
// TODO
|
||||
});
|
||||
|
||||
// String url
|
||||
test('to test the property `url`', () async {
|
||||
// TODO
|
||||
});
|
||||
|
||||
|
||||
});
|
||||
|
||||
}
|
||||
@@ -2,7 +2,7 @@ name: immich_mobile
description: Immich - selfhosted backup media file on mobile phone

publish_to: "none"
version: 1.74.0+97
version: 1.75.0+98
isar_version: &isar_version 3.1.0+1

environment:

@@ -13,7 +13,7 @@ mesa-va-drivers libmimalloc2.0 $(if [ $(arch) = "x86_64" ]; then echo "intel-med
# debian build for imagemagick has broken RAW support, so build manually
ENV LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH
ENV LD_RUN_PATH=/usr/local/lib:$LD_RUN_PATH
COPY bin/build-libraw.sh bin/build-imagemagick.sh bin/build-libvips.sh ./
COPY bin/build-libraw.sh bin/build-imagemagick.sh bin/build-libvips.sh bin/use-camera-wb.patch ./
RUN ./build-libraw.sh
RUN ./build-imagemagick.sh
RUN ./build-libvips.sh

@@ -13,6 +13,7 @@ sha256sum -c imagemagick.sha256
tar -xvf ${IMAGEMAGICK_VERSION}.tar.gz -C ImageMagick --strip-components=1
rm ${IMAGEMAGICK_VERSION}.tar.gz
rm imagemagick.sha256
patch -u ImageMagick/coders/dng.c -i use-camera-wb.patch
cd ImageMagick
./configure --with-modules
make -j$(nproc)

9
server/bin/use-camera-wb.patch
Executable file
@@ -0,0 +1,9 @@
@@ -339,6 +339,8 @@
     option=GetImageOption(image_info,"dng:use_camera_wb");
     if (option != (const char *) NULL)
       raw_info->params.use_camera_wb=IsStringTrue(option);
+    else
+      raw_info->params.use_camera_wb=MagickTrue;
     option=GetImageOption(image_info,"dng:use-auto-wb");
     if (option == (const char *) NULL)
       option=GetImageOption(image_info,"dng:use_auto_wb");
@@ -752,15 +752,6 @@
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "withoutThumbs",
|
||||
"required": false,
|
||||
"in": "query",
|
||||
"description": "Include assets without thumbnails",
|
||||
"schema": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "skip",
|
||||
"required": false,
|
||||
@@ -769,6 +760,15 @@
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "updatedAfter",
|
||||
"required": false,
|
||||
"in": "query",
|
||||
"schema": {
|
||||
"format": "date-time",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "if-none-match",
|
||||
"in": "header",
|
||||
@@ -2071,6 +2071,65 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
"/audit/deletes": {
|
||||
"get": {
|
||||
"operationId": "getAuditDeletes",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "entityType",
|
||||
"required": true,
|
||||
"in": "query",
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/EntityType"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "userId",
|
||||
"required": false,
|
||||
"in": "query",
|
||||
"schema": {
|
||||
"format": "uuid",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "after",
|
||||
"required": true,
|
||||
"in": "query",
|
||||
"schema": {
|
||||
"format": "date-time",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/AuditDeletesResponseDto"
|
||||
}
|
||||
}
|
||||
},
|
||||
"description": ""
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"bearer": []
|
||||
},
|
||||
{
|
||||
"cookie": []
|
||||
},
|
||||
{
|
||||
"api_key": []
|
||||
}
|
||||
],
|
||||
"tags": [
|
||||
"Audit"
|
||||
]
|
||||
}
|
||||
},
|
||||
"/auth/admin-sign-up": {
|
||||
"post": {
|
||||
"operationId": "adminSignUp",
|
||||
@@ -3184,38 +3243,6 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
"/search/config": {
|
||||
"get": {
|
||||
"operationId": "getSearchConfig",
|
||||
"parameters": [],
|
||||
"responses": {
|
||||
"200": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/SearchConfigResponseDto"
|
||||
}
|
||||
}
|
||||
},
|
||||
"description": ""
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"bearer": []
|
||||
},
|
||||
{
|
||||
"cookie": []
|
||||
},
|
||||
{
|
||||
"api_key": []
|
||||
}
|
||||
],
|
||||
"tags": [
|
||||
"Search"
|
||||
]
|
||||
}
|
||||
},
|
||||
"/search/explore": {
|
||||
"get": {
|
||||
"operationId": "getExploreData",
|
||||
@@ -4653,7 +4680,7 @@
|
||||
"info": {
|
||||
"title": "Immich",
|
||||
"description": "Immich API",
|
||||
"version": "1.74.0",
|
||||
"version": "1.75.0",
|
||||
"contact": {}
|
||||
},
|
||||
"tags": [],
|
||||
@@ -5239,6 +5266,24 @@
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"AuditDeletesResponseDto": {
|
||||
"properties": {
|
||||
"ids": {
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"needsFullSync": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"needsFullSync",
|
||||
"ids"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"AuthDeviceResponseDto": {
|
||||
"properties": {
|
||||
"createdAt": {
|
||||
@@ -5701,6 +5746,13 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"EntityType": {
|
||||
"enum": [
|
||||
"ASSET",
|
||||
"ALBUM"
|
||||
],
|
||||
"type": "string"
|
||||
},
|
||||
"ExifResponseDto": {
|
||||
"properties": {
|
||||
"city": {
|
||||
@@ -6340,17 +6392,6 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"SearchConfigResponseDto": {
|
||||
"properties": {
|
||||
"enabled": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"enabled"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"SearchExploreItem": {
|
||||
"properties": {
|
||||
"data": {
|
||||
@@ -6434,7 +6475,13 @@
|
||||
},
|
||||
"ServerFeaturesDto": {
|
||||
"properties": {
|
||||
"machineLearning": {
|
||||
"clipEncode": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"configFile": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"facialRecognition": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"oauth": {
|
||||
@@ -6448,11 +6495,21 @@
|
||||
},
|
||||
"search": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"sidecar": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"tagImage": {
|
||||
"type": "boolean"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"machineLearning",
|
||||
"configFile",
|
||||
"clipEncode",
|
||||
"facialRecognition",
|
||||
"sidecar",
|
||||
"search",
|
||||
"tagImage",
|
||||
"oauth",
|
||||
"oauthAutoLaunch",
|
||||
"passwordLogin"
|
||||
@@ -6784,6 +6841,9 @@
|
||||
"job": {
|
||||
"$ref": "#/components/schemas/SystemConfigJobDto"
|
||||
},
|
||||
"machineLearning": {
|
||||
"$ref": "#/components/schemas/SystemConfigMachineLearningDto"
|
||||
},
|
||||
"oauth": {
|
||||
"$ref": "#/components/schemas/SystemConfigOAuthDto"
|
||||
},
|
||||
@@ -6799,6 +6859,7 @@
|
||||
},
|
||||
"required": [
|
||||
"ffmpeg",
|
||||
"machineLearning",
|
||||
"oauth",
|
||||
"passwordLogin",
|
||||
"storageTemplate",
|
||||
@@ -6905,6 +6966,33 @@
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"SystemConfigMachineLearningDto": {
|
||||
"properties": {
|
||||
"clipEncodeEnabled": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"enabled": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"facialRecognitionEnabled": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"tagImageEnabled": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"url": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"enabled",
|
||||
"url",
|
||||
"clipEncodeEnabled",
|
||||
"facialRecognitionEnabled",
|
||||
"tagImageEnabled"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"SystemConfigOAuthDto": {
|
||||
"properties": {
|
||||
"autoLaunch": {
|
||||
|
||||
4
server/package-lock.json
generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "immich",
|
||||
"version": "1.74.0",
|
||||
"version": "1.75.0",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "immich",
|
||||
"version": "1.74.0",
|
||||
"version": "1.75.0",
|
||||
"license": "UNLICENSED",
|
||||
"dependencies": {
|
||||
"@babel/runtime": "^7.20.13",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "immich",
|
||||
"version": "1.74.0",
|
||||
"version": "1.75.0",
|
||||
"description": "",
|
||||
"author": "",
|
||||
"private": true,
|
||||
|
||||
@@ -13,7 +13,7 @@ import {
|
||||
import { when } from 'jest-when';
|
||||
import { Readable } from 'stream';
|
||||
import { ICryptoRepository } from '../crypto';
|
||||
import { IJobRepository, JobName } from '../index';
|
||||
import { IJobRepository, JobName } from '../job';
|
||||
import { IStorageRepository } from '../storage';
|
||||
import { AssetStats, IAssetRepository } from './asset.repository';
|
||||
import { AssetService, UploadFieldName } from './asset.service';
|
||||
|
||||
@@ -16,18 +16,23 @@ import {
|
||||
AssetIdsDto,
|
||||
AssetJobName,
|
||||
AssetJobsDto,
|
||||
AssetStatsDto,
|
||||
DownloadArchiveInfo,
|
||||
DownloadInfoDto,
|
||||
DownloadResponseDto,
|
||||
MapMarkerDto,
|
||||
mapStats,
|
||||
MemoryLaneDto,
|
||||
TimeBucketAssetDto,
|
||||
TimeBucketDto,
|
||||
} from './dto';
|
||||
import { AssetStatsDto, mapStats } from './dto/asset-statistics.dto';
|
||||
import { MapMarkerDto } from './dto/map-marker.dto';
|
||||
import { AssetResponseDto, mapAsset, MapMarkerResponseDto } from './response-dto';
|
||||
import { MemoryLaneResponseDto } from './response-dto/memory-lane-response.dto';
|
||||
import { TimeBucketResponseDto } from './response-dto/time-bucket-response.dto';
|
||||
import {
|
||||
AssetResponseDto,
|
||||
mapAsset,
|
||||
MapMarkerResponseDto,
|
||||
MemoryLaneResponseDto,
|
||||
TimeBucketResponseDto,
|
||||
} from './response-dto';
|
||||
|
||||
export enum UploadFieldName {
|
||||
ASSET_DATA = 'assetData',
|
||||
|
||||
@@ -84,3 +84,8 @@ export function mapAssetWithoutExif(entity: AssetEntity): AssetResponseDto {
|
||||
checksum: entity.checksum.toString('base64'),
|
||||
};
|
||||
}
|
||||
|
||||
export class MemoryLaneResponseDto {
|
||||
title!: string;
|
||||
assets!: AssetResponseDto[];
|
||||
}
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
import { AssetResponseDto } from './asset-response.dto';
|
||||
|
||||
export class MemoryLaneResponseDto {
|
||||
title!: string;
|
||||
assets!: AssetResponseDto[];
|
||||
}
|
||||
61
server/src/domain/audit/audi.service.spec.ts
Normal file
@@ -0,0 +1,61 @@
import { DatabaseAction, EntityType } from '@app/infra/entities';
import { auditStub, authStub, IAccessRepositoryMock, newAccessRepositoryMock, newAuditRepositoryMock } from '@test';
import { IAuditRepository } from './audit.repository';
import { AuditService } from './audit.service';

describe(AuditService.name, () => {
  let sut: AuditService;
  let accessMock: IAccessRepositoryMock;
  let auditMock: jest.Mocked<IAuditRepository>;

  beforeEach(async () => {
    accessMock = newAccessRepositoryMock();
    auditMock = newAuditRepositoryMock();
    sut = new AuditService(accessMock, auditMock);
  });

  it('should work', () => {
    expect(sut).toBeDefined();
  });

  describe('handleCleanup', () => {
    it('should delete old audit entries', async () => {
      await expect(sut.handleCleanup()).resolves.toBe(true);
      expect(auditMock.removeBefore).toBeCalledWith(expect.any(Date));
    });
  });

  describe('getDeletes', () => {
    it('should require full sync if the request is older than 100 days', async () => {
      auditMock.getAfter.mockResolvedValue([]);

      const date = new Date(2022, 0, 1);
      await expect(sut.getDeletes(authStub.admin, { after: date, entityType: EntityType.ASSET })).resolves.toEqual({
        needsFullSync: true,
        ids: [],
      });

      expect(auditMock.getAfter).toHaveBeenCalledWith(date, {
        action: DatabaseAction.DELETE,
        ownerId: authStub.admin.id,
        entityType: EntityType.ASSET,
      });
    });

    it('should get any new or updated assets and deleted ids', async () => {
      auditMock.getAfter.mockResolvedValue([auditStub.delete]);

      const date = new Date();
      await expect(sut.getDeletes(authStub.admin, { after: date, entityType: EntityType.ASSET })).resolves.toEqual({
        needsFullSync: false,
        ids: ['asset-deleted'],
      });

      expect(auditMock.getAfter).toHaveBeenCalledWith(date, {
        action: DatabaseAction.DELETE,
        ownerId: authStub.admin.id,
        entityType: EntityType.ASSET,
      });
    });
  });
});
24
server/src/domain/audit/audit.dto.ts
Normal file
@@ -0,0 +1,24 @@
import { EntityType } from '@app/infra/entities';
import { ApiProperty } from '@nestjs/swagger';
import { Type } from 'class-transformer';
import { IsDate, IsEnum, IsOptional, IsUUID } from 'class-validator';

export class AuditDeletesDto {
  @IsDate()
  @Type(() => Date)
  after!: Date;

  @ApiProperty({ enum: EntityType, enumName: 'EntityType' })
  @IsEnum(EntityType)
  entityType!: EntityType;

  @IsOptional()
  @IsUUID('4')
  @ApiProperty({ format: 'uuid' })
  userId?: string;
}

export class AuditDeletesResponseDto {
  needsFullSync!: boolean;
  ids!: string[];
}
14
server/src/domain/audit/audit.repository.ts
Normal file
@@ -0,0 +1,14 @@
import { AuditEntity, DatabaseAction, EntityType } from '@app/infra/entities';

export const IAuditRepository = 'IAuditRepository';

export interface AuditSearch {
  action?: DatabaseAction;
  entityType?: EntityType;
  ownerId?: string;
}

export interface IAuditRepository {
  getAfter(since: Date, options: AuditSearch): Promise<AuditEntity[]>;
  removeBefore(before: Date): Promise<void>;
}
43
server/src/domain/audit/audit.service.ts
Normal file
@@ -0,0 +1,43 @@
import { DatabaseAction } from '@app/infra/entities';
import { Inject, Injectable } from '@nestjs/common';
import { DateTime } from 'luxon';
import { AccessCore, IAccessRepository, Permission } from '../access';
import { AuthUserDto } from '../auth';
import { AUDIT_LOG_MAX_DURATION } from '../domain.constant';
import { AuditDeletesDto, AuditDeletesResponseDto } from './audit.dto';
import { IAuditRepository } from './audit.repository';

@Injectable()
export class AuditService {
  private access: AccessCore;

  constructor(
    @Inject(IAccessRepository) accessRepository: IAccessRepository,
    @Inject(IAuditRepository) private repository: IAuditRepository,
  ) {
    this.access = new AccessCore(accessRepository);
  }

  async handleCleanup(): Promise<boolean> {
    await this.repository.removeBefore(DateTime.now().minus(AUDIT_LOG_MAX_DURATION).toJSDate());
    return true;
  }

  async getDeletes(authUser: AuthUserDto, dto: AuditDeletesDto): Promise<AuditDeletesResponseDto> {
    const userId = dto.userId || authUser.id;
    await this.access.requirePermission(authUser, Permission.LIBRARY_READ, userId);

    const audits = await this.repository.getAfter(dto.after, {
      ownerId: userId,
      entityType: dto.entityType,
      action: DatabaseAction.DELETE,
    });

    const duration = DateTime.now().diff(DateTime.fromJSDate(dto.after));

    return {
      needsFullSync: duration > AUDIT_LOG_MAX_DURATION,
      ids: audits.map(({ entityId }) => entityId),
    };
  }
}
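
A note on usage: the getDeletes flow above is what backs the new GET /audit/deletes endpoint described in the OpenAPI spec of this release. A client sends the entity type and the timestamp of its last sync, then either prunes the returned ids locally or falls back to a full re-fetch when needsFullSync is true. The sketch below is illustrative only; the base URL, the x-api-key header name, and the two callbacks are assumptions made for this example and are not part of the change itself.

// Sketch: incremental delete-sync against GET /audit/deletes (illustrative, not from this diff).
interface AuditDeletesResponse {
  ids: string[];
  needsFullSync: boolean;
}

async function syncDeletedAssets(
  baseUrl: string, // assumed, e.g. 'https://immich.example.com/api'
  apiKey: string, // assumed API-key authentication
  lastSync: Date,
  onDeleted: (ids: string[]) => Promise<void>, // assumed local-cache pruning callback
  onFullSync: () => Promise<void>, // assumed full re-download callback
): Promise<void> {
  const params = new URLSearchParams({ entityType: 'ASSET', after: lastSync.toISOString() });
  const res = await fetch(`${baseUrl}/audit/deletes?${params}`, { headers: { 'x-api-key': apiKey } });
  const { ids, needsFullSync } = (await res.json()) as AuditDeletesResponse;

  if (needsFullSync) {
    // Audit logs are pruned after AUDIT_LOG_MAX_DURATION (100 days), so a client
    // that is further behind than that must re-fetch everything instead.
    await onFullSync();
  } else {
    await onDeleted(ids);
  }
}
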
3
server/src/domain/audit/index.ts
Normal file
@@ -0,0 +1,3 @@
export * from './audit.dto';
export * from './audit.repository';
export * from './audit.service';
@@ -1,8 +1,10 @@
import { AssetType } from '@app/infra/entities';
import { BadRequestException } from '@nestjs/common';
import { Duration } from 'luxon';
import { extname } from 'node:path';
import pkg from 'src/../../package.json';

export const AUDIT_LOG_MAX_DURATION = Duration.fromObject({ days: 100 });

const [major, minor, patch] = pkg.version.split('.');

export interface IServerVersion {
@@ -21,17 +23,6 @@ export const SERVER_VERSION = `${serverVersion.major}.${serverVersion.minor}.${s

export const APP_MEDIA_LOCATION = process.env.IMMICH_MEDIA_LOCATION || './upload';

export const SEARCH_ENABLED = process.env.TYPESENSE_ENABLED !== 'false';

export const MACHINE_LEARNING_URL = process.env.IMMICH_MACHINE_LEARNING_URL || 'http://immich-machine-learning:3003';
export const MACHINE_LEARNING_ENABLED = MACHINE_LEARNING_URL !== 'false';

export function assertMachineLearningEnabled() {
  if (!MACHINE_LEARNING_ENABLED) {
    throw new BadRequestException('Machine learning is not enabled.');
  }
}

const image: Record<string, string[]> = {
  '.3fr': ['image/3fr', 'image/x-hasselblad-3fr'],
  '.ari': ['image/ari', 'image/x-arriflex-ari'],

@@ -2,6 +2,7 @@ import { DynamicModule, Global, Module, ModuleMetadata, OnApplicationShutdown, P
|
||||
import { AlbumService } from './album';
|
||||
import { APIKeyService } from './api-key';
|
||||
import { AssetService } from './asset';
|
||||
import { AuditService } from './audit';
|
||||
import { AuthService } from './auth';
|
||||
import { FacialRecognitionService } from './facial-recognition';
|
||||
import { JobService } from './job';
|
||||
@@ -23,6 +24,7 @@ const providers: Provider[] = [
|
||||
AlbumService,
|
||||
APIKeyService,
|
||||
AssetService,
|
||||
AuditService,
|
||||
AuthService,
|
||||
FacialRecognitionService,
|
||||
JobService,
|
||||
|
||||
@@ -9,6 +9,7 @@ import {
|
||||
newPersonRepositoryMock,
|
||||
newSearchRepositoryMock,
|
||||
newStorageRepositoryMock,
|
||||
newSystemConfigRepositoryMock,
|
||||
personStub,
|
||||
} from '@test';
|
||||
import { IAssetRepository, WithoutProperty } from '../asset';
|
||||
@@ -18,6 +19,7 @@ import { IPersonRepository } from '../person';
|
||||
import { ISearchRepository } from '../search';
|
||||
import { IMachineLearningRepository } from '../smart-info';
|
||||
import { IStorageRepository } from '../storage';
|
||||
import { ISystemConfigRepository } from '../system-config';
|
||||
import { IFaceRepository } from './face.repository';
|
||||
import { FacialRecognitionService } from './facial-recognition.services';
|
||||
|
||||
@@ -94,6 +96,7 @@ const faceSearch = {
|
||||
describe(FacialRecognitionService.name, () => {
|
||||
let sut: FacialRecognitionService;
|
||||
let assetMock: jest.Mocked<IAssetRepository>;
|
||||
let configMock: jest.Mocked<ISystemConfigRepository>;
|
||||
let faceMock: jest.Mocked<IFaceRepository>;
|
||||
let jobMock: jest.Mocked<IJobRepository>;
|
||||
let machineLearningMock: jest.Mocked<IMachineLearningRepository>;
|
||||
@@ -104,6 +107,7 @@ describe(FacialRecognitionService.name, () => {
|
||||
|
||||
beforeEach(async () => {
|
||||
assetMock = newAssetRepositoryMock();
|
||||
configMock = newSystemConfigRepositoryMock();
|
||||
faceMock = newFaceRepositoryMock();
|
||||
jobMock = newJobRepositoryMock();
|
||||
machineLearningMock = newMachineLearningRepositoryMock();
|
||||
@@ -116,6 +120,7 @@ describe(FacialRecognitionService.name, () => {
|
||||
|
||||
sut = new FacialRecognitionService(
|
||||
assetMock,
|
||||
configMock,
|
||||
faceMock,
|
||||
jobMock,
|
||||
machineLearningMock,
|
||||
@@ -174,7 +179,7 @@ describe(FacialRecognitionService.name, () => {
|
||||
machineLearningMock.detectFaces.mockResolvedValue([]);
|
||||
assetMock.getByIds.mockResolvedValue([assetStub.image]);
|
||||
await sut.handleRecognizeFaces({ id: assetStub.image.id });
|
||||
expect(machineLearningMock.detectFaces).toHaveBeenCalledWith({
|
||||
expect(machineLearningMock.detectFaces).toHaveBeenCalledWith('http://immich-machine-learning:3003', {
|
||||
imagePath: assetStub.image.resizePath,
|
||||
});
|
||||
expect(faceMock.create).not.toHaveBeenCalled();
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { Inject, Logger } from '@nestjs/common';
|
||||
import { join } from 'path';
|
||||
import { IAssetRepository, WithoutProperty } from '../asset';
|
||||
import { MACHINE_LEARNING_ENABLED } from '../domain.constant';
|
||||
import { usePagination } from '../domain.util';
|
||||
import { IBaseJob, IEntityJob, IFaceThumbnailJob, IJobRepository, JobName, JOBS_ASSET_PAGINATION_SIZE } from '../job';
|
||||
import { CropOptions, FACE_THUMBNAIL_SIZE, IMediaRepository } from '../media';
|
||||
@@ -9,14 +8,17 @@ import { IPersonRepository } from '../person/person.repository';
|
||||
import { ISearchRepository } from '../search/search.repository';
|
||||
import { IMachineLearningRepository } from '../smart-info';
|
||||
import { IStorageRepository, StorageCore, StorageFolder } from '../storage';
|
||||
import { ISystemConfigRepository, SystemConfigCore } from '../system-config';
|
||||
import { AssetFaceId, IFaceRepository } from './face.repository';
|
||||
|
||||
export class FacialRecognitionService {
|
||||
private logger = new Logger(FacialRecognitionService.name);
|
||||
private storageCore = new StorageCore();
|
||||
private configCore: SystemConfigCore;
|
||||
|
||||
constructor(
|
||||
@Inject(IAssetRepository) private assetRepository: IAssetRepository,
|
||||
@Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
|
||||
@Inject(IFaceRepository) private faceRepository: IFaceRepository,
|
||||
@Inject(IJobRepository) private jobRepository: IJobRepository,
|
||||
@Inject(IMachineLearningRepository) private machineLearning: IMachineLearningRepository,
|
||||
@@ -24,9 +26,16 @@ export class FacialRecognitionService {
|
||||
@Inject(IPersonRepository) private personRepository: IPersonRepository,
|
||||
@Inject(ISearchRepository) private searchRepository: ISearchRepository,
|
||||
@Inject(IStorageRepository) private storageRepository: IStorageRepository,
|
||||
) {}
|
||||
) {
|
||||
this.configCore = new SystemConfigCore(configRepository);
|
||||
}
|
||||
|
||||
async handleQueueRecognizeFaces({ force }: IBaseJob) {
|
||||
const { machineLearning } = await this.configCore.getConfig();
|
||||
if (!machineLearning.enabled || !machineLearning.facialRecognitionEnabled) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
|
||||
return force
|
||||
? this.assetRepository.getAll(pagination, { order: 'DESC' })
|
||||
@@ -49,12 +58,17 @@ export class FacialRecognitionService {
|
||||
}
|
||||
|
||||
async handleRecognizeFaces({ id }: IEntityJob) {
|
||||
const { machineLearning } = await this.configCore.getConfig();
|
||||
if (!machineLearning.enabled || !machineLearning.facialRecognitionEnabled) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const [asset] = await this.assetRepository.getByIds([id]);
|
||||
if (!asset || !MACHINE_LEARNING_ENABLED || !asset.resizePath) {
|
||||
if (!asset || !asset.resizePath) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const faces = await this.machineLearning.detectFaces({ imagePath: asset.resizePath });
|
||||
const faces = await this.machineLearning.detectFaces(machineLearning.url, { imagePath: asset.resizePath });
|
||||
|
||||
this.logger.debug(`${faces.length} faces detected in ${asset.resizePath}`);
|
||||
this.logger.verbose(faces.map((face) => ({ ...face, embedding: `float[${face.embedding.length}]` })));
|
||||
@@ -100,6 +114,11 @@ export class FacialRecognitionService {
|
||||
}
|
||||
|
||||
async handleGenerateFaceThumbnail(data: IFaceThumbnailJob) {
|
||||
const { machineLearning } = await this.configCore.getConfig();
|
||||
if (!machineLearning.enabled || !machineLearning.facialRecognitionEnabled) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const { assetId, personId, boundingBox, imageWidth, imageHeight } = data;
|
||||
|
||||
const [asset] = await this.assetRepository.getByIds([assetId]);
|
||||
|
||||
@@ -2,6 +2,7 @@ export * from './access';
|
||||
export * from './album';
|
||||
export * from './api-key';
|
||||
export * from './asset';
|
||||
export * from './audit';
|
||||
export * from './auth';
|
||||
export * from './communication';
|
||||
export * from './crypto';
|
||||
|
||||
@@ -55,6 +55,7 @@ export enum JobName {
|
||||
|
||||
// cleanup
|
||||
DELETE_FILES = 'delete-files',
|
||||
CLEAN_OLD_AUDIT_LOGS = 'clean-old-audit-logs',
|
||||
|
||||
// search
|
||||
SEARCH_INDEX_ASSETS = 'search-index-assets',
|
||||
@@ -84,6 +85,7 @@ export const JOBS_TO_QUEUE: Record<JobName, QueueName> = {
|
||||
[JobName.USER_DELETE_CHECK]: QueueName.BACKGROUND_TASK,
|
||||
[JobName.USER_DELETION]: QueueName.BACKGROUND_TASK,
|
||||
[JobName.DELETE_FILES]: QueueName.BACKGROUND_TASK,
|
||||
[JobName.CLEAN_OLD_AUDIT_LOGS]: QueueName.BACKGROUND_TASK,
|
||||
[JobName.PERSON_CLEANUP]: QueueName.BACKGROUND_TASK,
|
||||
|
||||
// conversion
|
||||
|
||||
@@ -68,6 +68,9 @@ export type JobItem =
|
||||
// Filesystem
|
||||
| { name: JobName.DELETE_FILES; data: IDeleteFilesJob }
|
||||
|
||||
// Audit log cleanup
|
||||
| { name: JobName.CLEAN_OLD_AUDIT_LOGS; data?: IBaseJob }
|
||||
|
||||
// Asset Deletion
|
||||
| { name: JobName.PERSON_CLEANUP; data?: IBaseJob }
|
||||
|
||||
|
||||
@@ -51,6 +51,7 @@ describe(JobService.name, () => {
|
||||
[{ name: JobName.USER_DELETE_CHECK }],
|
||||
[{ name: JobName.PERSON_CLEANUP }],
|
||||
[{ name: JobName.QUEUE_GENERATE_THUMBNAILS, data: { force: false } }],
|
||||
[{ name: JobName.CLEAN_OLD_AUDIT_LOGS }],
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -2,8 +2,7 @@ import { AssetType } from '@app/infra/entities';
|
||||
import { BadRequestException, Inject, Injectable, Logger } from '@nestjs/common';
|
||||
import { IAssetRepository, mapAsset } from '../asset';
|
||||
import { CommunicationEvent, ICommunicationRepository } from '../communication';
|
||||
import { assertMachineLearningEnabled } from '../domain.constant';
|
||||
import { ISystemConfigRepository } from '../system-config';
|
||||
import { FeatureFlag, ISystemConfigRepository } from '../system-config';
|
||||
import { SystemConfigCore } from '../system-config/system-config.core';
|
||||
import { JobCommand, JobName, QueueName } from './job.constants';
|
||||
import { AllJobStatusResponseDto, JobCommandDto, JobStatusDto } from './job.dto';
|
||||
@@ -78,23 +77,25 @@ export class JobService {
|
||||
return this.jobRepository.queue({ name: JobName.STORAGE_TEMPLATE_MIGRATION });
|
||||
|
||||
case QueueName.OBJECT_TAGGING:
|
||||
assertMachineLearningEnabled();
|
||||
await this.configCore.requireFeature(FeatureFlag.TAG_IMAGE);
|
||||
return this.jobRepository.queue({ name: JobName.QUEUE_OBJECT_TAGGING, data: { force } });
|
||||
|
||||
case QueueName.CLIP_ENCODING:
|
||||
assertMachineLearningEnabled();
|
||||
await this.configCore.requireFeature(FeatureFlag.CLIP_ENCODE);
|
||||
return this.jobRepository.queue({ name: JobName.QUEUE_ENCODE_CLIP, data: { force } });
|
||||
|
||||
case QueueName.METADATA_EXTRACTION:
|
||||
return this.jobRepository.queue({ name: JobName.QUEUE_METADATA_EXTRACTION, data: { force } });
|
||||
|
||||
case QueueName.SIDECAR:
|
||||
await this.configCore.requireFeature(FeatureFlag.SIDECAR);
|
||||
return this.jobRepository.queue({ name: JobName.QUEUE_SIDECAR, data: { force } });
|
||||
|
||||
case QueueName.THUMBNAIL_GENERATION:
|
||||
return this.jobRepository.queue({ name: JobName.QUEUE_GENERATE_THUMBNAILS, data: { force } });
|
||||
|
||||
case QueueName.RECOGNIZE_FACES:
|
||||
await this.configCore.requireFeature(FeatureFlag.FACIAL_RECOGNITION);
|
||||
return this.jobRepository.queue({ name: JobName.QUEUE_RECOGNIZE_FACES, data: { force } });
|
||||
|
||||
default:
|
||||
@@ -136,6 +137,7 @@ export class JobService {
|
||||
await this.jobRepository.queue({ name: JobName.USER_DELETE_CHECK });
|
||||
await this.jobRepository.queue({ name: JobName.PERSON_CLEANUP });
|
||||
await this.jobRepository.queue({ name: JobName.QUEUE_GENERATE_THUMBNAILS, data: { force: false } });
|
||||
await this.jobRepository.queue({ name: JobName.CLEAN_OLD_AUDIT_LOGS });
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,3 +1,2 @@
|
||||
export * from './search-config-response.dto';
|
||||
export * from './search-explore.response.dto';
|
||||
export * from './search-response.dto';
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
export class SearchConfigResponseDto {
|
||||
enabled!: boolean;
|
||||
}
|
||||
@@ -1,5 +1,3 @@
|
||||
import { BadRequestException } from '@nestjs/common';
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import {
|
||||
albumStub,
|
||||
assetStub,
|
||||
@@ -12,12 +10,14 @@ import {
|
||||
newJobRepositoryMock,
|
||||
newMachineLearningRepositoryMock,
|
||||
newSearchRepositoryMock,
|
||||
newSystemConfigRepositoryMock,
|
||||
searchStub,
|
||||
} from '@test';
|
||||
import { plainToInstance } from 'class-transformer';
|
||||
import { IAlbumRepository } from '../album/album.repository';
|
||||
import { IAssetRepository } from '../asset/asset.repository';
|
||||
import { IFaceRepository } from '../facial-recognition';
|
||||
import { ISystemConfigRepository } from '../index';
|
||||
import { JobName } from '../job';
|
||||
import { IJobRepository } from '../job/job.repository';
|
||||
import { IMachineLearningRepository } from '../smart-info';
|
||||
@@ -31,29 +31,26 @@ describe(SearchService.name, () => {
|
||||
let sut: SearchService;
|
||||
let albumMock: jest.Mocked<IAlbumRepository>;
|
||||
let assetMock: jest.Mocked<IAssetRepository>;
|
||||
let configMock: jest.Mocked<ISystemConfigRepository>;
|
||||
let faceMock: jest.Mocked<IFaceRepository>;
|
||||
let jobMock: jest.Mocked<IJobRepository>;
|
||||
let machineMock: jest.Mocked<IMachineLearningRepository>;
|
||||
let searchMock: jest.Mocked<ISearchRepository>;
|
||||
let configMock: jest.Mocked<ConfigService>;
|
||||
|
||||
const makeSut = (value?: string) => {
|
||||
if (value) {
|
||||
configMock.get.mockReturnValue(value);
|
||||
}
|
||||
return new SearchService(albumMock, assetMock, faceMock, jobMock, machineMock, searchMock, configMock);
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
beforeEach(async () => {
|
||||
albumMock = newAlbumRepositoryMock();
|
||||
assetMock = newAssetRepositoryMock();
|
||||
configMock = newSystemConfigRepositoryMock();
|
||||
faceMock = newFaceRepositoryMock();
|
||||
jobMock = newJobRepositoryMock();
|
||||
machineMock = newMachineLearningRepositoryMock();
|
||||
searchMock = newSearchRepositoryMock();
|
||||
configMock = { get: jest.fn() } as unknown as jest.Mocked<ConfigService>;
|
||||
|
||||
sut = makeSut();
|
||||
sut = new SearchService(albumMock, assetMock, configMock, faceMock, jobMock, machineMock, searchMock);
|
||||
|
||||
searchMock.checkMigrationStatus.mockResolvedValue({ assets: false, albums: false, faces: false });
|
||||
|
||||
await sut.init();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
@@ -86,45 +83,18 @@ describe(SearchService.name, () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('isEnabled', () => {
|
||||
it('should be enabled by default', () => {
|
||||
expect(sut.isEnabled()).toBe(true);
|
||||
});
|
||||
|
||||
it('should be disabled via an env variable', () => {
|
||||
const sut = makeSut('false');
|
||||
|
||||
expect(sut.isEnabled()).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getConfig', () => {
|
||||
it('should return the config', () => {
|
||||
expect(sut.getConfig()).toEqual({ enabled: true });
|
||||
});
|
||||
|
||||
it('should return the config when search is disabled', () => {
|
||||
const sut = makeSut('false');
|
||||
|
||||
expect(sut.getConfig()).toEqual({ enabled: false });
|
||||
});
|
||||
});
|
||||
|
||||
describe(`init`, () => {
|
||||
it('should skip when search is disabled', async () => {
|
||||
const sut = makeSut('false');
|
||||
// it('should skip when search is disabled', async () => {
|
||||
// await sut.init();
|
||||
|
||||
await sut.init();
|
||||
// expect(searchMock.setup).not.toHaveBeenCalled();
|
||||
// expect(searchMock.checkMigrationStatus).not.toHaveBeenCalled();
|
||||
// expect(jobMock.queue).not.toHaveBeenCalled();
|
||||
|
||||
expect(searchMock.setup).not.toHaveBeenCalled();
|
||||
expect(searchMock.checkMigrationStatus).not.toHaveBeenCalled();
|
||||
expect(jobMock.queue).not.toHaveBeenCalled();
|
||||
|
||||
sut.teardown();
|
||||
});
|
||||
// sut.teardown();
|
||||
// });
|
||||
|
||||
it('should skip schema migration if not needed', async () => {
|
||||
searchMock.checkMigrationStatus.mockResolvedValue({ assets: false, albums: false, faces: false });
|
||||
await sut.init();
|
||||
|
||||
expect(searchMock.setup).toHaveBeenCalled();
|
||||
@@ -145,14 +115,14 @@ describe(SearchService.name, () => {
|
||||
});
|
||||
|
||||
describe('search', () => {
|
||||
it('should throw an error is search is disabled', async () => {
|
||||
const sut = makeSut('false');
|
||||
// it('should throw an error is search is disabled', async () => {
|
||||
// sut['enabled'] = false;
|
||||
|
||||
await expect(sut.search(authStub.admin, {})).rejects.toBeInstanceOf(BadRequestException);
|
||||
// await expect(sut.search(authStub.admin, {})).rejects.toBeInstanceOf(BadRequestException);
|
||||
|
||||
expect(searchMock.searchAlbums).not.toHaveBeenCalled();
|
||||
expect(searchMock.searchAssets).not.toHaveBeenCalled();
|
||||
});
|
||||
// expect(searchMock.searchAlbums).not.toHaveBeenCalled();
|
||||
// expect(searchMock.searchAssets).not.toHaveBeenCalled();
|
||||
// });
|
||||
|
||||
it('should search assets and albums', async () => {
|
||||
searchMock.searchAssets.mockResolvedValue(searchStub.emptyResults);
|
||||
@@ -205,7 +175,7 @@ describe(SearchService.name, () => {
|
||||
});
|
||||
|
||||
it('should skip if search is disabled', async () => {
|
||||
const sut = makeSut('false');
|
||||
sut['enabled'] = false;
|
||||
|
||||
await sut.handleIndexAssets();
|
||||
|
||||
@@ -216,7 +186,7 @@ describe(SearchService.name, () => {
|
||||
|
||||
describe('handleIndexAsset', () => {
|
||||
it('should skip if search is disabled', () => {
|
||||
const sut = makeSut('false');
|
||||
sut['enabled'] = false;
|
||||
sut.handleIndexAsset({ ids: [assetStub.image.id] });
|
||||
});
|
||||
|
||||
@@ -227,7 +197,7 @@ describe(SearchService.name, () => {
|
||||
|
||||
describe('handleIndexAlbums', () => {
|
||||
it('should skip if search is disabled', () => {
|
||||
const sut = makeSut('false');
|
||||
sut['enabled'] = false;
|
||||
sut.handleIndexAlbums();
|
||||
});
|
||||
|
||||
@@ -242,7 +212,7 @@ describe(SearchService.name, () => {
|
||||
|
||||
describe('handleIndexAlbum', () => {
|
||||
it('should skip if search is disabled', () => {
|
||||
const sut = makeSut('false');
|
||||
sut['enabled'] = false;
|
||||
sut.handleIndexAlbum({ ids: [albumStub.empty.id] });
|
||||
});
|
||||
|
||||
@@ -253,7 +223,7 @@ describe(SearchService.name, () => {
|
||||
|
||||
describe('handleRemoveAlbum', () => {
|
||||
it('should skip if search is disabled', () => {
|
||||
const sut = makeSut('false');
|
||||
sut['enabled'] = false;
|
||||
sut.handleRemoveAlbum({ ids: ['album1'] });
|
||||
});
|
||||
|
||||
@@ -264,7 +234,7 @@ describe(SearchService.name, () => {
|
||||
|
||||
describe('handleRemoveAsset', () => {
|
||||
it('should skip if search is disabled', () => {
|
||||
const sut = makeSut('false');
|
||||
sut['enabled'] = false;
|
||||
sut.handleRemoveAsset({ ids: ['asset1'] });
|
||||
});
|
||||
|
||||
@@ -305,7 +275,7 @@ describe(SearchService.name, () => {
|
||||
});
|
||||
|
||||
it('should skip if search is disabled', async () => {
|
||||
const sut = makeSut('false');
|
||||
sut['enabled'] = false;
|
||||
|
||||
await sut.handleIndexFaces();
|
||||
|
||||
@@ -315,7 +285,7 @@ describe(SearchService.name, () => {
|
||||
|
||||
describe('handleIndexAsset', () => {
|
||||
it('should skip if search is disabled', () => {
|
||||
const sut = makeSut('false');
|
||||
sut['enabled'] = false;
|
||||
sut.handleIndexFace({ assetId: 'asset-1', personId: 'person-1' });
|
||||
|
||||
expect(searchMock.importFaces).not.toHaveBeenCalled();
|
||||
@@ -333,7 +303,7 @@ describe(SearchService.name, () => {
|
||||
|
||||
describe('handleRemoveFace', () => {
|
||||
it('should skip if search is disabled', () => {
|
||||
const sut = makeSut('false');
|
||||
sut['enabled'] = false;
|
||||
sut.handleRemoveFace({ assetId: 'asset-1', personId: 'person-1' });
|
||||
});
|
||||
|
||||
|
||||
@@ -1,18 +1,17 @@
|
||||
import { AlbumEntity, AssetEntity, AssetFaceEntity } from '@app/infra/entities';
|
||||
import { BadRequestException, Inject, Injectable, Logger } from '@nestjs/common';
|
||||
import { ConfigService } from '@nestjs/config';
|
||||
import { Inject, Injectable, Logger } from '@nestjs/common';
|
||||
import { mapAlbumWithAssets } from '../album';
|
||||
import { IAlbumRepository } from '../album/album.repository';
|
||||
import { AssetResponseDto, mapAsset } from '../asset';
|
||||
import { IAssetRepository } from '../asset/asset.repository';
|
||||
import { AuthUserDto } from '../auth';
|
||||
import { MACHINE_LEARNING_ENABLED } from '../domain.constant';
|
||||
import { usePagination } from '../domain.util';
|
||||
import { AssetFaceId, IFaceRepository } from '../facial-recognition';
|
||||
import { IAssetFaceJob, IBulkEntityJob, IJobRepository, JobName, JOBS_ASSET_PAGINATION_SIZE } from '../job';
|
||||
import { IMachineLearningRepository } from '../smart-info';
|
||||
import { FeatureFlag, ISystemConfigRepository, SystemConfigCore } from '../system-config';
|
||||
import { SearchDto } from './dto';
|
||||
import { SearchConfigResponseDto, SearchResponseDto } from './response-dto';
|
||||
import { SearchResponseDto } from './response-dto';
|
||||
import {
|
||||
ISearchRepository,
|
||||
OwnedFaceEntity,
|
||||
@@ -30,8 +29,9 @@ interface SyncQueue {
|
||||
@Injectable()
|
||||
export class SearchService {
|
||||
private logger = new Logger(SearchService.name);
|
||||
private enabled: boolean;
|
||||
private enabled = false;
|
||||
private timer: NodeJS.Timer | null = null;
|
||||
private configCore: SystemConfigCore;
|
||||
|
||||
private albumQueue: SyncQueue = {
|
||||
upsert: new Set(),
|
||||
@@ -51,16 +51,13 @@ export class SearchService {
|
||||
constructor(
|
||||
@Inject(IAlbumRepository) private albumRepository: IAlbumRepository,
|
||||
@Inject(IAssetRepository) private assetRepository: IAssetRepository,
|
||||
@Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
|
||||
@Inject(IFaceRepository) private faceRepository: IFaceRepository,
|
||||
@Inject(IJobRepository) private jobRepository: IJobRepository,
|
||||
@Inject(IMachineLearningRepository) private machineLearning: IMachineLearningRepository,
|
||||
@Inject(ISearchRepository) private searchRepository: ISearchRepository,
|
||||
configService: ConfigService,
|
||||
) {
|
||||
this.enabled = configService.get('TYPESENSE_ENABLED') !== 'false';
|
||||
if (this.enabled) {
|
||||
this.timer = setInterval(() => this.flush(), 5_000);
|
||||
}
|
||||
this.configCore = new SystemConfigCore(configRepository);
|
||||
}
|
||||
|
||||
teardown() {
|
||||
@@ -70,17 +67,8 @@ export class SearchService {
|
||||
}
|
||||
}
|
||||
|
||||
isEnabled() {
|
||||
return this.enabled;
|
||||
}
|
||||
|
||||
getConfig(): SearchConfigResponseDto {
|
||||
return {
|
||||
enabled: this.enabled,
|
||||
};
|
||||
}
|
||||
|
||||
async init() {
|
||||
this.enabled = await this.configCore.hasFeature(FeatureFlag.SEARCH);
|
||||
if (!this.enabled) {
|
||||
return;
|
||||
}
|
||||
@@ -101,10 +89,13 @@ export class SearchService {
|
||||
this.logger.debug('Queueing job to re-index all faces');
|
||||
await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_FACES });
|
||||
}
|
||||
|
||||
this.timer = setInterval(() => this.flush(), 5_000);
|
||||
}
|
||||
|
||||
async getExploreData(authUser: AuthUserDto): Promise<SearchExploreItem<AssetResponseDto>[]> {
|
||||
this.assertEnabled();
|
||||
await this.configCore.requireFeature(FeatureFlag.SEARCH);
|
||||
|
||||
const results = await this.searchRepository.explore(authUser.id);
|
||||
const lookup = await this.getLookupMap(
|
||||
results.reduce(
|
||||
@@ -126,16 +117,18 @@ export class SearchService {
|
||||
}
|
||||
|
||||
async search(authUser: AuthUserDto, dto: SearchDto): Promise<SearchResponseDto> {
|
||||
this.assertEnabled();
|
||||
const { machineLearning } = await this.configCore.getConfig();
|
||||
await this.configCore.requireFeature(FeatureFlag.SEARCH);
|
||||
|
||||
const query = dto.q || dto.query || '*';
|
||||
const strategy = dto.clip && MACHINE_LEARNING_ENABLED ? SearchStrategy.CLIP : SearchStrategy.TEXT;
|
||||
const hasClip = machineLearning.enabled && machineLearning.clipEncodeEnabled;
|
||||
const strategy = dto.clip && hasClip ? SearchStrategy.CLIP : SearchStrategy.TEXT;
|
||||
const filters = { userId: authUser.id, ...dto };
|
||||
|
||||
let assets: SearchResult<AssetEntity>;
|
||||
switch (strategy) {
|
||||
case SearchStrategy.CLIP:
|
||||
const clip = await this.machineLearning.encodeText(query);
|
||||
const clip = await this.machineLearning.encodeText(machineLearning.url, query);
|
||||
assets = await this.searchRepository.vectorSearch(clip, filters);
|
||||
break;
|
||||
case SearchStrategy.TEXT:
|
||||
@@ -333,12 +326,6 @@ export class SearchService {
|
||||
}
|
||||
}
|
||||
|
||||
private assertEnabled() {
|
||||
if (!this.enabled) {
|
||||
throw new BadRequestException('Search is disabled');
|
||||
}
|
||||
}
|
||||
|
||||
private async idsToAlbums(ids: string[]): Promise<AlbumEntity[]> {
|
||||
const entities = await this.albumRepository.getByIds(ids);
|
||||
return this.patchAlbums(entities);
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { IServerVersion } from '@app/domain';
|
||||
import { FeatureFlags, IServerVersion } from '@app/domain';
|
||||
import { ApiProperty, ApiResponseProperty } from '@nestjs/swagger';
|
||||
|
||||
export class ServerPingResponse {
|
||||
@@ -79,10 +79,15 @@ export class ServerMediaTypesResponseDto {
|
||||
sidecar!: string[];
|
||||
}
|
||||
|
||||
export class ServerFeaturesDto {
  machineLearning!: boolean;
export class ServerFeaturesDto implements FeatureFlags {
  configFile!: boolean;
  clipEncode!: boolean;
  facialRecognition!: boolean;
  sidecar!: boolean;
  search!: boolean;
  tagImage!: boolean;

  // TODO: use these instead of `POST oauth/config`
  oauth!: boolean;
  oauthAutoLaunch!: boolean;
  passwordLogin!: boolean;

@@ -147,11 +147,15 @@ describe(ServerInfoService.name, () => {
|
||||
describe('getFeatures', () => {
|
||||
it('should respond the server features', async () => {
|
||||
await expect(sut.getFeatures()).resolves.toEqual({
|
||||
machineLearning: true,
|
||||
clipEncode: true,
|
||||
facialRecognition: true,
|
||||
oauth: false,
|
||||
oauthAutoLaunch: false,
|
||||
passwordLogin: true,
|
||||
search: true,
|
||||
sidecar: true,
|
||||
tagImage: true,
|
||||
configFile: false,
|
||||
});
|
||||
expect(configMock.load).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
import { Inject, Injectable } from '@nestjs/common';
|
||||
import { MACHINE_LEARNING_ENABLED, mimeTypes, SEARCH_ENABLED, serverVersion } from '../domain.constant';
|
||||
import { mimeTypes, serverVersion } from '../domain.constant';
|
||||
import { asHumanReadable } from '../domain.util';
|
||||
import { IStorageRepository, StorageCore, StorageFolder } from '../storage';
|
||||
import { ISystemConfigRepository } from '../system-config';
|
||||
import { SystemConfigCore } from '../system-config/system-config.core';
|
||||
import { ISystemConfigRepository, SystemConfigCore } from '../system-config';
|
||||
import { IUserRepository, UserStatsQueryResponse } from '../user';
|
||||
import {
|
||||
ServerFeaturesDto,
|
||||
@@ -52,18 +51,8 @@ export class ServerInfoService {
|
||||
return serverVersion;
|
||||
}
|
||||
|
||||
async getFeatures(): Promise<ServerFeaturesDto> {
|
||||
const config = await this.configCore.getConfig();
|
||||
|
||||
return {
|
||||
machineLearning: MACHINE_LEARNING_ENABLED,
|
||||
search: SEARCH_ENABLED,
|
||||
|
||||
// TODO: use these instead of `POST oauth/config`
|
||||
oauth: config.oauth.enabled,
|
||||
oauthAutoLaunch: config.oauth.autoLaunch,
|
||||
passwordLogin: config.passwordLogin.enabled,
|
||||
};
|
||||
getFeatures(): Promise<ServerFeaturesDto> {
|
||||
return this.configCore.getFeatures();
|
||||
}
|
||||
|
||||
async getStats(): Promise<ServerStatsResponseDto> {
|
||||
|
||||
@@ -20,8 +20,8 @@ export interface DetectFaceResult {
}

export interface IMachineLearningRepository {
  classifyImage(input: MachineLearningInput): Promise<string[]>;
  encodeImage(input: MachineLearningInput): Promise<number[]>;
  encodeText(input: string): Promise<number[]>;
  detectFaces(input: MachineLearningInput): Promise<DetectFaceResult[]>;
  classifyImage(url: string, input: MachineLearningInput): Promise<string[]>;
  encodeImage(url: string, input: MachineLearningInput): Promise<number[]>;
  encodeText(url: string, input: string): Promise<number[]>;
  detectFaces(url: string, input: MachineLearningInput): Promise<DetectFaceResult[]>;
}

@@ -5,9 +5,11 @@ import {
|
||||
newJobRepositoryMock,
|
||||
newMachineLearningRepositoryMock,
|
||||
newSmartInfoRepositoryMock,
|
||||
newSystemConfigRepositoryMock,
|
||||
} from '@test';
|
||||
import { IAssetRepository, WithoutProperty } from '../asset';
|
||||
import { IJobRepository, JobName } from '../job';
|
||||
import { ISystemConfigRepository } from '../system-config';
|
||||
import { IMachineLearningRepository } from './machine-learning.interface';
|
||||
import { ISmartInfoRepository } from './smart-info.repository';
|
||||
import { SmartInfoService } from './smart-info.service';
|
||||
@@ -20,16 +22,18 @@ const asset = {
|
||||
describe(SmartInfoService.name, () => {
|
||||
let sut: SmartInfoService;
|
||||
let assetMock: jest.Mocked<IAssetRepository>;
|
||||
let configMock: jest.Mocked<ISystemConfigRepository>;
|
||||
let jobMock: jest.Mocked<IJobRepository>;
|
||||
let smartMock: jest.Mocked<ISmartInfoRepository>;
|
||||
let machineMock: jest.Mocked<IMachineLearningRepository>;
|
||||
|
||||
beforeEach(async () => {
|
||||
assetMock = newAssetRepositoryMock();
|
||||
configMock = newSystemConfigRepositoryMock();
|
||||
smartMock = newSmartInfoRepositoryMock();
|
||||
jobMock = newJobRepositoryMock();
|
||||
machineMock = newMachineLearningRepositoryMock();
|
||||
sut = new SmartInfoService(assetMock, jobMock, smartMock, machineMock);
|
||||
sut = new SmartInfoService(assetMock, configMock, jobMock, smartMock, machineMock);
|
||||
|
||||
assetMock.getByIds.mockResolvedValue([asset]);
|
||||
});
|
||||
@@ -80,7 +84,9 @@ describe(SmartInfoService.name, () => {
|
||||
|
||||
await sut.handleClassifyImage({ id: asset.id });
|
||||
|
||||
expect(machineMock.classifyImage).toHaveBeenCalledWith({ imagePath: 'path/to/resize.ext' });
|
||||
expect(machineMock.classifyImage).toHaveBeenCalledWith('http://immich-machine-learning:3003', {
|
||||
imagePath: 'path/to/resize.ext',
|
||||
});
|
||||
expect(smartMock.upsert).toHaveBeenCalledWith({
|
||||
assetId: 'asset-1',
|
||||
tags: ['tag1', 'tag2', 'tag3'],
|
||||
@@ -139,7 +145,9 @@ describe(SmartInfoService.name, () => {
|
||||
|
||||
await sut.handleEncodeClip({ id: asset.id });
|
||||
|
||||
expect(machineMock.encodeImage).toHaveBeenCalledWith({ imagePath: 'path/to/resize.ext' });
|
||||
expect(machineMock.encodeImage).toHaveBeenCalledWith('http://immich-machine-learning:3003', {
|
||||
imagePath: 'path/to/resize.ext',
|
||||
});
|
||||
expect(smartMock.upsert).toHaveBeenCalledWith({
|
||||
assetId: 'asset-1',
|
||||
clipEmbedding: [0.01, 0.02, 0.03],
|
||||
|
@@ -1,23 +1,31 @@
import { Inject, Injectable, Logger } from '@nestjs/common';
import { Inject, Injectable } from '@nestjs/common';
import { IAssetRepository, WithoutProperty } from '../asset';
import { MACHINE_LEARNING_ENABLED } from '../domain.constant';
import { usePagination } from '../domain.util';
import { IBaseJob, IEntityJob, IJobRepository, JobName, JOBS_ASSET_PAGINATION_SIZE } from '../job';
import { ISystemConfigRepository, SystemConfigCore } from '../system-config';
import { IMachineLearningRepository } from './machine-learning.interface';
import { ISmartInfoRepository } from './smart-info.repository';

@Injectable()
export class SmartInfoService {
private logger = new Logger(SmartInfoService.name);
private configCore: SystemConfigCore;

constructor(
@Inject(IAssetRepository) private assetRepository: IAssetRepository,
@Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
@Inject(IJobRepository) private jobRepository: IJobRepository,
@Inject(ISmartInfoRepository) private repository: ISmartInfoRepository,
@Inject(IMachineLearningRepository) private machineLearning: IMachineLearningRepository,
) {}
) {
this.configCore = new SystemConfigCore(configRepository);
}

async handleQueueObjectTagging({ force }: IBaseJob) {
const { machineLearning } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.tagImageEnabled) {
return true;
}

const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
return force
? this.assetRepository.getAll(pagination)
@@ -34,19 +42,28 @@ export class SmartInfoService {
}

async handleClassifyImage({ id }: IEntityJob) {
const [asset] = await this.assetRepository.getByIds([id]);
const { machineLearning } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.tagImageEnabled) {
return true;
}

if (!MACHINE_LEARNING_ENABLED || !asset.resizePath) {
const [asset] = await this.assetRepository.getByIds([id]);
if (!asset.resizePath) {
return false;
}

const tags = await this.machineLearning.classifyImage({ imagePath: asset.resizePath });
const tags = await this.machineLearning.classifyImage(machineLearning.url, { imagePath: asset.resizePath });
await this.repository.upsert({ assetId: asset.id, tags });

return true;
}

async handleQueueEncodeClip({ force }: IBaseJob) {
const { machineLearning } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.clipEncodeEnabled) {
return true;
}

const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
return force
? this.assetRepository.getAll(pagination)
@@ -63,13 +80,17 @@ export class SmartInfoService {
}

async handleEncodeClip({ id }: IEntityJob) {
const [asset] = await this.assetRepository.getByIds([id]);
const { machineLearning } = await this.configCore.getConfig();
if (!machineLearning.enabled || !machineLearning.clipEncodeEnabled) {
return true;
}

if (!MACHINE_LEARNING_ENABLED || !asset.resizePath) {
const [asset] = await this.assetRepository.getByIds([id]);
if (!asset.resizePath) {
return false;
}

const clipEmbedding = await this.machineLearning.encodeImage({ imagePath: asset.resizePath });
const clipEmbedding = await this.machineLearning.encodeImage(machineLearning.url, { imagePath: asset.resizePath });
await this.repository.upsert({ assetId: asset.id, clipEmbedding: clipEmbedding });

return true;

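The hunk above replaces the global MACHINE_LEARNING_ENABLED constant with per-feature settings read from SystemConfigCore and passes the configured URL into each machine-learning call. Below is a minimal, self-contained sketch of that config-gated pattern; the function and its structural parameter types are simplified stand-ins, not the actual service code.

// Minimal sketch of the config-gated call pattern shown above; names and types are simplified stand-ins.
type MlConfig = { enabled: boolean; clipEncodeEnabled: boolean; url: string };

async function encodeIfEnabled(
  getConfig: () => Promise<{ machineLearning: MlConfig }>,
  encodeImage: (url: string, input: { imagePath: string }) => Promise<number[]>,
  imagePath: string,
): Promise<number[] | null> {
  const { machineLearning } = await getConfig();
  if (!machineLearning.enabled || !machineLearning.clipEncodeEnabled) {
    return null; // feature switched off at runtime: skip the ML call instead of failing the job
  }
  return encodeImage(machineLearning.url, { imagePath });
}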
@@ -0,0 +1,19 @@
import { IsBoolean, IsUrl, ValidateIf } from 'class-validator';

export class SystemConfigMachineLearningDto {
@IsBoolean()
enabled!: boolean;

@IsUrl({ require_tld: false })
@ValidateIf((dto) => dto.enabled)
url!: string;

@IsBoolean()
clipEncodeEnabled!: boolean;

@IsBoolean()
facialRecognitionEnabled!: boolean;

@IsBoolean()
tagImageEnabled!: boolean;
}

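The new SystemConfigMachineLearningDto above validates the machineLearning section of the system config: every toggle must be a boolean, and url is only checked (with require_tld: false, so bare compose hostnames pass) when enabled is true. An illustrative object that would satisfy the DTO follows; the values are placeholders apart from the default URL taken from this diff.

// Illustrative machineLearning section that would pass the DTO validation above (values are placeholders).
const machineLearningExample = {
  enabled: true,
  url: 'http://immich-machine-learning:3003', // require_tld: false permits bare service names like this
  clipEncodeEnabled: true,
  facialRecognitionEnabled: true,
  tagImageEnabled: false, // each feature can be toggled independently
};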
@@ -4,16 +4,22 @@ import { Type } from 'class-transformer';
import { IsObject, ValidateNested } from 'class-validator';
import { SystemConfigFFmpegDto } from './system-config-ffmpeg.dto';
import { SystemConfigJobDto } from './system-config-job.dto';
import { SystemConfigMachineLearningDto } from './system-config-machine-learning.dto';
import { SystemConfigOAuthDto } from './system-config-oauth.dto';
import { SystemConfigPasswordLoginDto } from './system-config-password-login.dto';
import { SystemConfigStorageTemplateDto } from './system-config-storage-template.dto';

export class SystemConfigDto {
export class SystemConfigDto implements SystemConfig {
@Type(() => SystemConfigFFmpegDto)
@ValidateNested()
@IsObject()
ffmpeg!: SystemConfigFFmpegDto;

@Type(() => SystemConfigMachineLearningDto)
@ValidateNested()
@IsObject()
machineLearning!: SystemConfigMachineLearningDto;

@Type(() => SystemConfigOAuthDto)
@ValidateNested()
@IsObject()

@@ -1,5 +1,6 @@
export * from './dto';
export * from './response-dto';
export * from './system-config.constants';
export * from './system-config.core';
export * from './system-config.repository';
export * from './system-config.service';

@@ -9,11 +9,14 @@ import {
TranscodePolicy,
VideoCodec,
} from '@app/infra/entities';
import { BadRequestException, Injectable, Logger } from '@nestjs/common';
import { BadRequestException, ForbiddenException, Injectable, Logger } from '@nestjs/common';
import { plainToClass } from 'class-transformer';
import { validate } from 'class-validator';
import * as _ from 'lodash';
import { Subject } from 'rxjs';
import { DeepPartial } from 'typeorm';
import { QueueName } from '../job/job.constants';
import { SystemConfigDto } from './dto';
import { ISystemConfigRepository } from './system-config.repository';

export type SystemConfigValidator = (config: SystemConfig) => void | Promise<void>;
@@ -44,6 +47,13 @@ export const defaults = Object.freeze<SystemConfig>({
[QueueName.THUMBNAIL_GENERATION]: { concurrency: 5 },
[QueueName.VIDEO_CONVERSION]: { concurrency: 1 },
},
machineLearning: {
enabled: process.env.IMMICH_MACHINE_LEARNING_ENABLED !== 'false',
url: process.env.IMMICH_MACHINE_LEARNING_URL || 'http://immich-machine-learning:3003',
facialRecognitionEnabled: true,
tagImageEnabled: true,
clipEncodeEnabled: true,
},
oauth: {
enabled: false,
issuerUrl: '',
@@ -71,17 +81,82 @@ export const defaults = Object.freeze<SystemConfig>({
},
});

export enum FeatureFlag {
CLIP_ENCODE = 'clipEncode',
FACIAL_RECOGNITION = 'facialRecognition',
TAG_IMAGE = 'tagImage',
SIDECAR = 'sidecar',
SEARCH = 'search',
OAUTH = 'oauth',
OAUTH_AUTO_LAUNCH = 'oauthAutoLaunch',
PASSWORD_LOGIN = 'passwordLogin',
CONFIG_FILE = 'configFile',
}

export type FeatureFlags = Record<FeatureFlag, boolean>;

const singleton = new Subject<SystemConfig>();

@Injectable()
export class SystemConfigCore {
private logger = new Logger(SystemConfigCore.name);
private validators: SystemConfigValidator[] = [];
private configCache: SystemConfig | null = null;

public config$ = singleton;

constructor(private repository: ISystemConfigRepository) {}

async requireFeature(feature: FeatureFlag) {
const hasFeature = await this.hasFeature(feature);
if (!hasFeature) {
switch (feature) {
case FeatureFlag.CLIP_ENCODE:
throw new BadRequestException('Clip encoding is not enabled');
case FeatureFlag.FACIAL_RECOGNITION:
throw new BadRequestException('Facial recognition is not enabled');
case FeatureFlag.TAG_IMAGE:
throw new BadRequestException('Image tagging is not enabled');
case FeatureFlag.SIDECAR:
throw new BadRequestException('Sidecar is not enabled');
case FeatureFlag.SEARCH:
throw new BadRequestException('Search is not enabled');
case FeatureFlag.OAUTH:
throw new BadRequestException('OAuth is not enabled');
case FeatureFlag.PASSWORD_LOGIN:
throw new BadRequestException('Password login is not enabled');
case FeatureFlag.CONFIG_FILE:
throw new BadRequestException('Config file is not set');
default:
throw new ForbiddenException(`Missing required feature: ${feature}`);
}
}
}

async hasFeature(feature: FeatureFlag) {
const features = await this.getFeatures();
return features[feature] ?? false;
}

async getFeatures(): Promise<FeatureFlags> {
const config = await this.getConfig();
const mlEnabled = config.machineLearning.enabled;

return {
[FeatureFlag.CLIP_ENCODE]: mlEnabled && config.machineLearning.clipEncodeEnabled,
[FeatureFlag.FACIAL_RECOGNITION]: mlEnabled && config.machineLearning.facialRecognitionEnabled,
[FeatureFlag.TAG_IMAGE]: mlEnabled && config.machineLearning.tagImageEnabled,
[FeatureFlag.SIDECAR]: true,
[FeatureFlag.SEARCH]: process.env.TYPESENSE_ENABLED !== 'false',

// TODO: use these instead of `POST oauth/config`
[FeatureFlag.OAUTH]: config.oauth.enabled,
[FeatureFlag.OAUTH_AUTO_LAUNCH]: config.oauth.autoLaunch,
[FeatureFlag.PASSWORD_LOGIN]: config.passwordLogin.enabled,
[FeatureFlag.CONFIG_FILE]: !!process.env.IMMICH_CONFIG_FILE,
};
}

public getDefaults(): SystemConfig {
return defaults;
}
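The FeatureFlag enum and getFeatures() above derive runtime flags from the merged config plus environment variables, while requireFeature() turns a missing flag into a BadRequestException with a flag-specific message. A hedged usage sketch follows; the wrapper functions are hypothetical, and the core parameter is typed structurally so the snippet stands alone.

// Hedged usage sketch of the feature-flag helpers; the wrappers are hypothetical, not part of the diff.
type FeatureFlagName = 'clipEncode' | 'search';

interface FeatureChecker {
  requireFeature(feature: FeatureFlagName): Promise<void>;
  hasFeature(feature: FeatureFlagName): Promise<boolean>;
}

// Guard an operation: requireFeature throws (e.g. 'Clip encoding is not enabled') when the flag is off.
async function encodeClipOrFail(core: FeatureChecker, run: () => Promise<void>): Promise<void> {
  await core.requireFeature('clipEncode');
  await run();
}

// Or branch without throwing:
async function logSearchAvailability(core: FeatureChecker): Promise<void> {
  console.log(`search enabled: ${await core.hasFeature('search')}`);
}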
@@ -90,18 +165,16 @@ export class SystemConfigCore {
this.validators.push(validator);
}

public async getConfig() {
const overrides = await this.repository.load();
const config: DeepPartial<SystemConfig> = {};
for (const { key, value } of overrides) {
// set via dot notation
_.set(config, key, value);
}

return _.defaultsDeep(config, defaults) as SystemConfig;
public getConfig(force = false): Promise<SystemConfig> {
const configFilePath = process.env.IMMICH_CONFIG_FILE;
return configFilePath ? this.loadFromFile(configFilePath, force) : this.loadFromDatabase();
}

public async updateConfig(config: SystemConfig): Promise<SystemConfig> {
if (await this.hasFeature(FeatureFlag.CONFIG_FILE)) {
throw new BadRequestException('Cannot update configuration while IMMICH_CONFIG_FILE is in use');
}

try {
for (const validator of this.validators) {
await validator(config);
@@ -144,8 +217,45 @@ export class SystemConfigCore {
}

public async refreshConfig() {
const newConfig = await this.getConfig();
const newConfig = await this.getConfig(true);

this.config$.next(newConfig);
}

private async loadFromDatabase() {
const config: DeepPartial<SystemConfig> = {};
const overrides = await this.repository.load();
for (const { key, value } of overrides) {
// set via dot notation
_.set(config, key, value);
}

return _.defaultsDeep(config, defaults) as SystemConfig;
}

private async loadFromFile(filepath: string, force = false) {
if (force || !this.configCache) {
try {
const overrides = JSON.parse((await this.repository.readFile(filepath)).toString());
const config = plainToClass(SystemConfigDto, _.defaultsDeep(overrides, defaults));

const errors = await validate(config, {
whitelist: true,
forbidNonWhitelisted: true,
forbidUnknownValues: true,
});
if (errors.length > 0) {
this.logger.error('Validation error', errors);
throw new Error(`Invalid value(s) in file: ${errors}`);
}

this.configCache = config;
} catch (error: Error | any) {
this.logger.error(`Unable to load configuration file: ${filepath} due to ${error}`, error?.stack);
throw new Error('Invalid configuration file');
}
}

return this.configCache;
}
}

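With IMMICH_CONFIG_FILE set, getConfig() above bypasses the database: loadFromFile() parses the file as JSON, merges it over the defaults with defaultsDeep, and validates the result against SystemConfigDto, rejecting unknown keys. A hedged illustration of such a file follows, written as the object it parses to; the keys and values are taken from the defaults and the spec further down, and any omitted section simply falls back to the defaults.

// Illustrative contents of a file referenced by IMMICH_CONFIG_FILE, written as the object it parses to.
// Sections left out fall back to the defaults; unknown keys would fail validation (forbidNonWhitelisted).
const immichConfigFile = {
  machineLearning: {
    enabled: true,
    url: 'http://immich-machine-learning:3003',
    clipEncodeEnabled: true,
    facialRecognitionEnabled: true,
    tagImageEnabled: true,
  },
  ffmpeg: { crf: 30 }, // same partial override exercised in the spec below
};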
@@ -4,6 +4,7 @@ export const ISystemConfigRepository = 'ISystemConfigRepository';

export interface ISystemConfigRepository {
load(): Promise<SystemConfigEntity[]>;
readFile(filename: string): Promise<Buffer>;
saveAll(items: SystemConfigEntity[]): Promise<SystemConfigEntity[]>;
deleteKeys(keys: string[]): Promise<void>;
}

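The interface gains readFile(), which the core uses above to read IMMICH_CONFIG_FILE. The concrete repository implementation is not part of this diff, so the following is only an assumption of what a minimal one might look like.

// Assumed sketch of a readFile implementation (the concrete repository class is not shown in this diff).
import { readFile } from 'node:fs/promises';

async function readConfigFile(filename: string): Promise<Buffer> {
  return readFile(filename); // with no encoding argument this resolves to a Buffer, matching the interface
}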
@@ -46,6 +46,13 @@ const updatedConfig = Object.freeze<SystemConfig>({
accel: TranscodeHWAccel.DISABLED,
tonemap: ToneMapping.HABLE,
},
machineLearning: {
enabled: true,
url: 'http://immich-machine-learning:3003',
facialRecognitionEnabled: true,
tagImageEnabled: true,
clipEncodeEnabled: true,
},
oauth: {
autoLaunch: true,
autoRegister: true,
@@ -77,6 +84,7 @@ describe(SystemConfigService.name, () => {
let jobMock: jest.Mocked<IJobRepository>;

beforeEach(async () => {
delete process.env.IMMICH_CONFIG_FILE;
configMock = newSystemConfigRepositoryMock();
jobMock = newJobRepositoryMock();
sut = new SystemConfigService(configMock, jobMock);
@@ -119,6 +127,43 @@ describe(SystemConfigService.name, () => {

await expect(sut.getConfig()).resolves.toEqual(updatedConfig);
});

it('should load the config from a file', async () => {
process.env.IMMICH_CONFIG_FILE = 'immich-config.json';
const partialConfig = { ffmpeg: { crf: 30 }, oauth: { autoLaunch: true } };
configMock.readFile.mockResolvedValue(Buffer.from(JSON.stringify(partialConfig)));

await expect(sut.getConfig()).resolves.toEqual(updatedConfig);

expect(configMock.readFile).toHaveBeenCalledWith('immich-config.json');
});

it('should accept an empty configuration file', async () => {
process.env.IMMICH_CONFIG_FILE = 'immich-config.json';
configMock.readFile.mockResolvedValue(Buffer.from(JSON.stringify({})));

await expect(sut.getConfig()).resolves.toEqual(defaults);

expect(configMock.readFile).toHaveBeenCalledWith('immich-config.json');
});

const tests = [
{ should: 'validate numbers', config: { ffmpeg: { crf: 'not-a-number' } } },
{ should: 'validate booleans', config: { oauth: { enabled: 'invalid' } } },
{ should: 'validate enums', config: { ffmpeg: { transcode: 'unknown' } } },
{ should: 'validate top level unknown options', config: { unknownOption: true } },
{ should: 'validate nested unknown options', config: { ffmpeg: { unknownOption: true } } },
{ should: 'validate required oauth fields', config: { oauth: { enabled: true } } },
];

for (const test of tests) {
it(`should ${test.should}`, async () => {
process.env.IMMICH_CONFIG_FILE = 'immich-config.json';
configMock.readFile.mockResolvedValue(Buffer.from(JSON.stringify(test.config)));

await expect(sut.getConfig()).rejects.toBeInstanceOf(Error);
});
}
});

describe('getStorageTemplateOptions', () => {
@@ -169,6 +214,13 @@ describe(SystemConfigService.name, () => {
expect(validator).toHaveBeenCalledWith(updatedConfig);
expect(configMock.saveAll).not.toHaveBeenCalled();
});

it('should throw an error if a config file is in use', async () => {
process.env.IMMICH_CONFIG_FILE = 'immich-config.json';
configMock.readFile.mockResolvedValue(Buffer.from(JSON.stringify({})));
await expect(sut.updateConfig(defaults)).rejects.toBeInstanceOf(BadRequestException);
expect(configMock.saveAll).not.toHaveBeenCalled();
});
});

describe('refreshConfig', () => {

@@ -1,7 +1,7 @@
import { AssetEntity, ExifEntity } from '@app/infra/entities';
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { IsNull, Not } from 'typeorm';
import { MoreThan } from 'typeorm';
import { In } from 'typeorm/find-options/operator/In';
import { Repository } from 'typeorm/repository/Repository';
import { AssetSearchDto } from './dto/asset-search.dto';
@@ -127,10 +127,10 @@ export class AssetRepository implements IAssetRepository {
return this.assetRepository.find({
where: {
ownerId,
resizePath: dto.withoutThumbs ? undefined : Not(IsNull()),
isVisible: true,
isFavorite: dto.isFavorite,
isArchived: dto.isArchived,
updatedAt: dto.updatedAfter ? MoreThan(dto.updatedAfter) : undefined,
},
relations: {
exifInfo: true,

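The asset repository change above threads an optional updatedAfter filter into the TypeORM query as a MoreThan condition on updatedAt, so callers can fetch only assets modified since a given timestamp. A standalone sketch of that operator pattern follows; the field names and the repository mentioned in the comment are illustrative, not the actual Immich types.

// Standalone sketch of the MoreThan filter pattern; field names and the repository are illustrative.
import { MoreThan } from 'typeorm';

const updatedAfter: Date | undefined = new Date('2023-08-01T00:00:00Z');
const where = {
  ownerId: 'user-1',
  updatedAt: updatedAfter ? MoreThan(updatedAfter) : undefined, // undefined drops the condition entirely
};
// someAssetRepository.find({ where }) would then return only rows updated after that time.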
Some files were not shown because too many files have changed in this diff.