Mirror of https://github.com/immich-app/immich.git (synced 2025-12-12 07:41:02 -08:00)

**Compare commits: `focus_ring...perf/postg` (19 commits)**

7bcf9aa3a7, e773a7b7a1, f0c013f844, 2913a73456, 262ef2a746, 8c0c8a8d0e, b845184c80, 1fde02ee1e, 526c02297c, 732b06eec8, 436cff72b5, be5cc2cdf5, 094a41ac9a, ebad6a008f, 0c261ffbe2, 6df6103c67, 8c5116bc1d, e3812a0e36, 4b1ced439b

Note: the mirror did not preserve commit authors and dates, nor the file headers of several diffs; unnamed diffs below are labeled by their evident content.
**.github/workflows/test.yml** (vendored, 25 changed lines):

```diff
@@ -338,12 +338,15 @@ jobs:
     name: End-to-End Tests (Server & CLI)
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_e2e_server_cli == 'true' }}
-    runs-on: mich
+    runs-on: ${{ matrix.runner }}
     permissions:
       contents: read
     defaults:
       run:
         working-directory: ./e2e
+    strategy:
+      matrix:
+        runner: [mich, ubuntu-24.04-arm]

     steps:
       - name: Checkout code
@@ -383,12 +386,15 @@ jobs:
     name: End-to-End Tests (Web)
     needs: pre-job
     if: ${{ needs.pre-job.outputs.should_run_e2e_web == 'true' }}
-    runs-on: mich
+    runs-on: ${{ matrix.runner }}
     permissions:
       contents: read
     defaults:
       run:
         working-directory: ./e2e
+    strategy:
+      matrix:
+        runner: [mich, ubuntu-24.04-arm]

     steps:
       - name: Checkout code
@@ -423,6 +429,21 @@ jobs:
         run: npx playwright test
         if: ${{ !cancelled() }}

+  success-check-e2e:
+    name: End-to-End Tests Success
+    needs: [e2e-tests-server-cli, e2e-tests-web]
+    permissions: {}
+    runs-on: ubuntu-latest
+    if: always()
+    steps:
+      - name: Any jobs failed?
+        if: ${{ contains(needs.*.result, 'failure') }}
+        run: exit 1
+      - name: All jobs passed or skipped
+        if: ${{ !(contains(needs.*.result, 'failure')) }}
+        # zizmor: ignore[template-injection]
+        run: echo "All jobs passed or skipped" && echo "${{ toJSON(needs.*.result) }}"
+
   mobile-unit-tests:
     name: Unit Test Mobile
     needs: pre-job
```
**First Docker Compose file** (with debug ports, likely the dev stack):

```diff
@@ -58,7 +58,6 @@ services:
       - 9231:9231
       - 2283:2283
     depends_on:
-      - redis
       - database
     healthcheck:
       disable: false
@@ -114,12 +113,6 @@ services:
     healthcheck:
       disable: false

-  redis:
-    container_name: immich_redis
-    image: docker.io/valkey/valkey:8-bookworm@sha256:c855f98e09d558a0d7cc1a4e56473231206a4c54c0114ada9c485b47aeb92ec8
-    healthcheck:
-      test: redis-cli ping || exit 1
-
   database:
     container_name: immich_postgres
     image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
```
**Second Docker Compose file**:

```diff
@@ -27,7 +27,6 @@ services:
     ports:
       - 2283:2283
     depends_on:
-      - redis
       - database
     restart: always
     healthcheck:
@@ -54,13 +53,6 @@ services:
     healthcheck:
       disable: false

-  redis:
-    container_name: immich_redis
-    image: docker.io/valkey/valkey:8-bookworm@sha256:c855f98e09d558a0d7cc1a4e56473231206a4c54c0114ada9c485b47aeb92ec8
-    healthcheck:
-      test: redis-cli ping || exit 1
-    restart: always
-
   database:
     container_name: immich_postgres
     image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
```
**Third Docker Compose file**:

```diff
@@ -25,7 +25,6 @@ services:
     ports:
       - '2283:2283'
     depends_on:
-      - redis
       - database
     restart: always
     healthcheck:
@@ -47,13 +46,6 @@ services:
     healthcheck:
       disable: false

-  redis:
-    container_name: immich_redis
-    image: docker.io/valkey/valkey:8-bookworm@sha256:c855f98e09d558a0d7cc1a4e56473231206a4c54c0114ada9c485b47aeb92ec8
-    healthcheck:
-      test: redis-cli ping || exit 1
-    restart: always
-
   database:
     container_name: immich_postgres
     image: docker.io/tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
```
**FAQ docs**:

```diff
@@ -278,7 +278,7 @@ You can use [Smart Search](/docs/features/searching.md) for this to some extent.

 ### I'm getting a lot of "faces" that aren't faces, what can I do?

 You can increase the MIN DETECTION SCORE to 0.8 to help prevent bad thumbnails. Setting the score too high (above 0.9) might filter out too many real faces depending on the library used. If you just want to hide specific faces, you can adjust the 'MIN FACES DETECTED' setting in the administration panel
 to increase the bar for what the algorithm considers a "core face" for that person, reducing the chance of bad thumbnails being chosen.

 ### The immich_model-cache volume takes up a lot of space, what could be the problem?
@@ -367,12 +367,6 @@ You need to [enable WebSockets](/docs/administration/reverse-proxy/) on your rev

 Immich components are typically deployed using docker. To see logs for deployed docker containers, you can use the [Docker CLI](https://docs.docker.com/engine/reference/commandline/cli/), specifically the `docker logs` command. For examples, see [Docker Help](/docs/guides/docker-help.md).

-### How can I reduce the log verbosity of Redis?
-
-To decrease Redis logs, you can add the following line to the `redis:` section of the `docker-compose.yml`:
-
-` command: redis-server --loglevel warning`
-
 ### How can I run Immich as a non-root user?

 You can change the user in the container by setting the `user` argument in `docker-compose.yml` for each service.
@@ -380,7 +374,6 @@ You may need to add mount points or docker volumes for the following internal co

 - `immich-machine-learning:/.config`
 - `immich-machine-learning:/.cache`
-- `redis:/data`

 The non-root user/group needs read/write access to the volume mounts, including `UPLOAD_LOCATION` and `/cache` for machine-learning.

@@ -425,7 +418,7 @@ After removing the containers and volumes, there are a few directories that need
 - `UPLOAD_LOCATION` contains all the media uploaded to Immich.

 :::note Portainer
 If you use portainer, bring down the stack in portainer. Go into the volumes section
 and remove all the volumes related to immich then restart the stack.
 :::

```
**Architecture docs**:

```diff
@@ -13,7 +13,7 @@ Immich uses a traditional client-server design, with a dedicated database for da

 <img alt="Immich Architecture" src={AppArchitecture} className="p-4 dark:bg-immich-dark-primary my-4" />

-The diagram shows clients communicating with the server's API via REST. The server communicates with downstream systems (i.e. Redis, Postgres, Machine Learning, file system) through repository interfaces. Not shown in the diagram, is that the server is split into two separate containers `immich-server` and `immich-microservices`. The microservices container does not handle API requests or schedule cron jobs, but primarily handles incoming job requests from Redis.
+The diagram shows clients communicating with the server's API via REST. The server communicates with downstream systems (i.e. Postgres, Machine Learning, file system) through repository interfaces. Not shown in the diagram, is that the server is split into two separate containers `immich-server` and `immich-microservices`. The microservices container does not handle API requests or schedule cron jobs, but primarily handles incoming job requests from Postgres.

 ## Clients

@@ -53,7 +53,6 @@ The Immich backend is divided into several services, which are run as individual
 1. `immich-server` - Handle and respond to REST API requests, execute background jobs (thumbnail generation, metadata extraction, transcoding, etc.)
 1. `immich-machine-learning` - Execute machine learning models
 1. `postgres` - Persistent data storage
-1. `redis`- Queue management for background jobs

 ### Immich Server

@@ -111,7 +110,3 @@ Immich persists data in Postgres, which includes information about access and au
 :::info
 See [Database Migrations](./database-migrations.md) for more information about how to modify the database to create an index, modify a table, add a new column, etc.
 :::
-
-### Redis
-
-Immich uses [Redis](https://redis.com/) via [BullMQ](https://docs.bullmq.io/) to manage job queues. Some jobs trigger subsequent jobs. For example, Smart Search and Facial Recognition relies on thumbnail generation and automatically run after one is generated.
```
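The updated architecture text says the microservices container now picks up job requests from Postgres rather than Redis. As a rough illustration of how a Postgres-backed queue can hand each job to exactly one worker — a common pattern built on `FOR UPDATE SKIP LOCKED`, not necessarily the exact mechanism these commits introduce — here is a sketch in which the `jobs` table and its columns are hypothetical:

```typescript
import { Client } from 'pg';

// Sketch of a Postgres-backed job-queue claim. The `jobs` table and its
// columns (status, created_at, payload) are hypothetical stand-ins, not
// Immich's actual schema. SKIP LOCKED lets concurrent workers each grab
// a different row without blocking one another.
async function claimNextJob(client: Client) {
  const { rows } = await client.query(
    `UPDATE jobs
        SET status = 'active'
      WHERE id = (
        SELECT id
          FROM jobs
         WHERE status = 'waiting'
         ORDER BY created_at
         LIMIT 1
           FOR UPDATE SKIP LOCKED
      )
      RETURNING id, name, payload`,
  );
  return rows[0] ?? null; // null means the queue is currently empty
}
```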
**Developer setup docs**:

```diff
@@ -23,7 +23,6 @@ This environment includes the services below. Additional details are available i
 - Server - [`/server`](https://github.com/immich-app/immich/tree/main/server)
 - Web app - [`/web`](https://github.com/immich-app/immich/tree/main/web)
 - Machine learning - [`/machine-learning`](https://github.com/immich-app/immich/tree/main/machine-learning)
-- Redis
 - PostgreSQL development database with exposed port `5432` so you can use any database client to access it

 All the services are packaged to run as with single Docker Compose command.
```
**Scaling docs**:

```diff
@@ -1,6 +1,6 @@
 # Scaling Immich

-Immich is built with modern deployment practices in mind, and the backend is designed to be able to run multiple instances in parallel. When doing this, the only requirement you need to be aware of is that every instance needs to be connected to the shared infrastructure. That means they should all have access to the same Postgres and Redis instances, and have the same files mounted into the containers.
+Immich is built with modern deployment practices in mind, and the backend is designed to be able to run multiple instances in parallel. When doing this, the only requirement you need to be aware of is that every instance needs to be connected to the shared infrastructure. That means they should all have access to the same Postgres instance, and have the same files mounted into the containers.

 Scaling can be useful for many reasons. Maybe you have a gaming PC that you want to use for transcoding and thumbnail generation, or perhaps you run a Kubernetes cluster across a handful of powerful servers that you want to make use of.

@@ -16,4 +16,4 @@ By default, each running `immich-server` container comes with multiple internal

 ## Scaling down

-In the same way you can scale up to multiple containers, you can also choose to scale down. All state is stored in Postgres, Redis, and the filesystem so there is no risk in stopping a running immich-server container, for example if you want to use your GPU to play some games. As long as there is an API worker running you will still be able to browse Immich, and jobs will wait to be processed until there is a worker available for them.
+In the same way you can scale up to multiple containers, you can also choose to scale down. All state is stored in Postgres and the filesystem so there is no risk in stopping a running immich-server container, for example if you want to use your GPU to play some games. As long as there is an API worker running you will still be able to browse Immich, and jobs will wait to be processed until there is a worker available for them.
```
**Environment variables docs**:

````diff
@@ -98,54 +98,6 @@ When `DB_URL` is defined, the `DB_HOSTNAME`, `DB_PORT`, `DB_USERNAME`, `DB_PASSW

 :::

-## Redis
-
-| Variable         | Description    | Default | Containers |
-| :--------------- | :------------- | :-----: | :--------- |
-| `REDIS_URL`      | Redis URL      |         | server     |
-| `REDIS_SOCKET`   | Redis socket   |         | server     |
-| `REDIS_HOSTNAME` | Redis host     | `redis` | server     |
-| `REDIS_PORT`     | Redis port     | `6379`  | server     |
-| `REDIS_USERNAME` | Redis username |         | server     |
-| `REDIS_PASSWORD` | Redis password |         | server     |
-| `REDIS_DBINDEX`  | Redis DB index | `0`     | server     |
-
-:::info
-All `REDIS_` variables must be provided to all Immich workers, including `api` and `microservices`.
-
-`REDIS_URL` must start with `ioredis://` and then include a `base64` encoded JSON string for the configuration.
-More information can be found in the upstream [ioredis] documentation.
-
-When `REDIS_URL` or `REDIS_SOCKET` are defined, the `REDIS_HOSTNAME`, `REDIS_PORT`, `REDIS_USERNAME`, `REDIS_PASSWORD`, and `REDIS_DBINDEX` variables are ignored.
-:::
-
-Redis (Sentinel) URL example JSON before encoding:
-
-<details>
-<summary>JSON</summary>
-
-```json
-{
-  "sentinels": [
-    {
-      "host": "redis-sentinel-node-0",
-      "port": 26379
-    },
-    {
-      "host": "redis-sentinel-node-1",
-      "port": 26379
-    },
-    {
-      "host": "redis-sentinel-node-2",
-      "port": 26379
-    }
-  ],
-  "name": "redis-sentinel"
-}
-```
-
-</details>
-
 ## Machine Learning

 | Variable | Description | Default | Containers |
````
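For anyone still on a release that reads these variables: the removed text required `REDIS_URL` to be `ioredis://` followed by base64-encoded JSON. A minimal sketch of producing such a URL from the Sentinel example above (plain Node/TypeScript, nothing Immich-specific):

```typescript
// Build an `ioredis://` URL by base64-encoding the Sentinel JSON shown
// above, as the removed docs described.
const sentinelConfig = {
  sentinels: [
    { host: 'redis-sentinel-node-0', port: 26379 },
    { host: 'redis-sentinel-node-1', port: 26379 },
    { host: 'redis-sentinel-node-2', port: 26379 },
  ],
  name: 'redis-sentinel',
};

const encoded = Buffer.from(JSON.stringify(sentinelConfig)).toString('base64');
const redisUrl = `ioredis://${encoded}`; // value to place in REDIS_URL
console.log(redisUrl);
```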
The same file's Docker secrets table also drops its Redis rows:

```diff
@@ -212,16 +164,10 @@ the `_FILE` variable should be set to the path of a file containing the variable
 | `DB_USERNAME`    | `DB_USERNAME_FILE`<sup>\*1</sup>    |
 | `DB_PASSWORD`    | `DB_PASSWORD_FILE`<sup>\*1</sup>    |
 | `DB_URL`         | `DB_URL_FILE`<sup>\*1</sup>         |
-| `REDIS_PASSWORD` | `REDIS_PASSWORD_FILE`<sup>\*2</sup> |

 \*1: See the [official documentation][docker-secrets-docs] for
 details on how to use Docker Secrets in the Postgres image.

-\*2: See [this comment][docker-secrets-example] for an example of how
-to use a Docker secret for the password in the Redis container.
-
 [tz-list]: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List
-[docker-secrets-example]: https://github.com/docker-library/redis/issues/46#issuecomment-335326234
 [docker-secrets-docs]: https://github.com/docker-library/docs/tree/master/postgres#docker-secrets
 [docker-secrets]: https://docs.docker.com/engine/swarm/secrets/
-[ioredis]: https://ioredis.readthedocs.io/en/latest/README/#connect-to-redis
```
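The `_FILE` rows above follow the usual Docker secrets convention: the variable's value lives in a file whose path is given by the `_FILE` variant. A small sketch of how a service could resolve that (illustrative only; this shows the convention, not Immich's actual configuration loader):

```typescript
import { readFileSync } from 'node:fs';

// Resolve a variable that may be provided directly or via the Docker
// secrets `_FILE` convention described in the table above. Illustrative
// sketch only; not Immich's actual loader.
function resolveSecret(name: string): string | undefined {
  const direct = process.env[name];
  if (direct !== undefined && direct !== '') {
    return direct;
  }
  const filePath = process.env[`${name}_FILE`];
  return filePath ? readFileSync(filePath, 'utf8').trim() : undefined;
}

const dbPassword = resolveSecret('DB_PASSWORD'); // falls back to DB_PASSWORD_FILE
```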
**TrueNAS SCALE docs**:

```diff
@@ -107,8 +107,6 @@ Accept the default option or select the **Machine Learning Image Type** for your

 Immich's default is `postgres` but you should consider setting the **Database Password** to a custom value using only the characters `A-Za-z0-9`.

-The **Redis Password** should be set to a custom value using only the characters `A-Za-z0-9`.
-
 Accept the **Log Level** default of **Log**.

 Leave **Hugging Face Endpoint** blank. (This is for downloading ML models from a different source.)
@@ -242,7 +240,7 @@ className="border rounded-xl"
 :::info
 Some Environment Variables are not available for the TrueNAS SCALE app. This is mainly because they can be configured through GUI options in the [Edit Immich screen](#edit-app-settings).

-Some examples are: `IMMICH_VERSION`, `UPLOAD_LOCATION`, `DB_DATA_LOCATION`, `TZ`, `IMMICH_LOG_LEVEL`, `DB_PASSWORD`, `REDIS_PASSWORD`.
+Some examples are: `IMMICH_VERSION`, `UPLOAD_LOCATION`, `DB_DATA_LOCATION`, `TZ`, `IMMICH_LOG_LEVEL`, `DB_PASSWORD`.
 :::

 ## Updating the App
```
**Unraid docs**:

```diff
@@ -17,9 +17,9 @@ Immich can easily be installed and updated on Unraid via:

 :::

-In order to install Immich from the Unraid CA, you will need an existing Redis and PostgreSQL 14 container, If you do not already have Redis or PostgreSQL you can install them from the Unraid CA, just make sure you choose PostgreSQL **14**.
+In order to install Immich from the Unraid CA, you will need an existing PostgreSQL 14 container, If you do not already have PostgreSQL you can install it from the Unraid CA, just make sure you choose PostgreSQL **14**.

-Once you have Redis and PostgreSQL running, search for Immich on the Unraid CA, choose either of the templates listed and fill out the example variables.
+Once you have PostgreSQL running, search for Immich on the Unraid CA, choose either of the templates listed and fill out the example variables.

 For more information about setting up the community image see [here](https://github.com/imagegenius/docker-immich#application-setup)

@@ -45,63 +45,63 @@ width="70%"
 alt="Select Plugins > Compose.Manager > Add New Stack > Label it Immich"
 />

 3. Select the cogwheel ⚙️ next to Immich and click "**Edit Stack**"
 4. Click "**Compose File**" and then paste the entire contents of the [Immich Docker Compose](https://github.com/immich-app/immich/releases/latest/download/docker-compose.yml) file into the Unraid editor. Remove any text that may be in the text area by default. Note that Unraid v6.12.10 uses version 24.0.9 of the Docker Engine, which does not support healthcheck `start_interval` as defined in the `database` service of the Docker compose file (version 25 or higher is needed). This parameter defines an initial waiting period before starting health checks, to give the container time to start up. Commenting out the `start_interval` and `start_period` parameters will allow the containers to start up normally. The only downside to this is that the database container will not receive an initial health check until `interval` time has passed.

 <details >
 <summary>Using an existing Postgres container? Click me! Otherwise proceed to step 5.</summary>
 <ul>
 <li>Comment out the database service</li>
 <img
 src={require('./img/unraid02.webp').default}
 width="50%"
 alt="Comment out database service in the compose file"
 />
 <li>Comment out the database dependency for <b>each service</b> <i>(example in screenshot below only shows 2 of the services - ensure you do this for all services)</i></li>
 <img
 src={require('./img/unraid03.webp').default}
 width="50%"
 alt="Comment out every reference to the database service in the compose file"
 />
 <li>Comment out the volumes</li>
 <img
 src={require('./img/unraid04.webp').default}
 width="20%"
 alt="Comment out database volume"
 />
 </ul>
 </details>

 5. Click "**Save Changes**", you will be prompted to edit stack UI labels, just leave this blank and click "**Ok**"
 6. Select the cog ⚙️ next to Immich, click "**Edit Stack**", then click "**Env File**"
 7. Paste the entire contents of the [Immich example.env](https://github.com/immich-app/immich/releases/latest/download/example.env) file into the Unraid editor, then **before saving** edit the following:

 - `UPLOAD_LOCATION`: Create a folder in your Images Unraid share and place the **absolute** location here > For example my _"images"_ share has a folder within it called _"immich"_. If I browse to this directory in the terminal and type `pwd` the output is `/mnt/user/images/immich`. This is the exact value I need to enter as my `UPLOAD_LOCATION`
 - `DB_DATA_LOCATION`: Change this to use an Unraid share (preferably a cache pool, e.g. `/mnt/user/appdata/postgresql/data`). This uses the `appdata` share. Do also create the `postgresql` folder, by running `mkdir /mnt/user/{share_location}/postgresql/data`. If left at default it will try to use Unraid's `/boot/config/plugins/compose.manager/projects/[stack_name]/postgres` folder which it doesn't have permissions to, resulting in this container continuously restarting.

 <img
 src={require('./img/unraid05.webp').default}
 width="70%"
 alt="Absolute location of where you want immich images stored"
 />

 <details >
 <summary>Using an existing Postgres container? Click me! Otherwise proceed to step 8.</summary>
 <p>Update the following database variables as relevant to your Postgres container:</p>
 <ul>
 <li><code>DB_HOSTNAME</code></li>
 <li><code>DB_USERNAME</code></li>
 <li><code>DB_PASSWORD</code></li>
 <li><code>DB_DATABASE_NAME</code></li>
 <li><code>DB_PORT</code></li>
 </ul>
 </details>

 8. Click "**Save Changes**" followed by "**Compose Up**" and Unraid will begin to create the Immich containers in a popup window. Once complete you will see a message on the popup window stating _"Connection Closed"_. Click "**Done**" and go to the Unraid "**Docker**" page

 > Note: This can take several minutes depending on your Internet speed and Unraid hardware

 9. Once on the Docker page you will see several Immich containers, one of them will be labelled `immich_server` and will have a port mapping. Visit the `IP:PORT` displayed in your web browser and you should see the Immich admin setup page.

 <img
 src={require('./img/unraid06.webp').default}
@@ -122,7 +122,7 @@ alt="Go to Docker Tab and visit the address listed next to immich-web"
 width="90%"
 alt="Go to Docker Tab and visit the address listed next to immich-web"
 />

 </details>

 :::tip
```
**e2e Docker Compose file**:

```diff
@@ -28,14 +28,10 @@ services:
     extra_hosts:
       - 'auth-server:host-gateway'
     depends_on:
-      - redis
       - database
     ports:
       - 2285:2285

-  redis:
-    image: redis:6.2-alpine@sha256:3211c33a618c457e5d241922c975dbc4f446d0bdb2dc75694f5573ef8e2d01fa
-
   database:
     image: tensorchord/pgvecto-rs:pg14-v0.2.0@sha256:739cdd626151ff1f796dc95a6591b55a714f341c737e27f045019ceabf8e8c52
     command: -c fsync=off -c shared_preload_libraries=vectors.so
```
**e2e `/audits` spec** (deleted outright):

```diff
@@ -1,43 +0,0 @@
-import { deleteAssets, getAuditFiles, updateAsset, type LoginResponseDto } from '@immich/sdk';
-import { asBearerAuth, utils } from 'src/utils';
-import { beforeAll, describe, expect, it } from 'vitest';
-
-describe('/audits', () => {
-  let admin: LoginResponseDto;
-
-  beforeAll(async () => {
-    await utils.resetDatabase();
-    await utils.resetFilesystem();
-
-    admin = await utils.adminSetup();
-  });
-
-  // TODO: Enable these tests again once #7436 is resolved as these were flaky
-  describe.skip('GET :/file-report', () => {
-    it('excludes assets without issues from report', async () => {
-      const [trashedAsset, archivedAsset] = await Promise.all([
-        utils.createAsset(admin.accessToken),
-        utils.createAsset(admin.accessToken),
-        utils.createAsset(admin.accessToken),
-      ]);
-
-      await Promise.all([
-        deleteAssets({ assetBulkDeleteDto: { ids: [trashedAsset.id] } }, { headers: asBearerAuth(admin.accessToken) }),
-        updateAsset(
-          {
-            id: archivedAsset.id,
-            updateAssetDto: { isArchived: true },
-          },
-          { headers: asBearerAuth(admin.accessToken) },
-        ),
-      ]);
-
-      const body = await getAuditFiles({
-        headers: asBearerAuth(admin.accessToken),
-      });
-
-      expect(body.orphans).toHaveLength(0);
-      expect(body.extras).toHaveLength(0);
-    });
-  });
-});
```
**e2e `/jobs` spec**:

```diff
@@ -78,7 +78,7 @@ describe('/jobs', () => {
     }

     await utils.jobCommand(admin.accessToken, JobName.MetadataExtraction, {
-      command: JobCommand.Empty,
+      command: JobCommand.Clear,
       force: false,
     });

@@ -160,7 +160,7 @@ describe('/jobs', () => {
     expect(assetBefore.thumbhash).toBeNull();

     await utils.jobCommand(admin.accessToken, JobName.ThumbnailGeneration, {
-      command: JobCommand.Empty,
+      command: JobCommand.Clear,
       force: false,
     });

```
**i18n strings file**:

```diff
@@ -1260,6 +1260,7 @@
   "no_favorites_message": "Add favorites to quickly find your best pictures and videos",
   "no_libraries_message": "Create an external library to view your photos and videos",
   "no_name": "No Name",
+  "no_people_found": "No matching people found",
   "no_places": "No places",
   "no_results": "No results",
   "no_results_description": "Try a synonym or more general keyword",
@@ -1572,6 +1573,7 @@
   "select_keep_all": "Select keep all",
   "select_library_owner": "Select library owner",
   "select_new_face": "Select new face",
+  "select_person_to_tag": "Select a person to tag",
   "select_photos": "Select photos",
   "select_trash_all": "Select trash all",
   "select_user_for_sharing_page_err_album": "Failed to create album",
```
**Install script**:

```diff
@@ -59,7 +59,7 @@ show_friendly_message() {
   Successfully deployed Immich!
   You can access the website or the mobile app at http://$ip_address:2283
   ---------------------------------------------------
-  If you want to configure custom information of the server, including the database, Redis information, or the backup (or upload) location, etc.
+  If you want to configure custom information of the server, including the database, or the backup (or upload) location, etc.

   1. First bring down the containers with the command 'docker compose down' in the immich-app directory,

```
**mobile/openapi/README.md** (generated, 12 changed lines):

```diff
@@ -100,7 +100,6 @@ Class | Method | HTTP request | Description
 *AssetsApi* | [**getAllUserAssetsByDeviceId**](doc//AssetsApi.md#getalluserassetsbydeviceid) | **GET** /assets/device/{deviceId} | getAllUserAssetsByDeviceId
 *AssetsApi* | [**getAssetInfo**](doc//AssetsApi.md#getassetinfo) | **GET** /assets/{id} |
 *AssetsApi* | [**getAssetStatistics**](doc//AssetsApi.md#getassetstatistics) | **GET** /assets/statistics |
-*AssetsApi* | [**getMemoryLane**](doc//AssetsApi.md#getmemorylane) | **GET** /assets/memory-lane |
 *AssetsApi* | [**getRandom**](doc//AssetsApi.md#getrandom) | **GET** /assets/random |
 *AssetsApi* | [**playAssetVideo**](doc//AssetsApi.md#playassetvideo) | **GET** /assets/{id}/video/playback |
 *AssetsApi* | [**replaceAsset**](doc//AssetsApi.md#replaceasset) | **PUT** /assets/{id}/original | replaceAsset
@@ -122,9 +121,6 @@ Class | Method | HTTP request | Description
 *FacesApi* | [**deleteFace**](doc//FacesApi.md#deleteface) | **DELETE** /faces/{id} |
 *FacesApi* | [**getFaces**](doc//FacesApi.md#getfaces) | **GET** /faces |
 *FacesApi* | [**reassignFacesById**](doc//FacesApi.md#reassignfacesbyid) | **PUT** /faces/{id} |
-*FileReportsApi* | [**fixAuditFiles**](doc//FileReportsApi.md#fixauditfiles) | **POST** /reports/fix |
-*FileReportsApi* | [**getAuditFiles**](doc//FileReportsApi.md#getauditfiles) | **GET** /reports |
-*FileReportsApi* | [**getFileChecksums**](doc//FileReportsApi.md#getfilechecksums) | **POST** /reports/checksum |
 *JobsApi* | [**createJob**](doc//JobsApi.md#createjob) | **POST** /jobs |
 *JobsApi* | [**getAllJobsStatus**](doc//JobsApi.md#getalljobsstatus) | **GET** /jobs |
 *JobsApi* | [**sendJobCommand**](doc//JobsApi.md#sendjobcommand) | **PUT** /jobs/{id} |
@@ -332,11 +328,6 @@ Class | Method | HTTP request | Description
 - [ExifResponseDto](doc//ExifResponseDto.md)
 - [FaceDto](doc//FaceDto.md)
 - [FacialRecognitionConfig](doc//FacialRecognitionConfig.md)
-- [FileChecksumDto](doc//FileChecksumDto.md)
-- [FileChecksumResponseDto](doc//FileChecksumResponseDto.md)
-- [FileReportDto](doc//FileReportDto.md)
-- [FileReportFixDto](doc//FileReportFixDto.md)
-- [FileReportItemDto](doc//FileReportItemDto.md)
 - [FoldersResponse](doc//FoldersResponse.md)
 - [FoldersUpdate](doc//FoldersUpdate.md)
 - [ImageFormat](doc//ImageFormat.md)
@@ -361,7 +352,6 @@ Class | Method | HTTP request | Description
 - [MemoriesResponse](doc//MemoriesResponse.md)
 - [MemoriesUpdate](doc//MemoriesUpdate.md)
 - [MemoryCreateDto](doc//MemoryCreateDto.md)
-- [MemoryLaneResponseDto](doc//MemoryLaneResponseDto.md)
 - [MemoryResponseDto](doc//MemoryResponseDto.md)
 - [MemoryType](doc//MemoryType.md)
 - [MemoryUpdateDto](doc//MemoryUpdateDto.md)
@@ -381,8 +371,6 @@ Class | Method | HTTP request | Description
 - [OnThisDayDto](doc//OnThisDayDto.md)
 - [PartnerDirection](doc//PartnerDirection.md)
 - [PartnerResponseDto](doc//PartnerResponseDto.md)
-- [PathEntityType](doc//PathEntityType.md)
-- [PathType](doc//PathType.md)
 - [PeopleResponse](doc//PeopleResponse.md)
 - [PeopleResponseDto](doc//PeopleResponseDto.md)
 - [PeopleUpdate](doc//PeopleUpdate.md)
```
**mobile/openapi/lib/api.dart** (generated, 9 changed lines):

```diff
@@ -39,7 +39,6 @@ part 'api/deprecated_api.dart';
 part 'api/download_api.dart';
 part 'api/duplicates_api.dart';
 part 'api/faces_api.dart';
-part 'api/file_reports_api.dart';
 part 'api/jobs_api.dart';
 part 'api/libraries_api.dart';
 part 'api/map_api.dart';
@@ -133,11 +132,6 @@ part 'model/email_notifications_update.dart';
 part 'model/exif_response_dto.dart';
 part 'model/face_dto.dart';
 part 'model/facial_recognition_config.dart';
-part 'model/file_checksum_dto.dart';
-part 'model/file_checksum_response_dto.dart';
-part 'model/file_report_dto.dart';
-part 'model/file_report_fix_dto.dart';
-part 'model/file_report_item_dto.dart';
 part 'model/folders_response.dart';
 part 'model/folders_update.dart';
 part 'model/image_format.dart';
@@ -162,7 +156,6 @@ part 'model/map_reverse_geocode_response_dto.dart';
 part 'model/memories_response.dart';
 part 'model/memories_update.dart';
 part 'model/memory_create_dto.dart';
-part 'model/memory_lane_response_dto.dart';
 part 'model/memory_response_dto.dart';
 part 'model/memory_type.dart';
 part 'model/memory_update_dto.dart';
@@ -182,8 +175,6 @@ part 'model/o_auth_token_endpoint_auth_method.dart';
 part 'model/on_this_day_dto.dart';
 part 'model/partner_direction.dart';
 part 'model/partner_response_dto.dart';
-part 'model/path_entity_type.dart';
-part 'model/path_type.dart';
 part 'model/people_response.dart';
 part 'model/people_response_dto.dart';
 part 'model/people_update.dart';
```
**mobile/openapi/lib/api/assets_api.dart** (generated, 57 changed lines):

```diff
@@ -404,63 +404,6 @@ class AssetsApi {
     return null;
   }

-  /// Performs an HTTP 'GET /assets/memory-lane' operation and returns the [Response].
-  /// Parameters:
-  ///
-  /// * [int] day (required):
-  ///
-  /// * [int] month (required):
-  Future<Response> getMemoryLaneWithHttpInfo(int day, int month,) async {
-    // ignore: prefer_const_declarations
-    final apiPath = r'/assets/memory-lane';
-
-    // ignore: prefer_final_locals
-    Object? postBody;
-
-    final queryParams = <QueryParam>[];
-    final headerParams = <String, String>{};
-    final formParams = <String, String>{};
-
-      queryParams.addAll(_queryParams('', 'day', day));
-      queryParams.addAll(_queryParams('', 'month', month));
-
-    const contentTypes = <String>[];
-
-
-    return apiClient.invokeAPI(
-      apiPath,
-      'GET',
-      queryParams,
-      postBody,
-      headerParams,
-      formParams,
-      contentTypes.isEmpty ? null : contentTypes.first,
-    );
-  }
-
-  /// Parameters:
-  ///
-  /// * [int] day (required):
-  ///
-  /// * [int] month (required):
-  Future<List<MemoryLaneResponseDto>?> getMemoryLane(int day, int month,) async {
-    final response = await getMemoryLaneWithHttpInfo(day, month,);
-    if (response.statusCode >= HttpStatus.badRequest) {
-      throw ApiException(response.statusCode, await _decodeBodyBytes(response));
-    }
-    // When a remote server returns no body with a status of 204, we shall not decode it.
-    // At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
-    // FormatException when trying to decode an empty string.
-    if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
-      final responseBody = await _decodeBodyBytes(response);
-      return (await apiClient.deserializeAsync(responseBody, 'List<MemoryLaneResponseDto>') as List)
-        .cast<MemoryLaneResponseDto>()
-        .toList(growable: false);
-
-    }
-    return null;
-  }
-
   /// This property was deprecated in v1.116.0
   ///
   /// Note: This method returns the HTTP [Response].
```
**mobile/openapi/lib/api/file_reports_api.dart** (generated, deleted outright, 148 lines):

```diff
@@ -1,148 +0,0 @@
-//
-// AUTO-GENERATED FILE, DO NOT MODIFY!
-//
-// @dart=2.18
-
-// ignore_for_file: unused_element, unused_import
-// ignore_for_file: always_put_required_named_parameters_first
-// ignore_for_file: constant_identifier_names
-// ignore_for_file: lines_longer_than_80_chars
-
-part of openapi.api;
-
-
-class FileReportsApi {
-  FileReportsApi([ApiClient? apiClient]) : apiClient = apiClient ?? defaultApiClient;
-
-  final ApiClient apiClient;
-
-  /// Performs an HTTP 'POST /reports/fix' operation and returns the [Response].
-  /// Parameters:
-  ///
-  /// * [FileReportFixDto] fileReportFixDto (required):
-  Future<Response> fixAuditFilesWithHttpInfo(FileReportFixDto fileReportFixDto,) async {
-    // ignore: prefer_const_declarations
-    final apiPath = r'/reports/fix';
-
-    // ignore: prefer_final_locals
-    Object? postBody = fileReportFixDto;
-
-    final queryParams = <QueryParam>[];
-    final headerParams = <String, String>{};
-    final formParams = <String, String>{};
-
-    const contentTypes = <String>['application/json'];
-
-
-    return apiClient.invokeAPI(
-      apiPath,
-      'POST',
-      queryParams,
-      postBody,
-      headerParams,
-      formParams,
-      contentTypes.isEmpty ? null : contentTypes.first,
-    );
-  }
-
-  /// Parameters:
-  ///
-  /// * [FileReportFixDto] fileReportFixDto (required):
-  Future<void> fixAuditFiles(FileReportFixDto fileReportFixDto,) async {
-    final response = await fixAuditFilesWithHttpInfo(fileReportFixDto,);
-    if (response.statusCode >= HttpStatus.badRequest) {
-      throw ApiException(response.statusCode, await _decodeBodyBytes(response));
-    }
-  }
-
-  /// Performs an HTTP 'GET /reports' operation and returns the [Response].
-  Future<Response> getAuditFilesWithHttpInfo() async {
-    // ignore: prefer_const_declarations
-    final apiPath = r'/reports';
-
-    // ignore: prefer_final_locals
-    Object? postBody;
-
-    final queryParams = <QueryParam>[];
-    final headerParams = <String, String>{};
-    final formParams = <String, String>{};
-
-    const contentTypes = <String>[];
-
-
-    return apiClient.invokeAPI(
-      apiPath,
-      'GET',
-      queryParams,
-      postBody,
-      headerParams,
-      formParams,
-      contentTypes.isEmpty ? null : contentTypes.first,
-    );
-  }
-
-  Future<FileReportDto?> getAuditFiles() async {
-    final response = await getAuditFilesWithHttpInfo();
-    if (response.statusCode >= HttpStatus.badRequest) {
-      throw ApiException(response.statusCode, await _decodeBodyBytes(response));
-    }
-    // When a remote server returns no body with a status of 204, we shall not decode it.
-    // At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
-    // FormatException when trying to decode an empty string.
-    if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
-      return await apiClient.deserializeAsync(await _decodeBodyBytes(response), 'FileReportDto',) as FileReportDto;
-
-    }
-    return null;
-  }
-
-  /// Performs an HTTP 'POST /reports/checksum' operation and returns the [Response].
-  /// Parameters:
-  ///
-  /// * [FileChecksumDto] fileChecksumDto (required):
-  Future<Response> getFileChecksumsWithHttpInfo(FileChecksumDto fileChecksumDto,) async {
-    // ignore: prefer_const_declarations
-    final apiPath = r'/reports/checksum';
-
-    // ignore: prefer_final_locals
-    Object? postBody = fileChecksumDto;
-
-    final queryParams = <QueryParam>[];
-    final headerParams = <String, String>{};
-    final formParams = <String, String>{};
-
-    const contentTypes = <String>['application/json'];
-
-
-    return apiClient.invokeAPI(
-      apiPath,
-      'POST',
-      queryParams,
-      postBody,
-      headerParams,
-      formParams,
-      contentTypes.isEmpty ? null : contentTypes.first,
-    );
-  }
-
-  /// Parameters:
-  ///
-  /// * [FileChecksumDto] fileChecksumDto (required):
-  Future<List<FileChecksumResponseDto>?> getFileChecksums(FileChecksumDto fileChecksumDto,) async {
-    final response = await getFileChecksumsWithHttpInfo(fileChecksumDto,);
-    if (response.statusCode >= HttpStatus.badRequest) {
-      throw ApiException(response.statusCode, await _decodeBodyBytes(response));
-    }
-    // When a remote server returns no body with a status of 204, we shall not decode it.
-    // At the time of writing this, `dart:convert` will throw an "Unexpected end of input"
-    // FormatException when trying to decode an empty string.
-    if (response.body.isNotEmpty && response.statusCode != HttpStatus.noContent) {
-      final responseBody = await _decodeBodyBytes(response);
-      return (await apiClient.deserializeAsync(responseBody, 'List<FileChecksumResponseDto>') as List)
-        .cast<FileChecksumResponseDto>()
-        .toList(growable: false);
-
-    }
-    return null;
-  }
-}
```
**mobile/openapi/lib/api_client.dart** (generated, 16 changed lines):

```diff
@@ -320,16 +320,6 @@ class ApiClient {
         return FaceDto.fromJson(value);
       case 'FacialRecognitionConfig':
         return FacialRecognitionConfig.fromJson(value);
-      case 'FileChecksumDto':
-        return FileChecksumDto.fromJson(value);
-      case 'FileChecksumResponseDto':
-        return FileChecksumResponseDto.fromJson(value);
-      case 'FileReportDto':
-        return FileReportDto.fromJson(value);
-      case 'FileReportFixDto':
-        return FileReportFixDto.fromJson(value);
-      case 'FileReportItemDto':
-        return FileReportItemDto.fromJson(value);
       case 'FoldersResponse':
         return FoldersResponse.fromJson(value);
       case 'FoldersUpdate':
@@ -378,8 +368,6 @@ class ApiClient {
         return MemoriesUpdate.fromJson(value);
       case 'MemoryCreateDto':
         return MemoryCreateDto.fromJson(value);
-      case 'MemoryLaneResponseDto':
-        return MemoryLaneResponseDto.fromJson(value);
       case 'MemoryResponseDto':
         return MemoryResponseDto.fromJson(value);
       case 'MemoryType':
@@ -418,10 +406,6 @@ class ApiClient {
         return PartnerDirectionTypeTransformer().decode(value);
       case 'PartnerResponseDto':
         return PartnerResponseDto.fromJson(value);
-      case 'PathEntityType':
-        return PathEntityTypeTypeTransformer().decode(value);
-      case 'PathType':
-        return PathTypeTypeTransformer().decode(value);
       case 'PeopleResponse':
         return PeopleResponse.fromJson(value);
       case 'PeopleResponseDto':
```
6
mobile/openapi/lib/api_helper.dart
generated
@@ -112,12 +112,6 @@ String parameterToString(dynamic value) {
   if (value is PartnerDirection) {
     return PartnerDirectionTypeTransformer().encode(value).toString();
   }
-  if (value is PathEntityType) {
-    return PathEntityTypeTypeTransformer().encode(value).toString();
-  }
-  if (value is PathType) {
-    return PathTypeTypeTransformer().encode(value).toString();
-  }
   if (value is Permission) {
     return PermissionTypeTransformer().encode(value).toString();
   }
101
mobile/openapi/lib/model/file_checksum_dto.dart
generated
@@ -1,101 +0,0 @@
-//
-// AUTO-GENERATED FILE, DO NOT MODIFY!
-//
-// @dart=2.18
-
-// ignore_for_file: unused_element, unused_import
-// ignore_for_file: always_put_required_named_parameters_first
-// ignore_for_file: constant_identifier_names
-// ignore_for_file: lines_longer_than_80_chars
-
-part of openapi.api;
-
-class FileChecksumDto {
-  /// Returns a new [FileChecksumDto] instance.
-  FileChecksumDto({
-    this.filenames = const [],
-  });
-
-  List<String> filenames;
-
-  @override
-  bool operator ==(Object other) => identical(this, other) || other is FileChecksumDto &&
-    _deepEquality.equals(other.filenames, filenames);
-
-  @override
-  int get hashCode =>
-    // ignore: unnecessary_parenthesis
-    (filenames.hashCode);
-
-  @override
-  String toString() => 'FileChecksumDto[filenames=$filenames]';
-
-  Map<String, dynamic> toJson() {
-    final json = <String, dynamic>{};
-    json[r'filenames'] = this.filenames;
-    return json;
-  }
-
-  /// Returns a new [FileChecksumDto] instance and imports its values from
-  /// [value] if it's a [Map], null otherwise.
-  // ignore: prefer_constructors_over_static_methods
-  static FileChecksumDto? fromJson(dynamic value) {
-    upgradeDto(value, "FileChecksumDto");
-    if (value is Map) {
-      final json = value.cast<String, dynamic>();
-
-      return FileChecksumDto(
-        filenames: json[r'filenames'] is Iterable
-            ? (json[r'filenames'] as Iterable).cast<String>().toList(growable: false)
-            : const [],
-      );
-    }
-    return null;
-  }
-
-  static List<FileChecksumDto> listFromJson(dynamic json, {bool growable = false,}) {
-    final result = <FileChecksumDto>[];
-    if (json is List && json.isNotEmpty) {
-      for (final row in json) {
-        final value = FileChecksumDto.fromJson(row);
-        if (value != null) {
-          result.add(value);
-        }
-      }
-    }
-    return result.toList(growable: growable);
-  }
-
-  static Map<String, FileChecksumDto> mapFromJson(dynamic json) {
-    final map = <String, FileChecksumDto>{};
-    if (json is Map && json.isNotEmpty) {
-      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
-      for (final entry in json.entries) {
-        final value = FileChecksumDto.fromJson(entry.value);
-        if (value != null) {
-          map[entry.key] = value;
-        }
-      }
-    }
-    return map;
-  }
-
-  // maps a json object with a list of FileChecksumDto-objects as value to a dart map
-  static Map<String, List<FileChecksumDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
-    final map = <String, List<FileChecksumDto>>{};
-    if (json is Map && json.isNotEmpty) {
-      // ignore: parameter_assignments
-      json = json.cast<String, dynamic>();
-      for (final entry in json.entries) {
-        map[entry.key] = FileChecksumDto.listFromJson(entry.value, growable: growable,);
-      }
-    }
-    return map;
-  }
-
-  /// The list of required keys that must be present in a JSON.
-  static const requiredKeys = <String>{
-    'filenames',
-  };
-}
-
107
mobile/openapi/lib/model/file_checksum_response_dto.dart
generated
@@ -1,107 +0,0 @@
-//
-// AUTO-GENERATED FILE, DO NOT MODIFY!
-//
-// @dart=2.18
-
-// ignore_for_file: unused_element, unused_import
-// ignore_for_file: always_put_required_named_parameters_first
-// ignore_for_file: constant_identifier_names
-// ignore_for_file: lines_longer_than_80_chars
-
-part of openapi.api;
-
-class FileChecksumResponseDto {
-  /// Returns a new [FileChecksumResponseDto] instance.
-  FileChecksumResponseDto({
-    required this.checksum,
-    required this.filename,
-  });
-
-  String checksum;
-
-  String filename;
-
-  @override
-  bool operator ==(Object other) => identical(this, other) || other is FileChecksumResponseDto &&
-    other.checksum == checksum &&
-    other.filename == filename;
-
-  @override
-  int get hashCode =>
-    // ignore: unnecessary_parenthesis
-    (checksum.hashCode) +
-    (filename.hashCode);
-
-  @override
-  String toString() => 'FileChecksumResponseDto[checksum=$checksum, filename=$filename]';
-
-  Map<String, dynamic> toJson() {
-    final json = <String, dynamic>{};
-    json[r'checksum'] = this.checksum;
-    json[r'filename'] = this.filename;
-    return json;
-  }
-
-  /// Returns a new [FileChecksumResponseDto] instance and imports its values from
-  /// [value] if it's a [Map], null otherwise.
-  // ignore: prefer_constructors_over_static_methods
-  static FileChecksumResponseDto? fromJson(dynamic value) {
-    upgradeDto(value, "FileChecksumResponseDto");
-    if (value is Map) {
-      final json = value.cast<String, dynamic>();
-
-      return FileChecksumResponseDto(
-        checksum: mapValueOfType<String>(json, r'checksum')!,
-        filename: mapValueOfType<String>(json, r'filename')!,
-      );
-    }
-    return null;
-  }
-
-  static List<FileChecksumResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
-    final result = <FileChecksumResponseDto>[];
-    if (json is List && json.isNotEmpty) {
-      for (final row in json) {
-        final value = FileChecksumResponseDto.fromJson(row);
-        if (value != null) {
-          result.add(value);
-        }
-      }
-    }
-    return result.toList(growable: growable);
-  }
-
-  static Map<String, FileChecksumResponseDto> mapFromJson(dynamic json) {
-    final map = <String, FileChecksumResponseDto>{};
-    if (json is Map && json.isNotEmpty) {
-      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
-      for (final entry in json.entries) {
-        final value = FileChecksumResponseDto.fromJson(entry.value);
-        if (value != null) {
-          map[entry.key] = value;
-        }
-      }
-    }
-    return map;
-  }
-
-  // maps a json object with a list of FileChecksumResponseDto-objects as value to a dart map
-  static Map<String, List<FileChecksumResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
-    final map = <String, List<FileChecksumResponseDto>>{};
-    if (json is Map && json.isNotEmpty) {
-      // ignore: parameter_assignments
-      json = json.cast<String, dynamic>();
-      for (final entry in json.entries) {
-        map[entry.key] = FileChecksumResponseDto.listFromJson(entry.value, growable: growable,);
-      }
-    }
-    return map;
-  }
-
-  /// The list of required keys that must be present in a JSON.
-  static const requiredKeys = <String>{
-    'checksum',
-    'filename',
-  };
-}
-
109
mobile/openapi/lib/model/file_report_dto.dart
generated
@@ -1,109 +0,0 @@
-//
-// AUTO-GENERATED FILE, DO NOT MODIFY!
-//
-// @dart=2.18
-
-// ignore_for_file: unused_element, unused_import
-// ignore_for_file: always_put_required_named_parameters_first
-// ignore_for_file: constant_identifier_names
-// ignore_for_file: lines_longer_than_80_chars
-
-part of openapi.api;
-
-class FileReportDto {
-  /// Returns a new [FileReportDto] instance.
-  FileReportDto({
-    this.extras = const [],
-    this.orphans = const [],
-  });
-
-  List<String> extras;
-
-  List<FileReportItemDto> orphans;
-
-  @override
-  bool operator ==(Object other) => identical(this, other) || other is FileReportDto &&
-    _deepEquality.equals(other.extras, extras) &&
-    _deepEquality.equals(other.orphans, orphans);
-
-  @override
-  int get hashCode =>
-    // ignore: unnecessary_parenthesis
-    (extras.hashCode) +
-    (orphans.hashCode);
-
-  @override
-  String toString() => 'FileReportDto[extras=$extras, orphans=$orphans]';
-
-  Map<String, dynamic> toJson() {
-    final json = <String, dynamic>{};
-    json[r'extras'] = this.extras;
-    json[r'orphans'] = this.orphans;
-    return json;
-  }
-
-  /// Returns a new [FileReportDto] instance and imports its values from
-  /// [value] if it's a [Map], null otherwise.
-  // ignore: prefer_constructors_over_static_methods
-  static FileReportDto? fromJson(dynamic value) {
-    upgradeDto(value, "FileReportDto");
-    if (value is Map) {
-      final json = value.cast<String, dynamic>();
-
-      return FileReportDto(
-        extras: json[r'extras'] is Iterable
-            ? (json[r'extras'] as Iterable).cast<String>().toList(growable: false)
-            : const [],
-        orphans: FileReportItemDto.listFromJson(json[r'orphans']),
-      );
-    }
-    return null;
-  }
-
-  static List<FileReportDto> listFromJson(dynamic json, {bool growable = false,}) {
-    final result = <FileReportDto>[];
-    if (json is List && json.isNotEmpty) {
-      for (final row in json) {
-        final value = FileReportDto.fromJson(row);
-        if (value != null) {
-          result.add(value);
-        }
-      }
-    }
-    return result.toList(growable: growable);
-  }
-
-  static Map<String, FileReportDto> mapFromJson(dynamic json) {
-    final map = <String, FileReportDto>{};
-    if (json is Map && json.isNotEmpty) {
-      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
-      for (final entry in json.entries) {
-        final value = FileReportDto.fromJson(entry.value);
-        if (value != null) {
-          map[entry.key] = value;
-        }
-      }
-    }
-    return map;
-  }
-
-  // maps a json object with a list of FileReportDto-objects as value to a dart map
-  static Map<String, List<FileReportDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
-    final map = <String, List<FileReportDto>>{};
-    if (json is Map && json.isNotEmpty) {
-      // ignore: parameter_assignments
-      json = json.cast<String, dynamic>();
-      for (final entry in json.entries) {
-        map[entry.key] = FileReportDto.listFromJson(entry.value, growable: growable,);
-      }
-    }
-    return map;
-  }
-
-  /// The list of required keys that must be present in a JSON.
-  static const requiredKeys = <String>{
-    'extras',
-    'orphans',
-  };
-}
-
99
mobile/openapi/lib/model/file_report_fix_dto.dart
generated
@@ -1,99 +0,0 @@
-//
-// AUTO-GENERATED FILE, DO NOT MODIFY!
-//
-// @dart=2.18
-
-// ignore_for_file: unused_element, unused_import
-// ignore_for_file: always_put_required_named_parameters_first
-// ignore_for_file: constant_identifier_names
-// ignore_for_file: lines_longer_than_80_chars
-
-part of openapi.api;
-
-class FileReportFixDto {
-  /// Returns a new [FileReportFixDto] instance.
-  FileReportFixDto({
-    this.items = const [],
-  });
-
-  List<FileReportItemDto> items;
-
-  @override
-  bool operator ==(Object other) => identical(this, other) || other is FileReportFixDto &&
-    _deepEquality.equals(other.items, items);
-
-  @override
-  int get hashCode =>
-    // ignore: unnecessary_parenthesis
-    (items.hashCode);
-
-  @override
-  String toString() => 'FileReportFixDto[items=$items]';
-
-  Map<String, dynamic> toJson() {
-    final json = <String, dynamic>{};
-    json[r'items'] = this.items;
-    return json;
-  }
-
-  /// Returns a new [FileReportFixDto] instance and imports its values from
-  /// [value] if it's a [Map], null otherwise.
-  // ignore: prefer_constructors_over_static_methods
-  static FileReportFixDto? fromJson(dynamic value) {
-    upgradeDto(value, "FileReportFixDto");
-    if (value is Map) {
-      final json = value.cast<String, dynamic>();
-
-      return FileReportFixDto(
-        items: FileReportItemDto.listFromJson(json[r'items']),
-      );
-    }
-    return null;
-  }
-
-  static List<FileReportFixDto> listFromJson(dynamic json, {bool growable = false,}) {
-    final result = <FileReportFixDto>[];
-    if (json is List && json.isNotEmpty) {
-      for (final row in json) {
-        final value = FileReportFixDto.fromJson(row);
-        if (value != null) {
-          result.add(value);
-        }
-      }
-    }
-    return result.toList(growable: growable);
-  }
-
-  static Map<String, FileReportFixDto> mapFromJson(dynamic json) {
-    final map = <String, FileReportFixDto>{};
-    if (json is Map && json.isNotEmpty) {
-      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
-      for (final entry in json.entries) {
-        final value = FileReportFixDto.fromJson(entry.value);
-        if (value != null) {
-          map[entry.key] = value;
-        }
-      }
-    }
-    return map;
-  }
-
-  // maps a json object with a list of FileReportFixDto-objects as value to a dart map
-  static Map<String, List<FileReportFixDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
-    final map = <String, List<FileReportFixDto>>{};
-    if (json is Map && json.isNotEmpty) {
-      // ignore: parameter_assignments
-      json = json.cast<String, dynamic>();
-      for (final entry in json.entries) {
-        map[entry.key] = FileReportFixDto.listFromJson(entry.value, growable: growable,);
-      }
-    }
-    return map;
-  }
-
-  /// The list of required keys that must be present in a JSON.
-  static const requiredKeys = <String>{
-    'items',
-  };
-}
-
140
mobile/openapi/lib/model/file_report_item_dto.dart
generated
@@ -1,140 +0,0 @@
-//
-// AUTO-GENERATED FILE, DO NOT MODIFY!
-//
-// @dart=2.18
-
-// ignore_for_file: unused_element, unused_import
-// ignore_for_file: always_put_required_named_parameters_first
-// ignore_for_file: constant_identifier_names
-// ignore_for_file: lines_longer_than_80_chars
-
-part of openapi.api;
-
-class FileReportItemDto {
-  /// Returns a new [FileReportItemDto] instance.
-  FileReportItemDto({
-    this.checksum,
-    required this.entityId,
-    required this.entityType,
-    required this.pathType,
-    required this.pathValue,
-  });
-
-  ///
-  /// Please note: This property should have been non-nullable! Since the specification file
-  /// does not include a default value (using the "default:" property), however, the generated
-  /// source code must fall back to having a nullable type.
-  /// Consider adding a "default:" property in the specification file to hide this note.
-  ///
-  String? checksum;
-
-  String entityId;
-
-  PathEntityType entityType;
-
-  PathType pathType;
-
-  String pathValue;
-
-  @override
-  bool operator ==(Object other) => identical(this, other) || other is FileReportItemDto &&
-    other.checksum == checksum &&
-    other.entityId == entityId &&
-    other.entityType == entityType &&
-    other.pathType == pathType &&
-    other.pathValue == pathValue;
-
-  @override
-  int get hashCode =>
-    // ignore: unnecessary_parenthesis
-    (checksum == null ? 0 : checksum!.hashCode) +
-    (entityId.hashCode) +
-    (entityType.hashCode) +
-    (pathType.hashCode) +
-    (pathValue.hashCode);
-
-  @override
-  String toString() => 'FileReportItemDto[checksum=$checksum, entityId=$entityId, entityType=$entityType, pathType=$pathType, pathValue=$pathValue]';
-
-  Map<String, dynamic> toJson() {
-    final json = <String, dynamic>{};
-    if (this.checksum != null) {
-      json[r'checksum'] = this.checksum;
-    } else {
-      // json[r'checksum'] = null;
-    }
-    json[r'entityId'] = this.entityId;
-    json[r'entityType'] = this.entityType;
-    json[r'pathType'] = this.pathType;
-    json[r'pathValue'] = this.pathValue;
-    return json;
-  }
-
-  /// Returns a new [FileReportItemDto] instance and imports its values from
-  /// [value] if it's a [Map], null otherwise.
-  // ignore: prefer_constructors_over_static_methods
-  static FileReportItemDto? fromJson(dynamic value) {
-    upgradeDto(value, "FileReportItemDto");
-    if (value is Map) {
-      final json = value.cast<String, dynamic>();
-
-      return FileReportItemDto(
-        checksum: mapValueOfType<String>(json, r'checksum'),
-        entityId: mapValueOfType<String>(json, r'entityId')!,
-        entityType: PathEntityType.fromJson(json[r'entityType'])!,
-        pathType: PathType.fromJson(json[r'pathType'])!,
-        pathValue: mapValueOfType<String>(json, r'pathValue')!,
-      );
-    }
-    return null;
-  }
-
-  static List<FileReportItemDto> listFromJson(dynamic json, {bool growable = false,}) {
-    final result = <FileReportItemDto>[];
-    if (json is List && json.isNotEmpty) {
-      for (final row in json) {
-        final value = FileReportItemDto.fromJson(row);
-        if (value != null) {
-          result.add(value);
-        }
-      }
-    }
-    return result.toList(growable: growable);
-  }
-
-  static Map<String, FileReportItemDto> mapFromJson(dynamic json) {
-    final map = <String, FileReportItemDto>{};
-    if (json is Map && json.isNotEmpty) {
-      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
-      for (final entry in json.entries) {
-        final value = FileReportItemDto.fromJson(entry.value);
-        if (value != null) {
-          map[entry.key] = value;
-        }
-      }
-    }
-    return map;
-  }
-
-  // maps a json object with a list of FileReportItemDto-objects as value to a dart map
-  static Map<String, List<FileReportItemDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
-    final map = <String, List<FileReportItemDto>>{};
-    if (json is Map && json.isNotEmpty) {
-      // ignore: parameter_assignments
-      json = json.cast<String, dynamic>();
-      for (final entry in json.entries) {
-        map[entry.key] = FileReportItemDto.listFromJson(entry.value, growable: growable,);
-      }
-    }
-    return map;
-  }
-
-  /// The list of required keys that must be present in a JSON.
-  static const requiredKeys = <String>{
-    'entityId',
-    'entityType',
-    'pathType',
-    'pathValue',
-  };
-}
-
6
mobile/openapi/lib/model/job_command.dart
generated
@@ -26,7 +26,7 @@ class JobCommand {
   static const start = JobCommand._(r'start');
   static const pause = JobCommand._(r'pause');
   static const resume = JobCommand._(r'resume');
-  static const empty = JobCommand._(r'empty');
+  static const clear = JobCommand._(r'clear');
   static const clearFailed = JobCommand._(r'clear-failed');
 
   /// List of all possible values in this [enum][JobCommand].
@@ -34,7 +34,7 @@ class JobCommand {
     start,
     pause,
     resume,
-    empty,
+    clear,
     clearFailed,
   ];
 
@@ -77,7 +77,7 @@ class JobCommandTypeTransformer {
         case r'start': return JobCommand.start;
        case r'pause': return JobCommand.pause;
         case r'resume': return JobCommand.resume;
-        case r'empty': return JobCommand.empty;
+        case r'clear': return JobCommand.clear;
         case r'clear-failed': return JobCommand.clearFailed;
         default:
           if (!allowNull) {
18
mobile/openapi/lib/model/job_counts_dto.dart
generated
@@ -14,54 +14,42 @@ class JobCountsDto {
   /// Returns a new [JobCountsDto] instance.
   JobCountsDto({
     required this.active,
-    required this.completed,
     required this.delayed,
     required this.failed,
-    required this.paused,
     required this.waiting,
   });
 
   int active;
 
-  int completed;
-
   int delayed;
 
   int failed;
 
-  int paused;
-
   int waiting;
 
   @override
   bool operator ==(Object other) => identical(this, other) || other is JobCountsDto &&
     other.active == active &&
-    other.completed == completed &&
     other.delayed == delayed &&
     other.failed == failed &&
-    other.paused == paused &&
     other.waiting == waiting;
 
   @override
   int get hashCode =>
     // ignore: unnecessary_parenthesis
     (active.hashCode) +
-    (completed.hashCode) +
     (delayed.hashCode) +
     (failed.hashCode) +
-    (paused.hashCode) +
     (waiting.hashCode);
 
   @override
-  String toString() => 'JobCountsDto[active=$active, completed=$completed, delayed=$delayed, failed=$failed, paused=$paused, waiting=$waiting]';
+  String toString() => 'JobCountsDto[active=$active, delayed=$delayed, failed=$failed, waiting=$waiting]';
 
   Map<String, dynamic> toJson() {
     final json = <String, dynamic>{};
     json[r'active'] = this.active;
-    json[r'completed'] = this.completed;
     json[r'delayed'] = this.delayed;
     json[r'failed'] = this.failed;
-    json[r'paused'] = this.paused;
     json[r'waiting'] = this.waiting;
     return json;
   }
@@ -76,10 +64,8 @@ class JobCountsDto {
 
       return JobCountsDto(
         active: mapValueOfType<int>(json, r'active')!,
-        completed: mapValueOfType<int>(json, r'completed')!,
        delayed: mapValueOfType<int>(json, r'delayed')!,
         failed: mapValueOfType<int>(json, r'failed')!,
-        paused: mapValueOfType<int>(json, r'paused')!,
         waiting: mapValueOfType<int>(json, r'waiting')!,
       );
     }
@@ -129,10 +115,8 @@ class JobCountsDto {
   /// The list of required keys that must be present in a JSON.
   static const requiredKeys = <String>{
     'active',
-    'completed',
     'delayed',
     'failed',
-    'paused',
     'waiting',
   };
 }
107
mobile/openapi/lib/model/memory_lane_response_dto.dart
generated
@@ -1,107 +0,0 @@
-//
-// AUTO-GENERATED FILE, DO NOT MODIFY!
-//
-// @dart=2.18
-
-// ignore_for_file: unused_element, unused_import
-// ignore_for_file: always_put_required_named_parameters_first
-// ignore_for_file: constant_identifier_names
-// ignore_for_file: lines_longer_than_80_chars
-
-part of openapi.api;
-
-class MemoryLaneResponseDto {
-  /// Returns a new [MemoryLaneResponseDto] instance.
-  MemoryLaneResponseDto({
-    this.assets = const [],
-    required this.yearsAgo,
-  });
-
-  List<AssetResponseDto> assets;
-
-  int yearsAgo;
-
-  @override
-  bool operator ==(Object other) => identical(this, other) || other is MemoryLaneResponseDto &&
-    _deepEquality.equals(other.assets, assets) &&
-    other.yearsAgo == yearsAgo;
-
-  @override
-  int get hashCode =>
-    // ignore: unnecessary_parenthesis
-    (assets.hashCode) +
-    (yearsAgo.hashCode);
-
-  @override
-  String toString() => 'MemoryLaneResponseDto[assets=$assets, yearsAgo=$yearsAgo]';
-
-  Map<String, dynamic> toJson() {
-    final json = <String, dynamic>{};
-    json[r'assets'] = this.assets;
-    json[r'yearsAgo'] = this.yearsAgo;
-    return json;
-  }
-
-  /// Returns a new [MemoryLaneResponseDto] instance and imports its values from
-  /// [value] if it's a [Map], null otherwise.
-  // ignore: prefer_constructors_over_static_methods
-  static MemoryLaneResponseDto? fromJson(dynamic value) {
-    upgradeDto(value, "MemoryLaneResponseDto");
-    if (value is Map) {
-      final json = value.cast<String, dynamic>();
-
-      return MemoryLaneResponseDto(
-        assets: AssetResponseDto.listFromJson(json[r'assets']),
-        yearsAgo: mapValueOfType<int>(json, r'yearsAgo')!,
-      );
-    }
-    return null;
-  }
-
-  static List<MemoryLaneResponseDto> listFromJson(dynamic json, {bool growable = false,}) {
-    final result = <MemoryLaneResponseDto>[];
-    if (json is List && json.isNotEmpty) {
-      for (final row in json) {
-        final value = MemoryLaneResponseDto.fromJson(row);
-        if (value != null) {
-          result.add(value);
-        }
-      }
-    }
-    return result.toList(growable: growable);
-  }
-
-  static Map<String, MemoryLaneResponseDto> mapFromJson(dynamic json) {
-    final map = <String, MemoryLaneResponseDto>{};
-    if (json is Map && json.isNotEmpty) {
-      json = json.cast<String, dynamic>(); // ignore: parameter_assignments
-      for (final entry in json.entries) {
-        final value = MemoryLaneResponseDto.fromJson(entry.value);
-        if (value != null) {
-          map[entry.key] = value;
-        }
-      }
-    }
-    return map;
-  }
-
-  // maps a json object with a list of MemoryLaneResponseDto-objects as value to a dart map
-  static Map<String, List<MemoryLaneResponseDto>> mapListFromJson(dynamic json, {bool growable = false,}) {
-    final map = <String, List<MemoryLaneResponseDto>>{};
-    if (json is Map && json.isNotEmpty) {
-      // ignore: parameter_assignments
-      json = json.cast<String, dynamic>();
-      for (final entry in json.entries) {
-        map[entry.key] = MemoryLaneResponseDto.listFromJson(entry.value, growable: growable,);
-      }
-    }
-    return map;
-  }
-
-  /// The list of required keys that must be present in a JSON.
-  static const requiredKeys = <String>{
-    'assets',
-    'yearsAgo',
-  };
-}
-
88
mobile/openapi/lib/model/path_entity_type.dart
generated
@@ -1,88 +0,0 @@
-//
-// AUTO-GENERATED FILE, DO NOT MODIFY!
-//
-// @dart=2.18
-
-// ignore_for_file: unused_element, unused_import
-// ignore_for_file: always_put_required_named_parameters_first
-// ignore_for_file: constant_identifier_names
-// ignore_for_file: lines_longer_than_80_chars
-
-part of openapi.api;
-
-
-class PathEntityType {
-  /// Instantiate a new enum with the provided [value].
-  const PathEntityType._(this.value);
-
-  /// The underlying value of this enum member.
-  final String value;
-
-  @override
-  String toString() => value;
-
-  String toJson() => value;
-
-  static const asset = PathEntityType._(r'asset');
-  static const person = PathEntityType._(r'person');
-  static const user = PathEntityType._(r'user');
-
-  /// List of all possible values in this [enum][PathEntityType].
-  static const values = <PathEntityType>[
-    asset,
-    person,
-    user,
-  ];
-
-  static PathEntityType? fromJson(dynamic value) => PathEntityTypeTypeTransformer().decode(value);
-
-  static List<PathEntityType> listFromJson(dynamic json, {bool growable = false,}) {
-    final result = <PathEntityType>[];
-    if (json is List && json.isNotEmpty) {
-      for (final row in json) {
-        final value = PathEntityType.fromJson(row);
-        if (value != null) {
-          result.add(value);
-        }
-      }
-    }
-    return result.toList(growable: growable);
-  }
-}
-
-/// Transformation class that can [encode] an instance of [PathEntityType] to String,
-/// and [decode] dynamic data back to [PathEntityType].
-class PathEntityTypeTypeTransformer {
-  factory PathEntityTypeTypeTransformer() => _instance ??= const PathEntityTypeTypeTransformer._();
-
-  const PathEntityTypeTypeTransformer._();
-
-  String encode(PathEntityType data) => data.value;
-
-  /// Decodes a [dynamic value][data] to a PathEntityType.
-  ///
-  /// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
-  /// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
-  /// cannot be decoded successfully, then an [UnimplementedError] is thrown.
-  ///
-  /// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
-  /// and users are still using an old app with the old code.
-  PathEntityType? decode(dynamic data, {bool allowNull = true}) {
-    if (data != null) {
-      switch (data) {
-        case r'asset': return PathEntityType.asset;
-        case r'person': return PathEntityType.person;
-        case r'user': return PathEntityType.user;
-        default:
-          if (!allowNull) {
-            throw ArgumentError('Unknown enum value to decode: $data');
-          }
-      }
-    }
-    return null;
-  }
-
-  /// Singleton [PathEntityTypeTypeTransformer] instance.
-  static PathEntityTypeTypeTransformer? _instance;
-}
-
103
mobile/openapi/lib/model/path_type.dart
generated
@@ -1,103 +0,0 @@
-//
-// AUTO-GENERATED FILE, DO NOT MODIFY!
-//
-// @dart=2.18
-
-// ignore_for_file: unused_element, unused_import
-// ignore_for_file: always_put_required_named_parameters_first
-// ignore_for_file: constant_identifier_names
-// ignore_for_file: lines_longer_than_80_chars
-
-part of openapi.api;
-
-
-class PathType {
-  /// Instantiate a new enum with the provided [value].
-  const PathType._(this.value);
-
-  /// The underlying value of this enum member.
-  final String value;
-
-  @override
-  String toString() => value;
-
-  String toJson() => value;
-
-  static const original = PathType._(r'original');
-  static const fullsize = PathType._(r'fullsize');
-  static const preview = PathType._(r'preview');
-  static const thumbnail = PathType._(r'thumbnail');
-  static const encodedVideo = PathType._(r'encoded_video');
-  static const sidecar = PathType._(r'sidecar');
-  static const face = PathType._(r'face');
-  static const profile = PathType._(r'profile');
-
-  /// List of all possible values in this [enum][PathType].
-  static const values = <PathType>[
-    original,
-    fullsize,
-    preview,
-    thumbnail,
-    encodedVideo,
-    sidecar,
-    face,
-    profile,
-  ];
-
-  static PathType? fromJson(dynamic value) => PathTypeTypeTransformer().decode(value);
-
-  static List<PathType> listFromJson(dynamic json, {bool growable = false,}) {
-    final result = <PathType>[];
-    if (json is List && json.isNotEmpty) {
-      for (final row in json) {
-        final value = PathType.fromJson(row);
-        if (value != null) {
-          result.add(value);
-        }
-      }
-    }
-    return result.toList(growable: growable);
-  }
-}
-
-/// Transformation class that can [encode] an instance of [PathType] to String,
-/// and [decode] dynamic data back to [PathType].
-class PathTypeTypeTransformer {
-  factory PathTypeTypeTransformer() => _instance ??= const PathTypeTypeTransformer._();
-
-  const PathTypeTypeTransformer._();
-
-  String encode(PathType data) => data.value;
-
-  /// Decodes a [dynamic value][data] to a PathType.
-  ///
-  /// If [allowNull] is true and the [dynamic value][data] cannot be decoded successfully,
-  /// then null is returned. However, if [allowNull] is false and the [dynamic value][data]
-  /// cannot be decoded successfully, then an [UnimplementedError] is thrown.
-  ///
-  /// The [allowNull] is very handy when an API changes and a new enum value is added or removed,
-  /// and users are still using an old app with the old code.
-  PathType? decode(dynamic data, {bool allowNull = true}) {
-    if (data != null) {
-      switch (data) {
-        case r'original': return PathType.original;
-        case r'fullsize': return PathType.fullsize;
-        case r'preview': return PathType.preview;
-        case r'thumbnail': return PathType.thumbnail;
-        case r'encoded_video': return PathType.encodedVideo;
-        case r'sidecar': return PathType.sidecar;
-        case r'face': return PathType.face;
-        case r'profile': return PathType.profile;
-        default:
-          if (!allowNull) {
-            throw ArgumentError('Unknown enum value to decode: $data');
-          }
-      }
-    }
-    return null;
-  }
-
-  /// Singleton [PathTypeTypeTransformer] instance.
-  static PathTypeTypeTransformer? _instance;
-}
-
24
mobile/openapi/lib/model/queue_status_dto.dart
generated
@@ -13,32 +13,26 @@ part of openapi.api;
 class QueueStatusDto {
   /// Returns a new [QueueStatusDto] instance.
   QueueStatusDto({
-    required this.isActive,
-    required this.isPaused,
+    required this.paused,
   });
 
-  bool isActive;
-
-  bool isPaused;
+  bool paused;
 
   @override
   bool operator ==(Object other) => identical(this, other) || other is QueueStatusDto &&
-    other.isActive == isActive &&
-    other.isPaused == isPaused;
+    other.paused == paused;
 
   @override
   int get hashCode =>
     // ignore: unnecessary_parenthesis
-    (isActive.hashCode) +
-    (isPaused.hashCode);
+    (paused.hashCode);
 
   @override
-  String toString() => 'QueueStatusDto[isActive=$isActive, isPaused=$isPaused]';
+  String toString() => 'QueueStatusDto[paused=$paused]';
 
   Map<String, dynamic> toJson() {
     final json = <String, dynamic>{};
-    json[r'isActive'] = this.isActive;
-    json[r'isPaused'] = this.isPaused;
+    json[r'paused'] = this.paused;
     return json;
   }
@@ -51,8 +45,7 @@ class QueueStatusDto {
       final json = value.cast<String, dynamic>();
 
       return QueueStatusDto(
-        isActive: mapValueOfType<bool>(json, r'isActive')!,
-        isPaused: mapValueOfType<bool>(json, r'isPaused')!,
+        paused: mapValueOfType<bool>(json, r'paused')!,
       );
     }
     return null;
@@ -100,8 +93,7 @@ class QueueStatusDto {
 
   /// The list of required keys that must be present in a JSON.
   static const requiredKeys = <String>{
-    'isActive',
-    'isPaused',
+    'paused',
   };
 }
@@ -1726,62 +1726,6 @@
         ]
       }
     },
-    "/assets/memory-lane": {
-      "get": {
-        "operationId": "getMemoryLane",
-        "parameters": [
-          {
-            "name": "day",
-            "required": true,
-            "in": "query",
-            "schema": {
-              "minimum": 1,
-              "maximum": 31,
-              "type": "integer"
-            }
-          },
-          {
-            "name": "month",
-            "required": true,
-            "in": "query",
-            "schema": {
-              "minimum": 1,
-              "maximum": 12,
-              "type": "integer"
-            }
-          }
-        ],
-        "responses": {
-          "200": {
-            "content": {
-              "application/json": {
-                "schema": {
-                  "items": {
-                    "$ref": "#/components/schemas/MemoryLaneResponseDto"
-                  },
-                  "type": "array"
-                }
-              }
-            },
-            "description": ""
-          }
-        },
-        "security": [
-          {
-            "bearer": []
-          },
-          {
-            "cookie": []
-          },
-          {
-            "api_key": []
-          }
-        ],
-        "tags": [
-          "Assets"
-        ]
-      }
-    },
     "/assets/random": {
       "get": {
         "deprecated": true,
@@ -4651,118 +4595,6 @@
         ]
       }
     },
-    "/reports": {
-      "get": {
-        "operationId": "getAuditFiles",
-        "parameters": [],
-        "responses": {
-          "200": {
-            "content": {
-              "application/json": {
-                "schema": {
-                  "$ref": "#/components/schemas/FileReportDto"
-                }
-              }
-            },
-            "description": ""
-          }
-        },
-        "security": [
-          {
-            "bearer": []
-          },
-          {
-            "cookie": []
-          },
-          {
-            "api_key": []
-          }
-        ],
-        "tags": [
-          "File Reports"
-        ]
-      }
-    },
-    "/reports/checksum": {
-      "post": {
-        "operationId": "getFileChecksums",
-        "parameters": [],
-        "requestBody": {
-          "content": {
-            "application/json": {
-              "schema": {
-                "$ref": "#/components/schemas/FileChecksumDto"
-              }
-            }
-          },
-          "required": true
-        },
-        "responses": {
-          "201": {
-            "content": {
-              "application/json": {
-                "schema": {
-                  "items": {
-                    "$ref": "#/components/schemas/FileChecksumResponseDto"
-                  },
-                  "type": "array"
-                }
-              }
-            },
-            "description": ""
-          }
-        },
-        "security": [
-          {
-            "bearer": []
-          },
-          {
-            "cookie": []
-          },
-          {
-            "api_key": []
-          }
-        ],
-        "tags": [
-          "File Reports"
-        ]
-      }
-    },
-    "/reports/fix": {
-      "post": {
-        "operationId": "fixAuditFiles",
-        "parameters": [],
-        "requestBody": {
-          "content": {
-            "application/json": {
-              "schema": {
-                "$ref": "#/components/schemas/FileReportFixDto"
-              }
-            }
-          },
-          "required": true
-        },
-        "responses": {
-          "201": {
-            "description": ""
-          }
-        },
-        "security": [
-          {
-            "bearer": []
-          },
-          {
-            "cookie": []
-          },
-          {
-            "api_key": []
-          }
-        ],
-        "tags": [
-          "File Reports"
-        ]
-      }
-    },
     "/search/cities": {
      "get": {
         "operationId": "getAssetsByCity",
@@ -9749,105 +9581,6 @@
       ],
       "type": "object"
     },
-    "FileChecksumDto": {
-      "properties": {
-        "filenames": {
-          "items": {
-            "type": "string"
-          },
-          "type": "array"
-        }
-      },
-      "required": [
-        "filenames"
-      ],
-      "type": "object"
-    },
-    "FileChecksumResponseDto": {
-      "properties": {
-        "checksum": {
-          "type": "string"
-        },
-        "filename": {
-          "type": "string"
-        }
-      },
-      "required": [
-        "checksum",
-        "filename"
-      ],
-      "type": "object"
-    },
-    "FileReportDto": {
-      "properties": {
-        "extras": {
-          "items": {
-            "type": "string"
-          },
-          "type": "array"
-        },
-        "orphans": {
-          "items": {
-            "$ref": "#/components/schemas/FileReportItemDto"
-          },
-          "type": "array"
-        }
-      },
-      "required": [
-        "extras",
-        "orphans"
-      ],
-      "type": "object"
-    },
-    "FileReportFixDto": {
-      "properties": {
-        "items": {
-          "items": {
-            "$ref": "#/components/schemas/FileReportItemDto"
-          },
-          "type": "array"
-        }
-      },
-      "required": [
-        "items"
-      ],
-      "type": "object"
-    },
-    "FileReportItemDto": {
-      "properties": {
-        "checksum": {
-          "type": "string"
-        },
-        "entityId": {
-          "format": "uuid",
-          "type": "string"
-        },
-        "entityType": {
-          "allOf": [
-            {
-              "$ref": "#/components/schemas/PathEntityType"
-            }
-          ]
-        },
-        "pathType": {
-          "allOf": [
-            {
-              "$ref": "#/components/schemas/PathType"
-            }
-          ]
-        },
-        "pathValue": {
-          "type": "string"
-        }
-      },
-      "required": [
-        "entityId",
-        "entityType",
-        "pathType",
-        "pathValue"
-      ],
-      "type": "object"
-    },
     "FoldersResponse": {
       "properties": {
         "enabled": {
@@ -9888,7 +9621,7 @@
           "start",
           "pause",
           "resume",
-          "empty",
+          "clear",
           "clear-failed"
         ],
         "type": "string"
@@ -9916,28 +9649,20 @@
         "active": {
           "type": "integer"
         },
-        "completed": {
-          "type": "integer"
-        },
        "delayed": {
           "type": "integer"
         },
         "failed": {
           "type": "integer"
         },
-        "paused": {
-          "type": "integer"
-        },
         "waiting": {
           "type": "integer"
         }
       },
       "required": [
         "active",
-        "completed",
         "delayed",
         "failed",
-        "paused",
         "waiting"
       ],
       "type": "object"
@@ -10328,24 +10053,6 @@
       ],
       "type": "object"
     },
-    "MemoryLaneResponseDto": {
-      "properties": {
-        "assets": {
-          "items": {
-            "$ref": "#/components/schemas/AssetResponseDto"
-          },
-          "type": "array"
-        },
-        "yearsAgo": {
-          "type": "integer"
-        }
-      },
-      "required": [
-        "assets",
-        "yearsAgo"
-      ],
-      "type": "object"
-    },
     "MemoryResponseDto": {
       "properties": {
         "assets": {
@@ -10889,27 +10596,6 @@
       ],
       "type": "object"
     },
-    "PathEntityType": {
-      "enum": [
-        "asset",
-        "person",
-        "user"
-      ],
-      "type": "string"
-    },
-    "PathType": {
-      "enum": [
-        "original",
-        "fullsize",
-        "preview",
-        "thumbnail",
-        "encoded_video",
-        "sidecar",
-        "face",
-        "profile"
-      ],
-      "type": "string"
-    },
     "PeopleResponse": {
       "properties": {
         "enabled": {
@@ -11313,16 +10999,12 @@
     },
     "QueueStatusDto": {
       "properties": {
-        "isActive": {
-          "type": "boolean"
-        },
-        "isPaused": {
+        "paused": {
           "type": "boolean"
         }
       },
       "required": [
-        "isActive",
-        "isPaused"
+        "paused"
       ],
      "type": "object"
     },
@@ -462,10 +462,6 @@ export type AssetJobsDto = {
|
|||||||
assetIds: string[];
|
assetIds: string[];
|
||||||
name: AssetJobName;
|
name: AssetJobName;
|
||||||
};
|
};
|
||||||
export type MemoryLaneResponseDto = {
|
|
||||||
assets: AssetResponseDto[];
|
|
||||||
yearsAgo: number;
|
|
||||||
};
|
|
||||||
export type AssetStatsResponseDto = {
|
export type AssetStatsResponseDto = {
|
||||||
images: number;
|
images: number;
|
||||||
total: number;
|
total: number;
|
||||||
@@ -581,15 +577,12 @@ export type FaceDto = {
|
|||||||
};
|
};
|
||||||
export type JobCountsDto = {
|
export type JobCountsDto = {
|
||||||
active: number;
|
active: number;
|
||||||
completed: number;
|
|
||||||
delayed: number;
|
delayed: number;
|
||||||
failed: number;
|
failed: number;
|
||||||
paused: number;
|
|
||||||
waiting: number;
|
waiting: number;
|
||||||
};
|
};
|
||||||
export type QueueStatusDto = {
|
export type QueueStatusDto = {
|
||||||
isActive: boolean;
|
paused: boolean;
|
||||||
isPaused: boolean;
|
|
||||||
};
|
};
|
||||||
export type JobStatusDto = {
|
export type JobStatusDto = {
|
||||||
jobCounts: JobCountsDto;
|
jobCounts: JobCountsDto;
|
||||||
@@ -800,27 +793,6 @@ export type AssetFaceUpdateDto = {
|
|||||||
export type PersonStatisticsResponseDto = {
|
export type PersonStatisticsResponseDto = {
|
||||||
assets: number;
|
assets: number;
|
||||||
};
|
};
|
||||||
export type FileReportItemDto = {
|
|
||||||
checksum?: string;
|
|
||||||
entityId: string;
|
|
||||||
entityType: PathEntityType;
|
|
||||||
pathType: PathType;
|
|
||||||
pathValue: string;
|
|
||||||
};
|
|
||||||
export type FileReportDto = {
|
|
||||||
extras: string[];
|
|
||||||
orphans: FileReportItemDto[];
|
|
||||||
};
|
|
||||||
export type FileChecksumDto = {
|
|
||||||
filenames: string[];
|
|
||||||
};
|
|
||||||
export type FileChecksumResponseDto = {
|
|
||||||
checksum: string;
|
|
||||||
filename: string;
|
|
||||||
};
|
|
||||||
export type FileReportFixDto = {
|
|
||||||
items: FileReportItemDto[];
|
|
||||||
};
|
|
||||||
export type SearchExploreItem = {
|
export type SearchExploreItem = {
|
||||||
data: AssetResponseDto;
|
data: AssetResponseDto;
|
||||||
value: string;
|
value: string;
|
||||||
@@ -1887,20 +1859,6 @@ export function runAssetJobs({ assetJobsDto }: {
|
|||||||
body: assetJobsDto
|
body: assetJobsDto
|
||||||
})));
|
})));
|
||||||
}
|
}
|
||||||
export function getMemoryLane({ day, month }: {
|
|
||||||
day: number;
|
|
||||||
month: number;
|
|
||||||
}, opts?: Oazapfts.RequestOpts) {
|
|
||||||
return oazapfts.ok(oazapfts.fetchJson<{
|
|
||||||
status: 200;
|
|
||||||
data: MemoryLaneResponseDto[];
|
|
||||||
}>(`/assets/memory-lane${QS.query(QS.explode({
|
|
||||||
day,
|
|
||||||
month
|
|
||||||
}))}`, {
|
|
||||||
...opts
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
/**
|
/**
|
||||||
* This property was deprecated in v1.116.0
|
* This property was deprecated in v1.116.0
|
||||||
*/
|
*/
|
||||||
@@ -2663,35 +2621,6 @@ export function getPersonThumbnail({ id }: {
|
|||||||
...opts
|
...opts
|
||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
export function getAuditFiles(opts?: Oazapfts.RequestOpts) {
|
|
||||||
return oazapfts.ok(oazapfts.fetchJson<{
|
|
||||||
status: 200;
|
|
||||||
data: FileReportDto;
|
|
||||||
}>("/reports", {
|
|
||||||
...opts
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
export function getFileChecksums({ fileChecksumDto }: {
|
|
||||||
fileChecksumDto: FileChecksumDto;
|
|
||||||
}, opts?: Oazapfts.RequestOpts) {
|
|
||||||
return oazapfts.ok(oazapfts.fetchJson<{
|
|
||||||
status: 201;
|
|
||||||
data: FileChecksumResponseDto[];
|
|
||||||
}>("/reports/checksum", oazapfts.json({
|
|
||||||
...opts,
|
|
||||||
method: "POST",
|
|
||||||
body: fileChecksumDto
|
|
||||||
})));
|
|
||||||
}
|
|
||||||
export function fixAuditFiles({ fileReportFixDto }: {
|
|
||||||
fileReportFixDto: FileReportFixDto;
|
|
||||||
}, opts?: Oazapfts.RequestOpts) {
|
|
||||||
return oazapfts.ok(oazapfts.fetchText("/reports/fix", oazapfts.json({
|
|
||||||
...opts,
|
|
||||||
method: "POST",
|
|
||||||
body: fileReportFixDto
|
|
||||||
})));
|
|
||||||
}
|
|
||||||
export function getAssetsByCity(opts?: Oazapfts.RequestOpts) {
|
export function getAssetsByCity(opts?: Oazapfts.RequestOpts) {
|
||||||
return oazapfts.ok(oazapfts.fetchJson<{
|
return oazapfts.ok(oazapfts.fetchJson<{
|
||||||
status: 200;
|
status: 200;
|
||||||
@@ -3741,7 +3670,7 @@ export enum JobCommand {
|
|||||||
Start = "start",
|
Start = "start",
|
||||||
Pause = "pause",
|
Pause = "pause",
|
||||||
Resume = "resume",
|
Resume = "resume",
|
||||||
Empty = "empty",
|
Clear = "clear",
|
||||||
ClearFailed = "clear-failed"
|
ClearFailed = "clear-failed"
|
||||||
}
|
}
|
||||||
export enum MemoryType {
|
export enum MemoryType {
|
||||||
@@ -3751,21 +3680,6 @@ export enum PartnerDirection {
|
|||||||
SharedBy = "shared-by",
|
SharedBy = "shared-by",
|
||||||
SharedWith = "shared-with"
|
SharedWith = "shared-with"
|
||||||
}
|
}
|
||||||
export enum PathEntityType {
|
|
||||||
Asset = "asset",
|
|
||||||
Person = "person",
|
|
||||||
User = "user"
|
|
||||||
}
|
|
||||||
export enum PathType {
|
|
||||||
Original = "original",
|
|
||||||
Fullsize = "fullsize",
|
|
||||||
Preview = "preview",
|
|
||||||
Thumbnail = "thumbnail",
|
|
||||||
EncodedVideo = "encoded_video",
|
|
||||||
Sidecar = "sidecar",
|
|
||||||
Face = "face",
|
|
||||||
Profile = "profile"
|
|
||||||
}
|
|
||||||
export enum SearchSuggestionType {
|
export enum SearchSuggestionType {
|
||||||
Country = "country",
|
Country = "country",
|
||||||
State = "state",
|
State = "state",
|
||||||
|
|||||||
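Note: downstream of this SDK change, `JobCommand.Empty` becomes `JobCommand.Clear` and `QueueStatusDto` collapses to a single `paused` flag, so web and CLI call sites need matching updates. A rough sketch of an updated caller, assuming the generated `sendJobCommand` wrapper and `JobName` enum that this SDK exposes elsewhere (neither is shown in the hunks above, and the queue name and `force` flag are illustrative):

import { JobCommand, JobName, sendJobCommand } from '@immich/sdk';

// Drop all pending jobs on a queue; "empty" is now "clear".
// JobName.ThumbnailGeneration is an illustrative queue id, and the
// JobStatusDto return shape is assumed from the types above.
async function clearQueue() {
  const status = await sendJobCommand({
    id: JobName.ThumbnailGeneration,
    jobCommandDto: { command: JobCommand.Clear, force: false },
  });
  console.log(`${status.jobCounts.waiting} jobs still waiting`);
}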
447 server/package-lock.json generated
@@ -10,7 +10,6 @@
       "hasInstallScript": true,
       "license": "GNU Affero General Public License version 3",
       "dependencies": {
-        "@nestjs/bullmq": "^11.0.1",
         "@nestjs/common": "^11.0.4",
         "@nestjs/core": "^11.0.4",
         "@nestjs/event-emitter": "^3.0.0",
@@ -24,11 +23,11 @@
         "@opentelemetry/exporter-prometheus": "^0.200.0",
         "@opentelemetry/sdk-node": "^0.200.0",
         "@react-email/components": "^0.0.36",
-        "@socket.io/redis-adapter": "^8.3.0",
+        "@socket.io/postgres-adapter": "^0.4.0",
+        "@types/pg": "^8.11.14",
         "archiver": "^7.0.0",
         "async-lock": "^1.4.0",
         "bcrypt": "^5.1.1",
-        "bullmq": "^4.8.0",
         "chokidar": "^3.5.3",
         "class-transformer": "^0.5.1",
         "class-validator": "^0.14.0",
@@ -39,9 +38,9 @@
         "fast-glob": "^3.3.2",
         "fluent-ffmpeg": "^2.1.2",
         "geo-tz": "^8.0.0",
+        "graphile-worker": "^0.17.0-canary.1fcb2a0",
         "handlebars": "^4.7.8",
         "i18n-iso-countries": "^7.6.0",
-        "ioredis": "^5.3.2",
         "joi": "^17.10.0",
         "js-yaml": "^4.1.0",
         "kysely": "^0.28.0",
@@ -54,7 +53,7 @@
         "nestjs-otel": "^6.0.0",
         "nodemailer": "^6.9.13",
         "openid-client": "^6.3.3",
-        "pg": "^8.11.3",
+        "pg": "^8.15.6",
         "picomatch": "^4.0.2",
         "react": "^19.0.0",
         "react-dom": "^19.0.0",
@@ -80,7 +79,6 @@
         "@nestjs/testing": "^11.0.4",
         "@swc/core": "^1.4.14",
         "@testcontainers/postgresql": "^10.2.1",
-        "@testcontainers/redis": "^10.18.0",
         "@types/archiver": "^6.0.0",
         "@types/async-lock": "^1.4.2",
         "@types/bcrypt": "^5.0.0",
@@ -1072,6 +1070,12 @@
         "@nestjs/core": "^10.x || ^11.0.0"
       }
     },
+    "node_modules/@graphile/logger": {
+      "version": "0.2.0",
+      "resolved": "https://registry.npmjs.org/@graphile/logger/-/logger-0.2.0.tgz",
+      "integrity": "sha512-jjcWBokl9eb1gVJ85QmoaQ73CQ52xAaOCF29ukRbYNl6lY+ts0ErTaDYOBlejcbUs2OpaiqYLO5uDhyLFzWw4w==",
+      "license": "MIT"
+    },
     "node_modules/@grpc/grpc-js": {
       "version": "1.13.3",
       "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.13.3.tgz",
@@ -1883,7 +1887,9 @@
       "version": "1.2.0",
       "resolved": "https://registry.npmjs.org/@ioredis/commands/-/commands-1.2.0.tgz",
       "integrity": "sha512-Sx1pU8EM64o2BrqNpEO1CNLtKQwyhuXuqyfH7oGKCk+1a33d2r5saW8zNwm3j6BTExtjrv2BxTgzzkMwts6vGg==",
-      "license": "MIT"
+      "license": "MIT",
+      "optional": true,
+      "peer": true
     },
     "node_modules/@isaacs/cliui": {
       "version": "8.0.2",
@@ -2118,45 +2124,13 @@
       "integrity": "sha512-4aErSrCR/On/e5G2hDP0wjooqDdauzEbIq8hIkIe5pXV0rtWJZvdCEKL0ykZxex+IxIwBp0eGeV48hQN07dXtw==",
       "license": "MIT"
     },
-    "node_modules/@msgpackr-extract/msgpackr-extract-linux-x64": {
-      "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-linux-x64/-/msgpackr-extract-linux-x64-3.0.3.tgz",
-      "integrity": "sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg==",
-      "cpu": [
-        "x64"
-      ],
-      "license": "MIT",
-      "optional": true,
-      "os": [
-        "linux"
-      ]
-    },
-    "node_modules/@nestjs/bull-shared": {
-      "version": "11.0.2",
-      "resolved": "https://registry.npmjs.org/@nestjs/bull-shared/-/bull-shared-11.0.2.tgz",
-      "integrity": "sha512-dFlttJvBqIFD6M8JVFbkrR4Feb39OTAJPJpFVILU50NOJCM4qziRw3dSNG84Q3v+7/M6xUGMFdZRRGvBBKxoSA==",
-      "license": "MIT",
-      "dependencies": {
-        "tslib": "2.8.1"
-      },
-      "peerDependencies": {
-        "@nestjs/common": "^10.0.0 || ^11.0.0",
-        "@nestjs/core": "^10.0.0 || ^11.0.0"
-      }
-    },
-    "node_modules/@nestjs/bullmq": {
-      "version": "11.0.2",
-      "resolved": "https://registry.npmjs.org/@nestjs/bullmq/-/bullmq-11.0.2.tgz",
-      "integrity": "sha512-Lq6lGpKkETsm0RDcUktlzsthFoE3A5QTMp2FwPi1eztKqKD6/90KS1TcnC9CJFzjpUaYnQzIMrlNs55e+/wsHA==",
-      "license": "MIT",
-      "dependencies": {
-        "@nestjs/bull-shared": "^11.0.2",
-        "tslib": "2.8.1"
-      },
-      "peerDependencies": {
-        "@nestjs/common": "^10.0.0 || ^11.0.0",
-        "@nestjs/core": "^10.0.0 || ^11.0.0",
-        "bullmq": "^3.0.0 || ^4.0.0 || ^5.0.0"
+    "node_modules/@msgpack/msgpack": {
+      "version": "2.8.0",
+      "resolved": "https://registry.npmjs.org/@msgpack/msgpack/-/msgpack-2.8.0.tgz",
+      "integrity": "sha512-h9u4u/jiIRKbq25PM+zymTyW6bhTzELvOoUd+AvYriWOAKpLGnIamaET3pnHYoI5iYphAHBI4ayx0MehR+VVPQ==",
+      "license": "ISC",
+      "engines": {
+        "node": ">= 10"
       }
     },
     "node_modules/@nestjs/cli": {
@@ -3787,6 +3761,17 @@
         "@opentelemetry/api": "^1.3.0"
       }
     },
+    "node_modules/@opentelemetry/instrumentation-pg/node_modules/@types/pg": {
+      "version": "8.6.1",
+      "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.6.1.tgz",
+      "integrity": "sha512-1Kc4oAGzAl7uqUStZCDvaLFqZrW9qWSjXOmBfdgyBP5La7Us6Mg4GBvRlSoaZMhQF/zSj1C8CtKMBkoiT8eL8w==",
+      "license": "MIT",
+      "dependencies": {
+        "@types/node": "*",
+        "pg-protocol": "*",
+        "pg-types": "^2.2.0"
+      }
+    },
     "node_modules/@opentelemetry/instrumentation-pino": {
       "version": "0.47.0",
       "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-pino/-/instrumentation-pino-0.47.0.tgz",
@@ -4763,24 +4748,25 @@
       "integrity": "sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA==",
       "license": "MIT"
     },
-    "node_modules/@socket.io/redis-adapter": {
-      "version": "8.3.0",
-      "resolved": "https://registry.npmjs.org/@socket.io/redis-adapter/-/redis-adapter-8.3.0.tgz",
-      "integrity": "sha512-ly0cra+48hDmChxmIpnESKrc94LjRL80TEmZVscuQ/WWkRP81nNj8W8cCGMqbI4L6NCuAaPRSzZF1a9GlAxxnA==",
+    "node_modules/@socket.io/postgres-adapter": {
+      "version": "0.4.0",
+      "resolved": "https://registry.npmjs.org/@socket.io/postgres-adapter/-/postgres-adapter-0.4.0.tgz",
+      "integrity": "sha512-FJQslCIchoT4oMHk0D8HeSi9nhAOE8/snId65zI10ykZsk3MQJnUH45+Jqd75IuQhtxxwrvNxqHmzLJEPw9PnA==",
       "license": "MIT",
       "dependencies": {
-        "debug": "~4.3.1",
-        "notepack.io": "~3.0.1",
-        "uid2": "1.0.0"
+        "@msgpack/msgpack": "~2.8.0",
+        "@types/pg": "^8.6.6",
+        "debug": "~4.3.4",
+        "pg": "^8.9.0"
       },
       "engines": {
-        "node": ">=10.0.0"
+        "node": ">=12.0.0"
       },
       "peerDependencies": {
         "socket.io-adapter": "^2.5.4"
       }
     },
-    "node_modules/@socket.io/redis-adapter/node_modules/debug": {
+    "node_modules/@socket.io/postgres-adapter/node_modules/debug": {
       "version": "4.3.7",
       "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz",
       "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==",
@@ -4914,16 +4900,6 @@
         "testcontainers": "^10.24.2"
       }
     },
-    "node_modules/@testcontainers/redis": {
-      "version": "10.24.2",
-      "resolved": "https://registry.npmjs.org/@testcontainers/redis/-/redis-10.24.2.tgz",
-      "integrity": "sha512-m4/FZW5ltZPaK9pQTKNipjpBk73Vdj7Ql3sFr26A9dOr0wJyM3Wnc9jeHTNRal7RDnY5rvumXAIUWbBlvKMJEw==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "testcontainers": "^10.24.2"
-      }
-    },
     "node_modules/@tokenizer/inflate": {
       "version": "0.2.7",
       "resolved": "https://registry.npmjs.org/@tokenizer/inflate/-/inflate-0.2.7.tgz",
@@ -5089,6 +5065,15 @@
         "@types/node": "*"
       }
     },
+    "node_modules/@types/debug": {
+      "version": "4.1.12",
+      "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz",
+      "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==",
+      "license": "MIT",
+      "dependencies": {
+        "@types/ms": "*"
+      }
+    },
     "node_modules/@types/docker-modem": {
       "version": "3.0.6",
       "resolved": "https://registry.npmjs.org/@types/docker-modem/-/docker-modem-3.0.6.tgz",
@@ -5201,6 +5186,15 @@
         "rxjs": "^7.2.0"
       }
     },
+    "node_modules/@types/interpret": {
+      "version": "1.1.3",
+      "resolved": "https://registry.npmjs.org/@types/interpret/-/interpret-1.1.3.tgz",
+      "integrity": "sha512-uBaBhj/BhilG58r64mtDb/BEdH51HIQLgP5bmWzc5qCtFMja8dCk/IOJmk36j0lbi9QHwI6sbtUNGuqXdKCAtQ==",
+      "license": "MIT",
+      "dependencies": {
+        "@types/node": "*"
+      }
+    },
     "node_modules/@types/js-yaml": {
       "version": "4.0.9",
       "resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-4.0.9.tgz",
@@ -5261,6 +5255,12 @@
         "@types/node": "*"
       }
     },
+    "node_modules/@types/ms": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz",
+      "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==",
+      "license": "MIT"
+    },
     "node_modules/@types/multer": {
       "version": "1.4.12",
       "resolved": "https://registry.npmjs.org/@types/multer/-/multer-1.4.12.tgz",
@@ -5317,14 +5317,14 @@
       "license": "MIT"
     },
     "node_modules/@types/pg": {
-      "version": "8.6.1",
-      "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.6.1.tgz",
-      "integrity": "sha512-1Kc4oAGzAl7uqUStZCDvaLFqZrW9qWSjXOmBfdgyBP5La7Us6Mg4GBvRlSoaZMhQF/zSj1C8CtKMBkoiT8eL8w==",
+      "version": "8.11.14",
+      "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.11.14.tgz",
+      "integrity": "sha512-qyD11E5R3u0eJmd1lB0WnWKXJGA7s015nyARWljfz5DcX83TKAIlY+QrmvzQTsbIe+hkiFtkyL2gHC6qwF6Fbg==",
       "license": "MIT",
       "dependencies": {
         "@types/node": "*",
         "pg-protocol": "*",
-        "pg-types": "^2.2.0"
+        "pg-types": "^4.0.1"
       }
     },
     "node_modules/@types/pg-pool": {
@@ -5336,6 +5336,63 @@
         "@types/pg": "*"
       }
     },
+    "node_modules/@types/pg/node_modules/pg-types": {
+      "version": "4.0.2",
+      "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-4.0.2.tgz",
+      "integrity": "sha512-cRL3JpS3lKMGsKaWndugWQoLOCoP+Cic8oseVcbr0qhPzYD5DWXK+RZ9LY9wxRf7RQia4SCwQlXk0q6FCPrVng==",
+      "license": "MIT",
+      "dependencies": {
+        "pg-int8": "1.0.1",
+        "pg-numeric": "1.0.2",
+        "postgres-array": "~3.0.1",
+        "postgres-bytea": "~3.0.0",
+        "postgres-date": "~2.1.0",
+        "postgres-interval": "^3.0.0",
+        "postgres-range": "^1.1.1"
+      },
+      "engines": {
+        "node": ">=10"
+      }
+    },
+    "node_modules/@types/pg/node_modules/postgres-array": {
+      "version": "3.0.4",
+      "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-3.0.4.tgz",
+      "integrity": "sha512-nAUSGfSDGOaOAEGwqsRY27GPOea7CNipJPOA7lPbdEpx5Kg3qzdP0AaWC5MlhTWV9s4hFX39nomVZ+C4tnGOJQ==",
+      "license": "MIT",
+      "engines": {
+        "node": ">=12"
+      }
+    },
+    "node_modules/@types/pg/node_modules/postgres-bytea": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-3.0.0.tgz",
+      "integrity": "sha512-CNd4jim9RFPkObHSjVHlVrxoVQXz7quwNFpz7RY1okNNme49+sVyiTvTRobiLV548Hx/hb1BG+iE7h9493WzFw==",
+      "license": "MIT",
+      "dependencies": {
+        "obuf": "~1.1.2"
+      },
+      "engines": {
+        "node": ">= 6"
+      }
+    },
+    "node_modules/@types/pg/node_modules/postgres-date": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-2.1.0.tgz",
+      "integrity": "sha512-K7Juri8gtgXVcDfZttFKVmhglp7epKb1K4pgrkLxehjqkrgPhfG6OO8LHLkfaqkbpjNRnra018XwAr1yQFWGcA==",
+      "license": "MIT",
+      "engines": {
+        "node": ">=12"
+      }
+    },
+    "node_modules/@types/pg/node_modules/postgres-interval": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-3.0.0.tgz",
+      "integrity": "sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw==",
+      "license": "MIT",
+      "engines": {
+        "node": ">=12"
+      }
+    },
     "node_modules/@types/picomatch": {
       "version": "3.0.2",
       "resolved": "https://registry.npmjs.org/@types/picomatch/-/picomatch-3.0.2.tgz",
@@ -5401,7 +5458,6 @@
       "version": "7.7.0",
       "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.7.0.tgz",
       "integrity": "sha512-k107IF4+Xr7UHjwDc7Cfd6PRQfbdkiRabXGRjo07b4WyPahFBZCZ1sE+BNxYIJPPg73UkfOsVOLwqVc/6ETrIA==",
-      "dev": true,
       "license": "MIT"
     },
     "node_modules/@types/send": {
@@ -6885,64 +6941,6 @@
       "url": "https://github.com/sponsors/sindresorhus"
       }
     },
-    "node_modules/bullmq": {
-      "version": "4.18.2",
-      "resolved": "https://registry.npmjs.org/bullmq/-/bullmq-4.18.2.tgz",
-      "integrity": "sha512-Cx0O98IlGiFw7UBa+zwGz+nH0Pcl1wfTvMVBlsMna3s0219hXroVovh1xPRgomyUcbyciHiugGCkW0RRNZDHYQ==",
-      "license": "MIT",
-      "dependencies": {
-        "cron-parser": "^4.6.0",
-        "glob": "^8.0.3",
-        "ioredis": "^5.3.2",
-        "lodash": "^4.17.21",
-        "msgpackr": "^1.6.2",
-        "node-abort-controller": "^3.1.1",
-        "semver": "^7.5.4",
-        "tslib": "^2.0.0",
-        "uuid": "^9.0.0"
-      }
-    },
-    "node_modules/bullmq/node_modules/brace-expansion": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
-      "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
-      "license": "MIT",
-      "dependencies": {
-        "balanced-match": "^1.0.0"
-      }
-    },
-    "node_modules/bullmq/node_modules/glob": {
-      "version": "8.1.0",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz",
-      "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==",
-      "deprecated": "Glob versions prior to v9 are no longer supported",
-      "license": "ISC",
-      "dependencies": {
-        "fs.realpath": "^1.0.0",
-        "inflight": "^1.0.4",
-        "inherits": "2",
-        "minimatch": "^5.0.1",
-        "once": "^1.3.0"
-      },
-      "engines": {
-        "node": ">=12"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
-    "node_modules/bullmq/node_modules/minimatch": {
-      "version": "5.1.6",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz",
-      "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==",
-      "license": "ISC",
-      "dependencies": {
-        "brace-expansion": "^2.0.1"
-      },
-      "engines": {
-        "node": ">=10"
-      }
-    },
     "node_modules/busboy": {
       "version": "1.6.0",
       "resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz",
@@ -7530,6 +7528,8 @@
       "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz",
       "integrity": "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==",
       "license": "Apache-2.0",
+      "optional": true,
+      "peer": true,
       "engines": {
         "node": ">=0.10.0"
       }
@@ -7932,18 +7932,6 @@
         "luxon": "~3.5.0"
       }
     },
-    "node_modules/cron-parser": {
-      "version": "4.9.0",
-      "resolved": "https://registry.npmjs.org/cron-parser/-/cron-parser-4.9.0.tgz",
-      "integrity": "sha512-p0SaNjrHOnQeR8/VnfGbmg9te2kfyYSQ7Sc/j/6DtPL3JQvKxmjO9TSjNFpujqV3vEYYBvNNvXSxzyksBWAx1Q==",
-      "license": "MIT",
-      "dependencies": {
-        "luxon": "^3.2.1"
-      },
-      "engines": {
-        "node": ">=12.0.0"
-      }
-    },
     "node_modules/cron/node_modules/luxon": {
       "version": "3.5.0",
       "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.5.0.tgz",
@@ -8170,6 +8158,8 @@
       "resolved": "https://registry.npmjs.org/denque/-/denque-2.1.0.tgz",
       "integrity": "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==",
       "license": "Apache-2.0",
+      "optional": true,
+      "peer": true,
       "engines": {
         "node": ">=0.10"
       }
@@ -10059,6 +10049,65 @@
       "dev": true,
       "license": "MIT"
     },
+    "node_modules/graphile-config": {
+      "version": "0.0.1-beta.15",
+      "resolved": "https://registry.npmjs.org/graphile-config/-/graphile-config-0.0.1-beta.15.tgz",
+      "integrity": "sha512-J+hYqhZlx5yY7XdU7XjOAqNCAUZU33fEx3PdkNc1cfAAbo1TNMWiib4DFH5XkT8BagJtTyFrMnDCuKxnphCu+g==",
+      "license": "MIT",
+      "dependencies": {
+        "@types/interpret": "^1.1.1",
+        "@types/node": "^20.5.7",
+        "@types/semver": "^7.5.1",
+        "chalk": "^4.1.2",
+        "debug": "^4.3.4",
+        "interpret": "^3.1.1",
+        "semver": "^7.5.4",
+        "tslib": "^2.6.2",
+        "yargs": "^17.7.2"
+      },
+      "engines": {
+        "node": ">=16"
+      }
+    },
+    "node_modules/graphile-config/node_modules/@types/node": {
+      "version": "20.17.32",
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.32.tgz",
+      "integrity": "sha512-zeMXFn8zQ+UkjK4ws0RiOC9EWByyW1CcVmLe+2rQocXRsGEDxUCwPEIVgpsGcLHS/P8JkT0oa3839BRABS0oPw==",
+      "license": "MIT",
+      "dependencies": {
+        "undici-types": "~6.19.2"
+      }
+    },
+    "node_modules/graphile-config/node_modules/undici-types": {
+      "version": "6.19.8",
+      "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz",
+      "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==",
+      "license": "MIT"
+    },
+    "node_modules/graphile-worker": {
+      "version": "0.17.0-canary.1fcb2a0",
+      "resolved": "https://registry.npmjs.org/graphile-worker/-/graphile-worker-0.17.0-canary.1fcb2a0.tgz",
+      "integrity": "sha512-eG02GZ0U1eSMBdfHlQg9+jaNXpr9gs1cwqfFeney3BHpEMSvG3jw+7SdQJPVUgF8wnt8dRRfhkbpzaXGSOr+MQ==",
+      "license": "MIT",
+      "dependencies": {
+        "@graphile/logger": "^0.2.0",
+        "@types/debug": "^4.1.10",
+        "@types/pg": "^8.10.5",
+        "cosmiconfig": "^8.3.6",
+        "graphile-config": "^0.0.1-beta.14",
+        "json5": "^2.2.3",
+        "pg": "^8.11.3",
+        "tslib": "^2.6.2",
+        "yargs": "^17.7.2"
+      },
+      "bin": {
+        "graphile-worker": "dist/cli.js"
+      },
+      "engines": {
+        "node": ">=14.0.0",
+        "yarn": "^1.22.22"
+      }
+    },
     "node_modules/handlebars": {
       "version": "4.7.8",
       "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz",
@@ -10501,11 +10550,22 @@
         "node": ">=8"
       }
     },
+    "node_modules/interpret": {
+      "version": "3.1.1",
+      "resolved": "https://registry.npmjs.org/interpret/-/interpret-3.1.1.tgz",
+      "integrity": "sha512-6xwYfHbajpoF0xLW+iwLkhwgvLoZDfjYfoFNu8ftMoXINzwuymNLd9u/KmwtdT2GbR+/Cz66otEGEVVUHX9QLQ==",
+      "license": "MIT",
+      "engines": {
+        "node": ">=10.13.0"
+      }
+    },
     "node_modules/ioredis": {
       "version": "5.6.1",
       "resolved": "https://registry.npmjs.org/ioredis/-/ioredis-5.6.1.tgz",
       "integrity": "sha512-UxC0Yv1Y4WRJiGQxQkP0hfdL0/5/6YvdfOOClRgJ0qppSarkhneSa6UvkMkms0AkdGimSH3Ikqm+6mkMmX7vGA==",
       "license": "MIT",
+      "optional": true,
+      "peer": true,
       "dependencies": {
         "@ioredis/commands": "^1.1.1",
         "cluster-key-slot": "^1.1.0",
@@ -11375,13 +11435,17 @@
       "version": "4.2.0",
       "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz",
       "integrity": "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==",
-      "license": "MIT"
+      "license": "MIT",
+      "optional": true,
+      "peer": true
     },
     "node_modules/lodash.isarguments": {
       "version": "3.1.0",
       "resolved": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz",
       "integrity": "sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg==",
-      "license": "MIT"
+      "license": "MIT",
+      "optional": true,
+      "peer": true
     },
     "node_modules/lodash.merge": {
       "version": "4.6.2",
@@ -11908,37 +11972,6 @@
       "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
       "license": "MIT"
     },
-    "node_modules/msgpackr": {
-      "version": "1.11.2",
-      "resolved": "https://registry.npmjs.org/msgpackr/-/msgpackr-1.11.2.tgz",
-      "integrity": "sha512-F9UngXRlPyWCDEASDpTf6c9uNhGPTqnTeLVt7bN+bU1eajoR/8V9ys2BRaV5C/e5ihE6sJ9uPIKaYt6bFuO32g==",
-      "license": "MIT",
-      "optionalDependencies": {
-        "msgpackr-extract": "^3.0.2"
-      }
-    },
-    "node_modules/msgpackr-extract": {
-      "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/msgpackr-extract/-/msgpackr-extract-3.0.3.tgz",
-      "integrity": "sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA==",
-      "hasInstallScript": true,
-      "license": "MIT",
-      "optional": true,
-      "dependencies": {
-        "node-gyp-build-optional-packages": "5.2.2"
-      },
-      "bin": {
-        "download-msgpackr-prebuilds": "bin/download-prebuilds.js"
-      },
-      "optionalDependencies": {
-        "@msgpackr-extract/msgpackr-extract-darwin-arm64": "3.0.3",
-        "@msgpackr-extract/msgpackr-extract-darwin-x64": "3.0.3",
-        "@msgpackr-extract/msgpackr-extract-linux-arm": "3.0.3",
-        "@msgpackr-extract/msgpackr-extract-linux-arm64": "3.0.3",
-        "@msgpackr-extract/msgpackr-extract-linux-x64": "3.0.3",
-        "@msgpackr-extract/msgpackr-extract-win32-x64": "3.0.3"
-      }
-    },
     "node_modules/multer": {
       "version": "1.4.5-lts.2",
       "resolved": "https://registry.npmjs.org/multer/-/multer-1.4.5-lts.2.tgz",
@@ -12299,6 +12332,7 @@
       "version": "3.1.1",
       "resolved": "https://registry.npmjs.org/node-abort-controller/-/node-abort-controller-3.1.1.tgz",
       "integrity": "sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ==",
+      "dev": true,
       "license": "MIT"
     },
     "node_modules/node-addon-api": {
@@ -12366,21 +12400,6 @@
       "node": "^18.17.0 || >=20.5.0"
       }
     },
-    "node_modules/node-gyp-build-optional-packages": {
-      "version": "5.2.2",
-      "resolved": "https://registry.npmjs.org/node-gyp-build-optional-packages/-/node-gyp-build-optional-packages-5.2.2.tgz",
-      "integrity": "sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw==",
-      "license": "MIT",
-      "optional": true,
-      "dependencies": {
-        "detect-libc": "^2.0.1"
-      },
-      "bin": {
-        "node-gyp-build-optional-packages": "bin.js",
-        "node-gyp-build-optional-packages-optional": "optional.js",
-        "node-gyp-build-optional-packages-test": "build-test.js"
-      }
-    },
     "node_modules/node-gyp/node_modules/abbrev": {
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-3.0.1.tgz",
@@ -12554,12 +12573,6 @@
       "node": ">=0.10.0"
       }
     },
-    "node_modules/notepack.io": {
-      "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/notepack.io/-/notepack.io-3.0.1.tgz",
-      "integrity": "sha512-TKC/8zH5pXIAMVQio2TvVDTtPRX+DJPHDqjRbxogtFiByHyzKmy96RA0JtCQJ+WouyyL4A10xomQzgbUT+1jCg==",
-      "license": "MIT"
-    },
     "node_modules/npmlog": {
       "version": "5.0.1",
       "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz",
@@ -12629,6 +12642,12 @@
       "node": ">= 0.4"
       }
     },
+    "node_modules/obuf": {
+      "version": "1.1.2",
+      "resolved": "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz",
+      "integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==",
+      "license": "MIT"
+    },
     "node_modules/on-finished": {
       "version": "2.4.1",
       "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz",
@@ -13151,13 +13170,13 @@
       }
     },
     "node_modules/pg": {
-      "version": "8.15.5",
-      "resolved": "https://registry.npmjs.org/pg/-/pg-8.15.5.tgz",
-      "integrity": "sha512-EpAhHFQc+aH9VfeffWIVC+XXk6lmAhS9W1FxtxcPXs94yxhrI1I6w/zkWfIOII/OkBv3Be04X3xMOj0kQ78l6w==",
+      "version": "8.15.6",
+      "resolved": "https://registry.npmjs.org/pg/-/pg-8.15.6.tgz",
+      "integrity": "sha512-yvao7YI3GdmmrslNVsZgx9PfntfWrnXwtR+K/DjI0I/sTKif4Z623um+sjVZ1hk5670B+ODjvHDAckKdjmPTsg==",
       "license": "MIT",
       "dependencies": {
         "pg-connection-string": "^2.8.5",
-        "pg-pool": "^3.9.5",
+        "pg-pool": "^3.9.6",
         "pg-protocol": "^1.9.5",
         "pg-types": "^2.1.0",
         "pgpass": "1.x"
@@ -13199,6 +13218,15 @@
       "node": ">=4.0.0"
       }
     },
+    "node_modules/pg-numeric": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/pg-numeric/-/pg-numeric-1.0.2.tgz",
+      "integrity": "sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==",
+      "license": "ISC",
+      "engines": {
+        "node": ">=4"
+      }
+    },
     "node_modules/pg-pool": {
       "version": "3.9.6",
       "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.9.6.tgz",
@@ -13508,6 +13536,12 @@
       "node": ">=0.10.0"
       }
     },
+    "node_modules/postgres-range": {
+      "version": "1.1.4",
+      "resolved": "https://registry.npmjs.org/postgres-range/-/postgres-range-1.1.4.tgz",
+      "integrity": "sha512-i/hbxIE9803Alj/6ytL7UHQxRvZkI9O4Sy+J3HGc4F4oo/2eQAjTSNJ0bfxyse3bH0nuVesCk+3IRLaMtG3H6w==",
+      "license": "MIT"
+    },
     "node_modules/prelude-ls": {
       "version": "1.2.1",
       "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz",
@@ -14219,6 +14253,8 @@
       "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz",
       "integrity": "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w==",
       "license": "MIT",
+      "optional": true,
+      "peer": true,
       "engines": {
         "node": ">=4"
       }
@@ -14228,6 +14264,8 @@
       "resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz",
       "integrity": "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==",
       "license": "MIT",
+      "optional": true,
+      "peer": true,
      "dependencies": {
         "redis-errors": "^1.0.0"
       },
@@ -15346,7 +15384,9 @@
       "version": "2.1.0",
       "resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.1.0.tgz",
       "integrity": "sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A==",
-      "license": "MIT"
+      "license": "MIT",
+      "optional": true,
+      "peer": true
     },
     "node_modules/statuses": {
       "version": "2.0.1",
@@ -16965,15 +17005,6 @@
       "node": ">=8"
       }
     },
-    "node_modules/uid2": {
-      "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/uid2/-/uid2-1.0.0.tgz",
-      "integrity": "sha512-+I6aJUv63YAcY9n4mQreLUt0d4lvwkkopDNmpomkAUz0fAkEMV9pRWxN0EjhW1YfRhcuyHg2v3mwddCDW1+LFQ==",
-      "license": "MIT",
-      "engines": {
-        "node": ">= 4.0.0"
-      }
-    },
     "node_modules/uint8array-extras": {
       "version": "1.4.0",
       "resolved": "https://registry.npmjs.org/uint8array-extras/-/uint8array-extras-1.4.0.tgz",
@@ -35,7 +35,6 @@
     "postinstall": "patch-package"
   },
   "dependencies": {
-    "@nestjs/bullmq": "^11.0.1",
     "@nestjs/common": "^11.0.4",
     "@nestjs/core": "^11.0.4",
     "@nestjs/event-emitter": "^3.0.0",
@@ -49,11 +48,11 @@
     "@opentelemetry/exporter-prometheus": "^0.200.0",
     "@opentelemetry/sdk-node": "^0.200.0",
     "@react-email/components": "^0.0.36",
-    "@socket.io/redis-adapter": "^8.3.0",
+    "@socket.io/postgres-adapter": "^0.4.0",
+    "@types/pg": "^8.11.14",
     "archiver": "^7.0.0",
     "async-lock": "^1.4.0",
     "bcrypt": "^5.1.1",
-    "bullmq": "^4.8.0",
     "chokidar": "^3.5.3",
     "class-transformer": "^0.5.1",
     "class-validator": "^0.14.0",
@@ -64,9 +63,9 @@
     "fast-glob": "^3.3.2",
     "fluent-ffmpeg": "^2.1.2",
     "geo-tz": "^8.0.0",
+    "graphile-worker": "^0.17.0-canary.1fcb2a0",
     "handlebars": "^4.7.8",
     "i18n-iso-countries": "^7.6.0",
-    "ioredis": "^5.3.2",
     "joi": "^17.10.0",
     "js-yaml": "^4.1.0",
     "kysely": "^0.28.0",
@@ -79,7 +78,7 @@
     "nestjs-otel": "^6.0.0",
     "nodemailer": "^6.9.13",
     "openid-client": "^6.3.3",
-    "pg": "^8.11.3",
+    "pg": "^8.15.6",
     "picomatch": "^4.0.2",
     "react": "^19.0.0",
     "react-dom": "^19.0.0",
@@ -105,7 +104,6 @@
     "@nestjs/testing": "^11.0.4",
     "@swc/core": "^1.4.14",
     "@testcontainers/postgresql": "^10.2.1",
-    "@testcontainers/redis": "^10.18.0",
     "@types/archiver": "^6.0.0",
     "@types/async-lock": "^1.4.2",
     "@types/bcrypt": "^5.0.0",
@@ -1,4 +1,3 @@
-import { BullModule } from '@nestjs/bullmq';
 import { Inject, Module, OnModuleDestroy, OnModuleInit, ValidationPipe } from '@nestjs/common';
 import { APP_FILTER, APP_GUARD, APP_INTERCEPTOR, APP_PIPE } from '@nestjs/core';
 import { ScheduleModule, SchedulerRegistry } from '@nestjs/schedule';
@@ -37,11 +36,9 @@ export const middleware = [
 ];
 
 const configRepository = new ConfigRepository();
-const { bull, cls, database, otel } = configRepository.getEnv();
+const { cls, database, otel } = configRepository.getEnv();
 
 const imports = [
-  BullModule.forRoot(bull.config),
-  BullModule.registerQueue(...bull.queues),
   ClsModule.forRoot(cls.config),
   OpenTelemetryModule.forRoot(otel),
   KyselyModule.forRoot(getKyselyConfig(database.config)),
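Note: with the Bull modules dropped from the imports list, job queuing moves to graphile-worker, which persists jobs in the graphile_worker schema of the same Postgres database. A minimal standalone sketch of the library's run/addJob API as documented upstream; the task name, payload, and connection string are illustrative, not Immich's actual configuration:

import { run, quickAddJob } from 'graphile-worker';

const connectionString = 'postgres://immich:password@database/immich'; // illustrative

async function main() {
  // Start a worker that polls the graphile_worker tables for runnable jobs.
  const runner = await run({
    connectionString,
    concurrency: 5,
    taskList: {
      // Task identifier and handler are illustrative only.
      'thumbnail-generation': async (payload, helpers) => {
        helpers.logger.info(`processing ${JSON.stringify(payload)}`);
      },
    },
  });

  // Enqueue a job; it is stored in Postgres, so no separate broker is needed.
  await quickAddJob({ connectionString }, 'thumbnail-generation', { assetId: '123' });

  await runner.promise;
}

void main();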
@@ -1,7 +1,7 @@
 import { Body, Controller, Delete, Get, HttpCode, HttpStatus, Param, Post, Put, Query } from '@nestjs/common';
 import { ApiOperation, ApiTags } from '@nestjs/swagger';
 import { EndpointLifecycle } from 'src/decorators';
-import { AssetResponseDto, MemoryLaneResponseDto } from 'src/dtos/asset-response.dto';
+import { AssetResponseDto } from 'src/dtos/asset-response.dto';
 import {
   AssetBulkDeleteDto,
   AssetBulkUpdateDto,
@@ -13,7 +13,6 @@ import {
   UpdateAssetDto,
 } from 'src/dtos/asset.dto';
 import { AuthDto } from 'src/dtos/auth.dto';
-import { MemoryLaneDto } from 'src/dtos/search.dto';
 import { RouteKey } from 'src/enum';
 import { Auth, Authenticated } from 'src/middleware/auth.guard';
 import { AssetService } from 'src/services/asset.service';
@@ -24,12 +23,6 @@ import { UUIDParamDto } from 'src/validation';
 export class AssetController {
   constructor(private service: AssetService) {}
 
-  @Get('memory-lane')
-  @Authenticated()
-  getMemoryLane(@Auth() auth: AuthDto, @Query() dto: MemoryLaneDto): Promise<MemoryLaneResponseDto[]> {
-    return this.service.getMemoryLane(auth, dto);
-  }
-
   @Get('random')
   @Authenticated()
   @EndpointLifecycle({ deprecatedAt: 'v1.116.0' })
@@ -1,29 +0,0 @@
-import { Body, Controller, Get, Post } from '@nestjs/common';
-import { ApiTags } from '@nestjs/swagger';
-import { FileChecksumDto, FileChecksumResponseDto, FileReportDto, FileReportFixDto } from 'src/dtos/audit.dto';
-import { Authenticated } from 'src/middleware/auth.guard';
-import { AuditService } from 'src/services/audit.service';
-
-@ApiTags('File Reports')
-@Controller('reports')
-export class ReportController {
-  constructor(private service: AuditService) {}
-
-  @Get()
-  @Authenticated({ admin: true })
-  getAuditFiles(): Promise<FileReportDto> {
-    return this.service.getFileReport();
-  }
-
-  @Post('checksum')
-  @Authenticated({ admin: true })
-  getFileChecksums(@Body() dto: FileChecksumDto): Promise<FileChecksumResponseDto[]> {
-    return this.service.getChecksums(dto);
-  }
-
-  @Post('fix')
-  @Authenticated({ admin: true })
-  fixAuditFiles(@Body() dto: FileReportFixDto): Promise<void> {
-    return this.service.fixItems(dto.items);
-  }
-}
@@ -8,7 +8,6 @@ import { AuthController } from 'src/controllers/auth.controller';
 import { DownloadController } from 'src/controllers/download.controller';
 import { DuplicateController } from 'src/controllers/duplicate.controller';
 import { FaceController } from 'src/controllers/face.controller';
-import { ReportController } from 'src/controllers/file-report.controller';
 import { JobController } from 'src/controllers/job.controller';
 import { LibraryController } from 'src/controllers/library.controller';
 import { MapController } from 'src/controllers/map.controller';
@@ -53,7 +52,6 @@ export const controllers = [
   OAuthController,
   PartnerController,
   PersonController,
-  ReportController,
   SearchController,
   ServerController,
   SessionController,
@@ -46,7 +46,7 @@ export class SearchController {
   @Get('explore')
   @Authenticated()
   getExploreData(@Auth() auth: AuthDto): Promise<SearchExploreResponseDto[]> {
-    return this.service.getExploreData(auth) as Promise<SearchExploreResponseDto[]>;
+    return this.service.getExploreData(auth);
   }
 
   @Get('person')
27 server/src/db.d.ts vendored
@@ -236,6 +236,30 @@ export interface GeodataPlaces {
   name: string;
 }
 
+export interface GraphileWorkerJobs {
+  id: Generated<string>;
+  task_identifier: string;
+  locked_at: Timestamp | null;
+  locked_by: string | null;
+  run_at: Timestamp | null;
+  attempts: number;
+  max_attempts: number;
+}
+
+export interface GraphileWorkerPrivateJobs {
+  id: Generated<string>;
+  task_id: string;
+  locked_at: Timestamp | null;
+  locked_by: string | null;
+  attempts: number;
+  max_attempts: number;
+}
+
+export interface GraphileWorkerPrivateTasks {
+  id: Generated<string>;
+  identifier: string;
+}
+
 export interface Libraries {
   createdAt: Generated<Timestamp>;
   deletedAt: Timestamp | null;
@@ -476,6 +500,9 @@ export interface DB {
   exif: Exif;
   face_search: FaceSearch;
   geodata_places: GeodataPlaces;
+  'graphile_worker.jobs': GraphileWorkerJobs;
+  'graphile_worker._private_jobs': GraphileWorkerPrivateJobs;
+  'graphile_worker._private_tasks': GraphileWorkerPrivateTasks;
   libraries: Libraries;
   memories: Memories;
   memories_assets_assets: MemoriesAssetsAssets;
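Note: these typings expose graphile-worker's bookkeeping tables to Kysely, so the server can read queue depth with plain SQL instead of BullMQ counters. A hedged sketch of the kind of query the typings enable; the `src/db` import path and the exact aggregation are illustrative:

import { Kysely } from 'kysely';
import type { DB } from 'src/db'; // assumed to export the DB interface above

// Count jobs per task identifier from the graphile_worker.jobs table.
// `db` is assumed to be the application's configured Kysely<DB> instance.
async function getQueueCounts(db: Kysely<DB>) {
  return db
    .selectFrom('graphile_worker.jobs')
    .select(({ fn }) => ['task_identifier', fn.countAll().as('total')])
    .groupBy('task_identifier')
    .execute();
}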
@@ -199,10 +199,3 @@ export function mapAsset(entity: MapAsset, options: AssetMapOptions = {}): Asset
     resized: true,
   };
 }
-
-export class MemoryLaneResponseDto {
-  @ApiProperty({ type: 'integer' })
-  yearsAgo!: number;
-
-  assets!: AssetResponseDto[];
-}
@@ -1,73 +0,0 @@
-import { ApiProperty } from '@nestjs/swagger';
-import { Type } from 'class-transformer';
-import { IsArray, IsEnum, IsString, IsUUID, ValidateNested } from 'class-validator';
-import { AssetPathType, EntityType, PathType, PersonPathType, UserPathType } from 'src/enum';
-import { Optional, ValidateDate, ValidateUUID } from 'src/validation';
-
-const PathEnum = Object.values({ ...AssetPathType, ...PersonPathType, ...UserPathType });
-
-export class AuditDeletesDto {
-  @ValidateDate()
-  after!: Date;
-
-  @ApiProperty({ enum: EntityType, enumName: 'EntityType' })
-  @IsEnum(EntityType)
-  entityType!: EntityType;
-
-  @Optional()
-  @IsUUID('4')
-  @ApiProperty({ format: 'uuid' })
-  userId?: string;
-}
-
-export enum PathEntityType {
-  ASSET = 'asset',
-  PERSON = 'person',
-  USER = 'user',
-}
-
-export class AuditDeletesResponseDto {
-  needsFullSync!: boolean;
-  ids!: string[];
-}
-
-export class FileReportDto {
-  orphans!: FileReportItemDto[];
-  extras!: string[];
-}
-
-export class FileChecksumDto {
-  @IsString({ each: true })
-  filenames!: string[];
-}
-
-export class FileChecksumResponseDto {
-  filename!: string;
-  checksum!: string;
-}
-
-export class FileReportFixDto {
-  @IsArray()
-  @ValidateNested({ each: true })
-  @Type(() => FileReportItemDto)
-  items!: FileReportItemDto[];
-}
-
-// used both as request and response dto
-export class FileReportItemDto {
-  @ValidateUUID()
-  entityId!: string;
-
-  @ApiProperty({ enumName: 'PathEntityType', enum: PathEntityType })
-  @IsEnum(PathEntityType)
-  entityType!: PathEntityType;
-
-  @ApiProperty({ enumName: 'PathType', enum: PathEnum })
-  @IsEnum(PathEnum)
-  pathType!: PathType;
-
-  @IsString()
-  pathValue!: string;
-
-  checksum?: string;
-}
@@ -157,34 +157,4 @@ export class EnvDto {
   @IsString()
   @Optional()
   NO_COLOR?: string;
-
-  @IsString()
-  @Optional()
-  REDIS_HOSTNAME?: string;
-
-  @IsInt()
-  @Optional()
-  @Type(() => Number)
-  REDIS_PORT?: number;
-
-  @IsInt()
-  @Optional()
-  @Type(() => Number)
-  REDIS_DBINDEX?: number;
-
-  @IsString()
-  @Optional()
-  REDIS_USERNAME?: string;
-
-  @IsString()
-  @Optional()
-  REDIS_PASSWORD?: string;
-
-  @IsString()
-  @Optional()
-  REDIS_SOCKET?: string;
-
-  @IsString()
-  @Optional()
-  REDIS_URL?: string;
 }
@@ -30,20 +30,15 @@ export class JobCountsDto {
   @ApiProperty({ type: 'integer' })
   active!: number;
   @ApiProperty({ type: 'integer' })
-  completed!: number;
-  @ApiProperty({ type: 'integer' })
-  failed!: number;
+  waiting!: number;
   @ApiProperty({ type: 'integer' })
   delayed!: number;
   @ApiProperty({ type: 'integer' })
-  waiting!: number;
-  @ApiProperty({ type: 'integer' })
-  paused!: number;
+  failed!: number;
 }
 
 export class QueueStatusDto {
-  isActive!: boolean;
-  isPaused!: boolean;
+  paused!: boolean;
 }
 
 export class JobStatusDto {
@@ -204,6 +204,7 @@ export enum SystemMetadataKey {
   SYSTEM_FLAGS = 'system-flags',
   VERSION_CHECK_STATE = 'version-check-state',
   LICENSE = 'license',
+  QUEUES_STATE = 'queues-state',
 }

 export enum UserMetadataKey {
@@ -533,10 +534,20 @@ export enum JobName {
 }

 export enum JobCommand {
+  // The behavior of start depends on the queue. Usually it is a request to
+  // reprocess everything associated with the queue from scratch.
   START = 'start',
+
+  // Pause prevents workers from processing jobs.
   PAUSE = 'pause',
+
+  // Resume allows workers to continue processing jobs.
   RESUME = 'resume',
-  EMPTY = 'empty',
+
+  // Clear removes all pending jobs.
+  CLEAR = 'clear',
+
+  // ClearFailed removes all failed jobs.
   CLEAR_FAILED = 'clear-failed',
 }
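For orientation, a minimal sketch (not part of the diff) of how these commands map onto the JobRepository methods introduced further down. The `handleCommand` helper is hypothetical; only the repository methods come from this branch.

import { JobCommand, QueueName } from 'src/enum';
import { JobRepository } from 'src/repositories/job.repository';

// Hypothetical dispatcher over the new queue commands.
const handleCommand = async (repo: JobRepository, queue: QueueName, command: JobCommand) => {
  switch (command) {
    case JobCommand.START:
      return repo.start(queue); // starts the runner; what gets re-queued is decided per queue
    case JobCommand.PAUSE:
      return repo.pause(queue); // persists paused=true and stops the runner
    case JobCommand.RESUME:
      return repo.resume(queue); // persists paused=false and restarts the runner
    case JobCommand.CLEAR:
      return repo.clear(queue); // deletes pending jobs for the queue
    case JobCommand.CLEAR_FAILED:
      return repo.clearFailed(queue); // deletes jobs that exhausted max_attempts
  }
};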
@@ -1,9 +1,10 @@
 import { INestApplicationContext } from '@nestjs/common';
 import { IoAdapter } from '@nestjs/platform-socket.io';
-import { createAdapter } from '@socket.io/redis-adapter';
-import { Redis } from 'ioredis';
+import { createAdapter } from '@socket.io/postgres-adapter';
+import pg, { PoolConfig } from 'pg';
 import { ServerOptions } from 'socket.io';
 import { ConfigRepository } from 'src/repositories/config.repository';
+import { asPostgresConnectionConfig } from 'src/utils/database';

 export class WebSocketAdapter extends IoAdapter {
   constructor(private app: INestApplicationContext) {
@@ -11,11 +12,11 @@ export class WebSocketAdapter extends IoAdapter {
   }

   createIOServer(port: number, options?: ServerOptions): any {
-    const { redis } = this.app.get(ConfigRepository).getEnv();
     const server = super.createIOServer(port, options);
-    const pubClient = new Redis(redis);
-    const subClient = pubClient.duplicate();
-    server.adapter(createAdapter(pubClient, subClient));
+    const configRepository = new ConfigRepository();
+    const { database } = configRepository.getEnv();
+    const pool = new pg.Pool(asPostgresConnectionConfig(database.config) as PoolConfig);
+    server.adapter(createAdapter(pool));
     return server;
   }
 }
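For context on the swap above: @socket.io/postgres-adapter propagates socket.io events between server instances over Postgres NOTIFY/LISTEN, taking the place of the Redis pub/sub client pair. A minimal standalone sketch, with connection settings assumed rather than taken from Immich's config plumbing:

import { createServer } from 'node:http';
import { Server } from 'socket.io';
import { createAdapter } from '@socket.io/postgres-adapter';
import pg from 'pg';

// Assumed local connection settings; the adapter also expects its attachments
// table to exist (see the adapter's documentation for the exact schema).
const pool = new pg.Pool({ host: 'localhost', database: 'immich', user: 'postgres' });

const httpServer = createServer();
const io = new Server(httpServer);
io.adapter(createAdapter(pool));
httpServer.listen(3000);

A single shared pool serves both the publish and subscribe sides, which is why the duplicated Redis client disappears from the adapter code.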
@@ -194,15 +194,16 @@ where
   "asset_files"."assetId" = $1
   and "asset_files"."type" = $2

--- AssetJobRepository.streamForEncodeClip
+-- AssetJobRepository.streamForSearchDuplicates
 select
   "assets"."id"
 from
   "assets"
   inner join "asset_job_status" as "job_status" on "assetId" = "assets"."id"
 where
-  "job_status"."previewAt" is not null
-  and "assets"."isVisible" = $1
+  "assets"."isVisible" = $1
+  and "assets"."deletedAt" is null
+  and "job_status"."previewAt" is not null
   and not exists (
     select
     from
@@ -210,7 +211,25 @@ where
     where
       "assetId" = "assets"."id"
   )
+  and "job_status"."duplicatesDetectedAt" is null
+
+-- AssetJobRepository.streamForEncodeClip
+select
+  "assets"."id"
+from
+  "assets"
+  inner join "asset_job_status" as "job_status" on "assetId" = "assets"."id"
+where
+  "assets"."isVisible" = $1
   and "assets"."deletedAt" is null
+  and "job_status"."previewAt" is not null
+  and not exists (
+    select
+    from
+      "smart_search"
+    where
+      "assetId" = "assets"."id"
+  )

 -- AssetJobRepository.getForClipEncoding
 select
@@ -450,3 +469,37 @@ from
   "assets"
 where
   "assets"."deletedAt" <= $1
+
+-- AssetJobRepository.streamForSidecar
+select
+  "assets"."id"
+from
+  "assets"
+where
+  (
+    "assets"."sidecarPath" = $1
+    or "assets"."sidecarPath" is null
+  )
+  and "assets"."isVisible" = $2
+
+-- AssetJobRepository.streamForDetectFacesJob
+select
+  "assets"."id"
+from
+  "assets"
+  inner join "asset_job_status" as "job_status" on "assetId" = "assets"."id"
+where
+  "assets"."isVisible" = $1
+  and "assets"."deletedAt" is null
+  and "job_status"."previewAt" is not null
+  and "job_status"."facesRecognizedAt" is null
+order by
+  "assets"."createdAt" desc
+
+-- AssetJobRepository.streamForMigrationJob
+select
+  "id"
+from
+  "assets"
+where
+  "assets"."deletedAt" is null
@@ -232,25 +232,6 @@ where
 limit
   $3

--- AssetRepository.getWithout (sidecar)
-select
-  "assets".*
-from
-  "assets"
-where
-  (
-    "assets"."sidecarPath" = $1
-    or "assets"."sidecarPath" is null
-  )
-  and "assets"."isVisible" = $2
-  and "deletedAt" is null
-order by
-  "createdAt"
-limit
-  $3
-offset
-  $4

 -- AssetRepository.getTimeBuckets
 with
   "assets" as (
@@ -135,20 +135,33 @@ export class AssetJobRepository {
       .execute();
   }

-  @GenerateSql({ params: [], stream: true })
-  streamForEncodeClip(force?: boolean) {
+  private assetsWithPreviews() {
     return this.db
       .selectFrom('assets')
-      .select(['assets.id'])
-      .innerJoin('asset_job_status as job_status', 'assetId', 'assets.id')
-      .where('job_status.previewAt', 'is not', null)
       .where('assets.isVisible', '=', true)
+      .where('assets.deletedAt', 'is', null)
+      .innerJoin('asset_job_status as job_status', 'assetId', 'assets.id')
+      .where('job_status.previewAt', 'is not', null);
+  }
+
+  @GenerateSql({ params: [], stream: true })
+  streamForSearchDuplicates(force?: boolean) {
+    return this.assetsWithPreviews()
+      .where((eb) => eb.not((eb) => eb.exists(eb.selectFrom('smart_search').whereRef('assetId', '=', 'assets.id'))))
+      .$if(!force, (qb) => qb.where('job_status.duplicatesDetectedAt', 'is', null))
+      .select(['assets.id'])
+      .stream();
+  }
+
+  @GenerateSql({ params: [], stream: true })
+  streamForEncodeClip(force?: boolean) {
+    return this.assetsWithPreviews()
+      .select(['assets.id'])
       .$if(!force, (qb) =>
         qb.where((eb) =>
           eb.not((eb) => eb.exists(eb.selectFrom('smart_search').whereRef('assetId', '=', 'assets.id'))),
         ),
       )
-      .where('assets.deletedAt', 'is', null)
       .stream();
   }
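The refactor above hinges on two Kysely features: builders are plain immutable values that a private helper can return for each call site to extend, and `$if(condition, cb)` applies `cb` only when the condition holds, so `force` cleanly toggles the "skip already-processed assets" filters. A reduced sketch of the same pattern, with `db` assumed to be the injected `Kysely<DB>` instance:

import { Kysely } from 'kysely';
import { DB } from 'src/db';

// Shared base query: visible, non-deleted assets that already have a preview.
const assetsWithPreviews = (db: Kysely<DB>) =>
  db
    .selectFrom('assets')
    .where('assets.isVisible', '=', true)
    .where('assets.deletedAt', 'is', null)
    .innerJoin('asset_job_status as job_status', 'assetId', 'assets.id')
    .where('job_status.previewAt', 'is not', null);

// Each job stream layers on its own filters; `force` re-processes everything.
const streamForDetectFaces = (db: Kysely<DB>, force?: boolean) =>
  assetsWithPreviews(db)
    .$if(!force, (qb) => qb.where('job_status.facesRecognizedAt', 'is', null))
    .select(['assets.id'])
    .stream();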
@@ -309,4 +322,30 @@ export class AssetJobRepository {
       .where('assets.deletedAt', '<=', trashedBefore)
       .stream();
   }
+
+  @GenerateSql({ params: [], stream: true })
+  streamForSidecar(force?: boolean) {
+    return this.db
+      .selectFrom('assets')
+      .select(['assets.id'])
+      .$if(!force, (qb) =>
+        qb.where((eb) => eb.or([eb('assets.sidecarPath', '=', ''), eb('assets.sidecarPath', 'is', null)])),
+      )
+      .where('assets.isVisible', '=', true)
+      .stream();
+  }
+
+  @GenerateSql({ params: [], stream: true })
+  streamForDetectFacesJob(force?: boolean) {
+    return this.assetsWithPreviews()
+      .$if(!force, (qb) => qb.where('job_status.facesRecognizedAt', 'is', null))
+      .select(['assets.id'])
+      .orderBy('assets.createdAt', 'desc')
+      .stream();
+  }
+
+  @GenerateSql({ params: [DummyValue.DATE], stream: true })
+  streamForMigrationJob() {
+    return this.db.selectFrom('assets').select(['id']).where('assets.deletedAt', 'is', null).stream();
+  }
 }
@@ -7,13 +7,11 @@ import { AssetFiles, AssetJobStatus, Assets, DB, Exif } from 'src/db';
 import { Chunked, ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
 import { MapAsset } from 'src/dtos/asset-response.dto';
 import { AssetFileType, AssetOrder, AssetStatus, AssetType } from 'src/enum';
-import { AssetSearchOptions, SearchExploreItem, SearchExploreItemSet } from 'src/repositories/search.repository';
 import {
   anyUuid,
   asUuid,
   hasPeople,
   removeUndefinedKeys,
-  searchAssetBuilder,
   truncatedDate,
   unnest,
   withExif,
@@ -27,7 +25,6 @@ import {
   withTags,
 } from 'src/utils/database';
 import { globToSqlPattern } from 'src/utils/misc';
-import { PaginationOptions, paginationHelper } from 'src/utils/pagination';

 export type AssetStats = Record<AssetType, number>;

@@ -45,15 +42,6 @@ export interface LivePhotoSearchOptions {
   type: AssetType;
 }

-export enum WithoutProperty {
-  THUMBNAIL = 'thumbnail',
-  ENCODED_VIDEO = 'encoded-video',
-  EXIF = 'exif',
-  DUPLICATE = 'duplicate',
-  FACES = 'faces',
-  SIDECAR = 'sidecar',
-}
-
 export enum WithProperty {
   SIDECAR = 'sidecar',
 }
@@ -335,10 +323,6 @@ export class AssetRepository {
     return assets.map((asset) => asset.deviceAssetId);
   }

-  getByUserId(pagination: PaginationOptions, userId: string, options: Omit<AssetSearchOptions, 'userIds'> = {}) {
-    return this.getAll(pagination, { ...options, userIds: [userId] });
-  }
-
   @GenerateSql({ params: [DummyValue.UUID, DummyValue.STRING] })
   getByLibraryIdAndOriginalPath(libraryId: string, originalPath: string) {
     return this.db
@@ -350,16 +334,6 @@ export class AssetRepository {
       .executeTakeFirst();
   }

-  async getAll(pagination: PaginationOptions, { orderDirection, ...options }: AssetSearchOptions = {}) {
-    const builder = searchAssetBuilder(this.db, options)
-      .select(withFiles)
-      .orderBy('assets.createdAt', orderDirection ?? 'asc')
-      .limit(pagination.take + 1)
-      .offset(pagination.skip ?? 0);
-    const items = await builder.execute();
-    return paginationHelper(items, pagination.take);
-  }
-
   /**
    * Get assets by device's Id on the database
    * @param ownerId
@@ -529,68 +503,6 @@ export class AssetRepository {
       .executeTakeFirst();
   }

-  @GenerateSql(
-    ...Object.values(WithProperty).map((property) => ({
-      name: property,
-      params: [DummyValue.PAGINATION, property],
-    })),
-  )
-  async getWithout(pagination: PaginationOptions, property: WithoutProperty) {
-    const items = await this.db
-      .selectFrom('assets')
-      .selectAll('assets')
-      .$if(property === WithoutProperty.DUPLICATE, (qb) =>
-        qb
-          .innerJoin('asset_job_status as job_status', 'assets.id', 'job_status.assetId')
-          .where('job_status.duplicatesDetectedAt', 'is', null)
-          .where('job_status.previewAt', 'is not', null)
-          .where((eb) => eb.exists(eb.selectFrom('smart_search').where('assetId', '=', eb.ref('assets.id'))))
-          .where('assets.isVisible', '=', true),
-      )
-      .$if(property === WithoutProperty.ENCODED_VIDEO, (qb) =>
-        qb
-          .where('assets.type', '=', AssetType.VIDEO)
-          .where((eb) => eb.or([eb('assets.encodedVideoPath', 'is', null), eb('assets.encodedVideoPath', '=', '')])),
-      )
-      .$if(property === WithoutProperty.EXIF, (qb) =>
-        qb
-          .leftJoin('asset_job_status as job_status', 'assets.id', 'job_status.assetId')
-          .where((eb) => eb.or([eb('job_status.metadataExtractedAt', 'is', null), eb('assetId', 'is', null)]))
-          .where('assets.isVisible', '=', true),
-      )
-      .$if(property === WithoutProperty.FACES, (qb) =>
-        qb
-          .innerJoin('asset_job_status as job_status', 'assetId', 'assets.id')
-          .where('job_status.previewAt', 'is not', null)
-          .where('job_status.facesRecognizedAt', 'is', null)
-          .where('assets.isVisible', '=', true),
-      )
-      .$if(property === WithoutProperty.SIDECAR, (qb) =>
-        qb
-          .where((eb) => eb.or([eb('assets.sidecarPath', '=', ''), eb('assets.sidecarPath', 'is', null)]))
-          .where('assets.isVisible', '=', true),
-      )
-      .$if(property === WithoutProperty.THUMBNAIL, (qb) =>
-        qb
-          .innerJoin('asset_job_status as job_status', 'assetId', 'assets.id')
-          .where('assets.isVisible', '=', true)
-          .where((eb) =>
-            eb.or([
-              eb('job_status.previewAt', 'is', null),
-              eb('job_status.thumbnailAt', 'is', null),
-              eb('assets.thumbhash', 'is', null),
-            ]),
-          ),
-      )
-      .where('deletedAt', 'is', null)
-      .limit(pagination.take + 1)
-      .offset(pagination.skip ?? 0)
-      .orderBy('createdAt')
-      .execute();
-
-    return paginationHelper(items, pagination.take);
-  }
-
   getStatistics(ownerId: string, { isArchived, isFavorite, isTrashed }: AssetStatsOptions): Promise<AssetStats> {
     return this.db
       .selectFrom('assets')
@@ -774,10 +686,7 @@ export class AssetRepository {
   }

   @GenerateSql({ params: [DummyValue.UUID, { minAssetsPerField: 5, maxFields: 12 }] })
-  async getAssetIdByCity(
-    ownerId: string,
-    { minAssetsPerField, maxFields }: AssetExploreFieldOptions,
-  ): Promise<SearchExploreItem<string>> {
+  async getAssetIdByCity(ownerId: string, { minAssetsPerField, maxFields }: AssetExploreFieldOptions) {
     const items = await this.db
       .with('cities', (qb) =>
         qb
@@ -792,6 +701,7 @@ export class AssetRepository {
       .innerJoin('cities', 'exif.city', 'cities.city')
       .distinctOn('exif.city')
       .select(['assetId as data', 'exif.city as value'])
+      .$narrowType<{ value: NotNull }>()
       .where('ownerId', '=', asUuid(ownerId))
       .where('isVisible', '=', true)
       .where('isArchived', '=', false)
@@ -800,7 +710,7 @@ export class AssetRepository {
       .limit(maxFields)
       .execute();

-    return { fieldName: 'exifInfo.city', items: items as SearchExploreItemSet<string> };
+    return { fieldName: 'exifInfo.city', items };
   }

   @GenerateSql({
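A note on `$narrowType<{ value: NotNull }>()` above: the inner join on the `cities` CTE guarantees at runtime that `value` can never be null, and `$narrowType` moves that guarantee into the inferred result type, which is what lets the `as SearchExploreItemSet<string>` cast be dropped. A reduced sketch with an assumed toy schema:

import { Kysely, NotNull } from 'kysely';

// Assumed single-table schema, for illustration only.
interface ExampleDB {
  exif: { assetId: string; city: string | null };
}

const cityExamples = (db: Kysely<ExampleDB>) =>
  db
    .selectFrom('exif')
    .select(['assetId as data', 'city as value'])
    .where('city', 'is not', null)
    // Without this, `value` is still inferred as string | null despite the filter.
    .$narrowType<{ value: NotNull }>()
    .execute(); // resolves to { data: string; value: string }[]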
@@ -26,38 +26,12 @@ const resetEnv = () => {
     'DB_SKIP_MIGRATIONS',
     'DB_VECTOR_EXTENSION',

-    'REDIS_HOSTNAME',
-    'REDIS_PORT',
-    'REDIS_DBINDEX',
-    'REDIS_USERNAME',
-    'REDIS_PASSWORD',
-    'REDIS_SOCKET',
-    'REDIS_URL',
-
     'NO_COLOR',
   ]) {
     delete process.env[env];
   }
 };

-const sentinelConfig = {
-  sentinels: [
-    {
-      host: 'redis-sentinel-node-0',
-      port: 26_379,
-    },
-    {
-      host: 'redis-sentinel-node-1',
-      port: 26_379,
-    },
-    {
-      host: 'redis-sentinel-node-2',
-      port: 26_379,
-    },
-  ],
-  name: 'redis-sentinel',
-};
-
 describe('getEnv', () => {
   beforeEach(() => {
     resetEnv();
@@ -108,34 +82,6 @@ describe('getEnv', () => {
     });
   });

-  describe('redis', () => {
-    it('should use defaults', () => {
-      const { redis } = getEnv();
-      expect(redis).toEqual({
-        host: 'redis',
-        port: 6379,
-        db: 0,
-        username: undefined,
-        password: undefined,
-        path: undefined,
-      });
-    });
-
-    it('should parse base64 encoded config, ignore other env', () => {
-      process.env.REDIS_URL = `ioredis://${Buffer.from(JSON.stringify(sentinelConfig)).toString('base64')}`;
-      process.env.REDIS_HOSTNAME = 'redis-host';
-      process.env.REDIS_USERNAME = 'redis-user';
-      process.env.REDIS_PASSWORD = 'redis-password';
-      const { redis } = getEnv();
-      expect(redis).toEqual(sentinelConfig);
-    });
-
-    it('should reject invalid json', () => {
-      process.env.REDIS_URL = `ioredis://${Buffer.from('{ "invalid json"').toString('base64')}`;
-      expect(() => getEnv()).toThrowError('Failed to decode redis options');
-    });
-  });
-
   describe('noColor', () => {
     beforeEach(() => {
       delete process.env.NO_COLOR;
@@ -1,25 +1,14 @@
-import { RegisterQueueOptions } from '@nestjs/bullmq';
 import { Inject, Injectable, Optional } from '@nestjs/common';
-import { QueueOptions } from 'bullmq';
 import { plainToInstance } from 'class-transformer';
 import { validateSync } from 'class-validator';
 import { Request, Response } from 'express';
-import { RedisOptions } from 'ioredis';
 import { CLS_ID, ClsModuleOptions } from 'nestjs-cls';
 import { OpenTelemetryModuleOptions } from 'nestjs-otel/lib/interfaces';
-import { join } from 'node:path';
+import { join, resolve } from 'node:path';
 import { citiesFile, excludePaths, IWorker } from 'src/constants';
 import { Telemetry } from 'src/decorators';
 import { EnvDto } from 'src/dtos/env.dto';
-import {
-  DatabaseExtension,
-  ImmichEnvironment,
-  ImmichHeader,
-  ImmichTelemetry,
-  ImmichWorker,
-  LogLevel,
-  QueueName,
-} from 'src/enum';
+import { DatabaseExtension, ImmichEnvironment, ImmichHeader, ImmichTelemetry, ImmichWorker, LogLevel } from 'src/enum';
 import { DatabaseConnectionParams, VectorExtension } from 'src/types';
 import { setDifference } from 'src/utils/set';

@@ -46,11 +35,6 @@ export interface EnvData {
     thirdPartySupportUrl?: string;
   };

-  bull: {
-    config: QueueOptions;
-    queues: RegisterQueueOptions[];
-  };
-
   cls: {
     config: ClsModuleOptions;
   };
@@ -87,8 +71,6 @@ export interface EnvData {
     };
   };

-  redis: RedisOptions;
-
   telemetry: {
     apiPort: number;
     microservicesPort: number;
@@ -149,28 +131,12 @@ const getEnv = (): EnvData => {
   const isProd = environment === ImmichEnvironment.PRODUCTION;
   const buildFolder = dto.IMMICH_BUILD_DATA || '/build';
   const folders = {
+    // eslint-disable-next-line unicorn/prefer-module
+    dist: resolve(`${__dirname}/..`),
     geodata: join(buildFolder, 'geodata'),
     web: join(buildFolder, 'www'),
   };

-  let redisConfig = {
-    host: dto.REDIS_HOSTNAME || 'redis',
-    port: dto.REDIS_PORT || 6379,
-    db: dto.REDIS_DBINDEX || 0,
-    username: dto.REDIS_USERNAME || undefined,
-    password: dto.REDIS_PASSWORD || undefined,
-    path: dto.REDIS_SOCKET || undefined,
-  };
-
-  const redisUrl = dto.REDIS_URL;
-  if (redisUrl && redisUrl.startsWith('ioredis://')) {
-    try {
-      redisConfig = JSON.parse(Buffer.from(redisUrl.slice(10), 'base64').toString());
-    } catch (error) {
-      throw new Error(`Failed to decode redis options: ${error}`);
-    }
-  }
-
   const includedTelemetries =
     dto.IMMICH_TELEMETRY_INCLUDE === 'all'
       ? new Set(Object.values(ImmichTelemetry))
@@ -218,19 +184,6 @@ const getEnv = (): EnvData => {
       thirdPartySupportUrl: dto.IMMICH_THIRD_PARTY_SUPPORT_URL,
     },

-    bull: {
-      config: {
-        prefix: 'immich_bull',
-        connection: { ...redisConfig },
-        defaultJobOptions: {
-          attempts: 3,
-          removeOnComplete: true,
-          removeOnFail: false,
-        },
-      },
-      queues: Object.values(QueueName).map((name) => ({ name })),
-    },
-
     cls: {
       config: {
         middleware: {
@@ -269,8 +222,6 @@ const getEnv = (): EnvData => {
       },
     },

-    redis: redisConfig,
-
     resourcePaths: {
       lockFile: join(buildFolder, 'build-lock.json'),
       geodata: {
@@ -255,10 +255,10 @@ export class DatabaseRepository {
       }
     }

-    if (error) {
-      this.logger.error(`Kysely migrations failed: ${error}`);
-      throw error;
-    }
+    // if (error) {
+    //   this.logger.error(`Kysely migrations failed: ${error}`);
+    //   throw error;
+    // }

     this.logger.debug('Finished running kysely migrations');
   }
@@ -64,6 +64,9 @@ type EventMap = {
   'assets.delete': [{ assetIds: string[]; userId: string }];
   'assets.restore': [{ assetIds: string[]; userId: string }];

+  'queue.pause': [QueueName];
+  'queue.resume': [QueueName];
+
   'job.start': [QueueName, JobItem];
   'job.failed': [{ job: JobItem; error: Error | any }];

@@ -85,7 +88,7 @@ type EventMap = {
   'websocket.connect': [{ userId: string }];
 };

-export const serverEvents = ['config.update'] as const;
+export const serverEvents = ['config.update', 'queue.pause', 'queue.resume'] as const;
 export type ServerEvents = (typeof serverEvents)[number];

 export type EmitEvent = keyof EventMap;
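Listing 'queue.pause' and 'queue.resume' in `serverEvents` is what makes pausing work across processes: server events are fanned out to every instance, so each one can stop or restart its own local runner. A hypothetical caller, assuming the event repository's emit API:

import { QueueName } from 'src/enum';
import { EventRepository } from 'src/repositories/event.repository';

// Any instance may initiate the pause; every instance receives the server
// event and stops its own runner for the queue.
export const pauseEverywhere = (events: EventRepository, queue: QueueName) =>
  events.emit('queue.pause', queue);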
@@ -1,15 +1,19 @@
-import { getQueueToken } from '@nestjs/bullmq';
 import { Injectable } from '@nestjs/common';
 import { ModuleRef, Reflector } from '@nestjs/core';
-import { JobsOptions, Queue, Worker } from 'bullmq';
 import { ClassConstructor } from 'class-transformer';
-import { setTimeout } from 'node:timers/promises';
-import { JobConfig } from 'src/decorators';
-import { JobName, JobStatus, MetadataKey, QueueCleanType, QueueName } from 'src/enum';
+import { AddJobsJobSpec, makeWorkerUtils, run, Runner, WorkerUtils } from 'graphile-worker';
+import { Kysely } from 'kysely';
+import { InjectKysely } from 'nestjs-kysely';
+import pg, { PoolConfig } from 'pg';
+import { DB } from 'src/db';
+import { GenerateSql, JobConfig } from 'src/decorators';
+import { JobName, JobStatus, MetadataKey, QueueName, SystemMetadataKey } from 'src/enum';
 import { ConfigRepository } from 'src/repositories/config.repository';
 import { EventRepository } from 'src/repositories/event.repository';
 import { LoggingRepository } from 'src/repositories/logging.repository';
-import { IEntityJob, JobCounts, JobItem, JobOf, QueueStatus } from 'src/types';
+import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
+import { JobCounts, JobItem, JobOf, QueueStatus } from 'src/types';
+import { asPostgresConnectionConfig } from 'src/utils/database';
 import { getKeyByValue, getMethodNames, ImmichStartupError } from 'src/utils/misc';

 type JobMapItem = {
@@ -19,26 +23,38 @@ type JobMapItem = {
   label: string;
 };

+type QueueConfiguration = {
+  paused: boolean;
+  concurrency: number;
+};
+
 @Injectable()
 export class JobRepository {
-  private workers: Partial<Record<QueueName, Worker>> = {};
   private handlers: Partial<Record<JobName, JobMapItem>> = {};

+  // todo inject the pg pool
+  private pool?: pg.Pool;
+  // todo inject worker utils?
+  private workerUtils?: WorkerUtils;
+  private queueConfig: Record<string, QueueConfiguration> = {};
+  private runners: Record<string, Runner> = {};
+
   constructor(
-    private moduleRef: ModuleRef,
-    private configRepository: ConfigRepository,
-    private eventRepository: EventRepository,
+    @InjectKysely() private db: Kysely<DB>,
     private logger: LoggingRepository,
+    private moduleRef: ModuleRef,
+    private eventRepository: EventRepository,
+    private configRepository: ConfigRepository,
+    private systemMetadataRepository: SystemMetadataRepository,
   ) {
-    this.logger.setContext(JobRepository.name);
+    logger.setContext(JobRepository.name);
   }

-  setup(services: ClassConstructor<unknown>[]) {
+  async setup(services: ClassConstructor<unknown>[]) {
     const reflector = this.moduleRef.get(Reflector, { strict: false });

-    // discovery
-    for (const Service of services) {
-      const instance = this.moduleRef.get<any>(Service);
+    for (const service of services) {
+      const instance = this.moduleRef.get<any>(service);
       for (const methodName of getMethodNames(instance)) {
         const handler = instance[methodName];
         const config = reflector.get<JobConfig>(MetadataKey.JOB_CONFIG, handler);
@@ -47,7 +63,7 @@ export class JobRepository {
       }

       const { name: jobName, queue: queueName } = config;
-      const label = `${Service.name}.${handler.name}`;
+      const label = `${service.name}.${handler.name}`;

       // one handler per job
       if (this.handlers[jobName]) {
@@ -70,176 +86,219 @@ export class JobRepository {
       }
     }

-    // no missing handlers
-    for (const [jobKey, jobName] of Object.entries(JobName)) {
-      const item = this.handlers[jobName];
-      if (!item) {
-        const errorMessage = `Failed to find job handler for Job.${jobKey} ("${jobName}")`;
-        this.logger.error(
-          `${errorMessage}. Make sure to add the @OnJob({ name: JobName.${jobKey}, queue: QueueName.XYZ }) decorator for the new job.`,
-        );
-        throw new ImmichStartupError(errorMessage);
-      }
-    }
+    const { database } = this.configRepository.getEnv();
+    const pool = new pg.Pool({
+      ...asPostgresConnectionConfig(database.config),
+      max: 100,
+    } as PoolConfig);
+
+    // todo: remove debug info
+    setInterval(() => {
+      this.logger.log(`connections:
+        total: ${pool.totalCount}
+        idle: ${pool.idleCount}
+        waiting: ${pool.waitingCount}`);
+    }, 5000);
+
+    pool.on('connect', (client) => {
+      client.setMaxListeners(200);
+    });
+
+    this.pool = pool;
+
+    this.workerUtils = await makeWorkerUtils({ pgPool: pool });
   }

-  startWorkers() {
-    const { bull } = this.configRepository.getEnv();
-    for (const queueName of Object.values(QueueName)) {
-      this.logger.debug(`Starting worker for queue: ${queueName}`);
-      this.workers[queueName] = new Worker(
-        queueName,
-        (job) => this.eventRepository.emit('job.start', queueName, job as JobItem),
-        { ...bull.config, concurrency: 1 },
-      );
-    }
-  }
-
-  async run({ name, data }: JobItem) {
-    const item = this.handlers[name as JobName];
-    if (!item) {
-      this.logger.warn(`Skipping unknown job: "${name}"`);
-      return JobStatus.SKIPPED;
-    }
-
-    return item.handler(data);
-  }
-
-  setConcurrency(queueName: QueueName, concurrency: number) {
-    const worker = this.workers[queueName];
-    if (!worker) {
-      this.logger.warn(`Unable to set queue concurrency, worker not found: '${queueName}'`);
-      return;
-    }
-
-    worker.concurrency = concurrency;
+  async start(queueName: QueueName, concurrency?: number): Promise<void> {
+    if (concurrency) {
+      this.queueConfig[queueName] = {
+        ...this.queueConfig[queueName],
+        concurrency,
+      };
+    } else {
+      concurrency = this.queueConfig[queueName].concurrency;
+    }
+
+    if (this.queueConfig[queueName].paused) {
+      return;
+    }
+
+    await this.stop(queueName);
+    this.runners[queueName] = await run({
+      concurrency,
+      taskList: {
+        [queueName]: async (payload: unknown): Promise<void> => {
+          // this.logger.log(`Job ${queueName} started with payload: ${JSON.stringify(payload)}`);
+          await this.eventRepository.emit('job.start', queueName, payload as JobItem);
+        },
+      },
+      pgPool: this.pool,
+    });
   }

-  async getQueueStatus(name: QueueName): Promise<QueueStatus> {
-    const queue = this.getQueue(name);
-
-    return {
-      isActive: !!(await queue.getActiveCount()),
-      isPaused: await queue.isPaused(),
-    };
-  }
-
-  pause(name: QueueName) {
-    return this.getQueue(name).pause();
-  }
-
-  resume(name: QueueName) {
-    return this.getQueue(name).resume();
-  }
-
-  empty(name: QueueName) {
-    return this.getQueue(name).drain();
-  }
-
-  clear(name: QueueName, type: QueueCleanType) {
-    return this.getQueue(name).clean(0, 1000, type);
-  }
-
-  getJobCounts(name: QueueName): Promise<JobCounts> {
-    return this.getQueue(name).getJobCounts(
-      'active',
-      'completed',
-      'failed',
-      'delayed',
-      'waiting',
-      'paused',
-    ) as unknown as Promise<JobCounts>;
-  }
+  async stop(queueName: QueueName): Promise<void> {
+    const runner = this.runners[queueName];
+    if (runner) {
+      await runner.stop();
+      delete this.runners[queueName];
+    }
+  }
+
+  async pause(queueName: QueueName): Promise<void> {
+    await this.setState(queueName, true);
+    await this.stop(queueName);
+  }
+
+  async resume(queueName: QueueName): Promise<void> {
+    await this.setState(queueName, false);
+    await this.start(queueName);
+  }
+
+  private async setState(queueName: QueueName, paused: boolean): Promise<void> {
+    const state = await this.systemMetadataRepository.get(SystemMetadataKey.QUEUES_STATE);
+    await this.systemMetadataRepository.set(SystemMetadataKey.QUEUES_STATE, {
+      ...state,
+      [queueName]: { paused },
+    });
+    this.queueConfig[queueName] = {
+      ...this.queueConfig[queueName],
+      paused,
+    };
+  }
+
+  // todo: we should consolidate queue and job names and have queues be
+  // homogenous.
+  //
+  // the reason there are multiple kinds of jobs per queue is so that
+  // concurrency settings apply to all of them. We could instead create a
+  // concept of "queue" groups, such that workers will run for groups of queues
+  // rather than just a single queue and achieve the same outcome.

   private getQueueName(name: JobName) {
     return (this.handlers[name] as JobMapItem).queueName;
   }

-  async queueAll(items: JobItem[]): Promise<void> {
-    if (items.length === 0) {
-      return;
-    }
-
-    const promises = [];
-    const itemsByQueue = {} as Record<string, (JobItem & { data: any; options: JobsOptions | undefined })[]>;
-    for (const item of items) {
-      const queueName = this.getQueueName(item.name);
-      const job = {
-        name: item.name,
-        data: item.data || {},
-        options: this.getJobOptions(item) || undefined,
-      } as JobItem & { data: any; options: JobsOptions | undefined };
-
-      if (job.options?.jobId) {
-        // need to use add() instead of addBulk() for jobId deduplication
-        promises.push(this.getQueue(queueName).add(item.name, item.data, job.options));
-      } else {
-        itemsByQueue[queueName] = itemsByQueue[queueName] || [];
-        itemsByQueue[queueName].push(job);
-      }
-    }
-
-    for (const [queueName, jobs] of Object.entries(itemsByQueue)) {
-      const queue = this.getQueue(queueName as QueueName);
-      promises.push(queue.addBulk(jobs));
-    }
-
-    await Promise.all(promises);
-  }
+  run({ name, data }: JobItem): Promise<JobStatus> {
+    const item = this.handlers[name as JobName];
+    if (!item) {
+      this.logger.warn(`Skipping unknown job: "${name}"`);
+      return Promise.resolve(JobStatus.SKIPPED);
+    }
+    return item.handler(data);
+  }

-  async queue(item: JobItem): Promise<void> {
+  queue(item: JobItem): Promise<unknown> {
     return this.queueAll([item]);
   }

-  async waitForQueueCompletion(...queues: QueueName[]): Promise<void> {
-    let activeQueue: QueueStatus | undefined;
-    do {
-      const statuses = await Promise.all(queues.map((name) => this.getQueueStatus(name)));
-      activeQueue = statuses.find((status) => status.isActive);
-    } while (activeQueue);
-    {
-      this.logger.verbose(`Waiting for ${activeQueue} queue to stop...`);
-      await setTimeout(1000);
-    }
-  }
+  queueAll(items: JobItem[]): Promise<unknown> {
+    return this.workerUtils!.addJobs(items.map((item) => this.getJobSpec(item)));
+  }

-  private getJobOptions(item: JobItem): JobsOptions | null {
+  // todo: are we actually generating sql
+  async clear(name: QueueName): Promise<void> {
+    await this.db
+      .deleteFrom('graphile_worker._private_jobs')
+      .where(({ eb, selectFrom }) =>
+        eb('task_id', 'in', selectFrom('graphile_worker._private_tasks').select('id').where('identifier', '=', name)),
+      )
+      .execute();
+
+    const workers = await this.db
+      .selectFrom('graphile_worker.jobs')
+      .select('locked_by')
+      .where('locked_by', 'is not', null)
+      .distinct()
+      .execute();
+
+    // Potentially dangerous? It helps if jobs get stuck active though. The
+    // documentation says that stuck jobs will be unlocked automatically after 4
+    // hours. Though, it can be strange to click "clear" in the UI and see
+    // nothing happen. Especially as the UI is binary, such that new jobs cannot
+    // usually be scheduled unless both active and waiting are zero.
+    await this.workerUtils!.forceUnlockWorkers(workers.map((worker) => worker.locked_by!));
+  }
+
+  async clearFailed(name: QueueName): Promise<void> {
+    await this.db
+      .deleteFrom('graphile_worker._private_jobs')
+      .where(({ eb, selectFrom }) =>
+        eb(
+          'task_id',
+          'in',
+          selectFrom('graphile_worker._private_tasks')
+            .select('id')
+            .where((eb) => eb.and([eb('identifier', '=', name), eb('attempts', '>=', eb.ref('max_attempts'))])),
+        ),
+      )
+      .execute();
+  }
+
+  // todo: are we actually generating sql
+  @GenerateSql({ params: [] })
+  async getJobCounts(name: QueueName): Promise<JobCounts> {
+    return await this.db
+      .selectFrom('graphile_worker.jobs')
+      .select((eb) => [
+        eb.fn
+          .countAll<number>()
+          .filterWhere((eb) => eb.and([eb('task_identifier', '=', name), eb('locked_by', 'is not', null)]))
+          .as('active'),
+        eb.fn
+          .countAll<number>()
+          .filterWhere((eb) =>
+            eb.and([
+              eb('task_identifier', '=', name),
+              eb('locked_by', 'is', null),
+              eb('run_at', '<=', eb.fn<Date>('now')),
+            ]),
+          )
+          .as('waiting'),
+        eb.fn
+          .countAll<number>()
+          .filterWhere((eb) =>
+            eb.and([
+              eb('task_identifier', '=', name),
+              eb('locked_by', 'is', null),
+              eb('run_at', '>', eb.fn<Date>('now')),
+            ]),
+          )
+          .as('delayed'),
+        eb.fn
+          .countAll<number>()
+          .filterWhere((eb) => eb.and([eb('task_identifier', '=', name), eb('attempts', '>=', eb.ref('max_attempts'))]))
+          .as('failed'),
+      ])
+      .executeTakeFirstOrThrow();
+  }
+
+  async getQueueStatus(queueName: QueueName): Promise<QueueStatus> {
+    const state = await this.systemMetadataRepository.get(SystemMetadataKey.QUEUES_STATE);
+    return { paused: state?.[queueName]?.paused ?? false };
+  }
+
+  private getJobSpec(item: JobItem): AddJobsJobSpec {
+    const identifier = (this.handlers[item.name] as JobMapItem).queueName;
     switch (item.name) {
       case JobName.NOTIFY_ALBUM_UPDATE: {
-        return { jobId: item.data.id, delay: item.data?.delay };
+        return {
+          identifier,
+          payload: item,
+          jobKey: item.data.id,
+          runAt: item.data?.delay ? new Date(Date.now() + item.data.delay) : undefined,
+        };
       }
       case JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE: {
-        return { jobId: item.data.id };
+        return { identifier, payload: item, jobKey: QueueName.STORAGE_TEMPLATE_MIGRATION };
       }
       case JobName.GENERATE_PERSON_THUMBNAIL: {
-        return { priority: 1 };
+        return { identifier, payload: item, priority: 1 };
       }
       case JobName.QUEUE_FACIAL_RECOGNITION: {
-        return { jobId: JobName.QUEUE_FACIAL_RECOGNITION };
+        return { identifier, payload: item, jobKey: JobName.QUEUE_FACIAL_RECOGNITION };
       }
       default: {
-        return null;
+        return { identifier, payload: item };
       }
     }
   }
-
-  private getQueue(queue: QueueName): Queue {
-    return this.moduleRef.get<Queue>(getQueueToken(queue), { strict: false });
-  }
-
-  public async removeJob(jobId: string, name: JobName): Promise<IEntityJob | undefined> {
-    const existingJob = await this.getQueue(this.getQueueName(name)).getJob(jobId);
-    if (!existingJob) {
-      return;
-    }
-    try {
-      await existingJob.remove();
-    } catch (error: any) {
-      if (error.message?.includes('Missing key for job')) {
-        return;
-      }
-      throw error;
-    }
-    return existingJob.data;
-  }
 }
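For orientation, the rewritten repository follows graphile-worker's two halves: run() starts a runner that polls Postgres and executes a task list, and makeWorkerUtils() provides the enqueue side (addJob/addJobs). A minimal standalone sketch with an assumed connection string; the queue and job names here are illustrative, not Immich's:

import { makeWorkerUtils, run } from 'graphile-worker';

const connectionString = 'postgres:///immich'; // assumed

const main = async () => {
  // Worker side: one task identifier per queue; the payload carries the
  // concrete job name and data, mirroring JobRepository.start() above.
  const runner = await run({
    connectionString,
    concurrency: 2,
    taskList: {
      exampleQueue: async (payload) => {
        console.log('job received', payload);
      },
    },
  });

  // Producer side, mirroring JobRepository.queueAll().
  const utils = await makeWorkerUtils({ connectionString });
  await utils.addJob('exampleQueue', { name: 'example-job', data: { id: '42' } });

  await runner.promise; // keep processing until stopped
};

main().catch((error) => {
  console.error(error);
  process.exit(1);
});

The getJobCounts query above maps graphile-worker's columns onto the old BullMQ states: locked_by set means active, run_at in the past means waiting, run_at in the future means delayed, and attempts >= max_attempts means failed.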
@@ -6,7 +6,7 @@ import { AssetFaces, DB, FaceSearch, Person } from 'src/db';
 import { ChunkedArray, DummyValue, GenerateSql } from 'src/decorators';
 import { AssetFileType, SourceType } from 'src/enum';
 import { removeUndefinedKeys } from 'src/utils/database';
-import { PaginationOptions } from 'src/utils/pagination';
+import { paginationHelper, PaginationOptions } from 'src/utils/pagination';

 export interface PersonSearchOptions {
   minimumFaceCount: number;
@@ -200,11 +200,7 @@ export class PersonRepository {
       .limit(pagination.take + 1)
       .execute();

-    if (items.length > pagination.take) {
-      return { items: items.slice(0, -1), hasNextPage: true };
-    }
-
-    return { items, hasNextPage: false };
+    return paginationHelper(items, pagination.take);
   }

   @GenerateSql()
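The helper consolidates the fetch-one-extra-row idiom used throughout these repositories: query take + 1 rows, and the presence of the extra row is the next-page signal. The real implementation lives in src/utils/pagination and is not shown in this diff; a sketch of what it plausibly does:

// Sketch only; the actual helper is in src/utils/pagination.
export const paginationHelper = <T>(items: T[], take: number) => {
  const hasNextPage = items.length > take;
  return { items: hasNextPage ? items.slice(0, take) : items, hasNextPage };
};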
@@ -8,41 +8,10 @@ import { MapAsset } from 'src/dtos/asset-response.dto';
 import { AssetStatus, AssetType } from 'src/enum';
 import { ConfigRepository } from 'src/repositories/config.repository';
 import { anyUuid, asUuid, searchAssetBuilder, vectorIndexQuery } from 'src/utils/database';
+import { paginationHelper } from 'src/utils/pagination';
 import { isValidInteger } from 'src/validation';

-export interface SearchResult<T> {
-  /** total matches */
-  total: number;
-  /** collection size */
-  count: number;
-  /** current page */
-  page: number;
-  /** items for page */
-  items: T[];
-  /** score */
-  distances: number[];
-  facets: SearchFacet[];
-}
-
-export interface SearchFacet {
-  fieldName: string;
-  counts: Array<{
-    count: number;
-    value: string;
-  }>;
-}
-
-export type SearchExploreItemSet<T> = Array<{
-  value: string;
-  data: T;
-}>;
-
-export interface SearchExploreItem<T> {
-  fieldName: string;
-  items: SearchExploreItemSet<T>;
-}
-
-export interface SearchAssetIDOptions {
+export interface SearchAssetIdOptions {
   checksum?: Buffer;
   deviceAssetId?: string;
   id?: string;
@@ -54,7 +23,7 @@ export interface SearchUserIdOptions {
   userIds?: string[];
 }

-export type SearchIdOptions = SearchAssetIDOptions & SearchUserIdOptions;
+export type SearchIdOptions = SearchAssetIdOptions & SearchUserIdOptions;

 export interface SearchStatusOptions {
   isArchived?: boolean;
@@ -144,8 +113,6 @@ type BaseAssetSearchOptions = SearchDateOptions &

 export type AssetSearchOptions = BaseAssetSearchOptions & SearchRelationOptions;

-export type AssetSearchOneToOneRelationOptions = BaseAssetSearchOptions & SearchOneToOneRelationOptions;
-
 export type AssetSearchBuilderOptions = Omit<AssetSearchOptions, 'orderDirection'>;

 export type SmartSearchOptions = SearchDateOptions &
@@ -226,9 +193,8 @@ export class SearchRepository {
       .limit(pagination.size + 1)
       .offset((pagination.page - 1) * pagination.size)
       .execute();
-    const hasNextPage = items.length > pagination.size;
-    items.splice(pagination.size);
-    return { items, hasNextPage };
+    return paginationHelper(items, pagination.size);
   }

   @GenerateSql({
@@ -283,9 +249,7 @@ export class SearchRepository {
       .offset((pagination.page - 1) * pagination.size)
       .execute();

-    const hasNextPage = items.length > pagination.size;
-    items.splice(pagination.size);
-    return { items, hasNextPage };
+    return paginationHelper(items, pagination.size);
   }

   @GenerateSql({
@@ -4,7 +4,6 @@ import { MetricOptions } from '@opentelemetry/api';
 import { AsyncLocalStorageContextManager } from '@opentelemetry/context-async-hooks';
 import { PrometheusExporter } from '@opentelemetry/exporter-prometheus';
 import { HttpInstrumentation } from '@opentelemetry/instrumentation-http';
-import { IORedisInstrumentation } from '@opentelemetry/instrumentation-ioredis';
 import { NestInstrumentation } from '@opentelemetry/instrumentation-nestjs-core';
 import { PgInstrumentation } from '@opentelemetry/instrumentation-pg';
 import { resourceFromAttributes } from '@opentelemetry/resources';
@@ -68,12 +67,7 @@ export const bootstrapTelemetry = (port: number) => {
     }),
     metricReader: new PrometheusExporter({ port }),
     contextManager: new AsyncLocalStorageContextManager(),
-    instrumentations: [
-      new HttpInstrumentation(),
-      new IORedisInstrumentation(),
-      new NestInstrumentation(),
-      new PgInstrumentation(),
-    ],
+    instrumentations: [new HttpInstrumentation(), new NestInstrumentation(), new PgInstrumentation()],
     views: [
       {
         instrumentName: '*',
@@ -1,6 +1,6 @@
|
|||||||
import { BadRequestException } from '@nestjs/common';
|
import { BadRequestException } from '@nestjs/common';
|
||||||
import { DateTime } from 'luxon';
|
import { DateTime } from 'luxon';
|
||||||
import { MapAsset, mapAsset } from 'src/dtos/asset-response.dto';
|
import { MapAsset } from 'src/dtos/asset-response.dto';
|
||||||
import { AssetJobName, AssetStatsResponseDto } from 'src/dtos/asset.dto';
|
import { AssetJobName, AssetStatsResponseDto } from 'src/dtos/asset.dto';
|
||||||
import { AssetStatus, AssetType, JobName, JobStatus } from 'src/enum';
|
import { AssetStatus, AssetType, JobName, JobStatus } from 'src/enum';
|
||||||
import { AssetStats } from 'src/repositories/asset.repository';
|
import { AssetStats } from 'src/repositories/asset.repository';
|
||||||
@@ -11,7 +11,6 @@ import { faceStub } from 'test/fixtures/face.stub';
|
|||||||
import { userStub } from 'test/fixtures/user.stub';
|
import { userStub } from 'test/fixtures/user.stub';
|
||||||
import { factory } from 'test/small.factory';
|
import { factory } from 'test/small.factory';
|
||||||
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
|
import { makeStream, newTestService, ServiceMocks } from 'test/utils';
|
||||||
import { vitest } from 'vitest';
|
|
||||||
|
|
||||||
const stats: AssetStats = {
|
const stats: AssetStats = {
|
||||||
[AssetType.IMAGE]: 10,
|
[AssetType.IMAGE]: 10,
|
||||||
@@ -44,62 +43,6 @@ describe(AssetService.name, () => {
|
|||||||
mockGetById([assetStub.livePhotoStillAsset, assetStub.livePhotoMotionAsset]);
|
mockGetById([assetStub.livePhotoStillAsset, assetStub.livePhotoMotionAsset]);
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('getMemoryLane', () => {
|
|
||||||
beforeAll(() => {
|
|
||||||
vitest.useFakeTimers();
|
|
||||||
vitest.setSystemTime(new Date('2024-01-15'));
|
|
||||||
});
|
|
||||||
|
|
||||||
afterAll(() => {
|
|
||||||
vitest.useRealTimers();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should group the assets correctly', async () => {
|
|
||||||
const image1 = { ...assetStub.image, localDateTime: new Date(2023, 1, 15, 0, 0, 0) };
|
|
||||||
const image2 = { ...assetStub.image, localDateTime: new Date(2023, 1, 15, 1, 0, 0) };
|
|
||||||
const image3 = { ...assetStub.image, localDateTime: new Date(2015, 1, 15) };
|
|
||||||
const image4 = { ...assetStub.image, localDateTime: new Date(2009, 1, 15) };
|
|
||||||
|
|
||||||
mocks.partner.getAll.mockResolvedValue([]);
|
|
||||||
mocks.asset.getByDayOfYear.mockResolvedValue([
|
|
||||||
{
|
|
||||||
year: 2023,
|
|
||||||
assets: [image1, image2],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
year: 2015,
|
|
||||||
assets: [image3],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
year: 2009,
|
|
||||||
assets: [image4],
|
|
||||||
},
|
|
||||||
] as any);
|
|
||||||
|
|
||||||
await expect(sut.getMemoryLane(authStub.admin, { day: 15, month: 1 })).resolves.toEqual([
|
|
||||||
{ yearsAgo: 1, title: '1 year ago', assets: [mapAsset(image1), mapAsset(image2)] },
|
|
||||||
{ yearsAgo: 9, title: '9 years ago', assets: [mapAsset(image3)] },
|
|
||||||
{ yearsAgo: 15, title: '15 years ago', assets: [mapAsset(image4)] },
|
|
||||||
]);
|
|
||||||
|
|
||||||
expect(mocks.asset.getByDayOfYear.mock.calls).toEqual([[[authStub.admin.user.id], { day: 15, month: 1 }]]);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should get memories with partners with inTimeline enabled', async () => {
|
|
||||||
const partner = factory.partner();
|
|
||||||
const auth = factory.auth({ user: { id: partner.sharedWithId } });
|
|
||||||
|
|
||||||
mocks.partner.getAll.mockResolvedValue([partner]);
|
|
||||||
mocks.asset.getByDayOfYear.mockResolvedValue([]);
|
|
||||||
|
|
||||||
await sut.getMemoryLane(auth, { day: 15, month: 1 });
|
|
||||||
|
|
||||||
expect(mocks.asset.getByDayOfYear.mock.calls).toEqual([
|
|
||||||
[[auth.user.id, partner.sharedById], { day: 15, month: 1 }],
|
|
||||||
]);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('getStatistics', () => {
|
describe('getStatistics', () => {
|
||||||
it('should get the statistics for a user, excluding archived assets', async () => {
|
it('should get the statistics for a user, excluding archived assets', async () => {
|
||||||
mocks.asset.getStatistics.mockResolvedValue(stats);
|
mocks.asset.getStatistics.mockResolvedValue(stats);
|
||||||
|
@@ -3,13 +3,7 @@ import _ from 'lodash';
 import { DateTime, Duration } from 'luxon';
 import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
 import { OnJob } from 'src/decorators';
-import {
-  AssetResponseDto,
-  MapAsset,
-  MemoryLaneResponseDto,
-  SanitizedAssetResponseDto,
-  mapAsset,
-} from 'src/dtos/asset-response.dto';
+import { AssetResponseDto, MapAsset, SanitizedAssetResponseDto, mapAsset } from 'src/dtos/asset-response.dto';
 import {
   AssetBulkDeleteDto,
   AssetBulkUpdateDto,
@@ -20,7 +14,6 @@ import {
   mapStats,
 } from 'src/dtos/asset.dto';
 import { AuthDto } from 'src/dtos/auth.dto';
-import { MemoryLaneDto } from 'src/dtos/search.dto';
 import { AssetStatus, JobName, JobStatus, Permission, QueueName } from 'src/enum';
 import { BaseService } from 'src/services/base.service';
 import { ISidecarWriteJob, JobItem, JobOf } from 'src/types';
@@ -28,26 +21,6 @@ import { getAssetFiles, getMyPartnerIds, onAfterUnlink, onBeforeLink, onBeforeUn

 @Injectable()
 export class AssetService extends BaseService {
-  async getMemoryLane(auth: AuthDto, dto: MemoryLaneDto): Promise<MemoryLaneResponseDto[]> {
-    const partnerIds = await getMyPartnerIds({
-      userId: auth.user.id,
-      repository: this.partnerRepository,
-      timelineEnabled: true,
-    });
-    const userIds = [auth.user.id, ...partnerIds];
-
-    const groups = await this.assetRepository.getByDayOfYear(userIds, dto);
-    return groups.map(({ year, assets }) => {
-      const yearsAgo = DateTime.utc().year - year;
-      return {
-        yearsAgo,
-        // TODO move this to clients
-        title: `${yearsAgo} year${yearsAgo > 1 ? 's' : ''} ago`,
-        assets: assets.map((asset) => mapAsset(asset, { auth })),
-      };
-    });
-  }
-
   async getStatistics(auth: AuthDto, dto: AssetStatsDto) {
     const stats = await this.assetRepository.getStatistics(auth.user.id, dto);
     return mapStats(stats);
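Note: the deleted getMemoryLane carried a "// TODO move this to clients" against the title formatting, so removing the endpoint also retires that server-side presentation logic. A hypothetical client-side equivalent of the yearsAgo/title mapping (names invented for illustration):

// Hypothetical client-side stand-in for the removed server-side formatting.
type MemoryGroup = { year: number; assets: unknown[] };

const toMemoryLane = (groups: MemoryGroup[], nowYear = new Date().getUTCFullYear()) =>
  groups.map(({ year, assets }) => {
    const yearsAgo = nowYear - year;
    return { yearsAgo, title: `${yearsAgo} year${yearsAgo > 1 ? 's' : ''} ago`, assets };
  });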
@@ -1,6 +1,4 @@
-import { BadRequestException } from '@nestjs/common';
-import { FileReportItemDto } from 'src/dtos/audit.dto';
-import { AssetFileType, AssetPathType, JobStatus, PersonPathType, UserPathType } from 'src/enum';
+import { JobStatus } from 'src/enum';
 import { AuditService } from 'src/services/audit.service';
 import { newTestService, ServiceMocks } from 'test/utils';

@@ -25,148 +23,4 @@ describe(AuditService.name, () => {
       expect(mocks.audit.removeBefore).toHaveBeenCalledWith(expect.any(Date));
     });
   });
-
-  describe('getChecksums', () => {
-    it('should fail if the file is not in the immich path', async () => {
-      await expect(sut.getChecksums({ filenames: ['foo/bar'] })).rejects.toBeInstanceOf(BadRequestException);
-
-      expect(mocks.crypto.hashFile).not.toHaveBeenCalled();
-    });
-
-    it('should get checksum for valid file', async () => {
-      await expect(sut.getChecksums({ filenames: ['./upload/my-file.jpg'] })).resolves.toEqual([
-        { filename: './upload/my-file.jpg', checksum: expect.any(String) },
-      ]);
-
-      expect(mocks.crypto.hashFile).toHaveBeenCalledWith('./upload/my-file.jpg');
-    });
-  });
-
-  describe('fixItems', () => {
-    it('should fail if the file is not in the immich path', async () => {
-      await expect(
-        sut.fixItems([
-          { entityId: 'my-id', pathType: AssetPathType.ORIGINAL, pathValue: 'foo/bar' } as FileReportItemDto,
-        ]),
-      ).rejects.toBeInstanceOf(BadRequestException);
-
-      expect(mocks.asset.update).not.toHaveBeenCalled();
-      expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
-      expect(mocks.person.update).not.toHaveBeenCalled();
-      expect(mocks.user.update).not.toHaveBeenCalled();
-    });
-
-    it('should update encoded video path', async () => {
-      await sut.fixItems([
-        {
-          entityId: 'my-id',
-          pathType: AssetPathType.ENCODED_VIDEO,
-          pathValue: './upload/my-video.mp4',
-        } as FileReportItemDto,
-      ]);
-
-      expect(mocks.asset.update).toHaveBeenCalledWith({ id: 'my-id', encodedVideoPath: './upload/my-video.mp4' });
-      expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
-      expect(mocks.person.update).not.toHaveBeenCalled();
-      expect(mocks.user.update).not.toHaveBeenCalled();
-    });
-
-    it('should update preview path', async () => {
-      await sut.fixItems([
-        {
-          entityId: 'my-id',
-          pathType: AssetPathType.PREVIEW,
-          pathValue: './upload/my-preview.png',
-        } as FileReportItemDto,
-      ]);
-
-      expect(mocks.asset.upsertFile).toHaveBeenCalledWith({
-        assetId: 'my-id',
-        type: AssetFileType.PREVIEW,
-        path: './upload/my-preview.png',
-      });
-      expect(mocks.asset.update).not.toHaveBeenCalled();
-      expect(mocks.person.update).not.toHaveBeenCalled();
-      expect(mocks.user.update).not.toHaveBeenCalled();
-    });
-
-    it('should update thumbnail path', async () => {
-      await sut.fixItems([
-        {
-          entityId: 'my-id',
-          pathType: AssetPathType.THUMBNAIL,
-          pathValue: './upload/my-thumbnail.webp',
-        } as FileReportItemDto,
-      ]);
-
-      expect(mocks.asset.upsertFile).toHaveBeenCalledWith({
-        assetId: 'my-id',
-        type: AssetFileType.THUMBNAIL,
-        path: './upload/my-thumbnail.webp',
-      });
-      expect(mocks.asset.update).not.toHaveBeenCalled();
-      expect(mocks.person.update).not.toHaveBeenCalled();
-      expect(mocks.user.update).not.toHaveBeenCalled();
-    });
-
-    it('should update original path', async () => {
-      await sut.fixItems([
-        {
-          entityId: 'my-id',
-          pathType: AssetPathType.ORIGINAL,
-          pathValue: './upload/my-original.png',
-        } as FileReportItemDto,
-      ]);
-
-      expect(mocks.asset.update).toHaveBeenCalledWith({ id: 'my-id', originalPath: './upload/my-original.png' });
-      expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
-      expect(mocks.person.update).not.toHaveBeenCalled();
-      expect(mocks.user.update).not.toHaveBeenCalled();
-    });
-
-    it('should update sidecar path', async () => {
-      await sut.fixItems([
-        {
-          entityId: 'my-id',
-          pathType: AssetPathType.SIDECAR,
-          pathValue: './upload/my-sidecar.xmp',
-        } as FileReportItemDto,
-      ]);
-
-      expect(mocks.asset.update).toHaveBeenCalledWith({ id: 'my-id', sidecarPath: './upload/my-sidecar.xmp' });
-      expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
-      expect(mocks.person.update).not.toHaveBeenCalled();
-      expect(mocks.user.update).not.toHaveBeenCalled();
-    });
-
-    it('should update face path', async () => {
-      await sut.fixItems([
-        {
-          entityId: 'my-id',
-          pathType: PersonPathType.FACE,
-          pathValue: './upload/my-face.jpg',
-        } as FileReportItemDto,
-      ]);
-
-      expect(mocks.person.update).toHaveBeenCalledWith({ id: 'my-id', thumbnailPath: './upload/my-face.jpg' });
-      expect(mocks.asset.update).not.toHaveBeenCalled();
-      expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
-      expect(mocks.user.update).not.toHaveBeenCalled();
-    });
-
-    it('should update profile path', async () => {
-      await sut.fixItems([
-        {
-          entityId: 'my-id',
-          pathType: UserPathType.PROFILE,
-          pathValue: './upload/my-profile-pic.jpg',
-        } as FileReportItemDto,
-      ]);
-
-      expect(mocks.user.update).toHaveBeenCalledWith('my-id', { profileImagePath: './upload/my-profile-pic.jpg' });
-      expect(mocks.asset.update).not.toHaveBeenCalled();
-      expect(mocks.asset.upsertFile).not.toHaveBeenCalled();
-      expect(mocks.person.update).not.toHaveBeenCalled();
-    });
-  });
 });
@@ -1,23 +1,9 @@
-import { BadRequestException, Injectable } from '@nestjs/common';
+import { Injectable } from '@nestjs/common';
 import { DateTime } from 'luxon';
-import { resolve } from 'node:path';
-import { AUDIT_LOG_MAX_DURATION, JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
-import { StorageCore } from 'src/cores/storage.core';
+import { AUDIT_LOG_MAX_DURATION } from 'src/constants';
 import { OnJob } from 'src/decorators';
-import { FileChecksumDto, FileChecksumResponseDto, FileReportItemDto, PathEntityType } from 'src/dtos/audit.dto';
-import {
-  AssetFileType,
-  AssetPathType,
-  JobName,
-  JobStatus,
-  PersonPathType,
-  QueueName,
-  StorageFolder,
-  UserPathType,
-} from 'src/enum';
+import { JobName, JobStatus, QueueName } from 'src/enum';
 import { BaseService } from 'src/services/base.service';
-import { getAssetFiles } from 'src/utils/asset.util';
-import { usePagination } from 'src/utils/pagination';

 @Injectable()
 export class AuditService extends BaseService {
@@ -26,187 +12,4 @@ export class AuditService extends BaseService {
     await this.auditRepository.removeBefore(DateTime.now().minus(AUDIT_LOG_MAX_DURATION).toJSDate());
     return JobStatus.SUCCESS;
   }
-
-  async getChecksums(dto: FileChecksumDto) {
-    const results: FileChecksumResponseDto[] = [];
-    for (const filename of dto.filenames) {
-      if (!StorageCore.isImmichPath(filename)) {
-        throw new BadRequestException(
-          `Could not get the checksum of ${filename} because the file isn't accessible by Immich`,
-        );
-      }
-
-      const checksum = await this.cryptoRepository.hashFile(filename);
-      results.push({ filename, checksum: checksum.toString('base64') });
-    }
-    return results;
-  }
-
-  async fixItems(items: FileReportItemDto[]) {
-    for (const { entityId: id, pathType, pathValue } of items) {
-      if (!StorageCore.isImmichPath(pathValue)) {
-        throw new BadRequestException(
-          `Could not fix item ${id} with path ${pathValue} because the file isn't accessible by Immich`,
-        );
-      }
-
-      switch (pathType) {
-        case AssetPathType.ENCODED_VIDEO: {
-          await this.assetRepository.update({ id, encodedVideoPath: pathValue });
-          break;
-        }
-
-        case AssetPathType.PREVIEW: {
-          await this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.PREVIEW, path: pathValue });
-          break;
-        }
-
-        case AssetPathType.THUMBNAIL: {
-          await this.assetRepository.upsertFile({ assetId: id, type: AssetFileType.THUMBNAIL, path: pathValue });
-          break;
-        }
-
-        case AssetPathType.ORIGINAL: {
-          await this.assetRepository.update({ id, originalPath: pathValue });
-          break;
-        }
-
-        case AssetPathType.SIDECAR: {
-          await this.assetRepository.update({ id, sidecarPath: pathValue });
-          break;
-        }
-
-        case PersonPathType.FACE: {
-          await this.personRepository.update({ id, thumbnailPath: pathValue });
-          break;
-        }
-
-        case UserPathType.PROFILE: {
-          await this.userRepository.update(id, { profileImagePath: pathValue });
-          break;
-        }
-      }
-    }
-  }
-
-  private fullPath(filename: string) {
-    return resolve(filename);
-  }
-
-  async getFileReport() {
-    const hasFile = (items: Set<string>, filename: string) => items.has(filename) || items.has(this.fullPath(filename));
-    const crawl = async (folder: StorageFolder) =>
-      new Set(
-        await this.storageRepository.crawl({
-          includeHidden: true,
-          pathsToCrawl: [StorageCore.getBaseFolder(folder)],
-        }),
-      );
-
-    const uploadFiles = await crawl(StorageFolder.UPLOAD);
-    const libraryFiles = await crawl(StorageFolder.LIBRARY);
-    const thumbFiles = await crawl(StorageFolder.THUMBNAILS);
-    const videoFiles = await crawl(StorageFolder.ENCODED_VIDEO);
-    const profileFiles = await crawl(StorageFolder.PROFILE);
-    const allFiles = new Set<string>();
-    for (const list of [libraryFiles, thumbFiles, videoFiles, profileFiles, uploadFiles]) {
-      for (const item of list) {
-        allFiles.add(item);
-      }
-    }
-
-    const track = (filename: string | null | undefined) => {
-      if (!filename) {
-        return;
-      }
-      allFiles.delete(filename);
-      allFiles.delete(this.fullPath(filename));
-    };
-
-    this.logger.log(
-      `Found ${libraryFiles.size} original files, ${thumbFiles.size} thumbnails, ${videoFiles.size} encoded videos, ${profileFiles.size} profile files`,
-    );
-    const pagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (options) =>
-      this.assetRepository.getAll(options, { withDeleted: true, withArchived: true }),
-    );
-
-    let assetCount = 0;
-
-    const orphans: FileReportItemDto[] = [];
-    for await (const assets of pagination) {
-      assetCount += assets.length;
-      for (const { id, files, originalPath, encodedVideoPath, isExternal, checksum } of assets) {
-        const { fullsizeFile, previewFile, thumbnailFile } = getAssetFiles(files);
-        for (const file of [
-          originalPath,
-          fullsizeFile?.path,
-          previewFile?.path,
-          encodedVideoPath,
-          thumbnailFile?.path,
-        ]) {
-          track(file);
-        }
-
-        const entity = { entityId: id, entityType: PathEntityType.ASSET, checksum: checksum.toString('base64') };
-        if (
-          originalPath &&
-          !hasFile(libraryFiles, originalPath) &&
-          !hasFile(uploadFiles, originalPath) &&
-          // Android motion assets
-          !hasFile(videoFiles, originalPath) &&
-          // ignore external library assets
-          !isExternal
-        ) {
-          orphans.push({ ...entity, pathType: AssetPathType.ORIGINAL, pathValue: originalPath });
-        }
-        if (previewFile && !hasFile(thumbFiles, previewFile.path)) {
-          orphans.push({ ...entity, pathType: AssetPathType.PREVIEW, pathValue: previewFile.path });
-        }
-        if (thumbnailFile && !hasFile(thumbFiles, thumbnailFile.path)) {
-          orphans.push({ ...entity, pathType: AssetPathType.THUMBNAIL, pathValue: thumbnailFile.path });
-        }
-        if (encodedVideoPath && !hasFile(videoFiles, encodedVideoPath)) {
-          orphans.push({ ...entity, pathType: AssetPathType.THUMBNAIL, pathValue: encodedVideoPath });
-        }
-      }
-    }
-
-    const users = await this.userRepository.getList();
-    for (const { id, profileImagePath } of users) {
-      track(profileImagePath);
-
-      const entity = { entityId: id, entityType: PathEntityType.USER };
-      if (profileImagePath && !hasFile(profileFiles, profileImagePath)) {
-        orphans.push({ ...entity, pathType: UserPathType.PROFILE, pathValue: profileImagePath });
-      }
-    }
-
-    let peopleCount = 0;
-    for await (const { id, thumbnailPath } of this.personRepository.getAll()) {
-      track(thumbnailPath);
-      const entity = { entityId: id, entityType: PathEntityType.PERSON };
-      if (thumbnailPath && !hasFile(thumbFiles, thumbnailPath)) {
-        orphans.push({ ...entity, pathType: PersonPathType.FACE, pathValue: thumbnailPath });
-      }
-
-      if (peopleCount === JOBS_ASSET_PAGINATION_SIZE) {
-        this.logger.log(`Found ${assetCount} assets, ${users.length} users, ${peopleCount} people`);
-        peopleCount = 0;
-      }
-    }
-
-    this.logger.log(`Found ${assetCount} assets, ${users.length} users, ${peopleCount} people`);
-
-    const extras: string[] = [];
-    for (const file of allFiles) {
-      extras.push(file);
-    }
-
-    // send as absolute paths
-    for (const orphan of orphans) {
-      orphan.pathValue = this.fullPath(orphan.pathValue);
-    }
-
-    return { orphans, extras };
-  }
 }
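Note: at its core, the removed getFileReport was a two-way set reconciliation: crawl every storage folder into a set, delete each path the database still references, and report the leftovers as extras, while tracked paths the crawl never produced become orphans. A condensed sketch of that idea, detached from the repositories it used:

// Condensed sketch of the reconciliation the deleted getFileReport performed.
const reconcile = (filesOnDisk: Iterable<string>, filesInDb: Iterable<string>) => {
  const extras = new Set(filesOnDisk);
  const orphans: string[] = [];
  for (const path of filesInDb) {
    // Set.delete returns false when a tracked path was never on disk: an orphan.
    if (!extras.delete(path)) {
      orphans.push(path);
    }
  }
  return { orphans, extras: [...extras] }; // whatever survives in the set has no DB record
};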
@@ -1,10 +1,9 @@
 import { AssetFileType, AssetType, JobName, JobStatus } from 'src/enum';
-import { WithoutProperty } from 'src/repositories/asset.repository';
 import { DuplicateService } from 'src/services/duplicate.service';
 import { SearchService } from 'src/services/search.service';
 import { assetStub } from 'test/fixtures/asset.stub';
 import { authStub } from 'test/fixtures/auth.stub';
-import { newTestService, ServiceMocks } from 'test/utils';
+import { makeStream, newTestService, ServiceMocks } from 'test/utils';
 import { beforeEach, vitest } from 'vitest';

 vitest.useFakeTimers();
@@ -113,14 +112,11 @@ describe(SearchService.name, () => {
   });

   it('should queue missing assets', async () => {
-    mocks.asset.getWithout.mockResolvedValue({
-      items: [assetStub.image],
-      hasNextPage: false,
-    });
+    mocks.assetJob.streamForSearchDuplicates.mockReturnValue(makeStream([assetStub.image]));

     await sut.handleQueueSearchDuplicates({});

-    expect(mocks.asset.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.DUPLICATE);
+    expect(mocks.assetJob.streamForSearchDuplicates).toHaveBeenCalledWith(undefined);
     expect(mocks.job.queueAll).toHaveBeenCalledWith([
       {
         name: JobName.DUPLICATE_DETECTION,
@@ -130,14 +126,11 @@ describe(SearchService.name, () => {
   });

   it('should queue all assets', async () => {
-    mocks.asset.getAll.mockResolvedValue({
-      items: [assetStub.image],
-      hasNextPage: false,
-    });
+    mocks.assetJob.streamForSearchDuplicates.mockReturnValue(makeStream([assetStub.image]));

     await sut.handleQueueSearchDuplicates({ force: true });

-    expect(mocks.asset.getAll).toHaveBeenCalled();
+    expect(mocks.assetJob.streamForSearchDuplicates).toHaveBeenCalledWith(true);
     expect(mocks.job.queueAll).toHaveBeenCalledWith([
       {
         name: JobName.DUPLICATE_DETECTION,
@@ -4,14 +4,11 @@ import { OnJob } from 'src/decorators';
 import { mapAsset } from 'src/dtos/asset-response.dto';
 import { AuthDto } from 'src/dtos/auth.dto';
 import { DuplicateResponseDto } from 'src/dtos/duplicate.dto';
-import { AssetFileType, JobName, JobStatus, QueueName } from 'src/enum';
-import { WithoutProperty } from 'src/repositories/asset.repository';
+import { JobName, JobStatus, QueueName } from 'src/enum';
 import { AssetDuplicateResult } from 'src/repositories/search.repository';
 import { BaseService } from 'src/services/base.service';
-import { JobOf } from 'src/types';
-import { getAssetFile } from 'src/utils/asset.util';
+import { JobItem, JobOf } from 'src/types';
 import { isDuplicateDetectionEnabled } from 'src/utils/misc';
-import { usePagination } from 'src/utils/pagination';

 @Injectable()
 export class DuplicateService extends BaseService {
@@ -30,18 +27,22 @@ export class DuplicateService extends BaseService {
       return JobStatus.SKIPPED;
     }

-    const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
-      return force
-        ? this.assetRepository.getAll(pagination, { isVisible: true })
-        : this.assetRepository.getWithout(pagination, WithoutProperty.DUPLICATE);
-    });
+    let jobs: JobItem[] = [];
+    const queueAll = async () => {
+      await this.jobRepository.queueAll(jobs);
+      jobs = [];
+    };

-    for await (const assets of assetPagination) {
-      await this.jobRepository.queueAll(
-        assets.map((asset) => ({ name: JobName.DUPLICATE_DETECTION, data: { id: asset.id } })),
-      );
+    const assets = this.assetJobRepository.streamForSearchDuplicates(force);
+    for await (const asset of assets) {
+      jobs.push({ name: JobName.DUPLICATE_DETECTION, data: { id: asset.id } });
+      if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
+        await queueAll();
+      }
     }

+    await queueAll();
+
     return JobStatus.SUCCESS;
   }
@@ -58,49 +59,6 @@ export class DuplicateService extends BaseService {
       return JobStatus.FAILED;
     }

-    if (asset.stackId) {
-      this.logger.debug(`Asset ${id} is part of a stack, skipping`);
-      return JobStatus.SKIPPED;
-    }
-
-    if (!asset.isVisible) {
-      this.logger.debug(`Asset ${id} is not visible, skipping`);
-      return JobStatus.SKIPPED;
-    }
-
-    const previewFile = getAssetFile(asset.files || [], AssetFileType.PREVIEW);
-    if (!previewFile) {
-      this.logger.warn(`Asset ${id} is missing preview image`);
-      return JobStatus.FAILED;
-    }
-
-    if (!asset.embedding) {
-      this.logger.debug(`Asset ${id} is missing embedding`);
-      return JobStatus.FAILED;
-    }
-
-    const duplicateAssets = await this.searchRepository.searchDuplicates({
-      assetId: asset.id,
-      embedding: asset.embedding,
-      maxDistance: machineLearning.duplicateDetection.maxDistance,
-      type: asset.type,
-      userIds: [asset.ownerId],
-    });
-
-    let assetIds = [asset.id];
-    if (duplicateAssets.length > 0) {
-      this.logger.debug(
-        `Found ${duplicateAssets.length} duplicate${duplicateAssets.length === 1 ? '' : 's'} for asset ${asset.id}`,
-      );
-      assetIds = await this.updateDuplicates(asset, duplicateAssets);
-    } else if (asset.duplicateId) {
-      this.logger.debug(`No duplicates found for asset ${asset.id}, removing duplicateId`);
-      await this.assetRepository.update({ id: asset.id, duplicateId: null });
-    }
-
-    const duplicatesDetectedAt = new Date();
-    await this.assetRepository.upsertJobStatus(...assetIds.map((assetId) => ({ assetId, duplicatesDetectedAt })));
-
     return JobStatus.SUCCESS;
   }

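Note: the rewritten queue handlers all repeat one pattern: drain an async stream, buffer job items, flush every JOBS_ASSET_PAGINATION_SIZE entries, and flush once more at the end so the final partial batch is not lost. A generic sketch of that batching helper (not part of the diff; the services inline it instead):

// Generic form of the stream-to-batches pattern the services now inline.
async function* inBatches<T>(source: AsyncIterable<T>, size: number): AsyncGenerator<T[]> {
  let batch: T[] = [];
  for await (const item of source) {
    batch.push(item);
    if (batch.length >= size) {
      yield batch;
      batch = [];
    }
  }
  if (batch.length > 0) {
    yield batch; // final partial batch
  }
}

// Usage against the inlined version above:
// for await (const jobs of inBatches(assetStream, JOBS_ASSET_PAGINATION_SIZE)) {
//   await jobRepository.queueAll(jobs);
// }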
@@ -2,7 +2,7 @@ import { BadRequestException } from '@nestjs/common';
 import { defaults, SystemConfig } from 'src/config';
 import { ImmichWorker, JobCommand, JobName, JobStatus, QueueName } from 'src/enum';
 import { JobService } from 'src/services/job.service';
-import { JobItem } from 'src/types';
+import { JobCounts, JobItem } from 'src/types';
 import { assetStub } from 'test/fixtures/asset.stub';
 import { newTestService, ServiceMocks } from 'test/utils';

@@ -21,14 +21,14 @@ describe(JobService.name, () => {
   });

   describe('onConfigUpdate', () => {
-    it('should update concurrency', () => {
-      sut.onConfigUpdate({ newConfig: defaults, oldConfig: {} as SystemConfig });
+    it('should update concurrency', async () => {
+      await sut.onConfigUpdate({ newConfig: defaults, oldConfig: {} as SystemConfig });

-      expect(mocks.job.setConcurrency).toHaveBeenCalledTimes(15);
-      expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(5, QueueName.FACIAL_RECOGNITION, 1);
-      expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(7, QueueName.DUPLICATE_DETECTION, 1);
-      expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(8, QueueName.BACKGROUND_TASK, 5);
-      expect(mocks.job.setConcurrency).toHaveBeenNthCalledWith(9, QueueName.STORAGE_TEMPLATE_MIGRATION, 1);
+      expect(mocks.job.start).toHaveBeenCalledTimes(15);
+      expect(mocks.job.start).toHaveBeenNthCalledWith(5, QueueName.FACIAL_RECOGNITION, 1);
+      expect(mocks.job.start).toHaveBeenNthCalledWith(7, QueueName.DUPLICATE_DETECTION, 1);
+      expect(mocks.job.start).toHaveBeenNthCalledWith(8, QueueName.BACKGROUND_TASK, 5);
+      expect(mocks.job.start).toHaveBeenNthCalledWith(9, QueueName.STORAGE_TEMPLATE_MIGRATION, 1);
     });
   });

@@ -55,29 +55,20 @@ describe(JobService.name, () => {
     it('should get all job statuses', async () => {
       mocks.job.getJobCounts.mockResolvedValue({
         active: 1,
-        completed: 1,
-        failed: 1,
-        delayed: 1,
         waiting: 1,
-        paused: 1,
-      });
-      mocks.job.getQueueStatus.mockResolvedValue({
-        isActive: true,
-        isPaused: true,
+        delayed: 1,
+        failed: 1,
       });

       const expectedJobStatus = {
         jobCounts: {
           active: 1,
-          completed: 1,
+          waiting: 1,
           delayed: 1,
           failed: 1,
-          waiting: 1,
-          paused: 1,
         },
         queueStatus: {
-          isActive: true,
-          isPaused: true,
+          paused: true,
         },
       };

@@ -114,14 +105,20 @@ describe(JobService.name, () => {
       expect(mocks.job.resume).toHaveBeenCalledWith(QueueName.METADATA_EXTRACTION);
     });

-    it('should handle an empty command', async () => {
-      await sut.handleCommand(QueueName.METADATA_EXTRACTION, { command: JobCommand.EMPTY, force: false });
+    it('should handle a clear command', async () => {
+      await sut.handleCommand(QueueName.METADATA_EXTRACTION, { command: JobCommand.CLEAR, force: false });

-      expect(mocks.job.empty).toHaveBeenCalledWith(QueueName.METADATA_EXTRACTION);
+      expect(mocks.job.clear).toHaveBeenCalledWith(QueueName.METADATA_EXTRACTION);
+    });
+
+    it('should handle a clear-failed command', async () => {
+      await sut.handleCommand(QueueName.METADATA_EXTRACTION, { command: JobCommand.CLEAR_FAILED, force: false });
+
+      expect(mocks.job.clearFailed).toHaveBeenCalledWith(QueueName.METADATA_EXTRACTION);
     });

     it('should not start a job that is already running', async () => {
-      mocks.job.getQueueStatus.mockResolvedValue({ isActive: true, isPaused: false });
+      mocks.job.getJobCounts.mockResolvedValue({ active: 1 } as JobCounts);

       await expect(
         sut.handleCommand(QueueName.VIDEO_CONVERSION, { command: JobCommand.START, force: false }),
@@ -132,7 +129,7 @@ describe(JobService.name, () => {
     });

     it('should handle a start video conversion command', async () => {
-      mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
+      mocks.job.getJobCounts.mockResolvedValue({ active: 0 } as JobCounts);

       await sut.handleCommand(QueueName.VIDEO_CONVERSION, { command: JobCommand.START, force: false });

@@ -140,7 +137,7 @@ describe(JobService.name, () => {
     });

     it('should handle a start storage template migration command', async () => {
-      mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
+      mocks.job.getJobCounts.mockResolvedValue({ active: 0 } as JobCounts);

       await sut.handleCommand(QueueName.STORAGE_TEMPLATE_MIGRATION, { command: JobCommand.START, force: false });

@@ -148,7 +145,7 @@ describe(JobService.name, () => {
     });

     it('should handle a start smart search command', async () => {
-      mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
+      mocks.job.getJobCounts.mockResolvedValue({ active: 0 } as JobCounts);

       await sut.handleCommand(QueueName.SMART_SEARCH, { command: JobCommand.START, force: false });

@@ -156,7 +153,7 @@ describe(JobService.name, () => {
     });

     it('should handle a start metadata extraction command', async () => {
-      mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
+      mocks.job.getJobCounts.mockResolvedValue({ active: 0 } as JobCounts);

       await sut.handleCommand(QueueName.METADATA_EXTRACTION, { command: JobCommand.START, force: false });

@@ -164,7 +161,7 @@ describe(JobService.name, () => {
     });

     it('should handle a start sidecar command', async () => {
-      mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
+      mocks.job.getJobCounts.mockResolvedValue({ active: 0 } as JobCounts);

       await sut.handleCommand(QueueName.SIDECAR, { command: JobCommand.START, force: false });

@@ -172,7 +169,7 @@ describe(JobService.name, () => {
     });

     it('should handle a start thumbnail generation command', async () => {
-      mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
+      mocks.job.getJobCounts.mockResolvedValue({ active: 0 } as JobCounts);

       await sut.handleCommand(QueueName.THUMBNAIL_GENERATION, { command: JobCommand.START, force: false });

@@ -180,7 +177,7 @@ describe(JobService.name, () => {
     });

     it('should handle a start face detection command', async () => {
-      mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
+      mocks.job.getJobCounts.mockResolvedValue({ active: 0 } as JobCounts);

       await sut.handleCommand(QueueName.FACE_DETECTION, { command: JobCommand.START, force: false });

@@ -188,7 +185,7 @@ describe(JobService.name, () => {
     });

     it('should handle a start facial recognition command', async () => {
-      mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
+      mocks.job.getJobCounts.mockResolvedValue({ active: 0 } as JobCounts);

       await sut.handleCommand(QueueName.FACIAL_RECOGNITION, { command: JobCommand.START, force: false });

@@ -196,7 +193,7 @@ describe(JobService.name, () => {
     });

     it('should handle a start backup database command', async () => {
-      mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
+      mocks.job.getJobCounts.mockResolvedValue({ active: 0 } as JobCounts);

       await sut.handleCommand(QueueName.BACKUP_DATABASE, { command: JobCommand.START, force: false });

@@ -204,7 +201,7 @@ describe(JobService.name, () => {
     });

     it('should throw a bad request when an invalid queue is used', async () => {
-      mocks.job.getQueueStatus.mockResolvedValue({ isActive: false, isPaused: false });
+      mocks.job.getJobCounts.mockResolvedValue({ active: 0 } as JobCounts);

       await expect(
         sut.handleCommand(QueueName.BACKGROUND_TASK, { command: JobCommand.START, force: false }),
@@ -5,14 +5,12 @@ import { OnEvent } from 'src/decorators';
 import { mapAsset } from 'src/dtos/asset-response.dto';
 import { AllJobStatusResponseDto, JobCommandDto, JobCreateDto, JobStatusDto } from 'src/dtos/job.dto';
 import {
-  AssetType,
   BootstrapEventPriority,
   ImmichWorker,
   JobCommand,
   JobName,
   JobStatus,
   ManualJobName,
-  QueueCleanType,
   QueueName,
 } from 'src/enum';
 import { ArgOf, ArgsOf } from 'src/repositories/event.repository';
@@ -56,7 +54,7 @@ export class JobService extends BaseService {
   private services: ClassConstructor<unknown>[] = [];

   @OnEvent({ name: 'config.init', workers: [ImmichWorker.MICROSERVICES] })
-  onConfigInit({ newConfig: config }: ArgOf<'config.init'>) {
+  async onConfigInit({ newConfig: config }: ArgOf<'config.init'>) {
     this.logger.debug(`Updating queue concurrency settings`);
     for (const queueName of Object.values(QueueName)) {
       let concurrency = 1;
@@ -64,21 +62,18 @@ export class JobService extends BaseService {
         concurrency = config.job[queueName].concurrency;
       }
       this.logger.debug(`Setting ${queueName} concurrency to ${concurrency}`);
-      this.jobRepository.setConcurrency(queueName, concurrency);
+      await this.jobRepository.start(queueName, concurrency);
     }
   }

   @OnEvent({ name: 'config.update', server: true, workers: [ImmichWorker.MICROSERVICES] })
-  onConfigUpdate({ newConfig: config }: ArgOf<'config.update'>) {
-    this.onConfigInit({ newConfig: config });
+  async onConfigUpdate({ newConfig: config }: ArgOf<'config.update'>) {
+    await this.onConfigInit({ newConfig: config });
   }

   @OnEvent({ name: 'app.bootstrap', priority: BootstrapEventPriority.JobService })
-  onBootstrap() {
-    this.jobRepository.setup(this.services);
-    if (this.worker === ImmichWorker.MICROSERVICES) {
-      this.jobRepository.startWorkers();
-    }
+  async onBootstrap() {
+    await this.jobRepository.setup(this.services);
   }

   setServices(services: ClassConstructor<unknown>[]) {
@@ -97,25 +92,20 @@ export class JobService extends BaseService {
         await this.start(queueName, dto);
         break;
       }

       case JobCommand.PAUSE: {
-        await this.jobRepository.pause(queueName);
+        this.eventRepository.serverSend('queue.pause', queueName);
         break;
       }

       case JobCommand.RESUME: {
-        await this.jobRepository.resume(queueName);
+        this.eventRepository.serverSend('queue.resume', queueName);
         break;
       }

-      case JobCommand.EMPTY: {
-        await this.jobRepository.empty(queueName);
+      case JobCommand.CLEAR: {
+        await this.jobRepository.clear(queueName);
         break;
       }

       case JobCommand.CLEAR_FAILED: {
-        const failedJobs = await this.jobRepository.clear(queueName, QueueCleanType.FAILED);
-        this.logger.debug(`Cleared failed jobs: ${failedJobs}`);
+        await this.jobRepository.clearFailed(queueName);
         break;
       }
     }
@@ -140,10 +130,10 @@ export class JobService extends BaseService {
     return response;
   }

-  private async start(name: QueueName, { force }: JobCommandDto): Promise<void> {
-    const { isActive } = await this.jobRepository.getQueueStatus(name);
-    if (isActive) {
-      throw new BadRequestException(`Job is already running`);
+  private async start(name: QueueName, { force }: JobCommandDto): Promise<unknown> {
+    const { active } = await this.jobRepository.getJobCounts(name);
+    if (active > 0) {
+      throw new BadRequestException(`Jobs are already running`);
     }

     this.telemetryRepository.jobs.addToCounter(`immich.queues.${snakeCase(name)}.started`, 1);
@@ -203,6 +193,16 @@ export class JobService extends BaseService {
     }
   }

+  @OnEvent({ name: 'queue.pause', server: true, workers: [ImmichWorker.MICROSERVICES] })
+  async pause(...[queueName]: ArgsOf<'queue.pause'>): Promise<void> {
+    await this.jobRepository.pause(queueName);
+  }
+
+  @OnEvent({ name: 'queue.resume', server: true, workers: [ImmichWorker.MICROSERVICES] })
+  async resume(...[queueName]: ArgsOf<'queue.resume'>): Promise<void> {
+    await this.jobRepository.resume(queueName);
+  }
+
   @OnEvent({ name: 'job.start' })
   async onJobStart(...[queueName, job]: ArgsOf<'job.start'>) {
     const queueMetric = `immich.queues.${snakeCase(queueName)}.active`;
@@ -305,12 +305,9 @@ export class JobService extends BaseService {
     const jobs: JobItem[] = [
       { name: JobName.SMART_SEARCH, data: item.data },
       { name: JobName.FACE_DETECTION, data: item.data },
+      { name: JobName.VIDEO_CONVERSION, data: item.data },
     ];

-    if (asset.type === AssetType.VIDEO) {
-      jobs.push({ name: JobName.VIDEO_CONVERSION, data: item.data });
-    }
-
     await this.jobRepository.queueAll(jobs);
     if (asset.isVisible) {
       this.eventRepository.clientSend('on_upload_success', asset.ownerId, mapAsset(asset));
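Note: pause and resume now travel as server events rather than direct repository calls: handleCommand, which may run on the API worker, emits queue.pause/queue.resume, and the microservices worker that owns the queue runners applies them through the new @OnEvent handlers. A deliberately simplified sketch of that round trip (the real fan-out is eventRepository.serverSend; the bus below is invented for illustration):

// Simplified illustration of command -> server event -> worker-side handler.
const bus = new Map<string, (queueName: string) => Promise<void>>();

// Worker side (stands in for the new @OnEvent('queue.pause'/'queue.resume') handlers):
bus.set('queue.pause', async (queueName) => console.log(`pause ${queueName} locally`));
bus.set('queue.resume', async (queueName) => console.log(`resume ${queueName} locally`));

// API side of handleCommand just broadcasts instead of touching the queue directly:
const serverSend = async (event: string, queueName: string) => bus.get(event)?.(queueName);
await serverSend('queue.pause', 'metadataExtraction');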
@@ -273,7 +273,6 @@ describe(LibraryService.name, () => {

       mocks.library.get.mockResolvedValue(library);
       mocks.storage.walk.mockImplementation(async function* generator() {});
-      mocks.asset.getAll.mockResolvedValue({ items: [assetStub.external], hasNextPage: false });
       mocks.asset.getLibraryAssetCount.mockResolvedValue(1);
       mocks.asset.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: BigInt(1) });
@@ -292,7 +291,6 @@ describe(LibraryService.name, () => {

       mocks.library.get.mockResolvedValue(library);
       mocks.storage.walk.mockImplementation(async function* generator() {});
-      mocks.asset.getAll.mockResolvedValue({ items: [assetStub.external], hasNextPage: false });
       mocks.asset.getLibraryAssetCount.mockResolvedValue(0);
       mocks.asset.detectOfflineExternalAssets.mockResolvedValue({ numUpdatedRows: BigInt(1) });

@@ -38,10 +38,6 @@ describe(MediaService.name, () => {
   describe('handleQueueGenerateThumbnails', () => {
     it('should queue all assets', async () => {
       mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([assetStub.image]));
-      mocks.asset.getAll.mockResolvedValue({
-        items: [assetStub.image],
-        hasNextPage: false,
-      });

       mocks.person.getAll.mockReturnValue(makeStream([personStub.newThumbnail]));
       mocks.person.getFacesByIds.mockResolvedValue([faceStub.face1]);
@@ -67,10 +63,6 @@ describe(MediaService.name, () => {

     it('should queue trashed assets when force is true', async () => {
       mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([assetStub.archived]));
-      mocks.asset.getAll.mockResolvedValue({
-        items: [assetStub.trashed],
-        hasNextPage: false,
-      });
       mocks.person.getAll.mockReturnValue(makeStream());

       await sut.handleQueueGenerateThumbnails({ force: true });
@@ -171,7 +163,7 @@ describe(MediaService.name, () => {

   describe('handleQueueMigration', () => {
     it('should remove empty directories and queue jobs', async () => {
-      mocks.asset.getAll.mockResolvedValue({ hasNextPage: false, items: [assetStub.image] });
+      mocks.assetJob.streamForMigrationJob.mockReturnValue(makeStream([assetStub.image]));
       mocks.job.getJobCounts.mockResolvedValue({ active: 1, waiting: 0 } as JobCounts);
       mocks.person.getAll.mockReturnValue(makeStream([personStub.withName]));
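Note: across these specs, mockResolvedValue({ items, hasNextPage }) gives way to mockReturnValue(makeStream([...])) because the repositories now expose async iterables instead of pages. makeStream is a helper from test/utils; a plausible implementation, assuming it simply lifts an array into an async generator:

// Plausible shape of the makeStream test helper (assumption, not the verbatim util).
async function* makeStream<T>(items: T[] = []): AsyncGenerator<T> {
  for (const item of items) {
    yield item;
  }
}

// Lets a vitest mock stand in for a streaming repository method:
// mocks.assetJob.streamForThumbnailJob.mockReturnValue(makeStream([assetStub.image]));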
|||||||
@@ -5,24 +5,19 @@ import { Exif } from 'src/database';
|
|||||||
import { OnEvent, OnJob } from 'src/decorators';
|
import { OnEvent, OnJob } from 'src/decorators';
|
||||||
import { SystemConfigFFmpegDto } from 'src/dtos/system-config.dto';
|
import { SystemConfigFFmpegDto } from 'src/dtos/system-config.dto';
|
||||||
import {
|
import {
|
||||||
AssetFileType,
|
|
||||||
AssetPathType,
|
AssetPathType,
|
||||||
AssetType,
|
|
||||||
AudioCodec,
|
AudioCodec,
|
||||||
Colorspace,
|
Colorspace,
|
||||||
JobName,
|
JobName,
|
||||||
JobStatus,
|
JobStatus,
|
||||||
LogLevel,
|
|
||||||
QueueName,
|
QueueName,
|
||||||
RawExtractedFormat,
|
RawExtractedFormat,
|
||||||
StorageFolder,
|
StorageFolder,
|
||||||
TranscodeHWAccel,
|
|
||||||
TranscodePolicy,
|
TranscodePolicy,
|
||||||
TranscodeTarget,
|
TranscodeTarget,
|
||||||
VideoCodec,
|
VideoCodec,
|
||||||
VideoContainer,
|
VideoContainer
|
||||||
} from 'src/enum';
|
} from 'src/enum';
|
||||||
import { UpsertFileOptions } from 'src/repositories/asset.repository';
|
|
||||||
import { BaseService } from 'src/services/base.service';
|
import { BaseService } from 'src/services/base.service';
|
||||||
import {
|
import {
|
||||||
AudioStreamInfo,
|
AudioStreamInfo,
|
||||||
@@ -34,9 +29,8 @@ import {
|
|||||||
VideoStreamInfo,
|
VideoStreamInfo,
|
||||||
} from 'src/types';
|
} from 'src/types';
|
||||||
import { getAssetFiles } from 'src/utils/asset.util';
|
import { getAssetFiles } from 'src/utils/asset.util';
|
||||||
import { BaseConfig, ThumbnailConfig } from 'src/utils/media';
|
import { ThumbnailConfig } from 'src/utils/media';
|
||||||
import { mimeTypes } from 'src/utils/mime-types';
|
import { mimeTypes } from 'src/utils/mime-types';
|
||||||
import { usePagination } from 'src/utils/pagination';
|
|
||||||
|
|
||||||
@Injectable()
|
@Injectable()
|
||||||
export class MediaService extends BaseService {
|
export class MediaService extends BaseService {
|
||||||
@@ -50,16 +44,23 @@ export class MediaService extends BaseService {
|
|||||||
|
|
||||||
@OnJob({ name: JobName.QUEUE_GENERATE_THUMBNAILS, queue: QueueName.THUMBNAIL_GENERATION })
|
@OnJob({ name: JobName.QUEUE_GENERATE_THUMBNAILS, queue: QueueName.THUMBNAIL_GENERATION })
|
||||||
async handleQueueGenerateThumbnails({ force }: JobOf<JobName.QUEUE_GENERATE_THUMBNAILS>): Promise<JobStatus> {
|
async handleQueueGenerateThumbnails({ force }: JobOf<JobName.QUEUE_GENERATE_THUMBNAILS>): Promise<JobStatus> {
|
||||||
const thumbJobs: JobItem[] = [];
|
for (let i = 0; i < 10; i++) {
|
||||||
for await (const asset of this.assetJobRepository.streamForThumbnailJob(!!force)) {
|
let thumbJobs: JobItem[] = [];
|
||||||
const { previewFile, thumbnailFile } = getAssetFiles(asset.files);
|
for await (const asset of this.assetJobRepository.streamForThumbnailJob(!!force)) {
|
||||||
|
const { previewFile, thumbnailFile } = getAssetFiles(asset.files);
|
||||||
|
|
||||||
if (!previewFile || !thumbnailFile || !asset.thumbhash || force) {
|
if (!previewFile || !thumbnailFile || !asset.thumbhash || force) {
|
||||||
thumbJobs.push({ name: JobName.GENERATE_THUMBNAILS, data: { id: asset.id } });
|
thumbJobs.push({ name: JobName.GENERATE_THUMBNAILS, data: { id: asset.id } });
|
||||||
continue;
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (thumbJobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
|
||||||
|
await this.jobRepository.queueAll(thumbJobs);
|
||||||
|
thumbJobs = [];
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
await this.jobRepository.queueAll(thumbJobs);
|
||||||
}
|
}
|
||||||
await this.jobRepository.queueAll(thumbJobs);
|
|
||||||
|
|
||||||
const jobs: JobItem[] = [];
|
const jobs: JobItem[] = [];
|
||||||
|
|
||||||
@@ -85,23 +86,24 @@ export class MediaService extends BaseService {
|
|||||||
|
|
||||||
@OnJob({ name: JobName.QUEUE_MIGRATION, queue: QueueName.MIGRATION })
|
@OnJob({ name: JobName.QUEUE_MIGRATION, queue: QueueName.MIGRATION })
|
||||||
async handleQueueMigration(): Promise<JobStatus> {
|
async handleQueueMigration(): Promise<JobStatus> {
|
||||||
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
|
|
||||||
this.assetRepository.getAll(pagination),
|
|
||||||
);
|
|
||||||
|
|
||||||
const { active, waiting } = await this.jobRepository.getJobCounts(QueueName.MIGRATION);
|
const { active, waiting } = await this.jobRepository.getJobCounts(QueueName.MIGRATION);
|
||||||
if (active === 1 && waiting === 0) {
|
if (active === 1 && waiting === 0) {
|
||||||
await this.storageCore.removeEmptyDirs(StorageFolder.THUMBNAILS);
|
await this.storageCore.removeEmptyDirs(StorageFolder.THUMBNAILS);
|
||||||
await this.storageCore.removeEmptyDirs(StorageFolder.ENCODED_VIDEO);
|
await this.storageCore.removeEmptyDirs(StorageFolder.ENCODED_VIDEO);
|
||||||
}
|
}
|
||||||
|
|
||||||
for await (const assets of assetPagination) {
|
let jobs: JobItem[] = [];
|
||||||
await this.jobRepository.queueAll(
|
const assets = this.assetJobRepository.streamForMigrationJob();
|
||||||
assets.map((asset) => ({ name: JobName.MIGRATE_ASSET, data: { id: asset.id } })),
|
for await (const asset of assets) {
|
||||||
);
|
jobs.push({ name: JobName.MIGRATE_ASSET, data: { id: asset.id } });
|
||||||
|
if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
|
||||||
|
await this.jobRepository.queueAll(jobs);
|
||||||
|
jobs = [];
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let jobs: { name: JobName.MIGRATE_PERSON; data: { id: string } }[] = [];
|
await this.jobRepository.queueAll(jobs);
|
||||||
|
jobs = [];
|
||||||
|
|
||||||
for await (const person of this.personRepository.getAll()) {
|
for await (const person of this.personRepository.getAll()) {
|
||||||
jobs.push({ name: JobName.MIGRATE_PERSON, data: { id: person.id } });
|
jobs.push({ name: JobName.MIGRATE_PERSON, data: { id: person.id } });
|
||||||
@@ -140,75 +142,6 @@ export class MediaService extends BaseService {
       this.logger.warn(`Thumbnail generation failed for asset ${id}: not found`);
       return JobStatus.FAILED;
     }
 
-    if (!asset.isVisible) {
-      this.logger.verbose(`Thumbnail generation skipped for asset ${id}: not visible`);
-      return JobStatus.SKIPPED;
-    }
-
-    let generated: {
-      previewPath: string;
-      thumbnailPath: string;
-      fullsizePath?: string;
-      thumbhash: Buffer;
-    };
-    if (asset.type === AssetType.VIDEO || asset.originalFileName.toLowerCase().endsWith('.gif')) {
-      generated = await this.generateVideoThumbnails(asset);
-    } else if (asset.type === AssetType.IMAGE) {
-      generated = await this.generateImageThumbnails(asset);
-    } else {
-      this.logger.warn(`Skipping thumbnail generation for asset ${id}: ${asset.type} is not an image or video`);
-      return JobStatus.SKIPPED;
-    }
-
-    const { previewFile, thumbnailFile, fullsizeFile } = getAssetFiles(asset.files);
-    const toUpsert: UpsertFileOptions[] = [];
-    if (previewFile?.path !== generated.previewPath) {
-      toUpsert.push({ assetId: asset.id, path: generated.previewPath, type: AssetFileType.PREVIEW });
-    }
-
-    if (thumbnailFile?.path !== generated.thumbnailPath) {
-      toUpsert.push({ assetId: asset.id, path: generated.thumbnailPath, type: AssetFileType.THUMBNAIL });
-    }
-
-    if (generated.fullsizePath && fullsizeFile?.path !== generated.fullsizePath) {
-      toUpsert.push({ assetId: asset.id, path: generated.fullsizePath, type: AssetFileType.FULLSIZE });
-    }
-
-    if (toUpsert.length > 0) {
-      await this.assetRepository.upsertFiles(toUpsert);
-    }
-
-    const pathsToDelete: string[] = [];
-    if (previewFile && previewFile.path !== generated.previewPath) {
-      this.logger.debug(`Deleting old preview for asset ${asset.id}`);
-      pathsToDelete.push(previewFile.path);
-    }
-
-    if (thumbnailFile && thumbnailFile.path !== generated.thumbnailPath) {
-      this.logger.debug(`Deleting old thumbnail for asset ${asset.id}`);
-      pathsToDelete.push(thumbnailFile.path);
-    }
-
-    if (fullsizeFile && fullsizeFile.path !== generated.fullsizePath) {
-      this.logger.debug(`Deleting old fullsize preview image for asset ${asset.id}`);
-      pathsToDelete.push(fullsizeFile.path);
-      if (!generated.fullsizePath) {
-        // did not generate a new fullsize image, delete the existing record
-        await this.assetRepository.deleteFiles([fullsizeFile]);
-      }
-    }
-
-    if (pathsToDelete.length > 0) {
-      await Promise.all(pathsToDelete.map((path) => this.storageRepository.unlink(path)));
-    }
-
-    if (!asset.thumbhash || Buffer.compare(asset.thumbhash, generated.thumbhash) !== 0) {
-      await this.assetRepository.update({ id: asset.id, thumbhash: generated.thumbhash });
-    }
-
-    await this.assetRepository.upsertJobStatus({ assetId: asset.id, previewAt: new Date(), thumbnailAt: new Date() });
-
     return JobStatus.SUCCESS;
   }
 
@@ -353,81 +286,6 @@ export class MediaService extends BaseService {
     if (!asset) {
       return JobStatus.FAILED;
     }
 
-    const input = asset.originalPath;
-    const output = StorageCore.getEncodedVideoPath(asset);
-    this.storageCore.ensureFolders(output);
-
-    const { videoStreams, audioStreams, format } = await this.mediaRepository.probe(input, {
-      countFrames: this.logger.isLevelEnabled(LogLevel.DEBUG), // makes frame count more reliable for progress logs
-    });
-    const videoStream = this.getMainStream(videoStreams);
-    const audioStream = this.getMainStream(audioStreams);
-    if (!videoStream || !format.formatName) {
-      return JobStatus.FAILED;
-    }
-
-    if (!videoStream.height || !videoStream.width) {
-      this.logger.warn(`Skipped transcoding for asset ${asset.id}: no video streams found`);
-      return JobStatus.FAILED;
-    }
-
-    let { ffmpeg } = await this.getConfig({ withCache: true });
-    const target = this.getTranscodeTarget(ffmpeg, videoStream, audioStream);
-    if (target === TranscodeTarget.NONE && !this.isRemuxRequired(ffmpeg, format)) {
-      if (asset.encodedVideoPath) {
-        this.logger.log(`Transcoded video exists for asset ${asset.id}, but is no longer required. Deleting...`);
-        await this.jobRepository.queue({ name: JobName.DELETE_FILES, data: { files: [asset.encodedVideoPath] } });
-        await this.assetRepository.update({ id: asset.id, encodedVideoPath: null });
-      } else {
-        this.logger.verbose(`Asset ${asset.id} does not require transcoding based on current policy, skipping`);
-      }
-
-      return JobStatus.SKIPPED;
-    }
-
-    const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
-    if (ffmpeg.accel === TranscodeHWAccel.DISABLED) {
-      this.logger.log(`Transcoding video ${asset.id} without hardware acceleration`);
-    } else {
-      this.logger.log(
-        `Transcoding video ${asset.id} with ${ffmpeg.accel.toUpperCase()}-accelerated encoding and${ffmpeg.accelDecode ? '' : ' software'} decoding`,
-      );
-    }
-
-    try {
-      await this.mediaRepository.transcode(input, output, command);
-    } catch (error: any) {
-      this.logger.error(`Error occurred during transcoding: ${error.message}`);
-      if (ffmpeg.accel === TranscodeHWAccel.DISABLED) {
-        return JobStatus.FAILED;
-      }
-
-      let partialFallbackSuccess = false;
-      if (ffmpeg.accelDecode) {
-        try {
-          this.logger.error(`Retrying with ${ffmpeg.accel.toUpperCase()}-accelerated encoding and software decoding`);
-          ffmpeg = { ...ffmpeg, accelDecode: false };
-          const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
-          await this.mediaRepository.transcode(input, output, command);
-          partialFallbackSuccess = true;
-        } catch (error: any) {
-          this.logger.error(`Error occurred during transcoding: ${error.message}`);
-        }
-      }
-
-      if (!partialFallbackSuccess) {
-        this.logger.error(`Retrying with ${ffmpeg.accel.toUpperCase()} acceleration disabled`);
-        ffmpeg = { ...ffmpeg, accel: TranscodeHWAccel.DISABLED };
-        const command = BaseConfig.create(ffmpeg, this.videoInterfaces).getCommand(target, videoStream, audioStream);
-        await this.mediaRepository.transcode(input, output, command);
-      }
-    }
-
-    this.logger.log(`Successfully encoded ${asset.id}`);
-
-    await this.assetRepository.update({ id: asset.id, encodedVideoPath: output });
-
     return JobStatus.SUCCESS;
   }
 
@@ -5,7 +5,6 @@ import { constants } from 'node:fs/promises';
 import { defaults } from 'src/config';
 import { MapAsset } from 'src/dtos/asset-response.dto';
 import { AssetType, ExifOrientation, ImmichWorker, JobName, JobStatus, SourceType } from 'src/enum';
-import { WithoutProperty } from 'src/repositories/asset.repository';
 import { ImmichTags } from 'src/repositories/metadata.repository';
 import { MetadataService } from 'src/services/metadata.service';
 import { assetStub } from 'test/fixtures/asset.stub';
@@ -68,16 +67,12 @@ describe(MetadataService.name, () => {
   });
 
   describe('onBootstrapEvent', () => {
-    it('should pause and resume queue during init', async () => {
-      mocks.job.pause.mockResolvedValue();
+    it('should init', async () => {
       mocks.map.init.mockResolvedValue();
-      mocks.job.resume.mockResolvedValue();
 
       await sut.onBootstrap();
 
-      expect(mocks.job.pause).toHaveBeenCalledTimes(1);
       expect(mocks.map.init).toHaveBeenCalledTimes(1);
-      expect(mocks.job.resume).toHaveBeenCalledTimes(1);
     });
   });
 
@@ -1346,12 +1341,11 @@ describe(MetadataService.name, () => {
 
   describe('handleQueueSidecar', () => {
     it('should queue assets with sidecar files', async () => {
-      mocks.asset.getAll.mockResolvedValue({ items: [assetStub.sidecar], hasNextPage: false });
+      mocks.assetJob.streamForSidecar.mockReturnValue(makeStream([assetStub.image]));
 
       await sut.handleQueueSidecar({ force: true });
+      expect(mocks.assetJob.streamForSidecar).toHaveBeenCalledWith(true);
 
-      expect(mocks.asset.getAll).toHaveBeenCalledWith({ take: 1000, skip: 0 });
-      expect(mocks.asset.getWithout).not.toHaveBeenCalled();
       expect(mocks.job.queueAll).toHaveBeenCalledWith([
         {
           name: JobName.SIDECAR_SYNC,
@@ -1361,12 +1355,11 @@ describe(MetadataService.name, () => {
     });
 
     it('should queue assets without sidecar files', async () => {
-      mocks.asset.getWithout.mockResolvedValue({ items: [assetStub.image], hasNextPage: false });
+      mocks.assetJob.streamForSidecar.mockReturnValue(makeStream([assetStub.image]));
 
       await sut.handleQueueSidecar({ force: false });
 
-      expect(mocks.asset.getWithout).toHaveBeenCalledWith({ take: 1000, skip: 0 }, WithoutProperty.SIDECAR);
-      expect(mocks.asset.getAll).not.toHaveBeenCalled();
+      expect(mocks.assetJob.streamForSidecar).toHaveBeenCalledWith(false);
       expect(mocks.job.queueAll).toHaveBeenCalledWith([
         {
           name: JobName.SIDECAR_DISCOVERY,
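These specs (and the person-service specs further down) replace paginated mock results with a `makeStream` helper whose definition sits outside this diff. A plausible shape, stated as an assumption rather than the repo's actual implementation, is an async generator over fixtures:

```ts
// Assumed shape of the makeStream test helper used throughout these specs.
async function* makeStream<T>(items: T[] = []): AsyncGenerator<T, void, undefined> {
  for (const item of items) {
    yield item; // each fixture arrives as if streamed from the database
  }
}
```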
@@ -22,14 +22,10 @@ import {
   QueueName,
   SourceType,
 } from 'src/enum';
-import { WithoutProperty } from 'src/repositories/asset.repository';
 import { ArgOf } from 'src/repositories/event.repository';
-import { ReverseGeocodeResult } from 'src/repositories/map.repository';
 import { ImmichTags } from 'src/repositories/metadata.repository';
 import { BaseService } from 'src/services/base.service';
-import { JobOf } from 'src/types';
-import { isFaceImportEnabled } from 'src/utils/misc';
-import { usePagination } from 'src/utils/pagination';
+import { JobItem, JobOf } from 'src/types';
 import { upsertTags } from 'src/utils/tag';
 
 /** look for a date from these tags (in order) */
@@ -123,9 +119,7 @@ export class MetadataService extends BaseService {
     this.logger.log('Initializing metadata service');
 
     try {
-      await this.jobRepository.pause(QueueName.METADATA_EXTRACTION);
       await this.databaseRepository.withLock(DatabaseLock.GeodataImport, () => this.mapRepository.init());
-      await this.jobRepository.resume(QueueName.METADATA_EXTRACTION);
 
       this.logger.log(`Initialized local reverse geocoder`);
     } catch (error: Error | any) {
@@ -169,17 +163,20 @@
   async handleQueueMetadataExtraction(job: JobOf<JobName.QUEUE_METADATA_EXTRACTION>): Promise<JobStatus> {
     const { force } = job;
 
-    let queue: { name: JobName.METADATA_EXTRACTION; data: { id: string } }[] = [];
-    for await (const asset of this.assetJobRepository.streamForMetadataExtraction(force)) {
-      queue.push({ name: JobName.METADATA_EXTRACTION, data: { id: asset.id } });
-
-      if (queue.length >= JOBS_ASSET_PAGINATION_SIZE) {
-        await this.jobRepository.queueAll(queue);
-        queue = [];
-      }
-    }
-
-    await this.jobRepository.queueAll(queue);
+    for (let i = 0; i < 10; i++) {
+      let queue: { name: JobName.METADATA_EXTRACTION; data: { id: string } }[] = [];
+      for await (const asset of this.assetJobRepository.streamForMetadataExtraction(force)) {
+        queue.push({ name: JobName.METADATA_EXTRACTION, data: { id: asset.id, source: 'upload' } as any });
+
+        if (queue.length >= JOBS_ASSET_PAGINATION_SIZE) {
+          await this.jobRepository.queueAll(queue);
+          queue = [];
+        }
+      }
+
+      await this.jobRepository.queueAll(queue);
+    }
 
     return JobStatus.SUCCESS;
   }
 
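The `for (let i = 0; i < 10; i++)` wrapper here (and in the thumbnail handler above) re-queues the entire stream ten times, which reads as a temporary stress harness for the Postgres-backed queue rather than intended production behavior. A sketch of how such a run might be timed; this helper is an assumption, not part of the diff:

```ts
// Illustrative micro-benchmark around an async job-producing function.
async function timeRuns(label: string, runs: number, fn: () => Promise<void>): Promise<void> {
  const start = performance.now();
  for (let i = 0; i < runs; i++) {
    await fn(); // e.g. one full stream-and-queue pass
  }
  const elapsed = performance.now() - start;
  console.log(`${label}: ${runs} runs in ${elapsed.toFixed(0)}ms`);
}
```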
@@ -193,119 +190,27 @@
     if (!asset) {
       return JobStatus.FAILED;
     }
 
-    const [exifTags, stats] = await Promise.all([
-      this.getExifTags(asset),
-      this.storageRepository.stat(asset.originalPath),
-    ]);
-    this.logger.verbose('Exif Tags', exifTags);
-
-    const dates = this.getDates(asset, exifTags, stats);
-
-    const { width, height } = this.getImageDimensions(exifTags);
-    let geo: ReverseGeocodeResult = { country: null, state: null, city: null },
-      latitude: number | null = null,
-      longitude: number | null = null;
-    if (this.hasGeo(exifTags)) {
-      latitude = exifTags.GPSLatitude;
-      longitude = exifTags.GPSLongitude;
-      if (reverseGeocoding.enabled) {
-        geo = await this.mapRepository.reverseGeocode({ latitude, longitude });
-      }
-    }
-
-    const exifData: Insertable<Exif> = {
-      assetId: asset.id,
-
-      // dates
-      dateTimeOriginal: dates.dateTimeOriginal,
-      modifyDate: stats.mtime,
-      timeZone: dates.timeZone,
-
-      // gps
-      latitude,
-      longitude,
-      country: geo.country,
-      state: geo.state,
-      city: geo.city,
-
-      // image/file
-      fileSizeInByte: stats.size,
-      exifImageHeight: validate(height),
-      exifImageWidth: validate(width),
-      orientation: validate(exifTags.Orientation)?.toString() ?? null,
-      projectionType: exifTags.ProjectionType ? String(exifTags.ProjectionType).toUpperCase() : null,
-      bitsPerSample: this.getBitsPerSample(exifTags),
-      colorspace: exifTags.ColorSpace ?? null,
-
-      // camera
-      make: exifTags.Make ?? exifTags?.Device?.Manufacturer ?? exifTags.AndroidMake ?? null,
-      model: exifTags.Model ?? exifTags?.Device?.ModelName ?? exifTags.AndroidModel ?? null,
-      fps: validate(Number.parseFloat(exifTags.VideoFrameRate!)),
-      iso: validate(exifTags.ISO) as number,
-      exposureTime: exifTags.ExposureTime ?? null,
-      lensModel: getLensModel(exifTags),
-      fNumber: validate(exifTags.FNumber),
-      focalLength: validate(exifTags.FocalLength),
-
-      // comments
-      description: String(exifTags.ImageDescription || exifTags.Description || '').trim(),
-      profileDescription: exifTags.ProfileDescription || null,
-      rating: validateRange(exifTags.Rating, -1, 5),
-
-      // grouping
-      livePhotoCID: (exifTags.ContentIdentifier || exifTags.MediaGroupUUID) ?? null,
-      autoStackId: this.getAutoStackId(exifTags),
-    };
-
-    const promises: Promise<unknown>[] = [
-      this.assetRepository.upsertExif(exifData),
-      this.assetRepository.update({
-        id: asset.id,
-        duration: exifTags.Duration?.toString() ?? null,
-        localDateTime: dates.localDateTime,
-        fileCreatedAt: dates.dateTimeOriginal ?? undefined,
-        fileModifiedAt: stats.mtime,
-      }),
-      this.applyTagList(asset, exifTags),
-    ];
-
-    if (this.isMotionPhoto(asset, exifTags)) {
-      promises.push(this.applyMotionPhotos(asset, exifTags, dates, stats));
-    }
-
-    if (isFaceImportEnabled(metadata) && this.hasTaggedFaces(exifTags)) {
-      promises.push(this.applyTaggedFaces(asset, exifTags));
-    }
-
-    await Promise.all(promises);
-    if (exifData.livePhotoCID) {
-      await this.linkLivePhotos(asset, exifData);
-    }
-
-    await this.assetRepository.upsertJobStatus({ assetId: asset.id, metadataExtractedAt: new Date() });
-
     return JobStatus.SUCCESS;
   }
 
   @OnJob({ name: JobName.QUEUE_SIDECAR, queue: QueueName.SIDECAR })
-  async handleQueueSidecar(job: JobOf<JobName.QUEUE_SIDECAR>): Promise<JobStatus> {
-    const { force } = job;
-    const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
-      return force
-        ? this.assetRepository.getAll(pagination)
-        : this.assetRepository.getWithout(pagination, WithoutProperty.SIDECAR);
-    });
-
-    for await (const assets of assetPagination) {
-      await this.jobRepository.queueAll(
-        assets.map((asset) => ({
-          name: force ? JobName.SIDECAR_SYNC : JobName.SIDECAR_DISCOVERY,
-          data: { id: asset.id },
-        })),
-      );
+  async handleQueueSidecar({ force }: JobOf<JobName.QUEUE_SIDECAR>): Promise<JobStatus> {
+    let jobs: JobItem[] = [];
+    const queueAll = async () => {
+      await this.jobRepository.queueAll(jobs);
+      jobs = [];
+    };
+
+    const assets = this.assetJobRepository.streamForSidecar(force);
+    for await (const asset of assets) {
+      jobs.push({ name: force ? JobName.SIDECAR_SYNC : JobName.SIDECAR_DISCOVERY, data: { id: asset.id } });
+      if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
+        await queueAll();
+      }
     }
 
+    await queueAll();
+
     return JobStatus.SUCCESS;
   }
 
@@ -429,9 +334,9 @@
           typeof tag === 'number'
             ? String(tag)
             : tag
                 .split('|')
                 .map((tag) => tag.replaceAll('/', '|'))
                 .join('/'),
         );
       } else if (exifTags.Keywords) {
         let keywords = exifTags.Keywords;
@@ -499,14 +499,13 @@ describe(NotificationService.name, () => {
     });
 
     it('should add new recipients for new images if job is already queued', async () => {
-      mocks.job.removeJob.mockResolvedValue({ id: '1', recipientIds: ['2', '3', '4'] } as INotifyAlbumUpdateJob);
       await sut.onAlbumUpdate({ id: '1', recipientIds: ['1', '2', '3'] } as INotifyAlbumUpdateJob);
       expect(mocks.job.queue).toHaveBeenCalledWith({
         name: JobName.NOTIFY_ALBUM_UPDATE,
         data: {
           id: '1',
           delay: 300_000,
-          recipientIds: ['1', '2', '3', '4'],
+          recipientIds: ['1', '2', '3'],
         },
       });
     });
@@ -196,14 +196,15 @@ export class NotificationService extends BaseService {
       data: { id, recipientIds, delay: NotificationService.albumUpdateEmailDelayMs },
     };
 
-    const previousJobData = await this.jobRepository.removeJob(id, JobName.NOTIFY_ALBUM_UPDATE);
-    if (previousJobData && this.isAlbumUpdateJob(previousJobData)) {
-      for (const id of previousJobData.recipientIds) {
-        if (!recipientIds.includes(id)) {
-          recipientIds.push(id);
-        }
-      }
-    }
+    // todo: https://github.com/immich-app/immich/pull/17879
+    // const previousJobData = await this.jobRepository.removeJob(id, JobName.NOTIFY_ALBUM_UPDATE);
+    // if (previousJobData && this.isAlbumUpdateJob(previousJobData)) {
+    //   for (const id of previousJobData.recipientIds) {
+    //     if (!recipientIds.includes(id)) {
+    //       recipientIds.push(id);
+    //     }
+    //   }
+    // }
     await this.jobRepository.queue(job);
   }
 
@@ -2,7 +2,6 @@ import { BadRequestException, NotFoundException } from '@nestjs/common';
 import { BulkIdErrorReason } from 'src/dtos/asset-ids.response.dto';
 import { mapFaces, mapPerson, PersonResponseDto } from 'src/dtos/person.dto';
 import { CacheControl, Colorspace, ImageFormat, JobName, JobStatus, SourceType, SystemMetadataKey } from 'src/enum';
-import { WithoutProperty } from 'src/repositories/asset.repository';
 import { DetectedFaces } from 'src/repositories/machine-learning.repository';
 import { FaceSearchResult } from 'src/repositories/search.repository';
 import { PersonService } from 'src/services/person.service';
@@ -455,14 +454,11 @@ describe(PersonService.name, () => {
     });
 
     it('should queue missing assets', async () => {
-      mocks.asset.getWithout.mockResolvedValue({
-        items: [assetStub.image],
-        hasNextPage: false,
-      });
+      mocks.assetJob.streamForDetectFacesJob.mockReturnValue(makeStream([assetStub.image]));
 
       await sut.handleQueueDetectFaces({ force: false });
 
-      expect(mocks.asset.getWithout).toHaveBeenCalledWith({ skip: 0, take: 1000 }, WithoutProperty.FACES);
+      expect(mocks.assetJob.streamForDetectFacesJob).toHaveBeenCalledWith(false);
       expect(mocks.job.queueAll).toHaveBeenCalledWith([
         {
           name: JobName.FACE_DETECTION,
@@ -472,10 +468,7 @@ describe(PersonService.name, () => {
     });
 
     it('should queue all assets', async () => {
-      mocks.asset.getAll.mockResolvedValue({
-        items: [assetStub.image],
-        hasNextPage: false,
-      });
+      mocks.assetJob.streamForDetectFacesJob.mockReturnValue(makeStream([assetStub.image]));
       mocks.person.getAllWithoutFaces.mockResolvedValue([personStub.withName]);
 
       await sut.handleQueueDetectFaces({ force: true });
@@ -483,7 +476,7 @@ describe(PersonService.name, () => {
       expect(mocks.person.deleteFaces).toHaveBeenCalledWith({ sourceType: SourceType.MACHINE_LEARNING });
       expect(mocks.person.delete).toHaveBeenCalledWith([personStub.withName.id]);
       expect(mocks.storage.unlink).toHaveBeenCalledWith(personStub.withName.thumbnailPath);
-      expect(mocks.asset.getAll).toHaveBeenCalled();
+      expect(mocks.assetJob.streamForDetectFacesJob).toHaveBeenCalledWith(true);
       expect(mocks.job.queueAll).toHaveBeenCalledWith([
         {
           name: JobName.FACE_DETECTION,
@@ -493,17 +486,14 @@ describe(PersonService.name, () => {
     });
 
     it('should refresh all assets', async () => {
-      mocks.asset.getAll.mockResolvedValue({
-        items: [assetStub.image],
-        hasNextPage: false,
-      });
+      mocks.assetJob.streamForDetectFacesJob.mockReturnValue(makeStream([assetStub.image]));
 
       await sut.handleQueueDetectFaces({ force: undefined });
 
       expect(mocks.person.delete).not.toHaveBeenCalled();
       expect(mocks.person.deleteFaces).not.toHaveBeenCalled();
       expect(mocks.storage.unlink).not.toHaveBeenCalled();
-      expect(mocks.asset.getAll).toHaveBeenCalled();
+      expect(mocks.assetJob.streamForDetectFacesJob).toHaveBeenCalledWith(undefined);
       expect(mocks.job.queueAll).toHaveBeenCalledWith([
         {
           name: JobName.FACE_DETECTION,
@@ -516,16 +506,13 @@ describe(PersonService.name, () => {
     it('should delete existing people and faces if forced', async () => {
       mocks.person.getAll.mockReturnValue(makeStream([faceStub.face1.person, personStub.randomPerson]));
       mocks.person.getAllFaces.mockReturnValue(makeStream([faceStub.face1]));
-      mocks.asset.getAll.mockResolvedValue({
-        items: [assetStub.image],
-        hasNextPage: false,
-      });
+      mocks.assetJob.streamForDetectFacesJob.mockReturnValue(makeStream([assetStub.image]));
       mocks.person.getAllWithoutFaces.mockResolvedValue([personStub.randomPerson]);
       mocks.person.deleteFaces.mockResolvedValue();
 
       await sut.handleQueueDetectFaces({ force: true });
 
-      expect(mocks.asset.getAll).toHaveBeenCalled();
+      expect(mocks.assetJob.streamForDetectFacesJob).toHaveBeenCalledWith(true);
       expect(mocks.job.queueAll).toHaveBeenCalledWith([
         {
           name: JobName.FACE_DETECTION,
@@ -542,10 +529,8 @@ describe(PersonService.name, () => {
       mocks.job.getJobCounts.mockResolvedValue({
         active: 1,
         waiting: 0,
-        paused: 0,
-        completed: 0,
-        failed: 0,
         delayed: 0,
+        failed: 0,
       });
       mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.machineLearningDisabled);
 
@@ -559,10 +544,8 @@ describe(PersonService.name, () => {
      mocks.job.getJobCounts.mockResolvedValue({
         active: 1,
         waiting: 1,
-        paused: 0,
-        completed: 0,
-        failed: 0,
         delayed: 0,
+        failed: 0,
       });
 
       await expect(sut.handleQueueRecognizeFaces({})).resolves.toBe(JobStatus.SKIPPED);
@@ -574,10 +557,8 @@ describe(PersonService.name, () => {
       mocks.job.getJobCounts.mockResolvedValue({
         active: 1,
         waiting: 0,
-        paused: 0,
-        completed: 0,
-        failed: 0,
         delayed: 0,
+        failed: 0,
       });
       mocks.person.getAllFaces.mockReturnValue(makeStream([faceStub.face1]));
       mocks.person.getAllWithoutFaces.mockResolvedValue([]);
@@ -603,10 +584,8 @@ describe(PersonService.name, () => {
       mocks.job.getJobCounts.mockResolvedValue({
         active: 1,
         waiting: 0,
-        paused: 0,
-        completed: 0,
-        failed: 0,
         delayed: 0,
+        failed: 0,
       });
       mocks.person.getAll.mockReturnValue(makeStream());
       mocks.person.getAllFaces.mockReturnValue(makeStream([faceStub.face1]));
@@ -632,10 +611,8 @@ describe(PersonService.name, () => {
       mocks.job.getJobCounts.mockResolvedValue({
         active: 1,
         waiting: 0,
-        paused: 0,
-        completed: 0,
-        failed: 0,
         delayed: 0,
+        failed: 0,
       });
       mocks.person.getAll.mockReturnValue(makeStream());
       mocks.person.getAllFaces.mockReturnValue(makeStream([faceStub.face1]));
@@ -679,10 +656,8 @@ describe(PersonService.name, () => {
       mocks.job.getJobCounts.mockResolvedValue({
         active: 1,
         waiting: 0,
-        paused: 0,
-        completed: 0,
-        failed: 0,
         delayed: 0,
+        failed: 0,
       });
       mocks.person.getAll.mockReturnValue(makeStream([faceStub.face1.person, personStub.randomPerson]));
       mocks.person.getAllFaces.mockReturnValue(makeStream([faceStub.face1]));
@@ -1,9 +1,7 @@
 import { BadRequestException, Injectable, NotFoundException } from '@nestjs/common';
-import { Insertable, Updateable } from 'kysely';
-import { FACE_THUMBNAIL_SIZE, JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
-import { StorageCore } from 'src/cores/storage.core';
+import { Updateable } from 'kysely';
+import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
 import { Person } from 'src/database';
-import { AssetFaces, FaceSearch } from 'src/db';
 import { Chunked, OnJob } from 'src/decorators';
 import { BulkIdErrorReason, BulkIdResponseDto } from 'src/dtos/asset-ids.response.dto';
 import { AuthDto } from 'src/dtos/auth.dto';
@@ -27,7 +25,6 @@ import {
 import {
   AssetType,
   CacheControl,
-  ImageFormat,
   JobName,
   JobStatus,
   Permission,
@@ -36,7 +33,6 @@ import {
   SourceType,
   SystemMetadataKey,
 } from 'src/enum';
-import { WithoutProperty } from 'src/repositories/asset.repository';
 import { BoundingBox } from 'src/repositories/machine-learning.repository';
 import { UpdateFacesData } from 'src/repositories/person.repository';
 import { BaseService } from 'src/services/base.service';
@@ -44,7 +40,6 @@ import { CropOptions, ImageDimensions, InputDimensions, JobItem, JobOf } from 's
 import { ImmichFileResponse } from 'src/utils/file';
 import { mimeTypes } from 'src/utils/mime-types';
 import { isFaceImportEnabled, isFacialRecognitionEnabled } from 'src/utils/misc';
-import { usePagination } from 'src/utils/pagination';
 
 @Injectable()
 export class PersonService extends BaseService {
@@ -265,23 +260,19 @@ export class PersonService extends BaseService {
       await this.handlePersonCleanup();
     }
 
-    const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) => {
-      return force === false
-        ? this.assetRepository.getWithout(pagination, WithoutProperty.FACES)
-        : this.assetRepository.getAll(pagination, {
-            orderDirection: 'desc',
-            withFaces: true,
-            withArchived: true,
-            isVisible: true,
-          });
-    });
-
-    for await (const assets of assetPagination) {
-      await this.jobRepository.queueAll(
-        assets.map((asset) => ({ name: JobName.FACE_DETECTION, data: { id: asset.id } })),
-      );
+    let jobs: JobItem[] = [];
+    const assets = this.assetJobRepository.streamForDetectFacesJob(force);
+    for await (const asset of assets) {
+      jobs.push({ name: JobName.FACE_DETECTION, data: { id: asset.id } });
+
+      if (jobs.length >= JOBS_ASSET_PAGINATION_SIZE) {
+        await this.jobRepository.queueAll(jobs);
+        jobs = [];
+      }
     }
 
+    await this.jobRepository.queueAll(jobs);
+
     if (force === undefined) {
       await this.jobRepository.queue({ name: JobName.PERSON_CLEANUP });
     }
@@ -297,79 +288,6 @@ export class PersonService extends BaseService {
     }
 
     const asset = await this.assetJobRepository.getForDetectFacesJob(id);
-    const previewFile = asset?.files[0];
-    if (!asset || asset.files.length !== 1 || !previewFile) {
-      return JobStatus.FAILED;
-    }
-
-    if (!asset.isVisible) {
-      return JobStatus.SKIPPED;
-    }
-
-    const { imageHeight, imageWidth, faces } = await this.machineLearningRepository.detectFaces(
-      machineLearning.urls,
-      previewFile.path,
-      machineLearning.facialRecognition,
-    );
-    this.logger.debug(`${faces.length} faces detected in ${previewFile.path}`);
-
-    const facesToAdd: (Insertable<AssetFaces> & { id: string })[] = [];
-    const embeddings: FaceSearch[] = [];
-    const mlFaceIds = new Set<string>();
-
-    for (const face of asset.faces) {
-      if (face.sourceType === SourceType.MACHINE_LEARNING) {
-        mlFaceIds.add(face.id);
-      }
-    }
-
-    const heightScale = imageHeight / (asset.faces[0]?.imageHeight || 1);
-    const widthScale = imageWidth / (asset.faces[0]?.imageWidth || 1);
-    for (const { boundingBox, embedding } of faces) {
-      const scaledBox = {
-        x1: boundingBox.x1 * widthScale,
-        y1: boundingBox.y1 * heightScale,
-        x2: boundingBox.x2 * widthScale,
-        y2: boundingBox.y2 * heightScale,
-      };
-      const match = asset.faces.find((face) => this.iou(face, scaledBox) > 0.5);
-
-      if (match && !mlFaceIds.delete(match.id)) {
-        embeddings.push({ faceId: match.id, embedding });
-      } else if (!match) {
-        const faceId = this.cryptoRepository.randomUUID();
-        facesToAdd.push({
-          id: faceId,
-          assetId: asset.id,
-          imageHeight,
-          imageWidth,
-          boundingBoxX1: boundingBox.x1,
-          boundingBoxY1: boundingBox.y1,
-          boundingBoxX2: boundingBox.x2,
-          boundingBoxY2: boundingBox.y2,
-        });
-        embeddings.push({ faceId, embedding });
-      }
-    }
-    const faceIdsToRemove = [...mlFaceIds];
-
-    if (facesToAdd.length > 0 || faceIdsToRemove.length > 0 || embeddings.length > 0) {
-      await this.personRepository.refreshFaces(facesToAdd, faceIdsToRemove, embeddings);
-    }
-
-    if (faceIdsToRemove.length > 0) {
-      this.logger.log(`Removed ${faceIdsToRemove.length} faces below detection threshold in asset ${id}`);
-    }
-
-    if (facesToAdd.length > 0) {
-      this.logger.log(`Detected ${facesToAdd.length} new faces in asset ${id}`);
-      const jobs = facesToAdd.map((face) => ({ name: JobName.FACIAL_RECOGNITION, data: { id: face.id } }) as const);
-      await this.jobRepository.queueAll([{ name: JobName.QUEUE_FACIAL_RECOGNITION, data: { force: false } }, ...jobs]);
-    } else if (embeddings.length > 0) {
-      this.logger.log(`Added ${embeddings.length} face embeddings for asset ${id}`);
-    }
-
-    await this.assetRepository.upsertJobStatus({ assetId: asset.id, facesRecognizedAt: new Date() });
-
     return JobStatus.SUCCESS;
   }
@@ -398,7 +316,8 @@ export class PersonService extends BaseService {
       return JobStatus.SKIPPED;
     }
 
-    await this.jobRepository.waitForQueueCompletion(QueueName.THUMBNAIL_GENERATION, QueueName.FACE_DETECTION);
+    // todo
+    // await this.jobRepository.waitForQueueCompletion(QueueName.THUMBNAIL_GENERATION, QueueName.FACE_DETECTION);
 
     if (nightly) {
       const [state, latestFaceDate] = await Promise.all([
@@ -552,26 +471,6 @@ export class PersonService extends BaseService {
       this.logger.error(`Could not generate person thumbnail for ${id}: missing data`);
       return JobStatus.FAILED;
     }
 
-    const { ownerId, x1, y1, x2, y2, oldWidth, oldHeight } = data;
-
-    const { width, height, inputPath } = await this.getInputDimensions(data);
-
-    const thumbnailPath = StorageCore.getPersonThumbnailPath({ id, ownerId });
-    this.storageCore.ensureFolders(thumbnailPath);
-
-    const thumbnailOptions = {
-      colorspace: image.colorspace,
-      format: ImageFormat.JPEG,
-      size: FACE_THUMBNAIL_SIZE,
-      quality: image.thumbnail.quality,
-      crop: this.getCrop({ old: { width: oldWidth, height: oldHeight }, new: { width, height } }, { x1, y1, x2, y2 }),
-      processInvalidImages: process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true',
-    };
-
-    await this.mediaRepository.generateThumbnail(inputPath, thumbnailOptions, thumbnailPath);
-    await this.personRepository.update({ id, thumbnailPath });
-
     return JobStatus.SUCCESS;
   }
 
@@ -15,7 +15,6 @@
   SmartSearchDto,
 } from 'src/dtos/search.dto';
 import { AssetOrder } from 'src/enum';
-import { SearchExploreItem } from 'src/repositories/search.repository';
 import { BaseService } from 'src/services/base.service';
 import { getMyPartnerIds } from 'src/utils/asset.util';
 import { isSmartSearchEnabled } from 'src/utils/misc';
@@ -32,7 +31,7 @@ export class SearchService extends BaseService {
     return places.map((place) => mapPlaces(place));
   }
 
-  async getExploreData(auth: AuthDto): Promise<SearchExploreItem<AssetResponseDto>[]> {
+  async getExploreData(auth: AuthDto) {
     const options = { maxFields: 12, minAssetsPerField: 5 };
     const cities = await this.assetRepository.getAssetIdByCity(auth.user.id, options);
     const assets = await this.assetRepository.getByIdsWithAllRelationsButStacks(cities.items.map(({ data }) => data));
@@ -151,7 +151,6 @@ describe(SmartInfoService.name, () => {
 
       await sut.handleQueueEncodeClip({});
 
-      expect(mocks.asset.getWithout).not.toHaveBeenCalled();
       expect(mocks.search.setDimensionSize).not.toHaveBeenCalled();
     });
 
@@ -103,30 +103,6 @@ export class SmartInfoService extends BaseService {
     if (!asset || asset.files.length !== 1) {
       return JobStatus.FAILED;
     }
 
-    if (!asset.isVisible) {
-      return JobStatus.SKIPPED;
-    }
-
-    const embedding = await this.machineLearningRepository.encodeImage(
-      machineLearning.urls,
-      asset.files[0].path,
-      machineLearning.clip,
-    );
-
-    if (this.databaseRepository.isBusy(DatabaseLock.CLIPDimSize)) {
-      this.logger.verbose(`Waiting for CLIP dimension size to be updated`);
-      await this.databaseRepository.wait(DatabaseLock.CLIPDimSize);
-    }
-
-    const newConfig = await this.getConfig({ withCache: true });
-    if (machineLearning.clip.modelName !== newConfig.machineLearning.clip.modelName) {
-      // Skip the job if the model has changed since the embedding was generated.
-      return JobStatus.SKIPPED;
-    }
-
-    await this.searchRepository.upsert(asset.id, embedding);
-
     return JobStatus.SUCCESS;
   }
 }
@@ -10,7 +10,6 @@ import { AssetPathType, AssetType, DatabaseLock, JobName, JobStatus, QueueName,
 import { ArgOf } from 'src/repositories/event.repository';
 import { BaseService } from 'src/services/base.service';
 import { JobOf, StorageAsset } from 'src/types';
-import { getLivePhotoMotionFilename } from 'src/utils/file';
 
 const storageTokens = {
   secondOptions: ['s', 'ss', 'SSS'],
@@ -128,21 +127,6 @@ export class StorageTemplateService extends BaseService {
     if (!asset) {
       return JobStatus.FAILED;
     }
 
-    const user = await this.userRepository.get(asset.ownerId, {});
-    const storageLabel = user?.storageLabel || null;
-    const filename = asset.originalFileName || asset.id;
-    await this.moveAsset(asset, { storageLabel, filename });
-
-    // move motion part of live photo
-    if (asset.livePhotoVideoId) {
-      const livePhotoVideo = await this.assetJobRepository.getForStorageTemplateJob(asset.livePhotoVideoId);
-      if (!livePhotoVideo) {
-        return JobStatus.FAILED;
-      }
-      const motionFilename = getLivePhotoMotionFilename(filename, livePhotoVideo.originalPath);
-      await this.moveAsset(livePhotoVideo, { storageLabel, filename: motionFilename });
-    }
-
     return JobStatus.SUCCESS;
   }
 
@@ -256,16 +256,13 @@ export interface INotifyAlbumUpdateJob extends IEntityJob, IDelayedJob {
 
 export interface JobCounts {
   active: number;
-  completed: number;
-  failed: number;
-  delayed: number;
   waiting: number;
-  paused: number;
+  delayed: number;
+  failed: number;
 }
 
 export interface QueueStatus {
-  isActive: boolean;
-  isPaused: boolean;
+  paused: boolean;
 }
 
 export type JobItem =
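`JobCounts` drops its `completed` and `paused` fields here, and `QueueStatus` collapses to a single `paused` flag, presumably the counts a Postgres-backed queue can serve cheaply. A small sketch of a consumer under these assumed shapes (the `isQueueDrained` helper is illustrative, not code from the diff):

```ts
interface JobCounts {
  active: number;
  waiting: number;
  delayed: number;
  failed: number;
}

// Generalizes the "am I the only job left?" check seen in handleQueueMigration.
const isQueueDrained = ({ active, waiting, delayed }: JobCounts): boolean =>
  active <= 1 && waiting === 0 && delayed === 0;
```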
@@ -450,6 +447,14 @@ export type MemoriesState = {
   lastOnThisDayDate: string;
 };
 
+export type QueueState = {
+  paused: boolean;
+};
+
+export type QueuesState = {
+  [key in QueueName]?: QueueState;
+};
+
 export interface SystemMetadata extends Record<SystemMetadataKey, Record<string, any>> {
   [SystemMetadataKey.ADMIN_ONBOARDING]: { isOnboarded: boolean };
   [SystemMetadataKey.FACIAL_RECOGNITION_STATE]: { lastRun?: string };
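The new `QueuesState` map keyed by `QueueName` pairs with the `SystemMetadataKey.QUEUES_STATE` entry added just below, which suggests paused flags now persist in system metadata. A sketch under that assumption, using a placeholder subset of queue names rather than the real enum:

```ts
type QueueName = 'thumbnailGeneration' | 'metadataExtraction'; // illustrative subset
type QueueState = { paused: boolean };
type QueuesState = { [key in QueueName]?: QueueState };

// Mark one queue paused while preserving the stored state of the others.
function setPaused(state: QueuesState, name: QueueName, paused: boolean): QueuesState {
  return { ...state, [name]: { ...state[name], paused } };
}
```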
@@ -459,6 +464,7 @@ export interface SystemMetadata extends Record<SystemMetadataKey, Record<string,
   [SystemMetadataKey.SYSTEM_FLAGS]: DeepPartial<SystemFlags>;
   [SystemMetadataKey.VERSION_CHECK_STATE]: VersionCheckMetadata;
   [SystemMetadataKey.MEMORIES_STATE]: MemoriesState;
+  [SystemMetadataKey.QUEUES_STATE]: QueuesState;
 }
 
 export type UserMetadataItem<T extends keyof UserMetadata = UserMetadataKey> = {
@@ -32,7 +32,7 @@ export const asPostgresConnectionConfig = (params: DatabaseConnectionParams) =>
   return {
     host: params.host,
     port: params.port,
-    username: params.username,
+    user: params.username,
     password: params.password,
     database: params.database,
     ssl: undefined,
@@ -51,7 +51,7 @@
   return {
     host: host ?? undefined,
     port: port ? Number(port) : undefined,
-    username: user,
+    user,
     password,
     database: database ?? undefined,
     ssl,
@@ -92,7 +92,7 @@ export const getKyselyConfig = (
     },
     host: config.host,
     port: config.port,
-    username: config.username,
+    user: config.user,
     password: config.password,
     database: config.database,
     ssl: config.ssl,
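The `username` to `user` rename in these three hunks is consistent with how node-postgres names its connection option; `pg.Pool` accepts `user`, and a `username` key would simply be ignored. A minimal sketch of the target shape, with placeholder credentials:

```ts
import { Pool } from 'pg';

// Placeholder credentials; real values come from the database connection params.
const pool = new Pool({
  host: 'localhost',
  port: 5432,
  user: 'postgres', // `username` is not a recognized pg option
  password: 'postgres',
  database: 'immich',
});
```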
@@ -8,22 +8,6 @@ export interface PaginationResult<T> {
   hasNextPage: boolean;
 }
 
-export type Paginated<T> = Promise<PaginationResult<T>>;
-
-/** @deprecated use `this.db. ... .stream()` instead */
-export async function* usePagination<T>(
-  pageSize: number,
-  getNextPage: (pagination: PaginationOptions) => PaginationResult<T> | Paginated<T>,
-) {
-  let hasNextPage = true;
-
-  for (let skip = 0; hasNextPage; skip += pageSize) {
-    const result = await getNextPage({ take: pageSize, skip });
-    hasNextPage = result.hasNextPage;
-    yield result.items;
-  }
-}
-
 export function paginationHelper<Entity extends object>(items: Entity[], take: number): PaginationResult<Entity> {
   const hasNextPage = items.length > take;
   items.splice(take);
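With `usePagination` deleted, its own deprecation notice points callers at `this.db. ... .stream()`. A minimal sketch of that replacement with kysely, under the assumption of a placeholder table; note that with the Postgres dialect, `.stream()` requires the dialect to be configured with a cursor implementation:

```ts
import { Kysely } from 'kysely';

interface DB {
  asset: { id: string };
}

// Iterate rows lazily instead of fetching skip/take pages.
async function* streamAssetIds(db: Kysely<DB>): AsyncGenerator<string> {
  for await (const row of db.selectFrom('asset').select('id').stream()) {
    yield row.id;
  }
}
```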
@@ -18,7 +18,6 @@ read_file_and_export "DB_HOSTNAME_FILE" "DB_HOSTNAME"
 read_file_and_export "DB_DATABASE_NAME_FILE" "DB_DATABASE_NAME"
 read_file_and_export "DB_USERNAME_FILE" "DB_USERNAME"
 read_file_and_export "DB_PASSWORD_FILE" "DB_PASSWORD"
-read_file_and_export "REDIS_PASSWORD_FILE" "REDIS_PASSWORD"
 
 export CPU_CORES="${CPU_CORES:=$(./get-cpus.sh)}"
 echo "Detected CPU Cores: $CPU_CORES"
@@ -13,14 +13,11 @@ export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetReposi
     getByIds: vitest.fn().mockResolvedValue([]),
     getByIdsWithAllRelationsButStacks: vitest.fn().mockResolvedValue([]),
     getByDeviceIds: vitest.fn(),
-    getByUserId: vitest.fn(),
     getById: vitest.fn(),
-    getWithout: vitest.fn(),
     getByChecksum: vitest.fn(),
     getByChecksums: vitest.fn(),
     getUploadAssetIdByChecksum: vitest.fn(),
     getRandom: vitest.fn(),
-    getAll: vitest.fn().mockResolvedValue({ items: [], hasNextPage: false }),
     getAllByDeviceId: vitest.fn(),
     getLivePhotoCount: vitest.fn(),
     getLibraryAssetCount: vitest.fn(),
@@ -8,12 +8,6 @@ const envData: EnvData = {
   environment: ImmichEnvironment.PRODUCTION,
 
   buildMetadata: {},
-  bull: {
-    config: {
-      prefix: 'immich_bull',
-    },
-    queues: [{ name: 'queue-1' }],
-  },
 
   cls: {
     config: {},
@@ -52,12 +46,6 @@
     },
   },
 
-  redis: {
-    host: 'redis',
-    port: 6379,
-    db: 0,
-  },
-
   resourcePaths: {
     lockFile: 'build-lock.json',
     geodata: {
@@ -5,18 +5,16 @@ import { Mocked, vitest } from 'vitest';
 export const newJobRepositoryMock = (): Mocked<RepositoryInterface<JobRepository>> => {
   return {
     setup: vitest.fn(),
-    startWorkers: vitest.fn(),
-    run: vitest.fn(),
-    setConcurrency: vitest.fn(),
-    empty: vitest.fn(),
+    start: vitest.fn(),
+    stop: vitest.fn(),
     pause: vitest.fn(),
     resume: vitest.fn(),
+    run: vitest.fn(),
     queue: vitest.fn().mockImplementation(() => Promise.resolve()),
     queueAll: vitest.fn().mockImplementation(() => Promise.resolve()),
-    getQueueStatus: vitest.fn(),
-    getJobCounts: vitest.fn(),
     clear: vitest.fn(),
-    waitForQueueCompletion: vitest.fn(),
-    removeJob: vitest.fn(),
+    clearFailed: vitest.fn(),
+    getJobCounts: vitest.fn(),
+    getQueueStatus: vitest.fn(),
   };
 };
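Note: the hunk above reshapes the JobRepository mock surface — startWorkers/setConcurrency/empty/waitForQueueCompletion/removeJob give way to start/stop/clearFailed, with run and the count/status getters kept. A minimal sketch of how such a mock is typically exercised in a vitest spec; the job name is hypothetical, and newJobRepositoryMock is the factory from the mock module shown above:

    import { expect, it } from 'vitest';
    // newJobRepositoryMock comes from the job repository mock module in this diff

    it('queues work through the mocked repository', async () => {
      const jobMock = newJobRepositoryMock();

      // a service under test would normally receive jobMock via dependency
      // injection; calling it directly just demonstrates the new surface
      await jobMock.queue({ name: 'hypothetical-job' } as never);
      await jobMock.start();

      expect(jobMock.queue).toHaveBeenCalledOnce();
      expect(jobMock.start).toHaveBeenCalledOnce();
    });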
@@ -5,10 +5,17 @@ TYPESCRIPT_SDK=/usr/src/open-api/typescript-sdk
 npm --prefix "$TYPESCRIPT_SDK" install
 npm --prefix "$TYPESCRIPT_SDK" run build
 
+
+COUNT=0
 UPSTREAM="${IMMICH_SERVER_URL:-http://immich-server:2283/}"
-until wget --spider --quiet "${UPSTREAM}/api/server/config"; do
-  echo 'waiting for api server...'
+until wget --spider --quiet "${UPSTREAM}/api/server/config" > /dev/null 2>&1; do
+  if [ $((COUNT % 10)) -eq 0 ]; then
+    echo "Waiting for $UPSTREAM to start..."
+  fi
+  COUNT=$((COUNT + 1))
   sleep 1
 done
 
+echo "Connected to $UPSTREAM"
+
 node ./node_modules/.bin/vite dev --host 0.0.0.0 --port 3000
@@ -34,9 +34,6 @@
   --immich-ui-info: 14 165 233;
   --immich-ui-default-border: 209 213 219;
 }
-* {
-  --tw-ring-offset-width: 0px;
-}
 
 .dark {
   /* dark */
@@ -1,8 +1,11 @@
 import FocusTrapTest from '$lib/actions/__test__/focus-trap-test.svelte';
+import { setDefaultTabbleOptions } from '$lib/utils/focus-util';
 import { render, screen } from '@testing-library/svelte';
 import userEvent from '@testing-library/user-event';
 import { tick } from 'svelte';
 
+setDefaultTabbleOptions({ displayCheck: 'none' });
+
 describe('focusTrap action', () => {
   const user = userEvent.setup();
 
@@ -38,6 +41,7 @@ describe('focusTrap action', () => {
     const openButton = screen.getByText('Open');
 
     await user.click(openButton);
+    await tick();
     expect(document.activeElement).toEqual(screen.getByTestId('one'));
 
     screen.getByText('Close').click();
@@ -1,5 +1,5 @@
 import { shortcuts } from '$lib/actions/shortcut';
-import { getFocusable } from '$lib/utils/focus-util';
+import { getTabbable } from '$lib/utils/focus-util';
 import { tick } from 'svelte';
 
 interface Options {
@@ -18,18 +18,21 @@ export function focusTrap(container: HTMLElement, options?: Options) {
     };
   };
 
-  const setInitialFocus = () => {
-    const focusableElement = getFocusable(container)[0];
-    // Use tick() to ensure focus trap works correctly inside <Portal />
-    void tick().then(() => focusableElement?.focus());
+  const setInitialFocus = async () => {
+    const focusableElement = getTabbable(container, false)[0];
+    if (focusableElement) {
+      // Use tick() to ensure focus trap works correctly inside <Portal />
+      await tick();
+      focusableElement?.focus();
+    }
   };
 
   if (withDefaults(options).active) {
-    setInitialFocus();
+    void setInitialFocus();
   }
 
   const getFocusableElements = () => {
-    const focusableElements = getFocusable(container);
+    const focusableElements = getTabbable(container);
     return [
       focusableElements.at(0), //
       focusableElements.at(-1),
@@ -67,7 +70,7 @@ export function focusTrap(container: HTMLElement, options?: Options) {
   update(newOptions?: Options) {
     options = newOptions;
     if (withDefaults(options).active) {
-      setInitialFocus();
+      void setInitialFocus();
     }
   },
   destroy() {
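Note: the focus-trap hunks above narrow the element query from focusable to tabbable and make the initial focus asynchronous, awaiting tick() so that content mounted through a portal exists before focus() runs. A condensed sketch of that pattern; the getTabbable signature is assumed from the call sites in the diff:

    import { tick } from 'svelte';
    import { getTabbable } from '$lib/utils/focus-util';

    // focus the first tabbable child once pending DOM updates have flushed
    const focusFirstTabbable = async (container: HTMLElement) => {
      const [first] = getTabbable(container, false);
      if (first) {
        await tick(); // let portal/conditional content mount before focusing
        first.focus();
      }
    };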
@@ -47,20 +47,20 @@
   onCommand,
 }: Props = $props();
 
-let waitingCount = $derived(jobCounts.waiting + jobCounts.paused + jobCounts.delayed);
-let isIdle = $derived(!queueStatus.isActive && !queueStatus.isPaused);
+let waitingCount = $derived(jobCounts.waiting + jobCounts.delayed);
+let idle = $derived(jobCounts.active + jobCounts.waiting + jobCounts.delayed === 0);
 let multipleButtons = $derived(allText || refreshText);
 
-const commonClasses = 'flex place-items-center justify-between w-full py-2 sm:py-4 pe-4 ps-6';
+const commonClasses = 'flex place-items-center justify-between w-full py-2 sm:py-4 pr-4 pl-6';
 </script>
 
 <div
   class="flex flex-col overflow-hidden rounded-2xl bg-gray-100 dark:bg-immich-dark-gray sm:flex-row sm:rounded-[35px]"
 >
   <div class="flex w-full flex-col">
-    {#if queueStatus.isPaused}
+    {#if queueStatus.paused}
       <JobTileStatus color="warning">{$t('paused')}</JobTileStatus>
-    {:else if queueStatus.isActive}
+    {:else if !idle}
       <JobTileStatus color="success">{$t('active')}</JobTileStatus>
     {/if}
     <div class="flex flex-col gap-2 p-5 sm:p-7 md:p-9">
@@ -119,12 +119,12 @@
       </div>
 
       <div
-        class="{commonClasses} flex-row-reverse rounded-b-lg bg-gray-200 text-immich-dark-bg dark:bg-gray-700 dark:text-immich-gray sm:rounded-s-none sm:rounded-e-lg"
+        class="{commonClasses} rounded-b-lg bg-gray-200 text-immich-dark-bg dark:bg-gray-700 dark:text-immich-gray sm:rounded-s-none sm:rounded-e-lg"
       >
+        <p>{$t('waiting')}</p>
         <p class="text-2xl">
          {waitingCount.toLocaleString($locale)}
         </p>
-        <p>{$t('waiting')}</p>
       </div>
     </div>
   </div>
@@ -139,54 +139,52 @@
       <Icon path={mdiAlertCircle} size="36" />
       {$t('disabled').toUpperCase()}
     </JobTileButton>
-  {/if}
-
-  {#if !disabled && !isIdle}
-    {#if waitingCount > 0}
-      <JobTileButton color="gray" onClick={() => onCommand({ command: JobCommand.Empty, force: false })}>
+  {:else}
+    {#if !idle}
+      <JobTileButton color="gray" onClick={() => onCommand({ command: JobCommand.Clear, force: false })}>
         <Icon path={mdiClose} size="24" />
         {$t('clear').toUpperCase()}
       </JobTileButton>
     {/if}
-    {#if queueStatus.isPaused}
-      {@const size = waitingCount > 0 ? '24' : '48'}
-      <JobTileButton color="light-gray" onClick={() => onCommand({ command: JobCommand.Resume, force: false })}>
+
+    {#if multipleButtons && idle}
+      {#if allText}
+        <JobTileButton color="dark-gray" onClick={() => onCommand({ command: JobCommand.Start, force: true })}>
+          <Icon path={mdiAllInclusive} size="24" />
+          {allText}
+        </JobTileButton>
+      {/if}
+      {#if refreshText}
+        <JobTileButton color="gray" onClick={() => onCommand({ command: JobCommand.Start, force: undefined })}>
+          <Icon path={mdiImageRefreshOutline} size="24" />
+          {refreshText}
+        </JobTileButton>
+      {/if}
+      <JobTileButton color="light-gray" onClick={() => onCommand({ command: JobCommand.Start, force: false })}>
+        <Icon path={mdiSelectionSearch} size="24" />
+        {missingText}
+      </JobTileButton>
+    {/if}
+
+    {#if !multipleButtons && idle}
+      <JobTileButton color="light-gray" onClick={() => onCommand({ command: JobCommand.Start, force: false })}>
+        <Icon path={mdiPlay} size="24" />
+        {missingText}
+      </JobTileButton>
+    {/if}
+
+    {#if queueStatus.paused}
+      <JobTileButton color="gray" onClick={() => onCommand({ command: JobCommand.Resume, force: false })}>
        <!-- size property is not reactive, so have to use width and height -->
-        <Icon path={mdiFastForward} {size} />
+        <Icon path={mdiFastForward} size="24" />
        {$t('resume').toUpperCase()}
      </JobTileButton>
     {:else}
-      <JobTileButton color="light-gray" onClick={() => onCommand({ command: JobCommand.Pause, force: false })}>
+      <JobTileButton color="gray" onClick={() => onCommand({ command: JobCommand.Pause, force: false })}>
        <Icon path={mdiPause} size="24" />
        {$t('pause').toUpperCase()}
      </JobTileButton>
     {/if}
   {/if}
 
-  {#if !disabled && multipleButtons && isIdle}
-    {#if allText}
-      <JobTileButton color="dark-gray" onClick={() => onCommand({ command: JobCommand.Start, force: true })}>
-        <Icon path={mdiAllInclusive} size="24" />
-        {allText}
-      </JobTileButton>
-    {/if}
-    {#if refreshText}
-      <JobTileButton color="gray" onClick={() => onCommand({ command: JobCommand.Start, force: undefined })}>
-        <Icon path={mdiImageRefreshOutline} size="24" />
-        {refreshText}
-      </JobTileButton>
-    {/if}
-    <JobTileButton color="light-gray" onClick={() => onCommand({ command: JobCommand.Start, force: false })}>
-      <Icon path={mdiSelectionSearch} size="24" />
-      {missingText}
-    </JobTileButton>
-  {/if}
-
-  {#if !disabled && !multipleButtons && isIdle}
-    <JobTileButton color="light-gray" onClick={() => onCommand({ command: JobCommand.Start, force: false })}>
-      <Icon path={mdiPlay} size="48" />
-      {missingText}
-    </JobTileButton>
-  {/if}
   </div>
 </div>
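Note: two derived values change meaning in the job tile above — waitingCount no longer counts paused jobs, and idleness is computed from live job counts instead of the queueStatus flags. Reduced to plain TypeScript (the JobCounts shape is inferred from the fields the component reads):

    type JobCounts = { active: number; waiting: number; delayed: number };

    // idle when nothing is active, waiting, or delayed
    const idle = (c: JobCounts): boolean => c.active + c.waiting + c.delayed === 0;

    // the "waiting" figure shown in the tile
    const waitingCount = (c: JobCounts): number => c.waiting + c.delayed;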
@@ -154,7 +154,7 @@
     jobs[jobId] = await sendJobCommand({ id: jobId, jobCommandDto: jobCommand });
 
     switch (jobCommand.command) {
-      case JobCommand.Empty: {
+      case JobCommand.Clear: {
         notificationController.show({
           message: $t('admin.cleared_jobs', { values: { job: title } }),
           type: NotificationType.Info,
@@ -1,32 +1,24 @@
 <script lang="ts">
-  import Icon from '$lib/components/elements/icon.svelte';
-  import { AppRoute, timeBeforeShowLoadingSpinner } from '$lib/constants';
-  import { getAssetThumbnailUrl, handlePromiseError } from '$lib/utils';
-  import { getAssetType } from '$lib/utils/asset-utils';
   import { autoGrowHeight } from '$lib/actions/autogrow';
+  import { shortcut } from '$lib/actions/shortcut';
+  import Icon from '$lib/components/elements/icon.svelte';
+  import ButtonContextMenu from '$lib/components/shared-components/context-menu/button-context-menu.svelte';
+  import MenuOption from '$lib/components/shared-components/context-menu/menu-option.svelte';
+  import { AppRoute, timeBeforeShowLoadingSpinner } from '$lib/constants';
+  import { activityManager } from '$lib/managers/activity-manager.svelte';
+  import { locale } from '$lib/stores/preferences.store';
+  import { getAssetThumbnailUrl } from '$lib/utils';
+  import { getAssetType } from '$lib/utils/asset-utils';
   import { handleError } from '$lib/utils/handle-error';
   import { isTenMinutesApart } from '$lib/utils/timesince';
-  import {
-    ReactionType,
-    createActivity,
-    deleteActivity,
-    getActivities,
-    type ActivityResponseDto,
-    type AssetTypeEnum,
-    type UserResponseDto,
-  } from '@immich/sdk';
-  import { mdiClose, mdiDotsVertical, mdiHeart, mdiSend, mdiDeleteOutline } from '@mdi/js';
+  import { ReactionType, type ActivityResponseDto, type AssetTypeEnum, type UserResponseDto } from '@immich/sdk';
+  import { mdiClose, mdiDeleteOutline, mdiDotsVertical, mdiHeart, mdiSend } from '@mdi/js';
   import * as luxon from 'luxon';
-  import { onMount } from 'svelte';
+  import { t } from 'svelte-i18n';
   import CircleIconButton from '../elements/buttons/circle-icon-button.svelte';
   import LoadingSpinner from '../shared-components/loading-spinner.svelte';
   import { NotificationType, notificationController } from '../shared-components/notification/notification';
   import UserAvatar from '../shared-components/user-avatar.svelte';
-  import { locale } from '$lib/stores/preferences.store';
-  import { shortcut } from '$lib/actions/shortcut';
-  import { t } from 'svelte-i18n';
-  import ButtonContextMenu from '$lib/components/shared-components/context-menu/button-context-menu.svelte';
-  import MenuOption from '$lib/components/shared-components/context-menu/menu-option.svelte';
 
   const units: Intl.RelativeTimeFormatUnit[] = ['year', 'month', 'week', 'day', 'hour', 'minute', 'second'];
 
@@ -48,34 +40,16 @@
   };
 
   interface Props {
-    reactions: ActivityResponseDto[];
     user: UserResponseDto;
     assetId?: string | undefined;
     albumId: string;
     assetType?: AssetTypeEnum | undefined;
     albumOwnerId: string;
     disabled: boolean;
-    isLiked: ActivityResponseDto | null;
-    onDeleteComment: () => void;
-    onDeleteLike: () => void;
-    onAddComment: () => void;
     onClose: () => void;
   }
 
-  let {
-    reactions = $bindable(),
-    user,
-    assetId = undefined,
-    albumId,
-    assetType = undefined,
-    albumOwnerId,
-    disabled,
-    isLiked,
-    onDeleteComment,
-    onDeleteLike,
-    onAddComment,
-    onClose,
-  }: Props = $props();
+  let { user, assetId = undefined, albumId, assetType = undefined, albumOwnerId, disabled, onClose }: Props = $props();
 
   let innerHeight: number = $state(0);
   let activityHeight: number = $state(0);
@@ -85,36 +59,18 @@
   let message = $state('');
   let isSendingMessage = $state(false);
 
-  onMount(async () => {
-    await getReactions();
-  });
-
-  const getReactions = async () => {
-    try {
-      reactions = await getActivities({ assetId, albumId });
-    } catch (error) {
-      handleError(error, $t('errors.unable_to_load_asset_activity'));
-    }
-  };
-
-  const timeOptions = {
+  const timeOptions: Intl.DateTimeFormatOptions = {
     year: 'numeric',
     month: '2-digit',
     day: '2-digit',
     hour: '2-digit',
     minute: '2-digit',
     hour12: false,
-  } as Intl.DateTimeFormatOptions;
+  };
 
   const handleDeleteReaction = async (reaction: ActivityResponseDto, index: number) => {
     try {
-      await deleteActivity({ id: reaction.id });
-      reactions.splice(index, 1);
-      if (isLiked && reaction.type === ReactionType.Like && reaction.id == isLiked.id) {
-        onDeleteLike();
-      } else {
-        onDeleteComment();
-      }
+      await activityManager.deleteActivity(reaction, index);
 
       const deleteMessages: Record<ReactionType, string> = {
         [ReactionType.Comment]: $t('comment_deleted'),
@@ -135,13 +91,9 @@
     }
     const timeout = setTimeout(() => (isSendingMessage = true), timeBeforeShowLoadingSpinner);
     try {
-      const data = await createActivity({
-        activityCreateDto: { albumId, assetId, type: ReactionType.Comment, comment: message },
-      });
-      reactions.push(data);
+      await activityManager.addActivity({ albumId, assetId, type: ReactionType.Comment, comment: message });
 
       message = '';
-      onAddComment();
     } catch (error) {
       handleError(error, $t('errors.unable_to_add_comment'));
     } finally {
@@ -156,7 +108,6 @@
   });
   $effect(() => {
     if (assetId && previousAssetId != assetId) {
-      handlePromiseError(getReactions());
       previousAssetId = assetId;
     }
   });
@@ -184,7 +135,7 @@
     class="overflow-y-auto immich-scrollbar relative w-full px-2"
     style="height: {divHeight}px;padding-bottom: {chatHeight}px"
   >
-    {#each reactions as reaction, index (reaction.id)}
+    {#each activityManager.activities as reaction, index (reaction.id)}
       {#if reaction.type === ReactionType.Comment}
         <div class="flex dark:bg-gray-800 bg-gray-200 py-3 ps-3 mt-3 rounded-lg gap-4 justify-start">
           <div class="flex items-center">
@@ -221,7 +172,7 @@
           {/if}
         </div>
 
-        {#if (index != reactions.length - 1 && !shouldGroup(reactions[index].createdAt, reactions[index + 1].createdAt)) || index === reactions.length - 1}
+        {#if (index != activityManager.activities.length - 1 && !shouldGroup(activityManager.activities[index].createdAt, activityManager.activities[index + 1].createdAt)) || index === activityManager.activities.length - 1}
           <div
             class="pt-1 px-2 text-right w-full text-sm text-gray-500 dark:text-gray-300"
             title={new Date(reaction.createdAt).toLocaleDateString(undefined, timeOptions)}
@@ -273,7 +224,7 @@
           </div>
         {/if}
       </div>
-      {#if (index != reactions.length - 1 && isTenMinutesApart(reactions[index].createdAt, reactions[index + 1].createdAt)) || index === reactions.length - 1}
+      {#if (index != activityManager.activities.length - 1 && isTenMinutesApart(activityManager.activities[index].createdAt, activityManager.activities[index + 1].createdAt)) || index === activityManager.activities.length - 1}
         <div
           class="pt-1 px-2 text-right w-full text-sm text-gray-500 dark:text-gray-300"
           title={new Date(reaction.createdAt).toLocaleDateString(navigator.language, timeOptions)}
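Note: the activity viewer above stops fetching and mutating reactions itself and delegates to a shared activityManager. That manager's implementation is not part of this diff; inferred purely from the call sites above, its surface plausibly looks like the sketch below — treat every signature as an assumption, not the actual module:

    import type { ActivityResponseDto, ReactionType } from '@immich/sdk';

    // hypothetical shape, reconstructed from how the components call it
    interface ActivityManagerLike {
      activities: ActivityResponseDto[];
      commentCount: number;
      isLiked: ActivityResponseDto | null;
      init(albumId: string, assetId?: string): void;
      refreshActivities(albumId: string, assetId?: string): Promise<void>;
      addActivity(dto: { albumId: string; assetId?: string; type: ReactionType; comment?: string }): Promise<void>;
      deleteActivity(activity: ActivityResponseDto, index?: number): Promise<void>;
      toggleLike(): Promise<void>;
      reset(): void;
    }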
@@ -5,8 +5,8 @@
   import NextAssetAction from '$lib/components/asset-viewer/actions/next-asset-action.svelte';
   import PreviousAssetAction from '$lib/components/asset-viewer/actions/previous-asset-action.svelte';
   import { AssetAction, ProjectionType } from '$lib/constants';
+  import { activityManager } from '$lib/managers/activity-manager.svelte';
   import { authManager } from '$lib/managers/auth-manager.svelte';
-  import { updateNumberOfComments } from '$lib/stores/activity.store';
   import { closeEditorCofirm } from '$lib/stores/asset-editor.store';
   import { assetViewingStore } from '$lib/stores/asset-viewing.store';
   import { isShowDetail } from '$lib/stores/preferences.store';
@@ -19,15 +19,9 @@
   import {
     AssetJobName,
     AssetTypeEnum,
-    ReactionType,
-    createActivity,
-    deleteActivity,
-    getActivities,
-    getActivityStatistics,
     getAllAlbums,
     getStack,
     runAssetJobs,
-    type ActivityResponseDto,
     type AlbumResponseDto,
     type AssetResponseDto,
     type PersonResponseDto,
@@ -61,7 +55,6 @@
     person?: PersonResponseDto | null;
     preAction?: PreAction | undefined;
     onAction?: OnAction | undefined;
-    reactions?: ActivityResponseDto[];
     showCloseButton?: boolean;
     onClose: (dto: { asset: AssetResponseDto }) => void;
     onNext: () => Promise<HasAsset>;
@@ -80,7 +73,6 @@
     person = null,
     preAction = undefined,
     onAction = undefined,
-    reactions = $bindable([]),
     showCloseButton,
     onClose,
     onNext,
@@ -107,8 +99,6 @@
   let previewStackedAsset: AssetResponseDto | undefined = $state();
   let isShowActivity = $state(false);
   let isShowEditor = $state(false);
-  let isLiked: ActivityResponseDto | null = $state(null);
-  let numberOfComments = $state(0);
   let fullscreenElement = $state<Element>();
   let unsubscribes: (() => void)[] = [];
   let selectedEditType: string = $state('');
@@ -136,59 +126,20 @@
     });
   };
 
-  const handleAddComment = () => {
-    numberOfComments++;
-    updateNumberOfComments(1);
-  };
-
-  const handleRemoveComment = () => {
-    numberOfComments--;
-    updateNumberOfComments(-1);
-  };
-
   const handleFavorite = async () => {
     if (album && album.isActivityEnabled) {
       try {
-        if (isLiked) {
-          const activityId = isLiked.id;
-          await deleteActivity({ id: activityId });
-          reactions = reactions.filter((reaction) => reaction.id !== activityId);
-          isLiked = null;
-        } else {
-          const data = await createActivity({
-            activityCreateDto: { albumId: album.id, assetId: asset.id, type: ReactionType.Like },
-          });
-
-          isLiked = data;
-          reactions = [...reactions, isLiked];
-        }
+        await activityManager.toggleLike();
       } catch (error) {
         handleError(error, $t('errors.unable_to_change_favorite'));
       }
     }
   };
 
-  const getFavorite = async () => {
-    if (album && $user) {
-      try {
-        const data = await getActivities({
-          userId: $user.id,
-          assetId: asset.id,
-          albumId: album.id,
-          $type: ReactionType.Like,
-        });
-        isLiked = data.length > 0 ? data[0] : null;
-      } catch (error) {
-        handleError(error, $t('errors.unable_to_load_liked_status'));
-      }
-    }
-  };
-
-  const getNumberOfComments = async () => {
+  const updateComments = async () => {
     if (album) {
       try {
-        const { comments } = await getActivityStatistics({ assetId: asset.id, albumId: album.id });
-        numberOfComments = comments;
+        await activityManager.refreshActivities(album.id, asset.id);
       } catch (error) {
         handleError(error, $t('errors.unable_to_get_comments_number'));
       }
@@ -227,6 +178,10 @@
     if (!sharedLink) {
       await handleGetAllAlbums();
     }
+
+    if (album) {
+      activityManager.init(album.id, asset.id);
+    }
   });
 
   onDestroy(() => {
@@ -241,6 +196,8 @@
     for (const unsubscribe of unsubscribes) {
       unsubscribe();
     }
+
+    activityManager.reset();
   });
 
   const handleGetAllAlbums = async () => {
@@ -402,14 +359,13 @@
     }
   });
   $effect(() => {
-    if (album && !album.isActivityEnabled && numberOfComments === 0) {
+    if (album && !album.isActivityEnabled && activityManager.commentCount === 0) {
       isShowActivity = false;
     }
   });
   $effect(() => {
     if (isShared && asset.id) {
-      handlePromiseError(getFavorite());
-      handlePromiseError(getNumberOfComments());
+      handlePromiseError(updateComments());
     }
   });
   $effect(() => {
@@ -547,12 +503,12 @@
       onVideoStarted={handleVideoStarted}
     />
   {/if}
-  {#if $slideshowState === SlideshowState.None && isShared && ((album && album.isActivityEnabled) || numberOfComments > 0)}
+  {#if $slideshowState === SlideshowState.None && isShared && ((album && album.isActivityEnabled) || activityManager.commentCount > 0)}
     <div class="z-[9999] absolute bottom-0 end-0 mb-20 me-8">
       <ActivityStatus
         disabled={!album?.isActivityEnabled}
-        {isLiked}
-        {numberOfComments}
+        isLiked={activityManager.isLiked}
+        numberOfComments={activityManager.commentCount}
         onFavorite={handleFavorite}
         onOpenActivityTab={handleOpenActivity}
       />
@@ -642,11 +598,6 @@
         albumOwnerId={album.ownerId}
         albumId={album.id}
         assetId={asset.id}
-        {isLiked}
-        bind:reactions
-        onAddComment={handleAddComment}
-        onDeleteComment={handleRemoveComment}
-        onDeleteLike={() => (isLiked = null)}
        onClose={() => (isShowActivity = false)}
      />
    </div>
@@ -2,14 +2,15 @@
   import ImageThumbnail from '$lib/components/assets/thumbnail/image-thumbnail.svelte';
   import { dialogController } from '$lib/components/shared-components/dialog/dialog';
   import { notificationController } from '$lib/components/shared-components/notification/notification';
+  import { assetViewingStore } from '$lib/stores/asset-viewing.store';
   import { isFaceEditMode } from '$lib/stores/face-edit.svelte';
   import { getPeopleThumbnailUrl } from '$lib/utils';
-  import { getAllPeople, createFace, type PersonResponseDto } from '@immich/sdk';
+  import { handleError } from '$lib/utils/handle-error';
+  import { createFace, getAllPeople, type PersonResponseDto } from '@immich/sdk';
   import { Button, Input } from '@immich/ui';
   import { Canvas, InteractiveFabricObject, Rect } from 'fabric';
   import { onMount } from 'svelte';
-  import { assetViewingStore } from '$lib/stores/asset-viewing.store';
-  import { handleError } from '$lib/utils/handle-error';
+  import { t } from 'svelte-i18n';
 
   interface Props {
     htmlElement: HTMLImageElement | HTMLVideoElement;
@@ -316,7 +317,7 @@
       bind:this={faceSelectorEl}
       class="absolute top-[calc(50%-250px)] start-[calc(50%-125px)] max-w-[250px] w-[250px] bg-white dark:bg-immich-dark-gray dark:text-immich-dark-fg backdrop-blur-sm px-2 py-4 rounded-xl border border-gray-200 dark:border-gray-800"
     >
-      <p class="text-center text-sm">Select a person to tag</p>
+      <p class="text-center text-sm">{$t('select_person_to_tag')}</p>
 
       <div class="my-3 relative">
         <Input placeholder="Search person..." bind:value={searchTerm} size="tiny" />
@@ -348,11 +349,11 @@
         </div>
       {:else}
         <div class="flex items-center justify-center py-4">
-          <p class="text-sm text-gray-500">No matching people found</p>
+          <p class="text-sm text-gray-500">{$t('no_people_found')}</p>
         </div>
       {/if}
     </div>
 
-    <Button size="small" fullWidth onclick={cancel} color="danger" class="mt-2">Cancel</Button>
+    <Button size="small" fullWidth onclick={cancel} color="danger" class="mt-2">{$t('cancel')}</Button>
   </div>
 </div>
@@ -1,7 +1,8 @@
 import { getIntersectionObserverMock } from '$lib/__mocks__/intersection-observer.mock';
 import Thumbnail from '$lib/components/assets/thumbnail/thumbnail.svelte';
+import { getTabbable } from '$lib/utils/focus-util';
 import { assetFactory } from '@test-data/factories/asset-factory';
-import { fireEvent, render, screen } from '@testing-library/svelte';
+import { fireEvent, render } from '@testing-library/svelte';
 
 vi.hoisted(() => {
   Object.defineProperty(globalThis, 'matchMedia', {
@@ -31,51 +32,47 @@ describe('Thumbnail component', () => {
 
   it('should only contain a single tabbable element (the container)', () => {
     const asset = assetFactory.build({ originalPath: 'image.jpg', originalMimeType: 'image/jpeg' });
-    render(Thumbnail, {
+    const { baseElement } = render(Thumbnail, {
       asset,
-      focussed: false,
       selected: true,
     });
 
-    const container = screen.getByTestId('container-with-tabindex');
-    expect(container.getAttribute('tabindex')).toBe('0');
+    const container = baseElement.querySelector('[data-thumbnail-focus-container]');
+    expect(container).not.toBeNull();
+    expect(container!.getAttribute('tabindex')).toBe('0');
 
-    // This isn't capturing all tabbable elements, but should be the most likely ones. Mainly guarding against
-    // inserting extra tabbable elments in future in <Thumbnail/>
-    let allTabbableElements = screen.queryAllByRole('link');
-    allTabbableElements = allTabbableElements.concat(screen.queryAllByRole('checkbox'));
-    expect(allTabbableElements.length).toBeGreaterThan(0);
-    for (const tabbableElement of allTabbableElements) {
-      const testIdValue = tabbableElement.dataset.testid;
-      if (testIdValue === null || testIdValue !== 'container-with-tabindex') {
-        expect(tabbableElement.getAttribute('tabindex')).toBe('-1');
-      }
-    }
+    // Guarding against inserting extra tabbable elments in future in <Thumbnail/>
+    const tabbables = getTabbable(container!);
+    expect(tabbables.length).toBe(0);
   });
 
   it('handleFocus should be called on focus of container', async () => {
     const asset = assetFactory.build({ originalPath: 'image.jpg', originalMimeType: 'image/jpeg' });
     const handleFocusSpy = vi.fn();
-    render(Thumbnail, {
+    const { baseElement } = render(Thumbnail, {
       asset,
       handleFocus: handleFocusSpy,
     });
 
-    const container = screen.getByTestId('container-with-tabindex');
-    await fireEvent(container, new FocusEvent('focus'));
+    const container = baseElement.querySelector('[data-thumbnail-focus-container]');
+    expect(container).not.toBeNull();
+    await fireEvent(container as HTMLElement, new FocusEvent('focus'));
 
     expect(handleFocusSpy).toBeCalled();
   });
 
-  it('element will be focussed if not already', () => {
+  it('element will be focussed if not already', async () => {
     const asset = assetFactory.build({ originalPath: 'image.jpg', originalMimeType: 'image/jpeg' });
     const handleFocusSpy = vi.fn();
-    render(Thumbnail, {
+    const { baseElement } = render(Thumbnail, {
       asset,
-      focussed: true,
       handleFocus: handleFocusSpy,
     });
 
+    const container = baseElement.querySelector('[data-thumbnail-focus-container]');
+    expect(container).not.toBeNull();
+    await fireEvent(container as HTMLElement, new FocusEvent('focus'));
+
     expect(handleFocusSpy).toBeCalled();
   });
 });
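Note: the rewritten thumbnail spec above no longer approximates "tabbable" by enumerating roles; it finds the container by a data attribute and asserts getTabbable returns nothing inside it. The same guard, condensed into a reusable helper — the helper name is ours, only the selector idea and getTabbable come from the diff:

    import { getTabbable } from '$lib/utils/focus-util';

    // assert a rendered component exposes exactly one tab stop: the container itself
    export const expectSingleTabStop = (root: HTMLElement, selector: string): void => {
      const container = root.querySelector<HTMLElement>(selector);
      if (!container) {
        throw new Error(`no container matched ${selector}`);
      }
      if (container.getAttribute('tabindex') !== '0') {
        throw new Error('container is not tabbable');
      }
      if (getTabbable(container).length > 0) {
        throw new Error('unexpected tabbable elements inside the container');
      }
    };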
@@ -5,8 +5,8 @@
   import { cancelImageUrl } from '$lib/utils/sw-messaging';
   import { TUNABLES } from '$lib/utils/tunables';
   import { mdiEyeOffOutline } from '@mdi/js';
-  import type { ActionReturn } from 'svelte/action';
   import type { ClassValue } from 'svelte/elements';
+  import type { ActionReturn } from 'svelte/action';
   import { fade } from 'svelte/transition';
 
   interface Props {
@@ -77,7 +77,7 @@
     circle && 'rounded-full',
     shadow && 'shadow-lg',
     (circle || !heightStyle) && 'aspect-square',
-    border && 'border-[3px] border-immich-dark-primary/80 hocus:border-immich-primary',
+    border && 'border-[3px] border-immich-dark-primary/80 hover:border-immich-primary',
     brokenAssetClass,
   ]
     .filter(Boolean)
Some files were not shown because too many files have changed in this diff.