feat: pgvector
- added m:search
- replaced any_value query for pg14 compatibility
- update metadata search
- set and update clip dim size
- hnsw index migration
parent 024fe1141b
commit f78d70f87a
60 changed files with 552 additions and 1889 deletions
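For context, the core of this commit is replacing Typesense with Postgres plus the pgvector extension. The sketch below is illustrative only — the table, column, and index names are hypothetical and this is not the commit's actual migration — but it shows the general shape of a TypeORM migration that enables pgvector, sizes a CLIP embedding column, and adds an HNSW index:

```typescript
import { MigrationInterface, QueryRunner } from 'typeorm';

// Hypothetical names for illustration; the real migration in this commit differs.
export class UsePgvectorSketch0000000000000 implements MigrationInterface {
  public async up(queryRunner: QueryRunner): Promise<void> {
    // pgvector ships with the ankane/pgvector image referenced in docker-compose below.
    await queryRunner.query(`CREATE EXTENSION IF NOT EXISTS vector`);
    // CLIP ViT-B/32 produces 512-dimensional embeddings; the dimension is set per model.
    await queryRunner.query(`ALTER TABLE "smart_info" ADD "clipEmbedding" vector(512)`);
    // HNSW index for approximate nearest-neighbor search with cosine distance.
    await queryRunner.query(
      `CREATE INDEX "clip_embedding_hnsw_idx" ON "smart_info" USING hnsw ("clipEmbedding" vector_cosine_ops)`,
    );
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`DROP INDEX "clip_embedding_hnsw_idx"`);
    await queryRunner.query(`ALTER TABLE "smart_info" DROP COLUMN "clipEmbedding"`);
  }
}
```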
@@ -37,7 +37,6 @@ services:
depends_on:
- redis
- database
- typesense

immich-microservices:
container_name: immich_microservices
@@ -51,7 +50,6 @@ services:
depends_on:
- database
- immich-server
- typesense

immich-web:
container_name: immich_web
@@ -95,24 +93,14 @@ services:
- database
restart: unless-stopped

typesense:
container_name: immich_typesense
image: typesense/typesense:0.24.1@sha256:9bcff2b829f12074426ca044b56160ca9d777a0c488303469143dd9f8259d4dd
environment:
- TYPESENSE_API_KEY=${TYPESENSE_API_KEY}
- TYPESENSE_DATA_DIR=/data
# remove this to get debug messages
- GLOG_minloglevel=1
volumes:
- ${UPLOAD_LOCATION}/typesense:/data

redis:
container_name: immich_redis
image: redis:6.2-alpine@sha256:60e49e22fa5706cd8df7d5e0bc50ee9bab7c608039fa653c4d961014237cca46

database:
container_name: immich_postgres
image: postgres:14-alpine@sha256:6a0e35296341e676fe6bd8d236c72afffe2dfe3d7eb9c2405c0f3fc04500cd07
# image: postgres:14-alpine@sha256:28407a9961e76f2d285dc6991e8e48893503cc3836a4755bbc2d40bcc272a441
image: ankane/pgvector
env_file:
- .env
environment:

@@ -36,7 +36,6 @@ services:
depends_on:
- redis
- database
- typesense
- immich-server

immich-machine-learning:
@@ -51,18 +50,6 @@ services:
- .env
restart: always

typesense:
container_name: immich_typesense
image: typesense/typesense:0.24.1@sha256:9bcff2b829f12074426ca044b56160ca9d777a0c488303469143dd9f8259d4dd
environment:
- TYPESENSE_API_KEY=${TYPESENSE_API_KEY}
- TYPESENSE_DATA_DIR=/data
# remove this to get debug messages
- GLOG_minloglevel=1
volumes:
- ${UPLOAD_LOCATION}/typesense:/data
restart: always

redis:
container_name: immich_redis
image: redis:6.2-alpine@sha256:60e49e22fa5706cd8df7d5e0bc50ee9bab7c608039fa653c4d961014237cca46
@@ -70,7 +57,8 @@ services:

database:
container_name: immich_postgres
image: postgres:14-alpine@sha256:6a0e35296341e676fe6bd8d236c72afffe2dfe3d7eb9c2405c0f3fc04500cd07
# image: postgres:14-alpine@sha256:28407a9961e76f2d285dc6991e8e48893503cc3836a4755bbc2d40bcc272a441
image: ankane/pgvector
env_file:
- .env
environment:

@@ -25,7 +25,6 @@ services:
depends_on:
- redis
- database
- typesense
restart: always

immich-microservices:
@@ -43,7 +42,6 @@ services:
depends_on:
- redis
- database
- typesense
restart: always

immich-machine-learning:
@@ -55,18 +53,6 @@ services:
- .env
restart: always

typesense:
container_name: immich_typesense
image: typesense/typesense:0.24.1@sha256:9bcff2b829f12074426ca044b56160ca9d777a0c488303469143dd9f8259d4dd
environment:
- TYPESENSE_API_KEY=${TYPESENSE_API_KEY}
- TYPESENSE_DATA_DIR=/data
# remove this to get debug messages
- GLOG_minloglevel=1
volumes:
- tsdata:/data
restart: always

redis:
container_name: immich_redis
image: redis:6.2-alpine@sha256:60e49e22fa5706cd8df7d5e0bc50ee9bab7c608039fa653c4d961014237cca46
@@ -74,7 +60,8 @@ services:

database:
container_name: immich_postgres
image: postgres:14-alpine@sha256:6a0e35296341e676fe6bd8d236c72afffe2dfe3d7eb9c2405c0f3fc04500cd07
# image: postgres:14-alpine@sha256:28407a9961e76f2d285dc6991e8e48893503cc3836a4755bbc2d40bcc272a441
image: ankane/pgvector
env_file:
- .env
environment:

@@ -6,8 +6,7 @@ UPLOAD_LOCATION=./library
# The Immich version to use. You can pin this to a specific version like "v1.71.0"
IMMICH_VERSION=release

# Connection secrets for postgres and typesense. You should change these to random passwords
TYPESENSE_API_KEY=some-random-text
# Connection secret for postgres. You should change it to a random password
DB_PASSWORD=postgres

# The values below this line do not need to be changed

@@ -45,7 +45,6 @@ The Immich backend is divided into several services, which are run as individual
1. `immich-machine-learning` - Execute machine learning models
1. `postgres` - Persistent data storage
1. `redis`- Queue management for `immich-microservices`
1. `typesense`- Specialized database for search, specifically with vector comparison features

### Immich Server

@@ -75,7 +74,6 @@ The Immich Microservices image uses the same `Dockerfile` as the Immich Server,
- Object Tagging
- Facial Recognition
- Storage Template Migration
- Search (Typesense synchronization)
- Sidecar (see [XMP Sidecars](/docs/features/xmp-sidecars.md))
- Background jobs (file deletion, user deletion)

@@ -108,9 +106,3 @@ See [Database Migrations](./database-migrations.md) for more information about h
### Redis

Immich uses [Redis](https://redis.com/) via [BullMQ](https://docs.bullmq.io/) to manage job queues. Some jobs trigger subsequent jobs. For example, object detection relies on thumbnail generation and automatically run after one is generated.

### Typesense

Immich synchronizes some of the Postgres data into Typesense, so it can execute vector related queries in order to implement certain features including, facial recognition and CLIP search.

<!-- - [NGINX](https://www.nginx.com/) for internal communication between containers and load balancing when scaling. -->

@@ -1,18 +1,10 @@
# Search

Immich uses Typesense as the primary search database to enable high performance search mechanism.
Immich uses postgres as the search database.

Typesense is a powerful search engine that can be integrated with popular natural language processing (NLP) models like CLIP and SBERT to provide highly accurate and relevant search results. Here are some benefits of using Typesense integrated search for CLIP and SBERT:
Postgres with the pgvector extension can integrate with popular natural language processing (NLP) models like CLIP and SBERT to provide highly accurate and relevant search results. Here are some benefits of using CLIP and SBERT:

Improved Search Accuracy: Typesense uses a combination of indexing, querying, and ranking algorithms to quickly and accurately retrieve relevant search results. When integrated with CLIP and SBERT, Typesense can leverage the semantic understanding and deep learning capabilities of these models to further improve the accuracy of search results.

Faster Search Response Times: Typesense is optimized for lightning-fast search response times, making it ideal for applications that require near-instantaneous search results. By integrating with CLIP and SBERT, Typesense can reduce the time required to process complex search queries, making it even faster and more efficient.

Enhanced Semantic Search Capabilities: CLIP and SBERT are powerful NLP models that can extract the semantic meaning from text, enabling more nuanced search queries. By integrating with Typesense, these models can help to improve the accuracy of semantic search, enabling users to find the most relevant results based on the true meaning of their query.

Greater Search Flexibility: Typesense provides flexible search capabilities, including fuzzy search, partial search, enabling users to find the information they need quickly and easily. When integrated with CLIP and SBERT, Typesense can offer even greater flexibility, allowing users to refine their search queries using natural language and providing more accurate and relevant results.

(Generated by Chat-GPT4)
Improved Search Accuracy: When integrated with CLIP and SBERT, Postgres can leverage the semantic understanding and deep learning capabilities of these models to further improve the accuracy of search results.

Some search examples:
<img src={require('./img/search-ex-2.webp').default} title='Search Example 1' />
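To make the pgvector-based approach described in that doc change concrete, here is a minimal sketch of how a CLIP similarity search can be expressed against Postgres. It is illustrative only: the table and column names are hypothetical and this is not the query Immich actually runs.

```typescript
import { DataSource } from 'typeorm';

// Sketch only; assumes a pgvector column "clipEmbedding" on a hypothetical "smart_info" table.
export async function clipSearch(db: DataSource, ownerId: string, embedding: number[], limit = 100) {
  // `<=>` is pgvector's cosine-distance operator; an HNSW index on the embedding
  // column lets Postgres answer the ORDER BY ... LIMIT quickly.
  return db.query(
    `SELECT a."id", s."clipEmbedding" <=> $1 AS distance
       FROM "smart_info" s
       JOIN "assets" a ON a."id" = s."assetId"
      WHERE a."ownerId" = $2
      ORDER BY s."clipEmbedding" <=> $1
      LIMIT $3`,
    [`[${embedding.join(',')}]`, ownerId, limit],
  );
}
```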
@@ -88,15 +88,6 @@ UPLOAD_LOCATION=absolute_location_on_your_machine_where_you_want_to_store_the_ba

LOG_LEVEL=simple

###################################################################################
# Typesense
###################################################################################
# TYPESENSE_ENABLED=false
TYPESENSE_API_KEY=some-random-text
# TYPESENSE_HOST: typesense
# TYPESENSE_PORT: 8108
# TYPESENSE_PROTOCOL: http

###################################################################################
# Reverse Geocoding
#
@@ -137,7 +128,6 @@ PUBLIC_LOGIN_PAGE_MESSAGE="My Family Photos and Videos Backup Server"
- Populate custom database information if necessary.
- Populate `UPLOAD_LOCATION` with your preferred location for storing backup assets.
- Consider changing `DB_PASSWORD` to something randomly generated
- Consider changing `TYPESENSE_API_KEY` to something randomly generated

### Step 3 - Start the containers

@@ -17,10 +17,10 @@ If this should not work, try running `docker compose up -d --force-recreate`.

## Docker Compose

| Variable | Description | Default | Services |
| :---------------- | :-------------------- | :-------: | :------------------------------------------------------------- |
| `IMMICH_VERSION` | Image tags | `release` | server, microservices, machine learning, web, proxy, typesense |
| `UPLOAD_LOCATION` | Host Path for uploads | | server, microservices |
| Variable | Description | Default | Services |
| :---------------- | :-------------------- | :-------: | :-------------------------------------------------- |
| `IMMICH_VERSION` | Image tags | `release` | server, microservices, machine learning, web, proxy |
| `UPLOAD_LOCATION` | Host Path for uploads | | server, microservices |

:::tip

@@ -124,51 +124,6 @@ Redis (Sentinel) URL example JSON before encoding:
}
```

## Typesense

| Variable | Description | Default | Services |
| :------------------- | :----------------------- | :---------: | :------------------------------- |
| `TYPESENSE_ENABLED` | Enable Typesense | | server, microservices |
| `TYPESENSE_URL` | Typesense URL | | server, microservices |
| `TYPESENSE_HOST` | Typesense Host | `typesense` | server, microservices |
| `TYPESENSE_PORT` | Typesense Port | `8108` | server, microservices |
| `TYPESENSE_PROTOCOL` | Typesense Protocol | `http` | server, microservices |
| `TYPESENSE_API_KEY` | Typesense API Key | | server, microservices, typesense |
| `TYPESENSE_DATA_DIR` | Typesense Data Directory | `/data` | typesense |

:::info

`TYPESENSE_URL` must start with `ha://` and then include a `base64` encoded JSON string for the configuration.

`TYPESENSE_ENABLED`: Anything other than `false`, behaves as `true`.
Even undefined is treated as `true`.

- When `TYPESENSE_URL` is defined, the other typesense (`TYPESENSE_*`) variables are ignored.

:::

Typesense URL example JSON before encoding:

```json
[
{
"host": "typesense-1.example.net",
"port": "443",
"protocol": "https"
},
{
"host": "typesense-2.example.net",
"port": "443",
"protocol": "https"
},
{
"host": "typesense-3.example.net",
"port": "443",
"protocol": "https"
}
]
```

## Machine Learning

| Variable | Description | Default | Services |

server/package-lock.json (generated, 144 lines changed)
|
@ -23,6 +23,7 @@
|
|||
"@nestjs/websockets": "^10.2.2",
|
||||
"@socket.io/redis-adapter": "^8.2.1",
|
||||
"archiver": "^6.0.0",
|
||||
"async-lock": "^1.4.0",
|
||||
"axios": "^1.5.0",
|
||||
"bcrypt": "^5.1.1",
|
||||
"bullmq": "^4.8.0",
|
||||
|
@ -50,7 +51,6 @@
|
|||
"sharp": "^0.32.6",
|
||||
"thumbhash": "^0.1.1",
|
||||
"typeorm": "^0.3.17",
|
||||
"typesense": "^1.7.1",
|
||||
"ua-parser-js": "^1.0.35"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
@ -60,6 +60,7 @@
|
|||
"@openapitools/openapi-generator-cli": "2.7.0",
|
||||
"@testcontainers/postgresql": "^10.2.1",
|
||||
"@types/archiver": "^6.0.0",
|
||||
"@types/async-lock": "^1.4.2",
|
||||
"@types/bcrypt": "^5.0.0",
|
||||
"@types/cookie-parser": "^1.4.3",
|
||||
"@types/cron": "^2.0.1",
|
||||
|
@ -357,12 +358,12 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@babel/generator": {
|
||||
"version": "7.23.0",
|
||||
"resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.23.0.tgz",
|
||||
"integrity": "sha512-lN85QRR+5IbYrMWM6Y4pE/noaQtg4pNiqeNGX60eqOfo6gtEj6uw/JagelB8vVztSd7R6M5n1+PQkDbHbBRU4g==",
|
||||
"version": "7.23.3",
|
||||
"resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.23.3.tgz",
|
||||
"integrity": "sha512-keeZWAV4LU3tW0qRi19HRpabC/ilM0HRBBzf9/k8FFiG4KVpiv0FIy4hHfLfFQZNhziCTPTmd59zoyv6DNISzg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@babel/types": "^7.23.0",
|
||||
"@babel/types": "^7.23.3",
|
||||
"@jridgewell/gen-mapping": "^0.3.2",
|
||||
"@jridgewell/trace-mapping": "^0.3.17",
|
||||
"jsesc": "^2.5.1"
|
||||
|
@ -612,9 +613,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@babel/parser": {
|
||||
"version": "7.23.0",
|
||||
"resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.23.0.tgz",
|
||||
"integrity": "sha512-vvPKKdMemU85V9WE/l5wZEmImpCtLqbnTvqDS2U1fJ96KrxoW7KrXhNsNCblQlg8Ck4b85yxdTyelsMUgFUXiw==",
|
||||
"version": "7.23.3",
|
||||
"resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.23.3.tgz",
|
||||
"integrity": "sha512-uVsWNvlVsIninV2prNz/3lHCb+5CJ+e+IUBfbjToAHODtfGYLfCFuY4AU7TskI+dAKk+njsPiBjq1gKTvZOBaw==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"parser": "bin/babel-parser.js"
|
||||
|
@ -826,19 +827,19 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@babel/traverse": {
|
||||
"version": "7.23.0",
|
||||
"resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.23.0.tgz",
|
||||
"integrity": "sha512-t/QaEvyIoIkwzpiZ7aoSKK8kObQYeF7T2v+dazAYCb8SXtp58zEVkWW7zAnju8FNKNdr4ScAOEDmMItbyOmEYw==",
|
||||
"version": "7.23.3",
|
||||
"resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.23.3.tgz",
|
||||
"integrity": "sha512-+K0yF1/9yR0oHdE0StHuEj3uTPzwwbrLGfNOndVJVV2TqA5+j3oljJUb4nmB954FLGjNem976+B+eDuLIjesiQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@babel/code-frame": "^7.22.13",
|
||||
"@babel/generator": "^7.23.0",
|
||||
"@babel/generator": "^7.23.3",
|
||||
"@babel/helper-environment-visitor": "^7.22.20",
|
||||
"@babel/helper-function-name": "^7.23.0",
|
||||
"@babel/helper-hoist-variables": "^7.22.5",
|
||||
"@babel/helper-split-export-declaration": "^7.22.6",
|
||||
"@babel/parser": "^7.23.0",
|
||||
"@babel/types": "^7.23.0",
|
||||
"@babel/parser": "^7.23.3",
|
||||
"@babel/types": "^7.23.3",
|
||||
"debug": "^4.1.0",
|
||||
"globals": "^11.1.0"
|
||||
},
|
||||
|
@ -856,9 +857,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@babel/types": {
|
||||
"version": "7.23.0",
|
||||
"resolved": "https://registry.npmjs.org/@babel/types/-/types-7.23.0.tgz",
|
||||
"integrity": "sha512-0oIyUfKoI3mSqMvsxBdclDwxXKXAUA8v/apZbc+iSyARYou1o8ZGDxbUYyLFoW2arqS2jDGqJuZvv1d/io1axg==",
|
||||
"version": "7.23.3",
|
||||
"resolved": "https://registry.npmjs.org/@babel/types/-/types-7.23.3.tgz",
|
||||
"integrity": "sha512-OZnvoH2l8PK5eUvEcUyCt/sXgr/h+UWpVuBbOljwcrAgUl6lpchoQ++PHGyQy1AtYnVA6CEq3y5xeEI10brpXw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@babel/helper-string-parser": "^7.22.5",
|
||||
|
@ -2798,6 +2799,12 @@
|
|||
"@types/readdir-glob": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/async-lock": {
|
||||
"version": "1.4.2",
|
||||
"resolved": "https://registry.npmjs.org/@types/async-lock/-/async-lock-1.4.2.tgz",
|
||||
"integrity": "sha512-HlZ6Dcr205BmNhwkdXqrg2vkFMN2PluI7Lgr8In3B3wE5PiQHhjRqtW/lGdVU9gw+sM0JcIDx2AN+cW8oSWIcw==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/babel__core": {
|
||||
"version": "7.20.2",
|
||||
"resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.2.tgz",
|
||||
|
@ -3938,8 +3945,7 @@
|
|||
"node_modules/async-lock": {
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/async-lock/-/async-lock-1.4.0.tgz",
|
||||
"integrity": "sha512-coglx5yIWuetakm3/1dsX9hxCNox22h7+V80RQOu2XUUMidtArxKoZoOtHUPuR84SycKTXzgGzAUR5hJxujyJQ==",
|
||||
"dev": true
|
||||
"integrity": "sha512-coglx5yIWuetakm3/1dsX9hxCNox22h7+V80RQOu2XUUMidtArxKoZoOtHUPuR84SycKTXzgGzAUR5hJxujyJQ=="
|
||||
},
|
||||
"node_modules/asynckit": {
|
||||
"version": "0.4.0",
|
||||
|
@ -8458,18 +8464,6 @@
|
|||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/loglevel": {
|
||||
"version": "1.8.1",
|
||||
"resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.8.1.tgz",
|
||||
"integrity": "sha512-tCRIJM51SHjAayKwC+QAg8hT8vg6z7GSgLJKGvzuPb1Wc+hLzqtuVLxp6/HzSPOozuK+8ErAhy7U/sVzw8Dgfg==",
|
||||
"engines": {
|
||||
"node": ">= 0.6.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "tidelift",
|
||||
"url": "https://tidelift.com/funding/github/npm/loglevel"
|
||||
}
|
||||
},
|
||||
"node_modules/lru-cache": {
|
||||
"version": "5.1.1",
|
||||
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz",
|
||||
|
@ -12175,29 +12169,6 @@
|
|||
"node": ">=14.17"
|
||||
}
|
||||
},
|
||||
"node_modules/typesense": {
|
||||
"version": "1.7.2",
|
||||
"resolved": "https://registry.npmjs.org/typesense/-/typesense-1.7.2.tgz",
|
||||
"integrity": "sha512-hgQESOiyNJq+w2mpRJa/a1UMhWtJ/+sb0p7NoeCDSkikm9sasisJdnc7uhQchM6vTWKw2sMLWUBNbAhItR6zUQ==",
|
||||
"dependencies": {
|
||||
"axios": "^0.26.0",
|
||||
"loglevel": "^1.8.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@babel/runtime": "^7.17.2"
|
||||
}
|
||||
},
|
||||
"node_modules/typesense/node_modules/axios": {
|
||||
"version": "0.26.1",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-0.26.1.tgz",
|
||||
"integrity": "sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==",
|
||||
"dependencies": {
|
||||
"follow-redirects": "^1.14.8"
|
||||
}
|
||||
},
|
||||
"node_modules/ua-parser-js": {
|
||||
"version": "1.0.37",
|
||||
"resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.37.tgz",
|
||||
|
@ -12999,12 +12970,12 @@
|
|||
}
|
||||
},
|
||||
"@babel/generator": {
|
||||
"version": "7.23.0",
|
||||
"resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.23.0.tgz",
|
||||
"integrity": "sha512-lN85QRR+5IbYrMWM6Y4pE/noaQtg4pNiqeNGX60eqOfo6gtEj6uw/JagelB8vVztSd7R6M5n1+PQkDbHbBRU4g==",
|
||||
"version": "7.23.3",
|
||||
"resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.23.3.tgz",
|
||||
"integrity": "sha512-keeZWAV4LU3tW0qRi19HRpabC/ilM0HRBBzf9/k8FFiG4KVpiv0FIy4hHfLfFQZNhziCTPTmd59zoyv6DNISzg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@babel/types": "^7.23.0",
|
||||
"@babel/types": "^7.23.3",
|
||||
"@jridgewell/gen-mapping": "^0.3.2",
|
||||
"@jridgewell/trace-mapping": "^0.3.17",
|
||||
"jsesc": "^2.5.1"
|
||||
|
@ -13192,9 +13163,9 @@
|
|||
}
|
||||
},
|
||||
"@babel/parser": {
|
||||
"version": "7.23.0",
|
||||
"resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.23.0.tgz",
|
||||
"integrity": "sha512-vvPKKdMemU85V9WE/l5wZEmImpCtLqbnTvqDS2U1fJ96KrxoW7KrXhNsNCblQlg8Ck4b85yxdTyelsMUgFUXiw==",
|
||||
"version": "7.23.3",
|
||||
"resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.23.3.tgz",
|
||||
"integrity": "sha512-uVsWNvlVsIninV2prNz/3lHCb+5CJ+e+IUBfbjToAHODtfGYLfCFuY4AU7TskI+dAKk+njsPiBjq1gKTvZOBaw==",
|
||||
"dev": true
|
||||
},
|
||||
"@babel/plugin-syntax-async-generators": {
|
||||
|
@ -13343,19 +13314,19 @@
|
|||
}
|
||||
},
|
||||
"@babel/traverse": {
|
||||
"version": "7.23.0",
|
||||
"resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.23.0.tgz",
|
||||
"integrity": "sha512-t/QaEvyIoIkwzpiZ7aoSKK8kObQYeF7T2v+dazAYCb8SXtp58zEVkWW7zAnju8FNKNdr4ScAOEDmMItbyOmEYw==",
|
||||
"version": "7.23.3",
|
||||
"resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.23.3.tgz",
|
||||
"integrity": "sha512-+K0yF1/9yR0oHdE0StHuEj3uTPzwwbrLGfNOndVJVV2TqA5+j3oljJUb4nmB954FLGjNem976+B+eDuLIjesiQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@babel/code-frame": "^7.22.13",
|
||||
"@babel/generator": "^7.23.0",
|
||||
"@babel/generator": "^7.23.3",
|
||||
"@babel/helper-environment-visitor": "^7.22.20",
|
||||
"@babel/helper-function-name": "^7.23.0",
|
||||
"@babel/helper-hoist-variables": "^7.22.5",
|
||||
"@babel/helper-split-export-declaration": "^7.22.6",
|
||||
"@babel/parser": "^7.23.0",
|
||||
"@babel/types": "^7.23.0",
|
||||
"@babel/parser": "^7.23.3",
|
||||
"@babel/types": "^7.23.3",
|
||||
"debug": "^4.1.0",
|
||||
"globals": "^11.1.0"
|
||||
},
|
||||
|
@ -13369,9 +13340,9 @@
|
|||
}
|
||||
},
|
||||
"@babel/types": {
|
||||
"version": "7.23.0",
|
||||
"resolved": "https://registry.npmjs.org/@babel/types/-/types-7.23.0.tgz",
|
||||
"integrity": "sha512-0oIyUfKoI3mSqMvsxBdclDwxXKXAUA8v/apZbc+iSyARYou1o8ZGDxbUYyLFoW2arqS2jDGqJuZvv1d/io1axg==",
|
||||
"version": "7.23.3",
|
||||
"resolved": "https://registry.npmjs.org/@babel/types/-/types-7.23.3.tgz",
|
||||
"integrity": "sha512-OZnvoH2l8PK5eUvEcUyCt/sXgr/h+UWpVuBbOljwcrAgUl6lpchoQ++PHGyQy1AtYnVA6CEq3y5xeEI10brpXw==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@babel/helper-string-parser": "^7.22.5",
|
||||
|
@ -14778,6 +14749,12 @@
|
|||
"@types/readdir-glob": "*"
|
||||
}
|
||||
},
|
||||
"@types/async-lock": {
|
||||
"version": "1.4.2",
|
||||
"resolved": "https://registry.npmjs.org/@types/async-lock/-/async-lock-1.4.2.tgz",
|
||||
"integrity": "sha512-HlZ6Dcr205BmNhwkdXqrg2vkFMN2PluI7Lgr8In3B3wE5PiQHhjRqtW/lGdVU9gw+sM0JcIDx2AN+cW8oSWIcw==",
|
||||
"dev": true
|
||||
},
|
||||
"@types/babel__core": {
|
||||
"version": "7.20.2",
|
||||
"resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.2.tgz",
|
||||
|
@ -15750,8 +15727,7 @@
|
|||
"async-lock": {
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/async-lock/-/async-lock-1.4.0.tgz",
|
||||
"integrity": "sha512-coglx5yIWuetakm3/1dsX9hxCNox22h7+V80RQOu2XUUMidtArxKoZoOtHUPuR84SycKTXzgGzAUR5hJxujyJQ==",
|
||||
"dev": true
|
||||
"integrity": "sha512-coglx5yIWuetakm3/1dsX9hxCNox22h7+V80RQOu2XUUMidtArxKoZoOtHUPuR84SycKTXzgGzAUR5hJxujyJQ=="
|
||||
},
|
||||
"asynckit": {
|
||||
"version": "0.4.0",
|
||||
|
@ -19092,11 +19068,6 @@
|
|||
"is-unicode-supported": "^0.1.0"
|
||||
}
|
||||
},
|
||||
"loglevel": {
|
||||
"version": "1.8.1",
|
||||
"resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.8.1.tgz",
|
||||
"integrity": "sha512-tCRIJM51SHjAayKwC+QAg8hT8vg6z7GSgLJKGvzuPb1Wc+hLzqtuVLxp6/HzSPOozuK+8ErAhy7U/sVzw8Dgfg=="
|
||||
},
|
||||
"lru-cache": {
|
||||
"version": "5.1.1",
|
||||
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz",
|
||||
|
@ -21789,25 +21760,6 @@
|
|||
"integrity": "sha512-6l+RyNy7oAHDfxC4FzSJcz9vnjTKxrLpDG5M2Vu4SHRVNg6xzqZp6LYSR9zjqQTu8DU/f5xwxUdADOkbrIX2gQ==",
|
||||
"devOptional": true
|
||||
},
|
||||
"typesense": {
|
||||
"version": "1.7.2",
|
||||
"resolved": "https://registry.npmjs.org/typesense/-/typesense-1.7.2.tgz",
|
||||
"integrity": "sha512-hgQESOiyNJq+w2mpRJa/a1UMhWtJ/+sb0p7NoeCDSkikm9sasisJdnc7uhQchM6vTWKw2sMLWUBNbAhItR6zUQ==",
|
||||
"requires": {
|
||||
"axios": "^0.26.0",
|
||||
"loglevel": "^1.8.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"axios": {
|
||||
"version": "0.26.1",
|
||||
"resolved": "https://registry.npmjs.org/axios/-/axios-0.26.1.tgz",
|
||||
"integrity": "sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==",
|
||||
"requires": {
|
||||
"follow-redirects": "^1.14.8"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"ua-parser-js": {
|
||||
"version": "1.0.37",
|
||||
"resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.37.tgz",
|
||||
|
|
|
@@ -50,6 +50,7 @@
"@nestjs/websockets": "^10.2.2",
"@socket.io/redis-adapter": "^8.2.1",
"archiver": "^6.0.0",
"async-lock": "^1.4.0",
"axios": "^1.5.0",
"bcrypt": "^5.1.1",
"bullmq": "^4.8.0",
@@ -77,7 +78,6 @@
"sharp": "^0.32.6",
"thumbhash": "^0.1.1",
"typeorm": "^0.3.17",
"typesense": "^1.7.1",
"ua-parser-js": "^1.0.35"
},
"devDependencies": {
@@ -87,6 +87,7 @@
"@openapitools/openapi-generator-cli": "2.7.0",
"@testcontainers/postgresql": "^10.2.1",
"@types/archiver": "^6.0.0",
"@types/async-lock": "^1.4.2",
"@types/bcrypt": "^5.0.0",
"@types/cookie-parser": "^1.4.3",
"@types/cron": "^2.0.1",

||||
|
|
|
@ -12,7 +12,6 @@ import {
|
|||
} from '@test';
|
||||
import _ from 'lodash';
|
||||
import { BulkIdErrorReason } from '../asset';
|
||||
import { JobName } from '../job';
|
||||
import { IAlbumRepository, IAssetRepository, IJobRepository, IUserRepository } from '../repositories';
|
||||
import { AlbumService } from './album.service';
|
||||
|
||||
|
@ -188,11 +187,6 @@ describe(AlbumService.name, () => {
|
|||
assetIds: ['123'],
|
||||
});
|
||||
|
||||
expect(jobMock.queue).toHaveBeenCalledWith({
|
||||
name: JobName.SEARCH_INDEX_ALBUM,
|
||||
data: { ids: [albumStub.empty.id] },
|
||||
});
|
||||
|
||||
expect(albumMock.create).toHaveBeenCalledWith({
|
||||
ownerId: authStub.admin.id,
|
||||
albumName: albumStub.empty.albumName,
|
||||
|
@ -270,10 +264,6 @@ describe(AlbumService.name, () => {
|
|||
id: 'album-4',
|
||||
albumName: 'new album name',
|
||||
});
|
||||
expect(jobMock.queue).toHaveBeenCalledWith({
|
||||
name: JobName.SEARCH_INDEX_ALBUM,
|
||||
data: { ids: [albumStub.oneAsset.id] },
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
@@ -4,7 +4,6 @@ import { AccessCore, Permission } from '../access';
import { BulkIdErrorReason, BulkIdResponseDto, BulkIdsDto } from '../asset';
import { AuthUserDto } from '../auth';
import { setUnion } from '../domain.util';
import { JobName } from '../job';
import {
AlbumAssetCount,
AlbumInfoOptions,
@@ -131,7 +130,6 @@ export class AlbumService {
albumThumbnailAssetId: dto.assetIds?.[0] || null,
});

await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_ALBUM, data: { ids: [album.id] } });
return mapAlbumWithAssets(album);
}

@@ -154,8 +152,6 @@ export class AlbumService {
isActivityEnabled: dto.isActivityEnabled,
});

await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_ALBUM, data: { ids: [updatedAlbum.id] } });

return mapAlbumWithoutAssets(updatedAlbum);
}

@@ -165,7 +161,6 @@ export class AlbumService {
const album = await this.findOrFail(id, { withAssets: false });

await this.albumRepository.delete(album);
await this.jobRepository.queue({ name: JobName.SEARCH_REMOVE_ALBUM, data: { ids: [id] } });
}

async addAssets(authUser: AuthUserDto, id: string, dto: BulkIdsDto): Promise<BulkIdResponseDto[]> {

|
@ -794,14 +794,7 @@ describe(AssetService.name, () => {
|
|||
await sut.deleteAll(authStub.user1, { ids: ['asset1', 'asset2'], force: false });
|
||||
|
||||
expect(assetMock.softDeleteAll).toHaveBeenCalledWith(['asset1', 'asset2']);
|
||||
expect(jobMock.queue.mock.calls).toEqual([
|
||||
[
|
||||
{
|
||||
name: JobName.SEARCH_REMOVE_ASSET,
|
||||
data: { ids: ['asset1', 'asset2'] },
|
||||
},
|
||||
],
|
||||
]);
|
||||
expect(jobMock.queue.mock.calls).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -820,14 +813,7 @@ describe(AssetService.name, () => {
|
|||
await sut.restoreAll(authStub.user1, { ids: ['asset1', 'asset2'] });
|
||||
|
||||
expect(assetMock.restoreAll).toHaveBeenCalledWith(['asset1', 'asset2']);
|
||||
expect(jobMock.queue.mock.calls).toEqual([
|
||||
[
|
||||
{
|
||||
name: JobName.SEARCH_INDEX_ASSET,
|
||||
data: { ids: ['asset1', 'asset2'] },
|
||||
},
|
||||
],
|
||||
]);
|
||||
expect(jobMock.queue.mock.calls).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -853,19 +839,6 @@ describe(AssetService.name, () => {
|
|||
await sut.handleAssetDeletion({ id: assetWithFace.id });
|
||||
|
||||
expect(jobMock.queue.mock.calls).toEqual([
|
||||
[
|
||||
{
|
||||
name: JobName.SEARCH_REMOVE_FACE,
|
||||
data: { assetId: faceStub.face1.assetId, personId: faceStub.face1.personId },
|
||||
},
|
||||
],
|
||||
[
|
||||
{
|
||||
name: JobName.SEARCH_REMOVE_FACE,
|
||||
data: { assetId: faceStub.mergeFace1.assetId, personId: faceStub.mergeFace1.personId },
|
||||
},
|
||||
],
|
||||
[{ name: JobName.SEARCH_REMOVE_ASSET, data: { ids: [assetWithFace.id] } }],
|
||||
[
|
||||
{
|
||||
name: JobName.DELETE_FILES,
|
||||
|
@ -907,9 +880,7 @@ describe(AssetService.name, () => {
|
|||
|
||||
await sut.handleAssetDeletion({ id: assetStub.readOnly.id });
|
||||
|
||||
expect(jobMock.queue.mock.calls).toEqual([
|
||||
[{ name: JobName.SEARCH_REMOVE_ASSET, data: { ids: [assetStub.readOnly.id] } }],
|
||||
]);
|
||||
expect(jobMock.queue.mock.calls).toEqual([]);
|
||||
|
||||
expect(assetMock.remove).toHaveBeenCalledWith(assetStub.readOnly);
|
||||
});
|
||||
|
@ -934,7 +905,6 @@ describe(AssetService.name, () => {
|
|||
|
||||
expect(assetMock.remove).toHaveBeenCalledWith(assetStub.external);
|
||||
expect(jobMock.queue.mock.calls).toEqual([
|
||||
[{ name: JobName.SEARCH_REMOVE_ASSET, data: { ids: [assetStub.external.id] } }],
|
||||
[
|
||||
{
|
||||
name: JobName.DELETE_FILES,
|
||||
|
@ -955,9 +925,7 @@ describe(AssetService.name, () => {
|
|||
await sut.handleAssetDeletion({ id: assetStub.livePhotoStillAsset.id });
|
||||
|
||||
expect(jobMock.queue.mock.calls).toEqual([
|
||||
[{ name: JobName.SEARCH_REMOVE_ASSET, data: { ids: [assetStub.livePhotoStillAsset.id] } }],
|
||||
[{ name: JobName.ASSET_DELETION, data: { id: assetStub.livePhotoMotionAsset.id } }],
|
||||
[{ name: JobName.SEARCH_REMOVE_ASSET, data: { ids: [assetStub.livePhotoMotionAsset.id] } }],
|
||||
[
|
||||
{
|
||||
name: JobName.DELETE_FILES,
|
||||
|
|
|
@@ -397,7 +397,6 @@ export class AssetService {
await this.updateMetadata({ id, description, dateTimeOriginal, latitude, longitude });

const asset = await this.assetRepository.save({ id, ...rest });
await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_ASSET, data: { ids: [id] } });
return mapAsset(asset);
}

@@ -426,7 +425,10 @@
await this.updateMetadata({ id, dateTimeOriginal, latitude, longitude });
}

await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_ASSET, data: { ids } });
for (const id of ids) {
await this.updateMetadata({ id, dateTimeOriginal, latitude, longitude });
}

await this.assetRepository.updateAll(ids, options);
this.communicationRepository.send(CommunicationEvent.ASSET_UPDATE, authUser.id, ids);
}
@@ -463,16 +465,6 @@
return false;
}

if (asset.faces) {
await Promise.all(
asset.faces.map(
({ assetId, personId }) =>
personId != null &&
this.jobRepository.queue({ name: JobName.SEARCH_REMOVE_FACE, data: { assetId, personId } }),
),
);
}

// Replace the parent of the stack children with a new asset
if (asset.stack && asset.stack.length != 0) {
const stackIds = asset.stack.map((a) => a.id);
@@ -482,7 +474,6 @@
}

await this.assetRepository.remove(asset);
await this.jobRepository.queue({ name: JobName.SEARCH_REMOVE_ASSET, data: { ids: [asset.id] } });
this.communicationRepository.send(CommunicationEvent.ASSET_DELETE, asset.ownerId, id);

// TODO refactor this to use cascades
@@ -513,7 +504,6 @@
}
} else {
await this.assetRepository.softDeleteAll(ids);
await this.jobRepository.queue({ name: JobName.SEARCH_REMOVE_ASSET, data: { ids } });
this.communicationRepository.send(CommunicationEvent.ASSET_TRASH, authUser.id, ids);
}
}
@@ -527,7 +517,6 @@
for await (const assets of assetPagination) {
const ids = assets.map((a) => a.id);
await this.assetRepository.restoreAll(ids);
await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_ASSET, data: { ids } });
this.communicationRepository.send(CommunicationEvent.ASSET_RESTORE, authUser.id, ids);
}
return;
@@ -547,7 +536,6 @@
const { ids } = dto;
await this.access.requirePermission(authUser, Permission.ASSET_RESTORE, ids);
await this.assetRepository.restoreAll(ids);
await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_ASSET, data: { ids } });
this.communicationRepository.send(CommunicationEvent.ASSET_RESTORE, authUser.id, ids);
}

|
@@ -18,11 +18,6 @@ export const immichAppConfig: ConfigModuleOptions = {
DB_PASSWORD: WHEN_DB_URL_SET,
DB_DATABASE_NAME: WHEN_DB_URL_SET,
DB_URL: Joi.string().optional(),
TYPESENSE_API_KEY: Joi.when('TYPESENSE_ENABLED', {
is: 'false',
then: Joi.string().optional(),
otherwise: Joi.string().required(),
}),
DISABLE_REVERSE_GEOCODING: Joi.boolean().optional().valid(true, false).default(false),
REVERSE_GEOCODING_PRECISION: Joi.number().optional().valid(0, 1, 2, 3).default(3),
LOG_LEVEL: Joi.string().optional().valid('simple', 'verbose', 'debug', 'log', 'warn', 'error').default('log'),

@@ -1,4 +1,4 @@
import { DynamicModule, Global, Module, ModuleMetadata, OnApplicationShutdown, Provider } from '@nestjs/common';
import { DynamicModule, Global, Module, ModuleMetadata, Provider } from '@nestjs/common';
import { ActivityService } from './activity';
import { AlbumService } from './album';
import { APIKeyService } from './api-key';
@@ -54,9 +54,7 @@ const providers: Provider[] = [

@Global()
@Module({})
export class DomainModule implements OnApplicationShutdown {
constructor(private searchService: SearchService) {}

export class DomainModule {
static register(options: Pick<ModuleMetadata, 'imports'>): DynamicModule {
return {
module: DomainModule,
@@ -65,8 +63,4 @@ export class DomainModule implements OnApplicationShutdown {
exports: [...providers],
};
}

onApplicationShutdown() {
this.searchService.teardown();
}
}

|
@@ -78,17 +78,6 @@ export enum JobName {
DELETE_FILES = 'delete-files',
CLEAN_OLD_AUDIT_LOGS = 'clean-old-audit-logs',

// search
SEARCH_INDEX_ASSETS = 'search-index-assets',
SEARCH_INDEX_ASSET = 'search-index-asset',
SEARCH_INDEX_FACE = 'search-index-face',
SEARCH_INDEX_FACES = 'search-index-faces',
SEARCH_INDEX_ALBUMS = 'search-index-albums',
SEARCH_INDEX_ALBUM = 'search-index-album',
SEARCH_REMOVE_ALBUM = 'search-remove-album',
SEARCH_REMOVE_ASSET = 'search-remove-asset',
SEARCH_REMOVE_FACE = 'search-remove-face',

// clip
QUEUE_ENCODE_CLIP = 'queue-clip-encode',
ENCODE_CLIP = 'clip-encode',
@@ -151,21 +140,6 @@ export const JOBS_TO_QUEUE: Record<JobName, QueueName> = {
[JobName.QUEUE_ENCODE_CLIP]: QueueName.CLIP_ENCODING,
[JobName.ENCODE_CLIP]: QueueName.CLIP_ENCODING,

// search - albums
[JobName.SEARCH_INDEX_ALBUMS]: QueueName.SEARCH,
[JobName.SEARCH_INDEX_ALBUM]: QueueName.SEARCH,
[JobName.SEARCH_REMOVE_ALBUM]: QueueName.SEARCH,

// search - assets
[JobName.SEARCH_INDEX_ASSETS]: QueueName.SEARCH,
[JobName.SEARCH_INDEX_ASSET]: QueueName.SEARCH,
[JobName.SEARCH_REMOVE_ASSET]: QueueName.SEARCH,

// search - faces
[JobName.SEARCH_INDEX_FACES]: QueueName.SEARCH,
[JobName.SEARCH_INDEX_FACE]: QueueName.SEARCH,
[JobName.SEARCH_REMOVE_FACE]: QueueName.SEARCH,

// XMP sidecars
[JobName.QUEUE_SIDECAR]: QueueName.SIDECAR,
[JobName.SIDECAR_DISCOVERY]: QueueName.SIDECAR,

@@ -2,11 +2,6 @@ export interface IBaseJob {
force?: boolean;
}

export interface IAssetFaceJob extends IBaseJob {
assetId: string;
personId: string;
}

export interface IEntityJob extends IBaseJob {
id: string;
source?: 'upload' | 'sidecar-write';

@ -271,7 +271,7 @@ describe(JobService.name, () => {
|
|||
},
|
||||
{
|
||||
item: { name: JobName.LINK_LIVE_PHOTOS, data: { id: 'asset-1' } },
|
||||
jobs: [JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, JobName.SEARCH_INDEX_ASSET],
|
||||
jobs: [JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE],
|
||||
},
|
||||
{
|
||||
item: { name: JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE, data: { id: 'asset-1', source: 'upload' } },
|
||||
|
@ -315,15 +315,15 @@ describe(JobService.name, () => {
|
|||
},
|
||||
{
|
||||
item: { name: JobName.CLASSIFY_IMAGE, data: { id: 'asset-1' } },
|
||||
jobs: [JobName.SEARCH_INDEX_ASSET],
|
||||
jobs: [],
|
||||
},
|
||||
{
|
||||
item: { name: JobName.ENCODE_CLIP, data: { id: 'asset-1' } },
|
||||
jobs: [JobName.SEARCH_INDEX_ASSET],
|
||||
jobs: [],
|
||||
},
|
||||
{
|
||||
item: { name: JobName.RECOGNIZE_FACES, data: { id: 'asset-1' } },
|
||||
jobs: [JobName.SEARCH_INDEX_ASSET],
|
||||
jobs: [],
|
||||
},
|
||||
];
|
||||
|
||||
|
|
|
@@ -236,15 +236,5 @@ export class JobService {
}
}
}

// In addition to the above jobs, all of these should queue `SEARCH_INDEX_ASSET`
switch (item.name) {
case JobName.CLASSIFY_IMAGE:
case JobName.ENCODE_CLIP:
case JobName.RECOGNIZE_FACES:
case JobName.LINK_LIVE_PHOTOS:
await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_ASSET, data: { ids: [item.data.id] } });
break;
}
}
}

@ -61,33 +61,6 @@ const detectFaceMock = {
|
|||
score: 0.2,
|
||||
};
|
||||
|
||||
const faceSearch = {
|
||||
noMatch: {
|
||||
total: 0,
|
||||
count: 0,
|
||||
page: 1,
|
||||
items: [],
|
||||
distances: [],
|
||||
facets: [],
|
||||
},
|
||||
oneMatch: {
|
||||
total: 1,
|
||||
count: 1,
|
||||
page: 1,
|
||||
items: [faceStub.face1],
|
||||
distances: [0.1],
|
||||
facets: [],
|
||||
},
|
||||
oneRemoteMatch: {
|
||||
total: 1,
|
||||
count: 1,
|
||||
page: 1,
|
||||
items: [faceStub.face1],
|
||||
distances: [0.8],
|
||||
facets: [],
|
||||
},
|
||||
};
|
||||
|
||||
describe(PersonService.name, () => {
|
||||
let accessMock: IAccessRepositoryMock;
|
||||
let assetMock: jest.Mocked<IAssetRepository>;
|
||||
|
@ -283,10 +256,6 @@ describe(PersonService.name, () => {
|
|||
|
||||
expect(personMock.getById).toHaveBeenCalledWith('person-1');
|
||||
expect(personMock.update).toHaveBeenCalledWith({ id: 'person-1', name: 'Person 1' });
|
||||
expect(jobMock.queue).toHaveBeenCalledWith({
|
||||
name: JobName.SEARCH_INDEX_ASSET,
|
||||
data: { ids: [assetStub.image.id] },
|
||||
});
|
||||
expect(accessMock.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.id, new Set(['person-1']));
|
||||
});
|
||||
|
||||
|
@ -320,10 +289,6 @@ describe(PersonService.name, () => {
|
|||
|
||||
expect(personMock.getById).toHaveBeenCalledWith('person-1');
|
||||
expect(personMock.update).toHaveBeenCalledWith({ id: 'person-1', isHidden: false });
|
||||
expect(jobMock.queue).toHaveBeenCalledWith({
|
||||
name: JobName.SEARCH_INDEX_ASSET,
|
||||
data: { ids: [assetStub.image.id] },
|
||||
});
|
||||
expect(accessMock.person.checkOwnerAccess).toHaveBeenCalledWith(authStub.admin.id, new Set(['person-1']));
|
||||
});
|
||||
|
||||
|
@ -547,7 +512,7 @@ describe(PersonService.name, () => {
|
|||
hasNextPage: false,
|
||||
});
|
||||
personMock.getAll.mockResolvedValue([personStub.withName]);
|
||||
searchMock.deleteAllFaces.mockResolvedValue(100);
|
||||
personMock.deleteAll.mockResolvedValue(5);
|
||||
|
||||
await sut.handleQueueRecognizeFaces({ force: true });
|
||||
|
||||
|
@ -626,7 +591,7 @@ describe(PersonService.name, () => {
|
|||
|
||||
it('should match existing people', async () => {
|
||||
machineLearningMock.detectFaces.mockResolvedValue([detectFaceMock]);
|
||||
searchMock.searchFaces.mockResolvedValue(faceSearch.oneMatch);
|
||||
personMock.searchByEmbedding.mockResolvedValue([faceStub.face1]);
|
||||
assetMock.getByIds.mockResolvedValue([assetStub.image]);
|
||||
await sut.handleRecognizeFaces({ id: assetStub.image.id });
|
||||
|
||||
|
@ -645,7 +610,7 @@ describe(PersonService.name, () => {
|
|||
|
||||
it('should create a new person', async () => {
|
||||
machineLearningMock.detectFaces.mockResolvedValue([detectFaceMock]);
|
||||
searchMock.searchFaces.mockResolvedValue(faceSearch.oneRemoteMatch);
|
||||
personMock.searchByEmbedding.mockResolvedValue([]);
|
||||
personMock.create.mockResolvedValue(personStub.noName);
|
||||
assetMock.getByIds.mockResolvedValue([assetStub.image]);
|
||||
personMock.createFace.mockResolvedValue(faceStub.primaryFace1);
|
||||
|
@ -664,10 +629,6 @@ describe(PersonService.name, () => {
|
|||
imageHeight: 500,
|
||||
imageWidth: 400,
|
||||
});
|
||||
expect(jobMock.queue.mock.calls).toEqual([
|
||||
[{ name: JobName.SEARCH_INDEX_FACE, data: { personId: 'person-1', assetId: 'asset-id' } }],
|
||||
[{ name: JobName.GENERATE_PERSON_THUMBNAIL, data: { id: 'person-1' } }],
|
||||
]);
|
||||
});
|
||||
});
|
||||
describe('handleGeneratePersonThumbnail', () => {
|
||||
|
|
|
@@ -198,11 +198,6 @@ export class PersonService {

if (name !== undefined || birthDate !== undefined || isHidden !== undefined) {
person = await this.repository.update({ id, name, birthDate, isHidden });
if (this.needsSearchIndexUpdate(dto)) {
const assets = await this.repository.getAssets(id);
const ids = assets.map((asset) => asset.id);
await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_ASSET, data: { ids } });
}
}

if (assetId) {
@@ -281,8 +276,7 @@
for (const person of people) {
await this.jobRepository.queue({ name: JobName.PERSON_DELETE, data: { id: person.id } });
}
const faces = await this.searchRepository.deleteAllFaces();
this.logger.debug(`Deleted ${people} people and ${faces} faces`);
this.logger.debug(`Deleted ${people.length} people`);
}

for await (const assets of assetPagination) {
@@ -318,20 +312,17 @@
);

this.logger.debug(`${faces.length} faces detected in ${asset.resizePath}`);
this.logger.verbose(faces.map((face) => ({ ...face, embedding: `float[${face.embedding.length}]` })));
this.logger.verbose(faces.map((face) => ({ ...face, embedding: `vector(${face.embedding.length})` })));

for (const { embedding, ...rest } of faces) {
const faceSearchResult = await this.searchRepository.searchFaces(embedding, { ownerId: asset.ownerId });

let personId: string | null = null;

// try to find a matching face and link to the associated person
// The closer to 0, the better the match. Range is from 0 to 2
if (faceSearchResult.total && faceSearchResult.distances[0] <= machineLearning.facialRecognition.maxDistance) {
this.logger.verbose(`Match face with distance ${faceSearchResult.distances[0]}`);
personId = faceSearchResult.items[0].personId;
}
const matches = await this.repository.searchByEmbedding({
ownerId: asset.ownerId,
embedding,
numResults: 1,
maxDistance: machineLearning.facialRecognition.maxDistance,
});

let personId = matches[0]?.personId || null;
let newPerson: PersonEntity | null = null;
if (!personId) {
this.logger.debug('No matches, creating a new person.');
@@ -350,8 +341,6 @@
boundingBoxY1: rest.boundingBox.y1,
boundingBoxY2: rest.boundingBox.y2,
});
const faceId: AssetFaceId = { assetId: asset.id, personId };
await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_FACE, data: faceId });

if (newPerson) {
await this.repository.update({ id: personId, faceAssetId: face.id });
@@ -489,21 +478,9 @@
}
}

// Re-index all faces in typesense for up-to-date search results
await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_FACES });

return results;
}

/**
* Returns true if the given person update is going to require an update of the search index.
* @param dto the Person going to be updated
* @private
*/
private needsSearchIndexUpdate(dto: PersonUpdateDto): boolean {
return dto.name !== undefined || dto.isHidden !== undefined;
}

private async findOrFail(id: string) {
const person = await this.repository.getById(id);
if (!person) {

@@ -1,6 +1,7 @@
import { AssetEntity, AssetJobStatusEntity, AssetType, ExifEntity } from '@app/infra/entities';
import { FindOptionsRelations } from 'typeorm';
import { Paginated, PaginationOptions } from '../domain.util';
import { SearchExploreItem } from '../repositories';

export type AssetStats = Record<AssetType, number>;

@@ -105,8 +106,7 @@ export enum TimeBucketSize {
MONTH = 'MONTH',
}

export interface TimeBucketOptions {
size: TimeBucketSize;
export interface AssetBuilderOptions {
isArchived?: boolean;
isFavorite?: boolean;
isTrashed?: boolean;
@@ -114,6 +114,12 @@ export interface TimeBucketOptions {
personId?: string;
userIds?: string[];
withStacked?: boolean;
exifInfo?: boolean;
assetType?: AssetType;
}

export interface TimeBucketOptions extends AssetBuilderOptions {
size: TimeBucketSize;
}

export interface TimeBucketItem {
@@ -142,6 +148,21 @@ export interface MonthDay {
month: number;
}

export interface AssetExploreFieldOptions {
maxFields: number;
minAssetsPerField: number;
}

export interface AssetExploreOptions extends AssetExploreFieldOptions {
relation: keyof AssetEntity;
relatedField: string;
unnest?: boolean;
}

export interface MetadataSearchOptions {
numResults: number;
}

export const IAssetRepository = 'IAssetRepository';

export interface IAssetRepository {
@@ -175,5 +196,7 @@ export interface IAssetRepository {
getTimeBucket(timeBucket: string, options: TimeBucketOptions): Promise<AssetEntity[]>;
upsertExif(exif: Partial<ExifEntity>): Promise<void>;
upsertJobStatus(jobStatus: Partial<AssetJobStatusEntity>): Promise<void>;
search(options: AssetSearchOptions): Promise<AssetEntity[]>;
getAssetIdByCity(userId: string, options: AssetExploreFieldOptions): Promise<SearchExploreItem<string>>;
getAssetIdByTag(userId: string, options: AssetExploreFieldOptions): Promise<SearchExploreItem<string>>;
searchMetadata(query: string, userId: string, options: MetadataSearchOptions): Promise<AssetEntity[]>;
}

@@ -2,9 +2,7 @@ import { JobName, QueueName } from '../job/job.constants';

import {
IAssetDeletionJob,
IAssetFaceJob,
IBaseJob,
IBulkEntityJob,
IDeleteFilesJob,
IEntityJob,
ILibraryFileJob,
@@ -96,18 +94,7 @@ export type JobItem =
| { name: JobName.LIBRARY_REMOVE_OFFLINE; data: IEntityJob }
| { name: JobName.LIBRARY_DELETE; data: IEntityJob }
| { name: JobName.LIBRARY_QUEUE_SCAN_ALL; data: IBaseJob }
| { name: JobName.LIBRARY_QUEUE_CLEANUP; data: IBaseJob }

// Search
| { name: JobName.SEARCH_INDEX_ASSETS; data?: IBaseJob }
| { name: JobName.SEARCH_INDEX_ASSET; data: IBulkEntityJob }
| { name: JobName.SEARCH_INDEX_FACES; data?: IBaseJob }
| { name: JobName.SEARCH_INDEX_FACE; data: IAssetFaceJob }
| { name: JobName.SEARCH_INDEX_ALBUMS; data?: IBaseJob }
| { name: JobName.SEARCH_INDEX_ALBUM; data: IBulkEntityJob }
| { name: JobName.SEARCH_REMOVE_ASSET; data: IBulkEntityJob }
| { name: JobName.SEARCH_REMOVE_ALBUM; data: IBulkEntityJob }
| { name: JobName.SEARCH_REMOVE_FACE; data: IAssetFaceJob };
| { name: JobName.LIBRARY_QUEUE_CLEANUP; data: IBaseJob };

export type JobHandler<T = any> = (data: T) => boolean | Promise<boolean>;
export type JobItemHandler = (item: JobItem) => Promise<void>;

@@ -25,6 +25,13 @@ export interface PersonStatistics {
assets: number;
}

export interface EmbeddingSearch {
ownerId: string;
embedding: number[];
numResults: number;
maxDistance?: number;
}

export interface IPersonRepository {
getAll(): Promise<PersonEntity[]>;
getAllWithoutThumbnail(): Promise<PersonEntity[]>;
@@ -41,9 +48,8 @@ export interface IPersonRepository {
update(entity: Partial<PersonEntity>): Promise<PersonEntity>;
delete(entity: PersonEntity): Promise<PersonEntity | null>;
deleteAll(): Promise<number>;

getStatistics(personId: string): Promise<PersonStatistics>;

searchByEmbedding(search: EmbeddingSearch): Promise<AssetFaceEntity[]>;
getAllFaces(): Promise<AssetFaceEntity[]>;
getFacesByIds(ids: AssetFaceId[]): Promise<AssetFaceEntity[]>;
getRandomFace(personId: string): Promise<AssetFaceEntity | null>;

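As an aside, the new `EmbeddingSearch` shape added above maps naturally onto a pgvector query. A hedged sketch (hypothetical table, column, and import names — not the repository's actual implementation) of how a facial-recognition match honoring `maxDistance` and `numResults` could look:

```typescript
import { DataSource } from 'typeorm';
import { EmbeddingSearch } from './person.repository'; // hypothetical import path

// Sketch only: assumes asset_faces.embedding is a pgvector column with a cosine-distance index.
export async function searchFacesByEmbedding(db: DataSource, search: EmbeddingSearch) {
  const { ownerId, embedding, numResults, maxDistance } = search;
  const vector = `[${embedding.join(',')}]`;
  return db.query(
    `SELECT f.*
       FROM "asset_faces" f
       JOIN "assets" a ON a."id" = f."assetId"
      WHERE a."ownerId" = $1
        AND ($3::float8 IS NULL OR f."embedding" <=> $2 <= $3)
      ORDER BY f."embedding" <=> $2
      LIMIT $4`,
    [ownerId, vector, maxDistance ?? null, numResults],
  );
}
```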
@@ -1,20 +1,10 @@
import { AlbumEntity, AssetEntity, AssetFaceEntity, AssetType } from '@app/infra/entities';

export enum SearchCollection {
ASSETS = 'assets',
ALBUMS = 'albums',
FACES = 'faces',
}
import { AssetEntity, AssetType } from '@app/infra/entities';

export enum SearchStrategy {
CLIP = 'CLIP',
TEXT = 'TEXT',
}

export interface SearchFaceFilter {
ownerId: string;
}

export interface SearchFilter {
id?: string;
userId: string;
@@ -55,43 +45,18 @@ export interface SearchFacet {
}>;
}

export type SearchExploreItemSet<T> = Array<{
value: string;
data: T;
}>;

export interface SearchExploreItem<T> {
fieldName: string;
items: Array<{
value: string;
data: T;
}>;
items: SearchExploreItemSet<T>;
}

export type OwnedFaceEntity = Pick<AssetFaceEntity, 'assetId' | 'personId' | 'embedding'> & {
/** computed as assetId|personId */
id: string;
/** copied from asset.id */
ownerId: string;
};

export type SearchCollectionIndexStatus = Record<SearchCollection, boolean>;

export const ISearchRepository = 'ISearchRepository';

export interface ISearchRepository {
setup(): Promise<void>;
checkMigrationStatus(): Promise<SearchCollectionIndexStatus>;

importAlbums(items: AlbumEntity[], done: boolean): Promise<void>;
importAssets(items: AssetEntity[], done: boolean): Promise<void>;
importFaces(items: OwnedFaceEntity[], done: boolean): Promise<void>;

deleteAlbums(ids: string[]): Promise<void>;
deleteAssets(ids: string[]): Promise<void>;
deleteFaces(ids: string[]): Promise<void>;
deleteAllFaces(): Promise<number>;
updateCLIPField(num_dim: number): Promise<void>;

searchAlbums(query: string, filters: SearchFilter): Promise<SearchResult<AlbumEntity>>;
searchAssets(query: string, filters: SearchFilter): Promise<SearchResult<AssetEntity>>;
vectorSearch(query: number[], filters: SearchFilter): Promise<SearchResult<AssetEntity>>;
searchFaces(query: number[], filters: SearchFaceFilter): Promise<SearchResult<AssetFaceEntity>>;

explore(userId: string): Promise<SearchExploreItem<AssetEntity>[]>;
}

@@ -1,7 +1,9 @@
import { SmartInfoEntity } from '@app/infra/entities';
import { AssetEntity, SmartInfoEntity } from '@app/infra/entities';
import { EmbeddingSearch } from '../repositories';

export const ISmartInfoRepository = 'ISmartInfoRepository';

export interface ISmartInfoRepository {
searchByEmbedding(search: EmbeddingSearch): Promise<AssetEntity[]>;
upsert(info: Partial<SmartInfoEntity>): Promise<void>;
}

|
|
|
@ -1,29 +1,16 @@
|
|||
import { BadRequestException } from '@nestjs/common';
|
||||
import {
|
||||
albumStub,
|
||||
assetStub,
|
||||
asyncTick,
|
||||
authStub,
|
||||
faceStub,
|
||||
newAlbumRepositoryMock,
|
||||
newAssetRepositoryMock,
|
||||
newJobRepositoryMock,
|
||||
newMachineLearningRepositoryMock,
|
||||
newPersonRepositoryMock,
|
||||
newSearchRepositoryMock,
|
||||
newSmartInfoRepositoryMock,
|
||||
newSystemConfigRepositoryMock,
|
||||
searchStub,
|
||||
} from '@test';
|
||||
import { plainToInstance } from 'class-transformer';
|
||||
import { mapAsset } from '../asset';
|
||||
import { JobName } from '../job';
|
||||
import {
|
||||
IAlbumRepository,
|
||||
IAssetRepository,
|
||||
IJobRepository,
|
||||
IMachineLearningRepository,
|
||||
IPersonRepository,
|
||||
ISearchRepository,
|
||||
ISmartInfoRepository,
|
||||
ISystemConfigRepository,
|
||||
} from '../repositories';
|
||||
import { SearchDto } from './dto';
|
||||
|
@ -33,40 +20,19 @@ jest.useFakeTimers();
|
|||
|
||||
describe(SearchService.name, () => {
|
||||
let sut: SearchService;
|
||||
let albumMock: jest.Mocked<IAlbumRepository>;
|
||||
let assetMock: jest.Mocked<IAssetRepository>;
|
||||
let configMock: jest.Mocked<ISystemConfigRepository>;
|
||||
let jobMock: jest.Mocked<IJobRepository>;
|
||||
let personMock: jest.Mocked<IPersonRepository>;
|
||||
let machineMock: jest.Mocked<IMachineLearningRepository>;
|
||||
let searchMock: jest.Mocked<ISearchRepository>;
|
||||
let personMock: jest.Mocked<IPersonRepository>;
|
||||
let smartInfoMock: jest.Mocked<ISmartInfoRepository>;
|
||||
|
||||
beforeEach(async () => {
|
||||
albumMock = newAlbumRepositoryMock();
|
||||
beforeEach(() => {
|
||||
assetMock = newAssetRepositoryMock();
|
||||
configMock = newSystemConfigRepositoryMock();
|
||||
jobMock = newJobRepositoryMock();
|
||||
personMock = newPersonRepositoryMock();
|
||||
machineMock = newMachineLearningRepositoryMock();
|
||||
searchMock = newSearchRepositoryMock();
|
||||
|
||||
sut = new SearchService(albumMock, assetMock, jobMock, machineMock, personMock, searchMock, configMock);
|
||||
|
||||
searchMock.checkMigrationStatus.mockResolvedValue({ assets: false, albums: false, faces: false });
|
||||
|
||||
delete process.env.TYPESENSE_ENABLED;
|
||||
await sut.init();
|
||||
});
|
||||
|
||||
const disableSearch = () => {
|
||||
searchMock.setup.mockClear();
|
||||
searchMock.checkMigrationStatus.mockClear();
|
||||
jobMock.queue.mockClear();
|
||||
process.env.TYPESENSE_ENABLED = 'false';
|
||||
};
|
||||
|
||||
afterEach(() => {
|
||||
sut.teardown();
|
||||
personMock = newPersonRepositoryMock();
|
||||
smartInfoMock = newSmartInfoRepositoryMock();
|
||||
sut = new SearchService(configMock, machineMock, personMock, smartInfoMock, assetMock);
|
||||
});
|
||||
|
||||
it('should work', () => {
|
||||
|
@ -94,340 +60,4 @@ describe(SearchService.name, () => {
|
|||
expect(instance['smartInfo.objects']).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe(`init`, () => {
|
||||
it('should skip when search is disabled', async () => {
|
||||
disableSearch();
|
||||
await sut.init();
|
||||
|
||||
expect(searchMock.setup).not.toHaveBeenCalled();
|
||||
expect(searchMock.checkMigrationStatus).not.toHaveBeenCalled();
|
||||
expect(jobMock.queue).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should skip schema migration if not needed', async () => {
|
||||
await sut.init();
|
||||
|
||||
expect(searchMock.setup).toHaveBeenCalled();
|
||||
expect(jobMock.queue).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should do schema migration if needed', async () => {
|
||||
searchMock.checkMigrationStatus.mockResolvedValue({ assets: true, albums: true, faces: true });
|
||||
await sut.init();
|
||||
|
||||
expect(searchMock.setup).toHaveBeenCalled();
|
||||
expect(jobMock.queue.mock.calls).toEqual([
|
||||
[{ name: JobName.SEARCH_INDEX_ASSETS }],
|
||||
[{ name: JobName.SEARCH_INDEX_ALBUMS }],
|
||||
[{ name: JobName.SEARCH_INDEX_FACES }],
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getExploreData', () => {
|
||||
it('should throw bad request exception if search is disabled', async () => {
|
||||
disableSearch();
|
||||
await expect(sut.getExploreData(authStub.admin)).rejects.toBeInstanceOf(BadRequestException);
|
||||
expect(searchMock.explore).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should return explore data if feature flag SEARCH is set', async () => {
|
||||
searchMock.explore.mockResolvedValue([{ fieldName: 'name', items: [{ value: 'image', data: assetStub.image }] }]);
|
||||
assetMock.getByIds.mockResolvedValue([assetStub.image]);
|
||||
|
||||
await expect(sut.getExploreData(authStub.admin)).resolves.toEqual([
|
||||
{
|
||||
fieldName: 'name',
|
||||
items: [{ value: 'image', data: mapAsset(assetStub.image) }],
|
||||
},
|
||||
]);
|
||||
|
||||
expect(searchMock.explore).toHaveBeenCalledWith(authStub.admin.id);
|
||||
expect(assetMock.getByIds).toHaveBeenCalledWith([assetStub.image.id]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('search', () => {
|
||||
// it('should throw an error is search is disabled', async () => {
|
||||
// sut['enabled'] = false;
|
||||
|
||||
// await expect(sut.search(authStub.admin, {})).rejects.toBeInstanceOf(BadRequestException);
|
||||
|
||||
// expect(searchMock.searchAlbums).not.toHaveBeenCalled();
|
||||
// expect(searchMock.searchAssets).not.toHaveBeenCalled();
|
||||
// });
|
||||
|
||||
it('should search assets and albums using text search', async () => {
|
||||
searchMock.searchAssets.mockResolvedValue(searchStub.withImage);
|
||||
searchMock.searchAlbums.mockResolvedValue(searchStub.emptyResults);
|
||||
assetMock.getByIds.mockResolvedValue([assetStub.image]);
|
||||
|
||||
await expect(sut.search(authStub.admin, {})).resolves.toEqual({
|
||||
albums: {
|
||||
total: 0,
|
||||
count: 0,
|
||||
page: 1,
|
||||
items: [],
|
||||
facets: [],
|
||||
distances: [],
|
||||
},
|
||||
assets: {
|
||||
total: 1,
|
||||
count: 1,
|
||||
page: 1,
|
||||
items: [mapAsset(assetStub.image)],
|
||||
facets: [],
|
||||
distances: [],
|
||||
},
|
||||
});
|
||||
|
||||
// expect(searchMock.searchAssets).toHaveBeenCalledWith('*', { userId: authStub.admin.id });
|
||||
expect(searchMock.searchAlbums).toHaveBeenCalledWith('*', { userId: authStub.admin.id });
|
||||
});
|
||||
|
||||
it('should search assets and albums using vector search', async () => {
|
||||
searchMock.vectorSearch.mockResolvedValue(searchStub.emptyResults);
|
||||
searchMock.searchAlbums.mockResolvedValue(searchStub.emptyResults);
|
||||
machineMock.encodeText.mockResolvedValue([123]);
|
||||
|
||||
await expect(sut.search(authStub.admin, { clip: true, query: 'foo' })).resolves.toEqual({
|
||||
albums: {
|
||||
total: 0,
|
||||
count: 0,
|
||||
page: 1,
|
||||
items: [],
|
||||
facets: [],
|
||||
distances: [],
|
||||
},
|
||||
assets: {
|
||||
total: 0,
|
||||
count: 0,
|
||||
page: 1,
|
||||
items: [],
|
||||
facets: [],
|
||||
distances: [],
|
||||
},
|
||||
});
|
||||
|
||||
expect(machineMock.encodeText).toHaveBeenCalledWith(expect.any(String), { text: 'foo' }, expect.any(Object));
|
||||
expect(searchMock.vectorSearch).toHaveBeenCalledWith([123], {
|
||||
userId: authStub.admin.id,
|
||||
clip: true,
|
||||
query: 'foo',
|
||||
});
|
||||
expect(searchMock.searchAlbums).toHaveBeenCalledWith('foo', {
|
||||
userId: authStub.admin.id,
|
||||
clip: true,
|
||||
query: 'foo',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleIndexAssets', () => {
|
||||
it('should call done, even when there are no assets', async () => {
|
||||
await sut.handleIndexAssets();
|
||||
|
||||
expect(searchMock.importAssets).toHaveBeenCalledWith([], true);
|
||||
});
|
||||
|
||||
it('should index all the assets', async () => {
|
||||
assetMock.getAll.mockResolvedValue({
|
||||
items: [assetStub.image],
|
||||
hasNextPage: false,
|
||||
});
|
||||
|
||||
await sut.handleIndexAssets();
|
||||
|
||||
expect(searchMock.importAssets.mock.calls).toEqual([
|
||||
[[assetStub.image], false],
|
||||
[[], true],
|
||||
]);
|
||||
});
|
||||
|
||||
it('should skip if search is disabled', async () => {
|
||||
sut['enabled'] = false;
|
||||
|
||||
await sut.handleIndexAssets();
|
||||
|
||||
expect(searchMock.importAssets).not.toHaveBeenCalled();
|
||||
expect(searchMock.importAlbums).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleIndexAsset', () => {
|
||||
it('should skip if search is disabled', () => {
|
||||
sut['enabled'] = false;
|
||||
sut.handleIndexAsset({ ids: [assetStub.image.id] });
|
||||
});
|
||||
|
||||
it('should index the asset', () => {
|
||||
sut.handleIndexAsset({ ids: [assetStub.image.id] });
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleIndexAlbums', () => {
|
||||
it('should skip if search is disabled', async () => {
|
||||
sut['enabled'] = false;
|
||||
await sut.handleIndexAlbums();
|
||||
});
|
||||
|
||||
it('should index all the albums', async () => {
|
||||
albumMock.getAll.mockResolvedValue([albumStub.empty]);
|
||||
|
||||
await sut.handleIndexAlbums();
|
||||
|
||||
expect(searchMock.importAlbums).toHaveBeenCalledWith([albumStub.empty], true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleIndexAlbum', () => {
|
||||
it('should skip if search is disabled', () => {
|
||||
sut['enabled'] = false;
|
||||
sut.handleIndexAlbum({ ids: [albumStub.empty.id] });
|
||||
});
|
||||
|
||||
it('should index the album', () => {
|
||||
sut.handleIndexAlbum({ ids: [albumStub.empty.id] });
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleRemoveAlbum', () => {
|
||||
it('should skip if search is disabled', () => {
|
||||
sut['enabled'] = false;
|
||||
sut.handleRemoveAlbum({ ids: ['album1'] });
|
||||
});
|
||||
|
||||
it('should remove the album', () => {
|
||||
sut.handleRemoveAlbum({ ids: ['album1'] });
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleRemoveAsset', () => {
|
||||
it('should skip if search is disabled', () => {
|
||||
sut['enabled'] = false;
|
||||
sut.handleRemoveAsset({ ids: ['asset1'] });
|
||||
});
|
||||
|
||||
it('should remove the asset', () => {
|
||||
sut.handleRemoveAsset({ ids: ['asset1'] });
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleIndexFaces', () => {
|
||||
it('should call done, even when there are no faces', async () => {
|
||||
personMock.getAllFaces.mockResolvedValue([]);
|
||||
|
||||
await sut.handleIndexFaces();
|
||||
|
||||
expect(searchMock.importFaces).toHaveBeenCalledWith([], true);
|
||||
});
|
||||
|
||||
it('should index all the faces', async () => {
|
||||
personMock.getAllFaces.mockResolvedValue([faceStub.face1]);
|
||||
|
||||
await sut.handleIndexFaces();
|
||||
|
||||
expect(searchMock.importFaces.mock.calls).toEqual([
|
||||
[
|
||||
[
|
||||
{
|
||||
id: 'asset-id|person-1',
|
||||
ownerId: 'user-id',
|
||||
assetId: 'asset-id',
|
||||
personId: 'person-1',
|
||||
embedding: [1, 2, 3, 4],
|
||||
},
|
||||
],
|
||||
false,
|
||||
],
|
||||
[[], true],
|
||||
]);
|
||||
});
|
||||
|
||||
it('should skip if search is disabled', async () => {
|
||||
sut['enabled'] = false;
|
||||
|
||||
await sut.handleIndexFaces();
|
||||
|
||||
expect(searchMock.importFaces).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleIndexAsset', () => {
|
||||
it('should skip if search is disabled', async () => {
|
||||
sut['enabled'] = false;
|
||||
await sut.handleIndexFace({ assetId: 'asset-1', personId: 'person-1' });
|
||||
|
||||
expect(searchMock.importFaces).not.toHaveBeenCalled();
|
||||
expect(personMock.getFacesByIds).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should index the face', async () => {
|
||||
personMock.getFacesByIds.mockResolvedValue([faceStub.face1]);
|
||||
|
||||
await sut.handleIndexFace({ assetId: 'asset-1', personId: 'person-1' });
|
||||
|
||||
expect(personMock.getFacesByIds).toHaveBeenCalledWith([{ assetId: 'asset-1', personId: 'person-1' }]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('handleRemoveFace', () => {
|
||||
it('should skip if search is disabled', () => {
|
||||
sut['enabled'] = false;
|
||||
sut.handleRemoveFace({ assetId: 'asset-1', personId: 'person-1' });
|
||||
});
|
||||
|
||||
it('should remove the face', () => {
|
||||
sut.handleRemoveFace({ assetId: 'asset-1', personId: 'person-1' });
|
||||
});
|
||||
});
|
||||
|
||||
describe('flush', () => {
|
||||
it('should flush queued album updates', async () => {
|
||||
albumMock.getByIds.mockResolvedValue([albumStub.empty]);
|
||||
|
||||
sut.handleIndexAlbum({ ids: ['album1'] });
|
||||
|
||||
jest.runOnlyPendingTimers();
|
||||
|
||||
await asyncTick(4);
|
||||
|
||||
expect(albumMock.getByIds).toHaveBeenCalledWith(['album1']);
|
||||
expect(searchMock.importAlbums).toHaveBeenCalledWith([albumStub.empty], false);
|
||||
});
|
||||
|
||||
it('should flush queued album deletes', async () => {
|
||||
sut.handleRemoveAlbum({ ids: ['album1'] });
|
||||
|
||||
jest.runOnlyPendingTimers();
|
||||
|
||||
await asyncTick(4);
|
||||
|
||||
expect(searchMock.deleteAlbums).toHaveBeenCalledWith(['album1']);
|
||||
});
|
||||
|
||||
it('should flush queued asset updates', async () => {
|
||||
assetMock.getByIds.mockResolvedValue([assetStub.image]);
|
||||
|
||||
sut.handleIndexAsset({ ids: ['asset1'] });
|
||||
|
||||
jest.runOnlyPendingTimers();
|
||||
|
||||
await asyncTick(4);
|
||||
|
||||
expect(assetMock.getByIds).toHaveBeenCalledWith(['asset1']);
|
||||
expect(searchMock.importAssets).toHaveBeenCalledWith([assetStub.image], false);
|
||||
});
|
||||
|
||||
it('should flush queued asset deletes', async () => {
|
||||
sut.handleRemoveAsset({ ids: ['asset1'] });
|
||||
|
||||
jest.runOnlyPendingTimers();
|
||||
|
||||
await asyncTick(4);
|
||||
|
||||
expect(searchMock.deleteAssets).toHaveBeenCalledWith(['asset1']);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -1,396 +1,94 @@
|
|||
import { AlbumEntity, AssetEntity, AssetFaceEntity } from '@app/infra/entities';
|
||||
import { AssetEntity } from '@app/infra/entities';
|
||||
import { Inject, Injectable, Logger } from '@nestjs/common';
|
||||
import { mapAlbumWithAssets } from '../album';
|
||||
import { AssetResponseDto, mapAsset } from '../asset';
|
||||
import { AuthUserDto } from '../auth';
|
||||
import { usePagination } from '../domain.util';
|
||||
import { IAssetFaceJob, IBulkEntityJob, JOBS_ASSET_PAGINATION_SIZE, JobName } from '../job';
|
||||
import { PersonResponseDto } from '../person/person.dto';
|
||||
import { PersonResponseDto } from '../person';
|
||||
import {
|
||||
AssetFaceId,
|
||||
IAlbumRepository,
|
||||
IAssetRepository,
|
||||
IJobRepository,
|
||||
IMachineLearningRepository,
|
||||
IPersonRepository,
|
||||
ISearchRepository,
|
||||
ISmartInfoRepository,
|
||||
ISystemConfigRepository,
|
||||
OwnedFaceEntity,
|
||||
SearchCollection,
|
||||
SearchExploreItem,
|
||||
SearchResult,
|
||||
SearchStrategy,
|
||||
} from '../repositories';
|
||||
import { FeatureFlag, SystemConfigCore } from '../system-config';
|
||||
import { SearchDto, SearchPeopleDto } from './dto';
|
||||
import { SearchResponseDto } from './response-dto';
|
||||
|
||||
interface SyncQueue {
|
||||
upsert: Set<string>;
|
||||
delete: Set<string>;
|
||||
}
|
||||
|
||||
@Injectable()
|
||||
export class SearchService {
|
||||
private logger = new Logger(SearchService.name);
|
||||
private enabled = false;
|
||||
private timer: NodeJS.Timeout | null = null;
|
||||
private logger = new Logger(SearchService.name);
|
||||
private configCore: SystemConfigCore;
|
||||
|
||||
private albumQueue: SyncQueue = {
|
||||
upsert: new Set(),
|
||||
delete: new Set(),
|
||||
};
|
||||
|
||||
private assetQueue: SyncQueue = {
|
||||
upsert: new Set(),
|
||||
delete: new Set(),
|
||||
};
|
||||
|
||||
private faceQueue: SyncQueue = {
|
||||
upsert: new Set(),
|
||||
delete: new Set(),
|
||||
};
|
||||
|
||||
constructor(
|
||||
@Inject(IAlbumRepository) private albumRepository: IAlbumRepository,
|
||||
@Inject(IAssetRepository) private assetRepository: IAssetRepository,
|
||||
@Inject(IJobRepository) private jobRepository: IJobRepository,
|
||||
@Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
|
||||
@Inject(IMachineLearningRepository) private machineLearning: IMachineLearningRepository,
|
||||
@Inject(IPersonRepository) private personRepository: IPersonRepository,
|
||||
@Inject(ISearchRepository) private searchRepository: ISearchRepository,
|
||||
@Inject(ISystemConfigRepository) configRepository: ISystemConfigRepository,
|
||||
@Inject(ISmartInfoRepository) private smartInfoRepository: ISmartInfoRepository,
|
||||
@Inject(IAssetRepository) private assetRepository: IAssetRepository,
|
||||
) {
|
||||
this.configCore = SystemConfigCore.create(configRepository);
|
||||
}
|
||||
|
||||
teardown() {
|
||||
if (this.timer) {
|
||||
clearInterval(this.timer);
|
||||
this.timer = null;
|
||||
}
|
||||
}
|
||||
|
||||
async init() {
|
||||
this.enabled = await this.configCore.hasFeature(FeatureFlag.SEARCH);
|
||||
if (!this.enabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.logger.log('Running bootstrap');
|
||||
await this.searchRepository.setup();
|
||||
|
||||
const migrationStatus = await this.searchRepository.checkMigrationStatus();
|
||||
if (migrationStatus[SearchCollection.ASSETS]) {
|
||||
this.logger.debug('Queueing job to re-index all assets');
|
||||
await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_ASSETS });
|
||||
}
|
||||
if (migrationStatus[SearchCollection.ALBUMS]) {
|
||||
this.logger.debug('Queueing job to re-index all albums');
|
||||
await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_ALBUMS });
|
||||
}
|
||||
if (migrationStatus[SearchCollection.FACES]) {
|
||||
this.logger.debug('Queueing job to re-index all faces');
|
||||
await this.jobRepository.queue({ name: JobName.SEARCH_INDEX_FACES });
|
||||
}
|
||||
|
||||
this.timer = setInterval(() => this.flush(), 5_000);
|
||||
async searchPerson(authUser: AuthUserDto, dto: SearchPeopleDto): Promise<PersonResponseDto[]> {
|
||||
return await this.personRepository.getByName(authUser.id, dto.name, { withHidden: dto.withHidden });
|
||||
}
|
||||
|
||||
async getExploreData(authUser: AuthUserDto): Promise<SearchExploreItem<AssetResponseDto>[]> {
|
||||
await this.configCore.requireFeature(FeatureFlag.SEARCH);
|
||||
|
||||
const results = await this.searchRepository.explore(authUser.id);
|
||||
const lookup = await this.getLookupMap(
|
||||
results.reduce(
|
||||
(ids: string[], result: SearchExploreItem<AssetEntity>) => [
|
||||
...ids,
|
||||
...result.items.map((item) => item.data.id),
|
||||
],
|
||||
[],
|
||||
),
|
||||
);
|
||||
const options = { maxFields: 12, minAssetsPerField: 5 };
|
||||
const results = await Promise.all([
|
||||
this.assetRepository.getAssetIdByCity(authUser.id, options),
|
||||
this.assetRepository.getAssetIdByTag(authUser.id, options),
|
||||
]);
|
||||
const assetIds = new Set<string>(results.flatMap((field) => field.items.map((item) => item.data)));
|
||||
const assets = await this.assetRepository.getByIds(Array.from(assetIds));
|
||||
const assetMap = new Map<string, AssetResponseDto>(assets.map((asset) => [asset.id, mapAsset(asset)]));
|
||||
|
||||
return results.map(({ fieldName, items }) => ({
|
||||
fieldName,
|
||||
items: items
|
||||
.map(({ value, data }) => ({ value, data: lookup[data.id] }))
|
||||
.filter(({ data }) => !!data)
|
||||
.map(({ value, data }) => ({ value, data: mapAsset(data) })),
|
||||
items: items.map(({ value, data }) => ({ value, data: assetMap.get(data) as AssetResponseDto })),
|
||||
}));
|
||||
}
|
||||
|
||||
async search(authUser: AuthUserDto, dto: SearchDto): Promise<SearchResponseDto> {
|
||||
const { machineLearning } = await this.configCore.getConfig();
|
||||
await this.configCore.requireFeature(FeatureFlag.SEARCH);
|
||||
|
||||
const query = dto.q || dto.query || '*';
|
||||
const hasClip = machineLearning.enabled && machineLearning.clip.enabled;
|
||||
const strategy = dto.clip && hasClip ? SearchStrategy.CLIP : SearchStrategy.TEXT;
|
||||
const filters = { userId: authUser.id, ...dto };
|
||||
|
||||
let assets: SearchResult<AssetEntity>;
|
||||
let assets: AssetEntity[] = [];
|
||||
|
||||
switch (strategy) {
|
||||
case SearchStrategy.CLIP:
|
||||
const {
|
||||
machineLearning: { clip },
|
||||
} = await this.configCore.getConfig();
|
||||
const embedding = await this.machineLearning.encodeText(machineLearning.url, { text: query }, clip);
|
||||
assets = await this.searchRepository.vectorSearch(embedding, filters);
|
||||
const embedding = await this.machineLearning.encodeText(
|
||||
machineLearning.url,
|
||||
{ text: query },
|
||||
machineLearning.clip,
|
||||
);
|
||||
assets = await this.smartInfoRepository.searchByEmbedding({ ownerId: authUser.id, embedding, numResults: 100 });
|
||||
break;
|
||||
case SearchStrategy.TEXT:
|
||||
assets = await this.assetRepository.searchMetadata(query, authUser.id, { numResults: 250 });
|
||||
default:
|
||||
assets = await this.searchRepository.searchAssets(query, filters);
|
||||
break;
|
||||
}
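Condensed, the new CLIP branch above encodes the query text into an embedding via the machine-learning service, then lets the pgvector-backed repository return the nearest assets. The sketch below is a self-contained approximation rather than the literal service code: the interfaces are trimmed stand-ins, and only the call shapes (encodeText(url, { text }, clipConfig) and searchByEmbedding({ ownerId, embedding, numResults: 100 })) are taken from the diff.

```ts
interface MachineLearningClient {
  encodeText(url: string, input: { text: string }, clipConfig: unknown): Promise<number[]>;
}

interface EmbeddingRepository<TAsset> {
  searchByEmbedding(search: { ownerId: string; embedding: number[]; numResults: number }): Promise<TAsset[]>;
}

// Encode the free-text query, then ask pgvector for the nearest assets owned by this user.
async function clipSearch<TAsset>(
  ml: MachineLearningClient,
  repo: EmbeddingRepository<TAsset>,
  opts: { url: string; clipConfig: unknown; ownerId: string; query: string },
): Promise<TAsset[]> {
  const embedding = await ml.encodeText(opts.url, { text: opts.query }, opts.clipConfig);
  return repo.searchByEmbedding({ ownerId: opts.ownerId, embedding, numResults: 100 });
}
```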
|
||||
|
||||
const albums = await this.searchRepository.searchAlbums(query, filters);
|
||||
const lookup = await this.getLookupMap(assets.items.map((asset) => asset.id));
|
||||
|
||||
return {
|
||||
albums: { ...albums, items: albums.items.map(mapAlbumWithAssets) },
|
||||
albums: {
|
||||
total: 0,
|
||||
count: 0,
|
||||
items: [],
|
||||
facets: [],
|
||||
},
|
||||
assets: {
|
||||
...assets,
|
||||
items: assets.items
|
||||
.map((item) => lookup[item.id])
|
||||
.filter((item) => !!item)
|
||||
.map((asset) => mapAsset(asset)),
|
||||
total: assets.length,
|
||||
count: assets.length,
|
||||
items: assets.map((asset) => mapAsset(asset)),
|
||||
facets: [],
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
searchPerson(authUser: AuthUserDto, dto: SearchPeopleDto): Promise<PersonResponseDto[]> {
|
||||
return this.personRepository.getByName(authUser.id, dto.name, { withHidden: dto.withHidden });
|
||||
}
|
||||
|
||||
async handleIndexAlbums() {
|
||||
if (!this.enabled) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const albums = this.patchAlbums(await this.albumRepository.getAll());
|
||||
this.logger.log(`Indexing ${albums.length} albums`);
|
||||
await this.searchRepository.importAlbums(albums, true);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
async handleIndexAssets() {
|
||||
if (!this.enabled) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// TODO: do this in batches based on searchIndexVersion
|
||||
const assetPagination = usePagination(JOBS_ASSET_PAGINATION_SIZE, (pagination) =>
|
||||
this.assetRepository.getAll(pagination, { isVisible: true }),
|
||||
);
|
||||
|
||||
for await (const assets of assetPagination) {
|
||||
this.logger.debug(`Indexing ${assets.length} assets`);
|
||||
|
||||
const patchedAssets = this.patchAssets(assets);
|
||||
await this.searchRepository.importAssets(patchedAssets, false);
|
||||
}
|
||||
|
||||
await this.searchRepository.importAssets([], true);
|
||||
|
||||
this.logger.debug('Finished re-indexing all assets');
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
async handleIndexFaces() {
|
||||
if (!this.enabled) {
|
||||
return false;
|
||||
}
|
||||
await this.searchRepository.deleteAllFaces();
|
||||
|
||||
// TODO: do this in batches based on searchIndexVersion
|
||||
const faces = this.patchFaces(await this.personRepository.getAllFaces());
|
||||
this.logger.log(`Indexing ${faces.length} faces`);
|
||||
|
||||
const chunkSize = 1000;
|
||||
for (let i = 0; i < faces.length; i += chunkSize) {
|
||||
await this.searchRepository.importFaces(faces.slice(i, i + chunkSize), false);
|
||||
}
|
||||
|
||||
await this.searchRepository.importFaces([], true);
|
||||
|
||||
this.logger.debug('Finished re-indexing all faces');
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
handleIndexAlbum({ ids }: IBulkEntityJob) {
|
||||
if (!this.enabled) {
|
||||
return false;
|
||||
}
|
||||
|
||||
for (const id of ids) {
|
||||
this.albumQueue.upsert.add(id);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
handleIndexAsset({ ids }: IBulkEntityJob) {
|
||||
if (!this.enabled) {
|
||||
return false;
|
||||
}
|
||||
|
||||
for (const id of ids) {
|
||||
this.assetQueue.upsert.add(id);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
async handleIndexFace({ assetId, personId }: IAssetFaceJob) {
|
||||
if (!this.enabled) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// immediately push to typesense
|
||||
await this.searchRepository.importFaces(await this.idsToFaces([{ assetId, personId }]), false);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
handleRemoveAlbum({ ids }: IBulkEntityJob) {
|
||||
if (!this.enabled) {
|
||||
return false;
|
||||
}
|
||||
|
||||
for (const id of ids) {
|
||||
this.albumQueue.delete.add(id);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
handleRemoveAsset({ ids }: IBulkEntityJob) {
|
||||
if (!this.enabled) {
|
||||
return false;
|
||||
}
|
||||
|
||||
for (const id of ids) {
|
||||
this.assetQueue.delete.add(id);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
handleRemoveFace({ assetId, personId }: IAssetFaceJob) {
|
||||
if (!this.enabled) {
|
||||
return false;
|
||||
}
|
||||
|
||||
this.faceQueue.delete.add(this.asKey({ assetId, personId }));
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private async flush() {
|
||||
if (this.albumQueue.upsert.size > 0) {
|
||||
const ids = [...this.albumQueue.upsert.keys()];
|
||||
const items = await this.idsToAlbums(ids);
|
||||
this.logger.debug(`Flushing ${items.length} album upserts`);
|
||||
await this.searchRepository.importAlbums(items, false);
|
||||
this.albumQueue.upsert.clear();
|
||||
}
|
||||
|
||||
if (this.albumQueue.delete.size > 0) {
|
||||
const ids = [...this.albumQueue.delete.keys()];
|
||||
this.logger.debug(`Flushing ${ids.length} album deletes`);
|
||||
await this.searchRepository.deleteAlbums(ids);
|
||||
this.albumQueue.delete.clear();
|
||||
}
|
||||
|
||||
if (this.assetQueue.upsert.size > 0) {
|
||||
const ids = [...this.assetQueue.upsert.keys()];
|
||||
const items = await this.idsToAssets(ids);
|
||||
this.logger.debug(`Flushing ${items.length} asset upserts`);
|
||||
await this.searchRepository.importAssets(items, false);
|
||||
this.assetQueue.upsert.clear();
|
||||
}
|
||||
|
||||
if (this.assetQueue.delete.size > 0) {
|
||||
const ids = [...this.assetQueue.delete.keys()];
|
||||
this.logger.debug(`Flushing ${ids.length} asset deletes`);
|
||||
await this.searchRepository.deleteAssets(ids);
|
||||
this.assetQueue.delete.clear();
|
||||
}
|
||||
|
||||
if (this.faceQueue.upsert.size > 0) {
|
||||
const ids = [...this.faceQueue.upsert.keys()].map((key) => this.asParts(key));
|
||||
const items = await this.idsToFaces(ids);
|
||||
this.logger.debug(`Flushing ${items.length} face upserts`);
|
||||
await this.searchRepository.importFaces(items, false);
|
||||
this.faceQueue.upsert.clear();
|
||||
}
|
||||
|
||||
if (this.faceQueue.delete.size > 0) {
|
||||
const ids = [...this.faceQueue.delete.keys()];
|
||||
this.logger.debug(`Flushing ${ids.length} face deletes`);
|
||||
await this.searchRepository.deleteFaces(ids);
|
||||
this.faceQueue.delete.clear();
|
||||
}
|
||||
}
|
||||
|
||||
private async idsToAlbums(ids: string[]): Promise<AlbumEntity[]> {
|
||||
const entities = await this.albumRepository.getByIds(ids);
|
||||
return this.patchAlbums(entities);
|
||||
}
|
||||
|
||||
private async idsToAssets(ids: string[]): Promise<AssetEntity[]> {
|
||||
const entities = await this.assetRepository.getByIds(ids);
|
||||
return this.patchAssets(entities.filter((entity) => entity.isVisible));
|
||||
}
|
||||
|
||||
private async idsToFaces(ids: AssetFaceId[]): Promise<OwnedFaceEntity[]> {
|
||||
return this.patchFaces(await this.personRepository.getFacesByIds(ids));
|
||||
}
|
||||
|
||||
private patchAssets(assets: AssetEntity[]): AssetEntity[] {
|
||||
return assets;
|
||||
}
|
||||
|
||||
private patchAlbums(albums: AlbumEntity[]): AlbumEntity[] {
|
||||
return albums.map((entity) => ({ ...entity, assets: [] }));
|
||||
}
|
||||
|
||||
private patchFaces(faces: AssetFaceEntity[]): OwnedFaceEntity[] {
|
||||
const results: OwnedFaceEntity[] = [];
|
||||
for (const face of faces) {
|
||||
if (face.personId) {
|
||||
results.push({
|
||||
id: this.asKey(face as AssetFaceId),
|
||||
ownerId: face.asset.ownerId,
|
||||
assetId: face.assetId,
|
||||
personId: face.personId,
|
||||
embedding: face.embedding,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
private asKey(face: AssetFaceId): string {
|
||||
return `${face.assetId}|${face.personId}`;
|
||||
}
|
||||
|
||||
private asParts(key: string): AssetFaceId {
|
||||
const [assetId, personId] = key.split('|');
|
||||
return { assetId, personId };
|
||||
}
|
||||
|
||||
private async getLookupMap(assetIds: string[]) {
|
||||
const assets = await this.assetRepository.getByIds(assetIds);
|
||||
const lookup: Record<string, AssetEntity> = {};
|
||||
for (const asset of assets) {
|
||||
lookup[asset.id] = asset;
|
||||
}
|
||||
return lookup;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import { AssetCreate } from '@app/domain';
|
||||
import { AssetEntity } from '@app/infra/entities';
|
||||
import OptionalBetween from '@app/infra/utils/optional-between.util';
|
||||
import { OptionalBetween } from '@app/infra/infra.utils';
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { InjectRepository } from '@nestjs/typeorm';
|
||||
import { In } from 'typeorm/find-options/operator/In';
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import { DomainModule } from '@app/domain';
|
||||
import { InfraModule } from '@app/infra';
|
||||
import { AssetEntity } from '@app/infra/entities';
|
||||
import { Module, OnModuleDestroy, OnModuleInit } from '@nestjs/common';
|
||||
import { Module, OnModuleInit } from '@nestjs/common';
|
||||
import { APP_GUARD, APP_INTERCEPTOR } from '@nestjs/core';
|
||||
import { ScheduleModule } from '@nestjs/schedule';
|
||||
import { TypeOrmModule } from '@nestjs/typeorm';
|
||||
|
@ -73,14 +73,10 @@ import { ErrorInterceptor, FileUploadInterceptor } from './interceptors';
|
|||
FileUploadInterceptor,
|
||||
],
|
||||
})
|
||||
export class AppModule implements OnModuleInit, OnModuleDestroy {
|
||||
export class AppModule implements OnModuleInit {
|
||||
constructor(private appService: AppService) {}
|
||||
|
||||
async onModuleInit() {
|
||||
await this.appService.init();
|
||||
}
|
||||
|
||||
async onModuleDestroy() {
|
||||
await this.appService.destroy();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import { JobService, LibraryService, ONE_HOUR, SearchService, ServerInfoService, StorageService } from '@app/domain';
|
||||
import { JobService, LibraryService, ONE_HOUR, ServerInfoService, StorageService } from '@app/domain';
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { Cron, CronExpression, Interval } from '@nestjs/schedule';
|
||||
|
||||
|
@ -9,7 +9,6 @@ export class AppService {
|
|||
constructor(
|
||||
private jobService: JobService,
|
||||
private libraryService: LibraryService,
|
||||
private searchService: SearchService,
|
||||
private storageService: StorageService,
|
||||
private serverService: ServerInfoService,
|
||||
) {}
|
||||
|
@ -26,13 +25,7 @@ export class AppService {
|
|||
|
||||
async init() {
|
||||
this.storageService.init();
|
||||
await this.searchService.init();
|
||||
await this.serverService.handleVersionCheck();
|
||||
this.logger.log(`Feature Flags: ${JSON.stringify(await this.serverService.getFeatures(), null, 2)}`);
|
||||
await this.libraryService.init();
|
||||
}
|
||||
|
||||
async destroy() {
|
||||
this.searchService.teardown();
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -14,12 +14,8 @@ export class AssetFaceEntity
  @Column({ nullable: true, type: 'uuid' })
  personId!: string | null;

  @Column({
    type: 'float4',
    array: true,
    nullable: true,
  })
  embedding!: number[] | null;
  @Column({ type: 'float4', array: true })
  embedding!: number[];

  @Column({ default: 0, type: 'int' })
  imageWidth!: number;
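The entity keeps a float4 array declaration while the UsePgVector migration below switches the underlying column to the pgvector type, so this commit writes embeddings through raw SQL (see asVector and the repository changes further down). Purely as an illustration of an alternative approach, not something this commit does, a TypeORM value transformer could round-trip pgvector's text format:

```ts
import { ValueTransformer } from 'typeorm';

// Hypothetical alternative to the raw-SQL writes used in this commit:
// serialize number[] to pgvector's '[1,2,3]' text form and parse it back on read.
export const vectorTransformer: ValueTransformer = {
  to: (value: number[] | null): string | null => (value ? `[${value.join(',')}]` : null),
  from: (value: string | null): number[] | null => (value ? value.slice(1, -1).split(',').map(Number) : null),
};
```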
@ -2,7 +2,6 @@ import { QueueName } from '@app/domain';
|
|||
import { RegisterQueueOptions } from '@nestjs/bullmq';
|
||||
import { QueueOptions } from 'bullmq';
|
||||
import { RedisOptions } from 'ioredis';
|
||||
import { ConfigurationOptions } from 'typesense/lib/Typesense/Configuration';
|
||||
|
||||
function parseRedisConfig(): RedisOptions {
|
||||
if (process.env.IMMICH_TEST_ENV == 'true') {
|
||||
|
@ -41,36 +40,3 @@ export const bullConfig: QueueOptions = {
|
|||
};
|
||||
|
||||
export const bullQueues: RegisterQueueOptions[] = Object.values(QueueName).map((name) => ({ name }));
|
||||
|
||||
function parseTypeSenseConfig(): ConfigurationOptions {
|
||||
const typesenseURL = process.env.TYPESENSE_URL;
|
||||
const common = {
|
||||
apiKey: process.env.TYPESENSE_API_KEY as string,
|
||||
numRetries: 15,
|
||||
retryIntervalSeconds: 4,
|
||||
connectionTimeoutSeconds: 10,
|
||||
};
|
||||
if (typesenseURL && typesenseURL.startsWith('ha://')) {
|
||||
try {
|
||||
const decodedString = Buffer.from(typesenseURL.slice(5), 'base64').toString();
|
||||
return {
|
||||
nodes: JSON.parse(decodedString),
|
||||
...common,
|
||||
};
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to decode typesense options: ${error}`);
|
||||
}
|
||||
}
|
||||
return {
|
||||
nodes: [
|
||||
{
|
||||
host: process.env.TYPESENSE_HOST || 'typesense',
|
||||
port: Number(process.env.TYPESENSE_PORT) || 8108,
|
||||
protocol: process.env.TYPESENSE_PROTOCOL || 'http',
|
||||
},
|
||||
],
|
||||
...common,
|
||||
};
|
||||
}
|
||||
|
||||
export const typesenseConfig: ConfigurationOptions = parseTypeSenseConfig();
|
||||
|
|
|
@ -54,12 +54,12 @@ import {
|
|||
PartnerRepository,
|
||||
PersonRepository,
|
||||
ServerInfoRepository,
|
||||
SearchRepository,
|
||||
SharedLinkRepository,
|
||||
SmartInfoRepository,
|
||||
SystemConfigRepository,
|
||||
SystemMetadataRepository,
|
||||
TagRepository,
|
||||
TypesenseRepository,
|
||||
UserRepository,
|
||||
UserTokenRepository,
|
||||
} from './repositories';
|
||||
|
@ -80,7 +80,7 @@ const providers: Provider[] = [
|
|||
{ provide: IMoveRepository, useClass: MoveRepository },
|
||||
{ provide: IPartnerRepository, useClass: PartnerRepository },
|
||||
{ provide: IPersonRepository, useClass: PersonRepository },
|
||||
{ provide: ISearchRepository, useClass: TypesenseRepository },
|
||||
{ provide: ISearchRepository, useClass: SearchRepository },
|
||||
{ provide: IServerInfoRepository, useClass: ServerInfoRepository },
|
||||
{ provide: ISharedLinkRepository, useClass: SharedLinkRepository },
|
||||
{ provide: ISmartInfoRepository, useClass: SmartInfoRepository },
|
||||
|
|
|
@@ -1,5 +1,19 @@
import { Paginated, PaginationOptions } from '@app/domain';
import { FindOneOptions, ObjectLiteral, Repository } from 'typeorm';
import { Between, FindOneOptions, LessThanOrEqual, MoreThanOrEqual, ObjectLiteral, Repository } from 'typeorm';

/**
 * Allows optional values unlike the regular Between and uses MoreThanOrEqual
 * or LessThanOrEqual when only one parameter is specified.
 */
export function OptionalBetween<T>(from?: T, to?: T) {
  if (from && to) {
    return Between(from, to);
  } else if (from) {
    return MoreThanOrEqual(from);
  } else if (to) {
    return LessThanOrEqual(to);
  }
}

export async function paginate<Entity extends ObjectLiteral>(
  repository: Repository<Entity>,

@@ -18,3 +32,5 @@ export async function paginate<Entity extends ObjectLiteral>(

  return { items, hasNextPage };
}

export const asVector = (embedding: number[]) => `[${embedding.join(',')}]`;
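A short usage sketch of the two helpers above; values and call sites are illustrative.

```ts
import { OptionalBetween, asVector } from '@app/infra/infra.utils';

// OptionalBetween degrades gracefully when one bound is missing:
const createdAfter = OptionalBetween(new Date('2023-01-01'), undefined);                // MoreThanOrEqual(...)
const createdBetween = OptionalBetween(new Date('2023-01-01'), new Date('2023-12-31')); // Between(...)

// asVector serializes an embedding into pgvector's text input format; the
// repositories below bind this string as a query parameter (or quote it when
// interpolated into raw SQL).
const vectorLiteral = asVector([0.1, 0.2, 0.3]); // => "[0.1,0.2,0.3]"
console.log(createdAfter, createdBetween, vectorLiteral);
```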
32  server/src/infra/migrations/1693228677355-UsePgVector.ts  Normal file

@@ -0,0 +1,32 @@
import { MigrationInterface, QueryRunner } from 'typeorm';

export class UsePgVector1693228677355 implements MigrationInterface {
  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query('CREATE EXTENSION IF NOT EXISTS vector');

    const faceDimQuery = await queryRunner.query(`
      SELECT CARDINALITY("embedding") AS "dimsize"
      FROM "asset_faces"
      LIMIT 1`);
    const clipDimQuery = await queryRunner.query(`
      SELECT CARDINALITY("clipEmbedding") AS "dimsize"
      FROM "smart_info"
      LIMIT 1`);

    const faceDimSize = faceDimQuery?.[0]?.['dimsize'] ?? 512;
    const clipDimSize = clipDimQuery?.[0]?.['dimsize'] ?? 512;

    await queryRunner.query(`
      ALTER TABLE "asset_faces"
      ALTER COLUMN "embedding" TYPE vector(${faceDimSize})`);
    await queryRunner.query(`
      ALTER TABLE "smart_info"
      ALTER COLUMN "clipEmbedding" TYPE vector(${clipDimSize})`);
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`ALTER TABLE "asset_faces" ALTER COLUMN "embedding" TYPE real array`);
    await queryRunner.query(`ALTER TABLE "smart_info" ALTER COLUMN "clipEmbedding" TYPE real array`);
  }
}
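queryRunner.query returns an array of row objects rather than bare scalars, hence the aliased column and the 512 fallback for empty tables above. After the migration runs, the new column types can be confirmed with a query along these lines (a verification sketch, not part of the commit):

```ts
import { QueryRunner } from 'typeorm';

// Verification sketch: report the pgvector type of the two migrated columns,
// e.g. 'smart_info.clipEmbedding -> vector(512)'.
async function reportVectorColumns(runner: QueryRunner): Promise<void> {
  const rows: Array<{ table: string; column: string; type: string }> = await runner.query(`
    SELECT c.relname AS "table", a.attname AS "column", format_type(a.atttypid, a.atttypmod) AS "type"
      FROM pg_attribute a
      JOIN pg_class c ON c.oid = a.attrelid
     WHERE (c.relname, a.attname) IN (('asset_faces', 'embedding'), ('smart_info', 'clipEmbedding'))`);
  for (const row of rows) {
    console.log(`${row.table}.${row.column} -> ${row.type}`);
  }
}
```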
@@ -0,0 +1,30 @@
import { MigrationInterface, QueryRunner } from 'typeorm';

export class AddSmartInfoTextSearchColumn1696875736010 implements MigrationInterface {
  name = 'AddSmartInfoTextSearchColumn1696875736010';

  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`
      CREATE OR REPLACE FUNCTION immutable_concat_ws(text, text[])
        RETURNS text
        LANGUAGE internal IMMUTABLE PARALLEL SAFE AS
      'text_concat_ws'`);
    await queryRunner.query(`
      ALTER TABLE "smart_info" ADD "smartInfoTextSearchableColumn" tsvector
        GENERATED ALWAYS AS (
          TO_TSVECTOR(
            'english',
            immutable_concat_ws(
              ' '::text,
              COALESCE(tags, array[]::text[]) || COALESCE(objects, array[]::text[])
            )
          )
        )
        STORED NOT NULL`);
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`ALTER TABLE "smart_info" DROP COLUMN IF EXISTS "smartInfoTextSearchableColumn"`);
    await queryRunner.query(`DROP FUNCTION IF EXISTS immutable_concat_ws(text, text[])`);
  }
}
@@ -0,0 +1,16 @@
import { MigrationInterface, QueryRunner } from 'typeorm';

export class CreateSmartInfoTextSearchIndex1696876192604 implements MigrationInterface {
  name = 'CreateSmartInfoTextSearchIndex1696876192604';

  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`
      CREATE INDEX smart_info_text_searchable_idx
        ON smart_info
        USING GIN ("smartInfoTextSearchableColumn")`);
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`DROP INDEX IF EXISTS smart_info_text_searchable_idx`);
  }
}
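For reference, the generated column and GIN index above serve full-text matches like the one below; the standalone helper and its name are illustrative, while the query shape mirrors the searchMetadata builder later in this diff.

```ts
import { QueryRunner } from 'typeorm';

// Illustrative helper: full-text match against the generated tsvector column,
// answered by the GIN index created above.
async function searchSmartInfo(runner: QueryRunner, query: string): Promise<Array<{ assetId: string }>> {
  return runner.query(
    `SELECT "assetId"
       FROM smart_info
      WHERE "smartInfoTextSearchableColumn" @@ PLAINTO_TSQUERY('english', $1)`,
    [query],
  );
}
```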
@@ -0,0 +1,13 @@
import { MigrationInterface, QueryRunner } from "typeorm";

export class AddExifCityIndex1699419684990 implements MigrationInterface {
  name = 'AddExifCityIndex1699419684990'

  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`CREATE INDEX IF NOT EXISTS exif_city ON exif (city);`);
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`DROP INDEX IF EXISTS exif_city;`);
  }
}
@@ -0,0 +1,13 @@
import { MigrationInterface, QueryRunner } from "typeorm";

export class AddSmartInfoTagsIndex1699419700539 implements MigrationInterface {
  name = 'AddSmartInfoTagsIndex1699419700539'

  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`CREATE INDEX IF NOT EXISTS si_tags ON smart_info USING GIN (tags);`);
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`DROP INDEX IF EXISTS si_tags;`);
  }
}
@@ -0,0 +1,19 @@
import { MigrationInterface, QueryRunner } from 'typeorm';

export class AddCLIPEmbeddingIndex1699586761207 implements MigrationInterface {
  name = 'AddCLIPEmbeddingIndex1699586761207';

  public async up(queryRunner: QueryRunner): Promise<void> {
    console.log('Creating CLIP index. This may take a while...');
    await queryRunner.query(`
      CREATE INDEX IF NOT EXISTS clip_index
        ON smart_info
        USING hnsw ("clipEmbedding" vector_ip_ops)
        WITH (m = 16, ef_construction = 128)`);
    await queryRunner.query(`SET hnsw.ef_search = 250`);
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`DROP INDEX IF EXISTS clip_index`);
  }
}
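Two caveats, hedged since the diff does not spell them out: a plain SET hnsw.ef_search applies only to the session the migration runs in (the SmartInfoRepository change below re-issues it when it rebuilds the column), and the index is built with vector_ip_ops (inner product, the <#> operator) while the repository queries order by <=> (cosine distance), which pgvector only serves from an index whose opclass matches. A sketch of scoping ef_search to a single search transaction, with illustrative names:

```ts
import { DataSource } from 'typeorm';

// Illustrative only: SET LOCAL keeps the ef_search override scoped to this
// transaction instead of the whole session. Table and column names match the
// migration above.
async function searchWithEfSearch(dataSource: DataSource, embedding: number[], limit = 100) {
  return dataSource.transaction(async (manager) => {
    await manager.query(`SET LOCAL hnsw.ef_search = 250`);
    return manager.query(
      `SELECT "assetId"
         FROM smart_info
        ORDER BY "clipEmbedding" <=> $1
        LIMIT $2`,
      [`[${embedding.join(',')}]`, limit],
    );
  });
}
```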
@ -1,5 +1,8 @@
|
|||
import {
|
||||
AssetBuilderOptions,
|
||||
AssetCreate,
|
||||
AssetExploreFieldOptions,
|
||||
AssetExploreOptions,
|
||||
AssetSearchOptions,
|
||||
AssetStats,
|
||||
AssetStatsOptions,
|
||||
|
@ -7,24 +10,25 @@ import {
|
|||
LivePhotoSearchOptions,
|
||||
MapMarker,
|
||||
MapMarkerSearchOptions,
|
||||
MetadataSearchOptions,
|
||||
MonthDay,
|
||||
Paginated,
|
||||
PaginationOptions,
|
||||
SearchExploreItem,
|
||||
TimeBucketItem,
|
||||
TimeBucketOptions,
|
||||
TimeBucketSize,
|
||||
WithoutProperty,
|
||||
WithProperty,
|
||||
WithoutProperty,
|
||||
} from '@app/domain';
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { InjectRepository } from '@nestjs/typeorm';
|
||||
import _ from 'lodash';
|
||||
import { DateTime } from 'luxon';
|
||||
import { And, FindOptionsRelations, FindOptionsWhere, In, IsNull, LessThan, Not, Repository } from 'typeorm';
|
||||
import { AssetEntity, AssetJobStatusEntity, AssetType, ExifEntity } from '../entities';
|
||||
import { AssetEntity, AssetJobStatusEntity, AssetType, ExifEntity, SmartInfoEntity } from '../entities';
|
||||
import { DummyValue, GenerateSql } from '../infra.util';
|
||||
import OptionalBetween from '../utils/optional-between.util';
|
||||
import { paginate } from '../utils/pagination.util';
|
||||
import { OptionalBetween, paginate } from '../infra.utils';
|
||||
|
||||
const DEFAULT_SEARCH_SIZE = 250;
|
||||
|
||||
|
@ -44,6 +48,7 @@ export class AssetRepository implements IAssetRepository {
|
|||
@InjectRepository(AssetEntity) private repository: Repository<AssetEntity>,
|
||||
@InjectRepository(ExifEntity) private exifRepository: Repository<ExifEntity>,
|
||||
@InjectRepository(AssetJobStatusEntity) private jobStatusRepository: Repository<AssetJobStatusEntity>,
|
||||
@InjectRepository(SmartInfoEntity) private smartInfoRepository: Repository<SmartInfoEntity>,
|
||||
) {}
|
||||
|
||||
async upsertExif(exif: Partial<ExifEntity>): Promise<void> {
|
||||
|
@ -689,15 +694,70 @@ export class AssetRepository implements IAssetRepository {
|
|||
);
|
||||
}
|
||||
|
||||
private getBuilder(options: TimeBucketOptions) {
|
||||
const { isArchived, isFavorite, isTrashed, albumId, personId, userIds, withStacked } = options;
|
||||
async getAssetIdByCity(
|
||||
userId: string,
|
||||
{ minAssetsPerField, maxFields }: AssetExploreFieldOptions,
|
||||
): Promise<SearchExploreItem<string>> {
|
||||
const cte = this.exifRepository
|
||||
.createQueryBuilder('e')
|
||||
.select('city')
|
||||
.groupBy('city')
|
||||
.having('count(city) >= :minAssetsPerField', { minAssetsPerField })
|
||||
.orderBy('random()')
|
||||
.limit(maxFields);
|
||||
|
||||
const items = await this.getBuilder({ userId, exifInfo: false, assetType: AssetType.IMAGE, isArchived: false })
|
||||
.select('c.city', 'value')
|
||||
.addSelect('asset.id', 'data')
|
||||
.distinctOn(['c.city'])
|
||||
.innerJoin('exif', 'e', 'asset.id = e."assetId"')
|
||||
.addCommonTableExpression(cte, 'cities')
|
||||
.innerJoin('cities', 'c', 'c.city = e.city')
|
||||
.limit(maxFields)
|
||||
.getRawMany();
|
||||
|
||||
return { fieldName: 'exifInfo.city', items };
|
||||
}
|
||||
|
||||
async getAssetIdByTag(
|
||||
userId: string,
|
||||
{ minAssetsPerField, maxFields }: AssetExploreFieldOptions,
|
||||
): Promise<SearchExploreItem<string>> {
|
||||
const cte = this.smartInfoRepository
|
||||
.createQueryBuilder('si')
|
||||
.select('unnest(tags)', 'tag')
|
||||
.groupBy('tag')
|
||||
.having('count(*) >= :minAssetsPerField', { minAssetsPerField })
|
||||
.orderBy('random()')
|
||||
.limit(maxFields);
|
||||
|
||||
const items = await this.getBuilder({ userId, exifInfo: false, assetType: AssetType.IMAGE, isArchived: false })
|
||||
.select('unnest(si.tags)', 'value')
|
||||
.addSelect('asset.id', 'data')
|
||||
.distinctOn(['unnest(si.tags)'])
|
||||
.innerJoin('smart_info', 'si', 'asset.id = si."assetId"')
|
||||
.addCommonTableExpression(cte, 'random_tags')
|
||||
.innerJoin('random_tags', 't', 'si.tags @> ARRAY[t.tag]')
|
||||
.limit(maxFields)
|
||||
.getRawMany();
|
||||
|
||||
return { fieldName: 'smartInfo.tags', items };
|
||||
}
|
||||
|
||||
private getBuilder(options: AssetBuilderOptions) {
|
||||
const { isArchived, isFavorite, isTrashed, albumId, personId, userIds, withStacked, exifInfo, assetType } = options;
|
||||
|
||||
let builder = this.repository
|
||||
.createQueryBuilder('asset')
|
||||
.where('asset.isVisible = true')
|
||||
.andWhere('asset.fileCreatedAt < NOW()')
|
||||
.leftJoinAndSelect('asset.exifInfo', 'exifInfo')
|
||||
.leftJoinAndSelect('asset.stack', 'stack');
|
||||
.andWhere('asset.fileCreatedAt < NOW()');
|
||||
if (assetType !== undefined) {
|
||||
builder = builder.andWhere('asset.type = :assetType', { assetType });
|
||||
}
|
||||
|
||||
if (exifInfo !== false) {
|
||||
builder = builder.leftJoinAndSelect('asset.exifInfo', 'exifInfo').leftJoinAndSelect('asset.stack', 'stack');
|
||||
}
|
||||
|
||||
if (albumId) {
|
||||
builder = builder.leftJoin('asset.albums', 'album').andWhere('album.id = :albumId', { albumId });
|
||||
|
@@ -732,4 +792,43 @@

    return builder;
  }

  async searchMetadata(query: string, ownerId: string, { numResults }: MetadataSearchOptions): Promise<AssetEntity[]> {
    const rows = await this.repository
      .createQueryBuilder('assets')
      .select('assets.*')
      .addSelect('e.country', 'country')
      .addSelect('e.state', 'state')
      .addSelect('e.city', 'city')
      .addSelect('e.description', 'description')
      .addSelect('e.model', 'model')
      .addSelect('e.make', 'make')
      .addSelect('COALESCE(si.tags, array[]::text[])', 'tags')
      .addSelect('COALESCE(si.objects, array[]::text[])', 'objects')
      .innerJoin('smart_info', 'si', 'si."assetId" = assets."id"')
      .innerJoin('exif', 'e', 'assets."id" = e."assetId"')
      .where('assets."ownerId" = :ownerId', { ownerId })
      .andWhere(
        '(e."exifTextSearchableColumn" || si."smartInfoTextSearchableColumn") @@ PLAINTO_TSQUERY(\'english\', :query)',
        { query },
      )
      .limit(numResults)
      .getRawMany();

    return rows.map(
      ({ tags, objects, country, state, city, description, model, make, ...assetInfo }) =>
        ({
          exifInfo: {
            country,
            state,
            city,
            description,
            model,
            make,
          },
          smartInfo: {
            tags,
            objects,
          },
          ...assetInfo,
        }) as AssetEntity,
    );
  }
}
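A brief usage sketch of searchMetadata, mirroring the TEXT branch of SearchService.search() earlier in this diff (numResults: 250 is the value used there; the interface here only restates the method's shape).

```ts
import { AssetEntity } from '@app/infra/entities';

interface MetadataSearcher {
  searchMetadata(query: string, ownerId: string, options: { numResults: number }): Promise<AssetEntity[]>;
}

// Each returned row carries the trimmed exifInfo/smartInfo objects assembled by the mapper above.
async function textSearch(repo: MetadataSearcher, ownerId: string, query: string): Promise<AssetEntity[]> {
  return repo.searchMetadata(query, ownerId, { numResults: 250 });
}
```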
@ -15,12 +15,12 @@ export * from './metadata.repository';
|
|||
export * from './move.repository';
|
||||
export * from './partner.repository';
|
||||
export * from './person.repository';
|
||||
export * from './search.repository';
|
||||
export * from './server-info.repository';
|
||||
export * from './shared-link.repository';
|
||||
export * from './smart-info.repository';
|
||||
export * from './system-config.repository';
|
||||
export * from './system-metadata.repository';
|
||||
export * from './tag.repository';
|
||||
export * from './typesense.repository';
|
||||
export * from './user-token.repository';
|
||||
export * from './user.repository';
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import {
|
||||
AssetFaceId,
|
||||
EmbeddingSearch,
|
||||
IPersonRepository,
|
||||
PersonNameSearchOptions,
|
||||
PersonSearchOptions,
|
||||
|
@ -10,6 +11,7 @@ import { InjectRepository } from '@nestjs/typeorm';
|
|||
import { In, Repository } from 'typeorm';
|
||||
import { AssetEntity, AssetFaceEntity, PersonEntity } from '../entities';
|
||||
import { DummyValue, GenerateSql } from '../infra.util';
|
||||
import { asVector } from '../infra.utils';
|
||||
|
||||
export class PersonRepository implements IPersonRepository {
|
||||
constructor(
|
||||
|
@@ -215,8 +217,17 @@ export class PersonRepository
    return this.personRepository.save(entity);
  }

  createFace(entity: Partial<AssetFaceEntity>): Promise<AssetFaceEntity> {
    return this.assetFaceRepository.save(entity);
  async createFace(entity: AssetFaceEntity): Promise<AssetFaceEntity> {
    if (!entity.personId) {
      throw new Error('Person ID is required to create a face');
    }
    const { embedding, ...face } = entity;
    await this.assetFaceRepository.save(face);
    await this.assetFaceRepository.manager.query(
      `UPDATE "asset_faces" SET "embedding" = '${asVector(embedding)}' WHERE "assetId" = $1 AND "personId" = $2`,
      [entity.assetId, entity.personId],
    );
    return this.assetFaceRepository.findOneByOrFail({ assetId: entity.assetId, personId: entity.personId });
  }

  async update(entity: Partial<PersonEntity>): Promise<PersonEntity> {
@@ -233,4 +244,19 @@
  async getRandomFace(personId: string): Promise<AssetFaceEntity | null> {
    return this.assetFaceRepository.findOneBy({ personId });
  }

  searchByEmbedding({ ownerId, embedding, numResults, maxDistance }: EmbeddingSearch): Promise<AssetFaceEntity[]> {
    let query = this.assetFaceRepository
      .createQueryBuilder('faces')
      .leftJoinAndSelect('faces.asset', 'asset')
      .where('asset.ownerId = :ownerId', { ownerId })
      .orderBy(`faces.embedding <=> '${asVector(embedding)}'`)
      .limit(numResults);

    if (maxDistance) {
      query = query.andWhere(`(faces.embedding <=> '${asVector(embedding)}') <= :maxDistance`, { maxDistance });
    }

    return query.getMany();
  }
}
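For face recognition, the method above can be called with a face embedding and an optional distance cutoff; a usage sketch follows (the 0.6 threshold and result count are illustrative, the commit does not fix values here).

```ts
import { AssetFaceEntity } from '@app/infra/entities';

interface FaceSearcher {
  searchByEmbedding(search: {
    ownerId: string;
    embedding: number[];
    numResults: number;
    maxDistance?: number;
  }): Promise<AssetFaceEntity[]>;
}

async function findSimilarFaces(repo: FaceSearcher, ownerId: string, embedding: number[]): Promise<AssetFaceEntity[]> {
  return repo.searchByEmbedding({ ownerId, embedding, numResults: 100, maxDistance: 0.6 });
}
```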
8  server/src/infra/repositories/search.repository.ts  Normal file

@@ -0,0 +1,8 @@
import { ISearchRepository, SearchExploreItem } from '@app/domain';
import { AssetEntity } from '../entities';

export class SearchRepository implements ISearchRepository {
  async explore(): Promise<SearchExploreItem<AssetEntity>[]> {
    return [];
  }
}
@@ -1,14 +1,84 @@
import { ISmartInfoRepository } from '@app/domain';
import { Injectable } from '@nestjs/common';
import { EmbeddingSearch, ISmartInfoRepository } from '@app/domain';
import { Injectable, Logger } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import AsyncLock from 'async-lock';
import { Repository } from 'typeorm';
import { SmartInfoEntity } from '../entities';
import { AssetEntity, SmartInfoEntity } from '../entities';
import { asVector } from '../infra.utils';

@Injectable()
export class SmartInfoRepository implements ISmartInfoRepository {
  constructor(@InjectRepository(SmartInfoEntity) private repository: Repository<SmartInfoEntity>) {}
  private logger = new Logger(SmartInfoRepository.name);
  private lock: AsyncLock;
  private curDimSize: number | undefined;

  constructor(@InjectRepository(SmartInfoEntity) private repository: Repository<SmartInfoEntity>) {
    this.lock = new AsyncLock();
  }

  async searchByEmbedding({ ownerId, embedding, numResults }: EmbeddingSearch): Promise<AssetEntity[]> {
    const results = await this.repository
      .createQueryBuilder('smartInfo')
      .useTransaction(true)
      .leftJoinAndSelect('smartInfo.asset', 'asset')
      .where('asset.ownerId = :ownerId', { ownerId })
      .orderBy(`smartInfo.clipEmbedding <=> :embedding`)
      .setParameters({ embedding: asVector(embedding) })
      .limit(numResults)
      .getMany();

    return results.map((result) => result.asset).filter((asset): asset is AssetEntity => !!asset);
  }

  async upsert(info: Partial<SmartInfoEntity>): Promise<void> {
    await this.repository.upsert(info, { conflictPaths: ['assetId'] });
    const { clipEmbedding, ...withoutEmbedding } = info;
    await this.repository.upsert(withoutEmbedding, { conflictPaths: ['assetId'] });
    if (!clipEmbedding || !info.assetId) return;

    try {
      await this.updateEmbedding(clipEmbedding, info.assetId);
    } catch (e) {
      await this.updateDimSize(clipEmbedding.length);
      await this.updateEmbedding(clipEmbedding, info.assetId);
    }
  }

  private async updateEmbedding(embedding: number[], assetId: string): Promise<void> {
    await this.repository.manager.query(`UPDATE "smart_info" SET "clipEmbedding" = $1 WHERE "assetId" = $2`, [
      asVector(embedding),
      assetId,
    ]);
  }

  /*
   * note: never use this with user input
   * this does not parameterize the query because it is not possible to parameterize the column type
   */
  private async updateDimSize(dimSize: number): Promise<void> {
    await this.lock.acquire('updateDimSizeLock', async () => {
      if (this.curDimSize === dimSize) return;

      this.logger.log(`Updating CLIP dimension size to ${dimSize}`);

      await this.repository.manager.query(`
        BEGIN;

        ALTER TABLE smart_info
          DROP COLUMN "clipEmbedding",
          ADD COLUMN "clipEmbedding" vector(${dimSize});

        CREATE INDEX IF NOT EXISTS clip_index
          ON smart_info
          USING hnsw ("clipEmbedding" vector_ip_ops)
          WITH (m = 16, ef_construction = 128);

        SET hnsw.ef_search = 250;
        COMMIT;
      `);

      this.curDimSize = dimSize;
      this.logger.log(`Successfully updated CLIP dimension size to ${dimSize}`);
    });
  }
}
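Tying the pieces together: upsert stores tags, objects, and the CLIP embedding (recreating the column and HNSW index if the model's dimension changes), and searchByEmbedding serves the CLIP branch of SearchService. The flow below is a sketch with illustrative caller-side values; the real calls live in the smart-info job handlers and SearchService.

```ts
import { SmartInfoRepository } from '@app/infra/repositories';

async function indexAndQuery(repo: SmartInfoRepository, assetId: string, ownerId: string, embedding: number[]) {
  // 1. Store classification results and the CLIP embedding for an asset.
  await repo.upsert({ assetId, tags: ['beach'], objects: ['person'], clipEmbedding: embedding });

  // 2. Later, return the nearest assets for a query embedding.
  return repo.searchByEmbedding({ ownerId, embedding, numResults: 100 });
}
```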
@ -1,503 +0,0 @@
|
|||
import {
|
||||
ISearchRepository,
|
||||
OwnedFaceEntity,
|
||||
SearchCollection,
|
||||
SearchCollectionIndexStatus,
|
||||
SearchExploreItem,
|
||||
SearchFaceFilter,
|
||||
SearchFilter,
|
||||
SearchResult,
|
||||
} from '@app/domain';
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import _, { Dictionary } from 'lodash';
|
||||
import { catchError, filter, firstValueFrom, from, map, mergeMap, of, toArray } from 'rxjs';
|
||||
import { Client } from 'typesense';
|
||||
import { CollectionCreateSchema } from 'typesense/lib/Typesense/Collections';
|
||||
import { DocumentSchema, SearchResponse } from 'typesense/lib/Typesense/Documents';
|
||||
import { AlbumEntity, AssetEntity, AssetFaceEntity } from '../entities';
|
||||
import { typesenseConfig } from '../infra.config';
import { albumSchema, assetSchema, faceSchema } from '../typesense-schemas';

function removeNil<T extends Dictionary<any>>(item: T): T {
  _.forOwn(item, (value, key) => {
    if (_.isNil(value) || (_.isObject(value) && !_.isDate(value) && _.isEmpty(removeNil(value)))) {
      delete item[key];
    }
  });

  return item;
}

interface MultiSearchError {
  code: number;
  error: string;
}

interface CustomAssetEntity extends AssetEntity {
  geo?: [number, number];
  motion?: boolean;
  people?: string[];
}

const schemaMap: Record<SearchCollection, CollectionCreateSchema> = {
  [SearchCollection.ASSETS]: assetSchema,
  [SearchCollection.ALBUMS]: albumSchema,
  [SearchCollection.FACES]: faceSchema,
};

const schemas = Object.entries(schemaMap) as [SearchCollection, CollectionCreateSchema][];

@Injectable()
export class TypesenseRepository implements ISearchRepository {
  private logger = new Logger(TypesenseRepository.name);

  private _client: Client | null = null;
  private _updateCLIPLock = false;

  private get client(): Client {
    if (!this._client) {
      throw new Error('Typesense client not available (no apiKey was provided)');
    }
    return this._client;
  }

  constructor() {
    if (!typesenseConfig.apiKey) {
      return;
    }

    this._client = new Client(typesenseConfig);
  }

  async setup(): Promise<void> {
    const collections = await this.client.collections().retrieve();
    for (const collection of collections) {
      this.logger.debug(`${collection.name} collection has ${collection.num_documents} documents`);
      // await this.client.collections(collection.name).delete();
    }

    // upsert collections
    for (const [collectionName, schema] of schemas) {
      const collection = await this.client
        .collections(schema.name)
        .retrieve()
        .catch(() => null);
      if (!collection) {
        this.logger.log(`Creating schema: ${collectionName}/${schema.name}`);
        await this.client.collections().create(schema);
      } else {
        this.logger.log(`Schema up to date: ${collectionName}/${schema.name}`);
      }
    }
  }

  async checkMigrationStatus(): Promise<SearchCollectionIndexStatus> {
    const migrationMap: SearchCollectionIndexStatus = {
      [SearchCollection.ASSETS]: false,
      [SearchCollection.ALBUMS]: false,
      [SearchCollection.FACES]: false,
    };

    // check if alias is using the current schema
    const { aliases } = await this.client.aliases().retrieve();
    this.logger.log(`Alias mapping: ${JSON.stringify(aliases)}`);

    for (const [aliasName, schema] of schemas) {
      const match = aliases.find((alias) => alias.name === aliasName);
      if (!match || match.collection_name !== schema.name) {
        migrationMap[aliasName] = true;
      }
    }

    this.logger.log(`Collections needing migration: ${JSON.stringify(migrationMap)}`);

    return migrationMap;
  }

  async importAlbums(items: AlbumEntity[], done: boolean): Promise<void> {
    await this.import(SearchCollection.ALBUMS, items, done);
  }

  async importAssets(items: AssetEntity[], done: boolean): Promise<void> {
    await this.import(SearchCollection.ASSETS, items, done);
  }

  async importFaces(items: OwnedFaceEntity[], done: boolean): Promise<void> {
    await this.import(SearchCollection.FACES, items, done);
  }

  private async import(
    collection: SearchCollection,
    items: AlbumEntity[] | AssetEntity[] | OwnedFaceEntity[],
    done: boolean,
  ): Promise<void> {
    try {
      if (items.length > 0) {
        await this.client.collections(schemaMap[collection].name).documents().import(this.patch(collection, items), {
          action: 'upsert',
          dirty_values: 'coerce_or_drop',
        });
      }

      if (done) {
        await this.updateAlias(collection);
      }
    } catch (error: any) {
      await this.handleError(error);
    }
  }

  async explore(userId: string): Promise<SearchExploreItem<AssetEntity>[]> {
    const common = {
      q: '*',
      filter_by: [this.buildFilterBy('ownerId', userId, true), this.buildFilterBy('isArchived', false)].join(' && '),
      per_page: 100,
    };

    const asset$ = this.client.collections<AssetEntity>(assetSchema.name).documents();

    const { facet_counts: facets } = await asset$.search({
      ...common,
      query_by: 'originalFileName',
      facet_by: 'exifInfo.city,smartInfo.objects',
      max_facet_values: 12,
    });

    return firstValueFrom(
      from(facets || []).pipe(
        mergeMap(
          (facet) =>
            from(facet.counts).pipe(
              mergeMap((count) => {
                const config = {
                  ...common,
                  query_by: 'originalFileName',
                  filter_by: [common.filter_by, this.buildFilterBy(facet.field_name, count.value, true)].join(' && '),
                  per_page: 1,
                };

                this.logger.verbose(`Explore subquery: "filter_by:${config.filter_by}" (count:${count.count})`);

                return from(asset$.search(config)).pipe(
                  catchError((error: any) => {
                    this.logger.warn(`Explore subquery error: ${error}`, error?.stack);
                    return of({ hits: [] });
                  }),
                  map((result) => ({
                    value: count.value,
                    data: result.hits?.[0]?.document as AssetEntity,
                  })),
                  filter((item) => !!item.data),
                );
              }, 5),
              toArray(),
              map((items) => ({
                fieldName: facet.field_name as string,
                items,
              })),
            ),
          3,
        ),
        toArray(),
      ),
    );
  }

  async deleteAlbums(ids: string[]): Promise<void> {
    await this.delete(SearchCollection.ALBUMS, ids);
  }

  async deleteAssets(ids: string[]): Promise<void> {
    await this.delete(SearchCollection.ASSETS, ids);
  }

  async deleteFaces(ids: string[]): Promise<void> {
    await this.delete(SearchCollection.FACES, ids);
  }

  async deleteAllFaces(): Promise<number> {
    const records = await this.client.collections(faceSchema.name).documents().delete({ filter_by: 'ownerId:!=null' });
    return records.num_deleted;
  }

  async deleteAllAssets(): Promise<number> {
    const records = await this.client.collections(assetSchema.name).documents().delete({ filter_by: 'ownerId:!=null' });
    return records.num_deleted;
  }

  async updateCLIPField(num_dim: number): Promise<void> {
    const clipField = assetSchema.fields?.find((field) => field.name === 'smartInfo.clipEmbedding');
    if (clipField && !this._updateCLIPLock) {
      try {
        this._updateCLIPLock = true;
        clipField.num_dim = num_dim;
        await this.deleteAllAssets();
        await this.client
          .collections(assetSchema.name)
          .update({ fields: [{ name: 'smartInfo.clipEmbedding', drop: true } as any, clipField] });
        this.logger.log(`Successfully updated CLIP dimensions to ${num_dim}`);
      } catch (err: any) {
        this.logger.error(`Error while updating CLIP field: ${err.message}`);
      } finally {
        this._updateCLIPLock = false;
      }
    }
  }

  async delete(collection: SearchCollection, ids: string[]): Promise<void> {
    await this.client
      .collections(schemaMap[collection].name)
      .documents()
      .delete({ filter_by: this.buildFilterBy('id', ids, true) });
  }

  async searchAlbums(query: string, filters: SearchFilter): Promise<SearchResult<AlbumEntity>> {
    const results = await this.client
      .collections<AlbumEntity>(albumSchema.name)
      .documents()
      .search({
        q: query,
        query_by: ['albumName', 'description'].join(','),
        filter_by: this.getAlbumFilters(filters),
      });

    return this.asResponse(results, filters.debug);
  }

  async searchAssets(query: string, filters: SearchFilter): Promise<SearchResult<AssetEntity>> {
    const results = await this.client
      .collections<AssetEntity>(assetSchema.name)
      .documents()
      .search({
        q: query,
        query_by: [
          'originalFileName',
          'exifInfo.country',
          'exifInfo.state',
          'exifInfo.city',
          'exifInfo.description',
          'exifInfo.model',
          'exifInfo.make',
          'smartInfo.tags',
          'smartInfo.objects',
          'people',
        ].join(','),
        per_page: 250,
        facet_by: this.getFacetFieldNames(SearchCollection.ASSETS),
        filter_by: this.getAssetFilters(filters),
        sort_by: filters.recent ? 'createdAt:desc' : undefined,
      });

    return this.asResponse(results, filters.debug);
  }

  async searchFaces(input: number[], filters: SearchFaceFilter): Promise<SearchResult<AssetFaceEntity>> {
    const { results } = await this.client.multiSearch.perform({
      searches: [
        {
          collection: faceSchema.name,
          q: '*',
          vector_query: `embedding:([${input.join(',')}], k:5)`,
          per_page: 5,
          filter_by: this.buildFilterBy('ownerId', filters.ownerId, true),
        } as any,
      ],
    });

    return this.asResponse(results[0] as SearchResponse<AssetFaceEntity>);
  }

  async vectorSearch(input: number[], filters: SearchFilter): Promise<SearchResult<AssetEntity>> {
    const { results } = await this.client.multiSearch.perform({
      searches: [
        {
          collection: assetSchema.name,
          q: '*',
          vector_query: `smartInfo.clipEmbedding:([${input.join(',')}], k:100)`,
          per_page: 100,
          facet_by: this.getFacetFieldNames(SearchCollection.ASSETS),
          filter_by: this.getAssetFilters(filters),
        } as any,
      ],
    });

    return this.asResponse(results[0] as SearchResponse<AssetEntity>, filters.debug);
  }

  private asResponse<T extends DocumentSchema>(
    resultsOrError: SearchResponse<T> | MultiSearchError,
    debug?: boolean,
  ): SearchResult<T> {
    const { error, code } = resultsOrError as MultiSearchError;
    if (error) {
      throw new Error(`Typesense multi-search error: ${code} - ${error}`);
    }

    const results = resultsOrError as SearchResponse<T>;

    return {
      page: results.page,
      total: results.found,
      count: results.out_of,
      items: (results.hits || []).map((hit) => hit.document),
      distances: (results.hits || []).map((hit: any) => hit.vector_distance),
      facets: (results.facet_counts || []).map((facet) => ({
        counts: facet.counts.map((item) => ({ count: item.count, value: item.value })),
        fieldName: facet.field_name as string,
      })),
      debug: debug ? results : undefined,
    } as SearchResult<T>;
  }

  private async handleError(error: any) {
    this.logger.error('Unable to index documents');
    const results = error.importResults || [];
    let dimsChanged = false;
    for (const result of results) {
      try {
        result.document = JSON.parse(result.document);
        if (result.error.includes('Field `smartInfo.clipEmbedding` must have')) {
          dimsChanged = true;
          this.logger.warn(
            `CLIP embedding dimensions have changed, now ${result.document.smartInfo.clipEmbedding.length} dims. Updating schema...`,
          );
          await this.updateCLIPField(result.document.smartInfo.clipEmbedding.length);
          break;
        }

        if (result.document?.smartInfo?.clipEmbedding) {
          result.document.smartInfo.clipEmbedding = '<truncated>';
        }
      } catch (err: any) {
        this.logger.error(`Error while updating CLIP field: ${err.message}`, err.stack);
      }
    }

    if (!dimsChanged) {
      this.logger.log(JSON.stringify(results, null, 2));
    }
  }

  private async updateAlias(collection: SearchCollection) {
    const schema = schemaMap[collection];
    const alias = await this.client
      .aliases(collection)
      .retrieve()
      .catch(() => null);

    // update alias to current collection
    this.logger.log(`Using new schema: ${alias?.collection_name || '(unset)'} => ${schema.name}`);
    await this.client.aliases().upsert(collection, { collection_name: schema.name });

    // delete previous collection
    if (alias && alias.collection_name !== schema.name) {
      this.logger.log(`Deleting old schema: ${alias.collection_name}`);
      await this.client.collections(alias.collection_name).delete();
    }
  }

  private patch(collection: SearchCollection, items: AssetEntity[] | AlbumEntity[] | OwnedFaceEntity[]) {
    return items.map((item) => {
      switch (collection) {
        case SearchCollection.ASSETS:
          return this.patchAsset(item as AssetEntity);
        case SearchCollection.ALBUMS:
          return this.patchAlbum(item as AlbumEntity);
        case SearchCollection.FACES:
          return this.patchFace(item as OwnedFaceEntity);
      }
    });
  }

  private patchAlbum(album: AlbumEntity): AlbumEntity {
    return removeNil(album);
  }

  private patchAsset(asset: AssetEntity): CustomAssetEntity {
    let custom = asset as CustomAssetEntity;

    const lat = asset.exifInfo?.latitude;
    const lng = asset.exifInfo?.longitude;
    if (lat && lng && lat !== 0 && lng !== 0) {
      custom = { ...custom, geo: [lat, lng] };
    }
    const people = asset.faces
      ?.filter((face) => !face.person?.isHidden && face.person?.name)
      .map((face) => face.person?.name)
      .filter((name) => name !== undefined) as string[];
    if (people?.length) {
      custom = { ...custom, people };
    }
    return removeNil({ ...custom, motion: !!asset.livePhotoVideoId });
  }

  private patchFace(face: OwnedFaceEntity): OwnedFaceEntity {
    return removeNil(face);
  }

  private getFacetFieldNames(collection: SearchCollection) {
    return (schemaMap[collection].fields || [])
      .filter((field) => field.facet)
      .map((field) => field.name)
      .join(',');
  }

  private getAlbumFilters(filters: SearchFilter) {
    const { userId } = filters;

    const _filters = [this.buildFilterBy('ownerId', userId, true)];

    if (filters.id) {
      _filters.push(this.buildFilterBy('id', filters.id, true));
    }

    for (const item of albumSchema.fields || []) {
      const value = filters[item.name as keyof SearchFilter];
      if (item.facet && value !== undefined) {
        _filters.push(this.buildFilterBy(item.name, value));
      }
    }

    const result = _filters.join(' && ');

    this.logger.debug(`Album filters are: ${result}`);

    return result;
  }

  private getAssetFilters(filters: SearchFilter) {
    const { userId } = filters;
    const _filters = [this.buildFilterBy('ownerId', userId, true), this.buildFilterBy('isArchived', false)];

    if (filters.id) {
      _filters.push(this.buildFilterBy('id', filters.id, true));
    }

    for (const item of assetSchema.fields || []) {
      const value = filters[item.name as keyof SearchFilter];
      if (item.facet && value !== undefined) {
        _filters.push(this.buildFilterBy(item.name, value));
      }
    }

    const result = _filters.join(' && ');

    this.logger.debug(`Asset filters are: ${result}`);

    return result;
  }

  private buildFilterBy(key: string, values: boolean | string | string[], exact?: boolean) {
    const token = exact ? ':=' : ':';

    const _values = (Array.isArray(values) ? values : [values]).map((value) => {
      if (typeof value === 'boolean' || value === 'true' || value === 'false') {
        return value;
      }
      return '`' + value + '`';
    });

    const value = _values.length > 1 ? `[${_values.join(',')}]` : _values[0];

    return `${key}${token}${value}`;
  }
}
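This repository (and the `vector_query` multi-search it relied on) is what the pgvector-backed search replaces. For orientation, a minimal sketch of the equivalent CLIP similarity lookup against Postgres, assuming a hypothetical `smart_search` table with a pgvector `embedding` column joined to `assets`; the table, column, and parameter names are illustrative, not the actual Immich schema:

```typescript
import { DataSource } from 'typeorm';

// Sketch of a CLIP nearest-neighbour query using pgvector.
// `<=>` is pgvector's cosine-distance operator; smaller = more similar.
export async function clipVectorSearch(
  dataSource: DataSource,
  ownerId: string,
  embedding: number[],
  limit = 100,
): Promise<{ assetId: string; distance: number }[]> {
  // pgvector accepts a vector literal such as '[0.1,0.2,...]'.
  const vector = `[${embedding.join(',')}]`;
  return dataSource.query(
    `SELECT s."assetId", s.embedding <=> $1::vector AS distance
       FROM smart_search s
       JOIN assets a ON a.id = s."assetId"
      WHERE a."ownerId" = $2 AND a."isArchived" = false
      ORDER BY s.embedding <=> $1::vector
      LIMIT $3`,
    [vector, ownerId, limit],
  );
}
```

With the HNSW index mentioned in the commit message, the `ORDER BY ... <=>` can be served by an approximate nearest-neighbour scan instead of a sequential scan.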
@@ -1,14 +0,0 @@
import { CollectionCreateSchema } from 'typesense/lib/Typesense/Collections';

export const albumSchemaVersion = 2;
export const albumSchema: CollectionCreateSchema = {
  name: `albums-v${albumSchemaVersion}`,
  fields: [
    { name: 'ownerId', type: 'string', facet: false },
    { name: 'albumName', type: 'string', facet: false, sort: true },
    { name: 'description', type: 'string', facet: false },
    { name: 'createdAt', type: 'string', facet: false, sort: true },
    { name: 'updatedAt', type: 'string', facet: false, sort: true },
  ],
  default_sorting_field: 'createdAt',
};
@@ -1,42 +0,0 @@
import { CollectionCreateSchema } from 'typesense/lib/Typesense/Collections';

export const assetSchemaVersion = 10;
export const assetSchema: CollectionCreateSchema = {
  name: `assets-v${assetSchemaVersion}`,
  fields: [
    // asset
    { name: 'ownerId', type: 'string', facet: false },
    { name: 'type', type: 'string', facet: true },
    { name: 'originalPath', type: 'string', facet: false },
    { name: 'createdAt', type: 'string', facet: false, sort: true },
    { name: 'updatedAt', type: 'string', facet: false, sort: true },
    { name: 'fileCreatedAt', type: 'string', facet: false, sort: true },
    { name: 'fileModifiedAt', type: 'string', facet: false, sort: true },
    { name: 'isFavorite', type: 'bool', facet: true },
    { name: 'isArchived', type: 'bool', facet: true },
    { name: 'originalFileName', type: 'string', facet: false, optional: true },

    // exif
    { name: 'exifInfo.city', type: 'string', facet: true, optional: true },
    { name: 'exifInfo.country', type: 'string', facet: true, optional: true },
    { name: 'exifInfo.state', type: 'string', facet: true, optional: true },
    { name: 'exifInfo.description', type: 'string', facet: false, optional: true },
    { name: 'exifInfo.make', type: 'string', facet: true, optional: true },
    { name: 'exifInfo.model', type: 'string', facet: true, optional: true },
    { name: 'exifInfo.orientation', type: 'string', optional: true },
    { name: 'exifInfo.projectionType', type: 'string', facet: true, optional: true },

    // smart info
    { name: 'smartInfo.objects', type: 'string[]', facet: true, optional: true },
    { name: 'smartInfo.tags', type: 'string[]', facet: true, optional: true },
    { name: 'smartInfo.clipEmbedding', type: 'float[]', facet: false, optional: true, num_dim: 512 },

    // computed
    { name: 'geo', type: 'geopoint', facet: false, optional: true },
    { name: 'motion', type: 'bool', facet: true },
    { name: 'people', type: 'string[]', facet: true, optional: true },
  ],
  token_separators: ['.', '-', '_'],
  enable_nested_fields: true,
  default_sorting_field: 'fileCreatedAt',
};
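The 512-dimension `smartInfo.clipEmbedding` field above is what moves into a pgvector column on the Postgres side, and the commit message mentions an HNSW index migration for it. A minimal sketch of what such a TypeORM migration could look like, assuming a hypothetical `smart_search` table with an `embedding` column and a pgvector build with HNSW support (0.5.0+); the names are illustrative:

```typescript
import { MigrationInterface, QueryRunner } from 'typeorm';

// Sketch only: enable pgvector and add an HNSW index for
// cosine-distance nearest-neighbour search over CLIP embeddings.
export class AddClipHnswIndex1690000000000 implements MigrationInterface {
  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`CREATE EXTENSION IF NOT EXISTS vector`);
    await queryRunner.query(
      `CREATE INDEX IF NOT EXISTS clip_embedding_hnsw_idx
         ON smart_search
      USING hnsw (embedding vector_cosine_ops)`,
    );
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`DROP INDEX IF EXISTS clip_embedding_hnsw_idx`);
  }
}
```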
@@ -1,12 +0,0 @@
import { CollectionCreateSchema } from 'typesense/lib/Typesense/Collections';

export const faceSchemaVersion = 1;
export const faceSchema: CollectionCreateSchema = {
  name: `faces-v${faceSchemaVersion}`,
  fields: [
    { name: 'ownerId', type: 'string', facet: false },
    { name: 'assetId', type: 'string', facet: false },
    { name: 'personId', type: 'string', facet: false },
    { name: 'embedding', type: 'float[]', facet: false, num_dim: 512 },
  ],
};
@@ -1,3 +0,0 @@
export * from './album.schema';
export * from './asset.schema';
export * from './face.schema';
@@ -1,15 +0,0 @@
import { Between, LessThanOrEqual, MoreThanOrEqual } from 'typeorm';

/**
 * Allows optional values unlike the regular Between and uses MoreThanOrEqual
 * or LessThanOrEqual when only one parameter is specified.
 */
export default function OptionalBetween<T>(from?: T, to?: T) {
  if (from && to) {
    return Between(from, to);
  } else if (from) {
    return MoreThanOrEqual(from);
  } else if (to) {
    return LessThanOrEqual(to);
  }
}
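A quick usage sketch of the helper being removed here, assuming a hypothetical asset repository and illustrative import paths (only the bounds actually supplied end up constraining the query):

```typescript
import { Repository } from 'typeorm';
import { AssetEntity } from '@app/infra/entities'; // illustrative import path
import OptionalBetween from './optional-between.util'; // illustrative import path

// Date-range filter where either bound may be missing:
// both -> Between, only `after` -> MoreThanOrEqual,
// only `before` -> LessThanOrEqual, neither -> no constraint.
export function findAssetsInRange(
  repository: Repository<AssetEntity>,
  after?: Date,
  before?: Date,
): Promise<AssetEntity[]> {
  return repository.find({
    where: { fileCreatedAt: OptionalBetween(after, before) },
  });
}
```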
@@ -8,7 +8,6 @@ import {
  MediaService,
  MetadataService,
  PersonService,
  SearchService,
  ServerInfoService,
  SmartInfoService,
  StorageService,
@@ -31,7 +30,6 @@ export class AppService {
    private mediaService: MediaService,
    private metadataService: MetadataService,
    private personService: PersonService,
    private searchService: SearchService,
    private serverInfoService: ServerInfoService,
    private smartInfoService: SmartInfoService,
    private storageTemplateService: StorageTemplateService,
@@ -52,15 +50,6 @@ export class AppService {
      [JobName.CLASSIFY_IMAGE]: (data) => this.smartInfoService.handleClassifyImage(data),
      [JobName.QUEUE_ENCODE_CLIP]: (data) => this.smartInfoService.handleQueueEncodeClip(data),
      [JobName.ENCODE_CLIP]: (data) => this.smartInfoService.handleEncodeClip(data),
      [JobName.SEARCH_INDEX_ALBUMS]: () => this.searchService.handleIndexAlbums(),
      [JobName.SEARCH_INDEX_ASSETS]: () => this.searchService.handleIndexAssets(),
      [JobName.SEARCH_INDEX_FACES]: () => this.searchService.handleIndexFaces(),
      [JobName.SEARCH_INDEX_ALBUM]: (data) => this.searchService.handleIndexAlbum(data),
      [JobName.SEARCH_INDEX_ASSET]: (data) => this.searchService.handleIndexAsset(data),
      [JobName.SEARCH_INDEX_FACE]: (data) => this.searchService.handleIndexFace(data),
      [JobName.SEARCH_REMOVE_ALBUM]: (data) => this.searchService.handleRemoveAlbum(data),
      [JobName.SEARCH_REMOVE_ASSET]: (data) => this.searchService.handleRemoveAsset(data),
      [JobName.SEARCH_REMOVE_FACE]: (data) => this.searchService.handleRemoveFace(data),
      [JobName.STORAGE_TEMPLATE_MIGRATION]: () => this.storageTemplateService.handleMigration(),
      [JobName.STORAGE_TEMPLATE_MIGRATION_SINGLE]: (data) => this.storageTemplateService.handleMigrationSingle(data),
      [JobName.QUEUE_MIGRATION]: () => this.mediaService.handleQueueMigration(),
@@ -94,7 +83,6 @@ export class AppService {
    });

    await this.metadataService.init();
    await this.searchService.init();
  }

  async teardown() {
@@ -47,7 +47,6 @@ export default async () => {
  }

  process.env.NODE_ENV = 'development';
  process.env.TYPESENSE_ENABLED = 'false';
  process.env.IMMICH_MACHINE_LEARNING_ENABLED = 'false';
  process.env.IMMICH_TEST_ENV = 'true';
  process.env.TZ = 'Z';
@@ -32,6 +32,8 @@ export const newAssetRepositoryMock = (): jest.Mocked<IAssetRepository> => {
    getTimeBuckets: jest.fn(),
    restoreAll: jest.fn(),
    softDeleteAll: jest.fn(),
    search: jest.fn(),
    getAssetIdByCity: jest.fn(),
    getAssetIdByTag: jest.fn(),
    searchMetadata: jest.fn(),
  };
};
@@ -17,6 +17,7 @@ export const newPersonRepositoryMock = (): jest.Mocked<IPersonRepository> => {
    delete: jest.fn(),

    getStatistics: jest.fn(),
    searchByEmbedding: jest.fn(),
    getAllFaces: jest.fn(),
    getFacesByIds: jest.fn(),
    getRandomFace: jest.fn(),
@@ -2,20 +2,6 @@ import { ISearchRepository } from '@app/domain';

export const newSearchRepositoryMock = (): jest.Mocked<ISearchRepository> => {
  return {
    setup: jest.fn(),
    checkMigrationStatus: jest.fn(),
    importAssets: jest.fn(),
    importAlbums: jest.fn(),
    importFaces: jest.fn(),
    deleteAlbums: jest.fn(),
    deleteAssets: jest.fn(),
    deleteFaces: jest.fn(),
    deleteAllFaces: jest.fn(),
    updateCLIPField: jest.fn(),
    searchAssets: jest.fn(),
    searchAlbums: jest.fn(),
    vectorSearch: jest.fn(),
    explore: jest.fn(),
    searchFaces: jest.fn(),
  };
};
@@ -2,6 +2,7 @@ import { ISmartInfoRepository } from '@app/domain';

export const newSmartInfoRepositoryMock = (): jest.Mocked<ISmartInfoRepository> => {
  return {
    searchByEmbedding: jest.fn(),
    upsert: jest.fn(),
  };
};
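The new `upsert` entry on `ISmartInfoRepository` above reflects embeddings now being written to Postgres rather than indexed in Typesense. A minimal sketch of what such an upsert can look like with raw SQL through TypeORM, assuming a hypothetical `smart_search` table keyed by a unique `assetId`; the names are illustrative:

```typescript
import { DataSource } from 'typeorm';

// Hypothetical upsert of a CLIP embedding into a pgvector column.
export async function upsertClipEmbedding(
  dataSource: DataSource,
  assetId: string,
  embedding: number[],
): Promise<void> {
  await dataSource.query(
    `INSERT INTO smart_search ("assetId", embedding)
     VALUES ($1, $2::vector)
     ON CONFLICT ("assetId") DO UPDATE SET embedding = $2::vector`,
    [assetId, `[${embedding.join(',')}]`],
  );
}
```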