diff --git a/web/apps/photos/src/services/clip-service.ts b/web/apps/photos/src/services/clip-service.ts
index 6099edbd9..4df9e5728 100644
--- a/web/apps/photos/src/services/clip-service.ts
+++ b/web/apps/photos/src/services/clip-service.ts
@@ -1,5 +1,6 @@
 import { ensureElectron } from "@/next/electron";
 import log from "@/next/log";
+import type { Electron } from "@/next/types/ipc";
 import ComlinkCryptoWorker from "@ente/shared/crypto";
 import { CustomError } from "@ente/shared/error";
 import { Events, eventBus } from "@ente/shared/events";
@@ -64,6 +65,7 @@ export interface CLIPIndexingStatus {
  * itself, is the same across clients - web and mobile.
  */
 class CLIPService {
+    private electron: Electron;
     private embeddingExtractionInProgress: AbortController | null = null;
     private reRunNeeded = false;
     private indexingStatus: CLIPIndexingStatus = {
@@ -78,6 +80,7 @@ class CLIPService {
     private unsupportedPlatform = false;
 
     constructor() {
+        this.electron = ensureElectron();
         this.liveEmbeddingExtractionQueue = new PQueue({
             concurrency: 1,
         });
@@ -190,12 +193,12 @@ class CLIPService {
 
     getTextEmbedding = async (text: string): Promise<Float32Array> => {
         try {
-            return ensureElectron().computeTextEmbedding(Model.ONNX_CLIP, text);
+            return this.electron.clipTextEmbedding(text);
         } catch (e) {
             if (e?.message?.includes(CustomError.UNSUPPORTED_PLATFORM)) {
                 this.unsupportedPlatform = true;
             }
-            log.error("failed to compute text embedding", e);
+            log.error("Failed to compute CLIP text embedding", e);
             throw e;
         }
     };
@@ -318,11 +321,7 @@ class CLIPService {
         const file = await localFile
             .arrayBuffer()
             .then((buffer) => new Uint8Array(buffer));
-        const embedding = await ensureElectron().computeImageEmbedding(
-            Model.ONNX_CLIP,
-            file,
-        );
-        return embedding;
+        return await this.electron.clipImageEmbedding(file);
     };
 
     private encryptAndUploadEmbedding = async (
@@ -357,10 +356,7 @@ class CLIPService {
 
     private extractFileClipImageEmbedding = async (file: EnteFile) => {
         const thumb = await downloadManager.getThumbnail(file);
-        const embedding = await ensureElectron().computeImageEmbedding(
-            Model.ONNX_CLIP,
-            thumb,
-        );
+        const embedding = await this.electron.clipImageEmbedding(thumb);
         return embedding;
     };
 
diff --git a/web/packages/next/types/ipc.ts b/web/packages/next/types/ipc.ts
index 4a1539cc3..8451b045e 100644
--- a/web/packages/next/types/ipc.ts
+++ b/web/packages/next/types/ipc.ts
@@ -10,10 +10,6 @@ export interface AppUpdateInfo {
     version: string;
 }
 
-export enum Model {
-    ONNX_CLIP = "onnx-clip",
-}
-
 export enum FILE_PATH_TYPE {
     FILES = "files",
     ZIPS = "zips",
@@ -151,17 +147,22 @@ export interface Electron {
      *
      * See: [Note: CLIP based magic search]
      *
-     * @param model The CLIP model and ML runtime combination to use.
      * @param jpegImageData The raw bytes of the image encoded as an JPEG.
      *
      * @returns A CLIP embedding.
      */
-    computeImageEmbedding: (
-        model: Model,
-        jpegImageData: Uint8Array,
-    ) => Promise<Float32Array>;
+    clipImageEmbedding: (jpegImageData: Uint8Array) => Promise<Float32Array>;
 
-    computeTextEmbedding: (model: Model, text: string) => Promise<Float32Array>;
+    /**
+     * Compute and return a CLIP embedding of the given text.
+     *
+     * See: [Note: CLIP based magic search]
+     *
+     * @param text The string whose embedding we want to compute.
+     *
+     * @returns A CLIP embedding.
+     */
+    clipTextEmbedding: (text: string) => Promise<Float32Array>;
 
     // - File selection
     // TODO: Deprecated - use dialogs on the renderer process itself