diff --git a/web/apps/photos/src/services/machineLearning/faceService.ts b/web/apps/photos/src/services/machineLearning/faceService.ts
index b6a7da097..6318650f6 100644
--- a/web/apps/photos/src/services/machineLearning/faceService.ts
+++ b/web/apps/photos/src/services/machineLearning/faceService.ts
@@ -220,8 +220,7 @@ class FaceService {
             face.detection,
         );
-        const blobOptions = DEFAULT_ML_SYNC_CONFIG.faceCrop.blobOptions;
-        const blob = await imageBitmapToBlob(faceCrop.image, blobOptions);
+        const blob = await imageBitmapToBlob(faceCrop.image);
         const cache = await openCache("face-crops");
         await cache.put(face.id, blob);
diff --git a/web/apps/photos/src/utils/image/index.ts b/web/apps/photos/src/utils/image/index.ts
index bdaf64d73..b6f1d22aa 100644
--- a/web/apps/photos/src/utils/image/index.ts
+++ b/web/apps/photos/src/utils/image/index.ts
@@ -450,17 +450,17 @@ export interface BlobOptions {
     quality?: number;
 }
 
-export async function imageBitmapToBlob(
-    imageBitmap: ImageBitmap,
-    options?: BlobOptions,
-) {
+export async function imageBitmapToBlob(imageBitmap: ImageBitmap) {
     const offscreen = new OffscreenCanvas(
         imageBitmap.width,
         imageBitmap.height,
    );
    offscreen.getContext("2d").drawImage(imageBitmap, 0, 0);
-    return offscreen.convertToBlob(options);
+    return offscreen.convertToBlob({
+        type: "image/jpeg",
+        quality: 0.8,
+    });
 }
 
 export async function imageBitmapFromBlob(blob: Blob) {
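
For reference, a minimal standalone sketch of the convert-and-cache flow that the diff settles on: render the ImageBitmap onto an OffscreenCanvas and encode it as JPEG at quality 0.8. The names imageBitmapToJPEGBlob and cacheFaceCrop are illustrative, not the repo's helpers, and the standard caches.open / Response pair stands in for the repo's openCache wrapper (an assumption about what that wrapper does).

// Sketch only: convert an ImageBitmap to a JPEG blob, mirroring the fixed
// type/quality now hardcoded in imageBitmapToBlob above.
const imageBitmapToJPEGBlob = async (
    imageBitmap: ImageBitmap,
): Promise<Blob> => {
    const offscreen = new OffscreenCanvas(imageBitmap.width, imageBitmap.height);
    // The original code calls drawImage without a null check; the assertion
    // keeps the same shape while satisfying strict TypeScript.
    offscreen.getContext("2d")!.drawImage(imageBitmap, 0, 0);
    return offscreen.convertToBlob({ type: "image/jpeg", quality: 0.8 });
};

// Hypothetical call site modelled on the faceService.ts hunk: store the crop
// under the face id. The standard Cache API expects a Response, so the blob
// is wrapped here; the repo's openCache/cache.put presumably do this internally.
const cacheFaceCrop = async (faceID: string, crop: ImageBitmap) => {
    const blob = await imageBitmapToJPEGBlob(crop);
    const cache = await caches.open("face-crops");
    await cache.put(faceID, new Response(blob));
};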