diff --git a/web/apps/photos/src/services/face/people.ts b/web/apps/photos/src/services/face/people.ts
new file mode 100644
index 000000000..8a0f0e969
--- /dev/null
+++ b/web/apps/photos/src/services/face/people.ts
@@ -0,0 +1,81 @@
+import mlIDbStorage from "services/face/db";
+import { Face, MLSyncContext, Person } from "services/face/types";
+import FaceService from "../machineLearning/faceService";
+import { fetchImageBitmap, getLocalFile } from "./image";
+
+export const syncPeopleIndex = async (syncContext: MLSyncContext) => {
+    const filesVersion = await mlIDbStorage.getIndexVersion("files");
+    if (filesVersion <= (await mlIDbStorage.getIndexVersion("people"))) {
+        return;
+    }
+
+    // TODO: have faces addressable through fileId + faceId
+    // to avoid index based addressing, which is prone to wrong results
+    // one way could be to match nearest face within threshold in the file
+    const allFacesMap = await FaceService.getAllSyncedFacesMap(syncContext);
+    const allFaces = getAllFacesFromMap(allFacesMap);
+
+    await FaceService.runFaceClustering(syncContext, allFaces);
+    await syncPeopleFromClusters(syncContext, allFacesMap, allFaces);
+
+    await mlIDbStorage.setIndexVersion("people", filesVersion);
+};
+
+const syncPeopleFromClusters = async (
+    syncContext: MLSyncContext,
+    allFacesMap: Map<number, Array<Face>>,
+    allFaces: Array<Face>,
+) => {
+    const clusters = syncContext.mlLibraryData.faceClusteringResults?.clusters;
+    if (!clusters || clusters.length < 1) {
+        return;
+    }
+
+    for (const face of allFaces) {
+        face.personId = undefined;
+    }
+    await mlIDbStorage.clearAllPeople();
+    for (const [index, cluster] of clusters.entries()) {
+        const faces = cluster.map((f) => allFaces[f]).filter((f) => f);
+
+        // TODO: take default display face from last leaves of hdbscan clusters
+        const personFace = faces.reduce((best, face) =>
+            face.detection.probability > best.detection.probability
+                ? face
+                : best,
+        );
+
+        if (personFace && !personFace.crop?.cacheKey) {
+            const file = await getLocalFile(personFace.fileId);
+            const imageBitmap = await fetchImageBitmap(file);
+            await FaceService.saveFaceCrop(
+                imageBitmap,
+                personFace,
+                syncContext,
+            );
+        }
+
+        const person: Person = {
+            id: index,
+            files: faces.map((f) => f.fileId),
+            displayFaceId: personFace?.id,
+            faceCropCacheKey: personFace?.crop?.cacheKey,
+        };
+
+        await mlIDbStorage.putPerson(person);
+
+        faces.forEach((face) => {
+            face.personId = person.id;
+        });
+        // log.info("Creating person: ", person, faces);
+    }
+
+    await mlIDbStorage.updateFaces(allFacesMap);
+};
+
+
+function getAllFacesFromMap(allFacesMap: Map<number, Array<Face>>) {
+    const allFaces = [...allFacesMap.values()].flat();
+
+    return allFaces;
+}
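
Note on the clustering-to-people step in `syncPeopleFromClusters`: `faceClusteringResults.clusters` appears to be an array of clusters, each cluster an array of indices into the flat `allFaces` list (hence the TODO about index-based addressing being error-prone). The sketch below is a minimal, standalone illustration of that mapping, using simplified stand-in types (`FaceLike`, `PersonLike`) rather than the repository's actual `Face`/`Person` types; it is not part of the diff and omits the face-crop and IndexedDB persistence steps.

```ts
// Standalone sketch with stand-in types; not the repository's actual Face/Person.
interface FaceLike {
    id: string;
    fileId: number;
    detection: { probability: number };
    personId?: number;
}

interface PersonLike {
    id: number;
    files: number[];
    displayFaceId?: string;
}

// Each cluster is an array of indices into `allFaces`. For every non-empty
// cluster, pick the most confident detection as the display face, tag the
// member faces with the person id, and emit a person record.
const peopleFromClusters = (
    allFaces: FaceLike[],
    clusters: number[][],
): PersonLike[] =>
    clusters.flatMap((cluster, index) => {
        // Resolve indices to faces, dropping any out-of-range entries.
        const faces = cluster
            .map((i) => allFaces[i])
            .filter((f) => f !== undefined);
        if (faces.length === 0) return [];

        // Highest detection probability wins as the person's display face.
        const displayFace = faces.reduce((best, face) =>
            face.detection.probability > best.detection.probability
                ? face
                : best,
        );
        faces.forEach((face) => (face.personId = index));

        return [
            {
                id: index,
                files: faces.map((f) => f.fileId),
                displayFaceId: displayFace.id,
            },
        ];
    });
```

For example, `peopleFromClusters(faces, [[0, 2], [1]])` yields two people, with faces 0 and 2 grouped under person 0 and face 1 under person 1. As in the diff, file ids are collected per face without de-duplication, so a cluster containing several faces from the same photo lists that file id more than once.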