Reduce API surface for migration
parent ee894a668c
commit 87c7999370
2 changed files with 15 additions and 165 deletions
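For context, here is a minimal caller-side sketch of the storage surface that survives this commit, assuming the module paths and the method signatures visible in the unchanged context lines below (putFile, getPerson); the refreshFaceFile helper itself is hypothetical:

import mlIDbStorage from "services/face/db";
import type { MlFileData } from "services/face/types";

// Hypothetical helper: per-record reads and writes remain available, while the
// bulk helpers (getAllFiles, putAllFilesInTx, removeAllFilesInTx, ...) are removed.
const refreshFaceFile = async (mlFile: MlFileData, personId: number) => {
    await mlIDbStorage.putFile(mlFile);
    return await mlIDbStorage.getPerson(personId);
};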
services/face/db.ts
@@ -10,7 +10,7 @@ import {
 } from "idb";
 import isElectron from "is-electron";
 import type { Person } from "services/face/people";
-import type { Face, MlFileData } from "services/face/types";
+import type { MlFileData } from "services/face/types";
 import {
     DEFAULT_ML_SEARCH_CONFIG,
     MAX_ML_SYNC_ERROR_COUNT,
@@ -212,38 +212,6 @@ class MLIDbStorage {
         await this.db;
     }
 
-    public async getAllFileIds() {
-        const db = await this.db;
-        return db.getAllKeys("files");
-    }
-
-    public async putAllFilesInTx(mlFiles: Array<MlFileData>) {
-        const db = await this.db;
-        const tx = db.transaction("files", "readwrite");
-        await Promise.all(mlFiles.map((mlFile) => tx.store.put(mlFile)));
-        await tx.done;
-    }
-
-    public async removeAllFilesInTx(fileIds: Array<number>) {
-        const db = await this.db;
-        const tx = db.transaction("files", "readwrite");
-
-        await Promise.all(fileIds.map((fileId) => tx.store.delete(fileId)));
-        await tx.done;
-    }
-
-    public async newTransaction<
-        Name extends StoreNames<MLDb>,
-        Mode extends IDBTransactionMode = "readonly",
-    >(storeNames: Name, mode?: Mode) {
-        const db = await this.db;
-        return db.transaction(storeNames, mode);
-    }
-
-    public async commit(tx: IDBPTransaction<MLDb>) {
-        return tx.done;
-    }
-
     public async getAllFileIdsForUpdate(
         tx: IDBPTransaction<MLDb, ["files"], "readwrite">,
     ) {
@@ -282,11 +250,6 @@ class MLIDbStorage {
         return db.get("files", fileId);
     }
 
-    public async getAllFiles() {
-        const db = await this.db;
-        return db.getAll("files");
-    }
-
     public async putFile(mlFile: MlFileData) {
         const db = await this.db;
         return db.put("files", mlFile);
@@ -326,38 +289,6 @@ class MLIDbStorage {
         return face[0];
     }
 
-    public async getAllFacesMap() {
-        const startTime = Date.now();
-        const db = await this.db;
-        const allFiles = await db.getAll("files");
-        const allFacesMap = new Map<number, Array<Face>>();
-        allFiles.forEach(
-            (mlFileData) =>
-                mlFileData.faces &&
-                allFacesMap.set(mlFileData.fileId, mlFileData.faces),
-        );
-        log.info("getAllFacesMap", Date.now() - startTime, "ms");
-
-        return allFacesMap;
-    }
-
-    public async updateFaces(allFacesMap: Map<number, Face[]>) {
-        const startTime = Date.now();
-        const db = await this.db;
-        const tx = db.transaction("files", "readwrite");
-        let cursor = await tx.store.openCursor();
-        while (cursor) {
-            if (allFacesMap.has(cursor.key)) {
-                const mlFileData = { ...cursor.value };
-                mlFileData.faces = allFacesMap.get(cursor.key);
-                cursor.update(mlFileData);
-            }
-            cursor = await cursor.continue();
-        }
-        await tx.done;
-        log.info("updateFaces", Date.now() - startTime, "ms");
-    }
-
     public async getPerson(id: number) {
         const db = await this.db;
         return db.get("people", id);
@@ -368,21 +299,6 @@ class MLIDbStorage {
         return db.getAll("people");
     }
 
-    public async putPerson(person: Person) {
-        const db = await this.db;
-        return db.put("people", person);
-    }
-
-    public async clearAllPeople() {
-        const db = await this.db;
-        return db.clear("people");
-    }
-
-    public async getIndexVersion(index: string) {
-        const db = await this.db;
-        return db.get("versions", index);
-    }
-
     public async incrementIndexVersion(index: StoreNames<MLDb>) {
         if (index === "versions") {
             throw new Error("versions store can not be versioned");
@@ -397,11 +313,6 @@ class MLIDbStorage {
         return version;
     }
 
-    public async setIndexVersion(index: string, version: number) {
-        const db = await this.db;
-        return db.put("versions", version, index);
-    }
-
     public async getConfig<T extends Config>(name: string, def: T) {
         const db = await this.db;
         const tx = db.transaction("configs", "readwrite");
@@ -465,66 +376,6 @@ class MLIDbStorage {
             peopleIndexVersion === filesIndexVersion,
         };
     }
-
-    // for debug purpose
-    public async getAllMLData() {
-        const db = await this.db;
-        const tx = db.transaction(db.objectStoreNames, "readonly");
-        const allMLData: any = {};
-        for (const store of tx.objectStoreNames) {
-            const keys = await tx.objectStore(store).getAllKeys();
-            const data = await tx.objectStore(store).getAll();
-
-            allMLData[store] = {};
-            for (let i = 0; i < keys.length; i++) {
-                allMLData[store][keys[i]] = data[i];
-            }
-        }
-        await tx.done;
-
-        const files = allMLData["files"];
-        for (const fileId of Object.keys(files)) {
-            const fileData = files[fileId];
-            fileData.faces?.forEach(
-                (f) => (f.embedding = Array.from(f.embedding)),
-            );
-        }
-
-        return allMLData;
-    }
-
-    // for debug purpose, this will overwrite all data
-    public async putAllMLData(allMLData: Map<string, any>) {
-        const db = await this.db;
-        const tx = db.transaction(db.objectStoreNames, "readwrite");
-        for (const store of tx.objectStoreNames) {
-            const records = allMLData[store];
-            if (!records) {
-                continue;
-            }
-            const txStore = tx.objectStore(store);
-
-            if (store === "files") {
-                const files = records;
-                for (const fileId of Object.keys(files)) {
-                    const fileData = files[fileId];
-                    fileData.faces?.forEach(
-                        (f) => (f.embedding = Float32Array.from(f.embedding)),
-                    );
-                }
-            }
-
-            await txStore.clear();
-            for (const key of Object.keys(records)) {
-                if (txStore.keyPath) {
-                    txStore.put(records[key]);
-                } else {
-                    txStore.put(records[key], key);
-                }
-            }
-        }
-        await tx.done;
-    }
 }
 
 export default new MLIDbStorage();
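The removed updateFaces method above is a standard idb read-modify-write pass: open a readwrite transaction, walk a cursor, update matching records in place, then await tx.done. A standalone sketch of that pattern follows; the database name, store name, and record shape are hypothetical stand-ins, not taken from this commit:

import { openDB } from "idb";

// Apply a map of key -> patch to matching records in one readwrite pass.
const patchRecords = async (patches: Map<number, unknown>) => {
    const db = await openDB("demo-db", 1, {
        upgrade(database) {
            database.createObjectStore("files");
        },
    });
    const tx = db.transaction("files", "readwrite");
    let cursor = await tx.store.openCursor();
    while (cursor) {
        const patch = patches.get(cursor.key as number);
        if (patch !== undefined) {
            // Overwrite the record at the cursor's current position.
            await cursor.update({ ...cursor.value, faces: patch });
        }
        cursor = await cursor.continue();
    }
    // The transaction commits once tx.done resolves.
    await tx.done;
};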
services/face/people.ts
@@ -1,7 +1,3 @@
-import log from "@/next/log";
-import mlIDbStorage from "services/face/db";
-import { clusterFaces } from "./cluster";
-
 export interface Person {
     id: number;
     name?: string;
@@ -9,13 +5,15 @@ export interface Person {
     displayFaceId?: string;
 }
 
+// TODO-ML(MR): Forced disable clustering. It doesn't currently work,
+// need to finalize it before we move out of beta.
+//
+// > Error: Failed to execute 'transferToImageBitmap' on
+// > 'OffscreenCanvas': ImageBitmap construction failed
+
+/*
 export const syncPeopleIndex = async () => {
-    // TODO-ML(MR): Forced disable clustering. It doesn't currently work,
-    // need to finalize it before we move out of beta.
-    //
-    // > Error: Failed to execute 'transferToImageBitmap' on
-    // > 'OffscreenCanvas': ImageBitmap construction failed
-    /*
+
     if (
         syncContext.outOfSyncFiles.length <= 0 ||
         (syncContext.nSyncedFiles === batchSize && Math.random() < 0)
@@ -36,16 +34,16 @@ export const syncPeopleIndex = async () => {
     if (filesVersion <= (await mlIDbStorage.getIndexVersion("people"))) {
         return;
     }
-    */
+
 
     // TODO: have faces addresable through fileId + faceId
     // to avoid index based addressing, which is prone to wrong results
     // one way could be to match nearest face within threshold in the file
-    /*
+
     const allFacesMap =
         syncContext.allSyncedFacesMap ??
         (syncContext.allSyncedFacesMap = await mlIDbStorage.getAllFacesMap());
-    */
+
 
     // await this.init();
 
@@ -87,13 +85,13 @@ export const syncPeopleIndex = async () => {
                 : best,
         );
 
-    /* Generate face crop
+
     if (personFace && !personFace.crop?.cacheKey) {
         const file = await getLocalFile(personFace.fileId);
         const imageBitmap = await fetchImageBitmap(file);
         await saveFaceCrop(imageBitmap, personFace);
     }
-    */
+
 
     const person: Person = {
         id: index,
@@ -113,3 +111,4 @@ export const syncPeopleIndex = async () => {
 
     // await mlIDbStorage.setIndexVersion("people", filesVersion);
 };
+*/