diff --git a/web/apps/photos/src/components/ml/PeopleList.tsx b/web/apps/photos/src/components/ml/PeopleList.tsx
index 34293254d..b9ceea883 100644
--- a/web/apps/photos/src/components/ml/PeopleList.tsx
+++ b/web/apps/photos/src/components/ml/PeopleList.tsx
@@ -58,10 +58,7 @@ export const PeopleList = React.memo((props: PeopleListProps) => {
props.onSelect && props.onSelect(person, index)
}
>
-                    <FaceCropImageView
-                        faceID={person.displayFaceId}
-                        cacheKey={person.faceCropCacheKey}
-                    />
+                    <FaceCropImageView faceID={person.displayFaceId} />
))}
@@ -109,7 +106,7 @@ export function UnidentifiedFaces(props: {
file: EnteFile;
updateMLDataIndex: number;
}) {
-    const [faces, setFaces] = useState<Array<Face>>([]);
+    const [faces, setFaces] = useState<Face[]>([]);
useEffect(() => {
let didCancel = false;
@@ -137,10 +134,7 @@ export function UnidentifiedFaces(props: {
{faces &&
faces.map((face, index) => (
-                        <FaceCropImageView
-                            faceID={face.id}
-                            cacheKey={face.crop?.cacheKey}
-                        />
+                        <FaceCropImageView faceID={face.id} />
))}
@@ -150,13 +144,9 @@ export function UnidentifiedFaces(props: {
interface FaceCropImageViewProps {
faceID: string;
- cacheKey?: string;
}
-const FaceCropImageView: React.FC<FaceCropImageViewProps> = ({
-    faceID,
-    cacheKey,
-}) => {
+const FaceCropImageView: React.FC<FaceCropImageViewProps> = ({ faceID }) => {
     const [objectURL, setObjectURL] = useState<string | undefined>();
useEffect(() => {
@@ -190,7 +180,7 @@ const FaceCropImageView: React.FC = ({
didCancel = true;
if (objectURL) URL.revokeObjectURL(objectURL);
};
- }, [faceID, cacheKey]);
+ }, [faceID]);
return objectURL ? (
@@ -199,7 +189,7 @@ const FaceCropImageView: React.FC = ({
);
};
-async function getPeopleList(file: EnteFile): Promise<Array<Person>> {
+async function getPeopleList(file: EnteFile): Promise<Person[]> {
let startTime = Date.now();
const mlFileData: MlFileData = await mlIDbStorage.getFile(file.id);
log.info(
diff --git a/web/apps/photos/src/services/face/f-index.ts b/web/apps/photos/src/services/face/f-index.ts
index 210bced35..6224c87d3 100644
--- a/web/apps/photos/src/services/face/f-index.ts
+++ b/web/apps/photos/src/services/face/f-index.ts
@@ -9,7 +9,6 @@ import {
Face,
FaceAlignment,
FaceDetection,
- FaceEmbedding,
type MlFileData,
} from "services/face/types";
import { defaultMLVersion } from "services/machineLearning/machineLearningService";
@@ -166,7 +165,7 @@ const convertToYOLOInputFloat32ChannelsFirst = (imageBitmap: ImageBitmap) => {
const requiredWidth = 640;
const requiredHeight = 640;
- const { width, height } = imageBitmap;
+ const { width, height } = imageBitmap;
// Create an OffscreenCanvas and set its size.
const offscreenCanvas = new OffscreenCanvas(width, height);
@@ -615,11 +614,11 @@ const mobileFaceNetEmbeddingSize = 192;
*/
const computeEmbeddings = async (
faceData: Float32Array,
-): Promise<FaceEmbedding[]> => {
+): Promise<Float32Array[]> => {
const outputData = await workerBridge.faceEmbeddings(faceData);
const embeddingSize = mobileFaceNetEmbeddingSize;
-    const embeddings = new Array<FaceEmbedding>(
+    const embeddings = new Array<Float32Array>(
outputData.length / embeddingSize,
);
for (let i = 0; i < embeddings.length; i++) {
diff --git a/web/apps/photos/src/services/face/people.ts b/web/apps/photos/src/services/face/people.ts
index 5a7026a85..f5a073328 100644
--- a/web/apps/photos/src/services/face/people.ts
+++ b/web/apps/photos/src/services/face/people.ts
@@ -93,7 +93,6 @@ export const syncPeopleIndex = async () => {
id: index,
files: faces.map((f) => f.fileId),
displayFaceId: personFace?.id,
- faceCropCacheKey: personFace?.crop?.cacheKey,
};
await mlIDbStorage.putPerson(person);
diff --git a/web/apps/photos/src/services/face/types.ts b/web/apps/photos/src/services/face/types.ts
index fadbf427f..261fcf7fc 100644
--- a/web/apps/photos/src/services/face/types.ts
+++ b/web/apps/photos/src/services/face/types.ts
@@ -11,30 +11,6 @@ export interface FaceDetection {
probability?: number;
}
-export interface DetectedFace {
- fileId: number;
- detection: FaceDetection;
-}
-
-export interface DetectedFaceWithId extends DetectedFace {
- id: string;
-}
-
-export interface FaceCrop {
- image: ImageBitmap;
- // imageBox is relative to image dimentions stored at mlFileData
- imageBox: Box;
-}
-
-export interface StoredFaceCrop {
- cacheKey: string;
- imageBox: Box;
-}
-
-export interface CroppedFace extends DetectedFaceWithId {
- crop?: StoredFaceCrop;
-}
-
export interface FaceAlignment {
// TODO-ML: remove affine matrix as rotation, size and center
// are simple to store and use, affine matrix adds complexity while getting crop
@@ -45,18 +21,16 @@ export interface FaceAlignment {
center: Point;
}
-export interface AlignedFace extends CroppedFace {
+export interface Face {
+ fileId: number;
+ detection: FaceDetection;
+ id: string;
+
alignment?: FaceAlignment;
blurValue?: number;
-}
-export declare type FaceEmbedding = Float32Array;
+ embedding?: Float32Array;
-export interface FaceWithEmbedding extends AlignedFace {
- embedding?: FaceEmbedding;
-}
-
-export interface Face extends FaceWithEmbedding {
personId?: number;
}
@@ -65,7 +39,6 @@ export interface Person {
name?: string;
     files: Array<number>;
displayFaceId?: string;
- faceCropCacheKey?: string;
}
export interface MlFileData {