Prune
This commit is contained in:
parent 40dfeb5bca
commit 9346ce3255

4 changed files with 15 additions and 54 deletions
@@ -58,10 +58,7 @@ export const PeopleList = React.memo((props: PeopleListProps) => {
                         props.onSelect && props.onSelect(person, index)
                     }
                 >
-                    <FaceCropImageView
-                        faceID={person.displayFaceId}
-                        cacheKey={person.faceCropCacheKey}
-                    />
+                    <FaceCropImageView faceID={person.displayFaceId} />
                 </FaceChip>
             ))}
         </FaceChipContainer>
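A side note on an untouched context line above: `props.onSelect && props.onSelect(person, index)` is the pre-optional-chaining guard; `props.onSelect?.(person, index)` says the same thing more compactly, should a follow-up want it.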
@@ -109,7 +106,7 @@ export function UnidentifiedFaces(props: {
     file: EnteFile;
     updateMLDataIndex: number;
 }) {
-    const [faces, setFaces] = useState<Array<Face>>([]);
+    const [faces, setFaces] = useState<Face[]>([]);
 
     useEffect(() => {
         let didCancel = false;
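`Array<Face>` and `Face[]` denote the same type; the bracket form is just the more conventional spelling for a simple element type. The same purely stylistic swap recurs below in `getPeopleList`'s return type (`Promise<Array<Person>>` to `Promise<Person[]>`).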
@@ -137,10 +134,7 @@ export function UnidentifiedFaces(props: {
             {faces &&
                 faces.map((face, index) => (
                     <FaceChip key={index}>
-                        <FaceCropImageView
-                            faceID={face.id}
-                            cacheKey={face.crop?.cacheKey}
-                        />
+                        <FaceCropImageView faceID={face.id} />
                     </FaceChip>
                 ))}
         </FaceChipContainer>
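One thing this hunk leaves as-is: the chips are keyed by array position, `key={index}`. Since each face carries a stable `face.id`, keying with `<FaceChip key={face.id}>` would hold up better if the list ever reorders; a possible follow-up, not part of this prune.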
@@ -150,13 +144,9 @@ export function UnidentifiedFaces(props: {
 
 interface FaceCropImageViewProps {
     faceID: string;
-    cacheKey?: string;
 }
 
-const FaceCropImageView: React.FC<FaceCropImageViewProps> = ({
-    faceID,
-    cacheKey,
-}) => {
+const FaceCropImageView: React.FC<FaceCropImageViewProps> = ({ faceID }) => {
     const [objectURL, setObjectURL] = useState<string | undefined>();
 
     useEffect(() => {
@@ -190,7 +180,7 @@ const FaceCropImageView: React.FC<FaceCropImageViewProps> = ({
             didCancel = true;
             if (objectURL) URL.revokeObjectURL(objectURL);
         };
-    }, [faceID, cacheKey]);
+    }, [faceID]);
 
     return objectURL ? (
         <img src={objectURL} />
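The middle of this effect falls outside the visible hunks. Here is a minimal sketch of the object-URL lifecycle the surviving lines imply, with `getFaceCrop` as a hypothetical stand-in for whatever lookup the component actually performs:

    import { useEffect, useState } from "react";

    // Hypothetical stand-in; the real crop lookup is not shown in this diff.
    declare const getFaceCrop: (faceID: string) => Promise<Blob | undefined>;

    const useFaceCropURL = (faceID: string) => {
        const [objectURL, setObjectURL] = useState<string | undefined>();

        useEffect(() => {
            let didCancel = false;
            void getFaceCrop(faceID).then((blob) => {
                // Drop stale results after re-render or unmount.
                if (!didCancel && blob) setObjectURL(URL.createObjectURL(blob));
            });
            return () => {
                didCancel = true;
                // Release the blob reference; note the cleanup closes over the
                // objectURL captured when this effect last ran.
                if (objectURL) URL.revokeObjectURL(objectURL);
            };
        }, [faceID]);

        return objectURL;
    };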
@@ -199,7 +189,7 @@ const FaceCropImageView: React.FC<FaceCropImageViewProps> = ({
     );
 };
 
-async function getPeopleList(file: EnteFile): Promise<Array<Person>> {
+async function getPeopleList(file: EnteFile): Promise<Person[]> {
     let startTime = Date.now();
     const mlFileData: MlFileData = await mlIDbStorage.getFile(file.id);
     log.info(
@@ -9,7 +9,6 @@ import {
     Face,
     FaceAlignment,
     FaceDetection,
-    FaceEmbedding,
     type MlFileData,
 } from "services/face/types";
 import { defaultMLVersion } from "services/machineLearning/machineLearningService";
@@ -166,7 +165,7 @@ const convertToYOLOInputFloat32ChannelsFirst = (imageBitmap: ImageBitmap) => {
     const requiredWidth = 640;
     const requiredHeight = 640;
 
-    const { width, height } = imageBitmap;
+    const { width, height } = imageBitmap;
 
     // Create an OffscreenCanvas and set its size.
     const offscreenCanvas = new OffscreenCanvas(width, height);
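Only the canvas setup is visible in this hunk. For orientation, a sketch of what a Float32, channels-first YOLO input conversion typically looks like; the letterboxing and normalization details are assumptions about the general technique, not this file's verbatim code:

    // Sketch: pack an ImageBitmap into a 640×640, channels-first (RGB planes)
    // Float32Array, the usual YOLO input layout. Details assumed, not verbatim.
    const toYOLOInput = (imageBitmap: ImageBitmap): Float32Array => {
        const requiredWidth = 640;
        const requiredHeight = 640;
        const { width, height } = imageBitmap;

        const canvas = new OffscreenCanvas(requiredWidth, requiredHeight);
        const ctx = canvas.getContext("2d")!;
        // Fit the bitmap inside the target square, preserving aspect ratio.
        const scale = Math.min(requiredWidth / width, requiredHeight / height);
        ctx.drawImage(imageBitmap, 0, 0, width * scale, height * scale);

        const { data } = ctx.getImageData(0, 0, requiredWidth, requiredHeight);
        const pixels = requiredWidth * requiredHeight;
        const input = new Float32Array(3 * pixels);
        for (let i = 0; i < pixels; i++) {
            // Interleaved RGBA in, planar RGB out, normalized to [0, 1].
            input[i] = data[i * 4] / 255;
            input[pixels + i] = data[i * 4 + 1] / 255;
            input[2 * pixels + i] = data[i * 4 + 2] / 255;
        }
        return input;
    };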
@@ -615,11 +614,11 @@ const mobileFaceNetEmbeddingSize = 192;
  */
 const computeEmbeddings = async (
     faceData: Float32Array,
-): Promise<FaceEmbedding[]> => {
+): Promise<Float32Array[]> => {
     const outputData = await workerBridge.faceEmbeddings(faceData);
 
     const embeddingSize = mobileFaceNetEmbeddingSize;
-    const embeddings = new Array<FaceEmbedding>(
+    const embeddings = new Array<Float32Array>(
         outputData.length / embeddingSize,
     );
     for (let i = 0; i < embeddings.length; i++) {
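The body of the `for` loop is cut off by this hunk. Assuming `outputData` is one flat `Float32Array` carrying 192 floats per face (consistent with `mobileFaceNetEmbeddingSize = 192` in the hunk header), the conventional completion carves views out of the flat buffer:

    // Plausible completion of the loop above; subarray returns views into
    // the flat output buffer rather than copies.
    for (let i = 0; i < embeddings.length; i++) {
        embeddings[i] = outputData.subarray(
            i * embeddingSize,
            (i + 1) * embeddingSize,
        );
    }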
@@ -93,7 +93,6 @@ export const syncPeopleIndex = async () => {
             id: index,
             files: faces.map((f) => f.fileId),
             displayFaceId: personFace?.id,
-            faceCropCacheKey: personFace?.crop?.cacheKey,
         };
 
         await mlIDbStorage.putPerson(person);
@@ -11,30 +11,6 @@ export interface FaceDetection {
     probability?: number;
 }
 
-export interface DetectedFace {
-    fileId: number;
-    detection: FaceDetection;
-}
-
-export interface DetectedFaceWithId extends DetectedFace {
-    id: string;
-}
-
-export interface FaceCrop {
-    image: ImageBitmap;
-    // imageBox is relative to image dimentions stored at mlFileData
-    imageBox: Box;
-}
-
-export interface StoredFaceCrop {
-    cacheKey: string;
-    imageBox: Box;
-}
-
-export interface CroppedFace extends DetectedFaceWithId {
-    crop?: StoredFaceCrop;
-}
-
 export interface FaceAlignment {
     // TODO-ML: remove affine matrix as rotation, size and center
     // are simple to store and use, affine matrix adds complexity while getting crop
@@ -45,18 +21,16 @@ export interface FaceAlignment {
     center: Point;
 }
 
-export interface AlignedFace extends CroppedFace {
+export interface Face {
+    fileId: number;
+    detection: FaceDetection;
+    id: string;
+
     alignment?: FaceAlignment;
     blurValue?: number;
-}
 
-export declare type FaceEmbedding = Float32Array;
+    embedding?: Float32Array;
 
-export interface FaceWithEmbedding extends AlignedFace {
-    embedding?: FaceEmbedding;
-}
-
-export interface Face extends FaceWithEmbedding {
     personId?: number;
 }
 
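Read together with the previous hunk, the prune collapses the `DetectedFace` → `DetectedFaceWithId` → `CroppedFace` → `AlignedFace` → `FaceWithEmbedding` → `Face` chain into one flat interface. Reassembled from the added lines (a reconstruction, not a verbatim quote of the new file):

    export interface Face {
        fileId: number;
        detection: FaceDetection;
        id: string;

        alignment?: FaceAlignment;
        blurValue?: number;

        embedding?: Float32Array;

        personId?: number;
    }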
@@ -65,7 +39,6 @@ export interface Person {
     name?: string;
     files: Array<number>;
     displayFaceId?: string;
-    faceCropCacheKey?: string;
 }
 
 export interface MlFileData {
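With `faceCropCacheKey` gone here as well (matching the `syncPeopleIndex` hunk above), the visible portion of `Person` reduces to the following; any fields above `name` sit outside the hunk and are not shown:

    export interface Person {
        // ...earlier fields not visible in this hunk
        name?: string;
        files: Array<number>;
        displayFaceId?: string;
    }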