Manav Rathi 2024-04-19 14:49:15 +05:30
parent 5f87dce547
commit 82c365b2db
3 changed files with 43 additions and 49 deletions


@@ -1,48 +0,0 @@
import { getFileNameSize } from "@/next/file";
import log from "@/next/log";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { CustomError } from "@ente/shared/error";
import { Remote } from "comlink";
import { FILE_READER_CHUNK_SIZE } from "constants/upload";
import { getElectronFileStream, getFileStream } from "services/readerService";
import { DataStream, ElectronFile } from "types/upload";

export async function getFileHash(
    worker: Remote<DedicatedCryptoWorker>,
    file: File | ElectronFile,
) {
    try {
        log.info(`getFileHash called for ${getFileNameSize(file)}`);
        let filedata: DataStream;
        if (file instanceof File) {
            filedata = getFileStream(file, FILE_READER_CHUNK_SIZE);
        } else {
            filedata = await getElectronFileStream(
                file,
                FILE_READER_CHUNK_SIZE,
            );
        }
        const hashState = await worker.initChunkHashing();
        const streamReader = filedata.stream.getReader();
        for (let i = 0; i < filedata.chunkCount; i++) {
            const { done, value: chunk } = await streamReader.read();
            if (done) {
                throw Error(CustomError.CHUNK_LESS_THAN_EXPECTED);
            }
            await worker.hashFileChunk(hashState, Uint8Array.from(chunk));
        }
        const { done } = await streamReader.read();
        if (!done) {
            throw Error(CustomError.CHUNK_MORE_THAN_EXPECTED);
        }
        const hash = await worker.completeChunkHashing(hashState);
        log.info(
            `file hashing completed successfully ${getFileNameSize(file)}`,
        );
        return hash;
    } catch (e) {
        log.error("getFileHash failed", e);
        log.info(`file hashing failed ${getFileNameSize(file)} ,${e.message} `);
    }
}
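The deleted function reads the file as a stream of fixed-size chunks, feeds each chunk to a streaming hash running in a worker, and then verifies that the stream produced exactly chunkCount chunks. A minimal self-contained sketch of that same pattern, using Node's built-in streaming hash in place of the app's worker-backed hashing (CHUNK_SIZE and hashBlobInChunks are illustrative names, not part of this commit):

import { createHash } from "node:crypto";

// Illustrative stand-ins: the app uses FILE_READER_CHUNK_SIZE and a
// worker-backed hash, not necessarily SHA-256.
const CHUNK_SIZE = 4 * 1024 * 1024;

async function hashBlobInChunks(blob: Blob): Promise<string> {
    const hash = createHash("sha256");
    const expectedChunks = Math.ceil(blob.size / CHUNK_SIZE);
    for (let i = 0; i < expectedChunks; i++) {
        const start = i * CHUNK_SIZE;
        const chunk = blob.slice(start, start + CHUNK_SIZE);
        // Stream one chunk at a time so the whole file never sits in memory.
        hash.update(new Uint8Array(await chunk.arrayBuffer()));
    }
    return hash.digest("hex");
}

Reading chunk by chunk is what lets the uploader hash multi-gigabyte videos without buffering the entire file.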


@@ -11,14 +11,17 @@ import {
import { Remote } from "comlink";
import { FILE_TYPE } from "constants/file";
import {
    FILE_READER_CHUNK_SIZE,
    LIVE_PHOTO_ASSET_SIZE_LIMIT,
    NULL_EXTRACTED_METADATA,
    NULL_LOCATION,
} from "constants/upload";
import * as ffmpegService from "services/ffmpeg/ffmpegService";
import { getElectronFileStream, getFileStream } from "services/readerService";
import { getFileType } from "services/typeDetectionService";
import { FilePublicMagicMetadataProps } from "types/file";
import {
    DataStream,
    ElectronFile,
    ExtractMetadataResult,
    FileTypeInfo,
@@ -33,7 +36,6 @@ import {
import { getFileTypeFromExtensionForLivePhotoClustering } from "utils/file/livePhoto";
import { getUint8ArrayView } from "../readerService";
import { getEXIFLocation, getEXIFTime, getParsedExifData } from "./exifService";
import { getFileHash } from "./hashService";
import { generateThumbnail } from "./thumbnailService";
import uploadCancelService from "./uploadCancelService";
import { extractFileMetadata } from "./uploadService";
@@ -610,3 +612,43 @@ function splitFilenameAndExtension(filename: string): [string, string] {
const isImageOrVideo = (fileType: FILE_TYPE) =>
    [FILE_TYPE.IMAGE, FILE_TYPE.VIDEO].includes(fileType);

async function getFileHash(
    worker: Remote<DedicatedCryptoWorker>,
    file: File | ElectronFile,
) {
    try {
        log.info(`getFileHash called for ${getFileNameSize(file)}`);
        let filedata: DataStream;
        if (file instanceof File) {
            filedata = getFileStream(file, FILE_READER_CHUNK_SIZE);
        } else {
            filedata = await getElectronFileStream(
                file,
                FILE_READER_CHUNK_SIZE,
            );
        }
        const hashState = await worker.initChunkHashing();
        const streamReader = filedata.stream.getReader();
        for (let i = 0; i < filedata.chunkCount; i++) {
            const { done, value: chunk } = await streamReader.read();
            if (done) {
                throw Error(CustomError.CHUNK_LESS_THAN_EXPECTED);
            }
            await worker.hashFileChunk(hashState, Uint8Array.from(chunk));
        }
        const { done } = await streamReader.read();
        if (!done) {
            throw Error(CustomError.CHUNK_MORE_THAN_EXPECTED);
        }
        const hash = await worker.completeChunkHashing(hashState);
        log.info(
            `file hashing completed successfully ${getFileNameSize(file)}`,
        );
        return hash;
    } catch (e) {
        log.error("getFileHash failed", e);
        log.info(`file hashing failed ${getFileNameSize(file)} ,${e.message} `);
    }
}
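For context, the initChunkHashing / hashFileChunk / completeChunkHashing calls above form a classic init/update/final streaming-hash interface. A hedged sketch of how such a trio could be implemented with libsodium's incremental generic hash; this is an illustrative assumption, not taken from DedicatedCryptoWorker itself:

import sodium from "libsodium-wrappers";
import type { StateAddress } from "libsodium-wrappers";

export async function initChunkHashing(): Promise<StateAddress> {
    await sodium.ready;
    // crypto_generichash (BLAKE2b) supports incremental hashing via an
    // opaque state handle.
    return sodium.crypto_generichash_init(
        null,
        sodium.crypto_generichash_BYTES_MAX,
    );
}

export async function hashFileChunk(state: StateAddress, chunk: Uint8Array) {
    sodium.crypto_generichash_update(state, chunk);
}

export async function completeChunkHashing(state: StateAddress) {
    const hash = sodium.crypto_generichash_final(
        state,
        sodium.crypto_generichash_BYTES_MAX,
    );
    return sodium.to_base64(hash, sodium.base64_variants.ORIGINAL);
}

Splitting the hash into init/update/final is what allows the caller to push one FILE_READER_CHUNK_SIZE chunk at a time across the comlink worker boundary instead of transferring the whole file.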