diff --git a/web/apps/photos/src/services/upload/fileService.ts b/web/apps/photos/src/services/upload/fileService.ts
deleted file mode 100644
index e69de29bb..000000000
diff --git a/web/apps/photos/src/services/upload/hashService.tsx b/web/apps/photos/src/services/upload/hashService.tsx
deleted file mode 100644
index aa275fb34..000000000
--- a/web/apps/photos/src/services/upload/hashService.tsx
+++ /dev/null
@@ -1,48 +0,0 @@
-import { getFileNameSize } from "@/next/file";
-import log from "@/next/log";
-import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
-import { CustomError } from "@ente/shared/error";
-import { Remote } from "comlink";
-import { FILE_READER_CHUNK_SIZE } from "constants/upload";
-import { getElectronFileStream, getFileStream } from "services/readerService";
-import { DataStream, ElectronFile } from "types/upload";
-
-export async function getFileHash(
-    worker: Remote<DedicatedCryptoWorker>,
-    file: File | ElectronFile,
-) {
-    try {
-        log.info(`getFileHash called for ${getFileNameSize(file)}`);
-        let filedata: DataStream;
-        if (file instanceof File) {
-            filedata = getFileStream(file, FILE_READER_CHUNK_SIZE);
-        } else {
-            filedata = await getElectronFileStream(
-                file,
-                FILE_READER_CHUNK_SIZE,
-            );
-        }
-        const hashState = await worker.initChunkHashing();
-
-        const streamReader = filedata.stream.getReader();
-        for (let i = 0; i < filedata.chunkCount; i++) {
-            const { done, value: chunk } = await streamReader.read();
-            if (done) {
-                throw Error(CustomError.CHUNK_LESS_THAN_EXPECTED);
-            }
-            await worker.hashFileChunk(hashState, Uint8Array.from(chunk));
-        }
-        const { done } = await streamReader.read();
-        if (!done) {
-            throw Error(CustomError.CHUNK_MORE_THAN_EXPECTED);
-        }
-        const hash = await worker.completeChunkHashing(hashState);
-        log.info(
-            `file hashing completed successfully ${getFileNameSize(file)}`,
-        );
-        return hash;
-    } catch (e) {
-        log.error("getFileHash failed", e);
-        log.info(`file hashing failed ${getFileNameSize(file)} ,${e.message} `);
-    }
-}
diff --git a/web/apps/photos/src/services/upload/metadataService.ts b/web/apps/photos/src/services/upload/metadataService.ts
index 498290a09..5a8c4e1f5 100644
--- a/web/apps/photos/src/services/upload/metadataService.ts
+++ b/web/apps/photos/src/services/upload/metadataService.ts
@@ -11,14 +11,17 @@ import {
 import { Remote } from "comlink";
 import { FILE_TYPE } from "constants/file";
 import {
+    FILE_READER_CHUNK_SIZE,
     LIVE_PHOTO_ASSET_SIZE_LIMIT,
     NULL_EXTRACTED_METADATA,
     NULL_LOCATION,
 } from "constants/upload";
 import * as ffmpegService from "services/ffmpeg/ffmpegService";
+import { getElectronFileStream, getFileStream } from "services/readerService";
 import { getFileType } from "services/typeDetectionService";
 import { FilePublicMagicMetadataProps } from "types/file";
 import {
+    DataStream,
     ElectronFile,
     ExtractMetadataResult,
     FileTypeInfo,
@@ -33,7 +36,6 @@ import {
 import { getFileTypeFromExtensionForLivePhotoClustering } from "utils/file/livePhoto";
 import { getUint8ArrayView } from "../readerService";
 import { getEXIFLocation, getEXIFTime, getParsedExifData } from "./exifService";
-import { getFileHash } from "./hashService";
 import { generateThumbnail } from "./thumbnailService";
 import uploadCancelService from "./uploadCancelService";
 import { extractFileMetadata } from "./uploadService";
@@ -610,3 +612,43 @@ function splitFilenameAndExtension(filename: string): [string, string] {
 
 const isImageOrVideo = (fileType: FILE_TYPE) =>
     [FILE_TYPE.IMAGE, FILE_TYPE.VIDEO].includes(fileType);
+
+async function getFileHash(
+    worker: Remote<DedicatedCryptoWorker>,
+    file: File | ElectronFile,
+) {
+    try {
+        log.info(`getFileHash called for ${getFileNameSize(file)}`);
+        let filedata: DataStream;
+        if (file instanceof File) {
+            filedata = getFileStream(file, FILE_READER_CHUNK_SIZE);
+        } else {
+            filedata = await getElectronFileStream(
+                file,
+                FILE_READER_CHUNK_SIZE,
+            );
+        }
+        const hashState = await worker.initChunkHashing();
+
+        const streamReader = filedata.stream.getReader();
+        for (let i = 0; i < filedata.chunkCount; i++) {
+            const { done, value: chunk } = await streamReader.read();
+            if (done) {
+                throw Error(CustomError.CHUNK_LESS_THAN_EXPECTED);
+            }
+            await worker.hashFileChunk(hashState, Uint8Array.from(chunk));
+        }
+        const { done } = await streamReader.read();
+        if (!done) {
+            throw Error(CustomError.CHUNK_MORE_THAN_EXPECTED);
+        }
+        const hash = await worker.completeChunkHashing(hashState);
+        log.info(
+            `file hashing completed successfully ${getFileNameSize(file)}`,
+        );
+        return hash;
+    } catch (e) {
+        log.error("getFileHash failed", e);
+        log.info(`file hashing failed ${getFileNameSize(file)} ,${e.message} `);
+    }
+}
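Note on the moved `getFileHash`: it reads the file as a stream of `FILE_READER_CHUNK_SIZE` chunks, feeds each chunk to the crypto worker's incremental hasher, and then performs one extra `read()` to assert the stream produced exactly `chunkCount` chunks, presumably to catch a file whose size changed between stat and read. A minimal standalone sketch of that chunk-counted pattern, for illustration only: Node's built-in `crypto` and SHA-256 stand in here for the comlink `DedicatedCryptoWorker` and whatever hash it actually uses, and `hashStreamInChunks` is a hypothetical helper, not part of this PR.

```ts
// Illustrative sketch only: mirrors the chunk-counted hashing loop of
// getFileHash, with Node's crypto replacing the comlink crypto worker.
import { createHash } from "node:crypto";

async function hashStreamInChunks(
    stream: ReadableStream<Uint8Array>, // e.g. the DataStream's stream
    chunkCount: number, // expected chunk count, derived from the file size
): Promise<string> {
    // Stand-in for worker.initChunkHashing(); the real worker holds the
    // hash state on its side and is driven remotely via comlink.
    const hasher = createHash("sha256");

    const reader = stream.getReader();
    for (let i = 0; i < chunkCount; i++) {
        const { done, value: chunk } = await reader.read();
        // Stream ended early: fewer chunks than the file size implied
        // (CHUNK_LESS_THAN_EXPECTED in the original).
        if (done) throw new Error("fewer chunks than expected");
        hasher.update(chunk); // stand-in for worker.hashFileChunk(...)
    }

    // One extra read to assert the stream is exhausted; another chunk
    // means the file grew mid-read (CHUNK_MORE_THAN_EXPECTED).
    const { done } = await reader.read();
    if (!done) throw new Error("more chunks than expected");

    // Stand-in for worker.completeChunkHashing(hashState).
    return hasher.digest("hex");
}
```

Counting chunks against an expected total, rather than simply reading until `done`, is what lets the caller detect both truncated and grown files during hashing.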