[desktop] Watch related refactoring - Part 2/x (#1488)
Inline, mostly, in preparation for a subsequent refactoring.
commit 814803edb4

16 changed files with 852 additions and 901 deletions
@@ -131,12 +131,6 @@ export default function Uploader(props: Props) {
     const closeUploadProgress = () => setUploadProgressView(false);
     const showUserNameInputDialog = () => setUserNameInputDialogView(true);

-    // eslint-disable-next-line @typescript-eslint/no-unused-vars
-    const setCollectionName = (collectionName: string) => {
-        isPendingDesktopUpload.current = true;
-        pendingDesktopUploadCollectionName.current = collectionName;
-    };
-
     const handleChoiceModalClose = () => {
         setChoiceModalView(false);
         uploadRunning.current = false;
@@ -186,13 +180,26 @@ export default function Uploader(props: Props) {
                 );
             }
         });
-        /* TODO(MR): This is the connection point, implement
-        watcher.init(
-            setElectronFiles,
-            setCollectionName,
-            props.syncWithRemote,
-        );
-        */
+
+        // eslint-disable-next-line @typescript-eslint/no-unused-vars
+        const upload = (collectionName: string, filePaths: string[]) => {
+            isPendingDesktopUpload.current = true;
+            pendingDesktopUploadCollectionName.current = collectionName;
+
+            // TODO (MR):
+            // setElectronFiles(filePaths);
+        };
+
+        const requestSyncWithRemote = () => {
+            props.syncWithRemote().catch((e) => {
+                log.error(
+                    "Ignoring error when syncing trash changes with remote",
+                    e,
+                );
+            });
+        };
+
+        watcher.init(upload, requestSyncWithRemote);
     }
 }, [
     publicCollectionGalleryContext.accessedThroughSharedURL,
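The hunk above fixes the connection point that Part 1 left as a TODO: the watcher is now initialized with an upload callback and a sync callback instead of raw setters. A minimal sketch of the contract this implies on the watcher side (the type names and class shape here are assumptions for illustration; the real FolderWatcher lives elsewhere in the codebase):

type UploadHandler = (collectionName: string, filePaths: string[]) => void;
type SyncHandler = () => void;

class FolderWatcher {
    private upload: UploadHandler | undefined;
    private requestSyncWithRemote: SyncHandler | undefined;

    // Called once from the Uploader component to wire UI-side callbacks
    // into the watcher before it starts acting on file-system events.
    init(upload: UploadHandler, requestSyncWithRemote: SyncHandler) {
        this.upload = upload;
        this.requestSyncWithRemote = requestSyncWithRemote;
    }
}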
@@ -144,8 +144,10 @@ class FaceService {
             syncContext.faceEmbeddingService.faceSize,
             imageBitmap,
         );
-        const blurValues =
-            syncContext.blurDetectionService.detectBlur(faceImages, newMlFile.faces);
+        const blurValues = syncContext.blurDetectionService.detectBlur(
+            faceImages,
+            newMlFile.faces,
+        );
         newMlFile.faces.forEach((f, i) => (f.blurValue = blurValues[i]));

         imageBitmap.close();
@@ -80,7 +80,7 @@ class LaplacianBlurDetectionService implements BlurDetectionService {

         // Create a new matrix with extra padding
         const paddedImage: number[][] = Array.from(
-            { length: paddedNumRows},
+            { length: paddedNumRows },
             () => new Array(paddedNumCols).fill(0),
         );

@@ -1,46 +0,0 @@
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { EncryptionResult } from "@ente/shared/crypto/types";
import { Remote } from "comlink";
import { DataStream, isDataStream } from "types/upload";

async function encryptFileStream(
    worker: Remote<DedicatedCryptoWorker>,
    fileData: DataStream,
) {
    const { stream, chunkCount } = fileData;
    const fileStreamReader = stream.getReader();
    const { key, decryptionHeader, pushState } =
        await worker.initChunkEncryption();
    const ref = { pullCount: 1 };
    const encryptedFileStream = new ReadableStream({
        async pull(controller) {
            const { value } = await fileStreamReader.read();
            const encryptedFileChunk = await worker.encryptFileChunk(
                value,
                pushState,
                ref.pullCount === chunkCount,
            );
            controller.enqueue(encryptedFileChunk);
            if (ref.pullCount === chunkCount) {
                controller.close();
            }
            ref.pullCount++;
        },
    });
    return {
        key,
        file: {
            decryptionHeader,
            encryptedData: { stream: encryptedFileStream, chunkCount },
        },
    };
}

export async function encryptFiledata(
    worker: Remote<DedicatedCryptoWorker>,
    filedata: Uint8Array | DataStream,
): Promise<EncryptionResult<Uint8Array | DataStream>> {
    return isDataStream(filedata)
        ? await encryptFileStream(worker, filedata)
        : await worker.encryptFile(filedata);
}
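For reference, the { stream, chunkCount } shape produced above wraps a standard Web ReadableStream, so any consumer drains it with the usual reader loop. A minimal sketch (the function name is hypothetical):

async function collectChunks(stream: ReadableStream<Uint8Array>) {
    const reader = stream.getReader();
    const chunks: Uint8Array[] = [];
    while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        if (value) chunks.push(value);
    }
    return chunks;
}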
@@ -1,156 +0,0 @@
import { getFileNameSize } from "@/next/file";
import log from "@/next/log";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { Remote } from "comlink";
import { FILE_READER_CHUNK_SIZE, MULTIPART_PART_SIZE } from "constants/upload";
import { EncryptedMagicMetadata } from "types/magicMetadata";
import {
    DataStream,
    ElectronFile,
    EncryptedFile,
    ExtractMetadataResult,
    FileInMemory,
    FileTypeInfo,
    FileWithMetadata,
    ParsedMetadataJSON,
    ParsedMetadataJSONMap,
} from "types/upload";
import {
    getElectronFileStream,
    getFileStream,
    getUint8ArrayView,
} from "../readerService";
import { encryptFiledata } from "./encryptionService";
import {
    MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT,
    extractMetadata,
    getClippedMetadataJSONMapKeyForFile,
    getMetadataJSONMapKeyForFile,
} from "./metadataService";
import { generateThumbnail } from "./thumbnailService";

export function getFileSize(file: File | ElectronFile) {
    return file.size;
}

export function getFilename(file: File | ElectronFile) {
    return file.name;
}

export async function readFile(
    fileTypeInfo: FileTypeInfo,
    rawFile: File | ElectronFile,
): Promise<FileInMemory> {
    const { thumbnail, hasStaticThumbnail } = await generateThumbnail(
        rawFile,
        fileTypeInfo,
    );
    log.info(`reading file data ${getFileNameSize(rawFile)} `);
    let filedata: Uint8Array | DataStream;
    if (!(rawFile instanceof File)) {
        if (rawFile.size > MULTIPART_PART_SIZE) {
            filedata = await getElectronFileStream(
                rawFile,
                FILE_READER_CHUNK_SIZE,
            );
        } else {
            filedata = await getUint8ArrayView(rawFile);
        }
    } else if (rawFile.size > MULTIPART_PART_SIZE) {
        filedata = getFileStream(rawFile, FILE_READER_CHUNK_SIZE);
    } else {
        filedata = await getUint8ArrayView(rawFile);
    }

    log.info(`read file data successfully ${getFileNameSize(rawFile)} `);

    return {
        filedata,
        thumbnail,
        hasStaticThumbnail,
    };
}

export async function extractFileMetadata(
    worker: Remote<DedicatedCryptoWorker>,
    parsedMetadataJSONMap: ParsedMetadataJSONMap,
    collectionID: number,
    fileTypeInfo: FileTypeInfo,
    rawFile: File | ElectronFile,
): Promise<ExtractMetadataResult> {
    let key = getMetadataJSONMapKeyForFile(collectionID, rawFile.name);
    let googleMetadata: ParsedMetadataJSON = parsedMetadataJSONMap.get(key);

    if (!googleMetadata && key.length > MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT) {
        key = getClippedMetadataJSONMapKeyForFile(collectionID, rawFile.name);
        googleMetadata = parsedMetadataJSONMap.get(key);
    }

    const { metadata, publicMagicMetadata } = await extractMetadata(
        worker,
        rawFile,
        fileTypeInfo,
    );

    for (const [key, value] of Object.entries(googleMetadata ?? {})) {
        if (!value) {
            continue;
        }
        metadata[key] = value;
    }
    return { metadata, publicMagicMetadata };
}

export async function encryptFile(
    worker: Remote<DedicatedCryptoWorker>,
    file: FileWithMetadata,
    encryptionKey: string,
): Promise<EncryptedFile> {
    try {
        const { key: fileKey, file: encryptedFiledata } = await encryptFiledata(
            worker,
            file.filedata,
        );

        const { file: encryptedThumbnail } = await worker.encryptThumbnail(
            file.thumbnail,
            fileKey,
        );
        const { file: encryptedMetadata } = await worker.encryptMetadata(
            file.metadata,
            fileKey,
        );

        let encryptedPubMagicMetadata: EncryptedMagicMetadata;
        if (file.pubMagicMetadata) {
            const { file: encryptedPubMagicMetadataData } =
                await worker.encryptMetadata(
                    file.pubMagicMetadata.data,
                    fileKey,
                );
            encryptedPubMagicMetadata = {
                version: file.pubMagicMetadata.version,
                count: file.pubMagicMetadata.count,
                data: encryptedPubMagicMetadataData.encryptedData,
                header: encryptedPubMagicMetadataData.decryptionHeader,
            };
        }

        const encryptedKey = await worker.encryptToB64(fileKey, encryptionKey);

        const result: EncryptedFile = {
            file: {
                file: encryptedFiledata,
                thumbnail: encryptedThumbnail,
                metadata: encryptedMetadata,
                pubMagicMetadata: encryptedPubMagicMetadata,
                localID: file.localID,
            },
            fileKey: encryptedKey,
        };
        return result;
    } catch (e) {
        log.error("Error encrypting files", e);
        throw e;
    }
}
@@ -1,48 +0,0 @@
import { getFileNameSize } from "@/next/file";
import log from "@/next/log";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { CustomError } from "@ente/shared/error";
import { Remote } from "comlink";
import { FILE_READER_CHUNK_SIZE } from "constants/upload";
import { getElectronFileStream, getFileStream } from "services/readerService";
import { DataStream, ElectronFile } from "types/upload";

export async function getFileHash(
    worker: Remote<DedicatedCryptoWorker>,
    file: File | ElectronFile,
) {
    try {
        log.info(`getFileHash called for ${getFileNameSize(file)}`);
        let filedata: DataStream;
        if (file instanceof File) {
            filedata = getFileStream(file, FILE_READER_CHUNK_SIZE);
        } else {
            filedata = await getElectronFileStream(
                file,
                FILE_READER_CHUNK_SIZE,
            );
        }
        const hashState = await worker.initChunkHashing();

        const streamReader = filedata.stream.getReader();
        for (let i = 0; i < filedata.chunkCount; i++) {
            const { done, value: chunk } = await streamReader.read();
            if (done) {
                throw Error(CustomError.CHUNK_LESS_THAN_EXPECTED);
            }
            await worker.hashFileChunk(hashState, Uint8Array.from(chunk));
        }
        const { done } = await streamReader.read();
        if (!done) {
            throw Error(CustomError.CHUNK_MORE_THAN_EXPECTED);
        }
        const hash = await worker.completeChunkHashing(hashState);
        log.info(
            `file hashing completed successfully ${getFileNameSize(file)}`,
        );
        return hash;
    } catch (e) {
        log.error("getFileHash failed", e);
        log.info(`file hashing failed ${getFileNameSize(file)} ,${e.message} `);
    }
}
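Note that getFileHash catches and logs failures without rethrowing, so it resolves to undefined on error. A hypothetical call site should account for that:

// `worker` is a comlink Remote<DedicatedCryptoWorker> obtained elsewhere.
const hash = await getFileHash(worker, file);
if (hash === undefined) {
    // Hashing failed; the error was already logged inside getFileHash.
}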
@@ -1,325 +0,0 @@
import { encodeLivePhoto } from "@/media/live-photo";
import log from "@/next/log";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { CustomError } from "@ente/shared/error";
import { Remote } from "comlink";
import { FILE_TYPE } from "constants/file";
import { LIVE_PHOTO_ASSET_SIZE_LIMIT } from "constants/upload";
import { getFileType } from "services/typeDetectionService";
import {
    ElectronFile,
    ExtractMetadataResult,
    FileTypeInfo,
    FileWithCollection,
    LivePhotoAssets,
    ParsedMetadataJSONMap,
} from "types/upload";
import { getFileTypeFromExtensionForLivePhotoClustering } from "utils/file/livePhoto";
import { getUint8ArrayView } from "../readerService";
import { extractFileMetadata } from "./fileService";
import { getFileHash } from "./hashService";
import { generateThumbnail } from "./thumbnailService";
import uploadCancelService from "./uploadCancelService";

interface LivePhotoIdentifier {
    collectionID: number;
    fileType: FILE_TYPE;
    name: string;
    size: number;
}

const UNDERSCORE_THREE = "_3";
// Note: The icloud-photos-downloader library appends _HVEC to the end of the filename in case of live photos
// https://github.com/icloud-photos-downloader/icloud_photos_downloader
const UNDERSCORE_HEVC = "_HVEC";

export async function getLivePhotoFileType(
    livePhotoAssets: LivePhotoAssets,
): Promise<FileTypeInfo> {
    const imageFileTypeInfo = await getFileType(livePhotoAssets.image);
    const videoFileTypeInfo = await getFileType(livePhotoAssets.video);
    return {
        fileType: FILE_TYPE.LIVE_PHOTO,
        exactType: `${imageFileTypeInfo.exactType}+${videoFileTypeInfo.exactType}`,
        imageType: imageFileTypeInfo.exactType,
        videoType: videoFileTypeInfo.exactType,
    };
}

export async function extractLivePhotoMetadata(
    worker: Remote<DedicatedCryptoWorker>,
    parsedMetadataJSONMap: ParsedMetadataJSONMap,
    collectionID: number,
    fileTypeInfo: FileTypeInfo,
    livePhotoAssets: LivePhotoAssets,
): Promise<ExtractMetadataResult> {
    const imageFileTypeInfo: FileTypeInfo = {
        fileType: FILE_TYPE.IMAGE,
        exactType: fileTypeInfo.imageType,
    };
    const {
        metadata: imageMetadata,
        publicMagicMetadata: imagePublicMagicMetadata,
    } = await extractFileMetadata(
        worker,
        parsedMetadataJSONMap,
        collectionID,
        imageFileTypeInfo,
        livePhotoAssets.image,
    );
    const videoHash = await getFileHash(worker, livePhotoAssets.video);
    return {
        metadata: {
            ...imageMetadata,
            title: getLivePhotoName(livePhotoAssets),
            fileType: FILE_TYPE.LIVE_PHOTO,
            imageHash: imageMetadata.hash,
            videoHash: videoHash,
            hash: undefined,
        },
        publicMagicMetadata: imagePublicMagicMetadata,
    };
}

export function getLivePhotoSize(livePhotoAssets: LivePhotoAssets) {
    return livePhotoAssets.image.size + livePhotoAssets.video.size;
}

export function getLivePhotoName(livePhotoAssets: LivePhotoAssets) {
    return livePhotoAssets.image.name;
}

export async function readLivePhoto(
    fileTypeInfo: FileTypeInfo,
    livePhotoAssets: LivePhotoAssets,
) {
    const { thumbnail, hasStaticThumbnail } = await generateThumbnail(
        livePhotoAssets.image,
        {
            exactType: fileTypeInfo.imageType,
            fileType: FILE_TYPE.IMAGE,
        },
    );

    const imageData = await getUint8ArrayView(livePhotoAssets.image);

    const videoData = await getUint8ArrayView(livePhotoAssets.video);

    return {
        filedata: await encodeLivePhoto({
            imageFileName: livePhotoAssets.image.name,
            imageData,
            videoFileName: livePhotoAssets.video.name,
            videoData,
        }),
        thumbnail,
        hasStaticThumbnail,
    };
}

export async function clusterLivePhotoFiles(mediaFiles: FileWithCollection[]) {
    try {
        const analysedMediaFiles: FileWithCollection[] = [];
        mediaFiles
            .sort((firstMediaFile, secondMediaFile) =>
                splitFilenameAndExtension(
                    firstMediaFile.file.name,
                )[0].localeCompare(
                    splitFilenameAndExtension(secondMediaFile.file.name)[0],
                ),
            )
            .sort(
                (firstMediaFile, secondMediaFile) =>
                    firstMediaFile.collectionID - secondMediaFile.collectionID,
            );
        let index = 0;
        while (index < mediaFiles.length - 1) {
            if (uploadCancelService.isUploadCancelationRequested()) {
                throw Error(CustomError.UPLOAD_CANCELLED);
            }
            const firstMediaFile = mediaFiles[index];
            const secondMediaFile = mediaFiles[index + 1];
            const firstFileType =
                getFileTypeFromExtensionForLivePhotoClustering(
                    firstMediaFile.file.name,
                );
            const secondFileType =
                getFileTypeFromExtensionForLivePhotoClustering(
                    secondMediaFile.file.name,
                );
            const firstFileIdentifier: LivePhotoIdentifier = {
                collectionID: firstMediaFile.collectionID,
                fileType: firstFileType,
                name: firstMediaFile.file.name,
                size: firstMediaFile.file.size,
            };
            const secondFileIdentifier: LivePhotoIdentifier = {
                collectionID: secondMediaFile.collectionID,
                fileType: secondFileType,
                name: secondMediaFile.file.name,
                size: secondMediaFile.file.size,
            };
            if (
                areFilesLivePhotoAssets(
                    firstFileIdentifier,
                    secondFileIdentifier,
                )
            ) {
                let imageFile: File | ElectronFile;
                let videoFile: File | ElectronFile;
                if (
                    firstFileType === FILE_TYPE.IMAGE &&
                    secondFileType === FILE_TYPE.VIDEO
                ) {
                    imageFile = firstMediaFile.file;
                    videoFile = secondMediaFile.file;
                } else {
                    videoFile = firstMediaFile.file;
                    imageFile = secondMediaFile.file;
                }
                const livePhotoLocalID = firstMediaFile.localID;
                analysedMediaFiles.push({
                    localID: livePhotoLocalID,
                    collectionID: firstMediaFile.collectionID,
                    isLivePhoto: true,
                    livePhotoAssets: {
                        image: imageFile,
                        video: videoFile,
                    },
                });
                index += 2;
            } else {
                analysedMediaFiles.push({
                    ...firstMediaFile,
                    isLivePhoto: false,
                });
                index += 1;
            }
        }
        if (index === mediaFiles.length - 1) {
            analysedMediaFiles.push({
                ...mediaFiles[index],
                isLivePhoto: false,
            });
        }
        return analysedMediaFiles;
    } catch (e) {
        if (e.message === CustomError.UPLOAD_CANCELLED) {
            throw e;
        } else {
            log.error("failed to cluster live photo", e);
            throw e;
        }
    }
}

function areFilesLivePhotoAssets(
    firstFileIdentifier: LivePhotoIdentifier,
    secondFileIdentifier: LivePhotoIdentifier,
) {
    const haveSameCollectionID =
        firstFileIdentifier.collectionID === secondFileIdentifier.collectionID;
    const areNotSameFileType =
        firstFileIdentifier.fileType !== secondFileIdentifier.fileType;

    let firstFileNameWithoutSuffix: string;
    let secondFileNameWithoutSuffix: string;
    if (firstFileIdentifier.fileType === FILE_TYPE.IMAGE) {
        firstFileNameWithoutSuffix = removePotentialLivePhotoSuffix(
            getFileNameWithoutExtension(firstFileIdentifier.name),
            // Note: The Google Live Photo image file can have video extension appended as suffix, passing that to removePotentialLivePhotoSuffix to remove it
            // Example: IMG_20210630_0001.mp4.jpg (Google Live Photo image file)
            getFileExtensionWithDot(secondFileIdentifier.name),
        );
        secondFileNameWithoutSuffix = removePotentialLivePhotoSuffix(
            getFileNameWithoutExtension(secondFileIdentifier.name),
        );
    } else {
        firstFileNameWithoutSuffix = removePotentialLivePhotoSuffix(
            getFileNameWithoutExtension(firstFileIdentifier.name),
        );
        secondFileNameWithoutSuffix = removePotentialLivePhotoSuffix(
            getFileNameWithoutExtension(secondFileIdentifier.name),
            getFileExtensionWithDot(firstFileIdentifier.name),
        );
    }
    if (
        haveSameCollectionID &&
        isImageOrVideo(firstFileIdentifier.fileType) &&
        isImageOrVideo(secondFileIdentifier.fileType) &&
        areNotSameFileType &&
        firstFileNameWithoutSuffix === secondFileNameWithoutSuffix
    ) {
        // checks size of live Photo assets are less than allowed limit
        // I did that based on the assumption that live photo assets ideally would not be larger than LIVE_PHOTO_ASSET_SIZE_LIMIT
        // also zipping library doesn't support stream as a input
        if (
            firstFileIdentifier.size <= LIVE_PHOTO_ASSET_SIZE_LIMIT &&
            secondFileIdentifier.size <= LIVE_PHOTO_ASSET_SIZE_LIMIT
        ) {
            return true;
        } else {
            log.error(
                `${CustomError.TOO_LARGE_LIVE_PHOTO_ASSETS} - ${JSON.stringify({
                    fileSizes: [
                        firstFileIdentifier.size,
                        secondFileIdentifier.size,
                    ],
                })}`,
            );
        }
    }
    return false;
}

function removePotentialLivePhotoSuffix(
    filenameWithoutExtension: string,
    suffix?: string,
) {
    let presentSuffix: string;
    if (filenameWithoutExtension.endsWith(UNDERSCORE_THREE)) {
        presentSuffix = UNDERSCORE_THREE;
    } else if (filenameWithoutExtension.endsWith(UNDERSCORE_HEVC)) {
        presentSuffix = UNDERSCORE_HEVC;
    } else if (
        filenameWithoutExtension.endsWith(UNDERSCORE_HEVC.toLowerCase())
    ) {
        presentSuffix = UNDERSCORE_HEVC.toLowerCase();
    } else if (suffix) {
        if (filenameWithoutExtension.endsWith(suffix)) {
            presentSuffix = suffix;
        } else if (filenameWithoutExtension.endsWith(suffix.toLowerCase())) {
            presentSuffix = suffix.toLowerCase();
        }
    }
    if (presentSuffix) {
        return filenameWithoutExtension.slice(0, presentSuffix.length * -1);
    } else {
        return filenameWithoutExtension;
    }
}

function getFileNameWithoutExtension(filename: string) {
    const lastDotPosition = filename.lastIndexOf(".");
    if (lastDotPosition === -1) return filename;
    else return filename.slice(0, lastDotPosition);
}

function getFileExtensionWithDot(filename: string) {
    const lastDotPosition = filename.lastIndexOf(".");
    if (lastDotPosition === -1) return "";
    else return filename.slice(lastDotPosition);
}

function splitFilenameAndExtension(filename: string): [string, string] {
    const lastDotPosition = filename.lastIndexOf(".");
    if (lastDotPosition === -1) return [filename, null];
    else
        return [
            filename.slice(0, lastDotPosition),
            filename.slice(lastDotPosition + 1),
        ];
}

const isImageOrVideo = (fileType: FILE_TYPE) =>
    [FILE_TYPE.IMAGE, FILE_TYPE.VIDEO].includes(fileType);
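A hand-traced example of the suffix handling in areFilesLivePhotoAssets, using the Google Live Photo filename from the code comment above (values worked out by hand, for illustration):

// image: "IMG_20210630_0001.mp4.jpg"
//   getFileNameWithoutExtension            -> "IMG_20210630_0001.mp4"
//   removePotentialLivePhotoSuffix(..., ".mp4") -> "IMG_20210630_0001"
// video: "IMG_20210630_0001.mp4"
//   getFileNameWithoutExtension            -> "IMG_20210630_0001"
// Same collection, one image + one video, equal base names, and both
// under LIVE_PHOTO_ASSET_SIZE_LIMIT => clustered as one live photo.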
@@ -1,21 +0,0 @@
import {
    FilePublicMagicMetadata,
    FilePublicMagicMetadataProps,
} from "types/file";
import {
    getNonEmptyMagicMetadataProps,
    updateMagicMetadata,
} from "utils/magicMetadata";

export async function constructPublicMagicMetadata(
    publicMagicMetadataProps: FilePublicMagicMetadataProps,
): Promise<FilePublicMagicMetadata> {
    const nonEmptyPublicMagicMetadataProps = getNonEmptyMagicMetadataProps(
        publicMagicMetadataProps,
    );

    if (Object.values(nonEmptyPublicMagicMetadataProps)?.length === 0) {
        return null;
    }
    return await updateMagicMetadata(publicMagicMetadataProps);
}
@@ -1,5 +1,8 @@
+import { encodeLivePhoto } from "@/media/live-photo";
 import { getFileNameSize } from "@/next/file";
 import log from "@/next/log";
+import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
+import { CustomError } from "@ente/shared/error";
 import {
     parseDateFromFusedDateString,
     tryToParseDateTime,
@@ -7,21 +10,35 @@ import {
} from "@ente/shared/time";
import { Remote } from "comlink";
import { FILE_TYPE } from "constants/file";
import { NULL_EXTRACTED_METADATA, NULL_LOCATION } from "constants/upload";
import {
    FILE_READER_CHUNK_SIZE,
    LIVE_PHOTO_ASSET_SIZE_LIMIT,
    NULL_EXTRACTED_METADATA,
    NULL_LOCATION,
} from "constants/upload";
import * as ffmpegService from "services/ffmpeg/ffmpegService";
import { getElectronFileStream, getFileStream } from "services/readerService";
import { getFileType } from "services/typeDetectionService";
import { FilePublicMagicMetadataProps } from "types/file";
import {
    DataStream,
    ElectronFile,
    ExtractMetadataResult,
    FileTypeInfo,
    FileWithCollection,
    LivePhotoAssets,
    Location,
    Metadata,
    ParsedExtractedMetadata,
    ParsedMetadataJSON,
    ParsedMetadataJSONMap,
} from "types/upload";
import { splitFilenameAndExtension } from "utils/file";
import { getFileTypeFromExtensionForLivePhotoClustering } from "utils/file/livePhoto";
import { getUint8ArrayView } from "../readerService";
import { getEXIFLocation, getEXIFTime, getParsedExifData } from "./exifService";
import { getFileHash } from "./hashService";
import { getVideoMetadata } from "./videoMetadataService";
import { generateThumbnail } from "./thumbnailService";
import uploadCancelService from "./uploadCancelService";
import { extractFileMetadata } from "./uploadService";

const NULL_PARSED_METADATA_JSON: ParsedMetadataJSON = {
    creationTime: null,
@@ -272,3 +289,366 @@ function getFileOriginalName(fileName: string) {
    }
    return originalName;
}

async function getVideoMetadata(file: File | ElectronFile) {
    let videoMetadata = NULL_EXTRACTED_METADATA;
    try {
        log.info(`getVideoMetadata called for ${getFileNameSize(file)}`);
        videoMetadata = await ffmpegService.extractVideoMetadata(file);
        log.info(
            `videoMetadata successfully extracted ${getFileNameSize(file)}`,
        );
    } catch (e) {
        log.error("failed to get video metadata", e);
        log.info(
            `videoMetadata extracted failed ${getFileNameSize(file)} ,${
                e.message
            } `,
        );
    }

    return videoMetadata;
}

interface LivePhotoIdentifier {
    collectionID: number;
    fileType: FILE_TYPE;
    name: string;
    size: number;
}

const UNDERSCORE_THREE = "_3";
// Note: The icloud-photos-downloader library appends _HVEC to the end of the filename in case of live photos
// https://github.com/icloud-photos-downloader/icloud_photos_downloader
const UNDERSCORE_HEVC = "_HVEC";

export async function getLivePhotoFileType(
    livePhotoAssets: LivePhotoAssets,
): Promise<FileTypeInfo> {
    const imageFileTypeInfo = await getFileType(livePhotoAssets.image);
    const videoFileTypeInfo = await getFileType(livePhotoAssets.video);
    return {
        fileType: FILE_TYPE.LIVE_PHOTO,
        exactType: `${imageFileTypeInfo.exactType}+${videoFileTypeInfo.exactType}`,
        imageType: imageFileTypeInfo.exactType,
        videoType: videoFileTypeInfo.exactType,
    };
}

export async function extractLivePhotoMetadata(
    worker: Remote<DedicatedCryptoWorker>,
    parsedMetadataJSONMap: ParsedMetadataJSONMap,
    collectionID: number,
    fileTypeInfo: FileTypeInfo,
    livePhotoAssets: LivePhotoAssets,
): Promise<ExtractMetadataResult> {
    const imageFileTypeInfo: FileTypeInfo = {
        fileType: FILE_TYPE.IMAGE,
        exactType: fileTypeInfo.imageType,
    };
    const {
        metadata: imageMetadata,
        publicMagicMetadata: imagePublicMagicMetadata,
    } = await extractFileMetadata(
        worker,
        parsedMetadataJSONMap,
        collectionID,
        imageFileTypeInfo,
        livePhotoAssets.image,
    );
    const videoHash = await getFileHash(worker, livePhotoAssets.video);
    return {
        metadata: {
            ...imageMetadata,
            title: getLivePhotoName(livePhotoAssets),
            fileType: FILE_TYPE.LIVE_PHOTO,
            imageHash: imageMetadata.hash,
            videoHash: videoHash,
            hash: undefined,
        },
        publicMagicMetadata: imagePublicMagicMetadata,
    };
}

export function getLivePhotoSize(livePhotoAssets: LivePhotoAssets) {
    return livePhotoAssets.image.size + livePhotoAssets.video.size;
}

export function getLivePhotoName(livePhotoAssets: LivePhotoAssets) {
    return livePhotoAssets.image.name;
}

export async function readLivePhoto(
    fileTypeInfo: FileTypeInfo,
    livePhotoAssets: LivePhotoAssets,
) {
    const { thumbnail, hasStaticThumbnail } = await generateThumbnail(
        livePhotoAssets.image,
        {
            exactType: fileTypeInfo.imageType,
            fileType: FILE_TYPE.IMAGE,
        },
    );

    const imageData = await getUint8ArrayView(livePhotoAssets.image);

    const videoData = await getUint8ArrayView(livePhotoAssets.video);

    return {
        filedata: await encodeLivePhoto({
            imageFileName: livePhotoAssets.image.name,
            imageData,
            videoFileName: livePhotoAssets.video.name,
            videoData,
        }),
        thumbnail,
        hasStaticThumbnail,
    };
}

export async function clusterLivePhotoFiles(mediaFiles: FileWithCollection[]) {
    try {
        const analysedMediaFiles: FileWithCollection[] = [];
        mediaFiles
            .sort((firstMediaFile, secondMediaFile) =>
                splitFilenameAndExtension(
                    firstMediaFile.file.name,
                )[0].localeCompare(
                    splitFilenameAndExtension(secondMediaFile.file.name)[0],
                ),
            )
            .sort(
                (firstMediaFile, secondMediaFile) =>
                    firstMediaFile.collectionID - secondMediaFile.collectionID,
            );
        let index = 0;
        while (index < mediaFiles.length - 1) {
            if (uploadCancelService.isUploadCancelationRequested()) {
                throw Error(CustomError.UPLOAD_CANCELLED);
            }
            const firstMediaFile = mediaFiles[index];
            const secondMediaFile = mediaFiles[index + 1];
            const firstFileType =
                getFileTypeFromExtensionForLivePhotoClustering(
                    firstMediaFile.file.name,
                );
            const secondFileType =
                getFileTypeFromExtensionForLivePhotoClustering(
                    secondMediaFile.file.name,
                );
            const firstFileIdentifier: LivePhotoIdentifier = {
                collectionID: firstMediaFile.collectionID,
                fileType: firstFileType,
                name: firstMediaFile.file.name,
                size: firstMediaFile.file.size,
            };
            const secondFileIdentifier: LivePhotoIdentifier = {
                collectionID: secondMediaFile.collectionID,
                fileType: secondFileType,
                name: secondMediaFile.file.name,
                size: secondMediaFile.file.size,
            };
            if (
                areFilesLivePhotoAssets(
                    firstFileIdentifier,
                    secondFileIdentifier,
                )
            ) {
                let imageFile: File | ElectronFile;
                let videoFile: File | ElectronFile;
                if (
                    firstFileType === FILE_TYPE.IMAGE &&
                    secondFileType === FILE_TYPE.VIDEO
                ) {
                    imageFile = firstMediaFile.file;
                    videoFile = secondMediaFile.file;
                } else {
                    videoFile = firstMediaFile.file;
                    imageFile = secondMediaFile.file;
                }
                const livePhotoLocalID = firstMediaFile.localID;
                analysedMediaFiles.push({
                    localID: livePhotoLocalID,
                    collectionID: firstMediaFile.collectionID,
                    isLivePhoto: true,
                    livePhotoAssets: {
                        image: imageFile,
                        video: videoFile,
                    },
                });
                index += 2;
            } else {
                analysedMediaFiles.push({
                    ...firstMediaFile,
                    isLivePhoto: false,
                });
                index += 1;
            }
        }
        if (index === mediaFiles.length - 1) {
            analysedMediaFiles.push({
                ...mediaFiles[index],
                isLivePhoto: false,
            });
        }
        return analysedMediaFiles;
    } catch (e) {
        if (e.message === CustomError.UPLOAD_CANCELLED) {
            throw e;
        } else {
            log.error("failed to cluster live photo", e);
            throw e;
        }
    }
}

function areFilesLivePhotoAssets(
    firstFileIdentifier: LivePhotoIdentifier,
    secondFileIdentifier: LivePhotoIdentifier,
) {
    const haveSameCollectionID =
        firstFileIdentifier.collectionID === secondFileIdentifier.collectionID;
    const areNotSameFileType =
        firstFileIdentifier.fileType !== secondFileIdentifier.fileType;

    let firstFileNameWithoutSuffix: string;
    let secondFileNameWithoutSuffix: string;
    if (firstFileIdentifier.fileType === FILE_TYPE.IMAGE) {
        firstFileNameWithoutSuffix = removePotentialLivePhotoSuffix(
            getFileNameWithoutExtension(firstFileIdentifier.name),
            // Note: The Google Live Photo image file can have video extension appended as suffix, passing that to removePotentialLivePhotoSuffix to remove it
            // Example: IMG_20210630_0001.mp4.jpg (Google Live Photo image file)
            getFileExtensionWithDot(secondFileIdentifier.name),
        );
        secondFileNameWithoutSuffix = removePotentialLivePhotoSuffix(
            getFileNameWithoutExtension(secondFileIdentifier.name),
        );
    } else {
        firstFileNameWithoutSuffix = removePotentialLivePhotoSuffix(
            getFileNameWithoutExtension(firstFileIdentifier.name),
        );
        secondFileNameWithoutSuffix = removePotentialLivePhotoSuffix(
            getFileNameWithoutExtension(secondFileIdentifier.name),
            getFileExtensionWithDot(firstFileIdentifier.name),
        );
    }
    if (
        haveSameCollectionID &&
        isImageOrVideo(firstFileIdentifier.fileType) &&
        isImageOrVideo(secondFileIdentifier.fileType) &&
        areNotSameFileType &&
        firstFileNameWithoutSuffix === secondFileNameWithoutSuffix
    ) {
        // checks size of live Photo assets are less than allowed limit
        // I did that based on the assumption that live photo assets ideally would not be larger than LIVE_PHOTO_ASSET_SIZE_LIMIT
        // also zipping library doesn't support stream as a input
        if (
            firstFileIdentifier.size <= LIVE_PHOTO_ASSET_SIZE_LIMIT &&
            secondFileIdentifier.size <= LIVE_PHOTO_ASSET_SIZE_LIMIT
        ) {
            return true;
        } else {
            log.error(
                `${CustomError.TOO_LARGE_LIVE_PHOTO_ASSETS} - ${JSON.stringify({
                    fileSizes: [
                        firstFileIdentifier.size,
                        secondFileIdentifier.size,
                    ],
                })}`,
            );
        }
    }
    return false;
}

function removePotentialLivePhotoSuffix(
    filenameWithoutExtension: string,
    suffix?: string,
) {
    let presentSuffix: string;
    if (filenameWithoutExtension.endsWith(UNDERSCORE_THREE)) {
        presentSuffix = UNDERSCORE_THREE;
    } else if (filenameWithoutExtension.endsWith(UNDERSCORE_HEVC)) {
        presentSuffix = UNDERSCORE_HEVC;
    } else if (
        filenameWithoutExtension.endsWith(UNDERSCORE_HEVC.toLowerCase())
    ) {
        presentSuffix = UNDERSCORE_HEVC.toLowerCase();
    } else if (suffix) {
        if (filenameWithoutExtension.endsWith(suffix)) {
            presentSuffix = suffix;
        } else if (filenameWithoutExtension.endsWith(suffix.toLowerCase())) {
            presentSuffix = suffix.toLowerCase();
        }
    }
    if (presentSuffix) {
        return filenameWithoutExtension.slice(0, presentSuffix.length * -1);
    } else {
        return filenameWithoutExtension;
    }
}

function getFileNameWithoutExtension(filename: string) {
    const lastDotPosition = filename.lastIndexOf(".");
    if (lastDotPosition === -1) return filename;
    else return filename.slice(0, lastDotPosition);
}

function getFileExtensionWithDot(filename: string) {
    const lastDotPosition = filename.lastIndexOf(".");
    if (lastDotPosition === -1) return "";
    else return filename.slice(lastDotPosition);
}

function splitFilenameAndExtension(filename: string): [string, string] {
    const lastDotPosition = filename.lastIndexOf(".");
    if (lastDotPosition === -1) return [filename, null];
    else
        return [
            filename.slice(0, lastDotPosition),
            filename.slice(lastDotPosition + 1),
        ];
}

const isImageOrVideo = (fileType: FILE_TYPE) =>
    [FILE_TYPE.IMAGE, FILE_TYPE.VIDEO].includes(fileType);

async function getFileHash(
    worker: Remote<DedicatedCryptoWorker>,
    file: File | ElectronFile,
) {
    try {
        log.info(`getFileHash called for ${getFileNameSize(file)}`);
        let filedata: DataStream;
        if (file instanceof File) {
            filedata = getFileStream(file, FILE_READER_CHUNK_SIZE);
        } else {
            filedata = await getElectronFileStream(
                file,
                FILE_READER_CHUNK_SIZE,
            );
        }
        const hashState = await worker.initChunkHashing();

        const streamReader = filedata.stream.getReader();
        for (let i = 0; i < filedata.chunkCount; i++) {
            const { done, value: chunk } = await streamReader.read();
            if (done) {
                throw Error(CustomError.CHUNK_LESS_THAN_EXPECTED);
            }
            await worker.hashFileChunk(hashState, Uint8Array.from(chunk));
        }
        const { done } = await streamReader.read();
        if (!done) {
            throw Error(CustomError.CHUNK_MORE_THAN_EXPECTED);
        }
        const hash = await worker.completeChunkHashing(hashState);
        log.info(
            `file hashing completed successfully ${getFileNameSize(file)}`,
        );
        return hash;
    } catch (e) {
        log.error("getFileHash failed", e);
        log.info(`file hashing failed ${getFileNameSize(file)} ,${e.message} `);
    }
}
@@ -40,8 +40,7 @@ import {
 } from "./metadataService";
 import { default as UIService, default as uiService } from "./uiService";
 import uploadCancelService from "./uploadCancelService";
-import UploadService from "./uploadService";
-import uploader from "./uploader";
+import UploadService, { uploader } from "./uploadService";

 const MAX_CONCURRENT_UPLOADS = 4;

@@ -1,14 +1,34 @@
import { convertBytesToHumanReadable, getFileNameSize } from "@/next/file";
import log from "@/next/log";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { B64EncryptionResult } from "@ente/shared/crypto/types";
import {
    B64EncryptionResult,
    EncryptionResult,
} from "@ente/shared/crypto/types";
import { CustomError, handleUploadError } from "@ente/shared/error";
import { sleep } from "@ente/shared/utils";
import { Remote } from "comlink";
import {
    FILE_READER_CHUNK_SIZE,
    MAX_FILE_SIZE_SUPPORTED,
    MULTIPART_PART_SIZE,
    UPLOAD_RESULT,
} from "constants/upload";
import { addToCollection } from "services/collectionService";
import { Collection } from "types/collection";
import { FilePublicMagicMetadataProps } from "types/file";
import {
    EnteFile,
    FilePublicMagicMetadata,
    FilePublicMagicMetadataProps,
} from "types/file";
import { EncryptedMagicMetadata } from "types/magicMetadata";
import {
    BackupedFile,
    DataStream,
    ElectronFile,
    EncryptedFile,
    ExtractMetadataResult,
    FileInMemory,
    FileTypeInfo,
    FileWithCollection,
    FileWithMetadata,
@@ -22,28 +42,37 @@ import {
    UploadURL,
    isDataStream,
} from "types/upload";
import {
    getNonEmptyMagicMetadataProps,
    updateMagicMetadata,
} from "utils/magicMetadata";
import { findMatchingExistingFiles } from "utils/upload";
import {
    getElectronFileStream,
    getFileStream,
    getUint8ArrayView,
} from "../readerService";
import { getFileType } from "../typeDetectionService";
import {
    encryptFile,
    extractFileMetadata,
    getFileSize,
    getFilename,
    readFile,
} from "./fileService";
import {
    MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT,
    clusterLivePhotoFiles,
    extractLivePhotoMetadata,
    extractMetadata,
    getClippedMetadataJSONMapKeyForFile,
    getLivePhotoFileType,
    getLivePhotoName,
    getLivePhotoSize,
    getMetadataJSONMapKeyForFile,
    readLivePhoto,
} from "./livePhotoService";
import { constructPublicMagicMetadata } from "./magicMetadataService";
} from "./metadataService";
import { uploadStreamUsingMultipart } from "./multiPartUploadService";
import publicUploadHttpClient from "./publicUploadHttpClient";
import { generateThumbnail } from "./thumbnailService";
import UIService from "./uiService";
import uploadCancelService from "./uploadCancelService";
import UploadHttpClient from "./uploadHttpClient";

/** Upload files to cloud storage */
class UploadService {
    private uploadURLs: UploadURL[] = [];
    private parsedMetadataJSONMap: ParsedMetadataJSONMap = new Map<
@@ -310,4 +339,368 @@ class UploadService {
    }
}

export default new UploadService();
/** The singleton instance of {@link UploadService}. */
const uploadService = new UploadService();

export default uploadService;

export async function constructPublicMagicMetadata(
    publicMagicMetadataProps: FilePublicMagicMetadataProps,
): Promise<FilePublicMagicMetadata> {
    const nonEmptyPublicMagicMetadataProps = getNonEmptyMagicMetadataProps(
        publicMagicMetadataProps,
    );

    if (Object.values(nonEmptyPublicMagicMetadataProps)?.length === 0) {
        return null;
    }
    return await updateMagicMetadata(publicMagicMetadataProps);
}

function getFileSize(file: File | ElectronFile) {
    return file.size;
}

function getFilename(file: File | ElectronFile) {
    return file.name;
}

async function readFile(
    fileTypeInfo: FileTypeInfo,
    rawFile: File | ElectronFile,
): Promise<FileInMemory> {
    const { thumbnail, hasStaticThumbnail } = await generateThumbnail(
        rawFile,
        fileTypeInfo,
    );
    log.info(`reading file data ${getFileNameSize(rawFile)} `);
    let filedata: Uint8Array | DataStream;
    if (!(rawFile instanceof File)) {
        if (rawFile.size > MULTIPART_PART_SIZE) {
            filedata = await getElectronFileStream(
                rawFile,
                FILE_READER_CHUNK_SIZE,
            );
        } else {
            filedata = await getUint8ArrayView(rawFile);
        }
    } else if (rawFile.size > MULTIPART_PART_SIZE) {
        filedata = getFileStream(rawFile, FILE_READER_CHUNK_SIZE);
    } else {
        filedata = await getUint8ArrayView(rawFile);
    }

    log.info(`read file data successfully ${getFileNameSize(rawFile)} `);

    return {
        filedata,
        thumbnail,
        hasStaticThumbnail,
    };
}

export async function extractFileMetadata(
    worker: Remote<DedicatedCryptoWorker>,
    parsedMetadataJSONMap: ParsedMetadataJSONMap,
    collectionID: number,
    fileTypeInfo: FileTypeInfo,
    rawFile: File | ElectronFile,
): Promise<ExtractMetadataResult> {
    let key = getMetadataJSONMapKeyForFile(collectionID, rawFile.name);
    let googleMetadata: ParsedMetadataJSON = parsedMetadataJSONMap.get(key);

    if (!googleMetadata && key.length > MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT) {
        key = getClippedMetadataJSONMapKeyForFile(collectionID, rawFile.name);
        googleMetadata = parsedMetadataJSONMap.get(key);
    }

    const { metadata, publicMagicMetadata } = await extractMetadata(
        worker,
        rawFile,
        fileTypeInfo,
    );

    for (const [key, value] of Object.entries(googleMetadata ?? {})) {
        if (!value) {
            continue;
        }
        metadata[key] = value;
    }
    return { metadata, publicMagicMetadata };
}

async function encryptFile(
    worker: Remote<DedicatedCryptoWorker>,
    file: FileWithMetadata,
    encryptionKey: string,
): Promise<EncryptedFile> {
    try {
        const { key: fileKey, file: encryptedFiledata } = await encryptFiledata(
            worker,
            file.filedata,
        );

        const { file: encryptedThumbnail } = await worker.encryptThumbnail(
            file.thumbnail,
            fileKey,
        );
        const { file: encryptedMetadata } = await worker.encryptMetadata(
            file.metadata,
            fileKey,
        );

        let encryptedPubMagicMetadata: EncryptedMagicMetadata;
        if (file.pubMagicMetadata) {
            const { file: encryptedPubMagicMetadataData } =
                await worker.encryptMetadata(
                    file.pubMagicMetadata.data,
                    fileKey,
                );
            encryptedPubMagicMetadata = {
                version: file.pubMagicMetadata.version,
                count: file.pubMagicMetadata.count,
                data: encryptedPubMagicMetadataData.encryptedData,
                header: encryptedPubMagicMetadataData.decryptionHeader,
            };
        }

        const encryptedKey = await worker.encryptToB64(fileKey, encryptionKey);

        const result: EncryptedFile = {
            file: {
                file: encryptedFiledata,
                thumbnail: encryptedThumbnail,
                metadata: encryptedMetadata,
                pubMagicMetadata: encryptedPubMagicMetadata,
                localID: file.localID,
            },
            fileKey: encryptedKey,
        };
        return result;
    } catch (e) {
        log.error("Error encrypting files", e);
        throw e;
    }
}

async function encryptFiledata(
    worker: Remote<DedicatedCryptoWorker>,
    filedata: Uint8Array | DataStream,
): Promise<EncryptionResult<Uint8Array | DataStream>> {
    return isDataStream(filedata)
        ? await encryptFileStream(worker, filedata)
        : await worker.encryptFile(filedata);
}

async function encryptFileStream(
    worker: Remote<DedicatedCryptoWorker>,
    fileData: DataStream,
) {
    const { stream, chunkCount } = fileData;
    const fileStreamReader = stream.getReader();
    const { key, decryptionHeader, pushState } =
        await worker.initChunkEncryption();
    const ref = { pullCount: 1 };
    const encryptedFileStream = new ReadableStream({
        async pull(controller) {
            const { value } = await fileStreamReader.read();
            const encryptedFileChunk = await worker.encryptFileChunk(
                value,
                pushState,
                ref.pullCount === chunkCount,
            );
            controller.enqueue(encryptedFileChunk);
            if (ref.pullCount === chunkCount) {
                controller.close();
            }
            ref.pullCount++;
        },
    });
    return {
        key,
        file: {
            decryptionHeader,
            encryptedData: { stream: encryptedFileStream, chunkCount },
        },
    };
}

interface UploadResponse {
    fileUploadResult: UPLOAD_RESULT;
    uploadedFile?: EnteFile;
}

export async function uploader(
    worker: Remote<DedicatedCryptoWorker>,
    existingFiles: EnteFile[],
    fileWithCollection: FileWithCollection,
    uploaderName: string,
): Promise<UploadResponse> {
    const { collection, localID, ...uploadAsset } = fileWithCollection;
    const fileNameSize = `${uploadService.getAssetName(
        fileWithCollection,
    )}_${convertBytesToHumanReadable(uploadService.getAssetSize(uploadAsset))}`;

    log.info(`uploader called for ${fileNameSize}`);
    UIService.setFileProgress(localID, 0);
    await sleep(0);
    let fileTypeInfo: FileTypeInfo;
    let fileSize: number;
    try {
        fileSize = uploadService.getAssetSize(uploadAsset);
        if (fileSize >= MAX_FILE_SIZE_SUPPORTED) {
            return { fileUploadResult: UPLOAD_RESULT.TOO_LARGE };
        }
        log.info(`getting filetype for ${fileNameSize}`);
        fileTypeInfo = await uploadService.getAssetFileType(uploadAsset);
        log.info(
            `got filetype for ${fileNameSize} - ${JSON.stringify(fileTypeInfo)}`,
        );

        log.info(`extracting metadata ${fileNameSize}`);
        const { metadata, publicMagicMetadata } =
            await uploadService.extractAssetMetadata(
                worker,
                uploadAsset,
                collection.id,
                fileTypeInfo,
            );

        const matchingExistingFiles = findMatchingExistingFiles(
            existingFiles,
            metadata,
        );
        log.debug(
            () =>
                `matchedFileList: ${matchingExistingFiles
                    .map((f) => `${f.id}-${f.metadata.title}`)
                    .join(",")}`,
        );
        if (matchingExistingFiles?.length) {
            const matchingExistingFilesCollectionIDs =
                matchingExistingFiles.map((e) => e.collectionID);
            log.debug(
                () =>
                    `matched file collectionIDs:${matchingExistingFilesCollectionIDs}
                    and collectionID:${collection.id}`,
            );
            if (matchingExistingFilesCollectionIDs.includes(collection.id)) {
                log.info(
                    `file already present in the collection , skipped upload for ${fileNameSize}`,
                );
                const sameCollectionMatchingExistingFile =
                    matchingExistingFiles.find(
                        (f) => f.collectionID === collection.id,
                    );
                return {
                    fileUploadResult: UPLOAD_RESULT.ALREADY_UPLOADED,
                    uploadedFile: sameCollectionMatchingExistingFile,
                };
            } else {
                log.info(
                    `same file in ${matchingExistingFilesCollectionIDs.length} collection found for ${fileNameSize} ,adding symlink`,
                );
                // any of the matching file can used to add a symlink
                const resultFile = Object.assign({}, matchingExistingFiles[0]);
                resultFile.collectionID = collection.id;
                await addToCollection(collection, [resultFile]);
                return {
                    fileUploadResult: UPLOAD_RESULT.ADDED_SYMLINK,
                    uploadedFile: resultFile,
                };
            }
        }
        if (uploadCancelService.isUploadCancelationRequested()) {
            throw Error(CustomError.UPLOAD_CANCELLED);
        }
        log.info(`reading asset ${fileNameSize}`);

        const file = await uploadService.readAsset(fileTypeInfo, uploadAsset);

        if (file.hasStaticThumbnail) {
            metadata.hasStaticThumbnail = true;
        }

        const pubMagicMetadata =
            await uploadService.constructPublicMagicMetadata({
                ...publicMagicMetadata,
                uploaderName,
            });

        const fileWithMetadata: FileWithMetadata = {
            localID,
            filedata: file.filedata,
            thumbnail: file.thumbnail,
            metadata,
            pubMagicMetadata,
        };

        if (uploadCancelService.isUploadCancelationRequested()) {
            throw Error(CustomError.UPLOAD_CANCELLED);
        }
        log.info(`encryptAsset ${fileNameSize}`);
        const encryptedFile = await uploadService.encryptAsset(
            worker,
            fileWithMetadata,
            collection.key,
        );

        if (uploadCancelService.isUploadCancelationRequested()) {
            throw Error(CustomError.UPLOAD_CANCELLED);
        }
        log.info(`uploadToBucket ${fileNameSize}`);
        const logger: Logger = (message: string) => {
            log.info(message, `fileNameSize: ${fileNameSize}`);
        };
        const backupedFile: BackupedFile = await uploadService.uploadToBucket(
            logger,
            encryptedFile.file,
        );

        const uploadFile: UploadFile = uploadService.getUploadFile(
            collection,
            backupedFile,
            encryptedFile.fileKey,
        );
        log.info(`uploading file to server ${fileNameSize}`);

        const uploadedFile = await uploadService.uploadFile(uploadFile);

        log.info(`${fileNameSize} successfully uploaded`);

        return {
            fileUploadResult: metadata.hasStaticThumbnail
                ? UPLOAD_RESULT.UPLOADED_WITH_STATIC_THUMBNAIL
                : UPLOAD_RESULT.UPLOADED,
            uploadedFile: uploadedFile,
        };
    } catch (e) {
        log.info(`upload failed for ${fileNameSize} ,error: ${e.message}`);
        if (
            e.message !== CustomError.UPLOAD_CANCELLED &&
            e.message !== CustomError.UNSUPPORTED_FILE_FORMAT
        ) {
            log.error(
                `file upload failed - ${JSON.stringify({
                    fileFormat: fileTypeInfo?.exactType,
                    fileSize: convertBytesToHumanReadable(fileSize),
                })}`,
                e,
            );
        }
        const error = handleUploadError(e);
        switch (error.message) {
            case CustomError.ETAG_MISSING:
                return { fileUploadResult: UPLOAD_RESULT.BLOCKED };
            case CustomError.UNSUPPORTED_FILE_FORMAT:
                return { fileUploadResult: UPLOAD_RESULT.UNSUPPORTED };
            case CustomError.FILE_TOO_LARGE:
                return {
                    fileUploadResult:
                        UPLOAD_RESULT.LARGER_THAN_AVAILABLE_STORAGE,
                };
            default:
                return { fileUploadResult: UPLOAD_RESULT.FAILED };
        }
    }
}
@ -1,204 +0,0 @@
import { convertBytesToHumanReadable } from "@/next/file";
import log from "@/next/log";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { CustomError, handleUploadError } from "@ente/shared/error";
import { sleep } from "@ente/shared/utils";
import { Remote } from "comlink";
import { MAX_FILE_SIZE_SUPPORTED, UPLOAD_RESULT } from "constants/upload";
import { addToCollection } from "services/collectionService";
import { EnteFile } from "types/file";
import {
    BackupedFile,
    FileTypeInfo,
    FileWithCollection,
    FileWithMetadata,
    Logger,
    UploadFile,
} from "types/upload";
import { findMatchingExistingFiles } from "utils/upload";
import UIService from "./uiService";
import uploadCancelService from "./uploadCancelService";
import {
    default as UploadService,
    default as uploadService,
} from "./uploadService";

interface UploadResponse {
    fileUploadResult: UPLOAD_RESULT;
    uploadedFile?: EnteFile;
}

export default async function uploader(
    worker: Remote<DedicatedCryptoWorker>,
    existingFiles: EnteFile[],
    fileWithCollection: FileWithCollection,
    uploaderName: string,
): Promise<UploadResponse> {
    const { collection, localID, ...uploadAsset } = fileWithCollection;
    const fileNameSize = `${UploadService.getAssetName(
        fileWithCollection,
    )}_${convertBytesToHumanReadable(UploadService.getAssetSize(uploadAsset))}`;

    log.info(`uploader called for ${fileNameSize}`);
    UIService.setFileProgress(localID, 0);
    await sleep(0);
    let fileTypeInfo: FileTypeInfo;
    let fileSize: number;
    try {
        fileSize = UploadService.getAssetSize(uploadAsset);
        if (fileSize >= MAX_FILE_SIZE_SUPPORTED) {
            return { fileUploadResult: UPLOAD_RESULT.TOO_LARGE };
        }
        log.info(`getting filetype for ${fileNameSize}`);
        fileTypeInfo = await UploadService.getAssetFileType(uploadAsset);
        log.info(
            `got filetype for ${fileNameSize} - ${JSON.stringify(fileTypeInfo)}`,
        );

        log.info(`extracting metadata ${fileNameSize}`);
        const { metadata, publicMagicMetadata } =
            await UploadService.extractAssetMetadata(
                worker,
                uploadAsset,
                collection.id,
                fileTypeInfo,
            );

        const matchingExistingFiles = findMatchingExistingFiles(
            existingFiles,
            metadata,
        );
        log.debug(
            () =>
                `matchedFileList: ${matchingExistingFiles
                    .map((f) => `${f.id}-${f.metadata.title}`)
                    .join(",")}`,
        );
        if (matchingExistingFiles?.length) {
            const matchingExistingFilesCollectionIDs =
                matchingExistingFiles.map((e) => e.collectionID);
            log.debug(
                () =>
                    `matched file collectionIDs:${matchingExistingFilesCollectionIDs}
                    and collectionID:${collection.id}`,
            );
            if (matchingExistingFilesCollectionIDs.includes(collection.id)) {
                log.info(
                    `file already present in the collection, skipped upload for ${fileNameSize}`,
                );
                const sameCollectionMatchingExistingFile =
                    matchingExistingFiles.find(
                        (f) => f.collectionID === collection.id,
                    );
                return {
                    fileUploadResult: UPLOAD_RESULT.ALREADY_UPLOADED,
                    uploadedFile: sameCollectionMatchingExistingFile,
                };
            } else {
                log.info(
                    `same file in ${matchingExistingFilesCollectionIDs.length} collections found for ${fileNameSize}, adding symlink`,
                );
                // any of the matching files can be used to add a symlink
                const resultFile = Object.assign({}, matchingExistingFiles[0]);
                resultFile.collectionID = collection.id;
                await addToCollection(collection, [resultFile]);
                return {
                    fileUploadResult: UPLOAD_RESULT.ADDED_SYMLINK,
                    uploadedFile: resultFile,
                };
            }
        }
        if (uploadCancelService.isUploadCancelationRequested()) {
            throw Error(CustomError.UPLOAD_CANCELLED);
        }
        log.info(`reading asset ${fileNameSize}`);

        const file = await UploadService.readAsset(fileTypeInfo, uploadAsset);

        if (file.hasStaticThumbnail) {
            metadata.hasStaticThumbnail = true;
        }

        const pubMagicMetadata =
            await uploadService.constructPublicMagicMetadata({
                ...publicMagicMetadata,
                uploaderName,
            });

        const fileWithMetadata: FileWithMetadata = {
            localID,
            filedata: file.filedata,
            thumbnail: file.thumbnail,
            metadata,
            pubMagicMetadata,
        };

        if (uploadCancelService.isUploadCancelationRequested()) {
            throw Error(CustomError.UPLOAD_CANCELLED);
        }
        log.info(`encryptAsset ${fileNameSize}`);
        const encryptedFile = await UploadService.encryptAsset(
            worker,
            fileWithMetadata,
            collection.key,
        );

        if (uploadCancelService.isUploadCancelationRequested()) {
            throw Error(CustomError.UPLOAD_CANCELLED);
        }
        log.info(`uploadToBucket ${fileNameSize}`);
        const logger: Logger = (message: string) => {
            log.info(message, `fileNameSize: ${fileNameSize}`);
        };
        const backupedFile: BackupedFile = await UploadService.uploadToBucket(
            logger,
            encryptedFile.file,
        );

        const uploadFile: UploadFile = UploadService.getUploadFile(
            collection,
            backupedFile,
            encryptedFile.fileKey,
        );
        log.info(`uploading file to server ${fileNameSize}`);

        const uploadedFile = await UploadService.uploadFile(uploadFile);

        log.info(`${fileNameSize} successfully uploaded`);

        return {
            fileUploadResult: metadata.hasStaticThumbnail
                ? UPLOAD_RESULT.UPLOADED_WITH_STATIC_THUMBNAIL
                : UPLOAD_RESULT.UPLOADED,
            uploadedFile: uploadedFile,
        };
    } catch (e) {
        log.info(`upload failed for ${fileNameSize}, error: ${e.message}`);
        if (
            e.message !== CustomError.UPLOAD_CANCELLED &&
            e.message !== CustomError.UNSUPPORTED_FILE_FORMAT
        ) {
            log.error(
                `file upload failed - ${JSON.stringify({
                    fileFormat: fileTypeInfo?.exactType,
                    fileSize: convertBytesToHumanReadable(fileSize),
                })}`,
                e,
            );
        }
        const error = handleUploadError(e);
        switch (error.message) {
            case CustomError.ETAG_MISSING:
                return { fileUploadResult: UPLOAD_RESULT.BLOCKED };
            case CustomError.UNSUPPORTED_FILE_FORMAT:
                return { fileUploadResult: UPLOAD_RESULT.UNSUPPORTED };
            case CustomError.FILE_TOO_LARGE:
                return {
                    fileUploadResult:
                        UPLOAD_RESULT.LARGER_THAN_AVAILABLE_STORAGE,
                };
            default:
                return { fileUploadResult: UPLOAD_RESULT.FAILED };
        }
    }
}

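Aside: the heart of the deleted uploader above is the duplicate-handling branch. As a reading aid, here is a minimal TypeScript sketch of that decision in isolation; the `ExistingFile` and `DedupDecision` types are simplified stand-ins, not types from the codebase.

```ts
type ExistingFile = { id: number; collectionID: number };

type DedupDecision =
    | { kind: "alreadyUploaded"; file: ExistingFile } // skip the upload
    | { kind: "addSymlink"; file: ExistingFile } // reuse file in a new collection
    | { kind: "upload" }; // no match, do the real upload

const decideDedup = (
    matches: ExistingFile[],
    collectionID: number,
): DedupDecision => {
    if (matches.length === 0) return { kind: "upload" };
    // A match in the target collection means the exact file is already there.
    const sameCollection = matches.find((f) => f.collectionID === collectionID);
    if (sameCollection) return { kind: "alreadyUploaded", file: sameCollection };
    // Otherwise any of the matches can serve as the source of the symlink.
    return { kind: "addSymlink", file: matches[0] };
};
```
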
@ -1,25 +0,0 @@
import { getFileNameSize } from "@/next/file";
import log from "@/next/log";
import { NULL_EXTRACTED_METADATA } from "constants/upload";
import * as ffmpegService from "services/ffmpeg/ffmpegService";
import { ElectronFile } from "types/upload";

export async function getVideoMetadata(file: File | ElectronFile) {
    let videoMetadata = NULL_EXTRACTED_METADATA;
    try {
        log.info(`getVideoMetadata called for ${getFileNameSize(file)}`);
        videoMetadata = await ffmpegService.extractVideoMetadata(file);
        log.info(
            `videoMetadata successfully extracted ${getFileNameSize(file)}`,
        );
    } catch (e) {
        log.error("failed to get video metadata", e);
        log.info(
            `videoMetadata extraction failed ${getFileNameSize(file)}, ${e.message}`,
        );
    }

    return videoMetadata;
}

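The deleted helper illustrates a fallback pattern worth noting: attempt the extraction, and on failure degrade to a well-known null value rather than failing the whole upload. A generic sketch of the same pattern; the names and the `Metadata` type here are illustrative, not from the codebase.

```ts
type Metadata = { duration?: number; creationTime?: number };

// Stand-in for NULL_EXTRACTED_METADATA: a valid-but-empty result.
const NULL_METADATA: Metadata = {};

async function extractOrNull(
    extract: () => Promise<Metadata>,
): Promise<Metadata> {
    try {
        return await extract();
    } catch (e) {
        // Log and degrade; callers never see the failure.
        console.error("metadata extraction failed, using null metadata", e);
        return NULL_METADATA;
    }
}
```
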
@ -38,8 +38,6 @@ class FolderWatcher {
     * If the file system directory corresponding to the (root) folder path of a
     * folder watch is deleted on disk, we note that down in this queue so that
     * we can ignore any file system events that come for it next.
     *
     * TODO: is this really needed? the mappings are pre-checked first.
     */
    private deletedFolderPaths: string[] = [];
    /** `true` if we are using the uploader. */

@ -57,11 +55,12 @@
     */
    private upload: (collectionName: string, filePaths: string[]) => void;
    /**
     * A function to call when we want to sync with the backend.
     * A function to call when we want to sync with the backend. It will
     * initiate the sync but will not await its completion.
     *
     * This is passed as a param to {@link init}.
     */
    private syncWithRemote: () => void;
    private requestSyncWithRemote: () => void;

    /** A helper function that debounces invocations of {@link runNextEvent}. */
    private debouncedRunNextEvent: () => void;

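For context on the `debouncedRunNextEvent` member mentioned above: a burst of file system events should trigger only one queue drain. A minimal sketch of such a trailing-edge debounce helper; the 1000 ms delay in the usage comment is an assumption, not taken from the actual code.

```ts
const debounce = <T extends unknown[]>(
    fn: (...args: T) => void,
    delayMs: number,
) => {
    let timer: ReturnType<typeof setTimeout> | undefined;
    return (...args: T) => {
        // Each call resets the timer; fn runs once the calls stop arriving.
        if (timer) clearTimeout(timer);
        timer = setTimeout(() => fn(...args), delayMs);
    };
};

// e.g. this.debouncedRunNextEvent = debounce(() => this.runNextEvent(), 1000);
```
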
@ -80,20 +79,20 @@
     */
    init(
        upload: (collectionName: string, filePaths: string[]) => void,
        syncWithRemote: () => void,
        requestSyncWithRemote: () => void,
    ) {
        this.upload = upload;
        this.syncWithRemote = syncWithRemote;
        this.requestSyncWithRemote = requestSyncWithRemote;
        this.registerListeners();
        this.syncWithDisk();
    }

    /** `true` if we are currently using the uploader */
    /** Return `true` if we are currently using the uploader. */
    isUploadRunning() {
        return this.uploadRunning;
    }

    /** `true` if syncing has been temporarily paused */
    /** Return `true` if syncing has been temporarily paused. */
    isSyncPaused() {
        return this.isPaused;
    }

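To make the new `init` contract concrete: once the watcher decides a batch of events for a folder has settled, it hands off to the two injected callbacks. The following is a hypothetical condensation of that hand-off, not the actual event-queue code.

```ts
interface WatcherCallbacks {
    upload: (collectionName: string, filePaths: string[]) => void;
    requestSyncWithRemote: () => void;
}

const handleSettledBatch = (
    callbacks: WatcherCallbacks,
    collectionName: string,
    filePaths: string[],
) => {
    // Hand the paths to the uploader; the upload itself runs elsewhere.
    callbacks.upload(collectionName, filePaths);
    // Fire-and-forget: kick off a remote sync without awaiting it.
    callbacks.requestSyncWithRemote();
};
```
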
@ -500,41 +499,37 @@
        this.eventQueue = this.eventQueue.filter(
            (event) => !event.filePath.startsWith(deletedFolderPath),
        );

        return true;
    }

    private async moveToTrash(syncedFiles: FolderWatch["syncedFiles"]) {
        try {
            const files = await getLocalFiles();
            const toTrashFilesMap = new Map<number, FolderWatchSyncedFile>();
            for (const file of syncedFiles) {
                toTrashFilesMap.set(file.uploadedFileID, file);
            }
            const filesToTrash = files.filter((file) => {
                if (toTrashFilesMap.has(file.id)) {
                    const fileToTrash = toTrashFilesMap.get(file.id);
                    if (fileToTrash.collectionID === file.collectionID) {
                        return true;
                    }
                }
            });
            const groupFilesByCollectionId =
                groupFilesBasedOnCollectionID(filesToTrash);
        const syncedFileForID = new Map<number, FolderWatchSyncedFile>();
        for (const file of syncedFiles)
            syncedFileForID.set(file.uploadedFileID, file);

            for (const [
                collectionID,
                filesToTrash,
            ] of groupFilesByCollectionId.entries()) {
                await removeFromCollection(collectionID, filesToTrash);
        const files = await getLocalFiles();
        const filesToTrash = files.filter((file) => {
            const correspondingSyncedFile = syncedFileForID.get(file.id);
            if (
                correspondingSyncedFile &&
                correspondingSyncedFile.collectionID == file.collectionID
            ) {
                return true;
            }
            this.syncWithRemote();
        } catch (e) {
            log.error("error while trashing by IDs", e);
            return false;
        });

        const filesByCollectionID = groupFilesBasedOnCollectionID(filesToTrash);
        for (const [id, files] of filesByCollectionID.entries()) {
            await removeFromCollection(id, files);
        }

        this.requestSyncWithRemote();
    }
}

/** The singleton instance of the {@link FolderWatcher}. */
/** The singleton instance of {@link FolderWatcher}. */
const watcher = new FolderWatcher();

export default watcher;

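Because the old and new versions of `moveToTrash` interleave in this hunk, here is the net shape of the refactored method reconstructed from the added lines, with dependencies passed in as parameters, types simplified, and error handling elided.

```ts
type SyncedFile = { uploadedFileID: number; collectionID: number };
type LocalFile = { id: number; collectionID: number };

async function moveToTrashSketch(
    syncedFiles: SyncedFile[],
    getLocalFiles: () => Promise<LocalFile[]>,
    removeFromCollection: (id: number, files: LocalFile[]) => Promise<void>,
    requestSyncWithRemote: () => void,
) {
    // Index the synced files by their uploaded file ID.
    const syncedFileForID = new Map<number, SyncedFile>();
    for (const file of syncedFiles)
        syncedFileForID.set(file.uploadedFileID, file);

    // Keep only local files whose (id, collectionID) pair was synced.
    const files = await getLocalFiles();
    const filesToTrash = files.filter((file) => {
        const synced = syncedFileForID.get(file.id);
        return synced !== undefined && synced.collectionID === file.collectionID;
    });

    // Group by collection, then remove each group from its collection.
    const byCollection = new Map<number, LocalFile[]>();
    for (const file of filesToTrash) {
        if (!byCollection.has(file.collectionID))
            byCollection.set(file.collectionID, []);
        byCollection.get(file.collectionID)!.push(file);
    }
    for (const [id, group] of byCollection.entries())
        await removeFromCollection(id, group);

    // Fire-and-forget remote sync.
    requestSyncWithRemote();
}
```
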
@ -92,8 +92,8 @@ export interface UploadAsset {
    isLivePhoto?: boolean;
    file?: File | ElectronFile;
    livePhotoAssets?: LivePhotoAssets;
    isElectron?: boolean;
}

export interface LivePhotoAssets {
    image: globalThis.File | ElectronFile;
    video: globalThis.File | ElectronFile;

@ -132,16 +132,16 @@ export async function downloadFile(file: EnteFile) {
    }
}

export function groupFilesBasedOnCollectionID(files: EnteFile[]) {
    const collectionWiseFiles = new Map<number, EnteFile[]>();
/** Segment the given {@link files} into lists indexed by their collection ID */
export const groupFilesBasedOnCollectionID = (files: EnteFile[]) => {
    const result = new Map<number, EnteFile[]>();
    for (const file of files) {
        if (!collectionWiseFiles.has(file.collectionID)) {
            collectionWiseFiles.set(file.collectionID, []);
        }
        collectionWiseFiles.get(file.collectionID).push(file);
        const id = file.collectionID;
        if (!result.has(id)) result.set(id, []);
        result.get(id).push(file);
    }
    return collectionWiseFiles;
}
    return result;
};

function getSelectedFileIds(selectedFiles: SelectedState) {
    const filesIDs: number[] = [];

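A quick usage sketch for the rewritten helper; the import path and the cast are illustrative, and only `collectionID` matters for the grouping.

```ts
import { EnteFile } from "types/file";
import { groupFilesBasedOnCollectionID } from "utils/file";

const files = [
    { id: 10, collectionID: 1 },
    { id: 11, collectionID: 2 },
    { id: 12, collectionID: 1 },
] as unknown as EnteFile[];

const grouped = groupFilesBasedOnCollectionID(files);
console.log(grouped.get(1)?.length); // 2 (files 10 and 12)
console.log(grouped.get(2)?.length); // 1 (file 11)
```
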