This commit is contained in:
Manav Rathi 2024-04-23 18:36:42 +05:30
parent 2e222d9409
commit 00c9d78ec9
No known key found for this signature in database
3 changed files with 126 additions and 135 deletions

View file

@ -1,132 +0,0 @@
import { CustomError } from "@ente/shared/error";
import {
FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART,
RANDOM_PERCENTAGE_PROGRESS_FOR_PUT,
} from "constants/upload";
import { DataStream, Logger, MultipartUploadURLs } from "types/upload";
import * as convert from "xml-js";
import UIService from "./uiService";
import uploadCancelService from "./uploadCancelService";
import UploadHttpClient from "./uploadHttpClient";
import uploadService from "./uploadService";
/**
 * One uploaded part's number/ETag pair.
 *
 * These entries are accumulated while PUTting each part and are serialized
 * into the XML body sent to the multipart completion URL (the keys map
 * directly to the `Part` elements of that XML — see completeMultipartUpload).
 */
interface PartEtag {
    PartNumber: number;
    ETag: string;
}
/**
 * Return the number of upload parts needed for a file split into
 * `chunkCount` reader chunks.
 *
 * Each part bundles a fixed number of chunks; we round up so a trailing,
 * partially-filled group still gets a part of its own.
 */
function calculatePartCount(chunkCount: number) {
    return Math.ceil(chunkCount / FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART);
}
/**
 * Upload `dataStream` to remote storage as a multipart upload.
 *
 * Fetches one presigned URL per part, streams the parts up, and returns the
 * object key of the uploaded file.
 *
 * @param logger Sink for progress log lines.
 * @param fileLocalID Local identifier used to track per-file UI progress.
 * @param dataStream The stream to upload, along with its chunk count.
 * @returns The object key assigned to the completed upload.
 */
export async function uploadStreamUsingMultipart(
    logger: Logger,
    fileLocalID: number,
    dataStream: DataStream,
) {
    const uploadPartCount = calculatePartCount(dataStream.chunkCount);

    logger(`fetching ${uploadPartCount} urls for multipart upload`);
    const multipartUploadURLs =
        await uploadService.fetchMultipartUploadURLs(uploadPartCount);
    logger(`fetched ${uploadPartCount} urls for multipart upload`);

    return await uploadStreamInParts(
        logger,
        multipartUploadURLs,
        dataStream.stream,
        fileLocalID,
        uploadPartCount,
    );
}
/**
 * PUT the stream's contents, one part per presigned URL, then complete the
 * multipart upload.
 *
 * Honors upload cancellation between parts, reports per-part progress to the
 * UI, and verifies that the stream is fully consumed once all part URLs have
 * been used (extra data is treated as an error).
 *
 * @returns The object key of the completed upload.
 * @throws UPLOAD_CANCELLED if the user cancelled mid-upload;
 *         CHUNK_MORE_THAN_EXPECTED if the stream has leftover data.
 */
export async function uploadStreamInParts(
    logger: Logger,
    multipartUploadURLs: MultipartUploadURLs,
    dataStream: ReadableStream<Uint8Array>,
    fileLocalID: number,
    uploadPartCount: number,
) {
    const reader = dataStream.getReader();
    const percentPerPart = getRandomProgressPerPartUpload(uploadPartCount);
    const partEtags: PartEtag[] = [];

    logger(`uploading file in chunks`);
    for (const [i, partUploadURL] of multipartUploadURLs.partURLs.entries()) {
        // Check for cancellation before starting each part.
        if (uploadCancelService.isUploadCancelationRequested())
            throw Error(CustomError.UPLOAD_CANCELLED);

        const partData = await combineChunksToFormUploadPart(reader);
        const progressTracker = UIService.trackUploadProgress(
            fileLocalID,
            percentPerPart,
            i,
        );

        // The CF proxy variant (V2) is the default path; fall back to the
        // direct PUT when the proxy is disabled.
        const eTag = uploadService.getIsCFUploadProxyDisabled()
            ? await UploadHttpClient.putFilePart(
                  partUploadURL,
                  partData,
                  progressTracker,
              )
            : await UploadHttpClient.putFilePartV2(
                  partUploadURL,
                  partData,
                  progressTracker,
              );

        // S3 part numbers are 1-based.
        partEtags.push({ PartNumber: i + 1, ETag: eTag });
    }
    logger(`uploading file in chunks done`);

    // All part URLs have been consumed; the stream must be exhausted too.
    const { done } = await reader.read();
    if (!done) throw Error(CustomError.CHUNK_MORE_THAN_EXPECTED);

    logger(`completing multipart upload`);
    await completeMultipartUpload(partEtags, multipartUploadURLs.completeURL);
    logger(`completing multipart upload done`);

    return multipartUploadURLs.objectKey;
}
/**
 * Divide the (randomized) total PUT progress percentage evenly across the
 * given number of upload parts.
 */
function getRandomProgressPerPartUpload(uploadPartCount: number) {
    return RANDOM_PERCENTAGE_PROGRESS_FOR_PUT() / uploadPartCount;
}
/**
 * Read up to FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART chunks from
 * `streamReader` and concatenate them into a single Uint8Array forming one
 * upload part. Returns a shorter (possibly empty) array if the stream ends
 * early.
 */
async function combineChunksToFormUploadPart(
    streamReader: ReadableStreamDefaultReader<Uint8Array>,
) {
    const chunks: Uint8Array[] = [];
    for (let i = 0; i < FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART; i++) {
        const { done, value: chunk } = await streamReader.read();
        if (done) break;
        chunks.push(chunk);
    }
    // Concatenate with typed-array copies instead of pushing individual
    // bytes into a number[]. Upload parts are multiple chunks of binary
    // data, so the previous byte-by-byte copy built a huge boxed JS array
    // (one entry per byte) before converting it back to a Uint8Array.
    const combined = new Uint8Array(
        chunks.reduce((sum, chunk) => sum + chunk.length, 0),
    );
    let offset = 0;
    for (const chunk of chunks) {
        combined.set(chunk, offset);
        offset += chunk.length;
    }
    return combined;
}
/**
 * Finish a multipart upload by POSTing the collected part ETags (as an XML
 * `CompleteMultipartUpload` document) to the completion URL.
 */
async function completeMultipartUpload(
    partEtags: PartEtag[],
    completeURL: string,
) {
    // Serialize the parts list into the XML body the completion endpoint
    // expects.
    const body = convert.js2xml(
        { CompleteMultipartUpload: { Part: partEtags } },
        { compact: true, ignoreComment: true, spaces: 4 },
    );
    if (uploadService.getIsCFUploadProxyDisabled()) {
        await UploadHttpClient.completeMultipartUpload(completeURL, body);
    } else {
        await UploadHttpClient.completeMultipartUploadV2(completeURL, body);
    }
}

View file

@ -11,8 +11,10 @@ import { wait } from "@ente/shared/utils";
import { Remote } from "comlink";
import { FILE_TYPE } from "constants/file";
import {
FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART,
FILE_READER_CHUNK_SIZE,
MULTIPART_PART_SIZE,
RANDOM_PERCENTAGE_PROGRESS_FOR_PUT,
UPLOAD_RESULT,
} from "constants/upload";
import { addToCollection } from "services/collectionService";
@ -30,6 +32,7 @@ import {
FileTypeInfo,
FileWithMetadata,
Logger,
MultipartUploadURLs,
ParsedMetadataJSONMap,
ProcessedFile,
PublicUploadProps,
@ -48,6 +51,7 @@ import {
} from "utils/magicMetadata";
import { readStream } from "utils/native-stream";
import { hasFileHash } from "utils/upload";
import * as convert from "xml-js";
import { getFileStream } from "../readerService";
import { getFileType } from "../typeDetectionService";
import {
@ -55,7 +59,6 @@ import {
getLivePhotoFileType,
getLivePhotoSize,
} from "./metadataService";
import { uploadStreamUsingMultipart } from "./multiPartUploadService";
import publicUploadHttpClient from "./publicUploadHttpClient";
import {
fallbackThumbnail,
@ -909,3 +912,125 @@ function areFilesWithFileHashSame(
return existingFile.hash === newFile.hash;
}
}
/**
 * One uploaded part's number/ETag pair.
 *
 * These entries are accumulated while PUTting each part and are serialized
 * into the XML body sent to the multipart completion URL (the keys map
 * directly to the `Part` elements of that XML — see completeMultipartUpload).
 */
interface PartEtag {
    PartNumber: number;
    ETag: string;
}
/**
 * Return the number of upload parts needed for a file split into
 * `chunkCount` reader chunks.
 *
 * Each part bundles a fixed number of chunks; we round up so a trailing,
 * partially-filled group still gets a part of its own.
 */
function calculatePartCount(chunkCount: number) {
    return Math.ceil(chunkCount / FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART);
}
/**
 * Upload `dataStream` to remote storage as a multipart upload.
 *
 * Fetches one presigned URL per part, streams the parts up, and returns the
 * object key of the uploaded file.
 *
 * @param logger Sink for progress log lines.
 * @param fileLocalID Local identifier used to track per-file UI progress.
 * @param dataStream The stream to upload, along with its chunk count.
 * @returns The object key assigned to the completed upload.
 */
export async function uploadStreamUsingMultipart(
    logger: Logger,
    fileLocalID: number,
    dataStream: DataStream,
) {
    const uploadPartCount = calculatePartCount(dataStream.chunkCount);

    logger(`fetching ${uploadPartCount} urls for multipart upload`);
    const multipartUploadURLs =
        await uploadService.fetchMultipartUploadURLs(uploadPartCount);
    logger(`fetched ${uploadPartCount} urls for multipart upload`);

    return await uploadStreamInParts(
        logger,
        multipartUploadURLs,
        dataStream.stream,
        fileLocalID,
        uploadPartCount,
    );
}
/**
 * PUT the stream's contents, one part per presigned URL, then complete the
 * multipart upload.
 *
 * Honors upload cancellation between parts, reports per-part progress to the
 * UI, and verifies that the stream is fully consumed once all part URLs have
 * been used (extra data is treated as an error).
 *
 * @returns The object key of the completed upload.
 * @throws UPLOAD_CANCELLED if the user cancelled mid-upload;
 *         CHUNK_MORE_THAN_EXPECTED if the stream has leftover data.
 */
async function uploadStreamInParts(
    logger: Logger,
    multipartUploadURLs: MultipartUploadURLs,
    dataStream: ReadableStream<Uint8Array>,
    fileLocalID: number,
    uploadPartCount: number,
) {
    const reader = dataStream.getReader();
    const percentPerPart = getRandomProgressPerPartUpload(uploadPartCount);
    const partEtags: PartEtag[] = [];

    logger(`uploading file in chunks`);
    for (const [i, partUploadURL] of multipartUploadURLs.partURLs.entries()) {
        // Check for cancellation before starting each part.
        if (uploadCancelService.isUploadCancelationRequested())
            throw Error(CustomError.UPLOAD_CANCELLED);

        const partData = await combineChunksToFormUploadPart(reader);
        const progressTracker = UIService.trackUploadProgress(
            fileLocalID,
            percentPerPart,
            i,
        );

        // The CF proxy variant (V2) is the default path; fall back to the
        // direct PUT when the proxy is disabled.
        const eTag = uploadService.getIsCFUploadProxyDisabled()
            ? await UploadHttpClient.putFilePart(
                  partUploadURL,
                  partData,
                  progressTracker,
              )
            : await UploadHttpClient.putFilePartV2(
                  partUploadURL,
                  partData,
                  progressTracker,
              );

        // S3 part numbers are 1-based.
        partEtags.push({ PartNumber: i + 1, ETag: eTag });
    }
    logger(`uploading file in chunks done`);

    // All part URLs have been consumed; the stream must be exhausted too.
    const { done } = await reader.read();
    if (!done) throw Error(CustomError.CHUNK_MORE_THAN_EXPECTED);

    logger(`completing multipart upload`);
    await completeMultipartUpload(partEtags, multipartUploadURLs.completeURL);
    logger(`completing multipart upload done`);

    return multipartUploadURLs.objectKey;
}
/**
 * Divide the (randomized) total PUT progress percentage evenly across the
 * given number of upload parts.
 */
function getRandomProgressPerPartUpload(uploadPartCount: number) {
    return RANDOM_PERCENTAGE_PROGRESS_FOR_PUT() / uploadPartCount;
}
/**
 * Read up to FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART chunks from
 * `streamReader` and concatenate them into a single Uint8Array forming one
 * upload part. Returns a shorter (possibly empty) array if the stream ends
 * early.
 */
async function combineChunksToFormUploadPart(
    streamReader: ReadableStreamDefaultReader<Uint8Array>,
) {
    const chunks: Uint8Array[] = [];
    for (let i = 0; i < FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART; i++) {
        const { done, value: chunk } = await streamReader.read();
        if (done) break;
        chunks.push(chunk);
    }
    // Concatenate with typed-array copies instead of pushing individual
    // bytes into a number[]. Upload parts are multiple chunks of binary
    // data, so the previous byte-by-byte copy built a huge boxed JS array
    // (one entry per byte) before converting it back to a Uint8Array.
    const combined = new Uint8Array(
        chunks.reduce((sum, chunk) => sum + chunk.length, 0),
    );
    let offset = 0;
    for (const chunk of chunks) {
        combined.set(chunk, offset);
        offset += chunk.length;
    }
    return combined;
}
/**
 * Finish a multipart upload by POSTing the collected part ETags (as an XML
 * `CompleteMultipartUpload` document) to the completion URL.
 */
async function completeMultipartUpload(
    partEtags: PartEtag[],
    completeURL: string,
) {
    // Serialize the parts list into the XML body the completion endpoint
    // expects.
    const body = convert.js2xml(
        { CompleteMultipartUpload: { Part: partEtags } },
        { compact: true, ignoreComment: true, spaces: 4 },
    );
    if (uploadService.getIsCFUploadProxyDisabled()) {
        await UploadHttpClient.completeMultipartUpload(completeURL, body);
    } else {
        await UploadHttpClient.completeMultipartUploadV2(completeURL, body);
    }
}

View file

@ -1,10 +1,8 @@
import { basename, dirname } from "@/next/file";
import { ElectronFile } from "@/next/types/file";
import { FILE_TYPE } from "constants/file";
import { PICKED_UPLOAD_TYPE } from "constants/upload";
import isElectron from "is-electron";
import { exportMetadataDirectoryName } from "services/export";
import { EnteFile } from "types/file";
import {
FileWithCollection,
Metadata,