@@ -41,7 +41,6 @@ import {
 import { readStream } from "utils/native-stream";
 import * as convert from "xml-js";
 import { detectFileTypeInfoFromChunk } from "../detect-type";
-import { getFileStream } from "../readerService";
 import { tryParseEpochMicrosecondsFromFileName } from "./date";
 import publicUploadHttpClient from "./publicUploadHttpClient";
 import type { ParsedMetadataJSON } from "./takeout";
@@ -54,36 +53,50 @@ import {
 import UploadHttpClient from "./uploadHttpClient";
 import type { UploadableFile } from "./uploadManager";

-/** Allow up to 5 ENCRYPTION_CHUNK_SIZE chunks in an upload part */
-const maximumChunksPerUploadPart = 5;
-
 /**
- * The chunk size of the un-encrypted file which is read and encrypted before
- * uploading it as a single part of a multipart upload.
+ * A readable stream for a file, and its associated size and last modified time.
  *
- * ENCRYPTION_CHUNK_SIZE is 4 MB, and maximum number of chunks in a single
- * upload part is 5, so this is 20 MB.
- * */
-const multipartPartSize = ENCRYPTION_CHUNK_SIZE * maximumChunksPerUploadPart;
-
-interface DataStream {
+ * This is the in-memory representation of the `fileOrPath` type that we usually
+ * pass around. See: [Note: Reading a fileOrPath]
+ */
+interface FileStream {
+    /**
+     * A stream of the file's contents
+     *
+     * This stream is guaranteed to emit data in ENCRYPTION_CHUNK_SIZE chunks
+     * (except the last chunk which can be smaller since a file would rarely
+     * align exactly to an ENCRYPTION_CHUNK_SIZE multiple).
+     *
+     * Note: A stream can only be read once!
+     */
     stream: ReadableStream<Uint8Array>;
+    /**
+     * Number of chunks {@link stream} will emit, each ENCRYPTION_CHUNK_SIZE
+     * sized (except the last one).
+     */
     chunkCount: number;
+    /**
+     * The size in bytes of the underlying file.
+     */
+    fileSize: number;
+    /**
+     * The modification time of the file, in epoch milliseconds.
+     */
+    lastModifiedMs: number;
+    /**
+     * Set to the underlying {@link File} when we also have access to it.
+     */
+    file?: File;
 }

-function isDataStream(object: any): object is DataStream {
-    return "stream" in object;
-}
-
-interface LocalFileAttributes<T extends string | Uint8Array | DataStream> {
-    encryptedData: T;
-    decryptionHeader: string;
-}
-
-interface EncryptionResult<T extends string | Uint8Array | DataStream> {
-    file: LocalFileAttributes<T>;
-    key: string;
-}
+/**
+ * If the stream we have is more than 5 ENCRYPTION_CHUNK_SIZE chunks, then use
+ * multipart uploads for it, with each multipart-part containing 5 chunks.
+ *
+ * ENCRYPTION_CHUNK_SIZE is 4 MB, and the number of chunks in a single upload
+ * part is 5, so each part is (up to) 20 MB.
+ */
+const multipartChunksPerPart = 5;

 /** Upload files to cloud storage */
 class UploadService {
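// Editorial sketch, not part of the diff: how the numbers in the doc comment
// above fit together. ENCRYPTION_CHUNK_SIZE is assumed to be 4 MB (as that
// comment states); the values and names below are hypothetical illustrations.
const ENCRYPTION_CHUNK_SIZE = 4 * 1024 * 1024;
const multipartChunksPerPart = 5;

// A 100 MB file is emitted as ceil(100 MB / 4 MB) = 25 chunks, which would be
// grouped into ceil(25 / 5) = 5 multipart parts of (up to) 20 MB each.
const fileSize = 100 * 1024 * 1024;
const chunkCount = Math.ceil(fileSize / ENCRYPTION_CHUNK_SIZE);
const uploadPartCount = Math.ceil(chunkCount / multipartChunksPerPart);
console.log(chunkCount, uploadPartCount); // 25 5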
@@ -189,14 +202,14 @@ export const fopSize = async (fileOrPath: File | string): Promise<number> =>

 /* -- Various intermediate type used during upload -- */

-interface UploadAsset2 {
+interface UploadAsset {
     isLivePhoto?: boolean;
     fileOrPath?: File | string;
     livePhotoAssets?: LivePhotoAssets;
 }

-interface FileInMemory {
-    filedata: Uint8Array | DataStream;
+interface ThumbnailedFile {
+    fileStreamOrData: FileStream | Uint8Array;
     /** The JPEG data of the generated thumbnail */
     thumbnail: Uint8Array;
     /**
@@ -206,7 +219,7 @@ interface FileInMemory {
     hasStaticThumbnail: boolean;
 }

-interface FileWithMetadata extends Omit<FileInMemory, "hasStaticThumbnail"> {
+interface FileWithMetadata extends Omit<ThumbnailedFile, "hasStaticThumbnail"> {
     metadata: Metadata;
     localID: number;
     pubMagicMetadata: FilePublicMagicMetadata;
@@ -217,8 +230,38 @@ interface EncryptedFile {
     fileKey: B64EncryptionResult;
 }

+interface EncryptedFileStream {
+    /**
+     * A stream of the file's encrypted contents
+     *
+     * This stream is guaranteed to emit data in ENCRYPTION_CHUNK_SIZE chunks
+     * (except the last chunk which can be smaller since a file would rarely
+     * align exactly to an ENCRYPTION_CHUNK_SIZE multiple).
+     */
+    stream: ReadableStream<Uint8Array>;
+    /**
+     * Number of chunks {@link stream} will emit, each ENCRYPTION_CHUNK_SIZE
+     * sized (except the last one).
+     */
+    chunkCount: number;
+}
+
+interface LocalFileAttributes<
+    T extends string | Uint8Array | EncryptedFileStream,
+> {
+    encryptedData: T;
+    decryptionHeader: string;
+}
+
+interface EncryptionResult<
+    T extends string | Uint8Array | EncryptedFileStream,
+> {
+    file: LocalFileAttributes<T>;
+    key: string;
+}
+
 interface ProcessedFile {
-    file: LocalFileAttributes<Uint8Array | DataStream>;
+    file: LocalFileAttributes<Uint8Array | EncryptedFileStream>;
     thumbnail: LocalFileAttributes<Uint8Array>;
     metadata: LocalFileAttributes<string>;
     pubMagicMetadata: EncryptedMagicMetadata;
@@ -349,10 +392,8 @@ export const uploader = async (

     abortIfCancelled();

-    const { filedata, thumbnail, hasStaticThumbnail } = await readAsset(
-        fileTypeInfo,
-        uploadAsset,
-    );
+    const { fileStreamOrData, thumbnail, hasStaticThumbnail } =
+        await readAsset(fileTypeInfo, uploadAsset);

     if (hasStaticThumbnail) metadata.hasStaticThumbnail = true;

@@ -365,7 +406,7 @@ export const uploader = async (

     const fileWithMetadata: FileWithMetadata = {
         localID,
-        filedata,
+        fileStreamOrData,
         thumbnail,
         metadata,
         pubMagicMetadata,
@@ -427,7 +468,7 @@ export const uploader = async (
 /**
  * Read the given file or path into an in-memory representation.
  *
- * See: [Note: Reading a fileOrPath]
+ * [Note: Reading a fileOrPath]
  *
  * The file can be either a web
  * [File](https://developer.mozilla.org/en-US/docs/Web/API/File) or the absolute
@@ -473,10 +514,12 @@ export const uploader = async (
  * on the Node.js layer, but it can't then be transferred over the IPC
  * boundary). So all our operations use the path itself.
  *
- * Case 3 involves a choice on a use-case basis, since (a) such File objects
- * also have the full path (unlike the bona-fide web File objects that only have
- * a relative path), and (b) neither File nor the path is a better choice that
- * would be for all scenarios.
+ * Case 3 involves a choice on a use-case basis, since
+ *
+ * (a) unlike in the web context, such File objects also have the full path.
+ *     See: [Note: File paths when running under Electron].
+ *
+ * (b) neither File nor the path is a better choice for all use cases.
  *
  * The advantage of the File object is that the browser has already read it into
  * memory for us. The disadvantage comes in the case where we need to
@@ -486,23 +529,17 @@ export const uploader = async (
  */
 const readFileOrPath = async (
     fileOrPath: File | string,
-): Promise<{
-    dataOrStream: Uint8Array | DataStream;
-    fileSize: number;
-    lastModifiedMs: number;
-}> => {
-    let dataOrStream: Uint8Array | DataStream;
+): Promise<FileStream> => {
+    let underlyingStream: ReadableStream;
+    let file: File | undefined;
     let fileSize: number;
     let lastModifiedMs: number;

     if (fileOrPath instanceof File) {
-        const file = fileOrPath;
+        file = fileOrPath;
+        underlyingStream = file.stream();
         fileSize = file.size;
         lastModifiedMs = file.lastModified;
-        dataOrStream =
-            fileSize > multipartPartSize
-                ? getFileStream(file, ENCRYPTION_CHUNK_SIZE)
-                : new Uint8Array(await file.arrayBuffer());
     } else {
         const path = fileOrPath;
         const {
@@ -510,38 +547,13 @@ const readFileOrPath = async (
             size,
             lastModifiedMs: lm,
         } = await readStream(ensureElectron(), path);
+        underlyingStream = response.body;
         fileSize = size;
         lastModifiedMs = lm;
-        if (size > multipartPartSize) {
-            const chunkCount = Math.ceil(size / ENCRYPTION_CHUNK_SIZE);
-            dataOrStream = { stream: response.body, chunkCount };
-        } else {
-            dataOrStream = new Uint8Array(await response.arrayBuffer());
-        }
     }

-    return { dataOrStream, fileSize, lastModifiedMs };
-};
-
-/** A variant of {@readFileOrPath} that always returns an {@link DataStream}. */
-const readFileOrPathStream = async (
-    fileOrPath: File | string,
-): Promise<DataStream> => {
     const N = ENCRYPTION_CHUNK_SIZE;
-
-    let underlyingStream: ReadableStream;
-    let chunkCount: number;
-
-    if (fileOrPath instanceof File) {
-        const file = fileOrPath;
-        underlyingStream = file.stream();
-        chunkCount = Math.ceil(file.size / N);
-    } else {
-        const path = fileOrPath;
-        const { response, size } = await readStream(ensureElectron(), path);
-        underlyingStream = response.body;
-        chunkCount = Math.ceil(size / N);
-    }
+    const chunkCount = Math.ceil(fileSize / ENCRYPTION_CHUNK_SIZE);

     // Pipe the underlying stream through a transformer that emits
     // ENCRYPTION_CHUNK_SIZE-ed chunks (except the last one, which can be
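// Editorial sketch, not part of the diff: the re-chunking idea that the
// "pipe through a transformer" comment above refers to, shown standalone.
// The function and variable names here are hypothetical, not the ones used
// by the actual implementation.
const rechunk = (source: ReadableStream<Uint8Array>, chunkSize: number) => {
    let pending = new Uint8Array(0);
    const transformer = new TransformStream<Uint8Array, Uint8Array>({
        transform(data, controller) {
            // Append the incoming bytes to whatever was left over.
            const buf = new Uint8Array(pending.length + data.length);
            buf.set(pending);
            buf.set(data, pending.length);
            // Emit as many full chunkSize-d chunks as possible.
            let offset = 0;
            while (buf.length - offset >= chunkSize) {
                controller.enqueue(buf.slice(offset, offset + chunkSize));
                offset += chunkSize;
            }
            pending = buf.slice(offset);
        },
        flush(controller) {
            // The final chunk may be smaller than chunkSize.
            if (pending.length) controller.enqueue(pending);
        },
    });
    return source.pipeThrough(transformer);
};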
@@ -573,7 +585,8 @@ const readFileOrPathStream = async (
     });

     const stream = underlyingStream.pipeThrough(transformer);
-    return { stream, chunkCount };
+
+    return { stream, chunkCount, fileSize, lastModifiedMs, file };
 };

 interface ReadAssetDetailsResult {
@@ -590,7 +603,7 @@ const readAssetDetails = async ({
     isLivePhoto,
     livePhotoAssets,
     fileOrPath,
-}: UploadAsset2): Promise<ReadAssetDetailsResult> =>
+}: UploadAsset): Promise<ReadAssetDetailsResult> =>
     isLivePhoto
         ? readLivePhotoDetails(livePhotoAssets)
         : readImageOrVideoDetails(fileOrPath);
@@ -622,18 +635,14 @@ const readLivePhotoDetails = async ({ image, video }: LivePhotoAssets) => {
  * @param fileOrPath See: [Note: Reading a fileOrPath]
  */
 const readImageOrVideoDetails = async (fileOrPath: File | string) => {
-    const { dataOrStream, fileSize, lastModifiedMs } =
+    const { stream, fileSize, lastModifiedMs } =
         await readFileOrPath(fileOrPath);

     const fileTypeInfo = await detectFileTypeInfoFromChunk(async () => {
-        if (dataOrStream instanceof Uint8Array) {
-            return dataOrStream;
-        } else {
-            const reader = dataOrStream.stream.getReader();
-            const chunk = ensure((await reader.read()).value);
-            await reader.cancel();
-            return chunk;
-        }
+        const reader = stream.getReader();
+        const chunk = ensure((await reader.read()).value);
+        await reader.cancel();
+        return chunk;
     }, fopFileName(fileOrPath));

     return { fileTypeInfo, fileSize, lastModifiedMs };
@@ -660,7 +669,7 @@ interface ExtractAssetMetadataResult {
  * {@link parsedMetadataJSONMap} for the assets. Return the resultant metadatum.
  */
 const extractAssetMetadata = async (
-    { isLivePhoto, fileOrPath, livePhotoAssets }: UploadAsset2,
+    { isLivePhoto, fileOrPath, livePhotoAssets }: UploadAsset,
     fileTypeInfo: FileTypeInfo,
     lastModifiedMs: number,
     collectionID: number,
@@ -832,7 +841,7 @@ const computeHash = async (
     fileOrPath: File | string,
     worker: Remote<DedicatedCryptoWorker>,
 ) => {
-    const { stream, chunkCount } = await readFileOrPathStream(fileOrPath);
+    const { stream, chunkCount } = await readFileOrPath(fileOrPath);
     const hashState = await worker.initChunkHashing();

     const streamReader = stream.getReader();
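// Editorial sketch, not part of the diff: the general pattern that computeHash
// relies on — drain the chunked stream returned by readFileOrPath one chunk at
// a time via its reader. The helper name and callback are hypothetical.
const forEachChunk = async (
    stream: ReadableStream<Uint8Array>,
    f: (chunk: Uint8Array) => Promise<void>,
) => {
    const reader = stream.getReader();
    while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        await f(value);
    }
};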
@@ -901,8 +910,8 @@ const areFilesSameNoHash = (f: Metadata, g: Metadata) => {

 const readAsset = async (
     fileTypeInfo: FileTypeInfo,
-    { isLivePhoto, fileOrPath, livePhotoAssets }: UploadAsset2,
-) =>
+    { isLivePhoto, fileOrPath, livePhotoAssets }: UploadAsset,
+): Promise<ThumbnailedFile> =>
     isLivePhoto
         ? await readLivePhoto(livePhotoAssets, fileTypeInfo)
         : await readImageOrVideo(fileOrPath, fileTypeInfo);
@@ -911,9 +920,8 @@ const readLivePhoto = async (
     livePhotoAssets: LivePhotoAssets,
     fileTypeInfo: FileTypeInfo,
 ) => {
-    const readImage = await readFileOrPath(livePhotoAssets.image);
     const {
-        filedata: imageDataOrStream,
+        fileStreamOrData: imageFileStreamOrData,
         thumbnail,
         hasStaticThumbnail,
     } = await withThumbnail(
@@ -922,28 +930,29 @@ const readLivePhoto = async (
             extension: fileTypeInfo.imageType,
             fileType: FILE_TYPE.IMAGE,
         },
-        readImage.dataOrStream,
-        readImage.fileSize,
+        await readFileOrPath(livePhotoAssets.image),
     );
-    const readVideo = await readFileOrPath(livePhotoAssets.video);
+    const videoFileStreamOrData = await readFileOrPath(livePhotoAssets.video);

-    // We can revisit this later, but the existing code always read the entire
-    // file into memory here, and to avoid changing the rest of the scaffolding
-    // retain the same behaviour.
+    // The JS zip library that encodeLivePhoto uses does not support
+    // ReadableStreams, so pass the file (blob) if we have one, otherwise read
+    // the entire stream into memory and pass the resultant data.
     //
-    // This is a reasonable assumption too, since the videos corresponding to
-    // live photos are only a couple of seconds long.
-    const toData = async (dataOrStream: Uint8Array | DataStream) =>
-        dataOrStream instanceof Uint8Array
-            ? dataOrStream
-            : await readEntireStream(dataOrStream.stream);
+    // This is a reasonable behaviour since the videos corresponding to live
+    // photos are only a couple of seconds long (we have already done a
+    // pre-flight check to ensure their size is small in `areLivePhotoAssets`).
+    const fileOrData = async (sd: FileStream | Uint8Array) => {
+        const _fs = async ({ file, stream }: FileStream) =>
+            file ? file : await readEntireStream(stream);
+        return sd instanceof Uint8Array ? sd : _fs(sd);
+    };

     return {
-        filedata: await encodeLivePhoto({
+        fileStreamOrData: await encodeLivePhoto({
             imageFileName: fopFileName(livePhotoAssets.image),
-            imageData: await toData(imageDataOrStream),
+            imageFileOrData: await fileOrData(imageFileStreamOrData),
             videoFileName: fopFileName(livePhotoAssets.video),
-            videoData: await toData(readVideo.dataOrStream),
+            videoFileOrData: await fileOrData(videoFileStreamOrData),
         }),
         thumbnail,
         hasStaticThumbnail,
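// Editorial sketch, not part of the diff: the actual readEntireStream helper
// used by fileOrData above is imported from elsewhere in the codebase; one
// minimal way such a helper could be written is to let Response buffer the
// whole stream for us. The name below is hypothetical.
const readEntireStreamSketch = async (stream: ReadableStream<Uint8Array>) =>
    new Uint8Array(await new Response(stream).arrayBuffer());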
@@ -954,8 +963,8 @@ const readImageOrVideo = async (
     fileOrPath: File | string,
     fileTypeInfo: FileTypeInfo,
 ) => {
-    const { dataOrStream, fileSize } = await readFileOrPath(fileOrPath);
-    return withThumbnail(fileOrPath, fileTypeInfo, dataOrStream, fileSize);
+    const fileStream = await readFileOrPath(fileOrPath);
+    return withThumbnail(fileOrPath, fileTypeInfo, fileStream);
 };

 // TODO(MR): Merge with the uploader
@@ -979,17 +988,17 @@ const moduleState = new ModuleState();
  * Augment the given {@link dataOrStream} with thumbnail information.
  *
  * This is a companion method for {@link readFileOrPath}, and can be used to
- * convert the result of {@link readFileOrPath} into an {@link FileInMemory}.
+ * convert the result of {@link readFileOrPath} into a {@link ThumbnailedFile}.
  *
- * Note: The returned dataOrStream might be different from the one that we
- * provide to it.
+ * Note: The `fileStream` in the returned ThumbnailedFile may be different from
+ * the one passed to the function.
  */
 const withThumbnail = async (
     fileOrPath: File | string,
     fileTypeInfo: FileTypeInfo,
-    dataOrStream: Uint8Array | DataStream,
-    fileSize: number,
-): Promise<FileInMemory> => {
+    fileStream: FileStream,
+): Promise<ThumbnailedFile> => {
+    let fileData: Uint8Array | undefined;
     let thumbnail: Uint8Array | undefined;
     let hasStaticThumbnail = false;

@@ -998,30 +1007,16 @@ const withThumbnail = async (
         fileTypeInfo.fileType == FILE_TYPE.IMAGE &&
         moduleState.isNativeImageThumbnailGenerationNotAvailable;

-    // 1. Native thumbnail generation.
+    // 1. Native thumbnail generation using the file's path.
     if (electron && !notAvailable) {
         try {
-            if (fileOrPath instanceof File) {
-                if (dataOrStream instanceof Uint8Array) {
-                    thumbnail = await generateThumbnailNative(
-                        electron,
-                        dataOrStream,
-                        fileTypeInfo,
-                    );
-                } else {
-                    // This was large enough to need streaming, and trying to
-                    // read it into memory or copying over IPC might cause us to
-                    // run out of memory. So skip the native generation for it,
-                    // instead let it get processed by the browser based
-                    // thumbnailer (case 2).
-                }
-            } else {
-                thumbnail = await generateThumbnailNative(
-                    electron,
-                    fileOrPath,
-                    fileTypeInfo,
-                );
-            }
+            // When running in the context of our desktop app, File paths will
+            // be absolute. See: [Note: File paths when running under Electron].
+            thumbnail = await generateThumbnailNative(
+                electron,
+                fileOrPath instanceof File ? fileOrPath["path"] : fileOrPath,
+                fileTypeInfo,
+            );
         } catch (e) {
             if (e.message == CustomErrorMessage.NotAvailable) {
                 moduleState.isNativeImageThumbnailGenerationNotAvailable = true;
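// Editorial sketch, not part of the diff: the inline ternary above relies on
// the (assumed, non-standard) `path` property that Electron attaches to File
// objects obtained in its renderer process. A hypothetical helper spelling out
// the same expression with an explicit type assertion:
const pathForFileOrPath = (fileOrPath: File | string): string =>
    fileOrPath instanceof File
        ? (fileOrPath as File & { path: string }).path
        : fileOrPath;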
@@ -1034,38 +1029,47 @@ const withThumbnail = async (
     if (!thumbnail) {
         let blob: Blob | undefined;
         if (fileOrPath instanceof File) {
-            // 2. Browser based thumbnail generation for `File`s.
+            // 2. Browser based thumbnail generation for File (blobs).
             blob = fileOrPath;
         } else {
             // 3. Browser based thumbnail generation for paths.
-            if (dataOrStream instanceof Uint8Array) {
-                blob = new Blob([dataOrStream]);
+            //
+            // There are two reasons why we could get here:
+            //
+            // - We're running under Electron, but thumbnail generation is not
+            //   available. This is currently only a specific scenario for image
+            //   files on Windows.
+            //
+            // - We're running under Electron, but the thumbnail generation
+            //   otherwise failed with some exception.
+            //
+            // The fallback in this case involves reading the entire stream into
+            // memory, and passing that data across the IPC boundary in a single
+            // go (i.e. not in a streaming manner). This is risky for videos of
+            // unbounded sizes, plus that isn't the expected scenario. So
+            // instead of trying to cater for arbitrary exceptions, we only run
+            // this fallback to cover for the case where thumbnail generation
+            // was not available for an image file on Windows. If/when we add
+            // support for native thumbnailing on Windows too, this entire
+            // branch can be removed.
+
+            if (fileTypeInfo.fileType == FILE_TYPE.IMAGE) {
+                const data = await readEntireStream(fileStream.stream);
+                blob = new Blob([data]);
+
+                // The Readable stream cannot be read twice, so use the data
+                // directly for subsequent steps.
+                fileData = data;
             } else {
-                // Read the stream into memory. Don't try this fallback for huge
-                // files though lest we run out of memory.
-                if (fileSize < 100 * 1024 * 1024 /* 100 MB */) {
-                    const data = await readEntireStream(dataOrStream.stream);
-                    // The Readable stream cannot be read twice, so also
-                    // overwrite the stream with the data we read.
-                    dataOrStream = data;
-                    blob = new Blob([data]);
-                } else {
-                    // There isn't a normal scenario where this should happen.
-                    // Case 1, should've already worked, and the only known
-                    // reason it'd have been skipped is for image files on
-                    // Windows, but those should be less than 100 MB.
-                    //
-                    // So don't risk running out of memory for a case we don't
-                    // comprehend.
-                    log.error(
-                        `Not using browser based thumbnail generation fallback for large file at path ${fileOrPath}`,
-                    );
-                }
+                log.warn(
+                    `Not using browser based thumbnail generation fallback for video at path ${fileOrPath}`,
+                );
             }
         }

         try {
-            thumbnail = await generateThumbnailWeb(blob, fileTypeInfo);
+            if (blob)
+                thumbnail = await generateThumbnailWeb(blob, fileTypeInfo);
         } catch (e) {
             log.error("Web thumbnail creation failed", e);
         }
@@ -1077,7 +1081,7 @@ const withThumbnail = async (
     }

     return {
-        filedata: dataOrStream,
+        fileStreamOrData: fileData ?? fileStream,
         thumbnail,
         hasStaticThumbnail,
     };
@@ -1102,7 +1106,7 @@ const encryptFile = async (
     worker: Remote<DedicatedCryptoWorker>,
 ): Promise<EncryptedFile> => {
     const { key: fileKey, file: encryptedFiledata } = await encryptFiledata(
-        file.filedata,
+        file.fileStreamOrData,
         worker,
     );

@@ -1144,15 +1148,15 @@
 };

 const encryptFiledata = async (
-    filedata: Uint8Array | DataStream,
+    fileStreamOrData: FileStream | Uint8Array,
     worker: Remote<DedicatedCryptoWorker>,
-): Promise<EncryptionResult<Uint8Array | DataStream>> =>
-    isDataStream(filedata)
-        ? await encryptFileStream(filedata, worker)
-        : await worker.encryptFile(filedata);
+): Promise<EncryptionResult<Uint8Array | EncryptedFileStream>> =>
+    fileStreamOrData instanceof Uint8Array
+        ? await worker.encryptFile(fileStreamOrData)
+        : await encryptFileStream(fileStreamOrData, worker);

 const encryptFileStream = async (
-    fileData: DataStream,
+    fileData: FileStream,
     worker: Remote<DedicatedCryptoWorker>,
 ) => {
     const { stream, chunkCount } = fileData;
@@ -1193,27 +1197,38 @@ const uploadToBucket = async (
     try {
         let fileObjectKey: string = null;

-        if (isDataStream(file.file.encryptedData)) {
+        const encryptedData = file.file.encryptedData;
+        if (
+            !(encryptedData instanceof Uint8Array) &&
+            encryptedData.chunkCount >= multipartChunksPerPart
+        ) {
+            // We have a stream, and it is more than multipartChunksPerPart
+            // chunks long, so use a multipart upload to upload it.
             fileObjectKey = await uploadStreamUsingMultipart(
                 file.localID,
-                file.file.encryptedData,
+                encryptedData,
                 makeProgessTracker,
                 isCFUploadProxyDisabled,
                 abortIfCancelled,
             );
         } else {
+            const data =
+                encryptedData instanceof Uint8Array
+                    ? encryptedData
+                    : await readEntireStream(encryptedData.stream);
+
             const progressTracker = makeProgessTracker(file.localID);
             const fileUploadURL = await uploadService.getUploadURL();
             if (!isCFUploadProxyDisabled) {
                 fileObjectKey = await UploadHttpClient.putFileV2(
                     fileUploadURL,
-                    file.file.encryptedData as Uint8Array,
+                    data,
                     progressTracker,
                 );
             } else {
                 fileObjectKey = await UploadHttpClient.putFile(
                     fileUploadURL,
-                    file.file.encryptedData as Uint8Array,
+                    data,
                     progressTracker,
                 );
             }
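// Editorial sketch, not part of the diff: the branch above in numbers.
// Assuming 4 MB chunks and multipartChunksPerPart = 5, an encrypted stream of
// 7 chunks (~28 MB) takes the multipart path, while one of 3 chunks (~12 MB)
// is first read fully into memory and uploaded with a single PUT.
const usesMultipart = (chunkCount: number, chunksPerPart = 5) =>
    chunkCount >= chunksPerPart;
console.log(usesMultipart(7), usesMultipart(3)); // true false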
@@ -1262,13 +1277,13 @@ interface PartEtag {

 async function uploadStreamUsingMultipart(
     fileLocalID: number,
-    dataStream: DataStream,
+    dataStream: EncryptedFileStream,
     makeProgessTracker: MakeProgressTracker,
     isCFUploadProxyDisabled: boolean,
     abortIfCancelled: () => void,
 ) {
     const uploadPartCount = Math.ceil(
-        dataStream.chunkCount / maximumChunksPerUploadPart,
+        dataStream.chunkCount / multipartChunksPerPart,
     );
     const multipartUploadURLs =
         await uploadService.fetchMultipartUploadURLs(uploadPartCount);
@@ -1328,7 +1343,7 @@ async function combineChunksToFormUploadPart(
     streamReader: ReadableStreamDefaultReader<Uint8Array>,
 ) {
     const combinedChunks = [];
-    for (let i = 0; i < maximumChunksPerUploadPart; i++) {
+    for (let i = 0; i < multipartChunksPerPart; i++) {
         const { done, value: chunk } = await streamReader.read();
         if (done) {
             break;
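// Editorial sketch, not part of the diff (which is truncated above): the
// general shape of combining up to multipartChunksPerPart chunks read from the
// stream into a single buffer that becomes one upload part. The helper name
// and the Blob return type are assumptions made for illustration only.
const combinePartSketch = async (
    streamReader: ReadableStreamDefaultReader<Uint8Array>,
    chunksPerPart: number,
) => {
    const combinedChunks: Uint8Array[] = [];
    for (let i = 0; i < chunksPerPart; i++) {
        const { done, value: chunk } = await streamReader.read();
        if (done) break;
        combinedChunks.push(chunk);
    }
    return new Blob(combinedChunks);
};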