@@ -278,6 +278,7 @@ class DownloadManagerImpl {
         if (
             file.metadata.fileType === FILE_TYPE.IMAGE ||
+            // file.metadata.fileType === FILE_TYPE.VIDEO ||
             file.metadata.fileType === FILE_TYPE.LIVE_PHOTO
         ) {
             const cachedBlob = await this.fileCache?.get(cacheKey);
@@ -310,20 +311,25 @@ class DownloadManagerImpl {
         }

         const cachedBlob = await this.fileCache?.get(cacheKey);
+        console.log({ a: 1, cacheKey, cachedBlob });
         let res: Response;
         if (cachedBlob) res = new Response(cachedBlob);
         else {
             res = await this.downloadClient.downloadFileStream(file);
-            this.fileCache?.put(cacheKey, await res.blob());
+            // this.fileCache?.put(cacheKey, await res.blob());
         }
         const reader = res.body.getReader();

         const contentLength = +res.headers.get("Content-Length") ?? 0;
         let downloadedBytes = 0;

+        console.log({ a: 2, res, contentLength });
+
         const stream = new ReadableStream({
             start: async (controller) => {
                 try {
+                    console.log({ a: "start", controller });
+
                     const decryptionHeader = await this.cryptoWorker.fromB64(
                         file.file.decryptionHeader,
                     );
@@ -333,92 +339,102 @@ class DownloadManagerImpl {
                             decryptionHeader,
                             fileKey,
                         );
+                    console.log({ a: "init", decryptionHeader, fileKey, file });
+
                     let data = new Uint8Array();
                     // The following function handles each data chunk
-                    const push = () => {
+                    let more = true;
+                    while (more) {
+                        more = false;
+                        console.log({ a: "push" });
+
                         // "done" is a Boolean and value a "Uint8Array"
-                        reader.read().then(async ({ done, value }) => {
-                            try {
-                                // Is there more data to read?
-                                if (!done) {
-                                    downloadedBytes += value.byteLength;
-                                    onDownloadProgress({
-                                        loaded: downloadedBytes,
-                                        total: contentLength,
-                                    });
-                                    const buffer = new Uint8Array(
-                                        data.byteLength + value.byteLength,
+                        const { done, value } = await reader.read();
+                        console.log({ a: "read", done, value, data });
+
+                        try {
+                            // Is there more data to read?
+                            if (!done) {
+                                downloadedBytes += value.byteLength;
+                                onDownloadProgress({
+                                    loaded: downloadedBytes,
+                                    total: contentLength,
+                                });
+                                const buffer = new Uint8Array(
+                                    data.byteLength + value.byteLength,
+                                );
+                                buffer.set(new Uint8Array(data), 0);
+                                buffer.set(
+                                    new Uint8Array(value),
+                                    data.byteLength,
+                                );
+                                console.log({
+                                    a: "dec?",
+                                    buffer,
+                                    blen: buffer.length,
+                                    decryptionChunkSize,
+                                });
+
+                                if (buffer.length > decryptionChunkSize) {
+                                    const fileData = buffer.slice(
+                                        0,
+                                        decryptionChunkSize,
                                     );
-                                    buffer.set(new Uint8Array(data), 0);
-                                    buffer.set(
-                                        new Uint8Array(value),
-                                        data.byteLength,
-                                    );
-                                    if (buffer.length > decryptionChunkSize) {
-                                        const fileData = buffer.slice(
-                                            0,
-                                            decryptionChunkSize,
-                                        );
-                                        try {
-                                            const { decryptedData } =
-                                                await this.cryptoWorker.decryptFileChunk(
-                                                    fileData,
-                                                    pullState,
-                                                );
-                                            controller.enqueue(decryptedData);
-                                            data =
-                                                buffer.slice(
-                                                    decryptionChunkSize,
-                                                );
-                                        } catch (e) {
-                                            if (
-                                                e.message ===
-                                                CustomError.PROCESSING_FAILED
-                                            ) {
-                                                log.error(
-                                                    `Failed to process file ${file.id} from localID: ${file.metadata.localID} version: ${file.metadata.version} deviceFolder:${file.metadata.deviceFolder}`,
-                                                    e,
-                                                );
-                                            }
-                                            throw e;
+                                    try {
+                                        const { decryptedData } =
+                                            await this.cryptoWorker.decryptFileChunk(
+                                                fileData,
+                                                pullState,
+                                            );
+                                        controller.enqueue(decryptedData);
+                                        data =
+                                            buffer.slice(decryptionChunkSize);
+                                    } catch (e) {
+                                        if (
+                                            e.message ===
+                                            CustomError.PROCESSING_FAILED
+                                        ) {
+                                            log.error(
+                                                `Failed to process file ${file.id} from localID: ${file.metadata.localID} version: ${file.metadata.version} deviceFolder:${file.metadata.deviceFolder}`,
+                                                e,
+                                            );
                                         }
-                                    } else {
-                                        data = buffer;
+                                        throw e;
                                     }
-                                    push();
                                 } else {
-                                    if (data) {
-                                        try {
-                                            const { decryptedData } =
-                                                await this.cryptoWorker.decryptFileChunk(
-                                                    data,
-                                                    pullState,
-                                                );
-                                            controller.enqueue(decryptedData);
-                                            data = null;
-                                        } catch (e) {
-                                            if (
-                                                e.message ===
-                                                CustomError.PROCESSING_FAILED
-                                            ) {
-                                                log.error(
-                                                    `Failed to process file ${file.id} from localID: ${file.metadata.localID} version: ${file.metadata.version} deviceFolder:${file.metadata.deviceFolder}`,
-                                                    e,
-                                                );
-                                            }
-                                            throw e;
+                                    data = buffer;
+                                }
+                                more = true;
+                            } else {
+                                if (data) {
+                                    try {
+                                        const { decryptedData } =
+                                            await this.cryptoWorker.decryptFileChunk(
+                                                data,
+                                                pullState,
+                                            );
+                                        controller.enqueue(decryptedData);
+                                        data = null;
+                                    } catch (e) {
+                                        if (
+                                            e.message ===
+                                            CustomError.PROCESSING_FAILED
+                                        ) {
+                                            log.error(
+                                                `Failed to process file ${file.id} from localID: ${file.metadata.localID} version: ${file.metadata.version} deviceFolder:${file.metadata.deviceFolder}`,
+                                                e,
+                                            );
                                         }
+                                        throw e;
                                     }
-                                    controller.close();
                                 }
-                            } catch (e) {
-                                log.error("Failed to process file chunk", e);
-                                controller.error(e);
+                                controller.close();
                             }
-                        });
-                    };
-
-                    push();
+                        } catch (e) {
+                            log.error("Failed to process file chunk", e);
+                            controller.error(e);
+                        }
+                    }
                 } catch (e) {
                     log.error("Failed to process file stream", e);
                     controller.error(e);
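
Aside (not part of the patch): the `+` side above replaces the recursive `push()` / `reader.read().then(...)` chain with an `await`-driven loop inside the stream's `start` callback. A minimal standalone sketch of that looping pattern, with a hypothetical `transformChunk` standing in for the per-chunk work that `cryptoWorker.decryptFileChunk` does in the real code:

// Illustrative sketch only: pull from a ReadableStream with an awaited loop
// (as the new code does) instead of a recursive .then() callback chain.
// `transformChunk` is a hypothetical stand-in for per-chunk decryption.
const pumpTransformed = (
    body: ReadableStream<Uint8Array>,
    transformChunk: (chunk: Uint8Array) => Promise<Uint8Array>,
): ReadableStream<Uint8Array> => {
    const reader = body.getReader();
    return new ReadableStream({
        start: async (controller) => {
            try {
                while (true) {
                    // Await each read directly; no callback recursion needed.
                    const { done, value } = await reader.read();
                    if (done) break;
                    controller.enqueue(await transformChunk(value));
                }
                controller.close();
            } catch (e) {
                controller.error(e);
            }
        },
    });
};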