Debug
parent 064827162b
commit ce38d2beca

2 changed files with 113 additions and 85 deletions
@@ -278,6 +278,7 @@ class DownloadManagerImpl {
        if (
            file.metadata.fileType === FILE_TYPE.IMAGE ||
            // file.metadata.fileType === FILE_TYPE.VIDEO ||
            file.metadata.fileType === FILE_TYPE.LIVE_PHOTO
        ) {
            const cachedBlob = await this.fileCache?.get(cacheKey);
@@ -310,20 +311,25 @@ class DownloadManagerImpl {
        }

        const cachedBlob = await this.fileCache?.get(cacheKey);
        console.log({ a: 1, cacheKey, cachedBlob });
        let res: Response;
        if (cachedBlob) res = new Response(cachedBlob);
        else {
            res = await this.downloadClient.downloadFileStream(file);
            this.fileCache?.put(cacheKey, await res.blob());
            // this.fileCache?.put(cacheKey, await res.blob());
        }
        const reader = res.body.getReader();

        const contentLength = +res.headers.get("Content-Length") ?? 0;
        let downloadedBytes = 0;

        console.log({ a: 2, res, contentLength });

        const stream = new ReadableStream({
            start: async (controller) => {
                try {
                    console.log({ a: "start", controller });

                    const decryptionHeader = await this.cryptoWorker.fromB64(
                        file.file.decryptionHeader,
                    );
@@ -333,92 +339,102 @@ class DownloadManagerImpl {
                        decryptionHeader,
                        fileKey,
                    );
                    console.log({ a: "init", decryptionHeader, fileKey, file });

                    let data = new Uint8Array();
                    // The following function handles each data chunk
                    const push = () => {
                        // "done" is a Boolean and value a "Uint8Array"
                        reader.read().then(async ({ done, value }) => {
                            try {
                                // Is there more data to read?
                                if (!done) {
                                    downloadedBytes += value.byteLength;
                                    onDownloadProgress({
                                        loaded: downloadedBytes,
                                        total: contentLength,
                                    });
                                    const buffer = new Uint8Array(
                                        data.byteLength + value.byteLength,
                                    );
                                    buffer.set(new Uint8Array(data), 0);
                                    buffer.set(
                                        new Uint8Array(value),
                                        data.byteLength,
                                    );
                                    if (buffer.length > decryptionChunkSize) {
                                        const fileData = buffer.slice(
                                            0,
                                            decryptionChunkSize,
                                        );
                                        try {
                                            const { decryptedData } =
                                                await this.cryptoWorker.decryptFileChunk(
                                                    fileData,
                                                    pullState,
                                                );
                                            controller.enqueue(decryptedData);
                                            data = buffer.slice(
                                                decryptionChunkSize,
                                            );
                                        } catch (e) {
                                            if (
                                                e.message ===
                                                CustomError.PROCESSING_FAILED
                                            ) {
                                                log.error(
                                                    `Failed to process file ${file.id} from localID: ${file.metadata.localID} version: ${file.metadata.version} deviceFolder:${file.metadata.deviceFolder}`,
                                                    e,
                                                );
                                            }
                                            throw e;
                                        }
                                    } else {
                                        data = buffer;
                                    }
                                    push();
                                } else {
                                    if (data) {
                                        try {
                                            const { decryptedData } =
                                                await this.cryptoWorker.decryptFileChunk(
                                                    data,
                                                    pullState,
                                                );
                                            controller.enqueue(decryptedData);
                                            data = null;
                                        } catch (e) {
                                            if (
                                                e.message ===
                                                CustomError.PROCESSING_FAILED
                                            ) {
                                                log.error(
                                                    `Failed to process file ${file.id} from localID: ${file.metadata.localID} version: ${file.metadata.version} deviceFolder:${file.metadata.deviceFolder}`,
                                                    e,
                                                );
                                            }
                                            throw e;
                                        }
                                    }
                                    controller.close();
                                }
                            } catch (e) {
                                log.error("Failed to process file chunk", e);
                                controller.error(e);
                            }
                        });
                    };
                    let more = true;
                    while (more) {
                        more = false;
                        console.log({ a: "push" });

                        push();
                        // "done" is a Boolean and value a "Uint8Array"
                        const { done, value } = await reader.read();
                        console.log({ a: "read", done, value, data });

                        try {
                            // Is there more data to read?
                            if (!done) {
                                downloadedBytes += value.byteLength;
                                onDownloadProgress({
                                    loaded: downloadedBytes,
                                    total: contentLength,
                                });
                                const buffer = new Uint8Array(
                                    data.byteLength + value.byteLength,
                                );
                                buffer.set(new Uint8Array(data), 0);
                                buffer.set(
                                    new Uint8Array(value),
                                    data.byteLength,
                                );
                                console.log({
                                    a: "dec?",
                                    buffer,
                                    blen: buffer.length,
                                    decryptionChunkSize,
                                });

                                if (buffer.length > decryptionChunkSize) {
                                    const fileData = buffer.slice(
                                        0,
                                        decryptionChunkSize,
                                    );
                                    try {
                                        const { decryptedData } =
                                            await this.cryptoWorker.decryptFileChunk(
                                                fileData,
                                                pullState,
                                            );
                                        controller.enqueue(decryptedData);
                                        data =
                                            buffer.slice(decryptionChunkSize);
                                    } catch (e) {
                                        if (
                                            e.message ===
                                            CustomError.PROCESSING_FAILED
                                        ) {
                                            log.error(
                                                `Failed to process file ${file.id} from localID: ${file.metadata.localID} version: ${file.metadata.version} deviceFolder:${file.metadata.deviceFolder}`,
                                                e,
                                            );
                                        }
                                        throw e;
                                    }
                                } else {
                                    data = buffer;
                                }
                                more = true;
                            } else {
                                if (data) {
                                    try {
                                        const { decryptedData } =
                                            await this.cryptoWorker.decryptFileChunk(
                                                data,
                                                pullState,
                                            );
                                        controller.enqueue(decryptedData);
                                        data = null;
                                    } catch (e) {
                                        if (
                                            e.message ===
                                            CustomError.PROCESSING_FAILED
                                        ) {
                                            log.error(
                                                `Failed to process file ${file.id} from localID: ${file.metadata.localID} version: ${file.metadata.version} deviceFolder:${file.metadata.deviceFolder}`,
                                                e,
                                            );
                                        }
                                        throw e;
                                    }
                                }
                                controller.close();
                            }
                        } catch (e) {
                            log.error("Failed to process file chunk", e);
                            controller.error(e);
                        }
                    }
                } catch (e) {
                    log.error("Failed to process file stream", e);
                    controller.error(e);
@@ -1110,11 +1110,23 @@ const encryptFile = async (
    encryptionKey: string,
    worker: Remote<DedicatedCryptoWorker>,
): Promise<EncryptedFile> => {
    let data: Uint8Array;
    if (file.fileStreamOrData instanceof Uint8Array) {
        data = file.fileStreamOrData;
    } else {
        data = new Uint8Array(
            await new Response(file.fileStreamOrData.stream).arrayBuffer(),
        );
    }
    console.log("Uploading data", data);
    const { key: fileKey, file: encryptedFiledata } = await encryptFiledata(
        file.fileStreamOrData,
        // file.fileStreamOrData,
        data,
        worker,
    );

    console.log("Enc data", encryptedFiledata);

    const { file: encryptedThumbnail } = await worker.encryptThumbnail(
        file.thumbnail,
        fileKey,