[desktop] [web] Upload refactoring (#1527)
commit cf576c07f7
74 changed files with 2615 additions and 3061 deletions
@@ -38,9 +38,9 @@ import {
    updateAndRestart,
    updateOnNextRestart,
} from "./services/app-update";
import { convertToJPEG, generateImageThumbnail } from "./services/convert";
import { ffmpegExec } from "./services/ffmpeg";
import { getDirFiles } from "./services/fs";
import { convertToJPEG, generateImageThumbnail } from "./services/image";
import {
    clipImageEmbedding,
    clipTextEmbeddingIfAvailable,
@@ -141,14 +141,18 @@ export const attachIPCHandlers = () => {

    // - Conversion

    ipcMain.handle("convertToJPEG", (_, fileName, imageData) =>
        convertToJPEG(fileName, imageData),
    ipcMain.handle("convertToJPEG", (_, imageData: Uint8Array) =>
        convertToJPEG(imageData),
    );

    ipcMain.handle(
        "generateImageThumbnail",
        (_, inputFile, maxDimension, maxSize) =>
            generateImageThumbnail(inputFile, maxDimension, maxSize),
        (
            _,
            dataOrPath: Uint8Array | string,
            maxDimension: number,
            maxSize: number,
        ) => generateImageThumbnail(dataOrPath, maxDimension, maxSize),
    );

    ipcMain.handle(
@@ -156,10 +160,10 @@ export const attachIPCHandlers = () => {
        (
            _,
            command: string[],
            inputDataOrPath: Uint8Array | string,
            outputFileName: string,
            dataOrPath: Uint8Array | string,
            outputFileExtension: string,
            timeoutMS: number,
        ) => ffmpegExec(command, inputDataOrPath, outputFileName, timeoutMS),
        ) => ffmpegExec(command, dataOrPath, outputFileExtension, timeoutMS),
    );

    // - ML
@@ -1,241 +0,0 @@
/** @file Image conversions */

import { existsSync } from "fs";
import fs from "node:fs/promises";
import path from "path";
import { CustomErrorMessage, ElectronFile } from "../../types/ipc";
import log from "../log";
import { writeStream } from "../stream";
import { execAsync, isDev } from "../utils-electron";
import { deleteTempFile, makeTempFilePath } from "../utils-temp";

export const convertToJPEG = async (
    fileName: string,
    imageData: Uint8Array,
): Promise<Uint8Array> => {
    const inputFilePath = await makeTempFilePath(fileName);
    const outputFilePath = await makeTempFilePath("output.jpeg");

    // Construct the command first, it may throw NotAvailable on win32.
    const command = convertToJPEGCommand(inputFilePath, outputFilePath);

    try {
        await fs.writeFile(inputFilePath, imageData);
        await execAsync(command);
        return new Uint8Array(await fs.readFile(outputFilePath));
    } finally {
        try {
            deleteTempFile(outputFilePath);
            deleteTempFile(inputFilePath);
        } catch (e) {
            log.error("Ignoring error when cleaning up temp files", e);
        }
    }
};

const convertToJPEGCommand = (
    inputFilePath: string,
    outputFilePath: string,
) => {
    switch (process.platform) {
        case "darwin":
            return [
                "sips",
                "-s",
                "format",
                "jpeg",
                inputFilePath,
                "--out",
                outputFilePath,
            ];
        case "linux":
            return [
                imageMagickPath(),
                inputFilePath,
                "-quality",
                "100%",
                outputFilePath,
            ];
        default: // "win32"
            throw new Error(CustomErrorMessage.NotAvailable);
    }
};

/** Path to the Linux image-magick executable bundled with our app */
const imageMagickPath = () =>
    path.join(isDev ? "build" : process.resourcesPath, "image-magick");

const IMAGE_MAGICK_PLACEHOLDER = "IMAGE_MAGICK";
const MAX_DIMENSION_PLACEHOLDER = "MAX_DIMENSION";
const SAMPLE_SIZE_PLACEHOLDER = "SAMPLE_SIZE";
const INPUT_PATH_PLACEHOLDER = "INPUT";
const OUTPUT_PATH_PLACEHOLDER = "OUTPUT";
const QUALITY_PLACEHOLDER = "QUALITY";

const MAX_QUALITY = 70;
const MIN_QUALITY = 50;

const SIPS_THUMBNAIL_GENERATE_COMMAND_TEMPLATE = [
    "sips",
    "-s",
    "format",
    "jpeg",
    "-s",
    "formatOptions",
    QUALITY_PLACEHOLDER,
    "-Z",
    MAX_DIMENSION_PLACEHOLDER,
    INPUT_PATH_PLACEHOLDER,
    "--out",
    OUTPUT_PATH_PLACEHOLDER,
];

const IMAGE_MAGICK_THUMBNAIL_GENERATE_COMMAND_TEMPLATE = [
    IMAGE_MAGICK_PLACEHOLDER,
    INPUT_PATH_PLACEHOLDER,
    "-auto-orient",
    "-define",
    `jpeg:size=${SAMPLE_SIZE_PLACEHOLDER}x${SAMPLE_SIZE_PLACEHOLDER}`,
    "-thumbnail",
    `${MAX_DIMENSION_PLACEHOLDER}x${MAX_DIMENSION_PLACEHOLDER}>`,
    "-unsharp",
    "0x.5",
    "-quality",
    QUALITY_PLACEHOLDER,
    OUTPUT_PATH_PLACEHOLDER,
];

export async function generateImageThumbnail(
    inputFile: File | ElectronFile,
    maxDimension: number,
    maxSize: number,
): Promise<Uint8Array> {
    let inputFilePath = null;
    let createdTempInputFile = null;
    try {
        if (process.platform == "win32")
            throw Error(
                CustomErrors.WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED,
            );
        if (!existsSync(inputFile.path)) {
            const tempFilePath = await makeTempFilePath(inputFile.name);
            await writeStream(tempFilePath, await inputFile.stream());
            inputFilePath = tempFilePath;
            createdTempInputFile = true;
        } else {
            inputFilePath = inputFile.path;
        }
        const thumbnail = await generateImageThumbnail_(
            inputFilePath,
            maxDimension,
            maxSize,
        );
        return thumbnail;
    } finally {
        if (createdTempInputFile) {
            try {
                await deleteTempFile(inputFilePath);
            } catch (e) {
                log.error(`Failed to deleteTempFile ${inputFilePath}`, e);
            }
        }
    }
}

async function generateImageThumbnail_(
    inputFilePath: string,
    width: number,
    maxSize: number,
): Promise<Uint8Array> {
    let tempOutputFilePath: string;
    let quality = MAX_QUALITY;
    try {
        tempOutputFilePath = await makeTempFilePath("thumb.jpeg");
        let thumbnail: Uint8Array;
        do {
            await execAsync(
                constructThumbnailGenerationCommand(
                    inputFilePath,
                    tempOutputFilePath,
                    width,
                    quality,
                ),
            );
            thumbnail = new Uint8Array(await fs.readFile(tempOutputFilePath));
            quality -= 10;
        } while (thumbnail.length > maxSize && quality > MIN_QUALITY);
        return thumbnail;
    } catch (e) {
        log.error("Failed to generate image thumbnail", e);
        throw e;
    } finally {
        try {
            await fs.rm(tempOutputFilePath, { force: true });
        } catch (e) {
            log.error(
                `Failed to remove tempOutputFile ${tempOutputFilePath}`,
                e,
            );
        }
    }
}

function constructThumbnailGenerationCommand(
    inputFilePath: string,
    tempOutputFilePath: string,
    maxDimension: number,
    quality: number,
) {
    let thumbnailGenerationCmd: string[];
    if (process.platform == "darwin") {
        thumbnailGenerationCmd = SIPS_THUMBNAIL_GENERATE_COMMAND_TEMPLATE.map(
            (cmdPart) => {
                if (cmdPart === INPUT_PATH_PLACEHOLDER) {
                    return inputFilePath;
                }
                if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
                    return tempOutputFilePath;
                }
                if (cmdPart === MAX_DIMENSION_PLACEHOLDER) {
                    return maxDimension.toString();
                }
                if (cmdPart === QUALITY_PLACEHOLDER) {
                    return quality.toString();
                }
                return cmdPart;
            },
        );
    } else if (process.platform == "linux") {
        thumbnailGenerationCmd =
            IMAGE_MAGICK_THUMBNAIL_GENERATE_COMMAND_TEMPLATE.map((cmdPart) => {
                if (cmdPart === IMAGE_MAGICK_PLACEHOLDER) {
                    return imageMagickPath();
                }
                if (cmdPart === INPUT_PATH_PLACEHOLDER) {
                    return inputFilePath;
                }
                if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
                    return tempOutputFilePath;
                }
                if (cmdPart.includes(SAMPLE_SIZE_PLACEHOLDER)) {
                    return cmdPart.replaceAll(
                        SAMPLE_SIZE_PLACEHOLDER,
                        (2 * maxDimension).toString(),
                    );
                }
                if (cmdPart.includes(MAX_DIMENSION_PLACEHOLDER)) {
                    return cmdPart.replaceAll(
                        MAX_DIMENSION_PLACEHOLDER,
                        maxDimension.toString(),
                    );
                }
                if (cmdPart === QUALITY_PLACEHOLDER) {
                    return quality.toString();
                }
                return cmdPart;
            });
    } else {
        throw new Error(`Unsupported OS ${process.platform}`);
    }
    return thumbnailGenerationCmd;
}
@@ -1,30 +1,32 @@
import pathToFfmpeg from "ffmpeg-static";
import fs from "node:fs/promises";
import log from "../log";
import { withTimeout } from "../utils";
import { execAsync } from "../utils-electron";
import { deleteTempFile, makeTempFilePath } from "../utils-temp";

/* Duplicated in the web app's code (used by the WASM FFmpeg implementation). */
const ffmpegPathPlaceholder = "FFMPEG";
const inputPathPlaceholder = "INPUT";
const outputPathPlaceholder = "OUTPUT";

/**
 * Run a ffmpeg command
 * Run a FFmpeg command
 *
 * [Note: ffmpeg in Electron]
 * [Note: FFmpeg in Electron]
 *
 * There is a wasm build of ffmpeg, but that is currently 10-20 times slower
 * There is a wasm build of FFmpeg, but that is currently 10-20 times slower
 * than the native build. That is slow enough to be unusable for our purposes.
 * https://ffmpegwasm.netlify.app/docs/performance
 *
 * So the alternative is to bundle a ffmpeg binary with our app. e.g.
 * So the alternative is to bundle a FFmpeg executable binary with our app. e.g.
 *
 *     yarn add fluent-ffmpeg ffmpeg-static ffprobe-static
 *
 * (we only use ffmpeg-static, the rest are mentioned for completeness' sake).
 *
 * Interestingly, Electron already bundles an ffmpeg library (it comes from the
 * ffmpeg fork maintained by Chromium).
 * Interestingly, Electron already bundles a binary FFmpeg library (it comes
 * from the ffmpeg fork maintained by Chromium).
 * https://chromium.googlesource.com/chromium/third_party/ffmpeg
 * https://stackoverflow.com/questions/53963672/what-version-of-ffmpeg-is-bundled-inside-electron
 *
@@ -37,8 +39,8 @@ const outputPathPlaceholder = "OUTPUT";
 */
export const ffmpegExec = async (
    command: string[],
    inputDataOrPath: Uint8Array | string,
    outputFileName: string,
    dataOrPath: Uint8Array | string,
    outputFileExtension: string,
    timeoutMS: number,
): Promise<Uint8Array> => {
    // TODO (MR): This currently copies files for both input and output. This
@@ -47,18 +49,18 @@ export const ffmpegExec = async (

    let inputFilePath: string;
    let isInputFileTemporary: boolean;
    if (typeof inputDataOrPath == "string") {
        inputFilePath = inputDataOrPath;
        isInputFileTemporary = false;
    } else {
        inputFilePath = await makeTempFilePath("input" /* arbitrary */);
    if (dataOrPath instanceof Uint8Array) {
        inputFilePath = await makeTempFilePath();
        isInputFileTemporary = true;
        await fs.writeFile(inputFilePath, inputDataOrPath);
    } else {
        inputFilePath = dataOrPath;
        isInputFileTemporary = false;
    }

    let outputFilePath: string | undefined;
    const outputFilePath = await makeTempFilePath(outputFileExtension);
    try {
        outputFilePath = await makeTempFilePath(outputFileName);
        if (dataOrPath instanceof Uint8Array)
            await fs.writeFile(inputFilePath, dataOrPath);

        const cmd = substitutePlaceholders(
            command,
@@ -71,8 +73,12 @@ export const ffmpegExec = async (

        return fs.readFile(outputFilePath);
    } finally {
        if (isInputFileTemporary) await deleteTempFile(inputFilePath);
        if (outputFilePath) await deleteTempFile(outputFilePath);
        try {
            if (isInputFileTemporary) await deleteTempFile(inputFilePath);
            await deleteTempFile(outputFilePath);
        } catch (e) {
            log.error("Ignoring error when cleaning up temp files", e);
        }
    }
};

@@ -96,7 +102,7 @@ const substitutePlaceholders = (
/**
 * Return the path to the `ffmpeg` binary.
 *
 * At runtime, the ffmpeg binary is present in a path like (macOS example):
 * At runtime, the FFmpeg binary is present in a path like (macOS example):
 * `ente.app/Contents/Resources/app.asar.unpacked/node_modules/ffmpeg-static/ffmpeg`
 */
const ffmpegBinaryPath = () => {
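The body of substitutePlaceholders itself is outside this diff. As a hedged sketch only (assuming the placeholder constants and ffmpegBinaryPath defined in this file), the substitution the surrounding code relies on amounts to a simple map over the command tokens:

    // Hedged sketch; not the actual implementation from the commit.
    const substitutePlaceholdersSketch = (
        command: string[],
        inputFilePath: string,
        outputFilePath: string,
    ) =>
        command.map((segment) => {
            if (segment == ffmpegPathPlaceholder) return ffmpegBinaryPath();
            if (segment == inputPathPlaceholder) return inputFilePath;
            if (segment == outputPathPlaceholder) return outputFilePath;
            return segment;
        });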
desktop/src/main/services/image.ts (new file, 160 lines)
@@ -0,0 +1,160 @@
/** @file Image format conversions and thumbnail generation */

import fs from "node:fs/promises";
import path from "path";
import { CustomErrorMessage } from "../../types/ipc";
import log from "../log";
import { execAsync, isDev } from "../utils-electron";
import { deleteTempFile, makeTempFilePath } from "../utils-temp";

export const convertToJPEG = async (imageData: Uint8Array) => {
    const inputFilePath = await makeTempFilePath();
    const outputFilePath = await makeTempFilePath("jpeg");

    // Construct the command first, it may throw NotAvailable on win32.
    const command = convertToJPEGCommand(inputFilePath, outputFilePath);

    try {
        await fs.writeFile(inputFilePath, imageData);
        await execAsync(command);
        return new Uint8Array(await fs.readFile(outputFilePath));
    } finally {
        try {
            await deleteTempFile(inputFilePath);
            await deleteTempFile(outputFilePath);
        } catch (e) {
            log.error("Ignoring error when cleaning up temp files", e);
        }
    }
};

const convertToJPEGCommand = (
    inputFilePath: string,
    outputFilePath: string,
) => {
    switch (process.platform) {
        case "darwin":
            return [
                "sips",
                "-s",
                "format",
                "jpeg",
                inputFilePath,
                "--out",
                outputFilePath,
            ];

        case "linux":
            return [
                imageMagickPath(),
                inputFilePath,
                "-quality",
                "100%",
                outputFilePath,
            ];

        default: // "win32"
            throw new Error(CustomErrorMessage.NotAvailable);
    }
};

/** Path to the Linux image-magick executable bundled with our app */
const imageMagickPath = () =>
    path.join(isDev ? "build" : process.resourcesPath, "image-magick");

export const generateImageThumbnail = async (
    dataOrPath: Uint8Array | string,
    maxDimension: number,
    maxSize: number,
): Promise<Uint8Array> => {
    let inputFilePath: string;
    let isInputFileTemporary: boolean;
    if (dataOrPath instanceof Uint8Array) {
        inputFilePath = await makeTempFilePath();
        isInputFileTemporary = true;
    } else {
        inputFilePath = dataOrPath;
        isInputFileTemporary = false;
    }

    const outputFilePath = await makeTempFilePath("jpeg");

    // Construct the command first, it may throw `NotAvailable` on win32.
    let quality = 70;
    let command = generateImageThumbnailCommand(
        inputFilePath,
        outputFilePath,
        maxDimension,
        quality,
    );

    try {
        if (dataOrPath instanceof Uint8Array)
            await fs.writeFile(inputFilePath, dataOrPath);

        let thumbnail: Uint8Array;
        do {
            await execAsync(command);
            thumbnail = new Uint8Array(await fs.readFile(outputFilePath));
            quality -= 10;
            command = generateImageThumbnailCommand(
                inputFilePath,
                outputFilePath,
                maxDimension,
                quality,
            );
        } while (thumbnail.length > maxSize && quality > 50);
        return thumbnail;
    } finally {
        try {
            if (isInputFileTemporary) await deleteTempFile(inputFilePath);
            await deleteTempFile(outputFilePath);
        } catch (e) {
            log.error("Ignoring error when cleaning up temp files", e);
        }
    }
};

const generateImageThumbnailCommand = (
    inputFilePath: string,
    outputFilePath: string,
    maxDimension: number,
    quality: number,
) => {
    switch (process.platform) {
        case "darwin":
            return [
                "sips",
                "-s",
                "format",
                "jpeg",
                "-s",
                "formatOptions",
                `${quality}`,
                "-Z",
                `${maxDimension}`,
                inputFilePath,
                "--out",
                outputFilePath,
            ];

        case "linux":
            return [
                imageMagickPath(),
                inputFilePath,
                "-auto-orient",
                "-define",
                `jpeg:size=${2 * maxDimension}x${2 * maxDimension}`,
                "-thumbnail",
                `${maxDimension}x${maxDimension}>`,
                "-unsharp",
                "0x.5",
                "-quality",
                `${quality}`,
                outputFilePath,
            ];

        default: // "win32"
            throw new Error(CustomErrorMessage.NotAvailable);
    }
};
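A minimal usage sketch of the new module (hypothetical call site; real callers reach these functions through the IPC handlers shown earlier) that asks for a thumbnail whose longest edge is 720px and which is, best effort, at most 100 KB:

    // Hypothetical call site, for illustration only.
    const thumbnailFor = (dataOrPath: Uint8Array | string) =>
        generateImageThumbnail(dataOrPath, 720, 100 * 1024);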
@@ -20,7 +20,7 @@ const cachedCLIPImageSession = makeCachedInferenceSession(
);

export const clipImageEmbedding = async (jpegImageData: Uint8Array) => {
    const tempFilePath = await makeTempFilePath("");
    const tempFilePath = await makeTempFilePath();
    const imageStream = new Response(jpegImageData.buffer).body;
    await writeStream(tempFilePath, imageStream);
    try {
@@ -1,15 +1,16 @@
/**
 * @file stream data to-from renderer using a custom protocol handler.
 */
import { protocol } from "electron/main";
import { net, protocol } from "electron/main";
import { createWriteStream, existsSync } from "node:fs";
import fs from "node:fs/promises";
import { Readable } from "node:stream";
import { pathToFileURL } from "node:url";
import log from "./log";

/**
 * Register a protocol handler that we use for streaming large files between the
 * main process (node) and the renderer process (browser) layer.
 * main (Node.js) and renderer (Chromium) processes.
 *
 * [Note: IPC streams]
 *
@@ -17,11 +18,14 @@ import log from "./log";
 * across IPC. And passing the entire contents of the file is not feasible for
 * large video files because of the memory pressure the copying would entail.
 *
 * As an alternative, we register a custom protocol handler that can provided a
 * As an alternative, we register a custom protocol handler that provides a
 * bi-directional stream. The renderer can stream data to the node side by
 * streaming the request. The node side can stream to the renderer side by
 * streaming the response.
 *
 * The stream is not full duplex - while both reads and writes can be streamed,
 * they need to be streamed separately.
 *
 * See also: [Note: Transferring large amount of data over IPC]
 *
 * Depends on {@link registerPrivilegedSchemes}.
@@ -29,29 +33,60 @@
export const registerStreamProtocol = () => {
    protocol.handle("stream", async (request: Request) => {
        const url = request.url;
        // The request URL contains the command to run as the host, and the
        // pathname of the file as the path. For example,
        //
        //     stream://write/path/to/file
        //            host-pathname-----
        //
        const { host, pathname } = new URL(url);
        // Convert e.g. "%20" to spaces.
        const path = decodeURIComponent(pathname);
        switch (host) {
            /* stream://write/path/to/file */
            /*        host-pathname----- */
            case "read":
                return handleRead(path);
            case "write":
                try {
                    await writeStream(path, request.body);
                    return new Response("", { status: 200 });
                } catch (e) {
                    log.error(`Failed to write stream for ${url}`, e);
                    return new Response(
                        `Failed to write stream: ${e.message}`,
                        { status: 500 },
                    );
                }
                return handleWrite(path, request);
            default:
                return new Response("", { status: 404 });
        }
    });
};

const handleRead = async (path: string) => {
    try {
        const res = await net.fetch(pathToFileURL(path).toString());
        if (res.ok) {
            // net.fetch defaults to text/plain, which might be fine
            // in practice, but as an extra precaution indicate that
            // this is binary data.
            res.headers.set("Content-Type", "application/octet-stream");

            // Add the file's size as the Content-Length header.
            const fileSize = (await fs.stat(path)).size;
            res.headers.set("Content-Length", `${fileSize}`);
        }
        return res;
    } catch (e) {
        log.error(`Failed to read stream at ${path}`, e);
        return new Response(`Failed to read stream: ${e.message}`, {
            status: 500,
        });
    }
};

const handleWrite = async (path: string, request: Request) => {
    try {
        await writeStream(path, request.body);
        return new Response("", { status: 200 });
    } catch (e) {
        log.error(`Failed to write stream to ${path}`, e);
        return new Response(`Failed to write stream: ${e.message}`, {
            status: 500,
        });
    }
};

/**
 * Write a (web) ReadableStream to a file at the given {@link filePath}.
 *
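On the renderer side these handlers are reached with ordinary fetch calls against stream:// URLs. A hedged sketch only, since the actual call sites are not part of this hunk:

    // Hedged sketch of renderer-side usage; not code from this commit.
    const readViaStream = async (path: string) =>
        (await fetch(`stream://read${path}`)).body;

    const writeViaStream = (path: string, body: ReadableStream) =>
        // Chromium requires `duplex` when streaming a request body; it is
        // not yet in the DOM lib typings, hence the cast.
        fetch(`stream://write${path}`, {
            method: "POST",
            body,
            duplex: "half",
        } as RequestInit);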
@@ -13,35 +13,34 @@ const enteTempDirPath = async () => {
    return result;
};

const randomPrefix = (length: number) => {
    const CHARACTERS =
/** Generate a random string suitable for being used as a file name prefix */
const randomPrefix = () => {
    const alphabet =
        "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";

    let result = "";
    const charactersLength = CHARACTERS.length;
    for (let i = 0; i < length; i++) {
        result += CHARACTERS.charAt(
            Math.floor(Math.random() * charactersLength),
        );
    }
    for (let i = 0; i < 10; i++)
        result += alphabet[Math.floor(Math.random() * alphabet.length)];
    return result;
};

/**
 * Return the path to a temporary file with the given {@link formatSuffix}.
 * Return the path to a temporary file with the given {@link suffix}.
 *
 * The function returns the path to a file in the system temp directory (in an
 * Ente specific folder therein) with a random prefix and the given
 * {@link formatSuffix}. It ensures that there is no existing file with the same
 * name already.
 * Ente specific folder therein) with a random prefix and an (optional)
 * {@link extension}.
 *
 * It ensures that there is no existing item with the same name already.
 *
 * Use {@link deleteTempFile} to remove this file when you're done.
 */
export const makeTempFilePath = async (formatSuffix: string) => {
export const makeTempFilePath = async (extension?: string) => {
    const tempDir = await enteTempDirPath();
    const suffix = extension ? "." + extension : "";
    let result: string;
    do {
        result = path.join(tempDir, randomPrefix(10) + "-" + formatSuffix);
        result = path.join(tempDir, randomPrefix() + suffix);
    } while (existsSync(result));
    return result;
};
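A minimal sketch of the intended usage pattern (hypothetical helper, mirroring the call sites elsewhere in this commit; assumes fs from node:fs/promises):

    // Hypothetical helper, for illustration only.
    const withTempJPEG = async (imageData: Uint8Array) => {
        // e.g. <tmpdir>/ente/aB3xYz9QwR.jpeg
        const inputFilePath = await makeTempFilePath("jpeg");
        try {
            await fs.writeFile(inputFilePath, imageData);
            // ... invoke some external command on inputFilePath ...
        } finally {
            await deleteTempFile(inputFilePath);
        }
    };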
@@ -124,35 +124,32 @@ const fsIsDir = (dirPath: string): Promise<boolean> =>

// - Conversion

const convertToJPEG = (
    fileName: string,
    imageData: Uint8Array,
): Promise<Uint8Array> =>
    ipcRenderer.invoke("convertToJPEG", fileName, imageData);
const convertToJPEG = (imageData: Uint8Array): Promise<Uint8Array> =>
    ipcRenderer.invoke("convertToJPEG", imageData);

const generateImageThumbnail = (
    inputFile: File | ElectronFile,
    dataOrPath: Uint8Array | string,
    maxDimension: number,
    maxSize: number,
): Promise<Uint8Array> =>
    ipcRenderer.invoke(
        "generateImageThumbnail",
        inputFile,
        dataOrPath,
        maxDimension,
        maxSize,
    );

const ffmpegExec = (
    command: string[],
    inputDataOrPath: Uint8Array | string,
    outputFileName: string,
    dataOrPath: Uint8Array | string,
    outputFileExtension: string,
    timeoutMS: number,
): Promise<Uint8Array> =>
    ipcRenderer.invoke(
        "ffmpegExec",
        command,
        inputDataOrPath,
        outputFileName,
        dataOrPath,
        outputFileExtension,
        timeoutMS,
    );

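Seen from the web layer, the refactored surface takes raw bytes or a filesystem path instead of File/ElectronFile objects. A hedged sketch of a call through this preload bridge (assuming it is exposed to the renderer as an Electron object, as elsewhere in this PR):

    // Hypothetical call site, for illustration only.
    const makeThumbnail = async (electron: Electron, imageData: Uint8Array) => {
        const jpegData = await electron.convertToJPEG(imageData);
        return electron.generateImageThumbnail(jpegData, 720, 100 * 1024);
    };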
@@ -1,24 +0,0 @@
export enum CollectionType {
    folder = "folder",
    favorites = "favorites",
    album = "album",
    uncategorized = "uncategorized",
}

export enum CollectionSummaryType {
    folder = "folder",
    favorites = "favorites",
    album = "album",
    archive = "archive",
    trash = "trash",
    uncategorized = "uncategorized",
    all = "all",
    outgoingShare = "outgoingShare",
    incomingShareViewer = "incomingShareViewer",
    incomingShareCollaborator = "incomingShareCollaborator",
    sharedOnlyViaLink = "sharedOnlyViaLink",
    archived = "archived",
    defaultHidden = "defaultHidden",
    hiddenItems = "hiddenItems",
    pinned = "pinned",
}
@@ -1,20 +0,0 @@
export enum FILE_TYPE {
    IMAGE,
    VIDEO,
    LIVE_PHOTO,
    OTHERS,
}

export const RAW_FORMATS = [
    "heic",
    "rw2",
    "tiff",
    "arw",
    "cr3",
    "cr2",
    "raf",
    "nef",
    "psd",
    "dng",
    "tif",
];
@@ -1,6 +1,20 @@
import { FILE_TYPE } from "constants/file";
import { FILE_TYPE } from "@/media/file";
import { FileTypeInfo } from "types/upload";

export const RAW_FORMATS = [
    "heic",
    "rw2",
    "tiff",
    "arw",
    "cr3",
    "cr2",
    "raf",
    "nef",
    "psd",
    "dng",
    "tif",
];

// list of formats that were missed by type-detection for some files.
export const WHITELISTED_FILE_FORMATS: FileTypeInfo[] = [
    { fileType: FILE_TYPE.IMAGE, exactType: "jpeg", mimeType: "image/jpeg" },
@@ -1,7 +1,7 @@
import { FILE_TYPE } from "@/media/file";
import log from "@/next/log";
import PairedSuccessfullyOverlay from "components/PairedSuccessfullyOverlay";
import { PhotoAuditorium } from "components/PhotoAuditorium";
import { FILE_TYPE } from "constants/file";
import { useRouter } from "next/router";
import { useEffect, useState } from "react";
import {
@@ -1,8 +1,8 @@
import { FILE_TYPE } from "@/media/file";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { CustomError } from "@ente/shared/error";
import HTTPService from "@ente/shared/network/HTTPService";
import { getCastFileURL } from "@ente/shared/network/api";
import { FILE_TYPE } from "constants/file";
import { EnteFile } from "types/file";
import { generateStreamFromArrayBuffer } from "utils/file";

@@ -1,14 +0,0 @@
import { convertBytesToHumanReadable } from "@/next/file";
import log from "@/next/log";

export async function getUint8ArrayView(file: Blob): Promise<Uint8Array> {
    try {
        return new Uint8Array(await file.arrayBuffer());
    } catch (e) {
        log.error(
            `Failed to read file blob of size ${convertBytesToHumanReadable(file.size)}`,
            e,
        );
        throw e;
    }
}
@@ -1,14 +1,13 @@
import { nameAndExtension } from "@/next/file";
import { FILE_TYPE } from "@/media/file";
import { convertBytesToHumanReadable, nameAndExtension } from "@/next/file";
import log from "@/next/log";
import { CustomError } from "@ente/shared/error";
import { FILE_TYPE } from "constants/file";
import {
    KNOWN_NON_MEDIA_FORMATS,
    WHITELISTED_FILE_FORMATS,
} from "constants/upload";
import FileType from "file-type";
import { FileTypeInfo } from "types/upload";
import { getUint8ArrayView } from "./readerService";

const TYPE_VIDEO = "video";
const TYPE_IMAGE = "image";
@@ -66,6 +65,18 @@ async function extractFileType(file: File) {
    return getFileTypeFromBuffer(fileDataChunk);
}

export async function getUint8ArrayView(file: Blob): Promise<Uint8Array> {
    try {
        return new Uint8Array(await file.arrayBuffer());
    } catch (e) {
        log.error(
            `Failed to read file blob of size ${convertBytesToHumanReadable(file.size)}`,
            e,
        );
        throw e;
    }
}

async function getFileTypeFromBuffer(buffer: Uint8Array) {
    const result = await FileType.fromBuffer(buffer);
    if (!result?.mime) {
@@ -1,4 +1,3 @@
import { CollectionSummaryType, CollectionType } from "constants/collection";
import { EnteFile } from "types/file";
import {
    EncryptedMagicMetadata,
@@ -20,6 +19,13 @@ export interface CollectionUser {
    role: COLLECTION_ROLE;
}

enum CollectionType {
    folder = "folder",
    favorites = "favorites",
    album = "album",
    uncategorized = "uncategorized",
}

export interface EncryptedCollection {
    id: number;
    owner: CollectionUser;
@@ -32,7 +38,7 @@ export interface EncryptedCollection {
    type: CollectionType;
    attributes: collectionAttributes;
    sharees: CollectionUser[];
    publicURLs?: PublicURL[];
    publicURLs?: unknown;
    updationTime: number;
    isDeleted: boolean;
    magicMetadata: EncryptedMagicMetadata;
@@ -61,54 +67,6 @@ export interface Collection
// define a method on Collection interface to return the sync key as collection.id-time
// this is used to store the last sync time of a collection in local storage

export interface PublicURL {
    url: string;
    deviceLimit: number;
    validTill: number;
    enableDownload: boolean;
    enableCollect: boolean;
    passwordEnabled: boolean;
    nonce?: string;
    opsLimit?: number;
    memLimit?: number;
}

export interface UpdatePublicURL {
    collectionID: number;
    disablePassword?: boolean;
    enableDownload?: boolean;
    enableCollect?: boolean;
    validTill?: number;
    deviceLimit?: number;
    passHash?: string;
    nonce?: string;
    opsLimit?: number;
    memLimit?: number;
}

export interface CreatePublicAccessTokenRequest {
    collectionID: number;
    validTill?: number;
    deviceLimit?: number;
}

export interface EncryptedFileKey {
    id: number;
    encryptedKey: string;
    keyDecryptionNonce: string;
}

export interface AddToCollectionRequest {
    collectionID: number;
    files: EncryptedFileKey[];
}

export interface MoveToCollectionRequest {
    fromCollectionID: number;
    toCollectionID: number;
    files: EncryptedFileKey[];
}

export interface collectionAttributes {
    encryptedPath?: string;
    pathDecryptionNonce?: string;
@@ -116,11 +74,6 @@ export interface collectionAttributes {

export type CollectionToFileMap = Map<number, EnteFile>;

export interface RemoveFromCollectionRequest {
    collectionID: number;
    fileIDs: number[];
}

export interface CollectionMagicMetadataProps {
    visibility?: VISIBILITY_STATE;
    subType?: SUB_TYPE;
@@ -144,16 +97,4 @@ export interface CollectionPublicMagicMetadataProps {
export type CollectionPublicMagicMetadata =
    MagicMetadataCore<CollectionPublicMagicMetadataProps>;

export interface CollectionSummary {
    id: number;
    name: string;
    type: CollectionSummaryType;
    coverFile: EnteFile;
    latestFile: EnteFile;
    fileCount: number;
    updationTime: number;
    order?: number;
}

export type CollectionSummaries = Map<number, CollectionSummary>;
export type CollectionFilesCount = Map<number, number>;
@@ -64,25 +64,6 @@ export interface EnteFile
    isConverted?: boolean;
}

export interface TrashRequest {
    items: TrashRequestItems[];
}

export interface TrashRequestItems {
    fileID: number;
    collectionID: number;
}

export interface FileWithUpdatedMagicMetadata {
    file: EnteFile;
    updatedMagicMetadata: FileMagicMetadata;
}

export interface FileWithUpdatedPublicMagicMetadata {
    file: EnteFile;
    updatedPublicMagicMetadata: FilePublicMagicMetadata;
}

export interface FileMagicMetadataProps {
    visibility?: VISIBILITY_STATE;
    filePaths?: string[];
web/apps/cast/src/types/upload.ts (new file, 25 lines)
@@ -0,0 +1,25 @@
import { FILE_TYPE } from "@/media/file";

export interface Metadata {
    title: string;
    creationTime: number;
    modificationTime: number;
    latitude: number;
    longitude: number;
    fileType: FILE_TYPE;
    hasStaticThumbnail?: boolean;
    hash?: string;
    imageHash?: string;
    videoHash?: string;
    localID?: number;
    version?: number;
    deviceFolder?: string;
}

export interface FileTypeInfo {
    fileType: FILE_TYPE;
    exactType: string;
    mimeType?: string;
    imageType?: string;
    videoType?: string;
}
@@ -1,107 +0,0 @@
import {
    B64EncryptionResult,
    LocalFileAttributes,
} from "@ente/shared/crypto/types";
import { FILE_TYPE } from "constants/file";
import {
    FilePublicMagicMetadata,
    FilePublicMagicMetadataProps,
    MetadataFileAttributes,
    S3FileAttributes,
} from "types/file";
import { EncryptedMagicMetadata } from "types/magicMetadata";

export interface DataStream {
    stream: ReadableStream<Uint8Array>;
    chunkCount: number;
}

export function isDataStream(object: any): object is DataStream {
    return "stream" in object;
}

export type Logger = (message: string) => void;

export interface Metadata {
    title: string;
    creationTime: number;
    modificationTime: number;
    latitude: number;
    longitude: number;
    fileType: FILE_TYPE;
    hasStaticThumbnail?: boolean;
    hash?: string;
    imageHash?: string;
    videoHash?: string;
    localID?: number;
    version?: number;
    deviceFolder?: string;
}

export interface FileTypeInfo {
    fileType: FILE_TYPE;
    exactType: string;
    mimeType?: string;
    imageType?: string;
    videoType?: string;
}

export interface UploadURL {
    url: string;
    objectKey: string;
}

export interface FileInMemory {
    filedata: Uint8Array | DataStream;
    thumbnail: Uint8Array;
    hasStaticThumbnail: boolean;
}

export interface FileWithMetadata
    extends Omit<FileInMemory, "hasStaticThumbnail"> {
    metadata: Metadata;
    localID: number;
    pubMagicMetadata: FilePublicMagicMetadata;
}

export interface EncryptedFile {
    file: ProcessedFile;
    fileKey: B64EncryptionResult;
}
export interface ProcessedFile {
    file: LocalFileAttributes<Uint8Array | DataStream>;
    thumbnail: LocalFileAttributes<Uint8Array>;
    metadata: LocalFileAttributes<string>;
    pubMagicMetadata: EncryptedMagicMetadata;
    localID: number;
}
export interface BackupedFile {
    file: S3FileAttributes;
    thumbnail: S3FileAttributes;
    metadata: MetadataFileAttributes;
    pubMagicMetadata: EncryptedMagicMetadata;
}

export interface UploadFile extends BackupedFile {
    collectionID: number;
    encryptedKey: string;
    keyDecryptionNonce: string;
}

export interface ParsedExtractedMetadata {
    location: Location;
    creationTime: number;
    width: number;
    height: number;
}

export interface PublicUploadProps {
    token: string;
    passwordToken: string;
    accessedThroughSharedURL: boolean;
}

export interface ExtractMetadataResult {
    metadata: Metadata;
    publicMagicMetadata: FilePublicMagicMetadataProps;
}
@@ -1,7 +1,8 @@
import { FILE_TYPE } from "@/media/file";
import { decodeLivePhoto } from "@/media/live-photo";
import log from "@/next/log";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { FILE_TYPE, RAW_FORMATS } from "constants/file";
import { RAW_FORMATS } from "constants/upload";
import CastDownloadManager from "services/castDownloadManager";
import { getFileType } from "services/typeDetectionService";
import {
@@ -103,18 +104,6 @@ export function isRawFileFromFileName(fileName: string) {
    return false;
}

/**
 * [Note: File name for local EnteFile objects]
 *
 * The title property in a file's metadata is the original file's name. The
 * metadata of a file cannot be edited. So if later on the file's name is
 * changed, then the edit is stored in the `editedName` property of the public
 * metadata of the file.
 *
 * This function merges these edits onto the file object that we use locally.
 * Effectively, post this step, the file's metadata.title can be used in lieu of
 * its filename.
 */
export function mergeMetadata(files: EnteFile[]): EnteFile[] {
    return files.map((file) => {
        if (file.pubMagicMetadata?.data.editedTime) {
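The note above describes a merge that, sketched under the property names it mentions, looks roughly like this; the full mergeMetadata body is truncated in this hunk, so this is a hedged sketch rather than the verbatim implementation:

    // Hedged sketch of the merge described in the note above.
    const mergeEditsIntoMetadata = (file: EnteFile): EnteFile => {
        const edits = file.pubMagicMetadata?.data;
        if (edits?.editedTime) file.metadata.creationTime = edits.editedTime;
        if (edits?.editedName) file.metadata.title = edits.editedName;
        return file;
    };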
@@ -5,10 +5,9 @@ import {
    MobileDateTimePicker,
} from "@mui/x-date-pickers";
import { AdapterDateFns } from "@mui/x-date-pickers/AdapterDateFns";
import {
    MAX_EDITED_CREATION_TIME,
    MIN_EDITED_CREATION_TIME,
} from "constants/file";

const MIN_EDITED_CREATION_TIME = new Date(1800, 0, 1);
const MAX_EDITED_CREATION_TIME = new Date();

interface Props {
    initialValue?: Date;
@@ -1,3 +1,4 @@
import { FILE_TYPE } from "@/media/file";
import log from "@/next/log";
import { PHOTOS_PAGES } from "@ente/shared/constants/pages";
import { CustomError } from "@ente/shared/error";
@@ -5,7 +6,6 @@ import useMemoSingleThreaded from "@ente/shared/hooks/useMemoSingleThreaded";
import { styled } from "@mui/material";
import PhotoViewer from "components/PhotoViewer";
import { TRASH_SECTION } from "constants/collection";
import { FILE_TYPE } from "constants/file";
import { useRouter } from "next/router";
import { GalleryContext } from "pages/gallery";
import PhotoSwipe from "photoswipe";
@@ -3,7 +3,6 @@ import { FlexWrapper } from "@ente/shared/components/Container";
import Close from "@mui/icons-material/Close";
import Done from "@mui/icons-material/Done";
import { Box, IconButton, TextField } from "@mui/material";
import { MAX_CAPTION_SIZE } from "constants/file";
import { Formik } from "formik";
import { t } from "i18next";
import { useState } from "react";
@@ -12,6 +11,8 @@ import { changeCaption, updateExistingFilePubMetadata } from "utils/file";
import * as Yup from "yup";
import { SmallLoadingSpinner } from "../styledComponents/SmallLoadingSpinner";

export const MAX_CAPTION_SIZE = 5000;

interface formValues {
    caption: string;
}
@@ -1,10 +1,10 @@
import { FILE_TYPE } from "@/media/file";
import { nameAndExtension } from "@/next/file";
import log from "@/next/log";
import { FlexWrapper } from "@ente/shared/components/Container";
import PhotoOutlined from "@mui/icons-material/PhotoOutlined";
import VideocamOutlined from "@mui/icons-material/VideocamOutlined";
import Box from "@mui/material/Box";
import { FILE_TYPE } from "constants/file";
import { useEffect, useState } from "react";
import { EnteFile } from "types/file";
import { makeHumanReadableStorage } from "utils/billing";
@@ -16,6 +16,7 @@ import {
    isSupportedRawFormat,
} from "utils/file";

import { FILE_TYPE } from "@/media/file";
import { FlexWrapper } from "@ente/shared/components/Container";
import EnteSpinner from "@ente/shared/components/EnteSpinner";
import AlbumOutlined from "@mui/icons-material/AlbumOutlined";
@@ -34,7 +35,6 @@ import InfoIcon from "@mui/icons-material/InfoOutlined";
import ReplayIcon from "@mui/icons-material/Replay";
import ZoomInOutlinedIcon from "@mui/icons-material/ZoomInOutlined";
import { Box, Button, styled } from "@mui/material";
import { FILE_TYPE } from "constants/file";
import {
    defaultLivePhotoDefaultOptions,
    photoSwipeV4Events,
@@ -1,8 +1,8 @@
import { FILE_TYPE } from "@/media/file";
import { Overlay } from "@ente/shared/components/Container";
import PhotoOutlined from "@mui/icons-material/PhotoOutlined";
import PlayCircleOutlineOutlined from "@mui/icons-material/PlayCircleOutlineOutlined";
import { styled } from "@mui/material";
import { FILE_TYPE } from "constants/file";

interface Iprops {
    fileType: FILE_TYPE;
@@ -1,6 +1,6 @@
import { FILE_TYPE } from "@/media/file";
import CloseIcon from "@mui/icons-material/Close";
import { IconButton } from "@mui/material";
import { FILE_TYPE } from "constants/file";
import { t } from "i18next";
import memoize from "memoize-one";
import pDebounce from "p-debounce";
@@ -1,4 +1,3 @@
import log from "@/next/log";
import ChevronRight from "@mui/icons-material/ChevronRight";
import ScienceIcon from "@mui/icons-material/Science";
import { Box, DialogProps, Stack, Typography } from "@mui/material";
@@ -37,13 +36,10 @@ export default function AdvancedSettings({ open, onClose, onRootClose }) {
        }
    };

    const toggleCFProxy = async () => {
        try {
            appContext.setIsCFProxyDisabled(!appContext.isCFProxyDisabled);
        } catch (e) {
            log.error("toggleFasterUpload failed", e);
        }
    const toggleCFProxy = () => {
        appContext.setIsCFProxyDisabled(!appContext.isCFProxyDisabled);
    };

    const [indexingStatus, setIndexingStatus] = useState<CLIPIndexingStatus>({
        indexed: 0,
        pending: 0,
@@ -1,5 +1,6 @@
import { ensureElectron } from "@/next/electron";
import log from "@/next/log";
import { ElectronFile } from "@/next/types/file";
import type { CollectionMapping, Electron } from "@/next/types/ipc";
import { CustomError } from "@ente/shared/error";
import { isPromise } from "@ente/shared/utils";
@@ -32,11 +33,7 @@ import {
    SetLoading,
    UploadTypeSelectorIntent,
} from "types/gallery";
import {
    ElectronFile,
    FileWithCollection,
    type FileWithCollection2,
} from "types/upload";
import { FileWithCollection, type FileWithCollection2 } from "types/upload";
import {
    InProgressUpload,
    SegregatedFinishedUploads,
@@ -492,7 +489,7 @@ export default function Uploader(props: Props) {
            });
            throw e;
        }
        await waitInQueueAndUploadFiles2(
        await waitInQueueAndUploadFiles(
            filesWithCollectionToUpload,
            collections,
        );
@@ -520,24 +517,6 @@ export default function Uploader(props: Props) {
        await currentUploadPromise.current;
    };

    const waitInQueueAndUploadFiles2 = async (
        filesWithCollectionToUploadIn: FileWithCollection2[],
        collections: Collection[],
        uploaderName?: string,
    ) => {
        const currentPromise = currentUploadPromise.current;
        currentUploadPromise.current = waitAndRun(
            currentPromise,
            async () =>
                await uploadFiles2(
                    filesWithCollectionToUploadIn,
                    collections,
                    uploaderName,
                ),
        );
        await currentUploadPromise.current;
    };

    const preUploadAction = async () => {
        uploadManager.prepareForNewUpload();
        setUploadProgressView(true);
@@ -607,63 +586,6 @@ export default function Uploader(props: Props) {
        }
    };

    const uploadFiles2 = async (
        filesWithCollectionToUploadIn: FileWithCollection2[],
        collections: Collection[],
        uploaderName?: string,
    ) => {
        try {
            log.info("uploadFiles called");
            preUploadAction();
            if (
                electron &&
                !isPendingDesktopUpload.current &&
                !watcher.isUploadRunning()
            ) {
                await setToUploadCollection(collections);
                if (zipPaths.current) {
                    await electron.setPendingUploadFiles(
                        "zips",
                        zipPaths.current,
                    );
                    zipPaths.current = null;
                }
                await electron.setPendingUploadFiles(
                    "files",
                    filesWithCollectionToUploadIn.map(
                        ({ file }) => (file as ElectronFile).path,
                    ),
                );
            }
            const shouldCloseUploadProgress =
                await uploadManager.queueFilesForUpload2(
                    filesWithCollectionToUploadIn,
                    collections,
                    uploaderName,
                );
            if (shouldCloseUploadProgress) {
                closeUploadProgress();
            }
            if (isElectron()) {
                if (watcher.isUploadRunning()) {
                    await watcher.allFileUploadsDone(
                        filesWithCollectionToUploadIn,
                        collections,
                    );
                } else if (watcher.isSyncPaused()) {
                    // resume the service after user upload is done
                    watcher.resumePausedSync();
                }
            }
        } catch (e) {
            log.error("failed to upload files", e);
            showUserFacingError(e.message);
            closeUploadProgress();
        } finally {
            postUploadAction();
        }
    };

    const retryFailed = async () => {
        try {
            log.info("user retrying failed upload");
@@ -1,3 +1,4 @@
import { FILE_TYPE } from "@/media/file";
import log from "@/next/log";
import { Overlay } from "@ente/shared/components/Container";
import { CustomError } from "@ente/shared/error";
@@ -11,7 +12,6 @@ import {
    StaticThumbnail,
} from "components/PlaceholderThumbnails";
import { TRASH_SECTION } from "constants/collection";
import { FILE_TYPE } from "constants/file";
import { GAP_BTW_TILES, IMAGE_CONTAINER_MAX_WIDTH } from "constants/gallery";
import { DeduplicateContext } from "pages/deduplicate";
import { GalleryContext } from "pages/gallery";
@@ -1,43 +0,0 @@
export const MIN_EDITED_CREATION_TIME = new Date(1800, 0, 1);
export const MAX_EDITED_CREATION_TIME = new Date();

export const MAX_EDITED_FILE_NAME_LENGTH = 100;
export const MAX_CAPTION_SIZE = 5000;

export const TYPE_HEIC = "heic";
export const TYPE_HEIF = "heif";
export const TYPE_JPEG = "jpeg";
export const TYPE_JPG = "jpg";

export enum FILE_TYPE {
    IMAGE,
    VIDEO,
    LIVE_PHOTO,
    OTHERS,
}

export const RAW_FORMATS = [
    "heic",
    "rw2",
    "tiff",
    "arw",
    "cr3",
    "cr2",
    "raf",
    "nef",
    "psd",
    "dng",
    "tif",
];
export const SUPPORTED_RAW_FORMATS = [
    "heic",
    "rw2",
    "tiff",
    "arw",
    "cr3",
    "cr2",
    "nef",
    "psd",
    "dng",
    "tif",
];
@@ -1,5 +1,5 @@
import { FILE_TYPE } from "@/media/file";
import { ENCRYPTION_CHUNK_SIZE } from "@ente/shared/crypto/constants";
import { FILE_TYPE } from "constants/file";
import { FileTypeInfo, Location } from "types/upload";

// list of formats that were missed by type-detection for some files.
@@ -70,11 +70,6 @@ export enum UPLOAD_STAGES {
    FINISH,
}

export enum UPLOAD_STRATEGY {
    SINGLE_COLLECTION,
    COLLECTION_PER_FOLDER,
}

export enum UPLOAD_RESULT {
    FAILED,
    ALREADY_UPLOADED,
@@ -1,10 +1,10 @@
import { FILE_TYPE } from "@/media/file";
import { ensureElectron } from "@/next/electron";
import log from "@/next/log";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { CustomError } from "@ente/shared/error";
import { Events, eventBus } from "@ente/shared/events";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
import { FILE_TYPE } from "constants/file";
import isElectron from "is-electron";
import PQueue from "p-queue";
import { Embedding } from "types/embedding";
@@ -1,8 +1,8 @@
import { FILE_TYPE } from "@/media/file";
import log from "@/next/log";
import HTTPService from "@ente/shared/network/HTTPService";
import { getEndpoint } from "@ente/shared/network/api";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
import { FILE_TYPE } from "constants/file";
import { EnteFile } from "types/file";
import { Metadata } from "types/upload";
import { hasFileHash } from "utils/upload";
@@ -1,3 +1,4 @@
import { FILE_TYPE } from "@/media/file";
import { decodeLivePhoto } from "@/media/live-photo";
import { openCache, type BlobCache } from "@/next/blob-cache";
import log from "@/next/log";
@@ -8,7 +9,6 @@ import { CustomError } from "@ente/shared/error";
import { Events, eventBus } from "@ente/shared/events";
import { isPlaybackPossible } from "@ente/shared/media/video-playback";
import { Remote } from "comlink";
import { FILE_TYPE } from "constants/file";
import isElectron from "is-electron";
import * as ffmpegService from "services/ffmpeg";
import { EnteFile } from "types/file";
@@ -617,7 +617,7 @@ async function getPlayableVideo(
                new File([videoBlob], videoNameTitle),
            );
            log.info(`video successfully converted ${videoNameTitle}`);
            return new Blob([await mp4ConvertedVideo.arrayBuffer()]);
            return new Blob([mp4ConvertedVideo]);
        }
    } catch (e) {
        log.error("video conversion failed", e);
@@ -1,3 +1,4 @@
import { FILE_TYPE } from "@/media/file";
import { decodeLivePhoto } from "@/media/live-photo";
import { ensureElectron } from "@/next/electron";
import log from "@/next/log";
@@ -11,7 +12,6 @@ import QueueProcessor, {
    CancellationStatus,
    RequestCanceller,
} from "@ente/shared/utils/queueProcessor";
import { FILE_TYPE } from "constants/file";
import { Collection } from "types/collection";
import {
    CollectionExportNames,
@@ -994,6 +994,7 @@ class ExportService {
                file,
            );
            await writeStream(
                electron,
                `${collectionExportPath}/${fileExportName}`,
                updatedFileStream,
            );
@@ -1047,6 +1048,7 @@ class ExportService {
                file,
            );
            await writeStream(
                electron,
                `${collectionExportPath}/${imageExportName}`,
                imageStream,
            );
@@ -1061,6 +1063,7 @@ class ExportService {
            );
            try {
                await writeStream(
                    electron,
                    `${collectionExportPath}/${videoExportName}`,
                    videoStream,
                );
@@ -1,10 +1,10 @@
import { FILE_TYPE } from "@/media/file";
import { decodeLivePhoto } from "@/media/live-photo";
import { ensureElectron } from "@/next/electron";
import log from "@/next/log";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
import { User } from "@ente/shared/user/types";
import { wait } from "@ente/shared/utils";
import { FILE_TYPE } from "constants/file";
import { getLocalCollections } from "services/collectionService";
import downloadManager from "services/download";
import { getAllLocalFiles } from "services/fileService";
@ -1,3 +1,5 @@
|
|||
import { ElectronFile } from "@/next/types/file";
|
||||
import type { Electron } from "@/next/types/ipc";
|
||||
import { ComlinkWorker } from "@/next/worker/comlink-worker";
|
||||
import { validateAndGetCreationUnixTimeInMicroSeconds } from "@ente/shared/time";
|
||||
import { Remote } from "comlink";
|
||||
|
@ -7,40 +9,79 @@ import {
outputPathPlaceholder,
} from "constants/ffmpeg";
import { NULL_LOCATION } from "constants/upload";
import { ElectronFile, ParsedExtractedMetadata } from "types/upload";
import { ParsedExtractedMetadata } from "types/upload";
import { type DedicatedFFmpegWorker } from "worker/ffmpeg.worker";

/** Called during upload */
export async function generateVideoThumbnail(
file: File | ElectronFile,
): Promise<File | ElectronFile> {
let seekTime = 1;
while (seekTime >= 0) {
try {
return await ffmpegExec(
[
ffmpegPathPlaceholder,
"-i",
inputPathPlaceholder,
"-ss",
`00:00:0${seekTime}`,
"-vframes",
"1",
"-vf",
"scale=-1:720",
outputPathPlaceholder,
],
file,
"thumb.jpeg",
);
} catch (e) {
if (seekTime === 0) {
throw e;
}
}
seekTime--;
/**
* Generate a thumbnail for the given video using a wasm FFmpeg running in a web
* worker.
*
* This function is called during upload, when we need to generate thumbnails
* for the new files that the user is adding.
*
* @param blob The input video blob.
*
* @returns JPEG data of the generated thumbnail.
*
* See also {@link generateVideoThumbnailNative}.
*/
export const generateVideoThumbnailWeb = async (blob: Blob) =>
generateVideoThumbnail((seekTime: number) =>
ffmpegExecWeb(genThumbnailCommand(seekTime), blob, "jpeg", 0),
);

const generateVideoThumbnail = async (
thumbnailAtTime: (seekTime: number) => Promise<Uint8Array>,
) => {
try {
// Try generating thumbnail at seekTime 1 second.
return await thumbnailAtTime(1);
} catch (e) {
// If that fails, try again at the beginning. If even this throws, let
// it fail.
return await thumbnailAtTime(0);
}
}
};

/**
* Generate a thumbnail for the given video using a native FFmpeg binary bundled
* with our desktop app.
*
* This function is called during upload, when we need to generate thumbnails
* for the new files that the user is adding.
*
* @param dataOrPath The input video's data or the path to the video on the
* user's local filesystem. See: [Note: The fileOrPath parameter to upload].
*
* @returns JPEG data of the generated thumbnail.
*
* See also {@link generateVideoThumbnailWeb}.
*/
export const generateVideoThumbnailNative = async (
electron: Electron,
dataOrPath: Uint8Array | string,
) =>
generateVideoThumbnail((seekTime: number) =>
electron.ffmpegExec(
genThumbnailCommand(seekTime),
dataOrPath,
"jpeg",
0,
),
);

const genThumbnailCommand = (seekTime: number) => [
ffmpegPathPlaceholder,
"-i",
inputPathPlaceholder,
"-ss",
`00:00:0${seekTime}`,
"-vframes",
"1",
"-vf",
"scale=-1:720",
outputPathPlaceholder,
];
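For orientation, the effective invocation at seekTime 1 once the placeholders are substituted (an illustrative sketch; the input/output names are hypothetical):

ffmpeg -i input.mov -ss 00:00:01 -vframes 1 -vf scale=-1:720 output.jpeg

The -ss/-vframes pair grabs a single frame at the seek offset, and scale=-1:720 caps the height at 720 px while preserving the aspect ratio.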

/** Called during upload */
export async function extractVideoMetadata(file: File | ElectronFile) {
@ -48,7 +89,7 @@ export async function extractVideoMetadata(file: File | ElectronFile) {
// -c [short for codec] copy[(stream_specifier)[ffmpeg.org/ffmpeg.html#Stream-specifiers]] => copies all the streams without re-encoding
// -map_metadata [http://ffmpeg.org/ffmpeg.html#Advanced-options search for map_metadata] => copies all stream metadata to the output
// -f ffmetadata [https://ffmpeg.org/ffmpeg-formats.html#Metadata-1] => dumps metadata from media files into a simple UTF-8-encoded INI-like text file
const metadata = await ffmpegExec(
const metadata = await ffmpegExec2(
[
ffmpegPathPlaceholder,
"-i",
@ -62,11 +103,9 @@ export async function extractVideoMetadata(file: File | ElectronFile) {
outputPathPlaceholder,
],
file,
`metadata.txt`,
);
return parseFFmpegExtractedMetadata(
new Uint8Array(await metadata.arrayBuffer()),
"txt",
);
return parseFFmpegExtractedMetadata(metadata);
}
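Putting the flags above together, the assembled command is likely of this shape (a sketch; the exact middle arguments are elided by the hunk, and -map_metadata is shown with the conventional input index 0):

ffmpeg -i input.mov -c copy -map_metadata 0 -f ffmetadata metadata.txt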

enum MetadataTags {
@ -135,7 +174,7 @@ function parseCreationTime(creationTime: string) {

/** Called when viewing a file */
export async function convertToMP4(file: File) {
return await ffmpegExec(
return await ffmpegExec2(
[
ffmpegPathPlaceholder,
"-i",
@ -145,50 +184,86 @@ export async function convertToMP4(file: File) {
outputPathPlaceholder,
],
file,
"output.mp4",
"mp4",
30 * 1000,
);
}

/**
* Run the given ffmpeg command.
* Run the given FFmpeg command using a wasm FFmpeg running in a web worker.
*
* If we're running in the context of our desktop app, use the ffmpeg binary we
* bundle with our desktop app to run the command. Otherwise fallback to using
* the wasm ffmpeg we link to from our web app in a web worker.
*
* As a rough ballpark, the native ffmpeg integration in the desktop app is
* 10-20x faster than the wasm one currently. See: [Note: ffmpeg in Electron].
* As a rough ballpark, currently the native FFmpeg integration in the desktop
* app is 10-20x faster than the wasm one. See: [Note: FFmpeg in Electron].
*/
const ffmpegExec = async (
cmd: string[],
const ffmpegExecWeb = async (
command: string[],
blob: Blob,
outputFileExtension: string,
timeoutMs: number,
) => {
const worker = await workerFactory.lazy();
return await worker.exec(command, blob, outputFileExtension, timeoutMs);
};
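A minimal usage sketch of ffmpegExecWeb (assumes the placeholder constants imported above; `videoBlob` is a hypothetical caller value):

const jpegData: Uint8Array = await ffmpegExecWeb(
    genThumbnailCommand(1),
    videoBlob,
    "jpeg",
    0, // no timeout
);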

/**
* Run the given FFmpeg command using a native FFmpeg binary bundled with our
* desktop app.
*
* See also: {@link ffmpegExecWeb}.
*/
/*
TODO(MR): Remove me
const ffmpegExecNative = async (
electron: Electron,
command: string[],
blob: Blob,
timeoutMs: number = 0,
) => {
const electron = globalThis.electron;
if (electron) {
const data = new Uint8Array(await blob.arrayBuffer());
return await electron.ffmpegExec(command, data, timeoutMs);
} else {
const worker = await workerFactory.lazy();
return await worker.exec(command, blob, timeoutMs);
}
};
*/

const ffmpegExec2 = async (
command: string[],
inputFile: File | ElectronFile,
outputFilename: string,
outputFileExtension: string,
timeoutMS: number = 0,
): Promise<File | ElectronFile> => {
) => {
const electron = globalThis.electron;
if (electron || false) {
/* TODO(MR): ElectronFile changes */
// return electron.runFFmpegCmd(cmd, inputFile, outputFilename, timeoutMS);
throw new Error("WIP");
// return electron.ffmpegExec(
// command,
// /* TODO(MR): ElectronFile changes */
// inputFile as unknown as string,
// outputFileName,
// timeoutMS,
// );
} else {
return workerFactory
.instance()
.then((worker) =>
worker.run(cmd, inputFile, outputFilename, timeoutMS),
);
/* TODO(MR): ElectronFile changes */
return ffmpegExecWeb(
command,
inputFile as File,
outputFileExtension,
timeoutMS,
);
}
};

/** Lazily create a singleton instance of our worker */
class WorkerFactory {
private _instance: Promise<Remote<DedicatedFFmpegWorker>>;
private instance: Promise<Remote<DedicatedFFmpegWorker>>;

async instance() {
if (!this._instance) {
const comlinkWorker = createComlinkWorker();
this._instance = comlinkWorker.remote;
}
return this._instance;
async lazy() {
if (!this.instance) this.instance = createComlinkWorker().remote;
return this.instance;
}
}
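The lazy-singleton shape that WorkerFactory now uses, reduced to a generic sketch (illustrative only, not part of the diff):

class Lazy<T> {
    private p: Promise<T> | undefined;
    constructor(private create: () => Promise<T>) {}
    // The first call kicks off creation; later calls reuse the same promise.
    get(): Promise<T> {
        if (!this.p) this.p = this.create();
        return this.p;
    }
}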
@ -1,8 +1,8 @@
import { FILE_TYPE } from "@/media/file";
import log from "@/next/log";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { eventBus, Events } from "@ente/shared/events";
import { getToken, getUserID } from "@ente/shared/storage/localStorage/helpers";
import { FILE_TYPE } from "constants/file";
import debounce from "debounce";
import PQueue from "p-queue";
import { JobResult } from "types/common/job";
@ -1,5 +1,5 @@
import { FILE_TYPE } from "@/media/file";
import log from "@/next/log";
import { FILE_TYPE } from "constants/file";
import { MLSyncContext, MLSyncFileContext } from "types/machineLearning";
import {
getLocalFileImageBitmap,
@ -1,6 +1,6 @@
import { convertBytesToHumanReadable } from "@/next/file";
import log from "@/next/log";
import { ElectronFile } from "types/upload";
import { ElectronFile } from "@/next/types/file";

export async function getUint8ArrayView(
file: Blob | ElectronFile,
@ -1,6 +1,6 @@
import { FILE_TYPE } from "@/media/file";
import log from "@/next/log";
import * as chrono from "chrono-node";
import { FILE_TYPE } from "constants/file";
import { t } from "i18next";
import { Collection } from "types/collection";
import { EntityType, LocationTag, LocationTagData } from "types/entity";
@ -1,12 +1,13 @@
import { FILE_TYPE } from "@/media/file";
import log from "@/next/log";
import { ElectronFile } from "@/next/types/file";
import { CustomError } from "@ente/shared/error";
import { FILE_TYPE } from "constants/file";
import {
KNOWN_NON_MEDIA_FORMATS,
WHITELISTED_FILE_FORMATS,
} from "constants/upload";
import FileType, { FileTypeResult } from "file-type";
import { ElectronFile, FileTypeInfo } from "types/upload";
import { FileTypeInfo } from "types/upload";
import { getFileExtension } from "utils/file";
import { getUint8ArrayView } from "./readerService";
@ -1,7 +1,7 @@
import { FILE_TYPE } from "@/media/file";
import log from "@/next/log";
import { validateAndGetCreationUnixTimeInMicroSeconds } from "@ente/shared/time";
import type { FixOption } from "components/FixCreationTime";
import { FILE_TYPE } from "constants/file";
import { getFileType } from "services/typeDetectionService";
import { EnteFile } from "types/file";
import {
318
web/apps/photos/src/services/upload/metadata.ts
Normal file
@ -0,0 +1,318 @@
import { FILE_TYPE } from "@/media/file";
import { getFileNameSize } from "@/next/file";
import log from "@/next/log";
import { ElectronFile } from "@/next/types/file";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { CustomError } from "@ente/shared/error";
import {
parseDateFromFusedDateString,
tryToParseDateTime,
validateAndGetCreationUnixTimeInMicroSeconds,
} from "@ente/shared/time";
import type { DataStream } from "@ente/shared/utils/data-stream";
import { Remote } from "comlink";
import { FILE_READER_CHUNK_SIZE, NULL_LOCATION } from "constants/upload";
import * as ffmpegService from "services/ffmpeg";
import { getElectronFileStream, getFileStream } from "services/readerService";
import { FilePublicMagicMetadataProps } from "types/file";
import {
FileTypeInfo,
Metadata,
ParsedExtractedMetadata,
type LivePhotoAssets2,
type UploadAsset2,
} from "types/upload";
import { getEXIFLocation, getEXIFTime, getParsedExifData } from "./exifService";
import {
MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT,
getClippedMetadataJSONMapKeyForFile,
getMetadataJSONMapKeyForFile,
type ParsedMetadataJSON,
} from "./takeout";
import { getFileName } from "./uploadService";

const EXIF_TAGS_NEEDED = [
"DateTimeOriginal",
"CreateDate",
"ModifyDate",
"GPSLatitude",
"GPSLongitude",
"GPSLatitudeRef",
"GPSLongitudeRef",
"DateCreated",
"ExifImageWidth",
"ExifImageHeight",
"ImageWidth",
"ImageHeight",
"PixelXDimension",
"PixelYDimension",
"MetadataDate",
];

const NULL_EXTRACTED_METADATA: ParsedExtractedMetadata = {
location: NULL_LOCATION,
creationTime: null,
width: null,
height: null,
};

interface ExtractMetadataResult {
metadata: Metadata;
publicMagicMetadata: FilePublicMagicMetadataProps;
}

export const extractAssetMetadata = async (
worker: Remote<DedicatedCryptoWorker>,
parsedMetadataJSONMap: Map<string, ParsedMetadataJSON>,
{ isLivePhoto, file, livePhotoAssets }: UploadAsset2,
collectionID: number,
fileTypeInfo: FileTypeInfo,
): Promise<ExtractMetadataResult> => {
return isLivePhoto
? await extractLivePhotoMetadata(
worker,
parsedMetadataJSONMap,
collectionID,
fileTypeInfo,
livePhotoAssets,
)
: await extractFileMetadata(
worker,
parsedMetadataJSONMap,
collectionID,
fileTypeInfo,
file,
);
};

async function extractFileMetadata(
worker: Remote<DedicatedCryptoWorker>,
parsedMetadataJSONMap: Map<string, ParsedMetadataJSON>,
collectionID: number,
fileTypeInfo: FileTypeInfo,
rawFile: File | ElectronFile | string,
): Promise<ExtractMetadataResult> {
const rawFileName = getFileName(rawFile);
let key = getMetadataJSONMapKeyForFile(collectionID, rawFileName);
let googleMetadata: ParsedMetadataJSON = parsedMetadataJSONMap.get(key);

if (!googleMetadata && key.length > MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT) {
key = getClippedMetadataJSONMapKeyForFile(collectionID, rawFileName);
googleMetadata = parsedMetadataJSONMap.get(key);
}

const { metadata, publicMagicMetadata } = await extractMetadata(
worker,
/* TODO(MR): ElectronFile changes */
rawFile as File | ElectronFile,
fileTypeInfo,
);

for (const [key, value] of Object.entries(googleMetadata ?? {})) {
if (!value) {
continue;
}
metadata[key] = value;
}
return { metadata, publicMagicMetadata };
}

async function extractMetadata(
worker: Remote<DedicatedCryptoWorker>,
receivedFile: File | ElectronFile,
fileTypeInfo: FileTypeInfo,
): Promise<ExtractMetadataResult> {
let extractedMetadata: ParsedExtractedMetadata = NULL_EXTRACTED_METADATA;
if (fileTypeInfo.fileType === FILE_TYPE.IMAGE) {
extractedMetadata = await getImageMetadata(receivedFile, fileTypeInfo);
} else if (fileTypeInfo.fileType === FILE_TYPE.VIDEO) {
extractedMetadata = await getVideoMetadata(receivedFile);
}
const hash = await getFileHash(worker, receivedFile);

const metadata: Metadata = {
title: receivedFile.name,
creationTime:
extractedMetadata.creationTime ??
extractDateFromFileName(receivedFile.name) ??
receivedFile.lastModified * 1000,
modificationTime: receivedFile.lastModified * 1000,
latitude: extractedMetadata.location.latitude,
longitude: extractedMetadata.location.longitude,
fileType: fileTypeInfo.fileType,
hash,
};
const publicMagicMetadata: FilePublicMagicMetadataProps = {
w: extractedMetadata.width,
h: extractedMetadata.height,
};
return { metadata, publicMagicMetadata };
}

async function getImageMetadata(
receivedFile: File | ElectronFile,
fileTypeInfo: FileTypeInfo,
): Promise<ParsedExtractedMetadata> {
let imageMetadata = NULL_EXTRACTED_METADATA;
try {
if (!(receivedFile instanceof File)) {
receivedFile = new File(
[await receivedFile.blob()],
receivedFile.name,
{
lastModified: receivedFile.lastModified,
},
);
}
const exifData = await getParsedExifData(
receivedFile,
fileTypeInfo,
EXIF_TAGS_NEEDED,
);

imageMetadata = {
location: getEXIFLocation(exifData),
creationTime: getEXIFTime(exifData),
width: exifData?.imageWidth ?? null,
height: exifData?.imageHeight ?? null,
};
} catch (e) {
log.error("getExifData failed", e);
}
return imageMetadata;
}

// Tries to extract a date from the file name if available, else returns null.
function extractDateFromFileName(filename: string): number {
try {
filename = filename.trim();
let parsedDate: Date;
if (filename.startsWith("IMG-") || filename.startsWith("VID-")) {
// Whatsapp media files
// sample name IMG-20171218-WA0028.jpg
parsedDate = parseDateFromFusedDateString(filename.split("-")[1]);
} else if (filename.startsWith("Screenshot_")) {
// Screenshots on droid
// sample name Screenshot_20181227-152914.jpg
parsedDate = parseDateFromFusedDateString(
filename.replaceAll("Screenshot_", ""),
);
} else if (filename.startsWith("signal-")) {
// signal images
// sample name :signal-2018-08-21-100217.jpg
const dateString = convertSignalNameToFusedDateString(filename);
parsedDate = parseDateFromFusedDateString(dateString);
}
if (!parsedDate) {
parsedDate = tryToParseDateTime(filename);
}
return validateAndGetCreationUnixTimeInMicroSeconds(parsedDate);
} catch (e) {
log.error("failed to extract date From FileName ", e);
return null;
}
}
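Hypothetical inputs and the branch each takes: "IMG-20171218-WA0028.jpg" parses the fused date segment "20171218"; "Screenshot_20181227-152914.jpg" strips the prefix and parses "20181227-152914"; "signal-2018-08-21-100217.jpg" is first fused into "20180821-100217.jpg" by convertSignalNameToFusedDateString (the extension rides along) and then parsed.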

function convertSignalNameToFusedDateString(filename: string) {
const dateStringParts = filename.split("-");
return `${dateStringParts[1]}${dateStringParts[2]}${dateStringParts[3]}-${dateStringParts[4]}`;
}

async function getVideoMetadata(file: File | ElectronFile) {
let videoMetadata = NULL_EXTRACTED_METADATA;
try {
log.info(`getVideoMetadata called for ${getFileNameSize(file)}`);
videoMetadata = await ffmpegService.extractVideoMetadata(file);
log.info(
`videoMetadata successfully extracted ${getFileNameSize(file)}`,
);
} catch (e) {
log.error("failed to get video metadata", e);
log.info(
`videoMetadata extracted failed ${getFileNameSize(file)} ,${
e.message
} `,
);
}

return videoMetadata;
}

async function extractLivePhotoMetadata(
worker: Remote<DedicatedCryptoWorker>,
parsedMetadataJSONMap: Map<string, ParsedMetadataJSON>,
collectionID: number,
fileTypeInfo: FileTypeInfo,
livePhotoAssets: LivePhotoAssets2,
): Promise<ExtractMetadataResult> {
const imageFileTypeInfo: FileTypeInfo = {
fileType: FILE_TYPE.IMAGE,
exactType: fileTypeInfo.imageType,
};
const {
metadata: imageMetadata,
publicMagicMetadata: imagePublicMagicMetadata,
} = await extractFileMetadata(
worker,
parsedMetadataJSONMap,
collectionID,
imageFileTypeInfo,
livePhotoAssets.image,
);
const videoHash = await getFileHash(
worker,
/* TODO(MR): ElectronFile changes */
livePhotoAssets.video as File | ElectronFile,
);
return {
metadata: {
...imageMetadata,
title: getFileName(livePhotoAssets.image),
fileType: FILE_TYPE.LIVE_PHOTO,
imageHash: imageMetadata.hash,
videoHash: videoHash,
hash: undefined,
},
publicMagicMetadata: imagePublicMagicMetadata,
};
}

async function getFileHash(
worker: Remote<DedicatedCryptoWorker>,
file: File | ElectronFile,
) {
try {
log.info(`getFileHash called for ${getFileNameSize(file)}`);
let filedata: DataStream;
if (file instanceof File) {
filedata = getFileStream(file, FILE_READER_CHUNK_SIZE);
} else {
filedata = await getElectronFileStream(
file,
FILE_READER_CHUNK_SIZE,
);
}
const hashState = await worker.initChunkHashing();

const streamReader = filedata.stream.getReader();
for (let i = 0; i < filedata.chunkCount; i++) {
const { done, value: chunk } = await streamReader.read();
if (done) {
throw Error(CustomError.CHUNK_LESS_THAN_EXPECTED);
}
await worker.hashFileChunk(hashState, Uint8Array.from(chunk));
}
const { done } = await streamReader.read();
if (!done) {
throw Error(CustomError.CHUNK_MORE_THAN_EXPECTED);
}
const hash = await worker.completeChunkHashing(hashState);
log.info(
`file hashing completed successfully ${getFileNameSize(file)}`,
);
return hash;
} catch (e) {
log.error("getFileHash failed", e);
log.info(`file hashing failed ${getFileNameSize(file)} ,${e.message} `);
}
}
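The chunked-hashing handshake above, in outline (worker API names as used in this file):

const state = await worker.initChunkHashing();
// feed exactly filedata.chunkCount chunks, one call per chunk
await worker.hashFileChunk(state, chunk);
// ...then finalize once the stream reports done
const hash = await worker.completeChunkHashing(state);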
@ -1,649 +0,0 @@
import { ensureElectron } from "@/next/electron";
import { basename, getFileNameSize } from "@/next/file";
import log from "@/next/log";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { CustomError } from "@ente/shared/error";
import {
parseDateFromFusedDateString,
tryToParseDateTime,
validateAndGetCreationUnixTimeInMicroSeconds,
} from "@ente/shared/time";
import { Remote } from "comlink";
import { FILE_TYPE } from "constants/file";
import { FILE_READER_CHUNK_SIZE, NULL_LOCATION } from "constants/upload";
import * as ffmpegService from "services/ffmpeg";
import { getElectronFileStream, getFileStream } from "services/readerService";
import { getFileType } from "services/typeDetectionService";
import { FilePublicMagicMetadataProps } from "types/file";
import {
DataStream,
ElectronFile,
ExtractMetadataResult,
FileTypeInfo,
LivePhotoAssets,
Location,
Metadata,
ParsedExtractedMetadata,
ParsedMetadataJSON,
ParsedMetadataJSONMap,
type FileWithCollection,
type FileWithCollection2,
type LivePhotoAssets2,
} from "types/upload";
import { getFileTypeFromExtensionForLivePhotoClustering } from "utils/file/livePhoto";
import { getEXIFLocation, getEXIFTime, getParsedExifData } from "./exifService";
import uploadCancelService from "./uploadCancelService";
import { extractFileMetadata, getFileName } from "./uploadService";

const NULL_PARSED_METADATA_JSON: ParsedMetadataJSON = {
creationTime: null,
modificationTime: null,
...NULL_LOCATION,
};

const EXIF_TAGS_NEEDED = [
"DateTimeOriginal",
"CreateDate",
"ModifyDate",
"GPSLatitude",
"GPSLongitude",
"GPSLatitudeRef",
"GPSLongitudeRef",
"DateCreated",
"ExifImageWidth",
"ExifImageHeight",
"ImageWidth",
"ImageHeight",
"PixelXDimension",
"PixelYDimension",
"MetadataDate",
];

export const MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT = 46;

export const NULL_EXTRACTED_METADATA: ParsedExtractedMetadata = {
location: NULL_LOCATION,
creationTime: null,
width: null,
height: null,
};

export async function extractMetadata(
worker: Remote<DedicatedCryptoWorker>,
receivedFile: File | ElectronFile,
fileTypeInfo: FileTypeInfo,
): Promise<ExtractMetadataResult> {
let extractedMetadata: ParsedExtractedMetadata = NULL_EXTRACTED_METADATA;
if (fileTypeInfo.fileType === FILE_TYPE.IMAGE) {
extractedMetadata = await getImageMetadata(receivedFile, fileTypeInfo);
} else if (fileTypeInfo.fileType === FILE_TYPE.VIDEO) {
extractedMetadata = await getVideoMetadata(receivedFile);
}
const fileHash = await getFileHash(worker, receivedFile);

const metadata: Metadata = {
title: receivedFile.name,
creationTime:
extractedMetadata.creationTime ??
extractDateFromFileName(receivedFile.name) ??
receivedFile.lastModified * 1000,
modificationTime: receivedFile.lastModified * 1000,
latitude: extractedMetadata.location.latitude,
longitude: extractedMetadata.location.longitude,
fileType: fileTypeInfo.fileType,
hash: fileHash,
};
const publicMagicMetadata: FilePublicMagicMetadataProps = {
w: extractedMetadata.width,
h: extractedMetadata.height,
};
return { metadata, publicMagicMetadata };
}

export async function getImageMetadata(
receivedFile: File | ElectronFile,
fileTypeInfo: FileTypeInfo,
): Promise<ParsedExtractedMetadata> {
let imageMetadata = NULL_EXTRACTED_METADATA;
try {
if (!(receivedFile instanceof File)) {
receivedFile = new File(
[await receivedFile.blob()],
receivedFile.name,
{
lastModified: receivedFile.lastModified,
},
);
}
const exifData = await getParsedExifData(
receivedFile,
fileTypeInfo,
EXIF_TAGS_NEEDED,
);

imageMetadata = {
location: getEXIFLocation(exifData),
creationTime: getEXIFTime(exifData),
width: exifData?.imageWidth ?? null,
height: exifData?.imageHeight ?? null,
};
} catch (e) {
log.error("getExifData failed", e);
}
return imageMetadata;
}

export const getMetadataJSONMapKeyForJSON = (
collectionID: number,
jsonFileName: string,
) => {
let title = jsonFileName.slice(0, -1 * ".json".length);
const endsWithNumberedSuffixWithBrackets = title.match(/\(\d+\)$/);
if (endsWithNumberedSuffixWithBrackets) {
title = title.slice(
0,
-1 * endsWithNumberedSuffixWithBrackets[0].length,
);
const [name, extension] = splitFilenameAndExtension(title);
return `${collectionID}-${name}${endsWithNumberedSuffixWithBrackets[0]}.${extension}`;
}
return `${collectionID}-${title}`;
};

// If the file name is longer than MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT (46), then Google Photos clips the file name,
// so we need to use the clipped file name to look up the metadataJSON file.
export const getClippedMetadataJSONMapKeyForFile = (
collectionID: number,
fileName: string,
) => {
return `${collectionID}-${fileName.slice(
0,
MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT,
)}`;
};

export const getMetadataJSONMapKeyForFile = (
collectionID: number,
fileName: string,
) => {
return `${collectionID}-${getFileOriginalName(fileName)}`;
};

export async function parseMetadataJSON(
receivedFile: File | ElectronFile | string,
) {
try {
let text: string;
if (typeof receivedFile == "string") {
text = await ensureElectron().fs.readTextFile(receivedFile);
} else {
if (!(receivedFile instanceof File)) {
receivedFile = new File(
[await receivedFile.blob()],
receivedFile.name,
);
}
text = await receivedFile.text();
}

return parseMetadataJSONText(text);
} catch (e) {
log.error("parseMetadataJSON failed", e);
// ignore
}
}

export async function parseMetadataJSONText(text: string) {
const metadataJSON: object = JSON.parse(text);

const parsedMetadataJSON: ParsedMetadataJSON = NULL_PARSED_METADATA_JSON;
if (!metadataJSON) {
return;
}

if (
metadataJSON["photoTakenTime"] &&
metadataJSON["photoTakenTime"]["timestamp"]
) {
parsedMetadataJSON.creationTime =
metadataJSON["photoTakenTime"]["timestamp"] * 1000000;
} else if (
metadataJSON["creationTime"] &&
metadataJSON["creationTime"]["timestamp"]
) {
parsedMetadataJSON.creationTime =
metadataJSON["creationTime"]["timestamp"] * 1000000;
}
if (
metadataJSON["modificationTime"] &&
metadataJSON["modificationTime"]["timestamp"]
) {
parsedMetadataJSON.modificationTime =
metadataJSON["modificationTime"]["timestamp"] * 1000000;
}
let locationData: Location = NULL_LOCATION;
if (
metadataJSON["geoData"] &&
(metadataJSON["geoData"]["latitude"] !== 0.0 ||
metadataJSON["geoData"]["longitude"] !== 0.0)
) {
locationData = metadataJSON["geoData"];
} else if (
metadataJSON["geoDataExif"] &&
(metadataJSON["geoDataExif"]["latitude"] !== 0.0 ||
metadataJSON["geoDataExif"]["longitude"] !== 0.0)
) {
locationData = metadataJSON["geoDataExif"];
}
if (locationData !== null) {
parsedMetadataJSON.latitude = locationData.latitude;
parsedMetadataJSON.longitude = locationData.longitude;
}
return parsedMetadataJSON;
}

// Tries to extract a date from the file name if available, else returns null.
export function extractDateFromFileName(filename: string): number {
try {
filename = filename.trim();
let parsedDate: Date;
if (filename.startsWith("IMG-") || filename.startsWith("VID-")) {
// Whatsapp media files
// sample name IMG-20171218-WA0028.jpg
parsedDate = parseDateFromFusedDateString(filename.split("-")[1]);
} else if (filename.startsWith("Screenshot_")) {
// Screenshots on droid
// sample name Screenshot_20181227-152914.jpg
parsedDate = parseDateFromFusedDateString(
filename.replaceAll("Screenshot_", ""),
);
} else if (filename.startsWith("signal-")) {
// signal images
// sample name :signal-2018-08-21-100217.jpg
const dateString = convertSignalNameToFusedDateString(filename);
parsedDate = parseDateFromFusedDateString(dateString);
}
if (!parsedDate) {
parsedDate = tryToParseDateTime(filename);
}
return validateAndGetCreationUnixTimeInMicroSeconds(parsedDate);
} catch (e) {
log.error("failed to extract date From FileName ", e);
return null;
}
}

function convertSignalNameToFusedDateString(filename: string) {
const dateStringParts = filename.split("-");
return `${dateStringParts[1]}${dateStringParts[2]}${dateStringParts[3]}-${dateStringParts[4]}`;
}

const EDITED_FILE_SUFFIX = "-edited";

/*
Get the original file name for an edited file, so that we can associate it
with the original file's metadataJSON file (edited files don't have their
own metadata files).
*/
function getFileOriginalName(fileName: string) {
let originalName: string = null;
const [nameWithoutExtension, extension] =
splitFilenameAndExtension(fileName);

const isEditedFile = nameWithoutExtension.endsWith(EDITED_FILE_SUFFIX);
if (isEditedFile) {
originalName = nameWithoutExtension.slice(
0,
-1 * EDITED_FILE_SUFFIX.length,
);
} else {
originalName = nameWithoutExtension;
}
if (extension) {
originalName += "." + extension;
}
return originalName;
}

async function getVideoMetadata(file: File | ElectronFile) {
let videoMetadata = NULL_EXTRACTED_METADATA;
try {
log.info(`getVideoMetadata called for ${getFileNameSize(file)}`);
videoMetadata = await ffmpegService.extractVideoMetadata(file);
log.info(
`videoMetadata successfully extracted ${getFileNameSize(file)}`,
);
} catch (e) {
log.error("failed to get video metadata", e);
log.info(
`videoMetadata extracted failed ${getFileNameSize(file)} ,${
e.message
} `,
);
}

return videoMetadata;
}

interface LivePhotoIdentifier {
collectionID: number;
fileType: FILE_TYPE;
name: string;
size: number;
}

const UNDERSCORE_THREE = "_3";
// Note: The icloud-photos-downloader library appends _HVEC to the end of the filename in case of live photos
// https://github.com/icloud-photos-downloader/icloud_photos_downloader
const UNDERSCORE_HEVC = "_HVEC";

export async function getLivePhotoFileType(
livePhotoAssets: LivePhotoAssets,
): Promise<FileTypeInfo> {
const imageFileTypeInfo = await getFileType(livePhotoAssets.image);
const videoFileTypeInfo = await getFileType(livePhotoAssets.video);
return {
fileType: FILE_TYPE.LIVE_PHOTO,
exactType: `${imageFileTypeInfo.exactType}+${videoFileTypeInfo.exactType}`,
imageType: imageFileTypeInfo.exactType,
videoType: videoFileTypeInfo.exactType,
};
}

export async function extractLivePhotoMetadata(
worker: Remote<DedicatedCryptoWorker>,
parsedMetadataJSONMap: ParsedMetadataJSONMap,
collectionID: number,
fileTypeInfo: FileTypeInfo,
livePhotoAssets: LivePhotoAssets2,
): Promise<ExtractMetadataResult> {
const imageFileTypeInfo: FileTypeInfo = {
fileType: FILE_TYPE.IMAGE,
exactType: fileTypeInfo.imageType,
};
const {
metadata: imageMetadata,
publicMagicMetadata: imagePublicMagicMetadata,
} = await extractFileMetadata(
worker,
parsedMetadataJSONMap,
collectionID,
imageFileTypeInfo,
livePhotoAssets.image,
);
const videoHash = await getFileHash(
worker,
/* TODO(MR): ElectronFile changes */
livePhotoAssets.video as File | ElectronFile,
);
return {
metadata: {
...imageMetadata,
title: getLivePhotoName(livePhotoAssets),
fileType: FILE_TYPE.LIVE_PHOTO,
imageHash: imageMetadata.hash,
videoHash: videoHash,
hash: undefined,
},
publicMagicMetadata: imagePublicMagicMetadata,
};
}

export function getLivePhotoSize(livePhotoAssets: LivePhotoAssets) {
return livePhotoAssets.image.size + livePhotoAssets.video.size;
}

export const getLivePhotoName = ({ image }: LivePhotoAssets2) =>
typeof image == "string" ? basename(image) : image.name;

export async function clusterLivePhotoFiles(mediaFiles: FileWithCollection2[]) {
try {
const analysedMediaFiles: FileWithCollection2[] = [];
mediaFiles
.sort((firstMediaFile, secondMediaFile) =>
splitFilenameAndExtension(
getFileName(firstMediaFile.file),
)[0].localeCompare(
splitFilenameAndExtension(
getFileName(secondMediaFile.file),
)[0],
),
)
.sort(
(firstMediaFile, secondMediaFile) =>
firstMediaFile.collectionID - secondMediaFile.collectionID,
);
let index = 0;
while (index < mediaFiles.length - 1) {
if (uploadCancelService.isUploadCancelationRequested()) {
throw Error(CustomError.UPLOAD_CANCELLED);
}
const firstMediaFile = mediaFiles[index];
const secondMediaFile = mediaFiles[index + 1];
const firstFileType =
getFileTypeFromExtensionForLivePhotoClustering(
getFileName(firstMediaFile.file),
);
const secondFileType =
getFileTypeFromExtensionForLivePhotoClustering(
getFileName(secondMediaFile.file),
);
const firstFileIdentifier: LivePhotoIdentifier = {
collectionID: firstMediaFile.collectionID,
fileType: firstFileType,
name: getFileName(firstMediaFile.file),
/* TODO(MR): ElectronFile changes */
size: (firstMediaFile as FileWithCollection).file.size,
};
const secondFileIdentifier: LivePhotoIdentifier = {
collectionID: secondMediaFile.collectionID,
fileType: secondFileType,
name: getFileName(secondMediaFile.file),
/* TODO(MR): ElectronFile changes */
size: (secondMediaFile as FileWithCollection).file.size,
};
if (
areFilesLivePhotoAssets(
firstFileIdentifier,
secondFileIdentifier,
)
) {
let imageFile: File | ElectronFile | string;
let videoFile: File | ElectronFile | string;
if (
firstFileType === FILE_TYPE.IMAGE &&
secondFileType === FILE_TYPE.VIDEO
) {
imageFile = firstMediaFile.file;
videoFile = secondMediaFile.file;
} else {
videoFile = firstMediaFile.file;
imageFile = secondMediaFile.file;
}
const livePhotoLocalID = firstMediaFile.localID;
analysedMediaFiles.push({
localID: livePhotoLocalID,
collectionID: firstMediaFile.collectionID,
isLivePhoto: true,
livePhotoAssets: {
image: imageFile,
video: videoFile,
},
});
index += 2;
} else {
analysedMediaFiles.push({
...firstMediaFile,
isLivePhoto: false,
});
index += 1;
}
}
if (index === mediaFiles.length - 1) {
analysedMediaFiles.push({
...mediaFiles[index],
isLivePhoto: false,
});
}
return analysedMediaFiles;
} catch (e) {
if (e.message === CustomError.UPLOAD_CANCELLED) {
throw e;
} else {
log.error("failed to cluster live photo", e);
throw e;
}
}
}

function areFilesLivePhotoAssets(
firstFileIdentifier: LivePhotoIdentifier,
secondFileIdentifier: LivePhotoIdentifier,
) {
const haveSameCollectionID =
firstFileIdentifier.collectionID === secondFileIdentifier.collectionID;
const areNotSameFileType =
firstFileIdentifier.fileType !== secondFileIdentifier.fileType;

let firstFileNameWithoutSuffix: string;
let secondFileNameWithoutSuffix: string;
if (firstFileIdentifier.fileType === FILE_TYPE.IMAGE) {
firstFileNameWithoutSuffix = removePotentialLivePhotoSuffix(
getFileNameWithoutExtension(firstFileIdentifier.name),
// Note: The Google Live Photo image file can have the video extension appended as a suffix; we pass that to removePotentialLivePhotoSuffix to remove it.
// Example: IMG_20210630_0001.mp4.jpg (Google Live Photo image file)
getFileExtensionWithDot(secondFileIdentifier.name),
);
secondFileNameWithoutSuffix = removePotentialLivePhotoSuffix(
getFileNameWithoutExtension(secondFileIdentifier.name),
);
} else {
firstFileNameWithoutSuffix = removePotentialLivePhotoSuffix(
getFileNameWithoutExtension(firstFileIdentifier.name),
);
secondFileNameWithoutSuffix = removePotentialLivePhotoSuffix(
getFileNameWithoutExtension(secondFileIdentifier.name),
getFileExtensionWithDot(firstFileIdentifier.name),
);
}
if (
haveSameCollectionID &&
isImageOrVideo(firstFileIdentifier.fileType) &&
isImageOrVideo(secondFileIdentifier.fileType) &&
areNotSameFileType &&
firstFileNameWithoutSuffix === secondFileNameWithoutSuffix
) {
const LIVE_PHOTO_ASSET_SIZE_LIMIT = 20 * 1024 * 1024; // 20MB

// Checks that the sizes of the live photo assets are within the allowed limit.
// This is based on the assumption that live photo assets would ideally not be larger than LIVE_PHOTO_ASSET_SIZE_LIMIT.
// Also, the zipping library doesn't support a stream as an input.
if (
firstFileIdentifier.size <= LIVE_PHOTO_ASSET_SIZE_LIMIT &&
secondFileIdentifier.size <= LIVE_PHOTO_ASSET_SIZE_LIMIT
) {
return true;
} else {
log.error(
`${CustomError.TOO_LARGE_LIVE_PHOTO_ASSETS} - ${JSON.stringify({
fileSizes: [
firstFileIdentifier.size,
secondFileIdentifier.size,
],
})}`,
);
}
}
return false;
}
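A hypothetical pair that clusters: ("IMG_0123.jpg", "IMG_0123.mov") in the same collection, one IMAGE and one VIDEO, both at or under the 20 MB asset limit. The suffix handling means a Google-style "IMG_0123.mov.jpg" also matches "IMG_0123.mov", since the borrowed ".mov" extension (like the "_3" and "_HVEC" suffixes) is stripped before the name comparison.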

function removePotentialLivePhotoSuffix(
filenameWithoutExtension: string,
suffix?: string,
) {
let presentSuffix: string;
if (filenameWithoutExtension.endsWith(UNDERSCORE_THREE)) {
presentSuffix = UNDERSCORE_THREE;
} else if (filenameWithoutExtension.endsWith(UNDERSCORE_HEVC)) {
presentSuffix = UNDERSCORE_HEVC;
} else if (
filenameWithoutExtension.endsWith(UNDERSCORE_HEVC.toLowerCase())
) {
presentSuffix = UNDERSCORE_HEVC.toLowerCase();
} else if (suffix) {
if (filenameWithoutExtension.endsWith(suffix)) {
presentSuffix = suffix;
} else if (filenameWithoutExtension.endsWith(suffix.toLowerCase())) {
presentSuffix = suffix.toLowerCase();
}
}
if (presentSuffix) {
return filenameWithoutExtension.slice(0, presentSuffix.length * -1);
} else {
return filenameWithoutExtension;
}
}

function getFileNameWithoutExtension(filename: string) {
const lastDotPosition = filename.lastIndexOf(".");
if (lastDotPosition === -1) return filename;
else return filename.slice(0, lastDotPosition);
}

function getFileExtensionWithDot(filename: string) {
const lastDotPosition = filename.lastIndexOf(".");
if (lastDotPosition === -1) return "";
else return filename.slice(lastDotPosition);
}

function splitFilenameAndExtension(filename: string): [string, string] {
const lastDotPosition = filename.lastIndexOf(".");
if (lastDotPosition === -1) return [filename, null];
else
return [
filename.slice(0, lastDotPosition),
filename.slice(lastDotPosition + 1),
];
}

const isImageOrVideo = (fileType: FILE_TYPE) =>
[FILE_TYPE.IMAGE, FILE_TYPE.VIDEO].includes(fileType);

async function getFileHash(
worker: Remote<DedicatedCryptoWorker>,
file: File | ElectronFile,
) {
try {
log.info(`getFileHash called for ${getFileNameSize(file)}`);
let filedata: DataStream;
if (file instanceof File) {
filedata = getFileStream(file, FILE_READER_CHUNK_SIZE);
} else {
filedata = await getElectronFileStream(
file,
FILE_READER_CHUNK_SIZE,
);
}
const hashState = await worker.initChunkHashing();

const streamReader = filedata.stream.getReader();
for (let i = 0; i < filedata.chunkCount; i++) {
const { done, value: chunk } = await streamReader.read();
if (done) {
throw Error(CustomError.CHUNK_LESS_THAN_EXPECTED);
}
await worker.hashFileChunk(hashState, Uint8Array.from(chunk));
}
const { done } = await streamReader.read();
if (!done) {
throw Error(CustomError.CHUNK_MORE_THAN_EXPECTED);
}
const hash = await worker.completeChunkHashing(hashState);
log.info(
`file hashing completed successfully ${getFileNameSize(file)}`,
);
return hash;
} catch (e) {
log.error("getFileHash failed", e);
log.info(`file hashing failed ${getFileNameSize(file)} ,${e.message} `);
}
}
@ -1,132 +0,0 @@
import { CustomError } from "@ente/shared/error";
import {
FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART,
RANDOM_PERCENTAGE_PROGRESS_FOR_PUT,
} from "constants/upload";
import { DataStream, Logger, MultipartUploadURLs } from "types/upload";
import * as convert from "xml-js";
import UIService from "./uiService";
import uploadCancelService from "./uploadCancelService";
import UploadHttpClient from "./uploadHttpClient";
import uploadService from "./uploadService";

interface PartEtag {
PartNumber: number;
ETag: string;
}

function calculatePartCount(chunkCount: number) {
const partCount = Math.ceil(
chunkCount / FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART,
);
return partCount;
}
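Worked example: if FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART were 5 (illustrative; the real value lives in constants/upload), a stream of 12 chunks yields ceil(12 / 5) = 3 upload parts, with the last part carrying only the remaining 2 chunks.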
export async function uploadStreamUsingMultipart(
logger: Logger,
fileLocalID: number,
dataStream: DataStream,
) {
const uploadPartCount = calculatePartCount(dataStream.chunkCount);
logger(`fetching ${uploadPartCount} urls for multipart upload`);
const multipartUploadURLs =
await uploadService.fetchMultipartUploadURLs(uploadPartCount);
logger(`fetched ${uploadPartCount} urls for multipart upload`);

const fileObjectKey = await uploadStreamInParts(
logger,
multipartUploadURLs,
dataStream.stream,
fileLocalID,
uploadPartCount,
);
return fileObjectKey;
}

export async function uploadStreamInParts(
logger: Logger,
multipartUploadURLs: MultipartUploadURLs,
dataStream: ReadableStream<Uint8Array>,
fileLocalID: number,
uploadPartCount: number,
) {
const streamReader = dataStream.getReader();
const percentPerPart = getRandomProgressPerPartUpload(uploadPartCount);
const partEtags: PartEtag[] = [];
logger(`uploading file in chunks`);
for (const [
index,
fileUploadURL,
] of multipartUploadURLs.partURLs.entries()) {
if (uploadCancelService.isUploadCancelationRequested()) {
throw Error(CustomError.UPLOAD_CANCELLED);
}
const uploadChunk = await combineChunksToFormUploadPart(streamReader);
const progressTracker = UIService.trackUploadProgress(
fileLocalID,
percentPerPart,
index,
);
let eTag = null;
if (!uploadService.getIsCFUploadProxyDisabled()) {
eTag = await UploadHttpClient.putFilePartV2(
fileUploadURL,
uploadChunk,
progressTracker,
);
} else {
eTag = await UploadHttpClient.putFilePart(
fileUploadURL,
uploadChunk,
progressTracker,
);
}
partEtags.push({ PartNumber: index + 1, ETag: eTag });
}
const { done } = await streamReader.read();
if (!done) {
throw Error(CustomError.CHUNK_MORE_THAN_EXPECTED);
}
logger(`uploading file in chunks done`);
logger(`completing multipart upload`);
await completeMultipartUpload(partEtags, multipartUploadURLs.completeURL);
logger(`completing multipart upload done`);
return multipartUploadURLs.objectKey;
}

function getRandomProgressPerPartUpload(uploadPartCount: number) {
const percentPerPart =
RANDOM_PERCENTAGE_PROGRESS_FOR_PUT() / uploadPartCount;
return percentPerPart;
}

async function combineChunksToFormUploadPart(
streamReader: ReadableStreamDefaultReader<Uint8Array>,
) {
const combinedChunks = [];
for (let i = 0; i < FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART; i++) {
const { done, value: chunk } = await streamReader.read();
if (done) {
break;
}
for (let index = 0; index < chunk.length; index++) {
combinedChunks.push(chunk[index]);
}
}
return Uint8Array.from(combinedChunks);
}

async function completeMultipartUpload(
partEtags: PartEtag[],
completeURL: string,
) {
const options = { compact: true, ignoreComment: true, spaces: 4 };
const body = convert.js2xml(
{ CompleteMultipartUpload: { Part: partEtags } },
options,
);
if (!uploadService.getIsCFUploadProxyDisabled()) {
await UploadHttpClient.completeMultipartUploadV2(completeURL, body);
} else {
await UploadHttpClient.completeMultipartUpload(completeURL, body);
}
}
155
web/apps/photos/src/services/upload/takeout.ts
Normal file
@ -0,0 +1,155 @@
/** @file Dealing with the JSON metadata in Google Takeouts */

import { ensureElectron } from "@/next/electron";
import { nameAndExtension } from "@/next/file";
import log from "@/next/log";
import type { ElectronFile } from "@/next/types/file";
import { NULL_LOCATION } from "constants/upload";
import { type Location } from "types/upload";

export interface ParsedMetadataJSON {
creationTime: number;
modificationTime: number;
latitude: number;
longitude: number;
}

export const MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT = 46;

export const getMetadataJSONMapKeyForJSON = (
collectionID: number,
jsonFileName: string,
) => {
let title = jsonFileName.slice(0, -1 * ".json".length);
const endsWithNumberedSuffixWithBrackets = title.match(/\(\d+\)$/);
if (endsWithNumberedSuffixWithBrackets) {
title = title.slice(
0,
-1 * endsWithNumberedSuffixWithBrackets[0].length,
);
const [name, extension] = nameAndExtension(title);
return `${collectionID}-${name}${endsWithNumberedSuffixWithBrackets[0]}.${extension}`;
}
return `${collectionID}-${title}`;
};

// If the file name is longer than MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT (46), then Google Photos clips the file name,
// so we need to use the clipped file name to look up the metadataJSON file.
export const getClippedMetadataJSONMapKeyForFile = (
collectionID: number,
fileName: string,
) => {
return `${collectionID}-${fileName.slice(
0,
MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT,
)}`;
};
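Illustration (hypothetical values): for collectionID 42 and a fileName longer than 46 characters, the clipped key is `42-${fileName.slice(0, 46)}`, mirroring how much of the name Google Photos itself keeps when it writes the sidecar JSON.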

export const getMetadataJSONMapKeyForFile = (
collectionID: number,
fileName: string,
) => {
return `${collectionID}-${getFileOriginalName(fileName)}`;
};

const EDITED_FILE_SUFFIX = "-edited";

/*
Get the original file name for an edited file, so that we can associate it
with the original file's metadataJSON file (edited files don't have their
own metadata files).
*/
function getFileOriginalName(fileName: string) {
let originalName: string = null;
const [name, extension] = nameAndExtension(fileName);

const isEditedFile = name.endsWith(EDITED_FILE_SUFFIX);
if (isEditedFile) {
originalName = name.slice(0, -1 * EDITED_FILE_SUFFIX.length);
} else {
originalName = name;
}
if (extension) {
originalName += "." + extension;
}
return originalName;
}
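For example, "IMG_1234-edited.jpg" maps back to "IMG_1234.jpg", so the edited copy reuses the metadata JSON of the original.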

/** Try to parse the contents of a metadata JSON file in a Google Takeout. */
export const tryParseTakeoutMetadataJSON = async (
receivedFile: File | ElectronFile | string,
): Promise<ParsedMetadataJSON | undefined> => {
try {
let text: string;
if (typeof receivedFile == "string") {
text = await ensureElectron().fs.readTextFile(receivedFile);
} else {
if (!(receivedFile instanceof File)) {
receivedFile = new File(
[await receivedFile.blob()],
receivedFile.name,
);
}
text = await receivedFile.text();
}

return parseMetadataJSONText(text);
} catch (e) {
log.error("Failed to parse takeout metadata JSON", e);
return undefined;
}
};

const NULL_PARSED_METADATA_JSON: ParsedMetadataJSON = {
creationTime: null,
modificationTime: null,
...NULL_LOCATION,
};

const parseMetadataJSONText = (text: string) => {
const metadataJSON: object = JSON.parse(text);
if (!metadataJSON) {
return undefined;
}

const parsedMetadataJSON: ParsedMetadataJSON = NULL_PARSED_METADATA_JSON;

if (
metadataJSON["photoTakenTime"] &&
metadataJSON["photoTakenTime"]["timestamp"]
) {
parsedMetadataJSON.creationTime =
metadataJSON["photoTakenTime"]["timestamp"] * 1000000;
} else if (
metadataJSON["creationTime"] &&
metadataJSON["creationTime"]["timestamp"]
) {
parsedMetadataJSON.creationTime =
metadataJSON["creationTime"]["timestamp"] * 1000000;
}
if (
metadataJSON["modificationTime"] &&
metadataJSON["modificationTime"]["timestamp"]
) {
parsedMetadataJSON.modificationTime =
metadataJSON["modificationTime"]["timestamp"] * 1000000;
}
let locationData: Location = NULL_LOCATION;
if (
metadataJSON["geoData"] &&
(metadataJSON["geoData"]["latitude"] !== 0.0 ||
metadataJSON["geoData"]["longitude"] !== 0.0)
) {
locationData = metadataJSON["geoData"];
} else if (
metadataJSON["geoDataExif"] &&
(metadataJSON["geoDataExif"]["latitude"] !== 0.0 ||
metadataJSON["geoDataExif"]["longitude"] !== 0.0)
) {
locationData = metadataJSON["geoDataExif"];
}
if (locationData !== null) {
parsedMetadataJSON.latitude = locationData.latitude;
parsedMetadataJSON.longitude = locationData.longitude;
}
return parsedMetadataJSON;
};
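A hedged sample input and result (field names as read above): given

{"photoTakenTime": {"timestamp": "1513601804"}, "geoData": {"latitude": 12.97, "longitude": 77.59}}

the parser yields creationTime 1513601804000000 (epoch microseconds, i.e. the timestamp multiplied by 1000000) with the latitude and longitude copied through.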
@ -1,294 +1,124 @@
import { getFileNameSize } from "@/next/file";
import { FILE_TYPE } from "@/media/file";
import log from "@/next/log";
import { CustomErrorMessage, type Electron } from "@/next/types/ipc";
import { CustomError } from "@ente/shared/error";
import { FILE_TYPE } from "constants/file";
import { type Electron } from "@/next/types/ipc";
import { withTimeout } from "@ente/shared/utils";
import { BLACK_THUMBNAIL_BASE64 } from "constants/upload";
import * as FFmpegService from "services/ffmpeg";
import * as ffmpeg from "services/ffmpeg";
import { heicToJPEG } from "services/heic-convert";
import { ElectronFile, FileTypeInfo } from "types/upload";
import { FileTypeInfo } from "types/upload";
import { isFileHEIC } from "utils/file";
import { getUint8ArrayView } from "../readerService";
import { getFileName } from "./uploadService";

/** Maximum width or height of the generated thumbnail */
const maxThumbnailDimension = 720;
/** Maximum size (in bytes) of the generated thumbnail */
const maxThumbnailSize = 100 * 1024; // 100 KB
const MIN_COMPRESSION_PERCENTAGE_SIZE_DIFF = 10;
const MIN_QUALITY = 0.5;
const MAX_QUALITY = 0.7;

const WAIT_TIME_THUMBNAIL_GENERATION = 30 * 1000;

class ModuleState {
/**
* This will be set to true if we get an error from the Node.js side of our
* desktop app telling us that native JPEG conversion is not available for
* the current OS/arch combination. That way, we can stop pestering it again
* and again (saving an IPC round-trip).
*
* Note the double negative when it is used.
*/
isNativeThumbnailCreationNotAvailable = false;
}

const moduleState = new ModuleState();
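The check-once pattern this enables, in outline (a sketch of the consumer further below, not new behavior; `nativeThumbnail` stands in for the actual IPC call):

if (electron && !moduleState.isNativeThumbnailCreationNotAvailable) {
    try {
        return await nativeThumbnail(/* ... */);
    } catch (e) {
        // Remember the failure so subsequent files skip the IPC round-trip.
        if (e.message == CustomErrorMessage.NotAvailable)
            moduleState.isNativeThumbnailCreationNotAvailable = true;
    }
}
// fall through to the canvas implementation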
|
||||
|
||||
interface GeneratedThumbnail {
|
||||
/** The JPEG data of the generated thumbnail */
|
||||
thumbnail: Uint8Array;
|
||||
/**
|
||||
* `true` if this is a fallback (all black) thumbnail we're returning since
|
||||
* thumbnail generation failed for some reason.
|
||||
*/
|
||||
hasStaticThumbnail: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a JPEG thumbnail for the given {@link file}.
|
||||
* Generate a JPEG thumbnail for the given image or video blob.
|
||||
*
|
||||
* The thumbnail has a smaller file size so that is quick to load. But more
|
||||
* importantly, it uses a universal file format (JPEG in our case) so that the
|
||||
* thumbnail itself can be opened in all clients, even those like the web client
|
||||
* itself that might not yet have support for more exotic formats.
|
||||
*
|
||||
* @param blob The image or video blob whose thumbnail we want to generate.
|
||||
*
|
||||
* @param fileTypeInfo The type information for the file this blob came from.
|
||||
*
|
||||
* @return The JPEG data of the generated thumbnail.
|
||||
*/
|
||||
export const generateThumbnail = async (
|
||||
file: File | ElectronFile,
|
||||
export const generateThumbnailWeb = async (
|
||||
blob: Blob,
|
||||
fileTypeInfo: FileTypeInfo,
|
||||
): Promise<GeneratedThumbnail> => {
|
||||
try {
|
||||
const thumbnail =
|
||||
fileTypeInfo.fileType === FILE_TYPE.IMAGE
|
||||
? await generateImageThumbnail(file, fileTypeInfo)
|
||||
: await generateVideoThumbnail(file, fileTypeInfo);
|
||||
): Promise<Uint8Array> =>
|
||||
fileTypeInfo.fileType === FILE_TYPE.IMAGE
|
||||
? await generateImageThumbnailUsingCanvas(blob, fileTypeInfo)
|
||||
: await generateVideoThumbnailWeb(blob);
|
||||
|
||||
if (thumbnail.length == 0) throw new Error("Empty thumbnail");
|
||||
log.debug(() => `Generated thumbnail for ${getFileName(file)}`);
|
||||
return { thumbnail, hasStaticThumbnail: false };
|
||||
} catch (e) {
|
||||
log.error(
|
||||
`Failed to generate thumbnail for ${getFileName(file)} with format ${fileTypeInfo.exactType}`,
|
||||
e,
|
||||
);
|
||||
return { thumbnail: fallbackThumbnail(), hasStaticThumbnail: true };
|
||||
}
|
||||
};
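// A minimal usage sketch for the refactored web path above (a sketch, not
// part of the diff): the `fileTypeInfo` literal is an assumption for
// illustration; in the real flow it comes from the upload pipeline's file
// type detection step.
//
//     const imageBytes = new Uint8Array([/* ... */]);
//     const blob = new Blob([imageBytes], { type: "image/jpeg" });
//     const fileTypeInfo: FileTypeInfo = { fileType: FILE_TYPE.IMAGE, exactType: "jpeg" };
//     const jpegData: Uint8Array = await generateThumbnailWeb(blob, fileTypeInfo);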
|
||||
|
||||
/**
|
||||
* A fallback, black, thumbnail for use in cases where thumbnail generation
|
||||
* fails.
|
||||
*/
|
||||
const fallbackThumbnail = () =>
|
||||
Uint8Array.from(atob(BLACK_THUMBNAIL_BASE64), (c) => c.charCodeAt(0));
|
||||
|
||||
const generateImageThumbnail = async (
|
||||
file: File | ElectronFile,
|
||||
const generateImageThumbnailUsingCanvas = async (
|
||||
blob: Blob,
|
||||
fileTypeInfo: FileTypeInfo,
|
||||
) => {
|
||||
let jpegData: Uint8Array | undefined;
|
||||
|
||||
const electron = globalThis.electron;
|
||||
const available = !moduleState.isNativeThumbnailCreationNotAvailable;
|
||||
if (electron && available) {
|
||||
// If we're running in our desktop app, try to make the thumbnail using
|
||||
// the native tools available therein; it'll be faster than doing it in
|
||||
// the web layer.
|
||||
try {
|
||||
jpegData = await generateImageThumbnailInElectron(electron, file);
|
||||
} catch (e) {
|
||||
if (e.message == CustomErrorMessage.NotAvailable) {
|
||||
moduleState.isNativeThumbnailCreationNotAvailable = true;
|
||||
} else {
|
||||
log.error("Native thumbnail creation failed", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!jpegData) {
|
||||
jpegData = await generateImageThumbnailUsingCanvas(file, fileTypeInfo);
|
||||
}
|
||||
return jpegData;
|
||||
};
|
||||
|
||||
const generateImageThumbnailInElectron = async (
|
||||
electron: Electron,
|
||||
inputFile: File | ElectronFile,
|
||||
): Promise<Uint8Array> => {
|
||||
const startTime = Date.now();
|
||||
const jpegData = await electron.generateImageThumbnail(
|
||||
inputFile,
|
||||
maxThumbnailDimension,
|
||||
maxThumbnailSize,
|
||||
);
|
||||
log.debug(
|
||||
() => `Native thumbnail generation took ${Date.now() - startTime} ms`,
|
||||
);
|
||||
return jpegData;
|
||||
};
|
||||
|
||||
async function generateImageThumbnailUsingCanvas(
|
||||
file: File | ElectronFile,
|
||||
fileTypeInfo: FileTypeInfo,
|
||||
) {
|
||||
const canvas = document.createElement("canvas");
|
||||
const canvasCTX = canvas.getContext("2d");
|
||||
|
||||
let imageURL = null;
|
||||
let timeout = null;
|
||||
|
||||
if (isFileHEIC(fileTypeInfo.exactType)) {
|
||||
log.debug(() => `Pre-converting ${getFileName(file)} to JPEG`);
|
||||
const jpegBlob = await heicToJPEG(new Blob([await file.arrayBuffer()]));
|
||||
file = new File([jpegBlob], file.name);
|
||||
log.debug(() => `Pre-converting HEIC to JPEG for thumbnail generation`);
|
||||
blob = await heicToJPEG(blob);
|
||||
}
|
||||
|
||||
let image = new Image();
|
||||
imageURL = URL.createObjectURL(new Blob([await file.arrayBuffer()]));
|
||||
await new Promise((resolve, reject) => {
|
||||
image.setAttribute("src", imageURL);
|
||||
image.onload = () => {
|
||||
try {
|
||||
URL.revokeObjectURL(imageURL);
|
||||
const { width, height } = scaledThumbnailDimensions(
|
||||
image.width,
|
||||
image.height,
|
||||
maxThumbnailDimension,
|
||||
);
|
||||
canvas.width = width;
|
||||
canvas.height = height;
|
||||
canvasCTX.drawImage(image, 0, 0, width, height);
|
||||
image = null;
|
||||
clearTimeout(timeout);
|
||||
resolve(null);
|
||||
} catch (e) {
|
||||
const err = new Error(CustomError.THUMBNAIL_GENERATION_FAILED, {
|
||||
cause: e,
|
||||
});
|
||||
reject(err);
|
||||
}
|
||||
};
|
||||
timeout = setTimeout(
|
||||
() => reject(new Error("Operation timed out")),
|
||||
WAIT_TIME_THUMBNAIL_GENERATION,
|
||||
);
|
||||
});
|
||||
const thumbnailBlob = await getCompressedThumbnailBlobFromCanvas(canvas);
|
||||
return await getUint8ArrayView(thumbnailBlob);
|
||||
}
|
||||
const canvas = document.createElement("canvas");
|
||||
const canvasCtx = canvas.getContext("2d");
|
||||
|
||||
async function generateVideoThumbnail(
|
||||
file: File | ElectronFile,
|
||||
fileTypeInfo: FileTypeInfo,
|
||||
) {
|
||||
let thumbnail: Uint8Array;
|
||||
const imageURL = URL.createObjectURL(blob);
|
||||
await withTimeout(
|
||||
new Promise((resolve, reject) => {
|
||||
const image = new Image();
|
||||
image.setAttribute("src", imageURL);
|
||||
image.onload = () => {
|
||||
try {
|
||||
URL.revokeObjectURL(imageURL);
|
||||
const { width, height } = scaledThumbnailDimensions(
|
||||
image.width,
|
||||
image.height,
|
||||
maxThumbnailDimension,
|
||||
);
|
||||
canvas.width = width;
|
||||
canvas.height = height;
|
||||
canvasCtx.drawImage(image, 0, 0, width, height);
|
||||
resolve(undefined);
|
||||
} catch (e) {
|
||||
reject(e);
|
||||
}
|
||||
};
|
||||
}),
|
||||
30 * 1000,
|
||||
);
|
||||
|
||||
return await compressedJPEGData(canvas);
|
||||
};
|
||||
|
||||
const generateVideoThumbnailWeb = async (blob: Blob) => {
|
||||
try {
|
||||
log.info(
|
||||
`ffmpeg generateThumbnail called for ${getFileNameSize(file)}`,
|
||||
);
|
||||
|
||||
const thumbnail = await FFmpegService.generateVideoThumbnail(file);
|
||||
log.info(
|
||||
`ffmpeg thumbnail successfully generated ${getFileNameSize(file)}`,
|
||||
);
|
||||
return await getUint8ArrayView(thumbnail);
|
||||
return await ffmpeg.generateVideoThumbnailWeb(blob);
|
||||
} catch (e) {
|
||||
log.info(
|
||||
`ffmpeg thumbnail generated failed ${getFileNameSize(
|
||||
file,
|
||||
)} error: ${e.message}`,
|
||||
);
|
||||
log.error(
|
||||
`failed to generate thumbnail using ffmpeg for format ${fileTypeInfo.exactType}`,
|
||||
`Failed to generate video thumbnail using the wasm FFmpeg web worker, will fall back to canvas`,
|
||||
e,
|
||||
);
|
||||
thumbnail = await generateVideoThumbnailUsingCanvas(file);
|
||||
return generateVideoThumbnailUsingCanvas(blob);
|
||||
}
|
||||
return thumbnail;
|
||||
}
|
||||
};
|
||||
|
||||
async function generateVideoThumbnailUsingCanvas(file: File | ElectronFile) {
|
||||
const generateVideoThumbnailUsingCanvas = async (blob: Blob) => {
|
||||
const canvas = document.createElement("canvas");
|
||||
const canvasCTX = canvas.getContext("2d");
|
||||
const canvasCtx = canvas.getContext("2d");
|
||||
|
||||
let timeout = null;
|
||||
let videoURL = null;
|
||||
|
||||
let video = document.createElement("video");
|
||||
videoURL = URL.createObjectURL(new Blob([await file.arrayBuffer()]));
|
||||
await new Promise((resolve, reject) => {
|
||||
video.preload = "metadata";
|
||||
video.src = videoURL;
|
||||
video.addEventListener("loadeddata", function () {
|
||||
try {
|
||||
URL.revokeObjectURL(videoURL);
|
||||
if (!video) {
|
||||
throw Error("video load failed");
|
||||
const videoURL = URL.createObjectURL(blob);
|
||||
await withTimeout(
|
||||
new Promise((resolve, reject) => {
|
||||
const video = document.createElement("video");
|
||||
video.preload = "metadata";
|
||||
video.src = videoURL;
|
||||
video.addEventListener("loadeddata", () => {
|
||||
try {
|
||||
URL.revokeObjectURL(videoURL);
|
||||
const { width, height } = scaledThumbnailDimensions(
|
||||
video.videoWidth,
|
||||
video.videoHeight,
|
||||
maxThumbnailDimension,
|
||||
);
|
||||
canvas.width = width;
|
||||
canvas.height = height;
|
||||
canvasCtx.drawImage(video, 0, 0, width, height);
|
||||
resolve(undefined);
|
||||
} catch (e) {
|
||||
reject(e);
|
||||
}
|
||||
const { width, height } = scaledThumbnailDimensions(
|
||||
video.videoWidth,
|
||||
video.videoHeight,
|
||||
maxThumbnailDimension,
|
||||
);
|
||||
canvas.width = width;
|
||||
canvas.height = height;
|
||||
canvasCTX.drawImage(video, 0, 0, width, height);
|
||||
video = null;
|
||||
clearTimeout(timeout);
|
||||
resolve(null);
|
||||
} catch (e) {
|
||||
const err = Error(
|
||||
`${CustomError.THUMBNAIL_GENERATION_FAILED} err: ${e}`,
|
||||
);
|
||||
log.error(CustomError.THUMBNAIL_GENERATION_FAILED, e);
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
timeout = setTimeout(
|
||||
() => reject(new Error("Operation timed out")),
|
||||
WAIT_TIME_THUMBNAIL_GENERATION,
|
||||
);
|
||||
});
|
||||
const thumbnailBlob = await getCompressedThumbnailBlobFromCanvas(canvas);
|
||||
return await getUint8ArrayView(thumbnailBlob);
|
||||
}
|
||||
|
||||
async function getCompressedThumbnailBlobFromCanvas(canvas: HTMLCanvasElement) {
|
||||
let thumbnailBlob: Blob = null;
|
||||
let prevSize = Number.MAX_SAFE_INTEGER;
|
||||
let quality = MAX_QUALITY;
|
||||
|
||||
do {
|
||||
if (thumbnailBlob) {
|
||||
prevSize = thumbnailBlob.size;
|
||||
}
|
||||
thumbnailBlob = await new Promise((resolve) => {
|
||||
canvas.toBlob(
|
||||
function (blob) {
|
||||
resolve(blob);
|
||||
},
|
||||
"image/jpeg",
|
||||
quality,
|
||||
);
|
||||
});
|
||||
thumbnailBlob = thumbnailBlob ?? new Blob([]);
|
||||
quality -= 0.1;
|
||||
} while (
|
||||
quality >= MIN_QUALITY &&
|
||||
thumbnailBlob.size > maxThumbnailSize &&
|
||||
percentageSizeDiff(thumbnailBlob.size, prevSize) >=
|
||||
MIN_COMPRESSION_PERCENTAGE_SIZE_DIFF
|
||||
});
|
||||
}),
|
||||
30 * 1000,
|
||||
);
|
||||
|
||||
return thumbnailBlob;
|
||||
}
|
||||
|
||||
function percentageSizeDiff(
|
||||
newThumbnailSize: number,
|
||||
oldThumbnailSize: number,
|
||||
) {
|
||||
return ((oldThumbnailSize - newThumbnailSize) * 100) / oldThumbnailSize;
|
||||
}
|
||||
return await compressedJPEGData(canvas);
|
||||
};
|
||||
|
||||
/**
|
||||
* Compute the size of the thumbnail to create for an image with the given
|
||||
|
@ -317,3 +147,65 @@ const scaledThumbnailDimensions = (
|
|||
return { width: 0, height: 0 };
|
||||
return thumbnailDimensions;
|
||||
};
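// The body of scaledThumbnailDimensions is elided by the diff above; what
// follows is a minimal sketch (not the actual implementation) of the usual
// aspect-ratio preserving computation, assuming the function clamps the
// larger side to maxDimension and never upscales:
const scaledThumbnailDimensionsSketch = (
    width: number,
    height: number,
    maxDimension: number,
): { width: number; height: number } => {
    if (width === 0 || height === 0) return { width: 0, height: 0 };
    // Scale by the factor that brings the larger side down to maxDimension,
    // capped at 1 so we never upscale beyond the original dimensions.
    const scale = Math.min(maxDimension / Math.max(width, height), 1);
    return {
        width: Math.round(width * scale),
        height: Math.round(height * scale),
    };
};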
|
||||
|
||||
const compressedJPEGData = async (canvas: HTMLCanvasElement) => {
|
||||
let blob: Blob;
|
||||
let prevSize = Number.MAX_SAFE_INTEGER;
|
||||
let quality = 0.7;
|
||||
|
||||
do {
|
||||
if (blob) prevSize = blob.size;
|
||||
blob = await new Promise((resolve) => {
|
||||
canvas.toBlob((blob) => resolve(blob), "image/jpeg", quality);
|
||||
});
|
||||
quality -= 0.1;
|
||||
} while (
|
||||
quality >= 0.5 &&
|
||||
blob.size > maxThumbnailSize &&
|
||||
percentageSizeDiff(blob.size, prevSize) >= 10
|
||||
);
|
||||
|
||||
return new Uint8Array(await blob.arrayBuffer());
|
||||
};
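// Worked example of the loop in compressedJPEGData: encoding starts at
// quality 0.7 and steps down by 0.1. The loop exits as soon as the JPEG fits
// within maxThumbnailSize (100 KB), quality would drop below 0.5, or a step
// shrinks the output by less than 10% (diminishing returns). So a canvas
// that encodes to 180 KB at quality 0.7 and 95 KB at 0.6 stops after the
// second iteration.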
|
||||
|
||||
const percentageSizeDiff = (
|
||||
newThumbnailSize: number,
|
||||
oldThumbnailSize: number,
|
||||
) => ((oldThumbnailSize - newThumbnailSize) * 100) / oldThumbnailSize;
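// E.g. percentageSizeDiff(90_000, 100_000) = ((100000 - 90000) * 100) / 100000
// = 10, i.e. the new encoding is 10% smaller than the previous attempt.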
|
||||
|
||||
/**
|
||||
* Generate a JPEG thumbnail for the given file or path using native tools.
|
||||
*
|
||||
* This function only works when we're running in the context of our desktop
|
||||
* app, and this dependency is enforced by the need to pass the {@link electron}
|
||||
* object which we use to perform IPC with the Node.js side of our desktop app.
|
||||
*
|
||||
* @param dataOrPath Contents of an image or video file, or the path to the
|
||||
* image or video file on the user's local filesystem, whose thumbnail we want
|
||||
* to generate.
|
||||
*
|
||||
* @param fileTypeInfo The type information for {@link dataOrPath}.
|
||||
*
|
||||
* @return The JPEG data of the generated thumbnail.
|
||||
*
|
||||
* See also {@link generateThumbnailWeb}.
|
||||
*/
|
||||
export const generateThumbnailNative = async (
|
||||
electron: Electron,
|
||||
dataOrPath: Uint8Array | string,
|
||||
fileTypeInfo: FileTypeInfo,
|
||||
): Promise<Uint8Array> =>
|
||||
fileTypeInfo.fileType === FILE_TYPE.IMAGE
|
||||
? await electron.generateImageThumbnail(
|
||||
dataOrPath,
|
||||
maxThumbnailDimension,
|
||||
maxThumbnailSize,
|
||||
)
|
||||
: ffmpeg.generateVideoThumbnailNative(electron, dataOrPath);
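// A minimal usage sketch (not part of the diff), assuming we're inside the
// desktop app where globalThis.electron is defined, and that fileTypeInfo
// came from the upload pipeline's type detection:
//
//     const electron = globalThis.electron;
//     if (electron) {
//         const jpegData = await generateThumbnailNative(
//             electron,
//             "/path/to/original.heic",
//             fileTypeInfo,
//         );
//     }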
|
||||
|
||||
/**
|
||||
* A fallback, black, thumbnail for use in cases where thumbnail generation
|
||||
* fails.
|
||||
*/
|
||||
export const fallbackThumbnail = () =>
|
||||
Uint8Array.from(atob(BLACK_THUMBNAIL_BASE64), (c) => c.charCodeAt(0));
|
||||
|
|
|
@ -1,218 +0,0 @@
|
|||
import { CustomError } from "@ente/shared/error";
|
||||
import { Canceler } from "axios";
|
||||
import {
|
||||
RANDOM_PERCENTAGE_PROGRESS_FOR_PUT,
|
||||
UPLOAD_RESULT,
|
||||
UPLOAD_STAGES,
|
||||
} from "constants/upload";
|
||||
import {
|
||||
FinishedUploads,
|
||||
InProgressUpload,
|
||||
InProgressUploads,
|
||||
ProgressUpdater,
|
||||
SegregatedFinishedUploads,
|
||||
} from "types/upload/ui";
|
||||
import uploadCancelService from "./uploadCancelService";
|
||||
|
||||
const REQUEST_TIMEOUT_TIME = 30 * 1000; // 30 sec;
|
||||
class UIService {
|
||||
private progressUpdater: ProgressUpdater;
|
||||
|
||||
// UPLOAD LEVEL STATES
|
||||
private uploadStage: UPLOAD_STAGES = UPLOAD_STAGES.START;
|
||||
private filenames: Map<number, string> = new Map();
|
||||
private hasLivePhoto: boolean = false;
|
||||
private uploadProgressView: boolean = false;
|
||||
|
||||
// STAGE LEVEL STATES
|
||||
private perFileProgress: number;
|
||||
private filesUploadedCount: number;
|
||||
private totalFilesCount: number;
|
||||
private inProgressUploads: InProgressUploads = new Map();
|
||||
private finishedUploads: FinishedUploads = new Map();
|
||||
|
||||
init(progressUpdater: ProgressUpdater) {
|
||||
this.progressUpdater = progressUpdater;
|
||||
this.progressUpdater.setUploadStage(this.uploadStage);
|
||||
this.progressUpdater.setUploadFilenames(this.filenames);
|
||||
this.progressUpdater.setHasLivePhotos(this.hasLivePhoto);
|
||||
this.progressUpdater.setUploadProgressView(this.uploadProgressView);
|
||||
this.progressUpdater.setUploadCounter({
|
||||
finished: this.filesUploadedCount,
|
||||
total: this.totalFilesCount,
|
||||
});
|
||||
this.progressUpdater.setInProgressUploads(
|
||||
convertInProgressUploadsToList(this.inProgressUploads),
|
||||
);
|
||||
this.progressUpdater.setFinishedUploads(
|
||||
segregatedFinishedUploadsToList(this.finishedUploads),
|
||||
);
|
||||
}
|
||||
|
||||
reset(count = 0) {
|
||||
this.setTotalFileCount(count);
|
||||
this.filesUploadedCount = 0;
|
||||
this.inProgressUploads = new Map<number, number>();
|
||||
this.finishedUploads = new Map<number, UPLOAD_RESULT>();
|
||||
this.updateProgressBarUI();
|
||||
}
|
||||
|
||||
setTotalFileCount(count: number) {
|
||||
this.totalFilesCount = count;
|
||||
if (count > 0) {
|
||||
this.perFileProgress = 100 / this.totalFilesCount;
|
||||
} else {
|
||||
this.perFileProgress = 0;
|
||||
}
|
||||
}
|
||||
|
||||
setFileProgress(key: number, progress: number) {
|
||||
this.inProgressUploads.set(key, progress);
|
||||
this.updateProgressBarUI();
|
||||
}
|
||||
|
||||
setUploadStage(stage: UPLOAD_STAGES) {
|
||||
this.uploadStage = stage;
|
||||
this.progressUpdater.setUploadStage(stage);
|
||||
}
|
||||
|
||||
setFilenames(filenames: Map<number, string>) {
|
||||
this.filenames = filenames;
|
||||
this.progressUpdater.setUploadFilenames(filenames);
|
||||
}
|
||||
|
||||
setHasLivePhoto(hasLivePhoto: boolean) {
|
||||
this.hasLivePhoto = hasLivePhoto;
|
||||
this.progressUpdater.setHasLivePhotos(hasLivePhoto);
|
||||
}
|
||||
|
||||
setUploadProgressView(uploadProgressView: boolean) {
|
||||
this.uploadProgressView = uploadProgressView;
|
||||
this.progressUpdater.setUploadProgressView(uploadProgressView);
|
||||
}
|
||||
|
||||
increaseFileUploaded() {
|
||||
this.filesUploadedCount++;
|
||||
this.updateProgressBarUI();
|
||||
}
|
||||
|
||||
moveFileToResultList(key: number, uploadResult: UPLOAD_RESULT) {
|
||||
this.finishedUploads.set(key, uploadResult);
|
||||
this.inProgressUploads.delete(key);
|
||||
this.updateProgressBarUI();
|
||||
}
|
||||
|
||||
hasFilesInResultList() {
|
||||
const finishedUploadsList = segregatedFinishedUploadsToList(
|
||||
this.finishedUploads,
|
||||
);
|
||||
for (const x of finishedUploadsList.values()) {
|
||||
if (x.length > 0) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
private updateProgressBarUI() {
|
||||
const {
|
||||
setPercentComplete,
|
||||
setUploadCounter,
|
||||
setInProgressUploads,
|
||||
setFinishedUploads,
|
||||
} = this.progressUpdater;
|
||||
setUploadCounter({
|
||||
finished: this.filesUploadedCount,
|
||||
total: this.totalFilesCount,
|
||||
});
|
||||
let percentComplete =
|
||||
this.perFileProgress *
|
||||
(this.finishedUploads.size || this.filesUploadedCount);
|
||||
if (this.inProgressUploads) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
for (const [_, progress] of this.inProgressUploads) {
|
||||
// filter negative indicator values during percentComplete calculation
|
||||
if (progress < 0) {
|
||||
continue;
|
||||
}
|
||||
percentComplete += (this.perFileProgress * progress) / 100;
|
||||
}
|
||||
}
|
||||
|
||||
setPercentComplete(percentComplete);
|
||||
setInProgressUploads(
|
||||
convertInProgressUploadsToList(this.inProgressUploads),
|
||||
);
|
||||
setFinishedUploads(
|
||||
segregatedFinishedUploadsToList(this.finishedUploads),
|
||||
);
|
||||
}
|
||||
|
||||
trackUploadProgress(
|
||||
fileLocalID: number,
|
||||
percentPerPart = RANDOM_PERCENTAGE_PROGRESS_FOR_PUT(),
|
||||
index = 0,
|
||||
) {
|
||||
const cancel: { exec: Canceler } = { exec: () => {} };
|
||||
const cancelTimedOutRequest = () =>
|
||||
cancel.exec(CustomError.REQUEST_TIMEOUT);
|
||||
|
||||
const cancelCancelledUploadRequest = () =>
|
||||
cancel.exec(CustomError.UPLOAD_CANCELLED);
|
||||
|
||||
let timeout = null;
|
||||
const resetTimeout = () => {
|
||||
if (timeout) {
|
||||
clearTimeout(timeout);
|
||||
}
|
||||
timeout = setTimeout(cancelTimedOutRequest, REQUEST_TIMEOUT_TIME);
|
||||
};
|
||||
return {
|
||||
cancel,
|
||||
onUploadProgress: (event) => {
|
||||
this.inProgressUploads.set(
|
||||
fileLocalID,
|
||||
Math.min(
|
||||
Math.round(
|
||||
percentPerPart * index +
|
||||
(percentPerPart * event.loaded) / event.total,
|
||||
),
|
||||
98,
|
||||
),
|
||||
);
|
||||
this.updateProgressBarUI();
|
||||
if (event.loaded === event.total) {
|
||||
clearTimeout(timeout);
|
||||
} else {
|
||||
resetTimeout();
|
||||
}
|
||||
if (uploadCancelService.isUploadCancelationRequested()) {
|
||||
cancelCancelledUploadRequest();
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export default new UIService();
|
||||
|
||||
function convertInProgressUploadsToList(inProgressUploads) {
|
||||
return [...inProgressUploads.entries()].map(
|
||||
([localFileID, progress]) =>
|
||||
({
|
||||
localFileID,
|
||||
progress,
|
||||
}) as InProgressUpload,
|
||||
);
|
||||
}
|
||||
|
||||
function segregatedFinishedUploadsToList(finishedUploads: FinishedUploads) {
|
||||
const segregatedFinishedUploads = new Map() as SegregatedFinishedUploads;
|
||||
for (const [localID, result] of finishedUploads) {
|
||||
if (!segregatedFinishedUploads.has(result)) {
|
||||
segregatedFinishedUploads.set(result, []);
|
||||
}
|
||||
segregatedFinishedUploads.get(result).push(localID);
|
||||
}
|
||||
return segregatedFinishedUploads;
|
||||
}
|
|
@ -1,12 +1,22 @@
|
|||
import { FILE_TYPE } from "@/media/file";
|
||||
import { potentialFileTypeFromExtension } from "@/media/live-photo";
|
||||
import { ensureElectron } from "@/next/electron";
|
||||
import { nameAndExtension } from "@/next/file";
|
||||
import log from "@/next/log";
|
||||
import { ElectronFile } from "@/next/types/file";
|
||||
import { ComlinkWorker } from "@/next/worker/comlink-worker";
|
||||
import { getDedicatedCryptoWorker } from "@ente/shared/crypto";
|
||||
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
|
||||
import { CustomError } from "@ente/shared/error";
|
||||
import { Events, eventBus } from "@ente/shared/events";
|
||||
import { wait } from "@ente/shared/utils";
|
||||
import { Canceler } from "axios";
|
||||
import { Remote } from "comlink";
|
||||
import { UPLOAD_RESULT, UPLOAD_STAGES } from "constants/upload";
|
||||
import {
|
||||
RANDOM_PERCENTAGE_PROGRESS_FOR_PUT,
|
||||
UPLOAD_RESULT,
|
||||
UPLOAD_STAGES,
|
||||
} from "constants/upload";
|
||||
import isElectron from "is-electron";
|
||||
import {
|
||||
getLocalPublicFiles,
|
||||
|
@ -18,37 +28,241 @@ import { Collection } from "types/collection";
|
|||
import { EncryptedEnteFile, EnteFile } from "types/file";
|
||||
import { SetFiles } from "types/gallery";
|
||||
import {
|
||||
ElectronFile,
|
||||
FileWithCollection,
|
||||
ParsedMetadataJSON,
|
||||
ParsedMetadataJSONMap,
|
||||
PublicUploadProps,
|
||||
type FileWithCollection2,
|
||||
type LivePhotoAssets2,
|
||||
} from "types/upload";
|
||||
import { ProgressUpdater } from "types/upload/ui";
|
||||
import { decryptFile, getUserOwnedFiles, sortFiles } from "utils/file";
|
||||
import {
|
||||
areFileWithCollectionsSame,
|
||||
segregateMetadataAndMediaFiles,
|
||||
segregateMetadataAndMediaFiles2,
|
||||
} from "utils/upload";
|
||||
FinishedUploads,
|
||||
InProgressUpload,
|
||||
InProgressUploads,
|
||||
ProgressUpdater,
|
||||
SegregatedFinishedUploads,
|
||||
} from "types/upload/ui";
|
||||
import { decryptFile, getUserOwnedFiles, sortFiles } from "utils/file";
|
||||
import { segregateMetadataAndMediaFiles } from "utils/upload";
|
||||
import { getLocalFiles } from "../fileService";
|
||||
import {
|
||||
clusterLivePhotoFiles,
|
||||
getMetadataJSONMapKeyForJSON,
|
||||
parseMetadataJSON,
|
||||
} from "./metadataService";
|
||||
import { default as UIService, default as uiService } from "./uiService";
|
||||
tryParseTakeoutMetadataJSON,
|
||||
type ParsedMetadataJSON,
|
||||
} from "./takeout";
|
||||
import uploadCancelService from "./uploadCancelService";
|
||||
import UploadService, { getFileName, uploader } from "./uploadService";
|
||||
import UploadService, {
|
||||
assetName,
|
||||
getAssetName,
|
||||
getFileName,
|
||||
uploader,
|
||||
} from "./uploadService";
|
||||
|
||||
const MAX_CONCURRENT_UPLOADS = 4;
|
||||
|
||||
class UIService {
|
||||
private progressUpdater: ProgressUpdater;
|
||||
|
||||
// UPLOAD LEVEL STATES
|
||||
private uploadStage: UPLOAD_STAGES = UPLOAD_STAGES.START;
|
||||
private filenames: Map<number, string> = new Map();
|
||||
private hasLivePhoto: boolean = false;
|
||||
private uploadProgressView: boolean = false;
|
||||
|
||||
// STAGE LEVEL STATES
|
||||
private perFileProgress: number;
|
||||
private filesUploadedCount: number;
|
||||
private totalFilesCount: number;
|
||||
private inProgressUploads: InProgressUploads = new Map();
|
||||
private finishedUploads: FinishedUploads = new Map();
|
||||
|
||||
init(progressUpdater: ProgressUpdater) {
|
||||
this.progressUpdater = progressUpdater;
|
||||
this.progressUpdater.setUploadStage(this.uploadStage);
|
||||
this.progressUpdater.setUploadFilenames(this.filenames);
|
||||
this.progressUpdater.setHasLivePhotos(this.hasLivePhoto);
|
||||
this.progressUpdater.setUploadProgressView(this.uploadProgressView);
|
||||
this.progressUpdater.setUploadCounter({
|
||||
finished: this.filesUploadedCount,
|
||||
total: this.totalFilesCount,
|
||||
});
|
||||
this.progressUpdater.setInProgressUploads(
|
||||
convertInProgressUploadsToList(this.inProgressUploads),
|
||||
);
|
||||
this.progressUpdater.setFinishedUploads(
|
||||
segregatedFinishedUploadsToList(this.finishedUploads),
|
||||
);
|
||||
}
|
||||
|
||||
reset(count = 0) {
|
||||
this.setTotalFileCount(count);
|
||||
this.filesUploadedCount = 0;
|
||||
this.inProgressUploads = new Map<number, number>();
|
||||
this.finishedUploads = new Map<number, UPLOAD_RESULT>();
|
||||
this.updateProgressBarUI();
|
||||
}
|
||||
|
||||
setTotalFileCount(count: number) {
|
||||
this.totalFilesCount = count;
|
||||
if (count > 0) {
|
||||
this.perFileProgress = 100 / this.totalFilesCount;
|
||||
} else {
|
||||
this.perFileProgress = 0;
|
||||
}
|
||||
}
|
||||
|
||||
setFileProgress(key: number, progress: number) {
|
||||
this.inProgressUploads.set(key, progress);
|
||||
this.updateProgressBarUI();
|
||||
}
|
||||
|
||||
setUploadStage(stage: UPLOAD_STAGES) {
|
||||
this.uploadStage = stage;
|
||||
this.progressUpdater.setUploadStage(stage);
|
||||
}
|
||||
|
||||
setFilenames(filenames: Map<number, string>) {
|
||||
this.filenames = filenames;
|
||||
this.progressUpdater.setUploadFilenames(filenames);
|
||||
}
|
||||
|
||||
setHasLivePhoto(hasLivePhoto: boolean) {
|
||||
this.hasLivePhoto = hasLivePhoto;
|
||||
this.progressUpdater.setHasLivePhotos(hasLivePhoto);
|
||||
}
|
||||
|
||||
setUploadProgressView(uploadProgressView: boolean) {
|
||||
this.uploadProgressView = uploadProgressView;
|
||||
this.progressUpdater.setUploadProgressView(uploadProgressView);
|
||||
}
|
||||
|
||||
increaseFileUploaded() {
|
||||
this.filesUploadedCount++;
|
||||
this.updateProgressBarUI();
|
||||
}
|
||||
|
||||
moveFileToResultList(key: number, uploadResult: UPLOAD_RESULT) {
|
||||
this.finishedUploads.set(key, uploadResult);
|
||||
this.inProgressUploads.delete(key);
|
||||
this.updateProgressBarUI();
|
||||
}
|
||||
|
||||
hasFilesInResultList() {
|
||||
const finishedUploadsList = segregatedFinishedUploadsToList(
|
||||
this.finishedUploads,
|
||||
);
|
||||
for (const x of finishedUploadsList.values()) {
|
||||
if (x.length > 0) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
private updateProgressBarUI() {
|
||||
const {
|
||||
setPercentComplete,
|
||||
setUploadCounter,
|
||||
setInProgressUploads,
|
||||
setFinishedUploads,
|
||||
} = this.progressUpdater;
|
||||
setUploadCounter({
|
||||
finished: this.filesUploadedCount,
|
||||
total: this.totalFilesCount,
|
||||
});
|
||||
let percentComplete =
|
||||
this.perFileProgress *
|
||||
(this.finishedUploads.size || this.filesUploadedCount);
|
||||
if (this.inProgressUploads) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
for (const [_, progress] of this.inProgressUploads) {
|
||||
// filter negative indicator values during percentComplete calculation
|
||||
if (progress < 0) {
|
||||
continue;
|
||||
}
|
||||
percentComplete += (this.perFileProgress * progress) / 100;
|
||||
}
|
||||
}
|
||||
|
||||
setPercentComplete(percentComplete);
|
||||
setInProgressUploads(
|
||||
convertInProgressUploadsToList(this.inProgressUploads),
|
||||
);
|
||||
setFinishedUploads(
|
||||
segregatedFinishedUploadsToList(this.finishedUploads),
|
||||
);
|
||||
}
|
||||
|
||||
trackUploadProgress(
|
||||
fileLocalID: number,
|
||||
percentPerPart = RANDOM_PERCENTAGE_PROGRESS_FOR_PUT(),
|
||||
index = 0,
|
||||
) {
|
||||
const cancel: { exec: Canceler } = { exec: () => {} };
|
||||
const cancelTimedOutRequest = () =>
|
||||
cancel.exec(CustomError.REQUEST_TIMEOUT);
|
||||
|
||||
const cancelCancelledUploadRequest = () =>
|
||||
cancel.exec(CustomError.UPLOAD_CANCELLED);
|
||||
|
||||
let timeout = null;
|
||||
const resetTimeout = () => {
|
||||
if (timeout) {
|
||||
clearTimeout(timeout);
|
||||
}
|
||||
timeout = setTimeout(cancelTimedOutRequest, 30 * 1000 /* 30 sec */);
|
||||
};
|
||||
return {
|
||||
cancel,
|
||||
onUploadProgress: (event) => {
|
||||
this.inProgressUploads.set(
|
||||
fileLocalID,
|
||||
Math.min(
|
||||
Math.round(
|
||||
percentPerPart * index +
|
||||
(percentPerPart * event.loaded) / event.total,
|
||||
),
|
||||
98,
|
||||
),
|
||||
);
|
||||
this.updateProgressBarUI();
|
||||
if (event.loaded === event.total) {
|
||||
clearTimeout(timeout);
|
||||
} else {
|
||||
resetTimeout();
|
||||
}
|
||||
if (uploadCancelService.isUploadCancelationRequested()) {
|
||||
cancelCancelledUploadRequest();
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
}
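// Worked example of the arithmetic in trackUploadProgress: for a multipart
// upload where each part contributes percentPerPart = 20 and we're on part
// index 2 with event.loaded / event.total = 0.5, the file's progress becomes
// Math.min(Math.round(20 * 2 + 20 * 0.5), 98) = 50. The cap at 98 presumably
// keeps the bar from showing completion before the upload is finalized.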
|
||||
|
||||
function convertInProgressUploadsToList(inProgressUploads) {
|
||||
return [...inProgressUploads.entries()].map(
|
||||
([localFileID, progress]) =>
|
||||
({
|
||||
localFileID,
|
||||
progress,
|
||||
}) as InProgressUpload,
|
||||
);
|
||||
}
|
||||
|
||||
function segregatedFinishedUploadsToList(finishedUploads: FinishedUploads) {
|
||||
const segregatedFinishedUploads = new Map() as SegregatedFinishedUploads;
|
||||
for (const [localID, result] of finishedUploads) {
|
||||
if (!segregatedFinishedUploads.has(result)) {
|
||||
segregatedFinishedUploads.set(result, []);
|
||||
}
|
||||
segregatedFinishedUploads.get(result).push(localID);
|
||||
}
|
||||
return segregatedFinishedUploads;
|
||||
}
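// Example: a finishedUploads map of { 1 → UPLOAD_RESULT.UPLOADED, 2 →
// UPLOAD_RESULT.FAILED, 3 → UPLOAD_RESULT.UPLOADED } segregates into
// { UPLOAD_RESULT.UPLOADED → [1, 3], UPLOAD_RESULT.FAILED → [2] }, the shape
// that the progress UI consumes via setFinishedUploads.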
|
||||
|
||||
class UploadManager {
|
||||
private cryptoWorkers = new Array<
|
||||
ComlinkWorker<typeof DedicatedCryptoWorker>
|
||||
>(MAX_CONCURRENT_UPLOADS);
|
||||
private parsedMetadataJSONMap: ParsedMetadataJSONMap;
|
||||
private parsedMetadataJSONMap: Map<string, ParsedMetadataJSON>;
|
||||
private filesToBeUploaded: FileWithCollection2[];
|
||||
private remainingFiles: FileWithCollection2[] = [];
|
||||
private failedFiles: FileWithCollection2[];
|
||||
|
@ -58,20 +272,26 @@ class UploadManager {
|
|||
private uploadInProgress: boolean;
|
||||
private publicUploadProps: PublicUploadProps;
|
||||
private uploaderName: string;
|
||||
private uiService: UIService;
|
||||
private isCFUploadProxyDisabled: boolean = false;
|
||||
|
||||
constructor() {
|
||||
this.uiService = new UIService();
|
||||
}
|
||||
public async init(
|
||||
progressUpdater: ProgressUpdater,
|
||||
setFiles: SetFiles,
|
||||
publicCollectProps: PublicUploadProps,
|
||||
isCFUploadProxyDisabled: boolean,
|
||||
) {
|
||||
UIService.init(progressUpdater);
|
||||
this.uiService.init(progressUpdater);
|
||||
const remoteIsCFUploadProxyDisabled =
|
||||
await getDisableCFUploadProxyFlag();
|
||||
if (remoteIsCFUploadProxyDisabled) {
|
||||
isCFUploadProxyDisabled = remoteIsCFUploadProxyDisabled;
|
||||
}
|
||||
UploadService.init(publicCollectProps, isCFUploadProxyDisabled);
|
||||
this.isCFUploadProxyDisabled = isCFUploadProxyDisabled;
|
||||
UploadService.init(publicCollectProps);
|
||||
this.setFiles = setFiles;
|
||||
this.publicUploadProps = publicCollectProps;
|
||||
}
|
||||
|
@ -91,13 +311,13 @@ class UploadManager {
|
|||
|
||||
prepareForNewUpload() {
|
||||
this.resetState();
|
||||
UIService.reset();
|
||||
this.uiService.reset();
|
||||
uploadCancelService.reset();
|
||||
UIService.setUploadStage(UPLOAD_STAGES.START);
|
||||
this.uiService.setUploadStage(UPLOAD_STAGES.START);
|
||||
}
|
||||
|
||||
showUploadProgressDialog() {
|
||||
UIService.setUploadProgressView(true);
|
||||
this.uiService.setUploadProgressView(true);
|
||||
}
|
||||
|
||||
async updateExistingFilesAndCollections(collections: Collection[]) {
|
||||
|
@ -128,11 +348,11 @@ class UploadManager {
|
|||
log.info(
|
||||
`received ${filesWithCollectionToUploadIn.length} files to upload`,
|
||||
);
|
||||
uiService.setFilenames(
|
||||
this.uiService.setFilenames(
|
||||
new Map<number, string>(
|
||||
filesWithCollectionToUploadIn.map((mediaFile) => [
|
||||
mediaFile.localID,
|
||||
UploadService.getAssetName(mediaFile),
|
||||
getAssetName(mediaFile),
|
||||
]),
|
||||
),
|
||||
);
|
||||
|
@ -141,39 +361,33 @@ class UploadManager {
|
|||
log.info(`has ${metadataJSONFiles.length} metadata json files`);
|
||||
log.info(`has ${mediaFiles.length} media files`);
|
||||
if (metadataJSONFiles.length) {
|
||||
UIService.setUploadStage(
|
||||
this.uiService.setUploadStage(
|
||||
UPLOAD_STAGES.READING_GOOGLE_METADATA_FILES,
|
||||
);
|
||||
await this.parseMetadataJSONFiles(metadataJSONFiles);
|
||||
|
||||
UploadService.setParsedMetadataJSONMap(
|
||||
this.parsedMetadataJSONMap,
|
||||
);
|
||||
}
|
||||
|
||||
if (mediaFiles.length) {
|
||||
log.info(`clusterLivePhotoFiles started`);
|
||||
const analysedMediaFiles =
|
||||
await clusterLivePhotoFiles(mediaFiles);
|
||||
log.info(`clusterLivePhotoFiles ended`);
|
||||
log.info(
|
||||
`got live photos: ${
|
||||
mediaFiles.length !== analysedMediaFiles.length
|
||||
}`,
|
||||
);
|
||||
uiService.setFilenames(
|
||||
const clusteredMediaFiles = clusterLivePhotos(mediaFiles);
|
||||
|
||||
if (uploadCancelService.isUploadCancelationRequested()) {
|
||||
throw Error(CustomError.UPLOAD_CANCELLED);
|
||||
}
|
||||
|
||||
this.uiService.setFilenames(
|
||||
new Map<number, string>(
|
||||
analysedMediaFiles.map((mediaFile) => [
|
||||
clusteredMediaFiles.map((mediaFile) => [
|
||||
mediaFile.localID,
|
||||
UploadService.getAssetName(mediaFile),
|
||||
assetName(mediaFile),
|
||||
]),
|
||||
),
|
||||
);
|
||||
|
||||
UIService.setHasLivePhoto(
|
||||
mediaFiles.length !== analysedMediaFiles.length,
|
||||
this.uiService.setHasLivePhoto(
|
||||
mediaFiles.length !== clusteredMediaFiles.length,
|
||||
);
|
||||
|
||||
await this.uploadMediaFiles(analysedMediaFiles);
|
||||
await this.uploadMediaFiles(clusteredMediaFiles);
|
||||
}
|
||||
} catch (e) {
|
||||
if (e.message === CustomError.UPLOAD_CANCELLED) {
|
||||
|
@ -186,105 +400,14 @@ class UploadManager {
|
|||
throw e;
|
||||
}
|
||||
} finally {
|
||||
UIService.setUploadStage(UPLOAD_STAGES.FINISH);
|
||||
this.uiService.setUploadStage(UPLOAD_STAGES.FINISH);
|
||||
for (let i = 0; i < MAX_CONCURRENT_UPLOADS; i++) {
|
||||
this.cryptoWorkers[i]?.terminate();
|
||||
}
|
||||
this.uploadInProgress = false;
|
||||
}
|
||||
try {
|
||||
if (!UIService.hasFilesInResultList()) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
} catch (e) {
|
||||
log.error("failed to return shouldCloseProgressBar", e);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public async queueFilesForUpload2(
|
||||
filesWithCollectionToUploadIn: FileWithCollection2[],
|
||||
collections: Collection[],
|
||||
uploaderName?: string,
|
||||
) {
|
||||
try {
|
||||
if (this.uploadInProgress) {
|
||||
throw Error("can't run multiple uploads at once");
|
||||
}
|
||||
this.uploadInProgress = true;
|
||||
await this.updateExistingFilesAndCollections(collections);
|
||||
this.uploaderName = uploaderName;
|
||||
log.info(
|
||||
`received ${filesWithCollectionToUploadIn.length} files to upload`,
|
||||
);
|
||||
uiService.setFilenames(
|
||||
new Map<number, string>(
|
||||
filesWithCollectionToUploadIn.map((mediaFile) => [
|
||||
mediaFile.localID,
|
||||
UploadService.getAssetName(mediaFile),
|
||||
]),
|
||||
),
|
||||
);
|
||||
const { metadataJSONFiles, mediaFiles } =
|
||||
segregateMetadataAndMediaFiles2(filesWithCollectionToUploadIn);
|
||||
log.info(`has ${metadataJSONFiles.length} metadata json files`);
|
||||
log.info(`has ${mediaFiles.length} media files`);
|
||||
if (metadataJSONFiles.length) {
|
||||
UIService.setUploadStage(
|
||||
UPLOAD_STAGES.READING_GOOGLE_METADATA_FILES,
|
||||
);
|
||||
await this.parseMetadataJSONFiles(metadataJSONFiles);
|
||||
|
||||
UploadService.setParsedMetadataJSONMap(
|
||||
this.parsedMetadataJSONMap,
|
||||
);
|
||||
}
|
||||
if (mediaFiles.length) {
|
||||
log.info(`clusterLivePhotoFiles started`);
|
||||
const analysedMediaFiles =
|
||||
await clusterLivePhotoFiles(mediaFiles);
|
||||
log.info(`clusterLivePhotoFiles ended`);
|
||||
log.info(
|
||||
`got live photos: ${
|
||||
mediaFiles.length !== analysedMediaFiles.length
|
||||
}`,
|
||||
);
|
||||
uiService.setFilenames(
|
||||
new Map<number, string>(
|
||||
analysedMediaFiles.map((mediaFile) => [
|
||||
mediaFile.localID,
|
||||
UploadService.getAssetName(mediaFile),
|
||||
]),
|
||||
),
|
||||
);
|
||||
|
||||
UIService.setHasLivePhoto(
|
||||
mediaFiles.length !== analysedMediaFiles.length,
|
||||
);
|
||||
|
||||
await this.uploadMediaFiles(analysedMediaFiles);
|
||||
}
|
||||
} catch (e) {
|
||||
if (e.message === CustomError.UPLOAD_CANCELLED) {
|
||||
if (isElectron()) {
|
||||
this.remainingFiles = [];
|
||||
await cancelRemainingUploads();
|
||||
}
|
||||
} else {
|
||||
log.error("uploading failed with error", e);
|
||||
throw e;
|
||||
}
|
||||
} finally {
|
||||
UIService.setUploadStage(UPLOAD_STAGES.FINISH);
|
||||
for (let i = 0; i < MAX_CONCURRENT_UPLOADS; i++) {
|
||||
this.cryptoWorkers[i]?.terminate();
|
||||
}
|
||||
this.uploadInProgress = false;
|
||||
}
|
||||
try {
|
||||
if (!UIService.hasFilesInResultList()) {
|
||||
if (!this.uiService.hasFilesInResultList()) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
|
@ -299,7 +422,7 @@ class UploadManager {
|
|||
try {
|
||||
log.info(`parseMetadataJSONFiles function executed`);
|
||||
|
||||
UIService.reset(metadataFiles.length);
|
||||
this.uiService.reset(metadataFiles.length);
|
||||
|
||||
for (const { file, collectionID } of metadataFiles) {
|
||||
const name = getFileName(file);
|
||||
|
@ -310,13 +433,14 @@ class UploadManager {
|
|||
|
||||
log.info(`parsing metadata json file ${name}`);
|
||||
|
||||
const parsedMetadataJSON = await parseMetadataJSON(file);
|
||||
if (parsedMetadataJSON) {
|
||||
const metadataJSON =
|
||||
await tryParseTakeoutMetadataJSON(file);
|
||||
if (metadataJSON) {
|
||||
this.parsedMetadataJSONMap.set(
|
||||
getMetadataJSONMapKeyForJSON(collectionID, name),
|
||||
parsedMetadataJSON && { ...parsedMetadataJSON },
|
||||
metadataJSON && { ...metadataJSON },
|
||||
);
|
||||
UIService.increaseFileUploaded();
|
||||
this.uiService.increaseFileUploaded();
|
||||
}
|
||||
log.info(`successfully parsed metadata json file ${name}`);
|
||||
} catch (e) {
|
||||
|
@ -347,11 +471,11 @@ class UploadManager {
|
|||
this.remainingFiles = [...this.remainingFiles, ...mediaFiles];
|
||||
}
|
||||
|
||||
UIService.reset(mediaFiles.length);
|
||||
this.uiService.reset(mediaFiles.length);
|
||||
|
||||
await UploadService.setFileCount(mediaFiles.length);
|
||||
|
||||
UIService.setUploadStage(UPLOAD_STAGES.UPLOADING);
|
||||
this.uiService.setUploadStage(UPLOAD_STAGES.UPLOADING);
|
||||
|
||||
const uploadProcesses = [];
|
||||
for (
|
||||
|
@ -367,6 +491,8 @@ class UploadManager {
|
|||
}
|
||||
|
||||
private async uploadNextFileInQueue(worker: Remote<DedicatedCryptoWorker>) {
|
||||
const uiService = this.uiService;
|
||||
|
||||
while (this.filesToBeUploaded.length > 0) {
|
||||
if (uploadCancelService.isUploadCancelationRequested()) {
|
||||
throw Error(CustomError.UPLOAD_CANCELLED);
|
||||
|
@ -375,11 +501,27 @@ class UploadManager {
|
|||
const { collectionID } = fileWithCollection;
|
||||
const collection = this.collections.get(collectionID);
|
||||
fileWithCollection = { ...fileWithCollection, collection };
|
||||
|
||||
uiService.setFileProgress(fileWithCollection.localID, 0);
|
||||
await wait(0);
|
||||
|
||||
const { fileUploadResult, uploadedFile } = await uploader(
|
||||
worker,
|
||||
this.existingFiles,
|
||||
fileWithCollection,
|
||||
this.parsedMetadataJSONMap,
|
||||
this.uploaderName,
|
||||
this.isCFUploadProxyDisabled,
|
||||
(
|
||||
fileLocalID: number,
|
||||
percentPerPart?: number,
|
||||
index?: number,
|
||||
) =>
|
||||
uiService.trackUploadProgress(
|
||||
fileLocalID,
|
||||
percentPerPart,
|
||||
index,
|
||||
),
|
||||
);
|
||||
|
||||
const finalUploadResult = await this.postUploadTask(
|
||||
|
@ -388,11 +530,11 @@ class UploadManager {
|
|||
fileWithCollection,
|
||||
);
|
||||
|
||||
UIService.moveFileToResultList(
|
||||
this.uiService.moveFileToResultList(
|
||||
fileWithCollection.localID,
|
||||
finalUploadResult,
|
||||
);
|
||||
UIService.increaseFileUploaded();
|
||||
this.uiService.increaseFileUploaded();
|
||||
UploadService.reducePendingUploadCount();
|
||||
}
|
||||
}
|
||||
|
@ -407,7 +549,7 @@ class UploadManager {
|
|||
log.info(
|
||||
`post upload action -> fileUploadResult: ${fileUploadResult} uploadedFile present ${!!uploadedFile}`,
|
||||
);
|
||||
await this.updateElectronRemainingFiles(fileWithCollection);
|
||||
await this.removeFromPendingUploads(fileWithCollection);
|
||||
switch (fileUploadResult) {
|
||||
case UPLOAD_RESULT.FAILED:
|
||||
case UPLOAD_RESULT.BLOCKED:
|
||||
|
@ -483,7 +625,7 @@ class UploadManager {
|
|||
|
||||
public cancelRunningUpload() {
|
||||
log.info("user cancelled running upload");
|
||||
UIService.setUploadStage(UPLOAD_STAGES.CANCELLING);
|
||||
this.uiService.setUploadStage(UPLOAD_STAGES.CANCELLING);
|
||||
uploadCancelService.requestUploadCancelation();
|
||||
}
|
||||
|
||||
|
@ -510,12 +652,10 @@ class UploadManager {
|
|||
this.setFiles((files) => sortFiles([...files, decryptedFile]));
|
||||
}
|
||||
|
||||
private async updateElectronRemainingFiles(
|
||||
fileWithCollection: FileWithCollection2,
|
||||
) {
|
||||
private async removeFromPendingUploads(file: FileWithCollection2) {
|
||||
if (isElectron()) {
|
||||
this.remainingFiles = this.remainingFiles.filter(
|
||||
(file) => !areFileWithCollectionsSame(file, fileWithCollection),
|
||||
(f) => f.localID != file.localID,
|
||||
);
|
||||
await updatePendingUploads(this.remainingFiles);
|
||||
}
|
||||
|
@ -570,3 +710,167 @@ const cancelRemainingUploads = async () => {
|
|||
await electron.setPendingUploadFiles("zips", []);
|
||||
await electron.setPendingUploadFiles("files", []);
|
||||
};
|
||||
|
||||
/**
|
||||
* The data needed by {@link clusterLivePhotos} to do its thing.
|
||||
*
|
||||
* As files progress through stages, they get more and more bits tacked on to
|
||||
* them. These types document the journey.
|
||||
*/
|
||||
type ClusterableFile = {
|
||||
localID: number;
|
||||
collectionID: number;
|
||||
// fileOrPath: File | ElectronFile | string;
|
||||
file: File | ElectronFile | string;
|
||||
};
|
||||
|
||||
type ClusteredFile = ClusterableFile & {
|
||||
isLivePhoto: boolean;
|
||||
livePhotoAssets?: LivePhotoAssets2;
|
||||
};
|
||||
|
||||
/**
|
||||
* Go through the given files, combining any sibling image + video assets into a
|
||||
* single live photo when appropriate.
|
||||
*/
|
||||
const clusterLivePhotos = (mediaFiles: ClusterableFile[]) => {
|
||||
const result: ClusteredFile[] = [];
|
||||
mediaFiles
|
||||
.sort((f, g) =>
|
||||
nameAndExtension(getFileName(f.file))[0].localeCompare(
|
||||
nameAndExtension(getFileName(g.file))[0],
|
||||
),
|
||||
)
|
||||
.sort((f, g) => f.collectionID - g.collectionID);
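// Array.prototype.sort is stable (guaranteed since ES2019), so after the
// second sort the files remain name-ordered within each collection; sibling
// assets like "IMG_1.jpg" / "IMG_1.mov" end up adjacent, which the pairing
// loop below relies on.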
|
||||
let index = 0;
|
||||
while (index < mediaFiles.length - 1) {
|
||||
const f = mediaFiles[index];
|
||||
const g = mediaFiles[index + 1];
|
||||
const fFileName = getFileName(f.file);
|
||||
const gFileName = getFileName(g.file);
|
||||
const fFileType = potentialFileTypeFromExtension(fFileName);
|
||||
const gFileType = potentialFileTypeFromExtension(gFileName);
|
||||
const fa: PotentialLivePhotoAsset = {
|
||||
fileName: fFileName,
|
||||
fileType: fFileType,
|
||||
collectionID: f.collectionID,
|
||||
/* TODO(MR): ElectronFile changes */
|
||||
size: (f as FileWithCollection).file.size,
|
||||
};
|
||||
const ga: PotentialLivePhotoAsset = {
|
||||
fileName: gFileName,
|
||||
fileType: gFileType,
|
||||
collectionID: g.collectionID,
|
||||
/* TODO(MR): ElectronFile changes */
|
||||
size: (g as FileWithCollection).file.size,
|
||||
};
|
||||
if (areLivePhotoAssets(fa, ga)) {
|
||||
result.push({
|
||||
localID: f.localID,
|
||||
collectionID: f.collectionID,
|
||||
isLivePhoto: true,
|
||||
livePhotoAssets: {
|
||||
image: fFileType == FILE_TYPE.IMAGE ? f.file : g.file,
|
||||
video: fFileType == FILE_TYPE.IMAGE ? g.file : f.file,
|
||||
},
|
||||
});
|
||||
index += 2;
|
||||
} else {
|
||||
result.push({
|
||||
...f,
|
||||
isLivePhoto: false,
|
||||
});
|
||||
index += 1;
|
||||
}
|
||||
}
|
||||
if (index === mediaFiles.length - 1) {
|
||||
result.push({
|
||||
...mediaFiles[index],
|
||||
isLivePhoto: false,
|
||||
});
|
||||
}
|
||||
return result;
|
||||
};
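// Worked example (hypothetical file names): within one collection, sorting
// brings "IMG_001.jpg" and "IMG_001.mov" next to each other;
// areLivePhotoAssets pairs them, so the result contains a single
// { isLivePhoto: true } entry whose livePhotoAssets reference the image and
// the video, while an unpaired "IMG_002.jpg" passes through as a regular
// { isLivePhoto: false } entry.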
|
||||
|
||||
interface PotentialLivePhotoAsset {
|
||||
fileName: string;
|
||||
fileType: FILE_TYPE;
|
||||
collectionID: number;
|
||||
size: number;
|
||||
}
|
||||
|
||||
const areLivePhotoAssets = (
|
||||
f: PotentialLivePhotoAsset,
|
||||
g: PotentialLivePhotoAsset,
|
||||
) => {
|
||||
if (f.collectionID != g.collectionID) return false;
|
||||
|
||||
const [fName, fExt] = nameAndExtension(f.fileName);
|
||||
const [gName, gExt] = nameAndExtension(g.fileName);
|
||||
|
||||
let fPrunedName: string;
|
||||
let gPrunedName: string;
|
||||
if (f.fileType == FILE_TYPE.IMAGE && g.fileType == FILE_TYPE.VIDEO) {
|
||||
fPrunedName = removePotentialLivePhotoSuffix(
|
||||
fName,
|
||||
// A Google Live Photo image file can have the video extension appended
|
||||
// as a suffix, so we pass that to removePotentialLivePhotoSuffix to
|
||||
// remove it.
|
||||
//
|
||||
// Example: IMG_20210630_0001.mp4.jpg (Google Live Photo image file)
|
||||
gExt ? `.${gExt}` : undefined,
|
||||
);
|
||||
gPrunedName = removePotentialLivePhotoSuffix(gName);
|
||||
} else if (f.fileType == FILE_TYPE.VIDEO && g.fileType == FILE_TYPE.IMAGE) {
|
||||
fPrunedName = removePotentialLivePhotoSuffix(fName);
|
||||
gPrunedName = removePotentialLivePhotoSuffix(
|
||||
gName,
|
||||
fExt ? `.${fExt}` : undefined,
|
||||
);
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (fPrunedName != gPrunedName) return false;
|
||||
|
||||
// Also check that the size of an individual Live Photo asset is less than
|
||||
// an (arbitrary) limit. This should be true in practice as the videos for a
|
||||
// live photo are a few seconds long. Further, the zipping library that
|
||||
// we use doesn't support streams as input.
|
||||
|
||||
const maxAssetSize = 20 * 1024 * 1024; /* 20MB */
|
||||
if (f.size > maxAssetSize || g.size > maxAssetSize) {
|
||||
log.info(
|
||||
`Not classifying assets with too-large sizes ${[f.size, g.size]} as a live photo`,
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
const removePotentialLivePhotoSuffix = (name: string, suffix?: string) => {
|
||||
const suffix_3 = "_3";
|
||||
|
||||
// The icloud-photos-downloader library appends _HVEC to the end of the
|
||||
// filename in case of live photos.
|
||||
//
|
||||
// https://github.com/icloud-photos-downloader/icloud_photos_downloader
|
||||
const suffix_hvec = "_HVEC";
|
||||
|
||||
let foundSuffix: string | undefined;
|
||||
if (name.endsWith(suffix_3)) {
|
||||
foundSuffix = suffix_3;
|
||||
} else if (
|
||||
name.endsWith(suffix_hvec) ||
|
||||
name.endsWith(suffix_hvec.toLowerCase())
|
||||
) {
|
||||
foundSuffix = suffix_hvec;
|
||||
} else if (suffix) {
|
||||
if (name.endsWith(suffix) || name.endsWith(suffix.toLowerCase())) {
|
||||
foundSuffix = suffix;
|
||||
}
|
||||
}
|
||||
|
||||
return foundSuffix ? name.slice(0, foundSuffix.length * -1) : name;
|
||||
};
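// Examples: removePotentialLivePhotoSuffix("IMG_1234_3") is "IMG_1234", and
// removePotentialLivePhotoSuffix("IMG_20210630_0001.mp4", ".mp4") is
// "IMG_20210630_0001" (the second form is how areLivePhotoAssets strips a
// Google Live Photo's embedded video extension).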
|
||||
|
|
File diff suppressed because it is too large
|
@ -1,4 +1,4 @@
|
|||
import { FILE_TYPE } from "constants/file";
|
||||
import { FILE_TYPE } from "@/media/file";
|
||||
import { City } from "services/locationSearchService";
|
||||
import { LocationTagData } from "types/entity";
|
||||
import { EnteFile } from "types/file";
|
||||
|
|
|
@ -1,28 +1,19 @@
|
|||
import { FILE_TYPE } from "@/media/file";
|
||||
import type { ElectronFile } from "@/next/types/file";
|
||||
import {
|
||||
B64EncryptionResult,
|
||||
LocalFileAttributes,
|
||||
} from "@ente/shared/crypto/types";
|
||||
import { FILE_TYPE } from "constants/file";
|
||||
import type { DataStream } from "@ente/shared/utils/data-stream";
|
||||
import { Collection } from "types/collection";
|
||||
import {
|
||||
FilePublicMagicMetadata,
|
||||
FilePublicMagicMetadataProps,
|
||||
MetadataFileAttributes,
|
||||
S3FileAttributes,
|
||||
} from "types/file";
|
||||
import { EncryptedMagicMetadata } from "types/magicMetadata";
|
||||
|
||||
export interface DataStream {
|
||||
stream: ReadableStream<Uint8Array>;
|
||||
chunkCount: number;
|
||||
}
|
||||
|
||||
export function isDataStream(object: any): object is DataStream {
|
||||
return "stream" in object;
|
||||
}
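// Example: the type guard narrows the Uint8Array | DataStream union without
// casts.
//
//     declare const filedata: Uint8Array | DataStream;
//     if (isDataStream(filedata)) {
//         // filedata is a DataStream here
//         const reader = filedata.stream.getReader();
//     }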
|
||||
|
||||
export type Logger = (message: string) => void;
|
||||
|
||||
/** Information about the file that never changes post upload. */
|
||||
export interface Metadata {
|
||||
/**
|
||||
* The file name.
|
||||
|
@ -49,13 +40,6 @@ export interface Location {
|
|||
longitude: number;
|
||||
}
|
||||
|
||||
export interface ParsedMetadataJSON {
|
||||
creationTime: number;
|
||||
modificationTime: number;
|
||||
latitude: number;
|
||||
longitude: number;
|
||||
}
|
||||
|
||||
export interface MultipartUploadURLs {
|
||||
objectKey: string;
|
||||
partURLs: string[];
|
||||
|
@ -70,24 +54,6 @@ export interface FileTypeInfo {
|
|||
videoType?: string;
|
||||
}
|
||||
|
||||
/*
|
||||
* ElectronFile is a custom interface that is used to represent
|
||||
* any file on disk as a File-like object in the Electron desktop app.
|
||||
*
|
||||
* This was added to support the auto-resuming of failed uploads
|
||||
* which needed absolute paths to the files which the
|
||||
* normal File interface does not provide.
|
||||
*/
|
||||
export interface ElectronFile {
|
||||
name: string;
|
||||
path: string;
|
||||
size: number;
|
||||
lastModified: number;
|
||||
stream: () => Promise<ReadableStream<Uint8Array>>;
|
||||
blob: () => Promise<Blob>;
|
||||
arrayBuffer: () => Promise<Uint8Array>;
|
||||
}
|
||||
|
||||
export interface UploadAsset {
|
||||
isLivePhoto?: boolean;
|
||||
file?: File | ElectronFile;
|
||||
|
@ -107,13 +73,13 @@ export interface FileWithCollection extends UploadAsset {
|
|||
|
||||
export interface UploadAsset2 {
|
||||
isLivePhoto?: boolean;
|
||||
file?: File | ElectronFile | string;
|
||||
file?: File | string;
|
||||
livePhotoAssets?: LivePhotoAssets2;
|
||||
}
|
||||
|
||||
export interface LivePhotoAssets2 {
|
||||
image: File | ElectronFile | string;
|
||||
video: File | ElectronFile | string;
|
||||
image: File | string;
|
||||
video: File | string;
|
||||
}
|
||||
|
||||
export interface FileWithCollection2 extends UploadAsset2 {
|
||||
|
@ -122,8 +88,6 @@ export interface FileWithCollection2 extends UploadAsset2 {
|
|||
collectionID?: number;
|
||||
}
|
||||
|
||||
export type ParsedMetadataJSONMap = Map<string, ParsedMetadataJSON>;
|
||||
|
||||
export interface UploadURL {
|
||||
url: string;
|
||||
objectKey: string;
|
||||
|
@ -131,7 +95,12 @@ export interface UploadURL {
|
|||
|
||||
export interface FileInMemory {
|
||||
filedata: Uint8Array | DataStream;
|
||||
/** The JPEG data of the generated thumbnail */
|
||||
thumbnail: Uint8Array;
|
||||
/**
|
||||
* `true` if this is a fallback (all black) thumbnail we're returning since
|
||||
* thumbnail generation failed for some reason.
|
||||
*/
|
||||
hasStaticThumbnail: boolean;
|
||||
}
|
||||
|
||||
|
@ -146,6 +115,7 @@ export interface EncryptedFile {
|
|||
file: ProcessedFile;
|
||||
fileKey: B64EncryptionResult;
|
||||
}
|
||||
|
||||
export interface ProcessedFile {
|
||||
file: LocalFileAttributes<Uint8Array | DataStream>;
|
||||
thumbnail: LocalFileAttributes<Uint8Array>;
|
||||
|
@ -178,8 +148,3 @@ export interface PublicUploadProps {
|
|||
passwordToken: string;
|
||||
accessedThroughSharedURL: boolean;
|
||||
}
|
||||
|
||||
export interface ExtractMetadataResult {
|
||||
metadata: Metadata;
|
||||
publicMagicMetadata: FilePublicMagicMetadataProps;
|
||||
}
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
import { FILE_TYPE } from "@/media/file";
|
||||
import { decodeLivePhoto } from "@/media/live-photo";
|
||||
import log from "@/next/log";
|
||||
import { CustomErrorMessage, type Electron } from "@/next/types/ipc";
|
||||
|
@ -6,15 +7,6 @@ import ComlinkCryptoWorker from "@ente/shared/crypto";
|
|||
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
|
||||
import { User } from "@ente/shared/user/types";
|
||||
import { downloadUsingAnchor, withTimeout } from "@ente/shared/utils";
|
||||
import {
|
||||
FILE_TYPE,
|
||||
RAW_FORMATS,
|
||||
SUPPORTED_RAW_FORMATS,
|
||||
TYPE_HEIC,
|
||||
TYPE_HEIF,
|
||||
TYPE_JPEG,
|
||||
TYPE_JPG,
|
||||
} from "constants/file";
|
||||
import { t } from "i18next";
|
||||
import isElectron from "is-electron";
|
||||
import { moveToHiddenCollection } from "services/collectionService";
|
||||
|
@ -48,6 +40,38 @@ import { isArchivedFile, updateMagicMetadata } from "utils/magicMetadata";
|
|||
import { safeFileName } from "utils/native-fs";
|
||||
import { writeStream } from "utils/native-stream";
|
||||
|
||||
const TYPE_HEIC = "heic";
|
||||
const TYPE_HEIF = "heif";
|
||||
const TYPE_JPEG = "jpeg";
|
||||
const TYPE_JPG = "jpg";
|
||||
|
||||
const RAW_FORMATS = [
|
||||
"heic",
|
||||
"rw2",
|
||||
"tiff",
|
||||
"arw",
|
||||
"cr3",
|
||||
"cr2",
|
||||
"raf",
|
||||
"nef",
|
||||
"psd",
|
||||
"dng",
|
||||
"tif",
|
||||
];
|
||||
|
||||
const SUPPORTED_RAW_FORMATS = [
|
||||
"heic",
|
||||
"rw2",
|
||||
"tiff",
|
||||
"arw",
|
||||
"cr3",
|
||||
"cr2",
|
||||
"nef",
|
||||
"psd",
|
||||
"dng",
|
||||
"tif",
|
||||
];
|
||||
|
||||
export enum FILE_OPS_TYPE {
|
||||
DOWNLOAD,
|
||||
FIX_TIME,
|
||||
|
@ -62,8 +86,10 @@ class ModuleState {
|
|||
/**
|
||||
* This will be set to true if we get an error from the Node.js side of our
|
||||
* desktop app telling us that native JPEG conversion is not available for
|
||||
* the current OS/arch combination. That way, we can stop pestering it again
|
||||
* and again (saving an IPC round-trip).
|
||||
* the current OS/arch combination.
|
||||
*
|
||||
* That way, we can stop pestering it again and again (saving an IPC
|
||||
* round-trip).
|
||||
*
|
||||
* Note the double negative when it is used.
|
||||
*/
|
||||
|
@ -292,14 +318,12 @@ export const getRenderableImage = async (fileName: string, imageBlob: Blob) => {
|
|||
return imageBlob;
|
||||
}
|
||||
|
||||
let jpegBlob: Blob | undefined;
|
||||
|
||||
const available = !moduleState.isNativeJPEGConversionNotAvailable;
|
||||
if (isElectron() && available && isSupportedRawFormat(exactType)) {
|
||||
// If we're running in our desktop app, see if our Node.js layer can
|
||||
// convert this into a JPEG using native tools for us.
|
||||
try {
|
||||
jpegBlob = await nativeConvertToJPEG(fileName, imageBlob);
|
||||
return await nativeConvertToJPEG(imageBlob);
|
||||
} catch (e) {
|
||||
if (e.message == CustomErrorMessage.NotAvailable) {
|
||||
moduleState.isNativeJPEGConversionNotAvailable = true;
|
||||
|
@ -309,12 +333,12 @@ export const getRenderableImage = async (fileName: string, imageBlob: Blob) => {
|
|||
}
|
||||
}
|
||||
|
||||
if (!jpegBlob && isFileHEIC(exactType)) {
|
||||
if (isFileHEIC(exactType)) {
|
||||
// If it is an HEIC file, use our web HEIC converter.
|
||||
jpegBlob = await heicToJPEG(imageBlob);
|
||||
return await heicToJPEG(imageBlob);
|
||||
}
|
||||
|
||||
return jpegBlob;
|
||||
return undefined;
|
||||
} catch (e) {
|
||||
log.error(
|
||||
`Failed to get renderable image for ${JSON.stringify(fileTypeInfo ?? fileName)}`,
|
||||
|
@ -324,7 +348,7 @@ export const getRenderableImage = async (fileName: string, imageBlob: Blob) => {
|
|||
}
|
||||
};
|
||||
|
||||
const nativeConvertToJPEG = async (fileName: string, imageBlob: Blob) => {
|
||||
const nativeConvertToJPEG = async (imageBlob: Blob) => {
|
||||
const startTime = Date.now();
|
||||
const imageData = new Uint8Array(await imageBlob.arrayBuffer());
|
||||
const electron = globalThis.electron;
|
||||
|
@ -332,8 +356,8 @@ const nativeConvertToJPEG = async (fileName: string, imageBlob: Blob) => {
|
|||
// the main thread since workers don't have access to the `window` (and
|
||||
// thus, to the `window.electron`) object.
|
||||
const jpegData = electron
|
||||
? await electron.convertToJPEG(fileName, imageData)
|
||||
: await workerBridge.convertToJPEG(fileName, imageData);
|
||||
? await electron.convertToJPEG(imageData)
|
||||
: await workerBridge.convertToJPEG(imageData);
|
||||
log.debug(() => `Native JPEG conversion took ${Date.now() - startTime} ms`);
|
||||
return new Blob([jpegData]);
|
||||
};
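// A minimal usage sketch (not part of the diff), assuming a RAW blob whose
// detected exact type is a supported RAW format:
//
//     const renderable = await getRenderableImage("IMG_0001.cr2", rawBlob);
//     if (renderable) imageElement.src = URL.createObjectURL(renderable);
//
// Inside a web worker the same conversion goes through workerBridge instead,
// since `window.electron` is unavailable there (see the comment above).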
|
||||
|
@ -441,6 +465,18 @@ export function isSharedFile(user: User, file: EnteFile) {
|
|||
return file.ownerID !== user.id;
|
||||
}
|
||||
|
||||
/**
|
||||
* [Note: File name for local EnteFile objects]
|
||||
*
|
||||
* The title property in a file's metadata is the original file's name. The
|
||||
* metadata of a file cannot be edited. So if later on the file's name is
|
||||
* changed, then the edit is stored in the `editedName` property of the public
|
||||
* metadata of the file.
|
||||
*
|
||||
* This function merges these edits onto the file object that we use locally.
|
||||
* Effectively, post this step, the file's metadata.title can be used in lieu of
|
||||
* its filename.
|
||||
*/
|
||||
export function mergeMetadata(files: EnteFile[]): EnteFile[] {
|
||||
return files.map((file) => {
|
||||
if (file.pubMagicMetadata?.data.editedTime) {
|
||||
|
@@ -646,7 +682,11 @@ async function downloadFileDesktop(
         fs.exists,
     );
     const imageStream = generateStreamFromArrayBuffer(imageData);
-    await writeStream(`${downloadDir}/${imageExportName}`, imageStream);
+    await writeStream(
+        electron,
+        `${downloadDir}/${imageExportName}`,
+        imageStream,
+    );
     try {
         const videoExportName = await safeFileName(
             downloadDir,

@@ -654,7 +694,11 @@ async function downloadFileDesktop(
         fs.exists,
     );
     const videoStream = generateStreamFromArrayBuffer(videoData);
-    await writeStream(`${downloadDir}/${videoExportName}`, videoStream);
+    await writeStream(
+        electron,
+        `${downloadDir}/${videoExportName}`,
+        videoStream,
+    );
     } catch (e) {
         await fs.rm(`${downloadDir}/${imageExportName}`);
         throw e;

@@ -665,7 +709,11 @@ async function downloadFileDesktop(
         file.metadata.title,
         fs.exists,
     );
-    await writeStream(`${downloadDir}/${fileExportName}`, updatedStream);
+    await writeStream(
+        electron,
+        `${downloadDir}/${fileExportName}`,
+        updatedStream,
+    );
     }
 }
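All three call sites above switch to the new (electron, path, stream) shape. A compact sketch of that pattern in isolation, with the byte-to-stream wrapping that generateStreamFromArrayBuffer performs inlined for clarity (the helper itself is hypothetical):

```ts
// Sketch: the post-refactor write path. The Electron handle is passed
// only as proof that we're in the desktop app; writeStream itself uses
// the stream:// protocol handler, not this object.
const exportBytes = async (
    electron: Electron,
    filePath: string,
    data: ArrayBuffer,
) => {
    // Wrap the bytes in a one-chunk ReadableStream, mirroring what
    // generateStreamFromArrayBuffer does in the diff.
    const stream = new ReadableStream({
        start(controller) {
            controller.enqueue(new Uint8Array(data));
            controller.close();
        },
    });
    await writeStream(electron, filePath, stream);
};
```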
@@ -690,14 +738,14 @@ export const getUserOwnedFiles = (files: EnteFile[]) => {
 };

 // doesn't work on firefox
-export const copyFileToClipboard = async (fileUrl: string) => {
+export const copyFileToClipboard = async (fileURL: string) => {
     const canvas = document.createElement("canvas");
     const canvasCTX = canvas.getContext("2d");
     const image = new Image();

     const blobPromise = new Promise<Blob>((resolve, reject) => {
         try {
-            image.setAttribute("src", fileUrl);
+            image.setAttribute("src", fileURL);
             image.onload = () => {
                 canvas.width = image.width;
                 canvas.height = image.height;
@@ -1,42 +0,0 @@
-import { FILE_TYPE } from "constants/file";
-import { getFileExtension } from "utils/file";
-
-const IMAGE_EXTENSIONS = [
-    "heic",
-    "heif",
-    "jpeg",
-    "jpg",
-    "png",
-    "gif",
-    "bmp",
-    "tiff",
-    "webp",
-];
-
-const VIDEO_EXTENSIONS = [
-    "mov",
-    "mp4",
-    "m4v",
-    "avi",
-    "wmv",
-    "flv",
-    "mkv",
-    "webm",
-    "3gp",
-    "3g2",
-    "avi",
-    "ogv",
-    "mpg",
-    "mp",
-];
-
-export function getFileTypeFromExtensionForLivePhotoClustering(
-    filename: string,
-) {
-    const extension = getFileExtension(filename)?.toLowerCase();
-    if (IMAGE_EXTENSIONS.includes(extension)) {
-        return FILE_TYPE.IMAGE;
-    } else if (VIDEO_EXTENSIONS.includes(extension)) {
-        return FILE_TYPE.VIDEO;
-    }
-}
@@ -1,6 +1,6 @@
+import { FILE_TYPE } from "@/media/file";
 import { decodeLivePhoto } from "@/media/live-photo";
 import log from "@/next/log";
-import { FILE_TYPE } from "constants/file";
 import PQueue from "p-queue";
 import DownloadManager from "services/download";
 import { getLocalFiles } from "services/fileService";
@@ -4,17 +4,71 @@
  * NOTE: These functions only work when we're running in our desktop app.
  */

+import type { Electron } from "@/next/types/ipc";
+
 /**
- * Write the given stream to a file on the local machine.
+ * Stream the given file from the user's local filesystem.
  *
- * **This only works when we're running in our desktop app**. It uses the
+ * This only works when we're running in our desktop app since it uses the
  * "stream://" protocol handler exposed by our custom code in the Node.js layer.
  * See: [Note: IPC streams].
  *
+ * To avoid accidentally invoking it in a non-desktop app context, it requires
+ * the {@link Electron} object as a parameter (even though it doesn't use it).
+ *
+ * @param path The path of the file on the user's local filesystem whose
+ * contents we want to stream.
+ *
+ * @return A ({@link Response}, size) tuple.
+ *
+ * * The response contains the contents of the file. In particular, the `body`
+ *   {@link ReadableStream} property of this response can be used to read the
+ *   file's contents in a streaming manner.
+ *
+ * * The size is the size of the file that we'll be reading from disk.
+ */
+export const readStream = async (
+    _: Electron,
+    path: string,
+): Promise<{ response: Response; size: number }> => {
+    const req = new Request(`stream://read${path}`, {
+        method: "GET",
+    });
+
+    const res = await fetch(req);
+    if (!res.ok)
+        throw new Error(
+            `Failed to read stream from ${path}: HTTP ${res.status}`,
+        );
+
+    const size = +res.headers.get("Content-Length");
+    if (isNaN(size))
+        throw new Error(
+            `Got a non-numeric Content-Length when reading a stream. The response was ${res}`,
+        );
+
+    return { response: res, size };
+};
+
+/**
+ * Write the given stream to a file on the local machine.
+ *
+ * This only works when we're running in our desktop app since it uses the
+ * "stream://" protocol handler exposed by our custom code in the Node.js layer.
+ * See: [Note: IPC streams].
+ *
+ * To avoid accidentally invoking it in a non-desktop app context, it requires
+ * the {@link Electron} object as a parameter (even though it doesn't use it).
+ *
  * @param path The path on the local machine where to write the file to.
+ *
  * @param stream The stream which should be written into the file.
- * */
-export const writeStream = async (path: string, stream: ReadableStream) => {
+ */
+export const writeStream = async (
+    _: Electron,
+    path: string,
+    stream: ReadableStream,
+) => {
     // TODO(MR): This doesn't currently work.
     //
     // Not sure what I'm doing wrong here; I've opened an issue upstream
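A minimal sketch of consuming readStream as documented above (the consuming function is hypothetical; readStream and its tuple come from the diff):

```ts
// Sketch: stream a local file in the desktop app via the stream://
// protocol handler, then consume the body incrementally.
const logFileChunks = async (electron: Electron, path: string) => {
    const { response, size } = await readStream(electron, path);
    console.log(`streaming ${size} bytes from ${path}`);

    if (!response.body) return;
    const reader = response.body.getReader();
    while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        // `value` is a Uint8Array chunk of the file's contents.
        console.log(`read chunk of ${value.length} bytes`);
    }
};
```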
@@ -1,5 +1,5 @@
+import { FILE_TYPE } from "@/media/file";
 import log from "@/next/log";
-import { FILE_TYPE } from "constants/file";
 import { LivePhotoSourceURL, SourceURLs } from "services/download";
 import { EnteFile } from "types/file";
 import { SetSelectedState } from "types/gallery";
@@ -1,89 +1,18 @@
 import { basename, dirname } from "@/next/file";
-import { FILE_TYPE } from "constants/file";
+import { ElectronFile } from "@/next/types/file";
 import { PICKED_UPLOAD_TYPE } from "constants/upload";
 import isElectron from "is-electron";
 import { exportMetadataDirectoryName } from "services/export";
 import { EnteFile } from "types/file";
 import {
-    ElectronFile,
     FileWithCollection,
     Metadata,
+    type FileWithCollection2,
 } from "types/upload";

 const TYPE_JSON = "json";
-const DEDUPE_COLLECTION = new Set(["icloud library", "icloudlibrary"]);
-
-export function findMatchingExistingFiles(
-    existingFiles: EnteFile[],
-    newFileMetadata: Metadata,
-): EnteFile[] {
-    const matchingFiles: EnteFile[] = [];
-    for (const existingFile of existingFiles) {
-        if (areFilesSame(existingFile.metadata, newFileMetadata)) {
-            matchingFiles.push(existingFile);
-        }
-    }
-    return matchingFiles;
-}
-
-export function shouldDedupeAcrossCollection(collectionName: string): boolean {
-    // using set to avoid unnecessary regex for removing spaces for each upload
-    return DEDUPE_COLLECTION.has(collectionName.toLocaleLowerCase());
-}
-
-export function areFilesSame(
-    existingFile: Metadata,
-    newFile: Metadata,
-): boolean {
-    if (hasFileHash(existingFile) && hasFileHash(newFile)) {
-        return areFilesWithFileHashSame(existingFile, newFile);
-    } else {
-        /*
-         * The maximum difference in the creation/modification times of two similar files is set to 1 second.
-         * This is because while uploading files in the web - browsers and users could have set reduced
-         * precision of file times to prevent timing attacks and fingerprinting.
-         * Context: https://developer.mozilla.org/en-US/docs/Web/API/File/lastModified#reduced_time_precision
-         */
-        const oneSecond = 1e6;
-        if (
-            existingFile.fileType === newFile.fileType &&
-            Math.abs(existingFile.creationTime - newFile.creationTime) <
-                oneSecond &&
-            Math.abs(existingFile.modificationTime - newFile.modificationTime) <
-                oneSecond &&
-            existingFile.title === newFile.title
-        ) {
-            return true;
-        } else {
-            return false;
-        }
-    }
-}
-
-export function hasFileHash(file: Metadata) {
-    return file.hash || (file.imageHash && file.videoHash);
-}
-
-export function areFilesWithFileHashSame(
-    existingFile: Metadata,
-    newFile: Metadata,
-): boolean {
-    if (
-        existingFile.fileType !== newFile.fileType ||
-        existingFile.title !== newFile.title
-    ) {
-        return false;
-    }
-    if (existingFile.fileType === FILE_TYPE.LIVE_PHOTO) {
-        return (
-            existingFile.imageHash === newFile.imageHash &&
-            existingFile.videoHash === newFile.videoHash
-        );
-    } else {
-        return existingFile.hash === newFile.hash;
-    }
-}
+export const hasFileHash = (file: Metadata) =>
+    file.hash || (file.imageHash && file.videoHash);

 export function segregateMetadataAndMediaFiles(
     filesWithCollectionToUpload: FileWithCollection[],
@@ -101,23 +30,6 @@ export function segregateMetadataAndMediaFiles(
     return { mediaFiles, metadataJSONFiles };
 }

-export function segregateMetadataAndMediaFiles2(
-    filesWithCollectionToUpload: FileWithCollection2[],
-) {
-    const metadataJSONFiles: FileWithCollection2[] = [];
-    const mediaFiles: FileWithCollection2[] = [];
-    filesWithCollectionToUpload.forEach((fileWithCollection) => {
-        const file = fileWithCollection.file;
-        const s = typeof file == "string" ? file : file.name;
-        if (s.toLowerCase().endsWith(TYPE_JSON)) {
-            metadataJSONFiles.push(fileWithCollection);
-        } else {
-            mediaFiles.push(fileWithCollection);
-        }
-    });
-    return { mediaFiles, metadataJSONFiles };
-}
-
 export function areFileWithCollectionsSame(
     firstFile: FileWithCollection2,
     secondFile: FileWithCollection2,
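The dedupe logic now lives with its callers; only hasFileHash survives in this module. A sketch of how a caller might combine it with the one-second time tolerance that the removed areFilesSame used (Metadata field names are those visible in the removed code; the helper itself is hypothetical):

```ts
// Sketch: choose hash-based or time-based duplicate detection, in the
// spirit of the removed areFilesSame.
const isLikelyDuplicate = (a: Metadata, b: Metadata): boolean => {
    if (hasFileHash(a) && hasFileHash(b))
        return a.fileType === b.fileType && a.hash === b.hash;
    // Fall back to a 1 second tolerance (times are in microseconds, so
    // 1e6) to absorb browsers' reduced file-time precision.
    const oneSecond = 1e6;
    return (
        a.fileType === b.fileType &&
        a.title === b.title &&
        Math.abs(a.creationTime - b.creationTime) < oneSecond
    );
};
```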
@@ -1,8 +1,6 @@
-import { nameAndExtension } from "@/next/file";
 import log from "@/next/log";
 import { withTimeout } from "@ente/shared/utils";
 import QueueProcessor from "@ente/shared/utils/queueProcessor";
-import { generateTempName } from "@ente/shared/utils/temp";
 import { expose } from "comlink";
 import {
     ffmpegPathPlaceholder,
@@ -10,108 +8,106 @@ import {
     outputPathPlaceholder,
 } from "constants/ffmpeg";
 import { FFmpeg, createFFmpeg } from "ffmpeg-wasm";
-import { getUint8ArrayView } from "services/readerService";

 export class DedicatedFFmpegWorker {
-    private wasmFFmpeg: WasmFFmpeg;
-
-    constructor() {
-        this.wasmFFmpeg = new WasmFFmpeg();
-    }
-
-    /**
-     * Execute a ffmpeg {@link command}.
-     *
-     * This is a sibling of {@link ffmpegExec} in ipc.ts exposed by the desktop
-     * app. See [Note: ffmpeg in Electron].
-     */
-    run(cmd, inputFile, outputFileName, timeoutMS) {
-        return this.wasmFFmpeg.run(cmd, inputFile, outputFileName, timeoutMS);
-    }
-}
-
-expose(DedicatedFFmpegWorker, self);
-
-export class WasmFFmpeg {
     private ffmpeg: FFmpeg;
-    private ready: Promise<void> = null;
-    private ffmpegTaskQueue = new QueueProcessor<File>();
+    private ffmpegTaskQueue = new QueueProcessor<Uint8Array>();

     constructor() {
         this.ffmpeg = createFFmpeg({
             corePath: "/js/ffmpeg/ffmpeg-core.js",
             mt: false,
         });
-        this.ready = this.init();
     }

-    private async init() {
-        if (!this.ffmpeg.isLoaded()) {
-            await this.ffmpeg.load();
-        }
-    }
-
-    async run(
-        cmd: string[],
-        inputFile: File,
-        outputFileName: string,
-        timeoutMS,
-    ) {
-        const exec = () => this.execute(cmd, inputFile, outputFileName);
+    /**
+     * Execute a FFmpeg {@link command} on {@link blob}.
+     *
+     * This is a sibling of {@link ffmpegExec} exposed by the desktop app in
+     * `ipc.ts`. See [Note: FFmpeg in Electron].
+     */
+    async exec(
+        command: string[],
+        blob: Blob,
+        outputFileExtension: string,
+        timeoutMs,
+    ): Promise<Uint8Array> {
+        if (!this.ffmpeg.isLoaded()) await this.ffmpeg.load();
+
+        const go = () =>
+            ffmpegExec(this.ffmpeg, command, outputFileExtension, blob);
+
         const request = this.ffmpegTaskQueue.queueUpRequest(() =>
-            timeoutMS ? withTimeout<File>(exec(), timeoutMS) : exec(),
+            timeoutMs ? withTimeout(go(), timeoutMs) : go(),
         );
+
         return await request.promise;
     }
+}

-    private async execute(
-        cmd: string[],
-        inputFile: File,
-        outputFileName: string,
-    ) {
-        let tempInputFilePath: string;
-        let tempOutputFilePath: string;
-        try {
-            await this.ready;
-            const [, extension] = nameAndExtension(inputFile.name);
-            const tempNameSuffix = extension ? `input.${extension}` : "input";
-            tempInputFilePath = `${generateTempName(10, tempNameSuffix)}`;
-            this.ffmpeg.FS(
-                "writeFile",
-                tempInputFilePath,
-                await getUint8ArrayView(inputFile),
-            );
-            tempOutputFilePath = `${generateTempName(10, outputFileName)}`;
-
-            cmd = cmd.map((cmdPart) => {
-                if (cmdPart === ffmpegPathPlaceholder) {
-                    return "";
-                } else if (cmdPart === inputPathPlaceholder) {
-                    return tempInputFilePath;
-                } else if (cmdPart === outputPathPlaceholder) {
-                    return tempOutputFilePath;
-                } else {
-                    return cmdPart;
-                }
-            });
-            log.info(`${cmd}`);
-            await this.ffmpeg.run(...cmd);
-            return new File(
-                [this.ffmpeg.FS("readFile", tempOutputFilePath)],
-                outputFileName,
-            );
-        } finally {
-            try {
-                this.ffmpeg.FS("unlink", tempInputFilePath);
-            } catch (e) {
-                log.error("unlink input file failed", e);
-            }
-            try {
-                this.ffmpeg.FS("unlink", tempOutputFilePath);
-            } catch (e) {
-                log.error("unlink output file failed", e);
-            }
-        }
-    }
-}
+expose(DedicatedFFmpegWorker, self);
+
+const ffmpegExec = async (
+    ffmpeg: FFmpeg,
+    command: string[],
+    outputFileExtension: string,
+    blob: Blob,
+) => {
+    const inputPath = randomPrefix();
+    const outputSuffix = outputFileExtension ? "." + outputFileExtension : "";
+    const outputPath = randomPrefix() + outputSuffix;
+
+    const cmd = substitutePlaceholders(command, inputPath, outputPath);
+
+    const inputData = new Uint8Array(await blob.arrayBuffer());
+
+    try {
+        ffmpeg.FS("writeFile", inputPath, inputData);
+
+        log.debug(() => `[wasm] ffmpeg ${cmd.join(" ")}`);
+        await ffmpeg.run(...cmd);
+
+        return ffmpeg.FS("readFile", outputPath);
+    } finally {
+        try {
+            ffmpeg.FS("unlink", inputPath);
+        } catch (e) {
+            log.error(`Failed to remove input ${inputPath}`, e);
+        }
+        try {
+            ffmpeg.FS("unlink", outputPath);
+        } catch (e) {
+            log.error(`Failed to remove output ${outputPath}`, e);
+        }
+    }
+};
+
+/** Generate a random string suitable for being used as a file name prefix */
+const randomPrefix = () => {
+    const alphabet =
+        "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
+
+    let result = "";
+    for (let i = 0; i < 10; i++)
+        result += alphabet[Math.floor(Math.random() * alphabet.length)];
+    return result;
+};
+
+const substitutePlaceholders = (
+    command: string[],
+    inputFilePath: string,
+    outputFilePath: string,
+) =>
+    command
+        .map((segment) => {
+            if (segment == ffmpegPathPlaceholder) {
+                return undefined;
+            } else if (segment == inputPathPlaceholder) {
+                return inputFilePath;
+            } else if (segment == outputPathPlaceholder) {
+                return outputFilePath;
+            } else {
+                return segment;
+            }
+        })
+        .filter((c) => !!c);
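A sketch of driving the reworked worker, assuming a command built from the placeholder constants in constants/ffmpeg (the specific thumbnail command is illustrative, not the one the app ships):

```ts
// Sketch: ask the wasm worker to extract a single video frame as JPEG.
// The placeholders get substituted with in-memory FS paths by the worker.
const videoThumbnail = async (
    worker: DedicatedFFmpegWorker,
    video: Blob,
): Promise<Uint8Array> =>
    worker.exec(
        [
            ffmpegPathPlaceholder, // dropped on the wasm path
            "-i",
            inputPathPlaceholder,
            "-ss",
            "00:00:01",
            "-vframes",
            "1",
            outputPathPlaceholder,
        ],
        video,
        "jpeg", // output extension matters for some conversions
        30 * 1000, // timeout in ms; 0 would mean no timeout
    );
```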
@@ -1,5 +1,5 @@
+import { FILE_TYPE } from "@/media/file";
 import { tryToParseDateTime } from "@ente/shared/time";
-import { FILE_TYPE } from "constants/file";
 import { getLocalCollections } from "services/collectionService";
 import { getLocalFiles } from "services/fileService";
 import {

@@ -7,7 +7,7 @@ import {
     getClippedMetadataJSONMapKeyForFile,
     getMetadataJSONMapKeyForFile,
     getMetadataJSONMapKeyForJSON,
-} from "services/upload/metadataService";
+} from "services/upload/takeout";
 import { getUserDetailsV2 } from "services/userService";
 import { groupFilesBasedOnCollectionID } from "utils/file";
@@ -1,7 +1,7 @@
 import { getFileNameSize } from "@/next/file";
+import type { DataStream } from "@ente/shared/utils/data-stream";
 import { FILE_READER_CHUNK_SIZE, PICKED_UPLOAD_TYPE } from "constants/upload";
 import { getElectronFileStream, getFileStream } from "services/readerService";
-import { DataStream } from "types/upload";
 import { getImportSuggestion } from "utils/upload";

 // This was used to verify that converting from the browser readable stream
web/packages/media/file.ts (new file, +6)
@@ -0,0 +1,6 @@
+export enum FILE_TYPE {
+    IMAGE,
+    VIDEO,
+    LIVE_PHOTO,
+    OTHERS,
+}
@@ -1,5 +1,52 @@
 import { fileNameFromComponents, nameAndExtension } from "@/next/file";
 import JSZip from "jszip";
+import { FILE_TYPE } from "./file";
+
+const potentialImageExtensions = [
+    "heic",
+    "heif",
+    "jpeg",
+    "jpg",
+    "png",
+    "gif",
+    "bmp",
+    "tiff",
+    "webp",
+];
+
+const potentialVideoExtensions = [
+    "mov",
+    "mp4",
+    "m4v",
+    "avi",
+    "wmv",
+    "flv",
+    "mkv",
+    "webm",
+    "3gp",
+    "3g2",
+    "avi",
+    "ogv",
+    "mpg",
+    "mp",
+];
+
+/**
+ * Use the file extension of the given {@link fileName} to deduce if it is
+ * potentially the image or the video part of a Live Photo.
+ */
+export const potentialFileTypeFromExtension = (
+    fileName: string,
+): FILE_TYPE | undefined => {
+    let [, ext] = nameAndExtension(fileName);
+    if (!ext) return undefined;
+
+    ext = ext.toLowerCase();
+
+    if (potentialImageExtensions.includes(ext)) return FILE_TYPE.IMAGE;
+    else if (potentialVideoExtensions.includes(ext)) return FILE_TYPE.VIDEO;
+    else return undefined;
+};

 /**
  * An in-memory representation of a live photo.
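A small usage sketch of potentialFileTypeFromExtension for the clustering it exists to serve (the pairing helper is hypothetical):

```ts
// Sketch: pair the image and video halves of a potential Live Photo by
// extension alone, before any costlier content inspection happens.
const isPotentialLivePhotoPair = (fileName1: string, fileName2: string) => {
    const t1 = potentialFileTypeFromExtension(fileName1);
    const t2 = potentialFileTypeFromExtension(fileName2);
    return (
        (t1 === FILE_TYPE.IMAGE && t2 === FILE_TYPE.VIDEO) ||
        (t1 === FILE_TYPE.VIDEO && t2 === FILE_TYPE.IMAGE)
    );
};
```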
@@ -113,6 +113,10 @@ export const openCache = async (
  *
  *     await blob.arrayBuffer()
  *
+ * To convert from a Blob to Uint8Array, chain the two steps
+ *
+ *     new Uint8Array(await blob.arrayBuffer())
+ *
  * To convert from an ArrayBuffer or Uint8Array to Blob
  *
  *     new Blob([arrayBuffer, andOrAnyArray, andOrstring])
@@ -66,6 +66,13 @@ export const dirname = (path: string) => {
     return pathComponents.join("/");
 };

+/**
+ * Return a short description of the given {@link fileOrPath} suitable for
+ * helping identify it in log messages.
+ */
+export const fopLabel = (fileOrPath: File | string) =>
+    fileOrPath instanceof File ? `File(${fileOrPath.name})` : fileOrPath;
+
 export function getFileNameSize(file: File | ElectronFile) {
     return `${file.name}_${convertBytesToHumanReadable(file.size)}`;
 }
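A one-line usage sketch of fopLabel (the surrounding log call is hypothetical):

```ts
// Sketch: fopLabel in a log line, disambiguating in-memory Files from
// on-disk paths without dumping the entire File object.
const logUpload = (fileOrPath: File | string) =>
    log.info(`uploading ${fopLabel(fileOrPath)}`);
// => `uploading File(IMG_1234.HEIC)` or `uploading /home/me/IMG_1234.HEIC`
```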
@@ -1,8 +1,3 @@
-export enum UPLOAD_STRATEGY {
-    SINGLE_COLLECTION,
-    COLLECTION_PER_FOLDER,
-}
-
 /*
  * ElectronFile is a custom interface that is used to represent
  * any file on disk as a File-like object in the Electron desktop app.

@@ -21,11 +16,6 @@ export interface ElectronFile {
     arrayBuffer: () => Promise<Uint8Array>;
 }

-export interface DataStream {
-    stream: ReadableStream<Uint8Array>;
-    chunkCount: number;
-}
-
 export interface EventQueueItem {
     type: "upload" | "trash";
     folderPath: string;
@@ -204,14 +204,11 @@ export interface Electron {
      * yet possible, this function will throw an error with the
      * {@link CustomErrorMessage.NotAvailable} message.
      *
-     * @param fileName The name of the file whose data we're being given.
      * @param imageData The raw image data (the contents of the image file).
      *
      * @returns JPEG data of the converted image.
      */
-    convertToJPEG: (
-        fileName: string,
-        imageData: Uint8Array,
-    ) => Promise<Uint8Array>;
+    convertToJPEG: (imageData: Uint8Array) => Promise<Uint8Array>;

     /**
      * Generate a JPEG thumbnail for the given image.
@@ -224,24 +221,26 @@ export interface Electron {
      * not yet possible, this function will throw an error with the
      * {@link CustomErrorMessage.NotAvailable} message.
      *
-     * @param inputFile The file whose thumbnail we want.
+     * @param dataOrPath The raw image data (the contents of the image file), or
+     * the path to the image file, whose thumbnail we want to generate.
      * @param maxDimension The maximum width or height of the generated
      * thumbnail.
      * @param maxSize Maximum size (in bytes) of the generated thumbnail.
      *
      * @returns JPEG data of the generated thumbnail.
      */
     generateImageThumbnail: (
-        inputFile: File | ElectronFile,
+        dataOrPath: Uint8Array | string,
         maxDimension: number,
         maxSize: number,
     ) => Promise<Uint8Array>;

     /**
-     * Execute a ffmpeg {@link command}.
+     * Execute a FFmpeg {@link command} on the given {@link dataOrPath}.
      *
-     * This executes the command using the ffmpeg executable we bundle with our
-     * desktop app. There is also a ffmpeg wasm implementation that we use when
-     * running on the web, it also has a sibling function with the same
+     * This executes the command using a FFmpeg executable we bundle with our
+     * desktop app. We also have a wasm FFmpeg implementation that we use
+     * when running on the web, which has a sibling function with the same
      * parameters. See [Note: ffmpeg in Electron].
      *
      * @param command An array of strings, each representing one positional
@@ -250,25 +249,27 @@ export interface Electron {
      * (respectively {@link inputPathPlaceholder},
      * {@link outputPathPlaceholder}, {@link ffmpegPathPlaceholder}).
      *
-     * @param inputDataOrPath The bytes of the input file, or the path to the
-     * input file on the user's local disk. In both cases, the data gets
-     * serialized to a temporary file, and then that path gets substituted in
-     * the ffmpeg {@link command} by {@link inputPathPlaceholder}.
+     * @param dataOrPath The bytes of the input file, or the path to the input
+     * file on the user's local disk. In both cases, the data gets serialized to
+     * a temporary file, and then that path gets substituted in the FFmpeg
+     * {@link command} in lieu of {@link inputPathPlaceholder}.
      *
-     * @param outputFileName The name of the file we instruct ffmpeg to produce
-     * when giving it the given {@link command}. The contents of this file get
-     * returned as the result.
+     * @param outputFileExtension The extension (without the dot, e.g. "jpeg")
+     * to use for the output file that we ask FFmpeg to create in
+     * {@link command}. While this file will eventually get deleted, and we'll
+     * just return its contents, for some FFmpeg commands the extension matters
+     * (e.g. conversion to a JPEG fails if the extension is arbitrary).
      *
      * @param timeoutMS If non-zero, then abort and throw a timeout error if the
      * ffmpeg command takes more than the given number of milliseconds.
      *
      * @returns The contents of the output file produced by the ffmpeg command
-     * at {@link outputFileName}.
+     * (specified as {@link outputPathPlaceholder} in {@link command}).
      */
     ffmpegExec: (
         command: string[],
-        inputDataOrPath: Uint8Array | string,
-        outputFileName: string,
+        dataOrPath: Uint8Array | string,
+        outputFileExtension: string,
         timeoutMS: number,
     ) => Promise<Uint8Array>;
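A sketch of calling the desktop ffmpegExec as now documented, assuming the same placeholder constants are in scope on the web side (the conversion command is illustrative):

```ts
// Sketch: the desktop-side sibling of the wasm exec shown earlier. Here
// the input is a path on disk, so the caller serializes nothing; the
// Node.js layer writes the temp files and substitutes the placeholders.
const convertToMP4 = async (
    electron: Electron,
    inputPath: string,
): Promise<Uint8Array> =>
    electron.ffmpegExec(
        [
            ffmpegPathPlaceholder,
            "-i",
            inputPathPlaceholder,
            outputPathPlaceholder,
        ],
        inputPath, // could equally be a Uint8Array of the file's bytes
        "mp4", // extension for the temporary output file
        0, // 0 means no timeout
    );
```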
@@ -44,8 +44,8 @@ const workerBridge = {
     logToDisk,
     // Needed by ML worker
     getAuthToken: () => ensureLocalUser().then((user) => user.token),
-    convertToJPEG: (fileName: string, imageData: Uint8Array) =>
-        ensureElectron().convertToJPEG(fileName, imageData),
+    convertToJPEG: (imageData: Uint8Array) =>
+        ensureElectron().convertToJPEG(imageData),
     detectFaces: (input: Float32Array) => ensureElectron().detectFaces(input),
     faceEmbedding: (input: Float32Array) =>
         ensureElectron().faceEmbedding(input),
@@ -1,4 +1,4 @@
-import { DataStream } from "@/next/types/file";
+import type { DataStream } from "../utils/data-stream";

 export interface LocalFileAttributes<
     T extends string | Uint8Array | DataStream,
@@ -22,7 +22,6 @@ export function isApiErrorResponse(object: any): object is ApiErrorResponse {
 }

 export const CustomError = {
-    THUMBNAIL_GENERATION_FAILED: "thumbnail generation failed",
     VIDEO_PLAYBACK_FAILED: "video playback failed",
     ETAG_MISSING: "no header/etag present in response body",
     KEY_MISSING: "encrypted key missing from localStorage",

@@ -49,7 +48,6 @@ export const CustomError = {
     SUBSCRIPTION_NEEDED: "subscription not present",
     NOT_FOUND: "not found ",
-    NO_METADATA: "no metadata",
     TOO_LARGE_LIVE_PHOTO_ASSETS: "too large live photo assets",
     NOT_A_DATE: "not a date",
     NOT_A_LOCATION: "not a location",
     FILE_ID_NOT_FOUND: "file with id not found",
web/packages/shared/utils/data-stream.ts (new file, +8)
@@ -0,0 +1,8 @@
+export interface DataStream {
+    stream: ReadableStream<Uint8Array>;
+    chunkCount: number;
+}
+
+export function isDataStream(object: any): object is DataStream {
+    return "stream" in object;
+}
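A sketch of the isDataStream guard narrowing the string | Uint8Array | DataStream union used by LocalFileAttributes above (the helper is hypothetical):

```ts
// Sketch: narrow a mixed upload payload with the isDataStream type guard.
const byteLengthHint = (
    content: string | Uint8Array | DataStream,
): number | undefined => {
    if (isDataStream(content)) return undefined; // length unknown upfront
    return typeof content === "string"
        ? new TextEncoder().encode(content).length // byte length, not chars
        : content.length;
};
```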
@@ -1,14 +0,0 @@
-const CHARACTERS =
-    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
-
-export function generateTempName(length: number, suffix: string) {
-    let tempName = "";
-
-    const charactersLength = CHARACTERS.length;
-    for (let i = 0; i < length; i++) {
-        tempName += CHARACTERS.charAt(
-            Math.floor(Math.random() * charactersLength),
-        );
-    }
-    return `${tempName}-${suffix}`;
-}