[web][desktop] Upload refactoring to get desktop watch to work again - Part 3/x (#1515)

This commit is contained in:
Manav Rathi 2024-04-22 10:44:42 +05:30 committed by GitHub
commit 24d3156a5b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
58 changed files with 2079 additions and 2026 deletions

View file

@ -94,12 +94,12 @@ Some extra ones specific to the code here are:
### Format conversion
The main tool we use is for arbitrary conversions is FFMPEG. To bundle a
The main tool we use for arbitrary conversions is ffmpeg. To bundle a
(platform specific) static binary of ffmpeg with our app, we use
[ffmpeg-static](https://github.com/eugeneware/ffmpeg-static).
> There is a significant (~20x) speed difference between using the compiled
> FFMPEG binary and using the WASM one (that our renderer process already has).
> ffmpeg binary and using the wasm one (that our renderer process already has).
> Which is why we bundle it to speed up operations on the desktop app.
In addition, we also bundle a static Linux binary of imagemagick in our extra

View file

@ -29,7 +29,7 @@ import { createWatcher } from "./main/services/watch";
import { userPreferences } from "./main/stores/user-preferences";
import { migrateLegacyWatchStoreIfNeeded } from "./main/stores/watch";
import { registerStreamProtocol } from "./main/stream";
import { isDev } from "./main/util";
import { isDev } from "./main/utils-electron";
/**
* The URL where the renderer HTML is being served from.
@ -205,6 +205,8 @@ const createMainWindow = async () => {
window.webContents.reload();
});
// "The unresponsive event is fired when Chromium detects that your
// webContents is not responding to input messages for > 30 seconds."
window.webContents.on("unresponsive", () => {
log.error(
"Main window's webContents are unresponsive, will restart the renderer process",

View file

@ -11,18 +11,7 @@ export function handleDownloads(mainWindow: BrowserWindow) {
});
}
export function handleExternalLinks(mainWindow: BrowserWindow) {
mainWindow.webContents.setWindowOpenHandler(({ url }) => {
if (!url.startsWith(rendererURL)) {
shell.openExternal(url);
return { action: "deny" };
} else {
return { action: "allow" };
}
});
}
export function getUniqueSavePath(filename: string, directory: string): string {
function getUniqueSavePath(filename: string, directory: string): string {
let uniqueFileSavePath = path.join(directory, filename);
const { name: filenameWithoutExtension, ext: extension } =
path.parse(filename);
@ -42,12 +31,15 @@ export function getUniqueSavePath(filename: string, directory: string): string {
return uniqueFileSavePath;
}
function lowerCaseHeaders(responseHeaders: Record<string, string[]>) {
const headers: Record<string, string[]> = {};
for (const key of Object.keys(responseHeaders)) {
headers[key.toLowerCase()] = responseHeaders[key];
}
return headers;
export function handleExternalLinks(mainWindow: BrowserWindow) {
    mainWindow.webContents.setWindowOpenHandler(({ url }) => {
        // URLs served by our own renderer may open in-app; anything else is
        // handed off to the system browser.
        if (url.startsWith(rendererURL)) return { action: "allow" };
        shell.openExternal(url);
        return { action: "deny" };
    });
}
export function addAllowOriginHeader(mainWindow: BrowserWindow) {
@ -61,3 +53,11 @@ export function addAllowOriginHeader(mainWindow: BrowserWindow) {
},
);
}
function lowerCaseHeaders(responseHeaders: Record<string, string[]>) {
    // Rebuild the header map with every header name folded to lower case;
    // the values are carried over unchanged.
    return Object.fromEntries(
        Object.entries(responseHeaders).map(([name, values]) => [
            name.toLowerCase(),
            values,
        ]),
    );
}

View file

@ -12,7 +12,6 @@ import type { FSWatcher } from "chokidar";
import { ipcMain } from "electron/main";
import type {
CollectionMapping,
ElectronFile,
FolderWatch,
PendingUploads,
} from "../types/ipc";
@ -39,13 +38,13 @@ import {
updateAndRestart,
updateOnNextRestart,
} from "./services/app-update";
import { runFFmpegCmd } from "./services/ffmpeg";
import { convertToJPEG, generateImageThumbnail } from "./services/convert";
import { ffmpegExec } from "./services/ffmpeg";
import { getDirFiles } from "./services/fs";
import {
convertToJPEG,
generateImageThumbnail,
} from "./services/imageProcessor";
import { clipImageEmbedding, clipTextEmbedding } from "./services/ml-clip";
clipImageEmbedding,
clipTextEmbeddingIfAvailable,
} from "./services/ml-clip";
import { detectFaces, faceEmbedding } from "./services/ml-face";
import {
clearStores,
@ -66,7 +65,7 @@ import {
watchUpdateIgnoredFiles,
watchUpdateSyncedFiles,
} from "./services/watch";
import { openDirectory, openLogDirectory } from "./util";
import { openDirectory, openLogDirectory } from "./utils-electron";
/**
* Listen for IPC events sent/invoked by the renderer process, and route them to
@ -142,8 +141,8 @@ export const attachIPCHandlers = () => {
// - Conversion
ipcMain.handle("convertToJPEG", (_, fileData, filename) =>
convertToJPEG(fileData, filename),
ipcMain.handle("convertToJPEG", (_, fileName, imageData) =>
convertToJPEG(fileName, imageData),
);
ipcMain.handle(
@ -153,14 +152,14 @@ export const attachIPCHandlers = () => {
);
ipcMain.handle(
"runFFmpegCmd",
"ffmpegExec",
(
_,
cmd: string[],
inputFile: File | ElectronFile,
command: string[],
inputDataOrPath: Uint8Array | string,
outputFileName: string,
dontTimeout?: boolean,
) => runFFmpegCmd(cmd, inputFile, outputFileName, dontTimeout),
timeoutMS: number,
) => ffmpegExec(command, inputDataOrPath, outputFileName, timeoutMS),
);
// - ML
@ -169,8 +168,8 @@ export const attachIPCHandlers = () => {
clipImageEmbedding(jpegImageData),
);
ipcMain.handle("clipTextEmbedding", (_, text: string) =>
clipTextEmbedding(text),
ipcMain.handle("clipTextEmbeddingIfAvailable", (_, text: string) =>
clipTextEmbeddingIfAvailable(text),
);
ipcMain.handle("detectFaces", (_, input: Float32Array) =>

View file

@ -1,6 +1,6 @@
import log from "electron-log";
import util from "node:util";
import { isDev } from "./util";
import { isDev } from "./utils-electron";
/**
* Initialize logging in the main process.

View file

@ -9,7 +9,7 @@ import { allowWindowClose } from "../main";
import { forceCheckForAppUpdates } from "./services/app-update";
import autoLauncher from "./services/auto-launcher";
import { userPreferences } from "./stores/user-preferences";
import { openLogDirectory } from "./util";
import { isDev, openLogDirectory } from "./utils-electron";
/** Create and return the entries in the app's main menu bar */
export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
@ -23,6 +23,9 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
const macOSOnly = (options: MenuItemConstructorOptions[]) =>
process.platform == "darwin" ? options : [];
const devOnly = (options: MenuItemConstructorOptions[]) =>
isDev ? options : [];
const handleCheckForUpdates = () => forceCheckForAppUpdates(mainWindow);
const handleViewChangelog = () =>
@ -139,7 +142,9 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
label: "View",
submenu: [
{ label: "Reload", role: "reload" },
{ label: "Toggle Dev Tools", role: "toggleDevTools" },
...devOnly([
{ label: "Toggle Dev Tools", role: "toggleDevTools" },
]),
{ type: "separator" },
{ label: "Toggle Full Screen", role: "togglefullscreen" },
],

View file

@ -58,17 +58,17 @@ const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => {
log.debug(() => "Attempting auto update");
autoUpdater.downloadUpdate();
let timeout: NodeJS.Timeout;
let timeoutId: ReturnType<typeof setTimeout>;
const fiveMinutes = 5 * 60 * 1000;
autoUpdater.on("update-downloaded", () => {
timeout = setTimeout(
timeoutId = setTimeout(
() => showUpdateDialog({ autoUpdatable: true, version }),
fiveMinutes,
);
});
autoUpdater.on("error", (error) => {
clearTimeout(timeout);
clearTimeout(timeoutId);
log.error("Auto update failed", error);
showUpdateDialog({ autoUpdatable: false, version });
});

View file

@ -1,12 +1,69 @@
/** @file Image conversions */
import { existsSync } from "fs";
import fs from "node:fs/promises";
import path from "path";
import { CustomErrors, ElectronFile } from "../../types/ipc";
import { CustomErrorMessage, ElectronFile } from "../../types/ipc";
import log from "../log";
import { writeStream } from "../stream";
import { generateTempFilePath } from "../temp";
import { execAsync, isDev } from "../util";
import { deleteTempFile } from "./ffmpeg";
import { execAsync, isDev } from "../utils-electron";
import { deleteTempFile, makeTempFilePath } from "../utils-temp";
/**
 * Convert the image at hand to a JPEG, returning the JPEG's bytes.
 *
 * @param fileName Name of the source image file (used to name the temp file).
 * @param imageData Contents of the source image.
 * @returns The converted JPEG's bytes.
 * @throws `CustomErrorMessage.NotAvailable` on Windows (via
 * {@link convertToJPEGCommand}), where native conversion is unsupported.
 */
export const convertToJPEG = async (
    fileName: string,
    imageData: Uint8Array,
): Promise<Uint8Array> => {
    const inputFilePath = await makeTempFilePath(fileName);
    const outputFilePath = await makeTempFilePath("output.jpeg");
    // Construct the command first, it may throw on NotAvailable on win32.
    const command = convertToJPEGCommand(inputFilePath, outputFilePath);
    try {
        await fs.writeFile(inputFilePath, imageData);
        await execAsync(command);
        return new Uint8Array(await fs.readFile(outputFilePath));
    } finally {
        try {
            // Fix: await the deletions. Previously these promises were not
            // awaited, so any deletion failure became an unhandled rejection
            // instead of being caught and logged by the catch below.
            await deleteTempFile(outputFilePath);
            await deleteTempFile(inputFilePath);
        } catch (e) {
            log.error("Ignoring error when cleaning up temp files", e);
        }
    }
};
/**
 * Build the platform-specific command line that converts the image at
 * {@link inputFilePath} into a JPEG at {@link outputFilePath}.
 */
const convertToJPEGCommand = (
    inputFilePath: string,
    outputFilePath: string,
) => {
    const platform = process.platform;
    if (platform == "darwin") {
        // macOS ships `sips`, which can do the conversion natively.
        return [
            "sips",
            "-s",
            "format",
            "jpeg",
            inputFilePath,
            "--out",
            outputFilePath,
        ];
    }
    if (platform == "linux") {
        // On Linux we use the ImageMagick binary bundled with the app.
        return [
            imageMagickPath(),
            inputFilePath,
            "-quality",
            "100%",
            outputFilePath,
        ];
    }
    // "win32" (and anything else): no native conversion available.
    throw new Error(CustomErrorMessage.NotAvailable);
};
/** Path to the Linux image-magick executable bundled with our app */
const imageMagickPath = () => {
    const baseDir = isDev ? "build" : process.resourcesPath;
    return path.join(baseDir, "image-magick");
};
const IMAGE_MAGICK_PLACEHOLDER = "IMAGE_MAGICK";
const MAX_DIMENSION_PLACEHOLDER = "MAX_DIMENSION";
@ -18,16 +75,6 @@ const QUALITY_PLACEHOLDER = "QUALITY";
const MAX_QUALITY = 70;
const MIN_QUALITY = 50;
const SIPS_HEIC_CONVERT_COMMAND_TEMPLATE = [
"sips",
"-s",
"format",
"jpeg",
INPUT_PATH_PLACEHOLDER,
"--out",
OUTPUT_PATH_PLACEHOLDER,
];
const SIPS_THUMBNAIL_GENERATE_COMMAND_TEMPLATE = [
"sips",
"-s",
@ -43,14 +90,6 @@ const SIPS_THUMBNAIL_GENERATE_COMMAND_TEMPLATE = [
OUTPUT_PATH_PLACEHOLDER,
];
const IMAGEMAGICK_HEIC_CONVERT_COMMAND_TEMPLATE = [
IMAGE_MAGICK_PLACEHOLDER,
INPUT_PATH_PLACEHOLDER,
"-quality",
"100%",
OUTPUT_PATH_PLACEHOLDER,
];
const IMAGE_MAGICK_THUMBNAIL_GENERATE_COMMAND_TEMPLATE = [
IMAGE_MAGICK_PLACEHOLDER,
INPUT_PATH_PLACEHOLDER,
@ -66,92 +105,6 @@ const IMAGE_MAGICK_THUMBNAIL_GENERATE_COMMAND_TEMPLATE = [
OUTPUT_PATH_PLACEHOLDER,
];
const imageMagickStaticPath = () =>
path.join(isDev ? "build" : process.resourcesPath, "image-magick");
export async function convertToJPEG(
fileData: Uint8Array,
filename: string,
): Promise<Uint8Array> {
if (process.platform == "win32")
throw Error(CustomErrors.WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED);
const convertedFileData = await convertToJPEG_(fileData, filename);
return convertedFileData;
}
async function convertToJPEG_(
fileData: Uint8Array,
filename: string,
): Promise<Uint8Array> {
let tempInputFilePath: string;
let tempOutputFilePath: string;
try {
tempInputFilePath = await generateTempFilePath(filename);
tempOutputFilePath = await generateTempFilePath("output.jpeg");
await fs.writeFile(tempInputFilePath, fileData);
await execAsync(
constructConvertCommand(tempInputFilePath, tempOutputFilePath),
);
return new Uint8Array(await fs.readFile(tempOutputFilePath));
} catch (e) {
log.error("Failed to convert HEIC", e);
throw e;
} finally {
try {
await fs.rm(tempInputFilePath, { force: true });
} catch (e) {
log.error(`Failed to remove tempInputFile ${tempInputFilePath}`, e);
}
try {
await fs.rm(tempOutputFilePath, { force: true });
} catch (e) {
log.error(
`Failed to remove tempOutputFile ${tempOutputFilePath}`,
e,
);
}
}
}
function constructConvertCommand(
tempInputFilePath: string,
tempOutputFilePath: string,
) {
let convertCmd: string[];
if (process.platform == "darwin") {
convertCmd = SIPS_HEIC_CONVERT_COMMAND_TEMPLATE.map((cmdPart) => {
if (cmdPart === INPUT_PATH_PLACEHOLDER) {
return tempInputFilePath;
}
if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
return tempOutputFilePath;
}
return cmdPart;
});
} else if (process.platform == "linux") {
convertCmd = IMAGEMAGICK_HEIC_CONVERT_COMMAND_TEMPLATE.map(
(cmdPart) => {
if (cmdPart === IMAGE_MAGICK_PLACEHOLDER) {
return imageMagickStaticPath();
}
if (cmdPart === INPUT_PATH_PLACEHOLDER) {
return tempInputFilePath;
}
if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
return tempOutputFilePath;
}
return cmdPart;
},
);
} else {
throw new Error(`Unsupported OS ${process.platform}`);
}
return convertCmd;
}
export async function generateImageThumbnail(
inputFile: File | ElectronFile,
maxDimension: number,
@ -165,7 +118,7 @@ export async function generateImageThumbnail(
CustomErrors.WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED,
);
if (!existsSync(inputFile.path)) {
const tempFilePath = await generateTempFilePath(inputFile.name);
const tempFilePath = await makeTempFilePath(inputFile.name);
await writeStream(tempFilePath, await inputFile.stream());
inputFilePath = tempFilePath;
createdTempInputFile = true;
@ -197,7 +150,7 @@ async function generateImageThumbnail_(
let tempOutputFilePath: string;
let quality = MAX_QUALITY;
try {
tempOutputFilePath = await generateTempFilePath("thumb.jpeg");
tempOutputFilePath = await makeTempFilePath("thumb.jpeg");
let thumbnail: Uint8Array;
do {
await execAsync(
@ -256,7 +209,7 @@ function constructThumbnailGenerationCommand(
thumbnailGenerationCmd =
IMAGE_MAGICK_THUMBNAIL_GENERATE_COMMAND_TEMPLATE.map((cmdPart) => {
if (cmdPart === IMAGE_MAGICK_PLACEHOLDER) {
return imageMagickStaticPath();
return imageMagickPath();
}
if (cmdPart === INPUT_PATH_PLACEHOLDER) {
return inputFilePath;

View file

@ -1,22 +1,19 @@
import pathToFfmpeg from "ffmpeg-static";
import { existsSync } from "node:fs";
import fs from "node:fs/promises";
import { ElectronFile } from "../../types/ipc";
import log from "../log";
import { writeStream } from "../stream";
import { generateTempFilePath, getTempDirPath } from "../temp";
import { execAsync } from "../util";
import { withTimeout } from "../utils";
import { execAsync } from "../utils-electron";
import { deleteTempFile, makeTempFilePath } from "../utils-temp";
const INPUT_PATH_PLACEHOLDER = "INPUT";
const FFMPEG_PLACEHOLDER = "FFMPEG";
const OUTPUT_PATH_PLACEHOLDER = "OUTPUT";
const ffmpegPathPlaceholder = "FFMPEG";
const inputPathPlaceholder = "INPUT";
const outputPathPlaceholder = "OUTPUT";
/**
* Run a ffmpeg command
*
* [Note: FFMPEG in Electron]
* [Note: ffmpeg in Electron]
*
* There is a wasm build of FFMPEG, but that is currently 10-20 times slower
* There is a wasm build of ffmpeg, but that is currently 10-20 times slower
* than the native build. That is slow enough to be unusable for our purposes.
* https://ffmpegwasm.netlify.app/docs/performance
*
@ -36,79 +33,65 @@ const OUTPUT_PATH_PLACEHOLDER = "OUTPUT";
* $ file ente.app/Contents/Frameworks/Electron\ Framework.framework/Versions/Current/Libraries/libffmpeg.dylib
* .../libffmpeg.dylib: Mach-O 64-bit dynamically linked shared library arm64
*
* I'm not sure if our code is supposed to be able to use it, and how.
* But I'm not sure if our code is supposed to be able to use it, and how.
*/
export async function runFFmpegCmd(
cmd: string[],
inputFile: File | ElectronFile,
export const ffmpegExec = async (
command: string[],
inputDataOrPath: Uint8Array | string,
outputFileName: string,
dontTimeout?: boolean,
) {
let inputFilePath = null;
let createdTempInputFile = null;
timeoutMS: number,
): Promise<Uint8Array> => {
// TODO (MR): This currently copies files for both input and output. This
// needs to be tested extremely large video files when invoked downstream of
// `convertToMP4` in the web code.
let inputFilePath: string;
let isInputFileTemporary: boolean;
if (typeof inputDataOrPath == "string") {
inputFilePath = inputDataOrPath;
isInputFileTemporary = false;
} else {
inputFilePath = await makeTempFilePath("input" /* arbitrary */);
isInputFileTemporary = true;
await fs.writeFile(inputFilePath, inputDataOrPath);
}
let outputFilePath: string | undefined;
try {
if (!existsSync(inputFile.path)) {
const tempFilePath = await generateTempFilePath(inputFile.name);
await writeStream(tempFilePath, await inputFile.stream());
inputFilePath = tempFilePath;
createdTempInputFile = true;
} else {
inputFilePath = inputFile.path;
}
const outputFileData = await runFFmpegCmd_(
cmd,
outputFilePath = await makeTempFilePath(outputFileName);
const cmd = substitutePlaceholders(
command,
inputFilePath,
outputFileName,
dontTimeout,
outputFilePath,
);
return new File([outputFileData], outputFileName);
} finally {
if (createdTempInputFile) {
await deleteTempFile(inputFilePath);
}
}
}
export async function runFFmpegCmd_(
cmd: string[],
if (timeoutMS) await withTimeout(execAsync(cmd), 30 * 1000);
else await execAsync(cmd);
return fs.readFile(outputFilePath);
} finally {
if (isInputFileTemporary) await deleteTempFile(inputFilePath);
if (outputFilePath) await deleteTempFile(outputFilePath);
}
};
const substitutePlaceholders = (
command: string[],
inputFilePath: string,
outputFileName: string,
dontTimeout = false,
) {
let tempOutputFilePath: string;
try {
tempOutputFilePath = await generateTempFilePath(outputFileName);
cmd = cmd.map((cmdPart) => {
if (cmdPart === FFMPEG_PLACEHOLDER) {
return ffmpegBinaryPath();
} else if (cmdPart === INPUT_PATH_PLACEHOLDER) {
return inputFilePath;
} else if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
return tempOutputFilePath;
} else {
return cmdPart;
}
});
if (dontTimeout) {
await execAsync(cmd);
outputFilePath: string,
) =>
command.map((segment) => {
if (segment == ffmpegPathPlaceholder) {
return ffmpegBinaryPath();
} else if (segment == inputPathPlaceholder) {
return inputFilePath;
} else if (segment == outputPathPlaceholder) {
return outputFilePath;
} else {
await promiseWithTimeout(execAsync(cmd), 30 * 1000);
return segment;
}
if (!existsSync(tempOutputFilePath)) {
throw new Error("ffmpeg output file not found");
}
const outputFile = await fs.readFile(tempOutputFilePath);
return new Uint8Array(outputFile);
} catch (e) {
log.error("FFMPEG command failed", e);
throw e;
} finally {
await deleteTempFile(tempOutputFilePath);
}
}
});
/**
* Return the path to the `ffmpeg` binary.
@ -122,40 +105,3 @@ const ffmpegBinaryPath = () => {
// https://github.com/eugeneware/ffmpeg-static/issues/16
return pathToFfmpeg.replace("app.asar", "app.asar.unpacked");
};
export async function writeTempFile(fileStream: Uint8Array, fileName: string) {
    // Allocate a fresh temp file path and persist the given bytes to it,
    // returning the path to the caller.
    const destination = await generateTempFilePath(fileName);
    await fs.writeFile(destination, fileStream);
    return destination;
}
/**
 * Delete the file at {@link tempFilePath}, which is expected to live inside
 * our temp directory. Paths outside it are still deleted, but an error is
 * logged first as a sanity check.
 */
export async function deleteTempFile(tempFilePath: string) {
    const tempDirPath = await getTempDirPath();
    if (!tempFilePath.startsWith(tempDirPath))
        // Fix: this was a double-quoted string, so `${tempFilePath}` was
        // logged literally instead of being interpolated.
        log.error(`Attempting to delete a non-temp file ${tempFilePath}`);
    await fs.rm(tempFilePath, { force: true });
}
/**
 * Await {@link request}, but reject with an "Operation timed out" error if it
 * does not settle within {@link timeout} milliseconds.
 *
 * The pending timer is always cleared once the race settles.
 */
const promiseWithTimeout = async <T>(
    request: Promise<T>,
    timeout: number,
): Promise<T> => {
    let timeoutId: ReturnType<typeof setTimeout> | undefined;
    const rejectOnTimeout = new Promise<never>((_, reject) => {
        timeoutId = setTimeout(
            () => reject(new Error("Operation timed out")),
            timeout,
        );
    });
    try {
        return await Promise.race([request, rejectOnTimeout]);
    } finally {
        // Fix: previously the timer was cleared only when `request`
        // resolved; if it rejected, the pending timer leaked (keeping the
        // process alive / firing a pointless rejection later).
        clearTimeout(timeoutId);
    }
};

View file

@ -5,115 +5,22 @@
*
* @see `web/apps/photos/src/services/clip-service.ts` for more details.
*/
import { existsSync } from "fs";
import jpeg from "jpeg-js";
import fs from "node:fs/promises";
import * as ort from "onnxruntime-node";
import Tokenizer from "../../thirdparty/clip-bpe-ts/mod";
import { CustomErrors } from "../../types/ipc";
import log from "../log";
import { writeStream } from "../stream";
import { generateTempFilePath } from "../temp";
import { deleteTempFile } from "./ffmpeg";
import {
createInferenceSession,
downloadModel,
modelPathDownloadingIfNeeded,
modelSavePath,
} from "./ml";
import { deleteTempFile, makeTempFilePath } from "../utils-temp";
import { makeCachedInferenceSession } from "./ml";
const textModelName = "clip-text-vit-32-uint8.onnx";
const textModelByteSize = 64173509; // 61.2 MB
const imageModelName = "clip-image-vit-32-float32.onnx";
const imageModelByteSize = 351468764; // 335.2 MB
let activeImageModelDownload: Promise<string> | undefined;
/**
 * Return the local path to the CLIP image model, downloading it first if
 * needed. Concurrent callers share the same in-flight download.
 */
const imageModelPathDownloadingIfNeeded = async () => {
    try {
        if (activeImageModelDownload) {
            log.info("Waiting for CLIP image model download to finish");
            // Fix: return the path that the in-flight download resolves to.
            // Previously this branch awaited the download but fell through,
            // returning `undefined` to concurrent callers.
            return await activeImageModelDownload;
        } else {
            activeImageModelDownload = modelPathDownloadingIfNeeded(
                imageModelName,
                imageModelByteSize,
            );
            return await activeImageModelDownload;
        }
    } finally {
        activeImageModelDownload = undefined;
    }
};
let textModelDownloadInProgress = false;
/* TODO(MR): use the generic method. Then we can remove the exports for the
internal details functions that we use here */
/* TODO(MR): use the generic method. Then we can remove the exports for the
   internal details functions that we use here */
/**
 * Return the local path to the CLIP text model.
 *
 * If the model is missing (or its size doesn't match the expected size), a
 * background download is kicked off and {@link CustomErrors.MODEL_DOWNLOAD_PENDING}
 * is thrown so that the caller knows to retry later.
 */
const textModelPathDownloadingIfNeeded = async () => {
    if (textModelDownloadInProgress)
        throw Error(CustomErrors.MODEL_DOWNLOAD_PENDING);

    // Start a background (un-awaited) download of the text model and throw
    // MODEL_DOWNLOAD_PENDING. Previously this logic was duplicated verbatim
    // in both the "missing" and "size mismatch" branches below.
    const downloadInBackgroundAndThrow = (modelPath: string): never => {
        textModelDownloadInProgress = true;
        downloadModel(modelPath, textModelName)
            .catch((e) => {
                // log but otherwise ignore
                log.error("CLIP text model download failed", e);
            })
            .finally(() => {
                textModelDownloadInProgress = false;
            });
        throw Error(CustomErrors.MODEL_DOWNLOAD_PENDING);
    };

    const modelPath = modelSavePath(textModelName);
    if (!existsSync(modelPath)) {
        log.info("CLIP text model not found, downloading");
        downloadInBackgroundAndThrow(modelPath);
    } else {
        const localFileSize = (await fs.stat(modelPath)).size;
        if (localFileSize !== textModelByteSize) {
            log.error(
                `CLIP text model size ${localFileSize} does not match the expected size, downloading again`,
            );
            downloadInBackgroundAndThrow(modelPath);
        }
    }

    return modelPath;
};
let imageSessionPromise: Promise<any> | undefined;
// Lazily create (and thereafter reuse) the ONNX session for the image model.
const onnxImageSession = async () => {
    if (imageSessionPromise) return imageSessionPromise;
    imageSessionPromise = (async () => {
        const modelPath = await imageModelPathDownloadingIfNeeded();
        return createInferenceSession(modelPath);
    })();
    return imageSessionPromise;
};
let _textSession: any = null;
// Lazily create (and thereafter reuse) the ONNX session for the text model.
const onnxTextSession = async () => {
    if (_textSession) return _textSession;
    const modelPath = await textModelPathDownloadingIfNeeded();
    _textSession = await createInferenceSession(modelPath);
    return _textSession;
};
const cachedCLIPImageSession = makeCachedInferenceSession(
"clip-image-vit-32-float32.onnx",
351468764 /* 335.2 MB */,
);
export const clipImageEmbedding = async (jpegImageData: Uint8Array) => {
const tempFilePath = await generateTempFilePath("");
const tempFilePath = await makeTempFilePath("");
const imageStream = new Response(jpegImageData.buffer).body;
await writeStream(tempFilePath, imageStream);
try {
@ -124,19 +31,20 @@ export const clipImageEmbedding = async (jpegImageData: Uint8Array) => {
};
const clipImageEmbedding_ = async (jpegFilePath: string) => {
const imageSession = await onnxImageSession();
const session = await cachedCLIPImageSession();
const t1 = Date.now();
const rgbData = await getRGBData(jpegFilePath);
const feeds = {
input: new ort.Tensor("float32", rgbData, [1, 3, 224, 224]),
};
const t2 = Date.now();
const results = await imageSession.run(feeds);
const results = await session.run(feeds);
log.debug(
() =>
`onnx/clip image embedding took ${Date.now() - t1} ms (prep: ${t2 - t1} ms, inference: ${Date.now() - t2} ms)`,
);
const imageEmbedding = results["output"].data; // Float32Array
/* Need these model specific casts to type the result */
const imageEmbedding = results["output"].data as Float32Array;
return normalizeEmbedding(imageEmbedding);
};
@ -221,6 +129,11 @@ const normalizeEmbedding = (embedding: Float32Array) => {
return embedding;
};
const cachedCLIPTextSession = makeCachedInferenceSession(
"clip-text-vit-32-uint8.onnx",
64173509 /* 61.2 MB */,
);
let _tokenizer: Tokenizer = null;
const getTokenizer = () => {
if (!_tokenizer) {
@ -229,8 +142,21 @@ const getTokenizer = () => {
return _tokenizer;
};
export const clipTextEmbedding = async (text: string) => {
const imageSession = await onnxTextSession();
export const clipTextEmbeddingIfAvailable = async (text: string) => {
const sessionOrStatus = await Promise.race([
cachedCLIPTextSession(),
"downloading-model",
]);
// Don't wait for the download to complete
if (typeof sessionOrStatus == "string") {
console.log(
"Ignoring CLIP text embedding request because model download is pending",
);
return undefined;
}
const session = sessionOrStatus;
const t1 = Date.now();
const tokenizer = getTokenizer();
const tokenizedText = Int32Array.from(tokenizer.encodeForCLIP(text));
@ -238,11 +164,11 @@ export const clipTextEmbedding = async (text: string) => {
input: new ort.Tensor("int32", tokenizedText, [1, 77]),
};
const t2 = Date.now();
const results = await imageSession.run(feeds);
const results = await session.run(feeds);
log.debug(
() =>
`onnx/clip text embedding took ${Date.now() - t1} ms (prep: ${t2 - t1} ms, inference: ${Date.now() - t2} ms)`,
);
const textEmbedding = results["output"].data;
const textEmbedding = results["output"].data as Float32Array;
return normalizeEmbedding(textEmbedding);
};

View file

@ -8,78 +8,15 @@
*/
import * as ort from "onnxruntime-node";
import log from "../log";
import { createInferenceSession, modelPathDownloadingIfNeeded } from "./ml";
import { makeCachedInferenceSession } from "./ml";
const faceDetectionModelName = "yolov5s_face_640_640_dynamic.onnx";
const faceDetectionModelByteSize = 30762872; // 29.3 MB
const faceEmbeddingModelName = "mobilefacenet_opset15.onnx";
const faceEmbeddingModelByteSize = 5286998; // 5 MB
let activeFaceDetectionModelDownload: Promise<string> | undefined;
/**
 * Return the local path to the face detection model, downloading it first if
 * needed. Concurrent callers share the same in-flight download.
 */
const faceDetectionModelPathDownloadingIfNeeded = async () => {
    try {
        if (activeFaceDetectionModelDownload) {
            log.info("Waiting for face detection model download to finish");
            // Fix: return the path the in-flight download resolves to;
            // previously this branch awaited it but returned `undefined`.
            return await activeFaceDetectionModelDownload;
        } else {
            activeFaceDetectionModelDownload = modelPathDownloadingIfNeeded(
                faceDetectionModelName,
                faceDetectionModelByteSize,
            );
            return await activeFaceDetectionModelDownload;
        }
    } finally {
        activeFaceDetectionModelDownload = undefined;
    }
};
let _faceDetectionSession: Promise<ort.InferenceSession> | undefined;
// Lazily create (and thereafter reuse) the face detection ONNX session.
const faceDetectionSession = async () => {
    if (_faceDetectionSession) return _faceDetectionSession;
    _faceDetectionSession = faceDetectionModelPathDownloadingIfNeeded().then(
        (modelPath) => createInferenceSession(modelPath),
    );
    return _faceDetectionSession;
};
let activeFaceEmbeddingModelDownload: Promise<string> | undefined;
/**
 * Return the local path to the face embedding model, downloading it first if
 * needed. Concurrent callers share the same in-flight download.
 */
const faceEmbeddingModelPathDownloadingIfNeeded = async () => {
    try {
        if (activeFaceEmbeddingModelDownload) {
            log.info("Waiting for face embedding model download to finish");
            // Fix: return the path the in-flight download resolves to;
            // previously this branch awaited it but returned `undefined`.
            return await activeFaceEmbeddingModelDownload;
        } else {
            activeFaceEmbeddingModelDownload = modelPathDownloadingIfNeeded(
                faceEmbeddingModelName,
                faceEmbeddingModelByteSize,
            );
            return await activeFaceEmbeddingModelDownload;
        }
    } finally {
        activeFaceEmbeddingModelDownload = undefined;
    }
};
let _faceEmbeddingSession: Promise<ort.InferenceSession> | undefined;
// Lazily create (and thereafter reuse) the face embedding ONNX session.
const faceEmbeddingSession = async () => {
    if (_faceEmbeddingSession) return _faceEmbeddingSession;
    _faceEmbeddingSession = faceEmbeddingModelPathDownloadingIfNeeded().then(
        (modelPath) => createInferenceSession(modelPath),
    );
    return _faceEmbeddingSession;
};
const cachedFaceDetectionSession = makeCachedInferenceSession(
"yolov5s_face_640_640_dynamic.onnx",
30762872 /* 29.3 MB */,
);
export const detectFaces = async (input: Float32Array) => {
const session = await faceDetectionSession();
const session = await cachedFaceDetectionSession();
const t = Date.now();
const feeds = {
input: new ort.Tensor("float32", input, [1, 3, 640, 640]),
@ -89,6 +26,11 @@ export const detectFaces = async (input: Float32Array) => {
return results["output"].data;
};
const cachedFaceEmbeddingSession = makeCachedInferenceSession(
"mobilefacenet_opset15.onnx",
5286998 /* 5 MB */,
);
export const faceEmbedding = async (input: Float32Array) => {
// Dimension of each face (alias)
const mobileFaceNetFaceSize = 112;
@ -98,11 +40,11 @@ export const faceEmbedding = async (input: Float32Array) => {
const n = Math.round(input.length / (z * z * 3));
const inputTensor = new ort.Tensor("float32", input, [n, z, z, 3]);
const session = await faceEmbeddingSession();
const session = await cachedFaceEmbeddingSession();
const t = Date.now();
const feeds = { img_inputs: inputTensor };
const results = await session.run(feeds);
log.debug(() => `onnx/yolo face embedding took ${Date.now() - t} ms`);
// TODO: What's with this type? It works in practice, but double check.
return (results.embeddings as unknown as any)["cpuData"]; // as Float32Array;
/* Need these model specific casts to extract and type the result */
return (results.embeddings as unknown as any)["cpuData"] as Float32Array;
};

View file

@ -1,5 +1,5 @@
/**
* @file AI/ML related functionality.
* @file AI/ML related functionality, generic layer.
*
* @see also `ml-clip.ts`, `ml-face.ts`.
*
@ -18,6 +18,49 @@ import * as ort from "onnxruntime-node";
import log from "../log";
import { writeStream } from "../stream";
/**
* Return a function that can be used to trigger a download of the specified
* model, and the creating of an ONNX inference session initialized using it.
*
* Multiple parallel calls to the returned function are fine, it ensures that
* the model will be downloaded and the session created using it only once.
* All pending calls to it meanwhile will just await on the same promise.
*
* And once the promise is resolved, the created ONNX inference session will be
* cached, so subsequent calls to the returned function will just reuse the same
* session.
*
* {@link makeCachedInferenceSession} can itself be called anytime, it doesn't
* actively trigger a download until the returned function is called.
*
* @param modelName The name of the model to download.
* @param modelByteSize The size in bytes that we expect the model to have. If
* the size of the downloaded model does not match the expected size, then we
* will redownload it.
*
* @returns A function. Calling that function returns a promise to an ONNX
* session.
*/
export const makeCachedInferenceSession = (
modelName: string,
modelByteSize: number,
) => {
let session: Promise<ort.InferenceSession> | undefined;
const download = () =>
modelPathDownloadingIfNeeded(modelName, modelByteSize);
const createSession = (modelPath: string) =>
createInferenceSession(modelPath);
const cachedInferenceSession = () => {
if (!session) session = download().then(createSession);
return session;
};
return cachedInferenceSession;
};
/**
* Download the model named {@link modelName} if we don't already have it.
*
@ -26,7 +69,7 @@ import { writeStream } from "../stream";
*
* @returns the path to the model on the local machine.
*/
export const modelPathDownloadingIfNeeded = async (
const modelPathDownloadingIfNeeded = async (
modelName: string,
expectedByteSize: number,
) => {
@ -49,10 +92,10 @@ export const modelPathDownloadingIfNeeded = async (
};
/** Return the path where the given {@link modelName} is meant to be saved */
export const modelSavePath = (modelName: string) =>
const modelSavePath = (modelName: string) =>
path.join(app.getPath("userData"), "models", modelName);
export const downloadModel = async (saveLocation: string, name: string) => {
const downloadModel = async (saveLocation: string, name: string) => {
// `mkdir -p` the directory where we want to save the model.
const saveDir = path.dirname(saveLocation);
await fs.mkdir(saveDir, { recursive: true });
@ -69,7 +112,7 @@ export const downloadModel = async (saveLocation: string, name: string) => {
/**
* Create an ONNX {@link InferenceSession} with some defaults.
*/
export const createInferenceSession = async (modelPath: string) => {
const createInferenceSession = async (modelPath: string) => {
return await ort.InferenceSession.create(modelPath, {
// Restrict the number of threads to 1
intraOpNumThreads: 1,

View file

@ -1,35 +0,0 @@
import { app } from "electron/main";
import { existsSync } from "node:fs";
import fs from "node:fs/promises";
import path from "path";
const CHARACTERS =
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
/**
 * Return the path to an Ente specific directory inside the system temp
 * directory, creating it (and any missing parents) if needed.
 */
export async function getTempDirPath() {
    const dir = path.join(app.getPath("temp"), "ente");
    await fs.mkdir(dir, { recursive: true });
    return dir;
}
/** Return a random string of {@link length} characters drawn from CHARACTERS. */
function generateTempName(length: number) {
    const chars: string[] = [];
    for (let i = 0; i < length; i++) {
        const index = Math.floor(Math.random() * CHARACTERS.length);
        chars.push(CHARACTERS.charAt(index));
    }
    return chars.join("");
}
/**
 * Return a path (in the Ente temp directory) with a random prefix and the
 * given {@link formatSuffix}, guaranteed not to collide with an existing file.
 */
export async function generateTempFilePath(formatSuffix: string) {
    const tempDirPath = await getTempDirPath();
    // Keep drawing random names until we find one that is not taken.
    while (true) {
        const candidate = path.join(
            tempDirPath,
            generateTempName(10) + "-" + formatSuffix,
        );
        if (!existsSync(candidate)) return candidate;
    }
}

View file

@ -0,0 +1,64 @@
import { app } from "electron/main";
import { existsSync } from "node:fs";
import fs from "node:fs/promises";
import path from "path";
/**
* Our very own directory within the system temp directory. Go crazy, but
* remember to clean up, especially in exception handlers.
*/
const enteTempDirPath = async () => {
    const dir = path.join(app.getPath("temp"), "ente");
    // mkdir with recursive is idempotent, so this is safe to call every time.
    await fs.mkdir(dir, { recursive: true });
    return dir;
};
/** Return a random alphanumeric string of the given {@link length}. */
const randomPrefix = (length: number) => {
    const alphabet =
        "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
    let prefix = "";
    while (prefix.length < length) {
        prefix += alphabet.charAt(Math.floor(Math.random() * alphabet.length));
    }
    return prefix;
};
/**
* Return the path to a temporary file with the given {@link formatSuffix}.
*
* The function returns the path to a file in the system temp directory (in an
 * Ente specific folder therein) with a random prefix and the given
* {@link formatSuffix}. It ensures that there is no existing file with the same
* name already.
*
* Use {@link deleteTempFile} to remove this file when you're done.
*/
export const makeTempFilePath = async (formatSuffix: string) => {
    const tempDir = await enteTempDirPath();
    // Keep drawing random prefixes until we hit an unused name.
    while (true) {
        const candidate = path.join(
            tempDir,
            `${randomPrefix(10)}-${formatSuffix}`,
        );
        if (!existsSync(candidate)) return candidate;
    }
};
/**
* Delete a temporary file at the given path if it exists.
*
* This is the same as a vanilla {@link fs.rm}, except it first checks that the
* given path is within the Ente specific directory in the system temp
* directory. This acts as an additional safety check.
*
* @param tempFilePath The path to the temporary file to delete. This path
* should've been previously created using {@link makeTempFilePath}.
*/
export const deleteTempFile = async (tempFilePath: string) => {
    // Safety check: refuse to delete anything outside our own temp directory.
    const tempDir = await enteTempDirPath();
    if (!tempFilePath.startsWith(tempDir)) {
        throw new Error(`Attempting to delete a non-temp file ${tempFilePath}`);
    }
    // force: true makes this a no-op if the file does not exist.
    await fs.rm(tempFilePath, { force: true });
};

35
desktop/src/main/utils.ts Normal file
View file

@ -0,0 +1,35 @@
/**
 * @file grab bag of utility functions.
*
* Many of these are verbatim copies of functions from web code since there
* isn't currently a common package that both of them share.
*/
/**
* Wait for {@link ms} milliseconds
*
* This function is a promisified `setTimeout`. It returns a promise that
* resolves after {@link ms} milliseconds.
*/
export const wait = (ms: number) => {
    return new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
};
/**
* Await the given {@link promise} for {@link timeoutMS} milliseconds. If it
* does not resolve within {@link timeoutMS}, then reject with a timeout error.
*/
export const withTimeout = async <T>(promise: Promise<T>, ms: number) => {
    let timeoutId: ReturnType<typeof setTimeout>;
    const rejectOnTimeout = new Promise<T>((_, reject) => {
        timeoutId = setTimeout(
            () => reject(new Error("Operation timed out")),
            ms,
        );
    });
    const promiseAndCancelTimeout = async () => {
        // Clear the timer in a finally block so that it is also cleared when
        // the wrapped promise rejects. Previously it was only cleared on the
        // success path, leaving a stray armed timer (which, e.g., keeps the
        // Node.js event loop alive) for the full duration after a rejection.
        try {
            return await promise;
        } finally {
            clearTimeout(timeoutId);
        }
    };
    return Promise.race([promiseAndCancelTimeout(), rejectOnTimeout]);
};

View file

@ -122,15 +122,13 @@ const fsWriteFile = (path: string, contents: string): Promise<void> =>
// Ask the main process (over IPC) whether {@link dirPath} is a directory.
const fsIsDir = (dirPath: string): Promise<boolean> =>
    ipcRenderer.invoke("fsIsDir", dirPath);
// - AUDIT below this
// - Conversion
const convertToJPEG = (
fileData: Uint8Array,
filename: string,
fileName: string,
imageData: Uint8Array,
): Promise<Uint8Array> =>
ipcRenderer.invoke("convertToJPEG", fileData, filename);
ipcRenderer.invoke("convertToJPEG", fileName, imageData);
const generateImageThumbnail = (
inputFile: File | ElectronFile,
@ -144,18 +142,18 @@ const generateImageThumbnail = (
maxSize,
);
const runFFmpegCmd = (
cmd: string[],
inputFile: File | ElectronFile,
const ffmpegExec = (
command: string[],
inputDataOrPath: Uint8Array | string,
outputFileName: string,
dontTimeout?: boolean,
): Promise<File> =>
timeoutMS: number,
): Promise<Uint8Array> =>
ipcRenderer.invoke(
"runFFmpegCmd",
cmd,
inputFile,
"ffmpegExec",
command,
inputDataOrPath,
outputFileName,
dontTimeout,
timeoutMS,
);
// - ML
@ -163,8 +161,10 @@ const runFFmpegCmd = (
const clipImageEmbedding = (jpegImageData: Uint8Array): Promise<Float32Array> =>
ipcRenderer.invoke("clipImageEmbedding", jpegImageData);
const clipTextEmbedding = (text: string): Promise<Float32Array> =>
ipcRenderer.invoke("clipTextEmbedding", text);
const clipTextEmbeddingIfAvailable = (
text: string,
): Promise<Float32Array | undefined> =>
ipcRenderer.invoke("clipTextEmbeddingIfAvailable", text);
const detectFaces = (input: Float32Array): Promise<Float32Array> =>
ipcRenderer.invoke("detectFaces", input);
@ -253,6 +253,7 @@ const setPendingUploadFiles = (
): Promise<void> =>
ipcRenderer.invoke("setPendingUploadFiles", type, filePaths);
// - TODO: AUDIT below this
// -
const getElectronFilesFromGoogleZip = (
@ -263,42 +264,46 @@ const getElectronFilesFromGoogleZip = (
const getDirFiles = (dirPath: string): Promise<ElectronFile[]> =>
ipcRenderer.invoke("getDirFiles", dirPath);
//
// These objects exposed here will become available to the JS code in our
// renderer (the web/ code) as `window.ElectronAPIs.*`
//
// There are a few related concepts at play here, and it might be worthwhile to
// read their (excellent) documentation to get an understanding;
//
// - ContextIsolation:
// https://www.electronjs.org/docs/latest/tutorial/context-isolation
//
// - IPC https://www.electronjs.org/docs/latest/tutorial/ipc
//
// [Note: Transferring large amount of data over IPC]
//
// Electron's IPC implementation uses the HTML standard Structured Clone
// Algorithm to serialize objects passed between processes.
// https://www.electronjs.org/docs/latest/tutorial/ipc#object-serialization
//
// In particular, ArrayBuffer is eligible for structured cloning.
// https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm
//
// Also, ArrayBuffer is "transferable", which means it is a zero-copy
// operation when it happens across threads.
// https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Transferable_objects
//
// In our case though, we're not dealing with threads but separate processes. So
// the ArrayBuffer will be copied:
// > "parameters, errors and return values are **copied** when they're sent over
// the bridge".
// https://www.electronjs.org/docs/latest/api/context-bridge#methods
//
// The copy itself is relatively fast, but the problem with transferring large
// amounts of data is potentially running out of memory during the copy.
//
// For an alternative, see [Note: IPC streams].
//
/**
* These objects exposed here will become available to the JS code in our
* renderer (the web/ code) as `window.ElectronAPIs.*`
*
* There are a few related concepts at play here, and it might be worthwhile to
* read their (excellent) documentation to get an understanding;
 *
* - ContextIsolation:
* https://www.electronjs.org/docs/latest/tutorial/context-isolation
*
* - IPC https://www.electronjs.org/docs/latest/tutorial/ipc
*
* ---
*
* [Note: Transferring large amount of data over IPC]
*
* Electron's IPC implementation uses the HTML standard Structured Clone
* Algorithm to serialize objects passed between processes.
* https://www.electronjs.org/docs/latest/tutorial/ipc#object-serialization
*
* In particular, ArrayBuffer is eligible for structured cloning.
* https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm
*
 * Also, ArrayBuffer is "transferable", which means it is a zero-copy
 * operation when it happens across threads.
* https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Transferable_objects
*
* In our case though, we're not dealing with threads but separate processes. So
* the ArrayBuffer will be copied:
*
* > "parameters, errors and return values are **copied** when they're sent over
* > the bridge".
* >
* > https://www.electronjs.org/docs/latest/api/context-bridge#methods
*
 * The copy itself is relatively fast, but the problem with transferring large
* amounts of data is potentially running out of memory during the copy.
*
* For an alternative, see [Note: IPC streams].
*/
contextBridge.exposeInMainWorld("electron", {
// - General
@ -335,12 +340,12 @@ contextBridge.exposeInMainWorld("electron", {
convertToJPEG,
generateImageThumbnail,
runFFmpegCmd,
ffmpegExec,
// - ML
clipImageEmbedding,
clipTextEmbedding,
clipTextEmbeddingIfAvailable,
detectFaces,
faceEmbedding,

View file

@ -32,28 +32,13 @@ export interface PendingUploads {
}
/**
* Errors that have special semantics on the web side.
* See: [Note: Custom errors across Electron/Renderer boundary]
*
* [Note: Custom errors across Electron/Renderer boundary]
*
* We need to use the `message` field to disambiguate between errors thrown by
* the main process when invoked from the renderer process. This is because:
*
* > Errors thrown throw `handle` in the main process are not transparent as
* > they are serialized and only the `message` property from the original error
* > is provided to the renderer process.
* >
* > - https://www.electronjs.org/docs/latest/tutorial/ipc
* >
* > Ref: https://github.com/electron/electron/issues/24427
* Note: this is not a type, and cannot be used in preload.js; it is only meant
* for use in the main process code.
*/
export const CustomErrors = {
WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED:
"Windows native image processing is not supported",
UNSUPPORTED_PLATFORM: (platform: string, arch: string) =>
`Unsupported platform - ${platform} ${arch}`,
MODEL_DOWNLOAD_PENDING:
"Model download pending, skipping clip search request",
export const CustomErrorMessage = {
    // Fixed typo: "in not" -> "is not". Since only the `message` survives the
    // IPC boundary, if any renderer code matches on this exact string, keep
    // that copy in sync with this change.
    NotAvailable: "This feature is not available on the current OS/arch",
};
/**

View file

@ -13,13 +13,14 @@ import { GalleryContext } from "pages/gallery";
import { useContext, useEffect, useRef, useState } from "react";
import billingService from "services/billingService";
import { getLatestCollections } from "services/collectionService";
import { setToUploadCollection } from "services/pending-uploads";
import {
getPublicCollectionUID,
getPublicCollectionUploaderName,
savePublicCollectionUploaderName,
} from "services/publicCollectionService";
import uploadManager from "services/upload/uploadManager";
import uploadManager, {
setToUploadCollection,
} from "services/upload/uploadManager";
import watcher from "services/watch";
import { NotificationAttributes } from "types/Notification";
import { Collection } from "types/collection";
@ -31,7 +32,11 @@ import {
SetLoading,
UploadTypeSelectorIntent,
} from "types/gallery";
import { ElectronFile, FileWithCollection } from "types/upload";
import {
ElectronFile,
FileWithCollection,
type FileWithCollection2,
} from "types/upload";
import {
InProgressUpload,
SegregatedFinishedUploads,
@ -112,11 +117,28 @@ export default function Uploader(props: Props) {
const [importSuggestion, setImportSuggestion] = useState<ImportSuggestion>(
DEFAULT_IMPORT_SUGGESTION,
);
/**
* Paths of file to upload that we've received over the IPC bridge from the
* code running in the Node.js layer of our desktop app.
*/
const [desktopFilePaths, setDesktopFilePaths] = useState<
string[] | undefined
>();
const [electronFiles, setElectronFiles] = useState<ElectronFile[]>(null);
const [webFiles, setWebFiles] = useState([]);
const toUploadFiles = useRef<File[] | ElectronFile[]>(null);
const toUploadFiles = useRef<
File[] | ElectronFile[] | string[] | undefined | null
>(null);
/**
* If true, then the next upload we'll be processing was initiated by our
* desktop app.
*/
const isPendingDesktopUpload = useRef(false);
/**
* If set, this will be the name of the collection that our desktop app
* wishes for us to upload into.
*/
const pendingDesktopUploadCollectionName = useRef<string>("");
// This is set when the user choses a type to upload from the upload type selector dialog
const pickedUploadType = useRef<PICKED_UPLOAD_TYPE>(null);
@ -181,13 +203,10 @@ export default function Uploader(props: Props) {
}
});
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const upload = (collectionName: string, filePaths: string[]) => {
isPendingDesktopUpload.current = true;
pendingDesktopUploadCollectionName.current = collectionName;
// TODO (MR):
// setElectronFiles(filePaths);
setDesktopFilePaths(filePaths);
};
const requestSyncWithRemote = () => {
@ -284,18 +303,22 @@ export default function Uploader(props: Props) {
useEffect(() => {
if (
desktopFilePaths?.length > 0 ||
electronFiles?.length > 0 ||
webFiles?.length > 0 ||
appContext.sharedFiles?.length > 0
) {
log.info(
`upload request type:${
electronFiles?.length > 0
? "electronFiles"
: webFiles?.length > 0
? "webFiles"
: "sharedFiles"
`upload request type: ${
desktopFilePaths?.length > 0
? "desktopFilePaths"
: electronFiles?.length > 0
? "electronFiles"
: webFiles?.length > 0
? "webFiles"
: "sharedFiles"
} count ${
desktopFilePaths?.length ??
electronFiles?.length ??
webFiles?.length ??
appContext?.sharedFiles.length
@ -326,9 +349,13 @@ export default function Uploader(props: Props) {
toUploadFiles.current = appContext.sharedFiles;
appContext.resetSharedFiles();
} else if (electronFiles?.length > 0) {
// File selection from desktop app
// File selection from desktop app - deprecated
toUploadFiles.current = electronFiles;
setElectronFiles([]);
} else if (desktopFilePaths && desktopFilePaths.length > 0) {
// File selection from our desktop app
toUploadFiles.current = desktopFilePaths;
setDesktopFilePaths(undefined);
}
toUploadFiles.current = filterOutSystemFiles(toUploadFiles.current);
@ -339,7 +366,9 @@ export default function Uploader(props: Props) {
const importSuggestion = getImportSuggestion(
pickedUploadType.current,
toUploadFiles.current.map((file) => file["path"]),
toUploadFiles.current.map((file) =>
typeof file == "string" ? file : file["path"],
),
);
setImportSuggestion(importSuggestion);
@ -352,7 +381,7 @@ export default function Uploader(props: Props) {
pickedUploadType.current = null;
props.setLoading(false);
}
}, [webFiles, appContext.sharedFiles, electronFiles]);
}, [webFiles, appContext.sharedFiles, electronFiles, desktopFilePaths]);
const resumeDesktopUpload = async (
type: PICKED_UPLOAD_TYPE,
@ -408,11 +437,11 @@ export default function Uploader(props: Props) {
`upload file to an new collections strategy:${strategy} ,collectionName:${collectionName}`,
);
await preCollectionCreationAction();
let filesWithCollectionToUpload: FileWithCollection[] = [];
let filesWithCollectionToUpload: FileWithCollection2[] = [];
const collections: Collection[] = [];
let collectionNameToFilesMap = new Map<
string,
(File | ElectronFile)[]
File[] | ElectronFile[] | string[]
>();
if (strategy == "root") {
collectionNameToFilesMap.set(
@ -463,7 +492,7 @@ export default function Uploader(props: Props) {
});
throw e;
}
await waitInQueueAndUploadFiles(
await waitInQueueAndUploadFiles2(
filesWithCollectionToUpload,
collections,
);
@ -491,6 +520,24 @@ export default function Uploader(props: Props) {
await currentUploadPromise.current;
};
    /**
     * Enqueue an upload of the given files into the given collections,
     * chaining it behind any upload that is already in progress (via
     * {@link currentUploadPromise}), and wait for it to complete.
     */
    const waitInQueueAndUploadFiles2 = async (
        filesWithCollectionToUploadIn: FileWithCollection2[],
        collections: Collection[],
        uploaderName?: string,
    ) => {
        const currentPromise = currentUploadPromise.current;
        currentUploadPromise.current = waitAndRun(
            currentPromise,
            async () =>
                await uploadFiles2(
                    filesWithCollectionToUploadIn,
                    collections,
                    uploaderName,
                ),
        );
        await currentUploadPromise.current;
    };
const preUploadAction = async () => {
uploadManager.prepareForNewUpload();
setUploadProgressView(true);
@ -517,7 +564,6 @@ export default function Uploader(props: Props) {
!watcher.isUploadRunning()
) {
await setToUploadCollection(collections);
// TODO (MR): What happens when we have both?
if (zipPaths.current) {
await electron.setPendingUploadFiles(
"zips",
@ -561,6 +607,63 @@ export default function Uploader(props: Props) {
}
};
    /**
     * Upload the given files to the given collections, persisting them as
     * pending uploads (for desktop resumability) before queueing them on the
     * upload manager.
     */
    const uploadFiles2 = async (
        filesWithCollectionToUploadIn: FileWithCollection2[],
        collections: Collection[],
        uploaderName?: string,
    ) => {
        try {
            log.info("uploadFiles called");
            // NOTE(review): unlike the older `uploadFiles`, this call is not
            // awaited even though preUploadAction is async — confirm whether
            // that is intentional.
            preUploadAction();
            // Persist the pending upload state so the desktop app can resume
            // it, but not when this upload was itself initiated by the
            // desktop app or by the folder watcher.
            if (
                electron &&
                !isPendingDesktopUpload.current &&
                !watcher.isUploadRunning()
            ) {
                await setToUploadCollection(collections);
                if (zipPaths.current) {
                    await electron.setPendingUploadFiles(
                        "zips",
                        zipPaths.current,
                    );
                    zipPaths.current = null;
                }
                await electron.setPendingUploadFiles(
                    "files",
                    filesWithCollectionToUploadIn.map(
                        ({ file }) => (file as ElectronFile).path,
                    ),
                );
            }
            const shouldCloseUploadProgress =
                await uploadManager.queueFilesForUpload2(
                    filesWithCollectionToUploadIn,
                    collections,
                    uploaderName,
                );
            if (shouldCloseUploadProgress) {
                closeUploadProgress();
            }
            if (isElectron()) {
                if (watcher.isUploadRunning()) {
                    await watcher.allFileUploadsDone(
                        filesWithCollectionToUploadIn,
                        collections,
                    );
                } else if (watcher.isSyncPaused()) {
                    // resume the service after user upload is done
                    watcher.resumePausedSync();
                }
            }
        } catch (e) {
            log.error("failed to upload files", e);
            showUserFacingError(e.message);
            closeUploadProgress();
        } finally {
            postUploadAction();
        }
    };
const retryFailed = async () => {
try {
log.info("user retrying failed upload");
@ -569,7 +672,8 @@ export default function Uploader(props: Props) {
const uploaderName = uploadManager.getUploaderName();
await preUploadAction();
await uploadManager.queueFilesForUpload(
filesWithCollections.files,
/* TODO(MR): ElectronFile changes */
filesWithCollections.files as FileWithCollection[],
filesWithCollections.collections,
uploaderName,
);
@ -636,7 +740,7 @@ export default function Uploader(props: Props) {
try {
if (accessedThroughSharedURL) {
log.info(
`uploading files to pulbic collection - ${props.uploadCollection.name} - ${props.uploadCollection.id}`,
`uploading files to public collection - ${props.uploadCollection.name} - ${props.uploadCollection.id}`,
);
const uploaderName = await getPublicCollectionUploaderName(
getPublicCollectionUID(

View file

@ -1,3 +1,3 @@
export const INPUT_PATH_PLACEHOLDER = "INPUT";
export const FFMPEG_PLACEHOLDER = "FFMPEG";
export const OUTPUT_PATH_PLACEHOLDER = "OUTPUT";
/**
 * Magic string tokens used in ffmpeg command templates.
 *
 * Before a command is executed, these placeholders are substituted with the
 * actual ffmpeg invocation, input path, and output path respectively.
 */
export const ffmpegPathPlaceholder = "FFMPEG";
export const inputPathPlaceholder = "INPUT";
export const outputPathPlaceholder = "OUTPUT";

View file

@ -1,6 +1,6 @@
import { ENCRYPTION_CHUNK_SIZE } from "@ente/shared/crypto/constants";
import { FILE_TYPE } from "constants/file";
import { FileTypeInfo, Location, ParsedExtractedMetadata } from "types/upload";
import { FileTypeInfo, Location } from "types/upload";
// list of format that were missed by type-detection for some files.
export const WHITELISTED_FILE_FORMATS: FileTypeInfo[] = [
@ -93,19 +93,6 @@ export enum PICKED_UPLOAD_TYPE {
ZIPS = "zips",
}
export const MAX_FILE_SIZE_SUPPORTED = 4 * 1024 * 1024 * 1024; // 4 GB
export const LIVE_PHOTO_ASSET_SIZE_LIMIT = 20 * 1024 * 1024; // 20MB
export const NULL_EXTRACTED_METADATA: ParsedExtractedMetadata = {
location: NULL_LOCATION,
creationTime: null,
width: null,
height: null,
};
export const A_SEC_IN_MICROSECONDS = 1e6;
export const BLACK_THUMBNAIL_BASE64 =
"/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAEBAQEBAQEB" +
"AQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/2wBDAQEBAQEBAQ" +

View file

@ -75,7 +75,6 @@ class CLIPService {
private onFileUploadedHandler:
| ((arg: { enteFile: EnteFile; localFile: globalThis.File }) => void)
| null = null;
private unsupportedPlatform = false;
constructor() {
this.liveEmbeddingExtractionQueue = new PQueue({
@ -85,7 +84,7 @@ class CLIPService {
}
isPlatformSupported = () => {
return isElectron() && !this.unsupportedPlatform;
return isElectron();
};
private logoutHandler = async () => {
@ -99,9 +98,6 @@ class CLIPService {
setupOnFileUploadListener = async () => {
try {
if (this.unsupportedPlatform) {
return;
}
if (this.onFileUploadedHandler) {
log.info("file upload listener already setup");
return;
@ -188,26 +184,12 @@ class CLIPService {
}
};
getTextEmbedding = async (text: string): Promise<Float32Array> => {
try {
return ensureElectron().clipTextEmbedding(text);
} catch (e) {
if (e?.message?.includes(CustomError.UNSUPPORTED_PLATFORM)) {
this.unsupportedPlatform = true;
}
log.error("Failed to compute CLIP text embedding", e);
throw e;
}
getTextEmbeddingIfAvailable = async (text: string) => {
return ensureElectron().clipTextEmbeddingIfAvailable(text);
};
private runClipEmbeddingExtraction = async (canceller: AbortController) => {
try {
if (this.unsupportedPlatform) {
log.info(
`skipping clip embedding extraction, platform unsupported`,
);
return;
}
const user = getData(LS_KEYS.USER);
if (!user) {
return;
@ -254,11 +236,6 @@ class CLIPService {
e,
);
}
if (
e?.message?.includes(CustomError.UNSUPPORTED_PLATFORM)
) {
this.unsupportedPlatform = true;
}
if (
e?.message === CustomError.REQUEST_CANCELLED ||
e?.message?.includes(CustomError.UNSUPPORTED_PLATFORM)

View file

@ -1,3 +1,4 @@
import { decodeLivePhoto } from "@/media/live-photo";
import { openCache, type BlobCache } from "@/next/blob-cache";
import log from "@/next/log";
import { APPS } from "@ente/shared/apps/constants";
@ -5,13 +6,13 @@ import ComlinkCryptoWorker from "@ente/shared/crypto";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { CustomError } from "@ente/shared/error";
import { Events, eventBus } from "@ente/shared/events";
import { isPlaybackPossible } from "@ente/shared/media/video-playback";
import { Remote } from "comlink";
import { FILE_TYPE } from "constants/file";
import isElectron from "is-electron";
import * as ffmpegService from "services/ffmpeg";
import { EnteFile } from "types/file";
import {
generateStreamFromArrayBuffer,
getRenderableFileURL,
} from "utils/file";
import { generateStreamFromArrayBuffer, getRenderableImage } from "utils/file";
import { PhotosDownloadClient } from "./clients/photos";
import { PublicAlbumsDownloadClient } from "./clients/publicAlbums";
@ -303,7 +304,7 @@ class DownloadManagerImpl {
if (cachedBlob) res = new Response(cachedBlob);
else {
res = await this.downloadClient.downloadFileStream(file);
this?.fileCache.put(cacheKey, await res.blob());
this.fileCache?.put(cacheKey, await res.blob());
}
const reader = res.body.getReader();
@ -467,3 +468,159 @@ function createDownloadClient(
return new PhotosDownloadClient(token, timeout);
}
}
/**
 * Return URL(s) that the viewer can use to render the given {@link file}.
 *
 * Images and videos are converted to a renderable / playable form when needed
 * (the conversion helpers may return the original blob unchanged, or null if
 * nothing renderable could be produced). Live photos instead get lazy getters
 * for their image and video components.
 *
 * @param forceConvert if true, conversion is attempted even when the original
 * might have been directly renderable.
 */
async function getRenderableFileURL(
    file: EnteFile,
    fileBlob: Blob,
    originalFileURL: string,
    forceConvert: boolean,
): Promise<SourceURLs> {
    let srcURLs: SourceURLs["url"];
    switch (file.metadata.fileType) {
        case FILE_TYPE.IMAGE: {
            const convertedBlob = await getRenderableImage(
                file.metadata.title,
                fileBlob,
            );
            const convertedURL = getFileObjectURL(
                originalFileURL,
                fileBlob,
                convertedBlob,
            );
            srcURLs = convertedURL;
            break;
        }
        case FILE_TYPE.LIVE_PHOTO: {
            srcURLs = await getRenderableLivePhotoURL(
                file,
                fileBlob,
                forceConvert,
            );
            break;
        }
        case FILE_TYPE.VIDEO: {
            const convertedBlob = await getPlayableVideo(
                file.metadata.title,
                fileBlob,
                forceConvert,
            );
            const convertedURL = getFileObjectURL(
                originalFileURL,
                fileBlob,
                convertedBlob,
            );
            srcURLs = convertedURL;
            break;
        }
        default: {
            // Other file types are handed out as-is.
            srcURLs = originalFileURL;
            break;
        }
    }
    // Live photos always hand out component getters, never the original URL.
    let isOriginal: boolean;
    if (file.metadata.fileType === FILE_TYPE.LIVE_PHOTO) {
        isOriginal = false;
    } else {
        isOriginal = (srcURLs as string) === (originalFileURL as string);
    }
    return {
        url: srcURLs,
        isOriginal,
        isRenderable:
            file.metadata.fileType !== FILE_TYPE.LIVE_PHOTO && !!srcURLs,
        type:
            file.metadata.fileType === FILE_TYPE.LIVE_PHOTO
                ? "livePhoto"
                : "normal",
    };
}
/**
 * Return an object URL for {@link convertedBlob}, reusing
 * {@link originalFileURL} when the conversion was a no-op, and null when there
 * is no converted blob at all.
 */
const getFileObjectURL = (
    originalFileURL: string,
    originalBlob: Blob,
    convertedBlob: Blob,
) => {
    // No conversion result, so nothing to hand out.
    if (!convertedBlob) return null;
    // Conversion was a no-op; reuse the URL we already have for the original.
    if (convertedBlob === originalBlob) return originalFileURL;
    return URL.createObjectURL(convertedBlob);
};
/**
 * Decode the given live photo {@link fileBlob} and return lazy getters for
 * object URLs of its image and video components.
 *
 * The getters are best effort: they resolve with null if the corresponding
 * component could not be converted into something renderable.
 */
async function getRenderableLivePhotoURL(
    file: EnteFile,
    fileBlob: Blob,
    forceConvert: boolean,
): Promise<LivePhotoSourceURL> {
    const livePhoto = await decodeLivePhoto(file.metadata.title, fileBlob);
    const getRenderableLivePhotoImageURL = async () => {
        try {
            const imageBlob = new Blob([livePhoto.imageData]);
            const convertedImageBlob = await getRenderableImage(
                livePhoto.imageFileName,
                imageBlob,
            );
            return URL.createObjectURL(convertedImageBlob);
        } catch (e) {
            //ignore and return null
            return null;
        }
    };
    const getRenderableLivePhotoVideoURL = async () => {
        try {
            const videoBlob = new Blob([livePhoto.videoData]);
            // Pass runOnWeb = true: live photo videos are small enough to
            // convert with the wasm ffmpeg even outside the desktop app.
            const convertedVideoBlob = await getPlayableVideo(
                livePhoto.videoFileName,
                videoBlob,
                forceConvert,
                true,
            );
            return URL.createObjectURL(convertedVideoBlob);
        } catch (e) {
            //ignore and return null
            return null;
        }
    };
    return {
        image: getRenderableLivePhotoImageURL,
        video: getRenderableLivePhotoVideoURL,
    };
}
/**
 * Return a playable version of the given video {@link videoBlob}.
 *
 * If the browser reports it can play the blob as-is (and {@link forceConvert}
 * is not set), the original blob is returned. Otherwise the video is
 * converted to MP4 using ffmpeg. Resolves with null when conversion is not
 * attempted (on web, without {@link forceConvert} or {@link runOnWeb}) or
 * when it fails.
 */
async function getPlayableVideo(
    videoNameTitle: string,
    videoBlob: Blob,
    forceConvert = false,
    runOnWeb = false,
) {
    try {
        // NOTE(review): the object URL created for this playability probe is
        // never revoked — potential leak; confirm and consider revoking it.
        const isPlayable = await isPlaybackPossible(
            URL.createObjectURL(videoBlob),
        );
        if (isPlayable && !forceConvert) {
            return videoBlob;
        } else {
            // Without forceConvert, only attempt the (expensive) conversion
            // on desktop, unless the caller opted in via runOnWeb.
            if (!forceConvert && !runOnWeb && !isElectron()) {
                return null;
            }
            log.info(
                `video format not supported, converting it name: ${videoNameTitle}`,
            );
            const mp4ConvertedVideo = await ffmpegService.convertToMP4(
                new File([videoBlob], videoNameTitle),
            );
            log.info(`video successfully converted ${videoNameTitle}`);
            return new Blob([await mp4ConvertedVideo.arrayBuffer()]);
        }
    } catch (e) {
        // Best effort: callers treat null as "not playable".
        log.error("video conversion failed", e);
        return null;
    }
}

View file

@ -6,7 +6,7 @@ import { Events, eventBus } from "@ente/shared/events";
import { LS_KEYS, getData, setData } from "@ente/shared/storage/localStorage";
import { formatDateTimeShort } from "@ente/shared/time/format";
import { User } from "@ente/shared/user/types";
import { sleep } from "@ente/shared/utils";
import { wait } from "@ente/shared/utils";
import QueueProcessor, {
CancellationStatus,
RequestCanceller,
@ -919,7 +919,7 @@ class ExportService {
e.message === CustomError.EXPORT_RECORD_JSON_PARSING_FAILED &&
retry
) {
await sleep(1000);
await wait(1000);
return await this.getExportRecord(folder, false);
}
if (e.message !== CustomError.EXPORT_FOLDER_DOES_NOT_EXIST) {

View file

@ -3,7 +3,7 @@ import { ensureElectron } from "@/next/electron";
import log from "@/next/log";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
import { User } from "@ente/shared/user/types";
import { sleep } from "@ente/shared/utils";
import { wait } from "@ente/shared/utils";
import { FILE_TYPE } from "constants/file";
import { getLocalCollections } from "services/collectionService";
import downloadManager from "services/download";
@ -305,7 +305,7 @@ async function getFileExportNamesFromExportedFiles(
);
let success = 0;
for (const file of exportedFiles) {
await sleep(0);
await wait(0);
const collectionPath = exportedCollectionPaths.get(file.collectionID);
log.debug(
() =>

View file

@ -0,0 +1,201 @@
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { validateAndGetCreationUnixTimeInMicroSeconds } from "@ente/shared/time";
import { Remote } from "comlink";
import {
ffmpegPathPlaceholder,
inputPathPlaceholder,
outputPathPlaceholder,
} from "constants/ffmpeg";
import { NULL_LOCATION } from "constants/upload";
import { ElectronFile, ParsedExtractedMetadata } from "types/upload";
import { type DedicatedFFmpegWorker } from "worker/ffmpeg.worker";
/** Called during upload */
export async function generateVideoThumbnail(
    file: File | ElectronFile,
): Promise<File | ElectronFile> {
    // Try to grab a 720p-scaled frame at t = 1s first; if that fails (e.g.
    // the video is shorter than a second), retry at t = 0s. Rethrow only
    // when the final attempt also fails.
    for (let seekTime = 1; seekTime >= 0; seekTime--) {
        try {
            return await ffmpegExec(
                [
                    ffmpegPathPlaceholder,
                    "-i",
                    inputPathPlaceholder,
                    "-ss",
                    `00:00:0${seekTime}`,
                    "-vframes",
                    "1",
                    "-vf",
                    "scale=-1:720",
                    outputPathPlaceholder,
                ],
                file,
                "thumb.jpeg",
            );
        } catch (e) {
            if (seekTime === 0) throw e;
        }
    }
}
/** Called during upload */
export async function extractVideoMetadata(file: File | ElectronFile) {
    // Ask ffmpeg to dump the stream metadata, without re-encoding, as an
    // INI-like UTF-8 "ffmetadata" text file, and then parse that dump.
    //
    // https://stackoverflow.com/questions/9464617/retrieving-and-saving-media-metadata-using-ffmpeg
    // -c copy => copies all the streams without re-encoding
    // -map_metadata 0 => copies all stream metadata to the output
    // -f ffmetadata => dump metadata into a simple UTF-8-encoded INI-like text file
    const command = [
        ffmpegPathPlaceholder,
        "-i",
        inputPathPlaceholder,
        "-c",
        "copy",
        "-map_metadata",
        "0",
        "-f",
        "ffmetadata",
        outputPathPlaceholder,
    ];
    const metadata = await ffmpegExec(command, file, `metadata.txt`);
    const encodedMetadata = new Uint8Array(await metadata.arrayBuffer());
    return parseFFmpegExtractedMetadata(encodedMetadata);
}
/**
 * Keys of interest in the INI-like metadata dump produced by ffmpeg's
 * `-f ffmetadata` output (see {@link extractVideoMetadata}).
 */
enum MetadataTags {
    CREATION_TIME = "creation_time",
    APPLE_CONTENT_IDENTIFIER = "com.apple.quicktime.content.identifier",
    APPLE_LIVE_PHOTO_IDENTIFIER = "com.apple.quicktime.live-photo.auto",
    APPLE_CREATION_DATE = "com.apple.quicktime.creationdate",
    APPLE_LOCATION_ISO = "com.apple.quicktime.location.ISO6709",
    LOCATION = "location",
}
/**
 * Parse the ffmetadata dump produced by ffmpeg (see
 * {@link extractVideoMetadata}) into a {@link ParsedExtractedMetadata}.
 */
function parseFFmpegExtractedMetadata(encodedMetadata: Uint8Array) {
    // The dump is a UTF-8 text file with one `key=value` pair per line.
    const lines = new TextDecoder().decode(encodedMetadata).split("\n");
    const keyValuePairs = lines
        .map((property) => property.split("="))
        .filter((kv) => kv.length === 2) as Array<[string, string]>;
    const metadataMap = Object.fromEntries(keyValuePairs);
    // Prefer the Apple specific QuickTime tags when they are present.
    const location = parseAppleISOLocation(
        metadataMap[MetadataTags.APPLE_LOCATION_ISO] ??
            metadataMap[MetadataTags.LOCATION],
    );
    const creationTime = parseCreationTime(
        metadataMap[MetadataTags.APPLE_CREATION_DATE] ??
            metadataMap[MetadataTags.CREATION_TIME],
    );
    const parsedMetadata: ParsedExtractedMetadata = {
        creationTime,
        location: {
            latitude: location.latitude,
            longitude: location.longitude,
        },
        width: null,
        height: null,
    };
    return parsedMetadata;
}
/**
 * Parse a latitude/longitude pair from an ISO 6709 location string (as found
 * in the QuickTime "com.apple.quicktime.location.ISO6709" tag).
 *
 * Returns {@link NULL_LOCATION} when {@link isoLocation} is falsy or does not
 * contain two signed decimal numbers.
 */
function parseAppleISOLocation(isoLocation: string) {
    let location = NULL_LOCATION;
    if (isoLocation) {
        // `match` returns null when nothing matches; guard against malformed
        // strings instead of throwing from `.map` and failing the entire
        // metadata extraction.
        const matches = isoLocation.match(/(\+|-)\d+\.*\d+/g);
        if (matches && matches.length >= 2) {
            const [latitude, longitude] = matches.map((x) => parseFloat(x));
            location = { latitude, longitude };
        }
    }
    return location;
}
/**
 * Convert the given metadata timestamp string into a Unix time in
 * microseconds, or null if it is absent / not a valid date.
 */
function parseCreationTime(creationTime: string) {
    if (!creationTime) return null;
    return validateAndGetCreationUnixTimeInMicroSeconds(new Date(creationTime));
}
/** Called when viewing a file */
export async function convertToMP4(file: File) {
    // `-preset ultrafast` favors conversion speed over output size, the right
    // tradeoff for an on-the-fly conversion meant for immediate playback.
    const command = [
        ffmpegPathPlaceholder,
        "-i",
        inputPathPlaceholder,
        "-preset",
        "ultrafast",
        outputPathPlaceholder,
    ];
    return await ffmpegExec(command, file, "output.mp4", 30 * 1000);
}
/**
 * Run the given ffmpeg command.
 *
 * If we're running in the context of our desktop app, use the ffmpeg binary we
 * bundle with our desktop app to run the command. Otherwise fallback to using
 * the wasm ffmpeg we link to from our web app in a web worker.
 *
 * As a rough ballpark, the native ffmpeg integration in the desktop app is
 * 10-20x faster than the wasm one currently. See: [Note: ffmpeg in Electron].
 *
 * @param cmd The ffmpeg invocation, with placeholder tokens for the binary
 * path and the input/output paths that get substituted before execution.
 * @param inputFile The file to operate on.
 * @param outputFilename Name for the (temporary) output file ffmpeg writes.
 * @param timeoutMS Abort if the command takes longer than this many
 * milliseconds. The default of 0 means no timeout.
 */
const ffmpegExec = async (
    cmd: string[],
    inputFile: File | ElectronFile,
    outputFilename: string,
    timeoutMS: number = 0,
): Promise<File | ElectronFile> => {
    const electron = globalThis.electron;
    if (electron || false) {
        /* TODO(MR): ElectronFile changes */
        // NOTE(review): while this TODO is pending, the desktop path falls
        // through and the promise resolves to undefined instead of running
        // the native ffmpeg — confirm callers tolerate this.
        // return electron.runFFmpegCmd(cmd, inputFile, outputFilename, timeoutMS);
    } else {
        return workerFactory
            .instance()
            .then((worker) =>
                worker.run(cmd, inputFile, outputFilename, timeoutMS),
            );
    }
};
/** Lazily create a singleton instance of our worker */
class WorkerFactory {
private _instance: Promise<Remote<DedicatedFFmpegWorker>>;
async instance() {
if (!this._instance) {
const comlinkWorker = createComlinkWorker();
this._instance = comlinkWorker.remote;
}
return this._instance;
}
}
/** Module-level shared {@link WorkerFactory} used by {@link ffmpegExec}. */
const workerFactory = new WorkerFactory();
/** Create a new comlink-wrapped web worker running ffmpeg.worker.ts. */
const createComlinkWorker = () =>
    new ComlinkWorker<typeof DedicatedFFmpegWorker>(
        "ffmpeg-worker",
        new Worker(new URL("worker/ffmpeg.worker.ts", import.meta.url)),
    );

View file

@ -1,37 +0,0 @@
import { ElectronFile } from "types/upload";
import ComlinkFFmpegWorker from "utils/comlink/ComlinkFFmpegWorker";
/**
 * A client that can run ffmpeg commands.
 *
 * Implemented both by the native (desktop app) path and by the web worker
 * backed wasm build.
 */
export interface IFFmpeg {
    run: (
        cmd: string[],
        inputFile: File | ElectronFile,
        outputFilename: string,
        dontTimeout?: boolean,
    ) => Promise<File | ElectronFile>;
}
/**
 * Lazily construct and cache the platform appropriate {@link IFFmpeg}
 * implementation: the native binary when running under the desktop app, the
 * web worker backed wasm build otherwise.
 */
class FFmpegFactory {
    private client: IFFmpeg;

    /** Return the cached client, creating it on first use. */
    async getFFmpegClient() {
        if (this.client) return this.client;
        const electron = globalThis.electron;
        if (electron) {
            this.client = {
                run: (cmd, inputFile, outputFilename, dontTimeout) =>
                    electron.runFFmpegCmd(
                        cmd,
                        inputFile,
                        outputFilename,
                        dontTimeout,
                    ),
            };
        } else {
            this.client = await ComlinkFFmpegWorker.getInstance();
        }
        return this.client;
    }
}
export default new FFmpegFactory();

View file

@ -1,100 +0,0 @@
import log from "@/next/log";
import {
FFMPEG_PLACEHOLDER,
INPUT_PATH_PLACEHOLDER,
OUTPUT_PATH_PLACEHOLDER,
} from "constants/ffmpeg";
import { ElectronFile } from "types/upload";
import { parseFFmpegExtractedMetadata } from "utils/ffmpeg";
import ffmpegFactory from "./ffmpegFactory";
/**
 * Generate a JPEG thumbnail for the given video {@link file} using ffmpeg.
 *
 * Tries to grab a frame at t=1s, falling back to t=0s (some videos are
 * shorter than a second). If both attempts fail, the last error is rethrown
 * after being logged.
 */
export async function generateVideoThumbnail(
    file: File | ElectronFile,
): Promise<File | ElectronFile> {
    try {
        const ffmpegClient = await ffmpegFactory.getFFmpegClient();
        for (let seekTime = 1; seekTime >= 0; seekTime--) {
            try {
                return await ffmpegClient.run(
                    [
                        FFMPEG_PLACEHOLDER,
                        "-i",
                        INPUT_PATH_PLACEHOLDER,
                        "-ss",
                        `00:00:0${seekTime}`,
                        "-vframes",
                        "1",
                        "-vf",
                        "scale=-1:720",
                        OUTPUT_PATH_PLACEHOLDER,
                    ],
                    file,
                    "thumb.jpeg",
                );
            } catch (e) {
                // Out of fallback seek positions; give up.
                if (seekTime === 0) throw e;
            }
        }
    } catch (e) {
        log.error("ffmpeg generateVideoThumbnail failed", e);
        throw e;
    }
}
/**
 * Extract the metadata embedded in the given video {@link file}.
 *
 * Works by asking ffmpeg to copy all stream metadata into an ffmetadata
 * (UTF-8, INI-like) text dump, then parsing that dump.
 */
export async function extractVideoMetadata(file: File | ElectronFile) {
    try {
        const ffmpegClient = await ffmpegFactory.getFFmpegClient();
        // https://stackoverflow.com/questions/9464617/retrieving-and-saving-media-metadata-using-ffmpeg
        // -c [short for codex] copy[(stream_specifier)[ffmpeg.org/ffmpeg.html#Stream-specifiers]] => copies all the stream without re-encoding
        // -map_metadata [http://ffmpeg.org/ffmpeg.html#Advanced-options search for map_metadata] => copies all stream metadata to the out
        // -f ffmetadata [https://ffmpeg.org/ffmpeg-formats.html#Metadata-1] => dump metadata from media files into a simple UTF-8-encoded INI-like text file
        const metadata = await ffmpegClient.run(
            [
                FFMPEG_PLACEHOLDER,
                "-i",
                INPUT_PATH_PLACEHOLDER,
                "-c",
                "copy",
                "-map_metadata",
                "0",
                "-f",
                "ffmetadata",
                OUTPUT_PATH_PLACEHOLDER,
            ],
            file,
            `metadata.txt`,
        );
        // Parse the raw ffmetadata bytes into structured metadata.
        return parseFFmpegExtractedMetadata(
            new Uint8Array(await metadata.arrayBuffer()),
        );
    } catch (e) {
        log.error("ffmpeg extractVideoMetadata failed", e);
        throw e;
    }
}
/**
 * Convert the given video {@link file} into an MP4 using ffmpeg with the
 * "ultrafast" preset. Errors are logged and rethrown.
 */
export async function convertToMP4(file: File | ElectronFile) {
    try {
        const ffmpegClient = await ffmpegFactory.getFFmpegClient();
        return await ffmpegClient.run(
            [
                FFMPEG_PLACEHOLDER,
                "-i",
                INPUT_PATH_PLACEHOLDER,
                "-preset",
                "ultrafast",
                OUTPUT_PATH_PLACEHOLDER,
            ],
            file,
            "output.mp4",
            // dontTimeout: conversion of large videos can be slow.
            true,
        );
    } catch (e) {
        log.error("ffmpeg convertToMP4 failed", e);
        throw e;
    }
}

View file

@ -4,8 +4,18 @@ import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { CustomError } from "@ente/shared/error";
import { retryAsyncFunction } from "@ente/shared/utils";
import QueueProcessor from "@ente/shared/utils/queueProcessor";
import { getDedicatedConvertWorker } from "utils/comlink/ComlinkConvertWorker";
import { DedicatedConvertWorker } from "worker/convert.worker";
import { type DedicatedHEICConvertWorker } from "worker/heic-convert.worker";
/**
* Convert a HEIC image to a JPEG.
*
* Behind the scenes, it uses a web worker pool to do the conversion using a
* WASM HEIC conversion package.
*
* @param heicBlob The HEIC blob to convert.
* @returns The JPEG blob.
*/
export const heicToJPEG = (heicBlob: Blob) => converter.convert(heicBlob);
const WORKER_POOL_SIZE = 2;
const WAIT_TIME_BEFORE_NEXT_ATTEMPT_IN_MICROSECONDS = [100, 100];
@ -14,20 +24,18 @@ const BREATH_TIME_IN_MICROSECONDS = 1000;
class HEICConverter {
private convertProcessor = new QueueProcessor<Blob>();
private workerPool: ComlinkWorker<typeof DedicatedConvertWorker>[] = [];
private ready: Promise<void>;
private workerPool: ComlinkWorker<typeof DedicatedHEICConvertWorker>[] = [];
constructor() {
this.ready = this.init();
}
private async init() {
private initIfNeeded() {
if (this.workerPool.length > 0) return;
this.workerPool = [];
for (let i = 0; i < WORKER_POOL_SIZE; i++) {
this.workerPool.push(getDedicatedConvertWorker());
}
for (let i = 0; i < WORKER_POOL_SIZE; i++)
this.workerPool.push(createComlinkWorker());
}
async convert(fileBlob: Blob): Promise<Blob> {
await this.ready;
this.initIfNeeded();
const response = this.convertProcessor.queueUpRequest(() =>
retryAsyncFunction<Blob>(async () => {
const convertWorker = this.workerPool.shift();
@ -42,9 +50,7 @@ class HEICConverter {
}, WAIT_TIME_IN_MICROSECONDS);
const startTime = Date.now();
const convertedHEIC =
await worker.convertHEICToJPEG(
fileBlob,
);
await worker.heicToJPEG(fileBlob);
log.info(
`originalFileSize:${convertBytesToHumanReadable(
fileBlob?.size,
@ -90,11 +96,12 @@ class HEICConverter {
} catch (e) {
log.error("heic conversion failed", e);
convertWorker.terminate();
this.workerPool.push(getDedicatedConvertWorker());
this.workerPool.push(createComlinkWorker());
throw e;
}
}, WAIT_TIME_BEFORE_NEXT_ATTEMPT_IN_MICROSECONDS),
);
try {
return await response.promise;
} catch (e) {
@ -107,4 +114,11 @@ class HEICConverter {
}
}
export default new HEICConverter();
/** The singleton instance of {@link HEICConverter}. */
const converter = new HEICConverter();
const createComlinkWorker = () =>
new ComlinkWorker<typeof DedicatedHEICConvertWorker>(
"heic-convert-worker",
new Worker(new URL("worker/heic-convert.worker.ts", import.meta.url)),
);

View file

@ -1,14 +0,0 @@
import log from "@/next/log";
import WasmHEICConverterService from "./heic-convert/service";
/**
 * A thin wrapper over the wasm HEIC converter that logs (and rethrows)
 * conversion failures.
 */
class HeicConversionService {
    /** Convert the HEIC image in {@link heicFileData} to a JPEG blob. */
    async convert(heicFileData: Blob): Promise<Blob> {
        return WasmHEICConverterService.convert(heicFileData).catch((e) => {
            log.error("failed to convert heic file", e);
            throw e;
        });
    }
}
export default new HeicConversionService();

View file

@ -1,42 +0,0 @@
import { ensureElectron } from "@/next/electron";
import { Collection } from "types/collection";
import { ElectronFile, FileWithCollection } from "types/upload";
/**
 * Remember the collection the user uploaded to, so that a later resumed
 * upload can target the same collection.
 *
 * Exactly one collection means the user either uploaded to a single existing
 * album or created exactly one new album (they may have picked multiple
 * folders but chosen to upload into one album). Only in that unambiguous case
 * do we persist the name; otherwise we persist null.
 */
export const setToUploadCollection = async (collections: Collection[]) => {
    const collectionName: string =
        collections.length === 1 ? collections[0].name : null;
    await ensureElectron().setPendingUploadCollection(collectionName);
};
/**
 * Persist the list of file paths that are pending upload so that the desktop
 * app can resume the upload later. Live photos contribute both their image
 * and video asset paths.
 */
export const updatePendingUploads = async (files: FileWithCollection[]) => {
    const filePaths = files.flatMap((f) =>
        f.isLivePhoto
            ? [
                  (f.livePhotoAssets.image as ElectronFile).path,
                  (f.livePhotoAssets.video as ElectronFile).path,
              ]
            : [(f.file as ElectronFile).path],
    );
    await ensureElectron().setPendingUploadFiles("files", filePaths);
};
/**
 * Clear all persisted pending-upload state (collection name, zip and file
 * paths), effectively cancelling any upload that would have been resumed.
 */
export const cancelRemainingUploads = async () => {
    const electron = ensureElectron();
    await electron.setPendingUploadCollection(undefined);
    for (const type of ["zips", "files"] as const) {
        await electron.setPendingUploadFiles(type, []);
    }
};

View file

@ -1,5 +1,4 @@
import log from "@/next/log";
import { CustomError } from "@ente/shared/error";
import * as chrono from "chrono-node";
import { FILE_TYPE } from "constants/file";
import { t } from "i18next";
@ -287,24 +286,20 @@ async function getLocationSuggestions(searchPhrase: string) {
return [...locationTagSuggestions, ...citySearchSuggestions];
}
async function getClipSuggestion(searchPhrase: string): Promise<Suggestion> {
try {
if (!clipService.isPlatformSupported()) {
return null;
}
const clipResults = await searchClip(searchPhrase);
return {
type: SuggestionType.CLIP,
value: clipResults,
label: searchPhrase,
};
} catch (e) {
if (!e.message?.includes(CustomError.MODEL_DOWNLOAD_PENDING)) {
log.error("getClipSuggestion failed", e);
}
async function getClipSuggestion(
searchPhrase: string,
): Promise<Suggestion | undefined> {
if (!clipService.isPlatformSupported()) {
return null;
}
const clipResults = await searchClip(searchPhrase);
if (!clipResults) return undefined;
return {
type: SuggestionType.CLIP,
value: clipResults,
label: searchPhrase,
};
}
function searchCollection(
@ -374,9 +369,14 @@ async function searchLocationTag(searchPhrase: string): Promise<LocationTag[]> {
return matchedLocationTags;
}
async function searchClip(searchPhrase: string): Promise<ClipSearchScores> {
const searchClip = async (
searchPhrase: string,
): Promise<ClipSearchScores | undefined> => {
const textEmbedding =
await clipService.getTextEmbeddingIfAvailable(searchPhrase);
if (!textEmbedding) return undefined;
const imageEmbeddings = await getLocalEmbeddings();
const textEmbedding = await clipService.getTextEmbedding(searchPhrase);
const clipSearchResult = new Map<number, number>(
(
await Promise.all(
@ -394,7 +394,7 @@ async function searchClip(searchPhrase: string): Promise<ClipSearchScores> {
);
return clipSearchResult;
}
};
function convertSuggestionToSearchQuery(option: Suggestion): Search {
switch (option.type) {

View file

@ -1,5 +1,5 @@
import { encodeLivePhoto } from "@/media/live-photo";
import { getFileNameSize } from "@/next/file";
import { ensureElectron } from "@/next/electron";
import { basename, getFileNameSize } from "@/next/file";
import log from "@/next/log";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { CustomError } from "@ente/shared/error";
@ -10,13 +10,8 @@ import {
} from "@ente/shared/time";
import { Remote } from "comlink";
import { FILE_TYPE } from "constants/file";
import {
FILE_READER_CHUNK_SIZE,
LIVE_PHOTO_ASSET_SIZE_LIMIT,
NULL_EXTRACTED_METADATA,
NULL_LOCATION,
} from "constants/upload";
import * as ffmpegService from "services/ffmpeg/ffmpegService";
import { FILE_READER_CHUNK_SIZE, NULL_LOCATION } from "constants/upload";
import * as ffmpegService from "services/ffmpeg";
import { getElectronFileStream, getFileStream } from "services/readerService";
import { getFileType } from "services/typeDetectionService";
import { FilePublicMagicMetadataProps } from "types/file";
@ -25,20 +20,20 @@ import {
ElectronFile,
ExtractMetadataResult,
FileTypeInfo,
FileWithCollection,
LivePhotoAssets,
Location,
Metadata,
ParsedExtractedMetadata,
ParsedMetadataJSON,
ParsedMetadataJSONMap,
type FileWithCollection,
type FileWithCollection2,
type LivePhotoAssets2,
} from "types/upload";
import { getFileTypeFromExtensionForLivePhotoClustering } from "utils/file/livePhoto";
import { getUint8ArrayView } from "../readerService";
import { getEXIFLocation, getEXIFTime, getParsedExifData } from "./exifService";
import { generateThumbnail } from "./thumbnailService";
import uploadCancelService from "./uploadCancelService";
import { extractFileMetadata } from "./uploadService";
import { extractFileMetadata, getFileName } from "./uploadService";
const NULL_PARSED_METADATA_JSON: ParsedMetadataJSON = {
creationTime: null,
@ -66,6 +61,13 @@ const EXIF_TAGS_NEEDED = [
export const MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT = 46;
export const NULL_EXTRACTED_METADATA: ParsedExtractedMetadata = {
location: NULL_LOCATION,
creationTime: null,
width: null,
height: null,
};
export async function extractMetadata(
worker: Remote<DedicatedCryptoWorker>,
receivedFile: File | ElectronFile,
@ -167,67 +169,79 @@ export const getMetadataJSONMapKeyForFile = (
return `${collectionID}-${getFileOriginalName(fileName)}`;
};
export async function parseMetadataJSON(receivedFile: File | ElectronFile) {
export async function parseMetadataJSON(
receivedFile: File | ElectronFile | string,
) {
try {
if (!(receivedFile instanceof File)) {
receivedFile = new File(
[await receivedFile.blob()],
receivedFile.name,
);
}
const metadataJSON: object = JSON.parse(await receivedFile.text());
const parsedMetadataJSON: ParsedMetadataJSON =
NULL_PARSED_METADATA_JSON;
if (!metadataJSON) {
return;
let text: string;
if (typeof receivedFile == "string") {
text = await ensureElectron().fs.readTextFile(receivedFile);
} else {
if (!(receivedFile instanceof File)) {
receivedFile = new File(
[await receivedFile.blob()],
receivedFile.name,
);
}
text = await receivedFile.text();
}
if (
metadataJSON["photoTakenTime"] &&
metadataJSON["photoTakenTime"]["timestamp"]
) {
parsedMetadataJSON.creationTime =
metadataJSON["photoTakenTime"]["timestamp"] * 1000000;
} else if (
metadataJSON["creationTime"] &&
metadataJSON["creationTime"]["timestamp"]
) {
parsedMetadataJSON.creationTime =
metadataJSON["creationTime"]["timestamp"] * 1000000;
}
if (
metadataJSON["modificationTime"] &&
metadataJSON["modificationTime"]["timestamp"]
) {
parsedMetadataJSON.modificationTime =
metadataJSON["modificationTime"]["timestamp"] * 1000000;
}
let locationData: Location = NULL_LOCATION;
if (
metadataJSON["geoData"] &&
(metadataJSON["geoData"]["latitude"] !== 0.0 ||
metadataJSON["geoData"]["longitude"] !== 0.0)
) {
locationData = metadataJSON["geoData"];
} else if (
metadataJSON["geoDataExif"] &&
(metadataJSON["geoDataExif"]["latitude"] !== 0.0 ||
metadataJSON["geoDataExif"]["longitude"] !== 0.0)
) {
locationData = metadataJSON["geoDataExif"];
}
if (locationData !== null) {
parsedMetadataJSON.latitude = locationData.latitude;
parsedMetadataJSON.longitude = locationData.longitude;
}
return parsedMetadataJSON;
return parseMetadataJSONText(text);
} catch (e) {
log.error("parseMetadataJSON failed", e);
// ignore
}
}
/**
 * Parse the contents of a (Google Takeout style) metadata JSON sidecar.
 *
 * Returns the interesting bits — creation/modification time (converted from
 * epoch seconds to epoch microseconds) and location — as a
 * {@link ParsedMetadataJSON}, or `undefined` when the JSON parses to a falsy
 * value.
 */
export async function parseMetadataJSONText(text: string) {
    const metadataJSON: object = JSON.parse(text);
    if (!metadataJSON) {
        return;
    }
    // Start from a copy. The previous code assigned the shared module level
    // NULL_PARSED_METADATA_JSON constant directly and then mutated its
    // fields, leaking values from one sidecar's parse into the next.
    const parsedMetadataJSON: ParsedMetadataJSON = {
        ...NULL_PARSED_METADATA_JSON,
    };
    if (
        metadataJSON["photoTakenTime"] &&
        metadataJSON["photoTakenTime"]["timestamp"]
    ) {
        // Epoch seconds → epoch microseconds.
        parsedMetadataJSON.creationTime =
            metadataJSON["photoTakenTime"]["timestamp"] * 1000000;
    } else if (
        metadataJSON["creationTime"] &&
        metadataJSON["creationTime"]["timestamp"]
    ) {
        parsedMetadataJSON.creationTime =
            metadataJSON["creationTime"]["timestamp"] * 1000000;
    }
    if (
        metadataJSON["modificationTime"] &&
        metadataJSON["modificationTime"]["timestamp"]
    ) {
        parsedMetadataJSON.modificationTime =
            metadataJSON["modificationTime"]["timestamp"] * 1000000;
    }
    // Prefer geoData, then geoDataExif; (0, 0) is treated as "unset".
    let locationData: Location = NULL_LOCATION;
    if (
        metadataJSON["geoData"] &&
        (metadataJSON["geoData"]["latitude"] !== 0.0 ||
            metadataJSON["geoData"]["longitude"] !== 0.0)
    ) {
        locationData = metadataJSON["geoData"];
    } else if (
        metadataJSON["geoDataExif"] &&
        (metadataJSON["geoDataExif"]["latitude"] !== 0.0 ||
            metadataJSON["geoDataExif"]["longitude"] !== 0.0)
    ) {
        locationData = metadataJSON["geoDataExif"];
    }
    if (locationData !== null) {
        parsedMetadataJSON.latitude = locationData.latitude;
        parsedMetadataJSON.longitude = locationData.longitude;
    }
    return parsedMetadataJSON;
}
// tries to extract date from file name if available else returns null
export function extractDateFromFileName(filename: string): number {
try {
@ -340,7 +354,7 @@ export async function extractLivePhotoMetadata(
parsedMetadataJSONMap: ParsedMetadataJSONMap,
collectionID: number,
fileTypeInfo: FileTypeInfo,
livePhotoAssets: LivePhotoAssets,
livePhotoAssets: LivePhotoAssets2,
): Promise<ExtractMetadataResult> {
const imageFileTypeInfo: FileTypeInfo = {
fileType: FILE_TYPE.IMAGE,
@ -356,7 +370,11 @@ export async function extractLivePhotoMetadata(
imageFileTypeInfo,
livePhotoAssets.image,
);
const videoHash = await getFileHash(worker, livePhotoAssets.video);
const videoHash = await getFileHash(
worker,
/* TODO(MR): ElectronFile changes */
livePhotoAssets.video as File | ElectronFile,
);
return {
metadata: {
...imageMetadata,
@ -374,47 +392,20 @@ export function getLivePhotoSize(livePhotoAssets: LivePhotoAssets) {
return livePhotoAssets.image.size + livePhotoAssets.video.size;
}
export function getLivePhotoName(livePhotoAssets: LivePhotoAssets) {
return livePhotoAssets.image.name;
}
export const getLivePhotoName = ({ image }: LivePhotoAssets2) =>
typeof image == "string" ? basename(image) : image.name;
export async function readLivePhoto(
fileTypeInfo: FileTypeInfo,
livePhotoAssets: LivePhotoAssets,
) {
const { thumbnail, hasStaticThumbnail } = await generateThumbnail(
livePhotoAssets.image,
{
exactType: fileTypeInfo.imageType,
fileType: FILE_TYPE.IMAGE,
},
);
const imageData = await getUint8ArrayView(livePhotoAssets.image);
const videoData = await getUint8ArrayView(livePhotoAssets.video);
return {
filedata: await encodeLivePhoto({
imageFileName: livePhotoAssets.image.name,
imageData,
videoFileName: livePhotoAssets.video.name,
videoData,
}),
thumbnail,
hasStaticThumbnail,
};
}
export async function clusterLivePhotoFiles(mediaFiles: FileWithCollection[]) {
export async function clusterLivePhotoFiles(mediaFiles: FileWithCollection2[]) {
try {
const analysedMediaFiles: FileWithCollection[] = [];
const analysedMediaFiles: FileWithCollection2[] = [];
mediaFiles
.sort((firstMediaFile, secondMediaFile) =>
splitFilenameAndExtension(
firstMediaFile.file.name,
getFileName(firstMediaFile.file),
)[0].localeCompare(
splitFilenameAndExtension(secondMediaFile.file.name)[0],
splitFilenameAndExtension(
getFileName(secondMediaFile.file),
)[0],
),
)
.sort(
@ -430,23 +421,25 @@ export async function clusterLivePhotoFiles(mediaFiles: FileWithCollection[]) {
const secondMediaFile = mediaFiles[index + 1];
const firstFileType =
getFileTypeFromExtensionForLivePhotoClustering(
firstMediaFile.file.name,
getFileName(firstMediaFile.file),
);
const secondFileType =
getFileTypeFromExtensionForLivePhotoClustering(
secondMediaFile.file.name,
getFileName(secondMediaFile.file),
);
const firstFileIdentifier: LivePhotoIdentifier = {
collectionID: firstMediaFile.collectionID,
fileType: firstFileType,
name: firstMediaFile.file.name,
size: firstMediaFile.file.size,
name: getFileName(firstMediaFile.file),
/* TODO(MR): ElectronFile changes */
size: (firstMediaFile as FileWithCollection).file.size,
};
const secondFileIdentifier: LivePhotoIdentifier = {
collectionID: secondMediaFile.collectionID,
fileType: secondFileType,
name: secondMediaFile.file.name,
size: secondMediaFile.file.size,
name: getFileName(secondMediaFile.file),
/* TODO(MR): ElectronFile changes */
size: (secondMediaFile as FileWithCollection).file.size,
};
if (
areFilesLivePhotoAssets(
@ -454,8 +447,8 @@ export async function clusterLivePhotoFiles(mediaFiles: FileWithCollection[]) {
secondFileIdentifier,
)
) {
let imageFile: File | ElectronFile;
let videoFile: File | ElectronFile;
let imageFile: File | ElectronFile | string;
let videoFile: File | ElectronFile | string;
if (
firstFileType === FILE_TYPE.IMAGE &&
secondFileType === FILE_TYPE.VIDEO
@ -539,6 +532,8 @@ function areFilesLivePhotoAssets(
areNotSameFileType &&
firstFileNameWithoutSuffix === secondFileNameWithoutSuffix
) {
const LIVE_PHOTO_ASSET_SIZE_LIMIT = 20 * 1024 * 1024; // 20MB
// checks size of live Photo assets are less than allowed limit
// I did that based on the assumption that live photo assets ideally would not be larger than LIVE_PHOTO_ASSET_SIZE_LIMIT
// also zipping library doesn't support stream as a input

View file

@ -0,0 +1,319 @@
import { getFileNameSize } from "@/next/file";
import log from "@/next/log";
import { CustomErrorMessage, type Electron } from "@/next/types/ipc";
import { CustomError } from "@ente/shared/error";
import { FILE_TYPE } from "constants/file";
import { BLACK_THUMBNAIL_BASE64 } from "constants/upload";
import * as FFmpegService from "services/ffmpeg";
import { heicToJPEG } from "services/heic-convert";
import { ElectronFile, FileTypeInfo } from "types/upload";
import { isFileHEIC } from "utils/file";
import { getUint8ArrayView } from "../readerService";
import { getFileName } from "./uploadService";
/** Maximum width or height of the generated thumbnail */
const maxThumbnailDimension = 720;
/** Maximum size (in bytes) of the generated thumbnail */
const maxThumbnailSize = 100 * 1024; // 100 KB
/** Stop recompressing once a quality step shrinks the blob by less than this many percent. */
const MIN_COMPRESSION_PERCENTAGE_SIZE_DIFF = 10;
/** Lowest JPEG quality we'll go down to when compressing thumbnails. */
const MIN_QUALITY = 0.5;
/** JPEG quality that compression attempts start at. */
const MAX_QUALITY = 0.7;
/** Timeout (in ms) for canvas based thumbnail generation. */
const WAIT_TIME_THUMBNAIL_GENERATION = 30 * 1000;
/**
 * Module scoped state that memoizes the (un)availability of optional native
 * functionality, so we don't repeat IPC calls that are known to fail.
 */
class ModuleState {
    /**
     * This will be set to true if we get an error from the Node.js side of our
     * desktop app telling us that native JPEG conversion is not available for
     * the current OS/arch combination. That way, we can stop pestering it again
     * and again (saving an IPC round-trip).
     *
     * Note the double negative when it is used.
     */
    isNativeThumbnailCreationNotAvailable = false;
}
const moduleState = new ModuleState();
/** The result of a thumbnail generation attempt. */
interface GeneratedThumbnail {
    /** The JPEG data of the generated thumbnail */
    thumbnail: Uint8Array;
    /**
     * `true` if this is a fallback (all black) thumbnail we're returning since
     * thumbnail generation failed for some reason.
     */
    hasStaticThumbnail: boolean;
}
/**
 * Generate a JPEG thumbnail for the given {@link file}.
 *
 * The thumbnail has a smaller file size so that is quick to load. But more
 * importantly, it uses a universal file format (JPEG in our case) so that the
 * thumbnail itself can be opened in all clients, even those like the web client
 * itself that might not yet have support for more exotic formats.
 */
export const generateThumbnail = async (
    file: File | ElectronFile,
    fileTypeInfo: FileTypeInfo,
): Promise<GeneratedThumbnail> => {
    try {
        const isImage = fileTypeInfo.fileType === FILE_TYPE.IMAGE;
        const thumbnail = isImage
            ? await generateImageThumbnail(file, fileTypeInfo)
            : await generateVideoThumbnail(file, fileTypeInfo);
        if (thumbnail.length == 0) throw new Error("Empty thumbnail");
        log.debug(() => `Generated thumbnail for ${getFileName(file)}`);
        return { thumbnail, hasStaticThumbnail: false };
    } catch (e) {
        log.error(
            `Failed to generate thumbnail for ${getFileName(file)} with format ${fileTypeInfo.exactType}`,
            e,
        );
        // Fall back to a static all-black placeholder.
        return { thumbnail: fallbackThumbnail(), hasStaticThumbnail: true };
    }
};
/**
 * A fallback, black, thumbnail for use in cases where thumbnail generation
 * fails.
 */
const fallbackThumbnail = () => {
    const bytes = atob(BLACK_THUMBNAIL_BASE64);
    const result = new Uint8Array(bytes.length);
    for (let i = 0; i < bytes.length; i++) result[i] = bytes.charCodeAt(i);
    return result;
};
/**
 * Create a JPEG thumbnail for the given image {@link file}.
 *
 * Prefers the faster native implementation when running in our desktop app,
 * falling back to a canvas based approach. If the native path reports that it
 * is unavailable on this OS/arch, remember that so we don't keep retrying.
 */
const generateImageThumbnail = async (
    file: File | ElectronFile,
    fileTypeInfo: FileTypeInfo,
) => {
    const electron = globalThis.electron;
    const canUseNative =
        electron && !moduleState.isNativeThumbnailCreationNotAvailable;
    if (canUseNative) {
        try {
            return await generateImageThumbnailInElectron(electron, file);
        } catch (e) {
            if (e.message == CustomErrorMessage.NotAvailable) {
                moduleState.isNativeThumbnailCreationNotAvailable = true;
            } else {
                log.error("Native thumbnail creation failed", e);
            }
        }
    }
    return generateImageThumbnailUsingCanvas(file, fileTypeInfo);
};
/**
 * Ask the Node.js side of our desktop app to generate a JPEG thumbnail for
 * {@link inputFile}, logging how long the native call took.
 */
const generateImageThumbnailInElectron = async (
    electron: Electron,
    inputFile: File | ElectronFile,
): Promise<Uint8Array> => {
    const t0 = Date.now();
    const jpegData = await electron.generateImageThumbnail(
        inputFile,
        maxThumbnailDimension,
        maxThumbnailSize,
    );
    log.debug(
        () => `Native thumbnail generation took ${Date.now() - t0} ms`,
    );
    return jpegData;
};
/**
 * Generate a JPEG thumbnail for an image {@link file} by drawing a scaled
 * down version of it onto an offscreen canvas and compressing the result.
 */
async function generateImageThumbnailUsingCanvas(
    file: File | ElectronFile,
    fileTypeInfo: FileTypeInfo,
) {
    const canvas = document.createElement("canvas");
    const canvasCTX = canvas.getContext("2d");
    let imageURL = null;
    let timeout = null;
    // The browser can't decode HEIC directly; convert it to JPEG first.
    if (isFileHEIC(fileTypeInfo.exactType)) {
        log.debug(() => `Pre-converting ${getFileName(file)} to JPEG`);
        const jpegBlob = await heicToJPEG(new Blob([await file.arrayBuffer()]));
        file = new File([jpegBlob], file.name);
    }
    let image = new Image();
    imageURL = URL.createObjectURL(new Blob([await file.arrayBuffer()]));
    // Wait for either the image to load (and get drawn) or the timeout.
    await new Promise((resolve, reject) => {
        image.setAttribute("src", imageURL);
        image.onload = () => {
            try {
                URL.revokeObjectURL(imageURL);
                // Scale to fit within maxThumbnailDimension, keeping aspect.
                const { width, height } = scaledThumbnailDimensions(
                    image.width,
                    image.height,
                    maxThumbnailDimension,
                );
                canvas.width = width;
                canvas.height = height;
                canvasCTX.drawImage(image, 0, 0, width, height);
                image = null;
                clearTimeout(timeout);
                resolve(null);
            } catch (e) {
                const err = new Error(CustomError.THUMBNAIL_GENERATION_FAILED, {
                    cause: e,
                });
                reject(err);
            }
        };
        // Bail out if the image never fires onload.
        timeout = setTimeout(
            () => reject(new Error("Operation timed out")),
            WAIT_TIME_THUMBNAIL_GENERATION,
        );
    });
    const thumbnailBlob = await getCompressedThumbnailBlobFromCanvas(canvas);
    return await getUint8ArrayView(thumbnailBlob);
}
async function generateVideoThumbnail(
file: File | ElectronFile,
fileTypeInfo: FileTypeInfo,
) {
let thumbnail: Uint8Array;
try {
log.info(
`ffmpeg generateThumbnail called for ${getFileNameSize(file)}`,
);
const thumbnail = await FFmpegService.generateVideoThumbnail(file);
log.info(
`ffmpeg thumbnail successfully generated ${getFileNameSize(file)}`,
);
return await getUint8ArrayView(thumbnail);
} catch (e) {
log.info(
`ffmpeg thumbnail generated failed ${getFileNameSize(
file,
)} error: ${e.message}`,
);
log.error(
`failed to generate thumbnail using ffmpeg for format ${fileTypeInfo.exactType}`,
e,
);
thumbnail = await generateVideoThumbnailUsingCanvas(file);
}
return thumbnail;
}
/**
 * Generate a JPEG thumbnail for a video {@link file} by loading it into a
 * <video> element, drawing a frame onto an offscreen canvas once data is
 * available, and compressing the result.
 */
async function generateVideoThumbnailUsingCanvas(file: File | ElectronFile) {
    const canvas = document.createElement("canvas");
    const canvasCTX = canvas.getContext("2d");
    let timeout = null;
    let videoURL = null;
    let video = document.createElement("video");
    videoURL = URL.createObjectURL(new Blob([await file.arrayBuffer()]));
    // Wait for either the "loadeddata" event (and the draw) or the timeout.
    await new Promise((resolve, reject) => {
        video.preload = "metadata";
        video.src = videoURL;
        video.addEventListener("loadeddata", function () {
            try {
                URL.revokeObjectURL(videoURL);
                if (!video) {
                    throw Error("video load failed");
                }
                // Scale to fit within maxThumbnailDimension, keeping aspect.
                const { width, height } = scaledThumbnailDimensions(
                    video.videoWidth,
                    video.videoHeight,
                    maxThumbnailDimension,
                );
                canvas.width = width;
                canvas.height = height;
                canvasCTX.drawImage(video, 0, 0, width, height);
                video = null;
                clearTimeout(timeout);
                resolve(null);
            } catch (e) {
                const err = Error(
                    `${CustomError.THUMBNAIL_GENERATION_FAILED} err: ${e}`,
                );
                log.error(CustomError.THUMBNAIL_GENERATION_FAILED, e);
                reject(err);
            }
        });
        // Bail out if the video never becomes ready.
        timeout = setTimeout(
            () => reject(new Error("Operation timed out")),
            WAIT_TIME_THUMBNAIL_GENERATION,
        );
    });
    const thumbnailBlob = await getCompressedThumbnailBlobFromCanvas(canvas);
    return await getUint8ArrayView(thumbnailBlob);
}
/**
 * Compress the canvas contents into a JPEG blob, aiming (best effort) for a
 * size under maxThumbnailSize.
 *
 * Starts at MAX_QUALITY and re-encodes at progressively lower quality until
 * the blob is small enough, quality would drop below MIN_QUALITY, or a step
 * stops shrinking the blob appreciably.
 */
async function getCompressedThumbnailBlobFromCanvas(canvas: HTMLCanvasElement) {
    let thumbnailBlob: Blob = null;
    let prevSize = Number.MAX_SAFE_INTEGER;
    let quality = MAX_QUALITY;
    do {
        if (thumbnailBlob) {
            prevSize = thumbnailBlob.size;
        }
        thumbnailBlob = await new Promise((resolve) => {
            canvas.toBlob(
                function (blob) {
                    resolve(blob);
                },
                "image/jpeg",
                quality,
            );
        });
        // toBlob may yield null; substitute an empty blob so .size is safe.
        thumbnailBlob = thumbnailBlob ?? new Blob([]);
        quality -= 0.1;
    } while (
        quality >= MIN_QUALITY &&
        thumbnailBlob.size > maxThumbnailSize &&
        percentageSizeDiff(thumbnailBlob.size, prevSize) >=
            MIN_COMPRESSION_PERCENTAGE_SIZE_DIFF
    );
    return thumbnailBlob;
}
/**
 * The percentage by which {@link newThumbnailSize} is smaller than
 * {@link oldThumbnailSize} (negative if it grew).
 */
function percentageSizeDiff(
    newThumbnailSize: number,
    oldThumbnailSize: number,
) {
    const reduction = oldThumbnailSize - newThumbnailSize;
    return (reduction * 100) / oldThumbnailSize;
}
/**
 * Compute the size of the thumbnail to create for an image with the given
 * {@link width} and {@link height}.
 *
 * The returned size fits within a {@link maxDimension} x {@link maxDimension}
 * square while preserving the aspect ratio of the input. Invalid inputs (a
 * zero dimension, or a scale that rounds a side down to zero) yield `{0, 0}`.
 */
const scaledThumbnailDimensions = (
    width: number,
    height: number,
    maxDimension: number,
): { width: number; height: number } => {
    if (width === 0 || height === 0) return { width: 0, height: 0 };
    const scale = Math.min(maxDimension / width, maxDimension / height);
    const w = Math.round(width * scale);
    const h = Math.round(height * scale);
    if (w === 0 || h === 0) return { width: 0, height: 0 };
    return { width: w, height: h };
};

View file

@ -1,332 +0,0 @@
import { ensureElectron } from "@/next/electron";
import { convertBytesToHumanReadable, getFileNameSize } from "@/next/file";
import log from "@/next/log";
import { CustomError } from "@ente/shared/error";
import { FILE_TYPE } from "constants/file";
import { BLACK_THUMBNAIL_BASE64 } from "constants/upload";
import isElectron from "is-electron";
import * as FFmpegService from "services/ffmpeg/ffmpegService";
import HeicConversionService from "services/heicConversionService";
import { ElectronFile, FileTypeInfo } from "types/upload";
import { isFileHEIC } from "utils/file";
import { getUint8ArrayView } from "../readerService";
/** Maximum width or height (px) of generated thumbnails. */
const MAX_THUMBNAIL_DIMENSION = 720;
/** Stop recompressing once a quality step shrinks the blob by less than this many percent. */
const MIN_COMPRESSION_PERCENTAGE_SIZE_DIFF = 10;
/** Target maximum thumbnail size, in bytes (100 KB). */
const MAX_THUMBNAIL_SIZE = 100 * 1024;
/** Lowest JPEG quality used when compressing thumbnails. */
const MIN_QUALITY = 0.5;
/** Initial JPEG quality used when compressing thumbnails. */
const MAX_QUALITY = 0.7;
/** Timeout (in ms) for canvas based thumbnail generation. */
const WAIT_TIME_THUMBNAIL_GENERATION = 30 * 1000;
/** A width and height pair, in pixels. */
interface Dimension {
    width: number;
    height: number;
}
/**
 * Generate a JPEG thumbnail for the given {@link file}.
 *
 * On failure of the actual generation, a static all-black placeholder is
 * returned instead (with `hasStaticThumbnail` set to true); only unexpected
 * errors outside generation itself are rethrown.
 */
export async function generateThumbnail(
    file: File | ElectronFile,
    fileTypeInfo: FileTypeInfo,
): Promise<{ thumbnail: Uint8Array; hasStaticThumbnail: boolean }> {
    try {
        log.info(`generating thumbnail for ${getFileNameSize(file)}`);
        let hasStaticThumbnail = false;
        let thumbnail: Uint8Array;
        try {
            if (fileTypeInfo.fileType === FILE_TYPE.IMAGE) {
                thumbnail = await generateImageThumbnail(file, fileTypeInfo);
            } else {
                thumbnail = await generateVideoThumbnail(file, fileTypeInfo);
            }
            // Oversized thumbnails are only logged, not rejected.
            if (thumbnail.length > 1.5 * MAX_THUMBNAIL_SIZE) {
                log.error(
                    `thumbnail greater than max limit - ${JSON.stringify({
                        thumbnailSize: convertBytesToHumanReadable(
                            thumbnail.length,
                        ),
                        fileSize: convertBytesToHumanReadable(file.size),
                        fileType: fileTypeInfo.exactType,
                    })}`,
                );
            }
            if (thumbnail.length === 0) {
                throw Error("EMPTY THUMBNAIL");
            }
            log.info(
                `thumbnail successfully generated ${getFileNameSize(file)}`,
            );
        } catch (e) {
            log.error(
                `thumbnail generation failed ${getFileNameSize(file)} with format ${fileTypeInfo.exactType}`,
                e,
            );
            // Fall back to the static all-black placeholder thumbnail.
            thumbnail = Uint8Array.from(atob(BLACK_THUMBNAIL_BASE64), (c) =>
                c.charCodeAt(0),
            );
            hasStaticThumbnail = true;
        }
        return { thumbnail, hasStaticThumbnail };
    } catch (e) {
        log.error("Error generating static thumbnail", e);
        throw e;
    }
}
/**
 * Generate an image thumbnail, using the native Electron implementation when
 * running in the desktop app, and falling back to the canvas implementation
 * otherwise (or when the native attempt fails).
 */
async function generateImageThumbnail(
    file: File | ElectronFile,
    fileTypeInfo: FileTypeInfo,
) {
    if (isElectron()) {
        try {
            return await generateImageThumbnailInElectron(
                file,
                MAX_THUMBNAIL_DIMENSION,
                MAX_THUMBNAIL_SIZE,
            );
        } catch {
            // Fall through to the canvas implementation below.
        }
    }
    return await generateImageThumbnailUsingCanvas(file, fileTypeInfo);
}
/**
 * Ask the native (Electron) layer to generate a thumbnail for
 * {@link inputFile}, constrained to {@link maxDimension} pixels on each side
 * and roughly {@link maxSize} bytes.
 *
 * Rethrows any failure so the caller can fall back to the canvas path.
 */
const generateImageThumbnailInElectron = async (
    inputFile: File | ElectronFile,
    maxDimension: number,
    maxSize: number,
): Promise<Uint8Array> => {
    try {
        const startedAt = Date.now();
        const thumb = await ensureElectron().generateImageThumbnail(
            inputFile,
            maxDimension,
            maxSize,
        );
        const elapsed = Date.now() - startedAt;
        log.info(
            `originalFileSize:${convertBytesToHumanReadable(
                inputFile?.size,
            )},thumbFileSize:${convertBytesToHumanReadable(
                thumb?.length,
            )}, native thumbnail generation time: ${elapsed}ms `,
        );
        return thumb;
    } catch (e) {
        // Presumably this error is expected (native generation unsupported on
        // Windows), so don't spam the log for it — TODO confirm.
        if (
            e.message !==
            CustomError.WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED
        ) {
            log.error("failed to generate image thumbnail natively", e);
        }
        throw e;
    }
};
/**
 * Generate a thumbnail for an image by drawing it scaled down onto a canvas
 * and exporting the result as a compressed JPEG.
 *
 * HEIC files are first converted to a browser-renderable format. Rejects with
 * a timeout error if the image hasn't loaded within
 * {@link WAIT_TIME_THUMBNAIL_GENERATION}, and rejects immediately (instead of
 * waiting out the timeout) if the image fails to load or decode.
 */
export async function generateImageThumbnailUsingCanvas(
    file: File | ElectronFile,
    fileTypeInfo: FileTypeInfo,
) {
    const canvas = document.createElement("canvas");
    const canvasCTX = canvas.getContext("2d");
    let imageURL = null;
    let timeout = null;

    // Browsers can't render HEIC natively, so convert it first.
    const isHEIC = isFileHEIC(fileTypeInfo.exactType);
    if (isHEIC) {
        log.info(`HEICConverter called for ${getFileNameSize(file)}`);
        const convertedBlob = await HeicConversionService.convert(
            new Blob([await file.arrayBuffer()]),
        );
        file = new File([convertedBlob], file.name);
        log.info(`${getFileNameSize(file)} successfully converted`);
    }
    let image = new Image();
    imageURL = URL.createObjectURL(new Blob([await file.arrayBuffer()]));
    await new Promise((resolve, reject) => {
        image.setAttribute("src", imageURL);
        image.onload = () => {
            try {
                URL.revokeObjectURL(imageURL);
                const imageDimension = {
                    width: image.width,
                    height: image.height,
                };
                const thumbnailDimension = calculateThumbnailDimension(
                    imageDimension,
                    MAX_THUMBNAIL_DIMENSION,
                );
                canvas.width = thumbnailDimension.width;
                canvas.height = thumbnailDimension.height;
                canvasCTX.drawImage(
                    image,
                    0,
                    0,
                    thumbnailDimension.width,
                    thumbnailDimension.height,
                );
                // Drop our reference so the image can be garbage collected.
                image = null;
                clearTimeout(timeout);
                resolve(null);
            } catch (e) {
                const err = new Error(CustomError.THUMBNAIL_GENERATION_FAILED, {
                    cause: e,
                });
                reject(err);
            }
        };
        // Fail fast on a broken or undecodable image instead of waiting the
        // full 30 seconds for the timeout to fire. Also revoke the object URL
        // so it doesn't leak on the failure path.
        image.onerror = (e) => {
            URL.revokeObjectURL(imageURL);
            clearTimeout(timeout);
            reject(
                new Error(CustomError.THUMBNAIL_GENERATION_FAILED, {
                    cause: e,
                }),
            );
        };
        timeout = setTimeout(
            () => reject(new Error("Operation timed out")),
            WAIT_TIME_THUMBNAIL_GENERATION,
        );
    });
    const thumbnailBlob = await getCompressedThumbnailBlobFromCanvas(canvas);
    return await getUint8ArrayView(thumbnailBlob);
}
async function generateVideoThumbnail(
file: File | ElectronFile,
fileTypeInfo: FileTypeInfo,
) {
let thumbnail: Uint8Array;
try {
log.info(
`ffmpeg generateThumbnail called for ${getFileNameSize(file)}`,
);
const thumbnail = await FFmpegService.generateVideoThumbnail(file);
log.info(
`ffmpeg thumbnail successfully generated ${getFileNameSize(file)}`,
);
return await getUint8ArrayView(thumbnail);
} catch (e) {
log.info(
`ffmpeg thumbnail generated failed ${getFileNameSize(
file,
)} error: ${e.message}`,
);
log.error(
`failed to generate thumbnail using ffmpeg for format ${fileTypeInfo.exactType}`,
e,
);
thumbnail = await generateVideoThumbnailUsingCanvas(file);
}
return thumbnail;
}
/**
 * Generate a thumbnail for a video by drawing its first loaded frame onto a
 * canvas and exporting the result as a compressed JPEG.
 *
 * Rejects with a timeout error if the video data hasn't loaded within
 * {@link WAIT_TIME_THUMBNAIL_GENERATION}, and rejects immediately (instead of
 * waiting out the timeout) if the video fails to load or decode.
 */
export async function generateVideoThumbnailUsingCanvas(
    file: File | ElectronFile,
) {
    const canvas = document.createElement("canvas");
    const canvasCTX = canvas.getContext("2d");
    let timeout = null;
    let videoURL = null;

    let video = document.createElement("video");
    videoURL = URL.createObjectURL(new Blob([await file.arrayBuffer()]));
    await new Promise((resolve, reject) => {
        video.preload = "metadata";
        video.src = videoURL;
        video.addEventListener("loadeddata", function () {
            try {
                URL.revokeObjectURL(videoURL);
                if (!video) {
                    throw Error("video load failed");
                }
                const videoDimension = {
                    width: video.videoWidth,
                    height: video.videoHeight,
                };
                const thumbnailDimension = calculateThumbnailDimension(
                    videoDimension,
                    MAX_THUMBNAIL_DIMENSION,
                );
                canvas.width = thumbnailDimension.width;
                canvas.height = thumbnailDimension.height;
                canvasCTX.drawImage(
                    video,
                    0,
                    0,
                    thumbnailDimension.width,
                    thumbnailDimension.height,
                );
                // Drop our reference so the element can be garbage collected.
                video = null;
                clearTimeout(timeout);
                resolve(null);
            } catch (e) {
                const err = Error(
                    `${CustomError.THUMBNAIL_GENERATION_FAILED} err: ${e}`,
                );
                log.error(CustomError.THUMBNAIL_GENERATION_FAILED, e);
                reject(err);
            }
        });
        // Fail fast if the video can't be loaded or decoded instead of
        // waiting the full 30 seconds for the timeout to fire. Also revoke
        // the object URL so it doesn't leak on the failure path.
        video.addEventListener("error", (e) => {
            URL.revokeObjectURL(videoURL);
            clearTimeout(timeout);
            log.error(CustomError.THUMBNAIL_GENERATION_FAILED, e);
            reject(
                Error(
                    `${CustomError.THUMBNAIL_GENERATION_FAILED} err: video load failed`,
                ),
            );
        });
        timeout = setTimeout(
            () => reject(new Error("Operation timed out")),
            WAIT_TIME_THUMBNAIL_GENERATION,
        );
    });
    const thumbnailBlob = await getCompressedThumbnailBlobFromCanvas(canvas);
    return await getUint8ArrayView(thumbnailBlob);
}
/**
 * Export the contents of {@link canvas} as a JPEG blob, repeatedly lowering
 * the JPEG quality until the blob fits under {@link MAX_THUMBNAIL_SIZE}, the
 * quality floor ({@link MIN_QUALITY}) is reached, or a quality step stops
 * shrinking the output meaningfully.
 */
async function getCompressedThumbnailBlobFromCanvas(canvas: HTMLCanvasElement) {
    let blob: Blob = null;
    let previousSize = Number.MAX_SAFE_INTEGER;
    let quality = MAX_QUALITY;
    while (true) {
        if (blob) {
            previousSize = blob.size;
        }
        const attempt: Blob = await new Promise((resolve) =>
            canvas.toBlob((b) => resolve(b), "image/jpeg", quality),
        );
        // toBlob may yield null (e.g. zero-sized canvas); use an empty blob.
        blob = attempt ?? new Blob([]);
        quality -= 0.1;
        const isSmallEnough = blob.size <= MAX_THUMBNAIL_SIZE;
        const stoppedShrinking =
            percentageSizeDiff(blob.size, previousSize) <
            MIN_COMPRESSION_PERCENTAGE_SIZE_DIFF;
        if (quality < MIN_QUALITY || isSmallEnough || stoppedShrinking) {
            break;
        }
    }
    return blob;
}
/**
 * Percentage by which {@link newThumbnailSize} is smaller than
 * {@link oldThumbnailSize} (negative if it grew).
 */
function percentageSizeDiff(
    newThumbnailSize: number,
    oldThumbnailSize: number,
) {
    const shrinkage = oldThumbnailSize - newThumbnailSize;
    return (shrinkage * 100) / oldThumbnailSize;
}
/**
 * Scale {@link originalDimension} (maintaining aspect ratio) so that its
 * larger side becomes {@link maxDimension} pixels.
 *
 * Returns `{ width: 0, height: 0 }` for degenerate inputs (a zero-sized
 * original, or a result that rounds down to zero).
 */
function calculateThumbnailDimension(
    originalDimension: Dimension,
    maxDimension: number,
): Dimension {
    const { width, height } = originalDimension;
    if (width === 0 || height === 0) {
        return { width: 0, height: 0 };
    }
    // The smaller of the two per-axis factors keeps both sides within bounds.
    const scaleFactor = Math.min(maxDimension / width, maxDimension / height);
    const scaled = {
        width: Math.round(width * scaleFactor),
        height: Math.round(height * scaleFactor),
    };
    if (scaled.width === 0 || scaled.height === 0) {
        return { width: 0, height: 0 };
    }
    return scaled;
}

View file

@ -1,4 +1,4 @@
import { getFileNameSize } from "@/next/file";
import { ensureElectron } from "@/next/electron";
import log from "@/next/log";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { getDedicatedCryptoWorker } from "@ente/shared/crypto";
@ -8,10 +8,6 @@ import { Events, eventBus } from "@ente/shared/events";
import { Remote } from "comlink";
import { UPLOAD_RESULT, UPLOAD_STAGES } from "constants/upload";
import isElectron from "is-electron";
import {
cancelRemainingUploads,
updatePendingUploads,
} from "services/pending-uploads";
import {
getLocalPublicFiles,
getPublicCollectionUID,
@ -22,25 +18,29 @@ import { Collection } from "types/collection";
import { EncryptedEnteFile, EnteFile } from "types/file";
import { SetFiles } from "types/gallery";
import {
ElectronFile,
FileWithCollection,
ParsedMetadataJSON,
ParsedMetadataJSONMap,
PublicUploadProps,
type FileWithCollection2,
} from "types/upload";
import { ProgressUpdater } from "types/upload/ui";
import { decryptFile, getUserOwnedFiles, sortFiles } from "utils/file";
import {
areFileWithCollectionsSame,
segregateMetadataAndMediaFiles,
segregateMetadataAndMediaFiles2,
} from "utils/upload";
import { getLocalFiles } from "../fileService";
import {
clusterLivePhotoFiles,
getMetadataJSONMapKeyForJSON,
parseMetadataJSON,
} from "./metadataService";
import { default as UIService, default as uiService } from "./uiService";
import uploadCancelService from "./uploadCancelService";
import UploadService, { uploader } from "./uploadService";
import UploadService, { getFileName, uploader } from "./uploadService";
const MAX_CONCURRENT_UPLOADS = 4;
@ -49,9 +49,9 @@ class UploadManager {
ComlinkWorker<typeof DedicatedCryptoWorker>
>(MAX_CONCURRENT_UPLOADS);
private parsedMetadataJSONMap: ParsedMetadataJSONMap;
private filesToBeUploaded: FileWithCollection[];
private remainingFiles: FileWithCollection[] = [];
private failedFiles: FileWithCollection[];
private filesToBeUploaded: FileWithCollection2[];
private remainingFiles: FileWithCollection2[] = [];
private failedFiles: FileWithCollection2[];
private existingFiles: EnteFile[];
private setFiles: SetFiles;
private collections: Map<number, Collection>;
@ -153,7 +153,7 @@ class UploadManager {
if (mediaFiles.length) {
log.info(`clusterLivePhotoFiles started`);
const analysedMediaFiles =
await UploadService.clusterLivePhotoFiles(mediaFiles);
await clusterLivePhotoFiles(mediaFiles);
log.info(`clusterLivePhotoFiles ended`);
log.info(
`got live photos: ${
@ -204,37 +204,121 @@ class UploadManager {
}
}
private async parseMetadataJSONFiles(metadataFiles: FileWithCollection[]) {
public async queueFilesForUpload2(
filesWithCollectionToUploadIn: FileWithCollection2[],
collections: Collection[],
uploaderName?: string,
) {
try {
if (this.uploadInProgress) {
throw Error("can't run multiple uploads at once");
}
this.uploadInProgress = true;
await this.updateExistingFilesAndCollections(collections);
this.uploaderName = uploaderName;
log.info(
`received ${filesWithCollectionToUploadIn.length} files to upload`,
);
uiService.setFilenames(
new Map<number, string>(
filesWithCollectionToUploadIn.map((mediaFile) => [
mediaFile.localID,
UploadService.getAssetName(mediaFile),
]),
),
);
const { metadataJSONFiles, mediaFiles } =
segregateMetadataAndMediaFiles2(filesWithCollectionToUploadIn);
log.info(`has ${metadataJSONFiles.length} metadata json files`);
log.info(`has ${mediaFiles.length} media files`);
if (metadataJSONFiles.length) {
UIService.setUploadStage(
UPLOAD_STAGES.READING_GOOGLE_METADATA_FILES,
);
await this.parseMetadataJSONFiles(metadataJSONFiles);
UploadService.setParsedMetadataJSONMap(
this.parsedMetadataJSONMap,
);
}
if (mediaFiles.length) {
log.info(`clusterLivePhotoFiles started`);
const analysedMediaFiles =
await clusterLivePhotoFiles(mediaFiles);
log.info(`clusterLivePhotoFiles ended`);
log.info(
`got live photos: ${
mediaFiles.length !== analysedMediaFiles.length
}`,
);
uiService.setFilenames(
new Map<number, string>(
analysedMediaFiles.map((mediaFile) => [
mediaFile.localID,
UploadService.getAssetName(mediaFile),
]),
),
);
UIService.setHasLivePhoto(
mediaFiles.length !== analysedMediaFiles.length,
);
await this.uploadMediaFiles(analysedMediaFiles);
}
} catch (e) {
if (e.message === CustomError.UPLOAD_CANCELLED) {
if (isElectron()) {
this.remainingFiles = [];
await cancelRemainingUploads();
}
} else {
log.error("uploading failed with error", e);
throw e;
}
} finally {
UIService.setUploadStage(UPLOAD_STAGES.FINISH);
for (let i = 0; i < MAX_CONCURRENT_UPLOADS; i++) {
this.cryptoWorkers[i]?.terminate();
}
this.uploadInProgress = false;
}
try {
if (!UIService.hasFilesInResultList()) {
return true;
} else {
return false;
}
} catch (e) {
log.error(" failed to return shouldCloseProgressBar", e);
return false;
}
}
private async parseMetadataJSONFiles(metadataFiles: FileWithCollection2[]) {
try {
log.info(`parseMetadataJSONFiles function executed `);
UIService.reset(metadataFiles.length);
for (const { file, collectionID } of metadataFiles) {
const name = getFileName(file);
try {
if (uploadCancelService.isUploadCancelationRequested()) {
throw Error(CustomError.UPLOAD_CANCELLED);
}
log.info(
`parsing metadata json file ${getFileNameSize(file)}`,
);
log.info(`parsing metadata json file ${name}`);
const parsedMetadataJSON = await parseMetadataJSON(file);
if (parsedMetadataJSON) {
this.parsedMetadataJSONMap.set(
getMetadataJSONMapKeyForJSON(
collectionID,
file.name,
),
getMetadataJSONMapKeyForJSON(collectionID, name),
parsedMetadataJSON && { ...parsedMetadataJSON },
);
UIService.increaseFileUploaded();
}
log.info(
`successfully parsed metadata json file ${getFileNameSize(
file,
)}`,
);
log.info(`successfully parsed metadata json file ${name}`);
} catch (e) {
if (e.message === CustomError.UPLOAD_CANCELLED) {
throw e;
@ -242,9 +326,7 @@ class UploadManager {
// and don't break for subsequent files just log and move on
log.error("parsing failed for a file", e);
log.info(
`failed to parse metadata json file ${getFileNameSize(
file,
)} error: ${e.message}`,
`failed to parse metadata json file ${name} error: ${e.message}`,
);
}
}
@ -257,7 +339,7 @@ class UploadManager {
}
}
private async uploadMediaFiles(mediaFiles: FileWithCollection[]) {
private async uploadMediaFiles(mediaFiles: FileWithCollection2[]) {
log.info(`uploadMediaFiles called`);
this.filesToBeUploaded = [...this.filesToBeUploaded, ...mediaFiles];
@ -318,7 +400,7 @@ class UploadManager {
async postUploadTask(
fileUploadResult: UPLOAD_RESULT,
uploadedFile: EncryptedEnteFile | EnteFile | null,
fileWithCollection: FileWithCollection,
fileWithCollection: FileWithCollection2,
) {
try {
let decryptedFile: EnteFile;
@ -385,7 +467,7 @@ class UploadManager {
private async watchFolderCallback(
fileUploadResult: UPLOAD_RESULT,
fileWithCollection: FileWithCollection,
fileWithCollection: FileWithCollection2,
uploadedFile: EncryptedEnteFile,
) {
if (isElectron()) {
@ -429,7 +511,7 @@ class UploadManager {
}
private async updateElectronRemainingFiles(
fileWithCollection: FileWithCollection,
fileWithCollection: FileWithCollection2,
) {
if (isElectron()) {
this.remainingFiles = this.remainingFiles.filter(
@ -445,3 +527,46 @@ class UploadManager {
}
export default new UploadManager();
export const setToUploadCollection = async (collections: Collection[]) => {
let collectionName: string = null;
    /* A single collection suggests one of two things:
    1. the user uploaded to a single existing collection, or
    2. they created a single new collection to upload to
        (they may have had multiple folders, but chose to upload
        to one album).
    Saving the collection name when the upload collection count is 1
    records this choice, so that on the next upload we can directly
    start uploading to this collection.
    */
if (collections.length === 1) {
collectionName = collections[0].name;
}
await ensureElectron().setPendingUploadCollection(collectionName);
};
const updatePendingUploads = async (files: FileWithCollection2[]) => {
const paths = files
.map((file) =>
file.isLivePhoto
? [file.livePhotoAssets.image, file.livePhotoAssets.video]
: [file.file],
)
.flat()
.map((f) => getFilePathElectron(f));
await ensureElectron().setPendingUploadFiles("files", paths);
};
/**
* NOTE: a stop gap measure, only meant to be called by code that is running in
* the context of a desktop app initiated upload
*/
export const getFilePathElectron = (file: File | ElectronFile | string) =>
typeof file == "string" ? file : (file as ElectronFile).path;
const cancelRemainingUploads = async () => {
const electron = ensureElectron();
await electron.setPendingUploadCollection(undefined);
await electron.setPendingUploadFiles("zips", []);
await electron.setPendingUploadFiles("files", []);
};

View file

@ -1,4 +1,9 @@
import { convertBytesToHumanReadable, getFileNameSize } from "@/next/file";
import { encodeLivePhoto } from "@/media/live-photo";
import {
basename,
convertBytesToHumanReadable,
getFileNameSize,
} from "@/next/file";
import log from "@/next/log";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import {
@ -6,11 +11,11 @@ import {
EncryptionResult,
} from "@ente/shared/crypto/types";
import { CustomError, handleUploadError } from "@ente/shared/error";
import { sleep } from "@ente/shared/utils";
import { wait } from "@ente/shared/utils";
import { Remote } from "comlink";
import { FILE_TYPE } from "constants/file";
import {
FILE_READER_CHUNK_SIZE,
MAX_FILE_SIZE_SUPPORTED,
MULTIPART_PART_SIZE,
UPLOAD_RESULT,
} from "constants/upload";
@ -30,7 +35,6 @@ import {
ExtractMetadataResult,
FileInMemory,
FileTypeInfo,
FileWithCollection,
FileWithMetadata,
Logger,
ParsedMetadataJSON,
@ -41,6 +45,9 @@ import {
UploadFile,
UploadURL,
isDataStream,
type FileWithCollection2,
type LivePhotoAssets,
type UploadAsset2,
} from "types/upload";
import {
getNonEmptyMagicMetadataProps,
@ -55,7 +62,6 @@ import {
import { getFileType } from "../typeDetectionService";
import {
MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT,
clusterLivePhotoFiles,
extractLivePhotoMetadata,
extractMetadata,
getClippedMetadataJSONMapKeyForFile,
@ -63,11 +69,10 @@ import {
getLivePhotoName,
getLivePhotoSize,
getMetadataJSONMapKeyForFile,
readLivePhoto,
} from "./metadataService";
import { uploadStreamUsingMultipart } from "./multiPartUploadService";
import publicUploadHttpClient from "./publicUploadHttpClient";
import { generateThumbnail } from "./thumbnailService";
import { generateThumbnail } from "./thumbnail";
import UIService from "./uiService";
import uploadCancelService from "./uploadCancelService";
import UploadHttpClient from "./uploadHttpClient";
@ -127,10 +132,10 @@ class UploadService {
: getFileSize(file);
}
getAssetName({ isLivePhoto, file, livePhotoAssets }: UploadAsset) {
getAssetName({ isLivePhoto, file, livePhotoAssets }: UploadAsset2) {
return isLivePhoto
? getLivePhotoName(livePhotoAssets)
: getFilename(file);
: getFileName(file);
}
getAssetFileType({ isLivePhoto, file, livePhotoAssets }: UploadAsset) {
@ -150,7 +155,7 @@ class UploadService {
async extractAssetMetadata(
worker: Remote<DedicatedCryptoWorker>,
{ isLivePhoto, file, livePhotoAssets }: UploadAsset,
{ isLivePhoto, file, livePhotoAssets }: UploadAsset2,
collectionID: number,
fileTypeInfo: FileTypeInfo,
): Promise<ExtractMetadataResult> {
@ -171,10 +176,6 @@ class UploadService {
);
}
clusterLivePhotoFiles(mediaFiles: FileWithCollection[]) {
return clusterLivePhotoFiles(mediaFiles);
}
constructPublicMagicMetadata(
publicMagicMetadataProps: FilePublicMagicMetadataProps,
) {
@ -361,9 +362,8 @@ function getFileSize(file: File | ElectronFile) {
return file.size;
}
function getFilename(file: File | ElectronFile) {
return file.name;
}
export const getFileName = (file: File | ElectronFile | string) =>
typeof file == "string" ? basename(file) : file.name;
async function readFile(
fileTypeInfo: FileTypeInfo,
@ -399,24 +399,54 @@ async function readFile(
};
}
async function readLivePhoto(
fileTypeInfo: FileTypeInfo,
livePhotoAssets: LivePhotoAssets,
) {
const { thumbnail, hasStaticThumbnail } = await generateThumbnail(
livePhotoAssets.image,
{
exactType: fileTypeInfo.imageType,
fileType: FILE_TYPE.IMAGE,
},
);
const imageData = await getUint8ArrayView(livePhotoAssets.image);
const videoData = await getUint8ArrayView(livePhotoAssets.video);
return {
filedata: await encodeLivePhoto({
imageFileName: livePhotoAssets.image.name,
imageData,
videoFileName: livePhotoAssets.video.name,
videoData,
}),
thumbnail,
hasStaticThumbnail,
};
}
export async function extractFileMetadata(
worker: Remote<DedicatedCryptoWorker>,
parsedMetadataJSONMap: ParsedMetadataJSONMap,
collectionID: number,
fileTypeInfo: FileTypeInfo,
rawFile: File | ElectronFile,
rawFile: File | ElectronFile | string,
): Promise<ExtractMetadataResult> {
let key = getMetadataJSONMapKeyForFile(collectionID, rawFile.name);
const rawFileName = getFileName(rawFile);
let key = getMetadataJSONMapKeyForFile(collectionID, rawFileName);
let googleMetadata: ParsedMetadataJSON = parsedMetadataJSONMap.get(key);
if (!googleMetadata && key.length > MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT) {
key = getClippedMetadataJSONMapKeyForFile(collectionID, rawFile.name);
key = getClippedMetadataJSONMapKeyForFile(collectionID, rawFileName);
googleMetadata = parsedMetadataJSONMap.get(key);
}
const { metadata, publicMagicMetadata } = await extractMetadata(
worker,
rawFile,
/* TODO(MR): ElectronFile changes */
rawFile as File | ElectronFile,
fileTypeInfo,
);
@ -533,22 +563,26 @@ interface UploadResponse {
export async function uploader(
worker: Remote<DedicatedCryptoWorker>,
existingFiles: EnteFile[],
fileWithCollection: FileWithCollection,
fileWithCollection: FileWithCollection2,
uploaderName: string,
): Promise<UploadResponse> {
const { collection, localID, ...uploadAsset } = fileWithCollection;
const { collection, localID, ...uploadAsset2 } = fileWithCollection;
/* TODO(MR): ElectronFile changes */
const uploadAsset = uploadAsset2 as UploadAsset;
const fileNameSize = `${uploadService.getAssetName(
fileWithCollection,
)}_${convertBytesToHumanReadable(uploadService.getAssetSize(uploadAsset))}`;
log.info(`uploader called for ${fileNameSize}`);
UIService.setFileProgress(localID, 0);
await sleep(0);
await wait(0);
let fileTypeInfo: FileTypeInfo;
let fileSize: number;
try {
const maxFileSize = 4 * 1024 * 1024 * 1024; // 4 GB
fileSize = uploadService.getAssetSize(uploadAsset);
if (fileSize >= MAX_FILE_SIZE_SUPPORTED) {
if (fileSize >= maxFileSize) {
return { fileUploadResult: UPLOAD_RESULT.TOO_LARGE };
}
log.info(`getting filetype for ${fileNameSize}`);

View file

@ -1,115 +0,0 @@
import log from "@/next/log";
import { promiseWithTimeout } from "@ente/shared/utils";
import QueueProcessor from "@ente/shared/utils/queueProcessor";
import { generateTempName } from "@ente/shared/utils/temp";
import { createFFmpeg, FFmpeg } from "ffmpeg-wasm";
import { getUint8ArrayView } from "services/readerService";
const INPUT_PATH_PLACEHOLDER = "INPUT";
const FFMPEG_PLACEHOLDER = "FFMPEG";
const OUTPUT_PATH_PLACEHOLDER = "OUTPUT";
const FFMPEG_EXECUTION_WAIT_TIME = 30 * 1000;
export class WasmFFmpeg {
private ffmpeg: FFmpeg;
private ready: Promise<void> = null;
private ffmpegTaskQueue = new QueueProcessor<File>();
constructor() {
this.ffmpeg = createFFmpeg({
corePath: "/js/ffmpeg/ffmpeg-core.js",
mt: false,
});
this.ready = this.init();
}
private async init() {
if (!this.ffmpeg.isLoaded()) {
await this.ffmpeg.load();
}
}
async run(
cmd: string[],
inputFile: File,
outputFileName: string,
dontTimeout = false,
) {
const response = this.ffmpegTaskQueue.queueUpRequest(() => {
if (dontTimeout) {
return this.execute(cmd, inputFile, outputFileName);
} else {
return promiseWithTimeout<File>(
this.execute(cmd, inputFile, outputFileName),
FFMPEG_EXECUTION_WAIT_TIME,
);
}
});
try {
return await response.promise;
} catch (e) {
log.error("ffmpeg run failed", e);
throw e;
}
}
private async execute(
cmd: string[],
inputFile: File,
outputFileName: string,
) {
let tempInputFilePath: string;
let tempOutputFilePath: string;
try {
await this.ready;
const extension = getFileExtension(inputFile.name);
const tempNameSuffix = extension ? `input.${extension}` : "input";
tempInputFilePath = `${generateTempName(10, tempNameSuffix)}`;
this.ffmpeg.FS(
"writeFile",
tempInputFilePath,
await getUint8ArrayView(inputFile),
);
tempOutputFilePath = `${generateTempName(10, outputFileName)}`;
cmd = cmd.map((cmdPart) => {
if (cmdPart === FFMPEG_PLACEHOLDER) {
return "";
} else if (cmdPart === INPUT_PATH_PLACEHOLDER) {
return tempInputFilePath;
} else if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
return tempOutputFilePath;
} else {
return cmdPart;
}
});
log.info(`${cmd}`);
await this.ffmpeg.run(...cmd);
return new File(
[this.ffmpeg.FS("readFile", tempOutputFilePath)],
outputFileName,
);
} finally {
try {
this.ffmpeg.FS("unlink", tempInputFilePath);
} catch (e) {
log.error("unlink input file failed", e);
}
try {
this.ffmpeg.FS("unlink", tempOutputFilePath);
} catch (e) {
log.error("unlink output file failed", e);
}
}
}
}
function getFileExtension(filename: string) {
const lastDotPosition = filename.lastIndexOf(".");
if (lastDotPosition === -1) return null;
else {
return filename.slice(lastDotPosition + 1);
}
}

View file

@ -11,12 +11,13 @@ import type {
FolderWatch,
FolderWatchSyncedFile,
} from "@/next/types/ipc";
import { ensureString } from "@/utils/ensure";
import { UPLOAD_RESULT } from "constants/upload";
import debounce from "debounce";
import uploadManager from "services/upload/uploadManager";
import { Collection } from "types/collection";
import { EncryptedEnteFile } from "types/file";
import { ElectronFile, FileWithCollection } from "types/upload";
import { type FileWithCollection2 } from "types/upload";
import { groupFilesBasedOnCollectionID } from "utils/file";
import { isHiddenFile } from "utils/upload";
import { removeFromCollection } from "./collectionService";
@ -44,7 +45,16 @@ class FolderWatcher {
private uploadRunning = false;
/** `true` if we are temporarily paused to let a user upload go through. */
private isPaused = false;
private filePathToUploadedFileIDMap = new Map<string, EncryptedEnteFile>();
/**
* A map from file paths to an Ente file for files that were uploaded (or
* symlinked) as part of the most recent upload attempt.
*/
private uploadedFileForPath = new Map<string, EncryptedEnteFile>();
/**
* A set of file paths that could not be uploaded in the most recent upload
* attempt. These are the uploads that failed due to a permanent error that
* a retry will not fix.
*/
private unUploadableFilePaths = new Set<string>();
/**
@ -312,9 +322,11 @@ class FolderWatcher {
*/
async onFileUpload(
fileUploadResult: UPLOAD_RESULT,
fileWithCollection: FileWithCollection,
fileWithCollection: FileWithCollection2,
file: EncryptedEnteFile,
) {
        // The files we get here will have fileWithCollection.file as a
        // string, not as a File or an ElectronFile.
if (
[
UPLOAD_RESULT.ADDED_SYMLINK,
@ -324,19 +336,17 @@ class FolderWatcher {
].includes(fileUploadResult)
) {
if (fileWithCollection.isLivePhoto) {
this.filePathToUploadedFileIDMap.set(
(fileWithCollection.livePhotoAssets.image as ElectronFile)
.path,
this.uploadedFileForPath.set(
ensureString(fileWithCollection.livePhotoAssets.image),
file,
);
this.filePathToUploadedFileIDMap.set(
(fileWithCollection.livePhotoAssets.video as ElectronFile)
.path,
this.uploadedFileForPath.set(
ensureString(fileWithCollection.livePhotoAssets.video),
file,
);
} else {
this.filePathToUploadedFileIDMap.set(
(fileWithCollection.file as ElectronFile).path,
this.uploadedFileForPath.set(
ensureString(fileWithCollection.file),
file,
);
}
@ -347,16 +357,14 @@ class FolderWatcher {
) {
if (fileWithCollection.isLivePhoto) {
this.unUploadableFilePaths.add(
(fileWithCollection.livePhotoAssets.image as ElectronFile)
.path,
ensureString(fileWithCollection.livePhotoAssets.image),
);
this.unUploadableFilePaths.add(
(fileWithCollection.livePhotoAssets.video as ElectronFile)
.path,
ensureString(fileWithCollection.livePhotoAssets.video),
);
} else {
this.unUploadableFilePaths.add(
(fileWithCollection.file as ElectronFile).path,
ensureString(fileWithCollection.file),
);
}
}
@ -367,7 +375,7 @@ class FolderWatcher {
* {@link upload} get uploaded.
*/
async allFileUploadsDone(
filesWithCollection: FileWithCollection[],
filesWithCollection: FileWithCollection2[],
collections: Collection[],
) {
const electron = ensureElectron();
@ -383,15 +391,7 @@ class FolderWatcher {
);
const { syncedFiles, ignoredFiles } =
this.parseAllFileUploadsDone(filesWithCollection);
log.debug(() =>
JSON.stringify({
f: "watch/allFileUploadsDone",
syncedFiles,
ignoredFiles,
}),
);
this.deduceSyncedAndIgnored(filesWithCollection);
if (syncedFiles.length > 0)
await electron.watch.updateSyncedFiles(
@ -411,81 +411,55 @@ class FolderWatcher {
this.debouncedRunNextEvent();
}
private parseAllFileUploadsDone(filesWithCollection: FileWithCollection[]) {
private deduceSyncedAndIgnored(filesWithCollection: FileWithCollection2[]) {
const syncedFiles: FolderWatch["syncedFiles"] = [];
const ignoredFiles: FolderWatch["ignoredFiles"] = [];
const markSynced = (file: EncryptedEnteFile, path: string) => {
syncedFiles.push({
path,
uploadedFileID: file.id,
collectionID: file.collectionID,
});
this.uploadedFileForPath.delete(path);
};
const markIgnored = (path: string) => {
log.debug(() => `Permanently ignoring file at ${path}`);
ignoredFiles.push(path);
this.unUploadableFilePaths.delete(path);
};
for (const fileWithCollection of filesWithCollection) {
if (fileWithCollection.isLivePhoto) {
const imagePath = (
fileWithCollection.livePhotoAssets.image as ElectronFile
).path;
const videoPath = (
fileWithCollection.livePhotoAssets.video as ElectronFile
).path;
const imagePath = ensureString(
fileWithCollection.livePhotoAssets.image,
);
const videoPath = ensureString(
fileWithCollection.livePhotoAssets.video,
);
if (
this.filePathToUploadedFileIDMap.has(imagePath) &&
this.filePathToUploadedFileIDMap.has(videoPath)
) {
const imageFile = {
path: imagePath,
uploadedFileID:
this.filePathToUploadedFileIDMap.get(imagePath).id,
collectionID:
this.filePathToUploadedFileIDMap.get(imagePath)
.collectionID,
};
const videoFile = {
path: videoPath,
uploadedFileID:
this.filePathToUploadedFileIDMap.get(videoPath).id,
collectionID:
this.filePathToUploadedFileIDMap.get(videoPath)
.collectionID,
};
syncedFiles.push(imageFile);
syncedFiles.push(videoFile);
log.debug(
() =>
`added image ${JSON.stringify(
imageFile,
)} and video file ${JSON.stringify(
videoFile,
)} to uploadedFiles`,
);
const imageFile = this.uploadedFileForPath.get(imagePath);
const videoFile = this.uploadedFileForPath.get(videoPath);
if (imageFile && videoFile) {
markSynced(imageFile, imagePath);
markSynced(videoFile, videoPath);
} else if (
this.unUploadableFilePaths.has(imagePath) &&
this.unUploadableFilePaths.has(videoPath)
) {
ignoredFiles.push(imagePath);
ignoredFiles.push(videoPath);
log.debug(
() =>
`added image ${imagePath} and video file ${videoPath} to rejectedFiles`,
);
markIgnored(imagePath);
markIgnored(videoPath);
}
this.filePathToUploadedFileIDMap.delete(imagePath);
this.filePathToUploadedFileIDMap.delete(videoPath);
} else {
const filePath = (fileWithCollection.file as ElectronFile).path;
if (this.filePathToUploadedFileIDMap.has(filePath)) {
const file = {
path: filePath,
uploadedFileID:
this.filePathToUploadedFileIDMap.get(filePath).id,
collectionID:
this.filePathToUploadedFileIDMap.get(filePath)
.collectionID,
};
syncedFiles.push(file);
log.debug(() => `added file ${JSON.stringify(file)}`);
} else if (this.unUploadableFilePaths.has(filePath)) {
ignoredFiles.push(filePath);
log.debug(() => `added file ${filePath} to rejectedFiles`);
const path = ensureString(fileWithCollection.file);
const file = this.uploadedFileForPath.get(path);
if (file) {
markSynced(file, path);
} else if (this.unUploadableFilePaths.has(path)) {
markIgnored(path);
}
this.filePathToUploadedFileIDMap.delete(filePath);
}
}

View file

@ -105,6 +105,23 @@ export interface FileWithCollection extends UploadAsset {
collectionID?: number;
}
export interface UploadAsset2 {
isLivePhoto?: boolean;
file?: File | ElectronFile | string;
livePhotoAssets?: LivePhotoAssets2;
}
export interface LivePhotoAssets2 {
image: File | ElectronFile | string;
video: File | ElectronFile | string;
}
export interface FileWithCollection2 extends UploadAsset2 {
localID: number;
collection?: Collection;
collectionID?: number;
}
export type ParsedMetadataJSONMap = Map<string, ParsedMetadataJSON>;
export interface UploadURL {

View file

@ -1,30 +0,0 @@
import { haveWindow } from "@/next/env";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { Remote } from "comlink";
import { DedicatedConvertWorker } from "worker/convert.worker";
class ComlinkConvertWorker {
private comlinkWorkerInstance: Remote<DedicatedConvertWorker>;
async getInstance() {
if (!this.comlinkWorkerInstance) {
this.comlinkWorkerInstance =
await getDedicatedConvertWorker().remote;
}
return this.comlinkWorkerInstance;
}
}
export const getDedicatedConvertWorker = () => {
if (haveWindow()) {
const cryptoComlinkWorker = new ComlinkWorker<
typeof DedicatedConvertWorker
>(
"ente-convert-worker",
new Worker(new URL("worker/convert.worker.ts", import.meta.url)),
);
return cryptoComlinkWorker;
}
};
export default new ComlinkConvertWorker();

View file

@ -1,25 +0,0 @@
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { Remote } from "comlink";
import { DedicatedFFmpegWorker } from "worker/ffmpeg.worker";
class ComlinkFFmpegWorker {
private comlinkWorkerInstance: Promise<Remote<DedicatedFFmpegWorker>>;
async getInstance() {
if (!this.comlinkWorkerInstance) {
const comlinkWorker = getDedicatedFFmpegWorker();
this.comlinkWorkerInstance = comlinkWorker.remote;
}
return this.comlinkWorkerInstance;
}
}
const getDedicatedFFmpegWorker = () => {
const cryptoComlinkWorker = new ComlinkWorker<typeof DedicatedFFmpegWorker>(
"ente-ffmpeg-worker",
new Worker(new URL("worker/ffmpeg.worker.ts", import.meta.url)),
);
return cryptoComlinkWorker;
};
export default new ComlinkFFmpegWorker();

View file

@ -1,6 +1,6 @@
import { haveWindow } from "@/next/env";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { DedicatedMLWorker } from "worker/ml.worker";
import { type DedicatedMLWorker } from "worker/ml.worker";
export const getDedicatedMLWorker = (name: string) => {
if (haveWindow()) {

View file

@ -1,7 +1,7 @@
import { haveWindow } from "@/next/env";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { Remote } from "comlink";
import { DedicatedSearchWorker } from "worker/search.worker";
import { type DedicatedSearchWorker } from "worker/search.worker";
class ComlinkSearchWorker {
private comlinkWorkerInstance: Remote<DedicatedSearchWorker>;

View file

@ -1,67 +0,0 @@
import { validateAndGetCreationUnixTimeInMicroSeconds } from "@ente/shared/time";
import { NULL_LOCATION } from "constants/upload";
import { ParsedExtractedMetadata } from "types/upload";
/**
 * Metadata keys of interest in the key=value pairs emitted by ffmpeg.
 *
 * The "com.apple.quicktime.*" keys appear in media produced by Apple devices.
 */
enum MetadataTags {
    CREATION_TIME = "creation_time",
    APPLE_CONTENT_IDENTIFIER = "com.apple.quicktime.content.identifier",
    APPLE_LIVE_PHOTO_IDENTIFIER = "com.apple.quicktime.live-photo.auto",
    APPLE_CREATION_DATE = "com.apple.quicktime.creationdate",
    APPLE_LOCATION_ISO = "com.apple.quicktime.location.ISO6709",
    LOCATION = "location",
}
/**
 * Parse the UTF-8 encoded metadata that ffmpeg emits (one "key=value"
 * property per line) into a {@link ParsedExtractedMetadata}.
 */
export function parseFFmpegExtractedMetadata(encodedMetadata: Uint8Array) {
    const text = new TextDecoder().decode(encodedMetadata);
    // Keep only well-formed "key=value" lines (exactly one "=").
    const entries = text
        .split("\n")
        .map((line) => line.split("="))
        .filter((kv): kv is [string, string] => kv.length === 2);
    const metadataMap = Object.fromEntries(entries);

    const location = parseAppleISOLocation(
        metadataMap[MetadataTags.APPLE_LOCATION_ISO] ??
            metadataMap[MetadataTags.LOCATION],
    );
    const creationTime = parseCreationTime(
        metadataMap[MetadataTags.APPLE_CREATION_DATE] ??
            metadataMap[MetadataTags.CREATION_TIME],
    );

    const parsedMetadata: ParsedExtractedMetadata = {
        creationTime,
        location: {
            latitude: location.latitude,
            longitude: location.longitude,
        },
        width: null,
        height: null,
    };
    return parsedMetadata;
}
/**
 * Parse a latitude/longitude pair from an ISO 6709 location string (the
 * format used by the Apple QuickTime location metadata tag).
 *
 * @returns the parsed location, or {@link NULL_LOCATION} when
 * {@link isoLocation} is falsy or does not contain two signed decimals.
 */
function parseAppleISOLocation(isoLocation: string) {
    let location = NULL_LOCATION;
    if (isoLocation) {
        // Extract signed decimal numbers like "+12.34" or "-056.78".
        //
        // Fix: `match` returns null when there is no match; the previous
        // code called `.map` on it directly and would crash for a truthy
        // string without coordinates.
        const matches = isoLocation.match(/(\+|-)\d+\.*\d+/g);
        if (matches && matches.length >= 2) {
            const [latitude, longitude] = matches.map((x) => parseFloat(x));
            location = { latitude, longitude };
        }
    }
    return location;
}
/**
 * Convert a date/time string into epoch microseconds.
 *
 * @returns null when {@link creationTime} is falsy or invalid.
 */
function parseCreationTime(creationTime: string) {
    if (!creationTime) return null;
    return validateAndGetCreationUnixTimeInMicroSeconds(
        new Date(creationTime),
    );
}

View file

@ -1,14 +1,11 @@
import { decodeLivePhoto } from "@/media/live-photo";
import { convertBytesToHumanReadable } from "@/next/file";
import log from "@/next/log";
import type { Electron } from "@/next/types/ipc";
import { CustomErrorMessage, type Electron } from "@/next/types/ipc";
import { workerBridge } from "@/next/worker/worker-bridge";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { CustomError } from "@ente/shared/error";
import { isPlaybackPossible } from "@ente/shared/media/video-playback";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
import { User } from "@ente/shared/user/types";
import { downloadUsingAnchor } from "@ente/shared/utils";
import { downloadUsingAnchor, withTimeout } from "@ente/shared/utils";
import {
FILE_TYPE,
RAW_FORMATS,
@ -21,18 +18,14 @@ import {
import { t } from "i18next";
import isElectron from "is-electron";
import { moveToHiddenCollection } from "services/collectionService";
import DownloadManager, {
LivePhotoSourceURL,
SourceURLs,
} from "services/download";
import * as ffmpegService from "services/ffmpeg/ffmpegService";
import DownloadManager from "services/download";
import {
deleteFromTrash,
trashFiles,
updateFileMagicMetadata,
updateFilePublicMagicMetadata,
} from "services/fileService";
import heicConversionService from "services/heicConversionService";
import { heicToJPEG } from "services/heic-convert";
import { getFileType } from "services/typeDetectionService";
import { updateFileCreationDateInEXIF } from "services/upload/exifService";
import {
@ -55,8 +48,6 @@ import { isArchivedFile, updateMagicMetadata } from "utils/magicMetadata";
import { safeFileName } from "utils/native-fs";
import { writeStream } from "utils/native-stream";
const WAIT_TIME_IMAGE_CONVERSION = 30 * 1000;
export enum FILE_OPS_TYPE {
DOWNLOAD,
FIX_TIME,
@ -67,6 +58,20 @@ export enum FILE_OPS_TYPE {
DELETE_PERMANENTLY,
}
/** Mutable state shared by the functions in this module. */
class ModuleState {
    /**
     * This will be set to true if we get an error from the Node.js side of our
     * desktop app telling us that native JPEG conversion is not available for
     * the current OS/arch combination. That way, we can stop pestering it again
     * and again (saving an IPC round-trip).
     *
     * Note the double negative when it is used.
     */
    isNativeJPEGConversionNotAvailable = false;
}

// The shared instance.
const moduleState = new ModuleState();
export async function getUpdatedEXIFFileForDownload(
fileReader: FileReader,
file: EnteFile,
@ -271,234 +276,66 @@ export function generateStreamFromArrayBuffer(data: Uint8Array) {
});
}
export async function getRenderableFileURL(
file: EnteFile,
fileBlob: Blob,
originalFileURL: string,
forceConvert: boolean,
): Promise<SourceURLs> {
let srcURLs: SourceURLs["url"];
switch (file.metadata.fileType) {
case FILE_TYPE.IMAGE: {
const convertedBlob = await getRenderableImage(
file.metadata.title,
fileBlob,
);
const convertedURL = getFileObjectURL(
originalFileURL,
fileBlob,
convertedBlob,
);
srcURLs = convertedURL;
break;
}
case FILE_TYPE.LIVE_PHOTO: {
srcURLs = await getRenderableLivePhotoURL(
file,
fileBlob,
forceConvert,
);
break;
}
case FILE_TYPE.VIDEO: {
const convertedBlob = await getPlayableVideo(
file.metadata.title,
fileBlob,
forceConvert,
);
const convertedURL = getFileObjectURL(
originalFileURL,
fileBlob,
convertedBlob,
);
srcURLs = convertedURL;
break;
}
default: {
srcURLs = originalFileURL;
break;
}
}
let isOriginal: boolean;
if (file.metadata.fileType === FILE_TYPE.LIVE_PHOTO) {
isOriginal = false;
} else {
isOriginal = (srcURLs as string) === (originalFileURL as string);
}
return {
url: srcURLs,
isOriginal,
isRenderable:
file.metadata.fileType !== FILE_TYPE.LIVE_PHOTO && !!srcURLs,
type:
file.metadata.fileType === FILE_TYPE.LIVE_PHOTO
? "livePhoto"
: "normal",
};
}
/**
 * Return lazy getters that produce renderable object URLs for the image and
 * video components of the given live photo {@link file}.
 *
 * Each getter resolves to null if its conversion fails.
 */
async function getRenderableLivePhotoURL(
    file: EnteFile,
    fileBlob: Blob,
    forceConvert: boolean,
): Promise<LivePhotoSourceURL> {
    const livePhoto = await decodeLivePhoto(file.metadata.title, fileBlob);

    const imageURL = async () => {
        try {
            const converted = await getRenderableImage(
                livePhoto.imageFileName,
                new Blob([livePhoto.imageData]),
            );
            return URL.createObjectURL(converted);
        } catch (e) {
            // Ignore the failure; callers handle a null URL.
            return null;
        }
    };

    const videoURL = async () => {
        try {
            const converted = await getPlayableVideo(
                livePhoto.videoFileName,
                new Blob([livePhoto.videoData]),
                forceConvert,
                true,
            );
            return URL.createObjectURL(converted);
        } catch (e) {
            // Ignore the failure; callers handle a null URL.
            return null;
        }
    };

    return { image: imageURL, video: videoURL };
}
/**
 * Return a Blob of {@link videoBlob} that the browser can play back.
 *
 * If the browser can already play the given video (and {@link forceConvert}
 * is not set), it is returned as is. Otherwise we try to convert it to MP4
 * using ffmpeg, returning null when conversion is not possible in the
 * current context or fails.
 */
export async function getPlayableVideo(
    videoNameTitle: string,
    videoBlob: Blob,
    forceConvert = false,
    runOnWeb = false,
) {
    try {
        // Fix: revoke the probe URL once the playability check is done. The
        // previous code created a fresh object URL per call and never
        // released it, leaking the underlying blob reference.
        const probeURL = URL.createObjectURL(videoBlob);
        let isPlayable: boolean;
        try {
            isPlayable = await isPlaybackPossible(probeURL);
        } finally {
            URL.revokeObjectURL(probeURL);
        }
        if (isPlayable && !forceConvert) {
            return videoBlob;
        } else {
            if (!forceConvert && !runOnWeb && !isElectron()) {
                return null;
            }
            log.info(
                `video format not supported, converting it name: ${videoNameTitle}`,
            );
            const mp4ConvertedVideo = await ffmpegService.convertToMP4(
                new File([videoBlob], videoNameTitle),
            );
            log.info(`video successfully converted ${videoNameTitle}`);
            return new Blob([await mp4ConvertedVideo.arrayBuffer()]);
        }
    } catch (e) {
        log.error("video conversion failed", e);
        return null;
    }
}
export async function getRenderableImage(fileName: string, imageBlob: Blob) {
export const getRenderableImage = async (fileName: string, imageBlob: Blob) => {
let fileTypeInfo: FileTypeInfo;
try {
const tempFile = new File([imageBlob], fileName);
fileTypeInfo = await getFileType(tempFile);
log.debug(() => `file type info: ${JSON.stringify(fileTypeInfo)}`);
log.debug(
() =>
`Obtaining renderable image for ${JSON.stringify(fileTypeInfo)}`,
);
const { exactType } = fileTypeInfo;
let convertedImageBlob: Blob;
if (isRawFile(exactType)) {
try {
if (!isSupportedRawFormat(exactType)) {
throw Error(CustomError.UNSUPPORTED_RAW_FORMAT);
}
if (!isElectron()) {
throw new Error("not available on web");
}
log.info(
`RawConverter called for ${fileName}-${convertBytesToHumanReadable(
imageBlob.size,
)}`,
);
convertedImageBlob = await convertToJPEGInElectron(
imageBlob,
fileName,
);
log.info(`${fileName} successfully converted`);
} catch (e) {
try {
if (!isFileHEIC(exactType)) {
throw e;
}
log.info(
`HEICConverter called for ${fileName}-${convertBytesToHumanReadable(
imageBlob.size,
)}`,
);
convertedImageBlob =
await heicConversionService.convert(imageBlob);
log.info(`${fileName} successfully converted`);
} catch (e) {
throw Error(CustomError.NON_PREVIEWABLE_FILE);
}
}
return convertedImageBlob;
} else {
if (!isRawFile(exactType)) {
// Not something we know how to handle yet, give back the original.
return imageBlob;
}
let jpegBlob: Blob | undefined;
const available = !moduleState.isNativeJPEGConversionNotAvailable;
if (isElectron() && available && isSupportedRawFormat(exactType)) {
// If we're running in our desktop app, see if our Node.js layer can
// convert this into a JPEG using native tools for us.
try {
jpegBlob = await nativeConvertToJPEG(fileName, imageBlob);
} catch (e) {
if (e.message == CustomErrorMessage.NotAvailable) {
moduleState.isNativeJPEGConversionNotAvailable = true;
} else {
log.error("Native conversion to JPEG failed", e);
}
}
}
if (!jpegBlob && isFileHEIC(exactType)) {
// If it is an HEIC file, use our web HEIC converter.
jpegBlob = await heicToJPEG(imageBlob);
}
return jpegBlob;
} catch (e) {
log.error(
`Failed to get renderable image for ${JSON.stringify(fileTypeInfo)}`,
`Failed to get renderable image for ${JSON.stringify(fileTypeInfo ?? fileName)}`,
e,
);
return null;
return undefined;
}
}
};
const convertToJPEGInElectron = async (
fileBlob: Blob,
filename: string,
): Promise<Blob> => {
try {
const startTime = Date.now();
const inputFileData = new Uint8Array(await fileBlob.arrayBuffer());
const electron = globalThis.electron;
const convertedFileData = electron
? await electron.convertToJPEG(inputFileData, filename)
: await workerBridge.convertToJPEG(inputFileData, filename);
log.info(
`originalFileSize:${convertBytesToHumanReadable(
fileBlob?.size,
)},convertedFileSize:${convertBytesToHumanReadable(
convertedFileData?.length,
)}, native conversion time: ${Date.now() - startTime}ms `,
);
return new Blob([convertedFileData]);
} catch (e) {
if (
e.message !==
CustomError.WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED
) {
log.error("failed to convert to jpeg natively", e);
}
throw e;
}
/**
 * Convert {@link imageBlob} to a JPEG by asking the native layer of our
 * desktop app to do it for us.
 */
const nativeConvertToJPEG = async (fileName: string, imageBlob: Blob) => {
    const t0 = Date.now();
    const bytes = new Uint8Array(await imageBlob.arrayBuffer());
    const electron = globalThis.electron;
    // Workers don't have access to the `window` (and thus to the
    // `window.electron`) object, so when running in a worker we reroute the
    // request back to the main thread via the worker bridge.
    const jpegBytes = electron
        ? await electron.convertToJPEG(fileName, bytes)
        : await workerBridge.convertToJPEG(fileName, bytes);
    log.debug(() => `Native JPEG conversion took ${Date.now() - t0} ms`);
    return new Blob([jpegBytes]);
};
export function isFileHEIC(exactType: string) {
@ -859,7 +696,6 @@ export const copyFileToClipboard = async (fileUrl: string) => {
const image = new Image();
const blobPromise = new Promise<Blob>((resolve, reject) => {
let timeout: NodeJS.Timeout = null;
try {
image.setAttribute("src", fileUrl);
image.onload = () => {
@ -873,26 +709,17 @@ export const copyFileToClipboard = async (fileUrl: string) => {
"image/png",
1,
);
clearTimeout(timeout);
};
} catch (e) {
log.error("failed to copy to clipboard", e);
log.error("Failed to copy to clipboard", e);
reject(e);
} finally {
clearTimeout(timeout);
}
timeout = setTimeout(
() => reject(new Error("Operation timed out")),
WAIT_TIME_IMAGE_CONVERSION,
);
});
const { ClipboardItem } = window;
const blob = await withTimeout(blobPromise, 30 * 1000);
await navigator.clipboard
.write([new ClipboardItem({ "image/png": blobPromise })])
.catch((e) => log.error("failed to copy to clipboard", e));
const { ClipboardItem } = window;
await navigator.clipboard.write([new ClipboardItem({ "image/png": blob })]);
};
export function getLatestVersionFiles(files: EnteFile[]) {
@ -1061,16 +888,3 @@ const fixTimeHelper = async (
) => {
setFixCreationTimeAttributes({ files: selectedFiles });
};
/**
 * Return a URL that can be used to show {@link convertedBlob}.
 *
 * Reuses {@link originalFileURL} when no conversion actually happened (the
 * converted blob is the original blob), and returns null when there is no
 * converted blob at all.
 */
const getFileObjectURL = (
    originalFileURL: string,
    originalBlob: Blob,
    convertedBlob: Blob,
) => {
    if (!convertedBlob) return null;
    if (convertedBlob === originalBlob) return originalFileURL;
    return URL.createObjectURL(convertedBlob);
};

View file

@ -1,10 +1,15 @@
import { basename, dirname } from "@/next/file";
import { FILE_TYPE } from "constants/file";
import { A_SEC_IN_MICROSECONDS, PICKED_UPLOAD_TYPE } from "constants/upload";
import { PICKED_UPLOAD_TYPE } from "constants/upload";
import isElectron from "is-electron";
import { exportMetadataDirectoryName } from "services/export";
import { EnteFile } from "types/file";
import { ElectronFile, FileWithCollection, Metadata } from "types/upload";
import {
ElectronFile,
FileWithCollection,
Metadata,
type FileWithCollection2,
} from "types/upload";
const TYPE_JSON = "json";
const DEDUPE_COLLECTION = new Set(["icloud library", "icloudlibrary"]);
@ -40,12 +45,13 @@ export function areFilesSame(
* precision of file times to prevent timing attacks and fingerprinting.
* Context: https://developer.mozilla.org/en-US/docs/Web/API/File/lastModified#reduced_time_precision
*/
const oneSecond = 1e6;
if (
existingFile.fileType === newFile.fileType &&
Math.abs(existingFile.creationTime - newFile.creationTime) <
A_SEC_IN_MICROSECONDS &&
oneSecond &&
Math.abs(existingFile.modificationTime - newFile.modificationTime) <
A_SEC_IN_MICROSECONDS &&
oneSecond &&
existingFile.title === newFile.title
) {
return true;
@ -95,9 +101,26 @@ export function segregateMetadataAndMediaFiles(
return { mediaFiles, metadataJSONFiles };
}
/**
 * Split the given files into media files and (Google takeout style) metadata
 * JSON files.
 */
export function segregateMetadataAndMediaFiles2(
    filesWithCollectionToUpload: FileWithCollection2[],
) {
    const metadataJSONFiles: FileWithCollection2[] = [];
    const mediaFiles: FileWithCollection2[] = [];
    for (const fileWithCollection of filesWithCollectionToUpload) {
        const { file } = fileWithCollection;
        // Strings are paths from the desktop app; otherwise use the name of
        // the (web) File.
        const name = typeof file == "string" ? file : file.name;
        // NOTE(review): this matches any name that ends in "json", not just
        // ".json" — confirm whether that looseness is intentional.
        if (name.toLowerCase().endsWith(TYPE_JSON)) {
            metadataJSONFiles.push(fileWithCollection);
        } else {
            mediaFiles.push(fileWithCollection);
        }
    }
    return { mediaFiles, metadataJSONFiles };
}
export function areFileWithCollectionsSame(
firstFile: FileWithCollection,
secondFile: FileWithCollection,
firstFile: FileWithCollection2,
secondFile: FileWithCollection2,
): boolean {
return firstFile.localID === secondFile.localID;
}
@ -176,11 +199,15 @@ export function getImportSuggestion(
// b => [e,f,g],
// c => [h, i]]
export function groupFilesBasedOnParentFolder(
toUploadFiles: File[] | ElectronFile[],
toUploadFiles: File[] | ElectronFile[] | string[],
) {
const collectionNameToFilesMap = new Map<string, (File | ElectronFile)[]>();
const collectionNameToFilesMap = new Map<
string,
File[] | ElectronFile[] | string[]
>();
for (const file of toUploadFiles) {
const filePath = file["path"] as string;
const filePath =
typeof file == "string" ? file : (file["path"] as string);
let folderPath = filePath.substring(0, filePath.lastIndexOf("/"));
// If the parent folder of a file is "metadata"
@ -200,17 +227,25 @@ export function groupFilesBasedOnParentFolder(
if (!collectionNameToFilesMap.has(folderName)) {
collectionNameToFilesMap.set(folderName, []);
}
collectionNameToFilesMap.get(folderName).push(file);
// TODO: Remove the cast
collectionNameToFilesMap.get(folderName).push(file as any);
}
return collectionNameToFilesMap;
}
export function filterOutSystemFiles(files: File[] | ElectronFile[]) {
export function filterOutSystemFiles(
files: File[] | ElectronFile[] | string[] | undefined | null,
) {
if (!files) return files;
if (files[0] instanceof File) {
const browserFiles = files as File[];
return browserFiles.filter((file) => {
return !isSystemFile(file);
});
} else if (typeof files[0] == "string") {
const filePaths = files as string[];
return filePaths.filter((path) => !isHiddenFile(path));
} else {
const electronFiles = files as ElectronFile[];
return electronFiles.filter((file) => {

View file

@ -1,4 +1,4 @@
import { sleep } from "@ente/shared/utils";
import { wait } from "@ente/shared/utils";
const retrySleepTimeInMilliSeconds = [2000, 5000, 10000];
@ -18,7 +18,7 @@ export async function retryHTTPCall(
checkForBreakingError(e);
}
if (attemptNumber < retrySleepTimeInMilliSeconds.length) {
await sleep(retrySleepTimeInMilliSeconds[attemptNumber]);
await wait(retrySleepTimeInMilliSeconds[attemptNumber]);
return await retrier(func, attemptNumber + 1);
} else {
throw e;

View file

@ -1,15 +1,117 @@
import * as Comlink from "comlink";
import { WasmFFmpeg } from "services/wasm/ffmpeg";
import { nameAndExtension } from "@/next/file";
import log from "@/next/log";
import { withTimeout } from "@ente/shared/utils";
import QueueProcessor from "@ente/shared/utils/queueProcessor";
import { generateTempName } from "@ente/shared/utils/temp";
import { expose } from "comlink";
import {
ffmpegPathPlaceholder,
inputPathPlaceholder,
outputPathPlaceholder,
} from "constants/ffmpeg";
import { FFmpeg, createFFmpeg } from "ffmpeg-wasm";
import { getUint8ArrayView } from "services/readerService";
export class DedicatedFFmpegWorker {
wasmFFmpeg: WasmFFmpeg;
private wasmFFmpeg: WasmFFmpeg;
constructor() {
this.wasmFFmpeg = new WasmFFmpeg();
}
run(cmd, inputFile, outputFileName, dontTimeout) {
return this.wasmFFmpeg.run(cmd, inputFile, outputFileName, dontTimeout);
    /**
     * Execute a ffmpeg {@link cmd}.
     *
     * This is a sibling of {@link ffmpegExec} in ipc.ts exposed by the desktop
     * app. See [Note: ffmpeg in Electron].
     */
run(cmd, inputFile, outputFileName, timeoutMS) {
return this.wasmFFmpeg.run(cmd, inputFile, outputFileName, timeoutMS);
}
}
Comlink.expose(DedicatedFFmpegWorker, self);
expose(DedicatedFFmpegWorker, self);
/**
 * A wrapper over the wasm build of ffmpeg.
 *
 * Commands are serialized through a queue and run one at a time.
 */
export class WasmFFmpeg {
    private ffmpeg: FFmpeg;
    // Resolves once the wasm module has been loaded.
    private ready: Promise<void> = null;
    // All commands funnel through this queue so only one runs at a time.
    private ffmpegTaskQueue = new QueueProcessor<File>();
    constructor() {
        this.ffmpeg = createFFmpeg({
            corePath: "/js/ffmpeg/ffmpeg-core.js",
            mt: false,
        });
        this.ready = this.init();
    }
    private async init() {
        if (!this.ffmpeg.isLoaded()) {
            await this.ffmpeg.load();
        }
    }
    /**
     * Run the given ffmpeg {@link cmd} against {@link inputFile}, returning
     * the output as a File named {@link outputFileName}.
     *
     * @param timeoutMS if truthy, the queued command fails with a timeout
     * error when it takes longer than this many milliseconds.
     */
    async run(
        cmd: string[],
        inputFile: File,
        outputFileName: string,
        timeoutMS,
    ) {
        const exec = () => this.execute(cmd, inputFile, outputFileName);
        const request = this.ffmpegTaskQueue.queueUpRequest(() =>
            timeoutMS ? withTimeout<File>(exec(), timeoutMS) : exec(),
        );
        return await request.promise;
    }
    private async execute(
        cmd: string[],
        inputFile: File,
        outputFileName: string,
    ) {
        let tempInputFilePath: string;
        let tempOutputFilePath: string;
        try {
            await this.ready;
            // Write the input into ffmpeg's in-memory FS under a randomized
            // name that retains the original extension.
            const [, extension] = nameAndExtension(inputFile.name);
            const tempNameSuffix = extension ? `input.${extension}` : "input";
            tempInputFilePath = `${generateTempName(10, tempNameSuffix)}`;
            this.ffmpeg.FS(
                "writeFile",
                tempInputFilePath,
                await getUint8ArrayView(inputFile),
            );
            tempOutputFilePath = `${generateTempName(10, outputFileName)}`;
            // Substitute the placeholder tokens with the actual paths. The
            // ffmpeg-path placeholder becomes "" since the wasm build does
            // not take its own path as an argument.
            cmd = cmd.map((cmdPart) => {
                if (cmdPart === ffmpegPathPlaceholder) {
                    return "";
                } else if (cmdPart === inputPathPlaceholder) {
                    return tempInputFilePath;
                } else if (cmdPart === outputPathPlaceholder) {
                    return tempOutputFilePath;
                } else {
                    return cmdPart;
                }
            });
            log.info(`${cmd}`);
            await this.ffmpeg.run(...cmd);
            return new File(
                [this.ffmpeg.FS("readFile", tempOutputFilePath)],
                outputFileName,
            );
        } finally {
            // Best effort cleanup of the temporary files. The paths may be
            // undefined if we failed before creating them, in which case the
            // unlink throws and is logged.
            try {
                this.ffmpeg.FS("unlink", tempInputFilePath);
            } catch (e) {
                log.error("unlink input file failed", e);
            }
            try {
                this.ffmpeg.FS("unlink", tempOutputFilePath);
            } catch (e) {
                log.error("unlink output file failed", e);
            }
        }
    }
}

View file

@ -1,21 +1,21 @@
import * as Comlink from "comlink";
import { expose } from "comlink";
import HeicConvert from "heic-convert";
import { getUint8ArrayView } from "services/readerService";
export class DedicatedConvertWorker {
async convertHEICToJPEG(fileBlob: Blob) {
return convertHEICToJPEG(fileBlob);
/** A web worker exposing HEIC → JPEG conversion. */
export class DedicatedHEICConvertWorker {
    async heicToJPEG(heicBlob: Blob) {
        return heicToJPEG(heicBlob);
    }
}
Comlink.expose(DedicatedConvertWorker, self);
expose(DedicatedHEICConvertWorker, self);
/**
* Convert a HEIC file to a JPEG file.
*
* Both the input and output are blobs.
*/
export const convertHEICToJPEG = async (heicBlob: Blob): Promise<Blob> => {
export const heicToJPEG = async (heicBlob: Blob): Promise<Blob> => {
const filedata = await getUint8ArrayView(heicBlob);
const result = await HeicConvert({ buffer: filedata, format: "JPEG" });
const convertedFileData = new Uint8Array(result);

View file

@ -6,7 +6,7 @@ import FormPaperFooter from "@ente/shared/components/Form/FormPaper/Footer";
import LinkButton from "@ente/shared/components/LinkButton";
import SubmitButton from "@ente/shared/components/SubmitButton";
import { LS_KEYS, getData, setData } from "@ente/shared/storage/localStorage";
import { sleep } from "@ente/shared/utils";
import { wait } from "@ente/shared/utils";
import { Alert, Box, TextField } from "@mui/material";
import { Formik, FormikHelpers } from "formik";
import { t } from "i18next";
@ -59,7 +59,7 @@ function ChangeEmailForm({ appName }: PageProps) {
setData(LS_KEYS.USER, { ...getData(LS_KEYS.USER), email });
setLoading(false);
setSuccess(true);
await sleep(1000);
await wait(1000);
goToApp();
} catch (e) {
setLoading(false);

View file

@ -9,7 +9,7 @@ import {
VerticallyCentered,
} from "@ente/shared/components/Container";
import SubmitButton from "@ente/shared/components/SubmitButton";
import { sleep } from "@ente/shared/utils";
import { wait } from "@ente/shared/utils";
import { Box, Typography } from "@mui/material";
interface formValues {
@ -33,7 +33,7 @@ export default function VerifyTwoFactor(props: Props) {
const markSuccessful = async () => {
setWaiting(false);
setSuccess(true);
await sleep(1000);
await wait(1000);
};
const submitForm = async (

View file

@ -191,32 +191,86 @@ export interface Electron {
isDir: (dirPath: string) => Promise<boolean>;
};
/*
* TODO: AUDIT below this - Some of the types we use below are not copyable
* across process boundaries, and such functions will (expectedly) fail at
* runtime. For such functions, find an efficient alternative or refactor
* the dataflow.
*/
// - Conversion
/**
* Try to convert an arbitrary image into JPEG using native layer tools.
*
* The behaviour is OS dependent. On macOS we use the `sips` utility, and on
* some Linux architectures we use an ImageMagick executable bundled with
* our desktop app.
*
* In other cases (primarily Windows), where native JPEG conversion is not
* yet possible, this function will throw an error with the
* {@link CustomErrorMessage.NotAvailable} message.
*
* @param fileName The name of the file whose data we're being given.
* @param imageData The raw image data (the contents of the image file).
* @returns JPEG data of the converted image.
*/
convertToJPEG: (
fileData: Uint8Array,
filename: string,
fileName: string,
imageData: Uint8Array,
) => Promise<Uint8Array>;
/**
* Generate a JPEG thumbnail for the given image.
*
* The behaviour is OS dependent. On macOS we use the `sips` utility, and on
* some Linux architectures we use an ImageMagick executable bundled with
* our desktop app.
*
* In other cases (primarily Windows), where native thumbnail generation is
* not yet possible, this function will throw an error with the
* {@link CustomErrorMessage.NotAvailable} message.
*
* @param inputFile The file whose thumbnail we want.
* @param maxDimension The maximum width or height of the generated
* thumbnail.
* @param maxSize Maximum size (in bytes) of the generated thumbnail.
* @returns JPEG data of the generated thumbnail.
*/
generateImageThumbnail: (
inputFile: File | ElectronFile,
maxDimension: number,
maxSize: number,
) => Promise<Uint8Array>;
runFFmpegCmd: (
cmd: string[],
inputFile: File | ElectronFile,
/**
* Execute a ffmpeg {@link command}.
*
* This executes the command using the ffmpeg executable we bundle with our
* desktop app. There is also a ffmpeg wasm implementation that we use when
* running on the web, it also has a sibling function with the same
* parameters. See [Note: ffmpeg in Electron].
*
* @param command An array of strings, each representing one positional
* parameter in the command to execute. Placeholders for the input, output
* and ffmpeg's own path are replaced before executing the command
* (respectively {@link inputPathPlaceholder},
* {@link outputPathPlaceholder}, {@link ffmpegPathPlaceholder}).
*
* @param inputDataOrPath The bytes of the input file, or the path to the
* input file on the user's local disk. In both cases, the data gets
* serialized to a temporary file, and then that path gets substituted in
* the ffmpeg {@link command} by {@link inputPathPlaceholder}.
*
* @param outputFileName The name of the file we instruct ffmpeg to produce
* when giving it the given {@link command}. The contents of this file get
* returned as the result.
*
* @param timeoutMS If non-zero, then abort and throw a timeout error if the
* ffmpeg command takes more than the given number of milliseconds.
*
* @returns The contents of the output file produced by the ffmpeg command
* at {@link outputFileName}.
*/
ffmpegExec: (
command: string[],
inputDataOrPath: Uint8Array | string,
outputFileName: string,
dontTimeout?: boolean,
) => Promise<File>;
timeoutMS: number,
) => Promise<Uint8Array>;
// - ML
@ -232,7 +286,18 @@ export interface Electron {
clipImageEmbedding: (jpegImageData: Uint8Array) => Promise<Float32Array>;
/**
* Return a CLIP embedding of the given image.
* Return a CLIP embedding of the given image if we already have the model
* downloaded and prepped. If the model is not available return `undefined`.
*
* This differs from the other sibling ML functions in that it doesn't wait
* for the model download to finish. It does trigger a model download, but
* then immediately returns `undefined`. At some future point, when the
* model downloaded finishes, calls to this function will start returning
* the result we seek.
*
* The reason for doing it in this asymmetric way is because CLIP text
* embeddings are used as part of deducing user initiated search results,
* and we don't want to block that interaction on a large network request.
*
* See: [Note: CLIP based magic search]
*
@ -240,7 +305,9 @@ export interface Electron {
*
* @returns A CLIP embedding.
*/
clipTextEmbedding: (text: string) => Promise<Float32Array>;
clipTextEmbeddingIfAvailable: (
text: string,
) => Promise<Float32Array | undefined>;
/**
* Detect faces in the given image using YOLO.
@ -418,6 +485,13 @@ export interface Electron {
filePaths: string[],
) => Promise<void>;
/*
* TODO: AUDIT below this - Some of the types we use below are not copyable
* across process boundaries, and such functions will (expectedly) fail at
* runtime. For such functions, find an efficient alternative or refactor
* the dataflow.
*/
// -
getElectronFilesFromGoogleZip: (
@ -426,6 +500,26 @@ export interface Electron {
getDirFiles: (dirPath: string) => Promise<ElectronFile[]>;
}
/**
 * Errors that have special semantics on the web side.
 *
 * [Note: Custom errors across Electron/Renderer boundary]
 *
 * If we need to identify errors thrown by the main process when invoked from
 * the renderer process, we can only use the `message` field because:
 *
 * > Errors thrown through `handle` in the main process are not transparent as
 * > they are serialized and only the `message` property from the original error
 * > is provided to the renderer process.
 * >
 * > - https://www.electronjs.org/docs/latest/tutorial/ipc
 * >
 * > Ref: https://github.com/electron/electron/issues/24427
 */
export const CustomErrorMessage = {
    // NOTE(review): "in not" looks like a typo for "is not", but this exact
    // string is matched by value across the IPC boundary — confirm that the
    // desktop app's main process uses this same constant before fixing it.
    NotAvailable: "This feature in not available on the current OS/arch",
};
/**
* Data passed across the IPC bridge when an app update is available.
*/

View file

@ -12,24 +12,17 @@ export class ComlinkWorker<T extends new () => InstanceType<T>> {
this.name = name;
this.worker = worker;
this.worker.onerror = (ev) => {
worker.onerror = (event) => {
log.error(
`Got error event from worker: ${JSON.stringify({
errorEvent: JSON.stringify(ev),
name: this.name,
})}`,
`Got error event from worker: ${JSON.stringify({ event, name })}`,
);
};
log.debug(() => `Initiated ${this.name}`);
const comlink = wrap<T>(this.worker);
log.debug(() => `Initiated web worker ${name}`);
const comlink = wrap<T>(worker);
this.remote = new comlink() as Promise<Remote<InstanceType<T>>>;
expose(workerBridge, worker);
}
public getName() {
return this.name;
}
public terminate() {
this.worker.terminate();
log.debug(() => `Terminated ${this.name}`);
@ -43,15 +36,16 @@ export class ComlinkWorker<T extends new () => InstanceType<T>> {
* `workerBridge` object after importing it from `worker-bridge.ts`.
*
* Not all workers need access to all these functions, and this can indeed be
* done in a more fine-grained, per-worker, manner if needed.
* done in a more fine-grained, per-worker, manner if needed. For now, since it
* is a motley bunch, we just inject them all.
*/
const workerBridge = {
// Needed: generally (presumably)
logToDisk,
// Needed by ML worker
getAuthToken: () => ensureLocalUser().then((user) => user.token),
convertToJPEG: (inputFileData: Uint8Array, filename: string) =>
ensureElectron().convertToJPEG(inputFileData, filename),
convertToJPEG: (fileName: string, imageData: Uint8Array) =>
ensureElectron().convertToJPEG(fileName, imageData),
detectFaces: (input: Float32Array) => ensureElectron().detectFaces(input),
faceEmbedding: (input: Float32Array) =>
ensureElectron().faceEmbedding(input),

View file

@ -1,6 +1,6 @@
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { Remote } from "comlink";
import { DedicatedCryptoWorker } from "./internal/crypto.worker";
import { type DedicatedCryptoWorker } from "./internal/crypto.worker";
class ComlinkCryptoWorker {
private comlinkWorkerInstance:

View file

@ -74,8 +74,6 @@ export const CustomError = {
EXIF_DATA_NOT_FOUND: "exif data not found",
SELECT_FOLDER_ABORTED: "select folder aborted",
NON_MEDIA_FILE: "non media file",
UNSUPPORTED_RAW_FORMAT: "unsupported raw format",
NON_PREVIEWABLE_FILE: "non previewable file",
PROCESSING_FAILED: "processing failed",
EXPORT_RECORD_JSON_PARSING_FAILED: "export record json parsing failed",
TWO_FACTOR_ENABLED: "two factor enabled",
@ -84,8 +82,6 @@ export const CustomError = {
ServerError: "server error",
FILE_NOT_FOUND: "file not found",
UNSUPPORTED_PLATFORM: "Unsupported platform",
MODEL_DOWNLOAD_PENDING:
"Model download pending, skipping clip search request",
UPDATE_URL_FILE_ID_MISMATCH: "update url file id mismatch",
URL_ALREADY_SET: "url already set",
FILE_CONVERSION_FAILED: "file conversion failed",

View file

@ -4,9 +4,8 @@
* This function is a promisified `setTimeout`. It returns a promise that
* resolves after {@link ms} milliseconds.
*/
export async function sleep(ms: number) {
await new Promise((resolve) => setTimeout(resolve, ms));
}
/**
 * A promisified `setTimeout`: returns a promise that resolves after
 * {@link ms} milliseconds.
 */
// Fix: annotate the return type; without it the promise infers as
// `Promise<unknown>` rather than `Promise<void>`.
export const wait = (ms: number): Promise<void> =>
    new Promise((resolve) => setTimeout(resolve, ms));
export function downloadAsFile(filename: string, content: string) {
const file = new Blob([content], {
@ -49,29 +48,27 @@ export async function retryAsyncFunction<T>(
if (attemptNumber === waitTimeBeforeNextTry.length) {
throw e;
}
await sleep(waitTimeBeforeNextTry[attemptNumber]);
await wait(waitTimeBeforeNextTry[attemptNumber]);
}
}
}
export const promiseWithTimeout = async <T>(
request: Promise<T>,
timeout: number,
): Promise<T> => {
const timeoutRef = { current: null };
const rejectOnTimeout = new Promise<null>((_, reject) => {
timeoutRef.current = setTimeout(
/**
* Await the given {@link promise} for {@link timeoutMS} milliseconds. If it
* does not resolve within {@link timeoutMS}, then reject with a timeout error.
*/
export const withTimeout = async <T>(promise: Promise<T>, ms: number) => {
let timeoutId: ReturnType<typeof setTimeout>;
const rejectOnTimeout = new Promise<T>((_, reject) => {
timeoutId = setTimeout(
() => reject(new Error("Operation timed out")),
timeout,
ms,
);
});
const requestWithTimeOutCancellation = async () => {
const resp = await request;
clearTimeout(timeoutRef.current);
return resp;
const promiseAndCancelTimeout = async () => {
const result = await promise;
clearTimeout(timeoutId);
return result;
};
return await Promise.race([
requestWithTimeOutCancellation(),
rejectOnTimeout,
]);
return Promise.race([promiseAndCancelTimeout(), rejectOnTimeout]);
};

View file

@ -5,3 +5,12 @@ export const ensure = <T>(v: T | undefined): T => {
if (v === undefined) throw new Error("Required value was not found");
return v;
};
/**
 * Throw an exception if the given value is not a string.
 *
 * @returns the value itself, now narrowed to `string`.
 */
export const ensureString = (v: unknown): string => {
    if (typeof v == "string") return v;
    throw new Error(`Expected a string, instead found ${String(v)}`);
};