diff --git a/auth/lib/l10n/arb/app_de.arb b/auth/lib/l10n/arb/app_de.arb
index f3ea23b51..be769ecd5 100644
--- a/auth/lib/l10n/arb/app_de.arb
+++ b/auth/lib/l10n/arb/app_de.arb
@@ -78,12 +78,14 @@
"data": "Datei",
"importCodes": "Codes importieren",
"importTypePlainText": "Klartext",
+ "importTypeEnteEncrypted": "Verschlüsselter Ente-Export",
"passwordForDecryptingExport": "Passwort um den Export zu entschlüsseln",
"passwordEmptyError": "Passwort kann nicht leer sein",
"importFromApp": "Importiere Codes von {appName}",
"importGoogleAuthGuide": "Exportiere deine Accounts von Google Authenticator zu einem QR-Code, durch die \"Konten übertragen\" Option. Scanne den QR-Code danach mit einem anderen Gerät.\n\nTipp: Du kannst die Kamera eines Laptops verwenden, um ein Foto den dem QR-Code zu erstellen.",
"importSelectJsonFile": "Wähle eine JSON-Datei",
"importSelectAppExport": "{appName} Exportdatei auswählen",
+ "importEnteEncGuide": "Wähle die von Ente exportierte, verschlüsselte JSON-Datei",
"importRaivoGuide": "Verwenden Sie die Option \"Export OTPs to Zip archive\" in den Raivo-Einstellungen.\n\nEntpacken Sie die Zip-Datei und importieren Sie die JSON-Datei.",
"importBitwardenGuide": "Verwenden Sie die Option \"Tresor exportieren\" innerhalb der Bitwarden Tools und importieren Sie die unverschlüsselte JSON-Datei.",
"importAegisGuide": "Verwenden Sie die Option \"Tresor exportieren\" in den Aegis-Einstellungen.\n\nFalls Ihr Tresor verschlüsselt ist, müssen Sie das Passwort für den Tresor eingeben, um ihn zu entschlüsseln.",
@@ -121,12 +123,14 @@
"suggestFeatures": "Features vorschlagen",
"faq": "FAQ",
"faq_q_1": "Wie sicher ist Auth?",
+ "faq_a_1": "Alle Codes, die du über Auth sicherst, werden Ende-zu-Ende-verschlüsselt gespeichert. Das bedeutet, dass nur du auf deine Codes zugreifen kannst. Unsere Anwendungen sind quelloffen und unsere Kryptografie wurde extern geprüft.",
"faq_q_2": "Kann ich auf meine Codes auf dem Desktop zugreifen?",
"faq_a_2": "Sie können auf Ihre Codes im Web via auth.ente.io zugreifen.",
"faq_q_3": "Wie kann ich Codes löschen?",
"faq_a_3": "Sie können einen Code löschen, indem Sie auf dem Code nach links wischen.",
"faq_q_4": "Wie kann ich das Projekt unterstützen?",
"faq_a_4": "Sie können die Entwicklung dieses Projekts unterstützen, indem Sie unsere Fotos-App auf ente.io abonnieren.",
+ "faq_q_5": "Wie kann ich die FaceID-Sperre in Auth aktivieren",
"faq_a_5": "Sie können FaceID unter Einstellungen → Sicherheit → Sperrbildschirm aktivieren.",
"somethingWentWrongMessage": "Ein Fehler ist aufgetreten, bitte versuchen Sie es erneut",
"leaveFamily": "Familie verlassen",
@@ -196,6 +200,9 @@
"doThisLater": "Auf später verschieben",
"saveKey": "Schlüssel speichern",
"save": "Speichern",
+ "send": "Senden",
+ "saveOrSendDescription": "Möchtest du dies in deinem Speicher (standardmäßig im Ordner Downloads) oder an andere Apps senden?",
+ "saveOnlyDescription": "Möchtest du dies in deinem Speicher (standardmäßig im Ordner Downloads) speichern?",
"back": "Zurück",
"createAccount": "Account erstellen",
"passwordStrength": "Passwortstärke: {passwordStrengthValue}",
@@ -343,6 +350,7 @@
"deleteCodeAuthMessage": "Authentifizieren, um Code zu löschen",
"showQRAuthMessage": "Authentifizieren, um QR-Code anzuzeigen",
"confirmAccountDeleteTitle": "Kontolöschung bestätigen",
+ "confirmAccountDeleteMessage": "Dieses Konto ist mit anderen Ente-Apps verknüpft, falls du welche verwendest.\n\nDeine hochgeladenen Daten werden in allen Ente-Apps zur Löschung vorgemerkt und dein Konto wird endgültig gelöscht.",
"androidBiometricHint": "Identität bestätigen",
"@androidBiometricHint": {
"description": "Hint message advising the user how to authenticate with biometrics. It is used on Android side. Maximum 60 characters."
diff --git a/desktop/.eslintrc.js b/desktop/.eslintrc.js
index a47eb483f..977071a27 100644
--- a/desktop/.eslintrc.js
+++ b/desktop/.eslintrc.js
@@ -7,11 +7,6 @@ module.exports = {
// "plugin:@typescript-eslint/strict-type-checked",
// "plugin:@typescript-eslint/stylistic-type-checked",
],
- /* Temporarily add a global
- Enhancement: Remove me */
- globals: {
- NodeJS: "readonly",
- },
plugins: ["@typescript-eslint"],
parser: "@typescript-eslint/parser",
parserOptions: {
diff --git a/desktop/docs/dependencies.md b/desktop/docs/dependencies.md
index 62f70e8e4..5c6b222b0 100644
--- a/desktop/docs/dependencies.md
+++ b/desktop/docs/dependencies.md
@@ -13,7 +13,7 @@ Electron embeds Chromium and Node.js in the generated app's binary. The
generated app thus consists of two separate processes - the _main_ process, and
a _renderer_ process.
-- The _main_ process is runs the embedded node. This process can deal with the
+- The _main_ process runs the embedded node. This process can deal with the
host OS - it is conceptually like a `node` repl running on your machine. In
our case, the TypeScript code (in the `src/` directory) gets transpiled by
`tsc` into JavaScript in the `build/app/` directory, which gets bundled in
@@ -94,12 +94,12 @@ Some extra ones specific to the code here are:
### Format conversion
-The main tool we use is for arbitrary conversions is FFMPEG. To bundle a
+The main tool we use for arbitrary conversions is ffmpeg. To bundle a
(platform specific) static binary of ffmpeg with our app, we use
[ffmpeg-static](https://github.com/eugeneware/ffmpeg-static).
> There is a significant (~20x) speed difference between using the compiled
-> FFMPEG binary and using the WASM one (that our renderer process already has).
+> ffmpeg binary and using the wasm one (that our renderer process already has),
> which is why we bundle it to speed up operations on the desktop app.
In addition, we also bundle a static Linux binary of imagemagick in our extra
diff --git a/desktop/src/main.ts b/desktop/src/main.ts
index 467d9c881..2774ec730 100644
--- a/desktop/src/main.ts
+++ b/desktop/src/main.ts
@@ -8,18 +8,15 @@
*
* https://www.electronjs.org/docs/latest/tutorial/process-model#the-main-process
*/
-import { nativeImage } from "electron";
-import { app, BrowserWindow, Menu, protocol, Tray } from "electron/main";
+
+import { nativeImage, shell } from "electron/common";
+import type { WebContents } from "electron/main";
+import { BrowserWindow, Menu, Tray, app, protocol } from "electron/main";
import serveNextAt from "next-electron-server";
import { existsSync } from "node:fs";
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
-import {
- addAllowOriginHeader,
- handleDownloads,
- handleExternalLinks,
-} from "./main/init";
import { attachFSWatchIPCHandlers, attachIPCHandlers } from "./main/ipc";
import log, { initLogging } from "./main/log";
import { createApplicationMenu, createTrayContextMenu } from "./main/menu";
@@ -29,12 +26,12 @@ import { createWatcher } from "./main/services/watch";
import { userPreferences } from "./main/stores/user-preferences";
import { migrateLegacyWatchStoreIfNeeded } from "./main/stores/watch";
import { registerStreamProtocol } from "./main/stream";
-import { isDev } from "./main/util";
+import { isDev } from "./main/utils-electron";
/**
* The URL where the renderer HTML is being served from.
*/
-export const rendererURL = "ente://app";
+const rendererURL = "ente://app";
/**
* We want to hide our window instead of closing it when the user presses the
@@ -205,9 +202,11 @@ const createMainWindow = async () => {
window.webContents.reload();
});
+ // "The unresponsive event is fired when Chromium detects that your
+ // webContents is not responding to input messages for > 30 seconds."
window.webContents.on("unresponsive", () => {
log.error(
- "Main window's webContents are unresponsive, will restart the renderer process",
+ "MainWindow's webContents are unresponsive, will restart the renderer process",
);
window.webContents.forcefullyCrashRenderer();
});
@@ -238,6 +237,58 @@ const createMainWindow = async () => {
return window;
};
+/**
+ * Automatically set the save path for user initiated downloads to the system's
+ * "downloads" directory instead of asking the user to select a save location.
+ */
+export const setDownloadPath = (webContents: WebContents) => {
+ webContents.session.on("will-download", (_, item) => {
+ item.setSavePath(
+ uniqueSavePath(app.getPath("downloads"), item.getFilename()),
+ );
+ });
+};
+
+const uniqueSavePath = (dirPath: string, fileName: string) => {
+ const { name, ext } = path.parse(fileName);
+
+ let savePath = path.join(dirPath, fileName);
+ let n = 1;
+ while (existsSync(savePath)) {
+ // Note: the ext returned by path.parse already includes the leading dot.
+ const suffixedName = [`${name}(${n})`, ext].filter((x) => x).join("");
+ savePath = path.join(dirPath, suffixedName);
+ n++;
+ }
+ return savePath;
+};
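+
+// For instance, if "flower.png" already exists in the downloads directory,
+// uniqueSavePath would (hypothetically) return a path ending in
+// "flower(1).png", then "flower(2).png", and so on.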
+
+/**
+ * Allow opening external links, e.g. when the user clicks on the "Feature
+ * requests" button in the sidebar (to open our GitHub repository), or when they
+ * click the "Support" button to send an email to support.
+ *
+ * @param webContents The renderer to configure.
+ */
+export const allowExternalLinks = (webContents: WebContents) => {
+ // By default, if the user were to open a link, say
+ // https://github.com/ente-io/ente/discussions, then it would open a _new_
+ // BrowserWindow within our app.
+ //
+ // This is not the behaviour we want; what we want is to ask the system to
+ // handle the link (e.g. open the URL in the default browser, or if it is a
+ // mailto: link, then open the user's mail client).
+ //
+ // Returning `action` "deny" accomplishes this.
+ webContents.setWindowOpenHandler(({ url }) => {
+ if (!url.startsWith(rendererURL)) {
+ shell.openExternal(url);
+ return { action: "deny" };
+ } else {
+ return { action: "allow" };
+ }
+ });
+};
+
/**
* Add an icon for our app in the system tray.
*
@@ -340,19 +391,26 @@ const main = () => {
//
// Note that some Electron APIs can only be used after this event occurs.
app.on("ready", async () => {
- // Create window and prepare for renderer
+ // Create window and prepare for the renderer.
mainWindow = await createMainWindow();
attachIPCHandlers();
attachFSWatchIPCHandlers(createWatcher(mainWindow));
registerStreamProtocol();
- handleDownloads(mainWindow);
- handleExternalLinks(mainWindow);
- addAllowOriginHeader(mainWindow);
- // Start loading the renderer
+ // Configure the renderer's environment.
+ setDownloadPath(mainWindow.webContents);
+ allowExternalLinks(mainWindow.webContents);
+
+ // TODO(MR): Remove or resurrect
+ // The commit that introduced this header override had the message
+ // "fix cors issue for uploads". Not sure what that means, so disabling
+ // it for now to see why exactly this is required.
+ // addAllowOriginHeader(mainWindow);
+
+ // Start loading the renderer.
mainWindow.loadURL(rendererURL);
- // Continue on with the rest of the startup sequence
+ // Continue on with the rest of the startup sequence.
Menu.setApplicationMenu(await createApplicationMenu(mainWindow));
setupTrayItem(mainWindow);
if (!isDev) setupAutoUpdater(mainWindow);
diff --git a/desktop/src/main/dialogs.ts b/desktop/src/main/dialogs.ts
index 2f91f5c40..f119e3d13 100644
--- a/desktop/src/main/dialogs.ts
+++ b/desktop/src/main/dialogs.ts
@@ -1,7 +1,8 @@
import { dialog } from "electron/main";
+import fs from "node:fs/promises";
import path from "node:path";
import type { ElectronFile } from "../types/ipc";
-import { getDirFilePaths, getElectronFile } from "./services/fs";
+import { getElectronFile } from "./services/fs";
import { getElectronFilesFromGoogleZip } from "./services/upload";
export const selectDirectory = async () => {
@@ -34,6 +35,23 @@ export const showUploadDirsDialog = async () => {
return await Promise.all(filePaths.map(getElectronFile));
};
+// https://stackoverflow.com/a/63111390
+const getDirFilePaths = async (dirPath: string) => {
+ if (!(await fs.stat(dirPath)).isDirectory()) {
+ return [dirPath];
+ }
+
+ let files: string[] = [];
+ const filePaths = await fs.readdir(dirPath);
+
+ for (const filePath of filePaths) {
+ const absolute = path.join(dirPath, filePath);
+ files = [...files, ...(await getDirFilePaths(absolute))];
+ }
+
+ return files;
+};
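+
+// E.g. for a (hypothetical) tree `photos/{a.jpg, album/b.jpg}`, this resolves
+// to ["photos/a.jpg", "photos/album/b.jpg"].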
+
export const showUploadZipDialog = async () => {
const selectedFiles = await dialog.showOpenDialog({
properties: ["openFile", "multiSelections"],
diff --git a/desktop/src/main/fs.ts b/desktop/src/main/fs.ts
index 2428d3a80..fc181cf46 100644
--- a/desktop/src/main/fs.ts
+++ b/desktop/src/main/fs.ts
@@ -27,3 +27,5 @@ export const fsIsDir = async (dirPath: string) => {
const stat = await fs.stat(dirPath);
return stat.isDirectory();
};
+
+export const fsSize = (path: string) => fs.stat(path).then((s) => s.size);
diff --git a/desktop/src/main/init.ts b/desktop/src/main/init.ts
index d3e9b28b4..d0aee17f8 100644
--- a/desktop/src/main/init.ts
+++ b/desktop/src/main/init.ts
@@ -1,54 +1,4 @@
-import { BrowserWindow, app, shell } from "electron";
-import { existsSync } from "node:fs";
-import path from "node:path";
-import { rendererURL } from "../main";
-
-export function handleDownloads(mainWindow: BrowserWindow) {
- mainWindow.webContents.session.on("will-download", (_, item) => {
- item.setSavePath(
- getUniqueSavePath(item.getFilename(), app.getPath("downloads")),
- );
- });
-}
-
-export function handleExternalLinks(mainWindow: BrowserWindow) {
- mainWindow.webContents.setWindowOpenHandler(({ url }) => {
- if (!url.startsWith(rendererURL)) {
- shell.openExternal(url);
- return { action: "deny" };
- } else {
- return { action: "allow" };
- }
- });
-}
-
-export function getUniqueSavePath(filename: string, directory: string): string {
- let uniqueFileSavePath = path.join(directory, filename);
- const { name: filenameWithoutExtension, ext: extension } =
- path.parse(filename);
- let n = 0;
- while (existsSync(uniqueFileSavePath)) {
- n++;
- // filter need to remove undefined extension from the array
- // else [`${fileName}`, undefined].join(".") will lead to `${fileName}.` as joined string
- const fileNameWithNumberedSuffix = [
- `${filenameWithoutExtension}(${n})`,
- extension,
- ]
- .filter((x) => x) // filters out undefined/null values
- .join("");
- uniqueFileSavePath = path.join(directory, fileNameWithNumberedSuffix);
- }
- return uniqueFileSavePath;
-}
-
-function lowerCaseHeaders(responseHeaders: Record<string, string[]>) {
- const headers: Record<string, string[]> = {};
- for (const key of Object.keys(responseHeaders)) {
- headers[key.toLowerCase()] = responseHeaders[key];
- }
- return headers;
-}
+import { BrowserWindow } from "electron";
export function addAllowOriginHeader(mainWindow: BrowserWindow) {
mainWindow.webContents.session.webRequest.onHeadersReceived(
@@ -61,3 +11,11 @@ export function addAllowOriginHeader(mainWindow: BrowserWindow) {
},
);
}
+
+function lowerCaseHeaders(responseHeaders: Record<string, string[]>) {
+ const headers: Record<string, string[]> = {};
+ for (const key of Object.keys(responseHeaders)) {
+ headers[key.toLowerCase()] = responseHeaders[key];
+ }
+ return headers;
+}
diff --git a/desktop/src/main/ipc.ts b/desktop/src/main/ipc.ts
index eab2e8b59..825a2ed32 100644
--- a/desktop/src/main/ipc.ts
+++ b/desktop/src/main/ipc.ts
@@ -12,7 +12,6 @@ import type { FSWatcher } from "chokidar";
import { ipcMain } from "electron/main";
import type {
CollectionMapping,
- ElectronFile,
FolderWatch,
PendingUploads,
} from "../types/ipc";
@@ -30,6 +29,7 @@ import {
fsRename,
fsRm,
fsRmdir,
+ fsSize,
fsWriteFile,
} from "./fs";
import { logToDisk } from "./log";
@@ -39,13 +39,12 @@ import {
updateAndRestart,
updateOnNextRestart,
} from "./services/app-update";
-import { runFFmpegCmd } from "./services/ffmpeg";
-import { getDirFiles } from "./services/fs";
+import { ffmpegExec } from "./services/ffmpeg";
+import { convertToJPEG, generateImageThumbnail } from "./services/image";
import {
- convertToJPEG,
- generateImageThumbnail,
-} from "./services/imageProcessor";
-import { clipImageEmbedding, clipTextEmbedding } from "./services/ml-clip";
+ clipImageEmbedding,
+ clipTextEmbeddingIfAvailable,
+} from "./services/ml-clip";
import { detectFaces, faceEmbedding } from "./services/ml-face";
import {
clearStores,
@@ -66,7 +65,7 @@ import {
watchUpdateIgnoredFiles,
watchUpdateSyncedFiles,
} from "./services/watch";
-import { openDirectory, openLogDirectory } from "./util";
+import { openDirectory, openLogDirectory } from "./utils-electron";
/**
* Listen for IPC events sent/invoked by the renderer process, and route them to
@@ -140,27 +139,33 @@ export const attachIPCHandlers = () => {
ipcMain.handle("fsIsDir", (_, dirPath: string) => fsIsDir(dirPath));
+ ipcMain.handle("fsSize", (_, path: string) => fsSize(path));
+
// - Conversion
- ipcMain.handle("convertToJPEG", (_, fileData, filename) =>
- convertToJPEG(fileData, filename),
+ ipcMain.handle("convertToJPEG", (_, imageData: Uint8Array) =>
+ convertToJPEG(imageData),
);
ipcMain.handle(
"generateImageThumbnail",
- (_, inputFile, maxDimension, maxSize) =>
- generateImageThumbnail(inputFile, maxDimension, maxSize),
+ (
+ _,
+ dataOrPath: Uint8Array | string,
+ maxDimension: number,
+ maxSize: number,
+ ) => generateImageThumbnail(dataOrPath, maxDimension, maxSize),
);
ipcMain.handle(
- "runFFmpegCmd",
+ "ffmpegExec",
(
_,
- cmd: string[],
- inputFile: File | ElectronFile,
- outputFileName: string,
- dontTimeout?: boolean,
- ) => runFFmpegCmd(cmd, inputFile, outputFileName, dontTimeout),
+ command: string[],
+ dataOrPath: Uint8Array | string,
+ outputFileExtension: string,
+ timeoutMS: number,
+ ) => ffmpegExec(command, dataOrPath, outputFileExtension, timeoutMS),
);
// - ML
@@ -169,8 +174,8 @@ export const attachIPCHandlers = () => {
clipImageEmbedding(jpegImageData),
);
- ipcMain.handle("clipTextEmbedding", (_, text: string) =>
- clipTextEmbedding(text),
+ ipcMain.handle("clipTextEmbeddingIfAvailable", (_, text: string) =>
+ clipTextEmbeddingIfAvailable(text),
);
ipcMain.handle("detectFaces", (_, input: Float32Array) =>
@@ -210,8 +215,6 @@ export const attachIPCHandlers = () => {
ipcMain.handle("getElectronFilesFromGoogleZip", (_, filePath: string) =>
getElectronFilesFromGoogleZip(filePath),
);
-
- ipcMain.handle("getDirFiles", (_, dirPath: string) => getDirFiles(dirPath));
};
/**
diff --git a/desktop/src/main/log.ts b/desktop/src/main/log.ts
index d43161fea..22ebb5300 100644
--- a/desktop/src/main/log.ts
+++ b/desktop/src/main/log.ts
@@ -1,6 +1,6 @@
import log from "electron-log";
import util from "node:util";
-import { isDev } from "./util";
+import { isDev } from "./utils-electron";
/**
* Initialize logging in the main process.
diff --git a/desktop/src/main/menu.ts b/desktop/src/main/menu.ts
index bd8810428..12b1ee17d 100644
--- a/desktop/src/main/menu.ts
+++ b/desktop/src/main/menu.ts
@@ -9,7 +9,7 @@ import { allowWindowClose } from "../main";
import { forceCheckForAppUpdates } from "./services/app-update";
import autoLauncher from "./services/auto-launcher";
import { userPreferences } from "./stores/user-preferences";
-import { openLogDirectory } from "./util";
+import { isDev, openLogDirectory } from "./utils-electron";
/** Create and return the entries in the app's main menu bar */
export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
@@ -23,6 +23,9 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
const macOSOnly = (options: MenuItemConstructorOptions[]) =>
process.platform == "darwin" ? options : [];
+ const devOnly = (options: MenuItemConstructorOptions[]) =>
+ isDev ? options : [];
+
const handleCheckForUpdates = () => forceCheckForAppUpdates(mainWindow);
const handleViewChangelog = () =>
@@ -139,7 +142,9 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
label: "View",
submenu: [
{ label: "Reload", role: "reload" },
- { label: "Toggle Dev Tools", role: "toggleDevTools" },
+ ...devOnly([
+ { label: "Toggle Dev Tools", role: "toggleDevTools" },
+ ]),
{ type: "separator" },
{ label: "Toggle Full Screen", role: "togglefullscreen" },
],
diff --git a/desktop/src/main/services/app-update.ts b/desktop/src/main/services/app-update.ts
index a3f4d3bed..e20d42fb7 100644
--- a/desktop/src/main/services/app-update.ts
+++ b/desktop/src/main/services/app-update.ts
@@ -58,17 +58,17 @@ const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => {
log.debug(() => "Attempting auto update");
autoUpdater.downloadUpdate();
- let timeout: NodeJS.Timeout;
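+ // ReturnType<typeof setTimeout> works with both browser and Node typings,
+ // and avoids the ambient NodeJS namespace (whose stopgap global declaration
+ // we just removed from the ESLint config).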
+ let timeoutId: ReturnType<typeof setTimeout>;
const fiveMinutes = 5 * 60 * 1000;
autoUpdater.on("update-downloaded", () => {
- timeout = setTimeout(
+ timeoutId = setTimeout(
() => showUpdateDialog({ autoUpdatable: true, version }),
fiveMinutes,
);
});
autoUpdater.on("error", (error) => {
- clearTimeout(timeout);
+ clearTimeout(timeoutId);
log.error("Auto update failed", error);
showUpdateDialog({ autoUpdatable: false, version });
});
diff --git a/desktop/src/main/services/ffmpeg.ts b/desktop/src/main/services/ffmpeg.ts
index 2597bae60..ed3542f6a 100644
--- a/desktop/src/main/services/ffmpeg.ts
+++ b/desktop/src/main/services/ffmpeg.ts
@@ -1,33 +1,32 @@
import pathToFfmpeg from "ffmpeg-static";
-import { existsSync } from "node:fs";
import fs from "node:fs/promises";
-import { ElectronFile } from "../../types/ipc";
import log from "../log";
-import { writeStream } from "../stream";
-import { generateTempFilePath, getTempDirPath } from "../temp";
-import { execAsync } from "../util";
+import { withTimeout } from "../utils";
+import { execAsync } from "../utils-electron";
+import { deleteTempFile, makeTempFilePath } from "../utils-temp";
-const INPUT_PATH_PLACEHOLDER = "INPUT";
-const FFMPEG_PLACEHOLDER = "FFMPEG";
-const OUTPUT_PATH_PLACEHOLDER = "OUTPUT";
+/* Duplicated in the web app's code (used by the WASM FFmpeg implementation). */
+const ffmpegPathPlaceholder = "FFMPEG";
+const inputPathPlaceholder = "INPUT";
+const outputPathPlaceholder = "OUTPUT";
/**
- * Run a ffmpeg command
+ * Run an FFmpeg command
*
- * [Note: FFMPEG in Electron]
+ * [Note: FFmpeg in Electron]
*
- * There is a wasm build of FFMPEG, but that is currently 10-20 times slower
+ * There is a wasm build of FFmpeg, but that is currently 10-20 times slower
 * than the native build. That is slow enough to be unusable for our purposes.
* https://ffmpegwasm.netlify.app/docs/performance
*
- * So the alternative is to bundle a ffmpeg binary with our app. e.g.
+ * So the alternative is to bundle an FFmpeg executable with our app, e.g.
*
* yarn add fluent-ffmpeg ffmpeg-static ffprobe-static
*
* (we only use ffmpeg-static, the rest are mentioned for completeness' sake).
*
- * Interestingly, Electron already bundles an ffmpeg library (it comes from the
- * ffmpeg fork maintained by Chromium).
+ * Interestingly, Electron already bundles a binary FFmpeg library (it comes
+ * from the ffmpeg fork maintained by Chromium).
* https://chromium.googlesource.com/chromium/third_party/ffmpeg
* https://stackoverflow.com/questions/53963672/what-version-of-ffmpeg-is-bundled-inside-electron
*
@@ -36,84 +35,74 @@ const OUTPUT_PATH_PLACEHOLDER = "OUTPUT";
* $ file ente.app/Contents/Frameworks/Electron\ Framework.framework/Versions/Current/Libraries/libffmpeg.dylib
* .../libffmpeg.dylib: Mach-O 64-bit dynamically linked shared library arm64
*
- * I'm not sure if our code is supposed to be able to use it, and how.
+ * But I'm not sure if our code is supposed to be able to use it, and if so, how.
*/
-export async function runFFmpegCmd(
- cmd: string[],
- inputFile: File | ElectronFile,
- outputFileName: string,
- dontTimeout?: boolean,
-) {
- let inputFilePath = null;
- let createdTempInputFile = null;
+export const ffmpegExec = async (
+ command: string[],
+ dataOrPath: Uint8Array | string,
+ outputFileExtension: string,
+ timeoutMS: number,
+): Promise<Uint8Array> => {
+ // TODO (MR): This currently copies files for both input and output. This
+ // needs to be tested with extremely large video files when invoked downstream of
+ // `convertToMP4` in the web code.
+
+ let inputFilePath: string;
+ let isInputFileTemporary: boolean;
+ if (dataOrPath instanceof Uint8Array) {
+ inputFilePath = await makeTempFilePath();
+ isInputFileTemporary = true;
+ } else {
+ inputFilePath = dataOrPath;
+ isInputFileTemporary = false;
+ }
+
+ const outputFilePath = await makeTempFilePath(outputFileExtension);
try {
- if (!existsSync(inputFile.path)) {
- const tempFilePath = await generateTempFilePath(inputFile.name);
- await writeStream(tempFilePath, await inputFile.stream());
- inputFilePath = tempFilePath;
- createdTempInputFile = true;
- } else {
- inputFilePath = inputFile.path;
- }
- const outputFileData = await runFFmpegCmd_(
- cmd,
+ if (dataOrPath instanceof Uint8Array)
+ await fs.writeFile(inputFilePath, dataOrPath);
+
+ const cmd = substitutePlaceholders(
+ command,
inputFilePath,
- outputFileName,
- dontTimeout,
+ outputFilePath,
);
- return new File([outputFileData], outputFileName);
+
+ if (timeoutMS) await withTimeout(execAsync(cmd), timeoutMS);
+ else await execAsync(cmd);
+
+ return fs.readFile(outputFilePath);
} finally {
- if (createdTempInputFile) {
- await deleteTempFile(inputFilePath);
+ try {
+ if (isInputFileTemporary) await deleteTempFile(inputFilePath);
+ await deleteTempFile(outputFilePath);
+ } catch (e) {
+ log.error("Could not clean up temp files", e);
}
}
-}
+};
-export async function runFFmpegCmd_(
- cmd: string[],
+const substitutePlaceholders = (
+ command: string[],
inputFilePath: string,
- outputFileName: string,
- dontTimeout = false,
-) {
- let tempOutputFilePath: string;
- try {
- tempOutputFilePath = await generateTempFilePath(outputFileName);
-
- cmd = cmd.map((cmdPart) => {
- if (cmdPart === FFMPEG_PLACEHOLDER) {
- return ffmpegBinaryPath();
- } else if (cmdPart === INPUT_PATH_PLACEHOLDER) {
- return inputFilePath;
- } else if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
- return tempOutputFilePath;
- } else {
- return cmdPart;
- }
- });
-
- if (dontTimeout) {
- await execAsync(cmd);
+ outputFilePath: string,
+) =>
+ command.map((segment) => {
+ if (segment == ffmpegPathPlaceholder) {
+ return ffmpegBinaryPath();
+ } else if (segment == inputPathPlaceholder) {
+ return inputFilePath;
+ } else if (segment == outputPathPlaceholder) {
+ return outputFilePath;
} else {
- await promiseWithTimeout(execAsync(cmd), 30 * 1000);
+ return segment;
}
-
- if (!existsSync(tempOutputFilePath)) {
- throw new Error("ffmpeg output file not found");
- }
- const outputFile = await fs.readFile(tempOutputFilePath);
- return new Uint8Array(outputFile);
- } catch (e) {
- log.error("FFMPEG command failed", e);
- throw e;
- } finally {
- await deleteTempFile(tempOutputFilePath);
- }
-}
+ });
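+
+// For example, a (hypothetical) command to extract a thumbnail frame might
+// arrive from the renderer as
+//
+//     ["FFMPEG", "-i", "INPUT", "-ss", "00:00:01", "-vframes", "1", "OUTPUT"]
+//
+// and substitutePlaceholders would swap in the bundled ffmpeg binary's path
+// and the temporary input/output file paths before execution.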
/**
* Return the path to the `ffmpeg` binary.
*
- * At runtime, the ffmpeg binary is present in a path like (macOS example):
+ * At runtime, the FFmpeg binary is present in a path like (macOS example):
* `ente.app/Contents/Resources/app.asar.unpacked/node_modules/ffmpeg-static/ffmpeg`
*/
const ffmpegBinaryPath = () => {
@@ -122,40 +111,3 @@ const ffmpegBinaryPath = () => {
// https://github.com/eugeneware/ffmpeg-static/issues/16
return pathToFfmpeg.replace("app.asar", "app.asar.unpacked");
};
-
-export async function writeTempFile(fileStream: Uint8Array, fileName: string) {
- const tempFilePath = await generateTempFilePath(fileName);
- await fs.writeFile(tempFilePath, fileStream);
- return tempFilePath;
-}
-
-export async function deleteTempFile(tempFilePath: string) {
- const tempDirPath = await getTempDirPath();
- if (!tempFilePath.startsWith(tempDirPath))
- log.error("Attempting to delete a non-temp file ${tempFilePath}");
- await fs.rm(tempFilePath, { force: true });
-}
-
-const promiseWithTimeout = async (
- request: Promise,
- timeout: number,
-): Promise => {
- const timeoutRef: {
- current: NodeJS.Timeout;
- } = { current: null };
- const rejectOnTimeout = new Promise((_, reject) => {
- timeoutRef.current = setTimeout(
- () => reject(new Error("Operation timed out")),
- timeout,
- );
- });
- const requestWithTimeOutCancellation = async () => {
- const resp = await request;
- clearTimeout(timeoutRef.current);
- return resp;
- };
- return await Promise.race([
- requestWithTimeOutCancellation(),
- rejectOnTimeout,
- ]);
-};
diff --git a/desktop/src/main/services/fs.ts b/desktop/src/main/services/fs.ts
index 30ccf146b..609fc82d7 100644
--- a/desktop/src/main/services/fs.ts
+++ b/desktop/src/main/services/fs.ts
@@ -7,29 +7,6 @@ import log from "../log";
const FILE_STREAM_CHUNK_SIZE: number = 4 * 1024 * 1024;
-export async function getDirFiles(dirPath: string) {
- const files = await getDirFilePaths(dirPath);
- const electronFiles = await Promise.all(files.map(getElectronFile));
- return electronFiles;
-}
-
-// https://stackoverflow.com/a/63111390
-export const getDirFilePaths = async (dirPath: string) => {
- if (!(await fs.stat(dirPath)).isDirectory()) {
- return [dirPath];
- }
-
- let files: string[] = [];
- const filePaths = await fs.readdir(dirPath);
-
- for (const filePath of filePaths) {
- const absolute = path.join(dirPath, filePath);
- files = [...files, ...(await getDirFilePaths(absolute))];
- }
-
- return files;
-};
-
const getFileStream = async (filePath: string) => {
const file = await fs.open(filePath, "r");
let offset = 0;
diff --git a/desktop/src/main/services/image.ts b/desktop/src/main/services/image.ts
new file mode 100644
index 000000000..26b4b351e
--- /dev/null
+++ b/desktop/src/main/services/image.ts
@@ -0,0 +1,160 @@
+/** @file Image format conversions and thumbnail generation */
+
+import fs from "node:fs/promises";
+import path from "path";
+import { CustomErrorMessage } from "../../types/ipc";
+import log from "../log";
+import { execAsync, isDev } from "../utils-electron";
+import { deleteTempFile, makeTempFilePath } from "../utils-temp";
+
+export const convertToJPEG = async (imageData: Uint8Array) => {
+ const inputFilePath = await makeTempFilePath();
+ const outputFilePath = await makeTempFilePath("jpeg");
+
+ // Construct the command first, it may throw NotAvailable on win32.
+ const command = convertToJPEGCommand(inputFilePath, outputFilePath);
+
+ try {
+ await fs.writeFile(inputFilePath, imageData);
+ await execAsync(command);
+ return new Uint8Array(await fs.readFile(outputFilePath));
+ } finally {
+ try {
+ await deleteTempFile(inputFilePath);
+ await deleteTempFile(outputFilePath);
+ } catch (e) {
+ log.error("Could not clean up temp files", e);
+ }
+ }
+};
+
+const convertToJPEGCommand = (
+ inputFilePath: string,
+ outputFilePath: string,
+) => {
+ switch (process.platform) {
+ case "darwin":
+ return [
+ "sips",
+ "-s",
+ "format",
+ "jpeg",
+ inputFilePath,
+ "--out",
+ outputFilePath,
+ ];
+
+ case "linux":
+ return [
+ imageMagickPath(),
+ inputFilePath,
+ "-quality",
+ "100%",
+ outputFilePath,
+ ];
+
+ default: // "win32"
+ throw new Error(CustomErrorMessage.NotAvailable);
+ }
+};
+
+/** Path to the Linux image-magick executable bundled with our app */
+const imageMagickPath = () =>
+ path.join(isDev ? "build" : process.resourcesPath, "image-magick");
+
+export const generateImageThumbnail = async (
+ dataOrPath: Uint8Array | string,
+ maxDimension: number,
+ maxSize: number,
+): Promise<Uint8Array> => {
+ let inputFilePath: string;
+ let isInputFileTemporary: boolean;
+ if (dataOrPath instanceof Uint8Array) {
+ inputFilePath = await makeTempFilePath();
+ isInputFileTemporary = true;
+ } else {
+ inputFilePath = dataOrPath;
+ isInputFileTemporary = false;
+ }
+
+ const outputFilePath = await makeTempFilePath("jpeg");
+
+ // Construct the command first, it may throw `NotAvailable` on win32.
+ let quality = 70;
+ let command = generateImageThumbnailCommand(
+ inputFilePath,
+ outputFilePath,
+ maxDimension,
+ quality,
+ );
+
+ try {
+ if (dataOrPath instanceof Uint8Array)
+ await fs.writeFile(inputFilePath, dataOrPath);
+
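+ // Repeatedly generate the thumbnail, lowering the JPEG quality in steps
+ // of 10 (starting from 70) until the result fits within maxSize or the
+ // quality floor is reached.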
+ let thumbnail: Uint8Array;
+ do {
+ await execAsync(command);
+ thumbnail = new Uint8Array(await fs.readFile(outputFilePath));
+ quality -= 10;
+ command = generateImageThumbnailCommand(
+ inputFilePath,
+ outputFilePath,
+ maxDimension,
+ quality,
+ );
+ } while (thumbnail.length > maxSize && quality > 50);
+ return thumbnail;
+ } finally {
+ try {
+ if (isInputFileTemporary) await deleteTempFile(inputFilePath);
+ await deleteTempFile(outputFilePath);
+ } catch (e) {
+ log.error("Could not clean up temp files", e);
+ }
+ }
+};
+
+const generateImageThumbnailCommand = (
+ inputFilePath: string,
+ outputFilePath: string,
+ maxDimension: number,
+ quality: number,
+) => {
+ switch (process.platform) {
+ case "darwin":
+ return [
+ "sips",
+ "-s",
+ "format",
+ "jpeg",
+ "-s",
+ "formatOptions",
+ `${quality}`,
+ "-Z",
+ `${maxDimension}`,
+ inputFilePath,
+ "--out",
+ outputFilePath,
+ ];
+
+ case "linux":
+ return [
+ imageMagickPath(),
+ inputFilePath,
+ "-auto-orient",
+ "-define",
+ `jpeg:size=${2 * maxDimension}x${2 * maxDimension}`,
+ "-thumbnail",
+ `${maxDimension}x${maxDimension}>`,
+ "-unsharp",
+ "0x.5",
+ "-quality",
+ `${quality}`,
+ outputFilePath,
+ ];
+
+ default: // "win32"
+ throw new Error(CustomErrorMessage.NotAvailable);
+ }
+};
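+
+// Note: in the ImageMagick invocation above, the ">" suffix in the -thumbnail
+// geometry means "only shrink, never enlarge", and `-define jpeg:size=...`
+// lets the JPEG decoder downsample while reading, which speeds up handling of
+// large images.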
diff --git a/desktop/src/main/services/imageProcessor.ts b/desktop/src/main/services/imageProcessor.ts
deleted file mode 100644
index f636c153a..000000000
--- a/desktop/src/main/services/imageProcessor.ts
+++ /dev/null
@@ -1,288 +0,0 @@
-import { existsSync } from "fs";
-import fs from "node:fs/promises";
-import path from "path";
-import { CustomErrors, ElectronFile } from "../../types/ipc";
-import log from "../log";
-import { writeStream } from "../stream";
-import { generateTempFilePath } from "../temp";
-import { execAsync, isDev } from "../util";
-import { deleteTempFile } from "./ffmpeg";
-
-const IMAGE_MAGICK_PLACEHOLDER = "IMAGE_MAGICK";
-const MAX_DIMENSION_PLACEHOLDER = "MAX_DIMENSION";
-const SAMPLE_SIZE_PLACEHOLDER = "SAMPLE_SIZE";
-const INPUT_PATH_PLACEHOLDER = "INPUT";
-const OUTPUT_PATH_PLACEHOLDER = "OUTPUT";
-const QUALITY_PLACEHOLDER = "QUALITY";
-
-const MAX_QUALITY = 70;
-const MIN_QUALITY = 50;
-
-const SIPS_HEIC_CONVERT_COMMAND_TEMPLATE = [
- "sips",
- "-s",
- "format",
- "jpeg",
- INPUT_PATH_PLACEHOLDER,
- "--out",
- OUTPUT_PATH_PLACEHOLDER,
-];
-
-const SIPS_THUMBNAIL_GENERATE_COMMAND_TEMPLATE = [
- "sips",
- "-s",
- "format",
- "jpeg",
- "-s",
- "formatOptions",
- QUALITY_PLACEHOLDER,
- "-Z",
- MAX_DIMENSION_PLACEHOLDER,
- INPUT_PATH_PLACEHOLDER,
- "--out",
- OUTPUT_PATH_PLACEHOLDER,
-];
-
-const IMAGEMAGICK_HEIC_CONVERT_COMMAND_TEMPLATE = [
- IMAGE_MAGICK_PLACEHOLDER,
- INPUT_PATH_PLACEHOLDER,
- "-quality",
- "100%",
- OUTPUT_PATH_PLACEHOLDER,
-];
-
-const IMAGE_MAGICK_THUMBNAIL_GENERATE_COMMAND_TEMPLATE = [
- IMAGE_MAGICK_PLACEHOLDER,
- INPUT_PATH_PLACEHOLDER,
- "-auto-orient",
- "-define",
- `jpeg:size=${SAMPLE_SIZE_PLACEHOLDER}x${SAMPLE_SIZE_PLACEHOLDER}`,
- "-thumbnail",
- `${MAX_DIMENSION_PLACEHOLDER}x${MAX_DIMENSION_PLACEHOLDER}>`,
- "-unsharp",
- "0x.5",
- "-quality",
- QUALITY_PLACEHOLDER,
- OUTPUT_PATH_PLACEHOLDER,
-];
-
-const imageMagickStaticPath = () =>
- path.join(isDev ? "build" : process.resourcesPath, "image-magick");
-
-export async function convertToJPEG(
- fileData: Uint8Array,
- filename: string,
-): Promise<Uint8Array> {
- if (process.platform == "win32")
- throw Error(CustomErrors.WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED);
- const convertedFileData = await convertToJPEG_(fileData, filename);
- return convertedFileData;
-}
-
-async function convertToJPEG_(
- fileData: Uint8Array,
- filename: string,
-): Promise<Uint8Array> {
- let tempInputFilePath: string;
- let tempOutputFilePath: string;
- try {
- tempInputFilePath = await generateTempFilePath(filename);
- tempOutputFilePath = await generateTempFilePath("output.jpeg");
-
- await fs.writeFile(tempInputFilePath, fileData);
-
- await execAsync(
- constructConvertCommand(tempInputFilePath, tempOutputFilePath),
- );
-
- return new Uint8Array(await fs.readFile(tempOutputFilePath));
- } catch (e) {
- log.error("Failed to convert HEIC", e);
- throw e;
- } finally {
- try {
- await fs.rm(tempInputFilePath, { force: true });
- } catch (e) {
- log.error(`Failed to remove tempInputFile ${tempInputFilePath}`, e);
- }
- try {
- await fs.rm(tempOutputFilePath, { force: true });
- } catch (e) {
- log.error(
- `Failed to remove tempOutputFile ${tempOutputFilePath}`,
- e,
- );
- }
- }
-}
-
-function constructConvertCommand(
- tempInputFilePath: string,
- tempOutputFilePath: string,
-) {
- let convertCmd: string[];
- if (process.platform == "darwin") {
- convertCmd = SIPS_HEIC_CONVERT_COMMAND_TEMPLATE.map((cmdPart) => {
- if (cmdPart === INPUT_PATH_PLACEHOLDER) {
- return tempInputFilePath;
- }
- if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
- return tempOutputFilePath;
- }
- return cmdPart;
- });
- } else if (process.platform == "linux") {
- convertCmd = IMAGEMAGICK_HEIC_CONVERT_COMMAND_TEMPLATE.map(
- (cmdPart) => {
- if (cmdPart === IMAGE_MAGICK_PLACEHOLDER) {
- return imageMagickStaticPath();
- }
- if (cmdPart === INPUT_PATH_PLACEHOLDER) {
- return tempInputFilePath;
- }
- if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
- return tempOutputFilePath;
- }
- return cmdPart;
- },
- );
- } else {
- throw new Error(`Unsupported OS ${process.platform}`);
- }
- return convertCmd;
-}
-
-export async function generateImageThumbnail(
- inputFile: File | ElectronFile,
- maxDimension: number,
- maxSize: number,
-): Promise<Uint8Array> {
- let inputFilePath = null;
- let createdTempInputFile = null;
- try {
- if (process.platform == "win32")
- throw Error(
- CustomErrors.WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED,
- );
- if (!existsSync(inputFile.path)) {
- const tempFilePath = await generateTempFilePath(inputFile.name);
- await writeStream(tempFilePath, await inputFile.stream());
- inputFilePath = tempFilePath;
- createdTempInputFile = true;
- } else {
- inputFilePath = inputFile.path;
- }
- const thumbnail = await generateImageThumbnail_(
- inputFilePath,
- maxDimension,
- maxSize,
- );
- return thumbnail;
- } finally {
- if (createdTempInputFile) {
- try {
- await deleteTempFile(inputFilePath);
- } catch (e) {
- log.error(`Failed to deleteTempFile ${inputFilePath}`, e);
- }
- }
- }
-}
-
-async function generateImageThumbnail_(
- inputFilePath: string,
- width: number,
- maxSize: number,
-): Promise<Uint8Array> {
- let tempOutputFilePath: string;
- let quality = MAX_QUALITY;
- try {
- tempOutputFilePath = await generateTempFilePath("thumb.jpeg");
- let thumbnail: Uint8Array;
- do {
- await execAsync(
- constructThumbnailGenerationCommand(
- inputFilePath,
- tempOutputFilePath,
- width,
- quality,
- ),
- );
- thumbnail = new Uint8Array(await fs.readFile(tempOutputFilePath));
- quality -= 10;
- } while (thumbnail.length > maxSize && quality > MIN_QUALITY);
- return thumbnail;
- } catch (e) {
- log.error("Failed to generate image thumbnail", e);
- throw e;
- } finally {
- try {
- await fs.rm(tempOutputFilePath, { force: true });
- } catch (e) {
- log.error(
- `Failed to remove tempOutputFile ${tempOutputFilePath}`,
- e,
- );
- }
- }
-}
-
-function constructThumbnailGenerationCommand(
- inputFilePath: string,
- tempOutputFilePath: string,
- maxDimension: number,
- quality: number,
-) {
- let thumbnailGenerationCmd: string[];
- if (process.platform == "darwin") {
- thumbnailGenerationCmd = SIPS_THUMBNAIL_GENERATE_COMMAND_TEMPLATE.map(
- (cmdPart) => {
- if (cmdPart === INPUT_PATH_PLACEHOLDER) {
- return inputFilePath;
- }
- if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
- return tempOutputFilePath;
- }
- if (cmdPart === MAX_DIMENSION_PLACEHOLDER) {
- return maxDimension.toString();
- }
- if (cmdPart === QUALITY_PLACEHOLDER) {
- return quality.toString();
- }
- return cmdPart;
- },
- );
- } else if (process.platform == "linux") {
- thumbnailGenerationCmd =
- IMAGE_MAGICK_THUMBNAIL_GENERATE_COMMAND_TEMPLATE.map((cmdPart) => {
- if (cmdPart === IMAGE_MAGICK_PLACEHOLDER) {
- return imageMagickStaticPath();
- }
- if (cmdPart === INPUT_PATH_PLACEHOLDER) {
- return inputFilePath;
- }
- if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
- return tempOutputFilePath;
- }
- if (cmdPart.includes(SAMPLE_SIZE_PLACEHOLDER)) {
- return cmdPart.replaceAll(
- SAMPLE_SIZE_PLACEHOLDER,
- (2 * maxDimension).toString(),
- );
- }
- if (cmdPart.includes(MAX_DIMENSION_PLACEHOLDER)) {
- return cmdPart.replaceAll(
- MAX_DIMENSION_PLACEHOLDER,
- maxDimension.toString(),
- );
- }
- if (cmdPart === QUALITY_PLACEHOLDER) {
- return quality.toString();
- }
- return cmdPart;
- });
- } else {
- throw new Error(`Unsupported OS ${process.platform}`);
- }
- return thumbnailGenerationCmd;
-}
diff --git a/desktop/src/main/services/ml-clip.ts b/desktop/src/main/services/ml-clip.ts
index 46af2552b..cdd2baab7 100644
--- a/desktop/src/main/services/ml-clip.ts
+++ b/desktop/src/main/services/ml-clip.ts
@@ -5,115 +5,22 @@
*
* @see `web/apps/photos/src/services/clip-service.ts` for more details.
*/
-import { existsSync } from "fs";
import jpeg from "jpeg-js";
import fs from "node:fs/promises";
import * as ort from "onnxruntime-node";
import Tokenizer from "../../thirdparty/clip-bpe-ts/mod";
-import { CustomErrors } from "../../types/ipc";
import log from "../log";
import { writeStream } from "../stream";
-import { generateTempFilePath } from "../temp";
-import { deleteTempFile } from "./ffmpeg";
-import {
- createInferenceSession,
- downloadModel,
- modelPathDownloadingIfNeeded,
- modelSavePath,
-} from "./ml";
+import { deleteTempFile, makeTempFilePath } from "../utils-temp";
+import { makeCachedInferenceSession } from "./ml";
-const textModelName = "clip-text-vit-32-uint8.onnx";
-const textModelByteSize = 64173509; // 61.2 MB
-
-const imageModelName = "clip-image-vit-32-float32.onnx";
-const imageModelByteSize = 351468764; // 335.2 MB
-
-let activeImageModelDownload: Promise<string> | undefined;
-
-const imageModelPathDownloadingIfNeeded = async () => {
- try {
- if (activeImageModelDownload) {
- log.info("Waiting for CLIP image model download to finish");
- await activeImageModelDownload;
- } else {
- activeImageModelDownload = modelPathDownloadingIfNeeded(
- imageModelName,
- imageModelByteSize,
- );
- return await activeImageModelDownload;
- }
- } finally {
- activeImageModelDownload = undefined;
- }
-};
-
-let textModelDownloadInProgress = false;
-
-/* TODO(MR): use the generic method. Then we can remove the exports for the
- internal details functions that we use here */
-const textModelPathDownloadingIfNeeded = async () => {
- if (textModelDownloadInProgress)
- throw Error(CustomErrors.MODEL_DOWNLOAD_PENDING);
-
- const modelPath = modelSavePath(textModelName);
- if (!existsSync(modelPath)) {
- log.info("CLIP text model not found, downloading");
- textModelDownloadInProgress = true;
- downloadModel(modelPath, textModelName)
- .catch((e) => {
- // log but otherwise ignore
- log.error("CLIP text model download failed", e);
- })
- .finally(() => {
- textModelDownloadInProgress = false;
- });
- throw Error(CustomErrors.MODEL_DOWNLOAD_PENDING);
- } else {
- const localFileSize = (await fs.stat(modelPath)).size;
- if (localFileSize !== textModelByteSize) {
- log.error(
- `CLIP text model size ${localFileSize} does not match the expected size, downloading again`,
- );
- textModelDownloadInProgress = true;
- downloadModel(modelPath, textModelName)
- .catch((e) => {
- // log but otherwise ignore
- log.error("CLIP text model download failed", e);
- })
- .finally(() => {
- textModelDownloadInProgress = false;
- });
- throw Error(CustomErrors.MODEL_DOWNLOAD_PENDING);
- }
- }
-
- return modelPath;
-};
-
-let imageSessionPromise: Promise<ort.InferenceSession> | undefined;
-
-const onnxImageSession = async () => {
- if (!imageSessionPromise) {
- imageSessionPromise = (async () => {
- const modelPath = await imageModelPathDownloadingIfNeeded();
- return createInferenceSession(modelPath);
- })();
- }
- return imageSessionPromise;
-};
-
-let _textSession: any = null;
-
-const onnxTextSession = async () => {
- if (!_textSession) {
- const modelPath = await textModelPathDownloadingIfNeeded();
- _textSession = await createInferenceSession(modelPath);
- }
- return _textSession;
-};
+const cachedCLIPImageSession = makeCachedInferenceSession(
+ "clip-image-vit-32-float32.onnx",
+ 351468764 /* 335.2 MB */,
+);
export const clipImageEmbedding = async (jpegImageData: Uint8Array) => {
- const tempFilePath = await generateTempFilePath("");
+ const tempFilePath = await makeTempFilePath();
const imageStream = new Response(jpegImageData.buffer).body;
await writeStream(tempFilePath, imageStream);
try {
@@ -124,19 +31,20 @@ export const clipImageEmbedding = async (jpegImageData: Uint8Array) => {
};
const clipImageEmbedding_ = async (jpegFilePath: string) => {
- const imageSession = await onnxImageSession();
+ const session = await cachedCLIPImageSession();
const t1 = Date.now();
const rgbData = await getRGBData(jpegFilePath);
const feeds = {
input: new ort.Tensor("float32", rgbData, [1, 3, 224, 224]),
};
const t2 = Date.now();
- const results = await imageSession.run(feeds);
+ const results = await session.run(feeds);
log.debug(
() =>
`onnx/clip image embedding took ${Date.now() - t1} ms (prep: ${t2 - t1} ms, inference: ${Date.now() - t2} ms)`,
);
- const imageEmbedding = results["output"].data; // Float32Array
+ /* Need these model specific casts to type the result */
+ const imageEmbedding = results["output"].data as Float32Array;
return normalizeEmbedding(imageEmbedding);
};
@@ -221,6 +129,11 @@ const normalizeEmbedding = (embedding: Float32Array) => {
return embedding;
};
+const cachedCLIPTextSession = makeCachedInferenceSession(
+ "clip-text-vit-32-uint8.onnx",
+ 64173509 /* 61.2 MB */,
+);
+
let _tokenizer: Tokenizer = null;
const getTokenizer = () => {
if (!_tokenizer) {
@@ -229,8 +142,21 @@ const getTokenizer = () => {
return _tokenizer;
};
-export const clipTextEmbedding = async (text: string) => {
- const imageSession = await onnxTextSession();
+export const clipTextEmbeddingIfAvailable = async (text: string) => {
+ const sessionOrStatus = await Promise.race([
+ cachedCLIPTextSession(),
+ "downloading-model",
+ ]);
+
+ // If the session promise is still pending (e.g. the model is still
+ // downloading), then the race above resolves with the sentinel string.
+ // In that case, don't wait for the download to complete.
+ if (typeof sessionOrStatus == "string") {
+ log.info(
+ "Ignoring CLIP text embedding request because model download is pending",
+ );
+ return undefined;
+ }
+
+ const session = sessionOrStatus;
const t1 = Date.now();
const tokenizer = getTokenizer();
const tokenizedText = Int32Array.from(tokenizer.encodeForCLIP(text));
@@ -238,11 +164,11 @@ export const clipTextEmbedding = async (text: string) => {
input: new ort.Tensor("int32", tokenizedText, [1, 77]),
};
const t2 = Date.now();
- const results = await imageSession.run(feeds);
+ const results = await session.run(feeds);
log.debug(
() =>
`onnx/clip text embedding took ${Date.now() - t1} ms (prep: ${t2 - t1} ms, inference: ${Date.now() - t2} ms)`,
);
- const textEmbedding = results["output"].data;
+ const textEmbedding = results["output"].data as Float32Array;
return normalizeEmbedding(textEmbedding);
};
diff --git a/desktop/src/main/services/ml-face.ts b/desktop/src/main/services/ml-face.ts
index 1f007c5fd..2309d193c 100644
--- a/desktop/src/main/services/ml-face.ts
+++ b/desktop/src/main/services/ml-face.ts
@@ -8,78 +8,15 @@
*/
import * as ort from "onnxruntime-node";
import log from "../log";
-import { createInferenceSession, modelPathDownloadingIfNeeded } from "./ml";
+import { makeCachedInferenceSession } from "./ml";
-const faceDetectionModelName = "yolov5s_face_640_640_dynamic.onnx";
-const faceDetectionModelByteSize = 30762872; // 29.3 MB
-
-const faceEmbeddingModelName = "mobilefacenet_opset15.onnx";
-const faceEmbeddingModelByteSize = 5286998; // 5 MB
-
-let activeFaceDetectionModelDownload: Promise<string> | undefined;
-
-const faceDetectionModelPathDownloadingIfNeeded = async () => {
- try {
- if (activeFaceDetectionModelDownload) {
- log.info("Waiting for face detection model download to finish");
- await activeFaceDetectionModelDownload;
- } else {
- activeFaceDetectionModelDownload = modelPathDownloadingIfNeeded(
- faceDetectionModelName,
- faceDetectionModelByteSize,
- );
- return await activeFaceDetectionModelDownload;
- }
- } finally {
- activeFaceDetectionModelDownload = undefined;
- }
-};
-
-let _faceDetectionSession: Promise<ort.InferenceSession> | undefined;
-
-const faceDetectionSession = async () => {
- if (!_faceDetectionSession) {
- _faceDetectionSession =
- faceDetectionModelPathDownloadingIfNeeded().then((modelPath) =>
- createInferenceSession(modelPath),
- );
- }
- return _faceDetectionSession;
-};
-
-let activeFaceEmbeddingModelDownload: Promise<string> | undefined;
-
-const faceEmbeddingModelPathDownloadingIfNeeded = async () => {
- try {
- if (activeFaceEmbeddingModelDownload) {
- log.info("Waiting for face embedding model download to finish");
- await activeFaceEmbeddingModelDownload;
- } else {
- activeFaceEmbeddingModelDownload = modelPathDownloadingIfNeeded(
- faceEmbeddingModelName,
- faceEmbeddingModelByteSize,
- );
- return await activeFaceEmbeddingModelDownload;
- }
- } finally {
- activeFaceEmbeddingModelDownload = undefined;
- }
-};
-
-let _faceEmbeddingSession: Promise<ort.InferenceSession> | undefined;
-
-const faceEmbeddingSession = async () => {
- if (!_faceEmbeddingSession) {
- _faceEmbeddingSession =
- faceEmbeddingModelPathDownloadingIfNeeded().then((modelPath) =>
- createInferenceSession(modelPath),
- );
- }
- return _faceEmbeddingSession;
-};
+const cachedFaceDetectionSession = makeCachedInferenceSession(
+ "yolov5s_face_640_640_dynamic.onnx",
+ 30762872 /* 29.3 MB */,
+);
export const detectFaces = async (input: Float32Array) => {
- const session = await faceDetectionSession();
+ const session = await cachedFaceDetectionSession();
const t = Date.now();
const feeds = {
input: new ort.Tensor("float32", input, [1, 3, 640, 640]),
@@ -89,6 +26,11 @@ export const detectFaces = async (input: Float32Array) => {
return results["output"].data;
};
+const cachedFaceEmbeddingSession = makeCachedInferenceSession(
+ "mobilefacenet_opset15.onnx",
+ 5286998 /* 5 MB */,
+);
+
export const faceEmbedding = async (input: Float32Array) => {
// Dimension of each face (alias)
const mobileFaceNetFaceSize = 112;
@@ -98,11 +40,11 @@ export const faceEmbedding = async (input: Float32Array) => {
const n = Math.round(input.length / (z * z * 3));
const inputTensor = new ort.Tensor("float32", input, [n, z, z, 3]);
- const session = await faceEmbeddingSession();
+ const session = await cachedFaceEmbeddingSession();
const t = Date.now();
const feeds = { img_inputs: inputTensor };
const results = await session.run(feeds);
log.debug(() => `onnx/yolo face embedding took ${Date.now() - t} ms`);
- // TODO: What's with this type? It works in practice, but double check.
- return (results.embeddings as unknown as any)["cpuData"]; // as Float32Array;
+ /* Need these model specific casts to extract and type the result */
+ return (results.embeddings as unknown as any)["cpuData"] as Float32Array;
};
diff --git a/desktop/src/main/services/ml.ts b/desktop/src/main/services/ml.ts
index 60e8241e1..8292596a2 100644
--- a/desktop/src/main/services/ml.ts
+++ b/desktop/src/main/services/ml.ts
@@ -1,5 +1,5 @@
/**
- * @file AI/ML related functionality.
+ * @file AI/ML related functionality, generic layer.
*
* @see also `ml-clip.ts`, `ml-face.ts`.
*
@@ -18,6 +18,49 @@ import * as ort from "onnxruntime-node";
import log from "../log";
import { writeStream } from "../stream";
+/**
+ * Return a function that can be used to trigger a download of the specified
+ * model, and the creation of an ONNX inference session initialized using it.
+ *
+ * Multiple parallel calls to the returned function are fine: it ensures that
+ * the model will be downloaded and the session created using it only once.
+ * All pending calls to it meanwhile will just await on the same promise.
+ *
+ * And once the promise is resolved, the created ONNX inference session will be
+ * cached, so subsequent calls to the returned function will just reuse the
+ * same session.
+ *
+ * {@link makeCachedInferenceSession} can itself be called anytime; it doesn't
+ * actively trigger a download until the returned function is called.
+ *
+ * @param modelName The name of the model to download.
+ * @param modelByteSize The size in bytes that we expect the model to have. If
+ * the size of the downloaded model does not match the expected size, then we
+ * will redownload it.
+ *
+ * @returns A function. Calling that function returns a promise to an ONNX
+ * session.
+ */
+export const makeCachedInferenceSession = (
+ modelName: string,
+ modelByteSize: number,
+) => {
+ let session: Promise<ort.InferenceSession> | undefined;
+
+ const download = () =>
+ modelPathDownloadingIfNeeded(modelName, modelByteSize);
+
+ const createSession = (modelPath: string) =>
+ createInferenceSession(modelPath);
+
+ const cachedInferenceSession = () => {
+ if (!session) session = download().then(createSession);
+ return session;
+ };
+
+ return cachedInferenceSession;
+};
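+
+// Usage sketch (this mirrors how ml-clip.ts and ml-face.ts use it; the model
+// name and size here are illustrative):
+//
+//     const cachedSession = makeCachedInferenceSession("model.onnx", 1234567);
+//
+//     const session = await cachedSession(); // downloads only on first call
+//     const results = await session.run(feeds);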
+
/**
* Download the model named {@link modelName} if we don't already have it.
*
@@ -26,7 +69,7 @@ import { writeStream } from "../stream";
*
* @returns the path to the model on the local machine.
*/
-export const modelPathDownloadingIfNeeded = async (
+const modelPathDownloadingIfNeeded = async (
modelName: string,
expectedByteSize: number,
) => {
@@ -49,10 +92,10 @@ export const modelPathDownloadingIfNeeded = async (
};
/** Return the path where the given {@link modelName} is meant to be saved */
-export const modelSavePath = (modelName: string) =>
+const modelSavePath = (modelName: string) =>
path.join(app.getPath("userData"), "models", modelName);
-export const downloadModel = async (saveLocation: string, name: string) => {
+const downloadModel = async (saveLocation: string, name: string) => {
// `mkdir -p` the directory where we want to save the model.
const saveDir = path.dirname(saveLocation);
await fs.mkdir(saveDir, { recursive: true });
@@ -69,7 +112,7 @@ export const downloadModel = async (saveLocation: string, name: string) => {
/**
 * Create an ONNX {@link InferenceSession} with some defaults.
*/
-export const createInferenceSession = async (modelPath: string) => {
+const createInferenceSession = async (modelPath: string) => {
return await ort.InferenceSession.create(modelPath, {
// Restrict the number of threads to 1
intraOpNumThreads: 1,
diff --git a/desktop/src/main/stream.ts b/desktop/src/main/stream.ts
index 8ddb80dc6..88d85db8e 100644
--- a/desktop/src/main/stream.ts
+++ b/desktop/src/main/stream.ts
@@ -1,15 +1,16 @@
/**
* @file stream data to-from renderer using a custom protocol handler.
*/
-import { protocol } from "electron/main";
+import { net, protocol } from "electron/main";
import { createWriteStream, existsSync } from "node:fs";
import fs from "node:fs/promises";
import { Readable } from "node:stream";
+import { pathToFileURL } from "node:url";
import log from "./log";
/**
* Register a protocol handler that we use for streaming large files between the
- * main process (node) and the renderer process (browser) layer.
+ * main (Node.js) and renderer (Chromium) processes.
*
* [Note: IPC streams]
*
@@ -17,11 +18,14 @@ import log from "./log";
* across IPC. And passing the entire contents of the file is not feasible for
* large video files because of the memory pressure the copying would entail.
*
- * As an alternative, we register a custom protocol handler that can provided a
+ * As an alternative, we register a custom protocol handler that can provide a
* bi-directional stream. The renderer can stream data to the node side by
* streaming the request. The node side can stream to the renderer side by
* streaming the response.
*
+ * The stream is not full duplex - while both reads and writes can be streamed,
+ * they need to be streamed separately.
+ *
* See also: [Note: Transferring large amount of data over IPC]
*
* Depends on {@link registerPrivilegedSchemes}.
@@ -29,29 +33,73 @@ import log from "./log";
export const registerStreamProtocol = () => {
protocol.handle("stream", async (request: Request) => {
const url = request.url;
+ // The request URL contains the command to run as the host, and the
+ // pathname of the file as the path. For example,
+ //
+ // stream://write/path/to/file
+ // host-pathname-----
+ //
const { host, pathname } = new URL(url);
// Convert e.g. "%20" to spaces.
const path = decodeURIComponent(pathname);
switch (host) {
- /* stream://write/path/to/file */
- /* host-pathname----- */
+ case "read":
+ return handleRead(path);
case "write":
- try {
- await writeStream(path, request.body);
- return new Response("", { status: 200 });
- } catch (e) {
- log.error(`Failed to write stream for ${url}`, e);
- return new Response(
- `Failed to write stream: ${e.message}`,
- { status: 500 },
- );
- }
+ return handleWrite(path, request);
default:
return new Response("", { status: 404 });
}
});
};
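+
+// A hypothetical sketch of the renderer-side counterpart (the real helpers
+// live in the web codebase, so the exact shape there may differ):
+//
+//     // Read: stream a file's contents from the main process.
+//     const res = await fetch(`stream://read${encodeURI(path)}`);
+//
+//     // Write: stream a request body into a file on disk.
+//     await fetch(`stream://write${encodeURI(path)}`, { method: "POST", body });
+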
+const handleRead = async (path: string) => {
+ try {
+ const res = await net.fetch(pathToFileURL(path).toString());
+ if (res.ok) {
+ // net.fetch already seems to add "Content-Type" and "Last-Modified"
+ // headers, but I couldn't find documentation for this. In any case,
+ // since we already are stat-ting the file for the "Content-Length",
+ // we explicitly add the "X-Last-Modified-Ms" too,
+ //
+ // 1. Guaranteeing its presence,
+ //
+ // 2. Having it be in the exact format we want (no string <-> date
+ // conversions),
+ //
+ // 3. Retaining milliseconds.
+
+ const stat = await fs.stat(path);
+
+ // Add the file's size as the Content-Length header.
+ const fileSize = stat.size;
+ res.headers.set("Content-Length", `${fileSize}`);
+
+ // Add the file's last modified time (as epoch milliseconds).
+ const mtimeMs = stat.mtimeMs;
+ res.headers.set("X-Last-Modified-Ms", `${mtimeMs}`);
+ }
+ return res;
+ } catch (e) {
+ log.error(`Failed to read stream at ${path}`, e);
+ return new Response(`Failed to read stream: ${e.message}`, {
+ status: 500,
+ });
+ }
+};
+
+const handleWrite = async (path: string, request: Request) => {
+ try {
+ await writeStream(path, request.body);
+ return new Response("", { status: 200 });
+ } catch (e) {
+ log.error(`Failed to write stream to ${path}`, e);
+ return new Response(`Failed to write stream: ${e.message}`, {
+ status: 500,
+ });
+ }
+};
+
/**
* Write a (web) ReadableStream to a file at the given {@link filePath}.
*
@@ -92,10 +140,7 @@ const convertWebReadableStreamToNode = (readableStream: ReadableStream) => {
return rs;
};
-const writeNodeStream = async (
- filePath: string,
- fileStream: NodeJS.ReadableStream,
-) => {
+const writeNodeStream = async (filePath: string, fileStream: Readable) => {
const writeable = createWriteStream(filePath);
fileStream.on("error", (error) => {
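(Editorial aside, not part of the patch: the renderer-side counterpart of these handlers is plain `fetch` calls against the `stream:` scheme, following the `stream://<command><path>` layout documented above. A hedged sketch of what those call sites might look like; note that Chromium requires `duplex: "half"` when the request body is itself a stream.)

// Read: the file's contents arrive as the response body stream, with the
// extra headers set by handleRead available on the Response.
const readViaStream = async (path: string) => {
    const res = await fetch(`stream://read${path}`);
    if (!res.ok) throw new Error(`Read failed: HTTP ${res.status}`);
    return {
        stream: res.body, // ReadableStream of the file's contents
        size: +res.headers.get("Content-Length")!,
        lastModifiedMs: +res.headers.get("X-Last-Modified-Ms")!,
    };
};

// Write: stream a request body to the node side, which pipes it to disk.
const writeViaStream = async (path: string, body: ReadableStream) => {
    const init: RequestInit & { duplex?: "half" } = {
        method: "POST",
        body,
        // Required by Chromium for streaming request bodies.
        duplex: "half",
    };
    const res = await fetch(`stream://write${path}`, init);
    if (!res.ok) throw new Error(`Write failed: HTTP ${res.status}`);
};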
diff --git a/desktop/src/main/temp.ts b/desktop/src/main/temp.ts
deleted file mode 100644
index 489e5cbd4..000000000
--- a/desktop/src/main/temp.ts
+++ /dev/null
@@ -1,35 +0,0 @@
-import { app } from "electron/main";
-import { existsSync } from "node:fs";
-import fs from "node:fs/promises";
-import path from "path";
-
-const CHARACTERS =
- "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
-
-export async function getTempDirPath() {
- const tempDirPath = path.join(app.getPath("temp"), "ente");
- await fs.mkdir(tempDirPath, { recursive: true });
- return tempDirPath;
-}
-
-function generateTempName(length: number) {
- let result = "";
-
- const charactersLength = CHARACTERS.length;
- for (let i = 0; i < length; i++) {
- result += CHARACTERS.charAt(
- Math.floor(Math.random() * charactersLength),
- );
- }
- return result;
-}
-
-export async function generateTempFilePath(formatSuffix: string) {
- let tempFilePath: string;
- do {
- const tempDirPath = await getTempDirPath();
- const namePrefix = generateTempName(10);
- tempFilePath = path.join(tempDirPath, namePrefix + "-" + formatSuffix);
- } while (existsSync(tempFilePath));
- return tempFilePath;
-}
diff --git a/desktop/src/main/util.ts b/desktop/src/main/utils-electron.ts
similarity index 94%
rename from desktop/src/main/util.ts
rename to desktop/src/main/utils-electron.ts
index b997d738e..e8a98f1df 100644
--- a/desktop/src/main/util.ts
+++ b/desktop/src/main/utils-electron.ts
@@ -33,11 +33,9 @@ export const execAsync = (command: string | string[]) => {
? shellescape(command)
: command;
const startTime = Date.now();
- log.debug(() => `Running shell command: ${escapedCommand}`);
const result = execAsync_(escapedCommand);
log.debug(
- () =>
- `Completed in ${Math.round(Date.now() - startTime)} ms (${escapedCommand})`,
+ () => `${escapedCommand} (${Math.round(Date.now() - startTime)} ms)`,
);
return result;
};
diff --git a/desktop/src/main/utils-temp.ts b/desktop/src/main/utils-temp.ts
new file mode 100644
index 000000000..a52daf619
--- /dev/null
+++ b/desktop/src/main/utils-temp.ts
@@ -0,0 +1,63 @@
+import { app } from "electron/main";
+import { existsSync } from "node:fs";
+import fs from "node:fs/promises";
+import path from "path";
+
+/**
+ * Our very own directory within the system temp directory. Go crazy, but
+ * remember to clean up, especially in exception handlers.
+ */
+const enteTempDirPath = async () => {
+ const result = path.join(app.getPath("temp"), "ente");
+ await fs.mkdir(result, { recursive: true });
+ return result;
+};
+
+/** Generate a random string suitable for being used as a file name prefix */
+const randomPrefix = () => {
+ const alphabet =
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
+
+ let result = "";
+ for (let i = 0; i < 10; i++)
+ result += alphabet[Math.floor(Math.random() * alphabet.length)];
+ return result;
+};
+
+/**
+ * Return the path to a temporary file with the given {@link extension}.
+ *
+ * The function returns the path to a file in the system temp directory (in an
+ * Ente specific folder therein) with a random prefix and an (optional)
+ * {@link extension}.
+ *
+ * It ensures that no item with the same name already exists.
+ *
+ * Use {@link deleteTempFile} to remove this file when you're done.
+ */
+export const makeTempFilePath = async (extension?: string) => {
+ const tempDir = await enteTempDirPath();
+ const suffix = extension ? "." + extension : "";
+ let result: string;
+ do {
+ result = path.join(tempDir, randomPrefix() + suffix);
+ } while (existsSync(result));
+ return result;
+};
+
+/**
+ * Delete a temporary file at the given path if it exists.
+ *
+ * This is the same as a vanilla {@link fs.rm}, except it first checks that the
+ * given path is within the Ente specific directory in the system temp
+ * directory. This acts as an additional safety check.
+ *
+ * @param tempFilePath The path to the temporary file to delete. This path
+ * should've been previously created using {@link makeTempFilePath}.
+ */
+export const deleteTempFile = async (tempFilePath: string) => {
+ const tempDir = await enteTempDirPath();
+ if (!tempFilePath.startsWith(tempDir))
+ throw new Error(`Attempting to delete a non-temp file ${tempFilePath}`);
+ await fs.rm(tempFilePath, { force: true });
+};
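(Editorial aside, not part of the patch: a typical call site for this pair, honoring the "remember to clean up, especially in exception handlers" note above. `runConversion` is a hypothetical stand-in for whatever writes the output file.)

import fs from "node:fs/promises";

declare const runConversion: (
    input: Uint8Array,
    outputPath: string,
) => Promise<void>; // hypothetical

const convertWithCleanup = async (input: Uint8Array) => {
    const outputPath = await makeTempFilePath("mp4");
    try {
        await runConversion(input, outputPath);
        return await fs.readFile(outputPath);
    } finally {
        // Runs on both success and failure, so the temp file never leaks.
        await deleteTempFile(outputPath);
    }
};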
diff --git a/desktop/src/main/utils.ts b/desktop/src/main/utils.ts
new file mode 100644
index 000000000..132859a43
--- /dev/null
+++ b/desktop/src/main/utils.ts
@@ -0,0 +1,35 @@
+/**
+ * @file grab bag of utility functions.
+ *
+ * Many of these are verbatim copies of functions from web code since there
+ * isn't currently a common package that both of them share.
+ */
+
+/**
+ * Wait for {@link ms} milliseconds
+ *
+ * This function is a promisified `setTimeout`. It returns a promise that
+ * resolves after {@link ms} milliseconds.
+ */
+export const wait = (ms: number) =>
+ new Promise((resolve) => setTimeout(resolve, ms));
+
+/**
+ * Await the given {@link promise} for {@link ms} milliseconds. If it
+ * does not resolve within {@link ms}, then reject with a timeout error.
+ */
+export const withTimeout = async <T>(promise: Promise<T>, ms: number) => {
+ let timeoutId: ReturnType<typeof setTimeout>;
+ const rejectOnTimeout = new Promise<T>((_, reject) => {
+ timeoutId = setTimeout(
+ () => reject(new Error("Operation timed out")),
+ ms,
+ );
+ });
+ const promiseAndCancelTimeout = async () => {
+ const result = await promise;
+ clearTimeout(timeoutId);
+ return result;
+ };
+ return Promise.race([promiseAndCancelTimeout(), rejectOnTimeout]);
+};
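(Editorial aside, not part of the patch: a sketch of how the two helpers compose, bounding each attempt of a slow call and backing off between retries. `slowOperation` is hypothetical.)

declare const slowOperation: () => Promise<string>; // hypothetical

const withRetries = async () => {
    for (let attempt = 0; attempt < 3; attempt++) {
        try {
            // Reject any single attempt that takes longer than 30 seconds.
            return await withTimeout(slowOperation(), 30 * 1000);
        } catch {
            // Exponential backoff: 1 s, 2 s, 4 s.
            await wait(1000 * 2 ** attempt);
        }
    }
    throw new Error("All attempts failed or timed out");
};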
diff --git a/desktop/src/preload.ts b/desktop/src/preload.ts
index 7d0df41d5..18fb55013 100644
--- a/desktop/src/preload.ts
+++ b/desktop/src/preload.ts
@@ -122,40 +122,38 @@ const fsWriteFile = (path: string, contents: string): Promise<void> =>
const fsIsDir = (dirPath: string): Promise<boolean> =>
ipcRenderer.invoke("fsIsDir", dirPath);
-// - AUDIT below this
+const fsSize = (path: string): Promise<number> =>
+ ipcRenderer.invoke("fsSize", path);
// - Conversion
-const convertToJPEG = (
- fileData: Uint8Array,
- filename: string,
-): Promise<Uint8Array> =>
- ipcRenderer.invoke("convertToJPEG", fileData, filename);
+const convertToJPEG = (imageData: Uint8Array): Promise<Uint8Array> =>
+ ipcRenderer.invoke("convertToJPEG", imageData);
const generateImageThumbnail = (
- inputFile: File | ElectronFile,
+ dataOrPath: Uint8Array | string,
maxDimension: number,
maxSize: number,
): Promise<Uint8Array> =>
ipcRenderer.invoke(
"generateImageThumbnail",
- inputFile,
+ dataOrPath,
maxDimension,
maxSize,
);
-const runFFmpegCmd = (
- cmd: string[],
- inputFile: File | ElectronFile,
- outputFileName: string,
- dontTimeout?: boolean,
-): Promise<File> =>
+const ffmpegExec = (
+ command: string[],
+ dataOrPath: Uint8Array | string,
+ outputFileExtension: string,
+ timeoutMS: number,
+): Promise<Uint8Array> =>
ipcRenderer.invoke(
- "runFFmpegCmd",
- cmd,
- inputFile,
- outputFileName,
- dontTimeout,
+ "ffmpegExec",
+ command,
+ dataOrPath,
+ outputFileExtension,
+ timeoutMS,
);
// - ML
@@ -163,8 +161,10 @@ const runFFmpegCmd = (
const clipImageEmbedding = (jpegImageData: Uint8Array): Promise<Float32Array> =>
ipcRenderer.invoke("clipImageEmbedding", jpegImageData);
-const clipTextEmbedding = (text: string): Promise<Float32Array> =>
- ipcRenderer.invoke("clipTextEmbedding", text);
+const clipTextEmbeddingIfAvailable = (
+ text: string,
+): Promise<Float32Array | undefined> =>
+ ipcRenderer.invoke("clipTextEmbeddingIfAvailable", text);
const detectFaces = (input: Float32Array): Promise<Float32Array> =>
ipcRenderer.invoke("detectFaces", input);
@@ -253,6 +253,7 @@ const setPendingUploadFiles = (
): Promise<void> =>
ipcRenderer.invoke("setPendingUploadFiles", type, filePaths);
+// - TODO: AUDIT below this
// -
const getElectronFilesFromGoogleZip = (
@@ -260,45 +261,46 @@ const getElectronFilesFromGoogleZip = (
): Promise<ElectronFile[]> =>
ipcRenderer.invoke("getElectronFilesFromGoogleZip", filePath);
-const getDirFiles = (dirPath: string): Promise<ElectronFile[]> =>
- ipcRenderer.invoke("getDirFiles", dirPath);
-
-//
-// These objects exposed here will become available to the JS code in our
-// renderer (the web/ code) as `window.ElectronAPIs.*`
-//
-// There are a few related concepts at play here, and it might be worthwhile to
-// read their (excellent) documentation to get an understanding;
-//`
-// - ContextIsolation:
-// https://www.electronjs.org/docs/latest/tutorial/context-isolation
-//
-// - IPC https://www.electronjs.org/docs/latest/tutorial/ipc
-//
-// [Note: Transferring large amount of data over IPC]
-//
-// Electron's IPC implementation uses the HTML standard Structured Clone
-// Algorithm to serialize objects passed between processes.
-// https://www.electronjs.org/docs/latest/tutorial/ipc#object-serialization
-//
-// In particular, ArrayBuffer is eligible for structured cloning.
-// https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm
-//
-// Also, ArrayBuffer is "transferable", which means it is a zero-copy operation
-// operation when it happens across threads.
-// https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Transferable_objects
-//
-// In our case though, we're not dealing with threads but separate processes. So
-// the ArrayBuffer will be copied:
-// > "parameters, errors and return values are **copied** when they're sent over
-// the bridge".
-// https://www.electronjs.org/docs/latest/api/context-bridge#methods
-//
-// The copy itself is relatively fast, but the problem with transfering large
-// amounts of data is potentially running out of memory during the copy.
-//
-// For an alternative, see [Note: IPC streams].
-//
+/**
+ * These objects exposed here will become available to the JS code in our
+ * renderer (the web/ code) as `window.ElectronAPIs.*`
+ *
+ * There are a few related concepts at play here, and it might be worthwhile to
+ * read their (excellent) documentation to get an understanding;
+ *`
+ * - ContextIsolation:
+ * https://www.electronjs.org/docs/latest/tutorial/context-isolation
+ *
+ * - IPC https://www.electronjs.org/docs/latest/tutorial/ipc
+ *
+ * ---
+ *
+ * [Note: Transferring large amount of data over IPC]
+ *
+ * Electron's IPC implementation uses the HTML standard Structured Clone
+ * Algorithm to serialize objects passed between processes.
+ * https://www.electronjs.org/docs/latest/tutorial/ipc#object-serialization
+ *
+ * In particular, ArrayBuffer is eligible for structured cloning.
+ * https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm
+ *
+ * Also, ArrayBuffer is "transferable", which means it is a zero-copy
+ * operation when it happens across threads.
+ * https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Transferable_objects
+ *
+ * In our case though, we're not dealing with threads but separate processes. So
+ * the ArrayBuffer will be copied:
+ *
+ * > "parameters, errors and return values are **copied** when they're sent over
+ * > the bridge".
+ * >
+ * > https://www.electronjs.org/docs/latest/api/context-bridge#methods
+ *
+ * The copy itself is relatively fast, but the problem with transferring large
+ * amounts of data is potentially running out of memory during the copy.
+ *
+ * For an alternative, see [Note: IPC streams].
+ */
contextBridge.exposeInMainWorld("electron", {
// - General
@@ -329,18 +331,19 @@ contextBridge.exposeInMainWorld("electron", {
readTextFile: fsReadTextFile,
writeFile: fsWriteFile,
isDir: fsIsDir,
+ size: fsSize,
},
// - Conversion
convertToJPEG,
generateImageThumbnail,
- runFFmpegCmd,
+ ffmpegExec,
// - ML
clipImageEmbedding,
- clipTextEmbedding,
+ clipTextEmbeddingIfAvailable,
detectFaces,
faceEmbedding,
@@ -374,5 +377,4 @@ contextBridge.exposeInMainWorld("electron", {
// -
getElectronFilesFromGoogleZip,
- getDirFiles,
});
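(Editorial aside, not part of the patch: on the web side these functions surface as `window.electron.*`. The full type lives in the web codebase; the slice below is illustrative only.)

interface ElectronSlice {
    convertToJPEG: (imageData: Uint8Array) => Promise<Uint8Array>;
    fs: { size: (path: string) => Promise<number> };
}

const thumbnailInfo = async (imageData: Uint8Array, path: string) => {
    const electron = (window as unknown as { electron?: ElectronSlice })
        .electron;
    // Undefined when the web app runs in a plain browser rather than in the
    // desktop shell.
    if (!electron) throw new Error("Not running in the desktop app");
    const jpeg = await electron.convertToJPEG(imageData);
    const size = await electron.fs.size(path);
    return { jpeg, size };
};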
diff --git a/desktop/src/types/ipc.ts b/desktop/src/types/ipc.ts
index d96341982..3fa375eab 100644
--- a/desktop/src/types/ipc.ts
+++ b/desktop/src/types/ipc.ts
@@ -32,28 +32,13 @@ export interface PendingUploads {
}
/**
- * Errors that have special semantics on the web side.
+ * See: [Note: Custom errors across Electron/Renderer boundary]
*
- * [Note: Custom errors across Electron/Renderer boundary]
- *
- * We need to use the `message` field to disambiguate between errors thrown by
- * the main process when invoked from the renderer process. This is because:
- *
- * > Errors thrown throw `handle` in the main process are not transparent as
- * > they are serialized and only the `message` property from the original error
- * > is provided to the renderer process.
- * >
- * > - https://www.electronjs.org/docs/latest/tutorial/ipc
- * >
- * > Ref: https://github.com/electron/electron/issues/24427
+ * Note: this is not a type, and cannot be used in preload.js; it is only meant
+ * for use in the main process code.
*/
-export const CustomErrors = {
- WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED:
- "Windows native image processing is not supported",
- UNSUPPORTED_PLATFORM: (platform: string, arch: string) =>
- `Unsupported platform - ${platform} ${arch}`,
- MODEL_DOWNLOAD_PENDING:
- "Model download pending, skipping clip search request",
+export const CustomErrorMessage = {
+ NotAvailable: "This feature is not available on the current OS/arch",
};
/**
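(Editorial aside, not part of the patch: since only `message` survives `handle` rejections, the web side has to match on the string. A sketch; `convertToJPEG` stands in for any bridged function that rejects with this message on unsupported platforms. Electron prefixes the original message with "Error invoking remote method ...", so match on the suffix rather than on equality.)

declare const convertToJPEG: (data: Uint8Array) => Promise<Uint8Array>;

const convertToJPEGIfAvailable = async (data: Uint8Array) => {
    try {
        return await convertToJPEG(data);
    } catch (e) {
        if (
            e instanceof Error &&
            e.message.endsWith(CustomErrorMessage.NotAvailable)
        )
            return undefined; // gracefully degrade on this OS/arch
        throw e;
    }
};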
diff --git a/docs/docs/auth/migration-guides/authy/index.md b/docs/docs/auth/migration-guides/authy/index.md
index 48ce3965d..1a9228547 100644
--- a/docs/docs/auth/migration-guides/authy/index.md
+++ b/docs/docs/auth/migration-guides/authy/index.md
@@ -18,7 +18,7 @@ A guide written by Green, an ente.io lover
Migrating from Authy can be tiring, as you cannot export your 2FA codes through
the app, meaning that you would have to reconfigure 2FA for all of your accounts
for your new 2FA authenticator. However, easier ways exist to export your codes
-out of Authy. This guide will cover two of the most used methods for mograting
+out of Authy. This guide will cover two of the most used methods for migrating
from Authy to Ente Authenticator.
> [!CAUTION]
diff --git a/mobile/fastlane/metadata/ios/ru/name.txt b/mobile/fastlane/metadata/ios/ru/name.txt
index 44e95b9fc..45bf4920f 100644
--- a/mobile/fastlane/metadata/ios/ru/name.txt
+++ b/mobile/fastlane/metadata/ios/ru/name.txt
@@ -1 +1 @@
-ente фотографии
+ente Фото
diff --git a/mobile/ios/Podfile.lock b/mobile/ios/Podfile.lock
index ef9c650fd..951d1f2f3 100644
--- a/mobile/ios/Podfile.lock
+++ b/mobile/ios/Podfile.lock
@@ -12,19 +12,19 @@ PODS:
- Flutter
- file_saver (0.0.1):
- Flutter
- - Firebase/CoreOnly (10.22.0):
- - FirebaseCore (= 10.22.0)
- - Firebase/Messaging (10.22.0):
+ - Firebase/CoreOnly (10.24.0):
+ - FirebaseCore (= 10.24.0)
+ - Firebase/Messaging (10.24.0):
- Firebase/CoreOnly
- - FirebaseMessaging (~> 10.22.0)
- - firebase_core (2.29.0):
- - Firebase/CoreOnly (= 10.22.0)
+ - FirebaseMessaging (~> 10.24.0)
+ - firebase_core (2.30.0):
+ - Firebase/CoreOnly (= 10.24.0)
- Flutter
- - firebase_messaging (14.7.19):
- - Firebase/Messaging (= 10.22.0)
+ - firebase_messaging (14.8.1):
+ - Firebase/Messaging (= 10.24.0)
- firebase_core
- Flutter
- - FirebaseCore (10.22.0):
+ - FirebaseCore (10.24.0):
- FirebaseCoreInternal (~> 10.0)
- GoogleUtilities/Environment (~> 7.12)
- GoogleUtilities/Logger (~> 7.12)
@@ -35,7 +35,7 @@ PODS:
- GoogleUtilities/Environment (~> 7.8)
- GoogleUtilities/UserDefaults (~> 7.8)
- PromisesObjC (~> 2.1)
- - FirebaseMessaging (10.22.0):
+ - FirebaseMessaging (10.24.0):
- FirebaseCore (~> 10.0)
- FirebaseInstallations (~> 10.0)
- GoogleDataTransport (~> 9.3)
@@ -177,7 +177,7 @@ PODS:
- SDWebImage (5.19.1):
- SDWebImage/Core (= 5.19.1)
- SDWebImage/Core (5.19.1)
- - SDWebImageWebPCoder (0.14.5):
+ - SDWebImageWebPCoder (0.14.6):
- libwebp (~> 1.0)
- SDWebImage/Core (~> 5.17)
- Sentry/HybridSDK (8.21.0):
@@ -195,14 +195,14 @@ PODS:
- sqflite (0.0.3):
- Flutter
- FlutterMacOS
- - sqlite3 (3.45.1):
- - sqlite3/common (= 3.45.1)
- - sqlite3/common (3.45.1)
- - sqlite3/fts5 (3.45.1):
+ - "sqlite3 (3.45.3+1)":
+ - "sqlite3/common (= 3.45.3+1)"
+ - "sqlite3/common (3.45.3+1)"
+ - "sqlite3/fts5 (3.45.3+1)":
- sqlite3/common
- - sqlite3/perf-threadsafe (3.45.1):
+ - "sqlite3/perf-threadsafe (3.45.3+1)":
- sqlite3/common
- - sqlite3/rtree (3.45.1):
+ - "sqlite3/rtree (3.45.3+1)":
- sqlite3/common
- sqlite3_flutter_libs (0.0.1):
- Flutter
@@ -410,13 +410,13 @@ SPEC CHECKSUMS:
dart_ui_isolate: d5bcda83ca4b04f129d70eb90110b7a567aece14
device_info_plus: c6fb39579d0f423935b0c9ce7ee2f44b71b9fce6
file_saver: 503e386464dbe118f630e17b4c2e1190fa0cf808
- Firebase: 797fd7297b7e1be954432743a0b3f90038e45a71
- firebase_core: aaadbddb3cb2ee3792b9804f9dbb63e5f6f7b55c
- firebase_messaging: e65050bf9b187511d80ea3a4de7cf5573d2c7543
- FirebaseCore: 0326ec9b05fbed8f8716cddbf0e36894a13837f7
+ Firebase: 91fefd38712feb9186ea8996af6cbdef41473442
+ firebase_core: 66b99b4fb4e5d7cc4e88d4c195fe986681f3466a
+ firebase_messaging: 0eb0425d28b4f4af147cdd4adcaf7c0100df28ed
+ FirebaseCore: 11dc8a16dfb7c5e3c3f45ba0e191a33ac4f50894
FirebaseCoreInternal: bcb5acffd4ea05e12a783ecf835f2210ce3dc6af
FirebaseInstallations: 8f581fca6478a50705d2bd2abd66d306e0f5736e
- FirebaseMessaging: 9f71037fd9db3376a4caa54e5a3949d1027b4b6e
+ FirebaseMessaging: 4d52717dd820707cc4eadec5eb981b4832ec8d5d
fk_user_agent: 1f47ec39291e8372b1d692b50084b0d54103c545
Flutter: e0871f40cf51350855a761d2e70bf5af5b9b5de7
flutter_email_sender: 02d7443217d8c41483223627972bfdc09f74276b
@@ -458,14 +458,14 @@ SPEC CHECKSUMS:
receive_sharing_intent: 6837b01768e567fe8562182397bf43d63d8c6437
screen_brightness_ios: 715ca807df953bf676d339f11464e438143ee625
SDWebImage: 40b0b4053e36c660a764958bff99eed16610acbb
- SDWebImageWebPCoder: c94f09adbca681822edad9e532ac752db713eabf
+ SDWebImageWebPCoder: e38c0a70396191361d60c092933e22c20d5b1380
Sentry: ebc12276bd17613a114ab359074096b6b3725203
sentry_flutter: 88ebea3f595b0bc16acc5bedacafe6d60c12dcd5
SentryPrivate: d651efb234cf385ec9a1cdd3eff94b5e78a0e0fe
share_plus: c3fef564749587fc939ef86ffb283ceac0baf9f5
shared_preferences_foundation: b4c3b4cddf1c21f02770737f147a3f5da9d39695
sqflite: 673a0e54cc04b7d6dba8d24fb8095b31c3a99eec
- sqlite3: 73b7fc691fdc43277614250e04d183740cb15078
+ sqlite3: 02d1f07eaaa01f80a1c16b4b31dfcbb3345ee01a
sqlite3_flutter_libs: af0e8fe9bce48abddd1ffdbbf839db0302d72d80
Toast: 1f5ea13423a1e6674c4abdac5be53587ae481c4e
uni_links: d97da20c7701486ba192624d99bffaaffcfc298a
diff --git a/mobile/lib/app.dart b/mobile/lib/app.dart
index bc406d7c3..e7e299ed9 100644
--- a/mobile/lib/app.dart
+++ b/mobile/lib/app.dart
@@ -64,6 +64,9 @@ class _EnteAppState extends State<EnteApp> with WidgetsBindingObserver {
}
void _checkForWidgetLaunch() {
+ if (Platform.isIOS) {
+ return;
+ }
hw.HomeWidget.initiallyLaunchedFromHomeWidget().then(
(uri) => HomeWidgetService.instance.onLaunchFromWidget(uri, context),
);
diff --git a/mobile/lib/core/constants.dart b/mobile/lib/core/constants.dart
index 004580145..6b911569c 100644
--- a/mobile/lib/core/constants.dart
+++ b/mobile/lib/core/constants.dart
@@ -39,13 +39,6 @@ const dragSensitivity = 8;
const supportEmail = 'support@ente.io';
-// Default values for various feature flags
-class FFDefault {
- static const bool enableStripe = true;
- static const bool disableCFWorker = false;
- static const bool enablePasskey = false;
-}
-
// this is the chunk size of the un-encrypted file which is read and encrypted before uploading it as a single part.
const multipartPartSize = 20 * 1024 * 1024;
diff --git a/mobile/lib/main.dart b/mobile/lib/main.dart
index 036348e0e..9507090ff 100644
--- a/mobile/lib/main.dart
+++ b/mobile/lib/main.dart
@@ -22,12 +22,12 @@ import 'package:photos/db/upload_locks_db.dart';
import 'package:photos/ente_theme_data.dart';
import "package:photos/face/db.dart";
import "package:photos/l10n/l10n.dart";
+import "package:photos/service_locator.dart";
import 'package:photos/services/app_lifecycle_service.dart';
import 'package:photos/services/billing_service.dart';
import 'package:photos/services/collections_service.dart';
import "package:photos/services/entity_service.dart";
import 'package:photos/services/favorites_service.dart';
-import 'package:photos/services/feature_flag_service.dart';
import 'package:photos/services/home_widget_service.dart';
import 'package:photos/services/local_file_update_service.dart';
import 'package:photos/services/local_sync_service.dart';
@@ -182,6 +182,7 @@ Future<void> _init(bool isBackground, {String via = ''}) async {
_isProcessRunning = true;
_logger.info("Initializing... inBG =$isBackground via: $via");
final SharedPreferences preferences = await SharedPreferences.getInstance();
+
await _logFGHeartBeatInfo();
unawaited(_scheduleHeartBeat(preferences, isBackground));
AppLifecycleService.instance.init(preferences);
@@ -195,6 +196,7 @@ Future<void> _init(bool isBackground, {String via = ''}) async {
CryptoUtil.init();
await Configuration.instance.init();
await NetworkClient.instance.init();
+ ServiceLocator.instance.init(preferences, NetworkClient.instance.enteDio);
await UserService.instance.init();
await EntityService.instance.init();
LocationService.instance.init(preferences);
@@ -229,7 +231,7 @@ Future<void> _init(bool isBackground, {String via = ''}) async {
);
});
}
- unawaited(FeatureFlagService.instance.init());
+
unawaited(SemanticSearchService.instance.init());
MachineLearningController.instance.init();
// Can not including existing tf/ml binaries as they are not being built
@@ -379,7 +381,7 @@ Future<void> _logFGHeartBeatInfo() async {
final String lastRun = lastFGTaskHeartBeatTime == 0
? 'never'
: DateTime.fromMicrosecondsSinceEpoch(lastFGTaskHeartBeatTime).toString();
- _logger.info('isAlreaduunningFG: $isRunningInFG, last Beat: $lastRun');
+ _logger.info('isAlreadyRunningFG: $isRunningInFG, last Beat: $lastRun');
}
void _scheduleSuicide(Duration duration, [String? taskID]) {
diff --git a/mobile/lib/models/file/file.dart b/mobile/lib/models/file/file.dart
index 3495ee18d..ec52d7b96 100644
--- a/mobile/lib/models/file/file.dart
+++ b/mobile/lib/models/file/file.dart
@@ -9,7 +9,7 @@ import 'package:photos/core/constants.dart';
import 'package:photos/models/file/file_type.dart';
import 'package:photos/models/location/location.dart';
import "package:photos/models/metadata/file_magic.dart";
-import 'package:photos/services/feature_flag_service.dart';
+import "package:photos/service_locator.dart";
import 'package:photos/utils/date_time_util.dart';
import 'package:photos/utils/exif_util.dart';
import 'package:photos/utils/file_uploader_util.dart';
@@ -247,8 +247,7 @@ class EnteFile {
return "$localFileServer/$uploadedFileID";
}
final endpoint = Configuration.instance.getHttpEndpoint();
- if (endpoint != kDefaultProductionEndpoint ||
- FeatureFlagService.instance.disableCFWorker()) {
+ if (endpoint != kDefaultProductionEndpoint || flagService.disableCFWorker) {
return endpoint + "/files/download/" + uploadedFileID.toString();
} else {
return "https://files.ente.io/?fileID=" + uploadedFileID.toString();
@@ -264,8 +263,7 @@ class EnteFile {
return "$localFileServer/thumb/$uploadedFileID";
}
final endpoint = Configuration.instance.getHttpEndpoint();
- if (endpoint != kDefaultProductionEndpoint ||
- FeatureFlagService.instance.disableCFWorker()) {
+ if (endpoint != kDefaultProductionEndpoint || flagService.disableCFWorker) {
return endpoint + "/files/preview/" + uploadedFileID.toString();
} else {
return "https://thumbnails.ente.io/?fileID=" + uploadedFileID.toString();
diff --git a/mobile/lib/service_locator.dart b/mobile/lib/service_locator.dart
new file mode 100644
index 000000000..0fec75b46
--- /dev/null
+++ b/mobile/lib/service_locator.dart
@@ -0,0 +1,28 @@
+import "package:dio/dio.dart";
+import "package:ente_feature_flag/ente_feature_flag.dart";
+import "package:shared_preferences/shared_preferences.dart";
+
+class ServiceLocator {
+ late final SharedPreferences prefs;
+ late final Dio enteDio;
+
+ // instance
+ ServiceLocator._privateConstructor();
+
+ static final ServiceLocator instance = ServiceLocator._privateConstructor();
+
+ init(SharedPreferences prefs, Dio enteDio) {
+ this.prefs = prefs;
+ this.enteDio = enteDio;
+ }
+}
+
+FlagService? _flagService;
+
+FlagService get flagService {
+ _flagService ??= FlagService(
+ ServiceLocator.instance.prefs,
+ ServiceLocator.instance.enteDio,
+ );
+ return _flagService!;
+}
diff --git a/mobile/lib/services/collections_service.dart b/mobile/lib/services/collections_service.dart
index 8b82f6576..0981eb767 100644
--- a/mobile/lib/services/collections_service.dart
+++ b/mobile/lib/services/collections_service.dart
@@ -30,9 +30,9 @@ import 'package:photos/models/collection/collection_items.dart';
import 'package:photos/models/file/file.dart';
import "package:photos/models/files_split.dart";
import "package:photos/models/metadata/collection_magic.dart";
+import "package:photos/service_locator.dart";
import 'package:photos/services/app_lifecycle_service.dart';
import "package:photos/services/favorites_service.dart";
-import "package:photos/services/feature_flag_service.dart";
import 'package:photos/services/file_magic_service.dart';
import 'package:photos/services/local_sync_service.dart';
import 'package:photos/services/remote_sync_service.dart';
@@ -189,6 +189,23 @@ class CollectionsService {
return result;
}
+ bool allowUpload(int collectionID) {
+ final Collection? c = _collectionIDToCollections[collectionID];
+ if (c == null) {
+ _logger.info('discardUpload: collectionMissing $collectionID');
+ return false;
+ }
+ if (c.isDeleted) {
+ _logger.info('discardUpload: collectionDeleted $collectionID');
+ return false;
+ }
+ if (!c.isOwner(_config.getUserID()!)) {
+ _logger.info('discardUpload: notOwner $collectionID');
+ return false;
+ }
+ return true;
+ }
+
Future<List<Collection>> getArchivedCollection() async {
final allCollections = getCollectionsForUI();
return allCollections
@@ -1162,7 +1179,7 @@ class CollectionsService {
await _addToCollection(dstCollectionID, splitResult.ownedByCurrentUser);
}
if (splitResult.ownedByOtherUsers.isNotEmpty) {
- if (!FeatureFlagService.instance.isInternalUserOrDebugBuild()) {
+ if (!flagService.internalUser) {
throw ArgumentError('Cannot add files owned by other users');
}
late final List<EnteFile> filesToCopy;
diff --git a/mobile/lib/services/feature_flag_service.dart b/mobile/lib/services/feature_flag_service.dart
deleted file mode 100644
index 2891b03f6..000000000
--- a/mobile/lib/services/feature_flag_service.dart
+++ /dev/null
@@ -1,142 +0,0 @@
-import 'dart:convert';
-import 'dart:io';
-
-import 'package:flutter/foundation.dart';
-import 'package:logging/logging.dart';
-import 'package:photos/core/configuration.dart';
-import 'package:photos/core/constants.dart';
-import 'package:photos/core/network/network.dart';
-import 'package:shared_preferences/shared_preferences.dart';
-
-class FeatureFlagService {
- FeatureFlagService._privateConstructor();
-
- static final FeatureFlagService instance =
- FeatureFlagService._privateConstructor();
- static const _featureFlagsKey = "feature_flags_key";
- static final _internalUserIDs = const String.fromEnvironment(
- "internal_user_ids",
- defaultValue: "1,2,3,4,191,125,1580559962388044,1580559962392434,10000025",
- ).split(",").map((element) {
- return int.parse(element);
- }).toSet();
-
- final _logger = Logger("FeatureFlagService");
- FeatureFlags? _featureFlags;
- late SharedPreferences _prefs;
-
- Future<void> init() async {
- _prefs = await SharedPreferences.getInstance();
- // Fetch feature flags from network in async manner.
- // Intention of delay is to give more CPU cycles to other tasks
- Future.delayed(
- const Duration(seconds: 5),
- () {
- fetchFeatureFlags();
- },
- );
- }
-
- FeatureFlags _getFeatureFlags() {
- _featureFlags ??=
- FeatureFlags.fromJson(_prefs.getString(_featureFlagsKey)!);
- // if nothing is cached, use defaults as temporary fallback
- if (_featureFlags == null) {
- return FeatureFlags.defaultFlags;
- }
- return _featureFlags!;
- }
-
- bool disableCFWorker() {
- try {
- return _getFeatureFlags().disableCFWorker;
- } catch (e) {
- _logger.severe(e);
- return FFDefault.disableCFWorker;
- }
- }
-
- bool enableStripe() {
- if (Platform.isIOS) {
- return false;
- }
- try {
- return _getFeatureFlags().enableStripe;
- } catch (e) {
- _logger.severe(e);
- return FFDefault.enableStripe;
- }
- }
-
- bool enablePasskey() {
- try {
- if (isInternalUserOrDebugBuild()) {
- return true;
- }
- return _getFeatureFlags().enablePasskey;
- } catch (e) {
- _logger.info('error in enablePasskey check', e);
- return FFDefault.enablePasskey;
- }
- }
-
- bool isInternalUserOrDebugBuild() {
- final String? email = Configuration.instance.getEmail();
- final userID = Configuration.instance.getUserID();
- return (email != null && email.endsWith("@ente.io")) ||
- _internalUserIDs.contains(userID) ||
- kDebugMode;
- }
-
- Future<void> fetchFeatureFlags() async {
- try {
- final response = await NetworkClient.instance
- .getDio()
- .get("https://static.ente.io/feature_flags.json");
- final flagsResponse = FeatureFlags.fromMap(response.data);
- await _prefs.setString(_featureFlagsKey, flagsResponse.toJson());
- _featureFlags = flagsResponse;
- } catch (e) {
- _logger.severe("Failed to sync feature flags ", e);
- }
- }
-}
-
-class FeatureFlags {
- static FeatureFlags defaultFlags = FeatureFlags(
- disableCFWorker: FFDefault.disableCFWorker,
- enableStripe: FFDefault.enableStripe,
- enablePasskey: FFDefault.enablePasskey,
- );
-
- final bool disableCFWorker;
- final bool enableStripe;
- final bool enablePasskey;
-
- FeatureFlags({
- required this.disableCFWorker,
- required this.enableStripe,
- required this.enablePasskey,
- });
-
- Map<String, dynamic> toMap() {
- return {
- "disableCFWorker": disableCFWorker,
- "enableStripe": enableStripe,
- "enablePasskey": enablePasskey,
- };
- }
-
- String toJson() => json.encode(toMap());
-
- factory FeatureFlags.fromJson(String source) =>
- FeatureFlags.fromMap(json.decode(source));
-
- factory FeatureFlags.fromMap(Map<String, dynamic> json) {
- return FeatureFlags(
- disableCFWorker: json["disableCFWorker"] ?? FFDefault.disableCFWorker,
- enableStripe: json["enableStripe"] ?? FFDefault.enableStripe,
- enablePasskey: json["enablePasskey"] ?? FFDefault.enablePasskey,
- );
- }
-}
diff --git a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart
index 67cff1136..7a9b75b1b 100644
--- a/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart
+++ b/mobile/lib/services/machine_learning/face_ml/face_ml_service.dart
@@ -621,14 +621,7 @@ class FaceMlService {
'No faces detected for file with name:${enteFile.displayName}',
);
faces.add(
- Face(
- '${result.fileId}-0',
- result.fileId,
- [],
- result.errorOccured ? -1.0 : 0.0,
- face_detection.Detection.empty(),
- 0.0,
- ),
+ Face.empty(result.fileId, error: result.errorOccured),
);
} else {
if (result.decodedImageSize.width == -1 ||
diff --git a/mobile/lib/services/remote_sync_service.dart b/mobile/lib/services/remote_sync_service.dart
index 4c5222758..eab8478a6 100644
--- a/mobile/lib/services/remote_sync_service.dart
+++ b/mobile/lib/services/remote_sync_service.dart
@@ -23,9 +23,9 @@ import "package:photos/models/file/extensions/file_props.dart";
import 'package:photos/models/file/file.dart';
import 'package:photos/models/file/file_type.dart';
import 'package:photos/models/upload_strategy.dart';
+import "package:photos/service_locator.dart";
import 'package:photos/services/app_lifecycle_service.dart';
import 'package:photos/services/collections_service.dart';
-import "package:photos/services/feature_flag_service.dart";
import 'package:photos/services/ignored_files_service.dart';
import 'package:photos/services/local_file_update_service.dart';
import "package:photos/services/notification_service.dart";
@@ -185,7 +185,7 @@ class RemoteSyncService {
rethrow;
} else {
_logger.severe("Error executing remote sync ", e, s);
- if (FeatureFlagService.instance.isInternalUserOrDebugBuild()) {
+ if (flagService.internalUser) {
rethrow;
}
}
diff --git a/mobile/lib/services/update_service.dart b/mobile/lib/services/update_service.dart
index 28c5732c8..e18d8548c 100644
--- a/mobile/lib/services/update_service.dart
+++ b/mobile/lib/services/update_service.dart
@@ -73,9 +73,13 @@ class UpdateService {
return _latestVersion;
}
- Future<bool> shouldShowUpdateNoification() async {
+ Future<bool> shouldShowUpdateNotification() async {
final shouldUpdate = await this.shouldUpdate();
+ if (!shouldUpdate) {
+ return false;
+ }
+
final lastNotificationShownTime =
_prefs.getInt(kUpdateAvailableShownTimeKey) ?? 0;
final now = DateTime.now().microsecondsSinceEpoch;
@@ -87,7 +91,7 @@ class UpdateService {
}
Future<void> showUpdateNotification() async {
- if (await shouldShowUpdateNoification()) {
+ if (await shouldShowUpdateNotification()) {
// ignore: unawaited_futures
NotificationService.instance.showNotification(
"Update available",
diff --git a/mobile/lib/ui/payment/subscription.dart b/mobile/lib/ui/payment/subscription.dart
index 0327c3ab5..c30a1c67d 100644
--- a/mobile/lib/ui/payment/subscription.dart
+++ b/mobile/lib/ui/payment/subscription.dart
@@ -1,6 +1,6 @@
import 'package:flutter/cupertino.dart';
import 'package:photos/core/configuration.dart';
-import 'package:photos/services/feature_flag_service.dart';
+import "package:photos/service_locator.dart";
import 'package:photos/services/update_service.dart';
import "package:photos/ui/payment/store_subscription_page.dart";
import 'package:photos/ui/payment/stripe_subscription_page.dart';
@@ -9,8 +9,7 @@ StatefulWidget getSubscriptionPage({bool isOnBoarding = false}) {
if (UpdateService.instance.isIndependentFlavor()) {
return StripeSubscriptionPage(isOnboarding: isOnBoarding);
}
- if (FeatureFlagService.instance.enableStripe() &&
- _isUserCreatedPostStripeSupport()) {
+ if (flagService.enableStripe && _isUserCreatedPostStripeSupport()) {
return StripeSubscriptionPage(isOnboarding: isOnBoarding);
} else {
return StoreSubscriptionPage(isOnboarding: isOnBoarding);
diff --git a/mobile/lib/ui/settings/machine_learning_settings_page.dart b/mobile/lib/ui/settings/machine_learning_settings_page.dart
index 0ad5bce31..3306ea36f 100644
--- a/mobile/lib/ui/settings/machine_learning_settings_page.dart
+++ b/mobile/lib/ui/settings/machine_learning_settings_page.dart
@@ -5,7 +5,7 @@ import "package:intl/intl.dart";
import "package:photos/core/event_bus.dart";
import 'package:photos/events/embedding_updated_event.dart';
import "package:photos/generated/l10n.dart";
-import "package:photos/services/feature_flag_service.dart";
+import "package:photos/service_locator.dart";
import 'package:photos/services/machine_learning/semantic_search/frameworks/ml_framework.dart';
import 'package:photos/services/machine_learning/semantic_search/semantic_search_service.dart';
import "package:photos/theme/ente_theme.dart";
@@ -151,7 +151,7 @@ class _MachineLearningSettingsPageState
const SizedBox(
height: 12,
),
- FeatureFlagService.instance.isInternalUserOrDebugBuild()
+ flagService.internalUser
? MenuItemWidget(
leadingIcon: Icons.delete_sweep_outlined,
captionedTextWidget: CaptionedTextWidget(
diff --git a/mobile/lib/ui/settings/security_section_widget.dart b/mobile/lib/ui/settings/security_section_widget.dart
index dce7e97ec..eb93d85f6 100644
--- a/mobile/lib/ui/settings/security_section_widget.dart
+++ b/mobile/lib/ui/settings/security_section_widget.dart
@@ -10,7 +10,7 @@ import 'package:photos/events/two_factor_status_change_event.dart';
import "package:photos/generated/l10n.dart";
import "package:photos/l10n/l10n.dart";
import "package:photos/models/user_details.dart";
-import "package:photos/services/feature_flag_service.dart";
+import 'package:photos/service_locator.dart';
import 'package:photos/services/local_authentication_service.dart';
import "package:photos/services/passkey_service.dart";
import 'package:photos/services/user_service.dart';
@@ -70,8 +70,6 @@ class _SecuritySectionWidgetState extends State<SecuritySectionWidget> {
final Completer completer = Completer();
final List<Widget> children = [];
if (_config.hasConfiguredAccount()) {
- final bool isInternalUser =
- FeatureFlagService.instance.isInternalUserOrDebugBuild();
children.addAll(
[
sectionOptionSpacing,
@@ -103,8 +101,8 @@ class _SecuritySectionWidgetState extends State {
},
),
),
- if (isInternalUser) sectionOptionSpacing,
- if (isInternalUser)
+ if (flagService.passKeyEnabled) sectionOptionSpacing,
+ if (flagService.passKeyEnabled)
MenuItemWidget(
captionedTextWidget: CaptionedTextWidget(
title: context.l10n.passkey,
diff --git a/mobile/lib/ui/settings_page.dart b/mobile/lib/ui/settings_page.dart
index e8b5c319d..4f81016e1 100644
--- a/mobile/lib/ui/settings_page.dart
+++ b/mobile/lib/ui/settings_page.dart
@@ -7,6 +7,7 @@ import 'package:photos/core/configuration.dart';
import 'package:photos/core/event_bus.dart';
import 'package:photos/events/opened_settings_event.dart';
import "package:photos/generated/l10n.dart";
+import "package:photos/service_locator.dart";
import "package:photos/services/storage_bonus_service.dart";
import 'package:photos/theme/colors.dart';
import 'package:photos/theme/ente_theme.dart';
@@ -144,7 +145,7 @@ class SettingsPage extends StatelessWidget {
const AboutSectionWidget(),
]);
- if (hasLoggedIn) {
+ if (hasLoggedIn && flagService.internalUser) {
contents.addAll([sectionSpacing, const DebugSectionWidget()]);
contents.addAll([sectionSpacing, const FaceDebugSectionWidget()]);
}
diff --git a/mobile/lib/ui/tabs/home_widget.dart b/mobile/lib/ui/tabs/home_widget.dart
index 4b2c38ce5..ddad5073b 100644
--- a/mobile/lib/ui/tabs/home_widget.dart
+++ b/mobile/lib/ui/tabs/home_widget.dart
@@ -195,7 +195,7 @@ class _HomeWidgetState extends State<HomeWidget> {
},
);
_initDeepLinks();
- UpdateService.instance.shouldShowUpdateNoification().then((value) {
+ UpdateService.instance.shouldShowUpdateNotification().then((value) {
Future.delayed(Duration.zero, () {
if (value) {
showDialog(
diff --git a/mobile/lib/ui/tools/debug/app_storage_viewer.dart b/mobile/lib/ui/tools/debug/app_storage_viewer.dart
index 055457e08..50ec16c25 100644
--- a/mobile/lib/ui/tools/debug/app_storage_viewer.dart
+++ b/mobile/lib/ui/tools/debug/app_storage_viewer.dart
@@ -7,7 +7,7 @@ import 'package:path_provider/path_provider.dart';
import 'package:photos/core/cache/video_cache_manager.dart';
import 'package:photos/core/configuration.dart';
import "package:photos/generated/l10n.dart";
-import 'package:photos/services/feature_flag_service.dart';
+import "package:photos/service_locator.dart";
import 'package:photos/theme/ente_theme.dart';
import 'package:photos/ui/components/buttons/icon_button_widget.dart';
import 'package:photos/ui/components/captioned_text_widget.dart';
@@ -34,7 +34,7 @@ class _AppStorageViewerState extends State<AppStorageViewer> {
@override
void initState() {
- internalUser = FeatureFlagService.instance.isInternalUserOrDebugBuild();
+ internalUser = flagService.internalUser;
addPath();
super.initState();
}
diff --git a/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart b/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart
index 33417f14f..46d817548 100644
--- a/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart
+++ b/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart
@@ -18,8 +18,8 @@ import 'package:photos/models/files_split.dart';
import 'package:photos/models/gallery_type.dart';
import "package:photos/models/metadata/common_keys.dart";
import 'package:photos/models/selected_files.dart';
+import "package:photos/service_locator.dart";
import 'package:photos/services/collections_service.dart';
-import "package:photos/services/feature_flag_service.dart";
import 'package:photos/services/hidden_service.dart';
import 'package:photos/services/machine_learning/face_ml/feedback/cluster_feedback.dart';
import "package:photos/services/machine_learning/face_ml/person/person_service.dart";
@@ -108,7 +108,7 @@ class _FileSelectionActionsWidgetState
@override
Widget build(BuildContext context) {
- _isInternalUser = FeatureFlagService.instance.isInternalUserOrDebugBuild();
+ _isInternalUser = flagService.internalUser;
final ownedFilesCount = split.ownedByCurrentUser.length;
final ownedAndPendingUploadFilesCount =
ownedFilesCount + split.pendingUploads.length;
diff --git a/mobile/lib/ui/viewer/file/file_app_bar.dart b/mobile/lib/ui/viewer/file/file_app_bar.dart
index 98ed03f7a..e029aeb89 100644
--- a/mobile/lib/ui/viewer/file/file_app_bar.dart
+++ b/mobile/lib/ui/viewer/file/file_app_bar.dart
@@ -18,8 +18,8 @@ import 'package:photos/models/file/trash_file.dart';
import 'package:photos/models/ignored_file.dart';
import "package:photos/models/metadata/common_keys.dart";
import 'package:photos/models/selected_files.dart';
+import "package:photos/service_locator.dart";
import 'package:photos/services/collections_service.dart';
-import "package:photos/services/feature_flag_service.dart";
import 'package:photos/services/hidden_service.dart';
import 'package:photos/services/ignored_files_service.dart';
import 'package:photos/services/local_sync_service.dart';
@@ -141,16 +141,10 @@ class FileAppBarState extends State<FileAppBar> {
);
}
// only show fav option for files owned by the user
- if ((isOwnedByUser ||
- FeatureFlagService.instance.isInternalUserOrDebugBuild()) &&
+ if ((isOwnedByUser || flagService.internalUser) &&
!isFileHidden &&
isFileUploaded) {
- _actions.add(
- Padding(
- padding: const EdgeInsets.all(8),
- child: FavoriteWidget(widget.file),
- ),
- );
+ _actions.add(FavoriteWidget(widget.file));
}
if (!isFileUploaded) {
_actions.add(
diff --git a/mobile/lib/ui/viewer/file/video_widget.dart b/mobile/lib/ui/viewer/file/video_widget.dart
index c9c07df5c..7f9218e9a 100644
--- a/mobile/lib/ui/viewer/file/video_widget.dart
+++ b/mobile/lib/ui/viewer/file/video_widget.dart
@@ -9,7 +9,7 @@ import 'package:photos/core/constants.dart';
import "package:photos/generated/l10n.dart";
import "package:photos/models/file/extensions/file_props.dart";
import 'package:photos/models/file/file.dart';
-import "package:photos/services/feature_flag_service.dart";
+import "package:photos/service_locator.dart";
import 'package:photos/services/files_service.dart';
import "package:photos/ui/actions/file/file_actions.dart";
import 'package:photos/ui/viewer/file/thumbnail_widget.dart';
@@ -161,8 +161,7 @@ class _VideoWidgetState extends State<VideoWidget> {
}
}).onError(
(error, stackTrace) {
- if (mounted &&
- FeatureFlagService.instance.isInternalUserOrDebugBuild()) {
+ if (mounted && flagService.internalUser) {
if (error is Exception) {
showErrorDialogForException(
context: context,
diff --git a/mobile/lib/ui/viewer/file_details/favorite_widget.dart b/mobile/lib/ui/viewer/file_details/favorite_widget.dart
index c2ad0e771..f9d643490 100644
--- a/mobile/lib/ui/viewer/file_details/favorite_widget.dart
+++ b/mobile/lib/ui/viewer/file_details/favorite_widget.dart
@@ -44,11 +44,13 @@ class _FavoriteWidgetState extends State<FavoriteWidget> {
final bool isLiked = snapshot.data ?? false;
return _isLoading
? const EnteLoadingWidget(
- size: 12,
+ size: 14,
+ padding: 2,
) // Add this line
: LikeButton(
size: 24,
isLiked: isLiked,
+ padding: const EdgeInsets.all(2),
onTap: (oldValue) async {
if (widget.file.uploadedFileID == null ||
widget.file.ownerID !=
diff --git a/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart b/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart
index 1026bd7fd..1f9fb0bbb 100644
--- a/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart
+++ b/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart
@@ -19,8 +19,8 @@ import 'package:photos/models/device_collection.dart';
import 'package:photos/models/gallery_type.dart';
import "package:photos/models/metadata/common_keys.dart";
import 'package:photos/models/selected_files.dart';
+import 'package:photos/service_locator.dart';
import 'package:photos/services/collections_service.dart';
-import "package:photos/services/feature_flag_service.dart";
import 'package:photos/services/sync_service.dart';
import 'package:photos/services/update_service.dart';
import 'package:photos/ui/actions/collection/collection_sharing_actions.dart';
@@ -96,7 +96,7 @@ class _GalleryAppBarWidgetState extends State<GalleryAppBarWidget> {
_selectedFilesListener = () {
setState(() {});
};
- isInternalUser = FeatureFlagService.instance.isInternalUserOrDebugBuild();
+ isInternalUser = flagService.internalUser;
collectionActions = CollectionActions(CollectionsService.instance);
widget.selectedFiles.addListener(_selectedFilesListener);
_userAuthEventSubscription =
diff --git a/mobile/lib/utils/dialog_util.dart b/mobile/lib/utils/dialog_util.dart
index d5518c4c4..d57a6990a 100644
--- a/mobile/lib/utils/dialog_util.dart
+++ b/mobile/lib/utils/dialog_util.dart
@@ -5,7 +5,7 @@ import "package:flutter/services.dart";
import "package:photos/generated/l10n.dart";
import 'package:photos/models/button_result.dart';
import 'package:photos/models/typedefs.dart';
-import "package:photos/services/feature_flag_service.dart";
+import "package:photos/service_locator.dart";
import 'package:photos/theme/colors.dart';
import 'package:photos/ui/common/loading_widget.dart';
import 'package:photos/ui/common/progress_dialog.dart';
@@ -91,8 +91,7 @@ String parseErrorForUI(
}
}
// return a generic error if the user is not internal or the build is not in debug mode
- if (!(FeatureFlagService.instance.isInternalUserOrDebugBuild() &&
- kDebugMode)) {
+ if (!(flagService.internalUser && kDebugMode)) {
return genericError;
}
String errorInfo = "";
diff --git a/mobile/lib/utils/file_uploader.dart b/mobile/lib/utils/file_uploader.dart
index adf5847f3..d77bc95d7 100644
--- a/mobile/lib/utils/file_uploader.dart
+++ b/mobile/lib/utils/file_uploader.dart
@@ -29,7 +29,6 @@ import "package:photos/models/metadata/file_magic.dart";
import 'package:photos/models/upload_url.dart';
import "package:photos/models/user_details.dart";
import 'package:photos/services/collections_service.dart';
-import "package:photos/services/feature_flag_service.dart";
import "package:photos/services/file_magic_service.dart";
import 'package:photos/services/local_sync_service.dart';
import 'package:photos/services/sync_service.dart';
@@ -402,6 +401,16 @@ class FileUploader {
_logger.severe('Trying to upload file with missing localID');
return file;
}
+ if (!CollectionsService.instance.allowUpload(collectionID)) {
+ _logger.warning(
+ 'Upload not allowed for collection $collectionID',
+ );
+ if (!file.isUploaded && file.generatedID != null) {
+ _logger.info("Deleting file entry for " + file.toString());
+ await FilesDB.instance.deleteByGeneratedID(file.generatedID!);
+ }
+ return file;
+ }
final String lockKey = file.localID!;
@@ -497,7 +506,7 @@ class FileUploader {
// Calculate the number of parts for the file. Multiple part upload
// is only enabled for internal users and debug builds till it's battle tested.
- final count = FeatureFlagService.instance.isInternalUserOrDebugBuild()
+ final count = kDebugMode
? await calculatePartCount(
await encryptedFile.length(),
)
diff --git a/mobile/lib/utils/multipart_upload_util.dart b/mobile/lib/utils/multipart_upload_util.dart
index 6e0eda8ca..102c08d8d 100644
--- a/mobile/lib/utils/multipart_upload_util.dart
+++ b/mobile/lib/utils/multipart_upload_util.dart
@@ -6,7 +6,7 @@ import "package:dio/dio.dart";
import "package:logging/logging.dart";
import "package:photos/core/constants.dart";
import "package:photos/core/network/network.dart";
-import "package:photos/services/feature_flag_service.dart";
+import "package:photos/service_locator.dart";
import "package:photos/utils/xml_parser_util.dart";
final _enteDio = NetworkClient.instance.enteDio;
@@ -58,7 +58,7 @@ Future<int> calculatePartCount(int fileSize) async {
Future getMultipartUploadURLs(int count) async {
try {
assert(
- FeatureFlagService.instance.isInternalUserOrDebugBuild(),
+ flagService.internalUser,
"Multipart upload should not be enabled for external users.",
);
final response = await _enteDio.get(
diff --git a/mobile/plugins/ente_feature_flag/.metadata b/mobile/plugins/ente_feature_flag/.metadata
new file mode 100644
index 000000000..9fc7ede54
--- /dev/null
+++ b/mobile/plugins/ente_feature_flag/.metadata
@@ -0,0 +1,10 @@
+# This file tracks properties of this Flutter project.
+# Used by Flutter tool to assess capabilities and perform upgrades etc.
+#
+# This file should be version controlled and should not be manually edited.
+
+version:
+ revision: 0b8abb4724aa590dd0f429683339b1e045a1594d
+ channel: stable
+
+project_type: plugin
diff --git a/mobile/plugins/ente_feature_flag/analysis_options.yaml b/mobile/plugins/ente_feature_flag/analysis_options.yaml
new file mode 100644
index 000000000..fac60e247
--- /dev/null
+++ b/mobile/plugins/ente_feature_flag/analysis_options.yaml
@@ -0,0 +1 @@
+include: ../../analysis_options.yaml
\ No newline at end of file
diff --git a/mobile/plugins/ente_feature_flag/lib/ente_feature_flag.dart b/mobile/plugins/ente_feature_flag/lib/ente_feature_flag.dart
new file mode 100644
index 000000000..66a7132d8
--- /dev/null
+++ b/mobile/plugins/ente_feature_flag/lib/ente_feature_flag.dart
@@ -0,0 +1 @@
+export 'src/service.dart';
diff --git a/mobile/plugins/ente_feature_flag/lib/src/model.dart b/mobile/plugins/ente_feature_flag/lib/src/model.dart
new file mode 100644
index 000000000..49b292148
--- /dev/null
+++ b/mobile/plugins/ente_feature_flag/lib/src/model.dart
@@ -0,0 +1,66 @@
+import "dart:convert";
+import "dart:io";
+
+import "package:flutter/foundation.dart";
+
+class RemoteFlags {
+ final bool enableStripe;
+ final bool disableCFWorker;
+ final bool mapEnabled;
+ final bool faceSearchEnabled;
+ final bool passKeyEnabled;
+ final bool recoveryKeyVerified;
+ final bool internalUser;
+ final bool betaUser;
+
+ RemoteFlags({
+ required this.enableStripe,
+ required this.disableCFWorker,
+ required this.mapEnabled,
+ required this.faceSearchEnabled,
+ required this.passKeyEnabled,
+ required this.recoveryKeyVerified,
+ required this.internalUser,
+ required this.betaUser,
+ });
+
+ static RemoteFlags defaultValue = RemoteFlags(
+ enableStripe: Platform.isAndroid,
+ disableCFWorker: false,
+ mapEnabled: false,
+ faceSearchEnabled: false,
+ passKeyEnabled: false,
+ recoveryKeyVerified: false,
+ internalUser: kDebugMode,
+ betaUser: kDebugMode,
+ );
+
+ String toJson() => json.encode(toMap());
+ Map<String, dynamic> toMap() {
+ return {
+ 'enableStripe': enableStripe,
+ 'disableCFWorker': disableCFWorker,
+ 'mapEnabled': mapEnabled,
+ 'faceSearchEnabled': faceSearchEnabled,
+ 'passKeyEnabled': passKeyEnabled,
+ 'recoveryKeyVerified': recoveryKeyVerified,
+ 'internalUser': internalUser,
+ 'betaUser': betaUser,
+ };
+ }
+
+ factory RemoteFlags.fromMap(Map<String, dynamic> map) {
+ return RemoteFlags(
+ enableStripe: map['enableStripe'] ?? defaultValue.enableStripe,
+ disableCFWorker: map['disableCFWorker'] ?? defaultValue.disableCFWorker,
+ mapEnabled: map['mapEnabled'] ?? defaultValue.mapEnabled,
+ faceSearchEnabled:
+ map['faceSearchEnabled'] ?? defaultValue.faceSearchEnabled,
+ passKeyEnabled: map['passKeyEnabled'] ?? defaultValue.passKeyEnabled,
+ recoveryKeyVerified:
+ map['recoveryKeyVerified'] ?? defaultValue.recoveryKeyVerified,
+ internalUser: map['internalUser'] ?? defaultValue.internalUser,
+ betaUser: map['betaUser'] ?? defaultValue.betaUser,
+ );
+ }
+}
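(Editorial aside, not part of the patch: `fromMap` falls back per field rather than per payload, so a client that predates a newly added flag, or a payload that drops one, still yields a complete `RemoteFlags`. The same shape transliterated to TypeScript, illustrative only; the field set and defaults here are abbreviated.)

interface Flags {
    enableStripe: boolean;
    mapEnabled: boolean;
}

const defaults: Flags = { enableStripe: true, mapEnabled: false };

// Each field falls back independently, so partial or stale payloads still
// produce a complete Flags value.
const flagsFromMap = (map: Partial<Flags>): Flags => ({
    enableStripe: map.enableStripe ?? defaults.enableStripe,
    mapEnabled: map.mapEnabled ?? defaults.mapEnabled,
});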
diff --git a/mobile/plugins/ente_feature_flag/lib/src/service.dart b/mobile/plugins/ente_feature_flag/lib/src/service.dart
new file mode 100644
index 000000000..47539eeb5
--- /dev/null
+++ b/mobile/plugins/ente_feature_flag/lib/src/service.dart
@@ -0,0 +1,75 @@
+// ignore_for_file: always_use_package_imports
+
+import "dart:convert";
+import "dart:developer";
+import "dart:io";
+
+import "package:dio/dio.dart";
+import "package:flutter/foundation.dart";
+import "package:shared_preferences/shared_preferences.dart";
+
+import "model.dart";
+
+class FlagService {
+ final SharedPreferences _prefs;
+ final Dio _enteDio;
+ late final bool _usingEnteEmail;
+
+ FlagService(this._prefs, this._enteDio) {
+ _usingEnteEmail = _prefs.getString("email")?.endsWith("@ente.io") ?? false;
+ Future.delayed(const Duration(seconds: 5), () {
+ _fetch();
+ });
+ }
+
+ RemoteFlags? _flags;
+
+ RemoteFlags get flags {
+ try {
+ if (!_prefs.containsKey("remote_flags")) {
+ _fetch().ignore();
+ }
+ _flags ??= RemoteFlags.fromMap(
+ jsonDecode(_prefs.getString("remote_flags") ?? "{}"),
+ );
+ return _flags!;
+ } catch (e) {
+ debugPrint("Failed to get feature flags $e");
+ return RemoteFlags.defaultValue;
+ }
+ }
+
+ Future<void> _fetch() async {
+ try {
+ if (!_prefs.containsKey("token")) {
+ log("token not found, skip", name: "FlagService");
+ return;
+ }
+ log("fetching feature flags", name: "FlagService");
+ final response = await _enteDio.get("/remote-store/feature-flags");
+ final remoteFlags = RemoteFlags.fromMap(response.data);
+ await _prefs.setString("remote_flags", remoteFlags.toJson());
+ _flags = remoteFlags;
+ } catch (e) {
+ debugPrint("Failed to sync feature flags $e");
+ }
+ }
+
+ bool get disableCFWorker => flags.disableCFWorker;
+
+ bool get internalUser => flags.internalUser || _usingEnteEmail || kDebugMode;
+
+ bool get betaUser => flags.betaUser;
+
+ bool get internalOrBetaUser => internalUser || betaUser;
+
+ bool get enableStripe => Platform.isIOS ? false : flags.enableStripe;
+
+ bool get mapEnabled => flags.mapEnabled;
+
+ bool get faceSearchEnabled => flags.faceSearchEnabled;
+
+ bool get passKeyEnabled => flags.passKeyEnabled || internalOrBetaUser;
+
+ bool get recoveryKeyVerified => flags.recoveryKeyVerified;
+}
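(Editorial aside, not part of the patch: the read path is worth spelling out. `flags` is served synchronously from the SharedPreferences cache, falling back to defaults on any error, while a network refresh runs in the background. A TypeScript sketch of the same cache-then-refresh shape; `storage` and `api` are hypothetical stand-ins for SharedPreferences and enteDio.)

interface RemoteFlags { internalUser: boolean }
const defaultFlags: RemoteFlags = { internalUser: false };

declare const storage: {
    get: (key: string) => string | undefined;
    set: (key: string, value: string) => void;
};
declare const api: { fetchFeatureFlags: () => Promise<RemoteFlags> };

let cached: RemoteFlags | undefined;

const refresh = async () => {
    const fresh = await api.fetchFeatureFlags();
    storage.set("remote_flags", JSON.stringify(fresh));
    cached = fresh;
};

// Synchronous getter: never blocks on the network, never throws.
const flags = (): RemoteFlags => {
    try {
        if (!cached) {
            const stored = storage.get("remote_flags");
            if (stored) cached = JSON.parse(stored) as RemoteFlags;
            else void refresh(); // fire and forget; this call serves defaults
        }
        return cached ?? defaultFlags;
    } catch {
        return defaultFlags;
    }
};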
diff --git a/mobile/plugins/ente_feature_flag/pubspec.lock b/mobile/plugins/ente_feature_flag/pubspec.lock
new file mode 100644
index 000000000..6760d7c6c
--- /dev/null
+++ b/mobile/plugins/ente_feature_flag/pubspec.lock
@@ -0,0 +1,277 @@
+# Generated by pub
+# See https://dart.dev/tools/pub/glossary#lockfile
+packages:
+ characters:
+ dependency: transitive
+ description:
+ name: characters
+ sha256: "04a925763edad70e8443c99234dc3328f442e811f1d8fd1a72f1c8ad0f69a605"
+ url: "https://pub.dev"
+ source: hosted
+ version: "1.3.0"
+ collection:
+ dependency: "direct main"
+ description:
+ name: collection
+ sha256: ee67cb0715911d28db6bf4af1026078bd6f0128b07a5f66fb2ed94ec6783c09a
+ url: "https://pub.dev"
+ source: hosted
+ version: "1.18.0"
+ dio:
+ dependency: "direct main"
+ description:
+ name: dio
+ sha256: "7d328c4d898a61efc3cd93655a0955858e29a0aa647f0f9e02d59b3bb275e2e8"
+ url: "https://pub.dev"
+ source: hosted
+ version: "4.0.6"
+ ffi:
+ dependency: transitive
+ description:
+ name: ffi
+ sha256: "493f37e7df1804778ff3a53bd691d8692ddf69702cf4c1c1096a2e41b4779e21"
+ url: "https://pub.dev"
+ source: hosted
+ version: "2.1.2"
+ file:
+ dependency: transitive
+ description:
+ name: file
+ sha256: "5fc22d7c25582e38ad9a8515372cd9a93834027aacf1801cf01164dac0ffa08c"
+ url: "https://pub.dev"
+ source: hosted
+ version: "7.0.0"
+ flutter:
+ dependency: "direct main"
+ description: flutter
+ source: sdk
+ version: "0.0.0"
+ flutter_lints:
+ dependency: "direct dev"
+ description:
+ name: flutter_lints
+ sha256: "9e8c3858111da373efc5aa341de011d9bd23e2c5c5e0c62bccf32438e192d7b1"
+ url: "https://pub.dev"
+ source: hosted
+ version: "3.0.2"
+ flutter_web_plugins:
+ dependency: transitive
+ description: flutter
+ source: sdk
+ version: "0.0.0"
+ http_parser:
+ dependency: transitive
+ description:
+ name: http_parser
+ sha256: "2aa08ce0341cc9b354a498388e30986515406668dbcc4f7c950c3e715496693b"
+ url: "https://pub.dev"
+ source: hosted
+ version: "4.0.2"
+ lints:
+ dependency: transitive
+ description:
+ name: lints
+ sha256: cbf8d4b858bb0134ef3ef87841abdf8d63bfc255c266b7bf6b39daa1085c4290
+ url: "https://pub.dev"
+ source: hosted
+ version: "3.0.0"
+ material_color_utilities:
+ dependency: transitive
+ description:
+ name: material_color_utilities
+ sha256: "0e0a020085b65b6083975e499759762399b4475f766c21668c4ecca34ea74e5a"
+ url: "https://pub.dev"
+ source: hosted
+ version: "0.8.0"
+ meta:
+ dependency: transitive
+ description:
+ name: meta
+ sha256: d584fa6707a52763a52446f02cc621b077888fb63b93bbcb1143a7be5a0c0c04
+ url: "https://pub.dev"
+ source: hosted
+ version: "1.11.0"
+ path:
+ dependency: transitive
+ description:
+ name: path
+ sha256: "087ce49c3f0dc39180befefc60fdb4acd8f8620e5682fe2476afd0b3688bb4af"
+ url: "https://pub.dev"
+ source: hosted
+ version: "1.9.0"
+ path_provider_linux:
+ dependency: transitive
+ description:
+ name: path_provider_linux
+ sha256: f7a1fe3a634fe7734c8d3f2766ad746ae2a2884abe22e241a8b301bf5cac3279
+ url: "https://pub.dev"
+ source: hosted
+ version: "2.2.1"
+ path_provider_platform_interface:
+ dependency: transitive
+ description:
+ name: path_provider_platform_interface
+ sha256: "88f5779f72ba699763fa3a3b06aa4bf6de76c8e5de842cf6f29e2e06476c2334"
+ url: "https://pub.dev"
+ source: hosted
+ version: "2.1.2"
+ path_provider_windows:
+ dependency: transitive
+ description:
+ name: path_provider_windows
+ sha256: "8bc9f22eee8690981c22aa7fc602f5c85b497a6fb2ceb35ee5a5e5ed85ad8170"
+ url: "https://pub.dev"
+ source: hosted
+ version: "2.2.1"
+ platform:
+ dependency: transitive
+ description:
+ name: platform
+ sha256: "12220bb4b65720483f8fa9450b4332347737cf8213dd2840d8b2c823e47243ec"
+ url: "https://pub.dev"
+ source: hosted
+ version: "3.1.4"
+ plugin_platform_interface:
+ dependency: transitive
+ description:
+ name: plugin_platform_interface
+ sha256: "4820fbfdb9478b1ebae27888254d445073732dae3d6ea81f0b7e06d5dedc3f02"
+ url: "https://pub.dev"
+ source: hosted
+ version: "2.1.8"
+ shared_preferences:
+ dependency: "direct main"
+ description:
+ name: shared_preferences
+ sha256: d3bbe5553a986e83980916ded2f0b435ef2e1893dfaa29d5a7a790d0eca12180
+ url: "https://pub.dev"
+ source: hosted
+ version: "2.2.3"
+ shared_preferences_android:
+ dependency: transitive
+ description:
+ name: shared_preferences_android
+ sha256: "1ee8bf911094a1b592de7ab29add6f826a7331fb854273d55918693d5364a1f2"
+ url: "https://pub.dev"
+ source: hosted
+ version: "2.2.2"
+ shared_preferences_foundation:
+ dependency: transitive
+ description:
+ name: shared_preferences_foundation
+ sha256: "7708d83064f38060c7b39db12aefe449cb8cdc031d6062280087bc4cdb988f5c"
+ url: "https://pub.dev"
+ source: hosted
+ version: "2.3.5"
+ shared_preferences_linux:
+ dependency: transitive
+ description:
+ name: shared_preferences_linux
+ sha256: "9f2cbcf46d4270ea8be39fa156d86379077c8a5228d9dfdb1164ae0bb93f1faa"
+ url: "https://pub.dev"
+ source: hosted
+ version: "2.3.2"
+ shared_preferences_platform_interface:
+ dependency: transitive
+ description:
+ name: shared_preferences_platform_interface
+ sha256: "22e2ecac9419b4246d7c22bfbbda589e3acf5c0351137d87dd2939d984d37c3b"
+ url: "https://pub.dev"
+ source: hosted
+ version: "2.3.2"
+ shared_preferences_web:
+ dependency: transitive
+ description:
+ name: shared_preferences_web
+ sha256: "9aee1089b36bd2aafe06582b7d7817fd317ef05fc30e6ba14bff247d0933042a"
+ url: "https://pub.dev"
+ source: hosted
+ version: "2.3.0"
+ shared_preferences_windows:
+ dependency: transitive
+ description:
+ name: shared_preferences_windows
+ sha256: "841ad54f3c8381c480d0c9b508b89a34036f512482c407e6df7a9c4aa2ef8f59"
+ url: "https://pub.dev"
+ source: hosted
+ version: "2.3.2"
+ sky_engine:
+ dependency: transitive
+ description: flutter
+ source: sdk
+ version: "0.0.99"
+ source_span:
+ dependency: transitive
+ description:
+ name: source_span
+ sha256: "53e943d4206a5e30df338fd4c6e7a077e02254531b138a15aec3bd143c1a8b3c"
+ url: "https://pub.dev"
+ source: hosted
+ version: "1.10.0"
+ stack_trace:
+ dependency: "direct main"
+ description:
+ name: stack_trace
+ sha256: "73713990125a6d93122541237550ee3352a2d84baad52d375a4cad2eb9b7ce0b"
+ url: "https://pub.dev"
+ source: hosted
+ version: "1.11.1"
+ string_scanner:
+ dependency: transitive
+ description:
+ name: string_scanner
+ sha256: "556692adab6cfa87322a115640c11f13cb77b3f076ddcc5d6ae3c20242bedcde"
+ url: "https://pub.dev"
+ source: hosted
+ version: "1.2.0"
+ term_glyph:
+ dependency: transitive
+ description:
+ name: term_glyph
+ sha256: a29248a84fbb7c79282b40b8c72a1209db169a2e0542bce341da992fe1bc7e84
+ url: "https://pub.dev"
+ source: hosted
+ version: "1.2.1"
+ typed_data:
+ dependency: transitive
+ description:
+ name: typed_data
+ sha256: facc8d6582f16042dd49f2463ff1bd6e2c9ef9f3d5da3d9b087e244a7b564b3c
+ url: "https://pub.dev"
+ source: hosted
+ version: "1.3.2"
+ vector_math:
+ dependency: transitive
+ description:
+ name: vector_math
+ sha256: "80b3257d1492ce4d091729e3a67a60407d227c27241d6927be0130c98e741803"
+ url: "https://pub.dev"
+ source: hosted
+ version: "2.1.4"
+ web:
+ dependency: transitive
+ description:
+ name: web
+ sha256: "97da13628db363c635202ad97068d47c5b8aa555808e7a9411963c533b449b27"
+ url: "https://pub.dev"
+ source: hosted
+ version: "0.5.1"
+ win32:
+ dependency: transitive
+ description:
+ name: win32
+ sha256: "0a989dc7ca2bb51eac91e8fd00851297cfffd641aa7538b165c62637ca0eaa4a"
+ url: "https://pub.dev"
+ source: hosted
+ version: "5.4.0"
+ xdg_directories:
+ dependency: transitive
+ description:
+ name: xdg_directories
+ sha256: faea9dee56b520b55a566385b84f2e8de55e7496104adada9962e0bd11bcff1d
+ url: "https://pub.dev"
+ source: hosted
+ version: "1.0.4"
+sdks:
+ dart: ">=3.3.0 <4.0.0"
+ flutter: ">=3.19.0"
diff --git a/mobile/plugins/ente_feature_flag/pubspec.yaml b/mobile/plugins/ente_feature_flag/pubspec.yaml
new file mode 100644
index 000000000..7507d61f1
--- /dev/null
+++ b/mobile/plugins/ente_feature_flag/pubspec.yaml
@@ -0,0 +1,19 @@
+name: ente_feature_flag
+version: 0.0.1
+publish_to: none
+
+environment:
+ sdk: '>=3.3.0 <4.0.0'
+
+dependencies:
+ collection:
+ dio: ^4.0.6
+ flutter:
+ sdk: flutter
+ shared_preferences: ^2.0.5
+ stack_trace:
+
+dev_dependencies:
+ flutter_lints:
+
+flutter:
\ No newline at end of file
diff --git a/mobile/pubspec.lock b/mobile/pubspec.lock
index 0da864aeb..7ac3445c5 100644
--- a/mobile/pubspec.lock
+++ b/mobile/pubspec.lock
@@ -442,6 +442,13 @@ packages:
url: "https://pub.dev"
source: hosted
version: "2.1.17"
+ ente_feature_flag:
+ dependency: "direct main"
+ description:
+ path: "plugins/ente_feature_flag"
+ relative: true
+ source: path
+ version: "0.0.1"
equatable:
dependency: "direct main"
description:
@@ -559,10 +566,10 @@ packages:
dependency: "direct main"
description:
name: firebase_core
- sha256: a864d1b6afd25497a3b57b016886d1763df52baaa69758a46723164de8d187fe
+ sha256: "6b1152a5af3b1cfe7e45309e96fc1aa14873f410f7aadb3878aa7812acfa7531"
url: "https://pub.dev"
source: hosted
- version: "2.29.0"
+ version: "2.30.0"
firebase_core_platform_interface:
dependency: transitive
description:
@@ -583,10 +590,10 @@ packages:
dependency: "direct main"
description:
name: firebase_messaging
- sha256: e41586e0fd04fe9a40424f8b0053d0832e6d04f49e020cdaf9919209a28497e9
+ sha256: "87e3eda0ecdfeadb5fd1cf0dc5153aea5307a0cfca751c4b1ac97bfdd805660e"
url: "https://pub.dev"
source: hosted
- version: "14.7.19"
+ version: "14.8.1"
firebase_messaging_platform_interface:
dependency: transitive
description:
diff --git a/mobile/pubspec.yaml b/mobile/pubspec.yaml
index 22ccbe8eb..fbdcd92fa 100644
--- a/mobile/pubspec.yaml
+++ b/mobile/pubspec.yaml
@@ -48,6 +48,8 @@ dependencies:
dotted_border: ^2.1.0
dropdown_button2: ^2.0.0
email_validator: ^2.0.1
+ ente_feature_flag:
+ path: plugins/ente_feature_flag
equatable: ^2.0.5
event_bus: ^2.0.0
exif: ^3.0.0
@@ -60,8 +62,8 @@ dependencies:
file_saver:
# Use forked version till this PR is merged: https://github.com/incrediblezayed/file_saver/pull/87
git: https://github.com/jesims/file_saver.git
- firebase_core: ^2.13.1
- firebase_messaging: ^14.6.2
+ firebase_core: ^2.30.0
+ firebase_messaging: ^14.8.0
fk_user_agent: ^2.0.1
flutter:
sdk: flutter
@@ -98,7 +100,7 @@ dependencies:
isar_flutter_libs: ^3.1.0+1
json_annotation: ^4.8.0
latlong2: ^0.9.0
- like_button: ^2.0.2
+ like_button: ^2.0.5
loading_animations: ^2.1.0
local_auth: ^2.1.5
local_auth_android:
diff --git a/server/cmd/museum/main.go b/server/cmd/museum/main.go
index 8be76120d..84c34189d 100644
--- a/server/cmd/museum/main.go
+++ b/server/cmd/museum/main.go
@@ -194,7 +194,7 @@ func main() {
commonBillController := commonbilling.NewController(storagBonusRepo, userRepo, usageRepo)
appStoreController := controller.NewAppStoreController(defaultPlan,
billingRepo, fileRepo, userRepo, commonBillController)
-
+ remoteStoreController := &remoteStoreCtrl.Controller{Repo: remoteStoreRepository}
playStoreController := controller.NewPlayStoreController(defaultPlan,
billingRepo, fileRepo, userRepo, storagBonusRepo, commonBillController)
stripeController := controller.NewStripeController(plans, stripeClients,
@@ -610,6 +610,7 @@ func main() {
UserAuthRepo: userAuthRepo,
UserController: userController,
FamilyController: familyController,
+ RemoteStoreController: remoteStoreController,
FileRepo: fileRepo,
StorageBonusRepo: storagBonusRepo,
BillingRepo: billingRepo,
@@ -631,6 +632,7 @@ func main() {
adminAPI.PUT("/user/change-email", adminHandler.ChangeEmail)
adminAPI.DELETE("/user/delete", adminHandler.DeleteUser)
adminAPI.POST("/user/recover", adminHandler.RecoverAccount)
+ adminAPI.POST("/user/update-flag", adminHandler.UpdateFeatureFlag)
adminAPI.GET("/email-hash", adminHandler.GetEmailHash)
adminAPI.POST("/emails-from-hashes", adminHandler.GetEmailsFromHashes)
adminAPI.PUT("/user/subscription", adminHandler.UpdateSubscription)
@@ -658,7 +660,6 @@ func main() {
privateAPI.DELETE("/authenticator/entity", authenticatorHandler.DeleteEntity)
privateAPI.GET("/authenticator/entity/diff", authenticatorHandler.GetDiff)
- remoteStoreController := &remoteStoreCtrl.Controller{Repo: remoteStoreRepository}
dataCleanupController := &dataCleanupCtrl.DeleteUserCleanupController{
Repo: dataCleanupRepository,
UserRepo: userRepo,
@@ -672,6 +673,7 @@ func main() {
privateAPI.POST("/remote-store/update", remoteStoreHandler.InsertOrUpdate)
privateAPI.GET("/remote-store", remoteStoreHandler.GetKey)
+ privateAPI.GET("/remote-store/feature-flags", remoteStoreHandler.GetFeatureFlags)
pushHandler := &api.PushHandler{PushController: pushController}
privateAPI.POST("/push/token", pushHandler.AddToken)
@@ -837,7 +839,7 @@ func setupAndStartCrons(userAuthRepo *repo.UserAuthRepository, publicCollectionR
schedule(c, "@every 24h", func() {
_ = userAuthRepo.RemoveDeletedTokens(timeUtil.MicrosecondBeforeDays(30))
- _ = castDb.DeleteOldCodes(context.Background(), timeUtil.MicrosecondBeforeDays(1))
+ _ = castDb.DeleteOldSessions(context.Background(), timeUtil.MicrosecondBeforeDays(7))
_ = publicCollectionRepo.CleanupAccessHistory(context.Background())
})
@@ -895,6 +897,8 @@ func setupAndStartCrons(userAuthRepo *repo.UserAuthRepository, publicCollectionR
})
schedule(c, "@every 30m", func() {
+ // delete unclaimed codes older than 60 minutes
+ _ = castDb.DeleteUnclaimedCodes(context.Background(), timeUtil.MicrosecondsBeforeMinutes(60))
dataCleanupCtrl.DeleteDataCron()
})
diff --git a/server/docs/docker.md b/server/docs/docker.md
index d8f3db913..a328d734b 100644
--- a/server/docs/docker.md
+++ b/server/docs/docker.md
@@ -45,7 +45,7 @@ require you to clone the repository or build any images.
+ image: ghcr.io/ente-io/server
```
-4. Create an (empty) configuration file. Yyou can later put your custom
+4. Create an (empty) configuration file. You can later put your custom
configuration in this if needed.
```sh
diff --git a/server/ente/errors.go b/server/ente/errors.go
index 49aed7151..96e7bd4a1 100644
--- a/server/ente/errors.go
+++ b/server/ente/errors.go
@@ -149,6 +149,12 @@ var ErrCastPermissionDenied = ApiError{
HttpStatusCode: http.StatusForbidden,
}
+var ErrCastIPMismatch = ApiError{
+ Code: "CAST_IP_MISMATCH",
+ Message: "IP mismatch",
+ HttpStatusCode: http.StatusForbidden,
+}
+
type ErrorCode string
const (
diff --git a/server/ente/remotestore.go b/server/ente/remotestore.go
index 02eb93232..8f518f2a1 100644
--- a/server/ente/remotestore.go
+++ b/server/ente/remotestore.go
@@ -13,3 +13,66 @@ type UpdateKeyValueRequest struct {
Key string `json:"key" binding:"required"`
Value string `json:"value" binding:"required"`
}
+
+type AdminUpdateKeyValueRequest struct {
+ UserID int64 `json:"userID" binding:"required"`
+ Key string `json:"key" binding:"required"`
+ Value string `json:"value" binding:"required"`
+}
+
+type FeatureFlagResponse struct {
+ EnableStripe bool `json:"enableStripe"`
+ // If true, the mobile client will stop using CF worker to download files
+ DisableCFWorker bool `json:"disableCFWorker"`
+ MapEnabled bool `json:"mapEnabled"`
+ FaceSearchEnabled bool `json:"faceSearchEnabled"`
+ PassKeyEnabled bool `json:"passKeyEnabled"`
+ RecoveryKeyVerified bool `json:"recoveryKeyVerified"`
+ InternalUser bool `json:"internalUser"`
+ BetaUser bool `json:"betaUser"`
+}
+
+type FlagKey string
+
+const (
+ RecoveryKeyVerified FlagKey = "recoveryKeyVerified"
+ MapEnabled FlagKey = "mapEnabled"
+ FaceSearchEnabled FlagKey = "faceSearchEnabled"
+ PassKeyEnabled FlagKey = "passKeyEnabled"
+ IsInternalUser FlagKey = "internalUser"
+ IsBetaUser FlagKey = "betaUser"
+)
+
+func (k FlagKey) String() string {
+ return string(k)
+}
+
+// UserEditable returns true if the key is user editable
+func (k FlagKey) UserEditable() bool {
+ switch k {
+ case RecoveryKeyVerified, MapEnabled, FaceSearchEnabled, PassKeyEnabled:
+ return true
+ default:
+ return false
+ }
+}
+
+func (k FlagKey) IsAdminEditable() bool {
+ switch k {
+ case RecoveryKeyVerified, MapEnabled, FaceSearchEnabled:
+ return false
+ case IsInternalUser, IsBetaUser, PassKeyEnabled:
+ return true
+ default:
+ return true
+ }
+}
+
+func (k FlagKey) IsBoolType() bool {
+ switch k {
+ case RecoveryKeyVerified, MapEnabled, FaceSearchEnabled, PassKeyEnabled, IsInternalUser, IsBetaUser:
+ return true
+ default:
+ return false
+ }
+}
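A standalone sketch of how the FlagKey helpers above compose; the import path is the module name used elsewhere in this repository.

```go
package main

import (
	"fmt"

	"github.com/ente-io/museum/ente"
)

func main() {
	k := ente.MapEnabled
	// mapEnabled is user editable, not admin editable, and bool-typed, so a
	// user-initiated update with value "true" or "false" passes validation.
	fmt.Println(k.String(), k.UserEditable(), k.IsAdminEditable(), k.IsBoolType())
}
```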
diff --git a/server/migrations/84_add_cast_column.down.sql b/server/migrations/84_add_cast_column.down.sql
new file mode 100644
index 000000000..c08fed94e
--- /dev/null
+++ b/server/migrations/84_add_cast_column.down.sql
@@ -0,0 +1 @@
+ALTER TABLE casting DROP COLUMN IF EXISTS ip;
\ No newline at end of file
diff --git a/server/migrations/84_add_cast_column.up.sql b/server/migrations/84_add_cast_column.up.sql
new file mode 100644
index 000000000..828c2e57c
--- /dev/null
+++ b/server/migrations/84_add_cast_column.up.sql
@@ -0,0 +1,5 @@
+--- Delete all rows from casting table and add a non-nullable column called ip
+BEGIN;
+DELETE FROM casting;
+ALTER TABLE casting ADD COLUMN ip text NOT NULL;
+COMMIT;
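The DELETE is what makes adding a NOT NULL column without a default valid on a non-empty table; a hypothetical alternative that preserves existing rows would backfill via a default instead:

```sql
-- Alternative sketch (not what this migration does): keep existing rows and
-- backfill the new column with an empty string.
ALTER TABLE casting ADD COLUMN ip text NOT NULL DEFAULT '';
```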
diff --git a/server/pkg/api/admin.go b/server/pkg/api/admin.go
index b153e19bb..0b6ac18ef 100644
--- a/server/pkg/api/admin.go
+++ b/server/pkg/api/admin.go
@@ -3,6 +3,7 @@ package api
import (
"errors"
"fmt"
+ "github.com/ente-io/museum/pkg/controller/remotestore"
"net/http"
"strconv"
"strings"
@@ -43,6 +44,7 @@ type AdminHandler struct {
BillingController *controller.BillingController
UserController *user.UserController
FamilyController *family.Controller
+ RemoteStoreController *remotestore.Controller
ObjectCleanupController *controller.ObjectCleanupController
MailingListsController *controller.MailingListsController
DiscordController *discord.DiscordController
@@ -260,6 +262,32 @@ func (h *AdminHandler) RemovePasskeys(c *gin.Context) {
c.JSON(http.StatusOK, gin.H{})
}
+func (h *AdminHandler) UpdateFeatureFlag(c *gin.Context) {
+ var request ente.AdminUpdateKeyValueRequest
+ if err := c.ShouldBindJSON(&request); err != nil {
+ handler.Error(c, stacktrace.Propagate(ente.ErrBadRequest, "Bad request"))
+ return
+ }
+ go h.DiscordController.NotifyAdminAction(
+ fmt.Sprintf("Admin (%d) updating flag:%s to val:%s for %d", auth.GetUserID(c.Request.Header), request.Key, request.Value, request.UserID))
+
+ logger := logrus.WithFields(logrus.Fields{
+ "user_id": request.UserID,
+ "admin_id": auth.GetUserID(c.Request.Header),
+ "req_id": requestid.Get(c),
+ "req_ctx": "update_feature_flag",
+ })
+ logger.Info("Start update")
+ err := h.RemoteStoreController.AdminInsertOrUpdate(c, request)
+ if err != nil {
+ logger.WithError(err).Error("Failed to update flag")
+ handler.Error(c, stacktrace.Propagate(err, ""))
+ return
+ }
+ logger.Info("successfully updated flag")
+ c.JSON(http.StatusOK, gin.H{})
+}
+
func (h *AdminHandler) CloseFamily(c *gin.Context) {
var request ente.AdminOpsForUserRequest
diff --git a/server/pkg/api/remotestore.go b/server/pkg/api/remotestore.go
index ea6e621a3..9f03554de 100644
--- a/server/pkg/api/remotestore.go
+++ b/server/pkg/api/remotestore.go
@@ -49,3 +49,13 @@ func (h *RemoteStoreHandler) GetKey(c *gin.Context) {
}
c.JSON(http.StatusOK, resp)
}
+
+// GetFeatureFlags returns all the feature flags and their values for the given user
+func (h *RemoteStoreHandler) GetFeatureFlags(c *gin.Context) {
+ resp, err := h.Controller.GetFeatureFlags(c)
+ if err != nil {
+ handler.Error(c, stacktrace.Propagate(err, "failed to get feature flags"))
+ return
+ }
+ c.JSON(http.StatusOK, resp)
+}
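Going by the FeatureFlagResponse struct and its JSON tags, a response from `GET /remote-store/feature-flags` would look like this (values illustrative):

```json
{
    "enableStripe": true,
    "disableCFWorker": false,
    "mapEnabled": false,
    "faceSearchEnabled": false,
    "passKeyEnabled": true,
    "recoveryKeyVerified": true,
    "internalUser": false,
    "betaUser": false
}
```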
diff --git a/server/pkg/controller/cast/controller.go b/server/pkg/controller/cast/controller.go
index 3b76420cc..4432e149f 100644
--- a/server/pkg/controller/cast/controller.go
+++ b/server/pkg/controller/cast/controller.go
@@ -2,12 +2,15 @@ package cast
import (
"context"
+ "github.com/ente-io/museum/ente"
"github.com/ente-io/museum/ente/cast"
"github.com/ente-io/museum/pkg/controller/access"
castRepo "github.com/ente-io/museum/pkg/repo/cast"
"github.com/ente-io/museum/pkg/utils/auth"
+ "github.com/ente-io/museum/pkg/utils/network"
"github.com/ente-io/stacktrace"
"github.com/gin-gonic/gin"
+ "github.com/sirupsen/logrus"
)
type Controller struct {
@@ -24,12 +27,24 @@ func NewController(castRepo *castRepo.Repository,
}
}
-func (c *Controller) RegisterDevice(ctx context.Context, request *cast.RegisterDeviceRequest) (string, error) {
- return c.CastRepo.AddCode(ctx, request.DeviceCode, request.PublicKey)
+func (c *Controller) RegisterDevice(ctx *gin.Context, request *cast.RegisterDeviceRequest) (string, error) {
+ return c.CastRepo.AddCode(ctx, request.DeviceCode, request.PublicKey, network.GetClientIP(ctx))
}
-func (c *Controller) GetPublicKey(ctx context.Context, deviceCode string) (string, error) {
- return c.CastRepo.GetPubKey(ctx, deviceCode)
+func (c *Controller) GetPublicKey(ctx *gin.Context, deviceCode string) (string, error) {
+ pubKey, ip, err := c.CastRepo.GetPubKeyAndIp(ctx, deviceCode)
+ if err != nil {
+ return "", stacktrace.Propagate(err, "")
+ }
+ if ip != network.GetClientIP(ctx) {
+ logrus.WithFields(logrus.Fields{
+ "deviceCode": deviceCode,
+ "ip": ip,
+ "clientIP": network.GetClientIP(ctx),
+ }).Warn("GetPublicKey: IP mismatch")
+ return "", &ente.ErrCastIPMismatch
+ }
+ return pubKey, nil
}
func (c *Controller) GetEncCastData(ctx context.Context, deviceCode string) (*string, error) {
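With this change, a GetPublicKey lookup from a different client IP than the one that registered the code fails with the ErrCastIPMismatch ApiError defined earlier, i.e. an HTTP 403; assuming the shared error handler serializes the code and message fields as usual, the body would resemble:

```json
{ "code": "CAST_IP_MISMATCH", "message": "IP mismatch" }
```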
diff --git a/server/pkg/controller/file.go b/server/pkg/controller/file.go
index a4ac4b1b7..e91d299f1 100644
--- a/server/pkg/controller/file.go
+++ b/server/pkg/controller/file.go
@@ -64,8 +64,12 @@ func (c *FileController) validateFileCreateOrUpdateReq(userID int64, file ente.F
if !strings.HasPrefix(file.File.ObjectKey, objectPathPrefix) || !strings.HasPrefix(file.Thumbnail.ObjectKey, objectPathPrefix) {
return stacktrace.Propagate(ente.ErrBadRequest, "Incorrect object key reported")
}
- if file.EncryptedKey == "" || file.KeyDecryptionNonce == "" {
- return stacktrace.Propagate(ente.ErrBadRequest, "EncryptedKey and KeyDecryptionNonce are required")
+ isCreateFileReq := file.ID == 0
+ // Validate the key attributes only on file creation; key details are not sent on update
+ if isCreateFileReq {
+ if file.EncryptedKey == "" || file.KeyDecryptionNonce == "" {
+ return stacktrace.Propagate(ente.ErrBadRequest, "EncryptedKey and KeyDecryptionNonce are required")
+ }
}
if file.File.DecryptionHeader == "" || file.Thumbnail.DecryptionHeader == "" {
return stacktrace.Propagate(ente.ErrBadRequest, "DecryptionHeader for file & thumb is required")
@@ -73,18 +77,24 @@ func (c *FileController) validateFileCreateOrUpdateReq(userID int64, file ente.F
if file.UpdationTime == 0 {
return stacktrace.Propagate(ente.ErrBadRequest, "UpdationTime is required")
}
- collection, err := c.CollectionRepo.Get(file.CollectionID)
- if err != nil {
- return stacktrace.Propagate(err, "")
- }
- // Verify that user owns the collection.
- // Warning: Do not remove this check
- if collection.Owner.ID != userID || file.OwnerID != userID {
- return stacktrace.Propagate(ente.ErrPermissionDenied, "")
- }
- if collection.IsDeleted {
- return stacktrace.Propagate(ente.ErrNotFound, "collection has been deleted")
+ if isCreateFileReq {
+ collection, err := c.CollectionRepo.Get(file.CollectionID)
+ if err != nil {
+ return stacktrace.Propagate(err, "")
+ }
+ // Verify that user owns the collection.
+ // Warning: Do not remove this check
+ if collection.Owner.ID != userID {
+ return stacktrace.Propagate(ente.ErrPermissionDenied, "collection doesn't belong to user")
+ }
+ if collection.IsDeleted {
+ return stacktrace.Propagate(ente.ErrNotFound, "collection has been deleted")
+ }
+ if file.OwnerID != userID {
+ return stacktrace.Propagate(ente.ErrPermissionDenied, "file ownerID doesn't match with userID")
+ }
}
+
return nil
}
diff --git a/server/pkg/controller/remotestore/controller.go b/server/pkg/controller/remotestore/controller.go
index d41bf7e5f..bf8e4acfc 100644
--- a/server/pkg/controller/remotestore/controller.go
+++ b/server/pkg/controller/remotestore/controller.go
@@ -3,6 +3,7 @@ package remotestore
import (
"database/sql"
"errors"
+ "fmt"
"github.com/ente-io/museum/ente"
"github.com/ente-io/museum/pkg/repo/remotestore"
@@ -16,12 +17,22 @@ type Controller struct {
Repo *remotestore.Repository
}
-// Insert of update the key's value
+// InsertOrUpdate the key's value
func (c *Controller) InsertOrUpdate(ctx *gin.Context, request ente.UpdateKeyValueRequest) error {
+ if err := _validateRequest(request.Key, request.Value, false); err != nil {
+ return err
+ }
userID := auth.GetUserID(ctx.Request.Header)
return c.Repo.InsertOrUpdate(ctx, userID, request.Key, request.Value)
}
+func (c *Controller) AdminInsertOrUpdate(ctx *gin.Context, request ente.AdminUpdateKeyValueRequest) error {
+ if err := _validateRequest(request.Key, request.Value, true); err != nil {
+ return err
+ }
+ return c.Repo.InsertOrUpdate(ctx, request.UserID, request.Key, request.Value)
+}
+
func (c *Controller) Get(ctx *gin.Context, req ente.GetValueRequest) (*ente.GetValueResponse, error) {
userID := auth.GetUserID(ctx.Request.Header)
value, err := c.Repo.GetValue(ctx, userID, req.Key)
@@ -34,3 +45,50 @@ func (c *Controller) Get(ctx *gin.Context, req ente.GetValueRequest) (*ente.GetV
}
return &ente.GetValueResponse{Value: value}, nil
}
+
+func (c *Controller) GetFeatureFlags(ctx *gin.Context) (*ente.FeatureFlagResponse, error) {
+ userID := auth.GetUserID(ctx.Request.Header)
+ values, err := c.Repo.GetAllValues(ctx, userID)
+ if err != nil {
+ return nil, stacktrace.Propagate(err, "")
+ }
+ response := &ente.FeatureFlagResponse{
+ EnableStripe: true, // enable stripe for all
+ DisableCFWorker: false,
+ }
+ for key, value := range values {
+ flag := ente.FlagKey(key)
+ if !flag.IsBoolType() {
+ continue
+ }
+ switch flag {
+ case ente.RecoveryKeyVerified:
+ response.RecoveryKeyVerified = value == "true"
+ case ente.MapEnabled:
+ response.MapEnabled = value == "true"
+ case ente.FaceSearchEnabled:
+ response.FaceSearchEnabled = value == "true"
+ case ente.PassKeyEnabled:
+ response.PassKeyEnabled = value == "true"
+ case ente.IsInternalUser:
+ response.InternalUser = value == "true"
+ case ente.IsBetaUser:
+ response.BetaUser = value == "true"
+ }
+ }
+ return response, nil
+}
+
+func _validateRequest(key, value string, byAdmin bool) error {
+ flag := ente.FlagKey(key)
+ if !flag.UserEditable() && !byAdmin {
+ return stacktrace.Propagate(ente.NewBadRequestWithMessage(fmt.Sprintf("key %s is not user editable", key)), "key not user editable")
+ }
+ if byAdmin && !flag.IsAdminEditable() {
+ return stacktrace.Propagate(ente.NewBadRequestWithMessage(fmt.Sprintf("key %s is not admin editable", key)), "key not admin editable")
+ }
+ if flag.IsBoolType() && value != "true" && value != "false" {
+ return stacktrace.Propagate(ente.NewBadRequestWithMessage(fmt.Sprintf("value %s is not allowed", value)), "value not allowed")
+ }
+ return nil
+}
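For illustration, a body that passes _validateRequest on the new admin route (`POST /admin/user/update-flag`); the userID is a placeholder:

```json
{ "userID": 42, "key": "betaUser", "value": "true" }
```

betaUser is not user editable but is admin editable and bool-typed, so the same key on the user-facing `/remote-store/update` route would be rejected.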
diff --git a/server/pkg/middleware/rate_limit.go b/server/pkg/middleware/rate_limit.go
index 08e0f00b6..076c050c9 100644
--- a/server/pkg/middleware/rate_limit.go
+++ b/server/pkg/middleware/rate_limit.go
@@ -150,6 +150,7 @@ func (r *RateLimitMiddleware) getLimiter(reqPath string, reqMethod string) *limi
reqPath == "/public-collection/verify-password" ||
reqPath == "/family/accept-invite" ||
reqPath == "/users/srp/attributes" ||
+ (reqPath == "/cast/device-info/" && reqMethod == "POST") ||
reqPath == "/users/srp/verify-session" ||
reqPath == "/family/invite-info/:token" ||
reqPath == "/family/add-member" ||
diff --git a/server/pkg/repo/cast/repo.go b/server/pkg/repo/cast/repo.go
index 306c1d481..89ebc4083 100644
--- a/server/pkg/repo/cast/repo.go
+++ b/server/pkg/repo/cast/repo.go
@@ -7,6 +7,7 @@ import (
"github.com/ente-io/museum/pkg/utils/random"
"github.com/ente-io/stacktrace"
"github.com/google/uuid"
+ log "github.com/sirupsen/logrus"
"strings"
)
@@ -14,7 +15,7 @@ type Repository struct {
DB *sql.DB
}
-func (r *Repository) AddCode(ctx context.Context, code *string, pubKey string) (string, error) {
+func (r *Repository) AddCode(ctx context.Context, code *string, pubKey string, ip string) (string, error) {
var codeValue string
var err error
if code == nil || *code == "" {
@@ -25,7 +26,7 @@ func (r *Repository) AddCode(ctx context.Context, code *string, pubKey string) (
} else {
codeValue = strings.TrimSpace(*code)
}
- _, err = r.DB.ExecContext(ctx, "INSERT INTO casting (code, public_key, id) VALUES ($1, $2, $3)", codeValue, pubKey, uuid.New())
+ _, err = r.DB.ExecContext(ctx, "INSERT INTO casting (code, public_key, id, ip) VALUES ($1, $2, $3, $4)", codeValue, pubKey, uuid.New(), ip)
if err != nil {
return "", err
}
@@ -38,17 +39,17 @@ func (r *Repository) InsertCastData(ctx context.Context, castUserID int64, code
return err
}
-func (r *Repository) GetPubKey(ctx context.Context, code string) (string, error) {
- var pubKey string
- row := r.DB.QueryRowContext(ctx, "SELECT public_key FROM casting WHERE code = $1 and is_deleted=false", code)
- err := row.Scan(&pubKey)
+func (r *Repository) GetPubKeyAndIp(ctx context.Context, code string) (string, string, error) {
+ var pubKey, ip string
+ row := r.DB.QueryRowContext(ctx, "SELECT public_key, ip FROM casting WHERE code = $1 and is_deleted=false", code)
+ err := row.Scan(&pubKey, &ip)
if err != nil {
if err == sql.ErrNoRows {
- return "", ente.ErrNotFoundError.NewErr("code not found")
+ return "", "", ente.ErrNotFoundError.NewErr("code not found")
}
- return "", err
+ return "", "", err
}
- return pubKey, nil
+ return pubKey, ip, nil
}
func (r *Repository) GetEncCastData(ctx context.Context, code string) (*string, error) {
@@ -89,12 +90,27 @@ func (r *Repository) UpdateLastUsedAtForToken(ctx context.Context, token string)
return nil
}
-// DeleteOldCodes that are not associated with a collection and are older than the given time
-func (r *Repository) DeleteOldCodes(ctx context.Context, expirtyTime int64) error {
- _, err := r.DB.ExecContext(ctx, "DELETE FROM casting WHERE last_used_at < $1 and is_deleted=false and collection_id is null", expirtyTime)
+// DeleteUnclaimedCodes deletes codes that are not associated with a collection and are older than the given time
+func (r *Repository) DeleteUnclaimedCodes(ctx context.Context, expiryTime int64) error {
+ result, err := r.DB.ExecContext(ctx, "DELETE FROM casting WHERE last_used_at < $1 and is_deleted=false and collection_id is null", expiryTime)
if err != nil {
return err
}
+ if rows, rErr := result.RowsAffected(); rErr == nil && rows > 0 {
+ log.Infof("Deleted %d unclaimed codes", rows)
+ }
+ return nil
+}
+
+// DeleteOldSessions deletes sessions whose last_used_at is older than the given time
+func (r *Repository) DeleteOldSessions(ctx context.Context, expiryTime int64) error {
+ result, err := r.DB.ExecContext(ctx, "DELETE FROM casting WHERE last_used_at < $1", expiryTime)
+ if err != nil {
+ return err
+ }
+ if rows, rErr := result.RowsAffected(); rErr == nil && rows > 0 {
+ log.Infof("Deleted %d old sessions", rows)
+ }
return nil
}
diff --git a/server/pkg/repo/remotestore/repository.go b/server/pkg/repo/remotestore/repository.go
index dc54b0cfc..2548f4901 100644
--- a/server/pkg/repo/remotestore/repository.go
+++ b/server/pkg/repo/remotestore/repository.go
@@ -13,7 +13,6 @@ type Repository struct {
DB *sql.DB
}
-//
func (r *Repository) InsertOrUpdate(ctx context.Context, userID int64, key string, value string) error {
_, err := r.DB.ExecContext(ctx, `INSERT INTO remote_store(user_id, key_name, key_value) VALUES ($1,$2,$3)
ON CONFLICT (user_id, key_name) DO UPDATE SET key_value = $3;
@@ -40,3 +39,25 @@ func (r *Repository) GetValue(ctx context.Context, userID int64, key string) (st
}
return keyValue, nil
}
+
+// GetAllValues fetches and returns all the key-value pairs for the given user_id
+func (r *Repository) GetAllValues(ctx context.Context, userID int64) (map[string]string, error) {
+ rows, err := r.DB.QueryContext(ctx, `SELECT key_name, key_value FROM remote_store
+ WHERE user_id = $1`,
+ userID, // $1
+ )
+ if err != nil {
+ return nil, stacktrace.Propagate(err, "reading value failed")
+ }
+ defer rows.Close()
+ values := make(map[string]string)
+ for rows.Next() {
+ var key, value string
+ err := rows.Scan(&key, &value)
+ if err != nil {
+ return nil, stacktrace.Propagate(err, "reading value failed")
+ }
+ values[key] = value
+ }
+ return values, nil
+}
diff --git a/web/apps/cast/package.json b/web/apps/cast/package.json
index 2437c6c14..012148969 100644
--- a/web/apps/cast/package.json
+++ b/web/apps/cast/package.json
@@ -7,7 +7,6 @@
"@/next": "*",
"@ente/accounts": "*",
"@ente/eslint-config": "*",
- "@ente/shared": "*",
- "mime-types": "^2.1.35"
+ "@ente/shared": "*"
}
}
diff --git a/web/apps/cast/public/images/help-qrcode.webp b/web/apps/cast/public/images/help-qrcode.webp
deleted file mode 100644
index 79cd22c99..000000000
Binary files a/web/apps/cast/public/images/help-qrcode.webp and /dev/null differ
diff --git a/web/apps/cast/src/constants/collection.ts b/web/apps/cast/src/constants/collection.ts
deleted file mode 100644
index d91cfc81d..000000000
--- a/web/apps/cast/src/constants/collection.ts
+++ /dev/null
@@ -1,24 +0,0 @@
-export enum CollectionType {
- folder = "folder",
- favorites = "favorites",
- album = "album",
- uncategorized = "uncategorized",
-}
-
-export enum CollectionSummaryType {
- folder = "folder",
- favorites = "favorites",
- album = "album",
- archive = "archive",
- trash = "trash",
- uncategorized = "uncategorized",
- all = "all",
- outgoingShare = "outgoingShare",
- incomingShareViewer = "incomingShareViewer",
- incomingShareCollaborator = "incomingShareCollaborator",
- sharedOnlyViaLink = "sharedOnlyViaLink",
- archived = "archived",
- defaultHidden = "defaultHidden",
- hiddenItems = "hiddenItems",
- pinned = "pinned",
-}
diff --git a/web/apps/cast/src/constants/file.ts b/web/apps/cast/src/constants/file.ts
deleted file mode 100644
index 9be574638..000000000
--- a/web/apps/cast/src/constants/file.ts
+++ /dev/null
@@ -1,20 +0,0 @@
-export enum FILE_TYPE {
- IMAGE,
- VIDEO,
- LIVE_PHOTO,
- OTHERS,
-}
-
-export const RAW_FORMATS = [
- "heic",
- "rw2",
- "tiff",
- "arw",
- "cr3",
- "cr2",
- "raf",
- "nef",
- "psd",
- "dng",
- "tif",
-];
diff --git a/web/apps/cast/src/constants/upload.ts b/web/apps/cast/src/constants/upload.ts
index 63d044fb4..2ae1c4383 100644
--- a/web/apps/cast/src/constants/upload.ts
+++ b/web/apps/cast/src/constants/upload.ts
@@ -1,41 +1,13 @@
-import { FILE_TYPE } from "constants/file";
-import { FileTypeInfo } from "types/upload";
-
-// list of format that were missed by type-detection for some files.
-export const WHITELISTED_FILE_FORMATS: FileTypeInfo[] = [
- { fileType: FILE_TYPE.IMAGE, exactType: "jpeg", mimeType: "image/jpeg" },
- { fileType: FILE_TYPE.IMAGE, exactType: "jpg", mimeType: "image/jpeg" },
- { fileType: FILE_TYPE.VIDEO, exactType: "webm", mimeType: "video/webm" },
- { fileType: FILE_TYPE.VIDEO, exactType: "mod", mimeType: "video/mpeg" },
- { fileType: FILE_TYPE.VIDEO, exactType: "mp4", mimeType: "video/mp4" },
- { fileType: FILE_TYPE.IMAGE, exactType: "gif", mimeType: "image/gif" },
- { fileType: FILE_TYPE.VIDEO, exactType: "dv", mimeType: "video/x-dv" },
- {
- fileType: FILE_TYPE.VIDEO,
- exactType: "wmv",
- mimeType: "video/x-ms-asf",
- },
- {
- fileType: FILE_TYPE.VIDEO,
- exactType: "hevc",
- mimeType: "video/hevc",
- },
- {
- fileType: FILE_TYPE.IMAGE,
- exactType: "raf",
- mimeType: "image/x-fuji-raf",
- },
- {
- fileType: FILE_TYPE.IMAGE,
- exactType: "orf",
- mimeType: "image/x-olympus-orf",
- },
-
- {
- fileType: FILE_TYPE.IMAGE,
- exactType: "crw",
- mimeType: "image/x-canon-crw",
- },
+export const RAW_FORMATS = [
+ "heic",
+ "rw2",
+ "tiff",
+ "arw",
+ "cr3",
+ "cr2",
+ "raf",
+ "nef",
+ "psd",
+ "dng",
+ "tif",
];
-
-export const KNOWN_NON_MEDIA_FORMATS = ["xmp", "html", "txt"];
diff --git a/web/apps/cast/src/pages/index.tsx b/web/apps/cast/src/pages/index.tsx
index ce3ec3763..12c859e9c 100644
--- a/web/apps/cast/src/pages/index.tsx
+++ b/web/apps/cast/src/pages/index.tsx
@@ -42,52 +42,77 @@ export default function PairingMode() {
}, []);
useEffect(() => {
- if (!cast) return;
- if (isCastReady) return;
+ if (!cast) {
+ return;
+ }
+ if (isCastReady) {
+ return;
+ }
const context = cast.framework.CastReceiverContext.getInstance();
try {
const options = new cast.framework.CastReceiverOptions();
+ options.maxInactivity = 3600;
options.customNamespaces = Object.assign({});
options.customNamespaces["urn:x-cast:pair-request"] =
cast.framework.system.MessageType.JSON;
options.disableIdleTimeout = true;
context.addCustomMessageListener(
"urn:x-cast:pair-request",
messageReceiveHandler,
);
+
+ // Stop the receiver context when the sender disconnects
+ context.addEventListener(
+ cast.framework.system.EventType.SENDER_DISCONNECTED,
+ // eslint-disable-next-line @typescript-eslint/no-unused-vars
+ (_) => {
+ context.stop();
+ },
+ );
context.start(options);
+ setIsCastReady(true);
} catch (e) {
log.error("failed to create cast context", e);
}
- setIsCastReady(true);
+
return () => {
- context.stop();
+ // context.stop() is not called here; the SENDER_DISCONNECTED
+ // listener registered above stops the context instead.
};
- }, [cast, isCastReady]);
+ }, [cast]);
const messageReceiveHandler = (message: {
type: string;
senderId: string;
data: any;
}) => {
- cast.framework.CastReceiverContext.getInstance().sendCustomMessage(
- "urn:x-cast:pair-request",
- message.senderId,
- {
- code: digits.join(""),
- },
- );
+ try {
+ cast.framework.CastReceiverContext.getInstance().sendCustomMessage(
+ "urn:x-cast:pair-request",
+ message.senderId,
+ {
+ code: digits.join(""),
+ },
+ );
+ } catch (e) {
+ log.error("failed to send message", e);
+ }
};
const init = async () => {
- const data = generateSecureData(6);
- setDigits(convertDataToDecimalString(data).split(""));
- const keypair = await generateKeyPair();
- setPublicKeyB64(await toB64(keypair.publicKey));
- setPrivateKeyB64(await toB64(keypair.privateKey));
+ try {
+ const data = generateSecureData(6);
+ setDigits(convertDataToDecimalString(data).split(""));
+ const keypair = await generateKeyPair();
+ setPublicKeyB64(await toB64(keypair.publicKey));
+ setPrivateKeyB64(await toB64(keypair.privateKey));
+ } catch (e) {
+ log.error("failed to generate keypair", e);
+ throw e;
+ }
};
const generateKeyPair = async () => {
@@ -227,21 +252,6 @@ export default function PairingMode() {
{" "}
for help
-                    {/* <img> block for /images/help-qrcode.webp (removed; exact markup lost in extraction) */}
 </>
diff --git a/web/apps/cast/src/pages/slideshow.tsx b/web/apps/cast/src/pages/slideshow.tsx
index 774bbd4da..99b2209de 100644
--- a/web/apps/cast/src/pages/slideshow.tsx
+++ b/web/apps/cast/src/pages/slideshow.tsx
@@ -1,7 +1,7 @@
+import { FILE_TYPE } from "@/media/file-type";
import log from "@/next/log";
import PairedSuccessfullyOverlay from "components/PairedSuccessfullyOverlay";
import { PhotoAuditorium } from "components/PhotoAuditorium";
-import { FILE_TYPE } from "constants/file";
import { useRouter } from "next/router";
import { useEffect, useState } from "react";
import {
diff --git a/web/apps/cast/src/services/castDownloadManager.ts b/web/apps/cast/src/services/castDownloadManager.ts
index 76b37c082..2314ed54e 100644
--- a/web/apps/cast/src/services/castDownloadManager.ts
+++ b/web/apps/cast/src/services/castDownloadManager.ts
@@ -1,8 +1,8 @@
+import { FILE_TYPE } from "@/media/file-type";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { CustomError } from "@ente/shared/error";
import HTTPService from "@ente/shared/network/HTTPService";
import { getCastFileURL } from "@ente/shared/network/api";
-import { FILE_TYPE } from "constants/file";
import { EnteFile } from "types/file";
import { generateStreamFromArrayBuffer } from "utils/file";
diff --git a/web/apps/cast/src/services/detect-type.ts b/web/apps/cast/src/services/detect-type.ts
new file mode 100644
index 000000000..187e19df8
--- /dev/null
+++ b/web/apps/cast/src/services/detect-type.ts
@@ -0,0 +1,28 @@
+import { KnownFileTypeInfos } from "@/media/file-type";
+import { lowercaseExtension } from "@/next/file";
+import FileType from "file-type";
+
+/**
+ * Try to deduce the MIME type for the given {@link file}. Return the MIME type
+ * string if a type was detected and it is an image or a video; throw if a
+ * non-media type was detected; return `undefined` if nothing could be deduced.
+ *
+ * It first peeks into the file's initial contents to detect the MIME type. If
+ * that doesn't give any results, it tries to deduce it from the file's name.
+ */
+export const detectMediaMIMEType = async (file: File): Promise<string | undefined> => {
+ const chunkSizeForTypeDetection = 4100;
+ const fileChunk = file.slice(0, chunkSizeForTypeDetection);
+ const chunk = new Uint8Array(await fileChunk.arrayBuffer());
+ const result = await FileType.fromBuffer(chunk);
+
+ const mime = result?.mime;
+ if (mime) {
+ if (mime.startsWith("image/") || mime.startsWith("video/")) return mime;
+ else throw new Error(`Detected MIME type ${mime} is not a media file`);
+ }
+
+ const ext = lowercaseExtension(file.name);
+ if (!ext) return undefined;
+ return KnownFileTypeInfos.find((f) => f.extension == ext)?.mimeType;
+};
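A usage sketch for the new helper (hypothetical call site, mirroring how getPreviewableImage uses it later in this diff):

```ts
// Tag a blob with the detected media type so the browser can render it.
const toRenderableBlob = async (file: File): Promise<Blob | undefined> => {
    const mime = await detectMediaMIMEType(file);
    if (!mime) return undefined;
    return new Blob([file], { type: mime });
};
```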
diff --git a/web/apps/cast/src/services/readerService.ts b/web/apps/cast/src/services/readerService.ts
deleted file mode 100644
index 19f9bb931..000000000
--- a/web/apps/cast/src/services/readerService.ts
+++ /dev/null
@@ -1,14 +0,0 @@
-import { convertBytesToHumanReadable } from "@/next/file";
-import log from "@/next/log";
-
-export async function getUint8ArrayView(file: Blob): Promise<Uint8Array> {
- try {
- return new Uint8Array(await file.arrayBuffer());
- } catch (e) {
- log.error(
- `Failed to read file blob of size ${convertBytesToHumanReadable(file.size)}`,
- e,
- );
- throw e;
- }
-}
diff --git a/web/apps/cast/src/services/typeDetectionService.ts b/web/apps/cast/src/services/typeDetectionService.ts
deleted file mode 100644
index 5acd3844d..000000000
--- a/web/apps/cast/src/services/typeDetectionService.ts
+++ /dev/null
@@ -1,81 +0,0 @@
-import { nameAndExtension } from "@/next/file";
-import log from "@/next/log";
-import { CustomError } from "@ente/shared/error";
-import { FILE_TYPE } from "constants/file";
-import {
- KNOWN_NON_MEDIA_FORMATS,
- WHITELISTED_FILE_FORMATS,
-} from "constants/upload";
-import FileType from "file-type";
-import { FileTypeInfo } from "types/upload";
-import { getUint8ArrayView } from "./readerService";
-
-const TYPE_VIDEO = "video";
-const TYPE_IMAGE = "image";
-const CHUNK_SIZE_FOR_TYPE_DETECTION = 4100;
-
-export async function getFileType(receivedFile: File): Promise<FileTypeInfo> {
- try {
- let fileType: FILE_TYPE;
-
- const typeResult = await extractFileType(receivedFile);
- const mimTypeParts: string[] = typeResult.mime?.split("/");
- if (mimTypeParts?.length !== 2) {
- throw Error(CustomError.INVALID_MIME_TYPE(typeResult.mime));
- }
-
- switch (mimTypeParts[0]) {
- case TYPE_IMAGE:
- fileType = FILE_TYPE.IMAGE;
- break;
- case TYPE_VIDEO:
- fileType = FILE_TYPE.VIDEO;
- break;
- default:
- throw Error(CustomError.NON_MEDIA_FILE);
- }
- return {
- fileType,
- exactType: typeResult.ext,
- mimeType: typeResult.mime,
- };
- } catch (e) {
- const ne = nameAndExtension(receivedFile.name);
- const fileFormat = ne[1].toLowerCase();
- const whiteListedFormat = WHITELISTED_FILE_FORMATS.find(
- (a) => a.exactType === fileFormat,
- );
- if (whiteListedFormat) {
- return whiteListedFormat;
- }
- if (KNOWN_NON_MEDIA_FORMATS.includes(fileFormat)) {
- throw Error(CustomError.UNSUPPORTED_FILE_FORMAT);
- }
- if (e.message === CustomError.NON_MEDIA_FILE) {
- log.error(`unsupported file format ${fileFormat}`, e);
- throw Error(CustomError.UNSUPPORTED_FILE_FORMAT);
- }
- log.error(`type detection failed for format ${fileFormat}`, e);
- throw Error(CustomError.TYPE_DETECTION_FAILED(fileFormat));
- }
-}
-
-async function extractFileType(file: File) {
- const fileBlobChunk = file.slice(0, CHUNK_SIZE_FOR_TYPE_DETECTION);
- const fileDataChunk = await getUint8ArrayView(fileBlobChunk);
- return getFileTypeFromBuffer(fileDataChunk);
-}
-
-async function getFileTypeFromBuffer(buffer: Uint8Array) {
- const result = await FileType.fromBuffer(buffer);
- if (!result?.mime) {
- let logableInfo = "";
- try {
- logableInfo = `result: ${JSON.stringify(result)}`;
- } catch (e) {
- logableInfo = "failed to stringify result";
- }
- throw Error(`mimetype missing from file type result - ${logableInfo}`);
- }
- return result;
-}
diff --git a/web/apps/cast/src/types/collection/index.ts b/web/apps/cast/src/types/collection.ts
similarity index 61%
rename from web/apps/cast/src/types/collection/index.ts
rename to web/apps/cast/src/types/collection.ts
index f9ea9ef04..c495937ae 100644
--- a/web/apps/cast/src/types/collection/index.ts
+++ b/web/apps/cast/src/types/collection.ts
@@ -1,4 +1,3 @@
-import { CollectionSummaryType, CollectionType } from "constants/collection";
import { EnteFile } from "types/file";
import {
EncryptedMagicMetadata,
@@ -20,6 +19,13 @@ export interface CollectionUser {
role: COLLECTION_ROLE;
}
+enum CollectionType {
+ folder = "folder",
+ favorites = "favorites",
+ album = "album",
+ uncategorized = "uncategorized",
+}
+
export interface EncryptedCollection {
id: number;
owner: CollectionUser;
@@ -32,7 +38,7 @@ export interface EncryptedCollection {
type: CollectionType;
attributes: collectionAttributes;
sharees: CollectionUser[];
- publicURLs?: PublicURL[];
+ publicURLs?: unknown;
updationTime: number;
isDeleted: boolean;
magicMetadata: EncryptedMagicMetadata;
@@ -61,54 +67,6 @@ export interface Collection
// define a method on Collection interface to return the sync key as collection.id-time
// this is used to store the last sync time of a collection in local storage
-export interface PublicURL {
- url: string;
- deviceLimit: number;
- validTill: number;
- enableDownload: boolean;
- enableCollect: boolean;
- passwordEnabled: boolean;
- nonce?: string;
- opsLimit?: number;
- memLimit?: number;
-}
-
-export interface UpdatePublicURL {
- collectionID: number;
- disablePassword?: boolean;
- enableDownload?: boolean;
- enableCollect?: boolean;
- validTill?: number;
- deviceLimit?: number;
- passHash?: string;
- nonce?: string;
- opsLimit?: number;
- memLimit?: number;
-}
-
-export interface CreatePublicAccessTokenRequest {
- collectionID: number;
- validTill?: number;
- deviceLimit?: number;
-}
-
-export interface EncryptedFileKey {
- id: number;
- encryptedKey: string;
- keyDecryptionNonce: string;
-}
-
-export interface AddToCollectionRequest {
- collectionID: number;
- files: EncryptedFileKey[];
-}
-
-export interface MoveToCollectionRequest {
- fromCollectionID: number;
- toCollectionID: number;
- files: EncryptedFileKey[];
-}
-
export interface collectionAttributes {
encryptedPath?: string;
pathDecryptionNonce?: string;
@@ -116,11 +74,6 @@ export interface collectionAttributes {
export type CollectionToFileMap = Map<number, EnteFile>;
-export interface RemoveFromCollectionRequest {
- collectionID: number;
- fileIDs: number[];
-}
-
export interface CollectionMagicMetadataProps {
visibility?: VISIBILITY_STATE;
subType?: SUB_TYPE;
@@ -144,16 +97,4 @@ export interface CollectionPublicMagicMetadataProps {
export type CollectionPublicMagicMetadata =
 MagicMetadataCore<CollectionPublicMagicMetadataProps>;
-export interface CollectionSummary {
- id: number;
- name: string;
- type: CollectionSummaryType;
- coverFile: EnteFile;
- latestFile: EnteFile;
- fileCount: number;
- updationTime: number;
- order?: number;
-}
-
-export type CollectionSummaries = Map<number, CollectionSummary>;
export type CollectionFilesCount = Map<number, number>;
diff --git a/web/apps/cast/src/types/file/index.ts b/web/apps/cast/src/types/file/index.ts
index 1813b5416..c21f04a0a 100644
--- a/web/apps/cast/src/types/file/index.ts
+++ b/web/apps/cast/src/types/file/index.ts
@@ -1,9 +1,9 @@
+import type { Metadata } from "@/media/types/file";
import {
EncryptedMagicMetadata,
MagicMetadataCore,
VISIBILITY_STATE,
} from "types/magicMetadata";
-import { Metadata } from "types/upload";
export interface MetadataFileAttributes {
encryptedData: string;
@@ -64,25 +64,6 @@ export interface EnteFile
isConverted?: boolean;
}
-export interface TrashRequest {
- items: TrashRequestItems[];
-}
-
-export interface TrashRequestItems {
- fileID: number;
- collectionID: number;
-}
-
-export interface FileWithUpdatedMagicMetadata {
- file: EnteFile;
- updatedMagicMetadata: FileMagicMetadata;
-}
-
-export interface FileWithUpdatedPublicMagicMetadata {
- file: EnteFile;
- updatedPublicMagicMetadata: FilePublicMagicMetadata;
-}
-
export interface FileMagicMetadataProps {
visibility?: VISIBILITY_STATE;
filePaths?: string[];
diff --git a/web/apps/cast/src/types/upload/index.ts b/web/apps/cast/src/types/upload/index.ts
deleted file mode 100644
index 0e249846a..000000000
--- a/web/apps/cast/src/types/upload/index.ts
+++ /dev/null
@@ -1,107 +0,0 @@
-import {
- B64EncryptionResult,
- LocalFileAttributes,
-} from "@ente/shared/crypto/types";
-import { FILE_TYPE } from "constants/file";
-import {
- FilePublicMagicMetadata,
- FilePublicMagicMetadataProps,
- MetadataFileAttributes,
- S3FileAttributes,
-} from "types/file";
-import { EncryptedMagicMetadata } from "types/magicMetadata";
-
-export interface DataStream {
- stream: ReadableStream;
- chunkCount: number;
-}
-
-export function isDataStream(object: any): object is DataStream {
- return "stream" in object;
-}
-
-export type Logger = (message: string) => void;
-
-export interface Metadata {
- title: string;
- creationTime: number;
- modificationTime: number;
- latitude: number;
- longitude: number;
- fileType: FILE_TYPE;
- hasStaticThumbnail?: boolean;
- hash?: string;
- imageHash?: string;
- videoHash?: string;
- localID?: number;
- version?: number;
- deviceFolder?: string;
-}
-
-export interface FileTypeInfo {
- fileType: FILE_TYPE;
- exactType: string;
- mimeType?: string;
- imageType?: string;
- videoType?: string;
-}
-
-export interface UploadURL {
- url: string;
- objectKey: string;
-}
-
-export interface FileInMemory {
- filedata: Uint8Array | DataStream;
- thumbnail: Uint8Array;
- hasStaticThumbnail: boolean;
-}
-
-export interface FileWithMetadata
- extends Omit<FileInMemory, "hasStaticThumbnail"> {
- metadata: Metadata;
- localID: number;
- pubMagicMetadata: FilePublicMagicMetadata;
-}
-
-export interface EncryptedFile {
- file: ProcessedFile;
- fileKey: B64EncryptionResult;
-}
-export interface ProcessedFile {
- file: LocalFileAttributes;
- thumbnail: LocalFileAttributes;
- metadata: LocalFileAttributes;
- pubMagicMetadata: EncryptedMagicMetadata;
- localID: number;
-}
-export interface BackupedFile {
- file: S3FileAttributes;
- thumbnail: S3FileAttributes;
- metadata: MetadataFileAttributes;
- pubMagicMetadata: EncryptedMagicMetadata;
-}
-
-export interface UploadFile extends BackupedFile {
- collectionID: number;
- encryptedKey: string;
- keyDecryptionNonce: string;
-}
-
-export interface ParsedExtractedMetadata {
- location: Location;
- creationTime: number;
- width: number;
- height: number;
-}
-
-export interface PublicUploadProps {
- token: string;
- passwordToken: string;
- accessedThroughSharedURL: boolean;
-}
-
-export interface ExtractMetadataResult {
- metadata: Metadata;
- publicMagicMetadata: FilePublicMagicMetadataProps;
-}
diff --git a/web/apps/cast/src/utils/file/index.ts b/web/apps/cast/src/utils/file.ts
similarity index 85%
rename from web/apps/cast/src/utils/file/index.ts
rename to web/apps/cast/src/utils/file.ts
index 60ec0e56e..91961b7be 100644
--- a/web/apps/cast/src/utils/file/index.ts
+++ b/web/apps/cast/src/utils/file.ts
@@ -1,9 +1,10 @@
+import { FILE_TYPE } from "@/media/file-type";
import { decodeLivePhoto } from "@/media/live-photo";
import log from "@/next/log";
import ComlinkCryptoWorker from "@ente/shared/crypto";
-import { FILE_TYPE, RAW_FORMATS } from "constants/file";
+import { RAW_FORMATS } from "constants/upload";
import CastDownloadManager from "services/castDownloadManager";
-import { getFileType } from "services/typeDetectionService";
+import { detectMediaMIMEType } from "services/detect-type";
import {
EncryptedEnteFile,
EnteFile,
@@ -103,18 +104,6 @@ export function isRawFileFromFileName(fileName: string) {
return false;
}
-/**
- * [Note: File name for local EnteFile objects]
- *
- * The title property in a file's metadata is the original file's name. The
- * metadata of a file cannot be edited. So if later on the file's name is
- * changed, then the edit is stored in the `editedName` property of the public
- * metadata of the file.
- *
- * This function merges these edits onto the file object that we use locally.
- * Effectively, post this step, the file's metadata.title can be used in lieu of
- * its filename.
- */
export function mergeMetadata(files: EnteFile[]): EnteFile[] {
return files.map((file) => {
if (file.pubMagicMetadata?.data.editedTime) {
@@ -143,10 +132,11 @@ export const getPreviewableImage = async (
);
fileBlob = new Blob([imageData]);
}
- const fileType = await getFileType(
+ const mimeType = await detectMediaMIMEType(
new File([fileBlob], file.metadata.title),
);
- fileBlob = new Blob([fileBlob], { type: fileType.mimeType });
+ if (!mimeType) return undefined;
+ fileBlob = new Blob([fileBlob], { type: mimeType });
return fileBlob;
} catch (e) {
log.error("failed to download file", e);
diff --git a/web/apps/cast/src/utils/useCastReceiver.tsx b/web/apps/cast/src/utils/useCastReceiver.tsx
index 176b96882..ff17b0910 100644
--- a/web/apps/cast/src/utils/useCastReceiver.tsx
+++ b/web/apps/cast/src/utils/useCastReceiver.tsx
@@ -21,7 +21,6 @@ const load = (() => {
cast,
});
});
-
document.body.appendChild(script);
});
}
diff --git a/web/apps/photos/package.json b/web/apps/photos/package.json
index 4ade92263..1196b4ddf 100644
--- a/web/apps/photos/package.json
+++ b/web/apps/photos/package.json
@@ -21,7 +21,6 @@
"exifr": "^7.1.3",
"fast-srp-hap": "^2.0.4",
"ffmpeg-wasm": "file:./thirdparty/ffmpeg-wasm",
- "file-type": "^16.5.4",
"formik": "^2.1.5",
"hdbscan": "0.0.1-alpha.5",
"heic-convert": "^2.0.0",
@@ -30,6 +29,7 @@
"leaflet-defaulticon-compatibility": "^0.1.1",
"localforage": "^1.9.0",
"memoize-one": "^6.0.0",
+ "mime-types": "^2.1.35",
"ml-matrix": "^6.10.4",
"otpauth": "^9.0.2",
"p-debounce": "^4.0.0",
diff --git a/web/apps/photos/src/components/EnteDateTimePicker.tsx b/web/apps/photos/src/components/EnteDateTimePicker.tsx
index ee5426ebc..e53ed65b9 100644
--- a/web/apps/photos/src/components/EnteDateTimePicker.tsx
+++ b/web/apps/photos/src/components/EnteDateTimePicker.tsx
@@ -5,10 +5,9 @@ import {
MobileDateTimePicker,
} from "@mui/x-date-pickers";
import { AdapterDateFns } from "@mui/x-date-pickers/AdapterDateFns";
-import {
- MAX_EDITED_CREATION_TIME,
- MIN_EDITED_CREATION_TIME,
-} from "constants/file";
+
+const MIN_EDITED_CREATION_TIME = new Date(1800, 0, 1);
+const MAX_EDITED_CREATION_TIME = new Date();
interface Props {
initialValue?: Date;
diff --git a/web/apps/photos/src/components/FixCreationTime.tsx b/web/apps/photos/src/components/FixCreationTime.tsx
index 62c31539a..757ca2737 100644
--- a/web/apps/photos/src/components/FixCreationTime.tsx
+++ b/web/apps/photos/src/components/FixCreationTime.tsx
@@ -13,7 +13,7 @@ import { useFormik } from "formik";
import { t } from "i18next";
import { GalleryContext } from "pages/gallery";
import React, { useContext, useEffect, useState } from "react";
-import { updateCreationTimeWithExif } from "services/updateCreationTimeWithExif";
+import { updateCreationTimeWithExif } from "services/fix-exif";
import { EnteFile } from "types/file";
import EnteDateTimePicker from "./EnteDateTimePicker";
diff --git a/web/apps/photos/src/components/PhotoFrame.tsx b/web/apps/photos/src/components/PhotoFrame.tsx
index 90e1cf32c..8c935ee27 100644
--- a/web/apps/photos/src/components/PhotoFrame.tsx
+++ b/web/apps/photos/src/components/PhotoFrame.tsx
@@ -1,3 +1,4 @@
+import { FILE_TYPE } from "@/media/file-type";
import log from "@/next/log";
import { PHOTOS_PAGES } from "@ente/shared/constants/pages";
import { CustomError } from "@ente/shared/error";
@@ -5,7 +6,6 @@ import useMemoSingleThreaded from "@ente/shared/hooks/useMemoSingleThreaded";
import { styled } from "@mui/material";
import PhotoViewer from "components/PhotoViewer";
import { TRASH_SECTION } from "constants/collection";
-import { FILE_TYPE } from "constants/file";
import { useRouter } from "next/router";
import { GalleryContext } from "pages/gallery";
import PhotoSwipe from "photoswipe";
diff --git a/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderCaption.tsx b/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderCaption.tsx
index 871da2b05..3a5dbb6bc 100644
--- a/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderCaption.tsx
+++ b/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderCaption.tsx
@@ -3,7 +3,6 @@ import { FlexWrapper } from "@ente/shared/components/Container";
import Close from "@mui/icons-material/Close";
import Done from "@mui/icons-material/Done";
import { Box, IconButton, TextField } from "@mui/material";
-import { MAX_CAPTION_SIZE } from "constants/file";
import { Formik } from "formik";
import { t } from "i18next";
import { useState } from "react";
@@ -12,6 +11,8 @@ import { changeCaption, updateExistingFilePubMetadata } from "utils/file";
import * as Yup from "yup";
import { SmallLoadingSpinner } from "../styledComponents/SmallLoadingSpinner";
+export const MAX_CAPTION_SIZE = 5000;
+
interface formValues {
caption: string;
}
diff --git a/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderFileName.tsx b/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderFileName.tsx
index 1bee86c25..399051185 100644
--- a/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderFileName.tsx
+++ b/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderFileName.tsx
@@ -1,10 +1,10 @@
+import { FILE_TYPE } from "@/media/file-type";
import { nameAndExtension } from "@/next/file";
import log from "@/next/log";
import { FlexWrapper } from "@ente/shared/components/Container";
import PhotoOutlined from "@mui/icons-material/PhotoOutlined";
import VideocamOutlined from "@mui/icons-material/VideocamOutlined";
import Box from "@mui/material/Box";
-import { FILE_TYPE } from "constants/file";
import { useEffect, useState } from "react";
import { EnteFile } from "types/file";
import { makeHumanReadableStorage } from "utils/billing";
diff --git a/web/apps/photos/src/components/PhotoViewer/FileInfo/index.tsx b/web/apps/photos/src/components/PhotoViewer/FileInfo/index.tsx
index 34fdb8e34..a6d37ccf4 100644
--- a/web/apps/photos/src/components/PhotoViewer/FileInfo/index.tsx
+++ b/web/apps/photos/src/components/PhotoViewer/FileInfo/index.tsx
@@ -17,7 +17,7 @@ import { t } from "i18next";
import { AppContext } from "pages/_app";
import { GalleryContext } from "pages/gallery";
import { useContext, useEffect, useMemo, useState } from "react";
-import { getEXIFLocation } from "services/upload/exifService";
+import { getEXIFLocation } from "services/exif";
import { EnteFile } from "types/file";
import { PublicCollectionGalleryContext } from "utils/publicCollectionGallery";
import {
diff --git a/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx b/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx
index 997ad3d27..ff795aca7 100644
--- a/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx
+++ b/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx
@@ -42,11 +42,10 @@ import { t } from "i18next";
import mime from "mime-types";
import { AppContext } from "pages/_app";
import { getLocalCollections } from "services/collectionService";
+import { detectFileTypeInfo } from "services/detect-type";
import downloadManager from "services/download";
-import { getFileType } from "services/typeDetectionService";
import uploadManager from "services/upload/uploadManager";
import { EnteFile } from "types/file";
-import { FileWithCollection } from "types/upload";
import { getEditorCloseConfirmationMessage } from "utils/ui";
import ColoursMenu from "./ColoursMenu";
import CropMenu, { cropRegionOfCanvas, getCropRegionArgs } from "./CropMenu";
@@ -486,7 +485,7 @@ const ImageEditorOverlay = (props: IProps) => {
if (!canvasRef.current) return;
const editedFile = await getEditedFile();
- const fileType = await getFileType(editedFile);
+ const fileType = await detectFileTypeInfo(editedFile);
const tempImgURL = URL.createObjectURL(
new Blob([editedFile], { type: fileType.mimeType }),
);
@@ -507,15 +506,15 @@ const ImageEditorOverlay = (props: IProps) => {
);
const editedFile = await getEditedFile();
- const file: FileWithCollection = {
- file: editedFile,
- collectionID: props.file.collectionID,
+ const file = {
+ fileOrPath: editedFile,
localID: 1,
+ collectionID: props.file.collectionID,
};
uploadManager.prepareForNewUpload();
uploadManager.showUploadProgressDialog();
- uploadManager.queueFilesForUpload([file], [collection]);
+ uploadManager.uploadFiles([file], [collection]);
setFileURL(null);
props.onClose();
props.closePhotoViewer();
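The call above reflects the renamed upload entry point (`queueFilesForUpload` → `uploadFiles`) and the new shape of its items: the `file` field holding a `File | ElectronFile` is replaced by `fileOrPath`, which is either an in-memory web `File` or, on desktop, an absolute path string. A sketch of the shape this call site relies on; the authoritative definition is the `FileWithCollection` type exported by `services/upload/uploadManager`:

```ts
// Illustrative shape of an item accepted by uploadManager.uploadFiles.
interface UploadItem {
    // An arbitrary caller-assigned ID used to track this item's progress.
    localID: number;
    // The ID of the collection into which this item should be uploaded.
    collectionID: number;
    // Either an in-memory web File, or (on desktop) an absolute path.
    fileOrPath: File | string;
}
```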
diff --git a/web/apps/photos/src/components/PhotoViewer/index.tsx b/web/apps/photos/src/components/PhotoViewer/index.tsx
index 29da75e53..8e6debf68 100644
--- a/web/apps/photos/src/components/PhotoViewer/index.tsx
+++ b/web/apps/photos/src/components/PhotoViewer/index.tsx
@@ -10,12 +10,13 @@ import { EnteFile } from "types/file";
import {
copyFileToClipboard,
downloadSingleFile,
- getFileExtension,
getFileFromURL,
isRawFile,
isSupportedRawFormat,
} from "utils/file";
+import { FILE_TYPE } from "@/media/file-type";
+import { lowercaseExtension } from "@/next/file";
import { FlexWrapper } from "@ente/shared/components/Container";
import EnteSpinner from "@ente/shared/components/EnteSpinner";
import AlbumOutlined from "@mui/icons-material/AlbumOutlined";
@@ -34,7 +35,6 @@ import InfoIcon from "@mui/icons-material/InfoOutlined";
import ReplayIcon from "@mui/icons-material/Replay";
import ZoomInOutlinedIcon from "@mui/icons-material/ZoomInOutlined";
import { Box, Button, styled } from "@mui/material";
-import { FILE_TYPE } from "constants/file";
import {
defaultLivePhotoDefaultOptions,
photoSwipeV4Events,
@@ -43,10 +43,10 @@ import { t } from "i18next";
import isElectron from "is-electron";
import { AppContext } from "pages/_app";
import { GalleryContext } from "pages/gallery";
+import { detectFileTypeInfo } from "services/detect-type";
import downloadManager, { LoadedLivePhotoSourceURL } from "services/download";
+import { getParsedExifData } from "services/exif";
import { trashFiles } from "services/fileService";
-import { getFileType } from "services/typeDetectionService";
-import { getParsedExifData } from "services/upload/exifService";
import { SetFilesDownloadProgressAttributesCreator } from "types/gallery";
import { isClipboardItemPresent } from "utils/common";
import { pauseVideo, playVideo } from "utils/photoFrame";
@@ -348,7 +348,7 @@ function PhotoViewer(props: Iprops) {
}
function updateShowEditButton(file: EnteFile) {
- const extension = getFileExtension(file.metadata.title);
+ const extension = lowercaseExtension(file.metadata.title);
const isSupported =
!isRawFile(extension) || isSupportedRawFormat(extension);
setShowEditButton(
@@ -594,7 +594,7 @@ function PhotoViewer(props: Iprops) {
.image;
fileObject = await getFileFromURL(url, file.metadata.title);
}
- const fileTypeInfo = await getFileType(fileObject);
+ const fileTypeInfo = await detectFileTypeInfo(fileObject);
const exifData = await getParsedExifData(
fileObject,
fileTypeInfo,
@@ -611,9 +611,8 @@ function PhotoViewer(props: Iprops) {
}
} catch (e) {
setExif({ key: file.src, value: null });
- const fileExtension = getFileExtension(file.metadata.title);
log.error(
- `checkExifAvailable failed for extension ${fileExtension}`,
+ `checkExifAvailable failed for file ${file.metadata.title}`,
e,
);
}
diff --git a/web/apps/photos/src/components/PlaceholderThumbnails.tsx b/web/apps/photos/src/components/PlaceholderThumbnails.tsx
index caafbdce6..662e42287 100644
--- a/web/apps/photos/src/components/PlaceholderThumbnails.tsx
+++ b/web/apps/photos/src/components/PlaceholderThumbnails.tsx
@@ -1,8 +1,8 @@
+import { FILE_TYPE } from "@/media/file-type";
import { Overlay } from "@ente/shared/components/Container";
import PhotoOutlined from "@mui/icons-material/PhotoOutlined";
import PlayCircleOutlineOutlined from "@mui/icons-material/PlayCircleOutlineOutlined";
import { styled } from "@mui/material";
-import { FILE_TYPE } from "constants/file";
interface Iprops {
fileType: FILE_TYPE;
diff --git a/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx b/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx
index d7cf151e6..3f737b3e0 100644
--- a/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx
+++ b/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx
@@ -1,6 +1,6 @@
+import { FILE_TYPE } from "@/media/file-type";
import CloseIcon from "@mui/icons-material/Close";
import { IconButton } from "@mui/material";
-import { FILE_TYPE } from "constants/file";
import { t } from "i18next";
import memoize from "memoize-one";
import pDebounce from "p-debounce";
diff --git a/web/apps/photos/src/components/Sidebar/AdvancedSettings.tsx b/web/apps/photos/src/components/Sidebar/AdvancedSettings.tsx
index 6972cc161..6dc9b851e 100644
--- a/web/apps/photos/src/components/Sidebar/AdvancedSettings.tsx
+++ b/web/apps/photos/src/components/Sidebar/AdvancedSettings.tsx
@@ -1,4 +1,3 @@
-import log from "@/next/log";
import ChevronRight from "@mui/icons-material/ChevronRight";
import ScienceIcon from "@mui/icons-material/Science";
import { Box, DialogProps, Stack, Typography } from "@mui/material";
@@ -37,13 +36,10 @@ export default function AdvancedSettings({ open, onClose, onRootClose }) {
}
};
- const toggleCFProxy = async () => {
- try {
- appContext.setIsCFProxyDisabled(!appContext.isCFProxyDisabled);
- } catch (e) {
- log.error("toggleFasterUpload failed", e);
- }
+ const toggleCFProxy = () => {
+ appContext.setIsCFProxyDisabled(!appContext.isCFProxyDisabled);
};
+
const [indexingStatus, setIndexingStatus] = useState({
indexed: 0,
pending: 0,
diff --git a/web/apps/photos/src/components/Sidebar/DebugSection.tsx b/web/apps/photos/src/components/Sidebar/DebugSection.tsx
index 28c65ca8e..e33637403 100644
--- a/web/apps/photos/src/components/Sidebar/DebugSection.tsx
+++ b/web/apps/photos/src/components/Sidebar/DebugSection.tsx
@@ -9,10 +9,6 @@ import { useContext, useEffect, useState } from "react";
import { Trans } from "react-i18next";
import { isInternalUser } from "utils/user";
import { testUpload } from "../../../tests/upload.test";
-import {
- testZipFileReading,
- testZipWithRootFileReadingTest,
-} from "../../../tests/zip-file-reading.test";
export default function DebugSection() {
const appContext = useContext(AppContext);
@@ -62,25 +58,11 @@ export default function DebugSection() {
)}
{isInternalUser() && (
- <>
-
-
-
-
-
- </>
+
)}
</>
);
diff --git a/web/apps/photos/src/components/Upload/UploadProgress/index.tsx b/web/apps/photos/src/components/Upload/UploadProgress/index.tsx
index 8f16ef2d9..1acffd561 100644
--- a/web/apps/photos/src/components/Upload/UploadProgress/index.tsx
+++ b/web/apps/photos/src/components/Upload/UploadProgress/index.tsx
@@ -1,18 +1,16 @@
-import { useContext, useEffect, useState } from "react";
-import { UploadProgressDialog } from "./dialog";
-import { MinimizedUploadProgress } from "./minimized";
-
-import { t } from "i18next";
-
import { UPLOAD_STAGES } from "constants/upload";
import UploadProgressContext from "contexts/uploadProgress";
+import { t } from "i18next";
import { AppContext } from "pages/_app";
-import {
+import { useContext, useEffect, useState } from "react";
+import type {
InProgressUpload,
SegregatedFinishedUploads,
UploadCounter,
UploadFileNames,
-} from "types/upload/ui";
+} from "services/upload/uploadManager";
+import { UploadProgressDialog } from "./dialog";
+import { MinimizedUploadProgress } from "./minimized";
interface Props {
open: boolean;
diff --git a/web/apps/photos/src/components/Upload/Uploader.tsx b/web/apps/photos/src/components/Upload/Uploader.tsx
index 752b729a0..d7485398f 100644
--- a/web/apps/photos/src/components/Upload/Uploader.tsx
+++ b/web/apps/photos/src/components/Upload/Uploader.tsx
@@ -1,11 +1,11 @@
-import { ensureElectron } from "@/next/electron";
import log from "@/next/log";
+import { ElectronFile } from "@/next/types/file";
import type { CollectionMapping, Electron } from "@/next/types/ipc";
import { CustomError } from "@ente/shared/error";
import { isPromise } from "@ente/shared/utils";
import DiscFullIcon from "@mui/icons-material/DiscFull";
import UserNameInputDialog from "components/UserNameInputDialog";
-import { PICKED_UPLOAD_TYPE, UPLOAD_STAGES } from "constants/upload";
+import { UPLOAD_STAGES } from "constants/upload";
import { t } from "i18next";
import isElectron from "is-electron";
import { AppContext } from "pages/_app";
@@ -13,13 +13,23 @@ import { GalleryContext } from "pages/gallery";
import { useContext, useEffect, useRef, useState } from "react";
import billingService from "services/billingService";
import { getLatestCollections } from "services/collectionService";
-import { setToUploadCollection } from "services/pending-uploads";
+import { exportMetadataDirectoryName } from "services/export";
import {
getPublicCollectionUID,
getPublicCollectionUploaderName,
savePublicCollectionUploaderName,
} from "services/publicCollectionService";
-import uploadManager from "services/upload/uploadManager";
+import type {
+ FileWithCollection,
+ InProgressUpload,
+ SegregatedFinishedUploads,
+ UploadCounter,
+ UploadFileNames,
+} from "services/upload/uploadManager";
+import uploadManager, {
+ setToUploadCollection,
+} from "services/upload/uploadManager";
+import { fopFileName } from "services/upload/uploadService";
import watcher from "services/watch";
import { NotificationAttributes } from "types/Notification";
import { Collection } from "types/collection";
@@ -31,26 +41,12 @@ import {
SetLoading,
UploadTypeSelectorIntent,
} from "types/gallery";
-import { ElectronFile, FileWithCollection } from "types/upload";
-import {
- InProgressUpload,
- SegregatedFinishedUploads,
- UploadCounter,
- UploadFileNames,
-} from "types/upload/ui";
import { getOrCreateAlbum } from "utils/collection";
import { PublicCollectionGalleryContext } from "utils/publicCollectionGallery";
import {
getDownloadAppMessage,
getRootLevelFileWithFolderNotAllowMessage,
} from "utils/ui";
-import {
- DEFAULT_IMPORT_SUGGESTION,
- filterOutSystemFiles,
- getImportSuggestion,
- groupFilesBasedOnParentFolder,
- type ImportSuggestion,
-} from "utils/upload";
import { SetCollectionNamerAttributes } from "../Collections/CollectionNamer";
import { CollectionMappingChoiceModal } from "./CollectionMappingChoiceModal";
import UploadProgress from "./UploadProgress";
@@ -58,6 +54,12 @@ import UploadTypeSelector from "./UploadTypeSelector";
const FIRST_ALBUM_NAME = "My First Album";
+enum PICKED_UPLOAD_TYPE {
+ FILES = "files",
+ FOLDERS = "folders",
+ ZIPS = "zips",
+}
+
interface Props {
syncWithRemote: (force?: boolean, silent?: boolean) => Promise<void>;
closeCollectionSelector?: () => void;
@@ -112,12 +114,41 @@ export default function Uploader(props: Props) {
const [importSuggestion, setImportSuggestion] = useState(
DEFAULT_IMPORT_SUGGESTION,
);
- const [electronFiles, setElectronFiles] = useState(null);
- const [webFiles, setWebFiles] = useState([]);
- const toUploadFiles = useRef(null);
+ /**
+ * {@link File}s that the user drag-dropped or selected for uploads. This is
+ * the only type of selection that is possible when we're running in the
+ * browser.
+ */
+ const [webFiles, setWebFiles] = useState<File[]>([]);
+ /**
+ * Paths of files to upload that we've received over the IPC bridge from the
+ * code running in the Node.js layer of our desktop app.
+ */
+ const [desktopFilePaths, setDesktopFilePaths] = useState<string[]>([]);
+ /**
+ * TODO(MR): When?
+ */
+ const [electronFiles, setElectronFiles] = useState<ElectronFile[]>([]);
+
+ /**
+ * Consolidated and cleaned list obtained from {@link webFiles} and
+ * {@link desktopFilePaths}.
+ */
+ const fileOrPathsToUpload = useRef<(File | string)[]>([]);
+
+ /**
+ * If true, then the next upload we'll be processing was initiated by our
+ * desktop app.
+ */
const isPendingDesktopUpload = useRef(false);
+
+ /**
+ * If set, this will be the name of the collection that our desktop app
+ * wishes for us to upload into.
+ */
const pendingDesktopUploadCollectionName = useRef("");
+
// This is set when the user chooses a type to upload from the upload type selector dialog
const pickedUploadType = useRef<PICKED_UPLOAD_TYPE>(null);
const zipPaths = useRef<string[]>(null);
@@ -165,29 +196,11 @@ export default function Uploader(props: Props) {
setUploadProgressView(true);
}
- if (isElectron()) {
- ensureElectron()
- .pendingUploads()
- .then((pending) => {
- if (pending) {
- log.info("Resuming pending desktop upload", pending);
- resumeDesktopUpload(
- pending.type == "files"
- ? PICKED_UPLOAD_TYPE.FILES
- : PICKED_UPLOAD_TYPE.ZIPS,
- pending.files,
- pending.collectionName,
- );
- }
- });
-
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
+ if (electron) {
const upload = (collectionName: string, filePaths: string[]) => {
isPendingDesktopUpload.current = true;
pendingDesktopUploadCollectionName.current = collectionName;
-
- // TODO (MR):
- // setElectronFiles(filePaths);
+ setDesktopFilePaths(filePaths);
};
const requestSyncWithRemote = () => {
@@ -200,6 +213,19 @@ export default function Uploader(props: Props) {
};
watcher.init(upload, requestSyncWithRemote);
+
+ electron.pendingUploads().then((pending) => {
+ if (pending) {
+ log.info("Resuming pending desktop upload", pending);
+ resumeDesktopUpload(
+ pending.type == "files"
+ ? PICKED_UPLOAD_TYPE.FILES
+ : PICKED_UPLOAD_TYPE.ZIPS,
+ pending.files,
+ pending.collectionName,
+ );
+ }
+ });
}
}, [
publicCollectionGalleryContext.accessedThroughSharedURL,
@@ -284,21 +310,25 @@ export default function Uploader(props: Props) {
useEffect(() => {
if (
- electronFiles?.length > 0 ||
- webFiles?.length > 0 ||
+ desktopFilePaths.length > 0 ||
+ electronFiles.length > 0 ||
+ webFiles.length > 0 ||
appContext.sharedFiles?.length > 0
) {
log.info(
- `upload request type:${
- electronFiles?.length > 0
- ? "electronFiles"
- : webFiles?.length > 0
- ? "webFiles"
- : "sharedFiles"
+ `upload request type: ${
+ desktopFilePaths.length > 0
+ ? "desktopFilePaths"
+ : electronFiles.length > 0
+ ? "electronFiles"
+ : webFiles.length > 0
+ ? "webFiles"
+ : "sharedFiles"
} count ${
- electronFiles?.length ??
- webFiles?.length ??
- appContext?.sharedFiles.length
+ desktopFilePaths.length +
+ electronFiles.length +
+ webFiles.length +
+ (appContext.sharedFiles?.length ?? 0)
}`,
);
if (uploadManager.isUploadRunning()) {
@@ -320,26 +350,39 @@ export default function Uploader(props: Props) {
props.setLoading(true);
if (webFiles?.length > 0) {
// File selection by drag and drop or via the file picker.
- toUploadFiles.current = webFiles;
+ fileOrPathsToUpload.current = webFiles;
setWebFiles([]);
} else if (appContext.sharedFiles?.length > 0) {
- toUploadFiles.current = appContext.sharedFiles;
+ fileOrPathsToUpload.current = appContext.sharedFiles;
appContext.resetSharedFiles();
} else if (electronFiles?.length > 0) {
- // File selection from desktop app
- toUploadFiles.current = electronFiles;
+ // File selection from desktop app - deprecated
+ log.warn("Using deprecated code path for ElectronFiles");
+ fileOrPathsToUpload.current = electronFiles.map((f) => f.path);
setElectronFiles([]);
+ } else if (desktopFilePaths && desktopFilePaths.length > 0) {
+ // File selection from our desktop app
+ fileOrPathsToUpload.current = desktopFilePaths;
+ setDesktopFilePaths([]);
}
- toUploadFiles.current = filterOutSystemFiles(toUploadFiles.current);
- if (toUploadFiles.current.length === 0) {
+ log.debug(() => "Uploader received:");
+ log.debug(() => fileOrPathsToUpload.current);
+
+ fileOrPathsToUpload.current = pruneHiddenFiles(
+ fileOrPathsToUpload.current,
+ );
+ if (fileOrPathsToUpload.current.length === 0) {
props.setLoading(false);
return;
}
const importSuggestion = getImportSuggestion(
pickedUploadType.current,
- toUploadFiles.current.map((file) => file["path"]),
+ fileOrPathsToUpload.current.map((file) =>
+ /** TODO(MR): Is path valid for Web files? */
+ typeof file == "string" ? file : file["path"],
+ ),
);
setImportSuggestion(importSuggestion);
@@ -352,7 +395,7 @@ export default function Uploader(props: Props) {
pickedUploadType.current = null;
props.setLoading(false);
}
- }, [webFiles, appContext.sharedFiles, electronFiles]);
+ }, [webFiles, appContext.sharedFiles, electronFiles, desktopFilePaths]);
const resumeDesktopUpload = async (
type: PICKED_UPLOAD_TYPE,
@@ -380,75 +423,73 @@ export default function Uploader(props: Props) {
) => {
try {
log.info(
- `upload file to an existing collection name:${collection.name}, collectionID:${collection.id}`,
+ `Uploading files to existing collection id ${collection.id} (${collection.name})`,
);
await preCollectionCreationAction();
- const filesWithCollectionToUpload: FileWithCollection[] =
- toUploadFiles.current.map((file, index) => ({
- file,
+ const filesWithCollectionToUpload = fileOrPathsToUpload.current.map(
+ (fileOrPath, index) => ({
+ fileOrPath,
localID: index,
collectionID: collection.id,
- }));
+ }),
+ );
await waitInQueueAndUploadFiles(
filesWithCollectionToUpload,
[collection],
uploaderName,
);
} catch (e) {
- log.error("Failed to upload files to existing collections", e);
+ log.error("Failed to upload files to existing collection", e);
}
};
const uploadFilesToNewCollections = async (
- strategy: CollectionMapping,
+ mapping: CollectionMapping,
collectionName?: string,
) => {
try {
log.info(
- `upload file to an new collections strategy:${strategy} ,collectionName:${collectionName}`,
+ `Uploading files to new collections using the ${mapping} mapping (${collectionName ?? ""})`,
);
await preCollectionCreationAction();
let filesWithCollectionToUpload: FileWithCollection[] = [];
const collections: Collection[] = [];
- let collectionNameToFilesMap = new Map<
+ let collectionNameToFileOrPaths = new Map<
string,
- (File | ElectronFile)[]
+ (File | string)[]
>();
- if (strategy == "root") {
- collectionNameToFilesMap.set(
+ if (mapping == "root") {
+ collectionNameToFileOrPaths.set(
collectionName,
- toUploadFiles.current,
+ fileOrPathsToUpload.current,
);
} else {
- collectionNameToFilesMap = groupFilesBasedOnParentFolder(
- toUploadFiles.current,
+ collectionNameToFileOrPaths = groupFilesBasedOnParentFolder(
+ fileOrPathsToUpload.current,
);
}
- log.info(
- `upload collections - [${[...collectionNameToFilesMap.keys()]}]`,
- );
try {
- const existingCollection = await getLatestCollections();
+ const existingCollections = await getLatestCollections();
let index = 0;
for (const [
collectionName,
- files,
- ] of collectionNameToFilesMap) {
+ fileOrPaths,
+ ] of collectionNameToFileOrPaths) {
const collection = await getOrCreateAlbum(
collectionName,
- existingCollection,
+ existingCollections,
);
collections.push(collection);
props.setCollections([
- ...existingCollection,
+ ...existingCollections,
...collections,
]);
filesWithCollectionToUpload = [
...filesWithCollectionToUpload,
- ...files.map((file) => ({
+ ...fileOrPaths.map((fileOrPath) => ({
localID: index++,
collectionID: collection.id,
- file,
+ fileOrPath,
})),
];
}
@@ -457,7 +498,6 @@ export default function Uploader(props: Props) {
log.error("Failed to create album", e);
appContext.setDialogMessage({
title: t("ERROR"),
-
close: { variant: "critical" },
content: t("CREATE_ALBUM_FAILED"),
});
@@ -467,7 +507,7 @@ export default function Uploader(props: Props) {
filesWithCollectionToUpload,
collections,
);
- toUploadFiles.current = null;
+ fileOrPathsToUpload.current = null;
} catch (e) {
log.error("Failed to upload files to new collections", e);
}
@@ -509,7 +549,6 @@ export default function Uploader(props: Props) {
uploaderName?: string,
) => {
try {
- log.info("uploadFiles called");
preUploadAction();
if (
electron &&
@@ -517,7 +556,6 @@ export default function Uploader(props: Props) {
!watcher.isUploadRunning()
) {
await setToUploadCollection(collections);
- // TODO (MR): What happens when we have both?
if (zipPaths.current) {
await electron.setPendingUploadFiles(
"zips",
@@ -528,19 +566,20 @@ export default function Uploader(props: Props) {
await electron.setPendingUploadFiles(
"files",
filesWithCollectionToUploadIn.map(
- ({ file }) => (file as ElectronFile).path,
+ // TODO(MR): ElectronFile
+ ({ fileOrPath }) =>
+ typeof fileOrPath == "string"
+ ? fileOrPath
+ : (fileOrPath as any as ElectronFile).path,
),
);
}
- const shouldCloseUploadProgress =
- await uploadManager.queueFilesForUpload(
- filesWithCollectionToUploadIn,
- collections,
- uploaderName,
- );
- if (shouldCloseUploadProgress) {
- closeUploadProgress();
- }
+ const wereFilesProcessed = await uploadManager.uploadFiles(
+ filesWithCollectionToUploadIn,
+ collections,
+ uploaderName,
+ );
+ if (!wereFilesProcessed) closeUploadProgress();
if (isElectron()) {
if (watcher.isUploadRunning()) {
await watcher.allFileUploadsDone(
@@ -553,7 +592,7 @@ export default function Uploader(props: Props) {
}
}
} catch (e) {
- log.error("failed to upload files", e);
+ log.error("Failed to upload files", e);
showUserFacingError(e.message);
closeUploadProgress();
} finally {
@@ -563,18 +602,14 @@ export default function Uploader(props: Props) {
const retryFailed = async () => {
try {
- log.info("user retrying failed upload");
- const filesWithCollections =
+ log.info("Retrying failed uploads");
+ const { files, collections } =
uploadManager.getFailedFilesWithCollections();
const uploaderName = uploadManager.getUploaderName();
await preUploadAction();
- await uploadManager.queueFilesForUpload(
- filesWithCollections.files,
- filesWithCollections.collections,
- uploaderName,
- );
+ await uploadManager.uploadFiles(files, collections, uploaderName);
} catch (e) {
- log.error("retry failed files failed", e);
+ log.error("Retrying failed uploads failed", e);
showUserFacingError(e.message);
closeUploadProgress();
} finally {
@@ -635,9 +670,6 @@ export default function Uploader(props: Props) {
) => {
try {
if (accessedThroughSharedURL) {
- log.info(
- `uploading files to pulbic collection - ${props.uploadCollection.name} - ${props.uploadCollection.id}`,
- );
const uploaderName = await getPublicCollectionUploaderName(
getPublicCollectionUID(
publicCollectionGalleryContext.token,
@@ -647,33 +679,30 @@ export default function Uploader(props: Props) {
showUserNameInputDialog();
return;
}
+
if (isPendingDesktopUpload.current) {
isPendingDesktopUpload.current = false;
if (pendingDesktopUploadCollectionName.current) {
- log.info(
- `upload pending files to collection - ${pendingDesktopUploadCollectionName.current}`,
- );
uploadFilesToNewCollections(
"root",
pendingDesktopUploadCollectionName.current,
);
pendingDesktopUploadCollectionName.current = null;
} else {
- log.info(
- `pending upload - strategy - "multiple collections" `,
- );
uploadFilesToNewCollections("parent");
}
return;
}
+
if (isElectron() && pickedUploadType === PICKED_UPLOAD_TYPE.ZIPS) {
- log.info("uploading zip files");
uploadFilesToNewCollections("parent");
return;
}
+
if (isFirstUpload && !importSuggestion.rootFolderName) {
importSuggestion.rootFolderName = FIRST_ALBUM_NAME;
}
+
if (isDragAndDrop.current) {
isDragAndDrop.current = false;
if (
@@ -684,14 +713,15 @@ export default function Uploader(props: Props) {
return;
}
}
+
let showNextModal = () => {};
if (importSuggestion.hasNestedFolders) {
- log.info(`nested folders detected`);
showNextModal = () => setChoiceModalView(true);
} else {
showNextModal = () =>
showCollectionCreateModal(importSuggestion.rootFolderName);
}
+
props.setCollectionSelectorAttributes({
callback: uploadFilesToExistingCollection,
onCancel: handleCollectionSelectorCancel,
@@ -699,7 +729,8 @@ export default function Uploader(props: Props) {
intent: CollectionSelectorIntent.upload,
});
} catch (e) {
- log.error("handleCollectionCreationAndUpload failed", e);
+ // TODO(MR): Why?
+ log.warn("Ignoring error in handleCollectionCreationAndUpload", e);
}
};
@@ -833,7 +864,7 @@ export default function Uploader(props: Props) {
open={userNameInputDialogView}
onClose={handleUserNameInputDialogClose}
onNameSubmit={handlePublicUpload}
- toUploadFilesCount={toUploadFiles.current?.length}
+ toUploadFilesCount={fileOrPathsToUpload.current?.length}
uploaderName={uploaderNameRef.current}
/>
</>
@@ -849,3 +880,103 @@ async function waitAndRun(
}
await task();
}
+
+// This is used to prompt the user to make an upload strategy choice
+interface ImportSuggestion {
+ rootFolderName: string;
+ hasNestedFolders: boolean;
+ hasRootLevelFileWithFolder: boolean;
+}
+
+const DEFAULT_IMPORT_SUGGESTION: ImportSuggestion = {
+ rootFolderName: "",
+ hasNestedFolders: false,
+ hasRootLevelFileWithFolder: false,
+};
+
+function getImportSuggestion(
+ uploadType: PICKED_UPLOAD_TYPE,
+ paths: string[],
+): ImportSuggestion {
+ if (isElectron() && uploadType === PICKED_UPLOAD_TYPE.FILES) {
+ return DEFAULT_IMPORT_SUGGESTION;
+ }
+
+ const slashCount = (str: string) => (str.match(/\//g) ?? []).length;
+ paths.sort((path1, path2) => slashCount(path1) - slashCount(path2));
+ const firstPath = paths[0];
+ const lastPath = paths[paths.length - 1];
+
+ const L = firstPath.length;
+ let i = 0;
+ const firstFileFolder = firstPath.substring(0, firstPath.lastIndexOf("/"));
+ const lastFileFolder = lastPath.substring(0, lastPath.lastIndexOf("/"));
+
+ while (i < L && firstPath.charAt(i) === lastPath.charAt(i)) i++;
+ let commonPathPrefix = firstPath.substring(0, i);
+
+ if (commonPathPrefix) {
+ commonPathPrefix = commonPathPrefix.substring(
+ 0,
+ commonPathPrefix.lastIndexOf("/"),
+ );
+ if (commonPathPrefix) {
+ commonPathPrefix = commonPathPrefix.substring(
+ commonPathPrefix.lastIndexOf("/") + 1,
+ );
+ }
+ }
+ return {
+ rootFolderName: commonPathPrefix || null,
+ hasNestedFolders: firstFileFolder !== lastFileFolder,
+ hasRootLevelFileWithFolder: firstFileFolder === "",
+ };
+}
+
+// This function groups files that have the same parent folder into collections.
+// For example, if the user's files have a directory structure like this
+// a
+// / | \
+// b j c
+// /|\ / \
+// e f g h i
+//
+// The files will be grouped into 3 collections:
+// [a => [j],
+// b => [e,f,g],
+// c => [h, i]]
+const groupFilesBasedOnParentFolder = (fileOrPaths: (File | string)[]) => {
+ const result = new Map<string, (File | string)[]>();
+ for (const fileOrPath of fileOrPaths) {
+ const filePath =
+ /* TODO(MR): ElectronFile */
+ typeof fileOrPath == "string"
+ ? fileOrPath
+ : (fileOrPath["path"] as string);
+
+ let folderPath = filePath.substring(0, filePath.lastIndexOf("/"));
+ // If a file's parent folder is the export metadata directory, treat
+ // the file as belonging to the grandparent folder instead. E.g. for
+ // the list [a/x.png, a/metadata/x.png.json], both entries get grouped
+ // into the collection "a". This clusters the metadata JSON files into
+ // the same collection as the files they describe.
+ if (folderPath.endsWith(exportMetadataDirectoryName)) {
+ folderPath = folderPath.substring(0, folderPath.lastIndexOf("/"));
+ }
+ const folderName = folderPath.substring(
+ folderPath.lastIndexOf("/") + 1,
+ );
+ if (!folderName) throw Error("Unexpected empty folder name");
+ if (!result.has(folderName)) result.set(folderName, []);
+ result.get(folderName).push(fileOrPath);
+ }
+ return result;
+};
+
+/**
+ * Filter out hidden files from amongst {@link fileOrPaths}.
+ *
+ * Hidden files are those whose names begin with a "." (dot).
+ */
+const pruneHiddenFiles = (fileOrPaths: (File | string)[]) =>
+ fileOrPaths.filter((f) => !fopFileName(f).startsWith("."));
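A hedged usage sketch of the two helpers above, assuming browser-style relative paths (and that we are not running under Electron with a plain files selection, which short-circuits `getImportSuggestion` to the default):

```ts
const paths = [
    "photos/2023/beach/img1.png",
    "photos/2023/beach/img2.png",
    "photos/2023/hike/img3.png",
];

// The longest common directory is "photos/2023", so the suggested root
// folder name is "2023"; the differing parents make hasNestedFolders true.
const suggestion = getImportSuggestion(PICKED_UPLOAD_TYPE.FOLDERS, paths);
// => { rootFolderName: "2023", hasNestedFolders: true,
//      hasRootLevelFileWithFolder: false }

// Grouping by immediate parent folder yields two prospective collections.
const groups = groupFilesBasedOnParentFolder(paths);
// => Map { "beach" => [img1, img2], "hike" => [img3] }
```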
diff --git a/web/apps/photos/src/components/WatchFolder.tsx b/web/apps/photos/src/components/WatchFolder.tsx
index 738bafde8..710a54168 100644
--- a/web/apps/photos/src/components/WatchFolder.tsx
+++ b/web/apps/photos/src/components/WatchFolder.tsx
@@ -1,5 +1,5 @@
import { ensureElectron } from "@/next/electron";
-import { basename } from "@/next/file";
+import { basename, dirname } from "@/next/file";
import type { CollectionMapping, FolderWatch } from "@/next/types/ipc";
import { ensure } from "@/utils/ensure";
import {
@@ -32,7 +32,6 @@ import { t } from "i18next";
import { AppContext } from "pages/_app";
import React, { useContext, useEffect, useState } from "react";
import watcher from "services/watch";
-import { areAllInSameDirectory } from "utils/upload";
interface WatchFolderProps {
open: boolean;
@@ -324,3 +323,12 @@ const EntryOptions: React.FC = ({ confirmStopWatching }) => {
);
};
+
+/**
+ * Return true if all the paths in the given list are items that belong to the
+ * same (arbitrary) directory.
+ *
+ * An empty list of paths is considered to be in the same directory.
+ */
+const areAllInSameDirectory = (paths: string[]) =>
+ new Set(paths.map(dirname)).size <= 1;
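For example, with `dirname` stripping the final path component:

```ts
// Both parents are "/media/photos", so the set of directories has size 1.
areAllInSameDirectory(["/media/photos/a.png", "/media/photos/b.png"]); // true

// The parents differ ("/media/photos" vs "/media/other"), so size is 2.
areAllInSameDirectory(["/media/photos/a.png", "/media/other/b.png"]); // false
```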
diff --git a/web/apps/photos/src/components/pages/gallery/PreviewCard.tsx b/web/apps/photos/src/components/pages/gallery/PreviewCard.tsx
index 8704258f8..8091618a1 100644
--- a/web/apps/photos/src/components/pages/gallery/PreviewCard.tsx
+++ b/web/apps/photos/src/components/pages/gallery/PreviewCard.tsx
@@ -1,3 +1,4 @@
+import { FILE_TYPE } from "@/media/file-type";
import log from "@/next/log";
import { Overlay } from "@ente/shared/components/Container";
import { CustomError } from "@ente/shared/error";
@@ -11,7 +12,6 @@ import {
StaticThumbnail,
} from "components/PlaceholderThumbnails";
import { TRASH_SECTION } from "constants/collection";
-import { FILE_TYPE } from "constants/file";
import { GAP_BTW_TILES, IMAGE_CONTAINER_MAX_WIDTH } from "constants/gallery";
import { DeduplicateContext } from "pages/deduplicate";
import { GalleryContext } from "pages/gallery";
diff --git a/web/apps/photos/src/constants/ffmpeg.ts b/web/apps/photos/src/constants/ffmpeg.ts
index 9ecc41eb5..fb0d762e5 100644
--- a/web/apps/photos/src/constants/ffmpeg.ts
+++ b/web/apps/photos/src/constants/ffmpeg.ts
@@ -1,3 +1,3 @@
-export const INPUT_PATH_PLACEHOLDER = "INPUT";
-export const FFMPEG_PLACEHOLDER = "FFMPEG";
-export const OUTPUT_PATH_PLACEHOLDER = "OUTPUT";
+export const ffmpegPathPlaceholder = "FFMPEG";
+export const inputPathPlaceholder = "INPUT";
+export const outputPathPlaceholder = "OUTPUT";
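These tokens act as placeholders in FFmpeg command templates: the command is stored as a string array containing the tokens, and the runner substitutes the actual binary and input/output paths before execution. A minimal sketch under that assumption (the template below is illustrative, not the exact command the app uses):

```ts
import {
    ffmpegPathPlaceholder,
    inputPathPlaceholder,
    outputPathPlaceholder,
} from "constants/ffmpeg";

// Hypothetical template: extract a single frame at the given seek offset
// (in seconds) to use as a thumbnail.
const makeThumbnailCommand = (seekTime: number) => [
    ffmpegPathPlaceholder,
    "-i",
    inputPathPlaceholder,
    "-ss",
    seekTime.toString(),
    "-vframes",
    "1",
    outputPathPlaceholder,
];
```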
diff --git a/web/apps/photos/src/constants/file.ts b/web/apps/photos/src/constants/file.ts
deleted file mode 100644
index 46065136c..000000000
--- a/web/apps/photos/src/constants/file.ts
+++ /dev/null
@@ -1,43 +0,0 @@
-export const MIN_EDITED_CREATION_TIME = new Date(1800, 0, 1);
-export const MAX_EDITED_CREATION_TIME = new Date();
-
-export const MAX_EDITED_FILE_NAME_LENGTH = 100;
-export const MAX_CAPTION_SIZE = 5000;
-
-export const TYPE_HEIC = "heic";
-export const TYPE_HEIF = "heif";
-export const TYPE_JPEG = "jpeg";
-export const TYPE_JPG = "jpg";
-
-export enum FILE_TYPE {
- IMAGE,
- VIDEO,
- LIVE_PHOTO,
- OTHERS,
-}
-
-export const RAW_FORMATS = [
- "heic",
- "rw2",
- "tiff",
- "arw",
- "cr3",
- "cr2",
- "raf",
- "nef",
- "psd",
- "dng",
- "tif",
-];
-export const SUPPORTED_RAW_FORMATS = [
- "heic",
- "rw2",
- "tiff",
- "arw",
- "cr3",
- "cr2",
- "nef",
- "psd",
- "dng",
- "tif",
-];
diff --git a/web/apps/photos/src/constants/upload.ts b/web/apps/photos/src/constants/upload.ts
index 1f8858bc3..a0103cb6e 100644
--- a/web/apps/photos/src/constants/upload.ts
+++ b/web/apps/photos/src/constants/upload.ts
@@ -1,61 +1,4 @@
-import { ENCRYPTION_CHUNK_SIZE } from "@ente/shared/crypto/constants";
-import { FILE_TYPE } from "constants/file";
-import { FileTypeInfo, Location, ParsedExtractedMetadata } from "types/upload";
-
-// list of format that were missed by type-detection for some files.
-export const WHITELISTED_FILE_FORMATS: FileTypeInfo[] = [
- { fileType: FILE_TYPE.IMAGE, exactType: "jpeg", mimeType: "image/jpeg" },
- { fileType: FILE_TYPE.IMAGE, exactType: "jpg", mimeType: "image/jpeg" },
- { fileType: FILE_TYPE.VIDEO, exactType: "webm", mimeType: "video/webm" },
- { fileType: FILE_TYPE.VIDEO, exactType: "mod", mimeType: "video/mpeg" },
- { fileType: FILE_TYPE.VIDEO, exactType: "mp4", mimeType: "video/mp4" },
- { fileType: FILE_TYPE.IMAGE, exactType: "gif", mimeType: "image/gif" },
- { fileType: FILE_TYPE.VIDEO, exactType: "dv", mimeType: "video/x-dv" },
- {
- fileType: FILE_TYPE.VIDEO,
- exactType: "wmv",
- mimeType: "video/x-ms-asf",
- },
- {
- fileType: FILE_TYPE.VIDEO,
- exactType: "hevc",
- mimeType: "video/hevc",
- },
- {
- fileType: FILE_TYPE.IMAGE,
- exactType: "raf",
- mimeType: "image/x-fuji-raf",
- },
- {
- fileType: FILE_TYPE.IMAGE,
- exactType: "orf",
- mimeType: "image/x-olympus-orf",
- },
-
- {
- fileType: FILE_TYPE.IMAGE,
- exactType: "crw",
- mimeType: "image/x-canon-crw",
- },
- {
- fileType: FILE_TYPE.VIDEO,
- exactType: "mov",
- mimeType: "video/quicktime",
- },
-];
-
-export const KNOWN_NON_MEDIA_FORMATS = ["xmp", "html", "txt"];
-
-export const EXIFLESS_FORMATS = ["gif", "bmp"];
-
-// this is the chunk size of the un-encrypted file which is read and encrypted before uploading it as a single part.
-export const MULTIPART_PART_SIZE = 20 * 1024 * 1024;
-
-export const FILE_READER_CHUNK_SIZE = ENCRYPTION_CHUNK_SIZE;
-
-export const FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART = Math.floor(
- MULTIPART_PART_SIZE / FILE_READER_CHUNK_SIZE,
-);
+import { Location } from "types/metadata";
export const RANDOM_PERCENTAGE_PROGRESS_FOR_PUT = () => 90 + 10 * Math.random();
@@ -70,11 +13,6 @@ export enum UPLOAD_STAGES {
FINISH,
}
-export enum UPLOAD_STRATEGY {
- SINGLE_COLLECTION,
- COLLECTION_PER_FOLDER,
-}
-
export enum UPLOAD_RESULT {
FAILED,
ALREADY_UPLOADED,
@@ -86,51 +24,3 @@ export enum UPLOAD_RESULT {
UPLOADED_WITH_STATIC_THUMBNAIL,
ADDED_SYMLINK,
}
-
-export enum PICKED_UPLOAD_TYPE {
- FILES = "files",
- FOLDERS = "folders",
- ZIPS = "zips",
-}
-
-export const MAX_FILE_SIZE_SUPPORTED = 4 * 1024 * 1024 * 1024; // 4 GB
-
-export const LIVE_PHOTO_ASSET_SIZE_LIMIT = 20 * 1024 * 1024; // 20MB
-
-export const NULL_EXTRACTED_METADATA: ParsedExtractedMetadata = {
- location: NULL_LOCATION,
- creationTime: null,
- width: null,
- height: null,
-};
-
-export const A_SEC_IN_MICROSECONDS = 1e6;
-
-export const BLACK_THUMBNAIL_BASE64 =
- "/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAEBAQEBAQEB" +
- "AQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/2wBDAQEBAQEBAQ" +
- "EBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/wAARC" +
- "ACWASwDAREAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUF" +
- "BAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk" +
- "6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztL" +
- "W2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAA" +
- "AAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVY" +
- "nLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImK" +
- "kpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oAD" +
- "AMBAAIRAxEAPwD/AD/6ACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" +
- "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" +
- "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKAC" +
- "gAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" +
- "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" +
- "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" +
- "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" +
- "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" +
- "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoA" +
- "KACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" +
- "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" +
- "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" +
- "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAK" +
- "ACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoA" +
- "KACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" +
- "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" +
- "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgD/9k=";
diff --git a/web/apps/photos/src/contexts/uploadProgress.tsx b/web/apps/photos/src/contexts/uploadProgress.tsx
index fe5f733b8..b25df7d65 100644
--- a/web/apps/photos/src/contexts/uploadProgress.tsx
+++ b/web/apps/photos/src/contexts/uploadProgress.tsx
@@ -1,11 +1,11 @@
import { UPLOAD_STAGES } from "constants/upload";
import { createContext } from "react";
-import {
+import type {
InProgressUpload,
SegregatedFinishedUploads,
UploadCounter,
UploadFileNames,
-} from "types/upload/ui";
+} from "services/upload/uploadManager";
interface UploadProgressContextType {
open: boolean;
diff --git a/web/apps/photos/src/services/clip-service.ts b/web/apps/photos/src/services/clip-service.ts
index 253d8097e..703c89cf4 100644
--- a/web/apps/photos/src/services/clip-service.ts
+++ b/web/apps/photos/src/services/clip-service.ts
@@ -1,10 +1,10 @@
+import { FILE_TYPE } from "@/media/file-type";
import { ensureElectron } from "@/next/electron";
import log from "@/next/log";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { CustomError } from "@ente/shared/error";
import { Events, eventBus } from "@ente/shared/events";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
-import { FILE_TYPE } from "constants/file";
import isElectron from "is-electron";
import PQueue from "p-queue";
import { Embedding } from "types/embedding";
@@ -75,7 +75,6 @@ class CLIPService {
private onFileUploadedHandler:
| ((arg: { enteFile: EnteFile; localFile: globalThis.File }) => void)
| null = null;
- private unsupportedPlatform = false;
constructor() {
this.liveEmbeddingExtractionQueue = new PQueue({
@@ -85,7 +84,7 @@ class CLIPService {
}
isPlatformSupported = () => {
- return isElectron() && !this.unsupportedPlatform;
+ return isElectron();
};
private logoutHandler = async () => {
@@ -99,9 +98,6 @@ class CLIPService {
setupOnFileUploadListener = async () => {
try {
- if (this.unsupportedPlatform) {
- return;
- }
if (this.onFileUploadedHandler) {
log.info("file upload listener already setup");
return;
@@ -188,26 +184,12 @@ class CLIPService {
}
};
- getTextEmbedding = async (text: string): Promise<Float32Array> => {
- try {
- return ensureElectron().clipTextEmbedding(text);
- } catch (e) {
- if (e?.message?.includes(CustomError.UNSUPPORTED_PLATFORM)) {
- this.unsupportedPlatform = true;
- }
- log.error("Failed to compute CLIP text embedding", e);
- throw e;
- }
+ getTextEmbeddingIfAvailable = async (text: string) => {
+ return ensureElectron().clipTextEmbeddingIfAvailable(text);
};
private runClipEmbeddingExtraction = async (canceller: AbortController) => {
try {
- if (this.unsupportedPlatform) {
- log.info(
- `skipping clip embedding extraction, platform unsupported`,
- );
- return;
- }
const user = getData(LS_KEYS.USER);
if (!user) {
return;
@@ -254,11 +236,6 @@ class CLIPService {
e,
);
}
- if (
- e?.message?.includes(CustomError.UNSUPPORTED_PLATFORM)
- ) {
- this.unsupportedPlatform = true;
- }
if (
e?.message === CustomError.REQUEST_CANCELLED ||
e?.message?.includes(CustomError.UNSUPPORTED_PLATFORM)
diff --git a/web/apps/photos/src/services/deduplicationService.ts b/web/apps/photos/src/services/deduplicationService.ts
index 9d8ab399f..1683e554c 100644
--- a/web/apps/photos/src/services/deduplicationService.ts
+++ b/web/apps/photos/src/services/deduplicationService.ts
@@ -1,11 +1,11 @@
+import { hasFileHash } from "@/media/file";
+import { FILE_TYPE } from "@/media/file-type";
+import type { Metadata } from "@/media/types/file";
import log from "@/next/log";
import HTTPService from "@ente/shared/network/HTTPService";
import { getEndpoint } from "@ente/shared/network/api";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
-import { FILE_TYPE } from "constants/file";
import { EnteFile } from "types/file";
-import { Metadata } from "types/upload";
-import { hasFileHash } from "utils/upload";
const ENDPOINT = getEndpoint();
diff --git a/web/apps/photos/src/services/detect-type.ts b/web/apps/photos/src/services/detect-type.ts
new file mode 100644
index 000000000..e92e10bf8
--- /dev/null
+++ b/web/apps/photos/src/services/detect-type.ts
@@ -0,0 +1,99 @@
+import {
+ FILE_TYPE,
+ KnownFileTypeInfos,
+ KnownNonMediaFileExtensions,
+ type FileTypeInfo,
+} from "@/media/file-type";
+import { lowercaseExtension } from "@/next/file";
+import { CustomError } from "@ente/shared/error";
+import FileType from "file-type";
+
+/**
+ * Read the file's initial contents or use the file's name to detect its type.
+ *
+ * This function first reads an initial chunk of the file and tries to detect
+ * the file's {@link FileTypeInfo} from it. If that doesn't work, it then falls
+ * back to using the file's name to detect it.
+ *
+ * If neither of these two approaches work, it throws an exception.
+ *
+ * If we were able to detect the file type, but it is explicitly not a media
+ * (image or video) format that we support, this function throws an error with
+ * the message `CustomError.UNSUPPORTED_FILE_FORMAT`.
+ *
+ * @param file A {@link File} object
+ *
+ * @returns The detected {@link FileTypeInfo}.
+ */
+export const detectFileTypeInfo = async (file: File): Promise<FileTypeInfo> =>
+ detectFileTypeInfoFromChunk(() => readInitialChunkOfFile(file), file.name);
+
+/**
+ * The lower layer implementation of the type detector.
+ *
+ * Usually, when the code already has a {@link File} object at hand, it is
+ * easier to use the higher level {@link detectFileTypeInfo} function.
+ *
+ * However, this lower level function is also exposed for use in cases like
+ * during upload where we might not have a File object and would like to provide
+ * the initial chunk of the file's contents in a different way.
+ *
+ * @param readInitialChunk A function to call to read the initial chunk of the
+ * file's data. There is no strict requirement for the size of the chunk this
+ * function should return, generally the first few KBs should be good.
+ *
+ * @param fileNameOrPath The full path or just the file name of the file whose
+ * type we're trying to determine. This is used by the fallback layer that tries
+ * to detect the type info from the file's extension.
+ */
+export const detectFileTypeInfoFromChunk = async (
+ readInitialChunk: () => Promise<Uint8Array>,
+ fileNameOrPath: string,
+): Promise<FileTypeInfo> => {
+ try {
+ const typeResult = await detectFileTypeFromBuffer(
+ await readInitialChunk(),
+ );
+
+ const mimeType = typeResult.mime;
+
+ let fileType: FILE_TYPE;
+ if (mimeType.startsWith("image/")) {
+ fileType = FILE_TYPE.IMAGE;
+ } else if (mimeType.startsWith("video/")) {
+ fileType = FILE_TYPE.VIDEO;
+ } else {
+ throw new Error(CustomError.UNSUPPORTED_FILE_FORMAT);
+ }
+
+ return {
+ fileType,
+ // See https://github.com/sindresorhus/file-type/blob/main/core.d.ts
+ // for the full list of ext values.
+ extension: typeResult.ext,
+ mimeType,
+ };
+ } catch (e) {
+ const extension = lowercaseExtension(fileNameOrPath);
+ const known = KnownFileTypeInfos.find((f) => f.extension == extension);
+ if (known) return known;
+
+ if (KnownNonMediaFileExtensions.includes(extension))
+ throw Error(CustomError.UNSUPPORTED_FILE_FORMAT);
+
+ throw e;
+ }
+};
+
+const readInitialChunkOfFile = async (file: File) => {
+ const chunkSizeForTypeDetection = 4100;
+ const chunk = file.slice(0, chunkSizeForTypeDetection);
+ return new Uint8Array(await chunk.arrayBuffer());
+};
+
+const detectFileTypeFromBuffer = async (buffer: Uint8Array) => {
+ const result = await FileType.fromBuffer(buffer);
+ if (!result)
+ throw Error("Could not deduce file type from the file's contents");
+ return result;
+};
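A usage sketch of the detector, assuming a user-picked web `File` and treating known non-media formats as an expected, user-visible failure:

```ts
// Hypothetical caller of detectFileTypeInfo.
const handlePickedFile = async (file: File) => {
    try {
        const { fileType, extension, mimeType } =
            await detectFileTypeInfo(file);
        console.log(`Detected ${mimeType} (.${extension})`, fileType);
    } catch (e) {
        if (
            e instanceof Error &&
            e.message == CustomError.UNSUPPORTED_FILE_FORMAT
        ) {
            console.warn(`${file.name} is not a supported image or video`);
        } else {
            throw e;
        }
    }
};
```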
diff --git a/web/apps/photos/src/services/download/index.ts b/web/apps/photos/src/services/download/index.ts
index 41af5c055..70934dac0 100644
--- a/web/apps/photos/src/services/download/index.ts
+++ b/web/apps/photos/src/services/download/index.ts
@@ -1,3 +1,5 @@
+import { FILE_TYPE } from "@/media/file-type";
+import { decodeLivePhoto } from "@/media/live-photo";
import { openCache, type BlobCache } from "@/next/blob-cache";
import log from "@/next/log";
import { APPS } from "@ente/shared/apps/constants";
@@ -5,13 +7,12 @@ import ComlinkCryptoWorker from "@ente/shared/crypto";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { CustomError } from "@ente/shared/error";
import { Events, eventBus } from "@ente/shared/events";
+import { isPlaybackPossible } from "@ente/shared/media/video-playback";
import { Remote } from "comlink";
-import { FILE_TYPE } from "constants/file";
+import isElectron from "is-electron";
+import * as ffmpegService from "services/ffmpeg";
import { EnteFile } from "types/file";
-import {
- generateStreamFromArrayBuffer,
- getRenderableFileURL,
-} from "utils/file";
+import { generateStreamFromArrayBuffer, getRenderableImage } from "utils/file";
import { PhotosDownloadClient } from "./clients/photos";
import { PublicAlbumsDownloadClient } from "./clients/publicAlbums";
@@ -149,7 +150,7 @@ class DownloadManagerImpl {
this.ensureInitialized();
const key = file.id.toString();
- const cached = await this.thumbnailCache.get(key);
+ const cached = await this.thumbnailCache?.get(key);
if (cached) return new Uint8Array(await cached.arrayBuffer());
if (localOnly) return null;
@@ -303,7 +304,7 @@ class DownloadManagerImpl {
if (cachedBlob) res = new Response(cachedBlob);
else {
res = await this.downloadClient.downloadFileStream(file);
- this?.fileCache.put(cacheKey, await res.blob());
+ this.fileCache?.put(cacheKey, await res.blob());
}
const reader = res.body.getReader();
@@ -467,3 +468,159 @@ function createDownloadClient(
return new PhotosDownloadClient(token, timeout);
}
}
+
+async function getRenderableFileURL(
+ file: EnteFile,
+ fileBlob: Blob,
+ originalFileURL: string,
+ forceConvert: boolean,
+): Promise<SourceURLs> {
+ let srcURLs: SourceURLs["url"];
+ switch (file.metadata.fileType) {
+ case FILE_TYPE.IMAGE: {
+ const convertedBlob = await getRenderableImage(
+ file.metadata.title,
+ fileBlob,
+ );
+ const convertedURL = getFileObjectURL(
+ originalFileURL,
+ fileBlob,
+ convertedBlob,
+ );
+ srcURLs = convertedURL;
+ break;
+ }
+ case FILE_TYPE.LIVE_PHOTO: {
+ srcURLs = await getRenderableLivePhotoURL(
+ file,
+ fileBlob,
+ forceConvert,
+ );
+ break;
+ }
+ case FILE_TYPE.VIDEO: {
+ const convertedBlob = await getPlayableVideo(
+ file.metadata.title,
+ fileBlob,
+ forceConvert,
+ );
+ const convertedURL = getFileObjectURL(
+ originalFileURL,
+ fileBlob,
+ convertedBlob,
+ );
+ srcURLs = convertedURL;
+ break;
+ }
+ default: {
+ srcURLs = originalFileURL;
+ break;
+ }
+ }
+
+ let isOriginal: boolean;
+ if (file.metadata.fileType === FILE_TYPE.LIVE_PHOTO) {
+ isOriginal = false;
+ } else {
+ isOriginal = (srcURLs as string) === (originalFileURL as string);
+ }
+
+ return {
+ url: srcURLs,
+ isOriginal,
+ isRenderable:
+ file.metadata.fileType !== FILE_TYPE.LIVE_PHOTO && !!srcURLs,
+ type:
+ file.metadata.fileType === FILE_TYPE.LIVE_PHOTO
+ ? "livePhoto"
+ : "normal",
+ };
+}
+
+const getFileObjectURL = (
+ originalFileURL: string,
+ originalBlob: Blob,
+ convertedBlob: Blob,
+) => {
+ const convertedURL = convertedBlob
+ ? convertedBlob === originalBlob
+ ? originalFileURL
+ : URL.createObjectURL(convertedBlob)
+ : null;
+ return convertedURL;
+};
+
+async function getRenderableLivePhotoURL(
+ file: EnteFile,
+ fileBlob: Blob,
+ forceConvert: boolean,
+): Promise<LivePhotoSourceURL> {
+ const livePhoto = await decodeLivePhoto(file.metadata.title, fileBlob);
+
+ const getRenderableLivePhotoImageURL = async () => {
+ try {
+ const imageBlob = new Blob([livePhoto.imageData]);
+ const convertedImageBlob = await getRenderableImage(
+ livePhoto.imageFileName,
+ imageBlob,
+ );
+
+ return URL.createObjectURL(convertedImageBlob);
+ } catch (e) {
+ //ignore and return null
+ return null;
+ }
+ };
+
+ const getRenderableLivePhotoVideoURL = async () => {
+ try {
+ const videoBlob = new Blob([livePhoto.videoData]);
+ const convertedVideoBlob = await getPlayableVideo(
+ livePhoto.videoFileName,
+ videoBlob,
+ forceConvert,
+ true,
+ );
+ return URL.createObjectURL(convertedVideoBlob);
+ } catch (e) {
+ //ignore and return null
+ return null;
+ }
+ };
+
+ return {
+ image: getRenderableLivePhotoImageURL,
+ video: getRenderableLivePhotoVideoURL,
+ };
+}
+
+async function getPlayableVideo(
+ videoNameTitle: string,
+ videoBlob: Blob,
+ forceConvert = false,
+ runOnWeb = false,
+) {
+ try {
+ const isPlayable = await isPlaybackPossible(
+ URL.createObjectURL(videoBlob),
+ );
+ if (isPlayable && !forceConvert) {
+ return videoBlob;
+ } else {
+ if (!forceConvert && !runOnWeb && !isElectron()) {
+ return null;
+ }
+ log.info(
+ `Video format not supported, converting ${videoNameTitle} to MP4`,
+ );
+ const mp4ConvertedVideo = await ffmpegService.convertToMP4(
+ new File([videoBlob], videoNameTitle),
+ );
+ log.info(`Successfully converted video ${videoNameTitle}`);
+ return new Blob([mp4ConvertedVideo]);
+ }
+ } catch (e) {
+ log.error("video conversion failed", e);
+ return null;
+ }
+}
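A sketch of how the pieces above compose, assuming an `EnteFile` and its decrypted `Blob` are in scope:

```ts
// Hypothetical call site within the download manager: wrap a decrypted
// blob into source URLs that the viewer can render.
const renderableURLs = async (file: EnteFile, fileBlob: Blob) => {
    const originalFileURL = URL.createObjectURL(fileBlob);
    return await getRenderableFileURL(
        file,
        fileBlob,
        originalFileURL,
        false, // forceConvert
    );
};
// For images and videos, the returned url is a (possibly converted) object
// URL string; for live photos it is a pair of lazy getters that produce the
// image and video URLs on demand.
```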
diff --git a/web/apps/photos/src/services/upload/exifService.ts b/web/apps/photos/src/services/exif.ts
similarity index 84%
rename from web/apps/photos/src/services/upload/exifService.ts
rename to web/apps/photos/src/services/exif.ts
index a26075b3a..584d79f88 100644
--- a/web/apps/photos/src/services/upload/exifService.ts
+++ b/web/apps/photos/src/services/exif.ts
@@ -1,12 +1,10 @@
+import { type FileTypeInfo } from "@/media/file-type";
import log from "@/next/log";
-import { CustomError } from "@ente/shared/error";
import { validateAndGetCreationUnixTimeInMicroSeconds } from "@ente/shared/time";
-import { EXIFLESS_FORMATS, NULL_LOCATION } from "constants/upload";
+import { NULL_LOCATION } from "constants/upload";
import exifr from "exifr";
import piexif from "piexifjs";
-import { FileTypeInfo, Location } from "types/upload";
-
-const EXIFR_UNSUPPORTED_FILE_FORMAT_MESSAGE = "Unknown file format";
+import type { Location, ParsedExtractedMetadata } from "types/metadata";
type ParsedEXIFData = Record<string, any> &
Partial<{
@@ -36,15 +34,59 @@ type RawEXIFData = Record<string, any> &
ImageHeight: number;
}>;
+const exifTagsNeededForParsingImageMetadata = [
+ "DateTimeOriginal",
+ "CreateDate",
+ "ModifyDate",
+ "GPSLatitude",
+ "GPSLongitude",
+ "GPSLatitudeRef",
+ "GPSLongitudeRef",
+ "DateCreated",
+ "ExifImageWidth",
+ "ExifImageHeight",
+ "ImageWidth",
+ "ImageHeight",
+ "PixelXDimension",
+ "PixelYDimension",
+ "MetadataDate",
+];
+
+/**
+ * Read EXIF data from an image {@link file} and use that to construct and
+ * return a {@link ParsedExtractedMetadata}.
+ *
+ * This function is tailored for use when we upload files.
+ */
+export const parseImageMetadata = async (
+ file: File,
+ fileTypeInfo: FileTypeInfo,
+): Promise<ParsedExtractedMetadata> => {
+ const exifData = await getParsedExifData(
+ file,
+ fileTypeInfo,
+ exifTagsNeededForParsingImageMetadata,
+ );
+
+ return {
+ location: getEXIFLocation(exifData),
+ creationTime: getEXIFTime(exifData),
+ width: exifData?.imageWidth ?? null,
+ height: exifData?.imageHeight ?? null,
+ };
+};
+
export async function getParsedExifData(
receivedFile: File,
- fileTypeInfo: FileTypeInfo,
+ { extension }: FileTypeInfo,
tags?: string[],
): Promise<ParsedEXIFData> {
+ const exifLessFormats = ["gif", "bmp"];
+ const exifrUnsupportedFileFormatMessage = "Unknown file format";
+
try {
- if (EXIFLESS_FORMATS.includes(fileTypeInfo.exactType)) {
- return null;
- }
+ if (exifLessFormats.includes(extension)) return null;
+
const exifData: RawEXIFData = await exifr.parse(receivedFile, {
reviveValues: false,
tiff: true,
@@ -66,16 +108,11 @@ export async function getParsedExifData(
: exifData;
return parseExifData(filteredExifData);
} catch (e) {
- if (e.message === EXIFR_UNSUPPORTED_FILE_FORMAT_MESSAGE) {
- log.error(
- `exif library unsupported format ${fileTypeInfo.exactType}`,
- e,
- );
+ if (e.message == exifrUnsupportedFileFormatMessage) {
+ log.error(`EXIFR does not support ${extension} files`, e);
+ return undefined;
} else {
- log.error(
- `get parsed exif data failed for file type ${fileTypeInfo.exactType}`,
- e,
- );
+ log.error(`Failed to parse EXIF data for a ${extension} file`, e);
throw e;
}
}
@@ -180,7 +217,7 @@ function parseExifData(exifData: RawEXIFData): ParsedEXIFData {
function parseEXIFDate(dateTimeString: string) {
try {
if (typeof dateTimeString !== "string" || dateTimeString === "") {
- throw Error(CustomError.NOT_A_DATE);
+ throw new Error("Invalid date string");
}
// Check and parse date in the format YYYYMMDD
@@ -211,7 +248,7 @@ function parseEXIFDate(dateTimeString: string) {
typeof day === "undefined" ||
Number.isNaN(day)
) {
- throw Error(CustomError.NOT_A_DATE);
+ throw new Error("Invalid date");
}
let date: Date;
if (
@@ -227,7 +264,7 @@ function parseEXIFDate(dateTimeString: string) {
date = new Date(year, month - 1, day, hour, minute, second);
}
if (Number.isNaN(+date)) {
- throw Error(CustomError.NOT_A_DATE);
+ throw new Error("Invalid date");
}
return date;
} catch (e) {
@@ -249,7 +286,7 @@ export function parseEXIFLocation(
gpsLatitude.length !== 3 ||
gpsLongitude.length !== 3
) {
- throw Error(CustomError.NOT_A_LOCATION);
+ throw new Error("Invalid EXIF location");
}
const latitude = convertDMSToDD(
gpsLatitude[0],
@@ -274,7 +311,7 @@ export function parseEXIFLocation(
})}`,
e,
);
- return NULL_LOCATION;
+ return { ...NULL_LOCATION };
}
}
@@ -291,7 +328,7 @@ function convertDMSToDD(
export function getEXIFLocation(exifData: ParsedEXIFData): Location {
if (!exifData || (!exifData.latitude && exifData.latitude !== 0)) {
- return NULL_LOCATION;
+ return { ...NULL_LOCATION };
}
return { latitude: exifData.latitude, longitude: exifData.longitude };
}
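A usage sketch of `parseImageMetadata`, assuming the `FileTypeInfo` was obtained earlier in the upload pipeline (e.g. from `detectFileTypeInfo`):

```ts
// Hypothetical upload step: extract the creation time, dimensions, and
// location that get recorded in the file's metadata.
const extractImageMetadata = async (
    imageFile: File,
    fileTypeInfo: FileTypeInfo,
) => {
    const metadata = await parseImageMetadata(imageFile, fileTypeInfo);
    // creationTime is epoch microseconds (or null); location falls back to
    // the NULL_LOCATION sentinel when there are no usable GPS coordinates.
    return metadata;
};
```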
diff --git a/web/apps/photos/src/services/export/index.ts b/web/apps/photos/src/services/export/index.ts
index 882c36f9b..5a732658a 100644
--- a/web/apps/photos/src/services/export/index.ts
+++ b/web/apps/photos/src/services/export/index.ts
@@ -1,4 +1,6 @@
+import { FILE_TYPE } from "@/media/file-type";
import { decodeLivePhoto } from "@/media/live-photo";
+import type { Metadata } from "@/media/types/file";
import { ensureElectron } from "@/next/electron";
import log from "@/next/log";
import { CustomError } from "@ente/shared/error";
@@ -6,12 +8,11 @@ import { Events, eventBus } from "@ente/shared/events";
import { LS_KEYS, getData, setData } from "@ente/shared/storage/localStorage";
import { formatDateTimeShort } from "@ente/shared/time/format";
import { User } from "@ente/shared/user/types";
-import { sleep } from "@ente/shared/utils";
+import { wait } from "@ente/shared/utils";
import QueueProcessor, {
CancellationStatus,
RequestCanceller,
} from "@ente/shared/utils/queueProcessor";
-import { FILE_TYPE } from "constants/file";
import { Collection } from "types/collection";
import {
CollectionExportNames,
@@ -22,7 +23,6 @@ import {
FileExportNames,
} from "types/export";
import { EnteFile } from "types/file";
-import { Metadata } from "types/upload";
import {
constructCollectionNameMap,
getCollectionUserFacingName,
@@ -919,7 +919,7 @@ class ExportService {
e.message === CustomError.EXPORT_RECORD_JSON_PARSING_FAILED &&
retry
) {
- await sleep(1000);
+ await wait(1000);
return await this.getExportRecord(folder, false);
}
if (e.message !== CustomError.EXPORT_FOLDER_DOES_NOT_EXIST) {
@@ -994,6 +994,7 @@ class ExportService {
file,
);
await writeStream(
+ electron,
`${collectionExportPath}/${fileExportName}`,
updatedFileStream,
);
@@ -1047,6 +1048,7 @@ class ExportService {
file,
);
await writeStream(
+ electron,
`${collectionExportPath}/${imageExportName}`,
imageStream,
);
@@ -1061,6 +1063,7 @@ class ExportService {
);
try {
await writeStream(
+ electron,
`${collectionExportPath}/${videoExportName}`,
videoStream,
);
diff --git a/web/apps/photos/src/services/export/migration.ts b/web/apps/photos/src/services/export/migration.ts
index 3f471b539..9404ddde5 100644
--- a/web/apps/photos/src/services/export/migration.ts
+++ b/web/apps/photos/src/services/export/migration.ts
@@ -1,10 +1,11 @@
+import { FILE_TYPE } from "@/media/file-type";
import { decodeLivePhoto } from "@/media/live-photo";
import { ensureElectron } from "@/next/electron";
+import { nameAndExtension } from "@/next/file";
import log from "@/next/log";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
import { User } from "@ente/shared/user/types";
-import { sleep } from "@ente/shared/utils";
-import { FILE_TYPE } from "constants/file";
+import { wait } from "@ente/shared/utils";
import { getLocalCollections } from "services/collectionService";
import downloadManager from "services/download";
import { getAllLocalFiles } from "services/fileService";
@@ -25,7 +26,6 @@ import {
getIDBasedSortedFiles,
getPersonalFiles,
mergeMetadata,
- splitFilenameAndExtension,
} from "utils/file";
import {
safeDirectoryName,
@@ -305,7 +305,7 @@ async function getFileExportNamesFromExportedFiles(
);
let success = 0;
for (const file of exportedFiles) {
- await sleep(0);
+ await wait(0);
const collectionPath = exportedCollectionPaths.get(file.collectionID);
log.debug(
() =>
@@ -501,9 +501,7 @@ const getUniqueFileExportNameForMigration = (
.get(collectionPath)
?.has(getFileSavePath(collectionPath, fileExportName))
) {
- const filenameParts = splitFilenameAndExtension(
- sanitizeFilename(filename),
- );
+ const filenameParts = nameAndExtension(sanitizeFilename(filename));
if (filenameParts[1]) {
fileExportName = `${filenameParts[0]}(${count}).${filenameParts[1]}`;
} else {
diff --git a/web/apps/photos/src/services/ffmpeg.ts b/web/apps/photos/src/services/ffmpeg.ts
new file mode 100644
index 000000000..6fc2404e2
--- /dev/null
+++ b/web/apps/photos/src/services/ffmpeg.ts
@@ -0,0 +1,304 @@
+import { ensureElectron } from "@/next/electron";
+import { ElectronFile } from "@/next/types/file";
+import type { Electron } from "@/next/types/ipc";
+import { ComlinkWorker } from "@/next/worker/comlink-worker";
+import { validateAndGetCreationUnixTimeInMicroSeconds } from "@ente/shared/time";
+import { Remote } from "comlink";
+import {
+ ffmpegPathPlaceholder,
+ inputPathPlaceholder,
+ outputPathPlaceholder,
+} from "constants/ffmpeg";
+import { NULL_LOCATION } from "constants/upload";
+import type { ParsedExtractedMetadata } from "types/metadata";
+import type { DedicatedFFmpegWorker } from "worker/ffmpeg.worker";
+
+/**
+ * Generate a thumbnail for the given video using a wasm FFmpeg running in a web
+ * worker.
+ *
+ * This function is called during upload, when we need to generate thumbnails
+ * for the new files that the user is adding.
+ *
+ * @param blob The input video blob.
+ *
+ * @returns JPEG data of the generated thumbnail.
+ *
+ * See also {@link generateVideoThumbnailNative}.
+ */
+export const generateVideoThumbnailWeb = async (blob: Blob) =>
+ _generateVideoThumbnail((seekTime: number) =>
+ ffmpegExecWeb(makeGenThumbnailCommand(seekTime), blob, "jpeg", 0),
+ );
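+
+// A usage sketch (`videoBlob` is a hypothetical Blob containing video data):
+//
+//     const thumbnailJPEGData = await generateVideoThumbnailWeb(videoBlob);
+//     const thumbnailBlob = new Blob([thumbnailJPEGData], { type: "image/jpeg" });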
+
+const _generateVideoThumbnail = async (
+ thumbnailAtTime: (seekTime: number) => Promise<Uint8Array>,
+) => {
+ try {
+ // Try generating thumbnail at seekTime 1 second.
+ return await thumbnailAtTime(1);
+ } catch (e) {
+ // If that fails, try again at the beginning. If even this throws, let
+ // it fail.
+ return await thumbnailAtTime(0);
+ }
+};
+
+/**
+ * Generate a thumbnail for the given video using a native FFmpeg binary bundled
+ * with our desktop app.
+ *
+ * This function is called during upload, when we need to generate thumbnails
+ * for the new files that the user is adding.
+ *
+ * @param dataOrPath The input video's data or the path to the video on the
+ * user's local filesystem. See: [Note: Reading a fileOrPath].
+ *
+ * @returns JPEG data of the generated thumbnail.
+ *
+ * See also {@link generateVideoThumbnailWeb}.
+ */
+export const generateVideoThumbnailNative = async (
+ electron: Electron,
+ dataOrPath: Uint8Array | string,
+) =>
+ _generateVideoThumbnail((seekTime: number) =>
+ electron.ffmpegExec(
+ makeGenThumbnailCommand(seekTime),
+ dataOrPath,
+ "jpeg",
+ 0,
+ ),
+ );
+
+const makeGenThumbnailCommand = (seekTime: number) => [
+ ffmpegPathPlaceholder,
+ "-i",
+ inputPathPlaceholder,
+ "-ss",
+ `00:00:0${seekTime}`,
+ "-vframes",
+ "1",
+ "-vf",
+ "scale=-1:720",
+ outputPathPlaceholder,
+];
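+
+// Illustration: with seekTime = 1, after the execution layer substitutes the
+// placeholders with actual paths, the command above resolves to roughly
+//
+//     ffmpeg -i <input> -ss 00:00:01 -vframes 1 -vf scale=-1:720 <output>
+//
+// i.e. seek one second in, grab a single frame, and scale it to a height of
+// 720 px while preserving the aspect ratio.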
+
+/**
+ * Extract metadata from the given video.
+ *
+ * When we're running in the context of our desktop app _and_ we're passed a
+ * file path, this uses the native FFmpeg bundled with our desktop app.
+ * Otherwise it uses a wasm FFmpeg running in a web worker.
+ *
+ * This function is called during upload, when we need to extract the metadata
+ * of videos that the user is uploading.
+ *
+ * @param fileOrPath A {@link File}, or the absolute path to a file on the
+ * user's local filesytem. A path can only be provided when we're running in the
+ * context of our desktop app.
+ */
+export const extractVideoMetadata = async (
+ fileOrPath: File | string,
+): Promise<ParsedExtractedMetadata> => {
+ const command = extractVideoMetadataCommand;
+ const outputData =
+ fileOrPath instanceof File
+ ? await ffmpegExecWeb(command, fileOrPath, "txt", 0)
+ : await ensureElectron().ffmpegExec(command, fileOrPath, "txt", 0);
+
+ return parseFFmpegExtractedMetadata(outputData);
+};
+
+// Options:
+//
+// - `-c [short for codex] copy`
+// - copy is the [stream_specifier](ffmpeg.org/ffmpeg.html#Stream-specifiers)
+// - copies all the stream without re-encoding
+//
+// - `-map_metadata`
+// - http://ffmpeg.org/ffmpeg.html#Advanced-options (search for map_metadata)
+// - copies all stream metadata to the output
+//
+// - `-f ffmetadata`
+// - https://ffmpeg.org/ffmpeg-formats.html#Metadata-1
+// - dumps metadata from media files into a simple INI-like UTF-8 text file
+//
+const extractVideoMetadataCommand = [
+ ffmpegPathPlaceholder,
+ "-i",
+ inputPathPlaceholder,
+ "-c",
+ "copy",
+ "-map_metadata",
+ "0",
+ "-f",
+ "ffmetadata",
+ outputPathPlaceholder,
+];
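+
+// As a rough illustration, the ffmetadata file produced by this command is an
+// INI-like UTF-8 text file of key=value lines. The values below are made up:
+//
+//     ;FFMETADATA1
+//     major_brand=qt
+//     creation_time=2021-06-30T10:20:30.000000Z
+//     com.apple.quicktime.location.ISO6709=+37.7749-122.4194+011.000/
+//
+// parseFFmpegExtractedMetadata below splits this output on newlines and "=",
+// keeping only well-formed key=value pairs (the ";FFMETADATA1" header doesn't
+// split into two parts, so it gets filtered out).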
+
+enum MetadataTags {
+ CREATION_TIME = "creation_time",
+ APPLE_CONTENT_IDENTIFIER = "com.apple.quicktime.content.identifier",
+ APPLE_LIVE_PHOTO_IDENTIFIER = "com.apple.quicktime.live-photo.auto",
+ APPLE_CREATION_DATE = "com.apple.quicktime.creationdate",
+ APPLE_LOCATION_ISO = "com.apple.quicktime.location.ISO6709",
+ LOCATION = "location",
+}
+
+function parseFFmpegExtractedMetadata(encodedMetadata: Uint8Array) {
+ const metadataString = new TextDecoder().decode(encodedMetadata);
+ const metadataPropertyArray = metadataString.split("\n");
+ const metadataKeyValueArray = metadataPropertyArray.map((property) =>
+ property.split("="),
+ );
+ const validKeyValuePairs = metadataKeyValueArray.filter(
+ (keyValueArray) => keyValueArray.length === 2,
+ ) as Array<[string, string]>;
+
+ const metadataMap = Object.fromEntries(validKeyValuePairs);
+
+ const location = parseAppleISOLocation(
+ metadataMap[MetadataTags.APPLE_LOCATION_ISO] ??
+ metadataMap[MetadataTags.LOCATION],
+ );
+
+ const creationTime = parseCreationTime(
+ metadataMap[MetadataTags.APPLE_CREATION_DATE] ??
+ metadataMap[MetadataTags.CREATION_TIME],
+ );
+ const parsedMetadata: ParsedExtractedMetadata = {
+ creationTime,
+ location: {
+ latitude: location.latitude,
+ longitude: location.longitude,
+ },
+ width: null,
+ height: null,
+ };
+ return parsedMetadata;
+}
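+
+// Continuing the made-up sample above, metadataMap would contain entries for
+// "creation_time" and the Apple location tag, from which we derive a
+// creationTime in epoch microseconds and a location. Note the precedence: the
+// Apple-specific tags, when present, win over the generic "creation_time" and
+// "location" ones.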
+
+function parseAppleISOLocation(isoLocation: string) {
+ let location = { ...NULL_LOCATION };
+ if (isoLocation) {
+ const [latitude, longitude] = isoLocation
+ .match(/(\+|-)\d+\.*\d+/g)
+ .map((x) => parseFloat(x));
+
+ location = { latitude, longitude };
+ }
+ return location;
+}
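+
+// Example with a hypothetical ISO 6709 string: "+37.7749-122.4194+011.000/"
+// matches ["+37.7749", "-122.4194", "+011.000"]; the destructuring keeps the
+// first two, so the result is { latitude: 37.7749, longitude: -122.4194 }
+// (the third component, the altitude, is ignored).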
+
+function parseCreationTime(creationTime: string) {
+ let dateTime = null;
+ if (creationTime) {
+ dateTime = validateAndGetCreationUnixTimeInMicroSeconds(
+ new Date(creationTime),
+ );
+ }
+ return dateTime;
+}
+
+/** Called when viewing a file */
+export async function convertToMP4(file: File) {
+ return await ffmpegExec2(
+ [
+ ffmpegPathPlaceholder,
+ "-i",
+ inputPathPlaceholder,
+ "-preset",
+ "ultrafast",
+ outputPathPlaceholder,
+ ],
+ file,
+ "mp4",
+ 30 * 1000,
+ );
+}
+
+/**
+ * Run the given FFmpeg command using a wasm FFmpeg running in a web worker.
+ *
+ * As a rough ballpark, currently the native FFmpeg integration in the desktop
+ * app is 10-20x faster than the wasm one. See: [Note: FFmpeg in Electron].
+ */
+const ffmpegExecWeb = async (
+ command: string[],
+ blob: Blob,
+ outputFileExtension: string,
+ timeoutMs: number,
+) => {
+ const worker = await workerFactory.lazy();
+ return await worker.exec(command, blob, outputFileExtension, timeoutMs);
+};
+
+/**
+ * Run the given FFmpeg command using a native FFmpeg binary bundled with our
+ * desktop app.
+ *
+ * See also: {@link ffmpegExecWeb}.
+ */
+/*
+TODO(MR): Remove me
+const ffmpegExecNative = async (
+ electron: Electron,
+ command: string[],
+ blob: Blob,
+ timeoutMs: number = 0,
+) => {
+ const electron = globalThis.electron;
+ if (electron) {
+ const data = new Uint8Array(await blob.arrayBuffer());
+ return await electron.ffmpegExec(command, data, timeoutMs);
+ } else {
+ const worker = await workerFactory.lazy();
+ return await worker.exec(command, blob, timeoutMs);
+ }
+};
+*/
+
+const ffmpegExec2 = async (
+ command: string[],
+ inputFile: File | ElectronFile,
+ outputFileExtension: string,
+ timeoutMS: number = 0,
+) => {
+ const electron = globalThis.electron;
+ if (electron || false) {
+ throw new Error("WIP");
+ // return electron.ffmpegExec(
+ // command,
+ // /* TODO(MR): ElectronFile changes */
+ // inputFile as unknown as string,
+ // outputFileName,
+ // timeoutMS,
+ // );
+ } else {
+ /* TODO(MR): ElectronFile changes */
+ return ffmpegExecWeb(
+ command,
+ inputFile as File,
+ outputFileExtension,
+ timeoutMS,
+ );
+ }
+};
+
+/** Lazily create a singleton instance of our worker */
+class WorkerFactory {
+ private instance: Promise<Remote<DedicatedFFmpegWorker>>;
+
+ async lazy() {
+ if (!this.instance) this.instance = createComlinkWorker().remote;
+ return this.instance;
+ }
+}
+
+const workerFactory = new WorkerFactory();
+
+const createComlinkWorker = () =>
+ new ComlinkWorker<typeof DedicatedFFmpegWorker>(
+ "ffmpeg-worker",
+ new Worker(new URL("worker/ffmpeg.worker.ts", import.meta.url)),
+ );
diff --git a/web/apps/photos/src/services/ffmpeg/ffmpegFactory.ts b/web/apps/photos/src/services/ffmpeg/ffmpegFactory.ts
deleted file mode 100644
index 49aee9868..000000000
--- a/web/apps/photos/src/services/ffmpeg/ffmpegFactory.ts
+++ /dev/null
@@ -1,37 +0,0 @@
-import { ElectronFile } from "types/upload";
-import ComlinkFFmpegWorker from "utils/comlink/ComlinkFFmpegWorker";
-
-export interface IFFmpeg {
- run: (
- cmd: string[],
- inputFile: File | ElectronFile,
- outputFilename: string,
- dontTimeout?: boolean,
- ) => Promise<File | ElectronFile>;
-}
-
-class FFmpegFactory {
- private client: IFFmpeg;
- async getFFmpegClient() {
- if (!this.client) {
- const electron = globalThis.electron;
- if (electron) {
- this.client = {
- run(cmd, inputFile, outputFilename, dontTimeout) {
- return electron.runFFmpegCmd(
- cmd,
- inputFile,
- outputFilename,
- dontTimeout,
- );
- },
- };
- } else {
- this.client = await ComlinkFFmpegWorker.getInstance();
- }
- }
- return this.client;
- }
-}
-
-export default new FFmpegFactory();
diff --git a/web/apps/photos/src/services/ffmpeg/ffmpegService.ts b/web/apps/photos/src/services/ffmpeg/ffmpegService.ts
deleted file mode 100644
index 0a6a66cb0..000000000
--- a/web/apps/photos/src/services/ffmpeg/ffmpegService.ts
+++ /dev/null
@@ -1,100 +0,0 @@
-import log from "@/next/log";
-import {
- FFMPEG_PLACEHOLDER,
- INPUT_PATH_PLACEHOLDER,
- OUTPUT_PATH_PLACEHOLDER,
-} from "constants/ffmpeg";
-import { ElectronFile } from "types/upload";
-import { parseFFmpegExtractedMetadata } from "utils/ffmpeg";
-import ffmpegFactory from "./ffmpegFactory";
-
-export async function generateVideoThumbnail(
- file: File | ElectronFile,
-): Promise<File | ElectronFile> {
- try {
- let seekTime = 1;
- const ffmpegClient = await ffmpegFactory.getFFmpegClient();
- while (seekTime >= 0) {
- try {
- return await ffmpegClient.run(
- [
- FFMPEG_PLACEHOLDER,
- "-i",
- INPUT_PATH_PLACEHOLDER,
- "-ss",
- `00:00:0${seekTime}`,
- "-vframes",
- "1",
- "-vf",
- "scale=-1:720",
- OUTPUT_PATH_PLACEHOLDER,
- ],
- file,
- "thumb.jpeg",
- );
- } catch (e) {
- if (seekTime === 0) {
- throw e;
- }
- }
- seekTime--;
- }
- } catch (e) {
- log.error("ffmpeg generateVideoThumbnail failed", e);
- throw e;
- }
-}
-
-export async function extractVideoMetadata(file: File | ElectronFile) {
- try {
- const ffmpegClient = await ffmpegFactory.getFFmpegClient();
- // https://stackoverflow.com/questions/9464617/retrieving-and-saving-media-metadata-using-ffmpeg
- // -c [short for codex] copy[(stream_specifier)[ffmpeg.org/ffmpeg.html#Stream-specifiers]] => copies all the stream without re-encoding
- // -map_metadata [http://ffmpeg.org/ffmpeg.html#Advanced-options search for map_metadata] => copies all stream metadata to the out
- // -f ffmetadata [https://ffmpeg.org/ffmpeg-formats.html#Metadata-1] => dump metadata from media files into a simple UTF-8-encoded INI-like text file
- const metadata = await ffmpegClient.run(
- [
- FFMPEG_PLACEHOLDER,
- "-i",
- INPUT_PATH_PLACEHOLDER,
- "-c",
- "copy",
- "-map_metadata",
- "0",
- "-f",
- "ffmetadata",
- OUTPUT_PATH_PLACEHOLDER,
- ],
- file,
- `metadata.txt`,
- );
- return parseFFmpegExtractedMetadata(
- new Uint8Array(await metadata.arrayBuffer()),
- );
- } catch (e) {
- log.error("ffmpeg extractVideoMetadata failed", e);
- throw e;
- }
-}
-
-export async function convertToMP4(file: File | ElectronFile) {
- try {
- const ffmpegClient = await ffmpegFactory.getFFmpegClient();
- return await ffmpegClient.run(
- [
- FFMPEG_PLACEHOLDER,
- "-i",
- INPUT_PATH_PLACEHOLDER,
- "-preset",
- "ultrafast",
- OUTPUT_PATH_PLACEHOLDER,
- ],
- file,
- "output.mp4",
- true,
- );
- } catch (e) {
- log.error("ffmpeg convertToMP4 failed", e);
- throw e;
- }
-}
diff --git a/web/apps/photos/src/services/updateCreationTimeWithExif.ts b/web/apps/photos/src/services/fix-exif.ts
similarity index 94%
rename from web/apps/photos/src/services/updateCreationTimeWithExif.ts
rename to web/apps/photos/src/services/fix-exif.ts
index 667ae44f4..f47e4c5ed 100644
--- a/web/apps/photos/src/services/updateCreationTimeWithExif.ts
+++ b/web/apps/photos/src/services/fix-exif.ts
@@ -1,15 +1,15 @@
+import { FILE_TYPE } from "@/media/file-type";
import log from "@/next/log";
import { validateAndGetCreationUnixTimeInMicroSeconds } from "@ente/shared/time";
import type { FixOption } from "components/FixCreationTime";
-import { FILE_TYPE } from "constants/file";
-import { getFileType } from "services/typeDetectionService";
+import { detectFileTypeInfo } from "services/detect-type";
import { EnteFile } from "types/file";
import {
changeFileCreationTime,
updateExistingFilePubMetadata,
} from "utils/file";
import downloadManager from "./download";
-import { getParsedExifData } from "./upload/exifService";
+import { getParsedExifData } from "./exif";
const EXIF_TIME_TAGS = [
"DateTimeOriginal",
@@ -53,7 +53,7 @@ export async function updateCreationTimeWithExif(
[fileBlob],
file.metadata.title,
);
- const fileTypeInfo = await getFileType(fileObject);
+ const fileTypeInfo = await detectFileTypeInfo(fileObject);
const exifData = await getParsedExifData(
fileObject,
fileTypeInfo,
diff --git a/web/apps/photos/src/services/heic-convert/service.ts b/web/apps/photos/src/services/heic-convert.ts
similarity index 78%
rename from web/apps/photos/src/services/heic-convert/service.ts
rename to web/apps/photos/src/services/heic-convert.ts
index 0dc650612..478cce218 100644
--- a/web/apps/photos/src/services/heic-convert/service.ts
+++ b/web/apps/photos/src/services/heic-convert.ts
@@ -4,8 +4,18 @@ import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { CustomError } from "@ente/shared/error";
import { retryAsyncFunction } from "@ente/shared/utils";
import QueueProcessor from "@ente/shared/utils/queueProcessor";
-import { getDedicatedConvertWorker } from "utils/comlink/ComlinkConvertWorker";
-import { DedicatedConvertWorker } from "worker/convert.worker";
+import { type DedicatedHEICConvertWorker } from "worker/heic-convert.worker";
+
+/**
+ * Convert a HEIC image to a JPEG.
+ *
+ * Behind the scenes, it uses a web worker pool to do the conversion using a
+ * WASM HEIC conversion package.
+ *
+ * @param heicBlob The HEIC blob to convert.
+ * @returns The JPEG blob.
+ */
+export const heicToJPEG = (heicBlob: Blob) => converter.convert(heicBlob);
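+
+// Usage sketch, assuming `file` is a HEIC File (e.g. from a file input):
+//
+//     const jpegBlob = await heicToJPEG(file);
+//     imgEl.src = URL.createObjectURL(jpegBlob);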
const WORKER_POOL_SIZE = 2;
const WAIT_TIME_BEFORE_NEXT_ATTEMPT_IN_MICROSECONDS = [100, 100];
@@ -14,20 +24,18 @@ const BREATH_TIME_IN_MICROSECONDS = 1000;
class HEICConverter {
private convertProcessor = new QueueProcessor<Blob>();
- private workerPool: ComlinkWorker<typeof DedicatedConvertWorker>[] = [];
- private ready: Promise<void>;
+ private workerPool: ComlinkWorker<typeof DedicatedHEICConvertWorker>[] = [];
- constructor() {
- this.ready = this.init();
- }
- private async init() {
+ private initIfNeeded() {
+ if (this.workerPool.length > 0) return;
this.workerPool = [];
- for (let i = 0; i < WORKER_POOL_SIZE; i++) {
- this.workerPool.push(getDedicatedConvertWorker());
- }
+ for (let i = 0; i < WORKER_POOL_SIZE; i++)
+ this.workerPool.push(createComlinkWorker());
}
+
async convert(fileBlob: Blob): Promise<Blob> {
- await this.ready;
+ this.initIfNeeded();
+
const response = this.convertProcessor.queueUpRequest(() =>
retryAsyncFunction(async () => {
const convertWorker = this.workerPool.shift();
@@ -42,9 +50,7 @@ class HEICConverter {
}, WAIT_TIME_IN_MICROSECONDS);
const startTime = Date.now();
const convertedHEIC =
- await worker.convertHEICToJPEG(
- fileBlob,
- );
+ await worker.heicToJPEG(fileBlob);
log.info(
`originalFileSize:${convertBytesToHumanReadable(
fileBlob?.size,
@@ -90,11 +96,12 @@ class HEICConverter {
} catch (e) {
log.error("heic conversion failed", e);
convertWorker.terminate();
- this.workerPool.push(getDedicatedConvertWorker());
+ this.workerPool.push(createComlinkWorker());
throw e;
}
}, WAIT_TIME_BEFORE_NEXT_ATTEMPT_IN_MICROSECONDS),
);
+
try {
return await response.promise;
} catch (e) {
@@ -107,4 +114,11 @@ class HEICConverter {
}
}
-export default new HEICConverter();
+/** The singleton instance of {@link HEICConverter}. */
+const converter = new HEICConverter();
+
+const createComlinkWorker = () =>
+ new ComlinkWorker<typeof DedicatedHEICConvertWorker>(
+ "heic-convert-worker",
+ new Worker(new URL("worker/heic-convert.worker.ts", import.meta.url)),
+ );
diff --git a/web/apps/photos/src/services/heicConversionService.ts b/web/apps/photos/src/services/heicConversionService.ts
deleted file mode 100644
index 189781494..000000000
--- a/web/apps/photos/src/services/heicConversionService.ts
+++ /dev/null
@@ -1,14 +0,0 @@
-import log from "@/next/log";
-import WasmHEICConverterService from "./heic-convert/service";
-
-class HeicConversionService {
- async convert(heicFileData: Blob): Promise<Blob> {
- try {
- return await WasmHEICConverterService.convert(heicFileData);
- } catch (e) {
- log.error("failed to convert heic file", e);
- throw e;
- }
- }
-}
-export default new HeicConversionService();
diff --git a/web/apps/photos/src/services/locationSearchService.ts b/web/apps/photos/src/services/locationSearchService.ts
index 2aa2b6bac..354c87a71 100644
--- a/web/apps/photos/src/services/locationSearchService.ts
+++ b/web/apps/photos/src/services/locationSearchService.ts
@@ -1,6 +1,6 @@
import log from "@/next/log";
import { LocationTagData } from "types/entity";
-import { Location } from "types/upload";
+import { Location } from "types/metadata";
export interface City {
city: string;
diff --git a/web/apps/photos/src/services/machineLearning/mlWorkManager.ts b/web/apps/photos/src/services/machineLearning/mlWorkManager.ts
index c5df14b22..d1c5e9db5 100644
--- a/web/apps/photos/src/services/machineLearning/mlWorkManager.ts
+++ b/web/apps/photos/src/services/machineLearning/mlWorkManager.ts
@@ -1,8 +1,8 @@
+import { FILE_TYPE } from "@/media/file-type";
import log from "@/next/log";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { eventBus, Events } from "@ente/shared/events";
import { getToken, getUserID } from "@ente/shared/storage/localStorage/helpers";
-import { FILE_TYPE } from "constants/file";
import debounce from "debounce";
import PQueue from "p-queue";
import { JobResult } from "types/common/job";
diff --git a/web/apps/photos/src/services/machineLearning/readerService.ts b/web/apps/photos/src/services/machineLearning/readerService.ts
index a18b3c908..62aebdbd1 100644
--- a/web/apps/photos/src/services/machineLearning/readerService.ts
+++ b/web/apps/photos/src/services/machineLearning/readerService.ts
@@ -1,5 +1,5 @@
+import { FILE_TYPE } from "@/media/file-type";
import log from "@/next/log";
-import { FILE_TYPE } from "constants/file";
import { MLSyncContext, MLSyncFileContext } from "types/machineLearning";
import {
getLocalFileImageBitmap,
diff --git a/web/apps/photos/src/services/pending-uploads.ts b/web/apps/photos/src/services/pending-uploads.ts
deleted file mode 100644
index 3b219f5b0..000000000
--- a/web/apps/photos/src/services/pending-uploads.ts
+++ /dev/null
@@ -1,42 +0,0 @@
-import { ensureElectron } from "@/next/electron";
-import { Collection } from "types/collection";
-import { ElectronFile, FileWithCollection } from "types/upload";
-
-export const setToUploadCollection = async (collections: Collection[]) => {
- let collectionName: string = null;
- /* collection being one suggest one of two things
- 1. Either the user has upload to a single existing collection
- 2. Created a new single collection to upload to
- may have had multiple folder, but chose to upload
- to one album
- hence saving the collection name when upload collection count is 1
- helps the info of user choosing this options
- and on next upload we can directly start uploading to this collection
- */
- if (collections.length === 1) {
- collectionName = collections[0].name;
- }
- await ensureElectron().setPendingUploadCollection(collectionName);
-};
-
-export const updatePendingUploads = async (files: FileWithCollection[]) => {
- const filePaths = [];
- for (const fileWithCollection of files) {
- if (fileWithCollection.isLivePhoto) {
- filePaths.push(
- (fileWithCollection.livePhotoAssets.image as ElectronFile).path,
- (fileWithCollection.livePhotoAssets.video as ElectronFile).path,
- );
- } else {
- filePaths.push((fileWithCollection.file as ElectronFile).path);
- }
- }
- await ensureElectron().setPendingUploadFiles("files", filePaths);
-};
-
-export const cancelRemainingUploads = async () => {
- const electron = ensureElectron();
- await electron.setPendingUploadCollection(undefined);
- await electron.setPendingUploadFiles("zips", []);
- await electron.setPendingUploadFiles("files", []);
-};
diff --git a/web/apps/photos/src/services/readerService.ts b/web/apps/photos/src/services/readerService.ts
deleted file mode 100644
index e410144cf..000000000
--- a/web/apps/photos/src/services/readerService.ts
+++ /dev/null
@@ -1,59 +0,0 @@
-import { convertBytesToHumanReadable } from "@/next/file";
-import log from "@/next/log";
-import { ElectronFile } from "types/upload";
-
-export async function getUint8ArrayView(
- file: Blob | ElectronFile,
-): Promise<Uint8Array> {
- try {
- return new Uint8Array(await file.arrayBuffer());
- } catch (e) {
- log.error(
- `Failed to read file blob of size ${convertBytesToHumanReadable(file.size)}`,
- e,
- );
- throw e;
- }
-}
-
-export function getFileStream(file: File, chunkSize: number) {
- const fileChunkReader = fileChunkReaderMaker(file, chunkSize);
-
- const stream = new ReadableStream({
- async pull(controller: ReadableStreamDefaultController) {
- const chunk = await fileChunkReader.next();
- if (chunk.done) {
- controller.close();
- } else {
- controller.enqueue(chunk.value);
- }
- },
- });
- const chunkCount = Math.ceil(file.size / chunkSize);
- return {
- stream,
- chunkCount,
- };
-}
-
-export async function getElectronFileStream(
- file: ElectronFile,
- chunkSize: number,
-) {
- const chunkCount = Math.ceil(file.size / chunkSize);
- return {
- stream: await file.stream(),
- chunkCount,
- };
-}
-
-async function* fileChunkReaderMaker(file: File, chunkSize: number) {
- let offset = 0;
- while (offset < file.size) {
- const blob = file.slice(offset, chunkSize + offset);
- const fileChunk = await getUint8ArrayView(blob);
- yield fileChunk;
- offset += chunkSize;
- }
- return null;
-}
diff --git a/web/apps/photos/src/services/searchService.ts b/web/apps/photos/src/services/searchService.ts
index 408c3daa5..96c574b9d 100644
--- a/web/apps/photos/src/services/searchService.ts
+++ b/web/apps/photos/src/services/searchService.ts
@@ -1,7 +1,6 @@
+import { FILE_TYPE } from "@/media/file-type";
import log from "@/next/log";
-import { CustomError } from "@ente/shared/error";
import * as chrono from "chrono-node";
-import { FILE_TYPE } from "constants/file";
import { t } from "i18next";
import { Collection } from "types/collection";
import { EntityType, LocationTag, LocationTagData } from "types/entity";
@@ -287,24 +286,20 @@ async function getLocationSuggestions(searchPhrase: string) {
return [...locationTagSuggestions, ...citySearchSuggestions];
}
-async function getClipSuggestion(searchPhrase: string): Promise<Suggestion> {
- try {
- if (!clipService.isPlatformSupported()) {
- return null;
- }
-
- const clipResults = await searchClip(searchPhrase);
- return {
- type: SuggestionType.CLIP,
- value: clipResults,
- label: searchPhrase,
- };
- } catch (e) {
- if (!e.message?.includes(CustomError.MODEL_DOWNLOAD_PENDING)) {
- log.error("getClipSuggestion failed", e);
- }
+async function getClipSuggestion(
+ searchPhrase: string,
+): Promise<Suggestion | undefined> {
+ if (!clipService.isPlatformSupported()) {
return null;
}
+
+ const clipResults = await searchClip(searchPhrase);
+ if (!clipResults) return undefined;
+ return {
+ type: SuggestionType.CLIP,
+ value: clipResults,
+ label: searchPhrase,
+ };
}
function searchCollection(
@@ -374,9 +369,14 @@ async function searchLocationTag(searchPhrase: string): Promise<LocationTag[]> {
return matchedLocationTags;
}
-async function searchClip(searchPhrase: string): Promise<ClipSearchScores> {
+const searchClip = async (
+ searchPhrase: string,
+): Promise<ClipSearchScores | undefined> => {
+ const textEmbedding =
+ await clipService.getTextEmbeddingIfAvailable(searchPhrase);
+ if (!textEmbedding) return undefined;
+
const imageEmbeddings = await getLocalEmbeddings();
- const textEmbedding = await clipService.getTextEmbedding(searchPhrase);
const clipSearchResult = new Map(
(
await Promise.all(
@@ -394,7 +394,7 @@ async function searchClip(searchPhrase: string): Promise {
);
return clipSearchResult;
-}
+};
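+
+// Assuming ClipSearchScores maps a file ID to its CLIP similarity score, a
+// consumer could rank the matches along these lines:
+//
+//     const rankedFileIDs = [...clipSearchResult.entries()]
+//         .sort(([, a], [, b]) => b - a)
+//         .map(([fileID]) => fileID);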
function convertSuggestionToSearchQuery(option: Suggestion): Search {
switch (option.type) {
diff --git a/web/apps/photos/src/services/typeDetectionService.ts b/web/apps/photos/src/services/typeDetectionService.ts
deleted file mode 100644
index 5ff8f0169..000000000
--- a/web/apps/photos/src/services/typeDetectionService.ts
+++ /dev/null
@@ -1,96 +0,0 @@
-import log from "@/next/log";
-import { CustomError } from "@ente/shared/error";
-import { FILE_TYPE } from "constants/file";
-import {
- KNOWN_NON_MEDIA_FORMATS,
- WHITELISTED_FILE_FORMATS,
-} from "constants/upload";
-import FileType, { FileTypeResult } from "file-type";
-import { ElectronFile, FileTypeInfo } from "types/upload";
-import { getFileExtension } from "utils/file";
-import { getUint8ArrayView } from "./readerService";
-
-const TYPE_VIDEO = "video";
-const TYPE_IMAGE = "image";
-const CHUNK_SIZE_FOR_TYPE_DETECTION = 4100;
-
-export async function getFileType(
- receivedFile: File | ElectronFile,
-): Promise<FileTypeInfo> {
- try {
- let fileType: FILE_TYPE;
- let typeResult: FileTypeResult;
-
- if (receivedFile instanceof File) {
- typeResult = await extractFileType(receivedFile);
- } else {
- typeResult = await extractElectronFileType(receivedFile);
- }
-
- const mimTypeParts: string[] = typeResult.mime?.split("/");
-
- if (mimTypeParts?.length !== 2) {
- throw Error(CustomError.INVALID_MIME_TYPE(typeResult.mime));
- }
- switch (mimTypeParts[0]) {
- case TYPE_IMAGE:
- fileType = FILE_TYPE.IMAGE;
- break;
- case TYPE_VIDEO:
- fileType = FILE_TYPE.VIDEO;
- break;
- default:
- throw Error(CustomError.NON_MEDIA_FILE);
- }
- return {
- fileType,
- exactType: typeResult.ext,
- mimeType: typeResult.mime,
- };
- } catch (e) {
- const fileFormat = getFileExtension(receivedFile.name);
- const whiteListedFormat = WHITELISTED_FILE_FORMATS.find(
- (a) => a.exactType === fileFormat,
- );
- if (whiteListedFormat) {
- return whiteListedFormat;
- }
- if (KNOWN_NON_MEDIA_FORMATS.includes(fileFormat)) {
- throw Error(CustomError.UNSUPPORTED_FILE_FORMAT);
- }
- if (e.message === CustomError.NON_MEDIA_FILE) {
- log.error(`unsupported file format ${fileFormat}`, e);
- throw Error(CustomError.UNSUPPORTED_FILE_FORMAT);
- }
- log.error(`type detection failed for format ${fileFormat}`, e);
- throw Error(CustomError.TYPE_DETECTION_FAILED(fileFormat));
- }
-}
-
-async function extractFileType(file: File) {
- const fileBlobChunk = file.slice(0, CHUNK_SIZE_FOR_TYPE_DETECTION);
- const fileDataChunk = await getUint8ArrayView(fileBlobChunk);
- return getFileTypeFromBuffer(fileDataChunk);
-}
-
-async function extractElectronFileType(file: ElectronFile) {
- const stream = await file.stream();
- const reader = stream.getReader();
- const { value: fileDataChunk } = await reader.read();
- await reader.cancel();
- return getFileTypeFromBuffer(fileDataChunk);
-}
-
-async function getFileTypeFromBuffer(buffer: Uint8Array) {
- const result = await FileType.fromBuffer(buffer);
- if (!result?.mime) {
- let logableInfo = "";
- try {
- logableInfo = `result: ${JSON.stringify(result)}`;
- } catch (e) {
- logableInfo = "failed to stringify result";
- }
- throw Error(`mimetype missing from file type result - ${logableInfo}`);
- }
- return result;
-}
diff --git a/web/apps/photos/src/services/upload/date.ts b/web/apps/photos/src/services/upload/date.ts
new file mode 100644
index 000000000..89934e37c
--- /dev/null
+++ b/web/apps/photos/src/services/upload/date.ts
@@ -0,0 +1,166 @@
+import log from "@/next/log";
+import { validateAndGetCreationUnixTimeInMicroSeconds } from "@ente/shared/time";
+
+/**
+ * Try to extract a date (as epoch microseconds) from a file name by matching it
+ * against certain known patterns for media files.
+ *
+ * If it doesn't match a known pattern, or if there is some error during the
+ * parsing, return `undefined`.
+ */
+export const tryParseEpochMicrosecondsFromFileName = (
+ fileName: string,
+): number | undefined => {
+ try {
+ fileName = fileName.trim();
+ let parsedDate: Date;
+ if (fileName.startsWith("IMG-") || fileName.startsWith("VID-")) {
+ // WhatsApp media files
+ // Sample name: IMG-20171218-WA0028.jpg
+ parsedDate = parseDateFromFusedDateString(fileName.split("-")[1]);
+ } else if (fileName.startsWith("Screenshot_")) {
+ // Screenshots on Android
+ // Sample name: Screenshot_20181227-152914.jpg
+ parsedDate = parseDateFromFusedDateString(
+ fileName.replaceAll("Screenshot_", ""),
+ );
+ } else if (fileName.startsWith("signal-")) {
+ // Signal images
+ // Sample name: signal-2018-08-21-100217.jpg
+ const p = fileName.split("-");
+ const dateString = `${p[1]}${p[2]}${p[3]}-${p[4]}`;
+ parsedDate = parseDateFromFusedDateString(dateString);
+ }
+ if (!parsedDate) {
+ parsedDate = tryToParseDateTime(fileName);
+ }
+ return validateAndGetCreationUnixTimeInMicroSeconds(parsedDate);
+ } catch (e) {
+ log.error(`Could not extract date from file name ${fileName}`, e);
+ return undefined;
+ }
+};
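+
+// Illustrative file names and the local-time dates they parse to:
+//
+//     "IMG-20171218-WA0028.jpg"        -> 2017-12-18 (WhatsApp media)
+//     "Screenshot_20181227-152914.jpg" -> 2018-12-27 15:29:14 (Android)
+//     "signal-2018-08-21-100217.jpg"   -> 2018-08-21 10:02:17 (Signal)
+//
+// The returned value is the corresponding Unix epoch time in microseconds.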
+
+interface DateComponent<T = number> {
+ year: T;
+ month: T;
+ day: T;
+ hour: T;
+ minute: T;
+ second: T;
+}
+
+const currentYear = new Date().getFullYear();
+
+/*
+Parse a date from a fused date string of the form "YYYYMMDD-HHMMSS" by slicing
+out its date and time components.
+ */
+function parseDateFromFusedDateString(dateTime: string) {
+ const dateComponent: DateComponent<number> = convertDateComponentToNumber({
+ year: dateTime.slice(0, 4),
+ month: dateTime.slice(4, 6),
+ day: dateTime.slice(6, 8),
+ hour: dateTime.slice(9, 11),
+ minute: dateTime.slice(11, 13),
+ second: dateTime.slice(13, 15),
+ });
+ return validateAndGetDateFromComponents(dateComponent);
+}
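+
+// e.g. the fused string "20181227-152914" decomposes into { year: "2018",
+// month: "12", day: "27", hour: "15", minute: "29", second: "14" } before
+// being converted to numbers and validated.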
+
+/* Sample date format: 2018-08-19 12:34:45
+ The date has six symbol-separated numeric values, which we extract and use to
+ construct the date.
+ */
+export function tryToParseDateTime(dateTime: string): Date {
+ const dateComponent = getDateComponentsFromSymbolJoinedString(dateTime);
+ if (dateComponent.year?.length === 8 && dateComponent.month?.length === 6) {
+ // The filename has 8 consecutive digits followed by 6 consecutive digits,
+ // so it is very likely a date in the format YYYYMMDD-HHMMSS.
+ const possibleDateTime = dateComponent.year + "-" + dateComponent.month;
+ return parseDateFromFusedDateString(possibleDateTime);
+ }
+ return validateAndGetDateFromComponents(
+ convertDateComponentToNumber(dateComponent),
+ );
+}
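+
+// e.g. "2018-08-19 12:34:45" yields the components ["2018", "08", "19", "12",
+// "34", "45"] and parses directly, while a name containing "20181227 152914"
+// (8 digits, then 6) is rerouted through the fused date string parser above.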
+
+function getDateComponentsFromSymbolJoinedString(
+ dateTime: string,
+): DateComponent<string> {
+ const [year, month, day, hour, minute, second] =
+ dateTime.match(/\d+/g) ?? [];
+
+ return { year, month, day, hour, minute, second };
+}
+
+function validateAndGetDateFromComponents(
+ dateComponent: DateComponent<number>,
+ options = { minYear: 1990, maxYear: currentYear + 1 },
+) {
+ let date = getDateFromComponents(dateComponent);
+ if (hasTimeValues(dateComponent) && !isTimePartValid(date, dateComponent)) {
+ // If the date has time values but they are not valid, remove the time
+ // values and validate just the date part.
+ date = getDateFromComponents(removeTimeValues(dateComponent));
+ }
+ if (!isDatePartValid(date, dateComponent)) {
+ return null;
+ }
+ if (
+ date.getFullYear() < options.minYear ||
+ date.getFullYear() > options.maxYear
+ ) {
+ return null;
+ }
+ return date;
+}
+
+function isTimePartValid(date: Date, dateComponent: DateComponent<number>) {
+ return (
+ date.getHours() === dateComponent.hour &&
+ date.getMinutes() === dateComponent.minute &&
+ date.getSeconds() === dateComponent.second
+ );
+}
+
+function isDatePartValid(date: Date, dateComponent: DateComponent<number>) {
+ return (
+ date.getFullYear() === dateComponent.year &&
+ date.getMonth() === dateComponent.month &&
+ date.getDate() === dateComponent.day
+ );
+}
+
+function convertDateComponentToNumber(
+ dateComponent: DateComponent<string>,
+): DateComponent<number> {
+ return {
+ year: Number(dateComponent.year),
+ // https://stackoverflow.com/questions/2552483/why-does-the-month-argument-range-from-0-to-11-in-javascripts-date-constructor
+ month: Number(dateComponent.month) - 1,
+ day: Number(dateComponent.day),
+ hour: Number(dateComponent.hour),
+ minute: Number(dateComponent.minute),
+ second: Number(dateComponent.second),
+ };
+}
+
+function getDateFromComponents(dateComponent: DateComponent<number>) {
+ const { year, month, day, hour, minute, second } = dateComponent;
+ if (hasTimeValues(dateComponent)) {
+ return new Date(year, month, day, hour, minute, second);
+ } else {
+ return new Date(year, month, day);
+ }
+}
+
+function hasTimeValues(dateComponent: DateComponent<number>) {
+ const { hour, minute, second } = dateComponent;
+ return !isNaN(hour) && !isNaN(minute) && !isNaN(second);
+}
+
+function removeTimeValues(
+ dateComponent: DateComponent<number>,
+): DateComponent<number> {
+ return { ...dateComponent, hour: 0, minute: 0, second: 0 };
+}
diff --git a/web/apps/photos/src/services/upload/metadataService.ts b/web/apps/photos/src/services/upload/metadataService.ts
deleted file mode 100644
index 5a8c4e1f5..000000000
--- a/web/apps/photos/src/services/upload/metadataService.ts
+++ /dev/null
@@ -1,654 +0,0 @@
-import { encodeLivePhoto } from "@/media/live-photo";
-import { getFileNameSize } from "@/next/file";
-import log from "@/next/log";
-import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
-import { CustomError } from "@ente/shared/error";
-import {
- parseDateFromFusedDateString,
- tryToParseDateTime,
- validateAndGetCreationUnixTimeInMicroSeconds,
-} from "@ente/shared/time";
-import { Remote } from "comlink";
-import { FILE_TYPE } from "constants/file";
-import {
- FILE_READER_CHUNK_SIZE,
- LIVE_PHOTO_ASSET_SIZE_LIMIT,
- NULL_EXTRACTED_METADATA,
- NULL_LOCATION,
-} from "constants/upload";
-import * as ffmpegService from "services/ffmpeg/ffmpegService";
-import { getElectronFileStream, getFileStream } from "services/readerService";
-import { getFileType } from "services/typeDetectionService";
-import { FilePublicMagicMetadataProps } from "types/file";
-import {
- DataStream,
- ElectronFile,
- ExtractMetadataResult,
- FileTypeInfo,
- FileWithCollection,
- LivePhotoAssets,
- Location,
- Metadata,
- ParsedExtractedMetadata,
- ParsedMetadataJSON,
- ParsedMetadataJSONMap,
-} from "types/upload";
-import { getFileTypeFromExtensionForLivePhotoClustering } from "utils/file/livePhoto";
-import { getUint8ArrayView } from "../readerService";
-import { getEXIFLocation, getEXIFTime, getParsedExifData } from "./exifService";
-import { generateThumbnail } from "./thumbnailService";
-import uploadCancelService from "./uploadCancelService";
-import { extractFileMetadata } from "./uploadService";
-
-const NULL_PARSED_METADATA_JSON: ParsedMetadataJSON = {
- creationTime: null,
- modificationTime: null,
- ...NULL_LOCATION,
-};
-
-const EXIF_TAGS_NEEDED = [
- "DateTimeOriginal",
- "CreateDate",
- "ModifyDate",
- "GPSLatitude",
- "GPSLongitude",
- "GPSLatitudeRef",
- "GPSLongitudeRef",
- "DateCreated",
- "ExifImageWidth",
- "ExifImageHeight",
- "ImageWidth",
- "ImageHeight",
- "PixelXDimension",
- "PixelYDimension",
- "MetadataDate",
-];
-
-export const MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT = 46;
-
-export async function extractMetadata(
- worker: Remote<DedicatedCryptoWorker>,
- receivedFile: File | ElectronFile,
- fileTypeInfo: FileTypeInfo,
-): Promise<ExtractMetadataResult> {
- let extractedMetadata: ParsedExtractedMetadata = NULL_EXTRACTED_METADATA;
- if (fileTypeInfo.fileType === FILE_TYPE.IMAGE) {
- extractedMetadata = await getImageMetadata(receivedFile, fileTypeInfo);
- } else if (fileTypeInfo.fileType === FILE_TYPE.VIDEO) {
- extractedMetadata = await getVideoMetadata(receivedFile);
- }
- const fileHash = await getFileHash(worker, receivedFile);
-
- const metadata: Metadata = {
- title: receivedFile.name,
- creationTime:
- extractedMetadata.creationTime ??
- extractDateFromFileName(receivedFile.name) ??
- receivedFile.lastModified * 1000,
- modificationTime: receivedFile.lastModified * 1000,
- latitude: extractedMetadata.location.latitude,
- longitude: extractedMetadata.location.longitude,
- fileType: fileTypeInfo.fileType,
- hash: fileHash,
- };
- const publicMagicMetadata: FilePublicMagicMetadataProps = {
- w: extractedMetadata.width,
- h: extractedMetadata.height,
- };
- return { metadata, publicMagicMetadata };
-}
-
-export async function getImageMetadata(
- receivedFile: File | ElectronFile,
- fileTypeInfo: FileTypeInfo,
-): Promise<ParsedExtractedMetadata> {
- let imageMetadata = NULL_EXTRACTED_METADATA;
- try {
- if (!(receivedFile instanceof File)) {
- receivedFile = new File(
- [await receivedFile.blob()],
- receivedFile.name,
- {
- lastModified: receivedFile.lastModified,
- },
- );
- }
- const exifData = await getParsedExifData(
- receivedFile,
- fileTypeInfo,
- EXIF_TAGS_NEEDED,
- );
-
- imageMetadata = {
- location: getEXIFLocation(exifData),
- creationTime: getEXIFTime(exifData),
- width: exifData?.imageWidth ?? null,
- height: exifData?.imageHeight ?? null,
- };
- } catch (e) {
- log.error("getExifData failed", e);
- }
- return imageMetadata;
-}
-
-export const getMetadataJSONMapKeyForJSON = (
- collectionID: number,
- jsonFileName: string,
-) => {
- let title = jsonFileName.slice(0, -1 * ".json".length);
- const endsWithNumberedSuffixWithBrackets = title.match(/\(\d+\)$/);
- if (endsWithNumberedSuffixWithBrackets) {
- title = title.slice(
- 0,
- -1 * endsWithNumberedSuffixWithBrackets[0].length,
- );
- const [name, extension] = splitFilenameAndExtension(title);
- return `${collectionID}-${name}${endsWithNumberedSuffixWithBrackets[0]}.${extension}`;
- }
- return `${collectionID}-${title}`;
-};
-
-// if the file name is greater than MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT(46) , then google photos clips the file name
-// so we need to use the clipped file name to get the metadataJSON file
-export const getClippedMetadataJSONMapKeyForFile = (
- collectionID: number,
- fileName: string,
-) => {
- return `${collectionID}-${fileName.slice(
- 0,
- MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT,
- )}`;
-};
-
-export const getMetadataJSONMapKeyForFile = (
- collectionID: number,
- fileName: string,
-) => {
- return `${collectionID}-${getFileOriginalName(fileName)}`;
-};
-
-export async function parseMetadataJSON(receivedFile: File | ElectronFile) {
- try {
- if (!(receivedFile instanceof File)) {
- receivedFile = new File(
- [await receivedFile.blob()],
- receivedFile.name,
- );
- }
- const metadataJSON: object = JSON.parse(await receivedFile.text());
-
- const parsedMetadataJSON: ParsedMetadataJSON =
- NULL_PARSED_METADATA_JSON;
- if (!metadataJSON) {
- return;
- }
-
- if (
- metadataJSON["photoTakenTime"] &&
- metadataJSON["photoTakenTime"]["timestamp"]
- ) {
- parsedMetadataJSON.creationTime =
- metadataJSON["photoTakenTime"]["timestamp"] * 1000000;
- } else if (
- metadataJSON["creationTime"] &&
- metadataJSON["creationTime"]["timestamp"]
- ) {
- parsedMetadataJSON.creationTime =
- metadataJSON["creationTime"]["timestamp"] * 1000000;
- }
- if (
- metadataJSON["modificationTime"] &&
- metadataJSON["modificationTime"]["timestamp"]
- ) {
- parsedMetadataJSON.modificationTime =
- metadataJSON["modificationTime"]["timestamp"] * 1000000;
- }
- let locationData: Location = NULL_LOCATION;
- if (
- metadataJSON["geoData"] &&
- (metadataJSON["geoData"]["latitude"] !== 0.0 ||
- metadataJSON["geoData"]["longitude"] !== 0.0)
- ) {
- locationData = metadataJSON["geoData"];
- } else if (
- metadataJSON["geoDataExif"] &&
- (metadataJSON["geoDataExif"]["latitude"] !== 0.0 ||
- metadataJSON["geoDataExif"]["longitude"] !== 0.0)
- ) {
- locationData = metadataJSON["geoDataExif"];
- }
- if (locationData !== null) {
- parsedMetadataJSON.latitude = locationData.latitude;
- parsedMetadataJSON.longitude = locationData.longitude;
- }
- return parsedMetadataJSON;
- } catch (e) {
- log.error("parseMetadataJSON failed", e);
- // ignore
- }
-}
-
-// tries to extract date from file name if available else returns null
-export function extractDateFromFileName(filename: string): number {
- try {
- filename = filename.trim();
- let parsedDate: Date;
- if (filename.startsWith("IMG-") || filename.startsWith("VID-")) {
- // Whatsapp media files
- // sample name IMG-20171218-WA0028.jpg
- parsedDate = parseDateFromFusedDateString(filename.split("-")[1]);
- } else if (filename.startsWith("Screenshot_")) {
- // Screenshots on droid
- // sample name Screenshot_20181227-152914.jpg
- parsedDate = parseDateFromFusedDateString(
- filename.replaceAll("Screenshot_", ""),
- );
- } else if (filename.startsWith("signal-")) {
- // signal images
- // sample name :signal-2018-08-21-100217.jpg
- const dateString = convertSignalNameToFusedDateString(filename);
- parsedDate = parseDateFromFusedDateString(dateString);
- }
- if (!parsedDate) {
- parsedDate = tryToParseDateTime(filename);
- }
- return validateAndGetCreationUnixTimeInMicroSeconds(parsedDate);
- } catch (e) {
- log.error("failed to extract date From FileName ", e);
- return null;
- }
-}
-
-function convertSignalNameToFusedDateString(filename: string) {
- const dateStringParts = filename.split("-");
- return `${dateStringParts[1]}${dateStringParts[2]}${dateStringParts[3]}-${dateStringParts[4]}`;
-}
-
-const EDITED_FILE_SUFFIX = "-edited";
-
-/*
- Get the original file name for edited file to associate it to original file's metadataJSON file
- as edited file doesn't have their own metadata file
-*/
-function getFileOriginalName(fileName: string) {
- let originalName: string = null;
- const [nameWithoutExtension, extension] =
- splitFilenameAndExtension(fileName);
-
- const isEditedFile = nameWithoutExtension.endsWith(EDITED_FILE_SUFFIX);
- if (isEditedFile) {
- originalName = nameWithoutExtension.slice(
- 0,
- -1 * EDITED_FILE_SUFFIX.length,
- );
- } else {
- originalName = nameWithoutExtension;
- }
- if (extension) {
- originalName += "." + extension;
- }
- return originalName;
-}
-
-async function getVideoMetadata(file: File | ElectronFile) {
- let videoMetadata = NULL_EXTRACTED_METADATA;
- try {
- log.info(`getVideoMetadata called for ${getFileNameSize(file)}`);
- videoMetadata = await ffmpegService.extractVideoMetadata(file);
- log.info(
- `videoMetadata successfully extracted ${getFileNameSize(file)}`,
- );
- } catch (e) {
- log.error("failed to get video metadata", e);
- log.info(
- `videoMetadata extracted failed ${getFileNameSize(file)} ,${
- e.message
- } `,
- );
- }
-
- return videoMetadata;
-}
-
-interface LivePhotoIdentifier {
- collectionID: number;
- fileType: FILE_TYPE;
- name: string;
- size: number;
-}
-
-const UNDERSCORE_THREE = "_3";
-// Note: The icloud-photos-downloader library appends _HVEC to the end of the filename in case of live photos
-// https://github.com/icloud-photos-downloader/icloud_photos_downloader
-const UNDERSCORE_HEVC = "_HVEC";
-
-export async function getLivePhotoFileType(
- livePhotoAssets: LivePhotoAssets,
-): Promise<FileTypeInfo> {
- const imageFileTypeInfo = await getFileType(livePhotoAssets.image);
- const videoFileTypeInfo = await getFileType(livePhotoAssets.video);
- return {
- fileType: FILE_TYPE.LIVE_PHOTO,
- exactType: `${imageFileTypeInfo.exactType}+${videoFileTypeInfo.exactType}`,
- imageType: imageFileTypeInfo.exactType,
- videoType: videoFileTypeInfo.exactType,
- };
-}
-
-export async function extractLivePhotoMetadata(
- worker: Remote<DedicatedCryptoWorker>,
- parsedMetadataJSONMap: ParsedMetadataJSONMap,
- collectionID: number,
- fileTypeInfo: FileTypeInfo,
- livePhotoAssets: LivePhotoAssets,
-): Promise<ExtractMetadataResult> {
- const imageFileTypeInfo: FileTypeInfo = {
- fileType: FILE_TYPE.IMAGE,
- exactType: fileTypeInfo.imageType,
- };
- const {
- metadata: imageMetadata,
- publicMagicMetadata: imagePublicMagicMetadata,
- } = await extractFileMetadata(
- worker,
- parsedMetadataJSONMap,
- collectionID,
- imageFileTypeInfo,
- livePhotoAssets.image,
- );
- const videoHash = await getFileHash(worker, livePhotoAssets.video);
- return {
- metadata: {
- ...imageMetadata,
- title: getLivePhotoName(livePhotoAssets),
- fileType: FILE_TYPE.LIVE_PHOTO,
- imageHash: imageMetadata.hash,
- videoHash: videoHash,
- hash: undefined,
- },
- publicMagicMetadata: imagePublicMagicMetadata,
- };
-}
-
-export function getLivePhotoSize(livePhotoAssets: LivePhotoAssets) {
- return livePhotoAssets.image.size + livePhotoAssets.video.size;
-}
-
-export function getLivePhotoName(livePhotoAssets: LivePhotoAssets) {
- return livePhotoAssets.image.name;
-}
-
-export async function readLivePhoto(
- fileTypeInfo: FileTypeInfo,
- livePhotoAssets: LivePhotoAssets,
-) {
- const { thumbnail, hasStaticThumbnail } = await generateThumbnail(
- livePhotoAssets.image,
- {
- exactType: fileTypeInfo.imageType,
- fileType: FILE_TYPE.IMAGE,
- },
- );
-
- const imageData = await getUint8ArrayView(livePhotoAssets.image);
-
- const videoData = await getUint8ArrayView(livePhotoAssets.video);
-
- return {
- filedata: await encodeLivePhoto({
- imageFileName: livePhotoAssets.image.name,
- imageData,
- videoFileName: livePhotoAssets.video.name,
- videoData,
- }),
- thumbnail,
- hasStaticThumbnail,
- };
-}
-
-export async function clusterLivePhotoFiles(mediaFiles: FileWithCollection[]) {
- try {
- const analysedMediaFiles: FileWithCollection[] = [];
- mediaFiles
- .sort((firstMediaFile, secondMediaFile) =>
- splitFilenameAndExtension(
- firstMediaFile.file.name,
- )[0].localeCompare(
- splitFilenameAndExtension(secondMediaFile.file.name)[0],
- ),
- )
- .sort(
- (firstMediaFile, secondMediaFile) =>
- firstMediaFile.collectionID - secondMediaFile.collectionID,
- );
- let index = 0;
- while (index < mediaFiles.length - 1) {
- if (uploadCancelService.isUploadCancelationRequested()) {
- throw Error(CustomError.UPLOAD_CANCELLED);
- }
- const firstMediaFile = mediaFiles[index];
- const secondMediaFile = mediaFiles[index + 1];
- const firstFileType =
- getFileTypeFromExtensionForLivePhotoClustering(
- firstMediaFile.file.name,
- );
- const secondFileType =
- getFileTypeFromExtensionForLivePhotoClustering(
- secondMediaFile.file.name,
- );
- const firstFileIdentifier: LivePhotoIdentifier = {
- collectionID: firstMediaFile.collectionID,
- fileType: firstFileType,
- name: firstMediaFile.file.name,
- size: firstMediaFile.file.size,
- };
- const secondFileIdentifier: LivePhotoIdentifier = {
- collectionID: secondMediaFile.collectionID,
- fileType: secondFileType,
- name: secondMediaFile.file.name,
- size: secondMediaFile.file.size,
- };
- if (
- areFilesLivePhotoAssets(
- firstFileIdentifier,
- secondFileIdentifier,
- )
- ) {
- let imageFile: File | ElectronFile;
- let videoFile: File | ElectronFile;
- if (
- firstFileType === FILE_TYPE.IMAGE &&
- secondFileType === FILE_TYPE.VIDEO
- ) {
- imageFile = firstMediaFile.file;
- videoFile = secondMediaFile.file;
- } else {
- videoFile = firstMediaFile.file;
- imageFile = secondMediaFile.file;
- }
- const livePhotoLocalID = firstMediaFile.localID;
- analysedMediaFiles.push({
- localID: livePhotoLocalID,
- collectionID: firstMediaFile.collectionID,
- isLivePhoto: true,
- livePhotoAssets: {
- image: imageFile,
- video: videoFile,
- },
- });
- index += 2;
- } else {
- analysedMediaFiles.push({
- ...firstMediaFile,
- isLivePhoto: false,
- });
- index += 1;
- }
- }
- if (index === mediaFiles.length - 1) {
- analysedMediaFiles.push({
- ...mediaFiles[index],
- isLivePhoto: false,
- });
- }
- return analysedMediaFiles;
- } catch (e) {
- if (e.message === CustomError.UPLOAD_CANCELLED) {
- throw e;
- } else {
- log.error("failed to cluster live photo", e);
- throw e;
- }
- }
-}
-
-function areFilesLivePhotoAssets(
- firstFileIdentifier: LivePhotoIdentifier,
- secondFileIdentifier: LivePhotoIdentifier,
-) {
- const haveSameCollectionID =
- firstFileIdentifier.collectionID === secondFileIdentifier.collectionID;
- const areNotSameFileType =
- firstFileIdentifier.fileType !== secondFileIdentifier.fileType;
-
- let firstFileNameWithoutSuffix: string;
- let secondFileNameWithoutSuffix: string;
- if (firstFileIdentifier.fileType === FILE_TYPE.IMAGE) {
- firstFileNameWithoutSuffix = removePotentialLivePhotoSuffix(
- getFileNameWithoutExtension(firstFileIdentifier.name),
- // Note: The Google Live Photo image file can have video extension appended as suffix, passing that to removePotentialLivePhotoSuffix to remove it
- // Example: IMG_20210630_0001.mp4.jpg (Google Live Photo image file)
- getFileExtensionWithDot(secondFileIdentifier.name),
- );
- secondFileNameWithoutSuffix = removePotentialLivePhotoSuffix(
- getFileNameWithoutExtension(secondFileIdentifier.name),
- );
- } else {
- firstFileNameWithoutSuffix = removePotentialLivePhotoSuffix(
- getFileNameWithoutExtension(firstFileIdentifier.name),
- );
- secondFileNameWithoutSuffix = removePotentialLivePhotoSuffix(
- getFileNameWithoutExtension(secondFileIdentifier.name),
- getFileExtensionWithDot(firstFileIdentifier.name),
- );
- }
- if (
- haveSameCollectionID &&
- isImageOrVideo(firstFileIdentifier.fileType) &&
- isImageOrVideo(secondFileIdentifier.fileType) &&
- areNotSameFileType &&
- firstFileNameWithoutSuffix === secondFileNameWithoutSuffix
- ) {
- // checks size of live Photo assets are less than allowed limit
- // I did that based on the assumption that live photo assets ideally would not be larger than LIVE_PHOTO_ASSET_SIZE_LIMIT
- // also zipping library doesn't support stream as a input
- if (
- firstFileIdentifier.size <= LIVE_PHOTO_ASSET_SIZE_LIMIT &&
- secondFileIdentifier.size <= LIVE_PHOTO_ASSET_SIZE_LIMIT
- ) {
- return true;
- } else {
- log.error(
- `${CustomError.TOO_LARGE_LIVE_PHOTO_ASSETS} - ${JSON.stringify({
- fileSizes: [
- firstFileIdentifier.size,
- secondFileIdentifier.size,
- ],
- })}`,
- );
- }
- }
- return false;
-}
-
-function removePotentialLivePhotoSuffix(
- filenameWithoutExtension: string,
- suffix?: string,
-) {
- let presentSuffix: string;
- if (filenameWithoutExtension.endsWith(UNDERSCORE_THREE)) {
- presentSuffix = UNDERSCORE_THREE;
- } else if (filenameWithoutExtension.endsWith(UNDERSCORE_HEVC)) {
- presentSuffix = UNDERSCORE_HEVC;
- } else if (
- filenameWithoutExtension.endsWith(UNDERSCORE_HEVC.toLowerCase())
- ) {
- presentSuffix = UNDERSCORE_HEVC.toLowerCase();
- } else if (suffix) {
- if (filenameWithoutExtension.endsWith(suffix)) {
- presentSuffix = suffix;
- } else if (filenameWithoutExtension.endsWith(suffix.toLowerCase())) {
- presentSuffix = suffix.toLowerCase();
- }
- }
- if (presentSuffix) {
- return filenameWithoutExtension.slice(0, presentSuffix.length * -1);
- } else {
- return filenameWithoutExtension;
- }
-}
-
-function getFileNameWithoutExtension(filename: string) {
- const lastDotPosition = filename.lastIndexOf(".");
- if (lastDotPosition === -1) return filename;
- else return filename.slice(0, lastDotPosition);
-}
-
-function getFileExtensionWithDot(filename: string) {
- const lastDotPosition = filename.lastIndexOf(".");
- if (lastDotPosition === -1) return "";
- else return filename.slice(lastDotPosition);
-}
-
-function splitFilenameAndExtension(filename: string): [string, string] {
- const lastDotPosition = filename.lastIndexOf(".");
- if (lastDotPosition === -1) return [filename, null];
- else
- return [
- filename.slice(0, lastDotPosition),
- filename.slice(lastDotPosition + 1),
- ];
-}
-
-const isImageOrVideo = (fileType: FILE_TYPE) =>
- [FILE_TYPE.IMAGE, FILE_TYPE.VIDEO].includes(fileType);
-
-async function getFileHash(
- worker: Remote<DedicatedCryptoWorker>,
- file: File | ElectronFile,
-) {
- try {
- log.info(`getFileHash called for ${getFileNameSize(file)}`);
- let filedata: DataStream;
- if (file instanceof File) {
- filedata = getFileStream(file, FILE_READER_CHUNK_SIZE);
- } else {
- filedata = await getElectronFileStream(
- file,
- FILE_READER_CHUNK_SIZE,
- );
- }
- const hashState = await worker.initChunkHashing();
-
- const streamReader = filedata.stream.getReader();
- for (let i = 0; i < filedata.chunkCount; i++) {
- const { done, value: chunk } = await streamReader.read();
- if (done) {
- throw Error(CustomError.CHUNK_LESS_THAN_EXPECTED);
- }
- await worker.hashFileChunk(hashState, Uint8Array.from(chunk));
- }
- const { done } = await streamReader.read();
- if (!done) {
- throw Error(CustomError.CHUNK_MORE_THAN_EXPECTED);
- }
- const hash = await worker.completeChunkHashing(hashState);
- log.info(
- `file hashing completed successfully ${getFileNameSize(file)}`,
- );
- return hash;
- } catch (e) {
- log.error("getFileHash failed", e);
- log.info(`file hashing failed ${getFileNameSize(file)} ,${e.message} `);
- }
-}
diff --git a/web/apps/photos/src/services/upload/multiPartUploadService.ts b/web/apps/photos/src/services/upload/multiPartUploadService.ts
deleted file mode 100644
index 1b4442710..000000000
--- a/web/apps/photos/src/services/upload/multiPartUploadService.ts
+++ /dev/null
@@ -1,132 +0,0 @@
-import { CustomError } from "@ente/shared/error";
-import {
- FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART,
- RANDOM_PERCENTAGE_PROGRESS_FOR_PUT,
-} from "constants/upload";
-import { DataStream, Logger, MultipartUploadURLs } from "types/upload";
-import * as convert from "xml-js";
-import UIService from "./uiService";
-import uploadCancelService from "./uploadCancelService";
-import UploadHttpClient from "./uploadHttpClient";
-import uploadService from "./uploadService";
-
-interface PartEtag {
- PartNumber: number;
- ETag: string;
-}
-
-function calculatePartCount(chunkCount: number) {
- const partCount = Math.ceil(
- chunkCount / FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART,
- );
- return partCount;
-}
-export async function uploadStreamUsingMultipart(
- logger: Logger,
- fileLocalID: number,
- dataStream: DataStream,
-) {
- const uploadPartCount = calculatePartCount(dataStream.chunkCount);
- logger(`fetching ${uploadPartCount} urls for multipart upload`);
- const multipartUploadURLs =
- await uploadService.fetchMultipartUploadURLs(uploadPartCount);
- logger(`fetched ${uploadPartCount} urls for multipart upload`);
-
- const fileObjectKey = await uploadStreamInParts(
- logger,
- multipartUploadURLs,
- dataStream.stream,
- fileLocalID,
- uploadPartCount,
- );
- return fileObjectKey;
-}
-
-export async function uploadStreamInParts(
- logger: Logger,
- multipartUploadURLs: MultipartUploadURLs,
- dataStream: ReadableStream,
- fileLocalID: number,
- uploadPartCount: number,
-) {
- const streamReader = dataStream.getReader();
- const percentPerPart = getRandomProgressPerPartUpload(uploadPartCount);
- const partEtags: PartEtag[] = [];
- logger(`uploading file in chunks`);
- for (const [
- index,
- fileUploadURL,
- ] of multipartUploadURLs.partURLs.entries()) {
- if (uploadCancelService.isUploadCancelationRequested()) {
- throw Error(CustomError.UPLOAD_CANCELLED);
- }
- const uploadChunk = await combineChunksToFormUploadPart(streamReader);
- const progressTracker = UIService.trackUploadProgress(
- fileLocalID,
- percentPerPart,
- index,
- );
- let eTag = null;
- if (!uploadService.getIsCFUploadProxyDisabled()) {
- eTag = await UploadHttpClient.putFilePartV2(
- fileUploadURL,
- uploadChunk,
- progressTracker,
- );
- } else {
- eTag = await UploadHttpClient.putFilePart(
- fileUploadURL,
- uploadChunk,
- progressTracker,
- );
- }
- partEtags.push({ PartNumber: index + 1, ETag: eTag });
- }
- const { done } = await streamReader.read();
- if (!done) {
- throw Error(CustomError.CHUNK_MORE_THAN_EXPECTED);
- }
- logger(`uploading file in chunks done`);
- logger(`completing multipart upload`);
- await completeMultipartUpload(partEtags, multipartUploadURLs.completeURL);
- logger(`completing multipart upload done`);
- return multipartUploadURLs.objectKey;
-}
-
-function getRandomProgressPerPartUpload(uploadPartCount: number) {
- const percentPerPart =
- RANDOM_PERCENTAGE_PROGRESS_FOR_PUT() / uploadPartCount;
- return percentPerPart;
-}
-
-async function combineChunksToFormUploadPart(
- streamReader: ReadableStreamDefaultReader,
-) {
- const combinedChunks = [];
- for (let i = 0; i < FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART; i++) {
- const { done, value: chunk } = await streamReader.read();
- if (done) {
- break;
- }
- for (let index = 0; index < chunk.length; index++) {
- combinedChunks.push(chunk[index]);
- }
- }
- return Uint8Array.from(combinedChunks);
-}
-
-async function completeMultipartUpload(
- partEtags: PartEtag[],
- completeURL: string,
-) {
- const options = { compact: true, ignoreComment: true, spaces: 4 };
- const body = convert.js2xml(
- { CompleteMultipartUpload: { Part: partEtags } },
- options,
- );
- if (!uploadService.getIsCFUploadProxyDisabled()) {
- await UploadHttpClient.completeMultipartUploadV2(completeURL, body);
- } else {
- await UploadHttpClient.completeMultipartUpload(completeURL, body);
- }
-}
diff --git a/web/apps/photos/src/services/upload/publicUploadHttpClient.ts b/web/apps/photos/src/services/upload/publicUploadHttpClient.ts
index f7d87c51c..8f18a1638 100644
--- a/web/apps/photos/src/services/upload/publicUploadHttpClient.ts
+++ b/web/apps/photos/src/services/upload/publicUploadHttpClient.ts
@@ -3,8 +3,8 @@ import { CustomError, handleUploadError } from "@ente/shared/error";
import HTTPService from "@ente/shared/network/HTTPService";
import { getEndpoint } from "@ente/shared/network/api";
import { EnteFile } from "types/file";
-import { MultipartUploadURLs, UploadFile, UploadURL } from "types/upload";
-import { retryHTTPCall } from "utils/upload/uploadRetrier";
+import { retryHTTPCall } from "./uploadHttpClient";
+import { MultipartUploadURLs, UploadFile, UploadURL } from "./uploadService";
const ENDPOINT = getEndpoint();
diff --git a/web/apps/photos/src/services/upload/takeout.ts b/web/apps/photos/src/services/upload/takeout.ts
new file mode 100644
index 000000000..5cd16130e
--- /dev/null
+++ b/web/apps/photos/src/services/upload/takeout.ts
@@ -0,0 +1,166 @@
+/** @file Dealing with the JSON metadata in Google Takeouts */
+
+import { ensureElectron } from "@/next/electron";
+import { nameAndExtension } from "@/next/file";
+import log from "@/next/log";
+import { NULL_LOCATION } from "constants/upload";
+import type { Location } from "types/metadata";
+
+export interface ParsedMetadataJSON {
+ creationTime: number;
+ modificationTime: number;
+ latitude: number;
+ longitude: number;
+}
+
+export const MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT = 46;
+
+export const getMetadataJSONMapKeyForJSON = (
+ collectionID: number,
+ jsonFileName: string,
+) => {
+ let title = jsonFileName.slice(0, -1 * ".json".length);
+ const endsWithNumberedSuffixWithBrackets = title.match(/\(\d+\)$/);
+ if (endsWithNumberedSuffixWithBrackets) {
+ title = title.slice(
+ 0,
+ -1 * endsWithNumberedSuffixWithBrackets[0].length,
+ );
+ const [name, extension] = nameAndExtension(title);
+ return `${collectionID}-${name}${endsWithNumberedSuffixWithBrackets[0]}.${extension}`;
+ }
+ return `${collectionID}-${title}`;
+};
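+
+// For example, the JSON for a Takeout duplicate, "IMG_123.HEIC(1).json", yields
+// the key `${collectionID}-IMG_123(1).HEIC`, which matches its media file
+// "IMG_123(1).HEIC" (an illustrative trace of the numbered-suffix branch above).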
+
+// If the file name is longer than MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT (46)
+// characters, Google Photos clips it, so we need to use the clipped file name
+// to look up the metadataJSON file.
+export const getClippedMetadataJSONMapKeyForFile = (
+ collectionID: number,
+ fileName: string,
+) => {
+ return `${collectionID}-${fileName.slice(
+ 0,
+ MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT,
+ )}`;
+};
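+
+// For example, for a 60-character file name only the first 46 characters show up
+// in the corresponding JSON's title, so the lookup key is clipped the same way.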
+
+export const getMetadataJSONMapKeyForFile = (
+ collectionID: number,
+ fileName: string,
+) => {
+ return `${collectionID}-${getFileOriginalName(fileName)}`;
+};
+
+const EDITED_FILE_SUFFIX = "-edited";
+
+/*
+    Get the original file name for an edited file so that it can be associated
+    with the original file's metadataJSON file, since edited files don't have metadata files of their own.
+*/
+function getFileOriginalName(fileName: string) {
+ let originalName: string = null;
+ const [name, extension] = nameAndExtension(fileName);
+
+ const isEditedFile = name.endsWith(EDITED_FILE_SUFFIX);
+ if (isEditedFile) {
+ originalName = name.slice(0, -1 * EDITED_FILE_SUFFIX.length);
+ } else {
+ originalName = name;
+ }
+ if (extension) {
+ originalName += "." + extension;
+ }
+ return originalName;
+}
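+
+// For example, "IMG_1234-edited.jpg" maps back to "IMG_1234.jpg", whose metadata
+// JSON then also covers the edited variant.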
+
+/** Try to parse the contents of a metadata JSON file from a Google Takeout. */
+export const tryParseTakeoutMetadataJSON = async (
+ fileOrPath: File | string,
+): Promise<ParsedMetadataJSON | undefined> => {
+ try {
+ const text =
+ fileOrPath instanceof File
+ ? await fileOrPath.text()
+ : await ensureElectron().fs.readTextFile(fileOrPath);
+
+ return parseMetadataJSONText(text);
+ } catch (e) {
+ log.error("Failed to parse takeout metadata JSON", e);
+ return undefined;
+ }
+};
+
+const NULL_PARSED_METADATA_JSON: ParsedMetadataJSON = {
+ creationTime: null,
+ modificationTime: null,
+ ...NULL_LOCATION,
+};
+
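+// Note: "timestamp" values in Takeout JSON are Unix epoch seconds, while ente
+// keeps times in microseconds, hence the `* 1000000` conversions below.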
+const parseMetadataJSONText = (text: string) => {
+ const metadataJSON: object = JSON.parse(text);
+ if (!metadataJSON) {
+ return undefined;
+ }
+
+ const parsedMetadataJSON = { ...NULL_PARSED_METADATA_JSON };
+
+ if (
+ metadataJSON["photoTakenTime"] &&
+ metadataJSON["photoTakenTime"]["timestamp"]
+ ) {
+ parsedMetadataJSON.creationTime =
+ metadataJSON["photoTakenTime"]["timestamp"] * 1000000;
+ } else if (
+ metadataJSON["creationTime"] &&
+ metadataJSON["creationTime"]["timestamp"]
+ ) {
+ parsedMetadataJSON.creationTime =
+ metadataJSON["creationTime"]["timestamp"] * 1000000;
+ }
+ if (
+ metadataJSON["modificationTime"] &&
+ metadataJSON["modificationTime"]["timestamp"]
+ ) {
+ parsedMetadataJSON.modificationTime =
+ metadataJSON["modificationTime"]["timestamp"] * 1000000;
+ }
+ let locationData: Location = { ...NULL_LOCATION };
+ if (
+ metadataJSON["geoData"] &&
+ (metadataJSON["geoData"]["latitude"] !== 0.0 ||
+ metadataJSON["geoData"]["longitude"] !== 0.0)
+ ) {
+ locationData = metadataJSON["geoData"];
+ } else if (
+ metadataJSON["geoDataExif"] &&
+ (metadataJSON["geoDataExif"]["latitude"] !== 0.0 ||
+ metadataJSON["geoDataExif"]["longitude"] !== 0.0)
+ ) {
+ locationData = metadataJSON["geoDataExif"];
+ }
+ if (locationData !== null) {
+ parsedMetadataJSON.latitude = locationData.latitude;
+ parsedMetadataJSON.longitude = locationData.longitude;
+ }
+ return parsedMetadataJSON;
+};
+
+/**
+ * Return the matching entry (if any) from {@link parsedMetadataJSONMap} for the
+ * {@link fileName} and {@link collectionID} combination.
+ */
+export const matchTakeoutMetadata = (
+ fileName: string,
+ collectionID: number,
+ parsedMetadataJSONMap: Map<string, ParsedMetadataJSON>,
+) => {
+ let key = getMetadataJSONMapKeyForFile(collectionID, fileName);
+ let takeoutMetadata = parsedMetadataJSONMap.get(key);
+
+ if (!takeoutMetadata && key.length > MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT) {
+ key = getClippedMetadataJSONMapKeyForFile(collectionID, fileName);
+ takeoutMetadata = parsedMetadataJSONMap.get(key);
+ }
+
+ return takeoutMetadata;
+};
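+
+// Minimal usage sketch (assumes the map was populated by running each "*.json"
+// in the takeout through tryParseTakeoutMetadataJSON, keyed via
+// getMetadataJSONMapKeyForJSON):
+//
+//     const metadata = matchTakeoutMetadata(fileName, collectionID, map);
+//     if (metadata) {
+//         // merge its times and location into the file's metadata
+//     }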
diff --git a/web/apps/photos/src/services/upload/thumbnail.ts b/web/apps/photos/src/services/upload/thumbnail.ts
new file mode 100644
index 000000000..a44c941f1
--- /dev/null
+++ b/web/apps/photos/src/services/upload/thumbnail.ts
@@ -0,0 +1,237 @@
+import { FILE_TYPE, type FileTypeInfo } from "@/media/file-type";
+import log from "@/next/log";
+import { type Electron } from "@/next/types/ipc";
+import { withTimeout } from "@ente/shared/utils";
+import * as ffmpeg from "services/ffmpeg";
+import { heicToJPEG } from "services/heic-convert";
+
+/** Maximum width or height of the generated thumbnail */
+const maxThumbnailDimension = 720;
+/** Maximum size (in bytes) of the generated thumbnail */
+const maxThumbnailSize = 100 * 1024; // 100 KB
+
+/**
+ * Generate a JPEG thumbnail for the given image or video blob.
+ *
+ * The thumbnail has a smaller file size so that it is quick to load. But more
+ * importantly, it uses a universal file format (JPEG in our case) so that the
+ * thumbnail itself can be opened in all clients, even those like the web client
+ * itself that might not yet have support for more exotic formats.
+ *
+ * @param blob The image or video blob whose thumbnail we want to generate.
+ *
+ * @param fileTypeInfo The type information for the file this blob came from.
+ *
+ * @return The JPEG data of the generated thumbnail.
+ */
+export const generateThumbnailWeb = async (
+ blob: Blob,
+ fileTypeInfo: FileTypeInfo,
+): Promise<Uint8Array> =>
+ fileTypeInfo.fileType === FILE_TYPE.IMAGE
+ ? await generateImageThumbnailUsingCanvas(blob, fileTypeInfo)
+ : await generateVideoThumbnailWeb(blob);
+
+const generateImageThumbnailUsingCanvas = async (
+ blob: Blob,
+ { extension }: FileTypeInfo,
+) => {
+ if (extension == "heic" || extension == "heif") {
+ log.debug(() => `Pre-converting HEIC to JPEG for thumbnail generation`);
+ blob = await heicToJPEG(blob);
+ }
+
+ const canvas = document.createElement("canvas");
+ const canvasCtx = canvas.getContext("2d");
+
+ const imageURL = URL.createObjectURL(blob);
+ await withTimeout(
+ new Promise((resolve, reject) => {
+ const image = new Image();
+ image.setAttribute("src", imageURL);
+ image.onload = () => {
+ try {
+ URL.revokeObjectURL(imageURL);
+ const { width, height } = scaledThumbnailDimensions(
+ image.width,
+ image.height,
+ maxThumbnailDimension,
+ );
+ canvas.width = width;
+ canvas.height = height;
+ canvasCtx.drawImage(image, 0, 0, width, height);
+ resolve(undefined);
+ } catch (e) {
+ reject(e);
+ }
+ };
+ }),
+ 30 * 1000,
+ );
+
+ return await compressedJPEGData(canvas);
+};
+
+const generateVideoThumbnailWeb = async (blob: Blob) => {
+ try {
+ return await ffmpeg.generateVideoThumbnailWeb(blob);
+ } catch (e) {
+ log.error(
+ `Failed to generate video thumbnail using the wasm FFmpeg web worker, will fallback to canvas`,
+ e,
+ );
+ return generateVideoThumbnailUsingCanvas(blob);
+ }
+};
+
+const generateVideoThumbnailUsingCanvas = async (blob: Blob) => {
+ const canvas = document.createElement("canvas");
+ const canvasCtx = canvas.getContext("2d");
+
+ const videoURL = URL.createObjectURL(blob);
+ await withTimeout(
+ new Promise((resolve, reject) => {
+ const video = document.createElement("video");
+ video.preload = "metadata";
+ video.src = videoURL;
+ video.addEventListener("loadeddata", () => {
+ try {
+ URL.revokeObjectURL(videoURL);
+ const { width, height } = scaledThumbnailDimensions(
+ video.videoWidth,
+ video.videoHeight,
+ maxThumbnailDimension,
+ );
+ canvas.width = width;
+ canvas.height = height;
+ canvasCtx.drawImage(video, 0, 0, width, height);
+ resolve(undefined);
+ } catch (e) {
+ reject(e);
+ }
+ });
+ }),
+ 30 * 1000,
+ );
+
+ return await compressedJPEGData(canvas);
+};
+
+/**
+ * Compute the size of the thumbnail to create for an image with the given
+ * {@link width} and {@link height}.
+ *
+ * This function calculates a new size of an image for limiting it to maximum
+ * width and height (both specified by {@link maxDimension}), while maintaining
+ * aspect ratio.
+ *
+ * It returns `{0, 0}` for invalid inputs.
+ */
+const scaledThumbnailDimensions = (
+ width: number,
+ height: number,
+ maxDimension: number,
+): { width: number; height: number } => {
+ if (width === 0 || height === 0) return { width: 0, height: 0 };
+ const widthScaleFactor = maxDimension / width;
+ const heightScaleFactor = maxDimension / height;
+ const scaleFactor = Math.min(widthScaleFactor, heightScaleFactor);
+ const thumbnailDimensions = {
+ width: Math.round(width * scaleFactor),
+ height: Math.round(height * scaleFactor),
+ };
+ if (thumbnailDimensions.width === 0 || thumbnailDimensions.height === 0)
+ return { width: 0, height: 0 };
+ return thumbnailDimensions;
+};
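+
+// For example, a 4000x3000 image with maxDimension 720 gets scaleFactor
+// min(720/4000, 720/3000) = 0.18, yielding a 720x540 thumbnail.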
+
+const compressedJPEGData = async (canvas: HTMLCanvasElement) => {
+ let blob: Blob;
+ let prevSize = Number.MAX_SAFE_INTEGER;
+ let quality = 0.7;
+
+ do {
+ if (blob) prevSize = blob.size;
+ blob = await new Promise((resolve) => {
+ canvas.toBlob((blob) => resolve(blob), "image/jpeg", quality);
+ });
+ quality -= 0.1;
+ } while (
+ quality >= 0.5 &&
+ blob.size > maxThumbnailSize &&
+ percentageSizeDiff(blob.size, prevSize) >= 10
+ );
+
+ return new Uint8Array(await blob.arrayBuffer());
+};
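+
+// The loop above re-encodes at qualities 0.7, 0.6 and 0.5 at most, stopping
+// early once the blob fits within maxThumbnailSize or a further step would
+// reduce the size by less than 10%.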
+
+const percentageSizeDiff = (
+ newThumbnailSize: number,
+ oldThumbnailSize: number,
+) => ((oldThumbnailSize - newThumbnailSize) * 100) / oldThumbnailSize;
+
+/**
+ * Generate a JPEG thumbnail for the given file or path using native tools.
+ *
+ * This function only works when we're running in the context of our desktop
+ * app, and this dependency is enforced by the need to pass the {@link electron}
+ * object which we use to perform IPC with the Node.js side of our desktop app.
+ *
+ * @param dataOrPath Contents of an image or video file, or the path to the
+ * image or video file on the user's local filesystem, whose thumbnail we want
+ * to generate.
+ *
+ * @param fileTypeInfo The type information for {@link dataOrPath}.
+ *
+ * @return The JPEG data of the generated thumbnail.
+ *
+ * See also {@link generateThumbnailWeb}.
+ */
+export const generateThumbnailNative = async (
+ electron: Electron,
+ dataOrPath: Uint8Array | string,
+ fileTypeInfo: FileTypeInfo,
+): Promise<Uint8Array> =>
+ fileTypeInfo.fileType === FILE_TYPE.IMAGE
+ ? await electron.generateImageThumbnail(
+ dataOrPath,
+ maxThumbnailDimension,
+ maxThumbnailSize,
+ )
+ : ffmpeg.generateVideoThumbnailNative(electron, dataOrPath);
+
+/**
+ * A fallback, black, thumbnail for use in cases where thumbnail generation
+ * fails.
+ */
+export const fallbackThumbnail = () =>
+ Uint8Array.from(atob(blackThumbnailB64), (c) => c.charCodeAt(0));
+
+const blackThumbnailB64 =
+ "/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAEBAQEBAQEB" +
+ "AQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/2wBDAQEBAQEBAQ" +
+ "EBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/wAARC" +
+ "ACWASwDAREAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUF" +
+ "BAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk" +
+ "6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztL" +
+ "W2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAA" +
+ "AAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVY" +
+ "nLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImK" +
+ "kpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oAD" +
+ "AMBAAIRAxEAPwD/AD/6ACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" +
+ "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" +
+ "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKAC" +
+ "gAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" +
+ "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" +
+ "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" +
+ "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" +
+ "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" +
+ "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoA" +
+ "KACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" +
+ "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" +
+ "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" +
+ "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAK" +
+ "ACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoA" +
+ "KACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" +
+ "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" +
+ "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgD/9k=";
diff --git a/web/apps/photos/src/services/upload/thumbnailService.ts b/web/apps/photos/src/services/upload/thumbnailService.ts
deleted file mode 100644
index 071ef3078..000000000
--- a/web/apps/photos/src/services/upload/thumbnailService.ts
+++ /dev/null
@@ -1,332 +0,0 @@
-import { ensureElectron } from "@/next/electron";
-import { convertBytesToHumanReadable, getFileNameSize } from "@/next/file";
-import log from "@/next/log";
-import { CustomError } from "@ente/shared/error";
-import { FILE_TYPE } from "constants/file";
-import { BLACK_THUMBNAIL_BASE64 } from "constants/upload";
-import isElectron from "is-electron";
-import * as FFmpegService from "services/ffmpeg/ffmpegService";
-import HeicConversionService from "services/heicConversionService";
-import { ElectronFile, FileTypeInfo } from "types/upload";
-import { isFileHEIC } from "utils/file";
-import { getUint8ArrayView } from "../readerService";
-
-const MAX_THUMBNAIL_DIMENSION = 720;
-const MIN_COMPRESSION_PERCENTAGE_SIZE_DIFF = 10;
-const MAX_THUMBNAIL_SIZE = 100 * 1024;
-const MIN_QUALITY = 0.5;
-const MAX_QUALITY = 0.7;
-
-const WAIT_TIME_THUMBNAIL_GENERATION = 30 * 1000;
-
-interface Dimension {
- width: number;
- height: number;
-}
-
-export async function generateThumbnail(
- file: File | ElectronFile,
- fileTypeInfo: FileTypeInfo,
-): Promise<{ thumbnail: Uint8Array; hasStaticThumbnail: boolean }> {
- try {
- log.info(`generating thumbnail for ${getFileNameSize(file)}`);
- let hasStaticThumbnail = false;
- let thumbnail: Uint8Array;
- try {
- if (fileTypeInfo.fileType === FILE_TYPE.IMAGE) {
- thumbnail = await generateImageThumbnail(file, fileTypeInfo);
- } else {
- thumbnail = await generateVideoThumbnail(file, fileTypeInfo);
- }
- if (thumbnail.length > 1.5 * MAX_THUMBNAIL_SIZE) {
- log.error(
- `thumbnail greater than max limit - ${JSON.stringify({
- thumbnailSize: convertBytesToHumanReadable(
- thumbnail.length,
- ),
- fileSize: convertBytesToHumanReadable(file.size),
- fileType: fileTypeInfo.exactType,
- })}`,
- );
- }
- if (thumbnail.length === 0) {
- throw Error("EMPTY THUMBNAIL");
- }
- log.info(
- `thumbnail successfully generated ${getFileNameSize(file)}`,
- );
- } catch (e) {
- log.error(
- `thumbnail generation failed ${getFileNameSize(file)} with format ${fileTypeInfo.exactType}`,
- e,
- );
- thumbnail = Uint8Array.from(atob(BLACK_THUMBNAIL_BASE64), (c) =>
- c.charCodeAt(0),
- );
- hasStaticThumbnail = true;
- }
- return { thumbnail, hasStaticThumbnail };
- } catch (e) {
- log.error("Error generating static thumbnail", e);
- throw e;
- }
-}
-
-async function generateImageThumbnail(
- file: File | ElectronFile,
- fileTypeInfo: FileTypeInfo,
-) {
- if (isElectron()) {
- try {
- return await generateImageThumbnailInElectron(
- file,
- MAX_THUMBNAIL_DIMENSION,
- MAX_THUMBNAIL_SIZE,
- );
- } catch (e) {
- return await generateImageThumbnailUsingCanvas(file, fileTypeInfo);
- }
- } else {
- return await generateImageThumbnailUsingCanvas(file, fileTypeInfo);
- }
-}
-
-const generateImageThumbnailInElectron = async (
- inputFile: File | ElectronFile,
- maxDimension: number,
- maxSize: number,
-): Promise<Uint8Array> => {
- try {
- const startTime = Date.now();
- const thumb = await ensureElectron().generateImageThumbnail(
- inputFile,
- maxDimension,
- maxSize,
- );
- log.info(
- `originalFileSize:${convertBytesToHumanReadable(
- inputFile?.size,
- )},thumbFileSize:${convertBytesToHumanReadable(
- thumb?.length,
- )}, native thumbnail generation time: ${
- Date.now() - startTime
- }ms `,
- );
- return thumb;
- } catch (e) {
- if (
- e.message !==
- CustomError.WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED
- ) {
- log.error("failed to generate image thumbnail natively", e);
- }
- throw e;
- }
-};
-
-export async function generateImageThumbnailUsingCanvas(
- file: File | ElectronFile,
- fileTypeInfo: FileTypeInfo,
-) {
- const canvas = document.createElement("canvas");
- const canvasCTX = canvas.getContext("2d");
-
- let imageURL = null;
- let timeout = null;
- const isHEIC = isFileHEIC(fileTypeInfo.exactType);
- if (isHEIC) {
- log.info(`HEICConverter called for ${getFileNameSize(file)}`);
- const convertedBlob = await HeicConversionService.convert(
- new Blob([await file.arrayBuffer()]),
- );
- file = new File([convertedBlob], file.name);
- log.info(`${getFileNameSize(file)} successfully converted`);
- }
- let image = new Image();
- imageURL = URL.createObjectURL(new Blob([await file.arrayBuffer()]));
- await new Promise((resolve, reject) => {
- image.setAttribute("src", imageURL);
- image.onload = () => {
- try {
- URL.revokeObjectURL(imageURL);
- const imageDimension = {
- width: image.width,
- height: image.height,
- };
- const thumbnailDimension = calculateThumbnailDimension(
- imageDimension,
- MAX_THUMBNAIL_DIMENSION,
- );
- canvas.width = thumbnailDimension.width;
- canvas.height = thumbnailDimension.height;
- canvasCTX.drawImage(
- image,
- 0,
- 0,
- thumbnailDimension.width,
- thumbnailDimension.height,
- );
- image = null;
- clearTimeout(timeout);
- resolve(null);
- } catch (e) {
- const err = new Error(CustomError.THUMBNAIL_GENERATION_FAILED, {
- cause: e,
- });
- reject(err);
- }
- };
- timeout = setTimeout(
- () => reject(new Error("Operation timed out")),
- WAIT_TIME_THUMBNAIL_GENERATION,
- );
- });
- const thumbnailBlob = await getCompressedThumbnailBlobFromCanvas(canvas);
- return await getUint8ArrayView(thumbnailBlob);
-}
-
-async function generateVideoThumbnail(
- file: File | ElectronFile,
- fileTypeInfo: FileTypeInfo,
-) {
- let thumbnail: Uint8Array;
- try {
- log.info(
- `ffmpeg generateThumbnail called for ${getFileNameSize(file)}`,
- );
-
- const thumbnail = await FFmpegService.generateVideoThumbnail(file);
- log.info(
- `ffmpeg thumbnail successfully generated ${getFileNameSize(file)}`,
- );
- return await getUint8ArrayView(thumbnail);
- } catch (e) {
- log.info(
- `ffmpeg thumbnail generated failed ${getFileNameSize(
- file,
- )} error: ${e.message}`,
- );
- log.error(
- `failed to generate thumbnail using ffmpeg for format ${fileTypeInfo.exactType}`,
- e,
- );
- thumbnail = await generateVideoThumbnailUsingCanvas(file);
- }
- return thumbnail;
-}
-
-export async function generateVideoThumbnailUsingCanvas(
- file: File | ElectronFile,
-) {
- const canvas = document.createElement("canvas");
- const canvasCTX = canvas.getContext("2d");
-
- let timeout = null;
- let videoURL = null;
-
- let video = document.createElement("video");
- videoURL = URL.createObjectURL(new Blob([await file.arrayBuffer()]));
- await new Promise((resolve, reject) => {
- video.preload = "metadata";
- video.src = videoURL;
- video.addEventListener("loadeddata", function () {
- try {
- URL.revokeObjectURL(videoURL);
- if (!video) {
- throw Error("video load failed");
- }
- const videoDimension = {
- width: video.videoWidth,
- height: video.videoHeight,
- };
- const thumbnailDimension = calculateThumbnailDimension(
- videoDimension,
- MAX_THUMBNAIL_DIMENSION,
- );
- canvas.width = thumbnailDimension.width;
- canvas.height = thumbnailDimension.height;
- canvasCTX.drawImage(
- video,
- 0,
- 0,
- thumbnailDimension.width,
- thumbnailDimension.height,
- );
- video = null;
- clearTimeout(timeout);
- resolve(null);
- } catch (e) {
- const err = Error(
- `${CustomError.THUMBNAIL_GENERATION_FAILED} err: ${e}`,
- );
- log.error(CustomError.THUMBNAIL_GENERATION_FAILED, e);
- reject(err);
- }
- });
- timeout = setTimeout(
- () => reject(new Error("Operation timed out")),
- WAIT_TIME_THUMBNAIL_GENERATION,
- );
- });
- const thumbnailBlob = await getCompressedThumbnailBlobFromCanvas(canvas);
- return await getUint8ArrayView(thumbnailBlob);
-}
-
-async function getCompressedThumbnailBlobFromCanvas(canvas: HTMLCanvasElement) {
- let thumbnailBlob: Blob = null;
- let prevSize = Number.MAX_SAFE_INTEGER;
- let quality = MAX_QUALITY;
-
- do {
- if (thumbnailBlob) {
- prevSize = thumbnailBlob.size;
- }
- thumbnailBlob = await new Promise((resolve) => {
- canvas.toBlob(
- function (blob) {
- resolve(blob);
- },
- "image/jpeg",
- quality,
- );
- });
- thumbnailBlob = thumbnailBlob ?? new Blob([]);
- quality -= 0.1;
- } while (
- quality >= MIN_QUALITY &&
- thumbnailBlob.size > MAX_THUMBNAIL_SIZE &&
- percentageSizeDiff(thumbnailBlob.size, prevSize) >=
- MIN_COMPRESSION_PERCENTAGE_SIZE_DIFF
- );
-
- return thumbnailBlob;
-}
-
-function percentageSizeDiff(
- newThumbnailSize: number,
- oldThumbnailSize: number,
-) {
- return ((oldThumbnailSize - newThumbnailSize) * 100) / oldThumbnailSize;
-}
-
-// method to calculate new size of image for limiting it to maximum width and height, maintaining aspect ratio
-// returns {0,0} for invalid inputs
-function calculateThumbnailDimension(
- originalDimension: Dimension,
- maxDimension: number,
-): Dimension {
- if (originalDimension.height === 0 || originalDimension.width === 0) {
- return { width: 0, height: 0 };
- }
- const widthScaleFactor = maxDimension / originalDimension.width;
- const heightScaleFactor = maxDimension / originalDimension.height;
- const scaleFactor = Math.min(widthScaleFactor, heightScaleFactor);
- const thumbnailDimension = {
- width: Math.round(originalDimension.width * scaleFactor),
- height: Math.round(originalDimension.height * scaleFactor),
- };
- if (thumbnailDimension.width === 0 || thumbnailDimension.height === 0) {
- return { width: 0, height: 0 };
- }
- return thumbnailDimension;
-}
diff --git a/web/apps/photos/src/services/upload/uiService.ts b/web/apps/photos/src/services/upload/uiService.ts
deleted file mode 100644
index 13dd78001..000000000
--- a/web/apps/photos/src/services/upload/uiService.ts
+++ /dev/null
@@ -1,218 +0,0 @@
-import { CustomError } from "@ente/shared/error";
-import { Canceler } from "axios";
-import {
- RANDOM_PERCENTAGE_PROGRESS_FOR_PUT,
- UPLOAD_RESULT,
- UPLOAD_STAGES,
-} from "constants/upload";
-import {
- FinishedUploads,
- InProgressUpload,
- InProgressUploads,
- ProgressUpdater,
- SegregatedFinishedUploads,
-} from "types/upload/ui";
-import uploadCancelService from "./uploadCancelService";
-
-const REQUEST_TIMEOUT_TIME = 30 * 1000; // 30 sec;
-class UIService {
- private progressUpdater: ProgressUpdater;
-
- // UPLOAD LEVEL STATES
- private uploadStage: UPLOAD_STAGES = UPLOAD_STAGES.START;
- private filenames: Map<number, string> = new Map();
- private hasLivePhoto: boolean = false;
- private uploadProgressView: boolean = false;
-
- // STAGE LEVEL STATES
- private perFileProgress: number;
- private filesUploadedCount: number;
- private totalFilesCount: number;
- private inProgressUploads: InProgressUploads = new Map();
- private finishedUploads: FinishedUploads = new Map();
-
- init(progressUpdater: ProgressUpdater) {
- this.progressUpdater = progressUpdater;
- this.progressUpdater.setUploadStage(this.uploadStage);
- this.progressUpdater.setUploadFilenames(this.filenames);
- this.progressUpdater.setHasLivePhotos(this.hasLivePhoto);
- this.progressUpdater.setUploadProgressView(this.uploadProgressView);
- this.progressUpdater.setUploadCounter({
- finished: this.filesUploadedCount,
- total: this.totalFilesCount,
- });
- this.progressUpdater.setInProgressUploads(
- convertInProgressUploadsToList(this.inProgressUploads),
- );
- this.progressUpdater.setFinishedUploads(
- segregatedFinishedUploadsToList(this.finishedUploads),
- );
- }
-
- reset(count = 0) {
- this.setTotalFileCount(count);
- this.filesUploadedCount = 0;
- this.inProgressUploads = new Map();
- this.finishedUploads = new Map();
- this.updateProgressBarUI();
- }
-
- setTotalFileCount(count: number) {
- this.totalFilesCount = count;
- if (count > 0) {
- this.perFileProgress = 100 / this.totalFilesCount;
- } else {
- this.perFileProgress = 0;
- }
- }
-
- setFileProgress(key: number, progress: number) {
- this.inProgressUploads.set(key, progress);
- this.updateProgressBarUI();
- }
-
- setUploadStage(stage: UPLOAD_STAGES) {
- this.uploadStage = stage;
- this.progressUpdater.setUploadStage(stage);
- }
-
- setFilenames(filenames: Map<number, string>) {
- this.filenames = filenames;
- this.progressUpdater.setUploadFilenames(filenames);
- }
-
- setHasLivePhoto(hasLivePhoto: boolean) {
- this.hasLivePhoto = hasLivePhoto;
- this.progressUpdater.setHasLivePhotos(hasLivePhoto);
- }
-
- setUploadProgressView(uploadProgressView: boolean) {
- this.uploadProgressView = uploadProgressView;
- this.progressUpdater.setUploadProgressView(uploadProgressView);
- }
-
- increaseFileUploaded() {
- this.filesUploadedCount++;
- this.updateProgressBarUI();
- }
-
- moveFileToResultList(key: number, uploadResult: UPLOAD_RESULT) {
- this.finishedUploads.set(key, uploadResult);
- this.inProgressUploads.delete(key);
- this.updateProgressBarUI();
- }
-
- hasFilesInResultList() {
- const finishedUploadsList = segregatedFinishedUploadsToList(
- this.finishedUploads,
- );
- for (const x of finishedUploadsList.values()) {
- if (x.length > 0) {
- return true;
- }
- }
- return false;
- }
-
- private updateProgressBarUI() {
- const {
- setPercentComplete,
- setUploadCounter,
- setInProgressUploads,
- setFinishedUploads,
- } = this.progressUpdater;
- setUploadCounter({
- finished: this.filesUploadedCount,
- total: this.totalFilesCount,
- });
- let percentComplete =
- this.perFileProgress *
- (this.finishedUploads.size || this.filesUploadedCount);
- if (this.inProgressUploads) {
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- for (const [_, progress] of this.inProgressUploads) {
- // filter negative indicator values during percentComplete calculation
- if (progress < 0) {
- continue;
- }
- percentComplete += (this.perFileProgress * progress) / 100;
- }
- }
-
- setPercentComplete(percentComplete);
- setInProgressUploads(
- convertInProgressUploadsToList(this.inProgressUploads),
- );
- setFinishedUploads(
- segregatedFinishedUploadsToList(this.finishedUploads),
- );
- }
-
- trackUploadProgress(
- fileLocalID: number,
- percentPerPart = RANDOM_PERCENTAGE_PROGRESS_FOR_PUT(),
- index = 0,
- ) {
- const cancel: { exec: Canceler } = { exec: () => {} };
- const cancelTimedOutRequest = () =>
- cancel.exec(CustomError.REQUEST_TIMEOUT);
-
- const cancelCancelledUploadRequest = () =>
- cancel.exec(CustomError.UPLOAD_CANCELLED);
-
- let timeout = null;
- const resetTimeout = () => {
- if (timeout) {
- clearTimeout(timeout);
- }
- timeout = setTimeout(cancelTimedOutRequest, REQUEST_TIMEOUT_TIME);
- };
- return {
- cancel,
- onUploadProgress: (event) => {
- this.inProgressUploads.set(
- fileLocalID,
- Math.min(
- Math.round(
- percentPerPart * index +
- (percentPerPart * event.loaded) / event.total,
- ),
- 98,
- ),
- );
- this.updateProgressBarUI();
- if (event.loaded === event.total) {
- clearTimeout(timeout);
- } else {
- resetTimeout();
- }
- if (uploadCancelService.isUploadCancelationRequested()) {
- cancelCancelledUploadRequest();
- }
- },
- };
- }
-}
-
-export default new UIService();
-
-function convertInProgressUploadsToList(inProgressUploads) {
- return [...inProgressUploads.entries()].map(
- ([localFileID, progress]) =>
- ({
- localFileID,
- progress,
- }) as InProgressUpload,
- );
-}
-
-function segregatedFinishedUploadsToList(finishedUploads: FinishedUploads) {
- const segregatedFinishedUploads = new Map() as SegregatedFinishedUploads;
- for (const [localID, result] of finishedUploads) {
- if (!segregatedFinishedUploads.has(result)) {
- segregatedFinishedUploads.set(result, []);
- }
- segregatedFinishedUploads.get(result).push(localID);
- }
- return segregatedFinishedUploads;
-}
diff --git a/web/apps/photos/src/services/upload/uploadCancelService.ts b/web/apps/photos/src/services/upload/uploadCancelService.ts
deleted file mode 100644
index 790245784..000000000
--- a/web/apps/photos/src/services/upload/uploadCancelService.ts
+++ /dev/null
@@ -1,23 +0,0 @@
-interface UploadCancelStatus {
- value: boolean;
-}
-
-class UploadCancelService {
- private shouldUploadBeCancelled: UploadCancelStatus = {
- value: false,
- };
-
- reset() {
- this.shouldUploadBeCancelled.value = false;
- }
-
- requestUploadCancelation() {
- this.shouldUploadBeCancelled.value = true;
- }
-
- isUploadCancelationRequested(): boolean {
- return this.shouldUploadBeCancelled.value;
- }
-}
-
-export default new UploadCancelService();
diff --git a/web/apps/photos/src/services/upload/uploadHttpClient.ts b/web/apps/photos/src/services/upload/uploadHttpClient.ts
index 7ba35dc0d..e8ae6de97 100644
--- a/web/apps/photos/src/services/upload/uploadHttpClient.ts
+++ b/web/apps/photos/src/services/upload/uploadHttpClient.ts
@@ -3,9 +3,9 @@ import { CustomError, handleUploadError } from "@ente/shared/error";
import HTTPService from "@ente/shared/network/HTTPService";
import { getEndpoint, getUploadEndpoint } from "@ente/shared/network/api";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
+import { wait } from "@ente/shared/utils";
import { EnteFile } from "types/file";
-import { MultipartUploadURLs, UploadFile, UploadURL } from "types/upload";
-import { retryHTTPCall } from "utils/upload/uploadRetrier";
+import { MultipartUploadURLs, UploadFile, UploadURL } from "./uploadService";
const ENDPOINT = getEndpoint();
const UPLOAD_ENDPOINT = getUploadEndpoint();
@@ -236,3 +236,31 @@ class UploadHttpClient {
}
export default new UploadHttpClient();
+
+const retrySleepTimeInMilliSeconds = [2000, 5000, 10000];
+
+export async function retryHTTPCall(
+ func: () => Promise<any>,
+ checkForBreakingError?: (error) => void,
+): Promise<any> {
+ const retrier = async (
+ func: () => Promise<any>,
+ attemptNumber: number = 0,
+ ) => {
+ try {
+ const resp = await func();
+ return resp;
+ } catch (e) {
+ if (checkForBreakingError) {
+ checkForBreakingError(e);
+ }
+ if (attemptNumber < retrySleepTimeInMilliSeconds.length) {
+ await wait(retrySleepTimeInMilliSeconds[attemptNumber]);
+ return await retrier(func, attemptNumber + 1);
+ } else {
+ throw e;
+ }
+ }
+ };
+ return await retrier(func);
+}
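+
+// Usage sketch: wrap a flaky request so that transient failures are retried
+// after waits of 2s, 5s and 10s (up to 4 attempts in all). Here `doPut` is a
+// hypothetical stand-in for the actual request function:
+//
+//     await retryHTTPCall(() => doPut(url, data), handleUploadError);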
diff --git a/web/apps/photos/src/services/upload/uploadManager.ts b/web/apps/photos/src/services/upload/uploadManager.ts
index a01cd1775..665cd76c8 100644
--- a/web/apps/photos/src/services/upload/uploadManager.ts
+++ b/web/apps/photos/src/services/upload/uploadManager.ts
@@ -1,17 +1,25 @@
-import { getFileNameSize } from "@/next/file";
+import { FILE_TYPE } from "@/media/file-type";
+import { potentialFileTypeFromExtension } from "@/media/live-photo";
+import { ensureElectron } from "@/next/electron";
+import { lowercaseExtension, nameAndExtension } from "@/next/file";
import log from "@/next/log";
+import { ElectronFile } from "@/next/types/file";
+import type { Electron } from "@/next/types/ipc";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
+import { ensure } from "@/utils/ensure";
import { getDedicatedCryptoWorker } from "@ente/shared/crypto";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { CustomError } from "@ente/shared/error";
import { Events, eventBus } from "@ente/shared/events";
+import { wait } from "@ente/shared/utils";
+import { Canceler } from "axios";
import { Remote } from "comlink";
-import { UPLOAD_RESULT, UPLOAD_STAGES } from "constants/upload";
-import isElectron from "is-electron";
import {
- cancelRemainingUploads,
- updatePendingUploads,
-} from "services/pending-uploads";
+ RANDOM_PERCENTAGE_PROGRESS_FOR_PUT,
+ UPLOAD_RESULT,
+ UPLOAD_STAGES,
+} from "constants/upload";
+import isElectron from "is-electron";
import {
getLocalPublicFiles,
getPublicCollectionUID,
@@ -21,43 +29,312 @@ import watcher from "services/watch";
import { Collection } from "types/collection";
import { EncryptedEnteFile, EnteFile } from "types/file";
import { SetFiles } from "types/gallery";
-import {
- FileWithCollection,
- ParsedMetadataJSON,
- ParsedMetadataJSONMap,
- PublicUploadProps,
-} from "types/upload";
-import { ProgressUpdater } from "types/upload/ui";
import { decryptFile, getUserOwnedFiles, sortFiles } from "utils/file";
-import {
- areFileWithCollectionsSame,
- segregateMetadataAndMediaFiles,
-} from "utils/upload";
import { getLocalFiles } from "../fileService";
import {
getMetadataJSONMapKeyForJSON,
- parseMetadataJSON,
-} from "./metadataService";
-import { default as UIService, default as uiService } from "./uiService";
-import uploadCancelService from "./uploadCancelService";
-import UploadService, { uploader } from "./uploadService";
+ tryParseTakeoutMetadataJSON,
+ type ParsedMetadataJSON,
+} from "./takeout";
+import UploadService, { fopFileName, fopSize, uploader } from "./uploadService";
-const MAX_CONCURRENT_UPLOADS = 4;
+export type FileID = number;
+
+export type PercentageUploaded = number;
+/* localID => fileName */
+export type UploadFileNames = Map<FileID, string>;
+
+export interface UploadCounter {
+ finished: number;
+ total: number;
+}
+
+export interface InProgressUpload {
+ localFileID: FileID;
+ progress: PercentageUploaded;
+}
+
+export interface FinishedUpload {
+ localFileID: FileID;
+ result: UPLOAD_RESULT;
+}
+
+export type InProgressUploads = Map<FileID, PercentageUploaded>;
+
+export type FinishedUploads = Map<FileID, UPLOAD_RESULT>;
+
+export type SegregatedFinishedUploads = Map<UPLOAD_RESULT, FileID[]>;
+
+export interface ProgressUpdater {
+ setPercentComplete: React.Dispatch<React.SetStateAction<number>>;
+ setUploadCounter: React.Dispatch<React.SetStateAction<UploadCounter>>;
+ setUploadStage: React.Dispatch<React.SetStateAction<UPLOAD_STAGES>>;
+ setInProgressUploads: React.Dispatch<
+ React.SetStateAction<InProgressUpload[]>
+ >;
+ setFinishedUploads: React.Dispatch<
+ React.SetStateAction<SegregatedFinishedUploads>
+ >;
+ setUploadFilenames: React.Dispatch<React.SetStateAction<UploadFileNames>>;
+ setHasLivePhotos: React.Dispatch<React.SetStateAction<boolean>>;
+ setUploadProgressView: React.Dispatch<React.SetStateAction<boolean>>;
+}
+
+/** The number of uploads to process in parallel. */
+const maxConcurrentUploads = 4;
+
+export interface FileWithCollection {
+ localID: number;
+ collectionID: number;
+ isLivePhoto?: boolean;
+ fileOrPath?: File | string;
+ livePhotoAssets?: LivePhotoAssets;
+}
+
+export interface LivePhotoAssets {
+ image: File | string;
+ video: File | string;
+}
+
+export interface PublicUploadProps {
+ token: string;
+ passwordToken: string;
+ accessedThroughSharedURL: boolean;
+}
+
+interface UploadCancelStatus {
+ value: boolean;
+}
+
+class UploadCancelService {
+ private shouldUploadBeCancelled: UploadCancelStatus = {
+ value: false,
+ };
+
+ reset() {
+ this.shouldUploadBeCancelled.value = false;
+ }
+
+ requestUploadCancelation() {
+ this.shouldUploadBeCancelled.value = true;
+ }
+
+ isUploadCancelationRequested(): boolean {
+ return this.shouldUploadBeCancelled.value;
+ }
+}
+
+const uploadCancelService = new UploadCancelService();
+
+class UIService {
+ private progressUpdater: ProgressUpdater;
+
+ // UPLOAD LEVEL STATES
+ private uploadStage: UPLOAD_STAGES = UPLOAD_STAGES.START;
+ private filenames: Map<number, string> = new Map();
+ private hasLivePhoto: boolean = false;
+ private uploadProgressView: boolean = false;
+
+ // STAGE LEVEL STATES
+ private perFileProgress: number;
+ private filesUploadedCount: number;
+ private totalFilesCount: number;
+ private inProgressUploads: InProgressUploads = new Map();
+ private finishedUploads: FinishedUploads = new Map();
+
+ init(progressUpdater: ProgressUpdater) {
+ this.progressUpdater = progressUpdater;
+ this.progressUpdater.setUploadStage(this.uploadStage);
+ this.progressUpdater.setUploadFilenames(this.filenames);
+ this.progressUpdater.setHasLivePhotos(this.hasLivePhoto);
+ this.progressUpdater.setUploadProgressView(this.uploadProgressView);
+ this.progressUpdater.setUploadCounter({
+ finished: this.filesUploadedCount,
+ total: this.totalFilesCount,
+ });
+ this.progressUpdater.setInProgressUploads(
+ convertInProgressUploadsToList(this.inProgressUploads),
+ );
+ this.progressUpdater.setFinishedUploads(
+ groupByResult(this.finishedUploads),
+ );
+ }
+
+ reset(count = 0) {
+ this.setTotalFileCount(count);
+ this.filesUploadedCount = 0;
+ this.inProgressUploads = new Map();
+ this.finishedUploads = new Map();
+ this.updateProgressBarUI();
+ }
+
+ setTotalFileCount(count: number) {
+ this.totalFilesCount = count;
+ if (count > 0) {
+ this.perFileProgress = 100 / this.totalFilesCount;
+ } else {
+ this.perFileProgress = 0;
+ }
+ }
+
+ setFileProgress(key: number, progress: number) {
+ this.inProgressUploads.set(key, progress);
+ this.updateProgressBarUI();
+ }
+
+ setUploadStage(stage: UPLOAD_STAGES) {
+ this.uploadStage = stage;
+ this.progressUpdater.setUploadStage(stage);
+ }
+
+ setFiles(files: { localID: number; fileName: string }[]) {
+ const filenames = new Map(files.map((f) => [f.localID, f.fileName]));
+ this.filenames = filenames;
+ this.progressUpdater.setUploadFilenames(filenames);
+ }
+
+ setHasLivePhoto(hasLivePhoto: boolean) {
+ this.hasLivePhoto = hasLivePhoto;
+ this.progressUpdater.setHasLivePhotos(hasLivePhoto);
+ }
+
+ setUploadProgressView(uploadProgressView: boolean) {
+ this.uploadProgressView = uploadProgressView;
+ this.progressUpdater.setUploadProgressView(uploadProgressView);
+ }
+
+ increaseFileUploaded() {
+ this.filesUploadedCount++;
+ this.updateProgressBarUI();
+ }
+
+ moveFileToResultList(key: number, uploadResult: UPLOAD_RESULT) {
+ this.finishedUploads.set(key, uploadResult);
+ this.inProgressUploads.delete(key);
+ this.updateProgressBarUI();
+ }
+
+ hasFilesInResultList() {
+ return this.finishedUploads.size > 0;
+ }
+
+ private updateProgressBarUI() {
+ const {
+ setPercentComplete,
+ setUploadCounter,
+ setInProgressUploads,
+ setFinishedUploads,
+ } = this.progressUpdater;
+ setUploadCounter({
+ finished: this.filesUploadedCount,
+ total: this.totalFilesCount,
+ });
+ let percentComplete =
+ this.perFileProgress *
+ (this.finishedUploads.size || this.filesUploadedCount);
+ if (this.inProgressUploads) {
+ // eslint-disable-next-line @typescript-eslint/no-unused-vars
+ for (const [_, progress] of this.inProgressUploads) {
+ // filter negative indicator values during percentComplete calculation
+ if (progress < 0) {
+ continue;
+ }
+ percentComplete += (this.perFileProgress * progress) / 100;
+ }
+ }
+
+ setPercentComplete(percentComplete);
+ setInProgressUploads(
+ convertInProgressUploadsToList(this.inProgressUploads),
+ );
+ setFinishedUploads(groupByResult(this.finishedUploads));
+ }
+
+ trackUploadProgress(
+ fileLocalID: number,
+ percentPerPart = RANDOM_PERCENTAGE_PROGRESS_FOR_PUT(),
+ index = 0,
+ ) {
+ const cancel: { exec: Canceler } = { exec: () => {} };
+ const cancelTimedOutRequest = () =>
+ cancel.exec(CustomError.REQUEST_TIMEOUT);
+
+ const cancelCancelledUploadRequest = () =>
+ cancel.exec(CustomError.UPLOAD_CANCELLED);
+
+ let timeout = null;
+ const resetTimeout = () => {
+ if (timeout) {
+ clearTimeout(timeout);
+ }
+ timeout = setTimeout(cancelTimedOutRequest, 30 * 1000 /* 30 sec */);
+ };
+ return {
+ cancel,
+ onUploadProgress: (event) => {
+ this.inProgressUploads.set(
+ fileLocalID,
+ Math.min(
+ Math.round(
+ percentPerPart * index +
+ (percentPerPart * event.loaded) / event.total,
+ ),
+ 98,
+ ),
+ );
+ this.updateProgressBarUI();
+ if (event.loaded === event.total) {
+ clearTimeout(timeout);
+ } else {
+ resetTimeout();
+ }
+ if (uploadCancelService.isUploadCancelationRequested()) {
+ cancelCancelledUploadRequest();
+ }
+ },
+ };
+ }
+}
+
+function convertInProgressUploadsToList(inProgressUploads) {
+ return [...inProgressUploads.entries()].map(
+ ([localFileID, progress]) =>
+ ({
+ localFileID,
+ progress,
+ }) as InProgressUpload,
+ );
+}
+
+const groupByResult = (finishedUploads: FinishedUploads) => {
+ const groups: SegregatedFinishedUploads = new Map();
+ for (const [localID, result] of finishedUploads) {
+ if (!groups.has(result)) groups.set(result, []);
+ groups.get(result).push(localID);
+ }
+ return groups;
+};
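+
+// For example, Map { 1 => UPLOADED, 2 => FAILED, 3 => UPLOADED } groups into
+// Map { UPLOADED => [1, 3], FAILED => [2] }.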
class UploadManager {
private cryptoWorkers = new Array<
 ComlinkWorker<typeof DedicatedCryptoWorker>
- >(MAX_CONCURRENT_UPLOADS);
- private parsedMetadataJSONMap: ParsedMetadataJSONMap;
- private filesToBeUploaded: FileWithCollection[];
- private remainingFiles: FileWithCollection[] = [];
- private failedFiles: FileWithCollection[];
+ >(maxConcurrentUploads);
+ private parsedMetadataJSONMap: Map<string, ParsedMetadataJSON>;
+ private filesToBeUploaded: ClusteredFile[];
+ private remainingFiles: ClusteredFile[] = [];
+ private failedFiles: ClusteredFile[];
private existingFiles: EnteFile[];
private setFiles: SetFiles;
private collections: Map;
private uploadInProgress: boolean;
private publicUploadProps: PublicUploadProps;
private uploaderName: string;
+ private uiService: UIService;
+ private isCFUploadProxyDisabled: boolean = false;
+
+ constructor() {
+ this.uiService = new UIService();
+ }
public async init(
progressUpdater: ProgressUpdater,
@@ -65,13 +342,14 @@ class UploadManager {
publicCollectProps: PublicUploadProps,
isCFUploadProxyDisabled: boolean,
) {
- UIService.init(progressUpdater);
+ this.uiService.init(progressUpdater);
const remoteIsCFUploadProxyDisabled =
await getDisableCFUploadProxyFlag();
if (remoteIsCFUploadProxyDisabled) {
isCFUploadProxyDisabled = remoteIsCFUploadProxyDisabled;
}
- UploadService.init(publicCollectProps, isCFUploadProxyDisabled);
+ this.isCFUploadProxyDisabled = isCFUploadProxyDisabled;
+ UploadService.init(publicCollectProps);
this.setFiles = setFiles;
this.publicUploadProps = publicCollectProps;
}
@@ -89,18 +367,106 @@ class UploadManager {
this.uploaderName = null;
}
- prepareForNewUpload() {
+ public prepareForNewUpload() {
this.resetState();
- UIService.reset();
+ this.uiService.reset();
uploadCancelService.reset();
- UIService.setUploadStage(UPLOAD_STAGES.START);
+ this.uiService.setUploadStage(UPLOAD_STAGES.START);
}
showUploadProgressDialog() {
- UIService.setUploadProgressView(true);
+ this.uiService.setUploadProgressView(true);
}
- async updateExistingFilesAndCollections(collections: Collection[]) {
+ /**
+ * Upload files
+ *
+ * This method waits for all the files to get uploaded (successfully or
+ * unsuccessfully) before returning.
+ *
+ * It is an error to call this method when there is already an in-progress
+ * upload.
+ *
+ * @param filesWithCollectionToUploadIn The files to upload, each paired
+ * with the id of the collection that they should be uploaded into.
+ *
+ * @returns `true` if at least one file was processed
+ */
+ public async uploadFiles(
+ filesWithCollectionToUploadIn: FileWithCollection[],
+ collections: Collection[],
+ uploaderName?: string,
+ ) {
+ if (this.uploadInProgress)
+ throw new Error("Cannot run multiple uploads at once");
+
+ log.info(`Uploading ${filesWithCollectionToUploadIn.length} files`);
+ this.uploadInProgress = true;
+ this.uploaderName = uploaderName;
+
+ try {
+ await this.updateExistingFilesAndCollections(collections);
+
+ const namedFiles = filesWithCollectionToUploadIn.map(
+ makeFileWithCollectionIDAndName,
+ );
+
+ this.uiService.setFiles(namedFiles);
+
+ const [metadataFiles, mediaFiles] =
+ splitMetadataAndMediaFiles(namedFiles);
+
+ if (metadataFiles.length) {
+ this.uiService.setUploadStage(
+ UPLOAD_STAGES.READING_GOOGLE_METADATA_FILES,
+ );
+
+ await this.parseMetadataJSONFiles(metadataFiles);
+ }
+
+ if (mediaFiles.length) {
+ const clusteredMediaFiles = await clusterLivePhotos(mediaFiles);
+
+ this.abortIfCancelled();
+
+ // Live photos might've been clustered together; reset the list
+ // of files to reflect that.
+ this.uiService.setFiles(clusteredMediaFiles);
+
+ this.uiService.setHasLivePhoto(
+ mediaFiles.length != clusteredMediaFiles.length,
+ );
+
+ await this.uploadMediaFiles(clusteredMediaFiles);
+ }
+ } catch (e) {
+ if (e.message === CustomError.UPLOAD_CANCELLED) {
+ if (isElectron()) {
+ this.remainingFiles = [];
+ await cancelRemainingUploads();
+ }
+ } else {
+ log.error("Uploading failed", e);
+ throw e;
+ }
+ } finally {
+ this.uiService.setUploadStage(UPLOAD_STAGES.FINISH);
+ for (let i = 0; i < maxConcurrentUploads; i++) {
+ this.cryptoWorkers[i]?.terminate();
+ }
+ this.uploadInProgress = false;
+ }
+
+ return this.uiService.hasFilesInResultList();
+ }
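+
+ // Call-site sketch (names illustrative):
+ //
+ //     const processed = await uploadManager.uploadFiles(files, collections, name);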
+
+ private abortIfCancelled = () => {
+ if (uploadCancelService.isUploadCancelationRequested()) {
+ throw Error(CustomError.UPLOAD_CANCELLED);
+ }
+ };
+
+ private async updateExistingFilesAndCollections(collections: Collection[]) {
if (this.publicUploadProps.accessedThroughSharedURL) {
this.existingFiles = await getLocalPublicFiles(
getPublicCollectionUID(this.publicUploadProps.token),
@@ -113,168 +479,41 @@ class UploadManager {
);
}
- public async queueFilesForUpload(
- filesWithCollectionToUploadIn: FileWithCollection[],
- collections: Collection[],
- uploaderName?: string,
- ) {
- try {
- if (this.uploadInProgress) {
- throw Error("can't run multiple uploads at once");
- }
- this.uploadInProgress = true;
- await this.updateExistingFilesAndCollections(collections);
- this.uploaderName = uploaderName;
- log.info(
- `received ${filesWithCollectionToUploadIn.length} files to upload`,
- );
- uiService.setFilenames(
- new Map(
- filesWithCollectionToUploadIn.map((mediaFile) => [
- mediaFile.localID,
- UploadService.getAssetName(mediaFile),
- ]),
- ),
- );
- const { metadataJSONFiles, mediaFiles } =
- segregateMetadataAndMediaFiles(filesWithCollectionToUploadIn);
- log.info(`has ${metadataJSONFiles.length} metadata json files`);
- log.info(`has ${mediaFiles.length} media files`);
- if (metadataJSONFiles.length) {
- UIService.setUploadStage(
- UPLOAD_STAGES.READING_GOOGLE_METADATA_FILES,
- );
- await this.parseMetadataJSONFiles(metadataJSONFiles);
+ private async parseMetadataJSONFiles(files: FileWithCollectionIDAndName[]) {
+ this.uiService.reset(files.length);
- UploadService.setParsedMetadataJSONMap(
- this.parsedMetadataJSONMap,
- );
- }
- if (mediaFiles.length) {
- log.info(`clusterLivePhotoFiles started`);
- const analysedMediaFiles =
- await UploadService.clusterLivePhotoFiles(mediaFiles);
- log.info(`clusterLivePhotoFiles ended`);
- log.info(
- `got live photos: ${
- mediaFiles.length !== analysedMediaFiles.length
- }`,
- );
- uiService.setFilenames(
- new Map(
- analysedMediaFiles.map((mediaFile) => [
- mediaFile.localID,
- UploadService.getAssetName(mediaFile),
- ]),
- ),
- );
+ for (const { fileOrPath, fileName, collectionID } of files) {
+ this.abortIfCancelled();
- UIService.setHasLivePhoto(
- mediaFiles.length !== analysedMediaFiles.length,
+ log.info(`Parsing metadata JSON ${fileName}`);
+ const metadataJSON = await tryParseTakeoutMetadataJSON(fileOrPath);
+ if (metadataJSON) {
+ this.parsedMetadataJSONMap.set(
+ getMetadataJSONMapKeyForJSON(collectionID, fileName),
+ metadataJSON,
);
-
- await this.uploadMediaFiles(analysedMediaFiles);
+ this.uiService.increaseFileUploaded();
}
- } catch (e) {
- if (e.message === CustomError.UPLOAD_CANCELLED) {
- if (isElectron()) {
- this.remainingFiles = [];
- await cancelRemainingUploads();
- }
- } else {
- log.error("uploading failed with error", e);
- throw e;
- }
- } finally {
- UIService.setUploadStage(UPLOAD_STAGES.FINISH);
- for (let i = 0; i < MAX_CONCURRENT_UPLOADS; i++) {
- this.cryptoWorkers[i]?.terminate();
- }
- this.uploadInProgress = false;
- }
- try {
- if (!UIService.hasFilesInResultList()) {
- return true;
- } else {
- return false;
- }
- } catch (e) {
- log.error(" failed to return shouldCloseProgressBar", e);
- return false;
}
}
- private async parseMetadataJSONFiles(metadataFiles: FileWithCollection[]) {
- try {
- log.info(`parseMetadataJSONFiles function executed `);
-
- UIService.reset(metadataFiles.length);
-
- for (const { file, collectionID } of metadataFiles) {
- try {
- if (uploadCancelService.isUploadCancelationRequested()) {
- throw Error(CustomError.UPLOAD_CANCELLED);
- }
- log.info(
- `parsing metadata json file ${getFileNameSize(file)}`,
- );
-
- const parsedMetadataJSON = await parseMetadataJSON(file);
- if (parsedMetadataJSON) {
- this.parsedMetadataJSONMap.set(
- getMetadataJSONMapKeyForJSON(
- collectionID,
- file.name,
- ),
- parsedMetadataJSON && { ...parsedMetadataJSON },
- );
- UIService.increaseFileUploaded();
- }
- log.info(
- `successfully parsed metadata json file ${getFileNameSize(
- file,
- )}`,
- );
- } catch (e) {
- if (e.message === CustomError.UPLOAD_CANCELLED) {
- throw e;
- } else {
- // and don't break for subsequent files just log and move on
- log.error("parsing failed for a file", e);
- log.info(
- `failed to parse metadata json file ${getFileNameSize(
- file,
- )} error: ${e.message}`,
- );
- }
- }
- }
- } catch (e) {
- if (e.message !== CustomError.UPLOAD_CANCELLED) {
- log.error("error seeding MetadataMap", e);
- }
- throw e;
- }
- }
-
- private async uploadMediaFiles(mediaFiles: FileWithCollection[]) {
- log.info(`uploadMediaFiles called`);
+ private async uploadMediaFiles(mediaFiles: ClusteredFile[]) {
this.filesToBeUploaded = [...this.filesToBeUploaded, ...mediaFiles];
if (isElectron()) {
this.remainingFiles = [...this.remainingFiles, ...mediaFiles];
}
- UIService.reset(mediaFiles.length);
+ this.uiService.reset(mediaFiles.length);
await UploadService.setFileCount(mediaFiles.length);
- UIService.setUploadStage(UPLOAD_STAGES.UPLOADING);
+ this.uiService.setUploadStage(UPLOAD_STAGES.UPLOADING);
const uploadProcesses = [];
for (
let i = 0;
- i < MAX_CONCURRENT_UPLOADS && this.filesToBeUploaded.length > 0;
+ i < maxConcurrentUploads && this.filesToBeUploaded.length > 0;
i++
) {
this.cryptoWorkers[i] = getDedicatedCryptoWorker();
@@ -285,64 +524,81 @@ class UploadManager {
}
 private async uploadNextFileInQueue(worker: Remote<DedicatedCryptoWorker>) {
+ const uiService = this.uiService;
+
while (this.filesToBeUploaded.length > 0) {
- if (uploadCancelService.isUploadCancelationRequested()) {
- throw Error(CustomError.UPLOAD_CANCELLED);
- }
- let fileWithCollection = this.filesToBeUploaded.pop();
- const { collectionID } = fileWithCollection;
+ this.abortIfCancelled();
+
+ const clusteredFile = this.filesToBeUploaded.pop();
+ const { localID, collectionID } = clusteredFile;
const collection = this.collections.get(collectionID);
- fileWithCollection = { ...fileWithCollection, collection };
- const { fileUploadResult, uploadedFile } = await uploader(
- worker,
- this.existingFiles,
- fileWithCollection,
+ const uploadableFile = { ...clusteredFile, collection };
+
+ uiService.setFileProgress(localID, 0);
+ await wait(0);
+
+ const { uploadResult, uploadedFile } = await uploader(
+ uploadableFile,
this.uploaderName,
+ this.existingFiles,
+ this.parsedMetadataJSONMap,
+ worker,
+ this.isCFUploadProxyDisabled,
+ () => {
+ this.abortIfCancelled();
+ },
+ (
+ fileLocalID: number,
+ percentPerPart?: number,
+ index?: number,
+ ) =>
+ uiService.trackUploadProgress(
+ fileLocalID,
+ percentPerPart,
+ index,
+ ),
);
const finalUploadResult = await this.postUploadTask(
- fileUploadResult,
+ uploadableFile,
+ uploadResult,
uploadedFile,
- fileWithCollection,
);
- UIService.moveFileToResultList(
- fileWithCollection.localID,
- finalUploadResult,
- );
- UIService.increaseFileUploaded();
+ this.uiService.moveFileToResultList(localID, finalUploadResult);
+ this.uiService.increaseFileUploaded();
UploadService.reducePendingUploadCount();
}
}
- async postUploadTask(
- fileUploadResult: UPLOAD_RESULT,
- uploadedFile: EncryptedEnteFile | EnteFile | null,
- fileWithCollection: FileWithCollection,
+ private async postUploadTask(
+ uploadableFile: UploadableFile,
+ uploadResult: UPLOAD_RESULT,
+ uploadedFile: EncryptedEnteFile | EnteFile | undefined,
) {
+ log.info(
+ `Uploaded ${uploadableFile.fileName} with result ${uploadResult}`,
+ );
try {
let decryptedFile: EnteFile;
- log.info(
- `post upload action -> fileUploadResult: ${fileUploadResult} uploadedFile present ${!!uploadedFile}`,
- );
- await this.updateElectronRemainingFiles(fileWithCollection);
- switch (fileUploadResult) {
+ await this.removeFromPendingUploads(uploadableFile);
+ switch (uploadResult) {
case UPLOAD_RESULT.FAILED:
case UPLOAD_RESULT.BLOCKED:
- this.failedFiles.push(fileWithCollection);
+ this.failedFiles.push(uploadableFile);
break;
case UPLOAD_RESULT.ALREADY_UPLOADED:
decryptedFile = uploadedFile as EnteFile;
break;
case UPLOAD_RESULT.ADDED_SYMLINK:
decryptedFile = uploadedFile as EnteFile;
- fileUploadResult = UPLOAD_RESULT.UPLOADED;
+ uploadResult = UPLOAD_RESULT.UPLOADED;
break;
case UPLOAD_RESULT.UPLOADED:
case UPLOAD_RESULT.UPLOADED_WITH_STATIC_THUMBNAIL:
decryptedFile = await decryptFile(
uploadedFile as EncryptedEnteFile,
- fileWithCollection.collection.key,
+ uploadableFile.collection.key,
);
break;
case UPLOAD_RESULT.UNSUPPORTED:
@@ -350,33 +606,33 @@ class UploadManager {
// no-op
break;
default:
- throw Error("Invalid Upload Result" + fileUploadResult);
+ throw new Error(`Invalid Upload Result ${uploadResult}`);
}
if (
[
UPLOAD_RESULT.ADDED_SYMLINK,
UPLOAD_RESULT.UPLOADED,
UPLOAD_RESULT.UPLOADED_WITH_STATIC_THUMBNAIL,
- ].includes(fileUploadResult)
+ ].includes(uploadResult)
) {
try {
eventBus.emit(Events.FILE_UPLOADED, {
enteFile: decryptedFile,
localFile:
- fileWithCollection.file ??
- fileWithCollection.livePhotoAssets.image,
+ uploadableFile.fileOrPath ??
+ uploadableFile.livePhotoAssets.image,
});
} catch (e) {
- log.error("Error in fileUploaded handlers", e);
+ log.warn("Ignoring error in fileUploaded handlers", e);
}
this.updateExistingFiles(decryptedFile);
}
await this.watchFolderCallback(
- fileUploadResult,
- fileWithCollection,
+ uploadResult,
+ uploadableFile,
uploadedFile as EncryptedEnteFile,
);
- return fileUploadResult;
+ return uploadResult;
} catch (e) {
log.error("failed to do post file upload action", e);
return UPLOAD_RESULT.FAILED;
@@ -385,7 +641,7 @@ class UploadManager {
private async watchFolderCallback(
fileUploadResult: UPLOAD_RESULT,
- fileWithCollection: FileWithCollection,
+ fileWithCollection: ClusteredFile,
uploadedFile: EncryptedEnteFile,
) {
if (isElectron()) {
@@ -400,19 +656,19 @@ class UploadManager {
}
public cancelRunningUpload() {
- log.info("user cancelled running upload");
- UIService.setUploadStage(UPLOAD_STAGES.CANCELLING);
+ log.info("User cancelled running upload");
+ this.uiService.setUploadStage(UPLOAD_STAGES.CANCELLING);
uploadCancelService.requestUploadCancelation();
}
- getFailedFilesWithCollections() {
+ public getFailedFilesWithCollections() {
return {
files: this.failedFiles,
collections: [...this.collections.values()],
};
}
- getUploaderName() {
+ public getUploaderName() {
return this.uploaderName;
}
@@ -428,14 +684,13 @@ class UploadManager {
this.setFiles((files) => sortFiles([...files, decryptedFile]));
}
- private async updateElectronRemainingFiles(
- fileWithCollection: FileWithCollection,
- ) {
- if (isElectron()) {
+ private async removeFromPendingUploads({ localID }: ClusteredFile) {
+ const electron = globalThis.electron;
+ if (electron) {
this.remainingFiles = this.remainingFiles.filter(
- (file) => !areFileWithCollectionsSame(file, fileWithCollection),
+ (f) => f.localID != localID,
);
- await updatePendingUploads(this.remainingFiles);
+ await updatePendingUploads(electron, this.remainingFiles);
}
}
@@ -445,3 +700,301 @@ class UploadManager {
}
export default new UploadManager();
+
+/**
+ * The data operated on by the intermediate stages of the upload.
+ *
+ * [Note: Intermediate file types during upload]
+ *
+ * As files progress through stages, they get more and more bits tacked on to
+ * them. These types document the journey.
+ *
+ * - The input is {@link FileWithCollection}. This can either be a new
+ * {@link FileWithCollection}, in which case it'll only have a
+ * {@link localID}, {@link collectionID} and a {@link fileOrPath}. Or it could
+ * be a retry, in which case it'll not have a {@link fileOrPath} but instead
+ * will have data from a previous stage (concretely, it'll just be a
+ * relabelled {@link ClusteredFile}), like a snake eating its tail.
+ *
+ * - Immediately we convert it to {@link FileWithCollectionIDAndName}. This is
+ *   mostly to systematize what we have, and also to attach a {@link fileName}.
+ *
+ * - These then get converted to "assets", whereby both parts of a live photo
+ * are combined. This is a {@link ClusteredFile}.
+ *
+ * - On to the {@link ClusteredFile} we attach the corresponding
+ * {@link collection}, giving us {@link UploadableFile}. This is what gets
+ * queued and then passed to the {@link uploader}.
+ */
+type FileWithCollectionIDAndName = {
+ /** A unique ID for the duration of the upload. */
+ localID: number;
+ /** The ID of the collection to which this file should be uploaded. */
+ collectionID: number;
+ /**
+ * The name of the file.
+ *
+ * In case of live photos, this'll be the name of the image part.
+ */
+ fileName: string;
+ /** `true` if this is a live photo. */
+ isLivePhoto?: boolean;
+ /* Valid for non-live photos */
+ fileOrPath?: File | string;
+ /* Valid for live photos */
+ livePhotoAssets?: LivePhotoAssets;
+};
+
+const makeFileWithCollectionIDAndName = (
+ f: FileWithCollection,
+): FileWithCollectionIDAndName => {
+ const fileOrPath = f.fileOrPath;
+ /* TODO(MR): ElectronFile */
+ if (!(fileOrPath instanceof File || typeof fileOrPath == "string"))
+ throw new Error(`Unexpected file ${f}`);
+
+ return {
+ localID: ensure(f.localID),
+ collectionID: ensure(f.collectionID),
+ fileName: ensure(
+ f.isLivePhoto
+ ? fopFileName(f.livePhotoAssets.image)
+ : fopFileName(fileOrPath),
+ ),
+ isLivePhoto: f.isLivePhoto,
+ fileOrPath: fileOrPath,
+ livePhotoAssets: f.livePhotoAssets,
+ };
+};
+
+/**
+ * A file with both parts of a live photo clubbed together.
+ *
+ * See: [Note: Intermediate file types during upload].
+ */
+type ClusteredFile = {
+ localID: number;
+ collectionID: number;
+ fileName: string;
+ isLivePhoto: boolean;
+ fileOrPath?: File | string;
+ livePhotoAssets?: LivePhotoAssets;
+};
+
+/**
+ * The file that we hand off to the uploader. Essentially {@link ClusteredFile}
+ * with the {@link collection} attached to it.
+ *
+ * See: [Note: Intermediate file types during upload].
+ */
+export type UploadableFile = ClusteredFile & {
+ collection: Collection;
+};
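
To make the journey concrete, here is a minimal standalone sketch (simplified stand-ins; the values, and the collection shape, are made up for illustration) of a single new file passing through these stages:

    // Simplified stand-in for the real Collection type.
    declare const collection: { id: number; key: string };

    // 1. The input: a new FileWithCollection, with just the IDs and the source.
    const input = { localID: 1, collectionID: 42, fileOrPath: "/photos/IMG_0001.jpg" };
    // 2. FileWithCollectionIDAndName: systematized, with the fileName attached.
    const withName = { ...input, fileName: "IMG_0001.jpg" };
    // 3. ClusteredFile: live photo siblings (none in this example) clubbed together.
    const clustered = { ...withName, isLivePhoto: false };
    // 4. UploadableFile: the collection itself is attached just before queueing.
    const uploadable = { ...clustered, collection };
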
+
+const splitMetadataAndMediaFiles = (
+ files: FileWithCollectionIDAndName[],
+): [
+ metadata: FileWithCollectionIDAndName[],
+ media: FileWithCollectionIDAndName[],
+] =>
+ files.reduce(
+ ([metadata, media], f) => {
+ if (lowercaseExtension(f.fileName) == "json") metadata.push(f);
+ else media.push(f);
+ return [metadata, media];
+ },
+ [[], []],
+ );
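
As a quick illustration (hypothetical names; a Takeout export places a sidecar JSON next to each media file), the split is purely by extension:

    const batch = [
        { localID: 1, collectionID: 7, fileName: "IMG_0001.jpg" },
        { localID: 2, collectionID: 7, fileName: "IMG_0001.jpg.json" },
    ];
    const [metadataFiles, mediaFiles] = splitMetadataAndMediaFiles(batch);
    // metadataFiles contains the ".json" sidecar, mediaFiles the ".jpg" photo.
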
+
+export const setToUploadCollection = async (collections: Collection[]) => {
+ let collectionName: string = null;
+ /*
+ * A single collection suggests one of two things: either the user uploaded
+ * to one existing collection, or they created a single new collection to
+ * upload to (they may have had multiple folders, but chose to upload
+ * everything into one album).
+ *
+ * In both cases, saving the collection name when the upload collection
+ * count is 1 records that choice, so the next upload can resume directly
+ * into the same collection.
+ */
+ if (collections.length === 1) {
+ collectionName = collections[0].name;
+ }
+ await ensureElectron().setPendingUploadCollection(collectionName);
+};
+
+const updatePendingUploads = async (
+ electron: Electron,
+ files: ClusteredFile[],
+) => {
+ const paths = files
+ .map((file) =>
+ file.isLivePhoto
+ ? [file.livePhotoAssets.image, file.livePhotoAssets.video]
+ : [file.fileOrPath],
+ )
+ .flat()
+ .map((f) => getFilePathElectron(f));
+ await electron.setPendingUploadFiles("files", paths);
+};
+
+/**
+ * NOTE: A stopgap measure, only meant to be called by code running in the
+ * context of a desktop-app-initiated upload.
+ */
+export const getFilePathElectron = (file: File | ElectronFile | string) =>
+ typeof file == "string" ? file : (file as ElectronFile).path;
+
+const cancelRemainingUploads = async () => {
+ const electron = ensureElectron();
+ await electron.setPendingUploadCollection(undefined);
+ await electron.setPendingUploadFiles("zips", []);
+ await electron.setPendingUploadFiles("files", []);
+};
+
+/**
+ * Go through the given files, combining any sibling image + video assets into a
+ * single live photo when appropriate.
+ */
+const clusterLivePhotos = async (files: FileWithCollectionIDAndName[]) => {
+ const result: ClusteredFile[] = [];
+ files
+ .sort((f, g) =>
+ nameAndExtension(f.fileName)[0].localeCompare(
+ nameAndExtension(g.fileName)[0],
+ ),
+ )
+ .sort((f, g) => f.collectionID - g.collectionID);
+ let index = 0;
+ while (index < files.length - 1) {
+ const f = files[index];
+ const g = files[index + 1];
+ const fFileType = potentialFileTypeFromExtension(f.fileName);
+ const gFileType = potentialFileTypeFromExtension(g.fileName);
+ const fa: PotentialLivePhotoAsset = {
+ fileName: f.fileName,
+ fileType: fFileType,
+ collectionID: f.collectionID,
+ fileOrPath: f.fileOrPath,
+ };
+ const ga: PotentialLivePhotoAsset = {
+ fileName: g.fileName,
+ fileType: gFileType,
+ collectionID: g.collectionID,
+ fileOrPath: g.fileOrPath,
+ };
+ if (await areLivePhotoAssets(fa, ga)) {
+ const [image, video] =
+ fFileType == FILE_TYPE.IMAGE ? [f, g] : [g, f];
+ result.push({
+ localID: f.localID,
+ collectionID: f.collectionID,
+ fileName: image.fileName,
+ isLivePhoto: true,
+ livePhotoAssets: {
+ image: image.fileOrPath,
+ video: video.fileOrPath,
+ },
+ });
+ index += 2;
+ } else {
+ result.push({
+ ...f,
+ isLivePhoto: false,
+ });
+ index += 1;
+ }
+ }
+ if (index === files.length - 1) {
+ result.push({
+ ...files[index],
+ isLivePhoto: false,
+ });
+ }
+ return result;
+};
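
For instance (hypothetical paths), a sibling image + video pair in the same collection comes out as a single live photo entry named after the image part, subject to the type and size checks in areLivePhotoAssets below:

    const siblings = [
        { localID: 1, collectionID: 7, fileName: "IMG_0001.HEIC", fileOrPath: "/p/IMG_0001.HEIC" },
        { localID: 2, collectionID: 7, fileName: "IMG_0001.MOV", fileOrPath: "/p/IMG_0001.MOV" },
    ];
    // await clusterLivePhotos(siblings) would, conceptually, yield:
    // [{ localID: 1, collectionID: 7, fileName: "IMG_0001.HEIC", isLivePhoto: true,
    //    livePhotoAssets: { image: "/p/IMG_0001.HEIC", video: "/p/IMG_0001.MOV" } }]
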
+
+interface PotentialLivePhotoAsset {
+ fileName: string;
+ fileType: FILE_TYPE;
+ collectionID: number;
+ fileOrPath: File | string;
+}
+
+const areLivePhotoAssets = async (
+ f: PotentialLivePhotoAsset,
+ g: PotentialLivePhotoAsset,
+) => {
+ if (f.collectionID != g.collectionID) return false;
+
+ const [fName, fExt] = nameAndExtension(f.fileName);
+ const [gName, gExt] = nameAndExtension(g.fileName);
+
+ let fPrunedName: string;
+ let gPrunedName: string;
+ if (f.fileType == FILE_TYPE.IMAGE && g.fileType == FILE_TYPE.VIDEO) {
+ fPrunedName = removePotentialLivePhotoSuffix(
+ fName,
+ // A Google Live Photo image file can have video extension appended
+ // as suffix, so we pass that to removePotentialLivePhotoSuffix to
+ // remove it.
+ //
+ // Example: IMG_20210630_0001.mp4.jpg (Google Live Photo image file)
+ gExt ? `.${gExt}` : undefined,
+ );
+ gPrunedName = removePotentialLivePhotoSuffix(gName);
+ } else if (f.fileType == FILE_TYPE.VIDEO && g.fileType == FILE_TYPE.IMAGE) {
+ fPrunedName = removePotentialLivePhotoSuffix(fName);
+ gPrunedName = removePotentialLivePhotoSuffix(
+ gName,
+ fExt ? `.${fExt}` : undefined,
+ );
+ } else {
+ return false;
+ }
+
+ if (fPrunedName != gPrunedName) return false;
+
+ // Also check that the size of an individual Live Photo asset is less than
+ // an (arbitrary) limit. This should be true in practice as the videos for a
+ // live photo are a few seconds long. Furthermore, the zipping library
+ // that we use doesn't support streams as input.
+
+ const maxAssetSize = 20 * 1024 * 1024; /* 20MB */
+ const fSize = await fopSize(f.fileOrPath);
+ const gSize = await fopSize(g.fileOrPath);
+ if (fSize > maxAssetSize || gSize > maxAssetSize) {
+ log.info(
+ `Not classifying assets with too-large sizes [${fSize}, ${gSize}] as a live photo`,
+ );
+ return false;
+ }
+
+ return true;
+};
+
+const removePotentialLivePhotoSuffix = (name: string, suffix?: string) => {
+ const suffix_3 = "_3";
+
+ // The icloud-photos-downloader library appends _HVEC to the end of the
+ // filename in case of live photos.
+ //
+ // https://github.com/icloud-photos-downloader/icloud_photos_downloader
+ const suffix_hvec = "_HVEC";
+
+ let foundSuffix: string | undefined;
+ if (name.endsWith(suffix_3)) {
+ foundSuffix = suffix_3;
+ } else if (
+ name.endsWith(suffix_hvec) ||
+ name.endsWith(suffix_hvec.toLowerCase())
+ ) {
+ foundSuffix = suffix_hvec;
+ } else if (suffix) {
+ if (name.endsWith(suffix) || name.endsWith(suffix.toLowerCase())) {
+ foundSuffix = suffix;
+ }
+ }
+
+ return foundSuffix ? name.slice(0, -foundSuffix.length) : name;
+};
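
Expected behaviour of the pruning, assuming the helpers above (the last call shows the Google Live Photo case, where the video extension rides along on the image's name):

    removePotentialLivePhotoSuffix("IMG_0001_3");                    // "IMG_0001"
    removePotentialLivePhotoSuffix("IMG_0001_HVEC");                 // "IMG_0001"
    removePotentialLivePhotoSuffix("IMG_20210630_0001.mp4", ".mp4"); // "IMG_20210630_0001"
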
diff --git a/web/apps/photos/src/services/upload/uploadService.ts b/web/apps/photos/src/services/upload/uploadService.ts
index abcf49591..d49b32129 100644
--- a/web/apps/photos/src/services/upload/uploadService.ts
+++ b/web/apps/photos/src/services/upload/uploadService.ts
@@ -1,99 +1,111 @@
-import { convertBytesToHumanReadable, getFileNameSize } from "@/next/file";
+import { hasFileHash } from "@/media/file";
+import { FILE_TYPE, type FileTypeInfo } from "@/media/file-type";
+import { encodeLivePhoto } from "@/media/live-photo";
+import type { Metadata } from "@/media/types/file";
+import { ensureElectron } from "@/next/electron";
+import { basename } from "@/next/file";
import log from "@/next/log";
+import { CustomErrorMessage } from "@/next/types/ipc";
+import { ensure } from "@/utils/ensure";
+import { ENCRYPTION_CHUNK_SIZE } from "@ente/shared/crypto/constants";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
-import {
- B64EncryptionResult,
- EncryptionResult,
-} from "@ente/shared/crypto/types";
+import { B64EncryptionResult } from "@ente/shared/crypto/types";
import { CustomError, handleUploadError } from "@ente/shared/error";
-import { sleep } from "@ente/shared/utils";
import { Remote } from "comlink";
import {
- FILE_READER_CHUNK_SIZE,
- MAX_FILE_SIZE_SUPPORTED,
- MULTIPART_PART_SIZE,
+ NULL_LOCATION,
+ RANDOM_PERCENTAGE_PROGRESS_FOR_PUT,
UPLOAD_RESULT,
} from "constants/upload";
import { addToCollection } from "services/collectionService";
-import { Collection } from "types/collection";
+import { parseImageMetadata } from "services/exif";
+import * as ffmpeg from "services/ffmpeg";
+import {
+ PublicUploadProps,
+ type LivePhotoAssets,
+} from "services/upload/uploadManager";
import {
EnteFile,
- FilePublicMagicMetadata,
- FilePublicMagicMetadataProps,
+ MetadataFileAttributes,
+ S3FileAttributes,
+ type EncryptedEnteFile,
+ type FilePublicMagicMetadata,
+ type FilePublicMagicMetadataProps,
} from "types/file";
import { EncryptedMagicMetadata } from "types/magicMetadata";
-import {
- BackupedFile,
- DataStream,
- ElectronFile,
- EncryptedFile,
- ExtractMetadataResult,
- FileInMemory,
- FileTypeInfo,
- FileWithCollection,
- FileWithMetadata,
- Logger,
- ParsedMetadataJSON,
- ParsedMetadataJSONMap,
- ProcessedFile,
- PublicUploadProps,
- UploadAsset,
- UploadFile,
- UploadURL,
- isDataStream,
-} from "types/upload";
+import type { ParsedExtractedMetadata } from "types/metadata";
import {
getNonEmptyMagicMetadataProps,
updateMagicMetadata,
} from "utils/magicMetadata";
-import { findMatchingExistingFiles } from "utils/upload";
-import {
- getElectronFileStream,
- getFileStream,
- getUint8ArrayView,
-} from "../readerService";
-import { getFileType } from "../typeDetectionService";
-import {
- MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT,
- clusterLivePhotoFiles,
- extractLivePhotoMetadata,
- extractMetadata,
- getClippedMetadataJSONMapKeyForFile,
- getLivePhotoFileType,
- getLivePhotoName,
- getLivePhotoSize,
- getMetadataJSONMapKeyForFile,
- readLivePhoto,
-} from "./metadataService";
-import { uploadStreamUsingMultipart } from "./multiPartUploadService";
+import { readStream } from "utils/native-stream";
+import * as convert from "xml-js";
+import { detectFileTypeInfoFromChunk } from "../detect-type";
+import { tryParseEpochMicrosecondsFromFileName } from "./date";
import publicUploadHttpClient from "./publicUploadHttpClient";
-import { generateThumbnail } from "./thumbnailService";
-import UIService from "./uiService";
-import uploadCancelService from "./uploadCancelService";
+import type { ParsedMetadataJSON } from "./takeout";
+import { matchTakeoutMetadata } from "./takeout";
+import {
+ fallbackThumbnail,
+ generateThumbnailNative,
+ generateThumbnailWeb,
+} from "./thumbnail";
import UploadHttpClient from "./uploadHttpClient";
+import type { UploadableFile } from "./uploadManager";
+
+/**
+ * A readable stream for a file, and its associated size and last modified time.
+ *
+ * This is the in-memory representation of the `fileOrPath` type that we usually
+ * pass around. See: [Note: Reading a fileOrPath]
+ */
+interface FileStream {
+ /**
+ * A stream of the file's contents
+ *
+ * This stream is guaranteed to emit data in ENCRYPTION_CHUNK_SIZE chunks
+ * (except the last chunk which can be smaller since a file would rarely
+ * align exactly to an ENCRYPTION_CHUNK_SIZE multiple).
+ *
+ * Note: A stream can only be read once!
+ */
+ stream: ReadableStream;
+ /**
+ * Number of chunks {@link stream} will emit, each ENCRYPTION_CHUNK_SIZE
+ * sized (except the last one).
+ */
+ chunkCount: number;
+ /**
+ * The size in bytes of the underlying file.
+ */
+ fileSize: number;
+ /**
+ * The modification time of the file, in epoch milliseconds.
+ */
+ lastModifiedMs: number;
+ /**
+ * Set to the underlying {@link File} when we also have access to it.
+ */
+ file?: File;
+}
+
+/**
+ * If the stream we have is more than 5 ENCRYPTION_CHUNK_SIZE chunks, then use
+ * multipart uploads for it, with each multipart-part containing 5 chunks.
+ *
+ * ENCRYPTION_CHUNK_SIZE is 4 MB, and the number of chunks in a single upload
+ * part is 5, so each part is (up to) 20 MB.
+ */
+const multipartChunksPerPart = 5;
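
A quick sanity check of that arithmetic (a sketch; the 4 MB constant mirrors ENCRYPTION_CHUNK_SIZE):

    const chunkSize = 4 * 1024 * 1024; // ENCRYPTION_CHUNK_SIZE
    const fileSize = 100 * 1024 * 1024; // a 100 MB file
    const chunkCount = Math.ceil(fileSize / chunkSize); // 25 chunks
    const partCount = Math.ceil(chunkCount / multipartChunksPerPart); // 5 parts of 20 MB
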
/** Upload files to cloud storage */
class UploadService {
private uploadURLs: UploadURL[] = [];
- private parsedMetadataJSONMap: ParsedMetadataJSONMap = new Map<
- string,
- ParsedMetadataJSON
- >();
-
- private uploaderName: string;
-
private pendingUploadCount: number = 0;
-
private publicUploadProps: PublicUploadProps = undefined;
- private isCFUploadProxyDisabled: boolean = false;
-
- init(
- publicUploadProps: PublicUploadProps,
- isCFUploadProxyDisabled: boolean,
- ) {
+ init(publicUploadProps: PublicUploadProps) {
this.publicUploadProps = publicUploadProps;
- this.isCFUploadProxyDisabled = isCFUploadProxyDisabled;
}
async setFileCount(fileCount: number) {
@@ -101,194 +113,18 @@ class UploadService {
await this.preFetchUploadURLs();
}
- setParsedMetadataJSONMap(parsedMetadataJSONMap: ParsedMetadataJSONMap) {
- this.parsedMetadataJSONMap = parsedMetadataJSONMap;
- }
-
- setUploaderName(uploaderName: string) {
- this.uploaderName = uploaderName;
- }
-
- getUploaderName() {
- return this.uploaderName;
- }
-
- getIsCFUploadProxyDisabled() {
- return this.isCFUploadProxyDisabled;
- }
-
reducePendingUploadCount() {
this.pendingUploadCount--;
}
- getAssetSize({ isLivePhoto, file, livePhotoAssets }: UploadAsset) {
- return isLivePhoto
- ? getLivePhotoSize(livePhotoAssets)
- : getFileSize(file);
- }
-
- getAssetName({ isLivePhoto, file, livePhotoAssets }: UploadAsset) {
- return isLivePhoto
- ? getLivePhotoName(livePhotoAssets)
- : getFilename(file);
- }
-
- getAssetFileType({ isLivePhoto, file, livePhotoAssets }: UploadAsset) {
- return isLivePhoto
- ? getLivePhotoFileType(livePhotoAssets)
- : getFileType(file);
- }
-
- async readAsset(
- fileTypeInfo: FileTypeInfo,
- { isLivePhoto, file, livePhotoAssets }: UploadAsset,
- ) {
- return isLivePhoto
- ? await readLivePhoto(fileTypeInfo, livePhotoAssets)
- : await readFile(fileTypeInfo, file);
- }
-
- async extractAssetMetadata(
- worker: Remote,
- { isLivePhoto, file, livePhotoAssets }: UploadAsset,
- collectionID: number,
- fileTypeInfo: FileTypeInfo,
- ): Promise {
- return isLivePhoto
- ? extractLivePhotoMetadata(
- worker,
- this.parsedMetadataJSONMap,
- collectionID,
- fileTypeInfo,
- livePhotoAssets,
- )
- : await extractFileMetadata(
- worker,
- this.parsedMetadataJSONMap,
- collectionID,
- fileTypeInfo,
- file,
- );
- }
-
- clusterLivePhotoFiles(mediaFiles: FileWithCollection[]) {
- return clusterLivePhotoFiles(mediaFiles);
- }
-
- constructPublicMagicMetadata(
- publicMagicMetadataProps: FilePublicMagicMetadataProps,
- ) {
- return constructPublicMagicMetadata(publicMagicMetadataProps);
- }
-
- async encryptAsset(
- worker: Remote,
- file: FileWithMetadata,
- encryptionKey: string,
- ): Promise {
- return encryptFile(worker, file, encryptionKey);
- }
-
- async uploadToBucket(
- logger: Logger,
- file: ProcessedFile,
- ): Promise {
- try {
- let fileObjectKey: string = null;
- logger("uploading file to bucket");
- if (isDataStream(file.file.encryptedData)) {
- logger("uploading using multipart");
- fileObjectKey = await uploadStreamUsingMultipart(
- logger,
- file.localID,
- file.file.encryptedData,
- );
- logger("uploading using multipart done");
- } else {
- logger("uploading using single part");
- const progressTracker = UIService.trackUploadProgress(
- file.localID,
- );
- const fileUploadURL = await this.getUploadURL();
- if (!this.isCFUploadProxyDisabled) {
- logger("uploading using cf proxy");
- fileObjectKey = await UploadHttpClient.putFileV2(
- fileUploadURL,
- file.file.encryptedData as Uint8Array,
- progressTracker,
- );
- } else {
- logger("uploading directly to s3");
- fileObjectKey = await UploadHttpClient.putFile(
- fileUploadURL,
- file.file.encryptedData as Uint8Array,
- progressTracker,
- );
- }
- logger("uploading using single part done");
- }
- logger("uploading thumbnail to bucket");
- const thumbnailUploadURL = await this.getUploadURL();
- let thumbnailObjectKey: string = null;
- if (!this.isCFUploadProxyDisabled) {
- thumbnailObjectKey = await UploadHttpClient.putFileV2(
- thumbnailUploadURL,
- file.thumbnail.encryptedData,
- null,
- );
- } else {
- thumbnailObjectKey = await UploadHttpClient.putFile(
- thumbnailUploadURL,
- file.thumbnail.encryptedData,
- null,
- );
- }
- logger("uploading thumbnail to bucket done");
-
- const backupedFile: BackupedFile = {
- file: {
- decryptionHeader: file.file.decryptionHeader,
- objectKey: fileObjectKey,
- },
- thumbnail: {
- decryptionHeader: file.thumbnail.decryptionHeader,
- objectKey: thumbnailObjectKey,
- },
- metadata: file.metadata,
- pubMagicMetadata: file.pubMagicMetadata,
- };
- return backupedFile;
- } catch (e) {
- if (e.message !== CustomError.UPLOAD_CANCELLED) {
- log.error("error uploading to bucket", e);
- }
- throw e;
- }
- }
-
- getUploadFile(
- collection: Collection,
- backupedFile: BackupedFile,
- fileKey: B64EncryptionResult,
- ): UploadFile {
- const uploadFile: UploadFile = {
- collectionID: collection.id,
- encryptedKey: fileKey.encryptedData,
- keyDecryptionNonce: fileKey.nonce,
- ...backupedFile,
- };
- uploadFile;
- return uploadFile;
- }
-
- private async getUploadURL() {
+ async getUploadURL() {
if (this.uploadURLs.length === 0 && this.pendingUploadCount) {
await this.fetchUploadURLs();
}
return this.uploadURLs.pop();
}
- public async preFetchUploadURLs() {
+ private async preFetchUploadURLs() {
try {
await this.fetchUploadURLs();
// checking for any subscription related errors
@@ -344,9 +180,916 @@ const uploadService = new UploadService();
export default uploadService;
-export async function constructPublicMagicMetadata(
+/**
+ * Return the file name for the given {@link fileOrPath}.
+ *
+ * @param fileOrPath The {@link File}, or the path to it. Note that it is only
+ * valid to specify a path if we are running in the context of our desktop app.
+ */
+export const fopFileName = (fileOrPath: File | string) =>
+ typeof fileOrPath == "string" ? basename(fileOrPath) : fileOrPath.name;
+
+/**
+ * Return the size of the given {@link fileOrPath}.
+ *
+ * @param fileOrPath The {@link File}, or the path to it. Note that it is only
+ * valid to specify a path if we are running in the context of our desktop app.
+ */
+export const fopSize = async (fileOrPath: File | string): Promise<number> =>
+ fileOrPath instanceof File
+ ? fileOrPath.size
+ : await ensureElectron().fs.size(fileOrPath);
+
+/* -- Various intermediate types used during upload -- */
+
+interface UploadAsset {
+ isLivePhoto?: boolean;
+ fileOrPath?: File | string;
+ livePhotoAssets?: LivePhotoAssets;
+}
+
+interface ThumbnailedFile {
+ fileStreamOrData: FileStream | Uint8Array;
+ /** The JPEG data of the generated thumbnail */
+ thumbnail: Uint8Array;
+ /**
+ * `true` if this is a fallback (all black) thumbnail we're returning since
+ * thumbnail generation failed for some reason.
+ */
+ hasStaticThumbnail: boolean;
+}
+
+interface FileWithMetadata extends Omit<ThumbnailedFile, "hasStaticThumbnail"> {
+ metadata: Metadata;
+ localID: number;
+ pubMagicMetadata: FilePublicMagicMetadata;
+}
+
+interface EncryptedFile {
+ file: ProcessedFile;
+ fileKey: B64EncryptionResult;
+}
+
+interface EncryptedFileStream {
+ /**
+ * A stream of the file's encrypted contents
+ *
+ * This stream is guaranteed to emit data in ENCRYPTION_CHUNK_SIZE chunks
+ * (except the last chunk which can be smaller since a file would rarely
+ * align exactly to an ENCRYPTION_CHUNK_SIZE multiple).
+ */
+ stream: ReadableStream;
+ /**
+ * Number of chunks {@link stream} will emit, each ENCRYPTION_CHUNK_SIZE
+ * sized (except the last one).
+ */
+ chunkCount: number;
+}
+
+interface LocalFileAttributes<
+ T extends string | Uint8Array | EncryptedFileStream,
+> {
+ encryptedData: T;
+ decryptionHeader: string;
+}
+
+interface EncryptionResult<
+ T extends string | Uint8Array | EncryptedFileStream,
+> {
+ file: LocalFileAttributes<T>;
+ key: string;
+}
+
+interface ProcessedFile {
+ file: LocalFileAttributes<Uint8Array | EncryptedFileStream>;
+ thumbnail: LocalFileAttributes<Uint8Array>;
+ metadata: LocalFileAttributes<string>;
+ pubMagicMetadata: EncryptedMagicMetadata;
+ localID: number;
+}
+
+export interface BackupedFile {
+ file: S3FileAttributes;
+ thumbnail: S3FileAttributes;
+ metadata: MetadataFileAttributes;
+ pubMagicMetadata: EncryptedMagicMetadata;
+}
+
+export interface UploadFile extends BackupedFile {
+ collectionID: number;
+ encryptedKey: string;
+ keyDecryptionNonce: string;
+}
+
+export interface MultipartUploadURLs {
+ objectKey: string;
+ partURLs: string[];
+ completeURL: string;
+}
+
+export interface UploadURL {
+ url: string;
+ objectKey: string;
+}
+
+/**
+ * A function that can be called to obtain a "progressTracker" that then is
+ * directly fed to axios to both cancel the upload if needed, and update the
+ * progress status.
+ *
+ * Enhancement: The return value needs to be typed.
+ */
+type MakeProgressTracker = (
+ fileLocalID: number,
+ percentPerPart?: number,
+ index?: number,
+) => unknown;
+
+interface UploadResponse {
+ uploadResult: UPLOAD_RESULT;
+ uploadedFile?: EncryptedEnteFile | EnteFile;
+}
+
+/**
+ * Upload the given {@link UploadableFile}
+ *
+ * This is lower layer implementation of the upload. It is invoked by
+ * {@link UploadManager} after it has assembled all the relevant bits we need to
+ * go forth and upload.
+ */
+export const uploader = async (
+ { collection, localID, fileName, ...uploadAsset }: UploadableFile,
+ uploaderName: string,
+ existingFiles: EnteFile[],
+ parsedMetadataJSONMap: Map<string, ParsedMetadataJSON>,
+ worker: Remote<DedicatedCryptoWorker>,
+ isCFUploadProxyDisabled: boolean,
+ abortIfCancelled: () => void,
+ makeProgressTracker: MakeProgressTracker,
+): Promise<UploadResponse> => {
+ log.info(`Uploading ${fileName}`);
+ try {
+ /*
+ * We read the file four times:
+ * 1. To determine its MIME type (only needs the first few KBs).
+ * 2. To extract its metadata.
+ * 3. To calculate its hash.
+ * 4. To encrypt it.
+ *
+ * When we already have a File object, the multiple reads are fine.
+ *
+ * When we're in the context of our desktop app and have a path, it
+ * might be possible to optimize further by using `ReadableStream.tee`
+ * to perform these steps simultaneously. However, that'll require
+ * restructuring the code so that these steps run in a parallel manner
+ * (tee will not work for strictly sequential reads of large streams).
+ */
+
+ const { fileTypeInfo, fileSize, lastModifiedMs } =
+ await readAssetDetails(uploadAsset);
+
+ const maxFileSize = 4 * 1024 * 1024 * 1024; /* 4 GB */
+ if (fileSize >= maxFileSize)
+ return { uploadResult: UPLOAD_RESULT.TOO_LARGE };
+
+ abortIfCancelled();
+
+ const { metadata, publicMagicMetadata } = await extractAssetMetadata(
+ uploadAsset,
+ fileTypeInfo,
+ lastModifiedMs,
+ collection.id,
+ parsedMetadataJSONMap,
+ worker,
+ );
+
+ const matches = existingFiles.filter((file) =>
+ areFilesSame(file.metadata, metadata),
+ );
+
+ const anyMatch = matches?.length > 0 ? matches[0] : undefined;
+
+ if (anyMatch) {
+ const matchInSameCollection = matches.find(
+ (f) => f.collectionID == collection.id,
+ );
+ if (matchInSameCollection) {
+ return {
+ uploadResult: UPLOAD_RESULT.ALREADY_UPLOADED,
+ uploadedFile: matchInSameCollection,
+ };
+ } else {
+ // Any of the matching files can be used to add a symlink.
+ const symlink = Object.assign({}, anyMatch);
+ symlink.collectionID = collection.id;
+ await addToCollection(collection, [symlink]);
+ return {
+ uploadResult: UPLOAD_RESULT.ADDED_SYMLINK,
+ uploadedFile: symlink,
+ };
+ }
+ }
+
+ abortIfCancelled();
+
+ const { fileStreamOrData, thumbnail, hasStaticThumbnail } =
+ await readAsset(fileTypeInfo, uploadAsset);
+
+ if (hasStaticThumbnail) metadata.hasStaticThumbnail = true;
+
+ const pubMagicMetadata = await constructPublicMagicMetadata({
+ ...publicMagicMetadata,
+ uploaderName,
+ });
+
+ abortIfCancelled();
+
+ const fileWithMetadata: FileWithMetadata = {
+ localID,
+ fileStreamOrData,
+ thumbnail,
+ metadata,
+ pubMagicMetadata,
+ };
+
+ const encryptedFile = await encryptFile(
+ fileWithMetadata,
+ collection.key,
+ worker,
+ );
+
+ abortIfCancelled();
+
+ const backupedFile = await uploadToBucket(
+ encryptedFile.file,
+ makeProgressTracker,
+ isCFUploadProxyDisabled,
+ abortIfCancelled,
+ );
+
+ const uploadedFile = await uploadService.uploadFile({
+ collectionID: collection.id,
+ encryptedKey: encryptedFile.fileKey.encryptedData,
+ keyDecryptionNonce: encryptedFile.fileKey.nonce,
+ ...backupedFile,
+ });
+
+ return {
+ uploadResult: metadata.hasStaticThumbnail
+ ? UPLOAD_RESULT.UPLOADED_WITH_STATIC_THUMBNAIL
+ : UPLOAD_RESULT.UPLOADED,
+ uploadedFile: uploadedFile,
+ };
+ } catch (e) {
+ if (e.message == CustomError.UPLOAD_CANCELLED) {
+ log.info(`Upload for ${fileName} cancelled`);
+ } else if (e.message == CustomError.UNSUPPORTED_FILE_FORMAT) {
+ log.info(`Not uploading ${fileName}: unsupported file format`);
+ } else {
+ log.error(`Upload failed for ${fileName}`, e);
+ }
+
+ const error = handleUploadError(e);
+ switch (error.message) {
+ case CustomError.ETAG_MISSING:
+ return { uploadResult: UPLOAD_RESULT.BLOCKED };
+ case CustomError.UNSUPPORTED_FILE_FORMAT:
+ return { uploadResult: UPLOAD_RESULT.UNSUPPORTED };
+ case CustomError.FILE_TOO_LARGE:
+ return {
+ uploadResult: UPLOAD_RESULT.LARGER_THAN_AVAILABLE_STORAGE,
+ };
+ default:
+ return { uploadResult: UPLOAD_RESULT.FAILED };
+ }
+ }
+};
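
A hedged sketch of a direct invocation (the real call site is UploadManager.uploadNextFileInQueue above; all values here are illustrative, and the callbacks are no-ops):

    declare const uploadableFile: UploadableFile;
    declare const worker: Remote<DedicatedCryptoWorker>;
    declare const existingFiles: EnteFile[];
    declare const takeoutMap: Map<string, ParsedMetadataJSON>;

    const { uploadResult, uploadedFile } = await uploader(
        uploadableFile,
        "some uploader name", // uploaderName, hypothetical value
        existingFiles,
        takeoutMap,
        worker,
        false, // isCFUploadProxyDisabled
        () => {}, // abortIfCancelled: no-op for this sketch
        () => undefined, // makeProgressTracker: no progress UI here
    );
    if (uploadResult == UPLOAD_RESULT.UPLOADED) console.log(uploadedFile);
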
+
+/**
+ * Read the given file or path into an in-memory representation.
+ *
+ * [Note: Reading a fileOrPath]
+ *
+ * The file can be either a web
+ * [File](https://developer.mozilla.org/en-US/docs/Web/API/File) or the absolute
+ * path to a file on desk.
+ *
+ * tl;dr: there are three cases:
+ *
+ * 1. web / File
+ * 2. desktop / File
+ * 3. desktop / path
+ *
+ * For the when and why, read on.
+ *
+ * The code that accesses files (e.g. uploads) gets invoked in two contexts:
+ *
+ * 1. web: the normal mode, when we're running as a web app in the browser.
+ *
+ * 2. desktop: when we're running inside our desktop app.
+ *
+ * In the web context, we'll always get a File, since within the browser we
+ * cannot programmatically construct paths to or arbitrarily access files on the
+ * user's filesystem. Note that even if we had an absolute path at hand, we
+ * cannot, for security reasons, programmatically create File objects for
+ * arbitrary absolute paths on the user's local filesystem.
+ *
+ * So in the web context, this will always be a File we get as a result of an
+ * explicit user interaction (e.g. drag and drop).
+ *
+ * In the desktop context, this can be either a File or a path.
+ *
+ * 2. If the user provided us this file via some user interaction (say a
+ * drag and drop), this'll still be a File.
+ *
+ * 3. However, when running in the desktop app we have the ability to access
+ * absolute paths on the user's file system. For example, if the user asks us
+ * to watch certain folders on their disk for changes, we'll be able to pick
+ * up new images being added, and in such cases, the parameter here will be a
+ * path. Another example is when resuming a previously interrupted upload -
+ * we'll only have the path at hand in such cases, not the File object.
+ *
+ * Case 3, when we're provided a path, is simple. We don't have a choice,
+ * since we still cannot programmatically construct a File object (we can construct it
+ * on the Node.js layer, but it can't then be transferred over the IPC
+ * boundary). So all our operations use the path itself.
+ *
+ * Case 2 involves a choice on a use-case basis, since
+ *
+ * (a) unlike in the web context, such File objects also have the full path.
+ * See: [Note: File paths when running under Electron].
+ *
+ * (b) neither File nor the path is a better choice for all use cases.
+ *
+ * The advantage of the File object is that the browser has already read it into
+ * memory for us. The disadvantage comes in the case where we need to
+ * communicate with the native Node.js layer of our desktop app. Since this
+ * communication happens over IPC, the File's contents need to be serialized and
+ * copied, which is a bummer for large videos etc.
+ */
+const readFileOrPath = async (
+ fileOrPath: File | string,
+): Promise<FileStream> => {
+ let underlyingStream: ReadableStream;
+ let file: File | undefined;
+ let fileSize: number;
+ let lastModifiedMs: number;
+
+ if (fileOrPath instanceof File) {
+ file = fileOrPath;
+ underlyingStream = file.stream();
+ fileSize = file.size;
+ lastModifiedMs = file.lastModified;
+ } else {
+ const path = fileOrPath;
+ const {
+ response,
+ size,
+ lastModifiedMs: lm,
+ } = await readStream(ensureElectron(), path);
+ underlyingStream = response.body;
+ fileSize = size;
+ lastModifiedMs = lm;
+ }
+
+ const N = ENCRYPTION_CHUNK_SIZE;
+ const chunkCount = Math.ceil(fileSize / ENCRYPTION_CHUNK_SIZE);
+
+ // Pipe the underlying stream through a transformer that emits
+ // ENCRYPTION_CHUNK_SIZE-ed chunks (except the last one, which can be
+ // smaller).
+ let pending: Uint8Array | undefined;
+ const transformer = new TransformStream({
+ async transform(
+ chunk: Uint8Array,
+ controller: TransformStreamDefaultController,
+ ) {
+ let next: Uint8Array;
+ if (pending) {
+ next = new Uint8Array(pending.length + chunk.length);
+ next.set(pending);
+ next.set(chunk, pending.length);
+ pending = undefined;
+ } else {
+ next = chunk;
+ }
+ while (next.length >= N) {
+ controller.enqueue(next.slice(0, N));
+ next = next.slice(N);
+ }
+ if (next.length) pending = next;
+ },
+ flush(controller: TransformStreamDefaultController) {
+ if (pending) controller.enqueue(pending);
+ },
+ });
+
+ const stream = underlyingStream.pipeThrough(transformer);
+
+ return { stream, chunkCount, fileSize, lastModifiedMs, file };
+};
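
The same re-chunking idea as a minimal standalone sketch, with N shrunk to 4 bytes so the behaviour is easy to trace: feeding chunks of 3, 6, and 2 bytes emits chunks of 4, 4, and 3 bytes.

    const N = 4;
    let pending: Uint8Array | undefined;
    const rechunk = new TransformStream<Uint8Array, Uint8Array>({
        transform(chunk, controller) {
            let next = pending ? new Uint8Array([...pending, ...chunk]) : chunk;
            pending = undefined;
            while (next.length >= N) {
                controller.enqueue(next.slice(0, N));
                next = next.slice(N);
            }
            if (next.length) pending = next;
        },
        flush(controller) {
            if (pending) controller.enqueue(pending);
        },
    });
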
+
+interface ReadAssetDetailsResult {
+ fileTypeInfo: FileTypeInfo;
+ fileSize: number;
+ lastModifiedMs: number;
+}
+
+/**
+ * Read the file(s) to determine the type, size and last modified time of the
+ * given {@link asset}.
+ */
+const readAssetDetails = async ({
+ isLivePhoto,
+ livePhotoAssets,
+ fileOrPath,
+}: UploadAsset): Promise<ReadAssetDetailsResult> =>
+ isLivePhoto
+ ? readLivePhotoDetails(livePhotoAssets)
+ : readImageOrVideoDetails(fileOrPath);
+
+const readLivePhotoDetails = async ({ image, video }: LivePhotoAssets) => {
+ const img = await readImageOrVideoDetails(image);
+ const vid = await readImageOrVideoDetails(video);
+
+ return {
+ fileTypeInfo: {
+ fileType: FILE_TYPE.LIVE_PHOTO,
+ extension: `${img.fileTypeInfo.extension}+${vid.fileTypeInfo.extension}`,
+ imageType: img.fileTypeInfo.extension,
+ videoType: vid.fileTypeInfo.extension,
+ },
+ fileSize: img.fileSize + vid.fileSize,
+ lastModifiedMs: img.lastModifiedMs,
+ };
+};
+
+/**
+ * Read the beginning of the given file (or its path), or use its filename as a
+ * fallback, to determine its MIME type. From that, construct and return a
+ * {@link FileTypeInfo}.
+ *
+ * While we're at it, also return the size of the file, and its last modified
+ * time (expressed as epoch milliseconds).
+ *
+ * @param fileOrPath See: [Note: Reading a fileOrPath]
+ */
+const readImageOrVideoDetails = async (fileOrPath: File | string) => {
+ const { stream, fileSize, lastModifiedMs } =
+ await readFileOrPath(fileOrPath);
+
+ const fileTypeInfo = await detectFileTypeInfoFromChunk(async () => {
+ const reader = stream.getReader();
+ const chunk = ensure((await reader.read()).value);
+ await reader.cancel();
+ return chunk;
+ }, fopFileName(fileOrPath));
+
+ return { fileTypeInfo, fileSize, lastModifiedMs };
+};
+
+/**
+ * Read the entirety of a readable stream.
+ *
+ * It is not recommended to use this for large (say, multi-hundred MB) files. It
+ * is provided as a syntactic shortcut for cases where we already know that the
+ * size of the stream will be reasonable enough to be read in its entirety
+ * without us running out of memory.
+ */
+const readEntireStream = async (stream: ReadableStream) =>
+ new Uint8Array(await new Response(stream).arrayBuffer());
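
A minimal usage example, draining a small in-memory stream:

    const bytes = await readEntireStream(new Blob(["hello"]).stream());
    console.log(bytes.length); // 5
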
+
+interface ExtractAssetMetadataResult {
+ metadata: Metadata;
+ publicMagicMetadata: FilePublicMagicMetadataProps;
+}
+
+/**
+ * Compute the hash, extract EXIF or other metadata, and merge in data from the
+ * {@link parsedMetadataJSONMap} for the assets. Return the resultant metadatum.
+ */
+const extractAssetMetadata = async (
+ { isLivePhoto, fileOrPath, livePhotoAssets }: UploadAsset,
+ fileTypeInfo: FileTypeInfo,
+ lastModifiedMs: number,
+ collectionID: number,
+ parsedMetadataJSONMap: Map<string, ParsedMetadataJSON>,
+ worker: Remote<DedicatedCryptoWorker>,
+): Promise<ExtractAssetMetadataResult> =>
+ isLivePhoto
+ ? await extractLivePhotoMetadata(
+ livePhotoAssets,
+ fileTypeInfo,
+ lastModifiedMs,
+ collectionID,
+ parsedMetadataJSONMap,
+ worker,
+ )
+ : await extractImageOrVideoMetadata(
+ fileOrPath,
+ fileTypeInfo,
+ lastModifiedMs,
+ collectionID,
+ parsedMetadataJSONMap,
+ worker,
+ );
+
+const extractLivePhotoMetadata = async (
+ livePhotoAssets: LivePhotoAssets,
+ fileTypeInfo: FileTypeInfo,
+ lastModifiedMs: number,
+ collectionID: number,
+ parsedMetadataJSONMap: Map<string, ParsedMetadataJSON>,
+ worker: Remote<DedicatedCryptoWorker>,
+) => {
+ const imageFileTypeInfo: FileTypeInfo = {
+ fileType: FILE_TYPE.IMAGE,
+ extension: fileTypeInfo.imageType,
+ };
+ const { metadata: imageMetadata, publicMagicMetadata } =
+ await extractImageOrVideoMetadata(
+ livePhotoAssets.image,
+ imageFileTypeInfo,
+ lastModifiedMs,
+ collectionID,
+ parsedMetadataJSONMap,
+ worker,
+ );
+
+ const videoHash = await computeHash(livePhotoAssets.video, worker);
+
+ return {
+ metadata: {
+ ...imageMetadata,
+ title: fopFileName(livePhotoAssets.image),
+ fileType: FILE_TYPE.LIVE_PHOTO,
+ imageHash: imageMetadata.hash,
+ videoHash: videoHash,
+ hash: undefined,
+ },
+ publicMagicMetadata,
+ };
+};
+
+const extractImageOrVideoMetadata = async (
+ fileOrPath: File | string,
+ fileTypeInfo: FileTypeInfo,
+ lastModifiedMs: number,
+ collectionID: number,
+ parsedMetadataJSONMap: Map<string, ParsedMetadataJSON>,
+ worker: Remote<DedicatedCryptoWorker>,
+) => {
+ const fileName = fopFileName(fileOrPath);
+ const { fileType } = fileTypeInfo;
+
+ let extractedMetadata: ParsedExtractedMetadata;
+ if (fileType === FILE_TYPE.IMAGE) {
+ extractedMetadata =
+ (await tryExtractImageMetadata(
+ fileOrPath,
+ fileTypeInfo,
+ lastModifiedMs,
+ )) ?? NULL_EXTRACTED_METADATA;
+ } else if (fileType === FILE_TYPE.VIDEO) {
+ extractedMetadata =
+ (await tryExtractVideoMetadata(fileOrPath)) ??
+ NULL_EXTRACTED_METADATA;
+ } else {
+ throw new Error(`Unexpected file type ${fileType} for ${fileOrPath}`);
+ }
+
+ const hash = await computeHash(fileOrPath, worker);
+
+ const modificationTime = lastModifiedMs * 1000;
+ const creationTime =
+ extractedMetadata.creationTime ??
+ tryParseEpochMicrosecondsFromFileName(fileName) ??
+ modificationTime;
+
+ const metadata: Metadata = {
+ title: fileName,
+ creationTime,
+ modificationTime,
+ latitude: extractedMetadata.location.latitude,
+ longitude: extractedMetadata.location.longitude,
+ fileType,
+ hash,
+ };
+
+ const publicMagicMetadata: FilePublicMagicMetadataProps = {
+ w: extractedMetadata.width,
+ h: extractedMetadata.height,
+ };
+
+ const takeoutMetadata = matchTakeoutMetadata(
+ fileName,
+ collectionID,
+ parsedMetadataJSONMap,
+ );
+
+ if (takeoutMetadata)
+ for (const [key, value] of Object.entries(takeoutMetadata))
+ if (value) metadata[key] = value;
+
+ return { metadata, publicMagicMetadata };
+};
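
A worked example of the creation-time fallback chain above (all values hypothetical; the stored times are epoch microseconds):

    const lastModifiedMs = 1_625_000_000_000; // from the filesystem
    const modificationTime = lastModifiedMs * 1000; // milliseconds -> microseconds
    const exifCreationTime: number | undefined = undefined; // no EXIF date found
    const fromFileName = 1_625_011_200_000_000; // parsed from, say, "IMG_20210630_*"
    const creationTime = exifCreationTime ?? fromFileName ?? modificationTime;
    // => the filename-derived time wins here
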
+
+const NULL_EXTRACTED_METADATA: ParsedExtractedMetadata = {
+ location: { ...NULL_LOCATION },
+ creationTime: null,
+ width: null,
+ height: null,
+};
+
+async function tryExtractImageMetadata(
+ fileOrPath: File | string,
+ fileTypeInfo: FileTypeInfo,
+ lastModifiedMs: number,
+): Promise<ParsedExtractedMetadata | undefined> {
+ let file: File;
+ if (fileOrPath instanceof File) {
+ file = fileOrPath;
+ } else {
+ const path = fileOrPath;
+ // The library we use for extracting EXIF from images, exifr, doesn't
+ // support streams. But unlike videos, for images it is reasonable to
+ // read the entire stream into memory here.
+ const { response } = await readStream(ensureElectron(), path);
+ file = new File([await response.arrayBuffer()], basename(path), {
+ lastModified: lastModifiedMs,
+ });
+ }
+
+ try {
+ return await parseImageMetadata(file, fileTypeInfo);
+ } catch (e) {
+ log.error(`Failed to extract image metadata for ${fileOrPath}`, e);
+ return undefined;
+ }
+}
+
+const tryExtractVideoMetadata = async (fileOrPath: File | string) => {
+ try {
+ return await ffmpeg.extractVideoMetadata(fileOrPath);
+ } catch (e) {
+ log.error(`Failed to extract video metadata for ${fileOrPath}`, e);
+ return undefined;
+ }
+};
+
+const computeHash = async (
+ fileOrPath: File | string,
+ worker: Remote<DedicatedCryptoWorker>,
+) => {
+ const { stream, chunkCount } = await readFileOrPath(fileOrPath);
+ const hashState = await worker.initChunkHashing();
+
+ const streamReader = stream.getReader();
+ for (let i = 0; i < chunkCount; i++) {
+ const { done, value: chunk } = await streamReader.read();
+ if (done) throw new Error("Fewer chunks than expected");
+ await worker.hashFileChunk(hashState, Uint8Array.from(chunk));
+ }
+
+ const { done } = await streamReader.read();
+ if (!done) throw new Error("More chunks than expected");
+ return await worker.completeChunkHashing(hashState);
+};
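
A hedged usage sketch (worker acquisition is elided via declares; the chunk-hashing worker methods are the ones invoked above):

    declare const worker: Remote<DedicatedCryptoWorker>;
    declare const droppedFile: File;

    const hash = await computeHash(droppedFile, worker);
    log.info(`hash for ${droppedFile.name}: ${hash}`);
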
+
+/**
+ * Return true if the two files, as represented by their metadata, are same.
+ *
+ * Note that the metadata includes the hash of the file's contents (when
+ * available), so this also in effect compares the contents of the files, not
+ * just the "meta" information about them.
+ */
+const areFilesSame = (f: Metadata, g: Metadata) =>
+ hasFileHash(f) && hasFileHash(g)
+ ? areFilesSameHash(f, g)
+ : areFilesSameNoHash(f, g);
+
+const areFilesSameHash = (f: Metadata, g: Metadata) => {
+ if (f.fileType !== g.fileType || f.title !== g.title) {
+ return false;
+ }
+ if (f.fileType === FILE_TYPE.LIVE_PHOTO) {
+ return f.imageHash === g.imageHash && f.videoHash === g.videoHash;
+ } else {
+ return f.hash === g.hash;
+ }
+};
+
+/**
+ * Older files that were uploaded before we introduced hashing will not have
+ * hashes, so retain and use the logic we used back then for such files.
+ *
+ * Deprecation notice April 2024: Note that hashing was introduced very early
+ * (years ago), so the chance of us finding files without hashes is rare. And
+ * even in these cases, the worst that'll happen is that a duplicate file would
+ * get uploaded which can later be deduped. So we can get rid of this case at
+ * some point (e.g. the mobile app doesn't do this extra check, just uploads).
+ */
+const areFilesSameNoHash = (f: Metadata, g: Metadata) => {
+ /*
+ * The maximum difference in the creation/modification times of two similar
+ * files is set to 1 second. This is because, when uploading files via the
+ * web, browsers (and users) may have reduced the precision of file times
+ * to prevent timing attacks and fingerprinting.
+ *
+ * See:
+ * https://developer.mozilla.org/en-US/docs/Web/API/File/lastModified#reduced_time_precision
+ */
+ const oneSecond = 1e6;
+ return (
+ f.fileType == g.fileType &&
+ f.title == g.title &&
+ Math.abs(f.creationTime - g.creationTime) < oneSecond &&
+ Math.abs(f.modificationTime - g.modificationTime) < oneSecond
+ );
+};
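
Since these timestamps are epoch microseconds, 1e6 is one second. For instance (illustrative values, with the remaining Metadata fields elided), two otherwise identical files whose creation times differ by 500 ms still compare as the same:

    const f = {
        fileType: FILE_TYPE.IMAGE,
        title: "IMG_0001.jpg",
        creationTime: 1_700_000_000_000_000,
        modificationTime: 1_700_000_000_000_000,
    };
    const g = { ...f, creationTime: f.creationTime + 5e5 }; // +500 ms
    // areFilesSameNoHash(f as Metadata, g as Metadata) === true
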
+
+const readAsset = async (
+ fileTypeInfo: FileTypeInfo,
+ { isLivePhoto, fileOrPath, livePhotoAssets }: UploadAsset,
+): Promise<ThumbnailedFile> =>
+ isLivePhoto
+ ? await readLivePhoto(livePhotoAssets, fileTypeInfo)
+ : await readImageOrVideo(fileOrPath, fileTypeInfo);
+
+const readLivePhoto = async (
+ livePhotoAssets: LivePhotoAssets,
+ fileTypeInfo: FileTypeInfo,
+) => {
+ const {
+ fileStreamOrData: imageFileStreamOrData,
+ thumbnail,
+ hasStaticThumbnail,
+ } = await withThumbnail(
+ livePhotoAssets.image,
+ {
+ extension: fileTypeInfo.imageType,
+ fileType: FILE_TYPE.IMAGE,
+ },
+ await readFileOrPath(livePhotoAssets.image),
+ );
+ const videoFileStreamOrData = await readFileOrPath(livePhotoAssets.video);
+
+ // The JS zip library that encodeLivePhoto uses does not support
+ // ReadableStreams, so pass the file (blob) if we have one, otherwise read
+ // the entire stream into memory and pass the resultant data.
+ //
+ // This is a reasonable behaviour since the videos corresponding to live
+ // photos are only a couple of seconds long (we have already done a
+ // pre-flight check to ensure their size is small in `areLivePhotoAssets`).
+ const fileOrData = async (sd: FileStream | Uint8Array) => {
+ const _fs = async ({ file, stream }: FileStream) =>
+ file ? file : await readEntireStream(stream);
+ return sd instanceof Uint8Array ? sd : _fs(sd);
+ };
+
+ return {
+ fileStreamOrData: await encodeLivePhoto({
+ imageFileName: fopFileName(livePhotoAssets.image),
+ imageFileOrData: await fileOrData(imageFileStreamOrData),
+ videoFileName: fopFileName(livePhotoAssets.video),
+ videoFileOrData: await fileOrData(videoFileStreamOrData),
+ }),
+ thumbnail,
+ hasStaticThumbnail,
+ };
+};
+
+const readImageOrVideo = async (
+ fileOrPath: File | string,
+ fileTypeInfo: FileTypeInfo,
+) => {
+ const fileStream = await readFileOrPath(fileOrPath);
+ return withThumbnail(fileOrPath, fileTypeInfo, fileStream);
+};
+
+// TODO(MR): Merge with the uploader
+class ModuleState {
+ /**
+ * This will be set to true if we get an error from the Node.js side of our
+ * desktop app telling us that native image thumbnail generation is not
+ * available for the current OS/arch combination.
+ *
+ * That way, we can stop pestering it again and again (saving an IPC
+ * round-trip).
+ *
+ * Note the double negative when it is used.
+ */
+ isNativeImageThumbnailGenerationNotAvailable = false;
+}
+
+const moduleState = new ModuleState();
+
+/**
+ * Augment the given {@link dataOrStream} with thumbnail information.
+ *
+ * This is a companion method for {@link readFileOrPath}, and can be used to
+ * convert the result of {@link readFileOrPath} into an {@link ThumbnailedFile}.
+ *
+ * Note: The `fileStream` in the returned ThumbnailedFile may be different from
+ * the one passed to the function.
+ */
+const withThumbnail = async (
+ fileOrPath: File | string,
+ fileTypeInfo: FileTypeInfo,
+ fileStream: FileStream,
+): Promise<ThumbnailedFile> => {
+ let fileData: Uint8Array | undefined;
+ let thumbnail: Uint8Array | undefined;
+ let hasStaticThumbnail = false;
+
+ const electron = globalThis.electron;
+ const notAvailable =
+ fileTypeInfo.fileType == FILE_TYPE.IMAGE &&
+ moduleState.isNativeImageThumbnailGenerationNotAvailable;
+
+ // 1. Native thumbnail generation using file's path.
+ if (electron && !notAvailable) {
+ try {
+ // When running in the context of our desktop app, File paths will
+ // be absolute. See: [Note: File paths when running under Electron].
+ thumbnail = await generateThumbnailNative(
+ electron,
+ fileOrPath instanceof File ? fileOrPath["path"] : fileOrPath,
+ fileTypeInfo,
+ );
+ } catch (e) {
+ if (e.message == CustomErrorMessage.NotAvailable) {
+ moduleState.isNativeImageThumbnailGenerationNotAvailable = true;
+ } else {
+ log.error("Native thumbnail generation failed", e);
+ }
+ }
+ }
+
+ if (!thumbnail) {
+ let blob: Blob | undefined;
+ if (fileOrPath instanceof File) {
+ // 2. Browser based thumbnail generation for File (blobs).
+ blob = fileOrPath;
+ } else {
+ // 3. Browser based thumbnail generation for paths.
+ //
+ // There are two reasons why we could get here:
+ //
+ // - We're running under Electron, but thumbnail generation is not
+ // available. This is currently only a specific scenario for image
+ // files on Windows.
+ //
+ // - We're running under Electron, but the thumbnail generation
+ // otherwise failed with some exception.
+ //
+ // The fallback in this case involves reading the entire stream into
+ // memory, and passing that data across the IPC boundary in a single
+ // go (i.e. not in a streaming manner). This is risky for videos of
+ // unbounded sizes, plus that isn't the expected scenario. So
+ // instead of trying to cater for arbitrary exceptions, we only run
+ // this fallback to cover for the case where thumbnail generation
+ // was not available for an image file on Windows. If/when we add
+ // support for native thumbnailing on Windows too, this entire branch
+ // can be removed.
+
+ if (fileTypeInfo.fileType == FILE_TYPE.IMAGE) {
+ const data = await readEntireStream(fileStream.stream);
+ blob = new Blob([data]);
+
+ // The ReadableStream cannot be read twice, so use the data
+ // directly for subsequent steps.
+ fileData = data;
+ } else {
+ log.warn(
+ `Not using browser based thumbnail generation fallback for video at path ${fileOrPath}`,
+ );
+ }
+ }
+
+ try {
+ if (blob)
+ thumbnail = await generateThumbnailWeb(blob, fileTypeInfo);
+ } catch (e) {
+ log.error("Web thumbnail creation failed", e);
+ }
+ }
+
+ if (!thumbnail) {
+ thumbnail = fallbackThumbnail();
+ hasStaticThumbnail = true;
+ }
+
+ return {
+ fileStreamOrData: fileData ?? fileStream,
+ thumbnail,
+ hasStaticThumbnail,
+ };
+};
+
+const constructPublicMagicMetadata = async (
publicMagicMetadataProps: FilePublicMagicMetadataProps,
-): Promise {
+): Promise => {
const nonEmptyPublicMagicMetadataProps = getNonEmptyMagicMetadataProps(
publicMagicMetadataProps,
);
@@ -355,147 +1098,67 @@ export async function constructPublicMagicMetadata(
return null;
}
return await updateMagicMetadata(publicMagicMetadataProps);
-}
+};
-function getFileSize(file: File | ElectronFile) {
- return file.size;
-}
-
-function getFilename(file: File | ElectronFile) {
- return file.name;
-}
-
-async function readFile(
- fileTypeInfo: FileTypeInfo,
- rawFile: File | ElectronFile,
-): Promise {
- const { thumbnail, hasStaticThumbnail } = await generateThumbnail(
- rawFile,
- fileTypeInfo,
- );
- log.info(`reading file data ${getFileNameSize(rawFile)} `);
- let filedata: Uint8Array | DataStream;
- if (!(rawFile instanceof File)) {
- if (rawFile.size > MULTIPART_PART_SIZE) {
- filedata = await getElectronFileStream(
- rawFile,
- FILE_READER_CHUNK_SIZE,
- );
- } else {
- filedata = await getUint8ArrayView(rawFile);
- }
- } else if (rawFile.size > MULTIPART_PART_SIZE) {
- filedata = getFileStream(rawFile, FILE_READER_CHUNK_SIZE);
- } else {
- filedata = await getUint8ArrayView(rawFile);
- }
-
- log.info(`read file data successfully ${getFileNameSize(rawFile)} `);
-
- return {
- filedata,
- thumbnail,
- hasStaticThumbnail,
- };
-}
-
-export async function extractFileMetadata(
- worker: Remote,
- parsedMetadataJSONMap: ParsedMetadataJSONMap,
- collectionID: number,
- fileTypeInfo: FileTypeInfo,
- rawFile: File | ElectronFile,
-): Promise {
- let key = getMetadataJSONMapKeyForFile(collectionID, rawFile.name);
- let googleMetadata: ParsedMetadataJSON = parsedMetadataJSONMap.get(key);
-
- if (!googleMetadata && key.length > MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT) {
- key = getClippedMetadataJSONMapKeyForFile(collectionID, rawFile.name);
- googleMetadata = parsedMetadataJSONMap.get(key);
- }
-
- const { metadata, publicMagicMetadata } = await extractMetadata(
- worker,
- rawFile,
- fileTypeInfo,
- );
-
- for (const [key, value] of Object.entries(googleMetadata ?? {})) {
- if (!value) {
- continue;
- }
- metadata[key] = value;
- }
- return { metadata, publicMagicMetadata };
-}
-
-async function encryptFile(
- worker: Remote,
+const encryptFile = async (
file: FileWithMetadata,
encryptionKey: string,
-): Promise {
- try {
- const { key: fileKey, file: encryptedFiledata } = await encryptFiledata(
- worker,
- file.filedata,
- );
+ worker: Remote<DedicatedCryptoWorker>,
+): Promise<EncryptedFile> => {
+ const { key: fileKey, file: encryptedFiledata } = await encryptFiledata(
+ file.fileStreamOrData,
+ worker,
+ );
- const { file: encryptedThumbnail } = await worker.encryptThumbnail(
- file.thumbnail,
- fileKey,
- );
- const { file: encryptedMetadata } = await worker.encryptMetadata(
- file.metadata,
- fileKey,
- );
+ const { file: encryptedThumbnail } = await worker.encryptThumbnail(
+ file.thumbnail,
+ fileKey,
+ );
- let encryptedPubMagicMetadata: EncryptedMagicMetadata;
- if (file.pubMagicMetadata) {
- const { file: encryptedPubMagicMetadataData } =
- await worker.encryptMetadata(
- file.pubMagicMetadata.data,
- fileKey,
- );
- encryptedPubMagicMetadata = {
- version: file.pubMagicMetadata.version,
- count: file.pubMagicMetadata.count,
- data: encryptedPubMagicMetadataData.encryptedData,
- header: encryptedPubMagicMetadataData.decryptionHeader,
- };
- }
+ const { file: encryptedMetadata } = await worker.encryptMetadata(
+ file.metadata,
+ fileKey,
+ );
- const encryptedKey = await worker.encryptToB64(fileKey, encryptionKey);
-
- const result: EncryptedFile = {
- file: {
- file: encryptedFiledata,
- thumbnail: encryptedThumbnail,
- metadata: encryptedMetadata,
- pubMagicMetadata: encryptedPubMagicMetadata,
- localID: file.localID,
- },
- fileKey: encryptedKey,
+ let encryptedPubMagicMetadata: EncryptedMagicMetadata;
+ if (file.pubMagicMetadata) {
+ const { file: encryptedPubMagicMetadataData } =
+ await worker.encryptMetadata(file.pubMagicMetadata.data, fileKey);
+ encryptedPubMagicMetadata = {
+ version: file.pubMagicMetadata.version,
+ count: file.pubMagicMetadata.count,
+ data: encryptedPubMagicMetadataData.encryptedData,
+ header: encryptedPubMagicMetadataData.decryptionHeader,
};
- return result;
- } catch (e) {
- log.error("Error encrypting files", e);
- throw e;
}
-}
-async function encryptFiledata(
- worker: Remote,
- filedata: Uint8Array | DataStream,
-): Promise> {
- return isDataStream(filedata)
- ? await encryptFileStream(worker, filedata)
- : await worker.encryptFile(filedata);
-}
+ const encryptedKey = await worker.encryptToB64(fileKey, encryptionKey);
-async function encryptFileStream(
+ const result: EncryptedFile = {
+ file: {
+ file: encryptedFiledata,
+ thumbnail: encryptedThumbnail,
+ metadata: encryptedMetadata,
+ pubMagicMetadata: encryptedPubMagicMetadata,
+ localID: file.localID,
+ },
+ fileKey: encryptedKey,
+ };
+ return result;
+};
+
+const encryptFiledata = async (
+ fileStreamOrData: FileStream | Uint8Array,
worker: Remote<DedicatedCryptoWorker>,
- fileData: DataStream,
-) {
+): Promise<EncryptionResult<Uint8Array | EncryptedFileStream>> =>
+ fileStreamOrData instanceof Uint8Array
+ ? await worker.encryptFile(fileStreamOrData)
+ : await encryptFileStream(fileStreamOrData, worker);
+
+const encryptFileStream = async (
+ fileData: FileStream,
+ worker: Remote<DedicatedCryptoWorker>,
+) => {
const { stream, chunkCount } = fileData;
const fileStreamReader = stream.getReader();
const { key, decryptionHeader, pushState } =
@@ -523,184 +1186,171 @@ async function encryptFileStream(
encryptedData: { stream: encryptedFileStream, chunkCount },
},
};
-}
+};
-interface UploadResponse {
- fileUploadResult: UPLOAD_RESULT;
- uploadedFile?: EnteFile;
-}
-
-export async function uploader(
- worker: Remote,
- existingFiles: EnteFile[],
- fileWithCollection: FileWithCollection,
- uploaderName: string,
-): Promise {
- const { collection, localID, ...uploadAsset } = fileWithCollection;
- const fileNameSize = `${uploadService.getAssetName(
- fileWithCollection,
- )}_${convertBytesToHumanReadable(uploadService.getAssetSize(uploadAsset))}`;
-
- log.info(`uploader called for ${fileNameSize}`);
- UIService.setFileProgress(localID, 0);
- await sleep(0);
- let fileTypeInfo: FileTypeInfo;
- let fileSize: number;
+const uploadToBucket = async (
+ file: ProcessedFile,
+ makeProgressTracker: MakeProgressTracker,
+ isCFUploadProxyDisabled: boolean,
+ abortIfCancelled: () => void,
+): Promise<BackupedFile> => {
try {
- fileSize = uploadService.getAssetSize(uploadAsset);
- if (fileSize >= MAX_FILE_SIZE_SUPPORTED) {
- return { fileUploadResult: UPLOAD_RESULT.TOO_LARGE };
- }
- log.info(`getting filetype for ${fileNameSize}`);
- fileTypeInfo = await uploadService.getAssetFileType(uploadAsset);
- log.info(
- `got filetype for ${fileNameSize} - ${JSON.stringify(fileTypeInfo)}`,
- );
+ let fileObjectKey: string = null;
- log.info(`extracting metadata ${fileNameSize}`);
- const { metadata, publicMagicMetadata } =
- await uploadService.extractAssetMetadata(
- worker,
- uploadAsset,
- collection.id,
- fileTypeInfo,
+ const encryptedData = file.file.encryptedData;
+ if (
+ !(encryptedData instanceof Uint8Array) &&
+ encryptedData.chunkCount >= multipartChunksPerPart
+ ) {
+ // We have a stream, and it is at least multipartChunksPerPart
+ // chunks long, so use a multipart upload to upload it.
+ fileObjectKey = await uploadStreamUsingMultipart(
+ file.localID,
+ encryptedData,
+ makeProgressTracker,
+ isCFUploadProxyDisabled,
+ abortIfCancelled,
);
+ } else {
+ const data =
+ encryptedData instanceof Uint8Array
+ ? encryptedData
+ : await readEntireStream(encryptedData.stream);
- const matchingExistingFiles = findMatchingExistingFiles(
- existingFiles,
- metadata,
- );
- log.debug(
- () =>
- `matchedFileList: ${matchingExistingFiles
- .map((f) => `${f.id}-${f.metadata.title}`)
- .join(",")}`,
- );
- if (matchingExistingFiles?.length) {
- const matchingExistingFilesCollectionIDs =
- matchingExistingFiles.map((e) => e.collectionID);
- log.debug(
- () =>
- `matched file collectionIDs:${matchingExistingFilesCollectionIDs}
- and collectionID:${collection.id}`,
- );
- if (matchingExistingFilesCollectionIDs.includes(collection.id)) {
- log.info(
- `file already present in the collection , skipped upload for ${fileNameSize}`,
+ const progressTracker = makeProgressTracker(file.localID);
+ const fileUploadURL = await uploadService.getUploadURL();
+ if (!isCFUploadProxyDisabled) {
+ fileObjectKey = await UploadHttpClient.putFileV2(
+ fileUploadURL,
+ data,
+ progressTracker,
);
- const sameCollectionMatchingExistingFile =
- matchingExistingFiles.find(
- (f) => f.collectionID === collection.id,
- );
- return {
- fileUploadResult: UPLOAD_RESULT.ALREADY_UPLOADED,
- uploadedFile: sameCollectionMatchingExistingFile,
- };
} else {
- log.info(
- `same file in ${matchingExistingFilesCollectionIDs.length} collection found for ${fileNameSize} ,adding symlink`,
+ fileObjectKey = await UploadHttpClient.putFile(
+ fileUploadURL,
+ data,
+ progressTracker,
);
- // any of the matching file can used to add a symlink
- const resultFile = Object.assign({}, matchingExistingFiles[0]);
- resultFile.collectionID = collection.id;
- await addToCollection(collection, [resultFile]);
- return {
- fileUploadResult: UPLOAD_RESULT.ADDED_SYMLINK,
- uploadedFile: resultFile,
- };
}
}
- if (uploadCancelService.isUploadCancelationRequested()) {
- throw Error(CustomError.UPLOAD_CANCELLED);
- }
- log.info(`reading asset ${fileNameSize}`);
-
- const file = await uploadService.readAsset(fileTypeInfo, uploadAsset);
-
- if (file.hasStaticThumbnail) {
- metadata.hasStaticThumbnail = true;
- }
-
- const pubMagicMetadata =
- await uploadService.constructPublicMagicMetadata({
- ...publicMagicMetadata,
- uploaderName,
- });
-
- const fileWithMetadata: FileWithMetadata = {
- localID,
- filedata: file.filedata,
- thumbnail: file.thumbnail,
- metadata,
- pubMagicMetadata,
- };
-
- if (uploadCancelService.isUploadCancelationRequested()) {
- throw Error(CustomError.UPLOAD_CANCELLED);
- }
- log.info(`encryptAsset ${fileNameSize}`);
- const encryptedFile = await uploadService.encryptAsset(
- worker,
- fileWithMetadata,
- collection.key,
- );
-
- if (uploadCancelService.isUploadCancelationRequested()) {
- throw Error(CustomError.UPLOAD_CANCELLED);
- }
- log.info(`uploadToBucket ${fileNameSize}`);
- const logger: Logger = (message: string) => {
- log.info(message, `fileNameSize: ${fileNameSize}`);
- };
- const backupedFile: BackupedFile = await uploadService.uploadToBucket(
- logger,
- encryptedFile.file,
- );
-
- const uploadFile: UploadFile = uploadService.getUploadFile(
- collection,
- backupedFile,
- encryptedFile.fileKey,
- );
- log.info(`uploading file to server ${fileNameSize}`);
-
- const uploadedFile = await uploadService.uploadFile(uploadFile);
-
- log.info(`${fileNameSize} successfully uploaded`);
-
- return {
- fileUploadResult: metadata.hasStaticThumbnail
- ? UPLOAD_RESULT.UPLOADED_WITH_STATIC_THUMBNAIL
- : UPLOAD_RESULT.UPLOADED,
- uploadedFile: uploadedFile,
- };
- } catch (e) {
- log.info(`upload failed for ${fileNameSize} ,error: ${e.message}`);
- if (
- e.message !== CustomError.UPLOAD_CANCELLED &&
- e.message !== CustomError.UNSUPPORTED_FILE_FORMAT
- ) {
- log.error(
- `file upload failed - ${JSON.stringify({
- fileFormat: fileTypeInfo?.exactType,
- fileSize: convertBytesToHumanReadable(fileSize),
- })}`,
- e,
+ const thumbnailUploadURL = await uploadService.getUploadURL();
+ let thumbnailObjectKey: string = null;
+ if (!isCFUploadProxyDisabled) {
+ thumbnailObjectKey = await UploadHttpClient.putFileV2(
+ thumbnailUploadURL,
+ file.thumbnail.encryptedData,
+ null,
+ );
+ } else {
+ thumbnailObjectKey = await UploadHttpClient.putFile(
+ thumbnailUploadURL,
+ file.thumbnail.encryptedData,
+ null,
);
}
- const error = handleUploadError(e);
- switch (error.message) {
- case CustomError.ETAG_MISSING:
- return { fileUploadResult: UPLOAD_RESULT.BLOCKED };
- case CustomError.UNSUPPORTED_FILE_FORMAT:
- return { fileUploadResult: UPLOAD_RESULT.UNSUPPORTED };
- case CustomError.FILE_TOO_LARGE:
- return {
- fileUploadResult:
- UPLOAD_RESULT.LARGER_THAN_AVAILABLE_STORAGE,
- };
- default:
- return { fileUploadResult: UPLOAD_RESULT.FAILED };
+
+ const backupedFile: BackupedFile = {
+ file: {
+ decryptionHeader: file.file.decryptionHeader,
+ objectKey: fileObjectKey,
+ },
+ thumbnail: {
+ decryptionHeader: file.thumbnail.decryptionHeader,
+ objectKey: thumbnailObjectKey,
+ },
+ metadata: file.metadata,
+ pubMagicMetadata: file.pubMagicMetadata,
+ };
+ return backupedFile;
+ } catch (e) {
+ if (e.message !== CustomError.UPLOAD_CANCELLED) {
+ log.error("Error when uploading to bucket", e);
+ }
+ throw e;
+ }
+};
+
+interface PartEtag {
+ PartNumber: number;
+ ETag: string;
+}
+
+async function uploadStreamUsingMultipart(
+ fileLocalID: number,
+ dataStream: EncryptedFileStream,
+ makeProgressTracker: MakeProgressTracker,
+ isCFUploadProxyDisabled: boolean,
+ abortIfCancelled: () => void,
+) {
+ const uploadPartCount = Math.ceil(
+ dataStream.chunkCount / multipartChunksPerPart,
+ );
+ const multipartUploadURLs =
+ await uploadService.fetchMultipartUploadURLs(uploadPartCount);
+
+ const { stream } = dataStream;
+
+ const streamReader = stream.getReader();
+ const percentPerPart =
+ RANDOM_PERCENTAGE_PROGRESS_FOR_PUT() / uploadPartCount;
+ const partEtags: PartEtag[] = [];
+ for (const [
+ index,
+ fileUploadURL,
+ ] of multipartUploadURLs.partURLs.entries()) {
+ abortIfCancelled();
+
+ const uploadChunk = await combineChunksToFormUploadPart(streamReader);
+ const progressTracker = makeProgressTracker(
+ fileLocalID,
+ percentPerPart,
+ index,
+ );
+ let eTag = null;
+ if (!isCFUploadProxyDisabled) {
+ eTag = await UploadHttpClient.putFilePartV2(
+ fileUploadURL,
+ uploadChunk,
+ progressTracker,
+ );
+ } else {
+ eTag = await UploadHttpClient.putFilePart(
+ fileUploadURL,
+ uploadChunk,
+ progressTracker,
+ );
+ }
+ partEtags.push({ PartNumber: index + 1, ETag: eTag });
+ }
+ const { done } = await streamReader.read();
+ if (!done) throw new Error("More chunks than expected");
+
+ const completeURL = multipartUploadURLs.completeURL;
+ const cBody = convert.js2xml(
+ { CompleteMultipartUpload: { Part: partEtags } },
+ { compact: true, ignoreComment: true, spaces: 4 },
+ );
+ if (!isCFUploadProxyDisabled) {
+ await UploadHttpClient.completeMultipartUploadV2(completeURL, cBody);
+ } else {
+ await UploadHttpClient.completeMultipartUpload(completeURL, cBody);
+ }
+
+ return multipartUploadURLs.objectKey;
+}
+
+async function combineChunksToFormUploadPart(
+ streamReader: ReadableStreamDefaultReader<Uint8Array>,
+) {
+ const combinedChunks = [];
+ for (let i = 0; i < multipartChunksPerPart; i++) {
+ const { done, value: chunk } = await streamReader.read();
+ if (done) {
+ break;
+ }
+ for (let index = 0; index < chunk.length; index++) {
+ combinedChunks.push(chunk[index]);
}
}
+ return Uint8Array.from(combinedChunks);
}
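(Aside: a minimal TypeScript sketch of the part-count arithmetic used by the
multipart path above. The value given to multipartChunksPerPart here is
assumed purely for illustration; the code in this diff imports the real
constant.)

    // Each upload part carries up to multipartChunksPerPart encryption
    // chunks, so the part count is the chunk count over that, rounded up.
    const multipartChunksPerPart = 5; // illustrative value only

    const partCountFor = (chunkCount: number): number =>
        Math.ceil(chunkCount / multipartChunksPerPart);

    console.log(partCountFor(12)); // 3 (two full parts of 5 chunks, then one with 2)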
diff --git a/web/apps/photos/src/services/wasm/ffmpeg.ts b/web/apps/photos/src/services/wasm/ffmpeg.ts
deleted file mode 100644
index 10c5a5c05..000000000
--- a/web/apps/photos/src/services/wasm/ffmpeg.ts
+++ /dev/null
@@ -1,115 +0,0 @@
-import log from "@/next/log";
-import { promiseWithTimeout } from "@ente/shared/utils";
-import QueueProcessor from "@ente/shared/utils/queueProcessor";
-import { generateTempName } from "@ente/shared/utils/temp";
-import { createFFmpeg, FFmpeg } from "ffmpeg-wasm";
-import { getUint8ArrayView } from "services/readerService";
-
-const INPUT_PATH_PLACEHOLDER = "INPUT";
-const FFMPEG_PLACEHOLDER = "FFMPEG";
-const OUTPUT_PATH_PLACEHOLDER = "OUTPUT";
-
-const FFMPEG_EXECUTION_WAIT_TIME = 30 * 1000;
-
-export class WasmFFmpeg {
- private ffmpeg: FFmpeg;
- private ready: Promise<void> = null;
- private ffmpegTaskQueue = new QueueProcessor();
-
- constructor() {
- this.ffmpeg = createFFmpeg({
- corePath: "/js/ffmpeg/ffmpeg-core.js",
- mt: false,
- });
-
- this.ready = this.init();
- }
-
- private async init() {
- if (!this.ffmpeg.isLoaded()) {
- await this.ffmpeg.load();
- }
- }
-
- async run(
- cmd: string[],
- inputFile: File,
- outputFileName: string,
- dontTimeout = false,
- ) {
- const response = this.ffmpegTaskQueue.queueUpRequest(() => {
- if (dontTimeout) {
- return this.execute(cmd, inputFile, outputFileName);
- } else {
- return promiseWithTimeout(
- this.execute(cmd, inputFile, outputFileName),
- FFMPEG_EXECUTION_WAIT_TIME,
- );
- }
- });
- try {
- return await response.promise;
- } catch (e) {
- log.error("ffmpeg run failed", e);
- throw e;
- }
- }
-
- private async execute(
- cmd: string[],
- inputFile: File,
- outputFileName: string,
- ) {
- let tempInputFilePath: string;
- let tempOutputFilePath: string;
- try {
- await this.ready;
- const extension = getFileExtension(inputFile.name);
- const tempNameSuffix = extension ? `input.${extension}` : "input";
- tempInputFilePath = `${generateTempName(10, tempNameSuffix)}`;
- this.ffmpeg.FS(
- "writeFile",
- tempInputFilePath,
- await getUint8ArrayView(inputFile),
- );
- tempOutputFilePath = `${generateTempName(10, outputFileName)}`;
-
- cmd = cmd.map((cmdPart) => {
- if (cmdPart === FFMPEG_PLACEHOLDER) {
- return "";
- } else if (cmdPart === INPUT_PATH_PLACEHOLDER) {
- return tempInputFilePath;
- } else if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
- return tempOutputFilePath;
- } else {
- return cmdPart;
- }
- });
- log.info(`${cmd}`);
- await this.ffmpeg.run(...cmd);
- return new File(
- [this.ffmpeg.FS("readFile", tempOutputFilePath)],
- outputFileName,
- );
- } finally {
- try {
- this.ffmpeg.FS("unlink", tempInputFilePath);
- } catch (e) {
- log.error("unlink input file failed", e);
- }
- try {
- this.ffmpeg.FS("unlink", tempOutputFilePath);
- } catch (e) {
- log.error("unlink output file failed", e);
- }
- }
- }
-}
-
-function getFileExtension(filename: string) {
- const lastDotPosition = filename.lastIndexOf(".");
- if (lastDotPosition === -1) return null;
- else {
- return filename.slice(lastDotPosition + 1);
- }
-}
diff --git a/web/apps/photos/src/services/watch.ts b/web/apps/photos/src/services/watch.ts
index 77467a497..4de5881aa 100644
--- a/web/apps/photos/src/services/watch.ts
+++ b/web/apps/photos/src/services/watch.ts
@@ -11,14 +11,15 @@ import type {
FolderWatch,
FolderWatchSyncedFile,
} from "@/next/types/ipc";
+import { ensureString } from "@/utils/ensure";
import { UPLOAD_RESULT } from "constants/upload";
import debounce from "debounce";
-import uploadManager from "services/upload/uploadManager";
+import uploadManager, {
+ type FileWithCollection,
+} from "services/upload/uploadManager";
import { Collection } from "types/collection";
import { EncryptedEnteFile } from "types/file";
-import { ElectronFile, FileWithCollection } from "types/upload";
import { groupFilesBasedOnCollectionID } from "utils/file";
-import { isHiddenFile } from "utils/upload";
import { removeFromCollection } from "./collectionService";
import { getLocalFiles } from "./fileService";
@@ -44,7 +45,16 @@ class FolderWatcher {
private uploadRunning = false;
/** `true` if we are temporarily paused to let a user upload go through. */
private isPaused = false;
- private filePathToUploadedFileIDMap = new Map<string, EncryptedEnteFile>();
+ /**
+ * A map from file paths to an Ente file for files that were uploaded (or
+ * symlinked) as part of the most recent upload attempt.
+ */
+ private uploadedFileForPath = new Map<string, EncryptedEnteFile>();
+ /**
+ * A set of file paths that could not be uploaded in the most recent upload
+ * attempt. These are the uploads that failed due to a permanent error that
+ * a retry will not fix.
+ */
 private unUploadableFilePaths = new Set<string>();
/**
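(Aside: an illustrative sketch of how these two structures are used across
one upload cycle; the names are simplified stand-ins, the real fields live
on the FolderWatcher instance.)

    // As uploads finish, each path gets recorded in exactly one place.
    const uploadedFileForPath = new Map<string, { id: number }>();
    const unUploadableFilePaths = new Set<string>();

    uploadedFileForPath.set("/watched/a.jpg", { id: 42 }); // uploaded
    unUploadableFilePaths.add("/watched/b.bin"); // permanent failure

    // Once all uploads are done, paths in the map get marked as synced and
    // paths in the set get marked as ignored; both entries are deleted as
    // they are consumed, so the structures are empty for the next run.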
@@ -315,6 +325,8 @@ class FolderWatcher {
fileWithCollection: FileWithCollection,
file: EncryptedEnteFile,
) {
+ // The files we get here will have fileWithCollection.fileOrPath as a
+ // string (the path on disk), not as a File or an ElectronFile.
if (
[
UPLOAD_RESULT.ADDED_SYMLINK,
@@ -324,19 +336,17 @@ class FolderWatcher {
].includes(fileUploadResult)
) {
if (fileWithCollection.isLivePhoto) {
- this.filePathToUploadedFileIDMap.set(
- (fileWithCollection.livePhotoAssets.image as ElectronFile)
- .path,
+ this.uploadedFileForPath.set(
+ ensureString(fileWithCollection.livePhotoAssets.image),
file,
);
- this.filePathToUploadedFileIDMap.set(
- (fileWithCollection.livePhotoAssets.video as ElectronFile)
- .path,
+ this.uploadedFileForPath.set(
+ ensureString(fileWithCollection.livePhotoAssets.video),
file,
);
} else {
- this.filePathToUploadedFileIDMap.set(
- (fileWithCollection.file as ElectronFile).path,
+ this.uploadedFileForPath.set(
+ ensureString(fileWithCollection.fileOrPath),
file,
);
}
@@ -347,16 +357,14 @@ class FolderWatcher {
) {
if (fileWithCollection.isLivePhoto) {
this.unUploadableFilePaths.add(
- (fileWithCollection.livePhotoAssets.image as ElectronFile)
- .path,
+ ensureString(fileWithCollection.livePhotoAssets.image),
);
this.unUploadableFilePaths.add(
- (fileWithCollection.livePhotoAssets.video as ElectronFile)
- .path,
+ ensureString(fileWithCollection.livePhotoAssets.video),
);
} else {
this.unUploadableFilePaths.add(
- (fileWithCollection.file as ElectronFile).path,
+ ensureString(fileWithCollection.fileOrPath),
);
}
}
@@ -383,15 +391,7 @@ class FolderWatcher {
);
const { syncedFiles, ignoredFiles } =
- this.parseAllFileUploadsDone(filesWithCollection);
-
- log.debug(() =>
- JSON.stringify({
- f: "watch/allFileUploadsDone",
- syncedFiles,
- ignoredFiles,
- }),
- );
+ this.deduceSyncedAndIgnored(filesWithCollection);
if (syncedFiles.length > 0)
await electron.watch.updateSyncedFiles(
@@ -411,81 +411,55 @@ class FolderWatcher {
this.debouncedRunNextEvent();
}
- private parseAllFileUploadsDone(filesWithCollection: FileWithCollection[]) {
+ private deduceSyncedAndIgnored(filesWithCollection: FileWithCollection[]) {
const syncedFiles: FolderWatch["syncedFiles"] = [];
const ignoredFiles: FolderWatch["ignoredFiles"] = [];
+ const markSynced = (file: EncryptedEnteFile, path: string) => {
+ syncedFiles.push({
+ path,
+ uploadedFileID: file.id,
+ collectionID: file.collectionID,
+ });
+ this.uploadedFileForPath.delete(path);
+ };
+
+ const markIgnored = (path: string) => {
+ log.debug(() => `Permanently ignoring file at ${path}`);
+ ignoredFiles.push(path);
+ this.unUploadableFilePaths.delete(path);
+ };
+
for (const fileWithCollection of filesWithCollection) {
if (fileWithCollection.isLivePhoto) {
- const imagePath = (
- fileWithCollection.livePhotoAssets.image as ElectronFile
- ).path;
- const videoPath = (
- fileWithCollection.livePhotoAssets.video as ElectronFile
- ).path;
+ const imagePath = ensureString(
+ fileWithCollection.livePhotoAssets.image,
+ );
+ const videoPath = ensureString(
+ fileWithCollection.livePhotoAssets.video,
+ );
- if (
- this.filePathToUploadedFileIDMap.has(imagePath) &&
- this.filePathToUploadedFileIDMap.has(videoPath)
- ) {
- const imageFile = {
- path: imagePath,
- uploadedFileID:
- this.filePathToUploadedFileIDMap.get(imagePath).id,
- collectionID:
- this.filePathToUploadedFileIDMap.get(imagePath)
- .collectionID,
- };
- const videoFile = {
- path: videoPath,
- uploadedFileID:
- this.filePathToUploadedFileIDMap.get(videoPath).id,
- collectionID:
- this.filePathToUploadedFileIDMap.get(videoPath)
- .collectionID,
- };
- syncedFiles.push(imageFile);
- syncedFiles.push(videoFile);
- log.debug(
- () =>
- `added image ${JSON.stringify(
- imageFile,
- )} and video file ${JSON.stringify(
- videoFile,
- )} to uploadedFiles`,
- );
+ const imageFile = this.uploadedFileForPath.get(imagePath);
+ const videoFile = this.uploadedFileForPath.get(videoPath);
+
+ if (imageFile && videoFile) {
+ markSynced(imageFile, imagePath);
+ markSynced(videoFile, videoPath);
} else if (
this.unUploadableFilePaths.has(imagePath) &&
this.unUploadableFilePaths.has(videoPath)
) {
- ignoredFiles.push(imagePath);
- ignoredFiles.push(videoPath);
- log.debug(
- () =>
- `added image ${imagePath} and video file ${videoPath} to rejectedFiles`,
- );
+ markIgnored(imagePath);
+ markIgnored(videoPath);
}
- this.filePathToUploadedFileIDMap.delete(imagePath);
- this.filePathToUploadedFileIDMap.delete(videoPath);
} else {
- const filePath = (fileWithCollection.file as ElectronFile).path;
-
- if (this.filePathToUploadedFileIDMap.has(filePath)) {
- const file = {
- path: filePath,
- uploadedFileID:
- this.filePathToUploadedFileIDMap.get(filePath).id,
- collectionID:
- this.filePathToUploadedFileIDMap.get(filePath)
- .collectionID,
- };
- syncedFiles.push(file);
- log.debug(() => `added file ${JSON.stringify(file)}`);
- } else if (this.unUploadableFilePaths.has(filePath)) {
- ignoredFiles.push(filePath);
- log.debug(() => `added file ${filePath} to rejectedFiles`);
+ const path = ensureString(fileWithCollection.fileOrPath);
+ const file = this.uploadedFileForPath.get(path);
+ if (file) {
+ markSynced(file, path);
+ } else if (this.unUploadableFilePaths.has(path)) {
+ markIgnored(path);
}
- this.filePathToUploadedFileIDMap.delete(filePath);
}
}
@@ -621,6 +595,13 @@ const pathsToUpload = (paths: string[], watch: FolderWatch) =>
// Files that are on disk but not yet synced or ignored.
.filter((path) => !isSyncedOrIgnoredPath(path, watch));
+/**
+ * Return true if the file at the given {@link path} is hidden.
+ *
+ * Hidden files are those whose names begin with a "." (dot).
+ */
+const isHiddenFile = (path: string) => basename(path).startsWith(".");
+
/**
* Return the paths to previously synced files that are no longer on disk and so
* must be removed from the Ente collection.
@@ -636,7 +617,7 @@ const isSyncedOrIgnoredPath = (path: string, watch: FolderWatch) =>
const collectionNameForPath = (path: string, watch: FolderWatch) =>
watch.collectionMapping == "root"
- ? dirname(watch.folderPath)
+ ? basename(watch.folderPath)
: parentDirectoryName(path);
const parentDirectoryName = (path: string) => basename(dirname(path));
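(Aside: a self-contained sketch of the two collection mapping modes handled
above, with inline stand-ins for the basename/dirname helpers that the real
code imports from "@/next/file". It also shows why the fix from dirname to
basename matters for the "root" mapping.)

    const basename = (path: string) => path.split("/").pop();
    const dirname = (path: string) => path.split("/").slice(0, -1).join("/");

    const folderPath = "/home/u/Photos";
    const filePath = "/home/u/Photos/Trip/1.jpg";

    // "root" mapping: every file goes into a single collection named after
    // the watched folder itself.
    console.log(basename(folderPath)); // "Photos" (dirname gave "/home/u")

    // Otherwise: each file goes into a collection named after its immediate
    // parent directory.
    console.log(basename(dirname(filePath))); // "Trip"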
diff --git a/web/apps/photos/src/types/entity.ts b/web/apps/photos/src/types/entity.ts
index 9580bf333..60844ce46 100644
--- a/web/apps/photos/src/types/entity.ts
+++ b/web/apps/photos/src/types/entity.ts
@@ -1,4 +1,4 @@
-import { Location } from "types/upload";
+import { Location } from "types/metadata";
export enum EntityType {
LOCATION_TAG = "location",
diff --git a/web/apps/photos/src/types/file/index.ts b/web/apps/photos/src/types/file/index.ts
index 2991e1f8b..c3d4cca44 100644
--- a/web/apps/photos/src/types/file/index.ts
+++ b/web/apps/photos/src/types/file/index.ts
@@ -1,10 +1,10 @@
+import type { Metadata } from "@/media/types/file";
import { SourceURLs } from "services/download";
import {
EncryptedMagicMetadata,
MagicMetadataCore,
VISIBILITY_STATE,
} from "types/magicMetadata";
-import { Metadata } from "types/upload";
export interface MetadataFileAttributes {
encryptedData: string;
diff --git a/web/apps/photos/src/types/metadata.ts b/web/apps/photos/src/types/metadata.ts
new file mode 100644
index 000000000..7994e6247
--- /dev/null
+++ b/web/apps/photos/src/types/metadata.ts
@@ -0,0 +1,11 @@
+export interface Location {
+ latitude: number;
+ longitude: number;
+}
+
+export interface ParsedExtractedMetadata {
+ location: Location;
+ creationTime: number;
+ width: number;
+ height: number;
+}
diff --git a/web/apps/photos/src/types/search/index.ts b/web/apps/photos/src/types/search/index.ts
index 29a1cffef..cf50f4a06 100644
--- a/web/apps/photos/src/types/search/index.ts
+++ b/web/apps/photos/src/types/search/index.ts
@@ -1,4 +1,4 @@
-import { FILE_TYPE } from "constants/file";
+import { FILE_TYPE } from "@/media/file-type";
import { City } from "services/locationSearchService";
import { LocationTagData } from "types/entity";
import { EnteFile } from "types/file";
diff --git a/web/apps/photos/src/types/upload/index.ts b/web/apps/photos/src/types/upload/index.ts
deleted file mode 100644
index 35164ec47..000000000
--- a/web/apps/photos/src/types/upload/index.ts
+++ /dev/null
@@ -1,168 +0,0 @@
-import {
- B64EncryptionResult,
- LocalFileAttributes,
-} from "@ente/shared/crypto/types";
-import { FILE_TYPE } from "constants/file";
-import { Collection } from "types/collection";
-import {
- FilePublicMagicMetadata,
- FilePublicMagicMetadataProps,
- MetadataFileAttributes,
- S3FileAttributes,
-} from "types/file";
-import { EncryptedMagicMetadata } from "types/magicMetadata";
-
-export interface DataStream {
- stream: ReadableStream;
- chunkCount: number;
-}
-
-export function isDataStream(object: any): object is DataStream {
- return "stream" in object;
-}
-
-export type Logger = (message: string) => void;
-
-export interface Metadata {
- /**
- * The file name.
- *
- * See: [Note: File name for local EnteFile objects]
- */
- title: string;
- creationTime: number;
- modificationTime: number;
- latitude: number;
- longitude: number;
- fileType: FILE_TYPE;
- hasStaticThumbnail?: boolean;
- hash?: string;
- imageHash?: string;
- videoHash?: string;
- localID?: number;
- version?: number;
- deviceFolder?: string;
-}
-
-export interface Location {
- latitude: number;
- longitude: number;
-}
-
-export interface ParsedMetadataJSON {
- creationTime: number;
- modificationTime: number;
- latitude: number;
- longitude: number;
-}
-
-export interface MultipartUploadURLs {
- objectKey: string;
- partURLs: string[];
- completeURL: string;
-}
-
-export interface FileTypeInfo {
- fileType: FILE_TYPE;
- exactType: string;
- mimeType?: string;
- imageType?: string;
- videoType?: string;
-}
-
-/*
- * ElectronFile is a custom interface that is used to represent
- * any file on disk as a File-like object in the Electron desktop app.
- *
- * This was added to support the auto-resuming of failed uploads
- * which needed absolute paths to the files which the
- * normal File interface does not provide.
- */
-export interface ElectronFile {
- name: string;
- path: string;
- size: number;
- lastModified: number;
- stream: () => Promise<ReadableStream<Uint8Array>>;
- blob: () => Promise<Blob>;
- arrayBuffer: () => Promise<ArrayBuffer>;
-}
-
-export interface UploadAsset {
- isLivePhoto?: boolean;
- file?: File | ElectronFile;
- livePhotoAssets?: LivePhotoAssets;
-}
-
-export interface LivePhotoAssets {
- image: globalThis.File | ElectronFile;
- video: globalThis.File | ElectronFile;
-}
-
-export interface FileWithCollection extends UploadAsset {
- localID: number;
- collection?: Collection;
- collectionID?: number;
-}
-
-export type ParsedMetadataJSONMap = Map<string, ParsedMetadataJSON>;
-
-export interface UploadURL {
- url: string;
- objectKey: string;
-}
-
-export interface FileInMemory {
- filedata: Uint8Array | DataStream;
- thumbnail: Uint8Array;
- hasStaticThumbnail: boolean;
-}
-
-export interface FileWithMetadata
- extends Omit<FileInMemory, "hasStaticThumbnail"> {
- metadata: Metadata;
- localID: number;
- pubMagicMetadata: FilePublicMagicMetadata;
-}
-
-export interface EncryptedFile {
- file: ProcessedFile;
- fileKey: B64EncryptionResult;
-}
-export interface ProcessedFile {
- file: LocalFileAttributes<Uint8Array | DataStream>;
- thumbnail: LocalFileAttributes<Uint8Array>;
- metadata: LocalFileAttributes<string>;
- pubMagicMetadata: EncryptedMagicMetadata;
- localID: number;
-}
-export interface BackupedFile {
- file: S3FileAttributes;
- thumbnail: S3FileAttributes;
- metadata: MetadataFileAttributes;
- pubMagicMetadata: EncryptedMagicMetadata;
-}
-
-export interface UploadFile extends BackupedFile {
- collectionID: number;
- encryptedKey: string;
- keyDecryptionNonce: string;
-}
-
-export interface ParsedExtractedMetadata {
- location: Location;
- creationTime: number;
- width: number;
- height: number;
-}
-
-export interface PublicUploadProps {
- token: string;
- passwordToken: string;
- accessedThroughSharedURL: boolean;
-}
-
-export interface ExtractMetadataResult {
- metadata: Metadata;
- publicMagicMetadata: FilePublicMagicMetadataProps;
-}
diff --git a/web/apps/photos/src/types/upload/ui.ts b/web/apps/photos/src/types/upload/ui.ts
deleted file mode 100644
index bce381213..000000000
--- a/web/apps/photos/src/types/upload/ui.ts
+++ /dev/null
@@ -1,43 +0,0 @@
-import { UPLOAD_RESULT, UPLOAD_STAGES } from "constants/upload";
-
-export type FileID = number;
-export type FileName = string;
-
-export type PercentageUploaded = number;
-export type UploadFileNames = Map<FileID, FileName>;
-
-export interface UploadCounter {
- finished: number;
- total: number;
-}
-
-export interface InProgressUpload {
- localFileID: FileID;
- progress: PercentageUploaded;
-}
-
-export interface FinishedUpload {
- localFileID: FileID;
- result: UPLOAD_RESULT;
-}
-
-export type InProgressUploads = Map<FileID, PercentageUploaded>;
-
-export type FinishedUploads = Map<FileID, UPLOAD_RESULT>;
-
-export type SegregatedFinishedUploads = Map<UPLOAD_RESULT, FileID[]>;
-
-export interface ProgressUpdater {
- setPercentComplete: React.Dispatch<React.SetStateAction<number>>;
- setUploadCounter: React.Dispatch<React.SetStateAction<UploadCounter>>;
- setUploadStage: React.Dispatch<React.SetStateAction<UPLOAD_STAGES>>;
- setInProgressUploads: React.Dispatch<
- React.SetStateAction<InProgressUpload[]>
- >;
- setFinishedUploads: React.Dispatch<
- React.SetStateAction<SegregatedFinishedUploads>
- >;
- setUploadFilenames: React.Dispatch<React.SetStateAction<UploadFileNames>>;
- setHasLivePhotos: React.Dispatch<React.SetStateAction<boolean>>;
- setUploadProgressView: React.Dispatch<React.SetStateAction<boolean>>;
-}
diff --git a/web/apps/photos/src/utils/comlink/ComlinkConvertWorker.ts b/web/apps/photos/src/utils/comlink/ComlinkConvertWorker.ts
deleted file mode 100644
index 860317158..000000000
--- a/web/apps/photos/src/utils/comlink/ComlinkConvertWorker.ts
+++ /dev/null
@@ -1,30 +0,0 @@
-import { haveWindow } from "@/next/env";
-import { ComlinkWorker } from "@/next/worker/comlink-worker";
-import { Remote } from "comlink";
-import { DedicatedConvertWorker } from "worker/convert.worker";
-
-class ComlinkConvertWorker {
- private comlinkWorkerInstance: Remote<DedicatedConvertWorker>;
-
- async getInstance() {
- if (!this.comlinkWorkerInstance) {
- this.comlinkWorkerInstance =
- await getDedicatedConvertWorker().remote;
- }
- return this.comlinkWorkerInstance;
- }
-}
-
-export const getDedicatedConvertWorker = () => {
- if (haveWindow()) {
- const cryptoComlinkWorker = new ComlinkWorker<
- typeof DedicatedConvertWorker
- >(
- "ente-convert-worker",
- new Worker(new URL("worker/convert.worker.ts", import.meta.url)),
- );
- return cryptoComlinkWorker;
- }
-};
-
-export default new ComlinkConvertWorker();
diff --git a/web/apps/photos/src/utils/comlink/ComlinkFFmpegWorker.ts b/web/apps/photos/src/utils/comlink/ComlinkFFmpegWorker.ts
deleted file mode 100644
index 29d19d6fa..000000000
--- a/web/apps/photos/src/utils/comlink/ComlinkFFmpegWorker.ts
+++ /dev/null
@@ -1,25 +0,0 @@
-import { ComlinkWorker } from "@/next/worker/comlink-worker";
-import { Remote } from "comlink";
-import { DedicatedFFmpegWorker } from "worker/ffmpeg.worker";
-
-class ComlinkFFmpegWorker {
- private comlinkWorkerInstance: Promise<Remote<DedicatedFFmpegWorker>>;
-
- async getInstance() {
- if (!this.comlinkWorkerInstance) {
- const comlinkWorker = getDedicatedFFmpegWorker();
- this.comlinkWorkerInstance = comlinkWorker.remote;
- }
- return this.comlinkWorkerInstance;
- }
-}
-
-const getDedicatedFFmpegWorker = () => {
- const cryptoComlinkWorker = new ComlinkWorker<typeof DedicatedFFmpegWorker>(
- "ente-ffmpeg-worker",
- new Worker(new URL("worker/ffmpeg.worker.ts", import.meta.url)),
- );
- return cryptoComlinkWorker;
-};
-
-export default new ComlinkFFmpegWorker();
diff --git a/web/apps/photos/src/utils/comlink/ComlinkMLWorker.ts b/web/apps/photos/src/utils/comlink/ComlinkMLWorker.ts
index c1ed53f7b..f312a2c5c 100644
--- a/web/apps/photos/src/utils/comlink/ComlinkMLWorker.ts
+++ b/web/apps/photos/src/utils/comlink/ComlinkMLWorker.ts
@@ -1,6 +1,6 @@
import { haveWindow } from "@/next/env";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
-import { DedicatedMLWorker } from "worker/ml.worker";
+import { type DedicatedMLWorker } from "worker/ml.worker";
export const getDedicatedMLWorker = (name: string) => {
if (haveWindow()) {
diff --git a/web/apps/photos/src/utils/comlink/ComlinkSearchWorker.ts b/web/apps/photos/src/utils/comlink/ComlinkSearchWorker.ts
index bc6506605..4886bacda 100644
--- a/web/apps/photos/src/utils/comlink/ComlinkSearchWorker.ts
+++ b/web/apps/photos/src/utils/comlink/ComlinkSearchWorker.ts
@@ -1,7 +1,7 @@
import { haveWindow } from "@/next/env";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { Remote } from "comlink";
-import { DedicatedSearchWorker } from "worker/search.worker";
+import { type DedicatedSearchWorker } from "worker/search.worker";
class ComlinkSearchWorker {
 private comlinkWorkerInstance: Remote<DedicatedSearchWorker>;
diff --git a/web/apps/photos/src/utils/ffmpeg/index.ts b/web/apps/photos/src/utils/ffmpeg/index.ts
deleted file mode 100644
index 8a4332a7f..000000000
--- a/web/apps/photos/src/utils/ffmpeg/index.ts
+++ /dev/null
@@ -1,67 +0,0 @@
-import { validateAndGetCreationUnixTimeInMicroSeconds } from "@ente/shared/time";
-import { NULL_LOCATION } from "constants/upload";
-import { ParsedExtractedMetadata } from "types/upload";
-
-enum MetadataTags {
- CREATION_TIME = "creation_time",
- APPLE_CONTENT_IDENTIFIER = "com.apple.quicktime.content.identifier",
- APPLE_LIVE_PHOTO_IDENTIFIER = "com.apple.quicktime.live-photo.auto",
- APPLE_CREATION_DATE = "com.apple.quicktime.creationdate",
- APPLE_LOCATION_ISO = "com.apple.quicktime.location.ISO6709",
- LOCATION = "location",
-}
-
-export function parseFFmpegExtractedMetadata(encodedMetadata: Uint8Array) {
- const metadataString = new TextDecoder().decode(encodedMetadata);
- const metadataPropertyArray = metadataString.split("\n");
- const metadataKeyValueArray = metadataPropertyArray.map((property) =>
- property.split("="),
- );
- const validKeyValuePairs = metadataKeyValueArray.filter(
- (keyValueArray) => keyValueArray.length === 2,
- ) as Array<[string, string]>;
-
- const metadataMap = Object.fromEntries(validKeyValuePairs);
-
- const location = parseAppleISOLocation(
- metadataMap[MetadataTags.APPLE_LOCATION_ISO] ??
- metadataMap[MetadataTags.LOCATION],
- );
-
- const creationTime = parseCreationTime(
- metadataMap[MetadataTags.APPLE_CREATION_DATE] ??
- metadataMap[MetadataTags.CREATION_TIME],
- );
- const parsedMetadata: ParsedExtractedMetadata = {
- creationTime,
- location: {
- latitude: location.latitude,
- longitude: location.longitude,
- },
- width: null,
- height: null,
- };
- return parsedMetadata;
-}
-
-function parseAppleISOLocation(isoLocation: string) {
- let location = NULL_LOCATION;
- if (isoLocation) {
- const [latitude, longitude] = isoLocation
- .match(/(\+|-)\d+\.*\d+/g)
- .map((x) => parseFloat(x));
-
- location = { latitude, longitude };
- }
- return location;
-}
-
-function parseCreationTime(creationTime: string) {
- let dateTime = null;
- if (creationTime) {
- dateTime = validateAndGetCreationUnixTimeInMicroSeconds(
- new Date(creationTime),
- );
- }
- return dateTime;
-}
diff --git a/web/apps/photos/src/utils/file/index.ts b/web/apps/photos/src/utils/file/index.ts
index cc3ddc5e1..5d7762abf 100644
--- a/web/apps/photos/src/utils/file/index.ts
+++ b/web/apps/photos/src/utils/file/index.ts
@@ -1,40 +1,26 @@
+import { FILE_TYPE } from "@/media/file-type";
import { decodeLivePhoto } from "@/media/live-photo";
-import { convertBytesToHumanReadable } from "@/next/file";
+import { lowercaseExtension } from "@/next/file";
import log from "@/next/log";
-import type { Electron } from "@/next/types/ipc";
+import { CustomErrorMessage, type Electron } from "@/next/types/ipc";
import { workerBridge } from "@/next/worker/worker-bridge";
import ComlinkCryptoWorker from "@ente/shared/crypto";
-import { CustomError } from "@ente/shared/error";
-import { isPlaybackPossible } from "@ente/shared/media/video-playback";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
import { User } from "@ente/shared/user/types";
-import { downloadUsingAnchor } from "@ente/shared/utils";
-import {
- FILE_TYPE,
- RAW_FORMATS,
- SUPPORTED_RAW_FORMATS,
- TYPE_HEIC,
- TYPE_HEIF,
- TYPE_JPEG,
- TYPE_JPG,
-} from "constants/file";
+import { downloadUsingAnchor, withTimeout } from "@ente/shared/utils";
import { t } from "i18next";
import isElectron from "is-electron";
import { moveToHiddenCollection } from "services/collectionService";
-import DownloadManager, {
- LivePhotoSourceURL,
- SourceURLs,
-} from "services/download";
-import * as ffmpegService from "services/ffmpeg/ffmpegService";
+import { detectFileTypeInfo } from "services/detect-type";
+import DownloadManager from "services/download";
+import { updateFileCreationDateInEXIF } from "services/exif";
import {
deleteFromTrash,
trashFiles,
updateFileMagicMetadata,
updateFilePublicMagicMetadata,
} from "services/fileService";
-import heicConversionService from "services/heicConversionService";
-import { getFileType } from "services/typeDetectionService";
-import { updateFileCreationDateInEXIF } from "services/upload/exifService";
+import { heicToJPEG } from "services/heic-convert";
import {
EncryptedEnteFile,
EnteFile,
@@ -50,12 +36,36 @@ import {
SetFilesDownloadProgressAttributesCreator,
} from "types/gallery";
import { VISIBILITY_STATE } from "types/magicMetadata";
-import { FileTypeInfo } from "types/upload";
import { isArchivedFile, updateMagicMetadata } from "utils/magicMetadata";
import { safeFileName } from "utils/native-fs";
import { writeStream } from "utils/native-stream";
-const WAIT_TIME_IMAGE_CONVERSION = 30 * 1000;
+const RAW_FORMATS = [
+ "heic",
+ "rw2",
+ "tiff",
+ "arw",
+ "cr3",
+ "cr2",
+ "raf",
+ "nef",
+ "psd",
+ "dng",
+ "tif",
+];
+
+const SUPPORTED_RAW_FORMATS = [
+ "heic",
+ "rw2",
+ "tiff",
+ "arw",
+ "cr3",
+ "cr2",
+ "nef",
+ "psd",
+ "dng",
+ "tif",
+];
export enum FILE_OPS_TYPE {
DOWNLOAD,
@@ -67,16 +77,32 @@ export enum FILE_OPS_TYPE {
DELETE_PERMANENTLY,
}
+class ModuleState {
+ /**
+ * This will be set to true if we get an error from the Node.js side of our
+ * desktop app telling us that native JPEG conversion is not available for
+ * the current OS/arch combination.
+ *
+ * That way, we can stop pestering it again and again (saving an IPC
+ * round-trip).
+ *
+ * Note the double negative when it is used.
+ */
+ isNativeJPEGConversionNotAvailable = false;
+}
+
+const moduleState = new ModuleState();
+
export async function getUpdatedEXIFFileForDownload(
fileReader: FileReader,
file: EnteFile,
fileStream: ReadableStream,
): Promise<ReadableStream<Uint8Array>> {
- const extension = getFileExtension(file.metadata.title);
+ const extension = lowercaseExtension(file.metadata.title);
if (
file.metadata.fileType === FILE_TYPE.IMAGE &&
file.pubMagicMetadata?.data.editedTime &&
- (extension === TYPE_JPEG || extension === TYPE_JPG)
+ (extension == "jpeg" || extension == "jpg")
) {
const fileBlob = await new Response(fileStream).blob();
const updatedFileBlob = await updateFileCreationDateInEXIF(
@@ -100,19 +126,19 @@ export async function downloadFile(file: EnteFile) {
const { imageFileName, imageData, videoFileName, videoData } =
await decodeLivePhoto(file.metadata.title, fileBlob);
const image = new File([imageData], imageFileName);
- const imageType = await getFileType(image);
+ const imageType = await detectFileTypeInfo(image);
const tempImageURL = URL.createObjectURL(
new Blob([imageData], { type: imageType.mimeType }),
);
const video = new File([videoData], videoFileName);
- const videoType = await getFileType(video);
+ const videoType = await detectFileTypeInfo(video);
const tempVideoURL = URL.createObjectURL(
new Blob([videoData], { type: videoType.mimeType }),
);
downloadUsingAnchor(tempImageURL, imageFileName);
downloadUsingAnchor(tempVideoURL, videoFileName);
} else {
- const fileType = await getFileType(
+ const fileType = await detectFileTypeInfo(
new File([fileBlob], file.metadata.title),
);
fileBlob = await new Response(
@@ -248,20 +274,6 @@ export async function decryptFile(
}
}
-export function splitFilenameAndExtension(filename: string): [string, string] {
- const lastDotPosition = filename.lastIndexOf(".");
- if (lastDotPosition === -1) return [filename, null];
- else
- return [
- filename.slice(0, lastDotPosition),
- filename.slice(lastDotPosition + 1),
- ];
-}
-
-export function getFileExtension(filename: string) {
- return splitFilenameAndExtension(filename)[1]?.toLocaleLowerCase();
-}
-
export function generateStreamFromArrayBuffer(data: Uint8Array) {
return new ReadableStream({
async start(controller: ReadableStreamDefaultController) {
@@ -271,242 +283,61 @@ export function generateStreamFromArrayBuffer(data: Uint8Array) {
});
}
-export async function getRenderableFileURL(
- file: EnteFile,
- fileBlob: Blob,
- originalFileURL: string,
- forceConvert: boolean,
-): Promise {
- let srcURLs: SourceURLs["url"];
- switch (file.metadata.fileType) {
- case FILE_TYPE.IMAGE: {
- const convertedBlob = await getRenderableImage(
- file.metadata.title,
- fileBlob,
- );
- const convertedURL = getFileObjectURL(
- originalFileURL,
- fileBlob,
- convertedBlob,
- );
- srcURLs = convertedURL;
- break;
- }
- case FILE_TYPE.LIVE_PHOTO: {
- srcURLs = await getRenderableLivePhotoURL(
- file,
- fileBlob,
- forceConvert,
- );
- break;
- }
- case FILE_TYPE.VIDEO: {
- const convertedBlob = await getPlayableVideo(
- file.metadata.title,
- fileBlob,
- forceConvert,
- );
- const convertedURL = getFileObjectURL(
- originalFileURL,
- fileBlob,
- convertedBlob,
- );
- srcURLs = convertedURL;
- break;
- }
- default: {
- srcURLs = originalFileURL;
- break;
- }
- }
-
- let isOriginal: boolean;
- if (file.metadata.fileType === FILE_TYPE.LIVE_PHOTO) {
- isOriginal = false;
- } else {
- isOriginal = (srcURLs as string) === (originalFileURL as string);
- }
-
- return {
- url: srcURLs,
- isOriginal,
- isRenderable:
- file.metadata.fileType !== FILE_TYPE.LIVE_PHOTO && !!srcURLs,
- type:
- file.metadata.fileType === FILE_TYPE.LIVE_PHOTO
- ? "livePhoto"
- : "normal",
- };
-}
-
-async function getRenderableLivePhotoURL(
- file: EnteFile,
- fileBlob: Blob,
- forceConvert: boolean,
-): Promise {
- const livePhoto = await decodeLivePhoto(file.metadata.title, fileBlob);
-
- const getRenderableLivePhotoImageURL = async () => {
- try {
- const imageBlob = new Blob([livePhoto.imageData]);
- const convertedImageBlob = await getRenderableImage(
- livePhoto.imageFileName,
- imageBlob,
- );
-
- return URL.createObjectURL(convertedImageBlob);
- } catch (e) {
- //ignore and return null
- return null;
- }
- };
-
- const getRenderableLivePhotoVideoURL = async () => {
- try {
- const videoBlob = new Blob([livePhoto.videoData]);
- const convertedVideoBlob = await getPlayableVideo(
- livePhoto.videoFileName,
- videoBlob,
- forceConvert,
- true,
- );
- return URL.createObjectURL(convertedVideoBlob);
- } catch (e) {
- //ignore and return null
- return null;
- }
- };
-
- return {
- image: getRenderableLivePhotoImageURL,
- video: getRenderableLivePhotoVideoURL,
- };
-}
-
-export async function getPlayableVideo(
- videoNameTitle: string,
- videoBlob: Blob,
- forceConvert = false,
- runOnWeb = false,
-) {
- try {
- const isPlayable = await isPlaybackPossible(
- URL.createObjectURL(videoBlob),
- );
- if (isPlayable && !forceConvert) {
- return videoBlob;
- } else {
- if (!forceConvert && !runOnWeb && !isElectron()) {
- return null;
- }
- log.info(
- `video format not supported, converting it name: ${videoNameTitle}`,
- );
- const mp4ConvertedVideo = await ffmpegService.convertToMP4(
- new File([videoBlob], videoNameTitle),
- );
- log.info(`video successfully converted ${videoNameTitle}`);
- return new Blob([await mp4ConvertedVideo.arrayBuffer()]);
- }
- } catch (e) {
- log.error("video conversion failed", e);
- return null;
- }
-}
-
-export async function getRenderableImage(fileName: string, imageBlob: Blob) {
- let fileTypeInfo: FileTypeInfo;
+export const getRenderableImage = async (fileName: string, imageBlob: Blob) => {
try {
const tempFile = new File([imageBlob], fileName);
- fileTypeInfo = await getFileType(tempFile);
- log.debug(() => `file type info: ${JSON.stringify(fileTypeInfo)}`);
- const { exactType } = fileTypeInfo;
- let convertedImageBlob: Blob;
- if (isRawFile(exactType)) {
- try {
- if (!isSupportedRawFormat(exactType)) {
- throw Error(CustomError.UNSUPPORTED_RAW_FORMAT);
- }
+ const fileTypeInfo = await detectFileTypeInfo(tempFile);
+ log.debug(
+ () => `Need renderable image for ${JSON.stringify(fileTypeInfo)}`,
+ );
+ const { extension } = fileTypeInfo;
- if (!isElectron()) {
- throw new Error("not available on web");
- }
- log.info(
- `RawConverter called for ${fileName}-${convertBytesToHumanReadable(
- imageBlob.size,
- )}`,
- );
- convertedImageBlob = await convertToJPEGInElectron(
- imageBlob,
- fileName,
- );
- log.info(`${fileName} successfully converted`);
- } catch (e) {
- try {
- if (!isFileHEIC(exactType)) {
- throw e;
- }
- log.info(
- `HEICConverter called for ${fileName}-${convertBytesToHumanReadable(
- imageBlob.size,
- )}`,
- );
- convertedImageBlob =
- await heicConversionService.convert(imageBlob);
- log.info(`${fileName} successfully converted`);
- } catch (e) {
- throw Error(CustomError.NON_PREVIEWABLE_FILE);
- }
- }
- return convertedImageBlob;
- } else {
+ if (!isRawFile(extension)) {
+ // Either it is not something we know how to handle yet, or
+ // something that the browser already knows how to render.
return imageBlob;
}
- } catch (e) {
- log.error(
- `Failed to get renderable image for ${JSON.stringify(fileTypeInfo)}`,
- e,
- );
- return null;
- }
-}
-const convertToJPEGInElectron = async (
- fileBlob: Blob,
- filename: string,
-): Promise => {
- try {
- const startTime = Date.now();
- const inputFileData = new Uint8Array(await fileBlob.arrayBuffer());
- const electron = globalThis.electron;
- const convertedFileData = electron
- ? await electron.convertToJPEG(inputFileData, filename)
- : await workerBridge.convertToJPEG(inputFileData, filename);
- log.info(
- `originalFileSize:${convertBytesToHumanReadable(
- fileBlob?.size,
- )},convertedFileSize:${convertBytesToHumanReadable(
- convertedFileData?.length,
- )}, native conversion time: ${Date.now() - startTime}ms `,
- );
- return new Blob([convertedFileData]);
- } catch (e) {
- if (
- e.message !==
- CustomError.WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED
- ) {
- log.error("failed to convert to jpeg natively", e);
+ const available = !moduleState.isNativeJPEGConversionNotAvailable;
+ if (isElectron() && available && isSupportedRawFormat(extension)) {
+ // If we're running in our desktop app, see if our Node.js layer can
+ // convert this into a JPEG using native tools for us.
+ try {
+ return await nativeConvertToJPEG(imageBlob);
+ } catch (e) {
+ if (e.message == CustomErrorMessage.NotAvailable) {
+ moduleState.isNativeJPEGConversionNotAvailable = true;
+ } else {
+ log.error("Native conversion to JPEG failed", e);
+ }
+ }
}
- throw e;
+
+ if (extension == "heic" || extension == "heif") {
+ // For HEIC/HEIF files we can use our web HEIC converter.
+ return await heicToJPEG(imageBlob);
+ }
+
+ return undefined;
+ } catch (e) {
+ log.error(`Failed to get renderable image for ${fileName}`, e);
+ return undefined;
}
};
-export function isFileHEIC(exactType: string) {
- return (
- exactType.toLowerCase().endsWith(TYPE_HEIC) ||
- exactType.toLowerCase().endsWith(TYPE_HEIF)
- );
-}
+const nativeConvertToJPEG = async (imageBlob: Blob) => {
+ const startTime = Date.now();
+ const imageData = new Uint8Array(await imageBlob.arrayBuffer());
+ const electron = globalThis.electron;
+ // If we're running in a worker, we need to reroute the request back to
+ // the main thread since workers don't have access to the `window` (and
+ // thus, to the `window.electron`) object.
+ const jpegData = electron
+ ? await electron.convertToJPEG(imageData)
+ : await workerBridge.convertToJPEG(imageData);
+ log.debug(() => `Native JPEG conversion took ${Date.now() - startTime} ms`);
+ return new Blob([jpegData]);
+};
export function isRawFile(exactType: string) {
return RAW_FORMATS.includes(exactType.toLowerCase());
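(Aside: the rewritten getRenderableImage above is a fallback chain: try the
desktop app's native JPEG conversion, then the web HEIC converter, else give
up and return undefined. A generic sketch of that pattern, with the names
and types here being illustrative rather than actual exports:)

    type Converter = (blob: Blob) => Promise<Blob>;

    // Try each converter in turn; undefined means none of them could
    // produce a renderable image.
    const firstRenderable = async (blob: Blob, converters: Converter[]) => {
        for (const convert of converters) {
            try {
                return await convert(blob);
            } catch {
                // This converter was unavailable or failed; try the next.
            }
        }
        return undefined;
    };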
@@ -604,6 +435,18 @@ export function isSharedFile(user: User, file: EnteFile) {
return file.ownerID !== user.id;
}
+/**
+ * [Note: File name for local EnteFile objects]
+ *
+ * The title property in a file's metadata is the original file's name. The
+ * metadata of a file cannot be edited. So if later on the file's name is
+ * changed, then the edit is stored in the `editedName` property of the public
+ * metadata of the file.
+ *
+ * This function merges these edits onto the file object that we use locally.
+ * Effectively, post this step, the file's metadata.title can be used in lieu of
+ * its filename.
+ */
export function mergeMetadata(files: EnteFile[]): EnteFile[] {
return files.map((file) => {
if (file.pubMagicMetadata?.data.editedTime) {
@@ -809,7 +652,11 @@ async function downloadFileDesktop(
fs.exists,
);
const imageStream = generateStreamFromArrayBuffer(imageData);
- await writeStream(`${downloadDir}/${imageExportName}`, imageStream);
+ await writeStream(
+ electron,
+ `${downloadDir}/${imageExportName}`,
+ imageStream,
+ );
try {
const videoExportName = await safeFileName(
downloadDir,
@@ -817,7 +664,11 @@ async function downloadFileDesktop(
fs.exists,
);
const videoStream = generateStreamFromArrayBuffer(videoData);
- await writeStream(`${downloadDir}/${videoExportName}`, videoStream);
+ await writeStream(
+ electron,
+ `${downloadDir}/${videoExportName}`,
+ videoStream,
+ );
} catch (e) {
await fs.rm(`${downloadDir}/${imageExportName}`);
throw e;
@@ -828,7 +679,11 @@ async function downloadFileDesktop(
file.metadata.title,
fs.exists,
);
- await writeStream(`${downloadDir}/${fileExportName}`, updatedStream);
+ await writeStream(
+ electron,
+ `${downloadDir}/${fileExportName}`,
+ updatedStream,
+ );
}
}
@@ -840,7 +695,7 @@ export const getArchivedFiles = (files: EnteFile[]) => {
};
export const createTypedObjectURL = async (blob: Blob, fileName: string) => {
- const type = await getFileType(new File([blob], fileName));
+ const type = await detectFileTypeInfo(new File([blob], fileName));
return URL.createObjectURL(new Blob([blob], { type: type.mimeType }));
};
@@ -853,15 +708,14 @@ export const getUserOwnedFiles = (files: EnteFile[]) => {
};
// doesn't work on firefox
-export const copyFileToClipboard = async (fileUrl: string) => {
+export const copyFileToClipboard = async (fileURL: string) => {
const canvas = document.createElement("canvas");
const canvasCTX = canvas.getContext("2d");
const image = new Image();
const blobPromise = new Promise((resolve, reject) => {
- let timeout: NodeJS.Timeout = null;
try {
- image.setAttribute("src", fileUrl);
+ image.setAttribute("src", fileURL);
image.onload = () => {
canvas.width = image.width;
canvas.height = image.height;
@@ -873,26 +727,17 @@ export const copyFileToClipboard = async (fileUrl: string) => {
"image/png",
1,
);
-
- clearTimeout(timeout);
};
} catch (e) {
- log.error("failed to copy to clipboard", e);
+ log.error("Failed to copy to clipboard", e);
reject(e);
- } finally {
- clearTimeout(timeout);
}
- timeout = setTimeout(
- () => reject(new Error("Operation timed out")),
- WAIT_TIME_IMAGE_CONVERSION,
- );
});
- const { ClipboardItem } = window;
+ const blob = await withTimeout(blobPromise, 30 * 1000);
- await navigator.clipboard
- .write([new ClipboardItem({ "image/png": blobPromise })])
- .catch((e) => log.error("failed to copy to clipboard", e));
+ const { ClipboardItem } = window;
+ await navigator.clipboard.write([new ClipboardItem({ "image/png": blob })]);
};
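(Aside: a minimal sketch of a withTimeout helper matching the shape used
above. The real implementation is imported from "@ente/shared/utils"; this
illustrates only the contract, it is not that code.)

    const withTimeout = async <T>(promise: Promise<T>, ms: number) => {
        let timeoutId: ReturnType<typeof setTimeout> | undefined;
        const timeout = new Promise<never>((_, reject) => {
            timeoutId = setTimeout(() => reject(new Error("Timed out")), ms);
        });
        try {
            return await Promise.race([promise, timeout]);
        } finally {
            clearTimeout(timeoutId);
        }
    };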
export function getLatestVersionFiles(files: EnteFile[]) {
@@ -1061,16 +906,3 @@ const fixTimeHelper = async (
) => {
setFixCreationTimeAttributes({ files: selectedFiles });
};
-
-const getFileObjectURL = (
- originalFileURL: string,
- originalBlob: Blob,
- convertedBlob: Blob,
-) => {
- const convertedURL = convertedBlob
- ? convertedBlob === originalBlob
- ? originalFileURL
- : URL.createObjectURL(convertedBlob)
- : null;
- return convertedURL;
-};
diff --git a/web/apps/photos/src/utils/file/livePhoto.ts b/web/apps/photos/src/utils/file/livePhoto.ts
deleted file mode 100644
index 7d687217c..000000000
--- a/web/apps/photos/src/utils/file/livePhoto.ts
+++ /dev/null
@@ -1,42 +0,0 @@
-import { FILE_TYPE } from "constants/file";
-import { getFileExtension } from "utils/file";
-
-const IMAGE_EXTENSIONS = [
- "heic",
- "heif",
- "jpeg",
- "jpg",
- "png",
- "gif",
- "bmp",
- "tiff",
- "webp",
-];
-
-const VIDEO_EXTENSIONS = [
- "mov",
- "mp4",
- "m4v",
- "avi",
- "wmv",
- "flv",
- "mkv",
- "webm",
- "3gp",
- "3g2",
- "avi",
- "ogv",
- "mpg",
- "mp",
-];
-
-export function getFileTypeFromExtensionForLivePhotoClustering(
- filename: string,
-) {
- const extension = getFileExtension(filename)?.toLowerCase();
- if (IMAGE_EXTENSIONS.includes(extension)) {
- return FILE_TYPE.IMAGE;
- } else if (VIDEO_EXTENSIONS.includes(extension)) {
- return FILE_TYPE.VIDEO;
- }
-}
diff --git a/web/apps/photos/src/utils/machineLearning/index.ts b/web/apps/photos/src/utils/machineLearning/index.ts
index a89bccc4c..bc9ae3974 100644
--- a/web/apps/photos/src/utils/machineLearning/index.ts
+++ b/web/apps/photos/src/utils/machineLearning/index.ts
@@ -1,6 +1,6 @@
+import { FILE_TYPE } from "@/media/file-type";
import { decodeLivePhoto } from "@/media/live-photo";
import log from "@/next/log";
-import { FILE_TYPE } from "constants/file";
import PQueue from "p-queue";
import DownloadManager from "services/download";
import { getLocalFiles } from "services/fileService";
diff --git a/web/apps/photos/src/utils/native-stream.ts b/web/apps/photos/src/utils/native-stream.ts
index 7dba1acf9..85d54b790 100644
--- a/web/apps/photos/src/utils/native-stream.ts
+++ b/web/apps/photos/src/utils/native-stream.ts
@@ -4,17 +4,81 @@
* NOTE: These functions only work when we're running in our desktop app.
*/
+import type { Electron } from "@/next/types/ipc";
+
/**
- * Write the given stream to a file on the local machine.
+ * Stream the given file from the user's local filesystem.
*
- * **This only works when we're running in our desktop app**. It uses the
+ * This only works when we're running in our desktop app since it uses the
* "stream://" protocol handler exposed by our custom code in the Node.js layer.
* See: [Note: IPC streams].
*
+ * To avoid accidentally invoking it in a non-desktop app context, it requires
+ * the {@link Electron} object as a parameter (even though it doesn't use it).
+ *
+ * @param path The path of the file on the user's local filesystem whose
+ * contents we want to stream.
+ *
+ * @return A ({@link Response}, size, lastModifiedMs) triple.
+ *
+ * * The response contains the contents of the file. In particular, the `body`
+ * {@link ReadableStream} property of this response can be used to read the
+ * files contents in a streaming manner.
+ *
+ * * The size is the size of the file that we'll be reading from disk.
+ *
+ * * The lastModifiedMs value is the last modified time of the file that we're
+ * reading, expressed as epoch milliseconds.
+ */
+export const readStream = async (
+ _: Electron,
+ path: string,
+): Promise<{ response: Response; size: number; lastModifiedMs: number }> => {
+ const req = new Request(`stream://read${path}`, {
+ method: "GET",
+ });
+
+ const res = await fetch(req);
+ if (!res.ok)
+ throw new Error(
+ `Failed to read stream from ${path}: HTTP ${res.status}`,
+ );
+
+ const size = readNumericHeader(res, "Content-Length");
+ const lastModifiedMs = readNumericHeader(res, "X-Last-Modified-Ms");
+
+ return { response: res, size, lastModifiedMs };
+};
+
+const readNumericHeader = (res: Response, key: string) => {
+ const valueText = res.headers.get(key);
+ const value = +valueText;
+ if (isNaN(value))
+ throw new Error(
+ `Expected a numeric ${key} when reading a stream response, instead got ${valueText}`,
+ );
+ return value;
+};
+
+/**
+ * Write the given stream to a file on the local machine.
+ *
+ * This only works when we're running in our desktop app since it uses the
+ * "stream://" protocol handler exposed by our custom code in the Node.js layer.
+ * See: [Note: IPC streams].
+ *
+ * To avoid accidentally invoking it in a non-desktop app context, it requires
+ * the {@link Electron} object as a parameter (even though it doesn't use it).
+ *
 * @param path The path on the local machine where the file should be written.
+ *
* @param stream The stream which should be written into the file.
- * */
-export const writeStream = async (path: string, stream: ReadableStream) => {
+ */
+export const writeStream = async (
+ _: Electron,
+ path: string,
+ stream: ReadableStream,
+) => {
// TODO(MR): This doesn't currently work.
//
// Not sure what I'm doing wrong here; I've opened an issue upstream
@@ -38,7 +102,7 @@ export const writeStream = async (path: string, stream: ReadableStream) => {
// GET can't have a body
method: "POST",
body: stream,
- // @ts-expect-error TypeScript's libdom.d.ts does not include the
+ // --@ts-expect-error TypeScript's libdom.d.ts does not include the
// "duplex" parameter, e.g. see
// https://github.com/node-fetch/node-fetch/issues/1769.
duplex: "half",
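(Aside: a hypothetical usage sketch of the readStream/writeStream pair from
this file. The copyFile helper is illustrative and not part of this diff; it
assumes a desktop context where an Electron instance is available.)

    import type { Electron } from "@/next/types/ipc";
    import { readStream, writeStream } from "utils/native-stream";

    const copyFile = async (electron: Electron, src: string, dest: string) => {
        const { response, size } = await readStream(electron, src);
        if (!response.body) throw new Error(`Empty stream for ${src}`);
        await writeStream(electron, dest, response.body);
        console.log(`Copied ${size} bytes from ${src} to ${dest}`);
    };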
diff --git a/web/apps/photos/src/utils/photoFrame/index.ts b/web/apps/photos/src/utils/photoFrame/index.ts
index faf0679e7..93b680149 100644
--- a/web/apps/photos/src/utils/photoFrame/index.ts
+++ b/web/apps/photos/src/utils/photoFrame/index.ts
@@ -1,5 +1,5 @@
+import { FILE_TYPE } from "@/media/file-type";
import log from "@/next/log";
-import { FILE_TYPE } from "constants/file";
import { LivePhotoSourceURL, SourceURLs } from "services/download";
import { EnteFile } from "types/file";
import { SetSelectedState } from "types/gallery";
diff --git a/web/apps/photos/src/utils/upload/index.ts b/web/apps/photos/src/utils/upload/index.ts
deleted file mode 100644
index 4e6d216cf..000000000
--- a/web/apps/photos/src/utils/upload/index.ts
+++ /dev/null
@@ -1,231 +0,0 @@
-import { basename, dirname } from "@/next/file";
-import { FILE_TYPE } from "constants/file";
-import { A_SEC_IN_MICROSECONDS, PICKED_UPLOAD_TYPE } from "constants/upload";
-import isElectron from "is-electron";
-import { exportMetadataDirectoryName } from "services/export";
-import { EnteFile } from "types/file";
-import { ElectronFile, FileWithCollection, Metadata } from "types/upload";
-
-const TYPE_JSON = "json";
-const DEDUPE_COLLECTION = new Set(["icloud library", "icloudlibrary"]);
-
-export function findMatchingExistingFiles(
- existingFiles: EnteFile[],
- newFileMetadata: Metadata,
-): EnteFile[] {
- const matchingFiles: EnteFile[] = [];
- for (const existingFile of existingFiles) {
- if (areFilesSame(existingFile.metadata, newFileMetadata)) {
- matchingFiles.push(existingFile);
- }
- }
- return matchingFiles;
-}
-
-export function shouldDedupeAcrossCollection(collectionName: string): boolean {
- // using set to avoid unnecessary regex for removing spaces for each upload
- return DEDUPE_COLLECTION.has(collectionName.toLocaleLowerCase());
-}
-
-export function areFilesSame(
- existingFile: Metadata,
- newFile: Metadata,
-): boolean {
- if (hasFileHash(existingFile) && hasFileHash(newFile)) {
- return areFilesWithFileHashSame(existingFile, newFile);
- } else {
- /*
- * The maximum difference in the creation/modification times of two similar files is set to 1 second.
- * This is because while uploading files in the web - browsers and users could have set reduced
- * precision of file times to prevent timing attacks and fingerprinting.
- * Context: https://developer.mozilla.org/en-US/docs/Web/API/File/lastModified#reduced_time_precision
- */
- if (
- existingFile.fileType === newFile.fileType &&
- Math.abs(existingFile.creationTime - newFile.creationTime) <
- A_SEC_IN_MICROSECONDS &&
- Math.abs(existingFile.modificationTime - newFile.modificationTime) <
- A_SEC_IN_MICROSECONDS &&
- existingFile.title === newFile.title
- ) {
- return true;
- } else {
- return false;
- }
- }
-}
-
-export function hasFileHash(file: Metadata) {
- return file.hash || (file.imageHash && file.videoHash);
-}
-
-export function areFilesWithFileHashSame(
- existingFile: Metadata,
- newFile: Metadata,
-): boolean {
- if (
- existingFile.fileType !== newFile.fileType ||
- existingFile.title !== newFile.title
- ) {
- return false;
- }
- if (existingFile.fileType === FILE_TYPE.LIVE_PHOTO) {
- return (
- existingFile.imageHash === newFile.imageHash &&
- existingFile.videoHash === newFile.videoHash
- );
- } else {
- return existingFile.hash === newFile.hash;
- }
-}
-
-export function segregateMetadataAndMediaFiles(
- filesWithCollectionToUpload: FileWithCollection[],
-) {
- const metadataJSONFiles: FileWithCollection[] = [];
- const mediaFiles: FileWithCollection[] = [];
- filesWithCollectionToUpload.forEach((fileWithCollection) => {
- const file = fileWithCollection.file;
- if (file.name.toLowerCase().endsWith(TYPE_JSON)) {
- metadataJSONFiles.push(fileWithCollection);
- } else {
- mediaFiles.push(fileWithCollection);
- }
- });
- return { mediaFiles, metadataJSONFiles };
-}
-
-export function areFileWithCollectionsSame(
- firstFile: FileWithCollection,
- secondFile: FileWithCollection,
-): boolean {
- return firstFile.localID === secondFile.localID;
-}
-
-/**
- * Return true if all the paths in the given list are items that belong to the
- * same (arbitrary) directory.
- *
- * Empty list of paths is considered to be in the same directory.
- */
-export const areAllInSameDirectory = (paths: string[]) =>
- new Set(paths.map(dirname)).size == 1;
-
-// This is used to prompt the user the make upload strategy choice
-export interface ImportSuggestion {
- rootFolderName: string;
- hasNestedFolders: boolean;
- hasRootLevelFileWithFolder: boolean;
-}
-
-export const DEFAULT_IMPORT_SUGGESTION: ImportSuggestion = {
- rootFolderName: "",
- hasNestedFolders: false,
- hasRootLevelFileWithFolder: false,
-};
-
-export function getImportSuggestion(
- uploadType: PICKED_UPLOAD_TYPE,
- paths: string[],
-): ImportSuggestion {
- if (isElectron() && uploadType === PICKED_UPLOAD_TYPE.FILES) {
- return DEFAULT_IMPORT_SUGGESTION;
- }
-
- const getCharCount = (str: string) => (str.match(/\//g) ?? []).length;
- paths.sort((path1, path2) => getCharCount(path1) - getCharCount(path2));
- const firstPath = paths[0];
- const lastPath = paths[paths.length - 1];
-
- const L = firstPath.length;
- let i = 0;
- const firstFileFolder = firstPath.substring(0, firstPath.lastIndexOf("/"));
- const lastFileFolder = lastPath.substring(0, lastPath.lastIndexOf("/"));
-
- while (i < L && firstPath.charAt(i) === lastPath.charAt(i)) i++;
- let commonPathPrefix = firstPath.substring(0, i);
-
- if (commonPathPrefix) {
- commonPathPrefix = commonPathPrefix.substring(
- 0,
- commonPathPrefix.lastIndexOf("/"),
- );
- if (commonPathPrefix) {
- commonPathPrefix = commonPathPrefix.substring(
- commonPathPrefix.lastIndexOf("/") + 1,
- );
- }
- }
- return {
- rootFolderName: commonPathPrefix || null,
- hasNestedFolders: firstFileFolder !== lastFileFolder,
- hasRootLevelFileWithFolder: firstFileFolder === "",
- };
-}
-
-// This function groups files that are that have the same parent folder into collections
-// For Example, for user files have a directory structure like this
-// a
-// / | \
-// b j c
-// /|\ / \
-// e f g h i
-//
-// The files will grouped into 3 collections.
-// [a => [j],
-// b => [e,f,g],
-// c => [h, i]]
-export function groupFilesBasedOnParentFolder(
- toUploadFiles: File[] | ElectronFile[],
-) {
- const collectionNameToFilesMap = new Map();
- for (const file of toUploadFiles) {
- const filePath = file["path"] as string;
-
- let folderPath = filePath.substring(0, filePath.lastIndexOf("/"));
- // If the parent folder of a file is "metadata"
- // we consider it to be part of the parent folder
- // For Eg,For FileList -> [a/x.png, a/metadata/x.png.json]
- // they will both we grouped into the collection "a"
- // This is cluster the metadata json files in the same collection as the file it is for
- if (folderPath.endsWith(exportMetadataDirectoryName)) {
- folderPath = folderPath.substring(0, folderPath.lastIndexOf("/"));
- }
- const folderName = folderPath.substring(
- folderPath.lastIndexOf("/") + 1,
- );
- if (!folderName?.length) {
- throw Error("folderName can't be null");
- }
- if (!collectionNameToFilesMap.has(folderName)) {
- collectionNameToFilesMap.set(folderName, []);
- }
- collectionNameToFilesMap.get(folderName).push(file);
- }
- return collectionNameToFilesMap;
-}
-
-export function filterOutSystemFiles(files: File[] | ElectronFile[]) {
- if (files[0] instanceof File) {
- const browserFiles = files as File[];
- return browserFiles.filter((file) => {
- return !isSystemFile(file);
- });
- } else {
- const electronFiles = files as ElectronFile[];
- return electronFiles.filter((file) => {
- return !isSystemFile(file);
- });
- }
-}
-
-export function isSystemFile(file: File | ElectronFile) {
- return file.name.startsWith(".");
-}
-
-/**
- * Return true if the file at the given {@link path} is hidden.
- *
- * Hidden files are those whose names begin with a "." (dot).
- */
-export const isHiddenFile = (path: string) => basename(path).startsWith(".");
diff --git a/web/apps/photos/src/utils/upload/uploadRetrier.ts b/web/apps/photos/src/utils/upload/uploadRetrier.ts
deleted file mode 100644
index 3d314fd14..000000000
--- a/web/apps/photos/src/utils/upload/uploadRetrier.ts
+++ /dev/null
@@ -1,29 +0,0 @@
-import { sleep } from "@ente/shared/utils";
-
-const retrySleepTimeInMilliSeconds = [2000, 5000, 10000];
-
-export async function retryHTTPCall(
- func: () => Promise<any>,
- checkForBreakingError?: (error) => void,
-): Promise<any> {
- const retrier = async (
- func: () => Promise<any>,
- attemptNumber: number = 0,
- ) => {
- try {
- const resp = await func();
- return resp;
- } catch (e) {
- if (checkForBreakingError) {
- checkForBreakingError(e);
- }
- if (attemptNumber < retrySleepTimeInMilliSeconds.length) {
- await sleep(retrySleepTimeInMilliSeconds[attemptNumber]);
- return await retrier(func, attemptNumber + 1);
- } else {
- throw e;
- }
- }
- };
- return await retrier(func);
-}
diff --git a/web/apps/photos/src/worker/convert.worker.ts b/web/apps/photos/src/worker/convert.worker.ts
deleted file mode 100644
index d8ab22d3a..000000000
--- a/web/apps/photos/src/worker/convert.worker.ts
+++ /dev/null
@@ -1,24 +0,0 @@
-import * as Comlink from "comlink";
-import HeicConvert from "heic-convert";
-import { getUint8ArrayView } from "services/readerService";
-
-export class DedicatedConvertWorker {
- async convertHEICToJPEG(fileBlob: Blob) {
- return convertHEICToJPEG(fileBlob);
- }
-}
-
-Comlink.expose(DedicatedConvertWorker, self);
-
-/**
- * Convert a HEIC file to a JPEG file.
- *
- * Both the input and output are blobs.
- */
-export const convertHEICToJPEG = async (heicBlob: Blob): Promise<Blob> => {
- const filedata = await getUint8ArrayView(heicBlob);
- const result = await HeicConvert({ buffer: filedata, format: "JPEG" });
- const convertedFileData = new Uint8Array(result);
- const convertedFileBlob = new Blob([convertedFileData]);
- return convertedFileBlob;
-};
diff --git a/web/apps/photos/src/worker/ffmpeg.worker.ts b/web/apps/photos/src/worker/ffmpeg.worker.ts
index d3f503abb..946a2090f 100644
--- a/web/apps/photos/src/worker/ffmpeg.worker.ts
+++ b/web/apps/photos/src/worker/ffmpeg.worker.ts
@@ -1,15 +1,117 @@
-import * as Comlink from "comlink";
-import { WasmFFmpeg } from "services/wasm/ffmpeg";
+import log from "@/next/log";
+import { withTimeout } from "@ente/shared/utils";
+import QueueProcessor from "@ente/shared/utils/queueProcessor";
+import { expose } from "comlink";
+import {
+ ffmpegPathPlaceholder,
+ inputPathPlaceholder,
+ outputPathPlaceholder,
+} from "constants/ffmpeg";
+import { FFmpeg, createFFmpeg } from "ffmpeg-wasm";
export class DedicatedFFmpegWorker {
- wasmFFmpeg: WasmFFmpeg;
+ private ffmpeg: FFmpeg;
+ private ffmpegTaskQueue = new QueueProcessor<Uint8Array>();
+
constructor() {
- this.wasmFFmpeg = new WasmFFmpeg();
+ this.ffmpeg = createFFmpeg({
+ corePath: "/js/ffmpeg/ffmpeg-core.js",
+ mt: false,
+ });
}
- run(cmd, inputFile, outputFileName, dontTimeout) {
- return this.wasmFFmpeg.run(cmd, inputFile, outputFileName, dontTimeout);
+ /**
+ * Execute a FFmpeg {@link command} on {@link blob}.
+ *
+ * This is a sibling of {@link ffmpegExec} exposed by the desktop app in
+ * `ipc.ts`. See [Note: FFmpeg in Electron].
+ */
+ async exec(
+ command: string[],
+ blob: Blob,
+ outputFileExtension: string,
+ timeoutMs: number,
+ ): Promise<Uint8Array> {
+ if (!this.ffmpeg.isLoaded()) await this.ffmpeg.load();
+
+ const go = () =>
+ ffmpegExec(this.ffmpeg, command, outputFileExtension, blob);
+
+ const request = this.ffmpegTaskQueue.queueUpRequest(() =>
+ timeoutMs ? withTimeout(go(), timeoutMs) : go(),
+ );
+
+ return await request.promise;
}
}
-Comlink.expose(DedicatedFFmpegWorker, self);
+expose(DedicatedFFmpegWorker, self);
+
+const ffmpegExec = async (
+ ffmpeg: FFmpeg,
+ command: string[],
+ outputFileExtension: string,
+ blob: Blob,
+) => {
+ const inputPath = randomPrefix();
+ const outputSuffix = outputFileExtension ? "." + outputFileExtension : "";
+ const outputPath = randomPrefix() + outputSuffix;
+
+ const cmd = substitutePlaceholders(command, inputPath, outputPath);
+
+ const inputData = new Uint8Array(await blob.arrayBuffer());
+
+ try {
+ const startTime = Date.now();
+
+ ffmpeg.FS("writeFile", inputPath, inputData);
+ await ffmpeg.run(...cmd);
+
+ const result = ffmpeg.FS("readFile", outputPath);
+
+ const ms = Math.round(Date.now() - startTime);
+ log.debug(() => `[wasm] ffmpeg ${cmd.join(" ")} (${ms} ms)`);
+ return result;
+ } finally {
+ try {
+ ffmpeg.FS("unlink", inputPath);
+ } catch (e) {
+ log.error(`Failed to remove input ${inputPath}`, e);
+ }
+ try {
+ ffmpeg.FS("unlink", outputPath);
+ } catch (e) {
+ log.error(`Failed to remove output ${outputPath}`, e);
+ }
+ }
+};
+
+/** Generate a random string suitable for being used as a file name prefix */
+const randomPrefix = () => {
+ const alphabet =
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
+
+ let result = "";
+ for (let i = 0; i < 10; i++)
+ result += alphabet[Math.floor(Math.random() * alphabet.length)];
+ return result;
+};
+
+const substitutePlaceholders = (
+ command: string[],
+ inputFilePath: string,
+ outputFilePath: string,
+) =>
+ command
+ .map((segment) => {
+ if (segment == ffmpegPathPlaceholder) {
+ return undefined;
+ } else if (segment == inputPathPlaceholder) {
+ return inputFilePath;
+ } else if (segment == outputPathPlaceholder) {
+ return outputFilePath;
+ } else {
+ return segment;
+ }
+ })
+ .filter((c) => !!c);
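
For reviewers: the placeholder substitution above is what lets callers share one command description between this wasm worker and the native `ffmpegExec` in the desktop app. A sketch of a caller-side command; `genThumbnailCommand` is illustrative, not part of this diff:

    import {
        ffmpegPathPlaceholder,
        inputPathPlaceholder,
        outputPathPlaceholder,
    } from "constants/ffmpeg";

    // Grab a single frame at t=1s as a thumbnail. The worker swaps in the
    // temporary input/output paths, and drops the ffmpeg path placeholder
    // (the wasm build has no binary path).
    const genThumbnailCommand = [
        ffmpegPathPlaceholder,
        "-i",
        inputPathPlaceholder,
        "-ss",
        "00:00:01",
        "-vframes",
        "1",
        outputPathPlaceholder,
    ];

The worker would then be invoked as `exec(genThumbnailCommand, videoBlob, "jpeg", 30 * 1000)`.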
diff --git a/web/apps/photos/src/worker/heic-convert.worker.ts b/web/apps/photos/src/worker/heic-convert.worker.ts
new file mode 100644
index 000000000..96a1a9468
--- /dev/null
+++ b/web/apps/photos/src/worker/heic-convert.worker.ts
@@ -0,0 +1,22 @@
+import { expose } from "comlink";
+import HeicConvert from "heic-convert";
+
+export class DedicatedHEICConvertWorker {
+ async heicToJPEG(heicBlob: Blob) {
+ return heicToJPEG(heicBlob);
+ }
+}
+
+expose(DedicatedHEICConvertWorker, self);
+
+/**
+ * Convert a HEIC file to a JPEG file.
+ *
+ * Both the input and output are blobs.
+ */
+export const heicToJPEG = async (heicBlob: Blob): Promise<Blob> => {
+ const buffer = new Uint8Array(await heicBlob.arrayBuffer());
+ const result = await HeicConvert({ buffer, format: "JPEG" });
+ const convertedData = new Uint8Array(result);
+ return new Blob([convertedData]);
+};
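
A quick sketch of how this worker gets consumed, mirroring how the app wires up its other Comlink workers (the exact call site is not in this diff, and `heicBlob` is assumed to be in scope):

    import { ComlinkWorker } from "@/next/worker/comlink-worker";
    import type { DedicatedHEICConvertWorker } from "worker/heic-convert.worker";

    // The `new URL(...)` form lets the bundler locate the worker script.
    const cw = new ComlinkWorker<typeof DedicatedHEICConvertWorker>(
        "heic-convert-worker",
        new Worker(new URL("heic-convert.worker.ts", import.meta.url)),
    );

    // Methods on the remote all become async through the Comlink proxy.
    const converter = await cw.remote;
    const jpegBlob = await converter.heicToJPEG(heicBlob);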
diff --git a/web/apps/photos/tests/upload.test.ts b/web/apps/photos/tests/upload.test.ts
index 6e58cf0c2..c4d76d524 100644
--- a/web/apps/photos/tests/upload.test.ts
+++ b/web/apps/photos/tests/upload.test.ts
@@ -1,13 +1,13 @@
-import { tryToParseDateTime } from "@ente/shared/time";
-import { FILE_TYPE } from "constants/file";
+import { FILE_TYPE } from "@/media/file-type";
import { getLocalCollections } from "services/collectionService";
import { getLocalFiles } from "services/fileService";
+import { tryToParseDateTime } from "services/upload/date";
import {
MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT,
getClippedMetadataJSONMapKeyForFile,
getMetadataJSONMapKeyForFile,
getMetadataJSONMapKeyForJSON,
-} from "services/upload/metadataService";
+} from "services/upload/takeout";
import { getUserDetailsV2 } from "services/userService";
import { groupFilesBasedOnCollectionID } from "utils/file";
diff --git a/web/apps/photos/tests/zip-file-reading.test.ts b/web/apps/photos/tests/zip-file-reading.test.ts
deleted file mode 100644
index 07d70f067..000000000
--- a/web/apps/photos/tests/zip-file-reading.test.ts
+++ /dev/null
@@ -1,111 +0,0 @@
-import { getFileNameSize } from "@/next/file";
-import { FILE_READER_CHUNK_SIZE, PICKED_UPLOAD_TYPE } from "constants/upload";
-import { getElectronFileStream, getFileStream } from "services/readerService";
-import { DataStream } from "types/upload";
-import { getImportSuggestion } from "utils/upload";
-
-// This was for used to verify that converting from the browser readable stream
-// to the node readable stream correctly handles files that align on the 4 MB
-// data boundary. This expects a zip file containing random files of various
-// sizes starting from 1M to 20M.
-export const testZipFileReading = async () => {
- try {
- const electron = globalThis.electron;
- if (!electron) {
- console.log("testZipFileReading Check is for desktop only");
- return;
- }
- if (!process.env.NEXT_PUBLIC_FILE_READING_TEST_ZIP_PATH) {
- throw Error(
- "upload test failed NEXT_PUBLIC_FILE_READING_TEST_ZIP_PATH missing",
- );
- }
- const files = await electron.getElectronFilesFromGoogleZip(
- process.env.NEXT_PUBLIC_FILE_READING_TEST_ZIP_PATH,
- );
- if (!files?.length) {
- throw Error(
- `testZipFileReading Check failed ❌
- No files selected`,
- );
- }
- console.log("test zip file reading check started");
- let i = 0;
- for (const file of files) {
- i++;
- let filedata: DataStream;
- if (file instanceof File) {
- filedata = getFileStream(file, FILE_READER_CHUNK_SIZE);
- } else {
- filedata = await getElectronFileStream(
- file,
- FILE_READER_CHUNK_SIZE,
- );
- }
- const streamReader = filedata.stream.getReader();
- for (let i = 0; i < filedata.chunkCount; i++) {
- const { done } = await streamReader.read();
- if (done) {
- throw Error(
- `testZipFileReading Check failed ❌
- ${getFileNameSize(
- file,
- )} less than expected chunks, expected: ${
- filedata.chunkCount
- }, got ${i - 1}`,
- );
- }
- }
- const { done } = await streamReader.read();
-
- if (!done) {
- throw Error(
- `testZipFileReading Check failed ❌
- ${getFileNameSize(
- file,
- )} more than expected chunks, expected: ${
- filedata.chunkCount
- }`,
- );
- }
- console.log(`${i}/${files.length} passed ✅`);
- }
- console.log("test zip file reading check passed ✅");
- } catch (e) {
- console.log(e);
- }
-};
-
-// This was used when fixing a bug around handling a zip file that has a photo
-// at the root.
-export const testZipWithRootFileReadingTest = async () => {
- try {
- const electron = globalThis.electron;
- if (!electron) {
- console.log("testZipFileReading Check is for desktop only");
- return;
- }
- if (!process.env.NEXT_PUBLIC_ZIP_WITH_ROOT_FILE_PATH) {
- throw Error(
- "upload test failed NEXT_PUBLIC_ZIP_WITH_ROOT_FILE_PATH missing",
- );
- }
- const files = await electron.getElectronFilesFromGoogleZip(
- process.env.NEXT_PUBLIC_ZIP_WITH_ROOT_FILE_PATH,
- );
-
- const importSuggestion = getImportSuggestion(
- PICKED_UPLOAD_TYPE.ZIPS,
- files.map((file) => file["path"]),
- );
- if (!importSuggestion.rootFolderName) {
- throw Error(
- `testZipWithRootFileReadingTest Check failed ❌
- rootFolderName is missing`,
- );
- }
- console.log("testZipWithRootFileReadingTest passed ✅");
- } catch (e) {
- console.log(e);
- }
-};
diff --git a/web/docs/dependencies.md b/web/docs/dependencies.md
index 7dece3a37..83c4c16c8 100644
--- a/web/docs/dependencies.md
+++ b/web/docs/dependencies.md
@@ -133,8 +133,13 @@ some cases.
## Media
-- "jszip" is used for reading zip files in JavaScript. Live photos are zip
- files under the hood.
+- ["jszip"](https://github.com/Stuk/jszip) is used for reading zip files in
+ JavaScript (Live photos are zip files under the hood).
+
+- ["file-type"](https://github.com/sindresorhus/file-type) is used for MIME
+ type detection. We are pinned to the old version 16.5.4 because from v17
+ onwards the package became ESM only, and for our limited use case the
+ custom Webpack configuration that would entail is not worth the upgrade.
## Photos app specific
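
A minimal sketch of the 16.x "file-type" API as used from a browser context (method names per the v16 docs; treat as illustrative):

    import FileType from "file-type";

    // Detection works off the magic bytes at the start of the file and
    // returns undefined for unrecognized formats.
    const detectMIMEType = async (blob: Blob) => {
        const bytes = new Uint8Array(await blob.arrayBuffer());
        return (await FileType.fromBuffer(bytes))?.mime; // e.g. "image/jpeg"
    };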
diff --git a/web/packages/accounts/components/ChangeEmail.tsx b/web/packages/accounts/components/ChangeEmail.tsx
index 3f47be8a1..ec647e671 100644
--- a/web/packages/accounts/components/ChangeEmail.tsx
+++ b/web/packages/accounts/components/ChangeEmail.tsx
@@ -6,7 +6,7 @@ import FormPaperFooter from "@ente/shared/components/Form/FormPaper/Footer";
import LinkButton from "@ente/shared/components/LinkButton";
import SubmitButton from "@ente/shared/components/SubmitButton";
import { LS_KEYS, getData, setData } from "@ente/shared/storage/localStorage";
-import { sleep } from "@ente/shared/utils";
+import { wait } from "@ente/shared/utils";
import { Alert, Box, TextField } from "@mui/material";
import { Formik, FormikHelpers } from "formik";
import { t } from "i18next";
@@ -59,7 +59,7 @@ function ChangeEmailForm({ appName }: PageProps) {
setData(LS_KEYS.USER, { ...getData(LS_KEYS.USER), email });
setLoading(false);
setSuccess(true);
- await sleep(1000);
+ await wait(1000);
goToApp();
} catch (e) {
setLoading(false);
diff --git a/web/packages/accounts/components/two-factor/VerifyForm.tsx b/web/packages/accounts/components/two-factor/VerifyForm.tsx
index 810a6c010..b7f7fc278 100644
--- a/web/packages/accounts/components/two-factor/VerifyForm.tsx
+++ b/web/packages/accounts/components/two-factor/VerifyForm.tsx
@@ -9,7 +9,7 @@ import {
VerticallyCentered,
} from "@ente/shared/components/Container";
import SubmitButton from "@ente/shared/components/SubmitButton";
-import { sleep } from "@ente/shared/utils";
+import { wait } from "@ente/shared/utils";
import { Box, Typography } from "@mui/material";
interface formValues {
@@ -33,7 +33,7 @@ export default function VerifyTwoFactor(props: Props) {
const markSuccessful = async () => {
setWaiting(false);
setSuccess(true);
- await sleep(1000);
+ await wait(1000);
};
const submitForm = async (
diff --git a/web/packages/media/file-type.ts b/web/packages/media/file-type.ts
new file mode 100644
index 000000000..b180918cd
--- /dev/null
+++ b/web/packages/media/file-type.ts
@@ -0,0 +1,63 @@
+export enum FILE_TYPE {
+ IMAGE,
+ VIDEO,
+ LIVE_PHOTO,
+ OTHERS,
+}
+
+export interface FileTypeInfo {
+ fileType: FILE_TYPE;
+ /**
+ * A lowercased, standardized extension for files of the current type.
+ *
+ * TODO(MR): This is not valid for LIVE_PHOTO.
+ */
+ extension: string;
+ mimeType?: string;
+ imageType?: string;
+ videoType?: string;
+}
+
+// List of formats that were missed by type detection for some files.
+export const KnownFileTypeInfos: FileTypeInfo[] = [
+ { fileType: FILE_TYPE.IMAGE, extension: "jpeg", mimeType: "image/jpeg" },
+ { fileType: FILE_TYPE.IMAGE, extension: "jpg", mimeType: "image/jpeg" },
+ { fileType: FILE_TYPE.VIDEO, extension: "webm", mimeType: "video/webm" },
+ { fileType: FILE_TYPE.VIDEO, extension: "mod", mimeType: "video/mpeg" },
+ { fileType: FILE_TYPE.VIDEO, extension: "mp4", mimeType: "video/mp4" },
+ { fileType: FILE_TYPE.IMAGE, extension: "gif", mimeType: "image/gif" },
+ { fileType: FILE_TYPE.VIDEO, extension: "dv", mimeType: "video/x-dv" },
+ {
+ fileType: FILE_TYPE.VIDEO,
+ extension: "wmv",
+ mimeType: "video/x-ms-asf",
+ },
+ {
+ fileType: FILE_TYPE.VIDEO,
+ extension: "hevc",
+ mimeType: "video/hevc",
+ },
+ {
+ fileType: FILE_TYPE.IMAGE,
+ extension: "raf",
+ mimeType: "image/x-fuji-raf",
+ },
+ {
+ fileType: FILE_TYPE.IMAGE,
+ extension: "orf",
+ mimeType: "image/x-olympus-orf",
+ },
+
+ {
+ fileType: FILE_TYPE.IMAGE,
+ extension: "crw",
+ mimeType: "image/x-canon-crw",
+ },
+ {
+ fileType: FILE_TYPE.VIDEO,
+ extension: "mov",
+ mimeType: "video/quicktime",
+ },
+];
+
+export const KnownNonMediaFileExtensions = ["xmp", "html", "txt"];
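
Since detection by content can miss these formats, a consumer would fall back to an extension lookup. A hypothetical helper (not part of this diff) to illustrate the intended use:

    import { KnownFileTypeInfos } from "@/media/file-type";
    import { lowercaseExtension } from "@/next/file";

    // Map a file name to one of the hardcoded entries, if any.
    const knownTypeInfoForFileName = (fileName: string) =>
        KnownFileTypeInfos.find(
            ({ extension }) => extension === lowercaseExtension(fileName),
        );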
diff --git a/web/packages/media/file.ts b/web/packages/media/file.ts
new file mode 100644
index 000000000..c84050049
--- /dev/null
+++ b/web/packages/media/file.ts
@@ -0,0 +1,4 @@
+import type { Metadata } from "./types/file";
+
+export const hasFileHash = (file: Metadata) =>
+ !!file.hash || (!!file.imageHash && !!file.videoHash);
diff --git a/web/packages/media/live-photo.ts b/web/packages/media/live-photo.ts
index 16143ca13..35a186a41 100644
--- a/web/packages/media/live-photo.ts
+++ b/web/packages/media/live-photo.ts
@@ -1,5 +1,54 @@
-import { fileNameFromComponents, nameAndExtension } from "@/next/file";
+import {
+ fileNameFromComponents,
+ lowercaseExtension,
+ nameAndExtension,
+} from "@/next/file";
import JSZip from "jszip";
+import { FILE_TYPE } from "./file-type";
+
+const potentialImageExtensions = [
+ "heic",
+ "heif",
+ "jpeg",
+ "jpg",
+ "png",
+ "gif",
+ "bmp",
+ "tiff",
+ "webp",
+];
+
+const potentialVideoExtensions = [
+ "mov",
+ "mp4",
+ "m4v",
+ "avi",
+ "wmv",
+ "flv",
+ "mkv",
+ "webm",
+ "3gp",
+ "3g2",
+ "avi",
+ "ogv",
+ "mpg",
+ "mp",
+];
+
+/**
+ * Use the file extension of the given {@link fileName} to deduce if it is
+ * potentially the image or the video part of a Live Photo.
+ */
+export const potentialFileTypeFromExtension = (
+ fileName: string,
+): FILE_TYPE | undefined => {
+ const ext = lowercaseExtension(fileName);
+ if (!ext) return undefined;
+
+ if (potentialImageExtensions.includes(ext)) return FILE_TYPE.IMAGE;
+ else if (potentialVideoExtensions.includes(ext)) return FILE_TYPE.VIDEO;
+ else return undefined;
+};
/**
* An in-memory representation of a live photo.
@@ -61,6 +110,14 @@ export const decodeLivePhoto = async (
return { imageFileName, imageData, videoFileName, videoData };
};
+/** Variant of {@link LivePhoto} whose parts can be either File objects or raw data. */
+interface EncodeLivePhotoInput {
+ imageFileName: string;
+ imageFileOrData: File | Uint8Array;
+ videoFileName: string;
+ videoFileOrData: File | Uint8Array;
+}
+
/**
* Return a binary serialized representation of a live photo.
*
@@ -73,15 +130,15 @@ export const decodeLivePhoto = async (
*/
export const encodeLivePhoto = async ({
imageFileName,
- imageData,
+ imageFileOrData,
videoFileName,
- videoData,
-}: LivePhoto) => {
+ videoFileOrData,
+}: EncodeLivePhotoInput) => {
const [, imageExt] = nameAndExtension(imageFileName);
const [, videoExt] = nameAndExtension(videoFileName);
const zip = new JSZip();
- zip.file(fileNameFromComponents(["image", imageExt]), imageData);
- zip.file(fileNameFromComponents(["video", videoExt]), videoData);
+ zip.file(fileNameFromComponents(["image", imageExt]), imageFileOrData);
+ zip.file(fileNameFromComponents(["video", videoExt]), videoFileOrData);
return await zip.generateAsync({ type: "uint8array" });
};
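
Usage sketch for the new input shape (file names are examples; `imageFile` and `videoFile` are assumed to be in scope):

    import { encodeLivePhoto } from "@/media/live-photo";

    // Pack the two parts into the single zip that is the live photo's
    // on-the-wire representation. Passing File objects avoids having to
    // read them into memory ourselves; JSZip accepts both forms.
    const livePhotoBytes = await encodeLivePhoto({
        imageFileName: "IMG_1234.HEIC",
        imageFileOrData: imageFile, // File | Uint8Array
        videoFileName: "IMG_1234.MOV",
        videoFileOrData: videoFile, // File | Uint8Array
    });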
diff --git a/web/packages/media/package.json b/web/packages/media/package.json
index 7ab047317..8be7e8bb6 100644
--- a/web/packages/media/package.json
+++ b/web/packages/media/package.json
@@ -4,6 +4,7 @@
"private": true,
"dependencies": {
"@/next": "*",
+ "file-type": "16.5.4",
"jszip": "^3.10"
}
}
diff --git a/web/packages/media/types/file.ts b/web/packages/media/types/file.ts
new file mode 100644
index 000000000..b6314b7cd
--- /dev/null
+++ b/web/packages/media/types/file.ts
@@ -0,0 +1,73 @@
+import type { FILE_TYPE } from "../file-type";
+
+/**
+ * Information about the file that never changes post upload.
+ *
+ * [Note: Metadatum]
+ *
+ * There are three different sources of metadata relating to a file.
+ *
+ * 1. Metadata
+ * 2. Magic Metadata
+ * 3. Public Magic Metadata
+ *
+ * The names of API entities are such for historical reasons, but we can think
+ * of them as:
+ *
+ * 1. Metadata
+ * 2. Private Mutable Metadata
+ * 3. Shared Mutable Metadata
+ *
+ * Metadata is the original metadata that we attached to the file when it was
+ * uploaded. It is immutable, and it never changes.
+ *
+ * Later on, the user might make changes to the file's metadata. Since the
+ * metadata is immutable, we need a place to keep these mutations.
+ *
+ * Some mutations are "private" to the user who owns the file. For example, the
+ * user might archive the file. Such modifications get written to (2), Private
+ * Mutable Metadata.
+ *
+ * Other mutations are "public" across all the users with whom the file is
+ * shared. For example, if the user (owner) edits the name of the file, all
+ * people with whom this file is shared can see the new edited name. Such
+ * modifications get written to (3), Shared Mutable Metadata.
+ *
+ * When the client needs to show a file, it needs to "merge" in 2 or 3 of these
+ * sources.
+ *
+ * - When showing a shared file, (1) and (3) are merged, with changes from (3)
+ * taking precedence, to obtain the full metadata pertinent to the file.
+ * - When showing a normal (un-shared) file, (1), (2) and (3) are merged, with
+ * changes from (2) and (3) taking precedence, to obtain the full metadata.
+ * (2) and (3) have no intersection of keys, so they can be merged in any
+ * order.
+ *
+ * While these sources can be conceptually merged, it is important for the
+ * client to also retain the original sources unchanged. This is because the
+ * metadatas (any of the three) might have keys that the current client does not
+ * yet understand, so when updating some key, say filename in (3), it should
+ * only edit the key it knows about but retain the rest of the source JSON
+ * unchanged.
+ */
+export interface Metadata {
+ /**
+ * The file name.
+ *
+ * See: [Note: File name for local EnteFile objects]
+ */
+ title: string;
+ creationTime: number;
+ modificationTime: number;
+ latitude: number;
+ longitude: number;
+ /** The "Ente" file type. */
+ fileType: FILE_TYPE;
+ hasStaticThumbnail?: boolean;
+ hash?: string;
+ imageHash?: string;
+ videoHash?: string;
+ localID?: number;
+ version?: number;
+ deviceFolder?: string;
+}
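
To make the precedence concrete, here is a sketch (not code from this PR) of the merge described above, assuming the two mutable metadata objects have already been decrypted into plain key-value form:

    import type { Metadata } from "@/media/types/file";

    // (2) and (3) never share keys, so their relative order is immaterial;
    // both override the immutable baseline (1). The original objects must
    // still be persisted unmodified, per the last paragraph of the note.
    const mergedMetadata = (
        metadata: Metadata, // (1)
        privateMutable: Partial<Metadata>, // (2)
        sharedMutable: Partial<Metadata>, // (3)
    ): Metadata => ({
        ...metadata,
        ...privateMutable,
        ...sharedMutable,
    });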
diff --git a/web/packages/next/blob-cache.ts b/web/packages/next/blob-cache.ts
index 8789a5078..0e092fed6 100644
--- a/web/packages/next/blob-cache.ts
+++ b/web/packages/next/blob-cache.ts
@@ -113,6 +113,10 @@ export const openCache = async (
*
* await blob.arrayBuffer()
*
+ * To convert from a Blob to Uint8Array, chain the two steps
+ *
+ * new Uint8Array(await blob.arrayBuffer())
+ *
* To convert from an ArrayBuffer or Uint8Array to Blob
*
* new Blob([arrayBuffer, andOrAnyArray, andOrstring])
diff --git a/web/packages/next/file.ts b/web/packages/next/file.ts
index 83b20f2ec..56d27b79b 100644
--- a/web/packages/next/file.ts
+++ b/web/packages/next/file.ts
@@ -25,6 +25,23 @@ export const nameAndExtension = (fileName: string): FileNameComponents => {
return [fileName.slice(0, i), fileName.slice(i + 1)];
};
+/**
+ * If the file name or path has an extension, return a lowercased version of it.
+ *
+ * This is handy when comparing the extension to a known set without worrying
+ * about case sensitivity.
+ *
+ * See {@link nameAndExtension} for its more generic sibling.
+ */
+export const lowercaseExtension = (
+ fileNameOrPath: string,
+): string | undefined => {
+ // We rely on the implementation of nameAndExtension using lastIndexOf to
+ // allow us to also work on paths.
+ const [, ext] = nameAndExtension(fileNameOrPath);
+ return ext?.toLowerCase();
+};
+
/**
* Construct a file name from its components (name and extension).
*
@@ -66,6 +83,13 @@ export const dirname = (path: string) => {
return pathComponents.join("/");
};
+/**
+ * Return a short description of the given {@link fileOrPath} suitable for
+ * helping identify it in log messages.
+ */
+export const fopLabel = (fileOrPath: File | string) =>
+ fileOrPath instanceof File ? `File(${fileOrPath.name})` : fileOrPath;
+
export function getFileNameSize(file: File | ElectronFile) {
return `${file.name}_${convertBytesToHumanReadable(file.size)}`;
}
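
Quick examples of the two new helpers:

    import { fopLabel, lowercaseExtension } from "@/next/file";

    lowercaseExtension("IMG_1234.HEIC"); // "heic"
    lowercaseExtension("photos/archive.tar.GZ"); // "gz" (last "." wins)
    lowercaseExtension("README"); // undefined

    fopLabel("/tmp/flower.jpeg"); // "/tmp/flower.jpeg"
    fopLabel(new File([], "flower.jpeg")); // "File(flower.jpeg)"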
diff --git a/web/packages/next/locales/de-DE/translation.json b/web/packages/next/locales/de-DE/translation.json
index 38b877fd4..de7980f3e 100644
--- a/web/packages/next/locales/de-DE/translation.json
+++ b/web/packages/next/locales/de-DE/translation.json
@@ -455,7 +455,7 @@
"CURRENT_USAGE": "Aktuelle Nutzung ist {{usage}}",
"WEAK_DEVICE": "",
"DRAG_AND_DROP_HINT": "",
- "CONFIRM_ACCOUNT_DELETION_MESSAGE": "Ihre hochgeladenen Daten werden zur Löschung vorgemerkt, und Ihr Konto wird endgültig gelöscht.
Dieser Vorgang kann nicht rückgängig gemacht werden.",
+ "CONFIRM_ACCOUNT_DELETION_MESSAGE": "Deine hochgeladenen Daten werden zur Löschung vorgemerkt und dein Konto wird endgültig gelöscht.
Dieser Vorgang kann nicht rückgängig gemacht werden.",
"AUTHENTICATE": "Authentifizieren",
"UPLOADED_TO_SINGLE_COLLECTION": "",
"UPLOADED_TO_SEPARATE_COLLECTIONS": "",
@@ -606,7 +606,7 @@
"VISIT_CAST_ENTE_IO": "",
"CAST_AUTO_PAIR_FAILED": "",
"FREEHAND": "Freihand",
- "APPLY_CROP": "",
+ "APPLY_CROP": "Zuschnitt anwenden",
"PHOTO_EDIT_REQUIRED_TO_SAVE": "",
"PASSKEYS": "Passkeys",
"DELETE_PASSKEY": "Passkey löschen",
diff --git a/web/packages/next/locales/it-IT/translation.json b/web/packages/next/locales/it-IT/translation.json
index eb3e6bfa8..b66131ad7 100644
--- a/web/packages/next/locales/it-IT/translation.json
+++ b/web/packages/next/locales/it-IT/translation.json
@@ -7,7 +7,7 @@
"HERO_SLIDE_3": "Android, iOS, Web, Desktop",
"LOGIN": "Accedi",
"SIGN_UP": "Registrati",
- "NEW_USER": "",
+ "NEW_USER": "Prima volta con Ente",
"EXISTING_USER": "Accedi",
"ENTER_NAME": "Inserisci il nome",
"PUBLIC_UPLOADER_NAME_MESSAGE": "Aggiungi un nome in modo che i tuoi amici sappiano chi ringraziare per queste fantastiche foto!",
@@ -168,18 +168,18 @@
"UPDATE_PAYMENT_METHOD": "Aggiorna metodo di pagamento",
"MONTHLY": "Mensile",
"YEARLY": "Annuale",
- "update_subscription_title": "",
+ "update_subscription_title": "Conferma le modifiche al piano",
"UPDATE_SUBSCRIPTION_MESSAGE": "Sei sicuro di voler cambiare il piano?",
"UPDATE_SUBSCRIPTION": "Cambia piano",
"CANCEL_SUBSCRIPTION": "Annulla abbonamento",
"CANCEL_SUBSCRIPTION_MESSAGE": "Tutti i tuoi dati saranno cancellati dai nostri server alla fine di questo periodo di fatturazione.
Sei sicuro di voler annullare il tuo abbonamento?
",
- "CANCEL_SUBSCRIPTION_WITH_ADDON_MESSAGE": "",
+ "CANCEL_SUBSCRIPTION_WITH_ADDON_MESSAGE": "Sei sicuro di volere annullare il tuo abbonamento?
",
"SUBSCRIPTION_CANCEL_FAILED": "Impossibile annullare l'abbonamento",
"SUBSCRIPTION_CANCEL_SUCCESS": "Abbonamento annullato con successo",
"REACTIVATE_SUBSCRIPTION": "Riattiva abbonamento",
"REACTIVATE_SUBSCRIPTION_MESSAGE": "Una volta riattivato, ti verrà addebitato il valore di {{date, dateTime}}",
"SUBSCRIPTION_ACTIVATE_SUCCESS": "Iscrizione attivata con successo ",
- "SUBSCRIPTION_ACTIVATE_FAILED": "",
+ "SUBSCRIPTION_ACTIVATE_FAILED": "Impossibile riattivare il rinnovo dell'abbonamento",
"SUBSCRIPTION_PURCHASE_SUCCESS_TITLE": "Grazie",
"CANCEL_SUBSCRIPTION_ON_MOBILE": "Annulla abbonamento mobile",
"CANCEL_SUBSCRIPTION_ON_MOBILE_MESSAGE": "",
@@ -201,7 +201,7 @@
"CREATE_ALBUM_FAILED": "Operazione di creazione dell'album fallita, per favore riprova",
"SEARCH": "Ricerca",
"SEARCH_RESULTS": "Risultati della ricerca",
- "NO_RESULTS": "",
+ "NO_RESULTS": "Nessun risultato trovato",
"SEARCH_HINT": "",
"SEARCH_TYPE": {
"COLLECTION": "Album",
@@ -219,7 +219,7 @@
"photos_count_other": "",
"TERMS_AND_CONDITIONS": "",
"ADD_TO_COLLECTION": "Aggiungi all'album",
- "SELECTED": "",
+ "SELECTED": "selezionato",
"PEOPLE": "Persone",
"INDEXING_SCHEDULED": "",
"ANALYZING_PHOTOS": "",
@@ -241,8 +241,8 @@
"DISABLE_MAPS": "Disattivare Mappa?",
"ENABLE_MAP_DESCRIPTION": "",
"DISABLE_MAP_DESCRIPTION": "",
- "DISABLE_MAP": "",
- "DETAILS": "",
+ "DISABLE_MAP": "Disattivare Mappa",
+ "DETAILS": "Dettagli",
"VIEW_EXIF": "",
"NO_EXIF": "",
"EXIF": "EXIF",
@@ -258,23 +258,23 @@
"LOST_DEVICE": "",
"INCORRECT_CODE": "Codice errato",
"TWO_FACTOR_INFO": "Aggiungi un ulteriore livello di sicurezza richiedendo più informazioni rispetto a email e password per eseguire l'accesso al tuo account",
- "DISABLE_TWO_FACTOR_LABEL": "",
+ "DISABLE_TWO_FACTOR_LABEL": "Disabilita l'autenticazione a due fattori",
"UPDATE_TWO_FACTOR_LABEL": "",
"DISABLE": "",
"RECONFIGURE": "",
"UPDATE_TWO_FACTOR": "",
"UPDATE_TWO_FACTOR_MESSAGE": "",
- "UPDATE": "",
+ "UPDATE": "Aggiorna",
"DISABLE_TWO_FACTOR": "",
"DISABLE_TWO_FACTOR_MESSAGE": "",
"TWO_FACTOR_DISABLE_FAILED": "",
"EXPORT_DATA": "Esporta dati",
- "SELECT_FOLDER": "",
- "DESTINATION": "",
+ "SELECT_FOLDER": "Seleziona cartella",
+ "DESTINATION": "Destinazione",
"START": "",
"LAST_EXPORT_TIME": "",
- "EXPORT_AGAIN": "",
- "LOCAL_STORAGE_NOT_ACCESSIBLE": "",
+ "EXPORT_AGAIN": "Risincronizza",
+ "LOCAL_STORAGE_NOT_ACCESSIBLE": "Archivio locale non accessibile",
"LOCAL_STORAGE_NOT_ACCESSIBLE_MESSAGE": "",
"SEND_OTT": "Invia OTP",
"EMAIl_ALREADY_OWNED": "Email già in uso",
diff --git a/web/packages/next/log.ts b/web/packages/next/log.ts
index 3dadbd288..a04520ed3 100644
--- a/web/packages/next/log.ts
+++ b/web/packages/next/log.ts
@@ -27,27 +27,30 @@ const workerLogToDisk = (message: string) => {
});
};
-const logError = (message: string, e?: unknown) => {
- if (!e) {
- logError_(message);
- return;
- }
+const messageWithError = (message: string, e?: unknown) => {
+ if (!e) return message;
let es: string;
if (e instanceof Error) {
// In practice, we expect ourselves to be called with Error objects, so
// this is the happy path so to say.
- es = `${e.name}: ${e.message}\n${e.stack}`;
+ es = [`${e.name}: ${e.message}`, e.stack].filter((x) => x).join("\n");
} else {
// For the rest rare cases, use the default string serialization of e.
es = String(e);
}
- logError_(`${message}: ${es}`);
+ return `${message}: ${es}`;
};
-const logError_ = (message: string) => {
- const m = `[error] ${message}`;
+const logError = (message: string, e?: unknown) => {
+ const m = `[error] ${messageWithError(message, e)}`;
+ if (isDevBuild) console.error(m);
+ logToDisk(m);
+};
+
+const logWarn = (message: string, e?: unknown) => {
+ const m = `[warn] ${messageWithError(message, e)}`;
if (isDevBuild) console.error(m);
logToDisk(m);
};
@@ -90,6 +93,11 @@ export default {
* printed to the browser console.
*/
error: logError,
+ /**
+ * Sibling of {@link error}, with the same parameters and behaviour, except
+ * it gets prefixed with a warning instead of an error tag.
+ */
+ warn: logWarn,
/**
* Log a message.
*
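
With `log.warn`, call sites can record non-fatal conditions without tagging them as errors. A sketch (the awaited function is hypothetical):

    import log from "@/next/log";

    declare const prefetchThumbnail: () => Promise<void>; // hypothetical

    try {
        await prefetchThumbnail();
    } catch (e) {
        // Recoverable: note it in the log file and move on, without the
        // [error] tag that log.error would add.
        log.warn("Could not prefetch thumbnail", e);
    }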
diff --git a/web/packages/next/types/file.ts b/web/packages/next/types/file.ts
index dc8a148e9..75641e3a2 100644
--- a/web/packages/next/types/file.ts
+++ b/web/packages/next/types/file.ts
@@ -1,8 +1,3 @@
-export enum UPLOAD_STRATEGY {
- SINGLE_COLLECTION,
- COLLECTION_PER_FOLDER,
-}
-
/*
* ElectronFile is a custom interface that is used to represent
* any file on disk as a File-like object in the Electron desktop app.
@@ -21,11 +16,6 @@ export interface ElectronFile {
arrayBuffer: () => Promise<ArrayBuffer>;
}
-export interface DataStream {
- stream: ReadableStream;
- chunkCount: number;
-}
-
export interface EventQueueItem {
type: "upload" | "trash";
folderPath: string;
diff --git a/web/packages/next/types/ipc.ts b/web/packages/next/types/ipc.ts
index 0628bb0ca..1622a820d 100644
--- a/web/packages/next/types/ipc.ts
+++ b/web/packages/next/types/ipc.ts
@@ -189,34 +189,94 @@ export interface Electron {
* directory.
*/
isDir: (dirPath: string) => Promise<boolean>;
- };
- /*
- * TODO: AUDIT below this - Some of the types we use below are not copyable
- * across process boundaries, and such functions will (expectedly) fail at
- * runtime. For such functions, find an efficient alternative or refactor
- * the dataflow.
- */
+ /**
+ * Return the size in bytes of the file at {@link path}.
+ */
+ size: (path: string) => Promise<number>;
+ };
// - Conversion
- convertToJPEG: (
- fileData: Uint8Array,
- filename: string,
- ) => Promise<Uint8Array>;
+ /**
+ * Try to convert an arbitrary image into JPEG using native layer tools.
+ *
+ * The behaviour is OS dependent. On macOS we use the `sips` utility, and on
+ * some Linux architectures we use an ImageMagick executable bundled with
+ * our desktop app.
+ *
+ * In other cases (primarily Windows), where native JPEG conversion is not
+ * yet possible, this function will throw an error with the
+ * {@link CustomErrorMessage.NotAvailable} message.
+ *
+ * @param imageData The raw image data (the contents of the image file).
+ *
+ * @returns JPEG data of the converted image.
+ */
+ convertToJPEG: (imageData: Uint8Array) => Promise<Uint8Array>;
+ /**
+ * Generate a JPEG thumbnail for the given image.
+ *
+ * The behaviour is OS dependent. On macOS we use the `sips` utility, and on
+ * some Linux architectures we use an ImageMagick executable bundled with
+ * our desktop app.
+ *
+ * In other cases (primarily Windows), where native thumbnail generation is
+ * not yet possible, this function will throw an error with the
+ * {@link CustomErrorMessage.NotAvailable} message.
+ *
+ * @param dataOrPath The raw image data (the contents of the image file), or
+ * the path to the image file, whose thumbnail we want to generate.
+ * @param maxDimension The maximum width or height of the generated
+ * thumbnail.
+ * @param maxSize Maximum size (in bytes) of the generated thumbnail.
+ *
+ * @returns JPEG data of the generated thumbnail.
+ */
generateImageThumbnail: (
- inputFile: File | ElectronFile,
+ dataOrPath: Uint8Array | string,
maxDimension: number,
maxSize: number,
) => Promise<Uint8Array>;
- runFFmpegCmd: (
- cmd: string[],
- inputFile: File | ElectronFile,
- outputFileName: string,
- dontTimeout?: boolean,
- ) => Promise<File>;
+ /**
+ * Execute a FFmpeg {@link command} on the given {@link dataOrPath}.
+ *
+ * This executes the command using a FFmpeg executable we bundle with our
+ * desktop app. We also have a wasm FFmpeg implementation that we use
+ * when running on the web, which has a sibling function with the same
+ * parameters. See [Note: FFmpeg in Electron].
+ *
+ * @param command An array of strings, each representing one positional
+ * parameter in the command to execute. Placeholders for the input, output
+ * and ffmpeg's own path are replaced before executing the command
+ * (respectively {@link inputPathPlaceholder},
+ * {@link outputPathPlaceholder}, {@link ffmpegPathPlaceholder}).
+ *
+ * @param dataOrPath The bytes of the input file, or the path to the input
+ * file on the user's local disk. In both cases, the data gets serialized to
+ * a temporary file, and then that path gets substituted in the FFmpeg
+ * {@link command} in lieu of {@link inputPathPlaceholder}.
+ *
+ * @param outputFileExtension The extension (without the dot, e.g. "jpeg")
+ * to use for the output file that we ask FFmpeg to create in
+ * {@link command}. While this file will eventually get deleted, and we'll
+ * just return its contents, for some FFmpeg commands the extension matters
+ * (e.g. conversion to a JPEG fails if the extension is arbitrary).
+ *
+ * @param timeoutMS If non-zero, then abort and throw a timeout error if the
+ * ffmpeg command takes more than the given number of milliseconds.
+ *
+ * @returns The contents of the output file produced by the ffmpeg command
+ * (specified as {@link outputPathPlaceholder} in {@link command}).
+ */
+ ffmpegExec: (
+ command: string[],
+ dataOrPath: Uint8Array | string,
+ outputFileExtension: string,
+ timeoutMS: number,
+ ) => Promise<Uint8Array>;
// - ML
@@ -232,7 +292,18 @@ export interface Electron {
clipImageEmbedding: (jpegImageData: Uint8Array) => Promise<Float32Array>;
/**
- * Return a CLIP embedding of the given image.
+ * Return a CLIP embedding of the given image if we already have the model
+ * downloaded and prepped. If the model is not available return `undefined`.
+ *
+ * This differs from the other sibling ML functions in that it doesn't wait
+ * for the model download to finish. It does trigger a model download, but
+ * then immediately returns `undefined`. At some future point, when the
+ * model download finishes, calls to this function will start returning
+ * the result we seek.
+ *
+ * The reason for doing it in this asymmetric way is because CLIP text
+ * embeddings are used as part of deducing user initiated search results,
+ * and we don't want to block that interaction on a large network request.
*
* See: [Note: CLIP based magic search]
*
@@ -240,7 +311,9 @@ export interface Electron {
*
* @returns A CLIP embedding.
*/
- clipTextEmbedding: (text: string) => Promise<Float32Array>;
+ clipTextEmbeddingIfAvailable: (
+ text: string,
+ ) => Promise<Float32Array | undefined>;
/**
* Detect faces in the given image using YOLO.
@@ -418,14 +491,40 @@ export interface Electron {
filePaths: string[],
) => Promise<void>;
+ /*
+ * TODO: AUDIT below this - Some of the types we use below are not copyable
+ * across process boundaries, and such functions will (expectedly) fail at
+ * runtime. For such functions, find an efficient alternative or refactor
+ * the dataflow.
+ */
+
// -
getElectronFilesFromGoogleZip: (
filePath: string,
) => Promise<ElectronFile[]>;
- getDirFiles: (dirPath: string) => Promise<ElectronFile[]>;
}
+/**
+ * Errors that have special semantics on the web side.
+ *
+ * [Note: Custom errors across Electron/Renderer boundary]
+ *
+ * If we need to identify errors thrown by the main process when invoked from
+ * the renderer process, we can only use the `message` field because:
+ *
+ * > Errors thrown through `handle` in the main process are not transparent as
+ * > they are serialized and only the `message` property from the original error
+ * > is provided to the renderer process.
+ * >
+ * > - https://www.electronjs.org/docs/latest/tutorial/ipc
+ * >
+ * > Ref: https://github.com/electron/electron/issues/24427
+ */
+export const CustomErrorMessage = {
+ NotAvailable: "This feature in not available on the current OS/arch",
+};
+
/**
* Data passed across the IPC bridge when an app update is available.
*/
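
Sketch of how the renderer is expected to consume {@link CustomErrorMessage}, given that only `message` survives the IPC boundary (the wasm fallback here is hypothetical):

    import { CustomErrorMessage, type Electron } from "@/next/types/ipc";

    declare const wasmConvertToJPEG: (data: Uint8Array) => Promise<Uint8Array>;

    const convertToJPEG = async (electron: Electron, imageData: Uint8Array) => {
        try {
            // Native conversion (sips / ImageMagick) when the OS supports it.
            return await electron.convertToJPEG(imageData);
        } catch (e) {
            // Only the message survives serialization across the IPC bridge,
            // so match on it rather than on an Error subclass.
            if (e instanceof Error && e.message == CustomErrorMessage.NotAvailable)
                return await wasmConvertToJPEG(imageData);
            throw e;
        }
    };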
diff --git a/web/packages/next/worker/comlink-worker.ts b/web/packages/next/worker/comlink-worker.ts
index a5237fccc..5929e5361 100644
--- a/web/packages/next/worker/comlink-worker.ts
+++ b/web/packages/next/worker/comlink-worker.ts
@@ -12,24 +12,17 @@ export class ComlinkWorker<T extends new () => InstanceType<T>> {
this.name = name;
this.worker = worker;
- this.worker.onerror = (ev) => {
+ worker.onerror = (event) => {
log.error(
- `Got error event from worker: ${JSON.stringify({
- errorEvent: JSON.stringify(ev),
- name: this.name,
- })}`,
+ `Got error event from worker: ${JSON.stringify({ event, name })}`,
);
};
- log.debug(() => `Initiated ${this.name}`);
- const comlink = wrap(this.worker);
+ log.debug(() => `Initiated web worker ${name}`);
+ const comlink = wrap(worker);
this.remote = new comlink() as Promise<Remote<InstanceType<T>>>;
expose(workerBridge, worker);
}
- public getName() {
- return this.name;
- }
-
public terminate() {
this.worker.terminate();
log.debug(() => `Terminated ${this.name}`);
@@ -43,15 +36,16 @@ export class ComlinkWorker<T extends new () => InstanceType<T>> {
* `workerBridge` object after importing it from `worker-bridge.ts`.
*
* Not all workers need access to all these functions, and this can indeed be
- * done in a more fine-grained, per-worker, manner if needed.
+ * done in a more fine-grained, per-worker, manner if needed. For now, since it
+ * is a motley bunch, we just inject them all.
*/
const workerBridge = {
// Needed: generally (presumably)
logToDisk,
// Needed by ML worker
getAuthToken: () => ensureLocalUser().then((user) => user.token),
- convertToJPEG: (inputFileData: Uint8Array, filename: string) =>
- ensureElectron().convertToJPEG(inputFileData, filename),
+ convertToJPEG: (imageData: Uint8Array) =>
+ ensureElectron().convertToJPEG(imageData),
detectFaces: (input: Float32Array) => ensureElectron().detectFaces(input),
faceEmbedding: (input: Float32Array) =>
ensureElectron().faceEmbedding(input),
diff --git a/web/packages/shared/crypto/index.ts b/web/packages/shared/crypto/index.ts
index 00ac8d32f..4e20fb92a 100644
--- a/web/packages/shared/crypto/index.ts
+++ b/web/packages/shared/crypto/index.ts
@@ -1,6 +1,6 @@
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { Remote } from "comlink";
-import { DedicatedCryptoWorker } from "./internal/crypto.worker";
+import { type DedicatedCryptoWorker } from "./internal/crypto.worker";
class ComlinkCryptoWorker {
private comlinkWorkerInstance:
diff --git a/web/packages/shared/crypto/types.ts b/web/packages/shared/crypto/types.ts
index 4cf4c56b1..e591820f0 100644
--- a/web/packages/shared/crypto/types.ts
+++ b/web/packages/shared/crypto/types.ts
@@ -1,17 +1,3 @@
-import { DataStream } from "@/next/types/file";
-
-export interface LocalFileAttributes<
- T extends string | Uint8Array | DataStream,
-> {
- encryptedData: T;
- decryptionHeader: string;
-}
-
-export interface EncryptionResult {
- file: LocalFileAttributes;
- key: string;
-}
-
export interface B64EncryptionResult {
encryptedData: string;
key: string;
diff --git a/web/packages/shared/error/index.ts b/web/packages/shared/error/index.ts
index 12a87d2db..d226d62b6 100644
--- a/web/packages/shared/error/index.ts
+++ b/web/packages/shared/error/index.ts
@@ -22,13 +22,10 @@ export function isApiErrorResponse(object: any): object is ApiErrorResponse {
}
export const CustomError = {
- THUMBNAIL_GENERATION_FAILED: "thumbnail generation failed",
VIDEO_PLAYBACK_FAILED: "video playback failed",
ETAG_MISSING: "no header/etag present in response body",
KEY_MISSING: "encrypted key missing from localStorage",
FAILED_TO_LOAD_WEB_WORKER: "failed to load web worker",
- CHUNK_MORE_THAN_EXPECTED: "chunks more than expected",
- CHUNK_LESS_THAN_EXPECTED: "chunks less than expected",
UNSUPPORTED_FILE_FORMAT: "unsupported file format",
FILE_TOO_LARGE: "file too large",
SUBSCRIPTION_EXPIRED: "subscription expired",
@@ -49,9 +46,6 @@ export const CustomError = {
SUBSCRIPTION_NEEDED: "subscription not present",
NOT_FOUND: "not found ",
NO_METADATA: "no metadata",
- TOO_LARGE_LIVE_PHOTO_ASSETS: "too large live photo assets",
- NOT_A_DATE: "not a date",
- NOT_A_LOCATION: "not a location",
FILE_ID_NOT_FOUND: "file with id not found",
WEAK_DEVICE: "password decryption failed on the device",
INCORRECT_PASSWORD: "incorrect password",
@@ -60,8 +54,6 @@ export const CustomError = {
HIDDEN_COLLECTION_SYNC_FILE_ATTEMPTED:
"hidden collection sync file attempted",
UNKNOWN_ERROR: "Something went wrong, please try again",
- TYPE_DETECTION_FAILED: (fileFormat: string) =>
- `type detection failed ${fileFormat}`,
WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED:
"Windows native image processing is not supported",
NETWORK_ERROR: "Network Error",
@@ -73,9 +65,6 @@ export const CustomError = {
AUTH_KEY_NOT_FOUND: "auth key not found",
EXIF_DATA_NOT_FOUND: "exif data not found",
SELECT_FOLDER_ABORTED: "select folder aborted",
- NON_MEDIA_FILE: "non media file",
- UNSUPPORTED_RAW_FORMAT: "unsupported raw format",
- NON_PREVIEWABLE_FILE: "non previewable file",
PROCESSING_FAILED: "processing failed",
EXPORT_RECORD_JSON_PARSING_FAILED: "export record json parsing failed",
TWO_FACTOR_ENABLED: "two factor enabled",
@@ -84,8 +73,6 @@ export const CustomError = {
ServerError: "server error",
FILE_NOT_FOUND: "file not found",
UNSUPPORTED_PLATFORM: "Unsupported platform",
- MODEL_DOWNLOAD_PENDING:
- "Model download pending, skipping clip search request",
UPDATE_URL_FILE_ID_MISMATCH: "update url file id mismatch",
URL_ALREADY_SET: "url already set",
FILE_CONVERSION_FAILED: "file conversion failed",
diff --git a/web/packages/shared/hooks/useFileInput.tsx b/web/packages/shared/hooks/useFileInput.tsx
index b357d918e..b53fecb58 100644
--- a/web/packages/shared/hooks/useFileInput.tsx
+++ b/web/packages/shared/hooks/useFileInput.tsx
@@ -1,5 +1,19 @@
import { useCallback, useRef, useState } from "react";
+/*
+ * TODO (MR): Understand how this is happening, and validate it further (on
+ * first glance this is correct).
+ *
+ * [Note: File paths when running under Electron]
+ *
+ * We have access to the absolute path of the web {@link File} object when we
+ * are running in the context of our desktop app.
+ *
+ * This is in contrast to the `webkitRelativePath` that we get when we're
+ * running in the browser, which is the relative path to the directory that the
+ * user selected (or just the name of the file if the user selected or
+ * drag/dropped a single one).
+ */
export interface FileWithPath extends File {
readonly path?: string;
}
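
A sketch of how a consumer can use this (note that `webkitRelativePath` is the empty string, not undefined, for single files, hence the second fallback):

    import type { FileWithPath } from "@ente/shared/hooks/useFileInput";

    // Absolute path when running under Electron, otherwise fall back to the
    // browser's relative path, or to the bare name for single files.
    const pathLikeForFile = (file: File) =>
        (file as FileWithPath).path ?? (file.webkitRelativePath || file.name);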
diff --git a/web/packages/shared/time/index.ts b/web/packages/shared/time/index.ts
index d98bc411b..87e1d9648 100644
--- a/web/packages/shared/time/index.ts
+++ b/web/packages/shared/time/index.ts
@@ -5,17 +5,6 @@ export interface TimeDelta {
years?: number;
}
-interface DateComponent<T = number> {
- year: T;
- month: T;
- day: T;
- hour: T;
- minute: T;
- second: T;
-}
-
-const currentYear = new Date().getFullYear();
-
export function getUnixTimeInMicroSecondsWithDelta(delta: TimeDelta): number {
let currentDate = new Date();
if (delta?.hours) {
@@ -71,116 +60,3 @@ function _addYears(date: Date, years: number) {
result.setFullYear(date.getFullYear() + years);
return result;
}
-
-/*
-generates data component for date in format YYYYMMDD-HHMMSS
- */
-export function parseDateFromFusedDateString(dateTime: string) {
- const dateComponent: DateComponent = convertDateComponentToNumber({
- year: dateTime.slice(0, 4),
- month: dateTime.slice(4, 6),
- day: dateTime.slice(6, 8),
- hour: dateTime.slice(9, 11),
- minute: dateTime.slice(11, 13),
- second: dateTime.slice(13, 15),
- });
- return validateAndGetDateFromComponents(dateComponent);
-}
-
-/* sample date format = 2018-08-19 12:34:45
- the date has six symbol separated number values
- which we would extract and use to form the date
- */
-export function tryToParseDateTime(dateTime: string): Date {
- const dateComponent = getDateComponentsFromSymbolJoinedString(dateTime);
- if (dateComponent.year?.length === 8 && dateComponent.month?.length === 6) {
- // the filename has size 8 consecutive and then 6 consecutive digits
- // high possibility that the it is a date in format YYYYMMDD-HHMMSS
- const possibleDateTime = dateComponent.year + "-" + dateComponent.month;
- return parseDateFromFusedDateString(possibleDateTime);
- }
- return validateAndGetDateFromComponents(
- convertDateComponentToNumber(dateComponent),
- );
-}
-
-function getDateComponentsFromSymbolJoinedString(
- dateTime: string,
-): DateComponent<string> {
- const [year, month, day, hour, minute, second] =
- dateTime.match(/\d+/g) ?? [];
-
- return { year, month, day, hour, minute, second };
-}
-
-function validateAndGetDateFromComponents(
- dateComponent: DateComponent,
- options = { minYear: 1990, maxYear: currentYear + 1 },
-) {
- let date = getDateFromComponents(dateComponent);
- if (hasTimeValues(dateComponent) && !isTimePartValid(date, dateComponent)) {
- // if the date has time values but they are not valid
- // then we remove the time values and try to validate the date
- date = getDateFromComponents(removeTimeValues(dateComponent));
- }
- if (!isDatePartValid(date, dateComponent)) {
- return null;
- }
- if (
- date.getFullYear() < options.minYear ||
- date.getFullYear() > options.maxYear
- ) {
- return null;
- }
- return date;
-}
-
-function isTimePartValid(date: Date, dateComponent: DateComponent) {
- return (
- date.getHours() === dateComponent.hour &&
- date.getMinutes() === dateComponent.minute &&
- date.getSeconds() === dateComponent.second
- );
-}
-
-function isDatePartValid(date: Date, dateComponent: DateComponent) {
- return (
- date.getFullYear() === dateComponent.year &&
- date.getMonth() === dateComponent.month &&
- date.getDate() === dateComponent.day
- );
-}
-
-function convertDateComponentToNumber(
- dateComponent: DateComponent<string>,
-): DateComponent {
- return {
- year: Number(dateComponent.year),
- // https://stackoverflow.com/questions/2552483/why-does-the-month-argument-range-from-0-to-11-in-javascripts-date-constructor
- month: Number(dateComponent.month) - 1,
- day: Number(dateComponent.day),
- hour: Number(dateComponent.hour),
- minute: Number(dateComponent.minute),
- second: Number(dateComponent.second),
- };
-}
-
-function getDateFromComponents(dateComponent: DateComponent) {
- const { year, month, day, hour, minute, second } = dateComponent;
- if (hasTimeValues(dateComponent)) {
- return new Date(year, month, day, hour, minute, second);
- } else {
- return new Date(year, month, day);
- }
-}
-
-function hasTimeValues(dateComponent: DateComponent) {
- const { hour, minute, second } = dateComponent;
- return !isNaN(hour) && !isNaN(minute) && !isNaN(second);
-}
-
-function removeTimeValues(
- dateComponent: DateComponent,
-): DateComponent {
- return { ...dateComponent, hour: 0, minute: 0, second: 0 };
-}
diff --git a/web/packages/shared/utils/index.ts b/web/packages/shared/utils/index.ts
index c027b6cb6..568ec5cc4 100644
--- a/web/packages/shared/utils/index.ts
+++ b/web/packages/shared/utils/index.ts
@@ -4,9 +4,8 @@
* This function is a promisified `setTimeout`. It returns a promise that
* resolves after {@link ms} milliseconds.
*/
-export async function sleep(ms: number) {
- await new Promise((resolve) => setTimeout(resolve, ms));
-}
+export const wait = (ms: number) =>
+ new Promise((resolve) => setTimeout(resolve, ms));
export function downloadAsFile(filename: string, content: string) {
const file = new Blob([content], {
@@ -49,29 +48,27 @@ export async function retryAsyncFunction(
if (attemptNumber === waitTimeBeforeNextTry.length) {
throw e;
}
- await sleep(waitTimeBeforeNextTry[attemptNumber]);
+ await wait(waitTimeBeforeNextTry[attemptNumber]);
}
}
}
-export const promiseWithTimeout = async <T>(
- request: Promise<T>,
- timeout: number,
-): Promise<T> => {
- const timeoutRef = { current: null };
- const rejectOnTimeout = new Promise<T>((_, reject) => {
- timeoutRef.current = setTimeout(
+/**
+ * Await the given {@link promise} for {@link ms} milliseconds. If it
+ * does not resolve within {@link ms}, then reject with a timeout error.
+ */
+export const withTimeout = async <T>(promise: Promise<T>, ms: number) => {
+ let timeoutId: ReturnType<typeof setTimeout>;
+ const rejectOnTimeout = new Promise<T>((_, reject) => {
+ timeoutId = setTimeout(
() => reject(new Error("Operation timed out")),
- timeout,
+ ms,
);
});
- const requestWithTimeOutCancellation = async () => {
- const resp = await request;
- clearTimeout(timeoutRef.current);
- return resp;
+ const promiseAndCancelTimeout = async () => {
+ const result = await promise;
+ clearTimeout(timeoutId);
+ return result;
};
- return await Promise.race([
- requestWithTimeOutCancellation(),
- rejectOnTimeout,
- ]);
+ return Promise.race([promiseAndCancelTimeout(), rejectOnTimeout]);
};
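
Call sites then read like this (URL illustrative). Note that on timeout only the awaiting is abandoned; the underlying promise itself is not cancelled:

    import { wait, withTimeout } from "@ente/shared/utils";

    // Give the ping 30 seconds; on timeout the returned promise rejects
    // with "Operation timed out".
    const res = await withTimeout(fetch("https://example.org/ping"), 30 * 1000);

    // wait is the promisified setTimeout documented at the top of the file.
    await wait(1000);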
diff --git a/web/packages/shared/utils/temp.ts b/web/packages/shared/utils/temp.ts
deleted file mode 100644
index 984f4abb0..000000000
--- a/web/packages/shared/utils/temp.ts
+++ /dev/null
@@ -1,14 +0,0 @@
-const CHARACTERS =
- "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
-
-export function generateTempName(length: number, suffix: string) {
- let tempName = "";
-
- const charactersLength = CHARACTERS.length;
- for (let i = 0; i < length; i++) {
- tempName += CHARACTERS.charAt(
- Math.floor(Math.random() * charactersLength),
- );
- }
- return `${tempName}-${suffix}`;
-}
diff --git a/web/packages/utils/ensure.ts b/web/packages/utils/ensure.ts
index 2e8f9a213..761cedc99 100644
--- a/web/packages/utils/ensure.ts
+++ b/web/packages/utils/ensure.ts
export const ensure = <T>(v: T | undefined): T => {
if (v === undefined) throw new Error("Required value was not found");
return v;
};
+
+/**
+ * Throw an exception if the given value is not a string.
+ */
+export const ensureString = (v: unknown): string => {
+ if (typeof v != "string")
+ throw new Error(`Expected a string, instead found ${String(v)}`);
+ return v;
+};
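
Illustrative call sites (the surrounding values are hypothetical):

    import { ensure, ensureString } from "@/utils/ensure";

    const tokens = new Map<string, string>();
    // Narrow `string | undefined` to `string`, throwing early instead of
    // letting undefined propagate deeper.
    const token = ensure(tokens.get("auth"));

    // Validate data of unknown shape, e.g. freshly parsed JSON.
    const name = ensureString(JSON.parse('"alice"'));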
diff --git a/web/yarn.lock b/web/yarn.lock
index 61d2cfeae..6886647d7 100644
--- a/web/yarn.lock
+++ b/web/yarn.lock
@@ -2505,7 +2505,7 @@ file-selector@^0.4.0:
dependencies:
tslib "^2.0.3"
-file-type@^16.5.4:
+file-type@16.5.4:
version "16.5.4"
resolved "https://registry.yarnpkg.com/file-type/-/file-type-16.5.4.tgz#474fb4f704bee427681f98dd390058a172a6c2fd"
integrity sha512-/yFHK0aGjFEgDJjEKP0pWCplsPFPhwyfwevf/pVxiN0tmE4L9LmwWxWukdJSHdoCli4VgQLehjJtwQBnqmsKcw==