[web] [desktop] Consolidate logging (#1376)

Manav Rathi 2024-04-08 21:10:48 +05:30 committed by GitHub
commit f37c46935c
96 changed files with 887 additions and 1219 deletions


@ -7,11 +7,6 @@ module.exports = {
// "plugin:@typescript-eslint/strict-type-checked",
// "plugin:@typescript-eslint/stylistic-type-checked",
],
/* Temporarily disable some rules
Enhancement: Remove me */
rules: {
"no-unused-vars": "off",
},
/* Temporarily add a global
Enhancement: Remove me */
globals: {


@ -61,15 +61,15 @@ Electron process. This allows us to directly use the output produced by
### Others
* [any-shell-escape](https://github.com/boazy/any-shell-escape) is for
escaping shell commands before we execute them (e.g. say when invoking the
embedded ffmpeg CLI).
- [any-shell-escape](https://github.com/boazy/any-shell-escape) is for
escaping shell commands before we execute them (e.g. say when invoking the
embedded ffmpeg CLI).
* [auto-launch](https://github.com/Teamwork/node-auto-launch) is for
automatically starting our app on login, if the user so wishes.
- [auto-launch](https://github.com/Teamwork/node-auto-launch) is for
automatically starting our app on login, if the user so wishes.
* [electron-store](https://github.com/sindresorhus/electron-store) is used for
persisting user preferences and other arbitrary data.
- [electron-store](https://github.com/sindresorhus/electron-store) is used for
persisting user preferences and other arbitrary data.
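For instance, the electron-store usage described in the item above looks roughly like this (a minimal sketch; the store name and key are hypothetical and not taken from this codebase, whose real stores live under src/stores/):

import Store from "electron-store";

// Hypothetical store, purely for illustration.
const preferencesStore = new Store({ name: "preferences" });

// Values are persisted to disk as JSON and survive app restarts.
preferencesStore.set("optOutOfCrashReports", true);
const optedOut = preferencesStore.get("optOutOfCrashReports", false);
console.log(optedOut); // true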
## Dev
@ -79,12 +79,12 @@ are similar to that in the web code.
Some extra ones specific to the code here are:
* [concurrently](https://github.com/open-cli-tools/concurrently) for spawning
parallel tasks when we do `yarn dev`.
- [concurrently](https://github.com/open-cli-tools/concurrently) for spawning
parallel tasks when we do `yarn dev`.
* [shx](https://github.com/shelljs/shx) for providing a portable way to use Unix
commands in our `package.json` scripts. This allows us to use the same
commands (like `ln`) across different platforms like Linux and Windows.
- [shx](https://github.com/shelljs/shx) for providing a portable way to use
Unix commands in our `package.json` scripts. This allows us to use the same
commands (like `ln`) across different platforms like Linux and Windows.
## Functionality
@ -111,11 +111,11 @@ watcher for the watch folders functionality.
### AI/ML
* [onnxruntime-node](https://github.com/Microsoft/onnxruntime)
* html-entities is used by the bundled clip-bpe-ts.
* GGML binaries are bundled
* We also use [jpeg-js](https://github.com/jpeg-js/jpeg-js#readme) for
conversion of all images to JPEG before processing.
- [onnxruntime-node](https://github.com/Microsoft/onnxruntime)
- html-entities is used by the bundled clip-bpe-ts.
- GGML binaries are bundled
- We also use [jpeg-js](https://github.com/jpeg-js/jpeg-js#readme) for
conversion of all images to JPEG before processing.
## ZIP


@ -1,17 +0,0 @@
import { logError } from "../main/log";
import { keysStore } from "../stores/keys.store";
import { safeStorageStore } from "../stores/safeStorage.store";
import { uploadStatusStore } from "../stores/upload.store";
import { watchStore } from "../stores/watch.store";
export const clearElectronStore = () => {
try {
uploadStatusStore.clear();
keysStore.clear();
safeStorageStore.clear();
watchStore.clear();
} catch (e) {
logError(e, "error while clearing electron store");
throw e;
}
};


@ -1,28 +0,0 @@
import { safeStorage } from "electron/main";
import { logError } from "../main/log";
import { safeStorageStore } from "../stores/safeStorage.store";
export async function setEncryptionKey(encryptionKey: string) {
try {
const encryptedKey: Buffer =
await safeStorage.encryptString(encryptionKey);
const b64EncryptedKey = Buffer.from(encryptedKey).toString("base64");
safeStorageStore.set("encryptionKey", b64EncryptedKey);
} catch (e) {
logError(e, "setEncryptionKey failed");
throw e;
}
}
export async function getEncryptionKey(): Promise<string> {
try {
const b64EncryptedKey = safeStorageStore.get("encryptionKey");
if (b64EncryptedKey) {
const keyBuffer = Buffer.from(b64EncryptedKey, "base64");
return await safeStorage.decryptString(keyBuffer);
}
} catch (e) {
logError(e, "getEncryptionKey failed");
throw e;
}
}


@ -1,41 +0,0 @@
import { getElectronFile } from "../services/fs";
import {
getElectronFilesFromGoogleZip,
getSavedFilePaths,
} from "../services/upload";
import { uploadStatusStore } from "../stores/upload.store";
import { ElectronFile, FILE_PATH_TYPE } from "../types/ipc";
export const getPendingUploads = async () => {
const filePaths = getSavedFilePaths(FILE_PATH_TYPE.FILES);
const zipPaths = getSavedFilePaths(FILE_PATH_TYPE.ZIPS);
const collectionName = uploadStatusStore.get("collectionName");
let files: ElectronFile[] = [];
let type: FILE_PATH_TYPE;
if (zipPaths.length) {
type = FILE_PATH_TYPE.ZIPS;
for (const zipPath of zipPaths) {
files = [
...files,
...(await getElectronFilesFromGoogleZip(zipPath)),
];
}
const pendingFilePaths = new Set(filePaths);
files = files.filter((file) => pendingFilePaths.has(file.path));
} else if (filePaths.length) {
type = FILE_PATH_TYPE.FILES;
files = await Promise.all(filePaths.map(getElectronFile));
}
return {
files,
collectionName,
type,
};
};
export {
getElectronFilesFromGoogleZip,
setToUploadCollection,
setToUploadFiles,
} from "../services/upload";


@ -1,26 +0,0 @@
/**
* [Note: Custom errors across Electron/Renderer boundary]
*
* We need to use the `message` field to disambiguate between errors thrown by
* the main process when invoked from the renderer process. This is because:
*
* > Errors thrown through `handle` in the main process are not transparent as
* > they are serialized and only the `message` property from the original error
* > is provided to the renderer process.
* >
* > - https://www.electronjs.org/docs/latest/tutorial/ipc
* >
* > Ref: https://github.com/electron/electron/issues/24427
*/
export const CustomErrors = {
WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED:
"Windows native image processing is not supported",
INVALID_OS: (os: string) => `Invalid OS - ${os}`,
WAIT_TIME_EXCEEDED: "Wait time exceeded",
UNSUPPORTED_PLATFORM: (platform: string, arch: string) =>
`Unsupported platform - ${platform} ${arch}`,
MODEL_DOWNLOAD_PENDING:
"Model download pending, skipping clip search request",
INVALID_FILE_PATH: "Invalid file path",
INVALID_CLIP_MODEL: (model: string) => `Invalid Clip model - ${model}`,
};


@ -12,6 +12,7 @@ import { app, BrowserWindow, Menu } from "electron/main";
import serveNextAt from "next-electron-server";
import { existsSync } from "node:fs";
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import {
addAllowOriginHeader,
@ -19,7 +20,6 @@ import {
handleDockIconHideOnAutoLaunch,
handleDownloads,
handleExternalLinks,
logStartupBanner,
setupMacWindowOnDockIconClick,
setupTrayItem,
} from "./main/init";
@ -72,6 +72,21 @@ const setupRendererServer = () => {
serveNextAt(rendererURL);
};
/**
* Log a standard startup banner.
*
* This helps us identify app starts and capture environment details in the logs.
*/
const logStartupBanner = () => {
const version = isDev ? "dev" : app.getVersion();
log.info(`Starting ente-photos-desktop ${version}`);
const platform = process.platform;
const osRelease = os.release();
const systemVersion = process.getSystemVersion();
log.info("Running on", { platform, osRelease, systemVersion });
};
function enableSharedArrayBufferSupport() {
app.commandLine.appendSwitch("enable-features", "SharedArrayBuffer");
}
@ -126,12 +141,12 @@ const deleteLegacyDiskCacheDirIfExists = async () => {
}
};
function setupAppEventEmitter(mainWindow: BrowserWindow) {
// fire event when mainWindow is in foreground
mainWindow.on("focus", () => {
mainWindow.webContents.send("app-in-foreground");
});
}
const attachEventHandlers = (mainWindow: BrowserWindow) => {
// Let ipcRenderer know when mainWindow is in the foreground.
mainWindow.on("focus", () =>
mainWindow.webContents.send("app-in-foreground"),
);
};
const main = () => {
const gotTheLock = app.requestSingleInstanceLock();
@ -144,6 +159,7 @@ const main = () => {
initLogging();
setupRendererServer();
logStartupBanner();
handleDockIconHideOnAutoLaunch();
increaseDiskCache();
enableSharedArrayBufferSupport();
@ -163,7 +179,6 @@ const main = () => {
//
// Note that some Electron APIs can only be used after this event occurs.
app.on("ready", async () => {
logStartupBanner();
mainWindow = await createWindow();
const watcher = initWatcher(mainWindow);
setupTrayItem(mainWindow);
@ -175,13 +190,13 @@ const main = () => {
handleDownloads(mainWindow);
handleExternalLinks(mainWindow);
addAllowOriginHeader(mainWindow);
setupAppEventEmitter(mainWindow);
attachEventHandlers(mainWindow);
try {
deleteLegacyDiskCacheDirIfExists();
} catch (e) {
// Log but otherwise ignore errors during non-critical startup
// actions
// actions.
log.error("Ignoring startup error", e);
}
});


@ -1,6 +1,5 @@
import { app, BrowserWindow, nativeImage, Tray } from "electron";
import { existsSync } from "node:fs";
import os from "node:os";
import path from "node:path";
import { isAppQuitting, rendererURL } from "../main";
import autoLauncher from "../services/autoLauncher";
@ -77,8 +76,6 @@ export const createWindow = async () => {
return mainWindow;
};
export async function handleUpdates(mainWindow: BrowserWindow) {}
export const setupTrayItem = (mainWindow: BrowserWindow) => {
const iconName = isPlatform("mac")
? "taskbar-icon-Template.png"
@ -149,16 +146,6 @@ export async function handleDockIconHideOnAutoLaunch() {
}
}
export function logStartupBanner() {
const version = isDev ? "dev" : app.getVersion();
log.info(`Hello from ente-photos-desktop ${version}`);
const platform = process.platform;
const osRelease = os.release();
const systemVersion = process.getSystemVersion();
log.info("Running on", { platform, osRelease, systemVersion });
}
function lowerCaseHeaders(responseHeaders: Record<string, string[]>) {
const headers: Record<string, string[]> = {};
for (const key of Object.keys(responseHeaders)) {


@ -10,14 +10,6 @@
import type { FSWatcher } from "chokidar";
import { ipcMain } from "electron/main";
import { clearElectronStore } from "../api/electronStore";
import { getEncryptionKey, setEncryptionKey } from "../api/safeStorage";
import {
getElectronFilesFromGoogleZip,
getPendingUploads,
setToUploadCollection,
setToUploadFiles,
} from "../api/upload";
import {
appVersion,
muteUpdateNotification,
@ -34,6 +26,17 @@ import {
convertToJPEG,
generateImageThumbnail,
} from "../services/imageProcessor";
import {
clearElectronStore,
getEncryptionKey,
setEncryptionKey,
} from "../services/store";
import {
getElectronFilesFromGoogleZip,
getPendingUploads,
setToUploadCollection,
setToUploadFiles,
} from "../services/upload";
import {
addWatchMapping,
getWatchMappings,
@ -91,16 +94,16 @@ export const attachIPCHandlers = () => {
// - General
ipcMain.handle("appVersion", (_) => appVersion());
ipcMain.handle("appVersion", () => appVersion());
ipcMain.handle("openDirectory", (_, dirPath) => openDirectory(dirPath));
ipcMain.handle("openLogDirectory", (_) => openLogDirectory());
ipcMain.handle("openLogDirectory", () => openLogDirectory());
// See [Note: Catching exception during .send/.on]
ipcMain.on("logToDisk", (_, message) => logToDisk(message));
ipcMain.on("clear-electron-store", (_) => {
ipcMain.on("clear-electron-store", () => {
clearElectronStore();
});
@ -108,11 +111,11 @@ export const attachIPCHandlers = () => {
setEncryptionKey(encryptionKey),
);
ipcMain.handle("getEncryptionKey", (_) => getEncryptionKey());
ipcMain.handle("getEncryptionKey", () => getEncryptionKey());
// - App update
ipcMain.on("update-and-restart", (_) => updateAndRestart());
ipcMain.on("update-and-restart", () => updateAndRestart());
ipcMain.on("skip-app-update", (_, version) => skipAppUpdate(version));
@ -157,13 +160,13 @@ export const attachIPCHandlers = () => {
// - File selection
ipcMain.handle("selectDirectory", (_) => selectDirectory());
ipcMain.handle("selectDirectory", () => selectDirectory());
ipcMain.handle("showUploadFilesDialog", (_) => showUploadFilesDialog());
ipcMain.handle("showUploadFilesDialog", () => showUploadFilesDialog());
ipcMain.handle("showUploadDirsDialog", (_) => showUploadDirsDialog());
ipcMain.handle("showUploadDirsDialog", () => showUploadDirsDialog());
ipcMain.handle("showUploadZipDialog", (_) => showUploadZipDialog());
ipcMain.handle("showUploadZipDialog", () => showUploadZipDialog());
// - FS
@ -177,12 +180,12 @@ export const attachIPCHandlers = () => {
ipcMain.handle(
"saveStreamToDisk",
(_, path: string, fileStream: ReadableStream<any>) =>
(_, path: string, fileStream: ReadableStream) =>
saveStreamToDisk(path, fileStream),
);
ipcMain.handle("saveFileToDisk", (_, path: string, file: any) =>
saveFileToDisk(path, file),
ipcMain.handle("saveFileToDisk", (_, path: string, contents: string) =>
saveFileToDisk(path, contents),
);
ipcMain.handle("readTextFile", (_, path: string) => readTextFile(path));
@ -203,7 +206,7 @@ export const attachIPCHandlers = () => {
// - Upload
ipcMain.handle("getPendingUploads", (_) => getPendingUploads());
ipcMain.handle("getPendingUploads", () => getPendingUploads());
ipcMain.handle(
"setToUploadFiles",
@ -252,7 +255,7 @@ export const attachFSWatchIPCHandlers = (watcher: FSWatcher) => {
removeWatchMapping(watcher, folderPath),
);
ipcMain.handle("getWatchMappings", (_) => getWatchMappings());
ipcMain.handle("getWatchMappings", () => getWatchMappings());
ipcMain.handle(
"updateWatchMappingSyncedFiles",


@ -15,7 +15,7 @@ import { isDev } from "./util";
*/
export const initLogging = () => {
log.transports.file.fileName = "ente.log";
log.transports.file.maxSize = 50 * 1024 * 1024; // 50MB;
log.transports.file.maxSize = 50 * 1024 * 1024; // 50 MB
log.transports.file.format = "[{y}-{m}-{d}T{h}:{i}:{s}{z}] {text}";
log.transports.console.level = false;
@ -31,25 +31,7 @@ export const logToDisk = (message: string) => {
log.info(`[rndr] ${message}`);
};
export const logError = logErrorSentry;
/** Deprecated, but no alternative yet */
export function logErrorSentry(
error: any,
msg: string,
info?: Record<string, unknown>,
) {
logToDisk(
`error: ${error?.name} ${error?.message} ${
error?.stack
} msg: ${msg} info: ${JSON.stringify(info)}`,
);
if (isDev) {
console.log(error, { msg, info });
}
}
const logError1 = (message: string, e?: unknown) => {
const logError = (message: string, e?: unknown) => {
if (!e) {
logError_(message);
return;
@ -82,7 +64,7 @@ const logInfo = (...params: any[]) => {
};
const logDebug = (param: () => any) => {
if (isDev) console.log(`[debug] ${util.inspect(param())}`);
if (isDev) console.log(`[main] [debug] ${util.inspect(param())}`);
};
/**
@ -98,12 +80,13 @@ export default {
* Log an error message with an optional associated error object.
*
* {@link e} is generally expected to be an `instanceof Error` but it can be
* any arbitrary object that we obtain, say, when in a try-catch handler.
* any arbitrary object that we obtain, say, when in a try-catch handler (in
* JavaScript any arbitrary value can be thrown).
*
* The log is written to disk. In development builds, the log is also
* printed to the (Node.js process') console.
* printed to the main (Node.js) process console.
*/
error: logError1,
error: logError,
/**
* Log a message.
*
@ -111,7 +94,7 @@ export default {
* arbitrary number of arbitrary parameters that it then serializes.
*
* The log is written to disk. In development builds, the log is also
* printed to the (Node.js process') console.
* printed to the main (Node.js) process console.
*/
info: logInfo,
/**
@ -121,11 +104,11 @@ export default {
* function to call to get the log message instead of directly taking the
* message. The provided function will only be called in development builds.
*
* The function can return an arbitrary value which is serialied before
* The function can return an arbitrary value which is serialized before
* being logged.
*
* This log is not written to disk. It is printed to the (Node.js process')
* console only on development builds.
* This log is NOT written to disk. And it is printed to the main (Node.js)
* process console, but only on development builds.
*/
debug: logDebug,
};
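A brief usage sketch of this log object, matching the documented semantics above (the import path follows how other main-process modules in this diff import it; the messages themselves are made up):

import log from "../main/log";

const state = { pendingUploads: 0 };

try {
    throw new Error("simulated failure");
} catch (e) {
    // Written to disk; also printed to the console in development builds.
    log.error("Failed to sync", e);
}

// Takes an arbitrary number of parameters, which get serialized.
log.info("Running on", { platform: process.platform });

// Lazily evaluated, printed only in development builds, never written to disk.
log.debug(() => `state: ${JSON.stringify(state)}`);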


@ -1,3 +1,4 @@
/* eslint-disable no-unused-vars */
/**
* @file The preload script
*
@ -31,9 +32,9 @@
* and when changing one of them, remember to see if the other two also need
* changing:
*
* - [renderer] web/packages/shared/electron/types.ts contains docs
* - [preload] desktop/src/preload.ts
* - [main] desktop/src/main/ipc.ts contains impl
* - [renderer] web/packages/next/types/electron.ts contains docs
* - [preload] desktop/src/preload.ts
* - [main] desktop/src/main/ipc.ts contains impl
*/
import { contextBridge, ipcRenderer } from "electron/renderer";
@ -53,7 +54,7 @@ import type {
const appVersion = (): Promise<string> => ipcRenderer.invoke("appVersion");
const openDirectory = (dirPath: string): Promise<void> =>
ipcRenderer.invoke("openDirectory");
ipcRenderer.invoke("openDirectory", dirPath);
const openLogDirectory = (): Promise<void> =>
ipcRenderer.invoke("openLogDirectory");
@ -68,9 +69,7 @@ const fsExists = (path: string): Promise<boolean> =>
const registerForegroundEventListener = (onForeground: () => void) => {
ipcRenderer.removeAllListeners("app-in-foreground");
ipcRenderer.on("app-in-foreground", () => {
onForeground();
});
ipcRenderer.on("app-in-foreground", onForeground);
};
const clearElectronStore = () => {
@ -228,11 +227,11 @@ const checkExistsAndCreateDir = (dirPath: string): Promise<void> =>
const saveStreamToDisk = (
path: string,
fileStream: ReadableStream<any>,
fileStream: ReadableStream,
): Promise<void> => ipcRenderer.invoke("saveStreamToDisk", path, fileStream);
const saveFileToDisk = (path: string, file: any): Promise<void> =>
ipcRenderer.invoke("saveFileToDisk", path, file);
const saveFileToDisk = (path: string, contents: string): Promise<void> =>
ipcRenderer.invoke("saveFileToDisk", path, contents);
const readTextFile = (path: string): Promise<string> =>
ipcRenderer.invoke("readTextFile", path);


@ -1,9 +1,9 @@
import { compareVersions } from "compare-versions";
import { app, BrowserWindow } from "electron";
import { default as ElectronLog, default as log } from "electron-log";
import { default as electronLog } from "electron-log";
import { autoUpdater } from "electron-updater";
import { setIsAppQuitting, setIsUpdateAvailable } from "../main";
import { logErrorSentry } from "../main/log";
import log from "../main/log";
import { AppUpdateInfo } from "../types/ipc";
import {
clearMuteUpdateNotificationVersion,
@ -18,7 +18,7 @@ const FIVE_MIN_IN_MICROSECOND = 5 * 60 * 1000;
const ONE_DAY_IN_MICROSECOND = 1 * 24 * 60 * 60 * 1000;
export function setupAutoUpdater(mainWindow: BrowserWindow) {
autoUpdater.logger = log;
autoUpdater.logger = electronLog;
autoUpdater.autoDownload = false;
checkForUpdateAndNotify(mainWindow);
setInterval(
@ -33,49 +33,36 @@ export function forceCheckForUpdateAndNotify(mainWindow: BrowserWindow) {
clearMuteUpdateNotificationVersion();
checkForUpdateAndNotify(mainWindow);
} catch (e) {
logErrorSentry(e, "forceCheckForUpdateAndNotify failed");
log.error("forceCheckForUpdateAndNotify failed", e);
}
}
async function checkForUpdateAndNotify(mainWindow: BrowserWindow) {
try {
log.debug("checkForUpdateAndNotify called");
const updateCheckResult = await autoUpdater.checkForUpdates();
log.debug("update version", updateCheckResult.updateInfo.version);
if (
compareVersions(
updateCheckResult.updateInfo.version,
app.getVersion(),
) <= 0
) {
log.debug("already at latest version");
log.debug(() => "checkForUpdateAndNotify");
const { updateInfo } = await autoUpdater.checkForUpdates();
log.debug(() => `Update version ${updateInfo.version}`);
if (compareVersions(updateInfo.version, app.getVersion()) <= 0) {
log.debug(() => "Skipping update, already at latest version");
return;
}
const skipAppVersion = getSkipAppVersion();
if (
skipAppVersion &&
updateCheckResult.updateInfo.version === skipAppVersion
) {
log.info(
"user chose to skip version ",
updateCheckResult.updateInfo.version,
);
if (skipAppVersion && updateInfo.version === skipAppVersion) {
log.info(`User chose to skip version ${updateInfo.version}`);
return;
}
let timeout: NodeJS.Timeout;
log.debug("attempting auto update");
log.debug(() => "Attempting auto update");
autoUpdater.downloadUpdate();
const muteUpdateNotificationVersion =
getMuteUpdateNotificationVersion();
if (
muteUpdateNotificationVersion &&
updateCheckResult.updateInfo.version ===
muteUpdateNotificationVersion
updateInfo.version === muteUpdateNotificationVersion
) {
log.info(
"user chose to mute update notification for version ",
updateCheckResult.updateInfo.version,
`User has muted update notifications for version ${updateInfo.version}`,
);
return;
}
@ -84,28 +71,28 @@ async function checkForUpdateAndNotify(mainWindow: BrowserWindow) {
() =>
showUpdateDialog(mainWindow, {
autoUpdatable: true,
version: updateCheckResult.updateInfo.version,
version: updateInfo.version,
}),
FIVE_MIN_IN_MICROSECOND,
);
});
autoUpdater.on("error", (error) => {
clearTimeout(timeout);
logErrorSentry(error, "auto update failed");
log.error("Auto update failed", error);
showUpdateDialog(mainWindow, {
autoUpdatable: false,
version: updateCheckResult.updateInfo.version,
version: updateInfo.version,
});
});
setIsUpdateAvailable(true);
} catch (e) {
logErrorSentry(e, "checkForUpdateAndNotify failed");
log.error("checkForUpdateAndNotify failed", e);
}
}
export function updateAndRestart() {
ElectronLog.log("user quit the app");
log.info("user quit the app");
setIsAppQuitting(true);
autoUpdater.quitAndInstall();
}


@ -1,7 +1,7 @@
import chokidar from "chokidar";
import { BrowserWindow } from "electron";
import path from "path";
import { logError } from "../main/log";
import log from "../main/log";
import { getWatchMappings } from "../services/watch";
import { getElectronFile } from "./fs";
@ -38,7 +38,7 @@ export function initWatcher(mainWindow: BrowserWindow) {
);
})
.on("error", (error) => {
logError(error, "error while watching files");
log.error("Error while watching files", error);
});
return watcher;


@ -2,11 +2,10 @@ import { app, net } from "electron/main";
import { existsSync } from "fs";
import fs from "node:fs/promises";
import path from "node:path";
import { CustomErrors } from "../constants/errors";
import { writeStream } from "../main/fs";
import log, { logErrorSentry } from "../main/log";
import log from "../main/log";
import { execAsync, isDev } from "../main/util";
import { Model } from "../types/ipc";
import { CustomErrors, Model, isModel } from "../types/ipc";
import Tokenizer from "../utils/clip-bpe-ts/mod";
import { getPlatform } from "../utils/common/platform";
import { generateTempFilePath } from "../utils/temp";
@ -78,7 +77,7 @@ async function downloadModel(saveLocation: string, url: string) {
let imageModelDownloadInProgress: Promise<void> = null;
export async function getClipImageModelPath(type: "ggml" | "onnx") {
const getClipImageModelPath = async (type: "ggml" | "onnx") => {
try {
const modelSavePath = getModelSavePath(IMAGE_MODEL_NAME[type]);
if (imageModelDownloadInProgress) {
@ -86,7 +85,7 @@ export async function getClipImageModelPath(type: "ggml" | "onnx") {
await imageModelDownloadInProgress;
} else {
if (!existsSync(modelSavePath)) {
log.info("clip image model not found, downloading");
log.info("CLIP image model not found, downloading");
imageModelDownloadInProgress = downloadModel(
modelSavePath,
IMAGE_MODEL_DOWNLOAD_URL[type],
@ -96,7 +95,7 @@ export async function getClipImageModelPath(type: "ggml" | "onnx") {
const localFileSize = (await fs.stat(modelSavePath)).size;
if (localFileSize !== IMAGE_MODEL_SIZE_IN_BYTES[type]) {
log.info(
`clip image model size mismatch, downloading again got: ${localFileSize}`,
`CLIP image model size mismatch, downloading again got: ${localFileSize}`,
);
imageModelDownloadInProgress = downloadModel(
modelSavePath,
@ -110,21 +109,22 @@ export async function getClipImageModelPath(type: "ggml" | "onnx") {
} finally {
imageModelDownloadInProgress = null;
}
}
};
let textModelDownloadInProgress: boolean = false;
export async function getClipTextModelPath(type: "ggml" | "onnx") {
const getClipTextModelPath = async (type: "ggml" | "onnx") => {
const modelSavePath = getModelSavePath(TEXT_MODEL_NAME[type]);
if (textModelDownloadInProgress) {
throw Error(CustomErrors.MODEL_DOWNLOAD_PENDING);
} else {
if (!existsSync(modelSavePath)) {
log.info("clip text model not found, downloading");
log.info("CLIP text model not found, downloading");
textModelDownloadInProgress = true;
downloadModel(modelSavePath, TEXT_MODEL_DOWNLOAD_URL[type])
.catch(() => {
// ignore
.catch((e) => {
// log but otherwise ignore
log.error("CLIP text model download failed", e);
})
.finally(() => {
textModelDownloadInProgress = false;
@ -134,12 +134,13 @@ export async function getClipTextModelPath(type: "ggml" | "onnx") {
const localFileSize = (await fs.stat(modelSavePath)).size;
if (localFileSize !== TEXT_MODEL_SIZE_IN_BYTES[type]) {
log.info(
`clip text model size mismatch, downloading again got: ${localFileSize}`,
`CLIP text model size mismatch, downloading again got: ${localFileSize}`,
);
textModelDownloadInProgress = true;
downloadModel(modelSavePath, TEXT_MODEL_DOWNLOAD_URL[type])
.catch(() => {
// ignore
.catch((e) => {
// log but otherwise ignore
log.error("CLIP text model download failed", e);
})
.finally(() => {
textModelDownloadInProgress = false;
@ -149,7 +150,7 @@ export async function getClipTextModelPath(type: "ggml" | "onnx") {
}
}
return modelSavePath;
}
};
function getGGMLClipPath() {
return isDev
@ -198,6 +199,8 @@ export const computeImageEmbedding = async (
model: Model,
imageData: Uint8Array,
): Promise<Float32Array> => {
if (!isModel(model)) throw new Error(`Invalid CLIP model ${model}`);
let tempInputFilePath = null;
try {
tempInputFilePath = await generateTempFilePath("");
@ -243,180 +246,69 @@ async function computeImageEmbedding_(
inputFilePath: string,
): Promise<Float32Array> {
if (!existsSync(inputFilePath)) {
throw Error(CustomErrors.INVALID_FILE_PATH);
throw new Error("Invalid file path");
}
if (model === Model.GGML_CLIP) {
return await computeGGMLImageEmbedding(inputFilePath);
} else if (model === Model.ONNX_CLIP) {
return await computeONNXImageEmbedding(inputFilePath);
} else {
throw Error(CustomErrors.INVALID_CLIP_MODEL(model));
switch (model) {
case "ggml-clip":
return await computeGGMLImageEmbedding(inputFilePath);
case "onnx-clip":
return await computeONNXImageEmbedding(inputFilePath);
}
}
export async function computeGGMLImageEmbedding(
const computeGGMLImageEmbedding = async (
inputFilePath: string,
): Promise<Float32Array> {
try {
const clipModelPath = await getClipImageModelPath("ggml");
const ggmlclipPath = getGGMLClipPath();
const cmd = IMAGE_EMBEDDING_EXTRACT_CMD.map((cmdPart) => {
if (cmdPart === GGMLCLIP_PATH_PLACEHOLDER) {
return ggmlclipPath;
} else if (cmdPart === CLIP_MODEL_PATH_PLACEHOLDER) {
return clipModelPath;
} else if (cmdPart === INPUT_PATH_PLACEHOLDER) {
return inputFilePath;
} else {
return cmdPart;
}
});
): Promise<Float32Array> => {
const clipModelPath = await getClipImageModelPath("ggml");
const ggmlclipPath = getGGMLClipPath();
const cmd = IMAGE_EMBEDDING_EXTRACT_CMD.map((cmdPart) => {
if (cmdPart === GGMLCLIP_PATH_PLACEHOLDER) {
return ggmlclipPath;
} else if (cmdPart === CLIP_MODEL_PATH_PLACEHOLDER) {
return clipModelPath;
} else if (cmdPart === INPUT_PATH_PLACEHOLDER) {
return inputFilePath;
} else {
return cmdPart;
}
});
const { stdout } = await execAsync(cmd);
// parse stdout and return embedding
// get the last line of stdout
const lines = stdout.split("\n");
const lastLine = lines[lines.length - 1];
const embedding = JSON.parse(lastLine);
const embeddingArray = new Float32Array(embedding);
return embeddingArray;
} catch (err) {
log.error("Failed to compute GGML image embedding", err);
throw err;
}
}
const { stdout } = await execAsync(cmd);
// parse stdout and return embedding
// get the last line of stdout
const lines = stdout.split("\n");
const lastLine = lines[lines.length - 1];
const embedding = JSON.parse(lastLine);
const embeddingArray = new Float32Array(embedding);
return embeddingArray;
};
export async function computeONNXImageEmbedding(
const computeONNXImageEmbedding = async (
inputFilePath: string,
): Promise<Float32Array> {
try {
const imageSession = await getOnnxImageSession();
const t1 = Date.now();
const rgbData = await getRGBData(inputFilePath);
const feeds = {
input: new ort.Tensor("float32", rgbData, [1, 3, 224, 224]),
};
const t2 = Date.now();
const results = await imageSession.run(feeds);
log.info(
`onnx image embedding time: ${Date.now() - t1} ms (prep:${
t2 - t1
} ms, extraction: ${Date.now() - t2} ms)`,
);
const imageEmbedding = results["output"].data; // Float32Array
return normalizeEmbedding(imageEmbedding);
} catch (err) {
log.error("Failed to compute ONNX image embedding", err);
throw err;
}
}
export async function computeTextEmbedding(
model: Model,
text: string,
): Promise<Float32Array> {
try {
const embedding = computeTextEmbedding_(model, text);
return embedding;
} catch (err) {
if (isExecError(err)) {
const parsedExecError = parseExecError(err);
throw Error(parsedExecError);
} else {
throw err;
}
}
}
async function computeTextEmbedding_(
model: Model,
text: string,
): Promise<Float32Array> {
if (model === Model.GGML_CLIP) {
return await computeGGMLTextEmbedding(text);
} else {
return await computeONNXTextEmbedding(text);
}
}
export async function computeGGMLTextEmbedding(
text: string,
): Promise<Float32Array> {
try {
const clipModelPath = await getClipTextModelPath("ggml");
const ggmlclipPath = getGGMLClipPath();
const cmd = TEXT_EMBEDDING_EXTRACT_CMD.map((cmdPart) => {
if (cmdPart === GGMLCLIP_PATH_PLACEHOLDER) {
return ggmlclipPath;
} else if (cmdPart === CLIP_MODEL_PATH_PLACEHOLDER) {
return clipModelPath;
} else if (cmdPart === INPUT_PATH_PLACEHOLDER) {
return text;
} else {
return cmdPart;
}
});
const { stdout } = await execAsync(cmd);
// parse stdout and return embedding
// get the last line of stdout
const lines = stdout.split("\n");
const lastLine = lines[lines.length - 1];
const embedding = JSON.parse(lastLine);
const embeddingArray = new Float32Array(embedding);
return embeddingArray;
} catch (err) {
if (err.message === CustomErrors.MODEL_DOWNLOAD_PENDING) {
log.info(CustomErrors.MODEL_DOWNLOAD_PENDING);
} else {
log.error("Failed to compute GGML text embedding", err);
}
throw err;
}
}
export async function computeONNXTextEmbedding(
text: string,
): Promise<Float32Array> {
try {
const imageSession = await getOnnxTextSession();
const t1 = Date.now();
const tokenizer = getTokenizer();
const tokenizedText = Int32Array.from(tokenizer.encodeForCLIP(text));
const feeds = {
input: new ort.Tensor("int32", tokenizedText, [1, 77]),
};
const t2 = Date.now();
const results = await imageSession.run(feeds);
log.info(
`onnx text embedding time: ${Date.now() - t1} ms (prep:${
t2 - t1
} ms, extraction: ${Date.now() - t2} ms)`,
);
const textEmbedding = results["output"].data; // Float32Array
return normalizeEmbedding(textEmbedding);
} catch (err) {
if (err.message === CustomErrors.MODEL_DOWNLOAD_PENDING) {
log.info(CustomErrors.MODEL_DOWNLOAD_PENDING);
} else {
logErrorSentry(err, "Error in computeONNXTextEmbedding");
}
throw err;
}
}
): Promise<Float32Array> => {
const imageSession = await getOnnxImageSession();
const t1 = Date.now();
const rgbData = await getRGBData(inputFilePath);
const feeds = {
input: new ort.Tensor("float32", rgbData, [1, 3, 224, 224]),
};
const t2 = Date.now();
const results = await imageSession.run(feeds);
log.info(
`onnx image embedding time: ${Date.now() - t1} ms (prep:${
t2 - t1
} ms, extraction: ${Date.now() - t2} ms)`,
);
const imageEmbedding = results["output"].data; // Float32Array
return normalizeEmbedding(imageEmbedding);
};
async function getRGBData(inputFilePath: string) {
const jpegData = await fs.readFile(inputFilePath);
let rawImageData;
try {
rawImageData = jpeg.decode(jpegData, {
useTArray: true,
formatAsRGBA: false,
});
} catch (err) {
logErrorSentry(err, "JPEG decode error");
throw err;
}
const rawImageData = jpeg.decode(jpegData, {
useTArray: true,
formatAsRGBA: false,
});
const nx: number = rawImageData.width;
const ny: number = rawImageData.height;
@ -479,21 +371,7 @@ async function getRGBData(inputFilePath: string) {
return result;
}
export const computeClipMatchScore = async (
imageEmbedding: Float32Array,
textEmbedding: Float32Array,
) => {
if (imageEmbedding.length !== textEmbedding.length) {
throw Error("imageEmbedding and textEmbedding length mismatch");
}
let score = 0;
for (let index = 0; index < imageEmbedding.length; index++) {
score += imageEmbedding[index] * textEmbedding[index];
}
return score;
};
export const normalizeEmbedding = (embedding: Float32Array) => {
const normalizeEmbedding = (embedding: Float32Array) => {
let normalization = 0;
for (let index = 0; index < embedding.length; index++) {
normalization += embedding[index] * embedding[index];
@ -504,3 +382,82 @@ export const normalizeEmbedding = (embedding: Float32Array) => {
}
return embedding;
};
export async function computeTextEmbedding(
model: Model,
text: string,
): Promise<Float32Array> {
if (!isModel(model)) throw new Error(`Invalid CLIP model ${model}`);
try {
const embedding = computeTextEmbedding_(model, text);
return embedding;
} catch (err) {
if (isExecError(err)) {
const parsedExecError = parseExecError(err);
throw Error(parsedExecError);
} else {
throw err;
}
}
}
async function computeTextEmbedding_(
model: Model,
text: string,
): Promise<Float32Array> {
switch (model) {
case "ggml-clip":
return await computeGGMLTextEmbedding(text);
case "onnx-clip":
return await computeONNXTextEmbedding(text);
}
}
export async function computeGGMLTextEmbedding(
text: string,
): Promise<Float32Array> {
const clipModelPath = await getClipTextModelPath("ggml");
const ggmlclipPath = getGGMLClipPath();
const cmd = TEXT_EMBEDDING_EXTRACT_CMD.map((cmdPart) => {
if (cmdPart === GGMLCLIP_PATH_PLACEHOLDER) {
return ggmlclipPath;
} else if (cmdPart === CLIP_MODEL_PATH_PLACEHOLDER) {
return clipModelPath;
} else if (cmdPart === INPUT_PATH_PLACEHOLDER) {
return text;
} else {
return cmdPart;
}
});
const { stdout } = await execAsync(cmd);
// parse stdout and return embedding
// get the last line of stdout
const lines = stdout.split("\n");
const lastLine = lines[lines.length - 1];
const embedding = JSON.parse(lastLine);
const embeddingArray = new Float32Array(embedding);
return embeddingArray;
}
export async function computeONNXTextEmbedding(
text: string,
): Promise<Float32Array> {
const imageSession = await getOnnxTextSession();
const t1 = Date.now();
const tokenizer = getTokenizer();
const tokenizedText = Int32Array.from(tokenizer.encodeForCLIP(text));
const feeds = {
input: new ort.Tensor("int32", tokenizedText, [1, 77]),
};
const t2 = Date.now();
const results = await imageSession.run(feeds);
log.info(
`onnx text embedding time: ${Date.now() - t1} ms (prep:${
t2 - t1
} ms, extraction: ${Date.now() - t2} ms)`,
);
const textEmbedding = results["output"].data; // Float32Array
return normalizeEmbedding(textEmbedding);
}


@ -1,7 +1,6 @@
import pathToFfmpeg from "ffmpeg-static";
import { existsSync } from "node:fs";
import fs from "node:fs/promises";
import { CustomErrors } from "../constants/errors";
import { writeStream } from "../main/fs";
import log from "../main/log";
import { execAsync } from "../main/util";
@ -146,7 +145,7 @@ const promiseWithTimeout = async <T>(
} = { current: null };
const rejectOnTimeout = new Promise<null>((_, reject) => {
timeoutRef.current = setTimeout(
() => reject(Error(CustomErrors.WAIT_TIME_EXCEEDED)),
() => reject(new Error("Operation timed out")),
timeout,
);
});


@ -2,7 +2,7 @@ import StreamZip from "node-stream-zip";
import { existsSync } from "node:fs";
import fs from "node:fs/promises";
import path from "node:path";
import { logError } from "../main/log";
import log from "../main/log";
import { ElectronFile } from "../types/ipc";
const FILE_STREAM_CHUNK_SIZE: number = 4 * 1024 * 1024;
@ -115,7 +115,9 @@ export const getZipFileStream = async (
const inProgress = {
current: false,
};
// eslint-disable-next-line no-unused-vars
let resolveObj: (value?: any) => void = null;
// eslint-disable-next-line no-unused-vars
let rejectObj: (reason?: any) => void = null;
stream.on("readable", () => {
try {
@ -179,7 +181,7 @@ export const getZipFileStream = async (
controller.close();
}
} catch (e) {
logError(e, "readableStream pull failed");
log.error("Failed to pull from readableStream", e);
controller.close();
}
},


@ -1,11 +1,10 @@
import { existsSync } from "fs";
import fs from "node:fs/promises";
import path from "path";
import { CustomErrors } from "../constants/errors";
import { writeStream } from "../main/fs";
import { logError, logErrorSentry } from "../main/log";
import log from "../main/log";
import { execAsync, isDev } from "../main/util";
import { ElectronFile } from "../types/ipc";
import { CustomErrors, ElectronFile } from "../types/ipc";
import { isPlatform } from "../utils/common/platform";
import { generateTempFilePath } from "../utils/temp";
import { deleteTempFile } from "./ffmpeg";
@ -103,18 +102,21 @@ async function convertToJPEG_(
return new Uint8Array(await fs.readFile(tempOutputFilePath));
} catch (e) {
logErrorSentry(e, "failed to convert heic");
log.error("Failed to convert HEIC", e);
throw e;
} finally {
try {
await fs.rm(tempInputFilePath, { force: true });
} catch (e) {
logErrorSentry(e, "failed to remove tempInputFile");
log.error(`Failed to remove tempInputFile ${tempInputFilePath}`, e);
}
try {
await fs.rm(tempOutputFilePath, { force: true });
} catch (e) {
logErrorSentry(e, "failed to remove tempOutputFile");
log.error(
`Failed to remove tempOutputFile ${tempOutputFilePath}`,
e,
);
}
}
}
@ -150,7 +152,7 @@ function constructConvertCommand(
},
);
} else {
throw Error(CustomErrors.INVALID_OS(process.platform));
throw new Error(`Unsupported OS ${process.platform}`);
}
return convertCmd;
}
@ -187,7 +189,7 @@ export async function generateImageThumbnail(
try {
await deleteTempFile(inputFilePath);
} catch (e) {
logError(e, "failed to deleteTempFile");
log.error(`Failed to deleteTempFile ${inputFilePath}`, e);
}
}
}
@ -217,13 +219,16 @@ async function generateImageThumbnail_(
} while (thumbnail.length > maxSize && quality > MIN_QUALITY);
return thumbnail;
} catch (e) {
logErrorSentry(e, "generate image thumbnail failed");
log.error("Failed to generate image thumbnail", e);
throw e;
} finally {
try {
await fs.rm(tempOutputFilePath, { force: true });
} catch (e) {
logErrorSentry(e, "failed to remove tempOutputFile");
log.error(
`Failed to remove tempOutputFile ${tempOutputFilePath}`,
e,
);
}
}
}
@ -283,7 +288,7 @@ function constructThumbnailGenerationCommand(
return cmdPart;
});
} else {
throw Error(CustomErrors.INVALID_OS(process.platform));
throw new Error(`Unsupported OS ${process.platform}`);
}
return thumbnailGenerationCmd;
}


@ -0,0 +1,26 @@
import { safeStorage } from "electron/main";
import { keysStore } from "../stores/keys.store";
import { safeStorageStore } from "../stores/safeStorage.store";
import { uploadStatusStore } from "../stores/upload.store";
import { watchStore } from "../stores/watch.store";
export const clearElectronStore = () => {
uploadStatusStore.clear();
keysStore.clear();
safeStorageStore.clear();
watchStore.clear();
};
export async function setEncryptionKey(encryptionKey: string) {
const encryptedKey: Buffer = await safeStorage.encryptString(encryptionKey);
const b64EncryptedKey = Buffer.from(encryptedKey).toString("base64");
safeStorageStore.set("encryptionKey", b64EncryptedKey);
}
export async function getEncryptionKey(): Promise<string> {
const b64EncryptedKey = safeStorageStore.get("encryptionKey");
if (b64EncryptedKey) {
const keyBuffer = Buffer.from(b64EncryptedKey, "base64");
return await safeStorage.decryptString(keyBuffer);
}
}


@ -1,10 +1,39 @@
import StreamZip from "node-stream-zip";
import path from "path";
import { getElectronFile } from "../services/fs";
import { uploadStatusStore } from "../stores/upload.store";
import { ElectronFile, FILE_PATH_TYPE } from "../types/ipc";
import { FILE_PATH_KEYS } from "../types/main";
import { getValidPaths, getZipFileStream } from "./fs";
export const getPendingUploads = async () => {
const filePaths = getSavedFilePaths(FILE_PATH_TYPE.FILES);
const zipPaths = getSavedFilePaths(FILE_PATH_TYPE.ZIPS);
const collectionName = uploadStatusStore.get("collectionName");
let files: ElectronFile[] = [];
let type: FILE_PATH_TYPE;
if (zipPaths.length) {
type = FILE_PATH_TYPE.ZIPS;
for (const zipPath of zipPaths) {
files = [
...files,
...(await getElectronFilesFromGoogleZip(zipPath)),
];
}
const pendingFilePaths = new Set(filePaths);
files = files.filter((file) => pendingFilePaths.has(file.path));
} else if (filePaths.length) {
type = FILE_PATH_TYPE.FILES;
files = await Promise.all(filePaths.map(getElectronFile));
}
return {
files,
collectionName,
type,
};
};
export const getSavedFilePaths = (type: FILE_PATH_TYPE) => {
const paths =
getValidPaths(


@ -19,6 +19,7 @@
* curl -v -H "Location;" -H "User-Agent: FooBar's so-called ""Browser""" "http://www.daveeddy.com/?name=dave&age=24"
Which is suitable for being executed by the shell.
*/
/* eslint-disable no-unused-vars */
declare module "any-shell-escape" {
declare const shellescape: (args: string | readonly string[]) => string;
export default shellescape;
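A minimal usage sketch for this module (the ffmpeg path and file names below are made up):

import shellescape from "any-shell-escape";

// Hypothetical paths, purely for illustration.
const cmd = shellescape([
    "/path/to/ffmpeg",
    "-i",
    "in file with spaces.mov",
    "out.mp4",
]);
// `cmd` is now safe to hand to a shell, e.g. via child_process.exec(cmd).
console.log(cmd);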


@ -4,6 +4,32 @@
* This file is manually kept in sync with the renderer code.
* See [Note: types.ts <-> preload.ts <-> ipc.ts]
*/
/**
* Errors that have special semantics on the web side.
*
* [Note: Custom errors across Electron/Renderer boundary]
*
* We need to use the `message` field to disambiguate between errors thrown by
* the main process when invoked from the renderer process. This is because:
*
* > Errors thrown through `handle` in the main process are not transparent as
* > they are serialized and only the `message` property from the original error
* > is provided to the renderer process.
* >
* > - https://www.electronjs.org/docs/latest/tutorial/ipc
* >
* > Ref: https://github.com/electron/electron/issues/24427
*/
export const CustomErrors = {
WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED:
"Windows native image processing is not supported",
UNSUPPORTED_PLATFORM: (platform: string, arch: string) =>
`Unsupported platform - ${platform} ${arch}`,
MODEL_DOWNLOAD_PENDING:
"Model download pending, skipping clip search request",
};
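As a rough illustration of the note above, the renderer side has to fall back to matching on the error's message string; a hedged sketch (the handler itself is hypothetical, not part of this diff):

// Since only `message` survives IPC serialization, the web side compares the
// message against the known string.
const handleMainProcessError = (e: unknown) => {
    if (
        e instanceof Error &&
        e.message == CustomErrors.MODEL_DOWNLOAD_PENDING
    ) {
        // The CLIP model is still downloading; skip this search request.
        return;
    }
    throw e;
};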
/**
* Deprecated - Use File + webUtils.getPathForFile instead
*
@ -45,6 +71,7 @@ export interface WatchStoreType {
}
export enum FILE_PATH_TYPE {
/* eslint-disable no-unused-vars */
FILES = "files",
ZIPS = "zips",
}
@ -54,7 +81,6 @@ export interface AppUpdateInfo {
version: string;
}
export enum Model {
GGML_CLIP = "ggml-clip",
ONNX_CLIP = "onnx-clip",
}
export type Model = "ggml-clip" | "onnx-clip";
export const isModel = (s: unknown) => s == "ggml-clip" || s == "onnx-clip";


@ -18,6 +18,7 @@ export interface KeysStoreType {
};
}
/* eslint-disable no-unused-vars */
export const FILE_PATH_KEYS: {
[k in FILE_PATH_TYPE]: keyof UploadStoreType;
} = {


@ -1,5 +1,6 @@
import { CustomHead } from "@/next/components/Head";
import { setupI18n } from "@/next/i18n";
import { logStartupBanner } from "@/next/log-web";
import {
APPS,
APP_TITLES,
@ -16,15 +17,12 @@ import { MessageContainer } from "@ente/shared/components/MessageContainer";
import AppNavbar from "@ente/shared/components/Navbar/app";
import { PHOTOS_PAGES as PAGES } from "@ente/shared/constants/pages";
import { useLocalState } from "@ente/shared/hooks/useLocalState";
import {
clearLogsIfLocalStorageLimitExceeded,
logStartupMessage,
} from "@ente/shared/logging/web";
import HTTPService from "@ente/shared/network/HTTPService";
import { LS_KEYS } from "@ente/shared/storage/localStorage";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
import { getTheme } from "@ente/shared/themes";
import { THEME_COLOR } from "@ente/shared/themes/constants";
import { SetTheme } from "@ente/shared/themes/types";
import type { User } from "@ente/shared/user/types";
import { CssBaseline, useMediaQuery } from "@mui/material";
import { ThemeProvider } from "@mui/material/styles";
import { t } from "i18next";
@ -67,15 +65,12 @@ export default function App({ Component, pageProps }: AppProps) {
);
useEffect(() => {
//setup i18n
setupI18n().finally(() => setIsI18nReady(true));
// set client package name in headers
const userId = (getData(LS_KEYS.USER) as User)?.id;
logStartupBanner(APPS.AUTH, userId);
HTTPService.setHeaders({
"X-Client-Package": CLIENT_PACKAGE_NAMES.get(APPS.AUTH),
});
// setup logging
clearLogsIfLocalStorageLimitExceeded();
logStartupMessage(APPS.AUTH);
}, []);
const setUserOnline = () => setOffline(false);


@ -1,6 +1,6 @@
import log from "@/next/log";
import EnteSpinner from "@ente/shared/components/EnteSpinner";
import { boxSealOpen, toB64 } from "@ente/shared/crypto/internal/libsodium";
import { addLogLine } from "@ente/shared/logging";
import castGateway from "@ente/shared/network/cast";
import LargeType from "components/LargeType";
import _sodium from "libsodium-wrappers";
@ -60,7 +60,7 @@ export default function PairingMode() {
);
context.start(options);
} catch (e) {
addLogLine(e, "failed to create cast context");
log.error("failed to create cast context", e);
}
setIsCastReady(true);
return () => {


@ -1,9 +1,9 @@
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { CustomError } from "@ente/shared/error";
import HTTPService from "@ente/shared/network/HTTPService";
import { getCastFileURL } from "@ente/shared/network/api";
import { FILE_TYPE } from "constants/file";
import { EnteFile } from "types/file";
import ComlinkCryptoWorker from "utils/comlink/ComlinkCryptoWorker";
import { generateStreamFromArrayBuffer } from "utils/file";
class CastDownloadManager {


@ -1,5 +1,5 @@
import { convertBytesToHumanReadable } from "@/next/file";
import { logError } from "@ente/shared/sentry";
import { convertBytesToHumanReadable } from "@ente/shared/utils/size";
export async function getUint8ArrayView(file: Blob): Promise<Uint8Array> {
try {


@ -1,6 +1,6 @@
import { convertBytesToHumanReadable } from "@/next/file";
import { CustomError } from "@ente/shared/error";
import { logError } from "@ente/shared/sentry";
import { convertBytesToHumanReadable } from "@ente/shared/utils/size";
import { FILE_TYPE } from "constants/file";
import {
KNOWN_NON_MEDIA_FORMATS,


@ -1,25 +0,0 @@
import { Remote } from "comlink";
import { DedicatedCryptoWorker } from "worker/crypto.worker";
import { ComlinkWorker } from "./comlinkWorker";
class ComlinkCryptoWorker {
private comlinkWorkerInstance: Promise<Remote<DedicatedCryptoWorker>>;
async getInstance() {
if (!this.comlinkWorkerInstance) {
const comlinkWorker = getDedicatedCryptoWorker();
this.comlinkWorkerInstance = comlinkWorker.remote;
}
return this.comlinkWorkerInstance;
}
}
export const getDedicatedCryptoWorker = () => {
const cryptoComlinkWorker = new ComlinkWorker<typeof DedicatedCryptoWorker>(
"ente-crypto-worker",
new Worker(new URL("worker/crypto.worker.ts", import.meta.url)),
);
return cryptoComlinkWorker;
};
export default new ComlinkCryptoWorker();


@ -1,25 +0,0 @@
import { addLocalLog } from "@ente/shared/logging";
import { Remote, wrap } from "comlink";
export class ComlinkWorker<T extends new () => InstanceType<T>> {
public remote: Promise<Remote<InstanceType<T>>>;
private worker: Worker;
private name: string;
constructor(name: string, worker: Worker) {
this.name = name;
this.worker = worker;
this.worker.onerror = (errorEvent) => {
console.error("Got error event from worker", errorEvent);
};
addLocalLog(() => `Initiated ${this.name}`);
const comlink = wrap<T>(this.worker);
this.remote = new comlink() as Promise<Remote<InstanceType<T>>>;
}
public terminate() {
this.worker.terminate();
addLocalLog(() => `Terminated ${this.name}`);
}
}


@ -1,3 +1,4 @@
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { logError } from "@ente/shared/sentry";
import { FILE_TYPE, RAW_FORMATS } from "constants/file";
import CastDownloadManager from "services/castDownloadManager";
@ -9,7 +10,6 @@ import {
FileMagicMetadata,
FilePublicMagicMetadata,
} from "types/file";
import ComlinkCryptoWorker from "utils/comlink/ComlinkCryptoWorker";
export function sortFiles(files: EnteFile[], sortAsc = false) {
// sort based on the time of creation time of the file,


@ -1,215 +0,0 @@
import * as libsodium from "@ente/shared/crypto/internal/libsodium";
import * as Comlink from "comlink";
import { StateAddress } from "libsodium-wrappers";
const textDecoder = new TextDecoder();
const textEncoder = new TextEncoder();
export class DedicatedCryptoWorker {
async decryptMetadata(
encryptedMetadata: string,
header: string,
key: string,
) {
const encodedMetadata = await libsodium.decryptChaChaOneShot(
await libsodium.fromB64(encryptedMetadata),
await libsodium.fromB64(header),
key,
);
return JSON.parse(textDecoder.decode(encodedMetadata));
}
async decryptThumbnail(
fileData: Uint8Array,
header: Uint8Array,
key: string,
) {
return libsodium.decryptChaChaOneShot(fileData, header, key);
}
async decryptEmbedding(
encryptedEmbedding: string,
header: string,
key: string,
) {
const encodedEmbedding = await libsodium.decryptChaChaOneShot(
await libsodium.fromB64(encryptedEmbedding),
await libsodium.fromB64(header),
key,
);
return Float32Array.from(
JSON.parse(textDecoder.decode(encodedEmbedding)),
);
}
async decryptFile(fileData: Uint8Array, header: Uint8Array, key: string) {
return libsodium.decryptChaCha(fileData, header, key);
}
async encryptMetadata(metadata: Object, key: string) {
const encodedMetadata = textEncoder.encode(JSON.stringify(metadata));
const { file: encryptedMetadata } =
await libsodium.encryptChaChaOneShot(encodedMetadata, key);
const { encryptedData, ...other } = encryptedMetadata;
return {
file: {
encryptedData: await libsodium.toB64(encryptedData),
...other,
},
key,
};
}
async encryptThumbnail(fileData: Uint8Array, key: string) {
return libsodium.encryptChaChaOneShot(fileData, key);
}
async encryptEmbedding(embedding: Float32Array, key: string) {
const encodedEmbedding = textEncoder.encode(
JSON.stringify(Array.from(embedding)),
);
const { file: encryptEmbedding } = await libsodium.encryptChaChaOneShot(
encodedEmbedding,
key,
);
const { encryptedData, ...other } = encryptEmbedding;
return {
file: {
encryptedData: await libsodium.toB64(encryptedData),
...other,
},
key,
};
}
async encryptFile(fileData: Uint8Array) {
return libsodium.encryptChaCha(fileData);
}
async encryptFileChunk(
data: Uint8Array,
pushState: StateAddress,
isFinalChunk: boolean,
) {
return libsodium.encryptFileChunk(data, pushState, isFinalChunk);
}
async initChunkEncryption() {
return libsodium.initChunkEncryption();
}
async initChunkDecryption(header: Uint8Array, key: Uint8Array) {
return libsodium.initChunkDecryption(header, key);
}
async decryptFileChunk(fileData: Uint8Array, pullState: StateAddress) {
return libsodium.decryptFileChunk(fileData, pullState);
}
async initChunkHashing() {
return libsodium.initChunkHashing();
}
async hashFileChunk(hashState: StateAddress, chunk: Uint8Array) {
return libsodium.hashFileChunk(hashState, chunk);
}
async completeChunkHashing(hashState: StateAddress) {
return libsodium.completeChunkHashing(hashState);
}
async deriveKey(
passphrase: string,
salt: string,
opsLimit: number,
memLimit: number,
) {
return libsodium.deriveKey(passphrase, salt, opsLimit, memLimit);
}
async deriveSensitiveKey(passphrase: string, salt: string) {
return libsodium.deriveSensitiveKey(passphrase, salt);
}
async deriveInteractiveKey(passphrase: string, salt: string) {
return libsodium.deriveInteractiveKey(passphrase, salt);
}
async decryptB64(data: string, nonce: string, key: string) {
return libsodium.decryptB64(data, nonce, key);
}
async decryptToUTF8(data: string, nonce: string, key: string) {
return libsodium.decryptToUTF8(data, nonce, key);
}
async encryptToB64(data: string, key: string) {
return libsodium.encryptToB64(data, key);
}
async generateKeyAndEncryptToB64(data: string) {
return libsodium.generateKeyAndEncryptToB64(data);
}
async encryptUTF8(data: string, key: string) {
return libsodium.encryptUTF8(data, key);
}
async generateEncryptionKey() {
return libsodium.generateEncryptionKey();
}
async generateSaltToDeriveKey() {
return libsodium.generateSaltToDeriveKey();
}
async generateKeyPair() {
return libsodium.generateKeyPair();
}
async boxSealOpen(input: string, publicKey: string, secretKey: string) {
return libsodium.boxSealOpen(input, publicKey, secretKey);
}
async boxSeal(input: string, publicKey: string) {
return libsodium.boxSeal(input, publicKey);
}
async generateSubKey(
key: string,
subKeyLength: number,
subKeyID: number,
context: string,
) {
return libsodium.generateSubKey(key, subKeyLength, subKeyID, context);
}
async fromUTF8(string: string) {
return libsodium.fromUTF8(string);
}
async toUTF8(data: string) {
return libsodium.toUTF8(data);
}
async toB64(data: Uint8Array) {
return libsodium.toB64(data);
}
async toURLSafeB64(data: Uint8Array) {
return libsodium.toURLSafeB64(data);
}
async fromB64(string: string) {
return libsodium.fromB64(string);
}
async toHex(string: string) {
return libsodium.toHex(string);
}
async fromHex(string: string) {
return libsodium.fromHex(string);
}
}
Comlink.expose(DedicatedCryptoWorker, self);


@ -20,7 +20,6 @@
"blazeface-back": "^0.0.9",
"bs58": "^5.0.0",
"chrono-node": "^2.2.6",
"comlink": "^4.3.0",
"date-fns": "^2",
"debounce": "^2.0.0",
"density-clustering": "^1.3.0",


@ -1,3 +1,4 @@
import log from "@/next/log";
import { VerticallyCentered } from "@ente/shared/components/Container";
import DialogBoxV2 from "@ente/shared/components/DialogBoxV2";
import EnteButton from "@ente/shared/components/EnteButton";
@ -6,9 +7,7 @@ import SingleInputForm, {
SingleInputFormProps,
} from "@ente/shared/components/SingleInputForm";
import { boxSeal } from "@ente/shared/crypto/internal/libsodium";
import { addLogLine } from "@ente/shared/logging";
import castGateway from "@ente/shared/network/cast";
import { logError } from "@ente/shared/sentry";
import { Link, Typography } from "@mui/material";
import { t } from "i18next";
import { useEffect, useState } from "react";
@ -105,7 +104,7 @@ export default function AlbumCastDialog(props: Props) {
await instance.requestSession();
} catch (e) {
setView("auto-cast-error");
logError(e, "Error requesting session");
log.error("Error requesting session", e);
return;
}
const session = instance.getCurrentSession();
@ -124,7 +123,7 @@ export default function AlbumCastDialog(props: Props) {
})
.catch((e) => {
setView("auto-cast-error");
logError(e, "Error casting to TV");
log.error("Error casting to TV", e);
});
}
},
@ -133,10 +132,10 @@ export default function AlbumCastDialog(props: Props) {
session
.sendMessage("urn:x-cast:pair-request", {})
.then(() => {
addLogLine("Message sent successfully");
log.debug(() => "Message sent successfully");
})
.catch((error) => {
logError(error, "Error sending message");
.catch((e) => {
log.error("Error sending message", e);
});
});
}


@ -1,5 +1,5 @@
import ElectronAPIs from "@/next/electron";
import LinkButton from "@ente/shared/components/LinkButton";
import ElectronAPIs from "@ente/shared/electron";
import { logError } from "@ente/shared/sentry";
import { Tooltip } from "@mui/material";
import { styled } from "@mui/material/styles";


@ -1,11 +1,10 @@
import log from "@/next/log";
import {
SpaceBetweenFlex,
VerticallyCenteredFlex,
} from "@ente/shared/components/Container";
import DialogTitleWithCloseButton from "@ente/shared/components/DialogBox/TitleWithCloseButton";
import { CustomError } from "@ente/shared/error";
import { addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import {
Box,
Button,
@ -68,7 +67,7 @@ export default function ExportModal(props: Props) {
setContinuousExport(exportSettings?.continuousExport ?? false);
void syncExportRecord(exportSettings?.folder);
} catch (e) {
logError(e, "export on mount useEffect failed");
log.error("export on mount useEffect failed", e);
}
}, []);
@ -123,7 +122,7 @@ export default function ExportModal(props: Props) {
setPendingExports(pendingExports);
} catch (e) {
if (e.message !== CustomError.EXPORT_FOLDER_DOES_NOT_EXIST) {
logError(e, "syncExportRecord failed");
log.error("syncExportRecord failed", e);
}
}
};
@ -135,12 +134,12 @@ export default function ExportModal(props: Props) {
const handleChangeExportDirectoryClick = async () => {
try {
const newFolder = await exportService.changeExportDirectory();
addLogLine(`Export folder changed to ${newFolder}`);
log.info(`Export folder changed to ${newFolder}`);
updateExportFolder(newFolder);
void syncExportRecord(newFolder);
} catch (e) {
if (e.message !== CustomError.SELECT_FOLDER_ABORTED) {
logError(e, "handleChangeExportDirectoryClick failed");
log.error("handleChangeExportDirectoryClick failed", e);
}
}
};
@ -156,7 +155,7 @@ export default function ExportModal(props: Props) {
}
updateContinuousExport(newContinuousExport);
} catch (e) {
logError(e, "onContinuousExportChange failed");
log.error("onContinuousExportChange failed", e);
}
};
@ -166,7 +165,7 @@ export default function ExportModal(props: Props) {
await exportService.scheduleExport();
} catch (e) {
if (e.message !== CustomError.EXPORT_FOLDER_DOES_NOT_EXIST) {
logError(e, "scheduleExport failed");
log.error("scheduleExport failed", e);
}
}
};

View file

@ -1,4 +1,4 @@
import ElectronAPIs from "@ente/shared/electron";
import ElectronAPIs from "@/next/electron";
import Notification from "components/Notification";
import { t } from "i18next";
import isElectron from "is-electron";

View file

@ -1,11 +1,9 @@
import { Skeleton, styled } from "@mui/material";
import { useEffect, useState } from "react";
import { addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import log from "@/next/log";
import { cached } from "@ente/shared/storage/cacheStorage/helpers";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
import { User } from "@ente/shared/user/types";
import { Skeleton, styled } from "@mui/material";
import { useEffect, useState } from "react";
import machineLearningService from "services/machineLearning/machineLearningService";
import { imageBitmapToBlob } from "utils/image";
@ -44,9 +42,9 @@ export function ImageCacheView(props: {
props.url,
async () => {
try {
addLogLine(
"ImageCacheView: regenerate face crop",
props.faceID,
log.debug(
() =>
`ImageCacheView: regenerate face crop for ${props.faceID}`,
);
return machineLearningService.regenerateFaceCrop(
user.token,
@ -54,9 +52,9 @@ export function ImageCacheView(props: {
props.faceID,
);
} catch (e) {
logError(
e,
log.error(
"ImageCacheView: regenerate face crop failed",
e,
);
}
},
@ -65,7 +63,7 @@ export function ImageCacheView(props: {
!didCancel && setImageBlob(blob);
} catch (e) {
logError(e, "ImageCacheView useEffect failed");
log.error("ImageCacheView useEffect failed", e);
}
}
loadImage();

View file

@ -1,5 +1,5 @@
import { convertBytesToHumanReadable } from "@/next/file";
import { FlexWrapper } from "@ente/shared/components/Container";
import { convertBytesToHumanReadable } from "@ente/shared/utils/size";
import { Box, styled } from "@mui/material";
import {
DATE_CONTAINER_HEIGHT,

View file

@ -1,6 +1,6 @@
import { convertBytesToHumanReadable } from "@/next/file";
import { FlexWrapper } from "@ente/shared/components/Container";
import { formatDate, getDate, isSameDay } from "@ente/shared/time/format";
import { convertBytesToHumanReadable } from "@ente/shared/utils/size";
import { Box, Checkbox, Link, Typography, styled } from "@mui/material";
import {
DATE_CONTAINER_HEIGHT,

View file

@ -16,9 +16,9 @@ import {
isSupportedRawFormat,
} from "utils/file";
import log from "@/next/log";
import { FlexWrapper } from "@ente/shared/components/Container";
import EnteSpinner from "@ente/shared/components/EnteSpinner";
import { addLocalLog } from "@ente/shared/logging";
import AlbumOutlined from "@mui/icons-material/AlbumOutlined";
import ChevronLeft from "@mui/icons-material/ChevronLeft";
import ChevronRight from "@mui/icons-material/ChevronRight";
@ -171,7 +171,7 @@ function PhotoViewer(props: Iprops) {
return;
}
addLocalLog(() => "Event: " + event.key);
log.debug(() => "Event: " + event.key);
switch (event.key) {
case "i":

View file

@ -3,9 +3,9 @@ import { AppContext } from "pages/_app";
import { useContext, useEffect, useState } from "react";
import { Trans } from "react-i18next";
import ElectronAPIs from "@ente/shared/electron";
import ElectronAPIs from "@/next/electron";
import { savedLogs } from "@/next/log-web";
import { addLogLine } from "@ente/shared/logging";
import { getDebugLogs } from "@ente/shared/logging/web";
import { downloadAsFile } from "@ente/shared/utils";
import Typography from "@mui/material/Typography";
import { EnteMenuItem } from "components/Menu/EnteMenuItem";
@ -38,22 +38,17 @@ export default function DebugSection() {
proceed: {
text: t("DOWNLOAD"),
variant: "accent",
action: downloadDebugLogs,
action: downloadLogs,
},
close: {
text: t("CANCEL"),
},
});
const downloadDebugLogs = () => {
addLogLine("exporting logs");
if (isElectron()) {
ElectronAPIs.openLogDirectory();
} else {
const logs = getDebugLogs();
downloadAsFile(`debug_logs_${Date.now()}.txt`, logs);
}
const downloadLogs = () => {
addLogLine("Downloading logs");
if (isElectron()) ElectronAPIs.openLogDirectory();
else downloadAsFile(`debug_logs_${Date.now()}.txt`, savedLogs());
};
return (

View file

@ -1,4 +1,4 @@
import ElectronAPIs from "@ente/shared/electron";
import ElectronAPIs from "@/next/electron";
import { CustomError } from "@ente/shared/error";
import { addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";

View file

@ -6,8 +6,8 @@ import watchFolderService from "services/watchFolder/watchFolderService";
import { WatchMapping } from "types/watchFolder";
import { MappingList } from "./mappingList";
import ElectronAPIs from "@/next/electron";
import DialogTitleWithCloseButton from "@ente/shared/components/DialogBox/TitleWithCloseButton";
import ElectronAPIs from "@ente/shared/electron";
import UploadStrategyChoiceModal from "components/Upload/UploadStrategyChoiceModal";
import { PICKED_UPLOAD_TYPE, UPLOAD_STRATEGY } from "constants/upload";
import isElectron from "is-electron";

View file

@ -1,5 +1,8 @@
import { CustomHead } from "@/next/components/Head";
import ElectronAPIs from "@/next/electron";
import { setupI18n } from "@/next/i18n";
import { logStartupBanner } from "@/next/log-web";
import { AppUpdateInfo } from "@/next/types/ipc";
import {
APPS,
APP_TITLES,
@ -20,16 +23,10 @@ import EnteSpinner from "@ente/shared/components/EnteSpinner";
import { MessageContainer } from "@ente/shared/components/MessageContainer";
import AppNavbar from "@ente/shared/components/Navbar/app";
import { PHOTOS_PAGES as PAGES } from "@ente/shared/constants/pages";
import ElectronAPIs from "@ente/shared/electron";
import { AppUpdateInfo } from "@ente/shared/electron/types";
import { CustomError } from "@ente/shared/error";
import { Events, eventBus } from "@ente/shared/events";
import { useLocalState } from "@ente/shared/hooks/useLocalState";
import { addLogLine } from "@ente/shared/logging";
import {
clearLogsIfLocalStorageLimitExceeded,
logStartupMessage,
} from "@ente/shared/logging/web";
import HTTPService from "@ente/shared/network/HTTPService";
import { logError } from "@ente/shared/sentry";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
@ -41,6 +38,7 @@ import {
import { getTheme } from "@ente/shared/themes";
import { THEME_COLOR } from "@ente/shared/themes/constants";
import { SetTheme } from "@ente/shared/themes/types";
import type { User } from "@ente/shared/user/types";
import ArrowForward from "@mui/icons-material/ArrowForward";
import { CssBaseline, useMediaQuery } from "@mui/material";
import { ThemeProvider } from "@mui/material/styles";
@ -149,15 +147,12 @@ export default function App({ Component, pageProps }: AppProps) {
);
useEffect(() => {
//setup i18n
setupI18n().finally(() => setIsI18nReady(true));
// set client package name in headers
const userId = (getData(LS_KEYS.USER) as User)?.id;
logStartupBanner(APPS.PHOTOS, userId);
HTTPService.setHeaders({
"X-Client-Package": CLIENT_PACKAGE_NAMES.get(APPS.PHOTOS),
});
// setup logging
clearLogsIfLocalStorageLimitExceeded();
logStartupMessage(APPS.PHOTOS);
}, []);
useEffect(() => {

View file

@ -89,9 +89,9 @@ import {
splitNormalAndHiddenCollections,
} from "utils/collection";
import ElectronAPIs from "@/next/electron";
import { APPS } from "@ente/shared/apps/constants";
import { CenteredFlex } from "@ente/shared/components/Container";
import ElectronAPIs from "@ente/shared/electron";
import useFileInput from "@ente/shared/hooks/useFileInput";
import useMemoSingleThreaded from "@ente/shared/hooks/useMemoSingleThreaded";
import InMemoryStore, { MS_KEYS } from "@ente/shared/storage/InMemoryStore";

View file

@ -1,3 +1,4 @@
import ElectronAPIs from "@/next/electron";
import Login from "@ente/accounts/components/Login";
import SignUp from "@ente/accounts/components/SignUp";
import { APPS } from "@ente/shared/apps/constants";
@ -5,7 +6,6 @@ import { EnteLogo } from "@ente/shared/components/EnteLogo";
import EnteSpinner from "@ente/shared/components/EnteSpinner";
import { PHOTOS_PAGES as PAGES } from "@ente/shared/constants/pages";
import { saveKeyInSessionStore } from "@ente/shared/crypto/helpers";
import ElectronAPIs from "@ente/shared/electron";
import { getAlbumsURL } from "@ente/shared/network/api";
import { logError } from "@ente/shared/sentry";
import localForage from "@ente/shared/storage/localForage";

View file

@ -1,5 +1,5 @@
import ElectronAPIs from "@/next/electron";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import ElectronAPIs from "@ente/shared/electron";
import { CustomError } from "@ente/shared/error";
import { Events, eventBus } from "@ente/shared/events";
import { addLogLine } from "@ente/shared/logging";

View file

@ -37,7 +37,7 @@ import {
} from "utils/file";
import { decodeLivePhoto } from "../livePhotoService";
import ElectronAPIs from "@ente/shared/electron";
import ElectronAPIs from "@/next/electron";
import { CustomError } from "@ente/shared/error";
import { Events, eventBus } from "@ente/shared/events";
import { addLogLine } from "@ente/shared/logging";

View file

@ -1,5 +1,4 @@
import { addLocalLog, addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import log from "@/next/log";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
import { User } from "@ente/shared/user/types";
import { sleep } from "@ente/shared/utils";
@ -52,25 +51,25 @@ export async function migrateExport(
updateProgress: (progress: ExportProgress) => void,
) {
try {
addLogLine(`current export version: ${exportRecord.version}`);
log.info(`current export version: ${exportRecord.version}`);
if (exportRecord.version === 0) {
addLogLine("migrating export to version 1");
log.info("migrating export to version 1");
await migrationV0ToV1(exportDir, exportRecord as ExportRecordV0);
exportRecord = await exportService.updateExportRecord(exportDir, {
version: 1,
});
addLogLine("migration to version 1 complete");
log.info("migration to version 1 complete");
}
if (exportRecord.version === 1) {
addLogLine("migrating export to version 2");
log.info("migrating export to version 2");
await migrationV1ToV2(exportRecord as ExportRecordV1, exportDir);
exportRecord = await exportService.updateExportRecord(exportDir, {
version: 2,
});
addLogLine("migration to version 2 complete");
log.info("migration to version 2 complete");
}
if (exportRecord.version === 2) {
addLogLine("migrating export to version 3");
log.info("migrating export to version 3");
await migrationV2ToV3(
exportDir,
exportRecord as ExportRecordV2,
@ -79,28 +78,28 @@ export async function migrateExport(
exportRecord = await exportService.updateExportRecord(exportDir, {
version: 3,
});
addLogLine("migration to version 3 complete");
log.info("migration to version 3 complete");
}
if (exportRecord.version === 3) {
addLogLine("migrating export to version 4");
log.info("migrating export to version 4");
await migrationV3ToV4(exportDir, exportRecord as ExportRecord);
exportRecord = await exportService.updateExportRecord(exportDir, {
version: 4,
});
addLogLine("migration to version 4 complete");
log.info("migration to version 4 complete");
}
if (exportRecord.version === 4) {
addLogLine("migrating export to version 5");
log.info("migrating export to version 5");
await migrationV4ToV5(exportDir, exportRecord as ExportRecord);
exportRecord = await exportService.updateExportRecord(exportDir, {
version: 5,
});
addLogLine("migration to version 5 complete");
log.info("migration to version 5 complete");
}
addLogLine(`Record at latest version`);
log.info(`Record at latest version`);
} catch (e) {
logError(e, "export record migration failed");
log.error("export record migration failed", e);
throw e;
}
}
@ -321,9 +320,8 @@ async function getFileExportNamesFromExportedFiles(
if (!exportedFiles.length) {
return;
}
addLogLine(
"updating exported files to exported file paths property",
`got ${exportedFiles.length} files`,
log.info(
`updating exported files to exported file paths property, got ${exportedFiles.length} files`,
);
let exportedFileNames: FileExportNames;
const usedFilePaths = new Map<string, Set<string>>();
@ -334,7 +332,7 @@ async function getFileExportNamesFromExportedFiles(
for (const file of exportedFiles) {
await sleep(0);
const collectionPath = exportedCollectionPaths.get(file.collectionID);
addLocalLog(
log.debug(
() =>
`collection path for ${file.collectionID} is ${collectionPath}`,
);
@ -367,7 +365,7 @@ async function getFileExportNamesFromExportedFiles(
usedFilePaths,
);
}
addLocalLog(
log.debug(
() =>
`file export name for ${file.metadata.title} is ${fileExportName}`,
);
@ -419,7 +417,7 @@ async function addCollectionExportedRecordV1(
await exportService.updateExportRecord(folder, exportRecord);
} catch (e) {
logError(e, "addCollectionExportedRecord failed");
log.error("addCollectionExportedRecord failed", e);
throw e;
}
}

View file

@ -1,4 +1,4 @@
import ElectronAPIs from "@ente/shared/electron";
import ElectronAPIs from "@/next/electron";
import isElectron from "is-electron";
import { ElectronFile } from "types/upload";
import ComlinkFFmpegWorker from "utils/comlink/ComlinkFFmpegWorker";

View file

@ -1,10 +1,10 @@
import { convertBytesToHumanReadable } from "@/next/file";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { CustomError } from "@ente/shared/error";
import { addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import { retryAsyncFunction } from "@ente/shared/utils";
import QueueProcessor from "@ente/shared/utils/queueProcessor";
import { convertBytesToHumanReadable } from "@ente/shared/utils/size";
import { ComlinkWorker } from "@ente/shared/worker/comlinkWorker";
import { getDedicatedConvertWorker } from "utils/comlink/ComlinkConvertWorker";
import { DedicatedConvertWorker } from "worker/convert.worker";

View file

@ -1,4 +1,4 @@
import ElectronAPIs from "@ente/shared/electron";
import ElectronAPIs from "@/next/electron";
import { logError } from "@ente/shared/sentry";
import { PICKED_UPLOAD_TYPE } from "constants/upload";
import { Collection } from "types/collection";

View file

@ -1,8 +1,8 @@
import { haveWindow } from "@/next/env";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { getDedicatedCryptoWorker } from "@ente/shared/crypto";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { addLogLine } from "@ente/shared/logging";
import { ComlinkWorker } from "@ente/shared/worker/comlinkWorker";
import PQueue from "p-queue";
import { EnteFile } from "types/file";
import {

View file

@ -1,8 +1,8 @@
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { eventBus, Events } from "@ente/shared/events";
import { addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import { getToken, getUserID } from "@ente/shared/storage/localStorage/helpers";
import { ComlinkWorker } from "@ente/shared/worker/comlinkWorker";
import { FILE_TYPE } from "constants/file";
import debounce from "debounce";
import PQueue from "p-queue";

View file

@ -1,5 +1,5 @@
import { convertBytesToHumanReadable } from "@/next/file";
import { logError } from "@ente/shared/sentry";
import { convertBytesToHumanReadable } from "@ente/shared/utils/size";
import { ElectronFile } from "types/upload";
export async function getUint8ArrayView(

View file

@ -1,6 +1,6 @@
import { convertBytesToHumanReadable } from "@/next/file";
import { CustomError } from "@ente/shared/error";
import { logError } from "@ente/shared/sentry";
import { convertBytesToHumanReadable } from "@ente/shared/utils/size";
import { FILE_TYPE } from "constants/file";
import {
KNOWN_NON_MEDIA_FORMATS,

View file

@ -1,5 +1,5 @@
import { getFileNameSize } from "@/next/file";
import { addLogLine } from "@ente/shared/logging";
import { getFileNameSize } from "@ente/shared/logging/web";
import { logError } from "@ente/shared/sentry";
import { FILE_READER_CHUNK_SIZE, MULTIPART_PART_SIZE } from "constants/upload";
import {

View file

@ -1,7 +1,7 @@
import { getFileNameSize } from "@/next/file";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { CustomError } from "@ente/shared/error";
import { addLogLine } from "@ente/shared/logging";
import { getFileNameSize } from "@ente/shared/logging/web";
import { logError } from "@ente/shared/sentry";
import { Remote } from "comlink";
import { FILE_READER_CHUNK_SIZE } from "constants/upload";

View file

@ -1,9 +1,8 @@
import ElectronAPIs from "@ente/shared/electron";
import ElectronAPIs from "@/next/electron";
import { convertBytesToHumanReadable, getFileNameSize } from "@/next/file";
import { CustomError } from "@ente/shared/error";
import { addLogLine } from "@ente/shared/logging";
import { getFileNameSize } from "@ente/shared/logging/web";
import { logError } from "@ente/shared/sentry";
import { convertBytesToHumanReadable } from "@ente/shared/utils/size";
import { FILE_TYPE } from "constants/file";
import { BLACK_THUMBNAIL_BASE64 } from "constants/upload";
import isElectron from "is-electron";

View file

@ -24,11 +24,11 @@ import UIService from "./uiService";
import UploadService from "./uploadService";
import uploader from "./uploader";
import { getFileNameSize } from "@/next/file";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { getDedicatedCryptoWorker } from "@ente/shared/crypto";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { addLogLine } from "@ente/shared/logging";
import { getFileNameSize } from "@ente/shared/logging/web";
import { ComlinkWorker } from "@ente/shared/worker/comlinkWorker";
import { Remote } from "comlink";
import { UPLOAD_RESULT, UPLOAD_STAGES } from "constants/upload";
import isElectron from "is-electron";

View file

@ -1,9 +1,8 @@
import { convertBytesToHumanReadable } from "@/next/file";
import log from "@/next/log";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { CustomError, handleUploadError } from "@ente/shared/error";
import { addLocalLog, addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import { sleep } from "@ente/shared/utils";
import { convertBytesToHumanReadable } from "@ente/shared/utils/size";
import { Remote } from "comlink";
import { MAX_FILE_SIZE_SUPPORTED, UPLOAD_RESULT } from "constants/upload";
import { addToCollection } from "services/collectionService";
@ -40,7 +39,7 @@ export default async function uploader(
fileWithCollection,
)}_${convertBytesToHumanReadable(UploadService.getAssetSize(uploadAsset))}`;
addLogLine(`uploader called for ${fileNameSize}`);
log.info(`uploader called for ${fileNameSize}`);
UIService.setFileProgress(localID, 0);
await sleep(0);
let fileTypeInfo: FileTypeInfo;
@ -50,13 +49,13 @@ export default async function uploader(
if (fileSize >= MAX_FILE_SIZE_SUPPORTED) {
return { fileUploadResult: UPLOAD_RESULT.TOO_LARGE };
}
addLogLine(`getting filetype for ${fileNameSize}`);
log.info(`getting filetype for ${fileNameSize}`);
fileTypeInfo = await UploadService.getAssetFileType(uploadAsset);
addLogLine(
log.info(
`got filetype for ${fileNameSize} - ${JSON.stringify(fileTypeInfo)}`,
);
addLogLine(`extracting metadata ${fileNameSize}`);
log.info(`extracting metadata ${fileNameSize}`);
const { metadata, publicMagicMetadata } =
await UploadService.extractAssetMetadata(
worker,
@ -69,7 +68,7 @@ export default async function uploader(
existingFiles,
metadata,
);
addLocalLog(
log.debug(
() =>
`matchedFileList: ${matchingExistingFiles
.map((f) => `${f.id}-${f.metadata.title}`)
@ -78,13 +77,13 @@ export default async function uploader(
if (matchingExistingFiles?.length) {
const matchingExistingFilesCollectionIDs =
matchingExistingFiles.map((e) => e.collectionID);
addLocalLog(
log.debug(
() =>
`matched file collectionIDs:${matchingExistingFilesCollectionIDs}
and collectionID:${collection.id}`,
);
if (matchingExistingFilesCollectionIDs.includes(collection.id)) {
addLogLine(
log.info(
`file already present in the collection , skipped upload for ${fileNameSize}`,
);
const sameCollectionMatchingExistingFile =
@ -96,7 +95,7 @@ export default async function uploader(
uploadedFile: sameCollectionMatchingExistingFile,
};
} else {
addLogLine(
log.info(
`same file in ${matchingExistingFilesCollectionIDs.length} collection found for ${fileNameSize} ,adding symlink`,
);
// any of the matching files can be used to add a symlink
@ -112,7 +111,7 @@ export default async function uploader(
if (uploadCancelService.isUploadCancelationRequested()) {
throw Error(CustomError.UPLOAD_CANCELLED);
}
addLogLine(`reading asset ${fileNameSize}`);
log.info(`reading asset ${fileNameSize}`);
const file = await UploadService.readAsset(fileTypeInfo, uploadAsset);
@ -137,7 +136,7 @@ export default async function uploader(
if (uploadCancelService.isUploadCancelationRequested()) {
throw Error(CustomError.UPLOAD_CANCELLED);
}
addLogLine(`encryptAsset ${fileNameSize}`);
log.info(`encryptAsset ${fileNameSize}`);
const encryptedFile = await UploadService.encryptAsset(
worker,
fileWithMetadata,
@ -147,9 +146,9 @@ export default async function uploader(
if (uploadCancelService.isUploadCancelationRequested()) {
throw Error(CustomError.UPLOAD_CANCELLED);
}
addLogLine(`uploadToBucket ${fileNameSize}`);
log.info(`uploadToBucket ${fileNameSize}`);
const logger: Logger = (message: string) => {
addLogLine(message, `fileNameSize: ${fileNameSize}`);
log.info(message, `fileNameSize: ${fileNameSize}`);
};
const backupedFile: BackupedFile = await UploadService.uploadToBucket(
logger,
@ -161,11 +160,11 @@ export default async function uploader(
backupedFile,
encryptedFile.fileKey,
);
addLogLine(`uploading file to server ${fileNameSize}`);
log.info(`uploading file to server ${fileNameSize}`);
const uploadedFile = await UploadService.uploadFile(uploadFile);
addLogLine(`${fileNameSize} successfully uploaded`);
log.info(`${fileNameSize} successfully uploaded`);
return {
fileUploadResult: metadata.hasStaticThumbnail
@ -174,15 +173,18 @@ export default async function uploader(
uploadedFile: uploadedFile,
};
} catch (e) {
addLogLine(`upload failed for ${fileNameSize} ,error: ${e.message}`);
log.info(`upload failed for ${fileNameSize} ,error: ${e.message}`);
if (
e.message !== CustomError.UPLOAD_CANCELLED &&
e.message !== CustomError.UNSUPPORTED_FILE_FORMAT
) {
logError(e, "file upload failed", {
fileFormat: fileTypeInfo?.exactType,
fileSize: convertBytesToHumanReadable(fileSize),
});
log.error(
`file upload failed - ${JSON.stringify({
fileFormat: fileTypeInfo?.exactType,
fileSize: convertBytesToHumanReadable(fileSize),
})}`,
e,
);
}
const error = handleUploadError(e);
switch (error.message) {

View file

@ -1,5 +1,5 @@
import { getFileNameSize } from "@/next/file";
import { addLogLine } from "@ente/shared/logging";
import { getFileNameSize } from "@ente/shared/logging/web";
import { logError } from "@ente/shared/sentry";
import { NULL_EXTRACTED_METADATA } from "constants/upload";
import * as ffmpegService from "services/ffmpeg/ffmpegService";

View file

@ -1,6 +1,5 @@
import ElectronAPIs from "@ente/shared/electron";
import { addLocalLog, addLogLine } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import ElectronAPIs from "@/next/electron";
import log from "@/next/log";
import { UPLOAD_RESULT, UPLOAD_STRATEGY } from "constants/upload";
import debounce from "debounce";
import uploadManager from "services/upload/uploadManager";
@ -66,7 +65,7 @@ class watchFolderService {
this.setupWatcherFunctions();
await this.getAndSyncDiffOfFiles();
} catch (e) {
logError(e, "error while initializing watch service");
log.error("error while initializing watch service", e);
}
}
@ -90,7 +89,7 @@ class watchFolderService {
this.trashDiffOfFiles(mapping, filesOnDisk);
}
} catch (e) {
logError(e, "error while getting and syncing diff of files");
log.error("error while getting and syncing diff of files", e);
}
}
@ -193,7 +192,7 @@ class watchFolderService {
);
this.getAndSyncDiffOfFiles();
} catch (e) {
logError(e, "error while adding watch mapping");
log.error("error while adding watch mapping", e);
}
}
@ -201,7 +200,7 @@ class watchFolderService {
try {
await ElectronAPIs.removeWatchMapping(folderPath);
} catch (e) {
logError(e, "error while removing watch mapping");
log.error("error while removing watch mapping", e);
}
}
@ -209,7 +208,7 @@ class watchFolderService {
try {
return (await ElectronAPIs.getWatchMappings()) ?? [];
} catch (e) {
logError(e, "error while getting watch mappings");
log.error("error while getting watch mappings", e);
return [];
}
}
@ -230,7 +229,7 @@ class watchFolderService {
}
const event = this.clubSameCollectionEvents();
addLogLine(
log.info(
`running event type:${event.type} collectionName:${event.collectionName} folderPath:${event.folderPath} , fileCount:${event.files?.length} pathsCount: ${event.paths?.length}`,
);
const mappings = await this.getWatchMappings();
@ -240,12 +239,12 @@ class watchFolderService {
if (!mapping) {
throw Error("no Mapping found for event");
}
addLogLine(
log.info(
`mapping for event rootFolder: ${mapping.rootFolderName} folderPath: ${mapping.folderPath} uploadStrategy: ${mapping.uploadStrategy} syncedFilesCount: ${mapping.syncedFiles.length} ignoredFilesCount ${mapping.ignoredFiles.length}`,
);
if (event.type === "upload") {
event.files = getValidFilesToUpload(event.files, mapping);
addLogLine(`valid files count: ${event.files?.length}`);
log.info(`valid files count: ${event.files?.length}`);
if (event.files.length === 0) {
return;
}
@ -262,7 +261,7 @@ class watchFolderService {
setTimeout(() => this.runNextEvent(), 0);
}
} catch (e) {
logError(e, "runNextEvent failed");
log.error("runNextEvent failed", e);
}
}
@ -273,7 +272,7 @@ class watchFolderService {
this.setCollectionName(this.currentEvent.collectionName);
this.setElectronFiles(this.currentEvent.files);
} catch (e) {
logError(e, "error while running next upload");
log.error("error while running next upload", e);
}
}
@ -282,7 +281,7 @@ class watchFolderService {
fileWithCollection: FileWithCollection,
file: EncryptedEnteFile,
) {
addLocalLog(() => `onFileUpload called`);
log.debug(() => `onFileUpload called`);
if (!this.isUploadRunning()) {
return;
}
@ -338,7 +337,7 @@ class watchFolderService {
collections: Collection[],
) {
try {
addLocalLog(
log.debug(
() =>
`allFileUploadsDone,${JSON.stringify(
filesWithCollection,
@ -348,8 +347,8 @@ class watchFolderService {
(collection) =>
collection.id === filesWithCollection[0].collectionID,
);
addLocalLog(() => `got collection ${!!collection}`);
addLocalLog(
log.debug(() => `got collection ${!!collection}`);
log.debug(
() =>
`${this.isEventRunning} ${this.currentEvent.collectionName} ${collection?.name}`,
);
@ -371,8 +370,8 @@ class watchFolderService {
);
}
addLocalLog(() => `syncedFiles ${JSON.stringify(syncedFiles)}`);
addLocalLog(() => `ignoredFiles ${JSON.stringify(ignoredFiles)}`);
log.debug(() => `syncedFiles ${JSON.stringify(syncedFiles)}`);
log.debug(() => `ignoredFiles ${JSON.stringify(ignoredFiles)}`);
if (syncedFiles.length > 0) {
this.currentlySyncedMapping.syncedFiles = [
@ -397,7 +396,7 @@ class watchFolderService {
this.runPostUploadsAction();
} catch (e) {
logError(e, "error while running all file uploads done");
log.error("error while running all file uploads done", e);
}
}
@ -442,7 +441,7 @@ class watchFolderService {
};
syncedFiles.push(imageFile);
syncedFiles.push(videoFile);
addLocalLog(
log.debug(
() =>
`added image ${JSON.stringify(
imageFile,
@ -456,7 +455,7 @@ class watchFolderService {
) {
ignoredFiles.push(imagePath);
ignoredFiles.push(videoPath);
addLocalLog(
log.debug(
() =>
`added image ${imagePath} and video file ${videoPath} to rejectedFiles`,
);
@ -476,10 +475,10 @@ class watchFolderService {
.collectionID,
};
syncedFiles.push(file);
addLocalLog(() => `added file ${JSON.stringify(file)} `);
log.debug(() => `added file ${JSON.stringify(file)}`);
} else if (this.unUploadableFilePaths.has(filePath)) {
ignoredFiles.push(filePath);
addLocalLog(() => `added file ${filePath} to rejectedFiles`);
log.debug(() => `added file ${filePath} to rejectedFiles`);
}
this.filePathToUploadedFileIDMap.delete(filePath);
}
@ -509,7 +508,7 @@ class watchFolderService {
this.currentlySyncedMapping.syncedFiles,
);
} catch (e) {
logError(e, "error while running next trash");
log.error("error while running next trash", e);
}
}
@ -539,7 +538,7 @@ class watchFolderService {
}
this.syncWithRemote();
} catch (e) {
logError(e, "error while trashing by IDs");
log.error("error while trashing by IDs", e);
}
}
@ -581,7 +580,7 @@ class watchFolderService {
folderPath: mapping.folderPath,
};
} catch (e) {
logError(e, "error while getting collection name");
log.error("error while getting collection name", e);
}
}
@ -599,7 +598,7 @@ class watchFolderService {
const folderPath = await ElectronAPIs.selectDirectory();
return folderPath;
} catch (e) {
logError(e, "error while selecting folder");
log.error("error while selecting folder", e);
}
}
@ -627,7 +626,7 @@ class watchFolderService {
const isFolder = await ElectronAPIs.isFolder(folderPath);
return isFolder;
} catch (e) {
logError(e, "error while checking if folder exists");
log.error("error while checking if folder exists", e);
}
}

View file

@ -1,4 +1,4 @@
import ElectronAPIs from "@ente/shared/electron";
import ElectronAPIs from "@/next/electron";
import { CustomError } from "@ente/shared/error";
import { addLogLine } from "@ente/shared/logging";
import { getAlbumsURL } from "@ente/shared/network/api";

View file

@ -1,5 +1,5 @@
import { haveWindow } from "@/next/env";
import { ComlinkWorker } from "@ente/shared/worker/comlinkWorker";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { Remote } from "comlink";
import { DedicatedConvertWorker } from "worker/convert.worker";

View file

@ -1,4 +1,4 @@
import { ComlinkWorker } from "@ente/shared/worker/comlinkWorker";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { Remote } from "comlink";
import { DedicatedFFmpegWorker } from "worker/ffmpeg.worker";

View file

@ -1,5 +1,5 @@
import { haveWindow } from "@/next/env";
import { ComlinkWorker } from "@ente/shared/worker/comlinkWorker";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { DedicatedMLWorker } from "worker/ml.worker";
export const getDedicatedMLWorker = (name: string) => {

View file

@ -1,5 +1,5 @@
import { haveWindow } from "@/next/env";
import { ComlinkWorker } from "@ente/shared/worker/comlinkWorker";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { Remote } from "comlink";
import { DedicatedSearchWorker } from "worker/search.worker";

View file

@ -1,4 +1,10 @@
import { logError } from "@ente/shared/sentry";
import ElectronAPIs from "@/next/electron";
import { convertBytesToHumanReadable } from "@/next/file";
import log from "@/next/log";
import { workerBridge } from "@/next/worker/worker-bridge";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { CustomError } from "@ente/shared/error";
import { isPlaybackPossible } from "@ente/shared/media/video-playback";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
import { User } from "@ente/shared/user/types";
import { downloadUsingAnchor } from "@ente/shared/utils";
@ -11,11 +17,20 @@ import {
TYPE_JPEG,
TYPE_JPG,
} from "constants/file";
import { t } from "i18next";
import isElectron from "is-electron";
import { moveToHiddenCollection } from "services/collectionService";
import DownloadManager, {
LivePhotoSourceURL,
SourceURLs,
} from "services/download";
import * as ffmpegService from "services/ffmpeg/ffmpegService";
import {
deleteFromTrash,
trashFiles,
updateFileMagicMetadata,
updateFilePublicMagicMetadata,
} from "services/fileService";
import heicConversionService from "services/heicConversionService";
import { decodeLivePhoto } from "services/livePhotoService";
import { getFileType } from "services/typeDetectionService";
@ -35,27 +50,9 @@ import {
SetFilesDownloadProgressAttributesCreator,
} from "types/gallery";
import { VISIBILITY_STATE } from "types/magicMetadata";
import { isArchivedFile, updateMagicMetadata } from "utils/magicMetadata";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { CustomError } from "@ente/shared/error";
import { addLocalLog, addLogLine } from "@ente/shared/logging";
import { isPlaybackPossible } from "@ente/shared/media/video-playback";
import { convertBytesToHumanReadable } from "@ente/shared/utils/size";
import isElectron from "is-electron";
import { moveToHiddenCollection } from "services/collectionService";
import {
deleteFromTrash,
trashFiles,
updateFileMagicMetadata,
updateFilePublicMagicMetadata,
} from "services/fileService";
import { FileTypeInfo } from "types/upload";
import { default as ElectronAPIs } from "@ente/shared/electron";
import { workerBridge } from "@ente/shared/worker/worker-bridge";
import { t } from "i18next";
import { getFileExportPath, getUniqueFileExportName } from "utils/export";
import { isArchivedFile, updateMagicMetadata } from "utils/magicMetadata";
const WAIT_TIME_IMAGE_CONVERSION = 30 * 1000;
@ -128,7 +125,7 @@ export async function downloadFile(file: EnteFile) {
downloadUsingAnchor(tempURL, file.metadata.title);
}
} catch (e) {
logError(e, "failed to download file");
log.error("failed to download file", e);
throw e;
}
}
@ -244,7 +241,7 @@ export async function decryptFile(
pubMagicMetadata: filePubMagicMetadata,
};
} catch (e) {
logError(e, "file decryption failed");
log.error("file decryption failed", e);
throw e;
}
}
@ -413,19 +410,17 @@ export async function getPlayableVideo(
if (!forceConvert && !runOnWeb && !isElectron()) {
return null;
}
addLogLine(
"video format not supported, converting it name:",
videoNameTitle,
log.info(
`video format not supported, converting it name: ${videoNameTitle}`,
);
const mp4ConvertedVideo = await ffmpegService.convertToMP4(
new File([videoBlob], videoNameTitle),
);
addLogLine("video successfully converted", videoNameTitle);
log.info(`video successfully converted ${videoNameTitle}`);
return new Blob([await mp4ConvertedVideo.arrayBuffer()]);
}
} catch (e) {
addLogLine("video conversion failed", videoNameTitle);
logError(e, "video conversion failed");
log.error("video conversion failed", e);
return null;
}
}
@ -435,7 +430,7 @@ export async function getRenderableImage(fileName: string, imageBlob: Blob) {
try {
const tempFile = new File([imageBlob], fileName);
fileTypeInfo = await getFileType(tempFile);
addLocalLog(() => `file type info: ${JSON.stringify(fileTypeInfo)}`);
log.debug(() => `file type info: ${JSON.stringify(fileTypeInfo)}`);
const { exactType } = fileTypeInfo;
let convertedImageBlob: Blob;
if (isRawFile(exactType)) {
@ -447,7 +442,7 @@ export async function getRenderableImage(fileName: string, imageBlob: Blob) {
if (!isElectron()) {
throw Error(CustomError.NOT_AVAILABLE_ON_WEB);
}
addLogLine(
log.info(
`RawConverter called for ${fileName}-${convertBytesToHumanReadable(
imageBlob.size,
)}`,
@ -456,20 +451,20 @@ export async function getRenderableImage(fileName: string, imageBlob: Blob) {
imageBlob,
fileName,
);
addLogLine(`${fileName} successfully converted`);
log.info(`${fileName} successfully converted`);
} catch (e) {
try {
if (!isFileHEIC(exactType)) {
throw e;
}
addLogLine(
log.info(
`HEICConverter called for ${fileName}-${convertBytesToHumanReadable(
imageBlob.size,
)}`,
);
convertedImageBlob =
await heicConversionService.convert(imageBlob);
addLogLine(`${fileName} successfully converted`);
log.info(`${fileName} successfully converted`);
} catch (e) {
throw Error(CustomError.NON_PREVIEWABLE_FILE);
}
@ -479,7 +474,10 @@ export async function getRenderableImage(fileName: string, imageBlob: Blob) {
return imageBlob;
}
} catch (e) {
logError(e, "get Renderable Image failed", { fileTypeInfo });
log.error(
`Failed to get renderable image for ${JSON.stringify(fileTypeInfo)}`,
e,
);
return null;
}
}
@ -491,11 +489,10 @@ const convertToJPEGInElectron = async (
try {
const startTime = Date.now();
const inputFileData = new Uint8Array(await fileBlob.arrayBuffer());
const convertedFileData = await workerBridge.convertToJPEG(
inputFileData,
filename,
);
addLogLine(
const convertedFileData = isElectron()
? await ElectronAPIs.convertToJPEG(inputFileData, filename)
: await workerBridge.convertToJPEG(inputFileData, filename);
log.info(
`originalFileSize:${convertBytesToHumanReadable(
fileBlob?.size,
)},convertedFileSize:${convertBytesToHumanReadable(
@ -508,7 +505,7 @@ const convertToJPEGInElectron = async (
e.message !==
CustomError.WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED
) {
logError(e, "failed to convert to jpeg natively");
log.error("failed to convert to jpeg natively", e);
}
throw e;
}
@ -761,7 +758,7 @@ export async function downloadFiles(
await downloadFile(file);
progressBarUpdater?.increaseSuccess();
} catch (e) {
logError(e, "download fail for file");
log.error("download fail for file", e);
progressBarUpdater?.increaseFailed();
}
}
@ -785,7 +782,7 @@ export async function downloadFilesDesktop(
await downloadFileDesktop(fileReader, file, downloadPath);
progressBarUpdater?.increaseSuccess();
} catch (e) {
logError(e, "download fail for file");
log.error("download fail for file", e);
progressBarUpdater?.increaseFailed();
}
}
@ -890,7 +887,7 @@ export const copyFileToClipboard = async (fileUrl: string) => {
clearTimeout(timeout);
};
} catch (e) {
void logError(e, "failed to copy to clipboard");
log.error("failed to copy to clipboard", e);
reject(e);
} finally {
clearTimeout(timeout);
@ -905,7 +902,7 @@ export const copyFileToClipboard = async (fileUrl: string) => {
await navigator.clipboard
.write([new ClipboardItem({ "image/png": blobPromise })])
.catch((e) => logError(e, "failed to copy to clipboard"));
.catch((e) => log.error("failed to copy to clipboard", e));
};
export function getLatestVersionFiles(files: EnteFile[]) {

View file

@ -1,7 +1,7 @@
import ElectronAPIs from "@/next/electron";
import { AppUpdateInfo } from "@/next/types/ipc";
import { logoutUser } from "@ente/accounts/services/user";
import { DialogBoxAttributes } from "@ente/shared/components/DialogBox/types";
import ElectronAPIs from "@ente/shared/electron";
import { AppUpdateInfo } from "@ente/shared/electron/types";
import AutoAwesomeOutlinedIcon from "@mui/icons-material/AutoAwesomeOutlined";
import InfoOutlined from "@mui/icons-material/InfoRounded";
import { Link } from "@mui/material";

View file

@ -1,5 +1,5 @@
import ElectronAPIs from "@ente/shared/electron";
import { getFileNameSize } from "@ente/shared/logging/web";
import ElectronAPIs from "@/next/electron";
import { getFileNameSize } from "@/next/file";
import { FILE_READER_CHUNK_SIZE, PICKED_UPLOAD_TYPE } from "constants/upload";
import isElectron from "is-electron";
import { getElectronFileStream, getFileStream } from "services/readerService";

View file

@ -1,3 +1,4 @@
import log from "@/next/log";
import { APPS } from "@ente/shared/apps/constants";
import FormPaperFooter from "@ente/shared/components/Form/FormPaper/Footer";
import FormPaperTitle from "@ente/shared/components/Form/FormPaper/Title";
@ -5,7 +6,6 @@ import LinkButton from "@ente/shared/components/LinkButton";
import SingleInputForm, {
SingleInputFormProps,
} from "@ente/shared/components/SingleInputForm";
import { addLocalLog } from "@ente/shared/logging";
import { LS_KEYS, setData } from "@ente/shared/storage/localStorage";
import { Input } from "@mui/material";
import { t } from "i18next";
@ -29,9 +29,7 @@ export default function Login(props: LoginProps) {
try {
setData(LS_KEYS.USER, { email });
const srpAttributes = await getSRPAttributes(email);
addLocalLog(
() => ` srpAttributes: ${JSON.stringify(srpAttributes)}`,
);
log.debug(() => ` srpAttributes: ${JSON.stringify(srpAttributes)}`);
if (!srpAttributes || srpAttributes.isEmailMFAEnabled) {
await sendOtt(props.appName, email);
router.push(PAGES.VERIFY);

View file

@ -1,29 +1,5 @@
import { useEffect, useState } from "react";
import { t } from "i18next";
import {
decryptAndStoreToken,
generateAndSaveIntermediateKeyAttributes,
generateLoginSubKey,
saveKeyInSessionStore,
} from "@ente/shared/crypto/helpers";
import {
LS_KEYS,
clearData,
getData,
setData,
} from "@ente/shared/storage/localStorage";
import {
SESSION_KEYS,
getKey,
removeKey,
setKey,
} from "@ente/shared/storage/sessionStorage";
import { PAGES } from "../constants/pages";
import { generateSRPSetupAttributes } from "../services/srp";
import { logoutUser } from "../services/user";
import ElectronAPIs from "@/next/electron";
import log from "@/next/log";
import { APP_HOMES } from "@ente/shared/apps/constants";
import { PageProps } from "@ente/shared/apps/types";
import { VerticallyCentered } from "@ente/shared/components/Container";
@ -36,23 +12,47 @@ import VerifyMasterPasswordForm, {
VerifyMasterPasswordFormProps,
} from "@ente/shared/components/VerifyMasterPasswordForm";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import {
decryptAndStoreToken,
generateAndSaveIntermediateKeyAttributes,
generateLoginSubKey,
saveKeyInSessionStore,
} from "@ente/shared/crypto/helpers";
import { B64EncryptionResult } from "@ente/shared/crypto/types";
import ElectronAPIs from "@ente/shared/electron";
import { CustomError } from "@ente/shared/error";
import { addLocalLog } from "@ente/shared/logging";
import { getAccountsURL } from "@ente/shared/network/api";
import { logError } from "@ente/shared/sentry";
import InMemoryStore, { MS_KEYS } from "@ente/shared/storage/InMemoryStore";
import {
LS_KEYS,
clearData,
getData,
setData,
} from "@ente/shared/storage/localStorage";
import {
getToken,
isFirstLogin,
setIsFirstLogin,
} from "@ente/shared/storage/localStorage/helpers";
import {
SESSION_KEYS,
getKey,
removeKey,
setKey,
} from "@ente/shared/storage/sessionStorage";
import { KeyAttributes, User } from "@ente/shared/user/types";
import { t } from "i18next";
import isElectron from "is-electron";
import { useRouter } from "next/router";
import { useEffect, useState } from "react";
import { getSRPAttributes } from "../api/srp";
import { configureSRP, loginViaSRP } from "../services/srp";
import { PAGES } from "../constants/pages";
import {
configureSRP,
generateSRPSetupAttributes,
loginViaSRP,
} from "../services/srp";
import { logoutUser } from "../services/user";
import { SRPAttributes } from "../types/srp";
export default function Credentials({ appContext, appName }: PageProps) {
@ -230,7 +230,7 @@ export default function Credentials({ appContext, appName }: PageProps) {
setData(LS_KEYS.SRP_ATTRIBUTES, srpAttributes);
}
}
addLocalLog(() => `userSRPSetupPending ${!srpAttributes}`);
log.debug(() => `userSRPSetupPending ${!srpAttributes}`);
if (!srpAttributes) {
const loginSubKey = await generateLoginSubKey(kek);
const srpSetupAttributes =

View file

@ -1,14 +1,11 @@
import { SRP, SrpClient } from "fast-srp-hap";
import { SRPAttributes, SRPSetupAttributes } from "../types/srp";
import log from "@/next/log";
import { UserVerificationResponse } from "@ente/accounts/types/user";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { generateLoginSubKey } from "@ente/shared/crypto/helpers";
import { addLocalLog } from "@ente/shared/logging";
import { logError } from "@ente/shared/sentry";
import InMemoryStore, { MS_KEYS } from "@ente/shared/storage/InMemoryStore";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
import { SRP, SrpClient } from "fast-srp-hap";
import { v4 as uuidv4 } from "uuid";
import {
completeSRPSetup,
@ -16,6 +13,7 @@ import {
startSRPSetup,
verifySRPSession,
} from "../api/srp";
import { SRPAttributes, SRPSetupAttributes } from "../types/srp";
import { convertBase64ToBuffer, convertBufferToBase64 } from "../utils";
const SRP_PARAMS = SRP.params["4096"];
@ -42,7 +40,7 @@ export const configureSRP = async ({
const srpA = convertBufferToBase64(srpClient.computeA());
addLocalLog(() => `srp a: ${srpA}`);
log.debug(() => `srp a: ${srpA}`);
const token = getToken();
const { setupID, srpB } = await startSRPSetup(token, {
srpA,
@ -62,7 +60,7 @@ export const configureSRP = async ({
srpClient.checkM2(convertBase64ToBuffer(srpM2));
} catch (e) {
logError(e, "srp configure failed");
log.error("Failed to configure SRP", e);
throw e;
} finally {
InMemoryStore.set(MS_KEYS.SRP_CONFIGURE_IN_PROGRESS, false);
@ -87,22 +85,18 @@ export const generateSRPSetupAttributes = async (
const srpVerifier = convertBufferToBase64(srpVerifierBuffer);
addLocalLog(
() => `SRP setup attributes generated',
${JSON.stringify({
srpSalt,
srpUserID,
srpVerifier,
loginSubKey,
})}`,
);
return {
const result = {
srpUserID,
srpSalt,
srpVerifier,
loginSubKey,
};
log.debug(
() => `SRP setup attributes generated: ${JSON.stringify(result)}`,
);
return result;
};
export const loginViaSRP = async (
@ -124,17 +118,17 @@ export const loginViaSRP = async (
srpClient.setB(convertBase64ToBuffer(srpB));
const m1 = srpClient.computeM1();
addLocalLog(() => `srp m1: ${convertBufferToBase64(m1)}`);
log.debug(() => `srp m1: ${convertBufferToBase64(m1)}`);
const { srpM2, ...rest } = await verifySRPSession(
sessionID,
srpAttributes.srpUserID,
convertBufferToBase64(m1),
);
addLocalLog(() => `srp verify session successful,srpM2: ${srpM2}`);
log.debug(() => `srp verify session successful,srpM2: ${srpM2}`);
srpClient.checkM2(convertBase64ToBuffer(srpM2));
addLocalLog(() => `srp server verify successful`);
log.debug(() => `srp server verify successful`);
return rest;
} catch (e) {
logError(e, "srp verify failed");

View file

@ -1,4 +1,4 @@
import ElectronAPIs from "@ente/shared/electron";
import ElectronAPIs from "@/next/electron";
import { Events, eventBus } from "@ente/shared/events";
import { logError } from "@ente/shared/sentry";
import InMemoryStore from "@ente/shared/storage/InMemoryStore";

View file

@ -0,0 +1,10 @@
import type { ElectronAPIsType } from "./types/ipc";
// TODO (MR):
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const ElectronAPIs = (globalThis as unknown as any)[
// eslint-disable-next-line @typescript-eslint/dot-notation, @typescript-eslint/no-unsafe-member-access
"ElectronAPIs"
] as ElectronAPIsType;
export default ElectronAPIs;
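
The snippet below is a minimal usage sketch (not part of the commit) of this typed accessor from renderer code; it assumes the Electron preload script has already exposed `ElectronAPIs` on `globalThis`, which is only the case inside the desktop app.

```ts
import ElectronAPIs from "@/next/electron";
import isElectron from "is-electron";

// Only touch the bridge when actually running inside the desktop (Electron)
// app; in a plain web build globalThis.ElectronAPIs is undefined.
if (isElectron()) {
    // logToDisk is one of the methods declared on ElectronAPIsType.
    ElectronAPIs.logToDisk("hello from the renderer");
}
```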

View file

@ -1,3 +1,9 @@
import type { ElectronFile } from "./types/file";
export function getFileNameSize(file: File | ElectronFile) {
return `${file.name}_${convertBytesToHumanReadable(file.size)}`;
}
export function convertBytesToHumanReadable(
bytes: number,
precision = 2,

View file

@ -0,0 +1,81 @@
import { isDevBuild } from "@/next/env";
import { addLogLine } from "@ente/shared/logging";
/**
* Log a standard startup banner.
*
* This helps us identify app starts and other environment details in the logs.
*
* @param appId An identifier of the app that is starting.
* @param userId The uid for the currently logged in user, if any.
*/
export const logStartupBanner = (appId: string, userId?: number) => {
// TODO (MR): Remove the need to lowercase it, change the enum itself.
const appIdL = appId.toLowerCase();
const sha = process.env.GIT_SHA;
const buildId = isDevBuild ? "dev " : sha ? `git ${sha} ` : "";
addLogLine(`Starting ente-${appIdL}-web ${buildId}uid ${userId ?? 0}`);
};
interface LogEntry {
timestamp: number;
logLine: string;
}
const lsKey = "logs";
/**
 * Record {@link message} in persistent log storage.
 *
 * These strings, along with their associated timestamps, get added to a small
 * ring buffer whose contents can later be retrieved using {@link savedLogs}.
*
* This ring buffer is persisted in the browser's local storage.
*/
export const logToDisk = (message: string) => {
const maxCount = 1000;
const log: LogEntry = { logLine: message, timestamp: Date.now() };
try {
const logs = logEntries();
if (logs.length > maxCount) {
logs.splice(0, logs.length - maxCount);
}
logs.push(log);
localStorage.setItem(lsKey, JSON.stringify({ logs }));
} catch (e) {
console.error("Failed to persist log", e);
if (e instanceof Error && e.name === "QuotaExceededError") {
localStorage.removeItem(lsKey);
}
}
};
const logEntries = (): unknown[] => {
const s = localStorage.getItem("logs");
if (!s) return [];
const o: unknown = JSON.parse(s);
if (!(o && typeof o == "object" && "logs" in o && Array.isArray(o.logs))) {
console.error("Unexpected log entries obtained from local storage", o);
return [];
}
return o.logs;
};
/**
* Return a string containing all recently saved log messages.
*
 * @see {@link logToDisk}.
*/
export const savedLogs = () => logEntries().map(formatEntry).join("\n");
const formatEntry = (e: unknown) => {
if (e && typeof e == "object" && "timestamp" in e && "logLine" in e) {
const timestamp = e.timestamp;
const logLine = e.logLine;
if (typeof timestamp == "number" && typeof logLine == "string") {
return `[${new Date(timestamp).toISOString()}] ${logLine}`;
}
}
return String(e);
};
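
As a rough illustration (not part of the commit), web-only builds can use this module directly; the app id and user id below are made-up values.

```ts
import { logStartupBanner, logToDisk, savedLogs } from "@/next/log-web";

// Identify this app start in the persisted logs (the user id is hypothetical).
logStartupBanner("photos", 12345);

// Append entries to the ring buffer kept in the browser's local storage.
logToDisk("sync started");
logToDisk("sync finished");

// Later, e.g. when the user downloads debug logs, retrieve everything that was
// saved as a single newline separated string.
const text = savedLogs();
console.log(text);
```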

web/packages/next/log.ts (new file, 118 lines)
View file

@ -0,0 +1,118 @@
import { inWorker } from "@/next/env";
import isElectron from "is-electron";
import ElectronAPIs from "./electron";
import { isDevBuild } from "./env";
import { logToDisk as webLogToDisk } from "./log-web";
import { workerBridge } from "./worker/worker-bridge";
/**
* Write a {@link message} to the on-disk log.
*
 * This is used by the renderer process (via the contextBridge) to add entries
 * to the log that is saved on disk.
*/
export const logToDisk = (message: string) => {
if (isElectron()) ElectronAPIs.logToDisk(message);
else if (inWorker()) workerLogToDisk(message);
else webLogToDisk(message);
};
const workerLogToDisk = (message: string) => {
workerBridge.logToDisk(message).catch((e) => {
console.error(
"Failed to log a message from worker",
e,
"\nThe message was",
message,
);
});
};
const logError = (message: string, e?: unknown) => {
if (!e) {
logError_(message);
return;
}
let es: string;
if (e instanceof Error) {
// In practice, we expect to be called with Error objects, so this is the
// happy path, so to speak.
es = `${e.name}: ${e.message}\n${e.stack}`;
} else {
// For the remaining rare cases, use the default string serialization of e.
es = String(e);
}
logError_(`${message}: ${es}`);
};
const logError_ = (message: string) => {
const m = `[error] ${message}`;
if (isDevBuild) console.error(m);
logToDisk(m);
};
const logInfo = (...params: unknown[]) => {
const message = params
.map((p) => (typeof p == "string" ? p : JSON.stringify(p)))
.join(" ");
const m = `[info] ${message}`;
if (isDevBuild) console.log(m);
logToDisk(m);
};
const logDebug = (param: () => unknown) => {
if (isDevBuild) console.log("[debug]", param());
};
/**
* Ente's logger.
*
* This is an object that provides three functions to log at the corresponding
* levels - error, info or debug.
*
 * Whenever a log message needs to be saved to disk:
 *
 * - When running under Electron, these messages are saved to the log
 *   maintained by the Electron app we're running under.
 *
 * - Otherwise such messages are written to a ring buffer in local storage.
*/
export default {
/**
* Log an error message with an optional associated error object.
*
* {@link e} is generally expected to be an `instanceof Error` but it can be
* any arbitrary object that we obtain, say, when in a try-catch handler (in
* JavaScript any arbitrary value can be thrown).
*
* The log is written to disk. In development builds, the log is also
* printed to the browser console.
*/
error: logError,
/**
* Log a message.
*
* This is meant as a replacement of {@link console.log}, and takes an
* arbitrary number of arbitrary parameters that it then serializes.
*
* The log is written to disk. In development builds, the log is also
* printed to the browser console.
*/
info: logInfo,
/**
* Log a debug message.
*
* To avoid running unnecessary code in release builds, this takes a
* function to call to get the log message instead of directly taking the
* message. The provided function will only be called in development builds.
*
* The function can return an arbitrary value which is serialized before
* being logged.
*
* This log is NOT written to disk. And it is printed to the browser
* console, but only in development builds.
*/
debug: logDebug,
};
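
For reference, a minimal sketch (not part of the commit) of how callers across this diff use the consolidated logger:

```ts
import log from "@/next/log";

try {
    log.info("starting export");
    // The debug variant takes a thunk, so the (possibly expensive) message is
    // only computed in development builds.
    log.debug(() => `export state: ${JSON.stringify({ step: 1 })}`);
    throw new Error("disk full");
} catch (e) {
    // The error's name, message and stack are serialized into the log entry.
    log.error("export failed", e);
}
```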

View file

@ -7,6 +7,7 @@
"@emotion/styled": "^11.11",
"@mui/icons-material": "^5.15",
"@mui/material": "^5.15",
"comlink": "^4.4",
"get-user-locale": "^2.3",
"i18next": "^23.10",
"i18next-resources-to-backend": "^1.2.0",

View file

@ -1,3 +1,8 @@
export enum UPLOAD_STRATEGY {
SINGLE_COLLECTION,
COLLECTION_PER_FOLDER,
}
/*
* ElectronFile is a custom interface that is used to represent
* any file on disk as a File-like object in the Electron desktop app.
@ -20,3 +25,25 @@ export interface DataStream {
stream: ReadableStream<Uint8Array>;
chunkCount: number;
}
export interface WatchMappingSyncedFile {
path: string;
uploadedFileID: number;
collectionID: number;
}
export interface WatchMapping {
rootFolderName: string;
folderPath: string;
uploadStrategy: UPLOAD_STRATEGY;
syncedFiles: WatchMappingSyncedFile[];
ignoredFiles: string[];
}
export interface EventQueueItem {
type: "upload" | "trash";
folderPath: string;
collectionName?: string;
paths?: string[];
files?: ElectronFile[];
}

View file

@ -3,8 +3,7 @@
//
// See [Note: types.ts <-> preload.ts <-> ipc.ts]
import type { ElectronFile } from "@ente/shared/upload/types";
import type { WatchMapping } from "@ente/shared/watchFolder/types";
import type { ElectronFile, WatchMapping } from "./file";
export interface AppUpdateInfo {
autoUpdatable: boolean;
@ -199,9 +198,9 @@ export interface ElectronAPIsType {
checkExistsAndCreateDir: (dirPath: string) => Promise<void>;
saveStreamToDisk: (
path: string,
fileStream: ReadableStream<any>,
fileStream: ReadableStream,
) => Promise<void>;
saveFileToDisk: (path: string, file: any) => Promise<void>;
saveFileToDisk: (path: string, contents: string) => Promise<void>;
readTextFile: (path: string) => Promise<string>;
isFolder: (dirPath: string) => Promise<boolean>;
moveFile: (oldPath: string, newPath: string) => Promise<void>;

View file

@ -1,7 +1,6 @@
import { addLocalLog, logToDisk } from "@ente/shared/logging";
import { Remote, expose, wrap } from "comlink";
import ElectronAPIs from "../electron";
import { logError } from "../sentry";
import ElectronAPIs from "@/next/electron";
import log, { logToDisk } from "@/next/log";
import { expose, wrap, type Remote } from "comlink";
export class ComlinkWorker<T extends new () => InstanceType<T>> {
public remote: Promise<Remote<InstanceType<T>>>;
@ -12,13 +11,15 @@ export class ComlinkWorker<T extends new () => InstanceType<T>> {
this.name = name;
this.worker = worker;
this.worker.onerror = (errorEvent) => {
logError(Error(errorEvent.message), "Got error event from worker", {
errorEvent: JSON.stringify(errorEvent),
name: this.name,
});
this.worker.onerror = (ev) => {
log.error(
`Got error event from worker: ${JSON.stringify({
errorEvent: JSON.stringify(ev),
name: this.name,
})}`,
);
};
addLocalLog(() => `Initiated ${this.name}`);
log.debug(() => `Initiated ${this.name}`);
const comlink = wrap<T>(this.worker);
this.remote = new comlink() as Promise<Remote<InstanceType<T>>>;
expose(workerBridge, worker);
@ -30,7 +31,7 @@ export class ComlinkWorker<T extends new () => InstanceType<T>> {
public terminate() {
this.worker.terminate();
addLocalLog(() => `Terminated ${this.name}`);
log.debug(() => `Terminated ${this.name}`);
}
}
@ -39,7 +40,7 @@ export class ComlinkWorker<T extends new () => InstanceType<T>> {
* create.
*
* Inside the worker's code, this can be accessed by using the sibling
* `workerBridge` object by importing `worker-bridge.ts`.
* `workerBridge` object after importing it from `worker-bridge.ts`.
*/
const workerBridge = {
logToDisk,
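For context, here is a sketch of how the main-thread side wraps a dedicated worker with this class, modelled on the crypto worker imports elsewhere in this change; the worker name and URL are illustrative, not from the codebase:

```ts
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import type { DedicatedCryptoWorker } from "./internal/crypto.worker";

// The constructor takes a human readable name (used in the debug logs shown
// above) and the underlying Worker instance.
const cryptoComlinkWorker = new ComlinkWorker<typeof DedicatedCryptoWorker>(
    "ente-crypto-worker",
    new Worker(new URL("internal/crypto.worker.ts", import.meta.url)),
);

export const withCryptoWorker = async () => {
    // `remote` resolves to a comlink proxy of the instance running inside
    // the worker; every method call on it returns a promise.
    return await cryptoComlinkWorker.remote;
};
```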

View file

@@ -1,5 +1,5 @@
import { wrap } from "comlink";
import type { WorkerBridge } from "./comlinkWorker";
import type { WorkerBridge } from "./comlink-worker";
/**
* The web worker side handle to the {@link WorkerBridge} exposed by the main
@ -7,6 +7,6 @@ import type { WorkerBridge } from "./comlinkWorker";
*
* This file is meant to be run inside a worker. Accessing the properties of
* this object will be transparently (but asynchronously) relayed to the
* implementation of the {@link WorkerBridge} in `comlinkWorker.ts`.
* implementation of the {@link WorkerBridge} in `comlink-worker.ts`.
*/
export const workerBridge = wrap<WorkerBridge>(globalThis);
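A minimal sketch of using this handle from inside worker code; `logToDisk` is the only bridge property visible in this diff, so that is all the example relies on, and the import path assumes the module sits at `@/next/worker/worker-bridge` next to `comlink-worker.ts`:

```ts
import { workerBridge } from "@/next/worker/worker-bridge";

// The property access is proxied back to the main thread by comlink, so the
// call is asynchronous and returns a promise.
export const noteProgress = (done: number, total: number) =>
    workerBridge.logToDisk(`processed ${done}/${total} files`);
```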

View file

@@ -1,5 +1,5 @@
import ElectronAPIs from "@/next/electron";
import LinkButton from "@ente/shared/components/LinkButton";
import ElectronAPIs from "@ente/shared/electron";
import { logError } from "@ente/shared/sentry";
import { Tooltip } from "@mui/material";
import { styled } from "@mui/material/styles";

View file

@@ -1,3 +1,4 @@
import ElectronAPIs from "@/next/electron";
import { setRecoveryKey } from "@ente/accounts/api/user";
import { logError } from "@ente/shared/sentry";
import { LS_KEYS, getData, setData } from "@ente/shared/storage/localStorage";
@ -7,7 +8,6 @@ import { getActualKey } from "@ente/shared/user";
import { KeyAttributes } from "@ente/shared/user/types";
import isElectron from "is-electron";
import ComlinkCryptoWorker from ".";
import ElectronAPIs from "../electron";
import { addLogLine } from "../logging";
const LOGIN_SUB_KEY_LENGTH = 32;

View file

@@ -1,5 +1,5 @@
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { Remote } from "comlink";
import { ComlinkWorker } from "../worker/comlinkWorker";
import { DedicatedCryptoWorker } from "./internal/crypto.worker";
class ComlinkCryptoWorker {

View file

@@ -1,4 +1,4 @@
import { DataStream } from "@ente/shared/upload/types";
import { DataStream } from "@/next/types/file";
export interface LocalFileAttributes<
T extends string | Uint8Array | DataStream,

View file

@@ -1,5 +0,0 @@
import { ElectronAPIsType } from "./types";
const ElectronAPIs: ElectronAPIsType = globalThis["ElectronAPIs"];
export default ElectronAPIs;

View file

@@ -1,57 +1,9 @@
import { inWorker, isDevBuild } from "@/next/env";
import { logError } from "@ente/shared/sentry";
import isElectron from "is-electron";
import ElectronAPIs from "../electron";
import { workerBridge } from "../worker/worker-bridge";
import { formatLog, logWeb } from "./web";
export const MAX_LOG_SIZE = 5 * 1024 * 1024; // 5MB
export const MAX_LOG_LINES = 1000;
export const logToDisk = (message: string) => {
if (isElectron()) {
ElectronAPIs.logToDisk(message);
} else {
logWeb(message);
}
};
import log from "@/next/log";
export function addLogLine(
log: string | number | boolean,
msg: string | number | boolean,
...optionalParams: (string | number | boolean)[]
) {
try {
const completeLog = [log, ...optionalParams].join(" ");
if (isDevBuild) {
console.log(completeLog);
}
if (inWorker()) {
workerBridge
.logToDisk(completeLog)
.catch((e) =>
console.error(
"Failed to log a message from worker",
e,
"\nThe message was",
completeLog,
),
);
} else {
logToDisk(completeLog);
}
} catch (e) {
logError(e, "failed to addLogLine", undefined, true);
// ignore
}
const completeLog = [msg, ...optionalParams].join(" ");
log.info(completeLog);
}
export const addLocalLog = (getLog: () => string) => {
if (isDevBuild) {
console.log(
formatLog({
logLine: getLog(),
timestamp: Date.now(),
}),
);
}
};
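The rewritten `addLogLine` is now just a compatibility shim: the variadic arguments are joined with spaces and handed to `log.info`, while the removed dev-only `addLocalLog` callers move to `log.debug`. A hypothetical call site, assuming `addLogLine` stays exported from `@ente/shared/logging`:

```ts
import { addLogLine } from "@ente/shared/logging";

// Equivalent to log.info("uploaded 3 files"): the parts are joined with a
// single space before being forwarded to the consolidated logger.
addLogLine("uploaded", 3, "files");
```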

View file

@@ -1,105 +0,0 @@
import { isDevBuild } from "@/next/env";
import { logError } from "@ente/shared/sentry";
import {
LS_KEYS,
getData,
removeData,
setData,
} from "@ente/shared/storage/localStorage";
import { addLogLine } from ".";
import { formatDateTimeShort } from "../time/format";
import { ElectronFile } from "../upload/types";
import type { User } from "../user/types";
import { convertBytesToHumanReadable } from "../utils/size";
export const MAX_LOG_SIZE = 5 * 1024 * 1024; // 5MB
export const MAX_LOG_LINES = 1000;
export interface Log {
timestamp: number;
logLine: string;
}
export function logWeb(logLine: string) {
try {
const log: Log = { logLine, timestamp: Date.now() };
const logs = getLogs();
if (logs.length > MAX_LOG_LINES) {
logs.slice(logs.length - MAX_LOG_LINES);
}
logs.push(log);
setLogs(logs);
} catch (e) {
if (e.name === "QuotaExceededError") {
deleteLogs();
logWeb("logs cleared");
}
}
}
export function getDebugLogs() {
return combineLogLines(getLogs());
}
export function getFileNameSize(file: File | ElectronFile) {
return `${file.name}_${convertBytesToHumanReadable(file.size)}`;
}
export const clearLogsIfLocalStorageLimitExceeded = () => {
try {
const logs = getDebugLogs();
const logSize = getStringSize(logs);
if (logSize > MAX_LOG_SIZE) {
deleteLogs();
logWeb("Logs cleared due to size limit exceeded");
} else {
try {
logWeb(`app started`);
} catch (e) {
deleteLogs();
}
}
logWeb(`logs size: ${convertBytesToHumanReadable(logSize)}`);
} catch (e) {
logError(
e,
"failed to clearLogsIfLocalStorageLimitExceeded",
undefined,
true,
);
}
};
export const logStartupMessage = async (appId: string) => {
// TODO (MR): Remove the need to lowercase it, change the enum itself.
const appIdL = appId.toLowerCase();
const userID = (getData(LS_KEYS.USER) as User)?.id;
const sha = process.env.GIT_SHA;
const buildId = isDevBuild ? "dev " : sha ? `git ${sha} ` : "";
addLogLine(`ente-${appIdL}-web ${buildId}uid ${userID}`);
};
function getLogs(): Log[] {
return getData(LS_KEYS.LOGS)?.logs ?? [];
}
function setLogs(logs: Log[]) {
setData(LS_KEYS.LOGS, { logs });
}
function deleteLogs() {
removeData(LS_KEYS.LOGS);
}
function getStringSize(str: string) {
return new Blob([str]).size;
}
export function formatLog(log: Log) {
return `[${formatDateTimeShort(log.timestamp)}] ${log.logLine}`;
}
function combineLogLines(logs: Log[]) {
return logs.map(formatLog).join("\n");
}

View file

@@ -14,13 +14,13 @@ export enum LS_KEYS {
EXPORT = "export",
THUMBNAIL_FIX_STATE = "thumbnailFixState",
LIVE_PHOTO_INFO_SHOWN_COUNT = "livePhotoInfoShownCount",
LOGS = "logs",
// LOGS = "logs",
USER_DETAILS = "userDetails",
COLLECTION_SORT_BY = "collectionSortBy",
THEME = "theme",
WAIT_TIME = "waitTime",
API_ENDPOINT = "apiEndpoint",
// Moved to the new wrapper @/utils/local-storage
// Moved to the new wrapper @/next/local-storage
// LOCALE = 'locale',
MAP_ENABLED = "mapEnabled",
SRP_SETUP_ATTRIBUTES = "srpSetupAttributes",

View file

@@ -12,5 +12,11 @@
"target": "es5",
"useUnknownInCatchVariables": false
},
"include": ["**/*.ts", "**/*.tsx", "**/*.js", "themes/mui-theme.d.ts"]
"include": [
"**/*.ts",
"**/*.tsx",
"**/*.js",
"themes/mui-theme.d.ts",
"../next/log-web.ts"
]
}

View file

@@ -1,4 +0,0 @@
export enum UPLOAD_STRATEGY {
SINGLE_COLLECTION,
COLLECTION_PER_FOLDER,
}

View file

@@ -1,24 +0,0 @@
import { UPLOAD_STRATEGY } from "@ente/shared/upload/constants";
import { ElectronFile } from "@ente/shared/upload/types";
export interface WatchMappingSyncedFile {
path: string;
uploadedFileID: number;
collectionID: number;
}
export interface WatchMapping {
rootFolderName: string;
folderPath: string;
uploadStrategy: UPLOAD_STRATEGY;
syncedFiles: WatchMappingSyncedFile[];
ignoredFiles: string[];
}
export interface EventQueueItem {
type: "upload" | "trash";
folderPath: string;
collectionName?: string;
paths?: string[];
files?: ElectronFile[];
}

View file

@@ -1777,7 +1777,7 @@ combined-stream@^1.0.8:
dependencies:
delayed-stream "~1.0.0"
comlink@^4.3.0:
comlink@^4.4:
version "4.4.1"
resolved "https://registry.yarnpkg.com/comlink/-/comlink-4.4.1.tgz#e568b8e86410b809e8600eb2cf40c189371ef981"
integrity sha512-+1dlx0aY5Jo1vHy/tSsIGpSkN4tS9rZSW8FIhG0JH/crs9wwweswIo/POr451r7bZww3hFbPAKnTpimzL/mm4Q==