[desktop] IPC refactoring and general cleanup (#1572)
Commit c353ceaaf9
39 changed files with 514 additions and 883 deletions
30  .github/workflows/desktop-lint.yml (vendored, new file)

@@ -0,0 +1,30 @@
name: "Lint (desktop)"

on:
    # Run on every push to a branch other than main that changes desktop/
    push:
        branches-ignore: [main, "deploy/**"]
        paths:
            - "desktop/**"
            - ".github/workflows/desktop-lint.yml"

jobs:
    lint:
        runs-on: ubuntu-latest
        defaults:
            run:
                working-directory: desktop
        steps:
            - name: Checkout code
              uses: actions/checkout@v4

            - name: Setup node and enable yarn caching
              uses: actions/setup-node@v4
              with:
                  node-version: 20
                  cache: "yarn"
                  cache-dependency-path: "desktop/yarn.lock"

            - run: yarn install

            - run: yarn lint
@@ -1,21 +1,36 @@
/* eslint-env node */
module.exports = {
    root: true,
    extends: [
        "eslint:recommended",
        "plugin:@typescript-eslint/eslint-recommended",
        /* What we really want eventually */
        // "plugin:@typescript-eslint/strict-type-checked",
        // "plugin:@typescript-eslint/stylistic-type-checked",
        "plugin:@typescript-eslint/strict-type-checked",
        "plugin:@typescript-eslint/stylistic-type-checked",
    ],
    plugins: ["@typescript-eslint"],
    parser: "@typescript-eslint/parser",
    parserOptions: {
        project: true,
    },
    root: true,
    ignorePatterns: [".eslintrc.js", "app", "out", "dist"],
    env: {
        es2022: true,
        node: true,
    },
    rules: {
        /* Allow numbers to be used in template literals */
        "@typescript-eslint/restrict-template-expressions": [
            "error",
            {
                allowNumber: true,
            },
        ],
        /* Allow void expressions as the entire body of an arrow function */
        "@typescript-eslint/no-confusing-void-expression": [
            "error",
            {
                ignoreArrowShorthand: true,
            },
        ],
    },
};
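The switch from the commented-out presets to the enabled strict-type-checked and stylistic-type-checked presets is what drives most of the mechanical changes in the rest of this diff: un-awaited promises now have to be explicitly discarded, and handler parameters need explicit types. A minimal illustrative sketch (not part of the commit) of code that satisfies the new configuration:

    // With @typescript-eslint/no-floating-promises (part of strict-type-checked),
    // a promise that is intentionally not awaited must be marked with `void`.
    const greet = async (name: string) => `hello ${name}`;

    export const demo = (count: number) => {
        void greet("ente");
        // allowNumber: true (configured above) permits interpolating numbers.
        return `count is ${count}`;
    };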
@@ -90,6 +90,9 @@ Some extra ones specific to the code here are:
  Unix commands in our `package.json` scripts. This allows us to use the same
  commands (like `ln`) across different platforms like Linux and Windows.

- [@tsconfig/recommended](https://github.com/tsconfig/bases) gives us a base
  tsconfig for the Node.js version that our current Electron version uses.

## Functionality

### Format conversion
@@ -15,8 +15,8 @@
        "dev-main": "tsc && electron app/main.js",
        "dev-renderer": "cd ../web && yarn install && yarn dev:photos",
        "postinstall": "electron-builder install-app-deps",
        "lint": "yarn prettier --check . && eslint --ext .ts src",
        "lint-fix": "yarn prettier --write . && eslint --fix --ext .ts src"
        "lint": "yarn prettier --check . && eslint --ext .ts src && yarn tsc",
        "lint-fix": "yarn prettier --write . && eslint --fix --ext .ts src && yarn tsc"
    },
    "dependencies": {
        "any-shell-escape": "^0.1",
@@ -34,6 +34,7 @@
        "onnxruntime-node": "^1.17"
    },
    "devDependencies": {
        "@tsconfig/node20": "^20.1.4",
        "@types/auto-launch": "^5.0",
        "@types/ffmpeg-static": "^3.0",
        "@typescript-eslint/eslint-plugin": "^7",
@@ -26,7 +26,7 @@ import { createWatcher } from "./main/services/watch";
import { userPreferences } from "./main/stores/user-preferences";
import { migrateLegacyWatchStoreIfNeeded } from "./main/stores/watch";
import { registerStreamProtocol } from "./main/stream";
import { isDev } from "./main/utils-electron";
import { isDev } from "./main/utils/electron";

/**
 * The URL where the renderer HTML is being served from.
@@ -141,36 +141,12 @@ const registerPrivilegedSchemes = () => {
    ]);
};

/**
 * [Note: Increased disk cache for the desktop app]
 *
 * Set the "disk-cache-size" command line flag to ask the Chromium process to
 * use a larger size for the caches that it keeps on disk. This allows us to use
 * the web based caching mechanisms on both the web and the desktop app, just
 * ask the embedded Chromium to be a bit more generous in disk usage when
 * running as the desktop app.
 *
 * The size we provide is in bytes.
 * https://www.electronjs.org/docs/latest/api/command-line-switches#--disk-cache-sizesize
 *
 * Note that increasing the disk cache size does not guarantee that Chromium
 * will respect in verbatim, it uses its own heuristics atop this hint.
 * https://superuser.com/questions/378991/what-is-chrome-default-cache-size-limit/1577693#1577693
 *
 * See also: [Note: Caching files].
 */
const increaseDiskCache = () =>
    app.commandLine.appendSwitch(
        "disk-cache-size",
        `${5 * 1024 * 1024 * 1024}`, // 5 GB
    );

/**
 * Create an return the {@link BrowserWindow} that will form our app's UI.
 *
 * This window will show the HTML served from {@link rendererURL}.
 */
const createMainWindow = async () => {
const createMainWindow = () => {
    // Create the main window. This'll show our web content.
    const window = new BrowserWindow({
        webPreferences: {
@@ -184,7 +160,7 @@ const createMainWindow = async () => {
        show: false,
    });

    const wasAutoLaunched = await autoLauncher.wasAutoLaunched();
    const wasAutoLaunched = autoLauncher.wasAutoLaunched();
    if (wasAutoLaunched) {
        // Don't automatically show the app's window if we were auto-launched.
        // On macOS, also hide the dock icon on macOS.
@@ -198,7 +174,7 @@ const createMainWindow = async () => {
    if (isDev) window.webContents.openDevTools();

    window.webContents.on("render-process-gone", (_, details) => {
        log.error(`render-process-gone: ${details}`);
        log.error(`render-process-gone: ${details.reason}`);
        window.webContents.reload();
    });

@@ -227,7 +203,7 @@ const createMainWindow = async () => {
    });

    window.on("show", () => {
        if (process.platform == "darwin") app.dock.show();
        if (process.platform == "darwin") void app.dock.show();
    });

    // Let ipcRenderer know when mainWindow is in the foreground so that it can
@@ -281,7 +257,7 @@ export const allowExternalLinks = (webContents: WebContents) => {
    // Returning `action` "deny" accomplishes this.
    webContents.setWindowOpenHandler(({ url }) => {
        if (!url.startsWith(rendererURL)) {
            shell.openExternal(url);
            void shell.openExternal(url);
            return { action: "deny" };
        } else {
            return { action: "allow" };
@@ -321,24 +297,24 @@ const setupTrayItem = (mainWindow: BrowserWindow) => {
 * Older versions of our app used to maintain a cache dir using the main
 * process. This has been deprecated in favor of using a normal web cache.
 *
 * See [Note: Increased disk cache for the desktop app]
 *
 * Delete the old cache dir if it exists. This code was added March 2024, and
 * can be removed after some time once most people have upgraded to newer
 * versions.
 */
const deleteLegacyDiskCacheDirIfExists = async () => {
    // The existing code was passing "cache" as a parameter to getPath. This is
    // incorrect if we go by the types - "cache" is not a valid value for the
    // parameter to `app.getPath`.
    // The existing code was passing "cache" as a parameter to getPath.
    //
    // It might be an issue in the types, since at runtime it seems to work. For
    // example, on macOS I get `~/Library/Caches`.
    // However, "cache" is not a valid parameter to getPath. It works! (for
    // example, on macOS I get `~/Library/Caches`), but it is intentionally not
    // documented as part of the public API:
    //
    // - docs: remove "cache" from app.getPath
    //   https://github.com/electron/electron/pull/33509
    //
    // Irrespective, we replicate the original behaviour so that we get back the
    // same path that the old got was getting.
    // same path that the old code was getting.
    //
    // @ts-expect-error
    // @ts-expect-error "cache" works but is not part of the public API.
    const cacheDir = path.join(app.getPath("cache"), "ente");
    if (existsSync(cacheDir)) {
        log.info(`Removing legacy disk cache from ${cacheDir}`);
@@ -375,7 +351,6 @@ const main = () => {
    // The order of the next two calls is important
    setupRendererServer();
    registerPrivilegedSchemes();
    increaseDiskCache();
    migrateLegacyWatchStoreIfNeeded();

    app.on("second-instance", () => {
@@ -390,39 +365,35 @@ const main = () => {
    // Emitted once, when Electron has finished initializing.
    //
    // Note that some Electron APIs can only be used after this event occurs.
    app.on("ready", async () => {
        // Create window and prepare for the renderer.
        mainWindow = await createMainWindow();
        attachIPCHandlers();
        attachFSWatchIPCHandlers(createWatcher(mainWindow));
        registerStreamProtocol();
    void app.whenReady().then(() => {
        void (async () => {
            // Create window and prepare for the renderer.
            mainWindow = createMainWindow();
            attachIPCHandlers();
            attachFSWatchIPCHandlers(createWatcher(mainWindow));
            registerStreamProtocol();

        // Configure the renderer's environment.
        setDownloadPath(mainWindow.webContents);
        allowExternalLinks(mainWindow.webContents);
            // Configure the renderer's environment.
            setDownloadPath(mainWindow.webContents);
            allowExternalLinks(mainWindow.webContents);

        // TODO(MR): Remove or resurrect
        // The commit that introduced this header override had the message
        // "fix cors issue for uploads". Not sure what that means, so disabling
        // it for now to see why exactly this is required.
        // addAllowOriginHeader(mainWindow);
            // Start loading the renderer.
            void mainWindow.loadURL(rendererURL);

        // Start loading the renderer.
        mainWindow.loadURL(rendererURL);
            // Continue on with the rest of the startup sequence.
            Menu.setApplicationMenu(await createApplicationMenu(mainWindow));
            setupTrayItem(mainWindow);
            if (!isDev) setupAutoUpdater(mainWindow);

        // Continue on with the rest of the startup sequence.
        Menu.setApplicationMenu(await createApplicationMenu(mainWindow));
        setupTrayItem(mainWindow);
        if (!isDev) setupAutoUpdater(mainWindow);

        try {
            deleteLegacyDiskCacheDirIfExists();
            deleteLegacyKeysStoreIfExists();
        } catch (e) {
            // Log but otherwise ignore errors during non-critical startup
            // actions.
            log.error("Ignoring startup error", e);
        }
            try {
                await deleteLegacyDiskCacheDirIfExists();
                await deleteLegacyKeysStoreIfExists();
            } catch (e) {
                // Log but otherwise ignore errors during non-critical startup
                // actions.
                log.error("Ignoring startup error", e);
            }
        })();
    });

    // This is a macOS only event. Show our window when the user activates the
@@ -1,72 +0,0 @@
import { dialog } from "electron/main";
import fs from "node:fs/promises";
import path from "node:path";
import type { ElectronFile } from "../types/ipc";
import { getElectronFile } from "./services/fs";
import { getElectronFilesFromGoogleZip } from "./services/upload";

export const selectDirectory = async () => {
    const result = await dialog.showOpenDialog({
        properties: ["openDirectory"],
    });
    if (result.filePaths && result.filePaths.length > 0) {
        return result.filePaths[0]?.split(path.sep)?.join(path.posix.sep);
    }
};

export const showUploadFilesDialog = async () => {
    const selectedFiles = await dialog.showOpenDialog({
        properties: ["openFile", "multiSelections"],
    });
    const filePaths = selectedFiles.filePaths;
    return await Promise.all(filePaths.map(getElectronFile));
};

export const showUploadDirsDialog = async () => {
    const dir = await dialog.showOpenDialog({
        properties: ["openDirectory", "multiSelections"],
    });

    let filePaths: string[] = [];
    for (const dirPath of dir.filePaths) {
        filePaths = [...filePaths, ...(await getDirFilePaths(dirPath))];
    }

    return await Promise.all(filePaths.map(getElectronFile));
};

// https://stackoverflow.com/a/63111390
const getDirFilePaths = async (dirPath: string) => {
    if (!(await fs.stat(dirPath)).isDirectory()) {
        return [dirPath];
    }

    let files: string[] = [];
    const filePaths = await fs.readdir(dirPath);

    for (const filePath of filePaths) {
        const absolute = path.join(dirPath, filePath);
        files = [...files, ...(await getDirFilePaths(absolute))];
    }

    return files;
};

export const showUploadZipDialog = async () => {
    const selectedFiles = await dialog.showOpenDialog({
        properties: ["openFile", "multiSelections"],
        filters: [{ name: "Zip File", extensions: ["zip"] }],
    });
    const filePaths = selectedFiles.filePaths;

    let files: ElectronFile[] = [];

    for (const filePath of filePaths) {
        files = [...files, ...(await getElectronFilesFromGoogleZip(filePath))];
    }

    return {
        zipPaths: filePaths,
        files,
    };
};
@@ -1,29 +0,0 @@
/**
 * @file file system related functions exposed over the context bridge.
 */
import { existsSync } from "node:fs";
import fs from "node:fs/promises";

export const fsExists = (path: string) => existsSync(path);

export const fsRename = (oldPath: string, newPath: string) =>
    fs.rename(oldPath, newPath);

export const fsMkdirIfNeeded = (dirPath: string) =>
    fs.mkdir(dirPath, { recursive: true });

export const fsRmdir = (path: string) => fs.rmdir(path);

export const fsRm = (path: string) => fs.rm(path);

export const fsReadTextFile = async (filePath: string) =>
    fs.readFile(filePath, "utf-8");

export const fsWriteFile = (path: string, contents: string) =>
    fs.writeFile(path, contents);

export const fsIsDir = async (dirPath: string) => {
    if (!existsSync(dirPath)) return false;
    const stat = await fs.stat(dirPath);
    return stat.isDirectory();
};
@@ -1,21 +0,0 @@
import { BrowserWindow } from "electron";

export function addAllowOriginHeader(mainWindow: BrowserWindow) {
    mainWindow.webContents.session.webRequest.onHeadersReceived(
        (details, callback) => {
            details.responseHeaders = lowerCaseHeaders(details.responseHeaders);
            details.responseHeaders["access-control-allow-origin"] = ["*"];
            callback({
                responseHeaders: details.responseHeaders,
            });
        },
    );
}

function lowerCaseHeaders(responseHeaders: Record<string, string[]>) {
    const headers: Record<string, string[]> = {};
    for (const key of Object.keys(responseHeaders)) {
        headers[key.toLowerCase()] = responseHeaders[key];
    }
    return headers;
}
@@ -16,12 +16,19 @@ import type {
    PendingUploads,
    ZipItem,
} from "../types/ipc";
import { logToDisk } from "./log";
import {
    appVersion,
    skipAppUpdate,
    updateAndRestart,
    updateOnNextRestart,
} from "./services/app-update";
import {
    openDirectory,
    openLogDirectory,
    selectDirectory,
    showUploadDirsDialog,
    showUploadFilesDialog,
    showUploadZipDialog,
} from "./dialogs";
} from "./services/dir";
import { ffmpegExec } from "./services/ffmpeg";
import {
    fsExists,
    fsIsDir,
@@ -31,15 +38,7 @@
    fsRm,
    fsRmdir,
    fsWriteFile,
} from "./fs";
import { logToDisk } from "./log";
import {
    appVersion,
    skipAppUpdate,
    updateAndRestart,
    updateOnNextRestart,
} from "./services/app-update";
import { ffmpegExec } from "./services/ffmpeg";
} from "./services/fs";
import { convertToJPEG, generateImageThumbnail } from "./services/image";
import {
    clipImageEmbedding,
@@ -68,7 +67,6 @@ import {
    watchUpdateIgnoredFiles,
    watchUpdateSyncedFiles,
} from "./services/watch";
import { openDirectory, openLogDirectory } from "./utils-electron";

/**
 * Listen for IPC events sent/invoked by the renderer process, and route them to
@@ -95,16 +93,20 @@ export const attachIPCHandlers = () => {

    ipcMain.handle("appVersion", () => appVersion());

    ipcMain.handle("openDirectory", (_, dirPath) => openDirectory(dirPath));
    ipcMain.handle("openDirectory", (_, dirPath: string) =>
        openDirectory(dirPath),
    );

    ipcMain.handle("openLogDirectory", () => openLogDirectory());

    // See [Note: Catching exception during .send/.on]
    ipcMain.on("logToDisk", (_, message) => logToDisk(message));
    ipcMain.on("logToDisk", (_, message: string) => logToDisk(message));

    ipcMain.handle("selectDirectory", () => selectDirectory());

    ipcMain.on("clearStores", () => clearStores());

    ipcMain.handle("saveEncryptionKey", (_, encryptionKey) =>
    ipcMain.handle("saveEncryptionKey", (_, encryptionKey: string) =>
        saveEncryptionKey(encryptionKey),
    );

@@ -114,21 +116,23 @@ export const attachIPCHandlers = () => {

    ipcMain.on("updateAndRestart", () => updateAndRestart());

    ipcMain.on("updateOnNextRestart", (_, version) =>
    ipcMain.on("updateOnNextRestart", (_, version: string) =>
        updateOnNextRestart(version),
    );

    ipcMain.on("skipAppUpdate", (_, version) => skipAppUpdate(version));
    ipcMain.on("skipAppUpdate", (_, version: string) => skipAppUpdate(version));

    // - FS

    ipcMain.handle("fsExists", (_, path) => fsExists(path));
    ipcMain.handle("fsExists", (_, path: string) => fsExists(path));

    ipcMain.handle("fsRename", (_, oldPath: string, newPath: string) =>
        fsRename(oldPath, newPath),
    );

    ipcMain.handle("fsMkdirIfNeeded", (_, dirPath) => fsMkdirIfNeeded(dirPath));
    ipcMain.handle("fsMkdirIfNeeded", (_, dirPath: string) =>
        fsMkdirIfNeeded(dirPath),
    );

    ipcMain.handle("fsRmdir", (_, path: string) => fsRmdir(path));

@@ -193,16 +197,6 @@ export const attachIPCHandlers = () => {
        faceEmbedding(input),
    );

    // - File selection

    ipcMain.handle("selectDirectory", () => selectDirectory());

    ipcMain.handle("showUploadFilesDialog", () => showUploadFilesDialog());

    ipcMain.handle("showUploadDirsDialog", () => showUploadDirsDialog());

    ipcMain.handle("showUploadZipDialog", () => showUploadZipDialog());

    // - Upload

    ipcMain.handle("listZipItems", (_, zipPath: string) =>
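Each ipcMain.handle registration above has a matching ipcRenderer.invoke call that the preload script exposes to the renderer over the context bridge; that side is not part of this diff. A hedged sketch of what the renderer-facing binding for one of these typed channels presumably looks like:

    // Illustrative only; the real preload bindings live elsewhere in the repo.
    import { contextBridge, ipcRenderer } from "electron";

    const fsExists = (path: string): Promise<boolean> =>
        ipcRenderer.invoke("fsExists", path);

    contextBridge.exposeInMainWorld("electron", { fsExists });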
@@ -1,15 +1,15 @@
import log from "electron-log";
import util from "node:util";
import { isDev } from "./utils-electron";
import { isDev } from "./utils/electron";

/**
 * Initialize logging in the main process.
 *
 * This will set our underlying logger up to log to a file named `ente.log`,
 *
 * - on Linux at ~/.config/ente/logs/main.log
 * - on macOS at ~/Library/Logs/ente/main.log
 * - on Windows at %USERPROFILE%\AppData\Roaming\ente\logs\main.log
 * - on Linux at ~/.config/ente/logs/ente.log
 * - on macOS at ~/Library/Logs/ente/ente.log
 * - on Windows at %USERPROFILE%\AppData\Roaming\ente\logs\ente.log
 *
 * On dev builds, it will also log to the console.
 */
@@ -65,7 +65,7 @@ const logError_ = (message: string) => {
    if (isDev) console.error(`[error] ${message}`);
};

const logInfo = (...params: any[]) => {
const logInfo = (...params: unknown[]) => {
    const message = params
        .map((p) => (typeof p == "string" ? p : util.inspect(p)))
        .join(" ");
@@ -73,7 +73,7 @@ const logInfo = (...params: any[]) => {
    if (isDev) console.log(`[info] ${message}`);
};

const logDebug = (param: () => any) => {
const logDebug = (param: () => unknown) => {
    if (isDev) {
        const p = param();
        console.log(`[debug] ${typeof p == "string" ? p : util.inspect(p)}`);
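Note that logDebug takes a thunk rather than a string, so a possibly expensive message is only computed on dev builds where it is actually printed. A small usage sketch, assuming the exported log wrapper keeps this shape:

    const t0 = Date.now();
    // The thunk is only evaluated when isDev is true.
    log.debug(() => `startup took ${Date.now() - t0} ms`);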
@@ -8,8 +8,9 @@ import {
import { allowWindowClose } from "../main";
import { forceCheckForAppUpdates } from "./services/app-update";
import autoLauncher from "./services/auto-launcher";
import { openLogDirectory } from "./services/dir";
import { userPreferences } from "./stores/user-preferences";
import { isDev, openLogDirectory } from "./utils-electron";
import { isDev } from "./utils/electron";

/** Create and return the entries in the app's main menu bar */
export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
@@ -29,12 +30,12 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
    const handleCheckForUpdates = () => forceCheckForAppUpdates(mainWindow);

    const handleViewChangelog = () =>
        shell.openExternal(
        void shell.openExternal(
            "https://github.com/ente-io/ente/blob/main/desktop/CHANGELOG.md",
        );

    const toggleAutoLaunch = () => {
        autoLauncher.toggleAutoLaunch();
        void autoLauncher.toggleAutoLaunch();
        isAutoLaunchEnabled = !isAutoLaunchEnabled;
    };

@@ -45,13 +46,15 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
        shouldHideDockIcon = !shouldHideDockIcon;
    };

    const handleHelp = () => shell.openExternal("https://help.ente.io/photos/");
    const handleHelp = () =>
        void shell.openExternal("https://help.ente.io/photos/");

    const handleSupport = () => shell.openExternal("mailto:support@ente.io");
    const handleSupport = () =>
        void shell.openExternal("mailto:support@ente.io");

    const handleBlog = () => shell.openExternal("https://ente.io/blog/");
    const handleBlog = () => void shell.openExternal("https://ente.io/blog/");

    const handleViewLogs = openLogDirectory;
    const handleViewLogs = () => void openLogDirectory();

    return Menu.buildFromTemplate([
        {
@@ -12,8 +12,8 @@ export const setupAutoUpdater = (mainWindow: BrowserWindow) => {
    autoUpdater.autoDownload = false;

    const oneDay = 1 * 24 * 60 * 60 * 1000;
    setInterval(() => checkForUpdatesAndNotify(mainWindow), oneDay);
    checkForUpdatesAndNotify(mainWindow);
    setInterval(() => void checkForUpdatesAndNotify(mainWindow), oneDay);
    void checkForUpdatesAndNotify(mainWindow);
};

/**
@@ -22,7 +22,7 @@ export const setupAutoUpdater = (mainWindow: BrowserWindow) => {
export const forceCheckForAppUpdates = (mainWindow: BrowserWindow) => {
    userPreferences.delete("skipAppVersion");
    userPreferences.delete("muteUpdateNotificationVersion");
    checkForUpdatesAndNotify(mainWindow);
    void checkForUpdatesAndNotify(mainWindow);
};

const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => {
@@ -56,7 +56,7 @@ const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => {
    mainWindow.webContents.send("appUpdateAvailable", update);

    log.debug(() => "Attempting auto update");
    autoUpdater.downloadUpdate();
    await autoUpdater.downloadUpdate();

    let timeoutId: ReturnType<typeof setTimeout>;
    const fiveMinutes = 5 * 60 * 1000;
@@ -38,7 +38,7 @@ class AutoLauncher {
        }
    }

    async wasAutoLaunched() {
    wasAutoLaunched() {
        if (this.autoLaunch) {
            return app.commandLine.hasSwitch("hidden");
        } else {
48  desktop/src/main/services/dir.ts (new file)

@@ -0,0 +1,48 @@
import { shell } from "electron/common";
import { app, dialog } from "electron/main";
import path from "node:path";
import { posixPath } from "../utils/electron";

export const selectDirectory = async () => {
    const result = await dialog.showOpenDialog({
        properties: ["openDirectory"],
    });
    const dirPath = result.filePaths[0];
    return dirPath ? posixPath(dirPath) : undefined;
};

/**
 * Open the given {@link dirPath} in the system's folder viewer.
 *
 * For example, on macOS this'll open {@link dirPath} in Finder.
 */
export const openDirectory = async (dirPath: string) => {
    const res = await shell.openPath(path.normalize(dirPath));
    // shell.openPath resolves with a string containing the error message
    // corresponding to the failure if a failure occurred, otherwise "".
    if (res) throw new Error(`Failed to open directory ${dirPath}: ${res}`);
};

/**
 * Open the app's log directory in the system's folder viewer.
 *
 * @see {@link openDirectory}
 */
export const openLogDirectory = () => openDirectory(logDirectoryPath());

/**
 * Return the path where the logs for the app are saved.
 *
 * [Note: Electron app paths]
 *
 * By default, these paths are at the following locations:
 *
 * - macOS: `~/Library/Application Support/ente`
 * - Linux: `~/.config/ente`
 * - Windows: `%APPDATA%`, e.g. `C:\Users\<username>\AppData\Local\ente`
 * - Windows: C:\Users\<you>\AppData\Local\<Your App Name>
 *
 * https://www.electronjs.org/docs/latest/api/app
 *
 */
const logDirectoryPath = () => app.getPath("logs");
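posixPath is imported from ../utils/electron, which is not shown in this diff; it is presumably the same helper that this commit removes from watch.ts further below. A sketch under that assumption:

    import path from "node:path";

    // Convert a path using the local system's separators into one that uses
    // POSIX separators.
    export const posixPath = (filePath: string) =>
        filePath.split(path.sep).join(path.posix.sep);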
@@ -2,13 +2,13 @@ import pathToFfmpeg from "ffmpeg-static";
import fs from "node:fs/promises";
import type { ZipItem } from "../../types/ipc";
import log from "../log";
import { withTimeout } from "../utils";
import { execAsync } from "../utils-electron";
import { ensure, withTimeout } from "../utils/common";
import { execAsync } from "../utils/electron";
import {
    deleteTempFile,
    makeFileForDataOrPathOrZipItem,
    makeTempFilePath,
} from "../utils-temp";
} from "../utils/temp";

/* Duplicated in the web app's code (used by the WASM FFmpeg implementation). */
const ffmpegPathPlaceholder = "FFMPEG";
@@ -110,5 +110,5 @@ const ffmpegBinaryPath = () => {
    // This substitution of app.asar by app.asar.unpacked is suggested by the
    // ffmpeg-static library author themselves:
    // https://github.com/eugeneware/ffmpeg-static/issues/16
    return pathToFfmpeg.replace("app.asar", "app.asar.unpacked");
    return ensure(pathToFfmpeg).replace("app.asar", "app.asar.unpacked");
};
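ensure comes from the new utils/common module, which is also not part of this diff. Judging from its call sites here (ensure(pathToFfmpeg), ensure(results.output), ensure(request.body)), it presumably narrows away null and undefined, throwing if the value is absent. A sketch under that assumption:

    export const ensure = <T>(v: T | null | undefined): T => {
        if (v === null || v === undefined)
            throw new Error("Required value was missing");
        return v;
    };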
@@ -1,154 +1,30 @@
import StreamZip from "node-stream-zip";
/**
 * @file file system related functions exposed over the context bridge.
 */

import { existsSync } from "node:fs";
import fs from "node:fs/promises";
import path from "node:path";
import { ElectronFile } from "../../types/ipc";
import log from "../log";

const FILE_STREAM_CHUNK_SIZE: number = 4 * 1024 * 1024;
export const fsExists = (path: string) => existsSync(path);

const getFileStream = async (filePath: string) => {
    const file = await fs.open(filePath, "r");
    let offset = 0;
    const readableStream = new ReadableStream<Uint8Array>({
        async pull(controller) {
            try {
                const buff = new Uint8Array(FILE_STREAM_CHUNK_SIZE);
                const bytesRead = (await file.read(
                    buff,
                    0,
                    FILE_STREAM_CHUNK_SIZE,
                    offset,
                )) as unknown as number;
                offset += bytesRead;
                if (bytesRead === 0) {
                    controller.close();
                    await file.close();
                } else {
                    controller.enqueue(buff.slice(0, bytesRead));
                }
            } catch (e) {
                await file.close();
            }
        },
        async cancel() {
            await file.close();
        },
    });
    return readableStream;
};

export async function getElectronFile(filePath: string): Promise<ElectronFile> {
    const fileStats = await fs.stat(filePath);
    return {
        path: filePath.split(path.sep).join(path.posix.sep),
        name: path.basename(filePath),
        size: fileStats.size,
        lastModified: fileStats.mtime.valueOf(),
        stream: async () => {
            if (!existsSync(filePath)) {
                throw new Error("electronFile does not exist");
            }
            return await getFileStream(filePath);
        },
        blob: async () => {
            if (!existsSync(filePath)) {
                throw new Error("electronFile does not exist");
            }
            const blob = await fs.readFile(filePath);
            return new Blob([new Uint8Array(blob)]);
        },
        arrayBuffer: async () => {
            if (!existsSync(filePath)) {
                throw new Error("electronFile does not exist");
            }
            const blob = await fs.readFile(filePath);
            return new Uint8Array(blob);
        },
    };
}

export const getZipFileStream = async (
    zip: StreamZip.StreamZipAsync,
    filePath: string,
) => {
    const stream = await zip.stream(filePath);
    const done = {
        current: false,
    };
    const inProgress = {
        current: false,
    };
    // eslint-disable-next-line no-unused-vars
    let resolveObj: (value?: any) => void = null;
    // eslint-disable-next-line no-unused-vars
    let rejectObj: (reason?: any) => void = null;
    stream.on("readable", () => {
        try {
            if (resolveObj) {
                inProgress.current = true;
                const chunk = stream.read(FILE_STREAM_CHUNK_SIZE) as Buffer;
                if (chunk) {
                    resolveObj(new Uint8Array(chunk));
                    resolveObj = null;
                }
                inProgress.current = false;
            }
        } catch (e) {
            rejectObj(e);
        }
    });
    stream.on("end", () => {
        try {
            done.current = true;
            if (resolveObj && !inProgress.current) {
                resolveObj(null);
                resolveObj = null;
            }
        } catch (e) {
            rejectObj(e);
        }
    });
    stream.on("error", (e) => {
        try {
            done.current = true;
            if (rejectObj) {
                rejectObj(e);
                rejectObj = null;
            }
        } catch (e) {
            rejectObj(e);
        }
    });

    const readStreamData = async () => {
        return new Promise<Uint8Array>((resolve, reject) => {
            const chunk = stream.read(FILE_STREAM_CHUNK_SIZE) as Buffer;

            if (chunk || done.current) {
                resolve(chunk);
            } else {
                resolveObj = resolve;
                rejectObj = reject;
            }
        });
    };

    const readableStream = new ReadableStream<Uint8Array>({
        async pull(controller) {
            try {
                const data = await readStreamData();

                if (data) {
                    controller.enqueue(data);
                } else {
                    controller.close();
                }
            } catch (e) {
                log.error("Failed to pull from readableStream", e);
                controller.close();
            }
        },
    });
    return readableStream;
export const fsRename = (oldPath: string, newPath: string) =>
    fs.rename(oldPath, newPath);

export const fsMkdirIfNeeded = (dirPath: string) =>
    fs.mkdir(dirPath, { recursive: true });

export const fsRmdir = (path: string) => fs.rmdir(path);

export const fsRm = (path: string) => fs.rm(path);

export const fsReadTextFile = async (filePath: string) =>
    fs.readFile(filePath, "utf-8");

export const fsWriteFile = (path: string, contents: string) =>
    fs.writeFile(path, contents);

export const fsIsDir = async (dirPath: string) => {
    if (!existsSync(dirPath)) return false;
    const stat = await fs.stat(dirPath);
    return stat.isDirectory();
};
@@ -1,15 +1,15 @@
/** @file Image format conversions and thumbnail generation */

import fs from "node:fs/promises";
import path from "path";
import path from "node:path";
import { CustomErrorMessage, type ZipItem } from "../../types/ipc";
import log from "../log";
import { execAsync, isDev } from "../utils-electron";
import { execAsync, isDev } from "../utils/electron";
import {
    deleteTempFile,
    makeFileForDataOrPathOrZipItem,
    makeTempFilePath,
} from "../utils-temp";
} from "../utils/temp";

export const convertToJPEG = async (imageData: Uint8Array) => {
    const inputFilePath = await makeTempFilePath();
@@ -11,7 +11,8 @@ import * as ort from "onnxruntime-node";
import Tokenizer from "../../thirdparty/clip-bpe-ts/mod";
import log from "../log";
import { writeStream } from "../stream";
import { deleteTempFile, makeTempFilePath } from "../utils-temp";
import { ensure } from "../utils/common";
import { deleteTempFile, makeTempFilePath } from "../utils/temp";
import { makeCachedInferenceSession } from "./ml";

const cachedCLIPImageSession = makeCachedInferenceSession(
@@ -22,7 +23,7 @@ const cachedCLIPImageSession = makeCachedInferenceSession(
export const clipImageEmbedding = async (jpegImageData: Uint8Array) => {
    const tempFilePath = await makeTempFilePath();
    const imageStream = new Response(jpegImageData.buffer).body;
    await writeStream(tempFilePath, imageStream);
    await writeStream(tempFilePath, ensure(imageStream));
    try {
        return await clipImageEmbedding_(tempFilePath);
    } finally {
@@ -44,30 +45,30 @@ const clipImageEmbedding_ = async (jpegFilePath: string) => {
            `onnx/clip image embedding took ${Date.now() - t1} ms (prep: ${t2 - t1} ms, inference: ${Date.now() - t2} ms)`,
    );
    /* Need these model specific casts to type the result */
    const imageEmbedding = results["output"].data as Float32Array;
    const imageEmbedding = ensure(results.output).data as Float32Array;
    return normalizeEmbedding(imageEmbedding);
};

const getRGBData = async (jpegFilePath: string) => {
const getRGBData = async (jpegFilePath: string): Promise<number[]> => {
    const jpegData = await fs.readFile(jpegFilePath);
    const rawImageData = jpeg.decode(jpegData, {
        useTArray: true,
        formatAsRGBA: false,
    });

    const nx: number = rawImageData.width;
    const ny: number = rawImageData.height;
    const inputImage: Uint8Array = rawImageData.data;
    const nx = rawImageData.width;
    const ny = rawImageData.height;
    const inputImage = rawImageData.data;

    const nx2: number = 224;
    const ny2: number = 224;
    const totalSize: number = 3 * nx2 * ny2;
    const nx2 = 224;
    const ny2 = 224;
    const totalSize = 3 * nx2 * ny2;

    const result: number[] = Array(totalSize).fill(0);
    const scale: number = Math.max(nx, ny) / 224;
    const result = Array<number>(totalSize).fill(0);
    const scale = Math.max(nx, ny) / 224;

    const nx3: number = Math.round(nx / scale);
    const ny3: number = Math.round(ny / scale);
    const nx3 = Math.round(nx / scale);
    const ny3 = Math.round(ny / scale);

    const mean: number[] = [0.48145466, 0.4578275, 0.40821073];
    const std: number[] = [0.26862954, 0.26130258, 0.27577711];
@@ -76,40 +77,40 @@ const getRGBData = async (jpegFilePath: string) => {
        for (let x = 0; x < nx3; x++) {
            for (let c = 0; c < 3; c++) {
                // Linear interpolation
                const sx: number = (x + 0.5) * scale - 0.5;
                const sy: number = (y + 0.5) * scale - 0.5;
                const sx = (x + 0.5) * scale - 0.5;
                const sy = (y + 0.5) * scale - 0.5;

                const x0: number = Math.max(0, Math.floor(sx));
                const y0: number = Math.max(0, Math.floor(sy));
                const x0 = Math.max(0, Math.floor(sx));
                const y0 = Math.max(0, Math.floor(sy));

                const x1: number = Math.min(x0 + 1, nx - 1);
                const y1: number = Math.min(y0 + 1, ny - 1);
                const x1 = Math.min(x0 + 1, nx - 1);
                const y1 = Math.min(y0 + 1, ny - 1);

                const dx: number = sx - x0;
                const dy: number = sy - y0;
                const dx = sx - x0;
                const dy = sy - y0;

                const j00: number = 3 * (y0 * nx + x0) + c;
                const j01: number = 3 * (y0 * nx + x1) + c;
                const j10: number = 3 * (y1 * nx + x0) + c;
                const j11: number = 3 * (y1 * nx + x1) + c;
                const j00 = 3 * (y0 * nx + x0) + c;
                const j01 = 3 * (y0 * nx + x1) + c;
                const j10 = 3 * (y1 * nx + x0) + c;
                const j11 = 3 * (y1 * nx + x1) + c;

                const v00: number = inputImage[j00];
                const v01: number = inputImage[j01];
                const v10: number = inputImage[j10];
                const v11: number = inputImage[j11];
                const v00 = inputImage[j00] ?? 0;
                const v01 = inputImage[j01] ?? 0;
                const v10 = inputImage[j10] ?? 0;
                const v11 = inputImage[j11] ?? 0;

                const v0: number = v00 * (1 - dx) + v01 * dx;
                const v1: number = v10 * (1 - dx) + v11 * dx;
                const v0 = v00 * (1 - dx) + v01 * dx;
                const v1 = v10 * (1 - dx) + v11 * dx;

                const v: number = v0 * (1 - dy) + v1 * dy;
                const v = v0 * (1 - dy) + v1 * dy;

                const v2: number = Math.min(Math.max(Math.round(v), 0), 255);
                const v2 = Math.min(Math.max(Math.round(v), 0), 255);

                // createTensorWithDataList is dumb compared to reshape and
                // hence has to be given with one channel after another
                const i: number = y * nx3 + x + (c % 3) * 224 * 224;
                const i = y * nx3 + x + (c % 3) * 224 * 224;

                result[i] = (v2 / 255 - mean[c]) / std[c];
                result[i] = (v2 / 255 - (mean[c] ?? 0)) / (std[c] ?? 1);
            }
        }
    }
@@ -119,13 +120,12 @@ const getRGBData = async (jpegFilePath: string) => {

const normalizeEmbedding = (embedding: Float32Array) => {
    let normalization = 0;
    for (let index = 0; index < embedding.length; index++) {
        normalization += embedding[index] * embedding[index];
    }
    for (const v of embedding) normalization += v * v;

    const sqrtNormalization = Math.sqrt(normalization);
    for (let index = 0; index < embedding.length; index++) {
        embedding[index] = embedding[index] / sqrtNormalization;
    }
    for (let index = 0; index < embedding.length; index++)
        embedding[index] = ensure(embedding[index]) / sqrtNormalization;

    return embedding;
};

@@ -134,11 +134,9 @@ const cachedCLIPTextSession = makeCachedInferenceSession(
    64173509 /* 61.2 MB */,
);

let _tokenizer: Tokenizer = null;
let _tokenizer: Tokenizer | undefined;
const getTokenizer = () => {
    if (!_tokenizer) {
        _tokenizer = new Tokenizer();
    }
    if (!_tokenizer) _tokenizer = new Tokenizer();
    return _tokenizer;
};

@@ -169,6 +167,6 @@ export const clipTextEmbeddingIfAvailable = async (text: string) => {
        () =>
            `onnx/clip text embedding took ${Date.now() - t1} ms (prep: ${t2 - t1} ms, inference: ${Date.now() - t2} ms)`,
    );
    const textEmbedding = results["output"].data as Float32Array;
    const textEmbedding = ensure(results.output).data as Float32Array;
    return normalizeEmbedding(textEmbedding);
};
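normalizeEmbedding above rescales the vector by its L2 norm, i.e. each component becomes e[i] / sqrt(sum of e[j] squared), so the returned embedding has unit length and cosine similarity against other normalized embeddings reduces to a plain dot product. A tiny sanity-check sketch (not part of the commit):

    const e = normalizeEmbedding(new Float32Array([3, 4]));
    // 3-4-5 triangle: the components become 0.6 and 0.8, and the norm is 1.
    console.log(e[0], e[1], Math.hypot(e[0] ?? 0, e[1] ?? 0));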
@@ -8,6 +8,7 @@
 */
import * as ort from "onnxruntime-node";
import log from "../log";
import { ensure } from "../utils/common";
import { makeCachedInferenceSession } from "./ml";

const cachedFaceDetectionSession = makeCachedInferenceSession(
@@ -23,7 +24,7 @@ export const detectFaces = async (input: Float32Array) => {
    };
    const results = await session.run(feeds);
    log.debug(() => `onnx/yolo face detection took ${Date.now() - t} ms`);
    return results["output"].data;
    return ensure(results.output).data;
};

const cachedFaceEmbeddingSession = makeCachedInferenceSession(
@@ -46,5 +47,6 @@ export const faceEmbedding = async (input: Float32Array) => {
    const results = await session.run(feeds);
    log.debug(() => `onnx/yolo face embedding took ${Date.now() - t} ms`);
    /* Need these model specific casts to extract and type the result */
    return (results.embeddings as unknown as any)["cpuData"] as Float32Array;
    return (results.embeddings as unknown as Record<string, unknown>)
        .cpuData as Float32Array;
};
@@ -34,6 +34,7 @@ import { writeStream } from "../stream";
 * actively trigger a download until the returned function is called.
 *
 * @param modelName The name of the model to download.
 *
 * @param modelByteSize The size in bytes that we expect the model to have. If
 * the size of the downloaded model does not match the expected size, then we
 * will redownload it.
@@ -99,13 +100,15 @@ const downloadModel = async (saveLocation: string, name: string) => {
    // `mkdir -p` the directory where we want to save the model.
    const saveDir = path.dirname(saveLocation);
    await fs.mkdir(saveDir, { recursive: true });
    // Download
    // Download.
    log.info(`Downloading ML model from ${name}`);
    const url = `https://models.ente.io/${name}`;
    const res = await net.fetch(url);
    if (!res.ok) throw new Error(`Failed to fetch ${url}: HTTP ${res.status}`);
    // Save
    await writeStream(saveLocation, res.body);
    const body = res.body;
    if (!body) throw new Error(`Received an null response for ${url}`);
    // Save.
    await writeStream(saveLocation, body);
    log.info(`Downloaded CLIP model ${name}`);
};

@@ -114,9 +117,9 @@ const downloadModel = async (saveLocation: string, name: string) => {
 */
const createInferenceSession = async (modelPath: string) => {
    return await ort.InferenceSession.create(modelPath, {
        // Restrict the number of threads to 1
        // Restrict the number of threads to 1.
        intraOpNumThreads: 1,
        // Be more conservative with RAM usage
        // Be more conservative with RAM usage.
        enableCpuMemArena: false,
    });
};
@@ -14,15 +14,15 @@ export const clearStores = () => {
    watchStore.clear();
};

export const saveEncryptionKey = async (encryptionKey: string) => {
    const encryptedKey: Buffer = await safeStorage.encryptString(encryptionKey);
export const saveEncryptionKey = (encryptionKey: string) => {
    const encryptedKey = safeStorage.encryptString(encryptionKey);
    const b64EncryptedKey = Buffer.from(encryptedKey).toString("base64");
    safeStorageStore.set("encryptionKey", b64EncryptedKey);
};

export const encryptionKey = async (): Promise<string | undefined> => {
export const encryptionKey = (): string | undefined => {
    const b64EncryptedKey = safeStorageStore.get("encryptionKey");
    if (!b64EncryptedKey) return undefined;
    const keyBuffer = Buffer.from(b64EncryptedKey, "base64");
    return await safeStorage.decryptString(keyBuffer);
    return safeStorage.decryptString(keyBuffer);
};
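The dropped async/await here matches Electron's safeStorage API, whose encryptString and decryptString are synchronous (returning a Buffer and a string respectively). A small round-trip sketch, assuming encryption is available on the platform:

    import { safeStorage } from "electron/main";

    if (safeStorage.isEncryptionAvailable()) {
        const encrypted = safeStorage.encryptString("hello"); // Buffer
        const b64 = encrypted.toString("base64");
        const decrypted = safeStorage.decryptString(Buffer.from(b64, "base64"));
        console.log(decrypted === "hello"); // true
    }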
@@ -1,10 +1,9 @@
import StreamZip from "node-stream-zip";
import fs from "node:fs/promises";
import path from "node:path";
import { existsSync } from "original-fs";
import path from "path";
import type { ElectronFile, PendingUploads, ZipItem } from "../../types/ipc";
import type { PendingUploads, ZipItem } from "../../types/ipc";
import { uploadStatusStore } from "../stores/upload-status";
import { getZipFileStream } from "./fs";

export const listZipItems = async (zipPath: string): Promise<ZipItem[]> => {
    const zip = new StreamZip.async({ file: zipPath });
@@ -15,13 +14,13 @@ export const listZipItems = async (zipPath: string): Promise<ZipItem[]> => {
    for (const entry of Object.values(entries)) {
        const basename = path.basename(entry.name);
        // Ignore "hidden" files (files whose names begins with a dot).
        if (entry.isFile && basename.length > 0 && basename[0] != ".") {
        if (entry.isFile && !basename.startsWith(".")) {
            // `entry.name` is the path within the zip.
            entryNames.push(entry.name);
        }
    }

    zip.close();
    await zip.close();

    return entryNames.map((entryName) => [zipPath, entryName]);
};
@@ -36,8 +35,12 @@ export const pathOrZipItemSize = async (
        const [zipPath, entryName] = pathOrZipItem;
        const zip = new StreamZip.async({ file: zipPath });
        const entry = await zip.entry(entryName);
        if (!entry)
            throw new Error(
                `An entry with name ${entryName} does not exist in the zip file at ${zipPath}`,
            );
        const size = entry.size;
        zip.close();
        await zip.close();
        return size;
    }
};
@@ -61,7 +64,7 @@ export const pendingUploads = async (): Promise<PendingUploads | undefined> => {
    // file, but the dedup logic will kick in at that point so no harm will come
    // off it.
    if (allZipItems === undefined) {
        const allZipPaths = uploadStatusStore.get("filePaths");
        const allZipPaths = uploadStatusStore.get("filePaths") ?? [];
        const zipPaths = allZipPaths.filter((f) => existsSync(f));
        zipItems = [];
        for (const zip of zipPaths)
@@ -79,71 +82,23 @@ export const pendingUploads = async (): Promise<PendingUploads | undefined> => {
    };
};

export const setPendingUploads = async (pendingUploads: PendingUploads) =>
export const setPendingUploads = (pendingUploads: PendingUploads) =>
    uploadStatusStore.set(pendingUploads);

export const markUploadedFiles = async (paths: string[]) => {
export const markUploadedFiles = (paths: string[]) => {
    const existing = uploadStatusStore.get("filePaths");
    const updated = existing.filter((p) => !paths.includes(p));
    const updated = existing?.filter((p) => !paths.includes(p));
    uploadStatusStore.set("filePaths", updated);
};

export const markUploadedZipItems = async (
export const markUploadedZipItems = (
    items: [zipPath: string, entryName: string][],
) => {
    const existing = uploadStatusStore.get("zipItems");
    const updated = existing.filter(
    const updated = existing?.filter(
        (z) => !items.some((e) => z[0] == e[0] && z[1] == e[1]),
    );
    uploadStatusStore.set("zipItems", updated);
};

export const clearPendingUploads = () => uploadStatusStore.clear();

export const getElectronFilesFromGoogleZip = async (filePath: string) => {
    const zip = new StreamZip.async({
        file: filePath,
    });
    const zipName = path.basename(filePath, ".zip");

    const entries = await zip.entries();
    const files: ElectronFile[] = [];

    for (const entry of Object.values(entries)) {
        const basename = path.basename(entry.name);
        if (entry.isFile && basename.length > 0 && basename[0] !== ".") {
            files.push(await getZipEntryAsElectronFile(zipName, zip, entry));
        }
    }

    zip.close();

    return files;
};

export async function getZipEntryAsElectronFile(
    zipName: string,
    zip: StreamZip.StreamZipAsync,
    entry: StreamZip.ZipEntry,
): Promise<ElectronFile> {
    return {
        path: path
            .join(zipName, entry.name)
            .split(path.sep)
            .join(path.posix.sep),
        name: path.basename(entry.name),
        size: entry.size,
        lastModified: entry.time,
        stream: async () => {
            return await getZipFileStream(zip, entry.name);
        },
        blob: async () => {
            const buffer = await zip.entryData(entry.name);
            return new Blob([new Uint8Array(buffer)]);
        },
        arrayBuffer: async () => {
            const buffer = await zip.entryData(entry.name);
            return new Uint8Array(buffer);
        },
    };
}
@@ -3,9 +3,10 @@ import { BrowserWindow } from "electron/main";
import fs from "node:fs/promises";
import path from "node:path";
import { FolderWatch, type CollectionMapping } from "../../types/ipc";
import { fsIsDir } from "../fs";
import log from "../log";
import { watchStore } from "../stores/watch";
import { posixPath } from "../utils/electron";
import { fsIsDir } from "./fs";

/**
 * Create and return a new file system watcher.
@@ -46,23 +47,15 @@ const eventData = (path: string): [string, FolderWatch] => {
    return [path, watch];
};

/**
 * Convert a file system {@link filePath} that uses the local system specific
 * path separators into a path that uses POSIX file separators.
 */
const posixPath = (filePath: string) =>
    filePath.split(path.sep).join(path.posix.sep);

export const watchGet = (watcher: FSWatcher) => {
    const [valid, deleted] = folderWatches().reduce(
        ([valid, deleted], watch) => {
            (fsIsDir(watch.folderPath) ? valid : deleted).push(watch);
            return [valid, deleted];
        },
        [[], []],
    );
    if (deleted.length) {
        for (const watch of deleted) watchRemove(watcher, watch.folderPath);
export const watchGet = async (watcher: FSWatcher): Promise<FolderWatch[]> => {
    const valid: FolderWatch[] = [];
    const deletedPaths: string[] = [];
    for (const watch of folderWatches()) {
        if (await fsIsDir(watch.folderPath)) valid.push(watch);
        else deletedPaths.push(watch.folderPath);
    }
    if (deletedPaths.length) {
        await Promise.all(deletedPaths.map((p) => watchRemove(watcher, p)));
        setFolderWatches(valid);
    }
    return valid;
@@ -80,7 +73,7 @@ export const watchAdd = async (
) => {
    const watches = folderWatches();

    if (!fsIsDir(folderPath))
    if (!(await fsIsDir(folderPath)))
        throw new Error(
            `Attempting to add a folder watch for a folder path ${folderPath} that is not an existing directory`,
        );
@@ -104,7 +97,7 @@ export const watchAdd = async (
    return watches;
};

export const watchRemove = async (watcher: FSWatcher, folderPath: string) => {
export const watchRemove = (watcher: FSWatcher, folderPath: string) => {
    const watches = folderWatches();
    const filtered = watches.filter((watch) => watch.folderPath != folderPath);
    if (watches.length == filtered.length)
@@ -6,24 +6,24 @@ export interface UploadStatusStore {
     *
     * Not all pending uploads will have an associated collection.
     */
    collectionName?: string;
    collectionName: string | undefined;
    /**
     * Paths to regular files that are pending upload.
     *
     * This should generally be present, albeit empty, but it is marked optional
     * in sympathy with its siblings.
     */
    filePaths?: string[];
    filePaths: string[] | undefined;
    /**
     * Each item is the path to a zip file and the name of an entry within it.
     *
     * This is marked optional since legacy stores will not have it.
     */
    zipItems?: [zipPath: string, entryName: string][];
    zipItems: [zipPath: string, entryName: string][] | undefined;
    /**
     * @deprecated Legacy paths to zip files, now subsumed into zipItems.
     */
    zipPaths?: string[];
    zipPaths: string[] | undefined;
}

const uploadStatusSchema: Schema<UploadStatusStore> = {
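The move from optional properties to explicit `| undefined` keeps the keys required in the type, so every site that constructs an UploadStatusStore value has to spell out the absent fields instead of silently omitting them. A small sketch of the difference:

    interface Optional { filePaths?: string[] }
    interface Explicit { filePaths: string[] | undefined }

    const a: Optional = {}; // OK, the key may be left out entirely
    // const b: Explicit = {}; // Error: 'filePaths' is missing
    const c: Explicit = { filePaths: undefined }; // OK, absence is explicit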
@@ -3,7 +3,7 @@ import { type FolderWatch } from "../../types/ipc";
import log from "../log";

interface WatchStore {
    mappings: FolderWatchWithLegacyFields[];
    mappings?: FolderWatchWithLegacyFields[];
}

type FolderWatchWithLegacyFields = FolderWatch & {
@@ -54,8 +54,12 @@ export const watchStore = new Store({
 */
export const migrateLegacyWatchStoreIfNeeded = () => {
    let needsUpdate = false;
    const watches = watchStore.get("mappings")?.map((watch) => {
    const updatedWatches = [];
    for (const watch of watchStore.get("mappings") ?? []) {
        let collectionMapping = watch.collectionMapping;
        // The required type defines the latest schema, but before migration
        // this'll be undefined, so tell ESLint to calm down.
        // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
        if (!collectionMapping) {
            collectionMapping = watch.uploadStrategy == 1 ? "parent" : "root";
            needsUpdate = true;
@@ -64,10 +68,10 @@ export const migrateLegacyWatchStoreIfNeeded = () => {
            delete watch.rootFolderName;
            needsUpdate = true;
        }
        return { ...watch, collectionMapping };
    });
        updatedWatches.push({ ...watch, collectionMapping });
    }
    if (needsUpdate) {
        watchStore.set("mappings", watches);
        watchStore.set("mappings", updatedWatches);
        log.info("Migrated legacy watch store data to new schema");
    }
};
@@ -6,8 +6,10 @@ import StreamZip from "node-stream-zip";
import { createWriteStream, existsSync } from "node:fs";
import fs from "node:fs/promises";
import { Readable } from "node:stream";
import { ReadableStream } from "node:stream/web";
import { pathToFileURL } from "node:url";
import log from "./log";
import { ensure } from "./utils/common";

/**
 * Register a protocol handler that we use for streaming large files between the

@@ -89,7 +91,7 @@ const handleRead = async (path: string) => {
        return res;
    } catch (e) {
        log.error(`Failed to read stream at ${path}`, e);
        return new Response(`Failed to read stream: ${e.message}`, {
        return new Response(`Failed to read stream: ${String(e)}`, {
            status: 500,
        });
    }

@@ -99,10 +101,25 @@ const handleReadZip = async (zipPath: string, entryName: string) => {
    try {
        const zip = new StreamZip.async({ file: zipPath });
        const entry = await zip.entry(entryName);
        const stream = await zip.stream(entry);
        // TODO(MR): when to call zip.close()
        if (!entry) return new Response("", { status: 404 });

        return new Response(Readable.toWeb(new Readable(stream)), {
        // This returns an "old style" NodeJS.ReadableStream.
        const stream = await zip.stream(entry);
        // Convert it into a new style NodeJS.Readable.
        const nodeReadable = new Readable().wrap(stream);
        // Then convert it into a Web stream.
        const webReadableStreamAny = Readable.toWeb(nodeReadable);
        // However, we get a ReadableStream<any> now. This doesn't go into the
        // `BodyInit` expected by the Response constructor, which wants a
        // ReadableStream<Uint8Array>. Force a cast.
        const webReadableStream =
            webReadableStreamAny as ReadableStream<Uint8Array>;

        // Close the zip handle when the underlying stream closes.
        // TODO(MR): Verify
        stream.on("end", () => void zip.close());

        return new Response(webReadableStream, {
            headers: {
                // We don't know the exact type, but it doesn't really matter,
                // just set it to a generic binary content-type so that the

@@ -122,7 +139,7 @@ const handleReadZip = async (zipPath: string, entryName: string) => {
            `Failed to read entry ${entryName} from zip file at ${zipPath}`,
            e,
        );
        return new Response(`Failed to read stream: ${e.message}`, {
        return new Response(`Failed to read stream: ${String(e)}`, {
            status: 500,
        });
    }

@@ -130,11 +147,11 @@ const handleReadZip = async (zipPath: string, entryName: string) => {

const handleWrite = async (path: string, request: Request) => {
    try {
        await writeStream(path, request.body);
        await writeStream(path, ensure(request.body));
        return new Response("", { status: 200 });
    } catch (e) {
        log.error(`Failed to write stream to ${path}`, e);
        return new Response(`Failed to write stream: ${e.message}`, {
        return new Response(`Failed to write stream: ${String(e)}`, {
            status: 500,
        });
    }

@@ -146,56 +163,29 @@ const handleWrite = async (path: string, request: Request) => {
 * The returned promise resolves when the write completes.
 *
 * @param filePath The local filesystem path where the file should be written.
 * @param readableStream A [web
 * ReadableStream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream)
 *
 * @param readableStream A web
 * [ReadableStream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream).
 */
export const writeStream = (filePath: string, readableStream: ReadableStream) =>
    writeNodeStream(filePath, convertWebReadableStreamToNode(readableStream));

/**
 * Convert a Web ReadableStream into a Node.js ReadableStream
 *
 * This can be used to, for example, write a ReadableStream obtained via
 * `net.fetch` into a file using the Node.js `fs` APIs
 */
const convertWebReadableStreamToNode = (readableStream: ReadableStream) => {
    const reader = readableStream.getReader();
    const rs = new Readable();

    rs._read = async () => {
        try {
            const result = await reader.read();

            if (!result.done) {
                rs.push(Buffer.from(result.value));
            } else {
                rs.push(null);
                return;
            }
        } catch (e) {
            rs.emit("error", e);
        }
    };

    return rs;
};
    writeNodeStream(filePath, Readable.fromWeb(readableStream));

const writeNodeStream = async (filePath: string, fileStream: Readable) => {
    const writeable = createWriteStream(filePath);

    fileStream.on("error", (error) => {
        writeable.destroy(error); // Close the writable stream with an error
    fileStream.on("error", (err) => {
        writeable.destroy(err); // Close the writable stream with an error
    });

    fileStream.pipe(writeable);

    await new Promise((resolve, reject) => {
        writeable.on("finish", resolve);
        writeable.on("error", async (e: unknown) => {
        writeable.on("error", (err) => {
            if (existsSync(filePath)) {
                await fs.unlink(filePath);
                void fs.unlink(filePath);
            }
            reject(e);
            reject(err);
        });
    });
};

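For reference, the zip-streaming path above can be restated as the following self-contained sketch. It only mirrors what the diff already does: node-stream-zip hands back an old-style NodeJS.ReadableStream, which is wrapped into a modern Readable and then into a web ReadableStream so it can serve as a Response body. The helper name zipEntryResponse and the content-type header are illustrative, not part of the actual code.

import StreamZip from "node-stream-zip";
import { Readable } from "node:stream";
import type { ReadableStream } from "node:stream/web";

// Sketch only: mirrors handleReadZip, with a hypothetical helper name.
const zipEntryResponse = async (zipPath: string, entryName: string) => {
    const zip = new StreamZip.async({ file: zipPath });
    const entry = await zip.entry(entryName);
    if (!entry) return new Response("", { status: 404 });
    // Old-style NodeJS.ReadableStream from node-stream-zip.
    const stream = await zip.stream(entry);
    // Wrap into a modern Readable, then convert into a web stream.
    const webStream = Readable.toWeb(
        new Readable().wrap(stream),
    ) as ReadableStream<Uint8Array>;
    // Close the zip handle once the underlying stream has been consumed.
    stream.on("end", () => void zip.close());
    return new Response(webStream, {
        headers: { "Content-Type": "application/octet-stream" },
    });
};
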
@@ -1,10 +1,19 @@
/**
 * @file grab bag of utitity functions.
 * @file grab bag of utility functions.
 *
 * Many of these are verbatim copies of functions from web code since there
 * isn't currently a common package that both of them share.
 * These are verbatim copies of functions from web code since there isn't
 * currently a common package that both of them share.
 */

/**
 * Throw an exception if the given value is `null` or `undefined`.
 */
export const ensure = <T>(v: T | null | undefined): T => {
    if (v === null) throw new Error("Required value was null");
    if (v === undefined) throw new Error("Required value was not found");
    return v;
};

/**
 * Wait for {@link ms} milliseconds
 *

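A quick usage sketch for the `ensure` helper above (the URL value is made up): it converts a possibly-absent value into either the value itself or a thrown error, which is handy once flags like `noUncheckedIndexedAccess` start producing `| undefined` types.

// `searchParams.get` returns `string | null`; ensure narrows it to `string`
// or throws, so downstream code doesn't need repeated null checks.
const url = new URL("https://example.org/?q=hello");
const q: string = ensure(url.searchParams.get("q"));
console.log(q.toUpperCase()); // "HELLO"
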
@@ -1,14 +1,20 @@
import shellescape from "any-shell-escape";
import { shell } from "electron"; /* TODO(MR): Why is this not in /main? */
import { app } from "electron/main";
import { exec } from "node:child_process";
import path from "node:path";
import { promisify } from "node:util";
import log from "./log";
import log from "../log";

/** `true` if the app is running in development mode. */
export const isDev = !app.isPackaged;

/**
 * Convert a file system {@link filePath} that uses the local system specific
 * path separators into a path that uses POSIX file separators.
 */
export const posixPath = (filePath: string) =>
    filePath.split(path.sep).join(path.posix.sep);

/**
 * Run a shell command asynchronously.
 *

@@ -41,39 +47,3 @@ export const execAsync = (command: string | string[]) => {
};

const execAsync_ = promisify(exec);

/**
 * Open the given {@link dirPath} in the system's folder viewer.
 *
 * For example, on macOS this'll open {@link dirPath} in Finder.
 */
export const openDirectory = async (dirPath: string) => {
    const res = await shell.openPath(path.normalize(dirPath));
    // shell.openPath resolves with a string containing the error message
    // corresponding to the failure if a failure occurred, otherwise "".
    if (res) throw new Error(`Failed to open directory ${dirPath}: res`);
};

/**
 * Open the app's log directory in the system's folder viewer.
 *
 * @see {@link openDirectory}
 */
export const openLogDirectory = () => openDirectory(logDirectoryPath());

/**
 * Return the path where the logs for the app are saved.
 *
 * [Note: Electron app paths]
 *
 * By default, these paths are at the following locations:
 *
 * - macOS: `~/Library/Application Support/ente`
 * - Linux: `~/.config/ente`
 * - Windows: `%APPDATA%`, e.g. `C:\Users\<username>\AppData\Local\ente`
 * - Windows: C:\Users\<you>\AppData\Local\<Your App Name>
 *
 * https://www.electronjs.org/docs/latest/api/app
 *
 */
const logDirectoryPath = () => app.getPath("logs");

@@ -2,8 +2,9 @@ import { app } from "electron/main";
import StreamZip from "node-stream-zip";
import { existsSync } from "node:fs";
import fs from "node:fs/promises";
import path from "path";
import type { ZipItem } from "../types/ipc";
import path from "node:path";
import type { ZipItem } from "../../types/ipc";
import { ensure } from "./common";

/**
 * Our very own directory within the system temp directory. Go crazy, but

@@ -17,13 +18,10 @@ const enteTempDirPath = async () => {

/** Generate a random string suitable for being used as a file name prefix */
const randomPrefix = () => {
    const alphabet =
        "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
    const ch = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
    const randomChar = () => ensure(ch[Math.floor(Math.random() * ch.length)]);

    let result = "";
    for (let i = 0; i < 10; i++)
        result += alphabet[Math.floor(Math.random() * alphabet.length)];
    return result;
    return Array(10).fill("").map(randomChar).join("");
};

/**

@@ -76,15 +74,14 @@ interface FileForDataOrPathOrZipItem {
     */
    isFileTemporary: boolean;
    /**
     * If set, this'll be a function that can be called to actually write the
     * contents of the source `Uint8Array | string | ZipItem` into the file at
     * {@link path}.
     * A function that can be called to actually write the contents of the
     * source `Uint8Array | string | ZipItem` into the file at {@link path}.
     *
     * It will be undefined if the source is already a path since nothing needs
     * to be written in that case. In the other two cases this function will
     * write the data or zip item into the file at {@link path}.
     * It will do nothing in the case when the source is already a path. In the
     * other two cases this function will write the data or zip item into the
     * file at {@link path}.
     */
    writeToTemporaryFile?: () => Promise<void>;
    writeToTemporaryFile: () => Promise<void>;
}

/**

@@ -101,7 +98,9 @@ export const makeFileForDataOrPathOrZipItem = async (
): Promise<FileForDataOrPathOrZipItem> => {
    let path: string;
    let isFileTemporary: boolean;
    let writeToTemporaryFile: () => Promise<void> | undefined;
    let writeToTemporaryFile = async () => {
        /* no-op */
    };

    if (typeof dataOrPathOrZipItem == "string") {
        path = dataOrPathOrZipItem;

@@ -117,7 +116,7 @@
            const [zipPath, entryName] = dataOrPathOrZipItem;
            const zip = new StreamZip.async({ file: zipPath });
            await zip.extract(entryName, path);
            zip.close();
            await zip.close();
        };
    }
}

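With `writeToTemporaryFile` now always defined (a no-op when the source is already a path), callers no longer need an optional-call check. A hedged usage sketch follows; the wrapper function, the cleanup policy, and the `process` callback are assumptions for illustration, not code from this commit.

import fs from "node:fs/promises";

// Hypothetical caller: run some processing over bytes, a path, or a zip entry.
const withFileForItem = async (
    item: Uint8Array | string | ZipItem,
    process: (path: string) => Promise<void>,
) => {
    const { path, isFileTemporary, writeToTemporaryFile } =
        await makeFileForDataOrPathOrZipItem(item);
    try {
        // Always callable now; does nothing if `item` was already a path.
        await writeToTemporaryFile();
        await process(path);
    } finally {
        // Only clean up files that we ourselves created.
        if (isFileTemporary) await fs.rm(path, { force: true });
    }
};
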
@@ -44,7 +44,6 @@ import { contextBridge, ipcRenderer, webUtils } from "electron/renderer";
import type {
    AppUpdate,
    CollectionMapping,
    ElectronFile,
    FolderWatch,
    PendingUploads,
    ZipItem,

@@ -52,23 +51,23 @@

// - General

const appVersion = (): Promise<string> => ipcRenderer.invoke("appVersion");
const appVersion = () => ipcRenderer.invoke("appVersion");

const logToDisk = (message: string): void =>
    ipcRenderer.send("logToDisk", message);

const openDirectory = (dirPath: string): Promise<void> =>
const openDirectory = (dirPath: string) =>
    ipcRenderer.invoke("openDirectory", dirPath);

const openLogDirectory = (): Promise<void> =>
    ipcRenderer.invoke("openLogDirectory");
const openLogDirectory = () => ipcRenderer.invoke("openLogDirectory");

const selectDirectory = () => ipcRenderer.invoke("selectDirectory");

const clearStores = () => ipcRenderer.send("clearStores");

const encryptionKey = (): Promise<string | undefined> =>
    ipcRenderer.invoke("encryptionKey");
const encryptionKey = () => ipcRenderer.invoke("encryptionKey");

const saveEncryptionKey = (encryptionKey: string): Promise<void> =>
const saveEncryptionKey = (encryptionKey: string) =>
    ipcRenderer.invoke("saveEncryptionKey", encryptionKey);

const onMainWindowFocus = (cb?: () => void) => {

@@ -100,39 +99,36 @@ const skipAppUpdate = (version: string) => {

// - FS

const fsExists = (path: string): Promise<boolean> =>
    ipcRenderer.invoke("fsExists", path);
const fsExists = (path: string) => ipcRenderer.invoke("fsExists", path);

const fsMkdirIfNeeded = (dirPath: string): Promise<void> =>
const fsMkdirIfNeeded = (dirPath: string) =>
    ipcRenderer.invoke("fsMkdirIfNeeded", dirPath);

const fsRename = (oldPath: string, newPath: string): Promise<void> =>
const fsRename = (oldPath: string, newPath: string) =>
    ipcRenderer.invoke("fsRename", oldPath, newPath);

const fsRmdir = (path: string): Promise<void> =>
    ipcRenderer.invoke("fsRmdir", path);
const fsRmdir = (path: string) => ipcRenderer.invoke("fsRmdir", path);

const fsRm = (path: string): Promise<void> => ipcRenderer.invoke("fsRm", path);
const fsRm = (path: string) => ipcRenderer.invoke("fsRm", path);

const fsReadTextFile = (path: string): Promise<string> =>
const fsReadTextFile = (path: string) =>
    ipcRenderer.invoke("fsReadTextFile", path);

const fsWriteFile = (path: string, contents: string): Promise<void> =>
const fsWriteFile = (path: string, contents: string) =>
    ipcRenderer.invoke("fsWriteFile", path, contents);

const fsIsDir = (dirPath: string): Promise<boolean> =>
    ipcRenderer.invoke("fsIsDir", dirPath);
const fsIsDir = (dirPath: string) => ipcRenderer.invoke("fsIsDir", dirPath);

// - Conversion

const convertToJPEG = (imageData: Uint8Array): Promise<Uint8Array> =>
const convertToJPEG = (imageData: Uint8Array) =>
    ipcRenderer.invoke("convertToJPEG", imageData);

const generateImageThumbnail = (
    dataOrPathOrZipItem: Uint8Array | string | ZipItem,
    maxDimension: number,
    maxSize: number,
): Promise<Uint8Array> =>
) =>
    ipcRenderer.invoke(
        "generateImageThumbnail",
        dataOrPathOrZipItem,

@@ -145,7 +141,7 @@ const ffmpegExec = (
    dataOrPathOrZipItem: Uint8Array | string | ZipItem,
    outputFileExtension: string,
    timeoutMS: number,
): Promise<Uint8Array> =>
) =>
    ipcRenderer.invoke(
        "ffmpegExec",
        command,

@@ -156,62 +152,37 @@ const ffmpegExec = (

// - ML

const clipImageEmbedding = (jpegImageData: Uint8Array): Promise<Float32Array> =>
const clipImageEmbedding = (jpegImageData: Uint8Array) =>
    ipcRenderer.invoke("clipImageEmbedding", jpegImageData);

const clipTextEmbeddingIfAvailable = (
    text: string,
): Promise<Float32Array | undefined> =>
const clipTextEmbeddingIfAvailable = (text: string) =>
    ipcRenderer.invoke("clipTextEmbeddingIfAvailable", text);

const detectFaces = (input: Float32Array): Promise<Float32Array> =>
const detectFaces = (input: Float32Array) =>
    ipcRenderer.invoke("detectFaces", input);

const faceEmbedding = (input: Float32Array): Promise<Float32Array> =>
const faceEmbedding = (input: Float32Array) =>
    ipcRenderer.invoke("faceEmbedding", input);

// - File selection

// TODO: Deprecated - use dialogs on the renderer process itself

const selectDirectory = (): Promise<string> =>
    ipcRenderer.invoke("selectDirectory");

const showUploadFilesDialog = (): Promise<ElectronFile[]> =>
    ipcRenderer.invoke("showUploadFilesDialog");

const showUploadDirsDialog = (): Promise<ElectronFile[]> =>
    ipcRenderer.invoke("showUploadDirsDialog");

const showUploadZipDialog = (): Promise<{
    zipPaths: string[];
    files: ElectronFile[];
}> => ipcRenderer.invoke("showUploadZipDialog");

// - Watch

const watchGet = (): Promise<FolderWatch[]> => ipcRenderer.invoke("watchGet");
const watchGet = () => ipcRenderer.invoke("watchGet");

const watchAdd = (
    folderPath: string,
    collectionMapping: CollectionMapping,
): Promise<FolderWatch[]> =>
const watchAdd = (folderPath: string, collectionMapping: CollectionMapping) =>
    ipcRenderer.invoke("watchAdd", folderPath, collectionMapping);

const watchRemove = (folderPath: string): Promise<FolderWatch[]> =>
const watchRemove = (folderPath: string) =>
    ipcRenderer.invoke("watchRemove", folderPath);

const watchUpdateSyncedFiles = (
    syncedFiles: FolderWatch["syncedFiles"],
    folderPath: string,
): Promise<void> =>
    ipcRenderer.invoke("watchUpdateSyncedFiles", syncedFiles, folderPath);
) => ipcRenderer.invoke("watchUpdateSyncedFiles", syncedFiles, folderPath);

const watchUpdateIgnoredFiles = (
    ignoredFiles: FolderWatch["ignoredFiles"],
    folderPath: string,
): Promise<void> =>
    ipcRenderer.invoke("watchUpdateIgnoredFiles", ignoredFiles, folderPath);
) => ipcRenderer.invoke("watchUpdateIgnoredFiles", ignoredFiles, folderPath);

const watchOnAddFile = (f: (path: string, watch: FolderWatch) => void) => {
    ipcRenderer.removeAllListeners("watchAddFile");

@@ -234,34 +205,31 @@ const watchOnRemoveDir = (f: (path: string, watch: FolderWatch) => void) => {
    );
};

const watchFindFiles = (folderPath: string): Promise<string[]> =>
const watchFindFiles = (folderPath: string) =>
    ipcRenderer.invoke("watchFindFiles", folderPath);

// - Upload

const pathForFile = (file: File) => webUtils.getPathForFile(file);

const listZipItems = (zipPath: string): Promise<ZipItem[]> =>
const listZipItems = (zipPath: string) =>
    ipcRenderer.invoke("listZipItems", zipPath);

const pathOrZipItemSize = (pathOrZipItem: string | ZipItem): Promise<number> =>
const pathOrZipItemSize = (pathOrZipItem: string | ZipItem) =>
    ipcRenderer.invoke("pathOrZipItemSize", pathOrZipItem);

const pendingUploads = (): Promise<PendingUploads | undefined> =>
    ipcRenderer.invoke("pendingUploads");
const pendingUploads = () => ipcRenderer.invoke("pendingUploads");

const setPendingUploads = (pendingUploads: PendingUploads): Promise<void> =>
const setPendingUploads = (pendingUploads: PendingUploads) =>
    ipcRenderer.invoke("setPendingUploads", pendingUploads);

const markUploadedFiles = (paths: PendingUploads["filePaths"]): Promise<void> =>
const markUploadedFiles = (paths: PendingUploads["filePaths"]) =>
    ipcRenderer.invoke("markUploadedFiles", paths);

const markUploadedZipItems = (
    items: PendingUploads["zipItems"],
): Promise<void> => ipcRenderer.invoke("markUploadedZipItems", items);
const markUploadedZipItems = (items: PendingUploads["zipItems"]) =>
    ipcRenderer.invoke("markUploadedZipItems", items);

const clearPendingUploads = (): Promise<void> =>
    ipcRenderer.invoke("clearPendingUploads");
const clearPendingUploads = () => ipcRenderer.invoke("clearPendingUploads");

/**
 * These objects exposed here will become available to the JS code in our

@@ -310,6 +278,7 @@ contextBridge.exposeInMainWorld("electron", {
    logToDisk,
    openDirectory,
    openLogDirectory,
    selectDirectory,
    clearStores,
    encryptionKey,
    saveEncryptionKey,

@@ -348,13 +317,6 @@ contextBridge.exposeInMainWorld("electron", {
    detectFaces,
    faceEmbedding,

    // - File selection

    selectDirectory,
    showUploadFilesDialog,
    showUploadDirsDialog,
    showUploadZipDialog,

    // - Watch

    watch: {

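The pattern behind these preload changes: `ipcRenderer.invoke` returns `Promise<any>`, so the explicit return annotations in preload.ts added little safety and have been dropped; the renderer-facing `Electron` interface in `types/ipc.ts` (further down in this diff) is what pins the real signatures. A minimal sketch of the pieces, using the `appVersion` channel from this diff; the `declare const electron` stand-in for `window.electron` is illustrative only.

import { contextBridge, ipcRenderer } from "electron/renderer";

// preload.ts: let the Promise<any> from invoke flow through unannotated.
const appVersion = () => ipcRenderer.invoke("appVersion");
contextBridge.exposeInMainWorld("electron", { appVersion });

// types/ipc.ts: the renderer sees the precise, documented signature instead.
interface Electron {
    appVersion: () => Promise<string>;
}

// Renderer code: callers go through the typed interface exposed on window.
declare const electron: Electron;
void electron.appVersion().then((version) => console.log(`ente ${version}`));
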
5
desktop/src/thirdparty/clip-bpe-ts/mod.ts
vendored

@@ -1,3 +1,5 @@
/* eslint-disable */

import * as htmlEntities from "html-entities";
import bpeVocabData from "./bpe_simple_vocab_16e6";
// import ftfy from "https://deno.land/x/ftfy_pyodide@v0.1.1/mod.js";

@@ -410,6 +412,7 @@ export default class {
            newWord.push(first + second);
            i += 2;
        } else {
            // @ts-expect-error "Array indexing can return undefined but not modifying thirdparty code"
            newWord.push(word[i]);
            i += 1;
        }

@@ -434,6 +437,7 @@ export default class {
            .map((b) => this.byteEncoder[b.charCodeAt(0) as number])
            .join("");
        bpeTokens.push(
            // @ts-expect-error "Array indexing can return undefined but not modifying thirdparty code"
            ...this.bpe(token)
                .split(" ")
                .map((bpeToken: string) => this.encoder[bpeToken]),

@@ -458,6 +462,7 @@ export default class {
            .join("");
        text = [...text]
            .map((c) => this.byteDecoder[c])
            // @ts-expect-error "Array indexing can return undefined but not modifying thirdparty code"
            .map((v) => String.fromCharCode(v))
            .join("")
            .replace(/<\/w>/g, " ");

@@ -28,7 +28,7 @@ export interface FolderWatchSyncedFile {
export type ZipItem = [zipPath: string, entryName: string];

export interface PendingUploads {
    collectionName: string;
    collectionName: string | undefined;
    filePaths: string[];
    zipItems: ZipItem[];
}

@@ -42,25 +42,3 @@ export interface PendingUploads {
export const CustomErrorMessage = {
    NotAvailable: "This feature in not available on the current OS/arch",
};

/**
 * Deprecated - Use File + webUtils.getPathForFile instead
 *
 * Electron used to augment the standard web
 * [File](https://developer.mozilla.org/en-US/docs/Web/API/File) object with an
 * additional `path` property. This is now deprecated, and will be removed in a
 * future release.
 * https://www.electronjs.org/docs/latest/api/file-object
 *
 * The alternative to the `path` property is to use `webUtils.getPathForFile`
 * https://www.electronjs.org/docs/latest/api/web-utils
 */
export interface ElectronFile {
    name: string;
    path: string;
    size: number;
    lastModified: number;
    stream: () => Promise<ReadableStream<Uint8Array>>;
    blob: () => Promise<Blob>;
    arrayBuffer: () => Promise<Uint8Array>;
}

@@ -3,71 +3,34 @@
       into JavaScript that'll then be loaded and run by the main (node) process
       of our Electron app. */

    /*
     * Recommended target, lib and other settings for code running in the
     * version of Node.js bundled with Electron.
     *
     * Currently, with Electron 30, this is Node.js 20.11.1.
     * https://www.electronjs.org/blog/electron-30-0
     */
    "extends": "@tsconfig/node20/tsconfig.json",

    /* TSConfig docs: https://aka.ms/tsconfig.json */

    "compilerOptions": {
        /* Recommended target, lib and other settings for code running in the
           version of Node.js bundled with Electron.

           Currently, with Electron 29, this is Node.js 20.9
           https://www.electronjs.org/blog/electron-29-0

           Note that we cannot do

               "extends": "@tsconfig/node20/tsconfig.json",

           because that sets "lib": ["es2023"]. However (and I don't fully
           understand what's going on here), that breaks our compilation since
           tsc can then not find type definitions of things like ReadableStream.

           Adding "dom" to "lib" (e.g. `"lib": ["es2023", "dom"]`) fixes the
           issue, but that doesn't sound correct - the main Electron process
           isn't running in a browser context.

           It is possible that we're using some of the types incorrectly. For
           now, we just omit the "lib" definition and rely on the defaults for
           the "target" we've chosen. This is also what the current
           electron-forge starter does:

               yarn create electron-app electron-forge-starter -- --template=webpack-typescript

           Enhancement: Can revisit this later.

           Refs:
           - https://github.com/electron/electron/issues/27092
           - https://github.com/electron/electron/issues/16146
        */

        "target": "es2022",
        "module": "node16",

        /* Enable various workarounds to play better with CJS libraries */
        "esModuleInterop": true,
        /* Speed things up by not type checking `node_modules` */
        "skipLibCheck": true,

        /* Emit the generated JS into `app/` */
        "outDir": "app",

        /* Temporary overrides to get things to compile with the older config */
        "strict": false,
        "noImplicitAny": true

        /* Below is the state we want */
        /* Enable these one by one */
        // "strict": true,

        /* Require the `type` modifier when importing types */
        // "verbatimModuleSyntax": true
        /* We want this, but it causes "ESM syntax is not allowed in a CommonJS
           module when 'verbatimModuleSyntax' is enabled" currently */
        /* "verbatimModuleSyntax": true, */

        "strict": true,
        /* Stricter than strict */
        // "noImplicitReturns": true,
        // "noUnusedParameters": true,
        // "noUnusedLocals": true,
        // "noFallthroughCasesInSwitch": true,
        "noImplicitReturns": true,
        "noUnusedParameters": true,
        "noUnusedLocals": true,
        "noFallthroughCasesInSwitch": true,
        /* e.g. makes array indexing returns undefined */
        // "noUncheckedIndexedAccess": true,
        // "exactOptionalPropertyTypes": true,
        "noUncheckedIndexedAccess": true,
        "exactOptionalPropertyTypes": true
    },
    /* Transpile all `.ts` files in `src/` */
    "include": ["src/**/*.ts"]

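For context on why the diff sprinkles `ensure` around array indexing and converts some optional properties, a small sketch of what the two newly enabled flags change (the values here are made up):

// With "noUncheckedIndexedAccess", indexing a string or array yields
// `T | undefined`, so the result must be narrowed (e.g. via `ensure`).
const ch = "abc";
const maybeChar: string | undefined = ch[10];

// With "exactOptionalPropertyTypes", an optional property may be omitted,
// but may no longer be explicitly assigned `undefined`; code that needs to
// do that has to declare the property as `T | undefined` instead.
interface Example {
    collectionName?: string;
}
const ok: Example = {};
// const bad: Example = { collectionName: undefined }; // error under the new config
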
@@ -246,6 +246,11 @@
  resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-2.0.0.tgz#f544a148d3ab35801c1f633a7441fd87c2e484bf"
  integrity sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==

"@tsconfig/node20@^20.1.4":
  version "20.1.4"
  resolved "https://registry.yarnpkg.com/@tsconfig/node20/-/node20-20.1.4.tgz#3457d42eddf12d3bde3976186ab0cd22b85df928"
  integrity sha512-sqgsT69YFeLWf5NtJ4Xq/xAF8p4ZQHlmGW74Nu2tD4+g5fAsposc4ZfaaPixVu4y01BEiDCWLRDCvDM5JOsRxg==

"@types/auto-launch@^5.0":
  version "5.0.5"
  resolved "https://registry.yarnpkg.com/@types/auto-launch/-/auto-launch-5.0.5.tgz#439ed36aaaea501e2e2cfbddd8a20c366c34863b"

@@ -324,10 +324,12 @@ export default function Uploader({
    // Trigger an upload when any of the dependencies change.
    useEffect(() => {
        const allItemAndPaths = [
            /* TODO(MR): ElectronFile | use webkitRelativePath || name here */
            webFiles.map((f) => [f, f["path"] ?? f.name]),
            // See: [Note: webkitRelativePath]. In particular, they use POSIX
            // separators.
            webFiles.map((f) => [f, f.webkitRelativePath ?? f.name]),
            desktopFiles.map((fp) => [fp, fp.path]),
            desktopFilePaths.map((p) => [p, p]),
            // ze[1], the entry name, uses POSIX separators.
            desktopZipItems.map((ze) => [ze, ze[1]]),
        ].flat() as [UploadItem, string][];

@@ -930,9 +932,5 @@ export const setPendingUploads = async (
        }
    }

    await electron.setPendingUploads({
        collectionName,
        filePaths,
        zipItems: zipItems,
    });
    await electron.setPendingUploads({ collectionName, filePaths, zipItems });
};

@@ -50,8 +50,6 @@ export type BlobCacheNamespace = (typeof blobCacheNames)[number];
 * ([the WebKit bug](https://bugs.webkit.org/show_bug.cgi?id=231706)), so it's
 * not trivial to use this as a full on replacement of the Web Cache in the
 * browser. So for now we go with this split implementation.
 *
 * See also: [Note: Increased disk cache for the desktop app].
 */
export interface BlobCache {
    /**

@@ -3,8 +3,6 @@
//
// See [Note: types.ts <-> preload.ts <-> ipc.ts]

import type { ElectronFile } from "./file";

/**
 * Extra APIs provided by our Node.js layer when our code is running inside our
 * desktop (Electron) app.

@@ -51,6 +49,18 @@ export interface Electron {
     */
    openLogDirectory: () => Promise<void>;

    /**
     * Ask the user to select a directory on their local file system, and return
     * it path.
     *
     * We don't strictly need IPC for this, we can use a hidden <input> element
     * and trigger its click for the same behaviour (as we do for the
     * `useFileInput` hook that we use for uploads). However, it's a bit
     * cumbersome, and we anyways will need to IPC to get back its full path, so
     * it is just convenient to expose this direct method.
     */
    selectDirectory: () => Promise<string | undefined>;

    /**
     * Clear any stored data.
     *

@@ -122,6 +132,8 @@ export interface Electron {
     */
    skipAppUpdate: (version: string) => void;

    // - FS

    /**
     * A subset of file system access APIs.
     *

@@ -332,20 +344,6 @@ export interface Electron {
     */
    faceEmbedding: (input: Float32Array) => Promise<Float32Array>;

    // - File selection
    // TODO: Deprecated - use dialogs on the renderer process itself

    selectDirectory: () => Promise<string>;

    showUploadFilesDialog: () => Promise<ElectronFile[]>;

    showUploadDirsDialog: () => Promise<ElectronFile[]>;

    showUploadZipDialog: () => Promise<{
        zipPaths: string[];
        files: ElectronFile[];
    }>;

    // - Watch

    /**

@@ -634,6 +632,19 @@ export interface FolderWatchSyncedFile {
 * The name of the entry is not just the file name, but rather is the full path
 * of the file within the zip. That is, each entry name uniquely identifies a
 * particular file within the given zip.
 *
 * When `entryName` is a path within a nested directory, it is guaranteed to use
 * the POSIX path separator ("/") since that is the path separator required by
 * the ZIP format itself
 *
 * > 4.4.17.1 The name of the file, with optional relative path.
 * >
 * > The path stored MUST NOT contain a drive or device letter, or a leading
 * > slash. All slashes MUST be forward slashes '/' as opposed to backwards
 * > slashes '\' for compatibility with Amiga and UNIX file systems etc. If
 * > input came from standard input, there is no file name field.
 * >
 * > https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
 */
export type ZipItem = [zipPath: string, entryName: string];

@@ -652,8 +663,10 @@ export interface PendingUploads {
     * This is name of the collection (when uploading to a singular collection)
     * or the root collection (when uploading to separate * albums) to which we
     * these uploads are meant to go to. See {@link CollectionMapping}.
     *
     * It will not be set if we're just uploading standalone files.
     */
    collectionName: string;
    collectionName?: string;
    /**
     * Paths of regular files that need to be uploaded.
     */

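To make the shape concrete: a `ZipItem` pairs the zip file's location on disk with the entry's path inside the archive, and per the spec quoted above the entry path always uses "/" even on Windows. The paths in this sketch are made up:

type ZipItem = [zipPath: string, entryName: string];

// The zip itself lives at a native (here, Windows-style) filesystem path,
// but the entry name inside it keeps POSIX separators.
const item: ZipItem = [
    "C:\\Users\\example\\Downloads\\takeout.zip",
    "Takeout/Google Photos/Album/IMG_1234.HEIC",
];

const [zipPath, entryName] = item;
console.log(`${entryName} inside ${zipPath}`);
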
@@ -1,28 +1,5 @@
import { useCallback, useRef, useState } from "react";

/**
 * [Note: File paths when running under Electron]
 *
 * We have access to the absolute path of the web {@link File} object when we
 * are running in the context of our desktop app.
 *
 * https://www.electronjs.org/docs/latest/api/file-object
 *
 * This is in contrast to the `webkitRelativePath` that we get when we're
 * running in the browser, which is the relative path to the directory that the
 * user selected (or just the name of the file if the user selected or
 * drag/dropped a single one).
 *
 * Note that this is a deprecated approach. From Electron docs:
 *
 * > Warning: The path property that Electron adds to the File interface is
 * > deprecated and will be removed in a future Electron release. We recommend
 * > you use `webUtils.getPathForFile` instead.
 */
export interface FileWithPath extends File {
    readonly path?: string;
}

interface UseFileInputParams {
    directory?: boolean;
    accept?: string;

@@ -72,19 +49,31 @@ export default function useFileInput({
        event,
    ) => {
        if (!!event.target && !!event.target.files) {
            const files = [...event.target.files].map((file) =>
                toFileWithPath(file),
            );
            setSelectedFiles(files);
            setSelectedFiles([...event.target.files]);
        }
    };

    // [Note: webkitRelativePath]
    //
    // If the webkitdirectory attribute of an <input> HTML element is set then
    // the File objects that we get will have `webkitRelativePath` property
    // containing the relative path to the selected directory.
    //
    // https://developer.mozilla.org/en-US/docs/Web/API/HTMLInputElement/webkitdirectory
    //
    // These paths use the POSIX path separator ("/").
    // https://stackoverflow.com/questions/62806233/when-using-webkitrelativepath-is-the-path-separator-operating-system-specific
    //
    const directoryOpts = directory
        ? { directory: "", webkitdirectory: "" }
        : {};

    const getInputProps = useCallback(
        () => ({
            type: "file",
            multiple: true,
            style: { display: "none" },
            ...(directory ? { directory: "", webkitdirectory: "" } : {}),
            ...directoryOpts,
            ref: inputRef,
            onChange: handleChange,
            ...(accept ? { accept } : {}),

@@ -98,26 +87,3 @@ export default function useFileInput({
        selectedFiles: selectedFiles,
    };
}

// https://github.com/react-dropzone/file-selector/blob/master/src/file.ts#L88
export function toFileWithPath(file: File, path?: string): FileWithPath {
    if (typeof (file as any).path !== "string") {
        // on electron, path is already set to the absolute path
        const { webkitRelativePath } = file;
        Object.defineProperty(file, "path", {
            value:
                typeof path === "string"
                    ? path
                    : typeof webkitRelativePath === "string" && // If <input webkitdirectory> is set,
                      // the File will have a {webkitRelativePath} property
                      // https://developer.mozilla.org/en-US/docs/Web/API/HTMLInputElement/webkitdirectory
                      webkitRelativePath.length > 0
                      ? webkitRelativePath
                      : file.name,
            writable: false,
            configurable: false,
            enumerable: true,
        });
    }
    return file;
}

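Since the hook no longer patches a `path` property onto `File`, browser code relies on the standard `webkitRelativePath` directly. A small sketch of what that property looks like when a directory is picked; the element id and the wiring around it are hypothetical:

// Assumes an <input id="picker" type="file" webkitdirectory /> in the page.
const input = document.querySelector<HTMLInputElement>("#picker");
input?.addEventListener("change", () => {
    for (const file of input.files ?? []) {
        // For directory selections this is a POSIX-style relative path such
        // as "Album/IMG_1234.HEIC"; for individually picked files it is "".
        console.log(file.webkitRelativePath || file.name);
    }
});
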
@@ -1,7 +1,8 @@
/**
 * Throw an exception if the given value is undefined.
 * Throw an exception if the given value is `null` or `undefined`.
 */
export const ensure = <T>(v: T | undefined): T => {
export const ensure = <T>(v: T | null | undefined): T => {
    if (v === null) throw new Error("Required value was null");
    if (v === undefined) throw new Error("Required value was not found");
    return v;
};