Merge branch 'main' into mobile_face
commit 52f605831f
193 changed files with 6133 additions and 6061 deletions

@@ -78,12 +78,14 @@
"data": "Datei",
"importCodes": "Codes importieren",
"importTypePlainText": "Klartext",
"importTypeEnteEncrypted": "Verschlüsselter Ente-Export",
"passwordForDecryptingExport": "Passwort um den Export zu entschlüsseln",
"passwordEmptyError": "Passwort kann nicht leer sein",
"importFromApp": "Importiere Codes von {appName}",
"importGoogleAuthGuide": "Exportiere deine Accounts von Google Authenticator zu einem QR-Code, durch die \"Konten übertragen\" Option. Scanne den QR-Code danach mit einem anderen Gerät.\n\nTipp: Du kannst die Kamera eines Laptops verwenden, um ein Foto von dem QR-Code zu erstellen.",
"importSelectJsonFile": "Wähle eine JSON-Datei",
"importSelectAppExport": "{appName} Exportdatei auswählen",
"importEnteEncGuide": "Wähle die von Ente exportierte, verschlüsselte JSON-Datei",
"importRaivoGuide": "Verwenden Sie die Option \"Export OTPs to Zip archive\" in den Raivo-Einstellungen.\n\nEntpacken Sie die Zip-Datei und importieren Sie die JSON-Datei.",
"importBitwardenGuide": "Verwenden Sie die Option \"Tresor exportieren\" innerhalb der Bitwarden Tools und importieren Sie die unverschlüsselte JSON-Datei.",
"importAegisGuide": "Verwenden Sie die Option \"Tresor exportieren\" in den Aegis-Einstellungen.\n\nFalls Ihr Tresor verschlüsselt ist, müssen Sie das Passwort für den Tresor eingeben, um ihn zu entschlüsseln.",
@@ -121,12 +123,14 @@
"suggestFeatures": "Features vorschlagen",
"faq": "FAQ",
"faq_q_1": "Wie sicher ist Auth?",
"faq_a_1": "Alle Codes, die du über Auth sicherst, werden Ende-zu-Ende-verschlüsselt gespeichert. Das bedeutet, dass nur du auf deine Codes zugreifen kannst. Unsere Anwendungen sind quelloffen und unsere Kryptografie wurde extern geprüft.",
"faq_q_2": "Kann ich auf meine Codes auf dem Desktop zugreifen?",
"faq_a_2": "Sie können auf Ihre Codes im Web via auth.ente.io zugreifen.",
"faq_q_3": "Wie kann ich Codes löschen?",
"faq_a_3": "Sie können einen Code löschen, indem Sie auf dem Code nach links wischen.",
"faq_q_4": "Wie kann ich das Projekt unterstützen?",
"faq_a_4": "Sie können die Entwicklung dieses Projekts unterstützen, indem Sie unsere Fotos-App auf ente.io abonnieren.",
"faq_q_5": "Wie kann ich die FaceID-Sperre in Auth aktivieren",
"faq_a_5": "Sie können FaceID unter Einstellungen → Sicherheit → Sperrbildschirm aktivieren.",
"somethingWentWrongMessage": "Ein Fehler ist aufgetreten, bitte versuchen Sie es erneut",
"leaveFamily": "Familie verlassen",
@@ -196,6 +200,9 @@
"doThisLater": "Auf später verschieben",
"saveKey": "Schlüssel speichern",
"save": "Speichern",
"send": "Senden",
"saveOrSendDescription": "Möchtest du dies in deinem Speicher (standardmäßig im Ordner Downloads) oder an andere Apps senden?",
"saveOnlyDescription": "Möchtest du dies in deinem Speicher (standardmäßig im Ordner Downloads) speichern?",
"back": "Zurück",
"createAccount": "Account erstellen",
"passwordStrength": "Passwortstärke: {passwordStrengthValue}",
@@ -343,6 +350,7 @@
"deleteCodeAuthMessage": "Authentifizieren, um Code zu löschen",
"showQRAuthMessage": "Authentifizieren, um QR-Code anzuzeigen",
"confirmAccountDeleteTitle": "Kontolöschung bestätigen",
"confirmAccountDeleteMessage": "Dieses Konto ist mit anderen Ente-Apps verknüpft, falls du welche verwendest.\n\nDeine hochgeladenen Daten werden in allen Ente-Apps zur Löschung vorgemerkt und dein Konto wird endgültig gelöscht.",
"androidBiometricHint": "Identität bestätigen",
"@androidBiometricHint": {
"description": "Hint message advising the user how to authenticate with biometrics. It is used on Android side. Maximum 60 characters."

@@ -7,11 +7,6 @@ module.exports = {
// "plugin:@typescript-eslint/strict-type-checked",
// "plugin:@typescript-eslint/stylistic-type-checked",
],
/* Temporarily add a global
Enhancement: Remove me */
globals: {
NodeJS: "readonly",
},
plugins: ["@typescript-eslint"],
parser: "@typescript-eslint/parser",
parserOptions: {

@@ -94,12 +94,12 @@ Some extra ones specific to the code here are:

### Format conversion

The main tool we use is for arbitrary conversions is FFMPEG. To bundle a
The main tool we use for arbitrary conversions is ffmpeg. To bundle a
(platform specific) static binary of ffmpeg with our app, we use
[ffmpeg-static](https://github.com/eugeneware/ffmpeg-static).

> There is a significant (~20x) speed difference between using the compiled
> FFMPEG binary and using the WASM one (that our renderer process already has).
> ffmpeg binary and using the wasm one (that our renderer process already has).
> Which is why we bundle it to speed up operations on the desktop app.

In addition, we also bundle a static Linux binary of imagemagick in our extra
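A minimal sketch (not part of this diff) of how a bundled ffmpeg-static binary gets invoked from the main process; the sample command and paths are illustrative assumptions:

```ts
import { execFile } from "node:child_process";
import { promisify } from "node:util";
import pathToFfmpeg from "ffmpeg-static";

const execFileP = promisify(execFile);

// When packaged, the binary lives inside app.asar.unpacked (see the asar note
// in the ffmpeg service changes later in this diff).
const ffmpegPath = () => pathToFfmpeg.replace("app.asar", "app.asar.unpacked");

// Example: extract the first frame of a video as a JPEG (hypothetical paths).
export const extractFirstFrame = (inputPath: string, outputPath: string) =>
    execFileP(ffmpegPath(), ["-i", inputPath, "-vframes", "1", outputPath]);
```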

@@ -8,18 +8,15 @@
*
* https://www.electronjs.org/docs/latest/tutorial/process-model#the-main-process
*/
import { nativeImage } from "electron";
import { app, BrowserWindow, Menu, protocol, Tray } from "electron/main";

import { nativeImage, shell } from "electron/common";
import type { WebContents } from "electron/main";
import { BrowserWindow, Menu, Tray, app, protocol } from "electron/main";
import serveNextAt from "next-electron-server";
import { existsSync } from "node:fs";
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import {
addAllowOriginHeader,
handleDownloads,
handleExternalLinks,
} from "./main/init";
import { attachFSWatchIPCHandlers, attachIPCHandlers } from "./main/ipc";
import log, { initLogging } from "./main/log";
import { createApplicationMenu, createTrayContextMenu } from "./main/menu";
@@ -29,12 +26,12 @@ import { createWatcher } from "./main/services/watch";
import { userPreferences } from "./main/stores/user-preferences";
import { migrateLegacyWatchStoreIfNeeded } from "./main/stores/watch";
import { registerStreamProtocol } from "./main/stream";
import { isDev } from "./main/util";
import { isDev } from "./main/utils-electron";

/**
* The URL where the renderer HTML is being served from.
*/
export const rendererURL = "ente://app";
const rendererURL = "ente://app";

/**
* We want to hide our window instead of closing it when the user presses the
@@ -205,9 +202,11 @@ const createMainWindow = async () => {
window.webContents.reload();
});

// "The unresponsive event is fired when Chromium detects that your
// webContents is not responding to input messages for > 30 seconds."
window.webContents.on("unresponsive", () => {
log.error(
"Main window's webContents are unresponsive, will restart the renderer process",
"MainWindow's webContents are unresponsive, will restart the renderer process",
);
window.webContents.forcefullyCrashRenderer();
});
@@ -238,6 +237,58 @@ const createMainWindow = async () => {
return window;
};

/**
* Automatically set the save path for user initiated downloads to the system's
* "downloads" directory instead of asking the user to select a save location.
*/
export const setDownloadPath = (webContents: WebContents) => {
webContents.session.on("will-download", (_, item) => {
item.setSavePath(
uniqueSavePath(app.getPath("downloads"), item.getFilename()),
);
});
};

const uniqueSavePath = (dirPath: string, fileName: string) => {
const { name, ext } = path.parse(fileName);

let savePath = path.join(dirPath, fileName);
let n = 1;
while (existsSync(savePath)) {
const suffixedName = [`${name}(${n})`, ext].filter((x) => x).join(".");
savePath = path.join(dirPath, suffixedName);
n++;
}
return savePath;
};

/**
* Allow opening external links, e.g. when the user clicks on the "Feature
* requests" button in the sidebar (to open our GitHub repository), or when they
* click the "Support" button to send an email to support.
*
* @param webContents The renderer to configure.
*/
export const allowExternalLinks = (webContents: WebContents) => {
// By default, if the user were to open a link, say
// https://github.com/ente-io/ente/discussions, then it would open a _new_
// BrowserWindow within our app.
//
// This is not the behaviour we want; what we want is to ask the system to
// handle the link (e.g. open the URL in the default browser, or if it is a
// mailto: link, then open the user's mail client).
//
// Returning `action` "deny" accomplishes this.
webContents.setWindowOpenHandler(({ url }) => {
if (!url.startsWith(rendererURL)) {
shell.openExternal(url);
return { action: "deny" };
} else {
return { action: "allow" };
}
});
};

/**
* Add an icon for our app in the system tray.
*
@@ -340,19 +391,26 @@ const main = () => {
//
// Note that some Electron APIs can only be used after this event occurs.
app.on("ready", async () => {
// Create window and prepare for renderer
// Create window and prepare for the renderer.
mainWindow = await createMainWindow();
attachIPCHandlers();
attachFSWatchIPCHandlers(createWatcher(mainWindow));
registerStreamProtocol();
handleDownloads(mainWindow);
handleExternalLinks(mainWindow);
addAllowOriginHeader(mainWindow);

// Start loading the renderer
// Configure the renderer's environment.
setDownloadPath(mainWindow.webContents);
allowExternalLinks(mainWindow.webContents);

// TODO(MR): Remove or resurrect
// The commit that introduced this header override had the message
// "fix cors issue for uploads". Not sure what that means, so disabling
// it for now to see why exactly this is required.
// addAllowOriginHeader(mainWindow);

// Start loading the renderer.
mainWindow.loadURL(rendererURL);

// Continue on with the rest of the startup sequence
// Continue on with the rest of the startup sequence.
Menu.setApplicationMenu(await createApplicationMenu(mainWindow));
setupTrayItem(mainWindow);
if (!isDev) setupAutoUpdater(mainWindow);

@@ -1,7 +1,8 @@
import { dialog } from "electron/main";
import fs from "node:fs/promises";
import path from "node:path";
import type { ElectronFile } from "../types/ipc";
import { getDirFilePaths, getElectronFile } from "./services/fs";
import { getElectronFile } from "./services/fs";
import { getElectronFilesFromGoogleZip } from "./services/upload";

export const selectDirectory = async () => {
@@ -34,6 +35,23 @@ export const showUploadDirsDialog = async () => {
return await Promise.all(filePaths.map(getElectronFile));
};

// https://stackoverflow.com/a/63111390
const getDirFilePaths = async (dirPath: string) => {
if (!(await fs.stat(dirPath)).isDirectory()) {
return [dirPath];
}

let files: string[] = [];
const filePaths = await fs.readdir(dirPath);

for (const filePath of filePaths) {
const absolute = path.join(dirPath, filePath);
files = [...files, ...(await getDirFilePaths(absolute))];
}

return files;
};

export const showUploadZipDialog = async () => {
const selectedFiles = await dialog.showOpenDialog({
properties: ["openFile", "multiSelections"],

@@ -27,3 +27,5 @@ export const fsIsDir = async (dirPath: string) => {
const stat = await fs.stat(dirPath);
return stat.isDirectory();
};

export const fsSize = (path: string) => fs.stat(path).then((s) => s.size);

@@ -1,54 +1,4 @@
import { BrowserWindow, app, shell } from "electron";
import { existsSync } from "node:fs";
import path from "node:path";
import { rendererURL } from "../main";

export function handleDownloads(mainWindow: BrowserWindow) {
mainWindow.webContents.session.on("will-download", (_, item) => {
item.setSavePath(
getUniqueSavePath(item.getFilename(), app.getPath("downloads")),
);
});
}

export function handleExternalLinks(mainWindow: BrowserWindow) {
mainWindow.webContents.setWindowOpenHandler(({ url }) => {
if (!url.startsWith(rendererURL)) {
shell.openExternal(url);
return { action: "deny" };
} else {
return { action: "allow" };
}
});
}

export function getUniqueSavePath(filename: string, directory: string): string {
let uniqueFileSavePath = path.join(directory, filename);
const { name: filenameWithoutExtension, ext: extension } =
path.parse(filename);
let n = 0;
while (existsSync(uniqueFileSavePath)) {
n++;
// filter need to remove undefined extension from the array
// else [`${fileName}`, undefined].join(".") will lead to `${fileName}.` as joined string
const fileNameWithNumberedSuffix = [
`${filenameWithoutExtension}(${n})`,
extension,
]
.filter((x) => x) // filters out undefined/null values
.join("");
uniqueFileSavePath = path.join(directory, fileNameWithNumberedSuffix);
}
return uniqueFileSavePath;
}

function lowerCaseHeaders(responseHeaders: Record<string, string[]>) {
const headers: Record<string, string[]> = {};
for (const key of Object.keys(responseHeaders)) {
headers[key.toLowerCase()] = responseHeaders[key];
}
return headers;
}
import { BrowserWindow } from "electron";

export function addAllowOriginHeader(mainWindow: BrowserWindow) {
mainWindow.webContents.session.webRequest.onHeadersReceived(
@@ -61,3 +11,11 @@ export function addAllowOriginHeader(mainWindow: BrowserWindow) {
},
);
}

function lowerCaseHeaders(responseHeaders: Record<string, string[]>) {
const headers: Record<string, string[]> = {};
for (const key of Object.keys(responseHeaders)) {
headers[key.toLowerCase()] = responseHeaders[key];
}
return headers;
}

@@ -12,7 +12,6 @@ import type { FSWatcher } from "chokidar";
import { ipcMain } from "electron/main";
import type {
CollectionMapping,
ElectronFile,
FolderWatch,
PendingUploads,
} from "../types/ipc";
@@ -30,6 +29,7 @@ import {
fsRename,
fsRm,
fsRmdir,
fsSize,
fsWriteFile,
} from "./fs";
import { logToDisk } from "./log";
@@ -39,13 +39,12 @@ import {
updateAndRestart,
updateOnNextRestart,
} from "./services/app-update";
import { runFFmpegCmd } from "./services/ffmpeg";
import { getDirFiles } from "./services/fs";
import { ffmpegExec } from "./services/ffmpeg";
import { convertToJPEG, generateImageThumbnail } from "./services/image";
import {
convertToJPEG,
generateImageThumbnail,
} from "./services/imageProcessor";
import { clipImageEmbedding, clipTextEmbedding } from "./services/ml-clip";
clipImageEmbedding,
clipTextEmbeddingIfAvailable,
} from "./services/ml-clip";
import { detectFaces, faceEmbedding } from "./services/ml-face";
import {
clearStores,
@@ -66,7 +65,7 @@ import {
watchUpdateIgnoredFiles,
watchUpdateSyncedFiles,
} from "./services/watch";
import { openDirectory, openLogDirectory } from "./util";
import { openDirectory, openLogDirectory } from "./utils-electron";

/**
* Listen for IPC events sent/invoked by the renderer process, and route them to
@@ -140,27 +139,33 @@ export const attachIPCHandlers = () => {

ipcMain.handle("fsIsDir", (_, dirPath: string) => fsIsDir(dirPath));

ipcMain.handle("fsSize", (_, path: string) => fsSize(path));

// - Conversion

ipcMain.handle("convertToJPEG", (_, fileData, filename) =>
convertToJPEG(fileData, filename),
ipcMain.handle("convertToJPEG", (_, imageData: Uint8Array) =>
convertToJPEG(imageData),
);

ipcMain.handle(
"generateImageThumbnail",
(_, inputFile, maxDimension, maxSize) =>
generateImageThumbnail(inputFile, maxDimension, maxSize),
(
_,
dataOrPath: Uint8Array | string,
maxDimension: number,
maxSize: number,
) => generateImageThumbnail(dataOrPath, maxDimension, maxSize),
);

ipcMain.handle(
"runFFmpegCmd",
"ffmpegExec",
(
_,
cmd: string[],
inputFile: File | ElectronFile,
outputFileName: string,
dontTimeout?: boolean,
) => runFFmpegCmd(cmd, inputFile, outputFileName, dontTimeout),
command: string[],
dataOrPath: Uint8Array | string,
outputFileExtension: string,
timeoutMS: number,
) => ffmpegExec(command, dataOrPath, outputFileExtension, timeoutMS),
);

// - ML
@@ -169,8 +174,8 @@ export const attachIPCHandlers = () => {
clipImageEmbedding(jpegImageData),
);

ipcMain.handle("clipTextEmbedding", (_, text: string) =>
clipTextEmbedding(text),
ipcMain.handle("clipTextEmbeddingIfAvailable", (_, text: string) =>
clipTextEmbeddingIfAvailable(text),
);

ipcMain.handle("detectFaces", (_, input: Float32Array) =>
@@ -210,8 +215,6 @@ export const attachIPCHandlers = () => {
ipcMain.handle("getElectronFilesFromGoogleZip", (_, filePath: string) =>
getElectronFilesFromGoogleZip(filePath),
);

ipcMain.handle("getDirFiles", (_, dirPath: string) => getDirFiles(dirPath));
};
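For context, a sketch (not shown in this diff) of how the renderer would reach these handlers through a preload bridge; the exposed object name is an assumption, but the channel names and payloads mirror the handlers above:

```ts
// preload.ts (sketch): expose typed wrappers over ipcRenderer.invoke.
import { contextBridge, ipcRenderer } from "electron/renderer";

contextBridge.exposeInMainWorld("electron", {
    convertToJPEG: (imageData: Uint8Array): Promise<Uint8Array> =>
        ipcRenderer.invoke("convertToJPEG", imageData),
    ffmpegExec: (
        command: string[],
        dataOrPath: Uint8Array | string,
        outputFileExtension: string,
        timeoutMS: number,
    ): Promise<Uint8Array> =>
        ipcRenderer.invoke(
            "ffmpegExec",
            command,
            dataOrPath,
            outputFileExtension,
            timeoutMS,
        ),
});
```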

/**

@@ -1,6 +1,6 @@
import log from "electron-log";
import util from "node:util";
import { isDev } from "./util";
import { isDev } from "./utils-electron";

/**
* Initialize logging in the main process.

@@ -9,7 +9,7 @@ import { allowWindowClose } from "../main";
import { forceCheckForAppUpdates } from "./services/app-update";
import autoLauncher from "./services/auto-launcher";
import { userPreferences } from "./stores/user-preferences";
import { openLogDirectory } from "./util";
import { isDev, openLogDirectory } from "./utils-electron";

/** Create and return the entries in the app's main menu bar */
export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
@@ -23,6 +23,9 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
const macOSOnly = (options: MenuItemConstructorOptions[]) =>
process.platform == "darwin" ? options : [];

const devOnly = (options: MenuItemConstructorOptions[]) =>
isDev ? options : [];

const handleCheckForUpdates = () => forceCheckForAppUpdates(mainWindow);

const handleViewChangelog = () =>
@@ -139,7 +142,9 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
label: "View",
submenu: [
{ label: "Reload", role: "reload" },
{ label: "Toggle Dev Tools", role: "toggleDevTools" },
...devOnly([
{ label: "Toggle Dev Tools", role: "toggleDevTools" },
]),
{ type: "separator" },
{ label: "Toggle Full Screen", role: "togglefullscreen" },
],

@@ -58,17 +58,17 @@ const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => {
log.debug(() => "Attempting auto update");
autoUpdater.downloadUpdate();

let timeout: NodeJS.Timeout;
let timeoutId: ReturnType<typeof setTimeout>;
const fiveMinutes = 5 * 60 * 1000;
autoUpdater.on("update-downloaded", () => {
timeout = setTimeout(
timeoutId = setTimeout(
() => showUpdateDialog({ autoUpdatable: true, version }),
fiveMinutes,
);
});

autoUpdater.on("error", (error) => {
clearTimeout(timeout);
clearTimeout(timeoutId);
log.error("Auto update failed", error);
showUpdateDialog({ autoUpdatable: false, version });
});

@@ -1,33 +1,32 @@
import pathToFfmpeg from "ffmpeg-static";
import { existsSync } from "node:fs";
import fs from "node:fs/promises";
import { ElectronFile } from "../../types/ipc";
import log from "../log";
import { writeStream } from "../stream";
import { generateTempFilePath, getTempDirPath } from "../temp";
import { execAsync } from "../util";
import { withTimeout } from "../utils";
import { execAsync } from "../utils-electron";
import { deleteTempFile, makeTempFilePath } from "../utils-temp";

const INPUT_PATH_PLACEHOLDER = "INPUT";
const FFMPEG_PLACEHOLDER = "FFMPEG";
const OUTPUT_PATH_PLACEHOLDER = "OUTPUT";
/* Duplicated in the web app's code (used by the WASM FFmpeg implementation). */
const ffmpegPathPlaceholder = "FFMPEG";
const inputPathPlaceholder = "INPUT";
const outputPathPlaceholder = "OUTPUT";
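For illustration, a command template a caller might pass using these placeholders (the specific ffmpeg flags are an assumption, not taken from this diff); the placeholders get swapped for real paths by substitutePlaceholders further down in this file:

```ts
// Grab a single frame one second in, writing it to the output placeholder.
const sampleThumbnailCommand = [
    ffmpegPathPlaceholder,
    "-i",
    inputPathPlaceholder,
    "-ss",
    "00:00:01",
    "-vframes",
    "1",
    outputPathPlaceholder,
];
```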

/**
* Run a ffmpeg command
* Run a FFmpeg command
*
* [Note: FFMPEG in Electron]
* [Note: FFmpeg in Electron]
*
* There is a wasm build of FFMPEG, but that is currently 10-20 times slower
* There is a wasm build of FFmpeg, but that is currently 10-20 times slower
* than the native build. That is slow enough to be unusable for our purposes.
* https://ffmpegwasm.netlify.app/docs/performance
*
* So the alternative is to bundle a ffmpeg binary with our app. e.g.
* So the alternative is to bundle a FFmpeg executable binary with our app. e.g.
*
* yarn add fluent-ffmpeg ffmpeg-static ffprobe-static
*
* (we only use ffmpeg-static, the rest are mentioned for completeness' sake).
*
* Interestingly, Electron already bundles an ffmpeg library (it comes from the
* ffmpeg fork maintained by Chromium).
* Interestingly, Electron already bundles a binary FFmpeg library (it comes
* from the ffmpeg fork maintained by Chromium).
* https://chromium.googlesource.com/chromium/third_party/ffmpeg
* https://stackoverflow.com/questions/53963672/what-version-of-ffmpeg-is-bundled-inside-electron
*
@@ -36,84 +35,74 @@ const OUTPUT_PATH_PLACEHOLDER = "OUTPUT";
* $ file ente.app/Contents/Frameworks/Electron\ Framework.framework/Versions/Current/Libraries/libffmpeg.dylib
* .../libffmpeg.dylib: Mach-O 64-bit dynamically linked shared library arm64
*
* I'm not sure if our code is supposed to be able to use it, and how.
* But I'm not sure if our code is supposed to be able to use it, and how.
*/
export async function runFFmpegCmd(
cmd: string[],
inputFile: File | ElectronFile,
outputFileName: string,
dontTimeout?: boolean,
) {
let inputFilePath = null;
let createdTempInputFile = null;
export const ffmpegExec = async (
command: string[],
dataOrPath: Uint8Array | string,
outputFileExtension: string,
timeoutMS: number,
): Promise<Uint8Array> => {
// TODO (MR): This currently copies files for both input and output. This
// needs to be tested with extremely large video files when invoked downstream of
// `convertToMP4` in the web code.

let inputFilePath: string;
let isInputFileTemporary: boolean;
if (dataOrPath instanceof Uint8Array) {
inputFilePath = await makeTempFilePath();
isInputFileTemporary = true;
} else {
inputFilePath = dataOrPath;
isInputFileTemporary = false;
}

const outputFilePath = await makeTempFilePath(outputFileExtension);
try {
if (!existsSync(inputFile.path)) {
const tempFilePath = await generateTempFilePath(inputFile.name);
await writeStream(tempFilePath, await inputFile.stream());
inputFilePath = tempFilePath;
createdTempInputFile = true;
} else {
inputFilePath = inputFile.path;
}
const outputFileData = await runFFmpegCmd_(
cmd,
if (dataOrPath instanceof Uint8Array)
await fs.writeFile(inputFilePath, dataOrPath);

const cmd = substitutePlaceholders(
command,
inputFilePath,
outputFileName,
dontTimeout,
outputFilePath,
);
return new File([outputFileData], outputFileName);

if (timeoutMS) await withTimeout(execAsync(cmd), 30 * 1000);
else await execAsync(cmd);

return fs.readFile(outputFilePath);
} finally {
if (createdTempInputFile) {
await deleteTempFile(inputFilePath);
try {
if (isInputFileTemporary) await deleteTempFile(inputFilePath);
await deleteTempFile(outputFilePath);
} catch (e) {
log.error("Could not clean up temp files", e);
}
}
}
};

export async function runFFmpegCmd_(
cmd: string[],
const substitutePlaceholders = (
command: string[],
inputFilePath: string,
outputFileName: string,
dontTimeout = false,
) {
let tempOutputFilePath: string;
try {
tempOutputFilePath = await generateTempFilePath(outputFileName);

cmd = cmd.map((cmdPart) => {
if (cmdPart === FFMPEG_PLACEHOLDER) {
return ffmpegBinaryPath();
} else if (cmdPart === INPUT_PATH_PLACEHOLDER) {
return inputFilePath;
} else if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
return tempOutputFilePath;
} else {
return cmdPart;
}
});

if (dontTimeout) {
await execAsync(cmd);
outputFilePath: string,
) =>
command.map((segment) => {
if (segment == ffmpegPathPlaceholder) {
return ffmpegBinaryPath();
} else if (segment == inputPathPlaceholder) {
return inputFilePath;
} else if (segment == outputPathPlaceholder) {
return outputFilePath;
} else {
await promiseWithTimeout(execAsync(cmd), 30 * 1000);
return segment;
}

if (!existsSync(tempOutputFilePath)) {
throw new Error("ffmpeg output file not found");
}
const outputFile = await fs.readFile(tempOutputFilePath);
return new Uint8Array(outputFile);
} catch (e) {
log.error("FFMPEG command failed", e);
throw e;
} finally {
await deleteTempFile(tempOutputFilePath);
}
}
});

/**
* Return the path to the `ffmpeg` binary.
*
* At runtime, the ffmpeg binary is present in a path like (macOS example):
* At runtime, the FFmpeg binary is present in a path like (macOS example):
* `ente.app/Contents/Resources/app.asar.unpacked/node_modules/ffmpeg-static/ffmpeg`
*/
const ffmpegBinaryPath = () => {
@@ -122,40 +111,3 @@ const ffmpegBinaryPath = () => {
// https://github.com/eugeneware/ffmpeg-static/issues/16
return pathToFfmpeg.replace("app.asar", "app.asar.unpacked");
};

export async function writeTempFile(fileStream: Uint8Array, fileName: string) {
const tempFilePath = await generateTempFilePath(fileName);
await fs.writeFile(tempFilePath, fileStream);
return tempFilePath;
}

export async function deleteTempFile(tempFilePath: string) {
const tempDirPath = await getTempDirPath();
if (!tempFilePath.startsWith(tempDirPath))
log.error("Attempting to delete a non-temp file ${tempFilePath}");
await fs.rm(tempFilePath, { force: true });
}

const promiseWithTimeout = async <T>(
request: Promise<T>,
timeout: number,
): Promise<T> => {
const timeoutRef: {
current: NodeJS.Timeout;
} = { current: null };
const rejectOnTimeout = new Promise<null>((_, reject) => {
timeoutRef.current = setTimeout(
() => reject(new Error("Operation timed out")),
timeout,
);
});
const requestWithTimeOutCancellation = async () => {
const resp = await request;
clearTimeout(timeoutRef.current);
return resp;
};
return await Promise.race([
requestWithTimeOutCancellation(),
rejectOnTimeout,
]);
};
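The new code above imports withTimeout from "../utils", whose implementation is not part of this diff. A minimal sketch of such a helper, equivalent in spirit to the promiseWithTimeout being removed here, could look like:

```ts
/** Reject with an error if the given promise does not settle within ms. */
export const withTimeout = async <T>(promise: Promise<T>, ms: number) => {
    let timeoutId: ReturnType<typeof setTimeout> | undefined;
    const rejectOnTimeout = new Promise<never>((_, reject) => {
        timeoutId = setTimeout(
            () => reject(new Error("Operation timed out")),
            ms,
        );
    });
    try {
        return await Promise.race([promise, rejectOnTimeout]);
    } finally {
        clearTimeout(timeoutId);
    }
};
```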

@@ -7,29 +7,6 @@ import log from "../log";

const FILE_STREAM_CHUNK_SIZE: number = 4 * 1024 * 1024;

export async function getDirFiles(dirPath: string) {
const files = await getDirFilePaths(dirPath);
const electronFiles = await Promise.all(files.map(getElectronFile));
return electronFiles;
}

// https://stackoverflow.com/a/63111390
export const getDirFilePaths = async (dirPath: string) => {
if (!(await fs.stat(dirPath)).isDirectory()) {
return [dirPath];
}

let files: string[] = [];
const filePaths = await fs.readdir(dirPath);

for (const filePath of filePaths) {
const absolute = path.join(dirPath, filePath);
files = [...files, ...(await getDirFilePaths(absolute))];
}

return files;
};

const getFileStream = async (filePath: string) => {
const file = await fs.open(filePath, "r");
let offset = 0;

desktop/src/main/services/image.ts (new file, 160 lines)
@@ -0,0 +1,160 @@
/** @file Image format conversions and thumbnail generation */

import fs from "node:fs/promises";
import path from "path";
import { CustomErrorMessage } from "../../types/ipc";
import log from "../log";
import { execAsync, isDev } from "../utils-electron";
import { deleteTempFile, makeTempFilePath } from "../utils-temp";

export const convertToJPEG = async (imageData: Uint8Array) => {
const inputFilePath = await makeTempFilePath();
const outputFilePath = await makeTempFilePath("jpeg");

// Construct the command first, it may throw NotAvailable on win32.
const command = convertToJPEGCommand(inputFilePath, outputFilePath);

try {
await fs.writeFile(inputFilePath, imageData);
await execAsync(command);
return new Uint8Array(await fs.readFile(outputFilePath));
} finally {
try {
await deleteTempFile(inputFilePath);
await deleteTempFile(outputFilePath);
} catch (e) {
log.error("Could not clean up temp files", e);
}
}
};

const convertToJPEGCommand = (
inputFilePath: string,
outputFilePath: string,
) => {
switch (process.platform) {
case "darwin":
return [
"sips",
"-s",
"format",
"jpeg",
inputFilePath,
"--out",
outputFilePath,
];

case "linux":
return [
imageMagickPath(),
inputFilePath,
"-quality",
"100%",
outputFilePath,
];

default: // "win32"
throw new Error(CustomErrorMessage.NotAvailable);
}
};

/** Path to the Linux image-magick executable bundled with our app */
const imageMagickPath = () =>
path.join(isDev ? "build" : process.resourcesPath, "image-magick");

export const generateImageThumbnail = async (
dataOrPath: Uint8Array | string,
maxDimension: number,
maxSize: number,
): Promise<Uint8Array> => {
let inputFilePath: string;
let isInputFileTemporary: boolean;
if (dataOrPath instanceof Uint8Array) {
inputFilePath = await makeTempFilePath();
isInputFileTemporary = true;
} else {
inputFilePath = dataOrPath;
isInputFileTemporary = false;
}

const outputFilePath = await makeTempFilePath("jpeg");

// Construct the command first, it may throw `NotAvailable` on win32.
let quality = 70;
let command = generateImageThumbnailCommand(
inputFilePath,
outputFilePath,
maxDimension,
quality,
);

try {
if (dataOrPath instanceof Uint8Array)
await fs.writeFile(inputFilePath, dataOrPath);

let thumbnail: Uint8Array;
do {
await execAsync(command);
thumbnail = new Uint8Array(await fs.readFile(outputFilePath));
quality -= 10;
command = generateImageThumbnailCommand(
inputFilePath,
outputFilePath,
maxDimension,
quality,
);
} while (thumbnail.length > maxSize && quality > 50);
return thumbnail;
} finally {
try {
if (isInputFileTemporary) await deleteTempFile(inputFilePath);
await deleteTempFile(outputFilePath);
} catch (e) {
log.error("Could not clean up temp files", e);
}
}
};

const generateImageThumbnailCommand = (
inputFilePath: string,
outputFilePath: string,
maxDimension: number,
quality: number,
) => {
switch (process.platform) {
case "darwin":
return [
"sips",
"-s",
"format",
"jpeg",
"-s",
"formatOptions",
`${quality}`,
"-Z",
`${maxDimension}`,
inputFilePath,
"--out",
outputFilePath,
];

case "linux":
return [
imageMagickPath(),
inputFilePath,
"-auto-orient",
"-define",
`jpeg:size=${2 * maxDimension}x${2 * maxDimension}`,
"-thumbnail",
`${maxDimension}x${maxDimension}>`,
"-unsharp",
"0x.5",
"-quality",
`${quality}`,
outputFilePath,
];

default: // "win32"
throw new Error(CustomErrorMessage.NotAvailable);
}
};
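A usage sketch tying the two exports above together (not from the diff; the input path, thumbnail dimension, and size cap are illustrative assumptions):

```ts
// Convert a HEIC image to JPEG, then derive a thumbnail from the JPEG data,
// re-encoding at decreasing quality until it fits under the size cap.
const exampleConvertAndThumbnail = async (heicPath: string) => {
    const heicData = new Uint8Array(await fs.readFile(heicPath));
    const jpegData = await convertToJPEG(heicData);
    const thumbnail = await generateImageThumbnail(jpegData, 720, 100 * 1024);
    return { jpegData, thumbnail };
};
```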
|
|
@ -1,288 +0,0 @@
|
|||
import { existsSync } from "fs";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "path";
|
||||
import { CustomErrors, ElectronFile } from "../../types/ipc";
|
||||
import log from "../log";
|
||||
import { writeStream } from "../stream";
|
||||
import { generateTempFilePath } from "../temp";
|
||||
import { execAsync, isDev } from "../util";
|
||||
import { deleteTempFile } from "./ffmpeg";
|
||||
|
||||
const IMAGE_MAGICK_PLACEHOLDER = "IMAGE_MAGICK";
|
||||
const MAX_DIMENSION_PLACEHOLDER = "MAX_DIMENSION";
|
||||
const SAMPLE_SIZE_PLACEHOLDER = "SAMPLE_SIZE";
|
||||
const INPUT_PATH_PLACEHOLDER = "INPUT";
|
||||
const OUTPUT_PATH_PLACEHOLDER = "OUTPUT";
|
||||
const QUALITY_PLACEHOLDER = "QUALITY";
|
||||
|
||||
const MAX_QUALITY = 70;
|
||||
const MIN_QUALITY = 50;
|
||||
|
||||
const SIPS_HEIC_CONVERT_COMMAND_TEMPLATE = [
|
||||
"sips",
|
||||
"-s",
|
||||
"format",
|
||||
"jpeg",
|
||||
INPUT_PATH_PLACEHOLDER,
|
||||
"--out",
|
||||
OUTPUT_PATH_PLACEHOLDER,
|
||||
];
|
||||
|
||||
const SIPS_THUMBNAIL_GENERATE_COMMAND_TEMPLATE = [
|
||||
"sips",
|
||||
"-s",
|
||||
"format",
|
||||
"jpeg",
|
||||
"-s",
|
||||
"formatOptions",
|
||||
QUALITY_PLACEHOLDER,
|
||||
"-Z",
|
||||
MAX_DIMENSION_PLACEHOLDER,
|
||||
INPUT_PATH_PLACEHOLDER,
|
||||
"--out",
|
||||
OUTPUT_PATH_PLACEHOLDER,
|
||||
];
|
||||
|
||||
const IMAGEMAGICK_HEIC_CONVERT_COMMAND_TEMPLATE = [
|
||||
IMAGE_MAGICK_PLACEHOLDER,
|
||||
INPUT_PATH_PLACEHOLDER,
|
||||
"-quality",
|
||||
"100%",
|
||||
OUTPUT_PATH_PLACEHOLDER,
|
||||
];
|
||||
|
||||
const IMAGE_MAGICK_THUMBNAIL_GENERATE_COMMAND_TEMPLATE = [
|
||||
IMAGE_MAGICK_PLACEHOLDER,
|
||||
INPUT_PATH_PLACEHOLDER,
|
||||
"-auto-orient",
|
||||
"-define",
|
||||
`jpeg:size=${SAMPLE_SIZE_PLACEHOLDER}x${SAMPLE_SIZE_PLACEHOLDER}`,
|
||||
"-thumbnail",
|
||||
`${MAX_DIMENSION_PLACEHOLDER}x${MAX_DIMENSION_PLACEHOLDER}>`,
|
||||
"-unsharp",
|
||||
"0x.5",
|
||||
"-quality",
|
||||
QUALITY_PLACEHOLDER,
|
||||
OUTPUT_PATH_PLACEHOLDER,
|
||||
];
|
||||
|
||||
const imageMagickStaticPath = () =>
|
||||
path.join(isDev ? "build" : process.resourcesPath, "image-magick");
|
||||
|
||||
export async function convertToJPEG(
|
||||
fileData: Uint8Array,
|
||||
filename: string,
|
||||
): Promise<Uint8Array> {
|
||||
if (process.platform == "win32")
|
||||
throw Error(CustomErrors.WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED);
|
||||
const convertedFileData = await convertToJPEG_(fileData, filename);
|
||||
return convertedFileData;
|
||||
}
|
||||
|
||||
async function convertToJPEG_(
|
||||
fileData: Uint8Array,
|
||||
filename: string,
|
||||
): Promise<Uint8Array> {
|
||||
let tempInputFilePath: string;
|
||||
let tempOutputFilePath: string;
|
||||
try {
|
||||
tempInputFilePath = await generateTempFilePath(filename);
|
||||
tempOutputFilePath = await generateTempFilePath("output.jpeg");
|
||||
|
||||
await fs.writeFile(tempInputFilePath, fileData);
|
||||
|
||||
await execAsync(
|
||||
constructConvertCommand(tempInputFilePath, tempOutputFilePath),
|
||||
);
|
||||
|
||||
return new Uint8Array(await fs.readFile(tempOutputFilePath));
|
||||
} catch (e) {
|
||||
log.error("Failed to convert HEIC", e);
|
||||
throw e;
|
||||
} finally {
|
||||
try {
|
||||
await fs.rm(tempInputFilePath, { force: true });
|
||||
} catch (e) {
|
||||
log.error(`Failed to remove tempInputFile ${tempInputFilePath}`, e);
|
||||
}
|
||||
try {
|
||||
await fs.rm(tempOutputFilePath, { force: true });
|
||||
} catch (e) {
|
||||
log.error(
|
||||
`Failed to remove tempOutputFile ${tempOutputFilePath}`,
|
||||
e,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function constructConvertCommand(
|
||||
tempInputFilePath: string,
|
||||
tempOutputFilePath: string,
|
||||
) {
|
||||
let convertCmd: string[];
|
||||
if (process.platform == "darwin") {
|
||||
convertCmd = SIPS_HEIC_CONVERT_COMMAND_TEMPLATE.map((cmdPart) => {
|
||||
if (cmdPart === INPUT_PATH_PLACEHOLDER) {
|
||||
return tempInputFilePath;
|
||||
}
|
||||
if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
|
||||
return tempOutputFilePath;
|
||||
}
|
||||
return cmdPart;
|
||||
});
|
||||
} else if (process.platform == "linux") {
|
||||
convertCmd = IMAGEMAGICK_HEIC_CONVERT_COMMAND_TEMPLATE.map(
|
||||
(cmdPart) => {
|
||||
if (cmdPart === IMAGE_MAGICK_PLACEHOLDER) {
|
||||
return imageMagickStaticPath();
|
||||
}
|
||||
if (cmdPart === INPUT_PATH_PLACEHOLDER) {
|
||||
return tempInputFilePath;
|
||||
}
|
||||
if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
|
||||
return tempOutputFilePath;
|
||||
}
|
||||
return cmdPart;
|
||||
},
|
||||
);
|
||||
} else {
|
||||
throw new Error(`Unsupported OS ${process.platform}`);
|
||||
}
|
||||
return convertCmd;
|
||||
}
|
||||
|
||||
export async function generateImageThumbnail(
|
||||
inputFile: File | ElectronFile,
|
||||
maxDimension: number,
|
||||
maxSize: number,
|
||||
): Promise<Uint8Array> {
|
||||
let inputFilePath = null;
|
||||
let createdTempInputFile = null;
|
||||
try {
|
||||
if (process.platform == "win32")
|
||||
throw Error(
|
||||
CustomErrors.WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED,
|
||||
);
|
||||
if (!existsSync(inputFile.path)) {
|
||||
const tempFilePath = await generateTempFilePath(inputFile.name);
|
||||
await writeStream(tempFilePath, await inputFile.stream());
|
||||
inputFilePath = tempFilePath;
|
||||
createdTempInputFile = true;
|
||||
} else {
|
||||
inputFilePath = inputFile.path;
|
||||
}
|
||||
const thumbnail = await generateImageThumbnail_(
|
||||
inputFilePath,
|
||||
maxDimension,
|
||||
maxSize,
|
||||
);
|
||||
return thumbnail;
|
||||
} finally {
|
||||
if (createdTempInputFile) {
|
||||
try {
|
||||
await deleteTempFile(inputFilePath);
|
||||
} catch (e) {
|
||||
log.error(`Failed to deleteTempFile ${inputFilePath}`, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function generateImageThumbnail_(
|
||||
inputFilePath: string,
|
||||
width: number,
|
||||
maxSize: number,
|
||||
): Promise<Uint8Array> {
|
||||
let tempOutputFilePath: string;
|
||||
let quality = MAX_QUALITY;
|
||||
try {
|
||||
tempOutputFilePath = await generateTempFilePath("thumb.jpeg");
|
||||
let thumbnail: Uint8Array;
|
||||
do {
|
||||
await execAsync(
|
||||
constructThumbnailGenerationCommand(
|
||||
inputFilePath,
|
||||
tempOutputFilePath,
|
||||
width,
|
||||
quality,
|
||||
),
|
||||
);
|
||||
thumbnail = new Uint8Array(await fs.readFile(tempOutputFilePath));
|
||||
quality -= 10;
|
||||
} while (thumbnail.length > maxSize && quality > MIN_QUALITY);
|
||||
return thumbnail;
|
||||
} catch (e) {
|
||||
log.error("Failed to generate image thumbnail", e);
|
||||
throw e;
|
||||
} finally {
|
||||
try {
|
||||
await fs.rm(tempOutputFilePath, { force: true });
|
||||
} catch (e) {
|
||||
log.error(
|
||||
`Failed to remove tempOutputFile ${tempOutputFilePath}`,
|
||||
e,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function constructThumbnailGenerationCommand(
|
||||
inputFilePath: string,
|
||||
tempOutputFilePath: string,
|
||||
maxDimension: number,
|
||||
quality: number,
|
||||
) {
|
||||
let thumbnailGenerationCmd: string[];
|
||||
if (process.platform == "darwin") {
|
||||
thumbnailGenerationCmd = SIPS_THUMBNAIL_GENERATE_COMMAND_TEMPLATE.map(
|
||||
(cmdPart) => {
|
||||
if (cmdPart === INPUT_PATH_PLACEHOLDER) {
|
||||
return inputFilePath;
|
||||
}
|
||||
if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
|
||||
return tempOutputFilePath;
|
||||
}
|
||||
if (cmdPart === MAX_DIMENSION_PLACEHOLDER) {
|
||||
return maxDimension.toString();
|
||||
}
|
||||
if (cmdPart === QUALITY_PLACEHOLDER) {
|
||||
return quality.toString();
|
||||
}
|
||||
return cmdPart;
|
||||
},
|
||||
);
|
||||
} else if (process.platform == "linux") {
|
||||
thumbnailGenerationCmd =
|
||||
IMAGE_MAGICK_THUMBNAIL_GENERATE_COMMAND_TEMPLATE.map((cmdPart) => {
|
||||
if (cmdPart === IMAGE_MAGICK_PLACEHOLDER) {
|
||||
return imageMagickStaticPath();
|
||||
}
|
||||
if (cmdPart === INPUT_PATH_PLACEHOLDER) {
|
||||
return inputFilePath;
|
||||
}
|
||||
if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
|
||||
return tempOutputFilePath;
|
||||
}
|
||||
if (cmdPart.includes(SAMPLE_SIZE_PLACEHOLDER)) {
|
||||
return cmdPart.replaceAll(
|
||||
SAMPLE_SIZE_PLACEHOLDER,
|
||||
(2 * maxDimension).toString(),
|
||||
);
|
||||
}
|
||||
if (cmdPart.includes(MAX_DIMENSION_PLACEHOLDER)) {
|
||||
return cmdPart.replaceAll(
|
||||
MAX_DIMENSION_PLACEHOLDER,
|
||||
maxDimension.toString(),
|
||||
);
|
||||
}
|
||||
if (cmdPart === QUALITY_PLACEHOLDER) {
|
||||
return quality.toString();
|
||||
}
|
||||
return cmdPart;
|
||||
});
|
||||
} else {
|
||||
throw new Error(`Unsupported OS ${process.platform}`);
|
||||
}
|
||||
return thumbnailGenerationCmd;
|
||||
}
|
|
@ -5,115 +5,22 @@
|
|||
*
|
||||
* @see `web/apps/photos/src/services/clip-service.ts` for more details.
|
||||
*/
|
||||
import { existsSync } from "fs";
|
||||
import jpeg from "jpeg-js";
|
||||
import fs from "node:fs/promises";
|
||||
import * as ort from "onnxruntime-node";
|
||||
import Tokenizer from "../../thirdparty/clip-bpe-ts/mod";
|
||||
import { CustomErrors } from "../../types/ipc";
|
||||
import log from "../log";
|
||||
import { writeStream } from "../stream";
|
||||
import { generateTempFilePath } from "../temp";
|
||||
import { deleteTempFile } from "./ffmpeg";
|
||||
import {
|
||||
createInferenceSession,
|
||||
downloadModel,
|
||||
modelPathDownloadingIfNeeded,
|
||||
modelSavePath,
|
||||
} from "./ml";
|
||||
import { deleteTempFile, makeTempFilePath } from "../utils-temp";
|
||||
import { makeCachedInferenceSession } from "./ml";
|
||||
|
||||
const textModelName = "clip-text-vit-32-uint8.onnx";
|
||||
const textModelByteSize = 64173509; // 61.2 MB
|
||||
|
||||
const imageModelName = "clip-image-vit-32-float32.onnx";
|
||||
const imageModelByteSize = 351468764; // 335.2 MB
|
||||
|
||||
let activeImageModelDownload: Promise<string> | undefined;
|
||||
|
||||
const imageModelPathDownloadingIfNeeded = async () => {
|
||||
try {
|
||||
if (activeImageModelDownload) {
|
||||
log.info("Waiting for CLIP image model download to finish");
|
||||
await activeImageModelDownload;
|
||||
} else {
|
||||
activeImageModelDownload = modelPathDownloadingIfNeeded(
|
||||
imageModelName,
|
||||
imageModelByteSize,
|
||||
);
|
||||
return await activeImageModelDownload;
|
||||
}
|
||||
} finally {
|
||||
activeImageModelDownload = undefined;
|
||||
}
|
||||
};
|
||||
|
||||
let textModelDownloadInProgress = false;
|
||||
|
||||
/* TODO(MR): use the generic method. Then we can remove the exports for the
|
||||
internal details functions that we use here */
|
||||
const textModelPathDownloadingIfNeeded = async () => {
|
||||
if (textModelDownloadInProgress)
|
||||
throw Error(CustomErrors.MODEL_DOWNLOAD_PENDING);
|
||||
|
||||
const modelPath = modelSavePath(textModelName);
|
||||
if (!existsSync(modelPath)) {
|
||||
log.info("CLIP text model not found, downloading");
|
||||
textModelDownloadInProgress = true;
|
||||
downloadModel(modelPath, textModelName)
|
||||
.catch((e) => {
|
||||
// log but otherwise ignore
|
||||
log.error("CLIP text model download failed", e);
|
||||
})
|
||||
.finally(() => {
|
||||
textModelDownloadInProgress = false;
|
||||
});
|
||||
throw Error(CustomErrors.MODEL_DOWNLOAD_PENDING);
|
||||
} else {
|
||||
const localFileSize = (await fs.stat(modelPath)).size;
|
||||
if (localFileSize !== textModelByteSize) {
|
||||
log.error(
|
||||
`CLIP text model size ${localFileSize} does not match the expected size, downloading again`,
|
||||
);
|
||||
textModelDownloadInProgress = true;
|
||||
downloadModel(modelPath, textModelName)
|
||||
.catch((e) => {
|
||||
// log but otherwise ignore
|
||||
log.error("CLIP text model download failed", e);
|
||||
})
|
||||
.finally(() => {
|
||||
textModelDownloadInProgress = false;
|
||||
});
|
||||
throw Error(CustomErrors.MODEL_DOWNLOAD_PENDING);
|
||||
}
|
||||
}
|
||||
|
||||
return modelPath;
|
||||
};
|
||||
|
||||
let imageSessionPromise: Promise<any> | undefined;
|
||||
|
||||
const onnxImageSession = async () => {
|
||||
if (!imageSessionPromise) {
|
||||
imageSessionPromise = (async () => {
|
||||
const modelPath = await imageModelPathDownloadingIfNeeded();
|
||||
return createInferenceSession(modelPath);
|
||||
})();
|
||||
}
|
||||
return imageSessionPromise;
|
||||
};
|
||||
|
||||
let _textSession: any = null;
|
||||
|
||||
const onnxTextSession = async () => {
|
||||
if (!_textSession) {
|
||||
const modelPath = await textModelPathDownloadingIfNeeded();
|
||||
_textSession = await createInferenceSession(modelPath);
|
||||
}
|
||||
return _textSession;
|
||||
};
|
||||
const cachedCLIPImageSession = makeCachedInferenceSession(
|
||||
"clip-image-vit-32-float32.onnx",
|
||||
351468764 /* 335.2 MB */,
|
||||
);
|
||||
|
||||
export const clipImageEmbedding = async (jpegImageData: Uint8Array) => {
|
||||
const tempFilePath = await generateTempFilePath("");
|
||||
const tempFilePath = await makeTempFilePath();
|
||||
const imageStream = new Response(jpegImageData.buffer).body;
|
||||
await writeStream(tempFilePath, imageStream);
|
||||
try {
|
||||
|
@ -124,19 +31,20 @@ export const clipImageEmbedding = async (jpegImageData: Uint8Array) => {
|
|||
};
|
||||
|
||||
const clipImageEmbedding_ = async (jpegFilePath: string) => {
|
||||
const imageSession = await onnxImageSession();
|
||||
const session = await cachedCLIPImageSession();
|
||||
const t1 = Date.now();
|
||||
const rgbData = await getRGBData(jpegFilePath);
|
||||
const feeds = {
|
||||
input: new ort.Tensor("float32", rgbData, [1, 3, 224, 224]),
|
||||
};
|
||||
const t2 = Date.now();
|
||||
const results = await imageSession.run(feeds);
|
||||
const results = await session.run(feeds);
|
||||
log.debug(
|
||||
() =>
|
||||
`onnx/clip image embedding took ${Date.now() - t1} ms (prep: ${t2 - t1} ms, inference: ${Date.now() - t2} ms)`,
|
||||
);
|
||||
const imageEmbedding = results["output"].data; // Float32Array
|
||||
/* Need these model specific casts to type the result */
|
||||
const imageEmbedding = results["output"].data as Float32Array;
|
||||
return normalizeEmbedding(imageEmbedding);
|
||||
};
|
||||
|
||||
|
@ -221,6 +129,11 @@ const normalizeEmbedding = (embedding: Float32Array) => {
|
|||
return embedding;
|
||||
};
|
||||
|
||||
const cachedCLIPTextSession = makeCachedInferenceSession(
|
||||
"clip-text-vit-32-uint8.onnx",
|
||||
64173509 /* 61.2 MB */,
|
||||
);
|
||||
|
||||
let _tokenizer: Tokenizer = null;
|
||||
const getTokenizer = () => {
|
||||
if (!_tokenizer) {
|
||||
|
@ -229,8 +142,21 @@ const getTokenizer = () => {
|
|||
return _tokenizer;
|
||||
};
|
||||
|
||||
export const clipTextEmbedding = async (text: string) => {
|
||||
const imageSession = await onnxTextSession();
|
||||
export const clipTextEmbeddingIfAvailable = async (text: string) => {
|
||||
const sessionOrStatus = await Promise.race([
|
||||
cachedCLIPTextSession(),
|
||||
"downloading-model",
|
||||
]);
|
||||
|
||||
// Don't wait for the download to complete
|
||||
if (typeof sessionOrStatus == "string") {
|
||||
log.info(
|
||||
"Ignoring CLIP text embedding request because model download is pending",
|
||||
);
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const session = sessionOrStatus;
|
||||
const t1 = Date.now();
|
||||
const tokenizer = getTokenizer();
|
||||
const tokenizedText = Int32Array.from(tokenizer.encodeForCLIP(text));
|
||||
|
@ -238,11 +164,11 @@ export const clipTextEmbedding = async (text: string) => {
|
|||
input: new ort.Tensor("int32", tokenizedText, [1, 77]),
|
||||
};
|
||||
const t2 = Date.now();
|
||||
const results = await imageSession.run(feeds);
|
||||
const results = await session.run(feeds);
|
||||
log.debug(
|
||||
() =>
|
||||
`onnx/clip text embedding took ${Date.now() - t1} ms (prep: ${t2 - t1} ms, inference: ${Date.now() - t2} ms)`,
|
||||
);
|
||||
const textEmbedding = results["output"].data;
|
||||
const textEmbedding = results["output"].data as Float32Array;
|
||||
return normalizeEmbedding(textEmbedding);
|
||||
};
|
||||
|
|
|
@ -8,78 +8,15 @@
|
|||
*/
|
||||
import * as ort from "onnxruntime-node";
|
||||
import log from "../log";
|
||||
import { createInferenceSession, modelPathDownloadingIfNeeded } from "./ml";
|
||||
import { makeCachedInferenceSession } from "./ml";
|
||||
|
||||
const faceDetectionModelName = "yolov5s_face_640_640_dynamic.onnx";
|
||||
const faceDetectionModelByteSize = 30762872; // 29.3 MB
|
||||
|
||||
const faceEmbeddingModelName = "mobilefacenet_opset15.onnx";
|
||||
const faceEmbeddingModelByteSize = 5286998; // 5 MB
|
||||
|
||||
let activeFaceDetectionModelDownload: Promise<string> | undefined;
|
||||
|
||||
const faceDetectionModelPathDownloadingIfNeeded = async () => {
|
||||
try {
|
||||
if (activeFaceDetectionModelDownload) {
|
||||
log.info("Waiting for face detection model download to finish");
|
||||
await activeFaceDetectionModelDownload;
|
||||
} else {
|
||||
activeFaceDetectionModelDownload = modelPathDownloadingIfNeeded(
|
||||
faceDetectionModelName,
|
||||
faceDetectionModelByteSize,
|
||||
);
|
||||
return await activeFaceDetectionModelDownload;
|
||||
}
|
||||
} finally {
|
||||
activeFaceDetectionModelDownload = undefined;
|
||||
}
|
||||
};
|
||||
|
||||
let _faceDetectionSession: Promise<ort.InferenceSession> | undefined;
|
||||
|
||||
const faceDetectionSession = async () => {
|
||||
if (!_faceDetectionSession) {
|
||||
_faceDetectionSession =
|
||||
faceDetectionModelPathDownloadingIfNeeded().then((modelPath) =>
|
||||
createInferenceSession(modelPath),
|
||||
);
|
||||
}
|
||||
return _faceDetectionSession;
|
||||
};
|
||||
|
||||
let activeFaceEmbeddingModelDownload: Promise<string> | undefined;
|
||||
|
||||
const faceEmbeddingModelPathDownloadingIfNeeded = async () => {
|
||||
try {
|
||||
if (activeFaceEmbeddingModelDownload) {
|
||||
log.info("Waiting for face embedding model download to finish");
|
||||
await activeFaceEmbeddingModelDownload;
|
||||
} else {
|
||||
activeFaceEmbeddingModelDownload = modelPathDownloadingIfNeeded(
|
||||
faceEmbeddingModelName,
|
||||
faceEmbeddingModelByteSize,
|
||||
);
|
||||
return await activeFaceEmbeddingModelDownload;
|
||||
}
|
||||
} finally {
|
||||
activeFaceEmbeddingModelDownload = undefined;
|
||||
}
|
||||
};
|
||||
|
||||
let _faceEmbeddingSession: Promise<ort.InferenceSession> | undefined;
|
||||
|
||||
const faceEmbeddingSession = async () => {
|
||||
if (!_faceEmbeddingSession) {
|
||||
_faceEmbeddingSession =
|
||||
faceEmbeddingModelPathDownloadingIfNeeded().then((modelPath) =>
|
||||
createInferenceSession(modelPath),
|
||||
);
|
||||
}
|
||||
return _faceEmbeddingSession;
|
||||
};
|
||||
const cachedFaceDetectionSession = makeCachedInferenceSession(
|
||||
"yolov5s_face_640_640_dynamic.onnx",
|
||||
30762872 /* 29.3 MB */,
|
||||
);
|
||||
|
||||
export const detectFaces = async (input: Float32Array) => {
|
||||
const session = await faceDetectionSession();
|
||||
const session = await cachedFaceDetectionSession();
|
||||
const t = Date.now();
|
||||
const feeds = {
|
||||
input: new ort.Tensor("float32", input, [1, 3, 640, 640]),
|
||||
|
@ -89,6 +26,11 @@ export const detectFaces = async (input: Float32Array) => {
|
|||
return results["output"].data;
|
||||
};
|
||||
|
||||
const cachedFaceEmbeddingSession = makeCachedInferenceSession(
|
||||
"mobilefacenet_opset15.onnx",
|
||||
5286998 /* 5 MB */,
|
||||
);
|
||||
|
||||
export const faceEmbedding = async (input: Float32Array) => {
|
||||
// Dimension of each face (alias)
|
||||
const mobileFaceNetFaceSize = 112;
|
||||
|
@ -98,11 +40,11 @@ export const faceEmbedding = async (input: Float32Array) => {
|
|||
const n = Math.round(input.length / (z * z * 3));
|
||||
const inputTensor = new ort.Tensor("float32", input, [n, z, z, 3]);
|
||||
|
||||
const session = await faceEmbeddingSession();
|
||||
const session = await cachedFaceEmbeddingSession();
|
||||
const t = Date.now();
|
||||
const feeds = { img_inputs: inputTensor };
|
||||
const results = await session.run(feeds);
|
||||
log.debug(() => `onnx/yolo face embedding took ${Date.now() - t} ms`);
|
||||
// TODO: What's with this type? It works in practice, but double check.
|
||||
return (results.embeddings as unknown as any)["cpuData"]; // as Float32Array;
|
||||
/* Need these model specific casts to extract and type the result */
|
||||
return (results.embeddings as unknown as any)["cpuData"] as Float32Array;
|
||||
};
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
/**
|
||||
* @file AI/ML related functionality.
|
||||
* @file AI/ML related functionality, generic layer.
|
||||
*
|
||||
* @see also `ml-clip.ts`, `ml-face.ts`.
|
||||
*
|
||||
|
@ -18,6 +18,49 @@ import * as ort from "onnxruntime-node";
|
|||
import log from "../log";
|
||||
import { writeStream } from "../stream";
|
||||
|
||||
/**
|
||||
* Return a function that can be used to trigger a download of the specified
|
||||
* model, and the creating of an ONNX inference session initialized using it.
|
||||
*
|
||||
* Multiple parallel calls to the returned function are fine, it ensures that
|
||||
* the the model will be downloaded and the session created using it only once.
|
||||
* All pending calls to it meanwhile will just await on the same promise.
|
||||
*
|
||||
* And once the promise is resolved, the create ONNX inference session will be
|
||||
* cached, so subsequent calls to the returned function will just reuse the same
|
||||
* session.
|
||||
*
|
||||
* {@link makeCachedInferenceSession} can itself be called anytime; it doesn't
|
||||
* actively trigger a download until the returned function is called.
|
||||
*
|
||||
* @param modelName The name of the model to download.
|
||||
* @param modelByteSize The size in bytes that we expect the model to have. If
|
||||
* the size of the downloaded model does not match the expected size, then we
|
||||
* will redownload it.
|
||||
*
|
||||
* @returns a function. Calling that function returns a promise to an ONNX
|
||||
* session.
|
||||
*/
|
||||
export const makeCachedInferenceSession = (
|
||||
modelName: string,
|
||||
modelByteSize: number,
|
||||
) => {
|
||||
let session: Promise<ort.InferenceSession> | undefined;
|
||||
|
||||
const download = () =>
|
||||
modelPathDownloadingIfNeeded(modelName, modelByteSize);
|
||||
|
||||
const createSession = (modelPath: string) =>
|
||||
createInferenceSession(modelPath);
|
||||
|
||||
const cachedInferenceSession = () => {
|
||||
if (!session) session = download().then(createSession);
|
||||
return session;
|
||||
};
|
||||
|
||||
return cachedInferenceSession;
|
||||
};
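For orientation, here is a minimal usage sketch of this helper, mirroring how the face modules earlier in this diff consume it; the model name, byte size, tensor name and shape below are placeholders, not real assets:

    // Hypothetical example only: "some_model.onnx", its byte size, and the
    // tensor name/shape are made-up values for illustration.
    const cachedSomeModelSession = makeCachedInferenceSession(
        "some_model.onnx",
        1234567 /* expected size in bytes */,
    );

    export const runSomeModel = async (input: Float32Array) => {
        // The first call downloads the model and creates the session; later
        // calls reuse the same cached promise.
        const session = await cachedSomeModelSession();
        const feeds = { input: new ort.Tensor("float32", input, [1, 3, 640, 640]) };
        const results = await session.run(feeds);
        return results["output"].data;
    };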
|
||||
|
||||
/**
|
||||
* Download the model named {@link modelName} if we don't already have it.
|
||||
*
|
||||
|
@ -26,7 +69,7 @@ import { writeStream } from "../stream";
|
|||
*
|
||||
* @returns the path to the model on the local machine.
|
||||
*/
|
||||
export const modelPathDownloadingIfNeeded = async (
|
||||
const modelPathDownloadingIfNeeded = async (
|
||||
modelName: string,
|
||||
expectedByteSize: number,
|
||||
) => {
|
||||
|
@ -49,10 +92,10 @@ export const modelPathDownloadingIfNeeded = async (
|
|||
};
|
||||
|
||||
/** Return the path where the given {@link modelName} is meant to be saved */
|
||||
export const modelSavePath = (modelName: string) =>
|
||||
const modelSavePath = (modelName: string) =>
|
||||
path.join(app.getPath("userData"), "models", modelName);
|
||||
|
||||
export const downloadModel = async (saveLocation: string, name: string) => {
|
||||
const downloadModel = async (saveLocation: string, name: string) => {
|
||||
// `mkdir -p` the directory where we want to save the model.
|
||||
const saveDir = path.dirname(saveLocation);
|
||||
await fs.mkdir(saveDir, { recursive: true });
|
||||
|
@ -69,7 +112,7 @@ export const downloadModel = async (saveLocation: string, name: string) => {
|
|||
/**
|
||||
* Create an ONNX {@link InferenceSession} with some defaults.
|
||||
*/
|
||||
export const createInferenceSession = async (modelPath: string) => {
|
||||
const createInferenceSession = async (modelPath: string) => {
|
||||
return await ort.InferenceSession.create(modelPath, {
|
||||
// Restrict the number of threads to 1
|
||||
intraOpNumThreads: 1,
|
||||
|
|
|
@ -1,15 +1,16 @@
|
|||
/**
|
||||
* @file stream data to-from renderer using a custom protocol handler.
|
||||
*/
|
||||
import { protocol } from "electron/main";
|
||||
import { net, protocol } from "electron/main";
|
||||
import { createWriteStream, existsSync } from "node:fs";
|
||||
import fs from "node:fs/promises";
|
||||
import { Readable } from "node:stream";
|
||||
import { pathToFileURL } from "node:url";
|
||||
import log from "./log";
|
||||
|
||||
/**
|
||||
* Register a protocol handler that we use for streaming large files between the
|
||||
* main process (node) and the renderer process (browser) layer.
|
||||
* main (Node.js) and renderer (Chromium) processes.
|
||||
*
|
||||
* [Note: IPC streams]
|
||||
*
|
||||
|
@ -17,11 +18,14 @@ import log from "./log";
|
|||
* across IPC. And passing the entire contents of the file is not feasible for
|
||||
* large video files because of the memory pressure the copying would entail.
|
||||
*
|
||||
* As an alternative, we register a custom protocol handler that can provided a
|
||||
* As an alternative, we register a custom protocol handler that provides a
|
||||
* bi-directional stream. The renderer can stream data to the node side by
|
||||
* streaming the request. The node side can stream to the renderer side by
|
||||
* streaming the response.
|
||||
*
|
||||
* The stream is not full duplex - while both reads and writes can be streamed,
|
||||
* they need to be streamed separately.
|
||||
*
|
||||
* See also: [Note: Transferring large amount of data over IPC]
|
||||
*
|
||||
* Depends on {@link registerPrivilegedSchemes}.
|
||||
|
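To make the note above concrete, a hypothetical renderer-side write using this protocol might look like the sketch below; the web layer's actual call sites are outside this diff, and the path shown is a placeholder:

    // Hypothetical renderer-side sketch: stream a Blob to the main process,
    // which the "write" branch of the handler persists at the given path.
    const writeViaStream = async (path: string, blob: Blob) => {
        const res = await fetch(`stream://write${path}`, {
            method: "POST",
            body: blob,
        });
        if (!res.ok) throw new Error(`Failed to write stream to ${path}`);
    };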
@ -29,29 +33,73 @@ import log from "./log";
|
|||
export const registerStreamProtocol = () => {
|
||||
protocol.handle("stream", async (request: Request) => {
|
||||
const url = request.url;
|
||||
// The request URL contains the command to run as the host, and the
|
||||
// pathname of the file as the path. For example,
|
||||
//
|
||||
// stream://write/path/to/file
|
||||
// host-pathname-----
|
||||
//
|
||||
const { host, pathname } = new URL(url);
|
||||
// Convert e.g. "%20" to spaces.
|
||||
const path = decodeURIComponent(pathname);
|
||||
switch (host) {
|
||||
/* stream://write/path/to/file */
|
||||
/* host-pathname----- */
|
||||
case "read":
|
||||
return handleRead(path);
|
||||
case "write":
|
||||
try {
|
||||
await writeStream(path, request.body);
|
||||
return new Response("", { status: 200 });
|
||||
} catch (e) {
|
||||
log.error(`Failed to write stream for ${url}`, e);
|
||||
return new Response(
|
||||
`Failed to write stream: ${e.message}`,
|
||||
{ status: 500 },
|
||||
);
|
||||
}
|
||||
return handleWrite(path, request);
|
||||
default:
|
||||
return new Response("", { status: 404 });
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
const handleRead = async (path: string) => {
|
||||
try {
|
||||
const res = await net.fetch(pathToFileURL(path).toString());
|
||||
if (res.ok) {
|
||||
// net.fetch already seems to add "Content-Type" and "Last-Modified"
|
||||
// headers, but I couldn't find documentation for this. In any case,
|
||||
// since we already are stat-ting the file for the "Content-Length",
|
||||
// we explicitly add the "X-Last-Modified-Ms" too,
|
||||
//
|
||||
// 1. Guaranteeing its presence,
|
||||
//
|
||||
// 2. Having it be in the exact format we want (no string <-> date
|
||||
// conversions),
|
||||
//
|
||||
// 3. Retaining milliseconds.
|
||||
|
||||
const stat = await fs.stat(path);
|
||||
|
||||
// Add the file's size as the Content-Length header.
|
||||
const fileSize = stat.size;
|
||||
res.headers.set("Content-Length", `${fileSize}`);
|
||||
|
||||
// Add the file's last modified time (as epoch milliseconds).
|
||||
const mtimeMs = stat.mtimeMs;
|
||||
res.headers.set("X-Last-Modified-Ms", `${mtimeMs}`);
|
||||
}
|
||||
return res;
|
||||
} catch (e) {
|
||||
log.error(`Failed to read stream at ${path}`, e);
|
||||
return new Response(`Failed to read stream: ${e.message}`, {
|
||||
status: 500,
|
||||
});
|
||||
}
|
||||
};
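As a hypothetical illustration of why these headers are set explicitly, a consumer on the web layer could read them off the streamed response roughly like this (the fetch call site is assumed, not part of this diff):

    // Hypothetical consumer of the "read" stream: the extra headers carry the
    // file size and mtime without a separate IPC round trip.
    const readViaStream = async (path: string) => {
        const res = await fetch(`stream://read${path}`);
        if (!res.ok) throw new Error(`Failed to read stream at ${path}`);
        return {
            data: new Uint8Array(await res.arrayBuffer()),
            size: Number(res.headers.get("Content-Length")),
            lastModifiedMs: Number(res.headers.get("X-Last-Modified-Ms")),
        };
    };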
|
||||
|
||||
const handleWrite = async (path: string, request: Request) => {
|
||||
try {
|
||||
await writeStream(path, request.body);
|
||||
return new Response("", { status: 200 });
|
||||
} catch (e) {
|
||||
log.error(`Failed to write stream to ${path}`, e);
|
||||
return new Response(`Failed to write stream: ${e.message}`, {
|
||||
status: 500,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Write a (web) ReadableStream to a file at the given {@link filePath}.
|
||||
*
|
||||
|
@ -92,10 +140,7 @@ const convertWebReadableStreamToNode = (readableStream: ReadableStream) => {
|
|||
return rs;
|
||||
};
|
||||
|
||||
const writeNodeStream = async (
|
||||
filePath: string,
|
||||
fileStream: NodeJS.ReadableStream,
|
||||
) => {
|
||||
const writeNodeStream = async (filePath: string, fileStream: Readable) => {
|
||||
const writeable = createWriteStream(filePath);
|
||||
|
||||
fileStream.on("error", (error) => {
|
||||
|
|
|
@ -1,35 +0,0 @@
|
|||
import { app } from "electron/main";
|
||||
import { existsSync } from "node:fs";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "path";
|
||||
|
||||
const CHARACTERS =
|
||||
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
|
||||
|
||||
export async function getTempDirPath() {
|
||||
const tempDirPath = path.join(app.getPath("temp"), "ente");
|
||||
await fs.mkdir(tempDirPath, { recursive: true });
|
||||
return tempDirPath;
|
||||
}
|
||||
|
||||
function generateTempName(length: number) {
|
||||
let result = "";
|
||||
|
||||
const charactersLength = CHARACTERS.length;
|
||||
for (let i = 0; i < length; i++) {
|
||||
result += CHARACTERS.charAt(
|
||||
Math.floor(Math.random() * charactersLength),
|
||||
);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
export async function generateTempFilePath(formatSuffix: string) {
|
||||
let tempFilePath: string;
|
||||
do {
|
||||
const tempDirPath = await getTempDirPath();
|
||||
const namePrefix = generateTempName(10);
|
||||
tempFilePath = path.join(tempDirPath, namePrefix + "-" + formatSuffix);
|
||||
} while (existsSync(tempFilePath));
|
||||
return tempFilePath;
|
||||
}
|
|
@ -33,11 +33,9 @@ export const execAsync = (command: string | string[]) => {
|
|||
? shellescape(command)
|
||||
: command;
|
||||
const startTime = Date.now();
|
||||
log.debug(() => `Running shell command: ${escapedCommand}`);
|
||||
const result = execAsync_(escapedCommand);
|
||||
log.debug(
|
||||
() =>
|
||||
`Completed in ${Math.round(Date.now() - startTime)} ms (${escapedCommand})`,
|
||||
() => `${escapedCommand} (${Math.round(Date.now() - startTime)} ms)`,
|
||||
);
|
||||
return result;
|
||||
};
|
desktop/src/main/utils-temp.ts (new file, 63 lines)
|
@ -0,0 +1,63 @@
|
|||
import { app } from "electron/main";
|
||||
import { existsSync } from "node:fs";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "path";
|
||||
|
||||
/**
|
||||
* Our very own directory within the system temp directory. Go crazy, but
|
||||
* remember to clean up, especially in exception handlers.
|
||||
*/
|
||||
const enteTempDirPath = async () => {
|
||||
const result = path.join(app.getPath("temp"), "ente");
|
||||
await fs.mkdir(result, { recursive: true });
|
||||
return result;
|
||||
};
|
||||
|
||||
/** Generate a random string suitable for being used as a file name prefix */
|
||||
const randomPrefix = () => {
|
||||
const alphabet =
|
||||
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
|
||||
|
||||
let result = "";
|
||||
for (let i = 0; i < 10; i++)
|
||||
result += alphabet[Math.floor(Math.random() * alphabet.length)];
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Return the path to a temporary file with the given {@link extension}.
|
||||
*
|
||||
* The function returns the path to a file in the system temp directory (in an
|
||||
* Ente specific folder therein) with a random prefix and an (optional)
|
||||
* {@link extension}.
|
||||
*
|
||||
* It ensures that there is no existing item with the same name already.
|
||||
*
|
||||
* Use {@link deleteTempFile} to remove this file when you're done.
|
||||
*/
|
||||
export const makeTempFilePath = async (extension?: string) => {
|
||||
const tempDir = await enteTempDirPath();
|
||||
const suffix = extension ? "." + extension : "";
|
||||
let result: string;
|
||||
do {
|
||||
result = path.join(tempDir, randomPrefix() + suffix);
|
||||
} while (existsSync(result));
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Delete a temporary file at the given path if it exists.
|
||||
*
|
||||
* This is the same as a vanilla {@link fs.rm}, except it first checks that the
|
||||
* given path is within the Ente specific directory in the system temp
|
||||
* directory. This acts as an additional safety check.
|
||||
*
|
||||
* @param tempFilePath The path to the temporary file to delete. This path
|
||||
* should've been previously created using {@link makeTempFilePath}.
|
||||
*/
|
||||
export const deleteTempFile = async (tempFilePath: string) => {
|
||||
const tempDir = await enteTempDirPath();
|
||||
if (!tempFilePath.startsWith(tempDir))
|
||||
throw new Error(`Attempting to delete a non-temp file ${tempFilePath}`);
|
||||
await fs.rm(tempFilePath, { force: true });
|
||||
};
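For illustration, these two helpers pair naturally into a scoped pattern like the sketch below; the wrapper itself is hypothetical and not part of this file:

    // Hypothetical wrapper: hand a fresh temp file path to `fn`, and delete it
    // afterwards even if `fn` throws.
    export const withTempFile = async <T>(
        extension: string,
        fn: (tempFilePath: string) => Promise<T>,
    ): Promise<T> => {
        const tempFilePath = await makeTempFilePath(extension);
        try {
            return await fn(tempFilePath);
        } finally {
            await deleteTempFile(tempFilePath);
        }
    };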
|
desktop/src/main/utils.ts (new file, 35 lines)
|
@ -0,0 +1,35 @@
|
|||
/**
|
||||
* @file grab bag of utility functions.
|
||||
*
|
||||
* Many of these are verbatim copies of functions from web code since there
|
||||
* isn't currently a common package that both of them share.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Wait for {@link ms} milliseconds
|
||||
*
|
||||
* This function is a promisified `setTimeout`. It returns a promise that
|
||||
* resolves after {@link ms} milliseconds.
|
||||
*/
|
||||
export const wait = (ms: number) =>
|
||||
new Promise((resolve) => setTimeout(resolve, ms));
|
||||
|
||||
/**
|
||||
* Await the given {@link promise} for {@link ms} milliseconds. If it
|
||||
* does not resolve within {@link ms}, then reject with a timeout error.
|
||||
*/
|
||||
export const withTimeout = async <T>(promise: Promise<T>, ms: number) => {
|
||||
let timeoutId: ReturnType<typeof setTimeout>;
|
||||
const rejectOnTimeout = new Promise<T>((_, reject) => {
|
||||
timeoutId = setTimeout(
|
||||
() => reject(new Error("Operation timed out")),
|
||||
ms,
|
||||
);
|
||||
});
|
||||
const promiseAndCancelTimeout = async () => {
|
||||
const result = await promise;
|
||||
clearTimeout(timeoutId);
|
||||
return result;
|
||||
};
|
||||
return Promise.race([promiseAndCancelTimeout(), rejectOnTimeout]);
|
||||
};
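As an illustrative composition of these two utilities (the operation and the timings below are stand-ins):

    // Hypothetical usage: cap an operation at 30 seconds, and on failure wait
    // 2 seconds before a single retry.
    const attemptTwice = async <T>(operation: () => Promise<T>): Promise<T> => {
        try {
            return await withTimeout(operation(), 30 * 1000);
        } catch {
            await wait(2 * 1000);
            return await withTimeout(operation(), 30 * 1000);
        }
    };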
|
|
@ -122,40 +122,38 @@ const fsWriteFile = (path: string, contents: string): Promise<void> =>
|
|||
const fsIsDir = (dirPath: string): Promise<boolean> =>
|
||||
ipcRenderer.invoke("fsIsDir", dirPath);
|
||||
|
||||
// - AUDIT below this
|
||||
const fsSize = (path: string): Promise<number> =>
|
||||
ipcRenderer.invoke("fsSize", path);
|
||||
|
||||
// - Conversion
|
||||
|
||||
const convertToJPEG = (
|
||||
fileData: Uint8Array,
|
||||
filename: string,
|
||||
): Promise<Uint8Array> =>
|
||||
ipcRenderer.invoke("convertToJPEG", fileData, filename);
|
||||
const convertToJPEG = (imageData: Uint8Array): Promise<Uint8Array> =>
|
||||
ipcRenderer.invoke("convertToJPEG", imageData);
|
||||
|
||||
const generateImageThumbnail = (
|
||||
inputFile: File | ElectronFile,
|
||||
dataOrPath: Uint8Array | string,
|
||||
maxDimension: number,
|
||||
maxSize: number,
|
||||
): Promise<Uint8Array> =>
|
||||
ipcRenderer.invoke(
|
||||
"generateImageThumbnail",
|
||||
inputFile,
|
||||
dataOrPath,
|
||||
maxDimension,
|
||||
maxSize,
|
||||
);
|
||||
|
||||
const runFFmpegCmd = (
|
||||
cmd: string[],
|
||||
inputFile: File | ElectronFile,
|
||||
outputFileName: string,
|
||||
dontTimeout?: boolean,
|
||||
): Promise<File> =>
|
||||
const ffmpegExec = (
|
||||
command: string[],
|
||||
dataOrPath: Uint8Array | string,
|
||||
outputFileExtension: string,
|
||||
timeoutMS: number,
|
||||
): Promise<Uint8Array> =>
|
||||
ipcRenderer.invoke(
|
||||
"runFFmpegCmd",
|
||||
cmd,
|
||||
inputFile,
|
||||
outputFileName,
|
||||
dontTimeout,
|
||||
"ffmpegExec",
|
||||
command,
|
||||
dataOrPath,
|
||||
outputFileExtension,
|
||||
timeoutMS,
|
||||
);
|
||||
|
||||
// - ML
|
||||
|
@ -163,8 +161,10 @@ const runFFmpegCmd = (
|
|||
const clipImageEmbedding = (jpegImageData: Uint8Array): Promise<Float32Array> =>
|
||||
ipcRenderer.invoke("clipImageEmbedding", jpegImageData);
|
||||
|
||||
const clipTextEmbedding = (text: string): Promise<Float32Array> =>
|
||||
ipcRenderer.invoke("clipTextEmbedding", text);
|
||||
const clipTextEmbeddingIfAvailable = (
|
||||
text: string,
|
||||
): Promise<Float32Array | undefined> =>
|
||||
ipcRenderer.invoke("clipTextEmbeddingIfAvailable", text);
|
||||
|
||||
const detectFaces = (input: Float32Array): Promise<Float32Array> =>
|
||||
ipcRenderer.invoke("detectFaces", input);
|
||||
|
@ -253,6 +253,7 @@ const setPendingUploadFiles = (
|
|||
): Promise<void> =>
|
||||
ipcRenderer.invoke("setPendingUploadFiles", type, filePaths);
|
||||
|
||||
// - TODO: AUDIT below this
|
||||
// -
|
||||
|
||||
const getElectronFilesFromGoogleZip = (
|
||||
|
@ -260,45 +261,46 @@ const getElectronFilesFromGoogleZip = (
|
|||
): Promise<ElectronFile[]> =>
|
||||
ipcRenderer.invoke("getElectronFilesFromGoogleZip", filePath);
|
||||
|
||||
const getDirFiles = (dirPath: string): Promise<ElectronFile[]> =>
|
||||
ipcRenderer.invoke("getDirFiles", dirPath);
|
||||
|
||||
//
|
||||
// These objects exposed here will become available to the JS code in our
|
||||
// renderer (the web/ code) as `window.ElectronAPIs.*`
|
||||
//
|
||||
// There are a few related concepts at play here, and it might be worthwhile to
|
||||
// read their (excellent) documentation to get an understanding;
|
||||
//`
|
||||
// - ContextIsolation:
|
||||
// https://www.electronjs.org/docs/latest/tutorial/context-isolation
|
||||
//
|
||||
// - IPC https://www.electronjs.org/docs/latest/tutorial/ipc
|
||||
//
|
||||
// [Note: Transferring large amount of data over IPC]
|
||||
//
|
||||
// Electron's IPC implementation uses the HTML standard Structured Clone
|
||||
// Algorithm to serialize objects passed between processes.
|
||||
// https://www.electronjs.org/docs/latest/tutorial/ipc#object-serialization
|
||||
//
|
||||
// In particular, ArrayBuffer is eligible for structured cloning.
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm
|
||||
//
|
||||
// Also, ArrayBuffer is "transferable", which means it is a zero-copy operation
|
||||
// operation when it happens across threads.
|
||||
// https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Transferable_objects
|
||||
//
|
||||
// In our case though, we're not dealing with threads but separate processes. So
|
||||
// the ArrayBuffer will be copied:
|
||||
// > "parameters, errors and return values are **copied** when they're sent over
|
||||
// the bridge".
|
||||
// https://www.electronjs.org/docs/latest/api/context-bridge#methods
|
||||
//
|
||||
// The copy itself is relatively fast, but the problem with transfering large
|
||||
// amounts of data is potentially running out of memory during the copy.
|
||||
//
|
||||
// For an alternative, see [Note: IPC streams].
|
||||
//
|
||||
/**
|
||||
* These objects exposed here will become available to the JS code in our
|
||||
* renderer (the web/ code) as `window.ElectronAPIs.*`
|
||||
*
|
||||
* There are a few related concepts at play here, and it might be worthwhile to
|
||||
* read their (excellent) documentation to get an understanding;
|
||||
*
|
||||
* - ContextIsolation:
|
||||
* https://www.electronjs.org/docs/latest/tutorial/context-isolation
|
||||
*
|
||||
* - IPC https://www.electronjs.org/docs/latest/tutorial/ipc
|
||||
*
|
||||
* ---
|
||||
*
|
||||
* [Note: Transferring large amount of data over IPC]
|
||||
*
|
||||
* Electron's IPC implementation uses the HTML standard Structured Clone
|
||||
* Algorithm to serialize objects passed between processes.
|
||||
* https://www.electronjs.org/docs/latest/tutorial/ipc#object-serialization
|
||||
*
|
||||
* In particular, ArrayBuffer is eligible for structured cloning.
|
||||
* https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm
|
||||
*
|
||||
* Also, ArrayBuffer is "transferable", which means it is a zero-copy operation
|
||||
* when it happens across threads.
|
||||
* https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Transferable_objects
|
||||
*
|
||||
* In our case though, we're not dealing with threads but separate processes. So
|
||||
* the ArrayBuffer will be copied:
|
||||
*
|
||||
* > "parameters, errors and return values are **copied** when they're sent over
|
||||
* > the bridge".
|
||||
* >
|
||||
* > https://www.electronjs.org/docs/latest/api/context-bridge#methods
|
||||
*
|
||||
* The copy itself is relatively fast, but the problem with transferring large
|
||||
* amounts of data is potentially running out of memory during the copy.
|
||||
*
|
||||
* For an alternative, see [Note: IPC streams].
|
||||
*/
|
||||
contextBridge.exposeInMainWorld("electron", {
|
||||
// - General
|
||||
|
||||
|
@ -329,18 +331,19 @@ contextBridge.exposeInMainWorld("electron", {
|
|||
readTextFile: fsReadTextFile,
|
||||
writeFile: fsWriteFile,
|
||||
isDir: fsIsDir,
|
||||
size: fsSize,
|
||||
},
|
||||
|
||||
// - Conversion
|
||||
|
||||
convertToJPEG,
|
||||
generateImageThumbnail,
|
||||
runFFmpegCmd,
|
||||
ffmpegExec,
|
||||
|
||||
// - ML
|
||||
|
||||
clipImageEmbedding,
|
||||
clipTextEmbedding,
|
||||
clipTextEmbeddingIfAvailable,
|
||||
detectFaces,
|
||||
faceEmbedding,
|
||||
|
||||
|
@ -374,5 +377,4 @@ contextBridge.exposeInMainWorld("electron", {
|
|||
// -
|
||||
|
||||
getElectronFilesFromGoogleZip,
|
||||
getDirFiles,
|
||||
});
|
||||
|
|
|
@ -32,28 +32,13 @@ export interface PendingUploads {
|
|||
}
|
||||
|
||||
/**
|
||||
* Errors that have special semantics on the web side.
|
||||
* See: [Note: Custom errors across Electron/Renderer boundary]
|
||||
*
|
||||
* [Note: Custom errors across Electron/Renderer boundary]
|
||||
*
|
||||
* We need to use the `message` field to disambiguate between errors thrown by
|
||||
* the main process when invoked from the renderer process. This is because:
|
||||
*
|
||||
* > Errors thrown throw `handle` in the main process are not transparent as
|
||||
* > they are serialized and only the `message` property from the original error
|
||||
* > is provided to the renderer process.
|
||||
* >
|
||||
* > - https://www.electronjs.org/docs/latest/tutorial/ipc
|
||||
* >
|
||||
* > Ref: https://github.com/electron/electron/issues/24427
|
||||
* Note: this is not a type, and cannot be used in preload.js; it is only meant
|
||||
* for use in the main process code.
|
||||
*/
|
||||
export const CustomErrors = {
|
||||
WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED:
|
||||
"Windows native image processing is not supported",
|
||||
UNSUPPORTED_PLATFORM: (platform: string, arch: string) =>
|
||||
`Unsupported platform - ${platform} ${arch}`,
|
||||
MODEL_DOWNLOAD_PENDING:
|
||||
"Model download pending, skipping clip search request",
|
||||
export const CustomErrorMessage = {
|
||||
NotAvailable: "This feature is not available on the current OS/arch",
|
||||
};
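Given the note above (only the `message` property crosses the handle/invoke boundary), a web-layer caller has to match on the string itself. A hypothetical sketch, assuming the error is surfaced by one of the exposed methods such as convertToJPEG:

    // Hypothetical web-layer handling. CustomErrorMessage cannot be imported
    // there, so the string is duplicated; Electron prefixes the original
    // message when rethrowing, hence the `includes` check.
    const notAvailable = "This feature is not available on the current OS/arch";

    const convertWithFallback = async (imageData: Uint8Array) => {
        try {
            return await (globalThis as any).electron.convertToJPEG(imageData);
        } catch (e) {
            if (e instanceof Error && e.message.includes(notAvailable)) {
                // Caller falls back to a renderer-side conversion (not shown).
                return undefined;
            }
            throw e;
        }
    };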
|
||||
|
||||
/**
|
||||
|
|
|
@ -18,7 +18,7 @@ A guide written by Green, an ente.io lover
|
|||
Migrating from Authy can be tiring, as you cannot export your 2FA codes through
|
||||
the app, meaning that you would have to reconfigure 2FA for all of your accounts
|
||||
for your new 2FA authenticator. However, easier ways exist to export your codes
|
||||
out of Authy. This guide will cover two of the most used methods for mograting
|
||||
out of Authy. This guide will cover two of the most used methods for migrating
|
||||
from Authy to Ente Authenticator.
|
||||
|
||||
> [!CAUTION]
|
||||
|
|
|
@ -1 +1 @@
|
|||
ente фотографии
|
||||
ente Фото
|
||||
|
|
|
@ -12,19 +12,19 @@ PODS:
|
|||
- Flutter
|
||||
- file_saver (0.0.1):
|
||||
- Flutter
|
||||
- Firebase/CoreOnly (10.22.0):
|
||||
- FirebaseCore (= 10.22.0)
|
||||
- Firebase/Messaging (10.22.0):
|
||||
- Firebase/CoreOnly (10.24.0):
|
||||
- FirebaseCore (= 10.24.0)
|
||||
- Firebase/Messaging (10.24.0):
|
||||
- Firebase/CoreOnly
|
||||
- FirebaseMessaging (~> 10.22.0)
|
||||
- firebase_core (2.29.0):
|
||||
- Firebase/CoreOnly (= 10.22.0)
|
||||
- FirebaseMessaging (~> 10.24.0)
|
||||
- firebase_core (2.30.0):
|
||||
- Firebase/CoreOnly (= 10.24.0)
|
||||
- Flutter
|
||||
- firebase_messaging (14.7.19):
|
||||
- Firebase/Messaging (= 10.22.0)
|
||||
- firebase_messaging (14.8.1):
|
||||
- Firebase/Messaging (= 10.24.0)
|
||||
- firebase_core
|
||||
- Flutter
|
||||
- FirebaseCore (10.22.0):
|
||||
- FirebaseCore (10.24.0):
|
||||
- FirebaseCoreInternal (~> 10.0)
|
||||
- GoogleUtilities/Environment (~> 7.12)
|
||||
- GoogleUtilities/Logger (~> 7.12)
|
||||
|
@ -35,7 +35,7 @@ PODS:
|
|||
- GoogleUtilities/Environment (~> 7.8)
|
||||
- GoogleUtilities/UserDefaults (~> 7.8)
|
||||
- PromisesObjC (~> 2.1)
|
||||
- FirebaseMessaging (10.22.0):
|
||||
- FirebaseMessaging (10.24.0):
|
||||
- FirebaseCore (~> 10.0)
|
||||
- FirebaseInstallations (~> 10.0)
|
||||
- GoogleDataTransport (~> 9.3)
|
||||
|
@ -177,7 +177,7 @@ PODS:
|
|||
- SDWebImage (5.19.1):
|
||||
- SDWebImage/Core (= 5.19.1)
|
||||
- SDWebImage/Core (5.19.1)
|
||||
- SDWebImageWebPCoder (0.14.5):
|
||||
- SDWebImageWebPCoder (0.14.6):
|
||||
- libwebp (~> 1.0)
|
||||
- SDWebImage/Core (~> 5.17)
|
||||
- Sentry/HybridSDK (8.21.0):
|
||||
|
@ -195,14 +195,14 @@ PODS:
|
|||
- sqflite (0.0.3):
|
||||
- Flutter
|
||||
- FlutterMacOS
|
||||
- sqlite3 (3.45.1):
|
||||
- sqlite3/common (= 3.45.1)
|
||||
- sqlite3/common (3.45.1)
|
||||
- sqlite3/fts5 (3.45.1):
|
||||
- "sqlite3 (3.45.3+1)":
|
||||
- "sqlite3/common (= 3.45.3+1)"
|
||||
- "sqlite3/common (3.45.3+1)"
|
||||
- "sqlite3/fts5 (3.45.3+1)":
|
||||
- sqlite3/common
|
||||
- sqlite3/perf-threadsafe (3.45.1):
|
||||
- "sqlite3/perf-threadsafe (3.45.3+1)":
|
||||
- sqlite3/common
|
||||
- sqlite3/rtree (3.45.1):
|
||||
- "sqlite3/rtree (3.45.3+1)":
|
||||
- sqlite3/common
|
||||
- sqlite3_flutter_libs (0.0.1):
|
||||
- Flutter
|
||||
|
@ -410,13 +410,13 @@ SPEC CHECKSUMS:
|
|||
dart_ui_isolate: d5bcda83ca4b04f129d70eb90110b7a567aece14
|
||||
device_info_plus: c6fb39579d0f423935b0c9ce7ee2f44b71b9fce6
|
||||
file_saver: 503e386464dbe118f630e17b4c2e1190fa0cf808
|
||||
Firebase: 797fd7297b7e1be954432743a0b3f90038e45a71
|
||||
firebase_core: aaadbddb3cb2ee3792b9804f9dbb63e5f6f7b55c
|
||||
firebase_messaging: e65050bf9b187511d80ea3a4de7cf5573d2c7543
|
||||
FirebaseCore: 0326ec9b05fbed8f8716cddbf0e36894a13837f7
|
||||
Firebase: 91fefd38712feb9186ea8996af6cbdef41473442
|
||||
firebase_core: 66b99b4fb4e5d7cc4e88d4c195fe986681f3466a
|
||||
firebase_messaging: 0eb0425d28b4f4af147cdd4adcaf7c0100df28ed
|
||||
FirebaseCore: 11dc8a16dfb7c5e3c3f45ba0e191a33ac4f50894
|
||||
FirebaseCoreInternal: bcb5acffd4ea05e12a783ecf835f2210ce3dc6af
|
||||
FirebaseInstallations: 8f581fca6478a50705d2bd2abd66d306e0f5736e
|
||||
FirebaseMessaging: 9f71037fd9db3376a4caa54e5a3949d1027b4b6e
|
||||
FirebaseMessaging: 4d52717dd820707cc4eadec5eb981b4832ec8d5d
|
||||
fk_user_agent: 1f47ec39291e8372b1d692b50084b0d54103c545
|
||||
Flutter: e0871f40cf51350855a761d2e70bf5af5b9b5de7
|
||||
flutter_email_sender: 02d7443217d8c41483223627972bfdc09f74276b
|
||||
|
@ -458,14 +458,14 @@ SPEC CHECKSUMS:
|
|||
receive_sharing_intent: 6837b01768e567fe8562182397bf43d63d8c6437
|
||||
screen_brightness_ios: 715ca807df953bf676d339f11464e438143ee625
|
||||
SDWebImage: 40b0b4053e36c660a764958bff99eed16610acbb
|
||||
SDWebImageWebPCoder: c94f09adbca681822edad9e532ac752db713eabf
|
||||
SDWebImageWebPCoder: e38c0a70396191361d60c092933e22c20d5b1380
|
||||
Sentry: ebc12276bd17613a114ab359074096b6b3725203
|
||||
sentry_flutter: 88ebea3f595b0bc16acc5bedacafe6d60c12dcd5
|
||||
SentryPrivate: d651efb234cf385ec9a1cdd3eff94b5e78a0e0fe
|
||||
share_plus: c3fef564749587fc939ef86ffb283ceac0baf9f5
|
||||
shared_preferences_foundation: b4c3b4cddf1c21f02770737f147a3f5da9d39695
|
||||
sqflite: 673a0e54cc04b7d6dba8d24fb8095b31c3a99eec
|
||||
sqlite3: 73b7fc691fdc43277614250e04d183740cb15078
|
||||
sqlite3: 02d1f07eaaa01f80a1c16b4b31dfcbb3345ee01a
|
||||
sqlite3_flutter_libs: af0e8fe9bce48abddd1ffdbbf839db0302d72d80
|
||||
Toast: 1f5ea13423a1e6674c4abdac5be53587ae481c4e
|
||||
uni_links: d97da20c7701486ba192624d99bffaaffcfc298a
|
||||
|
|
|
@ -64,6 +64,9 @@ class _EnteAppState extends State<EnteApp> with WidgetsBindingObserver {
|
|||
}
|
||||
|
||||
void _checkForWidgetLaunch() {
|
||||
if (Platform.isIOS) {
|
||||
return;
|
||||
}
|
||||
hw.HomeWidget.initiallyLaunchedFromHomeWidget().then(
|
||||
(uri) => HomeWidgetService.instance.onLaunchFromWidget(uri, context),
|
||||
);
|
||||
|
|
|
@ -39,13 +39,6 @@ const dragSensitivity = 8;
|
|||
|
||||
const supportEmail = 'support@ente.io';
|
||||
|
||||
// Default values for various feature flags
|
||||
class FFDefault {
|
||||
static const bool enableStripe = true;
|
||||
static const bool disableCFWorker = false;
|
||||
static const bool enablePasskey = false;
|
||||
}
|
||||
|
||||
// this is the chunk size of the un-encrypted file which is read and encrypted before uploading it as a single part.
|
||||
const multipartPartSize = 20 * 1024 * 1024;
|
||||
|
||||
|
|
|
@ -22,12 +22,12 @@ import 'package:photos/db/upload_locks_db.dart';
|
|||
import 'package:photos/ente_theme_data.dart';
|
||||
import "package:photos/face/db.dart";
|
||||
import "package:photos/l10n/l10n.dart";
|
||||
import "package:photos/service_locator.dart";
|
||||
import 'package:photos/services/app_lifecycle_service.dart';
|
||||
import 'package:photos/services/billing_service.dart';
|
||||
import 'package:photos/services/collections_service.dart';
|
||||
import "package:photos/services/entity_service.dart";
|
||||
import 'package:photos/services/favorites_service.dart';
|
||||
import 'package:photos/services/feature_flag_service.dart';
|
||||
import 'package:photos/services/home_widget_service.dart';
|
||||
import 'package:photos/services/local_file_update_service.dart';
|
||||
import 'package:photos/services/local_sync_service.dart';
|
||||
|
@ -182,6 +182,7 @@ Future<void> _init(bool isBackground, {String via = ''}) async {
|
|||
_isProcessRunning = true;
|
||||
_logger.info("Initializing... inBG =$isBackground via: $via");
|
||||
final SharedPreferences preferences = await SharedPreferences.getInstance();
|
||||
|
||||
await _logFGHeartBeatInfo();
|
||||
unawaited(_scheduleHeartBeat(preferences, isBackground));
|
||||
AppLifecycleService.instance.init(preferences);
|
||||
|
@ -195,6 +196,7 @@ Future<void> _init(bool isBackground, {String via = ''}) async {
|
|||
CryptoUtil.init();
|
||||
await Configuration.instance.init();
|
||||
await NetworkClient.instance.init();
|
||||
ServiceLocator.instance.init(preferences, NetworkClient.instance.enteDio);
|
||||
await UserService.instance.init();
|
||||
await EntityService.instance.init();
|
||||
LocationService.instance.init(preferences);
|
||||
|
@ -229,7 +231,7 @@ Future<void> _init(bool isBackground, {String via = ''}) async {
|
|||
);
|
||||
});
|
||||
}
|
||||
unawaited(FeatureFlagService.instance.init());
|
||||
|
||||
unawaited(SemanticSearchService.instance.init());
|
||||
MachineLearningController.instance.init();
|
||||
// Can not including existing tf/ml binaries as they are not being built
|
||||
|
@ -379,7 +381,7 @@ Future<void> _logFGHeartBeatInfo() async {
|
|||
final String lastRun = lastFGTaskHeartBeatTime == 0
|
||||
? 'never'
|
||||
: DateTime.fromMicrosecondsSinceEpoch(lastFGTaskHeartBeatTime).toString();
|
||||
_logger.info('isAlreaduunningFG: $isRunningInFG, last Beat: $lastRun');
|
||||
_logger.info('isAlreadyRunningFG: $isRunningInFG, last Beat: $lastRun');
|
||||
}
|
||||
|
||||
void _scheduleSuicide(Duration duration, [String? taskID]) {
|
||||
|
|
|
@ -9,7 +9,7 @@ import 'package:photos/core/constants.dart';
|
|||
import 'package:photos/models/file/file_type.dart';
|
||||
import 'package:photos/models/location/location.dart';
|
||||
import "package:photos/models/metadata/file_magic.dart";
|
||||
import 'package:photos/services/feature_flag_service.dart';
|
||||
import "package:photos/service_locator.dart";
|
||||
import 'package:photos/utils/date_time_util.dart';
|
||||
import 'package:photos/utils/exif_util.dart';
|
||||
import 'package:photos/utils/file_uploader_util.dart';
|
||||
|
@ -247,8 +247,7 @@ class EnteFile {
|
|||
return "$localFileServer/$uploadedFileID";
|
||||
}
|
||||
final endpoint = Configuration.instance.getHttpEndpoint();
|
||||
if (endpoint != kDefaultProductionEndpoint ||
|
||||
FeatureFlagService.instance.disableCFWorker()) {
|
||||
if (endpoint != kDefaultProductionEndpoint || flagService.disableCFWorker) {
|
||||
return endpoint + "/files/download/" + uploadedFileID.toString();
|
||||
} else {
|
||||
return "https://files.ente.io/?fileID=" + uploadedFileID.toString();
|
||||
|
@ -264,8 +263,7 @@ class EnteFile {
|
|||
return "$localFileServer/thumb/$uploadedFileID";
|
||||
}
|
||||
final endpoint = Configuration.instance.getHttpEndpoint();
|
||||
if (endpoint != kDefaultProductionEndpoint ||
|
||||
FeatureFlagService.instance.disableCFWorker()) {
|
||||
if (endpoint != kDefaultProductionEndpoint || flagService.disableCFWorker) {
|
||||
return endpoint + "/files/preview/" + uploadedFileID.toString();
|
||||
} else {
|
||||
return "https://thumbnails.ente.io/?fileID=" + uploadedFileID.toString();
|
||||
|
|
mobile/lib/service_locator.dart (new file, 28 lines)
|
@ -0,0 +1,28 @@
|
|||
import "package:dio/dio.dart";
|
||||
import "package:ente_feature_flag/ente_feature_flag.dart";
|
||||
import "package:shared_preferences/shared_preferences.dart";
|
||||
|
||||
class ServiceLocator {
|
||||
late final SharedPreferences prefs;
|
||||
late final Dio enteDio;
|
||||
|
||||
// instance
|
||||
ServiceLocator._privateConstructor();
|
||||
|
||||
static final ServiceLocator instance = ServiceLocator._privateConstructor();
|
||||
|
||||
init(SharedPreferences prefs, Dio enteDio) {
|
||||
this.prefs = prefs;
|
||||
this.enteDio = enteDio;
|
||||
}
|
||||
}
|
||||
|
||||
FlagService? _flagService;
|
||||
|
||||
FlagService get flagService {
|
||||
_flagService ??= FlagService(
|
||||
ServiceLocator.instance.prefs,
|
||||
ServiceLocator.instance.enteDio,
|
||||
);
|
||||
return _flagService!;
|
||||
}
|
|
@ -30,9 +30,9 @@ import 'package:photos/models/collection/collection_items.dart';
|
|||
import 'package:photos/models/file/file.dart';
|
||||
import "package:photos/models/files_split.dart";
|
||||
import "package:photos/models/metadata/collection_magic.dart";
|
||||
import "package:photos/service_locator.dart";
|
||||
import 'package:photos/services/app_lifecycle_service.dart';
|
||||
import "package:photos/services/favorites_service.dart";
|
||||
import "package:photos/services/feature_flag_service.dart";
|
||||
import 'package:photos/services/file_magic_service.dart';
|
||||
import 'package:photos/services/local_sync_service.dart';
|
||||
import 'package:photos/services/remote_sync_service.dart';
|
||||
|
@ -189,6 +189,23 @@ class CollectionsService {
|
|||
return result;
|
||||
}
|
||||
|
||||
bool allowUpload(int collectionID) {
|
||||
final Collection? c = _collectionIDToCollections[collectionID];
|
||||
if (c == null) {
|
||||
_logger.info('discardUpload: collectionMissing $collectionID');
|
||||
return false;
|
||||
}
|
||||
if (c.isDeleted) {
|
||||
_logger.info('discardUpload: collectionDeleted $collectionID');
|
||||
return false;
|
||||
}
|
||||
if (!c.isOwner(_config.getUserID()!)) {
|
||||
_logger.info('discardUpload: notOwner $collectionID');
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
Future<List<Collection>> getArchivedCollection() async {
|
||||
final allCollections = getCollectionsForUI();
|
||||
return allCollections
|
||||
|
@ -1162,7 +1179,7 @@ class CollectionsService {
|
|||
await _addToCollection(dstCollectionID, splitResult.ownedByCurrentUser);
|
||||
}
|
||||
if (splitResult.ownedByOtherUsers.isNotEmpty) {
|
||||
if (!FeatureFlagService.instance.isInternalUserOrDebugBuild()) {
|
||||
if (!flagService.internalUser) {
|
||||
throw ArgumentError('Cannot add files owned by other users');
|
||||
}
|
||||
late final List<EnteFile> filesToCopy;
|
||||
|
|
|
@ -1,142 +0,0 @@
|
|||
import 'dart:convert';
|
||||
import 'dart:io';
|
||||
|
||||
import 'package:flutter/foundation.dart';
|
||||
import 'package:logging/logging.dart';
|
||||
import 'package:photos/core/configuration.dart';
|
||||
import 'package:photos/core/constants.dart';
|
||||
import 'package:photos/core/network/network.dart';
|
||||
import 'package:shared_preferences/shared_preferences.dart';
|
||||
|
||||
class FeatureFlagService {
|
||||
FeatureFlagService._privateConstructor();
|
||||
|
||||
static final FeatureFlagService instance =
|
||||
FeatureFlagService._privateConstructor();
|
||||
static const _featureFlagsKey = "feature_flags_key";
|
||||
static final _internalUserIDs = const String.fromEnvironment(
|
||||
"internal_user_ids",
|
||||
defaultValue: "1,2,3,4,191,125,1580559962388044,1580559962392434,10000025",
|
||||
).split(",").map((element) {
|
||||
return int.parse(element);
|
||||
}).toSet();
|
||||
|
||||
final _logger = Logger("FeatureFlagService");
|
||||
FeatureFlags? _featureFlags;
|
||||
late SharedPreferences _prefs;
|
||||
|
||||
Future<void> init() async {
|
||||
_prefs = await SharedPreferences.getInstance();
|
||||
// Fetch feature flags from network in async manner.
|
||||
// Intention of delay is to give more CPU cycles to other tasks
|
||||
Future.delayed(
|
||||
const Duration(seconds: 5),
|
||||
() {
|
||||
fetchFeatureFlags();
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
FeatureFlags _getFeatureFlags() {
|
||||
_featureFlags ??=
|
||||
FeatureFlags.fromJson(_prefs.getString(_featureFlagsKey)!);
|
||||
// if nothing is cached, use defaults as temporary fallback
|
||||
if (_featureFlags == null) {
|
||||
return FeatureFlags.defaultFlags;
|
||||
}
|
||||
return _featureFlags!;
|
||||
}
|
||||
|
||||
bool disableCFWorker() {
|
||||
try {
|
||||
return _getFeatureFlags().disableCFWorker;
|
||||
} catch (e) {
|
||||
_logger.severe(e);
|
||||
return FFDefault.disableCFWorker;
|
||||
}
|
||||
}
|
||||
|
||||
bool enableStripe() {
|
||||
if (Platform.isIOS) {
|
||||
return false;
|
||||
}
|
||||
try {
|
||||
return _getFeatureFlags().enableStripe;
|
||||
} catch (e) {
|
||||
_logger.severe(e);
|
||||
return FFDefault.enableStripe;
|
||||
}
|
||||
}
|
||||
|
||||
bool enablePasskey() {
|
||||
try {
|
||||
if (isInternalUserOrDebugBuild()) {
|
||||
return true;
|
||||
}
|
||||
return _getFeatureFlags().enablePasskey;
|
||||
} catch (e) {
|
||||
_logger.info('error in enablePasskey check', e);
|
||||
return FFDefault.enablePasskey;
|
||||
}
|
||||
}
|
||||
|
||||
bool isInternalUserOrDebugBuild() {
|
||||
final String? email = Configuration.instance.getEmail();
|
||||
final userID = Configuration.instance.getUserID();
|
||||
return (email != null && email.endsWith("@ente.io")) ||
|
||||
_internalUserIDs.contains(userID) ||
|
||||
kDebugMode;
|
||||
}
|
||||
|
||||
Future<void> fetchFeatureFlags() async {
|
||||
try {
|
||||
final response = await NetworkClient.instance
|
||||
.getDio()
|
||||
.get("https://static.ente.io/feature_flags.json");
|
||||
final flagsResponse = FeatureFlags.fromMap(response.data);
|
||||
await _prefs.setString(_featureFlagsKey, flagsResponse.toJson());
|
||||
_featureFlags = flagsResponse;
|
||||
} catch (e) {
|
||||
_logger.severe("Failed to sync feature flags ", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class FeatureFlags {
|
||||
static FeatureFlags defaultFlags = FeatureFlags(
|
||||
disableCFWorker: FFDefault.disableCFWorker,
|
||||
enableStripe: FFDefault.enableStripe,
|
||||
enablePasskey: FFDefault.enablePasskey,
|
||||
);
|
||||
|
||||
final bool disableCFWorker;
|
||||
final bool enableStripe;
|
||||
final bool enablePasskey;
|
||||
|
||||
FeatureFlags({
|
||||
required this.disableCFWorker,
|
||||
required this.enableStripe,
|
||||
required this.enablePasskey,
|
||||
});
|
||||
|
||||
Map<String, dynamic> toMap() {
|
||||
return {
|
||||
"disableCFWorker": disableCFWorker,
|
||||
"enableStripe": enableStripe,
|
||||
"enablePasskey": enablePasskey,
|
||||
};
|
||||
}
|
||||
|
||||
String toJson() => json.encode(toMap());
|
||||
|
||||
factory FeatureFlags.fromJson(String source) =>
|
||||
FeatureFlags.fromMap(json.decode(source));
|
||||
|
||||
factory FeatureFlags.fromMap(Map<String, dynamic> json) {
|
||||
return FeatureFlags(
|
||||
disableCFWorker: json["disableCFWorker"] ?? FFDefault.disableCFWorker,
|
||||
enableStripe: json["enableStripe"] ?? FFDefault.enableStripe,
|
||||
enablePasskey: json["enablePasskey"] ?? FFDefault.enablePasskey,
|
||||
);
|
||||
}
|
||||
}
|
|
@ -23,9 +23,9 @@ import "package:photos/models/file/extensions/file_props.dart";
|
|||
import 'package:photos/models/file/file.dart';
|
||||
import 'package:photos/models/file/file_type.dart';
|
||||
import 'package:photos/models/upload_strategy.dart';
|
||||
import "package:photos/service_locator.dart";
|
||||
import 'package:photos/services/app_lifecycle_service.dart';
|
||||
import 'package:photos/services/collections_service.dart';
|
||||
import "package:photos/services/feature_flag_service.dart";
|
||||
import 'package:photos/services/ignored_files_service.dart';
|
||||
import 'package:photos/services/local_file_update_service.dart';
|
||||
import "package:photos/services/notification_service.dart";
|
||||
|
@ -185,7 +185,7 @@ class RemoteSyncService {
|
|||
rethrow;
|
||||
} else {
|
||||
_logger.severe("Error executing remote sync ", e, s);
|
||||
if (FeatureFlagService.instance.isInternalUserOrDebugBuild()) {
|
||||
if (flagService.internalUser) {
|
||||
rethrow;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -73,9 +73,13 @@ class UpdateService {
|
|||
return _latestVersion;
|
||||
}
|
||||
|
||||
Future<bool> shouldShowUpdateNoification() async {
|
||||
Future<bool> shouldShowUpdateNotification() async {
|
||||
final shouldUpdate = await this.shouldUpdate();
|
||||
|
||||
if (!shouldUpdate) {
|
||||
return false;
|
||||
}
|
||||
|
||||
final lastNotificationShownTime =
|
||||
_prefs.getInt(kUpdateAvailableShownTimeKey) ?? 0;
|
||||
final now = DateTime.now().microsecondsSinceEpoch;
|
||||
|
@ -87,7 +91,7 @@ class UpdateService {
|
|||
}
|
||||
|
||||
Future<void> showUpdateNotification() async {
|
||||
if (await shouldShowUpdateNoification()) {
|
||||
if (await shouldShowUpdateNotification()) {
|
||||
// ignore: unawaited_futures
|
||||
NotificationService.instance.showNotification(
|
||||
"Update available",
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import 'package:flutter/cupertino.dart';
|
||||
import 'package:photos/core/configuration.dart';
|
||||
import 'package:photos/services/feature_flag_service.dart';
|
||||
import "package:photos/service_locator.dart";
|
||||
import 'package:photos/services/update_service.dart';
|
||||
import "package:photos/ui/payment/store_subscription_page.dart";
|
||||
import 'package:photos/ui/payment/stripe_subscription_page.dart';
|
||||
|
@ -9,8 +9,7 @@ StatefulWidget getSubscriptionPage({bool isOnBoarding = false}) {
|
|||
if (UpdateService.instance.isIndependentFlavor()) {
|
||||
return StripeSubscriptionPage(isOnboarding: isOnBoarding);
|
||||
}
|
||||
if (FeatureFlagService.instance.enableStripe() &&
|
||||
_isUserCreatedPostStripeSupport()) {
|
||||
if (flagService.enableStripe && _isUserCreatedPostStripeSupport()) {
|
||||
return StripeSubscriptionPage(isOnboarding: isOnBoarding);
|
||||
} else {
|
||||
return StoreSubscriptionPage(isOnboarding: isOnBoarding);
|
||||
|
|
|
@ -5,7 +5,7 @@ import "package:intl/intl.dart";
|
|||
import "package:photos/core/event_bus.dart";
|
||||
import 'package:photos/events/embedding_updated_event.dart';
|
||||
import "package:photos/generated/l10n.dart";
|
||||
import "package:photos/services/feature_flag_service.dart";
|
||||
import "package:photos/service_locator.dart";
|
||||
import 'package:photos/services/machine_learning/semantic_search/frameworks/ml_framework.dart';
|
||||
import 'package:photos/services/machine_learning/semantic_search/semantic_search_service.dart';
|
||||
import "package:photos/theme/ente_theme.dart";
|
||||
|
@ -151,7 +151,7 @@ class _MachineLearningSettingsPageState
|
|||
const SizedBox(
|
||||
height: 12,
|
||||
),
|
||||
FeatureFlagService.instance.isInternalUserOrDebugBuild()
|
||||
flagService.internalUser
|
||||
? MenuItemWidget(
|
||||
leadingIcon: Icons.delete_sweep_outlined,
|
||||
captionedTextWidget: CaptionedTextWidget(
|
||||
|
|
|
@ -10,7 +10,7 @@ import 'package:photos/events/two_factor_status_change_event.dart';
|
|||
import "package:photos/generated/l10n.dart";
|
||||
import "package:photos/l10n/l10n.dart";
|
||||
import "package:photos/models/user_details.dart";
|
||||
import "package:photos/services/feature_flag_service.dart";
|
||||
import 'package:photos/service_locator.dart';
|
||||
import 'package:photos/services/local_authentication_service.dart';
|
||||
import "package:photos/services/passkey_service.dart";
|
||||
import 'package:photos/services/user_service.dart';
|
||||
|
@ -70,8 +70,6 @@ class _SecuritySectionWidgetState extends State<SecuritySectionWidget> {
|
|||
final Completer completer = Completer();
|
||||
final List<Widget> children = [];
|
||||
if (_config.hasConfiguredAccount()) {
|
||||
final bool isInternalUser =
|
||||
FeatureFlagService.instance.isInternalUserOrDebugBuild();
|
||||
children.addAll(
|
||||
[
|
||||
sectionOptionSpacing,
|
||||
|
@ -103,8 +101,8 @@ class _SecuritySectionWidgetState extends State<SecuritySectionWidget> {
|
|||
},
|
||||
),
|
||||
),
|
||||
if (isInternalUser) sectionOptionSpacing,
|
||||
if (isInternalUser)
|
||||
if (flagService.passKeyEnabled) sectionOptionSpacing,
|
||||
if (flagService.passKeyEnabled)
|
||||
MenuItemWidget(
|
||||
captionedTextWidget: CaptionedTextWidget(
|
||||
title: context.l10n.passkey,
|
||||
|
|
|
@ -7,6 +7,7 @@ import 'package:photos/core/configuration.dart';
|
|||
import 'package:photos/core/event_bus.dart';
|
||||
import 'package:photos/events/opened_settings_event.dart';
|
||||
import "package:photos/generated/l10n.dart";
|
||||
import "package:photos/service_locator.dart";
|
||||
import "package:photos/services/storage_bonus_service.dart";
|
||||
import 'package:photos/theme/colors.dart';
|
||||
import 'package:photos/theme/ente_theme.dart';
|
||||
|
@ -144,7 +145,7 @@ class SettingsPage extends StatelessWidget {
|
|||
const AboutSectionWidget(),
|
||||
]);
|
||||
|
||||
if (hasLoggedIn) {
|
||||
if (hasLoggedIn && flagService.internalUser) {
|
||||
contents.addAll([sectionSpacing, const DebugSectionWidget()]);
|
||||
contents.addAll([sectionSpacing, const FaceDebugSectionWidget()]);
|
||||
}
|
||||
|
|
|
@ -195,7 +195,7 @@ class _HomeWidgetState extends State<HomeWidget> {
|
|||
},
|
||||
);
|
||||
_initDeepLinks();
|
||||
UpdateService.instance.shouldShowUpdateNoification().then((value) {
|
||||
UpdateService.instance.shouldShowUpdateNotification().then((value) {
|
||||
Future.delayed(Duration.zero, () {
|
||||
if (value) {
|
||||
showDialog(
|
||||
|
|
|
@ -7,7 +7,7 @@ import 'package:path_provider/path_provider.dart';
|
|||
import 'package:photos/core/cache/video_cache_manager.dart';
|
||||
import 'package:photos/core/configuration.dart';
|
||||
import "package:photos/generated/l10n.dart";
|
||||
import 'package:photos/services/feature_flag_service.dart';
|
||||
import "package:photos/service_locator.dart";
|
||||
import 'package:photos/theme/ente_theme.dart';
|
||||
import 'package:photos/ui/components/buttons/icon_button_widget.dart';
|
||||
import 'package:photos/ui/components/captioned_text_widget.dart';
|
||||
|
@ -34,7 +34,7 @@ class _AppStorageViewerState extends State<AppStorageViewer> {
|
|||
|
||||
@override
|
||||
void initState() {
|
||||
internalUser = FeatureFlagService.instance.isInternalUserOrDebugBuild();
|
||||
internalUser = flagService.internalUser;
|
||||
addPath();
|
||||
super.initState();
|
||||
}
|
||||
|
|
|
@ -18,8 +18,8 @@ import 'package:photos/models/files_split.dart';
|
|||
import 'package:photos/models/gallery_type.dart';
|
||||
import "package:photos/models/metadata/common_keys.dart";
|
||||
import 'package:photos/models/selected_files.dart';
|
||||
import "package:photos/service_locator.dart";
|
||||
import 'package:photos/services/collections_service.dart';
|
||||
import "package:photos/services/feature_flag_service.dart";
|
||||
import 'package:photos/services/hidden_service.dart';
|
||||
import 'package:photos/services/machine_learning/face_ml/feedback/cluster_feedback.dart';
|
||||
import "package:photos/services/machine_learning/face_ml/person/person_service.dart";
|
||||
|
@ -108,7 +108,7 @@ class _FileSelectionActionsWidgetState
|
|||
|
||||
@override
|
||||
Widget build(BuildContext context) {
|
||||
_isInternalUser = FeatureFlagService.instance.isInternalUserOrDebugBuild();
|
||||
_isInternalUser = flagService.internalUser;
|
||||
final ownedFilesCount = split.ownedByCurrentUser.length;
|
||||
final ownedAndPendingUploadFilesCount =
|
||||
ownedFilesCount + split.pendingUploads.length;
|
||||
|
|
|
@ -18,8 +18,8 @@ import 'package:photos/models/file/trash_file.dart';
|
|||
import 'package:photos/models/ignored_file.dart';
|
||||
import "package:photos/models/metadata/common_keys.dart";
|
||||
import 'package:photos/models/selected_files.dart';
|
||||
import "package:photos/service_locator.dart";
|
||||
import 'package:photos/services/collections_service.dart';
|
||||
import "package:photos/services/feature_flag_service.dart";
|
||||
import 'package:photos/services/hidden_service.dart';
|
||||
import 'package:photos/services/ignored_files_service.dart';
|
||||
import 'package:photos/services/local_sync_service.dart';
|
||||
|
@ -141,16 +141,10 @@ class FileAppBarState extends State<FileAppBar> {
|
|||
);
|
||||
}
|
||||
// only show fav option for files owned by the user
|
||||
if ((isOwnedByUser ||
|
||||
FeatureFlagService.instance.isInternalUserOrDebugBuild()) &&
|
||||
if ((isOwnedByUser || flagService.internalUser) &&
|
||||
!isFileHidden &&
|
||||
isFileUploaded) {
|
||||
_actions.add(
|
||||
Padding(
|
||||
padding: const EdgeInsets.all(8),
|
||||
child: FavoriteWidget(widget.file),
|
||||
),
|
||||
);
|
||||
_actions.add(FavoriteWidget(widget.file));
|
||||
}
|
||||
if (!isFileUploaded) {
|
||||
_actions.add(
|
||||
|
|
|
@ -9,7 +9,7 @@ import 'package:photos/core/constants.dart';
|
|||
import "package:photos/generated/l10n.dart";
|
||||
import "package:photos/models/file/extensions/file_props.dart";
|
||||
import 'package:photos/models/file/file.dart';
|
||||
import "package:photos/services/feature_flag_service.dart";
|
||||
import "package:photos/service_locator.dart";
|
||||
import 'package:photos/services/files_service.dart';
|
||||
import "package:photos/ui/actions/file/file_actions.dart";
|
||||
import 'package:photos/ui/viewer/file/thumbnail_widget.dart';
|
||||
|
@ -161,8 +161,7 @@ class _VideoWidgetState extends State<VideoWidget> {
|
|||
}
|
||||
}).onError(
|
||||
(error, stackTrace) {
|
||||
if (mounted &&
|
||||
FeatureFlagService.instance.isInternalUserOrDebugBuild()) {
|
||||
if (mounted && flagService.internalUser) {
|
||||
if (error is Exception) {
|
||||
showErrorDialogForException(
|
||||
context: context,
|
||||
|
|
|
@ -44,11 +44,13 @@ class _FavoriteWidgetState extends State<FavoriteWidget> {
|
|||
final bool isLiked = snapshot.data ?? false;
|
||||
return _isLoading
|
||||
? const EnteLoadingWidget(
|
||||
size: 12,
|
||||
size: 14,
|
||||
padding: 2,
|
||||
) // Add this line
|
||||
: LikeButton(
|
||||
size: 24,
|
||||
isLiked: isLiked,
|
||||
padding: const EdgeInsets.all(2),
|
||||
onTap: (oldValue) async {
|
||||
if (widget.file.uploadedFileID == null ||
|
||||
widget.file.ownerID !=
|
||||
|
|
|
@ -19,8 +19,8 @@ import 'package:photos/models/device_collection.dart';
|
|||
import 'package:photos/models/gallery_type.dart';
|
||||
import "package:photos/models/metadata/common_keys.dart";
|
||||
import 'package:photos/models/selected_files.dart';
|
||||
import 'package:photos/service_locator.dart';
|
||||
import 'package:photos/services/collections_service.dart';
|
||||
import "package:photos/services/feature_flag_service.dart";
|
||||
import 'package:photos/services/sync_service.dart';
|
||||
import 'package:photos/services/update_service.dart';
|
||||
import 'package:photos/ui/actions/collection/collection_sharing_actions.dart';
|
||||
|
@ -96,7 +96,7 @@ class _GalleryAppBarWidgetState extends State<GalleryAppBarWidget> {
|
|||
_selectedFilesListener = () {
|
||||
setState(() {});
|
||||
};
|
||||
isInternalUser = FeatureFlagService.instance.isInternalUserOrDebugBuild();
|
||||
isInternalUser = flagService.internalUser;
|
||||
collectionActions = CollectionActions(CollectionsService.instance);
|
||||
widget.selectedFiles.addListener(_selectedFilesListener);
|
||||
_userAuthEventSubscription =
|
||||
|
|
|
@ -5,7 +5,7 @@ import "package:flutter/services.dart";
|
|||
import "package:photos/generated/l10n.dart";
|
||||
import 'package:photos/models/button_result.dart';
|
||||
import 'package:photos/models/typedefs.dart';
|
||||
import "package:photos/services/feature_flag_service.dart";
|
||||
import "package:photos/service_locator.dart";
|
||||
import 'package:photos/theme/colors.dart';
|
||||
import 'package:photos/ui/common/loading_widget.dart';
|
||||
import 'package:photos/ui/common/progress_dialog.dart';
|
||||
|
@ -91,8 +91,7 @@ String parseErrorForUI(
|
|||
}
|
||||
}
|
||||
// return generic error if the user is not internal and the error is not in debug mode
|
||||
if (!(FeatureFlagService.instance.isInternalUserOrDebugBuild() &&
|
||||
kDebugMode)) {
|
||||
if (!(flagService.internalUser && kDebugMode)) {
|
||||
return genericError;
|
||||
}
|
||||
String errorInfo = "";
|
||||
|
|
|
@ -29,7 +29,6 @@ import "package:photos/models/metadata/file_magic.dart";
|
|||
import 'package:photos/models/upload_url.dart';
|
||||
import "package:photos/models/user_details.dart";
|
||||
import 'package:photos/services/collections_service.dart';
|
||||
import "package:photos/services/feature_flag_service.dart";
|
||||
import "package:photos/services/file_magic_service.dart";
|
||||
import 'package:photos/services/local_sync_service.dart';
|
||||
import 'package:photos/services/sync_service.dart';
|
||||
|
@ -402,6 +401,16 @@ class FileUploader {
|
|||
_logger.severe('Trying to upload file with missing localID');
|
||||
return file;
|
||||
}
|
||||
if (!CollectionsService.instance.allowUpload(collectionID)) {
|
||||
_logger.warning(
|
||||
'Upload not allowed for collection $collectionID',
|
||||
);
|
||||
if (!file.isUploaded && file.generatedID != null) {
|
||||
_logger.info("Deleting file entry for " + file.toString());
|
||||
await FilesDB.instance.deleteByGeneratedID(file.generatedID!);
|
||||
}
|
||||
return file;
|
||||
}
|
||||
|
||||
final String lockKey = file.localID!;
|
||||
|
||||
|
@ -497,7 +506,7 @@ class FileUploader {
|
|||
|
||||
// Calculate the number of parts for the file. Multiple part upload
|
||||
// is only enabled for internal users and debug builds till it's battle tested.
|
||||
final count = FeatureFlagService.instance.isInternalUserOrDebugBuild()
|
||||
final count = kDebugMode
|
||||
? await calculatePartCount(
|
||||
await encryptedFile.length(),
|
||||
)
|
||||
|
|
|
@ -6,7 +6,7 @@ import "package:dio/dio.dart";
|
|||
import "package:logging/logging.dart";
|
||||
import "package:photos/core/constants.dart";
|
||||
import "package:photos/core/network/network.dart";
|
||||
import "package:photos/services/feature_flag_service.dart";
|
||||
import "package:photos/service_locator.dart";
|
||||
import "package:photos/utils/xml_parser_util.dart";
|
||||
|
||||
final _enteDio = NetworkClient.instance.enteDio;
|
||||
|
@ -58,7 +58,7 @@ Future<int> calculatePartCount(int fileSize) async {
|
|||
Future<MultipartUploadURLs> getMultipartUploadURLs(int count) async {
|
||||
try {
|
||||
assert(
|
||||
FeatureFlagService.instance.isInternalUserOrDebugBuild(),
|
||||
flagService.internalUser,
|
||||
"Multipart upload should not be enabled for external users.",
|
||||
);
|
||||
final response = await _enteDio.get(
|
||||
|
|
mobile/plugins/ente_feature_flag/.metadata (new file, 10 lines)
|
@ -0,0 +1,10 @@
|
|||
# This file tracks properties of this Flutter project.
|
||||
# Used by Flutter tool to assess capabilities and perform upgrades etc.
|
||||
#
|
||||
# This file should be version controlled and should not be manually edited.
|
||||
|
||||
version:
|
||||
revision: 0b8abb4724aa590dd0f429683339b1e045a1594d
|
||||
channel: stable
|
||||
|
||||
project_type: plugin
|
mobile/plugins/ente_feature_flag/analysis_options.yaml (new file, 1 line)
|
@ -0,0 +1 @@
|
|||
include: ../../analysis_options.yaml
|
|
@ -0,0 +1 @@
|
|||
export 'src/service.dart';
|
mobile/plugins/ente_feature_flag/lib/src/model.dart (new file, 66 lines)
|
@ -0,0 +1,66 @@
|
|||
import "dart:convert";
|
||||
import "dart:io";
|
||||
|
||||
import "package:flutter/foundation.dart";
|
||||
|
||||
class RemoteFlags {
|
||||
final bool enableStripe;
|
||||
final bool disableCFWorker;
|
||||
final bool mapEnabled;
|
||||
final bool faceSearchEnabled;
|
||||
final bool passKeyEnabled;
|
||||
final bool recoveryKeyVerified;
|
||||
final bool internalUser;
|
||||
final bool betaUser;
|
||||
|
||||
RemoteFlags({
|
||||
required this.enableStripe,
|
||||
required this.disableCFWorker,
|
||||
required this.mapEnabled,
|
||||
required this.faceSearchEnabled,
|
||||
required this.passKeyEnabled,
|
||||
required this.recoveryKeyVerified,
|
||||
required this.internalUser,
|
||||
required this.betaUser,
|
||||
});
|
||||
|
||||
static RemoteFlags defaultValue = RemoteFlags(
|
||||
enableStripe: Platform.isAndroid,
|
||||
disableCFWorker: false,
|
||||
mapEnabled: false,
|
||||
faceSearchEnabled: false,
|
||||
passKeyEnabled: false,
|
||||
recoveryKeyVerified: false,
|
||||
internalUser: kDebugMode,
|
||||
betaUser: kDebugMode,
|
||||
);
|
||||
|
||||
String toJson() => json.encode(toMap());
|
||||
Map<String, dynamic> toMap() {
|
||||
return {
|
||||
'enableStripe': enableStripe,
|
||||
'disableCFWorker': disableCFWorker,
|
||||
'mapEnabled': mapEnabled,
|
||||
'faceSearchEnabled': faceSearchEnabled,
|
||||
'passKeyEnabled': passKeyEnabled,
|
||||
'recoveryKeyVerified': recoveryKeyVerified,
|
||||
'internalUser': internalUser,
|
||||
'betaUser': betaUser,
|
||||
};
|
||||
}
|
||||
|
||||
factory RemoteFlags.fromMap(Map<String, dynamic> map) {
|
||||
return RemoteFlags(
|
||||
enableStripe: map['enableStripe'] ?? defaultValue.enableStripe,
|
||||
disableCFWorker: map['disableCFWorker'] ?? defaultValue.disableCFWorker,
|
||||
mapEnabled: map['mapEnabled'] ?? defaultValue.mapEnabled,
|
||||
faceSearchEnabled:
|
||||
map['faceSearchEnabled'] ?? defaultValue.faceSearchEnabled,
|
||||
passKeyEnabled: map['passKeyEnabled'] ?? defaultValue.passKeyEnabled,
|
||||
recoveryKeyVerified:
|
||||
map['recoveryKeyVerified'] ?? defaultValue.recoveryKeyVerified,
|
||||
internalUser: map['internalUser'] ?? defaultValue.internalUser,
|
||||
betaUser: map['betaUser'] ?? defaultValue.betaUser,
|
||||
);
|
||||
}
|
||||
}
|
75  mobile/plugins/ente_feature_flag/lib/src/service.dart  Normal file
@@ -0,0 +1,75 @@
// ignore_for_file: always_use_package_imports

import "dart:convert";
import "dart:developer";
import "dart:io";

import "package:dio/dio.dart";
import "package:flutter/foundation.dart";
import "package:shared_preferences/shared_preferences.dart";

import "model.dart";

class FlagService {
  final SharedPreferences _prefs;
  final Dio _enteDio;
  late final bool _usingEnteEmail;

  FlagService(this._prefs, this._enteDio) {
    _usingEnteEmail = _prefs.getString("email")?.endsWith("@ente.io") ?? false;
    Future.delayed(const Duration(seconds: 5), () {
      _fetch();
    });
  }

  RemoteFlags? _flags;

  RemoteFlags get flags {
    try {
      if (!_prefs.containsKey("remote_flags")) {
        _fetch().ignore();
      }
      _flags ??= RemoteFlags.fromMap(
        jsonDecode(_prefs.getString("remote_flags") ?? "{}"),
      );
      return _flags!;
    } catch (e) {
      debugPrint("Failed to get feature flags $e");
      return RemoteFlags.defaultValue;
    }
  }

  Future<void> _fetch() async {
    try {
      if (!_prefs.containsKey("token")) {
        log("token not found, skip", name: "FlagService");
        return;
      }
      log("fetching feature flags", name: "FlagService");
      final response = await _enteDio.get("/remote-store/feature-flags");
      final remoteFlags = RemoteFlags.fromMap(response.data);
      await _prefs.setString("remote_flags", remoteFlags.toJson());
      _flags = remoteFlags;
    } catch (e) {
      debugPrint("Failed to sync feature flags $e");
    }
  }

  bool get disableCFWorker => flags.disableCFWorker;

  bool get internalUser => flags.internalUser || _usingEnteEmail || kDebugMode;

  bool get betaUser => flags.betaUser;

  bool get internalOrBetaUser => internalUser || betaUser;

  bool get enableStripe => Platform.isIOS ? false : flags.enableStripe;

  bool get mapEnabled => flags.mapEnabled;

  bool get faceSearchEnabled => flags.faceSearchEnabled;

  bool get passKeyEnabled => flags.passKeyEnabled || internalOrBetaUser;

  bool get recoveryKeyVerified => flags.recoveryKeyVerified;
}
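For reference, a minimal sketch of how this new FlagService is expected to be wired up on the mobile side, assuming a service-locator style global like the `flagService` referenced in multipart.dart above. The actual `service_locator.dart` and the package entry point name (`lib/ente_feature_flag.dart`, which per the export hunk re-exports `src/service.dart`) are not shown in this diff, so those names are assumptions.

```dart
// Hypothetical wiring sketch; names outside the plugin itself are illustrative.
import "package:dio/dio.dart";
import "package:ente_feature_flag/ente_feature_flag.dart";
import "package:shared_preferences/shared_preferences.dart";

late final FlagService flagService;

Future<void> initServiceLocator(Dio enteDio) async {
  final prefs = await SharedPreferences.getInstance();
  // FlagService serves cached flags synchronously and refreshes them from
  // /remote-store/feature-flags a few seconds after construction.
  flagService = FlagService(prefs, enteDio);
}

// Call sites can then check, e.g., flagService.internalUser.
```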
277
mobile/plugins/ente_feature_flag/pubspec.lock
Normal file
277
mobile/plugins/ente_feature_flag/pubspec.lock
Normal file
|
@ -0,0 +1,277 @@
|
|||
# Generated by pub
|
||||
# See https://dart.dev/tools/pub/glossary#lockfile
|
||||
packages:
|
||||
characters:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: characters
|
||||
sha256: "04a925763edad70e8443c99234dc3328f442e811f1d8fd1a72f1c8ad0f69a605"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "1.3.0"
|
||||
collection:
|
||||
dependency: "direct main"
|
||||
description:
|
||||
name: collection
|
||||
sha256: ee67cb0715911d28db6bf4af1026078bd6f0128b07a5f66fb2ed94ec6783c09a
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "1.18.0"
|
||||
dio:
|
||||
dependency: "direct main"
|
||||
description:
|
||||
name: dio
|
||||
sha256: "7d328c4d898a61efc3cd93655a0955858e29a0aa647f0f9e02d59b3bb275e2e8"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "4.0.6"
|
||||
ffi:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: ffi
|
||||
sha256: "493f37e7df1804778ff3a53bd691d8692ddf69702cf4c1c1096a2e41b4779e21"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "2.1.2"
|
||||
file:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: file
|
||||
sha256: "5fc22d7c25582e38ad9a8515372cd9a93834027aacf1801cf01164dac0ffa08c"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "7.0.0"
|
||||
flutter:
|
||||
dependency: "direct main"
|
||||
description: flutter
|
||||
source: sdk
|
||||
version: "0.0.0"
|
||||
flutter_lints:
|
||||
dependency: "direct dev"
|
||||
description:
|
||||
name: flutter_lints
|
||||
sha256: "9e8c3858111da373efc5aa341de011d9bd23e2c5c5e0c62bccf32438e192d7b1"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "3.0.2"
|
||||
flutter_web_plugins:
|
||||
dependency: transitive
|
||||
description: flutter
|
||||
source: sdk
|
||||
version: "0.0.0"
|
||||
http_parser:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: http_parser
|
||||
sha256: "2aa08ce0341cc9b354a498388e30986515406668dbcc4f7c950c3e715496693b"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "4.0.2"
|
||||
lints:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: lints
|
||||
sha256: cbf8d4b858bb0134ef3ef87841abdf8d63bfc255c266b7bf6b39daa1085c4290
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "3.0.0"
|
||||
material_color_utilities:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: material_color_utilities
|
||||
sha256: "0e0a020085b65b6083975e499759762399b4475f766c21668c4ecca34ea74e5a"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "0.8.0"
|
||||
meta:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: meta
|
||||
sha256: d584fa6707a52763a52446f02cc621b077888fb63b93bbcb1143a7be5a0c0c04
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "1.11.0"
|
||||
path:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: path
|
||||
sha256: "087ce49c3f0dc39180befefc60fdb4acd8f8620e5682fe2476afd0b3688bb4af"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "1.9.0"
|
||||
path_provider_linux:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: path_provider_linux
|
||||
sha256: f7a1fe3a634fe7734c8d3f2766ad746ae2a2884abe22e241a8b301bf5cac3279
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "2.2.1"
|
||||
path_provider_platform_interface:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: path_provider_platform_interface
|
||||
sha256: "88f5779f72ba699763fa3a3b06aa4bf6de76c8e5de842cf6f29e2e06476c2334"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "2.1.2"
|
||||
path_provider_windows:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: path_provider_windows
|
||||
sha256: "8bc9f22eee8690981c22aa7fc602f5c85b497a6fb2ceb35ee5a5e5ed85ad8170"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "2.2.1"
|
||||
platform:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: platform
|
||||
sha256: "12220bb4b65720483f8fa9450b4332347737cf8213dd2840d8b2c823e47243ec"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "3.1.4"
|
||||
plugin_platform_interface:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: plugin_platform_interface
|
||||
sha256: "4820fbfdb9478b1ebae27888254d445073732dae3d6ea81f0b7e06d5dedc3f02"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "2.1.8"
|
||||
shared_preferences:
|
||||
dependency: "direct main"
|
||||
description:
|
||||
name: shared_preferences
|
||||
sha256: d3bbe5553a986e83980916ded2f0b435ef2e1893dfaa29d5a7a790d0eca12180
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "2.2.3"
|
||||
shared_preferences_android:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: shared_preferences_android
|
||||
sha256: "1ee8bf911094a1b592de7ab29add6f826a7331fb854273d55918693d5364a1f2"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "2.2.2"
|
||||
shared_preferences_foundation:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: shared_preferences_foundation
|
||||
sha256: "7708d83064f38060c7b39db12aefe449cb8cdc031d6062280087bc4cdb988f5c"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "2.3.5"
|
||||
shared_preferences_linux:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: shared_preferences_linux
|
||||
sha256: "9f2cbcf46d4270ea8be39fa156d86379077c8a5228d9dfdb1164ae0bb93f1faa"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "2.3.2"
|
||||
shared_preferences_platform_interface:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: shared_preferences_platform_interface
|
||||
sha256: "22e2ecac9419b4246d7c22bfbbda589e3acf5c0351137d87dd2939d984d37c3b"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "2.3.2"
|
||||
shared_preferences_web:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: shared_preferences_web
|
||||
sha256: "9aee1089b36bd2aafe06582b7d7817fd317ef05fc30e6ba14bff247d0933042a"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "2.3.0"
|
||||
shared_preferences_windows:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: shared_preferences_windows
|
||||
sha256: "841ad54f3c8381c480d0c9b508b89a34036f512482c407e6df7a9c4aa2ef8f59"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "2.3.2"
|
||||
sky_engine:
|
||||
dependency: transitive
|
||||
description: flutter
|
||||
source: sdk
|
||||
version: "0.0.99"
|
||||
source_span:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: source_span
|
||||
sha256: "53e943d4206a5e30df338fd4c6e7a077e02254531b138a15aec3bd143c1a8b3c"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "1.10.0"
|
||||
stack_trace:
|
||||
dependency: "direct main"
|
||||
description:
|
||||
name: stack_trace
|
||||
sha256: "73713990125a6d93122541237550ee3352a2d84baad52d375a4cad2eb9b7ce0b"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "1.11.1"
|
||||
string_scanner:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: string_scanner
|
||||
sha256: "556692adab6cfa87322a115640c11f13cb77b3f076ddcc5d6ae3c20242bedcde"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "1.2.0"
|
||||
term_glyph:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: term_glyph
|
||||
sha256: a29248a84fbb7c79282b40b8c72a1209db169a2e0542bce341da992fe1bc7e84
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "1.2.1"
|
||||
typed_data:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: typed_data
|
||||
sha256: facc8d6582f16042dd49f2463ff1bd6e2c9ef9f3d5da3d9b087e244a7b564b3c
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "1.3.2"
|
||||
vector_math:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: vector_math
|
||||
sha256: "80b3257d1492ce4d091729e3a67a60407d227c27241d6927be0130c98e741803"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "2.1.4"
|
||||
web:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: web
|
||||
sha256: "97da13628db363c635202ad97068d47c5b8aa555808e7a9411963c533b449b27"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "0.5.1"
|
||||
win32:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: win32
|
||||
sha256: "0a989dc7ca2bb51eac91e8fd00851297cfffd641aa7538b165c62637ca0eaa4a"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "5.4.0"
|
||||
xdg_directories:
|
||||
dependency: transitive
|
||||
description:
|
||||
name: xdg_directories
|
||||
sha256: faea9dee56b520b55a566385b84f2e8de55e7496104adada9962e0bd11bcff1d
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "1.0.4"
|
||||
sdks:
|
||||
dart: ">=3.3.0 <4.0.0"
|
||||
flutter: ">=3.19.0"
|
19  mobile/plugins/ente_feature_flag/pubspec.yaml  Normal file
@@ -0,0 +1,19 @@
name: ente_feature_flag
version: 0.0.1
publish_to: none

environment:
  sdk: '>=3.3.0 <4.0.0'

dependencies:
  collection:
  dio: ^4.0.6
  flutter:
    sdk: flutter
  shared_preferences: ^2.0.5
  stack_trace:

dev_dependencies:
  flutter_lints:

flutter:
@ -442,6 +442,13 @@ packages:
|
|||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "2.1.17"
|
||||
ente_feature_flag:
|
||||
dependency: "direct main"
|
||||
description:
|
||||
path: "plugins/ente_feature_flag"
|
||||
relative: true
|
||||
source: path
|
||||
version: "0.0.1"
|
||||
equatable:
|
||||
dependency: "direct main"
|
||||
description:
|
||||
|
@ -559,10 +566,10 @@ packages:
|
|||
dependency: "direct main"
|
||||
description:
|
||||
name: firebase_core
|
||||
sha256: a864d1b6afd25497a3b57b016886d1763df52baaa69758a46723164de8d187fe
|
||||
sha256: "6b1152a5af3b1cfe7e45309e96fc1aa14873f410f7aadb3878aa7812acfa7531"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "2.29.0"
|
||||
version: "2.30.0"
|
||||
firebase_core_platform_interface:
|
||||
dependency: transitive
|
||||
description:
|
||||
|
@ -583,10 +590,10 @@ packages:
|
|||
dependency: "direct main"
|
||||
description:
|
||||
name: firebase_messaging
|
||||
sha256: e41586e0fd04fe9a40424f8b0053d0832e6d04f49e020cdaf9919209a28497e9
|
||||
sha256: "87e3eda0ecdfeadb5fd1cf0dc5153aea5307a0cfca751c4b1ac97bfdd805660e"
|
||||
url: "https://pub.dev"
|
||||
source: hosted
|
||||
version: "14.7.19"
|
||||
version: "14.8.1"
|
||||
firebase_messaging_platform_interface:
|
||||
dependency: transitive
|
||||
description:
|
||||
|
|
|
@@ -48,6 +48,8 @@ dependencies:
  dotted_border: ^2.1.0
  dropdown_button2: ^2.0.0
  email_validator: ^2.0.1
  ente_feature_flag:
    path: plugins/ente_feature_flag
  equatable: ^2.0.5
  event_bus: ^2.0.0
  exif: ^3.0.0

@@ -60,8 +62,8 @@ dependencies:
  file_saver:
    # Use forked version till this PR is merged: https://github.com/incrediblezayed/file_saver/pull/87
    git: https://github.com/jesims/file_saver.git
  firebase_core: ^2.13.1
  firebase_messaging: ^14.6.2
  firebase_core: ^2.30.0
  firebase_messaging: ^14.8.0
  fk_user_agent: ^2.0.1
  flutter:
    sdk: flutter

@@ -98,7 +100,7 @@ dependencies:
  isar_flutter_libs: ^3.1.0+1
  json_annotation: ^4.8.0
  latlong2: ^0.9.0
  like_button: ^2.0.2
  like_button: ^2.0.5
  loading_animations: ^2.1.0
  local_auth: ^2.1.5
  local_auth_android:
@@ -194,7 +194,7 @@ func main() {
    commonBillController := commonbilling.NewController(storagBonusRepo, userRepo, usageRepo)
    appStoreController := controller.NewAppStoreController(defaultPlan,
        billingRepo, fileRepo, userRepo, commonBillController)

    remoteStoreController := &remoteStoreCtrl.Controller{Repo: remoteStoreRepository}
    playStoreController := controller.NewPlayStoreController(defaultPlan,
        billingRepo, fileRepo, userRepo, storagBonusRepo, commonBillController)
    stripeController := controller.NewStripeController(plans, stripeClients,

@@ -610,6 +610,7 @@ func main() {
        UserAuthRepo:          userAuthRepo,
        UserController:        userController,
        FamilyController:      familyController,
        RemoteStoreController: remoteStoreController,
        FileRepo:              fileRepo,
        StorageBonusRepo:      storagBonusRepo,
        BillingRepo:           billingRepo,

@@ -631,6 +632,7 @@ func main() {
    adminAPI.PUT("/user/change-email", adminHandler.ChangeEmail)
    adminAPI.DELETE("/user/delete", adminHandler.DeleteUser)
    adminAPI.POST("/user/recover", adminHandler.RecoverAccount)
    adminAPI.POST("/user/update-flag", adminHandler.UpdateFeatureFlag)
    adminAPI.GET("/email-hash", adminHandler.GetEmailHash)
    adminAPI.POST("/emails-from-hashes", adminHandler.GetEmailsFromHashes)
    adminAPI.PUT("/user/subscription", adminHandler.UpdateSubscription)

@@ -658,7 +660,6 @@ func main() {
    privateAPI.DELETE("/authenticator/entity", authenticatorHandler.DeleteEntity)
    privateAPI.GET("/authenticator/entity/diff", authenticatorHandler.GetDiff)

    remoteStoreController := &remoteStoreCtrl.Controller{Repo: remoteStoreRepository}
    dataCleanupController := &dataCleanupCtrl.DeleteUserCleanupController{
        Repo:     dataCleanupRepository,
        UserRepo: userRepo,

@@ -672,6 +673,7 @@ func main() {

    privateAPI.POST("/remote-store/update", remoteStoreHandler.InsertOrUpdate)
    privateAPI.GET("/remote-store", remoteStoreHandler.GetKey)
    privateAPI.GET("/remote-store/feature-flags", remoteStoreHandler.GetFeatureFlags)

    pushHandler := &api.PushHandler{PushController: pushController}
    privateAPI.POST("/push/token", pushHandler.AddToken)

@@ -837,7 +839,7 @@ func setupAndStartCrons(userAuthRepo *repo.UserAuthRepository, publicCollectionR

    schedule(c, "@every 24h", func() {
        _ = userAuthRepo.RemoveDeletedTokens(timeUtil.MicrosecondBeforeDays(30))
        _ = castDb.DeleteOldCodes(context.Background(), timeUtil.MicrosecondBeforeDays(1))
        _ = castDb.DeleteOldSessions(context.Background(), timeUtil.MicrosecondBeforeDays(7))
        _ = publicCollectionRepo.CleanupAccessHistory(context.Background())
    })

@@ -895,6 +897,8 @@ func setupAndStartCrons(userAuthRepo *repo.UserAuthRepository, publicCollectionR
    })

    schedule(c, "@every 30m", func() {
        // delete unclaimed codes older than 60 minutes
        _ = castDb.DeleteUnclaimedCodes(context.Background(), timeUtil.MicrosecondsBeforeMinutes(60))
        dataCleanupCtrl.DeleteDataCron()
    })
@@ -45,7 +45,7 @@ require you to clone the repository or build any images.
+      image: ghcr.io/ente-io/server
   ```

4. Create an (empty) configuration file. Yyou can later put your custom
4. Create an (empty) configuration file. You can later put your custom
   configuration in this if needed.

   ```sh
@@ -149,6 +149,12 @@ var ErrCastPermissionDenied = ApiError{
    HttpStatusCode: http.StatusForbidden,
}

var ErrCastIPMismatch = ApiError{
    Code:           "CAST_IP_MISMATCH",
    Message:        "IP mismatch",
    HttpStatusCode: http.StatusForbidden,
}

type ErrorCode string

const (
@@ -13,3 +13,66 @@ type UpdateKeyValueRequest struct {
    Key   string `json:"key" binding:"required"`
    Value string `json:"value" binding:"required"`
}

type AdminUpdateKeyValueRequest struct {
    UserID int64  `json:"userID" binding:"required"`
    Key    string `json:"key" binding:"required"`
    Value  string `json:"value" binding:"required"`
}

type FeatureFlagResponse struct {
    EnableStripe bool `json:"enableStripe"`
    // If true, the mobile client will stop using CF worker to download files
    DisableCFWorker     bool `json:"disableCFWorker"`
    MapEnabled          bool `json:"mapEnabled"`
    FaceSearchEnabled   bool `json:"faceSearchEnabled"`
    PassKeyEnabled      bool `json:"passKeyEnabled"`
    RecoveryKeyVerified bool `json:"recoveryKeyVerified"`
    InternalUser        bool `json:"internalUser"`
    BetaUser            bool `json:"betaUser"`
}

type FlagKey string

const (
    RecoveryKeyVerified FlagKey = "recoveryKeyVerified"
    MapEnabled          FlagKey = "mapEnabled"
    FaceSearchEnabled   FlagKey = "faceSearchEnabled"
    PassKeyEnabled      FlagKey = "passKeyEnabled"
    IsInternalUser      FlagKey = "internalUser"
    IsBetaUser          FlagKey = "betaUser"
)

func (k FlagKey) String() string {
    return string(k)
}

// UserEditable returns true if the key is user editable
func (k FlagKey) UserEditable() bool {
    switch k {
    case RecoveryKeyVerified, MapEnabled, FaceSearchEnabled, PassKeyEnabled:
        return true
    default:
        return false
    }
}

func (k FlagKey) IsAdminEditable() bool {
    switch k {
    case RecoveryKeyVerified, MapEnabled, FaceSearchEnabled:
        return false
    case IsInternalUser, IsBetaUser, PassKeyEnabled:
        return true
    default:
        return true
    }
}

func (k FlagKey) IsBoolType() bool {
    switch k {
    case RecoveryKeyVerified, MapEnabled, FaceSearchEnabled, PassKeyEnabled, IsInternalUser, IsBetaUser:
        return true
    default:
        return false
    }
}
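The JSON produced by this `FeatureFlagResponse` is what the mobile `RemoteFlags.fromMap` (added earlier in this diff) decodes. A hedged sketch of that round trip, with an illustrative payload; note that `RemoteFlags` lives in the plugin's `src/model.dart` and is not re-exported by the package entry point shown above, so the direct import here is for illustration only.

```dart
// Illustrative only: decoding a FeatureFlagResponse-shaped body into RemoteFlags.
import "dart:convert";

import "package:ente_feature_flag/src/model.dart";

void main() {
  const body =
      '{"enableStripe": true, "disableCFWorker": false, "mapEnabled": true, '
      '"faceSearchEnabled": false, "passKeyEnabled": false, '
      '"recoveryKeyVerified": true, "internalUser": false, "betaUser": false}';
  final flags = RemoteFlags.fromMap(jsonDecode(body));
  // Missing keys fall back to RemoteFlags.defaultValue, so older servers that
  // omit a flag do not break the client.
  print(flags.mapEnabled); // true
}
```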
1  server/migrations/84_add_cast_column.down.sql  Normal file
@@ -0,0 +1 @@
ALTER TABLE casting DROP COLUMN IF EXISTS ip;

5  server/migrations/84_add_cast_column.up.sql  Normal file
@@ -0,0 +1,5 @@
--- Delete all rows from casting table and add a non-nullable column called ip
BEGIN;
DELETE FROM casting;
ALTER TABLE casting ADD COLUMN ip text NOT NULL;
COMMIT;
@@ -3,6 +3,7 @@ package api
import (
    "errors"
    "fmt"
    "github.com/ente-io/museum/pkg/controller/remotestore"
    "net/http"
    "strconv"
    "strings"

@@ -43,6 +44,7 @@ type AdminHandler struct {
    BillingController       *controller.BillingController
    UserController          *user.UserController
    FamilyController        *family.Controller
    RemoteStoreController   *remotestore.Controller
    ObjectCleanupController *controller.ObjectCleanupController
    MailingListsController  *controller.MailingListsController
    DiscordController       *discord.DiscordController

@@ -260,6 +262,32 @@ func (h *AdminHandler) RemovePasskeys(c *gin.Context) {
    c.JSON(http.StatusOK, gin.H{})
}

func (h *AdminHandler) UpdateFeatureFlag(c *gin.Context) {
    var request ente.AdminUpdateKeyValueRequest
    if err := c.ShouldBindJSON(&request); err != nil {
        handler.Error(c, stacktrace.Propagate(ente.ErrBadRequest, "Bad request"))
        return
    }
    go h.DiscordController.NotifyAdminAction(
        fmt.Sprintf("Admin (%d) updating flag:%s to val:%s for %d", auth.GetUserID(c.Request.Header), request.Key, request.Value, request.UserID))

    logger := logrus.WithFields(logrus.Fields{
        "user_id":  request.UserID,
        "admin_id": auth.GetUserID(c.Request.Header),
        "req_id":   requestid.Get(c),
        "req_ctx":  "update_feature_flag",
    })
    logger.Info("Start update")
    err := h.RemoteStoreController.AdminInsertOrUpdate(c, request)
    if err != nil {
        logger.WithError(err).Error("Failed to update flag")
        handler.Error(c, stacktrace.Propagate(err, ""))
        return
    }
    logger.Info("successfully updated flag")
    c.JSON(http.StatusOK, gin.H{})
}

func (h *AdminHandler) CloseFamily(c *gin.Context) {

    var request ente.AdminOpsForUserRequest
@@ -49,3 +49,13 @@ func (h *RemoteStoreHandler) GetKey(c *gin.Context) {
    }
    c.JSON(http.StatusOK, resp)
}

// GetFeatureFlags returns all the feature flags and value for given user
func (h *RemoteStoreHandler) GetFeatureFlags(c *gin.Context) {
    resp, err := h.Controller.GetFeatureFlags(c)
    if err != nil {
        handler.Error(c, stacktrace.Propagate(err, "failed to get feature flags"))
        return
    }
    c.JSON(http.StatusOK, resp)
}
@@ -2,12 +2,15 @@ package cast

import (
    "context"
    "github.com/ente-io/museum/ente"
    "github.com/ente-io/museum/ente/cast"
    "github.com/ente-io/museum/pkg/controller/access"
    castRepo "github.com/ente-io/museum/pkg/repo/cast"
    "github.com/ente-io/museum/pkg/utils/auth"
    "github.com/ente-io/museum/pkg/utils/network"
    "github.com/ente-io/stacktrace"
    "github.com/gin-gonic/gin"
    "github.com/sirupsen/logrus"
)

type Controller struct {

@@ -24,12 +27,24 @@ func NewController(castRepo *castRepo.Repository,
    }
}

func (c *Controller) RegisterDevice(ctx context.Context, request *cast.RegisterDeviceRequest) (string, error) {
    return c.CastRepo.AddCode(ctx, request.DeviceCode, request.PublicKey)
func (c *Controller) RegisterDevice(ctx *gin.Context, request *cast.RegisterDeviceRequest) (string, error) {
    return c.CastRepo.AddCode(ctx, request.DeviceCode, request.PublicKey, network.GetClientIP(ctx))
}

func (c *Controller) GetPublicKey(ctx context.Context, deviceCode string) (string, error) {
    return c.CastRepo.GetPubKey(ctx, deviceCode)
func (c *Controller) GetPublicKey(ctx *gin.Context, deviceCode string) (string, error) {
    pubKey, ip, err := c.CastRepo.GetPubKeyAndIp(ctx, deviceCode)
    if err != nil {
        return "", stacktrace.Propagate(err, "")
    }
    if ip != network.GetClientIP(ctx) {
        logrus.WithFields(logrus.Fields{
            "deviceCode": deviceCode,
            "ip":         ip,
            "clientIP":   network.GetClientIP(ctx),
        }).Warn("GetPublicKey: IP mismatch")
        return "", &ente.ErrCastIPMismatch
    }
    return pubKey, nil
}

func (c *Controller) GetEncCastData(ctx context.Context, deviceCode string) (*string, error) {
@ -64,8 +64,12 @@ func (c *FileController) validateFileCreateOrUpdateReq(userID int64, file ente.F
|
|||
if !strings.HasPrefix(file.File.ObjectKey, objectPathPrefix) || !strings.HasPrefix(file.Thumbnail.ObjectKey, objectPathPrefix) {
|
||||
return stacktrace.Propagate(ente.ErrBadRequest, "Incorrect object key reported")
|
||||
}
|
||||
if file.EncryptedKey == "" || file.KeyDecryptionNonce == "" {
|
||||
return stacktrace.Propagate(ente.ErrBadRequest, "EncryptedKey and KeyDecryptionNonce are required")
|
||||
isCreateFileReq := file.ID == 0
|
||||
// Check for attributes for fileCreation. We don't send key details on update
|
||||
if isCreateFileReq {
|
||||
if file.EncryptedKey == "" || file.KeyDecryptionNonce == "" {
|
||||
return stacktrace.Propagate(ente.ErrBadRequest, "EncryptedKey and KeyDecryptionNonce are required")
|
||||
}
|
||||
}
|
||||
if file.File.DecryptionHeader == "" || file.Thumbnail.DecryptionHeader == "" {
|
||||
return stacktrace.Propagate(ente.ErrBadRequest, "DecryptionHeader for file & thumb is required")
|
||||
|
@ -73,18 +77,24 @@ func (c *FileController) validateFileCreateOrUpdateReq(userID int64, file ente.F
|
|||
if file.UpdationTime == 0 {
|
||||
return stacktrace.Propagate(ente.ErrBadRequest, "UpdationTime is required")
|
||||
}
|
||||
collection, err := c.CollectionRepo.Get(file.CollectionID)
|
||||
if err != nil {
|
||||
return stacktrace.Propagate(err, "")
|
||||
}
|
||||
// Verify that user owns the collection.
|
||||
// Warning: Do not remove this check
|
||||
if collection.Owner.ID != userID || file.OwnerID != userID {
|
||||
return stacktrace.Propagate(ente.ErrPermissionDenied, "")
|
||||
}
|
||||
if collection.IsDeleted {
|
||||
return stacktrace.Propagate(ente.ErrNotFound, "collection has been deleted")
|
||||
if isCreateFileReq {
|
||||
collection, err := c.CollectionRepo.Get(file.CollectionID)
|
||||
if err != nil {
|
||||
return stacktrace.Propagate(err, "")
|
||||
}
|
||||
// Verify that user owns the collection.
|
||||
// Warning: Do not remove this check
|
||||
if collection.Owner.ID != userID {
|
||||
return stacktrace.Propagate(ente.ErrPermissionDenied, "collection doesn't belong to user")
|
||||
}
|
||||
if collection.IsDeleted {
|
||||
return stacktrace.Propagate(ente.ErrNotFound, "collection has been deleted")
|
||||
}
|
||||
if file.OwnerID != userID {
|
||||
return stacktrace.Propagate(ente.ErrPermissionDenied, "file ownerID doesn't match with userID")
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
|
|
|
@@ -3,6 +3,7 @@ package remotestore
import (
    "database/sql"
    "errors"
    "fmt"

    "github.com/ente-io/museum/ente"
    "github.com/ente-io/museum/pkg/repo/remotestore"

@@ -16,12 +17,22 @@ type Controller struct {
    Repo *remotestore.Repository
}

// Insert of update the key's value
// InsertOrUpdate the key's value
func (c *Controller) InsertOrUpdate(ctx *gin.Context, request ente.UpdateKeyValueRequest) error {
    if err := _validateRequest(request.Key, request.Value, false); err != nil {
        return err
    }
    userID := auth.GetUserID(ctx.Request.Header)
    return c.Repo.InsertOrUpdate(ctx, userID, request.Key, request.Value)
}

func (c *Controller) AdminInsertOrUpdate(ctx *gin.Context, request ente.AdminUpdateKeyValueRequest) error {
    if err := _validateRequest(request.Key, request.Value, true); err != nil {
        return err
    }
    return c.Repo.InsertOrUpdate(ctx, request.UserID, request.Key, request.Value)
}

func (c *Controller) Get(ctx *gin.Context, req ente.GetValueRequest) (*ente.GetValueResponse, error) {
    userID := auth.GetUserID(ctx.Request.Header)
    value, err := c.Repo.GetValue(ctx, userID, req.Key)

@@ -34,3 +45,50 @@ func (c *Controller) Get(ctx *gin.Context, req ente.GetValueRequest) (*ente.GetV
    }
    return &ente.GetValueResponse{Value: value}, nil
}

func (c *Controller) GetFeatureFlags(ctx *gin.Context) (*ente.FeatureFlagResponse, error) {
    userID := auth.GetUserID(ctx.Request.Header)
    values, err := c.Repo.GetAllValues(ctx, userID)
    if err != nil {
        return nil, stacktrace.Propagate(err, "")
    }
    response := &ente.FeatureFlagResponse{
        EnableStripe:    true, // enable stripe for all
        DisableCFWorker: false,
    }
    for key, value := range values {
        flag := ente.FlagKey(key)
        if !flag.IsBoolType() {
            continue
        }
        switch flag {
        case ente.RecoveryKeyVerified:
            response.RecoveryKeyVerified = value == "true"
        case ente.MapEnabled:
            response.MapEnabled = value == "true"
        case ente.FaceSearchEnabled:
            response.FaceSearchEnabled = value == "true"
        case ente.PassKeyEnabled:
            response.PassKeyEnabled = value == "true"
        case ente.IsInternalUser:
            response.InternalUser = value == "true"
        case ente.IsBetaUser:
            response.BetaUser = value == "true"
        }
    }
    return response, nil
}

func _validateRequest(key, value string, byAdmin bool) error {
    flag := ente.FlagKey(key)
    if !flag.UserEditable() && !byAdmin {
        return stacktrace.Propagate(ente.NewBadRequestWithMessage(fmt.Sprintf("key %s is not user editable", key)), "key not user editable")
    }
    if byAdmin && !flag.IsAdminEditable() {
        return stacktrace.Propagate(ente.NewBadRequestWithMessage(fmt.Sprintf("key %s is not admin editable", key)), "key not admin editable")
    }
    if flag.IsBoolType() && value != "true" && value != "false" {
        return stacktrace.Propagate(ente.NewBadRequestWithMessage(fmt.Sprintf("value %s is not allowed", value)), "value not allowed")
    }
    return nil
}
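A hedged sketch of how a client might exercise these endpoints end to end: the route paths come from the main.go changes above, but the exact mobile call site is not part of this diff, and `enteDio` is assumed to be an authenticated Dio instance.

```dart
// Illustrative only: toggling a user-editable flag and refreshing the cache.
import "package:dio/dio.dart";

Future<void> enableMap(Dio enteDio) async {
  // Keys must pass FlagKey.UserEditable() on the server; bool flags only
  // accept the strings "true" or "false" (see _validateRequest above).
  await enteDio.post(
    "/remote-store/update",
    data: {"key": "mapEnabled", "value": "true"},
  );
  // A subsequent fetch of /remote-store/feature-flags reflects the change.
  final response = await enteDio.get("/remote-store/feature-flags");
  assert(response.data["mapEnabled"] == true);
}
```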
@@ -150,6 +150,7 @@ func (r *RateLimitMiddleware) getLimiter(reqPath string, reqMethod string) *limi
    reqPath == "/public-collection/verify-password" ||
    reqPath == "/family/accept-invite" ||
    reqPath == "/users/srp/attributes" ||
    (reqPath == "/cast/device-info/" && reqMethod == "POST") ||
    reqPath == "/users/srp/verify-session" ||
    reqPath == "/family/invite-info/:token" ||
    reqPath == "/family/add-member" ||
@ -7,6 +7,7 @@ import (
|
|||
"github.com/ente-io/museum/pkg/utils/random"
|
||||
"github.com/ente-io/stacktrace"
|
||||
"github.com/google/uuid"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"strings"
|
||||
)
|
||||
|
||||
|
@ -14,7 +15,7 @@ type Repository struct {
|
|||
DB *sql.DB
|
||||
}
|
||||
|
||||
func (r *Repository) AddCode(ctx context.Context, code *string, pubKey string) (string, error) {
|
||||
func (r *Repository) AddCode(ctx context.Context, code *string, pubKey string, ip string) (string, error) {
|
||||
var codeValue string
|
||||
var err error
|
||||
if code == nil || *code == "" {
|
||||
|
@ -25,7 +26,7 @@ func (r *Repository) AddCode(ctx context.Context, code *string, pubKey string) (
|
|||
} else {
|
||||
codeValue = strings.TrimSpace(*code)
|
||||
}
|
||||
_, err = r.DB.ExecContext(ctx, "INSERT INTO casting (code, public_key, id) VALUES ($1, $2, $3)", codeValue, pubKey, uuid.New())
|
||||
_, err = r.DB.ExecContext(ctx, "INSERT INTO casting (code, public_key, id, ip) VALUES ($1, $2, $3, $4)", codeValue, pubKey, uuid.New(), ip)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
@ -38,17 +39,17 @@ func (r *Repository) InsertCastData(ctx context.Context, castUserID int64, code
|
|||
return err
|
||||
}
|
||||
|
||||
func (r *Repository) GetPubKey(ctx context.Context, code string) (string, error) {
|
||||
var pubKey string
|
||||
row := r.DB.QueryRowContext(ctx, "SELECT public_key FROM casting WHERE code = $1 and is_deleted=false", code)
|
||||
err := row.Scan(&pubKey)
|
||||
func (r *Repository) GetPubKeyAndIp(ctx context.Context, code string) (string, string, error) {
|
||||
var pubKey, ip string
|
||||
row := r.DB.QueryRowContext(ctx, "SELECT public_key, ip FROM casting WHERE code = $1 and is_deleted=false", code)
|
||||
err := row.Scan(&pubKey, &ip)
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return "", ente.ErrNotFoundError.NewErr("code not found")
|
||||
return "", "", ente.ErrNotFoundError.NewErr("code not found")
|
||||
}
|
||||
return "", err
|
||||
return "", "", err
|
||||
}
|
||||
return pubKey, nil
|
||||
return pubKey, ip, nil
|
||||
}
|
||||
|
||||
func (r *Repository) GetEncCastData(ctx context.Context, code string) (*string, error) {
|
||||
|
@ -89,12 +90,27 @@ func (r *Repository) UpdateLastUsedAtForToken(ctx context.Context, token string)
|
|||
return nil
|
||||
}
|
||||
|
||||
// DeleteOldCodes that are not associated with a collection and are older than the given time
|
||||
func (r *Repository) DeleteOldCodes(ctx context.Context, expirtyTime int64) error {
|
||||
_, err := r.DB.ExecContext(ctx, "DELETE FROM casting WHERE last_used_at < $1 and is_deleted=false and collection_id is null", expirtyTime)
|
||||
// DeleteUnclaimedCodes that are not associated with a collection and are older than the given time
|
||||
func (r *Repository) DeleteUnclaimedCodes(ctx context.Context, expiryTime int64) error {
|
||||
result, err := r.DB.ExecContext(ctx, "DELETE FROM casting WHERE last_used_at < $1 and is_deleted=false and collection_id is null", expiryTime)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if rows, rErr := result.RowsAffected(); rErr == nil && rows > 0 {
|
||||
log.Infof("Deleted %d unclaimed codes", rows)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// DeleteOldSessions where last used at is older than the given time
|
||||
func (r *Repository) DeleteOldSessions(ctx context.Context, expiryTime int64) error {
|
||||
result, err := r.DB.ExecContext(ctx, "DELETE FROM casting WHERE last_used_at < $1", expiryTime)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if rows, rErr := result.RowsAffected(); rErr == nil && rows > 0 {
|
||||
log.Infof("Deleted %d old sessions", rows)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
|
|
|
@@ -13,7 +13,6 @@ type Repository struct {
    DB *sql.DB
}

//
func (r *Repository) InsertOrUpdate(ctx context.Context, userID int64, key string, value string) error {
    _, err := r.DB.ExecContext(ctx, `INSERT INTO remote_store(user_id, key_name, key_value) VALUES ($1,$2,$3)
            ON CONFLICT (user_id, key_name) DO UPDATE SET key_value = $3;

@@ -40,3 +39,25 @@ func (r *Repository) GetValue(ctx context.Context, userID int64, key string) (st
    }
    return keyValue, nil
}

// GetAllValues fetches and return all the key value pairs for given user_id
func (r *Repository) GetAllValues(ctx context.Context, userID int64) (map[string]string, error) {
    rows, err := r.DB.QueryContext(ctx, `SELECT key_name, key_value FROM remote_store
        WHERE user_id = $1`,
        userID, // $1
    )
    if err != nil {
        return nil, stacktrace.Propagate(err, "reading value failed")
    }
    defer rows.Close()
    values := make(map[string]string)
    for rows.Next() {
        var key, value string
        err := rows.Scan(&key, &value)
        if err != nil {
            return nil, stacktrace.Propagate(err, "reading value failed")
        }
        values[key] = value
    }
    return values, nil
}
@ -7,7 +7,6 @@
|
|||
"@/next": "*",
|
||||
"@ente/accounts": "*",
|
||||
"@ente/eslint-config": "*",
|
||||
"@ente/shared": "*",
|
||||
"mime-types": "^2.1.35"
|
||||
"@ente/shared": "*"
|
||||
}
|
||||
}
|
||||
|
|
Binary file not shown.
Before Width: | Height: | Size: 164 B |
|
@ -1,24 +0,0 @@
|
|||
export enum CollectionType {
|
||||
folder = "folder",
|
||||
favorites = "favorites",
|
||||
album = "album",
|
||||
uncategorized = "uncategorized",
|
||||
}
|
||||
|
||||
export enum CollectionSummaryType {
|
||||
folder = "folder",
|
||||
favorites = "favorites",
|
||||
album = "album",
|
||||
archive = "archive",
|
||||
trash = "trash",
|
||||
uncategorized = "uncategorized",
|
||||
all = "all",
|
||||
outgoingShare = "outgoingShare",
|
||||
incomingShareViewer = "incomingShareViewer",
|
||||
incomingShareCollaborator = "incomingShareCollaborator",
|
||||
sharedOnlyViaLink = "sharedOnlyViaLink",
|
||||
archived = "archived",
|
||||
defaultHidden = "defaultHidden",
|
||||
hiddenItems = "hiddenItems",
|
||||
pinned = "pinned",
|
||||
}
|
|
@ -1,20 +0,0 @@
|
|||
export enum FILE_TYPE {
|
||||
IMAGE,
|
||||
VIDEO,
|
||||
LIVE_PHOTO,
|
||||
OTHERS,
|
||||
}
|
||||
|
||||
export const RAW_FORMATS = [
|
||||
"heic",
|
||||
"rw2",
|
||||
"tiff",
|
||||
"arw",
|
||||
"cr3",
|
||||
"cr2",
|
||||
"raf",
|
||||
"nef",
|
||||
"psd",
|
||||
"dng",
|
||||
"tif",
|
||||
];
|
|
@ -1,41 +1,13 @@
|
|||
import { FILE_TYPE } from "constants/file";
|
||||
import { FileTypeInfo } from "types/upload";
|
||||
|
||||
// list of format that were missed by type-detection for some files.
|
||||
export const WHITELISTED_FILE_FORMATS: FileTypeInfo[] = [
|
||||
{ fileType: FILE_TYPE.IMAGE, exactType: "jpeg", mimeType: "image/jpeg" },
|
||||
{ fileType: FILE_TYPE.IMAGE, exactType: "jpg", mimeType: "image/jpeg" },
|
||||
{ fileType: FILE_TYPE.VIDEO, exactType: "webm", mimeType: "video/webm" },
|
||||
{ fileType: FILE_TYPE.VIDEO, exactType: "mod", mimeType: "video/mpeg" },
|
||||
{ fileType: FILE_TYPE.VIDEO, exactType: "mp4", mimeType: "video/mp4" },
|
||||
{ fileType: FILE_TYPE.IMAGE, exactType: "gif", mimeType: "image/gif" },
|
||||
{ fileType: FILE_TYPE.VIDEO, exactType: "dv", mimeType: "video/x-dv" },
|
||||
{
|
||||
fileType: FILE_TYPE.VIDEO,
|
||||
exactType: "wmv",
|
||||
mimeType: "video/x-ms-asf",
|
||||
},
|
||||
{
|
||||
fileType: FILE_TYPE.VIDEO,
|
||||
exactType: "hevc",
|
||||
mimeType: "video/hevc",
|
||||
},
|
||||
{
|
||||
fileType: FILE_TYPE.IMAGE,
|
||||
exactType: "raf",
|
||||
mimeType: "image/x-fuji-raf",
|
||||
},
|
||||
{
|
||||
fileType: FILE_TYPE.IMAGE,
|
||||
exactType: "orf",
|
||||
mimeType: "image/x-olympus-orf",
|
||||
},
|
||||
|
||||
{
|
||||
fileType: FILE_TYPE.IMAGE,
|
||||
exactType: "crw",
|
||||
mimeType: "image/x-canon-crw",
|
||||
},
|
||||
export const RAW_FORMATS = [
|
||||
"heic",
|
||||
"rw2",
|
||||
"tiff",
|
||||
"arw",
|
||||
"cr3",
|
||||
"cr2",
|
||||
"raf",
|
||||
"nef",
|
||||
"psd",
|
||||
"dng",
|
||||
"tif",
|
||||
];
|
||||
|
||||
export const KNOWN_NON_MEDIA_FORMATS = ["xmp", "html", "txt"];
|
||||
|
|
|
@ -42,52 +42,77 @@ export default function PairingMode() {
|
|||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
if (!cast) return;
|
||||
if (isCastReady) return;
|
||||
if (!cast) {
|
||||
return;
|
||||
}
|
||||
if (isCastReady) {
|
||||
return;
|
||||
}
|
||||
const context = cast.framework.CastReceiverContext.getInstance();
|
||||
|
||||
try {
|
||||
const options = new cast.framework.CastReceiverOptions();
|
||||
options.maxInactivity = 3600;
|
||||
options.customNamespaces = Object.assign({});
|
||||
options.customNamespaces["urn:x-cast:pair-request"] =
|
||||
cast.framework.system.MessageType.JSON;
|
||||
|
||||
options.disableIdleTimeout = true;
|
||||
context.set;
|
||||
|
||||
context.addCustomMessageListener(
|
||||
"urn:x-cast:pair-request",
|
||||
messageReceiveHandler,
|
||||
);
|
||||
|
||||
// listen to close request and stop the context
|
||||
context.addEventListener(
|
||||
cast.framework.system.EventType.SENDER_DISCONNECTED,
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
(_) => {
|
||||
context.stop();
|
||||
},
|
||||
);
|
||||
context.start(options);
|
||||
setIsCastReady(true);
|
||||
} catch (e) {
|
||||
log.error("failed to create cast context", e);
|
||||
}
|
||||
setIsCastReady(true);
|
||||
|
||||
return () => {
|
||||
context.stop();
|
||||
// context.stop();
|
||||
};
|
||||
}, [cast, isCastReady]);
|
||||
}, [cast]);
|
||||
|
||||
const messageReceiveHandler = (message: {
|
||||
type: string;
|
||||
senderId: string;
|
||||
data: any;
|
||||
}) => {
|
||||
cast.framework.CastReceiverContext.getInstance().sendCustomMessage(
|
||||
"urn:x-cast:pair-request",
|
||||
message.senderId,
|
||||
{
|
||||
code: digits.join(""),
|
||||
},
|
||||
);
|
||||
try {
|
||||
cast.framework.CastReceiverContext.getInstance().sendCustomMessage(
|
||||
"urn:x-cast:pair-request",
|
||||
message.senderId,
|
||||
{
|
||||
code: digits.join(""),
|
||||
},
|
||||
);
|
||||
} catch (e) {
|
||||
log.error("failed to send message", e);
|
||||
}
|
||||
};
|
||||
|
||||
const init = async () => {
|
||||
const data = generateSecureData(6);
|
||||
setDigits(convertDataToDecimalString(data).split(""));
|
||||
const keypair = await generateKeyPair();
|
||||
setPublicKeyB64(await toB64(keypair.publicKey));
|
||||
setPrivateKeyB64(await toB64(keypair.privateKey));
|
||||
try {
|
||||
const data = generateSecureData(6);
|
||||
setDigits(convertDataToDecimalString(data).split(""));
|
||||
const keypair = await generateKeyPair();
|
||||
setPublicKeyB64(await toB64(keypair.publicKey));
|
||||
setPrivateKeyB64(await toB64(keypair.privateKey));
|
||||
} catch (e) {
|
||||
log.error("failed to generate keypair", e);
|
||||
throw e;
|
||||
}
|
||||
};
|
||||
|
||||
const generateKeyPair = async () => {
|
||||
|
@ -227,21 +252,6 @@ export default function PairingMode() {
|
|||
</a>{" "}
|
||||
for help
|
||||
</p>
|
||||
<div
|
||||
style={{
|
||||
position: "fixed",
|
||||
bottom: "20px",
|
||||
right: "20px",
|
||||
backgroundColor: "white",
|
||||
display: "flex",
|
||||
justifyContent: "center",
|
||||
alignItems: "center",
|
||||
padding: "10px",
|
||||
borderRadius: "10px",
|
||||
}}
|
||||
>
|
||||
<img src="/images/help-qrcode.webp" />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import { FILE_TYPE } from "@/media/file-type";
|
||||
import log from "@/next/log";
|
||||
import PairedSuccessfullyOverlay from "components/PairedSuccessfullyOverlay";
|
||||
import { PhotoAuditorium } from "components/PhotoAuditorium";
|
||||
import { FILE_TYPE } from "constants/file";
|
||||
import { useRouter } from "next/router";
|
||||
import { useEffect, useState } from "react";
|
||||
import {
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
import { FILE_TYPE } from "@/media/file-type";
|
||||
import ComlinkCryptoWorker from "@ente/shared/crypto";
|
||||
import { CustomError } from "@ente/shared/error";
|
||||
import HTTPService from "@ente/shared/network/HTTPService";
|
||||
import { getCastFileURL } from "@ente/shared/network/api";
|
||||
import { FILE_TYPE } from "constants/file";
|
||||
import { EnteFile } from "types/file";
|
||||
import { generateStreamFromArrayBuffer } from "utils/file";
|
||||
|
||||
|
|
28  web/apps/cast/src/services/detect-type.ts  Normal file
@@ -0,0 +1,28 @@
import { KnownFileTypeInfos } from "@/media/file-type";
import { lowercaseExtension } from "@/next/file";
import FileType from "file-type";

/**
 * Try to deduce the MIME type for the given {@link file}. Return the MIME type
 * string if successful _and_ if it is an image or a video, otherwise return
 * `undefined`.
 *
 * It first peeks into the file's initial contents to detect the MIME type. If
 * that doesn't give any results, it tries to deduce it from the file's name.
 */
export const detectMediaMIMEType = async (file: File): Promise<string> => {
    const chunkSizeForTypeDetection = 4100;
    const fileChunk = file.slice(0, chunkSizeForTypeDetection);
    const chunk = new Uint8Array(await fileChunk.arrayBuffer());
    const result = await FileType.fromBuffer(chunk);

    const mime = result?.mime;
    if (mime) {
        if (mime.startsWith("image/") || mime.startsWith("video/")) return mime;
        else throw new Error(`Detected MIME type ${mime} is not a media file`);
    }

    const ext = lowercaseExtension(file.name);
    if (!ext) return undefined;
    return KnownFileTypeInfos.find((f) => f.extension == ext)?.mimeType;
};
|
@ -1,14 +0,0 @@
|
|||
import { convertBytesToHumanReadable } from "@/next/file";
|
||||
import log from "@/next/log";
|
||||
|
||||
export async function getUint8ArrayView(file: Blob): Promise<Uint8Array> {
|
||||
try {
|
||||
return new Uint8Array(await file.arrayBuffer());
|
||||
} catch (e) {
|
||||
log.error(
|
||||
`Failed to read file blob of size ${convertBytesToHumanReadable(file.size)}`,
|
||||
e,
|
||||
);
|
||||
throw e;
|
||||
}
|
||||
}
|
|
@ -1,81 +0,0 @@
|
|||
import { nameAndExtension } from "@/next/file";
|
||||
import log from "@/next/log";
|
||||
import { CustomError } from "@ente/shared/error";
|
||||
import { FILE_TYPE } from "constants/file";
|
||||
import {
|
||||
KNOWN_NON_MEDIA_FORMATS,
|
||||
WHITELISTED_FILE_FORMATS,
|
||||
} from "constants/upload";
|
||||
import FileType from "file-type";
|
||||
import { FileTypeInfo } from "types/upload";
|
||||
import { getUint8ArrayView } from "./readerService";
|
||||
|
||||
const TYPE_VIDEO = "video";
|
||||
const TYPE_IMAGE = "image";
|
||||
const CHUNK_SIZE_FOR_TYPE_DETECTION = 4100;
|
||||
|
||||
export async function getFileType(receivedFile: File): Promise<FileTypeInfo> {
|
||||
try {
|
||||
let fileType: FILE_TYPE;
|
||||
|
||||
const typeResult = await extractFileType(receivedFile);
|
||||
const mimTypeParts: string[] = typeResult.mime?.split("/");
|
||||
if (mimTypeParts?.length !== 2) {
|
||||
throw Error(CustomError.INVALID_MIME_TYPE(typeResult.mime));
|
||||
}
|
||||
|
||||
switch (mimTypeParts[0]) {
|
||||
case TYPE_IMAGE:
|
||||
fileType = FILE_TYPE.IMAGE;
|
||||
break;
|
||||
case TYPE_VIDEO:
|
||||
fileType = FILE_TYPE.VIDEO;
|
||||
break;
|
||||
default:
|
||||
throw Error(CustomError.NON_MEDIA_FILE);
|
||||
}
|
||||
return {
|
||||
fileType,
|
||||
exactType: typeResult.ext,
|
||||
mimeType: typeResult.mime,
|
||||
};
|
||||
} catch (e) {
|
||||
const ne = nameAndExtension(receivedFile.name);
|
||||
const fileFormat = ne[1].toLowerCase();
|
||||
const whiteListedFormat = WHITELISTED_FILE_FORMATS.find(
|
||||
(a) => a.exactType === fileFormat,
|
||||
);
|
||||
if (whiteListedFormat) {
|
||||
return whiteListedFormat;
|
||||
}
|
||||
if (KNOWN_NON_MEDIA_FORMATS.includes(fileFormat)) {
|
||||
throw Error(CustomError.UNSUPPORTED_FILE_FORMAT);
|
||||
}
|
||||
if (e.message === CustomError.NON_MEDIA_FILE) {
|
||||
log.error(`unsupported file format ${fileFormat}`, e);
|
||||
throw Error(CustomError.UNSUPPORTED_FILE_FORMAT);
|
||||
}
|
||||
log.error(`type detection failed for format ${fileFormat}`, e);
|
||||
throw Error(CustomError.TYPE_DETECTION_FAILED(fileFormat));
|
||||
}
|
||||
}
|
||||
|
||||
async function extractFileType(file: File) {
|
||||
const fileBlobChunk = file.slice(0, CHUNK_SIZE_FOR_TYPE_DETECTION);
|
||||
const fileDataChunk = await getUint8ArrayView(fileBlobChunk);
|
||||
return getFileTypeFromBuffer(fileDataChunk);
|
||||
}
|
||||
|
||||
async function getFileTypeFromBuffer(buffer: Uint8Array) {
|
||||
const result = await FileType.fromBuffer(buffer);
|
||||
if (!result?.mime) {
|
||||
let logableInfo = "";
|
||||
try {
|
||||
logableInfo = `result: ${JSON.stringify(result)}`;
|
||||
} catch (e) {
|
||||
logableInfo = "failed to stringify result";
|
||||
}
|
||||
throw Error(`mimetype missing from file type result - ${logableInfo}`);
|
||||
}
|
||||
return result;
|
||||
}
|
|
@ -1,4 +1,3 @@
|
|||
import { CollectionSummaryType, CollectionType } from "constants/collection";
|
||||
import { EnteFile } from "types/file";
|
||||
import {
|
||||
EncryptedMagicMetadata,
|
||||
|
@ -20,6 +19,13 @@ export interface CollectionUser {
|
|||
role: COLLECTION_ROLE;
|
||||
}
|
||||
|
||||
enum CollectionType {
|
||||
folder = "folder",
|
||||
favorites = "favorites",
|
||||
album = "album",
|
||||
uncategorized = "uncategorized",
|
||||
}
|
||||
|
||||
export interface EncryptedCollection {
|
||||
id: number;
|
||||
owner: CollectionUser;
|
||||
|
@ -32,7 +38,7 @@ export interface EncryptedCollection {
|
|||
type: CollectionType;
|
||||
attributes: collectionAttributes;
|
||||
sharees: CollectionUser[];
|
||||
publicURLs?: PublicURL[];
|
||||
publicURLs?: unknown;
|
||||
updationTime: number;
|
||||
isDeleted: boolean;
|
||||
magicMetadata: EncryptedMagicMetadata;
|
||||
|
@ -61,54 +67,6 @@ export interface Collection
|
|||
// define a method on Collection interface to return the sync key as collection.id-time
|
||||
// this is used to store the last sync time of a collection in local storage
|
||||
|
||||
export interface PublicURL {
|
||||
url: string;
|
||||
deviceLimit: number;
|
||||
validTill: number;
|
||||
enableDownload: boolean;
|
||||
enableCollect: boolean;
|
||||
passwordEnabled: boolean;
|
||||
nonce?: string;
|
||||
opsLimit?: number;
|
||||
memLimit?: number;
|
||||
}
|
||||
|
||||
export interface UpdatePublicURL {
|
||||
collectionID: number;
|
||||
disablePassword?: boolean;
|
||||
enableDownload?: boolean;
|
||||
enableCollect?: boolean;
|
||||
validTill?: number;
|
||||
deviceLimit?: number;
|
||||
passHash?: string;
|
||||
nonce?: string;
|
||||
opsLimit?: number;
|
||||
memLimit?: number;
|
||||
}
|
||||
|
||||
export interface CreatePublicAccessTokenRequest {
|
||||
collectionID: number;
|
||||
validTill?: number;
|
||||
deviceLimit?: number;
|
||||
}
|
||||
|
||||
export interface EncryptedFileKey {
|
||||
id: number;
|
||||
encryptedKey: string;
|
||||
keyDecryptionNonce: string;
|
||||
}
|
||||
|
||||
export interface AddToCollectionRequest {
|
||||
collectionID: number;
|
||||
files: EncryptedFileKey[];
|
||||
}
|
||||
|
||||
export interface MoveToCollectionRequest {
|
||||
fromCollectionID: number;
|
||||
toCollectionID: number;
|
||||
files: EncryptedFileKey[];
|
||||
}
|
||||
|
||||
export interface collectionAttributes {
|
||||
encryptedPath?: string;
|
||||
pathDecryptionNonce?: string;
|
||||
|
@ -116,11 +74,6 @@ export interface collectionAttributes {
|
|||
|
||||
export type CollectionToFileMap = Map<number, EnteFile>;
|
||||
|
||||
export interface RemoveFromCollectionRequest {
|
||||
collectionID: number;
|
||||
fileIDs: number[];
|
||||
}
|
||||
|
||||
export interface CollectionMagicMetadataProps {
|
||||
visibility?: VISIBILITY_STATE;
|
||||
subType?: SUB_TYPE;
|
||||
|
@ -144,16 +97,4 @@ export interface CollectionPublicMagicMetadataProps {
|
|||
export type CollectionPublicMagicMetadata =
|
||||
MagicMetadataCore<CollectionPublicMagicMetadataProps>;
|
||||
|
||||
export interface CollectionSummary {
|
||||
id: number;
|
||||
name: string;
|
||||
type: CollectionSummaryType;
|
||||
coverFile: EnteFile;
|
||||
latestFile: EnteFile;
|
||||
fileCount: number;
|
||||
updationTime: number;
|
||||
order?: number;
|
||||
}
|
||||
|
||||
export type CollectionSummaries = Map<number, CollectionSummary>;
|
||||
export type CollectionFilesCount = Map<number, number>;
|
|
@ -1,9 +1,9 @@
|
|||
import type { Metadata } from "@/media/types/file";
|
||||
import {
|
||||
EncryptedMagicMetadata,
|
||||
MagicMetadataCore,
|
||||
VISIBILITY_STATE,
|
||||
} from "types/magicMetadata";
|
||||
import { Metadata } from "types/upload";
|
||||
|
||||
export interface MetadataFileAttributes {
|
||||
encryptedData: string;
|
||||
|
@ -64,25 +64,6 @@ export interface EnteFile
|
|||
isConverted?: boolean;
|
||||
}
|
||||
|
||||
export interface TrashRequest {
|
||||
items: TrashRequestItems[];
|
||||
}
|
||||
|
||||
export interface TrashRequestItems {
|
||||
fileID: number;
|
||||
collectionID: number;
|
||||
}
|
||||
|
||||
export interface FileWithUpdatedMagicMetadata {
|
||||
file: EnteFile;
|
||||
updatedMagicMetadata: FileMagicMetadata;
|
||||
}
|
||||
|
||||
export interface FileWithUpdatedPublicMagicMetadata {
|
||||
file: EnteFile;
|
||||
updatedPublicMagicMetadata: FilePublicMagicMetadata;
|
||||
}
|
||||
|
||||
export interface FileMagicMetadataProps {
|
||||
visibility?: VISIBILITY_STATE;
|
||||
filePaths?: string[];
|
||||
|
|
|
@@ -1,107 +0,0 @@
import {
    B64EncryptionResult,
    LocalFileAttributes,
} from "@ente/shared/crypto/types";
import { FILE_TYPE } from "constants/file";
import {
    FilePublicMagicMetadata,
    FilePublicMagicMetadataProps,
    MetadataFileAttributes,
    S3FileAttributes,
} from "types/file";
import { EncryptedMagicMetadata } from "types/magicMetadata";

export interface DataStream {
    stream: ReadableStream<Uint8Array>;
    chunkCount: number;
}

export function isDataStream(object: any): object is DataStream {
    return "stream" in object;
}

export type Logger = (message: string) => void;

export interface Metadata {
    title: string;
    creationTime: number;
    modificationTime: number;
    latitude: number;
    longitude: number;
    fileType: FILE_TYPE;
    hasStaticThumbnail?: boolean;
    hash?: string;
    imageHash?: string;
    videoHash?: string;
    localID?: number;
    version?: number;
    deviceFolder?: string;
}

export interface FileTypeInfo {
    fileType: FILE_TYPE;
    exactType: string;
    mimeType?: string;
    imageType?: string;
    videoType?: string;
}

export interface UploadURL {
    url: string;
    objectKey: string;
}

export interface FileInMemory {
    filedata: Uint8Array | DataStream;
    thumbnail: Uint8Array;
    hasStaticThumbnail: boolean;
}

export interface FileWithMetadata
    extends Omit<FileInMemory, "hasStaticThumbnail"> {
    metadata: Metadata;
    localID: number;
    pubMagicMetadata: FilePublicMagicMetadata;
}

export interface EncryptedFile {
    file: ProcessedFile;
    fileKey: B64EncryptionResult;
}
export interface ProcessedFile {
    file: LocalFileAttributes<Uint8Array | DataStream>;
    thumbnail: LocalFileAttributes<Uint8Array>;
    metadata: LocalFileAttributes<string>;
    pubMagicMetadata: EncryptedMagicMetadata;
    localID: number;
}
export interface BackupedFile {
    file: S3FileAttributes;
    thumbnail: S3FileAttributes;
    metadata: MetadataFileAttributes;
    pubMagicMetadata: EncryptedMagicMetadata;
}

export interface UploadFile extends BackupedFile {
    collectionID: number;
    encryptedKey: string;
    keyDecryptionNonce: string;
}

export interface ParsedExtractedMetadata {
    location: Location;
    creationTime: number;
    width: number;
    height: number;
}

export interface PublicUploadProps {
    token: string;
    passwordToken: string;
    accessedThroughSharedURL: boolean;
}

export interface ExtractMetadataResult {
    metadata: Metadata;
    publicMagicMetadata: FilePublicMagicMetadataProps;
}
@@ -1,9 +1,10 @@
import { FILE_TYPE } from "@/media/file-type";
import { decodeLivePhoto } from "@/media/live-photo";
import log from "@/next/log";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { FILE_TYPE, RAW_FORMATS } from "constants/file";
import { RAW_FORMATS } from "constants/upload";
import CastDownloadManager from "services/castDownloadManager";
import { getFileType } from "services/typeDetectionService";
import { detectMediaMIMEType } from "services/detect-type";
import {
    EncryptedEnteFile,
    EnteFile,

@@ -103,18 +104,6 @@ export function isRawFileFromFileName(fileName: string) {
    return false;
}

/**
 * [Note: File name for local EnteFile objects]
 *
 * The title property in a file's metadata is the original file's name. The
 * metadata of a file cannot be edited. So if later on the file's name is
 * changed, then the edit is stored in the `editedName` property of the public
 * metadata of the file.
 *
 * This function merges these edits onto the file object that we use locally.
 * Effectively, post this step, the file's metadata.title can be used in lieu of
 * its filename.
 */
export function mergeMetadata(files: EnteFile[]): EnteFile[] {
    return files.map((file) => {
        if (file.pubMagicMetadata?.data.editedTime) {
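The note above describes the merge in prose; the sketch below restates it. It is not the function body from this commit (which the hunk truncates), and it assumes that editedTime overrides metadata.creationTime, that editedName overrides metadata.title, and that both live under pubMagicMetadata.data; the EnteFile import path is illustrative.

import { EnteFile } from "types/file"; // illustrative path

// Sketch of the merge for a single file: overlay edits from the public magic
// metadata onto the otherwise immutable metadata fields.
const mergeOne = (file: EnteFile): EnteFile => {
    const edits = file.pubMagicMetadata?.data;
    if (edits?.editedTime) file.metadata.creationTime = edits.editedTime;
    if (edits?.editedName) file.metadata.title = edits.editedName;
    return file;
};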
@@ -143,10 +132,11 @@ export const getPreviewableImage = async (
            );
            fileBlob = new Blob([imageData]);
        }
        const fileType = await getFileType(
        const mimeType = await detectMediaMIMEType(
            new File([fileBlob], file.metadata.title),
        );
        fileBlob = new Blob([fileBlob], { type: fileType.mimeType });
        if (!mimeType) return undefined;
        fileBlob = new Blob([fileBlob], { type: mimeType });
        return fileBlob;
    } catch (e) {
        log.error("failed to download file", e);
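The same detect-then-retag pattern can be factored into a small helper, sketched below under the assumption (suggested by the lines above) that detectMediaMIMEType accepts a File and resolves to a MIME string or undefined; this helper does not exist in the commit.

import { detectMediaMIMEType } from "services/detect-type";

// Tag a blob with its detected MIME type, or give up if detection fails,
// mirroring the flow in getPreviewableImage above.
const withDetectedType = async (
    blob: Blob,
    fileName: string,
): Promise<Blob | undefined> => {
    const mimeType = await detectMediaMIMEType(new File([blob], fileName));
    if (!mimeType) return undefined;
    return new Blob([blob], { type: mimeType });
};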
@@ -21,7 +21,6 @@ const load = (() => {
            cast,
        });
    });

    document.body.appendChild(script);
});
}
@@ -21,7 +21,6 @@
        "exifr": "^7.1.3",
        "fast-srp-hap": "^2.0.4",
        "ffmpeg-wasm": "file:./thirdparty/ffmpeg-wasm",
        "file-type": "^16.5.4",
        "formik": "^2.1.5",
        "hdbscan": "0.0.1-alpha.5",
        "heic-convert": "^2.0.0",

@@ -30,6 +29,7 @@
        "leaflet-defaulticon-compatibility": "^0.1.1",
        "localforage": "^1.9.0",
        "memoize-one": "^6.0.0",
        "mime-types": "^2.1.35",
        "ml-matrix": "^6.10.4",
        "otpauth": "^9.0.2",
        "p-debounce": "^4.0.0",
@@ -5,10 +5,9 @@ import {
    MobileDateTimePicker,
} from "@mui/x-date-pickers";
import { AdapterDateFns } from "@mui/x-date-pickers/AdapterDateFns";
import {
    MAX_EDITED_CREATION_TIME,
    MIN_EDITED_CREATION_TIME,
} from "constants/file";

const MIN_EDITED_CREATION_TIME = new Date(1800, 0, 1);
const MAX_EDITED_CREATION_TIME = new Date();

interface Props {
    initialValue?: Date;
@@ -13,7 +13,7 @@ import { useFormik } from "formik";
import { t } from "i18next";
import { GalleryContext } from "pages/gallery";
import React, { useContext, useEffect, useState } from "react";
import { updateCreationTimeWithExif } from "services/updateCreationTimeWithExif";
import { updateCreationTimeWithExif } from "services/fix-exif";
import { EnteFile } from "types/file";
import EnteDateTimePicker from "./EnteDateTimePicker";
@@ -1,3 +1,4 @@
import { FILE_TYPE } from "@/media/file-type";
import log from "@/next/log";
import { PHOTOS_PAGES } from "@ente/shared/constants/pages";
import { CustomError } from "@ente/shared/error";

@@ -5,7 +6,6 @@ import useMemoSingleThreaded from "@ente/shared/hooks/useMemoSingleThreaded";
import { styled } from "@mui/material";
import PhotoViewer from "components/PhotoViewer";
import { TRASH_SECTION } from "constants/collection";
import { FILE_TYPE } from "constants/file";
import { useRouter } from "next/router";
import { GalleryContext } from "pages/gallery";
import PhotoSwipe from "photoswipe";
@@ -3,7 +3,6 @@ import { FlexWrapper } from "@ente/shared/components/Container";
import Close from "@mui/icons-material/Close";
import Done from "@mui/icons-material/Done";
import { Box, IconButton, TextField } from "@mui/material";
import { MAX_CAPTION_SIZE } from "constants/file";
import { Formik } from "formik";
import { t } from "i18next";
import { useState } from "react";

@@ -12,6 +11,8 @@ import { changeCaption, updateExistingFilePubMetadata } from "utils/file";
import * as Yup from "yup";
import { SmallLoadingSpinner } from "../styledComponents/SmallLoadingSpinner";

export const MAX_CAPTION_SIZE = 5000;

interface formValues {
    caption: string;
}
@@ -1,10 +1,10 @@
import { FILE_TYPE } from "@/media/file-type";
import { nameAndExtension } from "@/next/file";
import log from "@/next/log";
import { FlexWrapper } from "@ente/shared/components/Container";
import PhotoOutlined from "@mui/icons-material/PhotoOutlined";
import VideocamOutlined from "@mui/icons-material/VideocamOutlined";
import Box from "@mui/material/Box";
import { FILE_TYPE } from "constants/file";
import { useEffect, useState } from "react";
import { EnteFile } from "types/file";
import { makeHumanReadableStorage } from "utils/billing";
@@ -17,7 +17,7 @@ import { t } from "i18next";
import { AppContext } from "pages/_app";
import { GalleryContext } from "pages/gallery";
import { useContext, useEffect, useMemo, useState } from "react";
import { getEXIFLocation } from "services/upload/exifService";
import { getEXIFLocation } from "services/exif";
import { EnteFile } from "types/file";
import { PublicCollectionGalleryContext } from "utils/publicCollectionGallery";
import {
@@ -42,11 +42,10 @@ import { t } from "i18next";
import mime from "mime-types";
import { AppContext } from "pages/_app";
import { getLocalCollections } from "services/collectionService";
import { detectFileTypeInfo } from "services/detect-type";
import downloadManager from "services/download";
import { getFileType } from "services/typeDetectionService";
import uploadManager from "services/upload/uploadManager";
import { EnteFile } from "types/file";
import { FileWithCollection } from "types/upload";
import { getEditorCloseConfirmationMessage } from "utils/ui";
import ColoursMenu from "./ColoursMenu";
import CropMenu, { cropRegionOfCanvas, getCropRegionArgs } from "./CropMenu";

@@ -486,7 +485,7 @@ const ImageEditorOverlay = (props: IProps) => {
        if (!canvasRef.current) return;

        const editedFile = await getEditedFile();
        const fileType = await getFileType(editedFile);
        const fileType = await detectFileTypeInfo(editedFile);
        const tempImgURL = URL.createObjectURL(
            new Blob([editedFile], { type: fileType.mimeType }),
        );

@@ -507,15 +506,15 @@ const ImageEditorOverlay = (props: IProps) => {
        );

        const editedFile = await getEditedFile();
        const file: FileWithCollection = {
            file: editedFile,
            collectionID: props.file.collectionID,
        const file = {
            fileOrPath: editedFile,
            localID: 1,
            collectionID: props.file.collectionID,
        };

        uploadManager.prepareForNewUpload();
        uploadManager.showUploadProgressDialog();
        uploadManager.queueFilesForUpload([file], [collection]);
        uploadManager.uploadFiles([file], [collection]);
        setFileURL(null);
        props.onClose();
        props.closePhotoViewer();
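For reference, the new upload call can be read as the small wrapper sketched below. It is hypothetical: the item shape ({ fileOrPath, localID, collectionID }) and the uploadManager calls are taken from the hunk above, while the Collection import path is assumed.

import uploadManager from "services/upload/uploadManager";
import { Collection } from "types/collection"; // assumed path

// Queue a freshly edited file for upload into the collection it came from.
const uploadEditedFile = (
    editedFile: File,
    collectionID: number,
    collection: Collection,
) => {
    uploadManager.prepareForNewUpload();
    uploadManager.showUploadProgressDialog();
    uploadManager.uploadFiles(
        [{ fileOrPath: editedFile, localID: 1, collectionID }],
        [collection],
    );
};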
@@ -10,12 +10,13 @@ import { EnteFile } from "types/file";
import {
    copyFileToClipboard,
    downloadSingleFile,
    getFileExtension,
    getFileFromURL,
    isRawFile,
    isSupportedRawFormat,
} from "utils/file";

import { FILE_TYPE } from "@/media/file-type";
import { lowercaseExtension } from "@/next/file";
import { FlexWrapper } from "@ente/shared/components/Container";
import EnteSpinner from "@ente/shared/components/EnteSpinner";
import AlbumOutlined from "@mui/icons-material/AlbumOutlined";

@@ -34,7 +35,6 @@ import InfoIcon from "@mui/icons-material/InfoOutlined";
import ReplayIcon from "@mui/icons-material/Replay";
import ZoomInOutlinedIcon from "@mui/icons-material/ZoomInOutlined";
import { Box, Button, styled } from "@mui/material";
import { FILE_TYPE } from "constants/file";
import {
    defaultLivePhotoDefaultOptions,
    photoSwipeV4Events,

@@ -43,10 +43,10 @@ import { t } from "i18next";
import isElectron from "is-electron";
import { AppContext } from "pages/_app";
import { GalleryContext } from "pages/gallery";
import { detectFileTypeInfo } from "services/detect-type";
import downloadManager, { LoadedLivePhotoSourceURL } from "services/download";
import { getParsedExifData } from "services/exif";
import { trashFiles } from "services/fileService";
import { getFileType } from "services/typeDetectionService";
import { getParsedExifData } from "services/upload/exifService";
import { SetFilesDownloadProgressAttributesCreator } from "types/gallery";
import { isClipboardItemPresent } from "utils/common";
import { pauseVideo, playVideo } from "utils/photoFrame";

@@ -348,7 +348,7 @@ function PhotoViewer(props: Iprops) {
    }

    function updateShowEditButton(file: EnteFile) {
        const extension = getFileExtension(file.metadata.title);
        const extension = lowercaseExtension(file.metadata.title);
        const isSupported =
            !isRawFile(extension) || isSupportedRawFormat(extension);
        setShowEditButton(
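The predicate in updateShowEditButton above can also be read as a standalone check, sketched here with the same helpers the hunk switches to (lowercaseExtension, isRawFile, isSupportedRawFormat); the import paths mirror the ones shown in this diff, and the helper itself is not part of the commit.

import { lowercaseExtension } from "@/next/file";
import { EnteFile } from "types/file";
import { isRawFile, isSupportedRawFormat } from "utils/file";

// A file is editable when it is not RAW, or when its RAW flavour is supported.
const isEditableImage = (file: EnteFile): boolean => {
    const extension = lowercaseExtension(file.metadata.title);
    return !isRawFile(extension) || isSupportedRawFormat(extension);
};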
@@ -594,7 +594,7 @@ function PhotoViewer(props: Iprops) {
                .image;
            fileObject = await getFileFromURL(url, file.metadata.title);
        }
        const fileTypeInfo = await getFileType(fileObject);
        const fileTypeInfo = await detectFileTypeInfo(fileObject);
        const exifData = await getParsedExifData(
            fileObject,
            fileTypeInfo,

@@ -611,9 +611,8 @@ function PhotoViewer(props: Iprops) {
        }
    } catch (e) {
        setExif({ key: file.src, value: null });
        const fileExtension = getFileExtension(file.metadata.title);
        log.error(
            `checkExifAvailable failed for extension ${fileExtension}`,
            `checkExifAvailable failed for file ${file.metadata.title}`,
            e,
        );
    }
@@ -1,8 +1,8 @@
import { FILE_TYPE } from "@/media/file-type";
import { Overlay } from "@ente/shared/components/Container";
import PhotoOutlined from "@mui/icons-material/PhotoOutlined";
import PlayCircleOutlineOutlined from "@mui/icons-material/PlayCircleOutlineOutlined";
import { styled } from "@mui/material";
import { FILE_TYPE } from "constants/file";

interface Iprops {
    fileType: FILE_TYPE;
Some files were not shown because too many files have changed in this diff.