Browse Source

[web] Upload refactoring - Part x/x (happy paths working checkpoint) (#1547)

Checkpoint
- Both upload and export work now (in both the desktop and web apps)
- Uploads after individual selection and takeout uploads still need to
be fixed when in desktop
Manav Rathi 1 year ago
parent
commit
c61d58f6ea
34 changed files with 600 additions and 705 deletions
  1. 0 5
      desktop/.eslintrc.js
  2. 71 15
      desktop/src/main.ts
  3. 19 1
      desktop/src/main/dialogs.ts
  4. 1 43
      desktop/src/main/init.ts
  5. 0 3
      desktop/src/main/ipc.ts
  6. 0 23
      desktop/src/main/services/fs.ts
  7. 1 1
      desktop/src/main/services/ml-clip.ts
  8. 12 8
      desktop/src/main/stream.ts
  9. 1 3
      desktop/src/main/utils-electron.ts
  10. 0 4
      desktop/src/preload.ts
  11. 5 23
      web/apps/photos/src/components/Sidebar/DebugSection.tsx
  12. 113 9
      web/apps/photos/src/components/Upload/Uploader.tsx
  13. 10 2
      web/apps/photos/src/components/WatchFolder.tsx
  14. 0 45
      web/apps/photos/src/constants/upload.ts
  15. 1 1
      web/apps/photos/src/services/deduplicationService.ts
  16. 2 3
      web/apps/photos/src/services/detect-type.ts
  17. 0 42
      web/apps/photos/src/services/readerService.ts
  18. 1 1
      web/apps/photos/src/services/upload/publicUploadHttpClient.ts
  19. 30 2
      web/apps/photos/src/services/upload/thumbnail.ts
  20. 29 1
      web/apps/photos/src/services/upload/uploadHttpClient.ts
  21. 253 165
      web/apps/photos/src/services/upload/uploadService.ts
  22. 8 2
      web/apps/photos/src/services/watch.ts
  23. 4 3
      web/apps/photos/src/utils/native-stream.ts
  24. 0 128
      web/apps/photos/src/utils/upload/index.ts
  25. 0 29
      web/apps/photos/src/utils/upload/uploadRetrier.ts
  26. 7 3
      web/apps/photos/src/worker/ffmpeg.worker.ts
  27. 0 111
      web/apps/photos/tests/zip-file-reading.test.ts
  28. 4 0
      web/packages/media/file.ts
  29. 13 5
      web/packages/media/live-photo.ts
  30. 1 1
      web/packages/next/log.ts
  31. 0 1
      web/packages/next/types/ipc.ts
  32. 0 14
      web/packages/shared/crypto/types.ts
  33. 14 0
      web/packages/shared/hooks/useFileInput.tsx
  34. 0 8
      web/packages/shared/utils/data-stream.ts

+ 0 - 5
desktop/.eslintrc.js

@@ -7,11 +7,6 @@ module.exports = {
         // "plugin:@typescript-eslint/strict-type-checked",
         // "plugin:@typescript-eslint/stylistic-type-checked",
     ],
-    /* Temporarily add a global
-       Enhancement: Remove me */
-    globals: {
-        NodeJS: "readonly",
-    },
     plugins: ["@typescript-eslint"],
     parser: "@typescript-eslint/parser",
     parserOptions: {

+ 71 - 15
desktop/src/main.ts

@@ -8,18 +8,15 @@
  *
  * https://www.electronjs.org/docs/latest/tutorial/process-model#the-main-process
  */
-import { nativeImage } from "electron";
-import { app, BrowserWindow, Menu, protocol, Tray } from "electron/main";
+
+import { nativeImage, shell } from "electron/common";
+import type { WebContents } from "electron/main";
+import { BrowserWindow, Menu, Tray, app, protocol } from "electron/main";
 import serveNextAt from "next-electron-server";
 import { existsSync } from "node:fs";
 import fs from "node:fs/promises";
 import os from "node:os";
 import path from "node:path";
-import {
-    addAllowOriginHeader,
-    handleDownloads,
-    handleExternalLinks,
-} from "./main/init";
 import { attachFSWatchIPCHandlers, attachIPCHandlers } from "./main/ipc";
 import log, { initLogging } from "./main/log";
 import { createApplicationMenu, createTrayContextMenu } from "./main/menu";
@@ -34,7 +31,7 @@ import { isDev } from "./main/utils-electron";
 /**
  * The URL where the renderer HTML is being served from.
  */
-export const rendererURL = "ente://app";
+const rendererURL = "ente://app";
 
 /**
  * We want to hide our window instead of closing it when the user presses the
@@ -209,7 +206,7 @@ const createMainWindow = async () => {
     //  webContents is not responding to input messages for > 30 seconds."
     window.webContents.on("unresponsive", () => {
         log.error(
-            "Main window's webContents are unresponsive, will restart the renderer process",
+            "MainWindow's webContents are unresponsive, will restart the renderer process",
         );
         window.webContents.forcefullyCrashRenderer();
     });
@@ -240,6 +237,58 @@ const createMainWindow = async () => {
     return window;
 };
 
+/**
+ * Automatically set the save path for user initiated downloads to the system's
+ * "downloads" directory instead of asking the user to select a save location.
+ */
+export const setDownloadPath = (webContents: WebContents) => {
+    webContents.session.on("will-download", (_, item) => {
+        item.setSavePath(
+            uniqueSavePath(app.getPath("downloads"), item.getFilename()),
+        );
+    });
+};
+
+const uniqueSavePath = (dirPath: string, fileName: string) => {
+    const { name, ext } = path.parse(fileName);
+
+    let savePath = path.join(dirPath, fileName);
+    let n = 1;
+    while (existsSync(savePath)) {
+        const suffixedName = [`${name}(${n})`, ext].filter((x) => x).join(".");
+        savePath = path.join(dirPath, suffixedName);
+        n++;
+    }
+    return savePath;
+};
+
+/**
+ * Allow opening external links, e.g. when the user clicks on the "Feature
+ * requests" button in the sidebar (to open our GitHub repository), or when they
+ * click the "Support" button to send an email to support.
+ *
+ * @param webContents The renderer to configure.
+ */
+export const allowExternalLinks = (webContents: WebContents) => {
+    // By default, if the user were to open a link, say
+    // https://github.com/ente-io/ente/discussions, then it would open a _new_
+    // BrowserWindow within our app.
+    //
+    // This is not the behaviour we want; what we want is to ask the system to
+    // handle the link (e.g. open the URL in the default browser, or if it is a
+    // mailto: link, then open the user's mail client).
+    //
+    // Returning `action` "deny" accomplishes this.
+    webContents.setWindowOpenHandler(({ url }) => {
+        if (!url.startsWith(rendererURL)) {
+            shell.openExternal(url);
+            return { action: "deny" };
+        } else {
+            return { action: "allow" };
+        }
+    });
+};
+
 /**
  * Add an icon for our app in the system tray.
  *
@@ -342,19 +391,26 @@ const main = () => {
     //
     // Note that some Electron APIs can only be used after this event occurs.
     app.on("ready", async () => {
-        // Create window and prepare for renderer
+        // Create window and prepare for the renderer.
         mainWindow = await createMainWindow();
         attachIPCHandlers();
         attachFSWatchIPCHandlers(createWatcher(mainWindow));
         registerStreamProtocol();
-        handleDownloads(mainWindow);
-        handleExternalLinks(mainWindow);
-        addAllowOriginHeader(mainWindow);
 
-        // Start loading the renderer
+        // Configure the renderer's environment.
+        setDownloadPath(mainWindow.webContents);
+        allowExternalLinks(mainWindow.webContents);
+
+        // TODO(MR): Remove or resurrect
+        // The commit that introduced this header override had the message
+        // "fix cors issue for uploads". Not sure what that means, so disabling
+        // it for now to see why exactly this is required.
+        // addAllowOriginHeader(mainWindow);
+
+        // Start loading the renderer.
         mainWindow.loadURL(rendererURL);
 
-        // Continue on with the rest of the startup sequence
+        // Continue on with the rest of the startup sequence.
         Menu.setApplicationMenu(await createApplicationMenu(mainWindow));
         setupTrayItem(mainWindow);
         if (!isDev) setupAutoUpdater(mainWindow);

+ 19 - 1
desktop/src/main/dialogs.ts

@@ -1,7 +1,8 @@
 import { dialog } from "electron/main";
+import fs from "node:fs/promises";
 import path from "node:path";
 import type { ElectronFile } from "../types/ipc";
-import { getDirFilePaths, getElectronFile } from "./services/fs";
+import { getElectronFile } from "./services/fs";
 import { getElectronFilesFromGoogleZip } from "./services/upload";
 
 export const selectDirectory = async () => {
@@ -34,6 +35,23 @@ export const showUploadDirsDialog = async () => {
     return await Promise.all(filePaths.map(getElectronFile));
 };
 
+// https://stackoverflow.com/a/63111390
+const getDirFilePaths = async (dirPath: string) => {
+    if (!(await fs.stat(dirPath)).isDirectory()) {
+        return [dirPath];
+    }
+
+    let files: string[] = [];
+    const filePaths = await fs.readdir(dirPath);
+
+    for (const filePath of filePaths) {
+        const absolute = path.join(dirPath, filePath);
+        files = [...files, ...(await getDirFilePaths(absolute))];
+    }
+
+    return files;
+};
+
 export const showUploadZipDialog = async () => {
     const selectedFiles = await dialog.showOpenDialog({
         properties: ["openFile", "multiSelections"],

+ 1 - 43
desktop/src/main/init.ts

@@ -1,46 +1,4 @@
-import { BrowserWindow, app, shell } from "electron";
-import { existsSync } from "node:fs";
-import path from "node:path";
-import { rendererURL } from "../main";
-
-export function handleDownloads(mainWindow: BrowserWindow) {
-    mainWindow.webContents.session.on("will-download", (_, item) => {
-        item.setSavePath(
-            getUniqueSavePath(item.getFilename(), app.getPath("downloads")),
-        );
-    });
-}
-
-function getUniqueSavePath(filename: string, directory: string): string {
-    let uniqueFileSavePath = path.join(directory, filename);
-    const { name: filenameWithoutExtension, ext: extension } =
-        path.parse(filename);
-    let n = 0;
-    while (existsSync(uniqueFileSavePath)) {
-        n++;
-        // filter need to remove undefined extension from the array
-        // else [`${fileName}`, undefined].join(".") will lead to `${fileName}.` as joined string
-        const fileNameWithNumberedSuffix = [
-            `${filenameWithoutExtension}(${n})`,
-            extension,
-        ]
-            .filter((x) => x) // filters out undefined/null values
-            .join("");
-        uniqueFileSavePath = path.join(directory, fileNameWithNumberedSuffix);
-    }
-    return uniqueFileSavePath;
-}
-
-export function handleExternalLinks(mainWindow: BrowserWindow) {
-    mainWindow.webContents.setWindowOpenHandler(({ url }) => {
-        if (!url.startsWith(rendererURL)) {
-            shell.openExternal(url);
-            return { action: "deny" };
-        } else {
-            return { action: "allow" };
-        }
-    });
-}
+import { BrowserWindow } from "electron";
 
 export function addAllowOriginHeader(mainWindow: BrowserWindow) {
     mainWindow.webContents.session.webRequest.onHeadersReceived(

+ 0 - 3
desktop/src/main/ipc.ts

@@ -40,7 +40,6 @@ import {
     updateOnNextRestart,
 } from "./services/app-update";
 import { ffmpegExec } from "./services/ffmpeg";
-import { getDirFiles } from "./services/fs";
 import { convertToJPEG, generateImageThumbnail } from "./services/image";
 import {
     clipImageEmbedding,
@@ -216,8 +215,6 @@ export const attachIPCHandlers = () => {
     ipcMain.handle("getElectronFilesFromGoogleZip", (_, filePath: string) =>
         getElectronFilesFromGoogleZip(filePath),
     );
-
-    ipcMain.handle("getDirFiles", (_, dirPath: string) => getDirFiles(dirPath));
 };
 
 /**

+ 0 - 23
desktop/src/main/services/fs.ts

@@ -7,29 +7,6 @@ import log from "../log";
 
 const FILE_STREAM_CHUNK_SIZE: number = 4 * 1024 * 1024;
 
-export async function getDirFiles(dirPath: string) {
-    const files = await getDirFilePaths(dirPath);
-    const electronFiles = await Promise.all(files.map(getElectronFile));
-    return electronFiles;
-}
-
-// https://stackoverflow.com/a/63111390
-export const getDirFilePaths = async (dirPath: string) => {
-    if (!(await fs.stat(dirPath)).isDirectory()) {
-        return [dirPath];
-    }
-
-    let files: string[] = [];
-    const filePaths = await fs.readdir(dirPath);
-
-    for (const filePath of filePaths) {
-        const absolute = path.join(dirPath, filePath);
-        files = [...files, ...(await getDirFilePaths(absolute))];
-    }
-
-    return files;
-};
-
 const getFileStream = async (filePath: string) => {
     const file = await fs.open(filePath, "r");
     let offset = 0;

+ 1 - 1
desktop/src/main/services/ml-clip.ts

@@ -150,7 +150,7 @@ export const clipTextEmbeddingIfAvailable = async (text: string) => {
 
     // Don't wait for the download to complete
     if (typeof sessionOrStatus == "string") {
-        console.log(
+        log.info(
             "Ignoring CLIP text embedding request because model download is pending",
         );
         return undefined;

+ 12 - 8
desktop/src/main/stream.ts

@@ -57,10 +57,17 @@ const handleRead = async (path: string) => {
     try {
         const res = await net.fetch(pathToFileURL(path).toString());
         if (res.ok) {
-            // net.fetch defaults to text/plain, which might be fine
-            // in practice, but as an extra precaution indicate that
-            // this is binary data.
-            res.headers.set("Content-Type", "application/octet-stream");
+            // net.fetch already seems to add "Content-Type" and "Last-Modified"
+            // headers, but I couldn't find documentation for this. In any case,
+            // since we already are stat-ting the file for the "Content-Length",
+            // we explicitly add the "X-Last-Modified-Ms" too,
+            //
+            // 1. Guaranteeing its presence,
+            //
+            // 2. Having it be in the exact format we want (no string <-> date
+            //    conversions),
+            //
+            // 3. Retaining milliseconds.
 
             const stat = await fs.stat(path);
 
@@ -133,10 +140,7 @@ const convertWebReadableStreamToNode = (readableStream: ReadableStream) => {
     return rs;
 };
 
-const writeNodeStream = async (
-    filePath: string,
-    fileStream: NodeJS.ReadableStream,
-) => {
+const writeNodeStream = async (filePath: string, fileStream: Readable) => {
     const writeable = createWriteStream(filePath);
 
     fileStream.on("error", (error) => {

+ 1 - 3
desktop/src/main/utils-electron.ts

@@ -33,11 +33,9 @@ export const execAsync = (command: string | string[]) => {
         ? shellescape(command)
         : command;
     const startTime = Date.now();
-    log.debug(() => `Running shell command: ${escapedCommand}`);
     const result = execAsync_(escapedCommand);
     log.debug(
-        () =>
-            `Completed in ${Math.round(Date.now() - startTime)} ms (${escapedCommand})`,
+        () => `${escapedCommand} (${Math.round(Date.now() - startTime)} ms)`,
     );
     return result;
 };

+ 0 - 4
desktop/src/preload.ts

@@ -261,9 +261,6 @@ const getElectronFilesFromGoogleZip = (
 ): Promise<ElectronFile[]> =>
     ipcRenderer.invoke("getElectronFilesFromGoogleZip", filePath);
 
-const getDirFiles = (dirPath: string): Promise<ElectronFile[]> =>
-    ipcRenderer.invoke("getDirFiles", dirPath);
-
 /**
  * These objects exposed here will become available to the JS code in our
  * renderer (the web/ code) as `window.ElectronAPIs.*`
@@ -380,5 +377,4 @@ contextBridge.exposeInMainWorld("electron", {
     // -
 
     getElectronFilesFromGoogleZip,
-    getDirFiles,
 });

+ 5 - 23
web/apps/photos/src/components/Sidebar/DebugSection.tsx

@@ -9,10 +9,6 @@ import { useContext, useEffect, useState } from "react";
 import { Trans } from "react-i18next";
 import { isInternalUser } from "utils/user";
 import { testUpload } from "../../../tests/upload.test";
-import {
-    testZipFileReading,
-    testZipWithRootFileReadingTest,
-} from "../../../tests/zip-file-reading.test";
 
 export default function DebugSection() {
     const appContext = useContext(AppContext);
@@ -62,25 +58,11 @@ export default function DebugSection() {
                 </Typography>
             )}
             {isInternalUser() && (
-                <>
-                    <EnteMenuItem
-                        variant="secondary"
-                        onClick={testUpload}
-                        label={"Test Upload"}
-                    />
-
-                    <EnteMenuItem
-                        variant="secondary"
-                        onClick={testZipFileReading}
-                        label="Test Zip file reading"
-                    />
-
-                    <EnteMenuItem
-                        variant="secondary"
-                        onClick={testZipWithRootFileReadingTest}
-                        label="Zip with Root file Test"
-                    />
-                </>
+                <EnteMenuItem
+                    variant="secondary"
+                    onClick={testUpload}
+                    label={"Test Upload"}
+                />
             )}
         </>
     );

+ 113 - 9
web/apps/photos/src/components/Upload/Uploader.tsx

@@ -5,7 +5,7 @@ import { CustomError } from "@ente/shared/error";
 import { isPromise } from "@ente/shared/utils";
 import DiscFullIcon from "@mui/icons-material/DiscFull";
 import UserNameInputDialog from "components/UserNameInputDialog";
-import { PICKED_UPLOAD_TYPE, UPLOAD_STAGES } from "constants/upload";
+import { UPLOAD_STAGES } from "constants/upload";
 import { t } from "i18next";
 import isElectron from "is-electron";
 import { AppContext } from "pages/_app";
@@ -13,6 +13,7 @@ import { GalleryContext } from "pages/gallery";
 import { useContext, useEffect, useRef, useState } from "react";
 import billingService from "services/billingService";
 import { getLatestCollections } from "services/collectionService";
+import { exportMetadataDirectoryName } from "services/export";
 import {
     getPublicCollectionUID,
     getPublicCollectionUploaderName,
@@ -28,6 +29,7 @@ import type {
 import uploadManager, {
     setToUploadCollection,
 } from "services/upload/uploadManager";
+import { fopFileName } from "services/upload/uploadService";
 import watcher from "services/watch";
 import { NotificationAttributes } from "types/Notification";
 import { Collection } from "types/collection";
@@ -45,13 +47,6 @@ import {
     getDownloadAppMessage,
     getRootLevelFileWithFolderNotAllowMessage,
 } from "utils/ui";
-import {
-    DEFAULT_IMPORT_SUGGESTION,
-    getImportSuggestion,
-    groupFilesBasedOnParentFolder,
-    pruneHiddenFiles,
-    type ImportSuggestion,
-} from "utils/upload";
 import { SetCollectionNamerAttributes } from "../Collections/CollectionNamer";
 import { CollectionMappingChoiceModal } from "./CollectionMappingChoiceModal";
 import UploadProgress from "./UploadProgress";
@@ -59,6 +54,12 @@ import UploadTypeSelector from "./UploadTypeSelector";
 
 const FIRST_ALBUM_NAME = "My First Album";
 
+enum PICKED_UPLOAD_TYPE {
+    FILES = "files",
+    FOLDERS = "folders",
+    ZIPS = "zips",
+}
+
 interface Props {
     syncWithRemote: (force?: boolean, silent?: boolean) => Promise<void>;
     closeCollectionSelector?: () => void;
@@ -362,9 +363,12 @@ export default function Uploader(props: Props) {
             } else if (desktopFilePaths && desktopFilePaths.length > 0) {
                 // File selection from our desktop app
                 fileOrPathsToUpload.current = desktopFilePaths;
-                setDesktopFilePaths(undefined);
+                setDesktopFilePaths([]);
             }
 
+            log.debug(() => "Uploader received:");
+            log.debug(() => fileOrPathsToUpload.current);
+
             fileOrPathsToUpload.current = pruneHiddenFiles(
                 fileOrPathsToUpload.current,
             );
@@ -876,3 +880,103 @@ async function waitAndRun(
     }
     await task();
 }
+
+// This is used to prompt the user to make the upload strategy choice
+interface ImportSuggestion {
+    rootFolderName: string;
+    hasNestedFolders: boolean;
+    hasRootLevelFileWithFolder: boolean;
+}
+
+const DEFAULT_IMPORT_SUGGESTION: ImportSuggestion = {
+    rootFolderName: "",
+    hasNestedFolders: false,
+    hasRootLevelFileWithFolder: false,
+};
+
+function getImportSuggestion(
+    uploadType: PICKED_UPLOAD_TYPE,
+    paths: string[],
+): ImportSuggestion {
+    if (isElectron() && uploadType === PICKED_UPLOAD_TYPE.FILES) {
+        return DEFAULT_IMPORT_SUGGESTION;
+    }
+
+    const getCharCount = (str: string) => (str.match(/\//g) ?? []).length;
+    paths.sort((path1, path2) => getCharCount(path1) - getCharCount(path2));
+    const firstPath = paths[0];
+    const lastPath = paths[paths.length - 1];
+
+    const L = firstPath.length;
+    let i = 0;
+    const firstFileFolder = firstPath.substring(0, firstPath.lastIndexOf("/"));
+    const lastFileFolder = lastPath.substring(0, lastPath.lastIndexOf("/"));
+
+    while (i < L && firstPath.charAt(i) === lastPath.charAt(i)) i++;
+    let commonPathPrefix = firstPath.substring(0, i);
+
+    if (commonPathPrefix) {
+        commonPathPrefix = commonPathPrefix.substring(
+            0,
+            commonPathPrefix.lastIndexOf("/"),
+        );
+        if (commonPathPrefix) {
+            commonPathPrefix = commonPathPrefix.substring(
+                commonPathPrefix.lastIndexOf("/") + 1,
+            );
+        }
+    }
+    return {
+        rootFolderName: commonPathPrefix || null,
+        hasNestedFolders: firstFileFolder !== lastFileFolder,
+        hasRootLevelFileWithFolder: firstFileFolder === "",
+    };
+}
+
+// This function groups files that have the same parent folder into collections
+// For Example, for user files have a directory structure like this
+//              a
+//            / |  \
+//           b  j   c
+//          /|\    /  \
+//         e f g   h  i
+//
+// The files will be grouped into 3 collections.
+// [a => [j],
+// b => [e,f,g],
+// c => [h, i]]
+const groupFilesBasedOnParentFolder = (fileOrPaths: (File | string)[]) => {
+    const result = new Map<string, (File | string)[]>();
+    for (const fileOrPath of fileOrPaths) {
+        const filePath =
+            /* TODO(MR): ElectronFile */
+            typeof fileOrPath == "string"
+                ? fileOrPath
+                : (fileOrPath["path"] as string);
+
+        let folderPath = filePath.substring(0, filePath.lastIndexOf("/"));
+        // If the parent folder of a file is "metadata"
+        // we consider it to be part of the parent folder
+        // For e.g., for the FileList -> [a/x.png, a/metadata/x.png.json],
+        // they will both be grouped into the collection "a".
+        // This is to cluster the metadata json files in the same collection as
+        // the files they are for.
+        if (folderPath.endsWith(exportMetadataDirectoryName)) {
+            folderPath = folderPath.substring(0, folderPath.lastIndexOf("/"));
+        }
+        const folderName = folderPath.substring(
+            folderPath.lastIndexOf("/") + 1,
+        );
+        if (!folderName) throw Error("Unexpected empty folder name");
+        if (!result.has(folderName)) result.set(folderName, []);
+        result.get(folderName).push(fileOrPath);
+    }
+    return result;
+};
+
+/**
+ * Filter out hidden files from amongst {@link fileOrPaths}.
+ *
+ * Hidden files are those whose names begin with a "." (dot).
+ */
+const pruneHiddenFiles = (fileOrPaths: (File | string)[]) =>
+    fileOrPaths.filter((f) => !fopFileName(f).startsWith("."));

+ 10 - 2
web/apps/photos/src/components/WatchFolder.tsx

@@ -1,5 +1,5 @@
 import { ensureElectron } from "@/next/electron";
-import { basename } from "@/next/file";
+import { basename, dirname } from "@/next/file";
 import type { CollectionMapping, FolderWatch } from "@/next/types/ipc";
 import { ensure } from "@/utils/ensure";
 import {
@@ -32,7 +32,6 @@ import { t } from "i18next";
 import { AppContext } from "pages/_app";
 import React, { useContext, useEffect, useState } from "react";
 import watcher from "services/watch";
-import { areAllInSameDirectory } from "utils/upload";
 
 interface WatchFolderProps {
     open: boolean;
@@ -324,3 +323,12 @@ const EntryOptions: React.FC<EntryOptionsProps> = ({ confirmStopWatching }) => {
         </OverflowMenu>
     );
 };
+
+/**
+ * Return true if all the paths in the given list are items that belong to the
+ * same (arbitrary) directory.
+ *
+ * An empty list of paths is considered to be in the same directory.
+ */
+const areAllInSameDirectory = (paths: string[]) =>
+    new Set(paths.map(dirname)).size == 1;

+ 0 - 45
web/apps/photos/src/constants/upload.ts

@@ -1,15 +1,5 @@
-import { ENCRYPTION_CHUNK_SIZE } from "@ente/shared/crypto/constants";
 import { Location } from "types/metadata";
 
-// this is the chunk size of the un-encrypted file which is read and encrypted before uploading it as a single part.
-export const MULTIPART_PART_SIZE = 20 * 1024 * 1024;
-
-export const FILE_READER_CHUNK_SIZE = ENCRYPTION_CHUNK_SIZE;
-
-export const FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART = Math.floor(
-    MULTIPART_PART_SIZE / FILE_READER_CHUNK_SIZE,
-);
-
 export const RANDOM_PERCENTAGE_PROGRESS_FOR_PUT = () => 90 + 10 * Math.random();
 
 export const NULL_LOCATION: Location = { latitude: null, longitude: null };
@@ -34,38 +24,3 @@ export enum UPLOAD_RESULT {
     UPLOADED_WITH_STATIC_THUMBNAIL,
     ADDED_SYMLINK,
 }
-
-export enum PICKED_UPLOAD_TYPE {
-    FILES = "files",
-    FOLDERS = "folders",
-    ZIPS = "zips",
-}
-
-export const BLACK_THUMBNAIL_BASE64 =
-    "/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAEBAQEBAQEB" +
-    "AQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/2wBDAQEBAQEBAQ" +
-    "EBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/wAARC" +
-    "ACWASwDAREAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUF" +
-    "BAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk" +
-    "6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztL" +
-    "W2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAA" +
-    "AAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVY" +
-    "nLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImK" +
-    "kpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oAD" +
-    "AMBAAIRAxEAPwD/AD/6ACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" +
-    "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" +
-    "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKAC" +
-    "gAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" +
-    "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" +
-    "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" +
-    "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" +
-    "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" +
-    "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoA" +
-    "KACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" +
-    "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" +
-    "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" +
-    "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAK" +
-    "ACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoA" +
-    "KACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" +
-    "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" +
-    "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgD/9k=";

+ 1 - 1
web/apps/photos/src/services/deduplicationService.ts

@@ -1,3 +1,4 @@
+import { hasFileHash } from "@/media/file";
 import { FILE_TYPE } from "@/media/file-type";
 import type { Metadata } from "@/media/types/file";
 import log from "@/next/log";
@@ -5,7 +6,6 @@ import HTTPService from "@ente/shared/network/HTTPService";
 import { getEndpoint } from "@ente/shared/network/api";
 import { getToken } from "@ente/shared/storage/localStorage/helpers";
 import { EnteFile } from "types/file";
-import { hasFileHash } from "utils/upload";
 
 const ENDPOINT = getEndpoint();
 

+ 2 - 3
web/apps/photos/src/services/detect-type.ts

@@ -93,8 +93,7 @@ const readInitialChunkOfFile = async (file: File) => {
 
 const detectFileTypeFromBuffer = async (buffer: Uint8Array) => {
     const result = await FileType.fromBuffer(buffer);
-    if (!result?.ext || !result?.mime) {
-        throw Error(`Could not deduce file type from buffer`);
-    }
+    if (!result)
+        throw Error("Could not deduce file type from the file's contents");
     return result;
 };

+ 0 - 42
web/apps/photos/src/services/readerService.ts

@@ -1,42 +0,0 @@
-import { ElectronFile } from "@/next/types/file";
-
-export function getFileStream(file: File, chunkSize: number) {
-    const fileChunkReader = fileChunkReaderMaker(file, chunkSize);
-
-    const stream = new ReadableStream<Uint8Array>({
-        async pull(controller: ReadableStreamDefaultController) {
-            const chunk = await fileChunkReader.next();
-            if (chunk.done) {
-                controller.close();
-            } else {
-                controller.enqueue(chunk.value);
-            }
-        },
-    });
-    const chunkCount = Math.ceil(file.size / chunkSize);
-    return {
-        stream,
-        chunkCount,
-    };
-}
-
-async function* fileChunkReaderMaker(file: File, chunkSize: number) {
-    let offset = 0;
-    while (offset < file.size) {
-        const chunk = file.slice(offset, chunkSize + offset);
-        yield new Uint8Array(await chunk.arrayBuffer());
-        offset += chunkSize;
-    }
-    return null;
-}
-
-export async function getElectronFileStream(
-    file: ElectronFile,
-    chunkSize: number,
-) {
-    const chunkCount = Math.ceil(file.size / chunkSize);
-    return {
-        stream: await file.stream(),
-        chunkCount,
-    };
-}

+ 1 - 1
web/apps/photos/src/services/upload/publicUploadHttpClient.ts

@@ -3,7 +3,7 @@ import { CustomError, handleUploadError } from "@ente/shared/error";
 import HTTPService from "@ente/shared/network/HTTPService";
 import { getEndpoint } from "@ente/shared/network/api";
 import { EnteFile } from "types/file";
-import { retryHTTPCall } from "utils/upload/uploadRetrier";
+import { retryHTTPCall } from "./uploadHttpClient";
 import { MultipartUploadURLs, UploadFile, UploadURL } from "./uploadService";
 
 const ENDPOINT = getEndpoint();

+ 30 - 2
web/apps/photos/src/services/upload/thumbnail.ts

@@ -2,7 +2,6 @@ import { FILE_TYPE, type FileTypeInfo } from "@/media/file-type";
 import log from "@/next/log";
 import { type Electron } from "@/next/types/ipc";
 import { withTimeout } from "@ente/shared/utils";
-import { BLACK_THUMBNAIL_BASE64 } from "constants/upload";
 import * as ffmpeg from "services/ffmpeg";
 import { heicToJPEG } from "services/heic-convert";
 
@@ -206,4 +205,33 @@ export const generateThumbnailNative = async (
  * fails.
  */
 export const fallbackThumbnail = () =>
-    Uint8Array.from(atob(BLACK_THUMBNAIL_BASE64), (c) => c.charCodeAt(0));
+    Uint8Array.from(atob(blackThumbnailB64), (c) => c.charCodeAt(0));
+
+const blackThumbnailB64 =
+    "/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAEBAQEBAQEB" +
+    "AQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/2wBDAQEBAQEBAQ" +
+    "EBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/wAARC" +
+    "ACWASwDAREAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUF" +
+    "BAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk" +
+    "6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztL" +
+    "W2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAA" +
+    "AAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVY" +
+    "nLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImK" +
+    "kpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oAD" +
+    "AMBAAIRAxEAPwD/AD/6ACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" +
+    "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" +
+    "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKAC" +
+    "gAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" +
+    "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" +
+    "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" +
+    "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" +
+    "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" +
+    "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoA" +
+    "KACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" +
+    "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" +
+    "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" +
+    "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAK" +
+    "ACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoA" +
+    "KACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" +
+    "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" +
+    "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgD/9k=";

+ 29 - 1
web/apps/photos/src/services/upload/uploadHttpClient.ts

@@ -3,8 +3,8 @@ import { CustomError, handleUploadError } from "@ente/shared/error";
 import HTTPService from "@ente/shared/network/HTTPService";
 import { getEndpoint, getUploadEndpoint } from "@ente/shared/network/api";
 import { getToken } from "@ente/shared/storage/localStorage/helpers";
+import { wait } from "@ente/shared/utils";
 import { EnteFile } from "types/file";
-import { retryHTTPCall } from "utils/upload/uploadRetrier";
 import { MultipartUploadURLs, UploadFile, UploadURL } from "./uploadService";
 
 const ENDPOINT = getEndpoint();
@@ -236,3 +236,31 @@ class UploadHttpClient {
 }
 
 export default new UploadHttpClient();
+
+const retrySleepTimeInMilliSeconds = [2000, 5000, 10000];
+
+export async function retryHTTPCall(
+    func: () => Promise<any>,
+    checkForBreakingError?: (error) => void,
+): Promise<any> {
+    const retrier = async (
+        func: () => Promise<any>,
+        attemptNumber: number = 0,
+    ) => {
+        try {
+            const resp = await func();
+            return resp;
+        } catch (e) {
+            if (checkForBreakingError) {
+                checkForBreakingError(e);
+            }
+            if (attemptNumber < retrySleepTimeInMilliSeconds.length) {
+                await wait(retrySleepTimeInMilliSeconds[attemptNumber]);
+                return await retrier(func, attemptNumber + 1);
+            } else {
+                throw e;
+            }
+        }
+    };
+    return await retrier(func);
+}

+ 253 - 165
web/apps/photos/src/services/upload/uploadService.ts

@@ -1,3 +1,4 @@
+import { hasFileHash } from "@/media/file";
 import { FILE_TYPE, type FileTypeInfo } from "@/media/file-type";
 import { encodeLivePhoto } from "@/media/live-photo";
 import type { Metadata } from "@/media/types/file";
@@ -6,19 +7,12 @@ import { basename } from "@/next/file";
 import log from "@/next/log";
 import { CustomErrorMessage } from "@/next/types/ipc";
 import { ensure } from "@/utils/ensure";
+import { ENCRYPTION_CHUNK_SIZE } from "@ente/shared/crypto/constants";
 import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
-import {
-    B64EncryptionResult,
-    EncryptionResult,
-    LocalFileAttributes,
-} from "@ente/shared/crypto/types";
+import { B64EncryptionResult } from "@ente/shared/crypto/types";
 import { CustomError, handleUploadError } from "@ente/shared/error";
-import { isDataStream, type DataStream } from "@ente/shared/utils/data-stream";
 import { Remote } from "comlink";
 import {
-    FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART,
-    FILE_READER_CHUNK_SIZE,
-    MULTIPART_PART_SIZE,
     NULL_LOCATION,
     RANDOM_PERCENTAGE_PROGRESS_FOR_PUT,
     UPLOAD_RESULT,
@@ -45,10 +39,8 @@ import {
     updateMagicMetadata,
 } from "utils/magicMetadata";
 import { readStream } from "utils/native-stream";
-import { hasFileHash } from "utils/upload";
 import * as convert from "xml-js";
 import { detectFileTypeInfoFromChunk } from "../detect-type";
-import { getFileStream } from "../readerService";
 import { tryParseEpochMicrosecondsFromFileName } from "./date";
 import publicUploadHttpClient from "./publicUploadHttpClient";
 import type { ParsedMetadataJSON } from "./takeout";
@@ -61,6 +53,51 @@ import {
 import UploadHttpClient from "./uploadHttpClient";
 import type { UploadableFile } from "./uploadManager";
 
+/**
+ * A readable stream for a file, and its associated size and last modified time.
+ *
+ * This is the in-memory representation of the `fileOrPath` type that we usually
+ * pass around. See: [Note: Reading a fileOrPath]
+ */
+interface FileStream {
+    /**
+     * A stream of the file's contents
+     *
+     * This stream is guaranteed to emit data in ENCRYPTION_CHUNK_SIZE chunks
+     * (except the last chunk which can be smaller since a file would rarely
+     * align exactly to a ENCRYPTION_CHUNK_SIZE multiple).
+     *
+     * Note: A stream can only be read once!
+     */
+    stream: ReadableStream<Uint8Array>;
+    /**
+     * Number of chunks {@link stream} will emit, each ENCRYPTION_CHUNK_SIZE
+     * sized (except the last one).
+     */
+    chunkCount: number;
+    /**
+     * The size in bytes of the underlying file.
+     */
+    fileSize: number;
+    /**
+     * The modification time of the file, in epoch milliseconds.
+     */
+    lastModifiedMs: number;
+    /**
+     * Set to the underlying {@link File} when we also have access to it.
+     */
+    file?: File;
+}
+
+/**
+ * If the stream we have is at least 5 ENCRYPTION_CHUNK_SIZE chunks, then use
+ * multipart uploads for it, with each multipart-part containing 5 chunks.
+ *
+ * ENCRYPTION_CHUNK_SIZE is 4 MB, and the number of chunks in a single upload
+ * part is 5, so each part is (up to) 20 MB.
+ */
+const multipartChunksPerPart = 5;
+
 /** Upload files to cloud storage */
 class UploadService {
     private uploadURLs: UploadURL[] = [];
@@ -165,14 +202,14 @@ export const fopSize = async (fileOrPath: File | string): Promise<number> =>
 
 /* -- Various intermediate type used during upload -- */
 
-interface UploadAsset2 {
+interface UploadAsset {
     isLivePhoto?: boolean;
     fileOrPath?: File | string;
     livePhotoAssets?: LivePhotoAssets;
 }
 
-interface FileInMemory {
-    filedata: Uint8Array | DataStream;
+interface ThumbnailedFile {
+    fileStreamOrData: FileStream | Uint8Array;
     /** The JPEG data of the generated thumbnail */
     thumbnail: Uint8Array;
     /**
@@ -182,7 +219,7 @@ interface FileInMemory {
     hasStaticThumbnail: boolean;
 }
 
-interface FileWithMetadata extends Omit<FileInMemory, "hasStaticThumbnail"> {
+interface FileWithMetadata extends Omit<ThumbnailedFile, "hasStaticThumbnail"> {
     metadata: Metadata;
     localID: number;
     pubMagicMetadata: FilePublicMagicMetadata;
@@ -193,8 +230,38 @@ interface EncryptedFile {
     fileKey: B64EncryptionResult;
 }
 
+interface EncryptedFileStream {
+    /**
+     * A stream of the file's encrypted contents
+     *
+     * This stream is guaranteed to emit data in ENCRYPTION_CHUNK_SIZE chunks
+     * (except the last chunk which can be smaller since a file would rarely
+     * align exactly to a ENCRYPTION_CHUNK_SIZE multiple).
+     */
+    stream: ReadableStream<Uint8Array>;
+    /**
+     * Number of chunks {@link stream} will emit, each ENCRYPTION_CHUNK_SIZE
+     * sized (except the last one).
+     */
+    chunkCount: number;
+}
+
+interface LocalFileAttributes<
+    T extends string | Uint8Array | EncryptedFileStream,
+> {
+    encryptedData: T;
+    decryptionHeader: string;
+}
+
+interface EncryptionResult<
+    T extends string | Uint8Array | EncryptedFileStream,
+> {
+    file: LocalFileAttributes<T>;
+    key: string;
+}
+
 interface ProcessedFile {
-    file: LocalFileAttributes<Uint8Array | DataStream>;
+    file: LocalFileAttributes<Uint8Array | EncryptedFileStream>;
     thumbnail: LocalFileAttributes<Uint8Array>;
     metadata: LocalFileAttributes<string>;
     pubMagicMetadata: EncryptedMagicMetadata;
@@ -325,10 +392,8 @@ export const uploader = async (
 
         abortIfCancelled();
 
-        const { filedata, thumbnail, hasStaticThumbnail } = await readAsset(
-            fileTypeInfo,
-            uploadAsset,
-        );
+        const { fileStreamOrData, thumbnail, hasStaticThumbnail } =
+            await readAsset(fileTypeInfo, uploadAsset);
 
         if (hasStaticThumbnail) metadata.hasStaticThumbnail = true;
 
@@ -341,7 +406,7 @@ export const uploader = async (
 
         const fileWithMetadata: FileWithMetadata = {
             localID,
-            filedata,
+            fileStreamOrData,
             thumbnail,
             metadata,
             pubMagicMetadata,
@@ -403,13 +468,21 @@ export const uploader = async (
 /**
  * Read the given file or path into an in-memory representation.
  *
- * See: [Note: Reading a fileOrPath]
+ * [Note: Reading a fileOrPath]
  *
  * The file can be either a web
  * [File](https://developer.mozilla.org/en-US/docs/Web/API/File) or the absolute
- * path to a file on desk. When and why, read on.
+ * path to a file on disk.
+ *
+ * tl;dr; There are three cases:
+ *
+ * 1. web / File
+ * 2. desktop / File
+ * 3. desktop / path
  *
- * This code gets invoked in two contexts:
+ * For the when and why, read on.
+ *
+ * The code that accesses files (e.g. uploads) gets invoked in two contexts:
  *
  * 1. web: the normal mode, when we're running in as a web app in the browser.
  *
@@ -426,47 +499,47 @@ export const uploader = async (
  *
  * In the desktop context, this can be either a File or a path.
  *
- * 1. If the user provided us this file via some user interaction (say a drag
+ * 2. If the user provided us this file via some user interaction (say a drag
  *    and a drop), this'll still be a File.
  *
- * 2. However, when running in the desktop app we have the ability to access
+ * 3. However, when running in the desktop app we have the ability to access
  *    absolute paths on the user's file system. For example, if the user asks us
  *    to watch certain folders on their disk for changes, we'll be able to pick
  *    up new images being added, and in such cases, the parameter here will be a
  *    path. Another example is when resuming an previously interrupted upload -
  *    we'll only have the path at hand in such cases, not the File object.
  *
+ * Case 3, when we're provided a path, is simple. We don't have a choice, since
+ * we still cannot programmatically construct a File object (we can construct it
+ * on the Node.js layer, but it can't then be transferred over the IPC
+ * boundary). So all our operations use the path itself.
+ *
+ * Case 2 involves a choice on a use-case basis, since
+ *
+ * (a) unlike in the web context, such File objects also have the full path.
+ *     See: [Note: File paths when running under Electron].
+ *
+ * (b) neither File nor the path is a better choice for all use cases.
+ *
  * The advantage of the File object is that the browser has already read it into
  * memory for us. The disadvantage comes in the case where we need to
  * communicate with the native Node.js layer of our desktop app. Since this
  * communication happens over IPC, the File's contents need to be serialized and
  * copied, which is a bummer for large videos etc.
- *
- * So when we do have a path, we first try to see if we can perform IPC using
- * the path itself (e.g. when generating thumbnails). Eventually, we'll need to
- * read the file once when we need to encrypt and upload it, but if we're smart
- * we can do all the rest of the IPC operations using the path itself, and for
- * the read during upload using a streaming IPC mechanism.
  */
 const readFileOrPath = async (
     fileOrPath: File | string,
-): Promise<{
-    dataOrStream: Uint8Array | DataStream;
-    fileSize: number;
-    lastModifiedMs: number;
-}> => {
-    let dataOrStream: Uint8Array | DataStream;
+): Promise<FileStream> => {
+    let underlyingStream: ReadableStream;
+    let file: File | undefined;
     let fileSize: number;
     let lastModifiedMs: number;
 
     if (fileOrPath instanceof File) {
-        const file = fileOrPath;
+        file = fileOrPath;
+        underlyingStream = file.stream();
         fileSize = file.size;
         lastModifiedMs = file.lastModified;
-        dataOrStream =
-            fileSize > MULTIPART_PART_SIZE
-                ? getFileStream(file, FILE_READER_CHUNK_SIZE)
-                : new Uint8Array(await file.arrayBuffer());
     } else {
         const path = fileOrPath;
         const {
@@ -474,33 +547,46 @@ const readFileOrPath = async (
             size,
             lastModifiedMs: lm,
         } = await readStream(ensureElectron(), path);
+        underlyingStream = response.body;
         fileSize = size;
         lastModifiedMs = lm;
-        if (size > MULTIPART_PART_SIZE) {
-            const chunkCount = Math.ceil(size / FILE_READER_CHUNK_SIZE);
-            dataOrStream = { stream: response.body, chunkCount };
-        } else {
-            dataOrStream = new Uint8Array(await response.arrayBuffer());
-        }
     }
 
-    return { dataOrStream, fileSize, lastModifiedMs };
-};
+    const N = ENCRYPTION_CHUNK_SIZE;
+    const chunkCount = Math.ceil(fileSize / ENCRYPTION_CHUNK_SIZE);
+
+    // Pipe the underlying stream through a transformer that emits
+    // ENCRYPTION_CHUNK_SIZE-ed chunks (except the last one, which can be
+    // smaller).
+    let pending: Uint8Array | undefined;
+    const transformer = new TransformStream<Uint8Array, Uint8Array>({
+        async transform(
+            chunk: Uint8Array,
+            controller: TransformStreamDefaultController,
+        ) {
+            let next: Uint8Array;
+            if (pending) {
+                next = new Uint8Array(pending.length + chunk.length);
+                next.set(pending);
+                next.set(chunk, pending.length);
+                pending = undefined;
+            } else {
+                next = chunk;
+            }
+            while (next.length >= N) {
+                controller.enqueue(next.slice(0, N));
+                next = next.slice(N);
+            }
+            if (next.length) pending = next;
+        },
+        flush(controller: TransformStreamDefaultController) {
+            if (pending) controller.enqueue(pending);
+        },
+    });
 
-/** A variant of {@readFileOrPath} that always returns an {@link DataStream}. */
-const readFileOrPathStream = async (
-    fileOrPath: File | string,
-): Promise<DataStream> => {
-    if (fileOrPath instanceof File) {
-        return getFileStream(fileOrPath, FILE_READER_CHUNK_SIZE);
-    } else {
-        const { response, size } = await readStream(
-            ensureElectron(),
-            fileOrPath,
-        );
-        const chunkCount = Math.ceil(size / FILE_READER_CHUNK_SIZE);
-        return { stream: response.body, chunkCount };
-    }
+    const stream = underlyingStream.pipeThrough(transformer);
+
+    return { stream, chunkCount, fileSize, lastModifiedMs, file };
 };
 
 interface ReadAssetDetailsResult {
@@ -517,7 +603,7 @@ const readAssetDetails = async ({
     isLivePhoto,
     livePhotoAssets,
     fileOrPath,
-}: UploadAsset2): Promise<ReadAssetDetailsResult> =>
+}: UploadAsset): Promise<ReadAssetDetailsResult> =>
     isLivePhoto
         ? readLivePhotoDetails(livePhotoAssets)
         : readImageOrVideoDetails(fileOrPath);
@@ -549,18 +635,14 @@ const readLivePhotoDetails = async ({ image, video }: LivePhotoAssets) => {
  * @param fileOrPath See: [Note: Reading a fileOrPath]
  */
 const readImageOrVideoDetails = async (fileOrPath: File | string) => {
-    const { dataOrStream, fileSize, lastModifiedMs } =
+    const { stream, fileSize, lastModifiedMs } =
         await readFileOrPath(fileOrPath);
 
     const fileTypeInfo = await detectFileTypeInfoFromChunk(async () => {
-        if (dataOrStream instanceof Uint8Array) {
-            return dataOrStream;
-        } else {
-            const reader = dataOrStream.stream.getReader();
-            const chunk = ensure((await reader.read()).value);
-            await reader.cancel();
-            return chunk;
-        }
+        const reader = stream.getReader();
+        const chunk = ensure((await reader.read()).value);
+        await reader.cancel();
+        return chunk;
     }, fopFileName(fileOrPath));
 
     return { fileTypeInfo, fileSize, lastModifiedMs };
@@ -587,7 +669,7 @@ interface ExtractAssetMetadataResult {
  * {@link parsedMetadataJSONMap} for the assets. Return the resultant metadatum.
  */
 const extractAssetMetadata = async (
-    { isLivePhoto, fileOrPath, livePhotoAssets }: UploadAsset2,
+    { isLivePhoto, fileOrPath, livePhotoAssets }: UploadAsset,
     fileTypeInfo: FileTypeInfo,
     lastModifiedMs: number,
     collectionID: number,
@@ -759,7 +841,7 @@ const computeHash = async (
     fileOrPath: File | string,
     worker: Remote<DedicatedCryptoWorker>,
 ) => {
-    const { stream, chunkCount } = await readFileOrPathStream(fileOrPath);
+    const { stream, chunkCount } = await readFileOrPath(fileOrPath);
     const hashState = await worker.initChunkHashing();
 
     const streamReader = stream.getReader();
@@ -828,8 +910,8 @@ const areFilesSameNoHash = (f: Metadata, g: Metadata) => {
 
 const readAsset = async (
     fileTypeInfo: FileTypeInfo,
-    { isLivePhoto, fileOrPath, livePhotoAssets }: UploadAsset2,
-) =>
+    { isLivePhoto, fileOrPath, livePhotoAssets }: UploadAsset,
+): Promise<ThumbnailedFile> =>
     isLivePhoto
         ? await readLivePhoto(livePhotoAssets, fileTypeInfo)
         : await readImageOrVideo(fileOrPath, fileTypeInfo);
@@ -838,9 +920,8 @@ const readLivePhoto = async (
     livePhotoAssets: LivePhotoAssets,
     fileTypeInfo: FileTypeInfo,
 ) => {
-    const readImage = await readFileOrPath(livePhotoAssets.image);
     const {
-        filedata: imageDataOrStream,
+        fileStreamOrData: imageFileStreamOrData,
         thumbnail,
         hasStaticThumbnail,
     } = await withThumbnail(
@@ -849,28 +930,29 @@ const readLivePhoto = async (
             extension: fileTypeInfo.imageType,
             fileType: FILE_TYPE.IMAGE,
         },
-        readImage.dataOrStream,
-        readImage.fileSize,
+        await readFileOrPath(livePhotoAssets.image),
     );
-    const readVideo = await readFileOrPath(livePhotoAssets.video);
+    const videoFileStreamOrData = await readFileOrPath(livePhotoAssets.video);
 
-    // We can revisit this later, but the existing code always read the entire
-    // file into memory here, and to avoid changing the rest of the scaffolding
-    // retain the same behaviour.
+    // The JS zip library that encodeLivePhoto uses does not support
+    // ReadableStreams, so pass the file (blob) if we have one, otherwise read
+    // the entire stream into memory and pass the resultant data.
     //
-    // This is a reasonable assumption too, since the videos corresponding to
-    // live photos are only a couple of seconds long.
-    const toData = async (dataOrStream: Uint8Array | DataStream) =>
-        dataOrStream instanceof Uint8Array
-            ? dataOrStream
-            : await readEntireStream(dataOrStream.stream);
+    // This is a reasonable behaviour since the videos corresponding to live
+    // photos are only a couple of seconds long (we have already done a
+    // pre-flight check to ensure their size is small in `areLivePhotoAssets`).
+    const fileOrData = async (sd: FileStream | Uint8Array) => {
+        const _fs = async ({ file, stream }: FileStream) =>
+            file ? file : await readEntireStream(stream);
+        return sd instanceof Uint8Array ? sd : _fs(sd);
+    };
 
     return {
-        filedata: await encodeLivePhoto({
+        fileStreamOrData: await encodeLivePhoto({
             imageFileName: fopFileName(livePhotoAssets.image),
-            imageData: await toData(imageDataOrStream),
+            imageFileOrData: await fileOrData(imageFileStreamOrData),
             videoFileName: fopFileName(livePhotoAssets.video),
-            videoData: await toData(readVideo.dataOrStream),
+            videoFileOrData: await fileOrData(videoFileStreamOrData),
         }),
         thumbnail,
         hasStaticThumbnail,
@@ -881,8 +963,8 @@ const readImageOrVideo = async (
     fileOrPath: File | string,
     fileTypeInfo: FileTypeInfo,
 ) => {
-    const { dataOrStream, fileSize } = await readFileOrPath(fileOrPath);
-    return withThumbnail(fileOrPath, fileTypeInfo, dataOrStream, fileSize);
+    const fileStream = await readFileOrPath(fileOrPath);
+    return withThumbnail(fileOrPath, fileTypeInfo, fileStream);
 };
 
 // TODO(MR): Merge with the uploader
@@ -906,17 +988,17 @@ const moduleState = new ModuleState();
  * Augment the given {@link dataOrStream} with thumbnail information.
  *
  * This is a companion method for {@link readFileOrPath}, and can be used to
- * convert the result of {@link readFileOrPath} into an {@link FileInMemory}.
+ * convert the result of {@link readFileOrPath} into an {@link ThumbnailedFile}.
  *
- * Note: The returned dataOrStream might be different from the one that we
- * provide to it.
+ * Note: The `fileStream` in the returned ThumbnailedFile may be different from
+ * the one passed to the function.
  */
 const withThumbnail = async (
     fileOrPath: File | string,
     fileTypeInfo: FileTypeInfo,
-    dataOrStream: Uint8Array | DataStream,
-    fileSize: number,
-): Promise<FileInMemory> => {
+    fileStream: FileStream,
+): Promise<ThumbnailedFile> => {
+    let fileData: Uint8Array | undefined;
     let thumbnail: Uint8Array | undefined;
     let hasStaticThumbnail = false;
 
@@ -925,30 +1007,16 @@ const withThumbnail = async (
         fileTypeInfo.fileType == FILE_TYPE.IMAGE &&
         moduleState.isNativeImageThumbnailGenerationNotAvailable;
 
-    // 1. Native thumbnail generation.
+    // 1. Native thumbnail generation using file's path.
     if (electron && !notAvailable) {
         try {
-            if (fileOrPath instanceof File) {
-                if (dataOrStream instanceof Uint8Array) {
-                    thumbnail = await generateThumbnailNative(
-                        electron,
-                        dataOrStream,
-                        fileTypeInfo,
-                    );
-                } else {
-                    // This was large enough to need streaming, and trying to
-                    // read it into memory or copying over IPC might cause us to
-                    // run out of memory. So skip the native generation for it,
-                    // instead let it get processed by the browser based
-                    // thumbnailer (case 2).
-                }
-            } else {
-                thumbnail = await generateThumbnailNative(
-                    electron,
-                    fileOrPath,
-                    fileTypeInfo,
-                );
-            }
+            // When running in the context of our desktop app, File paths will
+            // be absolute. See: [Note: File paths when running under Electron].
+            thumbnail = await generateThumbnailNative(
+                electron,
+                fileOrPath instanceof File ? fileOrPath["path"] : fileOrPath,
+                fileTypeInfo,
+            );
         } catch (e) {
             if (e.message == CustomErrorMessage.NotAvailable) {
                 moduleState.isNativeImageThumbnailGenerationNotAvailable = true;
@@ -961,38 +1029,47 @@ const withThumbnail = async (
     if (!thumbnail) {
         let blob: Blob | undefined;
         if (fileOrPath instanceof File) {
-            // 2. Browser based thumbnail generation for `File`s.
+            // 2. Browser based thumbnail generation for File (blobs).
             blob = fileOrPath;
         } else {
             // 3. Browser based thumbnail generation for paths.
-            if (dataOrStream instanceof Uint8Array) {
-                blob = new Blob([dataOrStream]);
+            //
+            // There are two reasons why we could get here:
+            //
+            // - We're running under Electron, but thumbnail generation is not
+            //   available. This is currently only a specific scenario for image
+            //   files on Windows.
+            //
+            //   - We're running under Electron, but the thumbnail generation
+            //   otherwise failed for some exception.
+            //
+            // The fallback in this case involves reading the entire stream into
+            // memory, and passing that data across the IPC boundary in a single
+            // go (i.e. not in a streaming manner). This is risky for videos of
+            // unbounded sizes, plus that isn't the expected scenario. So
+            // instead of trying to cater for arbitrary exceptions, we only run
+            // this fallback to cover for the case where thumbnail generation
+            // was not available for an image file on Windows. If/when we add
+            // support of native thumbnailing on Windows too, this entire branch
+            // can be removed.
+
+            if (fileTypeInfo.fileType == FILE_TYPE.IMAGE) {
+                const data = await readEntireStream(fileStream.stream);
+                blob = new Blob([data]);
+
+                // The Readable stream cannot be read twice, so use the data
+                // directly for subsequent steps.
+                fileData = data;
             } else {
-                // Read the stream into memory. Don't try this fallback for huge
-                // files though lest we run out of memory.
-                if (fileSize < 100 * 1024 * 1024 /* 100 MB */) {
-                    const data = await readEntireStream(dataOrStream.stream);
-                    // The Readable stream cannot be read twice, so also
-                    // overwrite the stream with the data we read.
-                    dataOrStream = data;
-                    blob = new Blob([data]);
-                } else {
-                    // There isn't a normal scenario where this should happen.
-                    // Case 1, should've already worked, and the only known
-                    // reason it'd have been  skipped is for image files on
-                    // Windows, but those should be less than 100 MB.
-                    //
-                    // So don't risk running out of memory for a case we don't
-                    // comprehend.
-                    log.error(
-                        `Not using browser based thumbnail generation fallback for large file at path ${fileOrPath}`,
-                    );
-                }
+                log.warn(
+                    `Not using browser based thumbnail generation fallback for video at path ${fileOrPath}`,
+                );
             }
         }
 
         try {
-            thumbnail = await generateThumbnailWeb(blob, fileTypeInfo);
+            if (blob)
+                thumbnail = await generateThumbnailWeb(blob, fileTypeInfo);
         } catch (e) {
             log.error("Web thumbnail creation failed", e);
         }
@@ -1004,7 +1081,7 @@ const withThumbnail = async (
     }
 
     return {
-        filedata: dataOrStream,
+        fileStreamOrData: fileData ?? fileStream,
         thumbnail,
         hasStaticThumbnail,
     };
@@ -1029,7 +1106,7 @@ const encryptFile = async (
     worker: Remote<DedicatedCryptoWorker>,
 ): Promise<EncryptedFile> => {
     const { key: fileKey, file: encryptedFiledata } = await encryptFiledata(
-        file.filedata,
+        file.fileStreamOrData,
         worker,
     );
 
@@ -1071,15 +1148,15 @@ const encryptFile = async (
 };
 
 const encryptFiledata = async (
-    filedata: Uint8Array | DataStream,
+    fileStreamOrData: FileStream | Uint8Array,
     worker: Remote<DedicatedCryptoWorker>,
-): Promise<EncryptionResult<Uint8Array | DataStream>> =>
-    isDataStream(filedata)
-        ? await encryptFileStream(filedata, worker)
-        : await worker.encryptFile(filedata);
+): Promise<EncryptionResult<Uint8Array | EncryptedFileStream>> =>
+    fileStreamOrData instanceof Uint8Array
+        ? await worker.encryptFile(fileStreamOrData)
+        : await encryptFileStream(fileStreamOrData, worker);
 
 const encryptFileStream = async (
-    fileData: DataStream,
+    fileData: FileStream,
     worker: Remote<DedicatedCryptoWorker>,
 ) => {
     const { stream, chunkCount } = fileData;
@@ -1120,27 +1197,38 @@ const uploadToBucket = async (
     try {
         let fileObjectKey: string = null;
 
-        if (isDataStream(file.file.encryptedData)) {
+        const encryptedData = file.file.encryptedData;
+        if (
+            !(encryptedData instanceof Uint8Array) &&
+            encryptedData.chunkCount >= multipartChunksPerPart
+        ) {
+            // We have a stream, and it is more than multipartChunksPerPart
+            // chunks long, so use a multipart upload to upload it.
             fileObjectKey = await uploadStreamUsingMultipart(
                 file.localID,
-                file.file.encryptedData,
+                encryptedData,
                 makeProgessTracker,
                 isCFUploadProxyDisabled,
                 abortIfCancelled,
             );
         } else {
+            const data =
+                encryptedData instanceof Uint8Array
+                    ? encryptedData
+                    : await readEntireStream(encryptedData.stream);
+
             const progressTracker = makeProgessTracker(file.localID);
             const fileUploadURL = await uploadService.getUploadURL();
             if (!isCFUploadProxyDisabled) {
                 fileObjectKey = await UploadHttpClient.putFileV2(
                     fileUploadURL,
-                    file.file.encryptedData as Uint8Array,
+                    data,
                     progressTracker,
                 );
             } else {
                 fileObjectKey = await UploadHttpClient.putFile(
                     fileUploadURL,
-                    file.file.encryptedData as Uint8Array,
+                    data,
                     progressTracker,
                 );
             }
@@ -1189,13 +1277,13 @@ interface PartEtag {
 
 async function uploadStreamUsingMultipart(
     fileLocalID: number,
-    dataStream: DataStream,
+    dataStream: EncryptedFileStream,
     makeProgessTracker: MakeProgressTracker,
     isCFUploadProxyDisabled: boolean,
     abortIfCancelled: () => void,
 ) {
     const uploadPartCount = Math.ceil(
-        dataStream.chunkCount / FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART,
+        dataStream.chunkCount / multipartChunksPerPart,
     );
     const multipartUploadURLs =
         await uploadService.fetchMultipartUploadURLs(uploadPartCount);
@@ -1255,7 +1343,7 @@ async function combineChunksToFormUploadPart(
     streamReader: ReadableStreamDefaultReader<Uint8Array>,
 ) {
     const combinedChunks = [];
-    for (let i = 0; i < FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART; i++) {
+    for (let i = 0; i < multipartChunksPerPart; i++) {
         const { done, value: chunk } = await streamReader.read();
         if (done) {
             break;

+ 8 - 2
web/apps/photos/src/services/watch.ts

@@ -20,7 +20,6 @@ import uploadManager, {
 import { Collection } from "types/collection";
 import { EncryptedEnteFile } from "types/file";
 import { groupFilesBasedOnCollectionID } from "utils/file";
-import { isHiddenFile } from "utils/upload";
 import { removeFromCollection } from "./collectionService";
 import { getLocalFiles } from "./fileService";
 
@@ -596,6 +595,13 @@ const pathsToUpload = (paths: string[], watch: FolderWatch) =>
         // Files that are on disk and are neither synced nor ignored.
         .filter((path) => !isSyncedOrIgnoredPath(path, watch));
 
+/**
+ * Return true if the file at the given {@link path} is hidden.
+ *
+ * Hidden files are those whose names begin with a "." (dot).
+ */
+const isHiddenFile = (path: string) => basename(path).startsWith(".");
+
 /**
  * Return the paths to previously synced files that are no longer on disk and so
  * must be removed from the Ente collection.
@@ -611,7 +617,7 @@ const isSyncedOrIgnoredPath = (path: string, watch: FolderWatch) =>
 
 const collectionNameForPath = (path: string, watch: FolderWatch) =>
     watch.collectionMapping == "root"
-        ? dirname(watch.folderPath)
+        ? basename(watch.folderPath)
         : parentDirectoryName(path);
 
 const parentDirectoryName = (path: string) => basename(dirname(path));

+ 4 - 3
web/apps/photos/src/utils/native-stream.ts

@@ -51,10 +51,11 @@ export const readStream = async (
 };
 
 const readNumericHeader = (res: Response, key: string) => {
-    const value = +res.headers[key];
+    const valueText = res.headers.get(key);
+    const value = +valueText;
     if (isNaN(value))
         throw new Error(
-            `Expected a numeric ${key} when reading a stream response: ${res}`,
+            `Expected a numeric ${key} when reading a stream response, instead got ${valueText}`,
         );
     return value;
 };
@@ -101,7 +102,7 @@ export const writeStream = async (
         // GET can't have a body
         method: "POST",
         body: stream,
-        // @ts-expect-error TypeScript's libdom.d.ts does not include the
+        // --@ts-expect-error TypeScript's libdom.d.ts does not include the
         // "duplex" parameter, e.g. see
         // https://github.com/node-fetch/node-fetch/issues/1769.
         duplex: "half",

+ 0 - 128
web/apps/photos/src/utils/upload/index.ts

@@ -1,128 +0,0 @@
-import type { Metadata } from "@/media/types/file";
-import { basename, dirname } from "@/next/file";
-import { PICKED_UPLOAD_TYPE } from "constants/upload";
-import isElectron from "is-electron";
-import { exportMetadataDirectoryName } from "services/export";
-import { fopFileName } from "services/upload/uploadService";
-
-export const hasFileHash = (file: Metadata) =>
-    file.hash || (file.imageHash && file.videoHash);
-
-/**
- * Return true if all the paths in the given list are items that belong to the
- * same (arbitrary) directory.
- *
- * Empty list of paths is considered to be in the same directory.
- */
-export const areAllInSameDirectory = (paths: string[]) =>
-    new Set(paths.map(dirname)).size == 1;
-
-// This is used to prompt the user the make upload strategy choice
-export interface ImportSuggestion {
-    rootFolderName: string;
-    hasNestedFolders: boolean;
-    hasRootLevelFileWithFolder: boolean;
-}
-
-export const DEFAULT_IMPORT_SUGGESTION: ImportSuggestion = {
-    rootFolderName: "",
-    hasNestedFolders: false,
-    hasRootLevelFileWithFolder: false,
-};
-
-export function getImportSuggestion(
-    uploadType: PICKED_UPLOAD_TYPE,
-    paths: string[],
-): ImportSuggestion {
-    if (isElectron() && uploadType === PICKED_UPLOAD_TYPE.FILES) {
-        return DEFAULT_IMPORT_SUGGESTION;
-    }
-
-    const getCharCount = (str: string) => (str.match(/\//g) ?? []).length;
-    paths.sort((path1, path2) => getCharCount(path1) - getCharCount(path2));
-    const firstPath = paths[0];
-    const lastPath = paths[paths.length - 1];
-
-    const L = firstPath.length;
-    let i = 0;
-    const firstFileFolder = firstPath.substring(0, firstPath.lastIndexOf("/"));
-    const lastFileFolder = lastPath.substring(0, lastPath.lastIndexOf("/"));
-
-    while (i < L && firstPath.charAt(i) === lastPath.charAt(i)) i++;
-    let commonPathPrefix = firstPath.substring(0, i);
-
-    if (commonPathPrefix) {
-        commonPathPrefix = commonPathPrefix.substring(
-            0,
-            commonPathPrefix.lastIndexOf("/"),
-        );
-        if (commonPathPrefix) {
-            commonPathPrefix = commonPathPrefix.substring(
-                commonPathPrefix.lastIndexOf("/") + 1,
-            );
-        }
-    }
-    return {
-        rootFolderName: commonPathPrefix || null,
-        hasNestedFolders: firstFileFolder !== lastFileFolder,
-        hasRootLevelFileWithFolder: firstFileFolder === "",
-    };
-}
-
-// This function groups files that are that have the same parent folder into collections
-// For Example, for user files have a directory structure like this
-//              a
-//            / |  \
-//           b  j   c
-//          /|\    /  \
-//         e f g   h  i
-//
-// The files will grouped into 3 collections.
-// [a => [j],
-// b => [e,f,g],
-// c => [h, i]]
-export const groupFilesBasedOnParentFolder = (
-    fileOrPaths: (File | string)[],
-) => {
-    const result = new Map<string, (File | string)[]>();
-    for (const fileOrPath of fileOrPaths) {
-        const filePath =
-            /* TODO(MR): ElectronFile */
-            typeof fileOrPath == "string"
-                ? fileOrPath
-                : (fileOrPath["path"] as string);
-
-        let folderPath = filePath.substring(0, filePath.lastIndexOf("/"));
-        // If the parent folder of a file is "metadata"
-        // we consider it to be part of the parent folder
-        // For Eg,For FileList  -> [a/x.png, a/metadata/x.png.json]
-        // they will both we grouped into the collection "a"
-        // This is cluster the metadata json files in the same collection as the file it is for
-        if (folderPath.endsWith(exportMetadataDirectoryName)) {
-            folderPath = folderPath.substring(0, folderPath.lastIndexOf("/"));
-        }
-        const folderName = folderPath.substring(
-            folderPath.lastIndexOf("/") + 1,
-        );
-        if (!folderName) throw Error("Unexpected empty folder name");
-        if (!result.has(folderName)) result.set(folderName, []);
-        result.get(folderName).push(fileOrPath);
-    }
-    return result;
-};
-
-/**
- * Filter out hidden files from amongst {@link fileOrPaths}.
- *
- * Hidden files are those whose names begin with a "." (dot).
- */
-
-export const pruneHiddenFiles = (fileOrPaths: (File | string)[]) =>
-    fileOrPaths.filter((f) => !fopFileName(f).startsWith("."));
-
-/**
- * Return true if the file at the given {@link path} is hidden.
- *
- * Hidden files are those whose names begin with a "." (dot).
- */
-export const isHiddenFile = (path: string) => basename(path).startsWith(".");

+ 0 - 29
web/apps/photos/src/utils/upload/uploadRetrier.ts

@@ -1,29 +0,0 @@
-import { wait } from "@ente/shared/utils";
-
-const retrySleepTimeInMilliSeconds = [2000, 5000, 10000];
-
-export async function retryHTTPCall(
-    func: () => Promise<any>,
-    checkForBreakingError?: (error) => void,
-): Promise<any> {
-    const retrier = async (
-        func: () => Promise<any>,
-        attemptNumber: number = 0,
-    ) => {
-        try {
-            const resp = await func();
-            return resp;
-        } catch (e) {
-            if (checkForBreakingError) {
-                checkForBreakingError(e);
-            }
-            if (attemptNumber < retrySleepTimeInMilliSeconds.length) {
-                await wait(retrySleepTimeInMilliSeconds[attemptNumber]);
-                return await retrier(func, attemptNumber + 1);
-            } else {
-                throw e;
-            }
-        }
-    };
-    return await retrier(func);
-}

+ 7 - 3
web/apps/photos/src/worker/ffmpeg.worker.ts

@@ -62,12 +62,16 @@ const ffmpegExec = async (
     const inputData = new Uint8Array(await blob.arrayBuffer());
 
     try {
-        ffmpeg.FS("writeFile", inputPath, inputData);
+        const startTime = Date.now();
 
-        log.debug(() => `[wasm] ffmpeg ${cmd.join(" ")}`);
+        ffmpeg.FS("writeFile", inputPath, inputData);
         await ffmpeg.run(...cmd);
 
-        return ffmpeg.FS("readFile", outputPath);
+        const result = ffmpeg.FS("readFile", outputPath);
+
+        const ms = Math.round(Date.now() - startTime);
+        log.debug(() => `[wasm] ffmpeg ${cmd.join(" ")} (${ms} ms)`);
+        return result;
     } finally {
         try {
             ffmpeg.FS("unlink", inputPath);

+ 0 - 111
web/apps/photos/tests/zip-file-reading.test.ts

@@ -1,111 +0,0 @@
-import { getFileNameSize } from "@/next/file";
-import type { DataStream } from "@ente/shared/utils/data-stream";
-import { FILE_READER_CHUNK_SIZE, PICKED_UPLOAD_TYPE } from "constants/upload";
-import { getElectronFileStream, getFileStream } from "services/readerService";
-import { getImportSuggestion } from "utils/upload";
-
-// This was for used to verify that converting from the browser readable stream
-// to the node readable stream correctly handles files that align on the 4 MB
-// data boundary. This expects a zip file containing random files of various
-// sizes starting from 1M to 20M.
-export const testZipFileReading = async () => {
-    try {
-        const electron = globalThis.electron;
-        if (!electron) {
-            console.log("testZipFileReading Check is for desktop only");
-            return;
-        }
-        if (!process.env.NEXT_PUBLIC_FILE_READING_TEST_ZIP_PATH) {
-            throw Error(
-                "upload test failed NEXT_PUBLIC_FILE_READING_TEST_ZIP_PATH missing",
-            );
-        }
-        const files = await electron.getElectronFilesFromGoogleZip(
-            process.env.NEXT_PUBLIC_FILE_READING_TEST_ZIP_PATH,
-        );
-        if (!files?.length) {
-            throw Error(
-                `testZipFileReading Check failed ❌
-                No files selected`,
-            );
-        }
-        console.log("test zip file reading check started");
-        let i = 0;
-        for (const file of files) {
-            i++;
-            let filedata: DataStream;
-            if (file instanceof File) {
-                filedata = getFileStream(file, FILE_READER_CHUNK_SIZE);
-            } else {
-                filedata = await getElectronFileStream(
-                    file,
-                    FILE_READER_CHUNK_SIZE,
-                );
-            }
-            const streamReader = filedata.stream.getReader();
-            for (let i = 0; i < filedata.chunkCount; i++) {
-                const { done } = await streamReader.read();
-                if (done) {
-                    throw Error(
-                        `testZipFileReading Check failed ❌
-                        ${getFileNameSize(
-                            file,
-                        )} less than expected chunks, expected: ${
-                            filedata.chunkCount
-                        }, got ${i - 1}`,
-                    );
-                }
-            }
-            const { done } = await streamReader.read();
-
-            if (!done) {
-                throw Error(
-                    `testZipFileReading Check failed ❌
-                     ${getFileNameSize(
-                         file,
-                     )}  more than expected chunks, expected: ${
-                         filedata.chunkCount
-                     }`,
-                );
-            }
-            console.log(`${i}/${files.length} passed ✅`);
-        }
-        console.log("test zip file reading check passed ✅");
-    } catch (e) {
-        console.log(e);
-    }
-};
-
-// This was used when fixing a bug around handling a zip file that has a photo
-// at the root.
-export const testZipWithRootFileReadingTest = async () => {
-    try {
-        const electron = globalThis.electron;
-        if (!electron) {
-            console.log("testZipFileReading Check is for desktop only");
-            return;
-        }
-        if (!process.env.NEXT_PUBLIC_ZIP_WITH_ROOT_FILE_PATH) {
-            throw Error(
-                "upload test failed NEXT_PUBLIC_ZIP_WITH_ROOT_FILE_PATH missing",
-            );
-        }
-        const files = await electron.getElectronFilesFromGoogleZip(
-            process.env.NEXT_PUBLIC_ZIP_WITH_ROOT_FILE_PATH,
-        );
-
-        const importSuggestion = getImportSuggestion(
-            PICKED_UPLOAD_TYPE.ZIPS,
-            files.map((file) => file["path"]),
-        );
-        if (!importSuggestion.rootFolderName) {
-            throw Error(
-                `testZipWithRootFileReadingTest Check failed ❌
-            rootFolderName is missing`,
-            );
-        }
-        console.log("testZipWithRootFileReadingTest passed ✅");
-    } catch (e) {
-        console.log(e);
-    }
-};

+ 4 - 0
web/packages/media/file.ts

@@ -0,0 +1,4 @@
+import type { Metadata } from "./types/file";
+
+export const hasFileHash = (file: Metadata) =>
+    !!file.hash || (!!file.imageHash && !!file.videoHash);

+ 13 - 5
web/packages/media/live-photo.ts

@@ -110,6 +110,14 @@ export const decodeLivePhoto = async (
     return { imageFileName, imageData, videoFileName, videoData };
 };
 
+/** Variant of {@link LivePhoto} that accepts either a File or its raw data (Uint8Array). */
+interface EncodeLivePhotoInput {
+    imageFileName: string;
+    imageFileOrData: File | Uint8Array;
+    videoFileName: string;
+    videoFileOrData: File | Uint8Array;
+}
+
 /**
  * Return a binary serialized representation of a live photo.
  *
@@ -122,15 +130,15 @@ export const decodeLivePhoto = async (
  */
 export const encodeLivePhoto = async ({
     imageFileName,
-    imageData,
+    imageFileOrData,
     videoFileName,
-    videoData,
-}: LivePhoto) => {
+    videoFileOrData,
+}: EncodeLivePhotoInput) => {
     const [, imageExt] = nameAndExtension(imageFileName);
     const [, videoExt] = nameAndExtension(videoFileName);
 
     const zip = new JSZip();
-    zip.file(fileNameFromComponents(["image", imageExt]), imageData);
-    zip.file(fileNameFromComponents(["video", videoExt]), videoData);
+    zip.file(fileNameFromComponents(["image", imageExt]), imageFileOrData);
+    zip.file(fileNameFromComponents(["video", videoExt]), videoFileOrData);
     return await zip.generateAsync({ type: "uint8array" });
 };

+ 1 - 1
web/packages/next/log.ts

@@ -34,7 +34,7 @@ const messageWithError = (message: string, e?: unknown) => {
     if (e instanceof Error) {
         // In practice, we expect ourselves to be called with Error objects, so
         // this is the happy path so to say.
-        return `${e.name}: ${e.message}\n${e.stack}`;
+        es = [`${e.name}: ${e.message}`, e.stack].filter((x) => x).join("\n");
     } else {
         // For the rest rare cases, use the default string serialization of e.
         es = String(e);

+ 0 - 1
web/packages/next/types/ipc.ts

@@ -503,7 +503,6 @@ export interface Electron {
     getElectronFilesFromGoogleZip: (
         filePath: string,
     ) => Promise<ElectronFile[]>;
-    getDirFiles: (dirPath: string) => Promise<ElectronFile[]>;
 }
 
 /**

+ 0 - 14
web/packages/shared/crypto/types.ts

@@ -1,17 +1,3 @@
-import type { DataStream } from "../utils/data-stream";
-
-export interface LocalFileAttributes<
-    T extends string | Uint8Array | DataStream,
-> {
-    encryptedData: T;
-    decryptionHeader: string;
-}
-
-export interface EncryptionResult<T extends string | Uint8Array | DataStream> {
-    file: LocalFileAttributes<T>;
-    key: string;
-}
-
 export interface B64EncryptionResult {
     encryptedData: string;
     key: string;

+ 14 - 0
web/packages/shared/hooks/useFileInput.tsx

@@ -1,5 +1,19 @@
 import { useCallback, useRef, useState } from "react";
 
+/*
+ * TODO (MR): Understand how this is happening, and validate it further (on
+ * first glance this is correct).
+ *
+ * [Note: File paths when running under Electron]
+ *
+ * We have access to the absolute path of the web {@link File} object when we
+ * are running in the context of our desktop app.
+ *
+ * This is in contrast to the `webkitRelativePath` that we get when we're
+ * running in the browser, which is the relative path to the directory that the
+ * user selected (or just the name of the file if the user selected or
+ * drag/dropped a single one).
+ */
 export interface FileWithPath extends File {
     readonly path?: string;
 }

+ 0 - 8
web/packages/shared/utils/data-stream.ts

@@ -1,8 +0,0 @@
-export interface DataStream {
-    stream: ReadableStream<Uint8Array>;
-    chunkCount: number;
-}
-
-export function isDataStream(object: any): object is DataStream {
-    return "stream" in object;
-}