diff --git a/desktop/src/main/fs.ts b/desktop/src/main/fs.ts index fc181cf46..2428d3a80 100644 --- a/desktop/src/main/fs.ts +++ b/desktop/src/main/fs.ts @@ -27,5 +27,3 @@ export const fsIsDir = async (dirPath: string) => { const stat = await fs.stat(dirPath); return stat.isDirectory(); }; - -export const fsSize = (path: string) => fs.stat(path).then((s) => s.size); diff --git a/desktop/src/main/ipc.ts b/desktop/src/main/ipc.ts index 825a2ed32..df6ab7c8e 100644 --- a/desktop/src/main/ipc.ts +++ b/desktop/src/main/ipc.ts @@ -14,6 +14,7 @@ import type { CollectionMapping, FolderWatch, PendingUploads, + ZipItem, } from "../types/ipc"; import { selectDirectory, @@ -29,7 +30,6 @@ import { fsRename, fsRm, fsRmdir, - fsSize, fsWriteFile, } from "./fs"; import { logToDisk } from "./log"; @@ -52,10 +52,13 @@ import { saveEncryptionKey, } from "./services/store"; import { - getElectronFilesFromGoogleZip, + clearPendingUploads, + listZipItems, + markUploadedFiles, + markUploadedZipItems, + pathOrZipItemSize, pendingUploads, - setPendingUploadCollection, - setPendingUploadFiles, + setPendingUploads, } from "./services/upload"; import { watchAdd, @@ -139,8 +142,6 @@ export const attachIPCHandlers = () => { ipcMain.handle("fsIsDir", (_, dirPath: string) => fsIsDir(dirPath)); - ipcMain.handle("fsSize", (_, path: string) => fsSize(path)); - // - Conversion ipcMain.handle("convertToJPEG", (_, imageData: Uint8Array) => @@ -151,10 +152,10 @@ export const attachIPCHandlers = () => { "generateImageThumbnail", ( _, - dataOrPath: Uint8Array | string, + dataOrPathOrZipItem: Uint8Array | string | ZipItem, maxDimension: number, maxSize: number, - ) => generateImageThumbnail(dataOrPath, maxDimension, maxSize), + ) => generateImageThumbnail(dataOrPathOrZipItem, maxDimension, maxSize), ); ipcMain.handle( @@ -162,10 +163,16 @@ export const attachIPCHandlers = () => { ( _, command: string[], - dataOrPath: Uint8Array | string, + dataOrPathOrZipItem: Uint8Array | string | ZipItem, 
outputFileExtension: string, timeoutMS: number, - ) => ffmpegExec(command, dataOrPath, outputFileExtension, timeoutMS), + ) => + ffmpegExec( + command, + dataOrPathOrZipItem, + outputFileExtension, + timeoutMS, + ), ); // - ML @@ -198,23 +205,31 @@ export const attachIPCHandlers = () => { // - Upload + ipcMain.handle("listZipItems", (_, zipPath: string) => + listZipItems(zipPath), + ); + + ipcMain.handle("pathOrZipItemSize", (_, pathOrZipItem: string | ZipItem) => + pathOrZipItemSize(pathOrZipItem), + ); + ipcMain.handle("pendingUploads", () => pendingUploads()); - ipcMain.handle("setPendingUploadCollection", (_, collectionName: string) => - setPendingUploadCollection(collectionName), + ipcMain.handle("setPendingUploads", (_, pendingUploads: PendingUploads) => + setPendingUploads(pendingUploads), ); ipcMain.handle( - "setPendingUploadFiles", - (_, type: PendingUploads["type"], filePaths: string[]) => - setPendingUploadFiles(type, filePaths), + "markUploadedFiles", + (_, paths: PendingUploads["filePaths"]) => markUploadedFiles(paths), ); - // - - - ipcMain.handle("getElectronFilesFromGoogleZip", (_, filePath: string) => - getElectronFilesFromGoogleZip(filePath), + ipcMain.handle( + "markUploadedZipItems", + (_, items: PendingUploads["zipItems"]) => markUploadedZipItems(items), ); + + ipcMain.handle("clearPendingUploads", () => clearPendingUploads()); }; /** diff --git a/desktop/src/main/services/ffmpeg.ts b/desktop/src/main/services/ffmpeg.ts index ed3542f6a..35977409a 100644 --- a/desktop/src/main/services/ffmpeg.ts +++ b/desktop/src/main/services/ffmpeg.ts @@ -1,9 +1,14 @@ import pathToFfmpeg from "ffmpeg-static"; import fs from "node:fs/promises"; +import type { ZipItem } from "../../types/ipc"; import log from "../log"; import { withTimeout } from "../utils"; import { execAsync } from "../utils-electron"; -import { deleteTempFile, makeTempFilePath } from "../utils-temp"; +import { + deleteTempFile, + makeFileForDataOrPathOrZipItem, + makeTempFilePath, +} from 
"../utils-temp";
 
 /* Duplicated in the web app's code (used by the WASM FFmpeg implementation). */
 const ffmpegPathPlaceholder = "FFMPEG";
@@ -39,28 +44,24 @@ const outputPathPlaceholder = "OUTPUT";
  */
 export const ffmpegExec = async (
     command: string[],
-    dataOrPath: Uint8Array | string,
+    dataOrPathOrZipItem: Uint8Array | string | ZipItem,
    outputFileExtension: string,
    timeoutMS: number,
-    ) => ffmpegExec(command, dataOrPath, outputFileExtension, timeoutMS),
+    ) =>
+        ffmpegExec(
+            command,
+            dataOrPathOrZipItem,
+            outputFileExtension,
+            timeoutMS,
+        ),
    );

    // - ML

@@ -198,23 +205,31 @@ export const attachIPCHandlers = () => {

    // - Upload

+    ipcMain.handle("listZipItems", (_, zipPath: string) =>
+        listZipItems(zipPath),
+    );
+
+    ipcMain.handle("pathOrZipItemSize", (_, pathOrZipItem: string | ZipItem) =>
+        pathOrZipItemSize(pathOrZipItem),
+    );
+
    ipcMain.handle("pendingUploads", () => pendingUploads());

-    ipcMain.handle("setPendingUploadCollection", (_, collectionName: string) =>
-        setPendingUploadCollection(collectionName),
+    ipcMain.handle("setPendingUploads", (_, pendingUploads: PendingUploads) =>
+        setPendingUploads(pendingUploads),
    );

    ipcMain.handle(
-        "setPendingUploadFiles",
-        (_, type: PendingUploads["type"], filePaths: string[]) =>
-            setPendingUploadFiles(type, filePaths),
+        "markUploadedFiles",
+        (_, paths: PendingUploads["filePaths"]) => markUploadedFiles(paths),
    );

-    // -
-
-    ipcMain.handle("getElectronFilesFromGoogleZip", (_, filePath: string) =>
-        getElectronFilesFromGoogleZip(filePath),
+    ipcMain.handle(
+        "markUploadedZipItems",
+        (_, items: PendingUploads["zipItems"]) => markUploadedZipItems(items),
    );
+
+    ipcMain.handle("clearPendingUploads", () => clearPendingUploads());
};

/**
diff --git a/desktop/src/main/services/ffmpeg.ts b/desktop/src/main/services/ffmpeg.ts
index ed3542f6a..35977409a 100644
--- a/desktop/src/main/services/ffmpeg.ts
+++ b/desktop/src/main/services/ffmpeg.ts
@@ -1,9 +1,14 @@
 import pathToFfmpeg from "ffmpeg-static";
 import fs from "node:fs/promises";
+import type { ZipItem } from "../../types/ipc";
 import log from "../log";
 import { withTimeout } from "../utils";
 import { execAsync } from "../utils-electron";
-import { deleteTempFile, makeTempFilePath } from "../utils-temp";
+import {
+    deleteTempFile,
+    makeFileForDataOrPathOrZipItem,
+    makeTempFilePath,
+} from 
"../utils-electron"; -import { deleteTempFile, makeTempFilePath } from "../utils-temp"; +import { + deleteTempFile, + makeFileForDataOrPathOrZipItem, + makeTempFilePath, +} from "../utils-temp"; export const convertToJPEG = async (imageData: Uint8Array) => { const inputFilePath = await makeTempFilePath(); @@ -63,19 +67,15 @@ const imageMagickPath = () => path.join(isDev ? "build" : process.resourcesPath, "image-magick"); export const generateImageThumbnail = async ( - dataOrPath: Uint8Array | string, + dataOrPathOrZipItem: Uint8Array | string | ZipItem, maxDimension: number, maxSize: number, ): Promise => { - let inputFilePath: string; - let isInputFileTemporary: boolean; - if (dataOrPath instanceof Uint8Array) { - inputFilePath = await makeTempFilePath(); - isInputFileTemporary = true; - } else { - inputFilePath = dataOrPath; - isInputFileTemporary = false; - } + const { + path: inputFilePath, + isFileTemporary: isInputFileTemporary, + writeToTemporaryFile: writeToTemporaryInputFile, + } = await makeFileForDataOrPathOrZipItem(dataOrPathOrZipItem); const outputFilePath = await makeTempFilePath("jpeg"); @@ -89,8 +89,7 @@ export const generateImageThumbnail = async ( ); try { - if (dataOrPath instanceof Uint8Array) - await fs.writeFile(inputFilePath, dataOrPath); + await writeToTemporaryInputFile(); let thumbnail: Uint8Array; do { diff --git a/desktop/src/main/services/upload.ts b/desktop/src/main/services/upload.ts index 88c2d88d1..9b24cc0ea 100644 --- a/desktop/src/main/services/upload.ts +++ b/desktop/src/main/services/upload.ts @@ -1,73 +1,104 @@ import StreamZip from "node-stream-zip"; +import fs from "node:fs/promises"; import { existsSync } from "original-fs"; import path from "path"; -import { ElectronFile, type PendingUploads } from "../../types/ipc"; -import { - uploadStatusStore, - type UploadStatusStore, -} from "../stores/upload-status"; -import { getElectronFile, getZipFileStream } from "./fs"; +import type { ElectronFile, PendingUploads, ZipItem } from 
"../../types/ipc"; +import { uploadStatusStore } from "../stores/upload-status"; +import { getZipFileStream } from "./fs"; -export const pendingUploads = async () => { - const collectionName = uploadStatusStore.get("collectionName"); - const filePaths = validSavedPaths("files"); - const zipPaths = validSavedPaths("zips"); +export const listZipItems = async (zipPath: string): Promise => { + const zip = new StreamZip.async({ file: zipPath }); - let files: ElectronFile[] = []; - let type: PendingUploads["type"]; + const entries = await zip.entries(); + const entryNames: string[] = []; - if (zipPaths.length) { - type = "zips"; - for (const zipPath of zipPaths) { - files = [ - ...files, - ...(await getElectronFilesFromGoogleZip(zipPath)), - ]; + for (const entry of Object.values(entries)) { + const basename = path.basename(entry.name); + // Ignore "hidden" files (files whose names begins with a dot). + if (entry.isFile && basename.length > 0 && basename[0] != ".") { + // `entry.name` is the path within the zip. + entryNames.push(entry.name); } - const pendingFilePaths = new Set(filePaths); - files = files.filter((file) => pendingFilePaths.has(file.path)); - } else if (filePaths.length) { - type = "files"; - files = await Promise.all(filePaths.map(getElectronFile)); } + zip.close(); + + return entryNames.map((entryName) => [zipPath, entryName]); +}; + +export const pathOrZipItemSize = async ( + pathOrZipItem: string | ZipItem, +): Promise => { + if (typeof pathOrZipItem == "string") { + const stat = await fs.stat(pathOrZipItem); + return stat.size; + } else { + const [zipPath, entryName] = pathOrZipItem; + const zip = new StreamZip.async({ file: zipPath }); + const entry = await zip.entry(entryName); + const size = entry.size; + zip.close(); + return size; + } +}; + +export const pendingUploads = async (): Promise => { + const collectionName = uploadStatusStore.get("collectionName"); + + const allFilePaths = uploadStatusStore.get("filePaths") ?? 
[];
+    const filePaths = allFilePaths.filter((f) => existsSync(f));
+
+    const allZipItems = uploadStatusStore.get("zipItems");
+    let zipItems: typeof allZipItems;
+
+    // Migration code - May 2024. Remove after a bit.
+    //
+    // The older store formats will not have zipItems and instead will have
+    // zipPaths. If we find such a case, read the zipPaths and enqueue all of
+    // their files as zipItems in the result.
+    //
+    // This can potentially cause us to try reuploading an already uploaded
+    // file, but the dedup logic will kick in at that point so no harm will come
+    // of it.
+    if (allZipItems === undefined) {
+        const allZipPaths = uploadStatusStore.get("zipPaths") ?? [];
+        const zipPaths = allZipPaths.filter((f) => existsSync(f));
+        zipItems = [];
+        for (const zip of zipPaths)
+            zipItems = zipItems.concat(await listZipItems(zip));
+    } else {
+        zipItems = allZipItems.filter(([z]) => existsSync(z));
+    }
+
+    if (filePaths.length == 0 && zipItems.length == 0) return undefined;
+
     return {
-        files,
         collectionName,
-        type,
+        filePaths,
+        zipItems,
     };
 };
 
-export const validSavedPaths = (type: PendingUploads["type"]) => {
-    const key = storeKey(type);
-    const savedPaths = (uploadStatusStore.get(key) as string[]) ?? 
[]; - const paths = savedPaths.filter((p) => existsSync(p)); - uploadStatusStore.set(key, paths); - return paths; +export const setPendingUploads = async (pendingUploads: PendingUploads) => + uploadStatusStore.set(pendingUploads); + +export const markUploadedFiles = async (paths: string[]) => { + const existing = uploadStatusStore.get("filePaths"); + const updated = existing.filter((p) => !paths.includes(p)); + uploadStatusStore.set("filePaths", updated); }; -export const setPendingUploadCollection = (collectionName: string) => { - if (collectionName) uploadStatusStore.set("collectionName", collectionName); - else uploadStatusStore.delete("collectionName"); -}; - -export const setPendingUploadFiles = ( - type: PendingUploads["type"], - filePaths: string[], +export const markUploadedZipItems = async ( + items: [zipPath: string, entryName: string][], ) => { - const key = storeKey(type); - if (filePaths) uploadStatusStore.set(key, filePaths); - else uploadStatusStore.delete(key); + const existing = uploadStatusStore.get("zipItems"); + const updated = existing.filter( + (z) => !items.some((e) => z[0] == e[0] && z[1] == e[1]), + ); + uploadStatusStore.set("zipItems", updated); }; -const storeKey = (type: PendingUploads["type"]): keyof UploadStatusStore => { - switch (type) { - case "zips": - return "zipPaths"; - case "files": - return "filePaths"; - } -}; +export const clearPendingUploads = () => uploadStatusStore.clear(); export const getElectronFilesFromGoogleZip = async (filePath: string) => { const zip = new StreamZip.async({ @@ -85,6 +116,8 @@ export const getElectronFilesFromGoogleZip = async (filePath: string) => { } } + zip.close(); + return files; }; diff --git a/desktop/src/main/stores/upload-status.ts b/desktop/src/main/stores/upload-status.ts index 25af7a49e..472f38a7f 100644 --- a/desktop/src/main/stores/upload-status.ts +++ b/desktop/src/main/stores/upload-status.ts @@ -1,27 +1,56 @@ import Store, { Schema } from "electron-store"; export interface 
UploadStatusStore { - filePaths: string[]; - zipPaths: string[]; - collectionName: string; + /** + * The collection to which we're uploading, or the root collection. + * + * Not all pending uploads will have an associated collection. + */ + collectionName?: string; + /** + * Paths to regular files that are pending upload. + * + * This should generally be present, albeit empty, but it is marked optional + * in sympathy with its siblings. + */ + filePaths?: string[]; + /** + * Each item is the path to a zip file and the name of an entry within it. + * + * This is marked optional since legacy stores will not have it. + */ + zipItems?: [zipPath: string, entryName: string][]; + /** + * @deprecated Legacy paths to zip files, now subsumed into zipItems. + */ + zipPaths?: string[]; } const uploadStatusSchema: Schema = { + collectionName: { + type: "string", + }, filePaths: { type: "array", items: { type: "string", }, }, + zipItems: { + type: "array", + items: { + type: "array", + items: { + type: "string", + }, + }, + }, zipPaths: { type: "array", items: { type: "string", }, }, - collectionName: { - type: "string", - }, }; export const uploadStatusStore = new Store({ diff --git a/desktop/src/main/stream.ts b/desktop/src/main/stream.ts index 88d85db8e..b37970cfa 100644 --- a/desktop/src/main/stream.ts +++ b/desktop/src/main/stream.ts @@ -2,6 +2,7 @@ * @file stream data to-from renderer using a custom protocol handler. */ import { net, protocol } from "electron/main"; +import StreamZip from "node-stream-zip"; import { createWriteStream, existsSync } from "node:fs"; import fs from "node:fs/promises"; import { Readable } from "node:stream"; @@ -34,17 +35,23 @@ export const registerStreamProtocol = () => { protocol.handle("stream", async (request: Request) => { const url = request.url; // The request URL contains the command to run as the host, and the - // pathname of the file as the path. For example, + // pathname of the file as the path. 
An additional path can be specified
+        // as the URL hash.
         //
-        //    stream://write/path/to/file
-        //    host-pathname-----
+        // For example,
         //
-        const { host, pathname } = new URL(url);
+        //    stream://write/path/to/file#/path/to/another/file
+        //    host[pathname----]  [pathname-2---------]
+        //
+        const { host, pathname, hash } = new URL(url);
         // Convert e.g. "%20" to spaces.
         const path = decodeURIComponent(pathname);
+        // URL.hash includes the leading "#"; strip it before decoding.
+        const hashPath = decodeURIComponent(hash.slice(1));
         switch (host) {
             case "read":
                 return handleRead(path);
+            case "read-zip":
+                return handleReadZip(path, hashPath);
             case "write":
                 return handleWrite(path, request);
             default:
@@ -88,6 +96,39 @@ const handleRead = async (path: string) => {
     }
 };
 
+const handleReadZip = async (zipPath: string, entryName: string) => {
+    try {
+        const zip = new StreamZip.async({ file: zipPath });
+        const entry = await zip.entry(entryName);
+        const stream = await zip.stream(entry);
+        // TODO(MR): when to call zip.close()
+
+        return new Response(Readable.toWeb(new Readable().wrap(stream)), {
+            headers: {
+                // We don't know the exact type, but it doesn't really matter,
+                // just set it to a generic binary content-type so that the
+                // browser doesn't tinker with it thinking of it as text.
+                "Content-Type": "application/octet-stream",
+                "Content-Length": `${entry.size}`,
+                // While it is documented that entry.time is the modification
+                // time, the units are not mentioned. By seeing the source code,
+                // we can verify that it is indeed epoch milliseconds.
See + // `parseZipTime` in the node-stream-zip source, + // https://github.com/antelle/node-stream-zip/blob/master/node_stream_zip.js + "X-Last-Modified-Ms": `${entry.time}`, + }, + }); + } catch (e) { + log.error( + `Failed to read entry ${entryName} from zip file at ${zipPath}`, + e, + ); + return new Response(`Failed to read stream: ${e.message}`, { + status: 500, + }); + } +}; + const handleWrite = async (path: string, request: Request) => { try { await writeStream(path, request.body); diff --git a/desktop/src/main/utils-temp.ts b/desktop/src/main/utils-temp.ts index a52daf619..3f3a6081e 100644 --- a/desktop/src/main/utils-temp.ts +++ b/desktop/src/main/utils-temp.ts @@ -1,7 +1,9 @@ import { app } from "electron/main"; +import StreamZip from "node-stream-zip"; import { existsSync } from "node:fs"; import fs from "node:fs/promises"; import path from "path"; +import type { ZipItem } from "../types/ipc"; /** * Our very own directory within the system temp directory. Go crazy, but @@ -61,3 +63,64 @@ export const deleteTempFile = async (tempFilePath: string) => { throw new Error(`Attempting to delete a non-temp file ${tempFilePath}`); await fs.rm(tempFilePath, { force: true }); }; + +/** The result of {@link makeFileForDataOrPathOrZipItem}. */ +interface FileForDataOrPathOrZipItem { + /** + * The path to the file (possibly temporary). + */ + path: string; + /** + * `true` if {@link path} points to a temporary file which should be deleted + * once we are done processing. + */ + isFileTemporary: boolean; + /** + * If set, this'll be a function that can be called to actually write the + * contents of the source `Uint8Array | string | ZipItem` into the file at + * {@link path}. + * + * It will be undefined if the source is already a path since nothing needs + * to be written in that case. In the other two cases this function will + * write the data or zip item into the file at {@link path}. 
+ */ + writeToTemporaryFile?: () => Promise; +} + +/** + * Return the path to a file, a boolean indicating if this is a temporary path + * that needs to be deleted after processing, and a function to write the given + * {@link dataOrPathOrZipItem} into that temporary file if needed. + * + * @param dataOrPathOrZipItem The contents of the file, or the path to an + * existing file, or a (path to a zip file, name of an entry within that zip + * file) tuple. + */ +export const makeFileForDataOrPathOrZipItem = async ( + dataOrPathOrZipItem: Uint8Array | string | ZipItem, +): Promise => { + let path: string; + let isFileTemporary: boolean; + let writeToTemporaryFile: () => Promise | undefined; + + if (typeof dataOrPathOrZipItem == "string") { + path = dataOrPathOrZipItem; + isFileTemporary = false; + } else { + path = await makeTempFilePath(); + isFileTemporary = true; + if (dataOrPathOrZipItem instanceof Uint8Array) { + writeToTemporaryFile = () => + fs.writeFile(path, dataOrPathOrZipItem); + } else { + writeToTemporaryFile = async () => { + const [zipPath, entryName] = dataOrPathOrZipItem; + const zip = new StreamZip.async({ file: zipPath }); + await zip.extract(entryName, path); + zip.close(); + }; + } + } + + return { path, isFileTemporary, writeToTemporaryFile }; +}; diff --git a/desktop/src/preload.ts b/desktop/src/preload.ts index 18fb55013..61955b524 100644 --- a/desktop/src/preload.ts +++ b/desktop/src/preload.ts @@ -37,7 +37,7 @@ * - [main] desktop/src/main/ipc.ts contains impl */ -import { contextBridge, ipcRenderer } from "electron/renderer"; +import { contextBridge, ipcRenderer, webUtils } from "electron/renderer"; // While we can't import other code, we can import types since they're just // needed when compiling and will not be needed or looked around for at runtime. 
@@ -47,6 +47,7 @@ import type { ElectronFile, FolderWatch, PendingUploads, + ZipItem, } from "./types/ipc"; // - General @@ -122,36 +123,33 @@ const fsWriteFile = (path: string, contents: string): Promise => const fsIsDir = (dirPath: string): Promise => ipcRenderer.invoke("fsIsDir", dirPath); -const fsSize = (path: string): Promise => - ipcRenderer.invoke("fsSize", path); - // - Conversion const convertToJPEG = (imageData: Uint8Array): Promise => ipcRenderer.invoke("convertToJPEG", imageData); const generateImageThumbnail = ( - dataOrPath: Uint8Array | string, + dataOrPathOrZipItem: Uint8Array | string | ZipItem, maxDimension: number, maxSize: number, ): Promise => ipcRenderer.invoke( "generateImageThumbnail", - dataOrPath, + dataOrPathOrZipItem, maxDimension, maxSize, ); const ffmpegExec = ( command: string[], - dataOrPath: Uint8Array | string, + dataOrPathOrZipItem: Uint8Array | string | ZipItem, outputFileExtension: string, timeoutMS: number, ): Promise => ipcRenderer.invoke( "ffmpegExec", command, - dataOrPath, + dataOrPathOrZipItem, outputFileExtension, timeoutMS, ); @@ -241,25 +239,29 @@ const watchFindFiles = (folderPath: string): Promise => // - Upload +const pathForFile = (file: File) => webUtils.getPathForFile(file); + +const listZipItems = (zipPath: string): Promise => + ipcRenderer.invoke("listZipItems", zipPath); + +const pathOrZipItemSize = (pathOrZipItem: string | ZipItem): Promise => + ipcRenderer.invoke("pathOrZipItemSize", pathOrZipItem); + const pendingUploads = (): Promise => ipcRenderer.invoke("pendingUploads"); -const setPendingUploadCollection = (collectionName: string): Promise => - ipcRenderer.invoke("setPendingUploadCollection", collectionName); +const setPendingUploads = (pendingUploads: PendingUploads): Promise => + ipcRenderer.invoke("setPendingUploads", pendingUploads); -const setPendingUploadFiles = ( - type: PendingUploads["type"], - filePaths: string[], -): Promise => - ipcRenderer.invoke("setPendingUploadFiles", type, filePaths); 
+const markUploadedFiles = (paths: PendingUploads["filePaths"]): Promise => + ipcRenderer.invoke("markUploadedFiles", paths); -// - TODO: AUDIT below this -// - +const markUploadedZipItems = ( + items: PendingUploads["zipItems"], +): Promise => ipcRenderer.invoke("markUploadedZipItems", items); -const getElectronFilesFromGoogleZip = ( - filePath: string, -): Promise => - ipcRenderer.invoke("getElectronFilesFromGoogleZip", filePath); +const clearPendingUploads = (): Promise => + ipcRenderer.invoke("clearPendingUploads"); /** * These objects exposed here will become available to the JS code in our @@ -331,7 +333,6 @@ contextBridge.exposeInMainWorld("electron", { readTextFile: fsReadTextFile, writeFile: fsWriteFile, isDir: fsIsDir, - size: fsSize, }, // - Conversion @@ -370,11 +371,12 @@ contextBridge.exposeInMainWorld("electron", { // - Upload + pathForFile, + listZipItems, + pathOrZipItemSize, pendingUploads, - setPendingUploadCollection, - setPendingUploadFiles, - - // - - - getElectronFilesFromGoogleZip, + setPendingUploads, + markUploadedFiles, + markUploadedZipItems, + clearPendingUploads, }); diff --git a/desktop/src/types/ipc.ts b/desktop/src/types/ipc.ts index 3fa375eab..6e47b7a3a 100644 --- a/desktop/src/types/ipc.ts +++ b/desktop/src/types/ipc.ts @@ -25,10 +25,12 @@ export interface FolderWatchSyncedFile { collectionID: number; } +export type ZipItem = [zipPath: string, entryName: string]; + export interface PendingUploads { collectionName: string; - type: "files" | "zips"; - files: ElectronFile[]; + filePaths: string[]; + zipItems: ZipItem[]; } /** diff --git a/mobile/lib/ui/common/popup_item.dart b/mobile/lib/ui/common/popup_item.dart new file mode 100644 index 000000000..5f32104af --- /dev/null +++ b/mobile/lib/ui/common/popup_item.dart @@ -0,0 +1,38 @@ +import 'package:flutter/material.dart'; + +class EntePopupMenuItem extends PopupMenuItem { + final String label; + final IconData? icon; + final Widget? 
iconWidget; + + EntePopupMenuItem( + this.label, { + required T value, + this.icon, + this.iconWidget, + Key? key, + }) : assert( + icon != null || iconWidget != null, + 'Either icon or iconWidget must be provided.', + ), + assert( + !(icon != null && iconWidget != null), + 'Only one of icon or iconWidget can be provided.', + ), + super( + value: value, + key: key, + child: Row( + children: [ + if (iconWidget != null) + iconWidget + else if (icon != null) + Icon(icon), + const Padding( + padding: EdgeInsets.all(8), + ), + Text(label), + ], + ), // Initially empty, will be populated in build + ); +} diff --git a/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart b/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart index 1f9fb0bbb..83a55975f 100644 --- a/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart +++ b/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart @@ -24,6 +24,7 @@ import 'package:photos/services/collections_service.dart'; import 'package:photos/services/sync_service.dart'; import 'package:photos/services/update_service.dart'; import 'package:photos/ui/actions/collection/collection_sharing_actions.dart'; +import "package:photos/ui/common/popup_item.dart"; import 'package:photos/ui/components/action_sheet_widget.dart'; import 'package:photos/ui/components/buttons/button_widget.dart'; import 'package:photos/ui/components/models/button_type.dart'; @@ -319,263 +320,117 @@ class _GalleryAppBarWidgetState extends State { ), ); } - final List> items = []; - if (galleryType.canRename()) { - items.add( - PopupMenuItem( + final List> items = []; + items.addAll([ + if (galleryType.canRename()) + EntePopupMenuItem( + isQuickLink + ? S.of(context).convertToAlbum + : S.of(context).renameAlbum, value: AlbumPopupAction.rename, - child: Row( - children: [ - Icon(isQuickLink ? Icons.photo_album_outlined : Icons.edit), - const Padding( - padding: EdgeInsets.all(8), - ), - Text( - isQuickLink - ? 
S.of(context).convertToAlbum - : S.of(context).renameAlbum, - ), - ], - ), + icon: isQuickLink ? Icons.photo_album_outlined : Icons.edit, ), - ); - } - if (galleryType.canSetCover()) { - items.add( - PopupMenuItem( + if (galleryType.canSetCover()) + EntePopupMenuItem( + S.of(context).setCover, value: AlbumPopupAction.setCover, - child: Row( - children: [ - const Icon(Icons.image_outlined), - const Padding( - padding: EdgeInsets.all(8), - ), - Text(S.of(context).setCover), - ], - ), + icon: Icons.image_outlined, ), - ); - } - if (galleryType.showMap()) { - items.add( - PopupMenuItem( + if (galleryType.showMap()) + EntePopupMenuItem( + S.of(context).map, value: AlbumPopupAction.map, - child: Row( - children: [ - const Icon(Icons.map_outlined), - const Padding( - padding: EdgeInsets.all(8), - ), - Text(S.of(context).map), - ], - ), + icon: Icons.map_outlined, ), - ); - } - - if (galleryType.canSort()) { - items.add( - PopupMenuItem( + if (galleryType.canSort()) + EntePopupMenuItem( + S.of(context).sortAlbumsBy, value: AlbumPopupAction.sort, - child: Row( - children: [ - const Icon(Icons.sort_outlined), - const Padding( - padding: EdgeInsets.all(8), - ), - Text( - S.of(context).sortAlbumsBy, - ), - ], - ), + icon: Icons.sort_outlined, ), - ); - } - - if (galleryType == GalleryType.uncategorized) { - items.add( - PopupMenuItem( + if (galleryType == GalleryType.uncategorized) + EntePopupMenuItem( + S.of(context).cleanUncategorized, value: AlbumPopupAction.cleanUncategorized, - child: Row( - children: [ - const Icon(Icons.crop_original_outlined), - const Padding( - padding: EdgeInsets.all(8), - ), - Text(S.of(context).cleanUncategorized), - ], - ), + icon: Icons.crop_original_outlined, ), - ); - } - if (galleryType.canPin()) { - items.add( - PopupMenuItem( + if (galleryType.canPin()) + EntePopupMenuItem( + widget.collection!.isPinned + ? 
S.of(context).unpinAlbum + : S.of(context).pinAlbum, value: AlbumPopupAction.pinAlbum, - child: Row( - children: [ - widget.collection!.isPinned - ? const Icon(CupertinoIcons.pin_slash) - : Transform.rotate( - angle: 45 * math.pi / 180, // rotate by 45 degrees - child: const Icon(CupertinoIcons.pin), - ), - const Padding( - padding: EdgeInsets.all(8), - ), - Text( - widget.collection!.isPinned - ? S.of(context).unpinAlbum - : S.of(context).pinAlbum, - ), - ], - ), + iconWidget: widget.collection!.isPinned + ? const Icon(CupertinoIcons.pin_slash) + : Transform.rotate( + angle: 45 * math.pi / 180, // rotate by 45 degrees + child: const Icon(CupertinoIcons.pin), + ), ), - ); - } + ]); final bool isArchived = widget.collection?.isArchived() ?? false; final bool isHidden = widget.collection?.isHidden() ?? false; - // Do not show archive option for favorite collection. If collection is - // already archived, allow user to unarchive that collection. - if (isArchived || (galleryType.canArchive() && !isHidden)) { - items.add( - PopupMenuItem( - value: AlbumPopupAction.ownedArchive, - child: Row( - children: [ - Icon(isArchived ? Icons.unarchive : Icons.archive_outlined), - const Padding( - padding: EdgeInsets.all(8), - ), - Text( - isArchived - ? S.of(context).unarchiveAlbum - : S.of(context).archiveAlbum, - ), - ], - ), - ), - ); - } - if (!isArchived && galleryType.canHide()) { - items.add( - PopupMenuItem( - value: AlbumPopupAction.ownedHide, - child: Row( - children: [ - Icon( - isHidden - ? Icons.visibility_outlined - : Icons.visibility_off_outlined, - ), - const Padding( - padding: EdgeInsets.all(8), - ), - Text( - isHidden ? 
S.of(context).unhide : S.of(context).hide, - ), - ], - ), - ), - ); - } - if (widget.collection != null && isInternalUser) { - items.add( - PopupMenuItem( - value: AlbumPopupAction.playOnTv, - child: Row( - children: [ - const Icon(Icons.tv_outlined), - const Padding( - padding: EdgeInsets.all(8), - ), - Text(context.l10n.playOnTv), - ], - ), - ), - ); - } - if (galleryType.canDelete()) { - items.add( - PopupMenuItem( - value: isQuickLink - ? AlbumPopupAction.removeLink - : AlbumPopupAction.delete, - child: Row( - children: [ - Icon( - isQuickLink - ? Icons.remove_circle_outline - : Icons.delete_outline, - ), - const Padding( - padding: EdgeInsets.all(8), - ), - Text( - isQuickLink - ? S.of(context).removeLink - : S.of(context).deleteAlbum, - ), - ], + items.addAll( + [ + // Do not show archive option for favorite collection. If collection is + // already archived, allow user to unarchive that collection. + if (isArchived || (galleryType.canArchive() && !isHidden)) + EntePopupMenuItem( + value: AlbumPopupAction.ownedArchive, + isArchived + ? S.of(context).unarchiveAlbum + : S.of(context).archiveAlbum, + icon: isArchived ? Icons.unarchive : Icons.archive_outlined, ), - ), - ); - } - - if (galleryType == GalleryType.sharedCollection) { - final bool hasShareeArchived = widget.collection!.hasShareeArchived(); - items.add( - PopupMenuItem( - value: AlbumPopupAction.sharedArchive, - child: Row( - children: [ - Icon( - hasShareeArchived ? Icons.unarchive : Icons.archive_outlined, - ), - const Padding( - padding: EdgeInsets.all(8), - ), - Text( - hasShareeArchived - ? S.of(context).unarchiveAlbum - : S.of(context).archiveAlbum, - ), - ], + if (!isArchived && galleryType.canHide()) + EntePopupMenuItem( + value: AlbumPopupAction.ownedHide, + isHidden ? S.of(context).unhide : S.of(context).hide, + icon: isHidden + ? 
Icons.visibility_outlined + : Icons.visibility_off_outlined, ), - ), - ); - items.add( - PopupMenuItem( - value: AlbumPopupAction.leave, - child: Row( - children: [ - const Icon(Icons.logout), - const Padding( - padding: EdgeInsets.all(8), - ), - Text(S.of(context).leaveAlbum), - ], + if (widget.collection != null && isInternalUser) + EntePopupMenuItem( + value: AlbumPopupAction.playOnTv, + context.l10n.playOnTv, + icon: Icons.tv_outlined, ), - ), - ); - } - if (galleryType == GalleryType.localFolder) { - items.add( - PopupMenuItem( - value: AlbumPopupAction.freeUpSpace, - child: Row( - children: [ - const Icon(Icons.delete_sweep_outlined), - const Padding( - padding: EdgeInsets.all(8), - ), - Text(S.of(context).freeUpDeviceSpace), - ], + if (galleryType.canDelete()) + EntePopupMenuItem( + isQuickLink ? S.of(context).removeLink : S.of(context).deleteAlbum, + value: isQuickLink + ? AlbumPopupAction.removeLink + : AlbumPopupAction.delete, + icon: isQuickLink + ? Icons.remove_circle_outline + : Icons.delete_outline, ), - ), - ); - } + if (galleryType == GalleryType.sharedCollection) + EntePopupMenuItem( + widget.collection!.hasShareeArchived() + ? S.of(context).unarchiveAlbum + : S.of(context).archiveAlbum, + value: AlbumPopupAction.sharedArchive, + icon: widget.collection!.hasShareeArchived() + ? 
Icons.unarchive + : Icons.archive_outlined, + ), + if (galleryType == GalleryType.sharedCollection) + EntePopupMenuItem( + S.of(context).leaveAlbum, + value: AlbumPopupAction.leave, + icon: Icons.logout, + ), + if (galleryType == GalleryType.localFolder) + EntePopupMenuItem( + S.of(context).freeUpDeviceSpace, + value: AlbumPopupAction.freeUpSpace, + icon: Icons.delete_sweep_outlined, + ), + ], + ); if (items.isNotEmpty) { actions.add( PopupMenuButton( diff --git a/web/apps/auth/src/pages/_app.tsx b/web/apps/auth/src/pages/_app.tsx index bf1093c90..a5aa55f98 100644 --- a/web/apps/auth/src/pages/_app.tsx +++ b/web/apps/auth/src/pages/_app.tsx @@ -140,7 +140,7 @@ export default function App({ Component, pageProps }: AppProps) { {showNavbar && } - {offline && t("OFFLINE_MSG")} + {isI18nReady && offline && t("OFFLINE_MSG")} diff --git a/web/apps/photos/src/components/PhotoFrame.tsx b/web/apps/photos/src/components/PhotoFrame.tsx index 8c935ee27..f7db350da 100644 --- a/web/apps/photos/src/components/PhotoFrame.tsx +++ b/web/apps/photos/src/components/PhotoFrame.tsx @@ -308,11 +308,7 @@ const PhotoFrame = ({ item: EnteFile, ) => { log.info( - `[${ - item.id - }] getSlideData called for thumbnail:${!!item.msrc} sourceLoaded:${ - item.isSourceLoaded - } fetching:${fetching[item.id]}`, + `[${item.id}] getSlideData called for thumbnail: ${!!item.msrc} sourceLoaded: ${item.isSourceLoaded} fetching:${fetching[item.id]}`, ); if (!item.msrc) { @@ -327,9 +323,7 @@ const PhotoFrame = ({ try { updateURL(index)(item.id, url); log.info( - `[${ - item.id - }] calling invalidateCurrItems for thumbnail msrc :${!!item.msrc}`, + `[${item.id}] calling invalidateCurrItems for thumbnail msrc: ${!!item.msrc}`, ); instance.invalidateCurrItems(); if ((instance as any).isOpen()) { @@ -381,7 +375,7 @@ const PhotoFrame = ({ try { await updateSrcURL(index, item.id, dummyImgSrcUrl); log.info( - `[${item.id}] calling invalidateCurrItems for live photo imgSrc, source loaded :${item.isSourceLoaded}`, + 
`[${item.id}] calling invalidateCurrItems for live photo imgSrc, source loaded: ${item.isSourceLoaded}`, ); instance.invalidateCurrItems(); if ((instance as any).isOpen()) { @@ -415,7 +409,7 @@ const PhotoFrame = ({ true, ); log.info( - `[${item.id}] calling invalidateCurrItems for live photo complete, source loaded :${item.isSourceLoaded}`, + `[${item.id}] calling invalidateCurrItems for live photo complete, source loaded: ${item.isSourceLoaded}`, ); instance.invalidateCurrItems(); if ((instance as any).isOpen()) { @@ -433,7 +427,7 @@ const PhotoFrame = ({ try { await updateSrcURL(index, item.id, srcURLs); log.info( - `[${item.id}] calling invalidateCurrItems for src, source loaded :${item.isSourceLoaded}`, + `[${item.id}] calling invalidateCurrItems for src, source loaded: ${item.isSourceLoaded}`, ); instance.invalidateCurrItems(); if ((instance as any).isOpen()) { @@ -476,9 +470,7 @@ const PhotoFrame = ({ try { updateURL(index)(item.id, item.msrc, true); log.info( - `[${ - item.id - }] calling invalidateCurrItems for thumbnail msrc :${!!item.msrc}`, + `[${item.id}] calling invalidateCurrItems for thumbnail msrc: ${!!item.msrc}`, ); instance.invalidateCurrItems(); if ((instance as any).isOpen()) { @@ -495,7 +487,7 @@ const PhotoFrame = ({ } try { log.info( - `[${item.id}] new file getConvertedVideo request- ${item.metadata.title}}`, + `[${item.id}] new file getConvertedVideo request ${item.metadata.title}}`, ); fetching[item.id] = true; @@ -504,7 +496,7 @@ const PhotoFrame = ({ try { await updateSrcURL(index, item.id, srcURL, true); log.info( - `[${item.id}] calling invalidateCurrItems for src, source loaded :${item.isSourceLoaded}`, + `[${item.id}] calling invalidateCurrItems for src, source loaded: ${item.isSourceLoaded}`, ); instance.invalidateCurrItems(); if ((instance as any).isOpen()) { diff --git a/web/apps/photos/src/components/PhotoList/dedupe.tsx b/web/apps/photos/src/components/PhotoList/dedupe.tsx index 9c86ba24f..7181f6267 100644 --- 
a/web/apps/photos/src/components/PhotoList/dedupe.tsx +++ b/web/apps/photos/src/components/PhotoList/dedupe.tsx @@ -1,4 +1,3 @@ -import { convertBytesToHumanReadable } from "@/next/file"; import { FlexWrapper } from "@ente/shared/components/Container"; import { Box, styled } from "@mui/material"; import { @@ -20,6 +19,7 @@ import { } from "react-window"; import { Duplicate } from "services/deduplicationService"; import { EnteFile } from "types/file"; +import { convertBytesToHumanReadable } from "utils/file"; export enum ITEM_TYPE { TIME = "TIME", diff --git a/web/apps/photos/src/components/PhotoList/index.tsx b/web/apps/photos/src/components/PhotoList/index.tsx index 48454fa69..91f712df1 100644 --- a/web/apps/photos/src/components/PhotoList/index.tsx +++ b/web/apps/photos/src/components/PhotoList/index.tsx @@ -1,4 +1,3 @@ -import { convertBytesToHumanReadable } from "@/next/file"; import { FlexWrapper } from "@ente/shared/components/Container"; import { formatDate, getDate, isSameDay } from "@ente/shared/time/format"; import { Box, Checkbox, Link, Typography, styled } from "@mui/material"; @@ -23,6 +22,7 @@ import { areEqual, } from "react-window"; import { EnteFile } from "types/file"; +import { convertBytesToHumanReadable } from "utils/file"; import { handleSelectCreator } from "utils/photoFrame"; import { PublicCollectionGalleryContext } from "utils/publicCollectionGallery"; diff --git a/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx b/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx index ff795aca7..42edddbf1 100644 --- a/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx +++ b/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx @@ -507,14 +507,14 @@ const ImageEditorOverlay = (props: IProps) => { const editedFile = await getEditedFile(); const file = { - fileOrPath: editedFile, + uploadItem: editedFile, localID: 1, collectionID: props.file.collectionID, }; 
uploadManager.prepareForNewUpload(); uploadManager.showUploadProgressDialog(); - uploadManager.uploadFiles([file], [collection]); + uploadManager.uploadItems([file], [collection]); setFileURL(null); props.onClose(); props.closePhotoViewer(); diff --git a/web/apps/photos/src/components/Upload/Uploader.tsx b/web/apps/photos/src/components/Upload/Uploader.tsx index d7485398f..fdc6ee932 100644 --- a/web/apps/photos/src/components/Upload/Uploader.tsx +++ b/web/apps/photos/src/components/Upload/Uploader.tsx @@ -1,6 +1,7 @@ +import { basename } from "@/next/file"; import log from "@/next/log"; -import { ElectronFile } from "@/next/types/file"; -import type { CollectionMapping, Electron } from "@/next/types/ipc"; +import { type FileAndPath } from "@/next/types/file"; +import type { CollectionMapping, Electron, ZipItem } from "@/next/types/ipc"; import { CustomError } from "@ente/shared/error"; import { isPromise } from "@ente/shared/utils"; import DiscFullIcon from "@mui/icons-material/DiscFull"; @@ -19,17 +20,15 @@ import { getPublicCollectionUploaderName, savePublicCollectionUploaderName, } from "services/publicCollectionService"; +import type { UploadItem } from "services/upload/types"; import type { - FileWithCollection, InProgressUpload, SegregatedFinishedUploads, UploadCounter, UploadFileNames, + UploadItemWithCollection, } from "services/upload/uploadManager"; -import uploadManager, { - setToUploadCollection, -} from "services/upload/uploadManager"; -import { fopFileName } from "services/upload/uploadService"; +import uploadManager from "services/upload/uploadManager"; import watcher from "services/watch"; import { NotificationAttributes } from "types/Notification"; import { Collection } from "types/collection"; @@ -52,8 +51,6 @@ import { CollectionMappingChoiceModal } from "./CollectionMappingChoiceModal"; import UploadProgress from "./UploadProgress"; import UploadTypeSelector from "./UploadTypeSelector"; -const FIRST_ALBUM_NAME = "My First Album"; - enum 
PICKED_UPLOAD_TYPE { FILES = "files", FOLDERS = "folders", @@ -74,17 +71,29 @@ interface Props { isFirstUpload?: boolean; uploadTypeSelectorView: boolean; showSessionExpiredMessage: () => void; - showUploadFilesDialog: () => void; - showUploadDirsDialog: () => void; - webFolderSelectorFiles: File[]; - webFileSelectorFiles: File[]; dragAndDropFiles: File[]; + openFileSelector: () => void; + fileSelectorFiles: File[]; + openFolderSelector: () => void; + folderSelectorFiles: File[]; + openZipFileSelector?: () => void; + fileSelectorZipFiles?: File[]; uploadCollection?: Collection; uploadTypeSelectorIntent: UploadTypeSelectorIntent; activeCollection?: Collection; } -export default function Uploader(props: Props) { +export default function Uploader({ + isFirstUpload, + dragAndDropFiles, + openFileSelector, + fileSelectorFiles, + openFolderSelector, + folderSelectorFiles, + openZipFileSelector, + fileSelectorZipFiles, + ...props +}: Props) { const appContext = useContext(AppContext); const galleryContext = useContext(GalleryContext); const publicCollectionGalleryContext = useContext( @@ -116,26 +125,55 @@ export default function Uploader(props: Props) { ); /** - * {@link File}s that the user drag-dropped or selected for uploads. This is - * the only type of selection that is possible when we're running in the - * browser. + * {@link File}s that the user drag-dropped or selected for uploads (web). + * + * This is the only type of selection that is possible when we're running in + * the browser. */ const [webFiles, setWebFiles] = useState([]); + /** + * {@link File}s that the user drag-dropped or selected for uploads, + * augmented with their paths (desktop). + * + * These siblings of {@link webFiles} come into play when we are running in + * the context of our desktop app. + */ + const [desktopFiles, setDesktopFiles] = useState([]); /** * Paths of file to upload that we've received over the IPC bridge from the * code running in the Node.js layer of our desktop app. 
+ * + * Unlike {@link filesWithPaths} which are still user initiated, + * {@link desktopFilePaths} can be set via programmatic action. For example, + * if the user has setup a folder watch, and a new file is added on their + * local file system in one of the watched folders, then the relevant path + * of the new file would get added to {@link desktopFilePaths}. */ const [desktopFilePaths, setDesktopFilePaths] = useState([]); /** - * TODO(MR): When? + * (zip file path, entry within zip file) tuples for zip files that the user + * is trying to upload. + * + * These are only set when we are running in the context of our desktop app. + * They may be set either on a user action (when the user selects or + * drag-drops zip files) or programmatically (when the app is trying to + * resume pending uploads from a previous session). */ - const [electronFiles, setElectronFiles] = useState([]); + const [desktopZipItems, setDesktopZipItems] = useState([]); /** - * Consolidated and cleaned list obtained from {@link webFiles} and - * {@link desktopFilePaths}. + * Consolidated and cleaned list obtained from {@link webFiles}, + * {@link desktopFiles}, {@link desktopFilePaths} and + * {@link desktopZipItems}. + * + * Augment each {@link UploadItem} with its "path" (relative path or name in + * the case of {@link webFiles}, absolute path in the case of + * {@link desktopFiles}, {@link desktopFilePaths}, and the path within the + * zip file for {@link desktopZipItems}). + * + * See the documentation of {@link UploadItem} for more details. 
*/ - const fileOrPathsToUpload = useRef<(File | string)[]>([]); + const uploadItemsAndPaths = useRef<[UploadItem, string][]>([]); /** * If true, then the next upload we'll be processing was initiated by our @@ -149,9 +187,12 @@ export default function Uploader(props: Props) { */ const pendingDesktopUploadCollectionName = useRef(""); - // This is set when the user choses a type to upload from the upload type selector dialog + /** + * This is set to thue user's choice when the user chooses one of the + * predefined type to upload from the upload type selector dialog + */ const pickedUploadType = useRef(null); - const zipPaths = useRef(null); + const currentUploadPromise = useRef>(null); const uploadRunning = useRef(false); const uploaderNameRef = useRef(null); @@ -166,9 +207,9 @@ export default function Uploader(props: Props) { setChoiceModalView(false); uploadRunning.current = false; }; + const handleCollectionSelectorCancel = () => { uploadRunning.current = false; - appContext.resetSharedFiles(); }; const handleUserNameInputDialogClose = () => { @@ -192,6 +233,7 @@ export default function Uploader(props: Props) { publicCollectionGalleryContext, appContext.isCFProxyDisabled, ); + if (uploadManager.isUploadRunning()) { setUploadProgressView(true); } @@ -215,16 +257,15 @@ export default function Uploader(props: Props) { watcher.init(upload, requestSyncWithRemote); electron.pendingUploads().then((pending) => { - if (pending) { - log.info("Resuming pending desktop upload", pending); - resumeDesktopUpload( - pending.type == "files" - ? 
PICKED_UPLOAD_TYPE.FILES - : PICKED_UPLOAD_TYPE.ZIPS, - pending.files, - pending.collectionName, - ); - } + if (!pending) return; + + const { collectionName, filePaths, zipItems } = pending; + + log.info("Resuming pending upload", pending); + isPendingDesktopUpload.current = true; + pendingDesktopUploadCollectionName.current = collectionName; + setDesktopFilePaths(filePaths); + setDesktopZipItems(zipItems); }); } }, [ @@ -234,181 +275,176 @@ export default function Uploader(props: Props) { appContext.isCFProxyDisabled, ]); - // this handles the change of selectorFiles changes on web when user selects - // files for upload through the opened file/folder selector or dragAndDrop them - // the webFiles state is update which triggers the upload of those files + // Handle selected files when user selects files for upload through the open + // file / open folder selection dialog, or drag-and-drops them. useEffect(() => { if (appContext.watchFolderView) { // if watch folder dialog is open don't catch the dropped file // as they are folder being dropped for watching return; } - if ( - pickedUploadType.current === PICKED_UPLOAD_TYPE.FOLDERS && - props.webFolderSelectorFiles?.length > 0 - ) { - log.info(`received folder upload request`); - setWebFiles(props.webFolderSelectorFiles); - } else if ( - pickedUploadType.current === PICKED_UPLOAD_TYPE.FILES && - props.webFileSelectorFiles?.length > 0 - ) { - log.info(`received file upload request`); - setWebFiles(props.webFileSelectorFiles); - } else if (props.dragAndDropFiles?.length > 0) { - isDragAndDrop.current = true; - if (electron) { - const main = async () => { - try { - log.info(`uploading dropped files from desktop app`); - // check and parse dropped files which are zip files - let electronFiles = [] as ElectronFile[]; - for (const file of props.dragAndDropFiles) { - if (file.name.endsWith(".zip")) { - const zipFiles = - await electron.getElectronFilesFromGoogleZip( - (file as any).path, - ); - log.info( - `zip file - 
${file.name} contains ${zipFiles.length} files`, - ); - electronFiles = [...electronFiles, ...zipFiles]; - } else { - // type cast to ElectronFile as the file is dropped from desktop app - // type file and ElectronFile should be interchangeable, but currently they have some differences. - // Typescript is giving error - // Conversion of type 'File' to type 'ElectronFile' may be a mistake because neither type sufficiently - // overlaps with the other. If this was intentional, convert the expression to 'unknown' first. - // Type 'File' is missing the following properties from type 'ElectronFile': path, blob - // for now patching by type casting first to unknown and then to ElectronFile - // TODO: fix types and remove type cast - electronFiles.push( - file as unknown as ElectronFile, - ); - } - } - log.info( - `uploading dropped files from desktop app - ${electronFiles.length} files found`, - ); - setElectronFiles(electronFiles); - } catch (e) { - log.error("failed to upload desktop dropped files", e); - setWebFiles(props.dragAndDropFiles); - } - }; - main(); - } else { - log.info(`uploading dropped files from web app`); - setWebFiles(props.dragAndDropFiles); - } + + let files: File[]; + + switch (pickedUploadType.current) { + case PICKED_UPLOAD_TYPE.FILES: + files = fileSelectorFiles; + break; + + case PICKED_UPLOAD_TYPE.FOLDERS: + files = folderSelectorFiles; + break; + + case PICKED_UPLOAD_TYPE.ZIPS: + files = fileSelectorZipFiles; + break; + + default: + files = dragAndDropFiles; + break; + } + + if (electron) { + desktopFilesAndZipItems(electron, files).then( + ({ fileAndPaths, zipItems }) => { + setDesktopFiles(fileAndPaths); + setDesktopZipItems(zipItems); + }, + ); + } else { + setWebFiles(files); } }, [ - props.dragAndDropFiles, - props.webFileSelectorFiles, - props.webFolderSelectorFiles, + dragAndDropFiles, + fileSelectorFiles, + folderSelectorFiles, + fileSelectorZipFiles, ]); + // Trigger an upload when any of the dependencies change. 
useEffect(() => { - if ( - desktopFilePaths.length > 0 || - electronFiles.length > 0 || - webFiles.length > 0 || - appContext.sharedFiles?.length > 0 - ) { - log.info( - `upload request type: ${ - desktopFilePaths.length > 0 - ? "desktopFilePaths" - : electronFiles.length > 0 - ? "electronFiles" - : webFiles.length > 0 - ? "webFiles" - : "sharedFiles" - } count ${ - desktopFilePaths.length + - electronFiles.length + - webFiles.length + - (appContext.sharedFiles?.length ?? 0) - }`, - ); - if (uploadManager.isUploadRunning()) { - if (watcher.isUploadRunning()) { - // Pause watch folder sync on user upload - log.info( - "Folder watcher was uploading, pausing it to first run user upload", - ); - watcher.pauseRunningSync(); - } else { - log.info( - "Ignoring new upload request because an upload is already running", - ); - return; - } - } - uploadRunning.current = true; - props.closeUploadTypeSelector(); - props.setLoading(true); - if (webFiles?.length > 0) { - // File selection by drag and drop or selection of file. - fileOrPathsToUpload.current = webFiles; - setWebFiles([]); - } else if (appContext.sharedFiles?.length > 0) { - fileOrPathsToUpload.current = appContext.sharedFiles; - appContext.resetSharedFiles(); - } else if (electronFiles?.length > 0) { - // File selection from desktop app - deprecated - log.warn("Using deprecated code path for ElectronFiles"); - fileOrPathsToUpload.current = electronFiles.map((f) => f.path); - setElectronFiles([]); - } else if (desktopFilePaths && desktopFilePaths.length > 0) { - // File selection from our desktop app - fileOrPathsToUpload.current = desktopFilePaths; - setDesktopFilePaths([]); - } + const allItemAndPaths = [ + /* TODO(MR): ElectronFile | use webkitRelativePath || name here */ + webFiles.map((f) => [f, f["path"] ?? 
f.name]), + desktopFiles.map((fp) => [fp, fp.path]), + desktopFilePaths.map((p) => [p, p]), + desktopZipItems.map((ze) => [ze, ze[1]]), + ].flat() as [UploadItem, string][]; - log.debug(() => "Uploader received:"); - log.debug(() => fileOrPathsToUpload.current); + if (allItemAndPaths.length == 0) return; - fileOrPathsToUpload.current = pruneHiddenFiles( - fileOrPathsToUpload.current, - ); - if (fileOrPathsToUpload.current.length === 0) { - props.setLoading(false); + if (uploadManager.isUploadRunning()) { + if (watcher.isUploadRunning()) { + log.info("Pausing watch folder sync to prioritize user upload"); + watcher.pauseRunningSync(); + } else { + log.info( + "Ignoring new upload request when upload is already running", + ); + return; + } + } + + uploadRunning.current = true; + props.closeUploadTypeSelector(); + props.setLoading(true); + + setWebFiles([]); + setDesktopFiles([]); + setDesktopFilePaths([]); + setDesktopZipItems([]); + + // Remove hidden files (files whose names begins with a "."). 
+ const prunedItemAndPaths = allItemAndPaths.filter( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + ([_, p]) => !basename(p).startsWith("."), + ); + + uploadItemsAndPaths.current = prunedItemAndPaths; + if (uploadItemsAndPaths.current.length === 0) { + props.setLoading(false); + return; + } + + const importSuggestion = getImportSuggestion( + pickedUploadType.current, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + prunedItemAndPaths.map(([_, p]) => p), + ); + setImportSuggestion(importSuggestion); + + log.debug(() => "Uploader invoked:"); + log.debug(() => uploadItemsAndPaths.current); + log.debug(() => importSuggestion); + + const _pickedUploadType = pickedUploadType.current; + pickedUploadType.current = null; + props.setLoading(false); + + (async () => { + if (publicCollectionGalleryContext.accessedThroughSharedURL) { + const uploaderName = await getPublicCollectionUploaderName( + getPublicCollectionUID( + publicCollectionGalleryContext.token, + ), + ); + uploaderNameRef.current = uploaderName; + showUserNameInputDialog(); return; } - const importSuggestion = getImportSuggestion( - pickedUploadType.current, - fileOrPathsToUpload.current.map((file) => - /** TODO(MR): Is path valid for Web files? */ - typeof file == "string" ? 
file : file["path"], - ), - ); - setImportSuggestion(importSuggestion); + if (isPendingDesktopUpload.current) { + isPendingDesktopUpload.current = false; + if (pendingDesktopUploadCollectionName.current) { + uploadFilesToNewCollections( + "root", + pendingDesktopUploadCollectionName.current, + ); + pendingDesktopUploadCollectionName.current = null; + } else { + uploadFilesToNewCollections("parent"); + } + return; + } - handleCollectionCreationAndUpload( - importSuggestion, - props.isFirstUpload, - pickedUploadType.current, - publicCollectionGalleryContext.accessedThroughSharedURL, - ); - pickedUploadType.current = null; - props.setLoading(false); - } - }, [webFiles, appContext.sharedFiles, electronFiles, desktopFilePaths]); + if (electron && _pickedUploadType === PICKED_UPLOAD_TYPE.ZIPS) { + uploadFilesToNewCollections("parent"); + return; + } - const resumeDesktopUpload = async ( - type: PICKED_UPLOAD_TYPE, - electronFiles: ElectronFile[], - collectionName: string, - ) => { - if (electronFiles && electronFiles?.length > 0) { - isPendingDesktopUpload.current = true; - pendingDesktopUploadCollectionName.current = collectionName; - pickedUploadType.current = type; - setElectronFiles(electronFiles); - } - }; + if (isFirstUpload && !importSuggestion.rootFolderName) { + importSuggestion.rootFolderName = t( + "autogenerated_first_album_name", + ); + } + + if (isDragAndDrop.current) { + isDragAndDrop.current = false; + if ( + props.activeCollection && + props.activeCollection.owner.id === galleryContext.user?.id + ) { + uploadFilesToExistingCollection(props.activeCollection); + return; + } + } + + let showNextModal = () => {}; + if (importSuggestion.hasNestedFolders) { + showNextModal = () => setChoiceModalView(true); + } else { + showNextModal = () => + showCollectionCreateModal(importSuggestion.rootFolderName); + } + + props.setCollectionSelectorAttributes({ + callback: uploadFilesToExistingCollection, + onCancel: handleCollectionSelectorCancel, + showNextModal, + 
intent: CollectionSelectorIntent.upload, + }); + })(); + }, [webFiles, desktopFiles, desktopFilePaths, desktopZipItems]); const preCollectionCreationAction = async () => { props.closeCollectionSelector?.(); @@ -421,100 +457,78 @@ export default function Uploader(props: Props) { collection: Collection, uploaderName?: string, ) => { - try { - log.info( - `Uploading files existing collection id ${collection.id} (${collection.name})`, - ); - await preCollectionCreationAction(); - const filesWithCollectionToUpload = fileOrPathsToUpload.current.map( - (fileOrPath, index) => ({ - fileOrPath, - localID: index, - collectionID: collection.id, - }), - ); - await waitInQueueAndUploadFiles( - filesWithCollectionToUpload, - [collection], - uploaderName, - ); - } catch (e) { - log.error("Failed to upload files to existing collection", e); - } + await preCollectionCreationAction(); + const uploadItemsWithCollection = uploadItemsAndPaths.current.map( + ([uploadItem], index) => ({ + uploadItem, + localID: index, + collectionID: collection.id, + }), + ); + await waitInQueueAndUploadFiles( + uploadItemsWithCollection, + [collection], + uploaderName, + ); + uploadItemsAndPaths.current = null; }; const uploadFilesToNewCollections = async ( mapping: CollectionMapping, collectionName?: string, ) => { - try { - log.info( - `Uploading files to collection using ${mapping} mapping (${collectionName ?? 
""})`, + await preCollectionCreationAction(); + let uploadItemsWithCollection: UploadItemWithCollection[] = []; + const collections: Collection[] = []; + let collectionNameToUploadItems = new Map(); + if (mapping == "root") { + collectionNameToUploadItems.set( + collectionName, + uploadItemsAndPaths.current.map(([i]) => i), ); - await preCollectionCreationAction(); - let filesWithCollectionToUpload: FileWithCollection[] = []; - const collections: Collection[] = []; - let collectionNameToFileOrPaths = new Map< - string, - (File | string)[] - >(); - if (mapping == "root") { - collectionNameToFileOrPaths.set( - collectionName, - fileOrPathsToUpload.current, - ); - } else { - collectionNameToFileOrPaths = groupFilesBasedOnParentFolder( - fileOrPathsToUpload.current, - ); - } - try { - const existingCollections = await getLatestCollections(); - let index = 0; - for (const [ - collectionName, - fileOrPaths, - ] of collectionNameToFileOrPaths) { - const collection = await getOrCreateAlbum( - collectionName, - existingCollections, - ); - collections.push(collection); - props.setCollections([ - ...existingCollections, - ...collections, - ]); - filesWithCollectionToUpload = [ - ...filesWithCollectionToUpload, - ...fileOrPaths.map((fileOrPath) => ({ - localID: index++, - collectionID: collection.id, - fileOrPath, - })), - ]; - } - } catch (e) { - closeUploadProgress(); - log.error("Failed to create album", e); - appContext.setDialogMessage({ - title: t("ERROR"), - close: { variant: "critical" }, - content: t("CREATE_ALBUM_FAILED"), - }); - throw e; - } - await waitInQueueAndUploadFiles( - filesWithCollectionToUpload, - collections, + } else { + collectionNameToUploadItems = groupFilesBasedOnParentFolder( + uploadItemsAndPaths.current, ); - fileOrPathsToUpload.current = null; - } catch (e) { - log.error("Failed to upload files to new collections", e); } + try { + const existingCollections = await getLatestCollections(); + let index = 0; + for (const [ + collectionName, + 
uploadItems, + ] of collectionNameToUploadItems) { + const collection = await getOrCreateAlbum( + collectionName, + existingCollections, + ); + collections.push(collection); + props.setCollections([...existingCollections, ...collections]); + uploadItemsWithCollection = [ + ...uploadItemsWithCollection, + ...uploadItems.map((uploadItem) => ({ + localID: index++, + collectionID: collection.id, + uploadItem, + })), + ]; + } + } catch (e) { + closeUploadProgress(); + log.error("Failed to create album", e); + appContext.setDialogMessage({ + title: t("ERROR"), + close: { variant: "critical" }, + content: t("CREATE_ALBUM_FAILED"), + }); + throw e; + } + await waitInQueueAndUploadFiles(uploadItemsWithCollection, collections); + uploadItemsAndPaths.current = null; }; const waitInQueueAndUploadFiles = async ( - filesWithCollectionToUploadIn: FileWithCollection[], + uploadItemsWithCollection: UploadItemWithCollection[], collections: Collection[], uploaderName?: string, ) => { @@ -523,7 +537,7 @@ export default function Uploader(props: Props) { currentPromise, async () => await uploadFiles( - filesWithCollectionToUploadIn, + uploadItemsWithCollection, collections, uploaderName, ), @@ -544,7 +558,7 @@ export default function Uploader(props: Props) { } const uploadFiles = async ( - filesWithCollectionToUploadIn: FileWithCollection[], + uploadItemsWithCollection: UploadItemWithCollection[], collections: Collection[], uploaderName?: string, ) => { @@ -555,27 +569,16 @@ export default function Uploader(props: Props) { !isPendingDesktopUpload.current && !watcher.isUploadRunning() ) { - await setToUploadCollection(collections); - if (zipPaths.current) { - await electron.setPendingUploadFiles( - "zips", - zipPaths.current, - ); - zipPaths.current = null; - } - await electron.setPendingUploadFiles( - "files", - filesWithCollectionToUploadIn.map( - // TODO(MR): ElectronFile - ({ fileOrPath }) => - typeof fileOrPath == "string" - ? 
fileOrPath - : (fileOrPath as any as ElectronFile).path, - ), + setPendingUploads( + electron, + collections, + uploadItemsWithCollection + .map(({ uploadItem }) => uploadItem) + .filter((x) => x), ); } - const wereFilesProcessed = await uploadManager.uploadFiles( - filesWithCollectionToUploadIn, + const wereFilesProcessed = await uploadManager.uploadItems( + uploadItemsWithCollection, collections, uploaderName, ); @@ -583,11 +586,12 @@ export default function Uploader(props: Props) { if (isElectron()) { if (watcher.isUploadRunning()) { await watcher.allFileUploadsDone( - filesWithCollectionToUploadIn, + uploadItemsWithCollection, collections, ); } else if (watcher.isSyncPaused()) { - // resume the service after user upload is done + // Resume folder watch after the user upload that + // interrupted it is done. watcher.resumePausedSync(); } } @@ -603,11 +607,11 @@ export default function Uploader(props: Props) { const retryFailed = async () => { try { log.info("Retrying failed uploads"); - const { files, collections } = - uploadManager.getFailedFilesWithCollections(); + const { items, collections } = + uploadManager.getFailedItemsWithCollections(); const uploaderName = uploadManager.getUploaderName(); await preUploadAction(); - await uploadManager.uploadFiles(files, collections, uploaderName); + await uploadManager.uploadItems(items, collections, uploaderName); } catch (e) { log.error("Retrying failed uploads failed", e); showUserFacingError(e.message); @@ -662,128 +666,28 @@ export default function Uploader(props: Props) { }); }; - const handleCollectionCreationAndUpload = async ( - importSuggestion: ImportSuggestion, - isFirstUpload: boolean, - pickedUploadType: PICKED_UPLOAD_TYPE, - accessedThroughSharedURL?: boolean, - ) => { - try { - if (accessedThroughSharedURL) { - const uploaderName = await getPublicCollectionUploaderName( - getPublicCollectionUID( - publicCollectionGalleryContext.token, - ), - ); - uploaderNameRef.current = uploaderName; - 
showUserNameInputDialog(); - return; - } - - if (isPendingDesktopUpload.current) { - isPendingDesktopUpload.current = false; - if (pendingDesktopUploadCollectionName.current) { - uploadFilesToNewCollections( - "root", - pendingDesktopUploadCollectionName.current, - ); - pendingDesktopUploadCollectionName.current = null; - } else { - uploadFilesToNewCollections("parent"); - } - return; - } - - if (isElectron() && pickedUploadType === PICKED_UPLOAD_TYPE.ZIPS) { - uploadFilesToNewCollections("parent"); - return; - } - - if (isFirstUpload && !importSuggestion.rootFolderName) { - importSuggestion.rootFolderName = FIRST_ALBUM_NAME; - } - - if (isDragAndDrop.current) { - isDragAndDrop.current = false; - if ( - props.activeCollection && - props.activeCollection.owner.id === galleryContext.user?.id - ) { - uploadFilesToExistingCollection(props.activeCollection); - return; - } - } - - let showNextModal = () => {}; - if (importSuggestion.hasNestedFolders) { - showNextModal = () => setChoiceModalView(true); - } else { - showNextModal = () => - showCollectionCreateModal(importSuggestion.rootFolderName); - } - - props.setCollectionSelectorAttributes({ - callback: uploadFilesToExistingCollection, - onCancel: handleCollectionSelectorCancel, - showNextModal, - intent: CollectionSelectorIntent.upload, - }); - } catch (e) { - // TODO(MR): Why? 
- log.warn("Ignoring error in handleCollectionCreationAndUpload", e); - } - }; - - const handleDesktopUpload = async ( - type: PICKED_UPLOAD_TYPE, - electron: Electron, - ) => { - let files: ElectronFile[]; - pickedUploadType.current = type; - if (type === PICKED_UPLOAD_TYPE.FILES) { - files = await electron.showUploadFilesDialog(); - } else if (type === PICKED_UPLOAD_TYPE.FOLDERS) { - files = await electron.showUploadDirsDialog(); - } else { - const response = await electron.showUploadZipDialog(); - files = response.files; - zipPaths.current = response.zipPaths; - } - if (files?.length > 0) { - log.info( - ` desktop upload for type:${type} and fileCount: ${files?.length} requested`, - ); - setElectronFiles(files); - props.closeUploadTypeSelector(); - } - }; - - const handleWebUpload = async (type: PICKED_UPLOAD_TYPE) => { - pickedUploadType.current = type; - if (type === PICKED_UPLOAD_TYPE.FILES) { - props.showUploadFilesDialog(); - } else if (type === PICKED_UPLOAD_TYPE.FOLDERS) { - props.showUploadDirsDialog(); - } else { - appContext.setDialogMessage(getDownloadAppMessage()); - } - }; - const cancelUploads = () => { uploadManager.cancelRunningUpload(); }; - const handleUpload = (type) => () => { - if (electron) { - handleDesktopUpload(type, electron); + const handleUpload = (type: PICKED_UPLOAD_TYPE) => { + pickedUploadType.current = type; + if (type === PICKED_UPLOAD_TYPE.FILES) { + openFileSelector(); + } else if (type === PICKED_UPLOAD_TYPE.FOLDERS) { + openFolderSelector(); } else { - handleWebUpload(type); + if (openZipFileSelector && electron) { + openZipFileSelector(); + } else { + appContext.setDialogMessage(getDownloadAppMessage()); + } } }; - const handleFileUpload = handleUpload(PICKED_UPLOAD_TYPE.FILES); - const handleFolderUpload = handleUpload(PICKED_UPLOAD_TYPE.FOLDERS); - const handleZipUpload = handleUpload(PICKED_UPLOAD_TYPE.ZIPS); + const handleFileUpload = () => handleUpload(PICKED_UPLOAD_TYPE.FILES); + const handleFolderUpload = () => 
handleUpload(PICKED_UPLOAD_TYPE.FOLDERS); + const handleZipUpload = () => handleUpload(PICKED_UPLOAD_TYPE.ZIPS); const handlePublicUpload = async ( uploaderName: string, @@ -807,28 +711,33 @@ export default function Uploader(props: Props) { } }; - const handleUploadToSingleCollection = () => { - uploadToSingleNewCollection(importSuggestion.rootFolderName); - }; - - const handleUploadToMultipleCollections = () => { - if (importSuggestion.hasRootLevelFileWithFolder) { - appContext.setDialogMessage( - getRootLevelFileWithFolderNotAllowMessage(), - ); - return; - } - uploadFilesToNewCollections("parent"); - }; - const didSelectCollectionMapping = (mapping: CollectionMapping) => { switch (mapping) { case "root": - handleUploadToSingleCollection(); + uploadToSingleNewCollection( + // rootFolderName would be empty here in one edge case: + // - User drags and drops a mixture of files and folders + // - They select the "upload to multiple albums" option + // - They see the error, close the error + // - Then they select the "upload to single album" option + // + // In such a flow, we'll reach here with an empty + // rootFolderName. The proper fix for this would be + // to rearrange the flow and ask them to name the album here, + // but we currently don't have support for chaining modals. + // So in the meanwhile, keep a fallback album name at hand. + importSuggestion.rootFolderName ??
+ t("autogenerated_default_album_name"), + ); break; case "parent": - handleUploadToMultipleCollections(); - break; + if (importSuggestion.hasRootLevelFileWithFolder) { + appContext.setDialogMessage( + getRootLevelFileWithFolderNotAllowMessage(), + ); + } else { + uploadFilesToNewCollections("parent"); + } } }; @@ -864,7 +773,7 @@ export default function Uploader(props: Props) { open={userNameInputDialogView} onClose={handleUserNameInputDialogClose} onNameSubmit={handlePublicUpload} - toUploadFilesCount={fileOrPathsToUpload.current?.length} + toUploadFilesCount={uploadItemsAndPaths.current?.length} uploaderName={uploaderNameRef.current} /> @@ -881,6 +790,25 @@ async function waitAndRun( await task(); } +const desktopFilesAndZipItems = async ( + electron: Electron, + files: File[], +): Promise<{ fileAndPaths: FileAndPath[]; zipItems: ZipItem[] }> => { + const fileAndPaths: FileAndPath[] = []; + let zipItems: ZipItem[] = []; + + for (const file of files) { + const path = electron.pathForFile(file); + if (file.name.endsWith(".zip")) { + zipItems = zipItems.concat(await electron.listZipItems(path)); + } else { + fileAndPaths.push({ file, path }); + } + } + + return { fileAndPaths, zipItems }; +}; + // This is used to prompt the user the make upload strategy choice interface ImportSuggestion { rootFolderName: string; @@ -945,16 +873,12 @@ function getImportSuggestion( // [a => [j], // b => [e,f,g], // c => [h, i]] -const groupFilesBasedOnParentFolder = (fileOrPaths: (File | string)[]) => { - const result = new Map(); - for (const fileOrPath of fileOrPaths) { - const filePath = - /* TODO(MR): ElectronFile */ - typeof fileOrPath == "string" - ? 
fileOrPath - : (fileOrPath["path"] as string); - - let folderPath = filePath.substring(0, filePath.lastIndexOf("/")); +const groupFilesBasedOnParentFolder = ( + uploadItemsAndPaths: [UploadItem, string][], +) => { + const result = new Map(); + for (const [uploadItem, pathOrName] of uploadItemsAndPaths) { + let folderPath = pathOrName.substring(0, pathOrName.lastIndexOf("/")); // If the parent folder of a file is "metadata" // we consider it to be part of the parent folder // For Eg,For FileList -> [a/x.png, a/metadata/x.png.json] @@ -968,15 +892,47 @@ const groupFilesBasedOnParentFolder = (fileOrPaths: (File | string)[]) => { ); if (!folderName) throw Error("Unexpected empty folder name"); if (!result.has(folderName)) result.set(folderName, []); - result.get(folderName).push(fileOrPath); + result.get(folderName).push(uploadItem); } return result; }; -/** - * Filter out hidden files from amongst {@link fileOrPaths}. - * - * Hidden files are those whose names begin with a "." (dot). - */ -const pruneHiddenFiles = (fileOrPaths: (File | string)[]) => - fileOrPaths.filter((f) => !fopFileName(f).startsWith(".")); +export const setPendingUploads = async ( + electron: Electron, + collections: Collection[], + uploadItems: UploadItem[], +) => { + let collectionName: string | undefined; + /* collection being one suggest one of two things + 1. Either the user has upload to a single existing collection + 2. 
Created a new single collection to upload to + may have had multiple folder, but chose to upload + to one album + hence saving the collection name when upload collection count is 1 + helps the info of user choosing this options + and on next upload we can directly start uploading to this collection + */ + if (collections.length == 1) { + collectionName = collections[0].name; + } + + const filePaths: string[] = []; + const zipItems: ZipItem[] = []; + for (const item of uploadItems) { + if (item instanceof File) { + throw new Error("Unexpected web file for a desktop pending upload"); + } else if (typeof item == "string") { + filePaths.push(item); + } else if (Array.isArray(item)) { + zipItems.push(item); + } else { + filePaths.push(item.path); + } + } + + await electron.setPendingUploads({ + collectionName, + filePaths, + zipItems: zipItems, + }); +}; diff --git a/web/apps/photos/src/components/UploadSelectorInputs.tsx b/web/apps/photos/src/components/UploadSelectorInputs.tsx index 1b110d532..13e33fc6d 100644 --- a/web/apps/photos/src/components/UploadSelectorInputs.tsx +++ b/web/apps/photos/src/components/UploadSelectorInputs.tsx @@ -2,12 +2,16 @@ export default function UploadSelectorInputs({ getDragAndDropInputProps, getFileSelectorInputProps, getFolderSelectorInputProps, + getZipFileSelectorInputProps, }) { return ( <> + {getZipFileSelectorInputProps && ( + + )} ); } diff --git a/web/apps/photos/src/pages/_app.tsx b/web/apps/photos/src/pages/_app.tsx index 4b5fe3107..0e80d0df9 100644 --- a/web/apps/photos/src/pages/_app.tsx +++ b/web/apps/photos/src/pages/_app.tsx @@ -80,8 +80,6 @@ const redirectMap = new Map([ type AppContextType = { showNavBar: (show: boolean) => void; - sharedFiles: File[]; - resetSharedFiles: () => void; mlSearchEnabled: boolean; mapEnabled: boolean; updateMlSearchEnabled: (enabled: boolean) => Promise; @@ -114,7 +112,6 @@ export default function App({ Component, pageProps }: AppProps) { typeof window !== "undefined" && 
!window.navigator.onLine, ); const [showNavbar, setShowNavBar] = useState(false); - const [sharedFiles, setSharedFiles] = useState(null); const [redirectName, setRedirectName] = useState(null); const [mlSearchEnabled, setMlSearchEnabled] = useState(false); const [mapEnabled, setMapEnabled] = useState(false); @@ -227,7 +224,6 @@ export default function App({ Component, pageProps }: AppProps) { const setUserOnline = () => setOffline(false); const setUserOffline = () => setOffline(true); - const resetSharedFiles = () => setSharedFiles(null); useEffect(() => { const redirectTo = async (redirect) => { @@ -352,22 +348,8 @@ export default function App({ Component, pageProps }: AppProps) { {showNavbar && } - {offline && t("OFFLINE_MSG")} + {isI18nReady && offline && t("OFFLINE_MSG")} - {sharedFiles && - (router.pathname === "/gallery" ? ( - - {t("files_to_be_uploaded", { - count: sharedFiles.length, - })} - - ) : ( - - {t("login_to_upload_files", { - count: sharedFiles.length, - })} - - ))} {blockingLoad && ( @@ -1112,7 +1121,6 @@ export default function Gallery() { null, false, )} - uploadTypeSelectorIntent={uploadTypeSelectorIntent} setLoading={setBlockingLoad} setCollectionNamerAttributes={setCollectionNamerAttributes} setShouldDisableDropzone={setShouldDisableDropzone} @@ -1121,13 +1129,18 @@ export default function Gallery() { isFirstUpload={ !hasNonSystemCollections(collectionSummaries) } - webFileSelectorFiles={webFileSelectorFiles} - webFolderSelectorFiles={webFolderSelectorFiles} - dragAndDropFiles={dragAndDropFiles} - uploadTypeSelectorView={uploadTypeSelectorView} - showUploadFilesDialog={openFileSelector} - showUploadDirsDialog={openFolderSelector} - showSessionExpiredMessage={showSessionExpiredMessage} + {...{ + dragAndDropFiles, + openFileSelector, + fileSelectorFiles, + openFolderSelector, + folderSelectorFiles, + openZipFileSelector, + fileSelectorZipFiles, + uploadTypeSelectorIntent, + uploadTypeSelectorView, + showSessionExpiredMessage, + }} /> * * Also 
move its associated metadata JSON to Trash. * - * @param exportDir The root directory on the user's filesystem where we are + * @param exportDir The root directory on the user's file system where we are * exporting to. * */ const moveToTrash = async ( diff --git a/web/apps/photos/src/services/ffmpeg.ts b/web/apps/photos/src/services/ffmpeg.ts index 6fc2404e2..4dfdb3f64 100644 --- a/web/apps/photos/src/services/ffmpeg.ts +++ b/web/apps/photos/src/services/ffmpeg.ts @@ -1,4 +1,3 @@ -import { ElectronFile } from "@/next/types/file"; import type { Electron } from "@/next/types/ipc"; import { ComlinkWorker } from "@/next/worker/comlink-worker"; import { validateAndGetCreationUnixTimeInMicroSeconds } from "@ente/shared/time"; @@ -11,6 +10,11 @@ import { import { NULL_LOCATION } from "constants/upload"; import type { ParsedExtractedMetadata } from "types/metadata"; import type { DedicatedFFmpegWorker } from "worker/ffmpeg.worker"; +import { + toDataOrPathOrZipEntry, + type DesktopUploadItem, + type UploadItem, +} from "./upload/types"; /** * Generate a thumbnail for the given video using a wasm FFmpeg running in a web @@ -51,7 +55,7 @@ const _generateVideoThumbnail = async ( * for the new files that the user is adding. * * @param dataOrPath The input video's data or the path to the video on the - * user's local filesystem. See: [Note: Reading a fileOrPath]. + * user's local file system. See: [Note: Reading a UploadItem]. * * @returns JPEG data of the generated thumbnail. 
* @@ -59,12 +63,12 @@ const _generateVideoThumbnail = async ( */ export const generateVideoThumbnailNative = async ( electron: Electron, - dataOrPath: Uint8Array | string, + desktopUploadItem: DesktopUploadItem, ) => _generateVideoThumbnail((seekTime: number) => electron.ffmpegExec( makeGenThumbnailCommand(seekTime), - dataOrPath, + toDataOrPathOrZipEntry(desktopUploadItem), "jpeg", 0, ), @@ -93,18 +97,23 @@ const makeGenThumbnailCommand = (seekTime: number) => [ * This function is called during upload, when we need to extract the metadata * of videos that the user is uploading. * - * @param fileOrPath A {@link File}, or the absolute path to a file on the + * @param uploadItem A {@link File}, or the absolute path to a file on the * user's local filesytem. A path can only be provided when we're running in the * context of our desktop app. */ export const extractVideoMetadata = async ( - fileOrPath: File | string, + uploadItem: UploadItem, ): Promise => { const command = extractVideoMetadataCommand; const outputData = - fileOrPath instanceof File - ? await ffmpegExecWeb(command, fileOrPath, "txt", 0) - : await electron.ffmpegExec(command, fileOrPath, "txt", 0); + uploadItem instanceof File + ? await ffmpegExecWeb(command, uploadItem, "txt", 0) + : await electron.ffmpegExec( + command, + toDataOrPathOrZipEntry(uploadItem), + "txt", + 0, + ); return parseFFmpegExtractedMetadata(outputData); }; @@ -200,23 +209,6 @@ function parseCreationTime(creationTime: string) { return dateTime; } -/** Called when viewing a file */ -export async function convertToMP4(file: File) { - return await ffmpegExec2( - [ - ffmpegPathPlaceholder, - "-i", - inputPathPlaceholder, - "-preset", - "ultrafast", - outputPathPlaceholder, - ], - file, - "mp4", - 30 * 1000, - ); -} - /** * Run the given FFmpeg command using a wasm FFmpeg running in a web worker. 
* @@ -234,55 +226,53 @@ const ffmpegExecWeb = async ( }; /** - * Run the given FFmpeg command using a native FFmpeg binary bundled with our - * desktop app. + * Convert a video from a format that is not supported in the browser to MP4. + * + * This function is called when the user views a video or a live photo, and we + * want to play it back. The idea is to convert it to MP4 which has much more + * universal support in browsers. + * + * @param blob The video blob. + * + * @returns The mp4 video data. + */ +export const convertToMP4 = async (blob: Blob) => + ffmpegExecNativeOrWeb( + [ + ffmpegPathPlaceholder, + "-i", + inputPathPlaceholder, + "-preset", + "ultrafast", + outputPathPlaceholder, + ], + blob, + "mp4", + 30 * 1000, + ); + +/** + * Run the given FFmpeg command using a native FFmpeg binary when we're running + * in the context of our desktop app, otherwise using the browser based wasm + * FFmpeg implementation. * * See also: {@link ffmpegExecWeb}. */ -/* -TODO(MR): Remove me -const ffmpegExecNative = async ( - electron: Electron, +const ffmpegExecNativeOrWeb = async ( command: string[], blob: Blob, - timeoutMs: number = 0, -) => { - const electron = globalThis.electron; - if (electron) { - const data = new Uint8Array(await blob.arrayBuffer()); - return await electron.ffmpegExec(command, data, timeoutMs); - } else { - const worker = await workerFactory.lazy(); - return await worker.exec(command, blob, timeoutMs); - } -}; -*/ - -const ffmpegExec2 = async ( - command: string[], - inputFile: File | ElectronFile, outputFileExtension: string, - timeoutMS: number = 0, + timeoutMs: number, ) => { const electron = globalThis.electron; - if (electron || false) { - throw new Error("WIP"); - // return electron.ffmpegExec( - // command, - // /* TODO(MR): ElectronFile changes */ - // inputFile as unknown as string, - // outputFileName, - // timeoutMS, - // ); - } else { - /* TODO(MR): ElectronFile changes */ - return ffmpegExecWeb( + if (electron) + return
electron.ffmpegExec( command, - inputFile as File, + new Uint8Array(await blob.arrayBuffer()), outputFileExtension, - timeoutMS, + timeoutMs, ); - } + else return ffmpegExecWeb(command, blob, outputFileExtension, timeoutMs); }; /** Lazily create a singleton instance of our worker */ diff --git a/web/apps/photos/src/services/heic-convert.ts b/web/apps/photos/src/services/heic-convert.ts index 478cce218..c2ea19839 100644 --- a/web/apps/photos/src/services/heic-convert.ts +++ b/web/apps/photos/src/services/heic-convert.ts @@ -1,4 +1,3 @@ -import { convertBytesToHumanReadable } from "@/next/file"; import log from "@/next/log"; import { ComlinkWorker } from "@/next/worker/comlink-worker"; import { CustomError } from "@ente/shared/error"; @@ -51,15 +50,10 @@ class HEICConverter { const startTime = Date.now(); const convertedHEIC = await worker.heicToJPEG(fileBlob); - log.info( - `originalFileSize:${convertBytesToHumanReadable( - fileBlob?.size, - )},convertedFileSize:${convertBytesToHumanReadable( - convertedHEIC?.size, - )}, heic conversion time: ${ - Date.now() - startTime - }ms `, + const ms = Math.round( + Date.now() - startTime, ); + log.debug(() => `heic => jpeg (${ms} ms)`); clearTimeout(timeout); resolve(convertedHEIC); } catch (e) { @@ -71,18 +65,7 @@ class HEICConverter { ); if (!convertedHEIC || convertedHEIC?.size === 0) { log.error( - `converted heic fileSize is Zero - ${JSON.stringify( - { - originalFileSize: - convertBytesToHumanReadable( - fileBlob?.size ?? 0, - ), - convertedFileSize: - convertBytesToHumanReadable( - convertedHEIC?.size ?? 
0, - ), - }, - )}`, + `Converted HEIC file is empty (original was ${fileBlob?.size} bytes)`, ); } await new Promise((resolve) => { @@ -94,7 +77,7 @@ class HEICConverter { this.workerPool.push(convertWorker); return convertedHEIC; } catch (e) { - log.error("heic conversion failed", e); + log.error("HEIC conversion failed", e); convertWorker.terminate(); this.workerPool.push(createComlinkWorker()); throw e; diff --git a/web/apps/photos/src/services/upload/takeout.ts b/web/apps/photos/src/services/upload/takeout.ts index 5cd16130e..24c0a9d26 100644 --- a/web/apps/photos/src/services/upload/takeout.ts +++ b/web/apps/photos/src/services/upload/takeout.ts @@ -5,6 +5,8 @@ import { nameAndExtension } from "@/next/file"; import log from "@/next/log"; import { NULL_LOCATION } from "constants/upload"; import type { Location } from "types/metadata"; +import { readStream } from "utils/native-stream"; +import type { UploadItem } from "./types"; export interface ParsedMetadataJSON { creationTime: number; @@ -75,21 +77,29 @@ function getFileOriginalName(fileName: string) { /** Try to parse the contents of a metadata JSON file from a Google Takeout. */ export const tryParseTakeoutMetadataJSON = async ( - fileOrPath: File | string, + uploadItem: UploadItem, ): Promise => { try { - const text = - fileOrPath instanceof File - ? 
await fileOrPath.text() - : await ensureElectron().fs.readTextFile(fileOrPath); - - return parseMetadataJSONText(text); + return parseMetadataJSONText(await uploadItemText(uploadItem)); } catch (e) { log.error("Failed to parse takeout metadata JSON", e); return undefined; } }; +const uploadItemText = async (uploadItem: UploadItem) => { + if (uploadItem instanceof File) { + return await uploadItem.text(); + } else if (typeof uploadItem == "string") { + return await ensureElectron().fs.readTextFile(uploadItem); + } else if (Array.isArray(uploadItem)) { + const { response } = await readStream(ensureElectron(), uploadItem); + return await response.text(); + } else { + return await uploadItem.file.text(); + } +}; + const NULL_PARSED_METADATA_JSON: ParsedMetadataJSON = { creationTime: null, modificationTime: null, diff --git a/web/apps/photos/src/services/upload/thumbnail.ts b/web/apps/photos/src/services/upload/thumbnail.ts index a44c941f1..1dd448376 100644 --- a/web/apps/photos/src/services/upload/thumbnail.ts +++ b/web/apps/photos/src/services/upload/thumbnail.ts @@ -4,6 +4,7 @@ import { type Electron } from "@/next/types/ipc"; import { withTimeout } from "@ente/shared/utils"; import * as ffmpeg from "services/ffmpeg"; import { heicToJPEG } from "services/heic-convert"; +import { toDataOrPathOrZipEntry, type DesktopUploadItem } from "./types"; /** Maximum width or height of the generated thumbnail */ const maxThumbnailDimension = 720; @@ -178,7 +179,7 @@ const percentageSizeDiff = ( * object which we use to perform IPC with the Node.js side of our desktop app. * * @param dataOrPath Contents of an image or video file, or the path to the - * image or video file on the user's local filesystem, whose thumbnail we want + * image or video file on the user's local file system, whose thumbnail we want * to generate. * * @param fileTypeInfo The type information for {@link dataOrPath}. 
@@ -189,16 +190,16 @@ const percentageSizeDiff = ( */ export const generateThumbnailNative = async ( electron: Electron, - dataOrPath: Uint8Array | string, + desktopUploadItem: DesktopUploadItem, fileTypeInfo: FileTypeInfo, ): Promise => fileTypeInfo.fileType === FILE_TYPE.IMAGE ? await electron.generateImageThumbnail( - dataOrPath, + toDataOrPathOrZipEntry(desktopUploadItem), maxThumbnailDimension, maxThumbnailSize, ) - : ffmpeg.generateVideoThumbnailNative(electron, dataOrPath); + : ffmpeg.generateVideoThumbnailNative(electron, desktopUploadItem); /** * A fallback, black, thumbnail for use in cases where thumbnail generation diff --git a/web/apps/photos/src/services/upload/types.ts b/web/apps/photos/src/services/upload/types.ts new file mode 100644 index 000000000..05ad332d4 --- /dev/null +++ b/web/apps/photos/src/services/upload/types.ts @@ -0,0 +1,47 @@ +import type { FileAndPath } from "@/next/types/file"; +import type { ZipItem } from "@/next/types/ipc"; + +/** + * An item to upload is one of the following: + * + * 1. A file drag-and-dropped or selected by the user when we are running in the + * web browser. These is the {@link File} case. + * + * 2. A file drag-and-dropped or selected by the user when we are running in the + * context of our desktop app. In such cases, we also have the absolute path + * of the file in the user's local file system. This is the + * {@link FileAndPath} case. + * + * 3. A file path programmatically requested by the desktop app. For example, we + * might be resuming a previously interrupted upload after an app restart + * (thus we no longer have access to the {@link File} from case 2). Or we + * could be uploading a file this is in one of the folders the user has asked + * us to watch for changes. This is the `string` case. + * + * 4. A file within a zip file on the user's local file system. This too is only + * possible when we are running in the context of our desktop app. 
The user + * might have drag-and-dropped or selected a zip file, or it might be a zip + * file that they'd previously selected but we now are resuming an + * interrupted upload of. Either way, what we have is a tuple containing the + * (path to zip file, and the name of an entry within that zip file). This is + * the {@link ZipItem} case. + * + * Also see: [Note: Reading a UploadItem]. + */ +export type UploadItem = File | FileAndPath | string | ZipItem; + +/** + * The subset of cases of {@link UploadItem} that apply when we're running in the + * context of our desktop app. + */ +export type DesktopUploadItem = Exclude<UploadItem, File>; + +/** + * For each of the cases of {@link UploadItem} that apply when we're running in the + * context of our desktop app, return a value that can be passed to + * {@link Electron} functions over IPC. + */ +export const toDataOrPathOrZipEntry = (desktopUploadItem: DesktopUploadItem) => + typeof desktopUploadItem == "string" || Array.isArray(desktopUploadItem) + ? desktopUploadItem + : desktopUploadItem.path; diff --git a/web/apps/photos/src/services/upload/uploadManager.ts b/web/apps/photos/src/services/upload/uploadManager.ts index 665cd76c8..99fe6ced3 100644 --- a/web/apps/photos/src/services/upload/uploadManager.ts +++ b/web/apps/photos/src/services/upload/uploadManager.ts @@ -3,7 +3,6 @@ import { potentialFileTypeFromExtension } from "@/media/live-photo"; import { ensureElectron } from "@/next/electron"; import { lowercaseExtension, nameAndExtension } from "@/next/file"; import log from "@/next/log"; -import { ElectronFile } from "@/next/types/file"; import type { Electron } from "@/next/types/ipc"; import { ComlinkWorker } from "@/next/worker/comlink-worker"; import { ensure } from "@/utils/ensure"; @@ -36,7 +35,8 @@ import { tryParseTakeoutMetadataJSON, type ParsedMetadataJSON, } from "./takeout"; -import UploadService, { fopFileName, fopSize, uploader } from "./uploadService"; +import type { UploadItem } from "./types"; +import UploadService, {
uploadItemFileName, uploader } from "./uploadService"; export type FileID = number; @@ -83,17 +83,17 @@ export interface ProgressUpdater { /** The number of uploads to process in parallel. */ const maxConcurrentUploads = 4; -export interface FileWithCollection { +export interface UploadItemWithCollection { localID: number; collectionID: number; isLivePhoto?: boolean; - fileOrPath?: File | string; + uploadItem?: UploadItem; livePhotoAssets?: LivePhotoAssets; } export interface LivePhotoAssets { - image: File | string; - video: File | string; + image: UploadItem; + video: UploadItem; } export interface PublicUploadProps { @@ -320,9 +320,9 @@ class UploadManager { ComlinkWorker >(maxConcurrentUploads); private parsedMetadataJSONMap: Map; - private filesToBeUploaded: ClusteredFile[]; - private remainingFiles: ClusteredFile[] = []; - private failedFiles: ClusteredFile[]; + private itemsToBeUploaded: ClusteredUploadItem[]; + private remainingItems: ClusteredUploadItem[] = []; + private failedItems: ClusteredUploadItem[]; private existingFiles: EnteFile[]; private setFiles: SetFiles; private collections: Map; @@ -359,9 +359,9 @@ class UploadManager { } private resetState() { - this.filesToBeUploaded = []; - this.remainingFiles = []; - this.failedFiles = []; + this.itemsToBeUploaded = []; + this.remainingItems = []; + this.failedItems = []; this.parsedMetadataJSONMap = new Map(); this.uploaderName = null; @@ -387,62 +387,62 @@ class UploadManager { * It is an error to call this method when there is already an in-progress * upload. * - * @param filesWithCollectionToUploadIn The files to upload, each paired - * with the id of the collection that they should be uploaded into. + * @param itemsWithCollection The items to upload, each paired with the id + * of the collection that they should be uploaded into. 
* * @returns `true` if at least one file was processed */ - public async uploadFiles( - filesWithCollectionToUploadIn: FileWithCollection[], + public async uploadItems( + itemsWithCollection: UploadItemWithCollection[], collections: Collection[], uploaderName?: string, ) { if (this.uploadInProgress) throw new Error("Cannot run multiple uploads at once"); - log.info(`Uploading ${filesWithCollectionToUploadIn.length} files`); + log.info(`Uploading ${itemsWithCollection.length} files`); this.uploadInProgress = true; this.uploaderName = uploaderName; try { await this.updateExistingFilesAndCollections(collections); - const namedFiles = filesWithCollectionToUploadIn.map( - makeFileWithCollectionIDAndName, + const namedItems = itemsWithCollection.map( + makeUploadItemWithCollectionIDAndName, ); - this.uiService.setFiles(namedFiles); + this.uiService.setFiles(namedItems); - const [metadataFiles, mediaFiles] = - splitMetadataAndMediaFiles(namedFiles); + const [metadataItems, mediaItems] = + splitMetadataAndMediaItems(namedItems); - if (metadataFiles.length) { + if (metadataItems.length) { this.uiService.setUploadStage( UPLOAD_STAGES.READING_GOOGLE_METADATA_FILES, ); - await this.parseMetadataJSONFiles(metadataFiles); + await this.parseMetadataJSONFiles(metadataItems); } - if (mediaFiles.length) { - const clusteredMediaFiles = await clusterLivePhotos(mediaFiles); + if (mediaItems.length) { + const clusteredMediaItems = await clusterLivePhotos(mediaItems); this.abortIfCancelled(); // Live photos might've been clustered together, reset the list // of files to reflect that. 
- this.uiService.setFiles(clusteredMediaFiles); + this.uiService.setFiles(clusteredMediaItems); this.uiService.setHasLivePhoto( - mediaFiles.length != clusteredMediaFiles.length, + mediaItems.length != clusteredMediaItems.length, ); - await this.uploadMediaFiles(clusteredMediaFiles); + await this.uploadMediaItems(clusteredMediaItems); } } catch (e) { if (e.message === CustomError.UPLOAD_CANCELLED) { if (isElectron()) { - this.remainingFiles = []; + this.remainingItems = []; await cancelRemainingUploads(); } } else { @@ -479,14 +479,18 @@ class UploadManager { ); } - private async parseMetadataJSONFiles(files: FileWithCollectionIDAndName[]) { - this.uiService.reset(files.length); + private async parseMetadataJSONFiles( + items: UploadItemWithCollectionIDAndName[], + ) { + this.uiService.reset(items.length); - for (const { fileOrPath, fileName, collectionID } of files) { + for (const { uploadItem, fileName, collectionID } of items) { this.abortIfCancelled(); log.info(`Parsing metadata JSON ${fileName}`); - const metadataJSON = await tryParseTakeoutMetadataJSON(fileOrPath); + const metadataJSON = await tryParseTakeoutMetadataJSON( + ensure(uploadItem), + ); if (metadataJSON) { this.parsedMetadataJSONMap.set( getMetadataJSONMapKeyForJSON(collectionID, fileName), @@ -497,48 +501,48 @@ class UploadManager { } } - private async uploadMediaFiles(mediaFiles: ClusteredFile[]) { - this.filesToBeUploaded = [...this.filesToBeUploaded, ...mediaFiles]; + private async uploadMediaItems(mediaItems: ClusteredUploadItem[]) { + this.itemsToBeUploaded = [...this.itemsToBeUploaded, ...mediaItems]; if (isElectron()) { - this.remainingFiles = [...this.remainingFiles, ...mediaFiles]; + this.remainingItems = [...this.remainingItems, ...mediaItems]; } - this.uiService.reset(mediaFiles.length); + this.uiService.reset(mediaItems.length); - await UploadService.setFileCount(mediaFiles.length); + await UploadService.setFileCount(mediaItems.length); 
this.uiService.setUploadStage(UPLOAD_STAGES.UPLOADING); const uploadProcesses = []; for ( let i = 0; - i < maxConcurrentUploads && this.filesToBeUploaded.length > 0; + i < maxConcurrentUploads && this.itemsToBeUploaded.length > 0; i++ ) { this.cryptoWorkers[i] = getDedicatedCryptoWorker(); const worker = await this.cryptoWorkers[i].remote; - uploadProcesses.push(this.uploadNextFileInQueue(worker)); + uploadProcesses.push(this.uploadNextItemInQueue(worker)); } await Promise.all(uploadProcesses); } - private async uploadNextFileInQueue(worker: Remote) { + private async uploadNextItemInQueue(worker: Remote) { const uiService = this.uiService; - while (this.filesToBeUploaded.length > 0) { + while (this.itemsToBeUploaded.length > 0) { this.abortIfCancelled(); - const clusteredFile = this.filesToBeUploaded.pop(); - const { localID, collectionID } = clusteredFile; + const clusteredItem = this.itemsToBeUploaded.pop(); + const { localID, collectionID } = clusteredItem; const collection = this.collections.get(collectionID); - const uploadableFile = { ...clusteredFile, collection }; + const uploadableItem = { ...clusteredItem, collection }; uiService.setFileProgress(localID, 0); await wait(0); const { uploadResult, uploadedFile } = await uploader( - uploadableFile, + uploadableItem, this.uploaderName, this.existingFiles, this.parsedMetadataJSONMap, @@ -560,7 +564,7 @@ class UploadManager { ); const finalUploadResult = await this.postUploadTask( - uploadableFile, + uploadableItem, uploadResult, uploadedFile, ); @@ -572,20 +576,20 @@ class UploadManager { } private async postUploadTask( - uploadableFile: UploadableFile, + uploadableItem: UploadableUploadItem, uploadResult: UPLOAD_RESULT, uploadedFile: EncryptedEnteFile | EnteFile | undefined, ) { log.info( - `Uploaded ${uploadableFile.fileName} with result ${uploadResult}`, + `Uploaded ${uploadableItem.fileName} with result ${uploadResult}`, ); try { let decryptedFile: EnteFile; - await 
this.removeFromPendingUploads(uploadableFile); + await this.removeFromPendingUploads(uploadableItem); switch (uploadResult) { case UPLOAD_RESULT.FAILED: case UPLOAD_RESULT.BLOCKED: - this.failedFiles.push(uploadableFile); + this.failedItems.push(uploadableItem); break; case UPLOAD_RESULT.ALREADY_UPLOADED: decryptedFile = uploadedFile as EnteFile; @@ -598,7 +602,7 @@ class UploadManager { case UPLOAD_RESULT.UPLOADED_WITH_STATIC_THUMBNAIL: decryptedFile = await decryptFile( uploadedFile as EncryptedEnteFile, - uploadableFile.collection.key, + uploadableItem.collection.key, ); break; case UPLOAD_RESULT.UNSUPPORTED: @@ -619,8 +623,8 @@ class UploadManager { eventBus.emit(Events.FILE_UPLOADED, { enteFile: decryptedFile, localFile: - uploadableFile.fileOrPath ?? - uploadableFile.livePhotoAssets.image, + uploadableItem.uploadItem ?? + uploadableItem.livePhotoAssets.image, }); } catch (e) { log.warn("Ignoring error in fileUploaded handlers", e); @@ -629,7 +633,7 @@ class UploadManager { } await this.watchFolderCallback( uploadResult, - uploadableFile, + uploadableItem, uploadedFile as EncryptedEnteFile, ); return uploadResult; @@ -641,7 +645,7 @@ class UploadManager { private async watchFolderCallback( fileUploadResult: UPLOAD_RESULT, - fileWithCollection: ClusteredFile, + fileWithCollection: ClusteredUploadItem, uploadedFile: EncryptedEnteFile, ) { if (isElectron()) { @@ -661,9 +665,9 @@ class UploadManager { uploadCancelService.requestUploadCancelation(); } - public getFailedFilesWithCollections() { + public getFailedItemsWithCollections() { return { - files: this.failedFiles, + items: this.failedItems, collections: [...this.collections.values()], }; } @@ -684,13 +688,15 @@ class UploadManager { this.setFiles((files) => sortFiles([...files, decryptedFile])); } - private async removeFromPendingUploads({ localID }: ClusteredFile) { + private async removeFromPendingUploads( + clusteredUploadItem: ClusteredUploadItem, + ) { const electron = globalThis.electron; if (electron) 
{ - this.remainingFiles = this.remainingFiles.filter( - (f) => f.localID != localID, + this.remainingItems = this.remainingItems.filter( + (f) => f.localID != clusteredUploadItem.localID, ); - await updatePendingUploads(electron, this.remainingFiles); + await markUploaded(electron, clusteredUploadItem); } } @@ -709,24 +715,25 @@ export default new UploadManager(); * As files progress through stages, they get more and more bits tacked on to * them. These types document the journey. * - * - The input is {@link FileWithCollection}. This can either be a new - * {@link FileWithCollection}, in which case it'll only have a - * {@link localID}, {@link collectionID} and a {@link fileOrPath}. Or it could - * be a retry, in which case it'll not have a {@link fileOrPath} but instead + * - The input is {@link UploadItemWithCollection}. This can either be a new + * {@link UploadItemWithCollection}, in which case it'll only have a + * {@link localID}, {@link collectionID} and a {@link uploadItem}. Or it could + * be a retry, in which case it'll not have a {@link uploadItem} but instead * will have data from a previous stage (concretely, it'll just be a - * relabelled {@link ClusteredFile}), like a snake eating its tail. + * relabelled {@link ClusteredUploadItem}), like a snake eating its tail. * - * - Immediately we convert it to {@link FileWithCollectionIDAndName}. This is - * to mostly systematize what we have, and also attach a {@link fileName}. + * - Immediately we convert it to {@link UploadItemWithCollectionIDAndName}. + * This is to mostly systematize what we have, and also attach a + * {@link fileName}. * * - These then get converted to "assets", whereby both parts of a live photo - * are combined. This is a {@link ClusteredFile}. + * are combined. This is a {@link ClusteredUploadItem}. * - * - On to the {@link ClusteredFile} we attach the corresponding - * {@link collection}, giving us {@link UploadableFile}. 
This is what gets - * queued and then passed to the {@link uploader}. + * - On to the {@link ClusteredUploadItem} we attach the corresponding + * {@link collection}, giving us {@link UploadableUploadItem}. This is what + * gets queued and then passed to the {@link uploader}. */ -type FileWithCollectionIDAndName = { +type UploadItemWithCollectionIDAndName = { /** A unique ID for the duration of the upload */ localID: number; /** The ID of the collection to which this file should be uploaded. */ @@ -740,64 +747,57 @@ type FileWithCollectionIDAndName = { /** `true` if this is a live photo. */ isLivePhoto?: boolean; /* Valid for non-live photos */ - fileOrPath?: File | string; + uploadItem?: UploadItem; /* Valid for live photos */ livePhotoAssets?: LivePhotoAssets; }; -const makeFileWithCollectionIDAndName = ( - f: FileWithCollection, -): FileWithCollectionIDAndName => { - const fileOrPath = f.fileOrPath; - /* TODO(MR): ElectronFile */ - if (!(fileOrPath instanceof File || typeof fileOrPath == "string")) - throw new Error(`Unexpected file ${f}`); - - return { - localID: ensure(f.localID), - collectionID: ensure(f.collectionID), - fileName: ensure( - f.isLivePhoto - ? fopFileName(f.livePhotoAssets.image) - : fopFileName(fileOrPath), - ), - isLivePhoto: f.isLivePhoto, - fileOrPath: fileOrPath, - livePhotoAssets: f.livePhotoAssets, - }; -}; +const makeUploadItemWithCollectionIDAndName = ( + f: UploadItemWithCollection, +): UploadItemWithCollectionIDAndName => ({ + localID: ensure(f.localID), + collectionID: ensure(f.collectionID), + fileName: ensure( + f.isLivePhoto + ? uploadItemFileName(f.livePhotoAssets.image) + : uploadItemFileName(f.uploadItem), + ), + isLivePhoto: f.isLivePhoto, + uploadItem: f.uploadItem, + livePhotoAssets: f.livePhotoAssets, +}); /** - * A file with both parts of a live photo clubbed together. + * An upload item with both parts of a live photo clubbed together. * * See: [Note: Intermediate file types during upload]. 
*/ -type ClusteredFile = { +type ClusteredUploadItem = { localID: number; collectionID: number; fileName: string; isLivePhoto: boolean; - fileOrPath?: File | string; + uploadItem?: UploadItem; livePhotoAssets?: LivePhotoAssets; }; /** - * The file that we hand off to the uploader. Essentially {@link ClusteredFile} - * with the {@link collection} attached to it. + * The file that we hand off to the uploader. Essentially + * {@link ClusteredUploadItem} with the {@link collection} attached to it. * * See: [Note: Intermediate file types during upload]. */ -export type UploadableFile = ClusteredFile & { +export type UploadableUploadItem = ClusteredUploadItem & { collection: Collection; }; -const splitMetadataAndMediaFiles = ( - files: FileWithCollectionIDAndName[], +const splitMetadataAndMediaItems = ( + items: UploadItemWithCollectionIDAndName[], ): [ - metadata: FileWithCollectionIDAndName[], - media: FileWithCollectionIDAndName[], + metadata: UploadItemWithCollectionIDAndName[], + media: UploadItemWithCollectionIDAndName[], ] => - files.reduce( + items.reduce( ([metadata, media], f) => { if (lowercaseExtension(f.fileName) == "json") metadata.push(f); else media.push(f); @@ -806,59 +806,58 @@ const splitMetadataAndMediaFiles = ( [[], []], ); -export const setToUploadCollection = async (collections: Collection[]) => { - let collectionName: string = null; - /* collection being one suggest one of two things - 1. Either the user has upload to a single existing collection - 2. 
Created a new single collection to upload to - may have had multiple folder, but chose to upload - to one album - hence saving the collection name when upload collection count is 1 - helps the info of user choosing this options - and on next upload we can directly start uploading to this collection - */ - if (collections.length === 1) { - collectionName = collections[0].name; +const markUploaded = async (electron: Electron, item: ClusteredUploadItem) => { + // TODO: This can be done better + if (item.isLivePhoto) { + const [p0, p1] = [ + item.livePhotoAssets.image, + item.livePhotoAssets.video, + ]; + if (Array.isArray(p0) && Array.isArray(p1)) { + electron.markUploadedZipItems([p0, p1]); + } else if (typeof p0 == "string" && typeof p1 == "string") { + electron.markUploadedFiles([p0, p1]); + } else if ( + p0 && + typeof p0 == "object" && + "path" in p0 && + p1 && + typeof p1 == "object" && + "path" in p1 + ) { + electron.markUploadedFiles([p0.path, p1.path]); + } else { + throw new Error( + "Attempting to mark upload completion of unexpected desktop upload items", + ); + } + } else { + const p = ensure(item.uploadItem); + if (Array.isArray(p)) { + electron.markUploadedZipItems([p]); + } else if (typeof p == "string") { + electron.markUploadedFiles([p]); + } else if (p && typeof p == "object" && "path" in p) { + electron.markUploadedFiles([p.path]); + } else { + throw new Error( + "Attempting to mark upload completion of unexpected desktop upload items", + ); + } } - await ensureElectron().setPendingUploadCollection(collectionName); }; -const updatePendingUploads = async ( - electron: Electron, - files: ClusteredFile[], -) => { - const paths = files - .map((file) => - file.isLivePhoto - ? 
[file.livePhotoAssets.image, file.livePhotoAssets.video] - : [file.fileOrPath], - ) - .flat() - .map((f) => getFilePathElectron(f)); - await electron.setPendingUploadFiles("files", paths); -}; - -/** - * NOTE: a stop gap measure, only meant to be called by code that is running in - * the context of a desktop app initiated upload - */ -export const getFilePathElectron = (file: File | ElectronFile | string) => - typeof file == "string" ? file : (file as ElectronFile).path; - -const cancelRemainingUploads = async () => { - const electron = ensureElectron(); - await electron.setPendingUploadCollection(undefined); - await electron.setPendingUploadFiles("zips", []); - await electron.setPendingUploadFiles("files", []); -}; +const cancelRemainingUploads = () => ensureElectron().clearPendingUploads(); /** * Go through the given files, combining any sibling image + video assets into a * single live photo when appropriate. */ -const clusterLivePhotos = async (files: FileWithCollectionIDAndName[]) => { - const result: ClusteredFile[] = []; - files +const clusterLivePhotos = async ( + items: UploadItemWithCollectionIDAndName[], +) => { + const result: ClusteredUploadItem[] = []; + items .sort((f, g) => nameAndExtension(f.fileName)[0].localeCompare( nameAndExtension(g.fileName)[0], @@ -866,22 +865,22 @@ const clusterLivePhotos = async (files: FileWithCollectionIDAndName[]) => { ) .sort((f, g) => f.collectionID - g.collectionID); let index = 0; - while (index < files.length - 1) { - const f = files[index]; - const g = files[index + 1]; + while (index < items.length - 1) { + const f = items[index]; + const g = items[index + 1]; const fFileType = potentialFileTypeFromExtension(f.fileName); const gFileType = potentialFileTypeFromExtension(g.fileName); const fa: PotentialLivePhotoAsset = { fileName: f.fileName, fileType: fFileType, collectionID: f.collectionID, - fileOrPath: f.fileOrPath, + uploadItem: f.uploadItem, }; const ga: PotentialLivePhotoAsset = { fileName: g.fileName, 
fileType: gFileType, collectionID: g.collectionID, - fileOrPath: g.fileOrPath, + uploadItem: g.uploadItem, }; if (await areLivePhotoAssets(fa, ga)) { const [image, video] = @@ -892,8 +891,8 @@ const clusterLivePhotos = async (files: FileWithCollectionIDAndName[]) => { fileName: image.fileName, isLivePhoto: true, livePhotoAssets: { - image: image.fileOrPath, - video: video.fileOrPath, + image: image.uploadItem, + video: video.uploadItem, }, }); index += 2; @@ -905,9 +904,9 @@ const clusterLivePhotos = async (files: FileWithCollectionIDAndName[]) => { index += 1; } } - if (index === files.length - 1) { + if (index === items.length - 1) { result.push({ - ...files[index], + ...items[index], isLivePhoto: false, }); } @@ -918,7 +917,7 @@ interface PotentialLivePhotoAsset { fileName: string; fileType: FILE_TYPE; collectionID: number; - fileOrPath: File | string; + uploadItem: UploadItem; } const areLivePhotoAssets = async ( @@ -961,11 +960,11 @@ const areLivePhotoAssets = async ( // we use doesn't support stream as a input. const maxAssetSize = 20 * 1024 * 1024; /* 20MB */ - const fSize = await fopSize(f.fileOrPath); - const gSize = await fopSize(g.fileOrPath); + const fSize = await uploadItemSize(f.uploadItem); + const gSize = await uploadItemSize(g.uploadItem); if (fSize > maxAssetSize || gSize > maxAssetSize) { log.info( - `Not classifying assets with too large sizes ${[fSize, gSize]} as a live photo`, + `Not classifying files with too large sizes (${fSize} and ${gSize} bytes) as a live photo`, ); return false; } @@ -998,3 +997,15 @@ const removePotentialLivePhotoSuffix = (name: string, suffix?: string) => { return foundSuffix ? name.slice(0, foundSuffix.length * -1) : name; }; + +/** + * Return the size of the given {@link uploadItem}. 
+ */ +const uploadItemSize = async (uploadItem: UploadItem): Promise => { + if (uploadItem instanceof File) return uploadItem.size; + if (typeof uploadItem == "string") + return ensureElectron().pathOrZipItemSize(uploadItem); + if (Array.isArray(uploadItem)) + return ensureElectron().pathOrZipItemSize(uploadItem); + return uploadItem.file.size; +}; diff --git a/web/apps/photos/src/services/upload/uploadService.ts b/web/apps/photos/src/services/upload/uploadService.ts index d49b32129..7d3303884 100644 --- a/web/apps/photos/src/services/upload/uploadService.ts +++ b/web/apps/photos/src/services/upload/uploadService.ts @@ -50,14 +50,15 @@ import { generateThumbnailNative, generateThumbnailWeb, } from "./thumbnail"; +import type { UploadItem } from "./types"; import UploadHttpClient from "./uploadHttpClient"; -import type { UploadableFile } from "./uploadManager"; +import type { UploadableUploadItem } from "./uploadManager"; /** * A readable stream for a file, and its associated size and last modified time. * - * This is the in-memory representation of the `fileOrPath` type that we usually - * pass around. See: [Note: Reading a fileOrPath] + * This is the in-memory representation of the {@link UploadItem} type that we + * usually pass around. See: [Note: Reading a UploadItem] */ interface FileStream { /** @@ -181,30 +182,20 @@ const uploadService = new UploadService(); export default uploadService; /** - * Return the file name for the given {@link fileOrPath}. - * - * @param fileOrPath The {@link File}, or the path to it. Note that it is only - * valid to specify a path if we are running in the context of our desktop app. + * Return the file name for the given {@link uploadItem}. */ -export const fopFileName = (fileOrPath: File | string) => - typeof fileOrPath == "string" ? basename(fileOrPath) : fileOrPath.name; - -/** - * Return the size of the given {@link fileOrPath}. - * - * @param fileOrPath The {@link File}, or the path to it. 
Note that it is only - * valid to specify a path if we are running in the context of our desktop app. - */ -export const fopSize = async (fileOrPath: File | string): Promise => - fileOrPath instanceof File - ? fileOrPath.size - : await ensureElectron().fs.size(fileOrPath); +export const uploadItemFileName = (uploadItem: UploadItem) => { + if (uploadItem instanceof File) return uploadItem.name; + if (typeof uploadItem == "string") return basename(uploadItem); + if (Array.isArray(uploadItem)) return basename(uploadItem[1]); + return uploadItem.file.name; +}; /* -- Various intermediate type used during upload -- */ interface UploadAsset { isLivePhoto?: boolean; - fileOrPath?: File | string; + uploadItem?: UploadItem; livePhotoAssets?: LivePhotoAssets; } @@ -311,14 +302,14 @@ interface UploadResponse { } /** - * Upload the given {@link UploadableFile} + * Upload the given {@link UploadableUploadItem} * * This is lower layer implementation of the upload. It is invoked by * {@link UploadManager} after it has assembled all the relevant bits we need to * go forth and upload. */ export const uploader = async ( - { collection, localID, fileName, ...uploadAsset }: UploadableFile, + { collection, localID, fileName, ...uploadAsset }: UploadableUploadItem, uploaderName: string, existingFiles: EnteFile[], parsedMetadataJSONMap: Map, @@ -466,19 +457,21 @@ export const uploader = async ( }; /** - * Read the given file or path into an in-memory representation. + * Read the given file or path or zip item into an in-memory representation. * - * [Note: Reading a fileOrPath] + * [Note: Reading a UploadItem] * * The file can be either a web - * [File](https://developer.mozilla.org/en-US/docs/Web/API/File) or the absolute - * path to a file on desk. + * [File](https://developer.mozilla.org/en-US/docs/Web/API/File), the absolute + * path to a file on desk, a combination of these two, or a entry in a zip file + * on the user's local file system. 
* - * tl;dr; There are three cases: + * tl;dr; There are four cases: * * 1. web / File - * 2. desktop / File + * 2. desktop / File (+ path) * 3. desktop / path + * 4. desktop / ZipItem * * For the when and why, read on. * @@ -490,66 +483,73 @@ export const uploader = async ( * * In the web context, we'll always get a File, since within the browser we * cannot programmatically construct paths to or arbitrarily access files on the - * user's filesystem. Note that even if we were to have an absolute path at - * hand, we cannot programmatically create such File objects to arbitrary - * absolute paths on user's local filesystem for security reasons. + * user's file system. + * + * > Note that even if we were to somehow have an absolute path at hand, we + * cannot programmatically create such File objects to arbitrary absolute + * paths on user's local file system for security reasons. * * So in the web context, this will always be a File we get as a result of an - * explicit user interaction (e.g. drag and drop). + * explicit user interaction (e.g. drag and drop or using a file selector). * - * In the desktop context, this can be either a File or a path. + * In the desktop context, this can be either a File (+ path), or a path, or an + * entry within a zip file. * * 2. If the user provided us this file via some user interaction (say a drag - * and a drop), this'll still be a File. + * and a drop), this'll still be a File. But unlike in the web context, we + * also have access to the full path of this file. * - * 3. However, when running in the desktop app we have the ability to access - * absolute paths on the user's file system. For example, if the user asks us - * to watch certain folders on their disk for changes, we'll be able to pick - * up new images being added, and in such cases, the parameter here will be a - * path. Another example is when resuming an previously interrupted upload - - * we'll only have the path at hand in such cases, not the File object. 
+ * 3. In addition, when running in the desktop app we have the ability to + * initate programmatic access absolute paths on the user's file system. For + * example, if the user asks us to watch certain folders on their disk for + * changes, we'll be able to pick up new images being added, and in such + * cases, the parameter here will be a path. Another example is when resuming + * an previously interrupted upload - we'll only have the path at hand in + * such cases, not the original File object since the app subsequently + * restarted. * - * Case 2, when we're provided a path, is simple. We don't have a choice, since - * we cannot still programmatically construct a File object (we can construct it - * on the Node.js layer, but it can't then be transferred over the IPC - * boundary). So all our operations use the path itself. + * 4. The user might've also initiated an upload of a zip file (or we might be + * resuming one). In such cases we will get a tuple (path to the zip file on + * the local file system, and the name of the entry within that zip file). * - * Case 3 involves a choice on a use-case basis, since + * Case 3 and 4, when we're provided a path, are simple. We don't have a choice, + * since we cannot still programmatically construct a File object (we can + * construct it on the Node.js layer, but it can't then be transferred over the + * IPC boundary). So all our operations use the path itself. * - * (a) unlike in the web context, such File objects also have the full path. - * See: [Note: File paths when running under Electron]. + * Case 2 involves a choice on a use-case basis. Neither File nor the path is a + * better choice for all use cases. * - * (b) neither File nor the path is a better choice for all use cases. - * - * The advantage of the File object is that the browser has already read it into - * memory for us. The disadvantage comes in the case where we need to - * communicate with the native Node.js layer of our desktop app. 
Since this - * communication happens over IPC, the File's contents need to be serialized and - * copied, which is a bummer for large videos etc. + * > The advantage of the File object is that the browser has already read it + * into memory for us. The disadvantage comes in the case where we need to + * communicate with the native Node.js layer of our desktop app. Since this + * communication happens over IPC, the File's contents need to be serialized + * and copied, which is a bummer for large videos etc. */ -const readFileOrPath = async ( - fileOrPath: File | string, -): Promise => { +const readUploadItem = async (uploadItem: UploadItem): Promise => { let underlyingStream: ReadableStream; let file: File | undefined; let fileSize: number; let lastModifiedMs: number; - if (fileOrPath instanceof File) { - file = fileOrPath; - underlyingStream = file.stream(); - fileSize = file.size; - lastModifiedMs = file.lastModified; - } else { - const path = fileOrPath; + if (typeof uploadItem == "string" || Array.isArray(uploadItem)) { const { response, size, lastModifiedMs: lm, - } = await readStream(ensureElectron(), path); + } = await readStream(ensureElectron(), uploadItem); underlyingStream = response.body; fileSize = size; lastModifiedMs = lm; + } else { + if (uploadItem instanceof File) { + file = uploadItem; + } else { + file = uploadItem.file; + } + underlyingStream = file.stream(); + fileSize = file.size; + lastModifiedMs = file.lastModified; } const N = ENCRYPTION_CHUNK_SIZE; @@ -596,17 +596,17 @@ interface ReadAssetDetailsResult { } /** - * Read the file(s) to determine the type, size and last modified time of the - * given {@link asset}. + * Read the associated file(s) to determine the type, size and last modified + * time of the given {@link asset}. */ const readAssetDetails = async ({ isLivePhoto, livePhotoAssets, - fileOrPath, + uploadItem, }: UploadAsset): Promise => isLivePhoto ? 
readLivePhotoDetails(livePhotoAssets) - : readImageOrVideoDetails(fileOrPath); + : readImageOrVideoDetails(uploadItem); const readLivePhotoDetails = async ({ image, video }: LivePhotoAssets) => { const img = await readImageOrVideoDetails(image); @@ -632,18 +632,18 @@ const readLivePhotoDetails = async ({ image, video }: LivePhotoAssets) => { * While we're at it, also return the size of the file, and its last modified * time (expressed as epoch milliseconds). * - * @param fileOrPath See: [Note: Reading a fileOrPath] + * @param uploadItem See: [Note: Reading a UploadItem] */ -const readImageOrVideoDetails = async (fileOrPath: File | string) => { +const readImageOrVideoDetails = async (uploadItem: UploadItem) => { const { stream, fileSize, lastModifiedMs } = - await readFileOrPath(fileOrPath); + await readUploadItem(uploadItem); const fileTypeInfo = await detectFileTypeInfoFromChunk(async () => { const reader = stream.getReader(); const chunk = ensure((await reader.read()).value); await reader.cancel(); return chunk; - }, fopFileName(fileOrPath)); + }, uploadItemFileName(uploadItem)); return { fileTypeInfo, fileSize, lastModifiedMs }; }; @@ -669,7 +669,7 @@ interface ExtractAssetMetadataResult { * {@link parsedMetadataJSONMap} for the assets. Return the resultant metadatum. 
*/ const extractAssetMetadata = async ( - { isLivePhoto, fileOrPath, livePhotoAssets }: UploadAsset, + { isLivePhoto, uploadItem, livePhotoAssets }: UploadAsset, fileTypeInfo: FileTypeInfo, lastModifiedMs: number, collectionID: number, @@ -686,7 +686,7 @@ const extractAssetMetadata = async ( worker, ) : await extractImageOrVideoMetadata( - fileOrPath, + uploadItem, fileTypeInfo, lastModifiedMs, collectionID, @@ -721,7 +721,7 @@ const extractLivePhotoMetadata = async ( return { metadata: { ...imageMetadata, - title: fopFileName(livePhotoAssets.image), + title: uploadItemFileName(livePhotoAssets.image), fileType: FILE_TYPE.LIVE_PHOTO, imageHash: imageMetadata.hash, videoHash: videoHash, @@ -732,33 +732,33 @@ const extractLivePhotoMetadata = async ( }; const extractImageOrVideoMetadata = async ( - fileOrPath: File | string, + uploadItem: UploadItem, fileTypeInfo: FileTypeInfo, lastModifiedMs: number, collectionID: number, parsedMetadataJSONMap: Map, worker: Remote, ) => { - const fileName = fopFileName(fileOrPath); + const fileName = uploadItemFileName(uploadItem); const { fileType } = fileTypeInfo; let extractedMetadata: ParsedExtractedMetadata; if (fileType === FILE_TYPE.IMAGE) { extractedMetadata = (await tryExtractImageMetadata( - fileOrPath, + uploadItem, fileTypeInfo, lastModifiedMs, )) ?? NULL_EXTRACTED_METADATA; } else if (fileType === FILE_TYPE.VIDEO) { extractedMetadata = - (await tryExtractVideoMetadata(fileOrPath)) ?? + (await tryExtractVideoMetadata(uploadItem)) ?? 
NULL_EXTRACTED_METADATA; } else { - throw new Error(`Unexpected file type ${fileType} for ${fileOrPath}`); + throw new Error(`Unexpected file type ${fileType} for ${uploadItem}`); } - const hash = await computeHash(fileOrPath, worker); + const hash = await computeHash(uploadItem, worker); const modificationTime = lastModifiedMs * 1000; const creationTime = @@ -802,46 +802,48 @@ const NULL_EXTRACTED_METADATA: ParsedExtractedMetadata = { }; async function tryExtractImageMetadata( - fileOrPath: File | string, + uploadItem: UploadItem, fileTypeInfo: FileTypeInfo, lastModifiedMs: number, ): Promise { let file: File; - if (fileOrPath instanceof File) { - file = fileOrPath; - } else { - const path = fileOrPath; + if (typeof uploadItem == "string" || Array.isArray(uploadItem)) { // The library we use for extracting EXIF from images, exifr, doesn't // support streams. But unlike videos, for images it is reasonable to // read the entire stream into memory here. - const { response } = await readStream(ensureElectron(), path); + const { response } = await readStream(ensureElectron(), uploadItem); + const path = typeof uploadItem == "string" ? 
uploadItem : uploadItem[1]; file = new File([await response.arrayBuffer()], basename(path), { lastModified: lastModifiedMs, }); + } else if (uploadItem instanceof File) { + file = uploadItem; + } else { + file = uploadItem.file; } try { return await parseImageMetadata(file, fileTypeInfo); } catch (e) { - log.error(`Failed to extract image metadata for ${fileOrPath}`, e); + log.error(`Failed to extract image metadata for ${uploadItem}`, e); return undefined; } } -const tryExtractVideoMetadata = async (fileOrPath: File | string) => { +const tryExtractVideoMetadata = async (uploadItem: UploadItem) => { try { - return await ffmpeg.extractVideoMetadata(fileOrPath); + return await ffmpeg.extractVideoMetadata(uploadItem); } catch (e) { - log.error(`Failed to extract video metadata for ${fileOrPath}`, e); + log.error(`Failed to extract video metadata for ${uploadItem}`, e); return undefined; } }; const computeHash = async ( - fileOrPath: File | string, + uploadItem: UploadItem, worker: Remote, ) => { - const { stream, chunkCount } = await readFileOrPath(fileOrPath); + const { stream, chunkCount } = await readUploadItem(uploadItem); const hashState = await worker.initChunkHashing(); const streamReader = stream.getReader(); @@ -910,11 +912,11 @@ const areFilesSameNoHash = (f: Metadata, g: Metadata) => { const readAsset = async ( fileTypeInfo: FileTypeInfo, - { isLivePhoto, fileOrPath, livePhotoAssets }: UploadAsset, + { isLivePhoto, uploadItem, livePhotoAssets }: UploadAsset, ): Promise => isLivePhoto ? 
await readLivePhoto(livePhotoAssets, fileTypeInfo) - : await readImageOrVideo(fileOrPath, fileTypeInfo); + : await readImageOrVideo(uploadItem, fileTypeInfo); const readLivePhoto = async ( livePhotoAssets: LivePhotoAssets, @@ -930,28 +932,28 @@ const readLivePhoto = async ( extension: fileTypeInfo.imageType, fileType: FILE_TYPE.IMAGE, }, - await readFileOrPath(livePhotoAssets.image), + await readUploadItem(livePhotoAssets.image), ); - const videoFileStreamOrData = await readFileOrPath(livePhotoAssets.video); + const videoFileStreamOrData = await readUploadItem(livePhotoAssets.video); // The JS zip library that encodeLivePhoto uses does not support // ReadableStreams, so pass the file (blob) if we have one, otherwise read // the entire stream into memory and pass the resultant data. // // This is a reasonable behaviour since the videos corresponding to live - // photos are only a couple of seconds long (we have already done a - // pre-flight check to ensure their size is small in `areLivePhotoAssets`). + // photos are only a couple of seconds long (we've already done a pre-flight + // check during areLivePhotoAssets to ensure their size is small). const fileOrData = async (sd: FileStream | Uint8Array) => { - const _fs = async ({ file, stream }: FileStream) => + const fos = async ({ file, stream }: FileStream) => file ? file : await readEntireStream(stream); - return sd instanceof Uint8Array ? sd : _fs(sd); + return sd instanceof Uint8Array ? 
sd : fos(sd); }; return { fileStreamOrData: await encodeLivePhoto({ - imageFileName: fopFileName(livePhotoAssets.image), + imageFileName: uploadItemFileName(livePhotoAssets.image), imageFileOrData: await fileOrData(imageFileStreamOrData), - videoFileName: fopFileName(livePhotoAssets.video), + videoFileName: uploadItemFileName(livePhotoAssets.video), videoFileOrData: await fileOrData(videoFileStreamOrData), }), thumbnail, @@ -960,11 +962,11 @@ const readLivePhoto = async ( }; const readImageOrVideo = async ( - fileOrPath: File | string, + uploadItem: UploadItem, fileTypeInfo: FileTypeInfo, ) => { - const fileStream = await readFileOrPath(fileOrPath); - return withThumbnail(fileOrPath, fileTypeInfo, fileStream); + const fileStream = await readUploadItem(uploadItem); + return withThumbnail(uploadItem, fileTypeInfo, fileStream); }; // TODO(MR): Merge with the uploader @@ -987,14 +989,17 @@ const moduleState = new ModuleState(); /** * Augment the given {@link dataOrStream} with thumbnail information. * - * This is a companion method for {@link readFileOrPath}, and can be used to - * convert the result of {@link readFileOrPath} into an {@link ThumbnailedFile}. + * This is a companion method for {@link readUploadItem}, and can be used to + * convert the result of {@link readUploadItem} into an {@link ThumbnailedFile}. * - * Note: The `fileStream` in the returned ThumbnailedFile may be different from - * the one passed to the function. + * @param uploadItem The {@link UploadItem} where the given {@link fileStream} + * came from. + * + * Note: The `fileStream` in the returned {@link ThumbnailedFile} may be + * different from the one passed to the function. */ const withThumbnail = async ( - fileOrPath: File | string, + uploadItem: UploadItem, fileTypeInfo: FileTypeInfo, fileStream: FileStream, ): Promise => { @@ -1007,14 +1012,12 @@ const withThumbnail = async ( fileTypeInfo.fileType == FILE_TYPE.IMAGE && moduleState.isNativeImageThumbnailGenerationNotAvailable; - // 1. 
Native thumbnail generation using file's path. - if (electron && !notAvailable) { + // 1. Native thumbnail generation using items's (effective) path. + if (electron && !notAvailable && !(uploadItem instanceof File)) { try { - // When running in the context of our desktop app, File paths will - // be absolute. See: [Note: File paths when running under Electron]. thumbnail = await generateThumbnailNative( electron, - fileOrPath instanceof File ? fileOrPath["path"] : fileOrPath, + uploadItem, fileTypeInfo, ); } catch (e) { @@ -1028,9 +1031,9 @@ const withThumbnail = async ( if (!thumbnail) { let blob: Blob | undefined; - if (fileOrPath instanceof File) { + if (uploadItem instanceof File) { // 2. Browser based thumbnail generation for File (blobs). - blob = fileOrPath; + blob = uploadItem; } else { // 3. Browser based thumbnail generation for paths. // @@ -1046,12 +1049,14 @@ const withThumbnail = async ( // The fallback in this case involves reading the entire stream into // memory, and passing that data across the IPC boundary in a single // go (i.e. not in a streaming manner). This is risky for videos of - // unbounded sizes, plus that isn't the expected scenario. So - // instead of trying to cater for arbitrary exceptions, we only run - // this fallback to cover for the case where thumbnail generation - // was not available for an image file on Windows. If/when we add - // support of native thumbnailing on Windows too, this entire branch - // can be removed. + // unbounded sizes, plus we shouldn't even be getting here unless + // something went wrong. + // + // So instead of trying to cater for arbitrary exceptions, we only + // run this fallback to cover for the case where thumbnail + // generation was not available for an image file on Windows. + // If/when we add support of native thumbnailing on Windows too, + // this entire branch can be removed. 
if (fileTypeInfo.fileType == FILE_TYPE.IMAGE) { const data = await readEntireStream(fileStream.stream); @@ -1062,7 +1067,7 @@ const withThumbnail = async ( fileData = data; } else { log.warn( - `Not using browser based thumbnail generation fallback for video at path ${fileOrPath}`, + `Not using browser based thumbnail generation fallback for video at path ${uploadItem}`, ); } } diff --git a/web/apps/photos/src/services/watch.ts b/web/apps/photos/src/services/watch.ts index 4de5881aa..82d3b2f4e 100644 --- a/web/apps/photos/src/services/watch.ts +++ b/web/apps/photos/src/services/watch.ts @@ -15,7 +15,7 @@ import { ensureString } from "@/utils/ensure"; import { UPLOAD_RESULT } from "constants/upload"; import debounce from "debounce"; import uploadManager, { - type FileWithCollection, + type UploadItemWithCollection, } from "services/upload/uploadManager"; import { Collection } from "types/collection"; import { EncryptedEnteFile } from "types/file"; @@ -317,16 +317,17 @@ class FolderWatcher { } /** - * Callback invoked by the uploader whenever a file we requested to + * Callback invoked by the uploader whenever a item we requested to * {@link upload} gets uploaded. */ async onFileUpload( fileUploadResult: UPLOAD_RESULT, - fileWithCollection: FileWithCollection, + item: UploadItemWithCollection, file: EncryptedEnteFile, ) { - // The files we get here will have fileWithCollection.file as a string, - // not as a File or a ElectronFile + // Re the usage of ensureString: For desktop watch, the only possibility + // for a UploadItem is for it to be a string (the absolute path to a + // file on disk). 
if ( [ UPLOAD_RESULT.ADDED_SYMLINK, @@ -335,18 +336,18 @@ class FolderWatcher { UPLOAD_RESULT.ALREADY_UPLOADED, ].includes(fileUploadResult) ) { - if (fileWithCollection.isLivePhoto) { + if (item.isLivePhoto) { this.uploadedFileForPath.set( - ensureString(fileWithCollection.livePhotoAssets.image), + ensureString(item.livePhotoAssets.image), file, ); this.uploadedFileForPath.set( - ensureString(fileWithCollection.livePhotoAssets.video), + ensureString(item.livePhotoAssets.video), file, ); } else { this.uploadedFileForPath.set( - ensureString(fileWithCollection.fileOrPath), + ensureString(item.uploadItem), file, ); } @@ -355,17 +356,15 @@ class FolderWatcher { fileUploadResult, ) ) { - if (fileWithCollection.isLivePhoto) { + if (item.isLivePhoto) { this.unUploadableFilePaths.add( - ensureString(fileWithCollection.livePhotoAssets.image), + ensureString(item.livePhotoAssets.image), ); this.unUploadableFilePaths.add( - ensureString(fileWithCollection.livePhotoAssets.video), + ensureString(item.livePhotoAssets.video), ); } else { - this.unUploadableFilePaths.add( - ensureString(fileWithCollection.fileOrPath), - ); + this.unUploadableFilePaths.add(ensureString(item.uploadItem)); } } } @@ -375,7 +374,7 @@ class FolderWatcher { * {@link upload} get uploaded. 
*/ async allFileUploadsDone( - filesWithCollection: FileWithCollection[], + uploadItemsWithCollection: UploadItemWithCollection[], collections: Collection[], ) { const electron = ensureElectron(); @@ -384,14 +383,15 @@ class FolderWatcher { log.debug(() => JSON.stringify({ f: "watch/allFileUploadsDone", - filesWithCollection, + uploadItemsWithCollection, collections, watch, }), ); - const { syncedFiles, ignoredFiles } = - this.deduceSyncedAndIgnored(filesWithCollection); + const { syncedFiles, ignoredFiles } = this.deduceSyncedAndIgnored( + uploadItemsWithCollection, + ); if (syncedFiles.length > 0) await electron.watch.updateSyncedFiles( @@ -411,7 +411,9 @@ class FolderWatcher { this.debouncedRunNextEvent(); } - private deduceSyncedAndIgnored(filesWithCollection: FileWithCollection[]) { + private deduceSyncedAndIgnored( + uploadItemsWithCollection: UploadItemWithCollection[], + ) { const syncedFiles: FolderWatch["syncedFiles"] = []; const ignoredFiles: FolderWatch["ignoredFiles"] = []; @@ -430,14 +432,13 @@ class FolderWatcher { this.unUploadableFilePaths.delete(path); }; - for (const fileWithCollection of filesWithCollection) { - if (fileWithCollection.isLivePhoto) { - const imagePath = ensureString( - fileWithCollection.livePhotoAssets.image, - ); - const videoPath = ensureString( - fileWithCollection.livePhotoAssets.video, - ); + for (const item of uploadItemsWithCollection) { + // Re the usage of ensureString: For desktop watch, the only + // possibility for a UploadItem is for it to be a string (the + // absolute path to a file on disk). 
+ if (item.isLivePhoto) { + const imagePath = ensureString(item.livePhotoAssets.image); + const videoPath = ensureString(item.livePhotoAssets.video); const imageFile = this.uploadedFileForPath.get(imagePath); const videoFile = this.uploadedFileForPath.get(videoPath); @@ -453,7 +454,7 @@ class FolderWatcher { markIgnored(videoPath); } } else { - const path = ensureString(fileWithCollection.fileOrPath); + const path = ensureString(item.uploadItem); const file = this.uploadedFileForPath.get(path); if (file) { markSynced(file, path); diff --git a/web/apps/photos/src/utils/file/index.ts b/web/apps/photos/src/utils/file/index.ts index 5d7762abf..abbc8b0fa 100644 --- a/web/apps/photos/src/utils/file/index.ts +++ b/web/apps/photos/src/utils/file/index.ts @@ -116,6 +116,19 @@ export async function getUpdatedEXIFFileForDownload( } } +export function convertBytesToHumanReadable( + bytes: number, + precision = 2, +): string { + if (bytes === 0 || isNaN(bytes)) { + return "0 MB"; + } + + const i = Math.floor(Math.log(bytes) / Math.log(1024)); + const sizes = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]; + return (bytes / Math.pow(1024, i)).toFixed(precision) + " " + sizes[i]; +} + export async function downloadFile(file: EnteFile) { try { const fileReader = new FileReader(); diff --git a/web/apps/photos/src/utils/native-fs.ts b/web/apps/photos/src/utils/native-fs.ts index 2ef896302..27ebdd1c1 100644 --- a/web/apps/photos/src/utils/native-fs.ts +++ b/web/apps/photos/src/utils/native-fs.ts @@ -1,5 +1,5 @@ /** - * @file Utilities for native filesystem access. + * @file Utilities for native file system access. * * While they don't have any direct dependencies to our desktop app, they were * written for use by the code that runs in our desktop app. 
diff --git a/web/apps/photos/src/utils/native-stream.ts b/web/apps/photos/src/utils/native-stream.ts index 85d54b790..8ada6070c 100644 --- a/web/apps/photos/src/utils/native-stream.ts +++ b/web/apps/photos/src/utils/native-stream.ts @@ -2,12 +2,14 @@ * @file Streaming IPC communication with the Node.js layer of our desktop app. * * NOTE: These functions only work when we're running in our desktop app. + * + * See: [Note: IPC streams]. */ -import type { Electron } from "@/next/types/ipc"; +import type { Electron, ZipItem } from "@/next/types/ipc"; /** - * Stream the given file from the user's local filesystem. + * Stream the given file or zip entry from the user's local file system. * * This only works when we're running in our desktop app since it uses the * "stream://" protocol handler exposed by our custom code in the Node.js layer. @@ -16,8 +18,9 @@ import type { Electron } from "@/next/types/ipc"; * To avoid accidentally invoking it in a non-desktop app context, it requires * the {@link Electron} object as a parameter (even though it doesn't use it). * - * @param path The path on the file on the user's local filesystem whose - * contents we want to stream. + * @param pathOrZipItem Either the path on the file on the user's local file + * system whose contents we want to stream. Or a tuple containing the path to a + * zip file and the name of the entry within it. * * @return A ({@link Response}, size, lastModifiedMs) triple. 
* @@ -32,16 +35,23 @@ import type { Electron } from "@/next/types/ipc"; */ export const readStream = async ( _: Electron, - path: string, + pathOrZipItem: string | ZipItem, ): Promise<{ response: Response; size: number; lastModifiedMs: number }> => { - const req = new Request(`stream://read${path}`, { - method: "GET", - }); + let url: URL; + if (typeof pathOrZipItem == "string") { + url = new URL(`stream://read${pathOrZipItem}`); + } else { + const [zipPath, entryName] = pathOrZipItem; + url = new URL(`stream://read${zipPath}`); + url.hash = entryName; + } + + const req = new Request(url, { method: "GET" }); const res = await fetch(req); if (!res.ok) throw new Error( - `Failed to read stream from ${path}: HTTP ${res.status}`, + `Failed to read stream from ${url}: HTTP ${res.status}`, ); const size = readNumericHeader(res, "Content-Length"); diff --git a/web/docs/storage.md b/web/docs/storage.md index d01654b23..9f19a6a46 100644 --- a/web/docs/storage.md +++ b/web/docs/storage.md @@ -34,6 +34,6 @@ meant for larger, tabular data. OPFS is used for caching entire files when we're running under Electron (the Web Cache API is used in the browser). -As it name suggests, it is an entire filesystem, private for us ("origin"). In +As it name suggests, it is an entire file system, private for us ("origin"). In is not undbounded though, and the storage is not guaranteed to be persistent (at least with the APIs we use), hence the cache designation. diff --git a/web/packages/next/file.ts b/web/packages/next/file.ts index 56d27b79b..bd2c04393 100644 --- a/web/packages/next/file.ts +++ b/web/packages/next/file.ts @@ -1,5 +1,3 @@ -import type { ElectronFile } from "./types/file"; - /** * The two parts of a file name - the name itself, and an (optional) extension. 
* @@ -82,27 +80,3 @@ export const dirname = (path: string) => { } return pathComponents.join("/"); }; - -/** - * Return a short description of the given {@link fileOrPath} suitable for - * helping identify it in log messages. - */ -export const fopLabel = (fileOrPath: File | string) => - fileOrPath instanceof File ? `File(${fileOrPath.name})` : fileOrPath; - -export function getFileNameSize(file: File | ElectronFile) { - return `${file.name}_${convertBytesToHumanReadable(file.size)}`; -} - -export function convertBytesToHumanReadable( - bytes: number, - precision = 2, -): string { - if (bytes === 0 || isNaN(bytes)) { - return "0 MB"; - } - - const i = Math.floor(Math.log(bytes) / Math.log(1024)); - const sizes = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]; - return (bytes / Math.pow(1024, i)).toFixed(precision) + " " + sizes[i]; -} diff --git a/web/packages/next/locales/bg-BG/translation.json b/web/packages/next/locales/bg-BG/translation.json index 1661e8fac..28689ba49 100644 --- a/web/packages/next/locales/bg-BG/translation.json +++ b/web/packages/next/locales/bg-BG/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/de-DE/translation.json b/web/packages/next/locales/de-DE/translation.json index de7980f3e..a0ee15a7c 100644 --- a/web/packages/next/locales/de-DE/translation.json +++ b/web/packages/next/locales/de-DE/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "Ein Fehler trat auf beim Anmelden mit dem Passkey auf.", "TRY_AGAIN": "Erneut versuchen", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Folge den Schritten in deinem Browser, um mit dem Anmelden fortzufahren.", - "LOGIN_WITH_PASSKEY": "Mit Passkey anmelden" + "LOGIN_WITH_PASSKEY": "Mit Passkey anmelden", + 
"autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/en-US/translation.json b/web/packages/next/locales/en-US/translation.json index 5fdb380d5..b3debe5aa 100644 --- a/web/packages/next/locales/en-US/translation.json +++ b/web/packages/next/locales/en-US/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "An error occurred while logging in with passkey.", "TRY_AGAIN": "Try again", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Follow the steps from your browser to continue logging in.", - "LOGIN_WITH_PASSKEY": "Login with passkey" + "LOGIN_WITH_PASSKEY": "Login with passkey", + "autogenerated_first_album_name": "My First Album", + "autogenerated_default_album_name": "New Album" } diff --git a/web/packages/next/locales/es-ES/translation.json b/web/packages/next/locales/es-ES/translation.json index 543551457..a01d322b7 100644 --- a/web/packages/next/locales/es-ES/translation.json +++ b/web/packages/next/locales/es-ES/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/fa-IR/translation.json b/web/packages/next/locales/fa-IR/translation.json index 9dc5ccb7a..0c3749d13 100644 --- a/web/packages/next/locales/fa-IR/translation.json +++ b/web/packages/next/locales/fa-IR/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/fi-FI/translation.json b/web/packages/next/locales/fi-FI/translation.json index 2d2a56b54..d945fcde3 100644 --- a/web/packages/next/locales/fi-FI/translation.json +++ 
b/web/packages/next/locales/fi-FI/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/fr-FR/translation.json b/web/packages/next/locales/fr-FR/translation.json index 308728b98..f3113202f 100644 --- a/web/packages/next/locales/fr-FR/translation.json +++ b/web/packages/next/locales/fr-FR/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "Une erreur s'est produite lors de la connexion avec le code d'accès.", "TRY_AGAIN": "Réessayer", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Suivez les étapes de votre navigateur pour poursuivre la connexion.", - "LOGIN_WITH_PASSKEY": "Se connecter avec le code d'accès" + "LOGIN_WITH_PASSKEY": "Se connecter avec le code d'accès", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/it-IT/translation.json b/web/packages/next/locales/it-IT/translation.json index b66131ad7..bf555911c 100644 --- a/web/packages/next/locales/it-IT/translation.json +++ b/web/packages/next/locales/it-IT/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/ko-KR/translation.json b/web/packages/next/locales/ko-KR/translation.json index 63b6491de..aee2c6cd5 100644 --- a/web/packages/next/locales/ko-KR/translation.json +++ b/web/packages/next/locales/ko-KR/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + 
"autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/nl-NL/translation.json b/web/packages/next/locales/nl-NL/translation.json index c12a38f8b..62b846b14 100644 --- a/web/packages/next/locales/nl-NL/translation.json +++ b/web/packages/next/locales/nl-NL/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "Er is een fout opgetreden tijdens het inloggen met een passkey.", "TRY_AGAIN": "Probeer opnieuw", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Volg de stappen van je browser om door te gaan met inloggen.", - "LOGIN_WITH_PASSKEY": "Inloggen met passkey" + "LOGIN_WITH_PASSKEY": "Inloggen met passkey", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/pt-BR/translation.json b/web/packages/next/locales/pt-BR/translation.json index 5749591d1..dfe0030c5 100644 --- a/web/packages/next/locales/pt-BR/translation.json +++ b/web/packages/next/locales/pt-BR/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "Ocorreu um erro ao entrar com a chave de acesso.", "TRY_AGAIN": "Tente novamente", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Siga os passos do seu navegador para continuar acessando.", - "LOGIN_WITH_PASSKEY": "Entrar com a chave de acesso" + "LOGIN_WITH_PASSKEY": "Entrar com a chave de acesso", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/pt-PT/translation.json b/web/packages/next/locales/pt-PT/translation.json index 20ec4d9ea..f6980b56e 100644 --- a/web/packages/next/locales/pt-PT/translation.json +++ b/web/packages/next/locales/pt-PT/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git 
a/web/packages/next/locales/ru-RU/translation.json b/web/packages/next/locales/ru-RU/translation.json index 95c4f6c58..5d036c6c8 100644 --- a/web/packages/next/locales/ru-RU/translation.json +++ b/web/packages/next/locales/ru-RU/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "При входе в систему с помощью пароля произошла ошибка.", "TRY_AGAIN": "Пробовать снова", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Следуйте инструкциям в вашем браузере, чтобы продолжить вход в систему.", - "LOGIN_WITH_PASSKEY": "Войдите в систему с помощью пароля" + "LOGIN_WITH_PASSKEY": "Войдите в систему с помощью пароля", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/sv-SE/translation.json b/web/packages/next/locales/sv-SE/translation.json index 77462524d..ba6ecee09 100644 --- a/web/packages/next/locales/sv-SE/translation.json +++ b/web/packages/next/locales/sv-SE/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/th-TH/translation.json b/web/packages/next/locales/th-TH/translation.json index 2d2a56b54..d945fcde3 100644 --- a/web/packages/next/locales/th-TH/translation.json +++ b/web/packages/next/locales/th-TH/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/tr-TR/translation.json b/web/packages/next/locales/tr-TR/translation.json index 2d2a56b54..d945fcde3 100644 --- a/web/packages/next/locales/tr-TR/translation.json +++ b/web/packages/next/locales/tr-TR/translation.json @@ -621,5 
+621,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/zh-CN/translation.json b/web/packages/next/locales/zh-CN/translation.json index 7a76b58b6..d2345f1ae 100644 --- a/web/packages/next/locales/zh-CN/translation.json +++ b/web/packages/next/locales/zh-CN/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "使用通行密钥登录时出错。", "TRY_AGAIN": "重试", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "按照浏览器中提示的步骤继续登录。", - "LOGIN_WITH_PASSKEY": "使用通行密钥来登录" + "LOGIN_WITH_PASSKEY": "使用通行密钥来登录", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/types/file.ts b/web/packages/next/types/file.ts index 75641e3a2..6dd1032cd 100644 --- a/web/packages/next/types/file.ts +++ b/web/packages/next/types/file.ts @@ -16,6 +16,17 @@ export interface ElectronFile { arrayBuffer: () => Promise; } +/** + * When we are running in the context of our desktop app, we have access to the + * absolute path of {@link File} objects. This convenience type clubs these two + * bits of information, saving us the need to query the path again and again + * using the {@link getPathForFile} method of {@link Electron}. + */ +export interface FileAndPath { + file: File; + path: string; +} + export interface EventQueueItem { type: "upload" | "trash"; folderPath: string; diff --git a/web/packages/next/types/ipc.ts b/web/packages/next/types/ipc.ts index 1622a820d..173b12b17 100644 --- a/web/packages/next/types/ipc.ts +++ b/web/packages/next/types/ipc.ts @@ -123,17 +123,17 @@ export interface Electron { skipAppUpdate: (version: string) => void; /** - * A subset of filesystem access APIs. + * A subset of file system access APIs. 
 * * The renderer process, being a web process, does not have full access to - the local filesystem apart from files explicitly dragged and dropped (or + the local file system apart from files explicitly dragged and dropped (or * selected by the user in a native file open dialog). * - * The main process, however, has full filesystem access (limited only be an + * The main process, however, has full file system access (limited only by an * OS level sandbox on the entire process). * * When we're running in the desktop app, we want to better utilize the - * local filesystem access to provide more integrated features to the user - + * local file system access to provide more integrated features to the user; * things that are not currently possible using web technologies. For * example, continuous exports to an arbitrary user chosen location on disk, * or watching some folders for changes and syncing them automatically. @@ -189,11 +189,6 @@ export interface Electron { * directory. */ isDir: (dirPath: string) => Promise; - - /** - * Return the size in bytes of the file at {@link path}. - */ - size: (path: string) => Promise; }; // - Conversion @@ -226,22 +221,27 @@ export interface Electron { * not yet possible, this function will throw an error with the * {@link CustomErrorMessage.NotAvailable} message. * - * @param dataOrPath The raw image data (the contents of the image file), or - * the path to the image file, whose thumbnail we want to generate. + * @param dataOrPathOrZipItem The file whose thumbnail we want to generate. + * It can be provided as raw image data (the contents of the image file), or + * the path to the image file, or a tuple containing the path of the zip + * file along with the name of an entry in it. + * + * @param maxDimension The maximum width or height of the generated * thumbnail. + * + * @param maxSize Maximum size (in bytes) of the generated thumbnail. * * @returns JPEG data of the generated thumbnail. 
*/ generateImageThumbnail: ( - dataOrPath: Uint8Array | string, + dataOrPathOrZipItem: Uint8Array | string | ZipItem, maxDimension: number, maxSize: number, ) => Promise; /** - * Execute a FFmpeg {@link command} on the given {@link dataOrPath}. + * Execute a FFmpeg {@link command} on the given + * {@link dataOrPathOrZipItem}. * * This executes the command using a FFmpeg executable we bundle with our * desktop app. We also have a wasm FFmpeg wasm implementation that we use @@ -254,10 +254,11 @@ export interface Electron { * (respectively {@link inputPathPlaceholder}, * {@link outputPathPlaceholder}, {@link ffmpegPathPlaceholder}). * - * @param dataOrPath The bytes of the input file, or the path to the input - * file on the user's local disk. In both cases, the data gets serialized to - * a temporary file, and then that path gets substituted in the FFmpeg - * {@link command} in lieu of {@link inputPathPlaceholder}. + * @param dataOrPathOrZipItem The bytes of the input file, or the path to + * the input file on the user's local disk, or the path to a zip file on the + * user's disk and the name of an entry in it. In all three cases, the data + * gets serialized to a temporary file, and then that path gets substituted + * in the FFmpeg {@link command} in lieu of {@link inputPathPlaceholder}. * * @param outputFileExtension The extension (without the dot, e.g. "jpeg") * to use for the output file that we ask FFmpeg to create in @@ -273,7 +274,7 @@ export interface Electron { */ ffmpegExec: ( command: string[], - dataOrPath: Uint8Array | string, + dataOrPathOrZipItem: Uint8Array | string | ZipItem, outputFileExtension: string, timeoutMS: number, ) => Promise; @@ -465,44 +466,80 @@ export interface Electron { // - Upload + /** + * Return the file system path that this File object points to. 
+ * + * This method is a bit different from the other methods on the Electron + * object in the sense that there is no actual IPC happening - the + * implementation of this method is completely in the preload script. Thus + * we can pass it an otherwise unserializable File object. + * + * Consequently, it is also _not_ async. + */ + pathForFile: (file: File) => string; + + /** + * Get the list of files that are present in the given zip file. + * + * @param zipPath The path of the zip file on the user's local file system. + * + * @returns A list of (zipPath, entryName) tuples, one for each file in the + * given zip. Directories are traversed recursively, but the directory + * entries themselves will be excluded from the returned list. File entries + * whose file name begins with a dot (i.e. "hidden" files) will also be + * excluded. + * + * To read the contents of the files themselves, see [Note: IPC streams]. + */ + listZipItems: (zipPath: string) => Promise; + + /** + * Return the size in bytes of the file at the given path or of a particular + * entry within a zip file. + */ + pathOrZipItemSize: (pathOrZipItem: string | ZipItem) => Promise; + /** * Return any pending uploads that were previously enqueued but haven't yet * been completed. * - * The state of pending uploads is persisted in the Node.js layer. + * Return undefined if there are no such pending uploads. * - * Note that we might have both outstanding zip and regular file uploads at - * the same time. In such cases, the zip file ones get precedence. + * The state of pending uploads is persisted in the Node.js layer. On app + * start, we read in this data from the Node.js layer via this IPC method. + * The Node.js code returns the persisted data after filtering out any files + * that no longer exist on disk. */ pendingUploads: () => Promise; /** - * Set or clear the name of the collection where the pending upload is - * directed to. + * Set the state of pending uploads. 
+ * + * - Typically, this would be called at the start of an upload. + * + * - Thereafter, as each item gets uploaded one by one, we'd call + * {@link markUploadedFiles} or {@link markUploadedZipItems}. + * + * - Finally, once the upload completes (or gets cancelled), we'd call + * {@link clearPendingUploads} to complete the circle. */ - setPendingUploadCollection: (collectionName: string) => Promise; + setPendingUploads: (pendingUploads: PendingUploads) => Promise; /** - * Update the list of files (of {@link type}) associated with the pending - * upload. + * Mark the given files (given by their {@link paths}) as having been + * uploaded. */ - setPendingUploadFiles: ( - type: PendingUploads["type"], - filePaths: string[], - ) => Promise; + markUploadedFiles: (paths: PendingUploads["filePaths"]) => Promise; - /* - * TODO: AUDIT below this - Some of the types we use below are not copyable - * across process boundaries, and such functions will (expectedly) fail at - * runtime. For such functions, find an efficient alternative or refactor - * the dataflow. + /** + * Mark the given {@link ZipItem}s as having been uploaded. */ + markUploadedZipItems: (items: PendingUploads["zipItems"]) => Promise; - // - - - getElectronFilesFromGoogleZip: ( - filePath: string, - ) => Promise; + /** + * Clear any pending uploads. + */ + clearPendingUploads: () => Promise; } /** @@ -588,14 +625,41 @@ export interface FolderWatchSyncedFile { } /** - * When the user starts an upload, we remember the files they'd selected or drag - * and dropped so that we can resume (if needed) when the app restarts after - * being stopped in the middle of the uploads. + * A particular file within a zip file. + * + * When the user uploads a zip file, we create a "zip item" for each entry + * within the zip file. Each such entry is a tuple containing the (path to a zip + * file itself, and the name of an entry within it). 
+ * + * The name of the entry is not just the file name, but rather is the full path + * of the file within the zip. That is, each entry name uniquely identifies a + * particular file within the given zip. + */ +export type ZipItem = [zipPath: string, entryName: string]; + +/** + * State about pending and in-progress uploads. + * + * When the user starts an upload, we remember the files they'd selected (or + * drag-dropped) so that we can resume if they restart the app before the + * uploads have been completed. This state is kept on the Electron side, and + * this object is the IPC intermediary. */ export interface PendingUploads { - /** The collection to which we're uploading */ + /** + * The collection to which we're uploading, or the root collection. + * + * This is the name of the collection (when uploading to a singular collection) + * or the root collection (when uploading to separate albums) to which + * these uploads are meant to go. See {@link CollectionMapping}. + */ collectionName: string; - /* The upload can be either of a Google Takeout zip, or regular files */ - type: "files" | "zips"; - files: ElectronFile[]; + /** + * Paths of regular files that need to be uploaded. + */ + filePaths: string[]; + /** + * {@link ZipItem} (zip path and entry name) that need to be uploaded. + */ + zipItems: ZipItem[]; } diff --git a/web/packages/shared/hooks/useFileInput.tsx b/web/packages/shared/hooks/useFileInput.tsx index b53fecb58..4eb346d39 100644 --- a/web/packages/shared/hooks/useFileInput.tsx +++ b/web/packages/shared/hooks/useFileInput.tsx @@ -1,24 +1,63 @@ import { useCallback, useRef, useState } from "react"; -/* - * TODO (MR): Understand how this is happening, and validate it further (on - * first glance this is correct). - * +/** * [Note: File paths when running under Electron] * * We have access to the absolute path of the web {@link File} object when we * are running in the context of our desktop app. 
 * + * https://www.electronjs.org/docs/latest/api/file-object + * + * This is in contrast to the `webkitRelativePath` that we get when we're * running in the browser, which is the relative path to the directory that the * user selected (or just the name of the file if the user selected or * drag/dropped a single one). + * + * Note that this is a deprecated approach. From Electron docs: + * + * > Warning: The path property that Electron adds to the File interface is + * > deprecated and will be removed in a future Electron release. We recommend + * > you use `webUtils.getPathForFile` instead. */ export interface FileWithPath extends File { readonly path?: string; } -export default function useFileInput({ directory }: { directory?: boolean }) { +interface UseFileInputParams { + directory?: boolean; + accept?: string; +} + +/** + * Return three things: + * + * - A function that can be called to trigger the showing of the select file / + * directory dialog. + * + * - The list of properties that should be passed to a dummy `input` element + * that needs to be created to anchor the select file dialog. This input HTML + * element is not going to be visible, but it needs to be part of the DOM for + * the open trigger to have effect. + * + * - The list of files that the user selected. This will be a list even if the + * user selected directories - in that case, it will be the recursive list of + * files within this directory. + * + * @param param0 + * + * - If {@link directory} is true, the file open dialog will ask the user to + * select directories. Otherwise it'll ask the user to select files. + * + * - If {@link accept} is specified, it'll restrict the type of files that the + * user can select by setting the "accept" attribute of the underlying HTML + * input element we use to surface the file selector dialog. The value of + * accept can be an extension or a MIME type (See + * https://developer.mozilla.org/en-US/docs/Web/HTML/Attributes/accept). 
+ */ +export default function useFileInput({ + directory, + accept, +}: UseFileInputParams) { const [selectedFiles, setSelectedFiles] = useState([]); const inputRef = useRef(); @@ -48,6 +87,7 @@ export default function useFileInput({ directory }: { directory?: boolean }) { ...(directory ? { directory: "", webkitdirectory: "" } : {}), ref: inputRef, onChange: handleChange, + ...(accept ? { accept } : {}), }), [], );