Merge branch 'ente_popup_menu' into mobile_face

This commit is contained in:
Neeraj Gupta 2024-04-30 14:48:12 +05:30
commit 49d5370d47
57 changed files with 1641 additions and 1428 deletions

View file

@ -27,5 +27,3 @@ export const fsIsDir = async (dirPath: string) => {
const stat = await fs.stat(dirPath);
return stat.isDirectory();
};
/** Return the size in bytes of the file at the given {@link path}. */
export const fsSize = async (path: string) => {
    const stat = await fs.stat(path);
    return stat.size;
};

View file

@ -14,6 +14,7 @@ import type {
CollectionMapping,
FolderWatch,
PendingUploads,
ZipItem,
} from "../types/ipc";
import {
selectDirectory,
@ -29,7 +30,6 @@ import {
fsRename,
fsRm,
fsRmdir,
fsSize,
fsWriteFile,
} from "./fs";
import { logToDisk } from "./log";
@ -52,10 +52,13 @@ import {
saveEncryptionKey,
} from "./services/store";
import {
getElectronFilesFromGoogleZip,
clearPendingUploads,
listZipItems,
markUploadedFiles,
markUploadedZipItems,
pathOrZipItemSize,
pendingUploads,
setPendingUploadCollection,
setPendingUploadFiles,
setPendingUploads,
} from "./services/upload";
import {
watchAdd,
@ -139,8 +142,6 @@ export const attachIPCHandlers = () => {
ipcMain.handle("fsIsDir", (_, dirPath: string) => fsIsDir(dirPath));
ipcMain.handle("fsSize", (_, path: string) => fsSize(path));
// - Conversion
ipcMain.handle("convertToJPEG", (_, imageData: Uint8Array) =>
@ -151,10 +152,10 @@ export const attachIPCHandlers = () => {
"generateImageThumbnail",
(
_,
dataOrPath: Uint8Array | string,
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
maxDimension: number,
maxSize: number,
) => generateImageThumbnail(dataOrPath, maxDimension, maxSize),
) => generateImageThumbnail(dataOrPathOrZipItem, maxDimension, maxSize),
);
ipcMain.handle(
@ -162,10 +163,16 @@ export const attachIPCHandlers = () => {
(
_,
command: string[],
dataOrPath: Uint8Array | string,
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
outputFileExtension: string,
timeoutMS: number,
) => ffmpegExec(command, dataOrPath, outputFileExtension, timeoutMS),
) =>
ffmpegExec(
command,
dataOrPathOrZipItem,
outputFileExtension,
timeoutMS,
),
);
// - ML
@ -198,23 +205,31 @@ export const attachIPCHandlers = () => {
// - Upload
ipcMain.handle("listZipItems", (_, zipPath: string) =>
listZipItems(zipPath),
);
ipcMain.handle("pathOrZipItemSize", (_, pathOrZipItem: string | ZipItem) =>
pathOrZipItemSize(pathOrZipItem),
);
ipcMain.handle("pendingUploads", () => pendingUploads());
ipcMain.handle("setPendingUploadCollection", (_, collectionName: string) =>
setPendingUploadCollection(collectionName),
ipcMain.handle("setPendingUploads", (_, pendingUploads: PendingUploads) =>
setPendingUploads(pendingUploads),
);
ipcMain.handle(
"setPendingUploadFiles",
(_, type: PendingUploads["type"], filePaths: string[]) =>
setPendingUploadFiles(type, filePaths),
"markUploadedFiles",
(_, paths: PendingUploads["filePaths"]) => markUploadedFiles(paths),
);
// -
ipcMain.handle("getElectronFilesFromGoogleZip", (_, filePath: string) =>
getElectronFilesFromGoogleZip(filePath),
ipcMain.handle(
"markUploadedZipItems",
(_, items: PendingUploads["zipItems"]) => markUploadedZipItems(items),
);
ipcMain.handle("clearPendingUploads", () => clearPendingUploads());
};
/**

View file

@ -1,9 +1,14 @@
import pathToFfmpeg from "ffmpeg-static";
import fs from "node:fs/promises";
import type { ZipItem } from "../../types/ipc";
import log from "../log";
import { withTimeout } from "../utils";
import { execAsync } from "../utils-electron";
import { deleteTempFile, makeTempFilePath } from "../utils-temp";
import {
deleteTempFile,
makeFileForDataOrPathOrZipItem,
makeTempFilePath,
} from "../utils-temp";
/* Duplicated in the web app's code (used by the WASM FFmpeg implementation). */
const ffmpegPathPlaceholder = "FFMPEG";
@ -39,28 +44,24 @@ const outputPathPlaceholder = "OUTPUT";
*/
export const ffmpegExec = async (
command: string[],
dataOrPath: Uint8Array | string,
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
outputFileExtension: string,
timeoutMS: number,
): Promise<Uint8Array> => {
// TODO (MR): This currently copies files for both input and output. This
// needs to be tested extremely large video files when invoked downstream of
// `convertToMP4` in the web code.
// TODO (MR): This currently copies files for both input (when
// dataOrPathOrZipItem is data) and output. This needs to be tested with
// extremely large video files when invoked downstream of `convertToMP4` in
// the web code.
let inputFilePath: string;
let isInputFileTemporary: boolean;
if (dataOrPath instanceof Uint8Array) {
inputFilePath = await makeTempFilePath();
isInputFileTemporary = true;
} else {
inputFilePath = dataOrPath;
isInputFileTemporary = false;
}
const {
path: inputFilePath,
isFileTemporary: isInputFileTemporary,
writeToTemporaryFile: writeToTemporaryInputFile,
} = await makeFileForDataOrPathOrZipItem(dataOrPathOrZipItem);
const outputFilePath = await makeTempFilePath(outputFileExtension);
try {
if (dataOrPath instanceof Uint8Array)
await fs.writeFile(inputFilePath, dataOrPath);
await writeToTemporaryInputFile();
const cmd = substitutePlaceholders(
command,

View file

@ -2,10 +2,14 @@
import fs from "node:fs/promises";
import path from "path";
import { CustomErrorMessage } from "../../types/ipc";
import { CustomErrorMessage, type ZipItem } from "../../types/ipc";
import log from "../log";
import { execAsync, isDev } from "../utils-electron";
import { deleteTempFile, makeTempFilePath } from "../utils-temp";
import {
deleteTempFile,
makeFileForDataOrPathOrZipItem,
makeTempFilePath,
} from "../utils-temp";
export const convertToJPEG = async (imageData: Uint8Array) => {
const inputFilePath = await makeTempFilePath();
@ -63,19 +67,15 @@ const imageMagickPath = () =>
path.join(isDev ? "build" : process.resourcesPath, "image-magick");
export const generateImageThumbnail = async (
dataOrPath: Uint8Array | string,
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
maxDimension: number,
maxSize: number,
): Promise<Uint8Array> => {
let inputFilePath: string;
let isInputFileTemporary: boolean;
if (dataOrPath instanceof Uint8Array) {
inputFilePath = await makeTempFilePath();
isInputFileTemporary = true;
} else {
inputFilePath = dataOrPath;
isInputFileTemporary = false;
}
const {
path: inputFilePath,
isFileTemporary: isInputFileTemporary,
writeToTemporaryFile: writeToTemporaryInputFile,
} = await makeFileForDataOrPathOrZipItem(dataOrPathOrZipItem);
const outputFilePath = await makeTempFilePath("jpeg");
@ -89,8 +89,7 @@ export const generateImageThumbnail = async (
);
try {
if (dataOrPath instanceof Uint8Array)
await fs.writeFile(inputFilePath, dataOrPath);
await writeToTemporaryInputFile();
let thumbnail: Uint8Array;
do {

View file

@ -1,73 +1,104 @@
import StreamZip from "node-stream-zip";
import fs from "node:fs/promises";
import { existsSync } from "original-fs";
import path from "path";
import { ElectronFile, type PendingUploads } from "../../types/ipc";
import {
uploadStatusStore,
type UploadStatusStore,
} from "../stores/upload-status";
import { getElectronFile, getZipFileStream } from "./fs";
import type { ElectronFile, PendingUploads, ZipItem } from "../../types/ipc";
import { uploadStatusStore } from "../stores/upload-status";
import { getZipFileStream } from "./fs";
export const pendingUploads = async () => {
const collectionName = uploadStatusStore.get("collectionName");
const filePaths = validSavedPaths("files");
const zipPaths = validSavedPaths("zips");
export const listZipItems = async (zipPath: string): Promise<ZipItem[]> => {
const zip = new StreamZip.async({ file: zipPath });
let files: ElectronFile[] = [];
let type: PendingUploads["type"];
const entries = await zip.entries();
const entryNames: string[] = [];
if (zipPaths.length) {
type = "zips";
for (const zipPath of zipPaths) {
files = [
...files,
...(await getElectronFilesFromGoogleZip(zipPath)),
];
for (const entry of Object.values(entries)) {
const basename = path.basename(entry.name);
// Ignore "hidden" files (files whose names begin with a dot).
if (entry.isFile && basename.length > 0 && basename[0] != ".") {
// `entry.name` is the path within the zip.
entryNames.push(entry.name);
}
const pendingFilePaths = new Set(filePaths);
files = files.filter((file) => pendingFilePaths.has(file.path));
} else if (filePaths.length) {
type = "files";
files = await Promise.all(filePaths.map(getElectronFile));
}
zip.close();
return entryNames.map((entryName) => [zipPath, entryName]);
};
/**
 * Return the size in bytes of the item at {@link pathOrZipItem}.
 *
 * @param pathOrZipItem Either the path to a file on disk, or a (zip path,
 * entry name) tuple identifying an entry within a zip file.
 *
 * @throws If the path does not exist, or if the named entry is not present
 * in the zip file.
 */
export const pathOrZipItemSize = async (
    pathOrZipItem: string | ZipItem,
): Promise<number> => {
    if (typeof pathOrZipItem == "string") {
        const stat = await fs.stat(pathOrZipItem);
        return stat.size;
    } else {
        const [zipPath, entryName] = pathOrZipItem;
        const zip = new StreamZip.async({ file: zipPath });
        try {
            // `zip.entry` resolves to undefined for missing entries; fail
            // with a descriptive error instead of a TypeError on `.size`.
            const entry = await zip.entry(entryName);
            if (!entry)
                throw new Error(
                    `Did not find entry ${entryName} in zip file at ${zipPath}`,
                );
            return entry.size;
        } finally {
            // Ensure the zip handle is released even if we threw above.
            await zip.close();
        }
    }
};
export const pendingUploads = async (): Promise<PendingUploads | undefined> => {
const collectionName = uploadStatusStore.get("collectionName");
const allFilePaths = uploadStatusStore.get("filePaths") ?? [];
const filePaths = allFilePaths.filter((f) => existsSync(f));
const allZipItems = uploadStatusStore.get("zipItems");
let zipItems: typeof allZipItems;
// Migration code - May 2024. Remove after a bit.
//
// The older store formats will not have zipItems and instead will have
// zipPaths. If we find such a case, read the zipPaths and enqueue all of
// their files as zipItems in the result.
//
// This can potentially cause us to attempt reuploading an already uploaded
// file, but the dedup logic will kick in at that point, so no harm will
// come of it.
if (allZipItems === undefined) {
const allZipPaths = uploadStatusStore.get("filePaths");
const zipPaths = allZipPaths.filter((f) => existsSync(f));
zipItems = [];
for (const zip of zipPaths)
zipItems = zipItems.concat(await listZipItems(zip));
} else {
zipItems = allZipItems.filter(([z]) => existsSync(z));
}
if (filePaths.length == 0 && zipItems.length == 0) return undefined;
return {
files,
collectionName,
type,
filePaths,
zipItems,
};
};
export const validSavedPaths = (type: PendingUploads["type"]) => {
const key = storeKey(type);
const savedPaths = (uploadStatusStore.get(key) as string[]) ?? [];
const paths = savedPaths.filter((p) => existsSync(p));
uploadStatusStore.set(key, paths);
return paths;
/**
 * Save the given {@link pendingUploads} to the upload status store.
 *
 * These are later read back (and filtered for continued existence on disk)
 * by {@link pendingUploads}.
 */
export const setPendingUploads = async (pendingUploads: PendingUploads) =>
    uploadStatusStore.set(pendingUploads);
/**
 * Mark the files at the given {@link paths} as having been uploaded, removing
 * them from the list of pending file paths saved in the upload status store.
 */
export const markUploadedFiles = async (paths: string[]) => {
    // The filePaths key is optional in the store schema (legacy stores may
    // not have it); treat a missing value as an empty list instead of
    // crashing on `undefined.filter`.
    const existing = uploadStatusStore.get("filePaths") ?? [];
    // Use a Set for O(1) membership checks instead of scanning `paths` for
    // every saved entry.
    const uploaded = new Set(paths);
    const updated = existing.filter((p) => !uploaded.has(p));
    uploadStatusStore.set("filePaths", updated);
};
export const setPendingUploadCollection = (collectionName: string) => {
if (collectionName) uploadStatusStore.set("collectionName", collectionName);
else uploadStatusStore.delete("collectionName");
};
export const setPendingUploadFiles = (
type: PendingUploads["type"],
filePaths: string[],
export const markUploadedZipItems = async (
items: [zipPath: string, entryName: string][],
) => {
const key = storeKey(type);
if (filePaths) uploadStatusStore.set(key, filePaths);
else uploadStatusStore.delete(key);
const existing = uploadStatusStore.get("zipItems");
const updated = existing.filter(
(z) => !items.some((e) => z[0] == e[0] && z[1] == e[1]),
);
uploadStatusStore.set("zipItems", updated);
};
const storeKey = (type: PendingUploads["type"]): keyof UploadStatusStore => {
switch (type) {
case "zips":
return "zipPaths";
case "files":
return "filePaths";
}
};
/** Clear the upload status store, discarding any saved pending uploads. */
export const clearPendingUploads = () => uploadStatusStore.clear();
export const getElectronFilesFromGoogleZip = async (filePath: string) => {
const zip = new StreamZip.async({
@ -85,6 +116,8 @@ export const getElectronFilesFromGoogleZip = async (filePath: string) => {
}
}
zip.close();
return files;
};

View file

@ -1,27 +1,56 @@
import Store, { Schema } from "electron-store";
export interface UploadStatusStore {
filePaths: string[];
zipPaths: string[];
collectionName: string;
/**
* The collection to which we're uploading, or the root collection.
*
* Not all pending uploads will have an associated collection.
*/
collectionName?: string;
/**
* Paths to regular files that are pending upload.
*
* This should generally be present, albeit empty, but it is marked optional
* in sympathy with its siblings.
*/
filePaths?: string[];
/**
* Each item is the path to a zip file and the name of an entry within it.
*
* This is marked optional since legacy stores will not have it.
*/
zipItems?: [zipPath: string, entryName: string][];
/**
* @deprecated Legacy paths to zip files, now subsumed into zipItems.
*/
zipPaths?: string[];
}
const uploadStatusSchema: Schema<UploadStatusStore> = {
collectionName: {
type: "string",
},
filePaths: {
type: "array",
items: {
type: "string",
},
},
zipItems: {
type: "array",
items: {
type: "array",
items: {
type: "string",
},
},
},
zipPaths: {
type: "array",
items: {
type: "string",
},
},
collectionName: {
type: "string",
},
};
export const uploadStatusStore = new Store({

View file

@ -2,6 +2,7 @@
* @file stream data to-from renderer using a custom protocol handler.
*/
import { net, protocol } from "electron/main";
import StreamZip from "node-stream-zip";
import { createWriteStream, existsSync } from "node:fs";
import fs from "node:fs/promises";
import { Readable } from "node:stream";
@ -34,17 +35,23 @@ export const registerStreamProtocol = () => {
protocol.handle("stream", async (request: Request) => {
const url = request.url;
// The request URL contains the command to run as the host, and the
// pathname of the file as the path. For example,
// pathname of the file as the path. An additional path can be specified
// as the URL hash.
//
// stream://write/path/to/file
// host-pathname-----
// For example,
//
const { host, pathname } = new URL(url);
// stream://write/path/to/file#/path/to/another/file
// host[pathname----] [pathname-2---------]
//
const { host, pathname, hash } = new URL(url);
// Convert e.g. "%20" to spaces.
const path = decodeURIComponent(pathname);
const hashPath = decodeURIComponent(hash);
switch (host) {
case "read":
return handleRead(path);
case "read-zip":
return handleReadZip(path, hashPath);
case "write":
return handleWrite(path, request);
default:
@ -88,6 +95,39 @@ const handleRead = async (path: string) => {
}
};
/**
 * Handle a "stream://read-zip" request: respond with the contents of the
 * entry named {@link entryName} within the zip file at {@link zipPath}.
 *
 * On success the response body is the entry's bytes (served as a generic
 * binary content type), with the entry's size and modification time exposed
 * via headers. On any failure, responds with an HTTP 500 carrying the error
 * message.
 */
const handleReadZip = async (zipPath: string, entryName: string) => {
    try {
        const zip = new StreamZip.async({ file: zipPath });
        // NOTE(review): `zip.entry` can resolve to undefined when entryName
        // is not present in the zip; that would surface as an exception from
        // the calls below — confirm this is the intended failure mode.
        const entry = await zip.entry(entryName);
        const stream = await zip.stream(entry);
        // TODO(MR): when to call zip.close()
        // NOTE(review): `new Readable(stream)` passes the zip stream as the
        // Readable *options* object, which looks suspicious — presumably the
        // intent was to bridge the node-stream-zip stream to a web stream;
        // verify that data actually flows through this wrapping.
        return new Response(Readable.toWeb(new Readable(stream)), {
            headers: {
                // We don't know the exact type, but it doesn't really matter,
                // just set it to a generic binary content-type so that the
                // browser doesn't tinker with it thinking of it as text.
                "Content-Type": "application/octet-stream",
                "Content-Length": `${entry.size}`,
                // While it is documented that entry.time is the modification
                // time, the units are not mentioned. By seeing the source code,
                // we can verify that it is indeed epoch milliseconds. See
                // `parseZipTime` in the node-stream-zip source,
                // https://github.com/antelle/node-stream-zip/blob/master/node_stream_zip.js
                "X-Last-Modified-Ms": `${entry.time}`,
            },
        });
    } catch (e) {
        log.error(
            `Failed to read entry ${entryName} from zip file at ${zipPath}`,
            e,
        );
        return new Response(`Failed to read stream: ${e.message}`, {
            status: 500,
        });
    }
};
const handleWrite = async (path: string, request: Request) => {
try {
await writeStream(path, request.body);

View file

@ -1,7 +1,9 @@
import { app } from "electron/main";
import StreamZip from "node-stream-zip";
import { existsSync } from "node:fs";
import fs from "node:fs/promises";
import path from "path";
import type { ZipItem } from "../types/ipc";
/**
* Our very own directory within the system temp directory. Go crazy, but
@ -61,3 +63,64 @@ export const deleteTempFile = async (tempFilePath: string) => {
throw new Error(`Attempting to delete a non-temp file ${tempFilePath}`);
await fs.rm(tempFilePath, { force: true });
};
/** The result of {@link makeFileForDataOrPathOrZipItem}. */
interface FileForDataOrPathOrZipItem {
    /**
     * The path to the file (possibly temporary).
     */
    path: string;
    /**
     * `true` if {@link path} points to a temporary file which should be deleted
     * once we are done processing.
     */
    isFileTemporary: boolean;
    /**
     * A function that writes the contents of the source
     * `Uint8Array | string | ZipItem` into the file at {@link path}.
     *
     * It is a no-op when the source is already a path, since nothing needs to
     * be written in that case. In the other two cases this function will write
     * the data or extract the zip item into the file at {@link path}.
     */
    writeToTemporaryFile: () => Promise<void>;
}

/**
 * Return the path to a file, a boolean indicating if this is a temporary path
 * that needs to be deleted after processing, and a function to write the given
 * {@link dataOrPathOrZipItem} into that temporary file if needed.
 *
 * @param dataOrPathOrZipItem The contents of the file, or the path to an
 * existing file, or a (path to a zip file, name of an entry within that zip
 * file) tuple.
 */
export const makeFileForDataOrPathOrZipItem = async (
    dataOrPathOrZipItem: Uint8Array | string | ZipItem,
): Promise<FileForDataOrPathOrZipItem> => {
    let path: string;
    let isFileTemporary: boolean;
    // Default to a no-op so that callers may invoke `writeToTemporaryFile`
    // unconditionally (the ffmpeg and image conversion callers do exactly
    // that). The previous declaration,
    //     let writeToTemporaryFile: () => Promise<void> | undefined;
    // (a) parsed as a function returning `Promise<void> | undefined` rather
    // than an optional function, and (b) was left unassigned on the string
    // (path) branch, so invoking it would've thrown at runtime.
    let writeToTemporaryFile: () => Promise<void> = async () => {
        /* Nothing to write: the source is already a file on disk. */
    };

    if (typeof dataOrPathOrZipItem == "string") {
        path = dataOrPathOrZipItem;
        isFileTemporary = false;
    } else {
        path = await makeTempFilePath();
        isFileTemporary = true;
        if (dataOrPathOrZipItem instanceof Uint8Array) {
            writeToTemporaryFile = async () => {
                await fs.writeFile(path, dataOrPathOrZipItem);
            };
        } else {
            writeToTemporaryFile = async () => {
                const [zipPath, entryName] = dataOrPathOrZipItem;
                const zip = new StreamZip.async({ file: zipPath });
                await zip.extract(entryName, path);
                await zip.close();
            };
        }
    }

    return { path, isFileTemporary, writeToTemporaryFile };
};

View file

@ -37,7 +37,7 @@
* - [main] desktop/src/main/ipc.ts contains impl
*/
import { contextBridge, ipcRenderer } from "electron/renderer";
import { contextBridge, ipcRenderer, webUtils } from "electron/renderer";
// While we can't import other code, we can import types since they're just
// needed when compiling and will not be needed or looked around for at runtime.
@ -47,6 +47,7 @@ import type {
ElectronFile,
FolderWatch,
PendingUploads,
ZipItem,
} from "./types/ipc";
// - General
@ -122,36 +123,33 @@ const fsWriteFile = (path: string, contents: string): Promise<void> =>
const fsIsDir = (dirPath: string): Promise<boolean> =>
ipcRenderer.invoke("fsIsDir", dirPath);
const fsSize = (path: string): Promise<number> =>
ipcRenderer.invoke("fsSize", path);
// - Conversion
const convertToJPEG = (imageData: Uint8Array): Promise<Uint8Array> =>
ipcRenderer.invoke("convertToJPEG", imageData);
const generateImageThumbnail = (
dataOrPath: Uint8Array | string,
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
maxDimension: number,
maxSize: number,
): Promise<Uint8Array> =>
ipcRenderer.invoke(
"generateImageThumbnail",
dataOrPath,
dataOrPathOrZipItem,
maxDimension,
maxSize,
);
const ffmpegExec = (
command: string[],
dataOrPath: Uint8Array | string,
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
outputFileExtension: string,
timeoutMS: number,
): Promise<Uint8Array> =>
ipcRenderer.invoke(
"ffmpegExec",
command,
dataOrPath,
dataOrPathOrZipItem,
outputFileExtension,
timeoutMS,
);
@ -241,25 +239,29 @@ const watchFindFiles = (folderPath: string): Promise<string[]> =>
// - Upload
/** Return the file system path that the given {@link file} object refers to. */
const pathForFile = (file: File) => webUtils.getPathForFile(file);

/** Invoke the main process's "listZipItems" handler for {@link zipPath}. */
const listZipItems = (zipPath: string): Promise<ZipItem[]> =>
    ipcRenderer.invoke("listZipItems", zipPath);

/** Invoke the main process's "pathOrZipItemSize" handler. */
const pathOrZipItemSize = (pathOrZipItem: string | ZipItem): Promise<number> =>
    ipcRenderer.invoke("pathOrZipItemSize", pathOrZipItem);

/** Invoke the main process's "pendingUploads" handler. */
const pendingUploads = (): Promise<PendingUploads | undefined> =>
    ipcRenderer.invoke("pendingUploads");
const setPendingUploadCollection = (collectionName: string): Promise<void> =>
ipcRenderer.invoke("setPendingUploadCollection", collectionName);
const setPendingUploads = (pendingUploads: PendingUploads): Promise<void> =>
ipcRenderer.invoke("setPendingUploads", pendingUploads);
const setPendingUploadFiles = (
type: PendingUploads["type"],
filePaths: string[],
): Promise<void> =>
ipcRenderer.invoke("setPendingUploadFiles", type, filePaths);
const markUploadedFiles = (paths: PendingUploads["filePaths"]): Promise<void> =>
ipcRenderer.invoke("markUploadedFiles", paths);
// - TODO: AUDIT below this
// -
const markUploadedZipItems = (
items: PendingUploads["zipItems"],
): Promise<void> => ipcRenderer.invoke("markUploadedZipItems", items);
const getElectronFilesFromGoogleZip = (
filePath: string,
): Promise<ElectronFile[]> =>
ipcRenderer.invoke("getElectronFilesFromGoogleZip", filePath);
const clearPendingUploads = (): Promise<void> =>
ipcRenderer.invoke("clearPendingUploads");
/**
* These objects exposed here will become available to the JS code in our
@ -331,7 +333,6 @@ contextBridge.exposeInMainWorld("electron", {
readTextFile: fsReadTextFile,
writeFile: fsWriteFile,
isDir: fsIsDir,
size: fsSize,
},
// - Conversion
@ -370,11 +371,12 @@ contextBridge.exposeInMainWorld("electron", {
// - Upload
pathForFile,
listZipItems,
pathOrZipItemSize,
pendingUploads,
setPendingUploadCollection,
setPendingUploadFiles,
// -
getElectronFilesFromGoogleZip,
setPendingUploads,
markUploadedFiles,
markUploadedZipItems,
clearPendingUploads,
});

View file

@ -25,10 +25,12 @@ export interface FolderWatchSyncedFile {
collectionID: number;
}
/**
 * A location within a zip file: the path to the zip file itself, paired with
 * the name of an entry within it.
 */
export type ZipItem = [zipPath: string, entryName: string];
export interface PendingUploads {
collectionName: string;
type: "files" | "zips";
files: ElectronFile[];
filePaths: string[];
zipItems: ZipItem[];
}
/**

View file

@ -0,0 +1,38 @@
import 'package:flutter/material.dart';
/// A [PopupMenuItem] in Ente's house style: a leading icon followed by a
/// text [label].
///
/// Exactly one of [icon] or [iconWidget] must be provided; this is enforced
/// by asserts in the constructor.
class EntePopupMenuItem<T> extends PopupMenuItem<T> {
  // The text shown for this menu item.
  final String label;
  // Icon shown before the label; mutually exclusive with [iconWidget].
  final IconData? icon;
  // Custom leading widget; mutually exclusive with [icon].
  final Widget? iconWidget;

  EntePopupMenuItem(
    this.label, {
    required T value,
    this.icon,
    this.iconWidget,
    Key? key,
  })  : assert(
          icon != null || iconWidget != null,
          'Either icon or iconWidget must be provided.',
        ),
        assert(
          !(icon != null && iconWidget != null),
          'Only one of icon or iconWidget can be provided.',
        ),
        super(
          value: value,
          key: key,
          // The row is fully constructed here in the initializer: the
          // leading icon (whichever of the two variants was provided),
          // fixed padding, then the label text.
          child: Row(
            children: [
              if (iconWidget != null)
                iconWidget
              else if (icon != null)
                Icon(icon),
              const Padding(
                padding: EdgeInsets.all(8),
              ),
              Text(label),
            ],
          ),
        );
}

View file

@ -24,6 +24,7 @@ import 'package:photos/services/collections_service.dart';
import 'package:photos/services/sync_service.dart';
import 'package:photos/services/update_service.dart';
import 'package:photos/ui/actions/collection/collection_sharing_actions.dart';
import "package:photos/ui/common/popup_item.dart";
import 'package:photos/ui/components/action_sheet_widget.dart';
import 'package:photos/ui/components/buttons/button_widget.dart';
import 'package:photos/ui/components/models/button_type.dart';
@ -319,263 +320,117 @@ class _GalleryAppBarWidgetState extends State<GalleryAppBarWidget> {
),
);
}
final List<PopupMenuItem<AlbumPopupAction>> items = [];
if (galleryType.canRename()) {
items.add(
PopupMenuItem(
final List<EntePopupMenuItem<AlbumPopupAction>> items = [];
items.addAll([
if (galleryType.canRename())
EntePopupMenuItem(
isQuickLink
? S.of(context).convertToAlbum
: S.of(context).renameAlbum,
value: AlbumPopupAction.rename,
child: Row(
children: [
Icon(isQuickLink ? Icons.photo_album_outlined : Icons.edit),
const Padding(
padding: EdgeInsets.all(8),
),
Text(
isQuickLink
? S.of(context).convertToAlbum
: S.of(context).renameAlbum,
),
],
),
icon: isQuickLink ? Icons.photo_album_outlined : Icons.edit,
),
);
}
if (galleryType.canSetCover()) {
items.add(
PopupMenuItem(
if (galleryType.canSetCover())
EntePopupMenuItem(
S.of(context).setCover,
value: AlbumPopupAction.setCover,
child: Row(
children: [
const Icon(Icons.image_outlined),
const Padding(
padding: EdgeInsets.all(8),
),
Text(S.of(context).setCover),
],
),
icon: Icons.image_outlined,
),
);
}
if (galleryType.showMap()) {
items.add(
PopupMenuItem(
if (galleryType.showMap())
EntePopupMenuItem(
S.of(context).map,
value: AlbumPopupAction.map,
child: Row(
children: [
const Icon(Icons.map_outlined),
const Padding(
padding: EdgeInsets.all(8),
),
Text(S.of(context).map),
],
),
icon: Icons.map_outlined,
),
);
}
if (galleryType.canSort()) {
items.add(
PopupMenuItem(
if (galleryType.canSort())
EntePopupMenuItem(
S.of(context).sortAlbumsBy,
value: AlbumPopupAction.sort,
child: Row(
children: [
const Icon(Icons.sort_outlined),
const Padding(
padding: EdgeInsets.all(8),
),
Text(
S.of(context).sortAlbumsBy,
),
],
),
icon: Icons.sort_outlined,
),
);
}
if (galleryType == GalleryType.uncategorized) {
items.add(
PopupMenuItem(
if (galleryType == GalleryType.uncategorized)
EntePopupMenuItem(
S.of(context).cleanUncategorized,
value: AlbumPopupAction.cleanUncategorized,
child: Row(
children: [
const Icon(Icons.crop_original_outlined),
const Padding(
padding: EdgeInsets.all(8),
),
Text(S.of(context).cleanUncategorized),
],
),
icon: Icons.crop_original_outlined,
),
);
}
if (galleryType.canPin()) {
items.add(
PopupMenuItem(
if (galleryType.canPin())
EntePopupMenuItem(
widget.collection!.isPinned
? S.of(context).unpinAlbum
: S.of(context).pinAlbum,
value: AlbumPopupAction.pinAlbum,
child: Row(
children: [
widget.collection!.isPinned
? const Icon(CupertinoIcons.pin_slash)
: Transform.rotate(
angle: 45 * math.pi / 180, // rotate by 45 degrees
child: const Icon(CupertinoIcons.pin),
),
const Padding(
padding: EdgeInsets.all(8),
),
Text(
widget.collection!.isPinned
? S.of(context).unpinAlbum
: S.of(context).pinAlbum,
),
],
),
iconWidget: widget.collection!.isPinned
? const Icon(CupertinoIcons.pin_slash)
: Transform.rotate(
angle: 45 * math.pi / 180, // rotate by 45 degrees
child: const Icon(CupertinoIcons.pin),
),
),
);
}
]);
final bool isArchived = widget.collection?.isArchived() ?? false;
final bool isHidden = widget.collection?.isHidden() ?? false;
// Do not show archive option for favorite collection. If collection is
// already archived, allow user to unarchive that collection.
if (isArchived || (galleryType.canArchive() && !isHidden)) {
items.add(
PopupMenuItem(
value: AlbumPopupAction.ownedArchive,
child: Row(
children: [
Icon(isArchived ? Icons.unarchive : Icons.archive_outlined),
const Padding(
padding: EdgeInsets.all(8),
),
Text(
isArchived
? S.of(context).unarchiveAlbum
: S.of(context).archiveAlbum,
),
],
),
),
);
}
if (!isArchived && galleryType.canHide()) {
items.add(
PopupMenuItem(
value: AlbumPopupAction.ownedHide,
child: Row(
children: [
Icon(
isHidden
? Icons.visibility_outlined
: Icons.visibility_off_outlined,
),
const Padding(
padding: EdgeInsets.all(8),
),
Text(
isHidden ? S.of(context).unhide : S.of(context).hide,
),
],
),
),
);
}
if (widget.collection != null && isInternalUser) {
items.add(
PopupMenuItem(
value: AlbumPopupAction.playOnTv,
child: Row(
children: [
const Icon(Icons.tv_outlined),
const Padding(
padding: EdgeInsets.all(8),
),
Text(context.l10n.playOnTv),
],
),
),
);
}
if (galleryType.canDelete()) {
items.add(
PopupMenuItem(
value: isQuickLink
? AlbumPopupAction.removeLink
: AlbumPopupAction.delete,
child: Row(
children: [
Icon(
isQuickLink
? Icons.remove_circle_outline
: Icons.delete_outline,
),
const Padding(
padding: EdgeInsets.all(8),
),
Text(
isQuickLink
? S.of(context).removeLink
: S.of(context).deleteAlbum,
),
],
items.addAll(
[
// Do not show archive option for favorite collection. If collection is
// already archived, allow user to unarchive that collection.
if (isArchived || (galleryType.canArchive() && !isHidden))
EntePopupMenuItem(
value: AlbumPopupAction.ownedArchive,
isArchived
? S.of(context).unarchiveAlbum
: S.of(context).archiveAlbum,
icon: isArchived ? Icons.unarchive : Icons.archive_outlined,
),
),
);
}
if (galleryType == GalleryType.sharedCollection) {
final bool hasShareeArchived = widget.collection!.hasShareeArchived();
items.add(
PopupMenuItem(
value: AlbumPopupAction.sharedArchive,
child: Row(
children: [
Icon(
hasShareeArchived ? Icons.unarchive : Icons.archive_outlined,
),
const Padding(
padding: EdgeInsets.all(8),
),
Text(
hasShareeArchived
? S.of(context).unarchiveAlbum
: S.of(context).archiveAlbum,
),
],
if (!isArchived && galleryType.canHide())
EntePopupMenuItem(
value: AlbumPopupAction.ownedHide,
isHidden ? S.of(context).unhide : S.of(context).hide,
icon: isHidden
? Icons.visibility_outlined
: Icons.visibility_off_outlined,
),
),
);
items.add(
PopupMenuItem(
value: AlbumPopupAction.leave,
child: Row(
children: [
const Icon(Icons.logout),
const Padding(
padding: EdgeInsets.all(8),
),
Text(S.of(context).leaveAlbum),
],
if (widget.collection != null && isInternalUser)
EntePopupMenuItem(
value: AlbumPopupAction.playOnTv,
context.l10n.playOnTv,
icon: Icons.tv_outlined,
),
),
);
}
if (galleryType == GalleryType.localFolder) {
items.add(
PopupMenuItem(
value: AlbumPopupAction.freeUpSpace,
child: Row(
children: [
const Icon(Icons.delete_sweep_outlined),
const Padding(
padding: EdgeInsets.all(8),
),
Text(S.of(context).freeUpDeviceSpace),
],
if (galleryType.canDelete())
EntePopupMenuItem(
isQuickLink ? S.of(context).removeLink : S.of(context).deleteAlbum,
value: isQuickLink
? AlbumPopupAction.removeLink
: AlbumPopupAction.delete,
icon: isQuickLink
? Icons.remove_circle_outline
: Icons.delete_outline,
),
),
);
}
if (galleryType == GalleryType.sharedCollection)
EntePopupMenuItem(
widget.collection!.hasShareeArchived()
? S.of(context).unarchiveAlbum
: S.of(context).archiveAlbum,
value: AlbumPopupAction.sharedArchive,
icon: widget.collection!.hasShareeArchived()
? Icons.unarchive
: Icons.archive_outlined,
),
if (galleryType == GalleryType.sharedCollection)
EntePopupMenuItem(
S.of(context).leaveAlbum,
value: AlbumPopupAction.leave,
icon: Icons.logout,
),
if (galleryType == GalleryType.localFolder)
EntePopupMenuItem(
S.of(context).freeUpDeviceSpace,
value: AlbumPopupAction.freeUpSpace,
icon: Icons.delete_sweep_outlined,
),
],
);
if (items.isNotEmpty) {
actions.add(
PopupMenuButton(

View file

@ -140,7 +140,7 @@ export default function App({ Component, pageProps }: AppProps) {
<CssBaseline enableColorScheme />
{showNavbar && <AppNavbar isMobile={isMobile} />}
<MessageContainer>
{offline && t("OFFLINE_MSG")}
{isI18nReady && offline && t("OFFLINE_MSG")}
</MessageContainer>
<LoadingBar color="#51cd7c" ref={loadingBar} />

View file

@ -308,11 +308,7 @@ const PhotoFrame = ({
item: EnteFile,
) => {
log.info(
`[${
item.id
}] getSlideData called for thumbnail:${!!item.msrc} sourceLoaded:${
item.isSourceLoaded
} fetching:${fetching[item.id]}`,
`[${item.id}] getSlideData called for thumbnail: ${!!item.msrc} sourceLoaded: ${item.isSourceLoaded} fetching:${fetching[item.id]}`,
);
if (!item.msrc) {
@ -327,9 +323,7 @@ const PhotoFrame = ({
try {
updateURL(index)(item.id, url);
log.info(
`[${
item.id
}] calling invalidateCurrItems for thumbnail msrc :${!!item.msrc}`,
`[${item.id}] calling invalidateCurrItems for thumbnail msrc: ${!!item.msrc}`,
);
instance.invalidateCurrItems();
if ((instance as any).isOpen()) {
@ -381,7 +375,7 @@ const PhotoFrame = ({
try {
await updateSrcURL(index, item.id, dummyImgSrcUrl);
log.info(
`[${item.id}] calling invalidateCurrItems for live photo imgSrc, source loaded :${item.isSourceLoaded}`,
`[${item.id}] calling invalidateCurrItems for live photo imgSrc, source loaded: ${item.isSourceLoaded}`,
);
instance.invalidateCurrItems();
if ((instance as any).isOpen()) {
@ -415,7 +409,7 @@ const PhotoFrame = ({
true,
);
log.info(
`[${item.id}] calling invalidateCurrItems for live photo complete, source loaded :${item.isSourceLoaded}`,
`[${item.id}] calling invalidateCurrItems for live photo complete, source loaded: ${item.isSourceLoaded}`,
);
instance.invalidateCurrItems();
if ((instance as any).isOpen()) {
@ -433,7 +427,7 @@ const PhotoFrame = ({
try {
await updateSrcURL(index, item.id, srcURLs);
log.info(
`[${item.id}] calling invalidateCurrItems for src, source loaded :${item.isSourceLoaded}`,
`[${item.id}] calling invalidateCurrItems for src, source loaded: ${item.isSourceLoaded}`,
);
instance.invalidateCurrItems();
if ((instance as any).isOpen()) {
@ -476,9 +470,7 @@ const PhotoFrame = ({
try {
updateURL(index)(item.id, item.msrc, true);
log.info(
`[${
item.id
}] calling invalidateCurrItems for thumbnail msrc :${!!item.msrc}`,
`[${item.id}] calling invalidateCurrItems for thumbnail msrc: ${!!item.msrc}`,
);
instance.invalidateCurrItems();
if ((instance as any).isOpen()) {
@ -495,7 +487,7 @@ const PhotoFrame = ({
}
try {
log.info(
`[${item.id}] new file getConvertedVideo request- ${item.metadata.title}}`,
`[${item.id}] new file getConvertedVideo request ${item.metadata.title}}`,
);
fetching[item.id] = true;
@ -504,7 +496,7 @@ const PhotoFrame = ({
try {
await updateSrcURL(index, item.id, srcURL, true);
log.info(
`[${item.id}] calling invalidateCurrItems for src, source loaded :${item.isSourceLoaded}`,
`[${item.id}] calling invalidateCurrItems for src, source loaded: ${item.isSourceLoaded}`,
);
instance.invalidateCurrItems();
if ((instance as any).isOpen()) {

View file

@ -1,4 +1,3 @@
import { convertBytesToHumanReadable } from "@/next/file";
import { FlexWrapper } from "@ente/shared/components/Container";
import { Box, styled } from "@mui/material";
import {
@ -20,6 +19,7 @@ import {
} from "react-window";
import { Duplicate } from "services/deduplicationService";
import { EnteFile } from "types/file";
import { convertBytesToHumanReadable } from "utils/file";
export enum ITEM_TYPE {
TIME = "TIME",

View file

@ -1,4 +1,3 @@
import { convertBytesToHumanReadable } from "@/next/file";
import { FlexWrapper } from "@ente/shared/components/Container";
import { formatDate, getDate, isSameDay } from "@ente/shared/time/format";
import { Box, Checkbox, Link, Typography, styled } from "@mui/material";
@ -23,6 +22,7 @@ import {
areEqual,
} from "react-window";
import { EnteFile } from "types/file";
import { convertBytesToHumanReadable } from "utils/file";
import { handleSelectCreator } from "utils/photoFrame";
import { PublicCollectionGalleryContext } from "utils/publicCollectionGallery";

View file

@ -507,14 +507,14 @@ const ImageEditorOverlay = (props: IProps) => {
const editedFile = await getEditedFile();
const file = {
fileOrPath: editedFile,
uploadItem: editedFile,
localID: 1,
collectionID: props.file.collectionID,
};
uploadManager.prepareForNewUpload();
uploadManager.showUploadProgressDialog();
uploadManager.uploadFiles([file], [collection]);
uploadManager.uploadItems([file], [collection]);
setFileURL(null);
props.onClose();
props.closePhotoViewer();

File diff suppressed because it is too large Load diff

View file

@ -2,12 +2,16 @@ export default function UploadSelectorInputs({
getDragAndDropInputProps,
getFileSelectorInputProps,
getFolderSelectorInputProps,
getZipFileSelectorInputProps,
}) {
return (
<>
<input {...getDragAndDropInputProps()} />
<input {...getFileSelectorInputProps()} />
<input {...getFolderSelectorInputProps()} />
{getZipFileSelectorInputProps && (
<input {...getZipFileSelectorInputProps()} />
)}
</>
);
}

View file

@ -80,8 +80,6 @@ const redirectMap = new Map([
type AppContextType = {
showNavBar: (show: boolean) => void;
sharedFiles: File[];
resetSharedFiles: () => void;
mlSearchEnabled: boolean;
mapEnabled: boolean;
updateMlSearchEnabled: (enabled: boolean) => Promise<void>;
@ -114,7 +112,6 @@ export default function App({ Component, pageProps }: AppProps) {
typeof window !== "undefined" && !window.navigator.onLine,
);
const [showNavbar, setShowNavBar] = useState(false);
const [sharedFiles, setSharedFiles] = useState<File[]>(null);
const [redirectName, setRedirectName] = useState<string>(null);
const [mlSearchEnabled, setMlSearchEnabled] = useState(false);
const [mapEnabled, setMapEnabled] = useState(false);
@ -227,7 +224,6 @@ export default function App({ Component, pageProps }: AppProps) {
const setUserOnline = () => setOffline(false);
const setUserOffline = () => setOffline(true);
const resetSharedFiles = () => setSharedFiles(null);
useEffect(() => {
const redirectTo = async (redirect) => {
@ -352,22 +348,8 @@ export default function App({ Component, pageProps }: AppProps) {
<CssBaseline enableColorScheme />
{showNavbar && <AppNavbar isMobile={isMobile} />}
<MessageContainer>
{offline && t("OFFLINE_MSG")}
{isI18nReady && offline && t("OFFLINE_MSG")}
</MessageContainer>
{sharedFiles &&
(router.pathname === "/gallery" ? (
<MessageContainer>
{t("files_to_be_uploaded", {
count: sharedFiles.length,
})}
</MessageContainer>
) : (
<MessageContainer>
{t("login_to_upload_files", {
count: sharedFiles.length,
})}
</MessageContainer>
))}
<LoadingBar color="#51cd7c" ref={loadingBar} />
<DialogBox
@ -394,8 +376,6 @@ export default function App({ Component, pageProps }: AppProps) {
showNavBar,
mlSearchEnabled,
updateMlSearchEnabled,
sharedFiles,
resetSharedFiles,
startLoading,
finishLoading,
closeMessageDialog,

View file

@ -211,19 +211,27 @@ export default function Gallery() {
disabled: shouldDisableDropzone,
});
const {
selectedFiles: webFileSelectorFiles,
selectedFiles: fileSelectorFiles,
open: openFileSelector,
getInputProps: getFileSelectorInputProps,
} = useFileInput({
directory: false,
});
const {
selectedFiles: webFolderSelectorFiles,
selectedFiles: folderSelectorFiles,
open: openFolderSelector,
getInputProps: getFolderSelectorInputProps,
} = useFileInput({
directory: true,
});
const {
selectedFiles: fileSelectorZipFiles,
open: openZipFileSelector,
getInputProps: getZipFileSelectorInputProps,
} = useFileInput({
directory: false,
accept: ".zip",
});
const [isInSearchMode, setIsInSearchMode] = useState(false);
const [searchResultSummary, setSetSearchResultSummary] =
@ -1023,6 +1031,7 @@ export default function Gallery() {
getDragAndDropInputProps={getDragAndDropInputProps}
getFileSelectorInputProps={getFileSelectorInputProps}
getFolderSelectorInputProps={getFolderSelectorInputProps}
getZipFileSelectorInputProps={getZipFileSelectorInputProps}
/>
{blockingLoad && (
<LoadingOverlay>
@ -1112,7 +1121,6 @@ export default function Gallery() {
null,
false,
)}
uploadTypeSelectorIntent={uploadTypeSelectorIntent}
setLoading={setBlockingLoad}
setCollectionNamerAttributes={setCollectionNamerAttributes}
setShouldDisableDropzone={setShouldDisableDropzone}
@ -1121,13 +1129,18 @@ export default function Gallery() {
isFirstUpload={
!hasNonSystemCollections(collectionSummaries)
}
webFileSelectorFiles={webFileSelectorFiles}
webFolderSelectorFiles={webFolderSelectorFiles}
dragAndDropFiles={dragAndDropFiles}
uploadTypeSelectorView={uploadTypeSelectorView}
showUploadFilesDialog={openFileSelector}
showUploadDirsDialog={openFolderSelector}
showSessionExpiredMessage={showSessionExpiredMessage}
{...{
dragAndDropFiles,
openFileSelector,
fileSelectorFiles,
openFolderSelector,
folderSelectorFiles,
openZipFileSelector,
fileSelectorZipFiles,
uploadTypeSelectorIntent,
uploadTypeSelectorView,
showSessionExpiredMessage,
}}
/>
<Sidebar
collectionSummaries={collectionSummaries}

View file

@ -118,14 +118,14 @@ export default function PublicCollectionGallery() {
disabled: shouldDisableDropzone,
});
const {
selectedFiles: webFileSelectorFiles,
selectedFiles: fileSelectorFiles,
open: openFileSelector,
getInputProps: getFileSelectorInputProps,
} = useFileInput({
directory: false,
});
const {
selectedFiles: webFolderSelectorFiles,
selectedFiles: folderSelectorFiles,
open: openFolderSelector,
getInputProps: getFolderSelectorInputProps,
} = useFileInput({
@ -550,6 +550,7 @@ export default function PublicCollectionGallery() {
getDragAndDropInputProps={getDragAndDropInputProps}
getFileSelectorInputProps={getFileSelectorInputProps}
getFolderSelectorInputProps={getFolderSelectorInputProps}
getZipFileSelectorInputProps={undefined}
/>
<SharedAlbumNavbar
showUploadButton={
@ -582,17 +583,19 @@ export default function PublicCollectionGallery() {
setLoading={setBlockingLoad}
setShouldDisableDropzone={setShouldDisableDropzone}
setFiles={setPublicFiles}
webFileSelectorFiles={webFileSelectorFiles}
webFolderSelectorFiles={webFolderSelectorFiles}
dragAndDropFiles={dragAndDropFiles}
uploadTypeSelectorView={uploadTypeSelectorView}
closeUploadTypeSelector={closeUploadTypeSelectorView}
showUploadFilesDialog={openFileSelector}
showUploadDirsDialog={openFolderSelector}
showSessionExpiredMessage={showPublicLinkExpiredMessage}
uploadTypeSelectorIntent={
UploadTypeSelectorIntent.collectPhotos
}
{...{
dragAndDropFiles,
openFileSelector,
fileSelectorFiles,
openFolderSelector,
folderSelectorFiles,
}}
/>
<FilesDownloadProgress
attributesList={filesDownloadProgressAttributesList}

View file

@ -10,7 +10,7 @@ import { Events, eventBus } from "@ente/shared/events";
import { isPlaybackPossible } from "@ente/shared/media/video-playback";
import { Remote } from "comlink";
import isElectron from "is-electron";
import * as ffmpegService from "services/ffmpeg";
import * as ffmpeg from "services/ffmpeg";
import { EnteFile } from "types/file";
import { generateStreamFromArrayBuffer, getRenderableImage } from "utils/file";
import { PhotosDownloadClient } from "./clients/photos";
@ -610,17 +610,13 @@ async function getPlayableVideo(
if (!forceConvert && !runOnWeb && !isElectron()) {
return null;
}
log.info(
`video format not supported, converting it name: ${videoNameTitle}`,
);
const mp4ConvertedVideo = await ffmpegService.convertToMP4(
new File([videoBlob], videoNameTitle),
);
log.info(`video successfully converted ${videoNameTitle}`);
return new Blob([mp4ConvertedVideo]);
// TODO(MR): This might not work for very large (~ GB) videos. Test.
log.info(`Converting video ${videoNameTitle} to mp4`);
const convertedVideoData = await ffmpeg.convertToMP4(videoBlob);
return new Blob([convertedVideoData]);
}
} catch (e) {
log.error("video conversion failed", e);
log.error("Video conversion failed", e);
return null;
}
}

View file

@ -46,13 +46,13 @@ const exportRecordFileName = "export_status.json";
/**
* Name of the top level directory which we create underneath the selected
* directory when the user starts an export to the filesystem.
* directory when the user starts an export to the file system.
*/
const exportDirectoryName = "Ente Photos";
/**
* Name of the directory in which we put our metadata when exporting to the
* filesystem.
* Name of the directory in which we put our metadata when exporting to the file
* system.
*/
export const exportMetadataDirectoryName = "metadata";
@ -1378,7 +1378,7 @@ const isExportInProgress = (exportStage: ExportStage) =>
*
* Also move its associated metadata JSON to Trash.
*
* @param exportDir The root directory on the user's filesystem where we are
* @param exportDir The root directory on the user's file system where we are
* exporting to.
* */
const moveToTrash = async (

View file

@ -1,4 +1,3 @@
import { ElectronFile } from "@/next/types/file";
import type { Electron } from "@/next/types/ipc";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { validateAndGetCreationUnixTimeInMicroSeconds } from "@ente/shared/time";
@ -11,6 +10,11 @@ import {
import { NULL_LOCATION } from "constants/upload";
import type { ParsedExtractedMetadata } from "types/metadata";
import type { DedicatedFFmpegWorker } from "worker/ffmpeg.worker";
import {
toDataOrPathOrZipEntry,
type DesktopUploadItem,
type UploadItem,
} from "./upload/types";
/**
* Generate a thumbnail for the given video using a wasm FFmpeg running in a web
@ -51,7 +55,7 @@ const _generateVideoThumbnail = async (
* for the new files that the user is adding.
*
* @param dataOrPath The input video's data or the path to the video on the
* user's local filesystem. See: [Note: Reading a fileOrPath].
* user's local file system. See: [Note: Reading a UploadItem].
*
* @returns JPEG data of the generated thumbnail.
*
@ -59,12 +63,12 @@ const _generateVideoThumbnail = async (
*/
export const generateVideoThumbnailNative = async (
electron: Electron,
dataOrPath: Uint8Array | string,
desktopUploadItem: DesktopUploadItem,
) =>
_generateVideoThumbnail((seekTime: number) =>
electron.ffmpegExec(
makeGenThumbnailCommand(seekTime),
dataOrPath,
toDataOrPathOrZipEntry(desktopUploadItem),
"jpeg",
0,
),
@ -93,18 +97,23 @@ const makeGenThumbnailCommand = (seekTime: number) => [
* This function is called during upload, when we need to extract the metadata
* of videos that the user is uploading.
*
* @param fileOrPath A {@link File}, or the absolute path to a file on the
* @param uploadItem A {@link File}, or the absolute path to a file on the
* user's local filesytem. A path can only be provided when we're running in the
* context of our desktop app.
*/
export const extractVideoMetadata = async (
fileOrPath: File | string,
uploadItem: UploadItem,
): Promise<ParsedExtractedMetadata> => {
const command = extractVideoMetadataCommand;
const outputData =
fileOrPath instanceof File
? await ffmpegExecWeb(command, fileOrPath, "txt", 0)
: await electron.ffmpegExec(command, fileOrPath, "txt", 0);
uploadItem instanceof File
? await ffmpegExecWeb(command, uploadItem, "txt", 0)
: await electron.ffmpegExec(
command,
toDataOrPathOrZipEntry(uploadItem),
"txt",
0,
);
return parseFFmpegExtractedMetadata(outputData);
};
@ -200,23 +209,6 @@ function parseCreationTime(creationTime: string) {
return dateTime;
}
/** Called when viewing a file */
export async function convertToMP4(file: File) {
return await ffmpegExec2(
[
ffmpegPathPlaceholder,
"-i",
inputPathPlaceholder,
"-preset",
"ultrafast",
outputPathPlaceholder,
],
file,
"mp4",
30 * 1000,
);
}
/**
* Run the given FFmpeg command using a wasm FFmpeg running in a web worker.
*
@ -234,55 +226,53 @@ const ffmpegExecWeb = async (
};
/**
* Run the given FFmpeg command using a native FFmpeg binary bundled with our
* desktop app.
* Convert a video from a format that is not supported in the browser to MP4.
*
* This function is called when the user views a video or a live photo, and we
* want to play it back. The idea is to convert it to MP4 which has much more
* universal support in browsers.
*
* @param blob The video blob.
*
* @returns The mp4 video data.
*/
export const convertToMP4 = async (blob: Blob) =>
ffmpegExecNativeOrWeb(
[
ffmpegPathPlaceholder,
"-i",
inputPathPlaceholder,
"-preset",
"ultrafast",
outputPathPlaceholder,
],
blob,
"mp4",
30 * 1000,
);
/**
* Run the given FFmpeg command using a native FFmpeg binary when we're running
* in the context of our desktop app, otherwise using the browser based wasm
* FFmpeg implemenation.
*
* See also: {@link ffmpegExecWeb}.
*/
/*
TODO(MR): Remove me
const ffmpegExecNative = async (
electron: Electron,
const ffmpegExecNativeOrWeb = async (
command: string[],
blob: Blob,
timeoutMs: number = 0,
) => {
const electron = globalThis.electron;
if (electron) {
const data = new Uint8Array(await blob.arrayBuffer());
return await electron.ffmpegExec(command, data, timeoutMs);
} else {
const worker = await workerFactory.lazy();
return await worker.exec(command, blob, timeoutMs);
}
};
*/
const ffmpegExec2 = async (
command: string[],
inputFile: File | ElectronFile,
outputFileExtension: string,
timeoutMS: number = 0,
timeoutMs: number,
) => {
const electron = globalThis.electron;
if (electron || false) {
throw new Error("WIP");
// return electron.ffmpegExec(
// command,
// /* TODO(MR): ElectronFile changes */
// inputFile as unknown as string,
// outputFileName,
// timeoutMS,
// );
} else {
/* TODO(MR): ElectronFile changes */
return ffmpegExecWeb(
if (electron)
return electron.ffmpegExec(
command,
inputFile as File,
new Uint8Array(await blob.arrayBuffer()),
outputFileExtension,
timeoutMS,
timeoutMs,
);
}
else return ffmpegExecWeb(command, blob, outputFileExtension, timeoutMs);
};
/** Lazily create a singleton instance of our worker */

View file

@ -1,4 +1,3 @@
import { convertBytesToHumanReadable } from "@/next/file";
import log from "@/next/log";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { CustomError } from "@ente/shared/error";
@ -51,15 +50,10 @@ class HEICConverter {
const startTime = Date.now();
const convertedHEIC =
await worker.heicToJPEG(fileBlob);
log.info(
`originalFileSize:${convertBytesToHumanReadable(
fileBlob?.size,
)},convertedFileSize:${convertBytesToHumanReadable(
convertedHEIC?.size,
)}, heic conversion time: ${
Date.now() - startTime
}ms `,
const ms = Math.round(
Date.now() - startTime,
);
log.debug(() => `heic => jpeg (${ms} ms)`);
clearTimeout(timeout);
resolve(convertedHEIC);
} catch (e) {
@ -71,18 +65,7 @@ class HEICConverter {
);
if (!convertedHEIC || convertedHEIC?.size === 0) {
log.error(
`converted heic fileSize is Zero - ${JSON.stringify(
{
originalFileSize:
convertBytesToHumanReadable(
fileBlob?.size ?? 0,
),
convertedFileSize:
convertBytesToHumanReadable(
convertedHEIC?.size ?? 0,
),
},
)}`,
`Converted HEIC file is empty (original was ${fileBlob?.size} bytes)`,
);
}
await new Promise((resolve) => {
@ -94,7 +77,7 @@ class HEICConverter {
this.workerPool.push(convertWorker);
return convertedHEIC;
} catch (e) {
log.error("heic conversion failed", e);
log.error("HEIC conversion failed", e);
convertWorker.terminate();
this.workerPool.push(createComlinkWorker());
throw e;

View file

@ -5,6 +5,8 @@ import { nameAndExtension } from "@/next/file";
import log from "@/next/log";
import { NULL_LOCATION } from "constants/upload";
import type { Location } from "types/metadata";
import { readStream } from "utils/native-stream";
import type { UploadItem } from "./types";
export interface ParsedMetadataJSON {
creationTime: number;
@ -75,21 +77,29 @@ function getFileOriginalName(fileName: string) {
/** Try to parse the contents of a metadata JSON file from a Google Takeout. */
export const tryParseTakeoutMetadataJSON = async (
fileOrPath: File | string,
uploadItem: UploadItem,
): Promise<ParsedMetadataJSON | undefined> => {
try {
const text =
fileOrPath instanceof File
? await fileOrPath.text()
: await ensureElectron().fs.readTextFile(fileOrPath);
return parseMetadataJSONText(text);
return parseMetadataJSONText(await uploadItemText(uploadItem));
} catch (e) {
log.error("Failed to parse takeout metadata JSON", e);
return undefined;
}
};
const uploadItemText = async (uploadItem: UploadItem) => {
if (uploadItem instanceof File) {
return await uploadItem.text();
} else if (typeof uploadItem == "string") {
return await ensureElectron().fs.readTextFile(uploadItem);
} else if (Array.isArray(uploadItem)) {
const { response } = await readStream(ensureElectron(), uploadItem);
return await response.text();
} else {
return await uploadItem.file.text();
}
};
const NULL_PARSED_METADATA_JSON: ParsedMetadataJSON = {
creationTime: null,
modificationTime: null,

View file

@ -4,6 +4,7 @@ import { type Electron } from "@/next/types/ipc";
import { withTimeout } from "@ente/shared/utils";
import * as ffmpeg from "services/ffmpeg";
import { heicToJPEG } from "services/heic-convert";
import { toDataOrPathOrZipEntry, type DesktopUploadItem } from "./types";
/** Maximum width or height of the generated thumbnail */
const maxThumbnailDimension = 720;
@ -178,7 +179,7 @@ const percentageSizeDiff = (
* object which we use to perform IPC with the Node.js side of our desktop app.
*
* @param dataOrPath Contents of an image or video file, or the path to the
* image or video file on the user's local filesystem, whose thumbnail we want
* image or video file on the user's local file system, whose thumbnail we want
* to generate.
*
* @param fileTypeInfo The type information for {@link dataOrPath}.
@ -189,16 +190,16 @@ const percentageSizeDiff = (
*/
export const generateThumbnailNative = async (
electron: Electron,
dataOrPath: Uint8Array | string,
desktopUploadItem: DesktopUploadItem,
fileTypeInfo: FileTypeInfo,
): Promise<Uint8Array> =>
fileTypeInfo.fileType === FILE_TYPE.IMAGE
? await electron.generateImageThumbnail(
dataOrPath,
toDataOrPathOrZipEntry(desktopUploadItem),
maxThumbnailDimension,
maxThumbnailSize,
)
: ffmpeg.generateVideoThumbnailNative(electron, dataOrPath);
: ffmpeg.generateVideoThumbnailNative(electron, desktopUploadItem);
/**
* A fallback, black, thumbnail for use in cases where thumbnail generation

View file

@ -0,0 +1,47 @@
import type { FileAndPath } from "@/next/types/file";
import type { ZipItem } from "@/next/types/ipc";
/**
* An item to upload is one of the following:
*
* 1. A file drag-and-dropped or selected by the user when we are running in the
* web browser. These is the {@link File} case.
*
* 2. A file drag-and-dropped or selected by the user when we are running in the
* context of our desktop app. In such cases, we also have the absolute path
* of the file in the user's local file system. This is the
* {@link FileAndPath} case.
*
* 3. A file path programmatically requested by the desktop app. For example, we
* might be resuming a previously interrupted upload after an app restart
* (thus we no longer have access to the {@link File} from case 2). Or we
* could be uploading a file this is in one of the folders the user has asked
* us to watch for changes. This is the `string` case.
*
* 4. A file within a zip file on the user's local file system. This too is only
* possible when we are running in the context of our desktop app. The user
* might have drag-and-dropped or selected a zip file, or it might be a zip
* file that they'd previously selected but we now are resuming an
* interrupted upload of. Either ways, what we have is a tuple containing the
* (path to zip file, and the name of an entry within that zip file). This is
* the {@link ZipItem} case.
*
* Also see: [Note: Reading a UploadItem].
*/
export type UploadItem = File | FileAndPath | string | ZipItem;
/**
* The of cases of {@link UploadItem} that apply when we're running in the
* context of our desktop app.
*/
export type DesktopUploadItem = Exclude<UploadItem, File>;
/**
* For each of cases of {@link UploadItem} that apply when we're running in the
* context of our desktop app, return a value that can be passed to
* {@link Electron} functions over IPC.
*/
export const toDataOrPathOrZipEntry = (desktopUploadItem: DesktopUploadItem) =>
typeof desktopUploadItem == "string" || Array.isArray(desktopUploadItem)
? desktopUploadItem
: desktopUploadItem.path;

View file

@ -3,7 +3,6 @@ import { potentialFileTypeFromExtension } from "@/media/live-photo";
import { ensureElectron } from "@/next/electron";
import { lowercaseExtension, nameAndExtension } from "@/next/file";
import log from "@/next/log";
import { ElectronFile } from "@/next/types/file";
import type { Electron } from "@/next/types/ipc";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { ensure } from "@/utils/ensure";
@ -36,7 +35,8 @@ import {
tryParseTakeoutMetadataJSON,
type ParsedMetadataJSON,
} from "./takeout";
import UploadService, { fopFileName, fopSize, uploader } from "./uploadService";
import type { UploadItem } from "./types";
import UploadService, { uploadItemFileName, uploader } from "./uploadService";
export type FileID = number;
@ -83,17 +83,17 @@ export interface ProgressUpdater {
/** The number of uploads to process in parallel. */
const maxConcurrentUploads = 4;
export interface FileWithCollection {
export interface UploadItemWithCollection {
localID: number;
collectionID: number;
isLivePhoto?: boolean;
fileOrPath?: File | string;
uploadItem?: UploadItem;
livePhotoAssets?: LivePhotoAssets;
}
export interface LivePhotoAssets {
image: File | string;
video: File | string;
image: UploadItem;
video: UploadItem;
}
export interface PublicUploadProps {
@ -320,9 +320,9 @@ class UploadManager {
ComlinkWorker<typeof DedicatedCryptoWorker>
>(maxConcurrentUploads);
private parsedMetadataJSONMap: Map<string, ParsedMetadataJSON>;
private filesToBeUploaded: ClusteredFile[];
private remainingFiles: ClusteredFile[] = [];
private failedFiles: ClusteredFile[];
private itemsToBeUploaded: ClusteredUploadItem[];
private remainingItems: ClusteredUploadItem[] = [];
private failedItems: ClusteredUploadItem[];
private existingFiles: EnteFile[];
private setFiles: SetFiles;
private collections: Map<number, Collection>;
@ -359,9 +359,9 @@ class UploadManager {
}
private resetState() {
this.filesToBeUploaded = [];
this.remainingFiles = [];
this.failedFiles = [];
this.itemsToBeUploaded = [];
this.remainingItems = [];
this.failedItems = [];
this.parsedMetadataJSONMap = new Map<string, ParsedMetadataJSON>();
this.uploaderName = null;
@ -387,62 +387,62 @@ class UploadManager {
* It is an error to call this method when there is already an in-progress
* upload.
*
* @param filesWithCollectionToUploadIn The files to upload, each paired
* with the id of the collection that they should be uploaded into.
* @param itemsWithCollection The items to upload, each paired with the id
* of the collection that they should be uploaded into.
*
* @returns `true` if at least one file was processed
*/
public async uploadFiles(
filesWithCollectionToUploadIn: FileWithCollection[],
public async uploadItems(
itemsWithCollection: UploadItemWithCollection[],
collections: Collection[],
uploaderName?: string,
) {
if (this.uploadInProgress)
throw new Error("Cannot run multiple uploads at once");
log.info(`Uploading ${filesWithCollectionToUploadIn.length} files`);
log.info(`Uploading ${itemsWithCollection.length} files`);
this.uploadInProgress = true;
this.uploaderName = uploaderName;
try {
await this.updateExistingFilesAndCollections(collections);
const namedFiles = filesWithCollectionToUploadIn.map(
makeFileWithCollectionIDAndName,
const namedItems = itemsWithCollection.map(
makeUploadItemWithCollectionIDAndName,
);
this.uiService.setFiles(namedFiles);
this.uiService.setFiles(namedItems);
const [metadataFiles, mediaFiles] =
splitMetadataAndMediaFiles(namedFiles);
const [metadataItems, mediaItems] =
splitMetadataAndMediaItems(namedItems);
if (metadataFiles.length) {
if (metadataItems.length) {
this.uiService.setUploadStage(
UPLOAD_STAGES.READING_GOOGLE_METADATA_FILES,
);
await this.parseMetadataJSONFiles(metadataFiles);
await this.parseMetadataJSONFiles(metadataItems);
}
if (mediaFiles.length) {
const clusteredMediaFiles = await clusterLivePhotos(mediaFiles);
if (mediaItems.length) {
const clusteredMediaItems = await clusterLivePhotos(mediaItems);
this.abortIfCancelled();
// Live photos might've been clustered together, reset the list
// of files to reflect that.
this.uiService.setFiles(clusteredMediaFiles);
this.uiService.setFiles(clusteredMediaItems);
this.uiService.setHasLivePhoto(
mediaFiles.length != clusteredMediaFiles.length,
mediaItems.length != clusteredMediaItems.length,
);
await this.uploadMediaFiles(clusteredMediaFiles);
await this.uploadMediaItems(clusteredMediaItems);
}
} catch (e) {
if (e.message === CustomError.UPLOAD_CANCELLED) {
if (isElectron()) {
this.remainingFiles = [];
this.remainingItems = [];
await cancelRemainingUploads();
}
} else {
@ -479,14 +479,18 @@ class UploadManager {
);
}
private async parseMetadataJSONFiles(files: FileWithCollectionIDAndName[]) {
this.uiService.reset(files.length);
private async parseMetadataJSONFiles(
items: UploadItemWithCollectionIDAndName[],
) {
this.uiService.reset(items.length);
for (const { fileOrPath, fileName, collectionID } of files) {
for (const { uploadItem, fileName, collectionID } of items) {
this.abortIfCancelled();
log.info(`Parsing metadata JSON ${fileName}`);
const metadataJSON = await tryParseTakeoutMetadataJSON(fileOrPath);
const metadataJSON = await tryParseTakeoutMetadataJSON(
ensure(uploadItem),
);
if (metadataJSON) {
this.parsedMetadataJSONMap.set(
getMetadataJSONMapKeyForJSON(collectionID, fileName),
@ -497,48 +501,48 @@ class UploadManager {
}
}
private async uploadMediaFiles(mediaFiles: ClusteredFile[]) {
this.filesToBeUploaded = [...this.filesToBeUploaded, ...mediaFiles];
private async uploadMediaItems(mediaItems: ClusteredUploadItem[]) {
this.itemsToBeUploaded = [...this.itemsToBeUploaded, ...mediaItems];
if (isElectron()) {
this.remainingFiles = [...this.remainingFiles, ...mediaFiles];
this.remainingItems = [...this.remainingItems, ...mediaItems];
}
this.uiService.reset(mediaFiles.length);
this.uiService.reset(mediaItems.length);
await UploadService.setFileCount(mediaFiles.length);
await UploadService.setFileCount(mediaItems.length);
this.uiService.setUploadStage(UPLOAD_STAGES.UPLOADING);
const uploadProcesses = [];
for (
let i = 0;
i < maxConcurrentUploads && this.filesToBeUploaded.length > 0;
i < maxConcurrentUploads && this.itemsToBeUploaded.length > 0;
i++
) {
this.cryptoWorkers[i] = getDedicatedCryptoWorker();
const worker = await this.cryptoWorkers[i].remote;
uploadProcesses.push(this.uploadNextFileInQueue(worker));
uploadProcesses.push(this.uploadNextItemInQueue(worker));
}
await Promise.all(uploadProcesses);
}
private async uploadNextFileInQueue(worker: Remote<DedicatedCryptoWorker>) {
private async uploadNextItemInQueue(worker: Remote<DedicatedCryptoWorker>) {
const uiService = this.uiService;
while (this.filesToBeUploaded.length > 0) {
while (this.itemsToBeUploaded.length > 0) {
this.abortIfCancelled();
const clusteredFile = this.filesToBeUploaded.pop();
const { localID, collectionID } = clusteredFile;
const clusteredItem = this.itemsToBeUploaded.pop();
const { localID, collectionID } = clusteredItem;
const collection = this.collections.get(collectionID);
const uploadableFile = { ...clusteredFile, collection };
const uploadableItem = { ...clusteredItem, collection };
uiService.setFileProgress(localID, 0);
await wait(0);
const { uploadResult, uploadedFile } = await uploader(
uploadableFile,
uploadableItem,
this.uploaderName,
this.existingFiles,
this.parsedMetadataJSONMap,
@ -560,7 +564,7 @@ class UploadManager {
);
const finalUploadResult = await this.postUploadTask(
uploadableFile,
uploadableItem,
uploadResult,
uploadedFile,
);
@ -572,20 +576,20 @@ class UploadManager {
}
private async postUploadTask(
uploadableFile: UploadableFile,
uploadableItem: UploadableUploadItem,
uploadResult: UPLOAD_RESULT,
uploadedFile: EncryptedEnteFile | EnteFile | undefined,
) {
log.info(
`Uploaded ${uploadableFile.fileName} with result ${uploadResult}`,
`Uploaded ${uploadableItem.fileName} with result ${uploadResult}`,
);
try {
let decryptedFile: EnteFile;
await this.removeFromPendingUploads(uploadableFile);
await this.removeFromPendingUploads(uploadableItem);
switch (uploadResult) {
case UPLOAD_RESULT.FAILED:
case UPLOAD_RESULT.BLOCKED:
this.failedFiles.push(uploadableFile);
this.failedItems.push(uploadableItem);
break;
case UPLOAD_RESULT.ALREADY_UPLOADED:
decryptedFile = uploadedFile as EnteFile;
@ -598,7 +602,7 @@ class UploadManager {
case UPLOAD_RESULT.UPLOADED_WITH_STATIC_THUMBNAIL:
decryptedFile = await decryptFile(
uploadedFile as EncryptedEnteFile,
uploadableFile.collection.key,
uploadableItem.collection.key,
);
break;
case UPLOAD_RESULT.UNSUPPORTED:
@ -619,8 +623,8 @@ class UploadManager {
eventBus.emit(Events.FILE_UPLOADED, {
enteFile: decryptedFile,
localFile:
uploadableFile.fileOrPath ??
uploadableFile.livePhotoAssets.image,
uploadableItem.uploadItem ??
uploadableItem.livePhotoAssets.image,
});
} catch (e) {
log.warn("Ignoring error in fileUploaded handlers", e);
@ -629,7 +633,7 @@ class UploadManager {
}
await this.watchFolderCallback(
uploadResult,
uploadableFile,
uploadableItem,
uploadedFile as EncryptedEnteFile,
);
return uploadResult;
@ -641,7 +645,7 @@ class UploadManager {
private async watchFolderCallback(
fileUploadResult: UPLOAD_RESULT,
fileWithCollection: ClusteredFile,
fileWithCollection: ClusteredUploadItem,
uploadedFile: EncryptedEnteFile,
) {
if (isElectron()) {
@ -661,9 +665,9 @@ class UploadManager {
uploadCancelService.requestUploadCancelation();
}
public getFailedFilesWithCollections() {
public getFailedItemsWithCollections() {
return {
files: this.failedFiles,
items: this.failedItems,
collections: [...this.collections.values()],
};
}
@ -684,13 +688,15 @@ class UploadManager {
this.setFiles((files) => sortFiles([...files, decryptedFile]));
}
private async removeFromPendingUploads({ localID }: ClusteredFile) {
private async removeFromPendingUploads(
clusteredUploadItem: ClusteredUploadItem,
) {
const electron = globalThis.electron;
if (electron) {
this.remainingFiles = this.remainingFiles.filter(
(f) => f.localID != localID,
this.remainingItems = this.remainingItems.filter(
(f) => f.localID != clusteredUploadItem.localID,
);
await updatePendingUploads(electron, this.remainingFiles);
await markUploaded(electron, clusteredUploadItem);
}
}
@ -709,24 +715,25 @@ export default new UploadManager();
* As files progress through stages, they get more and more bits tacked on to
* them. These types document the journey.
*
* - The input is {@link FileWithCollection}. This can either be a new
* {@link FileWithCollection}, in which case it'll only have a
* {@link localID}, {@link collectionID} and a {@link fileOrPath}. Or it could
* be a retry, in which case it'll not have a {@link fileOrPath} but instead
* - The input is {@link UploadItemWithCollection}. This can either be a new
* {@link UploadItemWithCollection}, in which case it'll only have a
* {@link localID}, {@link collectionID} and a {@link uploadItem}. Or it could
* be a retry, in which case it'll not have a {@link uploadItem} but instead
* will have data from a previous stage (concretely, it'll just be a
* relabelled {@link ClusteredFile}), like a snake eating its tail.
* relabelled {@link ClusteredUploadItem}), like a snake eating its tail.
*
* - Immediately we convert it to {@link FileWithCollectionIDAndName}. This is
* to mostly systematize what we have, and also attach a {@link fileName}.
* - Immediately we convert it to {@link UploadItemWithCollectionIDAndName}.
* This is to mostly systematize what we have, and also attach a
* {@link fileName}.
*
* - These then get converted to "assets", whereby both parts of a live photo
* are combined. This is a {@link ClusteredFile}.
* are combined. This is a {@link ClusteredUploadItem}.
*
* - On to the {@link ClusteredFile} we attach the corresponding
* {@link collection}, giving us {@link UploadableFile}. This is what gets
* queued and then passed to the {@link uploader}.
* - On to the {@link ClusteredUploadItem} we attach the corresponding
* {@link collection}, giving us {@link UploadableUploadItem}. This is what
* gets queued and then passed to the {@link uploader}.
*/
type FileWithCollectionIDAndName = {
type UploadItemWithCollectionIDAndName = {
/** A unique ID for the duration of the upload */
localID: number;
/** The ID of the collection to which this file should be uploaded. */
@ -740,64 +747,57 @@ type FileWithCollectionIDAndName = {
/** `true` if this is a live photo. */
isLivePhoto?: boolean;
/* Valid for non-live photos */
fileOrPath?: File | string;
uploadItem?: UploadItem;
/* Valid for live photos */
livePhotoAssets?: LivePhotoAssets;
};
const makeFileWithCollectionIDAndName = (
f: FileWithCollection,
): FileWithCollectionIDAndName => {
const fileOrPath = f.fileOrPath;
/* TODO(MR): ElectronFile */
if (!(fileOrPath instanceof File || typeof fileOrPath == "string"))
throw new Error(`Unexpected file ${f}`);
return {
localID: ensure(f.localID),
collectionID: ensure(f.collectionID),
fileName: ensure(
f.isLivePhoto
? fopFileName(f.livePhotoAssets.image)
: fopFileName(fileOrPath),
),
isLivePhoto: f.isLivePhoto,
fileOrPath: fileOrPath,
livePhotoAssets: f.livePhotoAssets,
};
};
/**
 * Normalize an {@link UploadItemWithCollection} into the shape used by the
 * subsequent upload stages, deriving and attaching the {@link fileName}.
 *
 * Throws (via {@link ensure}) if the localID, collectionID or the item whose
 * name we need is missing.
 */
const makeUploadItemWithCollectionIDAndName = (
    f: UploadItemWithCollection,
): UploadItemWithCollectionIDAndName => {
    // For live photos, the name is derived from the image part of the pair.
    const namedItem = f.isLivePhoto ? f.livePhotoAssets.image : f.uploadItem;
    return {
        localID: ensure(f.localID),
        collectionID: ensure(f.collectionID),
        fileName: ensure(uploadItemFileName(namedItem)),
        isLivePhoto: f.isLivePhoto,
        uploadItem: f.uploadItem,
        livePhotoAssets: f.livePhotoAssets,
    };
};
/**
* A file with both parts of a live photo clubbed together.
* An upload item with both parts of a live photo clubbed together.
*
* See: [Note: Intermediate file types during upload].
*/
type ClusteredFile = {
type ClusteredUploadItem = {
localID: number;
collectionID: number;
fileName: string;
isLivePhoto: boolean;
fileOrPath?: File | string;
uploadItem?: UploadItem;
livePhotoAssets?: LivePhotoAssets;
};
/**
* The file that we hand off to the uploader. Essentially {@link ClusteredFile}
* with the {@link collection} attached to it.
* The file that we hand off to the uploader. Essentially
* {@link ClusteredUploadItem} with the {@link collection} attached to it.
*
* See: [Note: Intermediate file types during upload].
*/
export type UploadableFile = ClusteredFile & {
export type UploadableUploadItem = ClusteredUploadItem & {
collection: Collection;
};
const splitMetadataAndMediaFiles = (
files: FileWithCollectionIDAndName[],
const splitMetadataAndMediaItems = (
items: UploadItemWithCollectionIDAndName[],
): [
metadata: FileWithCollectionIDAndName[],
media: FileWithCollectionIDAndName[],
metadata: UploadItemWithCollectionIDAndName[],
media: UploadItemWithCollectionIDAndName[],
] =>
files.reduce(
items.reduce(
([metadata, media], f) => {
if (lowercaseExtension(f.fileName) == "json") metadata.push(f);
else media.push(f);
@ -806,59 +806,58 @@ const splitMetadataAndMediaFiles = (
[[], []],
);
export const setToUploadCollection = async (collections: Collection[]) => {
let collectionName: string = null;
/* collection being one suggest one of two things
1. Either the user has upload to a single existing collection
2. Created a new single collection to upload to
may have had multiple folder, but chose to upload
to one album
hence saving the collection name when upload collection count is 1
helps the info of user choosing this options
and on next upload we can directly start uploading to this collection
*/
if (collections.length === 1) {
collectionName = collections[0].name;
const markUploaded = async (electron: Electron, item: ClusteredUploadItem) => {
// TODO: This can be done better
if (item.isLivePhoto) {
const [p0, p1] = [
item.livePhotoAssets.image,
item.livePhotoAssets.video,
];
if (Array.isArray(p0) && Array.isArray(p1)) {
electron.markUploadedZipItems([p0, p1]);
} else if (typeof p0 == "string" && typeof p1 == "string") {
electron.markUploadedFiles([p0, p1]);
} else if (
p0 &&
typeof p0 == "object" &&
"path" in p0 &&
p1 &&
typeof p1 == "object" &&
"path" in p1
) {
electron.markUploadedFiles([p0.path, p1.path]);
} else {
throw new Error(
"Attempting to mark upload completion of unexpected desktop upload items",
);
}
} else {
const p = ensure(item.uploadItem);
if (Array.isArray(p)) {
electron.markUploadedZipItems([p]);
} else if (typeof p == "string") {
electron.markUploadedFiles([p]);
} else if (p && typeof p == "object" && "path" in p) {
electron.markUploadedFiles([p.path]);
} else {
throw new Error(
"Attempting to mark upload completion of unexpected desktop upload items",
);
}
}
await ensureElectron().setPendingUploadCollection(collectionName);
};
const updatePendingUploads = async (
electron: Electron,
files: ClusteredFile[],
) => {
const paths = files
.map((file) =>
file.isLivePhoto
? [file.livePhotoAssets.image, file.livePhotoAssets.video]
: [file.fileOrPath],
)
.flat()
.map((f) => getFilePathElectron(f));
await electron.setPendingUploadFiles("files", paths);
};
/**
* NOTE: a stop gap measure, only meant to be called by code that is running in
* the context of a desktop app initiated upload
*/
export const getFilePathElectron = (file: File | ElectronFile | string) =>
typeof file == "string" ? file : (file as ElectronFile).path;
const cancelRemainingUploads = async () => {
const electron = ensureElectron();
await electron.setPendingUploadCollection(undefined);
await electron.setPendingUploadFiles("zips", []);
await electron.setPendingUploadFiles("files", []);
};
const cancelRemainingUploads = () => ensureElectron().clearPendingUploads();
/**
* Go through the given files, combining any sibling image + video assets into a
* single live photo when appropriate.
*/
const clusterLivePhotos = async (files: FileWithCollectionIDAndName[]) => {
const result: ClusteredFile[] = [];
files
const clusterLivePhotos = async (
items: UploadItemWithCollectionIDAndName[],
) => {
const result: ClusteredUploadItem[] = [];
items
.sort((f, g) =>
nameAndExtension(f.fileName)[0].localeCompare(
nameAndExtension(g.fileName)[0],
@ -866,22 +865,22 @@ const clusterLivePhotos = async (files: FileWithCollectionIDAndName[]) => {
)
.sort((f, g) => f.collectionID - g.collectionID);
let index = 0;
while (index < files.length - 1) {
const f = files[index];
const g = files[index + 1];
while (index < items.length - 1) {
const f = items[index];
const g = items[index + 1];
const fFileType = potentialFileTypeFromExtension(f.fileName);
const gFileType = potentialFileTypeFromExtension(g.fileName);
const fa: PotentialLivePhotoAsset = {
fileName: f.fileName,
fileType: fFileType,
collectionID: f.collectionID,
fileOrPath: f.fileOrPath,
uploadItem: f.uploadItem,
};
const ga: PotentialLivePhotoAsset = {
fileName: g.fileName,
fileType: gFileType,
collectionID: g.collectionID,
fileOrPath: g.fileOrPath,
uploadItem: g.uploadItem,
};
if (await areLivePhotoAssets(fa, ga)) {
const [image, video] =
@ -892,8 +891,8 @@ const clusterLivePhotos = async (files: FileWithCollectionIDAndName[]) => {
fileName: image.fileName,
isLivePhoto: true,
livePhotoAssets: {
image: image.fileOrPath,
video: video.fileOrPath,
image: image.uploadItem,
video: video.uploadItem,
},
});
index += 2;
@ -905,9 +904,9 @@ const clusterLivePhotos = async (files: FileWithCollectionIDAndName[]) => {
index += 1;
}
}
if (index === files.length - 1) {
if (index === items.length - 1) {
result.push({
...files[index],
...items[index],
isLivePhoto: false,
});
}
@ -918,7 +917,7 @@ interface PotentialLivePhotoAsset {
fileName: string;
fileType: FILE_TYPE;
collectionID: number;
fileOrPath: File | string;
uploadItem: UploadItem;
}
const areLivePhotoAssets = async (
@ -961,11 +960,11 @@ const areLivePhotoAssets = async (
// we use doesn't support stream as a input.
const maxAssetSize = 20 * 1024 * 1024; /* 20MB */
const fSize = await fopSize(f.fileOrPath);
const gSize = await fopSize(g.fileOrPath);
const fSize = await uploadItemSize(f.uploadItem);
const gSize = await uploadItemSize(g.uploadItem);
if (fSize > maxAssetSize || gSize > maxAssetSize) {
log.info(
`Not classifying assets with too large sizes ${[fSize, gSize]} as a live photo`,
`Not classifying files with too large sizes (${fSize} and ${gSize} bytes) as a live photo`,
);
return false;
}
@ -998,3 +997,15 @@ const removePotentialLivePhotoSuffix = (name: string, suffix?: string) => {
return foundSuffix ? name.slice(0, foundSuffix.length * -1) : name;
};
/**
 * Return the size (in bytes) of the given {@link uploadItem}.
 *
 * - For {@link File}s (whether from the web or from desktop drag-and-drop)
 *   the browser already knows the size.
 * - For paths (strings) and zip entries (tuples) we ask the Node.js layer of
 *   the desktop app over IPC.
 * - Otherwise this is a desktop File-with-path; use its in-memory File.
 */
const uploadItemSize = async (uploadItem: UploadItem): Promise<number> => {
    if (uploadItem instanceof File) return uploadItem.size;
    // Paths and zip items are both served by the same IPC call; merge the two
    // previously duplicated branches into one.
    if (typeof uploadItem == "string" || Array.isArray(uploadItem))
        return ensureElectron().pathOrZipItemSize(uploadItem);
    return uploadItem.file.size;
};

View file

@ -50,14 +50,15 @@ import {
generateThumbnailNative,
generateThumbnailWeb,
} from "./thumbnail";
import type { UploadItem } from "./types";
import UploadHttpClient from "./uploadHttpClient";
import type { UploadableFile } from "./uploadManager";
import type { UploadableUploadItem } from "./uploadManager";
/**
* A readable stream for a file, and its associated size and last modified time.
*
* This is the in-memory representation of the `fileOrPath` type that we usually
* pass around. See: [Note: Reading a fileOrPath]
* This is the in-memory representation of the {@link UploadItem} type that we
* usually pass around. See: [Note: Reading a UploadItem]
*/
interface FileStream {
/**
@ -181,30 +182,20 @@ const uploadService = new UploadService();
export default uploadService;
/**
* Return the file name for the given {@link fileOrPath}.
*
* @param fileOrPath The {@link File}, or the path to it. Note that it is only
* valid to specify a path if we are running in the context of our desktop app.
* Return the file name for the given {@link uploadItem}.
*/
export const fopFileName = (fileOrPath: File | string) =>
typeof fileOrPath == "string" ? basename(fileOrPath) : fileOrPath.name;
/**
* Return the size of the given {@link fileOrPath}.
*
* @param fileOrPath The {@link File}, or the path to it. Note that it is only
* valid to specify a path if we are running in the context of our desktop app.
*/
export const fopSize = async (fileOrPath: File | string): Promise<number> =>
fileOrPath instanceof File
? fileOrPath.size
: await ensureElectron().fs.size(fileOrPath);
/**
 * Return the file name for the given {@link uploadItem}.
 *
 * For paths this is the basename of the path, for zip entries the basename of
 * the entry's name within the zip, and for File objects the name the browser
 * reports.
 */
export const uploadItemFileName = (uploadItem: UploadItem) => {
    if (typeof uploadItem == "string") return basename(uploadItem);
    if (Array.isArray(uploadItem)) return basename(uploadItem[1]);
    return uploadItem instanceof File ? uploadItem.name : uploadItem.file.name;
};
/* -- Various intermediate type used during upload -- */
interface UploadAsset {
isLivePhoto?: boolean;
fileOrPath?: File | string;
uploadItem?: UploadItem;
livePhotoAssets?: LivePhotoAssets;
}
@ -311,14 +302,14 @@ interface UploadResponse {
}
/**
* Upload the given {@link UploadableFile}
* Upload the given {@link UploadableUploadItem}
*
* This is lower layer implementation of the upload. It is invoked by
* {@link UploadManager} after it has assembled all the relevant bits we need to
* go forth and upload.
*/
export const uploader = async (
{ collection, localID, fileName, ...uploadAsset }: UploadableFile,
{ collection, localID, fileName, ...uploadAsset }: UploadableUploadItem,
uploaderName: string,
existingFiles: EnteFile[],
parsedMetadataJSONMap: Map<string, ParsedMetadataJSON>,
@ -466,19 +457,21 @@ export const uploader = async (
};
/**
* Read the given file or path into an in-memory representation.
* Read the given file or path or zip item into an in-memory representation.
*
* [Note: Reading a fileOrPath]
* [Note: Reading a UploadItem]
*
* The file can be either a web
* [File](https://developer.mozilla.org/en-US/docs/Web/API/File) or the absolute
* path to a file on desk.
* [File](https://developer.mozilla.org/en-US/docs/Web/API/File), the absolute
 * path to a file on disk, a combination of these two, or an entry in a zip file
* on the user's local file system.
*
* tl;dr; There are three cases:
* tl;dr; There are four cases:
*
* 1. web / File
* 2. desktop / File
* 2. desktop / File (+ path)
* 3. desktop / path
* 4. desktop / ZipItem
*
* For the when and why, read on.
*
@ -490,66 +483,73 @@ export const uploader = async (
*
* In the web context, we'll always get a File, since within the browser we
* cannot programmatically construct paths to or arbitrarily access files on the
* user's filesystem. Note that even if we were to have an absolute path at
* hand, we cannot programmatically create such File objects to arbitrary
* absolute paths on user's local filesystem for security reasons.
* user's file system.
*
* > Note that even if we were to somehow have an absolute path at hand, we
* cannot programmatically create such File objects to arbitrary absolute
* paths on user's local file system for security reasons.
*
* So in the web context, this will always be a File we get as a result of an
* explicit user interaction (e.g. drag and drop).
* explicit user interaction (e.g. drag and drop or using a file selector).
*
* In the desktop context, this can be either a File or a path.
* In the desktop context, this can be either a File (+ path), or a path, or an
* entry within a zip file.
*
* 2. If the user provided us this file via some user interaction (say a drag
* and a drop), this'll still be a File.
* and a drop), this'll still be a File. But unlike in the web context, we
* also have access to the full path of this file.
*
* 3. However, when running in the desktop app we have the ability to access
* absolute paths on the user's file system. For example, if the user asks us
* to watch certain folders on their disk for changes, we'll be able to pick
* up new images being added, and in such cases, the parameter here will be a
* path. Another example is when resuming an previously interrupted upload -
* we'll only have the path at hand in such cases, not the File object.
* 3. In addition, when running in the desktop app we have the ability to
 * initiate programmatic access to absolute paths on the user's file system.
 * For
* example, if the user asks us to watch certain folders on their disk for
* changes, we'll be able to pick up new images being added, and in such
* cases, the parameter here will be a path. Another example is when resuming
 * a previously interrupted upload - we'll only have the path at hand in
* such cases, not the original File object since the app subsequently
* restarted.
*
* Case 2, when we're provided a path, is simple. We don't have a choice, since
* we cannot still programmatically construct a File object (we can construct it
* on the Node.js layer, but it can't then be transferred over the IPC
* boundary). So all our operations use the path itself.
* 4. The user might've also initiated an upload of a zip file (or we might be
* resuming one). In such cases we will get a tuple (path to the zip file on
* the local file system, and the name of the entry within that zip file).
*
* Case 3 involves a choice on a use-case basis, since
* Case 3 and 4, when we're provided a path, are simple. We don't have a choice,
* since we cannot still programmatically construct a File object (we can
* construct it on the Node.js layer, but it can't then be transferred over the
* IPC boundary). So all our operations use the path itself.
*
* (a) unlike in the web context, such File objects also have the full path.
* See: [Note: File paths when running under Electron].
* Case 2 involves a choice on a use-case basis. Neither File nor the path is a
* better choice for all use cases.
*
* (b) neither File nor the path is a better choice for all use cases.
*
* The advantage of the File object is that the browser has already read it into
* memory for us. The disadvantage comes in the case where we need to
* communicate with the native Node.js layer of our desktop app. Since this
* communication happens over IPC, the File's contents need to be serialized and
* copied, which is a bummer for large videos etc.
* > The advantage of the File object is that the browser has already read it
* into memory for us. The disadvantage comes in the case where we need to
* communicate with the native Node.js layer of our desktop app. Since this
* communication happens over IPC, the File's contents need to be serialized
* and copied, which is a bummer for large videos etc.
*/
const readFileOrPath = async (
fileOrPath: File | string,
): Promise<FileStream> => {
const readUploadItem = async (uploadItem: UploadItem): Promise<FileStream> => {
let underlyingStream: ReadableStream;
let file: File | undefined;
let fileSize: number;
let lastModifiedMs: number;
if (fileOrPath instanceof File) {
file = fileOrPath;
underlyingStream = file.stream();
fileSize = file.size;
lastModifiedMs = file.lastModified;
} else {
const path = fileOrPath;
if (typeof uploadItem == "string" || Array.isArray(uploadItem)) {
const {
response,
size,
lastModifiedMs: lm,
} = await readStream(ensureElectron(), path);
} = await readStream(ensureElectron(), uploadItem);
underlyingStream = response.body;
fileSize = size;
lastModifiedMs = lm;
} else {
if (uploadItem instanceof File) {
file = uploadItem;
} else {
file = uploadItem.file;
}
underlyingStream = file.stream();
fileSize = file.size;
lastModifiedMs = file.lastModified;
}
const N = ENCRYPTION_CHUNK_SIZE;
@ -596,17 +596,17 @@ interface ReadAssetDetailsResult {
}
/**
* Read the file(s) to determine the type, size and last modified time of the
* given {@link asset}.
* Read the associated file(s) to determine the type, size and last modified
* time of the given {@link asset}.
*/
const readAssetDetails = async ({
isLivePhoto,
livePhotoAssets,
fileOrPath,
uploadItem,
}: UploadAsset): Promise<ReadAssetDetailsResult> =>
isLivePhoto
? readLivePhotoDetails(livePhotoAssets)
: readImageOrVideoDetails(fileOrPath);
: readImageOrVideoDetails(uploadItem);
const readLivePhotoDetails = async ({ image, video }: LivePhotoAssets) => {
const img = await readImageOrVideoDetails(image);
@ -632,18 +632,18 @@ const readLivePhotoDetails = async ({ image, video }: LivePhotoAssets) => {
* While we're at it, also return the size of the file, and its last modified
* time (expressed as epoch milliseconds).
*
* @param fileOrPath See: [Note: Reading a fileOrPath]
* @param uploadItem See: [Note: Reading a UploadItem]
*/
const readImageOrVideoDetails = async (fileOrPath: File | string) => {
const readImageOrVideoDetails = async (uploadItem: UploadItem) => {
const { stream, fileSize, lastModifiedMs } =
await readFileOrPath(fileOrPath);
await readUploadItem(uploadItem);
const fileTypeInfo = await detectFileTypeInfoFromChunk(async () => {
const reader = stream.getReader();
const chunk = ensure((await reader.read()).value);
await reader.cancel();
return chunk;
}, fopFileName(fileOrPath));
}, uploadItemFileName(uploadItem));
return { fileTypeInfo, fileSize, lastModifiedMs };
};
@ -669,7 +669,7 @@ interface ExtractAssetMetadataResult {
* {@link parsedMetadataJSONMap} for the assets. Return the resultant metadatum.
*/
const extractAssetMetadata = async (
{ isLivePhoto, fileOrPath, livePhotoAssets }: UploadAsset,
{ isLivePhoto, uploadItem, livePhotoAssets }: UploadAsset,
fileTypeInfo: FileTypeInfo,
lastModifiedMs: number,
collectionID: number,
@ -686,7 +686,7 @@ const extractAssetMetadata = async (
worker,
)
: await extractImageOrVideoMetadata(
fileOrPath,
uploadItem,
fileTypeInfo,
lastModifiedMs,
collectionID,
@ -721,7 +721,7 @@ const extractLivePhotoMetadata = async (
return {
metadata: {
...imageMetadata,
title: fopFileName(livePhotoAssets.image),
title: uploadItemFileName(livePhotoAssets.image),
fileType: FILE_TYPE.LIVE_PHOTO,
imageHash: imageMetadata.hash,
videoHash: videoHash,
@ -732,33 +732,33 @@ const extractLivePhotoMetadata = async (
};
const extractImageOrVideoMetadata = async (
fileOrPath: File | string,
uploadItem: UploadItem,
fileTypeInfo: FileTypeInfo,
lastModifiedMs: number,
collectionID: number,
parsedMetadataJSONMap: Map<string, ParsedMetadataJSON>,
worker: Remote<DedicatedCryptoWorker>,
) => {
const fileName = fopFileName(fileOrPath);
const fileName = uploadItemFileName(uploadItem);
const { fileType } = fileTypeInfo;
let extractedMetadata: ParsedExtractedMetadata;
if (fileType === FILE_TYPE.IMAGE) {
extractedMetadata =
(await tryExtractImageMetadata(
fileOrPath,
uploadItem,
fileTypeInfo,
lastModifiedMs,
)) ?? NULL_EXTRACTED_METADATA;
} else if (fileType === FILE_TYPE.VIDEO) {
extractedMetadata =
(await tryExtractVideoMetadata(fileOrPath)) ??
(await tryExtractVideoMetadata(uploadItem)) ??
NULL_EXTRACTED_METADATA;
} else {
throw new Error(`Unexpected file type ${fileType} for ${fileOrPath}`);
throw new Error(`Unexpected file type ${fileType} for ${uploadItem}`);
}
const hash = await computeHash(fileOrPath, worker);
const hash = await computeHash(uploadItem, worker);
const modificationTime = lastModifiedMs * 1000;
const creationTime =
@ -802,46 +802,48 @@ const NULL_EXTRACTED_METADATA: ParsedExtractedMetadata = {
};
async function tryExtractImageMetadata(
fileOrPath: File | string,
uploadItem: UploadItem,
fileTypeInfo: FileTypeInfo,
lastModifiedMs: number,
): Promise<ParsedExtractedMetadata> {
let file: File;
if (fileOrPath instanceof File) {
file = fileOrPath;
} else {
const path = fileOrPath;
if (typeof uploadItem == "string" || Array.isArray(uploadItem)) {
// The library we use for extracting EXIF from images, exifr, doesn't
// support streams. But unlike videos, for images it is reasonable to
// read the entire stream into memory here.
const { response } = await readStream(ensureElectron(), path);
const { response } = await readStream(ensureElectron(), uploadItem);
const path = typeof uploadItem == "string" ? uploadItem : uploadItem[1];
file = new File([await response.arrayBuffer()], basename(path), {
lastModified: lastModifiedMs,
});
} else if (uploadItem instanceof File) {
file = uploadItem;
} else {
file = uploadItem.file;
}
try {
return await parseImageMetadata(file, fileTypeInfo);
} catch (e) {
log.error(`Failed to extract image metadata for ${fileOrPath}`, e);
log.error(`Failed to extract image metadata for ${uploadItem}`, e);
return undefined;
}
}
const tryExtractVideoMetadata = async (fileOrPath: File | string) => {
const tryExtractVideoMetadata = async (uploadItem: UploadItem) => {
try {
return await ffmpeg.extractVideoMetadata(fileOrPath);
return await ffmpeg.extractVideoMetadata(uploadItem);
} catch (e) {
log.error(`Failed to extract video metadata for ${fileOrPath}`, e);
log.error(`Failed to extract video metadata for ${uploadItem}`, e);
return undefined;
}
};
const computeHash = async (
fileOrPath: File | string,
uploadItem: UploadItem,
worker: Remote<DedicatedCryptoWorker>,
) => {
const { stream, chunkCount } = await readFileOrPath(fileOrPath);
const { stream, chunkCount } = await readUploadItem(uploadItem);
const hashState = await worker.initChunkHashing();
const streamReader = stream.getReader();
@ -910,11 +912,11 @@ const areFilesSameNoHash = (f: Metadata, g: Metadata) => {
const readAsset = async (
fileTypeInfo: FileTypeInfo,
{ isLivePhoto, fileOrPath, livePhotoAssets }: UploadAsset,
{ isLivePhoto, uploadItem, livePhotoAssets }: UploadAsset,
): Promise<ThumbnailedFile> =>
isLivePhoto
? await readLivePhoto(livePhotoAssets, fileTypeInfo)
: await readImageOrVideo(fileOrPath, fileTypeInfo);
: await readImageOrVideo(uploadItem, fileTypeInfo);
const readLivePhoto = async (
livePhotoAssets: LivePhotoAssets,
@ -930,28 +932,28 @@ const readLivePhoto = async (
extension: fileTypeInfo.imageType,
fileType: FILE_TYPE.IMAGE,
},
await readFileOrPath(livePhotoAssets.image),
await readUploadItem(livePhotoAssets.image),
);
const videoFileStreamOrData = await readFileOrPath(livePhotoAssets.video);
const videoFileStreamOrData = await readUploadItem(livePhotoAssets.video);
// The JS zip library that encodeLivePhoto uses does not support
// ReadableStreams, so pass the file (blob) if we have one, otherwise read
// the entire stream into memory and pass the resultant data.
//
// This is a reasonable behaviour since the videos corresponding to live
// photos are only a couple of seconds long (we have already done a
// pre-flight check to ensure their size is small in `areLivePhotoAssets`).
// photos are only a couple of seconds long (we've already done a pre-flight
// check during areLivePhotoAssets to ensure their size is small).
const fileOrData = async (sd: FileStream | Uint8Array) => {
const _fs = async ({ file, stream }: FileStream) =>
const fos = async ({ file, stream }: FileStream) =>
file ? file : await readEntireStream(stream);
return sd instanceof Uint8Array ? sd : _fs(sd);
return sd instanceof Uint8Array ? sd : fos(sd);
};
return {
fileStreamOrData: await encodeLivePhoto({
imageFileName: fopFileName(livePhotoAssets.image),
imageFileName: uploadItemFileName(livePhotoAssets.image),
imageFileOrData: await fileOrData(imageFileStreamOrData),
videoFileName: fopFileName(livePhotoAssets.video),
videoFileName: uploadItemFileName(livePhotoAssets.video),
videoFileOrData: await fileOrData(videoFileStreamOrData),
}),
thumbnail,
@ -960,11 +962,11 @@ const readLivePhoto = async (
};
const readImageOrVideo = async (
fileOrPath: File | string,
uploadItem: UploadItem,
fileTypeInfo: FileTypeInfo,
) => {
const fileStream = await readFileOrPath(fileOrPath);
return withThumbnail(fileOrPath, fileTypeInfo, fileStream);
const fileStream = await readUploadItem(uploadItem);
return withThumbnail(uploadItem, fileTypeInfo, fileStream);
};
// TODO(MR): Merge with the uploader
@ -987,14 +989,17 @@ const moduleState = new ModuleState();
/**
* Augment the given {@link dataOrStream} with thumbnail information.
*
* This is a companion method for {@link readFileOrPath}, and can be used to
* convert the result of {@link readFileOrPath} into an {@link ThumbnailedFile}.
* This is a companion method for {@link readUploadItem}, and can be used to
* convert the result of {@link readUploadItem} into an {@link ThumbnailedFile}.
*
* Note: The `fileStream` in the returned ThumbnailedFile may be different from
* the one passed to the function.
* @param uploadItem The {@link UploadItem} where the given {@link fileStream}
* came from.
*
* Note: The `fileStream` in the returned {@link ThumbnailedFile} may be
* different from the one passed to the function.
*/
const withThumbnail = async (
fileOrPath: File | string,
uploadItem: UploadItem,
fileTypeInfo: FileTypeInfo,
fileStream: FileStream,
): Promise<ThumbnailedFile> => {
@ -1007,14 +1012,12 @@ const withThumbnail = async (
fileTypeInfo.fileType == FILE_TYPE.IMAGE &&
moduleState.isNativeImageThumbnailGenerationNotAvailable;
// 1. Native thumbnail generation using file's path.
if (electron && !notAvailable) {
// 1. Native thumbnail generation using items's (effective) path.
if (electron && !notAvailable && !(uploadItem instanceof File)) {
try {
// When running in the context of our desktop app, File paths will
// be absolute. See: [Note: File paths when running under Electron].
thumbnail = await generateThumbnailNative(
electron,
fileOrPath instanceof File ? fileOrPath["path"] : fileOrPath,
uploadItem,
fileTypeInfo,
);
} catch (e) {
@ -1028,9 +1031,9 @@ const withThumbnail = async (
if (!thumbnail) {
let blob: Blob | undefined;
if (fileOrPath instanceof File) {
if (uploadItem instanceof File) {
// 2. Browser based thumbnail generation for File (blobs).
blob = fileOrPath;
blob = uploadItem;
} else {
// 3. Browser based thumbnail generation for paths.
//
@ -1046,12 +1049,14 @@ const withThumbnail = async (
// The fallback in this case involves reading the entire stream into
// memory, and passing that data across the IPC boundary in a single
// go (i.e. not in a streaming manner). This is risky for videos of
// unbounded sizes, plus that isn't the expected scenario. So
// instead of trying to cater for arbitrary exceptions, we only run
// this fallback to cover for the case where thumbnail generation
// was not available for an image file on Windows. If/when we add
// support of native thumbnailing on Windows too, this entire branch
// can be removed.
// unbounded sizes, plus we shouldn't even be getting here unless
// something went wrong.
//
// So instead of trying to cater for arbitrary exceptions, we only
// run this fallback to cover for the case where thumbnail
// generation was not available for an image file on Windows.
// If/when we add support of native thumbnailing on Windows too,
// this entire branch can be removed.
if (fileTypeInfo.fileType == FILE_TYPE.IMAGE) {
const data = await readEntireStream(fileStream.stream);
@ -1062,7 +1067,7 @@ const withThumbnail = async (
fileData = data;
} else {
log.warn(
`Not using browser based thumbnail generation fallback for video at path ${fileOrPath}`,
`Not using browser based thumbnail generation fallback for video at path ${uploadItem}`,
);
}
}

View file

@ -15,7 +15,7 @@ import { ensureString } from "@/utils/ensure";
import { UPLOAD_RESULT } from "constants/upload";
import debounce from "debounce";
import uploadManager, {
type FileWithCollection,
type UploadItemWithCollection,
} from "services/upload/uploadManager";
import { Collection } from "types/collection";
import { EncryptedEnteFile } from "types/file";
@ -317,16 +317,17 @@ class FolderWatcher {
}
/**
* Callback invoked by the uploader whenever a file we requested to
     * Callback invoked by the uploader whenever an item we requested to
* {@link upload} gets uploaded.
*/
async onFileUpload(
fileUploadResult: UPLOAD_RESULT,
fileWithCollection: FileWithCollection,
item: UploadItemWithCollection,
file: EncryptedEnteFile,
) {
// The files we get here will have fileWithCollection.file as a string,
// not as a File or a ElectronFile
// Re the usage of ensureString: For desktop watch, the only possibility
// for a UploadItem is for it to be a string (the absolute path to a
// file on disk).
if (
[
UPLOAD_RESULT.ADDED_SYMLINK,
@ -335,18 +336,18 @@ class FolderWatcher {
UPLOAD_RESULT.ALREADY_UPLOADED,
].includes(fileUploadResult)
) {
if (fileWithCollection.isLivePhoto) {
if (item.isLivePhoto) {
this.uploadedFileForPath.set(
ensureString(fileWithCollection.livePhotoAssets.image),
ensureString(item.livePhotoAssets.image),
file,
);
this.uploadedFileForPath.set(
ensureString(fileWithCollection.livePhotoAssets.video),
ensureString(item.livePhotoAssets.video),
file,
);
} else {
this.uploadedFileForPath.set(
ensureString(fileWithCollection.fileOrPath),
ensureString(item.uploadItem),
file,
);
}
@ -355,17 +356,15 @@ class FolderWatcher {
fileUploadResult,
)
) {
if (fileWithCollection.isLivePhoto) {
if (item.isLivePhoto) {
this.unUploadableFilePaths.add(
ensureString(fileWithCollection.livePhotoAssets.image),
ensureString(item.livePhotoAssets.image),
);
this.unUploadableFilePaths.add(
ensureString(fileWithCollection.livePhotoAssets.video),
ensureString(item.livePhotoAssets.video),
);
} else {
this.unUploadableFilePaths.add(
ensureString(fileWithCollection.fileOrPath),
);
this.unUploadableFilePaths.add(ensureString(item.uploadItem));
}
}
}
@ -375,7 +374,7 @@ class FolderWatcher {
* {@link upload} get uploaded.
*/
async allFileUploadsDone(
filesWithCollection: FileWithCollection[],
uploadItemsWithCollection: UploadItemWithCollection[],
collections: Collection[],
) {
const electron = ensureElectron();
@ -384,14 +383,15 @@ class FolderWatcher {
log.debug(() =>
JSON.stringify({
f: "watch/allFileUploadsDone",
filesWithCollection,
uploadItemsWithCollection,
collections,
watch,
}),
);
const { syncedFiles, ignoredFiles } =
this.deduceSyncedAndIgnored(filesWithCollection);
const { syncedFiles, ignoredFiles } = this.deduceSyncedAndIgnored(
uploadItemsWithCollection,
);
if (syncedFiles.length > 0)
await electron.watch.updateSyncedFiles(
@ -411,7 +411,9 @@ class FolderWatcher {
this.debouncedRunNextEvent();
}
private deduceSyncedAndIgnored(filesWithCollection: FileWithCollection[]) {
private deduceSyncedAndIgnored(
uploadItemsWithCollection: UploadItemWithCollection[],
) {
const syncedFiles: FolderWatch["syncedFiles"] = [];
const ignoredFiles: FolderWatch["ignoredFiles"] = [];
@ -430,14 +432,13 @@ class FolderWatcher {
this.unUploadableFilePaths.delete(path);
};
for (const fileWithCollection of filesWithCollection) {
if (fileWithCollection.isLivePhoto) {
const imagePath = ensureString(
fileWithCollection.livePhotoAssets.image,
);
const videoPath = ensureString(
fileWithCollection.livePhotoAssets.video,
);
for (const item of uploadItemsWithCollection) {
// Re the usage of ensureString: For desktop watch, the only
// possibility for a UploadItem is for it to be a string (the
// absolute path to a file on disk).
if (item.isLivePhoto) {
const imagePath = ensureString(item.livePhotoAssets.image);
const videoPath = ensureString(item.livePhotoAssets.video);
const imageFile = this.uploadedFileForPath.get(imagePath);
const videoFile = this.uploadedFileForPath.get(videoPath);
@ -453,7 +454,7 @@ class FolderWatcher {
markIgnored(videoPath);
}
} else {
const path = ensureString(fileWithCollection.fileOrPath);
const path = ensureString(item.uploadItem);
const file = this.uploadedFileForPath.get(path);
if (file) {
markSynced(file, path);

View file

@ -116,6 +116,19 @@ export async function getUpdatedEXIFFileForDownload(
}
}
/**
 * Convert the given number of {@link bytes} into a human readable string,
 * e.g. "1.00 KB".
 *
 * @param bytes The size in bytes.
 *
 * @param precision Number of digits after the decimal point (default 2).
 *
 * @returns The formatted size, e.g. "1.50 MB". Zero, negative, NaN and
 * non-finite inputs all produce "0 MB" (preserving the historical behavior
 * for zero, and avoiding the "NaN undefined" output that negative or
 * infinite inputs previously produced via Math.log).
 */
export function convertBytesToHumanReadable(
    bytes: number,
    precision = 2,
): string {
    if (!Number.isFinite(bytes) || bytes <= 0) {
        return "0 MB";
    }
    const sizes = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"];
    // Clamp the exponent so astronomically large values don't index past the
    // end of the units array.
    const i = Math.min(
        Math.floor(Math.log(bytes) / Math.log(1024)),
        sizes.length - 1,
    );
    return (bytes / Math.pow(1024, i)).toFixed(precision) + " " + sizes[i];
}
export async function downloadFile(file: EnteFile) {
try {
const fileReader = new FileReader();

View file

@ -1,5 +1,5 @@
/**
* @file Utilities for native filesystem access.
* @file Utilities for native file system access.
*
* While they don't have any direct dependencies to our desktop app, they were
* written for use by the code that runs in our desktop app.

View file

@ -2,12 +2,14 @@
* @file Streaming IPC communication with the Node.js layer of our desktop app.
*
* NOTE: These functions only work when we're running in our desktop app.
*
* See: [Note: IPC streams].
*/
import type { Electron } from "@/next/types/ipc";
import type { Electron, ZipItem } from "@/next/types/ipc";
/**
* Stream the given file from the user's local filesystem.
* Stream the given file or zip entry from the user's local file system.
*
* This only works when we're running in our desktop app since it uses the
* "stream://" protocol handler exposed by our custom code in the Node.js layer.
@ -16,8 +18,9 @@ import type { Electron } from "@/next/types/ipc";
* To avoid accidentally invoking it in a non-desktop app context, it requires
* the {@link Electron} object as a parameter (even though it doesn't use it).
*
* @param path The path on the file on the user's local filesystem whose
* contents we want to stream.
* @param pathOrZipItem Either the path of the file on the user's local file
* system whose contents we want to stream. Or a tuple containing the path to a
* zip file and the name of the entry within it.
*
* @return A ({@link Response}, size, lastModifiedMs) triple.
*
@ -32,16 +35,23 @@ import type { Electron } from "@/next/types/ipc";
*/
export const readStream = async (
_: Electron,
path: string,
pathOrZipItem: string | ZipItem,
): Promise<{ response: Response; size: number; lastModifiedMs: number }> => {
const req = new Request(`stream://read${path}`, {
method: "GET",
});
let url: URL;
if (typeof pathOrZipItem == "string") {
url = new URL(`stream://read${pathOrZipItem}`);
} else {
const [zipPath, entryName] = pathOrZipItem;
url = new URL(`stream://read${zipPath}`);
url.hash = entryName;
}
const req = new Request(url, { method: "GET" });
const res = await fetch(req);
if (!res.ok)
throw new Error(
`Failed to read stream from ${path}: HTTP ${res.status}`,
`Failed to read stream from ${url}: HTTP ${res.status}`,
);
const size = readNumericHeader(res, "Content-Length");

View file

@ -34,6 +34,6 @@ meant for larger, tabular data.
OPFS is used for caching entire files when we're running under Electron (the Web
Cache API is used in the browser).
As it name suggests, it is an entire filesystem, private for us ("origin"). In
As its name suggests, it is an entire file system, private to us ("origin"). It
is not unbounded though, and the storage is not guaranteed to be persistent (at
least with the APIs we use), hence the cache designation.

View file

@ -1,5 +1,3 @@
import type { ElectronFile } from "./types/file";
/**
* The two parts of a file name - the name itself, and an (optional) extension.
*
@ -82,27 +80,3 @@ export const dirname = (path: string) => {
}
return pathComponents.join("/");
};
/**
 * Return a short description of the given {@link fileOrPath} suitable for
 * helping identify it in log messages.
 *
 * For {@link File} objects this is "File(<name>)"; for paths it is the path
 * itself.
 */
export const fopLabel = (fileOrPath: File | string) => {
    if (fileOrPath instanceof File) {
        return `File(${fileOrPath.name})`;
    }
    return fileOrPath;
};
/**
 * Return a log-friendly label combining the given file's name and its human
 * readable size, joined by an underscore.
 */
export function getFileNameSize(file: File | ElectronFile) {
    const humanReadableSize = convertBytesToHumanReadable(file.size);
    return `${file.name}_${humanReadableSize}`;
}
/**
 * Format the given byte count as a human readable string, e.g. "1.00 KB".
 *
 * @param bytes The size in bytes.
 *
 * @param precision Number of digits after the decimal point (default 2).
 */
export function convertBytesToHumanReadable(
    bytes: number,
    precision = 2,
): string {
    if (bytes === 0 || isNaN(bytes)) return "0 MB";
    const units = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"];
    const exponent = Math.floor(Math.log(bytes) / Math.log(1024));
    const scaled = bytes / Math.pow(1024, exponent);
    return `${scaled.toFixed(precision)} ${units[exponent]}`;
}

View file

@ -621,5 +621,7 @@
"PASSKEY_LOGIN_ERRORED": "",
"TRY_AGAIN": "",
"PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "",
"LOGIN_WITH_PASSKEY": ""
"LOGIN_WITH_PASSKEY": "",
"autogenerated_first_album_name": "",
"autogenerated_default_album_name": ""
}

View file

@ -621,5 +621,7 @@
"PASSKEY_LOGIN_ERRORED": "Ein Fehler trat auf beim Anmelden mit dem Passkey auf.",
"TRY_AGAIN": "Erneut versuchen",
"PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Folge den Schritten in deinem Browser, um mit dem Anmelden fortzufahren.",
"LOGIN_WITH_PASSKEY": "Mit Passkey anmelden"
"LOGIN_WITH_PASSKEY": "Mit Passkey anmelden",
"autogenerated_first_album_name": "",
"autogenerated_default_album_name": ""
}

View file

@ -621,5 +621,7 @@
"PASSKEY_LOGIN_ERRORED": "An error occurred while logging in with passkey.",
"TRY_AGAIN": "Try again",
"PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Follow the steps from your browser to continue logging in.",
"LOGIN_WITH_PASSKEY": "Login with passkey"
"LOGIN_WITH_PASSKEY": "Login with passkey",
"autogenerated_first_album_name": "My First Album",
"autogenerated_default_album_name": "New Album"
}

View file

@ -621,5 +621,7 @@
"PASSKEY_LOGIN_ERRORED": "",
"TRY_AGAIN": "",
"PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "",
"LOGIN_WITH_PASSKEY": ""
"LOGIN_WITH_PASSKEY": "",
"autogenerated_first_album_name": "",
"autogenerated_default_album_name": ""
}

View file

@ -621,5 +621,7 @@
"PASSKEY_LOGIN_ERRORED": "",
"TRY_AGAIN": "",
"PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "",
"LOGIN_WITH_PASSKEY": ""
"LOGIN_WITH_PASSKEY": "",
"autogenerated_first_album_name": "",
"autogenerated_default_album_name": ""
}

View file

@ -621,5 +621,7 @@
"PASSKEY_LOGIN_ERRORED": "",
"TRY_AGAIN": "",
"PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "",
"LOGIN_WITH_PASSKEY": ""
"LOGIN_WITH_PASSKEY": "",
"autogenerated_first_album_name": "",
"autogenerated_default_album_name": ""
}

View file

@ -621,5 +621,7 @@
"PASSKEY_LOGIN_ERRORED": "Une erreur s'est produite lors de la connexion avec le code d'accès.",
"TRY_AGAIN": "Réessayer",
"PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Suivez les étapes de votre navigateur pour poursuivre la connexion.",
"LOGIN_WITH_PASSKEY": "Se connecter avec le code d'accès"
"LOGIN_WITH_PASSKEY": "Se connecter avec le code d'accès",
"autogenerated_first_album_name": "",
"autogenerated_default_album_name": ""
}

View file

@ -621,5 +621,7 @@
"PASSKEY_LOGIN_ERRORED": "",
"TRY_AGAIN": "",
"PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "",
"LOGIN_WITH_PASSKEY": ""
"LOGIN_WITH_PASSKEY": "",
"autogenerated_first_album_name": "",
"autogenerated_default_album_name": ""
}

View file

@ -621,5 +621,7 @@
"PASSKEY_LOGIN_ERRORED": "",
"TRY_AGAIN": "",
"PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "",
"LOGIN_WITH_PASSKEY": ""
"LOGIN_WITH_PASSKEY": "",
"autogenerated_first_album_name": "",
"autogenerated_default_album_name": ""
}

View file

@ -621,5 +621,7 @@
"PASSKEY_LOGIN_ERRORED": "Er is een fout opgetreden tijdens het inloggen met een passkey.",
"TRY_AGAIN": "Probeer opnieuw",
"PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Volg de stappen van je browser om door te gaan met inloggen.",
"LOGIN_WITH_PASSKEY": "Inloggen met passkey"
"LOGIN_WITH_PASSKEY": "Inloggen met passkey",
"autogenerated_first_album_name": "",
"autogenerated_default_album_name": ""
}

View file

@ -621,5 +621,7 @@
"PASSKEY_LOGIN_ERRORED": "Ocorreu um erro ao entrar com a chave de acesso.",
"TRY_AGAIN": "Tente novamente",
"PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Siga os passos do seu navegador para continuar acessando.",
"LOGIN_WITH_PASSKEY": "Entrar com a chave de acesso"
"LOGIN_WITH_PASSKEY": "Entrar com a chave de acesso",
"autogenerated_first_album_name": "",
"autogenerated_default_album_name": ""
}

View file

@ -621,5 +621,7 @@
"PASSKEY_LOGIN_ERRORED": "",
"TRY_AGAIN": "",
"PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "",
"LOGIN_WITH_PASSKEY": ""
"LOGIN_WITH_PASSKEY": "",
"autogenerated_first_album_name": "",
"autogenerated_default_album_name": ""
}

View file

@ -621,5 +621,7 @@
"PASSKEY_LOGIN_ERRORED": "При входе в систему с помощью пароля произошла ошибка.",
"TRY_AGAIN": "Пробовать снова",
"PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Следуйте инструкциям в вашем браузере, чтобы продолжить вход в систему.",
"LOGIN_WITH_PASSKEY": "Войдите в систему с помощью пароля"
"LOGIN_WITH_PASSKEY": "Войдите в систему с помощью пароля",
"autogenerated_first_album_name": "",
"autogenerated_default_album_name": ""
}

View file

@ -621,5 +621,7 @@
"PASSKEY_LOGIN_ERRORED": "",
"TRY_AGAIN": "",
"PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "",
"LOGIN_WITH_PASSKEY": ""
"LOGIN_WITH_PASSKEY": "",
"autogenerated_first_album_name": "",
"autogenerated_default_album_name": ""
}

View file

@ -621,5 +621,7 @@
"PASSKEY_LOGIN_ERRORED": "",
"TRY_AGAIN": "",
"PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "",
"LOGIN_WITH_PASSKEY": ""
"LOGIN_WITH_PASSKEY": "",
"autogenerated_first_album_name": "",
"autogenerated_default_album_name": ""
}

View file

@ -621,5 +621,7 @@
"PASSKEY_LOGIN_ERRORED": "",
"TRY_AGAIN": "",
"PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "",
"LOGIN_WITH_PASSKEY": ""
"LOGIN_WITH_PASSKEY": "",
"autogenerated_first_album_name": "",
"autogenerated_default_album_name": ""
}

View file

@ -621,5 +621,7 @@
"PASSKEY_LOGIN_ERRORED": "使用通行密钥登录时出错。",
"TRY_AGAIN": "重试",
"PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "按照浏览器中提示的步骤继续登录。",
"LOGIN_WITH_PASSKEY": "使用通行密钥来登录"
"LOGIN_WITH_PASSKEY": "使用通行密钥来登录",
"autogenerated_first_album_name": "",
"autogenerated_default_album_name": ""
}

View file

@ -16,6 +16,17 @@ export interface ElectronFile {
arrayBuffer: () => Promise<Uint8Array>;
}
/**
* When we are running in the context of our desktop app, we have access to the
* absolute path of {@link File} objects. This convenience type clubs these two
* bits of information, saving us the need to query the path again and again
* using the {@link getPathForFile} method of {@link Electron}.
*/
export interface FileAndPath {
file: File;
path: string;
}
export interface EventQueueItem {
type: "upload" | "trash";
folderPath: string;

View file

@ -123,17 +123,17 @@ export interface Electron {
skipAppUpdate: (version: string) => void;
/**
* A subset of filesystem access APIs.
* A subset of file system access APIs.
*
* The renderer process, being a web process, does not have full access to
* the local filesystem apart from files explicitly dragged and dropped (or
* the local file system apart from files explicitly dragged and dropped (or
* selected by the user in a native file open dialog).
*
* The main process, however, has full filesystem access (limited only be an
* The main process, however, has full file system access (limited only by an
* OS level sandbox on the entire process).
*
* When we're running in the desktop app, we want to better utilize the
* local filesystem access to provide more integrated features to the user -
* local file system access to provide more integrated features to the user;
* things that are not currently possible using web technologies. For
* example, continuous exports to an arbitrary user chosen location on disk,
* or watching some folders for changes and syncing them automatically.
@ -189,11 +189,6 @@ export interface Electron {
* directory.
*/
isDir: (dirPath: string) => Promise<boolean>;
/**
* Return the size in bytes of the file at {@link path}.
*/
size: (path: string) => Promise<number>;
};
// - Conversion
@ -226,22 +221,27 @@ export interface Electron {
* not yet possible, this function will throw an error with the
* {@link CustomErrorMessage.NotAvailable} message.
*
* @param dataOrPath The raw image data (the contents of the image file), or
* the path to the image file, whose thumbnail we want to generate.
* @param dataOrPathOrZipItem The file whose thumbnail we want to generate.
* It can be provided as raw image data (the contents of the image file), or
* the path to the image file, or a tuple containing the path of the zip
* file along with the name of an entry in it.
*
* @param maxDimension The maximum width or height of the generated
* thumbnail.
*
* @param maxSize Maximum size (in bytes) of the generated thumbnail.
*
* @returns JPEG data of the generated thumbnail.
*/
generateImageThumbnail: (
dataOrPath: Uint8Array | string,
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
maxDimension: number,
maxSize: number,
) => Promise<Uint8Array>;
/**
* Execute a FFmpeg {@link command} on the given {@link dataOrPath}.
* Execute a FFmpeg {@link command} on the given
* {@link dataOrPathOrZipItem}.
*
* This executes the command using a FFmpeg executable we bundle with our
* desktop app. We also have a wasm FFmpeg wasm implementation that we use
@ -254,10 +254,11 @@ export interface Electron {
* (respectively {@link inputPathPlaceholder},
* {@link outputPathPlaceholder}, {@link ffmpegPathPlaceholder}).
*
* @param dataOrPath The bytes of the input file, or the path to the input
* file on the user's local disk. In both cases, the data gets serialized to
* a temporary file, and then that path gets substituted in the FFmpeg
* {@link command} in lieu of {@link inputPathPlaceholder}.
* @param dataOrPathOrZipItem The bytes of the input file, or the path to
* the input file on the user's local disk, or the path to a zip file on the
* user's disk and the name of an entry in it. In all three cases, the data
* gets serialized to a temporary file, and then that path gets substituted
* in the FFmpeg {@link command} in lieu of {@link inputPathPlaceholder}.
*
* @param outputFileExtension The extension (without the dot, e.g. "jpeg")
* to use for the output file that we ask FFmpeg to create in
@ -273,7 +274,7 @@ export interface Electron {
*/
ffmpegExec: (
command: string[],
dataOrPath: Uint8Array | string,
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
outputFileExtension: string,
timeoutMS: number,
) => Promise<Uint8Array>;
@ -465,44 +466,80 @@ export interface Electron {
// - Upload
/**
* Return the file system path that this File object points to.
*
* This method is a bit different from the other methods on the Electron
* object in the sense that there is no actual IPC happening - the
* implementation of this method is completely in the preload script. Thus
* we can pass it an otherwise unserializable File object.
*
* Consequently, it is also _not_ async.
*/
pathForFile: (file: File) => string;
/**
* Get the list of files that are present in the given zip file.
*
* @param zipPath The path of the zip file on the user's local file system.
*
* @returns A list of (zipPath, entryName) tuples, one for each file in the
* given zip. Directories are traversed recursively, but the directory
* entries themselves will be excluded from the returned list. File entries
* whose file name begins with a dot (i.e. "hidden" files) will also be
* excluded.
*
* To read the contents of the files themselves, see [Note: IPC streams].
*/
listZipItems: (zipPath: string) => Promise<ZipItem[]>;
/**
* Return the size in bytes of the file at the given path or of a particular
* entry within a zip file.
*/
pathOrZipItemSize: (pathOrZipItem: string | ZipItem) => Promise<number>;
/**
* Return any pending uploads that were previously enqueued but haven't yet
* been completed.
*
* The state of pending uploads is persisted in the Node.js layer.
* Return undefined if there are no such pending uploads.
*
* Note that we might have both outstanding zip and regular file uploads at
* the same time. In such cases, the zip file ones get precedence.
* The state of pending uploads is persisted in the Node.js layer. On app
* start, we read in this data from the Node.js layer via this IPC method.
* The Node.js code returns the persisted data after filtering out any files
* that no longer exist on disk.
*/
pendingUploads: () => Promise<PendingUploads | undefined>;
/**
* Set or clear the name of the collection where the pending upload is
* directed to.
* Set the state of pending uploads.
*
* - Typically, this would be called at the start of an upload.
*
* - Thereafter, as each item gets uploaded one by one, we'd call
* {@link markUploadedFiles} or {@link markUploadedZipItems}.
*
* - Finally, once the upload completes (or gets cancelled), we'd call
* {@link clearPendingUploads} to complete the circle.
*/
setPendingUploadCollection: (collectionName: string) => Promise<void>;
setPendingUploads: (pendingUploads: PendingUploads) => Promise<void>;
/**
* Update the list of files (of {@link type}) associated with the pending
* upload.
* Mark the given files (given by their {@link paths}) as having been
* uploaded.
*/
setPendingUploadFiles: (
type: PendingUploads["type"],
filePaths: string[],
) => Promise<void>;
markUploadedFiles: (paths: PendingUploads["filePaths"]) => Promise<void>;
/*
* TODO: AUDIT below this - Some of the types we use below are not copyable
* across process boundaries, and such functions will (expectedly) fail at
* runtime. For such functions, find an efficient alternative or refactor
* the dataflow.
/**
* Mark the given {@link ZipItem}s as having been uploaded.
*/
markUploadedZipItems: (items: PendingUploads["zipItems"]) => Promise<void>;
// -
getElectronFilesFromGoogleZip: (
filePath: string,
) => Promise<ElectronFile[]>;
/**
* Clear any pending uploads.
*/
clearPendingUploads: () => Promise<void>;
}
/**
@ -588,14 +625,41 @@ export interface FolderWatchSyncedFile {
}
/**
* When the user starts an upload, we remember the files they'd selected or drag
* and dropped so that we can resume (if needed) when the app restarts after
* being stopped in the middle of the uploads.
* A particular file within a zip file.
*
* When the user uploads a zip file, we create a "zip item" for each entry
* within the zip file. Each such entry is a tuple containing the path to the
* zip file itself, and the name of the entry within it.
*
* The name of the entry is not just the file name, but rather is the full path
* of the file within the zip. That is, each entry name uniquely identifies a
* particular file within the given zip.
*/
export type ZipItem = [zipPath: string, entryName: string];
/**
* State about pending and in-progress uploads.
*
* When the user starts an upload, we remember the files they'd selected (or
* drag-dropped) so that we can resume if they restart the app before the
* uploads have been completed. This state is kept on the Electron side, and
* this object is the IPC intermediary.
*/
export interface PendingUploads {
/** The collection to which we're uploading */
/**
* The collection to which we're uploading, or the root collection.
*
* This is the name of the collection (when uploading to a singular
* collection) or the root collection (when uploading to separate albums) to
* which these uploads are meant to go. See {@link CollectionMapping}.
*/
collectionName: string;
/* The upload can be either of a Google Takeout zip, or regular files */
type: "files" | "zips";
files: ElectronFile[];
/**
* Paths of regular files that need to be uploaded.
*/
filePaths: string[];
/**
* {@link ZipItem} (zip path and entry name) that need to be uploaded.
*/
zipItems: ZipItem[];
}

View file

@ -1,24 +1,63 @@
import { useCallback, useRef, useState } from "react";
/*
* TODO (MR): Understand how this is happening, and validate it further (on
* first glance this is correct).
*
/**
* [Note: File paths when running under Electron]
*
* We have access to the absolute path of the web {@link File} object when we
* are running in the context of our desktop app.
*
* https://www.electronjs.org/docs/latest/api/file-object
*
* This is in contrast to the `webkitRelativePath` that we get when we're
* running in the browser, which is the relative path to the directory that the
* user selected (or just the name of the file if the user selected or
* drag/dropped a single one).
*
* Note that this is a deprecated approach. From Electron docs:
*
* > Warning: The path property that Electron adds to the File interface is
* > deprecated and will be removed in a future Electron release. We recommend
* > you use `webUtils.getPathForFile` instead.
*/
export interface FileWithPath extends File {
readonly path?: string;
}
export default function useFileInput({ directory }: { directory?: boolean }) {
interface UseFileInputParams {
directory?: boolean;
accept?: string;
}
/**
* Return three things:
*
* - A function that can be called to trigger the showing of the select file /
* directory dialog.
*
* - The list of properties that should be passed to a dummy `input` element
* that needs to be created to anchor the select file dialog. This input HTML
* element is not going to be visible, but it needs to be part of the DOM for
* the open trigger to have effect.
*
* - The list of files that the user selected. This will be a list even if the
* user selected directories - in that case, it will be the recursive list of
* files within this directory.
*
* @param param0
*
* - If {@link directory} is true, the file open dialog will ask the user to
* select directories. Otherwise it'll ask the user to select files.
*
* - If {@link accept} is specified, it'll restrict the type of files that the
* user can select by setting the "accept" attribute of the underlying HTML
* input element we use to surface the file selector dialog. For value of
* accept can be an extension or a MIME type (See
* https://developer.mozilla.org/en-US/docs/Web/HTML/Attributes/accept).
*/
export default function useFileInput({
directory,
accept,
}: UseFileInputParams) {
const [selectedFiles, setSelectedFiles] = useState<File[]>([]);
const inputRef = useRef<HTMLInputElement>();
@ -48,6 +87,7 @@ export default function useFileInput({ directory }: { directory?: boolean }) {
...(directory ? { directory: "", webkitdirectory: "" } : {}),
ref: inputRef,
onChange: handleChange,
...(accept ? { accept } : {}),
}),
[],
);