diff --git a/.github/workflows/auth-release.yml b/.github/workflows/auth-release.yml index cc3e598e3..174b6c1d3 100644 --- a/.github/workflows/auth-release.yml +++ b/.github/workflows/auth-release.yml @@ -17,8 +17,8 @@ name: "Release (auth)" # We use a suffix like `-test` to indicate that these are test tags, and that # they belong to a pre-release. # -# If you need to do multiple tests, add a +x at the end of the tag. e.g. -# `auth-v1.2.3-test+1`. +# If you need to do multiple tests, add a .x at the end of the tag. e.g. +# `auth-v1.2.3-test.1`. # # Once the testing is done, also delete the tag(s) please. @@ -85,7 +85,7 @@ jobs: - name: Install dependencies for desktop build run: | sudo apt-get update -y - sudo apt-get install -y libsecret-1-dev libsodium-dev libwebkit2gtk-4.0-dev libfuse2 ninja-build libgtk-3-dev dpkg-dev pkg-config rpm libsqlite3-dev locate appindicator3-0.1 libappindicator3-dev libffi-dev libtiff5 + sudo apt-get install -y libsecret-1-dev libsodium-dev libwebkit2gtk-4.0-dev libfuse2 ninja-build libgtk-3-dev dpkg-dev pkg-config rpm patchelf libsqlite3-dev locate appindicator3-0.1 libappindicator3-dev libffi-dev libtiff5 sudo updatedb --localpaths='/usr/lib/x86_64-linux-gnu' - name: Install appimagetool diff --git a/.github/workflows/desktop-lint.yml b/.github/workflows/desktop-lint.yml new file mode 100644 index 000000000..0b8263f3d --- /dev/null +++ b/.github/workflows/desktop-lint.yml @@ -0,0 +1,30 @@ +name: "Lint (desktop)" + +on: + # Run on every push to a branch other than main that changes desktop/ + push: + branches-ignore: [main, "deploy/**"] + paths: + - "desktop/**" + - ".github/workflows/desktop-lint.yml" + +jobs: + lint: + runs-on: ubuntu-latest + defaults: + run: + working-directory: desktop + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup node and enable yarn caching + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: "yarn" + cache-dependency-path: "desktop/yarn.lock" + + - run: yarn 
install + + - run: yarn lint diff --git a/.github/workflows/mobile-internal-release.yml b/.github/workflows/mobile-internal-release.yml index 9779a5d7a..4ee736742 100644 --- a/.github/workflows/mobile-internal-release.yml +++ b/.github/workflows/mobile-internal-release.yml @@ -54,3 +54,4 @@ jobs: packageName: io.ente.photos releaseFiles: mobile/build/app/outputs/bundle/playstoreRelease/app-playstore-release.aab track: internal + changesNotSentForReview: true diff --git a/auth/linux/packaging/rpm/make_config.yaml b/auth/linux/packaging/rpm/make_config.yaml index 5d5f3aab5..e82dd63bf 100644 --- a/auth/linux/packaging/rpm/make_config.yaml +++ b/auth/linux/packaging/rpm/make_config.yaml @@ -11,7 +11,7 @@ display_name: Auth requires: - libsqlite3x - - webkit2gtk-4.0 + - webkit2gtk4.0 - libsodium - libsecret - libappindicator diff --git a/auth/pubspec.lock b/auth/pubspec.lock index 2d61b77c3..772416042 100644 --- a/auth/pubspec.lock +++ b/auth/pubspec.lock @@ -293,9 +293,9 @@ packages: dependency: "direct main" description: path: "packages/desktop_webview_window" - ref: HEAD - resolved-ref: "8cbbf9cd6efcfee5e0f420a36f7f8e7e64b667a1" - url: "https://github.com/MixinNetwork/flutter-plugins" + ref: fix-webkit-version + resolved-ref: fe2223e4edfecdbb3a97bb9e3ced73db4ae9d979 + url: "https://github.com/ente-io/flutter-desktopwebview-fork" source: git version: "0.2.4" device_info_plus: diff --git a/auth/pubspec.yaml b/auth/pubspec.yaml index 0487eb128..b7a35b699 100644 --- a/auth/pubspec.yaml +++ b/auth/pubspec.yaml @@ -20,7 +20,8 @@ dependencies: convert: ^3.1.1 desktop_webview_window: git: - url: https://github.com/MixinNetwork/flutter-plugins + url: https://github.com/ente-io/flutter-desktopwebview-fork + ref: fix-webkit-version path: packages/desktop_webview_window device_info_plus: ^9.1.1 dio: ^5.4.0 diff --git a/cli/README.md b/cli/README.md index 8fc9aa694..40858da0f 100644 --- a/cli/README.md +++ b/cli/README.md @@ -36,7 +36,8 @@ ente --help ### Accounts -If you wish, 
you can add multiple accounts (your own and that of your family members) and export all data using this tool. +If you wish, you can add multiple accounts (your own and that of your family +members) and export all data using this tool. #### Add an account @@ -44,6 +45,12 @@ If you wish, you can add multiple accounts (your own and that of your family mem ente account add ``` +> [!NOTE] +> +> `ente account add` does not create new accounts, it just adds pre-existing +> accounts to the list of accounts that the CLI knows about so that you can use +> them for other actions. + #### List accounts ```shell diff --git a/desktop/.eslintrc.js b/desktop/.eslintrc.js index a47eb483f..44d03ef0c 100644 --- a/desktop/.eslintrc.js +++ b/desktop/.eslintrc.js @@ -1,26 +1,36 @@ /* eslint-env node */ module.exports = { + root: true, extends: [ "eslint:recommended", "plugin:@typescript-eslint/eslint-recommended", - /* What we really want eventually */ - // "plugin:@typescript-eslint/strict-type-checked", - // "plugin:@typescript-eslint/stylistic-type-checked", + "plugin:@typescript-eslint/strict-type-checked", + "plugin:@typescript-eslint/stylistic-type-checked", ], - /* Temporarily add a global - Enhancement: Remove me */ - globals: { - NodeJS: "readonly", - }, plugins: ["@typescript-eslint"], parser: "@typescript-eslint/parser", parserOptions: { project: true, }, - root: true, ignorePatterns: [".eslintrc.js", "app", "out", "dist"], env: { es2022: true, node: true, }, + rules: { + /* Allow numbers to be used in template literals */ + "@typescript-eslint/restrict-template-expressions": [ + "error", + { + allowNumber: true, + }, + ], + /* Allow void expressions as the entire body of an arrow function */ + "@typescript-eslint/no-confusing-void-expression": [ + "error", + { + ignoreArrowShorthand: true, + }, + ], + }, }; diff --git a/desktop/.github/workflows/build.yml b/desktop/.github/workflows/build.yml deleted file mode 100644 index acd744c05..000000000 --- 
a/desktop/.github/workflows/build.yml +++ /dev/null @@ -1,55 +0,0 @@ -name: Build/release - -on: - push: - tags: - - v* - -jobs: - release: - runs-on: ${{ matrix.os }} - - strategy: - matrix: - os: [macos-latest, ubuntu-latest, windows-latest] - - steps: - - name: Check out Git repository - uses: actions/checkout@v3 - with: - submodules: recursive - - - name: Install Node.js, NPM and Yarn - uses: actions/setup-node@v3 - with: - node-version: 20 - - - name: Prepare for app notarization - if: startsWith(matrix.os, 'macos') - # Import Apple API key for app notarization on macOS - run: | - mkdir -p ~/private_keys/ - echo '${{ secrets.api_key }}' > ~/private_keys/AuthKey_${{ secrets.api_key_id }}.p8 - - - name: Install libarchive-tools for pacman build # Related https://github.com/electron-userland/electron-builder/issues/4181 - if: startsWith(matrix.os, 'ubuntu') - run: sudo apt-get install libarchive-tools - - - name: Ente Electron Builder Action - uses: ente-io/action-electron-builder@v1.0.0 - with: - # GitHub token, automatically provided to the action - # (No need to define this secret in the repo settings) - github_token: ${{ secrets.github_token }} - - # If the commit is tagged with a version (e.g. "v1.0.0"), - # release the app after building - release: ${{ startsWith(github.ref, 'refs/tags/v') }} - - mac_certs: ${{ secrets.mac_certs }} - mac_certs_password: ${{ secrets.mac_certs_password }} - env: - # macOS notarization API key - API_KEY_ID: ${{ secrets.api_key_id }} - API_KEY_ISSUER_ID: ${{ secrets.api_key_issuer_id}} - USE_HARD_LINKS: false diff --git a/desktop/.github/workflows/desktop-release.yml b/desktop/.github/workflows/desktop-release.yml new file mode 100644 index 000000000..7013d3e57 --- /dev/null +++ b/desktop/.github/workflows/desktop-release.yml @@ -0,0 +1,90 @@ +name: "Release" + +# This will create a new draft release with public artifacts. 
+# +# Note that a release will only get created if there is an associated tag +# (GitHub releases need a corresponding tag). +# +# The canonical source for this action is in the repository where we keep the +# source code for the Ente Photos desktop app: https://github.com/ente-io/ente +# +# However, it actually lives and runs in the repository that we use for making +# releases: https://github.com/ente-io/photos-desktop +# +# We need two repositories because Electron updater currently doesn't work well +# with monorepos. For more details, see `docs/release.md`. + +on: + push: + # Run when a tag matching the pattern "v*"" is pushed. + # + # See: [Note: Testing release workflows that are triggered by tags]. + tags: + - "v*" + +jobs: + release: + runs-on: ${{ matrix.os }} + + defaults: + run: + working-directory: desktop + + strategy: + matrix: + os: [macos-latest] + # Commented for testing + # os: [macos-latest, ubuntu-latest, windows-latest] + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + # Checkout the tag photosd-v1.x.x from the source code + # repository when we're invoked for tag v1.x.x on the releases + # repository. 
+ repository: ente-io/ente + ref: photosd-${{ github.ref_name }} + submodules: recursive + + - name: Setup node + uses: actions/setup-node@v4 + with: + node-version: 20 + + - name: Install dependencies + run: yarn install + + - name: Prepare for app notarization + if: startsWith(matrix.os, 'macos') + # Import Apple API key for app notarization on macOS + run: | + mkdir -p ~/private_keys/ + echo '${{ secrets.API_KEY }}' > ~/private_keys/AuthKey_${{ secrets.API_KEY_ID }}.p8 + + - name: Install libarchive-tools for pacman build + if: startsWith(matrix.os, 'ubuntu') + # See: + # https://github.com/electron-userland/electron-builder/issues/4181 + run: sudo apt-get install libarchive-tools + + - name: Build + uses: ente-io/action-electron-builder@v1.0.0 + with: + package_root: desktop + + # GitHub token, automatically provided to the action + # (No need to define this secret in the repo settings) + github_token: ${{ secrets.GITHUB_TOKEN }} + + # If the commit is tagged with a version (e.g. "v1.0.0"), + # release the app after building. + release: ${{ startsWith(github.ref, 'refs/tags/v') }} + + mac_certs: ${{ secrets.MAC_CERTS }} + mac_certs_password: ${{ secrets.MAC_CERTS_PASSWORD }} + env: + # macOS notarization API key details + API_KEY_ID: ${{ secrets.API_KEY_ID }} + API_KEY_ISSUER_ID: ${{ secrets.API_KEY_ISSUER_ID }} + USE_HARD_LINKS: false diff --git a/desktop/CHANGELOG.md b/desktop/CHANGELOG.md index 83d2123d8..eb118a424 100644 --- a/desktop/CHANGELOG.md +++ b/desktop/CHANGELOG.md @@ -1,5 +1,13 @@ # CHANGELOG +## v1.7.0 (Unreleased) + +v1.7 is a major rewrite to improve the security of our app. We have enabled +sandboxing and disabled node integration for the renderer process. All this +required restructuring our IPC mechanisms, which resulted in a lot of under the +hood changes. The outcome is a more secure app that also uses the latest and +greatest Electron recommendations. 
+ ## v1.6.63 ### New diff --git a/desktop/README.md b/desktop/README.md index 05149f5d0..39b7663fa 100644 --- a/desktop/README.md +++ b/desktop/README.md @@ -10,12 +10,6 @@ To know more about Ente, see [our main README](../README.md) or visit ## Building from source -> [!CAUTION] -> -> We're improving the security of the desktop app further by migrating to -> Electron's sandboxing and contextIsolation. These updates are still WIP and -> meanwhile the instructions below might not fully work on the main branch. - Fetch submodules ```sh diff --git a/desktop/docs/dependencies.md b/desktop/docs/dependencies.md index b159b13eb..605235703 100644 --- a/desktop/docs/dependencies.md +++ b/desktop/docs/dependencies.md @@ -13,7 +13,7 @@ Electron embeds Chromium and Node.js in the generated app's binary. The generated app thus consists of two separate processes - the _main_ process, and a _renderer_ process. -- The _main_ process is runs the embedded node. This process can deal with the +- The _main_ process runs the embedded node. This process can deal with the host OS - it is conceptually like a `node` repl running on your machine. In our case, the TypeScript code (in the `src/` directory) gets transpiled by `tsc` into JavaScript in the `build/app/` directory, which gets bundled in @@ -90,6 +90,9 @@ Some extra ones specific to the code here are: Unix commands in our `package.json` scripts. This allows us to use the same commands (like `ln`) across different platforms like Linux and Windows. +- [@tsconfig/recommended](https://github.com/tsconfig/bases) gives us a base + tsconfig for the Node.js version that our current Electron version uses. 
+ ## Functionality ### Format conversion diff --git a/desktop/docs/release.md b/desktop/docs/release.md index 7254e26fc..da807b572 100644 --- a/desktop/docs/release.md +++ b/desktop/docs/release.md @@ -1,43 +1,47 @@ ## Releases -> [!NOTE] -> -> TODO(MR): This document needs to be audited and changed as we do the first -> release from this new monorepo. +Conceptually, the release is straightforward: We push a tag, a GitHub workflow +gets triggered that creates a draft release with artifacts built from that tag. +We then publish that release. The download links on our website, and existing +apps already know how to check for the latest GitHub release and update +accordingly. -The Github Action that builds the desktop binaries is triggered by pushing a tag -matching the pattern `photos-desktop-v1.2.3`. This value should match the -version in `package.json`. +The complication comes by the fact that Electron Updater (the mechanism that we +use for auto updates) doesn't work well with monorepos. So we need to keep a +separate (non-mono) repository just for doing releases. -So the process for doing a release would be. +- Source code lives here, in [ente-io/ente](https://github.com/ente-io/ente). -1. Create a new branch (can be named anything). On this branch, include your - changes. +- Releases are done from + [ente-io/photos-desktop](https://github.com/ente-io/photos-desktop). -2. Mention the changes in `CHANGELOG.md`. +## Workflow -3. Changing the `version` in `package.json` to `1.x.x`. +The workflow is: -4. Commit and push to remote +1. Finalize the changes in the source repo. + + - Update the CHANGELOG. + - Update the version in `package.json` + - `git commit -m 'Release v1.x.x'` + - Open PR, merge into main. + +2. 
Tag this commit with a tag matching the pattern `photosd-v1.2.3`, where + `1.2.3` is the version in `package.json` ```sh - git add package.json && git commit -m 'Release v1.x.x' - git tag v1.x.x - git push && git push --tags + git tag photosd-v1.x.x + git push origin photosd-v1.x.x ``` -This by itself will already trigger a new release. The GitHub action will create -a new draft release that can then be used as descibed below. +3. Head over to the releases repository and run the trigger script, passing it + the tag _without_ the `photosd-` prefix. -To wrap up, we also need to merge back these changes into main. So for that, + ```sh + ./.github/trigger-release.sh v1.x.x + ``` -5. Open a PR for the branch that we're working on (where the above tag was - pushed from) to get it merged into main. - -6. In this PR, also increase the version number for the next release train. That - is, supposed we just released `v4.0.1`. Then we'll change the version number - in main to `v4.0.2-next.0`. Each pre-release will modify the `next.0` part. - Finally, at the time of the next release, this'll become `v4.0.2`. +## Post build The GitHub Action runs on Windows, Linux and macOS. It produces the artifacts defined in the `build` value in `package.json`. @@ -46,29 +50,11 @@ defined in the `build` value in `package.json`. - Linux - An AppImage, and 3 other packages (`.rpm`, `.deb`, `.pacman`) - macOS - A universal DMG -Additionally, the GitHub action notarizes the macOS DMG. For this it needs -credentials provided via GitHub secrets. +Additionally, the GitHub action notarizes and signs the macOS DMG (For this it +uses credentials provided via GitHub secrets). -During the build the Sentry webpack plugin checks to see if SENTRY_AUTH_TOKEN is -defined. If so, it uploads the sourcemaps for the renderer process to Sentry -(For our GitHub action, the SENTRY_AUTH_TOKEN is defined as a GitHub secret). 
- -The sourcemaps for the main (node) process are currently not sent to Sentry -(this works fine in practice since the node process files are not minified, we -only run `tsc`). - -Once the build is done, a draft release with all these artifacts attached is -created. The build is idempotent, so if something goes wrong and we need to -re-run the GitHub action, just delete the draft release (if it got created) and -start a new run by pushing a new tag (if some code changes are required). - -If no code changes are required, say the build failed for some transient network -or sentry issue, we can even be re-run by the build by going to Github Action -age and rerun from there. This will re-trigger for the same tag. - -If everything goes well, we'll have a release on GitHub, and the corresponding -source maps for the renderer process uploaded to Sentry. There isn't anything -else to do: +To rollout the build, we need to publish the draft release. Thereafter, +everything is automated: - The website automatically redirects to the latest release on GitHub when people try to download. @@ -76,7 +62,7 @@ else to do: - The file formats with support auto update (Windows `exe`, the Linux AppImage and the macOS DMG) also check the latest GitHub release automatically to download and apply the update (the rest of the formats don't support auto - updates). + updates yet). - We're not putting the desktop app in other stores currently. It is available as a `brew cask`, but we only had to open a PR to add the initial formula, @@ -87,6 +73,4 @@ else to do: We can also publish the draft releases by checking the "pre-release" option. Such releases don't cause any of the channels (our website, or the desktop app auto updater, or brew) to be notified, instead these are useful for giving links -to pre-release builds to customers. Generally, in the version number for these -we'll add a label to the version, e.g. the "beta.x" in `1.x.x-beta.x`. 
This -should be done both in `package.json`, and what we tag the commit with. +to pre-release builds to customers. diff --git a/desktop/package.json b/desktop/package.json index 69d54f75b..dc5ed9dba 100644 --- a/desktop/package.json +++ b/desktop/package.json @@ -1,6 +1,6 @@ { "name": "ente", - "version": "1.6.63", + "version": "1.7.0-beta.0", "private": true, "description": "Desktop client for Ente Photos", "author": "Ente ", @@ -15,8 +15,11 @@ "dev-main": "tsc && electron app/main.js", "dev-renderer": "cd ../web && yarn install && yarn dev:photos", "postinstall": "electron-builder install-app-deps", - "lint": "yarn prettier --check . && eslint --ext .ts src", - "lint-fix": "yarn prettier --write . && eslint --fix --ext .ts src" + "lint": "yarn prettier --check --log-level warn . && eslint --ext .ts src && yarn tsc", + "lint-fix": "yarn prettier --write --log-level warn . && eslint --fix --ext .ts src && yarn tsc" + }, + "resolutions": { + "jackspeak": "2.1.1" }, "dependencies": { "any-shell-escape": "^0.1", @@ -34,12 +37,13 @@ "onnxruntime-node": "^1.17" }, "devDependencies": { + "@tsconfig/node20": "^20.1.4", "@types/auto-launch": "^5.0", "@types/ffmpeg-static": "^3.0", "@typescript-eslint/eslint-plugin": "^7", "@typescript-eslint/parser": "^7", "concurrently": "^8", - "electron": "^29", + "electron": "^30", "electron-builder": "^24", "electron-builder-notarize": "^1.5", "eslint": "^8", diff --git a/desktop/src/main.ts b/desktop/src/main.ts index a8a8a5610..49b316206 100644 --- a/desktop/src/main.ts +++ b/desktop/src/main.ts @@ -8,18 +8,15 @@ * * https://www.electronjs.org/docs/latest/tutorial/process-model#the-main-process */ -import { nativeImage } from "electron"; -import { app, BrowserWindow, Menu, protocol, Tray } from "electron/main"; + +import { nativeImage, shell } from "electron/common"; +import type { WebContents } from "electron/main"; +import { BrowserWindow, Menu, Tray, app, protocol } from "electron/main"; import serveNextAt from 
"next-electron-server"; import { existsSync } from "node:fs"; import fs from "node:fs/promises"; import os from "node:os"; import path from "node:path"; -import { - addAllowOriginHeader, - handleDownloads, - handleExternalLinks, -} from "./main/init"; import { attachFSWatchIPCHandlers, attachIPCHandlers } from "./main/ipc"; import log, { initLogging } from "./main/log"; import { createApplicationMenu, createTrayContextMenu } from "./main/menu"; @@ -29,12 +26,12 @@ import { createWatcher } from "./main/services/watch"; import { userPreferences } from "./main/stores/user-preferences"; import { migrateLegacyWatchStoreIfNeeded } from "./main/stores/watch"; import { registerStreamProtocol } from "./main/stream"; -import { isDev } from "./main/utils-electron"; +import { isDev } from "./main/utils/electron"; /** * The URL where the renderer HTML is being served from. */ -export const rendererURL = "ente://app"; +const rendererURL = "ente://app"; /** * We want to hide our window instead of closing it when the user presses the @@ -130,50 +127,18 @@ const registerPrivilegedSchemes = () => { { scheme: "stream", privileges: { - // TODO(MR): Remove the commented bits if we don't end up - // needing them by the time the IPC refactoring is done. - - // Prevent the insecure origin issues when fetching this - // secure: true, - // Allow the web fetch API in the renderer to use this scheme. supportFetchAPI: true, - // Allow it to be used with video tags. - // stream: true, }, }, ]); }; -/** - * [Note: Increased disk cache for the desktop app] - * - * Set the "disk-cache-size" command line flag to ask the Chromium process to - * use a larger size for the caches that it keeps on disk. This allows us to use - * the web based caching mechanisms on both the web and the desktop app, just - * ask the embedded Chromium to be a bit more generous in disk usage when - * running as the desktop app. - * - * The size we provide is in bytes. 
- * https://www.electronjs.org/docs/latest/api/command-line-switches#--disk-cache-sizesize - * - * Note that increasing the disk cache size does not guarantee that Chromium - * will respect in verbatim, it uses its own heuristics atop this hint. - * https://superuser.com/questions/378991/what-is-chrome-default-cache-size-limit/1577693#1577693 - * - * See also: [Note: Caching files]. - */ -const increaseDiskCache = () => - app.commandLine.appendSwitch( - "disk-cache-size", - `${5 * 1024 * 1024 * 1024}`, // 5 GB - ); - /** * Create an return the {@link BrowserWindow} that will form our app's UI. * * This window will show the HTML served from {@link rendererURL}. */ -const createMainWindow = async () => { +const createMainWindow = () => { // Create the main window. This'll show our web content. const window = new BrowserWindow({ webPreferences: { @@ -187,7 +152,7 @@ const createMainWindow = async () => { show: false, }); - const wasAutoLaunched = await autoLauncher.wasAutoLaunched(); + const wasAutoLaunched = autoLauncher.wasAutoLaunched(); if (wasAutoLaunched) { // Don't automatically show the app's window if we were auto-launched. // On macOS, also hide the dock icon on macOS. @@ -201,7 +166,7 @@ const createMainWindow = async () => { if (isDev) window.webContents.openDevTools(); window.webContents.on("render-process-gone", (_, details) => { - log.error(`render-process-gone: ${details}`); + log.error(`render-process-gone: ${details.reason}`); window.webContents.reload(); }); @@ -209,7 +174,7 @@ const createMainWindow = async () => { // webContents is not responding to input messages for > 30 seconds." 
window.webContents.on("unresponsive", () => { log.error( - "Main window's webContents are unresponsive, will restart the renderer process", + "MainWindow's webContents are unresponsive, will restart the renderer process", ); window.webContents.forcefullyCrashRenderer(); }); @@ -230,7 +195,7 @@ const createMainWindow = async () => { }); window.on("show", () => { - if (process.platform == "darwin") app.dock.show(); + if (process.platform == "darwin") void app.dock.show(); }); // Let ipcRenderer know when mainWindow is in the foreground so that it can @@ -240,6 +205,58 @@ const createMainWindow = async () => { return window; }; +/** + * Automatically set the save path for user initiated downloads to the system's + * "downloads" directory instead of asking the user to select a save location. + */ +export const setDownloadPath = (webContents: WebContents) => { + webContents.session.on("will-download", (_, item) => { + item.setSavePath( + uniqueSavePath(app.getPath("downloads"), item.getFilename()), + ); + }); +}; + +const uniqueSavePath = (dirPath: string, fileName: string) => { + const { name, ext } = path.parse(fileName); + + let savePath = path.join(dirPath, fileName); + let n = 1; + while (existsSync(savePath)) { + const suffixedName = [`${name}(${n})`, ext].filter((x) => x).join("."); + savePath = path.join(dirPath, suffixedName); + n++; + } + return savePath; +}; + +/** + * Allow opening external links, e.g. when the user clicks on the "Feature + * requests" button in the sidebar (to open our GitHub repository), or when they + * click the "Support" button to send an email to support. + * + * @param webContents The renderer to configure. + */ +export const allowExternalLinks = (webContents: WebContents) => { + // By default, if the user were open a link, say + // https://github.com/ente-io/ente/discussions, then it would open a _new_ + // BrowserWindow within our app. 
+ // + // This is not the behaviour we want; what we want is to ask the system to + // handle the link (e.g. open the URL in the default browser, or if it is a + // mailto: link, then open the user's mail client). + // + // Returning `action` "deny" accomplishes this. + webContents.setWindowOpenHandler(({ url }) => { + if (!url.startsWith(rendererURL)) { + void shell.openExternal(url); + return { action: "deny" }; + } else { + return { action: "allow" }; + } + }); +}; + /** * Add an icon for our app in the system tray. * @@ -272,24 +289,24 @@ const setupTrayItem = (mainWindow: BrowserWindow) => { * Older versions of our app used to maintain a cache dir using the main * process. This has been deprecated in favor of using a normal web cache. * - * See [Note: Increased disk cache for the desktop app] - * * Delete the old cache dir if it exists. This code was added March 2024, and * can be removed after some time once most people have upgraded to newer * versions. */ const deleteLegacyDiskCacheDirIfExists = async () => { - // The existing code was passing "cache" as a parameter to getPath. This is - // incorrect if we go by the types - "cache" is not a valid value for the - // parameter to `app.getPath`. + // The existing code was passing "cache" as a parameter to getPath. // - // It might be an issue in the types, since at runtime it seems to work. For - // example, on macOS I get `~/Library/Caches`. + // However, "cache" is not a valid parameter to getPath. It works! (for + // example, on macOS I get `~/Library/Caches`), but it is intentionally not + // documented as part of the public API: + // + // - docs: remove "cache" from app.getPath + // https://github.com/electron/electron/pull/33509 // // Irrespective, we replicate the original behaviour so that we get back the - // same path that the old got was getting. + // same path that the old code was getting. // - // @ts-expect-error + // @ts-expect-error "cache" works but is not part of the public API. 
const cacheDir = path.join(app.getPath("cache"), "ente"); if (existsSync(cacheDir)) { log.info(`Removing legacy disk cache from ${cacheDir}`); @@ -326,7 +343,6 @@ const main = () => { // The order of the next two calls is important setupRendererServer(); registerPrivilegedSchemes(); - increaseDiskCache(); migrateLegacyWatchStoreIfNeeded(); app.on("second-instance", () => { @@ -341,32 +357,35 @@ const main = () => { // Emitted once, when Electron has finished initializing. // // Note that some Electron APIs can only be used after this event occurs. - app.on("ready", async () => { - // Create window and prepare for renderer - mainWindow = await createMainWindow(); - attachIPCHandlers(); - attachFSWatchIPCHandlers(createWatcher(mainWindow)); - registerStreamProtocol(); - handleDownloads(mainWindow); - handleExternalLinks(mainWindow); - addAllowOriginHeader(mainWindow); + void app.whenReady().then(() => { + void (async () => { + // Create window and prepare for the renderer. + mainWindow = createMainWindow(); + attachIPCHandlers(); + attachFSWatchIPCHandlers(createWatcher(mainWindow)); + registerStreamProtocol(); - // Start loading the renderer - mainWindow.loadURL(rendererURL); + // Configure the renderer's environment. + setDownloadPath(mainWindow.webContents); + allowExternalLinks(mainWindow.webContents); - // Continue on with the rest of the startup sequence - Menu.setApplicationMenu(await createApplicationMenu(mainWindow)); - setupTrayItem(mainWindow); - if (!isDev) setupAutoUpdater(mainWindow); + // Start loading the renderer. + void mainWindow.loadURL(rendererURL); - try { - deleteLegacyDiskCacheDirIfExists(); - deleteLegacyKeysStoreIfExists(); - } catch (e) { - // Log but otherwise ignore errors during non-critical startup - // actions. - log.error("Ignoring startup error", e); - } + // Continue on with the rest of the startup sequence. 
+ Menu.setApplicationMenu(await createApplicationMenu(mainWindow)); + setupTrayItem(mainWindow); + if (!isDev) setupAutoUpdater(mainWindow); + + try { + await deleteLegacyDiskCacheDirIfExists(); + await deleteLegacyKeysStoreIfExists(); + } catch (e) { + // Log but otherwise ignore errors during non-critical startup + // actions. + log.error("Ignoring startup error", e); + } + })(); }); // This is a macOS only event. Show our window when the user activates the diff --git a/desktop/src/main/dialogs.ts b/desktop/src/main/dialogs.ts deleted file mode 100644 index 2f91f5c40..000000000 --- a/desktop/src/main/dialogs.ts +++ /dev/null @@ -1,54 +0,0 @@ -import { dialog } from "electron/main"; -import path from "node:path"; -import type { ElectronFile } from "../types/ipc"; -import { getDirFilePaths, getElectronFile } from "./services/fs"; -import { getElectronFilesFromGoogleZip } from "./services/upload"; - -export const selectDirectory = async () => { - const result = await dialog.showOpenDialog({ - properties: ["openDirectory"], - }); - if (result.filePaths && result.filePaths.length > 0) { - return result.filePaths[0]?.split(path.sep)?.join(path.posix.sep); - } -}; - -export const showUploadFilesDialog = async () => { - const selectedFiles = await dialog.showOpenDialog({ - properties: ["openFile", "multiSelections"], - }); - const filePaths = selectedFiles.filePaths; - return await Promise.all(filePaths.map(getElectronFile)); -}; - -export const showUploadDirsDialog = async () => { - const dir = await dialog.showOpenDialog({ - properties: ["openDirectory", "multiSelections"], - }); - - let filePaths: string[] = []; - for (const dirPath of dir.filePaths) { - filePaths = [...filePaths, ...(await getDirFilePaths(dirPath))]; - } - - return await Promise.all(filePaths.map(getElectronFile)); -}; - -export const showUploadZipDialog = async () => { - const selectedFiles = await dialog.showOpenDialog({ - properties: ["openFile", "multiSelections"], - filters: [{ name: "Zip File", 
extensions: ["zip"] }], - }); - const filePaths = selectedFiles.filePaths; - - let files: ElectronFile[] = []; - - for (const filePath of filePaths) { - files = [...files, ...(await getElectronFilesFromGoogleZip(filePath))]; - } - - return { - zipPaths: filePaths, - files, - }; -}; diff --git a/desktop/src/main/fs.ts b/desktop/src/main/fs.ts deleted file mode 100644 index fc181cf46..000000000 --- a/desktop/src/main/fs.ts +++ /dev/null @@ -1,31 +0,0 @@ -/** - * @file file system related functions exposed over the context bridge. - */ -import { existsSync } from "node:fs"; -import fs from "node:fs/promises"; - -export const fsExists = (path: string) => existsSync(path); - -export const fsRename = (oldPath: string, newPath: string) => - fs.rename(oldPath, newPath); - -export const fsMkdirIfNeeded = (dirPath: string) => - fs.mkdir(dirPath, { recursive: true }); - -export const fsRmdir = (path: string) => fs.rmdir(path); - -export const fsRm = (path: string) => fs.rm(path); - -export const fsReadTextFile = async (filePath: string) => - fs.readFile(filePath, "utf-8"); - -export const fsWriteFile = (path: string, contents: string) => - fs.writeFile(path, contents); - -export const fsIsDir = async (dirPath: string) => { - if (!existsSync(dirPath)) return false; - const stat = await fs.stat(dirPath); - return stat.isDirectory(); -}; - -export const fsSize = (path: string) => fs.stat(path).then((s) => s.size); diff --git a/desktop/src/main/init.ts b/desktop/src/main/init.ts deleted file mode 100644 index 1b078dc98..000000000 --- a/desktop/src/main/init.ts +++ /dev/null @@ -1,63 +0,0 @@ -import { BrowserWindow, app, shell } from "electron"; -import { existsSync } from "node:fs"; -import path from "node:path"; -import { rendererURL } from "../main"; - -export function handleDownloads(mainWindow: BrowserWindow) { - mainWindow.webContents.session.on("will-download", (_, item) => { - item.setSavePath( - getUniqueSavePath(item.getFilename(), app.getPath("downloads")), - ); - }); 
-} - -function getUniqueSavePath(filename: string, directory: string): string { - let uniqueFileSavePath = path.join(directory, filename); - const { name: filenameWithoutExtension, ext: extension } = - path.parse(filename); - let n = 0; - while (existsSync(uniqueFileSavePath)) { - n++; - // filter need to remove undefined extension from the array - // else [`${fileName}`, undefined].join(".") will lead to `${fileName}.` as joined string - const fileNameWithNumberedSuffix = [ - `${filenameWithoutExtension}(${n})`, - extension, - ] - .filter((x) => x) // filters out undefined/null values - .join(""); - uniqueFileSavePath = path.join(directory, fileNameWithNumberedSuffix); - } - return uniqueFileSavePath; -} - -export function handleExternalLinks(mainWindow: BrowserWindow) { - mainWindow.webContents.setWindowOpenHandler(({ url }) => { - if (!url.startsWith(rendererURL)) { - shell.openExternal(url); - return { action: "deny" }; - } else { - return { action: "allow" }; - } - }); -} - -export function addAllowOriginHeader(mainWindow: BrowserWindow) { - mainWindow.webContents.session.webRequest.onHeadersReceived( - (details, callback) => { - details.responseHeaders = lowerCaseHeaders(details.responseHeaders); - details.responseHeaders["access-control-allow-origin"] = ["*"]; - callback({ - responseHeaders: details.responseHeaders, - }); - }, - ); -} - -function lowerCaseHeaders(responseHeaders: Record) { - const headers: Record = {}; - for (const key of Object.keys(responseHeaders)) { - headers[key.toLowerCase()] = responseHeaders[key]; - } - return headers; -} diff --git a/desktop/src/main/ipc.ts b/desktop/src/main/ipc.ts index 2475d7789..f59969202 100644 --- a/desktop/src/main/ipc.ts +++ b/desktop/src/main/ipc.ts @@ -14,13 +14,21 @@ import type { CollectionMapping, FolderWatch, PendingUploads, + ZipItem, } from "../types/ipc"; +import { logToDisk } from "./log"; import { + appVersion, + skipAppUpdate, + updateAndRestart, + updateOnNextRestart, +} from 
"./services/app-update"; +import { + openDirectory, + openLogDirectory, selectDirectory, - showUploadDirsDialog, - showUploadFilesDialog, - showUploadZipDialog, -} from "./dialogs"; +} from "./services/dir"; +import { ffmpegExec } from "./services/ffmpeg"; import { fsExists, fsIsDir, @@ -29,18 +37,8 @@ import { fsRename, fsRm, fsRmdir, - fsSize, fsWriteFile, -} from "./fs"; -import { logToDisk } from "./log"; -import { - appVersion, - skipAppUpdate, - updateAndRestart, - updateOnNextRestart, -} from "./services/app-update"; -import { ffmpegExec } from "./services/ffmpeg"; -import { getDirFiles } from "./services/fs"; +} from "./services/fs"; import { convertToJPEG, generateImageThumbnail } from "./services/image"; import { clipImageEmbedding, @@ -53,20 +51,23 @@ import { saveEncryptionKey, } from "./services/store"; import { - getElectronFilesFromGoogleZip, + clearPendingUploads, + listZipItems, + markUploadedFiles, + markUploadedZipItems, + pathOrZipItemSize, pendingUploads, - setPendingUploadCollection, - setPendingUploadFiles, + setPendingUploads, } from "./services/upload"; import { watchAdd, watchFindFiles, watchGet, watchRemove, + watchReset, watchUpdateIgnoredFiles, watchUpdateSyncedFiles, } from "./services/watch"; -import { openDirectory, openLogDirectory } from "./utils-electron"; /** * Listen for IPC events sent/invoked by the renderer process, and route them to @@ -93,16 +94,20 @@ export const attachIPCHandlers = () => { ipcMain.handle("appVersion", () => appVersion()); - ipcMain.handle("openDirectory", (_, dirPath) => openDirectory(dirPath)); + ipcMain.handle("openDirectory", (_, dirPath: string) => + openDirectory(dirPath), + ); ipcMain.handle("openLogDirectory", () => openLogDirectory()); // See [Note: Catching exception during .send/.on] - ipcMain.on("logToDisk", (_, message) => logToDisk(message)); + ipcMain.on("logToDisk", (_, message: string) => logToDisk(message)); + + ipcMain.handle("selectDirectory", () => selectDirectory()); 
ipcMain.on("clearStores", () => clearStores()); - ipcMain.handle("saveEncryptionKey", (_, encryptionKey) => + ipcMain.handle("saveEncryptionKey", (_, encryptionKey: string) => saveEncryptionKey(encryptionKey), ); @@ -112,21 +117,23 @@ export const attachIPCHandlers = () => { ipcMain.on("updateAndRestart", () => updateAndRestart()); - ipcMain.on("updateOnNextRestart", (_, version) => + ipcMain.on("updateOnNextRestart", (_, version: string) => updateOnNextRestart(version), ); - ipcMain.on("skipAppUpdate", (_, version) => skipAppUpdate(version)); + ipcMain.on("skipAppUpdate", (_, version: string) => skipAppUpdate(version)); // - FS - ipcMain.handle("fsExists", (_, path) => fsExists(path)); + ipcMain.handle("fsExists", (_, path: string) => fsExists(path)); ipcMain.handle("fsRename", (_, oldPath: string, newPath: string) => fsRename(oldPath, newPath), ); - ipcMain.handle("fsMkdirIfNeeded", (_, dirPath) => fsMkdirIfNeeded(dirPath)); + ipcMain.handle("fsMkdirIfNeeded", (_, dirPath: string) => + fsMkdirIfNeeded(dirPath), + ); ipcMain.handle("fsRmdir", (_, path: string) => fsRmdir(path)); @@ -140,8 +147,6 @@ export const attachIPCHandlers = () => { ipcMain.handle("fsIsDir", (_, dirPath: string) => fsIsDir(dirPath)); - ipcMain.handle("fsSize", (_, path: string) => fsSize(path)); - // - Conversion ipcMain.handle("convertToJPEG", (_, imageData: Uint8Array) => @@ -152,10 +157,10 @@ export const attachIPCHandlers = () => { "generateImageThumbnail", ( _, - dataOrPath: Uint8Array | string, + dataOrPathOrZipItem: Uint8Array | string | ZipItem, maxDimension: number, maxSize: number, - ) => generateImageThumbnail(dataOrPath, maxDimension, maxSize), + ) => generateImageThumbnail(dataOrPathOrZipItem, maxDimension, maxSize), ); ipcMain.handle( @@ -163,10 +168,16 @@ export const attachIPCHandlers = () => { ( _, command: string[], - dataOrPath: Uint8Array | string, + dataOrPathOrZipItem: Uint8Array | string | ZipItem, outputFileExtension: string, timeoutMS: number, - ) => 
ffmpegExec(command, dataOrPath, outputFileExtension, timeoutMS), + ) => + ffmpegExec( + command, + dataOrPathOrZipItem, + outputFileExtension, + timeoutMS, + ), ); // - ML @@ -187,37 +198,33 @@ export const attachIPCHandlers = () => { faceEmbedding(input), ); - // - File selection - - ipcMain.handle("selectDirectory", () => selectDirectory()); - - ipcMain.handle("showUploadFilesDialog", () => showUploadFilesDialog()); - - ipcMain.handle("showUploadDirsDialog", () => showUploadDirsDialog()); - - ipcMain.handle("showUploadZipDialog", () => showUploadZipDialog()); - // - Upload + ipcMain.handle("listZipItems", (_, zipPath: string) => + listZipItems(zipPath), + ); + + ipcMain.handle("pathOrZipItemSize", (_, pathOrZipItem: string | ZipItem) => + pathOrZipItemSize(pathOrZipItem), + ); + ipcMain.handle("pendingUploads", () => pendingUploads()); - ipcMain.handle("setPendingUploadCollection", (_, collectionName: string) => - setPendingUploadCollection(collectionName), + ipcMain.handle("setPendingUploads", (_, pendingUploads: PendingUploads) => + setPendingUploads(pendingUploads), ); ipcMain.handle( - "setPendingUploadFiles", - (_, type: PendingUploads["type"], filePaths: string[]) => - setPendingUploadFiles(type, filePaths), + "markUploadedFiles", + (_, paths: PendingUploads["filePaths"]) => markUploadedFiles(paths), ); - // - - - ipcMain.handle("getElectronFilesFromGoogleZip", (_, filePath: string) => - getElectronFilesFromGoogleZip(filePath), + ipcMain.handle( + "markUploadedZipItems", + (_, items: PendingUploads["zipItems"]) => markUploadedZipItems(items), ); - ipcMain.handle("getDirFiles", (_, dirPath: string) => getDirFiles(dirPath)); + ipcMain.handle("clearPendingUploads", () => clearPendingUploads()); }; /** @@ -257,4 +264,6 @@ export const attachFSWatchIPCHandlers = (watcher: FSWatcher) => { ipcMain.handle("watchFindFiles", (_, folderPath: string) => watchFindFiles(folderPath), ); + + ipcMain.handle("watchReset", () => watchReset(watcher)); }; diff --git 
a/desktop/src/main/log.ts b/desktop/src/main/log.ts index 22ebb5300..cf1404a90 100644 --- a/desktop/src/main/log.ts +++ b/desktop/src/main/log.ts @@ -1,15 +1,15 @@ import log from "electron-log"; import util from "node:util"; -import { isDev } from "./utils-electron"; +import { isDev } from "./utils/electron"; /** * Initialize logging in the main process. * * This will set our underlying logger up to log to a file named `ente.log`, * - * - on Linux at ~/.config/ente/logs/main.log - * - on macOS at ~/Library/Logs/ente/main.log - * - on Windows at %USERPROFILE%\AppData\Roaming\ente\logs\main.log + * - on Linux at ~/.config/ente/logs/ente.log + * - on macOS at ~/Library/Logs/ente/ente.log + * - on Windows at %USERPROFILE%\AppData\Roaming\ente\logs\ente.log * * On dev builds, it will also log to the console. */ @@ -65,7 +65,7 @@ const logError_ = (message: string) => { if (isDev) console.error(`[error] ${message}`); }; -const logInfo = (...params: any[]) => { +const logInfo = (...params: unknown[]) => { const message = params .map((p) => (typeof p == "string" ? p : util.inspect(p))) .join(" "); @@ -73,7 +73,7 @@ const logInfo = (...params: any[]) => { if (isDev) console.log(`[info] ${message}`); }; -const logDebug = (param: () => any) => { +const logDebug = (param: () => unknown) => { if (isDev) { const p = param(); console.log(`[debug] ${typeof p == "string" ? 
p : util.inspect(p)}`); diff --git a/desktop/src/main/menu.ts b/desktop/src/main/menu.ts index 12b1ee17d..b6fa7acfe 100644 --- a/desktop/src/main/menu.ts +++ b/desktop/src/main/menu.ts @@ -8,8 +8,9 @@ import { import { allowWindowClose } from "../main"; import { forceCheckForAppUpdates } from "./services/app-update"; import autoLauncher from "./services/auto-launcher"; +import { openLogDirectory } from "./services/dir"; import { userPreferences } from "./stores/user-preferences"; -import { isDev, openLogDirectory } from "./utils-electron"; +import { isDev } from "./utils/electron"; /** Create and return the entries in the app's main menu bar */ export const createApplicationMenu = async (mainWindow: BrowserWindow) => { @@ -18,7 +19,7 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => { // Whenever the menu is redrawn the current value of these variables is used // to set the checked state for the various settings checkboxes. let isAutoLaunchEnabled = await autoLauncher.isEnabled(); - let shouldHideDockIcon = userPreferences.get("hideDockIcon"); + let shouldHideDockIcon = !!userPreferences.get("hideDockIcon"); const macOSOnly = (options: MenuItemConstructorOptions[]) => process.platform == "darwin" ? 
options : []; @@ -29,12 +30,12 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => { const handleCheckForUpdates = () => forceCheckForAppUpdates(mainWindow); const handleViewChangelog = () => - shell.openExternal( + void shell.openExternal( "https://github.com/ente-io/ente/blob/main/desktop/CHANGELOG.md", ); const toggleAutoLaunch = () => { - autoLauncher.toggleAutoLaunch(); + void autoLauncher.toggleAutoLaunch(); isAutoLaunchEnabled = !isAutoLaunchEnabled; }; @@ -45,13 +46,15 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => { shouldHideDockIcon = !shouldHideDockIcon; }; - const handleHelp = () => shell.openExternal("https://help.ente.io/photos/"); + const handleHelp = () => + void shell.openExternal("https://help.ente.io/photos/"); - const handleSupport = () => shell.openExternal("mailto:support@ente.io"); + const handleSupport = () => + void shell.openExternal("mailto:support@ente.io"); - const handleBlog = () => shell.openExternal("https://ente.io/blog/"); + const handleBlog = () => void shell.openExternal("https://ente.io/blog/"); - const handleViewLogs = openLogDirectory; + const handleViewLogs = () => void openLogDirectory(); return Menu.buildFromTemplate([ { diff --git a/desktop/src/main/services/app-update.ts b/desktop/src/main/services/app-update.ts index e20d42fb7..8d66cb8c3 100644 --- a/desktop/src/main/services/app-update.ts +++ b/desktop/src/main/services/app-update.ts @@ -12,8 +12,8 @@ export const setupAutoUpdater = (mainWindow: BrowserWindow) => { autoUpdater.autoDownload = false; const oneDay = 1 * 24 * 60 * 60 * 1000; - setInterval(() => checkForUpdatesAndNotify(mainWindow), oneDay); - checkForUpdatesAndNotify(mainWindow); + setInterval(() => void checkForUpdatesAndNotify(mainWindow), oneDay); + void checkForUpdatesAndNotify(mainWindow); }; /** @@ -22,7 +22,7 @@ export const setupAutoUpdater = (mainWindow: BrowserWindow) => { export const forceCheckForAppUpdates = (mainWindow: BrowserWindow) => 
{ userPreferences.delete("skipAppVersion"); userPreferences.delete("muteUpdateNotificationVersion"); - checkForUpdatesAndNotify(mainWindow); + void checkForUpdatesAndNotify(mainWindow); }; const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => { @@ -36,18 +36,21 @@ const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => { log.debug(() => `Update check found version ${version}`); + if (!version) + throw new Error("Unexpected empty version obtained from auto-updater"); + if (compareVersions(version, app.getVersion()) <= 0) { log.debug(() => "Skipping update, already at latest version"); return; } - if (version === userPreferences.get("skipAppVersion")) { + if (version == userPreferences.get("skipAppVersion")) { log.info(`User chose to skip version ${version}`); return; } const mutedVersion = userPreferences.get("muteUpdateNotificationVersion"); - if (version === mutedVersion) { + if (version == mutedVersion) { log.info(`User has muted update notifications for version ${version}`); return; } @@ -56,7 +59,7 @@ const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => { mainWindow.webContents.send("appUpdateAvailable", update); log.debug(() => "Attempting auto update"); - autoUpdater.downloadUpdate(); + await autoUpdater.downloadUpdate(); let timeoutId: ReturnType; const fiveMinutes = 5 * 60 * 1000; diff --git a/desktop/src/main/services/auto-launcher.ts b/desktop/src/main/services/auto-launcher.ts index c704f7399..4e97a0225 100644 --- a/desktop/src/main/services/auto-launcher.ts +++ b/desktop/src/main/services/auto-launcher.ts @@ -38,7 +38,7 @@ class AutoLauncher { } } - async wasAutoLaunched() { + wasAutoLaunched() { if (this.autoLaunch) { return app.commandLine.hasSwitch("hidden"); } else { diff --git a/desktop/src/main/services/dir.ts b/desktop/src/main/services/dir.ts new file mode 100644 index 000000000..d375648f6 --- /dev/null +++ b/desktop/src/main/services/dir.ts @@ -0,0 +1,51 @@ +import { shell } from "electron/common"; 
+import { app, dialog } from "electron/main";
+import path from "node:path";
+import { posixPath } from "../utils/electron";
+
+export const selectDirectory = async () => {
+    const result = await dialog.showOpenDialog({
+        properties: ["openDirectory"],
+    });
+    const dirPath = result.filePaths[0];
+    return dirPath ? posixPath(dirPath) : undefined;
+};
+
+/**
+ * Open the given {@link dirPath} in the system's folder viewer.
+ *
+ * For example, on macOS this'll open {@link dirPath} in Finder.
+ */
+export const openDirectory = async (dirPath: string) => {
+    // We need to use `path.normalize` because `shell.openPath` does not support
+    // POSIX path, it needs to be a platform specific path:
+    // https://github.com/electron/electron/issues/28831#issuecomment-826370589
+    const res = await shell.openPath(path.normalize(dirPath));
+    // `shell.openPath` resolves with a string containing the error message
+    // corresponding to the failure if a failure occurred, otherwise "".
+    if (res) throw new Error(`Failed to open directory ${dirPath}: ${res}`);
+};
+
+/**
+ * Open the app's log directory in the system's folder viewer.
+ *
+ * @see {@link openDirectory}
+ */
+export const openLogDirectory = () => openDirectory(logDirectoryPath());
+
+/**
+ * Return the path where the logs for the app are saved.
+ *
+ * [Note: Electron app paths]
+ *
+ * By default, these paths are at the following locations:
+ *
+ * - macOS: `~/Library/Application Support/ente`
+ * - Linux: `~/.config/ente`
+ * - Windows: `%APPDATA%`, e.g. 
`C:\Users\<username>\AppData\Local\ente`
+ * - Windows: C:\Users\<username>\AppData\Local\<app name>
+ *
+ * https://www.electronjs.org/docs/latest/api/app
+ *
+ */
+const logDirectoryPath = () => app.getPath("logs");
diff --git a/desktop/src/main/services/ffmpeg.ts b/desktop/src/main/services/ffmpeg.ts
index ed3542f6a..0a5c4eed2 100644
--- a/desktop/src/main/services/ffmpeg.ts
+++ b/desktop/src/main/services/ffmpeg.ts
@@ -1,9 +1,14 @@
 import pathToFfmpeg from "ffmpeg-static";
 import fs from "node:fs/promises";
+import type { ZipItem } from "../../types/ipc";
 import log from "../log";
-import { withTimeout } from "../utils";
-import { execAsync } from "../utils-electron";
-import { deleteTempFile, makeTempFilePath } from "../utils-temp";
+import { ensure, withTimeout } from "../utils/common";
+import { execAsync } from "../utils/electron";
+import {
+    deleteTempFile,
+    makeFileForDataOrPathOrZipItem,
+    makeTempFilePath,
+} from "../utils/temp";
 
 /* Duplicated in the web app's code (used by the WASM FFmpeg implementation). */
 const ffmpegPathPlaceholder = "FFMPEG";
@@ -39,28 +44,24 @@ const outputPathPlaceholder = "OUTPUT";
  */
 export const ffmpegExec = async (
     command: string[],
-    dataOrPath: Uint8Array | string,
+    dataOrPathOrZipItem: Uint8Array | string | ZipItem,
     outputFileExtension: string,
     timeoutMS: number,
 ): Promise => {
-    // TODO (MR): This currently copies files for both input and output. This
-    // needs to be tested extremely large video files when invoked downstream of
-    // `convertToMP4` in the web code.
+    // TODO (MR): This currently copies files for both input (when
+    // dataOrPathOrZipItem is data) and output. This needs to be tested with
+    // extremely large video files when invoked downstream of `convertToMP4` in
+    // the web code.
- let inputFilePath: string; - let isInputFileTemporary: boolean; - if (dataOrPath instanceof Uint8Array) { - inputFilePath = await makeTempFilePath(); - isInputFileTemporary = true; - } else { - inputFilePath = dataOrPath; - isInputFileTemporary = false; - } + const { + path: inputFilePath, + isFileTemporary: isInputFileTemporary, + writeToTemporaryFile: writeToTemporaryInputFile, + } = await makeFileForDataOrPathOrZipItem(dataOrPathOrZipItem); const outputFilePath = await makeTempFilePath(outputFileExtension); try { - if (dataOrPath instanceof Uint8Array) - await fs.writeFile(inputFilePath, dataOrPath); + await writeToTemporaryInputFile(); const cmd = substitutePlaceholders( command, @@ -109,5 +110,5 @@ const ffmpegBinaryPath = () => { // This substitution of app.asar by app.asar.unpacked is suggested by the // ffmpeg-static library author themselves: // https://github.com/eugeneware/ffmpeg-static/issues/16 - return pathToFfmpeg.replace("app.asar", "app.asar.unpacked"); + return ensure(pathToFfmpeg).replace("app.asar", "app.asar.unpacked"); }; diff --git a/desktop/src/main/services/fs.ts b/desktop/src/main/services/fs.ts index 30ccf146b..4570a4a33 100644 --- a/desktop/src/main/services/fs.ts +++ b/desktop/src/main/services/fs.ts @@ -1,177 +1,30 @@ -import StreamZip from "node-stream-zip"; +/** + * @file file system related functions exposed over the context bridge. 
+ */ + import { existsSync } from "node:fs"; import fs from "node:fs/promises"; -import path from "node:path"; -import { ElectronFile } from "../../types/ipc"; -import log from "../log"; -const FILE_STREAM_CHUNK_SIZE: number = 4 * 1024 * 1024; +export const fsExists = (path: string) => existsSync(path); -export async function getDirFiles(dirPath: string) { - const files = await getDirFilePaths(dirPath); - const electronFiles = await Promise.all(files.map(getElectronFile)); - return electronFiles; -} +export const fsRename = (oldPath: string, newPath: string) => + fs.rename(oldPath, newPath); -// https://stackoverflow.com/a/63111390 -export const getDirFilePaths = async (dirPath: string) => { - if (!(await fs.stat(dirPath)).isDirectory()) { - return [dirPath]; - } +export const fsMkdirIfNeeded = (dirPath: string) => + fs.mkdir(dirPath, { recursive: true }); - let files: string[] = []; - const filePaths = await fs.readdir(dirPath); +export const fsRmdir = (path: string) => fs.rmdir(path); - for (const filePath of filePaths) { - const absolute = path.join(dirPath, filePath); - files = [...files, ...(await getDirFilePaths(absolute))]; - } +export const fsRm = (path: string) => fs.rm(path); - return files; -}; - -const getFileStream = async (filePath: string) => { - const file = await fs.open(filePath, "r"); - let offset = 0; - const readableStream = new ReadableStream({ - async pull(controller) { - try { - const buff = new Uint8Array(FILE_STREAM_CHUNK_SIZE); - const bytesRead = (await file.read( - buff, - 0, - FILE_STREAM_CHUNK_SIZE, - offset, - )) as unknown as number; - offset += bytesRead; - if (bytesRead === 0) { - controller.close(); - await file.close(); - } else { - controller.enqueue(buff.slice(0, bytesRead)); - } - } catch (e) { - await file.close(); - } - }, - async cancel() { - await file.close(); - }, - }); - return readableStream; -}; - -export async function getElectronFile(filePath: string): Promise { - const fileStats = await fs.stat(filePath); - return 
{ - path: filePath.split(path.sep).join(path.posix.sep), - name: path.basename(filePath), - size: fileStats.size, - lastModified: fileStats.mtime.valueOf(), - stream: async () => { - if (!existsSync(filePath)) { - throw new Error("electronFile does not exist"); - } - return await getFileStream(filePath); - }, - blob: async () => { - if (!existsSync(filePath)) { - throw new Error("electronFile does not exist"); - } - const blob = await fs.readFile(filePath); - return new Blob([new Uint8Array(blob)]); - }, - arrayBuffer: async () => { - if (!existsSync(filePath)) { - throw new Error("electronFile does not exist"); - } - const blob = await fs.readFile(filePath); - return new Uint8Array(blob); - }, - }; -} - -export const getZipFileStream = async ( - zip: StreamZip.StreamZipAsync, - filePath: string, -) => { - const stream = await zip.stream(filePath); - const done = { - current: false, - }; - const inProgress = { - current: false, - }; - // eslint-disable-next-line no-unused-vars - let resolveObj: (value?: any) => void = null; - // eslint-disable-next-line no-unused-vars - let rejectObj: (reason?: any) => void = null; - stream.on("readable", () => { - try { - if (resolveObj) { - inProgress.current = true; - const chunk = stream.read(FILE_STREAM_CHUNK_SIZE) as Buffer; - if (chunk) { - resolveObj(new Uint8Array(chunk)); - resolveObj = null; - } - inProgress.current = false; - } - } catch (e) { - rejectObj(e); - } - }); - stream.on("end", () => { - try { - done.current = true; - if (resolveObj && !inProgress.current) { - resolveObj(null); - resolveObj = null; - } - } catch (e) { - rejectObj(e); - } - }); - stream.on("error", (e) => { - try { - done.current = true; - if (rejectObj) { - rejectObj(e); - rejectObj = null; - } - } catch (e) { - rejectObj(e); - } - }); - - const readStreamData = async () => { - return new Promise((resolve, reject) => { - const chunk = stream.read(FILE_STREAM_CHUNK_SIZE) as Buffer; - - if (chunk || done.current) { - resolve(chunk); - } else { - 
resolveObj = resolve; - rejectObj = reject; - } - }); - }; - - const readableStream = new ReadableStream({ - async pull(controller) { - try { - const data = await readStreamData(); - - if (data) { - controller.enqueue(data); - } else { - controller.close(); - } - } catch (e) { - log.error("Failed to pull from readableStream", e); - controller.close(); - } - }, - }); - return readableStream; +export const fsReadTextFile = async (filePath: string) => + fs.readFile(filePath, "utf-8"); + +export const fsWriteFile = (path: string, contents: string) => + fs.writeFile(path, contents); + +export const fsIsDir = async (dirPath: string) => { + if (!existsSync(dirPath)) return false; + const stat = await fs.stat(dirPath); + return stat.isDirectory(); }; diff --git a/desktop/src/main/services/image.ts b/desktop/src/main/services/image.ts index 26b4b351e..957fe8120 100644 --- a/desktop/src/main/services/image.ts +++ b/desktop/src/main/services/image.ts @@ -1,11 +1,15 @@ /** @file Image format conversions and thumbnail generation */ import fs from "node:fs/promises"; -import path from "path"; -import { CustomErrorMessage } from "../../types/ipc"; +import path from "node:path"; +import { CustomErrorMessage, type ZipItem } from "../../types/ipc"; import log from "../log"; -import { execAsync, isDev } from "../utils-electron"; -import { deleteTempFile, makeTempFilePath } from "../utils-temp"; +import { execAsync, isDev } from "../utils/electron"; +import { + deleteTempFile, + makeFileForDataOrPathOrZipItem, + makeTempFilePath, +} from "../utils/temp"; export const convertToJPEG = async (imageData: Uint8Array) => { const inputFilePath = await makeTempFilePath(); @@ -63,19 +67,15 @@ const imageMagickPath = () => path.join(isDev ? 
"build" : process.resourcesPath, "image-magick"); export const generateImageThumbnail = async ( - dataOrPath: Uint8Array | string, + dataOrPathOrZipItem: Uint8Array | string | ZipItem, maxDimension: number, maxSize: number, ): Promise => { - let inputFilePath: string; - let isInputFileTemporary: boolean; - if (dataOrPath instanceof Uint8Array) { - inputFilePath = await makeTempFilePath(); - isInputFileTemporary = true; - } else { - inputFilePath = dataOrPath; - isInputFileTemporary = false; - } + const { + path: inputFilePath, + isFileTemporary: isInputFileTemporary, + writeToTemporaryFile: writeToTemporaryInputFile, + } = await makeFileForDataOrPathOrZipItem(dataOrPathOrZipItem); const outputFilePath = await makeTempFilePath("jpeg"); @@ -89,8 +89,7 @@ export const generateImageThumbnail = async ( ); try { - if (dataOrPath instanceof Uint8Array) - await fs.writeFile(inputFilePath, dataOrPath); + await writeToTemporaryInputFile(); let thumbnail: Uint8Array; do { diff --git a/desktop/src/main/services/ml-clip.ts b/desktop/src/main/services/ml-clip.ts index a5f407f9e..e3dd99204 100644 --- a/desktop/src/main/services/ml-clip.ts +++ b/desktop/src/main/services/ml-clip.ts @@ -11,7 +11,8 @@ import * as ort from "onnxruntime-node"; import Tokenizer from "../../thirdparty/clip-bpe-ts/mod"; import log from "../log"; import { writeStream } from "../stream"; -import { deleteTempFile, makeTempFilePath } from "../utils-temp"; +import { ensure } from "../utils/common"; +import { deleteTempFile, makeTempFilePath } from "../utils/temp"; import { makeCachedInferenceSession } from "./ml"; const cachedCLIPImageSession = makeCachedInferenceSession( @@ -22,7 +23,7 @@ const cachedCLIPImageSession = makeCachedInferenceSession( export const clipImageEmbedding = async (jpegImageData: Uint8Array) => { const tempFilePath = await makeTempFilePath(); const imageStream = new Response(jpegImageData.buffer).body; - await writeStream(tempFilePath, imageStream); + await writeStream(tempFilePath, 
ensure(imageStream)); try { return await clipImageEmbedding_(tempFilePath); } finally { @@ -44,30 +45,30 @@ const clipImageEmbedding_ = async (jpegFilePath: string) => { `onnx/clip image embedding took ${Date.now() - t1} ms (prep: ${t2 - t1} ms, inference: ${Date.now() - t2} ms)`, ); /* Need these model specific casts to type the result */ - const imageEmbedding = results["output"].data as Float32Array; + const imageEmbedding = ensure(results.output).data as Float32Array; return normalizeEmbedding(imageEmbedding); }; -const getRGBData = async (jpegFilePath: string) => { +const getRGBData = async (jpegFilePath: string): Promise => { const jpegData = await fs.readFile(jpegFilePath); const rawImageData = jpeg.decode(jpegData, { useTArray: true, formatAsRGBA: false, }); - const nx: number = rawImageData.width; - const ny: number = rawImageData.height; - const inputImage: Uint8Array = rawImageData.data; + const nx = rawImageData.width; + const ny = rawImageData.height; + const inputImage = rawImageData.data; - const nx2: number = 224; - const ny2: number = 224; - const totalSize: number = 3 * nx2 * ny2; + const nx2 = 224; + const ny2 = 224; + const totalSize = 3 * nx2 * ny2; - const result: number[] = Array(totalSize).fill(0); - const scale: number = Math.max(nx, ny) / 224; + const result = Array(totalSize).fill(0); + const scale = Math.max(nx, ny) / 224; - const nx3: number = Math.round(nx / scale); - const ny3: number = Math.round(ny / scale); + const nx3 = Math.round(nx / scale); + const ny3 = Math.round(ny / scale); const mean: number[] = [0.48145466, 0.4578275, 0.40821073]; const std: number[] = [0.26862954, 0.26130258, 0.27577711]; @@ -76,40 +77,40 @@ const getRGBData = async (jpegFilePath: string) => { for (let x = 0; x < nx3; x++) { for (let c = 0; c < 3; c++) { // Linear interpolation - const sx: number = (x + 0.5) * scale - 0.5; - const sy: number = (y + 0.5) * scale - 0.5; + const sx = (x + 0.5) * scale - 0.5; + const sy = (y + 0.5) * scale - 0.5; - const x0: 
number = Math.max(0, Math.floor(sx)); - const y0: number = Math.max(0, Math.floor(sy)); + const x0 = Math.max(0, Math.floor(sx)); + const y0 = Math.max(0, Math.floor(sy)); - const x1: number = Math.min(x0 + 1, nx - 1); - const y1: number = Math.min(y0 + 1, ny - 1); + const x1 = Math.min(x0 + 1, nx - 1); + const y1 = Math.min(y0 + 1, ny - 1); - const dx: number = sx - x0; - const dy: number = sy - y0; + const dx = sx - x0; + const dy = sy - y0; - const j00: number = 3 * (y0 * nx + x0) + c; - const j01: number = 3 * (y0 * nx + x1) + c; - const j10: number = 3 * (y1 * nx + x0) + c; - const j11: number = 3 * (y1 * nx + x1) + c; + const j00 = 3 * (y0 * nx + x0) + c; + const j01 = 3 * (y0 * nx + x1) + c; + const j10 = 3 * (y1 * nx + x0) + c; + const j11 = 3 * (y1 * nx + x1) + c; - const v00: number = inputImage[j00]; - const v01: number = inputImage[j01]; - const v10: number = inputImage[j10]; - const v11: number = inputImage[j11]; + const v00 = inputImage[j00] ?? 0; + const v01 = inputImage[j01] ?? 0; + const v10 = inputImage[j10] ?? 0; + const v11 = inputImage[j11] ?? 0; - const v0: number = v00 * (1 - dx) + v01 * dx; - const v1: number = v10 * (1 - dx) + v11 * dx; + const v0 = v00 * (1 - dx) + v01 * dx; + const v1 = v10 * (1 - dx) + v11 * dx; - const v: number = v0 * (1 - dy) + v1 * dy; + const v = v0 * (1 - dy) + v1 * dy; - const v2: number = Math.min(Math.max(Math.round(v), 0), 255); + const v2 = Math.min(Math.max(Math.round(v), 0), 255); // createTensorWithDataList is dumb compared to reshape and // hence has to be given with one channel after another - const i: number = y * nx3 + x + (c % 3) * 224 * 224; + const i = y * nx3 + x + (c % 3) * 224 * 224; - result[i] = (v2 / 255 - mean[c]) / std[c]; + result[i] = (v2 / 255 - (mean[c] ?? 0)) / (std[c] ?? 
1); } } } @@ -119,13 +120,12 @@ const getRGBData = async (jpegFilePath: string) => { const normalizeEmbedding = (embedding: Float32Array) => { let normalization = 0; - for (let index = 0; index < embedding.length; index++) { - normalization += embedding[index] * embedding[index]; - } + for (const v of embedding) normalization += v * v; + const sqrtNormalization = Math.sqrt(normalization); - for (let index = 0; index < embedding.length; index++) { - embedding[index] = embedding[index] / sqrtNormalization; - } + for (let index = 0; index < embedding.length; index++) + embedding[index] = ensure(embedding[index]) / sqrtNormalization; + return embedding; }; @@ -134,11 +134,9 @@ const cachedCLIPTextSession = makeCachedInferenceSession( 64173509 /* 61.2 MB */, ); -let _tokenizer: Tokenizer = null; +let _tokenizer: Tokenizer | undefined; const getTokenizer = () => { - if (!_tokenizer) { - _tokenizer = new Tokenizer(); - } + if (!_tokenizer) _tokenizer = new Tokenizer(); return _tokenizer; }; @@ -150,7 +148,7 @@ export const clipTextEmbeddingIfAvailable = async (text: string) => { // Don't wait for the download to complete if (typeof sessionOrStatus == "string") { - console.log( + log.info( "Ignoring CLIP text embedding request because model download is pending", ); return undefined; @@ -169,6 +167,6 @@ export const clipTextEmbeddingIfAvailable = async (text: string) => { () => `onnx/clip text embedding took ${Date.now() - t1} ms (prep: ${t2 - t1} ms, inference: ${Date.now() - t2} ms)`, ); - const textEmbedding = results["output"].data as Float32Array; + const textEmbedding = ensure(results.output).data as Float32Array; return normalizeEmbedding(textEmbedding); }; diff --git a/desktop/src/main/services/ml-face.ts b/desktop/src/main/services/ml-face.ts index 2309d193c..976525255 100644 --- a/desktop/src/main/services/ml-face.ts +++ b/desktop/src/main/services/ml-face.ts @@ -8,6 +8,7 @@ */ import * as ort from "onnxruntime-node"; import log from "../log"; +import { ensure } 
from "../utils/common"; import { makeCachedInferenceSession } from "./ml"; const cachedFaceDetectionSession = makeCachedInferenceSession( @@ -23,7 +24,7 @@ export const detectFaces = async (input: Float32Array) => { }; const results = await session.run(feeds); log.debug(() => `onnx/yolo face detection took ${Date.now() - t} ms`); - return results["output"].data; + return ensure(results.output).data; }; const cachedFaceEmbeddingSession = makeCachedInferenceSession( @@ -46,5 +47,6 @@ export const faceEmbedding = async (input: Float32Array) => { const results = await session.run(feeds); log.debug(() => `onnx/yolo face embedding took ${Date.now() - t} ms`); /* Need these model specific casts to extract and type the result */ - return (results.embeddings as unknown as any)["cpuData"] as Float32Array; + return (results.embeddings as unknown as Record) + .cpuData as Float32Array; }; diff --git a/desktop/src/main/services/ml.ts b/desktop/src/main/services/ml.ts index 8292596a2..6b38bc74d 100644 --- a/desktop/src/main/services/ml.ts +++ b/desktop/src/main/services/ml.ts @@ -34,6 +34,7 @@ import { writeStream } from "../stream"; * actively trigger a download until the returned function is called. * * @param modelName The name of the model to download. + * * @param modelByteSize The size in bytes that we expect the model to have. If * the size of the downloaded model does not match the expected size, then we * will redownload it. @@ -99,13 +100,15 @@ const downloadModel = async (saveLocation: string, name: string) => { // `mkdir -p` the directory where we want to save the model. const saveDir = path.dirname(saveLocation); await fs.mkdir(saveDir, { recursive: true }); - // Download + // Download. 
log.info(`Downloading ML model from ${name}`); const url = `https://models.ente.io/${name}`; const res = await net.fetch(url); if (!res.ok) throw new Error(`Failed to fetch ${url}: HTTP ${res.status}`); - // Save - await writeStream(saveLocation, res.body); + const body = res.body; + if (!body) throw new Error(`Received a null response for ${url}`); + // Save. + await writeStream(saveLocation, body); log.info(`Downloaded CLIP model ${name}`); }; @@ -114,9 +117,9 @@ const downloadModel = async (saveLocation: string, name: string) => { */ const createInferenceSession = async (modelPath: string) => { return await ort.InferenceSession.create(modelPath, { - // Restrict the number of threads to 1 + // Restrict the number of threads to 1. intraOpNumThreads: 1, - // Be more conservative with RAM usage + // Be more conservative with RAM usage. enableCpuMemArena: false, }); }; diff --git a/desktop/src/main/services/store.ts b/desktop/src/main/services/store.ts index 9ec65c8c3..20cc91ea4 100644 --- a/desktop/src/main/services/store.ts +++ b/desktop/src/main/services/store.ts @@ -9,20 +9,20 @@ import { watchStore } from "../stores/watch"; * This is useful to reset state when the user logs out. 
*/ export const clearStores = () => { - uploadStatusStore.clear(); safeStorageStore.clear(); + uploadStatusStore.clear(); watchStore.clear(); }; -export const saveEncryptionKey = async (encryptionKey: string) => { - const encryptedKey: Buffer = await safeStorage.encryptString(encryptionKey); +export const saveEncryptionKey = (encryptionKey: string) => { + const encryptedKey = safeStorage.encryptString(encryptionKey); const b64EncryptedKey = Buffer.from(encryptedKey).toString("base64"); safeStorageStore.set("encryptionKey", b64EncryptedKey); }; -export const encryptionKey = async (): Promise => { +export const encryptionKey = (): string | undefined => { const b64EncryptedKey = safeStorageStore.get("encryptionKey"); if (!b64EncryptedKey) return undefined; const keyBuffer = Buffer.from(b64EncryptedKey, "base64"); - return await safeStorage.decryptString(keyBuffer); + return safeStorage.decryptString(keyBuffer); }; diff --git a/desktop/src/main/services/upload.ts b/desktop/src/main/services/upload.ts index 88c2d88d1..f7d0436c0 100644 --- a/desktop/src/main/services/upload.ts +++ b/desktop/src/main/services/upload.ts @@ -1,116 +1,149 @@ import StreamZip from "node-stream-zip"; +import fs from "node:fs/promises"; +import path from "node:path"; import { existsSync } from "original-fs"; -import path from "path"; -import { ElectronFile, type PendingUploads } from "../../types/ipc"; -import { - uploadStatusStore, - type UploadStatusStore, -} from "../stores/upload-status"; -import { getElectronFile, getZipFileStream } from "./fs"; +import type { PendingUploads, ZipItem } from "../../types/ipc"; +import { uploadStatusStore } from "../stores/upload-status"; -export const pendingUploads = async () => { - const collectionName = uploadStatusStore.get("collectionName"); - const filePaths = validSavedPaths("files"); - const zipPaths = validSavedPaths("zips"); - - let files: ElectronFile[] = []; - let type: PendingUploads["type"]; - - if (zipPaths.length) { - type = "zips"; - for 
(const zipPath of zipPaths) { - files = [ - ...files, - ...(await getElectronFilesFromGoogleZip(zipPath)), - ]; - } - const pendingFilePaths = new Set(filePaths); - files = files.filter((file) => pendingFilePaths.has(file.path)); - } else if (filePaths.length) { - type = "files"; - files = await Promise.all(filePaths.map(getElectronFile)); - } - - return { - files, - collectionName, - type, - }; -}; - -export const validSavedPaths = (type: PendingUploads["type"]) => { - const key = storeKey(type); - const savedPaths = (uploadStatusStore.get(key) as string[]) ?? []; - const paths = savedPaths.filter((p) => existsSync(p)); - uploadStatusStore.set(key, paths); - return paths; -}; - -export const setPendingUploadCollection = (collectionName: string) => { - if (collectionName) uploadStatusStore.set("collectionName", collectionName); - else uploadStatusStore.delete("collectionName"); -}; - -export const setPendingUploadFiles = ( - type: PendingUploads["type"], - filePaths: string[], -) => { - const key = storeKey(type); - if (filePaths) uploadStatusStore.set(key, filePaths); - else uploadStatusStore.delete(key); -}; - -const storeKey = (type: PendingUploads["type"]): keyof UploadStatusStore => { - switch (type) { - case "zips": - return "zipPaths"; - case "files": - return "filePaths"; - } -}; - -export const getElectronFilesFromGoogleZip = async (filePath: string) => { - const zip = new StreamZip.async({ - file: filePath, - }); - const zipName = path.basename(filePath, ".zip"); +export const listZipItems = async (zipPath: string): Promise => { + const zip = new StreamZip.async({ file: zipPath }); const entries = await zip.entries(); - const files: ElectronFile[] = []; + const entryNames: string[] = []; for (const entry of Object.values(entries)) { const basename = path.basename(entry.name); - if (entry.isFile && basename.length > 0 && basename[0] !== ".") { - files.push(await getZipEntryAsElectronFile(zipName, zip, entry)); + // Ignore "hidden" files (files whose names 
begin with a dot). + if (entry.isFile && !basename.startsWith(".")) { + // `entry.name` is the path within the zip. + entryNames.push(entry.name); } } - return files; + await zip.close(); + + return entryNames.map((entryName) => [zipPath, entryName]); }; -export async function getZipEntryAsElectronFile( - zipName: string, - zip: StreamZip.StreamZipAsync, - entry: StreamZip.ZipEntry, -): Promise<ElectronFile> { +export const pathOrZipItemSize = async ( + pathOrZipItem: string | ZipItem, +): Promise<number> => { + if (typeof pathOrZipItem == "string") { + const stat = await fs.stat(pathOrZipItem); + return stat.size; + } else { + const [zipPath, entryName] = pathOrZipItem; + const zip = new StreamZip.async({ file: zipPath }); + const entry = await zip.entry(entryName); + if (!entry) + throw new Error( + `An entry with name ${entryName} does not exist in the zip file at ${zipPath}`, + ); + const size = entry.size; + await zip.close(); + return size; + } +}; + +export const pendingUploads = async (): Promise<PendingUploads | undefined> => { + const collectionName = uploadStatusStore.get("collectionName") ?? undefined; + + const allFilePaths = uploadStatusStore.get("filePaths") ?? []; + const filePaths = allFilePaths.filter((f) => existsSync(f)); + + const allZipItems = uploadStatusStore.get("zipItems"); + let zipItems: typeof allZipItems; + + // Migration code - May 2024. Remove after a bit. + // + // The older store formats will not have zipItems and instead will have + // zipPaths. If we find such a case, read the zipPaths and enqueue all of + // their files as zipItems in the result. + // + // This potentially can cause us to try reuploading an already uploaded + // file, but the dedup logic will kick in at that point so no harm will come + // of it. + if (allZipItems === undefined) { + const allZipPaths = uploadStatusStore.get("zipPaths") ?? 
[]; + const zipPaths = allZipPaths.filter((f) => existsSync(f)); + zipItems = []; + for (const zip of zipPaths) + zipItems = zipItems.concat(await listZipItems(zip)); + } else { + zipItems = allZipItems.filter(([z]) => existsSync(z)); + } + + if (filePaths.length == 0 && zipItems.length == 0) return undefined; + return { - path: path - .join(zipName, entry.name) - .split(path.sep) - .join(path.posix.sep), - name: path.basename(entry.name), - size: entry.size, - lastModified: entry.time, - stream: async () => { - return await getZipFileStream(zip, entry.name); - }, - blob: async () => { - const buffer = await zip.entryData(entry.name); - return new Blob([new Uint8Array(buffer)]); - }, - arrayBuffer: async () => { - const buffer = await zip.entryData(entry.name); - return new Uint8Array(buffer); - }, + collectionName, + filePaths, + zipItems, }; -} +}; + +/** + * [Note: Missing values in electron-store] + * + * Suppose we were to create a store like this: + * + * const store = new Store({ + * schema: { + * foo: { type: "string" }, + * bars: { type: "array", items: { type: "string" } }, + * }, + * }); + * + * If we fetch `store.get("foo")` or `store.get("bars")`, we get `undefined`. + * But if we try to set these back to `undefined`, say `store.set("foo", + * someUndefValue)`, we get asked to + * + * TypeError: Use `delete()` to clear values + * + * This happens even if we do bulk object updates, e.g. with a JS object that + * has undefined keys: + * + * > TypeError: Setting a value of type `undefined` for key `collectionName` is + * > not allowed as it's not supported by JSON + * + * So what should the TypeScript type for "foo" be? + * + * If it were to not include the possibility of `undefined`, then the type + * would lie because `store.get("foo")` can indeed be `undefined`. But if we were + * to include the possibility of `undefined`, then trying to `store.set("foo", + * someUndefValue)` will throw. 
+ * + * The approach we take is to rely on false-y values (empty strings and empty + * arrays) to indicate missing values, and then converting those to `undefined` + * when reading from the store, and converting `undefined` to the corresponding + * false-y value when writing. + */ +export const setPendingUploads = ({ + collectionName, + filePaths, + zipItems, +}: PendingUploads) => { + uploadStatusStore.set({ + collectionName: collectionName ?? "", + filePaths: filePaths, + zipItems: zipItems, + }); +}; + +export const markUploadedFiles = (paths: string[]) => { + const existing = uploadStatusStore.get("filePaths") ?? []; + const updated = existing.filter((p) => !paths.includes(p)); + uploadStatusStore.set("filePaths", updated); +}; + +export const markUploadedZipItems = ( + items: [zipPath: string, entryName: string][], +) => { + const existing = uploadStatusStore.get("zipItems") ?? []; + const updated = existing.filter( + (z) => !items.some((e) => z[0] == e[0] && z[1] == e[1]), + ); + uploadStatusStore.set("zipItems", updated); +}; + +export const clearPendingUploads = () => uploadStatusStore.clear(); diff --git a/desktop/src/main/services/watch.ts b/desktop/src/main/services/watch.ts index 73a13c545..de66dcca1 100644 --- a/desktop/src/main/services/watch.ts +++ b/desktop/src/main/services/watch.ts @@ -3,9 +3,10 @@ import { BrowserWindow } from "electron/main"; import fs from "node:fs/promises"; import path from "node:path"; import { FolderWatch, type CollectionMapping } from "../../types/ipc"; -import { fsIsDir } from "../fs"; import log from "../log"; import { watchStore } from "../stores/watch"; +import { posixPath } from "../utils/electron"; +import { fsIsDir } from "./fs"; /** * Create and return a new file system watcher. 
@@ -34,8 +35,8 @@ export const createWatcher = (mainWindow: BrowserWindow) => { return watcher; }; -const eventData = (path: string): [string, FolderWatch] => { - path = posixPath(path); +const eventData = (platformPath: string): [string, FolderWatch] => { + const path = posixPath(platformPath); const watch = folderWatches().find((watch) => path.startsWith(watch.folderPath + "/"), @@ -46,23 +47,15 @@ const eventData = (path: string): [string, FolderWatch] => { return [path, watch]; }; -/** - * Convert a file system {@link filePath} that uses the local system specific - * path separators into a path that uses POSIX file separators. - */ -const posixPath = (filePath: string) => - filePath.split(path.sep).join(path.posix.sep); - -export const watchGet = (watcher: FSWatcher) => { - const [valid, deleted] = folderWatches().reduce( - ([valid, deleted], watch) => { - (fsIsDir(watch.folderPath) ? valid : deleted).push(watch); - return [valid, deleted]; - }, - [[], []], - ); - if (deleted.length) { - for (const watch of deleted) watchRemove(watcher, watch.folderPath); +export const watchGet = async (watcher: FSWatcher): Promise => { + const valid: FolderWatch[] = []; + const deletedPaths: string[] = []; + for (const watch of folderWatches()) { + if (await fsIsDir(watch.folderPath)) valid.push(watch); + else deletedPaths.push(watch.folderPath); + } + if (deletedPaths.length) { + await Promise.all(deletedPaths.map((p) => watchRemove(watcher, p))); setFolderWatches(valid); } return valid; @@ -80,7 +73,7 @@ export const watchAdd = async ( ) => { const watches = folderWatches(); - if (!fsIsDir(folderPath)) + if (!(await fsIsDir(folderPath))) throw new Error( `Attempting to add a folder watch for a folder path ${folderPath} that is not an existing directory`, ); @@ -104,7 +97,7 @@ export const watchAdd = async ( return watches; }; -export const watchRemove = async (watcher: FSWatcher, folderPath: string) => { +export const watchRemove = (watcher: FSWatcher, folderPath: string) => 
{ const watches = folderWatches(); const filtered = watches.filter((watch) => watch.folderPath != folderPath); if (watches.length == filtered.length) @@ -157,3 +150,7 @@ export const watchFindFiles = async (dirPath: string) => { } return paths; }; + +export const watchReset = (watcher: FSWatcher) => { + watcher.unwatch(folderWatches().map((watch) => watch.folderPath)); +}; diff --git a/desktop/src/main/stores/safe-storage.ts b/desktop/src/main/stores/safe-storage.ts index 1e1369db8..040af1f3e 100644 --- a/desktop/src/main/stores/safe-storage.ts +++ b/desktop/src/main/stores/safe-storage.ts @@ -1,7 +1,7 @@ import Store, { Schema } from "electron-store"; interface SafeStorageStore { - encryptionKey: string; + encryptionKey?: string; } const safeStorageSchema: Schema = { diff --git a/desktop/src/main/stores/upload-status.ts b/desktop/src/main/stores/upload-status.ts index 25af7a49e..8cb2410df 100644 --- a/desktop/src/main/stores/upload-status.ts +++ b/desktop/src/main/stores/upload-status.ts @@ -1,27 +1,51 @@ import Store, { Schema } from "electron-store"; export interface UploadStatusStore { - filePaths: string[]; - zipPaths: string[]; - collectionName: string; + /** + * The collection to which we're uploading, or the root collection. + * + * Not all pending uploads will have an associated collection. + */ + collectionName?: string; + /** + * Paths to regular files that are pending upload. + */ + filePaths?: string[]; + /** + * Each item is the path to a zip file and the name of an entry within it. + */ + zipItems?: [zipPath: string, entryName: string][]; + /** + * @deprecated Legacy paths to zip files, now subsumed into zipItems. 
+ */ + zipPaths?: string[]; } const uploadStatusSchema: Schema = { + collectionName: { + type: "string", + }, filePaths: { type: "array", items: { type: "string", }, }, + zipItems: { + type: "array", + items: { + type: "array", + items: { + type: "string", + }, + }, + }, zipPaths: { type: "array", items: { type: "string", }, }, - collectionName: { - type: "string", - }, }; export const uploadStatusStore = new Store({ diff --git a/desktop/src/main/stores/user-preferences.ts b/desktop/src/main/stores/user-preferences.ts index b4a02bc5b..f3b192989 100644 --- a/desktop/src/main/stores/user-preferences.ts +++ b/desktop/src/main/stores/user-preferences.ts @@ -1,7 +1,7 @@ import Store, { Schema } from "electron-store"; interface UserPreferences { - hideDockIcon: boolean; + hideDockIcon?: boolean; skipAppVersion?: string; muteUpdateNotificationVersion?: string; } diff --git a/desktop/src/main/stores/watch.ts b/desktop/src/main/stores/watch.ts index 7ee383038..59032c9ac 100644 --- a/desktop/src/main/stores/watch.ts +++ b/desktop/src/main/stores/watch.ts @@ -3,7 +3,7 @@ import { type FolderWatch } from "../../types/ipc"; import log from "../log"; interface WatchStore { - mappings: FolderWatchWithLegacyFields[]; + mappings?: FolderWatchWithLegacyFields[]; } type FolderWatchWithLegacyFields = FolderWatch & { @@ -54,8 +54,12 @@ export const watchStore = new Store({ */ export const migrateLegacyWatchStoreIfNeeded = () => { let needsUpdate = false; - const watches = watchStore.get("mappings")?.map((watch) => { + const updatedWatches = []; + for (const watch of watchStore.get("mappings") ?? []) { let collectionMapping = watch.collectionMapping; + // The required type defines the latest schema, but before migration + // this'll be undefined, so tell ESLint to calm down. + // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition if (!collectionMapping) { collectionMapping = watch.uploadStrategy == 1 ? 
"parent" : "root"; needsUpdate = true; @@ -64,10 +68,10 @@ export const migrateLegacyWatchStoreIfNeeded = () => { delete watch.rootFolderName; needsUpdate = true; } - return { ...watch, collectionMapping }; - }); + updatedWatches.push({ ...watch, collectionMapping }); + } if (needsUpdate) { - watchStore.set("mappings", watches); + watchStore.set("mappings", updatedWatches); log.info("Migrated legacy watch store data to new schema"); } }; diff --git a/desktop/src/main/stream.ts b/desktop/src/main/stream.ts index 21104028f..bae13aa12 100644 --- a/desktop/src/main/stream.ts +++ b/desktop/src/main/stream.ts @@ -2,11 +2,14 @@ * @file stream data to-from renderer using a custom protocol handler. */ import { net, protocol } from "electron/main"; +import StreamZip from "node-stream-zip"; import { createWriteStream, existsSync } from "node:fs"; import fs from "node:fs/promises"; import { Readable } from "node:stream"; +import { ReadableStream } from "node:stream/web"; import { pathToFileURL } from "node:url"; import log from "./log"; +import { ensure } from "./utils/common"; /** * Register a protocol handler that we use for streaming large files between the @@ -34,19 +37,18 @@ export const registerStreamProtocol = () => { protocol.handle("stream", async (request: Request) => { const url = request.url; // The request URL contains the command to run as the host, and the - // pathname of the file as the path. For example, - // - // stream://write/path/to/file - // host-pathname----- - // - const { host, pathname } = new URL(url); - // Convert e.g. "%20" to spaces. - const path = decodeURIComponent(pathname); + // pathname of the file(s) as the search params. 
+ const { host, searchParams } = new URL(url); switch (host) { case "read": - return handleRead(path); + return handleRead(ensure(searchParams.get("path"))); + case "read-zip": + return handleReadZip( + ensure(searchParams.get("zipPath")), + ensure(searchParams.get("entryName")), + ); case "write": - return handleWrite(path, request); + return handleWrite(ensure(searchParams.get("path")), request); default: return new Response("", { status: 404 }); } @@ -57,10 +59,17 @@ const handleRead = async (path: string) => { try { const res = await net.fetch(pathToFileURL(path).toString()); if (res.ok) { - // net.fetch defaults to text/plain, which might be fine - // in practice, but as an extra precaution indicate that - // this is binary data. - res.headers.set("Content-Type", "application/octet-stream"); + // net.fetch already seems to add "Content-Type" and "Last-Modified" + // headers, but I couldn't find documentation for this. In any case, + // since we already are stat-ting the file for the "Content-Length", + // we explicitly add the "X-Last-Modified-Ms" too, + // + // 1. Guaranteeing its presence, + // + // 2. Having it be in the exact format we want (no string <-> date + // conversions), + // + // 3. Retaining milliseconds. const stat = await fs.stat(path); @@ -75,7 +84,54 @@ const handleRead = async (path: string) => { return res; } catch (e) { log.error(`Failed to read stream at ${path}`, e); - return new Response(`Failed to read stream: ${e.message}`, { + return new Response(`Failed to read stream: ${String(e)}`, { + status: 500, + }); + } +}; + +const handleReadZip = async (zipPath: string, entryName: string) => { + try { + const zip = new StreamZip.async({ file: zipPath }); + const entry = await zip.entry(entryName); + if (!entry) return new Response("", { status: 404 }); + + // This returns an "old style" NodeJS.ReadableStream. + const stream = await zip.stream(entry); + // Convert it into a new style NodeJS.Readable. 
+ const nodeReadable = new Readable().wrap(stream); + // Then convert it into a Web stream. + const webReadableStreamAny = Readable.toWeb(nodeReadable); + // However, we get a ReadableStream now. This doesn't go into the + // `BodyInit` expected by the Response constructor, which wants a + // ReadableStream. Force a cast. + const webReadableStream = + webReadableStreamAny as ReadableStream; + + // Close the zip handle when the underlying stream closes. + stream.on("end", () => void zip.close()); + + return new Response(webReadableStream, { + headers: { + // We don't know the exact type, but it doesn't really matter, + // just set it to a generic binary content-type so that the + // browser doesn't tinker with it thinking of it as text. + "Content-Type": "application/octet-stream", + "Content-Length": `${entry.size}`, + // While it is documented that entry.time is the modification + // time, the units are not mentioned. By seeing the source code, + // we can verify that it is indeed epoch milliseconds. 
See + // `parseZipTime` in the node-stream-zip source, + // https://github.com/antelle/node-stream-zip/blob/master/node_stream_zip.js + "X-Last-Modified-Ms": `${entry.time}`, + }, + }); + } catch (e) { + log.error( + `Failed to read entry ${entryName} from zip file at ${zipPath}`, + e, + ); + return new Response(`Failed to read stream: ${String(e)}`, { status: 500, }); } @@ -83,11 +139,11 @@ const handleRead = async (path: string) => { const handleWrite = async (path: string, request: Request) => { try { - await writeStream(path, request.body); + await writeStream(path, ensure(request.body)); return new Response("", { status: 200 }); } catch (e) { log.error(`Failed to write stream to ${path}`, e); - return new Response(`Failed to write stream: ${e.message}`, { + return new Response(`Failed to write stream: ${String(e)}`, { status: 500, }); } @@ -99,59 +155,29 @@ const handleWrite = async (path: string, request: Request) => { * The returned promise resolves when the write completes. * * @param filePath The local filesystem path where the file should be written. - * @param readableStream A [web - * ReadableStream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream) + * + * @param readableStream A web + * [ReadableStream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream). 
*/ export const writeStream = (filePath: string, readableStream: ReadableStream) => - writeNodeStream(filePath, convertWebReadableStreamToNode(readableStream)); + writeNodeStream(filePath, Readable.fromWeb(readableStream)); -/** - * Convert a Web ReadableStream into a Node.js ReadableStream - * - * This can be used to, for example, write a ReadableStream obtained via - * `net.fetch` into a file using the Node.js `fs` APIs - */ -const convertWebReadableStreamToNode = (readableStream: ReadableStream) => { - const reader = readableStream.getReader(); - const rs = new Readable(); - - rs._read = async () => { - try { - const result = await reader.read(); - - if (!result.done) { - rs.push(Buffer.from(result.value)); - } else { - rs.push(null); - return; - } - } catch (e) { - rs.emit("error", e); - } - }; - - return rs; -}; - -const writeNodeStream = async ( - filePath: string, - fileStream: NodeJS.ReadableStream, -) => { +const writeNodeStream = async (filePath: string, fileStream: Readable) => { const writeable = createWriteStream(filePath); - fileStream.on("error", (error) => { - writeable.destroy(error); // Close the writable stream with an error + fileStream.on("error", (err) => { + writeable.destroy(err); // Close the writable stream with an error }); fileStream.pipe(writeable); await new Promise((resolve, reject) => { writeable.on("finish", resolve); - writeable.on("error", async (e: unknown) => { + writeable.on("error", (err) => { if (existsSync(filePath)) { - await fs.unlink(filePath); + void fs.unlink(filePath); } - reject(e); + reject(err); }); }); }; diff --git a/desktop/src/main/utils-temp.ts b/desktop/src/main/utils-temp.ts deleted file mode 100644 index a52daf619..000000000 --- a/desktop/src/main/utils-temp.ts +++ /dev/null @@ -1,63 +0,0 @@ -import { app } from "electron/main"; -import { existsSync } from "node:fs"; -import fs from "node:fs/promises"; -import path from "path"; - -/** - * Our very own directory within the system temp directory. 
Go crazy, but - * remember to clean up, especially in exception handlers. - */ -const enteTempDirPath = async () => { - const result = path.join(app.getPath("temp"), "ente"); - await fs.mkdir(result, { recursive: true }); - return result; -}; - -/** Generate a random string suitable for being used as a file name prefix */ -const randomPrefix = () => { - const alphabet = - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; - - let result = ""; - for (let i = 0; i < 10; i++) - result += alphabet[Math.floor(Math.random() * alphabet.length)]; - return result; -}; - -/** - * Return the path to a temporary file with the given {@link suffix}. - * - * The function returns the path to a file in the system temp directory (in an - * Ente specific folder therin) with a random prefix and an (optional) - * {@link extension}. - * - * It ensures that there is no existing item with the same name already. - * - * Use {@link deleteTempFile} to remove this file when you're done. - */ -export const makeTempFilePath = async (extension?: string) => { - const tempDir = await enteTempDirPath(); - const suffix = extension ? "." + extension : ""; - let result: string; - do { - result = path.join(tempDir, randomPrefix() + suffix); - } while (existsSync(result)); - return result; -}; - -/** - * Delete a temporary file at the given path if it exists. - * - * This is the same as a vanilla {@link fs.rm}, except it first checks that the - * given path is within the Ente specific directory in the system temp - * directory. This acts as an additional safety check. - * - * @param tempFilePath The path to the temporary file to delete. This path - * should've been previously created using {@link makeTempFilePath}. 
- */ -export const deleteTempFile = async (tempFilePath: string) => { - const tempDir = await enteTempDirPath(); - if (!tempFilePath.startsWith(tempDir)) - throw new Error(`Attempting to delete a non-temp file ${tempFilePath}`); - await fs.rm(tempFilePath, { force: true }); -}; diff --git a/desktop/src/main/utils.ts b/desktop/src/main/utils/common.ts similarity index 67% rename from desktop/src/main/utils.ts rename to desktop/src/main/utils/common.ts index 132859a43..1f5016e61 100644 --- a/desktop/src/main/utils.ts +++ b/desktop/src/main/utils/common.ts @@ -1,10 +1,19 @@ /** - * @file grab bag of utitity functions. + * @file grab bag of utility functions. * - * Many of these are verbatim copies of functions from web code since there - * isn't currently a common package that both of them share. + * These are verbatim copies of functions from web code since there isn't + * currently a common package that both of them share. */ +/** + * Throw an exception if the given value is `null` or `undefined`. + */ +export const ensure = (v: T | null | undefined): T => { + if (v === null) throw new Error("Required value was null"); + if (v === undefined) throw new Error("Required value was not found"); + return v; +}; + /** * Wait for {@link ms} milliseconds * diff --git a/desktop/src/main/utils-electron.ts b/desktop/src/main/utils/electron.ts similarity index 51% rename from desktop/src/main/utils-electron.ts rename to desktop/src/main/utils/electron.ts index b997d738e..93e8565ef 100644 --- a/desktop/src/main/utils-electron.ts +++ b/desktop/src/main/utils/electron.ts @@ -1,14 +1,35 @@ import shellescape from "any-shell-escape"; -import { shell } from "electron"; /* TODO(MR): Why is this not in /main? */ import { app } from "electron/main"; import { exec } from "node:child_process"; import path from "node:path"; import { promisify } from "node:util"; -import log from "./log"; +import log from "../log"; /** `true` if the app is running in development mode. 
*/ export const isDev = !app.isPackaged; +/** + * Convert a file system {@link platformPath} that uses the local system + * specific path separators into a path that uses POSIX file separators. + * + * For all paths that we persist or pass over the IPC boundary, we always use + * POSIX paths, even on Windows. + * + * Windows recognizes both forward and backslashes. This also works with drive + * names. c:\foo\bar and c:/foo/bar are both valid. + * + * > Almost all paths passed to Windows APIs are normalized. During + * > normalization, Windows performs the following steps: ... All forward + * > slashes (/) are converted into the standard Windows separator, the back + * > slash (\). + * > + * > https://learn.microsoft.com/en-us/dotnet/standard/io/file-path-formats + */ +export const posixPath = (platformPath: string) => + path.sep == path.posix.sep + ? platformPath + : platformPath.split(path.sep).join(path.posix.sep); + /** * Run a shell command asynchronously. * @@ -33,49 +54,11 @@ export const execAsync = (command: string | string[]) => { ? shellescape(command) : command; const startTime = Date.now(); - log.debug(() => `Running shell command: ${escapedCommand}`); const result = execAsync_(escapedCommand); log.debug( - () => - `Completed in ${Math.round(Date.now() - startTime)} ms (${escapedCommand})`, + () => `${escapedCommand} (${Math.round(Date.now() - startTime)} ms)`, ); return result; }; const execAsync_ = promisify(exec); - -/** - * Open the given {@link dirPath} in the system's folder viewer. - * - * For example, on macOS this'll open {@link dirPath} in Finder. - */ -export const openDirectory = async (dirPath: string) => { - const res = await shell.openPath(path.normalize(dirPath)); - // shell.openPath resolves with a string containing the error message - // corresponding to the failure if a failure occurred, otherwise "". 
- if (res) throw new Error(`Failed to open directory ${dirPath}: res`); -}; - -/** - * Open the app's log directory in the system's folder viewer. - * - * @see {@link openDirectory} - */ -export const openLogDirectory = () => openDirectory(logDirectoryPath()); - -/** - * Return the path where the logs for the app are saved. - * - * [Note: Electron app paths] - * - * By default, these paths are at the following locations: - * - * - macOS: `~/Library/Application Support/ente` - * - Linux: `~/.config/ente` - * - Windows: `%APPDATA%`, e.g. `C:\Users\\AppData\Local\ente` - * - Windows: C:\Users\\AppData\Local\ - * - * https://www.electronjs.org/docs/latest/api/app - * - */ -const logDirectoryPath = () => app.getPath("logs"); diff --git a/desktop/src/main/utils/temp.ts b/desktop/src/main/utils/temp.ts new file mode 100644 index 000000000..11f7a5d84 --- /dev/null +++ b/desktop/src/main/utils/temp.ts @@ -0,0 +1,125 @@ +import { app } from "electron/main"; +import StreamZip from "node-stream-zip"; +import { existsSync } from "node:fs"; +import fs from "node:fs/promises"; +import path from "node:path"; +import type { ZipItem } from "../../types/ipc"; +import { ensure } from "./common"; + +/** + * Our very own directory within the system temp directory. Go crazy, but + * remember to clean up, especially in exception handlers. + */ +const enteTempDirPath = async () => { + const result = path.join(app.getPath("temp"), "ente"); + await fs.mkdir(result, { recursive: true }); + return result; +}; + +/** Generate a random string suitable for being used as a file name prefix */ +const randomPrefix = () => { + const ch = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; + const randomChar = () => ensure(ch[Math.floor(Math.random() * ch.length)]); + + return Array(10).fill("").map(randomChar).join(""); +}; + +/** + * Return the path to a temporary file with the given {@link suffix}. 
+ * + * The function returns the path to a file in the system temp directory (in an + * Ente specific folder therin) with a random prefix and an (optional) + * {@link extension}. + * + * It ensures that there is no existing item with the same name already. + * + * Use {@link deleteTempFile} to remove this file when you're done. + */ +export const makeTempFilePath = async (extension?: string) => { + const tempDir = await enteTempDirPath(); + const suffix = extension ? "." + extension : ""; + let result: string; + do { + result = path.join(tempDir, randomPrefix() + suffix); + } while (existsSync(result)); + return result; +}; + +/** + * Delete a temporary file at the given path if it exists. + * + * This is the same as a vanilla {@link fs.rm}, except it first checks that the + * given path is within the Ente specific directory in the system temp + * directory. This acts as an additional safety check. + * + * @param tempFilePath The path to the temporary file to delete. This path + * should've been previously created using {@link makeTempFilePath}. + */ +export const deleteTempFile = async (tempFilePath: string) => { + const tempDir = await enteTempDirPath(); + if (!tempFilePath.startsWith(tempDir)) + throw new Error(`Attempting to delete a non-temp file ${tempFilePath}`); + await fs.rm(tempFilePath, { force: true }); +}; + +/** The result of {@link makeFileForDataOrPathOrZipItem}. */ +interface FileForDataOrPathOrZipItem { + /** + * The path to the file (possibly temporary). + */ + path: string; + /** + * `true` if {@link path} points to a temporary file which should be deleted + * once we are done processing. + */ + isFileTemporary: boolean; + /** + * A function that can be called to actually write the contents of the + * source `Uint8Array | string | ZipItem` into the file at {@link path}. + * + * It will do nothing in the case when the source is already a path. 
In the + * other two cases this function will write the data or zip item into the + * file at {@link path}. + */ + writeToTemporaryFile: () => Promise; +} + +/** + * Return the path to a file, a boolean indicating if this is a temporary path + * that needs to be deleted after processing, and a function to write the given + * {@link dataOrPathOrZipItem} into that temporary file if needed. + * + * @param dataOrPathOrZipItem The contents of the file, or the path to an + * existing file, or a (path to a zip file, name of an entry within that zip + * file) tuple. + */ +export const makeFileForDataOrPathOrZipItem = async ( + dataOrPathOrZipItem: Uint8Array | string | ZipItem, +): Promise => { + let path: string; + let isFileTemporary: boolean; + let writeToTemporaryFile = async () => { + /* no-op */ + }; + + if (typeof dataOrPathOrZipItem == "string") { + path = dataOrPathOrZipItem; + isFileTemporary = false; + } else { + path = await makeTempFilePath(); + isFileTemporary = true; + if (dataOrPathOrZipItem instanceof Uint8Array) { + writeToTemporaryFile = () => + fs.writeFile(path, dataOrPathOrZipItem); + } else { + writeToTemporaryFile = async () => { + const [zipPath, entryName] = dataOrPathOrZipItem; + const zip = new StreamZip.async({ file: zipPath }); + await zip.extract(entryName, path); + await zip.close(); + }; + } + } + + return { path, isFileTemporary, writeToTemporaryFile }; +}; diff --git a/desktop/src/preload.ts b/desktop/src/preload.ts index 68308aea9..407e541ff 100644 --- a/desktop/src/preload.ts +++ b/desktop/src/preload.ts @@ -37,37 +37,37 @@ * - [main] desktop/src/main/ipc.ts contains impl */ -import { contextBridge, ipcRenderer } from "electron/renderer"; +import { contextBridge, ipcRenderer, webUtils } from "electron/renderer"; // While we can't import other code, we can import types since they're just // needed when compiling and will not be needed or looked around for at runtime. 
import type { AppUpdate, CollectionMapping, - ElectronFile, FolderWatch, PendingUploads, + ZipItem, } from "./types/ipc"; // - General -const appVersion = (): Promise => ipcRenderer.invoke("appVersion"); +const appVersion = () => ipcRenderer.invoke("appVersion"); const logToDisk = (message: string): void => ipcRenderer.send("logToDisk", message); -const openDirectory = (dirPath: string): Promise => +const openDirectory = (dirPath: string) => ipcRenderer.invoke("openDirectory", dirPath); -const openLogDirectory = (): Promise => - ipcRenderer.invoke("openLogDirectory"); +const openLogDirectory = () => ipcRenderer.invoke("openLogDirectory"); + +const selectDirectory = () => ipcRenderer.invoke("selectDirectory"); const clearStores = () => ipcRenderer.send("clearStores"); -const encryptionKey = (): Promise => - ipcRenderer.invoke("encryptionKey"); +const encryptionKey = () => ipcRenderer.invoke("encryptionKey"); -const saveEncryptionKey = (encryptionKey: string): Promise => +const saveEncryptionKey = (encryptionKey: string) => ipcRenderer.invoke("saveEncryptionKey", encryptionKey); const onMainWindowFocus = (cb?: () => void) => { @@ -99,121 +99,90 @@ const skipAppUpdate = (version: string) => { // - FS -const fsExists = (path: string): Promise => - ipcRenderer.invoke("fsExists", path); +const fsExists = (path: string) => ipcRenderer.invoke("fsExists", path); -const fsMkdirIfNeeded = (dirPath: string): Promise => +const fsMkdirIfNeeded = (dirPath: string) => ipcRenderer.invoke("fsMkdirIfNeeded", dirPath); -const fsRename = (oldPath: string, newPath: string): Promise => +const fsRename = (oldPath: string, newPath: string) => ipcRenderer.invoke("fsRename", oldPath, newPath); -const fsRmdir = (path: string): Promise => - ipcRenderer.invoke("fsRmdir", path); +const fsRmdir = (path: string) => ipcRenderer.invoke("fsRmdir", path); -const fsRm = (path: string): Promise => ipcRenderer.invoke("fsRm", path); +const fsRm = (path: string) => ipcRenderer.invoke("fsRm", path); -const 
fsReadTextFile = (path: string): Promise => +const fsReadTextFile = (path: string) => ipcRenderer.invoke("fsReadTextFile", path); -const fsWriteFile = (path: string, contents: string): Promise => +const fsWriteFile = (path: string, contents: string) => ipcRenderer.invoke("fsWriteFile", path, contents); -const fsIsDir = (dirPath: string): Promise => - ipcRenderer.invoke("fsIsDir", dirPath); - -const fsSize = (path: string): Promise => - ipcRenderer.invoke("fsSize", path); +const fsIsDir = (dirPath: string) => ipcRenderer.invoke("fsIsDir", dirPath); // - Conversion -const convertToJPEG = (imageData: Uint8Array): Promise => +const convertToJPEG = (imageData: Uint8Array) => ipcRenderer.invoke("convertToJPEG", imageData); const generateImageThumbnail = ( - dataOrPath: Uint8Array | string, + dataOrPathOrZipItem: Uint8Array | string | ZipItem, maxDimension: number, maxSize: number, -): Promise => +) => ipcRenderer.invoke( "generateImageThumbnail", - dataOrPath, + dataOrPathOrZipItem, maxDimension, maxSize, ); const ffmpegExec = ( command: string[], - dataOrPath: Uint8Array | string, + dataOrPathOrZipItem: Uint8Array | string | ZipItem, outputFileExtension: string, timeoutMS: number, -): Promise => +) => ipcRenderer.invoke( "ffmpegExec", command, - dataOrPath, + dataOrPathOrZipItem, outputFileExtension, timeoutMS, ); // - ML -const clipImageEmbedding = (jpegImageData: Uint8Array): Promise => +const clipImageEmbedding = (jpegImageData: Uint8Array) => ipcRenderer.invoke("clipImageEmbedding", jpegImageData); -const clipTextEmbeddingIfAvailable = ( - text: string, -): Promise => +const clipTextEmbeddingIfAvailable = (text: string) => ipcRenderer.invoke("clipTextEmbeddingIfAvailable", text); -const detectFaces = (input: Float32Array): Promise => +const detectFaces = (input: Float32Array) => ipcRenderer.invoke("detectFaces", input); -const faceEmbedding = (input: Float32Array): Promise => +const faceEmbedding = (input: Float32Array) => ipcRenderer.invoke("faceEmbedding", input); 
-// - File selection - -// TODO: Deprecated - use dialogs on the renderer process itself - -const selectDirectory = (): Promise => - ipcRenderer.invoke("selectDirectory"); - -const showUploadFilesDialog = (): Promise => - ipcRenderer.invoke("showUploadFilesDialog"); - -const showUploadDirsDialog = (): Promise => - ipcRenderer.invoke("showUploadDirsDialog"); - -const showUploadZipDialog = (): Promise<{ - zipPaths: string[]; - files: ElectronFile[]; -}> => ipcRenderer.invoke("showUploadZipDialog"); - // - Watch -const watchGet = (): Promise => ipcRenderer.invoke("watchGet"); +const watchGet = () => ipcRenderer.invoke("watchGet"); -const watchAdd = ( - folderPath: string, - collectionMapping: CollectionMapping, -): Promise => +const watchAdd = (folderPath: string, collectionMapping: CollectionMapping) => ipcRenderer.invoke("watchAdd", folderPath, collectionMapping); -const watchRemove = (folderPath: string): Promise => +const watchRemove = (folderPath: string) => ipcRenderer.invoke("watchRemove", folderPath); const watchUpdateSyncedFiles = ( syncedFiles: FolderWatch["syncedFiles"], folderPath: string, -): Promise => - ipcRenderer.invoke("watchUpdateSyncedFiles", syncedFiles, folderPath); +) => ipcRenderer.invoke("watchUpdateSyncedFiles", syncedFiles, folderPath); const watchUpdateIgnoredFiles = ( ignoredFiles: FolderWatch["ignoredFiles"], folderPath: string, -): Promise => - ipcRenderer.invoke("watchUpdateIgnoredFiles", ignoredFiles, folderPath); +) => ipcRenderer.invoke("watchUpdateIgnoredFiles", ignoredFiles, folderPath); const watchOnAddFile = (f: (path: string, watch: FolderWatch) => void) => { ipcRenderer.removeAllListeners("watchAddFile"); @@ -236,33 +205,56 @@ const watchOnRemoveDir = (f: (path: string, watch: FolderWatch) => void) => { ); }; -const watchFindFiles = (folderPath: string): Promise => +const watchFindFiles = (folderPath: string) => ipcRenderer.invoke("watchFindFiles", folderPath); +const watchReset = async () => { + 
ipcRenderer.removeAllListeners("watchAddFile"); + ipcRenderer.removeAllListeners("watchRemoveFile"); + ipcRenderer.removeAllListeners("watchRemoveDir"); + await ipcRenderer.invoke("watchReset"); +}; + // - Upload -const pendingUploads = (): Promise => - ipcRenderer.invoke("pendingUploads"); +const pathForFile = (file: File) => { + const path = webUtils.getPathForFile(file); + // The path that we get back from `webUtils.getPathForFile` on Windows uses + // "/" as the path separator. Convert them to POSIX separators. + // + // Note that we do not have access to the path or the os module in the + // preload script, thus this hand rolled transformation. -const setPendingUploadCollection = (collectionName: string): Promise => - ipcRenderer.invoke("setPendingUploadCollection", collectionName); + // However that makes TypeScript fidgety since we it cannot find navigator, + // as we haven't included "lib": ["dom"] in our tsconfig to avoid making DOM + // APIs available to our main Node.js code. We could create a separate + // tsconfig just for the preload script, but for now let's go with a cast. + // + // @ts-expect-error navigator is not defined. + const platform = (navigator as { platform: string }).platform; + return platform.toLowerCase().includes("win") + ? 
path.split("\\").join("/") + : path; +}; -const setPendingUploadFiles = ( - type: PendingUploads["type"], - filePaths: string[], -): Promise => - ipcRenderer.invoke("setPendingUploadFiles", type, filePaths); +const listZipItems = (zipPath: string) => + ipcRenderer.invoke("listZipItems", zipPath); -// - TODO: AUDIT below this -// - +const pathOrZipItemSize = (pathOrZipItem: string | ZipItem) => + ipcRenderer.invoke("pathOrZipItemSize", pathOrZipItem); -const getElectronFilesFromGoogleZip = ( - filePath: string, -): Promise => - ipcRenderer.invoke("getElectronFilesFromGoogleZip", filePath); +const pendingUploads = () => ipcRenderer.invoke("pendingUploads"); -const getDirFiles = (dirPath: string): Promise => - ipcRenderer.invoke("getDirFiles", dirPath); +const setPendingUploads = (pendingUploads: PendingUploads) => + ipcRenderer.invoke("setPendingUploads", pendingUploads); + +const markUploadedFiles = (paths: PendingUploads["filePaths"]) => + ipcRenderer.invoke("markUploadedFiles", paths); + +const markUploadedZipItems = (items: PendingUploads["zipItems"]) => + ipcRenderer.invoke("markUploadedZipItems", items); + +const clearPendingUploads = () => ipcRenderer.invoke("clearPendingUploads"); /** * These objects exposed here will become available to the JS code in our @@ -311,6 +303,7 @@ contextBridge.exposeInMainWorld("electron", { logToDisk, openDirectory, openLogDirectory, + selectDirectory, clearStores, encryptionKey, saveEncryptionKey, @@ -334,7 +327,6 @@ contextBridge.exposeInMainWorld("electron", { readTextFile: fsReadTextFile, writeFile: fsWriteFile, isDir: fsIsDir, - size: fsSize, }, // - Conversion @@ -350,35 +342,29 @@ contextBridge.exposeInMainWorld("electron", { detectFaces, faceEmbedding, - // - File selection - - selectDirectory, - showUploadFilesDialog, - showUploadDirsDialog, - showUploadZipDialog, - // - Watch watch: { get: watchGet, add: watchAdd, remove: watchRemove, + updateSyncedFiles: watchUpdateSyncedFiles, + updateIgnoredFiles: 
watchUpdateIgnoredFiles, onAddFile: watchOnAddFile, onRemoveFile: watchOnRemoveFile, onRemoveDir: watchOnRemoveDir, findFiles: watchFindFiles, - updateSyncedFiles: watchUpdateSyncedFiles, - updateIgnoredFiles: watchUpdateIgnoredFiles, + reset: watchReset, }, // - Upload + pathForFile, + listZipItems, + pathOrZipItemSize, pendingUploads, - setPendingUploadCollection, - setPendingUploadFiles, - - // - - - getElectronFilesFromGoogleZip, - getDirFiles, + setPendingUploads, + markUploadedFiles, + markUploadedZipItems, + clearPendingUploads, }); diff --git a/desktop/src/thirdparty/clip-bpe-ts/mod.ts b/desktop/src/thirdparty/clip-bpe-ts/mod.ts index 6cdf246f7..4d00eef0e 100644 --- a/desktop/src/thirdparty/clip-bpe-ts/mod.ts +++ b/desktop/src/thirdparty/clip-bpe-ts/mod.ts @@ -1,3 +1,5 @@ +/* eslint-disable */ + import * as htmlEntities from "html-entities"; import bpeVocabData from "./bpe_simple_vocab_16e6"; // import ftfy from "https://deno.land/x/ftfy_pyodide@v0.1.1/mod.js"; @@ -410,6 +412,7 @@ export default class { newWord.push(first + second); i += 2; } else { + // @ts-expect-error "Array indexing can return undefined but not modifying thirdparty code" newWord.push(word[i]); i += 1; } @@ -434,6 +437,7 @@ export default class { .map((b) => this.byteEncoder[b.charCodeAt(0) as number]) .join(""); bpeTokens.push( + // @ts-expect-error "Array indexing can return undefined but not modifying thirdparty code" ...this.bpe(token) .split(" ") .map((bpeToken: string) => this.encoder[bpeToken]), @@ -458,6 +462,7 @@ export default class { .join(""); text = [...text] .map((c) => this.byteDecoder[c]) + // @ts-expect-error "Array indexing can return undefined but not modifying thirdparty code" .map((v) => String.fromCharCode(v)) .join("") .replace(/<\/w>/g, " "); diff --git a/desktop/src/types/ipc.ts b/desktop/src/types/ipc.ts index 3fa375eab..f4985bfc7 100644 --- a/desktop/src/types/ipc.ts +++ b/desktop/src/types/ipc.ts @@ -25,10 +25,12 @@ export interface FolderWatchSyncedFile { 
collectionID: number; } +export type ZipItem = [zipPath: string, entryName: string]; + export interface PendingUploads { - collectionName: string; - type: "files" | "zips"; - files: ElectronFile[]; + collectionName: string | undefined; + filePaths: string[]; + zipItems: ZipItem[]; } /** @@ -40,25 +42,3 @@ export interface PendingUploads { export const CustomErrorMessage = { NotAvailable: "This feature in not available on the current OS/arch", }; - -/** - * Deprecated - Use File + webUtils.getPathForFile instead - * - * Electron used to augment the standard web - * [File](https://developer.mozilla.org/en-US/docs/Web/API/File) object with an - * additional `path` property. This is now deprecated, and will be removed in a - * future release. - * https://www.electronjs.org/docs/latest/api/file-object - * - * The alternative to the `path` property is to use `webUtils.getPathForFile` - * https://www.electronjs.org/docs/latest/api/web-utils - */ -export interface ElectronFile { - name: string; - path: string; - size: number; - lastModified: number; - stream: () => Promise>; - blob: () => Promise; - arrayBuffer: () => Promise; -} diff --git a/desktop/tsconfig.json b/desktop/tsconfig.json index 700ea3fa0..7806cd93a 100644 --- a/desktop/tsconfig.json +++ b/desktop/tsconfig.json @@ -3,71 +3,34 @@ into JavaScript that'll then be loaded and run by the main (node) process of our Electron app. */ + /* + * Recommended target, lib and other settings for code running in the + * version of Node.js bundled with Electron. + * + * Currently, with Electron 30, this is Node.js 20.11.1. + * https://www.electronjs.org/blog/electron-30-0 + */ + "extends": "@tsconfig/node20/tsconfig.json", + /* TSConfig docs: https://aka.ms/tsconfig.json */ - "compilerOptions": { - /* Recommended target, lib and other settings for code running in the - version of Node.js bundled with Electron. 
- - Currently, with Electron 29, this is Node.js 20.9 - https://www.electronjs.org/blog/electron-29-0 - - Note that we cannot do - - "extends": "@tsconfig/node20/tsconfig.json", - - because that sets "lib": ["es2023"]. However (and I don't fully - understand what's going on here), that breaks our compilation since - tsc can then not find type definitions of things like ReadableStream. - - Adding "dom" to "lib" (e.g. `"lib": ["es2023", "dom"]`) fixes the - issue, but that doesn't sound correct - the main Electron process - isn't running in a browser context. - - It is possible that we're using some of the types incorrectly. For - now, we just omit the "lib" definition and rely on the defaults for - the "target" we've chosen. This is also what the current - electron-forge starter does: - - yarn create electron-app electron-forge-starter -- --template=webpack-typescript - - Enhancement: Can revisit this later. - - Refs: - - https://github.com/electron/electron/issues/27092 - - https://github.com/electron/electron/issues/16146 - */ - - "target": "es2022", - "module": "node16", - - /* Enable various workarounds to play better with CJS libraries */ - "esModuleInterop": true, - /* Speed things up by not type checking `node_modules` */ - "skipLibCheck": true, - /* Emit the generated JS into `app/` */ "outDir": "app", - /* Temporary overrides to get things to compile with the older config */ - "strict": false, - "noImplicitAny": true - - /* Below is the state we want */ - /* Enable these one by one */ - // "strict": true, - /* Require the `type` modifier when importing types */ - // "verbatimModuleSyntax": true + /* We want this, but it causes "ESM syntax is not allowed in a CommonJS + module when 'verbatimModuleSyntax' is enabled" currently */ + /* "verbatimModuleSyntax": true, */ + "strict": true, /* Stricter than strict */ - // "noImplicitReturns": true, - // "noUnusedParameters": true, - // "noUnusedLocals": true, - // "noFallthroughCasesInSwitch": true, + 
"noImplicitReturns": true, + "noUnusedParameters": true, + "noUnusedLocals": true, + "noFallthroughCasesInSwitch": true, /* e.g. makes array indexing returns undefined */ - // "noUncheckedIndexedAccess": true, - // "exactOptionalPropertyTypes": true, + "noUncheckedIndexedAccess": true, + "exactOptionalPropertyTypes": true }, /* Transpile all `.ts` files in `src/` */ "include": ["src/**/*.ts"] diff --git a/desktop/yarn.lock b/desktop/yarn.lock index a5b86f1eb..d4338312b 100644 --- a/desktop/yarn.lock +++ b/desktop/yarn.lock @@ -7,11 +7,6 @@ resolved "https://registry.yarnpkg.com/7zip-bin/-/7zip-bin-5.2.0.tgz#7a03314684dd6572b7dfa89e68ce31d60286854d" integrity sha512-ukTPVhqG4jNzMro2qA9HSCSSVJN3aN7tlb+hfqYCt3ER0yWroeA2VR38MNrOHLQ/cVj+DaIMad0kFCtWWowh/A== -"@aashutoshrathi/word-wrap@^1.2.3": - version "1.2.6" - resolved "https://registry.yarnpkg.com/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz#bd9154aec9983f77b3a034ecaa015c2e4201f6cf" - integrity sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA== - "@babel/code-frame@^7.0.0": version "7.24.2" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.24.2.tgz#718b4b19841809a58b29b68cde80bc5e1aa6d9ae" @@ -20,25 +15,25 @@ "@babel/highlight" "^7.24.2" picocolors "^1.0.0" -"@babel/helper-validator-identifier@^7.22.20": - version "7.22.20" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0" - integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A== +"@babel/helper-validator-identifier@^7.24.5": + version "7.24.5" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.5.tgz#918b1a7fa23056603506370089bd990d8720db62" + integrity sha512-3q93SSKX2TWCG30M2G2kwaKeTYgEUp5Snjuj8qm729SObL6nbtUldAi37qbxkD5gg3xnBio+f9nqpSepGZMvxA== "@babel/highlight@^7.24.2": - 
version "7.24.2" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.24.2.tgz#3f539503efc83d3c59080a10e6634306e0370d26" - integrity sha512-Yac1ao4flkTxTteCDZLEvdxg2fZfz1v8M4QpaGypq/WPDqg3ijHYbDfs+LG5hvzSoqaSZ9/Z9lKSP3CjZjv+pA== + version "7.24.5" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.24.5.tgz#bc0613f98e1dd0720e99b2a9ee3760194a704b6e" + integrity sha512-8lLmua6AVh/8SLJRRVD6V8p73Hir9w5mJrhE+IPpILG31KKlI9iz5zmBYKcWPS59qSfgP9RaSBQSHHE81WKuEw== dependencies: - "@babel/helper-validator-identifier" "^7.22.20" + "@babel/helper-validator-identifier" "^7.24.5" chalk "^2.4.2" js-tokens "^4.0.0" picocolors "^1.0.0" "@babel/runtime@^7.21.0": - version "7.24.0" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.24.0.tgz#584c450063ffda59697021430cb47101b085951e" - integrity sha512-Chk32uHMg6TnQdvw2e9IlqPpFX/6NLuK0Ys2PqLb7/gL5uFn9mXvK715FGLlOLQrcO4qIkNHkvPGktzzXexsFw== + version "7.24.5" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.24.5.tgz#230946857c053a36ccc66e1dd03b17dd0c4ed02c" + integrity sha512-Nms86NXrsaeU9vbBJKni6gXiEXZ4CVpYVzEjDH9Sb8vmZ3UljyA1GSOJl/6LGPO8EHLuSF9H+IxNXHPX8QHJ4g== dependencies: regenerator-runtime "^0.14.0" @@ -165,21 +160,16 @@ integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA== "@humanwhocodes/object-schema@^2.0.2": - version "2.0.2" - resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-2.0.2.tgz#d9fae00a2d5cb40f92cfe64b47ad749fbc38f917" - integrity sha512-6EwiSjwWYP7pTckG6I5eyFANjPhmPjUX9JRLUSfNPC7FX7zK9gyZAfUEaECL6ALTpGX5AjnBq3C9XmVWPitNpw== + version "2.0.3" + resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz#4a2868d75d6d6963e423bcf90b7fd1be343409d3" + integrity sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA== -"@isaacs/cliui@^8.0.2": - version "8.0.2" - resolved 
"https://registry.yarnpkg.com/@isaacs/cliui/-/cliui-8.0.2.tgz#b37667b7bc181c168782259bab42474fbf52b550" - integrity sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA== +"@isaacs/fs-minipass@^4.0.0": + version "4.0.1" + resolved "https://registry.yarnpkg.com/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz#2d59ae3ab4b38fb4270bfa23d30f8e2e86c7fe32" + integrity sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w== dependencies: - string-width "^5.1.2" - string-width-cjs "npm:string-width@^4.2.0" - strip-ansi "^7.0.1" - strip-ansi-cjs "npm:strip-ansi@^6.0.1" - wrap-ansi "^8.1.0" - wrap-ansi-cjs "npm:wrap-ansi@^7.0.0" + minipass "^7.0.4" "@malept/cross-spawn-promise@^1.1.0": version "1.1.1" @@ -246,6 +236,11 @@ resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-2.0.0.tgz#f544a148d3ab35801c1f633a7441fd87c2e484bf" integrity sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A== +"@tsconfig/node20@^20.1.4": + version "20.1.4" + resolved "https://registry.yarnpkg.com/@tsconfig/node20/-/node20-20.1.4.tgz#3457d42eddf12d3bde3976186ab0cd22b85df928" + integrity sha512-sqgsT69YFeLWf5NtJ4Xq/xAF8p4ZQHlmGW74Nu2tD4+g5fAsposc4ZfaaPixVu4y01BEiDCWLRDCvDM5JOsRxg== + "@types/auto-launch@^5.0": version "5.0.5" resolved "https://registry.yarnpkg.com/@types/auto-launch/-/auto-launch-5.0.5.tgz#439ed36aaaea501e2e2cfbddd8a20c366c34863b" @@ -352,15 +347,15 @@ "@types/node" "*" "@typescript-eslint/eslint-plugin@^7": - version "7.6.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.6.0.tgz#1f5df5cda490a0bcb6fbdd3382e19f1241024242" - integrity sha512-gKmTNwZnblUdnTIJu3e9kmeRRzV2j1a/LUO27KNNAnIC5zjy1aSvXSRp4rVNlmAoHlQ7HzX42NbKpcSr4jF80A== + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.8.0.tgz#c78e309fe967cb4de05b85cdc876fb95f8e01b6f" + integrity 
sha512-gFTT+ezJmkwutUPmB0skOj3GZJtlEGnlssems4AjkVweUPGj7jRwwqg0Hhg7++kPGJqKtTYx+R05Ftww372aIg== dependencies: "@eslint-community/regexpp" "^4.10.0" - "@typescript-eslint/scope-manager" "7.6.0" - "@typescript-eslint/type-utils" "7.6.0" - "@typescript-eslint/utils" "7.6.0" - "@typescript-eslint/visitor-keys" "7.6.0" + "@typescript-eslint/scope-manager" "7.8.0" + "@typescript-eslint/type-utils" "7.8.0" + "@typescript-eslint/utils" "7.8.0" + "@typescript-eslint/visitor-keys" "7.8.0" debug "^4.3.4" graphemer "^1.4.0" ignore "^5.3.1" @@ -369,46 +364,46 @@ ts-api-utils "^1.3.0" "@typescript-eslint/parser@^7": - version "7.6.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-7.6.0.tgz#0aca5de3045d68b36e88903d15addaf13d040a95" - integrity sha512-usPMPHcwX3ZoPWnBnhhorc14NJw9J4HpSXQX4urF2TPKG0au0XhJoZyX62fmvdHONUkmyUe74Hzm1//XA+BoYg== + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-7.8.0.tgz#1e1db30c8ab832caffee5f37e677dbcb9357ddc8" + integrity sha512-KgKQly1pv0l4ltcftP59uQZCi4HUYswCLbTqVZEJu7uLX8CTLyswqMLqLN+2QFz4jCptqWVV4SB7vdxcH2+0kQ== dependencies: - "@typescript-eslint/scope-manager" "7.6.0" - "@typescript-eslint/types" "7.6.0" - "@typescript-eslint/typescript-estree" "7.6.0" - "@typescript-eslint/visitor-keys" "7.6.0" + "@typescript-eslint/scope-manager" "7.8.0" + "@typescript-eslint/types" "7.8.0" + "@typescript-eslint/typescript-estree" "7.8.0" + "@typescript-eslint/visitor-keys" "7.8.0" debug "^4.3.4" -"@typescript-eslint/scope-manager@7.6.0": - version "7.6.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-7.6.0.tgz#1e9972f654210bd7500b31feadb61a233f5b5e9d" - integrity sha512-ngttyfExA5PsHSx0rdFgnADMYQi+Zkeiv4/ZxGYUWd0nLs63Ha0ksmp8VMxAIC0wtCFxMos7Lt3PszJssG/E6w== +"@typescript-eslint/scope-manager@7.8.0": + version "7.8.0" + resolved 
"https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-7.8.0.tgz#bb19096d11ec6b87fb6640d921df19b813e02047" + integrity sha512-viEmZ1LmwsGcnr85gIq+FCYI7nO90DVbE37/ll51hjv9aG+YZMb4WDE2fyWpUR4O/UrhGRpYXK/XajcGTk2B8g== dependencies: - "@typescript-eslint/types" "7.6.0" - "@typescript-eslint/visitor-keys" "7.6.0" + "@typescript-eslint/types" "7.8.0" + "@typescript-eslint/visitor-keys" "7.8.0" -"@typescript-eslint/type-utils@7.6.0": - version "7.6.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-7.6.0.tgz#644f75075f379827d25fe0713e252ccd4e4a428c" - integrity sha512-NxAfqAPNLG6LTmy7uZgpK8KcuiS2NZD/HlThPXQRGwz6u7MDBWRVliEEl1Gj6U7++kVJTpehkhZzCJLMK66Scw== +"@typescript-eslint/type-utils@7.8.0": + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-7.8.0.tgz#9de166f182a6e4d1c5da76e94880e91831e3e26f" + integrity sha512-H70R3AefQDQpz9mGv13Uhi121FNMh+WEaRqcXTX09YEDky21km4dV1ZXJIp8QjXc4ZaVkXVdohvWDzbnbHDS+A== dependencies: - "@typescript-eslint/typescript-estree" "7.6.0" - "@typescript-eslint/utils" "7.6.0" + "@typescript-eslint/typescript-estree" "7.8.0" + "@typescript-eslint/utils" "7.8.0" debug "^4.3.4" ts-api-utils "^1.3.0" -"@typescript-eslint/types@7.6.0": - version "7.6.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-7.6.0.tgz#53dba7c30c87e5f10a731054266dd905f1fbae38" - integrity sha512-h02rYQn8J+MureCvHVVzhl69/GAfQGPQZmOMjG1KfCl7o3HtMSlPaPUAPu6lLctXI5ySRGIYk94clD/AUMCUgQ== +"@typescript-eslint/types@7.8.0": + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-7.8.0.tgz#1fd2577b3ad883b769546e2d1ef379f929a7091d" + integrity sha512-wf0peJ+ZGlcH+2ZS23aJbOv+ztjeeP8uQ9GgwMJGVLx/Nj9CJt17GWgWWoSmoRVKAX2X+7fzEnAjxdvK2gqCLw== -"@typescript-eslint/typescript-estree@7.6.0": - version "7.6.0" - resolved 
"https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-7.6.0.tgz#112a3775563799fd3f011890ac8322f80830ac17" - integrity sha512-+7Y/GP9VuYibecrCQWSKgl3GvUM5cILRttpWtnAu8GNL9j11e4tbuGZmZjJ8ejnKYyBRb2ddGQ3rEFCq3QjMJw== +"@typescript-eslint/typescript-estree@7.8.0": + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-7.8.0.tgz#b028a9226860b66e623c1ee55cc2464b95d2987c" + integrity sha512-5pfUCOwK5yjPaJQNy44prjCwtr981dO8Qo9J9PwYXZ0MosgAbfEMB008dJ5sNo3+/BN6ytBPuSvXUg9SAqB0dg== dependencies: - "@typescript-eslint/types" "7.6.0" - "@typescript-eslint/visitor-keys" "7.6.0" + "@typescript-eslint/types" "7.8.0" + "@typescript-eslint/visitor-keys" "7.8.0" debug "^4.3.4" globby "^11.1.0" is-glob "^4.0.3" @@ -416,25 +411,25 @@ semver "^7.6.0" ts-api-utils "^1.3.0" -"@typescript-eslint/utils@7.6.0": - version "7.6.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-7.6.0.tgz#e400d782280b6f724c8a1204269d984c79202282" - integrity sha512-x54gaSsRRI+Nwz59TXpCsr6harB98qjXYzsRxGqvA5Ue3kQH+FxS7FYU81g/omn22ML2pZJkisy6Q+ElK8pBCA== +"@typescript-eslint/utils@7.8.0": + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-7.8.0.tgz#57a79f9c0c0740ead2f622e444cfaeeb9fd047cd" + integrity sha512-L0yFqOCflVqXxiZyXrDr80lnahQfSOfc9ELAAZ75sqicqp2i36kEZZGuUymHNFoYOqxRT05up760b4iGsl02nQ== dependencies: "@eslint-community/eslint-utils" "^4.4.0" "@types/json-schema" "^7.0.15" "@types/semver" "^7.5.8" - "@typescript-eslint/scope-manager" "7.6.0" - "@typescript-eslint/types" "7.6.0" - "@typescript-eslint/typescript-estree" "7.6.0" + "@typescript-eslint/scope-manager" "7.8.0" + "@typescript-eslint/types" "7.8.0" + "@typescript-eslint/typescript-estree" "7.8.0" semver "^7.6.0" -"@typescript-eslint/visitor-keys@7.6.0": - version "7.6.0" - resolved 
"https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-7.6.0.tgz#d1ce13145844379021e1f9bd102c1d78946f4e76" - integrity sha512-4eLB7t+LlNUmXzfOu1VAIAdkjbu5xNSerURS9X/S5TUKWFRpXRQZbmtPqgKmYx8bj3J0irtQXSiWAOY82v+cgw== +"@typescript-eslint/visitor-keys@7.8.0": + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-7.8.0.tgz#7285aab991da8bee411a42edbd5db760d22fdd91" + integrity sha512-q4/gibTNBQNA0lGyYQCmWRS5D15n8rXh4QjK3KV+MBPlTYHpfBUT3D3PaPR/HeNiI9W6R7FvlkcGhNyAoP+caA== dependencies: - "@typescript-eslint/types" "7.6.0" + "@typescript-eslint/types" "7.8.0" eslint-visitor-keys "^3.4.3" "@ungap/structured-clone@^1.2.0": @@ -487,25 +482,20 @@ ajv@^6.10.0, ajv@^6.12.0, ajv@^6.12.4: uri-js "^4.2.2" ajv@^8.0.0, ajv@^8.6.3: - version "8.12.0" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.12.0.tgz#d1a0527323e22f53562c567c00991577dfbe19d1" - integrity sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA== + version "8.13.0" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.13.0.tgz#a3939eaec9fb80d217ddf0c3376948c023f28c91" + integrity sha512-PRA911Blj99jR5RMeTunVbNXMF6Lp4vZXnk5GQjcnUWUTsrXtekg/pnmFFI2u/I36Y/2bITGS30GZCXei6uNkA== dependencies: - fast-deep-equal "^3.1.1" + fast-deep-equal "^3.1.3" json-schema-traverse "^1.0.0" require-from-string "^2.0.2" - uri-js "^4.2.2" + uri-js "^4.4.1" ansi-regex@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== -ansi-regex@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.0.1.tgz#3183e38fae9a65d7cb5e53945cd5897d0260a06a" - integrity sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA== - ansi-styles@^3.2.1: version "3.2.1" resolved 
"https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" @@ -520,11 +510,6 @@ ansi-styles@^4.0.0, ansi-styles@^4.1.0: dependencies: color-convert "^2.0.1" -ansi-styles@^6.1.0: - version "6.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5" - integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug== - any-shell-escape@^0.1: version "0.1.1" resolved "https://registry.yarnpkg.com/any-shell-escape/-/any-shell-escape-0.1.1.tgz#d55ab972244c71a9a5e1ab0879f30bf110806959" @@ -824,6 +809,11 @@ chownr@^2.0.0: resolved "https://registry.yarnpkg.com/chownr/-/chownr-2.0.0.tgz#15bfbe53d2eab4cf70f18a8cd68ebe5b3cb1dece" integrity sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ== +chownr@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/chownr/-/chownr-3.0.0.tgz#9855e64ecd240a9cc4267ce8a4aa5d24a1da15e4" + integrity sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g== + chromium-pickle-js@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/chromium-pickle-js/-/chromium-pickle-js-0.2.0.tgz#04a106672c18b085ab774d983dfa3ea138f22205" @@ -1026,7 +1016,7 @@ define-data-property@^1.0.1: es-errors "^1.3.0" gopd "^1.0.1" -define-properties@^1.1.3: +define-properties@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.2.1.tgz#10781cc616eb951a80a034bafcaa7377f6af2b6c" integrity sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg== @@ -1127,15 +1117,10 @@ dotenv@^9.0.2: resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-9.0.2.tgz#dacc20160935a37dea6364aa1bef819fb9b6ab05" integrity sha512-I9OvvrHp4pIARv4+x9iuewrWycX6CcZtoAu1XrzPxc5UygMJXJZYmBsynku8IkrJwgypE5DGNjDPmPRhDCptUg== -eastasianwidth@^0.2.0: - version "0.2.0" 
- resolved "https://registry.yarnpkg.com/eastasianwidth/-/eastasianwidth-0.2.0.tgz#696ce2ec0aa0e6ea93a397ffcf24aa7840c827cb" - integrity sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA== - ejs@^3.1.8: - version "3.1.9" - resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.9.tgz#03c9e8777fe12686a9effcef22303ca3d8eeb361" - integrity sha512-rC+QVNMJWv+MtPgkt0y+0rVEIdbtxVADApW9JXrUVlzHetgcyczP/E7DJmWJ4fJCZF2cPcBk0laWO9ZHMG3DmQ== + version "3.1.10" + resolved "https://registry.yarnpkg.com/ejs/-/ejs-3.1.10.tgz#69ab8358b14e896f80cc39e62087b88500c3ac3b" + integrity sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA== dependencies: jake "^10.8.5" @@ -1214,10 +1199,10 @@ electron-updater@^6.1: semver "^7.3.8" tiny-typed-emitter "^2.1.0" -electron@^29: - version "29.3.0" - resolved "https://registry.yarnpkg.com/electron/-/electron-29.3.0.tgz#8e65cb08e9c0952c66d3196e1b5c811c43b8c5b0" - integrity sha512-ZxFKm0/v48GSoBuO3DdnMlCYXefEUKUHLMsKxyXY4nZGgzbBKpF/X8haZa2paNj23CLfsCKBOtfc2vsEQiOOsA== +electron@^30: + version "30.0.2" + resolved "https://registry.yarnpkg.com/electron/-/electron-30.0.2.tgz#95ba019216bf8be9f3097580123e33ea37497733" + integrity sha512-zv7T+GG89J/hyWVkQsLH4Y/rVEfqJG5M/wOBIGNaDdqd8UV9/YZPdS7CuFeaIj0H9LhCt95xkIQNpYB/3svOkQ== dependencies: "@electron/get" "^2.0.0" "@types/node" "^20.9.0" @@ -1228,11 +1213,6 @@ emoji-regex@^8.0.0: resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== -emoji-regex@^9.2.2: - version "9.2.2" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" - integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== - end-of-stream@^1.1.0: version "1.4.4" resolved 
"https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" @@ -1495,17 +1475,18 @@ find-up@^5.0.0: path-exists "^4.0.0" flat-cache@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" - integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== + version "3.2.0" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.2.0.tgz#2c0c2d5040c99b1632771a9d105725c0115363ee" + integrity sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw== dependencies: - flatted "^3.1.0" + flatted "^3.2.9" + keyv "^4.5.3" rimraf "^3.0.2" -flatted@^3.1.0: - version "3.2.6" - resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.6.tgz#022e9218c637f9f3fc9c35ab9c9193f05add60b2" - integrity sha512-0sQoMh9s0BYsm+12Huy/rkKxVu4R1+r96YX5cG44rHV0pQ6iC3Q+mkoMFaGWObMFYQxCVT+ssG1ksneA2MI9KQ== +flatted@^3.2.9: + version "3.3.1" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.3.1.tgz#21db470729a6734d4997002f439cb308987f567a" + integrity sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw== foreground-child@^3.1.0: version "3.1.1" @@ -1621,16 +1602,16 @@ glob-parent@^6.0.2: dependencies: is-glob "^4.0.3" -glob@^10.3.10: - version "10.3.10" - resolved "https://registry.yarnpkg.com/glob/-/glob-10.3.10.tgz#0351ebb809fd187fe421ab96af83d3a70715df4b" - integrity sha512-fa46+tv1Ak0UPK1TOy/pZrIybNNt4HCv7SDzwyfiOZkvZLEbjsZkJBPtDHVshZjbecAoAGSC20MjLDG/qr679g== +glob@^10.3.10, glob@^10.3.7: + version "10.3.12" + resolved "https://registry.yarnpkg.com/glob/-/glob-10.3.12.tgz#3a65c363c2e9998d220338e88a5f6ac97302960b" + integrity sha512-TCNv8vJ+xz4QiqTpfOJA7HvYv+tNIRHKfUWw/q+v2jdgN4ebz+KY9tGx5J4rHP0o84mNP+ApH66HRX8us3Khqg== dependencies: foreground-child "^3.1.0" - jackspeak "^2.3.5" + jackspeak "^2.3.6" minimatch 
"^9.0.1" - minipass "^5.0.0 || ^6.0.2 || ^7.0.0" - path-scurry "^1.10.1" + minipass "^7.0.4" + path-scurry "^1.10.2" glob@^7.0.0, glob@^7.1.3, glob@^7.1.6: version "7.2.3" @@ -1664,11 +1645,12 @@ globals@^13.19.0: type-fest "^0.20.2" globalthis@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/globalthis/-/globalthis-1.0.3.tgz#5852882a52b80dc301b0660273e1ed082f0b6ccf" - integrity sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA== + version "1.0.4" + resolved "https://registry.yarnpkg.com/globalthis/-/globalthis-1.0.4.tgz#7430ed3a975d97bfb59bcce41f5cabbafa651236" + integrity sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ== dependencies: - define-properties "^1.1.3" + define-properties "^1.2.1" + gopd "^1.0.1" globby@^11.1.0: version "11.1.0" @@ -1949,12 +1931,12 @@ isexe@^2.0.0: resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== -jackspeak@^2.3.5: - version "2.3.6" - resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-2.3.6.tgz#647ecc472238aee4b06ac0e461acc21a8c505ca8" - integrity sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ== +jackspeak@2.1.1, jackspeak@^2.3.6: + version "2.1.1" + resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-2.1.1.tgz#2a42db4cfbb7e55433c28b6f75d8b796af9669cd" + integrity sha512-juf9stUEwUaILepraGOWIJTLwg48bUnBmRqd2ln2Os1sW987zeoj/hzhbvRB95oMuS2ZTpjULmdwHNX4rzZIZw== dependencies: - "@isaacs/cliui" "^8.0.2" + cliui "^8.0.1" optionalDependencies: "@pkgjs/parseargs" "^0.11.0" @@ -2049,7 +2031,7 @@ jsonfile@^6.0.1: optionalDependencies: graceful-fs "^4.1.6" -keyv@^4.0.0: +keyv@^4.0.0, keyv@^4.5.3: version "4.5.4" resolved "https://registry.yarnpkg.com/keyv/-/keyv-4.5.4.tgz#a879a99e29452f942439f2a405e3af8b31d4de93" 
integrity sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw== @@ -2121,6 +2103,11 @@ lowercase-keys@^2.0.0: resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479" integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA== +lru-cache@^10.2.0: + version "10.2.2" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.2.2.tgz#48206bc114c1252940c41b25b41af5b545aca878" + integrity sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ== + lru-cache@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" @@ -2128,11 +2115,6 @@ lru-cache@^6.0.0: dependencies: yallist "^4.0.0" -"lru-cache@^9.1.1 || ^10.0.0": - version "10.2.0" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.2.0.tgz#0bd445ca57363465900f4d1f9bd8db343a4d95c3" - integrity sha512-2bIM8x+VAf6JT4bKAljS1qUWgMsqZRPGJS6FSahIMPVvctcNhyVp7AJu7quxOW9jwkryBReKZY5tY5JYv2n/7Q== - matcher@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/matcher/-/matcher-3.0.0.tgz#bd9060f4c5b70aa8041ccc6f80368760994f30ca" @@ -2204,14 +2186,7 @@ minimatch@^5.0.1, minimatch@^5.1.1: dependencies: brace-expansion "^2.0.1" -minimatch@^9.0.1: - version "9.0.3" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.3.tgz#a6e00c3de44c3a542bfaae70abfc22420a6da825" - integrity sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg== - dependencies: - brace-expansion "^2.0.1" - -minimatch@^9.0.4: +minimatch@^9.0.1, minimatch@^9.0.4: version "9.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.4.tgz#8e49c731d1749cbec05050ee5145147b32496a51" integrity sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw== @@ -2235,7 +2210,7 @@ 
minipass@^5.0.0: resolved "https://registry.yarnpkg.com/minipass/-/minipass-5.0.0.tgz#3e9788ffb90b694a5d0ec94479a45b5d8738133d" integrity sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ== -"minipass@^5.0.0 || ^6.0.2 || ^7.0.0": +"minipass@^5.0.0 || ^6.0.2 || ^7.0.0", minipass@^7.0.4: version "7.0.4" resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.0.4.tgz#dbce03740f50a4786ba994c1fb908844d27b038c" integrity sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ== @@ -2248,6 +2223,14 @@ minizlib@^2.1.1: minipass "^3.0.0" yallist "^4.0.0" +minizlib@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-3.0.1.tgz#46d5329d1eb3c83924eff1d3b858ca0a31581012" + integrity sha512-umcy022ILvb5/3Djuu8LWeqUa8D68JaBzlttKeMWen48SjabqS3iY5w/vzeMzMUNhLDifyhbOwKDSznB1vvrwg== + dependencies: + minipass "^7.0.4" + rimraf "^5.0.5" + mkdirp@^0.5.1: version "0.5.6" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" @@ -2260,6 +2243,11 @@ mkdirp@^1.0.3: resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== +mkdirp@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-3.0.1.tgz#e44e4c5607fb279c168241713cc6e0fea9adcb50" + integrity sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg== + ms@2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" @@ -2324,29 +2312,30 @@ onetime@^5.1.2: dependencies: mimic-fn "^2.1.0" -onnxruntime-common@1.17.0: - version "1.17.0" - resolved "https://registry.yarnpkg.com/onnxruntime-common/-/onnxruntime-common-1.17.0.tgz#b2534ce021b1c1b19182bec39aaea8d547d2013e" - integrity 
sha512-Vq1remJbCPITjDMJ04DA7AklUTnbYUp4vbnm6iL7ukSt+7VErH0NGYfekRSTjxxurEtX7w41PFfnQlE6msjPJw== +onnxruntime-common@1.17.3: + version "1.17.3" + resolved "https://registry.yarnpkg.com/onnxruntime-common/-/onnxruntime-common-1.17.3.tgz#aadc456477873a540ee3d611ae9cd4f3de7c43e5" + integrity sha512-IkbaDelNVX8cBfHFgsNADRIq2TlXMFWW+nG55mwWvQT4i0NZb32Jf35Pf6h9yjrnK78RjcnlNYaI37w394ovMw== onnxruntime-node@^1.17: - version "1.17.0" - resolved "https://registry.yarnpkg.com/onnxruntime-node/-/onnxruntime-node-1.17.0.tgz#38af0ba527cb44c1afb639bdcb4e549edba029a1" - integrity sha512-pRxdqSP3a6wtiFVkVX1V3/gsEMwBRUA9D2oYmcN3cjF+j+ILS+SIY2L7KxdWapsG6z64i5rUn8ijFZdIvbojBg== + version "1.17.3" + resolved "https://registry.yarnpkg.com/onnxruntime-node/-/onnxruntime-node-1.17.3.tgz#53b8b7ef68bf3834bba9d7be592e4c2d718d2018" + integrity sha512-NtbN1pfApTSEjVq46LrJ396aPP2Gjhy+oYZi5Bu1leDXAEvVap/BQ8CZELiLs7z0UnXy3xjJW23HiB4P3//FIw== dependencies: - onnxruntime-common "1.17.0" + onnxruntime-common "1.17.3" + tar "^7.0.1" optionator@^0.9.3: - version "0.9.3" - resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.3.tgz#007397d44ed1872fdc6ed31360190f81814e2c64" - integrity sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg== + version "0.9.4" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.4.tgz#7ea1c1a5d91d764fb282139c88fe11e182a3a734" + integrity sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g== dependencies: - "@aashutoshrathi/word-wrap" "^1.2.3" deep-is "^0.1.3" fast-levenshtein "^2.0.6" levn "^0.4.1" prelude-ls "^1.2.1" type-check "^0.4.0" + word-wrap "^1.2.5" p-cancelable@^2.0.0: version "2.1.1" @@ -2440,12 +2429,12 @@ path-parse@^1.0.7: resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== 
-path-scurry@^1.10.1: - version "1.10.1" - resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.10.1.tgz#9ba6bf5aa8500fe9fd67df4f0d9483b2b0bfc698" - integrity sha512-MkhCqzzBEpPvxxQ71Md0b1Kk51W01lrYvlMzSUaIzNsODdd7mqhiimSZlr+VegAz5Z6Vzt9Xg2ttE//XBhH3EQ== +path-scurry@^1.10.2: + version "1.10.2" + resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.10.2.tgz#8f6357eb1239d5fa1da8b9f70e9c080675458ba7" + integrity sha512-7xTavNy5RQXnsjANvVvMkEjvloOinkAjv/Z6Ildz9v2RinZ4SBKTWFOVRbaF8p0vpHnyjV/UwNDdKuUv6M5qcA== dependencies: - lru-cache "^9.1.1 || ^10.0.0" + lru-cache "^10.2.0" minipass "^5.0.0 || ^6.0.2 || ^7.0.0" path-type@^4.0.0: @@ -2655,6 +2644,13 @@ rimraf@^3.0.2: dependencies: glob "^7.1.3" +rimraf@^5.0.5: + version "5.0.5" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-5.0.5.tgz#9be65d2d6e683447d2e9013da2bf451139a61ccf" + integrity sha512-CqDakW+hMe/Bz202FPEymy68P+G50RfMQK+Qo5YUqc9SPipvbGjCGKd0RSKEelbsfQuw3g5NZDSrlZZAJurH1A== + dependencies: + glob "^10.3.7" + roarr@^2.15.3: version "2.15.4" resolved "https://registry.yarnpkg.com/roarr/-/roarr-2.15.4.tgz#f5fe795b7b838ccfe35dc608e0282b9eba2e7afd" @@ -2880,7 +2876,7 @@ stat-mode@^1.0.0: resolved "https://registry.yarnpkg.com/stat-mode/-/stat-mode-1.0.0.tgz#68b55cb61ea639ff57136f36b216a291800d1465" integrity sha512-jH9EhtKIjuXZ2cWxmXS8ZP80XyC3iasQxMDV8jzhNJpfDb7VbQLVW4Wvsxz9QZvzV+G4YoSfBUVKDOyxLzi/sg== -"string-width-cjs@npm:string-width@^4.2.0", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: +string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -2889,15 +2885,6 @@ stat-mode@^1.0.0: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.1" -string-width@^5.0.1, string-width@^5.1.2: - version "5.1.2" - resolved 
"https://registry.yarnpkg.com/string-width/-/string-width-5.1.2.tgz#14f8daec6d81e7221d2a357e668cab73bdbca794" - integrity sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA== - dependencies: - eastasianwidth "^0.2.0" - emoji-regex "^9.2.2" - strip-ansi "^7.0.1" - string_decoder@^1.1.1: version "1.3.0" resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" @@ -2905,20 +2892,13 @@ string_decoder@^1.1.1: dependencies: safe-buffer "~5.2.0" -"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1: +strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== dependencies: ansi-regex "^5.0.1" -strip-ansi@^7.0.1: - version "7.1.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" - integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ== - dependencies: - ansi-regex "^6.0.1" - strip-json-comments@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" @@ -2977,6 +2957,18 @@ tar@^6.1.12: mkdirp "^1.0.3" yallist "^4.0.0" +tar@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/tar/-/tar-7.0.1.tgz#8f6ccebcd91b69e9767a6fc4892799e8b0e606d5" + integrity sha512-IjMhdQMZFpKsHEQT3woZVxBtCQY+0wk3CVxdRkGXEgyGa0dNS/ehPvOMr2nmfC7x5Zj2N+l6yZUpmICjLGS35w== + dependencies: + "@isaacs/fs-minipass" "^4.0.0" + chownr "^3.0.0" + minipass "^5.0.0" + minizlib "^3.0.1" + mkdirp "^3.0.1" + yallist "^5.0.0" + temp-file@^3.4.0: version "3.4.0" resolved 
"https://registry.yarnpkg.com/temp-file/-/temp-file-3.4.0.tgz#766ea28911c683996c248ef1a20eea04d51652c7" @@ -3031,12 +3023,7 @@ ts-api-utils@^1.3.0: resolved "https://registry.yarnpkg.com/ts-api-utils/-/ts-api-utils-1.3.0.tgz#4b490e27129f1e8e686b45cc4ab63714dc60eea1" integrity sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ== -tslib@^2.1.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3" - integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ== - -tslib@^2.6.2: +tslib@^2.1.0, tslib@^2.6.2: version "2.6.2" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== @@ -3079,9 +3066,9 @@ typedarray@^0.0.6: integrity sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA== typescript@^5, typescript@^5.3.3: - version "5.4.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.4.3.tgz#5c6fedd4c87bee01cd7a528a30145521f8e0feff" - integrity sha512-KrPd3PKaCLr78MalgiwJnA25Nm8HAmdwN3mYUYZgG/wizIo9EainNVQI9/yDavtVFRN2h3k8uf3GLHuhDMgEHg== + version "5.4.5" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.4.5.tgz#42ccef2c571fdbd0f6718b1d1f5e6e5ef006f611" + integrity sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ== undici-types@~5.26.4: version "5.26.5" @@ -3103,7 +3090,7 @@ untildify@^3.0.2: resolved "https://registry.yarnpkg.com/untildify/-/untildify-3.0.3.tgz#1e7b42b140bcfd922b22e70ca1265bfe3634c7c9" integrity sha512-iSk/J8efr8uPT/Z4eSUywnqyrQU7DSdMfdqK4iWEaUVVmcP5JcnpRqmVMwcwcnmI1ATFNgC5V90u09tBynNFKA== -uri-js@^4.2.2: +uri-js@^4.2.2, uri-js@^4.4.1: version "4.4.1" resolved 
"https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== @@ -3149,7 +3136,12 @@ winreg@1.2.4: resolved "https://registry.yarnpkg.com/winreg/-/winreg-1.2.4.tgz#ba065629b7a925130e15779108cf540990e98d1b" integrity sha512-IHpzORub7kYlb8A43Iig3reOvlcBJGX9gZ0WycHhghHtA65X0LYnMRuJs+aH1abVnMJztQkvQNlltnbPi5aGIA== -"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0: +word-wrap@^1.2.5: + version "1.2.5" + resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.5.tgz#d2c45c6dd4fbce621a66f136cbe328afd0410b34" + integrity sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA== + +wrap-ansi@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== @@ -3158,15 +3150,6 @@ winreg@1.2.4: string-width "^4.1.0" strip-ansi "^6.0.0" -wrap-ansi@^8.1.0: - version "8.1.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214" - integrity sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ== - dependencies: - ansi-styles "^6.1.0" - string-width "^5.0.1" - strip-ansi "^7.0.1" - wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" @@ -3187,6 +3170,11 @@ yallist@^4.0.0: resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== +yallist@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-5.0.0.tgz#00e2de443639ed0d78fd87de0d27469fbcffb533" + integrity 
sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw== + yargs-parser@^21.1.1: version "21.1.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" diff --git a/docs/docs/auth/migration-guides/authy/index.md b/docs/docs/auth/migration-guides/authy/index.md index 48ce3965d..1a9228547 100644 --- a/docs/docs/auth/migration-guides/authy/index.md +++ b/docs/docs/auth/migration-guides/authy/index.md @@ -18,7 +18,7 @@ A guide written by Green, an ente.io lover Migrating from Authy can be tiring, as you cannot export your 2FA codes through the app, meaning that you would have to reconfigure 2FA for all of your accounts for your new 2FA authenticator. However, easier ways exist to export your codes -out of Authy. This guide will cover two of the most used methods for mograting +out of Authy. This guide will cover two of the most used methods for migrating from Authy to Ente Authenticator. > [!CAUTION] diff --git a/docs/docs/self-hosting/guides/custom-server/index.md b/docs/docs/self-hosting/guides/custom-server/index.md index bf695af30..a5ce76cc2 100644 --- a/docs/docs/self-hosting/guides/custom-server/index.md +++ b/docs/docs/self-hosting/guides/custom-server/index.md @@ -25,10 +25,13 @@ configure the endpoint the app should be connecting to. 
> You can download the CLI from > [here](https://github.com/ente-io/ente/releases?q=tag%3Acli-v0) -Define a config.yaml and put it either in the same directory as CLI or path -defined in env variable `ENTE_CLI_CONFIG_PATH` +Define a config.yaml and put it either in the same directory as where you run +the CLI from ("current working directory"), or in the path defined in env +variable `ENTE_CLI_CONFIG_PATH`: ```yaml endpoint: api: "http://localhost:8080" ``` + +(Another [example](https://github.com/ente-io/ente/blob/main/cli/config.yaml.example)) diff --git a/mobile/ios/Podfile.lock b/mobile/ios/Podfile.lock index 88bc70bf7..731514957 100644 --- a/mobile/ios/Podfile.lock +++ b/mobile/ios/Podfile.lock @@ -108,8 +108,6 @@ PODS: - FlutterMacOS - integration_test (0.0.1): - Flutter - - isar_flutter_libs (1.0.0): - - Flutter - libwebp (1.3.2): - libwebp/demux (= 1.3.2) - libwebp/mux (= 1.3.2) @@ -246,7 +244,6 @@ DEPENDENCIES: - image_editor_common (from `.symlinks/plugins/image_editor_common/ios`) - in_app_purchase_storekit (from `.symlinks/plugins/in_app_purchase_storekit/darwin`) - integration_test (from `.symlinks/plugins/integration_test/ios`) - - isar_flutter_libs (from `.symlinks/plugins/isar_flutter_libs/ios`) - local_auth_darwin (from `.symlinks/plugins/local_auth_darwin/darwin`) - local_auth_ios (from `.symlinks/plugins/local_auth_ios/ios`) - media_extension (from `.symlinks/plugins/media_extension/ios`) @@ -341,8 +338,6 @@ EXTERNAL SOURCES: :path: ".symlinks/plugins/in_app_purchase_storekit/darwin" integration_test: :path: ".symlinks/plugins/integration_test/ios" - isar_flutter_libs: - :path: ".symlinks/plugins/isar_flutter_libs/ios" local_auth_darwin: :path: ".symlinks/plugins/local_auth_darwin/darwin" local_auth_ios: @@ -427,7 +422,6 @@ SPEC CHECKSUMS: image_editor_common: d6f6644ae4a6de80481e89fe6d0a8c49e30b4b43 in_app_purchase_storekit: 0e4b3c2e43ba1e1281f4f46dd71b0593ce529892 integration_test: 13825b8a9334a850581300559b8839134b124670 - isar_flutter_libs: 
b69f437aeab9c521821c3f376198c4371fa21073 libwebp: 1786c9f4ff8a279e4dac1e8f385004d5fc253009 local_auth_darwin: c7e464000a6a89e952235699e32b329457608d98 local_auth_ios: 5046a18c018dd973247a0564496c8898dbb5adf9 diff --git a/mobile/ios/Runner.xcodeproj/project.pbxproj b/mobile/ios/Runner.xcodeproj/project.pbxproj index 89c492629..c88f9da38 100644 --- a/mobile/ios/Runner.xcodeproj/project.pbxproj +++ b/mobile/ios/Runner.xcodeproj/project.pbxproj @@ -308,7 +308,6 @@ "${BUILT_PRODUCTS_DIR}/image_editor_common/image_editor_common.framework", "${BUILT_PRODUCTS_DIR}/in_app_purchase_storekit/in_app_purchase_storekit.framework", "${BUILT_PRODUCTS_DIR}/integration_test/integration_test.framework", - "${BUILT_PRODUCTS_DIR}/isar_flutter_libs/isar_flutter_libs.framework", "${BUILT_PRODUCTS_DIR}/libwebp/libwebp.framework", "${BUILT_PRODUCTS_DIR}/local_auth_darwin/local_auth_darwin.framework", "${BUILT_PRODUCTS_DIR}/local_auth_ios/local_auth_ios.framework", @@ -390,7 +389,6 @@ "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/image_editor_common.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/in_app_purchase_storekit.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/integration_test.framework", - "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/isar_flutter_libs.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/libwebp.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/local_auth_darwin.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/local_auth_ios.framework", diff --git a/mobile/ios/Runner/Info.plist b/mobile/ios/Runner/Info.plist index 037996520..cdbc23774 100644 --- a/mobile/ios/Runner/Info.plist +++ b/mobile/ios/Runner/Info.plist @@ -105,5 +105,14 @@ UIApplicationSupportsIndirectInputEvents + NSBonjourServices + + _googlecast._tcp + F5BCEC64._googlecast._tcp + + + NSLocalNetworkUsageDescription + ${PRODUCT_NAME} uses the local network to discover Cast-enabled devices on your WiFi + network. 
diff --git a/mobile/lib/db/embeddings_db.dart b/mobile/lib/db/embeddings_db.dart index a339d4d0d..0eb1d3f6d 100644 --- a/mobile/lib/db/embeddings_db.dart +++ b/mobile/lib/db/embeddings_db.dart @@ -1,79 +1,167 @@ import "dart:io"; +import "dart:typed_data"; -import "package:isar/isar.dart"; +import "package:path/path.dart"; import 'package:path_provider/path_provider.dart'; import "package:photos/core/event_bus.dart"; import "package:photos/events/embedding_updated_event.dart"; import "package:photos/models/embedding.dart"; +import "package:sqlite_async/sqlite_async.dart"; class EmbeddingsDB { - late final Isar _isar; - EmbeddingsDB._privateConstructor(); static final EmbeddingsDB instance = EmbeddingsDB._privateConstructor(); + static const databaseName = "ente.embeddings.db"; + static const tableName = "embeddings"; + static const columnFileID = "file_id"; + static const columnModel = "model"; + static const columnEmbedding = "embedding"; + static const columnUpdationTime = "updation_time"; + + static Future? 
_dbFuture; + + Future get _database async { + _dbFuture ??= _initDatabase(); + return _dbFuture!; + } + Future init() async { final dir = await getApplicationDocumentsDirectory(); - _isar = await Isar.open( - [EmbeddingSchema], - directory: dir.path, - ); - await _clearDeprecatedStore(dir); + await _clearDeprecatedStores(dir); + } + + Future _initDatabase() async { + final Directory documentsDirectory = + await getApplicationDocumentsDirectory(); + final String path = join(documentsDirectory.path, databaseName); + final migrations = SqliteMigrations() + ..add( + SqliteMigration( + 1, + (tx) async { + await tx.execute( + 'CREATE TABLE $tableName ($columnFileID INTEGER NOT NULL, $columnModel INTEGER NOT NULL, $columnEmbedding BLOB NOT NULL, $columnUpdationTime INTEGER, UNIQUE ($columnFileID, $columnModel))', + ); + }, + ), + ); + final database = SqliteDatabase(path: path); + await migrations.migrate(database); + return database; } Future clearTable() async { - await _isar.writeTxn(() => _isar.clear()); + final db = await _database; + await db.execute('DELETE * FROM $tableName'); } Future> getAll(Model model) async { - return _isar.embeddings.filter().modelEqualTo(model).findAll(); + final db = await _database; + final results = await db.getAll('SELECT * FROM $tableName'); + return _convertToEmbeddings(results); } - Future put(Embedding embedding) { - return _isar.writeTxn(() async { - await _isar.embeddings.putByIndex(Embedding.index, embedding); - Bus.instance.fire(EmbeddingUpdatedEvent()); - }); + Future put(Embedding embedding) async { + final db = await _database; + await db.execute( + 'INSERT OR REPLACE INTO $tableName ($columnFileID, $columnModel, $columnEmbedding, $columnUpdationTime) VALUES (?, ?, ?, ?)', + _getRowFromEmbedding(embedding), + ); + Bus.instance.fire(EmbeddingUpdatedEvent()); } - Future putMany(List embeddings) { - return _isar.writeTxn(() async { - await _isar.embeddings.putAllByIndex(Embedding.index, embeddings); - 
Bus.instance.fire(EmbeddingUpdatedEvent()); - }); + Future putMany(List embeddings) async { + final db = await _database; + final inputs = embeddings.map((e) => _getRowFromEmbedding(e)).toList(); + await db.executeBatch( + 'INSERT OR REPLACE INTO $tableName ($columnFileID, $columnModel, $columnEmbedding, $columnUpdationTime) values(?, ?, ?, ?)', + inputs, + ); + Bus.instance.fire(EmbeddingUpdatedEvent()); } Future> getUnsyncedEmbeddings() async { - return await _isar.embeddings.filter().updationTimeEqualTo(null).findAll(); + final db = await _database; + final results = await db.getAll( + 'SELECT * FROM $tableName WHERE $columnUpdationTime IS NULL', + ); + return _convertToEmbeddings(results); } Future deleteEmbeddings(List fileIDs) async { - await _isar.writeTxn(() async { - final embeddings = []; - for (final fileID in fileIDs) { - embeddings.addAll( - await _isar.embeddings.filter().fileIDEqualTo(fileID).findAll(), - ); - } - await _isar.embeddings.deleteAll(embeddings.map((e) => e.id).toList()); - Bus.instance.fire(EmbeddingUpdatedEvent()); - }); + final db = await _database; + await db.execute( + 'DELETE FROM $tableName WHERE $columnFileID IN (${fileIDs.join(", ")})', + ); + Bus.instance.fire(EmbeddingUpdatedEvent()); } Future deleteAllForModel(Model model) async { - await _isar.writeTxn(() async { - final embeddings = - await _isar.embeddings.filter().modelEqualTo(model).findAll(); - await _isar.embeddings.deleteAll(embeddings.map((e) => e.id).toList()); - Bus.instance.fire(EmbeddingUpdatedEvent()); - }); + final db = await _database; + await db.execute( + 'DELETE FROM $tableName WHERE $columnModel = ?', + [modelToInt(model)!], + ); + Bus.instance.fire(EmbeddingUpdatedEvent()); } - Future _clearDeprecatedStore(Directory dir) async { - final deprecatedStore = Directory(dir.path + "/object-box-store"); - if (await deprecatedStore.exists()) { - await deprecatedStore.delete(recursive: true); + List _convertToEmbeddings(List> results) { + final List embeddings = 
[]; + for (final result in results) { + embeddings.add(_getEmbeddingFromRow(result)); + } + return embeddings; + } + + Embedding _getEmbeddingFromRow(Map row) { + final fileID = row[columnFileID]; + final model = intToModel(row[columnModel])!; + final bytes = row[columnEmbedding] as Uint8List; + final list = Float32List.view(bytes.buffer); + return Embedding(fileID: fileID, model: model, embedding: list); + } + + List _getRowFromEmbedding(Embedding embedding) { + return [ + embedding.fileID, + modelToInt(embedding.model)!, + Float32List.fromList(embedding.embedding).buffer.asUint8List(), + embedding.updationTime, + ]; + } + + Future _clearDeprecatedStores(Directory dir) async { + final deprecatedObjectBox = Directory(dir.path + "/object-box-store"); + if (await deprecatedObjectBox.exists()) { + await deprecatedObjectBox.delete(recursive: true); + } + final deprecatedIsar = File(dir.path + "/default.isar"); + if (await deprecatedIsar.exists()) { + await deprecatedIsar.delete(); + } + } + + int? modelToInt(Model model) { + switch (model) { + case Model.onnxClip: + return 1; + case Model.ggmlClip: + return 2; + default: + return null; + } + } + + Model? 
intToModel(int model) { + switch (model) { + case 1: + return Model.onnxClip; + case 2: + return Model.ggmlClip; + default: + return null; } } } diff --git a/mobile/lib/db/files_db.dart b/mobile/lib/db/files_db.dart index fce650086..7022100b7 100644 --- a/mobile/lib/db/files_db.dart +++ b/mobile/lib/db/files_db.dart @@ -455,6 +455,7 @@ class FilesDB { } Future insert(EnteFile file) async { + _logger.info("Inserting $file"); final db = await instance.database; return db.insert( filesTable, diff --git a/mobile/lib/gateways/cast_gw.dart b/mobile/lib/gateways/cast_gw.dart index fb342c1a9..63735d678 100644 --- a/mobile/lib/gateways/cast_gw.dart +++ b/mobile/lib/gateways/cast_gw.dart @@ -12,10 +12,14 @@ class CastGateway { ); return response.data["publicKey"]; } catch (e) { - if (e is DioError && - e.response != null && - e.response!.statusCode == 404) { - return null; + if (e is DioError && e.response != null) { + if (e.response!.statusCode == 404) { + return null; + } else if (e.response!.statusCode == 403) { + throw CastIPMismatchException(); + } else { + rethrow; + } } rethrow; } @@ -48,3 +52,7 @@ class CastGateway { } } } + +class CastIPMismatchException implements Exception { + CastIPMismatchException(); +} diff --git a/mobile/lib/generated/intl/messages_en.dart b/mobile/lib/generated/intl/messages_en.dart index eef309aa5..9f7792f34 100644 --- a/mobile/lib/generated/intl/messages_en.dart +++ b/mobile/lib/generated/intl/messages_en.dart @@ -357,6 +357,13 @@ class MessageLookup extends MessageLookupByLibrary { "Authentication failed, please try again"), "authenticationSuccessful": MessageLookupByLibrary.simpleMessage("Authentication successful!"), + "autoCastDialogBody": MessageLookupByLibrary.simpleMessage( + "You\'ll see available Cast devices here."), + "autoCastiOSPermission": MessageLookupByLibrary.simpleMessage( + "Make sure Local Network permissions are turned on for the Ente Photos app, in Settings."), + "autoPair": MessageLookupByLibrary.simpleMessage("Auto 
pair"), + "autoPairGoogle": MessageLookupByLibrary.simpleMessage( + "Auto Pair requires connecting to Google servers and only works with Chromecast supported devices. Google will not receive sensitive data, such as your photos."), "available": MessageLookupByLibrary.simpleMessage("Available"), "backedUpFolders": MessageLookupByLibrary.simpleMessage("Backed up folders"), @@ -387,6 +394,10 @@ class MessageLookup extends MessageLookupByLibrary { "cannotAddMorePhotosAfterBecomingViewer": m9, "cannotDeleteSharedFiles": MessageLookupByLibrary.simpleMessage("Cannot delete shared files"), + "castIPMismatchBody": MessageLookupByLibrary.simpleMessage( + "Please make sure you are on the same network as the TV."), + "castIPMismatchTitle": + MessageLookupByLibrary.simpleMessage("Failed to cast album"), "castInstruction": MessageLookupByLibrary.simpleMessage( "Visit cast.ente.io on the device you want to pair.\n\nEnter the code below to play the album on your TV."), "centerPoint": MessageLookupByLibrary.simpleMessage("Center point"), @@ -460,6 +471,8 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Confirm recovery key"), "confirmYourRecoveryKey": MessageLookupByLibrary.simpleMessage("Confirm your recovery key"), + "connectToDevice": + MessageLookupByLibrary.simpleMessage("Connect to device"), "contactFamilyAdmin": m12, "contactSupport": MessageLookupByLibrary.simpleMessage("Contact support"), @@ -721,6 +734,8 @@ class MessageLookup extends MessageLookupByLibrary { "filesBackedUpFromDevice": m22, "filesBackedUpInAlbum": m23, "filesDeleted": MessageLookupByLibrary.simpleMessage("Files deleted"), + "filesSavedToGallery": + MessageLookupByLibrary.simpleMessage("Files saved to gallery"), "flip": MessageLookupByLibrary.simpleMessage("Flip"), "forYourMemories": MessageLookupByLibrary.simpleMessage("for your memories"), @@ -902,6 +917,8 @@ class MessageLookup extends MessageLookupByLibrary { "manageParticipants": 
MessageLookupByLibrary.simpleMessage("Manage"), "manageSubscription": MessageLookupByLibrary.simpleMessage("Manage subscription"), + "manualPairDesc": MessageLookupByLibrary.simpleMessage( + "Pair with PIN works for any large screen device you want to play your album on."), "map": MessageLookupByLibrary.simpleMessage("Map"), "maps": MessageLookupByLibrary.simpleMessage("Maps"), "mastodon": MessageLookupByLibrary.simpleMessage("Mastodon"), @@ -936,6 +953,8 @@ class MessageLookup extends MessageLookupByLibrary { "no": MessageLookupByLibrary.simpleMessage("No"), "noAlbumsSharedByYouYet": MessageLookupByLibrary.simpleMessage("No albums shared by you yet"), + "noDeviceFound": + MessageLookupByLibrary.simpleMessage("No device found"), "noDeviceLimit": MessageLookupByLibrary.simpleMessage("None"), "noDeviceThatCanBeDeleted": MessageLookupByLibrary.simpleMessage( "You\'ve no files on this device that can be deleted"), @@ -982,6 +1001,9 @@ class MessageLookup extends MessageLookupByLibrary { "orPickAnExistingOne": MessageLookupByLibrary.simpleMessage("Or pick an existing one"), "pair": MessageLookupByLibrary.simpleMessage("Pair"), + "pairWithPin": MessageLookupByLibrary.simpleMessage("Pair with PIN"), + "pairingComplete": + MessageLookupByLibrary.simpleMessage("Pairing complete"), "passkey": MessageLookupByLibrary.simpleMessage("Passkey"), "passkeyAuthTitle": MessageLookupByLibrary.simpleMessage("Passkey verification"), @@ -1328,6 +1350,10 @@ class MessageLookup extends MessageLookupByLibrary { "sparkleSuccess": MessageLookupByLibrary.simpleMessage("✨ Success"), "startBackup": MessageLookupByLibrary.simpleMessage("Start backup"), "status": MessageLookupByLibrary.simpleMessage("Status"), + "stopCastingBody": MessageLookupByLibrary.simpleMessage( + "Do you want to stop casting?"), + "stopCastingTitle": + MessageLookupByLibrary.simpleMessage("Stop casting"), "storage": MessageLookupByLibrary.simpleMessage("Storage"), "storageBreakupFamily": 
MessageLookupByLibrary.simpleMessage("Family"), "storageBreakupYou": MessageLookupByLibrary.simpleMessage("You"), diff --git a/mobile/lib/generated/l10n.dart b/mobile/lib/generated/l10n.dart index 3fa9c2209..b564a6d3d 100644 --- a/mobile/lib/generated/l10n.dart +++ b/mobile/lib/generated/l10n.dart @@ -5945,6 +5945,16 @@ class S { ); } + /// `Files saved to gallery` + String get filesSavedToGallery { + return Intl.message( + 'Files saved to gallery', + name: 'filesSavedToGallery', + desc: '', + args: [], + ); + } + /// `Failed to save file to gallery` String get fileFailedToSaveToGallery { return Intl.message( @@ -8378,6 +8388,26 @@ class S { ); } + /// `Auto pair` + String get autoPair { + return Intl.message( + 'Auto pair', + name: 'autoPair', + desc: '', + args: [], + ); + } + + /// `Pair with PIN` + String get pairWithPin { + return Intl.message( + 'Pair with PIN', + name: 'pairWithPin', + desc: '', + args: [], + ); + } + /// `Device not found` String get deviceNotFound { return Intl.message( @@ -8563,6 +8593,116 @@ class S { args: [], ); } + + /// `Auto Pair requires connecting to Google servers and only works with Chromecast supported devices. Google will not receive sensitive data, such as your photos.` + String get autoPairGoogle { + return Intl.message( + 'Auto Pair requires connecting to Google servers and only works with Chromecast supported devices. 
Google will not receive sensitive data, such as your photos.', + name: 'autoPairGoogle', + desc: '', + args: [], + ); + } + + /// `Pair with PIN works for any large screen device you want to play your album on.` + String get manualPairDesc { + return Intl.message( + 'Pair with PIN works for any large screen device you want to play your album on.', + name: 'manualPairDesc', + desc: '', + args: [], + ); + } + + /// `Connect to device` + String get connectToDevice { + return Intl.message( + 'Connect to device', + name: 'connectToDevice', + desc: '', + args: [], + ); + } + + /// `You'll see available Cast devices here.` + String get autoCastDialogBody { + return Intl.message( + 'You\'ll see available Cast devices here.', + name: 'autoCastDialogBody', + desc: '', + args: [], + ); + } + + /// `Make sure Local Network permissions are turned on for the Ente Photos app, in Settings.` + String get autoCastiOSPermission { + return Intl.message( + 'Make sure Local Network permissions are turned on for the Ente Photos app, in Settings.', + name: 'autoCastiOSPermission', + desc: '', + args: [], + ); + } + + /// `No device found` + String get noDeviceFound { + return Intl.message( + 'No device found', + name: 'noDeviceFound', + desc: '', + args: [], + ); + } + + /// `Stop casting` + String get stopCastingTitle { + return Intl.message( + 'Stop casting', + name: 'stopCastingTitle', + desc: '', + args: [], + ); + } + + /// `Do you want to stop casting?` + String get stopCastingBody { + return Intl.message( + 'Do you want to stop casting?', + name: 'stopCastingBody', + desc: '', + args: [], + ); + } + + /// `Failed to cast album` + String get castIPMismatchTitle { + return Intl.message( + 'Failed to cast album', + name: 'castIPMismatchTitle', + desc: '', + args: [], + ); + } + + /// `Please make sure you are on the same network as the TV.` + String get castIPMismatchBody { + return Intl.message( + 'Please make sure you are on the same network as the TV.', + name: 
'castIPMismatchBody', + desc: '', + args: [], + ); + } + + /// `Pairing complete` + String get pairingComplete { + return Intl.message( + 'Pairing complete', + name: 'pairingComplete', + desc: '', + args: [], + ); + } } class AppLocalizationDelegate extends LocalizationsDelegate { diff --git a/mobile/lib/l10n/intl_en.arb b/mobile/lib/l10n/intl_en.arb index 7115c6950..2f13dd1ba 100644 --- a/mobile/lib/l10n/intl_en.arb +++ b/mobile/lib/l10n/intl_en.arb @@ -835,6 +835,7 @@ "close": "Close", "setAs": "Set as", "fileSavedToGallery": "File saved to gallery", + "filesSavedToGallery": "Files saved to gallery", "fileFailedToSaveToGallery": "Failed to save file to gallery", "download": "Download", "pressAndHoldToPlayVideo": "Press and hold to play video", @@ -1195,6 +1196,8 @@ "verifyPasskey": "Verify passkey", "playOnTv": "Play album on TV", "pair": "Pair", + "autoPair": "Auto pair", + "pairWithPin": "Pair with PIN", "deviceNotFound": "Device not found", "castInstruction": "Visit cast.ente.io on the device you want to pair.\n\nEnter the code below to play the album on your TV.", "deviceCodeHint": "Enter the code", @@ -1212,5 +1215,16 @@ "endpointUpdatedMessage": "Endpoint updated successfully", "customEndpoint": "Connected to {endpoint}", "createCollaborativeLink": "Create collaborative link", - "search": "Search" + "search": "Search", + "autoPairGoogle": "Auto Pair requires connecting to Google servers and only works with Chromecast supported devices. 
Google will not receive sensitive data, such as your photos.", + "manualPairDesc": "Pair with PIN works for any large screen device you want to play your album on.", + "connectToDevice": "Connect to device", + "autoCastDialogBody": "You'll see available Cast devices here.", + "autoCastiOSPermission": "Make sure Local Network permissions are turned on for the Ente Photos app, in Settings.", + "noDeviceFound": "No device found", + "stopCastingTitle": "Stop casting", + "stopCastingBody": "Do you want to stop casting?", + "castIPMismatchTitle": "Failed to cast album", + "castIPMismatchBody": "Please make sure you are on the same network as the TV.", + "pairingComplete": "Pairing complete" } \ No newline at end of file diff --git a/mobile/lib/models/embedding.dart b/mobile/lib/models/embedding.dart index 1f78687b9..c8f742caa 100644 --- a/mobile/lib/models/embedding.dart +++ b/mobile/lib/models/embedding.dart @@ -1,17 +1,7 @@ import "dart:convert"; -import "package:isar/isar.dart"; - -part 'embedding.g.dart'; - -@collection class Embedding { - static const index = 'unique_file_model_embedding'; - - Id id = Isar.autoIncrement; final int fileID; - @enumerated - @Index(name: index, composite: [CompositeIndex('fileID')], unique: true, replace: true) final Model model; final List embedding; int? 
updationTime; diff --git a/mobile/lib/models/embedding.g.dart b/mobile/lib/models/embedding.g.dart deleted file mode 100644 index ca041a0d0..000000000 --- a/mobile/lib/models/embedding.g.dart +++ /dev/null @@ -1,1059 +0,0 @@ -// GENERATED CODE - DO NOT MODIFY BY HAND - -part of 'embedding.dart'; - -// ************************************************************************** -// IsarCollectionGenerator -// ************************************************************************** - -// coverage:ignore-file -// ignore_for_file: duplicate_ignore, non_constant_identifier_names, constant_identifier_names, invalid_use_of_protected_member, unnecessary_cast, prefer_const_constructors, lines_longer_than_80_chars, require_trailing_commas, inference_failure_on_function_invocation, unnecessary_parenthesis, unnecessary_raw_strings, unnecessary_null_checks, join_return_with_assignment, prefer_final_locals, avoid_js_rounded_ints, avoid_positional_boolean_parameters, always_specify_types - -extension GetEmbeddingCollection on Isar { - IsarCollection get embeddings => this.collection(); -} - -const EmbeddingSchema = CollectionSchema( - name: r'Embedding', - id: -8064100183150254587, - properties: { - r'embedding': PropertySchema( - id: 0, - name: r'embedding', - type: IsarType.doubleList, - ), - r'fileID': PropertySchema( - id: 1, - name: r'fileID', - type: IsarType.long, - ), - r'model': PropertySchema( - id: 2, - name: r'model', - type: IsarType.byte, - enumMap: _EmbeddingmodelEnumValueMap, - ), - r'updationTime': PropertySchema( - id: 3, - name: r'updationTime', - type: IsarType.long, - ) - }, - estimateSize: _embeddingEstimateSize, - serialize: _embeddingSerialize, - deserialize: _embeddingDeserialize, - deserializeProp: _embeddingDeserializeProp, - idName: r'id', - indexes: { - r'unique_file_model_embedding': IndexSchema( - id: 6248303800853228628, - name: r'unique_file_model_embedding', - unique: true, - replace: true, - properties: [ - IndexPropertySchema( - name: r'model', 
- type: IndexType.value, - caseSensitive: false, - ), - IndexPropertySchema( - name: r'fileID', - type: IndexType.value, - caseSensitive: false, - ) - ], - ) - }, - links: {}, - embeddedSchemas: {}, - getId: _embeddingGetId, - getLinks: _embeddingGetLinks, - attach: _embeddingAttach, - version: '3.1.0+1', -); - -int _embeddingEstimateSize( - Embedding object, - List offsets, - Map> allOffsets, -) { - var bytesCount = offsets.last; - bytesCount += 3 + object.embedding.length * 8; - return bytesCount; -} - -void _embeddingSerialize( - Embedding object, - IsarWriter writer, - List offsets, - Map> allOffsets, -) { - writer.writeDoubleList(offsets[0], object.embedding); - writer.writeLong(offsets[1], object.fileID); - writer.writeByte(offsets[2], object.model.index); - writer.writeLong(offsets[3], object.updationTime); -} - -Embedding _embeddingDeserialize( - Id id, - IsarReader reader, - List offsets, - Map> allOffsets, -) { - final object = Embedding( - embedding: reader.readDoubleList(offsets[0]) ?? [], - fileID: reader.readLong(offsets[1]), - model: _EmbeddingmodelValueEnumMap[reader.readByteOrNull(offsets[2])] ?? - Model.onnxClip, - updationTime: reader.readLongOrNull(offsets[3]), - ); - object.id = id; - return object; -} - -P _embeddingDeserializeProp

( - IsarReader reader, - int propertyId, - int offset, - Map> allOffsets, -) { - switch (propertyId) { - case 0: - return (reader.readDoubleList(offset) ?? []) as P; - case 1: - return (reader.readLong(offset)) as P; - case 2: - return (_EmbeddingmodelValueEnumMap[reader.readByteOrNull(offset)] ?? - Model.onnxClip) as P; - case 3: - return (reader.readLongOrNull(offset)) as P; - default: - throw IsarError('Unknown property with id $propertyId'); - } -} - -const _EmbeddingmodelEnumValueMap = { - 'onnxClip': 0, - 'ggmlClip': 1, -}; -const _EmbeddingmodelValueEnumMap = { - 0: Model.onnxClip, - 1: Model.ggmlClip, -}; - -Id _embeddingGetId(Embedding object) { - return object.id; -} - -List> _embeddingGetLinks(Embedding object) { - return []; -} - -void _embeddingAttach(IsarCollection col, Id id, Embedding object) { - object.id = id; -} - -extension EmbeddingByIndex on IsarCollection { - Future getByModelFileID(Model model, int fileID) { - return getByIndex(r'unique_file_model_embedding', [model, fileID]); - } - - Embedding? 
getByModelFileIDSync(Model model, int fileID) { - return getByIndexSync(r'unique_file_model_embedding', [model, fileID]); - } - - Future deleteByModelFileID(Model model, int fileID) { - return deleteByIndex(r'unique_file_model_embedding', [model, fileID]); - } - - bool deleteByModelFileIDSync(Model model, int fileID) { - return deleteByIndexSync(r'unique_file_model_embedding', [model, fileID]); - } - - Future> getAllByModelFileID( - List modelValues, List fileIDValues) { - final len = modelValues.length; - assert(fileIDValues.length == len, - 'All index values must have the same length'); - final values = >[]; - for (var i = 0; i < len; i++) { - values.add([modelValues[i], fileIDValues[i]]); - } - - return getAllByIndex(r'unique_file_model_embedding', values); - } - - List getAllByModelFileIDSync( - List modelValues, List fileIDValues) { - final len = modelValues.length; - assert(fileIDValues.length == len, - 'All index values must have the same length'); - final values = >[]; - for (var i = 0; i < len; i++) { - values.add([modelValues[i], fileIDValues[i]]); - } - - return getAllByIndexSync(r'unique_file_model_embedding', values); - } - - Future deleteAllByModelFileID( - List modelValues, List fileIDValues) { - final len = modelValues.length; - assert(fileIDValues.length == len, - 'All index values must have the same length'); - final values = >[]; - for (var i = 0; i < len; i++) { - values.add([modelValues[i], fileIDValues[i]]); - } - - return deleteAllByIndex(r'unique_file_model_embedding', values); - } - - int deleteAllByModelFileIDSync( - List modelValues, List fileIDValues) { - final len = modelValues.length; - assert(fileIDValues.length == len, - 'All index values must have the same length'); - final values = >[]; - for (var i = 0; i < len; i++) { - values.add([modelValues[i], fileIDValues[i]]); - } - - return deleteAllByIndexSync(r'unique_file_model_embedding', values); - } - - Future putByModelFileID(Embedding object) { - return 
putByIndex(r'unique_file_model_embedding', object); - } - - Id putByModelFileIDSync(Embedding object, {bool saveLinks = true}) { - return putByIndexSync(r'unique_file_model_embedding', object, - saveLinks: saveLinks); - } - - Future> putAllByModelFileID(List objects) { - return putAllByIndex(r'unique_file_model_embedding', objects); - } - - List putAllByModelFileIDSync(List objects, - {bool saveLinks = true}) { - return putAllByIndexSync(r'unique_file_model_embedding', objects, - saveLinks: saveLinks); - } -} - -extension EmbeddingQueryWhereSort - on QueryBuilder { - QueryBuilder anyId() { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause(const IdWhereClause.any()); - }); - } - - QueryBuilder anyModelFileID() { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause( - const IndexWhereClause.any(indexName: r'unique_file_model_embedding'), - ); - }); - } -} - -extension EmbeddingQueryWhere - on QueryBuilder { - QueryBuilder idEqualTo(Id id) { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause(IdWhereClause.between( - lower: id, - upper: id, - )); - }); - } - - QueryBuilder idNotEqualTo(Id id) { - return QueryBuilder.apply(this, (query) { - if (query.whereSort == Sort.asc) { - return query - .addWhereClause( - IdWhereClause.lessThan(upper: id, includeUpper: false), - ) - .addWhereClause( - IdWhereClause.greaterThan(lower: id, includeLower: false), - ); - } else { - return query - .addWhereClause( - IdWhereClause.greaterThan(lower: id, includeLower: false), - ) - .addWhereClause( - IdWhereClause.lessThan(upper: id, includeUpper: false), - ); - } - }); - } - - QueryBuilder idGreaterThan(Id id, - {bool include = false}) { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause( - IdWhereClause.greaterThan(lower: id, includeLower: include), - ); - }); - } - - QueryBuilder idLessThan(Id id, - {bool include = false}) { - return QueryBuilder.apply(this, (query) { - return 
query.addWhereClause( - IdWhereClause.lessThan(upper: id, includeUpper: include), - ); - }); - } - - QueryBuilder idBetween( - Id lowerId, - Id upperId, { - bool includeLower = true, - bool includeUpper = true, - }) { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause(IdWhereClause.between( - lower: lowerId, - includeLower: includeLower, - upper: upperId, - includeUpper: includeUpper, - )); - }); - } - - QueryBuilder modelEqualToAnyFileID( - Model model) { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause(IndexWhereClause.equalTo( - indexName: r'unique_file_model_embedding', - value: [model], - )); - }); - } - - QueryBuilder - modelNotEqualToAnyFileID(Model model) { - return QueryBuilder.apply(this, (query) { - if (query.whereSort == Sort.asc) { - return query - .addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [], - upper: [model], - includeUpper: false, - )) - .addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [model], - includeLower: false, - upper: [], - )); - } else { - return query - .addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [model], - includeLower: false, - upper: [], - )) - .addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [], - upper: [model], - includeUpper: false, - )); - } - }); - } - - QueryBuilder - modelGreaterThanAnyFileID( - Model model, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [model], - includeLower: include, - upper: [], - )); - }); - } - - QueryBuilder modelLessThanAnyFileID( - Model model, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - 
lower: [], - upper: [model], - includeUpper: include, - )); - }); - } - - QueryBuilder modelBetweenAnyFileID( - Model lowerModel, - Model upperModel, { - bool includeLower = true, - bool includeUpper = true, - }) { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [lowerModel], - includeLower: includeLower, - upper: [upperModel], - includeUpper: includeUpper, - )); - }); - } - - QueryBuilder modelFileIDEqualTo( - Model model, int fileID) { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause(IndexWhereClause.equalTo( - indexName: r'unique_file_model_embedding', - value: [model, fileID], - )); - }); - } - - QueryBuilder - modelEqualToFileIDNotEqualTo(Model model, int fileID) { - return QueryBuilder.apply(this, (query) { - if (query.whereSort == Sort.asc) { - return query - .addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [model], - upper: [model, fileID], - includeUpper: false, - )) - .addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [model, fileID], - includeLower: false, - upper: [model], - )); - } else { - return query - .addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [model, fileID], - includeLower: false, - upper: [model], - )) - .addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [model], - upper: [model, fileID], - includeUpper: false, - )); - } - }); - } - - QueryBuilder - modelEqualToFileIDGreaterThan( - Model model, - int fileID, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [model, fileID], - includeLower: include, - upper: [model], - )); - }); - } - - QueryBuilder - modelEqualToFileIDLessThan( - Model model, - 
int fileID, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [model], - upper: [model, fileID], - includeUpper: include, - )); - }); - } - - QueryBuilder - modelEqualToFileIDBetween( - Model model, - int lowerFileID, - int upperFileID, { - bool includeLower = true, - bool includeUpper = true, - }) { - return QueryBuilder.apply(this, (query) { - return query.addWhereClause(IndexWhereClause.between( - indexName: r'unique_file_model_embedding', - lower: [model, lowerFileID], - includeLower: includeLower, - upper: [model, upperFileID], - includeUpper: includeUpper, - )); - }); - } -} - -extension EmbeddingQueryFilter - on QueryBuilder { - QueryBuilder - embeddingElementEqualTo( - double value, { - double epsilon = Query.epsilon, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.equalTo( - property: r'embedding', - value: value, - epsilon: epsilon, - )); - }); - } - - QueryBuilder - embeddingElementGreaterThan( - double value, { - bool include = false, - double epsilon = Query.epsilon, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.greaterThan( - include: include, - property: r'embedding', - value: value, - epsilon: epsilon, - )); - }); - } - - QueryBuilder - embeddingElementLessThan( - double value, { - bool include = false, - double epsilon = Query.epsilon, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.lessThan( - include: include, - property: r'embedding', - value: value, - epsilon: epsilon, - )); - }); - } - - QueryBuilder - embeddingElementBetween( - double lower, - double upper, { - bool includeLower = true, - bool includeUpper = true, - double epsilon = Query.epsilon, - }) { - return QueryBuilder.apply(this, (query) { - return 
query.addFilterCondition(FilterCondition.between( - property: r'embedding', - lower: lower, - includeLower: includeLower, - upper: upper, - includeUpper: includeUpper, - epsilon: epsilon, - )); - }); - } - - QueryBuilder - embeddingLengthEqualTo(int length) { - return QueryBuilder.apply(this, (query) { - return query.listLength( - r'embedding', - length, - true, - length, - true, - ); - }); - } - - QueryBuilder embeddingIsEmpty() { - return QueryBuilder.apply(this, (query) { - return query.listLength( - r'embedding', - 0, - true, - 0, - true, - ); - }); - } - - QueryBuilder - embeddingIsNotEmpty() { - return QueryBuilder.apply(this, (query) { - return query.listLength( - r'embedding', - 0, - false, - 999999, - true, - ); - }); - } - - QueryBuilder - embeddingLengthLessThan( - int length, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.listLength( - r'embedding', - 0, - true, - length, - include, - ); - }); - } - - QueryBuilder - embeddingLengthGreaterThan( - int length, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.listLength( - r'embedding', - length, - include, - 999999, - true, - ); - }); - } - - QueryBuilder - embeddingLengthBetween( - int lower, - int upper, { - bool includeLower = true, - bool includeUpper = true, - }) { - return QueryBuilder.apply(this, (query) { - return query.listLength( - r'embedding', - lower, - includeLower, - upper, - includeUpper, - ); - }); - } - - QueryBuilder fileIDEqualTo( - int value) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.equalTo( - property: r'fileID', - value: value, - )); - }); - } - - QueryBuilder fileIDGreaterThan( - int value, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.greaterThan( - include: include, - property: r'fileID', - value: value, - )); - }); - } - - QueryBuilder fileIDLessThan( - int 
value, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.lessThan( - include: include, - property: r'fileID', - value: value, - )); - }); - } - - QueryBuilder fileIDBetween( - int lower, - int upper, { - bool includeLower = true, - bool includeUpper = true, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.between( - property: r'fileID', - lower: lower, - includeLower: includeLower, - upper: upper, - includeUpper: includeUpper, - )); - }); - } - - QueryBuilder idEqualTo( - Id value) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.equalTo( - property: r'id', - value: value, - )); - }); - } - - QueryBuilder idGreaterThan( - Id value, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.greaterThan( - include: include, - property: r'id', - value: value, - )); - }); - } - - QueryBuilder idLessThan( - Id value, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.lessThan( - include: include, - property: r'id', - value: value, - )); - }); - } - - QueryBuilder idBetween( - Id lower, - Id upper, { - bool includeLower = true, - bool includeUpper = true, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.between( - property: r'id', - lower: lower, - includeLower: includeLower, - upper: upper, - includeUpper: includeUpper, - )); - }); - } - - QueryBuilder modelEqualTo( - Model value) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.equalTo( - property: r'model', - value: value, - )); - }); - } - - QueryBuilder modelGreaterThan( - Model value, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return 
query.addFilterCondition(FilterCondition.greaterThan( - include: include, - property: r'model', - value: value, - )); - }); - } - - QueryBuilder modelLessThan( - Model value, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.lessThan( - include: include, - property: r'model', - value: value, - )); - }); - } - - QueryBuilder modelBetween( - Model lower, - Model upper, { - bool includeLower = true, - bool includeUpper = true, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.between( - property: r'model', - lower: lower, - includeLower: includeLower, - upper: upper, - includeUpper: includeUpper, - )); - }); - } - - QueryBuilder - updationTimeIsNull() { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(const FilterCondition.isNull( - property: r'updationTime', - )); - }); - } - - QueryBuilder - updationTimeIsNotNull() { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(const FilterCondition.isNotNull( - property: r'updationTime', - )); - }); - } - - QueryBuilder updationTimeEqualTo( - int? value) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.equalTo( - property: r'updationTime', - value: value, - )); - }); - } - - QueryBuilder - updationTimeGreaterThan( - int? value, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.greaterThan( - include: include, - property: r'updationTime', - value: value, - )); - }); - } - - QueryBuilder - updationTimeLessThan( - int? value, { - bool include = false, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.lessThan( - include: include, - property: r'updationTime', - value: value, - )); - }); - } - - QueryBuilder updationTimeBetween( - int? lower, - int? 
upper, { - bool includeLower = true, - bool includeUpper = true, - }) { - return QueryBuilder.apply(this, (query) { - return query.addFilterCondition(FilterCondition.between( - property: r'updationTime', - lower: lower, - includeLower: includeLower, - upper: upper, - includeUpper: includeUpper, - )); - }); - } -} - -extension EmbeddingQueryObject - on QueryBuilder {} - -extension EmbeddingQueryLinks - on QueryBuilder {} - -extension EmbeddingQuerySortBy on QueryBuilder { - QueryBuilder sortByFileID() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'fileID', Sort.asc); - }); - } - - QueryBuilder sortByFileIDDesc() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'fileID', Sort.desc); - }); - } - - QueryBuilder sortByModel() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'model', Sort.asc); - }); - } - - QueryBuilder sortByModelDesc() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'model', Sort.desc); - }); - } - - QueryBuilder sortByUpdationTime() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'updationTime', Sort.asc); - }); - } - - QueryBuilder sortByUpdationTimeDesc() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'updationTime', Sort.desc); - }); - } -} - -extension EmbeddingQuerySortThenBy - on QueryBuilder { - QueryBuilder thenByFileID() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'fileID', Sort.asc); - }); - } - - QueryBuilder thenByFileIDDesc() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'fileID', Sort.desc); - }); - } - - QueryBuilder thenById() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'id', Sort.asc); - }); - } - - QueryBuilder thenByIdDesc() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'id', Sort.desc); - }); - } - - QueryBuilder thenByModel() { - return QueryBuilder.apply(this, (query) { - return 
query.addSortBy(r'model', Sort.asc); - }); - } - - QueryBuilder thenByModelDesc() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'model', Sort.desc); - }); - } - - QueryBuilder thenByUpdationTime() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'updationTime', Sort.asc); - }); - } - - QueryBuilder thenByUpdationTimeDesc() { - return QueryBuilder.apply(this, (query) { - return query.addSortBy(r'updationTime', Sort.desc); - }); - } -} - -extension EmbeddingQueryWhereDistinct - on QueryBuilder { - QueryBuilder distinctByEmbedding() { - return QueryBuilder.apply(this, (query) { - return query.addDistinctBy(r'embedding'); - }); - } - - QueryBuilder distinctByFileID() { - return QueryBuilder.apply(this, (query) { - return query.addDistinctBy(r'fileID'); - }); - } - - QueryBuilder distinctByModel() { - return QueryBuilder.apply(this, (query) { - return query.addDistinctBy(r'model'); - }); - } - - QueryBuilder distinctByUpdationTime() { - return QueryBuilder.apply(this, (query) { - return query.addDistinctBy(r'updationTime'); - }); - } -} - -extension EmbeddingQueryProperty - on QueryBuilder { - QueryBuilder idProperty() { - return QueryBuilder.apply(this, (query) { - return query.addPropertyName(r'id'); - }); - } - - QueryBuilder, QQueryOperations> embeddingProperty() { - return QueryBuilder.apply(this, (query) { - return query.addPropertyName(r'embedding'); - }); - } - - QueryBuilder fileIDProperty() { - return QueryBuilder.apply(this, (query) { - return query.addPropertyName(r'fileID'); - }); - } - - QueryBuilder modelProperty() { - return QueryBuilder.apply(this, (query) { - return query.addPropertyName(r'model'); - }); - } - - QueryBuilder updationTimeProperty() { - return QueryBuilder.apply(this, (query) { - return query.addPropertyName(r'updationTime'); - }); - } -} diff --git a/mobile/lib/models/file/file.dart b/mobile/lib/models/file/file.dart index 2aa5a4558..d96a81e1c 100644 --- a/mobile/lib/models/file/file.dart 
+++ b/mobile/lib/models/file/file.dart @@ -308,7 +308,7 @@ class EnteFile { @override String toString() { return '''File(generatedID: $generatedID, localID: $localID, title: $title, - uploadedFileId: $uploadedFileID, modificationTime: $modificationTime, + type: $fileType, uploadedFileId: $uploadedFileID, modificationTime: $modificationTime, ownerID: $ownerID, collectionID: $collectionID, updationTime: $updationTime)'''; } diff --git a/mobile/lib/service_locator.dart b/mobile/lib/service_locator.dart index 0fec75b46..397703761 100644 --- a/mobile/lib/service_locator.dart +++ b/mobile/lib/service_locator.dart @@ -1,4 +1,6 @@ import "package:dio/dio.dart"; +import "package:ente_cast/ente_cast.dart"; +import "package:ente_cast_normal/ente_cast_normal.dart"; import "package:ente_feature_flag/ente_feature_flag.dart"; import "package:shared_preferences/shared_preferences.dart"; @@ -26,3 +28,9 @@ FlagService get flagService { ); return _flagService!; } + +CastService? _castService; +CastService get castService { + _castService ??= CastServiceImpl(); + return _castService!; +} diff --git a/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart b/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart index f7d17f8b8..420b8c97f 100644 --- a/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart +++ b/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart @@ -19,7 +19,7 @@ class EmbeddingStore { static final EmbeddingStore instance = EmbeddingStore._privateConstructor(); - static const kEmbeddingsSyncTimeKey = "sync_time_embeddings_v2"; + static const kEmbeddingsSyncTimeKey = "sync_time_embeddings_v3"; final _logger = Logger("EmbeddingStore"); final _dio = NetworkClient.instance.enteDio; diff --git a/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart b/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart index d1074053a..337ca913f 100644 --- 
a/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart +++ b/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart @@ -72,8 +72,8 @@ class SemanticSearchService { _mlFramework = _currentModel == Model.onnxClip ? ONNX(shouldDownloadOverMobileData) : GGML(shouldDownloadOverMobileData); - await EmbeddingsDB.instance.init(); await EmbeddingStore.instance.init(); + await EmbeddingsDB.instance.init(); await _loadEmbeddings(); Bus.instance.on().listen((event) { _embeddingLoaderDebouncer.run(() async { diff --git a/mobile/lib/ui/cast/auto.dart b/mobile/lib/ui/cast/auto.dart new file mode 100644 index 000000000..aed8ee0a5 --- /dev/null +++ b/mobile/lib/ui/cast/auto.dart @@ -0,0 +1,128 @@ +import "dart:io"; + +import "package:ente_cast/ente_cast.dart"; +import "package:flutter/material.dart"; +import "package:photos/generated/l10n.dart"; +import "package:photos/service_locator.dart"; +import "package:photos/theme/ente_theme.dart"; +import "package:photos/ui/common/loading_widget.dart"; +import "package:photos/utils/dialog_util.dart"; + +class AutoCastDialog extends StatefulWidget { + // async method that takes string as input + // and returns void + final void Function(String) onConnect; + AutoCastDialog( + this.onConnect, { + Key? 
key, + }) : super(key: key) {} + + @override + State createState() => _AutoCastDialogState(); +} + +class _AutoCastDialogState extends State { + final bool doesUserExist = true; + final Set _isDeviceTapInProgress = {}; + + @override + Widget build(BuildContext context) { + final textStyle = getEnteTextTheme(context); + final AlertDialog alert = AlertDialog( + title: Text( + S.of(context).connectToDevice, + style: textStyle.largeBold, + ), + content: Column( + crossAxisAlignment: CrossAxisAlignment.start, + mainAxisSize: MainAxisSize.min, + children: [ + Text( + S.of(context).autoCastDialogBody, + style: textStyle.bodyMuted, + ), + if (Platform.isIOS) + Text( + S.of(context).autoCastiOSPermission, + style: textStyle.bodyMuted, + ), + const SizedBox(height: 16), + FutureBuilder>( + future: castService.searchDevices(), + builder: (context, snapshot) { + if (snapshot.hasError) { + return Center( + child: Text( + 'Error: ${snapshot.error.toString()}', + ), + ); + } else if (!snapshot.hasData) { + return const EnteLoadingWidget(); + } + + if (snapshot.data!.isEmpty) { + return Center(child: Text(S.of(context).noDeviceFound)); + } + + return Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: snapshot.data!.map((result) { + final device = result.$2; + final name = result.$1; + return GestureDetector( + onTap: () async { + if (_isDeviceTapInProgress.contains(device)) { + return; + } + setState(() { + _isDeviceTapInProgress.add(device); + }); + try { + await _connectToYourApp(context, device); + } catch (e) { + showGenericErrorDialog(context: context, error: e) + .ignore(); + } finally { + setState(() { + _isDeviceTapInProgress.remove(device); + }); + } + }, + child: Padding( + padding: const EdgeInsets.symmetric(vertical: 8.0), + child: Row( + children: [ + Expanded(child: Text(name)), + if (_isDeviceTapInProgress.contains(device)) + const EnteLoadingWidget(), + ], + ), + ), + ); + }).toList(), + ); + }, + ), + ], + ), + ); + return alert; + } + + Future 
_connectToYourApp( + BuildContext context, + Object castDevice, + ) async { + await castService.connectDevice( + context, + castDevice, + onMessage: (message) { + if (message.containsKey(CastMessageType.pairCode)) { + final code = message[CastMessageType.pairCode]!['code']; + widget.onConnect(code); + Navigator.of(context).pop(); + } + }, + ); + } +} diff --git a/mobile/lib/ui/cast/choose.dart b/mobile/lib/ui/cast/choose.dart new file mode 100644 index 000000000..7f0288733 --- /dev/null +++ b/mobile/lib/ui/cast/choose.dart @@ -0,0 +1,76 @@ +import "package:flutter/material.dart"; +import "package:photos/generated/l10n.dart"; +import "package:photos/l10n/l10n.dart"; +import "package:photos/theme/ente_theme.dart"; +import "package:photos/ui/components/buttons/button_widget.dart"; +import "package:photos/ui/components/models/button_type.dart"; + +class CastChooseDialog extends StatefulWidget { + const CastChooseDialog({ + Key? key, + }) : super(key: key); + + @override + State createState() => _CastChooseDialogState(); +} + +class _CastChooseDialogState extends State { + final bool doesUserExist = true; + + @override + Widget build(BuildContext context) { + final textStyle = getEnteTextTheme(context); + final AlertDialog alert = AlertDialog( + title: Text( + context.l10n.playOnTv, + style: textStyle.largeBold, + ), + content: Column( + crossAxisAlignment: CrossAxisAlignment.start, + mainAxisSize: MainAxisSize.min, + children: [ + const SizedBox(height: 8), + Text( + S.of(context).autoPairGoogle, + style: textStyle.bodyMuted, + ), + const SizedBox(height: 12), + ButtonWidget( + labelText: S.of(context).autoPair, + icon: Icons.cast_outlined, + buttonType: ButtonType.neutral, + buttonSize: ButtonSize.large, + shouldStickToDarkTheme: true, + buttonAction: ButtonAction.first, + shouldSurfaceExecutionStates: false, + isInAlert: true, + onTap: () async { + Navigator.of(context).pop(ButtonAction.first); + }, + ), + const SizedBox(height: 36), + Text( + 
S.of(context).manualPairDesc, + style: textStyle.bodyMuted, + ), + const SizedBox(height: 12), + ButtonWidget( + labelText: S.of(context).pairWithPin, + buttonType: ButtonType.neutral, + // icon for pairing with TV manually + icon: Icons.tv_outlined, + buttonSize: ButtonSize.large, + isInAlert: true, + onTap: () async { + Navigator.of(context).pop(ButtonAction.second); + }, + shouldStickToDarkTheme: true, + buttonAction: ButtonAction.second, + shouldSurfaceExecutionStates: false, + ), + ], + ), + ); + return alert; + } +} diff --git a/mobile/lib/ui/common/popup_item.dart b/mobile/lib/ui/common/popup_item.dart new file mode 100644 index 000000000..5f32104af --- /dev/null +++ b/mobile/lib/ui/common/popup_item.dart @@ -0,0 +1,38 @@ +import 'package:flutter/material.dart'; + +class EntePopupMenuItem extends PopupMenuItem { + final String label; + final IconData? icon; + final Widget? iconWidget; + + EntePopupMenuItem( + this.label, { + required T value, + this.icon, + this.iconWidget, + Key? 
key, + }) : assert( + icon != null || iconWidget != null, + 'Either icon or iconWidget must be provided.', + ), + assert( + !(icon != null && iconWidget != null), + 'Only one of icon or iconWidget can be provided.', + ), + super( + value: value, + key: key, + child: Row( + children: [ + if (iconWidget != null) + iconWidget + else if (icon != null) + Icon(icon), + const Padding( + padding: EdgeInsets.all(8), + ), + Text(label), + ], + ), // Initially empty, will be populated in build + ); +} diff --git a/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart b/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart index e2e29e021..a630e3354 100644 --- a/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart +++ b/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart @@ -3,6 +3,7 @@ import "dart:async"; import 'package:fast_base58/fast_base58.dart'; import 'package:flutter/material.dart'; import 'package:flutter/services.dart'; +import "package:logging/logging.dart"; import "package:modal_bottom_sheet/modal_bottom_sheet.dart"; import 'package:photos/core/configuration.dart'; import "package:photos/generated/l10n.dart"; @@ -30,6 +31,8 @@ import 'package:photos/ui/sharing/manage_links_widget.dart'; import "package:photos/ui/tools/collage/collage_creator_page.dart"; import "package:photos/ui/viewer/location/update_location_data_widget.dart"; import 'package:photos/utils/delete_file_util.dart'; +import "package:photos/utils/dialog_util.dart"; +import "package:photos/utils/file_download_util.dart"; import 'package:photos/utils/magic_util.dart'; import 'package:photos/utils/navigation_util.dart'; import "package:photos/utils/share_util.dart"; @@ -56,6 +59,7 @@ class FileSelectionActionsWidget extends StatefulWidget { class _FileSelectionActionsWidgetState extends State { + static final _logger = Logger("FileSelectionActionsWidget"); late int currentUserID; late FilesSplit split; late CollectionActions collectionActions; @@ -115,6 +119,8 
@@ class _FileSelectionActionsWidgetState !widget.selectedFiles.files.any( (element) => element.fileType == FileType.video, ); + final showDownloadOption = + widget.selectedFiles.files.any((element) => element.localID == null); //To animate adding and removing of [SelectedActionButton], add all items //and set [shouldShow] to false for items that should not be shown and true @@ -367,6 +373,16 @@ class _FileSelectionActionsWidgetState ); } + if (showDownloadOption) { + items.add( + SelectionActionButton( + labelText: S.of(context).download, + icon: Icons.cloud_download_outlined, + onTap: () => _download(widget.selectedFiles.files.toList()), + ), + ); + } + items.add( SelectionActionButton( labelText: S.of(context).share, @@ -379,41 +395,36 @@ class _FileSelectionActionsWidgetState ), ); - if (items.isNotEmpty) { - final scrollController = ScrollController(); - // h4ck: https://github.com/flutter/flutter/issues/57920#issuecomment-893970066 - return MediaQuery( - data: MediaQuery.of(context).removePadding(removeBottom: true), - child: SafeArea( - child: Scrollbar( - radius: const Radius.circular(1), - thickness: 2, - controller: scrollController, - thumbVisibility: true, - child: SingleChildScrollView( - physics: const BouncingScrollPhysics( - decelerationRate: ScrollDecelerationRate.fast, - ), - scrollDirection: Axis.horizontal, - child: Container( - padding: const EdgeInsets.only(bottom: 24), - child: Row( - crossAxisAlignment: CrossAxisAlignment.start, - children: [ - const SizedBox(width: 4), - ...items, - const SizedBox(width: 4), - ], - ), + final scrollController = ScrollController(); + // h4ck: https://github.com/flutter/flutter/issues/57920#issuecomment-893970066 + return MediaQuery( + data: MediaQuery.of(context).removePadding(removeBottom: true), + child: SafeArea( + child: Scrollbar( + radius: const Radius.circular(1), + thickness: 2, + controller: scrollController, + thumbVisibility: true, + child: SingleChildScrollView( + physics: const 
BouncingScrollPhysics( + decelerationRate: ScrollDecelerationRate.fast, + ), + scrollDirection: Axis.horizontal, + child: Container( + padding: const EdgeInsets.only(bottom: 24), + child: Row( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + const SizedBox(width: 4), + ...items, + const SizedBox(width: 4), + ], ), ), ), ), - ); - } else { - // TODO: Return "Select All" here - return const SizedBox.shrink(); - } + ), + ); } Future _moveFiles() async { @@ -647,4 +658,29 @@ class _FileSelectionActionsWidgetState widget.selectedFiles.clearAll(); } } + + Future _download(List files) async { + final dialog = createProgressDialog( + context, + S.of(context).downloading, + isDismissible: true, + ); + await dialog.show(); + try { + final futures = []; + for (final file in files) { + if (file.localID == null) { + futures.add(downloadToGallery(file)); + } + } + await Future.wait(futures); + await dialog.hide(); + widget.selectedFiles.clearAll(); + showToast(context, S.of(context).filesSavedToGallery); + } catch (e) { + _logger.warning("Failed to save files", e); + await dialog.hide(); + await showGenericErrorDialog(context: context, error: e); + } + } } diff --git a/mobile/lib/ui/viewer/file/file_app_bar.dart b/mobile/lib/ui/viewer/file/file_app_bar.dart index e029aeb89..2918924db 100644 --- a/mobile/lib/ui/viewer/file/file_app_bar.dart +++ b/mobile/lib/ui/viewer/file/file_app_bar.dart @@ -4,30 +4,23 @@ import 'package:flutter/cupertino.dart'; import 'package:flutter/material.dart'; import 'package:logging/logging.dart'; import 'package:media_extension/media_extension.dart'; -import 'package:path/path.dart' as file_path; -import 'package:photo_manager/photo_manager.dart'; -import 'package:photos/core/event_bus.dart'; -import 'package:photos/db/files_db.dart'; -import 'package:photos/events/local_photos_updated_event.dart'; import "package:photos/generated/l10n.dart"; import "package:photos/l10n/l10n.dart"; import 
"package:photos/models/file/extensions/file_props.dart"; import 'package:photos/models/file/file.dart'; import 'package:photos/models/file/file_type.dart'; import 'package:photos/models/file/trash_file.dart'; -import 'package:photos/models/ignored_file.dart'; import "package:photos/models/metadata/common_keys.dart"; import 'package:photos/models/selected_files.dart'; import "package:photos/service_locator.dart"; import 'package:photos/services/collections_service.dart'; import 'package:photos/services/hidden_service.dart'; -import 'package:photos/services/ignored_files_service.dart'; -import 'package:photos/services/local_sync_service.dart'; import 'package:photos/ui/collections/collection_action_sheet.dart'; import 'package:photos/ui/viewer/file/custom_app_bar.dart'; import "package:photos/ui/viewer/file_details/favorite_widget.dart"; import "package:photos/ui/viewer/file_details/upload_icon_widget.dart"; import 'package:photos/utils/dialog_util.dart'; +import "package:photos/utils/file_download_util.dart"; import 'package:photos/utils/file_util.dart'; import "package:photos/utils/magic_util.dart"; import 'package:photos/utils/toast_util.dart'; @@ -165,7 +158,7 @@ class FileAppBarState extends State { Icon( Platform.isAndroid ? Icons.download - : CupertinoIcons.cloud_download, + : Icons.cloud_download_outlined, color: Theme.of(context).iconTheme.color, ), const Padding( @@ -330,98 +323,16 @@ class FileAppBarState extends State { ); await dialog.show(); try { - final FileType type = file.fileType; - final bool downloadLivePhotoOnDroid = - type == FileType.livePhoto && Platform.isAndroid; - AssetEntity? savedAsset; - final File? fileToSave = await getFile(file); - //Disabling notifications for assets changing to insert the file into - //files db before triggering a sync. 
- await PhotoManager.stopChangeNotify(); - if (type == FileType.image) { - savedAsset = await PhotoManager.editor - .saveImageWithPath(fileToSave!.path, title: file.title!); - } else if (type == FileType.video) { - savedAsset = await PhotoManager.editor - .saveVideo(fileToSave!, title: file.title!); - } else if (type == FileType.livePhoto) { - final File? liveVideoFile = - await getFileFromServer(file, liveVideo: true); - if (liveVideoFile == null) { - throw AssertionError("Live video can not be null"); - } - if (downloadLivePhotoOnDroid) { - await _saveLivePhotoOnDroid(fileToSave!, liveVideoFile, file); - } else { - savedAsset = await PhotoManager.editor.darwin.saveLivePhoto( - imageFile: fileToSave!, - videoFile: liveVideoFile, - title: file.title!, - ); - } - } - - if (savedAsset != null) { - file.localID = savedAsset.id; - await FilesDB.instance.insert(file); - Bus.instance.fire( - LocalPhotosUpdatedEvent( - [file], - source: "download", - ), - ); - } else if (!downloadLivePhotoOnDroid && savedAsset == null) { - _logger.severe('Failed to save assert of type $type'); - } + await downloadToGallery(file); showToast(context, S.of(context).fileSavedToGallery); await dialog.hide(); } catch (e) { _logger.warning("Failed to save file", e); await dialog.hide(); await showGenericErrorDialog(context: context, error: e); - } finally { - await PhotoManager.startChangeNotify(); - LocalSyncService.instance.checkAndSync().ignore(); } } - Future _saveLivePhotoOnDroid( - File image, - File video, - EnteFile enteFile, - ) async { - debugPrint("Downloading LivePhoto on Droid"); - AssetEntity? savedAsset = await (PhotoManager.editor - .saveImageWithPath(image.path, title: enteFile.title!)); - if (savedAsset == null) { - throw Exception("Failed to save image of live photo"); - } - IgnoredFile ignoreVideoFile = IgnoredFile( - savedAsset.id, - savedAsset.title ?? '', - savedAsset.relativePath ?? 
'remoteDownload', - "remoteDownload", - ); - await IgnoredFilesService.instance.cacheAndInsert([ignoreVideoFile]); - final videoTitle = file_path.basenameWithoutExtension(enteFile.title!) + - file_path.extension(video.path); - savedAsset = (await (PhotoManager.editor.saveVideo( - video, - title: videoTitle, - ))); - if (savedAsset == null) { - throw Exception("Failed to save video of live photo"); - } - - ignoreVideoFile = IgnoredFile( - savedAsset.id, - savedAsset.title ?? videoTitle, - savedAsset.relativePath ?? 'remoteDownload', - "remoteDownload", - ); - await IgnoredFilesService.instance.cacheAndInsert([ignoreVideoFile]); - } - Future _setAs(EnteFile file) async { final dialog = createProgressDialog(context, S.of(context).pleaseWait); await dialog.show(); diff --git a/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart b/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart index 1f9fb0bbb..d7c3957b2 100644 --- a/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart +++ b/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart @@ -24,6 +24,9 @@ import 'package:photos/services/collections_service.dart'; import 'package:photos/services/sync_service.dart'; import 'package:photos/services/update_service.dart'; import 'package:photos/ui/actions/collection/collection_sharing_actions.dart'; +import "package:photos/ui/cast/auto.dart"; +import "package:photos/ui/cast/choose.dart"; +import "package:photos/ui/common/popup_item.dart"; import 'package:photos/ui/components/action_sheet_widget.dart'; import 'package:photos/ui/components/buttons/button_widget.dart'; import 'package:photos/ui/components/models/button_type.dart'; @@ -319,263 +322,136 @@ class _GalleryAppBarWidgetState extends State { ), ); } - final List> items = []; - if (galleryType.canRename()) { - items.add( - PopupMenuItem( + + if (widget.collection != null && castService.isSupported) { + actions.add( + Tooltip( + message: "Cast album", + child: IconButton( + icon: 
castService.getActiveSessions().isNotEmpty + ? const Icon(Icons.cast_connected_rounded) + : const Icon(Icons.cast_outlined), + onPressed: () async { + await _castChoiceDialog(); + if (mounted) { + setState(() {}); + } + }, + ), + ), + ); + } + final List> items = []; + items.addAll([ + if (galleryType.canRename()) + EntePopupMenuItem( + isQuickLink + ? S.of(context).convertToAlbum + : S.of(context).renameAlbum, value: AlbumPopupAction.rename, - child: Row( - children: [ - Icon(isQuickLink ? Icons.photo_album_outlined : Icons.edit), - const Padding( - padding: EdgeInsets.all(8), - ), - Text( - isQuickLink - ? S.of(context).convertToAlbum - : S.of(context).renameAlbum, - ), - ], - ), + icon: isQuickLink ? Icons.photo_album_outlined : Icons.edit, ), - ); - } - if (galleryType.canSetCover()) { - items.add( - PopupMenuItem( + if (galleryType.canSetCover()) + EntePopupMenuItem( + S.of(context).setCover, value: AlbumPopupAction.setCover, - child: Row( - children: [ - const Icon(Icons.image_outlined), - const Padding( - padding: EdgeInsets.all(8), - ), - Text(S.of(context).setCover), - ], - ), + icon: Icons.image_outlined, ), - ); - } - if (galleryType.showMap()) { - items.add( - PopupMenuItem( + if (galleryType.showMap()) + EntePopupMenuItem( + S.of(context).map, value: AlbumPopupAction.map, - child: Row( - children: [ - const Icon(Icons.map_outlined), - const Padding( - padding: EdgeInsets.all(8), - ), - Text(S.of(context).map), - ], - ), + icon: Icons.map_outlined, ), - ); - } - - if (galleryType.canSort()) { - items.add( - PopupMenuItem( + if (galleryType.canSort()) + EntePopupMenuItem( + S.of(context).sortAlbumsBy, value: AlbumPopupAction.sort, - child: Row( - children: [ - const Icon(Icons.sort_outlined), - const Padding( - padding: EdgeInsets.all(8), - ), - Text( - S.of(context).sortAlbumsBy, - ), - ], - ), + icon: Icons.sort_outlined, ), - ); - } - - if (galleryType == GalleryType.uncategorized) { - items.add( - PopupMenuItem( + if (galleryType == 
GalleryType.uncategorized) + EntePopupMenuItem( + S.of(context).cleanUncategorized, value: AlbumPopupAction.cleanUncategorized, - child: Row( - children: [ - const Icon(Icons.crop_original_outlined), - const Padding( - padding: EdgeInsets.all(8), - ), - Text(S.of(context).cleanUncategorized), - ], - ), + icon: Icons.crop_original_outlined, ), - ); - } - if (galleryType.canPin()) { - items.add( - PopupMenuItem( + if (galleryType.canPin()) + EntePopupMenuItem( + widget.collection!.isPinned + ? S.of(context).unpinAlbum + : S.of(context).pinAlbum, value: AlbumPopupAction.pinAlbum, - child: Row( - children: [ - widget.collection!.isPinned - ? const Icon(CupertinoIcons.pin_slash) - : Transform.rotate( - angle: 45 * math.pi / 180, // rotate by 45 degrees - child: const Icon(CupertinoIcons.pin), - ), - const Padding( - padding: EdgeInsets.all(8), - ), - Text( - widget.collection!.isPinned - ? S.of(context).unpinAlbum - : S.of(context).pinAlbum, - ), - ], - ), + iconWidget: widget.collection!.isPinned + ? const Icon(CupertinoIcons.pin_slash) + : Transform.rotate( + angle: 45 * math.pi / 180, // rotate by 45 degrees + child: const Icon(CupertinoIcons.pin), + ), ), - ); - } + ]); final bool isArchived = widget.collection?.isArchived() ?? false; final bool isHidden = widget.collection?.isHidden() ?? false; - // Do not show archive option for favorite collection. If collection is - // already archived, allow user to unarchive that collection. - if (isArchived || (galleryType.canArchive() && !isHidden)) { - items.add( - PopupMenuItem( - value: AlbumPopupAction.ownedArchive, - child: Row( - children: [ - Icon(isArchived ? Icons.unarchive : Icons.archive_outlined), - const Padding( - padding: EdgeInsets.all(8), - ), - Text( - isArchived - ? 
S.of(context).unarchiveAlbum - : S.of(context).archiveAlbum, - ), - ], - ), - ), - ); - } - if (!isArchived && galleryType.canHide()) { - items.add( - PopupMenuItem( - value: AlbumPopupAction.ownedHide, - child: Row( - children: [ - Icon( - isHidden - ? Icons.visibility_outlined - : Icons.visibility_off_outlined, - ), - const Padding( - padding: EdgeInsets.all(8), - ), - Text( - isHidden ? S.of(context).unhide : S.of(context).hide, - ), - ], - ), - ), - ); - } - if (widget.collection != null && isInternalUser) { - items.add( - PopupMenuItem( - value: AlbumPopupAction.playOnTv, - child: Row( - children: [ - const Icon(Icons.tv_outlined), - const Padding( - padding: EdgeInsets.all(8), - ), - Text(context.l10n.playOnTv), - ], - ), - ), - ); - } - if (galleryType.canDelete()) { - items.add( - PopupMenuItem( - value: isQuickLink - ? AlbumPopupAction.removeLink - : AlbumPopupAction.delete, - child: Row( - children: [ - Icon( - isQuickLink - ? Icons.remove_circle_outline - : Icons.delete_outline, - ), - const Padding( - padding: EdgeInsets.all(8), - ), - Text( - isQuickLink - ? S.of(context).removeLink - : S.of(context).deleteAlbum, - ), - ], + items.addAll( + [ + // Do not show archive option for favorite collection. If collection is + // already archived, allow user to unarchive that collection. + if (isArchived || (galleryType.canArchive() && !isHidden)) + EntePopupMenuItem( + value: AlbumPopupAction.ownedArchive, + isArchived + ? S.of(context).unarchiveAlbum + : S.of(context).archiveAlbum, + icon: isArchived ? Icons.unarchive : Icons.archive_outlined, ), - ), - ); - } - - if (galleryType == GalleryType.sharedCollection) { - final bool hasShareeArchived = widget.collection!.hasShareeArchived(); - items.add( - PopupMenuItem( - value: AlbumPopupAction.sharedArchive, - child: Row( - children: [ - Icon( - hasShareeArchived ? Icons.unarchive : Icons.archive_outlined, - ), - const Padding( - padding: EdgeInsets.all(8), - ), - Text( - hasShareeArchived - ? 
S.of(context).unarchiveAlbum - : S.of(context).archiveAlbum, - ), - ], + if (!isArchived && galleryType.canHide()) + EntePopupMenuItem( + value: AlbumPopupAction.ownedHide, + isHidden ? S.of(context).unhide : S.of(context).hide, + icon: isHidden + ? Icons.visibility_outlined + : Icons.visibility_off_outlined, ), - ), - ); - items.add( - PopupMenuItem( - value: AlbumPopupAction.leave, - child: Row( - children: [ - const Icon(Icons.logout), - const Padding( - padding: EdgeInsets.all(8), - ), - Text(S.of(context).leaveAlbum), - ], + if (widget.collection != null && isInternalUser) + EntePopupMenuItem( + value: AlbumPopupAction.playOnTv, + context.l10n.playOnTv, + icon: Icons.tv_outlined, ), - ), - ); - } - if (galleryType == GalleryType.localFolder) { - items.add( - PopupMenuItem( - value: AlbumPopupAction.freeUpSpace, - child: Row( - children: [ - const Icon(Icons.delete_sweep_outlined), - const Padding( - padding: EdgeInsets.all(8), - ), - Text(S.of(context).freeUpDeviceSpace), - ], + if (galleryType.canDelete()) + EntePopupMenuItem( + isQuickLink ? S.of(context).removeLink : S.of(context).deleteAlbum, + value: isQuickLink + ? AlbumPopupAction.removeLink + : AlbumPopupAction.delete, + icon: isQuickLink + ? Icons.remove_circle_outline + : Icons.delete_outline, ), - ), - ); - } + if (galleryType == GalleryType.sharedCollection) + EntePopupMenuItem( + widget.collection!.hasShareeArchived() + ? S.of(context).unarchiveAlbum + : S.of(context).archiveAlbum, + value: AlbumPopupAction.sharedArchive, + icon: widget.collection!.hasShareeArchived() + ? 
Icons.unarchive + : Icons.archive_outlined, + ), + if (galleryType == GalleryType.sharedCollection) + EntePopupMenuItem( + S.of(context).leaveAlbum, + value: AlbumPopupAction.leave, + icon: Icons.logout, + ), + if (galleryType == GalleryType.localFolder) + EntePopupMenuItem( + S.of(context).freeUpDeviceSpace, + value: AlbumPopupAction.freeUpSpace, + icon: Icons.delete_sweep_outlined, + ), + ], + ); if (items.isNotEmpty) { actions.add( PopupMenuButton( @@ -603,7 +479,7 @@ class _GalleryAppBarWidgetState extends State { } else if (value == AlbumPopupAction.leave) { await _leaveAlbum(context); } else if (value == AlbumPopupAction.playOnTv) { - await castAlbum(); + await _castChoiceDialog(); } else if (value == AlbumPopupAction.freeUpSpace) { await _deleteBackedUpFiles(context); } else if (value == AlbumPopupAction.setCover) { @@ -838,10 +714,56 @@ class _GalleryAppBarWidgetState extends State { setState(() {}); } - Future castAlbum() async { + Future _castChoiceDialog() async { final gw = CastGateway(NetworkClient.instance.enteDio); + if (castService.getActiveSessions().isNotEmpty) { + await showChoiceDialog( + context, + title: S.of(context).stopCastingTitle, + firstButtonLabel: S.of(context).yes, + secondButtonLabel: S.of(context).no, + body: S.of(context).stopCastingBody, + firstButtonOnTap: () async { + gw.revokeAllTokens().ignore(); + await castService.closeActiveCasts(); + }, + ); + return; + } + // stop any existing cast session gw.revokeAllTokens().ignore(); + final result = await showDialog( + context: context, + barrierDismissible: true, + builder: (BuildContext context) { + return const CastChooseDialog(); + }, + ); + if (result == null) { + return; + } + // wait to allow the dialog to close + await Future.delayed(const Duration(milliseconds: 100)); + if (result == ButtonAction.first) { + await showDialog( + context: context, + barrierDismissible: true, + builder: (BuildContext context) { + return AutoCastDialog( + (device) async { + await _castPair(gw, 
device); + }, + ); + }, + ); + } + if (result == ButtonAction.second) { + await _pairWithPin(gw, ''); + } + } + + Future _pairWithPin(CastGateway gw, String code) async { await showTextInputDialog( context, title: context.l10n.playOnTv, @@ -849,28 +771,49 @@ class _GalleryAppBarWidgetState extends State { submitButtonLabel: S.of(context).pair, textInputType: TextInputType.streetAddress, hintText: context.l10n.deviceCodeHint, + showOnlyLoadingState: true, + alwaysShowSuccessState: false, + initialValue: code, onSubmit: (String text) async { - try { - final code = text.trim(); - final String? publicKey = await gw.getPublicKey(code); - if (publicKey == null) { - showToast(context, S.of(context).deviceNotFound); - return; - } - final String castToken = const Uuid().v4().toString(); - final castPayload = CollectionsService.instance - .getCastData(castToken, widget.collection!, publicKey); - await gw.publishCastPayload( - code, - castPayload, - widget.collection!.id, - castToken, - ); - } catch (e, s) { - _logger.severe("Failed to cast album", e, s); - await showGenericErrorDialog(context: context, error: e); + final bool paired = await _castPair(gw, text); + if (!paired) { + Future.delayed(Duration.zero, () => _pairWithPin(gw, code)); } }, ); } + + Future _castPair(CastGateway gw, String code) async { + try { + final String? 
publicKey = await gw.getPublicKey(code); + if (publicKey == null) { + showToast(context, S.of(context).deviceNotFound); + + return false; + } + final String castToken = const Uuid().v4().toString(); + final castPayload = CollectionsService.instance + .getCastData(castToken, widget.collection!, publicKey); + await gw.publishCastPayload( + code, + castPayload, + widget.collection!.id, + castToken, + ); + showToast(context, S.of(context).pairingComplete); + return true; + } catch (e, s) { + _logger.severe("Failed to cast album", e, s); + if (e is CastIPMismatchException) { + await showErrorDialog( + context, + S.of(context).castIPMismatchTitle, + S.of(context).castIPMismatchBody, + ); + } else { + await showGenericErrorDialog(context: context, error: e); + } + return false; + } + } } diff --git a/mobile/lib/utils/file_download_util.dart b/mobile/lib/utils/file_download_util.dart index f99a43527..a8847e3fd 100644 --- a/mobile/lib/utils/file_download_util.dart +++ b/mobile/lib/utils/file_download_util.dart @@ -4,14 +4,23 @@ import "package:computer/computer.dart"; import 'package:dio/dio.dart'; import "package:flutter/foundation.dart"; import 'package:logging/logging.dart'; +import 'package:path/path.dart' as file_path; +import "package:photo_manager/photo_manager.dart"; import 'package:photos/core/configuration.dart'; +import "package:photos/core/event_bus.dart"; import 'package:photos/core/network/network.dart'; +import "package:photos/db/files_db.dart"; +import "package:photos/events/local_photos_updated_event.dart"; import 'package:photos/models/file/file.dart'; import "package:photos/models/file/file_type.dart"; +import "package:photos/models/ignored_file.dart"; import 'package:photos/services/collections_service.dart'; +import "package:photos/services/ignored_files_service.dart"; +import "package:photos/services/local_sync_service.dart"; import 'package:photos/utils/crypto_util.dart'; import "package:photos/utils/data_util.dart"; import 
"package:photos/utils/fake_progress.dart"; +import "package:photos/utils/file_util.dart"; final _logger = Logger("file_download_util"); @@ -115,6 +124,97 @@ Future getFileKeyUsingBgWorker(EnteFile file) async { ); } +Future downloadToGallery(EnteFile file) async { + try { + final FileType type = file.fileType; + final bool downloadLivePhotoOnDroid = + type == FileType.livePhoto && Platform.isAndroid; + AssetEntity? savedAsset; + final File? fileToSave = await getFile(file); + //Disabling notifications for assets changing to insert the file into + //files db before triggering a sync. + await PhotoManager.stopChangeNotify(); + if (type == FileType.image) { + savedAsset = await PhotoManager.editor + .saveImageWithPath(fileToSave!.path, title: file.title!); + } else if (type == FileType.video) { + savedAsset = + await PhotoManager.editor.saveVideo(fileToSave!, title: file.title!); + } else if (type == FileType.livePhoto) { + final File? liveVideoFile = + await getFileFromServer(file, liveVideo: true); + if (liveVideoFile == null) { + throw AssertionError("Live video can not be null"); + } + if (downloadLivePhotoOnDroid) { + await _saveLivePhotoOnDroid(fileToSave!, liveVideoFile, file); + } else { + savedAsset = await PhotoManager.editor.darwin.saveLivePhoto( + imageFile: fileToSave!, + videoFile: liveVideoFile, + title: file.title!, + ); + } + } + + if (savedAsset != null) { + file.localID = savedAsset.id; + await FilesDB.instance.insert(file); + Bus.instance.fire( + LocalPhotosUpdatedEvent( + [file], + source: "download", + ), + ); + } else if (!downloadLivePhotoOnDroid && savedAsset == null) { + _logger.severe('Failed to save assert of type $type'); + } + } catch (e) { + _logger.severe("Failed to save file", e); + rethrow; + } finally { + await PhotoManager.startChangeNotify(); + LocalSyncService.instance.checkAndSync().ignore(); + } +} + +Future _saveLivePhotoOnDroid( + File image, + File video, + EnteFile enteFile, +) async { + debugPrint("Downloading LivePhoto on 
Droid"); + AssetEntity? savedAsset = await (PhotoManager.editor + .saveImageWithPath(image.path, title: enteFile.title!)); + if (savedAsset == null) { + throw Exception("Failed to save image of live photo"); + } + IgnoredFile ignoreVideoFile = IgnoredFile( + savedAsset.id, + savedAsset.title ?? '', + savedAsset.relativePath ?? 'remoteDownload', + "remoteDownload", + ); + await IgnoredFilesService.instance.cacheAndInsert([ignoreVideoFile]); + final videoTitle = file_path.basenameWithoutExtension(enteFile.title!) + + file_path.extension(video.path); + savedAsset = (await (PhotoManager.editor.saveVideo( + video, + title: videoTitle, + ))); + if (savedAsset == null) { + throw Exception("Failed to save video of live photo"); + } + + ignoreVideoFile = IgnoredFile( + savedAsset.id, + savedAsset.title ?? videoTitle, + savedAsset.relativePath ?? 'remoteDownload', + "remoteDownload", + ); + await IgnoredFilesService.instance.cacheAndInsert([ignoreVideoFile]); +} + Uint8List _decryptFileKey(Map args) { final encryptedKey = CryptoUtil.base642bin(args["encryptedKey"]); final nonce = CryptoUtil.base642bin(args["keyDecryptionNonce"]); diff --git a/mobile/lib/utils/file_uploader.dart b/mobile/lib/utils/file_uploader.dart index d77bc95d7..f81f9d34b 100644 --- a/mobile/lib/utils/file_uploader.dart +++ b/mobile/lib/utils/file_uploader.dart @@ -357,10 +357,16 @@ class FileUploader { final List connections = await (Connectivity().checkConnectivity()); bool canUploadUnderCurrentNetworkConditions = true; - if (connections.any((element) => element == ConnectivityResult.mobile)) { - canUploadUnderCurrentNetworkConditions = - Configuration.instance.shouldBackupOverMobileData(); + if (!Configuration.instance.shouldBackupOverMobileData()) { + if (connections.any((element) => element == ConnectivityResult.mobile)) { + canUploadUnderCurrentNetworkConditions = false; + } else { + _logger.info( + "mobileBackupDisabled, backing up with connections: ${connections.map((e) => e.name).toString()}", + 
); + } } + if (!canUploadUnderCurrentNetworkConditions) { throw WiFiUnavailableError(); } diff --git a/mobile/plugins/ente_cast/.metadata b/mobile/plugins/ente_cast/.metadata new file mode 100644 index 000000000..9fc7ede54 --- /dev/null +++ b/mobile/plugins/ente_cast/.metadata @@ -0,0 +1,10 @@ +# This file tracks properties of this Flutter project. +# Used by Flutter tool to assess capabilities and perform upgrades etc. +# +# This file should be version controlled and should not be manually edited. + +version: + revision: 0b8abb4724aa590dd0f429683339b1e045a1594d + channel: stable + +project_type: plugin diff --git a/mobile/plugins/ente_cast/analysis_options.yaml b/mobile/plugins/ente_cast/analysis_options.yaml new file mode 100644 index 000000000..f04c6cf0f --- /dev/null +++ b/mobile/plugins/ente_cast/analysis_options.yaml @@ -0,0 +1 @@ +include: ../../analysis_options.yaml diff --git a/mobile/plugins/ente_cast/lib/ente_cast.dart b/mobile/plugins/ente_cast/lib/ente_cast.dart new file mode 100644 index 000000000..f421a9297 --- /dev/null +++ b/mobile/plugins/ente_cast/lib/ente_cast.dart @@ -0,0 +1,2 @@ +export 'src/model.dart'; +export 'src/service.dart'; diff --git a/mobile/plugins/ente_cast/lib/src/model.dart b/mobile/plugins/ente_cast/lib/src/model.dart new file mode 100644 index 000000000..e86582f76 --- /dev/null +++ b/mobile/plugins/ente_cast/lib/src/model.dart @@ -0,0 +1,5 @@ +// create enum for type of message for cast +enum CastMessageType { + pairCode, + alreadyCasting, +} diff --git a/mobile/plugins/ente_cast/lib/src/service.dart b/mobile/plugins/ente_cast/lib/src/service.dart new file mode 100644 index 000000000..2ab0961db --- /dev/null +++ b/mobile/plugins/ente_cast/lib/src/service.dart @@ -0,0 +1,18 @@ +import "package:ente_cast/src/model.dart"; +import "package:flutter/widgets.dart"; + +abstract class CastService { + bool get isSupported; + Future> searchDevices(); + Future connectDevice( + BuildContext context, + Object device, { + int? 
collectionID, + // callback that take a map of string, dynamic + void Function(Map>)? onMessage, + }); + // returns a map of sessionID to deviceNames + Map getActiveSessions(); + + Future closeActiveCasts(); +} diff --git a/mobile/plugins/ente_cast/pubspec.yaml b/mobile/plugins/ente_cast/pubspec.yaml new file mode 100644 index 000000000..967e147e9 --- /dev/null +++ b/mobile/plugins/ente_cast/pubspec.yaml @@ -0,0 +1,19 @@ +name: ente_cast +version: 0.0.1 +publish_to: none + +environment: + sdk: '>=3.3.0 <4.0.0' + +dependencies: + collection: + dio: ^4.0.6 + flutter: + sdk: flutter + shared_preferences: ^2.0.5 + stack_trace: + +dev_dependencies: + flutter_lints: + +flutter: diff --git a/mobile/plugins/ente_cast_none/.metadata b/mobile/plugins/ente_cast_none/.metadata new file mode 100644 index 000000000..9fc7ede54 --- /dev/null +++ b/mobile/plugins/ente_cast_none/.metadata @@ -0,0 +1,10 @@ +# This file tracks properties of this Flutter project. +# Used by Flutter tool to assess capabilities and perform upgrades etc. +# +# This file should be version controlled and should not be manually edited. 
+ +version: + revision: 0b8abb4724aa590dd0f429683339b1e045a1594d + channel: stable + +project_type: plugin diff --git a/mobile/plugins/ente_cast_none/analysis_options.yaml b/mobile/plugins/ente_cast_none/analysis_options.yaml new file mode 100644 index 000000000..f04c6cf0f --- /dev/null +++ b/mobile/plugins/ente_cast_none/analysis_options.yaml @@ -0,0 +1 @@ +include: ../../analysis_options.yaml diff --git a/mobile/plugins/ente_cast_none/lib/ente_cast_none.dart b/mobile/plugins/ente_cast_none/lib/ente_cast_none.dart new file mode 100644 index 000000000..66a7132d8 --- /dev/null +++ b/mobile/plugins/ente_cast_none/lib/ente_cast_none.dart @@ -0,0 +1 @@ +export 'src/service.dart'; diff --git a/mobile/plugins/ente_cast_none/lib/src/service.dart b/mobile/plugins/ente_cast_none/lib/src/service.dart new file mode 100644 index 000000000..c78188973 --- /dev/null +++ b/mobile/plugins/ente_cast_none/lib/src/service.dart @@ -0,0 +1,35 @@ +import "package:ente_cast/ente_cast.dart"; +import "package:flutter/widgets.dart"; + +class CastServiceImpl extends CastService { + @override + Future connectDevice( + BuildContext context, + Object device, { + int? collectionID, + void Function(Map>)? 
onMessage, + }) { + throw UnimplementedError(); + } + + @override + bool get isSupported => false; + + @override + Future> searchDevices() { + // TODO: implement searchDevices + throw UnimplementedError(); + } + + @override + Future closeActiveCasts() { + // TODO: implement closeActiveCasts + throw UnimplementedError(); + } + + @override + Map getActiveSessions() { + // TODO: implement getActiveSessions + throw UnimplementedError(); + } +} diff --git a/mobile/plugins/ente_cast_none/pubspec.yaml b/mobile/plugins/ente_cast_none/pubspec.yaml new file mode 100644 index 000000000..a4559fac5 --- /dev/null +++ b/mobile/plugins/ente_cast_none/pubspec.yaml @@ -0,0 +1,18 @@ +name: ente_cast_none +version: 0.0.1 +publish_to: none + +environment: + sdk: '>=3.3.0 <4.0.0' + +dependencies: + ente_cast: + path: ../ente_cast + flutter: + sdk: flutter + stack_trace: + +dev_dependencies: + flutter_lints: + +flutter: diff --git a/mobile/plugins/ente_cast_normal/.metadata b/mobile/plugins/ente_cast_normal/.metadata new file mode 100644 index 000000000..9fc7ede54 --- /dev/null +++ b/mobile/plugins/ente_cast_normal/.metadata @@ -0,0 +1,10 @@ +# This file tracks properties of this Flutter project. +# Used by Flutter tool to assess capabilities and perform upgrades etc. +# +# This file should be version controlled and should not be manually edited. 
+ +version: + revision: 0b8abb4724aa590dd0f429683339b1e045a1594d + channel: stable + +project_type: plugin diff --git a/mobile/plugins/ente_cast_normal/analysis_options.yaml b/mobile/plugins/ente_cast_normal/analysis_options.yaml new file mode 100644 index 000000000..f04c6cf0f --- /dev/null +++ b/mobile/plugins/ente_cast_normal/analysis_options.yaml @@ -0,0 +1 @@ +include: ../../analysis_options.yaml diff --git a/mobile/plugins/ente_cast_normal/lib/ente_cast_normal.dart b/mobile/plugins/ente_cast_normal/lib/ente_cast_normal.dart new file mode 100644 index 000000000..66a7132d8 --- /dev/null +++ b/mobile/plugins/ente_cast_normal/lib/ente_cast_normal.dart @@ -0,0 +1 @@ +export 'src/service.dart'; diff --git a/mobile/plugins/ente_cast_normal/lib/src/service.dart b/mobile/plugins/ente_cast_normal/lib/src/service.dart new file mode 100644 index 000000000..04c501666 --- /dev/null +++ b/mobile/plugins/ente_cast_normal/lib/src/service.dart @@ -0,0 +1,100 @@ +import "dart:developer" as dev; + +import "package:cast/cast.dart"; +import "package:ente_cast/ente_cast.dart"; +import "package:flutter/material.dart"; + +class CastServiceImpl extends CastService { + final String _appId = 'F5BCEC64'; + final String _pairRequestNamespace = 'urn:x-cast:pair-request'; + final Map collectionIDToSessions = {}; + + @override + Future connectDevice( + BuildContext context, + Object device, { + int? collectionID, + void Function(Map>)? 
onMessage, + }) async { + final CastDevice castDevice = device as CastDevice; + final session = await CastSessionManager().startSession(castDevice); + session.messageStream.listen((message) { + if (message['type'] == "RECEIVER_STATUS") { + dev.log( + "got RECEIVER_STATUS, Send request to pair", + name: "CastServiceImpl", + ); + session.sendMessage(_pairRequestNamespace, {}); + } else { + if (onMessage != null && message.containsKey("code")) { + onMessage( + { + CastMessageType.pairCode: message, + }, + ); + } + print('receive message: $message'); + } + }); + + session.stateStream.listen((state) { + if (state == CastSessionState.connected) { + debugPrint("Send request to pair"); + session.sendMessage(_pairRequestNamespace, {}); + } else if (state == CastSessionState.closed) { + dev.log('Session closed', name: 'CastServiceImpl'); + } + }); + + debugPrint("Send request to launch"); + session.sendMessage(CastSession.kNamespaceReceiver, { + 'type': 'LAUNCH', + 'appId': _appId, // set the appId of your app here + }); + // session.sendMessage('urn:x-cast:pair-request', {}); + } + + @override + Future> searchDevices() { + return CastDiscoveryService().search().then((devices) { + return devices.map((device) => (device.name, device)).toList(); + }); + } + + @override + bool get isSupported => true; + + @override + Future closeActiveCasts() { + final sessions = CastSessionManager().sessions; + for (final session in sessions) { + debugPrint("send close message for ${session.sessionId}"); + Future(() { + session.sendMessage(CastSession.kNamespaceConnection, { + 'type': 'CLOSE', + }); + }).timeout( + const Duration(seconds: 5), + onTimeout: () { + debugPrint('sendMessage timed out after 5 seconds'); + }, + ); + debugPrint("close session ${session.sessionId}"); + session.close(); + } + CastSessionManager().sessions.clear(); + return Future.value(); + } + + @override + Map getActiveSessions() { + final sessions = CastSessionManager().sessions; + final Map result = {}; + for (final 
session in sessions) { + if (session.state == CastSessionState.connected) { + result[session.sessionId] = session.state.toString(); + } + } + return result; + } +} diff --git a/mobile/plugins/ente_cast_normal/pubspec.lock b/mobile/plugins/ente_cast_normal/pubspec.lock new file mode 100644 index 000000000..86051800c --- /dev/null +++ b/mobile/plugins/ente_cast_normal/pubspec.lock @@ -0,0 +1,333 @@ +# Generated by pub +# See https://dart.dev/tools/pub/glossary#lockfile +packages: + async: + dependency: transitive + description: + name: async + sha256: "947bfcf187f74dbc5e146c9eb9c0f10c9f8b30743e341481c1e2ed3ecc18c20c" + url: "https://pub.dev" + source: hosted + version: "2.11.0" + cast: + dependency: "direct main" + description: + path: "." + ref: multicast_version + resolved-ref: "1f39cd4d6efa9363e77b2439f0317bae0c92dda1" + url: "https://github.com/guyluz11/flutter_cast.git" + source: git + version: "2.0.9" + characters: + dependency: transitive + description: + name: characters + sha256: "04a925763edad70e8443c99234dc3328f442e811f1d8fd1a72f1c8ad0f69a605" + url: "https://pub.dev" + source: hosted + version: "1.3.0" + collection: + dependency: transitive + description: + name: collection + sha256: ee67cb0715911d28db6bf4af1026078bd6f0128b07a5f66fb2ed94ec6783c09a + url: "https://pub.dev" + source: hosted + version: "1.18.0" + dio: + dependency: transitive + description: + name: dio + sha256: "7d328c4d898a61efc3cd93655a0955858e29a0aa647f0f9e02d59b3bb275e2e8" + url: "https://pub.dev" + source: hosted + version: "4.0.6" + ente_cast: + dependency: "direct main" + description: + path: "../ente_cast" + relative: true + source: path + version: "0.0.1" + ffi: + dependency: transitive + description: + name: ffi + sha256: "493f37e7df1804778ff3a53bd691d8692ddf69702cf4c1c1096a2e41b4779e21" + url: "https://pub.dev" + source: hosted + version: "2.1.2" + file: + dependency: transitive + description: + name: file + sha256: 
"5fc22d7c25582e38ad9a8515372cd9a93834027aacf1801cf01164dac0ffa08c" + url: "https://pub.dev" + source: hosted + version: "7.0.0" + fixnum: + dependency: transitive + description: + name: fixnum + sha256: "25517a4deb0c03aa0f32fd12db525856438902d9c16536311e76cdc57b31d7d1" + url: "https://pub.dev" + source: hosted + version: "1.1.0" + flutter: + dependency: "direct main" + description: flutter + source: sdk + version: "0.0.0" + flutter_lints: + dependency: "direct dev" + description: + name: flutter_lints + sha256: "9e8c3858111da373efc5aa341de011d9bd23e2c5c5e0c62bccf32438e192d7b1" + url: "https://pub.dev" + source: hosted + version: "3.0.2" + flutter_web_plugins: + dependency: transitive + description: flutter + source: sdk + version: "0.0.0" + http: + dependency: transitive + description: + name: http + sha256: "761a297c042deedc1ffbb156d6e2af13886bb305c2a343a4d972504cd67dd938" + url: "https://pub.dev" + source: hosted + version: "1.2.1" + http_parser: + dependency: transitive + description: + name: http_parser + sha256: "2aa08ce0341cc9b354a498388e30986515406668dbcc4f7c950c3e715496693b" + url: "https://pub.dev" + source: hosted + version: "4.0.2" + lints: + dependency: transitive + description: + name: lints + sha256: cbf8d4b858bb0134ef3ef87841abdf8d63bfc255c266b7bf6b39daa1085c4290 + url: "https://pub.dev" + source: hosted + version: "3.0.0" + material_color_utilities: + dependency: transitive + description: + name: material_color_utilities + sha256: "0e0a020085b65b6083975e499759762399b4475f766c21668c4ecca34ea74e5a" + url: "https://pub.dev" + source: hosted + version: "0.8.0" + meta: + dependency: transitive + description: + name: meta + sha256: d584fa6707a52763a52446f02cc621b077888fb63b93bbcb1143a7be5a0c0c04 + url: "https://pub.dev" + source: hosted + version: "1.11.0" + multicast_dns: + dependency: transitive + description: + name: multicast_dns + sha256: "316cc47a958d4bd3c67bd238fe8b44fdfb6133bad89cb191c0c3bd3edb14e296" + url: "https://pub.dev" + source: hosted + 
version: "0.3.2+6" + path: + dependency: transitive + description: + name: path + sha256: "087ce49c3f0dc39180befefc60fdb4acd8f8620e5682fe2476afd0b3688bb4af" + url: "https://pub.dev" + source: hosted + version: "1.9.0" + path_provider_linux: + dependency: transitive + description: + name: path_provider_linux + sha256: f7a1fe3a634fe7734c8d3f2766ad746ae2a2884abe22e241a8b301bf5cac3279 + url: "https://pub.dev" + source: hosted + version: "2.2.1" + path_provider_platform_interface: + dependency: transitive + description: + name: path_provider_platform_interface + sha256: "88f5779f72ba699763fa3a3b06aa4bf6de76c8e5de842cf6f29e2e06476c2334" + url: "https://pub.dev" + source: hosted + version: "2.1.2" + path_provider_windows: + dependency: transitive + description: + name: path_provider_windows + sha256: "8bc9f22eee8690981c22aa7fc602f5c85b497a6fb2ceb35ee5a5e5ed85ad8170" + url: "https://pub.dev" + source: hosted + version: "2.2.1" + platform: + dependency: transitive + description: + name: platform + sha256: "12220bb4b65720483f8fa9450b4332347737cf8213dd2840d8b2c823e47243ec" + url: "https://pub.dev" + source: hosted + version: "3.1.4" + plugin_platform_interface: + dependency: transitive + description: + name: plugin_platform_interface + sha256: "4820fbfdb9478b1ebae27888254d445073732dae3d6ea81f0b7e06d5dedc3f02" + url: "https://pub.dev" + source: hosted + version: "2.1.8" + protobuf: + dependency: transitive + description: + name: protobuf + sha256: "68645b24e0716782e58948f8467fd42a880f255096a821f9e7d0ec625b00c84d" + url: "https://pub.dev" + source: hosted + version: "3.1.0" + shared_preferences: + dependency: transitive + description: + name: shared_preferences + sha256: d3bbe5553a986e83980916ded2f0b435ef2e1893dfaa29d5a7a790d0eca12180 + url: "https://pub.dev" + source: hosted + version: "2.2.3" + shared_preferences_android: + dependency: transitive + description: + name: shared_preferences_android + sha256: "1ee8bf911094a1b592de7ab29add6f826a7331fb854273d55918693d5364a1f2" + 
url: "https://pub.dev" + source: hosted + version: "2.2.2" + shared_preferences_foundation: + dependency: transitive + description: + name: shared_preferences_foundation + sha256: "7708d83064f38060c7b39db12aefe449cb8cdc031d6062280087bc4cdb988f5c" + url: "https://pub.dev" + source: hosted + version: "2.3.5" + shared_preferences_linux: + dependency: transitive + description: + name: shared_preferences_linux + sha256: "9f2cbcf46d4270ea8be39fa156d86379077c8a5228d9dfdb1164ae0bb93f1faa" + url: "https://pub.dev" + source: hosted + version: "2.3.2" + shared_preferences_platform_interface: + dependency: transitive + description: + name: shared_preferences_platform_interface + sha256: "22e2ecac9419b4246d7c22bfbbda589e3acf5c0351137d87dd2939d984d37c3b" + url: "https://pub.dev" + source: hosted + version: "2.3.2" + shared_preferences_web: + dependency: transitive + description: + name: shared_preferences_web + sha256: "9aee1089b36bd2aafe06582b7d7817fd317ef05fc30e6ba14bff247d0933042a" + url: "https://pub.dev" + source: hosted + version: "2.3.0" + shared_preferences_windows: + dependency: transitive + description: + name: shared_preferences_windows + sha256: "841ad54f3c8381c480d0c9b508b89a34036f512482c407e6df7a9c4aa2ef8f59" + url: "https://pub.dev" + source: hosted + version: "2.3.2" + sky_engine: + dependency: transitive + description: flutter + source: sdk + version: "0.0.99" + source_span: + dependency: transitive + description: + name: source_span + sha256: "53e943d4206a5e30df338fd4c6e7a077e02254531b138a15aec3bd143c1a8b3c" + url: "https://pub.dev" + source: hosted + version: "1.10.0" + stack_trace: + dependency: "direct main" + description: + name: stack_trace + sha256: "73713990125a6d93122541237550ee3352a2d84baad52d375a4cad2eb9b7ce0b" + url: "https://pub.dev" + source: hosted + version: "1.11.1" + string_scanner: + dependency: transitive + description: + name: string_scanner + sha256: "556692adab6cfa87322a115640c11f13cb77b3f076ddcc5d6ae3c20242bedcde" + url: "https://pub.dev" 
+ source: hosted + version: "1.2.0" + term_glyph: + dependency: transitive + description: + name: term_glyph + sha256: a29248a84fbb7c79282b40b8c72a1209db169a2e0542bce341da992fe1bc7e84 + url: "https://pub.dev" + source: hosted + version: "1.2.1" + typed_data: + dependency: transitive + description: + name: typed_data + sha256: facc8d6582f16042dd49f2463ff1bd6e2c9ef9f3d5da3d9b087e244a7b564b3c + url: "https://pub.dev" + source: hosted + version: "1.3.2" + vector_math: + dependency: transitive + description: + name: vector_math + sha256: "80b3257d1492ce4d091729e3a67a60407d227c27241d6927be0130c98e741803" + url: "https://pub.dev" + source: hosted + version: "2.1.4" + web: + dependency: transitive + description: + name: web + sha256: "97da13628db363c635202ad97068d47c5b8aa555808e7a9411963c533b449b27" + url: "https://pub.dev" + source: hosted + version: "0.5.1" + win32: + dependency: transitive + description: + name: win32 + sha256: "0a989dc7ca2bb51eac91e8fd00851297cfffd641aa7538b165c62637ca0eaa4a" + url: "https://pub.dev" + source: hosted + version: "5.4.0" + xdg_directories: + dependency: transitive + description: + name: xdg_directories + sha256: faea9dee56b520b55a566385b84f2e8de55e7496104adada9962e0bd11bcff1d + url: "https://pub.dev" + source: hosted + version: "1.0.4" +sdks: + dart: ">=3.3.0 <4.0.0" + flutter: ">=3.19.0" diff --git a/mobile/plugins/ente_cast_normal/pubspec.yaml b/mobile/plugins/ente_cast_normal/pubspec.yaml new file mode 100644 index 000000000..c97d70a84 --- /dev/null +++ b/mobile/plugins/ente_cast_normal/pubspec.yaml @@ -0,0 +1,22 @@ +name: ente_cast_normal +version: 0.0.1 +publish_to: none + +environment: + sdk: '>=3.3.0 <4.0.0' + +dependencies: + cast: + git: + url: https://github.com/guyluz11/flutter_cast.git + ref: multicast_version + ente_cast: + path: ../ente_cast + flutter: + sdk: flutter + stack_trace: + +dev_dependencies: + flutter_lints: + +flutter: diff --git a/mobile/pubspec.lock b/mobile/pubspec.lock index 0610e4588..ae74068eb 100644 --- 
a/mobile/pubspec.lock +++ b/mobile/pubspec.lock @@ -209,6 +209,15 @@ packages: url: "https://pub.dev" source: hosted version: "1.1.1" + cast: + dependency: transitive + description: + path: "." + ref: multicast_version + resolved-ref: "1f39cd4d6efa9363e77b2439f0317bae0c92dda1" + url: "https://github.com/guyluz11/flutter_cast.git" + source: git + version: "2.0.9" characters: dependency: transitive description: @@ -342,10 +351,10 @@ packages: dependency: "direct main" description: name: cupertino_icons - sha256: d57953e10f9f8327ce64a508a355f0b1ec902193f66288e8cb5070e7c47eeb2d + sha256: ba631d1c7f7bef6b729a622b7b752645a2d076dba9976925b8f25725a30e1ee6 url: "https://pub.dev" source: hosted - version: "1.0.6" + version: "1.0.8" dart_style: dependency: transitive description: @@ -354,14 +363,6 @@ packages: url: "https://pub.dev" source: hosted version: "2.3.2" - dartx: - dependency: transitive - description: - name: dartx - sha256: "8b25435617027257d43e6508b5fe061012880ddfdaa75a71d607c3de2a13d244" - url: "https://pub.dev" - source: hosted - version: "1.2.0" dbus: dependency: transitive description: @@ -434,6 +435,20 @@ packages: url: "https://pub.dev" source: hosted version: "2.1.17" + ente_cast: + dependency: "direct main" + description: + path: "plugins/ente_cast" + relative: true + source: path + version: "0.0.1" + ente_cast_normal: + dependency: "direct main" + description: + path: "plugins/ente_cast_normal" + relative: true + source: path + version: "0.0.1" ente_feature_flag: dependency: "direct main" description: @@ -1093,30 +1108,6 @@ packages: url: "https://pub.dev" source: hosted version: "1.0.4" - isar: - dependency: "direct main" - description: - name: isar - sha256: "99165dadb2cf2329d3140198363a7e7bff9bbd441871898a87e26914d25cf1ea" - url: "https://pub.dev" - source: hosted - version: "3.1.0+1" - isar_flutter_libs: - dependency: "direct main" - description: - name: isar_flutter_libs - sha256: bc6768cc4b9c61aabff77152e7f33b4b17d2fc93134f7af1c3dd51500fe8d5e8 - 
url: "https://pub.dev" - source: hosted - version: "3.1.0+1" - isar_generator: - dependency: "direct dev" - description: - name: isar_generator - sha256: "76c121e1295a30423604f2f819bc255bc79f852f3bc8743a24017df6068ad133" - url: "https://pub.dev" - source: hosted - version: "3.1.0+1" js: dependency: transitive description: @@ -1423,6 +1414,14 @@ packages: url: "https://pub.dev" source: hosted version: "1.0.2" + multicast_dns: + dependency: transitive + description: + name: multicast_dns + sha256: "316cc47a958d4bd3c67bd238fe8b44fdfb6133bad89cb191c0c3bd3edb14e296" + url: "https://pub.dev" + source: hosted + version: "0.3.2+6" nested: dependency: transitive description: @@ -1736,6 +1735,14 @@ packages: url: "https://pub.dev" source: hosted version: "2.1.0" + protobuf: + dependency: transitive + description: + name: protobuf + sha256: "68645b24e0716782e58948f8467fd42a880f255096a821f9e7d0ec625b00c84d" + url: "https://pub.dev" + source: hosted + version: "3.1.0" provider: dependency: "direct main" description: @@ -2181,14 +2188,6 @@ packages: url: "https://pub.dev" source: hosted version: "0.5.9" - time: - dependency: transitive - description: - name: time - sha256: ad8e018a6c9db36cb917a031853a1aae49467a93e0d464683e029537d848c221 - url: "https://pub.dev" - source: hosted - version: "2.1.4" timezone: dependency: transitive description: @@ -2558,14 +2557,6 @@ packages: url: "https://pub.dev" source: hosted version: "1.1.1" - xxh3: - dependency: transitive - description: - name: xxh3 - sha256: a92b30944a9aeb4e3d4f3c3d4ddb3c7816ca73475cd603682c4f8149690f56d7 - url: "https://pub.dev" - source: hosted - version: "1.0.1" yaml: dependency: transitive description: diff --git a/mobile/pubspec.yaml b/mobile/pubspec.yaml index 9c96bc762..2adf29321 100644 --- a/mobile/pubspec.yaml +++ b/mobile/pubspec.yaml @@ -12,7 +12,7 @@ description: ente photos application # Read more about iOS versioning at # 
https://developer.apple.com/library/archive/documentation/General/Reference/InfoPlistKeyReference/Articles/CoreFoundationKeys.html -version: 0.8.84+604 +version: 0.8.88+608 publish_to: none environment: @@ -39,7 +39,7 @@ dependencies: connectivity_plus: ^6.0.2 cross_file: ^0.3.3 crypto: ^3.0.2 - cupertino_icons: ^1.0.0 + cupertino_icons: ^1.0.8 defer_pointer: ^0.0.2 device_info_plus: ^9.0.3 dio: ^4.0.6 @@ -47,6 +47,10 @@ dependencies: dotted_border: ^2.1.0 dropdown_button2: ^2.0.0 email_validator: ^2.0.1 + ente_cast: + path: plugins/ente_cast + ente_cast_normal: + path: plugins/ente_cast_normal ente_feature_flag: path: plugins/ente_feature_flag equatable: ^2.0.5 @@ -95,8 +99,6 @@ dependencies: image_editor: ^1.3.0 in_app_purchase: ^3.0.7 intl: ^0.18.0 - isar: ^3.1.0+1 - isar_flutter_libs: ^3.1.0+1 json_annotation: ^4.8.0 latlong2: ^0.9.0 like_button: ^2.0.5 @@ -192,7 +194,6 @@ dev_dependencies: freezed: ^2.5.2 integration_test: sdk: flutter - isar_generator: ^3.1.0+1 json_serializable: ^6.6.1 test: ^1.22.0 diff --git a/mobile/scripts/build_isar.sh b/mobile/scripts/build_isar.sh deleted file mode 100755 index 1bb1d38f6..000000000 --- a/mobile/scripts/build_isar.sh +++ /dev/null @@ -1,17 +0,0 @@ -# TODO: add `rustup@1.25.2` to `srclibs` -# TODO: verify if `gcc-multilib` or `libc-dev` is needed -$$rustup$$/rustup-init.sh -y -source $HOME/.cargo/env -cd thirdparty/isar/ -bash tool/build_android.sh x86 -bash tool/build_android.sh x64 -bash tool/build_android.sh armv7 -bash tool/build_android.sh arm64 -mv libisar_android_arm64.so libisar.so -mv libisar.so $PUB_CACHE/hosted/pub.dev/isar_flutter_libs-*/android/src/main/jniLibs/arm64-v8a/ -mv libisar_android_armv7.so libisar.so -mv libisar.so $PUB_CACHE/hosted/pub.dev/isar_flutter_libs-*/android/src/main/jniLibs/armeabi-v7a/ -mv libisar_android_x64.so libisar.so -mv libisar.so $PUB_CACHE/hosted/pub.dev/isar_flutter_libs-*/android/src/main/jniLibs/x86_64/ -mv libisar_android_x86.so libisar.so -mv libisar.so 
$PUB_CACHE/hosted/pub.dev/isar_flutter_libs-*/android/src/main/jniLibs/x86/ diff --git a/server/docs/docker.md b/server/docs/docker.md index d8f3db913..a328d734b 100644 --- a/server/docs/docker.md +++ b/server/docs/docker.md @@ -45,7 +45,7 @@ require you to clone the repository or build any images. + image: ghcr.io/ente-io/server ``` -4. Create an (empty) configuration file. Yyou can later put your custom +4. Create an (empty) configuration file. You can later put your custom configuration in this if needed. ```sh diff --git a/server/ente/cast/entity.go b/server/ente/cast/entity.go index deffa90b9..a54d109fc 100644 --- a/server/ente/cast/entity.go +++ b/server/ente/cast/entity.go @@ -9,8 +9,7 @@ type CastRequest struct { } type RegisterDeviceRequest struct { - DeviceCode *string `json:"deviceCode"` - PublicKey string `json:"publicKey" binding:"required"` + PublicKey string `json:"publicKey" binding:"required"` } type AuthContext struct { diff --git a/server/pkg/api/cast.go b/server/pkg/api/cast.go index 62d5c9478..9012624d3 100644 --- a/server/pkg/api/cast.go +++ b/server/pkg/api/cast.go @@ -1,16 +1,16 @@ package api import ( - entity "github.com/ente-io/museum/ente/cast" - "github.com/ente-io/museum/pkg/controller/cast" - "net/http" - "strconv" - "github.com/ente-io/museum/ente" + entity "github.com/ente-io/museum/ente/cast" "github.com/ente-io/museum/pkg/controller" + "github.com/ente-io/museum/pkg/controller/cast" "github.com/ente-io/museum/pkg/utils/handler" "github.com/ente-io/stacktrace" "github.com/gin-gonic/gin" + "net/http" + "strconv" + "strings" ) // CastHandler exposes request handlers for publicly accessible collections @@ -126,7 +126,7 @@ func (h *CastHandler) GetDiff(c *gin.Context) { } func getDeviceCode(c *gin.Context) string { - return c.Param("deviceCode") + return strings.ToUpper(c.Param("deviceCode")) } func (h *CastHandler) getFileForType(c *gin.Context, objectType ente.ObjectType) { diff --git a/server/pkg/controller/cast/controller.go 
b/server/pkg/controller/cast/controller.go index 4432e149f..2bb002f81 100644 --- a/server/pkg/controller/cast/controller.go +++ b/server/pkg/controller/cast/controller.go @@ -2,7 +2,6 @@ package cast import ( "context" - "github.com/ente-io/museum/ente" "github.com/ente-io/museum/ente/cast" "github.com/ente-io/museum/pkg/controller/access" castRepo "github.com/ente-io/museum/pkg/repo/cast" @@ -28,7 +27,7 @@ func NewController(castRepo *castRepo.Repository, } func (c *Controller) RegisterDevice(ctx *gin.Context, request *cast.RegisterDeviceRequest) (string, error) { - return c.CastRepo.AddCode(ctx, request.DeviceCode, request.PublicKey, network.GetClientIP(ctx)) + return c.CastRepo.AddCode(ctx, request.PublicKey, network.GetClientIP(ctx)) } func (c *Controller) GetPublicKey(ctx *gin.Context, deviceCode string) (string, error) { @@ -42,7 +41,6 @@ func (c *Controller) GetPublicKey(ctx *gin.Context, deviceCode string) (string, "ip": ip, "clientIP": network.GetClientIP(ctx), }).Warn("GetPublicKey: IP mismatch") - return "", &ente.ErrCastIPMismatch } return pubKey, nil } diff --git a/server/pkg/controller/storagebonus/referral.go b/server/pkg/controller/storagebonus/referral.go index b452484f4..5bdd951f8 100644 --- a/server/pkg/controller/storagebonus/referral.go +++ b/server/pkg/controller/storagebonus/referral.go @@ -3,7 +3,7 @@ package storagebonus import ( "database/sql" "errors" - "fmt" + "github.com/ente-io/museum/pkg/utils/random" "github.com/ente-io/museum/ente" entity "github.com/ente-io/museum/ente/storagebonus" @@ -119,7 +119,7 @@ func (c *Controller) GetOrCreateReferralCode(ctx *gin.Context, userID int64) (*s if !errors.Is(err, sql.ErrNoRows) { return nil, stacktrace.Propagate(err, "failed to get storagebonus code") } - code, err := generateAlphaNumString(codeLength) + code, err := random.GenerateAlphaNumString(codeLength) if err != nil { return nil, stacktrace.Propagate(err, "") } @@ -131,30 +131,3 @@ func (c *Controller) GetOrCreateReferralCode(ctx 
*gin.Context, userID int64) (*s } return referralCode, nil } - -// generateAlphaNumString returns AlphaNumeric code of given length -// which exclude number 0 and letter O. The code always starts with an -// alphabet -func generateAlphaNumString(length int) (string, error) { - // Define the alphabet and numbers to be used in the string. - alphabet := "ABCDEFGHIJKLMNPQRSTUVWXYZ" - // Define the alphabet and numbers to be used in the string. - alphaNum := fmt.Sprintf("%s123456789", alphabet) - // Allocate a byte slice with the desired length. - result := make([]byte, length) - // Generate the first letter as an alphabet. - r0, err := auth.GenerateRandomInt(int64(len(alphabet))) - if err != nil { - return "", stacktrace.Propagate(err, "") - } - result[0] = alphabet[r0] - // Generate the remaining characters as alphanumeric. - for i := 1; i < length; i++ { - ri, err := auth.GenerateRandomInt(int64(len(alphaNum))) - if err != nil { - return "", stacktrace.Propagate(err, "") - } - result[i] = alphaNum[ri] - } - return string(result), nil -} diff --git a/server/pkg/repo/cast/repo.go b/server/pkg/repo/cast/repo.go index 89ebc4083..2f4446c9d 100644 --- a/server/pkg/repo/cast/repo.go +++ b/server/pkg/repo/cast/repo.go @@ -8,23 +8,16 @@ import ( "github.com/ente-io/stacktrace" "github.com/google/uuid" log "github.com/sirupsen/logrus" - "strings" ) type Repository struct { DB *sql.DB } -func (r *Repository) AddCode(ctx context.Context, code *string, pubKey string, ip string) (string, error) { - var codeValue string - var err error - if code == nil || *code == "" { - codeValue, err = random.GenerateSixDigitOtp() - if err != nil { - return "", stacktrace.Propagate(err, "") - } - } else { - codeValue = strings.TrimSpace(*code) +func (r *Repository) AddCode(ctx context.Context, pubKey string, ip string) (string, error) { + codeValue, err := random.GenerateAlphaNumString(6) + if err != nil { + return "", err } _, err = r.DB.ExecContext(ctx, "INSERT INTO casting (code, public_key, 
id, ip) VALUES ($1, $2, $3, $4)", codeValue, pubKey, uuid.New(), ip) if err != nil { diff --git a/server/pkg/utils/random/generate.go b/server/pkg/utils/random/generate.go index 47932b660..75a811c8e 100644 --- a/server/pkg/utils/random/generate.go +++ b/server/pkg/utils/random/generate.go @@ -13,3 +13,30 @@ func GenerateSixDigitOtp() (string, error) { } return fmt.Sprintf("%06d", n), nil } + +// GenerateAlphaNumString returns AlphaNumeric code of given length +// which exclude number 0 and letter O. The code always starts with an +// alphabet +func GenerateAlphaNumString(length int) (string, error) { + // Define the alphabet and numbers to be used in the string. + alphabet := "ABCDEFGHIJKLMNPQRSTUVWXYZ" + // Define the alphabet and numbers to be used in the string. + alphaNum := fmt.Sprintf("%s123456789", alphabet) + // Allocate a byte slice with the desired length. + result := make([]byte, length) + // Generate the first letter as an alphabet. + r0, err := auth.GenerateRandomInt(int64(len(alphabet))) + if err != nil { + return "", stacktrace.Propagate(err, "") + } + result[0] = alphabet[r0] + // Generate the remaining characters as alphanumeric. 
+ for i := 1; i < length; i++ { + ri, err := auth.GenerateRandomInt(int64(len(alphaNum))) + if err != nil { + return "", stacktrace.Propagate(err, "") + } + result[i] = alphaNum[ri] + } + return string(result), nil +} diff --git a/web/apps/auth/src/pages/_app.tsx b/web/apps/auth/src/pages/_app.tsx index bf1093c90..a5aa55f98 100644 --- a/web/apps/auth/src/pages/_app.tsx +++ b/web/apps/auth/src/pages/_app.tsx @@ -140,7 +140,7 @@ export default function App({ Component, pageProps }: AppProps) { {showNavbar && } - {offline && t("OFFLINE_MSG")} + {isI18nReady && offline && t("OFFLINE_MSG")} diff --git a/web/apps/cast/src/components/PhotoAuditorium.tsx b/web/apps/cast/src/components/PhotoAuditorium.tsx index 6aa2c3990..c77c9e6ca 100644 --- a/web/apps/cast/src/components/PhotoAuditorium.tsx +++ b/web/apps/cast/src/components/PhotoAuditorium.tsx @@ -11,14 +11,16 @@ export const PhotoAuditorium: React.FC = ({ showNextSlide, }) => { useEffect(() => { + console.log("showing slide"); const timeoutId = window.setTimeout(() => { + console.log("showing next slide timer"); showNextSlide(); }, 10000); return () => { if (timeoutId) clearTimeout(timeoutId); }; - }, [showNextSlide]); + }, []); return (
{ - const array = new Uint8Array(length); - window.crypto.getRandomValues(array); - // Modulo operation to ensure each byte is a single digit - for (let i = 0; i < length; i++) { - array[i] = array[i] % 10; - } - return array; -}; - -const convertDataToDecimalString = (data: Uint8Array): string => { - let decimalString = ""; - for (let i = 0; i < data.length; i++) { - decimalString += data[i].toString(); // No need to pad, as each value is a single digit - } - return decimalString; -}; - export default function PairingMode() { - const [digits, setDigits] = useState([]); + const [deviceCode, setDeviceCode] = useState(""); const [publicKeyB64, setPublicKeyB64] = useState(""); const [privateKeyB64, setPrivateKeyB64] = useState(""); const [codePending, setCodePending] = useState(true); @@ -41,6 +22,17 @@ export default function PairingMode() { init(); }, []); + const init = async () => { + try { + const keypair = await generateKeyPair(); + setPublicKeyB64(await toB64(keypair.publicKey)); + setPrivateKeyB64(await toB64(keypair.privateKey)); + } catch (e) { + log.error("failed to generate keypair", e); + throw e; + } + }; + useEffect(() => { if (!cast) { return; @@ -94,7 +86,7 @@ export default function PairingMode() { "urn:x-cast:pair-request", message.senderId, { - code: digits.join(""), + code: deviceCode, }, ); } catch (e) { @@ -102,24 +94,9 @@ export default function PairingMode() { } }; - const init = async () => { - try { - const data = generateSecureData(6); - setDigits(convertDataToDecimalString(data).split("")); - const keypair = await generateKeyPair(); - setPublicKeyB64(await toB64(keypair.publicKey)); - setPrivateKeyB64(await toB64(keypair.privateKey)); - } catch (e) { - log.error("failed to generate keypair", e); - throw e; - } - }; - const generateKeyPair = async () => { await _sodium.ready; - const keypair = _sodium.crypto_box_keypair(); - return keypair; }; @@ -132,9 +109,7 @@ export default function PairingMode() { // then, we can decrypt this and store 
all the necessary info locally so we can play the collection slideshow. let devicePayload = ""; try { - const encDastData = await castGateway.getCastData( - `${digits.join("")}`, - ); + const encDastData = await castGateway.getCastData(`${deviceCode}`); if (!encDastData) return; devicePayload = encDastData; } catch (e) { @@ -157,10 +132,8 @@ export default function PairingMode() { const advertisePublicKey = async (publicKeyB64: string) => { // hey client, we exist! try { - await castGateway.registerDevice( - `${digits.join("")}`, - publicKeyB64, - ); + const codeValue = await castGateway.registerDevice(publicKeyB64); + setDeviceCode(codeValue); setCodePending(false); } catch (e) { // schedule re-try after 5 seconds @@ -174,19 +147,25 @@ export default function PairingMode() { const router = useRouter(); useEffect(() => { - if (digits.length < 1 || !publicKeyB64 || !privateKeyB64) return; + console.log("useEffect for pairing called"); + if (deviceCode.length < 1 || !publicKeyB64 || !privateKeyB64) return; const interval = setInterval(async () => { + console.log("polling for cast data"); const data = await pollForCastData(); - if (!data) return; + if (!data) { + console.log("no data"); + return; + } storeCastData(data); + console.log("pushing slideshow"); await router.push("/slideshow"); }, 1000); return () => { clearInterval(interval); }; - }, [digits, publicKeyB64, privateKeyB64, codePending]); + }, [deviceCode, publicKeyB64, privateKeyB64, codePending]); useEffect(() => { if (!publicKeyB64) return; @@ -229,7 +208,7 @@ export default function PairingMode() { ) : ( <> - + )}
diff --git a/web/apps/cast/src/pages/slideshow.tsx b/web/apps/cast/src/pages/slideshow.tsx index 99b2209de..8554524b2 100644 --- a/web/apps/cast/src/pages/slideshow.tsx +++ b/web/apps/cast/src/pages/slideshow.tsx @@ -30,6 +30,7 @@ export default function Slideshow() { const syncCastFiles = async (token: string) => { try { + console.log("syncCastFiles"); const castToken = window.localStorage.getItem("castToken"); const requestedCollectionKey = window.localStorage.getItem("collectionKey"); @@ -50,6 +51,7 @@ export default function Slideshow() { } } catch (e) { log.error("error during sync", e); + // go back to preview page router.push("/"); } }; @@ -100,45 +102,79 @@ export default function Slideshow() { }, [collectionFiles]); const showNextSlide = async () => { - const currentIndex = collectionFiles.findIndex( - (file) => file.id === currentFileId, - ); + try { + console.log("showNextSlide"); + const currentIndex = collectionFiles.findIndex( + (file) => file.id === currentFileId, + ); - const nextIndex = (currentIndex + 1) % collectionFiles.length; - const nextNextIndex = (nextIndex + 1) % collectionFiles.length; + console.log( + "showNextSlide-index", + currentIndex, + collectionFiles.length, + ); - const nextFile = collectionFiles[nextIndex]; - const nextNextFile = collectionFiles[nextNextIndex]; + const nextIndex = (currentIndex + 1) % collectionFiles.length; + const nextNextIndex = (nextIndex + 1) % collectionFiles.length; - let nextURL = renderableFileURLCache.get(nextFile.id); - let nextNextURL = renderableFileURLCache.get(nextNextFile.id); + console.log( + "showNextSlide-nextIndex and nextNextIndex", + nextIndex, + nextNextIndex, + ); - if (!nextURL) { - try { - const blob = await getPreviewableImage(nextFile, castToken); - const url = URL.createObjectURL(blob); - renderableFileURLCache.set(nextFile.id, url); - nextURL = url; - } catch (e) { - return; + const nextFile = collectionFiles[nextIndex]; + const nextNextFile = collectionFiles[nextNextIndex]; + + let 
nextURL = renderableFileURLCache.get(nextFile.id); + let nextNextURL = renderableFileURLCache.get(nextNextFile.id); + + if (!nextURL) { + try { + console.log("nextURL doesn't exist yet"); + const blob = await getPreviewableImage(nextFile, castToken); + console.log("nextURL blobread"); + const url = URL.createObjectURL(blob); + console.log("nextURL", url); + renderableFileURLCache.set(nextFile.id, url); + console.log("nextUrlCache set"); + nextURL = url; + } catch (e) { + console.log("error in nextUrl", e); + return; + } + } else { + console.log("nextURL already exists"); } - } - if (!nextNextURL) { - try { - const blob = await getPreviewableImage(nextNextFile, castToken); - const url = URL.createObjectURL(blob); - renderableFileURLCache.set(nextNextFile.id, url); - nextNextURL = url; - } catch (e) { - return; + if (!nextNextURL) { + try { + console.log("nextNextURL doesn't exist yet"); + const blob = await getPreviewableImage( + nextNextFile, + castToken, + ); + console.log("nextNextURL blobread"); + const url = URL.createObjectURL(blob); + console.log("nextNextURL", url); + renderableFileURLCache.set(nextNextFile.id, url); + console.log("nextNextURCacheL set"); + nextNextURL = url; + } catch (e) { + console.log("error in nextNextURL", e); + return; + } + } else { + console.log("nextNextURL already exists"); } - } - setLoading(false); - setCurrentFileId(nextFile.id); - setCurrentFileURL(nextURL); - setNextFileURL(nextNextURL); + setLoading(false); + setCurrentFileId(nextFile.id); + setCurrentFileURL(nextURL); + setNextFileURL(nextNextURL); + } catch (e) { + console.log("error in showNextSlide", e); + } }; if (loading) return ; diff --git a/web/apps/photos/src/components/PhotoFrame.tsx b/web/apps/photos/src/components/PhotoFrame.tsx index 8c935ee27..89f1ce887 100644 --- a/web/apps/photos/src/components/PhotoFrame.tsx +++ b/web/apps/photos/src/components/PhotoFrame.tsx @@ -308,11 +308,7 @@ const PhotoFrame = ({ item: EnteFile, ) => { log.info( - `[${ - item.id - }] 
getSlideData called for thumbnail:${!!item.msrc} sourceLoaded:${ - item.isSourceLoaded - } fetching:${fetching[item.id]}`, + `[${item.id}] getSlideData called for thumbnail: ${!!item.msrc} sourceLoaded: ${!!item.isSourceLoaded} fetching: ${!!fetching[item.id]}`, ); if (!item.msrc) { @@ -327,9 +323,7 @@ const PhotoFrame = ({ try { updateURL(index)(item.id, url); log.info( - `[${ - item.id - }] calling invalidateCurrItems for thumbnail msrc :${!!item.msrc}`, + `[${item.id}] calling invalidateCurrItems for thumbnail msrc: ${!!item.msrc}`, ); instance.invalidateCurrItems(); if ((instance as any).isOpen()) { @@ -381,7 +375,7 @@ const PhotoFrame = ({ try { await updateSrcURL(index, item.id, dummyImgSrcUrl); log.info( - `[${item.id}] calling invalidateCurrItems for live photo imgSrc, source loaded :${item.isSourceLoaded}`, + `[${item.id}] calling invalidateCurrItems for live photo imgSrc, source loaded: ${item.isSourceLoaded}`, ); instance.invalidateCurrItems(); if ((instance as any).isOpen()) { @@ -415,7 +409,7 @@ const PhotoFrame = ({ true, ); log.info( - `[${item.id}] calling invalidateCurrItems for live photo complete, source loaded :${item.isSourceLoaded}`, + `[${item.id}] calling invalidateCurrItems for live photo complete, source loaded: ${item.isSourceLoaded}`, ); instance.invalidateCurrItems(); if ((instance as any).isOpen()) { @@ -433,7 +427,7 @@ const PhotoFrame = ({ try { await updateSrcURL(index, item.id, srcURLs); log.info( - `[${item.id}] calling invalidateCurrItems for src, source loaded :${item.isSourceLoaded}`, + `[${item.id}] calling invalidateCurrItems for src, source loaded: ${item.isSourceLoaded}`, ); instance.invalidateCurrItems(); if ((instance as any).isOpen()) { @@ -476,9 +470,7 @@ const PhotoFrame = ({ try { updateURL(index)(item.id, item.msrc, true); log.info( - `[${ - item.id - }] calling invalidateCurrItems for thumbnail msrc :${!!item.msrc}`, + `[${item.id}] calling invalidateCurrItems for thumbnail msrc: ${!!item.msrc}`, ); 
instance.invalidateCurrItems(); if ((instance as any).isOpen()) { @@ -495,7 +487,7 @@ const PhotoFrame = ({ } try { log.info( - `[${item.id}] new file getConvertedVideo request- ${item.metadata.title}}`, + `[${item.id}] new file getConvertedVideo request ${item.metadata.title}}`, ); fetching[item.id] = true; @@ -504,7 +496,7 @@ const PhotoFrame = ({ try { await updateSrcURL(index, item.id, srcURL, true); log.info( - `[${item.id}] calling invalidateCurrItems for src, source loaded :${item.isSourceLoaded}`, + `[${item.id}] calling invalidateCurrItems for src, source loaded: ${item.isSourceLoaded}`, ); instance.invalidateCurrItems(); if ((instance as any).isOpen()) { diff --git a/web/apps/photos/src/components/PhotoList/dedupe.tsx b/web/apps/photos/src/components/PhotoList/dedupe.tsx index 9c86ba24f..7181f6267 100644 --- a/web/apps/photos/src/components/PhotoList/dedupe.tsx +++ b/web/apps/photos/src/components/PhotoList/dedupe.tsx @@ -1,4 +1,3 @@ -import { convertBytesToHumanReadable } from "@/next/file"; import { FlexWrapper } from "@ente/shared/components/Container"; import { Box, styled } from "@mui/material"; import { @@ -20,6 +19,7 @@ import { } from "react-window"; import { Duplicate } from "services/deduplicationService"; import { EnteFile } from "types/file"; +import { convertBytesToHumanReadable } from "utils/file"; export enum ITEM_TYPE { TIME = "TIME", diff --git a/web/apps/photos/src/components/PhotoList/index.tsx b/web/apps/photos/src/components/PhotoList/index.tsx index 48454fa69..4803995d4 100644 --- a/web/apps/photos/src/components/PhotoList/index.tsx +++ b/web/apps/photos/src/components/PhotoList/index.tsx @@ -1,4 +1,3 @@ -import { convertBytesToHumanReadable } from "@/next/file"; import { FlexWrapper } from "@ente/shared/components/Container"; import { formatDate, getDate, isSameDay } from "@ente/shared/time/format"; import { Box, Checkbox, Link, Typography, styled } from "@mui/material"; @@ -23,6 +22,7 @@ import { areEqual, } from "react-window"; 
import { EnteFile } from "types/file"; +import { convertBytesToHumanReadable } from "utils/file"; import { handleSelectCreator } from "utils/photoFrame"; import { PublicCollectionGalleryContext } from "utils/publicCollectionGallery"; @@ -111,14 +111,13 @@ function getShrinkRatio(width: number, columns: number) { ); } -const ListContainer = styled(Box)<{ - columns: number; - shrinkRatio: number; - groups?: number[]; +const ListContainer = styled(Box, { + shouldForwardProp: (propName) => propName != "gridTemplateColumns", +})<{ + gridTemplateColumns: string; }>` display: grid; - grid-template-columns: ${({ columns, shrinkRatio, groups }) => - getTemplateColumns(columns, shrinkRatio, groups)}; + grid-template-columns: ${(props) => props.gridTemplateColumns}; grid-column-gap: ${GAP_BTW_TILES}px; width: 100%; color: #fff; @@ -235,9 +234,11 @@ const PhotoListRow = React.memo( return ( {renderListItem(timeStampList[index], isScrolling)} diff --git a/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx b/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx index ff795aca7..42edddbf1 100644 --- a/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx +++ b/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx @@ -507,14 +507,14 @@ const ImageEditorOverlay = (props: IProps) => { const editedFile = await getEditedFile(); const file = { - fileOrPath: editedFile, + uploadItem: editedFile, localID: 1, collectionID: props.file.collectionID, }; uploadManager.prepareForNewUpload(); uploadManager.showUploadProgressDialog(); - uploadManager.uploadFiles([file], [collection]); + uploadManager.uploadItems([file], [collection]); setFileURL(null); props.onClose(); props.closePhotoViewer(); diff --git a/web/apps/photos/src/components/Search/SearchBar/styledComponents.tsx b/web/apps/photos/src/components/Search/SearchBar/styledComponents.tsx index 41d4a0971..d33c7c949 100644 --- 
a/web/apps/photos/src/components/Search/SearchBar/styledComponents.tsx +++ b/web/apps/photos/src/components/Search/SearchBar/styledComponents.tsx @@ -23,7 +23,9 @@ export const SearchMobileBox = styled(FluidContainer)` } `; -export const SearchInputWrapper = styled(CenteredFlex)<{ isOpen: boolean }>` +export const SearchInputWrapper = styled(CenteredFlex, { + shouldForwardProp: (propName) => propName != "isOpen", +})<{ isOpen: boolean }>` background: ${({ theme }) => theme.colors.background.base}; max-width: 484px; margin: auto; diff --git a/web/apps/photos/src/components/Sidebar/DebugSection.tsx b/web/apps/photos/src/components/Sidebar/DebugSection.tsx index 28c65ca8e..e33637403 100644 --- a/web/apps/photos/src/components/Sidebar/DebugSection.tsx +++ b/web/apps/photos/src/components/Sidebar/DebugSection.tsx @@ -9,10 +9,6 @@ import { useContext, useEffect, useState } from "react"; import { Trans } from "react-i18next"; import { isInternalUser } from "utils/user"; import { testUpload } from "../../../tests/upload.test"; -import { - testZipFileReading, - testZipWithRootFileReadingTest, -} from "../../../tests/zip-file-reading.test"; export default function DebugSection() { const appContext = useContext(AppContext); @@ -62,25 +58,11 @@ export default function DebugSection() { )} {isInternalUser() && ( - <> - - - - - - + )} ); diff --git a/web/apps/photos/src/components/Upload/Uploader.tsx b/web/apps/photos/src/components/Upload/Uploader.tsx index 5cd157afa..717430655 100644 --- a/web/apps/photos/src/components/Upload/Uploader.tsx +++ b/web/apps/photos/src/components/Upload/Uploader.tsx @@ -1,11 +1,11 @@ +import { basename } from "@/next/file"; import log from "@/next/log"; -import { ElectronFile } from "@/next/types/file"; -import type { CollectionMapping, Electron } from "@/next/types/ipc"; +import type { CollectionMapping, Electron, ZipItem } from "@/next/types/ipc"; import { CustomError } from "@ente/shared/error"; import { isPromise } from "@ente/shared/utils"; 
import DiscFullIcon from "@mui/icons-material/DiscFull"; import UserNameInputDialog from "components/UserNameInputDialog"; -import { PICKED_UPLOAD_TYPE, UPLOAD_STAGES } from "constants/upload"; +import { UPLOAD_STAGES } from "constants/upload"; import { t } from "i18next"; import isElectron from "is-electron"; import { AppContext } from "pages/_app"; @@ -13,21 +13,21 @@ import { GalleryContext } from "pages/gallery"; import { useContext, useEffect, useRef, useState } from "react"; import billingService from "services/billingService"; import { getLatestCollections } from "services/collectionService"; +import { exportMetadataDirectoryName } from "services/export"; import { getPublicCollectionUID, getPublicCollectionUploaderName, savePublicCollectionUploaderName, } from "services/publicCollectionService"; +import type { FileAndPath, UploadItem } from "services/upload/types"; import type { - FileWithCollection, InProgressUpload, SegregatedFinishedUploads, UploadCounter, UploadFileNames, + UploadItemWithCollection, } from "services/upload/uploadManager"; -import uploadManager, { - setToUploadCollection, -} from "services/upload/uploadManager"; +import uploadManager from "services/upload/uploadManager"; import watcher from "services/watch"; import { NotificationAttributes } from "types/Notification"; import { Collection } from "types/collection"; @@ -45,19 +45,16 @@ import { getDownloadAppMessage, getRootLevelFileWithFolderNotAllowMessage, } from "utils/ui"; -import { - DEFAULT_IMPORT_SUGGESTION, - getImportSuggestion, - groupFilesBasedOnParentFolder, - pruneHiddenFiles, - type ImportSuggestion, -} from "utils/upload"; import { SetCollectionNamerAttributes } from "../Collections/CollectionNamer"; import { CollectionMappingChoiceModal } from "./CollectionMappingChoiceModal"; import UploadProgress from "./UploadProgress"; import UploadTypeSelector from "./UploadTypeSelector"; -const FIRST_ALBUM_NAME = "My First Album"; +enum PICKED_UPLOAD_TYPE { + FILES = "files", + 
FOLDERS = "folders", + ZIPS = "zips", +} interface Props { syncWithRemote: (force?: boolean, silent?: boolean) => Promise; @@ -73,17 +70,29 @@ interface Props { isFirstUpload?: boolean; uploadTypeSelectorView: boolean; showSessionExpiredMessage: () => void; - showUploadFilesDialog: () => void; - showUploadDirsDialog: () => void; - webFolderSelectorFiles: File[]; - webFileSelectorFiles: File[]; dragAndDropFiles: File[]; + openFileSelector: () => void; + fileSelectorFiles: File[]; + openFolderSelector: () => void; + folderSelectorFiles: File[]; + openZipFileSelector?: () => void; + fileSelectorZipFiles?: File[]; uploadCollection?: Collection; uploadTypeSelectorIntent: UploadTypeSelectorIntent; activeCollection?: Collection; } -export default function Uploader(props: Props) { +export default function Uploader({ + isFirstUpload, + dragAndDropFiles, + openFileSelector, + fileSelectorFiles, + openFolderSelector, + folderSelectorFiles, + openZipFileSelector, + fileSelectorZipFiles, + ...props +}: Props) { const appContext = useContext(AppContext); const galleryContext = useContext(GalleryContext); const publicCollectionGalleryContext = useContext( @@ -115,26 +124,55 @@ export default function Uploader(props: Props) { ); /** - * {@link File}s that the user drag-dropped or selected for uploads. This is - * the only type of selection that is possible when we're running in the - * browser. + * {@link File}s that the user drag-dropped or selected for uploads (web). + * + * This is the only type of selection that is possible when we're running in + * the browser. */ const [webFiles, setWebFiles] = useState([]); + /** + * {@link File}s that the user drag-dropped or selected for uploads, + * augmented with their paths (desktop). + * + * These siblings of {@link webFiles} come into play when we are running in + * the context of our desktop app. 
+ */ + const [desktopFiles, setDesktopFiles] = useState([]); /** * Paths of file to upload that we've received over the IPC bridge from the * code running in the Node.js layer of our desktop app. + * + * Unlike {@link filesWithPaths} which are still user initiated, + * {@link desktopFilePaths} can be set via programmatic action. For example, + * if the user has setup a folder watch, and a new file is added on their + * local file system in one of the watched folders, then the relevant path + * of the new file would get added to {@link desktopFilePaths}. */ const [desktopFilePaths, setDesktopFilePaths] = useState([]); /** - * TODO(MR): When? + * (zip file path, entry within zip file) tuples for zip files that the user + * is trying to upload. + * + * These are only set when we are running in the context of our desktop app. + * They may be set either on a user action (when the user selects or + * drag-drops zip files) or programmatically (when the app is trying to + * resume pending uploads from a previous session). */ - const [electronFiles, setElectronFiles] = useState([]); + const [desktopZipItems, setDesktopZipItems] = useState([]); /** - * Consolidated and cleaned list obtained from {@link webFiles} and - * {@link desktopFilePaths}. + * Consolidated and cleaned list obtained from {@link webFiles}, + * {@link desktopFiles}, {@link desktopFilePaths} and + * {@link desktopZipItems}. + * + * Augment each {@link UploadItem} with its "path" (relative path or name in + * the case of {@link webFiles}, absolute path in the case of + * {@link desktopFiles}, {@link desktopFilePaths}, and the path within the + * zip file for {@link desktopZipItems}). + * + * See the documentation of {@link UploadItem} for more details. 
*/ - const fileOrPathsToUpload = useRef<(File | string)[]>([]); + const uploadItemsAndPaths = useRef<[UploadItem, string][]>([]); /** * If true, then the next upload we'll be processing was initiated by our @@ -148,9 +186,12 @@ export default function Uploader(props: Props) { */ const pendingDesktopUploadCollectionName = useRef(""); - // This is set when the user choses a type to upload from the upload type selector dialog + /** + * This is set to thue user's choice when the user chooses one of the + * predefined type to upload from the upload type selector dialog + */ const pickedUploadType = useRef(null); - const zipPaths = useRef(null); + const currentUploadPromise = useRef>(null); const uploadRunning = useRef(false); const uploaderNameRef = useRef(null); @@ -165,9 +206,9 @@ export default function Uploader(props: Props) { setChoiceModalView(false); uploadRunning.current = false; }; + const handleCollectionSelectorCancel = () => { uploadRunning.current = false; - appContext.resetSharedFiles(); }; const handleUserNameInputDialogClose = () => { @@ -191,6 +232,7 @@ export default function Uploader(props: Props) { publicCollectionGalleryContext, appContext.isCFProxyDisabled, ); + if (uploadManager.isUploadRunning()) { setUploadProgressView(true); } @@ -214,16 +256,17 @@ export default function Uploader(props: Props) { watcher.init(upload, requestSyncWithRemote); electron.pendingUploads().then((pending) => { - if (pending) { - log.info("Resuming pending desktop upload", pending); - resumeDesktopUpload( - pending.type == "files" - ? PICKED_UPLOAD_TYPE.FILES - : PICKED_UPLOAD_TYPE.ZIPS, - pending.files, - pending.collectionName, - ); - } + if (!pending) return; + + const { collectionName, filePaths, zipItems } = pending; + + log.info( + `Resuming pending of upload of ${filePaths.length + zipItems.length} items${collectionName ? 
" to collection " + collectionName : ""}`, + ); + isPendingDesktopUpload.current = true; + pendingDesktopUploadCollectionName.current = collectionName; + setDesktopFilePaths(filePaths); + setDesktopZipItems(zipItems); }); } }, [ @@ -233,178 +276,191 @@ export default function Uploader(props: Props) { appContext.isCFProxyDisabled, ]); - // this handles the change of selectorFiles changes on web when user selects - // files for upload through the opened file/folder selector or dragAndDrop them - // the webFiles state is update which triggers the upload of those files + // Handle selected files when user selects files for upload through the open + // file / open folder selection dialog, or drag-and-drops them. useEffect(() => { if (appContext.watchFolderView) { // if watch folder dialog is open don't catch the dropped file // as they are folder being dropped for watching return; } - if ( - pickedUploadType.current === PICKED_UPLOAD_TYPE.FOLDERS && - props.webFolderSelectorFiles?.length > 0 - ) { - log.info(`received folder upload request`); - setWebFiles(props.webFolderSelectorFiles); - } else if ( - pickedUploadType.current === PICKED_UPLOAD_TYPE.FILES && - props.webFileSelectorFiles?.length > 0 - ) { - log.info(`received file upload request`); - setWebFiles(props.webFileSelectorFiles); - } else if (props.dragAndDropFiles?.length > 0) { - isDragAndDrop.current = true; - if (electron) { - const main = async () => { - try { - log.info(`uploading dropped files from desktop app`); - // check and parse dropped files which are zip files - let electronFiles = [] as ElectronFile[]; - for (const file of props.dragAndDropFiles) { - if (file.name.endsWith(".zip")) { - const zipFiles = - await electron.getElectronFilesFromGoogleZip( - (file as any).path, - ); - log.info( - `zip file - ${file.name} contains ${zipFiles.length} files`, - ); - electronFiles = [...electronFiles, ...zipFiles]; - } else { - // type cast to ElectronFile as the file is dropped from desktop app - // type 
file and ElectronFile should be interchangeable, but currently they have some differences. - // Typescript is giving error - // Conversion of type 'File' to type 'ElectronFile' may be a mistake because neither type sufficiently - // overlaps with the other. If this was intentional, convert the expression to 'unknown' first. - // Type 'File' is missing the following properties from type 'ElectronFile': path, blob - // for now patching by type casting first to unknown and then to ElectronFile - // TODO: fix types and remove type cast - electronFiles.push( - file as unknown as ElectronFile, - ); - } - } - log.info( - `uploading dropped files from desktop app - ${electronFiles.length} files found`, - ); - setElectronFiles(electronFiles); - } catch (e) { - log.error("failed to upload desktop dropped files", e); - setWebFiles(props.dragAndDropFiles); - } - }; - main(); - } else { - log.info(`uploading dropped files from web app`); - setWebFiles(props.dragAndDropFiles); - } + + let files: File[]; + + switch (pickedUploadType.current) { + case PICKED_UPLOAD_TYPE.FILES: + files = fileSelectorFiles; + break; + + case PICKED_UPLOAD_TYPE.FOLDERS: + files = folderSelectorFiles; + break; + + case PICKED_UPLOAD_TYPE.ZIPS: + files = fileSelectorZipFiles; + break; + + default: + files = dragAndDropFiles; + break; + } + + if (electron) { + desktopFilesAndZipItems(electron, files).then( + ({ fileAndPaths, zipItems }) => { + setDesktopFiles(fileAndPaths); + setDesktopZipItems(zipItems); + }, + ); + } else { + setWebFiles(files); } }, [ - props.dragAndDropFiles, - props.webFileSelectorFiles, - props.webFolderSelectorFiles, + dragAndDropFiles, + fileSelectorFiles, + folderSelectorFiles, + fileSelectorZipFiles, ]); + // Trigger an upload when any of the dependencies change. 
useEffect(() => { - if ( - desktopFilePaths.length > 0 || - electronFiles.length > 0 || - webFiles.length > 0 || - appContext.sharedFiles?.length > 0 - ) { - log.info( - `upload request type: ${ - desktopFilePaths.length > 0 - ? "desktopFilePaths" - : electronFiles.length > 0 - ? "electronFiles" - : webFiles.length > 0 - ? "webFiles" - : "sharedFiles" - } count ${ - desktopFilePaths.length + - electronFiles.length + - webFiles.length + - (appContext.sharedFiles?.length ?? 0) - }`, - ); - if (uploadManager.isUploadRunning()) { - if (watcher.isUploadRunning()) { - // Pause watch folder sync on user upload - log.info( - "Folder watcher was uploading, pausing it to first run user upload", - ); - watcher.pauseRunningSync(); - } else { - log.info( - "Ignoring new upload request because an upload is already running", - ); - return; - } - } - uploadRunning.current = true; - props.closeUploadTypeSelector(); - props.setLoading(true); - if (webFiles?.length > 0) { - // File selection by drag and drop or selection of file. - fileOrPathsToUpload.current = webFiles; - setWebFiles([]); - } else if (appContext.sharedFiles?.length > 0) { - fileOrPathsToUpload.current = appContext.sharedFiles; - appContext.resetSharedFiles(); - } else if (electronFiles?.length > 0) { - // File selection from desktop app - deprecated - log.warn("Using deprecated code path for ElectronFiles"); - fileOrPathsToUpload.current = electronFiles.map((f) => f.path); - setElectronFiles([]); - } else if (desktopFilePaths && desktopFilePaths.length > 0) { - // File selection from our desktop app - fileOrPathsToUpload.current = desktopFilePaths; - setDesktopFilePaths(undefined); - } + // Re the paths: + // + // - These are not necessarily the full paths. In particular, when + // running on the browser they'll be the relative paths (at best) or + // just the file-name otherwise. + // + // - All the paths use POSIX separators. See inline comments. + const allItemAndPaths = [ + // See: [Note: webkitRelativePath]. 
In particular, they use POSIX + // separators. + webFiles.map((f) => [f, f.webkitRelativePath ?? f.name]), + // The paths we get from the desktop app all eventually come either + // from electron.selectDirectory or electron.pathForFile, both of + // which return POSIX paths. + desktopFiles.map((fp) => [fp, fp.path]), + desktopFilePaths.map((p) => [p, p]), + // The first path, that of the zip file itself, is POSIX like the + // other paths we get over the IPC boundary. And the second path, + // ze[1], the entry name, uses POSIX separators because that is what + // the ZIP format uses. + desktopZipItems.map((ze) => [ze, ze[1]]), + ].flat() as [UploadItem, string][]; - fileOrPathsToUpload.current = pruneHiddenFiles( - fileOrPathsToUpload.current, - ); - if (fileOrPathsToUpload.current.length === 0) { - props.setLoading(false); + if (allItemAndPaths.length == 0) return; + + if (uploadManager.isUploadRunning()) { + if (watcher.isUploadRunning()) { + log.info("Pausing watch folder sync to prioritize user upload"); + watcher.pauseRunningSync(); + } else { + log.info( + "Ignoring new upload request when upload is already running", + ); + return; + } + } + + uploadRunning.current = true; + props.closeUploadTypeSelector(); + props.setLoading(true); + + setWebFiles([]); + setDesktopFiles([]); + setDesktopFilePaths([]); + setDesktopZipItems([]); + + // Remove hidden files (files whose names begins with a "."). 
+ const prunedItemAndPaths = allItemAndPaths.filter( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + ([_, p]) => !basename(p).startsWith("."), + ); + + uploadItemsAndPaths.current = prunedItemAndPaths; + if (uploadItemsAndPaths.current.length === 0) { + props.setLoading(false); + return; + } + + const importSuggestion = getImportSuggestion( + pickedUploadType.current, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + prunedItemAndPaths.map(([_, p]) => p), + ); + setImportSuggestion(importSuggestion); + + log.debug(() => "Uploader invoked:"); + log.debug(() => uploadItemsAndPaths.current); + log.debug(() => importSuggestion); + + const _pickedUploadType = pickedUploadType.current; + pickedUploadType.current = null; + props.setLoading(false); + + (async () => { + if (publicCollectionGalleryContext.accessedThroughSharedURL) { + const uploaderName = await getPublicCollectionUploaderName( + getPublicCollectionUID( + publicCollectionGalleryContext.token, + ), + ); + uploaderNameRef.current = uploaderName; + showUserNameInputDialog(); return; } - const importSuggestion = getImportSuggestion( - pickedUploadType.current, - fileOrPathsToUpload.current.map((file) => - /** TODO(MR): Is path valid for Web files? */ - typeof file == "string" ? 
file : file["path"], - ), - ); - setImportSuggestion(importSuggestion); + if (isPendingDesktopUpload.current) { + isPendingDesktopUpload.current = false; + if (pendingDesktopUploadCollectionName.current) { + uploadFilesToNewCollections( + "root", + pendingDesktopUploadCollectionName.current, + ); + pendingDesktopUploadCollectionName.current = null; + } else { + uploadFilesToNewCollections("parent"); + } + return; + } - handleCollectionCreationAndUpload( - importSuggestion, - props.isFirstUpload, - pickedUploadType.current, - publicCollectionGalleryContext.accessedThroughSharedURL, - ); - pickedUploadType.current = null; - props.setLoading(false); - } - }, [webFiles, appContext.sharedFiles, electronFiles, desktopFilePaths]); + if (electron && _pickedUploadType === PICKED_UPLOAD_TYPE.ZIPS) { + uploadFilesToNewCollections("parent"); + return; + } - const resumeDesktopUpload = async ( - type: PICKED_UPLOAD_TYPE, - electronFiles: ElectronFile[], - collectionName: string, - ) => { - if (electronFiles && electronFiles?.length > 0) { - isPendingDesktopUpload.current = true; - pendingDesktopUploadCollectionName.current = collectionName; - pickedUploadType.current = type; - setElectronFiles(electronFiles); - } - }; + if (isFirstUpload && !importSuggestion.rootFolderName) { + importSuggestion.rootFolderName = t( + "autogenerated_first_album_name", + ); + } + + if (isDragAndDrop.current) { + isDragAndDrop.current = false; + if ( + props.activeCollection && + props.activeCollection.owner.id === galleryContext.user?.id + ) { + uploadFilesToExistingCollection(props.activeCollection); + return; + } + } + + let showNextModal = () => {}; + if (importSuggestion.hasNestedFolders) { + showNextModal = () => setChoiceModalView(true); + } else { + showNextModal = () => + showCollectionCreateModal(importSuggestion.rootFolderName); + } + + props.setCollectionSelectorAttributes({ + callback: uploadFilesToExistingCollection, + onCancel: handleCollectionSelectorCancel, + showNextModal, + 
intent: CollectionSelectorIntent.upload, + }); + })(); + }, [webFiles, desktopFiles, desktopFilePaths, desktopZipItems]); const preCollectionCreationAction = async () => { props.closeCollectionSelector?.(); @@ -417,100 +473,78 @@ export default function Uploader(props: Props) { collection: Collection, uploaderName?: string, ) => { - try { - log.info( - `Uploading files existing collection id ${collection.id} (${collection.name})`, - ); - await preCollectionCreationAction(); - const filesWithCollectionToUpload = fileOrPathsToUpload.current.map( - (fileOrPath, index) => ({ - fileOrPath, - localID: index, - collectionID: collection.id, - }), - ); - await waitInQueueAndUploadFiles( - filesWithCollectionToUpload, - [collection], - uploaderName, - ); - } catch (e) { - log.error("Failed to upload files to existing collection", e); - } + await preCollectionCreationAction(); + const uploadItemsWithCollection = uploadItemsAndPaths.current.map( + ([uploadItem], index) => ({ + uploadItem, + localID: index, + collectionID: collection.id, + }), + ); + await waitInQueueAndUploadFiles( + uploadItemsWithCollection, + [collection], + uploaderName, + ); + uploadItemsAndPaths.current = null; }; const uploadFilesToNewCollections = async ( mapping: CollectionMapping, collectionName?: string, ) => { - try { - log.info( - `Uploading files to collection using ${mapping} mapping (${collectionName ?? 
""})`, + await preCollectionCreationAction(); + let uploadItemsWithCollection: UploadItemWithCollection[] = []; + const collections: Collection[] = []; + let collectionNameToUploadItems = new Map(); + if (mapping == "root") { + collectionNameToUploadItems.set( + collectionName, + uploadItemsAndPaths.current.map(([i]) => i), ); - await preCollectionCreationAction(); - let filesWithCollectionToUpload: FileWithCollection[] = []; - const collections: Collection[] = []; - let collectionNameToFileOrPaths = new Map< - string, - (File | string)[] - >(); - if (mapping == "root") { - collectionNameToFileOrPaths.set( - collectionName, - fileOrPathsToUpload.current, - ); - } else { - collectionNameToFileOrPaths = groupFilesBasedOnParentFolder( - fileOrPathsToUpload.current, - ); - } - try { - const existingCollections = await getLatestCollections(); - let index = 0; - for (const [ - collectionName, - fileOrPaths, - ] of collectionNameToFileOrPaths) { - const collection = await getOrCreateAlbum( - collectionName, - existingCollections, - ); - collections.push(collection); - props.setCollections([ - ...existingCollections, - ...collections, - ]); - filesWithCollectionToUpload = [ - ...filesWithCollectionToUpload, - ...fileOrPaths.map((fileOrPath) => ({ - localID: index++, - collectionID: collection.id, - fileOrPath, - })), - ]; - } - } catch (e) { - closeUploadProgress(); - log.error("Failed to create album", e); - appContext.setDialogMessage({ - title: t("ERROR"), - close: { variant: "critical" }, - content: t("CREATE_ALBUM_FAILED"), - }); - throw e; - } - await waitInQueueAndUploadFiles( - filesWithCollectionToUpload, - collections, + } else { + collectionNameToUploadItems = groupFilesBasedOnParentFolder( + uploadItemsAndPaths.current, ); - fileOrPathsToUpload.current = null; - } catch (e) { - log.error("Failed to upload files to new collections", e); } + try { + const existingCollections = await getLatestCollections(); + let index = 0; + for (const [ + collectionName, + 
uploadItems, + ] of collectionNameToUploadItems) { + const collection = await getOrCreateAlbum( + collectionName, + existingCollections, + ); + collections.push(collection); + props.setCollections([...existingCollections, ...collections]); + uploadItemsWithCollection = [ + ...uploadItemsWithCollection, + ...uploadItems.map((uploadItem) => ({ + localID: index++, + collectionID: collection.id, + uploadItem, + })), + ]; + } + } catch (e) { + closeUploadProgress(); + log.error("Failed to create album", e); + appContext.setDialogMessage({ + title: t("ERROR"), + close: { variant: "critical" }, + content: t("CREATE_ALBUM_FAILED"), + }); + throw e; + } + await waitInQueueAndUploadFiles(uploadItemsWithCollection, collections); + uploadItemsAndPaths.current = null; }; const waitInQueueAndUploadFiles = async ( - filesWithCollectionToUploadIn: FileWithCollection[], + uploadItemsWithCollection: UploadItemWithCollection[], collections: Collection[], uploaderName?: string, ) => { @@ -519,7 +553,7 @@ export default function Uploader(props: Props) { currentPromise, async () => await uploadFiles( - filesWithCollectionToUploadIn, + uploadItemsWithCollection, collections, uploaderName, ), @@ -540,7 +574,7 @@ export default function Uploader(props: Props) { } const uploadFiles = async ( - filesWithCollectionToUploadIn: FileWithCollection[], + uploadItemsWithCollection: UploadItemWithCollection[], collections: Collection[], uploaderName?: string, ) => { @@ -551,27 +585,16 @@ export default function Uploader(props: Props) { !isPendingDesktopUpload.current && !watcher.isUploadRunning() ) { - await setToUploadCollection(collections); - if (zipPaths.current) { - await electron.setPendingUploadFiles( - "zips", - zipPaths.current, - ); - zipPaths.current = null; - } - await electron.setPendingUploadFiles( - "files", - filesWithCollectionToUploadIn.map( - // TODO(MR): ElectronFile - ({ fileOrPath }) => - typeof fileOrPath == "string" - ? 
fileOrPath - : (fileOrPath as any as ElectronFile).path, - ), + setPendingUploads( + electron, + collections, + uploadItemsWithCollection + .map(({ uploadItem }) => uploadItem) + .filter((x) => x), ); } - const wereFilesProcessed = await uploadManager.uploadFiles( - filesWithCollectionToUploadIn, + const wereFilesProcessed = await uploadManager.uploadItems( + uploadItemsWithCollection, collections, uploaderName, ); @@ -579,11 +602,12 @@ export default function Uploader(props: Props) { if (isElectron()) { if (watcher.isUploadRunning()) { await watcher.allFileUploadsDone( - filesWithCollectionToUploadIn, + uploadItemsWithCollection, collections, ); } else if (watcher.isSyncPaused()) { - // resume the service after user upload is done + // Resume folder watch after the user upload that + // interrupted it is done. watcher.resumePausedSync(); } } @@ -599,11 +623,11 @@ export default function Uploader(props: Props) { const retryFailed = async () => { try { log.info("Retrying failed uploads"); - const { files, collections } = - uploadManager.getFailedFilesWithCollections(); + const { items, collections } = + uploadManager.getFailedItemsWithCollections(); const uploaderName = uploadManager.getUploaderName(); await preUploadAction(); - await uploadManager.uploadFiles(files, collections, uploaderName); + await uploadManager.uploadItems(items, collections, uploaderName); } catch (e) { log.error("Retrying failed uploads failed", e); showUserFacingError(e.message); @@ -658,128 +682,28 @@ export default function Uploader(props: Props) { }); }; - const handleCollectionCreationAndUpload = async ( - importSuggestion: ImportSuggestion, - isFirstUpload: boolean, - pickedUploadType: PICKED_UPLOAD_TYPE, - accessedThroughSharedURL?: boolean, - ) => { - try { - if (accessedThroughSharedURL) { - const uploaderName = await getPublicCollectionUploaderName( - getPublicCollectionUID( - publicCollectionGalleryContext.token, - ), - ); - uploaderNameRef.current = uploaderName; - 
showUserNameInputDialog(); - return; - } - - if (isPendingDesktopUpload.current) { - isPendingDesktopUpload.current = false; - if (pendingDesktopUploadCollectionName.current) { - uploadFilesToNewCollections( - "root", - pendingDesktopUploadCollectionName.current, - ); - pendingDesktopUploadCollectionName.current = null; - } else { - uploadFilesToNewCollections("parent"); - } - return; - } - - if (isElectron() && pickedUploadType === PICKED_UPLOAD_TYPE.ZIPS) { - uploadFilesToNewCollections("parent"); - return; - } - - if (isFirstUpload && !importSuggestion.rootFolderName) { - importSuggestion.rootFolderName = FIRST_ALBUM_NAME; - } - - if (isDragAndDrop.current) { - isDragAndDrop.current = false; - if ( - props.activeCollection && - props.activeCollection.owner.id === galleryContext.user?.id - ) { - uploadFilesToExistingCollection(props.activeCollection); - return; - } - } - - let showNextModal = () => {}; - if (importSuggestion.hasNestedFolders) { - showNextModal = () => setChoiceModalView(true); - } else { - showNextModal = () => - showCollectionCreateModal(importSuggestion.rootFolderName); - } - - props.setCollectionSelectorAttributes({ - callback: uploadFilesToExistingCollection, - onCancel: handleCollectionSelectorCancel, - showNextModal, - intent: CollectionSelectorIntent.upload, - }); - } catch (e) { - // TODO(MR): Why? 
- log.warn("Ignoring error in handleCollectionCreationAndUpload", e); - } - }; - - const handleDesktopUpload = async ( - type: PICKED_UPLOAD_TYPE, - electron: Electron, - ) => { - let files: ElectronFile[]; - pickedUploadType.current = type; - if (type === PICKED_UPLOAD_TYPE.FILES) { - files = await electron.showUploadFilesDialog(); - } else if (type === PICKED_UPLOAD_TYPE.FOLDERS) { - files = await electron.showUploadDirsDialog(); - } else { - const response = await electron.showUploadZipDialog(); - files = response.files; - zipPaths.current = response.zipPaths; - } - if (files?.length > 0) { - log.info( - ` desktop upload for type:${type} and fileCount: ${files?.length} requested`, - ); - setElectronFiles(files); - props.closeUploadTypeSelector(); - } - }; - - const handleWebUpload = async (type: PICKED_UPLOAD_TYPE) => { - pickedUploadType.current = type; - if (type === PICKED_UPLOAD_TYPE.FILES) { - props.showUploadFilesDialog(); - } else if (type === PICKED_UPLOAD_TYPE.FOLDERS) { - props.showUploadDirsDialog(); - } else { - appContext.setDialogMessage(getDownloadAppMessage()); - } - }; - const cancelUploads = () => { uploadManager.cancelRunningUpload(); }; - const handleUpload = (type) => () => { - if (electron) { - handleDesktopUpload(type, electron); + const handleUpload = (type: PICKED_UPLOAD_TYPE) => { + pickedUploadType.current = type; + if (type === PICKED_UPLOAD_TYPE.FILES) { + openFileSelector(); + } else if (type === PICKED_UPLOAD_TYPE.FOLDERS) { + openFolderSelector(); } else { - handleWebUpload(type); + if (openZipFileSelector && electron) { + openZipFileSelector(); + } else { + appContext.setDialogMessage(getDownloadAppMessage()); + } } }; - const handleFileUpload = handleUpload(PICKED_UPLOAD_TYPE.FILES); - const handleFolderUpload = handleUpload(PICKED_UPLOAD_TYPE.FOLDERS); - const handleZipUpload = handleUpload(PICKED_UPLOAD_TYPE.ZIPS); + const handleFileUpload = () => handleUpload(PICKED_UPLOAD_TYPE.FILES); + const handleFolderUpload = () => 
handleUpload(PICKED_UPLOAD_TYPE.FOLDERS); + const handleZipUpload = () => handleUpload(PICKED_UPLOAD_TYPE.ZIPS); const handlePublicUpload = async ( uploaderName: string, @@ -803,28 +727,33 @@ export default function Uploader(props: Props) { } }; - const handleUploadToSingleCollection = () => { - uploadToSingleNewCollection(importSuggestion.rootFolderName); - }; - - const handleUploadToMultipleCollections = () => { - if (importSuggestion.hasRootLevelFileWithFolder) { - appContext.setDialogMessage( - getRootLevelFileWithFolderNotAllowMessage(), - ); - return; - } - uploadFilesToNewCollections("parent"); - }; - const didSelectCollectionMapping = (mapping: CollectionMapping) => { switch (mapping) { case "root": - handleUploadToSingleCollection(); + uploadToSingleNewCollection( + // rootFolderName would be empty here if one edge case: + // - User drags and drops a mixture of files and folders + // - They select the "upload to multiple albums" option + // - The see the error, close the error + // - Then they select the "upload to single album" option + // + // In such a flow, we'll reach here with an empty + // rootFolderName. The proper fix for this would be + // rearrange the flow and ask them to name the album here, + // but we currently don't have support for chaining modals. + // So in the meanwhile, keep a fallback album name at hand. + importSuggestion.rootFolderName ?? 
+ t("autogenerated_default_album_name"), + ); break; case "parent": - handleUploadToMultipleCollections(); - break; + if (importSuggestion.hasRootLevelFileWithFolder) { + appContext.setDialogMessage( + getRootLevelFileWithFolderNotAllowMessage(), + ); + } else { + uploadFilesToNewCollections("parent"); + } } }; @@ -860,7 +789,7 @@ export default function Uploader(props: Props) { open={userNameInputDialogView} onClose={handleUserNameInputDialogClose} onNameSubmit={handlePublicUpload} - toUploadFilesCount={fileOrPathsToUpload.current?.length} + toUploadFilesCount={uploadItemsAndPaths.current?.length} uploaderName={uploaderNameRef.current} /> @@ -876,3 +805,143 @@ async function waitAndRun( } await task(); } + +const desktopFilesAndZipItems = async (electron: Electron, files: File[]) => { + const fileAndPaths: FileAndPath[] = []; + let zipItems: ZipItem[] = []; + + for (const file of files) { + const path = electron.pathForFile(file); + if (file.name.endsWith(".zip")) { + zipItems = zipItems.concat(await electron.listZipItems(path)); + } else { + fileAndPaths.push({ file, path }); + } + } + + return { fileAndPaths, zipItems }; +}; + +// This is used to prompt the user the make upload strategy choice +interface ImportSuggestion { + rootFolderName: string; + hasNestedFolders: boolean; + hasRootLevelFileWithFolder: boolean; +} + +const DEFAULT_IMPORT_SUGGESTION: ImportSuggestion = { + rootFolderName: "", + hasNestedFolders: false, + hasRootLevelFileWithFolder: false, +}; + +function getImportSuggestion( + uploadType: PICKED_UPLOAD_TYPE, + paths: string[], +): ImportSuggestion { + if (isElectron() && uploadType === PICKED_UPLOAD_TYPE.FILES) { + return DEFAULT_IMPORT_SUGGESTION; + } + + const getCharCount = (str: string) => (str.match(/\//g) ?? 
[]).length; + paths.sort((path1, path2) => getCharCount(path1) - getCharCount(path2)); + const firstPath = paths[0]; + const lastPath = paths[paths.length - 1]; + + const L = firstPath.length; + let i = 0; + const firstFileFolder = firstPath.substring(0, firstPath.lastIndexOf("/")); + const lastFileFolder = lastPath.substring(0, lastPath.lastIndexOf("/")); + + while (i < L && firstPath.charAt(i) === lastPath.charAt(i)) i++; + let commonPathPrefix = firstPath.substring(0, i); + + if (commonPathPrefix) { + commonPathPrefix = commonPathPrefix.substring( + 0, + commonPathPrefix.lastIndexOf("/"), + ); + if (commonPathPrefix) { + commonPathPrefix = commonPathPrefix.substring( + commonPathPrefix.lastIndexOf("/") + 1, + ); + } + } + return { + rootFolderName: commonPathPrefix || null, + hasNestedFolders: firstFileFolder !== lastFileFolder, + hasRootLevelFileWithFolder: firstFileFolder === "", + }; +} + +// This function groups files that are that have the same parent folder into collections +// For Example, for user files have a directory structure like this +// a +// / | \ +// b j c +// /|\ / \ +// e f g h i +// +// The files will grouped into 3 collections. 
+// [a => [j], +// b => [e,f,g], +// c => [h, i]] +const groupFilesBasedOnParentFolder = ( + uploadItemsAndPaths: [UploadItem, string][], +) => { + const result = new Map(); + for (const [uploadItem, pathOrName] of uploadItemsAndPaths) { + let folderPath = pathOrName.substring(0, pathOrName.lastIndexOf("/")); + // If the parent folder of a file is "metadata" + // we consider it to be part of the parent folder + // For Eg,For FileList -> [a/x.png, a/metadata/x.png.json] + // they will both we grouped into the collection "a" + // This is cluster the metadata json files in the same collection as the file it is for + if (folderPath.endsWith(exportMetadataDirectoryName)) { + folderPath = folderPath.substring(0, folderPath.lastIndexOf("/")); + } + const folderName = folderPath.substring( + folderPath.lastIndexOf("/") + 1, + ); + if (!folderName) throw Error("Unexpected empty folder name"); + if (!result.has(folderName)) result.set(folderName, []); + result.get(folderName).push(uploadItem); + } + return result; +}; + +export const setPendingUploads = async ( + electron: Electron, + collections: Collection[], + uploadItems: UploadItem[], +) => { + let collectionName: string | undefined; + /* collection being one suggest one of two things + 1. Either the user has upload to a single existing collection + 2. 
Created a new single collection to upload to + may have had multiple folder, but chose to upload + to one album + hence saving the collection name when upload collection count is 1 + helps the info of user choosing this options + and on next upload we can directly start uploading to this collection + */ + if (collections.length == 1) { + collectionName = collections[0].name; + } + + const filePaths: string[] = []; + const zipItems: ZipItem[] = []; + for (const item of uploadItems) { + if (item instanceof File) { + throw new Error("Unexpected web file for a desktop pending upload"); + } else if (typeof item == "string") { + filePaths.push(item); + } else if (Array.isArray(item)) { + zipItems.push(item); + } else { + filePaths.push(item.path); + } + } + + await electron.setPendingUploads({ collectionName, filePaths, zipItems }); +}; diff --git a/web/apps/photos/src/components/UploadSelectorInputs.tsx b/web/apps/photos/src/components/UploadSelectorInputs.tsx index 1b110d532..13e33fc6d 100644 --- a/web/apps/photos/src/components/UploadSelectorInputs.tsx +++ b/web/apps/photos/src/components/UploadSelectorInputs.tsx @@ -2,12 +2,16 @@ export default function UploadSelectorInputs({ getDragAndDropInputProps, getFileSelectorInputProps, getFolderSelectorInputProps, + getZipFileSelectorInputProps, }) { return ( <> + {getZipFileSelectorInputProps && ( + + )} ); } diff --git a/web/apps/photos/src/components/WatchFolder.tsx b/web/apps/photos/src/components/WatchFolder.tsx index 738bafde8..710a54168 100644 --- a/web/apps/photos/src/components/WatchFolder.tsx +++ b/web/apps/photos/src/components/WatchFolder.tsx @@ -1,5 +1,5 @@ import { ensureElectron } from "@/next/electron"; -import { basename } from "@/next/file"; +import { basename, dirname } from "@/next/file"; import type { CollectionMapping, FolderWatch } from "@/next/types/ipc"; import { ensure } from "@/utils/ensure"; import { @@ -32,7 +32,6 @@ import { t } from "i18next"; import { AppContext } from "pages/_app"; import 
React, { useContext, useEffect, useState } from "react"; import watcher from "services/watch"; -import { areAllInSameDirectory } from "utils/upload"; interface WatchFolderProps { open: boolean; @@ -324,3 +323,12 @@ const EntryOptions: React.FC = ({ confirmStopWatching }) => { ); }; + +/** + * Return true if all the paths in the given list are items that belong to the + * same (arbitrary) directory. + * + * Empty list of paths is considered to be in the same directory. + */ +const areAllInSameDirectory = (paths: string[]) => + new Set(paths.map(dirname)).size == 1; diff --git a/web/apps/photos/src/components/ml/MLSearchSettings.tsx b/web/apps/photos/src/components/ml/MLSearchSettings.tsx index 583b79529..9b50c2d6a 100644 --- a/web/apps/photos/src/components/ml/MLSearchSettings.tsx +++ b/web/apps/photos/src/components/ml/MLSearchSettings.tsx @@ -22,7 +22,7 @@ import { getFaceSearchEnabledStatus, updateFaceSearchEnabledStatus, } from "services/userService"; -import { openLink } from "utils/common"; +import { isInternalUser } from "utils/user"; export const MLSearchSettings = ({ open, onClose, onRootClose }) => { const { @@ -255,8 +255,8 @@ function EnableFaceSearch({ open, onClose, enableFaceSearch, onRootClose }) { } function EnableMLSearch({ onClose, enableMlSearch, onRootClose }) { - const showDetails = () => - openLink("https://ente.io/blog/desktop-ml-beta", true); + // const showDetails = () => + // openLink("https://ente.io/blog/desktop-ml-beta", true); return ( @@ -269,25 +269,37 @@ function EnableMLSearch({ onClose, enableMlSearch, onRootClose }) { {" "} - + {/* */} +

+ We're putting finishing touches, coming back soon! +

+

+ + Existing indexed faces will continue to show. + +

- - - + {/* + - + > + {t("ML_MORE_DETAILS")} + + */} +
+ )} ); diff --git a/web/apps/photos/src/constants/upload.ts b/web/apps/photos/src/constants/upload.ts index 2ff01810f..a0103cb6e 100644 --- a/web/apps/photos/src/constants/upload.ts +++ b/web/apps/photos/src/constants/upload.ts @@ -1,15 +1,5 @@ -import { ENCRYPTION_CHUNK_SIZE } from "@ente/shared/crypto/constants"; import { Location } from "types/metadata"; -// this is the chunk size of the un-encrypted file which is read and encrypted before uploading it as a single part. -export const MULTIPART_PART_SIZE = 20 * 1024 * 1024; - -export const FILE_READER_CHUNK_SIZE = ENCRYPTION_CHUNK_SIZE; - -export const FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART = Math.floor( - MULTIPART_PART_SIZE / FILE_READER_CHUNK_SIZE, -); - export const RANDOM_PERCENTAGE_PROGRESS_FOR_PUT = () => 90 + 10 * Math.random(); export const NULL_LOCATION: Location = { latitude: null, longitude: null }; @@ -34,38 +24,3 @@ export enum UPLOAD_RESULT { UPLOADED_WITH_STATIC_THUMBNAIL, ADDED_SYMLINK, } - -export enum PICKED_UPLOAD_TYPE { - FILES = "files", - FOLDERS = "folders", - ZIPS = "zips", -} - -export const BLACK_THUMBNAIL_BASE64 = - "/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAEBAQEBAQEB" + - "AQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/2wBDAQEBAQEBAQ" + - "EBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/wAARC" + - "ACWASwDAREAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUF" + - "BAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk" + - "6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztL" + - "W2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAA" + - "AAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVY" + - "nLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImK" + - "kpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oAD" + 
- "AMBAAIRAxEAPwD/AD/6ACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" + - "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" + - "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKAC" + - "gAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" + - "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" + - "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" + - "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" + - "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" + - "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoA" + - "KACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" + - "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" + - "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" + - "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAK" + - "ACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoA" + - "KACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" + - "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" + - "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgD/9k="; diff --git a/web/apps/photos/src/pages/_app.tsx b/web/apps/photos/src/pages/_app.tsx index 4b5fe3107..0e80d0df9 100644 --- a/web/apps/photos/src/pages/_app.tsx +++ 
b/web/apps/photos/src/pages/_app.tsx @@ -80,8 +80,6 @@ const redirectMap = new Map([ type AppContextType = { showNavBar: (show: boolean) => void; - sharedFiles: File[]; - resetSharedFiles: () => void; mlSearchEnabled: boolean; mapEnabled: boolean; updateMlSearchEnabled: (enabled: boolean) => Promise; @@ -114,7 +112,6 @@ export default function App({ Component, pageProps }: AppProps) { typeof window !== "undefined" && !window.navigator.onLine, ); const [showNavbar, setShowNavBar] = useState(false); - const [sharedFiles, setSharedFiles] = useState(null); const [redirectName, setRedirectName] = useState(null); const [mlSearchEnabled, setMlSearchEnabled] = useState(false); const [mapEnabled, setMapEnabled] = useState(false); @@ -227,7 +224,6 @@ export default function App({ Component, pageProps }: AppProps) { const setUserOnline = () => setOffline(false); const setUserOffline = () => setOffline(true); - const resetSharedFiles = () => setSharedFiles(null); useEffect(() => { const redirectTo = async (redirect) => { @@ -352,22 +348,8 @@ export default function App({ Component, pageProps }: AppProps) { {showNavbar && } - {offline && t("OFFLINE_MSG")} + {isI18nReady && offline && t("OFFLINE_MSG")} - {sharedFiles && - (router.pathname === "/gallery" ? 
( - - {t("files_to_be_uploaded", { - count: sharedFiles.length, - })} - - ) : ( - - {t("login_to_upload_files", { - count: sharedFiles.length, - })} - - ))} syncWithRemote(false, true)); } }; @@ -1023,6 +1031,7 @@ export default function Gallery() { getDragAndDropInputProps={getDragAndDropInputProps} getFileSelectorInputProps={getFileSelectorInputProps} getFolderSelectorInputProps={getFolderSelectorInputProps} + getZipFileSelectorInputProps={getZipFileSelectorInputProps} /> {blockingLoad && ( @@ -1112,7 +1121,6 @@ export default function Gallery() { null, false, )} - uploadTypeSelectorIntent={uploadTypeSelectorIntent} setLoading={setBlockingLoad} setCollectionNamerAttributes={setCollectionNamerAttributes} setShouldDisableDropzone={setShouldDisableDropzone} @@ -1121,13 +1129,18 @@ export default function Gallery() { isFirstUpload={ !hasNonSystemCollections(collectionSummaries) } - webFileSelectorFiles={webFileSelectorFiles} - webFolderSelectorFiles={webFolderSelectorFiles} - dragAndDropFiles={dragAndDropFiles} - uploadTypeSelectorView={uploadTypeSelectorView} - showUploadFilesDialog={openFileSelector} - showUploadDirsDialog={openFolderSelector} - showSessionExpiredMessage={showSessionExpiredMessage} + {...{ + dragAndDropFiles, + openFileSelector, + fileSelectorFiles, + openFolderSelector, + folderSelectorFiles, + openZipFileSelector, + fileSelectorZipFiles, + uploadTypeSelectorIntent, + uploadTypeSelectorView, + showSessionExpiredMessage, + }} /> { const detectFileTypeFromBuffer = async (buffer: Uint8Array) => { const result = await FileType.fromBuffer(buffer); - if (!result?.ext || !result?.mime) { - throw Error(`Could not deduce file type from buffer`); - } + if (!result) + throw Error("Could not deduce file type from the file's contents"); return result; }; diff --git a/web/apps/photos/src/services/download/index.ts b/web/apps/photos/src/services/download/index.ts index 37eeac440..7b0171da1 100644 --- a/web/apps/photos/src/services/download/index.ts +++ 
b/web/apps/photos/src/services/download/index.ts @@ -10,7 +10,7 @@ import { Events, eventBus } from "@ente/shared/events"; import { isPlaybackPossible } from "@ente/shared/media/video-playback"; import { Remote } from "comlink"; import isElectron from "is-electron"; -import * as ffmpegService from "services/ffmpeg"; +import * as ffmpeg from "services/ffmpeg"; import { EnteFile } from "types/file"; import { generateStreamFromArrayBuffer, getRenderableImage } from "utils/file"; import { PhotosDownloadClient } from "./clients/photos"; @@ -150,7 +150,7 @@ class DownloadManagerImpl { this.ensureInitialized(); const key = file.id.toString(); - const cached = await this.thumbnailCache.get(key); + const cached = await this.thumbnailCache?.get(key); if (cached) return new Uint8Array(await cached.arrayBuffer()); if (localOnly) return null; @@ -610,17 +610,13 @@ async function getPlayableVideo( if (!forceConvert && !runOnWeb && !isElectron()) { return null; } - log.info( - `video format not supported, converting it name: ${videoNameTitle}`, - ); - const mp4ConvertedVideo = await ffmpegService.convertToMP4( - new File([videoBlob], videoNameTitle), - ); - log.info(`video successfully converted ${videoNameTitle}`); - return new Blob([mp4ConvertedVideo]); + // TODO(MR): This might not work for very large (~ GB) videos. Test. 
+ log.info(`Converting video ${videoNameTitle} to mp4`); + const convertedVideoData = await ffmpeg.convertToMP4(videoBlob); + return new Blob([convertedVideoData]); } } catch (e) { - log.error("video conversion failed", e); + log.error("Video conversion failed", e); return null; } } diff --git a/web/apps/photos/src/services/embeddingService.ts b/web/apps/photos/src/services/embeddingService.ts index a4309e314..36af84842 100644 --- a/web/apps/photos/src/services/embeddingService.ts +++ b/web/apps/photos/src/services/embeddingService.ts @@ -86,7 +86,11 @@ export const syncEmbeddings = async () => { allLocalFiles.forEach((file) => { fileIdToKeyMap.set(file.id, file.key); }); - await cleanupDeletedEmbeddings(allLocalFiles, allEmbeddings); + await cleanupDeletedEmbeddings( + allLocalFiles, + allEmbeddings, + EMBEDDINGS_TABLE, + ); log.info(`Syncing embeddings localCount: ${allEmbeddings.length}`); for (const model of models) { let modelLastSinceTime = await getModelEmbeddingSyncTime(model); @@ -168,7 +172,11 @@ export const syncFileEmbeddings = async () => { allLocalFiles.forEach((file) => { fileIdToKeyMap.set(file.id, file.key); }); - await cleanupDeletedEmbeddings(allLocalFiles, allEmbeddings); + await cleanupDeletedEmbeddings( + allLocalFiles, + allEmbeddings, + FILE_EMBEDING_TABLE, + ); log.info(`Syncing embeddings localCount: ${allEmbeddings.length}`); for (const model of models) { let modelLastSinceTime = await getModelEmbeddingSyncTime(model); @@ -289,6 +297,7 @@ export const putEmbedding = async ( export const cleanupDeletedEmbeddings = async ( allLocalFiles: EnteFile[], allLocalEmbeddings: Embedding[] | FileML[], + tableName: string, ) => { const activeFileIds = new Set(); allLocalFiles.forEach((file) => { @@ -302,6 +311,6 @@ export const cleanupDeletedEmbeddings = async ( log.info( `cleanupDeletedEmbeddings embeddingsCount: ${allLocalEmbeddings.length} remainingEmbeddingsCount: ${remainingEmbeddings.length}`, ); - await localForage.setItem(EMBEDDINGS_TABLE, 
remainingEmbeddings); + await localForage.setItem(tableName, remainingEmbeddings); } }; diff --git a/web/apps/photos/src/services/exif.ts b/web/apps/photos/src/services/exif.ts index 584d79f88..073a695f7 100644 --- a/web/apps/photos/src/services/exif.ts +++ b/web/apps/photos/src/services/exif.ts @@ -167,14 +167,7 @@ function parseExifData(exifData: RawEXIFData): ParsedEXIFData { parsedExif.imageWidth = ImageWidth; parsedExif.imageHeight = ImageHeight; } else { - log.error( - `Image dimension parsing failed - ImageWidth or ImageHeight is not a number ${JSON.stringify( - { - ImageWidth, - ImageHeight, - }, - )}`, - ); + log.warn("EXIF: Ignoring non-numeric ImageWidth or ImageHeight"); } } else if (ExifImageWidth && ExifImageHeight) { if ( @@ -184,13 +177,8 @@ function parseExifData(exifData: RawEXIFData): ParsedEXIFData { parsedExif.imageWidth = ExifImageWidth; parsedExif.imageHeight = ExifImageHeight; } else { - log.error( - `Image dimension parsing failed - ExifImageWidth or ExifImageHeight is not a number ${JSON.stringify( - { - ExifImageWidth, - ExifImageHeight, - }, - )}`, + log.warn( + "EXIF: Ignoring non-numeric ExifImageWidth or ExifImageHeight", ); } } else if (PixelXDimension && PixelYDimension) { @@ -201,13 +189,8 @@ function parseExifData(exifData: RawEXIFData): ParsedEXIFData { parsedExif.imageWidth = PixelXDimension; parsedExif.imageHeight = PixelYDimension; } else { - log.error( - `Image dimension parsing failed - PixelXDimension or PixelYDimension is not a number ${JSON.stringify( - { - PixelXDimension, - PixelYDimension, - }, - )}`, + log.warn( + "EXIF: Ignoring non-numeric PixelXDimension or PixelYDimension", ); } } @@ -302,15 +285,13 @@ export function parseEXIFLocation( ); return { latitude, longitude }; } catch (e) { - log.error( - `Failed to parseEXIFLocation ${JSON.stringify({ - gpsLatitude, - gpsLatitudeRef, - gpsLongitude, - gpsLongitudeRef, - })}`, - e, - ); + const p = { + gpsLatitude, + gpsLatitudeRef, + gpsLongitude, + gpsLongitudeRef, + 
}; + log.error(`Failed to parse EXIF location ${JSON.stringify(p)}`, e); return { ...NULL_LOCATION }; } } diff --git a/web/apps/photos/src/services/export/index.ts b/web/apps/photos/src/services/export/index.ts index 5a732658a..b02e05a42 100644 --- a/web/apps/photos/src/services/export/index.ts +++ b/web/apps/photos/src/services/export/index.ts @@ -46,13 +46,13 @@ const exportRecordFileName = "export_status.json"; /** * Name of the top level directory which we create underneath the selected - * directory when the user starts an export to the filesystem. + * directory when the user starts an export to the file system. */ const exportDirectoryName = "Ente Photos"; /** - * Name of the directory in which we put our metadata when exporting to the - * filesystem. + * Name of the directory in which we put our metadata when exporting to the file + * system. */ export const exportMetadataDirectoryName = "metadata"; @@ -547,6 +547,9 @@ class ExportService { isCanceled: CancellationStatus, ) { const fs = ensureElectron().fs; + const rmdirIfExists = async (dirPath: string) => { + if (await fs.exists(dirPath)) await fs.rmdir(dirPath); + }; try { const exportRecord = await this.getExportRecord(exportFolder); const collectionIDPathMap = @@ -581,11 +584,11 @@ class ExportService { ); try { // delete the collection metadata folder - await fs.rmdir( + await rmdirIfExists( getMetadataFolderExportPath(collectionExportPath), ); // delete the collection folder - await fs.rmdir(collectionExportPath); + await rmdirIfExists(collectionExportPath); } catch (e) { await this.addCollectionExportedRecord( exportFolder, @@ -1378,7 +1381,7 @@ const isExportInProgress = (exportStage: ExportStage) => * * Also move its associated metadata JSON to Trash. * - * @param exportDir The root directory on the user's filesystem where we are + * @param exportDir The root directory on the user's file system where we are * exporting to. 
* */ const moveToTrash = async ( @@ -1398,17 +1401,19 @@ const moveToTrash = async ( if (await fs.exists(filePath)) { await fs.mkdirIfNeeded(trashDir); - const trashFilePath = await safeFileName(trashDir, fileName, fs.exists); + const trashFileName = await safeFileName(trashDir, fileName, fs.exists); + const trashFilePath = `${trashDir}/${trashFileName}`; await fs.rename(filePath, trashFilePath); } if (await fs.exists(metadataFilePath)) { await fs.mkdirIfNeeded(metadataTrashDir); - const metadataTrashFilePath = await safeFileName( + const metadataTrashFileName = await safeFileName( metadataTrashDir, metadataFileName, fs.exists, ); - await fs.rename(filePath, metadataTrashFilePath); + const metadataTrashFilePath = `${metadataTrashDir}/${metadataTrashFileName}`; + await fs.rename(metadataFilePath, metadataTrashFilePath); } }; diff --git a/web/apps/photos/src/services/ffmpeg.ts b/web/apps/photos/src/services/ffmpeg.ts index 6fc2404e2..4dfdb3f64 100644 --- a/web/apps/photos/src/services/ffmpeg.ts +++ b/web/apps/photos/src/services/ffmpeg.ts @@ -1,4 +1,3 @@ -import { ElectronFile } from "@/next/types/file"; import type { Electron } from "@/next/types/ipc"; import { ComlinkWorker } from "@/next/worker/comlink-worker"; import { validateAndGetCreationUnixTimeInMicroSeconds } from "@ente/shared/time"; @@ -11,6 +10,11 @@ import { import { NULL_LOCATION } from "constants/upload"; import type { ParsedExtractedMetadata } from "types/metadata"; import type { DedicatedFFmpegWorker } from "worker/ffmpeg.worker"; +import { + toDataOrPathOrZipEntry, + type DesktopUploadItem, + type UploadItem, +} from "./upload/types"; /** * Generate a thumbnail for the given video using a wasm FFmpeg running in a web @@ -51,7 +55,7 @@ const _generateVideoThumbnail = async ( * for the new files that the user is adding. * * @param dataOrPath The input video's data or the path to the video on the - * user's local filesystem. See: [Note: Reading a fileOrPath]. + * user's local file system. 
See: [Note: Reading a UploadItem]. * * @returns JPEG data of the generated thumbnail. * @@ -59,12 +63,12 @@ const _generateVideoThumbnail = async ( */ export const generateVideoThumbnailNative = async ( electron: Electron, - dataOrPath: Uint8Array | string, + desktopUploadItem: DesktopUploadItem, ) => _generateVideoThumbnail((seekTime: number) => electron.ffmpegExec( makeGenThumbnailCommand(seekTime), - dataOrPath, + toDataOrPathOrZipEntry(desktopUploadItem), "jpeg", 0, ), @@ -93,18 +97,23 @@ const makeGenThumbnailCommand = (seekTime: number) => [ * This function is called during upload, when we need to extract the metadata * of videos that the user is uploading. * - * @param fileOrPath A {@link File}, or the absolute path to a file on the + * @param uploadItem A {@link File}, or the absolute path to a file on the * user's local filesytem. A path can only be provided when we're running in the * context of our desktop app. */ export const extractVideoMetadata = async ( - fileOrPath: File | string, + uploadItem: UploadItem, ): Promise => { const command = extractVideoMetadataCommand; const outputData = - fileOrPath instanceof File - ? await ffmpegExecWeb(command, fileOrPath, "txt", 0) - : await electron.ffmpegExec(command, fileOrPath, "txt", 0); + uploadItem instanceof File + ? await ffmpegExecWeb(command, uploadItem, "txt", 0) + : await electron.ffmpegExec( + command, + toDataOrPathOrZipEntry(uploadItem), + "txt", + 0, + ); return parseFFmpegExtractedMetadata(outputData); }; @@ -200,23 +209,6 @@ function parseCreationTime(creationTime: string) { return dateTime; } -/** Called when viewing a file */ -export async function convertToMP4(file: File) { - return await ffmpegExec2( - [ - ffmpegPathPlaceholder, - "-i", - inputPathPlaceholder, - "-preset", - "ultrafast", - outputPathPlaceholder, - ], - file, - "mp4", - 30 * 1000, - ); -} - /** * Run the given FFmpeg command using a wasm FFmpeg running in a web worker. 
* @@ -234,55 +226,53 @@ const ffmpegExecWeb = async ( }; /** - * Run the given FFmpeg command using a native FFmpeg binary bundled with our - * desktop app. + * Convert a video from a format that is not supported in the browser to MP4. + * + * This function is called when the user views a video or a live photo, and we + * want to play it back. The idea is to convert it to MP4 which has much more + * universal support in browsers. + * + * @param blob The video blob. + * + * @returns The mp4 video data. + */ +export const convertToMP4 = async (blob: Blob) => + ffmpegExecNativeOrWeb( + [ + ffmpegPathPlaceholder, + "-i", + inputPathPlaceholder, + "-preset", + "ultrafast", + outputPathPlaceholder, + ], + blob, + "mp4", + 30 * 1000, + ); + +/** + * Run the given FFmpeg command using a native FFmpeg binary when we're running + * in the context of our desktop app, otherwise using the browser based wasm + * FFmpeg implemenation. * * See also: {@link ffmpegExecWeb}. */ -/* -TODO(MR): Remove me -const ffmpegExecNative = async ( - electron: Electron, +const ffmpegExecNativeOrWeb = async ( command: string[], blob: Blob, - timeoutMs: number = 0, -) => { - const electron = globalThis.electron; - if (electron) { - const data = new Uint8Array(await blob.arrayBuffer()); - return await electron.ffmpegExec(command, data, timeoutMs); - } else { - const worker = await workerFactory.lazy(); - return await worker.exec(command, blob, timeoutMs); - } -}; -*/ - -const ffmpegExec2 = async ( - command: string[], - inputFile: File | ElectronFile, outputFileExtension: string, - timeoutMS: number = 0, + timeoutMs: number, ) => { const electron = globalThis.electron; - if (electron || false) { - throw new Error("WIP"); - // return electron.ffmpegExec( - // command, - // /* TODO(MR): ElectronFile changes */ - // inputFile as unknown as string, - // outputFileName, - // timeoutMS, - // ); - } else { - /* TODO(MR): ElectronFile changes */ - return ffmpegExecWeb( + if (electron) + return 
electron.ffmpegExec( command, - inputFile as File, + new Uint8Array(await blob.arrayBuffer()), outputFileExtension, - timeoutMS, + timeoutMs, ); - } + else return ffmpegExecWeb(command, blob, outputFileExtension, timeoutMs); }; /** Lazily create a singleton instance of our worker */ diff --git a/web/apps/photos/src/services/heic-convert.ts b/web/apps/photos/src/services/heic-convert.ts index 478cce218..c2ea19839 100644 --- a/web/apps/photos/src/services/heic-convert.ts +++ b/web/apps/photos/src/services/heic-convert.ts @@ -1,4 +1,3 @@ -import { convertBytesToHumanReadable } from "@/next/file"; import log from "@/next/log"; import { ComlinkWorker } from "@/next/worker/comlink-worker"; import { CustomError } from "@ente/shared/error"; @@ -51,15 +50,10 @@ class HEICConverter { const startTime = Date.now(); const convertedHEIC = await worker.heicToJPEG(fileBlob); - log.info( - `originalFileSize:${convertBytesToHumanReadable( - fileBlob?.size, - )},convertedFileSize:${convertBytesToHumanReadable( - convertedHEIC?.size, - )}, heic conversion time: ${ - Date.now() - startTime - }ms `, + const ms = Math.round( + Date.now() - startTime, ); + log.debug(() => `heic => jpeg (${ms} ms)`); clearTimeout(timeout); resolve(convertedHEIC); } catch (e) { @@ -71,18 +65,7 @@ class HEICConverter { ); if (!convertedHEIC || convertedHEIC?.size === 0) { log.error( - `converted heic fileSize is Zero - ${JSON.stringify( - { - originalFileSize: - convertBytesToHumanReadable( - fileBlob?.size ?? 0, - ), - convertedFileSize: - convertBytesToHumanReadable( - convertedHEIC?.size ?? 
0, - ), - }, - )}`, + `Converted HEIC file is empty (original was ${fileBlob?.size} bytes)`, ); } await new Promise((resolve) => { @@ -94,7 +77,7 @@ class HEICConverter { this.workerPool.push(convertWorker); return convertedHEIC; } catch (e) { - log.error("heic conversion failed", e); + log.error("HEIC conversion failed", e); convertWorker.terminate(); this.workerPool.push(createComlinkWorker()); throw e; diff --git a/web/apps/photos/src/services/readerService.ts b/web/apps/photos/src/services/readerService.ts deleted file mode 100644 index a1195b35d..000000000 --- a/web/apps/photos/src/services/readerService.ts +++ /dev/null @@ -1,42 +0,0 @@ -import { ElectronFile } from "@/next/types/file"; - -export function getFileStream(file: File, chunkSize: number) { - const fileChunkReader = fileChunkReaderMaker(file, chunkSize); - - const stream = new ReadableStream({ - async pull(controller: ReadableStreamDefaultController) { - const chunk = await fileChunkReader.next(); - if (chunk.done) { - controller.close(); - } else { - controller.enqueue(chunk.value); - } - }, - }); - const chunkCount = Math.ceil(file.size / chunkSize); - return { - stream, - chunkCount, - }; -} - -async function* fileChunkReaderMaker(file: File, chunkSize: number) { - let offset = 0; - while (offset < file.size) { - const chunk = file.slice(offset, chunkSize + offset); - yield new Uint8Array(await chunk.arrayBuffer()); - offset += chunkSize; - } - return null; -} - -export async function getElectronFileStream( - file: ElectronFile, - chunkSize: number, -) { - const chunkCount = Math.ceil(file.size / chunkSize); - return { - stream: await file.stream(), - chunkCount, - }; -} diff --git a/web/apps/photos/src/services/upload/publicUploadHttpClient.ts b/web/apps/photos/src/services/upload/publicUploadHttpClient.ts index 12228b822..8f18a1638 100644 --- a/web/apps/photos/src/services/upload/publicUploadHttpClient.ts +++ b/web/apps/photos/src/services/upload/publicUploadHttpClient.ts @@ -3,7 +3,7 @@ 
import { CustomError, handleUploadError } from "@ente/shared/error"; import HTTPService from "@ente/shared/network/HTTPService"; import { getEndpoint } from "@ente/shared/network/api"; import { EnteFile } from "types/file"; -import { retryHTTPCall } from "utils/upload/uploadRetrier"; +import { retryHTTPCall } from "./uploadHttpClient"; import { MultipartUploadURLs, UploadFile, UploadURL } from "./uploadService"; const ENDPOINT = getEndpoint(); diff --git a/web/apps/photos/src/services/upload/takeout.ts b/web/apps/photos/src/services/upload/takeout.ts index 5cd16130e..24c0a9d26 100644 --- a/web/apps/photos/src/services/upload/takeout.ts +++ b/web/apps/photos/src/services/upload/takeout.ts @@ -5,6 +5,8 @@ import { nameAndExtension } from "@/next/file"; import log from "@/next/log"; import { NULL_LOCATION } from "constants/upload"; import type { Location } from "types/metadata"; +import { readStream } from "utils/native-stream"; +import type { UploadItem } from "./types"; export interface ParsedMetadataJSON { creationTime: number; @@ -75,21 +77,29 @@ function getFileOriginalName(fileName: string) { /** Try to parse the contents of a metadata JSON file from a Google Takeout. */ export const tryParseTakeoutMetadataJSON = async ( - fileOrPath: File | string, + uploadItem: UploadItem, ): Promise => { try { - const text = - fileOrPath instanceof File - ? 
await fileOrPath.text() - : await ensureElectron().fs.readTextFile(fileOrPath); - - return parseMetadataJSONText(text); + return parseMetadataJSONText(await uploadItemText(uploadItem)); } catch (e) { log.error("Failed to parse takeout metadata JSON", e); return undefined; } }; +const uploadItemText = async (uploadItem: UploadItem) => { + if (uploadItem instanceof File) { + return await uploadItem.text(); + } else if (typeof uploadItem == "string") { + return await ensureElectron().fs.readTextFile(uploadItem); + } else if (Array.isArray(uploadItem)) { + const { response } = await readStream(ensureElectron(), uploadItem); + return await response.text(); + } else { + return await uploadItem.file.text(); + } +}; + const NULL_PARSED_METADATA_JSON: ParsedMetadataJSON = { creationTime: null, modificationTime: null, diff --git a/web/apps/photos/src/services/upload/thumbnail.ts b/web/apps/photos/src/services/upload/thumbnail.ts index 4552d11b3..1dd448376 100644 --- a/web/apps/photos/src/services/upload/thumbnail.ts +++ b/web/apps/photos/src/services/upload/thumbnail.ts @@ -2,9 +2,9 @@ import { FILE_TYPE, type FileTypeInfo } from "@/media/file-type"; import log from "@/next/log"; import { type Electron } from "@/next/types/ipc"; import { withTimeout } from "@ente/shared/utils"; -import { BLACK_THUMBNAIL_BASE64 } from "constants/upload"; import * as ffmpeg from "services/ffmpeg"; import { heicToJPEG } from "services/heic-convert"; +import { toDataOrPathOrZipEntry, type DesktopUploadItem } from "./types"; /** Maximum width or height of the generated thumbnail */ const maxThumbnailDimension = 720; @@ -179,7 +179,7 @@ const percentageSizeDiff = ( * object which we use to perform IPC with the Node.js side of our desktop app. * * @param dataOrPath Contents of an image or video file, or the path to the - * image or video file on the user's local filesystem, whose thumbnail we want + * image or video file on the user's local file system, whose thumbnail we want * to generate. 
* * @param fileTypeInfo The type information for {@link dataOrPath}. @@ -190,20 +190,49 @@ const percentageSizeDiff = ( */ export const generateThumbnailNative = async ( electron: Electron, - dataOrPath: Uint8Array | string, + desktopUploadItem: DesktopUploadItem, fileTypeInfo: FileTypeInfo, ): Promise => fileTypeInfo.fileType === FILE_TYPE.IMAGE ? await electron.generateImageThumbnail( - dataOrPath, + toDataOrPathOrZipEntry(desktopUploadItem), maxThumbnailDimension, maxThumbnailSize, ) - : ffmpeg.generateVideoThumbnailNative(electron, dataOrPath); + : ffmpeg.generateVideoThumbnailNative(electron, desktopUploadItem); /** * A fallback, black, thumbnail for use in cases where thumbnail generation * fails. */ export const fallbackThumbnail = () => - Uint8Array.from(atob(BLACK_THUMBNAIL_BASE64), (c) => c.charCodeAt(0)); + Uint8Array.from(atob(blackThumbnailB64), (c) => c.charCodeAt(0)); + +const blackThumbnailB64 = + "/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAEBAQEBAQEB" + + "AQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/2wBDAQEBAQEBAQ" + + "EBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQH/wAARC" + + "ACWASwDAREAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUF" + + "BAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk" + + "6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztL" + + "W2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAA" + + "AAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVY" + + "nLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImK" + + "kpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oAD" + + "AMBAAIRAxEAPwD/AD/6ACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" + + 
"CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" + + "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKAC" + + "gAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" + + "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" + + "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" + + "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" + + "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" + + "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoA" + + "KACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACg" + + "AoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" + + "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKA" + + "CgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAK" + + "ACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoA" + + "KACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" + + "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo" + + "AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgD/9k="; diff --git a/web/apps/photos/src/services/upload/types.ts b/web/apps/photos/src/services/upload/types.ts new file mode 100644 index 000000000..25e2ab408 --- /dev/null +++ b/web/apps/photos/src/services/upload/types.ts @@ -0,0 +1,57 @@ +import type { ZipItem } from 
"@/next/types/ipc"; + +/** + * An item to upload is one of the following: + * + * 1. A file drag-and-dropped or selected by the user when we are running in the + * web browser. These is the {@link File} case. + * + * 2. A file drag-and-dropped or selected by the user when we are running in the + * context of our desktop app. In such cases, we also have the absolute path + * of the file in the user's local file system. This is the + * {@link FileAndPath} case. + * + * 3. A file path programmatically requested by the desktop app. For example, we + * might be resuming a previously interrupted upload after an app restart + * (thus we no longer have access to the {@link File} from case 2). Or we + * could be uploading a file this is in one of the folders the user has asked + * us to watch for changes. This is the `string` case. + * + * 4. A file within a zip file on the user's local file system. This too is only + * possible when we are running in the context of our desktop app. The user + * might have drag-and-dropped or selected a zip file, or it might be a zip + * file that they'd previously selected but we now are resuming an + * interrupted upload of. Either ways, what we have is a tuple containing the + * (path to zip file, and the name of an entry within that zip file). This is + * the {@link ZipItem} case. + * + * Also see: [Note: Reading a UploadItem]. + */ +export type UploadItem = File | FileAndPath | string | ZipItem; + +/** + * When we are running in the context of our desktop app, we have access to the + * absolute path of {@link File} objects. This convenience type clubs these two + * bits of information, saving us the need to query the path again and again + * using the {@link getPathForFile} method of {@link Electron}. + */ +export interface FileAndPath { + file: File; + path: string; +} + +/** + * The of cases of {@link UploadItem} that apply when we're running in the + * context of our desktop app. 
+ */ +export type DesktopUploadItem = Exclude; + +/** + * For each of cases of {@link UploadItem} that apply when we're running in the + * context of our desktop app, return a value that can be passed to + * {@link Electron} functions over IPC. + */ +export const toDataOrPathOrZipEntry = (desktopUploadItem: DesktopUploadItem) => + typeof desktopUploadItem == "string" || Array.isArray(desktopUploadItem) + ? desktopUploadItem + : desktopUploadItem.path; diff --git a/web/apps/photos/src/services/upload/uploadHttpClient.ts b/web/apps/photos/src/services/upload/uploadHttpClient.ts index 5757a841a..e8ae6de97 100644 --- a/web/apps/photos/src/services/upload/uploadHttpClient.ts +++ b/web/apps/photos/src/services/upload/uploadHttpClient.ts @@ -3,8 +3,8 @@ import { CustomError, handleUploadError } from "@ente/shared/error"; import HTTPService from "@ente/shared/network/HTTPService"; import { getEndpoint, getUploadEndpoint } from "@ente/shared/network/api"; import { getToken } from "@ente/shared/storage/localStorage/helpers"; +import { wait } from "@ente/shared/utils"; import { EnteFile } from "types/file"; -import { retryHTTPCall } from "utils/upload/uploadRetrier"; import { MultipartUploadURLs, UploadFile, UploadURL } from "./uploadService"; const ENDPOINT = getEndpoint(); @@ -236,3 +236,31 @@ class UploadHttpClient { } export default new UploadHttpClient(); + +const retrySleepTimeInMilliSeconds = [2000, 5000, 10000]; + +export async function retryHTTPCall( + func: () => Promise, + checkForBreakingError?: (error) => void, +): Promise { + const retrier = async ( + func: () => Promise, + attemptNumber: number = 0, + ) => { + try { + const resp = await func(); + return resp; + } catch (e) { + if (checkForBreakingError) { + checkForBreakingError(e); + } + if (attemptNumber < retrySleepTimeInMilliSeconds.length) { + await wait(retrySleepTimeInMilliSeconds[attemptNumber]); + return await retrier(func, attemptNumber + 1); + } else { + throw e; + } + } + }; + return await 
retrier(func); +} diff --git a/web/apps/photos/src/services/upload/uploadManager.ts b/web/apps/photos/src/services/upload/uploadManager.ts index 665cd76c8..38fd7037b 100644 --- a/web/apps/photos/src/services/upload/uploadManager.ts +++ b/web/apps/photos/src/services/upload/uploadManager.ts @@ -3,7 +3,6 @@ import { potentialFileTypeFromExtension } from "@/media/live-photo"; import { ensureElectron } from "@/next/electron"; import { lowercaseExtension, nameAndExtension } from "@/next/file"; import log from "@/next/log"; -import { ElectronFile } from "@/next/types/file"; import type { Electron } from "@/next/types/ipc"; import { ComlinkWorker } from "@/next/worker/comlink-worker"; import { ensure } from "@/utils/ensure"; @@ -36,7 +35,8 @@ import { tryParseTakeoutMetadataJSON, type ParsedMetadataJSON, } from "./takeout"; -import UploadService, { fopFileName, fopSize, uploader } from "./uploadService"; +import type { UploadItem } from "./types"; +import UploadService, { uploadItemFileName, uploader } from "./uploadService"; export type FileID = number; @@ -83,17 +83,17 @@ export interface ProgressUpdater { /** The number of uploads to process in parallel. 
*/ const maxConcurrentUploads = 4; -export interface FileWithCollection { +export interface UploadItemWithCollection { localID: number; collectionID: number; isLivePhoto?: boolean; - fileOrPath?: File | string; + uploadItem?: UploadItem; livePhotoAssets?: LivePhotoAssets; } export interface LivePhotoAssets { - image: File | string; - video: File | string; + image: UploadItem; + video: UploadItem; } export interface PublicUploadProps { @@ -320,9 +320,8 @@ class UploadManager { ComlinkWorker >(maxConcurrentUploads); private parsedMetadataJSONMap: Map; - private filesToBeUploaded: ClusteredFile[]; - private remainingFiles: ClusteredFile[] = []; - private failedFiles: ClusteredFile[]; + private itemsToBeUploaded: ClusteredUploadItem[]; + private failedItems: ClusteredUploadItem[]; private existingFiles: EnteFile[]; private setFiles: SetFiles; private collections: Map; @@ -359,9 +358,8 @@ class UploadManager { } private resetState() { - this.filesToBeUploaded = []; - this.remainingFiles = []; - this.failedFiles = []; + this.itemsToBeUploaded = []; + this.failedItems = []; this.parsedMetadataJSONMap = new Map(); this.uploaderName = null; @@ -387,70 +385,66 @@ class UploadManager { * It is an error to call this method when there is already an in-progress * upload. * - * @param filesWithCollectionToUploadIn The files to upload, each paired - * with the id of the collection that they should be uploaded into. + * @param itemsWithCollection The items to upload, each paired with the id + * of the collection that they should be uploaded into. 
* * @returns `true` if at least one file was processed */ - public async uploadFiles( - filesWithCollectionToUploadIn: FileWithCollection[], + public async uploadItems( + itemsWithCollection: UploadItemWithCollection[], collections: Collection[], uploaderName?: string, ) { if (this.uploadInProgress) throw new Error("Cannot run multiple uploads at once"); - log.info(`Uploading ${filesWithCollectionToUploadIn.length} files`); + log.info(`Uploading ${itemsWithCollection.length} files`); this.uploadInProgress = true; this.uploaderName = uploaderName; try { await this.updateExistingFilesAndCollections(collections); - const namedFiles = filesWithCollectionToUploadIn.map( - makeFileWithCollectionIDAndName, + const namedItems = itemsWithCollection.map( + makeUploadItemWithCollectionIDAndName, ); - this.uiService.setFiles(namedFiles); + this.uiService.setFiles(namedItems); - const [metadataFiles, mediaFiles] = - splitMetadataAndMediaFiles(namedFiles); + const [metadataItems, mediaItems] = + splitMetadataAndMediaItems(namedItems); - if (metadataFiles.length) { + if (metadataItems.length) { this.uiService.setUploadStage( UPLOAD_STAGES.READING_GOOGLE_METADATA_FILES, ); - await this.parseMetadataJSONFiles(metadataFiles); + await this.parseMetadataJSONFiles(metadataItems); } - if (mediaFiles.length) { - const clusteredMediaFiles = await clusterLivePhotos(mediaFiles); + if (mediaItems.length) { + const clusteredMediaItems = await clusterLivePhotos(mediaItems); this.abortIfCancelled(); // Live photos might've been clustered together, reset the list // of files to reflect that. 
- this.uiService.setFiles(clusteredMediaFiles); + this.uiService.setFiles(clusteredMediaItems); this.uiService.setHasLivePhoto( - mediaFiles.length != clusteredMediaFiles.length, + mediaItems.length != clusteredMediaItems.length, ); - await this.uploadMediaFiles(clusteredMediaFiles); + await this.uploadMediaItems(clusteredMediaItems); } } catch (e) { - if (e.message === CustomError.UPLOAD_CANCELLED) { - if (isElectron()) { - this.remainingFiles = []; - await cancelRemainingUploads(); - } - } else { - log.error("Uploading failed", e); + if (e.message != CustomError.UPLOAD_CANCELLED) { + log.error("Upload failed", e); throw e; } } finally { this.uiService.setUploadStage(UPLOAD_STAGES.FINISH); + void globalThis.electron?.clearPendingUploads(); for (let i = 0; i < maxConcurrentUploads; i++) { this.cryptoWorkers[i]?.terminate(); } @@ -479,14 +473,18 @@ class UploadManager { ); } - private async parseMetadataJSONFiles(files: FileWithCollectionIDAndName[]) { - this.uiService.reset(files.length); + private async parseMetadataJSONFiles( + items: UploadItemWithCollectionIDAndName[], + ) { + this.uiService.reset(items.length); - for (const { fileOrPath, fileName, collectionID } of files) { + for (const { uploadItem, fileName, collectionID } of items) { this.abortIfCancelled(); log.info(`Parsing metadata JSON ${fileName}`); - const metadataJSON = await tryParseTakeoutMetadataJSON(fileOrPath); + const metadataJSON = await tryParseTakeoutMetadataJSON( + ensure(uploadItem), + ); if (metadataJSON) { this.parsedMetadataJSONMap.set( getMetadataJSONMapKeyForJSON(collectionID, fileName), @@ -497,48 +495,41 @@ class UploadManager { } } - private async uploadMediaFiles(mediaFiles: ClusteredFile[]) { - this.filesToBeUploaded = [...this.filesToBeUploaded, ...mediaFiles]; - - if (isElectron()) { - this.remainingFiles = [...this.remainingFiles, ...mediaFiles]; - } - - this.uiService.reset(mediaFiles.length); - - await UploadService.setFileCount(mediaFiles.length); - + private async 
uploadMediaItems(mediaItems: ClusteredUploadItem[]) { + this.itemsToBeUploaded = [...this.itemsToBeUploaded, ...mediaItems]; + this.uiService.reset(mediaItems.length); + await UploadService.setFileCount(mediaItems.length); this.uiService.setUploadStage(UPLOAD_STAGES.UPLOADING); const uploadProcesses = []; for ( let i = 0; - i < maxConcurrentUploads && this.filesToBeUploaded.length > 0; + i < maxConcurrentUploads && this.itemsToBeUploaded.length > 0; i++ ) { this.cryptoWorkers[i] = getDedicatedCryptoWorker(); const worker = await this.cryptoWorkers[i].remote; - uploadProcesses.push(this.uploadNextFileInQueue(worker)); + uploadProcesses.push(this.uploadNextItemInQueue(worker)); } await Promise.all(uploadProcesses); } - private async uploadNextFileInQueue(worker: Remote) { + private async uploadNextItemInQueue(worker: Remote) { const uiService = this.uiService; - while (this.filesToBeUploaded.length > 0) { + while (this.itemsToBeUploaded.length > 0) { this.abortIfCancelled(); - const clusteredFile = this.filesToBeUploaded.pop(); - const { localID, collectionID } = clusteredFile; + const clusteredItem = this.itemsToBeUploaded.pop(); + const { localID, collectionID } = clusteredItem; const collection = this.collections.get(collectionID); - const uploadableFile = { ...clusteredFile, collection }; + const uploadableItem = { ...clusteredItem, collection }; uiService.setFileProgress(localID, 0); await wait(0); const { uploadResult, uploadedFile } = await uploader( - uploadableFile, + uploadableItem, this.uploaderName, this.existingFiles, this.parsedMetadataJSONMap, @@ -560,7 +551,7 @@ class UploadManager { ); const finalUploadResult = await this.postUploadTask( - uploadableFile, + uploadableItem, uploadResult, uploadedFile, ); @@ -572,20 +563,22 @@ class UploadManager { } private async postUploadTask( - uploadableFile: UploadableFile, + uploadableItem: UploadableUploadItem, uploadResult: UPLOAD_RESULT, uploadedFile: EncryptedEnteFile | EnteFile | undefined, ) { log.info( - 
`Uploaded ${uploadableFile.fileName} with result ${uploadResult}`, + `Uploaded ${uploadableItem.fileName} with result ${uploadResult}`, ); try { + const electron = globalThis.electron; + if (electron) await markUploaded(electron, uploadableItem); + let decryptedFile: EnteFile; - await this.removeFromPendingUploads(uploadableFile); switch (uploadResult) { case UPLOAD_RESULT.FAILED: case UPLOAD_RESULT.BLOCKED: - this.failedFiles.push(uploadableFile); + this.failedItems.push(uploadableItem); break; case UPLOAD_RESULT.ALREADY_UPLOADED: decryptedFile = uploadedFile as EnteFile; @@ -598,7 +591,7 @@ class UploadManager { case UPLOAD_RESULT.UPLOADED_WITH_STATIC_THUMBNAIL: decryptedFile = await decryptFile( uploadedFile as EncryptedEnteFile, - uploadableFile.collection.key, + uploadableItem.collection.key, ); break; case UPLOAD_RESULT.UNSUPPORTED: @@ -616,11 +609,25 @@ class UploadManager { ].includes(uploadResult) ) { try { + let file: File | undefined; + const uploadItem = + uploadableItem.uploadItem ?? + uploadableItem.livePhotoAssets.image; + if (uploadItem) { + if (uploadItem instanceof File) { + file = uploadItem; + } else if ( + typeof uploadItem == "string" || + Array.isArray(uploadItem) + ) { + // path from desktop, no file object + } else { + file = uploadItem.file; + } + } eventBus.emit(Events.FILE_UPLOADED, { enteFile: decryptedFile, - localFile: - uploadableFile.fileOrPath ?? 
- uploadableFile.livePhotoAssets.image, + localFile: file, }); } catch (e) { log.warn("Ignoring error in fileUploaded handlers", e); @@ -629,7 +636,7 @@ class UploadManager { } await this.watchFolderCallback( uploadResult, - uploadableFile, + uploadableItem, uploadedFile as EncryptedEnteFile, ); return uploadResult; @@ -641,7 +648,7 @@ class UploadManager { private async watchFolderCallback( fileUploadResult: UPLOAD_RESULT, - fileWithCollection: ClusteredFile, + fileWithCollection: ClusteredUploadItem, uploadedFile: EncryptedEnteFile, ) { if (isElectron()) { @@ -661,9 +668,9 @@ class UploadManager { uploadCancelService.requestUploadCancelation(); } - public getFailedFilesWithCollections() { + public getFailedItemsWithCollections() { return { - files: this.failedFiles, + items: this.failedItems, collections: [...this.collections.values()], }; } @@ -684,16 +691,6 @@ class UploadManager { this.setFiles((files) => sortFiles([...files, decryptedFile])); } - private async removeFromPendingUploads({ localID }: ClusteredFile) { - const electron = globalThis.electron; - if (electron) { - this.remainingFiles = this.remainingFiles.filter( - (f) => f.localID != localID, - ); - await updatePendingUploads(electron, this.remainingFiles); - } - } - public shouldAllowNewUpload = () => { return !this.uploadInProgress || watcher.isUploadRunning(); }; @@ -709,24 +706,25 @@ export default new UploadManager(); * As files progress through stages, they get more and more bits tacked on to * them. These types document the journey. * - * - The input is {@link FileWithCollection}. This can either be a new - * {@link FileWithCollection}, in which case it'll only have a - * {@link localID}, {@link collectionID} and a {@link fileOrPath}. Or it could - * be a retry, in which case it'll not have a {@link fileOrPath} but instead + * - The input is {@link UploadItemWithCollection}. 
This can either be a new + * {@link UploadItemWithCollection}, in which case it'll only have a + * {@link localID}, {@link collectionID} and a {@link uploadItem}. Or it could + * be a retry, in which case it'll not have a {@link uploadItem} but instead * will have data from a previous stage (concretely, it'll just be a - * relabelled {@link ClusteredFile}), like a snake eating its tail. + * relabelled {@link ClusteredUploadItem}), like a snake eating its tail. * - * - Immediately we convert it to {@link FileWithCollectionIDAndName}. This is - * to mostly systematize what we have, and also attach a {@link fileName}. + * - Immediately we convert it to {@link UploadItemWithCollectionIDAndName}. + * This is to mostly systematize what we have, and also attach a + * {@link fileName}. * * - These then get converted to "assets", whereby both parts of a live photo - * are combined. This is a {@link ClusteredFile}. + * are combined. This is a {@link ClusteredUploadItem}. * - * - On to the {@link ClusteredFile} we attach the corresponding - * {@link collection}, giving us {@link UploadableFile}. This is what gets - * queued and then passed to the {@link uploader}. + * - On to the {@link ClusteredUploadItem} we attach the corresponding + * {@link collection}, giving us {@link UploadableUploadItem}. This is what + * gets queued and then passed to the {@link uploader}. */ -type FileWithCollectionIDAndName = { +type UploadItemWithCollectionIDAndName = { /** A unique ID for the duration of the upload */ localID: number; /** The ID of the collection to which this file should be uploaded. */ @@ -740,64 +738,57 @@ type FileWithCollectionIDAndName = { /** `true` if this is a live photo. 
*/ isLivePhoto?: boolean; /* Valid for non-live photos */ - fileOrPath?: File | string; + uploadItem?: UploadItem; /* Valid for live photos */ livePhotoAssets?: LivePhotoAssets; }; -const makeFileWithCollectionIDAndName = ( - f: FileWithCollection, -): FileWithCollectionIDAndName => { - const fileOrPath = f.fileOrPath; - /* TODO(MR): ElectronFile */ - if (!(fileOrPath instanceof File || typeof fileOrPath == "string")) - throw new Error(`Unexpected file ${f}`); - - return { - localID: ensure(f.localID), - collectionID: ensure(f.collectionID), - fileName: ensure( - f.isLivePhoto - ? fopFileName(f.livePhotoAssets.image) - : fopFileName(fileOrPath), - ), - isLivePhoto: f.isLivePhoto, - fileOrPath: fileOrPath, - livePhotoAssets: f.livePhotoAssets, - }; -}; +const makeUploadItemWithCollectionIDAndName = ( + f: UploadItemWithCollection, +): UploadItemWithCollectionIDAndName => ({ + localID: ensure(f.localID), + collectionID: ensure(f.collectionID), + fileName: ensure( + f.isLivePhoto + ? uploadItemFileName(f.livePhotoAssets.image) + : uploadItemFileName(f.uploadItem), + ), + isLivePhoto: f.isLivePhoto, + uploadItem: f.uploadItem, + livePhotoAssets: f.livePhotoAssets, +}); /** - * A file with both parts of a live photo clubbed together. + * An upload item with both parts of a live photo clubbed together. * * See: [Note: Intermediate file types during upload]. */ -type ClusteredFile = { +type ClusteredUploadItem = { localID: number; collectionID: number; fileName: string; isLivePhoto: boolean; - fileOrPath?: File | string; + uploadItem?: UploadItem; livePhotoAssets?: LivePhotoAssets; }; /** - * The file that we hand off to the uploader. Essentially {@link ClusteredFile} - * with the {@link collection} attached to it. + * The file that we hand off to the uploader. Essentially + * {@link ClusteredUploadItem} with the {@link collection} attached to it. * * See: [Note: Intermediate file types during upload]. 
*/ -export type UploadableFile = ClusteredFile & { +export type UploadableUploadItem = ClusteredUploadItem & { collection: Collection; }; -const splitMetadataAndMediaFiles = ( - files: FileWithCollectionIDAndName[], +const splitMetadataAndMediaItems = ( + items: UploadItemWithCollectionIDAndName[], ): [ - metadata: FileWithCollectionIDAndName[], - media: FileWithCollectionIDAndName[], + metadata: UploadItemWithCollectionIDAndName[], + media: UploadItemWithCollectionIDAndName[], ] => - files.reduce( + items.reduce( ([metadata, media], f) => { if (lowercaseExtension(f.fileName) == "json") metadata.push(f); else media.push(f); @@ -806,59 +797,56 @@ const splitMetadataAndMediaFiles = ( [[], []], ); -export const setToUploadCollection = async (collections: Collection[]) => { - let collectionName: string = null; - /* collection being one suggest one of two things - 1. Either the user has upload to a single existing collection - 2. Created a new single collection to upload to - may have had multiple folder, but chose to upload - to one album - hence saving the collection name when upload collection count is 1 - helps the info of user choosing this options - and on next upload we can directly start uploading to this collection - */ - if (collections.length === 1) { - collectionName = collections[0].name; +const markUploaded = async (electron: Electron, item: ClusteredUploadItem) => { + // TODO: This can be done better + if (item.isLivePhoto) { + const [p0, p1] = [ + item.livePhotoAssets.image, + item.livePhotoAssets.video, + ]; + if (Array.isArray(p0) && Array.isArray(p1)) { + electron.markUploadedZipItems([p0, p1]); + } else if (typeof p0 == "string" && typeof p1 == "string") { + electron.markUploadedFiles([p0, p1]); + } else if ( + p0 && + typeof p0 == "object" && + "path" in p0 && + p1 && + typeof p1 == "object" && + "path" in p1 + ) { + electron.markUploadedFiles([p0.path, p1.path]); + } else { + throw new Error( + "Attempting to mark upload completion of unexpected 
desktop upload items", + ); + } + } else { + const p = ensure(item.uploadItem); + if (Array.isArray(p)) { + electron.markUploadedZipItems([p]); + } else if (typeof p == "string") { + electron.markUploadedFiles([p]); + } else if (p && typeof p == "object" && "path" in p) { + electron.markUploadedFiles([p.path]); + } else { + throw new Error( + "Attempting to mark upload completion of unexpected desktop upload items", + ); + } } - await ensureElectron().setPendingUploadCollection(collectionName); -}; - -const updatePendingUploads = async ( - electron: Electron, - files: ClusteredFile[], -) => { - const paths = files - .map((file) => - file.isLivePhoto - ? [file.livePhotoAssets.image, file.livePhotoAssets.video] - : [file.fileOrPath], - ) - .flat() - .map((f) => getFilePathElectron(f)); - await electron.setPendingUploadFiles("files", paths); -}; - -/** - * NOTE: a stop gap measure, only meant to be called by code that is running in - * the context of a desktop app initiated upload - */ -export const getFilePathElectron = (file: File | ElectronFile | string) => - typeof file == "string" ? file : (file as ElectronFile).path; - -const cancelRemainingUploads = async () => { - const electron = ensureElectron(); - await electron.setPendingUploadCollection(undefined); - await electron.setPendingUploadFiles("zips", []); - await electron.setPendingUploadFiles("files", []); }; /** * Go through the given files, combining any sibling image + video assets into a * single live photo when appropriate. 
*/ -const clusterLivePhotos = async (files: FileWithCollectionIDAndName[]) => { - const result: ClusteredFile[] = []; - files +const clusterLivePhotos = async ( + items: UploadItemWithCollectionIDAndName[], +) => { + const result: ClusteredUploadItem[] = []; + items .sort((f, g) => nameAndExtension(f.fileName)[0].localeCompare( nameAndExtension(g.fileName)[0], @@ -866,22 +854,22 @@ const clusterLivePhotos = async (files: FileWithCollectionIDAndName[]) => { ) .sort((f, g) => f.collectionID - g.collectionID); let index = 0; - while (index < files.length - 1) { - const f = files[index]; - const g = files[index + 1]; + while (index < items.length - 1) { + const f = items[index]; + const g = items[index + 1]; const fFileType = potentialFileTypeFromExtension(f.fileName); const gFileType = potentialFileTypeFromExtension(g.fileName); const fa: PotentialLivePhotoAsset = { fileName: f.fileName, fileType: fFileType, collectionID: f.collectionID, - fileOrPath: f.fileOrPath, + uploadItem: f.uploadItem, }; const ga: PotentialLivePhotoAsset = { fileName: g.fileName, fileType: gFileType, collectionID: g.collectionID, - fileOrPath: g.fileOrPath, + uploadItem: g.uploadItem, }; if (await areLivePhotoAssets(fa, ga)) { const [image, video] = @@ -892,8 +880,8 @@ const clusterLivePhotos = async (files: FileWithCollectionIDAndName[]) => { fileName: image.fileName, isLivePhoto: true, livePhotoAssets: { - image: image.fileOrPath, - video: video.fileOrPath, + image: image.uploadItem, + video: video.uploadItem, }, }); index += 2; @@ -905,9 +893,9 @@ const clusterLivePhotos = async (files: FileWithCollectionIDAndName[]) => { index += 1; } } - if (index === files.length - 1) { + if (index === items.length - 1) { result.push({ - ...files[index], + ...items[index], isLivePhoto: false, }); } @@ -918,7 +906,7 @@ interface PotentialLivePhotoAsset { fileName: string; fileType: FILE_TYPE; collectionID: number; - fileOrPath: File | string; + uploadItem: UploadItem; } const areLivePhotoAssets = async ( 
@@ -961,11 +949,11 @@ const areLivePhotoAssets = async ( // we use doesn't support stream as a input. const maxAssetSize = 20 * 1024 * 1024; /* 20MB */ - const fSize = await fopSize(f.fileOrPath); - const gSize = await fopSize(g.fileOrPath); + const fSize = await uploadItemSize(f.uploadItem); + const gSize = await uploadItemSize(g.uploadItem); if (fSize > maxAssetSize || gSize > maxAssetSize) { log.info( - `Not classifying assets with too large sizes ${[fSize, gSize]} as a live photo`, + `Not classifying files with too large sizes (${fSize} and ${gSize} bytes) as a live photo`, ); return false; } @@ -998,3 +986,15 @@ const removePotentialLivePhotoSuffix = (name: string, suffix?: string) => { return foundSuffix ? name.slice(0, foundSuffix.length * -1) : name; }; + +/** + * Return the size of the given {@link uploadItem}. + */ +const uploadItemSize = async (uploadItem: UploadItem): Promise => { + if (uploadItem instanceof File) return uploadItem.size; + if (typeof uploadItem == "string") + return ensureElectron().pathOrZipItemSize(uploadItem); + if (Array.isArray(uploadItem)) + return ensureElectron().pathOrZipItemSize(uploadItem); + return uploadItem.file.size; +}; diff --git a/web/apps/photos/src/services/upload/uploadService.ts b/web/apps/photos/src/services/upload/uploadService.ts index 1848a2b1e..52f495785 100644 --- a/web/apps/photos/src/services/upload/uploadService.ts +++ b/web/apps/photos/src/services/upload/uploadService.ts @@ -1,3 +1,4 @@ +import { hasFileHash } from "@/media/file"; import { FILE_TYPE, type FileTypeInfo } from "@/media/file-type"; import { encodeLivePhoto } from "@/media/live-photo"; import type { Metadata } from "@/media/types/file"; @@ -6,19 +7,12 @@ import { basename } from "@/next/file"; import log from "@/next/log"; import { CustomErrorMessage } from "@/next/types/ipc"; import { ensure } from "@/utils/ensure"; +import { ENCRYPTION_CHUNK_SIZE } from "@ente/shared/crypto/constants"; import { DedicatedCryptoWorker } from 
"@ente/shared/crypto/internal/crypto.worker"; -import { - B64EncryptionResult, - EncryptionResult, - LocalFileAttributes, -} from "@ente/shared/crypto/types"; +import { B64EncryptionResult } from "@ente/shared/crypto/types"; import { CustomError, handleUploadError } from "@ente/shared/error"; -import { isDataStream, type DataStream } from "@ente/shared/utils/data-stream"; import { Remote } from "comlink"; import { - FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART, - FILE_READER_CHUNK_SIZE, - MULTIPART_PART_SIZE, NULL_LOCATION, RANDOM_PERCENTAGE_PROGRESS_FOR_PUT, UPLOAD_RESULT, @@ -45,10 +39,8 @@ import { updateMagicMetadata, } from "utils/magicMetadata"; import { readStream } from "utils/native-stream"; -import { hasFileHash } from "utils/upload"; import * as convert from "xml-js"; import { detectFileTypeInfoFromChunk } from "../detect-type"; -import { getFileStream } from "../readerService"; import { tryParseEpochMicrosecondsFromFileName } from "./date"; import publicUploadHttpClient from "./publicUploadHttpClient"; import type { ParsedMetadataJSON } from "./takeout"; @@ -58,8 +50,54 @@ import { generateThumbnailNative, generateThumbnailWeb, } from "./thumbnail"; +import type { UploadItem } from "./types"; import UploadHttpClient from "./uploadHttpClient"; -import type { UploadableFile } from "./uploadManager"; +import type { UploadableUploadItem } from "./uploadManager"; + +/** + * A readable stream for a file, and its associated size and last modified time. + * + * This is the in-memory representation of the {@link UploadItem} type that we + * usually pass around. See: [Note: Reading a UploadItem] + */ +interface FileStream { + /** + * A stream of the file's contents + * + * This stream is guaranteed to emit data in ENCRYPTION_CHUNK_SIZE chunks + * (except the last chunk which can be smaller since a file would rarely + * align exactly to a ENCRYPTION_CHUNK_SIZE multiple). + * + * Note: A stream can only be read once! 
+ */ + stream: ReadableStream; + /** + * Number of chunks {@link stream} will emit, each ENCRYPTION_CHUNK_SIZE + * sized (except the last one). + */ + chunkCount: number; + /** + * The size in bytes of the underlying file. + */ + fileSize: number; + /** + * The modification time of the file, in epoch milliseconds. + */ + lastModifiedMs: number; + /** + * Set to the underlying {@link File} when we also have access to it. + */ + file?: File; +} + +/** + * If the stream we have is more than 5 ENCRYPTION_CHUNK_SIZE chunks, then use + * multipart uploads for it, with each multipart-part containing 5 chunks. + * + * ENCRYPTION_CHUNK_SIZE is 4 MB, and the number of chunks in a single upload + * part is 5, so each part is (up to) 20 MB. + */ +const multipartChunksPerPart = 5; /** Upload files to cloud storage */ class UploadService { @@ -144,35 +182,25 @@ const uploadService = new UploadService(); export default uploadService; /** - * Return the file name for the given {@link fileOrPath}. - * - * @param fileOrPath The {@link File}, or the path to it. Note that it is only - * valid to specify a path if we are running in the context of our desktop app. + * Return the file name for the given {@link uploadItem}. */ -export const fopFileName = (fileOrPath: File | string) => - typeof fileOrPath == "string" ? basename(fileOrPath) : fileOrPath.name; - -/** - * Return the size of the given {@link fileOrPath}. - * - * @param fileOrPath The {@link File}, or the path to it. Note that it is only - * valid to specify a path if we are running in the context of our desktop app. - */ -export const fopSize = async (fileOrPath: File | string): Promise => - fileOrPath instanceof File - ? 
fileOrPath.size - : await ensureElectron().fs.size(fileOrPath); +export const uploadItemFileName = (uploadItem: UploadItem) => { + if (uploadItem instanceof File) return uploadItem.name; + if (typeof uploadItem == "string") return basename(uploadItem); + if (Array.isArray(uploadItem)) return basename(uploadItem[1]); + return uploadItem.file.name; +}; /* -- Various intermediate type used during upload -- */ -interface UploadAsset2 { +interface UploadAsset { isLivePhoto?: boolean; - fileOrPath?: File | string; + uploadItem?: UploadItem; livePhotoAssets?: LivePhotoAssets; } -interface FileInMemory { - filedata: Uint8Array | DataStream; +interface ThumbnailedFile { + fileStreamOrData: FileStream | Uint8Array; /** The JPEG data of the generated thumbnail */ thumbnail: Uint8Array; /** @@ -182,7 +210,7 @@ interface FileInMemory { hasStaticThumbnail: boolean; } -interface FileWithMetadata extends Omit { +interface FileWithMetadata extends Omit { metadata: Metadata; localID: number; pubMagicMetadata: FilePublicMagicMetadata; @@ -193,8 +221,38 @@ interface EncryptedFile { fileKey: B64EncryptionResult; } +interface EncryptedFileStream { + /** + * A stream of the file's encrypted contents + * + * This stream is guaranteed to emit data in ENCRYPTION_CHUNK_SIZE chunks + * (except the last chunk which can be smaller since a file would rarely + * align exactly to a ENCRYPTION_CHUNK_SIZE multiple). + */ + stream: ReadableStream; + /** + * Number of chunks {@link stream} will emit, each ENCRYPTION_CHUNK_SIZE + * sized (except the last one). 
+ */ + chunkCount: number; +} + +interface LocalFileAttributes< + T extends string | Uint8Array | EncryptedFileStream, +> { + encryptedData: T; + decryptionHeader: string; +} + +interface EncryptionResult< + T extends string | Uint8Array | EncryptedFileStream, +> { + file: LocalFileAttributes; + key: string; +} + interface ProcessedFile { - file: LocalFileAttributes; + file: LocalFileAttributes; thumbnail: LocalFileAttributes; metadata: LocalFileAttributes; pubMagicMetadata: EncryptedMagicMetadata; @@ -244,14 +302,14 @@ interface UploadResponse { } /** - * Upload the given {@link UploadableFile} + * Upload the given {@link UploadableUploadItem} * * This is lower layer implementation of the upload. It is invoked by * {@link UploadManager} after it has assembled all the relevant bits we need to * go forth and upload. */ export const uploader = async ( - { collection, localID, fileName, ...uploadAsset }: UploadableFile, + { collection, localID, fileName, ...uploadAsset }: UploadableUploadItem, uploaderName: string, existingFiles: EnteFile[], parsedMetadataJSONMap: Map, @@ -325,10 +383,8 @@ export const uploader = async ( abortIfCancelled(); - const { filedata, thumbnail, hasStaticThumbnail } = await readAsset( - fileTypeInfo, - uploadAsset, - ); + const { fileStreamOrData, thumbnail, hasStaticThumbnail } = + await readAsset(fileTypeInfo, uploadAsset); if (hasStaticThumbnail) metadata.hasStaticThumbnail = true; @@ -341,7 +397,7 @@ export const uploader = async ( const fileWithMetadata: FileWithMetadata = { localID, - filedata, + fileStreamOrData, thumbnail, metadata, pubMagicMetadata, @@ -401,15 +457,25 @@ export const uploader = async ( }; /** - * Read the given file or path into an in-memory representation. + * Read the given file or path or zip item into an in-memory representation. 
 * - * See: [Note: Reading a fileOrPath] + * [Note: Reading a UploadItem] * * The file can be either a web - * [File](https://developer.mozilla.org/en-US/docs/Web/API/File) or the absolute - * path to a file on desk. When and why, read on. + * [File](https://developer.mozilla.org/en-US/docs/Web/API/File), the absolute + * path to a file on disk, a combination of these two, or an entry in a zip file + * on the user's local file system. * - * This code gets invoked in two contexts: + * tl;dr; There are four cases: + * + * 1. web / File + * 2. desktop / File (+ path) + * 3. desktop / path + * 4. desktop / ZipItem + * + * For the when and why, read on. + * + * The code that accesses files (e.g. uploads) gets invoked in two contexts: * * 1. web: the normal mode, when we're running in as a web app in the browser. * * In the web context, we'll always get a File, since within the browser we * cannot programmatically construct paths to or arbitrarily access files on the - * user's filesystem. Note that even if we were to have an absolute path at - * hand, we cannot programmatically create such File objects to arbitrary - * absolute paths on user's local filesystem for security reasons. + * user's file system. + * + * > Note that even if we were to somehow have an absolute path at hand, we + * cannot programmatically create such File objects to arbitrary absolute + * paths on user's local file system for security reasons. * * So in the web context, this will always be a File we get as a result of an - * explicit user interaction (e.g. drag and drop). + * explicit user interaction (e.g. drag and drop or using a file selector). * - * In the desktop context, this can be either a File or a path. + * In the desktop context, this can be either a File (+ path), or a path, or an + * entry within a zip file. * - * 1. If the user provided us this file via some user interaction (say a drag - * and a drop), this'll still be a File. 
+ * 2. If the user provided us this file via some user interaction (say a drag + * and a drop), this'll still be a File. But unlike in the web context, we + * also have access to the full path of this file. * - * 2. However, when running in the desktop app we have the ability to access - * absolute paths on the user's file system. For example, if the user asks us - * to watch certain folders on their disk for changes, we'll be able to pick - * up new images being added, and in such cases, the parameter here will be a - * path. Another example is when resuming an previously interrupted upload - - * we'll only have the path at hand in such cases, not the File object. + * 3. In addition, when running in the desktop app we have the ability to + * initiate programmatic access to absolute paths on the user's file system. For + * example, if the user asks us to watch certain folders on their disk for + * changes, we'll be able to pick up new images being added, and in such + * cases, the parameter here will be a path. Another example is when resuming + * a previously interrupted upload - we'll only have the path at hand in + * such cases, not the original File object since the app subsequently + * restarted. * - * The advantage of the File object is that the browser has already read it into - * memory for us. The disadvantage comes in the case where we need to - * communicate with the native Node.js layer of our desktop app. Since this - * communication happens over IPC, the File's contents need to be serialized and - * copied, which is a bummer for large videos etc. + * 4. The user might've also initiated an upload of a zip file (or we might be + * resuming one). In such cases we will get a tuple (path to the zip file on + * the local file system, and the name of the entry within that zip file). * - * So when we do have a path, we first try to see if we can perform IPC using - * the path itself (e.g. when generating thumbnails). 
Eventually, we'll need to - * read the file once when we need to encrypt and upload it, but if we're smart - * we can do all the rest of the IPC operations using the path itself, and for - * the read during upload using a streaming IPC mechanism. + * Case 3 and 4, when we're provided a path, are simple. We don't have a choice, + * since we cannot still programmatically construct a File object (we can + * construct it on the Node.js layer, but it can't then be transferred over the + * IPC boundary). So all our operations use the path itself. + * + * Case 2 involves a choice on a use-case basis. Neither File nor the path is a + * better choice for all use cases. + * + * > The advantage of the File object is that the browser has already read it + * into memory for us. The disadvantage comes in the case where we need to + * communicate with the native Node.js layer of our desktop app. Since this + * communication happens over IPC, the File's contents need to be serialized + * and copied, which is a bummer for large videos etc. */ -const readFileOrPath = async ( - fileOrPath: File | string, -): Promise<{ - dataOrStream: Uint8Array | DataStream; - fileSize: number; - lastModifiedMs: number; -}> => { - let dataOrStream: Uint8Array | DataStream; +const readUploadItem = async (uploadItem: UploadItem): Promise => { + let underlyingStream: ReadableStream; + let file: File | undefined; let fileSize: number; let lastModifiedMs: number; - if (fileOrPath instanceof File) { - const file = fileOrPath; - fileSize = file.size; - lastModifiedMs = file.lastModified; - dataOrStream = - fileSize > MULTIPART_PART_SIZE - ? 
getFileStream(file, FILE_READER_CHUNK_SIZE) - : new Uint8Array(await file.arrayBuffer()); - } else { - const path = fileOrPath; + if (typeof uploadItem == "string" || Array.isArray(uploadItem)) { const { response, size, lastModifiedMs: lm, - } = await readStream(ensureElectron(), path); + } = await readStream(ensureElectron(), uploadItem); + underlyingStream = response.body; fileSize = size; lastModifiedMs = lm; - if (size > MULTIPART_PART_SIZE) { - const chunkCount = Math.ceil(size / FILE_READER_CHUNK_SIZE); - dataOrStream = { stream: response.body, chunkCount }; - } else { - dataOrStream = new Uint8Array(await response.arrayBuffer()); - } - } - - return { dataOrStream, fileSize, lastModifiedMs }; -}; - -/** A variant of {@readFileOrPath} that always returns an {@link DataStream}. */ -const readFileOrPathStream = async ( - fileOrPath: File | string, -): Promise => { - if (fileOrPath instanceof File) { - return getFileStream(fileOrPath, FILE_READER_CHUNK_SIZE); } else { - const { response, size } = await readStream( - ensureElectron(), - fileOrPath, - ); - const chunkCount = Math.ceil(size / FILE_READER_CHUNK_SIZE); - return { stream: response.body, chunkCount }; + if (uploadItem instanceof File) { + file = uploadItem; + } else { + file = uploadItem.file; + } + underlyingStream = file.stream(); + fileSize = file.size; + lastModifiedMs = file.lastModified; } + + const N = ENCRYPTION_CHUNK_SIZE; + const chunkCount = Math.ceil(fileSize / ENCRYPTION_CHUNK_SIZE); + + // Pipe the underlying stream through a transformer that emits + // ENCRYPTION_CHUNK_SIZE-ed chunks (except the last one, which can be + // smaller). 
+ let pending: Uint8Array | undefined; + const transformer = new TransformStream({ + async transform( + chunk: Uint8Array, + controller: TransformStreamDefaultController, + ) { + let next: Uint8Array; + if (pending) { + next = new Uint8Array(pending.length + chunk.length); + next.set(pending); + next.set(chunk, pending.length); + pending = undefined; + } else { + next = chunk; + } + while (next.length >= N) { + controller.enqueue(next.slice(0, N)); + next = next.slice(N); + } + if (next.length) pending = next; + }, + flush(controller: TransformStreamDefaultController) { + if (pending) controller.enqueue(pending); + }, + }); + + const stream = underlyingStream.pipeThrough(transformer); + + return { stream, chunkCount, fileSize, lastModifiedMs, file }; }; interface ReadAssetDetailsResult { @@ -510,17 +596,17 @@ interface ReadAssetDetailsResult { } /** - * Read the file(s) to determine the type, size and last modified time of the - * given {@link asset}. + * Read the associated file(s) to determine the type, size and last modified + * time of the given {@link asset}. */ const readAssetDetails = async ({ isLivePhoto, livePhotoAssets, - fileOrPath, -}: UploadAsset2): Promise => + uploadItem, +}: UploadAsset): Promise => isLivePhoto ? readLivePhotoDetails(livePhotoAssets) - : readImageOrVideoDetails(fileOrPath); + : readImageOrVideoDetails(uploadItem); const readLivePhotoDetails = async ({ image, video }: LivePhotoAssets) => { const img = await readImageOrVideoDetails(image); @@ -546,22 +632,18 @@ const readLivePhotoDetails = async ({ image, video }: LivePhotoAssets) => { * While we're at it, also return the size of the file, and its last modified * time (expressed as epoch milliseconds). 
* - * @param fileOrPath See: [Note: Reading a fileOrPath] + * @param uploadItem See: [Note: Reading a UploadItem] */ -const readImageOrVideoDetails = async (fileOrPath: File | string) => { - const { dataOrStream, fileSize, lastModifiedMs } = - await readFileOrPath(fileOrPath); +const readImageOrVideoDetails = async (uploadItem: UploadItem) => { + const { stream, fileSize, lastModifiedMs } = + await readUploadItem(uploadItem); const fileTypeInfo = await detectFileTypeInfoFromChunk(async () => { - if (dataOrStream instanceof Uint8Array) { - return dataOrStream; - } else { - const reader = dataOrStream.stream.getReader(); - const chunk = ensure((await reader.read()).value); - await reader.cancel(); - return chunk; - } - }, fopFileName(fileOrPath)); + const reader = stream.getReader(); + const chunk = ensure((await reader.read()).value); + await reader.cancel(); + return chunk; + }, uploadItemFileName(uploadItem)); return { fileTypeInfo, fileSize, lastModifiedMs }; }; @@ -587,7 +669,7 @@ interface ExtractAssetMetadataResult { * {@link parsedMetadataJSONMap} for the assets. Return the resultant metadatum. 
*/ const extractAssetMetadata = async ( - { isLivePhoto, fileOrPath, livePhotoAssets }: UploadAsset2, + { isLivePhoto, uploadItem, livePhotoAssets }: UploadAsset, fileTypeInfo: FileTypeInfo, lastModifiedMs: number, collectionID: number, @@ -604,7 +686,7 @@ const extractAssetMetadata = async ( worker, ) : await extractImageOrVideoMetadata( - fileOrPath, + uploadItem, fileTypeInfo, lastModifiedMs, collectionID, @@ -639,7 +721,7 @@ const extractLivePhotoMetadata = async ( return { metadata: { ...imageMetadata, - title: fopFileName(livePhotoAssets.image), + title: uploadItemFileName(livePhotoAssets.image), fileType: FILE_TYPE.LIVE_PHOTO, imageHash: imageMetadata.hash, videoHash: videoHash, @@ -650,33 +732,33 @@ const extractLivePhotoMetadata = async ( }; const extractImageOrVideoMetadata = async ( - fileOrPath: File | string, + uploadItem: UploadItem, fileTypeInfo: FileTypeInfo, lastModifiedMs: number, collectionID: number, parsedMetadataJSONMap: Map, worker: Remote, ) => { - const fileName = fopFileName(fileOrPath); + const fileName = uploadItemFileName(uploadItem); const { fileType } = fileTypeInfo; let extractedMetadata: ParsedExtractedMetadata; if (fileType === FILE_TYPE.IMAGE) { extractedMetadata = (await tryExtractImageMetadata( - fileOrPath, + uploadItem, fileTypeInfo, lastModifiedMs, )) ?? NULL_EXTRACTED_METADATA; } else if (fileType === FILE_TYPE.VIDEO) { extractedMetadata = - (await tryExtractVideoMetadata(fileOrPath)) ?? + (await tryExtractVideoMetadata(uploadItem)) ?? 
NULL_EXTRACTED_METADATA; } else { - throw new Error(`Unexpected file type ${fileType} for ${fileOrPath}`); + throw new Error(`Unexpected file type ${fileType} for ${uploadItem}`); } - const hash = await computeHash(fileOrPath, worker); + const hash = await computeHash(uploadItem, worker); const modificationTime = lastModifiedMs * 1000; const creationTime = @@ -720,46 +802,48 @@ const NULL_EXTRACTED_METADATA: ParsedExtractedMetadata = { }; async function tryExtractImageMetadata( - fileOrPath: File | string, + uploadItem: UploadItem, fileTypeInfo: FileTypeInfo, lastModifiedMs: number, ): Promise { let file: File; - if (fileOrPath instanceof File) { - file = fileOrPath; - } else { - const path = fileOrPath; + if (typeof uploadItem == "string" || Array.isArray(uploadItem)) { // The library we use for extracting EXIF from images, exifr, doesn't // support streams. But unlike videos, for images it is reasonable to // read the entire stream into memory here. - const { response } = await readStream(ensureElectron(), path); + const { response } = await readStream(ensureElectron(), uploadItem); + const path = typeof uploadItem == "string" ? 
uploadItem : uploadItem[1]; file = new File([await response.arrayBuffer()], basename(path), { lastModified: lastModifiedMs, }); + } else if (uploadItem instanceof File) { + file = uploadItem; + } else { + file = uploadItem.file; } try { return await parseImageMetadata(file, fileTypeInfo); } catch (e) { - log.error(`Failed to extract image metadata for ${fileOrPath}`, e); + log.error(`Failed to extract image metadata for ${uploadItem}`, e); return undefined; } } -const tryExtractVideoMetadata = async (fileOrPath: File | string) => { +const tryExtractVideoMetadata = async (uploadItem: UploadItem) => { try { - return await ffmpeg.extractVideoMetadata(fileOrPath); + return await ffmpeg.extractVideoMetadata(uploadItem); } catch (e) { - log.error(`Failed to extract video metadata for ${fileOrPath}`, e); + log.error(`Failed to extract video metadata for ${uploadItem}`, e); return undefined; } }; const computeHash = async ( - fileOrPath: File | string, + uploadItem: UploadItem, worker: Remote, ) => { - const { stream, chunkCount } = await readFileOrPathStream(fileOrPath); + const { stream, chunkCount } = await readUploadItem(uploadItem); const hashState = await worker.initChunkHashing(); const streamReader = stream.getReader(); @@ -828,19 +912,18 @@ const areFilesSameNoHash = (f: Metadata, g: Metadata) => { const readAsset = async ( fileTypeInfo: FileTypeInfo, - { isLivePhoto, fileOrPath, livePhotoAssets }: UploadAsset2, -) => + { isLivePhoto, uploadItem, livePhotoAssets }: UploadAsset, +): Promise => isLivePhoto ? 
await readLivePhoto(livePhotoAssets, fileTypeInfo) - : await readImageOrVideo(fileOrPath, fileTypeInfo); + : await readImageOrVideo(uploadItem, fileTypeInfo); const readLivePhoto = async ( livePhotoAssets: LivePhotoAssets, fileTypeInfo: FileTypeInfo, ) => { - const readImage = await readFileOrPath(livePhotoAssets.image); const { - filedata: imageDataOrStream, + fileStreamOrData: imageFileStreamOrData, thumbnail, hasStaticThumbnail, } = await withThumbnail( @@ -849,28 +932,29 @@ const readLivePhoto = async ( extension: fileTypeInfo.imageType, fileType: FILE_TYPE.IMAGE, }, - readImage.dataOrStream, - readImage.fileSize, + await readUploadItem(livePhotoAssets.image), ); - const readVideo = await readFileOrPath(livePhotoAssets.video); + const videoFileStreamOrData = await readUploadItem(livePhotoAssets.video); - // We can revisit this later, but the existing code always read the entire - // file into memory here, and to avoid changing the rest of the scaffolding - // retain the same behaviour. + // The JS zip library that encodeLivePhoto uses does not support + // ReadableStreams, so pass the file (blob) if we have one, otherwise read + // the entire stream into memory and pass the resultant data. // - // This is a reasonable assumption too, since the videos corresponding to - // live photos are only a couple of seconds long. - const toData = async (dataOrStream: Uint8Array | DataStream) => - dataOrStream instanceof Uint8Array - ? dataOrStream - : await readEntireStream(dataOrStream.stream); + // This is a reasonable behaviour since the videos corresponding to live + // photos are only a couple of seconds long (we've already done a pre-flight + // check during areLivePhotoAssets to ensure their size is small). + const fileOrData = async (sd: FileStream | Uint8Array) => { + const fos = async ({ file, stream }: FileStream) => + file ? file : await readEntireStream(stream); + return sd instanceof Uint8Array ? 
sd : fos(sd); + }; return { - filedata: await encodeLivePhoto({ - imageFileName: fopFileName(livePhotoAssets.image), - imageData: await toData(imageDataOrStream), - videoFileName: fopFileName(livePhotoAssets.video), - videoData: await toData(readVideo.dataOrStream), + fileStreamOrData: await encodeLivePhoto({ + imageFileName: uploadItemFileName(livePhotoAssets.image), + imageFileOrData: await fileOrData(imageFileStreamOrData), + videoFileName: uploadItemFileName(livePhotoAssets.video), + videoFileOrData: await fileOrData(videoFileStreamOrData), }), thumbnail, hasStaticThumbnail, @@ -878,11 +962,11 @@ const readLivePhoto = async ( }; const readImageOrVideo = async ( - fileOrPath: File | string, + uploadItem: UploadItem, fileTypeInfo: FileTypeInfo, ) => { - const { dataOrStream, fileSize } = await readFileOrPath(fileOrPath); - return withThumbnail(fileOrPath, fileTypeInfo, dataOrStream, fileSize); + const fileStream = await readUploadItem(uploadItem); + return withThumbnail(uploadItem, fileTypeInfo, fileStream); }; // TODO(MR): Merge with the uploader @@ -905,18 +989,21 @@ const moduleState = new ModuleState(); /** * Augment the given {@link dataOrStream} with thumbnail information. * - * This is a companion method for {@link readFileOrPath}, and can be used to - * convert the result of {@link readFileOrPath} into an {@link FileInMemory}. + * This is a companion method for {@link readUploadItem}, and can be used to + * convert the result of {@link readUploadItem} into an {@link ThumbnailedFile}. * - * Note: The returned dataOrStream might be different from the one that we - * provide to it. + * @param uploadItem The {@link UploadItem} where the given {@link fileStream} + * came from. + * + * Note: The `fileStream` in the returned {@link ThumbnailedFile} may be + * different from the one passed to the function. 
 */ const withThumbnail = async ( - fileOrPath: File | string, + uploadItem: UploadItem, fileTypeInfo: FileTypeInfo, - dataOrStream: Uint8Array | DataStream, - fileSize: number, -): Promise => { + fileStream: FileStream, +): Promise => { + let fileData: Uint8Array | undefined; let thumbnail: Uint8Array | undefined; let hasStaticThumbnail = false; @@ -925,32 +1012,16 @@ fileTypeInfo.fileType == FILE_TYPE.IMAGE && moduleState.isNativeImageThumbnailGenerationNotAvailable; - // 1. Native thumbnail generation. - if (electron && !notAvailable) { + // 1. Native thumbnail generation using item's (effective) path. + if (electron && !notAvailable && !(uploadItem instanceof File)) { try { - if (fileOrPath instanceof File) { - if (dataOrStream instanceof Uint8Array) { - thumbnail = await generateThumbnailNative( - electron, - dataOrStream, - fileTypeInfo, - ); - } else { - // This was large enough to need streaming, and trying to - // read it into memory or copying over IPC might cause us to - // run out of memory. So skip the native generation for it, - // instead let it get processed by the browser based - // thumbnailer (case 2). - } - } else { - thumbnail = await generateThumbnailNative( - electron, - fileOrPath, - fileTypeInfo, - ); - } + thumbnail = await generateThumbnailNative( + electron, + uploadItem, + fileTypeInfo, + ); } catch (e) { - if (e.message == CustomErrorMessage.NotAvailable) { + if (e.message.endsWith(CustomErrorMessage.NotAvailable)) { moduleState.isNativeImageThumbnailGenerationNotAvailable = true; } else { log.error("Native thumbnail generation failed", e); @@ -960,39 +1031,50 @@ if (!thumbnail) { let blob: Blob | undefined; - if (fileOrPath instanceof File) { - // 2. Browser based thumbnail generation for `File`s. - blob = fileOrPath; + if (uploadItem instanceof File) { + // 2. Browser based thumbnail generation for File (blobs). + blob = uploadItem; } else { // 3. 
Browser based thumbnail generation for paths. - if (dataOrStream instanceof Uint8Array) { - blob = new Blob([dataOrStream]); + // + // There are two reasons why we could get here: + // + // - We're running under Electron, but thumbnail generation is not + // available. This is currently only a specific scenario for image + // files on Windows. + // + // - We're running under the Electron, but the thumbnail generation + // otherwise failed for some exception. + // + // The fallback in this case involves reading the entire stream into + // memory, and passing that data across the IPC boundary in a single + // go (i.e. not in a streaming manner). This is risky for videos of + // unbounded sizes, plus we shouldn't even be getting here unless + // something went wrong. + // + // So instead of trying to cater for arbitrary exceptions, we only + // run this fallback to cover for the case where thumbnail + // generation was not available for an image file on Windows. + // If/when we add support of native thumbnailing on Windows too, + // this entire branch can be removed. + + if (fileTypeInfo.fileType == FILE_TYPE.IMAGE) { + const data = await readEntireStream(fileStream.stream); + blob = new Blob([data]); + + // The Readable stream cannot be read twice, so use the data + // directly for subsequent steps. + fileData = data; } else { - // Read the stream into memory. Don't try this fallback for huge - // files though lest we run out of memory. - if (fileSize < 100 * 1024 * 1024 /* 100 MB */) { - const data = await readEntireStream(dataOrStream.stream); - // The Readable stream cannot be read twice, so also - // overwrite the stream with the data we read. - dataOrStream = data; - blob = new Blob([data]); - } else { - // There isn't a normal scenario where this should happen. - // Case 1, should've already worked, and the only known - // reason it'd have been skipped is for image files on - // Windows, but those should be less than 100 MB. 
- // - // So don't risk running out of memory for a case we don't - // comprehend. - log.error( - `Not using browser based thumbnail generation fallback for large file at path ${fileOrPath}`, - ); - } + log.warn( + `Not using browser based thumbnail generation fallback for video at path ${uploadItem}`, + ); } } try { - thumbnail = await generateThumbnailWeb(blob, fileTypeInfo); + if (blob) + thumbnail = await generateThumbnailWeb(blob, fileTypeInfo); } catch (e) { log.error("Web thumbnail creation failed", e); } @@ -1004,7 +1086,7 @@ const withThumbnail = async ( } return { - filedata: dataOrStream, + fileStreamOrData: fileData ?? fileStream, thumbnail, hasStaticThumbnail, }; @@ -1029,7 +1111,7 @@ const encryptFile = async ( worker: Remote, ): Promise => { const { key: fileKey, file: encryptedFiledata } = await encryptFiledata( - file.filedata, + file.fileStreamOrData, worker, ); @@ -1071,15 +1153,15 @@ const encryptFile = async ( }; const encryptFiledata = async ( - filedata: Uint8Array | DataStream, + fileStreamOrData: FileStream | Uint8Array, worker: Remote, -): Promise> => - isDataStream(filedata) - ? await encryptFileStream(filedata, worker) - : await worker.encryptFile(filedata); +): Promise> => + fileStreamOrData instanceof Uint8Array + ? await worker.encryptFile(fileStreamOrData) + : await encryptFileStream(fileStreamOrData, worker); const encryptFileStream = async ( - fileData: DataStream, + fileData: FileStream, worker: Remote, ) => { const { stream, chunkCount } = fileData; @@ -1120,27 +1202,38 @@ const uploadToBucket = async ( try { let fileObjectKey: string = null; - if (isDataStream(file.file.encryptedData)) { + const encryptedData = file.file.encryptedData; + if ( + !(encryptedData instanceof Uint8Array) && + encryptedData.chunkCount >= multipartChunksPerPart + ) { + // We have a stream, and it is more than multipartChunksPerPart + // chunks long, so use a multipart upload to upload it. 
fileObjectKey = await uploadStreamUsingMultipart( file.localID, - file.file.encryptedData, + encryptedData, makeProgessTracker, isCFUploadProxyDisabled, abortIfCancelled, ); } else { + const data = + encryptedData instanceof Uint8Array + ? encryptedData + : await readEntireStream(encryptedData.stream); + const progressTracker = makeProgessTracker(file.localID); const fileUploadURL = await uploadService.getUploadURL(); if (!isCFUploadProxyDisabled) { fileObjectKey = await UploadHttpClient.putFileV2( fileUploadURL, - file.file.encryptedData as Uint8Array, + data, progressTracker, ); } else { fileObjectKey = await UploadHttpClient.putFile( fileUploadURL, - file.file.encryptedData as Uint8Array, + data, progressTracker, ); } @@ -1189,13 +1282,13 @@ interface PartEtag { async function uploadStreamUsingMultipart( fileLocalID: number, - dataStream: DataStream, + dataStream: EncryptedFileStream, makeProgessTracker: MakeProgressTracker, isCFUploadProxyDisabled: boolean, abortIfCancelled: () => void, ) { const uploadPartCount = Math.ceil( - dataStream.chunkCount / FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART, + dataStream.chunkCount / multipartChunksPerPart, ); const multipartUploadURLs = await uploadService.fetchMultipartUploadURLs(uploadPartCount); @@ -1255,7 +1348,7 @@ async function combineChunksToFormUploadPart( streamReader: ReadableStreamDefaultReader, ) { const combinedChunks = []; - for (let i = 0; i < FILE_CHUNKS_COMBINED_FOR_A_UPLOAD_PART; i++) { + for (let i = 0; i < multipartChunksPerPart; i++) { const { done, value: chunk } = await streamReader.read(); if (done) { break; diff --git a/web/apps/photos/src/services/watch.ts b/web/apps/photos/src/services/watch.ts index 1f60836c5..82d3b2f4e 100644 --- a/web/apps/photos/src/services/watch.ts +++ b/web/apps/photos/src/services/watch.ts @@ -15,12 +15,11 @@ import { ensureString } from "@/utils/ensure"; import { UPLOAD_RESULT } from "constants/upload"; import debounce from "debounce"; import uploadManager, { - type 
FileWithCollection,
+    type UploadItemWithCollection,
 } from "services/upload/uploadManager";
 import { Collection } from "types/collection";
 import { EncryptedEnteFile } from "types/file";
 import { groupFilesBasedOnCollectionID } from "utils/file";
-import { isHiddenFile } from "utils/upload";
 import { removeFromCollection } from "./collectionService";
 import { getLocalFiles } from "./fileService";
@@ -318,16 +317,17 @@ class FolderWatcher {
     }
 
     /**
-     * Callback invoked by the uploader whenever a file we requested to
+     * Callback invoked by the uploader whenever an item we requested to
      * {@link upload} gets uploaded.
      */
     async onFileUpload(
        fileUploadResult: UPLOAD_RESULT,
-        fileWithCollection: FileWithCollection,
+        item: UploadItemWithCollection,
         file: EncryptedEnteFile,
     ) {
-        // The files we get here will have fileWithCollection.file as a string,
-        // not as a File or a ElectronFile
+        // Re the usage of ensureString: For desktop watch, the only possibility
+        // for a UploadItem is for it to be a string (the absolute path to a
+        // file on disk).
if ( [ UPLOAD_RESULT.ADDED_SYMLINK, @@ -336,18 +336,18 @@ class FolderWatcher { UPLOAD_RESULT.ALREADY_UPLOADED, ].includes(fileUploadResult) ) { - if (fileWithCollection.isLivePhoto) { + if (item.isLivePhoto) { this.uploadedFileForPath.set( - ensureString(fileWithCollection.livePhotoAssets.image), + ensureString(item.livePhotoAssets.image), file, ); this.uploadedFileForPath.set( - ensureString(fileWithCollection.livePhotoAssets.video), + ensureString(item.livePhotoAssets.video), file, ); } else { this.uploadedFileForPath.set( - ensureString(fileWithCollection.fileOrPath), + ensureString(item.uploadItem), file, ); } @@ -356,17 +356,15 @@ class FolderWatcher { fileUploadResult, ) ) { - if (fileWithCollection.isLivePhoto) { + if (item.isLivePhoto) { this.unUploadableFilePaths.add( - ensureString(fileWithCollection.livePhotoAssets.image), + ensureString(item.livePhotoAssets.image), ); this.unUploadableFilePaths.add( - ensureString(fileWithCollection.livePhotoAssets.video), + ensureString(item.livePhotoAssets.video), ); } else { - this.unUploadableFilePaths.add( - ensureString(fileWithCollection.fileOrPath), - ); + this.unUploadableFilePaths.add(ensureString(item.uploadItem)); } } } @@ -376,7 +374,7 @@ class FolderWatcher { * {@link upload} get uploaded. 
*/ async allFileUploadsDone( - filesWithCollection: FileWithCollection[], + uploadItemsWithCollection: UploadItemWithCollection[], collections: Collection[], ) { const electron = ensureElectron(); @@ -385,14 +383,15 @@ class FolderWatcher { log.debug(() => JSON.stringify({ f: "watch/allFileUploadsDone", - filesWithCollection, + uploadItemsWithCollection, collections, watch, }), ); - const { syncedFiles, ignoredFiles } = - this.deduceSyncedAndIgnored(filesWithCollection); + const { syncedFiles, ignoredFiles } = this.deduceSyncedAndIgnored( + uploadItemsWithCollection, + ); if (syncedFiles.length > 0) await electron.watch.updateSyncedFiles( @@ -412,7 +411,9 @@ class FolderWatcher { this.debouncedRunNextEvent(); } - private deduceSyncedAndIgnored(filesWithCollection: FileWithCollection[]) { + private deduceSyncedAndIgnored( + uploadItemsWithCollection: UploadItemWithCollection[], + ) { const syncedFiles: FolderWatch["syncedFiles"] = []; const ignoredFiles: FolderWatch["ignoredFiles"] = []; @@ -431,14 +432,13 @@ class FolderWatcher { this.unUploadableFilePaths.delete(path); }; - for (const fileWithCollection of filesWithCollection) { - if (fileWithCollection.isLivePhoto) { - const imagePath = ensureString( - fileWithCollection.livePhotoAssets.image, - ); - const videoPath = ensureString( - fileWithCollection.livePhotoAssets.video, - ); + for (const item of uploadItemsWithCollection) { + // Re the usage of ensureString: For desktop watch, the only + // possibility for a UploadItem is for it to be a string (the + // absolute path to a file on disk). 
+ if (item.isLivePhoto) { + const imagePath = ensureString(item.livePhotoAssets.image); + const videoPath = ensureString(item.livePhotoAssets.video); const imageFile = this.uploadedFileForPath.get(imagePath); const videoFile = this.uploadedFileForPath.get(videoPath); @@ -454,7 +454,7 @@ class FolderWatcher { markIgnored(videoPath); } } else { - const path = ensureString(fileWithCollection.fileOrPath); + const path = ensureString(item.uploadItem); const file = this.uploadedFileForPath.get(path); if (file) { markSynced(file, path); @@ -596,6 +596,13 @@ const pathsToUpload = (paths: string[], watch: FolderWatch) => // Files that are on disk but not yet synced or ignored. .filter((path) => !isSyncedOrIgnoredPath(path, watch)); +/** + * Return true if the file at the given {@link path} is hidden. + * + * Hidden files are those whose names begin with a "." (dot). + */ +const isHiddenFile = (path: string) => basename(path).startsWith("."); + /** * Return the paths to previously synced files that are no longer on disk and so * must be removed from the Ente collection. @@ -611,7 +618,7 @@ const isSyncedOrIgnoredPath = (path: string, watch: FolderWatch) => const collectionNameForPath = (path: string, watch: FolderWatch) => watch.collectionMapping == "root" - ? dirname(watch.folderPath) + ? 
basename(watch.folderPath) : parentDirectoryName(path); const parentDirectoryName = (path: string) => basename(dirname(path)); diff --git a/web/apps/photos/src/utils/file/index.ts b/web/apps/photos/src/utils/file/index.ts index 5d7762abf..212b2efd3 100644 --- a/web/apps/photos/src/utils/file/index.ts +++ b/web/apps/photos/src/utils/file/index.ts @@ -116,6 +116,19 @@ export async function getUpdatedEXIFFileForDownload( } } +export function convertBytesToHumanReadable( + bytes: number, + precision = 2, +): string { + if (bytes === 0 || isNaN(bytes)) { + return "0 MB"; + } + + const i = Math.floor(Math.log(bytes) / Math.log(1024)); + const sizes = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]; + return (bytes / Math.pow(1024, i)).toFixed(precision) + " " + sizes[i]; +} + export async function downloadFile(file: EnteFile) { try { const fileReader = new FileReader(); @@ -288,7 +301,8 @@ export const getRenderableImage = async (fileName: string, imageBlob: Blob) => { const tempFile = new File([imageBlob], fileName); const fileTypeInfo = await detectFileTypeInfo(tempFile); log.debug( - () => `Need renderable image for ${JSON.stringify(fileTypeInfo)}`, + () => + `Need renderable image for ${JSON.stringify({ fileName, ...fileTypeInfo })}`, ); const { extension } = fileTypeInfo; @@ -305,7 +319,7 @@ export const getRenderableImage = async (fileName: string, imageBlob: Blob) => { try { return await nativeConvertToJPEG(imageBlob); } catch (e) { - if (e.message == CustomErrorMessage.NotAvailable) { + if (e.message.endsWith(CustomErrorMessage.NotAvailable)) { moduleState.isNativeJPEGConversionNotAvailable = true; } else { log.error("Native conversion to JPEG failed", e); diff --git a/web/apps/photos/src/utils/machineLearning/config.ts b/web/apps/photos/src/utils/machineLearning/config.ts index 4d2030ca3..30a65b8f1 100644 --- a/web/apps/photos/src/utils/machineLearning/config.ts +++ b/web/apps/photos/src/utils/machineLearning/config.ts @@ -10,6 +10,7 @@ import 
mlIDbStorage, { ML_SYNC_CONFIG_NAME, ML_SYNC_JOB_CONFIG_NAME, } from "utils/storage/mlIDbStorage"; +import { isInternalUser } from "utils/user"; export async function getMLSyncJobConfig() { return mlIDbStorage.getConfig( @@ -23,10 +24,15 @@ export async function getMLSyncConfig() { } export async function getMLSearchConfig() { - return mlIDbStorage.getConfig( - ML_SEARCH_CONFIG_NAME, - DEFAULT_ML_SEARCH_CONFIG, - ); + if (isInternalUser()) { + return mlIDbStorage.getConfig( + ML_SEARCH_CONFIG_NAME, + DEFAULT_ML_SEARCH_CONFIG, + ); + } + // Force disabled for everyone else while we finalize it to avoid redundant + // reindexing for users. + return DEFAULT_ML_SEARCH_CONFIG; } export async function updateMLSyncJobConfig(newConfig: JobConfig) { diff --git a/web/apps/photos/src/utils/native-fs.ts b/web/apps/photos/src/utils/native-fs.ts index 2ef896302..27ebdd1c1 100644 --- a/web/apps/photos/src/utils/native-fs.ts +++ b/web/apps/photos/src/utils/native-fs.ts @@ -1,5 +1,5 @@ /** - * @file Utilities for native filesystem access. + * @file Utilities for native file system access. * * While they don't have any direct dependencies to our desktop app, they were * written for use by the code that runs in our desktop app. diff --git a/web/apps/photos/src/utils/native-stream.ts b/web/apps/photos/src/utils/native-stream.ts index c1033545b..4ed9da753 100644 --- a/web/apps/photos/src/utils/native-stream.ts +++ b/web/apps/photos/src/utils/native-stream.ts @@ -2,12 +2,14 @@ * @file Streaming IPC communication with the Node.js layer of our desktop app. * * NOTE: These functions only work when we're running in our desktop app. + * + * See: [Note: IPC streams]. */ -import type { Electron } from "@/next/types/ipc"; +import type { Electron, ZipItem } from "@/next/types/ipc"; /** - * Stream the given file from the user's local filesystem. + * Stream the given file or zip entry from the user's local file system. 
* * This only works when we're running in our desktop app since it uses the * "stream://" protocol handler exposed by our custom code in the Node.js layer. @@ -16,8 +18,9 @@ import type { Electron } from "@/next/types/ipc"; * To avoid accidentally invoking it in a non-desktop app context, it requires * the {@link Electron} object as a parameter (even though it doesn't use it). * - * @param path The path on the file on the user's local filesystem whose - * contents we want to stream. + * @param pathOrZipItem Either the path on the file on the user's local file + * system whose contents we want to stream. Or a tuple containing the path to a + * zip file and the name of the entry within it. * * @return A ({@link Response}, size, lastModifiedMs) triple. * @@ -32,16 +35,24 @@ import type { Electron } from "@/next/types/ipc"; */ export const readStream = async ( _: Electron, - path: string, + pathOrZipItem: string | ZipItem, ): Promise<{ response: Response; size: number; lastModifiedMs: number }> => { - const req = new Request(`stream://read${path}`, { - method: "GET", - }); + let url: URL; + if (typeof pathOrZipItem == "string") { + const params = new URLSearchParams({ path: pathOrZipItem }); + url = new URL(`stream://read?${params.toString()}`); + } else { + const [zipPath, entryName] = pathOrZipItem; + const params = new URLSearchParams({ zipPath, entryName }); + url = new URL(`stream://read-zip?${params.toString()}`); + } + + const req = new Request(url, { method: "GET" }); const res = await fetch(req); if (!res.ok) throw new Error( - `Failed to read stream from ${path}: HTTP ${res.status}`, + `Failed to read stream from ${url}: HTTP ${res.status}`, ); const size = readNumericHeader(res, "Content-Length"); @@ -51,10 +62,11 @@ export const readStream = async ( }; const readNumericHeader = (res: Response, key: string) => { - const value = +res.headers[key]; + const valueText = res.headers.get(key); + const value = +valueText; if (isNaN(value)) throw new Error( - 
`Expected a numeric ${key} when reading a stream response: ${res}`, + `Expected a numeric ${key} when reading a stream response, instead got ${valueText}`, ); return value; }; @@ -78,26 +90,16 @@ export const writeStream = async ( path: string, stream: ReadableStream, ) => { - // TODO(MR): This doesn't currently work. - // - // Not sure what I'm doing wrong here; I've opened an issue upstream - // https://github.com/electron/electron/issues/41872 - // - // A gist with a minimal reproduction - // https://gist.github.com/mnvr/e08d9f4876fb8400b7615347b4d268eb - // - // Meanwhile, write the complete body in one go (this'll eventually run into - // memory failures with large files - just a temporary stopgap to get the - // code to work). + const params = new URLSearchParams({ path }); + const url = new URL(`stream://write?${params.toString()}`); - /* // The duplex parameter needs to be set to 'half' when streaming requests. // // Currently browsers, and specifically in our case, since this code runs // only within our desktop (Electron) app, Chromium, don't support 'full' // duplex mode (i.e. streaming both the request and the response). // https://developer.chrome.com/docs/capabilities/web-apis/fetch-streaming-requests - const req = new Request(`stream://write${path}`, { + const req = new Request(url, { // GET can't have a body method: "POST", body: stream, @@ -106,12 +108,6 @@ export const writeStream = async ( // https://github.com/node-fetch/node-fetch/issues/1769. 
duplex: "half", }); - */ - - const req = new Request(`stream://write${path}`, { - method: "POST", - body: await new Response(stream).blob(), - }); const res = await fetch(req); if (!res.ok) diff --git a/web/apps/photos/src/utils/storage/mlIDbStorage.ts b/web/apps/photos/src/utils/storage/mlIDbStorage.ts index 40e6dad66..766c3ac9a 100644 --- a/web/apps/photos/src/utils/storage/mlIDbStorage.ts +++ b/web/apps/photos/src/utils/storage/mlIDbStorage.ts @@ -144,7 +144,13 @@ class MLIDbStorage { .objectStore("configs") .add(DEFAULT_ML_SEARCH_CONFIG, ML_SEARCH_CONFIG_NAME); } + /* + This'll go in version 5. Note that version 4 was never released, + but it was in main for a while, so we'll just skip it to avoid + breaking the upgrade path for people who ran the mainline. + */ if (oldVersion < 4) { + /* try { await tx .objectStore("configs") @@ -163,8 +169,8 @@ class MLIDbStorage { // the shipped implementation should have a more // deterministic migration. } + */ } - log.info( `ML DB upgraded from version ${oldVersion} to version ${newVersion}`, ); diff --git a/web/apps/photos/src/utils/upload/index.ts b/web/apps/photos/src/utils/upload/index.ts deleted file mode 100644 index 7f81408d6..000000000 --- a/web/apps/photos/src/utils/upload/index.ts +++ /dev/null @@ -1,128 +0,0 @@ -import type { Metadata } from "@/media/types/file"; -import { basename, dirname } from "@/next/file"; -import { PICKED_UPLOAD_TYPE } from "constants/upload"; -import isElectron from "is-electron"; -import { exportMetadataDirectoryName } from "services/export"; -import { fopFileName } from "services/upload/uploadService"; - -export const hasFileHash = (file: Metadata) => - file.hash || (file.imageHash && file.videoHash); - -/** - * Return true if all the paths in the given list are items that belong to the - * same (arbitrary) directory. - * - * Empty list of paths is considered to be in the same directory. 
- */ -export const areAllInSameDirectory = (paths: string[]) => - new Set(paths.map(dirname)).size == 1; - -// This is used to prompt the user the make upload strategy choice -export interface ImportSuggestion { - rootFolderName: string; - hasNestedFolders: boolean; - hasRootLevelFileWithFolder: boolean; -} - -export const DEFAULT_IMPORT_SUGGESTION: ImportSuggestion = { - rootFolderName: "", - hasNestedFolders: false, - hasRootLevelFileWithFolder: false, -}; - -export function getImportSuggestion( - uploadType: PICKED_UPLOAD_TYPE, - paths: string[], -): ImportSuggestion { - if (isElectron() && uploadType === PICKED_UPLOAD_TYPE.FILES) { - return DEFAULT_IMPORT_SUGGESTION; - } - - const getCharCount = (str: string) => (str.match(/\//g) ?? []).length; - paths.sort((path1, path2) => getCharCount(path1) - getCharCount(path2)); - const firstPath = paths[0]; - const lastPath = paths[paths.length - 1]; - - const L = firstPath.length; - let i = 0; - const firstFileFolder = firstPath.substring(0, firstPath.lastIndexOf("/")); - const lastFileFolder = lastPath.substring(0, lastPath.lastIndexOf("/")); - - while (i < L && firstPath.charAt(i) === lastPath.charAt(i)) i++; - let commonPathPrefix = firstPath.substring(0, i); - - if (commonPathPrefix) { - commonPathPrefix = commonPathPrefix.substring( - 0, - commonPathPrefix.lastIndexOf("/"), - ); - if (commonPathPrefix) { - commonPathPrefix = commonPathPrefix.substring( - commonPathPrefix.lastIndexOf("/") + 1, - ); - } - } - return { - rootFolderName: commonPathPrefix || null, - hasNestedFolders: firstFileFolder !== lastFileFolder, - hasRootLevelFileWithFolder: firstFileFolder === "", - }; -} - -// This function groups files that are that have the same parent folder into collections -// For Example, for user files have a directory structure like this -// a -// / | \ -// b j c -// /|\ / \ -// e f g h i -// -// The files will grouped into 3 collections. 
-// [a => [j], -// b => [e,f,g], -// c => [h, i]] -export const groupFilesBasedOnParentFolder = ( - fileOrPaths: (File | string)[], -) => { - const result = new Map(); - for (const fileOrPath of fileOrPaths) { - const filePath = - /* TODO(MR): ElectronFile */ - typeof fileOrPath == "string" - ? fileOrPath - : (fileOrPath["path"] as string); - - let folderPath = filePath.substring(0, filePath.lastIndexOf("/")); - // If the parent folder of a file is "metadata" - // we consider it to be part of the parent folder - // For Eg,For FileList -> [a/x.png, a/metadata/x.png.json] - // they will both we grouped into the collection "a" - // This is cluster the metadata json files in the same collection as the file it is for - if (folderPath.endsWith(exportMetadataDirectoryName)) { - folderPath = folderPath.substring(0, folderPath.lastIndexOf("/")); - } - const folderName = folderPath.substring( - folderPath.lastIndexOf("/") + 1, - ); - if (!folderName) throw Error("Unexpected empty folder name"); - if (!result.has(folderName)) result.set(folderName, []); - result.get(folderName).push(fileOrPath); - } - return result; -}; - -/** - * Filter out hidden files from amongst {@link fileOrPaths}. - * - * Hidden files are those whose names begin with a "." (dot). - */ - -export const pruneHiddenFiles = (fileOrPaths: (File | string)[]) => - fileOrPaths.filter((f) => !fopFileName(f).startsWith(".")); - -/** - * Return true if the file at the given {@link path} is hidden. - * - * Hidden files are those whose names begin with a "." (dot). 
- */ -export const isHiddenFile = (path: string) => basename(path).startsWith("."); diff --git a/web/apps/photos/src/utils/upload/uploadRetrier.ts b/web/apps/photos/src/utils/upload/uploadRetrier.ts deleted file mode 100644 index ca2764f3f..000000000 --- a/web/apps/photos/src/utils/upload/uploadRetrier.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { wait } from "@ente/shared/utils"; - -const retrySleepTimeInMilliSeconds = [2000, 5000, 10000]; - -export async function retryHTTPCall( - func: () => Promise, - checkForBreakingError?: (error) => void, -): Promise { - const retrier = async ( - func: () => Promise, - attemptNumber: number = 0, - ) => { - try { - const resp = await func(); - return resp; - } catch (e) { - if (checkForBreakingError) { - checkForBreakingError(e); - } - if (attemptNumber < retrySleepTimeInMilliSeconds.length) { - await wait(retrySleepTimeInMilliSeconds[attemptNumber]); - return await retrier(func, attemptNumber + 1); - } else { - throw e; - } - } - }; - return await retrier(func); -} diff --git a/web/apps/photos/src/worker/ffmpeg.worker.ts b/web/apps/photos/src/worker/ffmpeg.worker.ts index 03893efba..946a2090f 100644 --- a/web/apps/photos/src/worker/ffmpeg.worker.ts +++ b/web/apps/photos/src/worker/ffmpeg.worker.ts @@ -62,12 +62,16 @@ const ffmpegExec = async ( const inputData = new Uint8Array(await blob.arrayBuffer()); try { - ffmpeg.FS("writeFile", inputPath, inputData); + const startTime = Date.now(); - log.debug(() => `[wasm] ffmpeg ${cmd.join(" ")}`); + ffmpeg.FS("writeFile", inputPath, inputData); await ffmpeg.run(...cmd); - return ffmpeg.FS("readFile", outputPath); + const result = ffmpeg.FS("readFile", outputPath); + + const ms = Math.round(Date.now() - startTime); + log.debug(() => `[wasm] ffmpeg ${cmd.join(" ")} (${ms} ms)`); + return result; } finally { try { ffmpeg.FS("unlink", inputPath); diff --git a/web/apps/photos/tests/zip-file-reading.test.ts b/web/apps/photos/tests/zip-file-reading.test.ts deleted file mode 100644 index 
ea7511d0b..000000000 --- a/web/apps/photos/tests/zip-file-reading.test.ts +++ /dev/null @@ -1,111 +0,0 @@ -import { getFileNameSize } from "@/next/file"; -import type { DataStream } from "@ente/shared/utils/data-stream"; -import { FILE_READER_CHUNK_SIZE, PICKED_UPLOAD_TYPE } from "constants/upload"; -import { getElectronFileStream, getFileStream } from "services/readerService"; -import { getImportSuggestion } from "utils/upload"; - -// This was for used to verify that converting from the browser readable stream -// to the node readable stream correctly handles files that align on the 4 MB -// data boundary. This expects a zip file containing random files of various -// sizes starting from 1M to 20M. -export const testZipFileReading = async () => { - try { - const electron = globalThis.electron; - if (!electron) { - console.log("testZipFileReading Check is for desktop only"); - return; - } - if (!process.env.NEXT_PUBLIC_FILE_READING_TEST_ZIP_PATH) { - throw Error( - "upload test failed NEXT_PUBLIC_FILE_READING_TEST_ZIP_PATH missing", - ); - } - const files = await electron.getElectronFilesFromGoogleZip( - process.env.NEXT_PUBLIC_FILE_READING_TEST_ZIP_PATH, - ); - if (!files?.length) { - throw Error( - `testZipFileReading Check failed ❌ - No files selected`, - ); - } - console.log("test zip file reading check started"); - let i = 0; - for (const file of files) { - i++; - let filedata: DataStream; - if (file instanceof File) { - filedata = getFileStream(file, FILE_READER_CHUNK_SIZE); - } else { - filedata = await getElectronFileStream( - file, - FILE_READER_CHUNK_SIZE, - ); - } - const streamReader = filedata.stream.getReader(); - for (let i = 0; i < filedata.chunkCount; i++) { - const { done } = await streamReader.read(); - if (done) { - throw Error( - `testZipFileReading Check failed ❌ - ${getFileNameSize( - file, - )} less than expected chunks, expected: ${ - filedata.chunkCount - }, got ${i - 1}`, - ); - } - } - const { done } = await streamReader.read(); - - if 
(!done) { - throw Error( - `testZipFileReading Check failed ❌ - ${getFileNameSize( - file, - )} more than expected chunks, expected: ${ - filedata.chunkCount - }`, - ); - } - console.log(`${i}/${files.length} passed ✅`); - } - console.log("test zip file reading check passed ✅"); - } catch (e) { - console.log(e); - } -}; - -// This was used when fixing a bug around handling a zip file that has a photo -// at the root. -export const testZipWithRootFileReadingTest = async () => { - try { - const electron = globalThis.electron; - if (!electron) { - console.log("testZipFileReading Check is for desktop only"); - return; - } - if (!process.env.NEXT_PUBLIC_ZIP_WITH_ROOT_FILE_PATH) { - throw Error( - "upload test failed NEXT_PUBLIC_ZIP_WITH_ROOT_FILE_PATH missing", - ); - } - const files = await electron.getElectronFilesFromGoogleZip( - process.env.NEXT_PUBLIC_ZIP_WITH_ROOT_FILE_PATH, - ); - - const importSuggestion = getImportSuggestion( - PICKED_UPLOAD_TYPE.ZIPS, - files.map((file) => file["path"]), - ); - if (!importSuggestion.rootFolderName) { - throw Error( - `testZipWithRootFileReadingTest Check failed ❌ - rootFolderName is missing`, - ); - } - console.log("testZipWithRootFileReadingTest passed ✅"); - } catch (e) { - console.log(e); - } -}; diff --git a/web/apps/staff/src/App.tsx b/web/apps/staff/src/App.tsx index f8984fecb..01d79b18c 100644 --- a/web/apps/staff/src/App.tsx +++ b/web/apps/staff/src/App.tsx @@ -9,7 +9,7 @@ export const App: React.FC = () => { .then((userDetails) => { console.log("Fetched user details", userDetails); }) - .catch((e) => { + .catch((e: unknown) => { console.error("Failed to fetch user details", e); }); }; diff --git a/web/docs/storage.md b/web/docs/storage.md index d01654b23..9f19a6a46 100644 --- a/web/docs/storage.md +++ b/web/docs/storage.md @@ -34,6 +34,6 @@ meant for larger, tabular data. OPFS is used for caching entire files when we're running under Electron (the Web Cache API is used in the browser). 
-As it name suggests, it is an entire filesystem, private for us ("origin"). In +As it name suggests, it is an entire file system, private for us ("origin"). In is not undbounded though, and the storage is not guaranteed to be persistent (at least with the APIs we use), hence the cache designation. diff --git a/web/package.json b/web/package.json index 2d5919eb1..647ee3ba3 100644 --- a/web/package.json +++ b/web/package.json @@ -27,8 +27,8 @@ "dev:payments": "yarn workspace payments dev", "dev:photos": "yarn workspace photos next dev", "dev:staff": "yarn workspace staff dev", - "lint": "yarn prettier --check . && yarn workspaces run eslint --report-unused-disable-directives .", - "lint-fix": "yarn prettier --write . && yarn workspaces run eslint --fix .", + "lint": "yarn prettier --check --log-level warn . && yarn workspaces run eslint --report-unused-disable-directives .", + "lint-fix": "yarn prettier --write --log-level warn . && yarn workspaces run eslint --fix .", "preview": "yarn preview:photos", "preview:accounts": "yarn build:accounts && python3 -m http.server -d apps/accounts/out 3001", "preview:auth": "yarn build:auth && python3 -m http.server -d apps/auth/out 3000", diff --git a/web/packages/accounts/services/user.ts b/web/packages/accounts/services/user.ts index fb0e1c929..8f6d6609a 100644 --- a/web/packages/accounts/services/user.ts +++ b/web/packages/accounts/services/user.ts @@ -40,10 +40,18 @@ export const logoutUser = async () => { } catch (e) { log.error("Ignoring error when clearing files", e); } - try { - globalThis.electron?.clearStores(); - } catch (e) { - log.error("Ignoring error when clearing electron stores", e); + const electron = globalThis.electron; + if (electron) { + try { + await electron.watch.reset(); + } catch (e) { + log.error("Ignoring error when resetting native folder watches", e); + } + try { + await electron.clearStores(); + } catch (e) { + log.error("Ignoring error when clearing native stores", e); + } } try { 
eventBus.emit(Events.LOGOUT); diff --git a/web/packages/build-config/eslintrc-base.js b/web/packages/build-config/eslintrc-base.js index b302be36d..3e65638c1 100644 --- a/web/packages/build-config/eslintrc-base.js +++ b/web/packages/build-config/eslintrc-base.js @@ -10,4 +10,20 @@ module.exports = { parserOptions: { project: true }, parser: "@typescript-eslint/parser", ignorePatterns: [".eslintrc.js"], + rules: { + /* Allow numbers to be used in template literals */ + "@typescript-eslint/restrict-template-expressions": [ + "error", + { + allowNumber: true, + }, + ], + /* Allow void expressions as the entire body of an arrow function */ + "@typescript-eslint/no-confusing-void-expression": [ + "error", + { + ignoreArrowShorthand: true, + }, + ], + }, }; diff --git a/web/packages/media/file.ts b/web/packages/media/file.ts new file mode 100644 index 000000000..c84050049 --- /dev/null +++ b/web/packages/media/file.ts @@ -0,0 +1,4 @@ +import type { Metadata } from "./types/file"; + +export const hasFileHash = (file: Metadata) => + !!file.hash || (!!file.imageHash && !!file.videoHash); diff --git a/web/packages/media/live-photo.ts b/web/packages/media/live-photo.ts index 5cf0291fa..35a186a41 100644 --- a/web/packages/media/live-photo.ts +++ b/web/packages/media/live-photo.ts @@ -110,6 +110,14 @@ export const decodeLivePhoto = async ( return { imageFileName, imageData, videoFileName, videoData }; }; +/** Variant of {@link LivePhoto}, but one that allows files and data. */ +interface EncodeLivePhotoInput { + imageFileName: string; + imageFileOrData: File | Uint8Array; + videoFileName: string; + videoFileOrData: File | Uint8Array; +} + /** * Return a binary serialized representation of a live photo. 
* @@ -122,15 +130,15 @@ export const decodeLivePhoto = async ( */ export const encodeLivePhoto = async ({ imageFileName, - imageData, + imageFileOrData, videoFileName, - videoData, -}: LivePhoto) => { + videoFileOrData, +}: EncodeLivePhotoInput) => { const [, imageExt] = nameAndExtension(imageFileName); const [, videoExt] = nameAndExtension(videoFileName); const zip = new JSZip(); - zip.file(fileNameFromComponents(["image", imageExt]), imageData); - zip.file(fileNameFromComponents(["video", videoExt]), videoData); + zip.file(fileNameFromComponents(["image", imageExt]), imageFileOrData); + zip.file(fileNameFromComponents(["video", videoExt]), videoFileOrData); return await zip.generateAsync({ type: "uint8array" }); }; diff --git a/web/packages/next/blob-cache.ts b/web/packages/next/blob-cache.ts index 0e092fed6..e6c3734df 100644 --- a/web/packages/next/blob-cache.ts +++ b/web/packages/next/blob-cache.ts @@ -50,8 +50,6 @@ export type BlobCacheNamespace = (typeof blobCacheNames)[number]; * ([the WebKit bug](https://bugs.webkit.org/show_bug.cgi?id=231706)), so it's * not trivial to use this as a full on replacement of the Web Cache in the * browser. So for now we go with this split implementation. - * - * See also: [Note: Increased disk cache for the desktop app]. */ export interface BlobCache { /** diff --git a/web/packages/next/file.ts b/web/packages/next/file.ts index 56d27b79b..bd2c04393 100644 --- a/web/packages/next/file.ts +++ b/web/packages/next/file.ts @@ -1,5 +1,3 @@ -import type { ElectronFile } from "./types/file"; - /** * The two parts of a file name - the name itself, and an (optional) extension. * @@ -82,27 +80,3 @@ export const dirname = (path: string) => { } return pathComponents.join("/"); }; - -/** - * Return a short description of the given {@link fileOrPath} suitable for - * helping identify it in log messages. - */ -export const fopLabel = (fileOrPath: File | string) => - fileOrPath instanceof File ? 
`File(${fileOrPath.name})` : fileOrPath; - -export function getFileNameSize(file: File | ElectronFile) { - return `${file.name}_${convertBytesToHumanReadable(file.size)}`; -} - -export function convertBytesToHumanReadable( - bytes: number, - precision = 2, -): string { - if (bytes === 0 || isNaN(bytes)) { - return "0 MB"; - } - - const i = Math.floor(Math.log(bytes) / Math.log(1024)); - const sizes = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]; - return (bytes / Math.pow(1024, i)).toFixed(precision) + " " + sizes[i]; -} diff --git a/web/packages/next/locales/bg-BG/translation.json b/web/packages/next/locales/bg-BG/translation.json index 1661e8fac..28689ba49 100644 --- a/web/packages/next/locales/bg-BG/translation.json +++ b/web/packages/next/locales/bg-BG/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/de-DE/translation.json b/web/packages/next/locales/de-DE/translation.json index de7980f3e..a0ee15a7c 100644 --- a/web/packages/next/locales/de-DE/translation.json +++ b/web/packages/next/locales/de-DE/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "Ein Fehler trat auf beim Anmelden mit dem Passkey auf.", "TRY_AGAIN": "Erneut versuchen", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Folge den Schritten in deinem Browser, um mit dem Anmelden fortzufahren.", - "LOGIN_WITH_PASSKEY": "Mit Passkey anmelden" + "LOGIN_WITH_PASSKEY": "Mit Passkey anmelden", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/en-US/translation.json b/web/packages/next/locales/en-US/translation.json index 5fdb380d5..b3debe5aa 100644 --- a/web/packages/next/locales/en-US/translation.json +++ b/web/packages/next/locales/en-US/translation.json @@ 
-621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "An error occurred while logging in with passkey.", "TRY_AGAIN": "Try again", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Follow the steps from your browser to continue logging in.", - "LOGIN_WITH_PASSKEY": "Login with passkey" + "LOGIN_WITH_PASSKEY": "Login with passkey", + "autogenerated_first_album_name": "My First Album", + "autogenerated_default_album_name": "New Album" } diff --git a/web/packages/next/locales/es-ES/translation.json b/web/packages/next/locales/es-ES/translation.json index 543551457..a01d322b7 100644 --- a/web/packages/next/locales/es-ES/translation.json +++ b/web/packages/next/locales/es-ES/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/fa-IR/translation.json b/web/packages/next/locales/fa-IR/translation.json index 9dc5ccb7a..0c3749d13 100644 --- a/web/packages/next/locales/fa-IR/translation.json +++ b/web/packages/next/locales/fa-IR/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/fi-FI/translation.json b/web/packages/next/locales/fi-FI/translation.json index 2d2a56b54..d945fcde3 100644 --- a/web/packages/next/locales/fi-FI/translation.json +++ b/web/packages/next/locales/fi-FI/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git 
a/web/packages/next/locales/fr-FR/translation.json b/web/packages/next/locales/fr-FR/translation.json index 308728b98..f3113202f 100644 --- a/web/packages/next/locales/fr-FR/translation.json +++ b/web/packages/next/locales/fr-FR/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "Une erreur s'est produite lors de la connexion avec le code d'accès.", "TRY_AGAIN": "Réessayer", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Suivez les étapes de votre navigateur pour poursuivre la connexion.", - "LOGIN_WITH_PASSKEY": "Se connecter avec le code d'accès" + "LOGIN_WITH_PASSKEY": "Se connecter avec le code d'accès", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/it-IT/translation.json b/web/packages/next/locales/it-IT/translation.json index b66131ad7..bf555911c 100644 --- a/web/packages/next/locales/it-IT/translation.json +++ b/web/packages/next/locales/it-IT/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/ko-KR/translation.json b/web/packages/next/locales/ko-KR/translation.json index 63b6491de..aee2c6cd5 100644 --- a/web/packages/next/locales/ko-KR/translation.json +++ b/web/packages/next/locales/ko-KR/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/nl-NL/translation.json b/web/packages/next/locales/nl-NL/translation.json index c12a38f8b..62b846b14 100644 --- a/web/packages/next/locales/nl-NL/translation.json +++ b/web/packages/next/locales/nl-NL/translation.json @@ -621,5 
+621,7 @@ "PASSKEY_LOGIN_ERRORED": "Er is een fout opgetreden tijdens het inloggen met een passkey.", "TRY_AGAIN": "Probeer opnieuw", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Volg de stappen van je browser om door te gaan met inloggen.", - "LOGIN_WITH_PASSKEY": "Inloggen met passkey" + "LOGIN_WITH_PASSKEY": "Inloggen met passkey", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/pt-BR/translation.json b/web/packages/next/locales/pt-BR/translation.json index 5749591d1..9fc00517c 100644 --- a/web/packages/next/locales/pt-BR/translation.json +++ b/web/packages/next/locales/pt-BR/translation.json @@ -239,7 +239,7 @@ "ENABLE_MAPS": "Habilitar mapa?", "ENABLE_MAP": "Habilitar mapa", "DISABLE_MAPS": "Desativar Mapas?", - "ENABLE_MAP_DESCRIPTION": "Isto mostrará suas fotos em um mapa do mundo.

Este mapa é hospedado pelo OpenStreetMap , e os exatos locais de suas fotos nunca são compartilhados.

Você pode desativar esse recurso a qualquer momento nas Configurações.

", + "ENABLE_MAP_DESCRIPTION": "

Isto mostrará suas fotos em um mapa do mundo.

Este mapa é hospedado pelo OpenStreetMap, e os exatos locais de suas fotos nunca são compartilhados.

Você pode desativar esse recurso a qualquer momento nas Configurações.

", "DISABLE_MAP_DESCRIPTION": "

Isto irá desativar a exibição de suas fotos em um mapa mundial.

Você pode ativar este recurso a qualquer momento nas Configurações.

", "DISABLE_MAP": "Desabilitar mapa", "DETAILS": "Detalhes", @@ -380,14 +380,14 @@ "LINK_EXPIRED_MESSAGE": "Este link expirou ou foi desativado!", "MANAGE_LINK": "Gerenciar link", "LINK_TOO_MANY_REQUESTS": "Desculpe, este álbum foi visualizado em muitos dispositivos!", - "FILE_DOWNLOAD": "Permitir transferências", + "FILE_DOWNLOAD": "Permitir downloads", "LINK_PASSWORD_LOCK": "Bloqueio de senha", "PUBLIC_COLLECT": "Permitir adicionar fotos", "LINK_DEVICE_LIMIT": "Limite de dispositivos", "NO_DEVICE_LIMIT": "Nenhum", "LINK_EXPIRY": "Expiração do link", "NEVER": "Nunca", - "DISABLE_FILE_DOWNLOAD": "Desabilitar transferência", + "DISABLE_FILE_DOWNLOAD": "Desabilitar download", "DISABLE_FILE_DOWNLOAD_MESSAGE": "

Tem certeza de que deseja desativar o botão de download para arquivos?

Os visualizadores ainda podem capturar imagens da tela ou salvar uma cópia de suas fotos usando ferramentas externas.

", "SHARED_USING": "Compartilhar usando ", "SHARING_REFERRAL_CODE": "Use o código {{referralCode}} para obter 10 GB de graça", @@ -408,8 +408,8 @@ "STOP_ALL_UPLOADS_MESSAGE": "Tem certeza que deseja parar todos os envios em andamento?", "STOP_UPLOADS_HEADER": "Parar envios?", "YES_STOP_UPLOADS": "Sim, parar envios", - "STOP_DOWNLOADS_HEADER": "Parar transferências?", - "YES_STOP_DOWNLOADS": "Sim, parar transferências", + "STOP_DOWNLOADS_HEADER": "Parar downloads?", + "YES_STOP_DOWNLOADS": "Sim, parar downloads", "STOP_ALL_DOWNLOADS_MESSAGE": "Tem certeza que deseja parar todos as transferências em andamento?", "albums_one": "1 Álbum", "albums_other": "{{count, number}} Álbuns", @@ -556,8 +556,8 @@ "SELECT_COLLECTION": "Selecionar álbum", "PIN_ALBUM": "Fixar álbum", "UNPIN_ALBUM": "Desafixar álbum", - "DOWNLOAD_COMPLETE": "Transferência concluída", - "DOWNLOADING_COLLECTION": "Transferindo {{name}}", + "DOWNLOAD_COMPLETE": "Download concluído", + "DOWNLOADING_COLLECTION": "Fazendo download de {{name}}", "DOWNLOAD_FAILED": "Falha ao baixar", "DOWNLOAD_PROGRESS": "{{progress.current}} / {{progress.total}} arquivos", "CHRISTMAS": "Natal", @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "Ocorreu um erro ao entrar com a chave de acesso.", "TRY_AGAIN": "Tente novamente", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Siga os passos do seu navegador para continuar acessando.", - "LOGIN_WITH_PASSKEY": "Entrar com a chave de acesso" + "LOGIN_WITH_PASSKEY": "Entrar com a chave de acesso", + "autogenerated_first_album_name": "Meu Primeiro Álbum", + "autogenerated_default_album_name": "Novo Álbum" } diff --git a/web/packages/next/locales/pt-PT/translation.json b/web/packages/next/locales/pt-PT/translation.json index 20ec4d9ea..f6980b56e 100644 --- a/web/packages/next/locales/pt-PT/translation.json +++ b/web/packages/next/locales/pt-PT/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - 
"LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/ru-RU/translation.json b/web/packages/next/locales/ru-RU/translation.json index 95c4f6c58..5d036c6c8 100644 --- a/web/packages/next/locales/ru-RU/translation.json +++ b/web/packages/next/locales/ru-RU/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "При входе в систему с помощью пароля произошла ошибка.", "TRY_AGAIN": "Пробовать снова", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Следуйте инструкциям в вашем браузере, чтобы продолжить вход в систему.", - "LOGIN_WITH_PASSKEY": "Войдите в систему с помощью пароля" + "LOGIN_WITH_PASSKEY": "Войдите в систему с помощью пароля", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/sv-SE/translation.json b/web/packages/next/locales/sv-SE/translation.json index 77462524d..ba6ecee09 100644 --- a/web/packages/next/locales/sv-SE/translation.json +++ b/web/packages/next/locales/sv-SE/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/th-TH/translation.json b/web/packages/next/locales/th-TH/translation.json index 2d2a56b54..d945fcde3 100644 --- a/web/packages/next/locales/th-TH/translation.json +++ b/web/packages/next/locales/th-TH/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/tr-TR/translation.json b/web/packages/next/locales/tr-TR/translation.json index 
2d2a56b54..d945fcde3 100644 --- a/web/packages/next/locales/tr-TR/translation.json +++ b/web/packages/next/locales/tr-TR/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "", "TRY_AGAIN": "", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "", - "LOGIN_WITH_PASSKEY": "" + "LOGIN_WITH_PASSKEY": "", + "autogenerated_first_album_name": "", + "autogenerated_default_album_name": "" } diff --git a/web/packages/next/locales/zh-CN/translation.json b/web/packages/next/locales/zh-CN/translation.json index 7a76b58b6..c67018aaa 100644 --- a/web/packages/next/locales/zh-CN/translation.json +++ b/web/packages/next/locales/zh-CN/translation.json @@ -621,5 +621,7 @@ "PASSKEY_LOGIN_ERRORED": "使用通行密钥登录时出错。", "TRY_AGAIN": "重试", "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "按照浏览器中提示的步骤继续登录。", - "LOGIN_WITH_PASSKEY": "使用通行密钥来登录" + "LOGIN_WITH_PASSKEY": "使用通行密钥来登录", + "autogenerated_first_album_name": "我的第一个相册", + "autogenerated_default_album_name": "新建相册" } diff --git a/web/packages/next/log.ts b/web/packages/next/log.ts index 0f49fea6d..f9ef7e549 100644 --- a/web/packages/next/log.ts +++ b/web/packages/next/log.ts @@ -17,7 +17,7 @@ export const logToDisk = (message: string) => { }; const workerLogToDisk = (message: string) => { - workerBridge.logToDisk(message).catch((e) => { + workerBridge.logToDisk(message).catch((e: unknown) => { console.error( "Failed to log a message from worker", e, @@ -34,7 +34,7 @@ const messageWithError = (message: string, e?: unknown) => { if (e instanceof Error) { // In practice, we expect ourselves to be called with Error objects, so // this is the happy path so to say. - return `${e.name}: ${e.message}\n${e.stack}`; + es = [`${e.name}: ${e.message}`, e.stack].filter((x) => x).join("\n"); } else { // For the rest rare cases, use the default string serialization of e. 
es = String(e); diff --git a/web/packages/next/types/file.ts b/web/packages/next/types/file.ts deleted file mode 100644 index 75641e3a2..000000000 --- a/web/packages/next/types/file.ts +++ /dev/null @@ -1,25 +0,0 @@ -/* - * ElectronFile is a custom interface that is used to represent - * any file on disk as a File-like object in the Electron desktop app. - * - * This was added to support the auto-resuming of failed uploads - * which needed absolute paths to the files which the - * normal File interface does not provide. - */ -export interface ElectronFile { - name: string; - path: string; - size: number; - lastModified: number; - stream: () => Promise>; - blob: () => Promise; - arrayBuffer: () => Promise; -} - -export interface EventQueueItem { - type: "upload" | "trash"; - folderPath: string; - collectionName?: string; - paths?: string[]; - files?: ElectronFile[]; -} diff --git a/web/packages/next/types/ipc.ts b/web/packages/next/types/ipc.ts index 91a9927f2..4b05838fa 100644 --- a/web/packages/next/types/ipc.ts +++ b/web/packages/next/types/ipc.ts @@ -3,8 +3,6 @@ // // See [Note: types.ts <-> preload.ts <-> ipc.ts] -import type { ElectronFile } from "./file"; - /** * Extra APIs provided by our Node.js layer when our code is running inside our * desktop (Electron) app. @@ -51,6 +49,20 @@ export interface Electron { */ openLogDirectory: () => Promise; + /** + * Ask the user to select a directory on their local file system, and return + * it path. + * + * The returned path is guaranteed to use POSIX separators ('/'). + * + * We don't strictly need IPC for this, we can use a hidden element + * and trigger its click for the same behaviour (as we do for the + * `useFileInput` hook that we use for uploads). However, it's a bit + * cumbersome, and we anyways will need to IPC to get back its full path, so + * it is just convenient to expose this direct method. + */ + selectDirectory: () => Promise; + /** * Clear any stored data. 
* @@ -122,18 +134,20 @@ export interface Electron { */ skipAppUpdate: (version: string) => void; + // - FS + /** - * A subset of filesystem access APIs. + * A subset of file system access APIs. * * The renderer process, being a web process, does not have full access to - * the local filesystem apart from files explicitly dragged and dropped (or + * the local file system apart from files explicitly dragged and dropped (or * selected by the user in a native file open dialog). * - * The main process, however, has full filesystem access (limited only be an + * The main process, however, has full fil system access (limited only be an * OS level sandbox on the entire process). * * When we're running in the desktop app, we want to better utilize the - * local filesystem access to provide more integrated features to the user - + * local file system access to provide more integrated features to the user; * things that are not currently possible using web technologies. For * example, continuous exports to an arbitrary user chosen location on disk, * or watching some folders for changes and syncing them automatically. @@ -189,11 +203,6 @@ export interface Electron { * directory. */ isDir: (dirPath: string) => Promise; - - /** - * Return the size in bytes of the file at {@link path}. - */ - size: (path: string) => Promise; }; // - Conversion @@ -226,22 +235,27 @@ export interface Electron { * not yet possible, this function will throw an error with the * {@link CustomErrorMessage.NotAvailable} message. * - * @param dataOrPath The raw image data (the contents of the image file), or - * the path to the image file, whose thumbnail we want to generate. + * @param dataOrPathOrZipItem The file whose thumbnail we want to generate. + * It can be provided as raw image data (the contents of the image file), or + * the path to the image file, or a tuple containing the path of the zip + * file along with the name of an entry in it. 
+ * * @param maxDimension The maximum width or height of the generated * thumbnail. + * * @param maxSize Maximum size (in bytes) of the generated thumbnail. * * @returns JPEG data of the generated thumbnail. */ generateImageThumbnail: ( - dataOrPath: Uint8Array | string, + dataOrPathOrZipItem: Uint8Array | string | ZipItem, maxDimension: number, maxSize: number, ) => Promise; /** - * Execute a FFmpeg {@link command} on the given {@link dataOrPath}. + * Execute a FFmpeg {@link command} on the given + * {@link dataOrPathOrZipItem}. * * This executes the command using a FFmpeg executable we bundle with our * desktop app. We also have a wasm FFmpeg wasm implementation that we use @@ -254,10 +268,11 @@ export interface Electron { * (respectively {@link inputPathPlaceholder}, * {@link outputPathPlaceholder}, {@link ffmpegPathPlaceholder}). * - * @param dataOrPath The bytes of the input file, or the path to the input - * file on the user's local disk. In both cases, the data gets serialized to - * a temporary file, and then that path gets substituted in the FFmpeg - * {@link command} in lieu of {@link inputPathPlaceholder}. + * @param dataOrPathOrZipItem The bytes of the input file, or the path to + * the input file on the user's local disk, or the path to a zip file on the + * user's disk and the name of an entry in it. In all three cases, the data + * gets serialized to a temporary file, and then that path gets substituted + * in the FFmpeg {@link command} in lieu of {@link inputPathPlaceholder}. * * @param outputFileExtension The extension (without the dot, e.g. 
"jpeg") * to use for the output file that we ask FFmpeg to create in @@ -273,7 +288,7 @@ export interface Electron { */ ffmpegExec: ( command: string[], - dataOrPath: Uint8Array | string, + dataOrPathOrZipItem: Uint8Array | string | ZipItem, outputFileExtension: string, timeoutMS: number, ) => Promise; @@ -331,20 +346,6 @@ export interface Electron { */ faceEmbedding: (input: Float32Array) => Promise; - // - File selection - // TODO: Deprecated - use dialogs on the renderer process itself - - selectDirectory: () => Promise; - - showUploadFilesDialog: () => Promise; - - showUploadDirsDialog: () => Promise; - - showUploadZipDialog: () => Promise<{ - zipPaths: string[]; - files: ElectronFile[]; - }>; - // - Watch /** @@ -461,49 +462,95 @@ export interface Electron { * The returned paths are guaranteed to use POSIX separators ('/'). */ findFiles: (folderPath: string) => Promise; + + /** + * Stop watching all existing folder watches and remove any callbacks. + * + * This function is meant to be called when the user logs out. It stops + * all existing folder watches and forgets about any "on*" callback + * functions that have been registered. + * + * The persisted state itself gets cleared via {@link clearStores}. + */ + reset: () => Promise; }; // - Upload + /** + * Return the file system path that this File object points to. + * + * This method is a bit different from the other methods on the Electron + * object in the sense that there is no actual IPC happening - the + * implementation of this method is completely in the preload script. Thus + * we can pass it an otherwise unserializable File object. + * + * Consequently, it is also _not_ async. + */ + pathForFile: (file: File) => string; + + /** + * Get the list of files that are present in the given zip file. + * + * @param zipPath The path of the zip file on the user's local file system. + * + * @returns A list of (zipPath, entryName) tuples, one for each file in the + * given zip. 
Directories are traversed recursively, but the directory + * entries themselves will be excluded from the returned list. File entries + * whose file name begins with a dot (i.e. "hidden" files) will also be + * excluded. + * + * To read the contents of the files themselves, see [Note: IPC streams]. + */ + listZipItems: (zipPath: string) => Promise; + + /** + * Return the size in bytes of the file at the given path or of a particular + * entry within a zip file. + */ + pathOrZipItemSize: (pathOrZipItem: string | ZipItem) => Promise; + /** * Return any pending uploads that were previously enqueued but haven't yet * been completed. * - * The state of pending uploads is persisted in the Node.js layer. + * Return undefined if there are no such pending uploads. * - * Note that we might have both outstanding zip and regular file uploads at - * the same time. In such cases, the zip file ones get precedence. + * The state of pending uploads is persisted in the Node.js layer. Or app + * start, we read in this data from the Node.js layer via this IPC method. + * The Node.js code returns the persisted data after filtering out any files + * that no longer exist on disk. */ pendingUploads: () => Promise; /** - * Set or clear the name of the collection where the pending upload is - * directed to. + * Set the state of pending uploads. + * + * - Typically, this would be called at the start of an upload. + * + * - Thereafter, as each item gets uploaded one by one, we'd call + * {@link markUploadedFiles} or {@link markUploadedZipItems}. + * + * - Finally, once the upload completes (or gets cancelled), we'd call + * {@link clearPendingUploads} to complete the circle. */ - setPendingUploadCollection: (collectionName: string) => Promise; + setPendingUploads: (pendingUploads: PendingUploads) => Promise; /** - * Update the list of files (of {@link type}) associated with the pending - * upload. + * Mark the given files (given by their {@link paths}) as having been + * uploaded. 
*/ - setPendingUploadFiles: ( - type: PendingUploads["type"], - filePaths: string[], - ) => Promise; + markUploadedFiles: (paths: PendingUploads["filePaths"]) => Promise; - /* - * TODO: AUDIT below this - Some of the types we use below are not copyable - * across process boundaries, and such functions will (expectedly) fail at - * runtime. For such functions, find an efficient alternative or refactor - * the dataflow. + /** + * Mark the given {@link ZipItem}s as having been uploaded. */ + markUploadedZipItems: (items: PendingUploads["zipItems"]) => Promise; - // - - - getElectronFilesFromGoogleZip: ( - filePath: string, - ) => Promise; - getDirFiles: (dirPath: string) => Promise; + /** + * Clear any pending uploads. + */ + clearPendingUploads: () => Promise; } /** @@ -589,14 +636,56 @@ export interface FolderWatchSyncedFile { } /** - * When the user starts an upload, we remember the files they'd selected or drag - * and dropped so that we can resume (if needed) when the app restarts after - * being stopped in the middle of the uploads. + * A particular file within a zip file. + * + * When the user uploads a zip file, we create a "zip item" for each entry + * within the zip file. Each such entry is a tuple containing the (path to a zip + * file itself, and the name of an entry within it). + * + * The name of the entry is not just the file name, but rather is the full path + * of the file within the zip. That is, each entry name uniquely identifies a + * particular file within the given zip. + * + * When `entryName` is a path within a nested directory, it is guaranteed to use + * the POSIX path separator ("/") since that is the path separator required by + * the ZIP format itself + * + * > 4.4.17.1 The name of the file, with optional relative path. + * > + * > The path stored MUST NOT contain a drive or device letter, or a leading + * > slash. 
All slashes MUST be forward slashes '/' as opposed to backwards + * > slashes '\' for compatibility with Amiga and UNIX file systems etc. If + * > input came from standard input, there is no file name field. + * > + * > https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT + */ +export type ZipItem = [zipPath: string, entryName: string]; + +/** + * State about pending and in-progress uploads. + * + * When the user starts an upload, we remember the files they'd selected (or + * drag-dropped) so that we can resume if they restart the app in before the + * uploads have been completed. This state is kept on the Electron side, and + * this object is the IPC intermediary. */ export interface PendingUploads { - /** The collection to which we're uploading */ - collectionName: string; - /* The upload can be either of a Google Takeout zip, or regular files */ - type: "files" | "zips"; - files: ElectronFile[]; + /** + * The collection to which we're uploading, or the root collection. + * + * This is name of the collection (when uploading to a singular collection) + * or the root collection (when uploading to separate * albums) to which we + * these uploads are meant to go to. See {@link CollectionMapping}. + * + * It will not be set if we're just uploading standalone files. + */ + collectionName?: string; + /** + * Paths of regular files that need to be uploaded. + */ + filePaths: string[]; + /** + * {@link ZipItem} (zip path and entry name) that need to be uploaded. 
+ */ + zipItems: ZipItem[]; } diff --git a/web/packages/shared/components/Navbar/base.tsx b/web/packages/shared/components/Navbar/base.tsx index 101506cfd..403dc808c 100644 --- a/web/packages/shared/components/Navbar/base.tsx +++ b/web/packages/shared/components/Navbar/base.tsx @@ -1,6 +1,9 @@ import { styled } from "@mui/material"; import { FlexWrapper } from "../../components/Container"; -const NavbarBase = styled(FlexWrapper)<{ isMobile: boolean }>` + +const NavbarBase = styled(FlexWrapper, { + shouldForwardProp: (propName) => propName != "isMobile", +})<{ isMobile: boolean }>` min-height: 64px; position: sticky; top: 0; diff --git a/web/packages/shared/crypto/types.ts b/web/packages/shared/crypto/types.ts index 47bfa8b2c..e591820f0 100644 --- a/web/packages/shared/crypto/types.ts +++ b/web/packages/shared/crypto/types.ts @@ -1,17 +1,3 @@ -import type { DataStream } from "../utils/data-stream"; - -export interface LocalFileAttributes< - T extends string | Uint8Array | DataStream, -> { - encryptedData: T; - decryptionHeader: string; -} - -export interface EncryptionResult { - file: LocalFileAttributes; - key: string; -} - export interface B64EncryptionResult { encryptedData: string; key: string; diff --git a/web/packages/shared/hooks/useFileInput.tsx b/web/packages/shared/hooks/useFileInput.tsx index b357d918e..71f027cef 100644 --- a/web/packages/shared/hooks/useFileInput.tsx +++ b/web/packages/shared/hooks/useFileInput.tsx @@ -1,10 +1,40 @@ import { useCallback, useRef, useState } from "react"; -export interface FileWithPath extends File { - readonly path?: string; +interface UseFileInputParams { + directory?: boolean; + accept?: string; } -export default function useFileInput({ directory }: { directory?: boolean }) { +/** + * Return three things: + * + * - A function that can be called to trigger the showing of the select file / + * directory dialog. 
+ * + * - The list of properties that should be passed to a dummy `input` element + * that needs to be created to anchor the select file dialog. This input HTML + * element is not going to be visible, but it needs to be part of the DOM fro + * the open trigger to have effect. + * + * - The list of files that the user selected. This will be a list even if the + * user selected directories - in that case, it will be the recursive list of + * files within this directory. + * + * @param param0 + * + * - If {@link directory} is true, the file open dialog will ask the user to + * select directories. Otherwise it'll ask the user to select files. + * + * - If {@link accept} is specified, it'll restrict the type of files that the + * user can select by setting the "accept" attribute of the underlying HTML + * input element we use to surface the file selector dialog. For value of + * accept can be an extension or a MIME type (See + * https://developer.mozilla.org/en-US/docs/Web/HTML/Attributes/accept). + */ +export default function useFileInput({ + directory, + accept, +}: UseFileInputParams) { const [selectedFiles, setSelectedFiles] = useState([]); const inputRef = useRef(); @@ -19,21 +49,34 @@ export default function useFileInput({ directory }: { directory?: boolean }) { event, ) => { if (!!event.target && !!event.target.files) { - const files = [...event.target.files].map((file) => - toFileWithPath(file), - ); - setSelectedFiles(files); + setSelectedFiles([...event.target.files]); } }; + // [Note: webkitRelativePath] + // + // If the webkitdirectory attribute of an HTML element is set then + // the File objects that we get will have `webkitRelativePath` property + // containing the relative path to the selected directory. + // + // https://developer.mozilla.org/en-US/docs/Web/API/HTMLInputElement/webkitdirectory + // + // These paths use the POSIX path separator ("/"). 
+ // https://stackoverflow.com/questions/62806233/when-using-webkitrelativepath-is-the-path-separator-operating-system-specific + // + const directoryOpts = directory + ? { directory: "", webkitdirectory: "" } + : {}; + const getInputProps = useCallback( () => ({ type: "file", multiple: true, style: { display: "none" }, - ...(directory ? { directory: "", webkitdirectory: "" } : {}), + ...directoryOpts, ref: inputRef, onChange: handleChange, + ...(accept ? { accept } : {}), }), [], ); @@ -44,26 +87,3 @@ export default function useFileInput({ directory }: { directory?: boolean }) { selectedFiles: selectedFiles, }; } - -// https://github.com/react-dropzone/file-selector/blob/master/src/file.ts#L88 -export function toFileWithPath(file: File, path?: string): FileWithPath { - if (typeof (file as any).path !== "string") { - // on electron, path is already set to the absolute path - const { webkitRelativePath } = file; - Object.defineProperty(file, "path", { - value: - typeof path === "string" - ? path - : typeof webkitRelativePath === "string" && // If is set, - // the File will have a {webkitRelativePath} property - // https://developer.mozilla.org/en-US/docs/Web/API/HTMLInputElement/webkitdirectory - webkitRelativePath.length > 0 - ? 
webkitRelativePath - : file.name, - writable: false, - configurable: false, - enumerable: true, - }); - } - return file; -} diff --git a/web/packages/shared/network/cast.ts b/web/packages/shared/network/cast.ts index b240eab32..a18767baa 100644 --- a/web/packages/shared/network/cast.ts +++ b/web/packages/shared/network/cast.ts @@ -58,11 +58,14 @@ class CastGateway { return resp.data.publicKey; } - public async registerDevice(code: string, publicKey: string) { - await HTTPService.post(getEndpoint() + "/cast/device-info/", { - deviceCode: `${code}`, - publicKey: publicKey, - }); + public async registerDevice(publicKey: string): Promise { + const resp = await HTTPService.post( + getEndpoint() + "/cast/device-info/", + { + publicKey: publicKey, + }, + ); + return resp.data.deviceCode; } public async publishCastPayload( diff --git a/web/packages/shared/utils/data-stream.ts b/web/packages/shared/utils/data-stream.ts deleted file mode 100644 index d072dfe7e..000000000 --- a/web/packages/shared/utils/data-stream.ts +++ /dev/null @@ -1,8 +0,0 @@ -export interface DataStream { - stream: ReadableStream; - chunkCount: number; -} - -export function isDataStream(object: any): object is DataStream { - return "stream" in object; -} diff --git a/web/packages/utils/ensure.ts b/web/packages/utils/ensure.ts index 761cedc99..93706bfb6 100644 --- a/web/packages/utils/ensure.ts +++ b/web/packages/utils/ensure.ts @@ -1,7 +1,8 @@ /** - * Throw an exception if the given value is undefined. + * Throw an exception if the given value is `null` or `undefined`. 
*/ -export const ensure = (v: T | undefined): T => { +export const ensure = (v: T | null | undefined): T => { + if (v === null) throw new Error("Required value was null"); if (v === undefined) throw new Error("Required value was not found"); return v; }; diff --git a/web/yarn.lock b/web/yarn.lock index 6886647d7..af3a5f210 100644 --- a/web/yarn.lock +++ b/web/yarn.lock @@ -528,7 +528,7 @@ dependencies: eslint-visitor-keys "^3.3.0" -"@eslint-community/regexpp@^4.5.1", "@eslint-community/regexpp@^4.6.1": +"@eslint-community/regexpp@^4.10.0", "@eslint-community/regexpp@^4.6.1": version "4.10.0" resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.10.0.tgz#548f6de556857c8bb73bbee70c35dc82a2e74d63" integrity sha512-Cu96Sd2By9mCNTx2iyKOmq10v22jUVQv0lQnlGNy16oE9589yE+QADPbrMGCkA51cKZSg3Pu/aTJVTGfL/qjUA== @@ -1018,7 +1018,7 @@ "@types/react" "*" hoist-non-react-statics "^3.3.0" -"@types/json-schema@^7.0.12": +"@types/json-schema@^7.0.15": version "7.0.15" resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.15.tgz#596a1747233694d50f6ad8a7869fcb6f56cf5841" integrity sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA== @@ -1134,10 +1134,10 @@ resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.8.tgz#ce5ace04cfeabe7ef87c0091e50752e36707deff" integrity sha512-WZLiwShhwLRmeV6zH+GkbOFT6Z6VklCItrDioxUnv+u4Ll+8vKeFySoFyK/0ctcRpOmwAicELfmys1sDc/Rw+A== -"@types/semver@^7.5.0": - version "7.5.7" - resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.5.7.tgz#326f5fdda70d13580777bcaa1bc6fa772a5aef0e" - integrity sha512-/wdoPq1QqkSj9/QOeKkFquEuPzQbHTWAMPH/PaUMB+JuR31lXhlWXRZ52IpfDYVlDOUBvX09uBrPwxGT1hjNBg== +"@types/semver@^7.5.8": + version "7.5.8" + resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.5.8.tgz#8268a8c57a3e4abd25c165ecd36237db7948a55e" + integrity 
sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ== "@types/uuid@^9.0.2": version "9.0.8" @@ -1150,21 +1150,21 @@ integrity sha512-Tuk4q7q0DnpzyJDI4aMeghGuFu2iS1QAdKpabn8JfbtfGmVDUgvZv1I7mEjP61Bvnp3ljKCC8BE6YYSTNxmvRQ== "@typescript-eslint/eslint-plugin@^7": - version "7.0.2" - resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.0.2.tgz#c13a34057be425167cc4a765158c46fdf2fd981d" - integrity sha512-/XtVZJtbaphtdrWjr+CJclaCVGPtOdBpFEnvtNf/jRV0IiEemRrL0qABex/nEt8isYcnFacm3nPHYQwL+Wb7qg== + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.8.0.tgz#c78e309fe967cb4de05b85cdc876fb95f8e01b6f" + integrity sha512-gFTT+ezJmkwutUPmB0skOj3GZJtlEGnlssems4AjkVweUPGj7jRwwqg0Hhg7++kPGJqKtTYx+R05Ftww372aIg== dependencies: - "@eslint-community/regexpp" "^4.5.1" - "@typescript-eslint/scope-manager" "7.0.2" - "@typescript-eslint/type-utils" "7.0.2" - "@typescript-eslint/utils" "7.0.2" - "@typescript-eslint/visitor-keys" "7.0.2" + "@eslint-community/regexpp" "^4.10.0" + "@typescript-eslint/scope-manager" "7.8.0" + "@typescript-eslint/type-utils" "7.8.0" + "@typescript-eslint/utils" "7.8.0" + "@typescript-eslint/visitor-keys" "7.8.0" debug "^4.3.4" graphemer "^1.4.0" - ignore "^5.2.4" + ignore "^5.3.1" natural-compare "^1.4.0" - semver "^7.5.4" - ts-api-utils "^1.0.1" + semver "^7.6.0" + ts-api-utils "^1.3.0" "@typescript-eslint/parser@^5.4.2 || ^6.0.0": version "6.21.0" @@ -1178,14 +1178,14 @@ debug "^4.3.4" "@typescript-eslint/parser@^7": - version "7.0.2" - resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-7.0.2.tgz#95c31233d343db1ca1df8df7811b5b87ca7b1a68" - integrity sha512-GdwfDglCxSmU+QTS9vhz2Sop46ebNCXpPPvsByK7hu0rFGRHL+AusKQJ7SoN+LbLh6APFpQwHKmDSwN35Z700Q== + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-7.8.0.tgz#1e1db30c8ab832caffee5f37e677dbcb9357ddc8" + integrity 
sha512-KgKQly1pv0l4ltcftP59uQZCi4HUYswCLbTqVZEJu7uLX8CTLyswqMLqLN+2QFz4jCptqWVV4SB7vdxcH2+0kQ== dependencies: - "@typescript-eslint/scope-manager" "7.0.2" - "@typescript-eslint/types" "7.0.2" - "@typescript-eslint/typescript-estree" "7.0.2" - "@typescript-eslint/visitor-keys" "7.0.2" + "@typescript-eslint/scope-manager" "7.8.0" + "@typescript-eslint/types" "7.8.0" + "@typescript-eslint/typescript-estree" "7.8.0" + "@typescript-eslint/visitor-keys" "7.8.0" debug "^4.3.4" "@typescript-eslint/scope-manager@6.21.0": @@ -1196,33 +1196,33 @@ "@typescript-eslint/types" "6.21.0" "@typescript-eslint/visitor-keys" "6.21.0" -"@typescript-eslint/scope-manager@7.0.2": - version "7.0.2" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-7.0.2.tgz#6ec4cc03752758ddd1fdaae6fbd0ed9a2ca4fe63" - integrity sha512-l6sa2jF3h+qgN2qUMjVR3uCNGjWw4ahGfzIYsCtFrQJCjhbrDPdiihYT8FnnqFwsWX+20hK592yX9I2rxKTP4g== +"@typescript-eslint/scope-manager@7.8.0": + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-7.8.0.tgz#bb19096d11ec6b87fb6640d921df19b813e02047" + integrity sha512-viEmZ1LmwsGcnr85gIq+FCYI7nO90DVbE37/ll51hjv9aG+YZMb4WDE2fyWpUR4O/UrhGRpYXK/XajcGTk2B8g== dependencies: - "@typescript-eslint/types" "7.0.2" - "@typescript-eslint/visitor-keys" "7.0.2" + "@typescript-eslint/types" "7.8.0" + "@typescript-eslint/visitor-keys" "7.8.0" -"@typescript-eslint/type-utils@7.0.2": - version "7.0.2" - resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-7.0.2.tgz#a7fc0adff0c202562721357e7478207d380a757b" - integrity sha512-IKKDcFsKAYlk8Rs4wiFfEwJTQlHcdn8CLwLaxwd6zb8HNiMcQIFX9sWax2k4Cjj7l7mGS5N1zl7RCHOVwHq2VQ== +"@typescript-eslint/type-utils@7.8.0": + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-7.8.0.tgz#9de166f182a6e4d1c5da76e94880e91831e3e26f" + integrity 
sha512-H70R3AefQDQpz9mGv13Uhi121FNMh+WEaRqcXTX09YEDky21km4dV1ZXJIp8QjXc4ZaVkXVdohvWDzbnbHDS+A== dependencies: - "@typescript-eslint/typescript-estree" "7.0.2" - "@typescript-eslint/utils" "7.0.2" + "@typescript-eslint/typescript-estree" "7.8.0" + "@typescript-eslint/utils" "7.8.0" debug "^4.3.4" - ts-api-utils "^1.0.1" + ts-api-utils "^1.3.0" "@typescript-eslint/types@6.21.0": version "6.21.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-6.21.0.tgz#205724c5123a8fef7ecd195075fa6e85bac3436d" integrity sha512-1kFmZ1rOm5epu9NZEZm1kckCDGj5UJEf7P1kliH4LKu/RkwpsfqqGmY2OOcUs18lSlQBKLDYBOGxRVtrMN5lpg== -"@typescript-eslint/types@7.0.2": - version "7.0.2" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-7.0.2.tgz#b6edd108648028194eb213887d8d43ab5750351c" - integrity sha512-ZzcCQHj4JaXFjdOql6adYV4B/oFOFjPOC9XYwCaZFRvqN8Llfvv4gSxrkQkd2u4Ci62i2c6W6gkDwQJDaRc4nA== +"@typescript-eslint/types@7.8.0": + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-7.8.0.tgz#1fd2577b3ad883b769546e2d1ef379f929a7091d" + integrity sha512-wf0peJ+ZGlcH+2ZS23aJbOv+ztjeeP8uQ9GgwMJGVLx/Nj9CJt17GWgWWoSmoRVKAX2X+7fzEnAjxdvK2gqCLw== "@typescript-eslint/typescript-estree@6.21.0": version "6.21.0" @@ -1238,32 +1238,32 @@ semver "^7.5.4" ts-api-utils "^1.0.1" -"@typescript-eslint/typescript-estree@7.0.2": - version "7.0.2" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-7.0.2.tgz#3c6dc8a3b9799f4ef7eca0d224ded01974e4cb39" - integrity sha512-3AMc8khTcELFWcKcPc0xiLviEvvfzATpdPj/DXuOGIdQIIFybf4DMT1vKRbuAEOFMwhWt7NFLXRkbjsvKZQyvw== +"@typescript-eslint/typescript-estree@7.8.0": + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-7.8.0.tgz#b028a9226860b66e623c1ee55cc2464b95d2987c" + integrity sha512-5pfUCOwK5yjPaJQNy44prjCwtr981dO8Qo9J9PwYXZ0MosgAbfEMB008dJ5sNo3+/BN6ytBPuSvXUg9SAqB0dg== dependencies: - 
"@typescript-eslint/types" "7.0.2" - "@typescript-eslint/visitor-keys" "7.0.2" + "@typescript-eslint/types" "7.8.0" + "@typescript-eslint/visitor-keys" "7.8.0" debug "^4.3.4" globby "^11.1.0" is-glob "^4.0.3" - minimatch "9.0.3" - semver "^7.5.4" - ts-api-utils "^1.0.1" + minimatch "^9.0.4" + semver "^7.6.0" + ts-api-utils "^1.3.0" -"@typescript-eslint/utils@7.0.2": - version "7.0.2" - resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-7.0.2.tgz#8756123054cd934c8ba7db6a6cffbc654b10b5c4" - integrity sha512-PZPIONBIB/X684bhT1XlrkjNZJIEevwkKDsdwfiu1WeqBxYEEdIgVDgm8/bbKHVu+6YOpeRqcfImTdImx/4Bsw== +"@typescript-eslint/utils@7.8.0": + version "7.8.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-7.8.0.tgz#57a79f9c0c0740ead2f622e444cfaeeb9fd047cd" + integrity sha512-L0yFqOCflVqXxiZyXrDr80lnahQfSOfc9ELAAZ75sqicqp2i36kEZZGuUymHNFoYOqxRT05up760b4iGsl02nQ== dependencies: "@eslint-community/eslint-utils" "^4.4.0" - "@types/json-schema" "^7.0.12" - "@types/semver" "^7.5.0" - "@typescript-eslint/scope-manager" "7.0.2" - "@typescript-eslint/types" "7.0.2" - "@typescript-eslint/typescript-estree" "7.0.2" - semver "^7.5.4" + "@types/json-schema" "^7.0.15" + "@types/semver" "^7.5.8" + "@typescript-eslint/scope-manager" "7.8.0" + "@typescript-eslint/types" "7.8.0" + "@typescript-eslint/typescript-estree" "7.8.0" + semver "^7.6.0" "@typescript-eslint/visitor-keys@6.21.0": version "6.21.0" @@ -1273,13 +1273,13 @@ "@typescript-eslint/types" "6.21.0" eslint-visitor-keys "^3.4.1" -"@typescript-eslint/visitor-keys@7.0.2": - version "7.0.2" - resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-7.0.2.tgz#2899b716053ad7094962beb895d11396fc12afc7" - integrity sha512-8Y+YiBmqPighbm5xA2k4wKTxRzx9EkBu7Rlw+WHqMvRJ3RPz/BMBO9b2ru0LUNmXg120PHUXD5+SWFy2R8DqlQ== +"@typescript-eslint/visitor-keys@7.8.0": + version "7.8.0" + resolved 
"https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-7.8.0.tgz#7285aab991da8bee411a42edbd5db760d22fdd91" + integrity sha512-q4/gibTNBQNA0lGyYQCmWRS5D15n8rXh4QjK3KV+MBPlTYHpfBUT3D3PaPR/HeNiI9W6R7FvlkcGhNyAoP+caA== dependencies: - "@typescript-eslint/types" "7.0.2" - eslint-visitor-keys "^3.4.1" + "@typescript-eslint/types" "7.8.0" + eslint-visitor-keys "^3.4.3" "@ungap/structured-clone@^1.2.0": version "1.2.0" @@ -2893,7 +2893,7 @@ ieee754@^1.2.1: resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== -ignore@^5.2.0, ignore@^5.2.4: +ignore@^5.2.0, ignore@^5.2.4, ignore@^5.3.1: version "5.3.1" resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.1.tgz#5073e554cd42c5b33b394375f538b8593e34d4ef" integrity sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw== @@ -3449,6 +3449,13 @@ minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: dependencies: brace-expansion "^1.1.7" +minimatch@^9.0.4: + version "9.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.4.tgz#8e49c731d1749cbec05050ee5145147b32496a51" + integrity sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw== + dependencies: + brace-expansion "^2.0.1" + minimist@^1.2.0, minimist@^1.2.6: version "1.2.8" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" @@ -4173,7 +4180,7 @@ semver@^6.3.1: resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== -semver@^7.5.4: +semver@^7.5.4, semver@^7.6.0: version "7.6.0" resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.0.tgz#1a46a4db4bffcccd97b743b5005c8325f23d4e2d" 
integrity sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg== @@ -4565,10 +4572,10 @@ truncate-utf8-bytes@^1.0.0: dependencies: utf8-byte-length "^1.0.1" -ts-api-utils@^1.0.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/ts-api-utils/-/ts-api-utils-1.2.1.tgz#f716c7e027494629485b21c0df6180f4d08f5e8b" - integrity sha512-RIYA36cJn2WiH9Hy77hdF9r7oEwxAtB/TS9/S4Qd90Ap4z5FSiin5zEiTL44OII1Y3IIlEvxwxFUVgrHSZ/UpA== +ts-api-utils@^1.0.1, ts-api-utils@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/ts-api-utils/-/ts-api-utils-1.3.0.tgz#4b490e27129f1e8e686b45cc4ab63714dc60eea1" + integrity sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ== tsconfig-paths@^3.15.0: version "3.15.0" @@ -4659,9 +4666,9 @@ typed-array-length@^1.0.6: possible-typed-array-names "^1.0.0" typescript@^5: - version "5.3.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.3.3.tgz#b3ce6ba258e72e6305ba66f5c9b452aaee3ffe37" - integrity sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw== + version "5.4.5" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.4.5.tgz#42ccef2c571fdbd0f6718b1d1f5e6e5ef006f611" + integrity sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ== unbox-primitive@^1.0.2: version "1.0.2"