diff --git a/.github/workflows/auth-crowdin.yml b/.github/workflows/auth-crowdin.yml index 811def939..bd92f1459 100644 --- a/.github/workflows/auth-crowdin.yml +++ b/.github/workflows/auth-crowdin.yml @@ -30,7 +30,7 @@ jobs: upload_sources: true upload_translations: false download_translations: true - localization_branch_name: crowdin-translations-auth + localization_branch_name: translations/auth create_pull_request: true skip_untranslated_strings: true pull_request_title: "[auth] New translations" diff --git a/.github/workflows/auth-lint.yml b/.github/workflows/auth-lint.yml index 6504e0646..e7c42e1a6 100644 --- a/.github/workflows/auth-lint.yml +++ b/.github/workflows/auth-lint.yml @@ -3,7 +3,7 @@ name: "Lint (auth)" on: # Run on every push to a branch other than main that changes auth/ push: - branches-ignore: [main, "deploy/**"] + branches-ignore: [main] paths: - "auth/**" - ".github/workflows/auth-lint.yml" diff --git a/.github/workflows/desktop-lint.yml b/.github/workflows/desktop-lint.yml index 0b8263f3d..d1cfda884 100644 --- a/.github/workflows/desktop-lint.yml +++ b/.github/workflows/desktop-lint.yml @@ -3,7 +3,7 @@ name: "Lint (desktop)" on: # Run on every push to a branch other than main that changes desktop/ push: - branches-ignore: [main, "deploy/**"] + branches-ignore: [main] paths: - "desktop/**" - ".github/workflows/desktop-lint.yml" diff --git a/.github/workflows/docs-deploy.yml b/.github/workflows/docs-deploy.yml index 01b0c2254..b824fe5c3 100644 --- a/.github/workflows/docs-deploy.yml +++ b/.github/workflows/docs-deploy.yml @@ -37,11 +37,8 @@ jobs: run: yarn build - name: Publish - uses: cloudflare/pages-action@1 + uses: cloudflare/wrangler-action@v3 with: accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: help - directory: docs/docs/.vitepress/dist - wranglerVersion: "3" + command: pages deploy --project-name=ente --commit-dirty=true --branch=help docs/docs/.vitepress/dist 
diff --git a/.github/workflows/docs-verify-build.yml b/.github/workflows/docs-verify-build.yml index a57f71c86..addb52a05 100644 --- a/.github/workflows/docs-verify-build.yml +++ b/.github/workflows/docs-verify-build.yml @@ -6,7 +6,7 @@ name: "Verify build (docs)" on: # Run on every push to a branch other than main that changes docs/ push: - branches-ignore: [main, "deploy/**"] + branches-ignore: [main] paths: - "docs/**" - ".github/workflows/docs-verify-build.yml" diff --git a/.github/workflows/mobile-crowdin.yml b/.github/workflows/mobile-crowdin.yml index 5c52b59ad..556ac45f2 100644 --- a/.github/workflows/mobile-crowdin.yml +++ b/.github/workflows/mobile-crowdin.yml @@ -30,7 +30,7 @@ jobs: upload_sources: true upload_translations: false download_translations: true - localization_branch_name: crowdin-translations-mobile + localization_branch_name: translations/mobile create_pull_request: true skip_untranslated_strings: true pull_request_title: "[mobile] New translations" diff --git a/.github/workflows/mobile-internal-release.yml b/.github/workflows/mobile-internal-release.yml index 4ee736742..4b7d537ef 100644 --- a/.github/workflows/mobile-internal-release.yml +++ b/.github/workflows/mobile-internal-release.yml @@ -1,4 +1,4 @@ -name: "Internal Release - Photos" +name: "Internal release (photos)" on: workflow_dispatch: # Allow manually running the action diff --git a/.github/workflows/mobile-lint.yml b/.github/workflows/mobile-lint.yml index 57b2ca4db..493185b6b 100644 --- a/.github/workflows/mobile-lint.yml +++ b/.github/workflows/mobile-lint.yml @@ -3,7 +3,7 @@ name: "Lint (mobile)" on: # Run on every push to a branch other than main that changes mobile/ push: - branches-ignore: [main, f-droid, "deploy/**"] + branches-ignore: [main, f-droid] paths: - "mobile/**" - ".github/workflows/mobile-lint.yml" diff --git a/.github/workflows/server-lint.yml b/.github/workflows/server-lint.yml index d25f2adcc..c051d0290 100644 --- a/.github/workflows/server-lint.yml +++ 
b/.github/workflows/server-lint.yml @@ -3,7 +3,7 @@ name: "Lint (server)" on: # Run on every push to a branch other than main that changes server/ push: - branches-ignore: [main, "deploy/**"] + branches-ignore: [main] paths: - "server/**" - ".github/workflows/server-lint.yml" diff --git a/.github/workflows/server-publish.yml b/.github/workflows/server-publish.yml index 1ba193517..b5aabbb8a 100644 --- a/.github/workflows/server-publish.yml +++ b/.github/workflows/server-publish.yml @@ -38,3 +38,8 @@ jobs: tags: ${{ inputs.commit }}, latest username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} + + - name: Tag as server/ghcr + run: | + git tag -f server/ghcr + git push -f origin server/ghcr diff --git a/.github/workflows/web-crowdin-push.yml b/.github/workflows/web-crowdin-push.yml new file mode 100644 index 000000000..1d525dfe0 --- /dev/null +++ b/.github/workflows/web-crowdin-push.yml @@ -0,0 +1,34 @@ +name: "Push Crowdin translations (web)" + +# This is a variant of web-crowdin.yml that uploads the translated strings in +# addition to the source strings. +# +# This allows us to change the strings in our source code for an automated +# refactoring (e.g. renaming a key), and then run this workflow to update the +# data in Crowdin taking our source code as the source of truth. 
+ +on: + # Trigger manually, or using + # `gh workflow run web-crowdin-push.yml --ref ` + workflow_dispatch: + +jobs: + push-to-crowdin: + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Crowdin push + uses: crowdin/github-action@v1 + with: + base_path: "web/" + config: "web/crowdin.yml" + upload_sources: true + upload_translations: true + download_translations: false + project_id: 569613 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }} diff --git a/.github/workflows/web-crowdin.yml b/.github/workflows/web-crowdin.yml index d98685065..b20b19ce3 100644 --- a/.github/workflows/web-crowdin.yml +++ b/.github/workflows/web-crowdin.yml @@ -36,7 +36,7 @@ jobs: upload_sources: true upload_translations: false download_translations: true - localization_branch_name: crowdin-translations-web + localization_branch_name: translations/web create_pull_request: true skip_untranslated_strings: true pull_request_title: "[web] New translations" diff --git a/.github/workflows/web-deploy-accounts.yml b/.github/workflows/web-deploy-accounts.yml deleted file mode 100644 index 61411cac6..000000000 --- a/.github/workflows/web-deploy-accounts.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: "Deploy (accounts)" - -on: - push: - # Run workflow on pushes to the deploy/accounts - branches: [deploy/accounts] - -jobs: - deploy: - runs-on: ubuntu-latest - - defaults: - run: - working-directory: web - - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: recursive - - - name: Setup node and enable yarn caching - uses: actions/setup-node@v4 - with: - node-version: 20 - cache: "yarn" - cache-dependency-path: "web/yarn.lock" - - - name: Install dependencies - run: yarn install - - - name: Build accounts - run: yarn build:accounts - - - name: Publish accounts - uses: cloudflare/pages-action@1 - with: - accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - apiToken: ${{ 
secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: deploy/accounts - directory: web/apps/accounts/out - wranglerVersion: "3" diff --git a/.github/workflows/web-deploy-auth.yml b/.github/workflows/web-deploy-auth.yml deleted file mode 100644 index d195b62f8..000000000 --- a/.github/workflows/web-deploy-auth.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: "Deploy (auth)" - -on: - push: - # Run workflow on pushes to the deploy/auth - branches: [deploy/auth] - -jobs: - deploy: - runs-on: ubuntu-latest - - defaults: - run: - working-directory: web - - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: recursive - - - name: Setup node and enable yarn caching - uses: actions/setup-node@v4 - with: - node-version: 20 - cache: "yarn" - cache-dependency-path: "web/yarn.lock" - - - name: Install dependencies - run: yarn install - - - name: Build auth - run: yarn build:auth - - - name: Publish auth - uses: cloudflare/pages-action@1 - with: - accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: deploy/auth - directory: web/apps/auth/out - wranglerVersion: "3" diff --git a/.github/workflows/web-deploy-cast.yml b/.github/workflows/web-deploy-cast.yml deleted file mode 100644 index c5bbca954..000000000 --- a/.github/workflows/web-deploy-cast.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: "Deploy (cast)" - -on: - push: - # Run workflow on pushes to the deploy/cast - branches: [deploy/cast] - -jobs: - deploy: - runs-on: ubuntu-latest - - defaults: - run: - working-directory: web - - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: recursive - - - name: Setup node and enable yarn caching - uses: actions/setup-node@v4 - with: - node-version: 20 - cache: "yarn" - cache-dependency-path: "web/yarn.lock" - - - name: Install dependencies - run: yarn install - - - name: Build cast - run: yarn build:cast - - - name: Publish cast - uses: 
cloudflare/pages-action@1 - with: - accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: deploy/cast - directory: web/apps/cast/out - wranglerVersion: "3" diff --git a/.github/workflows/web-deploy-one.yml b/.github/workflows/web-deploy-one.yml new file mode 100644 index 000000000..77c338513 --- /dev/null +++ b/.github/workflows/web-deploy-one.yml @@ -0,0 +1,61 @@ +name: "Deploy one (web)" + +on: + workflow_dispatch: + inputs: + app: + description: "App to build and deploy" + type: choice + required: true + default: "photos" + options: + - "accounts" + - "auth" + - "cast" + - "payments" + - "photos" + +jobs: + deploy: + runs-on: ubuntu-latest + + defaults: + run: + working-directory: web + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Setup node and enable yarn caching + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: "yarn" + cache-dependency-path: "web/yarn.lock" + + - name: Install dependencies + run: yarn install + + - name: Build ${{ inputs.app }} + run: yarn build:${{ inputs.app }} + + - name: Publish ${{ inputs.app }} to preview + uses: cloudflare/wrangler-action@v3 + with: + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + # [Note: Wrangler commit-dirty] + # + # Without the --commit-dirty flag, running the wrangler-action + # always prints a warning when used: + # + # Warning: Your working directory is a git repo and has uncommitted changes + # To silence this warning, pass in --commit-dirty=true + # + # There is no clear documentation of if passing this is + # harmless, but all indications and in-practice tests seem to + # indicate so. 
+ command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/${{ inputs.app }} web/apps/${{ inputs.app }}/out diff --git a/.github/workflows/web-deploy-payments.yml b/.github/workflows/web-deploy-payments.yml deleted file mode 100644 index 367e1db18..000000000 --- a/.github/workflows/web-deploy-payments.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: "Deploy (payments)" - -on: - push: - # Run workflow on pushes to the deploy/payments - branches: [deploy/payments] - -jobs: - deploy: - runs-on: ubuntu-latest - - defaults: - run: - working-directory: web - - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: recursive - - - name: Setup node and enable yarn caching - uses: actions/setup-node@v4 - with: - node-version: 20 - cache: "yarn" - cache-dependency-path: "web/yarn.lock" - - - name: Install dependencies - run: yarn install - - - name: Build payments - run: yarn build:payments - - - name: Publish payments - uses: cloudflare/pages-action@1 - with: - accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: deploy/payments - directory: web/apps/payments/dist - wranglerVersion: "3" diff --git a/.github/workflows/web-deploy-photos.yml b/.github/workflows/web-deploy-photos.yml deleted file mode 100644 index cb3a9db86..000000000 --- a/.github/workflows/web-deploy-photos.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: "Deploy (photos)" - -on: - push: - # Run workflow on pushes to the deploy/photos - branches: [deploy/photos] - -jobs: - deploy: - runs-on: ubuntu-latest - - defaults: - run: - working-directory: web - - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: recursive - - - name: Setup node and enable yarn caching - uses: actions/setup-node@v4 - with: - node-version: 20 - cache: "yarn" - cache-dependency-path: "web/yarn.lock" - - - name: Install dependencies - run: yarn install - - - name: Build photos - run: yarn build:photos - 
- - name: Publish photos - uses: cloudflare/pages-action@1 - with: - accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: deploy/photos - directory: web/apps/photos/out - wranglerVersion: "3" diff --git a/.github/workflows/web-preview.yml b/.github/workflows/web-deploy-preview.yml similarity index 84% rename from .github/workflows/web-preview.yml rename to .github/workflows/web-deploy-preview.yml index 8f39c0247..4bb187072 100644 --- a/.github/workflows/web-preview.yml +++ b/.github/workflows/web-deploy-preview.yml @@ -1,4 +1,4 @@ -name: "Preview (web)" +name: "Deploy preview (web)" on: workflow_dispatch: @@ -43,11 +43,8 @@ jobs: run: yarn build:${{ inputs.app }} - name: Publish ${{ inputs.app }} to preview - uses: cloudflare/pages-action@1 + uses: cloudflare/wrangler-action@v3 with: accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: preview - directory: web/apps/${{ inputs.app }}/out - wranglerVersion: "3" + command: pages deploy --project-name=ente --commit-dirty=true --branch=preview web/apps/${{ inputs.app }}/out diff --git a/.github/workflows/web-deploy-staff.yml b/.github/workflows/web-deploy-staff.yml index 4d386344d..854e16364 100644 --- a/.github/workflows/web-deploy-staff.yml +++ b/.github/workflows/web-deploy-staff.yml @@ -38,11 +38,8 @@ jobs: run: yarn build:staff - name: Publish staff - uses: cloudflare/pages-action@1 + uses: cloudflare/wrangler-action@v3 with: accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: deploy/staff - directory: web/apps/staff/dist - wranglerVersion: "3" + command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/staff web/apps/staff/dist diff --git a/.github/workflows/web-deploy-staging.yml b/.github/workflows/web-deploy-staging.yml new file mode 100644 index 000000000..ca3a6142b --- 
/dev/null +++ b/.github/workflows/web-deploy-staging.yml @@ -0,0 +1,86 @@ +name: "Deploy staging (web)" + +on: + schedule: + # Run everyday at ~3:00 PM IST + # + # See: [Note: Run workflow every 24 hours] + - cron: "25 9 * * *" + # Also allow manually running the workflow + workflow_dispatch: + +jobs: + deploy: + runs-on: ubuntu-latest + + defaults: + run: + working-directory: web + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Setup node and enable yarn caching + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: "yarn" + cache-dependency-path: "web/yarn.lock" + + - name: Install dependencies + run: yarn install + + - name: Build photos + run: yarn build:photos + env: + NEXT_PUBLIC_ENTE_ALBUMS_ENDPOINT: https://albums.ente.sh + + - name: Publish photos + uses: cloudflare/wrangler-action@v3 + with: + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + command: pages deploy --project-name=ente --commit-dirty=true --branch=n-photos web/apps/photos/out + + - name: Build accounts + run: yarn build:accounts + + - name: Publish accounts + uses: cloudflare/wrangler-action@v3 + with: + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + command: pages deploy --project-name=ente --commit-dirty=true --branch=n-accounts web/apps/accounts/out + + - name: Build auth + run: yarn build:auth + + - name: Publish auth + uses: cloudflare/wrangler-action@v3 + with: + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + command: pages deploy --project-name=ente --commit-dirty=true --branch=n-auth web/apps/auth/out + + - name: Build cast + run: yarn build:cast + + - name: Publish cast + uses: cloudflare/wrangler-action@v3 + with: + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + command: pages deploy --project-name=ente 
--commit-dirty=true --branch=n-cast web/apps/cast/out + + - name: Build payments + run: yarn build:payments + + - name: Publish payments + uses: cloudflare/wrangler-action@v3 + with: + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + command: pages deploy --project-name=ente --commit-dirty=true --branch=n-payments web/apps/payments/dist diff --git a/.github/workflows/web-nightly.yml b/.github/workflows/web-deploy.yml similarity index 60% rename from .github/workflows/web-nightly.yml rename to .github/workflows/web-deploy.yml index 949738292..6f6a113f2 100644 --- a/.github/workflows/web-nightly.yml +++ b/.github/workflows/web-deploy.yml @@ -1,17 +1,21 @@ -name: "Nightly (web)" +name: "Deploy (web)" on: schedule: # [Note: Run workflow every 24 hours] # - # Run every 24 hours - First field is minute, second is hour of the day - # This runs 23:15 UTC everyday - 1 and 15 are just arbitrary offset to - # avoid scheduling it on the exact hour, as suggested by GitHub. + # Run everyday at ~8:00 AM IST (except Sundays). + # + # First field is minute, second is hour of the day. Last is day of week, + # 0 being Sunday. + # + # Add a few minutes of offset to avoid scheduling on exact hourly + # boundaries (recommended by GitHub to avoid congestion). 
# # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#schedule # https://crontab.guru/ # - - cron: "15 23 * * *" + - cron: "25 2 * * 1-6" # Also allow manually running the workflow workflow_dispatch: @@ -39,69 +43,52 @@ jobs: - name: Install dependencies run: yarn install + - name: Build photos + run: yarn build:photos + + - name: Publish photos + uses: cloudflare/wrangler-action@v3 + with: + accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} + command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/photos web/apps/photos/out + - name: Build accounts run: yarn build:accounts - name: Publish accounts - uses: cloudflare/pages-action@1 + uses: cloudflare/wrangler-action@v3 with: accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: n-accounts - directory: web/apps/accounts/out - wranglerVersion: "3" + command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/accounts web/apps/accounts/out - name: Build auth run: yarn build:auth - name: Publish auth - uses: cloudflare/pages-action@1 + uses: cloudflare/wrangler-action@v3 with: accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: n-auth - directory: web/apps/auth/out - wranglerVersion: "3" + command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/auth web/apps/auth/out - name: Build cast run: yarn build:cast - name: Publish cast - uses: cloudflare/pages-action@1 + uses: cloudflare/wrangler-action@v3 with: accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: n-cast - directory: web/apps/cast/out - wranglerVersion: "3" + command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/cast web/apps/cast/out - name: Build payments run: yarn build:payments - name: 
Publish payments - uses: cloudflare/pages-action@1 + uses: cloudflare/wrangler-action@v3 with: accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: n-payments - directory: web/apps/payments/dist - wranglerVersion: "3" - - - name: Build photos - run: yarn build:photos - env: - NEXT_PUBLIC_ENTE_ALBUMS_ENDPOINT: https://albums.ente.sh - - - name: Publish photos - uses: cloudflare/pages-action@1 - with: - accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }} - projectName: ente - branch: n-photos - directory: web/apps/photos/out - wranglerVersion: "3" + command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/payments web/apps/payments/dist diff --git a/.github/workflows/web-lint.yml b/.github/workflows/web-lint.yml index 0dc11aa0e..7f5d27002 100644 --- a/.github/workflows/web-lint.yml +++ b/.github/workflows/web-lint.yml @@ -3,7 +3,7 @@ name: "Lint (web)" on: # Run on every push to a branch other than main that changes web/ push: - branches-ignore: [main, "deploy/**"] + branches-ignore: [main] paths: - "web/**" - ".github/workflows/web-lint.yml" diff --git a/auth/assets/simple-icons b/auth/assets/simple-icons index 8e7701d6a..8a3731352 160000 --- a/auth/assets/simple-icons +++ b/auth/assets/simple-icons @@ -1 +1 @@ -Subproject commit 8e7701d6a40462733043f54b3849faf35af70a83 +Subproject commit 8a3731352af133a02223a6c7b1f37c4abb096af0 diff --git a/auth/ios/Podfile.lock b/auth/ios/Podfile.lock index 7d02d123b..991f52b42 100644 --- a/auth/ios/Podfile.lock +++ b/auth/ios/Podfile.lock @@ -87,7 +87,7 @@ PODS: - SDWebImage/Core (5.19.0) - Sentry/HybridSDK (8.21.0): - SentryPrivate (= 8.21.0) - - sentry_flutter (0.0.1): + - sentry_flutter (7.19.0): - Flutter - FlutterMacOS - Sentry/HybridSDK (= 8.21.0) @@ -249,7 +249,7 @@ SPEC CHECKSUMS: ReachabilitySwift: 5ae15e16814b5f9ef568963fb2c87aeb49158c66 SDWebImage: 
981fd7e860af070920f249fd092420006014c3eb Sentry: ebc12276bd17613a114ab359074096b6b3725203 - sentry_flutter: dff1df05dc39c83d04f9330b36360fc374574c5e + sentry_flutter: 88ebea3f595b0bc16acc5bedacafe6d60c12dcd5 SentryPrivate: d651efb234cf385ec9a1cdd3eff94b5e78a0e0fe share_plus: c3fef564749587fc939ef86ffb283ceac0baf9f5 shared_preferences_foundation: b4c3b4cddf1c21f02770737f147a3f5da9d39695 @@ -263,4 +263,4 @@ SPEC CHECKSUMS: PODFILE CHECKSUM: b4e3a7eabb03395b66e81fc061789f61526ee6bb -COCOAPODS: 1.14.3 +COCOAPODS: 1.15.2 diff --git a/auth/lib/l10n/arb/app_ar.arb b/auth/lib/l10n/arb/app_ar.arb index 68bd38900..f9d37c7ba 100644 --- a/auth/lib/l10n/arb/app_ar.arb +++ b/auth/lib/l10n/arb/app_ar.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "المصدِّر", "codeSecretKeyHint": "الرمز السري", "codeAccountHint": "الحساب (you@domain.com)", - "accountKeyType": "نوع المفتاح", "sessionExpired": "انتهت صلاحية الجلسة", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_de.arb b/auth/lib/l10n/arb/app_de.arb index be769ecd5..0c4d29eaf 100644 --- a/auth/lib/l10n/arb/app_de.arb +++ b/auth/lib/l10n/arb/app_de.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "Aussteller", "codeSecretKeyHint": "Geheimer Schlüssel", "codeAccountHint": "Konto (you@domain.com)", - "accountKeyType": "Art des Schlüssels", "sessionExpired": "Sitzung abgelaufen", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_es.arb b/auth/lib/l10n/arb/app_es.arb index 41113f0b9..f0c8971a0 100644 --- a/auth/lib/l10n/arb/app_es.arb +++ b/auth/lib/l10n/arb/app_es.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "Emisor", "codeSecretKeyHint": "Llave Secreta", "codeAccountHint": "Cuenta (tu@dominio.com)", - "accountKeyType": "Tipo de llave", "sessionExpired": "La sesión ha expirado", "@sessionExpired": { "description": "Title of the dialog when the users current session is 
invalid/expired" @@ -113,6 +112,7 @@ "copied": "Copiado", "pleaseTryAgain": "Por favor, inténtalo nuevamente", "existingUser": "Usuario existente", + "newUser": "Nuevo a Ente", "delete": "Borrar", "enterYourPasswordHint": "Ingrese su contraseña", "forgotPassword": "Olvidé mi contraseña", @@ -138,6 +138,8 @@ "enterCodeHint": "Ingrese el código de seis dígitos de su aplicación de autenticación", "lostDeviceTitle": "¿Perdió su dispositivo?", "twoFactorAuthTitle": "Autenticación de dos factores", + "passkeyAuthTitle": "Verificación de llave de acceso", + "verifyPasskey": "Verificar llave de acceso", "recoverAccount": "Recuperar cuenta", "enterRecoveryKeyHint": "Introduzca su clave de recuperación", "recover": "Recuperar", @@ -191,6 +193,8 @@ "recoveryKeySaveDescription": "Nosotros no almacenamos esta clave, por favor guarde dicha clave de 24 palabras en un lugar seguro.", "doThisLater": "Hacer esto más tarde", "saveKey": "Guardar Clave", + "save": "Guardar", + "send": "Enviar", "back": "Atrás", "createAccount": "Crear cuenta", "passwordStrength": "Fortaleza de la contraseña: {passwordStrengthValue}", @@ -397,5 +401,8 @@ "signOutOtherDevices": "Cerrar la sesión de otros dispositivos", "doNotSignOut": "No cerrar la sesión", "hearUsWhereTitle": "¿Cómo conoció Ente? (opcional)", - "hearUsExplanation": "No rastreamos las aplicaciones instaladas. ¡Nos ayudaría si nos dijera dónde nos encontró!" + "hearUsExplanation": "No rastreamos las aplicaciones instaladas. 
¡Nos ayudaría si nos dijera dónde nos encontró!", + "passkey": "Llave de acceso", + "developerSettingsWarning": "¿Estás seguro de que quieres modificar los ajustes de desarrollador?", + "developerSettings": "Ajustes de desarrollador" } \ No newline at end of file diff --git a/auth/lib/l10n/arb/app_fa.arb b/auth/lib/l10n/arb/app_fa.arb index 0cba193a9..948aa8b22 100644 --- a/auth/lib/l10n/arb/app_fa.arb +++ b/auth/lib/l10n/arb/app_fa.arb @@ -14,7 +14,6 @@ "codeIssuerHint": "صادر کننده", "codeSecretKeyHint": "کلید مخفی", "codeAccountHint": "حساب (you@domain.com)", - "accountKeyType": "نوع کلید", "sessionExpired": "نشست منقضی شده است", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_fi.arb b/auth/lib/l10n/arb/app_fi.arb index 72309b331..2a0404147 100644 --- a/auth/lib/l10n/arb/app_fi.arb +++ b/auth/lib/l10n/arb/app_fi.arb @@ -12,7 +12,6 @@ "codeIssuerHint": "Myöntäjä", "codeSecretKeyHint": "Salainen avain", "codeAccountHint": "Tili (sinun@jokinosoite.com)", - "accountKeyType": "Avaimen tyyppi", "sessionExpired": "Istunto on vanheutunut", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_fr.arb b/auth/lib/l10n/arb/app_fr.arb index 04a7058c7..71ddc0b31 100644 --- a/auth/lib/l10n/arb/app_fr.arb +++ b/auth/lib/l10n/arb/app_fr.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "Émetteur", "codeSecretKeyHint": "Clé secrète", "codeAccountHint": "Compte (vous@exemple.com)", - "accountKeyType": "Type de clé", "sessionExpired": "Session expirée", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_he.arb b/auth/lib/l10n/arb/app_he.arb index 330585097..8f22e1e82 100644 --- a/auth/lib/l10n/arb/app_he.arb +++ b/auth/lib/l10n/arb/app_he.arb @@ -19,7 +19,6 @@ "codeIssuerHint": "מנפיק", "codeSecretKeyHint": "מפתח סודי", 
"codeAccountHint": "חשבון(you@domain.com)", - "accountKeyType": "סוג מפתח", "sessionExpired": "זמן החיבור הסתיים", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_it.arb b/auth/lib/l10n/arb/app_it.arb index e35fd11dc..92543ed82 100644 --- a/auth/lib/l10n/arb/app_it.arb +++ b/auth/lib/l10n/arb/app_it.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "Emittente", "codeSecretKeyHint": "Codice segreto", "codeAccountHint": "Account (username@dominio.it)", - "accountKeyType": "Tipo di chiave", "sessionExpired": "Sessione scaduta", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_ja.arb b/auth/lib/l10n/arb/app_ja.arb index 60d0a5150..8fea34c5e 100644 --- a/auth/lib/l10n/arb/app_ja.arb +++ b/auth/lib/l10n/arb/app_ja.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "発行者", "codeSecretKeyHint": "秘密鍵", "codeAccountHint": "アカウント (you@domain.com)", - "accountKeyType": "鍵の種類", "sessionExpired": "セッションが失効しました", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_ka.arb b/auth/lib/l10n/arb/app_ka.arb index cb7dc8281..93631df2d 100644 --- a/auth/lib/l10n/arb/app_ka.arb +++ b/auth/lib/l10n/arb/app_ka.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "მომწოდებელი", "codeSecretKeyHint": "გასაღები", "codeAccountHint": "ანგარიში (you@domain.com)", - "accountKeyType": "გასაღების ტიპი", "sessionExpired": "სესიის დრო ამოიწურა", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_nl.arb b/auth/lib/l10n/arb/app_nl.arb index 2e84ae11b..36280f69d 100644 --- a/auth/lib/l10n/arb/app_nl.arb +++ b/auth/lib/l10n/arb/app_nl.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "Uitgever", "codeSecretKeyHint": "Geheime sleutel", "codeAccountHint": "Account (jij@domein.nl)", - 
"accountKeyType": "Type sleutel", "sessionExpired": "Sessie verlopen", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_pl.arb b/auth/lib/l10n/arb/app_pl.arb index 8ebc935dc..796623def 100644 --- a/auth/lib/l10n/arb/app_pl.arb +++ b/auth/lib/l10n/arb/app_pl.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "Wydawca", "codeSecretKeyHint": "Tajny klucz", "codeAccountHint": "Konto (ty@domena.com)", - "accountKeyType": "Rodzaj klucza", "sessionExpired": "Sesja wygasła", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" @@ -186,6 +185,8 @@ "recoveryKeySaveDescription": "Nie przechowujemy tego klucza, proszę zachować ten 24 wyrazowy klucz w bezpiecznym miejscu.", "doThisLater": "Zrób To Później", "saveKey": "Zapisz klucz", + "save": "Zapisz", + "send": "Wyślij", "back": "Wstecz", "createAccount": "Utwórz konto", "passwordStrength": "Siła hasła: {passwordStrengthValue}", @@ -336,6 +337,10 @@ "@androidBiometricNotRecognized": { "description": "Message to let the user know that authentication was failed. It is used on Android side. Maximum 60 characters." }, + "androidCancelButton": "Anuluj", + "@androidCancelButton": { + "description": "Message showed on a button that the user can click to leave the current dialog. It is used on Android side. Maximum 30 characters." + }, "androidSignInTitle": "Wymagana autoryzacja", "@androidSignInTitle": { "description": "Message showed as a title in a dialog which indicates the user that they need to scan biometric to continue. It is used on Android side. Maximum 60 characters." 
diff --git a/auth/lib/l10n/arb/app_pt.arb b/auth/lib/l10n/arb/app_pt.arb index b27a018fb..9b1f5b1b0 100644 --- a/auth/lib/l10n/arb/app_pt.arb +++ b/auth/lib/l10n/arb/app_pt.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "Emissor", "codeSecretKeyHint": "Chave secreta", "codeAccountHint": "Conta (voce@dominio.com)", - "accountKeyType": "Tipo de chave", "sessionExpired": "Sessão expirada", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_ru.arb b/auth/lib/l10n/arb/app_ru.arb index 7ae37a87b..ca98611ee 100644 --- a/auth/lib/l10n/arb/app_ru.arb +++ b/auth/lib/l10n/arb/app_ru.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "Эмитент", "codeSecretKeyHint": "Секретный ключ", "codeAccountHint": "Аккаунт (you@domain.com)", - "accountKeyType": "Тип ключа", "sessionExpired": "Сеанс истек", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_sv.arb b/auth/lib/l10n/arb/app_sv.arb index cfb41d7bd..9761325ce 100644 --- a/auth/lib/l10n/arb/app_sv.arb +++ b/auth/lib/l10n/arb/app_sv.arb @@ -16,7 +16,6 @@ "codeIssuerHint": "Utfärdare", "codeSecretKeyHint": "Secret Key", "codeAccountHint": "Konto (du@domän.com)", - "accountKeyType": "Typ av nyckel", "sessionExpired": "Sessionen har gått ut", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_ti.arb b/auth/lib/l10n/arb/app_ti.arb index 27147ebb6..b41128f6e 100644 --- a/auth/lib/l10n/arb/app_ti.arb +++ b/auth/lib/l10n/arb/app_ti.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "ኣዋጂ", "codeSecretKeyHint": "ምስጢራዊ መፍትሕ", "codeAccountHint": "ሕሳብ (you@domain.com)", - "accountKeyType": "ዓይነት መፍትሕ", "sessionExpired": "ክፍለ ግዜኡ ኣኺሉ።", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_tr.arb 
b/auth/lib/l10n/arb/app_tr.arb index 9b847faf0..322af5f48 100644 --- a/auth/lib/l10n/arb/app_tr.arb +++ b/auth/lib/l10n/arb/app_tr.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "Yayınlayan", "codeSecretKeyHint": "Gizli Anahtar", "codeAccountHint": "Hesap (ornek@domain.com)", - "accountKeyType": "Anahtar türü", "sessionExpired": "Oturum süresi doldu", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_vi.arb b/auth/lib/l10n/arb/app_vi.arb index e318f9b55..a8cccdbec 100644 --- a/auth/lib/l10n/arb/app_vi.arb +++ b/auth/lib/l10n/arb/app_vi.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "Nhà phát hành", "codeSecretKeyHint": "Khóa bí mật", "codeAccountHint": "Tài khoản (bạn@miền.com)", - "accountKeyType": "Loại khóa", "sessionExpired": "Phiên làm việc đã hết hạn", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/auth/lib/l10n/arb/app_zh.arb b/auth/lib/l10n/arb/app_zh.arb index 077ee26fd..c50e76c1d 100644 --- a/auth/lib/l10n/arb/app_zh.arb +++ b/auth/lib/l10n/arb/app_zh.arb @@ -20,7 +20,6 @@ "codeIssuerHint": "发行人", "codeSecretKeyHint": "私钥", "codeAccountHint": "账户 (you@domain.com)", - "accountKeyType": "密钥类型", "sessionExpired": "会话已过期", "@sessionExpired": { "description": "Title of the dialog when the users current session is invalid/expired" diff --git a/cli/cmd/account.go b/cli/cmd/account.go index a4c78fb10..4bc48dcf3 100644 --- a/cli/cmd/account.go +++ b/cli/cmd/account.go @@ -27,7 +27,8 @@ var listAccCmd = &cobra.Command{ // Subcommand for 'account add' var addAccCmd = &cobra.Command{ Use: "add", - Short: "Add a new account", + Short: "login into existing account", + Long: "Use this command to add an existing account to cli. 
For creating a new account, use the mobile,web or desktop app", Run: func(cmd *cobra.Command, args []string) { recoverWithLog() ctrl.AddAccount(context.Background()) diff --git a/cli/docs/generated/ente.md b/cli/docs/generated/ente.md index b9d3cde17..4f85dd098 100644 --- a/cli/docs/generated/ente.md +++ b/cli/docs/generated/ente.md @@ -25,4 +25,4 @@ ente [flags] * [ente export](ente_export.md) - Starts the export process * [ente version](ente_version.md) - Prints the current version -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_account.md b/cli/docs/generated/ente_account.md index c48a65336..41c37b054 100644 --- a/cli/docs/generated/ente_account.md +++ b/cli/docs/generated/ente_account.md @@ -11,9 +11,9 @@ Manage account settings ### SEE ALSO * [ente](ente.md) - CLI tool for exporting your photos from ente.io -* [ente account add](ente_account_add.md) - Add a new account +* [ente account add](ente_account_add.md) - login into existing account * [ente account get-token](ente_account_get-token.md) - Get token for an account for a specific app * [ente account list](ente_account_list.md) - list configured accounts * [ente account update](ente_account_update.md) - Update an existing account's export directory -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_account_add.md b/cli/docs/generated/ente_account_add.md index 1904ca370..1e86ae12f 100644 --- a/cli/docs/generated/ente_account_add.md +++ b/cli/docs/generated/ente_account_add.md @@ -1,6 +1,10 @@ ## ente account add -Add a new account +login into existing account + +### Synopsis + +Use this command to add an existing account to cli. 
For creating a new account, use the mobile,web or desktop app ``` ente account add [flags] @@ -16,4 +20,4 @@ ente account add [flags] * [ente account](ente_account.md) - Manage account settings -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_account_get-token.md b/cli/docs/generated/ente_account_get-token.md index d7ee77255..3d8814d7d 100644 --- a/cli/docs/generated/ente_account_get-token.md +++ b/cli/docs/generated/ente_account_get-token.md @@ -18,4 +18,4 @@ ente account get-token [flags] * [ente account](ente_account.md) - Manage account settings -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_account_list.md b/cli/docs/generated/ente_account_list.md index cfc59bb8d..a7677eb85 100644 --- a/cli/docs/generated/ente_account_list.md +++ b/cli/docs/generated/ente_account_list.md @@ -16,4 +16,4 @@ ente account list [flags] * [ente account](ente_account.md) - Manage account settings -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_account_update.md b/cli/docs/generated/ente_account_update.md index acb65412a..8d9c8d7e5 100644 --- a/cli/docs/generated/ente_account_update.md +++ b/cli/docs/generated/ente_account_update.md @@ -19,4 +19,4 @@ ente account update [flags] * [ente account](ente_account.md) - Manage account settings -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_admin.md b/cli/docs/generated/ente_admin.md index aafe51b39..5ac72489d 100644 --- a/cli/docs/generated/ente_admin.md +++ b/cli/docs/generated/ente_admin.md @@ -21,4 +21,4 @@ Commands for admin actions like disable or enabling 2fa, bumping up the storage * [ente admin list-users](ente_admin_list-users.md) - List all users * [ente admin 
update-subscription](ente_admin_update-subscription.md) - Update subscription for user -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_admin_delete-user.md b/cli/docs/generated/ente_admin_delete-user.md index 56c96841e..a1d52a73d 100644 --- a/cli/docs/generated/ente_admin_delete-user.md +++ b/cli/docs/generated/ente_admin_delete-user.md @@ -18,4 +18,4 @@ ente admin delete-user [flags] * [ente admin](ente_admin.md) - Commands for admin actions -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_admin_disable-2fa.md b/cli/docs/generated/ente_admin_disable-2fa.md index 333f0912e..23cd33080 100644 --- a/cli/docs/generated/ente_admin_disable-2fa.md +++ b/cli/docs/generated/ente_admin_disable-2fa.md @@ -18,4 +18,4 @@ ente admin disable-2fa [flags] * [ente admin](ente_admin.md) - Commands for admin actions -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_admin_get-user-id.md b/cli/docs/generated/ente_admin_get-user-id.md index 3d26f624a..47d632abb 100644 --- a/cli/docs/generated/ente_admin_get-user-id.md +++ b/cli/docs/generated/ente_admin_get-user-id.md @@ -18,4 +18,4 @@ ente admin get-user-id [flags] * [ente admin](ente_admin.md) - Commands for admin actions -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_admin_list-users.md b/cli/docs/generated/ente_admin_list-users.md index 8841df57b..635e8ec3c 100644 --- a/cli/docs/generated/ente_admin_list-users.md +++ b/cli/docs/generated/ente_admin_list-users.md @@ -17,4 +17,4 @@ ente admin list-users [flags] * [ente admin](ente_admin.md) - Commands for admin actions -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 
6-May-2024 diff --git a/cli/docs/generated/ente_admin_update-subscription.md b/cli/docs/generated/ente_admin_update-subscription.md index cc1fa9623..d0fadcd2b 100644 --- a/cli/docs/generated/ente_admin_update-subscription.md +++ b/cli/docs/generated/ente_admin_update-subscription.md @@ -23,4 +23,4 @@ ente admin update-subscription [flags] * [ente admin](ente_admin.md) - Commands for admin actions -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_auth.md b/cli/docs/generated/ente_auth.md index 5770f36f3..e0e97d84f 100644 --- a/cli/docs/generated/ente_auth.md +++ b/cli/docs/generated/ente_auth.md @@ -13,4 +13,4 @@ Authenticator commands * [ente](ente.md) - CLI tool for exporting your photos from ente.io * [ente auth decrypt](ente_auth_decrypt.md) - Decrypt authenticator export -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_auth_decrypt.md b/cli/docs/generated/ente_auth_decrypt.md index e573db2a3..c9db6ea54 100644 --- a/cli/docs/generated/ente_auth_decrypt.md +++ b/cli/docs/generated/ente_auth_decrypt.md @@ -16,4 +16,4 @@ ente auth decrypt [input] [output] [flags] * [ente auth](ente_auth.md) - Authenticator commands -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_export.md b/cli/docs/generated/ente_export.md index c5783236c..d809e06e4 100644 --- a/cli/docs/generated/ente_export.md +++ b/cli/docs/generated/ente_export.md @@ -16,4 +16,4 @@ ente export [flags] * [ente](ente.md) - CLI tool for exporting your photos from ente.io -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/docs/generated/ente_version.md b/cli/docs/generated/ente_version.md index b51055697..08f384b52 100644 --- a/cli/docs/generated/ente_version.md +++ 
b/cli/docs/generated/ente_version.md @@ -16,4 +16,4 @@ ente version [flags] * [ente](ente.md) - CLI tool for exporting your photos from ente.io -###### Auto generated by spf13/cobra on 14-Mar-2024 +###### Auto generated by spf13/cobra on 6-May-2024 diff --git a/cli/pkg/account.go b/cli/pkg/account.go index 9363e2f80..e411ffacd 100644 --- a/cli/pkg/account.go +++ b/cli/pkg/account.go @@ -59,7 +59,7 @@ func (c *ClICtrl) AddAccount(cxt context.Context) { authResponse, flowErr = c.validateTOTP(cxt, authResponse) } if authResponse.EncryptedToken == "" || authResponse.KeyAttributes == nil { - panic("no encrypted token or keyAttributes") + log.Fatalf("missing key attributes or token.\nNote: Please use the mobile,web or desktop app to create a new account.\nIf you are trying to login to an existing account, report a bug.") } secretInfo, decErr := c.decryptAccSecretInfo(cxt, authResponse, keyEncKey) if decErr != nil { diff --git a/desktop/.github/workflows/desktop-release.yml b/desktop/.github/workflows/desktop-release.yml index 2fa382376..70eedf3ea 100644 --- a/desktop/.github/workflows/desktop-release.yml +++ b/desktop/.github/workflows/desktop-release.yml @@ -1,20 +1,12 @@ name: "Release" -# This will create a new draft release with public artifacts. +# Build the ente-io/ente's desktop/rc branch and create/update a draft release. # -# Note that a release will only get created if there is an associated tag -# (GitHub releases need a corresponding tag). -# -# The canonical source for this action is in the repository where we keep the -# source code for the Ente Photos desktop app: https://github.com/ente-io/ente -# -# However, it actually lives and runs in the repository that we use for making -# releases: https://github.com/ente-io/photos-desktop -# -# We need two repositories because Electron updater currently doesn't work well -# with monorepos. For more details, see `docs/release.md`. +# For more details, see `docs/release.md` in ente-io/ente. 
on: + # Trigger manually or `gh workflow run desktop-release.yml`. + workflow_dispatch: push: # Run when a tag matching the pattern "v*"" is pushed. # @@ -38,11 +30,9 @@ jobs: - name: Checkout code uses: actions/checkout@v4 with: - # Checkout the tag photosd-v1.x.x from the source code - # repository when we're invoked for tag v1.x.x on the releases - # repository. + # Checkout the desktop/rc branch from the source repository. repository: ente-io/ente - ref: photosd-${{ github.ref_name }} + ref: desktop/rc submodules: recursive - name: Setup node @@ -50,6 +40,11 @@ jobs: with: node-version: 20 + - name: Increase yarn timeout + # `yarn install` times out sometimes on the Windows runner, + # resulting in flaky builds. + run: yarn config set network-timeout 900000 -g + - name: Install dependencies run: yarn install @@ -63,13 +58,15 @@ jobs: uses: ente-io/action-electron-builder@v1.0.0 with: package_root: desktop + build_script_name: build:ci # GitHub token, automatically provided to the action # (No need to define this secret in the repo settings) github_token: ${{ secrets.GITHUB_TOKEN }} # If the commit is tagged with a version (e.g. "v1.0.0"), - # release the app after building. + # create a (draft) release after building. Otherwise upload + # assets to the existing draft named after the version. release: ${{ startsWith(github.ref, 'refs/tags/v') }} mac_certs: ${{ secrets.MAC_CERTS }} diff --git a/desktop/CHANGELOG.md b/desktop/CHANGELOG.md index eb118a424..5fbbefaaa 100644 --- a/desktop/CHANGELOG.md +++ b/desktop/CHANGELOG.md @@ -2,11 +2,17 @@ ## v1.7.0 (Unreleased) -v1.7 is a major rewrite to improve the security of our app. We have enabled -sandboxing and disabled node integration for the renderer process. All this -required restructuring our IPC mechanisms, which resulted in a lot of under the -hood changes. The outcome is a more secure app that also uses the latest and -greatest Electron recommendations. 
+v1.7 is a major rewrite to improve the security of our app. In particular, the +UI and the native parts of the app now run isolated from each other and +communicate only using a predefined IPC boundary. + +Other highlights: + +- View your photos on big screens and Chromecast devices by using the "Play + album on TV" option in the album menu. +- Support Brazilian Portuguese, German and Russian. +- Provide a checkbox to select all photos in a day. +- Fix a case where the dedup screen would not refresh after removing items. ## v1.6.63 diff --git a/desktop/docs/release.md b/desktop/docs/release.md index b55c96326..1cda1c11b 100644 --- a/desktop/docs/release.md +++ b/desktop/docs/release.md @@ -1,46 +1,64 @@ ## Releases -Conceptually, the release is straightforward: We push a tag, a GitHub workflow -gets triggered that creates a draft release with artifacts built from that tag. -We then publish that release. The download links on our website, and existing -apps already know how to check for the latest GitHub release and update -accordingly. +Conceptually, the release is straightforward: We trigger a GitHub workflow that +creates a draft release with artifacts built. When ready, we publish that +release. The download links on our website, and existing apps already check the +latest GitHub release and update accordingly. -The complication comes by the fact that Electron Updater (the mechanism that we -use for auto updates) doesn't work well with monorepos. So we need to keep a -separate (non-mono) repository just for doing releases. +The complication comes by the fact that electron-builder's auto updaterr (the +mechanism that we use for auto updates) doesn't work with monorepos. So we need +to keep a separate (non-mono) repository just for doing releases. - Source code lives here, in [ente-io/ente](https://github.com/ente-io/ente). - Releases are done from [ente-io/photos-desktop](https://github.com/ente-io/photos-desktop). 
-## Workflow +## Workflow - Release Candidates -The workflow is: +Leading up to the release, we can make one or more draft releases that are not +intended to be published, but serve as test release candidates. -1. Finalize the changes in the source repo. +The workflow for making such "rc" builds is: - - Update the CHANGELOG. - - Update the version in `package.json` - - `git commit -m "[photosd] Release v1.2.3"` - - Open PR, merge into main. +1. Update `package.json` in the source repo to use version `1.x.x-rc`. Create a + new draft release in the release repo with title `1.x.x-rc`. In the tag + input enter `v1.x.x-rc` and select the option to "create a new tag on + publish". -2. Tag the merge commit with a tag matching the pattern `photosd-v1.2.3`, where - `1.2.3` is the version in `package.json` +2. Push code to the `desktop/rc` branch in the source repo. + +3. Trigger the GitHub action in the release repo ```sh - git tag photosd-v1.x.x - git push origin photosd-v1.x.x + gh workflow run desktop-release.yml ``` -3. Head over to the releases repository and run the trigger script, passing it - the tag _without_ the `photosd-` prefix. +We can do steps 2 and 3 multiple times: each time it'll just update the +artifacts attached to the same draft. + +## Workflow - Release + +1. Update source repo to set version `1.x.x` in `package.json` and finialize + the CHANGELOG. + +2. Push code to the `desktop/rc` branch in the source repo. + +3. In the release repo ```sh ./.github/trigger-release.sh v1.x.x ``` +4. If the build is successful, tag `desktop/rc` in the source repo. + + ```sh + # Assuming we're on desktop/rc that just got build + + git tag photosd-v1.x.x + git push origin photosd-v1.x.x + ``` + ## Post build The GitHub Action runs on Windows, Linux and macOS. 
It produces the artifacts diff --git a/desktop/electron-builder.yml b/desktop/electron-builder.yml index f62033fb9..c2c000ce9 100644 --- a/desktop/electron-builder.yml +++ b/desktop/electron-builder.yml @@ -29,5 +29,3 @@ mac: arch: [universal] category: public.app-category.photography hardenedRuntime: true - notarize: true -afterSign: electron-builder-notarize diff --git a/desktop/package.json b/desktop/package.json index 462857a8b..7297a0c17 100644 --- a/desktop/package.json +++ b/desktop/package.json @@ -1,6 +1,6 @@ { "name": "ente", - "version": "1.7.0-beta.0", + "version": "1.7.0-rc", "private": true, "description": "Desktop client for Ente Photos", "repository": "github:ente-io/photos-desktop", @@ -11,6 +11,7 @@ "build-main": "tsc && electron-builder", "build-main:quick": "tsc && electron-builder --dir --config.compression=store --config.mac.identity=null", "build-renderer": "cd ../web && yarn install && yarn build:photos && cd ../desktop && shx rm -f out && shx ln -sf ../web/apps/photos/out out", + "build:ci": "yarn build-renderer && tsc", "build:quick": "yarn build-renderer && yarn build-main:quick", "dev": "concurrently --kill-others --success first --names 'main,rndr' \"yarn dev-main\" \"yarn dev-renderer\"", "dev-main": "tsc && electron app/main.js", @@ -46,7 +47,6 @@ "concurrently": "^8", "electron": "^30", "electron-builder": "25.0.0-alpha.6", - "electron-builder-notarize": "^1.5", "eslint": "^8", "prettier": "^3", "prettier-plugin-organize-imports": "^3", diff --git a/desktop/src/main.ts b/desktop/src/main.ts index 49b316206..9cba9178d 100644 --- a/desktop/src/main.ts +++ b/desktop/src/main.ts @@ -142,7 +142,7 @@ const createMainWindow = () => { // Create the main window. This'll show our web content. const window = new BrowserWindow({ webPreferences: { - preload: path.join(app.getAppPath(), "preload.js"), + preload: path.join(__dirname, "preload.js"), sandbox: true, }, // The color to show in the window until the web content gets loaded. 
@@ -287,13 +287,29 @@ const setupTrayItem = (mainWindow: BrowserWindow) => { /** * Older versions of our app used to maintain a cache dir using the main - * process. This has been deprecated in favor of using a normal web cache. + * process. This has been removed in favor of cache on the web layer. * - * Delete the old cache dir if it exists. This code was added March 2024, and - * can be removed after some time once most people have upgraded to newer - * versions. + * Delete the old cache dir if it exists. + * + * This will happen in two phases. The cache had three subdirectories: + * + * - Two of them, "thumbs" and "files", will be removed now (v1.7.0, May 2024). + * + * - The third one, "face-crops" will be removed once we finish the face search + * changes. See: [Note: Legacy face crops]. + * + * This migration code can be removed after some time once most people have + * upgraded to newer versions. */ const deleteLegacyDiskCacheDirIfExists = async () => { + const removeIfExists = async (dirPath: string) => { + if (existsSync(dirPath)) { + log.info(`Removing legacy disk cache from ${dirPath}`); + await fs.rm(dirPath, { recursive: true }); + } + }; + // [Note: Getting the cache path] + // // The existing code was passing "cache" as a parameter to getPath. // // However, "cache" is not a valid parameter to getPath. It works! (for @@ -309,8 +325,8 @@ const deleteLegacyDiskCacheDirIfExists = async () => { // @ts-expect-error "cache" works but is not part of the public API. const cacheDir = path.join(app.getPath("cache"), "ente"); if (existsSync(cacheDir)) { - log.info(`Removing legacy disk cache from ${cacheDir}`); - await fs.rm(cacheDir, { recursive: true }); + await removeIfExists(path.join(cacheDir, "thumbs")); + await removeIfExists(path.join(cacheDir, "files")); } }; @@ -375,7 +391,7 @@ const main = () => { // Continue on with the rest of the startup sequence. 
Menu.setApplicationMenu(await createApplicationMenu(mainWindow)); setupTrayItem(mainWindow); - if (!isDev) setupAutoUpdater(mainWindow); + setupAutoUpdater(mainWindow); try { await deleteLegacyDiskCacheDirIfExists(); diff --git a/desktop/src/main/ipc.ts b/desktop/src/main/ipc.ts index f59969202..5072db29e 100644 --- a/desktop/src/main/ipc.ts +++ b/desktop/src/main/ipc.ts @@ -24,6 +24,7 @@ import { updateOnNextRestart, } from "./services/app-update"; import { + legacyFaceCrop, openDirectory, openLogDirectory, selectDirectory, @@ -68,6 +69,7 @@ import { watchUpdateIgnoredFiles, watchUpdateSyncedFiles, } from "./services/watch"; +import { clearConvertToMP4Results } from "./stream"; /** * Listen for IPC events sent/invoked by the renderer process, and route them to @@ -107,6 +109,8 @@ export const attachIPCHandlers = () => { ipcMain.on("clearStores", () => clearStores()); + ipcMain.on("clearConvertToMP4Results", () => clearConvertToMP4Results()); + ipcMain.handle("saveEncryptionKey", (_, encryptionKey: string) => saveEncryptionKey(encryptionKey), ); @@ -170,14 +174,7 @@ export const attachIPCHandlers = () => { command: string[], dataOrPathOrZipItem: Uint8Array | string | ZipItem, outputFileExtension: string, - timeoutMS: number, - ) => - ffmpegExec( - command, - dataOrPathOrZipItem, - outputFileExtension, - timeoutMS, - ), + ) => ffmpegExec(command, dataOrPathOrZipItem, outputFileExtension), ); // - ML @@ -198,6 +195,10 @@ export const attachIPCHandlers = () => { faceEmbedding(input), ); + ipcMain.handle("legacyFaceCrop", (_, faceID: string) => + legacyFaceCrop(faceID), + ); + // - Upload ipcMain.handle("listZipItems", (_, zipPath: string) => diff --git a/desktop/src/main/log.ts b/desktop/src/main/log.ts index cf1404a90..60870c913 100644 --- a/desktop/src/main/log.ts +++ b/desktop/src/main/log.ts @@ -5,11 +5,8 @@ import { isDev } from "./utils/electron"; /** * Initialize logging in the main process. 
* - * This will set our underlying logger up to log to a file named `ente.log`, - * - * - on Linux at ~/.config/ente/logs/ente.log - * - on macOS at ~/Library/Logs/ente/ente.log - * - on Windows at %USERPROFILE%\AppData\Roaming\ente\logs\ente.log + * This will set our underlying logger up to log to a file named `ente.log`, see + * [Note: App log path]. * * On dev builds, it will also log to the console. */ @@ -62,7 +59,7 @@ const logError = (message: string, e?: unknown) => { const logError_ = (message: string) => { log.error(`[main] [error] ${message}`); - if (isDev) console.error(`[error] ${message}`); + console.error(`[error] ${message}`); }; const logInfo = (...params: unknown[]) => { @@ -96,8 +93,8 @@ export default { * any arbitrary object that we obtain, say, when in a try-catch handler (in * JavaScript any arbitrary value can be thrown). * - * The log is written to disk. In development builds, the log is also - * printed to the main (Node.js) process console. + * The log is written to disk and printed to the main (Node.js) process's + * console. */ error: logError, /** @@ -120,7 +117,7 @@ export default { * The function can return an arbitrary value which is serialized before * being logged. * - * This log is NOT written to disk. And it is printed to the main (Node.js) + * This log is NOT written to disk. It is printed to the main (Node.js) * process console, but only on development builds. 
*/ debug: logDebug, diff --git a/desktop/src/main/menu.ts b/desktop/src/main/menu.ts index b6fa7acfe..188b195f8 100644 --- a/desktop/src/main/menu.ts +++ b/desktop/src/main/menu.ts @@ -10,7 +10,6 @@ import { forceCheckForAppUpdates } from "./services/app-update"; import autoLauncher from "./services/auto-launcher"; import { openLogDirectory } from "./services/dir"; import { userPreferences } from "./stores/user-preferences"; -import { isDev } from "./utils/electron"; /** Create and return the entries in the app's main menu bar */ export const createApplicationMenu = async (mainWindow: BrowserWindow) => { @@ -24,9 +23,6 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => { const macOSOnly = (options: MenuItemConstructorOptions[]) => process.platform == "darwin" ? options : []; - const devOnly = (options: MenuItemConstructorOptions[]) => - isDev ? options : []; - const handleCheckForUpdates = () => forceCheckForAppUpdates(mainWindow); const handleViewChangelog = () => @@ -86,12 +82,14 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => { checked: isAutoLaunchEnabled, click: toggleAutoLaunch, }, - { - label: "Hide Dock Icon", - type: "checkbox", - checked: shouldHideDockIcon, - click: toggleHideDockIcon, - }, + ...macOSOnly([ + { + label: "Hide Dock Icon", + type: "checkbox", + checked: shouldHideDockIcon, + click: toggleHideDockIcon, + }, + ]), ], }, @@ -130,11 +128,11 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => { submenu: [ { role: "startSpeaking", - label: "start speaking", + label: "Start Speaking", }, { role: "stopSpeaking", - label: "stop speaking", + label: "Stop Speaking", }, ], }, @@ -145,9 +143,7 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => { label: "View", submenu: [ { label: "Reload", role: "reload" }, - ...devOnly([ - { label: "Toggle Dev Tools", role: "toggleDevTools" }, - ]), + { label: "Toggle Dev Tools", role: "toggleDevTools" }, { type: 
"separator" }, { label: "Toggle Full Screen", role: "togglefullscreen" }, ], diff --git a/desktop/src/main/services/app-update.ts b/desktop/src/main/services/app-update.ts index 8d66cb8c3..ed6dc0e18 100644 --- a/desktop/src/main/services/app-update.ts +++ b/desktop/src/main/services/app-update.ts @@ -6,11 +6,90 @@ import { allowWindowClose } from "../../main"; import { AppUpdate } from "../../types/ipc"; import log from "../log"; import { userPreferences } from "../stores/user-preferences"; +import { isDev } from "../utils/electron"; export const setupAutoUpdater = (mainWindow: BrowserWindow) => { autoUpdater.logger = electronLog; autoUpdater.autoDownload = false; + /** + * [Note: Testing auto updates] + * + * By default, we skip checking for updates automatically in dev builds. + * This is because even if installing updates would fail (at least on macOS) + * because auto updates only work for signed builds. + * + * So an end to end testing for updates requires using a temporary GitHub + * repository and signed builds therein. More on this later. + * + * --------------- + * + * [Note: Testing auto updates - Sanity checks] + * + * However, for partial checks of the UI flow, something like the following + * can be used to do a test of the update process (up until the actual + * installation itself). + * + * Create a `app/dev-app-update.yml` with: + * + * provider: generic + * url: http://127.0.0.1:7777/ + * + * and start a local webserver in some directory: + * + * python3 -m http.server 7777 + * + * In this directory, put `latest-mac.yml` and the DMG file that this YAML + * file refers to. + * + * Alternatively, `dev-app-update.yml` can point to some arbitrary GitHub + * repository too, e.g.: + * + * provider: github + * owner: ente-io + * repo: test-desktop-updates + * + * Now we can use the "Check for updates..." menu option to trigger the + * update flow. 
+ */ + autoUpdater.forceDevUpdateConfig = isDev; + if (isDev) return; + + /** + * [Note: Testing auto updates - End to end checks] + * + * Since end-to-end update testing can only be done with signed builds, the + * easiest way is to create temporary builds in a test repository. + * + * Let us say we have v2.0.0 about to go out. We have builds artifacts for + * v2.0.0 also in some draft release in our normal release repository. + * + * Create a new test repository, say `ente-io/test-desktop-updates`. In this + * repository, create a release v2.0.0, attaching the actual build + * artifacts. Make this release the latest. + * + * Now we need to create a old signed build. + * + * First, modify `package.json` to put in a version number older than the + * new version number that we want to test updating to, e.g. `v1.0.0-test`. + * + * Then uncomment the following block of code. This tells the auto updater + * to use `ente-io/test-desktop-updates` to get updates. + * + * With these two changes (older version and setFeedURL), create a new + * release signed build on CI. Install this build - it will check for + * updates in the temporary feed URL that we set, and we'll be able to check + * the full update flow. 
+ */ + + /* + autoUpdater.setFeedURL({ + provider: "github", + owner: "ente-io", + repo: "test-desktop-updates", + }); + */ + const oneDay = 1 * 24 * 60 * 60 * 1000; setInterval(() => void checkForUpdatesAndNotify(mainWindow), oneDay); void checkForUpdatesAndNotify(mainWindow); @@ -61,17 +140,17 @@ const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => { log.debug(() => "Attempting auto update"); await autoUpdater.downloadUpdate(); - let timeoutId: ReturnType; + let timeout: ReturnType; const fiveMinutes = 5 * 60 * 1000; autoUpdater.on("update-downloaded", () => { - timeoutId = setTimeout( + timeout = setTimeout( () => showUpdateDialog({ autoUpdatable: true, version }), fiveMinutes, ); }); autoUpdater.on("error", (error) => { - clearTimeout(timeoutId); + clearTimeout(timeout); log.error("Auto update failed", error); showUpdateDialog({ autoUpdatable: false, version }); }); diff --git a/desktop/src/main/services/auto-launcher.ts b/desktop/src/main/services/auto-launcher.ts index 4e97a0225..0942a4935 100644 --- a/desktop/src/main/services/auto-launcher.ts +++ b/desktop/src/main/services/auto-launcher.ts @@ -27,14 +27,14 @@ class AutoLauncher { } async toggleAutoLaunch() { - const isEnabled = await this.isEnabled(); + const wasEnabled = await this.isEnabled(); const autoLaunch = this.autoLaunch; if (autoLaunch) { - if (isEnabled) await autoLaunch.disable(); + if (wasEnabled) await autoLaunch.disable(); else await autoLaunch.enable(); } else { - if (isEnabled) app.setLoginItemSettings({ openAtLogin: false }); - else app.setLoginItemSettings({ openAtLogin: true }); + const openAtLogin = !wasEnabled; + app.setLoginItemSettings({ openAtLogin }); } } @@ -42,8 +42,7 @@ class AutoLauncher { if (this.autoLaunch) { return app.commandLine.hasSwitch("hidden"); } else { - // TODO(MR): This apparently doesn't work anymore. 
- return app.getLoginItemSettings().wasOpenedAtLogin; + return app.getLoginItemSettings().openAtLogin; } } } diff --git a/desktop/src/main/services/dir.ts b/desktop/src/main/services/dir.ts index d375648f6..d97cad6fb 100644 --- a/desktop/src/main/services/dir.ts +++ b/desktop/src/main/services/dir.ts @@ -1,5 +1,7 @@ import { shell } from "electron/common"; import { app, dialog } from "electron/main"; +import { existsSync } from "fs"; +import fs from "node:fs/promises"; import path from "node:path"; import { posixPath } from "../utils/electron"; @@ -38,14 +40,54 @@ export const openLogDirectory = () => openDirectory(logDirectoryPath()); * * [Note: Electron app paths] * - * By default, these paths are at the following locations: + * There are three paths we need to be aware of usually. * - * - macOS: `~/Library/Application Support/ente` + * First is the "appData". We can obtain this with `app.getPath("appData")`. + * This is per-user application data directory. This is usually the following: + * + * - Windows: `%APPDATA%`, e.g. `C:\Users\\AppData\Local` + * - Linux: `~/.config` + * - macOS: `~/Library/Application Support` + * + * Now, if we suffix the app's name onto the appData directory, we get the + * "userData" directory. This is the **primary** place applications are meant to + * store user's data, e.g. various configuration files and saved state. + * + * During development, our app name is "Electron", so this'd be, for example, + * `~/Library/Application Support/Electron` if we run using `yarn dev`. For the + * packaged production app, our app name is "ente", so this would be: + * + * - Windows: `%APPDATA%\ente`, e.g. `C:\Users\\AppData\Local\ente` * - Linux: `~/.config/ente` - * - Windows: `%APPDATA%`, e.g. `C:\Users\\AppData\Local\ente` - * - Windows: C:\Users\\AppData\Local\ + * - macOS: `~/Library/Application Support/ente` + * + * Note that Chromium also stores the browser state, e.g. localStorage or disk + * caches, in userData. 
* * https://www.electronjs.org/docs/latest/api/app * + * [Note: App log path] + * + * Finally, there is the "logs" directory. This is not within "appData" but has + * a slightly different OS specific path. Since our log file is named + * "ente.log", it can be found at: + * + * - macOS: ~/Library/Logs/ente/ente.log (production) + * - macOS: ~/Library/Logs/Electron/ente.log (dev) + * - Linux: ~/.config/ente/logs/ente.log + * - Windows: %USERPROFILE%\AppData\Roaming\ente\logs\ente.log */ const logDirectoryPath = () => app.getPath("logs"); + +/** + * See: [Note: Legacy face crops] + */ +export const legacyFaceCrop = async ( + faceID: string, +): Promise => { + // See: [Note: Getting the cache path] + // @ts-expect-error "cache" works but is not part of the public API. + const cacheDir = path.join(app.getPath("cache"), "ente"); + const filePath = path.join(cacheDir, "face-crops", faceID); + return existsSync(filePath) ? await fs.readFile(filePath) : undefined; +}; diff --git a/desktop/src/main/services/ffmpeg.ts b/desktop/src/main/services/ffmpeg.ts index 0a5c4eed2..4803fd6f0 100644 --- a/desktop/src/main/services/ffmpeg.ts +++ b/desktop/src/main/services/ffmpeg.ts @@ -1,11 +1,10 @@ import pathToFfmpeg from "ffmpeg-static"; import fs from "node:fs/promises"; import type { ZipItem } from "../../types/ipc"; -import log from "../log"; -import { ensure, withTimeout } from "../utils/common"; +import { ensure } from "../utils/common"; import { execAsync } from "../utils/electron"; import { - deleteTempFile, + deleteTempFileIgnoringErrors, makeFileForDataOrPathOrZipItem, makeTempFilePath, } from "../utils/temp"; @@ -46,13 +45,7 @@ export const ffmpegExec = async ( command: string[], dataOrPathOrZipItem: Uint8Array | string | ZipItem, outputFileExtension: string, - timeoutMS: number, ): Promise => { - // TODO (MR): This currently copies files for both input (when - // dataOrPathOrZipItem is data) and output. 
This needs to be tested - // extremely large video files when invoked downstream of `convertToMP4` in - // the web code. - const { path: inputFilePath, isFileTemporary: isInputFileTemporary, @@ -69,17 +62,13 @@ export const ffmpegExec = async ( outputFilePath, ); - if (timeoutMS) await withTimeout(execAsync(cmd), 30 * 1000); - else await execAsync(cmd); + await execAsync(cmd); return fs.readFile(outputFilePath); } finally { - try { - if (isInputFileTemporary) await deleteTempFile(inputFilePath); - await deleteTempFile(outputFilePath); - } catch (e) { - log.error("Could not clean up temp files", e); - } + if (isInputFileTemporary) + await deleteTempFileIgnoringErrors(inputFilePath); + await deleteTempFileIgnoringErrors(outputFilePath); } }; @@ -112,3 +101,32 @@ const ffmpegBinaryPath = () => { // https://github.com/eugeneware/ffmpeg-static/issues/16 return ensure(pathToFfmpeg).replace("app.asar", "app.asar.unpacked"); }; + +/** + * A variant of {@link ffmpegExec} adapted to work with streams so that it can + * handle the MP4 conversion of large video files. + * + * See: [Note: Convert to MP4] + + * @param inputFilePath The path to a file on the user's local file system. This + * is the video we want to convert. + * @param inputFilePath The path to a file on the user's local file system where + * we should write the converted MP4 video. 
+ */ +export const ffmpegConvertToMP4 = async ( + inputFilePath: string, + outputFilePath: string, +): Promise => { + const command = [ + ffmpegPathPlaceholder, + "-i", + inputPathPlaceholder, + "-preset", + "ultrafast", + outputPathPlaceholder, + ]; + + const cmd = substitutePlaceholders(command, inputFilePath, outputFilePath); + + await execAsync(cmd); +}; diff --git a/desktop/src/main/services/image.ts b/desktop/src/main/services/image.ts index 957fe8120..c07b051a1 100644 --- a/desktop/src/main/services/image.ts +++ b/desktop/src/main/services/image.ts @@ -6,7 +6,7 @@ import { CustomErrorMessage, type ZipItem } from "../../types/ipc"; import log from "../log"; import { execAsync, isDev } from "../utils/electron"; import { - deleteTempFile, + deleteTempFileIgnoringErrors, makeFileForDataOrPathOrZipItem, makeTempFilePath, } from "../utils/temp"; @@ -23,12 +23,8 @@ export const convertToJPEG = async (imageData: Uint8Array) => { await execAsync(command); return new Uint8Array(await fs.readFile(outputFilePath)); } finally { - try { - await deleteTempFile(inputFilePath); - await deleteTempFile(outputFilePath); - } catch (e) { - log.error("Could not clean up temp files", e); - } + await deleteTempFileIgnoringErrors(inputFilePath); + await deleteTempFileIgnoringErrors(outputFilePath); } }; @@ -49,6 +45,9 @@ const convertToJPEGCommand = ( ]; case "linux": + // The bundled binary is an ELF x86-64 executable. + if (process.arch != "x64") + throw new Error(CustomErrorMessage.NotAvailable); return [ imageMagickPath(), inputFilePath, @@ -79,7 +78,7 @@ export const generateImageThumbnail = async ( const outputFilePath = await makeTempFilePath("jpeg"); - // Construct the command first, it may throw `NotAvailable` on win32. + // Construct the command first, it may throw `NotAvailable`. 
let quality = 70; let command = generateImageThumbnailCommand( inputFilePath, @@ -94,6 +93,9 @@ export const generateImageThumbnail = async ( let thumbnail: Uint8Array; do { await execAsync(command); + // TODO(MR): release 1.7 + // TODO(MR): imagemagick debugging. Remove me after verifying logs. + log.info(`Generated thumbnail using ${command.join(" ")}`); thumbnail = new Uint8Array(await fs.readFile(outputFilePath)); quality -= 10; command = generateImageThumbnailCommand( @@ -105,12 +107,9 @@ export const generateImageThumbnail = async ( } while (thumbnail.length > maxSize && quality > 50); return thumbnail; } finally { - try { - if (isInputFileTemporary) await deleteTempFile(inputFilePath); - await deleteTempFile(outputFilePath); - } catch (e) { - log.error("Could not clean up temp files", e); - } + if (isInputFileTemporary) + await deleteTempFileIgnoringErrors(inputFilePath); + await deleteTempFileIgnoringErrors(outputFilePath); } }; @@ -138,14 +137,17 @@ const generateImageThumbnailCommand = ( ]; case "linux": + // The bundled binary is an ELF x86-64 executable. + if (process.arch != "x64") + throw new Error(CustomErrorMessage.NotAvailable); return [ imageMagickPath(), - inputFilePath, - "-auto-orient", "-define", `jpeg:size=${2 * maxDimension}x${2 * maxDimension}`, + inputFilePath, + "-auto-orient", "-thumbnail", - `${maxDimension}x${maxDimension}>`, + `${maxDimension}x${maxDimension}`, "-unsharp", "0x.5", "-quality", diff --git a/desktop/src/main/services/store.ts b/desktop/src/main/services/store.ts index 20cc91ea4..471928d76 100644 --- a/desktop/src/main/services/store.ts +++ b/desktop/src/main/services/store.ts @@ -14,6 +14,15 @@ export const clearStores = () => { watchStore.clear(); }; +/** + * [Note: Safe storage keys] + * + * On macOS, `safeStorage` stores our data under a Keychain entry named + * " Safe Storage". 
Which resolves to: + * + * - Electron Safe Storage (dev) + * - ente Safe Storage (prod) + */ export const saveEncryptionKey = (encryptionKey: string) => { const encryptedKey = safeStorage.encryptString(encryptionKey); const b64EncryptedKey = Buffer.from(encryptedKey).toString("base64"); diff --git a/desktop/src/main/stream.ts b/desktop/src/main/stream.ts index bae13aa12..1c8223c87 100644 --- a/desktop/src/main/stream.ts +++ b/desktop/src/main/stream.ts @@ -3,13 +3,20 @@ */ import { net, protocol } from "electron/main"; import StreamZip from "node-stream-zip"; +import { randomUUID } from "node:crypto"; import { createWriteStream, existsSync } from "node:fs"; import fs from "node:fs/promises"; import { Readable } from "node:stream"; import { ReadableStream } from "node:stream/web"; import { pathToFileURL } from "node:url"; import log from "./log"; +import { ffmpegConvertToMP4 } from "./services/ffmpeg"; import { ensure } from "./utils/common"; +import { + deleteTempFile, + deleteTempFileIgnoringErrors, + makeTempFilePath, +} from "./utils/temp"; /** * Register a protocol handler that we use for streaming large files between the @@ -34,119 +41,117 @@ import { ensure } from "./utils/common"; * Depends on {@link registerPrivilegedSchemes}. */ export const registerStreamProtocol = () => { - protocol.handle("stream", async (request: Request) => { - const url = request.url; - // The request URL contains the command to run as the host, and the - // pathname of the file(s) as the search params. 
- const { host, searchParams } = new URL(url); - switch (host) { - case "read": - return handleRead(ensure(searchParams.get("path"))); - case "read-zip": - return handleReadZip( - ensure(searchParams.get("zipPath")), - ensure(searchParams.get("entryName")), - ); - case "write": - return handleWrite(ensure(searchParams.get("path")), request); - default: - return new Response("", { status: 404 }); + protocol.handle("stream", (request: Request) => { + try { + return handleStreamRequest(request); + } catch (e) { + log.error(`Failed to handle stream request for ${request.url}`, e); + return new Response(String(e), { status: 500 }); } }); }; -const handleRead = async (path: string) => { - try { - const res = await net.fetch(pathToFileURL(path).toString()); - if (res.ok) { - // net.fetch already seems to add "Content-Type" and "Last-Modified" - // headers, but I couldn't find documentation for this. In any case, - // since we already are stat-ting the file for the "Content-Length", - // we explicitly add the "X-Last-Modified-Ms" too, - // - // 1. Guaranteeing its presence, - // - // 2. Having it be in the exact format we want (no string <-> date - // conversions), - // - // 3. Retaining milliseconds. +const handleStreamRequest = async (request: Request): Promise => { + const url = request.url; + // The request URL contains the command to run as the host, and the + // pathname of the file(s) as the search params. + const { host, searchParams } = new URL(url); + switch (host) { + case "read": + return handleRead(ensure(searchParams.get("path"))); - const stat = await fs.stat(path); + case "read-zip": + return handleReadZip( + ensure(searchParams.get("zipPath")), + ensure(searchParams.get("entryName")), + ); - // Add the file's size as the Content-Length header. 
- const fileSize = stat.size; - res.headers.set("Content-Length", `${fileSize}`); + case "write": + return handleWrite(ensure(searchParams.get("path")), request); - // Add the file's last modified time (as epoch milliseconds). - const mtimeMs = stat.mtimeMs; - res.headers.set("X-Last-Modified-Ms", `${mtimeMs}`); + case "convert-to-mp4": { + const token = searchParams.get("token"); + const done = searchParams.get("done") !== null; + return token + ? done + ? handleConvertToMP4ReadDone(token) + : handleConvertToMP4Read(token) + : handleConvertToMP4Write(request); } - return res; - } catch (e) { - log.error(`Failed to read stream at ${path}`, e); - return new Response(`Failed to read stream: ${String(e)}`, { - status: 500, - }); + + default: + return new Response("", { status: 404 }); } }; +const handleRead = async (path: string) => { + const res = await net.fetch(pathToFileURL(path).toString()); + if (res.ok) { + // net.fetch already seems to add "Content-Type" and "Last-Modified" + // headers, but I couldn't find documentation for this. In any case, + // since we already are stat-ting the file for the "Content-Length", we + // explicitly add the "X-Last-Modified-Ms" too, + // + // 1. Guaranteeing its presence, + // + // 2. Having it be in the exact format we want (no string <-> date + // conversions), + // + // 3. Retaining milliseconds. + + const stat = await fs.stat(path); + + // Add the file's size as the Content-Length header. + const fileSize = stat.size; + res.headers.set("Content-Length", `${fileSize}`); + + // Add the file's last modified time (as epoch milliseconds). 
+ const mtimeMs = stat.mtimeMs; + res.headers.set("X-Last-Modified-Ms", `${mtimeMs}`); + } + return res; +}; + const handleReadZip = async (zipPath: string, entryName: string) => { - try { - const zip = new StreamZip.async({ file: zipPath }); - const entry = await zip.entry(entryName); - if (!entry) return new Response("", { status: 404 }); + const zip = new StreamZip.async({ file: zipPath }); + const entry = await zip.entry(entryName); + if (!entry) return new Response("", { status: 404 }); - // This returns an "old style" NodeJS.ReadableStream. - const stream = await zip.stream(entry); - // Convert it into a new style NodeJS.Readable. - const nodeReadable = new Readable().wrap(stream); - // Then convert it into a Web stream. - const webReadableStreamAny = Readable.toWeb(nodeReadable); - // However, we get a ReadableStream now. This doesn't go into the - // `BodyInit` expected by the Response constructor, which wants a - // ReadableStream. Force a cast. - const webReadableStream = - webReadableStreamAny as ReadableStream; + // This returns an "old style" NodeJS.ReadableStream. + const stream = await zip.stream(entry); + // Convert it into a new style NodeJS.Readable. + const nodeReadable = new Readable().wrap(stream); + // Then convert it into a Web stream. + const webReadableStreamAny = Readable.toWeb(nodeReadable); + // However, we get a ReadableStream now. This doesn't go into the + // `BodyInit` expected by the Response constructor, which wants a + // ReadableStream. Force a cast. + const webReadableStream = + webReadableStreamAny as ReadableStream; - // Close the zip handle when the underlying stream closes. - stream.on("end", () => void zip.close()); + // Close the zip handle when the underlying stream closes. 
+ stream.on("end", () => void zip.close()); - return new Response(webReadableStream, { - headers: { - // We don't know the exact type, but it doesn't really matter, - // just set it to a generic binary content-type so that the - // browser doesn't tinker with it thinking of it as text. - "Content-Type": "application/octet-stream", - "Content-Length": `${entry.size}`, - // While it is documented that entry.time is the modification - // time, the units are not mentioned. By seeing the source code, - // we can verify that it is indeed epoch milliseconds. See - // `parseZipTime` in the node-stream-zip source, - // https://github.com/antelle/node-stream-zip/blob/master/node_stream_zip.js - "X-Last-Modified-Ms": `${entry.time}`, - }, - }); - } catch (e) { - log.error( - `Failed to read entry ${entryName} from zip file at ${zipPath}`, - e, - ); - return new Response(`Failed to read stream: ${String(e)}`, { - status: 500, - }); - } + return new Response(webReadableStream, { + headers: { + // We don't know the exact type, but it doesn't really matter, just + // set it to a generic binary content-type so that the browser + // doesn't tinker with it thinking of it as text. + "Content-Type": "application/octet-stream", + "Content-Length": `${entry.size}`, + // While it is documented that entry.time is the modification time, + // the units are not mentioned. By seeing the source code, we can + // verify that it is indeed epoch milliseconds. 
See `parseZipTime` + // in the node-stream-zip source, + // https://github.com/antelle/node-stream-zip/blob/master/node_stream_zip.js + "X-Last-Modified-Ms": `${entry.time}`, + }, + }); }; const handleWrite = async (path: string, request: Request) => { - try { - await writeStream(path, ensure(request.body)); - return new Response("", { status: 200 }); - } catch (e) { - log.error(`Failed to write stream to ${path}`, e); - return new Response(`Failed to write stream: ${String(e)}`, { - status: 500, - }); - } + await writeStream(path, ensure(request.body)); + return new Response("", { status: 200 }); }; /** @@ -154,7 +159,7 @@ const handleWrite = async (path: string, request: Request) => { * * The returned promise resolves when the write completes. * - * @param filePath The local filesystem path where the file should be written. + * @param filePath The local file system path where the file should be written. * * @param readableStream A web * [ReadableStream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream). @@ -181,3 +186,84 @@ const writeNodeStream = async (filePath: string, fileStream: Readable) => { }); }); }; + +/** + * A map from token to file paths for convert-to-mp4 requests that we have + * received. + */ +const convertToMP4Results = new Map(); + +/** + * Clear any in-memory state for in-flight convert-to-mp4 requests. Meant to be + * called during logout. + */ +export const clearConvertToMP4Results = () => convertToMP4Results.clear(); + +/** + * [Note: Convert to MP4] + * + * When we want to convert a video to MP4, if we were to send the entire + * contents of the video from the renderer to the main process over IPC, it just + * causes the renderer to run out of memory and restart when the videos are very + * large. So we need to stream the original video renderer → main and then + * stream back the converted video renderer ← main. 
+ * + * Currently Chromium does not support bi-directional streaming ("full" duplex + * mode for the Web fetch API). So we need to simulate that using two different + * streaming requests. + * + * renderer → main stream://convert-to-mp4 + * → request.body is the original video + * ← response is a token + * + * renderer → main stream://convert-to-mp4?token= + * ← response.body is the converted video + * + * renderer → main stream://convert-to-mp4?token=&done + * ← 200 OK + * + * Note that the conversion itself is not streaming. The conversion still + * happens in a single shot, we are just streaming the data across the IPC + * boundary to allow us to pass large amounts of data without running out of + * memory. + * + * See also: [Note: IPC streams] + */ +const handleConvertToMP4Write = async (request: Request) => { + const inputTempFilePath = await makeTempFilePath(); + await writeStream(inputTempFilePath, ensure(request.body)); + + const outputTempFilePath = await makeTempFilePath("mp4"); + try { + await ffmpegConvertToMP4(inputTempFilePath, outputTempFilePath); + } catch (e) { + log.error("Conversion to MP4 failed", e); + await deleteTempFileIgnoringErrors(outputTempFilePath); + throw e; + } finally { + await deleteTempFileIgnoringErrors(inputTempFilePath); + } + + const token = randomUUID(); + convertToMP4Results.set(token, outputTempFilePath); + return new Response(token, { status: 200 }); +}; + +const handleConvertToMP4Read = async (token: string) => { + const filePath = convertToMP4Results.get(token); + if (!filePath) + return new Response(`Unknown token ${token}`, { status: 404 }); + + return net.fetch(pathToFileURL(filePath).toString()); +}; + +const handleConvertToMP4ReadDone = async (token: string) => { + const filePath = convertToMP4Results.get(token); + if (!filePath) + return new Response(`Unknown token ${token}`, { status: 404 }); + + await deleteTempFile(filePath); + + convertToMP4Results.delete(token); + return new Response("", { status: 200 }); +}; 
diff --git a/desktop/src/main/utils/common.ts b/desktop/src/main/utils/common.ts index 1f5016e61..5ed46aa8a 100644 --- a/desktop/src/main/utils/common.ts +++ b/desktop/src/main/utils/common.ts @@ -13,32 +13,3 @@ export const ensure = (v: T | null | undefined): T => { if (v === undefined) throw new Error("Required value was not found"); return v; }; - -/** - * Wait for {@link ms} milliseconds - * - * This function is a promisified `setTimeout`. It returns a promise that - * resolves after {@link ms} milliseconds. - */ -export const wait = (ms: number) => - new Promise((resolve) => setTimeout(resolve, ms)); - -/** - * Await the given {@link promise} for {@link timeoutMS} milliseconds. If it - * does not resolve within {@link timeoutMS}, then reject with a timeout error. - */ -export const withTimeout = async (promise: Promise, ms: number) => { - let timeoutId: ReturnType; - const rejectOnTimeout = new Promise((_, reject) => { - timeoutId = setTimeout( - () => reject(new Error("Operation timed out")), - ms, - ); - }); - const promiseAndCancelTimeout = async () => { - const result = await promise; - clearTimeout(timeoutId); - return result; - }; - return Promise.race([promiseAndCancelTimeout(), rejectOnTimeout]); -}; diff --git a/desktop/src/main/utils/electron.ts b/desktop/src/main/utils/electron.ts index 93e8565ef..133edf87c 100644 --- a/desktop/src/main/utils/electron.ts +++ b/desktop/src/main/utils/electron.ts @@ -49,12 +49,12 @@ export const posixPath = (platformPath: string) => * > output, this might not be the best option and it might be better to use the * > underlying functions. */ -export const execAsync = (command: string | string[]) => { +export const execAsync = async (command: string | string[]) => { const escapedCommand = Array.isArray(command) ? 
shellescape(command) : command; const startTime = Date.now(); - const result = execAsync_(escapedCommand); + const result = await execAsync_(escapedCommand); log.debug( () => `${escapedCommand} (${Math.round(Date.now() - startTime)} ms)`, ); diff --git a/desktop/src/main/utils/temp.ts b/desktop/src/main/utils/temp.ts index 11f7a5d84..70dec844d 100644 --- a/desktop/src/main/utils/temp.ts +++ b/desktop/src/main/utils/temp.ts @@ -4,6 +4,7 @@ import { existsSync } from "node:fs"; import fs from "node:fs/promises"; import path from "node:path"; import type { ZipItem } from "../../types/ipc"; +import log from "../log"; import { ensure } from "./common"; /** @@ -62,6 +63,19 @@ export const deleteTempFile = async (tempFilePath: string) => { await fs.rm(tempFilePath, { force: true }); }; +/** + * A variant of {@link deleteTempFile} that supresses any errors, making it + * safe to call them in a sequence without needing to handle the scenario where + * one of them failing causes the rest to be skipped. + */ +export const deleteTempFileIgnoringErrors = async (tempFilePath: string) => { + try { + await deleteTempFile(tempFilePath); + } catch (e) { + log.error(`Could not delete temporary file at path ${tempFilePath}`, e); + } +}; + /** The result of {@link makeFileForDataOrPathOrZipItem}. 
*/ interface FileForDataOrPathOrZipItem { /** diff --git a/desktop/src/preload.ts b/desktop/src/preload.ts index 407e541ff..d52745184 100644 --- a/desktop/src/preload.ts +++ b/desktop/src/preload.ts @@ -65,6 +65,9 @@ const selectDirectory = () => ipcRenderer.invoke("selectDirectory"); const clearStores = () => ipcRenderer.send("clearStores"); +const clearConvertToMP4Results = () => + ipcRenderer.send("clearConvertToMP4Results"); + const encryptionKey = () => ipcRenderer.invoke("encryptionKey"); const saveEncryptionKey = (encryptionKey: string) => @@ -140,14 +143,12 @@ const ffmpegExec = ( command: string[], dataOrPathOrZipItem: Uint8Array | string | ZipItem, outputFileExtension: string, - timeoutMS: number, ) => ipcRenderer.invoke( "ffmpegExec", command, dataOrPathOrZipItem, outputFileExtension, - timeoutMS, ); // - ML @@ -164,6 +165,9 @@ const detectFaces = (input: Float32Array) => const faceEmbedding = (input: Float32Array) => ipcRenderer.invoke("faceEmbedding", input); +const legacyFaceCrop = (faceID: string) => + ipcRenderer.invoke("legacyFaceCrop", faceID); + // - Watch const watchGet = () => ipcRenderer.invoke("watchGet"); @@ -305,6 +309,7 @@ contextBridge.exposeInMainWorld("electron", { openLogDirectory, selectDirectory, clearStores, + clearConvertToMP4Results, encryptionKey, saveEncryptionKey, onMainWindowFocus, @@ -341,6 +346,7 @@ contextBridge.exposeInMainWorld("electron", { clipTextEmbeddingIfAvailable, detectFaces, faceEmbedding, + legacyFaceCrop, // - Watch diff --git a/desktop/yarn.lock b/desktop/yarn.lock index 833b623a7..21e56d0ae 100644 --- a/desktop/yarn.lock +++ b/desktop/yarn.lock @@ -7,29 +7,6 @@ resolved "https://registry.yarnpkg.com/7zip-bin/-/7zip-bin-5.2.0.tgz#7a03314684dd6572b7dfa89e68ce31d60286854d" integrity sha512-ukTPVhqG4jNzMro2qA9HSCSSVJN3aN7tlb+hfqYCt3ER0yWroeA2VR38MNrOHLQ/cVj+DaIMad0kFCtWWowh/A== -"@babel/code-frame@^7.0.0": - version "7.24.2" - resolved 
"https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.24.2.tgz#718b4b19841809a58b29b68cde80bc5e1aa6d9ae" - integrity sha512-y5+tLQyV8pg3fsiln67BVLD1P13Eg4lh5RW9mF0zUuvLrv9uIQ4MCL+CRT+FTsBlBjcIan6PGsLcBN0m3ClUyQ== - dependencies: - "@babel/highlight" "^7.24.2" - picocolors "^1.0.0" - -"@babel/helper-validator-identifier@^7.24.5": - version "7.24.5" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.5.tgz#918b1a7fa23056603506370089bd990d8720db62" - integrity sha512-3q93SSKX2TWCG30M2G2kwaKeTYgEUp5Snjuj8qm729SObL6nbtUldAi37qbxkD5gg3xnBio+f9nqpSepGZMvxA== - -"@babel/highlight@^7.24.2": - version "7.24.5" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.24.5.tgz#bc0613f98e1dd0720e99b2a9ee3760194a704b6e" - integrity sha512-8lLmua6AVh/8SLJRRVD6V8p73Hir9w5mJrhE+IPpILG31KKlI9iz5zmBYKcWPS59qSfgP9RaSBQSHHE81WKuEw== - dependencies: - "@babel/helper-validator-identifier" "^7.24.5" - chalk "^2.4.2" - js-tokens "^4.0.0" - picocolors "^1.0.0" - "@babel/runtime@^7.21.0": version "7.24.5" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.24.5.tgz#230946857c053a36ccc66e1dd03b17dd0c4ed02c" @@ -339,9 +316,9 @@ integrity sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g== "@types/node@*", "@types/node@^20.9.0": - version "20.12.7" - resolved "https://registry.yarnpkg.com/@types/node/-/node-20.12.7.tgz#04080362fa3dd6c5822061aa3124f5c152cff384" - integrity sha512-wq0cICSkRLVaf3UGLMGItu/PtdY7oaXaI/RVU+xliKVOtRna3PRY57ZDfztpDL0n11vfymMUnXv8QwYCO7L1wg== + version "20.12.11" + resolved "https://registry.yarnpkg.com/@types/node/-/node-20.12.11.tgz#c4ef00d3507000d17690643278a60dc55a9dc9be" + integrity sha512-vDg9PZ/zi+Nqp6boSOT7plNuthRugEKixDv5sFTIpkE89MmNtEArAShI4mxuX2+UrLEe9pxC1vm2cjm9YlWbJw== dependencies: undici-types "~5.26.4" @@ -350,11 +327,6 @@ resolved 
"https://registry.yarnpkg.com/@types/node/-/node-10.17.60.tgz#35f3d6213daed95da7f0f73e75bcc6980e90597b" integrity sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw== -"@types/normalize-package-data@^2.4.0": - version "2.4.4" - resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz#56e2cc26c397c038fab0e3a917a12d5c5909e901" - integrity sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA== - "@types/plist@^3.0.1": version "3.0.5" resolved "https://registry.yarnpkg.com/@types/plist/-/plist-3.0.5.tgz#9a0c49c0f9886c8c8696a7904dd703f6284036e0" @@ -557,13 +529,6 @@ ansi-regex@^5.0.1: resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== -ansi-styles@^3.2.1: - version "3.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" - integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== - dependencies: - color-convert "^1.9.0" - ansi-styles@^4.0.0, ansi-styles@^4.1.0: version "4.3.0" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" @@ -641,13 +606,6 @@ are-we-there-yet@^3.0.0: delegates "^1.0.0" readable-stream "^3.6.0" -argparse@^1.0.7: - version "1.0.10" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" - integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== - dependencies: - sprintf-js "~1.0.2" - argparse@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" @@ -875,15 +833,6 @@ caseless@^0.12.0: 
resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" integrity sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw== -chalk@^2.4.2: - version "2.4.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" - integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== - dependencies: - ansi-styles "^3.2.1" - escape-string-regexp "^1.0.5" - supports-color "^5.3.0" - chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" @@ -973,13 +922,6 @@ clone@^1.0.2: resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" integrity sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg== -color-convert@^1.9.0: - version "1.9.3" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" - integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== - dependencies: - color-name "1.1.3" - color-convert@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" @@ -987,11 +929,6 @@ color-convert@^2.0.1: dependencies: color-name "~1.1.4" -color-name@1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" - integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== - color-name@~1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" @@ -1259,11 +1196,6 @@ dotenv-expand@^5.1.0: 
resolved "https://registry.yarnpkg.com/dotenv-expand/-/dotenv-expand-5.1.0.tgz#3fbaf020bfd794884072ea26b1e9791d45a629f0" integrity sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA== -dotenv@^8.2.0: - version "8.6.0" - resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-8.6.0.tgz#061af664d19f7f4d8fc6e4ff9b584ce237adcb8b" - integrity sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g== - dotenv@^9.0.2: version "9.0.2" resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-9.0.2.tgz#dacc20160935a37dea6364aa1bef819fb9b6ab05" @@ -1276,16 +1208,6 @@ ejs@^3.1.8: dependencies: jake "^10.8.5" -electron-builder-notarize@^1.5: - version "1.5.2" - resolved "https://registry.yarnpkg.com/electron-builder-notarize/-/electron-builder-notarize-1.5.2.tgz#540185b57a336fc6eec01bfe092a3b4764459255" - integrity sha512-vo6RGgIFYxMk2yp59N4NsvmAYfB7ncYi6gV9Fcq2TVKxEn2tPXrSjIKB2e/pu+5iXIY6BHNZNXa75F3DHgOOLA== - dependencies: - dotenv "^8.2.0" - electron-notarize "^1.1.1" - js-yaml "^3.14.0" - read-pkg-up "^7.0.0" - electron-builder@25.0.0-alpha.6: version "25.0.0-alpha.6" resolved "https://registry.yarnpkg.com/electron-builder/-/electron-builder-25.0.0-alpha.6.tgz#a72f96f7029539ac28f92ce5c83f872ba3b6e7c1" @@ -1308,14 +1230,6 @@ electron-log@^5.1: resolved "https://registry.yarnpkg.com/electron-log/-/electron-log-5.1.2.tgz#fb40ad7f4ae694dd0e4c02c662d1a65c03e1243e" integrity sha512-Cpg4hAZ27yM9wzE77c4TvgzxzavZ+dVltCczParXN+Vb3jocojCSAuSMCVOI9fhFuuOR+iuu3tZLX1cu0y0kgQ== -electron-notarize@^1.1.1: - version "1.2.2" - resolved "https://registry.yarnpkg.com/electron-notarize/-/electron-notarize-1.2.2.tgz#ebf2b258e8e08c1c9f8ff61dc53d5b16b439daf4" - integrity sha512-ZStVWYcWI7g87/PgjPJSIIhwQXOaw4/XeXU+pWqMMktSLHaGMLHdyPPN7Cmao7+Cr7fYufA16npdtMndYciHNw== - dependencies: - debug "^4.1.1" - fs-extra "^9.0.1" - electron-publish@25.0.0-alpha.6: version "25.0.0-alpha.6" resolved 
"https://registry.yarnpkg.com/electron-publish/-/electron-publish-25.0.0-alpha.6.tgz#8af3cb6e2435c00b8c71de43c330483808df5924" @@ -1352,9 +1266,9 @@ electron-updater@^6.1: tiny-typed-emitter "^2.1.0" electron@^30: - version "30.0.2" - resolved "https://registry.yarnpkg.com/electron/-/electron-30.0.2.tgz#95ba019216bf8be9f3097580123e33ea37497733" - integrity sha512-zv7T+GG89J/hyWVkQsLH4Y/rVEfqJG5M/wOBIGNaDdqd8UV9/YZPdS7CuFeaIj0H9LhCt95xkIQNpYB/3svOkQ== + version "30.0.3" + resolved "https://registry.yarnpkg.com/electron/-/electron-30.0.3.tgz#7c25ddb12ba89fd117991d010f1b274b1bafcb73" + integrity sha512-h+suwx6e0fnv/9wi0/cmCMtG+4LrPzJZa+3DEEpxcPcP+pcWnBI70t8QspxgMNIh2wzXLMD9XVqrLkEbiBAInw== dependencies: "@electron/get" "^2.0.0" "@types/node" "^20.9.0" @@ -1389,13 +1303,6 @@ err-code@^2.0.2: resolved "https://registry.yarnpkg.com/err-code/-/err-code-2.0.3.tgz#23c2f3b756ffdfc608d30e27c9a941024807e7f9" integrity sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA== -error-ex@^1.3.1: - version "1.3.2" - resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" - integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== - dependencies: - is-arrayish "^0.2.1" - es-define-property@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/es-define-property/-/es-define-property-1.0.0.tgz#c7faefbdff8b2696cf5f46921edfb77cc4ba3845" @@ -1418,11 +1325,6 @@ escalade@^3.1.1: resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.2.tgz#54076e9ab29ea5bf3d8f1ed62acffbb88272df27" integrity sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA== -escape-string-regexp@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" - integrity 
sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== - escape-string-regexp@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" @@ -1494,11 +1396,6 @@ espree@^9.6.0, espree@^9.6.1: acorn-jsx "^5.3.2" eslint-visitor-keys "^3.4.1" -esprima@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" - integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== - esquery@^1.4.2: version "1.5.0" resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.5.0.tgz#6ce17738de8577694edd7361c57182ac8cb0db0b" @@ -1622,14 +1519,6 @@ find-up@^3.0.0: dependencies: locate-path "^3.0.0" -find-up@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" - integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== - dependencies: - locate-path "^5.0.0" - path-exists "^4.0.0" - find-up@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" @@ -1907,11 +1796,6 @@ graphemer@^1.4.0: resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6" integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag== -has-flag@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" - integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== - has-flag@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" @@ -1946,11 +1830,6 @@ 
hasown@^2.0.0: dependencies: function-bind "^1.1.2" -hosted-git-info@^2.1.4: - version "2.8.9" - resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" - integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== - hosted-git-info@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-4.1.0.tgz#827b82867e9ff1c8d0c4d9d53880397d2c86d224" @@ -2081,11 +1960,6 @@ ip-address@^9.0.5: jsbn "1.1.0" sprintf-js "^1.1.3" -is-arrayish@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" - integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== - is-binary-path@~2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" @@ -2198,19 +2072,6 @@ jpeg-js@^0.4: resolved "https://registry.yarnpkg.com/jpeg-js/-/jpeg-js-0.4.4.tgz#a9f1c6f1f9f0fa80cdb3484ed9635054d28936aa" integrity sha512-WZzeDOEtTOBK4Mdsar0IqEU5sMr3vSV2RqkAIzUEV2BHnUfKGyswWFPFwK5EeDo93K3FohSHbLAjj0s1Wzd+dg== -js-tokens@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" - integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== - -js-yaml@^3.14.0: - version "3.14.1" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" - integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== - dependencies: - argparse "^1.0.7" - esprima "^4.0.0" - js-yaml@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" @@ -2228,11 +2089,6 @@ 
json-buffer@3.0.1: resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13" integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ== -json-parse-even-better-errors@^2.3.0: - version "2.3.1" - resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" - integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== - json-schema-traverse@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" @@ -2299,11 +2155,6 @@ levn@^0.4.1: prelude-ls "^1.2.1" type-check "~0.4.0" -lines-and-columns@^1.1.6: - version "1.2.4" - resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" - integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== - locate-path@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" @@ -2312,13 +2163,6 @@ locate-path@^3.0.0: p-locate "^3.0.0" path-exists "^3.0.0" -locate-path@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" - integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== - dependencies: - p-locate "^4.1.0" - locate-path@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" @@ -2643,16 +2487,6 @@ nopt@^6.0.0: dependencies: abbrev "^1.0.0" -normalize-package-data@^2.5.0: - version "2.5.0" - resolved 
"https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" - integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== - dependencies: - hosted-git-info "^2.1.4" - resolve "^1.10.0" - semver "2 || 3 || 4 || 5" - validate-npm-package-license "^3.0.1" - normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" @@ -2737,7 +2571,7 @@ p-cancelable@^2.0.0: resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-2.1.1.tgz#aab7fbd416582fa32a3db49859c122487c5ed2cf" integrity sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg== -p-limit@^2.0.0, p-limit@^2.2.0: +p-limit@^2.0.0: version "2.3.0" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== @@ -2758,13 +2592,6 @@ p-locate@^3.0.0: dependencies: p-limit "^2.0.0" -p-locate@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" - integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== - dependencies: - p-limit "^2.2.0" - p-locate@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" @@ -2796,16 +2623,6 @@ parse-cache-control@^1.0.1: resolved "https://registry.yarnpkg.com/parse-cache-control/-/parse-cache-control-1.0.1.tgz#8eeab3e54fa56920fe16ba38f77fa21aacc2d74e" integrity sha512-60zvsJReQPX5/QP0Kzfd/VrpjScIQ7SHBW6bFCYfEP+fp0Eppr1SHhIO5nd1PjZtvclzSzES9D/p5nFJurwfWg== -parse-json@^5.0.0: - version "5.2.0" - resolved 
"https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" - integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== - dependencies: - "@babel/code-frame" "^7.0.0" - error-ex "^1.3.1" - json-parse-even-better-errors "^2.3.0" - lines-and-columns "^1.1.6" - path-exists@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" @@ -2849,11 +2666,6 @@ pend@~1.2.0: resolved "https://registry.yarnpkg.com/pend/-/pend-1.2.0.tgz#7a57eb550a6783f9115331fcf4663d5c8e007a50" integrity sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg== -picocolors@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" - integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== - picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" @@ -2958,25 +2770,6 @@ read-config-file@6.3.2: json5 "^2.2.0" lazy-val "^1.0.4" -read-pkg-up@^7.0.0: - version "7.0.1" - resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-7.0.1.tgz#f3a6135758459733ae2b95638056e1854e7ef507" - integrity sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg== - dependencies: - find-up "^4.1.0" - read-pkg "^5.2.0" - type-fest "^0.8.1" - -read-pkg@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-5.2.0.tgz#7bf295438ca5a33e56cd30e053b34ee7250c93cc" - integrity sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg== - dependencies: - "@types/normalize-package-data" "^2.4.0" - normalize-package-data "^2.5.0" - parse-json "^5.0.0" - 
type-fest "^0.6.0" - readable-stream@^3.0.2, readable-stream@^3.4.0, readable-stream@^3.6.0: version "3.6.2" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" @@ -3025,7 +2818,7 @@ resolve-from@^4.0.0: resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== -resolve@^1.1.6, resolve@^1.10.0: +resolve@^1.1.6: version "1.22.8" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d" integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw== @@ -3126,17 +2919,17 @@ semver-compare@^1.0.0: resolved "https://registry.yarnpkg.com/semver-compare/-/semver-compare-1.0.0.tgz#0dee216a1c941ab37e9efb1788f6afc5ff5537fc" integrity sha512-YM3/ITh2MJ5MtzaM429anh+x2jiLVjqILF4m4oyQB18W7Ggea7BfqdH/wGMK7dDiMghv/6WG7znWMwUDzJiXow== -"semver@2 || 3 || 4 || 5": - version "5.7.2" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8" - integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== - semver@^6.2.0: version "6.3.1" resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== -semver@^7.3.2, semver@^7.3.5, semver@^7.3.8, semver@^7.5.3, semver@^7.6.0: +semver@^7.3.2: + version "7.6.2" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.2.tgz#1e3b34759f896e8f14d6134732ce798aeb0c6e13" + integrity sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w== + +semver@^7.3.5, semver@^7.3.8, semver@^7.5.3, semver@^7.6.0: version "7.6.0" resolved 
"https://registry.yarnpkg.com/semver/-/semver-7.6.0.tgz#1a46a4db4bffcccd97b743b5005c8325f23d4e2d" integrity sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg== @@ -3284,42 +3077,11 @@ spawn-command@0.0.2: resolved "https://registry.yarnpkg.com/spawn-command/-/spawn-command-0.0.2.tgz#9544e1a43ca045f8531aac1a48cb29bdae62338e" integrity sha512-zC8zGoGkmc8J9ndvml8Xksr1Amk9qBujgbF0JAIWO7kXr43w0h/0GJNM/Vustixu+YE8N/MTrQ7N31FvHUACxQ== -spdx-correct@^3.0.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.2.0.tgz#4f5ab0668f0059e34f9c00dce331784a12de4e9c" - integrity sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA== - dependencies: - spdx-expression-parse "^3.0.0" - spdx-license-ids "^3.0.0" - -spdx-exceptions@^2.1.0: - version "2.5.0" - resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz#5d607d27fc806f66d7b64a766650fa890f04ed66" - integrity sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w== - -spdx-expression-parse@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679" - integrity sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q== - dependencies: - spdx-exceptions "^2.1.0" - spdx-license-ids "^3.0.0" - -spdx-license-ids@^3.0.0: - version "3.0.17" - resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.17.tgz#887da8aa73218e51a1d917502d79863161a93f9c" - integrity sha512-sh8PWc/ftMqAAdFiBu6Fy6JUOYjqDJBJvIhpfDMyHrr0Rbp5liZqd4TjtQ/RgfLjKFZb+LMx5hpml5qOWy0qvg== - sprintf-js@^1.1.2, sprintf-js@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.1.3.tgz#4914b903a2f8b685d17fdf78a70e917e872e444a" integrity 
sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA== -sprintf-js@~1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" - integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== - ssri@^9.0.0: version "9.0.1" resolved "https://registry.yarnpkg.com/ssri/-/ssri-9.0.1.tgz#544d4c357a8d7b71a19700074b6883fcb4eae057" @@ -3367,13 +3129,6 @@ sumchecker@^3.0.1: dependencies: debug "^4.1.0" -supports-color@^5.3.0: - version "5.5.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" - integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== - dependencies: - has-flag "^3.0.0" - supports-color@^7.1.0: version "7.2.0" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" @@ -3501,16 +3256,6 @@ type-fest@^0.20.2: resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== -type-fest@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.6.0.tgz#8d2a2370d3df886eb5c90ada1c5bf6188acf838b" - integrity sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg== - -type-fest@^0.8.1: - version "0.8.1" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d" - integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA== - type-fest@^2.17.0: version "2.19.0" resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-2.19.0.tgz#88068015bb33036a598b952e55e9311a60fd3a9b" @@ -3577,14 +3322,6 @@ 
util-deprecate@^1.0.1: resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== -validate-npm-package-license@^3.0.1: - version "3.0.4" - resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" - integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== - dependencies: - spdx-correct "^3.0.0" - spdx-expression-parse "^3.0.0" - verror@^1.10.0: version "1.10.1" resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.1.tgz#4bf09eeccf4563b109ed4b3d458380c972b0cdeb" diff --git a/docs/docs/.vitepress/sidebar.ts b/docs/docs/.vitepress/sidebar.ts index 6af9e3556..84ae5e0fa 100644 --- a/docs/docs/.vitepress/sidebar.ts +++ b/docs/docs/.vitepress/sidebar.ts @@ -123,6 +123,10 @@ export const sidebar = [ text: "Troubleshooting", collapsed: true, items: [ + { + text: "Desktop install", + link: "/photos/troubleshooting/desktop-install/", + }, { text: "Files not uploading", link: "/photos/troubleshooting/files-not-uploading", @@ -197,6 +201,10 @@ export const sidebar = [ text: "System requirements", link: "/self-hosting/guides/system-requirements", }, + { + text: "Configuring S3", + link: "/self-hosting/guides/configuring-s3", + }, { text: "Using external S3", link: "/self-hosting/guides/external-s3", diff --git a/docs/docs/photos/features/cast/index.md b/docs/docs/photos/features/cast/index.md index 89dc801f6..ecd91cb7c 100644 --- a/docs/docs/photos/features/cast/index.md +++ b/docs/docs/photos/features/cast/index.md @@ -1,19 +1,13 @@ --- -title: Archive -description: | - Archiving photos and albums in Ente Photos to remove them from your home - timeline +title: Cast +description: + Casting your photos on to a large screen or a TV or a Chromecast device --- -> 
[!CAUTION] -> -> This is preview documentation for an upcoming feature. This feature has not -> yet been released yet, so the steps below will not work currently. - # Cast With Ente Cast, you can play a slideshow of your favourite albums on your Google -Chromecast TVs or other Internet-connected large screen devices. +Chromecast TVs or any other internet-connected large screen devices. ## Get Started diff --git a/docs/docs/photos/troubleshooting/desktop-install/index.md b/docs/docs/photos/troubleshooting/desktop-install/index.md new file mode 100644 index 000000000..7410c7818 --- /dev/null +++ b/docs/docs/photos/troubleshooting/desktop-install/index.md @@ -0,0 +1,75 @@ +--- +title: Desktop installation +description: Troubleshooting issues when installing the Ente Photos desktop app +--- + +# Desktop app installation + +The latest version of the Ente Photos desktop app can be downloaded from +[ente.io/download](https://ente.io/download). If you're having trouble, please +see if any of the following cases apply. + +## Windows + +If the app stops with an "A JavaScript error occurred in the main process - The +specified module could not be found" error on your Windows machine when you +start it, then you might need to install the VC++ runtime from Microsoft. + +This is what the error looks like: + +![Error when VC++ runtime is not installed](windows-vc.png){width=500px} + +You can install the Microsoft VC++ redistributable runtime from here:
+https://learn.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist?view=msvc-170#latest-microsoft-visual-c-redistributable-version + +## AppImages on ARM64 Linux + +If you're on an ARM64 machine running Linux, and the AppImages doesn't do +anything when you run it, you will need to run the following command on your +machine: + +```sh +sudo ln -s /usr/lib/aarch64-linux-gnu/libz.so{.1,} +``` + +It is possible that the exact path might be different on your machine. Briefly, +what we need to do is create `libz.so` as an alias for `libz.so.1`. For more +details, see the following upstream issues: + +- libz.so cannot open shared object file on ARM64 - + [AppImage/AppImageKit/issues/1092](https://github.com/AppImage/AppImageKit/issues/1092) + +- libz.so: cannot open shared object file with Ubuntu arm64 - + [electron-userland/electron-builder/issues/7835](https://github.com/electron-userland/electron-builder/issues/7835) + +## AppImage says it requires FUSE + +See +[docs.appimage.org](https://docs.appimage.org/user-guide/troubleshooting/fuse.html#the-appimage-tells-me-it-needs-fuse-to-run). + +tl;dr; for example, on Ubuntu, + +```sh +sudo apt install libfuse2 +``` + +## Linux SUID error + +On some Linux distributions, if you run the AppImage from the CLI, it might fail +with the following error: + +> The SUID sandbox helper binary was found, but is not configured correctly. + +This happens when you try to run the AppImage from the command line. If you +instead double click on the AppImage in your Files browser, then it should start +properly. + +If you do want to run it from the command line, you can do so by passing the +`--no-sandbox` flag when executing the AppImage. e.g. + +```sh +./ente.AppImage --no-sandbox +``` + +For more details, see this upstream issue on +[electron](https://github.com/electron/electron/issues/17972). 
diff --git a/docs/docs/photos/troubleshooting/desktop-install/windows-vc.png b/docs/docs/photos/troubleshooting/desktop-install/windows-vc.png new file mode 100644 index 000000000..852c037d5 Binary files /dev/null and b/docs/docs/photos/troubleshooting/desktop-install/windows-vc.png differ diff --git a/docs/docs/self-hosting/guides/configuring-s3.md b/docs/docs/self-hosting/guides/configuring-s3.md new file mode 100644 index 000000000..8e823ed2a --- /dev/null +++ b/docs/docs/self-hosting/guides/configuring-s3.md @@ -0,0 +1,80 @@ +--- +title: Configuring S3 buckets +description: + Configure S3 endpoints to fix upload errors or use your self hosted ente + from outside localhost +--- + +# Configuring S3 + +There are three components involved in uploading: + +1. The client (e.g. the web app or the mobile app) +2. Ente's server (museum) +3. The S3-compatible object storage (e.g. minio in the default starter) + +For the uploads to work, all three of them need to be able to reach each other. +This is because the client uploads directly to the object storage. The +interaction goes something like this: + +1. Client wants to upload, it asks museum where it should upload to. +2. Museum creates pre-signed URLs for the S3 bucket that was configured. +3. Client directly uploads to the S3 buckets these URLs. + +The upshot of this is that _both_ the client and museum should be able to reach +your S3 bucket. + +The URL for the S3 bucket is configured in +[scripts/compose/credentials.yaml](https://github.com/ente-io/ente/blob/main/server/scripts/compose/credentials.yaml#L10). +You can edit this file directly when testing, though it is just simpler and more +robust to create a `museum.yaml` (in the same folder as the Docker compose file) +and put your custom configuration there (in your case, you can put an entire +`s3` config object in your `museum.yaml`). 
+ +> [!TIP] +> +> For more details about these configuration objects, see the documentaion for +> the `s3` object in +> [configurations/local.yaml](https://github.com/ente-io/ente/blob/main/server/configurations/local.yaml). + +By default, you only need to configure the endpoint for the first bucket. + +> [!NOTE] +> +> If you're wondering why there are 3 buckets - that's because our production +> instance uses these to perform replication. +> +> However, in a self hosted setup replication is off by default (you can turn it +> on if you want). When replication is turned off, only the first bucket is +> used, and you can remove the other two if you wish or just ignore them. + +The `endpoint` for the first bucket in the starter `credentials.yaml` is +`localhost:3200`. The way this works then is that both museum (`2`) and minio +(`3`) are running within the same Docker compose cluster, so are able to reach +each other. If at this point we were to run the web app (`1`) on localhost (say +using `yarn dev:photos`), it would also run on localhost and thus would be able +to reach `3`. + +If you were to try and connect from a mobile app, this would not work since +`localhost:3200` would not resolve on your mobile. So you'll need to modify this +endpoint to a value, say `yourserverip:3200`, so that the mobile app can also +reach it. + +The same principle applies if you're deploying to your custom domain. + +> [!NOTE] +> +> If you need to configure SSL, for example if you're running over the internet, +> you'll need to turn off `s3.are_local_buckets` (which disables SSL in the +> default starter compose template). +> +> Disabling `s3.are_local_buckets` also switches to the subdomain style URLs for +> the buckets. However, not all S3 providers support these, in particular, minio +> does not work with these in default configuration. So in such cases you'll +> also need to then enable `s3.use_path_style_urls`. 
+ +To summarize: + +Set the S3 bucket `endpoint` in `credentials.yaml` to a `yourserverip:3200` or +some such IP/hostname that accessible from both where you are running the Ente +clients (e.g. the mobile app) and also from within the Docker compose cluster. diff --git a/docs/docs/self-hosting/guides/custom-server/index.md b/docs/docs/self-hosting/guides/custom-server/index.md index a5ce76cc2..110e3dbb8 100644 --- a/docs/docs/self-hosting/guides/custom-server/index.md +++ b/docs/docs/self-hosting/guides/custom-server/index.md @@ -34,4 +34,18 @@ endpoint: api: "http://localhost:8080" ``` -(Another [example](https://github.com/ente-io/ente/blob/main/cli/config.yaml.example)) +(Another +[example](https://github.com/ente-io/ente/blob/main/cli/config.yaml.example)) + +## Web appps and Photos desktop app + +You will need to build the app from source and use the +`NEXT_PUBLIC_ENTE_ENDPOINT` environment variable to tell it which server to +connect to. For example: + +```sh +NEXT_PUBLIC_ENTE_ENDPOINT=http://localhost:8080 yarn dev:photos +``` + +For more details, see +[hosting the web app](https://help.ente.io/self-hosting/guides/web-app). diff --git a/docs/docs/self-hosting/guides/index.md b/docs/docs/self-hosting/guides/index.md index a8a64d960..b8a73d7eb 100644 --- a/docs/docs/self-hosting/guides/index.md +++ b/docs/docs/self-hosting/guides/index.md @@ -16,5 +16,8 @@ See the sidebar for existing guides. In particular: - For various admin related tasks, e.g. increasing the storage quota on your self hosted instance, see [administering your custom server](admin). -- For self hosting both the server and web app using external S3 buckets for - object storage, see [using external S3](external-s3). +- For configuring your S3 buckets to get the object storage to work from your + mobile device or for fixing an upload errors, see + [configuring S3](configuring-s3). 
There is also a longer + [community contributed guide](external-s3) for a more self hosted setup of + both the server and web app using external S3 buckets for object storage. diff --git a/docs/docs/self-hosting/guides/web-app.md b/docs/docs/self-hosting/guides/web-app.md index 49dfdd114..28802c457 100644 --- a/docs/docs/self-hosting/guides/web-app.md +++ b/docs/docs/self-hosting/guides/web-app.md @@ -1,6 +1,8 @@ --- title: Hosting the web app -description: Building and hosting Ente's web app, connecting it to your self-hosted server +description: + Building and hosting Ente's web app, connecting it to your self-hosted + server --- # Web app diff --git a/docs/docs/self-hosting/troubleshooting/uploads.md b/docs/docs/self-hosting/troubleshooting/uploads.md index 4f7273e94..435a5e93c 100644 --- a/docs/docs/self-hosting/troubleshooting/uploads.md +++ b/docs/docs/self-hosting/troubleshooting/uploads.md @@ -5,9 +5,9 @@ description: Fixing upload errors when trying to self host Ente # Uploads failing -If uploads to your self-hosted server are failing, make sure that -`credentials.yaml` has `yourserverip:3200` for all three minio locations. +If uploads to your minio are failing, you need to ensure that you've configured +the S3 bucket `endpoint` in `credentials.yaml` (or `museum.yaml`) to, say, +`yourserverip:3200`. This can be any host or port, it just need to be a value +that is reachable from both your client and from museum. -By default it is `localhost:3200`, and it needs to be changed to an IP that is -accessible from both where you are running the Ente clients (e.g. the mobile -app) and also from within the Docker compose cluster. +For more details, see [configuring-s3](/self-hosting/guides/configuring-s3). 
diff --git a/mobile/ios/Runner/Info.plist b/mobile/ios/Runner/Info.plist index cdbc23774..9afb874e5 100644 --- a/mobile/ios/Runner/Info.plist +++ b/mobile/ios/Runner/Info.plist @@ -108,7 +108,7 @@ NSBonjourServices _googlecast._tcp - F5BCEC64._googlecast._tcp + _F5BCEC64._googlecast._tcp NSLocalNetworkUsageDescription diff --git a/mobile/lib/core/configuration.dart b/mobile/lib/core/configuration.dart index cde766b1e..334da4af9 100644 --- a/mobile/lib/core/configuration.dart +++ b/mobile/lib/core/configuration.dart @@ -72,8 +72,6 @@ class Configuration { static const anonymousUserIDKey = "anonymous_user_id"; static const endPointKey = "endpoint"; - final kTempFolderDeletionTimeBuffer = const Duration(hours: 6).inMicroseconds; - static final _logger = Logger("Configuration"); String? _cachedToken; @@ -103,20 +101,7 @@ class Configuration { _documentsDirectory = (await getApplicationDocumentsDirectory()).path; _tempDocumentsDirPath = _documentsDirectory + "/temp/"; final tempDocumentsDir = Directory(_tempDocumentsDirPath); - try { - final currentTime = DateTime.now().microsecondsSinceEpoch; - if (tempDocumentsDir.existsSync() && - (_preferences.getInt(lastTempFolderClearTimeKey) ?? 
0) < - (currentTime - kTempFolderDeletionTimeBuffer)) { - await tempDocumentsDir.delete(recursive: true); - await _preferences.setInt(lastTempFolderClearTimeKey, currentTime); - _logger.info("Cleared temp folder"); - } else { - _logger.info("Skipping temp folder clear"); - } - } catch (e) { - _logger.warning(e); - } + await _cleanUpStaleFiles(tempDocumentsDir); tempDocumentsDir.createSync(recursive: true); final tempDirectoryPath = (await getTemporaryDirectory()).path; _thumbnailCacheDirectory = tempDirectoryPath + "/thumbnail-cache"; @@ -144,6 +129,42 @@ class Configuration { SuperLogging.setUserID(await _getOrCreateAnonymousUserID()).ignore(); } + // _cleanUpStaleFiles deletes all files in the temp directory that are older + // than kTempFolderDeletionTimeBuffer except the the temp encrypted files for upload. + // Those file are deleted by file uploader after the upload is complete or those + // files are not being used / tracked. + Future _cleanUpStaleFiles(Directory tempDocumentsDir) async { + try { + final currentTime = DateTime.now().microsecondsSinceEpoch; + if (tempDocumentsDir.existsSync() && + (_preferences.getInt(lastTempFolderClearTimeKey) ?? 
0) < + (currentTime - tempDirCleanUpInterval)) { + int skippedTempUploadFiles = 0; + final files = tempDocumentsDir.listSync(); + for (final file in files) { + if (file is File) { + if (file.path.contains(uploadTempFilePrefix)) { + skippedTempUploadFiles++; + continue; + } + _logger.info("Deleting file: ${file.path}"); + await file.delete(); + } else if (file is Directory) { + await file.delete(recursive: true); + } + } + await _preferences.setInt(lastTempFolderClearTimeKey, currentTime); + _logger.info( + "Cleared temp folder except $skippedTempUploadFiles upload files", + ); + } else { + _logger.info("Skipping temp folder clear"); + } + } catch (e) { + _logger.warning(e); + } + } + Future logout({bool autoLogout = false}) async { if (SyncService.instance.isSyncInProgress()) { SyncService.instance.stopSync(); diff --git a/mobile/lib/core/constants.dart b/mobile/lib/core/constants.dart index c2d08d903..77764ee65 100644 --- a/mobile/lib/core/constants.dart +++ b/mobile/lib/core/constants.dart @@ -1,3 +1,5 @@ +import "package:flutter/foundation.dart"; + const int thumbnailSmallSize = 256; const int thumbnailQuality = 50; const int thumbnailLargeSize = 512; @@ -41,6 +43,7 @@ const supportEmail = 'support@ente.io'; // this is the chunk size of the un-encrypted file which is read and encrypted before uploading it as a single part. 
const multipartPartSize = 20 * 1024 * 1024; +const multipartPartSizeInternal = 8 * 1024 * 1024; const kDefaultProductionEndpoint = 'https://api.ente.io'; @@ -95,3 +98,8 @@ const blackThumbnailBase64 = '/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAEBAQEBAQEB' 'KACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo' + 'AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo' + 'AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgD/9k='; + +const uploadTempFilePrefix = "upload_file_"; +final tempDirCleanUpInterval = kDebugMode + ? const Duration(seconds: 30).inMicroseconds + : const Duration(hours: 6).inMicroseconds; diff --git a/mobile/lib/db/embeddings_db.dart b/mobile/lib/db/embeddings_db.dart index 0eb1d3f6d..64878a2ce 100644 --- a/mobile/lib/db/embeddings_db.dart +++ b/mobile/lib/db/embeddings_db.dart @@ -63,6 +63,19 @@ class EmbeddingsDB { return _convertToEmbeddings(results); } + // Get FileIDs for a specific model + Future> getFileIDs(Model model) async { + final db = await _database; + final results = await db.getAll( + 'SELECT $columnFileID FROM $tableName WHERE $columnModel = ?', + [modelToInt(model)!], + ); + if (results.isEmpty) { + return {}; + } + return results.map((e) => e[columnFileID] as int).toSet(); + } + Future put(Embedding embedding) async { final db = await _database; await db.execute( diff --git a/mobile/lib/db/upload_locks_db.dart b/mobile/lib/db/upload_locks_db.dart index 11112d0ce..b32084b6f 100644 --- a/mobile/lib/db/upload_locks_db.dart +++ b/mobile/lib/db/upload_locks_db.dart @@ -3,16 +3,60 @@ import 'dart:io'; import 'package:path/path.dart'; import 'package:path_provider/path_provider.dart'; +import "package:photos/module/upload/model/multipart.dart"; import 'package:sqflite/sqflite.dart'; +import "package:sqflite_migration/sqflite_migration.dart"; class UploadLocksDB { static const 
_databaseName = "ente.upload_locks.db"; - static const _databaseVersion = 1; - static const _table = "upload_locks"; - static const _columnID = "id"; - static const _columnOwner = "owner"; - static const _columnTime = "time"; + static const _uploadLocksTable = ( + table: "upload_locks", + columnID: "id", + columnOwner: "owner", + columnTime: "time", + ); + + static const _trackUploadTable = ( + table: "track_uploads", + columnID: "id", + columnLocalID: "local_id", + columnFileHash: "file_hash", + columnCollectionID: "collection_id", + columnEncryptedFileName: "encrypted_file_name", + columnEncryptedFileSize: "encrypted_file_size", + columnEncryptedFileKey: "encrypted_file_key", + columnFileEncryptionNonce: "file_encryption_nonce", + columnKeyEncryptionNonce: "key_encryption_nonce", + columnObjectKey: "object_key", + columnCompleteUrl: "complete_url", + columnStatus: "status", + columnPartSize: "part_size", + columnLastAttemptedAt: "last_attempted_at", + columnCreatedAt: "created_at", + ); + + static const _partsTable = ( + table: "upload_parts", + columnObjectKey: "object_key", + columnPartNumber: "part_number", + columnPartUrl: "part_url", + columnPartETag: "part_etag", + columnPartStatus: "part_status", + ); + + static final initializationScript = [ + ..._createUploadLocksTable(), + ]; + + static final migrationScripts = [ + ..._createTrackUploadsTable(), + ]; + + final dbConfig = MigrationConfig( + initializationScript: initializationScript, + migrationScripts: migrationScripts, + ); UploadLocksDB._privateConstructor(); static final UploadLocksDB instance = UploadLocksDB._privateConstructor(); @@ -27,44 +71,82 @@ class UploadLocksDB { final Directory documentsDirectory = await getApplicationDocumentsDirectory(); final String path = join(documentsDirectory.path, _databaseName); - return await openDatabase( - path, - version: _databaseVersion, - onCreate: _onCreate, - ); + + return await openDatabaseWithMigration(path, dbConfig); } - Future _onCreate(Database db, 
int version) async { - await db.execute( + static List _createUploadLocksTable() { + return [ ''' - CREATE TABLE $_table ( - $_columnID TEXT PRIMARY KEY NOT NULL, - $_columnOwner TEXT NOT NULL, - $_columnTime TEXT NOT NULL + CREATE TABLE ${_uploadLocksTable.table} ( + ${_uploadLocksTable.columnID} TEXT PRIMARY KEY NOT NULL, + ${_uploadLocksTable.columnOwner} TEXT NOT NULL, + ${_uploadLocksTable.columnTime} TEXT NOT NULL ) ''', - ); + ]; + } + + static List _createTrackUploadsTable() { + return [ + ''' + CREATE TABLE IF NOT EXISTS ${_trackUploadTable.table} ( + ${_trackUploadTable.columnID} INTEGER PRIMARY KEY, + ${_trackUploadTable.columnLocalID} TEXT NOT NULL, + ${_trackUploadTable.columnFileHash} TEXT NOT NULL, + ${_trackUploadTable.columnCollectionID} INTEGER NOT NULL, + ${_trackUploadTable.columnEncryptedFileName} TEXT NOT NULL, + ${_trackUploadTable.columnEncryptedFileSize} INTEGER NOT NULL, + ${_trackUploadTable.columnEncryptedFileKey} TEXT NOT NULL, + ${_trackUploadTable.columnFileEncryptionNonce} TEXT NOT NULL, + ${_trackUploadTable.columnKeyEncryptionNonce} TEXT NOT NULL, + ${_trackUploadTable.columnObjectKey} TEXT NOT NULL, + ${_trackUploadTable.columnCompleteUrl} TEXT NOT NULL, + ${_trackUploadTable.columnStatus} TEXT DEFAULT '${MultipartStatus.pending.name}' NOT NULL, + ${_trackUploadTable.columnPartSize} INTEGER NOT NULL, + ${_trackUploadTable.columnLastAttemptedAt} INTEGER NOT NULL, + ${_trackUploadTable.columnCreatedAt} INTEGER DEFAULT CURRENT_TIMESTAMP NOT NULL + ) + ''', + ''' + CREATE TABLE IF NOT EXISTS ${_partsTable.table} ( + ${_partsTable.columnObjectKey} TEXT NOT NULL REFERENCES ${_trackUploadTable.table}(${_trackUploadTable.columnObjectKey}) ON DELETE CASCADE, + ${_partsTable.columnPartNumber} INTEGER NOT NULL, + ${_partsTable.columnPartUrl} TEXT NOT NULL, + ${_partsTable.columnPartETag} TEXT, + ${_partsTable.columnPartStatus} TEXT NOT NULL, + PRIMARY KEY (${_partsTable.columnObjectKey}, ${_partsTable.columnPartNumber}) + ) + ''', + ]; } 
Future clearTable() async { final db = await instance.database; - await db.delete(_table); + await db.delete(_uploadLocksTable.table); + await db.delete(_trackUploadTable.table); + await db.delete(_partsTable.table); } Future acquireLock(String id, String owner, int time) async { final db = await instance.database; final row = {}; - row[_columnID] = id; - row[_columnOwner] = owner; - row[_columnTime] = time; - await db.insert(_table, row, conflictAlgorithm: ConflictAlgorithm.fail); + row[_uploadLocksTable.columnID] = id; + row[_uploadLocksTable.columnOwner] = owner; + row[_uploadLocksTable.columnTime] = time; + await db.insert( + _uploadLocksTable.table, + row, + conflictAlgorithm: ConflictAlgorithm.fail, + ); } Future isLocked(String id, String owner) async { final db = await instance.database; final rows = await db.query( - _table, - where: '$_columnID = ? AND $_columnOwner = ?', + _uploadLocksTable.table, + where: + '${_uploadLocksTable.columnID} = ? AND ${_uploadLocksTable.columnOwner} = ?', whereArgs: [id, owner], ); return rows.length == 1; @@ -73,8 +155,9 @@ class UploadLocksDB { Future releaseLock(String id, String owner) async { final db = await instance.database; return db.delete( - _table, - where: '$_columnID = ? AND $_columnOwner = ?', + _uploadLocksTable.table, + where: + '${_uploadLocksTable.columnID} = ? AND ${_uploadLocksTable.columnOwner} = ?', whereArgs: [id, owner], ); } @@ -82,8 +165,9 @@ class UploadLocksDB { Future releaseLocksAcquiredByOwnerBefore(String owner, int time) async { final db = await instance.database; return db.delete( - _table, - where: '$_columnOwner = ? AND $_columnTime < ?', + _uploadLocksTable.table, + where: + '${_uploadLocksTable.columnOwner} = ? 
AND ${_uploadLocksTable.columnTime} < ?', whereArgs: [owner, time], ); } @@ -91,9 +175,251 @@ class UploadLocksDB { Future releaseAllLocksAcquiredBefore(int time) async { final db = await instance.database; return db.delete( - _table, - where: '$_columnTime < ?', + _uploadLocksTable.table, + where: '${_uploadLocksTable.columnTime} < ?', whereArgs: [time], ); } + + Future<({String encryptedFileKey, String fileNonce, String keyNonce})> + getFileEncryptionData( + String localId, + String fileHash, + int collectionID, + ) async { + final db = await instance.database; + + final rows = await db.query( + _trackUploadTable.table, + where: '${_trackUploadTable.columnLocalID} = ?' + ' AND ${_trackUploadTable.columnFileHash} = ?' + ' AND ${_trackUploadTable.columnCollectionID} = ?', + whereArgs: [localId, fileHash, collectionID], + ); + + if (rows.isEmpty) { + throw Exception("No cached links found for $localId and $fileHash"); + } + final row = rows.first; + + return ( + encryptedFileKey: row[_trackUploadTable.columnEncryptedFileKey] as String, + fileNonce: row[_trackUploadTable.columnFileEncryptionNonce] as String, + keyNonce: row[_trackUploadTable.columnKeyEncryptionNonce] as String, + ); + } + + Future updateLastAttempted( + String localId, + String fileHash, + int collectionID, + ) async { + final db = await instance.database; + await db.update( + _trackUploadTable.table, + { + _trackUploadTable.columnLastAttemptedAt: + DateTime.now().millisecondsSinceEpoch, + }, + where: '${_trackUploadTable.columnLocalID} = ?' + ' AND ${_trackUploadTable.columnFileHash} = ?' + ' AND ${_trackUploadTable.columnCollectionID} = ?', + whereArgs: [ + localId, + fileHash, + collectionID, + ], + ); + } + + Future getCachedLinks( + String localId, + String fileHash, + int collectionID, + ) async { + final db = await instance.database; + final rows = await db.query( + _trackUploadTable.table, + where: '${_trackUploadTable.columnLocalID} = ?' + ' AND ${_trackUploadTable.columnFileHash} = ?' 
+ ' AND ${_trackUploadTable.columnCollectionID} = ?', + whereArgs: [localId, fileHash, collectionID], + ); + if (rows.isEmpty) { + throw Exception("No cached links found for $localId and $fileHash"); + } + final row = rows.first; + final objectKey = row[_trackUploadTable.columnObjectKey] as String; + final partsStatus = await db.query( + _partsTable.table, + where: '${_partsTable.columnObjectKey} = ?', + whereArgs: [objectKey], + ); + + final List partUploadStatus = []; + final List partsURLs = List.generate( + partsStatus.length, + (index) => "", + ); + final Map partETags = {}; + + for (final part in partsStatus) { + final partNumber = part[_partsTable.columnPartNumber] as int; + final partUrl = part[_partsTable.columnPartUrl] as String; + final partStatus = part[_partsTable.columnPartStatus] as String; + partsURLs[partNumber] = partUrl; + if (part[_partsTable.columnPartETag] != null) { + partETags[partNumber] = part[_partsTable.columnPartETag] as String; + } + partUploadStatus.add(partStatus == "uploaded"); + } + final urls = MultipartUploadURLs( + objectKey: objectKey, + completeURL: row[_trackUploadTable.columnCompleteUrl] as String, + partsURLs: partsURLs, + ); + + return MultipartInfo( + urls: urls, + status: MultipartStatus.values + .byName(row[_trackUploadTable.columnStatus] as String), + partUploadStatus: partUploadStatus, + partETags: partETags, + partSize: row[_trackUploadTable.columnPartSize] as int, + ); + } + + Future createTrackUploadsEntry( + String localId, + String fileHash, + int collectionID, + MultipartUploadURLs urls, + String encryptedFileName, + int fileSize, + String fileKey, + String fileNonce, + String keyNonce, { + required int partSize, + }) async { + final db = await UploadLocksDB.instance.database; + final objectKey = urls.objectKey; + + await db.insert( + _trackUploadTable.table, + { + _trackUploadTable.columnLocalID: localId, + _trackUploadTable.columnFileHash: fileHash, + _trackUploadTable.columnCollectionID: collectionID, + 
_trackUploadTable.columnObjectKey: objectKey, + _trackUploadTable.columnCompleteUrl: urls.completeURL, + _trackUploadTable.columnEncryptedFileName: encryptedFileName, + _trackUploadTable.columnEncryptedFileSize: fileSize, + _trackUploadTable.columnEncryptedFileKey: fileKey, + _trackUploadTable.columnFileEncryptionNonce: fileNonce, + _trackUploadTable.columnKeyEncryptionNonce: keyNonce, + _trackUploadTable.columnPartSize: partSize, + _trackUploadTable.columnLastAttemptedAt: + DateTime.now().millisecondsSinceEpoch, + }, + ); + + final partsURLs = urls.partsURLs; + final partsLength = partsURLs.length; + + for (int i = 0; i < partsLength; i++) { + await db.insert( + _partsTable.table, + { + _partsTable.columnObjectKey: objectKey, + _partsTable.columnPartNumber: i, + _partsTable.columnPartUrl: partsURLs[i], + _partsTable.columnPartStatus: PartStatus.pending.name, + }, + ); + } + } + + Future updatePartStatus( + String objectKey, + int partNumber, + String etag, + ) async { + final db = await instance.database; + await db.update( + _partsTable.table, + { + _partsTable.columnPartStatus: PartStatus.uploaded.name, + _partsTable.columnPartETag: etag, + }, + where: + '${_partsTable.columnObjectKey} = ? 
AND ${_partsTable.columnPartNumber} = ?', + whereArgs: [objectKey, partNumber], + ); + } + + Future updateTrackUploadStatus( + String objectKey, + MultipartStatus status, + ) async { + final db = await instance.database; + await db.update( + _trackUploadTable.table, + { + _trackUploadTable.columnStatus: status.name, + }, + where: '${_trackUploadTable.columnObjectKey} = ?', + whereArgs: [objectKey], + ); + } + + Future deleteMultipartTrack( + String localId, + ) async { + final db = await instance.database; + return await db.delete( + _trackUploadTable.table, + where: '${_trackUploadTable.columnLocalID} = ?', + whereArgs: [localId], + ); + } + + // getFileNameToLastAttemptedAtMap returns a map of encrypted file name to last attempted at time + Future> getFileNameToLastAttemptedAtMap() { + return instance.database.then((db) async { + final rows = await db.query( + _trackUploadTable.table, + columns: [ + _trackUploadTable.columnEncryptedFileName, + _trackUploadTable.columnLastAttemptedAt, + ], + ); + final map = {}; + for (final row in rows) { + map[row[_trackUploadTable.columnEncryptedFileName] as String] = + row[_trackUploadTable.columnLastAttemptedAt] as int; + } + return map; + }); + } + + Future getEncryptedFileName( + String localId, + String fileHash, + int collectionID, + ) { + return instance.database.then((db) async { + final rows = await db.query( + _trackUploadTable.table, + where: '${_trackUploadTable.columnLocalID} = ?' + ' AND ${_trackUploadTable.columnFileHash} = ?' 
+ ' AND ${_trackUploadTable.columnCollectionID} = ?', + whereArgs: [localId, fileHash, collectionID], + ); + if (rows.isEmpty) { + return null; + } + final row = rows.first; + return row[_trackUploadTable.columnEncryptedFileName] as String; + }); + } } diff --git a/mobile/lib/events/embedding_updated_event.dart b/mobile/lib/events/embedding_updated_event.dart index 9021b8b50..736b85c17 100644 --- a/mobile/lib/events/embedding_updated_event.dart +++ b/mobile/lib/events/embedding_updated_event.dart @@ -1,3 +1,5 @@ import "package:photos/events/event.dart"; class EmbeddingUpdatedEvent extends Event {} + +class EmbeddingCacheUpdatedEvent extends Event {} diff --git a/mobile/lib/generated/intl/messages_en.dart b/mobile/lib/generated/intl/messages_en.dart index 43b39c82e..aab7f47bd 100644 --- a/mobile/lib/generated/intl/messages_en.dart +++ b/mobile/lib/generated/intl/messages_en.dart @@ -132,7 +132,7 @@ class MessageLookup extends MessageLookupByLibrary { "Please talk to ${providerName} support if you were charged"; static String m38(endDate) => - "Free trial valid till ${endDate}.\nYou can choose a paid plan afterwards."; + "Free trial valid till ${endDate}.\nYou can purchase a paid plan afterwards."; static String m39(toEmail) => "Please email us at ${toEmail}"; diff --git a/mobile/lib/generated/intl/messages_nl.dart b/mobile/lib/generated/intl/messages_nl.dart index af7502d90..f6987973c 100644 --- a/mobile/lib/generated/intl/messages_nl.dart +++ b/mobile/lib/generated/intl/messages_nl.dart @@ -368,6 +368,14 @@ class MessageLookup extends MessageLookupByLibrary { "Verificatie mislukt, probeer het opnieuw"), "authenticationSuccessful": MessageLookupByLibrary.simpleMessage("Verificatie geslaagd!"), + "autoCastDialogBody": MessageLookupByLibrary.simpleMessage( + "Je zult de beschikbare Cast apparaten hier zien."), + "autoCastiOSPermission": MessageLookupByLibrary.simpleMessage( + "Zorg ervoor dat lokale netwerkrechten zijn ingeschakeld voor de Ente Photos app, in 
Instellingen."), + "autoPair": + MessageLookupByLibrary.simpleMessage("Automatisch koppelen"), + "autoPairDesc": MessageLookupByLibrary.simpleMessage( + "Automatisch koppelen werkt alleen met apparaten die Chromecast ondersteunen."), "available": MessageLookupByLibrary.simpleMessage("Beschikbaar"), "backedUpFolders": MessageLookupByLibrary.simpleMessage("Back-up mappen"), @@ -399,6 +407,10 @@ class MessageLookup extends MessageLookupByLibrary { "cannotAddMorePhotosAfterBecomingViewer": m9, "cannotDeleteSharedFiles": MessageLookupByLibrary.simpleMessage( "Kan gedeelde bestanden niet verwijderen"), + "castIPMismatchBody": MessageLookupByLibrary.simpleMessage( + "Zorg ervoor dat je op hetzelfde netwerk zit als de tv."), + "castIPMismatchTitle": + MessageLookupByLibrary.simpleMessage("Album casten mislukt"), "castInstruction": MessageLookupByLibrary.simpleMessage( "Bezoek cast.ente.io op het apparaat dat u wilt koppelen.\n\nVoer de code hieronder in om het album op uw TV af te spelen."), "centerPoint": MessageLookupByLibrary.simpleMessage("Middelpunt"), @@ -473,6 +485,8 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Bevestig herstelsleutel"), "confirmYourRecoveryKey": MessageLookupByLibrary.simpleMessage("Bevestig herstelsleutel"), + "connectToDevice": MessageLookupByLibrary.simpleMessage( + "Verbinding maken met apparaat"), "contactFamilyAdmin": m12, "contactSupport": MessageLookupByLibrary.simpleMessage("Contacteer klantenservice"), @@ -750,6 +764,8 @@ class MessageLookup extends MessageLookupByLibrary { "filesBackedUpInAlbum": m23, "filesDeleted": MessageLookupByLibrary.simpleMessage("Bestanden verwijderd"), + "filesSavedToGallery": MessageLookupByLibrary.simpleMessage( + "Bestand opgeslagen in galerij"), "flip": MessageLookupByLibrary.simpleMessage("Omdraaien"), "forYourMemories": MessageLookupByLibrary.simpleMessage("voor uw herinneringen"), @@ -938,6 +954,8 @@ class MessageLookup extends MessageLookupByLibrary { 
"manageParticipants": MessageLookupByLibrary.simpleMessage("Beheren"), "manageSubscription": MessageLookupByLibrary.simpleMessage("Abonnement beheren"), + "manualPairDesc": MessageLookupByLibrary.simpleMessage( + "Koppelen met de PIN werkt met elk scherm waarop je jouw album wilt zien."), "map": MessageLookupByLibrary.simpleMessage("Kaart"), "maps": MessageLookupByLibrary.simpleMessage("Kaarten"), "mastodon": MessageLookupByLibrary.simpleMessage("Mastodon"), @@ -974,6 +992,8 @@ class MessageLookup extends MessageLookupByLibrary { "no": MessageLookupByLibrary.simpleMessage("Nee"), "noAlbumsSharedByYouYet": MessageLookupByLibrary.simpleMessage( "Nog geen albums gedeeld door jou"), + "noDeviceFound": + MessageLookupByLibrary.simpleMessage("Geen apparaat gevonden"), "noDeviceLimit": MessageLookupByLibrary.simpleMessage("Geen"), "noDeviceThatCanBeDeleted": MessageLookupByLibrary.simpleMessage( "Je hebt geen bestanden op dit apparaat die verwijderd kunnen worden"), @@ -1023,6 +1043,9 @@ class MessageLookup extends MessageLookupByLibrary { "orPickAnExistingOne": MessageLookupByLibrary.simpleMessage("Of kies een bestaande"), "pair": MessageLookupByLibrary.simpleMessage("Koppelen"), + "pairWithPin": MessageLookupByLibrary.simpleMessage("Koppelen met PIN"), + "pairingComplete": + MessageLookupByLibrary.simpleMessage("Koppeling voltooid"), "passkey": MessageLookupByLibrary.simpleMessage("Passkey"), "passkeyAuthTitle": MessageLookupByLibrary.simpleMessage("Passkey verificatie"), @@ -1383,6 +1406,10 @@ class MessageLookup extends MessageLookupByLibrary { "sparkleSuccess": MessageLookupByLibrary.simpleMessage("✨ Succes"), "startBackup": MessageLookupByLibrary.simpleMessage("Back-up starten"), "status": MessageLookupByLibrary.simpleMessage("Status"), + "stopCastingBody": + MessageLookupByLibrary.simpleMessage("Wil je stoppen met casten?"), + "stopCastingTitle": + MessageLookupByLibrary.simpleMessage("Casten stoppen"), "storage": 
MessageLookupByLibrary.simpleMessage("Opslagruimte"), "storageBreakupFamily": MessageLookupByLibrary.simpleMessage("Familie"), "storageBreakupYou": MessageLookupByLibrary.simpleMessage("Jij"), diff --git a/mobile/lib/generated/intl/messages_pt.dart b/mobile/lib/generated/intl/messages_pt.dart index cc410241f..e17cb674e 100644 --- a/mobile/lib/generated/intl/messages_pt.dart +++ b/mobile/lib/generated/intl/messages_pt.dart @@ -371,6 +371,8 @@ class MessageLookup extends MessageLookupByLibrary { "Certifique-se de que as permissões de Rede local estão ativadas para o aplicativo de Fotos Ente, em Configurações."), "autoPair": MessageLookupByLibrary.simpleMessage("Pareamento automático"), + "autoPairDesc": MessageLookupByLibrary.simpleMessage( + "O pareamento automático funciona apenas com dispositivos que suportam o Chromecast."), "available": MessageLookupByLibrary.simpleMessage("Disponível"), "backedUpFolders": MessageLookupByLibrary.simpleMessage("Backup de pastas concluído"), @@ -629,8 +631,9 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Dobre seu armazenamento"), "download": MessageLookupByLibrary.simpleMessage("Baixar"), "downloadFailed": - MessageLookupByLibrary.simpleMessage("Falha ao baixar"), - "downloading": MessageLookupByLibrary.simpleMessage("Baixando..."), + MessageLookupByLibrary.simpleMessage("Falha no download"), + "downloading": + MessageLookupByLibrary.simpleMessage("Fazendo download..."), "dropSupportEmail": m17, "duplicateFileCountWithStorageSaved": m18, "duplicateItemsGroup": m19, @@ -716,8 +719,8 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Falha ao aplicar o código"), "failedToCancel": MessageLookupByLibrary.simpleMessage("Falha ao cancelar"), - "failedToDownloadVideo": - MessageLookupByLibrary.simpleMessage("Falha ao baixar vídeo"), + "failedToDownloadVideo": MessageLookupByLibrary.simpleMessage( + "Falha ao fazer download do vídeo"), 
"failedToFetchOriginalForEdit": MessageLookupByLibrary.simpleMessage( "Falha ao obter original para edição"), "failedToFetchReferralDetails": MessageLookupByLibrary.simpleMessage( @@ -735,7 +738,7 @@ class MessageLookup extends MessageLookupByLibrary { "familyPlans": MessageLookupByLibrary.simpleMessage("Plano familiar"), "faq": MessageLookupByLibrary.simpleMessage("Perguntas frequentes"), "faqs": MessageLookupByLibrary.simpleMessage("Perguntas frequentes"), - "favorite": MessageLookupByLibrary.simpleMessage("Favoritar"), + "favorite": MessageLookupByLibrary.simpleMessage("Favorito"), "feedback": MessageLookupByLibrary.simpleMessage("Comentários"), "fileFailedToSaveToGallery": MessageLookupByLibrary.simpleMessage( "Falha ao salvar o arquivo na galeria"), @@ -902,8 +905,8 @@ class MessageLookup extends MessageLookupByLibrary { MessageLookupByLibrary.simpleMessage("Carregando galeria..."), "loadingMessage": MessageLookupByLibrary.simpleMessage("Carregando suas fotos..."), - "loadingModel": - MessageLookupByLibrary.simpleMessage("Baixando modelos..."), + "loadingModel": MessageLookupByLibrary.simpleMessage( + "Fazendo download de modelos..."), "localGallery": MessageLookupByLibrary.simpleMessage("Galeria local"), "location": MessageLookupByLibrary.simpleMessage("Local"), "locationName": MessageLookupByLibrary.simpleMessage("Nome do Local"), @@ -944,7 +947,7 @@ class MessageLookup extends MessageLookupByLibrary { "manageSubscription": MessageLookupByLibrary.simpleMessage("Gerenciar assinatura"), "manualPairDesc": MessageLookupByLibrary.simpleMessage( - "Parear com o PIN funciona para qualquer dispositivo de tela grande onde você deseja reproduzir seu álbum."), + "Parear com o PIN funciona com qualquer tela que você deseja ver o seu álbum ativado."), "map": MessageLookupByLibrary.simpleMessage("Mapa"), "maps": MessageLookupByLibrary.simpleMessage("Mapas"), "mastodon": MessageLookupByLibrary.simpleMessage("Mastodon"), diff --git 
a/mobile/lib/generated/intl/messages_zh.dart b/mobile/lib/generated/intl/messages_zh.dart index 0d507c2e1..db60c5e0b 100644 --- a/mobile/lib/generated/intl/messages_zh.dart +++ b/mobile/lib/generated/intl/messages_zh.dart @@ -124,7 +124,7 @@ class MessageLookup extends MessageLookupByLibrary { static String m37(providerName) => "如果您被收取费用,请用英语与 ${providerName} 的客服聊天"; - static String m38(endDate) => "免费试用有效期至 ${endDate}。\n之后您可以选择付费计划。"; + static String m38(endDate) => "免费试用有效期至 ${endDate}。\n您可以随后购买付费计划。"; static String m39(toEmail) => "请给我们发送电子邮件至 ${toEmail}"; @@ -325,6 +325,8 @@ class MessageLookup extends MessageLookupByLibrary { "autoCastiOSPermission": MessageLookupByLibrary.simpleMessage( "请确保已在“设置”中为 Ente Photos 应用打开本地网络权限。"), "autoPair": MessageLookupByLibrary.simpleMessage("自动配对"), + "autoPairDesc": + MessageLookupByLibrary.simpleMessage("自动配对仅适用于支持 Chromecast 的设备。"), "available": MessageLookupByLibrary.simpleMessage("可用"), "backedUpFolders": MessageLookupByLibrary.simpleMessage("已备份的文件夹"), "backup": MessageLookupByLibrary.simpleMessage("备份"), @@ -777,7 +779,7 @@ class MessageLookup extends MessageLookupByLibrary { "manageParticipants": MessageLookupByLibrary.simpleMessage("管理"), "manageSubscription": MessageLookupByLibrary.simpleMessage("管理订阅"), "manualPairDesc": MessageLookupByLibrary.simpleMessage( - "用 PIN 配对适用于任何大屏幕设备,您可以在这些设备上播放您的相册。"), + "用 PIN 码配对适用于您希望在其上查看相册的任何屏幕。"), "map": MessageLookupByLibrary.simpleMessage("地图"), "maps": MessageLookupByLibrary.simpleMessage("地图"), "mastodon": MessageLookupByLibrary.simpleMessage("Mastodon"), diff --git a/mobile/lib/generated/l10n.dart b/mobile/lib/generated/l10n.dart index dbedbadf6..4c7679154 100644 --- a/mobile/lib/generated/l10n.dart +++ b/mobile/lib/generated/l10n.dart @@ -4034,10 +4034,10 @@ class S { ); } - /// `Free trial valid till {endDate}.\nYou can choose a paid plan afterwards.` + /// `Free trial valid till {endDate}.\nYou can purchase a paid plan afterwards.` String 
playStoreFreeTrialValidTill(Object endDate) { return Intl.message( - 'Free trial valid till $endDate.\nYou can choose a paid plan afterwards.', + 'Free trial valid till $endDate.\nYou can purchase a paid plan afterwards.', name: 'playStoreFreeTrialValidTill', desc: '', args: [endDate], diff --git a/mobile/lib/l10n/intl_en.arb b/mobile/lib/l10n/intl_en.arb index e59163e6b..6bc8b5926 100644 --- a/mobile/lib/l10n/intl_en.arb +++ b/mobile/lib/l10n/intl_en.arb @@ -569,7 +569,7 @@ "freeTrialValidTill": "Free trial valid till {endDate}", "validTill": "Valid till {endDate}", "addOnValidTill": "Your {storageAmount} add-on is valid till {endDate}", - "playStoreFreeTrialValidTill": "Free trial valid till {endDate}.\nYou can choose a paid plan afterwards.", + "playStoreFreeTrialValidTill": "Free trial valid till {endDate}.\nYou can purchase a paid plan afterwards.", "subWillBeCancelledOn": "Your subscription will be cancelled on {endDate}", "subscription": "Subscription", "paymentDetails": "Payment details", diff --git a/mobile/lib/l10n/intl_nl.arb b/mobile/lib/l10n/intl_nl.arb index 0ba9bd10c..a8f854a43 100644 --- a/mobile/lib/l10n/intl_nl.arb +++ b/mobile/lib/l10n/intl_nl.arb @@ -835,6 +835,7 @@ "close": "Sluiten", "setAs": "Instellen als", "fileSavedToGallery": "Bestand opgeslagen in galerij", + "filesSavedToGallery": "Bestand opgeslagen in galerij", "fileFailedToSaveToGallery": "Opslaan van bestand naar galerij mislukt", "download": "Downloaden", "pressAndHoldToPlayVideo": "Ingedrukt houden om video af te spelen", @@ -1195,6 +1196,8 @@ "verifyPasskey": "Bevestig passkey", "playOnTv": "Album afspelen op TV", "pair": "Koppelen", + "autoPair": "Automatisch koppelen", + "pairWithPin": "Koppelen met PIN", "deviceNotFound": "Apparaat niet gevonden", "castInstruction": "Bezoek cast.ente.io op het apparaat dat u wilt koppelen.\n\nVoer de code hieronder in om het album op uw TV af te spelen.", "deviceCodeHint": "Voer de code in", @@ -1212,5 +1215,16 @@ "endpointUpdatedMessage": 
"Eindpunt met succes bijgewerkt", "customEndpoint": "Verbonden met {endpoint}", "createCollaborativeLink": "Maak een gezamenlijke link", - "search": "Zoeken" + "search": "Zoeken", + "autoPairDesc": "Automatisch koppelen werkt alleen met apparaten die Chromecast ondersteunen.", + "manualPairDesc": "Koppelen met de PIN werkt met elk scherm waarop je jouw album wilt zien.", + "connectToDevice": "Verbinding maken met apparaat", + "autoCastDialogBody": "Je zult de beschikbare Cast apparaten hier zien.", + "autoCastiOSPermission": "Zorg ervoor dat lokale netwerkrechten zijn ingeschakeld voor de Ente Photos app, in Instellingen.", + "noDeviceFound": "Geen apparaat gevonden", + "stopCastingTitle": "Casten stoppen", + "stopCastingBody": "Wil je stoppen met casten?", + "castIPMismatchTitle": "Album casten mislukt", + "castIPMismatchBody": "Zorg ervoor dat je op hetzelfde netwerk zit als de tv.", + "pairingComplete": "Koppeling voltooid" } \ No newline at end of file diff --git a/mobile/lib/l10n/intl_pt.arb b/mobile/lib/l10n/intl_pt.arb index 16a0ea753..bf95cf6ba 100644 --- a/mobile/lib/l10n/intl_pt.arb +++ b/mobile/lib/l10n/intl_pt.arb @@ -410,7 +410,7 @@ "machineLearning": "Aprendizagem de máquina", "magicSearch": "Busca mágica", "magicSearchDescription": "Por favor, note que isso resultará em uma largura de banda maior e uso de bateria até que todos os itens sejam indexados.", - "loadingModel": "Baixando modelos...", + "loadingModel": "Fazendo download de modelos...", "waitingForWifi": "Esperando por Wi-Fi...", "status": "Estado", "indexedItems": "Itens indexados", @@ -471,7 +471,7 @@ "criticalUpdateAvailable": "Atualização crítica disponível", "updateAvailable": "Atualização disponível", "ignoreUpdate": "Ignorar", - "downloading": "Baixando...", + "downloading": "Fazendo download...", "cannotDeleteSharedFiles": "Não é possível excluir arquivos compartilhados", "theDownloadCouldNotBeCompleted": "Não foi possível concluir o download", "retry": "Tentar novamente", @@ -734,7 
+734,7 @@ "moveToAlbum": "Mover para álbum", "unhide": "Desocultar", "unarchive": "Desarquivar", - "favorite": "Favoritar", + "favorite": "Favorito", "removeFromFavorite": "Remover dos favoritos", "shareLink": "Compartilhar link", "createCollage": "Criar colagem", @@ -840,7 +840,7 @@ "download": "Baixar", "pressAndHoldToPlayVideo": "Pressione e segure para reproduzir o vídeo", "pressAndHoldToPlayVideoDetailed": "Pressione e segure na imagem para reproduzir o vídeo", - "downloadFailed": "Falha ao baixar", + "downloadFailed": "Falha no download", "deduplicateFiles": "Arquivos duplicados", "deselectAll": "Desmarcar todos", "reviewDeduplicateItems": "Por favor, reveja e exclua os itens que você acredita serem duplicados.", @@ -1132,7 +1132,7 @@ "sharedWithYou": "Compartilhado com você", "sharedByYou": "Compartilhado por você", "inviteYourFriendsToEnte": "Convide seus amigos ao Ente", - "failedToDownloadVideo": "Falha ao baixar vídeo", + "failedToDownloadVideo": "Falha ao fazer download do vídeo", "hiding": "Ocultando...", "unhiding": "Desocultando...", "successfullyHid": "Ocultado com sucesso", @@ -1216,8 +1216,8 @@ "customEndpoint": "Conectado a {endpoint}", "createCollaborativeLink": "Criar link colaborativo", "search": "Pesquisar", - "autoPairGoogle": "O Pareamento Automático requer a conexão com servidores do Google e só funciona com dispositivos Chromecast. 
O Google não receberá dados confidenciais, como suas fotos.", - "manualPairDesc": "Parear com o PIN funciona para qualquer dispositivo de tela grande onde você deseja reproduzir seu álbum.", + "autoPairDesc": "O pareamento automático funciona apenas com dispositivos que suportam o Chromecast.", + "manualPairDesc": "Parear com o PIN funciona com qualquer tela que você deseja ver o seu álbum ativado.", "connectToDevice": "Conectar ao dispositivo", "autoCastDialogBody": "Você verá dispositivos disponíveis para transmitir aqui.", "autoCastiOSPermission": "Certifique-se de que as permissões de Rede local estão ativadas para o aplicativo de Fotos Ente, em Configurações.", diff --git a/mobile/lib/l10n/intl_zh.arb b/mobile/lib/l10n/intl_zh.arb index 370bb6a3c..a26f1fc6e 100644 --- a/mobile/lib/l10n/intl_zh.arb +++ b/mobile/lib/l10n/intl_zh.arb @@ -569,7 +569,7 @@ "freeTrialValidTill": "免费试用有效期至 {endDate}", "validTill": "有效期至 {endDate}", "addOnValidTill": "您的 {storageAmount} 插件有效期至 {endDate}", - "playStoreFreeTrialValidTill": "免费试用有效期至 {endDate}。\n之后您可以选择付费计划。", + "playStoreFreeTrialValidTill": "免费试用有效期至 {endDate}。\n您可以随后购买付费计划。", "subWillBeCancelledOn": "您的订阅将于 {endDate} 取消", "subscription": "订阅", "paymentDetails": "付款明细", @@ -1216,8 +1216,8 @@ "customEndpoint": "已连接至 {endpoint}", "createCollaborativeLink": "创建协作链接", "search": "搜索", - "autoPairGoogle": "自动配对需要连接到 Google 服务器,且仅适用于支持 Chromecast 的设备。Google 不会接收敏感数据,例如您的照片。", - "manualPairDesc": "用 PIN 配对适用于任何大屏幕设备,您可以在这些设备上播放您的相册。", + "autoPairDesc": "自动配对仅适用于支持 Chromecast 的设备。", + "manualPairDesc": "用 PIN 码配对适用于您希望在其上查看相册的任何屏幕。", "connectToDevice": "连接到设备", "autoCastDialogBody": "您将在此处看到可用的 Cast 设备。", "autoCastiOSPermission": "请确保已在“设置”中为 Ente Photos 应用打开本地网络权限。", diff --git a/mobile/lib/models/gallery_type.dart b/mobile/lib/models/gallery_type.dart index ba0eb397f..40426f701 100644 --- a/mobile/lib/models/gallery_type.dart +++ b/mobile/lib/models/gallery_type.dart @@ -32,12 +32,12 @@ extension GalleyTypeExtension on 
GalleryType { case GalleryType.locationTag: case GalleryType.quickLink: case GalleryType.uncategorized: + case GalleryType.sharedCollection: return true; case GalleryType.hiddenSection: case GalleryType.hiddenOwnedCollection: case GalleryType.trash: - case GalleryType.sharedCollection: return false; } } diff --git a/mobile/lib/module/upload/model/multipart.dart b/mobile/lib/module/upload/model/multipart.dart new file mode 100644 index 000000000..cda72d141 --- /dev/null +++ b/mobile/lib/module/upload/model/multipart.dart @@ -0,0 +1,66 @@ +import "package:photos/module/upload/model/xml.dart"; + +class PartETag extends XmlParsableObject { + final int partNumber; + final String eTag; + + PartETag(this.partNumber, this.eTag); + + @override + String get elementName => "Part"; + + @override + Map toMap() { + return { + "PartNumber": partNumber, + "ETag": eTag, + }; + } +} + +enum MultipartStatus { + pending, + uploaded, + completed, +} + +enum PartStatus { + pending, + uploaded, +} + +class MultipartInfo { + final List? partUploadStatus; + final Map? partETags; + final int? 
partSize; + final MultipartUploadURLs urls; + final MultipartStatus status; + + MultipartInfo({ + this.partUploadStatus, + this.partETags, + this.partSize, + this.status = MultipartStatus.pending, + required this.urls, + }); +} + +class MultipartUploadURLs { + final String objectKey; + final List partsURLs; + final String completeURL; + + MultipartUploadURLs({ + required this.objectKey, + required this.partsURLs, + required this.completeURL, + }); + + factory MultipartUploadURLs.fromMap(Map map) { + return MultipartUploadURLs( + objectKey: map["urls"]["objectKey"], + partsURLs: (map["urls"]["partURLs"] as List).cast(), + completeURL: map["urls"]["completeURL"], + ); + } +} diff --git a/mobile/lib/module/upload/model/xml.dart b/mobile/lib/module/upload/model/xml.dart new file mode 100644 index 000000000..9490fc40c --- /dev/null +++ b/mobile/lib/module/upload/model/xml.dart @@ -0,0 +1,41 @@ +// ignore_for_file: implementation_imports + +import "package:xml/xml.dart"; + +// used for classes that can be converted to xml +abstract class XmlParsableObject { + Map toMap(); + String get elementName; +} + +// for converting the response to xml +String convertJs2Xml(Map json) { + final builder = XmlBuilder(); + buildXml(builder, json); + return builder.buildDocument().toXmlString( + pretty: true, + indent: ' ', + ); +} + +// for building the xml node tree recursively +void buildXml(XmlBuilder builder, dynamic node) { + if (node is Map) { + node.forEach((key, value) { + builder.element(key, nest: () => buildXml(builder, value)); + }); + } else if (node is List) { + for (var item in node) { + buildXml(builder, item); + } + } else if (node is XmlParsableObject) { + builder.element( + node.elementName, + nest: () { + buildXml(builder, node.toMap()); + }, + ); + } else { + builder.text(node.toString()); + } +} diff --git a/mobile/lib/module/upload/service/multipart.dart b/mobile/lib/module/upload/service/multipart.dart new file mode 100644 index 000000000..ad0d19703 --- /dev/null 
+++ b/mobile/lib/module/upload/service/multipart.dart @@ -0,0 +1,266 @@ +import "dart:io"; + +import "package:dio/dio.dart"; +import "package:ente_feature_flag/ente_feature_flag.dart"; +import "package:flutter/foundation.dart"; +import "package:logging/logging.dart"; +import "package:photos/core/constants.dart"; +import "package:photos/db/upload_locks_db.dart"; +import "package:photos/models/encryption_result.dart"; +import "package:photos/module/upload/model/multipart.dart"; +import "package:photos/module/upload/model/xml.dart"; +import "package:photos/services/collections_service.dart"; +import "package:photos/utils/crypto_util.dart"; + +class MultiPartUploader { + final Dio _enteDio; + final Dio _s3Dio; + final UploadLocksDB _db; + final FlagService _featureFlagService; + late final Logger _logger = Logger("MultiPartUploader"); + + MultiPartUploader( + this._enteDio, + this._s3Dio, + this._db, + this._featureFlagService, + ); + + Future getEncryptionResult( + String localId, + String fileHash, + int collectionID, + ) async { + final collectionKey = + CollectionsService.instance.getCollectionKey(collectionID); + final result = + await _db.getFileEncryptionData(localId, fileHash, collectionID); + final encryptedFileKey = CryptoUtil.base642bin(result.encryptedFileKey); + final fileNonce = CryptoUtil.base642bin(result.fileNonce); + + final encryptKeyNonce = CryptoUtil.base642bin(result.keyNonce); + + return EncryptionResult( + key: CryptoUtil.decryptSync( + encryptedFileKey, + collectionKey, + encryptKeyNonce, + ), + header: fileNonce, + ); + } + + int get multipartPartSizeForUpload { + if (_featureFlagService.internalUser) { + return multipartPartSizeInternal; + } + return multipartPartSize; + } + + Future calculatePartCount(int fileSize) async { + // Multipart upload is only enabled for internal users + // and debug builds till it's battle tested. 
+ if (!_featureFlagService.internalUser) return 1; + + final partCount = (fileSize / multipartPartSizeForUpload).ceil(); + return partCount; + } + + Future getMultipartUploadURLs(int count) async { + try { + assert( + _featureFlagService.internalUser, + "Multipart upload should not be enabled for external users.", + ); + final response = await _enteDio.get( + "/files/multipart-upload-urls", + queryParameters: { + "count": count, + }, + ); + + return MultipartUploadURLs.fromMap(response.data); + } on Exception catch (e) { + _logger.severe('failed to get multipart url', e); + rethrow; + } + } + + Future createTableEntry( + String localId, + String fileHash, + int collectionID, + MultipartUploadURLs urls, + String encryptedFileName, + int fileSize, + Uint8List fileKey, + Uint8List fileNonce, + ) async { + final collectionKey = + CollectionsService.instance.getCollectionKey(collectionID); + + final encryptedResult = CryptoUtil.encryptSync( + fileKey, + collectionKey, + ); + + await _db.createTrackUploadsEntry( + localId, + fileHash, + collectionID, + urls, + encryptedFileName, + fileSize, + CryptoUtil.bin2base64(encryptedResult.encryptedData!), + CryptoUtil.bin2base64(fileNonce), + CryptoUtil.bin2base64(encryptedResult.nonce!), + partSize: multipartPartSizeForUpload, + ); + } + + Future putExistingMultipartFile( + File encryptedFile, + String localId, + String fileHash, + int collectionID, + ) async { + final multipartInfo = + await _db.getCachedLinks(localId, fileHash, collectionID); + await _db.updateLastAttempted(localId, fileHash, collectionID); + + Map etags = multipartInfo.partETags ?? 
{}; + + if (multipartInfo.status == MultipartStatus.pending) { + // upload individual parts and get their etags + etags = await _uploadParts(multipartInfo, encryptedFile); + } + + if (multipartInfo.status != MultipartStatus.completed) { + // complete the multipart upload + await _completeMultipartUpload( + multipartInfo.urls.objectKey, + etags, + multipartInfo.urls.completeURL, + ); + } + + return multipartInfo.urls.objectKey; + } + + Future putMultipartFile( + MultipartUploadURLs urls, + File encryptedFile, + ) async { + // upload individual parts and get their etags + final etags = await _uploadParts( + MultipartInfo(urls: urls), + encryptedFile, + ); + + // complete the multipart upload + await _completeMultipartUpload(urls.objectKey, etags, urls.completeURL); + + return urls.objectKey; + } + + Future> _uploadParts( + MultipartInfo partInfo, + File encryptedFile, + ) async { + final partsURLs = partInfo.urls.partsURLs; + final partUploadStatus = partInfo.partUploadStatus; + final partsLength = partsURLs.length; + final etags = partInfo.partETags ?? {}; + + int i = 0; + final partSize = partInfo.partSize ?? multipartPartSizeForUpload; + + // Go to the first part that is not uploaded + while (i < (partUploadStatus?.length ?? 0) && + (partUploadStatus?[i] ?? false)) { + i++; + } + + final int encFileLength = encryptedFile.lengthSync(); + // Start parts upload + int count = 0; + while (i < partsLength) { + count++; + final partURL = partsURLs[i]; + final isLastPart = i == partsLength - 1; + final fileSize = isLastPart ? encFileLength % partSize : partSize; + _logger.info( + "Uploading part ${i + 1} / $partsLength of size $fileSize bytes (total size $encFileLength).", + ); + if (kDebugMode && count > 3) { + throw Exception( + 'Forced exception to test multipart upload retry mechanism.', + ); + } + final response = await _s3Dio.put( + partURL, + data: encryptedFile.openRead( + i * partSize, + isLastPart ? 
null : (i + 1) * partSize, + ), + options: Options( + headers: { + Headers.contentLengthHeader: fileSize, + }, + ), + ); + + final eTag = response.headers.value("etag"); + + if (eTag?.isEmpty ?? true) { + throw Exception('ETAG_MISSING'); + } + + etags[i] = eTag!; + + await _db.updatePartStatus(partInfo.urls.objectKey, i, eTag); + i++; + } + + await _db.updateTrackUploadStatus( + partInfo.urls.objectKey, + MultipartStatus.uploaded, + ); + + return etags; + } + + Future _completeMultipartUpload( + String objectKey, + Map partEtags, + String completeURL, + ) async { + final body = convertJs2Xml({ + 'CompleteMultipartUpload': partEtags.entries + .map( + (e) => PartETag( + e.key + 1, + e.value, + ), + ) + .toList(), + }).replaceAll('"', '').replaceAll('"', ''); + + try { + await _s3Dio.post( + completeURL, + data: body, + options: Options( + contentType: "text/xml", + ), + ); + await _db.updateTrackUploadStatus( + objectKey, + MultipartStatus.completed, + ); + } catch (e) { + Logger("MultipartUpload").severe(e); + rethrow; + } + } +} diff --git a/mobile/lib/services/collections_service.dart b/mobile/lib/services/collections_service.dart index 0981eb767..5b16bc70f 100644 --- a/mobile/lib/services/collections_service.dart +++ b/mobile/lib/services/collections_service.dart @@ -30,7 +30,6 @@ import 'package:photos/models/collection/collection_items.dart'; import 'package:photos/models/file/file.dart'; import "package:photos/models/files_split.dart"; import "package:photos/models/metadata/collection_magic.dart"; -import "package:photos/service_locator.dart"; import 'package:photos/services/app_lifecycle_service.dart'; import "package:photos/services/favorites_service.dart"; import 'package:photos/services/file_magic_service.dart'; @@ -1179,9 +1178,6 @@ class CollectionsService { await _addToCollection(dstCollectionID, splitResult.ownedByCurrentUser); } if (splitResult.ownedByOtherUsers.isNotEmpty) { - if (!flagService.internalUser) { - throw ArgumentError('Cannot add files 
owned by other users'); - } late final List filesToCopy; late final List filesToAdd; (filesToAdd, filesToCopy) = (await _splitFilesToAddAndCopy( diff --git a/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart b/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart index 420b8c97f..485e1f2c9 100644 --- a/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart +++ b/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart @@ -145,9 +145,12 @@ class EmbeddingStore { } _logger.info("${remoteEmbeddings.length} embeddings fetched"); + return RemoteEmbeddings( remoteEmbeddings, - remoteEmbeddings.length == limit, + // keep fetching until we get all embeddings. Avoid limit check as + // some embedding fetch might fail on server + remoteEmbeddings.isNotEmpty, ); } diff --git a/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart b/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart index 337ca913f..99aa3a011 100644 --- a/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart +++ b/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart @@ -190,6 +190,7 @@ class SemanticSearchService { _logger.info( "Loading ${_cachedEmbeddings.length} took: ${(endTime.millisecondsSinceEpoch - startTime.millisecondsSinceEpoch)}ms", ); + Bus.instance.fire(EmbeddingCacheUpdatedEvent()); _logger.info("Cached embeddings: " + _cachedEmbeddings.length.toString()); } @@ -225,7 +226,9 @@ class SemanticSearchService { Future> _getFileIDsToBeIndexed() async { final uploadedFileIDs = await FilesDB.instance .getOwnedFileIDs(Configuration.instance.getUserID()!); - final embeddedFileIDs = _cachedEmbeddings.map((e) => e.fileID).toSet(); + final embeddedFileIDs = + await EmbeddingsDB.instance.getFileIDs(_currentModel); + uploadedFileIDs.removeWhere( (id) => embeddedFileIDs.contains(id), ); diff --git 
a/mobile/lib/ui/actions/collection/collection_sharing_actions.dart b/mobile/lib/ui/actions/collection/collection_sharing_actions.dart index 7993c4342..3328722db 100644 --- a/mobile/lib/ui/actions/collection/collection_sharing_actions.dart +++ b/mobile/lib/ui/actions/collection/collection_sharing_actions.dart @@ -439,7 +439,12 @@ class CollectionActions { ) async { final List files = await FilesDB.instance.getAllFilesCollection(collection.id); - await moveFilesFromCurrentCollection(bContext, collection, files); + await moveFilesFromCurrentCollection( + bContext, + collection, + files, + isHidden: collection.isHidden() && !collection.isDefaultHidden(), + ); // collection should be empty on server now await collectionsService.trashEmptyCollection(collection); } diff --git a/mobile/lib/ui/cast/auto.dart b/mobile/lib/ui/cast/auto.dart index 7b310855e..34c97b34d 100644 --- a/mobile/lib/ui/cast/auto.dart +++ b/mobile/lib/ui/cast/auto.dart @@ -79,12 +79,6 @@ class _AutoCastDialogState extends State { }); try { await _connectToYourApp(context, device); - if (mounted) { - setState(() { - _isDeviceTapInProgress.remove(device); - }); - Navigator.of(context).pop(); - } } catch (e) { if (mounted) { setState(() { @@ -128,6 +122,11 @@ class _AutoCastDialogState extends State { final code = message[CastMessageType.pairCode]!['code']; widget.onConnect(code); } + if (mounted) { + setState(() { + _isDeviceTapInProgress.remove(castDevice); + }); + } }, ); } diff --git a/mobile/lib/ui/notification/update/change_log_page.dart b/mobile/lib/ui/notification/update/change_log_page.dart index 1216b3219..90430fae2 100644 --- a/mobile/lib/ui/notification/update/change_log_page.dart +++ b/mobile/lib/ui/notification/update/change_log_page.dart @@ -124,7 +124,7 @@ class _ChangeLogPageState extends State { ), ChangeLogEntry( "Organize shared photos", - "You can now add shared items to your favorites to any of your personal albums. 
Ente will create a copy that is fully owned by you and can be organized to your liking.", + "You can now add shared items to your favorites or to any of your personal albums. Ente will create a copy that is fully owned by you and can be organized to your liking.", ), ChangeLogEntry( "Download multiple items", diff --git a/mobile/lib/ui/settings/machine_learning_settings_page.dart b/mobile/lib/ui/settings/machine_learning_settings_page.dart index 3306ea36f..a0b72ae09 100644 --- a/mobile/lib/ui/settings/machine_learning_settings_page.dart +++ b/mobile/lib/ui/settings/machine_learning_settings_page.dart @@ -228,13 +228,13 @@ class MagicSearchIndexStatsWidget extends StatefulWidget { class _MagicSearchIndexStatsWidgetState extends State { IndexStatus? _status; - late StreamSubscription _eventSubscription; + late StreamSubscription _eventSubscription; @override void initState() { super.initState(); _eventSubscription = - Bus.instance.on().listen((event) { + Bus.instance.on().listen((event) { _fetchIndexStatus(); }); _fetchIndexStatus(); diff --git a/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart b/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart index a630e3354..e805927a6 100644 --- a/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart +++ b/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart @@ -15,7 +15,6 @@ import 'package:photos/models/files_split.dart'; import 'package:photos/models/gallery_type.dart'; import "package:photos/models/metadata/common_keys.dart"; import 'package:photos/models/selected_files.dart'; -import "package:photos/service_locator.dart"; import 'package:photos/services/collections_service.dart'; import 'package:photos/services/hidden_service.dart'; import "package:photos/theme/colors.dart"; @@ -64,7 +63,6 @@ class _FileSelectionActionsWidgetState late FilesSplit split; late CollectionActions collectionActions; late bool isCollectionOwner; - bool _isInternalUser = false; // 
_cachedCollectionForSharedLink is primarily used to avoid creating duplicate // links if user keeps on creating Create link button after selecting @@ -102,7 +100,6 @@ class _FileSelectionActionsWidgetState @override Widget build(BuildContext context) { - _isInternalUser = flagService.internalUser; final ownedFilesCount = split.ownedByCurrentUser.length; final ownedAndPendingUploadFilesCount = ownedFilesCount + split.pendingUploads.length; @@ -150,14 +147,13 @@ class _FileSelectionActionsWidgetState final showUploadIcon = widget.type == GalleryType.localFolder && split.ownedByCurrentUser.isEmpty; - if (widget.type.showAddToAlbum() || - (_isInternalUser && widget.type == GalleryType.sharedCollection)) { + if (widget.type.showAddToAlbum()) { if (showUploadIcon) { items.add( SelectionActionButton( icon: Icons.cloud_upload_outlined, labelText: S.of(context).addToEnte, - onTap: (anyOwnedFiles || _isInternalUser) ? _addToAlbum : null, + onTap: _addToAlbum, ), ); } else { @@ -165,8 +161,7 @@ class _FileSelectionActionsWidgetState SelectionActionButton( icon: Icons.add_outlined, labelText: S.of(context).addToAlbum, - onTap: (anyOwnedFiles || _isInternalUser) ? 
_addToAlbum : null, - shouldShow: ownedAndPendingUploadFilesCount > 0 || _isInternalUser, + onTap: _addToAlbum, ), ); } @@ -450,10 +445,6 @@ class _FileSelectionActionsWidgetState } Future _addToAlbum() async { - if (split.ownedByOtherUsers.isNotEmpty && !_isInternalUser) { - widget.selectedFiles - .unSelectAll(split.ownedByOtherUsers.toSet(), skipNotify: true); - } showCollectionActionSheet(context, selectedFiles: widget.selectedFiles); } diff --git a/mobile/lib/ui/viewer/file/file_app_bar.dart b/mobile/lib/ui/viewer/file/file_app_bar.dart index 2918924db..aa46de55a 100644 --- a/mobile/lib/ui/viewer/file/file_app_bar.dart +++ b/mobile/lib/ui/viewer/file/file_app_bar.dart @@ -1,6 +1,5 @@ import 'dart:io'; -import 'package:flutter/cupertino.dart'; import 'package:flutter/material.dart'; import 'package:logging/logging.dart'; import 'package:media_extension/media_extension.dart'; @@ -12,7 +11,6 @@ import 'package:photos/models/file/file_type.dart'; import 'package:photos/models/file/trash_file.dart'; import "package:photos/models/metadata/common_keys.dart"; import 'package:photos/models/selected_files.dart'; -import "package:photos/service_locator.dart"; import 'package:photos/services/collections_service.dart'; import 'package:photos/services/hidden_service.dart'; import 'package:photos/ui/collections/collection_action_sheet.dart'; @@ -133,11 +131,13 @@ class FileAppBarState extends State { ), ); } - // only show fav option for files owned by the user - if ((isOwnedByUser || flagService.internalUser) && - !isFileHidden && - isFileUploaded) { - _actions.add(FavoriteWidget(widget.file)); + if (!isFileHidden && isFileUploaded) { + _actions.add( + Padding( + padding: const EdgeInsets.all(8), + child: FavoriteWidget(widget.file), + ), + ); } if (!isFileUploaded) { _actions.add( diff --git a/mobile/lib/ui/viewer/file_details/favorite_widget.dart b/mobile/lib/ui/viewer/file_details/favorite_widget.dart index f9d643490..3371b1442 100644 --- 
a/mobile/lib/ui/viewer/file_details/favorite_widget.dart +++ b/mobile/lib/ui/viewer/file_details/favorite_widget.dart @@ -50,7 +50,6 @@ class _FavoriteWidgetState extends State { : LikeButton( size: 24, isLiked: isLiked, - padding: const EdgeInsets.all(2), onTap: (oldValue) async { if (widget.file.uploadedFileID == null || widget.file.ownerID != diff --git a/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart b/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart index 4a3d9450a..d2b7a6ec3 100644 --- a/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart +++ b/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart @@ -90,16 +90,16 @@ class _GalleryAppBarWidgetState extends State { String? _appBarTitle; late CollectionActions collectionActions; bool isQuickLink = false; - late bool isInternalUser; late GalleryType galleryType; + final ValueNotifier castNotifier = ValueNotifier(0); + @override void initState() { super.initState(); _selectedFilesListener = () { setState(() {}); }; - isInternalUser = flagService.internalUser; collectionActions = CollectionActions(CollectionsService.instance); widget.selectedFiles.addListener(_selectedFilesListener); _userAuthEventSubscription = @@ -328,14 +328,16 @@ class _GalleryAppBarWidgetState extends State { Tooltip( message: "Cast album", child: IconButton( - icon: castService.getActiveSessions().isNotEmpty - ? const Icon(Icons.cast_connected_rounded) - : const Icon(Icons.cast_outlined), + icon: ValueListenableBuilder( + valueListenable: castNotifier, + builder: (context, value, child) { + return castService.getActiveSessions().isNotEmpty + ? const Icon(Icons.cast_connected_rounded) + : const Icon(Icons.cast_outlined); + }, + ), onPressed: () async { await _castChoiceDialog(); - if (mounted) { - setState(() {}); - } }, ), ), @@ -412,7 +414,7 @@ class _GalleryAppBarWidgetState extends State { ? 
Icons.visibility_outlined : Icons.visibility_off_outlined, ), - if (widget.collection != null && isInternalUser) + if (widget.collection != null) EntePopupMenuItem( value: AlbumPopupAction.playOnTv, context.l10n.playOnTv, @@ -728,38 +730,44 @@ class _GalleryAppBarWidgetState extends State { await castService.closeActiveCasts(); }, ); + castNotifier.value++; return; } // stop any existing cast session gw.revokeAllTokens().ignore(); - final result = await showDialog( - context: context, - barrierDismissible: true, - builder: (BuildContext context) { - return const CastChooseDialog(); - }, - ); - if (result == null) { - return; - } - // wait to allow the dialog to close - await Future.delayed(const Duration(milliseconds: 100)); - if (result == ButtonAction.first) { - await showDialog( + if (!Platform.isAndroid) { + await _pairWithPin(gw, ''); + } else { + final result = await showDialog( context: context, barrierDismissible: true, - builder: (BuildContext bContext) { - return AutoCastDialog( - (device) async { - await _castPair(bContext, gw, device); - }, - ); + builder: (BuildContext context) { + return const CastChooseDialog(); }, ); - } - if (result == ButtonAction.second) { - await _pairWithPin(gw, ''); + if (result == null) { + return; + } + // wait to allow the dialog to close + await Future.delayed(const Duration(milliseconds: 100)); + if (result == ButtonAction.first) { + await showDialog( + context: context, + barrierDismissible: true, + builder: (BuildContext bContext) { + return AutoCastDialog( + (device) async { + await _castPair(bContext, gw, device); + Navigator.pop(bContext); + }, + ); + }, + ); + } + if (result == ButtonAction.second) { + await _pairWithPin(gw, ''); + } } } @@ -785,7 +793,10 @@ class _GalleryAppBarWidgetState extends State { String lastCode = ''; Future _castPair( - BuildContext bContext, CastGateway gw, String code) async { + BuildContext bContext, + CastGateway gw, + String code, + ) async { try { if (lastCode == code) { return 
false; @@ -801,15 +812,15 @@ class _GalleryAppBarWidgetState extends State { final String castToken = const Uuid().v4().toString(); final castPayload = CollectionsService.instance .getCastData(castToken, widget.collection!, publicKey); - _logger.info("Casting album with token $castToken"); await gw.publishCastPayload( code, castPayload, widget.collection!.id, castToken, ); - _logger.info("Casted album with token $castToken"); + _logger.info("cast album completed"); // showToast(bContext, S.of(context).pairingComplete); + castNotifier.value++; return true; } catch (e, s) { lastCode = ''; @@ -823,6 +834,7 @@ class _GalleryAppBarWidgetState extends State { } else { await showGenericErrorDialog(context: bContext, error: e); } + castNotifier.value++; return false; } } diff --git a/mobile/lib/utils/file_uploader.dart b/mobile/lib/utils/file_uploader.dart index bcd5bb121..ad1015303 100644 --- a/mobile/lib/utils/file_uploader.dart +++ b/mobile/lib/utils/file_uploader.dart @@ -2,7 +2,7 @@ import 'dart:async'; import 'dart:collection'; import 'dart:convert'; import 'dart:io'; -import 'dart:math'; +import 'dart:math' as math; import 'package:collection/collection.dart'; import 'package:connectivity_plus/connectivity_plus.dart'; @@ -28,6 +28,8 @@ import 'package:photos/models/file/file_type.dart'; import "package:photos/models/metadata/file_magic.dart"; import 'package:photos/models/upload_url.dart'; import "package:photos/models/user_details.dart"; +import "package:photos/module/upload/service/multipart.dart"; +import "package:photos/service_locator.dart"; import 'package:photos/services/collections_service.dart'; import "package:photos/services/file_magic_service.dart"; import 'package:photos/services/local_sync_service.dart'; @@ -37,7 +39,6 @@ import 'package:photos/utils/crypto_util.dart'; import 'package:photos/utils/file_download_util.dart'; import 'package:photos/utils/file_uploader_util.dart'; import "package:photos/utils/file_util.dart"; -import 
"package:photos/utils/multipart_upload_util.dart"; import 'package:shared_preferences/shared_preferences.dart'; import 'package:tuple/tuple.dart'; import "package:uuid/uuid.dart"; @@ -51,7 +52,7 @@ class FileUploader { static const kBlockedUploadsPollFrequency = Duration(seconds: 2); static const kFileUploadTimeout = Duration(minutes: 50); static const k20MBStorageBuffer = 20 * 1024 * 1024; - static const kUploadTempPrefix = "upload_file_"; + static const _lastStaleFileCleanupTime = "lastStaleFileCleanupTime"; final _logger = Logger("FileUploader"); final _dio = NetworkClient.instance.getDio(); @@ -79,6 +80,7 @@ class FileUploader { // cases, we don't want to clear the stale upload files. See #removeStaleFiles // as it can result in clearing files which are still being force uploaded. bool _hasInitiatedForceUpload = false; + late MultiPartUploader _multiPartUploader; FileUploader._privateConstructor() { Bus.instance.on().listen((event) { @@ -114,6 +116,17 @@ class FileUploader { // ignore: unawaited_futures _pollBackgroundUploadStatus(); } + _multiPartUploader = MultiPartUploader( + _enteDio, + _dio, + UploadLocksDB.instance, + flagService, + ); + if (currentTime - (_prefs.getInt(_lastStaleFileCleanupTime) ?? 0) > + tempDirCleanUpInterval) { + await removeStaleFiles(); + await _prefs.setInt(_lastStaleFileCleanupTime, currentTime); + } Bus.instance.on().listen((event) { if (event.type == EventType.deletedFromDevice || event.type == EventType.deletedFromEverywhere) { @@ -309,13 +322,28 @@ class FileUploader { // ends with .encrypted. 
Fetch files in async manner final files = await Directory(dir).list().toList(); final filesToDelete = files.where((file) { - return file.path.contains(kUploadTempPrefix) && + return file.path.contains(uploadTempFilePrefix) && file.path.contains(".encrypted"); }); if (filesToDelete.isNotEmpty) { - _logger.info('cleaning up state files ${filesToDelete.length}'); + _logger.info('Deleting ${filesToDelete.length} stale upload files '); + final fileNameToLastAttempt = + await _uploadLocks.getFileNameToLastAttemptedAtMap(); for (final file in filesToDelete) { - await file.delete(); + final fileName = file.path.split('/').last; + final lastAttemptTime = fileNameToLastAttempt[fileName] != null + ? DateTime.fromMillisecondsSinceEpoch( + fileNameToLastAttempt[fileName]!, + ) + : null; + if (lastAttemptTime == null || + DateTime.now().difference(lastAttemptTime).inDays > 1) { + await file.delete(); + } else { + _logger.info( + 'Skipping file $fileName as it was attempted recently on $lastAttemptTime', + ); + } } } @@ -405,7 +433,7 @@ class FileUploader { (fileOnDisk.updationTime ?? -1) != -1 && (fileOnDisk.collectionID ?? -1) == collectionID; if (wasAlreadyUploaded) { - debugPrint("File is already uploaded ${fileOnDisk.tag}"); + _logger.info("File is already uploaded ${fileOnDisk.tag}"); return fileOnDisk; } } @@ -425,6 +453,7 @@ class FileUploader { } final String lockKey = file.localID!; + bool _isMultipartUpload = false; try { await _uploadLocks.acquireLock( @@ -438,12 +467,27 @@ class FileUploader { } final tempDirectory = Configuration.instance.getTempDirectory(); - final String uniqueID = const Uuid().v4().toString(); - final encryptedFilePath = - '$tempDirectory$kUploadTempPrefix${uniqueID}_file.encrypted'; - final encryptedThumbnailPath = - '$tempDirectory$kUploadTempPrefix${uniqueID}_thumb.encrypted'; MediaUploadData? mediaUploadData; + mediaUploadData = await getUploadDataFromEnteFile(file); + + final String? 
existingMultipartEncFileName = + mediaUploadData.hashData?.fileHash != null + ? await _uploadLocks.getEncryptedFileName( + lockKey, + mediaUploadData.hashData!.fileHash!, + collectionID, + ) + : null; + bool multipartEntryExists = existingMultipartEncFileName != null; + + final String uniqueID = const Uuid().v4().toString(); + + final encryptedFilePath = multipartEntryExists + ? '$tempDirectory$existingMultipartEncFileName' + : '$tempDirectory$uploadTempFilePrefix${uniqueID}_file.encrypted'; + final encryptedThumbnailPath = + '$tempDirectory$uploadTempFilePrefix${uniqueID}_thumb.encrypted'; + var uploadCompleted = false; // This flag is used to decide whether to clear the iOS origin file cache // or not. @@ -457,13 +501,18 @@ class FileUploader { '${isUpdatedFile ? 're-upload' : 'upload'} of ${file.toString()}', ); - mediaUploadData = await getUploadDataFromEnteFile(file); - Uint8List? key; + EncryptionResult? multiPartFileEncResult = multipartEntryExists + ? await _multiPartUploader.getEncryptionResult( + lockKey, + mediaUploadData.hashData!.fileHash!, + collectionID, + ) + : null; if (isUpdatedFile) { key = getFileKey(file); } else { - key = null; + key = multiPartFileEncResult?.key; // check if the file is already uploaded and can be mapped to existing // uploaded file. If map is found, it also returns the corresponding // mapped or update file entry. 
@@ -482,16 +531,40 @@ class FileUploader { } } - if (File(encryptedFilePath).existsSync()) { + final encryptedFileExists = File(encryptedFilePath).existsSync(); + + // If the multipart entry exists but the encrypted file doesn't, it means + // that we'll have to reupload as the nonce is lost + if (multipartEntryExists) { + final bool updateWithDiffKey = isUpdatedFile && + multiPartFileEncResult != null && + !listEquals(key, multiPartFileEncResult.key); + if (!encryptedFileExists || updateWithDiffKey) { + if (updateWithDiffKey) { + _logger.severe('multiPart update resumed with differentKey'); + } else { + _logger.warning( + 'multiPart EncryptedFile missing, discard multipart entry', + ); + } + await _uploadLocks.deleteMultipartTrack(lockKey); + multipartEntryExists = false; + multiPartFileEncResult = null; + } + } else if (encryptedFileExists) { + // otherwise just delete the file for singlepart upload await File(encryptedFilePath).delete(); } await _checkIfWithinStorageLimit(mediaUploadData.sourceFile!); final encryptedFile = File(encryptedFilePath); - final EncryptionResult fileAttributes = await CryptoUtil.encryptFile( - mediaUploadData.sourceFile!.path, - encryptedFilePath, - key: key, - ); + + final EncryptionResult fileAttributes = multiPartFileEncResult ?? + await CryptoUtil.encryptFile( + mediaUploadData.sourceFile!.path, + encryptedFilePath, + key: key, + ); + late final Uint8List? thumbnailData; if (mediaUploadData.thumbnail == null && file.fileType == FileType.video) { @@ -512,31 +585,63 @@ class FileUploader { await encryptedThumbnailFile .writeAsBytes(encryptedThumbnailData.encryptedData!); - final thumbnailUploadURL = await _getUploadURL(); - final String thumbnailObjectKey = - await _putFile(thumbnailUploadURL, encryptedThumbnailFile); - - // Calculate the number of parts for the file. Multiple part upload - // is only enabled for internal users and debug builds till it's battle tested. - final count = kDebugMode - ? 
await calculatePartCount( - await encryptedFile.length(), - ) - : 1; + // Calculate the number of parts for the file. + final count = await _multiPartUploader.calculatePartCount( + await encryptedFile.length(), + ); late String fileObjectKey; + late String thumbnailObjectKey; if (count <= 1) { + final thumbnailUploadURL = await _getUploadURL(); + thumbnailObjectKey = + await _putFile(thumbnailUploadURL, encryptedThumbnailFile); final fileUploadURL = await _getUploadURL(); fileObjectKey = await _putFile(fileUploadURL, encryptedFile); } else { - final fileUploadURLs = await getMultipartUploadURLs(count); - fileObjectKey = await putMultipartFile(fileUploadURLs, encryptedFile); + _isMultipartUpload = true; + _logger.finest( + "Init multipartUpload $multipartEntryExists, isUpdate $isUpdatedFile", + ); + if (multipartEntryExists) { + fileObjectKey = await _multiPartUploader.putExistingMultipartFile( + encryptedFile, + lockKey, + mediaUploadData.hashData!.fileHash!, + collectionID, + ); + } else { + final fileUploadURLs = + await _multiPartUploader.getMultipartUploadURLs(count); + final encFileName = encryptedFile.path.split('/').last; + await _multiPartUploader.createTableEntry( + lockKey, + mediaUploadData.hashData!.fileHash!, + collectionID, + fileUploadURLs, + encFileName, + await encryptedFile.length(), + fileAttributes.key!, + fileAttributes.header!, + ); + fileObjectKey = await _multiPartUploader.putMultipartFile( + fileUploadURLs, + encryptedFile, + ); + } + // in case of multipart, upload the thumbnail towards the end to avoid + // re-uploading the thumbnail in case of failure. 
+ // In regular upload, always upload the thumbnail first to keep existing behaviour + // + final thumbnailUploadURL = await _getUploadURL(); + thumbnailObjectKey = + await _putFile(thumbnailUploadURL, encryptedThumbnailFile); } final metadata = await file.getMetadataForUpload(mediaUploadData); final encryptedMetadataResult = await CryptoUtil.encryptChaCha( - utf8.encode(jsonEncode(metadata)) as Uint8List, + utf8.encode(jsonEncode(metadata)), fileAttributes.key!, ); final fileDecryptionHeader = @@ -618,6 +723,8 @@ class FileUploader { } await FilesDB.instance.update(remoteFile); } + await UploadLocksDB.instance.deleteMultipartTrack(lockKey); + if (!_isBackground) { Bus.instance.fire( LocalPhotosUpdatedEvent( @@ -659,6 +766,7 @@ class FileUploader { encryptedFilePath, encryptedThumbnailPath, lockKey: lockKey, + isMultiPartUpload: _isMultipartUpload, ); } } @@ -803,6 +911,7 @@ class FileUploader { String encryptedFilePath, String encryptedThumbnailPath, { required String lockKey, + bool isMultiPartUpload = false, }) async { if (mediaUploadData != null && mediaUploadData.sourceFile != null) { // delete the file from app's internal cache if it was copied to app @@ -816,7 +925,14 @@ class FileUploader { } } if (File(encryptedFilePath).existsSync()) { - await File(encryptedFilePath).delete(); + if (isMultiPartUpload && !uploadCompleted) { + _logger.fine( + "skip delete for multipart encrypted file $encryptedFilePath", + ); + } else { + _logger.fine("deleting encrypted file $encryptedFilePath"); + await File(encryptedFilePath).delete(); + } } if (File(encryptedThumbnailPath).existsSync()) { await File(encryptedThumbnailPath).delete(); @@ -1039,7 +1155,7 @@ class FileUploader { if (_uploadURLs.isEmpty) { // the queue is empty, fetch at least for one file to handle force uploads // that are not in the queue. 
This is to also avoid - await fetchUploadURLs(max(_queue.length, 1)); + await fetchUploadURLs(math.max(_queue.length, 1)); } try { return _uploadURLs.removeFirst(); @@ -1061,7 +1177,7 @@ class FileUploader { final response = await _enteDio.get( "/files/upload-urls", queryParameters: { - "count": min(42, fileCount * 2), // m4gic number + "count": math.min(42, fileCount * 2), // m4gic number }, ); final urls = (response.data["urls"] as List) diff --git a/mobile/lib/utils/multipart_upload_util.dart b/mobile/lib/utils/multipart_upload_util.dart index 102c08d8d..6b9ccafb9 100644 --- a/mobile/lib/utils/multipart_upload_util.dart +++ b/mobile/lib/utils/multipart_upload_util.dart @@ -6,8 +6,8 @@ import "package:dio/dio.dart"; import "package:logging/logging.dart"; import "package:photos/core/constants.dart"; import "package:photos/core/network/network.dart"; +import 'package:photos/module/upload/model/xml.dart'; import "package:photos/service_locator.dart"; -import "package:photos/utils/xml_parser_util.dart"; final _enteDio = NetworkClient.instance.enteDio; final _dio = NetworkClient.instance.getDio(); diff --git a/mobile/lib/utils/xml_parser_util.dart b/mobile/lib/utils/xml_parser_util.dart index 9490fc40c..8b1378917 100644 --- a/mobile/lib/utils/xml_parser_util.dart +++ b/mobile/lib/utils/xml_parser_util.dart @@ -1,41 +1 @@ -// ignore_for_file: implementation_imports -import "package:xml/xml.dart"; - -// used for classes that can be converted to xml -abstract class XmlParsableObject { - Map toMap(); - String get elementName; -} - -// for converting the response to xml -String convertJs2Xml(Map json) { - final builder = XmlBuilder(); - buildXml(builder, json); - return builder.buildDocument().toXmlString( - pretty: true, - indent: ' ', - ); -} - -// for building the xml node tree recursively -void buildXml(XmlBuilder builder, dynamic node) { - if (node is Map) { - node.forEach((key, value) { - builder.element(key, nest: () => buildXml(builder, value)); - }); - } else if 
(node is List) { - for (var item in node) { - buildXml(builder, item); - } - } else if (node is XmlParsableObject) { - builder.element( - node.elementName, - nest: () { - buildXml(builder, node.toMap()); - }, - ); - } else { - builder.text(node.toString()); - } -} diff --git a/mobile/plugins/ente_cast_normal/lib/src/service.dart b/mobile/plugins/ente_cast_normal/lib/src/service.dart index 04c501666..8a1f2aaf1 100644 --- a/mobile/plugins/ente_cast_normal/lib/src/service.dart +++ b/mobile/plugins/ente_cast_normal/lib/src/service.dart @@ -24,7 +24,9 @@ class CastServiceImpl extends CastService { "got RECEIVER_STATUS, Send request to pair", name: "CastServiceImpl", ); - session.sendMessage(_pairRequestNamespace, {}); + session.sendMessage(_pairRequestNamespace, { + "collectionID": collectionID, + }); } else { if (onMessage != null && message.containsKey("code")) { onMessage( @@ -32,8 +34,9 @@ class CastServiceImpl extends CastService { CastMessageType.pairCode: message, }, ); + } else { + print('receive message: $message'); } - print('receive message: $message'); } }); @@ -56,7 +59,9 @@ class CastServiceImpl extends CastService { @override Future> searchDevices() { - return CastDiscoveryService().search().then((devices) { + return CastDiscoveryService() + .search(timeout: const Duration(seconds: 7)) + .then((devices) { return devices.map((device) => (device.name, device)).toList(); }); } diff --git a/mobile/pubspec.yaml b/mobile/pubspec.yaml index 385deb769..ecf762a3d 100644 --- a/mobile/pubspec.yaml +++ b/mobile/pubspec.yaml @@ -12,7 +12,7 @@ description: ente photos application # Read more about iOS versioning at # https://developer.apple.com/library/archive/documentation/General/Reference/InfoPlistKeyReference/Articles/CoreFoundationKeys.html -version: 0.8.90+610 +version: 0.8.95+615 publish_to: none environment: diff --git a/server/configurations/local.yaml b/server/configurations/local.yaml index 7785f5601..196c56f1f 100644 --- a/server/configurations/local.yaml 
+++ b/server/configurations/local.yaml @@ -180,6 +180,9 @@ smtp: port: username: password: + # The email address from which to send the email. Set this to an email + # address whose credentials you're providing. + email: # Zoho Zeptomail config (optional) # diff --git a/server/docs/publish.md b/server/docs/publish.md index de4849d90..3a49a4761 100644 --- a/server/docs/publish.md +++ b/server/docs/publish.md @@ -39,3 +39,7 @@ combine both these steps too. Once the workflow completes, the resultant image will be available at `ghcr.io/ente-io/server`. The image will be tagged by the commit SHA. The latest image will also be tagged, well, "latest". + +The workflow will also tag the commit it used to build the image with +`server/ghcr`. This tag will be overwritten on each publish, and it'll point to +the code that was used in the most recent publish. diff --git a/server/ente/billing.go b/server/ente/billing.go index 20c37bdb5..f623a92e8 100644 --- a/server/ente/billing.go +++ b/server/ente/billing.go @@ -11,7 +11,7 @@ import ( const ( // FreePlanStorage is the amount of storage in free plan - FreePlanStorage = 1 * 1024 * 1024 * 1024 + FreePlanStorage = 5 * 1024 * 1024 * 1024 // FreePlanProductID is the product ID of free plan FreePlanProductID = "free" // FreePlanTransactionID is the dummy transaction ID for the free plan diff --git a/server/ente/embedding.go b/server/ente/embedding.go index 2990a779a..fabde44a5 100644 --- a/server/ente/embedding.go +++ b/server/ente/embedding.go @@ -7,6 +7,7 @@ type Embedding struct { DecryptionHeader string `json:"decryptionHeader"` UpdatedAt int64 `json:"updatedAt"` Version *int `json:"version,omitempty"` + Size *int64 } type InsertOrUpdateEmbeddingRequest struct { @@ -30,9 +31,10 @@ type GetFilesEmbeddingRequest struct { } type GetFilesEmbeddingResponse struct { - Embeddings []Embedding `json:"embeddings"` - NoDataFileIDs []int64 `json:"noDataFileIDs"` - ErrFileIDs []int64 `json:"errFileIDs"` + Embeddings []Embedding 
`json:"embeddings"` + PendingIndexFileIDs []int64 `json:"pendingIndexFileIDs"` + ErrFileIDs []int64 `json:"errFileIDs"` + NoEmbeddingFileIDs []int64 `json:"noEmbeddingFileIDs"` } type Model string diff --git a/server/ente/file.go b/server/ente/file.go index 4a69473e3..a0e67c71c 100644 --- a/server/ente/file.go +++ b/server/ente/file.go @@ -134,6 +134,7 @@ type UpdateMagicMetadata struct { // UpdateMultipleMagicMetadataRequest request payload for updating magic metadata for list of files type UpdateMultipleMagicMetadataRequest struct { MetadataList []UpdateMagicMetadata `json:"metadataList" binding:"required"` + SkipVersion *bool `json:"skipVersion"` } // UploadURL represents the upload url for a specific object diff --git a/server/migrations/85_increase_free_storage.down.sql b/server/migrations/85_increase_free_storage.down.sql new file mode 100644 index 000000000..9f7060a47 --- /dev/null +++ b/server/migrations/85_increase_free_storage.down.sql @@ -0,0 +1 @@ +-- no-op diff --git a/server/migrations/85_increase_free_storage.up.sql b/server/migrations/85_increase_free_storage.up.sql new file mode 100644 index 000000000..395033c8d --- /dev/null +++ b/server/migrations/85_increase_free_storage.up.sql @@ -0,0 +1 @@ +UPDATE subscriptions SET storage = 5368709120, expiry_time = 1749355117000000 where storage = 1073741824 and product_id = 'free'; diff --git a/server/pkg/api/file.go b/server/pkg/api/file.go index a253c71c2..990336e37 100644 --- a/server/pkg/api/file.go +++ b/server/pkg/api/file.go @@ -110,7 +110,7 @@ func (h *FileHandler) GetUploadURLs(c *gin.Context) { userID := auth.GetUserID(c.Request.Header) count, _ := strconv.Atoi(c.Query("count")) - urls, err := h.Controller.GetUploadURLs(c, userID, count, enteApp) + urls, err := h.Controller.GetUploadURLs(c, userID, count, enteApp, false) if err != nil { handler.Error(c, stacktrace.Propagate(err, "")) return diff --git a/server/pkg/api/public_collection.go b/server/pkg/api/public_collection.go index 
7a38f4380..9290d6456 100644 --- a/server/pkg/api/public_collection.go +++ b/server/pkg/api/public_collection.go @@ -57,7 +57,7 @@ func (h *PublicCollectionHandler) GetUploadUrls(c *gin.Context) { } userID := collection.Owner.ID count, _ := strconv.Atoi(c.Query("count")) - urls, err := h.FileCtrl.GetUploadURLs(c, userID, count, enteApp) + urls, err := h.FileCtrl.GetUploadURLs(c, userID, count, enteApp, false) if err != nil { handler.Error(c, stacktrace.Propagate(err, "")) return diff --git a/server/pkg/controller/embedding/controller.go b/server/pkg/controller/embedding/controller.go index d6e78209f..bf317ccfe 100644 --- a/server/pkg/controller/embedding/controller.go +++ b/server/pkg/controller/embedding/controller.go @@ -2,12 +2,14 @@ package embedding import ( "bytes" + "context" "encoding/json" "errors" "fmt" "github.com/ente-io/museum/pkg/utils/array" "strconv" "sync" + gTime "time" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/service/s3" @@ -26,6 +28,12 @@ import ( log "github.com/sirupsen/logrus" ) +const ( + // minEmbeddingDataSize is the minimum size of an embedding object in bytes + minEmbeddingDataSize = 2048 + embeddingFetchTimeout = 15 * gTime.Second +) + type Controller struct { Repo *embedding.Repository AccessCtrl access.Controller @@ -135,15 +143,23 @@ func (c *Controller) GetFilesEmbedding(ctx *gin.Context, req ente.GetFilesEmbedd return nil, stacktrace.Propagate(err, "") } + embeddingsWithData := make([]ente.Embedding, 0) + noEmbeddingFileIds := make([]int64, 0) dbFileIds := make([]int64, 0) - for _, embedding := range userFileEmbeddings { - dbFileIds = append(dbFileIds, embedding.FileID) + // fileIDs that were indexed but don't contain any embedding information + for i := range userFileEmbeddings { + dbFileIds = append(dbFileIds, userFileEmbeddings[i].FileID) + if userFileEmbeddings[i].Size != nil && *userFileEmbeddings[i].Size < minEmbeddingDataSize { + noEmbeddingFileIds = append(noEmbeddingFileIds, userFileEmbeddings[i].FileID) 
+ } else { + embeddingsWithData = append(embeddingsWithData, userFileEmbeddings[i]) + } } - missingFileIds := array.FindMissingElementsInSecondList(req.FileIDs, dbFileIds) + pendingIndexFileIds := array.FindMissingElementsInSecondList(req.FileIDs, dbFileIds) errFileIds := make([]int64, 0) // Fetch missing userFileEmbeddings in parallel - embeddingObjects, err := c.getEmbeddingObjectsParallelV2(userID, userFileEmbeddings) + embeddingObjects, err := c.getEmbeddingObjectsParallelV2(userID, embeddingsWithData) if err != nil { return nil, stacktrace.Propagate(err, "") } @@ -166,9 +182,10 @@ func (c *Controller) GetFilesEmbedding(ctx *gin.Context, req ente.GetFilesEmbedd } return &ente.GetFilesEmbeddingResponse{ - Embeddings: fetchedEmbeddings, - NoDataFileIDs: missingFileIds, - ErrFileIDs: errFileIds, + Embeddings: fetchedEmbeddings, + PendingIndexFileIDs: pendingIndexFileIds, + ErrFileIDs: errFileIds, + NoEmbeddingFileIDs: noEmbeddingFileIds, }, nil } @@ -292,7 +309,7 @@ func (c *Controller) getEmbeddingObjectsParallel(objectKeys []string) ([]ente.Em defer wg.Done() defer func() { <-globalDiffFetchSemaphore }() // Release back to global semaphore - obj, err := c.getEmbeddingObject(objectKey, downloader) + obj, err := c.getEmbeddingObject(context.Background(), objectKey, downloader) if err != nil { errs = append(errs, err) log.Error("error fetching embedding object: "+objectKey, err) @@ -329,7 +346,9 @@ func (c *Controller) getEmbeddingObjectsParallelV2(userID int64, dbEmbeddingRows defer wg.Done() defer func() { <-globalFileFetchSemaphore }() // Release back to global semaphore objectKey := c.getObjectKey(userID, dbEmbeddingRow.FileID, dbEmbeddingRow.Model) - obj, err := c.getEmbeddingObject(objectKey, downloader) + ctx, cancel := context.WithTimeout(context.Background(), embeddingFetchTimeout) + defer cancel() + obj, err := c.getEmbeddingObjectWithRetries(ctx, objectKey, downloader, 0) if err != nil { log.Error("error fetching embedding object: "+objectKey, err) 
embeddingObjects[i] = embeddingObjectResult{ @@ -349,15 +368,22 @@ func (c *Controller) getEmbeddingObjectsParallelV2(userID int64, dbEmbeddingRows return embeddingObjects, nil } -func (c *Controller) getEmbeddingObject(objectKey string, downloader *s3manager.Downloader) (ente.EmbeddingObject, error) { +func (c *Controller) getEmbeddingObject(ctx context.Context, objectKey string, downloader *s3manager.Downloader) (ente.EmbeddingObject, error) { + return c.getEmbeddingObjectWithRetries(ctx, objectKey, downloader, 3) +} + +func (c *Controller) getEmbeddingObjectWithRetries(ctx context.Context, objectKey string, downloader *s3manager.Downloader, retryCount int) (ente.EmbeddingObject, error) { var obj ente.EmbeddingObject buff := &aws.WriteAtBuffer{} - _, err := downloader.Download(buff, &s3.GetObjectInput{ + _, err := downloader.DownloadWithContext(ctx, buff, &s3.GetObjectInput{ Bucket: c.S3Config.GetHotBucket(), Key: &objectKey, }) if err != nil { log.Error(err) + if retryCount > 0 { + return c.getEmbeddingObjectWithRetries(ctx, objectKey, downloader, retryCount-1) + } return obj, stacktrace.Propagate(err, "") } err = json.Unmarshal(buff.Bytes(), &obj) diff --git a/server/pkg/controller/file.go b/server/pkg/controller/file.go index e91d299f1..b3fec115d 100644 --- a/server/pkg/controller/file.go +++ b/server/pkg/controller/file.go @@ -258,7 +258,7 @@ func (c *FileController) Update(ctx context.Context, userID int64, file ente.Fil } // GetUploadURLs returns a bunch of presigned URLs for uploading files -func (c *FileController) GetUploadURLs(ctx context.Context, userID int64, count int, app ente.App) ([]ente.UploadURL, error) { +func (c *FileController) GetUploadURLs(ctx context.Context, userID int64, count int, app ente.App, ignoreLimit bool) ([]ente.UploadURL, error) { err := c.UsageCtrl.CanUploadFile(ctx, userID, nil, app) if err != nil { return []ente.UploadURL{}, stacktrace.Propagate(err, "") @@ -268,7 +268,7 @@ func (c *FileController) GetUploadURLs(ctx 
context.Context, userID int64, count bucket := c.S3Config.GetHotBucket() urls := make([]ente.UploadURL, 0) objectKeys := make([]string, 0) - if count > MaxUploadURLsLimit { + if count > MaxUploadURLsLimit && !ignoreLimit { count = MaxUploadURLsLimit } for i := 0; i < count; i++ { @@ -502,7 +502,7 @@ func (c *FileController) UpdateMagicMetadata(ctx *gin.Context, req ente.UpdateMu if err != nil { return stacktrace.Propagate(err, "") } - err = c.FileRepo.UpdateMagicAttributes(ctx, req.MetadataList, isPublicMetadata) + err = c.FileRepo.UpdateMagicAttributes(ctx, req.MetadataList, isPublicMetadata, req.SkipVersion) if err != nil { return stacktrace.Propagate(err, "failed to update magic attributes") } diff --git a/server/pkg/controller/file_copy/file_copy.go b/server/pkg/controller/file_copy/file_copy.go index afab10efe..4f9267e2e 100644 --- a/server/pkg/controller/file_copy/file_copy.go +++ b/server/pkg/controller/file_copy/file_copy.go @@ -92,7 +92,7 @@ func (fc *FileCopyController) CopyFiles(c *gin.Context, req ente.CopyFileSyncReq // request the uploadUrls using existing method. 
This is to ensure that orphan objects are automatically cleaned up // todo:(neeraj) optimize this method by removing the need for getting a signed url for each object - uploadUrls, err := fc.FileController.GetUploadURLs(c, userID, len(s3ObjectsToCopy), app) + uploadUrls, err := fc.FileController.GetUploadURLs(c, userID, len(s3ObjectsToCopy), app, true) if err != nil { return nil, err } diff --git a/server/pkg/repo/cast/repo.go b/server/pkg/repo/cast/repo.go index 2f4446c9d..823b17b2e 100644 --- a/server/pkg/repo/cast/repo.go +++ b/server/pkg/repo/cast/repo.go @@ -8,6 +8,7 @@ import ( "github.com/ente-io/stacktrace" "github.com/google/uuid" log "github.com/sirupsen/logrus" + "strings" ) type Repository struct { @@ -19,6 +20,7 @@ func (r *Repository) AddCode(ctx context.Context, pubKey string, ip string) (str if err != nil { return "", err } + codeValue = strings.ToUpper(codeValue) _, err = r.DB.ExecContext(ctx, "INSERT INTO casting (code, public_key, id, ip) VALUES ($1, $2, $3, $4)", codeValue, pubKey, uuid.New(), ip) if err != nil { return "", err @@ -28,11 +30,13 @@ func (r *Repository) AddCode(ctx context.Context, pubKey string, ip string) (str // InsertCastData insert collection_id, cast_user, token and encrypted_payload for given code if collection_id is not null func (r *Repository) InsertCastData(ctx context.Context, castUserID int64, code string, collectionID int64, castToken string, encryptedPayload string) error { + code = strings.ToUpper(code) _, err := r.DB.ExecContext(ctx, "UPDATE casting SET collection_id = $1, cast_user = $2, token = $3, encrypted_payload = $4 WHERE code = $5 and is_deleted=false", collectionID, castUserID, castToken, encryptedPayload, code) return err } func (r *Repository) GetPubKeyAndIp(ctx context.Context, code string) (string, string, error) { + code = strings.ToUpper(code) var pubKey, ip string row := r.DB.QueryRowContext(ctx, "SELECT public_key, ip FROM casting WHERE code = $1 and is_deleted=false", code) err := 
row.Scan(&pubKey, &ip) @@ -46,6 +50,7 @@ func (r *Repository) GetPubKeyAndIp(ctx context.Context, code string) (string, s } func (r *Repository) GetEncCastData(ctx context.Context, code string) (*string, error) { + code = strings.ToUpper(code) var payload sql.NullString row := r.DB.QueryRowContext(ctx, "SELECT encrypted_payload FROM casting WHERE code = $1 and is_deleted=false", code) err := row.Scan(&payload) diff --git a/server/pkg/repo/embedding/repository.go b/server/pkg/repo/embedding/repository.go index f21e3b4f1..86915fde5 100644 --- a/server/pkg/repo/embedding/repository.go +++ b/server/pkg/repo/embedding/repository.go @@ -45,7 +45,7 @@ func (r *Repository) InsertOrUpdate(ctx context.Context, ownerID int64, entry en // GetDiff returns the embeddings that have been updated since the given time func (r *Repository) GetDiff(ctx context.Context, ownerID int64, model ente.Model, sinceTime int64, limit int16) ([]ente.Embedding, error) { - rows, err := r.DB.QueryContext(ctx, `SELECT file_id, model, encrypted_embedding, decryption_header, updated_at, version + rows, err := r.DB.QueryContext(ctx, `SELECT file_id, model, encrypted_embedding, decryption_header, updated_at, version, size FROM embeddings WHERE owner_id = $1 AND model = $2 AND updated_at > $3 ORDER BY updated_at ASC @@ -57,7 +57,7 @@ func (r *Repository) GetDiff(ctx context.Context, ownerID int64, model ente.Mode } func (r *Repository) GetFilesEmbedding(ctx context.Context, ownerID int64, model ente.Model, fileIDs []int64) ([]ente.Embedding, error) { - rows, err := r.DB.QueryContext(ctx, `SELECT file_id, model, encrypted_embedding, decryption_header, updated_at, version + rows, err := r.DB.QueryContext(ctx, `SELECT file_id, model, encrypted_embedding, decryption_header, updated_at, version, size FROM embeddings WHERE owner_id = $1 AND model = $2 AND file_id = ANY($3)`, ownerID, model, pq.Array(fileIDs)) if err != nil { @@ -94,7 +94,7 @@ func convertRowsToEmbeddings(rows *sql.Rows) ([]ente.Embedding, 
error) { embedding := ente.Embedding{} var encryptedEmbedding, decryptionHeader sql.NullString var version sql.NullInt32 - err := rows.Scan(&embedding.FileID, &embedding.Model, &encryptedEmbedding, &decryptionHeader, &embedding.UpdatedAt, &version) + err := rows.Scan(&embedding.FileID, &embedding.Model, &encryptedEmbedding, &decryptionHeader, &embedding.UpdatedAt, &version, &embedding.Size) if encryptedEmbedding.Valid && len(encryptedEmbedding.String) > 0 { embedding.EncryptedEmbedding = encryptedEmbedding.String } diff --git a/server/pkg/repo/file.go b/server/pkg/repo/file.go index eafc7b570..2ae4eafdc 100644 --- a/server/pkg/repo/file.go +++ b/server/pkg/repo/file.go @@ -311,7 +311,12 @@ func (repo *FileRepository) Update(file ente.File, fileSize int64, thumbnailSize // UpdateMagicAttributes updates the magic attributes for the list of files and update collection_files & collection // which have this file. -func (repo *FileRepository) UpdateMagicAttributes(ctx context.Context, fileUpdates []ente.UpdateMagicMetadata, isPublicMetadata bool) error { +func (repo *FileRepository) UpdateMagicAttributes( + ctx context.Context, + fileUpdates []ente.UpdateMagicMetadata, + isPublicMetadata bool, + skipVersion *bool, +) error { updationTime := time.Microseconds() tx, err := repo.DB.BeginTx(ctx, nil) if err != nil { @@ -336,6 +341,9 @@ func (repo *FileRepository) UpdateMagicAttributes(ctx context.Context, fileUpdat return stacktrace.Propagate(err, "") } } + if skipVersion != nil && *skipVersion { + return tx.Commit() + } // todo: full table scan, need to add index (for discussion: add user_id and idx {user_id, file_id}). 
updatedRows, err := tx.QueryContext(ctx, `UPDATE collection_files SET updation_time = $1 WHERE file_id = ANY($2) AND is_deleted= false RETURNING collection_id`, updationTime, diff --git a/server/pkg/repo/user.go b/server/pkg/repo/user.go index 596d24c64..f35a47e1f 100644 --- a/server/pkg/repo/user.go +++ b/server/pkg/repo/user.go @@ -194,8 +194,8 @@ func (repo *UserRepository) UpdateEmail(userID int64, encryptedEmail ente.Encryp // GetUserIDWithEmail returns the userID associated with a provided email func (repo *UserRepository) GetUserIDWithEmail(email string) (int64, error) { - trimmedEmail := strings.TrimSpace(email) - emailHash, err := crypto.GetHash(trimmedEmail, repo.HashingKey) + sanitizedEmail := strings.ToLower(strings.TrimSpace(email)) + emailHash, err := crypto.GetHash(sanitizedEmail, repo.HashingKey) if err != nil { return -1, stacktrace.Propagate(err, "") } diff --git a/server/pkg/utils/email/email.go b/server/pkg/utils/email/email.go index 46202313e..a19987a1d 100644 --- a/server/pkg/utils/email/email.go +++ b/server/pkg/utils/email/email.go @@ -38,6 +38,7 @@ func sendViaSMTP(toEmails []string, fromName string, fromEmail string, subject s smtpPort := viper.GetString("smtp.port") smtpUsername := viper.GetString("smtp.username") smtpPassword := viper.GetString("smtp.password") + smtpEmail := viper.GetString("smtp.email") var emailMessage string @@ -50,6 +51,11 @@ func sendViaSMTP(toEmails []string, fromName string, fromEmail string, subject s emailAddresses += email } + // If a sender email is provided, use it instead of fromEmail. 
+ if smtpEmail != "" { + fromEmail = smtpEmail + } + header := "From: " + fromName + " <" + fromEmail + ">\n" + "To: " + emailAddresses + "\n" + "Subject: " + subject + "\n" + diff --git a/web/apps/accounts/.env b/web/apps/accounts/.env new file mode 100644 index 000000000..3f3b1cc9a --- /dev/null +++ b/web/apps/accounts/.env @@ -0,0 +1 @@ +NEXT_TELEMETRY_DISABLED = 1 diff --git a/web/apps/auth/.env b/web/apps/auth/.env new file mode 100644 index 000000000..3f3b1cc9a --- /dev/null +++ b/web/apps/auth/.env @@ -0,0 +1 @@ +NEXT_TELEMETRY_DISABLED = 1 diff --git a/web/apps/cast/.env b/web/apps/cast/.env new file mode 100644 index 000000000..3f3b1cc9a --- /dev/null +++ b/web/apps/cast/.env @@ -0,0 +1 @@ +NEXT_TELEMETRY_DISABLED = 1 diff --git a/web/apps/cast/package.json b/web/apps/cast/package.json index 012148969..4f774662a 100644 --- a/web/apps/cast/package.json +++ b/web/apps/cast/package.json @@ -8,5 +8,8 @@ "@ente/accounts": "*", "@ente/eslint-config": "*", "@ente/shared": "*" + }, + "devDependencies": { + "@types/chromecast-caf-receiver": "^6.0.14" } } diff --git a/web/apps/cast/src/components/FilledCircleCheck.tsx b/web/apps/cast/src/components/FilledCircleCheck.tsx index c0635f138..ba2292922 100644 --- a/web/apps/cast/src/components/FilledCircleCheck.tsx +++ b/web/apps/cast/src/components/FilledCircleCheck.tsx @@ -1,6 +1,6 @@ import { styled } from "@mui/material"; -const FilledCircleCheck = () => { +export const FilledCircleCheck: React.FC = () => { return ( @@ -11,8 +11,6 @@ const FilledCircleCheck = () => { ); }; -export default FilledCircleCheck; - const Container = styled("div")` width: 100px; height: 100px; diff --git a/web/apps/cast/src/components/PairedSuccessfullyOverlay.tsx b/web/apps/cast/src/components/PairedSuccessfullyOverlay.tsx deleted file mode 100644 index 845416fed..000000000 --- a/web/apps/cast/src/components/PairedSuccessfullyOverlay.tsx +++ /dev/null @@ -1,46 +0,0 @@ -import FilledCircleCheck from "./FilledCircleCheck"; - -export default 
function PairedSuccessfullyOverlay() { - return ( -
-
- -

- Pairing Complete -

-

- We're preparing your album. -
This should only take a few seconds. -

-
-
- ); -} diff --git a/web/apps/cast/src/components/LargeType.tsx b/web/apps/cast/src/components/PairingCode.tsx similarity index 74% rename from web/apps/cast/src/components/LargeType.tsx rename to web/apps/cast/src/components/PairingCode.tsx index ecf7a201b..fa1474baf 100644 --- a/web/apps/cast/src/components/LargeType.tsx +++ b/web/apps/cast/src/components/PairingCode.tsx @@ -1,6 +1,6 @@ import { styled } from "@mui/material"; -const colourPool = [ +const colors = [ "#87CEFA", // Light Blue "#90EE90", // Light Green "#F08080", // Light Coral @@ -23,27 +23,34 @@ const colourPool = [ "#808000", // Light Olive ]; -export default function LargeType({ chars }: { chars: string[] }) { +interface PairingCodeProps { + code: string; +} + +export const PairingCode: React.FC = ({ code }) => { return ( - - {chars.map((char, i) => ( + + {code.split("").map((char, i) => ( {char} ))} - + ); -} +}; + +const PairingCode_ = styled("div")` + border-radius: 10px; + overflow: hidden; -const Container = styled("div")` font-size: 4rem; font-weight: bold; font-family: monospace; diff --git a/web/apps/cast/src/components/PhotoAuditorium.tsx b/web/apps/cast/src/components/PhotoAuditorium.tsx deleted file mode 100644 index c77c9e6ca..000000000 --- a/web/apps/cast/src/components/PhotoAuditorium.tsx +++ /dev/null @@ -1,66 +0,0 @@ -import { useEffect } from "react"; - -interface PhotoAuditoriumProps { - url: string; - nextSlideUrl: string; - showNextSlide: () => void; -} -export const PhotoAuditorium: React.FC = ({ - url, - nextSlideUrl, - showNextSlide, -}) => { - useEffect(() => { - console.log("showing slide"); - const timeoutId = window.setTimeout(() => { - console.log("showing next slide timer"); - showNextSlide(); - }, 10000); - - return () => { - if (timeoutId) clearTimeout(timeoutId); - }; - }, []); - - return ( -
-
- - -
-
- ); -}; diff --git a/web/apps/cast/src/constants/upload.ts b/web/apps/cast/src/constants/upload.ts deleted file mode 100644 index 2ae1c4383..000000000 --- a/web/apps/cast/src/constants/upload.ts +++ /dev/null @@ -1,13 +0,0 @@ -export const RAW_FORMATS = [ - "heic", - "rw2", - "tiff", - "arw", - "cr3", - "cr2", - "raf", - "nef", - "psd", - "dng", - "tif", -]; diff --git a/web/apps/cast/src/pages/_app.tsx b/web/apps/cast/src/pages/_app.tsx index 99b047d41..d85ac0542 100644 --- a/web/apps/cast/src/pages/_app.tsx +++ b/web/apps/cast/src/pages/_app.tsx @@ -1,4 +1,5 @@ import { CustomHead } from "@/next/components/Head"; +import { disableDiskLogs } from "@/next/log"; import { logUnhandledErrorsAndRejections } from "@/next/log-web"; import { APPS, APP_TITLES } from "@ente/shared/apps/constants"; import { getTheme } from "@ente/shared/themes"; @@ -11,6 +12,7 @@ import "styles/global.css"; export default function App({ Component, pageProps }: AppProps) { useEffect(() => { + disableDiskLogs(); logUnhandledErrorsAndRejections(true); return () => logUnhandledErrorsAndRejections(false); }, []); diff --git a/web/apps/cast/src/pages/index.tsx b/web/apps/cast/src/pages/index.tsx index b12bf1e76..37fcf3d4b 100644 --- a/web/apps/cast/src/pages/index.tsx +++ b/web/apps/cast/src/pages/index.tsx @@ -1,238 +1,110 @@ import log from "@/next/log"; import EnteSpinner from "@ente/shared/components/EnteSpinner"; -import { boxSealOpen, toB64 } from "@ente/shared/crypto/internal/libsodium"; -import castGateway from "@ente/shared/network/cast"; -import LargeType from "components/LargeType"; -import _sodium from "libsodium-wrappers"; +import { styled } from "@mui/material"; +import { PairingCode } from "components/PairingCode"; import { useRouter } from "next/router"; import { useEffect, useState } from "react"; -import { storeCastData } from "services/cast/castService"; -import { useCastReceiver } from "../utils/useCastReceiver"; +import { readCastData, storeCastData } from 
"services/cast-data"; +import { getCastData, register } from "services/pair"; +import { advertiseOnChromecast } from "../services/chromecast"; -export default function PairingMode() { - const [deviceCode, setDeviceCode] = useState(""); - const [publicKeyB64, setPublicKeyB64] = useState(""); - const [privateKeyB64, setPrivateKeyB64] = useState(""); - const [codePending, setCodePending] = useState(true); - const [isCastReady, setIsCastReady] = useState(false); - - const { cast } = useCastReceiver(); - - useEffect(() => { - init(); - }, []); - - const init = async () => { - try { - const keypair = await generateKeyPair(); - setPublicKeyB64(await toB64(keypair.publicKey)); - setPrivateKeyB64(await toB64(keypair.privateKey)); - } catch (e) { - log.error("failed to generate keypair", e); - throw e; - } - }; - - useEffect(() => { - if (!cast) { - return; - } - if (isCastReady) { - return; - } - const context = cast.framework.CastReceiverContext.getInstance(); - - try { - const options = new cast.framework.CastReceiverOptions(); - options.maxInactivity = 3600; - options.customNamespaces = Object.assign({}); - options.customNamespaces["urn:x-cast:pair-request"] = - cast.framework.system.MessageType.JSON; - - options.disableIdleTimeout = true; - context.set; - - context.addCustomMessageListener( - "urn:x-cast:pair-request", - messageReceiveHandler, - ); - - // listen to close request and stop the context - context.addEventListener( - cast.framework.system.EventType.SENDER_DISCONNECTED, - // eslint-disable-next-line @typescript-eslint/no-unused-vars - (_) => { - context.stop(); - }, - ); - context.start(options); - setIsCastReady(true); - } catch (e) { - log.error("failed to create cast context", e); - } - - return () => { - // context.stop(); - }; - }, [cast]); - - const messageReceiveHandler = (message: { - type: string; - senderId: string; - data: any; - }) => { - try { - cast.framework.CastReceiverContext.getInstance().sendCustomMessage( - "urn:x-cast:pair-request", - 
message.senderId, - { - code: deviceCode, - }, - ); - } catch (e) { - log.error("failed to send message", e); - } - }; - - const generateKeyPair = async () => { - await _sodium.ready; - const keypair = _sodium.crypto_box_keypair(); - return keypair; - }; - - const pollForCastData = async () => { - if (codePending) { - return; - } - // see if we were acknowledged on the client. - // the client will send us the encrypted payload using our public key that we advertised. - // then, we can decrypt this and store all the necessary info locally so we can play the collection slideshow. - let devicePayload = ""; - try { - const encDastData = await castGateway.getCastData(`${deviceCode}`); - if (!encDastData) return; - devicePayload = encDastData; - } catch (e) { - setCodePending(true); - init(); - return; - } - - const decryptedPayload = await boxSealOpen( - devicePayload, - publicKeyB64, - privateKeyB64, - ); - - const decryptedPayloadObj = JSON.parse(atob(decryptedPayload)); - - return decryptedPayloadObj; - }; - - const advertisePublicKey = async (publicKeyB64: string) => { - // hey client, we exist! 
- try { - const codeValue = await castGateway.registerDevice(publicKeyB64); - setDeviceCode(codeValue); - setCodePending(false); - } catch (e) { - // schedule re-try after 5 seconds - setTimeout(() => { - init(); - }, 5000); - return; - } - }; +export default function Index() { + const [publicKeyB64, setPublicKeyB64] = useState(); + const [privateKeyB64, setPrivateKeyB64] = useState(); + const [pairingCode, setPairingCode] = useState(); const router = useRouter(); useEffect(() => { - console.log("useEffect for pairing called"); - if (deviceCode.length < 1 || !publicKeyB64 || !privateKeyB64) return; - - const interval = setInterval(async () => { - console.log("polling for cast data"); - const data = await pollForCastData(); - if (!data) { - console.log("no data"); - return; - } - storeCastData(data); - console.log("pushing slideshow"); - await router.push("/slideshow"); - }, 1000); - - return () => { - clearInterval(interval); - }; - }, [deviceCode, publicKeyB64, privateKeyB64, codePending]); + if (!pairingCode) { + register().then((r) => { + setPublicKeyB64(r.publicKeyB64); + setPrivateKeyB64(r.privateKeyB64); + setPairingCode(r.pairingCode); + }); + } else { + advertiseOnChromecast( + () => pairingCode, + () => readCastData()?.collectionID, + ); + } + }, [pairingCode]); useEffect(() => { - if (!publicKeyB64) return; - advertisePublicKey(publicKeyB64); - }, [publicKeyB64]); + if (!publicKeyB64 || !privateKeyB64 || !pairingCode) return; + + const interval = setInterval(pollTick, 2000); + return () => clearInterval(interval); + }, [publicKeyB64, privateKeyB64, pairingCode]); + + const pollTick = async () => { + const registration = { publicKeyB64, privateKeyB64, pairingCode }; + try { + const data = await getCastData(registration); + if (!data) { + // No one has connected yet. + return; + } + + storeCastData(data); + await router.push("/slideshow"); + } catch (e) { + // The pairing code becomes invalid after an hour, which will cause + // `getCastData` to fail. 
There might be other reasons this might + // fail too, but in all such cases, it is a reasonable idea to start + // again from the beginning. + log.warn("Failed to get cast data", e); + setPairingCode(undefined); + } + }; return ( - <> -
-
- -

- Enter this code on ente to pair this TV -

-
- {codePending ? ( - - ) : ( - <> - - - )} -
-

- Visit{" "} - - ente.io/cast - {" "} - for help -

-
-
- + + +

+ Enter this code on Ente Photos to pair this screen +

+ {pairingCode ? : } +

+ Visit{" "} + + ente.io/cast + {" "} + for help +

+
); } + +const Container = styled("div")` + height: 100%; + display: flex; + flex-direction: column; + justify-content: center; + align-items: center; + text-align: center; + + h1 { + font-weight: normal; + } + + p { + font-size: 1.2rem; + } + a { + text-decoration: none; + color: #87cefa; + font-weight: bold; + } +`; + +const Spinner: React.FC = () => ( + + + +); + +const Spinner_ = styled("div")` + /* Roughly same height as the pairing code section to roduce layout shift */ + margin-block: 1.7rem; +`; diff --git a/web/apps/cast/src/pages/slideshow.tsx b/web/apps/cast/src/pages/slideshow.tsx index 8554524b2..326b183d4 100644 --- a/web/apps/cast/src/pages/slideshow.tsx +++ b/web/apps/cast/src/pages/slideshow.tsx @@ -1,189 +1,192 @@ -import { FILE_TYPE } from "@/media/file-type"; import log from "@/next/log"; -import PairedSuccessfullyOverlay from "components/PairedSuccessfullyOverlay"; -import { PhotoAuditorium } from "components/PhotoAuditorium"; +import { ensure } from "@/utils/ensure"; +import { styled } from "@mui/material"; +import { FilledCircleCheck } from "components/FilledCircleCheck"; import { useRouter } from "next/router"; import { useEffect, useState } from "react"; -import { - getCastCollection, - getLocalFiles, - syncPublicFiles, -} from "services/cast/castService"; -import { Collection } from "types/collection"; -import { EnteFile } from "types/file"; -import { getPreviewableImage, isRawFileFromFileName } from "utils/file"; - -const renderableFileURLCache = new Map(); +import { readCastData } from "services/cast-data"; +import { isChromecast } from "services/chromecast"; +import { imageURLGenerator } from "services/render"; export default function Slideshow() { const [loading, setLoading] = useState(true); - const [castToken, setCastToken] = useState(""); - const [castCollection, setCastCollection] = useState< - Collection | undefined - >(); - const [collectionFiles, setCollectionFiles] = useState([]); - const [currentFileId, setCurrentFileId] = 
useState(); - const [currentFileURL, setCurrentFileURL] = useState(); - const [nextFileURL, setNextFileURL] = useState(); + const [imageURL, setImageURL] = useState(); + const [isEmpty, setIsEmpty] = useState(false); const router = useRouter(); - const syncCastFiles = async (token: string) => { - try { - console.log("syncCastFiles"); - const castToken = window.localStorage.getItem("castToken"); - const requestedCollectionKey = - window.localStorage.getItem("collectionKey"); - const collection = await getCastCollection( - castToken, - requestedCollectionKey, - ); - if ( - castCollection === undefined || - castCollection.updationTime !== collection.updationTime - ) { - setCastCollection(collection); - await syncPublicFiles(token, collection, () => {}); - const files = await getLocalFiles(String(collection.id)); - setCollectionFiles( - files.filter((file) => isFileEligibleForCast(file)), - ); + /** Go back to pairing page */ + const pair = () => router.push("/"); + + useEffect(() => { + let stop = false; + + const loop = async () => { + try { + const urlGenerator = imageURLGenerator(ensure(readCastData())); + while (!stop) { + const { value: url, done } = await urlGenerator.next(); + if (done || !url) { + // No items in this callection can be shown. + setIsEmpty(true); + // Go back to pairing screen after 5 seconds. 
+ setTimeout(pair, 5000); + return; + } + + setImageURL(url); + setLoading(false); + } + } catch (e) { + log.error("Failed to prepare generator", e); + pair(); } - } catch (e) { - log.error("error during sync", e); - // go back to preview page - router.push("/"); - } - }; + }; - useEffect(() => { - if (castToken) { - const intervalId = setInterval(() => { - syncCastFiles(castToken); - }, 10000); - syncCastFiles(castToken); + void loop(); - return () => clearInterval(intervalId); - } - }, [castToken]); - - const isFileEligibleForCast = (file: EnteFile) => { - const fileType = file.metadata.fileType; - if (fileType !== FILE_TYPE.IMAGE && fileType !== FILE_TYPE.LIVE_PHOTO) - return false; - - if (file.info.fileSize > 100 * 1024 * 1024) return false; - - if (isRawFileFromFileName(file.metadata.title)) return false; - - return true; - }; - - useEffect(() => { - try { - const castToken = window.localStorage.getItem("castToken"); - // Wait 2 seconds to ensure the green tick and the confirmation - // message remains visible for at least 2 seconds before we start - // the slideshow. 
- const timeoutId = setTimeout(() => { - setCastToken(castToken); - }, 2000); - - return () => clearTimeout(timeoutId); - } catch (e) { - log.error("error during sync", e); - router.push("/"); - } + return () => { + stop = true; + }; }, []); - useEffect(() => { - if (collectionFiles.length < 1) return; - showNextSlide(); - }, [collectionFiles]); + if (loading) return ; + if (isEmpty) return ; - const showNextSlide = async () => { - try { - console.log("showNextSlide"); - const currentIndex = collectionFiles.findIndex( - (file) => file.id === currentFileId, - ); - - console.log( - "showNextSlide-index", - currentIndex, - collectionFiles.length, - ); - - const nextIndex = (currentIndex + 1) % collectionFiles.length; - const nextNextIndex = (nextIndex + 1) % collectionFiles.length; - - console.log( - "showNextSlide-nextIndex and nextNextIndex", - nextIndex, - nextNextIndex, - ); - - const nextFile = collectionFiles[nextIndex]; - const nextNextFile = collectionFiles[nextNextIndex]; - - let nextURL = renderableFileURLCache.get(nextFile.id); - let nextNextURL = renderableFileURLCache.get(nextNextFile.id); - - if (!nextURL) { - try { - console.log("nextURL doesn't exist yet"); - const blob = await getPreviewableImage(nextFile, castToken); - console.log("nextURL blobread"); - const url = URL.createObjectURL(blob); - console.log("nextURL", url); - renderableFileURLCache.set(nextFile.id, url); - console.log("nextUrlCache set"); - nextURL = url; - } catch (e) { - console.log("error in nextUrl", e); - return; - } - } else { - console.log("nextURL already exists"); - } - - if (!nextNextURL) { - try { - console.log("nextNextURL doesn't exist yet"); - const blob = await getPreviewableImage( - nextNextFile, - castToken, - ); - console.log("nextNextURL blobread"); - const url = URL.createObjectURL(blob); - console.log("nextNextURL", url); - renderableFileURLCache.set(nextNextFile.id, url); - console.log("nextNextURCacheL set"); - nextNextURL = url; - } catch (e) { - 
console.log("error in nextNextURL", e); - return; - } - } else { - console.log("nextNextURL already exists"); - } - - setLoading(false); - setCurrentFileId(nextFile.id); - setCurrentFileURL(nextURL); - setNextFileURL(nextNextURL); - } catch (e) { - console.log("error in showNextSlide", e); - } - }; - - if (loading) return ; - - return ( - + return isChromecast() ? ( + + ) : ( + ); } + +const PairingComplete: React.FC = () => { + return ( + + +

Pairing Complete

+

+ We're preparing your album. +
This should only take a few seconds. +

+
+ ); +}; + +const Message = styled("div")` + display: flex; + flex-direction: column; + height: 100%; + justify-content: center; + align-items: center; + text-align: center; + + line-height: 1.5rem; + + h2 { + margin-block-end: 0; + } +`; + +const NoItems: React.FC = () => { + return ( + +

Try another album

+

+ This album has no photos that can be shown here +
Please try another album +

+
+ ); +}; + +interface SlideViewProps { + /** The URL of the image to show. */ + url: string; +} + +const SlideView: React.FC = ({ url }) => { + return ( + + + + ); +}; + +const SlideView_ = styled("div")` + width: 100%; + height: 100%; + + background-size: cover; + background-position: center; + background-repeat: no-repeat; + background-blend-mode: multiply; + background-color: rgba(0, 0, 0, 0.5); + + /* Smooth out the transition a bit. + * + * For the img itself, we set decoding="sync" to have it switch seamlessly. + * But there does not seem to be a way of setting decoding sync for the + * background image, and for large (multi-MB) images the background image + * switch is still visually non-atomic. + * + * As a workaround, add a long transition so that the background image + * transitions in a more "fade-to" manner. This effect might or might not be + * visually the best though. + * + * Does not work in Firefox, but that's fine, this is only a slight tweak, + * not a functional requirement. + */ + transition: all 2s; + + img { + width: 100%; + height: 100%; + backdrop-filter: blur(10px); + object-fit: contain; + } +`; + +/** + * Variant of {@link SlideView} for use when we're running on Chromecast. + * + * Chromecast devices have trouble with + * + * backdrop-filter: blur(10px); + * + * So emulate a cheaper approximation for use on Chromecast. 
+ */ +const SlideViewChromecast: React.FC = ({ url }) => { + return ( + + + + + ); +}; + +const SlideViewChromecast_ = styled("div")` + width: 100%; + height: 100%; + + /* We can't set opacity of background-image, so use a wrapper */ + position: relative; + overflow: hidden; + + img.svc-bg { + position: absolute; + left: 0; + top: 0; + width: 100%; + height: 100%; + object-fit: cover; + opacity: 0.1; + } + + img.svc-content { + position: relative; + width: 100%; + height: 100%; + object-fit: contain; + } +`; diff --git a/web/apps/cast/src/services/cast-data.ts b/web/apps/cast/src/services/cast-data.ts new file mode 100644 index 000000000..587d1db32 --- /dev/null +++ b/web/apps/cast/src/services/cast-data.ts @@ -0,0 +1,41 @@ +export interface CastData { + /** The ID of the callection we are casting. */ + collectionID: string; + /** A key to decrypt the collection we are casting. */ + collectionKey: string; + /** A credential to use for fetching media files for this cast session. */ + castToken: string; +} + +/** + * Save the data received after pairing with a sender into local storage. + * + * We will read in back when we start the slideshow. + */ +export const storeCastData = (payload: unknown) => { + if (!payload || typeof payload != "object") + throw new Error("Unexpected cast data"); + + // Iterate through all the keys of the payload object and save them to + // localStorage. We don't validate here, we'll validate when we read these + // values back in `readCastData`. + for (const key in payload) { + window.localStorage.setItem(key, payload[key]); + } +}; + +/** + * Read back the cast data we got after pairing. + * + * Sibling of {@link storeCastData}. It returns undefined if the expected data + * is not present in localStorage. 
+ */ +export const readCastData = (): CastData | undefined => { + const collectionID = localStorage.getItem("collectionID"); + const collectionKey = localStorage.getItem("collectionKey"); + const castToken = localStorage.getItem("castToken"); + + return collectionID && collectionKey && castToken + ? { collectionID, collectionKey, castToken } + : undefined; +}; diff --git a/web/apps/cast/src/services/cast/castService.ts b/web/apps/cast/src/services/cast/castService.ts deleted file mode 100644 index 84636d3a1..000000000 --- a/web/apps/cast/src/services/cast/castService.ts +++ /dev/null @@ -1,304 +0,0 @@ -import log from "@/next/log"; -import ComlinkCryptoWorker from "@ente/shared/crypto"; -import { CustomError, parseSharingErrorCodes } from "@ente/shared/error"; -import HTTPService from "@ente/shared/network/HTTPService"; -import { getEndpoint } from "@ente/shared/network/api"; -import localForage from "@ente/shared/storage/localForage"; -import { Collection, CollectionPublicMagicMetadata } from "types/collection"; -import { EncryptedEnteFile, EnteFile } from "types/file"; -import { decryptFile, mergeMetadata, sortFiles } from "utils/file"; - -export interface SavedCollectionFiles { - collectionLocalID: string; - files: EnteFile[]; -} -const ENDPOINT = getEndpoint(); -const COLLECTION_FILES_TABLE = "collection-files"; -const COLLECTIONS_TABLE = "collections"; - -const getLastSyncKey = (collectionUID: string) => `${collectionUID}-time`; - -export const getLocalFiles = async ( - collectionUID: string, -): Promise => { - const localSavedcollectionFiles = - (await localForage.getItem( - COLLECTION_FILES_TABLE, - )) || []; - const matchedCollection = localSavedcollectionFiles.find( - (item) => item.collectionLocalID === collectionUID, - ); - return matchedCollection?.files || []; -}; - -const savecollectionFiles = async ( - collectionUID: string, - files: EnteFile[], -) => { - const collectionFiles = - (await localForage.getItem( - COLLECTION_FILES_TABLE, - )) || []; - 
await localForage.setItem( - COLLECTION_FILES_TABLE, - dedupeCollectionFiles([ - { collectionLocalID: collectionUID, files }, - ...collectionFiles, - ]), - ); -}; - -export const getLocalCollections = async (collectionKey: string) => { - const localCollections = - (await localForage.getItem(COLLECTIONS_TABLE)) || []; - const collection = - localCollections.find( - (localSavedPublicCollection) => - localSavedPublicCollection.key === collectionKey, - ) || null; - return collection; -}; - -const saveCollection = async (collection: Collection) => { - const collections = - (await localForage.getItem(COLLECTIONS_TABLE)) ?? []; - await localForage.setItem( - COLLECTIONS_TABLE, - dedupeCollections([collection, ...collections]), - ); -}; - -const dedupeCollections = (collections: Collection[]) => { - const keySet = new Set([]); - return collections.filter((collection) => { - if (!keySet.has(collection.key)) { - keySet.add(collection.key); - return true; - } else { - return false; - } - }); -}; - -const dedupeCollectionFiles = (collectionFiles: SavedCollectionFiles[]) => { - const keySet = new Set([]); - return collectionFiles.filter(({ collectionLocalID: collectionUID }) => { - if (!keySet.has(collectionUID)) { - keySet.add(collectionUID); - return true; - } else { - return false; - } - }); -}; - -async function getSyncTime(collectionUID: string): Promise { - const lastSyncKey = getLastSyncKey(collectionUID); - const lastSyncTime = await localForage.getItem(lastSyncKey); - return lastSyncTime ?? 0; -} - -const updateSyncTime = async (collectionUID: string, time: number) => - await localForage.setItem(getLastSyncKey(collectionUID), time); - -export const syncPublicFiles = async ( - token: string, - collection: Collection, - setPublicFiles: (files: EnteFile[]) => void, -) => { - try { - let files: EnteFile[] = []; - const sortAsc = collection?.pubMagicMetadata?.data.asc ?? 
false; - const collectionUID = String(collection.id); - const localFiles = await getLocalFiles(collectionUID); - files = [...files, ...localFiles]; - try { - const lastSyncTime = await getSyncTime(collectionUID); - if (collection.updationTime === lastSyncTime) { - return sortFiles(files, sortAsc); - } - const fetchedFiles = await fetchFiles( - token, - collection, - lastSyncTime, - files, - setPublicFiles, - ); - - files = [...files, ...fetchedFiles]; - const latestVersionFiles = new Map(); - files.forEach((file) => { - const uid = `${file.collectionID}-${file.id}`; - if ( - !latestVersionFiles.has(uid) || - latestVersionFiles.get(uid).updationTime < file.updationTime - ) { - latestVersionFiles.set(uid, file); - } - }); - files = []; - // eslint-disable-next-line @typescript-eslint/no-unused-vars - for (const [_, file] of latestVersionFiles) { - if (file.isDeleted) { - continue; - } - files.push(file); - } - await savecollectionFiles(collectionUID, files); - await updateSyncTime(collectionUID, collection.updationTime); - setPublicFiles([...sortFiles(mergeMetadata(files), sortAsc)]); - } catch (e) { - const parsedError = parseSharingErrorCodes(e); - log.error("failed to sync shared collection files", e); - if (parsedError.message === CustomError.TOKEN_EXPIRED) { - throw e; - } - } - return [...sortFiles(mergeMetadata(files), sortAsc)]; - } catch (e) { - log.error("failed to get local or sync shared collection files", e); - throw e; - } -}; - -const fetchFiles = async ( - castToken: string, - collection: Collection, - sinceTime: number, - files: EnteFile[], - setPublicFiles: (files: EnteFile[]) => void, -): Promise => { - try { - let decryptedFiles: EnteFile[] = []; - let time = sinceTime; - let resp; - const sortAsc = collection?.pubMagicMetadata?.data.asc ?? 
false; - do { - if (!castToken) { - break; - } - resp = await HTTPService.get( - `${ENDPOINT}/cast/diff`, - { - sinceTime: time, - }, - { - "Cache-Control": "no-cache", - "X-Cast-Access-Token": castToken, - }, - ); - decryptedFiles = [ - ...decryptedFiles, - ...(await Promise.all( - resp.data.diff.map(async (file: EncryptedEnteFile) => { - if (!file.isDeleted) { - return await decryptFile(file, collection.key); - } else { - return file; - } - }) as Promise[], - )), - ]; - - if (resp.data.diff.length) { - time = resp.data.diff.slice(-1)[0].updationTime; - } - setPublicFiles( - sortFiles( - mergeMetadata( - [...(files || []), ...decryptedFiles].filter( - (item) => !item.isDeleted, - ), - ), - sortAsc, - ), - ); - } while (resp.data.hasMore); - return decryptedFiles; - } catch (e) { - log.error("Get cast files failed", e); - throw e; - } -}; - -export const getCastCollection = async ( - castToken: string, - collectionKey: string, -): Promise => { - try { - const resp = await HTTPService.get(`${ENDPOINT}/cast/info`, null, { - "Cache-Control": "no-cache", - "X-Cast-Access-Token": castToken, - }); - const fetchedCollection = resp.data.collection; - - const cryptoWorker = await ComlinkCryptoWorker.getInstance(); - - const collectionName = (fetchedCollection.name = - fetchedCollection.name || - (await cryptoWorker.decryptToUTF8( - fetchedCollection.encryptedName, - fetchedCollection.nameDecryptionNonce, - collectionKey, - ))); - - let collectionPublicMagicMetadata: CollectionPublicMagicMetadata; - if (fetchedCollection.pubMagicMetadata?.data) { - collectionPublicMagicMetadata = { - ...fetchedCollection.pubMagicMetadata, - data: await cryptoWorker.decryptMetadata( - fetchedCollection.pubMagicMetadata.data, - fetchedCollection.pubMagicMetadata.header, - collectionKey, - ), - }; - } - - const collection = { - ...fetchedCollection, - name: collectionName, - key: collectionKey, - pubMagicMetadata: collectionPublicMagicMetadata, - }; - await saveCollection(collection); - return 
collection; - } catch (e) { - log.error("failed to get cast collection", e); - throw e; - } -}; - -export const removeCollection = async ( - collectionUID: string, - collectionKey: string, -) => { - const collections = - (await localForage.getItem(COLLECTIONS_TABLE)) || []; - await localForage.setItem( - COLLECTIONS_TABLE, - collections.filter((collection) => collection.key !== collectionKey), - ); - await removeCollectionFiles(collectionUID); -}; - -export const removeCollectionFiles = async (collectionUID: string) => { - await localForage.removeItem(getLastSyncKey(collectionUID)); - const collectionFiles = - (await localForage.getItem( - COLLECTION_FILES_TABLE, - )) ?? []; - await localForage.setItem( - COLLECTION_FILES_TABLE, - collectionFiles.filter( - (collectionFiles) => - collectionFiles.collectionLocalID !== collectionUID, - ), - ); -}; - -export const storeCastData = (payloadObj: Object) => { - // iterate through all the keys in the payload object and set them in localStorage. - for (const key in payloadObj) { - window.localStorage.setItem(key, payloadObj[key]); - } -}; diff --git a/web/apps/cast/src/services/castDownloadManager.ts b/web/apps/cast/src/services/castDownloadManager.ts deleted file mode 100644 index 2314ed54e..000000000 --- a/web/apps/cast/src/services/castDownloadManager.ts +++ /dev/null @@ -1,103 +0,0 @@ -import { FILE_TYPE } from "@/media/file-type"; -import ComlinkCryptoWorker from "@ente/shared/crypto"; -import { CustomError } from "@ente/shared/error"; -import HTTPService from "@ente/shared/network/HTTPService"; -import { getCastFileURL } from "@ente/shared/network/api"; -import { EnteFile } from "types/file"; -import { generateStreamFromArrayBuffer } from "utils/file"; - -class CastDownloadManager { - async downloadFile(castToken: string, file: EnteFile) { - const cryptoWorker = await ComlinkCryptoWorker.getInstance(); - - if ( - file.metadata.fileType === FILE_TYPE.IMAGE || - file.metadata.fileType === FILE_TYPE.LIVE_PHOTO - ) { - 
const resp = await HTTPService.get( - getCastFileURL(file.id), - null, - { - "X-Cast-Access-Token": castToken, - }, - { responseType: "arraybuffer" }, - ); - if (typeof resp.data === "undefined") { - throw Error(CustomError.REQUEST_FAILED); - } - const decrypted = await cryptoWorker.decryptFile( - new Uint8Array(resp.data), - await cryptoWorker.fromB64(file.file.decryptionHeader), - file.key, - ); - return generateStreamFromArrayBuffer(decrypted); - } - const resp = await fetch(getCastFileURL(file.id), { - headers: { - "X-Cast-Access-Token": castToken, - }, - }); - const reader = resp.body.getReader(); - - const stream = new ReadableStream({ - async start(controller) { - const decryptionHeader = await cryptoWorker.fromB64( - file.file.decryptionHeader, - ); - const fileKey = await cryptoWorker.fromB64(file.key); - const { pullState, decryptionChunkSize } = - await cryptoWorker.initChunkDecryption( - decryptionHeader, - fileKey, - ); - let data = new Uint8Array(); - // The following function handles each data chunk - function push() { - // "done" is a Boolean and value a "Uint8Array" - reader.read().then(async ({ done, value }) => { - // Is there more data to read? 
- if (!done) { - const buffer = new Uint8Array( - data.byteLength + value.byteLength, - ); - buffer.set(new Uint8Array(data), 0); - buffer.set(new Uint8Array(value), data.byteLength); - if (buffer.length > decryptionChunkSize) { - const fileData = buffer.slice( - 0, - decryptionChunkSize, - ); - const { decryptedData } = - await cryptoWorker.decryptFileChunk( - fileData, - pullState, - ); - controller.enqueue(decryptedData); - data = buffer.slice(decryptionChunkSize); - } else { - data = buffer; - } - push(); - } else { - if (data) { - const { decryptedData } = - await cryptoWorker.decryptFileChunk( - data, - pullState, - ); - controller.enqueue(decryptedData); - data = null; - } - controller.close(); - } - }); - } - - push(); - }, - }); - return stream; - } -} - -export default new CastDownloadManager(); diff --git a/web/apps/cast/src/services/chromecast.ts b/web/apps/cast/src/services/chromecast.ts new file mode 100644 index 000000000..e7539e8c5 --- /dev/null +++ b/web/apps/cast/src/services/chromecast.ts @@ -0,0 +1,227 @@ +/// + +import log from "@/next/log"; + +export type Cast = typeof cast; + +/** + * A holder for the "cast" global object exposed by the Chromecast SDK, + * alongwith auxiliary state we need around it. + */ +class CastReceiver { + /** + * A reference to the `cast` global object that the Chromecast Web Receiver + * SDK attaches to the window. + * + * https://developers.google.com/cast/docs/web_receiver/basic + */ + cast: Cast | undefined; + /** + * A promise that allows us to ensure multiple requests to load are funneled + * through the same reified load. + */ + loader: Promise | undefined; + /** + * True if we have already attached listeners (i.e. if we have "started" the + * Chromecast SDK). + * + * Note that "stopping" the Chromecast SDK causes the Chromecast device to + * reload our tab, so this is a one way flag. 
The stop is something that'll
+     * only get triggered when we're actually running on a Chromecast since it
+     * always happens in response to a message handler.
+     */
+    haveStarted = false;
+    /**
+     * Cached result of the isChromecast test.
+     */
+    isChromecast: boolean | undefined;
+    /**
+     * A callback to invoke to get the pairing code when we get a new incoming
+     * pairing request.
+     */
+    pairingCode: (() => string | undefined) | undefined;
+    /**
+     * A callback to invoke to get the ID of the collection that is currently
+     * being shown (if any).
+     */
+    collectionID: (() => string | undefined) | undefined;
+}
+
+/** Singleton instance of {@link CastReceiver}. */
+const castReceiver = new CastReceiver();
+
+/**
+ * Listen for incoming messages on the given {@link cast} receiver, replying to
+ * each of them with a pairing code obtained using the given {@link pairingCode}
+ * callback. Phase 2 of the pairing protocol.
+ *
+ * Calling this function multiple times is fine. The first time around, the
+ * Chromecast SDK will be loaded and will start listening. Subsequently, each
+ * time this is called, we'll update the callbacks, but otherwise just return
+ * immediately (letting the already attached listeners do their thing).
+ *
+ * @param pairingCode A callback to invoke to get the pairing code when we get a
+ * new incoming pairing request.
+ *
+ * @param collectionID A callback to invoke to get the ID of the collection that
+ * is currently being shown (if any).
+ *
+ * See: [Note: Pairing protocol].
+ */
+export const advertiseOnChromecast = (
+    pairingCode: () => string | undefined,
+    collectionID: () => string | undefined,
+) => {
+    // Always update the callbacks.
+    castReceiver.pairingCode = pairingCode;
+    castReceiver.collectionID = collectionID;
+
+    // No-op if we're already running.
+ if (castReceiver.haveStarted) return; + + void loadingChromecastSDKIfNeeded().then((cast) => advertiseCode(cast)); +}; + +/** + * Load the Chromecast Web Receiver SDK and return a reference to the `cast` + * global object that the SDK attaches to the window. + * + * Calling this function multiple times is fine, once the Chromecast SDK is + * loaded it'll thereafter return the reference to the same object always. + */ +const loadingChromecastSDKIfNeeded = async (): Promise => { + if (castReceiver.cast) return castReceiver.cast; + if (castReceiver.loader) return await castReceiver.loader; + + castReceiver.loader = new Promise((resolve) => { + const script = document.createElement("script"); + script.src = + "https://www.gstatic.com/cast/sdk/libs/caf_receiver/v3/cast_receiver_framework.js"; + script.addEventListener("load", () => { + castReceiver.cast = cast; + resolve(cast); + }); + document.body.appendChild(script); + }); + + return await castReceiver.loader; +}; + +const advertiseCode = (cast: Cast) => { + if (castReceiver.haveStarted) { + // Multiple attempts raced to completion, ignore all but the first. + return; + } + + castReceiver.haveStarted = true; + + // Prepare the Chromecast "context". + const context = cast.framework.CastReceiverContext.getInstance(); + const namespace = "urn:x-cast:pair-request"; + + const options = new cast.framework.CastReceiverOptions(); + // We don't use the media features of the Cast SDK. + options.skipPlayersLoad = true; + // Do not stop the casting if the receiver is unreachable. A user should be + // able to start a cast on their phone and then put it away, leaving the + // cast running on their big screen. + options.disableIdleTimeout = true; + + type ListenerProps = { + senderId: string; + data: unknown; + }; + + // Reply with the code that we have if anyone asks over Chromecast. + const incomingMessageListener = ({ senderId, data }: ListenerProps) => { + // The collection ID with is currently paired (if any). 
+ const pairedCollectionID = castReceiver.collectionID?.(); + + // The collection ID in the request (if any). + const collectionID = + data && + typeof data == "object" && + typeof data["collectionID"] == "string" + ? data["collectionID"] + : undefined; + + // If the request does not have a collectionID (or if we're not showing + // anything currently), forego this check. + + if (collectionID && pairedCollectionID) { + // If we get another connection request for a _different_ collection + // ID, stop the app to allow the second device to reconnect using a + // freshly generated pairing code. + if (pairedCollectionID != collectionID) { + log.info(`request for a new collection ${collectionID}`); + context.stop(); + } else { + // Duplicate request for same collection that we're already + // showing. Ignore. + } + return; + } + + const code = castReceiver.pairingCode?.(); + if (!code) { + // No code, but if we're already showing a collection, then ignore. + if (pairedCollectionID) return; + + // Our caller waits until it has a pairing code before it calls + // `advertiseCode`, but there is still an edge case where we can + // find ourselves without a pairing code: + // + // 1. The current pairing code expires. We start the process to get + // a new one. + // + // 2. But before that happens, someone connects. + // + // The window where this can happen is short, so if we do find + // ourselves in this scenario, just shutdown. + log.error("got pairing request when refreshing pairing codes"); + context.stop(); + return; + } + + context.sendCustomMessage(namespace, senderId, { code }); + }; + + context.addCustomMessageListener( + namespace, + // We need to cast, the `senderId` is present in the message we get but + // not present in the TypeScript type. + incomingMessageListener as unknown as SystemEventHandler, + ); + + // Close the (chromecast) tab if the sender disconnects. + // + // Chromecast does a "shutdown" of our cast app when we call `context.stop`. 
+ // This translates into it closing the tab where it is showing our app. + context.addEventListener( + cast.framework.system.EventType.SENDER_DISCONNECTED, + () => context.stop(), + ); + + // Start listening for Chromecast connections. + context.start(options); +}; + +/** + * Return true if we're running on a Chromecast device. + * + * This allows changing our app's behaviour when we're running on Chromecast. + * Such checks are needed because during our testing we found that in practice, + * some processing is too heavy for Chromecast hardware (we tested with a 2nd + * gen device, this might not be true for newer variants). + * + * This variable is lazily updated when we enter {@link renderableImageURLs}. It + * is kept at the top level to avoid passing it around. + */ +export const isChromecast = () => { + let isCast = castReceiver.isChromecast; + if (isCast === undefined) { + isCast = window.navigator.userAgent.includes("CrKey"); + castReceiver.isChromecast = isCast; + } + return isCast; +}; diff --git a/web/apps/cast/src/services/detect-type.ts b/web/apps/cast/src/services/detect-type.ts index 187e19df8..c43529aae 100644 --- a/web/apps/cast/src/services/detect-type.ts +++ b/web/apps/cast/src/services/detect-type.ts @@ -9,6 +9,9 @@ import FileType from "file-type"; * * It first peeks into the file's initial contents to detect the MIME type. If * that doesn't give any results, it tries to deduce it from the file's name. 
+ * + * For the list of returned extensions, see (for our installed version): + * https://github.com/sindresorhus/file-type/blob/main/core.d.ts */ export const detectMediaMIMEType = async (file: File): Promise => { const chunkSizeForTypeDetection = 4100; diff --git a/web/apps/cast/src/services/pair.ts b/web/apps/cast/src/services/pair.ts new file mode 100644 index 000000000..36b54cf75 --- /dev/null +++ b/web/apps/cast/src/services/pair.ts @@ -0,0 +1,134 @@ +import log from "@/next/log"; +import { wait } from "@/utils/promise"; +import { boxSealOpen, toB64 } from "@ente/shared/crypto/internal/libsodium"; +import castGateway from "@ente/shared/network/cast"; +import _sodium from "libsodium-wrappers"; + +export interface Registration { + /** A pairing code shown on the screen. A client can use this to connect. */ + pairingCode: string; + /** The public part of the keypair we registered with the server. */ + publicKeyB64: string; + /** The private part of the keypair we registered with the server. */ + privateKeyB64: string; +} + +/** + * Register a keypair with the server and return a pairing code that can be used + * to connect to us. Phase 1 of the pairing protocol. + * + * [Note: Pairing protocol] + * + * The Chromecast Framework (represented here by our handle to the Chromecast + * Web SDK, {@link cast}) itself is used for only the initial handshake, none of + * the data, even encrypted passes over it thereafter. + * + * The pairing happens in two phases: + * + * Phase 1 - {@link register} + * + * 1. We (the receiver) generate a public/private keypair. and register the + * public part of it with museum. + * + * 2. Museum gives us a pairing "code" in lieu. Show this on the screen. + * + * Phase 2 - {@link advertiseCode} + * + * There are two ways the client can connect - either by sending us a blank + * message over the Chromecast protocol (to which we'll reply with the pairing + * code), or by the user manually entering the pairing code on their screen. + * + * 3. 
Listen for incoming messages over the Chromecast connection. + * + * 4. The client (our Web or mobile app) will connect using the "sender" + * Chromecast SDK. This will result in a bi-directional channel between us + * ("receiver") and the Ente client app ("sender"). + * + * 5. Thereafter, if at any time the sender disconnects, close the Chromecast + * context. This effectively shuts us down, causing the entire page to get + * reloaded. + * + * 6. After connecting, the sender sends an (empty) message. We reply by sending + * them a message containing the pairing code. This exchange is the only data + * that traverses over the Chromecast connection. + * + * Once the client gets the pairing code (via Chromecast or manual entry), + * they'll let museum know. So in parallel with Phase 2, we perform Phase 3. + * + * Phase 3 - {@link getCastData} in a setInterval. + * + * 7. Keep polling museum to ask it if anyone has claimed that code we vended + * out and used that to send us an payload encrypted using our public key. + * + * 8. When that happens, decrypt that data with our private key, and return this + * payload. It is a JSON object that contains the data we need to initiate a + * slideshow for a particular Ente collection. + * + * Phase 1 (Steps 1 and 2) are done by the {@link register} function, which + * returns a {@link Registration}. + * + * At this time we start showing the pairing code on the UI, and start phase 2, + * {@link advertiseCode} to vend out the pairing code to Chromecast connections. + * + * In parallel, we start Phase 3, calling {@link getCastData} in a loop. Once we + * get a response, we decrypt it to get the data we need to start the slideshow. + */ +export const register = async (): Promise => { + // Generate keypair. + const keypair = await generateKeyPair(); + const publicKeyB64 = await toB64(keypair.publicKey); + const privateKeyB64 = await toB64(keypair.privateKey); + + // Register keypair with museum to get a pairing code. 
+ let pairingCode: string; + // eslint has fixed this spurious warning, but we're not on the latest + // version yet, so add a disable. + // https://github.com/eslint/eslint/pull/18286 + /* eslint-disable no-constant-condition */ + while (true) { + try { + pairingCode = await castGateway.registerDevice(publicKeyB64); + } catch (e) { + log.error("Failed to register public key with server", e); + } + if (pairingCode) break; + // Schedule retry after 10 seconds. + await wait(10000); + } + + return { pairingCode, publicKeyB64, privateKeyB64 }; +}; + +/** + * Ask museum if anyone has sent a (encrypted) payload corresponding to the + * given pairing code. If so, decrypt it using our private key and return the + * JSON payload. Phase 3 of the pairing protocol. + * + * Returns `undefined` if there hasn't been any data obtained yet. + * + * See: [Note: Pairing protocol]. + */ +export const getCastData = async (registration: Registration) => { + const { pairingCode, publicKeyB64, privateKeyB64 } = registration; + + // The client will send us the encrypted payload using our public key that + // we registered with museum. + const encryptedCastData = await castGateway.getCastData(pairingCode); + if (!encryptedCastData) return; + + // Decrypt it using the private key of the pair and return the plaintext + // payload, which'll be a JSON object containing the data we need to start a + // slideshow for some collection. 
+ const decryptedCastData = await boxSealOpen( + encryptedCastData, + publicKeyB64, + privateKeyB64, + ); + + return JSON.parse(atob(decryptedCastData)); +}; + +const generateKeyPair = async () => { + await _sodium.ready; + return _sodium.crypto_box_keypair(); +}; diff --git a/web/apps/cast/src/services/render.ts b/web/apps/cast/src/services/render.ts new file mode 100644 index 000000000..79065c2af --- /dev/null +++ b/web/apps/cast/src/services/render.ts @@ -0,0 +1,352 @@ +import { FILE_TYPE } from "@/media/file-type"; +import { isHEICExtension, isNonWebImageFileExtension } from "@/media/formats"; +import { decodeLivePhoto } from "@/media/live-photo"; +import { createHEICConvertComlinkWorker } from "@/media/worker/heic-convert"; +import type { DedicatedHEICConvertWorker } from "@/media/worker/heic-convert.worker"; +import { nameAndExtension } from "@/next/file"; +import log from "@/next/log"; +import type { ComlinkWorker } from "@/next/worker/comlink-worker"; +import { shuffled } from "@/utils/array"; +import { wait } from "@/utils/promise"; +import ComlinkCryptoWorker from "@ente/shared/crypto"; +import { ApiError } from "@ente/shared/error"; +import HTTPService from "@ente/shared/network/HTTPService"; +import { + getCastFileURL, + getCastThumbnailURL, + getEndpoint, +} from "@ente/shared/network/api"; +import type { AxiosResponse } from "axios"; +import type { CastData } from "services/cast-data"; +import { detectMediaMIMEType } from "services/detect-type"; +import { + EncryptedEnteFile, + EnteFile, + FileMagicMetadata, + FilePublicMagicMetadata, +} from "types/file"; +import { isChromecast } from "./chromecast"; + +/** + * If we're using HEIC conversion, then this variable caches the comlink web + * worker we're using to perform the actual conversion. 
+ */
+let heicWorker: ComlinkWorker | undefined;
+
+/**
+ * An async generator function that loops through all the files in the
+ * collection, returning renderable image URLs to each that can be displayed in
+ * a slideshow.
+ *
+ * Each time it resolves with a (data) URL for the slideshow image to show next.
+ *
+ * If there are no renderable images in the collection, the sequence ends by
+ * yielding `{done: true}`.
+ *
+ * Otherwise when the generator reaches the end of the collection, it starts
+ * from the beginning again. So the sequence will continue indefinitely for
+ * non-empty collections.
+ *
+ * The generator ignores errors in the fetching and decoding of individual
+ * images in the collection, skipping the erroneous ones and moving onward to
+ * the next one.
+ *
+ * - It will however throw if there are errors when getting the collection
+ *   itself. This can happen both the first time, or when we are about to loop
+ *   around to the start of the collection.
+ *
+ * - It will also throw if three consecutive images fail.
+ *
+ * @param castData The collection to show and credentials to fetch the files
+ * within it.
+ */
+export const imageURLGenerator = async function* (castData: CastData) {
+    const { collectionKey, castToken } = castData;
+
+    /**
+     * Keep a FIFO queue of the URLs that we've vended out recently so that we
+     * can revoke those that are not being shown anymore.
+     */
+    const previousURLs: string[] = [];
+
+    /** Number of milliseconds to keep the slide on the screen. */
+    const slideDuration = 12000; /* 12 s */
+
+    /**
+     * Time when we last yielded.
+     *
+     * We use this to keep a roughly periodic spacing between yields that
+     * accounts for the time we spend fetching and processing the images.
+ */
+    let lastYieldTime = Date.now();
+
+    // The first time around regress the lastYieldTime into the past so that
+    // we don't wait around too long for the first slide (we do want to wait a
+    // bit, for the user to see the checkmark animation as reassurance).
+    lastYieldTime -= slideDuration - 2500; /* wait at most 2.5 s */
+
+    /**
+     * Number of times we have caught an exception while trying to generate an
+     * image URL for individual files.
+     *
+     * When this happens three times consecutively, we throw.
+     */
+    let consecutiveFailures = 0;
+
+    while (true) {
+        const encryptedFiles = shuffled(
+            await getEncryptedCollectionFiles(castToken),
+        );
+
+        let haveEligibleFiles = false;
+
+        for (const encryptedFile of encryptedFiles) {
+            const file = await decryptEnteFile(encryptedFile, collectionKey);
+
+            if (!isFileEligible(file)) continue;
+
+            let url: string;
+            try {
+                url = await createRenderableURL(castToken, file);
+                consecutiveFailures = 0;
+                haveEligibleFiles = true;
+            } catch (e) {
+                consecutiveFailures += 1;
+                // 1, 2, bang!
+                if (consecutiveFailures == 3) throw e;
+
+                if (e instanceof ApiError && e.httpStatusCode == 401) {
+                    // The token has expired. This can happen, e.g., if the user
+                    // opens the dialog to cast again, causing the client to
+                    // invalidate existing tokens.
+                    //
+                    // Rethrow the error, which will bring us back to the
+                    // pairing page.
+                    throw e;
+                }
+
+                // On all other errors (including temporary network issues),
+                // skip this file and move on to the next one.
+                log.error("Skipping unrenderable file", e);
+                await wait(100); /* Breathe */
+                continue;
+            }
+
+            // The last element of previousURLs is the URL that is currently
+            // being shown on screen.
+            //
+            // The last to last element is the one that was shown prior to that,
+            // and now can be safely revoked.
+ if (previousURLs.length > 1) + URL.revokeObjectURL(previousURLs.shift()); + + previousURLs.push(url); + + const elapsedTime = Date.now() - lastYieldTime; + if (elapsedTime > 0 && elapsedTime < slideDuration) + await wait(slideDuration - elapsedTime); + + lastYieldTime = Date.now(); + yield url; + } + + // This collection does not have any files that we can show. + if (!haveEligibleFiles) return; + } +}; + +/** + * Fetch the list of non-deleted files in the given collection. + * + * The returned files are not decrypted yet, so their metadata will not be + * readable. + */ +const getEncryptedCollectionFiles = async ( + castToken: string, +): Promise => { + let files: EncryptedEnteFile[] = []; + let sinceTime = 0; + let resp: AxiosResponse; + do { + resp = await HTTPService.get( + `${getEndpoint()}/cast/diff`, + { sinceTime }, + { + "Cache-Control": "no-cache", + "X-Cast-Access-Token": castToken, + }, + ); + const diff = resp.data.diff; + files = files.concat(diff.filter((file: EnteFile) => !file.isDeleted)); + sinceTime = diff.reduce( + (max: number, file: EnteFile) => Math.max(max, file.updationTime), + sinceTime, + ); + } while (resp.data.hasMore); + return files; +}; + +/** + * Decrypt the given {@link EncryptedEnteFile}, returning a {@link EnteFile}. 
+ */ +const decryptEnteFile = async ( + encryptedFile: EncryptedEnteFile, + collectionKey: string, +): Promise => { + const worker = await ComlinkCryptoWorker.getInstance(); + const { + encryptedKey, + keyDecryptionNonce, + metadata, + magicMetadata, + pubMagicMetadata, + ...restFileProps + } = encryptedFile; + const fileKey = await worker.decryptB64( + encryptedKey, + keyDecryptionNonce, + collectionKey, + ); + const fileMetadata = await worker.decryptMetadata( + metadata.encryptedData, + metadata.decryptionHeader, + fileKey, + ); + let fileMagicMetadata: FileMagicMetadata; + let filePubMagicMetadata: FilePublicMagicMetadata; + if (magicMetadata?.data) { + fileMagicMetadata = { + ...encryptedFile.magicMetadata, + data: await worker.decryptMetadata( + magicMetadata.data, + magicMetadata.header, + fileKey, + ), + }; + } + if (pubMagicMetadata?.data) { + filePubMagicMetadata = { + ...pubMagicMetadata, + data: await worker.decryptMetadata( + pubMagicMetadata.data, + pubMagicMetadata.header, + fileKey, + ), + }; + } + const file = { + ...restFileProps, + key: fileKey, + metadata: fileMetadata, + magicMetadata: fileMagicMetadata, + pubMagicMetadata: filePubMagicMetadata, + }; + if (file.pubMagicMetadata?.data.editedTime) { + file.metadata.creationTime = file.pubMagicMetadata.data.editedTime; + } + if (file.pubMagicMetadata?.data.editedName) { + file.metadata.title = file.pubMagicMetadata.data.editedName; + } + return file; +}; + +const isFileEligible = (file: EnteFile) => { + if (!isImageOrLivePhoto(file)) return false; + if (file.info.fileSize > 100 * 1024 * 1024) return false; + + // This check is fast but potentially incorrect because in practice we do + // encounter files that are incorrectly named and have a misleading + // extension. To detect the actual type, we need to sniff the MIME type, but + // that requires downloading and decrypting the file first. 
+ const [, extension] = nameAndExtension(file.metadata.title); + if (isNonWebImageFileExtension(extension)) { + // Of the known non-web types, we support HEIC. + return isHEICExtension(extension); + } + + return true; +}; + +const isImageOrLivePhoto = (file: EnteFile) => { + const fileType = file.metadata.fileType; + return fileType == FILE_TYPE.IMAGE || fileType == FILE_TYPE.LIVE_PHOTO; +}; + +export const heicToJPEG = async (heicBlob: Blob) => { + let worker = heicWorker; + if (!worker) heicWorker = worker = createHEICConvertComlinkWorker(); + return await (await worker.remote).heicToJPEG(heicBlob); +}; + +/** + * Create and return a new data URL that can be used to show the given + * {@link file} in our slideshow image viewer. + * + * Once we're done showing the file, the URL should be revoked using + * {@link URL.revokeObjectURL} to free up browser resources. + */ +const createRenderableURL = async (castToken: string, file: EnteFile) => { + const imageBlob = await renderableImageBlob(castToken, file); + return URL.createObjectURL(imageBlob); +}; + +const renderableImageBlob = async (castToken: string, file: EnteFile) => { + const shouldUseThumbnail = isChromecast(); + + let blob = await downloadFile(castToken, file, shouldUseThumbnail); + + let fileName = file.metadata.title; + if (!shouldUseThumbnail && file.metadata.fileType == FILE_TYPE.LIVE_PHOTO) { + const { imageData, imageFileName } = await decodeLivePhoto( + fileName, + blob, + ); + fileName = imageFileName; + blob = new Blob([imageData]); + } + + // We cannot rely on the file's extension to detect the file type, some + // files are incorrectly named. So use a MIME type sniffer first, but if + // that fails than fallback to the extension. 
+ const mimeType = await detectMediaMIMEType(new File([blob], fileName)); + if (!mimeType) + throw new Error(`Could not detect MIME type for file ${fileName}`); + + if (mimeType == "image/heif" || mimeType == "image/heic") + blob = await heicToJPEG(blob); + + return new Blob([blob], { type: mimeType }); +}; + +const downloadFile = async ( + castToken: string, + file: EnteFile, + shouldUseThumbnail: boolean, +) => { + if (!isImageOrLivePhoto(file)) + throw new Error("Can only cast images and live photos"); + + const url = shouldUseThumbnail + ? getCastThumbnailURL(file.id) + : getCastFileURL(file.id); + const resp = await HTTPService.get( + url, + null, + { + "X-Cast-Access-Token": castToken, + }, + { responseType: "arraybuffer" }, + ); + if (resp.data === undefined) throw new Error(`Failed to get ${url}`); + + const cryptoWorker = await ComlinkCryptoWorker.getInstance(); + const decrypted = await cryptoWorker.decryptFile( + new Uint8Array(resp.data), + await cryptoWorker.fromB64( + shouldUseThumbnail + ? 
file.thumbnail.decryptionHeader + : file.file.decryptionHeader, + ), + file.key, + ); + return new Response(decrypted).blob(); +}; diff --git a/web/apps/cast/src/types/collection.ts b/web/apps/cast/src/types/collection.ts deleted file mode 100644 index c495937ae..000000000 --- a/web/apps/cast/src/types/collection.ts +++ /dev/null @@ -1,100 +0,0 @@ -import { EnteFile } from "types/file"; -import { - EncryptedMagicMetadata, - MagicMetadataCore, - SUB_TYPE, - VISIBILITY_STATE, -} from "types/magicMetadata"; - -export enum COLLECTION_ROLE { - VIEWER = "VIEWER", - OWNER = "OWNER", - COLLABORATOR = "COLLABORATOR", - UNKNOWN = "UNKNOWN", -} - -export interface CollectionUser { - id: number; - email: string; - role: COLLECTION_ROLE; -} - -enum CollectionType { - folder = "folder", - favorites = "favorites", - album = "album", - uncategorized = "uncategorized", -} - -export interface EncryptedCollection { - id: number; - owner: CollectionUser; - // collection name was unencrypted in the past, so we need to keep it as optional - name?: string; - encryptedKey: string; - keyDecryptionNonce: string; - encryptedName: string; - nameDecryptionNonce: string; - type: CollectionType; - attributes: collectionAttributes; - sharees: CollectionUser[]; - publicURLs?: unknown; - updationTime: number; - isDeleted: boolean; - magicMetadata: EncryptedMagicMetadata; - pubMagicMetadata: EncryptedMagicMetadata; - sharedMagicMetadata: EncryptedMagicMetadata; -} - -export interface Collection - extends Omit< - EncryptedCollection, - | "encryptedKey" - | "keyDecryptionNonce" - | "encryptedName" - | "nameDecryptionNonce" - | "magicMetadata" - | "pubMagicMetadata" - | "sharedMagicMetadata" - > { - key: string; - name: string; - magicMetadata: CollectionMagicMetadata; - pubMagicMetadata: CollectionPublicMagicMetadata; - sharedMagicMetadata: CollectionShareeMagicMetadata; -} - -// define a method on Collection interface to return the sync key as collection.id-time -// this is used to store the last 
sync time of a collection in local storage - -export interface collectionAttributes { - encryptedPath?: string; - pathDecryptionNonce?: string; -} - -export type CollectionToFileMap = Map; - -export interface CollectionMagicMetadataProps { - visibility?: VISIBILITY_STATE; - subType?: SUB_TYPE; - order?: number; -} - -export type CollectionMagicMetadata = - MagicMetadataCore; - -export interface CollectionShareeMetadataProps { - visibility?: VISIBILITY_STATE; -} -export type CollectionShareeMagicMetadata = - MagicMetadataCore; - -export interface CollectionPublicMagicMetadataProps { - asc?: boolean; - coverID?: number; -} - -export type CollectionPublicMagicMetadata = - MagicMetadataCore; - -export type CollectionFilesCount = Map; diff --git a/web/apps/cast/src/utils/file.ts b/web/apps/cast/src/utils/file.ts deleted file mode 100644 index 91961b7be..000000000 --- a/web/apps/cast/src/utils/file.ts +++ /dev/null @@ -1,144 +0,0 @@ -import { FILE_TYPE } from "@/media/file-type"; -import { decodeLivePhoto } from "@/media/live-photo"; -import log from "@/next/log"; -import ComlinkCryptoWorker from "@ente/shared/crypto"; -import { RAW_FORMATS } from "constants/upload"; -import CastDownloadManager from "services/castDownloadManager"; -import { detectMediaMIMEType } from "services/detect-type"; -import { - EncryptedEnteFile, - EnteFile, - FileMagicMetadata, - FilePublicMagicMetadata, -} from "types/file"; - -export function sortFiles(files: EnteFile[], sortAsc = false) { - // sort based on the time of creation time of the file, - // for files with same creation time, sort based on the time of last modification - const factor = sortAsc ? 
-1 : 1; - return files.sort((a, b) => { - if (a.metadata.creationTime === b.metadata.creationTime) { - return ( - factor * - (b.metadata.modificationTime - a.metadata.modificationTime) - ); - } - return factor * (b.metadata.creationTime - a.metadata.creationTime); - }); -} - -export async function decryptFile( - file: EncryptedEnteFile, - collectionKey: string, -): Promise { - try { - const worker = await ComlinkCryptoWorker.getInstance(); - const { - encryptedKey, - keyDecryptionNonce, - metadata, - magicMetadata, - pubMagicMetadata, - ...restFileProps - } = file; - const fileKey = await worker.decryptB64( - encryptedKey, - keyDecryptionNonce, - collectionKey, - ); - const fileMetadata = await worker.decryptMetadata( - metadata.encryptedData, - metadata.decryptionHeader, - fileKey, - ); - let fileMagicMetadata: FileMagicMetadata; - let filePubMagicMetadata: FilePublicMagicMetadata; - if (magicMetadata?.data) { - fileMagicMetadata = { - ...file.magicMetadata, - data: await worker.decryptMetadata( - magicMetadata.data, - magicMetadata.header, - fileKey, - ), - }; - } - if (pubMagicMetadata?.data) { - filePubMagicMetadata = { - ...pubMagicMetadata, - data: await worker.decryptMetadata( - pubMagicMetadata.data, - pubMagicMetadata.header, - fileKey, - ), - }; - } - return { - ...restFileProps, - key: fileKey, - metadata: fileMetadata, - magicMetadata: fileMagicMetadata, - pubMagicMetadata: filePubMagicMetadata, - }; - } catch (e) { - log.error("file decryption failed", e); - throw e; - } -} - -export function generateStreamFromArrayBuffer(data: Uint8Array) { - return new ReadableStream({ - async start(controller: ReadableStreamDefaultController) { - controller.enqueue(data); - controller.close(); - }, - }); -} - -export function isRawFileFromFileName(fileName: string) { - for (const rawFormat of RAW_FORMATS) { - if (fileName.toLowerCase().endsWith(rawFormat)) { - return true; - } - } - return false; -} - -export function mergeMetadata(files: EnteFile[]): EnteFile[] { - 
return files.map((file) => { - if (file.pubMagicMetadata?.data.editedTime) { - file.metadata.creationTime = file.pubMagicMetadata.data.editedTime; - } - if (file.pubMagicMetadata?.data.editedName) { - file.metadata.title = file.pubMagicMetadata.data.editedName; - } - - return file; - }); -} - -export const getPreviewableImage = async ( - file: EnteFile, - castToken: string, -): Promise => { - try { - let fileBlob = await new Response( - await CastDownloadManager.downloadFile(castToken, file), - ).blob(); - if (file.metadata.fileType === FILE_TYPE.LIVE_PHOTO) { - const { imageData } = await decodeLivePhoto( - file.metadata.title, - fileBlob, - ); - fileBlob = new Blob([imageData]); - } - const mimeType = await detectMediaMIMEType( - new File([fileBlob], file.metadata.title), - ); - if (!mimeType) return undefined; - fileBlob = new Blob([fileBlob], { type: mimeType }); - return fileBlob; - } catch (e) { - log.error("failed to download file", e); - } -}; diff --git a/web/apps/cast/src/utils/useCastReceiver.tsx b/web/apps/cast/src/utils/useCastReceiver.tsx deleted file mode 100644 index ff17b0910..000000000 --- a/web/apps/cast/src/utils/useCastReceiver.tsx +++ /dev/null @@ -1,43 +0,0 @@ -declare const cast: any; - -import { useEffect, useState } from "react"; - -type Receiver = { - cast: typeof cast; -}; - -const load = (() => { - let promise: Promise | null = null; - - return () => { - if (promise === null) { - promise = new Promise((resolve) => { - const script = document.createElement("script"); - script.src = - "https://www.gstatic.com/cast/sdk/libs/caf_receiver/v3/cast_receiver_framework.js"; - - script.addEventListener("load", () => { - resolve({ - cast, - }); - }); - document.body.appendChild(script); - }); - } - return promise; - }; -})(); - -export const useCastReceiver = () => { - const [receiver, setReceiver] = useState({ - cast: null, - }); - - useEffect(() => { - load().then((receiver) => { - setReceiver(receiver); - }); - }); - - return receiver; -}; diff 
--git a/web/apps/payments/.env b/web/apps/payments/.env new file mode 100644 index 000000000..3f3b1cc9a --- /dev/null +++ b/web/apps/payments/.env @@ -0,0 +1 @@ +NEXT_TELEMETRY_DISABLED = 1 diff --git a/web/apps/photos/.env b/web/apps/photos/.env index a039e9105..978c67776 100644 --- a/web/apps/photos/.env +++ b/web/apps/photos/.env @@ -88,3 +88,5 @@ # NEXT_PUBLIC_ENTE_TEST_EXPECTED_JSON=`cat path/to/expected.json` yarn dev # # NEXT_PUBLIC_ENTE_TEST_EXPECTED_JSON = {} + +NEXT_TELEMETRY_DISABLED = 1 diff --git a/web/apps/photos/package.json b/web/apps/photos/package.json index 1196b4ddf..ac658c0ea 100644 --- a/web/apps/photos/package.json +++ b/web/apps/photos/package.json @@ -23,13 +23,11 @@ "ffmpeg-wasm": "file:./thirdparty/ffmpeg-wasm", "formik": "^2.1.5", "hdbscan": "0.0.1-alpha.5", - "heic-convert": "^2.0.0", "idb": "^7.1.1", "leaflet": "^1.9.4", "leaflet-defaulticon-compatibility": "^0.1.1", "localforage": "^1.9.0", "memoize-one": "^6.0.0", - "mime-types": "^2.1.35", "ml-matrix": "^6.10.4", "otpauth": "^9.0.2", "p-debounce": "^4.0.0", @@ -37,7 +35,7 @@ "photoswipe": "file:./thirdparty/photoswipe", "piexifjs": "^1.0.6", "pure-react-carousel": "^1.30.1", - "react-dropzone": "^11.2.4", + "react-dropzone": "^14.2", "react-otp-input": "^2.3.1", "react-select": "^4.3.1", "react-top-loading-bar": "^2.0.1", diff --git a/web/apps/photos/src/components/Collections/CollectionOptions/AlbumCastDialog.tsx b/web/apps/photos/src/components/Collections/CollectionOptions/AlbumCastDialog.tsx index 3d9d06166..8b92f1cbb 100644 --- a/web/apps/photos/src/components/Collections/CollectionOptions/AlbumCastDialog.tsx +++ b/web/apps/photos/src/components/Collections/CollectionOptions/AlbumCastDialog.tsx @@ -32,7 +32,11 @@ declare global { } } -export default function AlbumCastDialog(props: Props) { +export default function AlbumCastDialog({ + show, + onHide, + currentCollection, +}: Props) { const [view, setView] = useState< "choose" | "auto" | "pin" | "auto-cast-error" >("choose"); @@ 
-51,7 +55,7 @@ export default function AlbumCastDialog(props: Props) { ) => { try { await doCast(value.trim()); - props.onHide(); + onHide(); } catch (e) { const error = e as Error; let fieldError: string; @@ -80,8 +84,8 @@ export default function AlbumCastDialog(props: Props) { // ok, they exist. let's give them the good stuff. const payload = JSON.stringify({ castToken: castToken, - collectionID: props.currentCollection.id, - collectionKey: props.currentCollection.key, + collectionID: currentCollection.id, + collectionKey: currentCollection.key, }); const encryptedPayload = await boxSeal(btoa(payload), tvPublicKeyB64); @@ -89,7 +93,7 @@ export default function AlbumCastDialog(props: Props) { await castGateway.publishCastPayload( pin, encryptedPayload, - props.currentCollection.id, + currentCollection.id, castToken, ); }; @@ -119,7 +123,7 @@ export default function AlbumCastDialog(props: Props) { doCast(code) .then(() => { setView("choose"); - props.onHide(); + onHide(); }) .catch((e) => { setView("auto-cast-error"); @@ -129,8 +133,9 @@ export default function AlbumCastDialog(props: Props) { }, ); + const collectionID = currentCollection.id; session - .sendMessage("urn:x-cast:pair-request", {}) + .sendMessage("urn:x-cast:pair-request", { collectionID }) .then(() => { log.debug(() => "Message sent successfully"); }) @@ -142,16 +147,16 @@ export default function AlbumCastDialog(props: Props) { }, [view]); useEffect(() => { - if (props.show) { + if (show) { castGateway.revokeAllTokens(); } - }, [props.show]); + }, [show]); return ( {t("LEAVE_ALBUM")} + } + onClick={handleCollectionAction( + CollectionActions.SHOW_ALBUM_CAST_DIALOG, + false, + )} + > + {t("CAST_ALBUM_TO_TV")} + ); } diff --git a/web/apps/photos/src/components/Directory/index.tsx b/web/apps/photos/src/components/Directory/index.tsx index a99581134..2fc4be58e 100644 --- a/web/apps/photos/src/components/Directory/index.tsx +++ b/web/apps/photos/src/components/Directory/index.tsx @@ -1,8 +1,7 @@ import { 
ensureElectron } from "@/next/electron"; import log from "@/next/log"; import LinkButton from "@ente/shared/components/LinkButton"; -import { Tooltip } from "@mui/material"; -import { styled } from "@mui/material/styles"; +import { Tooltip, styled } from "@mui/material"; const DirectoryPathContainer = styled(LinkButton)( ({ width }) => ` diff --git a/web/apps/photos/src/components/PhotoList/dedupe.tsx b/web/apps/photos/src/components/PhotoList/dedupe.tsx index 7181f6267..61b9958ef 100644 --- a/web/apps/photos/src/components/PhotoList/dedupe.tsx +++ b/web/apps/photos/src/components/PhotoList/dedupe.tsx @@ -19,7 +19,7 @@ import { } from "react-window"; import { Duplicate } from "services/deduplicationService"; import { EnteFile } from "types/file"; -import { convertBytesToHumanReadable } from "utils/file"; +import { formattedByteSize } from "utils/units"; export enum ITEM_TYPE { TIME = "TIME", @@ -304,10 +304,13 @@ export function DedupePhotoList({ switch (listItem.itemType) { case ITEM_TYPE.SIZE_AND_COUNT: return ( + /*TODO: Translate the full phrase instead of piecing + together parts like this See: + https://crowdin.com/editor/ente-photos-web/9/enus-de?view=comfortable&filter=basic&value=0#8104 + */ {listItem.fileCount} {t("FILES")},{" "} - {convertBytesToHumanReadable(listItem.fileSize || 0)}{" "} - {t("EACH")} + {formattedByteSize(listItem.fileSize || 0)} {t("EACH")} ); case ITEM_TYPE.FILE: { diff --git a/web/apps/photos/src/components/PhotoList/index.tsx b/web/apps/photos/src/components/PhotoList/index.tsx index 4803995d4..5ac6b263e 100644 --- a/web/apps/photos/src/components/PhotoList/index.tsx +++ b/web/apps/photos/src/components/PhotoList/index.tsx @@ -22,9 +22,9 @@ import { areEqual, } from "react-window"; import { EnteFile } from "types/file"; -import { convertBytesToHumanReadable } from "utils/file"; import { handleSelectCreator } from "utils/photoFrame"; import { PublicCollectionGalleryContext } from "utils/publicCollectionGallery"; +import { 
formattedByteSize } from "utils/units"; const A_DAY = 24 * 60 * 60 * 1000; const FOOTER_HEIGHT = 90; @@ -829,8 +829,7 @@ export function PhotoList({ return ( {listItem.fileCount} {t("FILES")},{" "} - {convertBytesToHumanReadable(listItem.fileSize || 0)}{" "} - {t("EACH")} + {formattedByteSize(listItem.fileSize || 0)} {t("EACH")} ); case ITEM_TYPE.FILE: { diff --git a/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderFileName.tsx b/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderFileName.tsx index 399051185..e9e27d55e 100644 --- a/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderFileName.tsx +++ b/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderFileName.tsx @@ -7,8 +7,8 @@ import VideocamOutlined from "@mui/icons-material/VideocamOutlined"; import Box from "@mui/material/Box"; import { useEffect, useState } from "react"; import { EnteFile } from "types/file"; -import { makeHumanReadableStorage } from "utils/billing"; import { changeFileName, updateExistingFilePubMetadata } from "utils/file"; +import { formattedByteSize } from "utils/units"; import { FileNameEditDialog } from "./FileNameEditDialog"; import InfoItem from "./InfoItem"; @@ -33,7 +33,7 @@ const getCaption = (file: EnteFile, parsedExifData) => { captionParts.push(resolution); } if (fileSize) { - captionParts.push(makeHumanReadableStorage(fileSize)); + captionParts.push(formattedByteSize(fileSize)); } return ( diff --git a/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx b/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx index 42edddbf1..c4e1f5854 100644 --- a/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx +++ b/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx @@ -1,24 +1,6 @@ +import { nameAndExtension } from "@/next/file"; import log from "@/next/log"; -import { - Backdrop, - Box, - CircularProgress, - IconButton, - Tab, - Tabs, - Typography, -} from "@mui/material"; -import 
{ - Dispatch, - MutableRefObject, - SetStateAction, - createContext, - useContext, - useEffect, - useRef, - useState, -} from "react"; - +import { ensure } from "@/utils/ensure"; import { CenteredFlex, HorizontalFlex, @@ -32,6 +14,15 @@ import CropIcon from "@mui/icons-material/Crop"; import CropOriginalIcon from "@mui/icons-material/CropOriginal"; import DownloadIcon from "@mui/icons-material/Download"; import MenuIcon from "@mui/icons-material/Menu"; +import { + Backdrop, + Box, + CircularProgress, + IconButton, + Tab, + Tabs, + Typography, +} from "@mui/material"; import { EnteDrawer } from "components/EnteDrawer"; import { EnteMenuItem } from "components/Menu/EnteMenuItem"; import MenuItemDivider from "components/Menu/MenuItemDivider"; @@ -39,10 +30,18 @@ import { MenuItemGroup } from "components/Menu/MenuItemGroup"; import MenuSectionTitle from "components/Menu/MenuSectionTitle"; import { CORNER_THRESHOLD, FILTER_DEFAULT_VALUES } from "constants/photoEditor"; import { t } from "i18next"; -import mime from "mime-types"; import { AppContext } from "pages/_app"; +import { + Dispatch, + MutableRefObject, + SetStateAction, + createContext, + useContext, + useEffect, + useRef, + useState, +} from "react"; import { getLocalCollections } from "services/collectionService"; -import { detectFileTypeInfo } from "services/detect-type"; import downloadManager from "services/download"; import uploadManager from "services/upload/uploadManager"; import { EnteFile } from "types/file"; @@ -72,13 +71,6 @@ export const ImageEditorOverlayContext = createContext( type OperationTab = "crop" | "transform" | "colours"; -const getEditedFileName = (fileName: string) => { - const fileNameParts = fileName.split("."); - const extension = fileNameParts.pop(); - const editedFileName = `${fileNameParts.join(".")}-edited.${extension}`; - return editedFileName; -}; - export interface CropBoxProps { x: number; y: number; @@ -94,6 +86,10 @@ const ImageEditorOverlay = (props: IProps) => { const 
parentRef = useRef(null); const [fileURL, setFileURL] = useState(""); + // The MIME type of the original file that we are editing. + // + // It _should_ generally be present, but it is not guaranteed to be. + const [mimeType, setMIMEType] = useState(); const [currentRotationAngle, setCurrentRotationAngle] = useState(0); @@ -372,6 +368,10 @@ const ImageEditorOverlay = (props: IProps) => { ); img.src = srcURLs.url as string; setFileURL(srcURLs.url as string); + // We're casting the srcURLs.url to string above, i.e. this code + // is not meant to run for the live photos scenario. For images, + // we usually will have the mime type. + setMIMEType(srcURLs.mimeType); } else { img.src = fileURL; } @@ -430,37 +430,6 @@ const ImageEditorOverlay = (props: IProps) => { loadCanvas(); }, [props.show, props.file]); - const exportCanvasToBlob = (): Promise => { - try { - const canvas = originalSizeCanvasRef.current; - if (!canvas) return; - - const mimeType = mime.lookup(props.file.metadata.title); - - const image = new Image(); - image.src = canvas.toDataURL(); - - const context = canvas.getContext("2d"); - if (!context) return; - return new Promise((resolve) => { - canvas.toBlob(resolve, mimeType); - }); - } catch (e) { - log.error("Error exporting canvas to blob", e); - throw e; - } - }; - - const getEditedFile = async () => { - const blob = await exportCanvasToBlob(); - if (!blob) { - throw Error("no blob"); - } - const editedFileName = getEditedFileName(props.file.metadata.title); - const editedFile = new File([blob], editedFileName); - return editedFile; - }; - const handleClose = () => { setFileURL(null); props.onClose(); @@ -480,25 +449,23 @@ const ImageEditorOverlay = (props: IProps) => { return <>; } - const downloadEditedPhoto = async () => { - try { - if (!canvasRef.current) return; + const getEditedFile = async () => { + const originalSizeCanvas = ensure(originalSizeCanvasRef.current); + const originalFileName = props.file.metadata.title; + return 
canvasToFile(originalSizeCanvas, originalFileName, mimeType); + }; - const editedFile = await getEditedFile(); - const fileType = await detectFileTypeInfo(editedFile); - const tempImgURL = URL.createObjectURL( - new Blob([editedFile], { type: fileType.mimeType }), - ); - downloadUsingAnchor(tempImgURL, editedFile.name); - } catch (e) { - log.error("Error downloading edited photo", e); - } + const downloadEditedPhoto = async () => { + if (!canvasRef.current) return; + + const f = await getEditedFile(); + // Revokes the URL after downloading. + downloadUsingAnchor(URL.createObjectURL(f), f.name); }; const saveCopyToEnte = async () => { + if (!canvasRef.current) return; try { - if (!canvasRef.current) return; - const collections = await getLocalCollections(); const collection = collections.find( @@ -678,7 +645,7 @@ const ImageEditorOverlay = (props: IProps) => { setCurrentTab(value); }} > - + { }; export default ImageEditorOverlay; + +/** + * Create a new {@link File} with the contents of the given canvas. + * + * @param canvas A {@link HTMLCanvasElement} whose contents we want to download + * as a file. + * + * @param originalFileName The name of the original file which was used to seed + * the canvas. This will be used as a base name for the generated file (with an + * "-edited" suffix). + * + * @param originalMIMEType The MIME type of the original file which was used to + * seed the canvas. When possible, we try to download a file in the same format, + * but this is not guaranteed and depends on browser support. If the original + * MIME type can not be preserved, a PNG file will be downloaded. + */ +const canvasToFile = async ( + canvas: HTMLCanvasElement, + originalFileName: string, + originalMIMEType?: string, +): Promise => { + const image = new Image(); + image.src = canvas.toDataURL(); + + // Browsers are required to support "image/png". They may also support + // "image/jpeg" and "image/webp". 
Potentially they may even support more + // formats, but to keep this scoped we limit to these three. + let [mimeType, extension] = ["image/png", "png"]; + switch (originalMIMEType) { + case "image/jpeg": + mimeType = originalMIMEType; + extension = "jpeg"; + break; + case "image/webp": + mimeType = originalMIMEType; + extension = "webp"; + break; + default: + break; + } + + const blob = ensure( + await new Promise((resolve) => canvas.toBlob(resolve, mimeType)), + ); + + const [originalName] = nameAndExtension(originalFileName); + const fileName = `${originalName}-edited.${extension}`; + + log.debug(() => ({ a: "canvas => file", blob, type: blob.type, mimeType })); + + return new File([blob], fileName); +}; diff --git a/web/apps/photos/src/components/PhotoViewer/index.tsx b/web/apps/photos/src/components/PhotoViewer/index.tsx index 8e6debf68..c7383efb1 100644 --- a/web/apps/photos/src/components/PhotoViewer/index.tsx +++ b/web/apps/photos/src/components/PhotoViewer/index.tsx @@ -11,11 +11,11 @@ import { copyFileToClipboard, downloadSingleFile, getFileFromURL, - isRawFile, isSupportedRawFormat, } from "utils/file"; import { FILE_TYPE } from "@/media/file-type"; +import { isNonWebImageFileExtension } from "@/media/formats"; import { lowercaseExtension } from "@/next/file"; import { FlexWrapper } from "@ente/shared/components/Container"; import EnteSpinner from "@ente/shared/components/EnteSpinner"; @@ -350,7 +350,8 @@ function PhotoViewer(props: Iprops) { function updateShowEditButton(file: EnteFile) { const extension = lowercaseExtension(file.metadata.title); const isSupported = - !isRawFile(extension) || isSupportedRawFormat(extension); + !isNonWebImageFileExtension(extension) || + isSupportedRawFormat(extension); setShowEditButton( file.metadata.fileType === FILE_TYPE.IMAGE && isSupported, ); diff --git a/web/apps/photos/src/components/PhotoViewer/styledComponents/LivePhotoBtn.tsx b/web/apps/photos/src/components/PhotoViewer/styledComponents/LivePhotoBtn.tsx index 
40de098f5..00b8979d5 100644 --- a/web/apps/photos/src/components/PhotoViewer/styledComponents/LivePhotoBtn.tsx +++ b/web/apps/photos/src/components/PhotoViewer/styledComponents/LivePhotoBtn.tsx @@ -1,5 +1,4 @@ -import { Paper } from "@mui/material"; -import { styled } from "@mui/material/styles"; +import { Paper, styled } from "@mui/material"; export const LivePhotoBtnContainer = styled(Paper)` border-radius: 4px; diff --git a/web/apps/photos/src/components/Search/SearchBar/searchInput/MenuWithPeople.tsx b/web/apps/photos/src/components/Search/SearchBar/searchInput/MenuWithPeople.tsx index 6ebc0d942..b9b7ea88d 100644 --- a/web/apps/photos/src/components/Search/SearchBar/searchInput/MenuWithPeople.tsx +++ b/web/apps/photos/src/components/Search/SearchBar/searchInput/MenuWithPeople.tsx @@ -5,7 +5,7 @@ import { t } from "i18next"; import { AppContext } from "pages/_app"; import { useContext } from "react"; import { components } from "react-select"; -import { IndexStatus } from "types/machineLearning/ui"; +import { IndexStatus } from "services/ml/db"; import { Suggestion, SuggestionType } from "types/search"; const { Menu } = components; diff --git a/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx b/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx index 3f737b3e0..da462a3b5 100644 --- a/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx +++ b/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx @@ -10,6 +10,7 @@ import { components } from "react-select"; import AsyncSelect from "react-select/async"; import { InputActionMeta } from "react-select/src/types"; import { City } from "services/locationSearchService"; +import { Person } from "services/ml/types"; import { getAutoCompleteSuggestions, getDefaultOptions, @@ -17,7 +18,6 @@ import { import { Collection } from "types/collection"; import { LocationTagData } from "types/entity"; import { EnteFile } from "types/file"; -import { Person } from 
"types/machineLearning"; import { ClipSearchScores, DateValue, diff --git a/web/apps/photos/src/components/Sidebar/Preferences/LanguageSelector.tsx b/web/apps/photos/src/components/Sidebar/Preferences/LanguageSelector.tsx index a9474a37d..bdc0d5a84 100644 --- a/web/apps/photos/src/components/Sidebar/Preferences/LanguageSelector.tsx +++ b/web/apps/photos/src/components/Sidebar/Preferences/LanguageSelector.tsx @@ -19,6 +19,8 @@ export const localeName = (locale: SupportedLocale) => { return "English"; case "fr-FR": return "Français"; + case "de-DE": + return "Deutsch"; case "zh-CN": return "中文"; case "nl-NL": diff --git a/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/individual/usageSection.tsx b/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/individual/usageSection.tsx index 4b0ce31b0..8975941ad 100644 --- a/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/individual/usageSection.tsx +++ b/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/individual/usageSection.tsx @@ -1,7 +1,7 @@ import { SpaceBetweenFlex } from "@ente/shared/components/Container"; import { Box, Typography } from "@mui/material"; import { t } from "i18next"; -import { makeHumanReadableStorage } from "utils/billing"; +import { formattedStorageByteSize } from "utils/units"; import { Progressbar } from "../../styledComponents"; @@ -19,7 +19,7 @@ export function IndividualUsageSection({ usage, storage, fileCount }: Iprops) { marginTop: 1.5, }} > - {`${makeHumanReadableStorage( + {`${formattedStorageByteSize( storage - usage, )} ${t("FREE")}`} diff --git a/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/storageSection.tsx b/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/storageSection.tsx index 6143044f0..7f2712f73 100644 --- a/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/storageSection.tsx +++ 
b/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/storageSection.tsx @@ -1,6 +1,6 @@ import { Box, styled, Typography } from "@mui/material"; import { t } from "i18next"; -import { convertBytesToGBs, makeHumanReadableStorage } from "utils/billing"; +import { bytesInGB, formattedStorageByteSize } from "utils/units"; const MobileSmallBox = styled(Box)` display: none; @@ -30,9 +30,9 @@ export default function StorageSection({ usage, storage }: Iprops) { fontWeight={"bold"} sx={{ fontSize: "24px", lineHeight: "30px" }} > - {`${makeHumanReadableStorage(usage, { roundUp: true })} ${t( + {`${formattedStorageByteSize(usage, { round: true })} ${t( "OF", - )} ${makeHumanReadableStorage(storage)} ${t("USED")}`} + )} ${formattedStorageByteSize(storage)} ${t("USED")}`} @@ -40,9 +40,7 @@ export default function StorageSection({ usage, storage }: Iprops) { fontWeight={"bold"} sx={{ fontSize: "24px", lineHeight: "30px" }} > - {`${convertBytesToGBs(usage)} / ${convertBytesToGBs( - storage, - )} ${t("GB")} ${t("USED")}`} + {`${bytesInGB(usage)} / ${bytesInGB(storage)} ${t("storage_unit.gb")} ${t("USED")}`} diff --git a/web/apps/photos/src/components/Sidebar/UtilitySection.tsx b/web/apps/photos/src/components/Sidebar/UtilitySection.tsx index 6b4a6f43d..32f61d976 100644 --- a/web/apps/photos/src/components/Sidebar/UtilitySection.tsx +++ b/web/apps/photos/src/components/Sidebar/UtilitySection.tsx @@ -9,7 +9,7 @@ import { t } from "i18next"; import { useRouter } from "next/router"; import { AppContext } from "pages/_app"; import { useContext, useState } from "react"; -// import mlIDbStorage from 'utils/storage/mlIDbStorage'; +// import mlIDbStorage from 'services/ml/db'; import { configurePasskeyRecovery, isPasskeyRecoveryEnabled, diff --git a/web/apps/photos/src/components/Upload/Uploader.tsx b/web/apps/photos/src/components/Upload/Uploader.tsx index 717430655..bea54c645 100644 --- a/web/apps/photos/src/components/Upload/Uploader.tsx +++ 
b/web/apps/photos/src/components/Upload/Uploader.tsx @@ -1,6 +1,8 @@ import { basename } from "@/next/file"; import log from "@/next/log"; import type { CollectionMapping, Electron, ZipItem } from "@/next/types/ipc"; +import { firstNonEmpty } from "@/utils/array"; +import { ensure } from "@/utils/ensure"; import { CustomError } from "@ente/shared/error"; import { isPromise } from "@ente/shared/utils"; import DiscFullIcon from "@mui/icons-material/DiscFull"; @@ -324,17 +326,17 @@ export default function Uploader({ // Trigger an upload when any of the dependencies change. useEffect(() => { - // Re the paths: + // About the paths: // // - These are not necessarily the full paths. In particular, when // running on the browser they'll be the relative paths (at best) or // just the file-name otherwise. // // - All the paths use POSIX separators. See inline comments. + // const allItemAndPaths = [ - // See: [Note: webkitRelativePath]. In particular, they use POSIX - // separators. - webFiles.map((f) => [f, f.webkitRelativePath ?? f.name]), + // Relative path (using POSIX separators) or the file's name. + webFiles.map((f) => [f, pathLikeForWebFile(f)]), // The paths we get from the desktop app all eventually come either // from electron.selectDirectory or electron.pathForFile, both of // which return POSIX paths. @@ -822,6 +824,37 @@ const desktopFilesAndZipItems = async (electron: Electron, files: File[]) => { return { fileAndPaths, zipItems }; }; +/** + * Return the relative path or name of a File object selected or + * drag-and-dropped on the web. + * + * There are three cases here: + * + * 1. If the user selects individual file(s), then the returned File objects + * will only have a `name`. + * + * 2. If the user selects directory(ies), then the returned File objects will + * have a `webkitRelativePath`. For more details, see [Note: + * webkitRelativePath]. In particular, these will POSIX separators. + * + * 3. 
If the user drags-and-drops, then the react-dropzone library that we use + * will internally convert `webkitRelativePath` to `path`, but otherwise it + * behaves same as case 2. + * https://github.com/react-dropzone/file-selector/blob/master/src/file.ts#L1214 + */ +const pathLikeForWebFile = (file: File): string => + ensure( + firstNonEmpty([ + // We need to check first, since path is not a property of + // the standard File objects. + "path" in file && typeof file.path == "string" + ? file.path + : undefined, + file.webkitRelativePath, + file.name, + ]), + ); + // This is used to prompt the user the make upload strategy choice interface ImportSuggestion { rootFolderName: string; diff --git a/web/apps/photos/src/components/UploadSelectorInputs.tsx b/web/apps/photos/src/components/UploadSelectorInputs.tsx index 13e33fc6d..e22e2f541 100644 --- a/web/apps/photos/src/components/UploadSelectorInputs.tsx +++ b/web/apps/photos/src/components/UploadSelectorInputs.tsx @@ -1,9 +1,24 @@ -export default function UploadSelectorInputs({ +type GetInputProps = () => React.HTMLAttributes; + +interface UploadSelectorInputsProps { + getDragAndDropInputProps: GetInputProps; + getFileSelectorInputProps: GetInputProps; + getFolderSelectorInputProps: GetInputProps; + getZipFileSelectorInputProps?: GetInputProps; +} + +/** + * Create a bunch of HTML inputs elements, one each for the given props. + * + * These hidden input element serve as the way for us to show various file / + * folder Selector dialogs and handle drag and drop inputs. 
+ */ +export const UploadSelectorInputs: React.FC = ({ getDragAndDropInputProps, getFileSelectorInputProps, getFolderSelectorInputProps, getZipFileSelectorInputProps, -}) { +}) => { return ( <> @@ -14,4 +29,4 @@ export default function UploadSelectorInputs({ )} ); -} +}; diff --git a/web/apps/photos/src/components/WatchFolder.tsx b/web/apps/photos/src/components/WatchFolder.tsx index 710a54168..4d2144e0c 100644 --- a/web/apps/photos/src/components/WatchFolder.tsx +++ b/web/apps/photos/src/components/WatchFolder.tsx @@ -25,8 +25,8 @@ import { Stack, Tooltip, Typography, + styled, } from "@mui/material"; -import { styled } from "@mui/material/styles"; import { CollectionMappingChoiceModal } from "components/Upload/CollectionMappingChoiceModal"; import { t } from "i18next"; import { AppContext } from "pages/_app"; diff --git a/web/apps/photos/src/components/ml/MLSearchSettings.tsx b/web/apps/photos/src/components/ml/MLSearchSettings.tsx index 9b50c2d6a..409df4fc6 100644 --- a/web/apps/photos/src/components/ml/MLSearchSettings.tsx +++ b/web/apps/photos/src/components/ml/MLSearchSettings.tsx @@ -22,7 +22,7 @@ import { getFaceSearchEnabledStatus, updateFaceSearchEnabledStatus, } from "services/userService"; -import { isInternalUser } from "utils/user"; +import { isInternalUserForML } from "utils/user"; export const MLSearchSettings = ({ open, onClose, onRootClose }) => { const { @@ -280,7 +280,7 @@ function EnableMLSearch({ onClose, enableMlSearch, onRootClose }) {

- {isInternalUser() && ( + {isInternalUserForML() && (