diff --git a/.github/workflows/auth-crowdin.yml b/.github/workflows/auth-crowdin.yml
index 811def9396c6db575a0a6b33c18ea73566064aca..bd92f145910c0df5fabfa19bfac1ceb2e9dd0e6b 100644
--- a/.github/workflows/auth-crowdin.yml
+++ b/.github/workflows/auth-crowdin.yml
@@ -30,7 +30,7 @@ jobs:
upload_sources: true
upload_translations: false
download_translations: true
- localization_branch_name: crowdin-translations-auth
+ localization_branch_name: translations/auth
create_pull_request: true
skip_untranslated_strings: true
pull_request_title: "[auth] New translations"
diff --git a/.github/workflows/auth-lint.yml b/.github/workflows/auth-lint.yml
index 6504e0646ae3883b7de0807e8b785f8c31e51262..e7c42e1a6bc22a8c737f3e98aade76e323d16511 100644
--- a/.github/workflows/auth-lint.yml
+++ b/.github/workflows/auth-lint.yml
@@ -3,7 +3,7 @@ name: "Lint (auth)"
on:
# Run on every push to a branch other than main that changes auth/
push:
- branches-ignore: [main, "deploy/**"]
+ branches-ignore: [main]
paths:
- "auth/**"
- ".github/workflows/auth-lint.yml"
diff --git a/.github/workflows/desktop-lint.yml b/.github/workflows/desktop-lint.yml
index 0b8263f3d3613340318ffe229586115dcc1d101a..d1cfda884dae83561fe8c409f675e1213abd971b 100644
--- a/.github/workflows/desktop-lint.yml
+++ b/.github/workflows/desktop-lint.yml
@@ -3,7 +3,7 @@ name: "Lint (desktop)"
on:
# Run on every push to a branch other than main that changes desktop/
push:
- branches-ignore: [main, "deploy/**"]
+ branches-ignore: [main]
paths:
- "desktop/**"
- ".github/workflows/desktop-lint.yml"
diff --git a/.github/workflows/docs-deploy.yml b/.github/workflows/docs-deploy.yml
index 01b0c2254ac692d3219e61352ada03b4ab59d8d3..b824fe5c32a0b1ef82e9fbfaea1c14e435ebe22e 100644
--- a/.github/workflows/docs-deploy.yml
+++ b/.github/workflows/docs-deploy.yml
@@ -37,11 +37,8 @@ jobs:
run: yarn build
- name: Publish
- uses: cloudflare/pages-action@1
+ uses: cloudflare/wrangler-action@v3
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
- projectName: ente
- branch: help
- directory: docs/docs/.vitepress/dist
- wranglerVersion: "3"
+ command: pages deploy --project-name=ente --commit-dirty=true --branch=help docs/docs/.vitepress/dist
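The `wrangler-action` step above simply runs the wrangler CLI with the given `command`, so the docs deploy can be reproduced locally with something like the following sketch (assumes wrangler v3 is available via npx and that the Cloudflare credentials are exported as environment variables; the placeholder values are illustrative):

```sh
# Rough local equivalent of the "Publish" step above (sketch).
export CLOUDFLARE_ACCOUNT_ID=...   # same values as the repository secrets
export CLOUDFLARE_API_TOKEN=...

npx wrangler pages deploy docs/docs/.vitepress/dist \
    --project-name=ente --branch=help --commit-dirty=true
```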
diff --git a/.github/workflows/docs-verify-build.yml b/.github/workflows/docs-verify-build.yml
index a57f71c8687cae39f91ae72e1ac0eb10260185ba..addb52a059de0c9d728d012eb1fe0fe20a6d02b5 100644
--- a/.github/workflows/docs-verify-build.yml
+++ b/.github/workflows/docs-verify-build.yml
@@ -6,7 +6,7 @@ name: "Verify build (docs)"
on:
# Run on every push to a branch other than main that changes docs/
push:
- branches-ignore: [main, "deploy/**"]
+ branches-ignore: [main]
paths:
- "docs/**"
- ".github/workflows/docs-verify-build.yml"
diff --git a/.github/workflows/mobile-crowdin.yml b/.github/workflows/mobile-crowdin.yml
index 5c52b59ad12ac1480440ab10ac1045d74cef2025..556ac45f24ea8b69944f58c321f817a531a64002 100644
--- a/.github/workflows/mobile-crowdin.yml
+++ b/.github/workflows/mobile-crowdin.yml
@@ -30,7 +30,7 @@ jobs:
upload_sources: true
upload_translations: false
download_translations: true
- localization_branch_name: crowdin-translations-mobile
+ localization_branch_name: translations/mobile
create_pull_request: true
skip_untranslated_strings: true
pull_request_title: "[mobile] New translations"
diff --git a/.github/workflows/mobile-internal-release.yml b/.github/workflows/mobile-internal-release.yml
index 4ee7367424ddad30fab21c7d642d922f9771dc82..4b7d537ef281ac432d7c5ebefbca8986bafb478b 100644
--- a/.github/workflows/mobile-internal-release.yml
+++ b/.github/workflows/mobile-internal-release.yml
@@ -1,4 +1,4 @@
-name: "Internal Release - Photos"
+name: "Internal release (photos)"
on:
workflow_dispatch: # Allow manually running the action
diff --git a/.github/workflows/mobile-lint.yml b/.github/workflows/mobile-lint.yml
index 57b2ca4dbd16d137e583d95ece5b69828dcb14c1..493185b6bd28976276776b26094e6105491c3b5e 100644
--- a/.github/workflows/mobile-lint.yml
+++ b/.github/workflows/mobile-lint.yml
@@ -3,7 +3,7 @@ name: "Lint (mobile)"
on:
# Run on every push to a branch other than main that changes mobile/
push:
- branches-ignore: [main, f-droid, "deploy/**"]
+ branches-ignore: [main, f-droid]
paths:
- "mobile/**"
- ".github/workflows/mobile-lint.yml"
diff --git a/.github/workflows/server-lint.yml b/.github/workflows/server-lint.yml
index d25f2adcc860f2b72e8759c4f8eb914ab8e53aab..c051d029011697405346b18aef71cd21332c6e7c 100644
--- a/.github/workflows/server-lint.yml
+++ b/.github/workflows/server-lint.yml
@@ -3,7 +3,7 @@ name: "Lint (server)"
on:
# Run on every push to a branch other than main that changes server/
push:
- branches-ignore: [main, "deploy/**"]
+ branches-ignore: [main]
paths:
- "server/**"
- ".github/workflows/server-lint.yml"
diff --git a/.github/workflows/server-publish.yml b/.github/workflows/server-publish.yml
index 1ba1935171d5f3c5dba700c4e6669b34227e8507..b5aabbb8a2dfc4037b373df1cdfecbb1574f184c 100644
--- a/.github/workflows/server-publish.yml
+++ b/.github/workflows/server-publish.yml
@@ -38,3 +38,8 @@ jobs:
tags: ${{ inputs.commit }}, latest
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Tag as server/ghcr
+ run: |
+ git tag -f server/ghcr
+ git push -f origin server/ghcr
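Because this step force-moves the `server/ghcr` tag on every publish, the tag always marks the most recently published server image. A quick way to check which commit it currently points to, using plain git (a sketch):

```sh
# See which commit the server/ghcr marker tag currently points to.
git ls-remote origin refs/tags/server/ghcr

# Or fetch the tag (allowing it to move) and inspect it locally.
git fetch origin tag server/ghcr --force
git log -1 --oneline server/ghcr
```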
diff --git a/.github/workflows/web-crowdin-push.yml b/.github/workflows/web-crowdin-push.yml
new file mode 100644
index 0000000000000000000000000000000000000000..1d525dfe0e3e0355bd68a72f2716c105f11f84f0
--- /dev/null
+++ b/.github/workflows/web-crowdin-push.yml
@@ -0,0 +1,34 @@
+name: "Push Crowdin translations (web)"
+
+# This is a variant of web-crowdin.yml that uploads the translated strings in
+# addition to the source strings.
+#
+# This allows us to change the strings in our source code for an automated
+# refactoring (e.g. renaming a key), and then run this workflow to update the
+# data in Crowdin, taking our source code as the source of truth.
+
+on:
+ # Trigger manually, or using
+ # `gh workflow run web-crowdin-push.yml --ref `
+ workflow_dispatch:
+
+jobs:
+ push-to-crowdin:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Crowdin push
+ uses: crowdin/github-action@v1
+ with:
+ base_path: "web/"
+ config: "web/crowdin.yml"
+ upload_sources: true
+ upload_translations: true
+ download_translations: false
+ project_id: 569613
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }}
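Since this workflow is dispatch-only, it can be started from the CLI with `gh`; the branch name below is only an illustration:

```sh
# Kick off the Crowdin push from a chosen branch (example ref).
gh workflow run web-crowdin-push.yml --ref main
```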
diff --git a/.github/workflows/web-crowdin.yml b/.github/workflows/web-crowdin.yml
index d986850653095207a45eccdd906dbfddee0db433..b20b19ce3f92f3e124ec12fa3295e6ee1c13f8a1 100644
--- a/.github/workflows/web-crowdin.yml
+++ b/.github/workflows/web-crowdin.yml
@@ -36,7 +36,7 @@ jobs:
upload_sources: true
upload_translations: false
download_translations: true
- localization_branch_name: crowdin-translations-web
+ localization_branch_name: translations/web
create_pull_request: true
skip_untranslated_strings: true
pull_request_title: "[web] New translations"
diff --git a/.github/workflows/web-deploy-accounts.yml b/.github/workflows/web-deploy-accounts.yml
deleted file mode 100644
index 61411cac6f6abf1f09399dcf4a1cf25de8bf9538..0000000000000000000000000000000000000000
--- a/.github/workflows/web-deploy-accounts.yml
+++ /dev/null
@@ -1,43 +0,0 @@
-name: "Deploy (accounts)"
-
-on:
- push:
- # Run workflow on pushes to the deploy/accounts
- branches: [deploy/accounts]
-
-jobs:
- deploy:
- runs-on: ubuntu-latest
-
- defaults:
- run:
- working-directory: web
-
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- submodules: recursive
-
- - name: Setup node and enable yarn caching
- uses: actions/setup-node@v4
- with:
- node-version: 20
- cache: "yarn"
- cache-dependency-path: "web/yarn.lock"
-
- - name: Install dependencies
- run: yarn install
-
- - name: Build accounts
- run: yarn build:accounts
-
- - name: Publish accounts
- uses: cloudflare/pages-action@1
- with:
- accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
- apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
- projectName: ente
- branch: deploy/accounts
- directory: web/apps/accounts/out
- wranglerVersion: "3"
diff --git a/.github/workflows/web-deploy-auth.yml b/.github/workflows/web-deploy-auth.yml
deleted file mode 100644
index d195b62f8cfe427b7acd35a6447947dba0b680a4..0000000000000000000000000000000000000000
--- a/.github/workflows/web-deploy-auth.yml
+++ /dev/null
@@ -1,43 +0,0 @@
-name: "Deploy (auth)"
-
-on:
- push:
- # Run workflow on pushes to the deploy/auth
- branches: [deploy/auth]
-
-jobs:
- deploy:
- runs-on: ubuntu-latest
-
- defaults:
- run:
- working-directory: web
-
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- submodules: recursive
-
- - name: Setup node and enable yarn caching
- uses: actions/setup-node@v4
- with:
- node-version: 20
- cache: "yarn"
- cache-dependency-path: "web/yarn.lock"
-
- - name: Install dependencies
- run: yarn install
-
- - name: Build auth
- run: yarn build:auth
-
- - name: Publish auth
- uses: cloudflare/pages-action@1
- with:
- accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
- apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
- projectName: ente
- branch: deploy/auth
- directory: web/apps/auth/out
- wranglerVersion: "3"
diff --git a/.github/workflows/web-deploy-cast.yml b/.github/workflows/web-deploy-cast.yml
deleted file mode 100644
index c5bbca9542c91ff50bd6460c90f86391b03f2816..0000000000000000000000000000000000000000
--- a/.github/workflows/web-deploy-cast.yml
+++ /dev/null
@@ -1,43 +0,0 @@
-name: "Deploy (cast)"
-
-on:
- push:
- # Run workflow on pushes to the deploy/cast
- branches: [deploy/cast]
-
-jobs:
- deploy:
- runs-on: ubuntu-latest
-
- defaults:
- run:
- working-directory: web
-
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- submodules: recursive
-
- - name: Setup node and enable yarn caching
- uses: actions/setup-node@v4
- with:
- node-version: 20
- cache: "yarn"
- cache-dependency-path: "web/yarn.lock"
-
- - name: Install dependencies
- run: yarn install
-
- - name: Build cast
- run: yarn build:cast
-
- - name: Publish cast
- uses: cloudflare/pages-action@1
- with:
- accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
- apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
- projectName: ente
- branch: deploy/cast
- directory: web/apps/cast/out
- wranglerVersion: "3"
diff --git a/.github/workflows/web-deploy-one.yml b/.github/workflows/web-deploy-one.yml
new file mode 100644
index 0000000000000000000000000000000000000000..77c338513d8fb0e441bd2e02000845e2670bfe89
--- /dev/null
+++ b/.github/workflows/web-deploy-one.yml
@@ -0,0 +1,61 @@
+name: "Deploy one (web)"
+
+on:
+ workflow_dispatch:
+ inputs:
+ app:
+ description: "App to build and deploy"
+ type: choice
+ required: true
+ default: "photos"
+ options:
+ - "accounts"
+ - "auth"
+ - "cast"
+ - "payments"
+ - "photos"
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+
+ defaults:
+ run:
+ working-directory: web
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+
+ - name: Setup node and enable yarn caching
+ uses: actions/setup-node@v4
+ with:
+ node-version: 20
+ cache: "yarn"
+ cache-dependency-path: "web/yarn.lock"
+
+ - name: Install dependencies
+ run: yarn install
+
+ - name: Build ${{ inputs.app }}
+ run: yarn build:${{ inputs.app }}
+
+ - name: Publish ${{ inputs.app }} to preview
+ uses: cloudflare/wrangler-action@v3
+ with:
+ accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
+ apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
+ # [Note: Wrangler commit-dirty]
+ #
+ # Without the --commit-dirty flag, running the wrangler-action
+ # always prints a warning when used:
+ #
+ # Warning: Your working directory is a git repo and has uncommitted changes
+ # To silence this warning, pass in --commit-dirty=true
+ #
+ # There is no clear documentation on whether passing this is
+ # harmless, but all indications and in-practice tests suggest
+ # that it is.
+ command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/${{ inputs.app }} web/apps/${{ inputs.app }}/out
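For reference, the dispatch input can also be supplied from the CLI; the values below are examples only:

```sh
# Build and deploy a single web app by name (example values).
gh workflow run web-deploy-one.yml --ref main -f app=auth

# Optionally follow it; this prompts for which run to watch.
gh run watch
```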
diff --git a/.github/workflows/web-deploy-payments.yml b/.github/workflows/web-deploy-payments.yml
deleted file mode 100644
index 367e1db186271183b29f51404a6ecd9b931b56ab..0000000000000000000000000000000000000000
--- a/.github/workflows/web-deploy-payments.yml
+++ /dev/null
@@ -1,43 +0,0 @@
-name: "Deploy (payments)"
-
-on:
- push:
- # Run workflow on pushes to the deploy/payments
- branches: [deploy/payments]
-
-jobs:
- deploy:
- runs-on: ubuntu-latest
-
- defaults:
- run:
- working-directory: web
-
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- submodules: recursive
-
- - name: Setup node and enable yarn caching
- uses: actions/setup-node@v4
- with:
- node-version: 20
- cache: "yarn"
- cache-dependency-path: "web/yarn.lock"
-
- - name: Install dependencies
- run: yarn install
-
- - name: Build payments
- run: yarn build:payments
-
- - name: Publish payments
- uses: cloudflare/pages-action@1
- with:
- accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
- apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
- projectName: ente
- branch: deploy/payments
- directory: web/apps/payments/dist
- wranglerVersion: "3"
diff --git a/.github/workflows/web-deploy-photos.yml b/.github/workflows/web-deploy-photos.yml
deleted file mode 100644
index cb3a9db86d4ba1d9445d6d0183e11f4d0ceec5cb..0000000000000000000000000000000000000000
--- a/.github/workflows/web-deploy-photos.yml
+++ /dev/null
@@ -1,43 +0,0 @@
-name: "Deploy (photos)"
-
-on:
- push:
- # Run workflow on pushes to the deploy/photos
- branches: [deploy/photos]
-
-jobs:
- deploy:
- runs-on: ubuntu-latest
-
- defaults:
- run:
- working-directory: web
-
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- submodules: recursive
-
- - name: Setup node and enable yarn caching
- uses: actions/setup-node@v4
- with:
- node-version: 20
- cache: "yarn"
- cache-dependency-path: "web/yarn.lock"
-
- - name: Install dependencies
- run: yarn install
-
- - name: Build photos
- run: yarn build:photos
-
- - name: Publish photos
- uses: cloudflare/pages-action@1
- with:
- accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
- apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
- projectName: ente
- branch: deploy/photos
- directory: web/apps/photos/out
- wranglerVersion: "3"
diff --git a/.github/workflows/web-preview.yml b/.github/workflows/web-deploy-preview.yml
similarity index 84%
rename from .github/workflows/web-preview.yml
rename to .github/workflows/web-deploy-preview.yml
index 8f39c02474f1cc272201f2baa51dd9e25673abfb..4bb1870726998accc66d6d0dcaadedf0544223c8 100644
--- a/.github/workflows/web-preview.yml
+++ b/.github/workflows/web-deploy-preview.yml
@@ -1,4 +1,4 @@
-name: "Preview (web)"
+name: "Deploy preview (web)"
on:
workflow_dispatch:
@@ -43,11 +43,8 @@ jobs:
run: yarn build:${{ inputs.app }}
- name: Publish ${{ inputs.app }} to preview
- uses: cloudflare/pages-action@1
+ uses: cloudflare/wrangler-action@v3
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
- projectName: ente
- branch: preview
- directory: web/apps/${{ inputs.app }}/out
- wranglerVersion: "3"
+ command: pages deploy --project-name=ente --commit-dirty=true --branch=preview web/apps/${{ inputs.app }}/out
diff --git a/.github/workflows/web-deploy-staff.yml b/.github/workflows/web-deploy-staff.yml
index 4d386344df0073b82b683c9c3de36e71abb7e088..854e163644d1442b9ebbfbe8c552a80e2d86cee3 100644
--- a/.github/workflows/web-deploy-staff.yml
+++ b/.github/workflows/web-deploy-staff.yml
@@ -38,11 +38,8 @@ jobs:
run: yarn build:staff
- name: Publish staff
- uses: cloudflare/pages-action@1
+ uses: cloudflare/wrangler-action@v3
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
- projectName: ente
- branch: deploy/staff
- directory: web/apps/staff/dist
- wranglerVersion: "3"
+ command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/staff web/apps/staff/dist
diff --git a/.github/workflows/web-deploy-staging.yml b/.github/workflows/web-deploy-staging.yml
new file mode 100644
index 0000000000000000000000000000000000000000..ca3a6142b2c70d97494ba3fab1d1e79d1685f549
--- /dev/null
+++ b/.github/workflows/web-deploy-staging.yml
@@ -0,0 +1,86 @@
+name: "Deploy staging (web)"
+
+on:
+ schedule:
+ # Run every day at ~3:00 PM IST
+ #
+ # See: [Note: Run workflow every 24 hours]
+ - cron: "25 9 * * *"
+ # Also allow manually running the workflow
+ workflow_dispatch:
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+
+ defaults:
+ run:
+ working-directory: web
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ submodules: recursive
+
+ - name: Setup node and enable yarn caching
+ uses: actions/setup-node@v4
+ with:
+ node-version: 20
+ cache: "yarn"
+ cache-dependency-path: "web/yarn.lock"
+
+ - name: Install dependencies
+ run: yarn install
+
+ - name: Build photos
+ run: yarn build:photos
+ env:
+ NEXT_PUBLIC_ENTE_ALBUMS_ENDPOINT: https://albums.ente.sh
+
+ - name: Publish photos
+ uses: cloudflare/wrangler-action@v3
+ with:
+ accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
+ apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
+ command: pages deploy --project-name=ente --commit-dirty=true --branch=n-photos web/apps/photos/out
+
+ - name: Build accounts
+ run: yarn build:accounts
+
+ - name: Publish accounts
+ uses: cloudflare/wrangler-action@v3
+ with:
+ accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
+ apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
+ command: pages deploy --project-name=ente --commit-dirty=true --branch=n-accounts web/apps/accounts/out
+
+ - name: Build auth
+ run: yarn build:auth
+
+ - name: Publish auth
+ uses: cloudflare/wrangler-action@v3
+ with:
+ accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
+ apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
+ command: pages deploy --project-name=ente --commit-dirty=true --branch=n-auth web/apps/auth/out
+
+ - name: Build cast
+ run: yarn build:cast
+
+ - name: Publish cast
+ uses: cloudflare/wrangler-action@v3
+ with:
+ accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
+ apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
+ command: pages deploy --project-name=ente --commit-dirty=true --branch=n-cast web/apps/cast/out
+
+ - name: Build payments
+ run: yarn build:payments
+
+ - name: Publish payments
+ uses: cloudflare/wrangler-action@v3
+ with:
+ accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
+ apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
+ command: pages deploy --project-name=ente --commit-dirty=true --branch=n-payments web/apps/payments/dist
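A minimal local equivalent of the staging photos build-and-deploy pair above, assuming the `web/` workspace has already had `yarn install` run and wrangler is authenticated via the usual Cloudflare environment variables:

```sh
# Build photos against the staging albums endpoint, then deploy the output to
# the same preview branch the workflow uses (sketch).
cd web
NEXT_PUBLIC_ENTE_ALBUMS_ENDPOINT=https://albums.ente.sh yarn build:photos
npx wrangler pages deploy apps/photos/out \
    --project-name=ente --branch=n-photos --commit-dirty=true
```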
diff --git a/.github/workflows/web-nightly.yml b/.github/workflows/web-deploy.yml
similarity index 60%
rename from .github/workflows/web-nightly.yml
rename to .github/workflows/web-deploy.yml
index 9497382924e86b3f65d449b3eb7a6064088ae944..6f6a113f24e5298ffc868172fe3611482bbfe2ae 100644
--- a/.github/workflows/web-nightly.yml
+++ b/.github/workflows/web-deploy.yml
@@ -1,17 +1,21 @@
-name: "Nightly (web)"
+name: "Deploy (web)"
on:
schedule:
# [Note: Run workflow every 24 hours]
#
- # Run every 24 hours - First field is minute, second is hour of the day
- # This runs 23:15 UTC everyday - 1 and 15 are just arbitrary offset to
- # avoid scheduling it on the exact hour, as suggested by GitHub.
+ # Run every day except Sunday at ~8:00 AM IST.
+ #
+ # First field is minute, second is hour of the day. Last is day of week,
+ # 0 being Sunday.
+ #
+ # Add a few minutes of offset to avoid scheduling on exact hourly
+ # boundaries (recommended by GitHub to avoid congestion).
#
# https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#schedule
# https://crontab.guru/
#
- - cron: "15 23 * * *"
+ - cron: "25 2 * * 1-6"
# Also allow manually running the workflow
workflow_dispatch:
@@ -39,69 +43,52 @@ jobs:
- name: Install dependencies
run: yarn install
+ - name: Build photos
+ run: yarn build:photos
+
+ - name: Publish photos
+ uses: cloudflare/wrangler-action@v3
+ with:
+ accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
+ apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
+ command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/photos web/apps/photos/out
+
- name: Build accounts
run: yarn build:accounts
- name: Publish accounts
- uses: cloudflare/pages-action@1
+ uses: cloudflare/wrangler-action@v3
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
- projectName: ente
- branch: n-accounts
- directory: web/apps/accounts/out
- wranglerVersion: "3"
+ command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/accounts web/apps/accounts/out
- name: Build auth
run: yarn build:auth
- name: Publish auth
- uses: cloudflare/pages-action@1
+ uses: cloudflare/wrangler-action@v3
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
- projectName: ente
- branch: n-auth
- directory: web/apps/auth/out
- wranglerVersion: "3"
+ command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/auth web/apps/auth/out
- name: Build cast
run: yarn build:cast
- name: Publish cast
- uses: cloudflare/pages-action@1
+ uses: cloudflare/wrangler-action@v3
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
- projectName: ente
- branch: n-cast
- directory: web/apps/cast/out
- wranglerVersion: "3"
+ command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/cast web/apps/cast/out
- name: Build payments
run: yarn build:payments
- name: Publish payments
- uses: cloudflare/pages-action@1
- with:
- accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
- apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
- projectName: ente
- branch: n-payments
- directory: web/apps/payments/dist
- wranglerVersion: "3"
-
- - name: Build photos
- run: yarn build:photos
- env:
- NEXT_PUBLIC_ENTE_ALBUMS_ENDPOINT: https://albums.ente.sh
-
- - name: Publish photos
- uses: cloudflare/pages-action@1
+ uses: cloudflare/wrangler-action@v3
with:
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
- projectName: ente
- branch: n-photos
- directory: web/apps/photos/out
- wranglerVersion: "3"
+ command: pages deploy --project-name=ente --commit-dirty=true --branch=deploy/payments web/apps/payments/dist
diff --git a/.github/workflows/web-lint.yml b/.github/workflows/web-lint.yml
index 0dc11aa0e7785b09a73e05d69e1f3938cd629919..7f5d270029ee6a79054ab11e37fb5cce9a0de412 100644
--- a/.github/workflows/web-lint.yml
+++ b/.github/workflows/web-lint.yml
@@ -3,7 +3,7 @@ name: "Lint (web)"
on:
# Run on every push to a branch other than main that changes web/
push:
- branches-ignore: [main, "deploy/**"]
+ branches-ignore: [main]
paths:
- "web/**"
- ".github/workflows/web-lint.yml"
diff --git a/auth/assets/simple-icons b/auth/assets/simple-icons
index 8e7701d6a40462733043f54b3849faf35af70a83..8a3731352af133a02223a6c7b1f37c4abb096af0 160000
--- a/auth/assets/simple-icons
+++ b/auth/assets/simple-icons
@@ -1 +1 @@
-Subproject commit 8e7701d6a40462733043f54b3849faf35af70a83
+Subproject commit 8a3731352af133a02223a6c7b1f37c4abb096af0
diff --git a/auth/ios/Podfile.lock b/auth/ios/Podfile.lock
index 7d02d123b27e59861ba2858613976f4963f3768e..991f52b42acb259b498dfc9d518aab5eb65bb1f3 100644
--- a/auth/ios/Podfile.lock
+++ b/auth/ios/Podfile.lock
@@ -87,7 +87,7 @@ PODS:
- SDWebImage/Core (5.19.0)
- Sentry/HybridSDK (8.21.0):
- SentryPrivate (= 8.21.0)
- - sentry_flutter (0.0.1):
+ - sentry_flutter (7.19.0):
- Flutter
- FlutterMacOS
- Sentry/HybridSDK (= 8.21.0)
@@ -249,7 +249,7 @@ SPEC CHECKSUMS:
ReachabilitySwift: 5ae15e16814b5f9ef568963fb2c87aeb49158c66
SDWebImage: 981fd7e860af070920f249fd092420006014c3eb
Sentry: ebc12276bd17613a114ab359074096b6b3725203
- sentry_flutter: dff1df05dc39c83d04f9330b36360fc374574c5e
+ sentry_flutter: 88ebea3f595b0bc16acc5bedacafe6d60c12dcd5
SentryPrivate: d651efb234cf385ec9a1cdd3eff94b5e78a0e0fe
share_plus: c3fef564749587fc939ef86ffb283ceac0baf9f5
shared_preferences_foundation: b4c3b4cddf1c21f02770737f147a3f5da9d39695
@@ -263,4 +263,4 @@ SPEC CHECKSUMS:
PODFILE CHECKSUM: b4e3a7eabb03395b66e81fc061789f61526ee6bb
-COCOAPODS: 1.14.3
+COCOAPODS: 1.15.2
diff --git a/auth/lib/l10n/arb/app_ar.arb b/auth/lib/l10n/arb/app_ar.arb
index 68bd38900eadbb17a8323076628a819ef198be75..f9d37c7ba91aa08514480305341ccf63c21300e1 100644
--- a/auth/lib/l10n/arb/app_ar.arb
+++ b/auth/lib/l10n/arb/app_ar.arb
@@ -20,7 +20,6 @@
"codeIssuerHint": "المصدِّر",
"codeSecretKeyHint": "الرمز السري",
"codeAccountHint": "الحساب (you@domain.com)",
- "accountKeyType": "نوع المفتاح",
"sessionExpired": "انتهت صلاحية الجلسة",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
diff --git a/auth/lib/l10n/arb/app_de.arb b/auth/lib/l10n/arb/app_de.arb
index be769ecd5016142d3ef3d6c5828fdd41979606df..0c4d29eaf32eeaf37fce02e211d92ce0937cf9e3 100644
--- a/auth/lib/l10n/arb/app_de.arb
+++ b/auth/lib/l10n/arb/app_de.arb
@@ -20,7 +20,6 @@
"codeIssuerHint": "Aussteller",
"codeSecretKeyHint": "Geheimer Schlüssel",
"codeAccountHint": "Konto (you@domain.com)",
- "accountKeyType": "Art des Schlüssels",
"sessionExpired": "Sitzung abgelaufen",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
diff --git a/auth/lib/l10n/arb/app_es.arb b/auth/lib/l10n/arb/app_es.arb
index 41113f0b9acf3a521f9f518068a72ee40fdfe308..f0c8971a0f7bbd6efe20a3a26d294904ea17bbb0 100644
--- a/auth/lib/l10n/arb/app_es.arb
+++ b/auth/lib/l10n/arb/app_es.arb
@@ -20,7 +20,6 @@
"codeIssuerHint": "Emisor",
"codeSecretKeyHint": "Llave Secreta",
"codeAccountHint": "Cuenta (tu@dominio.com)",
- "accountKeyType": "Tipo de llave",
"sessionExpired": "La sesión ha expirado",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
@@ -113,6 +112,7 @@
"copied": "Copiado",
"pleaseTryAgain": "Por favor, inténtalo nuevamente",
"existingUser": "Usuario existente",
+ "newUser": "Nuevo a Ente",
"delete": "Borrar",
"enterYourPasswordHint": "Ingrese su contraseña",
"forgotPassword": "Olvidé mi contraseña",
@@ -138,6 +138,8 @@
"enterCodeHint": "Ingrese el código de seis dígitos de su aplicación de autenticación",
"lostDeviceTitle": "¿Perdió su dispositivo?",
"twoFactorAuthTitle": "Autenticación de dos factores",
+ "passkeyAuthTitle": "Verificación de llave de acceso",
+ "verifyPasskey": "Verificar llave de acceso",
"recoverAccount": "Recuperar cuenta",
"enterRecoveryKeyHint": "Introduzca su clave de recuperación",
"recover": "Recuperar",
@@ -191,6 +193,8 @@
"recoveryKeySaveDescription": "Nosotros no almacenamos esta clave, por favor guarde dicha clave de 24 palabras en un lugar seguro.",
"doThisLater": "Hacer esto más tarde",
"saveKey": "Guardar Clave",
+ "save": "Guardar",
+ "send": "Enviar",
"back": "Atrás",
"createAccount": "Crear cuenta",
"passwordStrength": "Fortaleza de la contraseña: {passwordStrengthValue}",
@@ -397,5 +401,8 @@
"signOutOtherDevices": "Cerrar la sesión de otros dispositivos",
"doNotSignOut": "No cerrar la sesión",
"hearUsWhereTitle": "¿Cómo conoció Ente? (opcional)",
- "hearUsExplanation": "No rastreamos las aplicaciones instaladas. ¡Nos ayudaría si nos dijera dónde nos encontró!"
+ "hearUsExplanation": "No rastreamos las aplicaciones instaladas. ¡Nos ayudaría si nos dijera dónde nos encontró!",
+ "passkey": "Llave de acceso",
+ "developerSettingsWarning": "¿Estás seguro de que quieres modificar los ajustes de desarrollador?",
+ "developerSettings": "Ajustes de desarrollador"
}
\ No newline at end of file
diff --git a/auth/lib/l10n/arb/app_fa.arb b/auth/lib/l10n/arb/app_fa.arb
index 0cba193a96643ce853f6be15fe21d35da5d89a16..948aa8b223c5ad9eb82374a321d48307b4039fa3 100644
--- a/auth/lib/l10n/arb/app_fa.arb
+++ b/auth/lib/l10n/arb/app_fa.arb
@@ -14,7 +14,6 @@
"codeIssuerHint": "صادر کننده",
"codeSecretKeyHint": "کلید مخفی",
"codeAccountHint": "حساب (you@domain.com)",
- "accountKeyType": "نوع کلید",
"sessionExpired": "نشست منقضی شده است",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
diff --git a/auth/lib/l10n/arb/app_fi.arb b/auth/lib/l10n/arb/app_fi.arb
index 72309b3310ecc26737cd1539b35ba341a485b160..2a0404147587926e87fc7843b7a5343dd92b1881 100644
--- a/auth/lib/l10n/arb/app_fi.arb
+++ b/auth/lib/l10n/arb/app_fi.arb
@@ -12,7 +12,6 @@
"codeIssuerHint": "Myöntäjä",
"codeSecretKeyHint": "Salainen avain",
"codeAccountHint": "Tili (sinun@jokinosoite.com)",
- "accountKeyType": "Avaimen tyyppi",
"sessionExpired": "Istunto on vanheutunut",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
diff --git a/auth/lib/l10n/arb/app_fr.arb b/auth/lib/l10n/arb/app_fr.arb
index 04a7058c7c877025a920a219bda71765b4578a56..71ddc0b31c815e53a7d6b0ba28a36bab788acf1e 100644
--- a/auth/lib/l10n/arb/app_fr.arb
+++ b/auth/lib/l10n/arb/app_fr.arb
@@ -20,7 +20,6 @@
"codeIssuerHint": "Émetteur",
"codeSecretKeyHint": "Clé secrète",
"codeAccountHint": "Compte (vous@exemple.com)",
- "accountKeyType": "Type de clé",
"sessionExpired": "Session expirée",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
diff --git a/auth/lib/l10n/arb/app_he.arb b/auth/lib/l10n/arb/app_he.arb
index 33058509796fdde5dee7d093e8371e30eae75884..8f22e1e82c9eb550af1f6e35cb84c2394d850ed6 100644
--- a/auth/lib/l10n/arb/app_he.arb
+++ b/auth/lib/l10n/arb/app_he.arb
@@ -19,7 +19,6 @@
"codeIssuerHint": "מנפיק",
"codeSecretKeyHint": "מפתח סודי",
"codeAccountHint": "חשבון(you@domain.com)",
- "accountKeyType": "סוג מפתח",
"sessionExpired": "זמן החיבור הסתיים",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
diff --git a/auth/lib/l10n/arb/app_it.arb b/auth/lib/l10n/arb/app_it.arb
index e35fd11dc02223ca6b963318fe149c215bfb111b..92543ed82161aad89a04437f3d9e330175ed2099 100644
--- a/auth/lib/l10n/arb/app_it.arb
+++ b/auth/lib/l10n/arb/app_it.arb
@@ -20,7 +20,6 @@
"codeIssuerHint": "Emittente",
"codeSecretKeyHint": "Codice segreto",
"codeAccountHint": "Account (username@dominio.it)",
- "accountKeyType": "Tipo di chiave",
"sessionExpired": "Sessione scaduta",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
diff --git a/auth/lib/l10n/arb/app_ja.arb b/auth/lib/l10n/arb/app_ja.arb
index 60d0a51507304cd163374a4c639f0083b9defc17..8fea34c5e13a2917bd5e0ce6509e66d5a050e483 100644
--- a/auth/lib/l10n/arb/app_ja.arb
+++ b/auth/lib/l10n/arb/app_ja.arb
@@ -20,7 +20,6 @@
"codeIssuerHint": "発行者",
"codeSecretKeyHint": "秘密鍵",
"codeAccountHint": "アカウント (you@domain.com)",
- "accountKeyType": "鍵の種類",
"sessionExpired": "セッションが失効しました",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
diff --git a/auth/lib/l10n/arb/app_ka.arb b/auth/lib/l10n/arb/app_ka.arb
index cb7dc8281856f4eac42fbc20c673789e75058fcf..93631df2d591563404251d794c0d7e21738cac15 100644
--- a/auth/lib/l10n/arb/app_ka.arb
+++ b/auth/lib/l10n/arb/app_ka.arb
@@ -20,7 +20,6 @@
"codeIssuerHint": "მომწოდებელი",
"codeSecretKeyHint": "გასაღები",
"codeAccountHint": "ანგარიში (you@domain.com)",
- "accountKeyType": "გასაღების ტიპი",
"sessionExpired": "სესიის დრო ამოიწურა",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
diff --git a/auth/lib/l10n/arb/app_nl.arb b/auth/lib/l10n/arb/app_nl.arb
index 2e84ae11bb0692b6468f738f7106d8419cd702ef..36280f69dc7b16eb20fa6ebd5b5f019a778cc601 100644
--- a/auth/lib/l10n/arb/app_nl.arb
+++ b/auth/lib/l10n/arb/app_nl.arb
@@ -20,7 +20,6 @@
"codeIssuerHint": "Uitgever",
"codeSecretKeyHint": "Geheime sleutel",
"codeAccountHint": "Account (jij@domein.nl)",
- "accountKeyType": "Type sleutel",
"sessionExpired": "Sessie verlopen",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
diff --git a/auth/lib/l10n/arb/app_pl.arb b/auth/lib/l10n/arb/app_pl.arb
index 8ebc935dc8c841a035ebc7a4ec629041e1ca5e5a..796623def2b0ffd5808ebf4c6d5ad7249273dd82 100644
--- a/auth/lib/l10n/arb/app_pl.arb
+++ b/auth/lib/l10n/arb/app_pl.arb
@@ -20,7 +20,6 @@
"codeIssuerHint": "Wydawca",
"codeSecretKeyHint": "Tajny klucz",
"codeAccountHint": "Konto (ty@domena.com)",
- "accountKeyType": "Rodzaj klucza",
"sessionExpired": "Sesja wygasła",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
@@ -186,6 +185,8 @@
"recoveryKeySaveDescription": "Nie przechowujemy tego klucza, proszę zachować ten 24 wyrazowy klucz w bezpiecznym miejscu.",
"doThisLater": "Zrób To Później",
"saveKey": "Zapisz klucz",
+ "save": "Zapisz",
+ "send": "Wyślij",
"back": "Wstecz",
"createAccount": "Utwórz konto",
"passwordStrength": "Siła hasła: {passwordStrengthValue}",
@@ -336,6 +337,10 @@
"@androidBiometricNotRecognized": {
"description": "Message to let the user know that authentication was failed. It is used on Android side. Maximum 60 characters."
},
+ "androidCancelButton": "Anuluj",
+ "@androidCancelButton": {
+ "description": "Message showed on a button that the user can click to leave the current dialog. It is used on Android side. Maximum 30 characters."
+ },
"androidSignInTitle": "Wymagana autoryzacja",
"@androidSignInTitle": {
"description": "Message showed as a title in a dialog which indicates the user that they need to scan biometric to continue. It is used on Android side. Maximum 60 characters."
diff --git a/auth/lib/l10n/arb/app_pt.arb b/auth/lib/l10n/arb/app_pt.arb
index b27a018fba057282826c79597d2b98b61c357174..9b1f5b1b0af4cc1d210149b009f21146fe73c1e1 100644
--- a/auth/lib/l10n/arb/app_pt.arb
+++ b/auth/lib/l10n/arb/app_pt.arb
@@ -20,7 +20,6 @@
"codeIssuerHint": "Emissor",
"codeSecretKeyHint": "Chave secreta",
"codeAccountHint": "Conta (voce@dominio.com)",
- "accountKeyType": "Tipo de chave",
"sessionExpired": "Sessão expirada",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
diff --git a/auth/lib/l10n/arb/app_ru.arb b/auth/lib/l10n/arb/app_ru.arb
index 7ae37a87b91501078a7007eec30cc0bd1f69e6a2..ca98611ee1542576c829b401749e7cf3d1ad6317 100644
--- a/auth/lib/l10n/arb/app_ru.arb
+++ b/auth/lib/l10n/arb/app_ru.arb
@@ -20,7 +20,6 @@
"codeIssuerHint": "Эмитент",
"codeSecretKeyHint": "Секретный ключ",
"codeAccountHint": "Аккаунт (you@domain.com)",
- "accountKeyType": "Тип ключа",
"sessionExpired": "Сеанс истек",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
diff --git a/auth/lib/l10n/arb/app_sv.arb b/auth/lib/l10n/arb/app_sv.arb
index cfb41d7bdc573579036c4106976fdf78778cb48d..9761325ce108c096a14bf10bb6a1253aaee8379c 100644
--- a/auth/lib/l10n/arb/app_sv.arb
+++ b/auth/lib/l10n/arb/app_sv.arb
@@ -16,7 +16,6 @@
"codeIssuerHint": "Utfärdare",
"codeSecretKeyHint": "Secret Key",
"codeAccountHint": "Konto (du@domän.com)",
- "accountKeyType": "Typ av nyckel",
"sessionExpired": "Sessionen har gått ut",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
diff --git a/auth/lib/l10n/arb/app_ti.arb b/auth/lib/l10n/arb/app_ti.arb
index 27147ebb6e02fae0010103b8f6877c3922f4539f..b41128f6eaf59f2404acd9c750de35f5be05bc10 100644
--- a/auth/lib/l10n/arb/app_ti.arb
+++ b/auth/lib/l10n/arb/app_ti.arb
@@ -20,7 +20,6 @@
"codeIssuerHint": "ኣዋጂ",
"codeSecretKeyHint": "ምስጢራዊ መፍትሕ",
"codeAccountHint": "ሕሳብ (you@domain.com)",
- "accountKeyType": "ዓይነት መፍትሕ",
"sessionExpired": "ክፍለ ግዜኡ ኣኺሉ።",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
diff --git a/auth/lib/l10n/arb/app_tr.arb b/auth/lib/l10n/arb/app_tr.arb
index 9b847faf0fb453a0a23e509d842983ed6d915396..322af5f48c444c4c0ef2094f3bd68082b6ecf4c0 100644
--- a/auth/lib/l10n/arb/app_tr.arb
+++ b/auth/lib/l10n/arb/app_tr.arb
@@ -20,7 +20,6 @@
"codeIssuerHint": "Yayınlayan",
"codeSecretKeyHint": "Gizli Anahtar",
"codeAccountHint": "Hesap (ornek@domain.com)",
- "accountKeyType": "Anahtar türü",
"sessionExpired": "Oturum süresi doldu",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
diff --git a/auth/lib/l10n/arb/app_vi.arb b/auth/lib/l10n/arb/app_vi.arb
index e318f9b557e38f7f703401a1891a710a4c7e1248..a8cccdbec555e05d20b04ade136214e2eaea2f82 100644
--- a/auth/lib/l10n/arb/app_vi.arb
+++ b/auth/lib/l10n/arb/app_vi.arb
@@ -20,7 +20,6 @@
"codeIssuerHint": "Nhà phát hành",
"codeSecretKeyHint": "Khóa bí mật",
"codeAccountHint": "Tài khoản (bạn@miền.com)",
- "accountKeyType": "Loại khóa",
"sessionExpired": "Phiên làm việc đã hết hạn",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
diff --git a/auth/lib/l10n/arb/app_zh.arb b/auth/lib/l10n/arb/app_zh.arb
index 077ee26fdf66c234a3e7cd3ac45a99efc129b211..c50e76c1ddd8eb86b85e73e29d1cf014fb71d974 100644
--- a/auth/lib/l10n/arb/app_zh.arb
+++ b/auth/lib/l10n/arb/app_zh.arb
@@ -20,7 +20,6 @@
"codeIssuerHint": "发行人",
"codeSecretKeyHint": "私钥",
"codeAccountHint": "账户 (you@domain.com)",
- "accountKeyType": "密钥类型",
"sessionExpired": "会话已过期",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
diff --git a/cli/cmd/account.go b/cli/cmd/account.go
index a4c78fb10e31a10ea4388b22467cf0320fd36826..4bc48dcf304a472bba2c28fbd358b3b133047a65 100644
--- a/cli/cmd/account.go
+++ b/cli/cmd/account.go
@@ -27,7 +27,8 @@ var listAccCmd = &cobra.Command{
// Subcommand for 'account add'
var addAccCmd = &cobra.Command{
Use: "add",
- Short: "Add a new account",
+ Short: "Log in to an existing account",
+ Long: "Use this command to add an existing account to the CLI. To create a new account, use the mobile, web or desktop app",
Run: func(cmd *cobra.Command, args []string) {
recoverWithLog()
ctrl.AddAccount(context.Background())
diff --git a/cli/docs/generated/ente.md b/cli/docs/generated/ente.md
index b9d3cde1762c85758ce19913ff1260c98303b572..4f85dd0980b94f13f4ab3f5f75dfceab9d9c18c7 100644
--- a/cli/docs/generated/ente.md
+++ b/cli/docs/generated/ente.md
@@ -25,4 +25,4 @@ ente [flags]
* [ente export](ente_export.md) - Starts the export process
* [ente version](ente_version.md) - Prints the current version
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024
diff --git a/cli/docs/generated/ente_account.md b/cli/docs/generated/ente_account.md
index c48a6533655c6ecc4fa7289a817668ce5580abfc..41c37b0547295f492695bf62c596729cd42af235 100644
--- a/cli/docs/generated/ente_account.md
+++ b/cli/docs/generated/ente_account.md
@@ -11,9 +11,9 @@ Manage account settings
### SEE ALSO
* [ente](ente.md) - CLI tool for exporting your photos from ente.io
-* [ente account add](ente_account_add.md) - Add a new account
+* [ente account add](ente_account_add.md) - Log in to an existing account
* [ente account get-token](ente_account_get-token.md) - Get token for an account for a specific app
* [ente account list](ente_account_list.md) - list configured accounts
* [ente account update](ente_account_update.md) - Update an existing account's export directory
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024
diff --git a/cli/docs/generated/ente_account_add.md b/cli/docs/generated/ente_account_add.md
index 1904ca370273729452ac1b4fc82cad4e4f97dd25..1e86ae12f7458a5a36ac6162614faba68e7c113c 100644
--- a/cli/docs/generated/ente_account_add.md
+++ b/cli/docs/generated/ente_account_add.md
@@ -1,6 +1,10 @@
## ente account add
-Add a new account
+Log in to an existing account
+
+### Synopsis
+
+Use this command to add an existing account to the CLI. To create a new account, use the mobile, web or desktop app
```
ente account add [flags]
@@ -16,4 +20,4 @@ ente account add [flags]
* [ente account](ente_account.md) - Manage account settings
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024
diff --git a/cli/docs/generated/ente_account_get-token.md b/cli/docs/generated/ente_account_get-token.md
index d7ee77255c7e50d6bd69a248590e5454ff22b3a8..3d8814d7d11135e9469e3a5c421ccf73de4a8aae 100644
--- a/cli/docs/generated/ente_account_get-token.md
+++ b/cli/docs/generated/ente_account_get-token.md
@@ -18,4 +18,4 @@ ente account get-token [flags]
* [ente account](ente_account.md) - Manage account settings
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024
diff --git a/cli/docs/generated/ente_account_list.md b/cli/docs/generated/ente_account_list.md
index cfc59bb8d21609b6924c973b464d807b748cd948..a7677eb85552b3edaae447cd858437e18e9f787c 100644
--- a/cli/docs/generated/ente_account_list.md
+++ b/cli/docs/generated/ente_account_list.md
@@ -16,4 +16,4 @@ ente account list [flags]
* [ente account](ente_account.md) - Manage account settings
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024
diff --git a/cli/docs/generated/ente_account_update.md b/cli/docs/generated/ente_account_update.md
index acb65412aa98c71e3ad82beab3768e86fdef3471..8d9c8d7e54316f194ac1f7d5be7f19a704514e3b 100644
--- a/cli/docs/generated/ente_account_update.md
+++ b/cli/docs/generated/ente_account_update.md
@@ -19,4 +19,4 @@ ente account update [flags]
* [ente account](ente_account.md) - Manage account settings
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024
diff --git a/cli/docs/generated/ente_admin.md b/cli/docs/generated/ente_admin.md
index aafe51b396281a4aaa4ce3928c761efd16638ecf..5ac72489d628b5ee0da44fce583276e86bb7e395 100644
--- a/cli/docs/generated/ente_admin.md
+++ b/cli/docs/generated/ente_admin.md
@@ -21,4 +21,4 @@ Commands for admin actions like disable or enabling 2fa, bumping up the storage
* [ente admin list-users](ente_admin_list-users.md) - List all users
* [ente admin update-subscription](ente_admin_update-subscription.md) - Update subscription for user
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024
diff --git a/cli/docs/generated/ente_admin_delete-user.md b/cli/docs/generated/ente_admin_delete-user.md
index 56c96841ed3dfeb3a77779e83eb7010f169066db..a1d52a73d2b43fba790c85dffdd1f65af562fc85 100644
--- a/cli/docs/generated/ente_admin_delete-user.md
+++ b/cli/docs/generated/ente_admin_delete-user.md
@@ -18,4 +18,4 @@ ente admin delete-user [flags]
* [ente admin](ente_admin.md) - Commands for admin actions
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024
diff --git a/cli/docs/generated/ente_admin_disable-2fa.md b/cli/docs/generated/ente_admin_disable-2fa.md
index 333f0912e31c25ff1ab25d4fdf83d6b606a8b328..23cd33080053703c71cdc7734d7d856991d1ab01 100644
--- a/cli/docs/generated/ente_admin_disable-2fa.md
+++ b/cli/docs/generated/ente_admin_disable-2fa.md
@@ -18,4 +18,4 @@ ente admin disable-2fa [flags]
* [ente admin](ente_admin.md) - Commands for admin actions
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024
diff --git a/cli/docs/generated/ente_admin_get-user-id.md b/cli/docs/generated/ente_admin_get-user-id.md
index 3d26f624ac9e01e9fa14b232ed5ab20472ba3221..47d632abb6778b71fc5b2c781a8e37fb3534dcc9 100644
--- a/cli/docs/generated/ente_admin_get-user-id.md
+++ b/cli/docs/generated/ente_admin_get-user-id.md
@@ -18,4 +18,4 @@ ente admin get-user-id [flags]
* [ente admin](ente_admin.md) - Commands for admin actions
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024
diff --git a/cli/docs/generated/ente_admin_list-users.md b/cli/docs/generated/ente_admin_list-users.md
index 8841df57b5bb8cb5674242a59560e70e69b6cd38..635e8ec3cdd6122b8cd4d1b8e0cd327d020b7714 100644
--- a/cli/docs/generated/ente_admin_list-users.md
+++ b/cli/docs/generated/ente_admin_list-users.md
@@ -17,4 +17,4 @@ ente admin list-users [flags]
* [ente admin](ente_admin.md) - Commands for admin actions
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024
diff --git a/cli/docs/generated/ente_admin_update-subscription.md b/cli/docs/generated/ente_admin_update-subscription.md
index cc1fa96234390aad7017d5ae339e2053f84c8211..d0fadcd2ba3d75c2e7a28351f841aab9ea61c0ec 100644
--- a/cli/docs/generated/ente_admin_update-subscription.md
+++ b/cli/docs/generated/ente_admin_update-subscription.md
@@ -23,4 +23,4 @@ ente admin update-subscription [flags]
* [ente admin](ente_admin.md) - Commands for admin actions
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024
diff --git a/cli/docs/generated/ente_auth.md b/cli/docs/generated/ente_auth.md
index 5770f36f39d6bbb9caac5c377d1d4aaca1ee86fa..e0e97d84fc7535e491c4b3b5ed7c3f04e585cf57 100644
--- a/cli/docs/generated/ente_auth.md
+++ b/cli/docs/generated/ente_auth.md
@@ -13,4 +13,4 @@ Authenticator commands
* [ente](ente.md) - CLI tool for exporting your photos from ente.io
* [ente auth decrypt](ente_auth_decrypt.md) - Decrypt authenticator export
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024
diff --git a/cli/docs/generated/ente_auth_decrypt.md b/cli/docs/generated/ente_auth_decrypt.md
index e573db2a332035e2577558359babd9f2eba212a0..c9db6ea545d4c1a34e186cbd2b571f54996ff4cf 100644
--- a/cli/docs/generated/ente_auth_decrypt.md
+++ b/cli/docs/generated/ente_auth_decrypt.md
@@ -16,4 +16,4 @@ ente auth decrypt [input] [output] [flags]
* [ente auth](ente_auth.md) - Authenticator commands
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024
diff --git a/cli/docs/generated/ente_export.md b/cli/docs/generated/ente_export.md
index c5783236cfbd8cd15e50b1bc65b3283217a95e16..d809e06e46419c694d24ccf0f44c34203a8a7d2c 100644
--- a/cli/docs/generated/ente_export.md
+++ b/cli/docs/generated/ente_export.md
@@ -16,4 +16,4 @@ ente export [flags]
* [ente](ente.md) - CLI tool for exporting your photos from ente.io
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024
diff --git a/cli/docs/generated/ente_version.md b/cli/docs/generated/ente_version.md
index b51055697f7ad064d872ac2b9874b8701edb048d..08f384b52f3c556f6c4d2ae72157acdda94c4b06 100644
--- a/cli/docs/generated/ente_version.md
+++ b/cli/docs/generated/ente_version.md
@@ -16,4 +16,4 @@ ente version [flags]
* [ente](ente.md) - CLI tool for exporting your photos from ente.io
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024
diff --git a/cli/pkg/account.go b/cli/pkg/account.go
index 9363e2f80ba2ce0c60a877f2f4b621f65d39558d..e411ffacd52b236b184b38c25d5db51688fdc580 100644
--- a/cli/pkg/account.go
+++ b/cli/pkg/account.go
@@ -59,7 +59,7 @@ func (c *ClICtrl) AddAccount(cxt context.Context) {
authResponse, flowErr = c.validateTOTP(cxt, authResponse)
}
if authResponse.EncryptedToken == "" || authResponse.KeyAttributes == nil {
- panic("no encrypted token or keyAttributes")
+ log.Fatalf("missing key attributes or token.\nNote: Please use the mobile, web or desktop app to create a new account.\nIf you are trying to log in to an existing account, please report a bug.")
}
secretInfo, decErr := c.decryptAccSecretInfo(cxt, authResponse, keyEncKey)
if decErr != nil {
diff --git a/desktop/.github/workflows/desktop-release.yml b/desktop/.github/workflows/desktop-release.yml
index 2fa38237672723a19e175a0621ed8c714ad6b7de..70eedf3ea6b2d9e399547a641493f445f324a795 100644
--- a/desktop/.github/workflows/desktop-release.yml
+++ b/desktop/.github/workflows/desktop-release.yml
@@ -1,20 +1,12 @@
name: "Release"
-# This will create a new draft release with public artifacts.
+# Build the desktop/rc branch of ente-io/ente and create/update a draft release.
#
-# Note that a release will only get created if there is an associated tag
-# (GitHub releases need a corresponding tag).
-#
-# The canonical source for this action is in the repository where we keep the
-# source code for the Ente Photos desktop app: https://github.com/ente-io/ente
-#
-# However, it actually lives and runs in the repository that we use for making
-# releases: https://github.com/ente-io/photos-desktop
-#
-# We need two repositories because Electron updater currently doesn't work well
-# with monorepos. For more details, see `docs/release.md`.
+# For more details, see `docs/release.md` in ente-io/ente.
on:
+ # Trigger manually, or with `gh workflow run desktop-release.yml`.
+ workflow_dispatch:
push:
# Run when a tag matching the pattern "v*"" is pushed.
#
@@ -38,11 +30,9 @@ jobs:
- name: Checkout code
uses: actions/checkout@v4
with:
- # Checkout the tag photosd-v1.x.x from the source code
- # repository when we're invoked for tag v1.x.x on the releases
- # repository.
+ # Checkout the desktop/rc branch from the source repository.
repository: ente-io/ente
- ref: photosd-${{ github.ref_name }}
+ ref: desktop/rc
submodules: recursive
- name: Setup node
@@ -50,6 +40,11 @@ jobs:
with:
node-version: 20
+ - name: Increase yarn timeout
+ # `yarn install` times out sometimes on the Windows runner,
+ # resulting in flaky builds.
+ run: yarn config set network-timeout 900000 -g
+
- name: Install dependencies
run: yarn install
@@ -63,13 +58,15 @@ jobs:
uses: ente-io/action-electron-builder@v1.0.0
with:
package_root: desktop
+ build_script_name: build:ci
# GitHub token, automatically provided to the action
# (No need to define this secret in the repo settings)
github_token: ${{ secrets.GITHUB_TOKEN }}
# If the commit is tagged with a version (e.g. "v1.0.0"),
- # release the app after building.
+ # create a (draft) release after building. Otherwise upload
+ # assets to the existing draft named after the version.
release: ${{ startsWith(github.ref, 'refs/tags/v') }}
mac_certs: ${{ secrets.MAC_CERTS }}
diff --git a/desktop/CHANGELOG.md b/desktop/CHANGELOG.md
index eb118a424d84c41cbdc544832c3f7eef4f843056..5fbbefaaa822c6a5d5705d7502ee78378e22e619 100644
--- a/desktop/CHANGELOG.md
+++ b/desktop/CHANGELOG.md
@@ -2,11 +2,17 @@
## v1.7.0 (Unreleased)
-v1.7 is a major rewrite to improve the security of our app. We have enabled
-sandboxing and disabled node integration for the renderer process. All this
-required restructuring our IPC mechanisms, which resulted in a lot of under the
-hood changes. The outcome is a more secure app that also uses the latest and
-greatest Electron recommendations.
+v1.7 is a major rewrite to improve the security of our app. In particular, the
+UI and the native parts of the app now run isolated from each other and
+communicate only using a predefined IPC boundary.
+
+Other highlights:
+
+- View your photos on big screens and Chromecast devices by using the "Play
+ album on TV" option in the album menu.
+- Support Brazilian Portuguese, German and Russian.
+- Provide a checkbox to select all photos in a day.
+- Fix a case where the dedup screen would not refresh after removing items.
## v1.6.63
diff --git a/desktop/docs/release.md b/desktop/docs/release.md
index b55c96326d9ff18c69c9c5657176b3de1d600655..1cda1c11b17762f57889e6d9894afb1e268bf387 100644
--- a/desktop/docs/release.md
+++ b/desktop/docs/release.md
@@ -1,46 +1,64 @@
## Releases
-Conceptually, the release is straightforward: We push a tag, a GitHub workflow
-gets triggered that creates a draft release with artifacts built from that tag.
-We then publish that release. The download links on our website, and existing
-apps already know how to check for the latest GitHub release and update
-accordingly.
+Conceptually, the release is straightforward: We trigger a GitHub workflow that
+creates a draft release with the built artifacts. When ready, we publish that
+release. The download links on our website, and the existing apps, already check
+for the latest GitHub release and update accordingly.
-The complication comes by the fact that Electron Updater (the mechanism that we
-use for auto updates) doesn't work well with monorepos. So we need to keep a
-separate (non-mono) repository just for doing releases.
+The complication comes from the fact that electron-builder's auto updater (the
+mechanism that we use for auto updates) doesn't work with monorepos. So we need
+to keep a separate (non-mono) repository just for doing releases.
- Source code lives here, in [ente-io/ente](https://github.com/ente-io/ente).
- Releases are done from
[ente-io/photos-desktop](https://github.com/ente-io/photos-desktop).
-## Workflow
+## Workflow - Release Candidates
-The workflow is:
+Leading up to the release, we can make one or more draft releases that are not
+intended to be published, but serve as test release candidates.
-1. Finalize the changes in the source repo.
+The workflow for making such "rc" builds is:
- - Update the CHANGELOG.
- - Update the version in `package.json`
- - `git commit -m "[photosd] Release v1.2.3"`
- - Open PR, merge into main.
+1. Update `package.json` in the source repo to use version `1.x.x-rc`. Create a
+ new draft release in the release repo with title `1.x.x-rc`. In the tag
+ input enter `v1.x.x-rc` and select the option to "create a new tag on
+ publish".
-2. Tag the merge commit with a tag matching the pattern `photosd-v1.2.3`, where
- `1.2.3` is the version in `package.json`
+2. Push code to the `desktop/rc` branch in the source repo.
+
+3. Trigger the GitHub action in the release repo
```sh
- git tag photosd-v1.x.x
- git push origin photosd-v1.x.x
+ gh workflow run desktop-release.yml
```
-3. Head over to the releases repository and run the trigger script, passing it
- the tag _without_ the `photosd-` prefix.
+We can do steps 2 and 3 multiple times: each time it'll just update the
+artifacts attached to the same draft.
+
+## Workflow - Release
+
+1. Update the source repo to set version `1.x.x` in `package.json` and finalize
+ the CHANGELOG.
+
+2. Push code to the `desktop/rc` branch in the source repo.
+
+3. In the release repo
```sh
./.github/trigger-release.sh v1.x.x
```
+4. If the build is successful, tag `desktop/rc` in the source repo.
+
+ ```sh
+ # Assuming we're on the desktop/rc branch that just got built
+
+ git tag photosd-v1.x.x
+ git push origin photosd-v1.x.x
+ ```
+
## Post build
The GitHub Action runs on Windows, Linux and macOS. It produces the artifacts
diff --git a/desktop/electron-builder.yml b/desktop/electron-builder.yml
index f62033fb9c3c4d107a3ad1ee92dbbcccdecdd105..c2c000ce9f223513536f7f32afa8997ebe671160 100644
--- a/desktop/electron-builder.yml
+++ b/desktop/electron-builder.yml
@@ -29,5 +29,3 @@ mac:
arch: [universal]
category: public.app-category.photography
hardenedRuntime: true
- notarize: true
-afterSign: electron-builder-notarize
diff --git a/desktop/package.json b/desktop/package.json
index 462857a8bf4b61dbc36af4b395a695bc0b1fb7d5..7297a0c17be7beebfd8759210f8a6ab649b40117 100644
--- a/desktop/package.json
+++ b/desktop/package.json
@@ -1,6 +1,6 @@
{
"name": "ente",
- "version": "1.7.0-beta.0",
+ "version": "1.7.0-rc",
"private": true,
"description": "Desktop client for Ente Photos",
"repository": "github:ente-io/photos-desktop",
@@ -11,6 +11,7 @@
"build-main": "tsc && electron-builder",
"build-main:quick": "tsc && electron-builder --dir --config.compression=store --config.mac.identity=null",
"build-renderer": "cd ../web && yarn install && yarn build:photos && cd ../desktop && shx rm -f out && shx ln -sf ../web/apps/photos/out out",
+ "build:ci": "yarn build-renderer && tsc",
"build:quick": "yarn build-renderer && yarn build-main:quick",
"dev": "concurrently --kill-others --success first --names 'main,rndr' \"yarn dev-main\" \"yarn dev-renderer\"",
"dev-main": "tsc && electron app/main.js",
@@ -46,7 +47,6 @@
"concurrently": "^8",
"electron": "^30",
"electron-builder": "25.0.0-alpha.6",
- "electron-builder-notarize": "^1.5",
"eslint": "^8",
"prettier": "^3",
"prettier-plugin-organize-imports": "^3",
diff --git a/desktop/src/main.ts b/desktop/src/main.ts
index 49b3162061953f90026ed196821089996d834733..9cba9178df2a4a3a9ee09dc676dcb07fe7ac23b8 100644
--- a/desktop/src/main.ts
+++ b/desktop/src/main.ts
@@ -142,7 +142,7 @@ const createMainWindow = () => {
// Create the main window. This'll show our web content.
const window = new BrowserWindow({
webPreferences: {
- preload: path.join(app.getAppPath(), "preload.js"),
+ preload: path.join(__dirname, "preload.js"),
sandbox: true,
},
// The color to show in the window until the web content gets loaded.
@@ -287,13 +287,29 @@ const setupTrayItem = (mainWindow: BrowserWindow) => {
/**
* Older versions of our app used to maintain a cache dir using the main
- * process. This has been deprecated in favor of using a normal web cache.
+ * process. This has been removed in favor of a cache on the web layer.
*
- * Delete the old cache dir if it exists. This code was added March 2024, and
- * can be removed after some time once most people have upgraded to newer
- * versions.
+ * Delete the old cache dir if it exists.
+ *
+ * This will happen in two phases. The cache had three subdirectories:
+ *
+ * - Two of them, "thumbs" and "files", will be removed now (v1.7.0, May 2024).
+ *
+ * - The third one, "face-crops" will be removed once we finish the face search
+ * changes. See: [Note: Legacy face crops].
+ *
+ * This migration code can be removed after some time once most people have
+ * upgraded to newer versions.
*/
const deleteLegacyDiskCacheDirIfExists = async () => {
+ const removeIfExists = async (dirPath: string) => {
+ if (existsSync(dirPath)) {
+ log.info(`Removing legacy disk cache from ${dirPath}`);
+ await fs.rm(dirPath, { recursive: true });
+ }
+ };
+ // [Note: Getting the cache path]
+ //
// The existing code was passing "cache" as a parameter to getPath.
//
// However, "cache" is not a valid parameter to getPath. It works! (for
@@ -309,8 +325,8 @@ const deleteLegacyDiskCacheDirIfExists = async () => {
// @ts-expect-error "cache" works but is not part of the public API.
const cacheDir = path.join(app.getPath("cache"), "ente");
if (existsSync(cacheDir)) {
- log.info(`Removing legacy disk cache from ${cacheDir}`);
- await fs.rm(cacheDir, { recursive: true });
+ await removeIfExists(path.join(cacheDir, "thumbs"));
+ await removeIfExists(path.join(cacheDir, "files"));
}
};
@@ -375,7 +391,7 @@ const main = () => {
// Continue on with the rest of the startup sequence.
Menu.setApplicationMenu(await createApplicationMenu(mainWindow));
setupTrayItem(mainWindow);
- if (!isDev) setupAutoUpdater(mainWindow);
+ setupAutoUpdater(mainWindow);
try {
await deleteLegacyDiskCacheDirIfExists();
diff --git a/desktop/src/main/ipc.ts b/desktop/src/main/ipc.ts
index f59969202bbe987e10f7c61e9cba04e878523e28..5072db29ea5325961937d67d8da28a7459e424ad 100644
--- a/desktop/src/main/ipc.ts
+++ b/desktop/src/main/ipc.ts
@@ -24,6 +24,7 @@ import {
updateOnNextRestart,
} from "./services/app-update";
import {
+ legacyFaceCrop,
openDirectory,
openLogDirectory,
selectDirectory,
@@ -68,6 +69,7 @@ import {
watchUpdateIgnoredFiles,
watchUpdateSyncedFiles,
} from "./services/watch";
+import { clearConvertToMP4Results } from "./stream";
/**
* Listen for IPC events sent/invoked by the renderer process, and route them to
@@ -107,6 +109,8 @@ export const attachIPCHandlers = () => {
ipcMain.on("clearStores", () => clearStores());
+ ipcMain.on("clearConvertToMP4Results", () => clearConvertToMP4Results());
+
ipcMain.handle("saveEncryptionKey", (_, encryptionKey: string) =>
saveEncryptionKey(encryptionKey),
);
@@ -170,14 +174,7 @@ export const attachIPCHandlers = () => {
command: string[],
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
outputFileExtension: string,
- timeoutMS: number,
- ) =>
- ffmpegExec(
- command,
- dataOrPathOrZipItem,
- outputFileExtension,
- timeoutMS,
- ),
+ ) => ffmpegExec(command, dataOrPathOrZipItem, outputFileExtension),
);
// - ML
@@ -198,6 +195,10 @@ export const attachIPCHandlers = () => {
faceEmbedding(input),
);
+ ipcMain.handle("legacyFaceCrop", (_, faceID: string) =>
+ legacyFaceCrop(faceID),
+ );
+
// - Upload
ipcMain.handle("listZipItems", (_, zipPath: string) =>
diff --git a/desktop/src/main/log.ts b/desktop/src/main/log.ts
index cf1404a90a9a02e9c29b39b9be3167132a87d999..60870c913c47322de1d33a8bd6d16571645476d7 100644
--- a/desktop/src/main/log.ts
+++ b/desktop/src/main/log.ts
@@ -5,11 +5,8 @@ import { isDev } from "./utils/electron";
/**
* Initialize logging in the main process.
*
- * This will set our underlying logger up to log to a file named `ente.log`,
- *
- * - on Linux at ~/.config/ente/logs/ente.log
- * - on macOS at ~/Library/Logs/ente/ente.log
- * - on Windows at %USERPROFILE%\AppData\Roaming\ente\logs\ente.log
+ * This will set our underlying logger up to log to a file named `ente.log`, see
+ * [Note: App log path].
*
* On dev builds, it will also log to the console.
*/
@@ -62,7 +59,7 @@ const logError = (message: string, e?: unknown) => {
const logError_ = (message: string) => {
log.error(`[main] [error] ${message}`);
- if (isDev) console.error(`[error] ${message}`);
+ console.error(`[error] ${message}`);
};
const logInfo = (...params: unknown[]) => {
@@ -96,8 +93,8 @@ export default {
* any arbitrary object that we obtain, say, when in a try-catch handler (in
* JavaScript any arbitrary value can be thrown).
*
- * The log is written to disk. In development builds, the log is also
- * printed to the main (Node.js) process console.
+ * The log is written to disk and printed to the main (Node.js) process's
+ * console.
*/
error: logError,
/**
@@ -120,7 +117,7 @@ export default {
* The function can return an arbitrary value which is serialized before
* being logged.
*
- * This log is NOT written to disk. And it is printed to the main (Node.js)
+ * This log is NOT written to disk. It is printed to the main (Node.js)
* process console, but only on development builds.
*/
debug: logDebug,
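
As a small illustration of the `debug` contract described above, a usage sketch
(the call site and message are made up):

```ts
import log from "./log";

// log.debug takes a thunk, so the (potentially expensive) message is only
// computed when the debug log is actually emitted, i.e. on dev builds. The
// returned value is serialized before being logged.
const expensiveSummary = () => ({ itemCount: 42, computedAt: Date.now() });

log.debug(() => ["sync complete", expensiveSummary()]);
```
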
diff --git a/desktop/src/main/menu.ts b/desktop/src/main/menu.ts
index b6fa7acfe8c01c3fab548835803587225df4b674..188b195f825f1751bfa5f5a32358f8ed65db1a39 100644
--- a/desktop/src/main/menu.ts
+++ b/desktop/src/main/menu.ts
@@ -10,7 +10,6 @@ import { forceCheckForAppUpdates } from "./services/app-update";
import autoLauncher from "./services/auto-launcher";
import { openLogDirectory } from "./services/dir";
import { userPreferences } from "./stores/user-preferences";
-import { isDev } from "./utils/electron";
/** Create and return the entries in the app's main menu bar */
export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
@@ -24,9 +23,6 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
const macOSOnly = (options: MenuItemConstructorOptions[]) =>
process.platform == "darwin" ? options : [];
- const devOnly = (options: MenuItemConstructorOptions[]) =>
- isDev ? options : [];
-
const handleCheckForUpdates = () => forceCheckForAppUpdates(mainWindow);
const handleViewChangelog = () =>
@@ -86,12 +82,14 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
checked: isAutoLaunchEnabled,
click: toggleAutoLaunch,
},
- {
- label: "Hide Dock Icon",
- type: "checkbox",
- checked: shouldHideDockIcon,
- click: toggleHideDockIcon,
- },
+ ...macOSOnly([
+ {
+ label: "Hide Dock Icon",
+ type: "checkbox",
+ checked: shouldHideDockIcon,
+ click: toggleHideDockIcon,
+ },
+ ]),
],
},
@@ -130,11 +128,11 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
submenu: [
{
role: "startSpeaking",
- label: "start speaking",
+ label: "Start Speaking",
},
{
role: "stopSpeaking",
- label: "stop speaking",
+ label: "Stop Speaking",
},
],
},
@@ -145,9 +143,7 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
label: "View",
submenu: [
{ label: "Reload", role: "reload" },
- ...devOnly([
- { label: "Toggle Dev Tools", role: "toggleDevTools" },
- ]),
+ { label: "Toggle Dev Tools", role: "toggleDevTools" },
{ type: "separator" },
{ label: "Toggle Full Screen", role: "togglefullscreen" },
],
diff --git a/desktop/src/main/services/app-update.ts b/desktop/src/main/services/app-update.ts
index 8d66cb8c3ba3d7c382802204974ce40e8dfddadc..ed6dc0e18fa2954c51cc477f19ff0728f91a5e98 100644
--- a/desktop/src/main/services/app-update.ts
+++ b/desktop/src/main/services/app-update.ts
@@ -6,11 +6,90 @@ import { allowWindowClose } from "../../main";
import { AppUpdate } from "../../types/ipc";
import log from "../log";
import { userPreferences } from "../stores/user-preferences";
+import { isDev } from "../utils/electron";
export const setupAutoUpdater = (mainWindow: BrowserWindow) => {
autoUpdater.logger = electronLog;
autoUpdater.autoDownload = false;
+ /**
+ * [Note: Testing auto updates]
+ *
+     * By default, we skip checking for updates automatically in dev builds.
+     * This is because installing updates would fail anyway (at least on
+     * macOS), since auto updates only work for signed builds.
+ *
+     * So end-to-end testing of updates requires using a temporary GitHub
+     * repository and signed builds therein. More on this later.
+ *
+ * ---------------
+ *
+ * [Note: Testing auto updates - Sanity checks]
+ *
+ * However, for partial checks of the UI flow, something like the following
+ * can be used to do a test of the update process (up until the actual
+ * installation itself).
+ *
+ * Create a `app/dev-app-update.yml` with:
+ *
+ * provider: generic
+ * url: http://127.0.0.1:7777/
+ *
+ * and start a local webserver in some directory:
+ *
+ * python3 -m http.server 7777
+ *
+ * In this directory, put `latest-mac.yml` and the DMG file that this YAML
+ * file refers to.
+ *
+ * Alternatively, `dev-app-update.yml` can point to some arbitrary GitHub
+ * repository too, e.g.:
+ *
+ * provider: github
+ * owner: ente-io
+ * repo: test-desktop-updates
+ *
+ * Now we can use the "Check for updates..." menu option to trigger the
+ * update flow.
+ */
+ autoUpdater.forceDevUpdateConfig = isDev;
+ if (isDev) return;
+
+ /**
+ * [Note: Testing auto updates - End to end checks]
+ *
+ * Since end-to-end update testing can only be done with signed builds, the
+ * easiest way is to create temporary builds in a test repository.
+ *
+     * Let us say we have v2.0.0 about to go out, and we have build artifacts
+     * for v2.0.0 in some draft release in our normal release repository.
+ *
+ * Create a new test repository, say `ente-io/test-desktop-updates`. In this
+ * repository, create a release v2.0.0, attaching the actual build
+ * artifacts. Make this release the latest.
+ *
+     * Now we need to create an old signed build.
+ *
+ * First, modify `package.json` to put in a version number older than the
+ * new version number that we want to test updating to, e.g. `v1.0.0-test`.
+ *
+ * Then uncomment the following block of code. This tells the auto updater
+ * to use `ente-io/test-desktop-updates` to get updates.
+ *
+     * With these two changes (older version and setFeedURL), create a new
+     * signed release build on CI. Install this build: it will check for
+     * updates against the temporary feed URL that we set, and we'll be able
+     * to verify the full update flow.
+ */
+
+ /*
+ autoUpdater.setFeedURL({
+ provider: "github",
+ owner: "ente-io",
+ repo: "test-desktop-updates",
+ });
+ */
+
const oneDay = 1 * 24 * 60 * 60 * 1000;
setInterval(() => void checkForUpdatesAndNotify(mainWindow), oneDay);
void checkForUpdatesAndNotify(mainWindow);
@@ -61,17 +140,17 @@ const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => {
log.debug(() => "Attempting auto update");
await autoUpdater.downloadUpdate();
-    let timeoutId: ReturnType<typeof setTimeout>;
+    let timeout: ReturnType<typeof setTimeout>;
const fiveMinutes = 5 * 60 * 1000;
autoUpdater.on("update-downloaded", () => {
- timeoutId = setTimeout(
+ timeout = setTimeout(
() => showUpdateDialog({ autoUpdatable: true, version }),
fiveMinutes,
);
});
autoUpdater.on("error", (error) => {
- clearTimeout(timeoutId);
+ clearTimeout(timeout);
log.error("Auto update failed", error);
showUpdateDialog({ autoUpdatable: false, version });
});
diff --git a/desktop/src/main/services/auto-launcher.ts b/desktop/src/main/services/auto-launcher.ts
index 4e97a02257e367959aaa5f8e9daed49a1df39d0e..0942a49359884cc2db5213347e827237ff10028b 100644
--- a/desktop/src/main/services/auto-launcher.ts
+++ b/desktop/src/main/services/auto-launcher.ts
@@ -27,14 +27,14 @@ class AutoLauncher {
}
async toggleAutoLaunch() {
- const isEnabled = await this.isEnabled();
+ const wasEnabled = await this.isEnabled();
const autoLaunch = this.autoLaunch;
if (autoLaunch) {
- if (isEnabled) await autoLaunch.disable();
+ if (wasEnabled) await autoLaunch.disable();
else await autoLaunch.enable();
} else {
- if (isEnabled) app.setLoginItemSettings({ openAtLogin: false });
- else app.setLoginItemSettings({ openAtLogin: true });
+ const openAtLogin = !wasEnabled;
+ app.setLoginItemSettings({ openAtLogin });
}
}
@@ -42,8 +42,7 @@ class AutoLauncher {
if (this.autoLaunch) {
return app.commandLine.hasSwitch("hidden");
} else {
- // TODO(MR): This apparently doesn't work anymore.
- return app.getLoginItemSettings().wasOpenedAtLogin;
+ return app.getLoginItemSettings().openAtLogin;
}
}
}
diff --git a/desktop/src/main/services/dir.ts b/desktop/src/main/services/dir.ts
index d375648f6f9fb9b20635c36772b88fb9600c2985..d97cad6fb219ae9e7f591059e0e0ed23ac9f49fe 100644
--- a/desktop/src/main/services/dir.ts
+++ b/desktop/src/main/services/dir.ts
@@ -1,5 +1,7 @@
import { shell } from "electron/common";
import { app, dialog } from "electron/main";
+import { existsSync } from "fs";
+import fs from "node:fs/promises";
import path from "node:path";
import { posixPath } from "../utils/electron";
@@ -38,14 +40,54 @@ export const openLogDirectory = () => openDirectory(logDirectoryPath());
*
* [Note: Electron app paths]
*
- * By default, these paths are at the following locations:
+ * There are three paths we need to be aware of usually.
*
- * - macOS: `~/Library/Application Support/ente`
+ * First is the "appData". We can obtain this with `app.getPath("appData")`.
+ * This is the per-user application data directory. It is usually the following:
+ *
+ * - Windows: `%APPDATA%`, e.g. `C:\Users\\AppData\Local`
+ * - Linux: `~/.config`
+ * - macOS: `~/Library/Application Support`
+ *
+ * Now, if we suffix the app's name onto the appData directory, we get the
+ * "userData" directory. This is the **primary** place applications are meant to
+ * store the user's data, e.g. various configuration files and saved state.
+ *
+ * During development, our app name is "Electron", so this'd be, for example,
+ * `~/Library/Application Support/Electron` if we run using `yarn dev`. For the
+ * packaged production app, our app name is "ente", so this would be:
+ *
+ * - Windows: `%APPDATA%\ente`, e.g. `C:\Users\\AppData\Local\ente`
* - Linux: `~/.config/ente`
- * - Windows: `%APPDATA%`, e.g. `C:\Users\\AppData\Local\ente`
- * - Windows: C:\Users\\AppData\Local\
+ * - macOS: `~/Library/Application Support/ente`
+ *
+ * Note that Chromium also stores the browser state, e.g. localStorage or disk
+ * caches, in userData.
*
* https://www.electronjs.org/docs/latest/api/app
*
+ * [Note: App log path]
+ *
+ * Finally, there is the "logs" directory. This is not within "appData" but has
+ * a slightly different OS specific path. Since our log file is named
+ * "ente.log", it can be found at:
+ *
+ * - macOS: ~/Library/Logs/ente/ente.log (production)
+ * - macOS: ~/Library/Logs/Electron/ente.log (dev)
+ * - Linux: ~/.config/ente/logs/ente.log
+ * - Windows: %USERPROFILE%\AppData\Roaming\ente\logs\ente.log
*/
const logDirectoryPath = () => app.getPath("logs");
+
+/**
+ * See: [Note: Legacy face crops]
+ */
+export const legacyFaceCrop = async (
+ faceID: string,
+): Promise<Uint8Array | undefined> => {
+ // See: [Note: Getting the cache path]
+ // @ts-expect-error "cache" works but is not part of the public API.
+ const cacheDir = path.join(app.getPath("cache"), "ente");
+ const filePath = path.join(cacheDir, "face-crops", faceID);
+ return existsSync(filePath) ? await fs.readFile(filePath) : undefined;
+};
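
For reference, the paths discussed in [Note: Electron app paths] and [Note: App
log path] can be printed at runtime with something like the following sketch
(illustrative only; the `logAppPaths` helper is hypothetical and not part of
this change):

```ts
import { app } from "electron/main";

// Print the per-user paths Electron resolves for us. The "cache" argument is
// not part of app.getPath's documented API, hence the expect-error (see
// [Note: Getting the cache path]).
export const logAppPaths = () => {
    console.log("appData ", app.getPath("appData"));
    console.log("userData", app.getPath("userData"));
    console.log("logs    ", app.getPath("logs"));
    // @ts-expect-error "cache" works but is not part of the public API.
    console.log("cache   ", app.getPath("cache"));
};
```
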
diff --git a/desktop/src/main/services/ffmpeg.ts b/desktop/src/main/services/ffmpeg.ts
index 0a5c4eed2c31bdfefda74d0acbf432be6261a512..4803fd6f0cf2b49ac10f6dd982dfb5b2dc40c858 100644
--- a/desktop/src/main/services/ffmpeg.ts
+++ b/desktop/src/main/services/ffmpeg.ts
@@ -1,11 +1,10 @@
import pathToFfmpeg from "ffmpeg-static";
import fs from "node:fs/promises";
import type { ZipItem } from "../../types/ipc";
-import log from "../log";
-import { ensure, withTimeout } from "../utils/common";
+import { ensure } from "../utils/common";
import { execAsync } from "../utils/electron";
import {
- deleteTempFile,
+ deleteTempFileIgnoringErrors,
makeFileForDataOrPathOrZipItem,
makeTempFilePath,
} from "../utils/temp";
@@ -46,13 +45,7 @@ export const ffmpegExec = async (
command: string[],
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
outputFileExtension: string,
- timeoutMS: number,
): Promise<Uint8Array> => {
- // TODO (MR): This currently copies files for both input (when
- // dataOrPathOrZipItem is data) and output. This needs to be tested
- // extremely large video files when invoked downstream of `convertToMP4` in
- // the web code.
-
const {
path: inputFilePath,
isFileTemporary: isInputFileTemporary,
@@ -69,17 +62,13 @@ export const ffmpegExec = async (
outputFilePath,
);
- if (timeoutMS) await withTimeout(execAsync(cmd), 30 * 1000);
- else await execAsync(cmd);
+ await execAsync(cmd);
return fs.readFile(outputFilePath);
} finally {
- try {
- if (isInputFileTemporary) await deleteTempFile(inputFilePath);
- await deleteTempFile(outputFilePath);
- } catch (e) {
- log.error("Could not clean up temp files", e);
- }
+ if (isInputFileTemporary)
+ await deleteTempFileIgnoringErrors(inputFilePath);
+ await deleteTempFileIgnoringErrors(outputFilePath);
}
};
@@ -112,3 +101,32 @@ const ffmpegBinaryPath = () => {
// https://github.com/eugeneware/ffmpeg-static/issues/16
return ensure(pathToFfmpeg).replace("app.asar", "app.asar.unpacked");
};
+
+/**
+ * A variant of {@link ffmpegExec} adapted to work with streams so that it can
+ * handle the MP4 conversion of large video files.
+ *
+ * See: [Note: Convert to MP4]
+ *
+ * @param inputFilePath The path to a file on the user's local file system. This
+ * is the video we want to convert.
+ * @param outputFilePath The path to a file on the user's local file system where
+ * we should write the converted MP4 video.
+ */
+export const ffmpegConvertToMP4 = async (
+ inputFilePath: string,
+ outputFilePath: string,
+): Promise<void> => {
+ const command = [
+ ffmpegPathPlaceholder,
+ "-i",
+ inputPathPlaceholder,
+ "-preset",
+ "ultrafast",
+ outputPathPlaceholder,
+ ];
+
+ const cmd = substitutePlaceholders(command, inputFilePath, outputFilePath);
+
+ await execAsync(cmd);
+};
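
The placeholder constants and `substitutePlaceholders` used by
`ffmpegConvertToMP4` are defined elsewhere in ffmpeg.ts and are not part of
this diff; roughly, the substitution assumed above looks like the following
sketch (the placeholder values are illustrative, the real ones may differ):

```ts
// Sketch of the assumed placeholder substitution. Each element of the command
// that matches a placeholder gets replaced by the corresponding concrete path.
const ffmpegPathPlaceholder = "FFMPEG";
const inputPathPlaceholder = "INPUT";
const outputPathPlaceholder = "OUTPUT";

const substitutePlaceholders = (
    command: string[],
    inputFilePath: string,
    outputFilePath: string,
) =>
    command.map((segment) => {
        if (segment == ffmpegPathPlaceholder) return ffmpegBinaryPath();
        if (segment == inputPathPlaceholder) return inputFilePath;
        if (segment == outputPathPlaceholder) return outputFilePath;
        return segment;
    });
```
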
diff --git a/desktop/src/main/services/image.ts b/desktop/src/main/services/image.ts
index 957fe8120003d4ad59a73634c7080bad6f78a33a..c07b051a1a5d100eb4cbc34b74e04fa54096f504 100644
--- a/desktop/src/main/services/image.ts
+++ b/desktop/src/main/services/image.ts
@@ -6,7 +6,7 @@ import { CustomErrorMessage, type ZipItem } from "../../types/ipc";
import log from "../log";
import { execAsync, isDev } from "../utils/electron";
import {
- deleteTempFile,
+ deleteTempFileIgnoringErrors,
makeFileForDataOrPathOrZipItem,
makeTempFilePath,
} from "../utils/temp";
@@ -23,12 +23,8 @@ export const convertToJPEG = async (imageData: Uint8Array) => {
await execAsync(command);
return new Uint8Array(await fs.readFile(outputFilePath));
} finally {
- try {
- await deleteTempFile(inputFilePath);
- await deleteTempFile(outputFilePath);
- } catch (e) {
- log.error("Could not clean up temp files", e);
- }
+ await deleteTempFileIgnoringErrors(inputFilePath);
+ await deleteTempFileIgnoringErrors(outputFilePath);
}
};
@@ -49,6 +45,9 @@ const convertToJPEGCommand = (
];
case "linux":
+ // The bundled binary is an ELF x86-64 executable.
+ if (process.arch != "x64")
+ throw new Error(CustomErrorMessage.NotAvailable);
return [
imageMagickPath(),
inputFilePath,
@@ -79,7 +78,7 @@ export const generateImageThumbnail = async (
const outputFilePath = await makeTempFilePath("jpeg");
- // Construct the command first, it may throw `NotAvailable` on win32.
+ // Construct the command first, it may throw `NotAvailable`.
let quality = 70;
let command = generateImageThumbnailCommand(
inputFilePath,
@@ -94,6 +93,9 @@ export const generateImageThumbnail = async (
let thumbnail: Uint8Array;
do {
await execAsync(command);
+ // TODO(MR): release 1.7
+ // TODO(MR): imagemagick debugging. Remove me after verifying logs.
+ log.info(`Generated thumbnail using ${command.join(" ")}`);
thumbnail = new Uint8Array(await fs.readFile(outputFilePath));
quality -= 10;
command = generateImageThumbnailCommand(
@@ -105,12 +107,9 @@ export const generateImageThumbnail = async (
} while (thumbnail.length > maxSize && quality > 50);
return thumbnail;
} finally {
- try {
- if (isInputFileTemporary) await deleteTempFile(inputFilePath);
- await deleteTempFile(outputFilePath);
- } catch (e) {
- log.error("Could not clean up temp files", e);
- }
+ if (isInputFileTemporary)
+ await deleteTempFileIgnoringErrors(inputFilePath);
+ await deleteTempFileIgnoringErrors(outputFilePath);
}
};
@@ -138,14 +137,17 @@ const generateImageThumbnailCommand = (
];
case "linux":
+ // The bundled binary is an ELF x86-64 executable.
+ if (process.arch != "x64")
+ throw new Error(CustomErrorMessage.NotAvailable);
return [
imageMagickPath(),
- inputFilePath,
- "-auto-orient",
"-define",
`jpeg:size=${2 * maxDimension}x${2 * maxDimension}`,
+ inputFilePath,
+ "-auto-orient",
"-thumbnail",
- `${maxDimension}x${maxDimension}>`,
+ `${maxDimension}x${maxDimension}`,
"-unsharp",
"0x.5",
"-quality",
diff --git a/desktop/src/main/services/store.ts b/desktop/src/main/services/store.ts
index 20cc91ea4dad7beec90071e145bb636ba8f8c22c..471928d76ce70a5f85e77f22d516b6d9c9a27037 100644
--- a/desktop/src/main/services/store.ts
+++ b/desktop/src/main/services/store.ts
@@ -14,6 +14,15 @@ export const clearStores = () => {
watchStore.clear();
};
+/**
+ * [Note: Safe storage keys]
+ *
+ * On macOS, `safeStorage` stores our data under a Keychain entry named
+ * "<app-name> Safe Storage", which resolves to:
+ *
+ * - Electron Safe Storage (dev)
+ * - ente Safe Storage (prod)
+ */
export const saveEncryptionKey = (encryptionKey: string) => {
const encryptedKey = safeStorage.encryptString(encryptionKey);
const b64EncryptedKey = Buffer.from(encryptedKey).toString("base64");
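
The corresponding read path (not shown in this diff) is expected to reverse
these steps; a minimal sketch, with a hypothetical helper name:

```ts
import { safeStorage } from "electron/main";

// Sketch (assumption): decode the persisted base64 string and let safeStorage
// decrypt it, yielding back the original encryption key.
export const decryptEncryptionKey = (b64EncryptedKey: string): string => {
    const encryptedKey = Buffer.from(b64EncryptedKey, "base64");
    return safeStorage.decryptString(encryptedKey);
};
```
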
diff --git a/desktop/src/main/stream.ts b/desktop/src/main/stream.ts
index bae13aa121598e95822eb0b401e2443d69fcfd59..1c8223c87c5ede23926eba73ce8a98d19099cb5d 100644
--- a/desktop/src/main/stream.ts
+++ b/desktop/src/main/stream.ts
@@ -3,13 +3,20 @@
*/
import { net, protocol } from "electron/main";
import StreamZip from "node-stream-zip";
+import { randomUUID } from "node:crypto";
import { createWriteStream, existsSync } from "node:fs";
import fs from "node:fs/promises";
import { Readable } from "node:stream";
import { ReadableStream } from "node:stream/web";
import { pathToFileURL } from "node:url";
import log from "./log";
+import { ffmpegConvertToMP4 } from "./services/ffmpeg";
import { ensure } from "./utils/common";
+import {
+ deleteTempFile,
+ deleteTempFileIgnoringErrors,
+ makeTempFilePath,
+} from "./utils/temp";
/**
* Register a protocol handler that we use for streaming large files between the
@@ -34,119 +41,117 @@ import { ensure } from "./utils/common";
* Depends on {@link registerPrivilegedSchemes}.
*/
export const registerStreamProtocol = () => {
- protocol.handle("stream", async (request: Request) => {
- const url = request.url;
- // The request URL contains the command to run as the host, and the
- // pathname of the file(s) as the search params.
- const { host, searchParams } = new URL(url);
- switch (host) {
- case "read":
- return handleRead(ensure(searchParams.get("path")));
- case "read-zip":
- return handleReadZip(
- ensure(searchParams.get("zipPath")),
- ensure(searchParams.get("entryName")),
- );
- case "write":
- return handleWrite(ensure(searchParams.get("path")), request);
- default:
- return new Response("", { status: 404 });
+ protocol.handle("stream", (request: Request) => {
+ try {
+ return handleStreamRequest(request);
+ } catch (e) {
+ log.error(`Failed to handle stream request for ${request.url}`, e);
+ return new Response(String(e), { status: 500 });
}
});
};
-const handleRead = async (path: string) => {
- try {
- const res = await net.fetch(pathToFileURL(path).toString());
- if (res.ok) {
- // net.fetch already seems to add "Content-Type" and "Last-Modified"
- // headers, but I couldn't find documentation for this. In any case,
- // since we already are stat-ting the file for the "Content-Length",
- // we explicitly add the "X-Last-Modified-Ms" too,
- //
- // 1. Guaranteeing its presence,
- //
- // 2. Having it be in the exact format we want (no string <-> date
- // conversions),
- //
- // 3. Retaining milliseconds.
-
- const stat = await fs.stat(path);
-
- // Add the file's size as the Content-Length header.
- const fileSize = stat.size;
- res.headers.set("Content-Length", `${fileSize}`);
-
- // Add the file's last modified time (as epoch milliseconds).
- const mtimeMs = stat.mtimeMs;
- res.headers.set("X-Last-Modified-Ms", `${mtimeMs}`);
+const handleStreamRequest = async (request: Request): Promise<Response> => {
+ const url = request.url;
+ // The request URL contains the command to run as the host, and the
+ // pathname of the file(s) as the search params.
+ const { host, searchParams } = new URL(url);
+ switch (host) {
+ case "read":
+ return handleRead(ensure(searchParams.get("path")));
+
+ case "read-zip":
+ return handleReadZip(
+ ensure(searchParams.get("zipPath")),
+ ensure(searchParams.get("entryName")),
+ );
+
+ case "write":
+ return handleWrite(ensure(searchParams.get("path")), request);
+
+ case "convert-to-mp4": {
+ const token = searchParams.get("token");
+ const done = searchParams.get("done") !== null;
+ return token
+ ? done
+ ? handleConvertToMP4ReadDone(token)
+ : handleConvertToMP4Read(token)
+ : handleConvertToMP4Write(request);
}
- return res;
- } catch (e) {
- log.error(`Failed to read stream at ${path}`, e);
- return new Response(`Failed to read stream: ${String(e)}`, {
- status: 500,
- });
+
+ default:
+ return new Response("", { status: 404 });
}
};
-const handleReadZip = async (zipPath: string, entryName: string) => {
- try {
- const zip = new StreamZip.async({ file: zipPath });
- const entry = await zip.entry(entryName);
- if (!entry) return new Response("", { status: 404 });
-
- // This returns an "old style" NodeJS.ReadableStream.
- const stream = await zip.stream(entry);
- // Convert it into a new style NodeJS.Readable.
- const nodeReadable = new Readable().wrap(stream);
- // Then convert it into a Web stream.
- const webReadableStreamAny = Readable.toWeb(nodeReadable);
-        // However, we get a ReadableStream<any> now. This doesn't go into the
-        // `BodyInit` expected by the Response constructor, which wants a
-        // ReadableStream<Uint8Array>. Force a cast.
-        const webReadableStream =
-            webReadableStreamAny as ReadableStream<Uint8Array>;
-
- // Close the zip handle when the underlying stream closes.
- stream.on("end", () => void zip.close());
-
- return new Response(webReadableStream, {
- headers: {
- // We don't know the exact type, but it doesn't really matter,
- // just set it to a generic binary content-type so that the
- // browser doesn't tinker with it thinking of it as text.
- "Content-Type": "application/octet-stream",
- "Content-Length": `${entry.size}`,
- // While it is documented that entry.time is the modification
- // time, the units are not mentioned. By seeing the source code,
- // we can verify that it is indeed epoch milliseconds. See
- // `parseZipTime` in the node-stream-zip source,
- // https://github.com/antelle/node-stream-zip/blob/master/node_stream_zip.js
- "X-Last-Modified-Ms": `${entry.time}`,
- },
- });
- } catch (e) {
- log.error(
- `Failed to read entry ${entryName} from zip file at ${zipPath}`,
- e,
- );
- return new Response(`Failed to read stream: ${String(e)}`, {
- status: 500,
- });
+const handleRead = async (path: string) => {
+ const res = await net.fetch(pathToFileURL(path).toString());
+ if (res.ok) {
+ // net.fetch already seems to add "Content-Type" and "Last-Modified"
+ // headers, but I couldn't find documentation for this. In any case,
+ // since we already are stat-ting the file for the "Content-Length", we
+ // explicitly add the "X-Last-Modified-Ms" too,
+ //
+ // 1. Guaranteeing its presence,
+ //
+ // 2. Having it be in the exact format we want (no string <-> date
+ // conversions),
+ //
+ // 3. Retaining milliseconds.
+
+ const stat = await fs.stat(path);
+
+ // Add the file's size as the Content-Length header.
+ const fileSize = stat.size;
+ res.headers.set("Content-Length", `${fileSize}`);
+
+ // Add the file's last modified time (as epoch milliseconds).
+ const mtimeMs = stat.mtimeMs;
+ res.headers.set("X-Last-Modified-Ms", `${mtimeMs}`);
}
+ return res;
+};
+
+const handleReadZip = async (zipPath: string, entryName: string) => {
+ const zip = new StreamZip.async({ file: zipPath });
+ const entry = await zip.entry(entryName);
+ if (!entry) return new Response("", { status: 404 });
+
+ // This returns an "old style" NodeJS.ReadableStream.
+ const stream = await zip.stream(entry);
+ // Convert it into a new style NodeJS.Readable.
+ const nodeReadable = new Readable().wrap(stream);
+ // Then convert it into a Web stream.
+ const webReadableStreamAny = Readable.toWeb(nodeReadable);
+    // However, we get a ReadableStream<any> now. This doesn't go into the
+    // `BodyInit` expected by the Response constructor, which wants a
+    // ReadableStream<Uint8Array>. Force a cast.
+    const webReadableStream =
+        webReadableStreamAny as ReadableStream<Uint8Array>;
+
+ // Close the zip handle when the underlying stream closes.
+ stream.on("end", () => void zip.close());
+
+ return new Response(webReadableStream, {
+ headers: {
+ // We don't know the exact type, but it doesn't really matter, just
+ // set it to a generic binary content-type so that the browser
+ // doesn't tinker with it thinking of it as text.
+ "Content-Type": "application/octet-stream",
+ "Content-Length": `${entry.size}`,
+ // While it is documented that entry.time is the modification time,
+ // the units are not mentioned. By seeing the source code, we can
+ // verify that it is indeed epoch milliseconds. See `parseZipTime`
+ // in the node-stream-zip source,
+ // https://github.com/antelle/node-stream-zip/blob/master/node_stream_zip.js
+ "X-Last-Modified-Ms": `${entry.time}`,
+ },
+ });
};
const handleWrite = async (path: string, request: Request) => {
- try {
- await writeStream(path, ensure(request.body));
- return new Response("", { status: 200 });
- } catch (e) {
- log.error(`Failed to write stream to ${path}`, e);
- return new Response(`Failed to write stream: ${String(e)}`, {
- status: 500,
- });
- }
+ await writeStream(path, ensure(request.body));
+ return new Response("", { status: 200 });
};
/**
@@ -154,7 +159,7 @@ const handleWrite = async (path: string, request: Request) => {
*
* The returned promise resolves when the write completes.
*
- * @param filePath The local filesystem path where the file should be written.
+ * @param filePath The local file system path where the file should be written.
*
* @param readableStream A web
* [ReadableStream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream).
@@ -181,3 +186,84 @@ const writeNodeStream = async (filePath: string, fileStream: Readable) => {
});
});
};
+
+/**
+ * A map from token to file paths for convert-to-mp4 requests that we have
+ * received.
+ */
+const convertToMP4Results = new Map<string, string>();
+
+/**
+ * Clear any in-memory state for in-flight convert-to-mp4 requests. Meant to be
+ * called during logout.
+ */
+export const clearConvertToMP4Results = () => convertToMP4Results.clear();
+
+/**
+ * [Note: Convert to MP4]
+ *
+ * When we want to convert a video to MP4, if we were to send the entire
+ * contents of the video from the renderer to the main process over IPC, it just
+ * causes the renderer to run out of memory and restart when the videos are very
+ * large. So we need to stream the original video renderer → main and then
+ * stream back the converted video renderer ← main.
+ *
+ * Currently Chromium does not support bi-directional streaming ("full" duplex
+ * mode for the Web fetch API). So we need to simulate that using two different
+ * streaming requests.
+ *
+ * renderer → main stream://convert-to-mp4
+ * → request.body is the original video
+ * ← response is a token
+ *
+ * renderer → main stream://convert-to-mp4?token=<token>
+ * ← response.body is the converted video
+ *
+ * renderer → main stream://convert-to-mp4?token=<token>&done
+ * ← 200 OK
+ *
+ * Note that the conversion itself is not streaming. The conversion still
+ * happens in a single shot, we are just streaming the data across the IPC
+ * boundary to allow us to pass large amounts of data without running out of
+ * memory.
+ *
+ * See also: [Note: IPC streams]
+ */
+const handleConvertToMP4Write = async (request: Request) => {
+ const inputTempFilePath = await makeTempFilePath();
+ await writeStream(inputTempFilePath, ensure(request.body));
+
+ const outputTempFilePath = await makeTempFilePath("mp4");
+ try {
+ await ffmpegConvertToMP4(inputTempFilePath, outputTempFilePath);
+ } catch (e) {
+ log.error("Conversion to MP4 failed", e);
+ await deleteTempFileIgnoringErrors(outputTempFilePath);
+ throw e;
+ } finally {
+ await deleteTempFileIgnoringErrors(inputTempFilePath);
+ }
+
+ const token = randomUUID();
+ convertToMP4Results.set(token, outputTempFilePath);
+ return new Response(token, { status: 200 });
+};
+
+const handleConvertToMP4Read = async (token: string) => {
+ const filePath = convertToMP4Results.get(token);
+ if (!filePath)
+ return new Response(`Unknown token ${token}`, { status: 404 });
+
+ return net.fetch(pathToFileURL(filePath).toString());
+};
+
+const handleConvertToMP4ReadDone = async (token: string) => {
+ const filePath = convertToMP4Results.get(token);
+ if (!filePath)
+ return new Response(`Unknown token ${token}`, { status: 404 });
+
+ await deleteTempFile(filePath);
+
+ convertToMP4Results.delete(token);
+ return new Response("", { status: 200 });
+};
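
For context, this is roughly how the web layer is expected to drive the
stream:// protocol handled above. The actual renderer code lives in the web app
and is not part of this diff, so the helpers below (`readFile`, `convertToMP4`)
are illustrative sketches only:

```ts
// Read a file from disk via stream://read, picking up the extra headers that
// handleRead sets.
const readFile = async (path: string) => {
    const res = await fetch(`stream://read?path=${encodeURIComponent(path)}`);
    if (!res.ok) throw new Error(`Failed to read ${path}: HTTP ${res.status}`);
    const lastModifiedMs = Number(res.headers.get("X-Last-Modified-Ms"));
    return { data: await res.arrayBuffer(), lastModifiedMs };
};

// Convert a video to MP4 using the three requests described in
// [Note: Convert to MP4].
const convertToMP4 = async (video: Uint8Array) => {
    // 1. Stream the original video to the main process, getting back a token.
    const upload = await fetch("stream://convert-to-mp4", {
        method: "POST",
        body: video,
    });
    const token = await upload.text();
    // 2. Stream back the converted MP4 using the token.
    const converted = await fetch(`stream://convert-to-mp4?token=${token}`);
    const mp4 = new Uint8Array(await converted.arrayBuffer());
    // 3. Tell the main process it can delete the temporary output file.
    await fetch(`stream://convert-to-mp4?token=${token}&done`);
    return mp4;
};
```
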
diff --git a/desktop/src/main/utils/common.ts b/desktop/src/main/utils/common.ts
index 1f5016e617fdd31bb876ee313937b22ae6314c23..5ed46aa8a1b836cbf58197b02b70f78b20d5b254 100644
--- a/desktop/src/main/utils/common.ts
+++ b/desktop/src/main/utils/common.ts
@@ -13,32 +13,3 @@ export const ensure = <T>(v: T | null | undefined): T => {
if (v === undefined) throw new Error("Required value was not found");
return v;
};
-
-/**
- * Wait for {@link ms} milliseconds
- *
- * This function is a promisified `setTimeout`. It returns a promise that
- * resolves after {@link ms} milliseconds.
- */
-export const wait = (ms: number) =>
- new Promise((resolve) => setTimeout(resolve, ms));
-
-/**
- * Await the given {@link promise} for {@link timeoutMS} milliseconds. If it
- * does not resolve within {@link timeoutMS}, then reject with a timeout error.
- */
-export const withTimeout = async <T>(promise: Promise<T>, ms: number) => {
-    let timeoutId: ReturnType<typeof setTimeout>;
-    const rejectOnTimeout = new Promise<T>((_, reject) => {
- timeoutId = setTimeout(
- () => reject(new Error("Operation timed out")),
- ms,
- );
- });
- const promiseAndCancelTimeout = async () => {
- const result = await promise;
- clearTimeout(timeoutId);
- return result;
- };
- return Promise.race([promiseAndCancelTimeout(), rejectOnTimeout]);
-};
diff --git a/desktop/src/main/utils/electron.ts b/desktop/src/main/utils/electron.ts
index 93e8565ef2945a6c98daf10c46979cada7cc80da..133edf87c67860673ea0a30a4f2fca2449fddc96 100644
--- a/desktop/src/main/utils/electron.ts
+++ b/desktop/src/main/utils/electron.ts
@@ -49,12 +49,12 @@ export const posixPath = (platformPath: string) =>
* > output, this might not be the best option and it might be better to use the
* > underlying functions.
*/
-export const execAsync = (command: string | string[]) => {
+export const execAsync = async (command: string | string[]) => {
const escapedCommand = Array.isArray(command)
? shellescape(command)
: command;
const startTime = Date.now();
- const result = execAsync_(escapedCommand);
+ const result = await execAsync_(escapedCommand);
log.debug(
() => `${escapedCommand} (${Math.round(Date.now() - startTime)} ms)`,
);
diff --git a/desktop/src/main/utils/temp.ts b/desktop/src/main/utils/temp.ts
index 11f7a5d84531c659b0137f3fa8ba6f6f3681785d..70dec844d6c14cd2806105a2f17ba8f624d3a5b8 100644
--- a/desktop/src/main/utils/temp.ts
+++ b/desktop/src/main/utils/temp.ts
@@ -4,6 +4,7 @@ import { existsSync } from "node:fs";
import fs from "node:fs/promises";
import path from "node:path";
import type { ZipItem } from "../../types/ipc";
+import log from "../log";
import { ensure } from "./common";
/**
@@ -62,6 +63,19 @@ export const deleteTempFile = async (tempFilePath: string) => {
await fs.rm(tempFilePath, { force: true });
};
+/**
+ * A variant of {@link deleteTempFile} that suppresses any errors, making it
+ * safe to call in a sequence without needing to handle the scenario where one
+ * of the calls failing causes the rest to be skipped.
+ */
+export const deleteTempFileIgnoringErrors = async (tempFilePath: string) => {
+ try {
+ await deleteTempFile(tempFilePath);
+ } catch (e) {
+ log.error(`Could not delete temporary file at path ${tempFilePath}`, e);
+ }
+};
+
/** The result of {@link makeFileForDataOrPathOrZipItem}. */
interface FileForDataOrPathOrZipItem {
/**
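
Schematically, the intended call pattern for `deleteTempFileIgnoringErrors` is
the cleanup sequence in a `finally` block, as in the ffmpeg.ts and image.ts
changes above. A small self-contained sketch (the `withTempJPEG` helper and its
import path are hypothetical):

```ts
import fs from "node:fs/promises";
import { deleteTempFileIgnoringErrors, makeTempFilePath } from "./temp";

// Run `work` against a temporary JPEG path and always attempt cleanup. Since
// the deletion swallows its own errors, a failed cleanup neither masks the
// result nor skips any subsequent cleanup steps.
export const withTempJPEG = async (
    work: (outputFilePath: string) => Promise<void>,
): Promise<Uint8Array> => {
    const outputFilePath = await makeTempFilePath("jpeg");
    try {
        await work(outputFilePath);
        return new Uint8Array(await fs.readFile(outputFilePath));
    } finally {
        await deleteTempFileIgnoringErrors(outputFilePath);
    }
};
```
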
diff --git a/desktop/src/preload.ts b/desktop/src/preload.ts
index 407e541ff799a3d1e6863a0d8c16ea8edcecb750..d52745184eb56fc917b17be53699cbb10ee34f78 100644
--- a/desktop/src/preload.ts
+++ b/desktop/src/preload.ts
@@ -65,6 +65,9 @@ const selectDirectory = () => ipcRenderer.invoke("selectDirectory");
const clearStores = () => ipcRenderer.send("clearStores");
+const clearConvertToMP4Results = () =>
+ ipcRenderer.send("clearConvertToMP4Results");
+
const encryptionKey = () => ipcRenderer.invoke("encryptionKey");
const saveEncryptionKey = (encryptionKey: string) =>
@@ -140,14 +143,12 @@ const ffmpegExec = (
command: string[],
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
outputFileExtension: string,
- timeoutMS: number,
) =>
ipcRenderer.invoke(
"ffmpegExec",
command,
dataOrPathOrZipItem,
outputFileExtension,
- timeoutMS,
);
// - ML
@@ -164,6 +165,9 @@ const detectFaces = (input: Float32Array) =>
const faceEmbedding = (input: Float32Array) =>
ipcRenderer.invoke("faceEmbedding", input);
+const legacyFaceCrop = (faceID: string) =>
+ ipcRenderer.invoke("legacyFaceCrop", faceID);
+
// - Watch
const watchGet = () => ipcRenderer.invoke("watchGet");
@@ -305,6 +309,7 @@ contextBridge.exposeInMainWorld("electron", {
openLogDirectory,
selectDirectory,
clearStores,
+ clearConvertToMP4Results,
encryptionKey,
saveEncryptionKey,
onMainWindowFocus,
@@ -341,6 +346,7 @@ contextBridge.exposeInMainWorld("electron", {
clipTextEmbeddingIfAvailable,
detectFaces,
faceEmbedding,
+ legacyFaceCrop,
// - Watch
diff --git a/desktop/yarn.lock b/desktop/yarn.lock
index 833b623a7ebadffc6e3239b855a9ec2f6288a31c..21e56d0ae5322c4ebbadb27e2779a53de748a747 100644
--- a/desktop/yarn.lock
+++ b/desktop/yarn.lock
@@ -7,29 +7,6 @@
resolved "https://registry.yarnpkg.com/7zip-bin/-/7zip-bin-5.2.0.tgz#7a03314684dd6572b7dfa89e68ce31d60286854d"
integrity sha512-ukTPVhqG4jNzMro2qA9HSCSSVJN3aN7tlb+hfqYCt3ER0yWroeA2VR38MNrOHLQ/cVj+DaIMad0kFCtWWowh/A==
-"@babel/code-frame@^7.0.0":
- version "7.24.2"
- resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.24.2.tgz#718b4b19841809a58b29b68cde80bc5e1aa6d9ae"
- integrity sha512-y5+tLQyV8pg3fsiln67BVLD1P13Eg4lh5RW9mF0zUuvLrv9uIQ4MCL+CRT+FTsBlBjcIan6PGsLcBN0m3ClUyQ==
- dependencies:
- "@babel/highlight" "^7.24.2"
- picocolors "^1.0.0"
-
-"@babel/helper-validator-identifier@^7.24.5":
- version "7.24.5"
- resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.5.tgz#918b1a7fa23056603506370089bd990d8720db62"
- integrity sha512-3q93SSKX2TWCG30M2G2kwaKeTYgEUp5Snjuj8qm729SObL6nbtUldAi37qbxkD5gg3xnBio+f9nqpSepGZMvxA==
-
-"@babel/highlight@^7.24.2":
- version "7.24.5"
- resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.24.5.tgz#bc0613f98e1dd0720e99b2a9ee3760194a704b6e"
- integrity sha512-8lLmua6AVh/8SLJRRVD6V8p73Hir9w5mJrhE+IPpILG31KKlI9iz5zmBYKcWPS59qSfgP9RaSBQSHHE81WKuEw==
- dependencies:
- "@babel/helper-validator-identifier" "^7.24.5"
- chalk "^2.4.2"
- js-tokens "^4.0.0"
- picocolors "^1.0.0"
-
"@babel/runtime@^7.21.0":
version "7.24.5"
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.24.5.tgz#230946857c053a36ccc66e1dd03b17dd0c4ed02c"
@@ -339,9 +316,9 @@
integrity sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==
"@types/node@*", "@types/node@^20.9.0":
- version "20.12.7"
- resolved "https://registry.yarnpkg.com/@types/node/-/node-20.12.7.tgz#04080362fa3dd6c5822061aa3124f5c152cff384"
- integrity sha512-wq0cICSkRLVaf3UGLMGItu/PtdY7oaXaI/RVU+xliKVOtRna3PRY57ZDfztpDL0n11vfymMUnXv8QwYCO7L1wg==
+ version "20.12.11"
+ resolved "https://registry.yarnpkg.com/@types/node/-/node-20.12.11.tgz#c4ef00d3507000d17690643278a60dc55a9dc9be"
+ integrity sha512-vDg9PZ/zi+Nqp6boSOT7plNuthRugEKixDv5sFTIpkE89MmNtEArAShI4mxuX2+UrLEe9pxC1vm2cjm9YlWbJw==
dependencies:
undici-types "~5.26.4"
@@ -350,11 +327,6 @@
resolved "https://registry.yarnpkg.com/@types/node/-/node-10.17.60.tgz#35f3d6213daed95da7f0f73e75bcc6980e90597b"
integrity sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw==
-"@types/normalize-package-data@^2.4.0":
- version "2.4.4"
- resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz#56e2cc26c397c038fab0e3a917a12d5c5909e901"
- integrity sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==
-
"@types/plist@^3.0.1":
version "3.0.5"
resolved "https://registry.yarnpkg.com/@types/plist/-/plist-3.0.5.tgz#9a0c49c0f9886c8c8696a7904dd703f6284036e0"
@@ -557,13 +529,6 @@ ansi-regex@^5.0.1:
resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304"
integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==
-ansi-styles@^3.2.1:
- version "3.2.1"
- resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d"
- integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==
- dependencies:
- color-convert "^1.9.0"
-
ansi-styles@^4.0.0, ansi-styles@^4.1.0:
version "4.3.0"
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937"
@@ -641,13 +606,6 @@ are-we-there-yet@^3.0.0:
delegates "^1.0.0"
readable-stream "^3.6.0"
-argparse@^1.0.7:
- version "1.0.10"
- resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911"
- integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==
- dependencies:
- sprintf-js "~1.0.2"
-
argparse@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38"
@@ -875,15 +833,6 @@ caseless@^0.12.0:
resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc"
integrity sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==
-chalk@^2.4.2:
- version "2.4.2"
- resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
- integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==
- dependencies:
- ansi-styles "^3.2.1"
- escape-string-regexp "^1.0.5"
- supports-color "^5.3.0"
-
chalk@^4.0.0, chalk@^4.0.2, chalk@^4.1.0, chalk@^4.1.2:
version "4.1.2"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01"
@@ -973,13 +922,6 @@ clone@^1.0.2:
resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e"
integrity sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==
-color-convert@^1.9.0:
- version "1.9.3"
- resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8"
- integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==
- dependencies:
- color-name "1.1.3"
-
color-convert@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3"
@@ -987,11 +929,6 @@ color-convert@^2.0.1:
dependencies:
color-name "~1.1.4"
-color-name@1.1.3:
- version "1.1.3"
- resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
- integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==
-
color-name@~1.1.4:
version "1.1.4"
resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2"
@@ -1259,11 +1196,6 @@ dotenv-expand@^5.1.0:
resolved "https://registry.yarnpkg.com/dotenv-expand/-/dotenv-expand-5.1.0.tgz#3fbaf020bfd794884072ea26b1e9791d45a629f0"
integrity sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA==
-dotenv@^8.2.0:
- version "8.6.0"
- resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-8.6.0.tgz#061af664d19f7f4d8fc6e4ff9b584ce237adcb8b"
- integrity sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g==
-
dotenv@^9.0.2:
version "9.0.2"
resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-9.0.2.tgz#dacc20160935a37dea6364aa1bef819fb9b6ab05"
@@ -1276,16 +1208,6 @@ ejs@^3.1.8:
dependencies:
jake "^10.8.5"
-electron-builder-notarize@^1.5:
- version "1.5.2"
- resolved "https://registry.yarnpkg.com/electron-builder-notarize/-/electron-builder-notarize-1.5.2.tgz#540185b57a336fc6eec01bfe092a3b4764459255"
- integrity sha512-vo6RGgIFYxMk2yp59N4NsvmAYfB7ncYi6gV9Fcq2TVKxEn2tPXrSjIKB2e/pu+5iXIY6BHNZNXa75F3DHgOOLA==
- dependencies:
- dotenv "^8.2.0"
- electron-notarize "^1.1.1"
- js-yaml "^3.14.0"
- read-pkg-up "^7.0.0"
-
electron-builder@25.0.0-alpha.6:
version "25.0.0-alpha.6"
resolved "https://registry.yarnpkg.com/electron-builder/-/electron-builder-25.0.0-alpha.6.tgz#a72f96f7029539ac28f92ce5c83f872ba3b6e7c1"
@@ -1308,14 +1230,6 @@ electron-log@^5.1:
resolved "https://registry.yarnpkg.com/electron-log/-/electron-log-5.1.2.tgz#fb40ad7f4ae694dd0e4c02c662d1a65c03e1243e"
integrity sha512-Cpg4hAZ27yM9wzE77c4TvgzxzavZ+dVltCczParXN+Vb3jocojCSAuSMCVOI9fhFuuOR+iuu3tZLX1cu0y0kgQ==
-electron-notarize@^1.1.1:
- version "1.2.2"
- resolved "https://registry.yarnpkg.com/electron-notarize/-/electron-notarize-1.2.2.tgz#ebf2b258e8e08c1c9f8ff61dc53d5b16b439daf4"
- integrity sha512-ZStVWYcWI7g87/PgjPJSIIhwQXOaw4/XeXU+pWqMMktSLHaGMLHdyPPN7Cmao7+Cr7fYufA16npdtMndYciHNw==
- dependencies:
- debug "^4.1.1"
- fs-extra "^9.0.1"
-
electron-publish@25.0.0-alpha.6:
version "25.0.0-alpha.6"
resolved "https://registry.yarnpkg.com/electron-publish/-/electron-publish-25.0.0-alpha.6.tgz#8af3cb6e2435c00b8c71de43c330483808df5924"
@@ -1352,9 +1266,9 @@ electron-updater@^6.1:
tiny-typed-emitter "^2.1.0"
electron@^30:
- version "30.0.2"
- resolved "https://registry.yarnpkg.com/electron/-/electron-30.0.2.tgz#95ba019216bf8be9f3097580123e33ea37497733"
- integrity sha512-zv7T+GG89J/hyWVkQsLH4Y/rVEfqJG5M/wOBIGNaDdqd8UV9/YZPdS7CuFeaIj0H9LhCt95xkIQNpYB/3svOkQ==
+ version "30.0.3"
+ resolved "https://registry.yarnpkg.com/electron/-/electron-30.0.3.tgz#7c25ddb12ba89fd117991d010f1b274b1bafcb73"
+ integrity sha512-h+suwx6e0fnv/9wi0/cmCMtG+4LrPzJZa+3DEEpxcPcP+pcWnBI70t8QspxgMNIh2wzXLMD9XVqrLkEbiBAInw==
dependencies:
"@electron/get" "^2.0.0"
"@types/node" "^20.9.0"
@@ -1389,13 +1303,6 @@ err-code@^2.0.2:
resolved "https://registry.yarnpkg.com/err-code/-/err-code-2.0.3.tgz#23c2f3b756ffdfc608d30e27c9a941024807e7f9"
integrity sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==
-error-ex@^1.3.1:
- version "1.3.2"
- resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf"
- integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==
- dependencies:
- is-arrayish "^0.2.1"
-
es-define-property@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/es-define-property/-/es-define-property-1.0.0.tgz#c7faefbdff8b2696cf5f46921edfb77cc4ba3845"
@@ -1418,11 +1325,6 @@ escalade@^3.1.1:
resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.2.tgz#54076e9ab29ea5bf3d8f1ed62acffbb88272df27"
integrity sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==
-escape-string-regexp@^1.0.5:
- version "1.0.5"
- resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
- integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==
-
escape-string-regexp@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34"
@@ -1494,11 +1396,6 @@ espree@^9.6.0, espree@^9.6.1:
acorn-jsx "^5.3.2"
eslint-visitor-keys "^3.4.1"
-esprima@^4.0.0:
- version "4.0.1"
- resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71"
- integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==
-
esquery@^1.4.2:
version "1.5.0"
resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.5.0.tgz#6ce17738de8577694edd7361c57182ac8cb0db0b"
@@ -1622,14 +1519,6 @@ find-up@^3.0.0:
dependencies:
locate-path "^3.0.0"
-find-up@^4.1.0:
- version "4.1.0"
- resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19"
- integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==
- dependencies:
- locate-path "^5.0.0"
- path-exists "^4.0.0"
-
find-up@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc"
@@ -1907,11 +1796,6 @@ graphemer@^1.4.0:
resolved "https://registry.yarnpkg.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6"
integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==
-has-flag@^3.0.0:
- version "3.0.0"
- resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
- integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==
-
has-flag@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b"
@@ -1946,11 +1830,6 @@ hasown@^2.0.0:
dependencies:
function-bind "^1.1.2"
-hosted-git-info@^2.1.4:
- version "2.8.9"
- resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9"
- integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==
-
hosted-git-info@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-4.1.0.tgz#827b82867e9ff1c8d0c4d9d53880397d2c86d224"
@@ -2081,11 +1960,6 @@ ip-address@^9.0.5:
jsbn "1.1.0"
sprintf-js "^1.1.3"
-is-arrayish@^0.2.1:
- version "0.2.1"
- resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
- integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==
-
is-binary-path@~2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09"
@@ -2198,19 +2072,6 @@ jpeg-js@^0.4:
resolved "https://registry.yarnpkg.com/jpeg-js/-/jpeg-js-0.4.4.tgz#a9f1c6f1f9f0fa80cdb3484ed9635054d28936aa"
integrity sha512-WZzeDOEtTOBK4Mdsar0IqEU5sMr3vSV2RqkAIzUEV2BHnUfKGyswWFPFwK5EeDo93K3FohSHbLAjj0s1Wzd+dg==
-js-tokens@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
- integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==
-
-js-yaml@^3.14.0:
- version "3.14.1"
- resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537"
- integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==
- dependencies:
- argparse "^1.0.7"
- esprima "^4.0.0"
-
js-yaml@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602"
@@ -2228,11 +2089,6 @@ json-buffer@3.0.1:
resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13"
integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==
-json-parse-even-better-errors@^2.3.0:
- version "2.3.1"
- resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d"
- integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==
-
json-schema-traverse@^0.4.1:
version "0.4.1"
resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660"
@@ -2299,11 +2155,6 @@ levn@^0.4.1:
prelude-ls "^1.2.1"
type-check "~0.4.0"
-lines-and-columns@^1.1.6:
- version "1.2.4"
- resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632"
- integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==
-
locate-path@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e"
@@ -2312,13 +2163,6 @@ locate-path@^3.0.0:
p-locate "^3.0.0"
path-exists "^3.0.0"
-locate-path@^5.0.0:
- version "5.0.0"
- resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0"
- integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==
- dependencies:
- p-locate "^4.1.0"
-
locate-path@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286"
@@ -2643,16 +2487,6 @@ nopt@^6.0.0:
dependencies:
abbrev "^1.0.0"
-normalize-package-data@^2.5.0:
- version "2.5.0"
- resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8"
- integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==
- dependencies:
- hosted-git-info "^2.1.4"
- resolve "^1.10.0"
- semver "2 || 3 || 4 || 5"
- validate-npm-package-license "^3.0.1"
-
normalize-path@^3.0.0, normalize-path@~3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65"
@@ -2737,7 +2571,7 @@ p-cancelable@^2.0.0:
resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-2.1.1.tgz#aab7fbd416582fa32a3db49859c122487c5ed2cf"
integrity sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg==
-p-limit@^2.0.0, p-limit@^2.2.0:
+p-limit@^2.0.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1"
integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==
@@ -2758,13 +2592,6 @@ p-locate@^3.0.0:
dependencies:
p-limit "^2.0.0"
-p-locate@^4.1.0:
- version "4.1.0"
- resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07"
- integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==
- dependencies:
- p-limit "^2.2.0"
-
p-locate@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834"
@@ -2796,16 +2623,6 @@ parse-cache-control@^1.0.1:
resolved "https://registry.yarnpkg.com/parse-cache-control/-/parse-cache-control-1.0.1.tgz#8eeab3e54fa56920fe16ba38f77fa21aacc2d74e"
integrity sha512-60zvsJReQPX5/QP0Kzfd/VrpjScIQ7SHBW6bFCYfEP+fp0Eppr1SHhIO5nd1PjZtvclzSzES9D/p5nFJurwfWg==
-parse-json@^5.0.0:
- version "5.2.0"
- resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd"
- integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==
- dependencies:
- "@babel/code-frame" "^7.0.0"
- error-ex "^1.3.1"
- json-parse-even-better-errors "^2.3.0"
- lines-and-columns "^1.1.6"
-
path-exists@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515"
@@ -2849,11 +2666,6 @@ pend@~1.2.0:
resolved "https://registry.yarnpkg.com/pend/-/pend-1.2.0.tgz#7a57eb550a6783f9115331fcf4663d5c8e007a50"
integrity sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==
-picocolors@^1.0.0:
- version "1.0.0"
- resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c"
- integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==
-
picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1:
version "2.3.1"
resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42"
@@ -2958,25 +2770,6 @@ read-config-file@6.3.2:
json5 "^2.2.0"
lazy-val "^1.0.4"
-read-pkg-up@^7.0.0:
- version "7.0.1"
- resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-7.0.1.tgz#f3a6135758459733ae2b95638056e1854e7ef507"
- integrity sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==
- dependencies:
- find-up "^4.1.0"
- read-pkg "^5.2.0"
- type-fest "^0.8.1"
-
-read-pkg@^5.2.0:
- version "5.2.0"
- resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-5.2.0.tgz#7bf295438ca5a33e56cd30e053b34ee7250c93cc"
- integrity sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==
- dependencies:
- "@types/normalize-package-data" "^2.4.0"
- normalize-package-data "^2.5.0"
- parse-json "^5.0.0"
- type-fest "^0.6.0"
-
readable-stream@^3.0.2, readable-stream@^3.4.0, readable-stream@^3.6.0:
version "3.6.2"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967"
@@ -3025,7 +2818,7 @@ resolve-from@^4.0.0:
resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6"
integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==
-resolve@^1.1.6, resolve@^1.10.0:
+resolve@^1.1.6:
version "1.22.8"
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d"
integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==
@@ -3126,17 +2919,17 @@ semver-compare@^1.0.0:
resolved "https://registry.yarnpkg.com/semver-compare/-/semver-compare-1.0.0.tgz#0dee216a1c941ab37e9efb1788f6afc5ff5537fc"
integrity sha512-YM3/ITh2MJ5MtzaM429anh+x2jiLVjqILF4m4oyQB18W7Ggea7BfqdH/wGMK7dDiMghv/6WG7znWMwUDzJiXow==
-"semver@2 || 3 || 4 || 5":
- version "5.7.2"
- resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8"
- integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==
-
semver@^6.2.0:
version "6.3.1"
resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4"
integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==
-semver@^7.3.2, semver@^7.3.5, semver@^7.3.8, semver@^7.5.3, semver@^7.6.0:
+semver@^7.3.2:
+ version "7.6.2"
+ resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.2.tgz#1e3b34759f896e8f14d6134732ce798aeb0c6e13"
+ integrity sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==
+
+semver@^7.3.5, semver@^7.3.8, semver@^7.5.3, semver@^7.6.0:
version "7.6.0"
resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.0.tgz#1a46a4db4bffcccd97b743b5005c8325f23d4e2d"
integrity sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==
@@ -3284,42 +3077,11 @@ spawn-command@0.0.2:
resolved "https://registry.yarnpkg.com/spawn-command/-/spawn-command-0.0.2.tgz#9544e1a43ca045f8531aac1a48cb29bdae62338e"
integrity sha512-zC8zGoGkmc8J9ndvml8Xksr1Amk9qBujgbF0JAIWO7kXr43w0h/0GJNM/Vustixu+YE8N/MTrQ7N31FvHUACxQ==
-spdx-correct@^3.0.0:
- version "3.2.0"
- resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.2.0.tgz#4f5ab0668f0059e34f9c00dce331784a12de4e9c"
- integrity sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==
- dependencies:
- spdx-expression-parse "^3.0.0"
- spdx-license-ids "^3.0.0"
-
-spdx-exceptions@^2.1.0:
- version "2.5.0"
- resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz#5d607d27fc806f66d7b64a766650fa890f04ed66"
- integrity sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==
-
-spdx-expression-parse@^3.0.0:
- version "3.0.1"
- resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679"
- integrity sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==
- dependencies:
- spdx-exceptions "^2.1.0"
- spdx-license-ids "^3.0.0"
-
-spdx-license-ids@^3.0.0:
- version "3.0.17"
- resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.17.tgz#887da8aa73218e51a1d917502d79863161a93f9c"
- integrity sha512-sh8PWc/ftMqAAdFiBu6Fy6JUOYjqDJBJvIhpfDMyHrr0Rbp5liZqd4TjtQ/RgfLjKFZb+LMx5hpml5qOWy0qvg==
-
sprintf-js@^1.1.2, sprintf-js@^1.1.3:
version "1.1.3"
resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.1.3.tgz#4914b903a2f8b685d17fdf78a70e917e872e444a"
integrity sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==
-sprintf-js@~1.0.2:
- version "1.0.3"
- resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
- integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==
-
ssri@^9.0.0:
version "9.0.1"
resolved "https://registry.yarnpkg.com/ssri/-/ssri-9.0.1.tgz#544d4c357a8d7b71a19700074b6883fcb4eae057"
@@ -3367,13 +3129,6 @@ sumchecker@^3.0.1:
dependencies:
debug "^4.1.0"
-supports-color@^5.3.0:
- version "5.5.0"
- resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f"
- integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==
- dependencies:
- has-flag "^3.0.0"
-
supports-color@^7.1.0:
version "7.2.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da"
@@ -3501,16 +3256,6 @@ type-fest@^0.20.2:
resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4"
integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==
-type-fest@^0.6.0:
- version "0.6.0"
- resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.6.0.tgz#8d2a2370d3df886eb5c90ada1c5bf6188acf838b"
- integrity sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==
-
-type-fest@^0.8.1:
- version "0.8.1"
- resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d"
- integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==
-
type-fest@^2.17.0:
version "2.19.0"
resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-2.19.0.tgz#88068015bb33036a598b952e55e9311a60fd3a9b"
@@ -3577,14 +3322,6 @@ util-deprecate@^1.0.1:
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==
-validate-npm-package-license@^3.0.1:
- version "3.0.4"
- resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a"
- integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==
- dependencies:
- spdx-correct "^3.0.0"
- spdx-expression-parse "^3.0.0"
-
verror@^1.10.0:
version "1.10.1"
resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.1.tgz#4bf09eeccf4563b109ed4b3d458380c972b0cdeb"
diff --git a/docs/docs/.vitepress/sidebar.ts b/docs/docs/.vitepress/sidebar.ts
index 6af9e3556a1822fa56f9822d1a6920acecbff93f..84ae5e0fa807c0fcf5f6be9f67c9ad466afd2be2 100644
--- a/docs/docs/.vitepress/sidebar.ts
+++ b/docs/docs/.vitepress/sidebar.ts
@@ -123,6 +123,10 @@ export const sidebar = [
text: "Troubleshooting",
collapsed: true,
items: [
+ {
+ text: "Desktop install",
+ link: "/photos/troubleshooting/desktop-install/",
+ },
{
text: "Files not uploading",
link: "/photos/troubleshooting/files-not-uploading",
@@ -197,6 +201,10 @@ export const sidebar = [
text: "System requirements",
link: "/self-hosting/guides/system-requirements",
},
+ {
+ text: "Configuring S3",
+ link: "/self-hosting/guides/configuring-s3",
+ },
{
text: "Using external S3",
link: "/self-hosting/guides/external-s3",
diff --git a/docs/docs/photos/features/cast/index.md b/docs/docs/photos/features/cast/index.md
index 89dc801f6df6a384c2f13af6dd390afcfb26608a..ecd91cb7ce6655a5c9c876b7733861467ff5b0b7 100644
--- a/docs/docs/photos/features/cast/index.md
+++ b/docs/docs/photos/features/cast/index.md
@@ -1,19 +1,13 @@
---
-title: Archive
-description: |
- Archiving photos and albums in Ente Photos to remove them from your home
- timeline
+title: Cast
+description:
+    Casting your photos onto a large screen, a TV, or a Chromecast device
---
-> [!CAUTION]
->
-> This is preview documentation for an upcoming feature. This feature has not
-> yet been released yet, so the steps below will not work currently.
-
# Cast
With Ente Cast, you can play a slideshow of your favourite albums on your Google
-Chromecast TVs or other Internet-connected large screen devices.
+Chromecast TVs or any other internet-connected large screen devices.
## Get Started
diff --git a/docs/docs/photos/troubleshooting/desktop-install/index.md b/docs/docs/photos/troubleshooting/desktop-install/index.md
new file mode 100644
index 0000000000000000000000000000000000000000..7410c7818e280b9d07c77c5a6da3d40065335911
--- /dev/null
+++ b/docs/docs/photos/troubleshooting/desktop-install/index.md
@@ -0,0 +1,75 @@
+---
+title: Desktop installation
+description: Troubleshooting issues when installing the Ente Photos desktop app
+---
+
+# Desktop app installation
+
+The latest version of the Ente Photos desktop app can be downloaded from
+[ente.io/download](https://ente.io/download). If you're having trouble, please
+see if any of the following cases apply.
+
+## Windows
+
+If the app fails to start on your Windows machine with an "A JavaScript error
+occurred in the main process - The specified module could not be found" error,
+then you might need to install the VC++ runtime from Microsoft.
+
+This is what the error looks like:
+
+{width=500px}
+
+You can install the Microsoft VC++ redistributable runtime from here:
+https://learn.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist?view=msvc-170#latest-microsoft-visual-c-redistributable-version
+
+## AppImages on ARM64 Linux
+
+If you're on an ARM64 machine running Linux, and the AppImage doesn't do
+anything when you run it, you will need to run the following command on your
+machine:
+
+```sh
+sudo ln -s /usr/lib/aarch64-linux-gnu/libz.so{.1,}
+```
+
+The exact path might be different on your machine. Briefly, what we need to do
+is create `libz.so` as an alias for `libz.so.1`. For more details, see the
+following upstream issues:
+
+- libz.so cannot open shared object file on ARM64 -
+ [AppImage/AppImageKit/issues/1092](https://github.com/AppImage/AppImageKit/issues/1092)
+
+- libz.so: cannot open shared object file with Ubuntu arm64 -
+ [electron-userland/electron-builder/issues/7835](https://github.com/electron-userland/electron-builder/issues/7835)
+
+## AppImage says it requires FUSE
+
+See
+[docs.appimage.org](https://docs.appimage.org/user-guide/troubleshooting/fuse.html#the-appimage-tells-me-it-needs-fuse-to-run).
+
+tl;dr: for example, on Ubuntu,
+
+```sh
+sudo apt install libfuse2
+```
+
+## Linux SUID error
+
+On some Linux distributions, if you run the AppImage from the CLI, it might fail
+with the following error:
+
+> The SUID sandbox helper binary was found, but is not configured correctly.
+
+This happens only when the AppImage is run from the command line. If you
+instead double click on the AppImage in your file browser, then it should
+start properly.
+
+If you do want to run it from the command line, you can do so by passing the
+`--no-sandbox` flag when executing the AppImage. e.g.
+
+```sh
+./ente.AppImage --no-sandbox
+```
+
+For more details, see this upstream issue on
+[electron](https://github.com/electron/electron/issues/17972).
diff --git a/docs/docs/photos/troubleshooting/desktop-install/windows-vc.png b/docs/docs/photos/troubleshooting/desktop-install/windows-vc.png
new file mode 100644
index 0000000000000000000000000000000000000000..852c037d57cf9b8c522be881b38dceacd1faac9a
Binary files /dev/null and b/docs/docs/photos/troubleshooting/desktop-install/windows-vc.png differ
diff --git a/docs/docs/self-hosting/guides/configuring-s3.md b/docs/docs/self-hosting/guides/configuring-s3.md
new file mode 100644
index 0000000000000000000000000000000000000000..8e823ed2ae32c334201a58681a1bd6e903241114
--- /dev/null
+++ b/docs/docs/self-hosting/guides/configuring-s3.md
@@ -0,0 +1,80 @@
+---
+title: Configuring S3 buckets
+description:
+    Configure S3 endpoints to fix upload errors or use your self-hosted Ente
+ from outside localhost
+---
+
+# Configuring S3
+
+There are three components involved in uploading:
+
+1. The client (e.g. the web app or the mobile app)
+2. Ente's server (museum)
+3. The S3-compatible object storage (e.g. minio in the default starter)
+
+For the uploads to work, all three of them need to be able to reach each other.
+This is because the client uploads directly to the object storage. The
+interaction goes something like this:
+
+1. The client wants to upload, so it asks museum where it should upload to.
+2. Museum creates pre-signed URLs for the S3 bucket that was configured.
+3. The client then uploads directly to the S3 bucket using these URLs.
+
+The upshot of this is that _both_ the client and museum should be able to reach
+your S3 bucket.
+
+The URL for the S3 bucket is configured in
+[scripts/compose/credentials.yaml](https://github.com/ente-io/ente/blob/main/server/scripts/compose/credentials.yaml#L10).
+You can edit this file directly when testing, though it is just simpler and more
+robust to create a `museum.yaml` (in the same folder as the Docker compose file)
+and put your custom configuration there (in your case, you can put an entire
+`s3` config object in your `museum.yaml`).
+
+> [!TIP]
+>
+> For more details about these configuration objects, see the documentation for
+> the `s3` object in
+> [configurations/local.yaml](https://github.com/ente-io/ente/blob/main/server/configurations/local.yaml).
+
+By default, you only need to configure the endpoint for the first bucket.
+
+> [!NOTE]
+>
+> If you're wondering why there are 3 buckets - that's because our production
+> instance uses these to perform replication.
+>
+> However, in a self-hosted setup replication is off by default (you can turn it
+> on if you want). When replication is turned off, only the first bucket is
+> used, and you can remove the other two if you wish or just ignore them.
+
+The `endpoint` for the first bucket in the starter `credentials.yaml` is
+`localhost:3200`. This works because both museum (`2`) and minio (`3`) run
+within the same Docker compose cluster, so they are able to reach each other.
+If at this point we were to run the web app (`1`) on localhost (say using
+`yarn dev:photos`), it too would run on localhost and thus would be able to
+reach `3`.
+
+If you were to try to connect from a mobile app, this would not work since
+`localhost:3200` would not resolve on your mobile device. So you'll need to
+change this endpoint to something like `yourserverip:3200`, so that the mobile
+app can also reach it.
+
+The same principle applies if you're deploying to your custom domain.
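+
+For illustration, here is a minimal sketch of the `s3` override you could put
+in your `museum.yaml` (the key, secret and bucket name below are placeholders;
+mirror whatever your `credentials.yaml` currently uses, and see
+`configurations/local.yaml` for the full shape of the `s3` object):
+
+```yaml
+s3:
+    are_local_buckets: true
+    b2-eu-cen:
+        key: <your-access-key>
+        secret: <your-secret-key>
+        endpoint: yourserverip:3200
+        region: eu-central-2
+        bucket: b2-eu-cen
+```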
+
+> [!NOTE]
+>
+> If you need to configure SSL, for example if you're running over the internet,
+> you'll need to turn off `s3.are_local_buckets` (which disables SSL in the
+> default starter compose template).
+>
+> Disabling `s3.are_local_buckets` also switches to subdomain-style URLs for
+> the buckets. However, not all S3 providers support these; in particular,
+> minio does not work with them in its default configuration. In such cases
+> you'll also need to enable `s3.use_path_style_urls`.
+
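+As a sketch, an internet-facing setup that keeps minio as the S3 provider
+would flip both of these flags inside the same `s3` object of your
+`museum.yaml`:
+
+```yaml
+s3:
+    are_local_buckets: false
+    use_path_style_urls: true
+```
+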
+To summarize:
+
+Set the S3 bucket `endpoint` in `credentials.yaml` to `yourserverip:3200` or
+some such IP/hostname that is accessible both from where you are running the
+Ente clients (e.g. the mobile app) and from within the Docker compose cluster.
diff --git a/docs/docs/self-hosting/guides/custom-server/index.md b/docs/docs/self-hosting/guides/custom-server/index.md
index a5ce76cc2b05816c744f0075f99288bc2e08bc5d..110e3dbb8842b3b1016a7c393e8ca0da665e787d 100644
--- a/docs/docs/self-hosting/guides/custom-server/index.md
+++ b/docs/docs/self-hosting/guides/custom-server/index.md
@@ -34,4 +34,18 @@ endpoint:
api: "http://localhost:8080"
```
-(Another [example](https://github.com/ente-io/ente/blob/main/cli/config.yaml.example))
+(Another
+[example](https://github.com/ente-io/ente/blob/main/cli/config.yaml.example))
+
+## Web apps and Photos desktop app
+
+You will need to build the app from source and use the
+`NEXT_PUBLIC_ENTE_ENDPOINT` environment variable to tell it which server to
+connect to. For example:
+
+```sh
+NEXT_PUBLIC_ENTE_ENDPOINT=http://localhost:8080 yarn dev:photos
+```
+
+For more details, see
+[hosting the web app](https://help.ente.io/self-hosting/guides/web-app).
diff --git a/docs/docs/self-hosting/guides/index.md b/docs/docs/self-hosting/guides/index.md
index a8a64d96055d4a8454cc63748cb2cad47408473c..b8a73d7eb0c677dc9affa1dbd83ef6495fc0c76d 100644
--- a/docs/docs/self-hosting/guides/index.md
+++ b/docs/docs/self-hosting/guides/index.md
@@ -16,5 +16,8 @@ See the sidebar for existing guides. In particular:
- For various admin related tasks, e.g. increasing the storage quota on your
self hosted instance, see [administering your custom server](admin).
-- For self hosting both the server and web app using external S3 buckets for
- object storage, see [using external S3](external-s3).
+- For configuring your S3 buckets to get the object storage to work from your
+  mobile device, or for fixing upload errors, see
+  [configuring S3](configuring-s3). There is also a longer
+  [community contributed guide](external-s3) for a more fully self-hosted
+  setup of both the server and web app using external S3 buckets for object
+  storage.
diff --git a/docs/docs/self-hosting/guides/web-app.md b/docs/docs/self-hosting/guides/web-app.md
index 49dfdd114b38ad41d92d327029d004de5625e380..28802c457d48ab1b49e78ce5170eb38a2707651e 100644
--- a/docs/docs/self-hosting/guides/web-app.md
+++ b/docs/docs/self-hosting/guides/web-app.md
@@ -1,6 +1,8 @@
---
title: Hosting the web app
-description: Building and hosting Ente's web app, connecting it to your self-hosted server
+description:
+ Building and hosting Ente's web app, connecting it to your self-hosted
+ server
---
# Web app
diff --git a/docs/docs/self-hosting/troubleshooting/uploads.md b/docs/docs/self-hosting/troubleshooting/uploads.md
index 4f7273e94378ccaecee54b9ad590656cd8e6de7c..435a5e93c62be8053b0956ce04becfdd641716a8 100644
--- a/docs/docs/self-hosting/troubleshooting/uploads.md
+++ b/docs/docs/self-hosting/troubleshooting/uploads.md
@@ -5,9 +5,9 @@ description: Fixing upload errors when trying to self host Ente
# Uploads failing
-If uploads to your self-hosted server are failing, make sure that
-`credentials.yaml` has `yourserverip:3200` for all three minio locations.
+If uploads to your minio are failing, ensure that you've configured the S3
+bucket `endpoint` in `credentials.yaml` (or `museum.yaml`) to, say,
+`yourserverip:3200`. This can be any host or port; it just needs to be a value
+that is reachable both from your client and from museum.
-By default it is `localhost:3200`, and it needs to be changed to an IP that is
-accessible from both where you are running the Ente clients (e.g. the mobile
-app) and also from within the Docker compose cluster.
+For more details, see [configuring-s3](/self-hosting/guides/configuring-s3).
diff --git a/mobile/ios/Runner/Info.plist b/mobile/ios/Runner/Info.plist
index cdbc23774913e8bded8d8b4467a771a6b3718ee8..9afb874e520be843ebf735c15c789baad88cbd4f 100644
--- a/mobile/ios/Runner/Info.plist
+++ b/mobile/ios/Runner/Info.plist
@@ -108,7 +108,7 @@
NSBonjourServices
_googlecast._tcp
- F5BCEC64._googlecast._tcp
+ _F5BCEC64._googlecast._tcp
NSLocalNetworkUsageDescription
diff --git a/mobile/lib/core/configuration.dart b/mobile/lib/core/configuration.dart
index cde766b1e01aed4d0632e88df983353970e031b3..334da4af94d1202c297b8355b7e4cc2956eed112 100644
--- a/mobile/lib/core/configuration.dart
+++ b/mobile/lib/core/configuration.dart
@@ -72,8 +72,6 @@ class Configuration {
static const anonymousUserIDKey = "anonymous_user_id";
static const endPointKey = "endpoint";
- final kTempFolderDeletionTimeBuffer = const Duration(hours: 6).inMicroseconds;
-
static final _logger = Logger("Configuration");
String? _cachedToken;
@@ -103,20 +101,7 @@ class Configuration {
_documentsDirectory = (await getApplicationDocumentsDirectory()).path;
_tempDocumentsDirPath = _documentsDirectory + "/temp/";
final tempDocumentsDir = Directory(_tempDocumentsDirPath);
- try {
- final currentTime = DateTime.now().microsecondsSinceEpoch;
- if (tempDocumentsDir.existsSync() &&
- (_preferences.getInt(lastTempFolderClearTimeKey) ?? 0) <
- (currentTime - kTempFolderDeletionTimeBuffer)) {
- await tempDocumentsDir.delete(recursive: true);
- await _preferences.setInt(lastTempFolderClearTimeKey, currentTime);
- _logger.info("Cleared temp folder");
- } else {
- _logger.info("Skipping temp folder clear");
- }
- } catch (e) {
- _logger.warning(e);
- }
+ await _cleanUpStaleFiles(tempDocumentsDir);
tempDocumentsDir.createSync(recursive: true);
final tempDirectoryPath = (await getTemporaryDirectory()).path;
_thumbnailCacheDirectory = tempDirectoryPath + "/thumbnail-cache";
@@ -144,6 +129,42 @@ class Configuration {
SuperLogging.setUserID(await _getOrCreateAnonymousUserID()).ignore();
}
+  // _cleanUpStaleFiles deletes all files in the temp directory that are older
+  // than tempDirCleanUpInterval, except the temp encrypted files created for
+  // uploads. Those files are deleted by the file uploader after the upload is
+  // complete, or when they are no longer being used / tracked.
+ Future _cleanUpStaleFiles(Directory tempDocumentsDir) async {
+ try {
+ final currentTime = DateTime.now().microsecondsSinceEpoch;
+ if (tempDocumentsDir.existsSync() &&
+ (_preferences.getInt(lastTempFolderClearTimeKey) ?? 0) <
+ (currentTime - tempDirCleanUpInterval)) {
+ int skippedTempUploadFiles = 0;
+ final files = tempDocumentsDir.listSync();
+ for (final file in files) {
+ if (file is File) {
+ if (file.path.contains(uploadTempFilePrefix)) {
+ skippedTempUploadFiles++;
+ continue;
+ }
+ _logger.info("Deleting file: ${file.path}");
+ await file.delete();
+ } else if (file is Directory) {
+ await file.delete(recursive: true);
+ }
+ }
+ await _preferences.setInt(lastTempFolderClearTimeKey, currentTime);
+ _logger.info(
+ "Cleared temp folder except $skippedTempUploadFiles upload files",
+ );
+ } else {
+ _logger.info("Skipping temp folder clear");
+ }
+ } catch (e) {
+ _logger.warning(e);
+ }
+ }
+
Future logout({bool autoLogout = false}) async {
if (SyncService.instance.isSyncInProgress()) {
SyncService.instance.stopSync();
diff --git a/mobile/lib/core/constants.dart b/mobile/lib/core/constants.dart
index c2d08d903a6d8ef3ba3fa40d633e349d6de54071..77764ee6504d383d2fd7001c1747355cc16d3965 100644
--- a/mobile/lib/core/constants.dart
+++ b/mobile/lib/core/constants.dart
@@ -1,3 +1,5 @@
+import "package:flutter/foundation.dart";
+
const int thumbnailSmallSize = 256;
const int thumbnailQuality = 50;
const int thumbnailLargeSize = 512;
@@ -41,6 +43,7 @@ const supportEmail = 'support@ente.io';
// this is the chunk size of the un-encrypted file which is read and encrypted before uploading it as a single part.
const multipartPartSize = 20 * 1024 * 1024;
+const multipartPartSizeInternal = 8 * 1024 * 1024;
const kDefaultProductionEndpoint = 'https://api.ente.io';
@@ -95,3 +98,8 @@ const blackThumbnailBase64 = '/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAEBAQEBAQEB'
'KACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo' +
'AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo' +
'AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgD/9k=';
+
+const uploadTempFilePrefix = "upload_file_";
+final tempDirCleanUpInterval = kDebugMode
+ ? const Duration(seconds: 30).inMicroseconds
+ : const Duration(hours: 6).inMicroseconds;
diff --git a/mobile/lib/db/embeddings_db.dart b/mobile/lib/db/embeddings_db.dart
index 0eb1d3f6d7e1c087a39079da12a7ff083b5159b6..64878a2ce4365ebd008e9e2fced8f78eac6d1eb4 100644
--- a/mobile/lib/db/embeddings_db.dart
+++ b/mobile/lib/db/embeddings_db.dart
@@ -63,6 +63,19 @@ class EmbeddingsDB {
return _convertToEmbeddings(results);
}
+ // Get FileIDs for a specific model
+  Future<Set<int>> getFileIDs(Model model) async {
+ final db = await _database;
+ final results = await db.getAll(
+ 'SELECT $columnFileID FROM $tableName WHERE $columnModel = ?',
+ [modelToInt(model)!],
+ );
+ if (results.isEmpty) {
+ return {};
+ }
+ return results.map((e) => e[columnFileID] as int).toSet();
+ }
+
Future put(Embedding embedding) async {
final db = await _database;
await db.execute(
diff --git a/mobile/lib/db/upload_locks_db.dart b/mobile/lib/db/upload_locks_db.dart
index 11112d0cead465d918ed5cfc792fc8e492ef9b93..b32084b6ffb9c2cc081cc4adb7b778b6d1a908eb 100644
--- a/mobile/lib/db/upload_locks_db.dart
+++ b/mobile/lib/db/upload_locks_db.dart
@@ -3,16 +3,60 @@ import 'dart:io';
import 'package:path/path.dart';
import 'package:path_provider/path_provider.dart';
+import "package:photos/module/upload/model/multipart.dart";
import 'package:sqflite/sqflite.dart';
+import "package:sqflite_migration/sqflite_migration.dart";
class UploadLocksDB {
static const _databaseName = "ente.upload_locks.db";
- static const _databaseVersion = 1;
- static const _table = "upload_locks";
- static const _columnID = "id";
- static const _columnOwner = "owner";
- static const _columnTime = "time";
+ static const _uploadLocksTable = (
+ table: "upload_locks",
+ columnID: "id",
+ columnOwner: "owner",
+ columnTime: "time",
+ );
+
+ static const _trackUploadTable = (
+ table: "track_uploads",
+ columnID: "id",
+ columnLocalID: "local_id",
+ columnFileHash: "file_hash",
+ columnCollectionID: "collection_id",
+ columnEncryptedFileName: "encrypted_file_name",
+ columnEncryptedFileSize: "encrypted_file_size",
+ columnEncryptedFileKey: "encrypted_file_key",
+ columnFileEncryptionNonce: "file_encryption_nonce",
+ columnKeyEncryptionNonce: "key_encryption_nonce",
+ columnObjectKey: "object_key",
+ columnCompleteUrl: "complete_url",
+ columnStatus: "status",
+ columnPartSize: "part_size",
+ columnLastAttemptedAt: "last_attempted_at",
+ columnCreatedAt: "created_at",
+ );
+
+ static const _partsTable = (
+ table: "upload_parts",
+ columnObjectKey: "object_key",
+ columnPartNumber: "part_number",
+ columnPartUrl: "part_url",
+ columnPartETag: "part_etag",
+ columnPartStatus: "part_status",
+ );
+
+ static final initializationScript = [
+ ..._createUploadLocksTable(),
+ ];
+
+ static final migrationScripts = [
+ ..._createTrackUploadsTable(),
+ ];
+
+ final dbConfig = MigrationConfig(
+ initializationScript: initializationScript,
+ migrationScripts: migrationScripts,
+ );
UploadLocksDB._privateConstructor();
static final UploadLocksDB instance = UploadLocksDB._privateConstructor();
@@ -27,44 +71,82 @@ class UploadLocksDB {
final Directory documentsDirectory =
await getApplicationDocumentsDirectory();
final String path = join(documentsDirectory.path, _databaseName);
- return await openDatabase(
- path,
- version: _databaseVersion,
- onCreate: _onCreate,
- );
+
+ return await openDatabaseWithMigration(path, dbConfig);
}
- Future _onCreate(Database db, int version) async {
- await db.execute(
+ static List _createUploadLocksTable() {
+ return [
'''
- CREATE TABLE $_table (
- $_columnID TEXT PRIMARY KEY NOT NULL,
- $_columnOwner TEXT NOT NULL,
- $_columnTime TEXT NOT NULL
+ CREATE TABLE ${_uploadLocksTable.table} (
+ ${_uploadLocksTable.columnID} TEXT PRIMARY KEY NOT NULL,
+ ${_uploadLocksTable.columnOwner} TEXT NOT NULL,
+ ${_uploadLocksTable.columnTime} TEXT NOT NULL
)
''',
- );
+ ];
+ }
+
+ static List _createTrackUploadsTable() {
+ return [
+ '''
+ CREATE TABLE IF NOT EXISTS ${_trackUploadTable.table} (
+ ${_trackUploadTable.columnID} INTEGER PRIMARY KEY,
+ ${_trackUploadTable.columnLocalID} TEXT NOT NULL,
+ ${_trackUploadTable.columnFileHash} TEXT NOT NULL,
+ ${_trackUploadTable.columnCollectionID} INTEGER NOT NULL,
+ ${_trackUploadTable.columnEncryptedFileName} TEXT NOT NULL,
+ ${_trackUploadTable.columnEncryptedFileSize} INTEGER NOT NULL,
+ ${_trackUploadTable.columnEncryptedFileKey} TEXT NOT NULL,
+ ${_trackUploadTable.columnFileEncryptionNonce} TEXT NOT NULL,
+ ${_trackUploadTable.columnKeyEncryptionNonce} TEXT NOT NULL,
+ ${_trackUploadTable.columnObjectKey} TEXT NOT NULL,
+ ${_trackUploadTable.columnCompleteUrl} TEXT NOT NULL,
+ ${_trackUploadTable.columnStatus} TEXT DEFAULT '${MultipartStatus.pending.name}' NOT NULL,
+ ${_trackUploadTable.columnPartSize} INTEGER NOT NULL,
+ ${_trackUploadTable.columnLastAttemptedAt} INTEGER NOT NULL,
+ ${_trackUploadTable.columnCreatedAt} INTEGER DEFAULT CURRENT_TIMESTAMP NOT NULL
+ )
+ ''',
+ '''
+ CREATE TABLE IF NOT EXISTS ${_partsTable.table} (
+ ${_partsTable.columnObjectKey} TEXT NOT NULL REFERENCES ${_trackUploadTable.table}(${_trackUploadTable.columnObjectKey}) ON DELETE CASCADE,
+ ${_partsTable.columnPartNumber} INTEGER NOT NULL,
+ ${_partsTable.columnPartUrl} TEXT NOT NULL,
+ ${_partsTable.columnPartETag} TEXT,
+ ${_partsTable.columnPartStatus} TEXT NOT NULL,
+ PRIMARY KEY (${_partsTable.columnObjectKey}, ${_partsTable.columnPartNumber})
+ )
+ ''',
+ ];
}
Future clearTable() async {
final db = await instance.database;
- await db.delete(_table);
+ await db.delete(_uploadLocksTable.table);
+ await db.delete(_trackUploadTable.table);
+ await db.delete(_partsTable.table);
}
Future acquireLock(String id, String owner, int time) async {
final db = await instance.database;
final row = {};
- row[_columnID] = id;
- row[_columnOwner] = owner;
- row[_columnTime] = time;
- await db.insert(_table, row, conflictAlgorithm: ConflictAlgorithm.fail);
+ row[_uploadLocksTable.columnID] = id;
+ row[_uploadLocksTable.columnOwner] = owner;
+ row[_uploadLocksTable.columnTime] = time;
+ await db.insert(
+ _uploadLocksTable.table,
+ row,
+ conflictAlgorithm: ConflictAlgorithm.fail,
+ );
}
Future isLocked(String id, String owner) async {
final db = await instance.database;
final rows = await db.query(
- _table,
- where: '$_columnID = ? AND $_columnOwner = ?',
+ _uploadLocksTable.table,
+ where:
+ '${_uploadLocksTable.columnID} = ? AND ${_uploadLocksTable.columnOwner} = ?',
whereArgs: [id, owner],
);
return rows.length == 1;
@@ -73,8 +155,9 @@ class UploadLocksDB {
Future releaseLock(String id, String owner) async {
final db = await instance.database;
return db.delete(
- _table,
- where: '$_columnID = ? AND $_columnOwner = ?',
+ _uploadLocksTable.table,
+ where:
+ '${_uploadLocksTable.columnID} = ? AND ${_uploadLocksTable.columnOwner} = ?',
whereArgs: [id, owner],
);
}
@@ -82,8 +165,9 @@ class UploadLocksDB {
Future releaseLocksAcquiredByOwnerBefore(String owner, int time) async {
final db = await instance.database;
return db.delete(
- _table,
- where: '$_columnOwner = ? AND $_columnTime < ?',
+ _uploadLocksTable.table,
+ where:
+ '${_uploadLocksTable.columnOwner} = ? AND ${_uploadLocksTable.columnTime} < ?',
whereArgs: [owner, time],
);
}
@@ -91,9 +175,251 @@ class UploadLocksDB {
Future releaseAllLocksAcquiredBefore(int time) async {
final db = await instance.database;
return db.delete(
- _table,
- where: '$_columnTime < ?',
+ _uploadLocksTable.table,
+ where: '${_uploadLocksTable.columnTime} < ?',
whereArgs: [time],
);
}
+
+ Future<({String encryptedFileKey, String fileNonce, String keyNonce})>
+ getFileEncryptionData(
+ String localId,
+ String fileHash,
+ int collectionID,
+ ) async {
+ final db = await instance.database;
+
+ final rows = await db.query(
+ _trackUploadTable.table,
+ where: '${_trackUploadTable.columnLocalID} = ?'
+ ' AND ${_trackUploadTable.columnFileHash} = ?'
+ ' AND ${_trackUploadTable.columnCollectionID} = ?',
+ whereArgs: [localId, fileHash, collectionID],
+ );
+
+ if (rows.isEmpty) {
+ throw Exception("No cached links found for $localId and $fileHash");
+ }
+ final row = rows.first;
+
+ return (
+ encryptedFileKey: row[_trackUploadTable.columnEncryptedFileKey] as String,
+ fileNonce: row[_trackUploadTable.columnFileEncryptionNonce] as String,
+ keyNonce: row[_trackUploadTable.columnKeyEncryptionNonce] as String,
+ );
+ }
+
+ Future updateLastAttempted(
+ String localId,
+ String fileHash,
+ int collectionID,
+ ) async {
+ final db = await instance.database;
+ await db.update(
+ _trackUploadTable.table,
+ {
+ _trackUploadTable.columnLastAttemptedAt:
+ DateTime.now().millisecondsSinceEpoch,
+ },
+ where: '${_trackUploadTable.columnLocalID} = ?'
+ ' AND ${_trackUploadTable.columnFileHash} = ?'
+ ' AND ${_trackUploadTable.columnCollectionID} = ?',
+ whereArgs: [
+ localId,
+ fileHash,
+ collectionID,
+ ],
+ );
+ }
+
+  Future<MultipartInfo> getCachedLinks(
+ String localId,
+ String fileHash,
+ int collectionID,
+ ) async {
+ final db = await instance.database;
+ final rows = await db.query(
+ _trackUploadTable.table,
+ where: '${_trackUploadTable.columnLocalID} = ?'
+ ' AND ${_trackUploadTable.columnFileHash} = ?'
+ ' AND ${_trackUploadTable.columnCollectionID} = ?',
+ whereArgs: [localId, fileHash, collectionID],
+ );
+ if (rows.isEmpty) {
+ throw Exception("No cached links found for $localId and $fileHash");
+ }
+ final row = rows.first;
+ final objectKey = row[_trackUploadTable.columnObjectKey] as String;
+ final partsStatus = await db.query(
+ _partsTable.table,
+ where: '${_partsTable.columnObjectKey} = ?',
+ whereArgs: [objectKey],
+ );
+
+    final List<bool> partUploadStatus = [];
+    final List<String> partsURLs = List.generate(
+      partsStatus.length,
+      (index) => "",
+    );
+    final Map<int, String> partETags = {};
+
+ for (final part in partsStatus) {
+ final partNumber = part[_partsTable.columnPartNumber] as int;
+ final partUrl = part[_partsTable.columnPartUrl] as String;
+ final partStatus = part[_partsTable.columnPartStatus] as String;
+ partsURLs[partNumber] = partUrl;
+ if (part[_partsTable.columnPartETag] != null) {
+ partETags[partNumber] = part[_partsTable.columnPartETag] as String;
+ }
+ partUploadStatus.add(partStatus == "uploaded");
+ }
+ final urls = MultipartUploadURLs(
+ objectKey: objectKey,
+ completeURL: row[_trackUploadTable.columnCompleteUrl] as String,
+ partsURLs: partsURLs,
+ );
+
+ return MultipartInfo(
+ urls: urls,
+ status: MultipartStatus.values
+ .byName(row[_trackUploadTable.columnStatus] as String),
+ partUploadStatus: partUploadStatus,
+ partETags: partETags,
+ partSize: row[_trackUploadTable.columnPartSize] as int,
+ );
+ }
+
+ Future createTrackUploadsEntry(
+ String localId,
+ String fileHash,
+ int collectionID,
+ MultipartUploadURLs urls,
+ String encryptedFileName,
+ int fileSize,
+ String fileKey,
+ String fileNonce,
+ String keyNonce, {
+ required int partSize,
+ }) async {
+ final db = await UploadLocksDB.instance.database;
+ final objectKey = urls.objectKey;
+
+ await db.insert(
+ _trackUploadTable.table,
+ {
+ _trackUploadTable.columnLocalID: localId,
+ _trackUploadTable.columnFileHash: fileHash,
+ _trackUploadTable.columnCollectionID: collectionID,
+ _trackUploadTable.columnObjectKey: objectKey,
+ _trackUploadTable.columnCompleteUrl: urls.completeURL,
+ _trackUploadTable.columnEncryptedFileName: encryptedFileName,
+ _trackUploadTable.columnEncryptedFileSize: fileSize,
+ _trackUploadTable.columnEncryptedFileKey: fileKey,
+ _trackUploadTable.columnFileEncryptionNonce: fileNonce,
+ _trackUploadTable.columnKeyEncryptionNonce: keyNonce,
+ _trackUploadTable.columnPartSize: partSize,
+ _trackUploadTable.columnLastAttemptedAt:
+ DateTime.now().millisecondsSinceEpoch,
+ },
+ );
+
+ final partsURLs = urls.partsURLs;
+ final partsLength = partsURLs.length;
+
+ for (int i = 0; i < partsLength; i++) {
+ await db.insert(
+ _partsTable.table,
+ {
+ _partsTable.columnObjectKey: objectKey,
+ _partsTable.columnPartNumber: i,
+ _partsTable.columnPartUrl: partsURLs[i],
+ _partsTable.columnPartStatus: PartStatus.pending.name,
+ },
+ );
+ }
+ }
+
+ Future updatePartStatus(
+ String objectKey,
+ int partNumber,
+ String etag,
+ ) async {
+ final db = await instance.database;
+ await db.update(
+ _partsTable.table,
+ {
+ _partsTable.columnPartStatus: PartStatus.uploaded.name,
+ _partsTable.columnPartETag: etag,
+ },
+ where:
+ '${_partsTable.columnObjectKey} = ? AND ${_partsTable.columnPartNumber} = ?',
+ whereArgs: [objectKey, partNumber],
+ );
+ }
+
+ Future updateTrackUploadStatus(
+ String objectKey,
+ MultipartStatus status,
+ ) async {
+ final db = await instance.database;
+ await db.update(
+ _trackUploadTable.table,
+ {
+ _trackUploadTable.columnStatus: status.name,
+ },
+ where: '${_trackUploadTable.columnObjectKey} = ?',
+ whereArgs: [objectKey],
+ );
+ }
+
+ Future deleteMultipartTrack(
+ String localId,
+ ) async {
+ final db = await instance.database;
+ return await db.delete(
+ _trackUploadTable.table,
+ where: '${_trackUploadTable.columnLocalID} = ?',
+ whereArgs: [localId],
+ );
+ }
+
+ // getFileNameToLastAttemptedAtMap returns a map of encrypted file name to last attempted at time
+  Future<Map<String, int>> getFileNameToLastAttemptedAtMap() {
+ return instance.database.then((db) async {
+ final rows = await db.query(
+ _trackUploadTable.table,
+ columns: [
+ _trackUploadTable.columnEncryptedFileName,
+ _trackUploadTable.columnLastAttemptedAt,
+ ],
+ );
+      final map = <String, int>{};
+ for (final row in rows) {
+ map[row[_trackUploadTable.columnEncryptedFileName] as String] =
+ row[_trackUploadTable.columnLastAttemptedAt] as int;
+ }
+ return map;
+ });
+ }
+
+ Future getEncryptedFileName(
+ String localId,
+ String fileHash,
+ int collectionID,
+ ) {
+ return instance.database.then((db) async {
+ final rows = await db.query(
+ _trackUploadTable.table,
+ where: '${_trackUploadTable.columnLocalID} = ?'
+ ' AND ${_trackUploadTable.columnFileHash} = ?'
+ ' AND ${_trackUploadTable.columnCollectionID} = ?',
+ whereArgs: [localId, fileHash, collectionID],
+ );
+ if (rows.isEmpty) {
+ return null;
+ }
+ final row = rows.first;
+ return row[_trackUploadTable.columnEncryptedFileName] as String;
+ });
+ }
}
diff --git a/mobile/lib/events/embedding_updated_event.dart b/mobile/lib/events/embedding_updated_event.dart
index 9021b8b50c4dab94ea30fa294d0b70e53befa81c..736b85c17c23eb99ae02c2729a1bd9830c0cea24 100644
--- a/mobile/lib/events/embedding_updated_event.dart
+++ b/mobile/lib/events/embedding_updated_event.dart
@@ -1,3 +1,5 @@
import "package:photos/events/event.dart";
class EmbeddingUpdatedEvent extends Event {}
+
+class EmbeddingCacheUpdatedEvent extends Event {}
diff --git a/mobile/lib/generated/intl/messages_en.dart b/mobile/lib/generated/intl/messages_en.dart
index 43b39c82edd40e484d1286c40d22f42c0fbd03a7..aab7f47bd8a0156f07d648efa6bf803acf089917 100644
--- a/mobile/lib/generated/intl/messages_en.dart
+++ b/mobile/lib/generated/intl/messages_en.dart
@@ -132,7 +132,7 @@ class MessageLookup extends MessageLookupByLibrary {
"Please talk to ${providerName} support if you were charged";
static String m38(endDate) =>
- "Free trial valid till ${endDate}.\nYou can choose a paid plan afterwards.";
+ "Free trial valid till ${endDate}.\nYou can purchase a paid plan afterwards.";
static String m39(toEmail) => "Please email us at ${toEmail}";
diff --git a/mobile/lib/generated/intl/messages_nl.dart b/mobile/lib/generated/intl/messages_nl.dart
index af7502d9067a72099b5c0fc1f0ca5f171fbd3f5a..f6987973c33f1eed184b16b2d8c926c6d7bacb8a 100644
--- a/mobile/lib/generated/intl/messages_nl.dart
+++ b/mobile/lib/generated/intl/messages_nl.dart
@@ -368,6 +368,14 @@ class MessageLookup extends MessageLookupByLibrary {
"Verificatie mislukt, probeer het opnieuw"),
"authenticationSuccessful":
MessageLookupByLibrary.simpleMessage("Verificatie geslaagd!"),
+ "autoCastDialogBody": MessageLookupByLibrary.simpleMessage(
+ "Je zult de beschikbare Cast apparaten hier zien."),
+ "autoCastiOSPermission": MessageLookupByLibrary.simpleMessage(
+ "Zorg ervoor dat lokale netwerkrechten zijn ingeschakeld voor de Ente Photos app, in Instellingen."),
+ "autoPair":
+ MessageLookupByLibrary.simpleMessage("Automatisch koppelen"),
+ "autoPairDesc": MessageLookupByLibrary.simpleMessage(
+ "Automatisch koppelen werkt alleen met apparaten die Chromecast ondersteunen."),
"available": MessageLookupByLibrary.simpleMessage("Beschikbaar"),
"backedUpFolders":
MessageLookupByLibrary.simpleMessage("Back-up mappen"),
@@ -399,6 +407,10 @@ class MessageLookup extends MessageLookupByLibrary {
"cannotAddMorePhotosAfterBecomingViewer": m9,
"cannotDeleteSharedFiles": MessageLookupByLibrary.simpleMessage(
"Kan gedeelde bestanden niet verwijderen"),
+ "castIPMismatchBody": MessageLookupByLibrary.simpleMessage(
+ "Zorg ervoor dat je op hetzelfde netwerk zit als de tv."),
+ "castIPMismatchTitle":
+ MessageLookupByLibrary.simpleMessage("Album casten mislukt"),
"castInstruction": MessageLookupByLibrary.simpleMessage(
"Bezoek cast.ente.io op het apparaat dat u wilt koppelen.\n\nVoer de code hieronder in om het album op uw TV af te spelen."),
"centerPoint": MessageLookupByLibrary.simpleMessage("Middelpunt"),
@@ -473,6 +485,8 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("Bevestig herstelsleutel"),
"confirmYourRecoveryKey":
MessageLookupByLibrary.simpleMessage("Bevestig herstelsleutel"),
+ "connectToDevice": MessageLookupByLibrary.simpleMessage(
+ "Verbinding maken met apparaat"),
"contactFamilyAdmin": m12,
"contactSupport":
MessageLookupByLibrary.simpleMessage("Contacteer klantenservice"),
@@ -750,6 +764,8 @@ class MessageLookup extends MessageLookupByLibrary {
"filesBackedUpInAlbum": m23,
"filesDeleted":
MessageLookupByLibrary.simpleMessage("Bestanden verwijderd"),
+ "filesSavedToGallery": MessageLookupByLibrary.simpleMessage(
+ "Bestand opgeslagen in galerij"),
"flip": MessageLookupByLibrary.simpleMessage("Omdraaien"),
"forYourMemories":
MessageLookupByLibrary.simpleMessage("voor uw herinneringen"),
@@ -938,6 +954,8 @@ class MessageLookup extends MessageLookupByLibrary {
"manageParticipants": MessageLookupByLibrary.simpleMessage("Beheren"),
"manageSubscription":
MessageLookupByLibrary.simpleMessage("Abonnement beheren"),
+ "manualPairDesc": MessageLookupByLibrary.simpleMessage(
+ "Koppelen met de PIN werkt met elk scherm waarop je jouw album wilt zien."),
"map": MessageLookupByLibrary.simpleMessage("Kaart"),
"maps": MessageLookupByLibrary.simpleMessage("Kaarten"),
"mastodon": MessageLookupByLibrary.simpleMessage("Mastodon"),
@@ -974,6 +992,8 @@ class MessageLookup extends MessageLookupByLibrary {
"no": MessageLookupByLibrary.simpleMessage("Nee"),
"noAlbumsSharedByYouYet": MessageLookupByLibrary.simpleMessage(
"Nog geen albums gedeeld door jou"),
+ "noDeviceFound":
+ MessageLookupByLibrary.simpleMessage("Geen apparaat gevonden"),
"noDeviceLimit": MessageLookupByLibrary.simpleMessage("Geen"),
"noDeviceThatCanBeDeleted": MessageLookupByLibrary.simpleMessage(
"Je hebt geen bestanden op dit apparaat die verwijderd kunnen worden"),
@@ -1023,6 +1043,9 @@ class MessageLookup extends MessageLookupByLibrary {
"orPickAnExistingOne":
MessageLookupByLibrary.simpleMessage("Of kies een bestaande"),
"pair": MessageLookupByLibrary.simpleMessage("Koppelen"),
+ "pairWithPin": MessageLookupByLibrary.simpleMessage("Koppelen met PIN"),
+ "pairingComplete":
+ MessageLookupByLibrary.simpleMessage("Koppeling voltooid"),
"passkey": MessageLookupByLibrary.simpleMessage("Passkey"),
"passkeyAuthTitle":
MessageLookupByLibrary.simpleMessage("Passkey verificatie"),
@@ -1383,6 +1406,10 @@ class MessageLookup extends MessageLookupByLibrary {
"sparkleSuccess": MessageLookupByLibrary.simpleMessage("✨ Succes"),
"startBackup": MessageLookupByLibrary.simpleMessage("Back-up starten"),
"status": MessageLookupByLibrary.simpleMessage("Status"),
+ "stopCastingBody":
+ MessageLookupByLibrary.simpleMessage("Wil je stoppen met casten?"),
+ "stopCastingTitle":
+ MessageLookupByLibrary.simpleMessage("Casten stoppen"),
"storage": MessageLookupByLibrary.simpleMessage("Opslagruimte"),
"storageBreakupFamily": MessageLookupByLibrary.simpleMessage("Familie"),
"storageBreakupYou": MessageLookupByLibrary.simpleMessage("Jij"),
diff --git a/mobile/lib/generated/intl/messages_pt.dart b/mobile/lib/generated/intl/messages_pt.dart
index cc410241f3ab5b8185d0ae49383b8848aa510461..e17cb674e8dbd2a2e60c1999d67cb9b6d1311a93 100644
--- a/mobile/lib/generated/intl/messages_pt.dart
+++ b/mobile/lib/generated/intl/messages_pt.dart
@@ -371,6 +371,8 @@ class MessageLookup extends MessageLookupByLibrary {
"Certifique-se de que as permissões de Rede local estão ativadas para o aplicativo de Fotos Ente, em Configurações."),
"autoPair":
MessageLookupByLibrary.simpleMessage("Pareamento automático"),
+ "autoPairDesc": MessageLookupByLibrary.simpleMessage(
+ "O pareamento automático funciona apenas com dispositivos que suportam o Chromecast."),
"available": MessageLookupByLibrary.simpleMessage("Disponível"),
"backedUpFolders":
MessageLookupByLibrary.simpleMessage("Backup de pastas concluído"),
@@ -629,8 +631,9 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("Dobre seu armazenamento"),
"download": MessageLookupByLibrary.simpleMessage("Baixar"),
"downloadFailed":
- MessageLookupByLibrary.simpleMessage("Falha ao baixar"),
- "downloading": MessageLookupByLibrary.simpleMessage("Baixando..."),
+ MessageLookupByLibrary.simpleMessage("Falha no download"),
+ "downloading":
+ MessageLookupByLibrary.simpleMessage("Fazendo download..."),
"dropSupportEmail": m17,
"duplicateFileCountWithStorageSaved": m18,
"duplicateItemsGroup": m19,
@@ -716,8 +719,8 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("Falha ao aplicar o código"),
"failedToCancel":
MessageLookupByLibrary.simpleMessage("Falha ao cancelar"),
- "failedToDownloadVideo":
- MessageLookupByLibrary.simpleMessage("Falha ao baixar vídeo"),
+ "failedToDownloadVideo": MessageLookupByLibrary.simpleMessage(
+ "Falha ao fazer download do vídeo"),
"failedToFetchOriginalForEdit": MessageLookupByLibrary.simpleMessage(
"Falha ao obter original para edição"),
"failedToFetchReferralDetails": MessageLookupByLibrary.simpleMessage(
@@ -735,7 +738,7 @@ class MessageLookup extends MessageLookupByLibrary {
"familyPlans": MessageLookupByLibrary.simpleMessage("Plano familiar"),
"faq": MessageLookupByLibrary.simpleMessage("Perguntas frequentes"),
"faqs": MessageLookupByLibrary.simpleMessage("Perguntas frequentes"),
- "favorite": MessageLookupByLibrary.simpleMessage("Favoritar"),
+ "favorite": MessageLookupByLibrary.simpleMessage("Favorito"),
"feedback": MessageLookupByLibrary.simpleMessage("Comentários"),
"fileFailedToSaveToGallery": MessageLookupByLibrary.simpleMessage(
"Falha ao salvar o arquivo na galeria"),
@@ -902,8 +905,8 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("Carregando galeria..."),
"loadingMessage":
MessageLookupByLibrary.simpleMessage("Carregando suas fotos..."),
- "loadingModel":
- MessageLookupByLibrary.simpleMessage("Baixando modelos..."),
+ "loadingModel": MessageLookupByLibrary.simpleMessage(
+ "Fazendo download de modelos..."),
"localGallery": MessageLookupByLibrary.simpleMessage("Galeria local"),
"location": MessageLookupByLibrary.simpleMessage("Local"),
"locationName": MessageLookupByLibrary.simpleMessage("Nome do Local"),
@@ -944,7 +947,7 @@ class MessageLookup extends MessageLookupByLibrary {
"manageSubscription":
MessageLookupByLibrary.simpleMessage("Gerenciar assinatura"),
"manualPairDesc": MessageLookupByLibrary.simpleMessage(
- "Parear com o PIN funciona para qualquer dispositivo de tela grande onde você deseja reproduzir seu álbum."),
+ "Parear com o PIN funciona com qualquer tela que você deseja ver o seu álbum ativado."),
"map": MessageLookupByLibrary.simpleMessage("Mapa"),
"maps": MessageLookupByLibrary.simpleMessage("Mapas"),
"mastodon": MessageLookupByLibrary.simpleMessage("Mastodon"),
diff --git a/mobile/lib/generated/intl/messages_zh.dart b/mobile/lib/generated/intl/messages_zh.dart
index 0d507c2e1d25ed4cb46331a458cb77d545cfe1bd..db60c5e0b25a8899e8036ddba4b8ed48642cfa1d 100644
--- a/mobile/lib/generated/intl/messages_zh.dart
+++ b/mobile/lib/generated/intl/messages_zh.dart
@@ -124,7 +124,7 @@ class MessageLookup extends MessageLookupByLibrary {
static String m37(providerName) => "如果您被收取费用,请用英语与 ${providerName} 的客服聊天";
- static String m38(endDate) => "免费试用有效期至 ${endDate}。\n之后您可以选择付费计划。";
+ static String m38(endDate) => "免费试用有效期至 ${endDate}。\n您可以随后购买付费计划。";
static String m39(toEmail) => "请给我们发送电子邮件至 ${toEmail}";
@@ -325,6 +325,8 @@ class MessageLookup extends MessageLookupByLibrary {
"autoCastiOSPermission": MessageLookupByLibrary.simpleMessage(
"请确保已在“设置”中为 Ente Photos 应用打开本地网络权限。"),
"autoPair": MessageLookupByLibrary.simpleMessage("自动配对"),
+ "autoPairDesc":
+ MessageLookupByLibrary.simpleMessage("自动配对仅适用于支持 Chromecast 的设备。"),
"available": MessageLookupByLibrary.simpleMessage("可用"),
"backedUpFolders": MessageLookupByLibrary.simpleMessage("已备份的文件夹"),
"backup": MessageLookupByLibrary.simpleMessage("备份"),
@@ -777,7 +779,7 @@ class MessageLookup extends MessageLookupByLibrary {
"manageParticipants": MessageLookupByLibrary.simpleMessage("管理"),
"manageSubscription": MessageLookupByLibrary.simpleMessage("管理订阅"),
"manualPairDesc": MessageLookupByLibrary.simpleMessage(
- "用 PIN 配对适用于任何大屏幕设备,您可以在这些设备上播放您的相册。"),
+ "用 PIN 码配对适用于您希望在其上查看相册的任何屏幕。"),
"map": MessageLookupByLibrary.simpleMessage("地图"),
"maps": MessageLookupByLibrary.simpleMessage("地图"),
"mastodon": MessageLookupByLibrary.simpleMessage("Mastodon"),
diff --git a/mobile/lib/generated/l10n.dart b/mobile/lib/generated/l10n.dart
index dbedbadf6395c3c2fc9eb56e40575313642dfdc8..4c7679154f5fa2c6c50ca288cf0fbd49b27e0cb6 100644
--- a/mobile/lib/generated/l10n.dart
+++ b/mobile/lib/generated/l10n.dart
@@ -4034,10 +4034,10 @@ class S {
);
}
- /// `Free trial valid till {endDate}.\nYou can choose a paid plan afterwards.`
+ /// `Free trial valid till {endDate}.\nYou can purchase a paid plan afterwards.`
String playStoreFreeTrialValidTill(Object endDate) {
return Intl.message(
- 'Free trial valid till $endDate.\nYou can choose a paid plan afterwards.',
+ 'Free trial valid till $endDate.\nYou can purchase a paid plan afterwards.',
name: 'playStoreFreeTrialValidTill',
desc: '',
args: [endDate],
diff --git a/mobile/lib/l10n/intl_en.arb b/mobile/lib/l10n/intl_en.arb
index e59163e6b67df0ff08fa9c73b8c44fa1b0e75cb3..6bc8b59269564d26d19e34b971a15889d2228515 100644
--- a/mobile/lib/l10n/intl_en.arb
+++ b/mobile/lib/l10n/intl_en.arb
@@ -569,7 +569,7 @@
"freeTrialValidTill": "Free trial valid till {endDate}",
"validTill": "Valid till {endDate}",
"addOnValidTill": "Your {storageAmount} add-on is valid till {endDate}",
- "playStoreFreeTrialValidTill": "Free trial valid till {endDate}.\nYou can choose a paid plan afterwards.",
+ "playStoreFreeTrialValidTill": "Free trial valid till {endDate}.\nYou can purchase a paid plan afterwards.",
"subWillBeCancelledOn": "Your subscription will be cancelled on {endDate}",
"subscription": "Subscription",
"paymentDetails": "Payment details",
diff --git a/mobile/lib/l10n/intl_nl.arb b/mobile/lib/l10n/intl_nl.arb
index 0ba9bd10c32e47e5c570a2e22499a4eeefe0e426..a8f854a4300ec0159e1da67e3f388efba8415eb7 100644
--- a/mobile/lib/l10n/intl_nl.arb
+++ b/mobile/lib/l10n/intl_nl.arb
@@ -835,6 +835,7 @@
"close": "Sluiten",
"setAs": "Instellen als",
"fileSavedToGallery": "Bestand opgeslagen in galerij",
+ "filesSavedToGallery": "Bestand opgeslagen in galerij",
"fileFailedToSaveToGallery": "Opslaan van bestand naar galerij mislukt",
"download": "Downloaden",
"pressAndHoldToPlayVideo": "Ingedrukt houden om video af te spelen",
@@ -1195,6 +1196,8 @@
"verifyPasskey": "Bevestig passkey",
"playOnTv": "Album afspelen op TV",
"pair": "Koppelen",
+ "autoPair": "Automatisch koppelen",
+ "pairWithPin": "Koppelen met PIN",
"deviceNotFound": "Apparaat niet gevonden",
"castInstruction": "Bezoek cast.ente.io op het apparaat dat u wilt koppelen.\n\nVoer de code hieronder in om het album op uw TV af te spelen.",
"deviceCodeHint": "Voer de code in",
@@ -1212,5 +1215,16 @@
"endpointUpdatedMessage": "Eindpunt met succes bijgewerkt",
"customEndpoint": "Verbonden met {endpoint}",
"createCollaborativeLink": "Maak een gezamenlijke link",
- "search": "Zoeken"
+ "search": "Zoeken",
+ "autoPairDesc": "Automatisch koppelen werkt alleen met apparaten die Chromecast ondersteunen.",
+ "manualPairDesc": "Koppelen met de PIN werkt met elk scherm waarop je jouw album wilt zien.",
+ "connectToDevice": "Verbinding maken met apparaat",
+ "autoCastDialogBody": "Je zult de beschikbare Cast apparaten hier zien.",
+ "autoCastiOSPermission": "Zorg ervoor dat lokale netwerkrechten zijn ingeschakeld voor de Ente Photos app, in Instellingen.",
+ "noDeviceFound": "Geen apparaat gevonden",
+ "stopCastingTitle": "Casten stoppen",
+ "stopCastingBody": "Wil je stoppen met casten?",
+ "castIPMismatchTitle": "Album casten mislukt",
+ "castIPMismatchBody": "Zorg ervoor dat je op hetzelfde netwerk zit als de tv.",
+ "pairingComplete": "Koppeling voltooid"
}
\ No newline at end of file
diff --git a/mobile/lib/l10n/intl_pt.arb b/mobile/lib/l10n/intl_pt.arb
index 16a0ea7535cfd868ac97c4436b62966f24b7b1a3..bf95cf6ba548c0261e59db8a5f71fb02db927601 100644
--- a/mobile/lib/l10n/intl_pt.arb
+++ b/mobile/lib/l10n/intl_pt.arb
@@ -410,7 +410,7 @@
"machineLearning": "Aprendizagem de máquina",
"magicSearch": "Busca mágica",
"magicSearchDescription": "Por favor, note que isso resultará em uma largura de banda maior e uso de bateria até que todos os itens sejam indexados.",
- "loadingModel": "Baixando modelos...",
+ "loadingModel": "Fazendo download de modelos...",
"waitingForWifi": "Esperando por Wi-Fi...",
"status": "Estado",
"indexedItems": "Itens indexados",
@@ -471,7 +471,7 @@
"criticalUpdateAvailable": "Atualização crítica disponível",
"updateAvailable": "Atualização disponível",
"ignoreUpdate": "Ignorar",
- "downloading": "Baixando...",
+ "downloading": "Fazendo download...",
"cannotDeleteSharedFiles": "Não é possível excluir arquivos compartilhados",
"theDownloadCouldNotBeCompleted": "Não foi possível concluir o download",
"retry": "Tentar novamente",
@@ -734,7 +734,7 @@
"moveToAlbum": "Mover para álbum",
"unhide": "Desocultar",
"unarchive": "Desarquivar",
- "favorite": "Favoritar",
+ "favorite": "Favorito",
"removeFromFavorite": "Remover dos favoritos",
"shareLink": "Compartilhar link",
"createCollage": "Criar colagem",
@@ -840,7 +840,7 @@
"download": "Baixar",
"pressAndHoldToPlayVideo": "Pressione e segure para reproduzir o vídeo",
"pressAndHoldToPlayVideoDetailed": "Pressione e segure na imagem para reproduzir o vídeo",
- "downloadFailed": "Falha ao baixar",
+ "downloadFailed": "Falha no download",
"deduplicateFiles": "Arquivos duplicados",
"deselectAll": "Desmarcar todos",
"reviewDeduplicateItems": "Por favor, reveja e exclua os itens que você acredita serem duplicados.",
@@ -1132,7 +1132,7 @@
"sharedWithYou": "Compartilhado com você",
"sharedByYou": "Compartilhado por você",
"inviteYourFriendsToEnte": "Convide seus amigos ao Ente",
- "failedToDownloadVideo": "Falha ao baixar vídeo",
+ "failedToDownloadVideo": "Falha ao fazer download do vídeo",
"hiding": "Ocultando...",
"unhiding": "Desocultando...",
"successfullyHid": "Ocultado com sucesso",
@@ -1216,8 +1216,8 @@
"customEndpoint": "Conectado a {endpoint}",
"createCollaborativeLink": "Criar link colaborativo",
"search": "Pesquisar",
- "autoPairGoogle": "O Pareamento Automático requer a conexão com servidores do Google e só funciona com dispositivos Chromecast. O Google não receberá dados confidenciais, como suas fotos.",
- "manualPairDesc": "Parear com o PIN funciona para qualquer dispositivo de tela grande onde você deseja reproduzir seu álbum.",
+ "autoPairDesc": "O pareamento automático funciona apenas com dispositivos que suportam o Chromecast.",
+ "manualPairDesc": "Parear com o PIN funciona com qualquer tela que você deseja ver o seu álbum ativado.",
"connectToDevice": "Conectar ao dispositivo",
"autoCastDialogBody": "Você verá dispositivos disponíveis para transmitir aqui.",
"autoCastiOSPermission": "Certifique-se de que as permissões de Rede local estão ativadas para o aplicativo de Fotos Ente, em Configurações.",
diff --git a/mobile/lib/l10n/intl_zh.arb b/mobile/lib/l10n/intl_zh.arb
index 370bb6a3c3abe84a51a1b8e61da4356d617a184a..a26f1fc6e60cc03a82ab820b886a7fc426ef78f5 100644
--- a/mobile/lib/l10n/intl_zh.arb
+++ b/mobile/lib/l10n/intl_zh.arb
@@ -569,7 +569,7 @@
"freeTrialValidTill": "免费试用有效期至 {endDate}",
"validTill": "有效期至 {endDate}",
"addOnValidTill": "您的 {storageAmount} 插件有效期至 {endDate}",
- "playStoreFreeTrialValidTill": "免费试用有效期至 {endDate}。\n之后您可以选择付费计划。",
+ "playStoreFreeTrialValidTill": "免费试用有效期至 {endDate}。\n您可以随后购买付费计划。",
"subWillBeCancelledOn": "您的订阅将于 {endDate} 取消",
"subscription": "订阅",
"paymentDetails": "付款明细",
@@ -1216,8 +1216,8 @@
"customEndpoint": "已连接至 {endpoint}",
"createCollaborativeLink": "创建协作链接",
"search": "搜索",
- "autoPairGoogle": "自动配对需要连接到 Google 服务器,且仅适用于支持 Chromecast 的设备。Google 不会接收敏感数据,例如您的照片。",
- "manualPairDesc": "用 PIN 配对适用于任何大屏幕设备,您可以在这些设备上播放您的相册。",
+ "autoPairDesc": "自动配对仅适用于支持 Chromecast 的设备。",
+ "manualPairDesc": "用 PIN 码配对适用于您希望在其上查看相册的任何屏幕。",
"connectToDevice": "连接到设备",
"autoCastDialogBody": "您将在此处看到可用的 Cast 设备。",
"autoCastiOSPermission": "请确保已在“设置”中为 Ente Photos 应用打开本地网络权限。",
diff --git a/mobile/lib/models/gallery_type.dart b/mobile/lib/models/gallery_type.dart
index ba0eb397f723d56f1e5125064aed508a0d395e0c..40426f7015a3b343442b4c92a4f238824603c158 100644
--- a/mobile/lib/models/gallery_type.dart
+++ b/mobile/lib/models/gallery_type.dart
@@ -32,12 +32,12 @@ extension GalleyTypeExtension on GalleryType {
case GalleryType.locationTag:
case GalleryType.quickLink:
case GalleryType.uncategorized:
+ case GalleryType.sharedCollection:
return true;
case GalleryType.hiddenSection:
case GalleryType.hiddenOwnedCollection:
case GalleryType.trash:
- case GalleryType.sharedCollection:
return false;
}
}
diff --git a/mobile/lib/module/upload/model/multipart.dart b/mobile/lib/module/upload/model/multipart.dart
new file mode 100644
index 0000000000000000000000000000000000000000..cda72d141c7f2530d6a343aac583deb7fcacefe8
--- /dev/null
+++ b/mobile/lib/module/upload/model/multipart.dart
@@ -0,0 +1,66 @@
+import "package:photos/module/upload/model/xml.dart";
+
+class PartETag extends XmlParsableObject {
+ final int partNumber;
+ final String eTag;
+
+ PartETag(this.partNumber, this.eTag);
+
+ @override
+ String get elementName => "Part";
+
+ @override
+ Map<String, dynamic> toMap() {
+ return {
+ "PartNumber": partNumber,
+ "ETag": eTag,
+ };
+ }
+}
+
+enum MultipartStatus {
+ pending,
+ uploaded,
+ completed,
+}
+
+enum PartStatus {
+ pending,
+ uploaded,
+}
+
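+// Persisted state of a resumable multipart upload: per-part upload status and
+// ETags, the part size, the presigned URLs, and the overall upload status.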
+class MultipartInfo {
+ final List<bool>? partUploadStatus;
+ final Map<int, String>? partETags;
+ final int? partSize;
+ final MultipartUploadURLs urls;
+ final MultipartStatus status;
+
+ MultipartInfo({
+ this.partUploadStatus,
+ this.partETags,
+ this.partSize,
+ this.status = MultipartStatus.pending,
+ required this.urls,
+ });
+}
+
+class MultipartUploadURLs {
+ final String objectKey;
+ final List<String> partsURLs;
+ final String completeURL;
+
+ MultipartUploadURLs({
+ required this.objectKey,
+ required this.partsURLs,
+ required this.completeURL,
+ });
+
+ factory MultipartUploadURLs.fromMap(Map<String, dynamic> map) {
+ return MultipartUploadURLs(
+ objectKey: map["urls"]["objectKey"],
+ partsURLs: (map["urls"]["partURLs"] as List).cast(),
+ completeURL: map["urls"]["completeURL"],
+ );
+ }
+}
diff --git a/mobile/lib/module/upload/model/xml.dart b/mobile/lib/module/upload/model/xml.dart
new file mode 100644
index 0000000000000000000000000000000000000000..9490fc40cbb599cd0bdef42d572492aa0f26dbb5
--- /dev/null
+++ b/mobile/lib/module/upload/model/xml.dart
@@ -0,0 +1,41 @@
+// ignore_for_file: implementation_imports
+
+import "package:xml/xml.dart";
+
+// used for classes that can be converted to xml
+abstract class XmlParsableObject {
+ Map<String, dynamic> toMap();
+ String get elementName;
+}
+
+// for converting the response to xml
+String convertJs2Xml(Map<String, dynamic> json) {
+ final builder = XmlBuilder();
+ buildXml(builder, json);
+ return builder.buildDocument().toXmlString(
+ pretty: true,
+ indent: ' ',
+ );
+}
+
+// for building the xml node tree recursively
+void buildXml(XmlBuilder builder, dynamic node) {
+ if (node is Map) {
+ node.forEach((key, value) {
+ builder.element(key, nest: () => buildXml(builder, value));
+ });
+ } else if (node is List) {
+ for (var item in node) {
+ buildXml(builder, item);
+ }
+ } else if (node is XmlParsableObject) {
+ builder.element(
+ node.elementName,
+ nest: () {
+ buildXml(builder, node.toMap());
+ },
+ );
+ } else {
+ builder.text(node.toString());
+ }
+}
diff --git a/mobile/lib/module/upload/service/multipart.dart b/mobile/lib/module/upload/service/multipart.dart
new file mode 100644
index 0000000000000000000000000000000000000000..ad0d19703a4007a1b25cd8f7088d6669da87f7a6
--- /dev/null
+++ b/mobile/lib/module/upload/service/multipart.dart
@@ -0,0 +1,266 @@
+import "dart:io";
+
+import "package:dio/dio.dart";
+import "package:ente_feature_flag/ente_feature_flag.dart";
+import "package:flutter/foundation.dart";
+import "package:logging/logging.dart";
+import "package:photos/core/constants.dart";
+import "package:photos/db/upload_locks_db.dart";
+import "package:photos/models/encryption_result.dart";
+import "package:photos/module/upload/model/multipart.dart";
+import "package:photos/module/upload/model/xml.dart";
+import "package:photos/services/collections_service.dart";
+import "package:photos/utils/crypto_util.dart";
+
+class MultiPartUploader {
+ final Dio _enteDio;
+ final Dio _s3Dio;
+ final UploadLocksDB _db;
+ final FlagService _featureFlagService;
+ late final Logger _logger = Logger("MultiPartUploader");
+
+ MultiPartUploader(
+ this._enteDio,
+ this._s3Dio,
+ this._db,
+ this._featureFlagService,
+ );
+
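+ // Reconstructs the file key and nonce for a tracked upload by decrypting the
+ // values stored in the upload locks DB with the collection key, so that a
+ // resumed upload reuses the same encryption.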
+ Future<EncryptionResult> getEncryptionResult(
+ String localId,
+ String fileHash,
+ int collectionID,
+ ) async {
+ final collectionKey =
+ CollectionsService.instance.getCollectionKey(collectionID);
+ final result =
+ await _db.getFileEncryptionData(localId, fileHash, collectionID);
+ final encryptedFileKey = CryptoUtil.base642bin(result.encryptedFileKey);
+ final fileNonce = CryptoUtil.base642bin(result.fileNonce);
+
+ final encryptKeyNonce = CryptoUtil.base642bin(result.keyNonce);
+
+ return EncryptionResult(
+ key: CryptoUtil.decryptSync(
+ encryptedFileKey,
+ collectionKey,
+ encryptKeyNonce,
+ ),
+ header: fileNonce,
+ );
+ }
+
+ int get multipartPartSizeForUpload {
+ if (_featureFlagService.internalUser) {
+ return multipartPartSizeInternal;
+ }
+ return multipartPartSize;
+ }
+
+ Future<int> calculatePartCount(int fileSize) async {
+ // Multipart upload is only enabled for internal users
+ // and debug builds till it's battle tested.
+ if (!_featureFlagService.internalUser) return 1;
+
+ final partCount = (fileSize / multipartPartSizeForUpload).ceil();
+ return partCount;
+ }
+
+ Future<MultipartUploadURLs> getMultipartUploadURLs(int count) async {
+ try {
+ assert(
+ _featureFlagService.internalUser,
+ "Multipart upload should not be enabled for external users.",
+ );
+ final response = await _enteDio.get(
+ "/files/multipart-upload-urls",
+ queryParameters: {
+ "count": count,
+ },
+ );
+
+ return MultipartUploadURLs.fromMap(response.data);
+ } on Exception catch (e) {
+ _logger.severe('failed to get multipart url', e);
+ rethrow;
+ }
+ }
+
+ Future<void> createTableEntry(
+ String localId,
+ String fileHash,
+ int collectionID,
+ MultipartUploadURLs urls,
+ String encryptedFileName,
+ int fileSize,
+ Uint8List fileKey,
+ Uint8List fileNonce,
+ ) async {
+ final collectionKey =
+ CollectionsService.instance.getCollectionKey(collectionID);
+
+ final encryptedResult = CryptoUtil.encryptSync(
+ fileKey,
+ collectionKey,
+ );
+
+ await _db.createTrackUploadsEntry(
+ localId,
+ fileHash,
+ collectionID,
+ urls,
+ encryptedFileName,
+ fileSize,
+ CryptoUtil.bin2base64(encryptedResult.encryptedData!),
+ CryptoUtil.bin2base64(fileNonce),
+ CryptoUtil.bin2base64(encryptedResult.nonce!),
+ partSize: multipartPartSizeForUpload,
+ );
+ }
+
+ Future<String> putExistingMultipartFile(
+ File encryptedFile,
+ String localId,
+ String fileHash,
+ int collectionID,
+ ) async {
+ final multipartInfo =
+ await _db.getCachedLinks(localId, fileHash, collectionID);
+ await _db.updateLastAttempted(localId, fileHash, collectionID);
+
+ Map<int, String> etags = multipartInfo.partETags ?? {};
+
+ if (multipartInfo.status == MultipartStatus.pending) {
+ // upload individual parts and get their etags
+ etags = await _uploadParts(multipartInfo, encryptedFile);
+ }
+
+ if (multipartInfo.status != MultipartStatus.completed) {
+ // complete the multipart upload
+ await _completeMultipartUpload(
+ multipartInfo.urls.objectKey,
+ etags,
+ multipartInfo.urls.completeURL,
+ );
+ }
+
+ return multipartInfo.urls.objectKey;
+ }
+
+ Future<String> putMultipartFile(
+ MultipartUploadURLs urls,
+ File encryptedFile,
+ ) async {
+ // upload individual parts and get their etags
+ final etags = await _uploadParts(
+ MultipartInfo(urls: urls),
+ encryptedFile,
+ );
+
+ // complete the multipart upload
+ await _completeMultipartUpload(urls.objectKey, etags, urls.completeURL);
+
+ return urls.objectKey;
+ }
+
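+ // Uploads the individual parts, skipping those already marked as uploaded,
+ // and records each part's ETag so that the upload can be resumed later.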
+ Future<Map<int, String>> _uploadParts(
+ MultipartInfo partInfo,
+ File encryptedFile,
+ ) async {
+ final partsURLs = partInfo.urls.partsURLs;
+ final partUploadStatus = partInfo.partUploadStatus;
+ final partsLength = partsURLs.length;
+ final etags = partInfo.partETags ?? {};
+
+ int i = 0;
+ final partSize = partInfo.partSize ?? multipartPartSizeForUpload;
+
+ // Go to the first part that is not uploaded
+ while (i < (partUploadStatus?.length ?? 0) &&
+ (partUploadStatus?[i] ?? false)) {
+ i++;
+ }
+
+ final int encFileLength = encryptedFile.lengthSync();
+ // Start parts upload
+ int count = 0;
+ while (i < partsLength) {
+ count++;
+ final partURL = partsURLs[i];
+ final isLastPart = i == partsLength - 1;
+ final fileSize = isLastPart ? encFileLength % partSize : partSize;
+ _logger.info(
+ "Uploading part ${i + 1} / $partsLength of size $fileSize bytes (total size $encFileLength).",
+ );
+ if (kDebugMode && count > 3) {
+ throw Exception(
+ 'Forced exception to test multipart upload retry mechanism.',
+ );
+ }
+ final response = await _s3Dio.put(
+ partURL,
+ data: encryptedFile.openRead(
+ i * partSize,
+ isLastPart ? null : (i + 1) * partSize,
+ ),
+ options: Options(
+ headers: {
+ Headers.contentLengthHeader: fileSize,
+ },
+ ),
+ );
+
+ final eTag = response.headers.value("etag");
+
+ if (eTag?.isEmpty ?? true) {
+ throw Exception('ETAG_MISSING');
+ }
+
+ etags[i] = eTag!;
+
+ await _db.updatePartStatus(partInfo.urls.objectKey, i, eTag);
+ i++;
+ }
+
+ await _db.updateTrackUploadStatus(
+ partInfo.urls.objectKey,
+ MultipartStatus.uploaded,
+ );
+
+ return etags;
+ }
+
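+ // Builds the S3 CompleteMultipartUpload XML body from the collected part
+ // ETags (part numbers are 1-based, hence key + 1) and posts it to the
+ // completion URL.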
+ Future<void> _completeMultipartUpload(
+ String objectKey,
+ Map<int, String> partEtags,
+ String completeURL,
+ ) async {
+ final body = convertJs2Xml({
+ 'CompleteMultipartUpload': partEtags.entries
+ .map(
+ (e) => PartETag(
+ e.key + 1,
+ e.value,
+ ),
+ )
+ .toList(),
+ }).replaceAll('"', '').replaceAll('&quot;', '');
+
+ try {
+ await _s3Dio.post(
+ completeURL,
+ data: body,
+ options: Options(
+ contentType: "text/xml",
+ ),
+ );
+ await _db.updateTrackUploadStatus(
+ objectKey,
+ MultipartStatus.completed,
+ );
+ } catch (e) {
+ Logger("MultipartUpload").severe(e);
+ rethrow;
+ }
+ }
+}
diff --git a/mobile/lib/services/collections_service.dart b/mobile/lib/services/collections_service.dart
index 0981eb767e884ed340a46d4a155ee17fbd07520a..5b16bc70fb29d07d7754434f3e9d9c3497019403 100644
--- a/mobile/lib/services/collections_service.dart
+++ b/mobile/lib/services/collections_service.dart
@@ -30,7 +30,6 @@ import 'package:photos/models/collection/collection_items.dart';
import 'package:photos/models/file/file.dart';
import "package:photos/models/files_split.dart";
import "package:photos/models/metadata/collection_magic.dart";
-import "package:photos/service_locator.dart";
import 'package:photos/services/app_lifecycle_service.dart';
import "package:photos/services/favorites_service.dart";
import 'package:photos/services/file_magic_service.dart';
@@ -1179,9 +1178,6 @@ class CollectionsService {
await _addToCollection(dstCollectionID, splitResult.ownedByCurrentUser);
}
if (splitResult.ownedByOtherUsers.isNotEmpty) {
- if (!flagService.internalUser) {
- throw ArgumentError('Cannot add files owned by other users');
- }
late final List<EnteFile> filesToCopy;
late final List<EnteFile> filesToAdd;
(filesToAdd, filesToCopy) = (await _splitFilesToAddAndCopy(
diff --git a/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart b/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart
index 420b8c97f7fc0327a8f2f8080903a6008a931617..485e1f2c916101ee4d1e30b2290081a89d8a880a 100644
--- a/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart
+++ b/mobile/lib/services/machine_learning/semantic_search/embedding_store.dart
@@ -145,9 +145,12 @@ class EmbeddingStore {
}
_logger.info("${remoteEmbeddings.length} embeddings fetched");
+
return RemoteEmbeddings(
remoteEmbeddings,
- remoteEmbeddings.length == limit,
+ // keep fetching until we get all embeddings. Avoid the limit check, as
+ // some embedding fetches might fail on the server
+ remoteEmbeddings.isNotEmpty,
);
}
diff --git a/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart b/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart
index 337ca913ff7c5133e0ea851c2f2cecbe760db538..99aa3a0119f0e0544f60d90d92166672feb13906 100644
--- a/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart
+++ b/mobile/lib/services/machine_learning/semantic_search/semantic_search_service.dart
@@ -190,6 +190,7 @@ class SemanticSearchService {
_logger.info(
"Loading ${_cachedEmbeddings.length} took: ${(endTime.millisecondsSinceEpoch - startTime.millisecondsSinceEpoch)}ms",
);
+ Bus.instance.fire(EmbeddingCacheUpdatedEvent());
_logger.info("Cached embeddings: " + _cachedEmbeddings.length.toString());
}
@@ -225,7 +226,9 @@ class SemanticSearchService {
Future<List<int>> _getFileIDsToBeIndexed() async {
final uploadedFileIDs = await FilesDB.instance
.getOwnedFileIDs(Configuration.instance.getUserID()!);
- final embeddedFileIDs = _cachedEmbeddings.map((e) => e.fileID).toSet();
+ final embeddedFileIDs =
+ await EmbeddingsDB.instance.getFileIDs(_currentModel);
+
uploadedFileIDs.removeWhere(
(id) => embeddedFileIDs.contains(id),
);
diff --git a/mobile/lib/ui/actions/collection/collection_sharing_actions.dart b/mobile/lib/ui/actions/collection/collection_sharing_actions.dart
index 7993c43423b874bddd6cf9f1a1606ec53b57daff..3328722dbe9f414170755e7eecd848a8f963b7b2 100644
--- a/mobile/lib/ui/actions/collection/collection_sharing_actions.dart
+++ b/mobile/lib/ui/actions/collection/collection_sharing_actions.dart
@@ -439,7 +439,12 @@ class CollectionActions {
) async {
final List<EnteFile> files =
await FilesDB.instance.getAllFilesCollection(collection.id);
- await moveFilesFromCurrentCollection(bContext, collection, files);
+ await moveFilesFromCurrentCollection(
+ bContext,
+ collection,
+ files,
+ isHidden: collection.isHidden() && !collection.isDefaultHidden(),
+ );
// collection should be empty on server now
await collectionsService.trashEmptyCollection(collection);
}
diff --git a/mobile/lib/ui/cast/auto.dart b/mobile/lib/ui/cast/auto.dart
index 7b310855e34db8a83dd468acf27073d1a41b3462..34c97b34de7187069572f52a55623f8fe17f83d0 100644
--- a/mobile/lib/ui/cast/auto.dart
+++ b/mobile/lib/ui/cast/auto.dart
@@ -79,12 +79,6 @@ class _AutoCastDialogState extends State<AutoCastDialog> {
});
try {
await _connectToYourApp(context, device);
- if (mounted) {
- setState(() {
- _isDeviceTapInProgress.remove(device);
- });
- Navigator.of(context).pop();
- }
} catch (e) {
if (mounted) {
setState(() {
@@ -128,6 +122,11 @@ class _AutoCastDialogState extends State {
final code = message[CastMessageType.pairCode]!['code'];
widget.onConnect(code);
}
+ if (mounted) {
+ setState(() {
+ _isDeviceTapInProgress.remove(castDevice);
+ });
+ }
},
);
}
diff --git a/mobile/lib/ui/notification/update/change_log_page.dart b/mobile/lib/ui/notification/update/change_log_page.dart
index 1216b3219acd82f2331c4082fefbf6ba7765448b..90430fae25a0e39eafe8a6b148a1916c0e7d2542 100644
--- a/mobile/lib/ui/notification/update/change_log_page.dart
+++ b/mobile/lib/ui/notification/update/change_log_page.dart
@@ -124,7 +124,7 @@ class _ChangeLogPageState extends State<ChangeLogPage> {
),
ChangeLogEntry(
"Organize shared photos",
- "You can now add shared items to your favorites to any of your personal albums. Ente will create a copy that is fully owned by you and can be organized to your liking.",
+ "You can now add shared items to your favorites or to any of your personal albums. Ente will create a copy that is fully owned by you and can be organized to your liking.",
),
ChangeLogEntry(
"Download multiple items",
diff --git a/mobile/lib/ui/settings/machine_learning_settings_page.dart b/mobile/lib/ui/settings/machine_learning_settings_page.dart
index 3306ea36f777933826ffd1516ca83b07b20b8962..a0b72ae09d5a5cb300b261df4c73d663623de35e 100644
--- a/mobile/lib/ui/settings/machine_learning_settings_page.dart
+++ b/mobile/lib/ui/settings/machine_learning_settings_page.dart
@@ -228,13 +228,13 @@ class MagicSearchIndexStatsWidget extends StatefulWidget {
class _MagicSearchIndexStatsWidgetState
extends State<MagicSearchIndexStatsWidget> {
IndexStatus? _status;
- late StreamSubscription<EmbeddingUpdatedEvent> _eventSubscription;
+ late StreamSubscription<EmbeddingCacheUpdatedEvent> _eventSubscription;
@override
void initState() {
super.initState();
_eventSubscription =
- Bus.instance.on<EmbeddingUpdatedEvent>().listen((event) {
+ Bus.instance.on<EmbeddingCacheUpdatedEvent>().listen((event) {
_fetchIndexStatus();
});
_fetchIndexStatus();
diff --git a/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart b/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart
index a630e3354a36d260c53442cae4a3e740656e85a8..e805927a645454ac1edd582ab446c5d4cd141978 100644
--- a/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart
+++ b/mobile/lib/ui/viewer/actions/file_selection_actions_widget.dart
@@ -15,7 +15,6 @@ import 'package:photos/models/files_split.dart';
import 'package:photos/models/gallery_type.dart';
import "package:photos/models/metadata/common_keys.dart";
import 'package:photos/models/selected_files.dart';
-import "package:photos/service_locator.dart";
import 'package:photos/services/collections_service.dart';
import 'package:photos/services/hidden_service.dart';
import "package:photos/theme/colors.dart";
@@ -64,7 +63,6 @@ class _FileSelectionActionsWidgetState
late FilesSplit split;
late CollectionActions collectionActions;
late bool isCollectionOwner;
- bool _isInternalUser = false;
// _cachedCollectionForSharedLink is primarily used to avoid creating duplicate
// links if the user keeps on tapping the Create link button after selecting
@@ -102,7 +100,6 @@ class _FileSelectionActionsWidgetState
@override
Widget build(BuildContext context) {
- _isInternalUser = flagService.internalUser;
final ownedFilesCount = split.ownedByCurrentUser.length;
final ownedAndPendingUploadFilesCount =
ownedFilesCount + split.pendingUploads.length;
@@ -150,14 +147,13 @@ class _FileSelectionActionsWidgetState
final showUploadIcon = widget.type == GalleryType.localFolder &&
split.ownedByCurrentUser.isEmpty;
- if (widget.type.showAddToAlbum() ||
- (_isInternalUser && widget.type == GalleryType.sharedCollection)) {
+ if (widget.type.showAddToAlbum()) {
if (showUploadIcon) {
items.add(
SelectionActionButton(
icon: Icons.cloud_upload_outlined,
labelText: S.of(context).addToEnte,
- onTap: (anyOwnedFiles || _isInternalUser) ? _addToAlbum : null,
+ onTap: _addToAlbum,
),
);
} else {
@@ -165,8 +161,7 @@ class _FileSelectionActionsWidgetState
SelectionActionButton(
icon: Icons.add_outlined,
labelText: S.of(context).addToAlbum,
- onTap: (anyOwnedFiles || _isInternalUser) ? _addToAlbum : null,
- shouldShow: ownedAndPendingUploadFilesCount > 0 || _isInternalUser,
+ onTap: _addToAlbum,
),
);
}
@@ -450,10 +445,6 @@ class _FileSelectionActionsWidgetState
}
Future _addToAlbum() async {
- if (split.ownedByOtherUsers.isNotEmpty && !_isInternalUser) {
- widget.selectedFiles
- .unSelectAll(split.ownedByOtherUsers.toSet(), skipNotify: true);
- }
showCollectionActionSheet(context, selectedFiles: widget.selectedFiles);
}
diff --git a/mobile/lib/ui/viewer/file/file_app_bar.dart b/mobile/lib/ui/viewer/file/file_app_bar.dart
index 2918924dbc7e992f8be3378219d756fbb0850ec0..aa46de55a2bc8cd5c8c7e870877e2be8e6d91a35 100644
--- a/mobile/lib/ui/viewer/file/file_app_bar.dart
+++ b/mobile/lib/ui/viewer/file/file_app_bar.dart
@@ -1,6 +1,5 @@
import 'dart:io';
-import 'package:flutter/cupertino.dart';
import 'package:flutter/material.dart';
import 'package:logging/logging.dart';
import 'package:media_extension/media_extension.dart';
@@ -12,7 +11,6 @@ import 'package:photos/models/file/file_type.dart';
import 'package:photos/models/file/trash_file.dart';
import "package:photos/models/metadata/common_keys.dart";
import 'package:photos/models/selected_files.dart';
-import "package:photos/service_locator.dart";
import 'package:photos/services/collections_service.dart';
import 'package:photos/services/hidden_service.dart';
import 'package:photos/ui/collections/collection_action_sheet.dart';
@@ -133,11 +131,13 @@ class FileAppBarState extends State<FileAppBar> {
),
);
}
- // only show fav option for files owned by the user
- if ((isOwnedByUser || flagService.internalUser) &&
- !isFileHidden &&
- isFileUploaded) {
- _actions.add(FavoriteWidget(widget.file));
+ if (!isFileHidden && isFileUploaded) {
+ _actions.add(
+ Padding(
+ padding: const EdgeInsets.all(8),
+ child: FavoriteWidget(widget.file),
+ ),
+ );
}
if (!isFileUploaded) {
_actions.add(
diff --git a/mobile/lib/ui/viewer/file_details/favorite_widget.dart b/mobile/lib/ui/viewer/file_details/favorite_widget.dart
index f9d6434908c98c5237806ce0a9aa113abd23d373..3371b14421bdd782b5754d9b1cb8f00cd39c9659 100644
--- a/mobile/lib/ui/viewer/file_details/favorite_widget.dart
+++ b/mobile/lib/ui/viewer/file_details/favorite_widget.dart
@@ -50,7 +50,6 @@ class _FavoriteWidgetState extends State<FavoriteWidget> {
: LikeButton(
size: 24,
isLiked: isLiked,
- padding: const EdgeInsets.all(2),
onTap: (oldValue) async {
if (widget.file.uploadedFileID == null ||
widget.file.ownerID !=
diff --git a/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart b/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart
index 4a3d9450ac3b957c30d24dc22aa69d57fadcf710..d2b7a6ec3db2664a6a7d43f661e995946f71c47d 100644
--- a/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart
+++ b/mobile/lib/ui/viewer/gallery/gallery_app_bar_widget.dart
@@ -90,16 +90,16 @@ class _GalleryAppBarWidgetState extends State<GalleryAppBarWidget> {
String? _appBarTitle;
late CollectionActions collectionActions;
bool isQuickLink = false;
- late bool isInternalUser;
late GalleryType galleryType;
+ final ValueNotifier<int> castNotifier = ValueNotifier<int>(0);
+
@override
void initState() {
super.initState();
_selectedFilesListener = () {
setState(() {});
};
- isInternalUser = flagService.internalUser;
collectionActions = CollectionActions(CollectionsService.instance);
widget.selectedFiles.addListener(_selectedFilesListener);
_userAuthEventSubscription =
@@ -328,14 +328,16 @@ class _GalleryAppBarWidgetState extends State {
Tooltip(
message: "Cast album",
child: IconButton(
- icon: castService.getActiveSessions().isNotEmpty
- ? const Icon(Icons.cast_connected_rounded)
- : const Icon(Icons.cast_outlined),
+ icon: ValueListenableBuilder(
+ valueListenable: castNotifier,
+ builder: (context, value, child) {
+ return castService.getActiveSessions().isNotEmpty
+ ? const Icon(Icons.cast_connected_rounded)
+ : const Icon(Icons.cast_outlined);
+ },
+ ),
onPressed: () async {
await _castChoiceDialog();
- if (mounted) {
- setState(() {});
- }
},
),
),
@@ -412,7 +414,7 @@ class _GalleryAppBarWidgetState extends State {
? Icons.visibility_outlined
: Icons.visibility_off_outlined,
),
- if (widget.collection != null && isInternalUser)
+ if (widget.collection != null)
EntePopupMenuItem(
value: AlbumPopupAction.playOnTv,
context.l10n.playOnTv,
@@ -728,38 +730,44 @@ class _GalleryAppBarWidgetState extends State {
await castService.closeActiveCasts();
},
);
+ castNotifier.value++;
return;
}
// stop any existing cast session
gw.revokeAllTokens().ignore();
- final result = await showDialog(
- context: context,
- barrierDismissible: true,
- builder: (BuildContext context) {
- return const CastChooseDialog();
- },
- );
- if (result == null) {
- return;
- }
- // wait to allow the dialog to close
- await Future.delayed(const Duration(milliseconds: 100));
- if (result == ButtonAction.first) {
- await showDialog(
+ if (!Platform.isAndroid) {
+ await _pairWithPin(gw, '');
+ } else {
+ final result = await showDialog(
context: context,
barrierDismissible: true,
- builder: (BuildContext bContext) {
- return AutoCastDialog(
- (device) async {
- await _castPair(bContext, gw, device);
- },
- );
+ builder: (BuildContext context) {
+ return const CastChooseDialog();
},
);
- }
- if (result == ButtonAction.second) {
- await _pairWithPin(gw, '');
+ if (result == null) {
+ return;
+ }
+ // wait to allow the dialog to close
+ await Future.delayed(const Duration(milliseconds: 100));
+ if (result == ButtonAction.first) {
+ await showDialog(
+ context: context,
+ barrierDismissible: true,
+ builder: (BuildContext bContext) {
+ return AutoCastDialog(
+ (device) async {
+ await _castPair(bContext, gw, device);
+ Navigator.pop(bContext);
+ },
+ );
+ },
+ );
+ }
+ if (result == ButtonAction.second) {
+ await _pairWithPin(gw, '');
+ }
}
}
@@ -785,7 +793,10 @@ class _GalleryAppBarWidgetState extends State {
String lastCode = '';
Future _castPair(
- BuildContext bContext, CastGateway gw, String code) async {
+ BuildContext bContext,
+ CastGateway gw,
+ String code,
+ ) async {
try {
if (lastCode == code) {
return false;
@@ -801,15 +812,15 @@ class _GalleryAppBarWidgetState extends State {
final String castToken = const Uuid().v4().toString();
final castPayload = CollectionsService.instance
.getCastData(castToken, widget.collection!, publicKey);
- _logger.info("Casting album with token $castToken");
await gw.publishCastPayload(
code,
castPayload,
widget.collection!.id,
castToken,
);
- _logger.info("Casted album with token $castToken");
+ _logger.info("cast album completed");
// showToast(bContext, S.of(context).pairingComplete);
+ castNotifier.value++;
return true;
} catch (e, s) {
lastCode = '';
@@ -823,6 +834,7 @@ class _GalleryAppBarWidgetState extends State {
} else {
await showGenericErrorDialog(context: bContext, error: e);
}
+ castNotifier.value++;
return false;
}
}
diff --git a/mobile/lib/utils/file_uploader.dart b/mobile/lib/utils/file_uploader.dart
index bcd5bb121977128b983cd35b38ade63cc0a05cfb..ad10153035f29a5dc23af76ccc9cfb6b372310f2 100644
--- a/mobile/lib/utils/file_uploader.dart
+++ b/mobile/lib/utils/file_uploader.dart
@@ -2,7 +2,7 @@ import 'dart:async';
import 'dart:collection';
import 'dart:convert';
import 'dart:io';
-import 'dart:math';
+import 'dart:math' as math;
import 'package:collection/collection.dart';
import 'package:connectivity_plus/connectivity_plus.dart';
@@ -28,6 +28,8 @@ import 'package:photos/models/file/file_type.dart';
import "package:photos/models/metadata/file_magic.dart";
import 'package:photos/models/upload_url.dart';
import "package:photos/models/user_details.dart";
+import "package:photos/module/upload/service/multipart.dart";
+import "package:photos/service_locator.dart";
import 'package:photos/services/collections_service.dart';
import "package:photos/services/file_magic_service.dart";
import 'package:photos/services/local_sync_service.dart';
@@ -37,7 +39,6 @@ import 'package:photos/utils/crypto_util.dart';
import 'package:photos/utils/file_download_util.dart';
import 'package:photos/utils/file_uploader_util.dart';
import "package:photos/utils/file_util.dart";
-import "package:photos/utils/multipart_upload_util.dart";
import 'package:shared_preferences/shared_preferences.dart';
import 'package:tuple/tuple.dart';
import "package:uuid/uuid.dart";
@@ -51,7 +52,7 @@ class FileUploader {
static const kBlockedUploadsPollFrequency = Duration(seconds: 2);
static const kFileUploadTimeout = Duration(minutes: 50);
static const k20MBStorageBuffer = 20 * 1024 * 1024;
- static const kUploadTempPrefix = "upload_file_";
+ static const _lastStaleFileCleanupTime = "lastStaleFileCleanupTime";
final _logger = Logger("FileUploader");
final _dio = NetworkClient.instance.getDio();
@@ -79,6 +80,7 @@ class FileUploader {
// cases, we don't want to clear the stale upload files. See #removeStaleFiles
// as it can result in clearing files which are still being force uploaded.
bool _hasInitiatedForceUpload = false;
+ late MultiPartUploader _multiPartUploader;
FileUploader._privateConstructor() {
Bus.instance.on().listen((event) {
@@ -114,6 +116,17 @@ class FileUploader {
// ignore: unawaited_futures
_pollBackgroundUploadStatus();
}
+ _multiPartUploader = MultiPartUploader(
+ _enteDio,
+ _dio,
+ UploadLocksDB.instance,
+ flagService,
+ );
+ if (currentTime - (_prefs.getInt(_lastStaleFileCleanupTime) ?? 0) >
+ tempDirCleanUpInterval) {
+ await removeStaleFiles();
+ await _prefs.setInt(_lastStaleFileCleanupTime, currentTime);
+ }
Bus.instance.on().listen((event) {
if (event.type == EventType.deletedFromDevice ||
event.type == EventType.deletedFromEverywhere) {
@@ -309,13 +322,28 @@ class FileUploader {
// ends with .encrypted. Fetch files in async manner
final files = await Directory(dir).list().toList();
final filesToDelete = files.where((file) {
- return file.path.contains(kUploadTempPrefix) &&
+ return file.path.contains(uploadTempFilePrefix) &&
file.path.contains(".encrypted");
});
if (filesToDelete.isNotEmpty) {
- _logger.info('cleaning up state files ${filesToDelete.length}');
+ _logger.info('Deleting ${filesToDelete.length} stale upload files ');
+ final fileNameToLastAttempt =
+ await _uploadLocks.getFileNameToLastAttemptedAtMap();
for (final file in filesToDelete) {
- await file.delete();
+ final fileName = file.path.split('/').last;
+ final lastAttemptTime = fileNameToLastAttempt[fileName] != null
+ ? DateTime.fromMillisecondsSinceEpoch(
+ fileNameToLastAttempt[fileName]!,
+ )
+ : null;
+ if (lastAttemptTime == null ||
+ DateTime.now().difference(lastAttemptTime).inDays > 1) {
+ await file.delete();
+ } else {
+ _logger.info(
+ 'Skipping file $fileName as it was attempted recently on $lastAttemptTime',
+ );
+ }
}
}
@@ -405,7 +433,7 @@ class FileUploader {
(fileOnDisk.updationTime ?? -1) != -1 &&
(fileOnDisk.collectionID ?? -1) == collectionID;
if (wasAlreadyUploaded) {
- debugPrint("File is already uploaded ${fileOnDisk.tag}");
+ _logger.info("File is already uploaded ${fileOnDisk.tag}");
return fileOnDisk;
}
}
@@ -425,6 +453,7 @@ class FileUploader {
}
final String lockKey = file.localID!;
+ bool _isMultipartUpload = false;
try {
await _uploadLocks.acquireLock(
@@ -438,12 +467,27 @@ class FileUploader {
}
final tempDirectory = Configuration.instance.getTempDirectory();
+ MediaUploadData? mediaUploadData;
+ mediaUploadData = await getUploadDataFromEnteFile(file);
+
+ final String? existingMultipartEncFileName =
+ mediaUploadData.hashData?.fileHash != null
+ ? await _uploadLocks.getEncryptedFileName(
+ lockKey,
+ mediaUploadData.hashData!.fileHash!,
+ collectionID,
+ )
+ : null;
+ bool multipartEntryExists = existingMultipartEncFileName != null;
+
final String uniqueID = const Uuid().v4().toString();
- final encryptedFilePath =
- '$tempDirectory$kUploadTempPrefix${uniqueID}_file.encrypted';
+
+ final encryptedFilePath = multipartEntryExists
+ ? '$tempDirectory$existingMultipartEncFileName'
+ : '$tempDirectory$uploadTempFilePrefix${uniqueID}_file.encrypted';
final encryptedThumbnailPath =
- '$tempDirectory$kUploadTempPrefix${uniqueID}_thumb.encrypted';
- MediaUploadData? mediaUploadData;
+ '$tempDirectory$uploadTempFilePrefix${uniqueID}_thumb.encrypted';
+
var uploadCompleted = false;
// This flag is used to decide whether to clear the iOS origin file cache
// or not.
@@ -457,13 +501,18 @@ class FileUploader {
'${isUpdatedFile ? 're-upload' : 'upload'} of ${file.toString()}',
);
- mediaUploadData = await getUploadDataFromEnteFile(file);
-
Uint8List? key;
+ EncryptionResult? multiPartFileEncResult = multipartEntryExists
+ ? await _multiPartUploader.getEncryptionResult(
+ lockKey,
+ mediaUploadData.hashData!.fileHash!,
+ collectionID,
+ )
+ : null;
if (isUpdatedFile) {
key = getFileKey(file);
} else {
- key = null;
+ key = multiPartFileEncResult?.key;
// check if the file is already uploaded and can be mapped to existing
// uploaded file. If map is found, it also returns the corresponding
// mapped or update file entry.
@@ -482,16 +531,40 @@ class FileUploader {
}
}
- if (File(encryptedFilePath).existsSync()) {
+ final encryptedFileExists = File(encryptedFilePath).existsSync();
+
+ // If the multipart entry exists but the encrypted file doesn't, it means
+ // that we'll have to reupload as the nonce is lost
+ if (multipartEntryExists) {
+ final bool updateWithDiffKey = isUpdatedFile &&
+ multiPartFileEncResult != null &&
+ !listEquals(key, multiPartFileEncResult.key);
+ if (!encryptedFileExists || updateWithDiffKey) {
+ if (updateWithDiffKey) {
+ _logger.severe('multiPart update resumed with differentKey');
+ } else {
+ _logger.warning(
+ 'multiPart EncryptedFile missing, discard multipart entry',
+ );
+ }
+ await _uploadLocks.deleteMultipartTrack(lockKey);
+ multipartEntryExists = false;
+ multiPartFileEncResult = null;
+ }
+ } else if (encryptedFileExists) {
+ // otherwise just delete the file for singlepart upload
await File(encryptedFilePath).delete();
}
await _checkIfWithinStorageLimit(mediaUploadData.sourceFile!);
final encryptedFile = File(encryptedFilePath);
- final EncryptionResult fileAttributes = await CryptoUtil.encryptFile(
- mediaUploadData.sourceFile!.path,
- encryptedFilePath,
- key: key,
- );
+
+ final EncryptionResult fileAttributes = multiPartFileEncResult ??
+ await CryptoUtil.encryptFile(
+ mediaUploadData.sourceFile!.path,
+ encryptedFilePath,
+ key: key,
+ );
+
late final Uint8List? thumbnailData;
if (mediaUploadData.thumbnail == null &&
file.fileType == FileType.video) {
@@ -512,31 +585,63 @@ class FileUploader {
await encryptedThumbnailFile
.writeAsBytes(encryptedThumbnailData.encryptedData!);
- final thumbnailUploadURL = await _getUploadURL();
- final String thumbnailObjectKey =
- await _putFile(thumbnailUploadURL, encryptedThumbnailFile);
-
- // Calculate the number of parts for the file. Multiple part upload
- // is only enabled for internal users and debug builds till it's battle tested.
- final count = kDebugMode
- ? await calculatePartCount(
- await encryptedFile.length(),
- )
- : 1;
+ // Calculate the number of parts for the file.
+ final count = await _multiPartUploader.calculatePartCount(
+ await encryptedFile.length(),
+ );
late String fileObjectKey;
+ late String thumbnailObjectKey;
if (count <= 1) {
+ final thumbnailUploadURL = await _getUploadURL();
+ thumbnailObjectKey =
+ await _putFile(thumbnailUploadURL, encryptedThumbnailFile);
final fileUploadURL = await _getUploadURL();
fileObjectKey = await _putFile(fileUploadURL, encryptedFile);
} else {
- final fileUploadURLs = await getMultipartUploadURLs(count);
- fileObjectKey = await putMultipartFile(fileUploadURLs, encryptedFile);
+ _isMultipartUpload = true;
+ _logger.finest(
+ "Init multipartUpload $multipartEntryExists, isUpdate $isUpdatedFile",
+ );
+ if (multipartEntryExists) {
+ fileObjectKey = await _multiPartUploader.putExistingMultipartFile(
+ encryptedFile,
+ lockKey,
+ mediaUploadData.hashData!.fileHash!,
+ collectionID,
+ );
+ } else {
+ final fileUploadURLs =
+ await _multiPartUploader.getMultipartUploadURLs(count);
+ final encFileName = encryptedFile.path.split('/').last;
+ await _multiPartUploader.createTableEntry(
+ lockKey,
+ mediaUploadData.hashData!.fileHash!,
+ collectionID,
+ fileUploadURLs,
+ encFileName,
+ await encryptedFile.length(),
+ fileAttributes.key!,
+ fileAttributes.header!,
+ );
+ fileObjectKey = await _multiPartUploader.putMultipartFile(
+ fileUploadURLs,
+ encryptedFile,
+ );
+ }
+ // In case of multipart upload, upload the thumbnail towards the end to
+ // avoid re-uploading it on failure. In regular uploads, the thumbnail is
+ // always uploaded first to keep the existing behaviour.
+ final thumbnailUploadURL = await _getUploadURL();
+ thumbnailObjectKey =
+ await _putFile(thumbnailUploadURL, encryptedThumbnailFile);
}
final metadata = await file.getMetadataForUpload(mediaUploadData);
final encryptedMetadataResult = await CryptoUtil.encryptChaCha(
- utf8.encode(jsonEncode(metadata)) as Uint8List,
+ utf8.encode(jsonEncode(metadata)),
fileAttributes.key!,
);
final fileDecryptionHeader =
@@ -618,6 +723,8 @@ class FileUploader {
}
await FilesDB.instance.update(remoteFile);
}
+ await UploadLocksDB.instance.deleteMultipartTrack(lockKey);
+
if (!_isBackground) {
Bus.instance.fire(
LocalPhotosUpdatedEvent(
@@ -659,6 +766,7 @@ class FileUploader {
encryptedFilePath,
encryptedThumbnailPath,
lockKey: lockKey,
+ isMultiPartUpload: _isMultipartUpload,
);
}
}
@@ -803,6 +911,7 @@ class FileUploader {
String encryptedFilePath,
String encryptedThumbnailPath, {
required String lockKey,
+ bool isMultiPartUpload = false,
}) async {
if (mediaUploadData != null && mediaUploadData.sourceFile != null) {
// delete the file from app's internal cache if it was copied to app
@@ -816,7 +925,14 @@ class FileUploader {
}
}
if (File(encryptedFilePath).existsSync()) {
- await File(encryptedFilePath).delete();
+ if (isMultiPartUpload && !uploadCompleted) {
+ _logger.fine(
+ "skip delete for multipart encrypted file $encryptedFilePath",
+ );
+ } else {
+ _logger.fine("deleting encrypted file $encryptedFilePath");
+ await File(encryptedFilePath).delete();
+ }
}
if (File(encryptedThumbnailPath).existsSync()) {
await File(encryptedThumbnailPath).delete();
@@ -1039,7 +1155,7 @@ class FileUploader {
if (_uploadURLs.isEmpty) {
// the queue is empty, fetch at least for one file to handle force uploads
// that are not in the queue. This is to also avoid
- await fetchUploadURLs(max(_queue.length, 1));
+ await fetchUploadURLs(math.max(_queue.length, 1));
}
try {
return _uploadURLs.removeFirst();
@@ -1061,7 +1177,7 @@ class FileUploader {
final response = await _enteDio.get(
"/files/upload-urls",
queryParameters: {
- "count": min(42, fileCount * 2), // m4gic number
+ "count": math.min(42, fileCount * 2), // m4gic number
},
);
final urls = (response.data["urls"] as List)
diff --git a/mobile/lib/utils/multipart_upload_util.dart b/mobile/lib/utils/multipart_upload_util.dart
index 102c08d8d8da888c8e5b7cc67f28a58ce87ced94..6b9ccafb970e7f0927f42819b166f22b41e27450 100644
--- a/mobile/lib/utils/multipart_upload_util.dart
+++ b/mobile/lib/utils/multipart_upload_util.dart
@@ -6,8 +6,8 @@ import "package:dio/dio.dart";
import "package:logging/logging.dart";
import "package:photos/core/constants.dart";
import "package:photos/core/network/network.dart";
+import 'package:photos/module/upload/model/xml.dart';
import "package:photos/service_locator.dart";
-import "package:photos/utils/xml_parser_util.dart";
final _enteDio = NetworkClient.instance.enteDio;
final _dio = NetworkClient.instance.getDio();
diff --git a/mobile/lib/utils/xml_parser_util.dart b/mobile/lib/utils/xml_parser_util.dart
index 9490fc40cbb599cd0bdef42d572492aa0f26dbb5..8b137891791fe96927ad78e64b0aad7bded08bdc 100644
--- a/mobile/lib/utils/xml_parser_util.dart
+++ b/mobile/lib/utils/xml_parser_util.dart
@@ -1,41 +1 @@
-// ignore_for_file: implementation_imports
-import "package:xml/xml.dart";
-
-// used for classes that can be converted to xml
-abstract class XmlParsableObject {
- Map<String, dynamic> toMap();
- String get elementName;
-}
-
-// for converting the response to xml
-String convertJs2Xml(Map<String, dynamic> json) {
- final builder = XmlBuilder();
- buildXml(builder, json);
- return builder.buildDocument().toXmlString(
- pretty: true,
- indent: ' ',
- );
-}
-
-// for building the xml node tree recursively
-void buildXml(XmlBuilder builder, dynamic node) {
- if (node is Map) {
- node.forEach((key, value) {
- builder.element(key, nest: () => buildXml(builder, value));
- });
- } else if (node is List) {
- for (var item in node) {
- buildXml(builder, item);
- }
- } else if (node is XmlParsableObject) {
- builder.element(
- node.elementName,
- nest: () {
- buildXml(builder, node.toMap());
- },
- );
- } else {
- builder.text(node.toString());
- }
-}
diff --git a/mobile/plugins/ente_cast_normal/lib/src/service.dart b/mobile/plugins/ente_cast_normal/lib/src/service.dart
index 04c501666a6fbacbac247da3fd65de21cbdc2e97..8a1f2aaf16b7b03d5f43d6dff418b5aa45c43818 100644
--- a/mobile/plugins/ente_cast_normal/lib/src/service.dart
+++ b/mobile/plugins/ente_cast_normal/lib/src/service.dart
@@ -24,7 +24,9 @@ class CastServiceImpl extends CastService {
"got RECEIVER_STATUS, Send request to pair",
name: "CastServiceImpl",
);
- session.sendMessage(_pairRequestNamespace, {});
+ session.sendMessage(_pairRequestNamespace, {
+ "collectionID": collectionID,
+ });
} else {
if (onMessage != null && message.containsKey("code")) {
onMessage(
@@ -32,8 +34,9 @@ class CastServiceImpl extends CastService {
CastMessageType.pairCode: message,
},
);
+ } else {
+ print('receive message: $message');
}
- print('receive message: $message');
}
});
@@ -56,7 +59,9 @@ class CastServiceImpl extends CastService {
@override
Future<List<(String, Object)>> searchDevices() {
- return CastDiscoveryService().search().then((devices) {
+ return CastDiscoveryService()
+ .search(timeout: const Duration(seconds: 7))
+ .then((devices) {
return devices.map((device) => (device.name, device)).toList();
});
}
diff --git a/mobile/pubspec.yaml b/mobile/pubspec.yaml
index 385deb7691c8aa788963516ad54abbcf0a7aad78..ecf762a3d6bb864a41c44594013d9246cf3736a8 100644
--- a/mobile/pubspec.yaml
+++ b/mobile/pubspec.yaml
@@ -12,7 +12,7 @@ description: ente photos application
# Read more about iOS versioning at
# https://developer.apple.com/library/archive/documentation/General/Reference/InfoPlistKeyReference/Articles/CoreFoundationKeys.html
-version: 0.8.90+610
+version: 0.8.95+615
publish_to: none
environment:
diff --git a/server/configurations/local.yaml b/server/configurations/local.yaml
index 7785f56019878364ff784b325201526727120c56..196c56f1f9f6ce06fea84af3037ece49606c3772 100644
--- a/server/configurations/local.yaml
+++ b/server/configurations/local.yaml
@@ -180,6 +180,9 @@ smtp:
port:
username:
password:
+ # The email address from which to send the email. Set this to an email
+ # address whose credentials you're providing.
+ email:
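+ # For example: email: no-reply@example.org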
# Zoho Zeptomail config (optional)
#
diff --git a/server/docs/publish.md b/server/docs/publish.md
index de4849d900f7870ed252605f5c524b8799264ab6..3a49a47611cd92ee30feace40d8227e7c50192d8 100644
--- a/server/docs/publish.md
+++ b/server/docs/publish.md
@@ -39,3 +39,7 @@ combine both these steps too.
Once the workflow completes, the resultant image will be available at
`ghcr.io/ente-io/server`. The image will be tagged by the commit SHA. The latest
image will also be tagged, well, "latest".
+
+The workflow will also tag the commit it used to build the image with
+`server/ghcr`. This tag will be overwritten on each publish, and it'll point to
+the code that was used in the most recent publish.
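+
+For example, to check which commit the tag currently points to (this assumes
+your remote for this repository is named `origin`):
+
+```sh
+git ls-remote origin refs/tags/server/ghcr
+```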
diff --git a/server/ente/billing.go b/server/ente/billing.go
index 20c37bdb5afcdad5024fbc7105f46b35e9f23513..f623a92e85cadbe4fa7f724930f3f8b3a570bb99 100644
--- a/server/ente/billing.go
+++ b/server/ente/billing.go
@@ -11,7 +11,7 @@ import (
const (
// FreePlanStorage is the amount of storage in free plan
- FreePlanStorage = 1 * 1024 * 1024 * 1024
+ FreePlanStorage = 5 * 1024 * 1024 * 1024
// FreePlanProductID is the product ID of free plan
FreePlanProductID = "free"
// FreePlanTransactionID is the dummy transaction ID for the free plan
diff --git a/server/ente/embedding.go b/server/ente/embedding.go
index 2990a779a310ab933ea15428c336522b5134dea4..fabde44a580a74bdd3796386b751ecbbb4fa4cc6 100644
--- a/server/ente/embedding.go
+++ b/server/ente/embedding.go
@@ -7,6 +7,7 @@ type Embedding struct {
DecryptionHeader string `json:"decryptionHeader"`
UpdatedAt int64 `json:"updatedAt"`
Version *int `json:"version,omitempty"`
+ Size *int64
}
type InsertOrUpdateEmbeddingRequest struct {
@@ -30,9 +31,10 @@ type GetFilesEmbeddingRequest struct {
}
type GetFilesEmbeddingResponse struct {
- Embeddings []Embedding `json:"embeddings"`
- NoDataFileIDs []int64 `json:"noDataFileIDs"`
- ErrFileIDs []int64 `json:"errFileIDs"`
+ Embeddings []Embedding `json:"embeddings"`
+ PendingIndexFileIDs []int64 `json:"pendingIndexFileIDs"`
+ ErrFileIDs []int64 `json:"errFileIDs"`
+ NoEmbeddingFileIDs []int64 `json:"noEmbeddingFileIDs"`
}
type Model string
diff --git a/server/ente/file.go b/server/ente/file.go
index 4a69473e3afeb847cc100a27c776669d7604e44a..a0e67c71cfde1982381d9feb6b42de4f6cb02dbb 100644
--- a/server/ente/file.go
+++ b/server/ente/file.go
@@ -134,6 +134,7 @@ type UpdateMagicMetadata struct {
// UpdateMultipleMagicMetadataRequest request payload for updating magic metadata for list of files
type UpdateMultipleMagicMetadataRequest struct {
MetadataList []UpdateMagicMetadata `json:"metadataList" binding:"required"`
+ SkipVersion *bool `json:"skipVersion"`
}
// UploadURL represents the upload url for a specific object
diff --git a/server/migrations/85_increase_free_storage.down.sql b/server/migrations/85_increase_free_storage.down.sql
new file mode 100644
index 0000000000000000000000000000000000000000..9f7060a47f15d39a9d3eefbcd01ebf4cba2ba51c
--- /dev/null
+++ b/server/migrations/85_increase_free_storage.down.sql
@@ -0,0 +1 @@
+-- no-op
diff --git a/server/migrations/85_increase_free_storage.up.sql b/server/migrations/85_increase_free_storage.up.sql
new file mode 100644
index 0000000000000000000000000000000000000000..395033c8dd3c0b4c814cd20f602874f58cc225ab
--- /dev/null
+++ b/server/migrations/85_increase_free_storage.up.sql
@@ -0,0 +1 @@
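+-- 5368709120 bytes = 5 GiB; the previous free plan storage was 1073741824 bytes (1 GiB).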
+UPDATE subscriptions SET storage = 5368709120, expiry_time = 1749355117000000 where storage = 1073741824 and product_id = 'free';
diff --git a/server/pkg/api/file.go b/server/pkg/api/file.go
index a253c71c2a684d65026f7ea955b4d24cd4c7a926..990336e37297f229950653aa26cc3c8366c42d2d 100644
--- a/server/pkg/api/file.go
+++ b/server/pkg/api/file.go
@@ -110,7 +110,7 @@ func (h *FileHandler) GetUploadURLs(c *gin.Context) {
userID := auth.GetUserID(c.Request.Header)
count, _ := strconv.Atoi(c.Query("count"))
- urls, err := h.Controller.GetUploadURLs(c, userID, count, enteApp)
+ urls, err := h.Controller.GetUploadURLs(c, userID, count, enteApp, false)
if err != nil {
handler.Error(c, stacktrace.Propagate(err, ""))
return
diff --git a/server/pkg/api/public_collection.go b/server/pkg/api/public_collection.go
index 7a38f43808c8e46cdf1fb1770ed19e7265ab11e9..9290d645604deb3fbbafa52dbed4d9cf73d5a1dd 100644
--- a/server/pkg/api/public_collection.go
+++ b/server/pkg/api/public_collection.go
@@ -57,7 +57,7 @@ func (h *PublicCollectionHandler) GetUploadUrls(c *gin.Context) {
}
userID := collection.Owner.ID
count, _ := strconv.Atoi(c.Query("count"))
- urls, err := h.FileCtrl.GetUploadURLs(c, userID, count, enteApp)
+ urls, err := h.FileCtrl.GetUploadURLs(c, userID, count, enteApp, false)
if err != nil {
handler.Error(c, stacktrace.Propagate(err, ""))
return
diff --git a/server/pkg/controller/embedding/controller.go b/server/pkg/controller/embedding/controller.go
index d6e78209fa3cac5b6702251b6a4b4dd747a39cd9..bf317ccfec736e29089fba228e1642f1b83b64f2 100644
--- a/server/pkg/controller/embedding/controller.go
+++ b/server/pkg/controller/embedding/controller.go
@@ -2,12 +2,14 @@ package embedding
import (
"bytes"
+ "context"
"encoding/json"
"errors"
"fmt"
"github.com/ente-io/museum/pkg/utils/array"
"strconv"
"sync"
+ gTime "time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/s3"
@@ -26,6 +28,12 @@ import (
log "github.com/sirupsen/logrus"
)
+const (
+ // minEmbeddingDataSize is the minimum size (in bytes) of an embedding object that contains actual embedding data
+ minEmbeddingDataSize = 2048
+ embeddingFetchTimeout = 15 * gTime.Second
+)
+
type Controller struct {
Repo *embedding.Repository
AccessCtrl access.Controller
@@ -135,15 +143,23 @@ func (c *Controller) GetFilesEmbedding(ctx *gin.Context, req ente.GetFilesEmbedd
return nil, stacktrace.Propagate(err, "")
}
+ embeddingsWithData := make([]ente.Embedding, 0)
+ noEmbeddingFileIds := make([]int64, 0)
dbFileIds := make([]int64, 0)
- for _, embedding := range userFileEmbeddings {
- dbFileIds = append(dbFileIds, embedding.FileID)
+ // separate out fileIDs that were indexed but do not contain any embedding data
+ for i := range userFileEmbeddings {
+ dbFileIds = append(dbFileIds, userFileEmbeddings[i].FileID)
+ if userFileEmbeddings[i].Size != nil && *userFileEmbeddings[i].Size < minEmbeddingDataSize {
+ noEmbeddingFileIds = append(noEmbeddingFileIds, userFileEmbeddings[i].FileID)
+ } else {
+ embeddingsWithData = append(embeddingsWithData, userFileEmbeddings[i])
+ }
}
- missingFileIds := array.FindMissingElementsInSecondList(req.FileIDs, dbFileIds)
+ pendingIndexFileIds := array.FindMissingElementsInSecondList(req.FileIDs, dbFileIds)
errFileIds := make([]int64, 0)
// Fetch missing userFileEmbeddings in parallel
- embeddingObjects, err := c.getEmbeddingObjectsParallelV2(userID, userFileEmbeddings)
+ embeddingObjects, err := c.getEmbeddingObjectsParallelV2(userID, embeddingsWithData)
if err != nil {
return nil, stacktrace.Propagate(err, "")
}
@@ -166,9 +182,10 @@ func (c *Controller) GetFilesEmbedding(ctx *gin.Context, req ente.GetFilesEmbedd
}
return &ente.GetFilesEmbeddingResponse{
- Embeddings: fetchedEmbeddings,
- NoDataFileIDs: missingFileIds,
- ErrFileIDs: errFileIds,
+ Embeddings: fetchedEmbeddings,
+ PendingIndexFileIDs: pendingIndexFileIds,
+ ErrFileIDs: errFileIds,
+ NoEmbeddingFileIDs: noEmbeddingFileIds,
}, nil
}
@@ -292,7 +309,7 @@ func (c *Controller) getEmbeddingObjectsParallel(objectKeys []string) ([]ente.Em
defer wg.Done()
defer func() { <-globalDiffFetchSemaphore }() // Release back to global semaphore
- obj, err := c.getEmbeddingObject(objectKey, downloader)
+ obj, err := c.getEmbeddingObject(context.Background(), objectKey, downloader)
if err != nil {
errs = append(errs, err)
log.Error("error fetching embedding object: "+objectKey, err)
@@ -329,7 +346,9 @@ func (c *Controller) getEmbeddingObjectsParallelV2(userID int64, dbEmbeddingRows
defer wg.Done()
defer func() { <-globalFileFetchSemaphore }() // Release back to global semaphore
objectKey := c.getObjectKey(userID, dbEmbeddingRow.FileID, dbEmbeddingRow.Model)
- obj, err := c.getEmbeddingObject(objectKey, downloader)
+ ctx, cancel := context.WithTimeout(context.Background(), embeddingFetchTimeout)
+ defer cancel()
+ obj, err := c.getEmbeddingObjectWithRetries(ctx, objectKey, downloader, 0)
if err != nil {
log.Error("error fetching embedding object: "+objectKey, err)
embeddingObjects[i] = embeddingObjectResult{
@@ -349,15 +368,22 @@ func (c *Controller) getEmbeddingObjectsParallelV2(userID int64, dbEmbeddingRows
return embeddingObjects, nil
}
-func (c *Controller) getEmbeddingObject(objectKey string, downloader *s3manager.Downloader) (ente.EmbeddingObject, error) {
+func (c *Controller) getEmbeddingObject(ctx context.Context, objectKey string, downloader *s3manager.Downloader) (ente.EmbeddingObject, error) {
+ return c.getEmbeddingObjectWithRetries(ctx, objectKey, downloader, 3)
+}
+
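+// getEmbeddingObjectWithRetries downloads and unmarshals the embedding object
+// from the hot bucket, retrying the download up to retryCount more times on
+// failure.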
+func (c *Controller) getEmbeddingObjectWithRetries(ctx context.Context, objectKey string, downloader *s3manager.Downloader, retryCount int) (ente.EmbeddingObject, error) {
var obj ente.EmbeddingObject
buff := &aws.WriteAtBuffer{}
- _, err := downloader.Download(buff, &s3.GetObjectInput{
+ _, err := downloader.DownloadWithContext(ctx, buff, &s3.GetObjectInput{
Bucket: c.S3Config.GetHotBucket(),
Key: &objectKey,
})
if err != nil {
log.Error(err)
+ if retryCount > 0 {
+ return c.getEmbeddingObjectWithRetries(ctx, objectKey, downloader, retryCount-1)
+ }
return obj, stacktrace.Propagate(err, "")
}
err = json.Unmarshal(buff.Bytes(), &obj)
diff --git a/server/pkg/controller/file.go b/server/pkg/controller/file.go
index e91d299f151a18d3576eeeeb509489ae80bbd85e..b3fec115d0fe0ee9e94ac8a162338dac48955953 100644
--- a/server/pkg/controller/file.go
+++ b/server/pkg/controller/file.go
@@ -258,7 +258,7 @@ func (c *FileController) Update(ctx context.Context, userID int64, file ente.Fil
}
// GetUploadURLs returns a bunch of presigned URLs for uploading files
-func (c *FileController) GetUploadURLs(ctx context.Context, userID int64, count int, app ente.App) ([]ente.UploadURL, error) {
+func (c *FileController) GetUploadURLs(ctx context.Context, userID int64, count int, app ente.App, ignoreLimit bool) ([]ente.UploadURL, error) {
err := c.UsageCtrl.CanUploadFile(ctx, userID, nil, app)
if err != nil {
return []ente.UploadURL{}, stacktrace.Propagate(err, "")
@@ -268,7 +268,7 @@ func (c *FileController) GetUploadURLs(ctx context.Context, userID int64, count
bucket := c.S3Config.GetHotBucket()
urls := make([]ente.UploadURL, 0)
objectKeys := make([]string, 0)
- if count > MaxUploadURLsLimit {
+ if count > MaxUploadURLsLimit && !ignoreLimit {
count = MaxUploadURLsLimit
}
for i := 0; i < count; i++ {
@@ -502,7 +502,7 @@ func (c *FileController) UpdateMagicMetadata(ctx *gin.Context, req ente.UpdateMu
if err != nil {
return stacktrace.Propagate(err, "")
}
- err = c.FileRepo.UpdateMagicAttributes(ctx, req.MetadataList, isPublicMetadata)
+ err = c.FileRepo.UpdateMagicAttributes(ctx, req.MetadataList, isPublicMetadata, req.SkipVersion)
if err != nil {
return stacktrace.Propagate(err, "failed to update magic attributes")
}
diff --git a/server/pkg/controller/file_copy/file_copy.go b/server/pkg/controller/file_copy/file_copy.go
index afab10efee290cc057be7edfa1dc136f92114aad..4f9267e2e9abd9d7b700dec72b191a1388bb88d9 100644
--- a/server/pkg/controller/file_copy/file_copy.go
+++ b/server/pkg/controller/file_copy/file_copy.go
@@ -92,7 +92,7 @@ func (fc *FileCopyController) CopyFiles(c *gin.Context, req ente.CopyFileSyncReq
// request the uploadUrls using existing method. This is to ensure that orphan objects are automatically cleaned up
// todo:(neeraj) optimize this method by removing the need for getting a signed url for each object
- uploadUrls, err := fc.FileController.GetUploadURLs(c, userID, len(s3ObjectsToCopy), app)
+ uploadUrls, err := fc.FileController.GetUploadURLs(c, userID, len(s3ObjectsToCopy), app, true)
if err != nil {
return nil, err
}
diff --git a/server/pkg/repo/cast/repo.go b/server/pkg/repo/cast/repo.go
index 2f4446c9d0c5e4fcb0ab3acc4e75174726c7667a..823b17b2eecb1e33af4ea9f4dc3c4d5fd32fb4c8 100644
--- a/server/pkg/repo/cast/repo.go
+++ b/server/pkg/repo/cast/repo.go
@@ -8,6 +8,7 @@ import (
"github.com/ente-io/stacktrace"
"github.com/google/uuid"
log "github.com/sirupsen/logrus"
+ "strings"
)
type Repository struct {
@@ -19,6 +20,7 @@ func (r *Repository) AddCode(ctx context.Context, pubKey string, ip string) (str
if err != nil {
return "", err
}
+ codeValue = strings.ToUpper(codeValue)
_, err = r.DB.ExecContext(ctx, "INSERT INTO casting (code, public_key, id, ip) VALUES ($1, $2, $3, $4)", codeValue, pubKey, uuid.New(), ip)
if err != nil {
return "", err
@@ -28,11 +30,13 @@ func (r *Repository) AddCode(ctx context.Context, pubKey string, ip string) (str
// InsertCastData insert collection_id, cast_user, token and encrypted_payload for given code if collection_id is not null
func (r *Repository) InsertCastData(ctx context.Context, castUserID int64, code string, collectionID int64, castToken string, encryptedPayload string) error {
+ code = strings.ToUpper(code)
_, err := r.DB.ExecContext(ctx, "UPDATE casting SET collection_id = $1, cast_user = $2, token = $3, encrypted_payload = $4 WHERE code = $5 and is_deleted=false", collectionID, castUserID, castToken, encryptedPayload, code)
return err
}
func (r *Repository) GetPubKeyAndIp(ctx context.Context, code string) (string, string, error) {
+ code = strings.ToUpper(code)
var pubKey, ip string
row := r.DB.QueryRowContext(ctx, "SELECT public_key, ip FROM casting WHERE code = $1 and is_deleted=false", code)
err := row.Scan(&pubKey, &ip)
@@ -46,6 +50,7 @@ func (r *Repository) GetPubKeyAndIp(ctx context.Context, code string) (string, s
}
func (r *Repository) GetEncCastData(ctx context.Context, code string) (*string, error) {
+ code = strings.ToUpper(code)
var payload sql.NullString
row := r.DB.QueryRowContext(ctx, "SELECT encrypted_payload FROM casting WHERE code = $1 and is_deleted=false", code)
err := row.Scan(&payload)
diff --git a/server/pkg/repo/embedding/repository.go b/server/pkg/repo/embedding/repository.go
index f21e3b4f19eff1d668d2f9eccf8790bd94db6068..86915fde51f4da49fa65c59f4089abe477096796 100644
--- a/server/pkg/repo/embedding/repository.go
+++ b/server/pkg/repo/embedding/repository.go
@@ -45,7 +45,7 @@ func (r *Repository) InsertOrUpdate(ctx context.Context, ownerID int64, entry en
// GetDiff returns the embeddings that have been updated since the given time
func (r *Repository) GetDiff(ctx context.Context, ownerID int64, model ente.Model, sinceTime int64, limit int16) ([]ente.Embedding, error) {
- rows, err := r.DB.QueryContext(ctx, `SELECT file_id, model, encrypted_embedding, decryption_header, updated_at, version
+ rows, err := r.DB.QueryContext(ctx, `SELECT file_id, model, encrypted_embedding, decryption_header, updated_at, version, size
FROM embeddings
WHERE owner_id = $1 AND model = $2 AND updated_at > $3
ORDER BY updated_at ASC
@@ -57,7 +57,7 @@ func (r *Repository) GetDiff(ctx context.Context, ownerID int64, model ente.Mode
}
func (r *Repository) GetFilesEmbedding(ctx context.Context, ownerID int64, model ente.Model, fileIDs []int64) ([]ente.Embedding, error) {
- rows, err := r.DB.QueryContext(ctx, `SELECT file_id, model, encrypted_embedding, decryption_header, updated_at, version
+ rows, err := r.DB.QueryContext(ctx, `SELECT file_id, model, encrypted_embedding, decryption_header, updated_at, version, size
FROM embeddings
WHERE owner_id = $1 AND model = $2 AND file_id = ANY($3)`, ownerID, model, pq.Array(fileIDs))
if err != nil {
@@ -94,7 +94,7 @@ func convertRowsToEmbeddings(rows *sql.Rows) ([]ente.Embedding, error) {
embedding := ente.Embedding{}
var encryptedEmbedding, decryptionHeader sql.NullString
var version sql.NullInt32
- err := rows.Scan(&embedding.FileID, &embedding.Model, &encryptedEmbedding, &decryptionHeader, &embedding.UpdatedAt, &version)
+ err := rows.Scan(&embedding.FileID, &embedding.Model, &encryptedEmbedding, &decryptionHeader, &embedding.UpdatedAt, &version, &embedding.Size)
if encryptedEmbedding.Valid && len(encryptedEmbedding.String) > 0 {
embedding.EncryptedEmbedding = encryptedEmbedding.String
}
diff --git a/server/pkg/repo/file.go b/server/pkg/repo/file.go
index eafc7b570c70c707dfbf48df48020b608d406c90..2ae4eafdca8414991ac6bc400aa9a21b6c4d154a 100644
--- a/server/pkg/repo/file.go
+++ b/server/pkg/repo/file.go
@@ -311,7 +311,12 @@ func (repo *FileRepository) Update(file ente.File, fileSize int64, thumbnailSize
// UpdateMagicAttributes updates the magic attributes for the list of files and update collection_files & collection
// which have this file.
-func (repo *FileRepository) UpdateMagicAttributes(ctx context.Context, fileUpdates []ente.UpdateMagicMetadata, isPublicMetadata bool) error {
+func (repo *FileRepository) UpdateMagicAttributes(
+ ctx context.Context,
+ fileUpdates []ente.UpdateMagicMetadata,
+ isPublicMetadata bool,
+ skipVersion *bool,
+) error {
updationTime := time.Microseconds()
tx, err := repo.DB.BeginTx(ctx, nil)
if err != nil {
@@ -336,6 +341,9 @@ func (repo *FileRepository) UpdateMagicAttributes(ctx context.Context, fileUpdat
return stacktrace.Propagate(err, "")
}
}
+ if skipVersion != nil && *skipVersion {
+ return tx.Commit()
+ }
// todo: full table scan, need to add index (for discussion: add user_id and idx {user_id, file_id}).
updatedRows, err := tx.QueryContext(ctx, `UPDATE collection_files
SET updation_time = $1 WHERE file_id = ANY($2) AND is_deleted= false RETURNING collection_id`, updationTime,
diff --git a/server/pkg/repo/user.go b/server/pkg/repo/user.go
index 596d24c64c6fdd359aad692a4b3b1e72297eb7d2..f35a47e1f9975486148318d120af74e5c3c49f5b 100644
--- a/server/pkg/repo/user.go
+++ b/server/pkg/repo/user.go
@@ -194,8 +194,8 @@ func (repo *UserRepository) UpdateEmail(userID int64, encryptedEmail ente.Encryp
// GetUserIDWithEmail returns the userID associated with a provided email
func (repo *UserRepository) GetUserIDWithEmail(email string) (int64, error) {
- trimmedEmail := strings.TrimSpace(email)
- emailHash, err := crypto.GetHash(trimmedEmail, repo.HashingKey)
+ sanitizedEmail := strings.ToLower(strings.TrimSpace(email))
+ emailHash, err := crypto.GetHash(sanitizedEmail, repo.HashingKey)
if err != nil {
return -1, stacktrace.Propagate(err, "")
}
diff --git a/server/pkg/utils/email/email.go b/server/pkg/utils/email/email.go
index 46202313e7ad8f6441d45e19999f290bdaccc545..a19987a1d8fc0834d31643c84b901f53e8bf4933 100644
--- a/server/pkg/utils/email/email.go
+++ b/server/pkg/utils/email/email.go
@@ -38,6 +38,7 @@ func sendViaSMTP(toEmails []string, fromName string, fromEmail string, subject s
smtpPort := viper.GetString("smtp.port")
smtpUsername := viper.GetString("smtp.username")
smtpPassword := viper.GetString("smtp.password")
+ smtpEmail := viper.GetString("smtp.email")
var emailMessage string
@@ -50,6 +51,11 @@ func sendViaSMTP(toEmails []string, fromName string, fromEmail string, subject s
emailAddresses += email
}
+	// If a sender email is provided, use it instead of the fromEmail.
+ if smtpEmail != "" {
+ fromEmail = smtpEmail
+ }
+
header := "From: " + fromName + " <" + fromEmail + ">\n" +
"To: " + emailAddresses + "\n" +
"Subject: " + subject + "\n" +
diff --git a/web/apps/accounts/.env b/web/apps/accounts/.env
new file mode 100644
index 0000000000000000000000000000000000000000..3f3b1cc9aeff1df0aabcc5d235dab2e39858b623
--- /dev/null
+++ b/web/apps/accounts/.env
@@ -0,0 +1 @@
+NEXT_TELEMETRY_DISABLED = 1
diff --git a/web/apps/auth/.env b/web/apps/auth/.env
new file mode 100644
index 0000000000000000000000000000000000000000..3f3b1cc9aeff1df0aabcc5d235dab2e39858b623
--- /dev/null
+++ b/web/apps/auth/.env
@@ -0,0 +1 @@
+NEXT_TELEMETRY_DISABLED = 1
diff --git a/web/apps/cast/.env b/web/apps/cast/.env
new file mode 100644
index 0000000000000000000000000000000000000000..3f3b1cc9aeff1df0aabcc5d235dab2e39858b623
--- /dev/null
+++ b/web/apps/cast/.env
@@ -0,0 +1 @@
+NEXT_TELEMETRY_DISABLED = 1
diff --git a/web/apps/cast/package.json b/web/apps/cast/package.json
index 012148969ad057ffa89adac120c671a573eb64cd..4f774662ad17dc28b0de9e6bcb6b7e70dac83026 100644
--- a/web/apps/cast/package.json
+++ b/web/apps/cast/package.json
@@ -8,5 +8,8 @@
"@ente/accounts": "*",
"@ente/eslint-config": "*",
"@ente/shared": "*"
+ },
+ "devDependencies": {
+ "@types/chromecast-caf-receiver": "^6.0.14"
}
}
diff --git a/web/apps/cast/src/components/FilledCircleCheck.tsx b/web/apps/cast/src/components/FilledCircleCheck.tsx
index c0635f138afe9e93b34a4ea98c1136d527745367..ba2292922ebbf615d84e32380e74c5923c329d61 100644
--- a/web/apps/cast/src/components/FilledCircleCheck.tsx
+++ b/web/apps/cast/src/components/FilledCircleCheck.tsx
@@ -1,6 +1,6 @@
import { styled } from "@mui/material";
-const FilledCircleCheck = () => {
+export const FilledCircleCheck: React.FC = () => {
return (
@@ -11,8 +11,6 @@ const FilledCircleCheck = () => {
);
};
-export default FilledCircleCheck;
-
const Container = styled("div")`
width: 100px;
height: 100px;
diff --git a/web/apps/cast/src/components/PairedSuccessfullyOverlay.tsx b/web/apps/cast/src/components/PairedSuccessfullyOverlay.tsx
deleted file mode 100644
index 845416fedc9113092a63b73a5c42846e8415f558..0000000000000000000000000000000000000000
--- a/web/apps/cast/src/components/PairedSuccessfullyOverlay.tsx
+++ /dev/null
@@ -1,46 +0,0 @@
-import FilledCircleCheck from "./FilledCircleCheck";
-
-export default function PairedSuccessfullyOverlay() {
- return (
-
-
-
-
- Pairing Complete
-
-
- We're preparing your album.
- This should only take a few seconds.
-
-
-
- );
-}
diff --git a/web/apps/cast/src/components/LargeType.tsx b/web/apps/cast/src/components/PairingCode.tsx
similarity index 74%
rename from web/apps/cast/src/components/LargeType.tsx
rename to web/apps/cast/src/components/PairingCode.tsx
index ecf7a201bbeea024acbc20598898c596c5e648eb..fa1474bafc34c02d9261f1a89a8368d8b2a988a8 100644
--- a/web/apps/cast/src/components/LargeType.tsx
+++ b/web/apps/cast/src/components/PairingCode.tsx
@@ -1,6 +1,6 @@
import { styled } from "@mui/material";
-const colourPool = [
+const colors = [
"#87CEFA", // Light Blue
"#90EE90", // Light Green
"#F08080", // Light Coral
@@ -23,27 +23,34 @@ const colourPool = [
"#808000", // Light Olive
];
-export default function LargeType({ chars }: { chars: string[] }) {
+interface PairingCodeProps {
+ code: string;
+}
+
+export const PairingCode: React.FC<PairingCodeProps> = ({ code }) => {
return (
-
- {chars.map((char, i) => (
+
+ {code.split("").map((char, i) => (
{char}
))}
-
+
);
-}
+};
+
+const PairingCode_ = styled("div")`
+ border-radius: 10px;
+ overflow: hidden;
-const Container = styled("div")`
font-size: 4rem;
font-weight: bold;
font-family: monospace;
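Aside: a minimal usage sketch (not part of this diff) of the renamed component; the prop change from `chars: string[]` to `code: string` moves the splitting into the component itself. `PairingScreen` and its placeholder text are illustrative assumptions:

    import { PairingCode } from "components/PairingCode";
    import React from "react";

    // Show the code once we have one, else a placeholder (illustrative only).
    export const PairingScreen: React.FC<{ code: string | undefined }> = ({ code }) =>
        code ? <PairingCode code={code} /> : <p>Waiting for a pairing code…</p>;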
diff --git a/web/apps/cast/src/components/PhotoAuditorium.tsx b/web/apps/cast/src/components/PhotoAuditorium.tsx
deleted file mode 100644
index c77c9e6cad0bf441363f2210a880a2093b32c0a7..0000000000000000000000000000000000000000
--- a/web/apps/cast/src/components/PhotoAuditorium.tsx
+++ /dev/null
@@ -1,66 +0,0 @@
-import { useEffect } from "react";
-
-interface PhotoAuditoriumProps {
- url: string;
- nextSlideUrl: string;
- showNextSlide: () => void;
-}
-export const PhotoAuditorium: React.FC = ({
- url,
- nextSlideUrl,
- showNextSlide,
-}) => {
- useEffect(() => {
- console.log("showing slide");
- const timeoutId = window.setTimeout(() => {
- console.log("showing next slide timer");
- showNextSlide();
- }, 10000);
-
- return () => {
- if (timeoutId) clearTimeout(timeoutId);
- };
- }, []);
-
- return (
-
-
-
-
-
-
- );
-};
diff --git a/web/apps/cast/src/constants/upload.ts b/web/apps/cast/src/constants/upload.ts
deleted file mode 100644
index 2ae1c43833f80d445aacbbc23abd9cfcdef2c8a8..0000000000000000000000000000000000000000
--- a/web/apps/cast/src/constants/upload.ts
+++ /dev/null
@@ -1,13 +0,0 @@
-export const RAW_FORMATS = [
- "heic",
- "rw2",
- "tiff",
- "arw",
- "cr3",
- "cr2",
- "raf",
- "nef",
- "psd",
- "dng",
- "tif",
-];
diff --git a/web/apps/cast/src/pages/_app.tsx b/web/apps/cast/src/pages/_app.tsx
index 99b047d4169deafb412f903f9553c53fad262c48..d85ac054227f1c19b6600e8fee07d19e71d9ce42 100644
--- a/web/apps/cast/src/pages/_app.tsx
+++ b/web/apps/cast/src/pages/_app.tsx
@@ -1,4 +1,5 @@
import { CustomHead } from "@/next/components/Head";
+import { disableDiskLogs } from "@/next/log";
import { logUnhandledErrorsAndRejections } from "@/next/log-web";
import { APPS, APP_TITLES } from "@ente/shared/apps/constants";
import { getTheme } from "@ente/shared/themes";
@@ -11,6 +12,7 @@ import "styles/global.css";
export default function App({ Component, pageProps }: AppProps) {
useEffect(() => {
+ disableDiskLogs();
logUnhandledErrorsAndRejections(true);
return () => logUnhandledErrorsAndRejections(false);
}, []);
diff --git a/web/apps/cast/src/pages/index.tsx b/web/apps/cast/src/pages/index.tsx
index b12bf1e765c755abbea8164677c3578b9fedb351..37fcf3d4beee5af1774c99003096794a97a039bb 100644
--- a/web/apps/cast/src/pages/index.tsx
+++ b/web/apps/cast/src/pages/index.tsx
@@ -1,238 +1,110 @@
import log from "@/next/log";
import EnteSpinner from "@ente/shared/components/EnteSpinner";
-import { boxSealOpen, toB64 } from "@ente/shared/crypto/internal/libsodium";
-import castGateway from "@ente/shared/network/cast";
-import LargeType from "components/LargeType";
-import _sodium from "libsodium-wrappers";
+import { styled } from "@mui/material";
+import { PairingCode } from "components/PairingCode";
import { useRouter } from "next/router";
import { useEffect, useState } from "react";
-import { storeCastData } from "services/cast/castService";
-import { useCastReceiver } from "../utils/useCastReceiver";
+import { readCastData, storeCastData } from "services/cast-data";
+import { getCastData, register } from "services/pair";
+import { advertiseOnChromecast } from "../services/chromecast";
-export default function PairingMode() {
- const [deviceCode, setDeviceCode] = useState("");
- const [publicKeyB64, setPublicKeyB64] = useState("");
- const [privateKeyB64, setPrivateKeyB64] = useState("");
- const [codePending, setCodePending] = useState(true);
- const [isCastReady, setIsCastReady] = useState(false);
+export default function Index() {
+    const [publicKeyB64, setPublicKeyB64] = useState<string | undefined>();
+    const [privateKeyB64, setPrivateKeyB64] = useState<string | undefined>();
+    const [pairingCode, setPairingCode] = useState<string | undefined>();
- const { cast } = useCastReceiver();
-
- useEffect(() => {
- init();
- }, []);
-
- const init = async () => {
- try {
- const keypair = await generateKeyPair();
- setPublicKeyB64(await toB64(keypair.publicKey));
- setPrivateKeyB64(await toB64(keypair.privateKey));
- } catch (e) {
- log.error("failed to generate keypair", e);
- throw e;
- }
- };
+ const router = useRouter();
useEffect(() => {
- if (!cast) {
- return;
- }
- if (isCastReady) {
- return;
- }
- const context = cast.framework.CastReceiverContext.getInstance();
-
- try {
- const options = new cast.framework.CastReceiverOptions();
- options.maxInactivity = 3600;
- options.customNamespaces = Object.assign({});
- options.customNamespaces["urn:x-cast:pair-request"] =
- cast.framework.system.MessageType.JSON;
-
- options.disableIdleTimeout = true;
- context.set;
-
- context.addCustomMessageListener(
- "urn:x-cast:pair-request",
- messageReceiveHandler,
- );
-
- // listen to close request and stop the context
- context.addEventListener(
- cast.framework.system.EventType.SENDER_DISCONNECTED,
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- (_) => {
- context.stop();
- },
- );
- context.start(options);
- setIsCastReady(true);
- } catch (e) {
- log.error("failed to create cast context", e);
- }
-
- return () => {
- // context.stop();
- };
- }, [cast]);
-
- const messageReceiveHandler = (message: {
- type: string;
- senderId: string;
- data: any;
- }) => {
- try {
- cast.framework.CastReceiverContext.getInstance().sendCustomMessage(
- "urn:x-cast:pair-request",
- message.senderId,
- {
- code: deviceCode,
- },
+ if (!pairingCode) {
+ register().then((r) => {
+ setPublicKeyB64(r.publicKeyB64);
+ setPrivateKeyB64(r.privateKeyB64);
+ setPairingCode(r.pairingCode);
+ });
+ } else {
+ advertiseOnChromecast(
+ () => pairingCode,
+ () => readCastData()?.collectionID,
);
- } catch (e) {
- log.error("failed to send message", e);
- }
- };
-
- const generateKeyPair = async () => {
- await _sodium.ready;
- const keypair = _sodium.crypto_box_keypair();
- return keypair;
- };
-
- const pollForCastData = async () => {
- if (codePending) {
- return;
}
- // see if we were acknowledged on the client.
- // the client will send us the encrypted payload using our public key that we advertised.
- // then, we can decrypt this and store all the necessary info locally so we can play the collection slideshow.
- let devicePayload = "";
- try {
- const encDastData = await castGateway.getCastData(`${deviceCode}`);
- if (!encDastData) return;
- devicePayload = encDastData;
- } catch (e) {
- setCodePending(true);
- init();
- return;
- }
-
- const decryptedPayload = await boxSealOpen(
- devicePayload,
- publicKeyB64,
- privateKeyB64,
- );
+ }, [pairingCode]);
- const decryptedPayloadObj = JSON.parse(atob(decryptedPayload));
+ useEffect(() => {
+ if (!publicKeyB64 || !privateKeyB64 || !pairingCode) return;
- return decryptedPayloadObj;
- };
+ const interval = setInterval(pollTick, 2000);
+ return () => clearInterval(interval);
+ }, [publicKeyB64, privateKeyB64, pairingCode]);
- const advertisePublicKey = async (publicKeyB64: string) => {
- // hey client, we exist!
+ const pollTick = async () => {
+ const registration = { publicKeyB64, privateKeyB64, pairingCode };
try {
- const codeValue = await castGateway.registerDevice(publicKeyB64);
- setDeviceCode(codeValue);
- setCodePending(false);
- } catch (e) {
- // schedule re-try after 5 seconds
- setTimeout(() => {
- init();
- }, 5000);
- return;
- }
- };
-
- const router = useRouter();
-
- useEffect(() => {
- console.log("useEffect for pairing called");
- if (deviceCode.length < 1 || !publicKeyB64 || !privateKeyB64) return;
-
- const interval = setInterval(async () => {
- console.log("polling for cast data");
- const data = await pollForCastData();
+ const data = await getCastData(registration);
if (!data) {
- console.log("no data");
+ // No one has connected yet.
return;
}
+
storeCastData(data);
- console.log("pushing slideshow");
await router.push("/slideshow");
- }, 1000);
-
- return () => {
- clearInterval(interval);
- };
- }, [deviceCode, publicKeyB64, privateKeyB64, codePending]);
-
- useEffect(() => {
- if (!publicKeyB64) return;
- advertisePublicKey(publicKeyB64);
- }, [publicKeyB64]);
+ } catch (e) {
+ // The pairing code becomes invalid after an hour, which will cause
+ // `getCastData` to fail. There might be other reasons this might
+            // `getCastData` to fail. There might be other reasons for failure
+            // too, but in all such cases it is reasonable to start again from
+            // the beginning.
+ setPairingCode(undefined);
+ }
+ };
return (
- <>
-
-
-
-
- Enter this code on ente to pair this TV
-
-
- {codePending ? (
-
- ) : (
- <>
-
- >
- )}
-
-
- Visit{" "}
-
- ente.io/cast
- {" "}
- for help
-
-
-
- >
+        <Container>
+            <h1>
+                Enter this code on Ente Photos to pair this screen
+            </h1>
+            {pairingCode ? <PairingCode code={pairingCode} /> : <Spinner />}
+            <p>
+                Visit{" "}
+                <a href="https://ente.io/cast">
+                    ente.io/cast
+                </a>{" "}
+                for help
+            </p>
+        </Container>
);
}
+
+const Container = styled("div")`
+ height: 100%;
+ display: flex;
+ flex-direction: column;
+ justify-content: center;
+ align-items: center;
+ text-align: center;
+
+ h1 {
+ font-weight: normal;
+ }
+
+ p {
+ font-size: 1.2rem;
+ }
+ a {
+ text-decoration: none;
+ color: #87cefa;
+ font-weight: bold;
+ }
+`;
+
+const Spinner: React.FC = () => (
+    <Spinner_>
+        <EnteSpinner />
+    </Spinner_>
+);
+
+const Spinner_ = styled("div")`
+    /* Roughly same height as the pairing code section to reduce layout shift */
+ margin-block: 1.7rem;
+`;
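Aside: a standalone TypeScript sketch (not part of this diff) of the poll-and-reset pattern the page above uses: `pollTick` runs every 2 seconds, and a failure clears the pairing code so registration starts afresh. Names here are illustrative:

    // Start polling; returns a function that stops it (suitable for a useEffect
    // cleanup). On failure the caller-supplied handler resets whatever state is
    // needed to restart pairing.
    export const pollEvery = (
        intervalMs: number,
        poll: () => Promise<void>,
        onFailure: (e: unknown) => void,
    ) => {
        const id = setInterval(() => {
            poll().catch(onFailure);
        }, intervalMs);
        return () => clearInterval(id);
    };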
diff --git a/web/apps/cast/src/pages/slideshow.tsx b/web/apps/cast/src/pages/slideshow.tsx
index 8554524b2c0dd78e28a669afc4b9251683fa061b..326b183d4ac47e2695c1ab21b2998cda96b73498 100644
--- a/web/apps/cast/src/pages/slideshow.tsx
+++ b/web/apps/cast/src/pages/slideshow.tsx
@@ -1,189 +1,192 @@
-import { FILE_TYPE } from "@/media/file-type";
import log from "@/next/log";
-import PairedSuccessfullyOverlay from "components/PairedSuccessfullyOverlay";
-import { PhotoAuditorium } from "components/PhotoAuditorium";
+import { ensure } from "@/utils/ensure";
+import { styled } from "@mui/material";
+import { FilledCircleCheck } from "components/FilledCircleCheck";
import { useRouter } from "next/router";
import { useEffect, useState } from "react";
-import {
- getCastCollection,
- getLocalFiles,
- syncPublicFiles,
-} from "services/cast/castService";
-import { Collection } from "types/collection";
-import { EnteFile } from "types/file";
-import { getPreviewableImage, isRawFileFromFileName } from "utils/file";
-
-const renderableFileURLCache = new Map();
+import { readCastData } from "services/cast-data";
+import { isChromecast } from "services/chromecast";
+import { imageURLGenerator } from "services/render";
export default function Slideshow() {
const [loading, setLoading] = useState(true);
- const [castToken, setCastToken] = useState("");
- const [castCollection, setCastCollection] = useState<
- Collection | undefined
- >();
- const [collectionFiles, setCollectionFiles] = useState([]);
- const [currentFileId, setCurrentFileId] = useState();
- const [currentFileURL, setCurrentFileURL] = useState();
- const [nextFileURL, setNextFileURL] = useState();
+    const [imageURL, setImageURL] = useState<string | undefined>();
+ const [isEmpty, setIsEmpty] = useState(false);
const router = useRouter();
- const syncCastFiles = async (token: string) => {
- try {
- console.log("syncCastFiles");
- const castToken = window.localStorage.getItem("castToken");
- const requestedCollectionKey =
- window.localStorage.getItem("collectionKey");
- const collection = await getCastCollection(
- castToken,
- requestedCollectionKey,
- );
- if (
- castCollection === undefined ||
- castCollection.updationTime !== collection.updationTime
- ) {
- setCastCollection(collection);
- await syncPublicFiles(token, collection, () => {});
- const files = await getLocalFiles(String(collection.id));
- setCollectionFiles(
- files.filter((file) => isFileEligibleForCast(file)),
- );
- }
- } catch (e) {
- log.error("error during sync", e);
- // go back to preview page
- router.push("/");
- }
- };
+ /** Go back to pairing page */
+ const pair = () => router.push("/");
useEffect(() => {
- if (castToken) {
- const intervalId = setInterval(() => {
- syncCastFiles(castToken);
- }, 10000);
- syncCastFiles(castToken);
+ let stop = false;
+
+ const loop = async () => {
+ try {
+ const urlGenerator = imageURLGenerator(ensure(readCastData()));
+ while (!stop) {
+ const { value: url, done } = await urlGenerator.next();
+ if (done || !url) {
+                    // No items in this collection can be shown.
+ setIsEmpty(true);
+ // Go back to pairing screen after 5 seconds.
+ setTimeout(pair, 5000);
+ return;
+ }
+
+ setImageURL(url);
+ setLoading(false);
+ }
+ } catch (e) {
+ log.error("Failed to prepare generator", e);
+ pair();
+ }
+ };
- return () => clearInterval(intervalId);
- }
- }, [castToken]);
+ void loop();
- const isFileEligibleForCast = (file: EnteFile) => {
- const fileType = file.metadata.fileType;
- if (fileType !== FILE_TYPE.IMAGE && fileType !== FILE_TYPE.LIVE_PHOTO)
- return false;
+ return () => {
+ stop = true;
+ };
+ }, []);
- if (file.info.fileSize > 100 * 1024 * 1024) return false;
+    if (loading) return <PairingComplete />;
+    if (isEmpty) return <NoItems />;
- if (isRawFileFromFileName(file.metadata.title)) return false;
+    return isChromecast() ? (
+        <SlideViewChromecast url={imageURL} />
+    ) : (
+        <SlideView url={imageURL} />
+    );
+}
- return true;
- };
+const PairingComplete: React.FC = () => {
+    return (
+        <Message>
+            <FilledCircleCheck />
+            <h2>Pairing Complete</h2>
+            <p>
+                We're preparing your album.
+                <br />
+                This should only take a few seconds.
+            </p>
+        </Message>
+    );
+};
- useEffect(() => {
- try {
- const castToken = window.localStorage.getItem("castToken");
- // Wait 2 seconds to ensure the green tick and the confirmation
- // message remains visible for at least 2 seconds before we start
- // the slideshow.
- const timeoutId = setTimeout(() => {
- setCastToken(castToken);
- }, 2000);
-
- return () => clearTimeout(timeoutId);
- } catch (e) {
- log.error("error during sync", e);
- router.push("/");
- }
- }, []);
+const Message = styled("div")`
+ display: flex;
+ flex-direction: column;
+ height: 100%;
+ justify-content: center;
+ align-items: center;
+ text-align: center;
- useEffect(() => {
- if (collectionFiles.length < 1) return;
- showNextSlide();
- }, [collectionFiles]);
-
- const showNextSlide = async () => {
- try {
- console.log("showNextSlide");
- const currentIndex = collectionFiles.findIndex(
- (file) => file.id === currentFileId,
- );
-
- console.log(
- "showNextSlide-index",
- currentIndex,
- collectionFiles.length,
- );
-
- const nextIndex = (currentIndex + 1) % collectionFiles.length;
- const nextNextIndex = (nextIndex + 1) % collectionFiles.length;
-
- console.log(
- "showNextSlide-nextIndex and nextNextIndex",
- nextIndex,
- nextNextIndex,
- );
-
- const nextFile = collectionFiles[nextIndex];
- const nextNextFile = collectionFiles[nextNextIndex];
-
- let nextURL = renderableFileURLCache.get(nextFile.id);
- let nextNextURL = renderableFileURLCache.get(nextNextFile.id);
-
- if (!nextURL) {
- try {
- console.log("nextURL doesn't exist yet");
- const blob = await getPreviewableImage(nextFile, castToken);
- console.log("nextURL blobread");
- const url = URL.createObjectURL(blob);
- console.log("nextURL", url);
- renderableFileURLCache.set(nextFile.id, url);
- console.log("nextUrlCache set");
- nextURL = url;
- } catch (e) {
- console.log("error in nextUrl", e);
- return;
- }
- } else {
- console.log("nextURL already exists");
- }
+ line-height: 1.5rem;
- if (!nextNextURL) {
- try {
- console.log("nextNextURL doesn't exist yet");
- const blob = await getPreviewableImage(
- nextNextFile,
- castToken,
- );
- console.log("nextNextURL blobread");
- const url = URL.createObjectURL(blob);
- console.log("nextNextURL", url);
- renderableFileURLCache.set(nextNextFile.id, url);
- console.log("nextNextURCacheL set");
- nextNextURL = url;
- } catch (e) {
- console.log("error in nextNextURL", e);
- return;
- }
- } else {
- console.log("nextNextURL already exists");
- }
+ h2 {
+ margin-block-end: 0;
+ }
+`;
- setLoading(false);
- setCurrentFileId(nextFile.id);
- setCurrentFileURL(nextURL);
- setNextFileURL(nextNextURL);
- } catch (e) {
- console.log("error in showNextSlide", e);
- }
- };
+const NoItems: React.FC = () => {
+    return (
+        <Message>
+            <h2>Try another album</h2>
+            <p>
+                This album has no photos that can be shown here
+                <br />
+                Please try another album
+            </p>
+        </Message>
+    );
+};
- if (loading) return ;
+interface SlideViewProps {
+ /** The URL of the image to show. */
+ url: string;
+}
+const SlideView: React.FC<SlideViewProps> = ({ url }) => {
return (
-
+        <SlideView_ style={{ backgroundImage: `url(${url})` }}>
+            <img src={url} decoding="sync" alt="" />
+        </SlideView_>
);
-}
+};
+
+const SlideView_ = styled("div")`
+ width: 100%;
+ height: 100%;
+
+ background-size: cover;
+ background-position: center;
+ background-repeat: no-repeat;
+ background-blend-mode: multiply;
+ background-color: rgba(0, 0, 0, 0.5);
+
+ /* Smooth out the transition a bit.
+ *
+ * For the img itself, we set decoding="sync" to have it switch seamlessly.
+ * But there does not seem to be a way of setting decoding sync for the
+ * background image, and for large (multi-MB) images the background image
+ * switch is still visually non-atomic.
+ *
+ * As a workaround, add a long transition so that the background image
+ * transitions in a more "fade-to" manner. This effect might or might not be
+ * visually the best though.
+ *
+ * Does not work in Firefox, but that's fine, this is only a slight tweak,
+ * not a functional requirement.
+ */
+ transition: all 2s;
+
+ img {
+ width: 100%;
+ height: 100%;
+ backdrop-filter: blur(10px);
+ object-fit: contain;
+ }
+`;
+
+/**
+ * Variant of {@link SlideView} for use when we're running on Chromecast.
+ *
+ * Chromecast devices have trouble with
+ *
+ * backdrop-filter: blur(10px);
+ *
+ * So emulate a cheaper approximation for use on Chromecast.
+ */
+const SlideViewChromecast: React.FC<SlideViewProps> = ({ url }) => {
+    return (
+        <SlideViewChromecast_>
+            <img className="svc-bg" src={url} alt="" />
+            <img className="svc-content" src={url} decoding="sync" alt="" />
+        </SlideViewChromecast_>
+ );
+};
+
+const SlideViewChromecast_ = styled("div")`
+ width: 100%;
+ height: 100%;
+
+ /* We can't set opacity of background-image, so use a wrapper */
+ position: relative;
+ overflow: hidden;
+
+ img.svc-bg {
+ position: absolute;
+ left: 0;
+ top: 0;
+ width: 100%;
+ height: 100%;
+ object-fit: cover;
+ opacity: 0.1;
+ }
+
+ img.svc-content {
+ position: relative;
+ width: 100%;
+ height: 100%;
+ object-fit: contain;
+ }
+`;
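Aside: the slideshow above drives an async generator from a React effect, using a local `stop` flag so that an unmounted component stops consuming values. A distilled sketch of that pattern (not part of this diff; callers would memoize `makeGenerator`):

    import { useEffect, useState } from "react";

    export const useGeneratedValues = <T,>(makeGenerator: () => AsyncGenerator<T>) => {
        const [value, setValue] = useState<T | undefined>();

        useEffect(() => {
            let stop = false;
            void (async () => {
                for await (const v of makeGenerator()) {
                    if (stop) return;
                    setValue(v);
                }
            })();
            return () => {
                stop = true;
            };
        }, [makeGenerator]);

        return value;
    };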
diff --git a/web/apps/cast/src/services/cast-data.ts b/web/apps/cast/src/services/cast-data.ts
new file mode 100644
index 0000000000000000000000000000000000000000..587d1db323764478eb9233e91d1654ae9c92717c
--- /dev/null
+++ b/web/apps/cast/src/services/cast-data.ts
@@ -0,0 +1,41 @@
+export interface CastData {
+    /** The ID of the collection we are casting. */
+ collectionID: string;
+ /** A key to decrypt the collection we are casting. */
+ collectionKey: string;
+ /** A credential to use for fetching media files for this cast session. */
+ castToken: string;
+}
+
+/**
+ * Save the data received after pairing with a sender into local storage.
+ *
+ * We will read it back when we start the slideshow.
+ */
+export const storeCastData = (payload: unknown) => {
+ if (!payload || typeof payload != "object")
+ throw new Error("Unexpected cast data");
+
+ // Iterate through all the keys of the payload object and save them to
+ // localStorage. We don't validate here, we'll validate when we read these
+ // values back in `readCastData`.
+ for (const key in payload) {
+ window.localStorage.setItem(key, payload[key]);
+ }
+};
+
+/**
+ * Read back the cast data we got after pairing.
+ *
+ * Sibling of {@link storeCastData}. It returns undefined if the expected data
+ * is not present in localStorage.
+ */
+export const readCastData = (): CastData | undefined => {
+ const collectionID = localStorage.getItem("collectionID");
+ const collectionKey = localStorage.getItem("collectionKey");
+ const castToken = localStorage.getItem("castToken");
+
+ return collectionID && collectionKey && castToken
+ ? { collectionID, collectionKey, castToken }
+ : undefined;
+};
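Aside: how the two halves fit together in this diff: the pairing page persists the decrypted payload with `storeCastData`, and the slideshow page reads it back with `readCastData`. A condensed sketch:

    import { readCastData, storeCastData } from "services/cast-data";

    // On the pairing page, after decrypting the payload sent by the sender:
    const onPaired = (payload: unknown) => storeCastData(payload);

    // On the slideshow page, before starting playback:
    const castDataOrPairAgain = () => {
        const castData = readCastData();
        if (!castData) throw new Error("Pairing data missing or incomplete");
        return castData; // { collectionID, collectionKey, castToken }
    };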
diff --git a/web/apps/cast/src/services/cast/castService.ts b/web/apps/cast/src/services/cast/castService.ts
deleted file mode 100644
index 84636d3a15b7b125b3b988ab4a4102ee2abe296d..0000000000000000000000000000000000000000
--- a/web/apps/cast/src/services/cast/castService.ts
+++ /dev/null
@@ -1,304 +0,0 @@
-import log from "@/next/log";
-import ComlinkCryptoWorker from "@ente/shared/crypto";
-import { CustomError, parseSharingErrorCodes } from "@ente/shared/error";
-import HTTPService from "@ente/shared/network/HTTPService";
-import { getEndpoint } from "@ente/shared/network/api";
-import localForage from "@ente/shared/storage/localForage";
-import { Collection, CollectionPublicMagicMetadata } from "types/collection";
-import { EncryptedEnteFile, EnteFile } from "types/file";
-import { decryptFile, mergeMetadata, sortFiles } from "utils/file";
-
-export interface SavedCollectionFiles {
- collectionLocalID: string;
- files: EnteFile[];
-}
-const ENDPOINT = getEndpoint();
-const COLLECTION_FILES_TABLE = "collection-files";
-const COLLECTIONS_TABLE = "collections";
-
-const getLastSyncKey = (collectionUID: string) => `${collectionUID}-time`;
-
-export const getLocalFiles = async (
- collectionUID: string,
-): Promise => {
- const localSavedcollectionFiles =
- (await localForage.getItem(
- COLLECTION_FILES_TABLE,
- )) || [];
- const matchedCollection = localSavedcollectionFiles.find(
- (item) => item.collectionLocalID === collectionUID,
- );
- return matchedCollection?.files || [];
-};
-
-const savecollectionFiles = async (
- collectionUID: string,
- files: EnteFile[],
-) => {
- const collectionFiles =
- (await localForage.getItem(
- COLLECTION_FILES_TABLE,
- )) || [];
- await localForage.setItem(
- COLLECTION_FILES_TABLE,
- dedupeCollectionFiles([
- { collectionLocalID: collectionUID, files },
- ...collectionFiles,
- ]),
- );
-};
-
-export const getLocalCollections = async (collectionKey: string) => {
- const localCollections =
- (await localForage.getItem(COLLECTIONS_TABLE)) || [];
- const collection =
- localCollections.find(
- (localSavedPublicCollection) =>
- localSavedPublicCollection.key === collectionKey,
- ) || null;
- return collection;
-};
-
-const saveCollection = async (collection: Collection) => {
- const collections =
- (await localForage.getItem(COLLECTIONS_TABLE)) ?? [];
- await localForage.setItem(
- COLLECTIONS_TABLE,
- dedupeCollections([collection, ...collections]),
- );
-};
-
-const dedupeCollections = (collections: Collection[]) => {
- const keySet = new Set([]);
- return collections.filter((collection) => {
- if (!keySet.has(collection.key)) {
- keySet.add(collection.key);
- return true;
- } else {
- return false;
- }
- });
-};
-
-const dedupeCollectionFiles = (collectionFiles: SavedCollectionFiles[]) => {
- const keySet = new Set([]);
- return collectionFiles.filter(({ collectionLocalID: collectionUID }) => {
- if (!keySet.has(collectionUID)) {
- keySet.add(collectionUID);
- return true;
- } else {
- return false;
- }
- });
-};
-
-async function getSyncTime(collectionUID: string): Promise {
- const lastSyncKey = getLastSyncKey(collectionUID);
- const lastSyncTime = await localForage.getItem(lastSyncKey);
- return lastSyncTime ?? 0;
-}
-
-const updateSyncTime = async (collectionUID: string, time: number) =>
- await localForage.setItem(getLastSyncKey(collectionUID), time);
-
-export const syncPublicFiles = async (
- token: string,
- collection: Collection,
- setPublicFiles: (files: EnteFile[]) => void,
-) => {
- try {
- let files: EnteFile[] = [];
- const sortAsc = collection?.pubMagicMetadata?.data.asc ?? false;
- const collectionUID = String(collection.id);
- const localFiles = await getLocalFiles(collectionUID);
- files = [...files, ...localFiles];
- try {
- const lastSyncTime = await getSyncTime(collectionUID);
- if (collection.updationTime === lastSyncTime) {
- return sortFiles(files, sortAsc);
- }
- const fetchedFiles = await fetchFiles(
- token,
- collection,
- lastSyncTime,
- files,
- setPublicFiles,
- );
-
- files = [...files, ...fetchedFiles];
- const latestVersionFiles = new Map();
- files.forEach((file) => {
- const uid = `${file.collectionID}-${file.id}`;
- if (
- !latestVersionFiles.has(uid) ||
- latestVersionFiles.get(uid).updationTime < file.updationTime
- ) {
- latestVersionFiles.set(uid, file);
- }
- });
- files = [];
- // eslint-disable-next-line @typescript-eslint/no-unused-vars
- for (const [_, file] of latestVersionFiles) {
- if (file.isDeleted) {
- continue;
- }
- files.push(file);
- }
- await savecollectionFiles(collectionUID, files);
- await updateSyncTime(collectionUID, collection.updationTime);
- setPublicFiles([...sortFiles(mergeMetadata(files), sortAsc)]);
- } catch (e) {
- const parsedError = parseSharingErrorCodes(e);
- log.error("failed to sync shared collection files", e);
- if (parsedError.message === CustomError.TOKEN_EXPIRED) {
- throw e;
- }
- }
- return [...sortFiles(mergeMetadata(files), sortAsc)];
- } catch (e) {
- log.error("failed to get local or sync shared collection files", e);
- throw e;
- }
-};
-
-const fetchFiles = async (
- castToken: string,
- collection: Collection,
- sinceTime: number,
- files: EnteFile[],
- setPublicFiles: (files: EnteFile[]) => void,
-): Promise => {
- try {
- let decryptedFiles: EnteFile[] = [];
- let time = sinceTime;
- let resp;
- const sortAsc = collection?.pubMagicMetadata?.data.asc ?? false;
- do {
- if (!castToken) {
- break;
- }
- resp = await HTTPService.get(
- `${ENDPOINT}/cast/diff`,
- {
- sinceTime: time,
- },
- {
- "Cache-Control": "no-cache",
- "X-Cast-Access-Token": castToken,
- },
- );
- decryptedFiles = [
- ...decryptedFiles,
- ...(await Promise.all(
- resp.data.diff.map(async (file: EncryptedEnteFile) => {
- if (!file.isDeleted) {
- return await decryptFile(file, collection.key);
- } else {
- return file;
- }
- }) as Promise[],
- )),
- ];
-
- if (resp.data.diff.length) {
- time = resp.data.diff.slice(-1)[0].updationTime;
- }
- setPublicFiles(
- sortFiles(
- mergeMetadata(
- [...(files || []), ...decryptedFiles].filter(
- (item) => !item.isDeleted,
- ),
- ),
- sortAsc,
- ),
- );
- } while (resp.data.hasMore);
- return decryptedFiles;
- } catch (e) {
- log.error("Get cast files failed", e);
- throw e;
- }
-};
-
-export const getCastCollection = async (
- castToken: string,
- collectionKey: string,
-): Promise => {
- try {
- const resp = await HTTPService.get(`${ENDPOINT}/cast/info`, null, {
- "Cache-Control": "no-cache",
- "X-Cast-Access-Token": castToken,
- });
- const fetchedCollection = resp.data.collection;
-
- const cryptoWorker = await ComlinkCryptoWorker.getInstance();
-
- const collectionName = (fetchedCollection.name =
- fetchedCollection.name ||
- (await cryptoWorker.decryptToUTF8(
- fetchedCollection.encryptedName,
- fetchedCollection.nameDecryptionNonce,
- collectionKey,
- )));
-
- let collectionPublicMagicMetadata: CollectionPublicMagicMetadata;
- if (fetchedCollection.pubMagicMetadata?.data) {
- collectionPublicMagicMetadata = {
- ...fetchedCollection.pubMagicMetadata,
- data: await cryptoWorker.decryptMetadata(
- fetchedCollection.pubMagicMetadata.data,
- fetchedCollection.pubMagicMetadata.header,
- collectionKey,
- ),
- };
- }
-
- const collection = {
- ...fetchedCollection,
- name: collectionName,
- key: collectionKey,
- pubMagicMetadata: collectionPublicMagicMetadata,
- };
- await saveCollection(collection);
- return collection;
- } catch (e) {
- log.error("failed to get cast collection", e);
- throw e;
- }
-};
-
-export const removeCollection = async (
- collectionUID: string,
- collectionKey: string,
-) => {
- const collections =
- (await localForage.getItem(COLLECTIONS_TABLE)) || [];
- await localForage.setItem(
- COLLECTIONS_TABLE,
- collections.filter((collection) => collection.key !== collectionKey),
- );
- await removeCollectionFiles(collectionUID);
-};
-
-export const removeCollectionFiles = async (collectionUID: string) => {
- await localForage.removeItem(getLastSyncKey(collectionUID));
- const collectionFiles =
- (await localForage.getItem(
- COLLECTION_FILES_TABLE,
- )) ?? [];
- await localForage.setItem(
- COLLECTION_FILES_TABLE,
- collectionFiles.filter(
- (collectionFiles) =>
- collectionFiles.collectionLocalID !== collectionUID,
- ),
- );
-};
-
-export const storeCastData = (payloadObj: Object) => {
- // iterate through all the keys in the payload object and set them in localStorage.
- for (const key in payloadObj) {
- window.localStorage.setItem(key, payloadObj[key]);
- }
-};
diff --git a/web/apps/cast/src/services/castDownloadManager.ts b/web/apps/cast/src/services/castDownloadManager.ts
deleted file mode 100644
index 2314ed54ea4fa620b27a650ba8ffcfa09f8faa79..0000000000000000000000000000000000000000
--- a/web/apps/cast/src/services/castDownloadManager.ts
+++ /dev/null
@@ -1,103 +0,0 @@
-import { FILE_TYPE } from "@/media/file-type";
-import ComlinkCryptoWorker from "@ente/shared/crypto";
-import { CustomError } from "@ente/shared/error";
-import HTTPService from "@ente/shared/network/HTTPService";
-import { getCastFileURL } from "@ente/shared/network/api";
-import { EnteFile } from "types/file";
-import { generateStreamFromArrayBuffer } from "utils/file";
-
-class CastDownloadManager {
- async downloadFile(castToken: string, file: EnteFile) {
- const cryptoWorker = await ComlinkCryptoWorker.getInstance();
-
- if (
- file.metadata.fileType === FILE_TYPE.IMAGE ||
- file.metadata.fileType === FILE_TYPE.LIVE_PHOTO
- ) {
- const resp = await HTTPService.get(
- getCastFileURL(file.id),
- null,
- {
- "X-Cast-Access-Token": castToken,
- },
- { responseType: "arraybuffer" },
- );
- if (typeof resp.data === "undefined") {
- throw Error(CustomError.REQUEST_FAILED);
- }
- const decrypted = await cryptoWorker.decryptFile(
- new Uint8Array(resp.data),
- await cryptoWorker.fromB64(file.file.decryptionHeader),
- file.key,
- );
- return generateStreamFromArrayBuffer(decrypted);
- }
- const resp = await fetch(getCastFileURL(file.id), {
- headers: {
- "X-Cast-Access-Token": castToken,
- },
- });
- const reader = resp.body.getReader();
-
- const stream = new ReadableStream({
- async start(controller) {
- const decryptionHeader = await cryptoWorker.fromB64(
- file.file.decryptionHeader,
- );
- const fileKey = await cryptoWorker.fromB64(file.key);
- const { pullState, decryptionChunkSize } =
- await cryptoWorker.initChunkDecryption(
- decryptionHeader,
- fileKey,
- );
- let data = new Uint8Array();
- // The following function handles each data chunk
- function push() {
- // "done" is a Boolean and value a "Uint8Array"
- reader.read().then(async ({ done, value }) => {
- // Is there more data to read?
- if (!done) {
- const buffer = new Uint8Array(
- data.byteLength + value.byteLength,
- );
- buffer.set(new Uint8Array(data), 0);
- buffer.set(new Uint8Array(value), data.byteLength);
- if (buffer.length > decryptionChunkSize) {
- const fileData = buffer.slice(
- 0,
- decryptionChunkSize,
- );
- const { decryptedData } =
- await cryptoWorker.decryptFileChunk(
- fileData,
- pullState,
- );
- controller.enqueue(decryptedData);
- data = buffer.slice(decryptionChunkSize);
- } else {
- data = buffer;
- }
- push();
- } else {
- if (data) {
- const { decryptedData } =
- await cryptoWorker.decryptFileChunk(
- data,
- pullState,
- );
- controller.enqueue(decryptedData);
- data = null;
- }
- controller.close();
- }
- });
- }
-
- push();
- },
- });
- return stream;
- }
-}
-
-export default new CastDownloadManager();
diff --git a/web/apps/cast/src/services/chromecast.ts b/web/apps/cast/src/services/chromecast.ts
new file mode 100644
index 0000000000000000000000000000000000000000..e7539e8c51b2aa6ba1f0d33e8e3187d2c01d12c7
--- /dev/null
+++ b/web/apps/cast/src/services/chromecast.ts
@@ -0,0 +1,227 @@
+/// <reference types="chromecast-caf-receiver" />
+
+import log from "@/next/log";
+
+export type Cast = typeof cast;
+
+/**
+ * A holder for the "cast" global object exposed by the Chromecast SDK,
+ * alongwith auxiliary state we need around it.
+ */
+class CastReceiver {
+ /**
+ * A reference to the `cast` global object that the Chromecast Web Receiver
+ * SDK attaches to the window.
+ *
+ * https://developers.google.com/cast/docs/web_receiver/basic
+ */
+ cast: Cast | undefined;
+ /**
+     * A promise used to ensure that multiple concurrent requests to load the
+     * SDK get funneled through a single load.
+     */
+    loader: Promise<Cast> | undefined;
+ /**
+ * True if we have already attached listeners (i.e. if we have "started" the
+ * Chromecast SDK).
+ *
+ * Note that "stopping" the Chromecast SDK causes the Chromecast device to
+ * reload our tab, so this is a one way flag. The stop is something that'll
+ * only get triggered when we're actually running on a Chromecast since it
+ * always happens in response to a message handler.
+ */
+ haveStarted = false;
+ /**
+ * Cached result of the isChromecast test.
+ */
+ isChromecast: boolean | undefined;
+ /**
+ * A callback to invoke to get the pairing code when we get a new incoming
+ * pairing request.
+ */
+ pairingCode: (() => string | undefined) | undefined;
+ /**
+ * A callback to invoke to get the ID of the collection that is currently
+ * being shown (if any).
+ */
+ collectionID: (() => string | undefined) | undefined;
+}
+
+/** Singleton instance of {@link CastReceiver}. */
+const castReceiver = new CastReceiver();
+
+/**
+ * Listen for incoming messages on the given {@link cast} receiver, replying to
+ * each of them with a pairing code obtained using the given {@link pairingCode}
+ * callback. Phase 2 of the pairing protocol.
+ *
+ * Calling this function multiple times is fine. The first time around, the
+ * Chromecast SDK will be loaded and will start listening. Subsequently, each
+ * time this is called, we'll update the callbacks, but otherwise just return
+ * immediately (letting the already attached listeners do their thing).
+ *
+ * @param pairingCode A callback to invoke to get the pairing code when we get a
+ * new incoming pairing request.
+ *
+ * @param collectionID A callback to invoke to get the ID of the collection that
+ * is currently being shown (if any).
+ *
+ * See: [Note: Pairing protocol].
+ */
+export const advertiseOnChromecast = (
+ pairingCode: () => string | undefined,
+ collectionID: () => string | undefined,
+) => {
+ // Always update the callbacks.
+ castReceiver.pairingCode = pairingCode;
+ castReceiver.collectionID = collectionID;
+
+ // No-op if we're already running.
+ if (castReceiver.haveStarted) return;
+
+ void loadingChromecastSDKIfNeeded().then((cast) => advertiseCode(cast));
+};
+
+/**
+ * Load the Chromecast Web Receiver SDK and return a reference to the `cast`
+ * global object that the SDK attaches to the window.
+ *
+ * Calling this function multiple times is fine; once the Chromecast SDK has
+ * been loaded, it'll thereafter always return a reference to the same object.
+ */
+const loadingChromecastSDKIfNeeded = async (): Promise<Cast> => {
+ if (castReceiver.cast) return castReceiver.cast;
+ if (castReceiver.loader) return await castReceiver.loader;
+
+ castReceiver.loader = new Promise((resolve) => {
+ const script = document.createElement("script");
+ script.src =
+ "https://www.gstatic.com/cast/sdk/libs/caf_receiver/v3/cast_receiver_framework.js";
+ script.addEventListener("load", () => {
+ castReceiver.cast = cast;
+ resolve(cast);
+ });
+ document.body.appendChild(script);
+ });
+
+ return await castReceiver.loader;
+};
+
+const advertiseCode = (cast: Cast) => {
+ if (castReceiver.haveStarted) {
+ // Multiple attempts raced to completion, ignore all but the first.
+ return;
+ }
+
+ castReceiver.haveStarted = true;
+
+ // Prepare the Chromecast "context".
+ const context = cast.framework.CastReceiverContext.getInstance();
+ const namespace = "urn:x-cast:pair-request";
+
+ const options = new cast.framework.CastReceiverOptions();
+ // We don't use the media features of the Cast SDK.
+ options.skipPlayersLoad = true;
+ // Do not stop the casting if the receiver is unreachable. A user should be
+ // able to start a cast on their phone and then put it away, leaving the
+ // cast running on their big screen.
+ options.disableIdleTimeout = true;
+
+ type ListenerProps = {
+ senderId: string;
+ data: unknown;
+ };
+
+ // Reply with the code that we have if anyone asks over Chromecast.
+ const incomingMessageListener = ({ senderId, data }: ListenerProps) => {
+        // The collection ID that is currently paired (if any).
+ const pairedCollectionID = castReceiver.collectionID?.();
+
+ // The collection ID in the request (if any).
+ const collectionID =
+ data &&
+ typeof data == "object" &&
+ typeof data["collectionID"] == "string"
+ ? data["collectionID"]
+ : undefined;
+
+ // If the request does not have a collectionID (or if we're not showing
+ // anything currently), forego this check.
+
+ if (collectionID && pairedCollectionID) {
+ // If we get another connection request for a _different_ collection
+ // ID, stop the app to allow the second device to reconnect using a
+ // freshly generated pairing code.
+ if (pairedCollectionID != collectionID) {
+ log.info(`request for a new collection ${collectionID}`);
+ context.stop();
+ } else {
+ // Duplicate request for same collection that we're already
+ // showing. Ignore.
+ }
+ return;
+ }
+
+ const code = castReceiver.pairingCode?.();
+ if (!code) {
+ // No code, but if we're already showing a collection, then ignore.
+ if (pairedCollectionID) return;
+
+ // Our caller waits until it has a pairing code before it calls
+ // `advertiseCode`, but there is still an edge case where we can
+ // find ourselves without a pairing code:
+ //
+ // 1. The current pairing code expires. We start the process to get
+ // a new one.
+ //
+ // 2. But before that happens, someone connects.
+ //
+ // The window where this can happen is short, so if we do find
+ // ourselves in this scenario, just shutdown.
+ log.error("got pairing request when refreshing pairing codes");
+ context.stop();
+ return;
+ }
+
+ context.sendCustomMessage(namespace, senderId, { code });
+ };
+
+ context.addCustomMessageListener(
+ namespace,
+ // We need to cast, the `senderId` is present in the message we get but
+ // not present in the TypeScript type.
+ incomingMessageListener as unknown as SystemEventHandler,
+ );
+
+ // Close the (chromecast) tab if the sender disconnects.
+ //
+ // Chromecast does a "shutdown" of our cast app when we call `context.stop`.
+ // This translates into it closing the tab where it is showing our app.
+ context.addEventListener(
+ cast.framework.system.EventType.SENDER_DISCONNECTED,
+ () => context.stop(),
+ );
+
+ // Start listening for Chromecast connections.
+ context.start(options);
+};
+
+/**
+ * Return true if we're running on a Chromecast device.
+ *
+ * This allows changing our app's behaviour when we're running on Chromecast.
+ * Such checks are needed because during our testing we found that in practice,
+ * some processing is too heavy for Chromecast hardware (we tested with a 2nd
+ * gen device, this might not be true for newer variants).
+ *
+ * The result is computed lazily the first time {@link isChromecast} is called,
+ * and is then cached at the module level to avoid passing it around.
+ */
+export const isChromecast = () => {
+ let isCast = castReceiver.isChromecast;
+ if (isCast === undefined) {
+ isCast = window.navigator.userAgent.includes("CrKey");
+ castReceiver.isChromecast = isCast;
+ }
+ return isCast;
+};
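Aside: `loadingChromecastSDKIfNeeded` above uses a single-flight loader: the first caller kicks off the script load, concurrent callers await the same promise, and later callers get the cached result. A generic sketch of that pattern (not part of this diff; the marker object stands in for the `cast` global):

    let cached: object | undefined;
    let loading: Promise<object> | undefined;

    export const loadScriptOnce = (src: string): Promise<object> => {
        if (cached) return Promise.resolve(cached);
        if (loading) return loading;

        loading = new Promise((resolve) => {
            const script = document.createElement("script");
            script.src = src;
            script.addEventListener("load", () => {
                // The real code grabs the `cast` global that the SDK attaches to
                // window; a marker object stands in for it here.
                cached = { loadedFrom: src };
                resolve(cached);
            });
            document.body.appendChild(script);
        });

        return loading;
    };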
diff --git a/web/apps/cast/src/services/detect-type.ts b/web/apps/cast/src/services/detect-type.ts
index 187e19df841742529e027e3cf5e34c6c8ad1d2e6..c43529aaedc4e5b231181bb7777ae711e65aef37 100644
--- a/web/apps/cast/src/services/detect-type.ts
+++ b/web/apps/cast/src/services/detect-type.ts
@@ -9,6 +9,9 @@ import FileType from "file-type";
*
* It first peeks into the file's initial contents to detect the MIME type. If
* that doesn't give any results, it tries to deduce it from the file's name.
+ *
+ * For the list of returned extensions, see (for our installed version):
+ * https://github.com/sindresorhus/file-type/blob/main/core.d.ts
*/
export const detectMediaMIMEType = async (file: File): Promise<string | undefined> => {
const chunkSizeForTypeDetection = 4100;
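Aside: a minimal usage sketch (not part of this diff), assuming `detectMediaMIMEType` resolves with the detected MIME type or `undefined` when neither the content sniff nor the file name yields anything:

    import { detectMediaMIMEType } from "services/detect-type";

    const logMIMEType = async (file: File) => {
        const mime = await detectMediaMIMEType(file);
        console.log(`${file.name}: ${mime ?? "unknown"}`);
    };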
diff --git a/web/apps/cast/src/services/pair.ts b/web/apps/cast/src/services/pair.ts
new file mode 100644
index 0000000000000000000000000000000000000000..36b54cf7595225747fb03518b45bb5df88aec236
--- /dev/null
+++ b/web/apps/cast/src/services/pair.ts
@@ -0,0 +1,134 @@
+import log from "@/next/log";
+import { wait } from "@/utils/promise";
+import { boxSealOpen, toB64 } from "@ente/shared/crypto/internal/libsodium";
+import castGateway from "@ente/shared/network/cast";
+import _sodium from "libsodium-wrappers";
+
+export interface Registration {
+ /** A pairing code shown on the screen. A client can use this to connect. */
+ pairingCode: string;
+ /** The public part of the keypair we registered with the server. */
+ publicKeyB64: string;
+ /** The private part of the keypair we registered with the server. */
+ privateKeyB64: string;
+}
+
+/**
+ * Register a keypair with the server and return a pairing code that can be used
+ * to connect to us. Phase 1 of the pairing protocol.
+ *
+ * [Note: Pairing protocol]
+ *
+ * The Chromecast Framework (represented here by our handle to the Chromecast
+ * Web SDK, {@link cast}) itself is used only for the initial handshake; none of
+ * the data, even in encrypted form, passes over it thereafter.
+ *
+ * The pairing happens in two phases:
+ *
+ * Phase 1 - {@link register}
+ *
+ * 1. We (the receiver) generate a public/private keypair and register the
+ * public part of it with museum.
+ *
+ * 2. Museum gives us a pairing "code" in return. Show this on the screen.
+ *
+ * Phase 2 - {@link advertiseCode}
+ *
+ * There are two ways the client can connect - either by sending us a blank
+ * message over the Chromecast protocol (to which we'll reply with the pairing
+ * code), or by the user manually entering the pairing code on their screen.
+ *
+ * 3. Listen for incoming messages over the Chromecast connection.
+ *
+ * 4. The client (our Web or mobile app) will connect using the "sender"
+ * Chromecast SDK. This will result in a bi-directional channel between us
+ * ("receiver") and the Ente client app ("sender").
+ *
+ * 5. Thereafter, if at any time the sender disconnects, close the Chromecast
+ * context. This effectively shuts us down, causing the entire page to get
+ * reloaded.
+ *
+ * 6. After connecting, the sender sends an (empty) message. We reply by sending
+ * them a message containing the pairing code. This exchange is the only data
+ * that traverses over the Chromecast connection.
+ *
+ * Once the client gets the pairing code (via Chromecast or manual entry),
+ * they'll let museum know. So in parallel with Phase 2, we perform Phase 3.
+ *
+ * Phase 3 - {@link getCastData} in a setInterval.
+ *
+ * 7. Keep polling museum to ask it if anyone has claimed the code we vended
+ *    out and used it to send us a payload encrypted using our public key.
+ *
+ * 8. When that happens, decrypt that data with our private key, and return this
+ * payload. It is a JSON object that contains the data we need to initiate a
+ * slideshow for a particular Ente collection.
+ *
+ * Phase 1 (Steps 1 and 2) are done by the {@link register} function, which
+ * returns a {@link Registration}.
+ *
+ * At this time we start showing the pairing code on the UI, and start phase 2,
+ * {@link advertiseCode} to vend out the pairing code to Chromecast connections.
+ *
+ * In parallel, we start Phase 3, calling {@link getCastData} in a loop. Once we
+ * get a response, we decrypt it to get the data we need to start the slideshow.
+ */
+export const register = async (): Promise => {
+ // Generate keypair.
+ const keypair = await generateKeyPair();
+ const publicKeyB64 = await toB64(keypair.publicKey);
+ const privateKeyB64 = await toB64(keypair.privateKey);
+
+ // Register keypair with museum to get a pairing code.
+ let pairingCode: string;
+ // eslint has fixed this spurious warning, but we're not on the latest
+ // version yet, so add a disable.
+ // https://github.com/eslint/eslint/pull/18286
+ /* eslint-disable no-constant-condition */
+ while (true) {
+ try {
+ pairingCode = await castGateway.registerDevice(publicKeyB64);
+ } catch (e) {
+ log.error("Failed to register public key with server", e);
+ }
+ if (pairingCode) break;
+ // Schedule retry after 10 seconds.
+ await wait(10000);
+ }
+
+ return { pairingCode, publicKeyB64, privateKeyB64 };
+};
+
+/**
+ * Ask museum if anyone has sent a (encrypted) payload corresponding to the
+ * given pairing code. If so, decrypt it using our private key and return the
+ * JSON payload. Phase 3 of the pairing protocol.
+ *
+ * Returns `undefined` if there hasn't been any data obtained yet.
+ *
+ * See: [Note: Pairing protocol].
+ */
+export const getCastData = async (registration: Registration) => {
+ const { pairingCode, publicKeyB64, privateKeyB64 } = registration;
+
+ // The client will send us the encrypted payload using our public key that
+ // we registered with museum.
+ const encryptedCastData = await castGateway.getCastData(pairingCode);
+ if (!encryptedCastData) return;
+
+ // Decrypt it using the private key of the pair and return the plaintext
+ // payload, which'll be a JSON object containing the data we need to start a
+ // slideshow for some collection.
+ const decryptedCastData = await boxSealOpen(
+ encryptedCastData,
+ publicKeyB64,
+ privateKeyB64,
+ );
+
+ return JSON.parse(atob(decryptedCastData));
+};
+
+const generateKeyPair = async () => {
+ await _sodium.ready;
+ return _sodium.crypto_box_keypair();
+};
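Aside: a condensed, framework-free TypeScript sketch (not part of this diff) showing how the pairing page in this diff wires the three phases together using the functions defined here:

    import { readCastData, storeCastData } from "services/cast-data";
    import { advertiseOnChromecast } from "services/chromecast";
    import { getCastData, register } from "services/pair";
    import { wait } from "@/utils/promise";

    export const pairAndWaitForCastData = async () => {
        // Phase 1: register a keypair with museum and obtain a pairing code.
        const registration = await register();

        // Phase 2: reply with the code if a sender asks for it over Chromecast.
        advertiseOnChromecast(
            () => registration.pairingCode,
            () => readCastData()?.collectionID,
        );

        // Phase 3: poll museum until someone claims the code and sends us the
        // encrypted payload, then persist it for the slideshow page.
        while (true) {
            const data = await getCastData(registration);
            if (data) {
                storeCastData(data);
                return readCastData();
            }
            await wait(2000);
        }
    };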
diff --git a/web/apps/cast/src/services/render.ts b/web/apps/cast/src/services/render.ts
new file mode 100644
index 0000000000000000000000000000000000000000..79065c2afc88fabec9e74beebf21c784078bbb47
--- /dev/null
+++ b/web/apps/cast/src/services/render.ts
@@ -0,0 +1,352 @@
+import { FILE_TYPE } from "@/media/file-type";
+import { isHEICExtension, isNonWebImageFileExtension } from "@/media/formats";
+import { decodeLivePhoto } from "@/media/live-photo";
+import { createHEICConvertComlinkWorker } from "@/media/worker/heic-convert";
+import type { DedicatedHEICConvertWorker } from "@/media/worker/heic-convert.worker";
+import { nameAndExtension } from "@/next/file";
+import log from "@/next/log";
+import type { ComlinkWorker } from "@/next/worker/comlink-worker";
+import { shuffled } from "@/utils/array";
+import { wait } from "@/utils/promise";
+import ComlinkCryptoWorker from "@ente/shared/crypto";
+import { ApiError } from "@ente/shared/error";
+import HTTPService from "@ente/shared/network/HTTPService";
+import {
+ getCastFileURL,
+ getCastThumbnailURL,
+ getEndpoint,
+} from "@ente/shared/network/api";
+import type { AxiosResponse } from "axios";
+import type { CastData } from "services/cast-data";
+import { detectMediaMIMEType } from "services/detect-type";
+import {
+ EncryptedEnteFile,
+ EnteFile,
+ FileMagicMetadata,
+ FilePublicMagicMetadata,
+} from "types/file";
+import { isChromecast } from "./chromecast";
+
+/**
+ * If we're using HEIC conversion, then this variable caches the comlink web
+ * worker we're using to perform the actual conversion.
+ */
+let heicWorker: ComlinkWorker | undefined;
+
+/**
+ * An async generator function that loops through all the files in the
+ * collection, returning renderable image URLs to each that can be displayed in
+ * a slideshow.
+ *
+ * Each time it yields an object URL for the next slideshow image to show.
+ *
+ * If there are no renderable images in the collection, the sequence ends by
+ * yielding `{done: true}`.
+ *
+ * Otherwise when the generator reaches the end of the collection, it starts
+ * from the beginning again. So the sequence will continue indefinitely for
+ * non-empty collections.
+ *
+ * The generator ignores errors in the fetching and decoding of individual
+ * images in the collection, skipping the erroneous ones and moving onward to
+ * the next one.
+ *
+ * - It will however throw if there are errors when getting the collection
+ * itself. This can happen both the first time, or when we are about to loop
+ * around to the start of the collection.
+ *
+ * - It will also throw if three consecutive images fail.
+ *
+ * @param castData The collection to show and credentials to fetch the files
+ * within it.
+ */
+export const imageURLGenerator = async function* (castData: CastData) {
+ const { collectionKey, castToken } = castData;
+
+ /**
+ * Keep a FIFO queue of the URLs that we've vended out recently so that we
+ * can revoke those that are not being shown anymore.
+ */
+ const previousURLs: string[] = [];
+
+ /** Number of milliseconds to keep the slide on the screen. */
+ const slideDuration = 12000; /* 12 s */
+
+ /**
+ * Time when we last yielded.
+ *
+ * We use this to keep a roughly periodic spacing between yields that
+ * accounts for the time we spend fetching and processing the images.
+ */
+ let lastYieldTime = Date.now();
+
+ // The first time around, regress the lastYieldTime into the past so that
+ // we don't wait around too long for the first slide (we do want to wait a
+ // bit, for the user to see the checkmark animation as reassurance).
+ lastYieldTime -= slideDuration - 2500; /* wait at most 2.5 s */
+
+ /**
+ * Number of times we have caught an exception while trying to generate an
+ * image URL for individual files.
+ *
+ * When this happens three times consecutively, we throw.
+ */
+ let consecutiveFailures = 0;
+
+ while (true) {
+ const encryptedFiles = shuffled(
+ await getEncryptedCollectionFiles(castToken),
+ );
+
+ let haveEligibleFiles = false;
+
+ for (const encryptedFile of encryptedFiles) {
+ const file = await decryptEnteFile(encryptedFile, collectionKey);
+
+ if (!isFileEligible(file)) continue;
+
+ let url: string;
+ try {
+ url = await createRenderableURL(castToken, file);
+ consecutiveFailures = 0;
+ haveEligibleFiles = true;
+ } catch (e) {
+ consecutiveFailures += 1;
+ // 1, 2, bang!
+ if (consecutiveFailures == 3) throw e;
+
+ if (e instanceof ApiError && e.httpStatusCode == 401) {
+ // The token has expired. This can happen, e.g., if the user
+ // opens the dialog to cast again, causing the client to
+ // invalidate existing tokens.
+ //
+ // Rethrow the error, which will bring us back to the
+ // pairing page.
+ throw e;
+ }
+
+ // On all other errors (including temporary network issues), skip the
+ // file and move on to the next one.
+ log.error("Skipping unrenderable file", e);
+ await wait(100); /* Breathe */
+ continue;
+ }
+
+ // The last element of previousURLs is the URL that is currently
+ // being shown on screen.
+ //
+ // The second to last element is the one that was shown prior to that,
+ // and can now be safely revoked.
+ if (previousURLs.length > 1)
+ URL.revokeObjectURL(previousURLs.shift());
+
+ previousURLs.push(url);
+
+ const elapsedTime = Date.now() - lastYieldTime;
+ if (elapsedTime > 0 && elapsedTime < slideDuration)
+ await wait(slideDuration - elapsedTime);
+
+ lastYieldTime = Date.now();
+ yield url;
+ }
+
+ // This collection does not have any files that we can show.
+ if (!haveEligibleFiles) return;
+ }
+};
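
A minimal sketch of how a slideshow component might consume this generator; `showImage` is a hypothetical callback that swaps the image currently on screen.

const runSlideshow = async (
    castData: CastData,
    showImage: (url: string) => void,
) => {
    for await (const url of imageURLGenerator(castData)) {
        showImage(url);
    }
    // Reaching here means the generator returned, i.e. the collection had
    // nothing renderable.
};
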
+
+/**
+ * Fetch the list of non-deleted files in the given collection.
+ *
+ * The returned files are not decrypted yet, so their metadata will not be
+ * readable.
+ */
+const getEncryptedCollectionFiles = async (
+ castToken: string,
+): Promise<EncryptedEnteFile[]> => {
+ let files: EncryptedEnteFile[] = [];
+ let sinceTime = 0;
+ let resp: AxiosResponse;
+ do {
+ resp = await HTTPService.get(
+ `${getEndpoint()}/cast/diff`,
+ { sinceTime },
+ {
+ "Cache-Control": "no-cache",
+ "X-Cast-Access-Token": castToken,
+ },
+ );
+ const diff = resp.data.diff;
+ files = files.concat(diff.filter((file: EnteFile) => !file.isDeleted));
+ sinceTime = diff.reduce(
+ (max: number, file: EnteFile) => Math.max(max, file.updationTime),
+ sinceTime,
+ );
+ } while (resp.data.hasMore);
+ return files;
+};
+
+/**
+ * Decrypt the given {@link EncryptedEnteFile}, returning a {@link EnteFile}.
+ */
+const decryptEnteFile = async (
+ encryptedFile: EncryptedEnteFile,
+ collectionKey: string,
+): Promise<EnteFile> => {
+ const worker = await ComlinkCryptoWorker.getInstance();
+ const {
+ encryptedKey,
+ keyDecryptionNonce,
+ metadata,
+ magicMetadata,
+ pubMagicMetadata,
+ ...restFileProps
+ } = encryptedFile;
+ const fileKey = await worker.decryptB64(
+ encryptedKey,
+ keyDecryptionNonce,
+ collectionKey,
+ );
+ const fileMetadata = await worker.decryptMetadata(
+ metadata.encryptedData,
+ metadata.decryptionHeader,
+ fileKey,
+ );
+ let fileMagicMetadata: FileMagicMetadata;
+ let filePubMagicMetadata: FilePublicMagicMetadata;
+ if (magicMetadata?.data) {
+ fileMagicMetadata = {
+ ...encryptedFile.magicMetadata,
+ data: await worker.decryptMetadata(
+ magicMetadata.data,
+ magicMetadata.header,
+ fileKey,
+ ),
+ };
+ }
+ if (pubMagicMetadata?.data) {
+ filePubMagicMetadata = {
+ ...pubMagicMetadata,
+ data: await worker.decryptMetadata(
+ pubMagicMetadata.data,
+ pubMagicMetadata.header,
+ fileKey,
+ ),
+ };
+ }
+ const file = {
+ ...restFileProps,
+ key: fileKey,
+ metadata: fileMetadata,
+ magicMetadata: fileMagicMetadata,
+ pubMagicMetadata: filePubMagicMetadata,
+ };
+ if (file.pubMagicMetadata?.data.editedTime) {
+ file.metadata.creationTime = file.pubMagicMetadata.data.editedTime;
+ }
+ if (file.pubMagicMetadata?.data.editedName) {
+ file.metadata.title = file.pubMagicMetadata.data.editedName;
+ }
+ return file;
+};
+
+const isFileEligible = (file: EnteFile) => {
+ if (!isImageOrLivePhoto(file)) return false;
+ if (file.info.fileSize > 100 * 1024 * 1024) return false;
+
+ // This check is fast but potentially incorrect because in practice we do
+ // encounter files that are incorrectly named and have a misleading
+ // extension. To detect the actual type, we need to sniff the MIME type, but
+ // that requires downloading and decrypting the file first.
+ const [, extension] = nameAndExtension(file.metadata.title);
+ if (isNonWebImageFileExtension(extension)) {
+ // Of the known non-web types, we support HEIC.
+ return isHEICExtension(extension);
+ }
+
+ return true;
+};
+
+const isImageOrLivePhoto = (file: EnteFile) => {
+ const fileType = file.metadata.fileType;
+ return fileType == FILE_TYPE.IMAGE || fileType == FILE_TYPE.LIVE_PHOTO;
+};
+
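+/**
+ * Convert {@link heicBlob} into a JPEG blob, lazily creating (and thereafter
+ * reusing) the web worker that performs the conversion.
+ */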
+export const heicToJPEG = async (heicBlob: Blob) => {
+ let worker = heicWorker;
+ if (!worker) heicWorker = worker = createHEICConvertComlinkWorker();
+ return await (await worker.remote).heicToJPEG(heicBlob);
+};
+
+/**
+ * Create and return a new object URL that can be used to show the given
+ * {@link file} in our slideshow image viewer.
+ *
+ * Once we're done showing the file, the URL should be revoked using
+ * {@link URL.revokeObjectURL} to free up browser resources.
+ */
+const createRenderableURL = async (castToken: string, file: EnteFile) => {
+ const imageBlob = await renderableImageBlob(castToken, file);
+ return URL.createObjectURL(imageBlob);
+};
+
+const renderableImageBlob = async (castToken: string, file: EnteFile) => {
+ const shouldUseThumbnail = isChromecast();
+
+ let blob = await downloadFile(castToken, file, shouldUseThumbnail);
+
+ let fileName = file.metadata.title;
+ if (!shouldUseThumbnail && file.metadata.fileType == FILE_TYPE.LIVE_PHOTO) {
+ const { imageData, imageFileName } = await decodeLivePhoto(
+ fileName,
+ blob,
+ );
+ fileName = imageFileName;
+ blob = new Blob([imageData]);
+ }
+
+ // We cannot rely on the file's extension to detect the file type since
+ // some files are incorrectly named. So use a MIME type sniffer first, and
+ // if that fails, fall back to the extension.
+ const mimeType = await detectMediaMIMEType(new File([blob], fileName));
+ if (!mimeType)
+ throw new Error(`Could not detect MIME type for file ${fileName}`);
+
+ if (mimeType == "image/heif" || mimeType == "image/heic")
+ blob = await heicToJPEG(blob);
+
+ return new Blob([blob], { type: mimeType });
+};
+
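+/**
+ * Fetch the contents of the given {@link file} (or of its thumbnail when
+ * {@link shouldUseThumbnail} is true), decrypt them, and return them as a
+ * blob.
+ */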
+const downloadFile = async (
+ castToken: string,
+ file: EnteFile,
+ shouldUseThumbnail: boolean,
+) => {
+ if (!isImageOrLivePhoto(file))
+ throw new Error("Can only cast images and live photos");
+
+ const url = shouldUseThumbnail
+ ? getCastThumbnailURL(file.id)
+ : getCastFileURL(file.id);
+ const resp = await HTTPService.get(
+ url,
+ null,
+ {
+ "X-Cast-Access-Token": castToken,
+ },
+ { responseType: "arraybuffer" },
+ );
+ if (resp.data === undefined) throw new Error(`Failed to get ${url}`);
+
+ const cryptoWorker = await ComlinkCryptoWorker.getInstance();
+ const decrypted = await cryptoWorker.decryptFile(
+ new Uint8Array(resp.data),
+ await cryptoWorker.fromB64(
+ shouldUseThumbnail
+ ? file.thumbnail.decryptionHeader
+ : file.file.decryptionHeader,
+ ),
+ file.key,
+ );
+ return new Response(decrypted).blob();
+};
diff --git a/web/apps/cast/src/types/collection.ts b/web/apps/cast/src/types/collection.ts
deleted file mode 100644
index c495937ae009468150781d6efbb54ecdf310f867..0000000000000000000000000000000000000000
--- a/web/apps/cast/src/types/collection.ts
+++ /dev/null
@@ -1,100 +0,0 @@
-import { EnteFile } from "types/file";
-import {
- EncryptedMagicMetadata,
- MagicMetadataCore,
- SUB_TYPE,
- VISIBILITY_STATE,
-} from "types/magicMetadata";
-
-export enum COLLECTION_ROLE {
- VIEWER = "VIEWER",
- OWNER = "OWNER",
- COLLABORATOR = "COLLABORATOR",
- UNKNOWN = "UNKNOWN",
-}
-
-export interface CollectionUser {
- id: number;
- email: string;
- role: COLLECTION_ROLE;
-}
-
-enum CollectionType {
- folder = "folder",
- favorites = "favorites",
- album = "album",
- uncategorized = "uncategorized",
-}
-
-export interface EncryptedCollection {
- id: number;
- owner: CollectionUser;
- // collection name was unencrypted in the past, so we need to keep it as optional
- name?: string;
- encryptedKey: string;
- keyDecryptionNonce: string;
- encryptedName: string;
- nameDecryptionNonce: string;
- type: CollectionType;
- attributes: collectionAttributes;
- sharees: CollectionUser[];
- publicURLs?: unknown;
- updationTime: number;
- isDeleted: boolean;
- magicMetadata: EncryptedMagicMetadata;
- pubMagicMetadata: EncryptedMagicMetadata;
- sharedMagicMetadata: EncryptedMagicMetadata;
-}
-
-export interface Collection
- extends Omit<
- EncryptedCollection,
- | "encryptedKey"
- | "keyDecryptionNonce"
- | "encryptedName"
- | "nameDecryptionNonce"
- | "magicMetadata"
- | "pubMagicMetadata"
- | "sharedMagicMetadata"
- > {
- key: string;
- name: string;
- magicMetadata: CollectionMagicMetadata;
- pubMagicMetadata: CollectionPublicMagicMetadata;
- sharedMagicMetadata: CollectionShareeMagicMetadata;
-}
-
-// define a method on Collection interface to return the sync key as collection.id-time
-// this is used to store the last sync time of a collection in local storage
-
-export interface collectionAttributes {
- encryptedPath?: string;
- pathDecryptionNonce?: string;
-}
-
-export type CollectionToFileMap = Map;
-
-export interface CollectionMagicMetadataProps {
- visibility?: VISIBILITY_STATE;
- subType?: SUB_TYPE;
- order?: number;
-}
-
-export type CollectionMagicMetadata =
- MagicMetadataCore;
-
-export interface CollectionShareeMetadataProps {
- visibility?: VISIBILITY_STATE;
-}
-export type CollectionShareeMagicMetadata =
- MagicMetadataCore;
-
-export interface CollectionPublicMagicMetadataProps {
- asc?: boolean;
- coverID?: number;
-}
-
-export type CollectionPublicMagicMetadata =
- MagicMetadataCore;
-
-export type CollectionFilesCount = Map;
diff --git a/web/apps/cast/src/utils/file.ts b/web/apps/cast/src/utils/file.ts
deleted file mode 100644
index 91961b7becf4ac734a515553e21e080f2afa10f3..0000000000000000000000000000000000000000
--- a/web/apps/cast/src/utils/file.ts
+++ /dev/null
@@ -1,144 +0,0 @@
-import { FILE_TYPE } from "@/media/file-type";
-import { decodeLivePhoto } from "@/media/live-photo";
-import log from "@/next/log";
-import ComlinkCryptoWorker from "@ente/shared/crypto";
-import { RAW_FORMATS } from "constants/upload";
-import CastDownloadManager from "services/castDownloadManager";
-import { detectMediaMIMEType } from "services/detect-type";
-import {
- EncryptedEnteFile,
- EnteFile,
- FileMagicMetadata,
- FilePublicMagicMetadata,
-} from "types/file";
-
-export function sortFiles(files: EnteFile[], sortAsc = false) {
- // sort based on the time of creation time of the file,
- // for files with same creation time, sort based on the time of last modification
- const factor = sortAsc ? -1 : 1;
- return files.sort((a, b) => {
- if (a.metadata.creationTime === b.metadata.creationTime) {
- return (
- factor *
- (b.metadata.modificationTime - a.metadata.modificationTime)
- );
- }
- return factor * (b.metadata.creationTime - a.metadata.creationTime);
- });
-}
-
-export async function decryptFile(
- file: EncryptedEnteFile,
- collectionKey: string,
-): Promise {
- try {
- const worker = await ComlinkCryptoWorker.getInstance();
- const {
- encryptedKey,
- keyDecryptionNonce,
- metadata,
- magicMetadata,
- pubMagicMetadata,
- ...restFileProps
- } = file;
- const fileKey = await worker.decryptB64(
- encryptedKey,
- keyDecryptionNonce,
- collectionKey,
- );
- const fileMetadata = await worker.decryptMetadata(
- metadata.encryptedData,
- metadata.decryptionHeader,
- fileKey,
- );
- let fileMagicMetadata: FileMagicMetadata;
- let filePubMagicMetadata: FilePublicMagicMetadata;
- if (magicMetadata?.data) {
- fileMagicMetadata = {
- ...file.magicMetadata,
- data: await worker.decryptMetadata(
- magicMetadata.data,
- magicMetadata.header,
- fileKey,
- ),
- };
- }
- if (pubMagicMetadata?.data) {
- filePubMagicMetadata = {
- ...pubMagicMetadata,
- data: await worker.decryptMetadata(
- pubMagicMetadata.data,
- pubMagicMetadata.header,
- fileKey,
- ),
- };
- }
- return {
- ...restFileProps,
- key: fileKey,
- metadata: fileMetadata,
- magicMetadata: fileMagicMetadata,
- pubMagicMetadata: filePubMagicMetadata,
- };
- } catch (e) {
- log.error("file decryption failed", e);
- throw e;
- }
-}
-
-export function generateStreamFromArrayBuffer(data: Uint8Array) {
- return new ReadableStream({
- async start(controller: ReadableStreamDefaultController) {
- controller.enqueue(data);
- controller.close();
- },
- });
-}
-
-export function isRawFileFromFileName(fileName: string) {
- for (const rawFormat of RAW_FORMATS) {
- if (fileName.toLowerCase().endsWith(rawFormat)) {
- return true;
- }
- }
- return false;
-}
-
-export function mergeMetadata(files: EnteFile[]): EnteFile[] {
- return files.map((file) => {
- if (file.pubMagicMetadata?.data.editedTime) {
- file.metadata.creationTime = file.pubMagicMetadata.data.editedTime;
- }
- if (file.pubMagicMetadata?.data.editedName) {
- file.metadata.title = file.pubMagicMetadata.data.editedName;
- }
-
- return file;
- });
-}
-
-export const getPreviewableImage = async (
- file: EnteFile,
- castToken: string,
-): Promise => {
- try {
- let fileBlob = await new Response(
- await CastDownloadManager.downloadFile(castToken, file),
- ).blob();
- if (file.metadata.fileType === FILE_TYPE.LIVE_PHOTO) {
- const { imageData } = await decodeLivePhoto(
- file.metadata.title,
- fileBlob,
- );
- fileBlob = new Blob([imageData]);
- }
- const mimeType = await detectMediaMIMEType(
- new File([fileBlob], file.metadata.title),
- );
- if (!mimeType) return undefined;
- fileBlob = new Blob([fileBlob], { type: mimeType });
- return fileBlob;
- } catch (e) {
- log.error("failed to download file", e);
- }
-};
diff --git a/web/apps/cast/src/utils/useCastReceiver.tsx b/web/apps/cast/src/utils/useCastReceiver.tsx
deleted file mode 100644
index ff17b0910fdfead54ba85330c5b908b53df94093..0000000000000000000000000000000000000000
--- a/web/apps/cast/src/utils/useCastReceiver.tsx
+++ /dev/null
@@ -1,43 +0,0 @@
-declare const cast: any;
-
-import { useEffect, useState } from "react";
-
-type Receiver = {
- cast: typeof cast;
-};
-
-const load = (() => {
- let promise: Promise | null = null;
-
- return () => {
- if (promise === null) {
- promise = new Promise((resolve) => {
- const script = document.createElement("script");
- script.src =
- "https://www.gstatic.com/cast/sdk/libs/caf_receiver/v3/cast_receiver_framework.js";
-
- script.addEventListener("load", () => {
- resolve({
- cast,
- });
- });
- document.body.appendChild(script);
- });
- }
- return promise;
- };
-})();
-
-export const useCastReceiver = () => {
- const [receiver, setReceiver] = useState({
- cast: null,
- });
-
- useEffect(() => {
- load().then((receiver) => {
- setReceiver(receiver);
- });
- });
-
- return receiver;
-};
diff --git a/web/apps/payments/.env b/web/apps/payments/.env
new file mode 100644
index 0000000000000000000000000000000000000000..3f3b1cc9aeff1df0aabcc5d235dab2e39858b623
--- /dev/null
+++ b/web/apps/payments/.env
@@ -0,0 +1 @@
+NEXT_TELEMETRY_DISABLED = 1
diff --git a/web/apps/photos/.env b/web/apps/photos/.env
index a039e910559aa3f43c4e6725ae21d48ee0c01a01..978c677769f1427572600c7953d602069991fd90 100644
--- a/web/apps/photos/.env
+++ b/web/apps/photos/.env
@@ -88,3 +88,5 @@
# NEXT_PUBLIC_ENTE_TEST_EXPECTED_JSON=`cat path/to/expected.json` yarn dev
#
# NEXT_PUBLIC_ENTE_TEST_EXPECTED_JSON = {}
+
+NEXT_TELEMETRY_DISABLED = 1
diff --git a/web/apps/photos/package.json b/web/apps/photos/package.json
index 1196b4ddf7a386acacf4edc8961312556bf1c54a..ac658c0ea50b4541f776b37399b4d9af499d1de9 100644
--- a/web/apps/photos/package.json
+++ b/web/apps/photos/package.json
@@ -23,13 +23,11 @@
"ffmpeg-wasm": "file:./thirdparty/ffmpeg-wasm",
"formik": "^2.1.5",
"hdbscan": "0.0.1-alpha.5",
- "heic-convert": "^2.0.0",
"idb": "^7.1.1",
"leaflet": "^1.9.4",
"leaflet-defaulticon-compatibility": "^0.1.1",
"localforage": "^1.9.0",
"memoize-one": "^6.0.0",
- "mime-types": "^2.1.35",
"ml-matrix": "^6.10.4",
"otpauth": "^9.0.2",
"p-debounce": "^4.0.0",
@@ -37,7 +35,7 @@
"photoswipe": "file:./thirdparty/photoswipe",
"piexifjs": "^1.0.6",
"pure-react-carousel": "^1.30.1",
- "react-dropzone": "^11.2.4",
+ "react-dropzone": "^14.2",
"react-otp-input": "^2.3.1",
"react-select": "^4.3.1",
"react-top-loading-bar": "^2.0.1",
diff --git a/web/apps/photos/src/components/Collections/CollectionOptions/AlbumCastDialog.tsx b/web/apps/photos/src/components/Collections/CollectionOptions/AlbumCastDialog.tsx
index 3d9d061663d3e51b593e8dc04f2cbb6d4b592346..8b92f1cbb1f35011848e9993cc9b16c9687911bd 100644
--- a/web/apps/photos/src/components/Collections/CollectionOptions/AlbumCastDialog.tsx
+++ b/web/apps/photos/src/components/Collections/CollectionOptions/AlbumCastDialog.tsx
@@ -32,7 +32,11 @@ declare global {
}
}
-export default function AlbumCastDialog(props: Props) {
+export default function AlbumCastDialog({
+ show,
+ onHide,
+ currentCollection,
+}: Props) {
const [view, setView] = useState<
"choose" | "auto" | "pin" | "auto-cast-error"
>("choose");
@@ -51,7 +55,7 @@ export default function AlbumCastDialog(props: Props) {
) => {
try {
await doCast(value.trim());
- props.onHide();
+ onHide();
} catch (e) {
const error = e as Error;
let fieldError: string;
@@ -80,8 +84,8 @@ export default function AlbumCastDialog(props: Props) {
// ok, they exist. let's give them the good stuff.
const payload = JSON.stringify({
castToken: castToken,
- collectionID: props.currentCollection.id,
- collectionKey: props.currentCollection.key,
+ collectionID: currentCollection.id,
+ collectionKey: currentCollection.key,
});
const encryptedPayload = await boxSeal(btoa(payload), tvPublicKeyB64);
@@ -89,7 +93,7 @@ export default function AlbumCastDialog(props: Props) {
await castGateway.publishCastPayload(
pin,
encryptedPayload,
- props.currentCollection.id,
+ currentCollection.id,
castToken,
);
};
@@ -119,7 +123,7 @@ export default function AlbumCastDialog(props: Props) {
doCast(code)
.then(() => {
setView("choose");
- props.onHide();
+ onHide();
})
.catch((e) => {
setView("auto-cast-error");
@@ -129,8 +133,9 @@ export default function AlbumCastDialog(props: Props) {
},
);
+ const collectionID = currentCollection.id;
session
- .sendMessage("urn:x-cast:pair-request", {})
+ .sendMessage("urn:x-cast:pair-request", { collectionID })
.then(() => {
log.debug(() => "Message sent successfully");
})
@@ -142,16 +147,16 @@ export default function AlbumCastDialog(props: Props) {
}, [view]);
useEffect(() => {
- if (props.show) {
+ if (show) {
castGateway.revokeAllTokens();
}
- }, [props.show]);
+ }, [show]);
return (
{t("LEAVE_ALBUM")}
+ }
+ onClick={handleCollectionAction(
+ CollectionActions.SHOW_ALBUM_CAST_DIALOG,
+ false,
+ )}
+ >
+ {t("CAST_ALBUM_TO_TV")}
+
>
);
}
diff --git a/web/apps/photos/src/components/Directory/index.tsx b/web/apps/photos/src/components/Directory/index.tsx
index a9958113499b6fe3363d5edd45d19f985f2c9bce..2fc4be58ea064973a463e1bf3e878c44a35ab8fe 100644
--- a/web/apps/photos/src/components/Directory/index.tsx
+++ b/web/apps/photos/src/components/Directory/index.tsx
@@ -1,8 +1,7 @@
import { ensureElectron } from "@/next/electron";
import log from "@/next/log";
import LinkButton from "@ente/shared/components/LinkButton";
-import { Tooltip } from "@mui/material";
-import { styled } from "@mui/material/styles";
+import { Tooltip, styled } from "@mui/material";
const DirectoryPathContainer = styled(LinkButton)(
({ width }) => `
diff --git a/web/apps/photos/src/components/PhotoList/dedupe.tsx b/web/apps/photos/src/components/PhotoList/dedupe.tsx
index 7181f626754fce4e3f1c556adb2ec8dd3c8580f0..61b9958ef08b04fdbe8461154bd0728b48073d8f 100644
--- a/web/apps/photos/src/components/PhotoList/dedupe.tsx
+++ b/web/apps/photos/src/components/PhotoList/dedupe.tsx
@@ -19,7 +19,7 @@ import {
} from "react-window";
import { Duplicate } from "services/deduplicationService";
import { EnteFile } from "types/file";
-import { convertBytesToHumanReadable } from "utils/file";
+import { formattedByteSize } from "utils/units";
export enum ITEM_TYPE {
TIME = "TIME",
@@ -304,10 +304,13 @@ export function DedupePhotoList({
switch (listItem.itemType) {
case ITEM_TYPE.SIZE_AND_COUNT:
return (
+ /*TODO: Translate the full phrase instead of piecing
+ together parts like this. See:
+ https://crowdin.com/editor/ente-photos-web/9/enus-de?view=comfortable&filter=basic&value=0#8104
+ */
{listItem.fileCount} {t("FILES")},{" "}
- {convertBytesToHumanReadable(listItem.fileSize || 0)}{" "}
- {t("EACH")}
+ {formattedByteSize(listItem.fileSize || 0)} {t("EACH")}
);
case ITEM_TYPE.FILE: {
diff --git a/web/apps/photos/src/components/PhotoList/index.tsx b/web/apps/photos/src/components/PhotoList/index.tsx
index 4803995d4f7f9e77b7899cfea81f9e0c63d26964..5ac6b263edaa583208828af453d0896aff30b15a 100644
--- a/web/apps/photos/src/components/PhotoList/index.tsx
+++ b/web/apps/photos/src/components/PhotoList/index.tsx
@@ -22,9 +22,9 @@ import {
areEqual,
} from "react-window";
import { EnteFile } from "types/file";
-import { convertBytesToHumanReadable } from "utils/file";
import { handleSelectCreator } from "utils/photoFrame";
import { PublicCollectionGalleryContext } from "utils/publicCollectionGallery";
+import { formattedByteSize } from "utils/units";
const A_DAY = 24 * 60 * 60 * 1000;
const FOOTER_HEIGHT = 90;
@@ -829,8 +829,7 @@ export function PhotoList({
return (
{listItem.fileCount} {t("FILES")},{" "}
- {convertBytesToHumanReadable(listItem.fileSize || 0)}{" "}
- {t("EACH")}
+ {formattedByteSize(listItem.fileSize || 0)} {t("EACH")}
);
case ITEM_TYPE.FILE: {
diff --git a/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderFileName.tsx b/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderFileName.tsx
index 39905118550f5441322458995a740372e0c2780f..e9e27d55e8e585bc1c0c392b8be52d7d41af28c3 100644
--- a/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderFileName.tsx
+++ b/web/apps/photos/src/components/PhotoViewer/FileInfo/RenderFileName.tsx
@@ -7,8 +7,8 @@ import VideocamOutlined from "@mui/icons-material/VideocamOutlined";
import Box from "@mui/material/Box";
import { useEffect, useState } from "react";
import { EnteFile } from "types/file";
-import { makeHumanReadableStorage } from "utils/billing";
import { changeFileName, updateExistingFilePubMetadata } from "utils/file";
+import { formattedByteSize } from "utils/units";
import { FileNameEditDialog } from "./FileNameEditDialog";
import InfoItem from "./InfoItem";
@@ -33,7 +33,7 @@ const getCaption = (file: EnteFile, parsedExifData) => {
captionParts.push(resolution);
}
if (fileSize) {
- captionParts.push(makeHumanReadableStorage(fileSize));
+ captionParts.push(formattedByteSize(fileSize));
}
return (
diff --git a/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx b/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx
index 42edddbf114b18df26ac981622ea91015e652ff9..c4e1f5854f41d547c691a6e0f1d10259c938c628 100644
--- a/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx
+++ b/web/apps/photos/src/components/PhotoViewer/ImageEditorOverlay/index.tsx
@@ -1,24 +1,6 @@
+import { nameAndExtension } from "@/next/file";
import log from "@/next/log";
-import {
- Backdrop,
- Box,
- CircularProgress,
- IconButton,
- Tab,
- Tabs,
- Typography,
-} from "@mui/material";
-import {
- Dispatch,
- MutableRefObject,
- SetStateAction,
- createContext,
- useContext,
- useEffect,
- useRef,
- useState,
-} from "react";
-
+import { ensure } from "@/utils/ensure";
import {
CenteredFlex,
HorizontalFlex,
@@ -32,6 +14,15 @@ import CropIcon from "@mui/icons-material/Crop";
import CropOriginalIcon from "@mui/icons-material/CropOriginal";
import DownloadIcon from "@mui/icons-material/Download";
import MenuIcon from "@mui/icons-material/Menu";
+import {
+ Backdrop,
+ Box,
+ CircularProgress,
+ IconButton,
+ Tab,
+ Tabs,
+ Typography,
+} from "@mui/material";
import { EnteDrawer } from "components/EnteDrawer";
import { EnteMenuItem } from "components/Menu/EnteMenuItem";
import MenuItemDivider from "components/Menu/MenuItemDivider";
@@ -39,10 +30,18 @@ import { MenuItemGroup } from "components/Menu/MenuItemGroup";
import MenuSectionTitle from "components/Menu/MenuSectionTitle";
import { CORNER_THRESHOLD, FILTER_DEFAULT_VALUES } from "constants/photoEditor";
import { t } from "i18next";
-import mime from "mime-types";
import { AppContext } from "pages/_app";
+import {
+ Dispatch,
+ MutableRefObject,
+ SetStateAction,
+ createContext,
+ useContext,
+ useEffect,
+ useRef,
+ useState,
+} from "react";
import { getLocalCollections } from "services/collectionService";
-import { detectFileTypeInfo } from "services/detect-type";
import downloadManager from "services/download";
import uploadManager from "services/upload/uploadManager";
import { EnteFile } from "types/file";
@@ -72,13 +71,6 @@ export const ImageEditorOverlayContext = createContext(
type OperationTab = "crop" | "transform" | "colours";
-const getEditedFileName = (fileName: string) => {
- const fileNameParts = fileName.split(".");
- const extension = fileNameParts.pop();
- const editedFileName = `${fileNameParts.join(".")}-edited.${extension}`;
- return editedFileName;
-};
-
export interface CropBoxProps {
x: number;
y: number;
@@ -94,6 +86,10 @@ const ImageEditorOverlay = (props: IProps) => {
const parentRef = useRef(null);
const [fileURL, setFileURL] = useState("");
+ // The MIME type of the original file that we are editing.
+ //
+ // It _should_ generally be present, but it is not guaranteed to be.
+ const [mimeType, setMIMEType] = useState<string | undefined>();
const [currentRotationAngle, setCurrentRotationAngle] = useState(0);
@@ -372,6 +368,10 @@ const ImageEditorOverlay = (props: IProps) => {
);
img.src = srcURLs.url as string;
setFileURL(srcURLs.url as string);
+ // We're casting the srcURLs.url to string above, i.e. this code
+ // is not meant to run for the live photos scenario. For images,
+ // we will usually have the MIME type.
+ setMIMEType(srcURLs.mimeType);
} else {
img.src = fileURL;
}
@@ -430,37 +430,6 @@ const ImageEditorOverlay = (props: IProps) => {
loadCanvas();
}, [props.show, props.file]);
- const exportCanvasToBlob = (): Promise => {
- try {
- const canvas = originalSizeCanvasRef.current;
- if (!canvas) return;
-
- const mimeType = mime.lookup(props.file.metadata.title);
-
- const image = new Image();
- image.src = canvas.toDataURL();
-
- const context = canvas.getContext("2d");
- if (!context) return;
- return new Promise((resolve) => {
- canvas.toBlob(resolve, mimeType);
- });
- } catch (e) {
- log.error("Error exporting canvas to blob", e);
- throw e;
- }
- };
-
- const getEditedFile = async () => {
- const blob = await exportCanvasToBlob();
- if (!blob) {
- throw Error("no blob");
- }
- const editedFileName = getEditedFileName(props.file.metadata.title);
- const editedFile = new File([blob], editedFileName);
- return editedFile;
- };
-
const handleClose = () => {
setFileURL(null);
props.onClose();
@@ -480,25 +449,23 @@ const ImageEditorOverlay = (props: IProps) => {
return <>>;
}
+ const getEditedFile = async () => {
+ const originalSizeCanvas = ensure(originalSizeCanvasRef.current);
+ const originalFileName = props.file.metadata.title;
+ return canvasToFile(originalSizeCanvas, originalFileName, mimeType);
+ };
+
const downloadEditedPhoto = async () => {
- try {
- if (!canvasRef.current) return;
+ if (!canvasRef.current) return;
- const editedFile = await getEditedFile();
- const fileType = await detectFileTypeInfo(editedFile);
- const tempImgURL = URL.createObjectURL(
- new Blob([editedFile], { type: fileType.mimeType }),
- );
- downloadUsingAnchor(tempImgURL, editedFile.name);
- } catch (e) {
- log.error("Error downloading edited photo", e);
- }
+ const f = await getEditedFile();
+ // Revokes the URL after downloading.
+ downloadUsingAnchor(URL.createObjectURL(f), f.name);
};
const saveCopyToEnte = async () => {
+ if (!canvasRef.current) return;
try {
- if (!canvasRef.current) return;
-
const collections = await getLocalCollections();
const collection = collections.find(
@@ -678,7 +645,7 @@ const ImageEditorOverlay = (props: IProps) => {
setCurrentTab(value);
}}
>
-
+
{
};
export default ImageEditorOverlay;
+
+/**
+ * Create a new {@link File} with the contents of the given canvas.
+ *
+ * @param canvas A {@link HTMLCanvasElement} whose contents we want to download
+ * as a file.
+ *
+ * @param originalFileName The name of the original file which was used to seed
+ * the canvas. This will be used as a base name for the generated file (with an
+ * "-edited" suffix).
+ *
+ * @param originalMIMEType The MIME type of the original file which was used to
+ * seed the canvas. When possible, we try to download a file in the same format,
+ * but this is not guaranteed and depends on browser support. If the original
+ * MIME type cannot be preserved, a PNG file will be downloaded.
+ */
+const canvasToFile = async (
+ canvas: HTMLCanvasElement,
+ originalFileName: string,
+ originalMIMEType?: string,
+): Promise<File> => {
+ const image = new Image();
+ image.src = canvas.toDataURL();
+
+ // Browsers are required to support "image/png". They may also support
+ // "image/jpeg" and "image/webp". Potentially they may even support more
+ // formats, but to keep this scoped we limit to these three.
+ let [mimeType, extension] = ["image/png", "png"];
+ switch (originalMIMEType) {
+ case "image/jpeg":
+ mimeType = originalMIMEType;
+ extension = "jpeg";
+ break;
+ case "image/webp":
+ mimeType = originalMIMEType;
+ extension = "webp";
+ break;
+ default:
+ break;
+ }
+
+ const blob = ensure(
+ await new Promise((resolve) => canvas.toBlob(resolve, mimeType)),
+ );
+
+ const [originalName] = nameAndExtension(originalFileName);
+ const fileName = `${originalName}-edited.${extension}`;
+
+ log.debug(() => ({ a: "canvas => file", blob, type: blob.type, mimeType }));
+
+ return new File([blob], fileName);
+};
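
A small illustration of the fallback described above, assuming a canvas that was seeded from a HEIC original: "image/heic" is not in the supported set, so the result is PNG encoded and named accordingly.

const exampleHEICFallback = async (canvas: HTMLCanvasElement) => {
    const file = await canvasToFile(canvas, "IMG_1234.heic", "image/heic");
    console.log(file.name); // "IMG_1234-edited.png"
};
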
diff --git a/web/apps/photos/src/components/PhotoViewer/index.tsx b/web/apps/photos/src/components/PhotoViewer/index.tsx
index 8e6debf68b2ddcf70ec900bf06469b58538666e3..c7383efb13bf89c098df4a631a8885db378ac3ff 100644
--- a/web/apps/photos/src/components/PhotoViewer/index.tsx
+++ b/web/apps/photos/src/components/PhotoViewer/index.tsx
@@ -11,11 +11,11 @@ import {
copyFileToClipboard,
downloadSingleFile,
getFileFromURL,
- isRawFile,
isSupportedRawFormat,
} from "utils/file";
import { FILE_TYPE } from "@/media/file-type";
+import { isNonWebImageFileExtension } from "@/media/formats";
import { lowercaseExtension } from "@/next/file";
import { FlexWrapper } from "@ente/shared/components/Container";
import EnteSpinner from "@ente/shared/components/EnteSpinner";
@@ -350,7 +350,8 @@ function PhotoViewer(props: Iprops) {
function updateShowEditButton(file: EnteFile) {
const extension = lowercaseExtension(file.metadata.title);
const isSupported =
- !isRawFile(extension) || isSupportedRawFormat(extension);
+ !isNonWebImageFileExtension(extension) ||
+ isSupportedRawFormat(extension);
setShowEditButton(
file.metadata.fileType === FILE_TYPE.IMAGE && isSupported,
);
diff --git a/web/apps/photos/src/components/PhotoViewer/styledComponents/LivePhotoBtn.tsx b/web/apps/photos/src/components/PhotoViewer/styledComponents/LivePhotoBtn.tsx
index 40de098f5e81372641ca5ffe6109c0724906f321..00b8979d5ab0f87bdd574e258ec93db364c28655 100644
--- a/web/apps/photos/src/components/PhotoViewer/styledComponents/LivePhotoBtn.tsx
+++ b/web/apps/photos/src/components/PhotoViewer/styledComponents/LivePhotoBtn.tsx
@@ -1,5 +1,4 @@
-import { Paper } from "@mui/material";
-import { styled } from "@mui/material/styles";
+import { Paper, styled } from "@mui/material";
export const LivePhotoBtnContainer = styled(Paper)`
border-radius: 4px;
diff --git a/web/apps/photos/src/components/Search/SearchBar/searchInput/MenuWithPeople.tsx b/web/apps/photos/src/components/Search/SearchBar/searchInput/MenuWithPeople.tsx
index 6ebc0d942239d3b86f8a8692cd5d75f574e60b90..b9b7ea88d565096211edc83c2f0bd1aaccc6c606 100644
--- a/web/apps/photos/src/components/Search/SearchBar/searchInput/MenuWithPeople.tsx
+++ b/web/apps/photos/src/components/Search/SearchBar/searchInput/MenuWithPeople.tsx
@@ -5,7 +5,7 @@ import { t } from "i18next";
import { AppContext } from "pages/_app";
import { useContext } from "react";
import { components } from "react-select";
-import { IndexStatus } from "types/machineLearning/ui";
+import { IndexStatus } from "services/ml/db";
import { Suggestion, SuggestionType } from "types/search";
const { Menu } = components;
diff --git a/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx b/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx
index 3f737b3e0c6920a1474fe2766bc36bc23674d4fe..da462a3b5a072d87ed843a8921c49434be079d57 100644
--- a/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx
+++ b/web/apps/photos/src/components/Search/SearchBar/searchInput/index.tsx
@@ -10,6 +10,7 @@ import { components } from "react-select";
import AsyncSelect from "react-select/async";
import { InputActionMeta } from "react-select/src/types";
import { City } from "services/locationSearchService";
+import { Person } from "services/ml/types";
import {
getAutoCompleteSuggestions,
getDefaultOptions,
@@ -17,7 +18,6 @@ import {
import { Collection } from "types/collection";
import { LocationTagData } from "types/entity";
import { EnteFile } from "types/file";
-import { Person } from "types/machineLearning";
import {
ClipSearchScores,
DateValue,
diff --git a/web/apps/photos/src/components/Sidebar/Preferences/LanguageSelector.tsx b/web/apps/photos/src/components/Sidebar/Preferences/LanguageSelector.tsx
index a9474a37d97c12ed2150ac770d95e91e59f8fb38..bdc0d5a84fd7ce855d450d65928a592365d63a04 100644
--- a/web/apps/photos/src/components/Sidebar/Preferences/LanguageSelector.tsx
+++ b/web/apps/photos/src/components/Sidebar/Preferences/LanguageSelector.tsx
@@ -19,6 +19,8 @@ export const localeName = (locale: SupportedLocale) => {
return "English";
case "fr-FR":
return "Français";
+ case "de-DE":
+ return "Deutsch";
case "zh-CN":
return "中文";
case "nl-NL":
diff --git a/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/individual/usageSection.tsx b/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/individual/usageSection.tsx
index 4b0ce31b042df5c1e431118c75e884b81d96b4e3..8975941ad50f790ca4d655617555ef0c47311ec6 100644
--- a/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/individual/usageSection.tsx
+++ b/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/individual/usageSection.tsx
@@ -1,7 +1,7 @@
import { SpaceBetweenFlex } from "@ente/shared/components/Container";
import { Box, Typography } from "@mui/material";
import { t } from "i18next";
-import { makeHumanReadableStorage } from "utils/billing";
+import { formattedStorageByteSize } from "utils/units";
import { Progressbar } from "../../styledComponents";
@@ -19,7 +19,7 @@ export function IndividualUsageSection({ usage, storage, fileCount }: Iprops) {
marginTop: 1.5,
}}
>
- {`${makeHumanReadableStorage(
+ {`${formattedStorageByteSize(
storage - usage,
)} ${t("FREE")}`}
diff --git a/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/storageSection.tsx b/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/storageSection.tsx
index 6143044f0d8f3bb8584d1ee31fd5117a4d9974b3..7f2712f7386c0df77453c612d3d9d7c9c4f2f45f 100644
--- a/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/storageSection.tsx
+++ b/web/apps/photos/src/components/Sidebar/SubscriptionCard/contentOverlay/storageSection.tsx
@@ -1,6 +1,6 @@
import { Box, styled, Typography } from "@mui/material";
import { t } from "i18next";
-import { convertBytesToGBs, makeHumanReadableStorage } from "utils/billing";
+import { bytesInGB, formattedStorageByteSize } from "utils/units";
const MobileSmallBox = styled(Box)`
display: none;
@@ -30,9 +30,9 @@ export default function StorageSection({ usage, storage }: Iprops) {
fontWeight={"bold"}
sx={{ fontSize: "24px", lineHeight: "30px" }}
>
- {`${makeHumanReadableStorage(usage, { roundUp: true })} ${t(
+ {`${formattedStorageByteSize(usage, { round: true })} ${t(
"OF",
- )} ${makeHumanReadableStorage(storage)} ${t("USED")}`}
+ )} ${formattedStorageByteSize(storage)} ${t("USED")}`}
@@ -40,9 +40,7 @@ export default function StorageSection({ usage, storage }: Iprops) {
fontWeight={"bold"}
sx={{ fontSize: "24px", lineHeight: "30px" }}
>
- {`${convertBytesToGBs(usage)} / ${convertBytesToGBs(
- storage,
- )} ${t("GB")} ${t("USED")}`}
+ {`${bytesInGB(usage)} / ${bytesInGB(storage)} ${t("storage_unit.gb")} ${t("USED")}`}
diff --git a/web/apps/photos/src/components/Sidebar/UtilitySection.tsx b/web/apps/photos/src/components/Sidebar/UtilitySection.tsx
index 6b4a6f43d5be0cb7176b13a588db18b6beef70e7..32f61d976478557d43c34bf4576862e835b711b0 100644
--- a/web/apps/photos/src/components/Sidebar/UtilitySection.tsx
+++ b/web/apps/photos/src/components/Sidebar/UtilitySection.tsx
@@ -9,7 +9,7 @@ import { t } from "i18next";
import { useRouter } from "next/router";
import { AppContext } from "pages/_app";
import { useContext, useState } from "react";
-// import mlIDbStorage from 'utils/storage/mlIDbStorage';
+// import mlIDbStorage from 'services/ml/db';
import {
configurePasskeyRecovery,
isPasskeyRecoveryEnabled,
diff --git a/web/apps/photos/src/components/Upload/Uploader.tsx b/web/apps/photos/src/components/Upload/Uploader.tsx
index 717430655689867f54580f6ed3b5a2a47481f305..bea54c645b2fffd577987b54228d7595c2b79121 100644
--- a/web/apps/photos/src/components/Upload/Uploader.tsx
+++ b/web/apps/photos/src/components/Upload/Uploader.tsx
@@ -1,6 +1,8 @@
import { basename } from "@/next/file";
import log from "@/next/log";
import type { CollectionMapping, Electron, ZipItem } from "@/next/types/ipc";
+import { firstNonEmpty } from "@/utils/array";
+import { ensure } from "@/utils/ensure";
import { CustomError } from "@ente/shared/error";
import { isPromise } from "@ente/shared/utils";
import DiscFullIcon from "@mui/icons-material/DiscFull";
@@ -324,17 +326,17 @@ export default function Uploader({
// Trigger an upload when any of the dependencies change.
useEffect(() => {
- // Re the paths:
+ // About the paths:
//
// - These are not necessarily the full paths. In particular, when
// running on the browser they'll be the relative paths (at best) or
// just the file-name otherwise.
//
// - All the paths use POSIX separators. See inline comments.
+ //
const allItemAndPaths = [
- // See: [Note: webkitRelativePath]. In particular, they use POSIX
- // separators.
- webFiles.map((f) => [f, f.webkitRelativePath ?? f.name]),
+ // Relative path (using POSIX separators) or the file's name.
+ webFiles.map((f) => [f, pathLikeForWebFile(f)]),
// The paths we get from the desktop app all eventually come either
// from electron.selectDirectory or electron.pathForFile, both of
// which return POSIX paths.
@@ -822,6 +824,37 @@ const desktopFilesAndZipItems = async (electron: Electron, files: File[]) => {
return { fileAndPaths, zipItems };
};
+/**
+ * Return the relative path or name of a File object selected or
+ * drag-and-dropped on the web.
+ *
+ * There are three cases here:
+ *
+ * 1. If the user selects individual file(s), then the returned File objects
+ * will only have a `name`.
+ *
+ * 2. If the user selects directory(ies), then the returned File objects will
+ * have a `webkitRelativePath`. For more details, see [Note:
+ * webkitRelativePath]. In particular, these will use POSIX separators.
+ *
+ * 3. If the user drags-and-drops, then the react-dropzone library that we use
+ * will internally convert `webkitRelativePath` to `path`, but otherwise it
+ * behaves the same as case 2.
+ * https://github.com/react-dropzone/file-selector/blob/master/src/file.ts#L1214
+ */
+const pathLikeForWebFile = (file: File): string =>
+ ensure(
+ firstNonEmpty([
+ // We need to check for `path` first, since it is not a property
+ // of the standard File objects.
+ "path" in file && typeof file.path == "string"
+ ? file.path
+ : undefined,
+ file.webkitRelativePath,
+ file.name,
+ ]),
+ );
+
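
To make the precedence concrete, a small sketch for case 1 (a file picked directly), with the other two cases noted as comments; the exact drag-and-drop `path` value is an assumption about the library's behaviour.

const selected = new File(["hello"], "flower.png");
console.log(pathLikeForWebFile(selected)); // "flower.png" (only `name` is set)
// Case 2 (directory selection): the webkitRelativePath, e.g. "Photos/flower.png".
// Case 3 (drag and drop): the library-set `path`, e.g. "./Photos/flower.png".
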
+// This is used to prompt the user to make the upload strategy choice
interface ImportSuggestion {
rootFolderName: string;
diff --git a/web/apps/photos/src/components/UploadSelectorInputs.tsx b/web/apps/photos/src/components/UploadSelectorInputs.tsx
index 13e33fc6d33ff576f4006085164469f79bc6a085..e22e2f541a3ec4ad29c5b1be6486e579979cbe29 100644
--- a/web/apps/photos/src/components/UploadSelectorInputs.tsx
+++ b/web/apps/photos/src/components/UploadSelectorInputs.tsx
@@ -1,9 +1,24 @@
-export default function UploadSelectorInputs({
+type GetInputProps = () => React.HTMLAttributes<HTMLInputElement>;
+
+interface UploadSelectorInputsProps {
+ getDragAndDropInputProps: GetInputProps;
+ getFileSelectorInputProps: GetInputProps;
+ getFolderSelectorInputProps: GetInputProps;
+ getZipFileSelectorInputProps?: GetInputProps;
+}
+
+/**
+ * Create a bunch of hidden HTML input elements, one for each of the given
+ * props.
+ *
+ * These hidden input elements serve as the way for us to show the various
+ * file / folder selector dialogs and to handle drag and drop inputs.
+ */
+export const UploadSelectorInputs: React.FC<UploadSelectorInputsProps> = ({
getDragAndDropInputProps,
getFileSelectorInputProps,
getFolderSelectorInputProps,
getZipFileSelectorInputProps,
-}) {
+}) => {
return (
<>
@@ -14,4 +29,4 @@ export default function UploadSelectorInputs({
)}
>
);
-}
+};
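
A hypothetical wiring sketch, assuming each prop is derived from its own react-dropzone useDropzone() instance (a plausible reading of the prop names, not something shown in this diff):

import { useDropzone } from "react-dropzone";

const UploadInputsExample: React.FC = () => {
    const dragAndDrop = useDropzone({ noClick: true, noKeyboard: true });
    const fileSelector = useDropzone({ noDrag: true });
    const folderSelector = useDropzone({ noDrag: true });
    return (
        <UploadSelectorInputs
            getDragAndDropInputProps={dragAndDrop.getInputProps}
            getFileSelectorInputProps={fileSelector.getInputProps}
            getFolderSelectorInputProps={folderSelector.getInputProps}
        />
    );
};
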
diff --git a/web/apps/photos/src/components/WatchFolder.tsx b/web/apps/photos/src/components/WatchFolder.tsx
index 710a54168384e02c8e04388b022a85ecac6ceb56..4d2144e0ceeee543721a94f9ec96d24d3a0ca1e5 100644
--- a/web/apps/photos/src/components/WatchFolder.tsx
+++ b/web/apps/photos/src/components/WatchFolder.tsx
@@ -25,8 +25,8 @@ import {
Stack,
Tooltip,
Typography,
+ styled,
} from "@mui/material";
-import { styled } from "@mui/material/styles";
import { CollectionMappingChoiceModal } from "components/Upload/CollectionMappingChoiceModal";
import { t } from "i18next";
import { AppContext } from "pages/_app";
diff --git a/web/apps/photos/src/components/ml/MLSearchSettings.tsx b/web/apps/photos/src/components/ml/MLSearchSettings.tsx
index 9b50c2d6ae4420c9ccd7d226d33d21e3482226a8..409df4fc6f1995e5e2e48f696a4d8796fd52fae0 100644
--- a/web/apps/photos/src/components/ml/MLSearchSettings.tsx
+++ b/web/apps/photos/src/components/ml/MLSearchSettings.tsx
@@ -22,7 +22,7 @@ import {
getFaceSearchEnabledStatus,
updateFaceSearchEnabledStatus,
} from "services/userService";
-import { isInternalUser } from "utils/user";
+import { isInternalUserForML } from "utils/user";
export const MLSearchSettings = ({ open, onClose, onRootClose }) => {
const {
@@ -280,7 +280,7 @@ function EnableMLSearch({ onClose, enableMlSearch, onRootClose }) {
- {isInternalUser() && (
+ {isInternalUserForML() && (
{
}
>
@@ -140,7 +137,7 @@ export function UnidentifiedFaces(props: {
faces.map((face, index) => (
@@ -151,20 +148,24 @@ export function UnidentifiedFaces(props: {
}
interface FaceCropImageViewProps {
- faceId: string;
+ faceID: string;
cacheKey?: string;
}
const FaceCropImageView: React.FC = ({
- faceId,
+ faceID,
cacheKey,
}) => {
+ const [objectURL, setObjectURL] = useState<string | undefined>();
useEffect(() => {
let didCancel = false;
+ const electron = globalThis.electron;
- if (cacheKey) {
+ if (faceID && electron) {
+ electron
+ .legacyFaceCrop(faceID)
+ /*
cachedOrNew("face-crops", cacheKey, async () => {
const user = await ensureLocalUser();
return machineLearningService.regenerateFaceCrop(
@@ -172,16 +173,20 @@ const FaceCropImageView: React.FC = ({
user.id,
faceId,
);
- }).then((blob) => {
- if (!didCancel) setObjectURL(URL.createObjectURL(blob));
- });
+ })*/
+ .then((data) => {
+ if (data) {
+ const blob = new Blob([data]);
+ if (!didCancel) setObjectURL(URL.createObjectURL(blob));
+ }
+ });
} else setObjectURL(undefined);
return () => {
didCancel = true;
if (objectURL) URL.revokeObjectURL(objectURL);
};
- }, [faceId, cacheKey]);
+ }, [faceID, cacheKey]);
return objectURL ? (
@@ -189,3 +194,45 @@ const FaceCropImageView: React.FC = ({
);
};
+
+async function getPeopleList(file: EnteFile): Promise<Array<Person>> {
+ let startTime = Date.now();
+ const mlFileData: MlFileData = await mlIDbStorage.getFile(file.id);
+ log.info(
+ "getPeopleList:mlFilesStore:getItem",
+ Date.now() - startTime,
+ "ms",
+ );
+ if (!mlFileData?.faces || mlFileData.faces.length < 1) {
+ return [];
+ }
+
+ const peopleIds = mlFileData.faces
+ .filter((f) => f.personId !== null && f.personId !== undefined)
+ .map((f) => f.personId);
+ if (!peopleIds || peopleIds.length < 1) {
+ return [];
+ }
+ // log.info("peopleIds: ", peopleIds);
+ startTime = Date.now();
+ const peoplePromises = peopleIds.map(
+ (p) => mlIDbStorage.getPerson(p) as Promise<Person>,
+ );
+ const peopleList = await Promise.all(peoplePromises);
+ log.info(
+ "getPeopleList:mlPeopleStore:getItems",
+ Date.now() - startTime,
+ "ms",
+ );
+ // log.info("peopleList: ", peopleList);
+
+ return peopleList;
+}
+
+async function getUnidentifiedFaces(file: EnteFile): Promise<Array<Face>> {
+ const mlFileData: MlFileData = await mlIDbStorage.getFile(file.id);
+
+ return mlFileData?.faces?.filter(
+ (f) => f.personId === null || f.personId === undefined,
+ );
+}
diff --git a/web/apps/photos/src/components/pages/gallery/PlanSelector/card.tsx b/web/apps/photos/src/components/pages/gallery/PlanSelector/card.tsx
new file mode 100644
index 0000000000000000000000000000000000000000..6fe86769e12452044dee786eb7bcb2a96363ee8a
--- /dev/null
+++ b/web/apps/photos/src/components/pages/gallery/PlanSelector/card.tsx
@@ -0,0 +1,356 @@
+import log from "@/next/log";
+import { SpaceBetweenFlex } from "@ente/shared/components/Container";
+import { SUPPORT_EMAIL } from "@ente/shared/constants/urls";
+import Close from "@mui/icons-material/Close";
+import { IconButton, Link, Stack } from "@mui/material";
+import Box from "@mui/material/Box";
+import Typography from "@mui/material/Typography";
+import { PLAN_PERIOD } from "constants/gallery";
+import { t } from "i18next";
+import { AppContext } from "pages/_app";
+import { GalleryContext } from "pages/gallery";
+import { useContext, useEffect, useMemo, useState } from "react";
+import { Trans } from "react-i18next";
+import billingService, { type PlansResponse } from "services/billingService";
+import { Plan } from "types/billing";
+import { SetLoading } from "types/gallery";
+import {
+ getLocalUserSubscription,
+ hasAddOnBonus,
+ hasMobileSubscription,
+ hasPaidSubscription,
+ hasStripeSubscription,
+ isOnFreePlan,
+ isSubscriptionActive,
+ isSubscriptionCancelled,
+ isUserSubscribedPlan,
+ planForSubscription,
+ updateSubscription,
+} from "utils/billing";
+import { bytesInGB } from "utils/units";
+import { getLocalUserDetails } from "utils/user";
+import { getTotalFamilyUsage, isPartOfFamily } from "utils/user/family";
+import { ManageSubscription } from "./manageSubscription";
+import { PeriodToggler } from "./periodToggler";
+import Plans from "./plans";
+import { BFAddOnRow } from "./plans/BfAddOnRow";
+
+interface Props {
+ closeModal: any;
+ setLoading: SetLoading;
+}
+
+function PlanSelectorCard(props: Props) {
+ const subscription = useMemo(() => getLocalUserSubscription(), []);
+ const [plansResponse, setPlansResponse] = useState<
+ PlansResponse | undefined
+ >();
+
+ const [planPeriod, setPlanPeriod] = useState(
+ subscription?.period || PLAN_PERIOD.MONTH,
+ );
+ const galleryContext = useContext(GalleryContext);
+ const appContext = useContext(AppContext);
+ const bonusData = useMemo(() => {
+ const userDetails = getLocalUserDetails();
+ if (!userDetails) {
+ return null;
+ }
+ return userDetails.bonusData;
+ }, []);
+
+ const usage = useMemo(() => {
+ const userDetails = getLocalUserDetails();
+ if (!userDetails) {
+ return 0;
+ }
+ return isPartOfFamily(userDetails.familyData)
+ ? getTotalFamilyUsage(userDetails.familyData)
+ : userDetails.usage;
+ }, []);
+
+ const togglePeriod = () => {
+ setPlanPeriod((prevPeriod) =>
+ prevPeriod === PLAN_PERIOD.MONTH
+ ? PLAN_PERIOD.YEAR
+ : PLAN_PERIOD.MONTH,
+ );
+ };
+ function onReopenClick() {
+ appContext.closeMessageDialog();
+ galleryContext.showPlanSelectorModal();
+ }
+ useEffect(() => {
+ const main = async () => {
+ try {
+ props.setLoading(true);
+ const response = await billingService.getPlans();
+ const { plans } = response;
+ if (isSubscriptionActive(subscription)) {
+ const planNotListed =
+ plans.filter((plan) =>
+ isUserSubscribedPlan(plan, subscription),
+ ).length === 0;
+ if (
+ subscription &&
+ !isOnFreePlan(subscription) &&
+ planNotListed
+ ) {
+ plans.push(planForSubscription(subscription));
+ }
+ }
+ setPlansResponse(response);
+ } catch (e) {
+ log.error("plan selector modal open failed", e);
+ props.closeModal();
+ appContext.setDialogMessage({
+ title: t("OPEN_PLAN_SELECTOR_MODAL_FAILED"),
+ content: t("UNKNOWN_ERROR"),
+ close: { text: t("CLOSE"), variant: "secondary" },
+ proceed: {
+ text: t("REOPEN_PLAN_SELECTOR_MODAL"),
+ variant: "accent",
+ action: onReopenClick,
+ },
+ });
+ } finally {
+ props.setLoading(false);
+ }
+ };
+ main();
+ }, []);
+
+ async function onPlanSelect(plan: Plan) {
+ if (
+ !hasPaidSubscription(subscription) ||
+ isSubscriptionCancelled(subscription)
+ ) {
+ try {
+ props.setLoading(true);
+ await billingService.buySubscription(plan.stripeID);
+ } catch (e) {
+ props.setLoading(false);
+ appContext.setDialogMessage({
+ title: t("ERROR"),
+ content: t("SUBSCRIPTION_PURCHASE_FAILED"),
+ close: { variant: "critical" },
+ });
+ }
+ } else if (hasStripeSubscription(subscription)) {
+ appContext.setDialogMessage({
+ title: t("update_subscription_title"),
+ content: t("UPDATE_SUBSCRIPTION_MESSAGE"),
+ proceed: {
+ text: t("UPDATE_SUBSCRIPTION"),
+ action: updateSubscription.bind(
+ null,
+ plan,
+ appContext.setDialogMessage,
+ props.setLoading,
+ props.closeModal,
+ ),
+ variant: "accent",
+ },
+ close: { text: t("CANCEL") },
+ });
+ } else if (hasMobileSubscription(subscription)) {
+ appContext.setDialogMessage({
+ title: t("CANCEL_SUBSCRIPTION_ON_MOBILE"),
+ content: t("CANCEL_SUBSCRIPTION_ON_MOBILE_MESSAGE"),
+ close: { variant: "secondary" },
+ });
+ } else {
+ appContext.setDialogMessage({
+ title: t("MANAGE_PLAN"),
+ content: (
+ ,
+ }}
+ values={{ emailID: SUPPORT_EMAIL }}
+ />
+ ),
+ close: { variant: "secondary" },
+ });
+ }
+ }
+
+ const { closeModal, setLoading } = props;
+
+ const commonCardData = {
+ subscription,
+ bonusData,
+ closeModal,
+ planPeriod,
+ togglePeriod,
+ setLoading,
+ };
+
+ const plansList = (
+
+ );
+
+ return (
+ <>
+
+ {hasPaidSubscription(subscription) ? (
+
+ {plansList}
+
+ ) : (
+
+ {plansList}
+
+ )}
+
+ >
+ );
+}
+
+export default PlanSelectorCard;
+
+function FreeSubscriptionPlanSelectorCard({
+ children,
+ subscription,
+ bonusData,
+ closeModal,
+ setLoading,
+ planPeriod,
+ togglePeriod,
+}) {
+ return (
+ <>
+
+ {t("CHOOSE_PLAN")}
+
+
+
+
+
+
+
+ {t("TWO_MONTHS_FREE")}
+
+
+ {children}
+ {hasAddOnBonus(bonusData) && (
+
+ )}
+ {hasAddOnBonus(bonusData) && (
+
+ )}
+
+
+ >
+ );
+}
+
+function PaidSubscriptionPlanSelectorCard({
+ children,
+ subscription,
+ bonusData,
+ closeModal,
+ usage,
+ planPeriod,
+ togglePeriod,
+ setLoading,
+}) {
+ return (
+ <>
+
+
+
+
+ {t("SUBSCRIPTION")}
+
+
+ {bytesInGB(subscription.storage, 2)}{" "}
+ {t("storage_unit.gb")}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ `1px solid ${theme.palette.divider}`}
+ p={1.5}
+ borderRadius={(theme) => `${theme.shape.borderRadius}px`}
+ >
+
+
+
+ {t("TWO_MONTHS_FREE")}
+
+
+ {children}
+
+
+
+
+ {!isSubscriptionCancelled(subscription)
+ ? t("RENEWAL_ACTIVE_SUBSCRIPTION_STATUS", {
+ date: subscription.expiryTime,
+ })
+ : t("RENEWAL_CANCELLED_SUBSCRIPTION_STATUS", {
+ date: subscription.expiryTime,
+ })}
+
+ {hasAddOnBonus(bonusData) && (
+
+ )}
+
+
+
+
+ >
+ );
+}
diff --git a/web/apps/photos/src/components/pages/gallery/PlanSelector/card/free.tsx b/web/apps/photos/src/components/pages/gallery/PlanSelector/card/free.tsx
deleted file mode 100644
index a2ac1090b774925a4e4ef4ad104bf049213ef4a6..0000000000000000000000000000000000000000
--- a/web/apps/photos/src/components/pages/gallery/PlanSelector/card/free.tsx
+++ /dev/null
@@ -1,64 +0,0 @@
-import { Stack } from "@mui/material";
-import Box from "@mui/material/Box";
-import Typography from "@mui/material/Typography";
-import { t } from "i18next";
-import { hasAddOnBonus } from "utils/billing";
-import { ManageSubscription } from "../manageSubscription";
-import { PeriodToggler } from "../periodToggler";
-import Plans from "../plans";
-import { BFAddOnRow } from "../plans/BfAddOnRow";
-
-export default function FreeSubscriptionPlanSelectorCard({
- plans,
- subscription,
- bonusData,
- closeModal,
- setLoading,
- planPeriod,
- togglePeriod,
- onPlanSelect,
-}) {
- return (
- <>
-
- {t("CHOOSE_PLAN")}
-
-
-
-
-
-
-
- {t("TWO_MONTHS_FREE")}
-
-
-
- {hasAddOnBonus(bonusData) && (
-
- )}
- {hasAddOnBonus(bonusData) && (
-
- )}
-
-
- >
- );
-}
diff --git a/web/apps/photos/src/components/pages/gallery/PlanSelector/card/index.tsx b/web/apps/photos/src/components/pages/gallery/PlanSelector/card/index.tsx
deleted file mode 100644
index 2ef3c361fdfc6ac85514130e51b5da31f6159212..0000000000000000000000000000000000000000
--- a/web/apps/photos/src/components/pages/gallery/PlanSelector/card/index.tsx
+++ /dev/null
@@ -1,202 +0,0 @@
-import log from "@/next/log";
-import { SUPPORT_EMAIL } from "@ente/shared/constants/urls";
-import { useLocalState } from "@ente/shared/hooks/useLocalState";
-import { LS_KEYS } from "@ente/shared/storage/localStorage";
-import { Link, Stack } from "@mui/material";
-import { PLAN_PERIOD } from "constants/gallery";
-import { t } from "i18next";
-import { AppContext } from "pages/_app";
-import { GalleryContext } from "pages/gallery";
-import { useContext, useEffect, useMemo, useState } from "react";
-import { Trans } from "react-i18next";
-import billingService from "services/billingService";
-import { Plan } from "types/billing";
-import { SetLoading } from "types/gallery";
-import {
- getLocalUserSubscription,
- hasMobileSubscription,
- hasPaidSubscription,
- hasStripeSubscription,
- isOnFreePlan,
- isSubscriptionActive,
- isSubscriptionCancelled,
- isUserSubscribedPlan,
- planForSubscription,
- updateSubscription,
-} from "utils/billing";
-import { getLocalUserDetails } from "utils/user";
-import { getTotalFamilyUsage, isPartOfFamily } from "utils/user/family";
-import FreeSubscriptionPlanSelectorCard from "./free";
-import PaidSubscriptionPlanSelectorCard from "./paid";
-
-interface Props {
- closeModal: any;
- setLoading: SetLoading;
-}
-
-function PlanSelectorCard(props: Props) {
- const subscription = useMemo(() => getLocalUserSubscription(), []);
- const [plans, setPlans] = useLocalState(LS_KEYS.PLANS);
-
- const [planPeriod, setPlanPeriod] = useState(
- subscription?.period || PLAN_PERIOD.MONTH,
- );
- const galleryContext = useContext(GalleryContext);
- const appContext = useContext(AppContext);
- const bonusData = useMemo(() => {
- const userDetails = getLocalUserDetails();
- if (!userDetails) {
- return null;
- }
- return userDetails.bonusData;
- }, []);
-
- const usage = useMemo(() => {
- const userDetails = getLocalUserDetails();
- if (!userDetails) {
- return 0;
- }
- return isPartOfFamily(userDetails.familyData)
- ? getTotalFamilyUsage(userDetails.familyData)
- : userDetails.usage;
- }, []);
-
- const togglePeriod = () => {
- setPlanPeriod((prevPeriod) =>
- prevPeriod === PLAN_PERIOD.MONTH
- ? PLAN_PERIOD.YEAR
- : PLAN_PERIOD.MONTH,
- );
- };
- function onReopenClick() {
- appContext.closeMessageDialog();
- galleryContext.showPlanSelectorModal();
- }
- useEffect(() => {
- const main = async () => {
- try {
- props.setLoading(true);
- const plans = await billingService.getPlans();
- if (isSubscriptionActive(subscription)) {
- const planNotListed =
- plans.filter((plan) =>
- isUserSubscribedPlan(plan, subscription),
- ).length === 0;
- if (
- subscription &&
- !isOnFreePlan(subscription) &&
- planNotListed
- ) {
- plans.push(planForSubscription(subscription));
- }
- }
- setPlans(plans);
- } catch (e) {
- log.error("plan selector modal open failed", e);
- props.closeModal();
- appContext.setDialogMessage({
- title: t("OPEN_PLAN_SELECTOR_MODAL_FAILED"),
- content: t("UNKNOWN_ERROR"),
- close: { text: t("CLOSE"), variant: "secondary" },
- proceed: {
- text: t("REOPEN_PLAN_SELECTOR_MODAL"),
- variant: "accent",
- action: onReopenClick,
- },
- });
- } finally {
- props.setLoading(false);
- }
- };
- main();
- }, []);
-
- async function onPlanSelect(plan: Plan) {
- if (
- !hasPaidSubscription(subscription) ||
- isSubscriptionCancelled(subscription)
- ) {
- try {
- props.setLoading(true);
- await billingService.buySubscription(plan.stripeID);
- } catch (e) {
- props.setLoading(false);
- appContext.setDialogMessage({
- title: t("ERROR"),
- content: t("SUBSCRIPTION_PURCHASE_FAILED"),
- close: { variant: "critical" },
- });
- }
- } else if (hasStripeSubscription(subscription)) {
- appContext.setDialogMessage({
- title: t("update_subscription_title"),
- content: t("UPDATE_SUBSCRIPTION_MESSAGE"),
- proceed: {
- text: t("UPDATE_SUBSCRIPTION"),
- action: updateSubscription.bind(
- null,
- plan,
- appContext.setDialogMessage,
- props.setLoading,
- props.closeModal,
- ),
- variant: "accent",
- },
- close: { text: t("CANCEL") },
- });
- } else if (hasMobileSubscription(subscription)) {
- appContext.setDialogMessage({
- title: t("CANCEL_SUBSCRIPTION_ON_MOBILE"),
- content: t("CANCEL_SUBSCRIPTION_ON_MOBILE_MESSAGE"),
- close: { variant: "secondary" },
- });
- } else {
- appContext.setDialogMessage({
- title: t("MANAGE_PLAN"),
- content: (
- ,
- }}
- values={{ emailID: SUPPORT_EMAIL }}
- />
- ),
- close: { variant: "secondary" },
- });
- }
- }
-
- return (
- <>
-
- {hasPaidSubscription(subscription) ? (
-
- ) : (
-
- )}
-
- >
- );
-}
-
-export default PlanSelectorCard;
diff --git a/web/apps/photos/src/components/pages/gallery/PlanSelector/card/paid.tsx b/web/apps/photos/src/components/pages/gallery/PlanSelector/card/paid.tsx
deleted file mode 100644
index 4ef76a491ff1888e582ca4446c4c8051e10b67fa..0000000000000000000000000000000000000000
--- a/web/apps/photos/src/components/pages/gallery/PlanSelector/card/paid.tsx
+++ /dev/null
@@ -1,112 +0,0 @@
-import { SpaceBetweenFlex } from "@ente/shared/components/Container";
-import Close from "@mui/icons-material/Close";
-import { IconButton, Stack } from "@mui/material";
-import Box from "@mui/material/Box";
-import Typography from "@mui/material/Typography";
-import { t } from "i18next";
-import { Trans } from "react-i18next";
-import {
- convertBytesToGBs,
- hasAddOnBonus,
- isSubscriptionCancelled,
-} from "utils/billing";
-import { ManageSubscription } from "../manageSubscription";
-import { PeriodToggler } from "../periodToggler";
-import Plans from "../plans";
-import { BFAddOnRow } from "../plans/BfAddOnRow";
-
-export default function PaidSubscriptionPlanSelectorCard({
- plans,
- subscription,
- bonusData,
- closeModal,
- usage,
- planPeriod,
- togglePeriod,
- onPlanSelect,
- setLoading,
-}) {
- return (
- <>
-
-
-
-
- {t("SUBSCRIPTION")}
-
-
- {convertBytesToGBs(subscription.storage, 2)}{" "}
- {t("GB")}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- `1px solid ${theme.palette.divider}`}
- p={1.5}
- borderRadius={(theme) => `${theme.shape.borderRadius}px`}
- >
-
-
-
- {t("TWO_MONTHS_FREE")}
-
-
-
-
-
-
-
- {!isSubscriptionCancelled(subscription)
- ? t("RENEWAL_ACTIVE_SUBSCRIPTION_STATUS", {
- date: subscription.expiryTime,
- })
- : t("RENEWAL_CANCELLED_SUBSCRIPTION_STATUS", {
- date: subscription.expiryTime,
- })}
-
- {hasAddOnBonus(bonusData) && (
-
- )}
-
-
-
-
- >
- );
-}
diff --git a/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/BfAddOnRow.tsx b/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/BfAddOnRow.tsx
index 8b0ce7bd5f6e42517e045eda617f864df56e1ab6..5f7e13deb8be4081bcdbda67b7ba8b025f0248b1 100644
--- a/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/BfAddOnRow.tsx
+++ b/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/BfAddOnRow.tsx
@@ -2,7 +2,7 @@ import { SpaceBetweenFlex } from "@ente/shared/components/Container";
import { Box, styled, Typography } from "@mui/material";
import { Trans } from "react-i18next";
-import { makeHumanReadableStorage } from "utils/billing";
+import { formattedStorageByteSize } from "utils/units";
const RowContainer = styled(SpaceBetweenFlex)(({ theme }) => ({
// gap: theme.spacing(1.5),
@@ -24,7 +24,7 @@ export function BFAddOnRow({ bonusData, closeModal }) {
({
- gap: theme.spacing(1.5),
- padding: theme.spacing(1.5, 1),
- cursor: "pointer",
- "&:hover .endIcon": {
- backgroundColor: "rgba(255,255,255,0.08)",
- },
-}));
-export function FreePlanRow({ closeModal }) {
- return (
-
-
- {t("FREE_PLAN_OPTION_LABEL")}
-
- {t("FREE_PLAN_DESCRIPTION")}
-
-
-
-
-
-
- );
-}
diff --git a/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/index.tsx b/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/index.tsx
index ed1a666edb14a89fdc603eb90bbe47f444b8f958..31e97c68e6a90c719ff308889a264b2ebf99f790 100644
--- a/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/index.tsx
+++ b/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/index.tsx
@@ -1,5 +1,9 @@
-import { Stack } from "@mui/material";
+import { SpaceBetweenFlex } from "@ente/shared/components/Container";
+import ArrowForward from "@mui/icons-material/ArrowForward";
+import { Box, IconButton, Stack, Typography, styled } from "@mui/material";
import { PLAN_PERIOD } from "constants/gallery";
+import { t } from "i18next";
+import type { PlansResponse } from "services/billingService";
import { Plan, Subscription } from "types/billing";
import { BonusData } from "types/user";
import {
@@ -8,11 +12,11 @@ import {
isPopularPlan,
isUserSubscribedPlan,
} from "utils/billing";
-import { FreePlanRow } from "./FreePlanRow";
+import { formattedStorageByteSize } from "utils/units";
import { PlanRow } from "./planRow";
interface Iprops {
- plans: Plan[];
+ plansResponse: PlansResponse | undefined;
planPeriod: PLAN_PERIOD;
subscription: Subscription;
bonusData?: BonusData;
@@ -21,30 +25,70 @@ interface Iprops {
}
const Plans = ({
- plans,
+ plansResponse,
planPeriod,
subscription,
bonusData,
onPlanSelect,
closeModal,
-}: Iprops) => (
-
- {plans
- ?.filter((plan) => plan.period === planPeriod)
- ?.map((plan) => (
-
- ))}
- {!hasPaidSubscription(subscription) && !hasAddOnBonus(bonusData) && (
-
- )}
-
-);
+}: Iprops) => {
+ const { freePlan, plans } = plansResponse ?? {};
+ return (
+
+ {plans
+ ?.filter((plan) => plan.period === planPeriod)
+ ?.map((plan) => (
+
+ ))}
+ {!hasPaidSubscription(subscription) &&
+ !hasAddOnBonus(bonusData) &&
+ freePlan && (
+
+ )}
+
+ );
+};
export default Plans;
+
+interface FreePlanRowProps {
+ storage: number;
+ closeModal: () => void;
+}
+
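+/**
+ * A row in the plan list describing the free plan and its storage limit.
+ *
+ * It is only shown (see the checks above) when the user neither has a paid
+ * subscription nor an add-on bonus.
+ */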
+const FreePlanRow: React.FC = ({ closeModal, storage }) => {
+ return (
+
+
+ {t("FREE_PLAN_OPTION_LABEL")}
+
+ {t("free_plan_description", {
+ storage: formattedStorageByteSize(storage),
+ })}
+
+
+
+
+
+
+ );
+};
+
+const FreePlanRow_ = styled(SpaceBetweenFlex)(({ theme }) => ({
+ gap: theme.spacing(1.5),
+ padding: theme.spacing(1.5, 1),
+ cursor: "pointer",
+ "&:hover .endIcon": {
+ backgroundColor: "rgba(255,255,255,0.08)",
+ },
+}));
diff --git a/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/planRow.tsx b/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/planRow.tsx
index 6363caee4d2504221cd2126eb96436aa68e81192..9f1351b120f34bd7c34b0a6bb1f0cbbacc21d8a1 100644
--- a/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/planRow.tsx
+++ b/web/apps/photos/src/components/pages/gallery/PlanSelector/plans/planRow.tsx
@@ -6,11 +6,8 @@ import { Badge } from "components/Badge";
import { PLAN_PERIOD } from "constants/gallery";
import { t } from "i18next";
import { Plan, Subscription } from "types/billing";
-import {
- convertBytesToGBs,
- hasPaidSubscription,
- isUserSubscribedPlan,
-} from "utils/billing";
+import { hasPaidSubscription, isUserSubscribedPlan } from "utils/billing";
+import { bytesInGB } from "utils/units";
interface Iprops {
plan: Plan;
@@ -66,11 +63,11 @@ export function PlanRow({
- {convertBytesToGBs(plan.storage)}
+ {bytesInGB(plan.storage)}
- {t("GB")}
+ {t("storage_unit.gb")}
{popular && !hasPaidSubscription(subscription) && (
{t("POPULAR")}
diff --git a/web/apps/photos/src/constants/mlConfig.ts b/web/apps/photos/src/constants/mlConfig.ts
deleted file mode 100644
index 929594e1c1210460faacd8b0a6407f98b015a6e0..0000000000000000000000000000000000000000
--- a/web/apps/photos/src/constants/mlConfig.ts
+++ /dev/null
@@ -1,56 +0,0 @@
-import { JobConfig } from "types/common/job";
-import { MLSearchConfig, MLSyncConfig } from "types/machineLearning";
-
-export const DEFAULT_ML_SYNC_JOB_CONFIG: JobConfig = {
- intervalSec: 5,
- // TODO: finalize this after seeing effects on and from machine sleep
- maxItervalSec: 960,
- backoffMultiplier: 2,
-};
-
-export const DEFAULT_ML_SYNC_CONFIG: MLSyncConfig = {
- batchSize: 200,
- imageSource: "Original",
- faceDetection: {
- method: "YoloFace",
- },
- faceCrop: {
- enabled: true,
- method: "ArcFace",
- padding: 0.25,
- maxSize: 256,
- blobOptions: {
- type: "image/jpeg",
- quality: 0.8,
- },
- },
- faceAlignment: {
- method: "ArcFace",
- },
- blurDetection: {
- method: "Laplacian",
- threshold: 15,
- },
- faceEmbedding: {
- method: "MobileFaceNet",
- faceSize: 112,
- generateTsne: true,
- },
- faceClustering: {
- method: "Hdbscan",
- minClusterSize: 3,
- minSamples: 5,
- clusterSelectionEpsilon: 0.6,
- clusterSelectionMethod: "leaf",
- minInputSize: 50,
- // maxDistanceInsideCluster: 0.4,
- generateDebugInfo: true,
- },
- mlVersion: 3,
-};
-
-export const DEFAULT_ML_SEARCH_CONFIG: MLSearchConfig = {
- enabled: false,
-};
-
-export const MAX_ML_SYNC_ERROR_COUNT = 1;
diff --git a/web/apps/photos/src/pages/_app.tsx b/web/apps/photos/src/pages/_app.tsx
index 0e80d0df9f03b6044846813381a2edcb8e961417..77e724d292e39f8ab410e10e043b430e745d7b50 100644
--- a/web/apps/photos/src/pages/_app.tsx
+++ b/web/apps/photos/src/pages/_app.tsx
@@ -53,6 +53,10 @@ import { createContext, useEffect, useRef, useState } from "react";
import LoadingBar from "react-top-loading-bar";
import DownloadManager from "services/download";
import exportService, { resumeExportsIfNeeded } from "services/export";
+import {
+ getMLSearchConfig,
+ updateMLSearchConfig,
+} from "services/machineLearning/machineLearningService";
import mlWorkManager from "services/machineLearning/mlWorkManager";
import {
getFamilyPortalRedirectURL,
@@ -64,10 +68,6 @@ import {
NotificationAttributes,
SetNotificationAttributes,
} from "types/Notification";
-import {
- getMLSearchConfig,
- updateMLSearchConfig,
-} from "utils/machineLearning/config";
import {
getUpdateAvailableForDownloadMessage,
getUpdateReadyToInstallMessage,
diff --git a/web/apps/photos/src/pages/gallery/index.tsx b/web/apps/photos/src/pages/gallery/index.tsx
index 20d95ce00f57f22aec61cc41ce3e03c050dd16a5..f870dfb76876518d1a3e2993b8fa2faa443e7de6 100644
--- a/web/apps/photos/src/pages/gallery/index.tsx
+++ b/web/apps/photos/src/pages/gallery/index.tsx
@@ -1,82 +1,36 @@
-import {
- SESSION_KEYS,
- clearKeys,
- getKey,
-} from "@ente/shared/storage/sessionStorage";
-import { Typography, styled } from "@mui/material";
-import { t } from "i18next";
-import { useRouter } from "next/router";
-import {
- createContext,
- useContext,
- useEffect,
- useMemo,
- useRef,
- useState,
-} from "react";
-import {
- constructEmailList,
- createAlbum,
- getAllLatestCollections,
- getAllLocalCollections,
- getCollectionSummaries,
- getFavItemIds,
- getHiddenItemsSummary,
- getSectionSummaries,
-} from "services/collectionService";
-import { getLocalFiles, syncFiles } from "services/fileService";
-
-import { checkSubscriptionPurchase } from "utils/billing";
-
-import EnteSpinner from "@ente/shared/components/EnteSpinner";
-import {
- isFirstLogin,
- justSignedUp,
- setIsFirstLogin,
- setJustSignedUp,
-} from "@ente/shared/storage/localStorage/helpers";
-import CollectionSelector, {
- CollectionSelectorAttributes,
-} from "components/Collections/CollectionSelector";
-import FullScreenDropZone from "components/FullScreenDropZone";
-import { LoadingOverlay } from "components/LoadingOverlay";
-import PhotoFrame from "components/PhotoFrame";
-import Sidebar from "components/Sidebar";
-import SelectedFileOptions from "components/pages/gallery/SelectedFileOptions";
-import { useDropzone } from "react-dropzone";
-import {
- isTokenValid,
- syncMapEnabled,
- validateKey,
-} from "services/userService";
-import { preloadImage } from "utils/common";
-import {
- FILE_OPS_TYPE,
- constructFileToCollectionMap,
- getSelectedFiles,
- getUniqueFiles,
- handleFileOps,
- mergeMetadata,
- sortFiles,
-} from "utils/file";
-
import log from "@/next/log";
import { APPS } from "@ente/shared/apps/constants";
import { CenteredFlex } from "@ente/shared/components/Container";
+import EnteSpinner from "@ente/shared/components/EnteSpinner";
import { PHOTOS_PAGES as PAGES } from "@ente/shared/constants/pages";
import { CustomError } from "@ente/shared/error";
-import useFileInput from "@ente/shared/hooks/useFileInput";
+import { useFileInput } from "@ente/shared/hooks/useFileInput";
import useMemoSingleThreaded from "@ente/shared/hooks/useMemoSingleThreaded";
import InMemoryStore, { MS_KEYS } from "@ente/shared/storage/InMemoryStore";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
-import { getToken } from "@ente/shared/storage/localStorage/helpers";
+import {
+ getToken,
+ isFirstLogin,
+ justSignedUp,
+ setIsFirstLogin,
+ setJustSignedUp,
+} from "@ente/shared/storage/localStorage/helpers";
+import {
+ SESSION_KEYS,
+ clearKeys,
+ getKey,
+} from "@ente/shared/storage/sessionStorage";
import { User } from "@ente/shared/user/types";
import { isPromise } from "@ente/shared/utils";
+import { Typography, styled } from "@mui/material";
import AuthenticateUserModal from "components/AuthenticateUserModal";
import Collections from "components/Collections";
import CollectionNamer, {
CollectionNamerAttributes,
} from "components/Collections/CollectionNamer";
+import CollectionSelector, {
+ CollectionSelectorAttributes,
+} from "components/Collections/CollectionSelector";
import ExportModal from "components/ExportModal";
import {
FilesDownloadProgress,
@@ -85,31 +39,65 @@ import {
import FixCreationTime, {
FixCreationTimeAttributes,
} from "components/FixCreationTime";
+import FullScreenDropZone from "components/FullScreenDropZone";
import GalleryEmptyState from "components/GalleryEmptyState";
+import { LoadingOverlay } from "components/LoadingOverlay";
+import PhotoFrame from "components/PhotoFrame";
import { ITEM_TYPE, TimeStampListItem } from "components/PhotoList";
import SearchResultInfo from "components/Search/SearchResultInfo";
+import Sidebar from "components/Sidebar";
import Uploader from "components/Upload/Uploader";
-import UploadInputs from "components/UploadSelectorInputs";
+import { UploadSelectorInputs } from "components/UploadSelectorInputs";
import { GalleryNavbar } from "components/pages/gallery/Navbar";
import PlanSelector from "components/pages/gallery/PlanSelector";
+import SelectedFileOptions from "components/pages/gallery/SelectedFileOptions";
import {
ALL_SECTION,
ARCHIVE_SECTION,
CollectionSummaryType,
- DUMMY_UNCATEGORIZED_COLLECTION,
HIDDEN_ITEMS_SECTION,
TRASH_SECTION,
} from "constants/collection";
import { SYNC_INTERVAL_IN_MICROSECONDS } from "constants/gallery";
+import { t } from "i18next";
+import { useRouter } from "next/router";
import { AppContext } from "pages/_app";
+import {
+ createContext,
+ useContext,
+ useEffect,
+ useMemo,
+ useRef,
+ useState,
+} from "react";
+import { useDropzone } from "react-dropzone";
import { clipService } from "services/clip-service";
-import { constructUserIDToEmailMap } from "services/collectionService";
+import {
+ constructEmailList,
+ constructUserIDToEmailMap,
+ createAlbum,
+ getAllLatestCollections,
+ getAllLocalCollections,
+ getCollectionSummaries,
+ getFavItemIds,
+ getHiddenItemsSummary,
+ getSectionSummaries,
+} from "services/collectionService";
import downloadManager from "services/download";
-import { syncEmbeddings, syncFileEmbeddings } from "services/embeddingService";
+import {
+ syncCLIPEmbeddings,
+ syncFaceEmbeddings,
+} from "services/embeddingService";
import { syncEntities } from "services/entityService";
+import { getLocalFiles, syncFiles } from "services/fileService";
import locationSearchService from "services/locationSearchService";
import { getLocalTrashedFiles, syncTrash } from "services/trashService";
import uploadManager from "services/upload/uploadManager";
+import {
+ isTokenValid,
+ syncMapEnabled,
+ validateKey,
+} from "services/userService";
import { Collection, CollectionSummaries } from "types/collection";
import { EnteFile } from "types/file";
import {
@@ -121,6 +109,7 @@ import {
} from "types/gallery";
import { Search, SearchResultSummary, UpdateSearch } from "types/search";
import { FamilyData } from "types/user";
+import { checkSubscriptionPurchase } from "utils/billing";
import {
COLLECTION_OPS_TYPE,
constructCollectionNameMap,
@@ -132,8 +121,19 @@ import {
splitNormalAndHiddenCollections,
} from "utils/collection";
import ComlinkSearchWorker from "utils/comlink/ComlinkSearchWorker";
+import { preloadImage } from "utils/common";
+import {
+ FILE_OPS_TYPE,
+ constructFileToCollectionMap,
+ getSelectedFiles,
+ getUniqueFiles,
+ handleFileOps,
+ mergeMetadata,
+ sortFiles,
+} from "utils/file";
import { isArchivedFile } from "utils/magicMetadata";
import { getSessionExpiredMessage } from "utils/ui";
+import { isInternalUserForML } from "utils/user";
import { getLocalFamilyData } from "utils/user/family";
export const DeadCenter = styled("div")`
@@ -202,8 +202,11 @@ export default function Gallery() {
const [isPhotoSwipeOpen, setIsPhotoSwipeOpen] = useState(false);
const {
+ // A function to call to get the props we should apply to the container,
getRootProps: getDragAndDropRootProps,
+ // ... the props we should apply to the <input> element,
getInputProps: getDragAndDropInputProps,
+ // ... and the files that we got.
acceptedFiles: dragAndDropFiles,
} = useDropzone({
noClick: true,
@@ -211,23 +214,23 @@ export default function Gallery() {
disabled: shouldDisableDropzone,
});
const {
- selectedFiles: fileSelectorFiles,
- open: openFileSelector,
getInputProps: getFileSelectorInputProps,
+ openSelector: openFileSelector,
+ selectedFiles: fileSelectorFiles,
} = useFileInput({
directory: false,
});
const {
- selectedFiles: folderSelectorFiles,
- open: openFolderSelector,
getInputProps: getFolderSelectorInputProps,
+ openSelector: openFolderSelector,
+ selectedFiles: folderSelectorFiles,
} = useFileInput({
directory: true,
});
const {
- selectedFiles: fileSelectorZipFiles,
- open: openZipFileSelector,
getInputProps: getZipFileSelectorInputProps,
+ openSelector: openZipFileSelector,
+ selectedFiles: fileSelectorZipFiles,
} = useFileInput({
directory: false,
accept: ".zip",
@@ -446,18 +449,8 @@ export default function Gallery() {
}
let collectionURL = "";
if (activeCollectionID !== ALL_SECTION) {
- collectionURL += "?collection=";
- if (activeCollectionID === ARCHIVE_SECTION) {
- collectionURL += t("ARCHIVE_SECTION_NAME");
- } else if (activeCollectionID === TRASH_SECTION) {
- collectionURL += t("TRASH");
- } else if (activeCollectionID === DUMMY_UNCATEGORIZED_COLLECTION) {
- collectionURL += t("UNCATEGORIZED");
- } else if (activeCollectionID === HIDDEN_ITEMS_SECTION) {
- collectionURL += t("HIDDEN_ITEMS_SECTION_NAME");
- } else {
- collectionURL += activeCollectionID;
- }
+ // TODO: Is this URL param even used?
+ collectionURL = `?collection=${activeCollectionID}`;
}
const href = `/gallery${collectionURL}`;
router.push(href, undefined, { shallow: true });
@@ -709,10 +702,10 @@ export default function Gallery() {
await syncTrash(collections, setTrashedFiles);
await syncEntities();
await syncMapEnabled();
- await syncEmbeddings();
+ await syncCLIPEmbeddings();
const electron = globalThis.electron;
- if (electron) {
- await syncFileEmbeddings();
+ if (isInternalUserForML() && electron) {
+ await syncFaceEmbeddings();
}
if (clipService.isPlatformSupported()) {
void clipService.scheduleImageEmbeddingExtraction();
@@ -1024,14 +1017,14 @@ export default function Gallery() {
setSelectedFiles: setSelected,
}}
>
-
-
+
{blockingLoad && (
diff --git a/web/apps/photos/src/pages/shared-albums/index.tsx b/web/apps/photos/src/pages/shared-albums/index.tsx
index ee6284d4a2fd1c4f13f52ba7a3dbe77002e859b8..ab35b23facf1e8ebc58329ceaab7c24a754105e5 100644
--- a/web/apps/photos/src/pages/shared-albums/index.tsx
+++ b/web/apps/photos/src/pages/shared-albums/index.tsx
@@ -1,40 +1,11 @@
import log from "@/next/log";
+import { logoutUser } from "@ente/accounts/services/user";
+import { APPS } from "@ente/shared/apps/constants";
import {
CenteredFlex,
SpaceBetweenFlex,
VerticallyCentered,
} from "@ente/shared/components/Container";
-import { CustomError, parseSharingErrorCodes } from "@ente/shared/error";
-import PhotoFrame from "components/PhotoFrame";
-import { ALL_SECTION } from "constants/collection";
-import { t } from "i18next";
-import { AppContext } from "pages/_app";
-import { useContext, useEffect, useMemo, useRef, useState } from "react";
-import {
- getLocalPublicCollection,
- getLocalPublicCollectionPassword,
- getLocalPublicFiles,
- getPublicCollection,
- getPublicCollectionUID,
- getReferralCode,
- removePublicCollectionWithFiles,
- removePublicFiles,
- savePublicCollectionPassword,
- syncPublicFiles,
- verifyPublicCollectionPassword,
-} from "services/publicCollectionService";
-import { Collection } from "types/collection";
-import { EnteFile } from "types/file";
-import {
- downloadSelectedFiles,
- getSelectedFiles,
- mergeMetadata,
- sortFiles,
-} from "utils/file";
-import { PublicCollectionGalleryContext } from "utils/publicCollectionGallery";
-
-import { logoutUser } from "@ente/accounts/services/user";
-import { APPS } from "@ente/shared/apps/constants";
import EnteSpinner from "@ente/shared/components/EnteSpinner";
import FormPaper from "@ente/shared/components/Form/FormPaper";
import FormPaperTitle from "@ente/shared/components/Form/FormPaper/Title";
@@ -46,7 +17,8 @@ import SingleInputForm, {
import { PHOTOS_PAGES as PAGES } from "@ente/shared/constants/pages";
import { ENTE_WEBSITE_LINK } from "@ente/shared/constants/urls";
import ComlinkCryptoWorker from "@ente/shared/crypto";
-import useFileInput from "@ente/shared/hooks/useFileInput";
+import { CustomError, parseSharingErrorCodes } from "@ente/shared/error";
+import { useFileInput } from "@ente/shared/hooks/useFileInput";
import AddPhotoAlternateOutlined from "@mui/icons-material/AddPhotoAlternateOutlined";
import FileDownloadOutlinedIcon from "@mui/icons-material/FileDownloadOutlined";
import MoreHoriz from "@mui/icons-material/MoreHoriz";
@@ -60,15 +32,35 @@ import {
} from "components/FilesDownloadProgress";
import FullScreenDropZone from "components/FullScreenDropZone";
import { LoadingOverlay } from "components/LoadingOverlay";
+import PhotoFrame from "components/PhotoFrame";
import { ITEM_TYPE, TimeStampListItem } from "components/PhotoList";
import UploadButton from "components/Upload/UploadButton";
import Uploader from "components/Upload/Uploader";
-import UploadSelectorInputs from "components/UploadSelectorInputs";
+import { UploadSelectorInputs } from "components/UploadSelectorInputs";
import SharedAlbumNavbar from "components/pages/sharedAlbum/Navbar";
import SelectedFileOptions from "components/pages/sharedAlbum/SelectedFileOptions";
+import { ALL_SECTION } from "constants/collection";
+import { t } from "i18next";
import { useRouter } from "next/router";
+import { AppContext } from "pages/_app";
+import { useContext, useEffect, useMemo, useRef, useState } from "react";
import { useDropzone } from "react-dropzone";
import downloadManager from "services/download";
+import {
+ getLocalPublicCollection,
+ getLocalPublicCollectionPassword,
+ getLocalPublicFiles,
+ getPublicCollection,
+ getPublicCollectionUID,
+ getReferralCode,
+ removePublicCollectionWithFiles,
+ removePublicFiles,
+ savePublicCollectionPassword,
+ syncPublicFiles,
+ verifyPublicCollectionPassword,
+} from "services/publicCollectionService";
+import { Collection } from "types/collection";
+import { EnteFile } from "types/file";
import {
SelectedState,
SetFilesDownloadProgressAttributes,
@@ -76,6 +68,13 @@ import {
UploadTypeSelectorIntent,
} from "types/gallery";
import { downloadCollectionFiles, isHiddenCollection } from "utils/collection";
+import {
+ downloadSelectedFiles,
+ getSelectedFiles,
+ mergeMetadata,
+ sortFiles,
+} from "utils/file";
+import { PublicCollectionGalleryContext } from "utils/publicCollectionGallery";
export default function PublicCollectionGallery() {
const token = useRef(null);
@@ -118,16 +117,16 @@ export default function PublicCollectionGallery() {
disabled: shouldDisableDropzone,
});
const {
- selectedFiles: fileSelectorFiles,
- open: openFileSelector,
getInputProps: getFileSelectorInputProps,
+ openSelector: openFileSelector,
+ selectedFiles: fileSelectorFiles,
} = useFileInput({
directory: false,
});
const {
- selectedFiles: folderSelectorFiles,
- open: openFolderSelector,
getInputProps: getFolderSelectorInputProps,
+ openSelector: openFolderSelector,
+ selectedFiles: folderSelectorFiles,
} = useFileInput({
directory: true,
});
@@ -543,14 +542,13 @@ export default function PublicCollectionGallery() {
photoListFooter,
}}
>
-
+
{
+ public async getPlans(): Promise {
const token = getToken();
try {
let response;
@@ -37,8 +47,7 @@ class billingService {
},
);
}
- const { plans } = response.data;
- return plans;
+ return response.data;
} catch (e) {
log.error("failed to get plans", e);
}
diff --git a/web/apps/photos/src/services/clip-service.ts b/web/apps/photos/src/services/clip-service.ts
index 703c89cf4bc78ea43551249bde2a8dc4d6e09fc5..aa724b4d5828183f77fae4ba527011c0d714a2c9 100644
--- a/web/apps/photos/src/services/clip-service.ts
+++ b/web/apps/photos/src/services/clip-service.ts
@@ -11,7 +11,7 @@ import { Embedding } from "types/embedding";
import { EnteFile } from "types/file";
import { getPersonalFiles } from "utils/file";
import downloadManager from "./download";
-import { getLocalEmbeddings, putEmbedding } from "./embeddingService";
+import { localCLIPEmbeddings, putEmbedding } from "./embeddingService";
import { getAllLocalFiles, getLocalFiles } from "./fileService";
/** Status of CLIP indexing on the images in the user's local library. */
@@ -195,7 +195,7 @@ class CLIPService {
return;
}
const localFiles = getPersonalFiles(await getAllLocalFiles(), user);
- const existingEmbeddings = await getLocalEmbeddings();
+ const existingEmbeddings = await localCLIPEmbeddings();
const pendingFiles = await getNonClipEmbeddingExtractedFiles(
localFiles,
existingEmbeddings,
@@ -394,7 +394,7 @@ export const computeClipMatchScore = async (
const initialIndexingStatus = async (): Promise => {
const user = getData(LS_KEYS.USER);
if (!user) throw new Error("Orphan CLIP indexing without a login");
- const allEmbeddings = await getLocalEmbeddings();
+ const allEmbeddings = await localCLIPEmbeddings();
const localFiles = getPersonalFiles(await getLocalFiles(), user);
const pendingFiles = await getNonClipEmbeddingExtractedFiles(
localFiles,
diff --git a/web/apps/photos/src/services/download/index.ts b/web/apps/photos/src/services/download/index.ts
index 7b0171da11023cef4f8b2c0bc78bba7a586bd3fb..a148f2bcfdb6c6f3ca1ebe74b82453dc5129dd2b 100644
--- a/web/apps/photos/src/services/download/index.ts
+++ b/web/apps/photos/src/services/download/index.ts
@@ -31,6 +31,16 @@ export type SourceURLs = {
isOriginal: boolean;
isRenderable: boolean;
type: "normal" | "livePhoto";
+ /**
+ * Best effort attempt at obtaining the MIME type.
+ *
+ * Known cases where it is missing:
+ *
+ * - Live photos (these have a different code path for obtaining the URL).
+ * - A video that passes the isPlayable test in the browser.
+ *
+ */
+ mimeType?: string;
};
export type OnDownloadProgress = (event: {
@@ -304,7 +314,12 @@ class DownloadManagerImpl {
if (cachedBlob) res = new Response(cachedBlob);
else {
res = await this.downloadClient.downloadFileStream(file);
- this.fileCache?.put(cacheKey, await res.blob());
+ // We don't have a files cache currently, so this was already a
+ // no-op. But even if we had a cache, this seems sus: res.blob()
+ // would consume the stream, and then the subsequent read of the
+ // stream below presumably wouldn't work.
+
+ // this.fileCache?.put(cacheKey, await res.blob());
}
const reader = res.body.getReader();
@@ -323,92 +338,61 @@ class DownloadManagerImpl {
decryptionHeader,
fileKey,
);
+
let data = new Uint8Array();
- // The following function handles each data chunk
- const push = () => {
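+ // Read the response stream chunk by chunk, accumulating bytes until
+ // we have at least decryptionChunkSize worth, decrypting and
+ // enqueueing the plaintext as we go.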
+ let more = true;
+ while (more) {
+ more = false;
+
// "done" is a Boolean and value a "Uint8Array"
- reader.read().then(async ({ done, value }) => {
- try {
- // Is there more data to read?
- if (!done) {
- downloadedBytes += value.byteLength;
- onDownloadProgress({
- loaded: downloadedBytes,
- total: contentLength,
- });
- const buffer = new Uint8Array(
- data.byteLength + value.byteLength,
+ const { done, value } = await reader.read();
+
+ // Is there more data to read?
+ if (!done) {
+ downloadedBytes += value.length;
+ onDownloadProgress({
+ loaded: downloadedBytes,
+ total: contentLength,
+ });
+
+ const buffer = new Uint8Array(
+ data.length + value.length,
+ );
+ buffer.set(new Uint8Array(data), 0);
+ buffer.set(new Uint8Array(value), data.length);
+
+ // Note that buffer.length might be a multiple of
+ // decryptionChunkSize. We let these accumulate, and
+ // drain it all with a nested while loop when done.
+
+ if (buffer.length > decryptionChunkSize) {
+ const { decryptedData } =
+ await this.cryptoWorker.decryptFileChunk(
+ buffer.slice(0, decryptionChunkSize),
+ pullState,
);
- buffer.set(new Uint8Array(data), 0);
- buffer.set(
- new Uint8Array(value),
- data.byteLength,
+ controller.enqueue(decryptedData);
+ data = buffer.slice(decryptionChunkSize);
+ } else {
+ data = buffer;
+ }
+ more = true;
+ } else {
+ while (data && data.length) {
+ const { decryptedData } =
+ await this.cryptoWorker.decryptFileChunk(
+ data.slice(0, decryptionChunkSize),
+ pullState,
);
- if (buffer.length > decryptionChunkSize) {
- const fileData = buffer.slice(
- 0,
- decryptionChunkSize,
- );
- try {
- const { decryptedData } =
- await this.cryptoWorker.decryptFileChunk(
- fileData,
- pullState,
- );
- controller.enqueue(decryptedData);
- data =
- buffer.slice(
- decryptionChunkSize,
- );
- } catch (e) {
- if (
- e.message ===
- CustomError.PROCESSING_FAILED
- ) {
- log.error(
- `Failed to process file ${file.id} from localID: ${file.metadata.localID} version: ${file.metadata.version} deviceFolder:${file.metadata.deviceFolder}`,
- e,
- );
- }
- throw e;
- }
- } else {
- data = buffer;
- }
- push();
- } else {
- if (data) {
- try {
- const { decryptedData } =
- await this.cryptoWorker.decryptFileChunk(
- data,
- pullState,
- );
- controller.enqueue(decryptedData);
- data = null;
- } catch (e) {
- if (
- e.message ===
- CustomError.PROCESSING_FAILED
- ) {
- log.error(
- `Failed to process file ${file.id} from localID: ${file.metadata.localID} version: ${file.metadata.version} deviceFolder:${file.metadata.deviceFolder}`,
- e,
- );
- }
- throw e;
- }
- }
- controller.close();
- }
- } catch (e) {
- log.error("Failed to process file chunk", e);
- controller.error(e);
+ controller.enqueue(decryptedData);
+ data =
+ data.length > decryptionChunkSize
+ ? data.slice(decryptionChunkSize)
+ : undefined;
}
- });
- };
-
- push();
+ controller.close();
+ }
+ }
} catch (e) {
log.error("Failed to process file stream", e);
controller.error(e);
@@ -475,27 +459,37 @@ async function getRenderableFileURL(
originalFileURL: string,
forceConvert: boolean,
): Promise {
- let srcURLs: SourceURLs["url"];
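+ // Return the original URL if the "conversion" was a no-op (the converted
+ // blob is the same object as the file blob); otherwise create and return
+ // an object URL for the converted blob, or undefined if there is none.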
+ const existingOrNewObjectURL = (convertedBlob: Blob) =>
+ convertedBlob
+ ? convertedBlob === fileBlob
+ ? originalFileURL
+ : URL.createObjectURL(convertedBlob)
+ : undefined;
+
+ let url: SourceURLs["url"];
+ let isOriginal: boolean;
+ let isRenderable: boolean;
+ let type: SourceURLs["type"] = "normal";
+ let mimeType: string | undefined;
+
switch (file.metadata.fileType) {
case FILE_TYPE.IMAGE: {
const convertedBlob = await getRenderableImage(
file.metadata.title,
fileBlob,
);
- const convertedURL = getFileObjectURL(
- originalFileURL,
- fileBlob,
- convertedBlob,
- );
- srcURLs = convertedURL;
+ const convertedURL = existingOrNewObjectURL(convertedBlob);
+ url = convertedURL;
+ isOriginal = convertedURL === originalFileURL;
+ isRenderable = !!convertedURL;
+ mimeType = convertedBlob?.type;
break;
}
case FILE_TYPE.LIVE_PHOTO: {
- srcURLs = await getRenderableLivePhotoURL(
- file,
- fileBlob,
- forceConvert,
- );
+ url = await getRenderableLivePhotoURL(file, fileBlob, forceConvert);
+ isOriginal = false;
+ isRenderable = false;
+ type = "livePhoto";
break;
}
case FILE_TYPE.VIDEO: {
@@ -504,52 +498,24 @@ async function getRenderableFileURL(
fileBlob,
forceConvert,
);
- const convertedURL = getFileObjectURL(
- originalFileURL,
- fileBlob,
- convertedBlob,
- );
- srcURLs = convertedURL;
+ const convertedURL = existingOrNewObjectURL(convertedBlob);
+ url = convertedURL;
+ isOriginal = convertedURL === originalFileURL;
+ isRenderable = !!convertedURL;
+ mimeType = convertedBlob?.type;
break;
}
default: {
- srcURLs = originalFileURL;
+ url = originalFileURL;
+ isOriginal = true;
+ isRenderable = false;
break;
}
}
- let isOriginal: boolean;
- if (file.metadata.fileType === FILE_TYPE.LIVE_PHOTO) {
- isOriginal = false;
- } else {
- isOriginal = (srcURLs as string) === (originalFileURL as string);
- }
-
- return {
- url: srcURLs,
- isOriginal,
- isRenderable:
- file.metadata.fileType !== FILE_TYPE.LIVE_PHOTO && !!srcURLs,
- type:
- file.metadata.fileType === FILE_TYPE.LIVE_PHOTO
- ? "livePhoto"
- : "normal",
- };
+ return { url, isOriginal, isRenderable, type, mimeType };
}
-const getFileObjectURL = (
- originalFileURL: string,
- originalBlob: Blob,
- convertedBlob: Blob,
-) => {
- const convertedURL = convertedBlob
- ? convertedBlob === originalBlob
- ? originalFileURL
- : URL.createObjectURL(convertedBlob)
- : null;
- return convertedURL;
-};
-
async function getRenderableLivePhotoURL(
file: EnteFile,
fileBlob: Blob,
@@ -610,10 +576,9 @@ async function getPlayableVideo(
if (!forceConvert && !runOnWeb && !isElectron()) {
return null;
}
- // TODO(MR): This might not work for very large (~ GB) videos. Test.
log.info(`Converting video ${videoNameTitle} to mp4`);
const convertedVideoData = await ffmpeg.convertToMP4(videoBlob);
- return new Blob([convertedVideoData]);
+ return new Blob([convertedVideoData], { type: "video/mp4" });
}
} catch (e) {
log.error("Video conversion failed", e);
diff --git a/web/apps/photos/src/services/embeddingService.ts b/web/apps/photos/src/services/embeddingService.ts
index 36af848424a9671e1dce266ce5d32c445fbfefee..a60ae2a61b14a8828f607939d49b1ccbbd0b921c 100644
--- a/web/apps/photos/src/services/embeddingService.ts
+++ b/web/apps/photos/src/services/embeddingService.ts
@@ -7,6 +7,7 @@ import HTTPService from "@ente/shared/network/HTTPService";
import { getEndpoint } from "@ente/shared/network/api";
import localForage from "@ente/shared/storage/localForage";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
+import { FileML } from "services/machineLearning/machineLearningService";
import type {
Embedding,
EmbeddingModel,
@@ -15,31 +16,30 @@ import type {
PutEmbeddingRequest,
} from "types/embedding";
import { EnteFile } from "types/file";
-import {
- getLatestVersionEmbeddings,
- getLatestVersionFileEmbeddings,
-} from "utils/embedding";
-import { FileML } from "utils/machineLearning/mldataMappers";
import { getLocalCollections } from "./collectionService";
import { getAllLocalFiles } from "./fileService";
import { getLocalTrashedFiles } from "./trashService";
-const ENDPOINT = getEndpoint();
-
const DIFF_LIMIT = 500;
-const EMBEDDINGS_TABLE_V1 = "embeddings";
-const EMBEDDINGS_TABLE = "embeddings_v2";
+/** Local storage key suffix for embedding sync times */
+const embeddingSyncTimeLSKeySuffix = "embedding_sync_time";
+/** Local storage key for CLIP embeddings. */
+const clipEmbeddingsLSKey = "embeddings_v2";
const FILE_EMBEDING_TABLE = "file_embeddings";
-const EMBEDDING_SYNC_TIME_TABLE = "embedding_sync_time";
-export const getAllLocalEmbeddings = async () => {
+/** Return all CLIP embeddings that we have available locally. */
+export const localCLIPEmbeddings = async () =>
+ (await storedCLIPEmbeddings()).filter(({ model }) => model === "onnx-clip");
+
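+/**
+ * Return the embeddings in the local embeddings table, creating it (and
+ * removing the older tables) if it doesn't exist yet.
+ */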
+const storedCLIPEmbeddings = async () => {
const embeddings: Array =
- await localForage.getItem(EMBEDDINGS_TABLE);
+ await localForage.getItem(clipEmbeddingsLSKey);
if (!embeddings) {
- await localForage.removeItem(EMBEDDINGS_TABLE_V1);
- await localForage.removeItem(EMBEDDING_SYNC_TIME_TABLE);
- await localForage.setItem(EMBEDDINGS_TABLE, []);
+ // Migrate
+ await localForage.removeItem("embeddings");
+ await localForage.removeItem("embedding_sync_time");
+ await localForage.setItem(clipEmbeddingsLSKey, []);
return [];
}
return embeddings;
@@ -54,15 +54,10 @@ export const getFileMLEmbeddings = async (): Promise => {
return embeddings;
};
-export const getLocalEmbeddings = async () => {
- const embeddings = await getAllLocalEmbeddings();
- return embeddings.filter((embedding) => embedding.model === "onnx-clip");
-};
-
const getModelEmbeddingSyncTime = async (model: EmbeddingModel) => {
return (
(await localForage.getItem(
- `${model}-${EMBEDDING_SYNC_TIME_TABLE}`,
+ `${model}-${embeddingSyncTimeLSKeySuffix}`,
)) ?? 0
);
};
@@ -71,13 +66,17 @@ const setModelEmbeddingSyncTime = async (
model: EmbeddingModel,
time: number,
) => {
- await localForage.setItem(`${model}-${EMBEDDING_SYNC_TIME_TABLE}`, time);
+ await localForage.setItem(`${model}-${embeddingSyncTimeLSKeySuffix}`, time);
};
-export const syncEmbeddings = async () => {
- const models: EmbeddingModel[] = ["onnx-clip"];
+/**
+ * Fetch new CLIP embeddings from the server and save them locally. Also prune
+ * local embeddings for any files that no longer exist locally.
+ */
+export const syncCLIPEmbeddings = async () => {
+ const model: EmbeddingModel = "onnx-clip";
try {
- let allEmbeddings = await getAllLocalEmbeddings();
+ let allEmbeddings = await storedCLIPEmbeddings();
const localFiles = await getAllLocalFiles();
const hiddenAlbums = await getLocalCollections("hidden");
const localTrashFiles = await getLocalTrashedFiles();
@@ -89,79 +88,75 @@ export const syncEmbeddings = async () => {
await cleanupDeletedEmbeddings(
allLocalFiles,
allEmbeddings,
- EMBEDDINGS_TABLE,
+ clipEmbeddingsLSKey,
);
log.info(`Syncing embeddings localCount: ${allEmbeddings.length}`);
- for (const model of models) {
- let modelLastSinceTime = await getModelEmbeddingSyncTime(model);
- log.info(
- `Syncing ${model} model's embeddings sinceTime: ${modelLastSinceTime}`,
- );
- let response: GetEmbeddingDiffResponse;
- do {
- response = await getEmbeddingsDiff(modelLastSinceTime, model);
- if (!response.diff?.length) {
- return;
- }
- const newEmbeddings = await Promise.all(
- response.diff.map(async (embedding) => {
- try {
- const {
- encryptedEmbedding,
- decryptionHeader,
- ...rest
- } = embedding;
- const worker =
- await ComlinkCryptoWorker.getInstance();
- const fileKey = fileIdToKeyMap.get(
- embedding.fileID,
- );
- if (!fileKey) {
- throw Error(CustomError.FILE_NOT_FOUND);
- }
- const decryptedData = await worker.decryptEmbedding(
- encryptedEmbedding,
- decryptionHeader,
- fileIdToKeyMap.get(embedding.fileID),
- );
- return {
- ...rest,
- embedding: decryptedData,
- } as Embedding;
- } catch (e) {
- let hasHiddenAlbums = false;
- if (e.message === CustomError.FILE_NOT_FOUND) {
- hasHiddenAlbums = hiddenAlbums?.length > 0;
- }
- log.error(
- `decryptEmbedding failed for file (hasHiddenAlbums: ${hasHiddenAlbums})`,
- e,
- );
+ let modelLastSinceTime = await getModelEmbeddingSyncTime(model);
+ log.info(
+ `Syncing ${model} model's embeddings sinceTime: ${modelLastSinceTime}`,
+ );
+ let response: GetEmbeddingDiffResponse;
+ do {
+ response = await getEmbeddingsDiff(modelLastSinceTime, model);
+ if (!response.diff?.length) {
+ return;
+ }
+ const newEmbeddings = await Promise.all(
+ response.diff.map(async (embedding) => {
+ try {
+ const {
+ encryptedEmbedding,
+ decryptionHeader,
+ ...rest
+ } = embedding;
+ const worker = await ComlinkCryptoWorker.getInstance();
+ const fileKey = fileIdToKeyMap.get(embedding.fileID);
+ if (!fileKey) {
+ throw Error(CustomError.FILE_NOT_FOUND);
}
- }),
- );
- allEmbeddings = getLatestVersionEmbeddings([
- ...allEmbeddings,
- ...newEmbeddings,
- ]);
- if (response.diff.length) {
- modelLastSinceTime = response.diff.slice(-1)[0].updatedAt;
- }
- await localForage.setItem(EMBEDDINGS_TABLE, allEmbeddings);
- await setModelEmbeddingSyncTime(model, modelLastSinceTime);
- log.info(
- `Syncing embeddings syncedEmbeddingsCount: ${allEmbeddings.length}`,
- );
- } while (response.diff.length === DIFF_LIMIT);
- }
+ const decryptedData = await worker.decryptEmbedding(
+ encryptedEmbedding,
+ decryptionHeader,
+ fileIdToKeyMap.get(embedding.fileID),
+ );
+
+ return {
+ ...rest,
+ embedding: decryptedData,
+ } as Embedding;
+ } catch (e) {
+ let hasHiddenAlbums = false;
+ if (e.message === CustomError.FILE_NOT_FOUND) {
+ hasHiddenAlbums = hiddenAlbums?.length > 0;
+ }
+ log.error(
+ `decryptEmbedding failed for file (hasHiddenAlbums: ${hasHiddenAlbums})`,
+ e,
+ );
+ }
+ }),
+ );
+ allEmbeddings = getLatestVersionEmbeddings([
+ ...allEmbeddings,
+ ...newEmbeddings,
+ ]);
+ if (response.diff.length) {
+ modelLastSinceTime = response.diff.slice(-1)[0].updatedAt;
+ }
+ await localForage.setItem(clipEmbeddingsLSKey, allEmbeddings);
+ await setModelEmbeddingSyncTime(model, modelLastSinceTime);
+ log.info(
+ `Syncing embeddings syncedEmbeddingsCount: ${allEmbeddings.length}`,
+ );
+ } while (response.diff.length === DIFF_LIMIT);
} catch (e) {
log.error("Sync embeddings failed", e);
}
};
-export const syncFileEmbeddings = async () => {
- const models: EmbeddingModel[] = ["file-ml-clip-face"];
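+/**
+ * Fetch new face embeddings from the server and save them locally. Also prune
+ * local embeddings for any files that no longer exist locally.
+ */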
+export const syncFaceEmbeddings = async () => {
+ const model: EmbeddingModel = "file-ml-clip-face";
try {
let allEmbeddings: FileML[] = await getFileMLEmbeddings();
const localFiles = await getAllLocalFiles();
@@ -178,69 +173,99 @@ export const syncFileEmbeddings = async () => {
FILE_EMBEDING_TABLE,
);
log.info(`Syncing embeddings localCount: ${allEmbeddings.length}`);
- for (const model of models) {
- let modelLastSinceTime = await getModelEmbeddingSyncTime(model);
- log.info(
- `Syncing ${model} model's embeddings sinceTime: ${modelLastSinceTime}`,
- );
- let response: GetEmbeddingDiffResponse;
- do {
- response = await getEmbeddingsDiff(modelLastSinceTime, model);
- if (!response.diff?.length) {
- return;
- }
- const newEmbeddings = await Promise.all(
- response.diff.map(async (embedding) => {
- try {
- const worker =
- await ComlinkCryptoWorker.getInstance();
- const fileKey = fileIdToKeyMap.get(
- embedding.fileID,
- );
- if (!fileKey) {
- throw Error(CustomError.FILE_NOT_FOUND);
- }
- const decryptedData = await worker.decryptMetadata(
- embedding.encryptedEmbedding,
- embedding.decryptionHeader,
- fileIdToKeyMap.get(embedding.fileID),
- );
- return {
- ...decryptedData,
- updatedAt: embedding.updatedAt,
- } as unknown as FileML;
- } catch (e) {
- let hasHiddenAlbums = false;
- if (e.message === CustomError.FILE_NOT_FOUND) {
- hasHiddenAlbums = hiddenAlbums?.length > 0;
- }
- log.error(
- `decryptEmbedding failed for file (hasHiddenAlbums: ${hasHiddenAlbums})`,
- e,
- );
+ let modelLastSinceTime = await getModelEmbeddingSyncTime(model);
+ log.info(
+ `Syncing ${model} model's embeddings sinceTime: ${modelLastSinceTime}`,
+ );
+ let response: GetEmbeddingDiffResponse;
+ do {
+ response = await getEmbeddingsDiff(modelLastSinceTime, model);
+ if (!response.diff?.length) {
+ return;
+ }
+ const newEmbeddings = await Promise.all(
+ response.diff.map(async (embedding) => {
+ try {
+ const worker = await ComlinkCryptoWorker.getInstance();
+ const fileKey = fileIdToKeyMap.get(embedding.fileID);
+ if (!fileKey) {
+ throw Error(CustomError.FILE_NOT_FOUND);
+ }
+ const decryptedData = await worker.decryptMetadata(
+ embedding.encryptedEmbedding,
+ embedding.decryptionHeader,
+ fileIdToKeyMap.get(embedding.fileID),
+ );
+
+ return {
+ ...decryptedData,
+ updatedAt: embedding.updatedAt,
+ } as unknown as FileML;
+ } catch (e) {
+ let hasHiddenAlbums = false;
+ if (e.message === CustomError.FILE_NOT_FOUND) {
+ hasHiddenAlbums = hiddenAlbums?.length > 0;
}
- }),
- );
- allEmbeddings = getLatestVersionFileEmbeddings([
- ...allEmbeddings,
- ...newEmbeddings,
- ]);
- if (response.diff.length) {
- modelLastSinceTime = response.diff.slice(-1)[0].updatedAt;
- }
- await localForage.setItem(FILE_EMBEDING_TABLE, allEmbeddings);
- await setModelEmbeddingSyncTime(model, modelLastSinceTime);
- log.info(
- `Syncing embeddings syncedEmbeddingsCount: ${allEmbeddings.length}`,
- );
- } while (response.diff.length === DIFF_LIMIT);
- }
+ log.error(
+ `decryptEmbedding failed for file (hasHiddenAlbums: ${hasHiddenAlbums})`,
+ e,
+ );
+ }
+ }),
+ );
+ allEmbeddings = getLatestVersionFileEmbeddings([
+ ...allEmbeddings,
+ ...newEmbeddings,
+ ]);
+ if (response.diff.length) {
+ modelLastSinceTime = response.diff.slice(-1)[0].updatedAt;
+ }
+ await localForage.setItem(FILE_EMBEDING_TABLE, allEmbeddings);
+ await setModelEmbeddingSyncTime(model, modelLastSinceTime);
+ log.info(
+ `Syncing embeddings syncedEmbeddingsCount: ${allEmbeddings.length}`,
+ );
+ } while (response.diff.length === DIFF_LIMIT);
} catch (e) {
log.error("Sync embeddings failed", e);
}
};
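+/**
+ * Deduplicate embeddings by fileID, keeping only the entry with the latest
+ * updatedAt for each file.
+ */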
+const getLatestVersionEmbeddings = (embeddings: Embedding[]) => {
+ const latestVersionEntities = new Map();
+ embeddings.forEach((embedding) => {
+ if (!embedding?.fileID) {
+ return;
+ }
+ const existingEmbeddings = latestVersionEntities.get(embedding.fileID);
+ if (
+ !existingEmbeddings ||
+ existingEmbeddings.updatedAt < embedding.updatedAt
+ ) {
+ latestVersionEntities.set(embedding.fileID, embedding);
+ }
+ });
+ return Array.from(latestVersionEntities.values());
+};
+
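+/** Variant of {@link getLatestVersionEmbeddings} for {@link FileML} entries. */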
+const getLatestVersionFileEmbeddings = (embeddings: FileML[]) => {
+ const latestVersionEntities = new Map();
+ embeddings.forEach((embedding) => {
+ if (!embedding?.fileID) {
+ return;
+ }
+ const existingEmbeddings = latestVersionEntities.get(embedding.fileID);
+ if (
+ !existingEmbeddings ||
+ existingEmbeddings.updatedAt < embedding.updatedAt
+ ) {
+ latestVersionEntities.set(embedding.fileID, embedding);
+ }
+ });
+ return Array.from(latestVersionEntities.values());
+};
+
export const getEmbeddingsDiff = async (
sinceTime: number,
model: EmbeddingModel,
@@ -251,7 +276,7 @@ export const getEmbeddingsDiff = async (
return;
}
const response = await HTTPService.get(
- `${ENDPOINT}/embeddings/diff`,
+ `${getEndpoint()}/embeddings/diff`,
{
sinceTime,
limit: DIFF_LIMIT,
@@ -280,7 +305,7 @@ export const putEmbedding = async (
throw Error(CustomError.TOKEN_MISSING);
}
const resp = await HTTPService.put(
- `${ENDPOINT}/embeddings`,
+ `${getEndpoint()}/embeddings`,
putEmbeddingReq,
null,
{
diff --git a/web/apps/photos/src/services/export/index.ts b/web/apps/photos/src/services/export/index.ts
index b02e05a4284d7e2fd9e0a58c4c988007703b89c4..3a68837e755dc4dce5923df20a4edc482769019d 100644
--- a/web/apps/photos/src/services/export/index.ts
+++ b/web/apps/photos/src/services/export/index.ts
@@ -3,12 +3,12 @@ import { decodeLivePhoto } from "@/media/live-photo";
import type { Metadata } from "@/media/types/file";
import { ensureElectron } from "@/next/electron";
import log from "@/next/log";
+import { wait } from "@/utils/promise";
import { CustomError } from "@ente/shared/error";
import { Events, eventBus } from "@ente/shared/events";
import { LS_KEYS, getData, setData } from "@ente/shared/storage/localStorage";
import { formatDateTimeShort } from "@ente/shared/time/format";
import { User } from "@ente/shared/user/types";
-import { wait } from "@ente/shared/utils";
import QueueProcessor, {
CancellationStatus,
RequestCanceller,
@@ -734,38 +734,31 @@ class ExportService {
const collectionExportName =
collectionIDExportNameMap.get(collectionID);
- await this.removeFileExportedRecord(exportDir, fileUID);
- try {
- if (isLivePhotoExportName(fileExportName)) {
- const { image, video } =
- parseLivePhotoExportName(fileExportName);
+ if (isLivePhotoExportName(fileExportName)) {
+ const { image, video } =
+ parseLivePhotoExportName(fileExportName);
- await moveToTrash(
- exportDir,
- collectionExportName,
- image,
- );
+ await moveToTrash(
+ exportDir,
+ collectionExportName,
+ image,
+ );
- await moveToTrash(
- exportDir,
- collectionExportName,
- video,
- );
- } else {
- await moveToTrash(
- exportDir,
- collectionExportName,
- fileExportName,
- );
- }
- } catch (e) {
- await this.addFileExportedRecord(
+ await moveToTrash(
exportDir,
- fileUID,
+ collectionExportName,
+ video,
+ );
+ } else {
+ await moveToTrash(
+ exportDir,
+ collectionExportName,
fileExportName,
);
- throw e;
}
+
+ await this.removeFileExportedRecord(exportDir, fileUID);
+
log.info(`Moved file id ${fileUID} to Trash`);
} catch (e) {
log.error("trashing failed for a file", e);
@@ -985,26 +978,21 @@ class ExportService {
file.metadata.title,
electron.fs.exists,
);
+ await this.saveMetadataFile(
+ collectionExportPath,
+ fileExportName,
+ file,
+ );
+ await writeStream(
+ electron,
+ `${collectionExportPath}/${fileExportName}`,
+ updatedFileStream,
+ );
await this.addFileExportedRecord(
exportDir,
fileUID,
fileExportName,
);
- try {
- await this.saveMetadataFile(
- collectionExportPath,
- fileExportName,
- file,
- );
- await writeStream(
- electron,
- `${collectionExportPath}/${fileExportName}`,
- updatedFileStream,
- );
- } catch (e) {
- await this.removeFileExportedRecord(exportDir, fileUID);
- throw e;
- }
}
} catch (e) {
log.error("download and save failed", e);
@@ -1032,52 +1020,46 @@ class ExportService {
livePhoto.videoFileName,
fs.exists,
);
+
const livePhotoExportName = getLivePhotoExportName(
imageExportName,
videoExportName,
);
- await this.addFileExportedRecord(
- exportDir,
- fileUID,
- livePhotoExportName,
+
+ const imageStream = generateStreamFromArrayBuffer(livePhoto.imageData);
+ await this.saveMetadataFile(
+ collectionExportPath,
+ imageExportName,
+ file,
+ );
+ await writeStream(
+ electron,
+ `${collectionExportPath}/${imageExportName}`,
+ imageStream,
+ );
+
+ const videoStream = generateStreamFromArrayBuffer(livePhoto.videoData);
+ await this.saveMetadataFile(
+ collectionExportPath,
+ videoExportName,
+ file,
);
try {
- const imageStream = generateStreamFromArrayBuffer(
- livePhoto.imageData,
- );
- await this.saveMetadataFile(
- collectionExportPath,
- imageExportName,
- file,
- );
await writeStream(
electron,
- `${collectionExportPath}/${imageExportName}`,
- imageStream,
+ `${collectionExportPath}/${videoExportName}`,
+ videoStream,
);
-
- const videoStream = generateStreamFromArrayBuffer(
- livePhoto.videoData,
- );
- await this.saveMetadataFile(
- collectionExportPath,
- videoExportName,
- file,
- );
- try {
- await writeStream(
- electron,
- `${collectionExportPath}/${videoExportName}`,
- videoStream,
- );
- } catch (e) {
- await fs.rm(`${collectionExportPath}/${imageExportName}`);
- throw e;
- }
} catch (e) {
- await this.removeFileExportedRecord(exportDir, fileUID);
+ await fs.rm(`${collectionExportPath}/${imageExportName}`);
throw e;
}
+
+ await this.addFileExportedRecord(
+ exportDir,
+ fileUID,
+ livePhotoExportName,
+ );
}
private async saveMetadataFile(
diff --git a/web/apps/photos/src/services/export/migration.ts b/web/apps/photos/src/services/export/migration.ts
index 9404ddde5b446d12ff7767f094649543bd5921f6..0c8de03e636ce3ac3169a6eb14f4e049729ce101 100644
--- a/web/apps/photos/src/services/export/migration.ts
+++ b/web/apps/photos/src/services/export/migration.ts
@@ -3,9 +3,9 @@ import { decodeLivePhoto } from "@/media/live-photo";
import { ensureElectron } from "@/next/electron";
import { nameAndExtension } from "@/next/file";
import log from "@/next/log";
+import { wait } from "@/utils/promise";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
import { User } from "@ente/shared/user/types";
-import { wait } from "@ente/shared/utils";
import { getLocalCollections } from "services/collectionService";
import downloadManager from "services/download";
import { getAllLocalFiles } from "services/fileService";
diff --git a/web/apps/photos/src/services/ffmpeg.ts b/web/apps/photos/src/services/ffmpeg.ts
index 4dfdb3f641f0a0cf7b603ef31339bab14f3f6588..85dd5db39754084b308648ecfc2d3ff3bdd59fba 100644
--- a/web/apps/photos/src/services/ffmpeg.ts
+++ b/web/apps/photos/src/services/ffmpeg.ts
@@ -9,6 +9,11 @@ import {
} from "constants/ffmpeg";
import { NULL_LOCATION } from "constants/upload";
import type { ParsedExtractedMetadata } from "types/metadata";
+import {
+ readConvertToMP4Done,
+ readConvertToMP4Stream,
+ writeConvertToMP4Stream,
+} from "utils/native-stream";
import type { DedicatedFFmpegWorker } from "worker/ffmpeg.worker";
import {
toDataOrPathOrZipEntry,
@@ -31,7 +36,7 @@ import {
*/
export const generateVideoThumbnailWeb = async (blob: Blob) =>
_generateVideoThumbnail((seekTime: number) =>
- ffmpegExecWeb(makeGenThumbnailCommand(seekTime), blob, "jpeg", 0),
+ ffmpegExecWeb(makeGenThumbnailCommand(seekTime), blob, "jpeg"),
);
const _generateVideoThumbnail = async (
@@ -70,7 +75,6 @@ export const generateVideoThumbnailNative = async (
makeGenThumbnailCommand(seekTime),
toDataOrPathOrZipEntry(desktopUploadItem),
"jpeg",
- 0,
),
);
@@ -98,8 +102,8 @@ const makeGenThumbnailCommand = (seekTime: number) => [
* of videos that the user is uploading.
*
* @param uploadItem A {@link File}, or the absolute path to a file on the
- * user's local filesytem. A path can only be provided when we're running in the
- * context of our desktop app.
+ * user's local file system. A path can only be provided when we're running in
+ * the context of our desktop app.
*/
export const extractVideoMetadata = async (
uploadItem: UploadItem,
@@ -107,12 +111,11 @@ export const extractVideoMetadata = async (
const command = extractVideoMetadataCommand;
const outputData =
uploadItem instanceof File
- ? await ffmpegExecWeb(command, uploadItem, "txt", 0)
+ ? await ffmpegExecWeb(command, uploadItem, "txt")
: await electron.ffmpegExec(
command,
toDataOrPathOrZipEntry(uploadItem),
"txt",
- 0,
);
return parseFFmpegExtractedMetadata(outputData);
@@ -219,10 +222,9 @@ const ffmpegExecWeb = async (
command: string[],
blob: Blob,
outputFileExtension: string,
- timeoutMs: number,
) => {
const worker = await workerFactory.lazy();
- return await worker.exec(command, blob, outputFileExtension, timeoutMs);
+ return await worker.exec(command, blob, outputFileExtension);
};
/**
@@ -234,61 +236,46 @@ const ffmpegExecWeb = async (
*
* @param blob The video blob.
*
- * @returns The mp4 video data.
+ * @returns The mp4 video blob.
*/
-export const convertToMP4 = async (blob: Blob) =>
- ffmpegExecNativeOrWeb(
- [
+export const convertToMP4 = async (blob: Blob): Promise => {
+ const electron = globalThis.electron;
+ if (electron) {
+ return convertToMP4Native(electron, blob);
+ } else {
+ const command = [
ffmpegPathPlaceholder,
"-i",
inputPathPlaceholder,
"-preset",
"ultrafast",
outputPathPlaceholder,
- ],
- blob,
- "mp4",
- 30 * 1000,
- );
+ ];
+ return ffmpegExecWeb(command, blob, "mp4");
+ }
+};
-/**
- * Run the given FFmpeg command using a native FFmpeg binary when we're running
- * in the context of our desktop app, otherwise using the browser based wasm
- * FFmpeg implemenation.
- *
- * See also: {@link ffmpegExecWeb}.
- */
-const ffmpegExecNativeOrWeb = async (
- command: string[],
- blob: Blob,
- outputFileExtension: string,
- timeoutMs: number,
-) => {
- const electron = globalThis.electron;
- if (electron)
- return electron.ffmpegExec(
- command,
- new Uint8Array(await blob.arrayBuffer()),
- outputFileExtension,
- timeoutMs,
- );
- else return ffmpegExecWeb(command, blob, outputFileExtension, timeoutMs);
+const convertToMP4Native = async (electron: Electron, blob: Blob) => {
+ const token = await writeConvertToMP4Stream(electron, blob);
+ const mp4Blob = await readConvertToMP4Stream(electron, token);
+ readConvertToMP4Done(electron, token);
+ return mp4Blob;
};
/** Lazily create a singleton instance of our worker */
class WorkerFactory {
private instance: Promise<Remote<DedicatedFFmpegWorker>>;
+ private createComlinkWorker = () =>
+ new ComlinkWorker<typeof DedicatedFFmpegWorker>(
+ "ffmpeg-worker",
+ new Worker(new URL("worker/ffmpeg.worker.ts", import.meta.url)),
+ );
+
async lazy() {
- if (!this.instance) this.instance = createComlinkWorker().remote;
+ if (!this.instance) this.instance = this.createComlinkWorker().remote;
return this.instance;
}
}
const workerFactory = new WorkerFactory();
-
-const createComlinkWorker = () =>
- new ComlinkWorker<typeof DedicatedFFmpegWorker>(
- "ffmpeg-worker",
- new Worker(new URL("worker/ffmpeg.worker.ts", import.meta.url)),
- );
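A minimal usage sketch of the reworked `convertToMP4` above (hypothetical caller, not part of this patch): on desktop the bytes round-trip through the native FFmpeg binary via the new native-stream helpers, otherwise the wasm worker handles the conversion.

```ts
// Hypothetical caller of the convertToMP4 added above; the object-URL
// handling here is illustrative only.
import { convertToMP4 } from "services/ffmpeg";

export const playableVideoURL = async (videoBlob: Blob): Promise<string> => {
    // Desktop: streamed through the native FFmpeg binary.
    // Browser: handled by the wasm worker via ffmpegExecWeb.
    const mp4Blob = await convertToMP4(videoBlob);
    return URL.createObjectURL(mp4Blob);
};
```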
diff --git a/web/apps/photos/src/services/heic-convert.ts b/web/apps/photos/src/services/heic-convert.ts
index c2ea198391dd2c91d4a05b795e728b5736efb408..2b37c31980eba70e9f3a15210e2d8aa6607ad9dc 100644
--- a/web/apps/photos/src/services/heic-convert.ts
+++ b/web/apps/photos/src/services/heic-convert.ts
@@ -1,9 +1,10 @@
+import { createHEICConvertComlinkWorker } from "@/media/worker/heic-convert";
+import type { DedicatedHEICConvertWorker } from "@/media/worker/heic-convert.worker";
import log from "@/next/log";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { CustomError } from "@ente/shared/error";
import { retryAsyncFunction } from "@ente/shared/utils";
import QueueProcessor from "@ente/shared/utils/queueProcessor";
-import { type DedicatedHEICConvertWorker } from "worker/heic-convert.worker";
/**
* Convert a HEIC image to a JPEG.
@@ -29,7 +30,7 @@ class HEICConverter {
if (this.workerPool.length > 0) return;
this.workerPool = [];
for (let i = 0; i < WORKER_POOL_SIZE; i++)
- this.workerPool.push(createComlinkWorker());
+ this.workerPool.push(createHEICConvertComlinkWorker());
}
async convert(fileBlob: Blob): Promise<Blob> {
@@ -79,7 +80,7 @@ class HEICConverter {
} catch (e) {
log.error("HEIC conversion failed", e);
convertWorker.terminate();
- this.workerPool.push(createComlinkWorker());
+ this.workerPool.push(createHEICConvertComlinkWorker());
throw e;
}
}, WAIT_TIME_BEFORE_NEXT_ATTEMPT_IN_MICROSECONDS),
@@ -99,9 +100,3 @@ class HEICConverter {
/** The singleton instance of {@link HEICConverter}. */
const converter = new HEICConverter();
-
-const createComlinkWorker = () =>
- new ComlinkWorker<typeof DedicatedHEICConvertWorker>(
- "heic-convert-worker",
- new Worker(new URL("worker/heic-convert.worker.ts", import.meta.url)),
- );
diff --git a/web/apps/photos/src/services/machineLearning/arcfaceAlignmentService.ts b/web/apps/photos/src/services/machineLearning/arcfaceAlignmentService.ts
index 99063b3f250d26d126c64d1fe6e29e4896c89b67..f23a065c830b86a598f219cf8303698f7d94e754 100644
--- a/web/apps/photos/src/services/machineLearning/arcfaceAlignmentService.ts
+++ b/web/apps/photos/src/services/machineLearning/arcfaceAlignmentService.ts
@@ -1,11 +1,13 @@
+import { Matrix } from "ml-matrix";
+import { Point } from "services/ml/geom";
import {
FaceAlignment,
FaceAlignmentMethod,
FaceAlignmentService,
FaceDetection,
Versioned,
-} from "types/machineLearning";
-import { getArcfaceAlignment } from "utils/machineLearning/faceAlign";
+} from "services/ml/types";
+import { getSimilarityTransformation } from "similarity-transformation";
class ArcfaceAlignmentService implements FaceAlignmentService {
public method: Versioned<FaceAlignmentMethod>;
@@ -23,3 +25,86 @@ class ArcfaceAlignmentService implements FaceAlignmentService {
}
export default new ArcfaceAlignmentService();
+
+const ARCFACE_LANDMARKS = [
+ [38.2946, 51.6963],
+ [73.5318, 51.5014],
+ [56.0252, 71.7366],
+ [56.1396, 92.2848],
+] as Array<[number, number]>;
+
+const ARCFACE_LANDMARKS_FACE_SIZE = 112;
+
+const ARC_FACE_5_LANDMARKS = [
+ [38.2946, 51.6963],
+ [73.5318, 51.5014],
+ [56.0252, 71.7366],
+ [41.5493, 92.3655],
+ [70.7299, 92.2041],
+] as Array<[number, number]>;
+
+export function getArcfaceAlignment(
+ faceDetection: FaceDetection,
+): FaceAlignment {
+ const landmarkCount = faceDetection.landmarks.length;
+ return getFaceAlignmentUsingSimilarityTransform(
+ faceDetection,
+ normalizeLandmarks(
+ landmarkCount === 5 ? ARC_FACE_5_LANDMARKS : ARCFACE_LANDMARKS,
+ ARCFACE_LANDMARKS_FACE_SIZE,
+ ),
+ );
+}
+
+function getFaceAlignmentUsingSimilarityTransform(
+ faceDetection: FaceDetection,
+ alignedLandmarks: Array<[number, number]>,
+ // alignmentMethod: Versioned
+): FaceAlignment {
+ const landmarksMat = new Matrix(
+ faceDetection.landmarks
+ .map((p) => [p.x, p.y])
+ .slice(0, alignedLandmarks.length),
+ ).transpose();
+ const alignedLandmarksMat = new Matrix(alignedLandmarks).transpose();
+
+ const simTransform = getSimilarityTransformation(
+ landmarksMat,
+ alignedLandmarksMat,
+ );
+
+ const RS = Matrix.mul(simTransform.rotation, simTransform.scale);
+ const TR = simTransform.translation;
+
+ const affineMatrix = [
+ [RS.get(0, 0), RS.get(0, 1), TR.get(0, 0)],
+ [RS.get(1, 0), RS.get(1, 1), TR.get(1, 0)],
+ [0, 0, 1],
+ ];
+
+ const size = 1 / simTransform.scale;
+ const meanTranslation = simTransform.toMean.sub(0.5).mul(size);
+ const centerMat = simTransform.fromMean.sub(meanTranslation);
+ const center = new Point(centerMat.get(0, 0), centerMat.get(1, 0));
+ const rotation = -Math.atan2(
+ simTransform.rotation.get(0, 1),
+ simTransform.rotation.get(0, 0),
+ );
+ // log.info({ affineMatrix, meanTranslation, centerMat, center, toMean: simTransform.toMean, fromMean: simTransform.fromMean, size });
+
+ return {
+ affineMatrix,
+ center,
+ size,
+ rotation,
+ };
+}
+
+function normalizeLandmarks(
+ landmarks: Array<[number, number]>,
+ faceSize: number,
+): Array<[number, number]> {
+ return landmarks.map((landmark) =>
+ landmark.map((p) => p / faceSize),
+ ) as Array<[number, number]>;
+}
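For clarity, a standalone sketch (not part of the patch) of what the landmark normalization above computes: the canonical ArcFace template points, defined on a 112×112 face, are scaled into [0, 1] before the similarity transform is fitted.

```ts
// Standalone restatement of normalizeLandmarks, for illustration only.
const ARCFACE_LANDMARKS_FACE_SIZE = 112;
const ARCFACE_LANDMARKS: Array<[number, number]> = [
    [38.2946, 51.6963],
    [73.5318, 51.5014],
    [56.0252, 71.7366],
    [56.1396, 92.2848],
];

const normalize = (landmarks: Array<[number, number]>, faceSize: number) =>
    landmarks.map(([x, y]) => [x / faceSize, y / faceSize] as [number, number]);

console.log(normalize(ARCFACE_LANDMARKS, ARCFACE_LANDMARKS_FACE_SIZE)[0]);
// ≈ [0.3419, 0.4616]: the first template landmark in normalized coordinates.
```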
diff --git a/web/apps/photos/src/services/machineLearning/arcfaceCropService.ts b/web/apps/photos/src/services/machineLearning/arcfaceCropService.ts
index cb6ccd02986822f411aa7050cc7d6d6fed1064e6..2075d6acf86d6b412f931cd8ff916cdb66d5da9f 100644
--- a/web/apps/photos/src/services/machineLearning/arcfaceCropService.ts
+++ b/web/apps/photos/src/services/machineLearning/arcfaceCropService.ts
@@ -1,13 +1,15 @@
+import { Box, enlargeBox } from "services/ml/geom";
import {
+ FaceAlignment,
FaceCrop,
FaceCropConfig,
FaceCropMethod,
FaceCropService,
FaceDetection,
Versioned,
-} from "types/machineLearning";
-import { getArcfaceAlignment } from "utils/machineLearning/faceAlign";
-import { getFaceCrop } from "utils/machineLearning/faceCrop";
+} from "services/ml/types";
+import { cropWithRotation } from "utils/image";
+import { getArcfaceAlignment } from "./arcfaceAlignmentService";
class ArcFaceCropService implements FaceCropService {
public method: Versioned<FaceCropMethod>;
@@ -32,3 +34,27 @@ class ArcFaceCropService implements FaceCropService {
}
export default new ArcFaceCropService();
+
+export function getFaceCrop(
+ imageBitmap: ImageBitmap,
+ alignment: FaceAlignment,
+ config: FaceCropConfig,
+): FaceCrop {
+ const alignmentBox = new Box({
+ x: alignment.center.x - alignment.size / 2,
+ y: alignment.center.y - alignment.size / 2,
+ width: alignment.size,
+ height: alignment.size,
+ }).round();
+ const scaleForPadding = 1 + config.padding * 2;
+ const paddedBox = enlargeBox(alignmentBox, scaleForPadding).round();
+ const faceImageBitmap = cropWithRotation(imageBitmap, paddedBox, 0, {
+ width: config.maxSize,
+ height: config.maxSize,
+ });
+
+ return {
+ image: faceImageBitmap,
+ imageBox: paddedBox,
+ };
+}
diff --git a/web/apps/photos/src/services/machineLearning/clusteringService.ts b/web/apps/photos/src/services/machineLearning/clusteringService.ts
index 03931b63b8595208c57df6ea68f43f943381b0cb..32c25f698c5e22e0cd57ea2c171d580311df3ced 100644
--- a/web/apps/photos/src/services/machineLearning/clusteringService.ts
+++ b/web/apps/photos/src/services/machineLearning/clusteringService.ts
@@ -8,7 +8,7 @@ import {
ClusteringResults,
HdbscanResults,
Versioned,
-} from "types/machineLearning";
+} from "services/ml/types";
class ClusteringService {
private dbscan: DBSCAN;
diff --git a/web/apps/photos/src/services/machineLearning/dbscanClusteringService.ts b/web/apps/photos/src/services/machineLearning/dbscanClusteringService.ts
index 33298eef3c248a2aebffc74267b089c0852fd4d4..57d181de4f5cf1902eb038136b03599a564a1cf6 100644
--- a/web/apps/photos/src/services/machineLearning/dbscanClusteringService.ts
+++ b/web/apps/photos/src/services/machineLearning/dbscanClusteringService.ts
@@ -6,7 +6,7 @@ import {
ClusteringService,
HdbscanResults,
Versioned,
-} from "types/machineLearning";
+} from "services/ml/types";
class DbscanClusteringService implements ClusteringService {
public method: Versioned<ClusteringMethod>;
diff --git a/web/apps/photos/src/services/machineLearning/faceService.ts b/web/apps/photos/src/services/machineLearning/faceService.ts
index 1dedadf151c933845ee03dfafa1f39af8cdfc68f..b7805b3360944f14a23417c558c9e9693a568374 100644
--- a/web/apps/photos/src/services/machineLearning/faceService.ts
+++ b/web/apps/photos/src/services/machineLearning/faceService.ts
@@ -1,22 +1,20 @@
import { openCache } from "@/next/blob-cache";
import log from "@/next/log";
+import mlIDbStorage from "services/ml/db";
import {
DetectedFace,
Face,
MLSyncContext,
MLSyncFileContext,
-} from "types/machineLearning";
-import { imageBitmapToBlob } from "utils/image";
-import {
- areFaceIdsSame,
- extractFaceImagesToFloat32,
+ type FaceAlignment,
+ type Versioned,
+} from "services/ml/types";
+import { imageBitmapToBlob, warpAffineFloat32List } from "utils/image";
+import ReaderService, {
getFaceId,
getLocalFile,
getOriginalImageBitmap,
- isDifferentOrOld,
-} from "utils/machineLearning";
-import mlIDbStorage from "utils/storage/mlIDbStorage";
-import ReaderService from "./readerService";
+} from "./readerService";
class FaceService {
async syncFileFaceDetections(
@@ -304,3 +302,58 @@ class FaceService {
}
export default new FaceService();
+
+export function areFaceIdsSame(ofFaces: Array<Face>, toFaces: Array<Face>) {
+ if (
+ (ofFaces === null || ofFaces === undefined) &&
+ (toFaces === null || toFaces === undefined)
+ ) {
+ return true;
+ }
+ return primitiveArrayEquals(
+ ofFaces?.map((f) => f.id),
+ toFaces?.map((f) => f.id),
+ );
+}
+
+function primitiveArrayEquals(a, b) {
+ return (
+ Array.isArray(a) &&
+ Array.isArray(b) &&
+ a.length === b.length &&
+ a.every((val, index) => val === b[index])
+ );
+}
+
+export function isDifferentOrOld(
+ method: Versioned<string>,
+ thanMethod: Versioned<string>,
+) {
+ return (
+ !method ||
+ method.value !== thanMethod.value ||
+ method.version < thanMethod.version
+ );
+}
+
+async function extractFaceImagesToFloat32(
+ faceAlignments: Array<FaceAlignment>,
+ faceSize: number,
+ image: ImageBitmap,
+): Promise<Float32Array> {
+ const faceData = new Float32Array(
+ faceAlignments.length * faceSize * faceSize * 3,
+ );
+ for (let i = 0; i < faceAlignments.length; i++) {
+ const alignedFace = faceAlignments[i];
+ const faceDataOffset = i * faceSize * faceSize * 3;
+ warpAffineFloat32List(
+ image,
+ alignedFace,
+ faceSize,
+ faceData,
+ faceDataOffset,
+ );
+ }
+ return faceData;
+}
diff --git a/web/apps/photos/src/services/machineLearning/hdbscanClusteringService.ts b/web/apps/photos/src/services/machineLearning/hdbscanClusteringService.ts
index 21e2118252918fddee9fc87adcc35b0c29d45587..0671b0bde1d5ae22fde22ad0a262176b97066126 100644
--- a/web/apps/photos/src/services/machineLearning/hdbscanClusteringService.ts
+++ b/web/apps/photos/src/services/machineLearning/hdbscanClusteringService.ts
@@ -6,7 +6,7 @@ import {
ClusteringService,
HdbscanResults,
Versioned,
-} from "types/machineLearning";
+} from "services/ml/types";
class HdbscanClusteringService implements ClusteringService {
public method: Versioned<ClusteringMethod>;
diff --git a/web/apps/photos/src/services/machineLearning/laplacianBlurDetectionService.ts b/web/apps/photos/src/services/machineLearning/laplacianBlurDetectionService.ts
index 3357e21ccd4634245393d89b7e24c68d2f5bbfb3..f1d7bf50083d06321c767ebc14daa9d402106a8d 100644
--- a/web/apps/photos/src/services/machineLearning/laplacianBlurDetectionService.ts
+++ b/web/apps/photos/src/services/machineLearning/laplacianBlurDetectionService.ts
@@ -3,7 +3,7 @@ import {
BlurDetectionService,
Face,
Versioned,
-} from "types/machineLearning";
+} from "services/ml/types";
import { createGrayscaleIntMatrixFromNormalized2List } from "utils/image";
import { mobileFaceNetFaceSize } from "./mobileFaceNetEmbeddingService";
diff --git a/web/apps/photos/src/services/machineLearning/machineLearningFactory.ts b/web/apps/photos/src/services/machineLearning/machineLearningFactory.ts
deleted file mode 100644
index 991ae68087ab48c3e85b2e789eebd2a358fbfd17..0000000000000000000000000000000000000000
--- a/web/apps/photos/src/services/machineLearning/machineLearningFactory.ts
+++ /dev/null
@@ -1,216 +0,0 @@
-import { haveWindow } from "@/next/env";
-import log from "@/next/log";
-import { ComlinkWorker } from "@/next/worker/comlink-worker";
-import { getDedicatedCryptoWorker } from "@ente/shared/crypto";
-import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
-import PQueue from "p-queue";
-import { EnteFile } from "types/file";
-import {
- BlurDetectionMethod,
- BlurDetectionService,
- ClusteringMethod,
- ClusteringService,
- Face,
- FaceAlignmentMethod,
- FaceAlignmentService,
- FaceCropMethod,
- FaceCropService,
- FaceDetectionMethod,
- FaceDetectionService,
- FaceEmbeddingMethod,
- FaceEmbeddingService,
- MLLibraryData,
- MLSyncConfig,
- MLSyncContext,
-} from "types/machineLearning";
-import { logQueueStats } from "utils/machineLearning";
-import arcfaceAlignmentService from "./arcfaceAlignmentService";
-import arcfaceCropService from "./arcfaceCropService";
-import dbscanClusteringService from "./dbscanClusteringService";
-import hdbscanClusteringService from "./hdbscanClusteringService";
-import laplacianBlurDetectionService from "./laplacianBlurDetectionService";
-import mobileFaceNetEmbeddingService from "./mobileFaceNetEmbeddingService";
-import yoloFaceDetectionService from "./yoloFaceDetectionService";
-
-export class MLFactory {
- public static getFaceDetectionService(
- method: FaceDetectionMethod,
- ): FaceDetectionService {
- if (method === "YoloFace") {
- return yoloFaceDetectionService;
- }
-
- throw Error("Unknon face detection method: " + method);
- }
-
- public static getFaceCropService(method: FaceCropMethod) {
- if (method === "ArcFace") {
- return arcfaceCropService;
- }
-
- throw Error("Unknon face crop method: " + method);
- }
-
- public static getFaceAlignmentService(
- method: FaceAlignmentMethod,
- ): FaceAlignmentService {
- if (method === "ArcFace") {
- return arcfaceAlignmentService;
- }
-
- throw Error("Unknon face alignment method: " + method);
- }
-
- public static getBlurDetectionService(
- method: BlurDetectionMethod,
- ): BlurDetectionService {
- if (method === "Laplacian") {
- return laplacianBlurDetectionService;
- }
-
- throw Error("Unknon blur detection method: " + method);
- }
-
- public static getFaceEmbeddingService(
- method: FaceEmbeddingMethod,
- ): FaceEmbeddingService {
- if (method === "MobileFaceNet") {
- return mobileFaceNetEmbeddingService;
- }
-
- throw Error("Unknon face embedding method: " + method);
- }
-
- public static getClusteringService(
- method: ClusteringMethod,
- ): ClusteringService {
- if (method === "Hdbscan") {
- return hdbscanClusteringService;
- }
- if (method === "Dbscan") {
- return dbscanClusteringService;
- }
-
- throw Error("Unknon clustering method: " + method);
- }
-
- public static getMLSyncContext(
- token: string,
- userID: number,
- config: MLSyncConfig,
- shouldUpdateMLVersion: boolean = true,
- ) {
- return new LocalMLSyncContext(
- token,
- userID,
- config,
- shouldUpdateMLVersion,
- );
- }
-}
-
-export class LocalMLSyncContext implements MLSyncContext {
- public token: string;
- public userID: number;
- public config: MLSyncConfig;
- public shouldUpdateMLVersion: boolean;
-
- public faceDetectionService: FaceDetectionService;
- public faceCropService: FaceCropService;
- public faceAlignmentService: FaceAlignmentService;
- public blurDetectionService: BlurDetectionService;
- public faceEmbeddingService: FaceEmbeddingService;
- public faceClusteringService: ClusteringService;
-
- public localFilesMap: Map<number, EnteFile>;
- public outOfSyncFiles: EnteFile[];
- public nSyncedFiles: number;
- public nSyncedFaces: number;
- public allSyncedFacesMap?: Map<number, Array<Face>>;
-
- public error?: Error;
-
- public mlLibraryData: MLLibraryData;
-
- public syncQueue: PQueue;
- // TODO: wheather to limit concurrent downloads
- // private downloadQueue: PQueue;
-
- private concurrency: number;
- private comlinkCryptoWorker: Array<
- ComlinkWorker<typeof DedicatedCryptoWorker>
- >;
- private enteWorkers: Array;
-
- constructor(
- token: string,
- userID: number,
- config: MLSyncConfig,
- shouldUpdateMLVersion: boolean = true,
- concurrency?: number,
- ) {
- this.token = token;
- this.userID = userID;
- this.config = config;
- this.shouldUpdateMLVersion = shouldUpdateMLVersion;
-
- this.faceDetectionService = MLFactory.getFaceDetectionService(
- this.config.faceDetection.method,
- );
- this.faceCropService = MLFactory.getFaceCropService(
- this.config.faceCrop.method,
- );
- this.faceAlignmentService = MLFactory.getFaceAlignmentService(
- this.config.faceAlignment.method,
- );
- this.blurDetectionService = MLFactory.getBlurDetectionService(
- this.config.blurDetection.method,
- );
- this.faceEmbeddingService = MLFactory.getFaceEmbeddingService(
- this.config.faceEmbedding.method,
- );
- this.faceClusteringService = MLFactory.getClusteringService(
- this.config.faceClustering.method,
- );
-
- this.outOfSyncFiles = [];
- this.nSyncedFiles = 0;
- this.nSyncedFaces = 0;
-
- this.concurrency = concurrency ?? getConcurrency();
-
- log.info("Using concurrency: ", this.concurrency);
- // timeout is added on downloads
- // timeout on queue will keep the operation open till worker is terminated
- this.syncQueue = new PQueue({ concurrency: this.concurrency });
- logQueueStats(this.syncQueue, "sync");
- // this.downloadQueue = new PQueue({ concurrency: 1 });
- // logQueueStats(this.downloadQueue, 'download');
-
- this.comlinkCryptoWorker = new Array(this.concurrency);
- this.enteWorkers = new Array(this.concurrency);
- }
-
- public async getEnteWorker(id: number): Promise {
- const wid = id % this.enteWorkers.length;
- console.log("getEnteWorker: ", id, wid);
- if (!this.enteWorkers[wid]) {
- this.comlinkCryptoWorker[wid] = getDedicatedCryptoWorker();
- this.enteWorkers[wid] = await this.comlinkCryptoWorker[wid].remote;
- }
-
- return this.enteWorkers[wid];
- }
-
- public async dispose() {
- this.localFilesMap = undefined;
- await this.syncQueue.onIdle();
- this.syncQueue.removeAllListeners();
- for (const enteComlinkWorker of this.comlinkCryptoWorker) {
- enteComlinkWorker?.terminate();
- }
- }
-}
-
-export const getConcurrency = () =>
- haveWindow() && Math.max(2, Math.ceil(navigator.hardwareConcurrency / 2));
diff --git a/web/apps/photos/src/services/machineLearning/machineLearningService.ts b/web/apps/photos/src/services/machineLearning/machineLearningService.ts
index 03a3b7e2c710134973e44232ce525a02b1637f3e..4ac17dbb8ce8b3ecca727760f666df074baf4fcb 100644
--- a/web/apps/photos/src/services/machineLearning/machineLearningService.ts
+++ b/web/apps/photos/src/services/machineLearning/machineLearningService.ts
@@ -1,25 +1,330 @@
+import { haveWindow } from "@/next/env";
import log from "@/next/log";
+import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { APPS } from "@ente/shared/apps/constants";
-import ComlinkCryptoWorker from "@ente/shared/crypto";
+import ComlinkCryptoWorker, {
+ getDedicatedCryptoWorker,
+} from "@ente/shared/crypto";
+import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { CustomError, parseUploadErrorCodes } from "@ente/shared/error";
-import { MAX_ML_SYNC_ERROR_COUNT } from "constants/mlConfig";
+import PQueue from "p-queue";
import downloadManager from "services/download";
import { putEmbedding } from "services/embeddingService";
import { getLocalFiles } from "services/fileService";
-import { EnteFile } from "types/file";
+import mlIDbStorage, {
+ ML_SEARCH_CONFIG_NAME,
+ ML_SYNC_CONFIG_NAME,
+ ML_SYNC_JOB_CONFIG_NAME,
+} from "services/ml/db";
import {
+ BlurDetectionMethod,
+ BlurDetectionService,
+ ClusteringMethod,
+ ClusteringService,
+ Face,
+ FaceAlignmentMethod,
+ FaceAlignmentService,
+ FaceCropMethod,
+ FaceCropService,
+ FaceDetection,
+ FaceDetectionMethod,
+ FaceDetectionService,
+ FaceEmbeddingMethod,
+ FaceEmbeddingService,
+ Landmark,
+ MLLibraryData,
+ MLSearchConfig,
+ MLSyncConfig,
MLSyncContext,
MLSyncFileContext,
MLSyncResult,
MlFileData,
-} from "types/machineLearning";
-import { getMLSyncConfig } from "utils/machineLearning/config";
-import { LocalFileMlDataToServerFileMl } from "utils/machineLearning/mldataMappers";
-import mlIDbStorage from "utils/storage/mlIDbStorage";
+} from "services/ml/types";
+import { EnteFile } from "types/file";
+import { isInternalUserForML } from "utils/user";
+import arcfaceAlignmentService from "./arcfaceAlignmentService";
+import arcfaceCropService from "./arcfaceCropService";
+import dbscanClusteringService from "./dbscanClusteringService";
import FaceService from "./faceService";
-import { MLFactory } from "./machineLearningFactory";
+import hdbscanClusteringService from "./hdbscanClusteringService";
+import laplacianBlurDetectionService from "./laplacianBlurDetectionService";
+import type { JobConfig } from "./mlWorkManager";
+import mobileFaceNetEmbeddingService from "./mobileFaceNetEmbeddingService";
import PeopleService from "./peopleService";
import ReaderService from "./readerService";
+import yoloFaceDetectionService from "./yoloFaceDetectionService";
+
+export const DEFAULT_ML_SYNC_JOB_CONFIG: JobConfig = {
+ intervalSec: 5,
+ // TODO: finalize this after seeing effects on and from machine sleep
+ maxItervalSec: 960,
+ backoffMultiplier: 2,
+};
+
+export const DEFAULT_ML_SYNC_CONFIG: MLSyncConfig = {
+ batchSize: 200,
+ imageSource: "Original",
+ faceDetection: {
+ method: "YoloFace",
+ },
+ faceCrop: {
+ enabled: true,
+ method: "ArcFace",
+ padding: 0.25,
+ maxSize: 256,
+ blobOptions: {
+ type: "image/jpeg",
+ quality: 0.8,
+ },
+ },
+ faceAlignment: {
+ method: "ArcFace",
+ },
+ blurDetection: {
+ method: "Laplacian",
+ threshold: 15,
+ },
+ faceEmbedding: {
+ method: "MobileFaceNet",
+ faceSize: 112,
+ generateTsne: true,
+ },
+ faceClustering: {
+ method: "Hdbscan",
+ minClusterSize: 3,
+ minSamples: 5,
+ clusterSelectionEpsilon: 0.6,
+ clusterSelectionMethod: "leaf",
+ minInputSize: 50,
+ // maxDistanceInsideCluster: 0.4,
+ generateDebugInfo: true,
+ },
+ mlVersion: 3,
+};
+
+export const DEFAULT_ML_SEARCH_CONFIG: MLSearchConfig = {
+ enabled: false,
+};
+
+export const MAX_ML_SYNC_ERROR_COUNT = 1;
+
+export async function getMLSyncJobConfig() {
+ return mlIDbStorage.getConfig(
+ ML_SYNC_JOB_CONFIG_NAME,
+ DEFAULT_ML_SYNC_JOB_CONFIG,
+ );
+}
+
+export async function getMLSyncConfig() {
+ return mlIDbStorage.getConfig(ML_SYNC_CONFIG_NAME, DEFAULT_ML_SYNC_CONFIG);
+}
+
+export async function getMLSearchConfig() {
+ if (isInternalUserForML()) {
+ return mlIDbStorage.getConfig(
+ ML_SEARCH_CONFIG_NAME,
+ DEFAULT_ML_SEARCH_CONFIG,
+ );
+ }
+ // Force disabled for everyone else while we finalize it to avoid redundant
+ // reindexing for users.
+ return DEFAULT_ML_SEARCH_CONFIG;
+}
+
+export async function updateMLSyncJobConfig(newConfig: JobConfig) {
+ return mlIDbStorage.putConfig(ML_SYNC_JOB_CONFIG_NAME, newConfig);
+}
+
+export async function updateMLSyncConfig(newConfig: MLSyncConfig) {
+ return mlIDbStorage.putConfig(ML_SYNC_CONFIG_NAME, newConfig);
+}
+
+export async function updateMLSearchConfig(newConfig: MLSearchConfig) {
+ return mlIDbStorage.putConfig(ML_SEARCH_CONFIG_NAME, newConfig);
+}
+
+export class MLFactory {
+ public static getFaceDetectionService(
+ method: FaceDetectionMethod,
+ ): FaceDetectionService {
+ if (method === "YoloFace") {
+ return yoloFaceDetectionService;
+ }
+
+ throw Error("Unknown face detection method: " + method);
+ }
+
+ public static getFaceCropService(method: FaceCropMethod) {
+ if (method === "ArcFace") {
+ return arcfaceCropService;
+ }
+
+ throw Error("Unknown face crop method: " + method);
+ }
+
+ public static getFaceAlignmentService(
+ method: FaceAlignmentMethod,
+ ): FaceAlignmentService {
+ if (method === "ArcFace") {
+ return arcfaceAlignmentService;
+ }
+
+ throw Error("Unknown face alignment method: " + method);
+ }
+
+ public static getBlurDetectionService(
+ method: BlurDetectionMethod,
+ ): BlurDetectionService {
+ if (method === "Laplacian") {
+ return laplacianBlurDetectionService;
+ }
+
+ throw Error("Unknown blur detection method: " + method);
+ }
+
+ public static getFaceEmbeddingService(
+ method: FaceEmbeddingMethod,
+ ): FaceEmbeddingService {
+ if (method === "MobileFaceNet") {
+ return mobileFaceNetEmbeddingService;
+ }
+
+ throw Error("Unknown face embedding method: " + method);
+ }
+
+ public static getClusteringService(
+ method: ClusteringMethod,
+ ): ClusteringService {
+ if (method === "Hdbscan") {
+ return hdbscanClusteringService;
+ }
+ if (method === "Dbscan") {
+ return dbscanClusteringService;
+ }
+
+ throw Error("Unknown clustering method: " + method);
+ }
+
+ public static getMLSyncContext(
+ token: string,
+ userID: number,
+ config: MLSyncConfig,
+ shouldUpdateMLVersion: boolean = true,
+ ) {
+ return new LocalMLSyncContext(
+ token,
+ userID,
+ config,
+ shouldUpdateMLVersion,
+ );
+ }
+}
+
+export class LocalMLSyncContext implements MLSyncContext {
+ public token: string;
+ public userID: number;
+ public config: MLSyncConfig;
+ public shouldUpdateMLVersion: boolean;
+
+ public faceDetectionService: FaceDetectionService;
+ public faceCropService: FaceCropService;
+ public faceAlignmentService: FaceAlignmentService;
+ public blurDetectionService: BlurDetectionService;
+ public faceEmbeddingService: FaceEmbeddingService;
+ public faceClusteringService: ClusteringService;
+
+ public localFilesMap: Map<number, EnteFile>;
+ public outOfSyncFiles: EnteFile[];
+ public nSyncedFiles: number;
+ public nSyncedFaces: number;
+ public allSyncedFacesMap?: Map<number, Array<Face>>;
+
+ public error?: Error;
+
+ public mlLibraryData: MLLibraryData;
+
+ public syncQueue: PQueue;
+ // TODO: whether to limit concurrent downloads
+ // private downloadQueue: PQueue;
+
+ private concurrency: number;
+ private comlinkCryptoWorker: Array<
+ ComlinkWorker<typeof DedicatedCryptoWorker>
+ >;
+ private enteWorkers: Array;
+
+ constructor(
+ token: string,
+ userID: number,
+ config: MLSyncConfig,
+ shouldUpdateMLVersion: boolean = true,
+ concurrency?: number,
+ ) {
+ this.token = token;
+ this.userID = userID;
+ this.config = config;
+ this.shouldUpdateMLVersion = shouldUpdateMLVersion;
+
+ this.faceDetectionService = MLFactory.getFaceDetectionService(
+ this.config.faceDetection.method,
+ );
+ this.faceCropService = MLFactory.getFaceCropService(
+ this.config.faceCrop.method,
+ );
+ this.faceAlignmentService = MLFactory.getFaceAlignmentService(
+ this.config.faceAlignment.method,
+ );
+ this.blurDetectionService = MLFactory.getBlurDetectionService(
+ this.config.blurDetection.method,
+ );
+ this.faceEmbeddingService = MLFactory.getFaceEmbeddingService(
+ this.config.faceEmbedding.method,
+ );
+ this.faceClusteringService = MLFactory.getClusteringService(
+ this.config.faceClustering.method,
+ );
+
+ this.outOfSyncFiles = [];
+ this.nSyncedFiles = 0;
+ this.nSyncedFaces = 0;
+
+ this.concurrency = concurrency ?? getConcurrency();
+
+ log.info("Using concurrency: ", this.concurrency);
+ // timeout is added on downloads
+ // timeout on queue will keep the operation open till worker is terminated
+ this.syncQueue = new PQueue({ concurrency: this.concurrency });
+ logQueueStats(this.syncQueue, "sync");
+ // this.downloadQueue = new PQueue({ concurrency: 1 });
+ // logQueueStats(this.downloadQueue, 'download');
+
+ this.comlinkCryptoWorker = new Array(this.concurrency);
+ this.enteWorkers = new Array(this.concurrency);
+ }
+
+ public async getEnteWorker(id: number): Promise {
+ const wid = id % this.enteWorkers.length;
+ console.log("getEnteWorker: ", id, wid);
+ if (!this.enteWorkers[wid]) {
+ this.comlinkCryptoWorker[wid] = getDedicatedCryptoWorker();
+ this.enteWorkers[wid] = await this.comlinkCryptoWorker[wid].remote;
+ }
+
+ return this.enteWorkers[wid];
+ }
+
+ public async dispose() {
+ this.localFilesMap = undefined;
+ await this.syncQueue.onIdle();
+ this.syncQueue.removeAllListeners();
+ for (const enteComlinkWorker of this.comlinkCryptoWorker) {
+ enteComlinkWorker?.terminate();
+ }
+ }
+}
+
+export const getConcurrency = () =>
+ haveWindow() && Math.max(2, Math.ceil(navigator.hardwareConcurrency / 2));
class MachineLearningService {
private localSyncContext: Promise<MLSyncContext>;
@@ -348,7 +653,11 @@ class MachineLearningService {
private async persistOnServer(mlFileData: MlFileData, enteFile: EnteFile) {
const serverMl = LocalFileMlDataToServerFileMl(mlFileData);
- log.info(mlFileData);
+ log.debug(() => ({ t: "Local ML file data", mlFileData }));
+ log.debug(() => ({
+ t: "Uploaded ML file data",
+ d: JSON.stringify(serverMl),
+ }));
const comlinkCryptoWorker = await ComlinkCryptoWorker.getInstance();
const { file: encryptedEmbeddingData } =
@@ -441,3 +750,160 @@ class MachineLearningService {
}
export default new MachineLearningService();
+
+export interface FileML extends ServerFileMl {
+ updatedAt: number;
+}
+
+class ServerFileMl {
+ public fileID: number;
+ public height?: number;
+ public width?: number;
+ public faceEmbedding: ServerFaceEmbeddings;
+
+ public constructor(
+ fileID: number,
+ faceEmbedding: ServerFaceEmbeddings,
+ height?: number,
+ width?: number,
+ ) {
+ this.fileID = fileID;
+ this.height = height;
+ this.width = width;
+ this.faceEmbedding = faceEmbedding;
+ }
+}
+
+class ServerFaceEmbeddings {
+ public faces: ServerFace[];
+ public version: number;
+ public client?: string;
+ public error?: boolean;
+
+ public constructor(
+ faces: ServerFace[],
+ version: number,
+ client?: string,
+ error?: boolean,
+ ) {
+ this.faces = faces;
+ this.version = version;
+ this.client = client;
+ this.error = error;
+ }
+}
+
+class ServerFace {
+ public faceID: string;
+ public embeddings: number[];
+ public detection: ServerDetection;
+ public score: number;
+ public blur: number;
+
+ public constructor(
+ faceID: string,
+ embeddings: number[],
+ detection: ServerDetection,
+ score: number,
+ blur: number,
+ ) {
+ this.faceID = faceID;
+ this.embeddings = embeddings;
+ this.detection = detection;
+ this.score = score;
+ this.blur = blur;
+ }
+}
+
+class ServerDetection {
+ public box: ServerFaceBox;
+ public landmarks: Landmark[];
+
+ public constructor(box: ServerFaceBox, landmarks: Landmark[]) {
+ this.box = box;
+ this.landmarks = landmarks;
+ }
+}
+
+class ServerFaceBox {
+ public xMin: number;
+ public yMin: number;
+ public width: number;
+ public height: number;
+
+ public constructor(
+ xMin: number,
+ yMin: number,
+ width: number,
+ height: number,
+ ) {
+ this.xMin = xMin;
+ this.yMin = yMin;
+ this.width = width;
+ this.height = height;
+ }
+}
+
+function LocalFileMlDataToServerFileMl(
+ localFileMlData: MlFileData,
+): ServerFileMl {
+ if (
+ localFileMlData.errorCount > 0 &&
+ localFileMlData.lastErrorMessage !== undefined
+ ) {
+ return null;
+ }
+ const imageDimensions = localFileMlData.imageDimensions;
+
+ const faces: ServerFace[] = [];
+ for (let i = 0; i < localFileMlData.faces.length; i++) {
+ const face: Face = localFileMlData.faces[i];
+ const faceID = face.id;
+ const embedding = face.embedding;
+ const score = face.detection.probability;
+ const blur = face.blurValue;
+ const detection: FaceDetection = face.detection;
+ const box = detection.box;
+ const landmarks = detection.landmarks;
+ const newBox = new ServerFaceBox(box.x, box.y, box.width, box.height);
+ const newLandmarks: Landmark[] = [];
+ for (let j = 0; j < landmarks.length; j++) {
+ newLandmarks.push({
+ x: landmarks[j].x,
+ y: landmarks[j].y,
+ } as Landmark);
+ }
+
+ const newFaceObject = new ServerFace(
+ faceID,
+ Array.from(embedding),
+ new ServerDetection(newBox, newLandmarks),
+ score,
+ blur,
+ );
+ faces.push(newFaceObject);
+ }
+ const faceEmbeddings = new ServerFaceEmbeddings(
+ faces,
+ 1,
+ localFileMlData.lastErrorMessage,
+ );
+ return new ServerFileMl(
+ localFileMlData.fileId,
+ faceEmbeddings,
+ imageDimensions.height,
+ imageDimensions.width,
+ );
+}
+
+export function logQueueStats(queue: PQueue, name: string) {
+ queue.on("active", () =>
+ log.info(
+ `queuestats: ${name}: Active, Size: ${queue.size} Pending: ${queue.pending}`,
+ ),
+ );
+ queue.on("idle", () => log.info(`queuestats: ${name}: Idle`));
+ queue.on("error", (error) =>
+ console.error(`queuestats: ${name}: Error, `, error),
+ );
+}
diff --git a/web/apps/photos/src/services/machineLearning/mlWorkManager.ts b/web/apps/photos/src/services/machineLearning/mlWorkManager.ts
index d1c5e9db5e9da651516c6154dca61a80989b48f7..700d358e043ad9091b6a8ff8523e5f5eab0b977d 100644
--- a/web/apps/photos/src/services/machineLearning/mlWorkManager.ts
+++ b/web/apps/photos/src/services/machineLearning/mlWorkManager.ts
@@ -5,20 +5,110 @@ import { eventBus, Events } from "@ente/shared/events";
import { getToken, getUserID } from "@ente/shared/storage/localStorage/helpers";
import debounce from "debounce";
import PQueue from "p-queue";
-import { JobResult } from "types/common/job";
+import { getMLSyncJobConfig } from "services/machineLearning/machineLearningService";
+import mlIDbStorage from "services/ml/db";
+import { MLSyncResult } from "services/ml/types";
import { EnteFile } from "types/file";
-import { MLSyncResult } from "types/machineLearning";
import { getDedicatedMLWorker } from "utils/comlink/ComlinkMLWorker";
-import { SimpleJob } from "utils/common/job";
-import { logQueueStats } from "utils/machineLearning";
-import { getMLSyncJobConfig } from "utils/machineLearning/config";
-import mlIDbStorage from "utils/storage/mlIDbStorage";
import { DedicatedMLWorker } from "worker/ml.worker";
+import { logQueueStats } from "./machineLearningService";
const LIVE_SYNC_IDLE_DEBOUNCE_SEC = 30;
const LIVE_SYNC_QUEUE_TIMEOUT_SEC = 300;
const LOCAL_FILES_UPDATED_DEBOUNCE_SEC = 30;
+export type JobState = "Scheduled" | "Running" | "NotScheduled";
+
+export interface JobConfig {
+ intervalSec: number;
+ maxItervalSec: number;
+ backoffMultiplier: number;
+}
+
+export interface JobResult {
+ shouldBackoff: boolean;
+}
+
+export class SimpleJob<R extends JobResult> {
+ private config: JobConfig;
+ private runCallback: () => Promise<R>;
+ private state: JobState;
+ private stopped: boolean;
+ private intervalSec: number;
+ private nextTimeoutId: ReturnType<typeof setTimeout>;
+
+ constructor(config: JobConfig, runCallback: () => Promise<R>) {
+ this.config = config;
+ this.runCallback = runCallback;
+ this.state = "NotScheduled";
+ this.stopped = true;
+ this.intervalSec = this.config.intervalSec;
+ }
+
+ public resetInterval() {
+ this.intervalSec = this.config.intervalSec;
+ }
+
+ public start() {
+ this.stopped = false;
+ this.resetInterval();
+ if (this.state !== "Running") {
+ this.scheduleNext();
+ } else {
+ log.info("Job already running, not scheduling");
+ }
+ }
+
+ private scheduleNext() {
+ if (this.state === "Scheduled" || this.nextTimeoutId) {
+ this.clearScheduled();
+ }
+
+ this.nextTimeoutId = setTimeout(
+ () => this.run(),
+ this.intervalSec * 1000,
+ );
+ this.state = "Scheduled";
+ log.info("Scheduled next job after: ", this.intervalSec);
+ }
+
+ async run() {
+ this.nextTimeoutId = undefined;
+ this.state = "Running";
+
+ try {
+ const jobResult = await this.runCallback();
+ if (jobResult && jobResult.shouldBackoff) {
+ this.intervalSec = Math.min(
+ this.config.maxItervalSec,
+ this.intervalSec * this.config.backoffMultiplier,
+ );
+ } else {
+ this.resetInterval();
+ }
+ log.info("Job completed");
+ } catch (e) {
+ console.error("Error while running Job: ", e);
+ } finally {
+ this.state = "NotScheduled";
+ !this.stopped && this.scheduleNext();
+ }
+ }
+
+ // Currently, the client is responsible for terminating a running job.
+ public stop() {
+ this.stopped = true;
+ this.clearScheduled();
+ }
+
+ private clearScheduled() {
+ clearTimeout(this.nextTimeoutId);
+ this.nextTimeoutId = undefined;
+ this.state = "NotScheduled";
+ log.info("Cleared next job");
+ }
+}
+
export interface MLSyncJobResult extends JobResult {
mlSyncResult: MLSyncResult;
}
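A short driving sketch for the SimpleJob that now lives in this file (hypothetical callback; the actual ML sync job is wired up further down in mlWorkManager.ts):

```ts
// Hypothetical job wiring; intervals are in seconds, as in JobConfig above.
import {
    SimpleJob,
    type JobConfig,
    type JobResult,
} from "services/machineLearning/mlWorkManager";

const config: JobConfig = { intervalSec: 5, maxItervalSec: 960, backoffMultiplier: 2 };

const job = new SimpleJob(config, async (): Promise<JobResult> => {
    const nothingToDo = true; // placeholder for real work
    // shouldBackoff: true doubles the delay (capped at maxItervalSec);
    // false resets it back to intervalSec.
    return { shouldBackoff: nothingToDo };
});

job.start(); // first run is scheduled intervalSec from now
// ...
job.stop(); // clears the pending timeout; an in-flight run still completes
```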
diff --git a/web/apps/photos/src/services/machineLearning/mobileFaceNetEmbeddingService.ts b/web/apps/photos/src/services/machineLearning/mobileFaceNetEmbeddingService.ts
index 818b8a5d123582df6084b3a2b739bd56ba183c45..1b2205801e4a704dd1003dc8d46835fef37bd385 100644
--- a/web/apps/photos/src/services/machineLearning/mobileFaceNetEmbeddingService.ts
+++ b/web/apps/photos/src/services/machineLearning/mobileFaceNetEmbeddingService.ts
@@ -4,7 +4,7 @@ import {
FaceEmbeddingMethod,
FaceEmbeddingService,
Versioned,
-} from "types/machineLearning";
+} from "services/ml/types";
export const mobileFaceNetFaceSize = 112;
diff --git a/web/apps/photos/src/services/machineLearning/peopleService.ts b/web/apps/photos/src/services/machineLearning/peopleService.ts
index ad7d7bcec33a73acec5fae5b37545f151e501aad..f7d5cf38a1e77daa63d8bec8fe23a2f2ee4fb64f 100644
--- a/web/apps/photos/src/services/machineLearning/peopleService.ts
+++ b/web/apps/photos/src/services/machineLearning/peopleService.ts
@@ -1,14 +1,8 @@
import log from "@/next/log";
-import { Face, MLSyncContext, Person } from "types/machineLearning";
-import {
- findFirstIfSorted,
- getAllFacesFromMap,
- getLocalFile,
- getOriginalImageBitmap,
- isDifferentOrOld,
-} from "utils/machineLearning";
-import mlIDbStorage from "utils/storage/mlIDbStorage";
-import FaceService from "./faceService";
+import mlIDbStorage from "services/ml/db";
+import { Face, MLSyncContext, Person } from "services/ml/types";
+import FaceService, { isDifferentOrOld } from "./faceService";
+import { getLocalFile, getOriginalImageBitmap } from "./readerService";
class PeopleService {
async syncPeopleIndex(syncContext: MLSyncContext) {
@@ -92,3 +86,28 @@ class PeopleService {
}
export default new PeopleService();
+
+function findFirstIfSorted<T>(
+ elements: Array<T>,
+ comparator: (a: T, b: T) => number,
+) {
+ if (!elements || elements.length < 1) {
+ return;
+ }
+ let first = elements[0];
+
+ for (let i = 1; i < elements.length; i++) {
+ const comp = comparator(elements[i], first);
+ if (comp < 0) {
+ first = elements[i];
+ }
+ }
+
+ return first;
+}
+
+function getAllFacesFromMap(allFacesMap: Map<number, Array<Face>>) {
+ const allFaces = [...allFacesMap.values()].flat();
+
+ return allFaces;
+}
diff --git a/web/apps/photos/src/services/machineLearning/readerService.ts b/web/apps/photos/src/services/machineLearning/readerService.ts
index 62aebdbd1fa993a0cd2c08e4ddd8774549e8bdf5..6ad4c80e816e639f2ba58ebc9618fa9e96451c07 100644
--- a/web/apps/photos/src/services/machineLearning/readerService.ts
+++ b/web/apps/photos/src/services/machineLearning/readerService.ts
@@ -1,11 +1,18 @@
import { FILE_TYPE } from "@/media/file-type";
+import { decodeLivePhoto } from "@/media/live-photo";
import log from "@/next/log";
-import { MLSyncContext, MLSyncFileContext } from "types/machineLearning";
+import PQueue from "p-queue";
+import DownloadManager from "services/download";
+import { getLocalFiles } from "services/fileService";
+import { Dimensions } from "services/ml/geom";
import {
- getLocalFileImageBitmap,
- getOriginalImageBitmap,
- getThumbnailImageBitmap,
-} from "utils/machineLearning";
+ DetectedFace,
+ MLSyncContext,
+ MLSyncFileContext,
+} from "services/ml/types";
+import { EnteFile } from "types/file";
+import { getRenderableImage } from "utils/file";
+import { clamp } from "utils/image";
class ReaderService {
async getImageBitmap(
@@ -55,3 +62,95 @@ class ReaderService {
}
}
export default new ReaderService();
+
+export async function getLocalFile(fileId: number) {
+ const localFiles = await getLocalFiles();
+ return localFiles.find((f) => f.id === fileId);
+}
+
+export function getFaceId(detectedFace: DetectedFace, imageDims: Dimensions) {
+ const xMin = clamp(
+ detectedFace.detection.box.x / imageDims.width,
+ 0.0,
+ 0.999999,
+ )
+ .toFixed(5)
+ .substring(2);
+ const yMin = clamp(
+ detectedFace.detection.box.y / imageDims.height,
+ 0.0,
+ 0.999999,
+ )
+ .toFixed(5)
+ .substring(2);
+ const xMax = clamp(
+ (detectedFace.detection.box.x + detectedFace.detection.box.width) /
+ imageDims.width,
+ 0.0,
+ 0.999999,
+ )
+ .toFixed(5)
+ .substring(2);
+ const yMax = clamp(
+ (detectedFace.detection.box.y + detectedFace.detection.box.height) /
+ imageDims.height,
+ 0.0,
+ 0.999999,
+ )
+ .toFixed(5)
+ .substring(2);
+
+ const rawFaceID = `${xMin}_${yMin}_${xMax}_${yMax}`;
+ const faceID = `${detectedFace.fileId}_${rawFaceID}`;
+
+ return faceID;
+}
+
+async function getImageBlobBitmap(blob: Blob): Promise<ImageBitmap> {
+ return await createImageBitmap(blob);
+}
+
+async function getOriginalFile(file: EnteFile, queue?: PQueue) {
+ let fileStream;
+ if (queue) {
+ fileStream = await queue.add(() => DownloadManager.getFile(file));
+ } else {
+ fileStream = await DownloadManager.getFile(file);
+ }
+ return new Response(fileStream).blob();
+}
+
+async function getOriginalConvertedFile(file: EnteFile, queue?: PQueue) {
+ const fileBlob = await getOriginalFile(file, queue);
+ if (file.metadata.fileType === FILE_TYPE.IMAGE) {
+ return await getRenderableImage(file.metadata.title, fileBlob);
+ } else {
+ const { imageFileName, imageData } = await decodeLivePhoto(
+ file.metadata.title,
+ fileBlob,
+ );
+ return await getRenderableImage(imageFileName, new Blob([imageData]));
+ }
+}
+
+export async function getOriginalImageBitmap(file: EnteFile, queue?: PQueue) {
+ const fileBlob = await getOriginalConvertedFile(file, queue);
+ log.info("[MLService] Got file: ", file.id.toString());
+ return getImageBlobBitmap(fileBlob);
+}
+
+export async function getThumbnailImageBitmap(file: EnteFile) {
+ const thumb = await DownloadManager.getThumbnail(file);
+ log.info("[MLService] Got thumbnail: ", file.id.toString());
+
+ return getImageBlobBitmap(new Blob([thumb]));
+}
+
+export async function getLocalFileImageBitmap(
+ enteFile: EnteFile,
+ localFile: globalThis.File,
+) {
+ let fileBlob = localFile as Blob;
+ fileBlob = await getRenderableImage(enteFile.metadata.title, fileBlob);
+ return getImageBlobBitmap(fileBlob);
+}
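A worked example (illustrative values only) of the face ID encoding used by getFaceId above: each normalized coordinate is clamped, rendered with five decimal places, and stripped of its leading "0.".

```ts
// Mirrors the clamp(...).toFixed(5).substring(2) steps above, for one box.
const encode = (value: number) =>
    Math.min(Math.max(value, 0.0), 0.999999).toFixed(5).substring(2);

// A detection with box { x: 100, y: 50, width: 200, height: 150 } in a
// 1000x1000 image, on file 42:
const parts = [100 / 1000, 50 / 1000, 300 / 1000, 200 / 1000].map(encode);
console.log(`42_${parts.join("_")}`); // "42_10000_05000_30000_20000"
```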
diff --git a/web/apps/photos/src/services/machineLearning/yoloFaceDetectionService.ts b/web/apps/photos/src/services/machineLearning/yoloFaceDetectionService.ts
index 4fa840749d4cfc1e98d1a4843df90281bcaeaa23..8856576bee6f9d9edec9c1cbea332d3f9aa1f37b 100644
--- a/web/apps/photos/src/services/machineLearning/yoloFaceDetectionService.ts
+++ b/web/apps/photos/src/services/machineLearning/yoloFaceDetectionService.ts
@@ -1,5 +1,18 @@
import { workerBridge } from "@/next/worker/worker-bridge";
import { euclidean } from "hdbscan";
+import {
+ Box,
+ Dimensions,
+ Point,
+ boxFromBoundingBox,
+ newBox,
+} from "services/ml/geom";
+import {
+ FaceDetection,
+ FaceDetectionMethod,
+ FaceDetectionService,
+ Versioned,
+} from "services/ml/types";
import {
Matrix,
applyToPoint,
@@ -7,20 +20,11 @@ import {
scale,
translate,
} from "transformation-matrix";
-import { Dimensions } from "types/image";
-import {
- FaceDetection,
- FaceDetectionMethod,
- FaceDetectionService,
- Versioned,
-} from "types/machineLearning";
import {
clamp,
getPixelBilinear,
normalizePixelBetween0And1,
} from "utils/image";
-import { newBox } from "utils/machineLearning";
-import { Box, Point } from "../../../thirdparty/face-api/classes";
class YoloFaceDetectionService implements FaceDetectionService {
public method: Versioned<FaceDetectionMethod>;
@@ -296,7 +300,7 @@ function getDetectionCenter(detection: FaceDetection) {
center.y += p.y;
});
- return center.div({ x: 4, y: 4 });
+ return new Point(center.x / 4, center.y / 4);
}
function computeTransformToBox(inBox: Box, toBox: Box): Matrix {
@@ -319,14 +323,10 @@ function transformBox(box: Box, transform: Matrix) {
const topLeft = transformPoint(box.topLeft, transform);
const bottomRight = transformPoint(box.bottomRight, transform);
- return newBoxFromPoints(topLeft.x, topLeft.y, bottomRight.x, bottomRight.y);
-}
-
-function newBoxFromPoints(
- left: number,
- top: number,
- right: number,
- bottom: number,
-) {
- return new Box({ left, top, right, bottom });
+ return boxFromBoundingBox({
+ left: topLeft.x,
+ top: topLeft.y,
+ right: bottomRight.x,
+ bottom: bottomRight.y,
+ });
}
diff --git a/web/apps/photos/src/utils/storage/mlIDbStorage.ts b/web/apps/photos/src/services/ml/db.ts
similarity index 98%
rename from web/apps/photos/src/utils/storage/mlIDbStorage.ts
rename to web/apps/photos/src/services/ml/db.ts
index 766c3ac9a98ca384653388ff569fed27c260eb07..90b2f4aa0745534f2aca29c9df0b0f13e4983ab0 100644
--- a/web/apps/photos/src/utils/storage/mlIDbStorage.ts
+++ b/web/apps/photos/src/services/ml/db.ts
@@ -1,11 +1,5 @@
import { haveWindow } from "@/next/env";
import log from "@/next/log";
-import {
- DEFAULT_ML_SEARCH_CONFIG,
- DEFAULT_ML_SYNC_CONFIG,
- DEFAULT_ML_SYNC_JOB_CONFIG,
- MAX_ML_SYNC_ERROR_COUNT,
-} from "constants/mlConfig";
import {
DBSchema,
IDBPDatabase,
@@ -15,8 +9,21 @@ import {
openDB,
} from "idb";
import isElectron from "is-electron";
-import { Face, MLLibraryData, MlFileData, Person } from "types/machineLearning";
-import { IndexStatus } from "types/machineLearning/ui";
+import {
+ DEFAULT_ML_SEARCH_CONFIG,
+ DEFAULT_ML_SYNC_CONFIG,
+ DEFAULT_ML_SYNC_JOB_CONFIG,
+ MAX_ML_SYNC_ERROR_COUNT,
+} from "services/machineLearning/machineLearningService";
+import { Face, MLLibraryData, MlFileData, Person } from "services/ml/types";
+
+export interface IndexStatus {
+ outOfSyncFilesExists: boolean;
+ nSyncedFiles: number;
+ nTotalFiles: number;
+ localFilesSynced: boolean;
+ peopleIndexSynced: boolean;
+}
interface Config {}
diff --git a/web/apps/photos/src/services/ml/geom.ts b/web/apps/photos/src/services/ml/geom.ts
new file mode 100644
index 0000000000000000000000000000000000000000..556e2b309d7cf4e8940bf211bd2ec382ff8749fc
--- /dev/null
+++ b/web/apps/photos/src/services/ml/geom.ts
@@ -0,0 +1,92 @@
+export class Point {
+ public x: number;
+ public y: number;
+
+ constructor(x: number, y: number) {
+ this.x = x;
+ this.y = y;
+ }
+}
+
+export interface Dimensions {
+ width: number;
+ height: number;
+}
+
+export interface IBoundingBox {
+ left: number;
+ top: number;
+ right: number;
+ bottom: number;
+}
+
+export interface IRect {
+ x: number;
+ y: number;
+ width: number;
+ height: number;
+}
+
+export function newBox(x: number, y: number, width: number, height: number) {
+ return new Box({ x, y, width, height });
+}
+
+export const boxFromBoundingBox = ({
+ left,
+ top,
+ right,
+ bottom,
+}: IBoundingBox) => {
+ return new Box({
+ x: left,
+ y: top,
+ width: right - left,
+ height: bottom - top,
+ });
+};
+
+export class Box implements IRect {
+ public x: number;
+ public y: number;
+ public width: number;
+ public height: number;
+
+ constructor({ x, y, width, height }: IRect) {
+ this.x = x;
+ this.y = y;
+ this.width = width;
+ this.height = height;
+ }
+
+ public get topLeft(): Point {
+ return new Point(this.x, this.y);
+ }
+
+ public get bottomRight(): Point {
+ return new Point(this.x + this.width, this.y + this.height);
+ }
+
+ public round(): Box {
+ const [x, y, width, height] = [
+ this.x,
+ this.y,
+ this.width,
+ this.height,
+ ].map((val) => Math.round(val));
+ return new Box({ x, y, width, height });
+ }
+}
+
+export function enlargeBox(box: Box, factor: number = 1.5) {
+ const center = new Point(box.x + box.width / 2, box.y + box.height / 2);
+
+ const size = new Point(box.width, box.height);
+ const newHalfSize = new Point((factor * size.x) / 2, (factor * size.y) / 2);
+
+ return boxFromBoundingBox({
+ left: center.x - newHalfSize.x,
+ top: center.y - newHalfSize.y,
+ right: center.x + newHalfSize.x,
+ bottom: center.y + newHalfSize.y,
+ });
+}
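A quick usage sketch for the new geometry helpers (values illustrative, not part of the patch):

```ts
import { Box, boxFromBoundingBox, enlargeBox } from "services/ml/geom";

const box = new Box({ x: 10, y: 20, width: 100, height: 50 });
console.log(box.topLeft, box.bottomRight); // Point(10, 20), Point(110, 70)

// enlargeBox grows the box about its center: with factor 1.5 a 100x50 box
// becomes 150x75, so the top-left moves to (-15, 7.5).
console.log(enlargeBox(box, 1.5)); // Box { x: -15, y: 7.5, width: 150, height: 75 }

// boxFromBoundingBox converts left/top/right/bottom into x/y/width/height.
console.log(boxFromBoundingBox({ left: 10, top: 20, right: 110, bottom: 70 }));
```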
diff --git a/web/apps/photos/src/types/machineLearning/index.ts b/web/apps/photos/src/services/ml/types.ts
similarity index 98%
rename from web/apps/photos/src/types/machineLearning/index.ts
rename to web/apps/photos/src/services/ml/types.ts
index 2c3961cdf6bdac44d326a2f50fc1181e6cd1bec4..422cf9d4aa8d524c8eedc44bac4d24d86efc460e 100644
--- a/web/apps/photos/src/types/machineLearning/index.ts
+++ b/web/apps/photos/src/services/ml/types.ts
@@ -1,8 +1,8 @@
import { DebugInfo } from "hdbscan";
import PQueue from "p-queue";
+import { Dimensions } from "services/ml/geom";
import { EnteFile } from "types/file";
-import { Dimensions } from "types/image";
-import { Box, Point } from "../../../thirdparty/face-api/classes";
+import { Box, Point } from "./geom";
export interface MLSyncResult {
nOutOfSyncFiles: number;
diff --git a/web/apps/photos/src/services/searchService.ts b/web/apps/photos/src/services/searchService.ts
index 96c574b9ddf144f4311dfdf267888796d91d4f27..a212fc9dcf33111bda3d52506ef649caf8563c48 100644
--- a/web/apps/photos/src/services/searchService.ts
+++ b/web/apps/photos/src/services/searchService.ts
@@ -2,10 +2,12 @@ import { FILE_TYPE } from "@/media/file-type";
import log from "@/next/log";
import * as chrono from "chrono-node";
import { t } from "i18next";
+import { getMLSyncConfig } from "services/machineLearning/machineLearningService";
+import mlIDbStorage from "services/ml/db";
+import { Person } from "services/ml/types";
import { Collection } from "types/collection";
import { EntityType, LocationTag, LocationTagData } from "types/entity";
import { EnteFile } from "types/file";
-import { Person } from "types/machineLearning";
import {
ClipSearchScores,
DateValue,
@@ -16,12 +18,9 @@ import {
} from "types/search";
import ComlinkSearchWorker from "utils/comlink/ComlinkSearchWorker";
import { getUniqueFiles } from "utils/file";
-import { getAllPeople } from "utils/machineLearning";
-import { getMLSyncConfig } from "utils/machineLearning/config";
import { getFormattedDate } from "utils/search";
-import mlIDbStorage from "utils/storage/mlIDbStorage";
import { clipService, computeClipMatchScore } from "./clip-service";
-import { getLocalEmbeddings } from "./embeddingService";
+import { localCLIPEmbeddings } from "./embeddingService";
import { getLatestEntities } from "./entityService";
import locationSearchService, { City } from "./locationSearchService";
@@ -376,7 +375,7 @@ const searchClip = async (
await clipService.getTextEmbeddingIfAvailable(searchPhrase);
if (!textEmbedding) return undefined;
- const imageEmbeddings = await getLocalEmbeddings();
+ const imageEmbeddings = await localCLIPEmbeddings();
const clipSearchResult = new Map(
(
await Promise.all(
@@ -430,3 +429,14 @@ function convertSuggestionToSearchQuery(option: Suggestion): Search {
return { clip: option.value as ClipSearchScores };
}
}
+
+async function getAllPeople(limit: number = undefined) {
+ let people: Array<Person> = await mlIDbStorage.getAllPeople();
+ // await mlPeopleStore.iterate((person) => {
+ // people.push(person);
+ // });
+ people = people ?? [];
+ return people
+ .sort((p1, p2) => p2.files.length - p1.files.length)
+ .slice(0, limit);
+}
diff --git a/web/apps/photos/src/services/upload/thumbnail.ts b/web/apps/photos/src/services/upload/thumbnail.ts
index 1dd448376ec6d5cf7b79c8bcf464de34f9833cea..10da88a6504b850d6663c1e445edb419ddbae798 100644
--- a/web/apps/photos/src/services/upload/thumbnail.ts
+++ b/web/apps/photos/src/services/upload/thumbnail.ts
@@ -1,7 +1,9 @@
import { FILE_TYPE, type FileTypeInfo } from "@/media/file-type";
+import { scaledImageDimensions } from "@/media/image";
import log from "@/next/log";
import { type Electron } from "@/next/types/ipc";
-import { withTimeout } from "@ente/shared/utils";
+import { ensure } from "@/utils/ensure";
+import { withTimeout } from "@/utils/promise";
import * as ffmpeg from "services/ffmpeg";
import { heicToJPEG } from "services/heic-convert";
import { toDataOrPathOrZipEntry, type DesktopUploadItem } from "./types";
@@ -30,10 +32,10 @@ export const generateThumbnailWeb = async (
fileTypeInfo: FileTypeInfo,
): Promise<Uint8Array> =>
fileTypeInfo.fileType === FILE_TYPE.IMAGE
- ? await generateImageThumbnailUsingCanvas(blob, fileTypeInfo)
+ ? await generateImageThumbnailWeb(blob, fileTypeInfo)
: await generateVideoThumbnailWeb(blob);
-const generateImageThumbnailUsingCanvas = async (
+const generateImageThumbnailWeb = async (
blob: Blob,
{ extension }: FileTypeInfo,
) => {
@@ -42,8 +44,12 @@ const generateImageThumbnailUsingCanvas = async (
blob = await heicToJPEG(blob);
}
+ return generateImageThumbnailUsingCanvas(blob);
+};
+
+const generateImageThumbnailUsingCanvas = async (blob: Blob) => {
const canvas = document.createElement("canvas");
- const canvasCtx = canvas.getContext("2d");
+ const canvasCtx = ensure(canvas.getContext("2d"));
const imageURL = URL.createObjectURL(blob);
await withTimeout(
@@ -53,7 +59,7 @@ const generateImageThumbnailUsingCanvas = async (
image.onload = () => {
try {
URL.revokeObjectURL(imageURL);
- const { width, height } = scaledThumbnailDimensions(
+ const { width, height } = scaledImageDimensions(
image.width,
image.height,
maxThumbnailDimension,
@@ -62,7 +68,7 @@ const generateImageThumbnailUsingCanvas = async (
canvas.height = height;
canvasCtx.drawImage(image, 0, 0, width, height);
resolve(undefined);
- } catch (e) {
+ } catch (e: unknown) {
reject(e);
}
};
@@ -73,6 +79,32 @@ const generateImageThumbnailUsingCanvas = async (
return await compressedJPEGData(canvas);
};
+const compressedJPEGData = async (canvas: HTMLCanvasElement) => {
+ let blob: Blob | undefined | null;
+ let prevSize = Number.MAX_SAFE_INTEGER;
+ let quality = 0.7;
+
+ do {
+ if (blob) prevSize = blob.size;
+ blob = await new Promise((resolve) => {
+ canvas.toBlob((blob) => resolve(blob), "image/jpeg", quality);
+ });
+ quality -= 0.1;
+ } while (
+ quality >= 0.5 &&
+ blob &&
+ blob.size > maxThumbnailSize &&
+ percentageSizeDiff(blob.size, prevSize) >= 10
+ );
+
+ return new Uint8Array(await ensure(blob).arrayBuffer());
+};
+
+const percentageSizeDiff = (
+ newThumbnailSize: number,
+ oldThumbnailSize: number,
+) => ((oldThumbnailSize - newThumbnailSize) * 100) / oldThumbnailSize;
+
const generateVideoThumbnailWeb = async (blob: Blob) => {
try {
return await ffmpeg.generateVideoThumbnailWeb(blob);
@@ -85,9 +117,9 @@ const generateVideoThumbnailWeb = async (blob: Blob) => {
}
};
-const generateVideoThumbnailUsingCanvas = async (blob: Blob) => {
+export const generateVideoThumbnailUsingCanvas = async (blob: Blob) => {
const canvas = document.createElement("canvas");
- const canvasCtx = canvas.getContext("2d");
+ const canvasCtx = ensure(canvas.getContext("2d"));
const videoURL = URL.createObjectURL(blob);
await withTimeout(
@@ -98,7 +130,7 @@ const generateVideoThumbnailUsingCanvas = async (blob: Blob) => {
video.addEventListener("loadeddata", () => {
try {
URL.revokeObjectURL(videoURL);
- const { width, height } = scaledThumbnailDimensions(
+ const { width, height } = scaledImageDimensions(
video.videoWidth,
video.videoHeight,
maxThumbnailDimension,
@@ -118,59 +150,6 @@ const generateVideoThumbnailUsingCanvas = async (blob: Blob) => {
return await compressedJPEGData(canvas);
};
-/**
- * Compute the size of the thumbnail to create for an image with the given
- * {@link width} and {@link height}.
- *
- * This function calculates a new size of an image for limiting it to maximum
- * width and height (both specified by {@link maxDimension}), while maintaining
- * aspect ratio.
- *
- * It returns `{0, 0}` for invalid inputs.
- */
-const scaledThumbnailDimensions = (
- width: number,
- height: number,
- maxDimension: number,
-): { width: number; height: number } => {
- if (width === 0 || height === 0) return { width: 0, height: 0 };
- const widthScaleFactor = maxDimension / width;
- const heightScaleFactor = maxDimension / height;
- const scaleFactor = Math.min(widthScaleFactor, heightScaleFactor);
- const thumbnailDimensions = {
- width: Math.round(width * scaleFactor),
- height: Math.round(height * scaleFactor),
- };
- if (thumbnailDimensions.width === 0 || thumbnailDimensions.height === 0)
- return { width: 0, height: 0 };
- return thumbnailDimensions;
-};
-
-const compressedJPEGData = async (canvas: HTMLCanvasElement) => {
- let blob: Blob;
- let prevSize = Number.MAX_SAFE_INTEGER;
- let quality = 0.7;
-
- do {
- if (blob) prevSize = blob.size;
- blob = await new Promise((resolve) => {
- canvas.toBlob((blob) => resolve(blob), "image/jpeg", quality);
- });
- quality -= 0.1;
- } while (
- quality >= 0.5 &&
- blob.size > maxThumbnailSize &&
- percentageSizeDiff(blob.size, prevSize) >= 10
- );
-
- return new Uint8Array(await blob.arrayBuffer());
-};
-
-const percentageSizeDiff = (
- newThumbnailSize: number,
- oldThumbnailSize: number,
-) => ((oldThumbnailSize - newThumbnailSize) * 100) / oldThumbnailSize;
-
/**
* Generate a JPEG thumbnail for the given file or path using native tools.
*
diff --git a/web/apps/photos/src/services/upload/uploadHttpClient.ts b/web/apps/photos/src/services/upload/uploadHttpClient.ts
index e8ae6de977f5a835e9c124840e1d94339374bbd5..c23a58b520ae11500ba19b3cec37103ed8eb5510 100644
--- a/web/apps/photos/src/services/upload/uploadHttpClient.ts
+++ b/web/apps/photos/src/services/upload/uploadHttpClient.ts
@@ -1,9 +1,9 @@
import log from "@/next/log";
+import { wait } from "@/utils/promise";
import { CustomError, handleUploadError } from "@ente/shared/error";
import HTTPService from "@ente/shared/network/HTTPService";
import { getEndpoint, getUploadEndpoint } from "@ente/shared/network/api";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
-import { wait } from "@ente/shared/utils";
import { EnteFile } from "types/file";
import { MultipartUploadURLs, UploadFile, UploadURL } from "./uploadService";
diff --git a/web/apps/photos/src/services/upload/uploadManager.ts b/web/apps/photos/src/services/upload/uploadManager.ts
index 38fd7037bed3075e6270b34a0bda827308925913..0ab9ecff0fabb8269b456aa3d79bb3798dbd0113 100644
--- a/web/apps/photos/src/services/upload/uploadManager.ts
+++ b/web/apps/photos/src/services/upload/uploadManager.ts
@@ -6,11 +6,11 @@ import log from "@/next/log";
import type { Electron } from "@/next/types/ipc";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { ensure } from "@/utils/ensure";
+import { wait } from "@/utils/promise";
import { getDedicatedCryptoWorker } from "@ente/shared/crypto";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { CustomError } from "@ente/shared/error";
import { Events, eventBus } from "@ente/shared/events";
-import { wait } from "@ente/shared/utils";
import { Canceler } from "axios";
import { Remote } from "comlink";
import {
diff --git a/web/apps/photos/src/services/upload/uploadService.ts b/web/apps/photos/src/services/upload/uploadService.ts
index 52f495785a48579b5fd45033858c31fcfa608bc3..d72fcd5c88331450f46fc04e0632a4241f7d33c4 100644
--- a/web/apps/photos/src/services/upload/uploadService.ts
+++ b/web/apps/photos/src/services/upload/uploadService.ts
@@ -1023,6 +1023,11 @@ const withThumbnail = async (
} catch (e) {
if (e.message.endsWith(CustomErrorMessage.NotAvailable)) {
moduleState.isNativeImageThumbnailGenerationNotAvailable = true;
+ // TODO(MR): release 1.7
+ log.info(
+ "Setting isNativeImageThumbnailGenerationNotAvailable",
+ e,
+ );
} else {
log.error("Native thumbnail generation failed", e);
}
diff --git a/web/apps/photos/src/types/billing/index.ts b/web/apps/photos/src/types/billing/index.ts
index b2058948bc17b51280e1b2f6aac607da83f9a8ef..ef203d49fe562103a2da3f7b75e670f6dd2abb54 100644
--- a/web/apps/photos/src/types/billing/index.ts
+++ b/web/apps/photos/src/types/billing/index.ts
@@ -14,6 +14,7 @@ export interface Subscription {
price: string;
period: PLAN_PERIOD;
}
+
export interface Plan {
id: string;
androidID: string;
diff --git a/web/apps/photos/src/types/common/job.ts b/web/apps/photos/src/types/common/job.ts
deleted file mode 100644
index fe42e4aaf21394c45eb1752099cf351650d598c5..0000000000000000000000000000000000000000
--- a/web/apps/photos/src/types/common/job.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-export type JobState = "Scheduled" | "Running" | "NotScheduled";
-
-export interface JobConfig {
- intervalSec: number;
- maxItervalSec: number;
- backoffMultiplier: number;
-}
-
-export interface JobResult {
- shouldBackoff: boolean;
-}
diff --git a/web/apps/photos/src/types/embedding.tsx b/web/apps/photos/src/types/embedding.tsx
index d4719986bc222770f70b9a038e3afd2b22b10f11..161244c1594b6f264dc10e836df8a0c71978d394 100644
--- a/web/apps/photos/src/types/embedding.tsx
+++ b/web/apps/photos/src/types/embedding.tsx
@@ -1,9 +1,9 @@
/**
- * The embeddings models that we support.
+ * The embeddings that we (the current client) know how to handle.
*
* This is an exhaustive set of values we pass when PUT-ting encrypted
* embeddings on the server. However, we should be prepared to receive an
- * {@link EncryptedEmbedding} with a model value distinct from one of these.
+ * {@link EncryptedEmbedding} with a model value different from these.
*/
export type EmbeddingModel = "onnx-clip" | "file-ml-clip-face";
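A minimal sketch of what being prepared for an unknown model value can look like (the guard below is illustrative, not part of the existing code):

const isKnownEmbeddingModel = (model: string): model is EmbeddingModel =>
    model === "onnx-clip" || model === "file-ml-clip-face";

// Callers can then skip, rather than fail on, embeddings whose model this
// client does not recognize.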
diff --git a/web/apps/photos/src/types/image/index.ts b/web/apps/photos/src/types/image/index.ts
index 8c9619e2ebec922faade99aac724caac56dfb594..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 100644
--- a/web/apps/photos/src/types/image/index.ts
+++ b/web/apps/photos/src/types/image/index.ts
@@ -1,9 +0,0 @@
-export interface Dimensions {
- width: number;
- height: number;
-}
-
-export interface BlobOptions {
- type?: string;
- quality?: number;
-}
diff --git a/web/apps/photos/src/types/machineLearning/data/clip.ts b/web/apps/photos/src/types/machineLearning/data/clip.ts
deleted file mode 100644
index 0181e89e576d8879e9635bfbcbcacfe35fba941c..0000000000000000000000000000000000000000
--- a/web/apps/photos/src/types/machineLearning/data/clip.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-export interface ClipEmbedding {
- embedding: Float32Array;
- model: "ggml-clip" | "onnx-clip";
-}
diff --git a/web/apps/photos/src/types/machineLearning/data/face.ts b/web/apps/photos/src/types/machineLearning/data/face.ts
deleted file mode 100644
index cac391994f45a8d353f959f6dd6261e9d512fd47..0000000000000000000000000000000000000000
--- a/web/apps/photos/src/types/machineLearning/data/face.ts
+++ /dev/null
@@ -1,27 +0,0 @@
-/// [`x`] and [y] are the coordinates of the top left corner of the box, so the minimim values
-/// [width] and [height] are the width and height of the box.
-/// All values are in absolute pixels relative to the original image size.
-export interface CenterBox {
- x: number;
- y: number;
- height: number;
- width: number;
-}
-
-export interface Point {
- x: number;
- y: number;
-}
-
-export interface Detection {
- box: CenterBox;
- landmarks: Point[];
-}
-
-export interface Face {
- id: string;
- confidence: number;
- blur: number;
- embedding: Float32Array;
- detection: Detection;
-}
diff --git a/web/apps/photos/src/types/machineLearning/data/fileML.ts b/web/apps/photos/src/types/machineLearning/data/fileML.ts
deleted file mode 100644
index 7835450e77f2241920415eb80ac08efe40a2c0a5..0000000000000000000000000000000000000000
--- a/web/apps/photos/src/types/machineLearning/data/fileML.ts
+++ /dev/null
@@ -1,12 +0,0 @@
-import { ClipEmbedding } from "./clip";
-import { Face } from "./face";
-
-export interface FileML {
- fileID: number;
- clip?: ClipEmbedding;
- faces: Face[];
- height: number;
- width: number;
- version: number;
- error?: string;
-}
diff --git a/web/apps/photos/src/types/machineLearning/ui.ts b/web/apps/photos/src/types/machineLearning/ui.ts
deleted file mode 100644
index cd9f63f185510a4e9a50e3e6fa2cce1a0afea454..0000000000000000000000000000000000000000
--- a/web/apps/photos/src/types/machineLearning/ui.ts
+++ /dev/null
@@ -1,7 +0,0 @@
-export interface IndexStatus {
- outOfSyncFilesExists: boolean;
- nSyncedFiles: number;
- nTotalFiles: number;
- localFilesSynced: boolean;
- peopleIndexSynced: boolean;
-}
diff --git a/web/apps/photos/src/types/search/index.ts b/web/apps/photos/src/types/search/index.ts
index cf50f4a0602f00575575db07ba5cbe79b111c3d0..e08b842a3011bb5ea568b2931003e7ebec205375 100644
--- a/web/apps/photos/src/types/search/index.ts
+++ b/web/apps/photos/src/types/search/index.ts
@@ -1,9 +1,9 @@
import { FILE_TYPE } from "@/media/file-type";
import { City } from "services/locationSearchService";
+import { IndexStatus } from "services/ml/db";
+import { Person } from "services/ml/types";
import { LocationTagData } from "types/entity";
import { EnteFile } from "types/file";
-import { Person } from "types/machineLearning";
-import { IndexStatus } from "types/machineLearning/ui";
export enum SuggestionType {
DATE = "DATE",
diff --git a/web/apps/photos/src/utils/billing/index.ts b/web/apps/photos/src/utils/billing/index.ts
index 3dfde5384bb1d0eee95bf948c207d958c6fe2710..d2e593e9e1a26df6abfe632e47740cfe6ef67d5b 100644
--- a/web/apps/photos/src/utils/billing/index.ts
+++ b/web/apps/photos/src/utils/billing/index.ts
@@ -31,44 +31,6 @@ enum RESPONSE_STATUS {
fail = "fail",
}
-const StorageUnits = ["B", "KB", "MB", "GB", "TB"];
-
-const ONE_GB = 1024 * 1024 * 1024;
-
-export function convertBytesToGBs(bytes: number, precision = 0): string {
- return (bytes / (1024 * 1024 * 1024)).toFixed(precision);
-}
-
-export function makeHumanReadableStorage(
- bytes: number,
- { roundUp } = { roundUp: false },
-): string {
- if (bytes <= 0) {
- return `0 ${t("STORAGE_UNITS.MB")}`;
- }
- const i = Math.floor(Math.log(bytes) / Math.log(1024));
-
- let quantity = bytes / Math.pow(1024, i);
- let unit = StorageUnits[i];
-
- if (quantity > 100 && unit !== "GB") {
- quantity /= 1024;
- unit = StorageUnits[i + 1];
- }
-
- quantity = Number(quantity.toFixed(1));
-
- if (bytes >= 10 * ONE_GB) {
- if (roundUp) {
- quantity = Math.ceil(quantity);
- } else {
- quantity = Math.round(quantity);
- }
- }
-
- return `${quantity} ${t(`STORAGE_UNITS.${unit}`)}`;
-}
-
export function hasPaidSubscription(subscription: Subscription) {
return (
subscription &&
@@ -160,9 +122,8 @@ export function isSubscriptionPastDue(subscription: Subscription) {
);
}
-export function isPopularPlan(plan: Plan) {
- return plan.storage === 100 * ONE_GB;
-}
+export const isPopularPlan = (plan: Plan) =>
+ plan.storage === 100 * 1024 * 1024 * 1024; /* 100 GB */
export async function updateSubscription(
plan: Plan,
diff --git a/web/apps/photos/src/utils/common/job.ts b/web/apps/photos/src/utils/common/job.ts
deleted file mode 100644
index 365f879e958325e56194e4abd986390508e0de8e..0000000000000000000000000000000000000000
--- a/web/apps/photos/src/utils/common/job.ts
+++ /dev/null
@@ -1,82 +0,0 @@
-import log from "@/next/log";
-import { JobConfig, JobResult, JobState } from "types/common/job";
-
-export class SimpleJob {
- private config: JobConfig;
-    private runCallback: () => Promise<JobResult>;
- private state: JobState;
- private stopped: boolean;
- private intervalSec: number;
-    private nextTimeoutId: ReturnType<typeof setTimeout>;
-
-    constructor(config: JobConfig, runCallback: () => Promise<JobResult>) {
- this.config = config;
- this.runCallback = runCallback;
- this.state = "NotScheduled";
- this.stopped = true;
- this.intervalSec = this.config.intervalSec;
- }
-
- public resetInterval() {
- this.intervalSec = this.config.intervalSec;
- }
-
- public start() {
- this.stopped = false;
- this.resetInterval();
- if (this.state !== "Running") {
- this.scheduleNext();
- } else {
- log.info("Job already running, not scheduling");
- }
- }
-
- private scheduleNext() {
- if (this.state === "Scheduled" || this.nextTimeoutId) {
- this.clearScheduled();
- }
-
- this.nextTimeoutId = setTimeout(
- () => this.run(),
- this.intervalSec * 1000,
- );
- this.state = "Scheduled";
- log.info("Scheduled next job after: ", this.intervalSec);
- }
-
- async run() {
- this.nextTimeoutId = undefined;
- this.state = "Running";
-
- try {
- const jobResult = await this.runCallback();
- if (jobResult && jobResult.shouldBackoff) {
- this.intervalSec = Math.min(
- this.config.maxItervalSec,
- this.intervalSec * this.config.backoffMultiplier,
- );
- } else {
- this.resetInterval();
- }
- log.info("Job completed");
- } catch (e) {
- console.error("Error while running Job: ", e);
- } finally {
- this.state = "NotScheduled";
- !this.stopped && this.scheduleNext();
- }
- }
-
- // currently client is responsible to terminate running job
- public stop() {
- this.stopped = true;
- this.clearScheduled();
- }
-
- private clearScheduled() {
- clearTimeout(this.nextTimeoutId);
- this.nextTimeoutId = undefined;
- this.state = "NotScheduled";
- log.info("Cleared next job");
- }
-}
diff --git a/web/apps/photos/src/utils/embedding.ts b/web/apps/photos/src/utils/embedding.ts
deleted file mode 100644
index 00012f174f93761fc516849a156ad1ccc441fbbd..0000000000000000000000000000000000000000
--- a/web/apps/photos/src/utils/embedding.ts
+++ /dev/null
@@ -1,36 +0,0 @@
-import { Embedding } from "types/embedding";
-import { FileML } from "./machineLearning/mldataMappers";
-
-export const getLatestVersionEmbeddings = (embeddings: Embedding[]) => {
- const latestVersionEntities = new Map();
- embeddings.forEach((embedding) => {
- if (!embedding?.fileID) {
- return;
- }
- const existingEmbeddings = latestVersionEntities.get(embedding.fileID);
- if (
- !existingEmbeddings ||
- existingEmbeddings.updatedAt < embedding.updatedAt
- ) {
- latestVersionEntities.set(embedding.fileID, embedding);
- }
- });
- return Array.from(latestVersionEntities.values());
-};
-
-export const getLatestVersionFileEmbeddings = (embeddings: FileML[]) => {
- const latestVersionEntities = new Map();
- embeddings.forEach((embedding) => {
- if (!embedding?.fileID) {
- return;
- }
- const existingEmbeddings = latestVersionEntities.get(embedding.fileID);
- if (
- !existingEmbeddings ||
- existingEmbeddings.updatedAt < embedding.updatedAt
- ) {
- latestVersionEntities.set(embedding.fileID, embedding);
- }
- });
- return Array.from(latestVersionEntities.values());
-};
diff --git a/web/apps/photos/src/utils/file/index.ts b/web/apps/photos/src/utils/file/index.ts
index 212b2efd3125ddd799cd249025e3fb9b3a1659a6..4f9ec18459f72fb6cc28fbf798886247d56ed9d2 100644
--- a/web/apps/photos/src/utils/file/index.ts
+++ b/web/apps/photos/src/utils/file/index.ts
@@ -1,13 +1,15 @@
import { FILE_TYPE } from "@/media/file-type";
+import { isNonWebImageFileExtension } from "@/media/formats";
import { decodeLivePhoto } from "@/media/live-photo";
import { lowercaseExtension } from "@/next/file";
import log from "@/next/log";
import { CustomErrorMessage, type Electron } from "@/next/types/ipc";
import { workerBridge } from "@/next/worker/worker-bridge";
+import { withTimeout } from "@/utils/promise";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
import { User } from "@ente/shared/user/types";
-import { downloadUsingAnchor, withTimeout } from "@ente/shared/utils";
+import { downloadUsingAnchor } from "@ente/shared/utils";
import { t } from "i18next";
import isElectron from "is-electron";
import { moveToHiddenCollection } from "services/collectionService";
@@ -40,20 +42,6 @@ import { isArchivedFile, updateMagicMetadata } from "utils/magicMetadata";
import { safeFileName } from "utils/native-fs";
import { writeStream } from "utils/native-stream";
-const RAW_FORMATS = [
- "heic",
- "rw2",
- "tiff",
- "arw",
- "cr3",
- "cr2",
- "raf",
- "nef",
- "psd",
- "dng",
- "tif",
-];
-
const SUPPORTED_RAW_FORMATS = [
"heic",
"rw2",
@@ -116,19 +104,6 @@ export async function getUpdatedEXIFFileForDownload(
}
}
-export function convertBytesToHumanReadable(
- bytes: number,
- precision = 2,
-): string {
- if (bytes === 0 || isNaN(bytes)) {
- return "0 MB";
- }
-
- const i = Math.floor(Math.log(bytes) / Math.log(1024));
- const sizes = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"];
- return (bytes / Math.pow(1024, i)).toFixed(precision) + " " + sizes[i];
-}
-
export async function downloadFile(file: EnteFile) {
try {
const fileReader = new FileReader();
@@ -296,6 +271,10 @@ export function generateStreamFromArrayBuffer(data: Uint8Array) {
});
}
+/**
+ * The returned blob.type is filled in, whenever possible, with the MIME type of
+ * the data that we're dealing with.
+ */
export const getRenderableImage = async (fileName: string, imageBlob: Blob) => {
try {
const tempFile = new File([imageBlob], fileName);
@@ -306,10 +285,19 @@ export const getRenderableImage = async (fileName: string, imageBlob: Blob) => {
);
const { extension } = fileTypeInfo;
- if (!isRawFile(extension)) {
- // Either it is not something we know how to handle yet, or
- // something that the browser already knows how to render.
- return imageBlob;
+ if (!isNonWebImageFileExtension(extension)) {
+ // Either it is something that the browser already knows how to
+            // render, or something we don't even know about yet.
+ const mimeType = fileTypeInfo.mimeType;
+ if (!mimeType) {
+ log.info(
+ "Trying to render a file without a MIME type",
+ fileName,
+ );
+ return imageBlob;
+ } else {
+ return new Blob([imageBlob], { type: mimeType });
+ }
}
const available = !moduleState.isNativeJPEGConversionNotAvailable;
@@ -321,6 +309,8 @@ export const getRenderableImage = async (fileName: string, imageBlob: Blob) => {
} catch (e) {
if (e.message.endsWith(CustomErrorMessage.NotAvailable)) {
moduleState.isNativeJPEGConversionNotAvailable = true;
+ // TODO(MR): release 1.7
+ log.info("Setting isNativeJPEGConversionNotAvailable", e);
} else {
log.error("Native conversion to JPEG failed", e);
}
@@ -350,13 +340,9 @@ const nativeConvertToJPEG = async (imageBlob: Blob) => {
? await electron.convertToJPEG(imageData)
: await workerBridge.convertToJPEG(imageData);
log.debug(() => `Native JPEG conversion took ${Date.now() - startTime} ms`);
- return new Blob([jpegData]);
+ return new Blob([jpegData], { type: "image/jpeg" });
};
-export function isRawFile(exactType: string) {
- return RAW_FORMATS.includes(exactType.toLowerCase());
-}
-
export function isSupportedRawFormat(exactType: string) {
return SUPPORTED_RAW_FORMATS.includes(exactType.toLowerCase());
}
diff --git a/web/apps/photos/src/utils/image/index.ts b/web/apps/photos/src/utils/image/index.ts
index e4884716cdcb79c5c459c9616de7b0f740aad31c..bdaf64d73528db480f9baa766e93a4ac3869d83a 100644
--- a/web/apps/photos/src/utils/image/index.ts
+++ b/web/apps/photos/src/utils/image/index.ts
@@ -1,10 +1,8 @@
// these utils only work in env where OffscreenCanvas is available
import { Matrix, inverse } from "ml-matrix";
-import { BlobOptions, Dimensions } from "types/image";
-import { FaceAlignment } from "types/machineLearning";
-import { enlargeBox } from "utils/machineLearning";
-import { Box } from "../../../thirdparty/face-api/classes";
+import { Box, Dimensions, enlargeBox } from "services/ml/geom";
+import { FaceAlignment } from "services/ml/types";
export function normalizePixelBetween0And1(pixelValue: number) {
return pixelValue / 255.0;
@@ -447,6 +445,11 @@ export function addPadding(image: ImageBitmap, padding: number) {
return offscreen.transferToImageBitmap();
}
+export interface BlobOptions {
+ type?: string;
+ quality?: number;
+}
+
export async function imageBitmapToBlob(
imageBitmap: ImageBitmap,
options?: BlobOptions,
diff --git a/web/apps/photos/src/utils/machineLearning/config.ts b/web/apps/photos/src/utils/machineLearning/config.ts
deleted file mode 100644
index 30a65b8f1d5f14a35effa920b5dc97fadf95f45e..0000000000000000000000000000000000000000
--- a/web/apps/photos/src/utils/machineLearning/config.ts
+++ /dev/null
@@ -1,48 +0,0 @@
-import {
- DEFAULT_ML_SEARCH_CONFIG,
- DEFAULT_ML_SYNC_CONFIG,
- DEFAULT_ML_SYNC_JOB_CONFIG,
-} from "constants/mlConfig";
-import { JobConfig } from "types/common/job";
-import { MLSearchConfig, MLSyncConfig } from "types/machineLearning";
-import mlIDbStorage, {
- ML_SEARCH_CONFIG_NAME,
- ML_SYNC_CONFIG_NAME,
- ML_SYNC_JOB_CONFIG_NAME,
-} from "utils/storage/mlIDbStorage";
-import { isInternalUser } from "utils/user";
-
-export async function getMLSyncJobConfig() {
- return mlIDbStorage.getConfig(
- ML_SYNC_JOB_CONFIG_NAME,
- DEFAULT_ML_SYNC_JOB_CONFIG,
- );
-}
-
-export async function getMLSyncConfig() {
- return mlIDbStorage.getConfig(ML_SYNC_CONFIG_NAME, DEFAULT_ML_SYNC_CONFIG);
-}
-
-export async function getMLSearchConfig() {
- if (isInternalUser()) {
- return mlIDbStorage.getConfig(
- ML_SEARCH_CONFIG_NAME,
- DEFAULT_ML_SEARCH_CONFIG,
- );
- }
- // Force disabled for everyone else while we finalize it to avoid redundant
- // reindexing for users.
- return DEFAULT_ML_SEARCH_CONFIG;
-}
-
-export async function updateMLSyncJobConfig(newConfig: JobConfig) {
- return mlIDbStorage.putConfig(ML_SYNC_JOB_CONFIG_NAME, newConfig);
-}
-
-export async function updateMLSyncConfig(newConfig: MLSyncConfig) {
- return mlIDbStorage.putConfig(ML_SYNC_CONFIG_NAME, newConfig);
-}
-
-export async function updateMLSearchConfig(newConfig: MLSearchConfig) {
- return mlIDbStorage.putConfig(ML_SEARCH_CONFIG_NAME, newConfig);
-}
diff --git a/web/apps/photos/src/utils/machineLearning/faceAlign.ts b/web/apps/photos/src/utils/machineLearning/faceAlign.ts
deleted file mode 100644
index beb98cea9aa7c745c9025224c881189e7f46cc4f..0000000000000000000000000000000000000000
--- a/web/apps/photos/src/utils/machineLearning/faceAlign.ts
+++ /dev/null
@@ -1,87 +0,0 @@
-import { Matrix } from "ml-matrix";
-import { getSimilarityTransformation } from "similarity-transformation";
-import { FaceAlignment, FaceDetection } from "types/machineLearning";
-import { Point } from "../../../thirdparty/face-api/classes";
-
-const ARCFACE_LANDMARKS = [
- [38.2946, 51.6963],
- [73.5318, 51.5014],
- [56.0252, 71.7366],
- [56.1396, 92.2848],
-] as Array<[number, number]>;
-
-const ARCFACE_LANDMARKS_FACE_SIZE = 112;
-
-const ARC_FACE_5_LANDMARKS = [
- [38.2946, 51.6963],
- [73.5318, 51.5014],
- [56.0252, 71.7366],
- [41.5493, 92.3655],
- [70.7299, 92.2041],
-] as Array<[number, number]>;
-
-export function getArcfaceAlignment(
- faceDetection: FaceDetection,
-): FaceAlignment {
- const landmarkCount = faceDetection.landmarks.length;
- return getFaceAlignmentUsingSimilarityTransform(
- faceDetection,
- normalizeLandmarks(
- landmarkCount === 5 ? ARC_FACE_5_LANDMARKS : ARCFACE_LANDMARKS,
- ARCFACE_LANDMARKS_FACE_SIZE,
- ),
- );
-}
-
-function getFaceAlignmentUsingSimilarityTransform(
- faceDetection: FaceDetection,
- alignedLandmarks: Array<[number, number]>,
- // alignmentMethod: Versioned
-): FaceAlignment {
- const landmarksMat = new Matrix(
- faceDetection.landmarks
- .map((p) => [p.x, p.y])
- .slice(0, alignedLandmarks.length),
- ).transpose();
- const alignedLandmarksMat = new Matrix(alignedLandmarks).transpose();
-
- const simTransform = getSimilarityTransformation(
- landmarksMat,
- alignedLandmarksMat,
- );
-
- const RS = Matrix.mul(simTransform.rotation, simTransform.scale);
- const TR = simTransform.translation;
-
- const affineMatrix = [
- [RS.get(0, 0), RS.get(0, 1), TR.get(0, 0)],
- [RS.get(1, 0), RS.get(1, 1), TR.get(1, 0)],
- [0, 0, 1],
- ];
-
- const size = 1 / simTransform.scale;
- const meanTranslation = simTransform.toMean.sub(0.5).mul(size);
- const centerMat = simTransform.fromMean.sub(meanTranslation);
- const center = new Point(centerMat.get(0, 0), centerMat.get(1, 0));
- const rotation = -Math.atan2(
- simTransform.rotation.get(0, 1),
- simTransform.rotation.get(0, 0),
- );
- // log.info({ affineMatrix, meanTranslation, centerMat, center, toMean: simTransform.toMean, fromMean: simTransform.fromMean, size });
-
- return {
- affineMatrix,
- center,
- size,
- rotation,
- };
-}
-
-function normalizeLandmarks(
- landmarks: Array<[number, number]>,
- faceSize: number,
-): Array<[number, number]> {
- return landmarks.map((landmark) =>
- landmark.map((p) => p / faceSize),
- ) as Array<[number, number]>;
-}
diff --git a/web/apps/photos/src/utils/machineLearning/faceCrop.ts b/web/apps/photos/src/utils/machineLearning/faceCrop.ts
deleted file mode 100644
index d437a942dcee8d11ba3208dc31c12806a4ff0878..0000000000000000000000000000000000000000
--- a/web/apps/photos/src/utils/machineLearning/faceCrop.ts
+++ /dev/null
@@ -1,28 +0,0 @@
-import { FaceAlignment, FaceCrop, FaceCropConfig } from "types/machineLearning";
-import { cropWithRotation } from "utils/image";
-import { enlargeBox } from ".";
-import { Box } from "../../../thirdparty/face-api/classes";
-
-export function getFaceCrop(
- imageBitmap: ImageBitmap,
- alignment: FaceAlignment,
- config: FaceCropConfig,
-): FaceCrop {
- const alignmentBox = new Box({
- x: alignment.center.x - alignment.size / 2,
- y: alignment.center.y - alignment.size / 2,
- width: alignment.size,
- height: alignment.size,
- }).round();
- const scaleForPadding = 1 + config.padding * 2;
- const paddedBox = enlargeBox(alignmentBox, scaleForPadding).round();
- const faceImageBitmap = cropWithRotation(imageBitmap, paddedBox, 0, {
- width: config.maxSize,
- height: config.maxSize,
- });
-
- return {
- image: faceImageBitmap,
- imageBox: paddedBox,
- };
-}
diff --git a/web/apps/photos/src/utils/machineLearning/index.ts b/web/apps/photos/src/utils/machineLearning/index.ts
deleted file mode 100644
index bc9ae397496f0f84b4b362f46baad1dbbf652eeb..0000000000000000000000000000000000000000
--- a/web/apps/photos/src/utils/machineLearning/index.ts
+++ /dev/null
@@ -1,284 +0,0 @@
-import { FILE_TYPE } from "@/media/file-type";
-import { decodeLivePhoto } from "@/media/live-photo";
-import log from "@/next/log";
-import PQueue from "p-queue";
-import DownloadManager from "services/download";
-import { getLocalFiles } from "services/fileService";
-import { EnteFile } from "types/file";
-import { Dimensions } from "types/image";
-import {
- DetectedFace,
- Face,
- FaceAlignment,
- MlFileData,
- Person,
- Versioned,
-} from "types/machineLearning";
-import { getRenderableImage } from "utils/file";
-import { clamp, warpAffineFloat32List } from "utils/image";
-import mlIDbStorage from "utils/storage/mlIDbStorage";
-import { Box, Point } from "../../../thirdparty/face-api/classes";
-
-export function newBox(x: number, y: number, width: number, height: number) {
- return new Box({ x, y, width, height });
-}
-
-export function getBoxCenterPt(topLeft: Point, bottomRight: Point): Point {
- return topLeft.add(bottomRight.sub(topLeft).div(new Point(2, 2)));
-}
-
-export function getBoxCenter(box: Box): Point {
- return getBoxCenterPt(box.topLeft, box.bottomRight);
-}
-
-export function enlargeBox(box: Box, factor: number = 1.5) {
- const center = getBoxCenter(box);
- const size = new Point(box.width, box.height);
- const newHalfSize = new Point((factor * size.x) / 2, (factor * size.y) / 2);
-
- return new Box({
- left: center.x - newHalfSize.x,
- top: center.y - newHalfSize.y,
- right: center.x + newHalfSize.x,
- bottom: center.y + newHalfSize.y,
- });
-}
-
-export function getAllFacesFromMap(allFacesMap: Map<number, Array<Face>>) {
- const allFaces = [...allFacesMap.values()].flat();
-
- return allFaces;
-}
-
-export async function getLocalFile(fileId: number) {
- const localFiles = await getLocalFiles();
- return localFiles.find((f) => f.id === fileId);
-}
-
-export async function extractFaceImagesToFloat32(
-    faceAlignments: Array<FaceAlignment>,
- faceSize: number,
- image: ImageBitmap,
-): Promise<Float32Array> {
- const faceData = new Float32Array(
- faceAlignments.length * faceSize * faceSize * 3,
- );
- for (let i = 0; i < faceAlignments.length; i++) {
- const alignedFace = faceAlignments[i];
- const faceDataOffset = i * faceSize * faceSize * 3;
- warpAffineFloat32List(
- image,
- alignedFace,
- faceSize,
- faceData,
- faceDataOffset,
- );
- }
- return faceData;
-}
-
-export function getFaceId(detectedFace: DetectedFace, imageDims: Dimensions) {
- const xMin = clamp(
- detectedFace.detection.box.x / imageDims.width,
- 0.0,
- 0.999999,
- )
- .toFixed(5)
- .substring(2);
- const yMin = clamp(
- detectedFace.detection.box.y / imageDims.height,
- 0.0,
- 0.999999,
- )
- .toFixed(5)
- .substring(2);
- const xMax = clamp(
- (detectedFace.detection.box.x + detectedFace.detection.box.width) /
- imageDims.width,
- 0.0,
- 0.999999,
- )
- .toFixed(5)
- .substring(2);
- const yMax = clamp(
- (detectedFace.detection.box.y + detectedFace.detection.box.height) /
- imageDims.height,
- 0.0,
- 0.999999,
- )
- .toFixed(5)
- .substring(2);
-
- const rawFaceID = `${xMin}_${yMin}_${xMax}_${yMax}`;
- const faceID = `${detectedFace.fileId}_${rawFaceID}`;
-
- return faceID;
-}
-
-export async function getImageBlobBitmap(blob: Blob): Promise<ImageBitmap> {
- return await createImageBitmap(blob);
-}
-
-async function getOriginalFile(file: EnteFile, queue?: PQueue) {
- let fileStream;
- if (queue) {
- fileStream = await queue.add(() => DownloadManager.getFile(file));
- } else {
- fileStream = await DownloadManager.getFile(file);
- }
- return new Response(fileStream).blob();
-}
-
-async function getOriginalConvertedFile(file: EnteFile, queue?: PQueue) {
- const fileBlob = await getOriginalFile(file, queue);
- if (file.metadata.fileType === FILE_TYPE.IMAGE) {
- return await getRenderableImage(file.metadata.title, fileBlob);
- } else {
- const { imageFileName, imageData } = await decodeLivePhoto(
- file.metadata.title,
- fileBlob,
- );
- return await getRenderableImage(imageFileName, new Blob([imageData]));
- }
-}
-
-export async function getOriginalImageBitmap(file: EnteFile, queue?: PQueue) {
- const fileBlob = await getOriginalConvertedFile(file, queue);
- log.info("[MLService] Got file: ", file.id.toString());
- return getImageBlobBitmap(fileBlob);
-}
-
-export async function getThumbnailImageBitmap(file: EnteFile) {
- const thumb = await DownloadManager.getThumbnail(file);
- log.info("[MLService] Got thumbnail: ", file.id.toString());
-
- return getImageBlobBitmap(new Blob([thumb]));
-}
-
-export async function getLocalFileImageBitmap(
- enteFile: EnteFile,
- localFile: globalThis.File,
-) {
- let fileBlob = localFile as Blob;
- fileBlob = await getRenderableImage(enteFile.metadata.title, fileBlob);
- return getImageBlobBitmap(fileBlob);
-}
-
-export async function getPeopleList(file: EnteFile): Promise<Array<Person>> {
- let startTime = Date.now();
- const mlFileData: MlFileData = await mlIDbStorage.getFile(file.id);
- log.info(
- "getPeopleList:mlFilesStore:getItem",
- Date.now() - startTime,
- "ms",
- );
- if (!mlFileData?.faces || mlFileData.faces.length < 1) {
- return [];
- }
-
- const peopleIds = mlFileData.faces
- .filter((f) => f.personId !== null && f.personId !== undefined)
- .map((f) => f.personId);
- if (!peopleIds || peopleIds.length < 1) {
- return [];
- }
- // log.info("peopleIds: ", peopleIds);
- startTime = Date.now();
- const peoplePromises = peopleIds.map(
-        (p) => mlIDbStorage.getPerson(p) as Promise<Person>,
- );
- const peopleList = await Promise.all(peoplePromises);
- log.info(
- "getPeopleList:mlPeopleStore:getItems",
- Date.now() - startTime,
- "ms",
- );
- // log.info("peopleList: ", peopleList);
-
- return peopleList;
-}
-
-export async function getUnidentifiedFaces(
- file: EnteFile,
-): Promise<Array<Face>> {
- const mlFileData: MlFileData = await mlIDbStorage.getFile(file.id);
-
- return mlFileData?.faces?.filter(
- (f) => f.personId === null || f.personId === undefined,
- );
-}
-
-export async function getAllPeople(limit: number = undefined) {
-    let people: Array<Person> = await mlIDbStorage.getAllPeople();
- // await mlPeopleStore.iterate((person) => {
- // people.push(person);
- // });
- people = people ?? [];
- return people
- .sort((p1, p2) => p2.files.length - p1.files.length)
- .slice(0, limit);
-}
-
-export function findFirstIfSorted<T>(
-    elements: Array<T>,
- comparator: (a: T, b: T) => number,
-) {
- if (!elements || elements.length < 1) {
- return;
- }
- let first = elements[0];
-
- for (let i = 1; i < elements.length; i++) {
- const comp = comparator(elements[i], first);
- if (comp < 0) {
- first = elements[i];
- }
- }
-
- return first;
-}
-
-export function isDifferentOrOld(
-    method: Versioned<string>,
-    thanMethod: Versioned<string>,
-) {
- return (
- !method ||
- method.value !== thanMethod.value ||
- method.version < thanMethod.version
- );
-}
-
-function primitiveArrayEquals(a, b) {
- return (
- Array.isArray(a) &&
- Array.isArray(b) &&
- a.length === b.length &&
- a.every((val, index) => val === b[index])
- );
-}
-
-export function areFaceIdsSame(ofFaces: Array<Face>, toFaces: Array<Face>) {
- if (
- (ofFaces === null || ofFaces === undefined) &&
- (toFaces === null || toFaces === undefined)
- ) {
- return true;
- }
- return primitiveArrayEquals(
- ofFaces?.map((f) => f.id),
- toFaces?.map((f) => f.id),
- );
-}
-
-export function logQueueStats(queue: PQueue, name: string) {
- queue.on("active", () =>
- log.info(
- `queuestats: ${name}: Active, Size: ${queue.size} Pending: ${queue.pending}`,
- ),
- );
- queue.on("idle", () => log.info(`queuestats: ${name}: Idle`));
- queue.on("error", (error) =>
- console.error(`queuestats: ${name}: Error, `, error),
- );
-}
diff --git a/web/apps/photos/src/utils/machineLearning/mldataMappers.ts b/web/apps/photos/src/utils/machineLearning/mldataMappers.ts
deleted file mode 100644
index fb91420aa8567906a651bedbc7ad3f3ea34c001e..0000000000000000000000000000000000000000
--- a/web/apps/photos/src/utils/machineLearning/mldataMappers.ts
+++ /dev/null
@@ -1,265 +0,0 @@
-import {
- Face,
- FaceDetection,
- Landmark,
- MlFileData,
-} from "types/machineLearning";
-import { ClipEmbedding } from "types/machineLearning/data/clip";
-
-export interface FileML extends ServerFileMl {
- updatedAt: number;
-}
-
-class ServerFileMl {
- public fileID: number;
- public height?: number;
- public width?: number;
- public faceEmbedding: ServerFaceEmbeddings;
- public clipEmbedding?: ClipEmbedding;
-
- public constructor(
- fileID: number,
- faceEmbedding: ServerFaceEmbeddings,
- clipEmbedding?: ClipEmbedding,
- height?: number,
- width?: number,
- ) {
- this.fileID = fileID;
- this.height = height;
- this.width = width;
- this.faceEmbedding = faceEmbedding;
- this.clipEmbedding = clipEmbedding;
- }
-
- toJson(): string {
- return JSON.stringify(this);
- }
-
- static fromJson(json: string): ServerFileMl {
- return JSON.parse(json);
- }
-}
-
-class ServerFaceEmbeddings {
- public faces: ServerFace[];
- public version: number;
- public client?: string;
- public error?: boolean;
-
- public constructor(
- faces: ServerFace[],
- version: number,
- client?: string,
- error?: boolean,
- ) {
- this.faces = faces;
- this.version = version;
- this.client = client;
- this.error = error;
- }
-
- toJson(): string {
- return JSON.stringify(this);
- }
-
- static fromJson(json: string): ServerFaceEmbeddings {
- return JSON.parse(json);
- }
-}
-
-class ServerFace {
- public fileID: number;
- public faceID: string;
- public embeddings: number[];
- public detection: ServerDetection;
- public score: number;
- public blur: number;
- public fileInfo?: ServerFileInfo;
-
- public constructor(
- fileID: number,
- faceID: string,
- embeddings: number[],
- detection: ServerDetection,
- score: number,
- blur: number,
- fileInfo?: ServerFileInfo,
- ) {
- this.fileID = fileID;
- this.faceID = faceID;
- this.embeddings = embeddings;
- this.detection = detection;
- this.score = score;
- this.blur = blur;
- this.fileInfo = fileInfo;
- }
-
- toJson(): string {
- return JSON.stringify(this);
- }
-
- static fromJson(json: string): ServerFace {
- return JSON.parse(json);
- }
-}
-
-class ServerFileInfo {
- public imageWidth?: number;
- public imageHeight?: number;
-
- public constructor(imageWidth?: number, imageHeight?: number) {
- this.imageWidth = imageWidth;
- this.imageHeight = imageHeight;
- }
-}
-
-class ServerDetection {
- public box: ServerFaceBox;
- public landmarks: Landmark[];
-
- public constructor(box: ServerFaceBox, landmarks: Landmark[]) {
- this.box = box;
- this.landmarks = landmarks;
- }
-
- toJson(): string {
- return JSON.stringify(this);
- }
-
- static fromJson(json: string): ServerDetection {
- return JSON.parse(json);
- }
-}
-
-class ServerFaceBox {
- public xMin: number;
- public yMin: number;
- public width: number;
- public height: number;
-
- public constructor(
- xMin: number,
- yMin: number,
- width: number,
- height: number,
- ) {
- this.xMin = xMin;
- this.yMin = yMin;
- this.width = width;
- this.height = height;
- }
-
- toJson(): string {
- return JSON.stringify(this);
- }
-
- static fromJson(json: string): ServerFaceBox {
- return JSON.parse(json);
- }
-}
-
-export function LocalFileMlDataToServerFileMl(
- localFileMlData: MlFileData,
-): ServerFileMl {
- if (
- localFileMlData.errorCount > 0 &&
- localFileMlData.lastErrorMessage !== undefined
- ) {
- return null;
- }
- const imageDimensions = localFileMlData.imageDimensions;
- const fileInfo = new ServerFileInfo(
- imageDimensions.width,
- imageDimensions.height,
- );
- const faces: ServerFace[] = [];
- for (let i = 0; i < localFileMlData.faces.length; i++) {
- const face: Face = localFileMlData.faces[i];
- const faceID = face.id;
- const embedding = face.embedding;
- const score = face.detection.probability;
- const blur = face.blurValue;
- const detection: FaceDetection = face.detection;
- const box = detection.box;
- const landmarks = detection.landmarks;
- const newBox = new ServerFaceBox(box.x, box.y, box.width, box.height);
- const newLandmarks: Landmark[] = [];
- for (let j = 0; j < landmarks.length; j++) {
- newLandmarks.push({
- x: landmarks[j].x,
- y: landmarks[j].y,
- } as Landmark);
- }
-
- const newFaceObject = new ServerFace(
- localFileMlData.fileId,
- faceID,
- Array.from(embedding),
- new ServerDetection(newBox, newLandmarks),
- score,
- blur,
- fileInfo,
- );
- faces.push(newFaceObject);
- }
- const faceEmbeddings = new ServerFaceEmbeddings(
- faces,
- 1,
- localFileMlData.lastErrorMessage,
- );
- return new ServerFileMl(
- localFileMlData.fileId,
- faceEmbeddings,
- null,
- imageDimensions.height,
- imageDimensions.width,
- );
-}
-
-// // Not sure if this actually works
-// export function ServerFileMlToLocalFileMlData(
-// serverFileMl: ServerFileMl,
-// ): MlFileData {
-// const faces: Face[] = [];
-// const mlVersion: number = serverFileMl.faceEmbeddings.version;
-// const errorCount = serverFileMl.faceEmbeddings.error ? 1 : 0;
-// for (let i = 0; i < serverFileMl.faceEmbeddings.faces.length; i++) {
-// const face = serverFileMl.faceEmbeddings.faces[i];
-// if(face.detection.landmarks.length === 0) {
-// continue;
-// }
-// const detection = face.detection;
-// const box = detection.box;
-// const landmarks = detection.landmarks;
-// const newBox = new FaceBox(
-// box.xMin,
-// box.yMin,
-// box.width,
-// box.height,
-// );
-// const newLandmarks: Landmark[] = [];
-// for (let j = 0; j < landmarks.length; j++) {
-// newLandmarks.push(
-// {
-// x: landmarks[j].x,
-// y: landmarks[j].y,
-// } as Landmark
-// );
-// }
-// const newDetection = new Detection(newBox, newLandmarks);
-// const newFace = {
-
-// } as Face
-// faces.push(newFace);
-// }
-// return {
-// fileId: serverFileMl.fileID,
-// imageDimensions: {
-// width: serverFileMl.width,
-// height: serverFileMl.height,
-// },
-// faces,
-// mlVersion,
-// errorCount,
-// };
-// }
diff --git a/web/apps/photos/src/utils/machineLearning/transform.ts b/web/apps/photos/src/utils/machineLearning/transform.ts
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/web/apps/photos/src/utils/native-stream.ts b/web/apps/photos/src/utils/native-stream.ts
index 4ed9da753a4ba941b6e1f95bcca22c589f70c0ec..e922c262195a5d88a15626349513090154036a69 100644
--- a/web/apps/photos/src/utils/native-stream.ts
+++ b/web/apps/photos/src/utils/native-stream.ts
@@ -109,9 +109,81 @@ export const writeStream = async (
duplex: "half",
});
+ const res = await fetch(req);
+ if (!res.ok)
+ throw new Error(`Failed to write stream to ${url}: HTTP ${res.status}`);
+};
+
+/**
+ * Variant of {@link writeStream} tailored for video conversion.
+ *
+ * @param blob The video to convert.
+ *
+ * @returns a token that can then be passed to {@link readConvertToMP4Stream} to
+ * read back the converted video. See: [Note: Convert to MP4].
+ */
+export const writeConvertToMP4Stream = async (_: Electron, blob: Blob) => {
+ const url = "stream://convert-to-mp4";
+
+ const req = new Request(url, {
+ method: "POST",
+ body: blob,
+ // @ts-expect-error TypeScript's libdom.d.ts does not include the
+ // "duplex" parameter, e.g. see
+ // https://github.com/node-fetch/node-fetch/issues/1769.
+ duplex: "half",
+ });
+
+ const res = await fetch(req);
+ if (!res.ok)
+ throw new Error(`Failed to write stream to ${url}: HTTP ${res.status}`);
+
+    const token = await res.text();
+ return token;
+};
+
+/**
+ * Variant of {@link readStream} tailored for video conversion.
+ *
+ * @param token A token obtained from {@link writeConvertToMP4Stream}.
+ *
+ * @returns the contents of the converted video. See: [Note: Convert to MP4].
+ */
+export const readConvertToMP4Stream = async (
+ _: Electron,
+ token: string,
+): Promise<Blob> => {
+ const params = new URLSearchParams({ token });
+ const url = new URL(`stream://convert-to-mp4?${params.toString()}`);
+
+ const req = new Request(url, { method: "GET" });
+
const res = await fetch(req);
if (!res.ok)
throw new Error(
- `Failed to write stream to ${path}: HTTP ${res.status}`,
+ `Failed to read stream from ${url}: HTTP ${res.status}`,
);
+
+ return res.blob();
+};
+
+/**
+ * Sibling of {@link readConvertToMP4Stream} to let the native side know when we
+ * are done reading the response, so that it can dispose of any temporary
+ * resources it was using.
+ *
+ * @param token A token obtained from {@link writeConvertToMP4Stream}.
+ */
+export const readConvertToMP4Done = async (
+ _: Electron,
+ token: string,
+): Promise<void> => {
+ // The value for `done` is arbitrary, only its presence matters.
+ const params = new URLSearchParams({ token, done: "1" });
+ const url = new URL(`stream://convert-to-mp4?${params.toString()}`);
+
+ const req = new Request(url, { method: "GET" });
+ const res = await fetch(req);
+ if (!res.ok)
+ throw new Error(`Failed to close stream at ${url}: HTTP ${res.status}`);
};
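Together these three helpers form a small protocol: write the source video, read the conversion back using the returned token, then signal completion so the native side can release resources. A minimal caller sketch (the electron handle and the input blob are assumed to come from the surrounding code, and running cleanup in a finally block is a choice of this sketch):

const convertToMP4 = async (electron: Electron, video: Blob): Promise<Blob> => {
    // 1. Stream the source video to the native side and obtain a token.
    const token = await writeConvertToMP4Stream(electron, video);
    try {
        // 2. Stream back the converted MP4 identified by that token.
        return await readConvertToMP4Stream(electron, token);
    } finally {
        // 3. Let the native side dispose any temporary resources for this token.
        await readConvertToMP4Done(electron, token);
    }
};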
diff --git a/web/apps/photos/src/utils/units.ts b/web/apps/photos/src/utils/units.ts
new file mode 100644
index 0000000000000000000000000000000000000000..229ec2ab9d7eff898c7a292a7017832e5d2bc2aa
--- /dev/null
+++ b/web/apps/photos/src/utils/units.ts
@@ -0,0 +1,100 @@
+import { t } from "i18next";
+
+/**
+ * Localized unit keys.
+ *
+ * For each of these, there is expected to be a localized key under
+ * "storage_unit". e.g. "storage_unit.tb".
+ */
+const units = ["b", "kb", "mb", "gb", "tb"];
+
+/**
+ * Convert the given number of {@link bytes} to their equivalent GB string with
+ * {@link precision}.
+ *
+ * The returned string does not have the GB suffix.
+ */
+export const bytesInGB = (bytes: number, precision = 0): string =>
+ (bytes / (1024 * 1024 * 1024)).toFixed(precision);
+
+/**
+ * Convert the given number of {@link bytes} to a user visible string in an
+ * appropriately sized unit.
+ *
+ * The returned string includes the (localized) unit suffix, e.g. "TB".
+ *
+ * @param precision Modify the number of digits after the decimal point.
+ * Defaults to 2.
+ */
+export function formattedByteSize(bytes: number, precision = 2): string {
+ if (bytes <= 0) return `0 ${t("storage_unit.mb")}`;
+
+ const i = Math.min(
+ Math.floor(Math.log(bytes) / Math.log(1024)),
+ units.length - 1,
+ );
+ const quantity = bytes / Math.pow(1024, i);
+ const unit = units[i];
+
+ return `${quantity.toFixed(precision)} ${t(`storage_unit.${unit}`)}`;
+}
+
+interface FormattedStorageByteSizeOptions {
+ /**
+ * If `true` then round up the fractional quantity we obtain when dividing
+ * the number of bytes by the number of bytes in the unit that got chosen.
+ *
+     * The default behaviour is to round to the nearest integer.
+ */
+ round?: boolean;
+}
+
+/**
+ * Convert the given number of storage {@link bytes} to a user visible string in
+ * an appropriately sized unit.
+ *
+ * This differs from {@link formattedByteSize} in that while
+ * {@link formattedByteSize} is meant for arbitrary byte sizes, this function
+ * has a few additional beautification heuristics that we want to apply when
+ * displaying the "storage size" (in different contexts) as opposed to, say, a
+ * generic "file size".
+ *
+ * @param options {@link FormattedStorageByteSizeOptions}.
+ *
+ * @return A user visible string, including the localized unit suffix.
+ */
+export const formattedStorageByteSize = (
+ bytes: number,
+ options?: FormattedStorageByteSizeOptions,
+): string => {
+ if (bytes <= 0) return `0 ${t("storage_unit.mb")}`;
+
+ const i = Math.min(
+ Math.floor(Math.log(bytes) / Math.log(1024)),
+ units.length - 1,
+ );
+
+ let quantity = bytes / Math.pow(1024, i);
+ let unit = units[i];
+
+    // Round up bytes, KBs and MBs to the bigger unit whenever they'll come off
+ // as more than 0.1.
+ if (quantity > 100 && i < units.length - 2) {
+ quantity /= 1024;
+ unit = units[i + 1];
+ }
+
+ quantity = Number(quantity.toFixed(1));
+
+ // Truncate or round storage sizes to trim off unnecessary and potentially
+    // obscuring precision when they are larger than 10 GB.
+ if (bytes >= 10 * 1024 * 1024 * 1024 /* 10 GB */) {
+ if (options?.round) {
+ quantity = Math.ceil(quantity);
+ } else {
+ quantity = Math.round(quantity);
+ }
+ }
+
+ return `${quantity} ${t(`storage_unit.${unit}`)}`;
+};
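For illustration, assuming the localized storage_unit strings resolve to their English suffixes, the two formatters behave roughly as follows:

formattedByteSize(2 * 1024 * 1024);                      // "2.00 MB"
formattedByteSize(123456789, 1);                         // "117.7 MB"
formattedStorageByteSize(100 * 1024 * 1024 * 1024);      // "100 GB"
formattedStorageByteSize(107900000000, { round: true }); // "101 GB"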
diff --git a/web/apps/photos/src/utils/user/index.ts b/web/apps/photos/src/utils/user/index.ts
index 17551014d05dbe68db7dc26e83b8c961fbf94124..0f8ef142fb983131ec064281a39059df32759082 100644
--- a/web/apps/photos/src/utils/user/index.ts
+++ b/web/apps/photos/src/utils/user/index.ts
@@ -1,4 +1,5 @@
import { getData, LS_KEYS } from "@ente/shared/storage/localStorage";
+import type { User } from "@ente/shared/user/types";
import { UserDetails } from "types/user";
export function getLocalUserDetails(): UserDetails {
@@ -9,7 +10,12 @@ export const isInternalUser = () => {
const userEmail = getData(LS_KEYS.USER)?.email;
if (!userEmail) return false;
- return (
- userEmail.endsWith("@ente.io") || userEmail === "kr.anand619@gmail.com"
- );
+ return userEmail.endsWith("@ente.io");
+};
+
+export const isInternalUserForML = () => {
+ const userID = (getData(LS_KEYS.USER) as User)?.id;
+ if (userID == 1 || userID == 2) return true;
+
+ return isInternalUser();
};
diff --git a/web/apps/photos/src/worker/ffmpeg.worker.ts b/web/apps/photos/src/worker/ffmpeg.worker.ts
index 946a2090f00ac91a388356eec9efd3f2ddb021e0..d9d6c718f8faf37473d6eaef04e390f0a199eb15 100644
--- a/web/apps/photos/src/worker/ffmpeg.worker.ts
+++ b/web/apps/photos/src/worker/ffmpeg.worker.ts
@@ -1,5 +1,4 @@
import log from "@/next/log";
-import { withTimeout } from "@ente/shared/utils";
import QueueProcessor from "@ente/shared/utils/queueProcessor";
import { expose } from "comlink";
import {
@@ -7,6 +6,24 @@ import {
inputPathPlaceholder,
outputPathPlaceholder,
} from "constants/ffmpeg";
+
+// When we run tsc on CI, the line below errors out
+//
+// > Error: src/worker/ffmpeg.worker.ts(10,38): error TS2307: Cannot find module
+// 'ffmpeg-wasm' or its corresponding type declarations.
+//
+// Building and running works fine. And this error does not occur when running
+// tsc locally either.
+//
+// Of course, there is some misconfiguration, but we plan to move off our old
+// fork and onto upstream ffmpeg-wasm, and the reason can be figured out then.
+// For now, disable the error to allow the CI lint to complete.
+//
+// Note that we can't use @ts-expect-error since it doesn't error out when
+// actually building!
+//
+// eslint-disable-next-line @typescript-eslint/ban-ts-comment
+// @ts-ignore
import { FFmpeg, createFFmpeg } from "ffmpeg-wasm";
export class DedicatedFFmpegWorker {
@@ -30,15 +47,11 @@ export class DedicatedFFmpegWorker {
command: string[],
blob: Blob,
outputFileExtension: string,
- timeoutMs,
    ): Promise<Uint8Array> {
if (!this.ffmpeg.isLoaded()) await this.ffmpeg.load();
- const go = () =>
- ffmpegExec(this.ffmpeg, command, outputFileExtension, blob);
-
const request = this.ffmpegTaskQueue.queueUpRequest(() =>
- timeoutMs ? withTimeout(go(), timeoutMs) : go(),
+ ffmpegExec(this.ffmpeg, command, outputFileExtension, blob),
);
return await request.promise;
diff --git a/web/apps/photos/src/worker/ml.worker.ts b/web/apps/photos/src/worker/ml.worker.ts
index ed46b7bd44bae653d9a7e1f33f673e8c63aba0c5..aa650d4affe7e126a6cf4b04f6942c76f13daa92 100644
--- a/web/apps/photos/src/worker/ml.worker.ts
+++ b/web/apps/photos/src/worker/ml.worker.ts
@@ -1,8 +1,8 @@
import log from "@/next/log";
import { expose } from "comlink";
import mlService from "services/machineLearning/machineLearningService";
+import { MachineLearningWorker } from "services/ml/types";
import { EnteFile } from "types/file";
-import { MachineLearningWorker } from "types/machineLearning";
export class DedicatedMLWorker implements MachineLearningWorker {
constructor() {
diff --git a/web/apps/photos/thirdparty/face-api/classes/BoundingBox.ts b/web/apps/photos/thirdparty/face-api/classes/BoundingBox.ts
deleted file mode 100644
index 7263b4b96c4a34eeb19cb9b04c0acb468b58e642..0000000000000000000000000000000000000000
--- a/web/apps/photos/thirdparty/face-api/classes/BoundingBox.ts
+++ /dev/null
@@ -1,14 +0,0 @@
-import { Box } from './Box';
-
-export interface IBoundingBox {
- left: number
- top: number
- right: number
- bottom: number
-}
-
-export class BoundingBox extends Box implements IBoundingBox {
- constructor(left: number, top: number, right: number, bottom: number, allowNegativeDimensions: boolean = false) {
- super({ left, top, right, bottom }, allowNegativeDimensions)
- }
-}
\ No newline at end of file
diff --git a/web/apps/photos/thirdparty/face-api/classes/Box.ts b/web/apps/photos/thirdparty/face-api/classes/Box.ts
deleted file mode 100644
index fcf1cbebb305fb0c85b5073482cb435e80feb456..0000000000000000000000000000000000000000
--- a/web/apps/photos/thirdparty/face-api/classes/Box.ts
+++ /dev/null
@@ -1,182 +0,0 @@
-import { IBoundingBox } from './BoundingBox';
-import { IDimensions } from './Dimensions';
-import { Point } from './Point';
-import { IRect } from './Rect';
-
-export class Box implements IBoundingBox, IRect {
-
- public static isRect(rect: any): boolean {
- return !!rect && [rect.x, rect.y, rect.width, rect.height].every(isValidNumber)
- }
-
- public static assertIsValidBox(box: any, callee: string, allowNegativeDimensions: boolean = false) {
- if (!Box.isRect(box)) {
- throw new Error(`${callee} - invalid box: ${JSON.stringify(box)}, expected object with properties x, y, width, height`)
- }
-
- if (!allowNegativeDimensions && (box.width < 0 || box.height < 0)) {
- throw new Error(`${callee} - width (${box.width}) and height (${box.height}) must be positive numbers`)
- }
- }
-
- public x: number
- public y: number
- public width: number
- public height: number
-
- constructor(_box: IBoundingBox | IRect, allowNegativeDimensions: boolean = true) {
- const box = (_box || {}) as any
-
- const isBbox = [box.left, box.top, box.right, box.bottom].every(isValidNumber)
- const isRect = [box.x, box.y, box.width, box.height].every(isValidNumber)
-
- if (!isRect && !isBbox) {
- throw new Error(`Box.constructor - expected box to be IBoundingBox | IRect, instead have ${JSON.stringify(box)}`)
- }
-
- const [x, y, width, height] = isRect
- ? [box.x, box.y, box.width, box.height]
- : [box.left, box.top, box.right - box.left, box.bottom - box.top]
-
- Box.assertIsValidBox({ x, y, width, height }, 'Box.constructor', allowNegativeDimensions)
-
- this.x = x
- this.y = y
- this.width = width
- this.height = height
- }
-
- // public get x(): number { return this._x }
- // public get y(): number { return this._y }
- // public get width(): number { return this._width }
- // public get height(): number { return this._height }
- public get left(): number { return this.x }
- public get top(): number { return this.y }
- public get right(): number { return this.x + this.width }
- public get bottom(): number { return this.y + this.height }
- public get area(): number { return this.width * this.height }
- public get topLeft(): Point { return new Point(this.left, this.top) }
- public get topRight(): Point { return new Point(this.right, this.top) }
- public get bottomLeft(): Point { return new Point(this.left, this.bottom) }
- public get bottomRight(): Point { return new Point(this.right, this.bottom) }
-
- public round(): Box {
- const [x, y, width, height] = [this.x, this.y, this.width, this.height]
- .map(val => Math.round(val))
- return new Box({ x, y, width, height })
- }
-
- public floor(): Box {
- const [x, y, width, height] = [this.x, this.y, this.width, this.height]
- .map(val => Math.floor(val))
- return new Box({ x, y, width, height })
- }
-
- public toSquare(): Box {
- let { x, y, width, height } = this
- const diff = Math.abs(width - height)
- if (width < height) {
- x -= (diff / 2)
- width += diff
- }
- if (height < width) {
- y -= (diff / 2)
- height += diff
- }
-
- return new Box({ x, y, width, height })
- }
-
- public rescale(s: IDimensions | number): Box {
- const scaleX = isDimensions(s) ? (s as IDimensions).width : s as number
- const scaleY = isDimensions(s) ? (s as IDimensions).height : s as number
- return new Box({
- x: this.x * scaleX,
- y: this.y * scaleY,
- width: this.width * scaleX,
- height: this.height * scaleY
- })
- }
-
- public pad(padX: number, padY: number): Box {
- let [x, y, width, height] = [
- this.x - (padX / 2),
- this.y - (padY / 2),
- this.width + padX,
- this.height + padY
- ]
- return new Box({ x, y, width, height })
- }
-
- public clipAtImageBorders(imgWidth: number, imgHeight: number): Box {
- const { x, y, right, bottom } = this
- const clippedX = Math.max(x, 0)
- const clippedY = Math.max(y, 0)
-
- const newWidth = right - clippedX
- const newHeight = bottom - clippedY
- const clippedWidth = Math.min(newWidth, imgWidth - clippedX)
- const clippedHeight = Math.min(newHeight, imgHeight - clippedY)
-
- return (new Box({ x: clippedX, y: clippedY, width: clippedWidth, height: clippedHeight})).floor()
- }
-
- public shift(sx: number, sy: number): Box {
- const { width, height } = this
- const x = this.x + sx
- const y = this.y + sy
-
- return new Box({ x, y, width, height })
- }
-
- public padAtBorders(imageHeight: number, imageWidth: number) {
- const w = this.width + 1
- const h = this.height + 1
-
- let dx = 1
- let dy = 1
- let edx = w
- let edy = h
-
- let x = this.left
- let y = this.top
- let ex = this.right
- let ey = this.bottom
-
- if (ex > imageWidth) {
- edx = -ex + imageWidth + w
- ex = imageWidth
- }
- if (ey > imageHeight) {
- edy = -ey + imageHeight + h
- ey = imageHeight
- }
- if (x < 1) {
- edy = 2 - x
- x = 1
- }
- if (y < 1) {
- edy = 2 - y
- y = 1
- }
-
- return { dy, edy, dx, edx, y, ey, x, ex, w, h }
- }
-
- public calibrate(region: Box) {
- return new Box({
- left: this.left + (region.left * this.width),
- top: this.top + (region.top * this.height),
- right: this.right + (region.right * this.width),
- bottom: this.bottom + (region.bottom * this.height)
- }).toSquare().round()
- }
-}
-
-export function isValidNumber(num: any) {
- return !!num && num !== Infinity && num !== -Infinity && !isNaN(num) || num === 0
-}
-
-export function isDimensions(obj: any): boolean {
- return obj && obj.width && obj.height
-}
diff --git a/web/apps/photos/thirdparty/face-api/classes/Dimensions.ts b/web/apps/photos/thirdparty/face-api/classes/Dimensions.ts
deleted file mode 100644
index 0129f3b6782d7059ece7e54ab3de493f57725199..0000000000000000000000000000000000000000
--- a/web/apps/photos/thirdparty/face-api/classes/Dimensions.ts
+++ /dev/null
@@ -1,28 +0,0 @@
-import { isValidNumber } from './Box';
-
-export interface IDimensions {
- width: number
- height: number
-}
-
-export class Dimensions implements IDimensions {
-
- private _width: number
- private _height: number
-
- constructor(width: number, height: number) {
- if (!isValidNumber(width) || !isValidNumber(height)) {
- throw new Error(`Dimensions.constructor - expected width and height to be valid numbers, instead have ${JSON.stringify({ width, height })}`)
- }
-
- this._width = width
- this._height = height
- }
-
- public get width(): number { return this._width }
- public get height(): number { return this._height }
-
- public reverse(): Dimensions {
- return new Dimensions(1 / this.width, 1 / this.height)
- }
-}
diff --git a/web/apps/photos/thirdparty/face-api/classes/Point.ts b/web/apps/photos/thirdparty/face-api/classes/Point.ts
deleted file mode 100644
index 3c32d5bc19746e06b158ea23d24d2a2bb46aaf80..0000000000000000000000000000000000000000
--- a/web/apps/photos/thirdparty/face-api/classes/Point.ts
+++ /dev/null
@@ -1,55 +0,0 @@
-export interface IPoint {
- x: number
- y: number
-}
-
-export class Point implements IPoint {
- public x: number
- public y: number
-
- constructor(x: number, y: number) {
- this.x = x
- this.y = y
- }
-
- // get x(): number { return this._x }
- // get y(): number { return this._y }
-
- public add(pt: IPoint): Point {
- return new Point(this.x + pt.x, this.y + pt.y)
- }
-
- public sub(pt: IPoint): Point {
- return new Point(this.x - pt.x, this.y - pt.y)
- }
-
- public mul(pt: IPoint): Point {
- return new Point(this.x * pt.x, this.y * pt.y)
- }
-
- public div(pt: IPoint): Point {
- return new Point(this.x / pt.x, this.y / pt.y)
- }
-
- public abs(): Point {
- return new Point(Math.abs(this.x), Math.abs(this.y))
- }
-
- public magnitude(): number {
- return Math.sqrt(Math.pow(this.x, 2) + Math.pow(this.y, 2))
- }
-
- public floor(): Point {
- return new Point(Math.floor(this.x), Math.floor(this.y))
- }
-
- public round(): Point {
- return new Point(Math.round(this.x), Math.round(this.y))
- }
-
- public bound(lower: number, higher: number): Point {
- const x = Math.max(lower, Math.min(higher, this.x));
- const y = Math.max(lower, Math.min(higher, this.y));
- return new Point(x, y);
- }
-}
\ No newline at end of file
diff --git a/web/apps/photos/thirdparty/face-api/classes/Rect.ts b/web/apps/photos/thirdparty/face-api/classes/Rect.ts
deleted file mode 100644
index 550676984a9ab573cbed517f24965af1bfbb24b1..0000000000000000000000000000000000000000
--- a/web/apps/photos/thirdparty/face-api/classes/Rect.ts
+++ /dev/null
@@ -1,14 +0,0 @@
-import { Box } from './Box';
-
-export interface IRect {
- x: number
- y: number
- width: number
- height: number
-}
-
-export class Rect extends Box implements IRect {
- constructor(x: number, y: number, width: number, height: number, allowNegativeDimensions: boolean = false) {
- super({ x, y, width, height }, allowNegativeDimensions)
- }
-}
\ No newline at end of file
diff --git a/web/apps/photos/thirdparty/face-api/classes/index.ts b/web/apps/photos/thirdparty/face-api/classes/index.ts
deleted file mode 100644
index 9bb7cccf403ed35f774a9b8673c0e9826176f747..0000000000000000000000000000000000000000
--- a/web/apps/photos/thirdparty/face-api/classes/index.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-export * from './BoundingBox'
-export * from './Box'
-export * from './Dimensions'
-export * from './Point'
-export * from './Rect'
\ No newline at end of file
diff --git a/web/docs/dependencies.md b/web/docs/dependencies.md
index 83c4c16c842f5ef21d9bbd9e56d004191606a657..9c792e5ff7af87555bc67ad30c32030b76f1de4c 100644
--- a/web/docs/dependencies.md
+++ b/web/docs/dependencies.md
@@ -133,17 +133,27 @@ some cases.
## Media
-- ["jszip"](https://github.com/Stuk/jszip) is used for reading zip files in
+- [jszip](https://github.com/Stuk/jszip) is used for reading zip files in
JavaScript (Live photos are zip files under the hood).
-- ["file-type"](https://github.com/sindresorhus/file-type) is used for MIME
- type detection. We are at an old version 16.5.4 because v17 onwards the
- package became ESM only - for our limited use case, the custom Webpack
- configuration that entails is not worth the upgrade.
+- [file-type](https://github.com/sindresorhus/file-type) is used for MIME type
+ detection. We are at an old version 16.5.4 because v17 onwards the package
+ became ESM only - for our limited use case, the custom Webpack configuration
+ that entails is not worth the upgrade.
+
+- [heic-convert](https://github.com/catdad-experiments/heic-convert) is used
+ for converting HEIC files (which browsers don't natively support) into JPEG.
+
+## Processing
+
+- [comlink](https://github.com/GoogleChromeLabs/comlink) provides a minimal
+  layer on top of Web Workers to make them easier to use.
## Photos app specific
-### Misc
+- [react-dropzone](https://github.com/react-dropzone/react-dropzone/) is a
+ React hook to create a drag-and-drop input zone.
-- "sanitize-filename" is for converting arbitrary strings into strings that
- are suitable for being used as filenames.
+- [sanitize-filename](https://github.com/parshap/node-sanitize-filename) is
+ for converting arbitrary strings into strings that are suitable for being
+ used as filenames.
diff --git a/web/docs/deploy.md b/web/docs/deploy.md
index 6358cb87f2bb4d4c16d92dc5dc13375d7ae859cb..75c3106d18d0ded3ee9944d6f28c90348a0af8d0 100644
--- a/web/docs/deploy.md
+++ b/web/docs/deploy.md
@@ -1,50 +1,46 @@
# Deploying
The various web apps and static sites in this repository are deployed on
-Cloudflare Pages.
+Cloudflare Pages using GitHub workflows.
-- Production deployments are triggered by pushing to the `deploy/*` branches.
+- Automated production deployments of `main` happen daily at 8:00 AM IST.
+
+- Automated staging deployments of `main` to `*.ente.sh` happen daily at 3:00 PM IST.
- [help.ente.io](https://help.ente.io) gets deployed whenever a PR that
changes anything inside `docs/` gets merged to `main`.
-- Every night, all the web apps get automatically deployed to a nightly
- preview URLs (`*.ente.sh`) using the current code in main.
-
-- A preview deployment can be made by triggering the "Preview (web)" workflow.
- This allows us to deploy a build of any of the apps from an arbitrary branch
- to [preview.ente.sh](https://preview.ente.sh).
-
-Use the various `yarn deploy:*` commands to help with production deployments.
-For example, `yarn deploy:photos` will open a PR to merge the current `main`
-onto `deploy/photos`, which'll trigger the deployment workflow, which'll build
-and publish to [web.ente.io](https://web.ente.io).
+- Production or staging deployments can be made manually by triggering the
+  corresponding workflow. There is a variant to deploy a single app to
+  production using the `web-deploy-one.yml` workflow, and a variant to deploy
+  any one of the apps to `preview.ente.sh` (see below).
-> When merging these deployment PRs, remember to use rebase and merge so that
-> their HEAD is a fast forward of `main` instead of diverging from it because of
-> the merge commit.
+These GitHub workflows use the various `yarn deploy:*` commands. For example,
+`yarn deploy:photos` will open a PR to merge the current `main` onto
+`deploy/photos`, which'll trigger the deployment workflow, which'll build and
+publish to [web.ente.io](https://web.ente.io).
## Deployments
Here is a list of all the deployments, whether or not they are production
deployments, and the action that triggers them:
-| URL | Type | Deployment action |
-| -------------------------------------------- | ---------- | -------------------------------------------- |
-| [web.ente.io](https://web.ente.io) | Production | Push to `deploy/photos` |
-| [photos.ente.io](https://photos.ente.io) | Production | Alias of [web.ente.io](https://web.ente.io) |
-| [auth.ente.io](https://auth.ente.io) | Production | Push to `deploy/auth` |
-| [accounts.ente.io](https://accounts.ente.io) | Production | Push to `deploy/accounts` |
-| [cast.ente.io](https://cast.ente.io) | Production | Push to `deploy/cast` |
-| [payments.ente.io](https://payments.ente.io) | Production | Push to `deploy/payments` |
-| [help.ente.io](https://help.ente.io) | Production | Push to `main` + changes in `docs/` |
-| [staff.ente.sh](https://staff.ente.sh) | Production | Push to `main` + changes in `web/apps/staff` |
-| [accounts.ente.sh](https://accounts.ente.sh) | Preview | Nightly deploy of `main` |
-| [auth.ente.sh](https://auth.ente.sh) | Preview | Nightly deploy of `main` |
-| [cast.ente.sh](https://cast.ente.sh) | Preview | Nightly deploy of `main` |
-| [payments.ente.sh](https://payments.ente.sh) | Preview | Nightly deploy of `main` |
-| [photos.ente.sh](https://photos.ente.sh) | Preview | Nightly deploy of `main` |
-| [preview.ente.sh](https://preview.ente.sh) | Preview | Manually triggered |
+| URL | Type | Deployment action |
+| -------------------------------------------- | ---------- | --------------------------------------------- |
+| [web.ente.io](https://web.ente.io) | Production | Daily deploy of `main` |
+| [photos.ente.io](https://photos.ente.io) | Production | Alias of [web.ente.io](https://web.ente.io) |
+| [auth.ente.io](https://auth.ente.io) | Production | Daily deploy of `main` |
+| [accounts.ente.io](https://accounts.ente.io) | Production | Daily deploy of `main` |
+| [cast.ente.io](https://cast.ente.io) | Production | Daily deploy of `main` |
+| [payments.ente.io](https://payments.ente.io) | Production | Daily deploy of `main` |
+| [help.ente.io](https://help.ente.io) | Production | Changes in `docs/` on push to `main` |
+| [staff.ente.sh](https://staff.ente.sh) | Production | Changes in `web/apps/staff` on push to `main` |
+| [accounts.ente.sh](https://accounts.ente.sh) | Preview | Daily deploy of `main` |
+| [auth.ente.sh](https://auth.ente.sh) | Preview | Daily deploy of `main` |
+| [cast.ente.sh](https://cast.ente.sh) | Preview | Daily deploy of `main` |
+| [payments.ente.sh](https://payments.ente.sh) | Preview | Daily deploy of `main` |
+| [photos.ente.sh](https://photos.ente.sh) | Preview | Daily deploy of `main` |
+| [preview.ente.sh](https://preview.ente.sh) | Preview | Manually triggered |
### Other subdomains
@@ -60,10 +56,10 @@ Apart from this, there are also some other deployments:
### Preview deployments
-To trigger a preview deployment, manually trigger the "Preview (web)" workflow
-from the Actions tab on GitHub. You'll need to select the app to build, and the
-branch to use. This'll then build the specified app (e.g. "photos") from that
-branch, and deploy it to [preview.ente.sh](https://preview.ente.sh).
+To trigger a preview deployment, manually trigger the "Deploy preview (web)"
+workflow from the Actions tab on GitHub. You'll need to select the app to build,
+and the branch to use. This'll then build the specified app (e.g. "photos") from
+that branch, and deploy it to [preview.ente.sh](https://preview.ente.sh).
The workflow can also be triggered using GitHub's CLI, gh. e.g.
diff --git a/web/package.json b/web/package.json
index 647ee3ba3ac18baf6381f3712b8cc203a79a4fd0..cae1c3a1a6f278dc8fd33e91ea748e12b82ceb98 100644
--- a/web/package.json
+++ b/web/package.json
@@ -27,8 +27,8 @@
"dev:payments": "yarn workspace payments dev",
"dev:photos": "yarn workspace photos next dev",
"dev:staff": "yarn workspace staff dev",
- "lint": "yarn prettier --check --log-level warn . && yarn workspaces run eslint --report-unused-disable-directives .",
- "lint-fix": "yarn prettier --write --log-level warn . && yarn workspaces run eslint --fix .",
+ "lint": "yarn prettier --check --log-level warn . && yarn workspaces run eslint --report-unused-disable-directives . && yarn workspaces run tsc",
+ "lint-fix": "yarn prettier --write --log-level warn . && yarn workspaces run eslint --fix . && yarn workspaces run tsc",
"preview": "yarn preview:photos",
"preview:accounts": "yarn build:accounts && python3 -m http.server -d apps/accounts/out 3001",
"preview:auth": "yarn build:auth && python3 -m http.server -d apps/auth/out 3000",
diff --git a/web/packages/accounts/components/ChangeEmail.tsx b/web/packages/accounts/components/ChangeEmail.tsx
index ec647e6712037ff9d6e8b264821808eaf1e801ee..0b175344bcb10bd3eaf472a8e7da425fc22426a8 100644
--- a/web/packages/accounts/components/ChangeEmail.tsx
+++ b/web/packages/accounts/components/ChangeEmail.tsx
@@ -1,3 +1,4 @@
+import { wait } from "@/utils/promise";
import { changeEmail, sendOTTForEmailChange } from "@ente/accounts/api/user";
import { APP_HOMES } from "@ente/shared/apps/constants";
import { PageProps } from "@ente/shared/apps/types";
@@ -6,7 +7,6 @@ import FormPaperFooter from "@ente/shared/components/Form/FormPaper/Footer";
import LinkButton from "@ente/shared/components/LinkButton";
import SubmitButton from "@ente/shared/components/SubmitButton";
import { LS_KEYS, getData, setData } from "@ente/shared/storage/localStorage";
-import { wait } from "@ente/shared/utils";
import { Alert, Box, TextField } from "@mui/material";
import { Formik, FormikHelpers } from "formik";
import { t } from "i18next";
diff --git a/web/packages/accounts/components/two-factor/VerifyForm.tsx b/web/packages/accounts/components/two-factor/VerifyForm.tsx
index b7f7fc2781d67093542de256699eed7a7edaa4df..76fd87ba05e35d5dae648d59bd43834f7b4ff590 100644
--- a/web/packages/accounts/components/two-factor/VerifyForm.tsx
+++ b/web/packages/accounts/components/two-factor/VerifyForm.tsx
@@ -1,16 +1,15 @@
-import { Formik, FormikHelpers } from "formik";
-import { t } from "i18next";
-import { useRef, useState } from "react";
-import OtpInput from "react-otp-input";
-
+import { wait } from "@/utils/promise";
import InvalidInputMessage from "@ente/accounts/components/two-factor/InvalidInputMessage";
import {
CenteredFlex,
VerticallyCentered,
} from "@ente/shared/components/Container";
import SubmitButton from "@ente/shared/components/SubmitButton";
-import { wait } from "@ente/shared/utils";
import { Box, Typography } from "@mui/material";
+import { Formik, FormikHelpers } from "formik";
+import { t } from "i18next";
+import { useRef, useState } from "react";
+import OtpInput from "react-otp-input";
interface formValues {
otp: string;
diff --git a/web/packages/accounts/pages/credentials.tsx b/web/packages/accounts/pages/credentials.tsx
index 3e8fbabbe69972a34daa48f54603b90e354d2222..1e93809c95838f8d6708a36b01b49d4d92e73e96 100644
--- a/web/packages/accounts/pages/credentials.tsx
+++ b/web/packages/accounts/pages/credentials.tsx
@@ -1,3 +1,4 @@
+import { isDevBuild } from "@/next/env";
import log from "@/next/log";
import { APP_HOMES } from "@ente/shared/apps/constants";
import { PageProps } from "@ente/shared/apps/types";
@@ -5,7 +6,6 @@ import { VerticallyCentered } from "@ente/shared/components/Container";
import EnteSpinner from "@ente/shared/components/EnteSpinner";
import FormPaper from "@ente/shared/components/Form/FormPaper";
import FormPaperFooter from "@ente/shared/components/Form/FormPaper/Footer";
-import FormPaperTitle from "@ente/shared/components/Form/FormPaper/Title";
import LinkButton from "@ente/shared/components/LinkButton";
import VerifyMasterPasswordForm, {
VerifyMasterPasswordFormProps,
@@ -19,7 +19,7 @@ import {
} from "@ente/shared/crypto/helpers";
import { B64EncryptionResult } from "@ente/shared/crypto/types";
import { CustomError } from "@ente/shared/error";
-import { getAccountsURL } from "@ente/shared/network/api";
+import { getAccountsURL, getEndpoint } from "@ente/shared/network/api";
import InMemoryStore, { MS_KEYS } from "@ente/shared/storage/InMemoryStore";
import {
LS_KEYS,
@@ -39,6 +39,7 @@ import {
setKey,
} from "@ente/shared/storage/sessionStorage";
import { KeyAttributes, User } from "@ente/shared/user/types";
+import { Typography, styled } from "@mui/material";
import { t } from "i18next";
import { useRouter } from "next/router";
import { useEffect, useState } from "react";
@@ -259,7 +260,7 @@ export default function Credentials({ appContext, appName }: PageProps) {
return (
- {t("PASSWORD")}
+
+
{t("FORGOT_PASSWORD")}
@@ -277,7 +279,41 @@ export default function Credentials({ appContext, appName }: PageProps) {
{t("CHANGE_EMAIL")}
+
+            {isDevBuild && <ConnectionDetails />}
);
}
+
+const Header: React.FC = ({ children }) => {
+ return (
+        <Header_>
+            <Typography variant="h2">{t("PASSWORD")}</Typography>
+            {children}
+        </Header_>
+ );
+};
+
+const Header_ = styled("div")`
+ margin-block-end: 4rem;
+ display: flex;
+ flex-direction: column;
+ gap: 8px;
+`;
+
+const ConnectionDetails: React.FC = () => {
+ const apiOrigin = new URL(getEndpoint());
+
+ return (
+        <ConnectionDetails_>
+            <Typography variant="small" color="text.faint">
+                {apiOrigin.host}
+            </Typography>
+        </ConnectionDetails_>
+ );
+};
+
+const ConnectionDetails_ = styled("div")`
+ margin-block-start: 1rem;
+`;
diff --git a/web/packages/accounts/services/user.ts b/web/packages/accounts/services/user.ts
index 8f6d6609a155bbc6a0aa4429f87a3dc6867fa69b..fdbfc770b31cc10a0b468471c4322a08407d8549 100644
--- a/web/packages/accounts/services/user.ts
+++ b/web/packages/accounts/services/user.ts
@@ -47,6 +47,11 @@ export const logoutUser = async () => {
} catch (e) {
log.error("Ignoring error when resetting native folder watches", e);
}
+ try {
+ await electron.clearConvertToMP4Results();
+ } catch (e) {
+ log.error("Ignoring error when clearing convert-to-mp4 results", e);
+ }
try {
await electron.clearStores();
} catch (e) {
diff --git a/web/packages/media/formats.ts b/web/packages/media/formats.ts
new file mode 100644
index 0000000000000000000000000000000000000000..1316b654f4be3d9c1c475d6df8266434b6ec3727
--- /dev/null
+++ b/web/packages/media/formats.ts
@@ -0,0 +1,34 @@
+/**
+ * Image file extensions that we know the browser is unlikely to have native
+ * support for.
+ */
+const nonWebImageFileExtensions = [
+ "heic",
+ "rw2",
+ "tiff",
+ "arw",
+ "cr3",
+ "cr2",
+ "raf",
+ "nef",
+ "psd",
+ "dng",
+ "tif",
+];
+
+/**
+ * Return `true` if {@link extension} is from amongst a known set of image file
+ * extensions that the browser is unlikely to have native support
+ * for. If we want to display such files in the browser, we'll need to convert
+ * them to some other format first.
+ */
+export const isNonWebImageFileExtension = (extension: string) =>
+ nonWebImageFileExtensions.includes(extension.toLowerCase());
+
+/**
+ * Return `true` if {@link extension} is for an HEIC-like file.
+ */
+export const isHEICExtension = (extension: string) => {
+ const ext = extension.toLowerCase();
+ return ext == "heic" || ext == "heif";
+};
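(A quick, hedged usage illustration of the two helpers in this new file; the import path assumes the package is importable as `@/media`, and the extension values are arbitrary examples.)

```ts
import { isHEICExtension, isNonWebImageFileExtension } from "@/media/formats";

isNonWebImageFileExtension("CR2"); // true  - a RAW format browsers can't render
isNonWebImageFileExtension("jpeg"); // false - natively supported, not in the list
isHEICExtension("HEIF"); // true  - matches heic/heif case-insensitively
```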
diff --git a/web/packages/media/image.ts b/web/packages/media/image.ts
new file mode 100644
index 0000000000000000000000000000000000000000..2912af02a4a28e5479259ffef569f2a392ba25fc
--- /dev/null
+++ b/web/packages/media/image.ts
@@ -0,0 +1,33 @@
+/**
+ * Compute optimal dimensions for a resized version of an image while
+ * maintaining aspect ratio of the source image.
+ *
+ * @param width The width of the source image.
+ *
+ * @param height The height of the source image.
+ *
+ * @param maxDimension The maximum width or height of the resized image.
+ *
+ * This function returns a new size limiting it to maximum width and height
+ * (both specified by {@link maxDimension}), while maintaining aspect ratio of
+ * the source {@link width} and {@link height}.
+ *
+ * It returns `{0, 0}` for invalid inputs.
+ */
+export const scaledImageDimensions = (
+ width: number,
+ height: number,
+ maxDimension: number,
+): { width: number; height: number } => {
+ if (width == 0 || height == 0) return { width: 0, height: 0 };
+ const widthScaleFactor = maxDimension / width;
+ const heightScaleFactor = maxDimension / height;
+ const scaleFactor = Math.min(widthScaleFactor, heightScaleFactor);
+ const resizedDimensions = {
+ width: Math.round(width * scaleFactor),
+ height: Math.round(height * scaleFactor),
+ };
+ if (resizedDimensions.width == 0 || resizedDimensions.height == 0)
+ return { width: 0, height: 0 };
+ return resizedDimensions;
+};
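(A worked example of the function above: a 4000×3000 source with `maxDimension` 1280 uses the scale factor min(1280/4000, 1280/3000) = 0.32, giving 1280×960. The import path assumes the package is importable as `@/media`.)

```ts
import { scaledImageDimensions } from "@/media/image";

scaledImageDimensions(4000, 3000, 1280); // { width: 1280, height: 960 }
scaledImageDimensions(3000, 4000, 1280); // { width: 960, height: 1280 }
scaledImageDimensions(0, 3000, 1280); // { width: 0, height: 0 } (invalid input)
```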
diff --git a/web/packages/media/package.json b/web/packages/media/package.json
index 8be7e8bb6c22c435a4c280f134049f47ef021942..bf71ed37b2bc0198f2be93eb14869431f1a6d584 100644
--- a/web/packages/media/package.json
+++ b/web/packages/media/package.json
@@ -5,6 +5,10 @@
"dependencies": {
"@/next": "*",
"file-type": "16.5.4",
+ "heic-convert": "^2.1",
"jszip": "^3.10"
+ },
+ "devDependencies": {
+ "@types/heic-convert": "^1.2.3"
}
}
diff --git a/web/packages/media/tsconfig.json b/web/packages/media/tsconfig.json
index f29c34811385ff06a448608c23badaf01a5ea8ae..bcc1151c11e25288c7c2bce6b58e38e6ca17b00e 100644
--- a/web/packages/media/tsconfig.json
+++ b/web/packages/media/tsconfig.json
@@ -1,5 +1,13 @@
{
"extends": "@/build-config/tsconfig-typecheck.json",
+ "compilerOptions": {
+ /* Also indicate expectation of a WebWorker runtime */
+ "lib": ["ESnext", "DOM", "DOM.Iterable", "WebWorker"]
+ },
/* Typecheck all files with the given extensions (here or in subfolders) */
- "include": ["**/*.ts", "**/*.tsx"]
+ "include": [
+ "**/*.ts",
+ "**/*.tsx",
+ "../../packages/next/global-electron.d.ts"
+ ]
}
diff --git a/web/packages/media/worker/heic-convert.ts b/web/packages/media/worker/heic-convert.ts
new file mode 100644
index 0000000000000000000000000000000000000000..476eac00a360ee3204f13e0efc328f238fac1a1d
--- /dev/null
+++ b/web/packages/media/worker/heic-convert.ts
@@ -0,0 +1,11 @@
+import { ComlinkWorker } from "@/next/worker/comlink-worker";
+import type { DedicatedHEICConvertWorker } from "./heic-convert.worker";
+
+export const createHEICConvertWebWorker = () =>
+ new Worker(new URL("heic-convert.worker.ts", import.meta.url));
+
+export const createHEICConvertComlinkWorker = () =>
+    new ComlinkWorker<typeof DedicatedHEICConvertWorker>(
+ "heic-convert-worker",
+ createHEICConvertWebWorker(),
+ );
diff --git a/web/apps/photos/src/worker/heic-convert.worker.ts b/web/packages/media/worker/heic-convert.worker.ts
similarity index 84%
rename from web/apps/photos/src/worker/heic-convert.worker.ts
rename to web/packages/media/worker/heic-convert.worker.ts
index 96a1a94684e5dc68e4bc7d5a6247d1df7cee43b9..ffb5eb158284e5f2b74f17cf901c021f2daf650f 100644
--- a/web/apps/photos/src/worker/heic-convert.worker.ts
+++ b/web/packages/media/worker/heic-convert.worker.ts
@@ -7,7 +7,7 @@ export class DedicatedHEICConvertWorker {
}
}
-expose(DedicatedHEICConvertWorker, self);
+expose(DedicatedHEICConvertWorker);
/**
* Convert a HEIC file to a JPEG file.
@@ -18,5 +18,5 @@ export const heicToJPEG = async (heicBlob: Blob): Promise<Blob> => {
const buffer = new Uint8Array(await heicBlob.arrayBuffer());
const result = await HeicConvert({ buffer, format: "JPEG" });
const convertedData = new Uint8Array(result);
- return new Blob([convertedData]);
+ return new Blob([convertedData], { type: "image/jpeg" });
};
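(A minimal sketch of how the `heicToJPEG` helper above could be used directly; the `previewableURLForHEIC` wrapper is hypothetical, and the import path assumes the package is importable as `@/media`. In the app itself the conversion would normally be dispatched to the comlink worker created by `createHEICConvertComlinkWorker` rather than run on the calling thread.)

```ts
import { heicToJPEG } from "@/media/worker/heic-convert.worker";

/** Convert a user-selected HEIC file and return an object URL for previewing it. */
export const previewableURLForHEIC = async (heicFile: File): Promise<string> => {
    // A File is a Blob, so it can be passed to heicToJPEG as-is.
    const jpegBlob = await heicToJPEG(heicFile);
    // The returned blob is tagged "image/jpeg" (see the Blob constructor above).
    return URL.createObjectURL(jpegBlob);
};
```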
diff --git a/web/packages/next/i18n.ts b/web/packages/next/i18n.ts
index 913ecf746ea7fce208b79812d7d84758c851f311..cdc60e27ca480471d0ef8f520f83e2ea165b3384 100644
--- a/web/packages/next/i18n.ts
+++ b/web/packages/next/i18n.ts
@@ -22,6 +22,7 @@ import { object, string } from "yup";
export const supportedLocales = [
"en-US" /* English */,
"fr-FR" /* French */,
+ "de-DE" /* German */,
"zh-CN" /* Simplified Chinese */,
"nl-NL" /* Dutch */,
"es-ES" /* Spanish */,
@@ -209,6 +210,8 @@ const closestSupportedLocale = (
return "en-US";
} else if (ls.startsWith("fr")) {
return "fr-FR";
+ } else if (ls.startsWith("de")) {
+ return "de-DE";
} else if (ls.startsWith("zh")) {
return "zh-CN";
} else if (ls.startsWith("nl")) {
diff --git a/web/packages/next/locales/bg-BG/translation.json b/web/packages/next/locales/bg-BG/translation.json
index 699dae1dac26e2c9a4398b3c570dbd5570083f6a..dbdfc6e266c2b0c46da38abf5e96a2248ba98b3a 100644
--- a/web/packages/next/locales/bg-BG/translation.json
+++ b/web/packages/next/locales/bg-BG/translation.json
@@ -418,7 +418,6 @@
"ALL_HIDDEN_ALBUMS": "",
"HIDDEN_ALBUMS": "",
"HIDDEN_ITEMS": "",
- "HIDDEN_ITEMS_SECTION_NAME": "",
"ENTER_TWO_FACTOR_OTP": "",
"CREATE_ACCOUNT": "",
"COPIED": "",
@@ -448,10 +447,9 @@
"DOWNLOAD_LOGS_MESSAGE": "",
"CHANGE_FOLDER": "",
"TWO_MONTHS_FREE": "",
- "GB": "",
"POPULAR": "",
"FREE_PLAN_OPTION_LABEL": "",
- "FREE_PLAN_DESCRIPTION": "",
+ "free_plan_description": "",
"CURRENT_USAGE": "",
"WEAK_DEVICE": "",
"DRAG_AND_DROP_HINT": "",
@@ -495,12 +493,12 @@
"EXPORT_DIRECTORY_DOES_NOT_EXIST": "",
"EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": "",
"SUBSCRIPTION_VERIFICATION_ERROR": "",
- "STORAGE_UNITS": {
- "B": "",
- "KB": "",
- "MB": "",
- "GB": "",
- "TB": ""
+ "storage_unit": {
+ "b": "",
+ "kb": "",
+ "mb": "",
+ "gb": "",
+ "tb": ""
},
"AFTER_TIME": {
"HOUR": "",
@@ -567,6 +565,9 @@
"IMAGE": "",
"VIDEO": "",
"LIVE_PHOTO": "",
+ "editor": {
+ "crop": ""
+ },
"CONVERT": "",
"CONFIRM_EDITOR_CLOSE_MESSAGE": "",
"CONFIRM_EDITOR_CLOSE_DESCRIPTION": "",
diff --git a/web/packages/next/locales/de-DE/translation.json b/web/packages/next/locales/de-DE/translation.json
index 21875c549f8e4ab4de8426d94c915fd12d1365b1..183b1d8037417b6f2cb1eaa4629491d31e3aca46 100644
--- a/web/packages/next/locales/de-DE/translation.json
+++ b/web/packages/next/locales/de-DE/translation.json
@@ -340,11 +340,11 @@
"UPDATE_CREATION_TIME_COMPLETED": "Alle Dateien erfolgreich aktualisiert",
"UPDATE_CREATION_TIME_COMPLETED_WITH_ERROR": "Aktualisierung der Dateizeit für einige Dateien fehlgeschlagen, bitte versuche es erneut",
"CAPTION_CHARACTER_LIMIT": "Maximal 5000 Zeichen",
- "DATE_TIME_ORIGINAL": "",
- "DATE_TIME_DIGITIZED": "",
- "METADATA_DATE": "",
+ "DATE_TIME_ORIGINAL": "EXIF:DateTimeOriginal",
+ "DATE_TIME_DIGITIZED": "EXIF:DateTimeDigitized",
+ "METADATA_DATE": "EXIF:MetadataDate",
"CUSTOM_TIME": "Benutzerdefinierte Zeit",
- "REOPEN_PLAN_SELECTOR_MODAL": "",
+ "REOPEN_PLAN_SELECTOR_MODAL": "Aboauswahl erneut öffnen",
"OPEN_PLAN_SELECTOR_MODAL_FAILED": "Fehler beim Öffnen der Pläne",
"INSTALL": "Installieren",
"SHARING_DETAILS": "Details teilen",
@@ -374,7 +374,7 @@
"ADD_MORE": "Mehr hinzufügen",
"VIEWERS": "Zuschauer",
"OR_ADD_EXISTING": "Oder eine Vorherige auswählen",
- "REMOVE_PARTICIPANT_MESSAGE": "",
+    "REMOVE_PARTICIPANT_MESSAGE": "{{selectedEmail}} wird vom Album entfernt\nAlle Bilder von {{selectedEmail}} werden ebenfalls aus dem Album entfernt\n",
"NOT_FOUND": "404 - Nicht gefunden",
"LINK_EXPIRED": "Link ist abgelaufen",
"LINK_EXPIRED_MESSAGE": "Dieser Link ist abgelaufen oder wurde deaktiviert!",
@@ -388,9 +388,9 @@
"LINK_EXPIRY": "Ablaufdatum des Links",
"NEVER": "Niemals",
"DISABLE_FILE_DOWNLOAD": "Download deaktivieren",
- "DISABLE_FILE_DOWNLOAD_MESSAGE": "",
+    "DISABLE_FILE_DOWNLOAD_MESSAGE": "Bist du sicher, dass du den Downloadbutton für Dateien deaktivieren möchtest?\nBetrachter können weiterhin Screenshots machen oder die Bilder mithilfe externer Werkzeuge speichern\n",
"SHARED_USING": "Freigegeben über ",
- "SHARING_REFERRAL_CODE": "",
+ "SHARING_REFERRAL_CODE": "Benutze den code {{referralCode}} für 10GB extra",
"LIVE": "LIVE",
"DISABLE_PASSWORD": "Passwort-Sperre deaktivieren",
"DISABLE_PASSWORD_MESSAGE": "Sind Sie sicher, dass Sie die Passwort-Sperre deaktivieren möchten?",
@@ -400,12 +400,12 @@
"UPLOAD_FILES": "Datei",
"UPLOAD_DIRS": "Ordner",
"UPLOAD_GOOGLE_TAKEOUT": "Google Takeout",
- "DEDUPLICATE_FILES": "",
+ "DEDUPLICATE_FILES": "Duplikate bereinigen",
"NO_DUPLICATES_FOUND": "Du hast keine Duplikate, die gelöscht werden können",
"FILES": "dateien",
- "EACH": "",
- "DEDUPLICATE_BASED_ON_SIZE": "",
- "STOP_ALL_UPLOADS_MESSAGE": "",
+ "EACH": "pro Datei",
+ "DEDUPLICATE_BASED_ON_SIZE": "Die folgenden Dateien wurden aufgrund ihrer Größe zusammengefasst. Bitte prüfe und lösche Dateien, die du für duplikate hälst",
+ "STOP_ALL_UPLOADS_MESSAGE": "Bist du sicher, dass du alle laufenden Uploads abbrechen möchtest?",
"STOP_UPLOADS_HEADER": "Hochladen stoppen?",
"YES_STOP_UPLOADS": "Ja, Hochladen stoppen",
"STOP_DOWNLOADS_HEADER": "Downloads anhalten?",
@@ -415,14 +415,13 @@
"albums_other": "{{count, number}} Alben",
"ALL_ALBUMS": "Alle Alben",
"ALBUMS": "Alben",
- "ALL_HIDDEN_ALBUMS": "",
- "HIDDEN_ALBUMS": "",
- "HIDDEN_ITEMS": "",
- "HIDDEN_ITEMS_SECTION_NAME": "",
+ "ALL_HIDDEN_ALBUMS": "Alle versteckten Alben",
+ "HIDDEN_ALBUMS": "Versteckte Alben",
+ "HIDDEN_ITEMS": "Versteckte Dateien",
"ENTER_TWO_FACTOR_OTP": "Gib den 6-stelligen Code aus\ndeiner Authentifizierungs-App ein.",
"CREATE_ACCOUNT": "Account erstellen",
"COPIED": "Kopiert",
- "WATCH_FOLDERS": "",
+ "WATCH_FOLDERS": "Überwachte Ordner",
"UPGRADE_NOW": "Jetzt upgraden",
"RENEW_NOW": "Jetzt erneuern",
"STORAGE": "Speicher",
@@ -431,34 +430,33 @@
"FAMILY": "Familie",
"FREE": "frei",
"OF": "von",
- "WATCHED_FOLDERS": "",
+ "WATCHED_FOLDERS": "Überwachte Ordner",
"NO_FOLDERS_ADDED": "Noch keine Ordner hinzugefügt!",
- "FOLDERS_AUTOMATICALLY_MONITORED": "",
- "UPLOAD_NEW_FILES_TO_ENTE": "",
+ "FOLDERS_AUTOMATICALLY_MONITORED": "Die Ordner, die du hier hinzufügst, werden überwacht, um automatisch",
+ "UPLOAD_NEW_FILES_TO_ENTE": "Neue Dateien bei Ente zu sichern",
"REMOVE_DELETED_FILES_FROM_ENTE": "Gelöschte Dateien aus Ente entfernen",
"ADD_FOLDER": "Ordner hinzufügen",
- "STOP_WATCHING": "",
- "STOP_WATCHING_FOLDER": "",
- "STOP_WATCHING_DIALOG_MESSAGE": "",
+ "STOP_WATCHING": "Nicht mehr überwachen",
+ "STOP_WATCHING_FOLDER": "Ordner nicht mehr überwachen?",
+ "STOP_WATCHING_DIALOG_MESSAGE": "Deine bestehenden Dateien werden nicht gelöscht, aber das verknüpfte Ente-Album wird bei Änderungen in diesem Ordner nicht mehr aktualisiert.",
"YES_STOP": "Ja, Stopp",
- "MONTH_SHORT": "",
+ "MONTH_SHORT": "M",
"YEAR": "Jahr",
"FAMILY_PLAN": "Familientarif",
"DOWNLOAD_LOGS": "Logs herunterladen",
- "DOWNLOAD_LOGS_MESSAGE": "",
+    "DOWNLOAD_LOGS_MESSAGE": "Hier kannst du Debug-Logs herunterladen, die du uns zur Fehleranalyse zusenden kannst.\nBeachte bitte, dass die Logs Dateinamen enthalten, um Probleme mit bestimmten Dateien nachvollziehen zu können.\n",
"CHANGE_FOLDER": "Ordner ändern",
"TWO_MONTHS_FREE": "Erhalte 2 Monate kostenlos bei Jahresabonnements",
- "GB": "GB",
"POPULAR": "Beliebt",
"FREE_PLAN_OPTION_LABEL": "Mit kostenloser Testversion fortfahren",
- "FREE_PLAN_DESCRIPTION": "1 GB für 1 Jahr",
+ "free_plan_description": "{{storage}} für 1 Jahr",
"CURRENT_USAGE": "Aktuelle Nutzung ist {{usage}} ",
- "WEAK_DEVICE": "",
- "DRAG_AND_DROP_HINT": "",
+ "WEAK_DEVICE": "Dein Browser ist nicht leistungsstark genug, um deine Bilder zu verschlüsseln. Versuche, dich an einem Computer bei Ente anzumelden, oder lade dir die Ente-App für dein Gerät (Handy oder Desktop) herunter.",
+ "DRAG_AND_DROP_HINT": "Oder ziehe Dateien per Drag-and-Drop in das Ente-Fenster",
"CONFIRM_ACCOUNT_DELETION_MESSAGE": "Deine hochgeladenen Daten werden zur Löschung vorgemerkt und dein Konto wird endgültig gelöscht. Dieser Vorgang kann nicht rückgängig gemacht werden.",
"AUTHENTICATE": "Authentifizieren",
- "UPLOADED_TO_SINGLE_COLLECTION": "",
- "UPLOADED_TO_SEPARATE_COLLECTIONS": "",
+ "UPLOADED_TO_SINGLE_COLLECTION": "In einzelnes Album hochgeladen",
+ "UPLOADED_TO_SEPARATE_COLLECTIONS": "In separate Alben hochgeladen",
"NEVERMIND": "Egal",
"UPDATE_AVAILABLE": "Neue Version verfügbar",
"UPDATE_INSTALLABLE_MESSAGE": "Eine neue Version von Ente ist für die Installation bereit.",
@@ -471,10 +469,10 @@
"YESTERDAY": "Gestern",
"NAME_PLACEHOLDER": "Name...",
"ROOT_LEVEL_FILE_WITH_FOLDER_NOT_ALLOWED": "Alben können nicht aus Datei/Ordnermix erstellt werden",
- "ROOT_LEVEL_FILE_WITH_FOLDER_NOT_ALLOWED_MESSAGE": "",
+    "ROOT_LEVEL_FILE_WITH_FOLDER_NOT_ALLOWED_MESSAGE": "Du hast sowohl Dateien als auch Ordner in das Ente-Fenster gezogen.\nBitte wähle entweder nur Dateien oder nur Ordner aus, wenn separate Alben erstellt werden sollen\n",
"CHOSE_THEME": "Design auswählen",
"ML_SEARCH": "Gesichtserkennung",
- "ENABLE_ML_SEARCH_DESCRIPTION": "",
+    "ENABLE_ML_SEARCH_DESCRIPTION": "Hiermit wird on-device machine learning aktiviert, und die Gesichtserkennung beginnt damit, die Fotos auf deinem Gerät zu analysieren.\nBeim ersten Durchlauf nach der Anmeldung oder Aktivierung der Funktion werden alle Bilder auf dein Gerät heruntergeladen, um analysiert zu werden. Bitte aktiviere diese Funktion nur, wenn du einverstanden bist, dass dein Gerät die dafür benötigte Bandbreite und Rechenleistung aufbringt.\nFalls dies das erste Mal ist, dass du diese Funktion aktivierst, werden wir deine Erlaubnis zur Verarbeitung von Gesichtsdaten einholen.\n",
"ML_MORE_DETAILS": "Weitere Details",
"ENABLE_FACE_SEARCH": "Gesichtserkennung aktivieren",
"ENABLE_FACE_SEARCH_TITLE": "Gesichtserkennung aktivieren?",
@@ -482,25 +480,25 @@
"DISABLE_BETA": "Beta deaktivieren",
"DISABLE_FACE_SEARCH": "Gesichtserkennung deaktivieren",
"DISABLE_FACE_SEARCH_TITLE": "Gesichtserkennung deaktivieren?",
- "DISABLE_FACE_SEARCH_DESCRIPTION": "",
+    "DISABLE_FACE_SEARCH_DESCRIPTION": "Ente wird aufhören, Gesichtsdaten zu verarbeiten.\nDu kannst die Gesichtserkennung jederzeit wieder aktivieren, wenn du möchtest, daher ist dieser Vorgang risikofrei.\n",
"ADVANCED": "Erweitert",
"FACE_SEARCH_CONFIRMATION": "Ich verstehe und möchte Ente erlauben, Gesichtsgeometrie zu verarbeiten",
"LABS": "Experimente",
- "YOURS": "",
+ "YOURS": "von dir",
"PASSPHRASE_STRENGTH_WEAK": "Passwortstärke: Schwach",
"PASSPHRASE_STRENGTH_MODERATE": "Passwortstärke: Moderat",
"PASSPHRASE_STRENGTH_STRONG": "Passwortstärke: Stark",
"PREFERENCES": "Einstellungen",
"LANGUAGE": "Sprache",
"EXPORT_DIRECTORY_DOES_NOT_EXIST": "Ungültiges Exportverzeichnis",
- "EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": "",
+    "EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": "Das von dir gewählte Exportverzeichnis existiert nicht.\nBitte wähle einen gültigen Ordner.\n",
"SUBSCRIPTION_VERIFICATION_ERROR": "Verifizierung des Abonnements fehlgeschlagen",
- "STORAGE_UNITS": {
- "B": "B",
- "KB": "KB",
- "MB": "MB",
- "GB": "GB",
- "TB": "TB"
+ "storage_unit": {
+ "b": "B",
+ "kb": "KB",
+ "mb": "MB",
+ "gb": "GB",
+ "tb": "TB"
},
"AFTER_TIME": {
"HOUR": "nach einer Stunde",
@@ -516,39 +514,39 @@
"CREATE_PUBLIC_SHARING": "Öffentlichen Link erstellen",
"PUBLIC_LINK_CREATED": "Öffentlicher Link erstellt",
"PUBLIC_LINK_ENABLED": "Öffentlicher Link aktiviert",
- "COLLECT_PHOTOS": "",
- "PUBLIC_COLLECT_SUBTEXT": "",
+ "COLLECT_PHOTOS": "Bilder sammeln",
+ "PUBLIC_COLLECT_SUBTEXT": "Erlaube Personen mit diesem Link, Fotos zum gemeinsamen Album hinzuzufügen.",
"STOP_EXPORT": "Stop",
- "EXPORT_PROGRESS": "",
+ "EXPORT_PROGRESS": "{{progress.success, number}} / {{progress.total, number}} Dateien synchronisiert",
"MIGRATING_EXPORT": "Vorbereiten...",
"RENAMING_COLLECTION_FOLDERS": "Albumordner umbenennen...",
- "TRASHING_DELETED_FILES": "",
- "TRASHING_DELETED_COLLECTIONS": "",
- "CONTINUOUS_EXPORT": "",
- "PENDING_ITEMS": "",
- "EXPORT_STARTING": "",
- "DELETE_ACCOUNT_REASON_LABEL": "",
- "DELETE_ACCOUNT_REASON_PLACEHOLDER": "",
+ "TRASHING_DELETED_FILES": "Verschiebe gelöschte Dateien in den Trash-Ordner...",
+ "TRASHING_DELETED_COLLECTIONS": "Verschiebe gelöschte Alben in den Trash-Ordner...",
+ "CONTINUOUS_EXPORT": "Stets aktuell halten",
+ "PENDING_ITEMS": "Ausstehende Dateien",
+ "EXPORT_STARTING": "Starte Export...",
+ "DELETE_ACCOUNT_REASON_LABEL": "Was ist der Hauptgrund für die Löschung deines Kontos?",
+ "DELETE_ACCOUNT_REASON_PLACEHOLDER": "Wähle einen Grund aus",
"DELETE_REASON": {
- "MISSING_FEATURE": "",
- "BROKEN_BEHAVIOR": "",
- "FOUND_ANOTHER_SERVICE": "",
- "NOT_LISTED": ""
+ "MISSING_FEATURE": "Es fehlt eine wichtige Funktion die ich benötige",
+ "BROKEN_BEHAVIOR": "Die App oder eine bestimmte Funktion verhält sich nicht so wie gedacht",
+ "FOUND_ANOTHER_SERVICE": "Ich habe einen anderen Dienst gefunden, der mir mehr zusagt",
+ "NOT_LISTED": "Mein Grund ist nicht aufgeführt"
},
- "DELETE_ACCOUNT_FEEDBACK_LABEL": "",
+ "DELETE_ACCOUNT_FEEDBACK_LABEL": "Wir bedauern sehr, dass uns verlässt. Bitte hilf uns besser zu werden, indem du uns sagst warum du gehst.",
"DELETE_ACCOUNT_FEEDBACK_PLACEHOLDER": "Feedback",
"CONFIRM_DELETE_ACCOUNT_CHECKBOX_LABEL": "Ja, ich möchte dieses Konto und alle enthaltenen Daten endgültig und unwiderruflich löschen",
"CONFIRM_DELETE_ACCOUNT": "Kontolöschung bestätigen",
- "FEEDBACK_REQUIRED": "",
+ "FEEDBACK_REQUIRED": "Bitte hilf uns durch das Angeben dieser Daten",
"FEEDBACK_REQUIRED_FOUND_ANOTHER_SERVICE": "Was macht der andere Dienst besser?",
"RECOVER_TWO_FACTOR": "Zwei-Faktor wiederherstellen",
- "at": "",
+ "at": "um",
"AUTH_NEXT": "Weiter",
- "AUTH_DOWNLOAD_MOBILE_APP": "",
+ "AUTH_DOWNLOAD_MOBILE_APP": "Lade unsere smartphone App herunter, um deine Schlüssel zu verwalten",
"HIDDEN": "Versteckt",
"HIDE": "Ausblenden",
"UNHIDE": "Einblenden",
- "UNHIDE_TO_COLLECTION": "",
+ "UNHIDE_TO_COLLECTION": "In Album wieder sichtbar machen",
"SORT_BY": "Sortieren nach",
"NEWEST_FIRST": "Neueste zuerst",
"OLDEST_FIRST": "Älteste zuerst",
@@ -562,14 +560,17 @@
"DOWNLOAD_PROGRESS": "{{progress.current}} / {{progress.total}} Dateien",
"CHRISTMAS": "Weihnachten",
"CHRISTMAS_EVE": "Heiligabend",
- "NEW_YEAR": "",
- "NEW_YEAR_EVE": "",
+ "NEW_YEAR": "Neujahr",
+ "NEW_YEAR_EVE": "Silvester",
"IMAGE": "Bild",
"VIDEO": "Video",
"LIVE_PHOTO": "Live-Foto",
+ "editor": {
+ "crop": ""
+ },
"CONVERT": "Konvertieren",
- "CONFIRM_EDITOR_CLOSE_MESSAGE": "",
- "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "",
+ "CONFIRM_EDITOR_CLOSE_MESSAGE": "Editor wirklich schließen?",
+ "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "Lade dein bearbeitetes Bild herunter oder speichere es in Ente, um die Änderungen nicht zu verlieren.",
"BRIGHTNESS": "Helligkeit",
"CONTRAST": "Kontrast",
"SATURATION": "Sättigung",
@@ -581,7 +582,7 @@
"ROTATE_RIGHT": "Nach rechts drehen",
"FLIP_VERTICALLY": "Vertikal spiegeln",
"FLIP_HORIZONTALLY": "Horizontal spiegeln",
- "DOWNLOAD_EDITED": "",
+ "DOWNLOAD_EDITED": "Bearbeitetes Bild herunterladen",
"SAVE_A_COPY_TO_ENTE": "Kopie in Ente speichern",
"RESTORE_ORIGINAL": "Original wiederherstellen",
"TRANSFORM": "Transformieren",
@@ -590,24 +591,24 @@
"ROTATION": "Drehen",
"RESET": "Zurücksetzen",
"PHOTO_EDITOR": "Foto-Editor",
- "FASTER_UPLOAD": "",
- "FASTER_UPLOAD_DESCRIPTION": "",
- "MAGIC_SEARCH_STATUS": "",
+ "FASTER_UPLOAD": "Schnelleres Hochladen",
+ "FASTER_UPLOAD_DESCRIPTION": "Uploads über nahegelegene Server leiten",
+ "MAGIC_SEARCH_STATUS": "Status der magischen Suche",
"INDEXED_ITEMS": "Indizierte Elemente",
"CAST_ALBUM_TO_TV": "Album auf Fernseher wiedergeben",
"ENTER_CAST_PIN_CODE": "Gib den Code auf dem Fernseher unten ein, um dieses Gerät zu koppeln.",
"PAIR_DEVICE_TO_TV": "Geräte koppeln",
"TV_NOT_FOUND": "Fernseher nicht gefunden. Hast du die PIN korrekt eingegeben?",
- "AUTO_CAST_PAIR": "",
- "AUTO_CAST_PAIR_DESC": "",
- "PAIR_WITH_PIN": "",
- "CHOOSE_DEVICE_FROM_BROWSER": "",
- "PAIR_WITH_PIN_DESC": "",
- "VISIT_CAST_ENTE_IO": "",
- "CAST_AUTO_PAIR_FAILED": "",
+ "AUTO_CAST_PAIR": "Automatisch verbinden",
+ "AUTO_CAST_PAIR_DESC": "Automatisches Verbinden funktioniert nur mit Geräten, die Chromecast unterstützen.",
+ "PAIR_WITH_PIN": "Mit PIN verbinden",
+ "CHOOSE_DEVICE_FROM_BROWSER": "Wähle ein Cast-Gerät aus dem Browser-Popup aus.",
+ "PAIR_WITH_PIN_DESC": "\"Mit PIN verbinden\" funktioniert mit jedem Bildschirm, auf dem du dein Album sehen möchtest.",
+ "VISIT_CAST_ENTE_IO": "Besuche {{url}} auf dem Gerät, das du verbinden möchtest.",
+ "CAST_AUTO_PAIR_FAILED": "Das automatische Verbinden über Chromecast ist fehlgeschlagen. Bitte versuche es erneut.",
"FREEHAND": "Freihand",
"APPLY_CROP": "Zuschnitt anwenden",
- "PHOTO_EDIT_REQUIRED_TO_SAVE": "",
+ "PHOTO_EDIT_REQUIRED_TO_SAVE": "Es muss mindestens eine Transformation oder Farbanpassung vorgenommen werden, bevor gespeichert werden kann.",
"PASSKEYS": "Passkeys",
"DELETE_PASSKEY": "Passkey löschen",
"DELETE_PASSKEY_CONFIRMATION": "Bist du sicher, dass du diesen Passkey löschen willst? Dieser Vorgang ist nicht umkehrbar.",
@@ -622,6 +623,6 @@
"TRY_AGAIN": "Erneut versuchen",
"PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Folge den Schritten in deinem Browser, um mit dem Anmelden fortzufahren.",
"LOGIN_WITH_PASSKEY": "Mit Passkey anmelden",
- "autogenerated_first_album_name": "",
- "autogenerated_default_album_name": ""
+ "autogenerated_first_album_name": "Mein erstes Album",
+ "autogenerated_default_album_name": "Neues Album"
}
diff --git a/web/packages/next/locales/en-US/translation.json b/web/packages/next/locales/en-US/translation.json
index 2fb9eadc6ac2dd3d36c0cccd42bd231a1f59fac4..f7acb63c8979c7e94a28be367b7fdcaa2a71fc68 100644
--- a/web/packages/next/locales/en-US/translation.json
+++ b/web/packages/next/locales/en-US/translation.json
@@ -418,7 +418,6 @@
"ALL_HIDDEN_ALBUMS": "All hidden albums",
"HIDDEN_ALBUMS": "Hidden albums",
"HIDDEN_ITEMS": "Hidden items",
- "HIDDEN_ITEMS_SECTION_NAME": "Hidden_items",
"ENTER_TWO_FACTOR_OTP": "Enter the 6-digit code from your authenticator app.",
"CREATE_ACCOUNT": "Create account",
"COPIED": "Copied",
@@ -448,10 +447,9 @@
"DOWNLOAD_LOGS_MESSAGE": "This will download debug logs, which you can email to us to help debug your issue.
Please note that file names will be included to help track issues with specific files.
",
"CHANGE_FOLDER": "Change Folder",
"TWO_MONTHS_FREE": "Get 2 months free on yearly plans",
- "GB": "GB",
"POPULAR": "Popular",
"FREE_PLAN_OPTION_LABEL": "Continue with free trial",
- "FREE_PLAN_DESCRIPTION": "1 GB for 1 year",
+ "free_plan_description": "{{storage}} for 1 year",
"CURRENT_USAGE": "Current usage is {{usage}} ",
"WEAK_DEVICE": "The web browser you're using is not powerful enough to encrypt your photos. Please try to log in to Ente on your computer, or download the Ente mobile/desktop app.",
"DRAG_AND_DROP_HINT": "Or drag and drop into the Ente window",
@@ -495,12 +493,12 @@
"EXPORT_DIRECTORY_DOES_NOT_EXIST": "Invalid export directory",
"EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": "The export directory you have selected does not exist.
Please select a valid directory.
",
"SUBSCRIPTION_VERIFICATION_ERROR": "Subscription verification failed",
- "STORAGE_UNITS": {
- "B": "B",
- "KB": "KB",
- "MB": "MB",
- "GB": "GB",
- "TB": "TB"
+ "storage_unit": {
+ "b": "B",
+ "kb": "KB",
+ "mb": "MB",
+ "gb": "GB",
+ "tb": "TB"
},
"AFTER_TIME": {
"HOUR": "after an hour",
@@ -567,6 +565,9 @@
"IMAGE": "Image",
"VIDEO": "Video",
"LIVE_PHOTO": "Live Photo",
+ "editor": {
+ "crop": "Crop"
+ },
"CONVERT": "Convert",
"CONFIRM_EDITOR_CLOSE_MESSAGE": "Are you sure you want to close the editor?",
"CONFIRM_EDITOR_CLOSE_DESCRIPTION": "Download your edited image or save a copy to Ente to persist your changes.",
diff --git a/web/packages/next/locales/es-ES/translation.json b/web/packages/next/locales/es-ES/translation.json
index 2916b5d5c07f276b42ecd0c527f29dd92dcd50aa..ec46bb7185e7f6d3d73424601d9889fe285a6ba2 100644
--- a/web/packages/next/locales/es-ES/translation.json
+++ b/web/packages/next/locales/es-ES/translation.json
@@ -418,7 +418,6 @@
"ALL_HIDDEN_ALBUMS": "",
"HIDDEN_ALBUMS": "",
"HIDDEN_ITEMS": "",
- "HIDDEN_ITEMS_SECTION_NAME": "",
"ENTER_TWO_FACTOR_OTP": "Ingrese el código de seis dígitos de su aplicación de autenticación a continuación.",
"CREATE_ACCOUNT": "Crear cuenta",
"COPIED": "Copiado",
@@ -448,10 +447,9 @@
"DOWNLOAD_LOGS_MESSAGE": "Esto descargará los registros de depuración, que puede enviarnos por correo electrónico para ayudarnos a depurar su problema.
Tenga en cuenta que los nombres de los archivos se incluirán para ayudar al seguimiento de problemas con archivos específicos.
",
"CHANGE_FOLDER": "Cambiar carpeta",
"TWO_MONTHS_FREE": "Obtén 2 meses gratis en planes anuales",
- "GB": "GB",
"POPULAR": "Popular",
"FREE_PLAN_OPTION_LABEL": "Continuar con el plan gratuito",
- "FREE_PLAN_DESCRIPTION": "1 GB por 1 año",
+ "free_plan_description": "{{storage}} por 1 año",
"CURRENT_USAGE": "El uso actual es {{usage}} ",
"WEAK_DEVICE": "El navegador web que está utilizando no es lo suficientemente poderoso para cifrar sus fotos. Por favor, intente iniciar sesión en ente en su computadora, o descargue la aplicación ente para móvil/escritorio.",
"DRAG_AND_DROP_HINT": "O arrastre y suelte en la ventana ente",
@@ -495,12 +493,12 @@
"EXPORT_DIRECTORY_DOES_NOT_EXIST": "Archivo de exportación inválido",
"EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": "El directorio de exportación seleccionado no existe.
Por favor, seleccione un directorio válido.
",
"SUBSCRIPTION_VERIFICATION_ERROR": "Falló la verificación de la suscripción",
- "STORAGE_UNITS": {
- "B": "B",
- "KB": "KB",
- "MB": "MB",
- "GB": "GB",
- "TB": "TB"
+ "storage_unit": {
+ "b": "B",
+ "kb": "KB",
+ "mb": "MB",
+ "gb": "GB",
+ "tb": "TB"
},
"AFTER_TIME": {
"HOUR": "después de una hora",
@@ -567,6 +565,9 @@
"IMAGE": "",
"VIDEO": "Video",
"LIVE_PHOTO": "",
+ "editor": {
+ "crop": ""
+ },
"CONVERT": "",
"CONFIRM_EDITOR_CLOSE_MESSAGE": "",
"CONFIRM_EDITOR_CLOSE_DESCRIPTION": "",
diff --git a/web/packages/next/locales/fa-IR/translation.json b/web/packages/next/locales/fa-IR/translation.json
index f06066116cc363507f9f3440829dab9e512602e7..ce0e8e6e1050b8ea55d4097bd8789eef8cad7a5b 100644
--- a/web/packages/next/locales/fa-IR/translation.json
+++ b/web/packages/next/locales/fa-IR/translation.json
@@ -418,7 +418,6 @@
"ALL_HIDDEN_ALBUMS": "",
"HIDDEN_ALBUMS": "",
"HIDDEN_ITEMS": "",
- "HIDDEN_ITEMS_SECTION_NAME": "",
"ENTER_TWO_FACTOR_OTP": "",
"CREATE_ACCOUNT": "",
"COPIED": "",
@@ -448,10 +447,9 @@
"DOWNLOAD_LOGS_MESSAGE": "",
"CHANGE_FOLDER": "",
"TWO_MONTHS_FREE": "",
- "GB": "",
"POPULAR": "",
"FREE_PLAN_OPTION_LABEL": "",
- "FREE_PLAN_DESCRIPTION": "",
+ "free_plan_description": "",
"CURRENT_USAGE": "",
"WEAK_DEVICE": "",
"DRAG_AND_DROP_HINT": "",
@@ -495,12 +493,12 @@
"EXPORT_DIRECTORY_DOES_NOT_EXIST": "",
"EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": "",
"SUBSCRIPTION_VERIFICATION_ERROR": "",
- "STORAGE_UNITS": {
- "B": "",
- "KB": "",
- "MB": "",
- "GB": "",
- "TB": ""
+ "storage_unit": {
+ "b": "",
+ "kb": "",
+ "mb": "",
+ "gb": "",
+ "tb": ""
},
"AFTER_TIME": {
"HOUR": "",
@@ -567,6 +565,9 @@
"IMAGE": "",
"VIDEO": "",
"LIVE_PHOTO": "",
+ "editor": {
+ "crop": ""
+ },
"CONVERT": "",
"CONFIRM_EDITOR_CLOSE_MESSAGE": "",
"CONFIRM_EDITOR_CLOSE_DESCRIPTION": "",
diff --git a/web/packages/next/locales/fi-FI/translation.json b/web/packages/next/locales/fi-FI/translation.json
index 38455b3e2d4b00885eca419e2e1da812cfc4035b..9f549eb49b8c51cdf36931bb43c9b0258d017309 100644
--- a/web/packages/next/locales/fi-FI/translation.json
+++ b/web/packages/next/locales/fi-FI/translation.json
@@ -418,7 +418,6 @@
"ALL_HIDDEN_ALBUMS": "",
"HIDDEN_ALBUMS": "",
"HIDDEN_ITEMS": "",
- "HIDDEN_ITEMS_SECTION_NAME": "",
"ENTER_TWO_FACTOR_OTP": "",
"CREATE_ACCOUNT": "",
"COPIED": "",
@@ -448,10 +447,9 @@
"DOWNLOAD_LOGS_MESSAGE": "",
"CHANGE_FOLDER": "",
"TWO_MONTHS_FREE": "",
- "GB": "",
"POPULAR": "",
"FREE_PLAN_OPTION_LABEL": "",
- "FREE_PLAN_DESCRIPTION": "",
+ "free_plan_description": "",
"CURRENT_USAGE": "",
"WEAK_DEVICE": "",
"DRAG_AND_DROP_HINT": "",
@@ -495,12 +493,12 @@
"EXPORT_DIRECTORY_DOES_NOT_EXIST": "",
"EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": "",
"SUBSCRIPTION_VERIFICATION_ERROR": "",
- "STORAGE_UNITS": {
- "B": "",
- "KB": "",
- "MB": "",
- "GB": "",
- "TB": ""
+ "storage_unit": {
+ "b": "",
+ "kb": "",
+ "mb": "",
+ "gb": "",
+ "tb": ""
},
"AFTER_TIME": {
"HOUR": "",
@@ -567,6 +565,9 @@
"IMAGE": "",
"VIDEO": "",
"LIVE_PHOTO": "",
+ "editor": {
+ "crop": ""
+ },
"CONVERT": "",
"CONFIRM_EDITOR_CLOSE_MESSAGE": "",
"CONFIRM_EDITOR_CLOSE_DESCRIPTION": "",
diff --git a/web/packages/next/locales/fr-FR/translation.json b/web/packages/next/locales/fr-FR/translation.json
index 89a054c22b5c141f9b49c0a0b01ba94cc75533fa..9af40b690c00004653c528e34ffe6c81dda24c61 100644
--- a/web/packages/next/locales/fr-FR/translation.json
+++ b/web/packages/next/locales/fr-FR/translation.json
@@ -418,7 +418,6 @@
"ALL_HIDDEN_ALBUMS": "Tous les albums masqués",
"HIDDEN_ALBUMS": "Albums masqués",
"HIDDEN_ITEMS": "Éléments masqués",
- "HIDDEN_ITEMS_SECTION_NAME": "Éléments masqués",
"ENTER_TWO_FACTOR_OTP": "Saisir le code à 6 caractères de votre appli d'authentification.",
"CREATE_ACCOUNT": "Créer un compte",
"COPIED": "Copié",
@@ -448,10 +447,9 @@
"DOWNLOAD_LOGS_MESSAGE": "Cela va télécharger les journaux de débug, que vous pourrez nosu envoyer par e-mail pour nous aider à résoudre votre problàme .
Veuillez noter que les noms de fichiers seront inclus .
",
"CHANGE_FOLDER": "Modifier le dossier",
"TWO_MONTHS_FREE": "Obtenir 2 mois gratuits sur les plans annuels",
- "GB": "Go",
"POPULAR": "Populaire",
"FREE_PLAN_OPTION_LABEL": "Poursuivre avec la version d'essai gratuite",
- "FREE_PLAN_DESCRIPTION": "1 Go pour 1 an",
+ "free_plan_description": "{{storage}} pour 1 an",
"CURRENT_USAGE": "L'utilisation actuelle est de {{usage}} ",
"WEAK_DEVICE": "Le navigateur que vous utilisez n'est pas assez puissant pour chiffrer vos photos. Veuillez essayer de vous connecter à Ente sur votre ordinateur, ou télécharger l'appli Ente mobile/ordinateur.",
"DRAG_AND_DROP_HINT": "Sinon glissez déposez dans la fenêtre Ente",
@@ -495,12 +493,12 @@
"EXPORT_DIRECTORY_DOES_NOT_EXIST": "Dossier d'export invalide",
"EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": " Le dossier d'export que vous avez sélectionné n'existe pas
Veuillez sélectionner un dossier valide
",
"SUBSCRIPTION_VERIFICATION_ERROR": "Échec de la vérification de l'abonnement",
- "STORAGE_UNITS": {
- "B": "o",
- "KB": "Ko",
- "MB": "Mo",
- "GB": "Go",
- "TB": "To"
+ "storage_unit": {
+ "b": "o",
+ "kb": "Ko",
+ "mb": "Mo",
+ "gb": "Go",
+ "tb": "To"
},
"AFTER_TIME": {
"HOUR": "dans une heure",
@@ -567,6 +565,9 @@
"IMAGE": "Image",
"VIDEO": "Vidéo",
"LIVE_PHOTO": "Photos en direct",
+ "editor": {
+ "crop": ""
+ },
"CONVERT": "Convertir",
"CONFIRM_EDITOR_CLOSE_MESSAGE": "Êtes-vous sûr de vouloir fermer l'éditeur ?",
"CONFIRM_EDITOR_CLOSE_DESCRIPTION": "Téléchargez votre image modifiée ou enregistrez une copie sur Ente pour maintenir vos modifications.",
diff --git a/web/packages/next/locales/is-IS/translation.json b/web/packages/next/locales/is-IS/translation.json
new file mode 100644
index 0000000000000000000000000000000000000000..80f443b5dea14415b7379c2a52d7e1cb39d587ef
--- /dev/null
+++ b/web/packages/next/locales/is-IS/translation.json
@@ -0,0 +1,628 @@
+{
+ "HERO_SLIDE_1_TITLE": "",
+ "HERO_SLIDE_1": "",
+ "HERO_SLIDE_2_TITLE": "",
+ "HERO_SLIDE_2": "",
+ "HERO_SLIDE_3_TITLE": "",
+ "HERO_SLIDE_3": "",
+ "LOGIN": "",
+ "SIGN_UP": "",
+ "NEW_USER": "",
+ "EXISTING_USER": "",
+ "ENTER_NAME": "",
+ "PUBLIC_UPLOADER_NAME_MESSAGE": "",
+ "ENTER_EMAIL": "",
+ "EMAIL_ERROR": "",
+ "REQUIRED": "",
+ "EMAIL_SENT": "",
+ "CHECK_INBOX": "",
+ "ENTER_OTT": "",
+ "RESEND_MAIL": "",
+ "VERIFY": "",
+ "UNKNOWN_ERROR": "",
+ "INVALID_CODE": "",
+ "EXPIRED_CODE": "",
+ "SENDING": "",
+ "SENT": "",
+ "PASSWORD": "Lykilorð",
+ "LINK_PASSWORD": "",
+ "RETURN_PASSPHRASE_HINT": "Lykilorð",
+ "SET_PASSPHRASE": "",
+ "VERIFY_PASSPHRASE": "",
+ "INCORRECT_PASSPHRASE": "Rangt lykilorð",
+ "ENTER_ENC_PASSPHRASE": "",
+ "PASSPHRASE_DISCLAIMER": "",
+ "WELCOME_TO_ENTE_HEADING": "",
+ "WELCOME_TO_ENTE_SUBHEADING": "",
+ "WHERE_YOUR_BEST_PHOTOS_LIVE": "",
+ "KEY_GENERATION_IN_PROGRESS_MESSAGE": "",
+ "PASSPHRASE_HINT": "",
+ "CONFIRM_PASSPHRASE": "",
+ "REFERRAL_CODE_HINT": "",
+ "REFERRAL_INFO": "",
+ "PASSPHRASE_MATCH_ERROR": "",
+ "CREATE_COLLECTION": "",
+ "ENTER_ALBUM_NAME": "",
+ "CLOSE_OPTION": "",
+ "ENTER_FILE_NAME": "",
+ "CLOSE": "Loka",
+ "NO": "Nei",
+ "NOTHING_HERE": "Ekkert að sjá hér ennþá 👀",
+ "UPLOAD": "Hlaða upp",
+ "IMPORT": "",
+ "ADD_PHOTOS": "",
+ "ADD_MORE_PHOTOS": "",
+ "add_photos_one": "",
+ "add_photos_other": "",
+ "SELECT_PHOTOS": "",
+ "FILE_UPLOAD": "",
+ "UPLOAD_STAGE_MESSAGE": {
+ "0": "",
+ "1": "",
+ "2": "",
+ "3": "",
+ "4": "",
+ "5": ""
+ },
+ "FILE_NOT_UPLOADED_LIST": "",
+ "SUBSCRIPTION_EXPIRED": "",
+ "SUBSCRIPTION_EXPIRED_MESSAGE": "",
+ "STORAGE_QUOTA_EXCEEDED": "",
+ "INITIAL_LOAD_DELAY_WARNING": "",
+ "USER_DOES_NOT_EXIST": "",
+ "NO_ACCOUNT": "",
+ "ACCOUNT_EXISTS": "",
+ "CREATE": "",
+ "DOWNLOAD": "",
+ "DOWNLOAD_OPTION": "",
+ "DOWNLOAD_FAVORITES": "",
+ "DOWNLOAD_UNCATEGORIZED": "",
+ "DOWNLOAD_HIDDEN_ITEMS": "",
+ "COPY_OPTION": "",
+ "TOGGLE_FULLSCREEN": "",
+ "ZOOM_IN_OUT": "",
+ "PREVIOUS": "",
+ "NEXT": "",
+ "TITLE_PHOTOS": "",
+ "TITLE_ALBUMS": "",
+ "TITLE_AUTH": "",
+ "UPLOAD_FIRST_PHOTO": "",
+ "IMPORT_YOUR_FOLDERS": "",
+ "UPLOAD_DROPZONE_MESSAGE": "",
+ "WATCH_FOLDER_DROPZONE_MESSAGE": "",
+ "TRASH_FILES_TITLE": "",
+ "TRASH_FILE_TITLE": "",
+ "DELETE_FILES_TITLE": "",
+ "DELETE_FILES_MESSAGE": "",
+ "DELETE": "Eyða",
+ "DELETE_OPTION": "",
+ "FAVORITE_OPTION": "",
+ "UNFAVORITE_OPTION": "",
+ "MULTI_FOLDER_UPLOAD": "",
+ "UPLOAD_STRATEGY_CHOICE": "",
+ "UPLOAD_STRATEGY_SINGLE_COLLECTION": "",
+ "OR": "eða",
+ "UPLOAD_STRATEGY_COLLECTION_PER_FOLDER": "",
+ "SESSION_EXPIRED_MESSAGE": "",
+ "SESSION_EXPIRED": "",
+ "PASSWORD_GENERATION_FAILED": "",
+ "CHANGE_PASSWORD": "",
+ "GO_BACK": "Fara til baka",
+ "RECOVERY_KEY": "",
+ "SAVE_LATER": "Gera þetta seinna",
+ "SAVE": "Vista Lykil",
+ "RECOVERY_KEY_DESCRIPTION": "",
+ "RECOVER_KEY_GENERATION_FAILED": "",
+ "KEY_NOT_STORED_DISCLAIMER": "",
+ "FORGOT_PASSWORD": "Gleymt lykilorð",
+ "RECOVER_ACCOUNT": "Endurheimta Reikning",
+ "RECOVERY_KEY_HINT": "Endurheimtunarlykill",
+ "RECOVER": "Endurheimta",
+ "NO_RECOVERY_KEY": "Enginn endurheimtunarlykill?",
+ "INCORRECT_RECOVERY_KEY": "",
+ "SORRY": "Fyrirgefðu",
+ "NO_RECOVERY_KEY_MESSAGE": "",
+ "NO_TWO_FACTOR_RECOVERY_KEY_MESSAGE": "",
+ "CONTACT_SUPPORT": "",
+ "REQUEST_FEATURE": "",
+ "SUPPORT": "",
+ "CONFIRM": "Staðfesta",
+ "CANCEL": "Hætta við",
+ "LOGOUT": "Útskrá",
+ "DELETE_ACCOUNT": "Eyða aðgangi",
+ "DELETE_ACCOUNT_MESSAGE": "",
+ "LOGOUT_MESSAGE": "Ertu viss um að þú viljir skrá þig út?",
+ "CHANGE_EMAIL": "Breyta netfangi",
+ "OK": "Í lagi",
+ "SUCCESS": "Tókst",
+ "ERROR": "Villa",
+ "MESSAGE": "Skilaboð",
+ "INSTALL_MOBILE_APP": "",
+ "DOWNLOAD_APP_MESSAGE": "",
+ "DOWNLOAD_APP": "",
+ "EXPORT": "",
+ "SUBSCRIPTION": "Áskrift",
+ "SUBSCRIBE": "Gerast áskrifandi",
+ "MANAGEMENT_PORTAL": "",
+ "MANAGE_FAMILY_PORTAL": "",
+ "LEAVE_FAMILY_PLAN": "",
+ "LEAVE": "",
+ "LEAVE_FAMILY_CONFIRM": "",
+ "CHOOSE_PLAN": "",
+ "MANAGE_PLAN": "",
+ "ACTIVE": "Virkur",
+ "OFFLINE_MSG": "",
+ "FREE_SUBSCRIPTION_INFO": "",
+ "FAMILY_SUBSCRIPTION_INFO": "",
+ "RENEWAL_ACTIVE_SUBSCRIPTION_STATUS": "",
+ "RENEWAL_CANCELLED_SUBSCRIPTION_STATUS": "",
+ "RENEWAL_CANCELLED_SUBSCRIPTION_INFO": "",
+ "ADD_ON_AVAILABLE_TILL": "",
+ "STORAGE_QUOTA_EXCEEDED_SUBSCRIPTION_INFO": "Þú hefur farið yfir geymsluplássið þitt, vinsamlegast uppfærðu ",
+ "SUBSCRIPTION_PURCHASE_SUCCESS": "",
+ "SUBSCRIPTION_PURCHASE_CANCELLED": "",
+ "SUBSCRIPTION_PURCHASE_FAILED": "",
+ "SUBSCRIPTION_UPDATE_FAILED": "",
+ "UPDATE_PAYMENT_METHOD_MESSAGE": "",
+ "STRIPE_AUTHENTICATION_FAILED": "",
+ "UPDATE_PAYMENT_METHOD": "",
+ "MONTHLY": "",
+ "YEARLY": "",
+ "update_subscription_title": "",
+ "UPDATE_SUBSCRIPTION_MESSAGE": "",
+ "UPDATE_SUBSCRIPTION": "",
+ "CANCEL_SUBSCRIPTION": "",
+ "CANCEL_SUBSCRIPTION_MESSAGE": "",
+ "CANCEL_SUBSCRIPTION_WITH_ADDON_MESSAGE": "",
+ "SUBSCRIPTION_CANCEL_FAILED": "",
+ "SUBSCRIPTION_CANCEL_SUCCESS": "",
+ "REACTIVATE_SUBSCRIPTION": "",
+ "REACTIVATE_SUBSCRIPTION_MESSAGE": "",
+ "SUBSCRIPTION_ACTIVATE_SUCCESS": "",
+ "SUBSCRIPTION_ACTIVATE_FAILED": "",
+ "SUBSCRIPTION_PURCHASE_SUCCESS_TITLE": "",
+ "CANCEL_SUBSCRIPTION_ON_MOBILE": "",
+ "CANCEL_SUBSCRIPTION_ON_MOBILE_MESSAGE": "",
+ "MAIL_TO_MANAGE_SUBSCRIPTION": "",
+ "RENAME": "",
+ "RENAME_FILE": "",
+ "RENAME_COLLECTION": "",
+ "DELETE_COLLECTION_TITLE": "",
+ "DELETE_COLLECTION": "",
+ "DELETE_COLLECTION_MESSAGE": "",
+ "DELETE_PHOTOS": "",
+ "KEEP_PHOTOS": "",
+ "SHARE_COLLECTION": "",
+ "SHARE_WITH_SELF": "",
+ "ALREADY_SHARED": "",
+ "SHARING_BAD_REQUEST_ERROR": "",
+ "SHARING_DISABLED_FOR_FREE_ACCOUNTS": "",
+ "DOWNLOAD_COLLECTION": "",
+ "CREATE_ALBUM_FAILED": "",
+ "SEARCH": "",
+ "SEARCH_RESULTS": "",
+ "NO_RESULTS": "",
+ "SEARCH_HINT": "",
+ "SEARCH_TYPE": {
+ "COLLECTION": "",
+ "LOCATION": "",
+ "CITY": "",
+ "DATE": "",
+ "FILE_NAME": "",
+ "THING": "",
+ "FILE_CAPTION": "",
+ "FILE_TYPE": "",
+ "CLIP": ""
+ },
+ "photos_count_zero": "",
+ "photos_count_one": "",
+ "photos_count_other": "",
+ "TERMS_AND_CONDITIONS": "",
+ "ADD_TO_COLLECTION": "",
+ "SELECTED": "",
+ "PEOPLE": "",
+ "INDEXING_SCHEDULED": "",
+ "ANALYZING_PHOTOS": "",
+ "INDEXING_PEOPLE": "",
+ "INDEXING_DONE": "",
+ "UNIDENTIFIED_FACES": "",
+ "OBJECTS": "",
+ "TEXT": "",
+ "INFO": "",
+ "INFO_OPTION": "",
+ "FILE_NAME": "",
+ "CAPTION_PLACEHOLDER": "",
+ "LOCATION": "",
+ "SHOW_ON_MAP": "",
+ "MAP": "",
+ "MAP_SETTINGS": "",
+ "ENABLE_MAPS": "",
+ "ENABLE_MAP": "",
+ "DISABLE_MAPS": "",
+ "ENABLE_MAP_DESCRIPTION": "",
+ "DISABLE_MAP_DESCRIPTION": "",
+ "DISABLE_MAP": "",
+ "DETAILS": "",
+ "VIEW_EXIF": "",
+ "NO_EXIF": "",
+ "EXIF": "",
+ "ISO": "",
+ "TWO_FACTOR": "",
+ "TWO_FACTOR_AUTHENTICATION": "",
+ "TWO_FACTOR_QR_INSTRUCTION": "",
+ "ENTER_CODE_MANUALLY": "",
+ "TWO_FACTOR_MANUAL_CODE_INSTRUCTION": "",
+ "SCAN_QR_CODE": "",
+ "ENABLE_TWO_FACTOR": "",
+ "ENABLE": "",
+ "LOST_DEVICE": "",
+ "INCORRECT_CODE": "",
+ "TWO_FACTOR_INFO": "",
+ "DISABLE_TWO_FACTOR_LABEL": "",
+ "UPDATE_TWO_FACTOR_LABEL": "",
+ "DISABLE": "",
+ "RECONFIGURE": "",
+ "UPDATE_TWO_FACTOR": "",
+ "UPDATE_TWO_FACTOR_MESSAGE": "",
+ "UPDATE": "",
+ "DISABLE_TWO_FACTOR": "",
+ "DISABLE_TWO_FACTOR_MESSAGE": "",
+ "TWO_FACTOR_DISABLE_FAILED": "",
+ "EXPORT_DATA": "",
+ "SELECT_FOLDER": "",
+ "DESTINATION": "",
+ "START": "",
+ "LAST_EXPORT_TIME": "",
+ "EXPORT_AGAIN": "",
+ "LOCAL_STORAGE_NOT_ACCESSIBLE": "",
+ "LOCAL_STORAGE_NOT_ACCESSIBLE_MESSAGE": "",
+ "SEND_OTT": "",
+ "EMAIl_ALREADY_OWNED": "",
+ "ETAGS_BLOCKED": "",
+ "LIVE_PHOTOS_DETECTED": "",
+ "RETRY_FAILED": "",
+ "FAILED_UPLOADS": "",
+ "SKIPPED_FILES": "",
+ "THUMBNAIL_GENERATION_FAILED_UPLOADS": "",
+ "UNSUPPORTED_FILES": "",
+ "SUCCESSFUL_UPLOADS": "",
+ "SKIPPED_INFO": "",
+ "UNSUPPORTED_INFO": "",
+ "BLOCKED_UPLOADS": "",
+ "INPROGRESS_METADATA_EXTRACTION": "",
+ "INPROGRESS_UPLOADS": "",
+ "TOO_LARGE_UPLOADS": "",
+ "LARGER_THAN_AVAILABLE_STORAGE_UPLOADS": "",
+ "LARGER_THAN_AVAILABLE_STORAGE_INFO": "",
+ "TOO_LARGE_INFO": "",
+ "THUMBNAIL_GENERATION_FAILED_INFO": "",
+ "UPLOAD_TO_COLLECTION": "",
+ "UNCATEGORIZED": "",
+ "ARCHIVE": "",
+ "FAVORITES": "",
+ "ARCHIVE_COLLECTION": "",
+ "ARCHIVE_SECTION_NAME": "",
+ "ALL_SECTION_NAME": "",
+ "MOVE_TO_COLLECTION": "",
+ "UNARCHIVE": "",
+ "UNARCHIVE_COLLECTION": "",
+ "HIDE_COLLECTION": "",
+ "UNHIDE_COLLECTION": "",
+ "MOVE": "",
+ "ADD": "",
+ "REMOVE": "",
+ "YES_REMOVE": "",
+ "REMOVE_FROM_COLLECTION": "",
+ "TRASH": "",
+ "MOVE_TO_TRASH": "",
+ "TRASH_FILES_MESSAGE": "",
+ "TRASH_FILE_MESSAGE": "",
+ "DELETE_PERMANENTLY": "",
+ "RESTORE": "",
+ "RESTORE_TO_COLLECTION": "",
+ "EMPTY_TRASH": "",
+ "EMPTY_TRASH_TITLE": "",
+ "EMPTY_TRASH_MESSAGE": "",
+ "LEAVE_SHARED_ALBUM": "",
+ "LEAVE_ALBUM": "",
+ "LEAVE_SHARED_ALBUM_TITLE": "",
+ "LEAVE_SHARED_ALBUM_MESSAGE": "",
+ "NOT_FILE_OWNER": "",
+ "CONFIRM_SELF_REMOVE_MESSAGE": "",
+ "CONFIRM_SELF_AND_OTHER_REMOVE_MESSAGE": "",
+ "SORT_BY_CREATION_TIME_ASCENDING": "",
+ "SORT_BY_UPDATION_TIME_DESCENDING": "",
+ "SORT_BY_NAME": "",
+ "FIX_CREATION_TIME": "",
+ "FIX_CREATION_TIME_IN_PROGRESS": "",
+ "CREATION_TIME_UPDATED": "",
+ "UPDATE_CREATION_TIME_NOT_STARTED": "",
+ "UPDATE_CREATION_TIME_COMPLETED": "",
+ "UPDATE_CREATION_TIME_COMPLETED_WITH_ERROR": "",
+ "CAPTION_CHARACTER_LIMIT": "hámark 5000 stafir",
+ "DATE_TIME_ORIGINAL": "",
+ "DATE_TIME_DIGITIZED": "",
+ "METADATA_DATE": "",
+ "CUSTOM_TIME": "",
+ "REOPEN_PLAN_SELECTOR_MODAL": "",
+ "OPEN_PLAN_SELECTOR_MODAL_FAILED": "",
+ "INSTALL": "",
+ "SHARING_DETAILS": "",
+ "MODIFY_SHARING": "",
+ "ADD_COLLABORATORS": "",
+ "ADD_NEW_EMAIL": "",
+ "shared_with_people_zero": "",
+ "shared_with_people_one": "",
+ "shared_with_people_other": "",
+ "participants_zero": "",
+ "participants_one": "",
+ "participants_other": "",
+ "ADD_VIEWERS": "",
+ "CHANGE_PERMISSIONS_TO_VIEWER": "",
+ "CHANGE_PERMISSIONS_TO_COLLABORATOR": "",
+ "CONVERT_TO_VIEWER": "",
+ "CONVERT_TO_COLLABORATOR": "",
+ "CHANGE_PERMISSION": "",
+ "REMOVE_PARTICIPANT": "",
+ "CONFIRM_REMOVE": "",
+ "MANAGE": "",
+ "ADDED_AS": "",
+ "COLLABORATOR_RIGHTS": "",
+ "REMOVE_PARTICIPANT_HEAD": "",
+ "OWNER": "Eigandi",
+ "COLLABORATORS": "",
+ "ADD_MORE": "",
+ "VIEWERS": "",
+ "OR_ADD_EXISTING": "",
+ "REMOVE_PARTICIPANT_MESSAGE": "",
+ "NOT_FOUND": "404 - fannst ekki",
+ "LINK_EXPIRED": "Hlekkur rann út",
+ "LINK_EXPIRED_MESSAGE": "",
+ "MANAGE_LINK": "Stjórna hlekk",
+ "LINK_TOO_MANY_REQUESTS": "",
+ "FILE_DOWNLOAD": "",
+ "LINK_PASSWORD_LOCK": "",
+ "PUBLIC_COLLECT": "",
+ "LINK_DEVICE_LIMIT": "",
+ "NO_DEVICE_LIMIT": "",
+ "LINK_EXPIRY": "",
+ "NEVER": "",
+ "DISABLE_FILE_DOWNLOAD": "",
+ "DISABLE_FILE_DOWNLOAD_MESSAGE": "",
+ "SHARED_USING": "",
+ "SHARING_REFERRAL_CODE": "",
+ "LIVE": "",
+ "DISABLE_PASSWORD": "",
+ "DISABLE_PASSWORD_MESSAGE": "",
+ "PASSWORD_LOCK": "",
+ "LOCK": "",
+ "DOWNLOAD_UPLOAD_LOGS": "",
+ "UPLOAD_FILES": "",
+ "UPLOAD_DIRS": "",
+ "UPLOAD_GOOGLE_TAKEOUT": "",
+ "DEDUPLICATE_FILES": "",
+ "NO_DUPLICATES_FOUND": "",
+ "FILES": "",
+ "EACH": "",
+ "DEDUPLICATE_BASED_ON_SIZE": "",
+ "STOP_ALL_UPLOADS_MESSAGE": "",
+ "STOP_UPLOADS_HEADER": "",
+ "YES_STOP_UPLOADS": "",
+ "STOP_DOWNLOADS_HEADER": "",
+ "YES_STOP_DOWNLOADS": "",
+ "STOP_ALL_DOWNLOADS_MESSAGE": "",
+ "albums_one": "",
+ "albums_other": "",
+ "ALL_ALBUMS": "",
+ "ALBUMS": "",
+ "ALL_HIDDEN_ALBUMS": "",
+ "HIDDEN_ALBUMS": "",
+ "HIDDEN_ITEMS": "",
+ "ENTER_TWO_FACTOR_OTP": "",
+ "CREATE_ACCOUNT": "",
+ "COPIED": "",
+ "WATCH_FOLDERS": "",
+ "UPGRADE_NOW": "",
+ "RENEW_NOW": "",
+ "STORAGE": "",
+ "USED": "",
+ "YOU": "",
+ "FAMILY": "",
+ "FREE": "",
+ "OF": "",
+ "WATCHED_FOLDERS": "",
+ "NO_FOLDERS_ADDED": "",
+ "FOLDERS_AUTOMATICALLY_MONITORED": "",
+ "UPLOAD_NEW_FILES_TO_ENTE": "",
+ "REMOVE_DELETED_FILES_FROM_ENTE": "",
+ "ADD_FOLDER": "",
+ "STOP_WATCHING": "",
+ "STOP_WATCHING_FOLDER": "",
+ "STOP_WATCHING_DIALOG_MESSAGE": "",
+ "YES_STOP": "",
+ "MONTH_SHORT": "",
+ "YEAR": "",
+ "FAMILY_PLAN": "",
+ "DOWNLOAD_LOGS": "",
+ "DOWNLOAD_LOGS_MESSAGE": "",
+ "CHANGE_FOLDER": "",
+ "TWO_MONTHS_FREE": "",
+ "POPULAR": "",
+ "FREE_PLAN_OPTION_LABEL": "",
+ "free_plan_description": "",
+ "CURRENT_USAGE": "",
+ "WEAK_DEVICE": "",
+ "DRAG_AND_DROP_HINT": "",
+ "CONFIRM_ACCOUNT_DELETION_MESSAGE": "",
+ "AUTHENTICATE": "",
+ "UPLOADED_TO_SINGLE_COLLECTION": "",
+ "UPLOADED_TO_SEPARATE_COLLECTIONS": "",
+ "NEVERMIND": "",
+ "UPDATE_AVAILABLE": "",
+ "UPDATE_INSTALLABLE_MESSAGE": "",
+ "INSTALL_NOW": "",
+ "INSTALL_ON_NEXT_LAUNCH": "",
+ "UPDATE_AVAILABLE_MESSAGE": "",
+ "DOWNLOAD_AND_INSTALL": "",
+ "IGNORE_THIS_VERSION": "",
+ "TODAY": "",
+ "YESTERDAY": "",
+ "NAME_PLACEHOLDER": "",
+ "ROOT_LEVEL_FILE_WITH_FOLDER_NOT_ALLOWED": "",
+ "ROOT_LEVEL_FILE_WITH_FOLDER_NOT_ALLOWED_MESSAGE": "",
+ "CHOSE_THEME": "",
+ "ML_SEARCH": "",
+ "ENABLE_ML_SEARCH_DESCRIPTION": "",
+ "ML_MORE_DETAILS": "",
+ "ENABLE_FACE_SEARCH": "",
+ "ENABLE_FACE_SEARCH_TITLE": "",
+ "ENABLE_FACE_SEARCH_DESCRIPTION": "",
+ "DISABLE_BETA": "",
+ "DISABLE_FACE_SEARCH": "",
+ "DISABLE_FACE_SEARCH_TITLE": "",
+ "DISABLE_FACE_SEARCH_DESCRIPTION": "",
+ "ADVANCED": "",
+ "FACE_SEARCH_CONFIRMATION": "",
+ "LABS": "",
+ "YOURS": "",
+ "PASSPHRASE_STRENGTH_WEAK": "",
+ "PASSPHRASE_STRENGTH_MODERATE": "",
+ "PASSPHRASE_STRENGTH_STRONG": "",
+ "PREFERENCES": "",
+ "LANGUAGE": "",
+ "EXPORT_DIRECTORY_DOES_NOT_EXIST": "",
+ "EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": "",
+ "SUBSCRIPTION_VERIFICATION_ERROR": "",
+ "storage_unit": {
+ "b": "",
+ "kb": "",
+ "mb": "",
+ "gb": "",
+ "tb": ""
+ },
+ "AFTER_TIME": {
+ "HOUR": "",
+ "DAY": "",
+ "WEEK": "",
+ "MONTH": "",
+ "YEAR": ""
+ },
+ "COPY_LINK": "",
+ "DONE": "",
+ "LINK_SHARE_TITLE": "",
+ "REMOVE_LINK": "",
+ "CREATE_PUBLIC_SHARING": "",
+ "PUBLIC_LINK_CREATED": "",
+ "PUBLIC_LINK_ENABLED": "",
+ "COLLECT_PHOTOS": "",
+ "PUBLIC_COLLECT_SUBTEXT": "",
+ "STOP_EXPORT": "",
+ "EXPORT_PROGRESS": "",
+ "MIGRATING_EXPORT": "",
+ "RENAMING_COLLECTION_FOLDERS": "",
+ "TRASHING_DELETED_FILES": "",
+ "TRASHING_DELETED_COLLECTIONS": "",
+ "CONTINUOUS_EXPORT": "",
+ "PENDING_ITEMS": "",
+ "EXPORT_STARTING": "",
+ "DELETE_ACCOUNT_REASON_LABEL": "",
+ "DELETE_ACCOUNT_REASON_PLACEHOLDER": "",
+ "DELETE_REASON": {
+ "MISSING_FEATURE": "",
+ "BROKEN_BEHAVIOR": "",
+ "FOUND_ANOTHER_SERVICE": "",
+ "NOT_LISTED": ""
+ },
+ "DELETE_ACCOUNT_FEEDBACK_LABEL": "",
+ "DELETE_ACCOUNT_FEEDBACK_PLACEHOLDER": "",
+ "CONFIRM_DELETE_ACCOUNT_CHECKBOX_LABEL": "",
+ "CONFIRM_DELETE_ACCOUNT": "",
+ "FEEDBACK_REQUIRED": "",
+ "FEEDBACK_REQUIRED_FOUND_ANOTHER_SERVICE": "",
+ "RECOVER_TWO_FACTOR": "",
+ "at": "",
+ "AUTH_NEXT": "",
+ "AUTH_DOWNLOAD_MOBILE_APP": "",
+ "HIDDEN": "",
+ "HIDE": "Fela",
+ "UNHIDE": "",
+ "UNHIDE_TO_COLLECTION": "",
+ "SORT_BY": "Raða eftir",
+ "NEWEST_FIRST": "Nýjast fyrst",
+ "OLDEST_FIRST": "Elsta fyrst",
+ "CONVERSION_FAILED_NOTIFICATION_MESSAGE": "",
+ "SELECT_COLLECTION": "",
+ "PIN_ALBUM": "",
+ "UNPIN_ALBUM": "",
+ "DOWNLOAD_COMPLETE": "",
+ "DOWNLOADING_COLLECTION": "",
+ "DOWNLOAD_FAILED": "",
+ "DOWNLOAD_PROGRESS": "",
+ "CHRISTMAS": "",
+ "CHRISTMAS_EVE": "",
+ "NEW_YEAR": "Nýtt ár",
+ "NEW_YEAR_EVE": "",
+ "IMAGE": "Mynd",
+ "VIDEO": "Mynband",
+ "LIVE_PHOTO": "",
+ "editor": {
+ "crop": ""
+ },
+ "CONVERT": "",
+ "CONFIRM_EDITOR_CLOSE_MESSAGE": "",
+ "CONFIRM_EDITOR_CLOSE_DESCRIPTION": "",
+ "BRIGHTNESS": "",
+ "CONTRAST": "",
+ "SATURATION": "",
+ "BLUR": "",
+ "INVERT_COLORS": "",
+ "ASPECT_RATIO": "",
+ "SQUARE": "",
+ "ROTATE_LEFT": "",
+ "ROTATE_RIGHT": "",
+ "FLIP_VERTICALLY": "",
+ "FLIP_HORIZONTALLY": "",
+ "DOWNLOAD_EDITED": "",
+ "SAVE_A_COPY_TO_ENTE": "",
+ "RESTORE_ORIGINAL": "",
+ "TRANSFORM": "",
+ "COLORS": "",
+ "FLIP": "",
+ "ROTATION": "",
+ "RESET": "",
+ "PHOTO_EDITOR": "",
+ "FASTER_UPLOAD": "",
+ "FASTER_UPLOAD_DESCRIPTION": "",
+ "MAGIC_SEARCH_STATUS": "",
+ "INDEXED_ITEMS": "",
+ "CAST_ALBUM_TO_TV": "",
+ "ENTER_CAST_PIN_CODE": "",
+ "PAIR_DEVICE_TO_TV": "",
+ "TV_NOT_FOUND": "",
+ "AUTO_CAST_PAIR": "",
+ "AUTO_CAST_PAIR_DESC": "",
+ "PAIR_WITH_PIN": "",
+ "CHOOSE_DEVICE_FROM_BROWSER": "",
+ "PAIR_WITH_PIN_DESC": "",
+ "VISIT_CAST_ENTE_IO": "",
+ "CAST_AUTO_PAIR_FAILED": "",
+ "FREEHAND": "",
+ "APPLY_CROP": "",
+ "PHOTO_EDIT_REQUIRED_TO_SAVE": "",
+ "PASSKEYS": "",
+ "DELETE_PASSKEY": "",
+ "DELETE_PASSKEY_CONFIRMATION": "",
+ "RENAME_PASSKEY": "",
+ "ADD_PASSKEY": "",
+ "ENTER_PASSKEY_NAME": "",
+ "PASSKEYS_DESCRIPTION": "",
+ "CREATED_AT": "",
+ "PASSKEY_LOGIN_FAILED": "",
+ "PASSKEY_LOGIN_URL_INVALID": "",
+ "PASSKEY_LOGIN_ERRORED": "",
+ "TRY_AGAIN": "",
+ "PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "",
+ "LOGIN_WITH_PASSKEY": "",
+ "autogenerated_first_album_name": "",
+ "autogenerated_default_album_name": ""
+}
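A minimal sketch of how these catalogs might be consumed, assuming the web app loads them through i18next (which is what the {{storage}} placeholder and the _one/_other plural suffixes suggest); untranslated "" values would presumably fall back to the source language:

// Hypothetical usage; i18next and the t() call are assumptions, while the key
// names come from the translation files in this diff.
import { t } from "i18next";

// Interpolation: the renamed "free_plan_description" key takes a storage value.
const freePlan = t("free_plan_description", { storage: "10 GB" });

// Pluralization: i18next resolves "albums" to "albums_one" or "albums_other"
// based on the count option.
const albumCount = t("albums", { count: 42 });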
diff --git a/web/packages/next/locales/it-IT/translation.json b/web/packages/next/locales/it-IT/translation.json
index eb7a68d451051c409a4fdde100bb5e013efcb368..d935126f7c014c550759f2f8abcf6187abc8811f 100644
--- a/web/packages/next/locales/it-IT/translation.json
+++ b/web/packages/next/locales/it-IT/translation.json
@@ -418,7 +418,6 @@
"ALL_HIDDEN_ALBUMS": "",
"HIDDEN_ALBUMS": "",
"HIDDEN_ITEMS": "",
- "HIDDEN_ITEMS_SECTION_NAME": "",
"ENTER_TWO_FACTOR_OTP": "",
"CREATE_ACCOUNT": "Crea account",
"COPIED": "",
@@ -448,10 +447,9 @@
"DOWNLOAD_LOGS_MESSAGE": "",
"CHANGE_FOLDER": "Cambia Cartella",
"TWO_MONTHS_FREE": "Ottieni 2 mesi gratis sui piani annuali",
- "GB": "GB",
"POPULAR": "",
"FREE_PLAN_OPTION_LABEL": "",
- "FREE_PLAN_DESCRIPTION": "1 GB per 1 anno",
+ "free_plan_description": "{{storage}} per 1 anno",
"CURRENT_USAGE": "",
"WEAK_DEVICE": "",
"DRAG_AND_DROP_HINT": "",
@@ -495,12 +493,12 @@
"EXPORT_DIRECTORY_DOES_NOT_EXIST": "",
"EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": "",
"SUBSCRIPTION_VERIFICATION_ERROR": "",
- "STORAGE_UNITS": {
- "B": "B",
- "KB": "KB",
- "MB": "MB",
- "GB": "GB",
- "TB": "TB"
+ "storage_unit": {
+ "b": "B",
+ "kb": "KB",
+ "mb": "MB",
+ "gb": "GB",
+ "tb": "TB"
},
"AFTER_TIME": {
"HOUR": "dopo un'ora",
@@ -567,6 +565,9 @@
"IMAGE": "",
"VIDEO": "",
"LIVE_PHOTO": "",
+ "editor": {
+ "crop": ""
+ },
"CONVERT": "",
"CONFIRM_EDITOR_CLOSE_MESSAGE": "",
"CONFIRM_EDITOR_CLOSE_DESCRIPTION": "",
diff --git a/web/packages/next/locales/ko-KR/translation.json b/web/packages/next/locales/ko-KR/translation.json
index 35aeff3395c6d0682dd089638281ae48b7e98326..cec77e0e407a0f022c65858a1002b755a541b6f8 100644
--- a/web/packages/next/locales/ko-KR/translation.json
+++ b/web/packages/next/locales/ko-KR/translation.json
@@ -418,7 +418,6 @@
"ALL_HIDDEN_ALBUMS": "",
"HIDDEN_ALBUMS": "",
"HIDDEN_ITEMS": "",
- "HIDDEN_ITEMS_SECTION_NAME": "",
"ENTER_TWO_FACTOR_OTP": "",
"CREATE_ACCOUNT": "",
"COPIED": "",
@@ -448,10 +447,9 @@
"DOWNLOAD_LOGS_MESSAGE": "",
"CHANGE_FOLDER": "",
"TWO_MONTHS_FREE": "",
- "GB": "",
"POPULAR": "",
"FREE_PLAN_OPTION_LABEL": "",
- "FREE_PLAN_DESCRIPTION": "",
+ "free_plan_description": "",
"CURRENT_USAGE": "",
"WEAK_DEVICE": "",
"DRAG_AND_DROP_HINT": "",
@@ -495,12 +493,12 @@
"EXPORT_DIRECTORY_DOES_NOT_EXIST": "",
"EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": "",
"SUBSCRIPTION_VERIFICATION_ERROR": "",
- "STORAGE_UNITS": {
- "B": "",
- "KB": "",
- "MB": "",
- "GB": "",
- "TB": ""
+ "storage_unit": {
+ "b": "",
+ "kb": "",
+ "mb": "",
+ "gb": "",
+ "tb": ""
},
"AFTER_TIME": {
"HOUR": "",
@@ -567,6 +565,9 @@
"IMAGE": "",
"VIDEO": "",
"LIVE_PHOTO": "",
+ "editor": {
+ "crop": ""
+ },
"CONVERT": "",
"CONFIRM_EDITOR_CLOSE_MESSAGE": "",
"CONFIRM_EDITOR_CLOSE_DESCRIPTION": "",
diff --git a/web/packages/next/locales/nl-NL/translation.json b/web/packages/next/locales/nl-NL/translation.json
index f75bd5e4743069354b34ed41968f5112148eead3..47775c0c21e5ab2c18454882ab10aa93dbf07397 100644
--- a/web/packages/next/locales/nl-NL/translation.json
+++ b/web/packages/next/locales/nl-NL/translation.json
@@ -418,7 +418,6 @@
"ALL_HIDDEN_ALBUMS": "Alle verborgen albums",
"HIDDEN_ALBUMS": "Verborgen albums",
"HIDDEN_ITEMS": "Verborgen bestanden",
- "HIDDEN_ITEMS_SECTION_NAME": "Verborgen_items",
"ENTER_TWO_FACTOR_OTP": "Voer de 6-cijferige code van uw verificatie app in.",
"CREATE_ACCOUNT": "Account aanmaken",
"COPIED": "Gekopieerd",
@@ -448,10 +447,9 @@
"DOWNLOAD_LOGS_MESSAGE": "Dit zal logboeken downloaden, die u ons kunt e-mailen om te helpen bij het debuggen van uw probleem.
Houd er rekening mee dat bestandsnamen worden opgenomen om problemen met specifieke bestanden bij te houden.
",
"CHANGE_FOLDER": "Map wijzigen",
"TWO_MONTHS_FREE": "Krijg 2 maanden gratis op jaarlijkse abonnementen",
- "GB": "GB",
"POPULAR": "Populair",
"FREE_PLAN_OPTION_LABEL": "Doorgaan met gratis account",
- "FREE_PLAN_DESCRIPTION": "1 GB voor 1 jaar",
+ "free_plan_description": "{{storage}} voor 1 jaar",
"CURRENT_USAGE": "Huidig gebruik is {{usage}} ",
"WEAK_DEVICE": "De webbrowser die u gebruikt is niet krachtig genoeg om uw foto's te versleutelen. Probeer in te loggen op uw computer, of download de Ente mobiel/desktop app.",
"DRAG_AND_DROP_HINT": "Of sleep en plaats in het Ente venster",
@@ -495,12 +493,12 @@
"EXPORT_DIRECTORY_DOES_NOT_EXIST": "Ongeldige export map",
"EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": "De export map die u heeft geselecteerd bestaat niet.
Selecteer een geldige map.
",
"SUBSCRIPTION_VERIFICATION_ERROR": "Abonnementsverificatie mislukt",
- "STORAGE_UNITS": {
- "B": "B",
- "KB": "KB",
- "MB": "MB",
- "GB": "GB",
- "TB": "TB"
+ "storage_unit": {
+ "b": "B",
+ "kb": "KB",
+ "mb": "MB",
+ "gb": "GB",
+ "tb": "TB"
},
"AFTER_TIME": {
"HOUR": "na één uur",
@@ -567,6 +565,9 @@
"IMAGE": "Afbeelding",
"VIDEO": "Video",
"LIVE_PHOTO": "Live foto",
+ "editor": {
+ "crop": ""
+ },
"CONVERT": "Converteren",
"CONFIRM_EDITOR_CLOSE_MESSAGE": "Weet u zeker dat u de editor wilt afsluiten?",
"CONFIRM_EDITOR_CLOSE_DESCRIPTION": "Download uw bewerkte afbeelding of sla een kopie op in Ente om uw wijzigingen te behouden.",
@@ -598,13 +599,13 @@
"ENTER_CAST_PIN_CODE": "Voer de code in die u op de TV ziet om dit apparaat te koppelen.",
"PAIR_DEVICE_TO_TV": "Koppel apparaten",
"TV_NOT_FOUND": "TV niet gevonden. Heeft u de pincode correct ingevoerd?",
- "AUTO_CAST_PAIR": "",
- "AUTO_CAST_PAIR_DESC": "",
+ "AUTO_CAST_PAIR": "Automatisch koppelen",
+ "AUTO_CAST_PAIR_DESC": "Automatisch koppelen werkt alleen met apparaten die Chromecast ondersteunen.",
"PAIR_WITH_PIN": "Koppelen met PIN",
"CHOOSE_DEVICE_FROM_BROWSER": "Kies een compatibel apparaat uit de browser popup.",
- "PAIR_WITH_PIN_DESC": "",
+ "PAIR_WITH_PIN_DESC": "Koppelen met de PIN werkt met elk scherm waarop je jouw album wilt zien.",
"VISIT_CAST_ENTE_IO": "Bezoek {{url}} op het apparaat dat je wilt koppelen.",
- "CAST_AUTO_PAIR_FAILED": "",
+ "CAST_AUTO_PAIR_FAILED": "Automatisch koppelen van Chromecast mislukt. Probeer het opnieuw.",
"FREEHAND": "Losse hand",
"APPLY_CROP": "Bijsnijden toepassen",
"PHOTO_EDIT_REQUIRED_TO_SAVE": "Tenminste één transformatie of kleuraanpassing moet worden uitgevoerd voordat u opslaat.",
@@ -622,6 +623,6 @@
"TRY_AGAIN": "Probeer opnieuw",
"PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Volg de stappen van je browser om door te gaan met inloggen.",
"LOGIN_WITH_PASSKEY": "Inloggen met passkey",
- "autogenerated_first_album_name": "",
- "autogenerated_default_album_name": ""
+ "autogenerated_first_album_name": "Mijn eerste album",
+ "autogenerated_default_album_name": "Nieuw album"
}
diff --git a/web/packages/next/locales/pt-BR/translation.json b/web/packages/next/locales/pt-BR/translation.json
index 630c8cf65111ac00934bb4cbdaa6995e0d1e866d..a191a49272d9b1d0365d2e7cec7f344595e48e55 100644
--- a/web/packages/next/locales/pt-BR/translation.json
+++ b/web/packages/next/locales/pt-BR/translation.json
@@ -2,7 +2,7 @@
"HERO_SLIDE_1_TITLE": "Backups privados
para as suas memórias
",
"HERO_SLIDE_1": "Criptografia de ponta a ponta por padrão",
"HERO_SLIDE_2_TITLE": "Armazenado com segurança
em um abrigo avançado
",
- "HERO_SLIDE_2": "Feito para ter logenvidade",
+ "HERO_SLIDE_2": "Feito para ter longevidade",
"HERO_SLIDE_3_TITLE": "Disponível
em qualquer lugar
",
"HERO_SLIDE_3": "Android, iOS, Web, Desktop",
"LOGIN": "Entrar",
@@ -410,7 +410,7 @@
"YES_STOP_UPLOADS": "Sim, parar envios",
"STOP_DOWNLOADS_HEADER": "Parar downloads?",
"YES_STOP_DOWNLOADS": "Sim, parar downloads",
- "STOP_ALL_DOWNLOADS_MESSAGE": "Tem certeza que deseja parar todos as transferências em andamento?",
+ "STOP_ALL_DOWNLOADS_MESSAGE": "Tem certeza que deseja parar todos os downloads em andamento?",
"albums_one": "1 Álbum",
"albums_other": "{{count, number}} Álbuns",
"ALL_ALBUMS": "Todos os álbuns",
@@ -418,7 +418,6 @@
"ALL_HIDDEN_ALBUMS": "Todos os álbuns ocultos",
"HIDDEN_ALBUMS": "Álbuns ocultos",
"HIDDEN_ITEMS": "Itens ocultos",
- "HIDDEN_ITEMS_SECTION_NAME": "Itens_ocultos",
"ENTER_TWO_FACTOR_OTP": "Digite o código de 6 dígitos de\nseu aplicativo autenticador.",
"CREATE_ACCOUNT": "Criar uma conta",
"COPIED": "Copiado",
@@ -448,10 +447,9 @@
"DOWNLOAD_LOGS_MESSAGE": "Isto irá baixar os logs de depuração, que você pode enviar para nós para ajudar a depurar seu problema.
Por favor, note que os nomes de arquivos serão incluídos para ajudar a rastrear problemas com arquivos específicos.
",
"CHANGE_FOLDER": "Alterar pasta",
"TWO_MONTHS_FREE": "Obtenha 2 meses gratuitos em planos anuais",
- "GB": "GB",
"POPULAR": "Popular",
"FREE_PLAN_OPTION_LABEL": "Continuar com teste gratuito",
- "FREE_PLAN_DESCRIPTION": "1 GB por 1 ano",
+ "free_plan_description": "{{storage}} por 1 ano",
"CURRENT_USAGE": "O uso atual é {{usage}} ",
"WEAK_DEVICE": "O navegador da web que você está usando não é poderoso o suficiente para criptografar suas fotos. Por favor, tente entrar para o ente no computador ou baixe o aplicativo móvel.",
"DRAG_AND_DROP_HINT": "Ou arraste e solte na janela ente",
@@ -495,12 +493,12 @@
"EXPORT_DIRECTORY_DOES_NOT_EXIST": "Diretório de exportação inválido",
"EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": "O diretório de exportação que você selecionou não existe.
Por favor, selecione um diretório válido.
",
"SUBSCRIPTION_VERIFICATION_ERROR": "Falha na verificação de assinatura",
- "STORAGE_UNITS": {
- "B": "B",
- "KB": "KB",
- "MB": "MB",
- "GB": "GB",
- "TB": "TB"
+ "storage_unit": {
+ "b": "B",
+ "kb": "KB",
+ "mb": "MB",
+ "gb": "GB",
+ "tb": "TB"
},
"AFTER_TIME": {
"HOUR": "após uma hora",
@@ -558,7 +556,7 @@
"UNPIN_ALBUM": "Desafixar álbum",
"DOWNLOAD_COMPLETE": "Download concluído",
"DOWNLOADING_COLLECTION": "Fazendo download de {{name}}",
- "DOWNLOAD_FAILED": "Falha ao baixar",
+ "DOWNLOAD_FAILED": "Falha no download",
"DOWNLOAD_PROGRESS": "{{progress.current}} / {{progress.total}} arquivos",
"CHRISTMAS": "Natal",
"CHRISTMAS_EVE": "Véspera de Natal",
@@ -567,6 +565,9 @@
"IMAGE": "Imagem",
"VIDEO": "Vídeo",
"LIVE_PHOTO": "Fotos em movimento",
+ "editor": {
+ "crop": "Cortar"
+ },
"CONVERT": "Converter",
"CONFIRM_EDITOR_CLOSE_MESSAGE": "Tem certeza de que deseja fechar o editor?",
"CONFIRM_EDITOR_CLOSE_DESCRIPTION": "Baixe sua imagem editada ou salve uma cópia para o ente para persistir nas alterações.",
@@ -598,13 +599,13 @@
"ENTER_CAST_PIN_CODE": "Digite o código que você vê na TV abaixo para parear este dispositivo.",
"PAIR_DEVICE_TO_TV": "Parear dispositivos",
"TV_NOT_FOUND": "TV não encontrada. Você inseriu o PIN correto?",
- "AUTO_CAST_PAIR": "",
- "AUTO_CAST_PAIR_DESC": "",
+ "AUTO_CAST_PAIR": "Pareamento automático",
+ "AUTO_CAST_PAIR_DESC": "O pareamento automático funciona apenas com dispositivos que suportam o Chromecast.",
"PAIR_WITH_PIN": "Parear com PIN",
"CHOOSE_DEVICE_FROM_BROWSER": "Escolha um dispositivo compatível com casts no navegador popup.",
- "PAIR_WITH_PIN_DESC": "",
+ "PAIR_WITH_PIN_DESC": "Parear com o PIN funciona com qualquer tela que você deseja ver o seu álbum ativado.",
"VISIT_CAST_ENTE_IO": "Acesse {{url}} no dispositivo que você deseja parear.",
- "CAST_AUTO_PAIR_FAILED": "",
+ "CAST_AUTO_PAIR_FAILED": "Falha no pareamento automático do Chromecast. Por favor, tente novamente.",
"FREEHAND": "Mão livre",
"APPLY_CROP": "Aplicar Recorte",
"PHOTO_EDIT_REQUIRED_TO_SAVE": "Pelo menos uma transformação ou ajuste de cor deve ser feito antes de salvar.",
diff --git a/web/packages/next/locales/pt-PT/translation.json b/web/packages/next/locales/pt-PT/translation.json
index 29b36622c1b702115e845ce4f1b443a0a6ed2651..981f33126aedafbcf8307a13daef6a350a1ecfe5 100644
--- a/web/packages/next/locales/pt-PT/translation.json
+++ b/web/packages/next/locales/pt-PT/translation.json
@@ -418,7 +418,6 @@
"ALL_HIDDEN_ALBUMS": "",
"HIDDEN_ALBUMS": "",
"HIDDEN_ITEMS": "",
- "HIDDEN_ITEMS_SECTION_NAME": "",
"ENTER_TWO_FACTOR_OTP": "",
"CREATE_ACCOUNT": "",
"COPIED": "",
@@ -448,10 +447,9 @@
"DOWNLOAD_LOGS_MESSAGE": "",
"CHANGE_FOLDER": "",
"TWO_MONTHS_FREE": "",
- "GB": "",
"POPULAR": "",
"FREE_PLAN_OPTION_LABEL": "",
- "FREE_PLAN_DESCRIPTION": "",
+ "free_plan_description": "",
"CURRENT_USAGE": "",
"WEAK_DEVICE": "",
"DRAG_AND_DROP_HINT": "",
@@ -495,12 +493,12 @@
"EXPORT_DIRECTORY_DOES_NOT_EXIST": "",
"EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": "",
"SUBSCRIPTION_VERIFICATION_ERROR": "",
- "STORAGE_UNITS": {
- "B": "",
- "KB": "",
- "MB": "",
- "GB": "",
- "TB": ""
+ "storage_unit": {
+ "b": "",
+ "kb": "",
+ "mb": "",
+ "gb": "",
+ "tb": ""
},
"AFTER_TIME": {
"HOUR": "",
@@ -567,6 +565,9 @@
"IMAGE": "",
"VIDEO": "",
"LIVE_PHOTO": "",
+ "editor": {
+ "crop": ""
+ },
"CONVERT": "",
"CONFIRM_EDITOR_CLOSE_MESSAGE": "",
"CONFIRM_EDITOR_CLOSE_DESCRIPTION": "",
diff --git a/web/packages/next/locales/ru-RU/translation.json b/web/packages/next/locales/ru-RU/translation.json
index 910c9253f46d66b554a8ff665fb0f4ffe0efd328..2d2af0293f4e0532b0a6bda4c2341b59d0523736 100644
--- a/web/packages/next/locales/ru-RU/translation.json
+++ b/web/packages/next/locales/ru-RU/translation.json
@@ -418,7 +418,6 @@
"ALL_HIDDEN_ALBUMS": "Все скрытые альбомы",
"HIDDEN_ALBUMS": "Скрытые альбомы",
"HIDDEN_ITEMS": "Скрытые предметы",
- "HIDDEN_ITEMS_SECTION_NAME": "Скрытые_элементы",
"ENTER_TWO_FACTOR_OTP": "Введите 6-значный код из вашего приложения для проверки подлинности.",
"CREATE_ACCOUNT": "Создать аккаунт",
"COPIED": "Скопированный",
@@ -448,10 +447,9 @@
"DOWNLOAD_LOGS_MESSAGE": "При этом будут загружены журналы отладки, которые вы можете отправить нам по электронной почте, чтобы помочь в устранении вашей проблемы.
Пожалуйста, обратите внимание, что будут указаны имена файлов, которые помогут отслеживать проблемы с конкретными файлами.
",
"CHANGE_FOLDER": "Изменить папку",
"TWO_MONTHS_FREE": "Получите 2 месяца бесплатно по годовым планам",
- "GB": "Гб",
"POPULAR": "Популярный",
"FREE_PLAN_OPTION_LABEL": "Продолжайте пользоваться бесплатной пробной версией",
- "FREE_PLAN_DESCRIPTION": "1 ГБ на 1 год",
+ "free_plan_description": "{{storage}} на 1 год",
"CURRENT_USAGE": "Текущее использование составляет {{usage}} ",
"WEAK_DEVICE": "Используемый вами веб-браузер недостаточно мощный, чтобы зашифровать ваши фотографии. Пожалуйста, попробуйте войти в Ente на своем компьютере или загрузить мобильное/настольное приложение Ente.",
"DRAG_AND_DROP_HINT": "Или перетащите в основное окно",
@@ -495,12 +493,12 @@
"EXPORT_DIRECTORY_DOES_NOT_EXIST": "Недопустимый каталог экспорта",
"EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": "Выбранный вами каталог экспорта не существует.
Пожалуйста, выберите правильный каталог.
",
"SUBSCRIPTION_VERIFICATION_ERROR": "Не удалось подтвердить подписку",
- "STORAGE_UNITS": {
- "B": "B",
- "KB": "БЗ",
- "MB": "Мегабайт",
- "GB": "Гб",
- "TB": "Терабайт"
+ "storage_unit": {
+ "b": "B",
+ "kb": "БЗ",
+ "mb": "Мегабайт",
+ "gb": "Гб",
+ "tb": "Терабайт"
},
"AFTER_TIME": {
"HOUR": "через час",
@@ -567,6 +565,9 @@
"IMAGE": "Изображение",
"VIDEO": "Видео",
"LIVE_PHOTO": "Живое фото",
+ "editor": {
+ "crop": ""
+ },
"CONVERT": "Преобразовать",
"CONFIRM_EDITOR_CLOSE_MESSAGE": "Вы уверены, что хотите закрыть редактор?",
"CONFIRM_EDITOR_CLOSE_DESCRIPTION": "Загрузите отредактированное изображение или сохраните копию в ente, чтобы сохранить внесенные изменения.",
diff --git a/web/packages/next/locales/sv-SE/translation.json b/web/packages/next/locales/sv-SE/translation.json
index 78e7116e0ad348607f7db00bd86be4d5527589d5..2ec0352b0cad510575fdbe9fca0e69a8fe420214 100644
--- a/web/packages/next/locales/sv-SE/translation.json
+++ b/web/packages/next/locales/sv-SE/translation.json
@@ -418,7 +418,6 @@
"ALL_HIDDEN_ALBUMS": "",
"HIDDEN_ALBUMS": "",
"HIDDEN_ITEMS": "",
- "HIDDEN_ITEMS_SECTION_NAME": "",
"ENTER_TWO_FACTOR_OTP": "",
"CREATE_ACCOUNT": "",
"COPIED": "",
@@ -448,10 +447,9 @@
"DOWNLOAD_LOGS_MESSAGE": "",
"CHANGE_FOLDER": "",
"TWO_MONTHS_FREE": "",
- "GB": "GB",
"POPULAR": "",
"FREE_PLAN_OPTION_LABEL": "",
- "FREE_PLAN_DESCRIPTION": "",
+ "free_plan_description": "",
"CURRENT_USAGE": "",
"WEAK_DEVICE": "",
"DRAG_AND_DROP_HINT": "",
@@ -495,12 +493,12 @@
"EXPORT_DIRECTORY_DOES_NOT_EXIST": "",
"EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": "",
"SUBSCRIPTION_VERIFICATION_ERROR": "",
- "STORAGE_UNITS": {
- "B": "",
- "KB": "",
- "MB": "",
- "GB": "",
- "TB": ""
+ "storage_unit": {
+ "b": "",
+ "kb": "",
+ "mb": "",
+ "gb": "",
+ "tb": ""
},
"AFTER_TIME": {
"HOUR": "",
@@ -567,6 +565,9 @@
"IMAGE": "Bild",
"VIDEO": "",
"LIVE_PHOTO": "",
+ "editor": {
+ "crop": ""
+ },
"CONVERT": "",
"CONFIRM_EDITOR_CLOSE_MESSAGE": "",
"CONFIRM_EDITOR_CLOSE_DESCRIPTION": "",
diff --git a/web/packages/next/locales/th-TH/translation.json b/web/packages/next/locales/th-TH/translation.json
index 38455b3e2d4b00885eca419e2e1da812cfc4035b..9f549eb49b8c51cdf36931bb43c9b0258d017309 100644
--- a/web/packages/next/locales/th-TH/translation.json
+++ b/web/packages/next/locales/th-TH/translation.json
@@ -418,7 +418,6 @@
"ALL_HIDDEN_ALBUMS": "",
"HIDDEN_ALBUMS": "",
"HIDDEN_ITEMS": "",
- "HIDDEN_ITEMS_SECTION_NAME": "",
"ENTER_TWO_FACTOR_OTP": "",
"CREATE_ACCOUNT": "",
"COPIED": "",
@@ -448,10 +447,9 @@
"DOWNLOAD_LOGS_MESSAGE": "",
"CHANGE_FOLDER": "",
"TWO_MONTHS_FREE": "",
- "GB": "",
"POPULAR": "",
"FREE_PLAN_OPTION_LABEL": "",
- "FREE_PLAN_DESCRIPTION": "",
+ "free_plan_description": "",
"CURRENT_USAGE": "",
"WEAK_DEVICE": "",
"DRAG_AND_DROP_HINT": "",
@@ -495,12 +493,12 @@
"EXPORT_DIRECTORY_DOES_NOT_EXIST": "",
"EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": "",
"SUBSCRIPTION_VERIFICATION_ERROR": "",
- "STORAGE_UNITS": {
- "B": "",
- "KB": "",
- "MB": "",
- "GB": "",
- "TB": ""
+ "storage_unit": {
+ "b": "",
+ "kb": "",
+ "mb": "",
+ "gb": "",
+ "tb": ""
},
"AFTER_TIME": {
"HOUR": "",
@@ -567,6 +565,9 @@
"IMAGE": "",
"VIDEO": "",
"LIVE_PHOTO": "",
+ "editor": {
+ "crop": ""
+ },
"CONVERT": "",
"CONFIRM_EDITOR_CLOSE_MESSAGE": "",
"CONFIRM_EDITOR_CLOSE_DESCRIPTION": "",
diff --git a/web/packages/next/locales/tr-TR/translation.json b/web/packages/next/locales/tr-TR/translation.json
index 38455b3e2d4b00885eca419e2e1da812cfc4035b..9f549eb49b8c51cdf36931bb43c9b0258d017309 100644
--- a/web/packages/next/locales/tr-TR/translation.json
+++ b/web/packages/next/locales/tr-TR/translation.json
@@ -418,7 +418,6 @@
"ALL_HIDDEN_ALBUMS": "",
"HIDDEN_ALBUMS": "",
"HIDDEN_ITEMS": "",
- "HIDDEN_ITEMS_SECTION_NAME": "",
"ENTER_TWO_FACTOR_OTP": "",
"CREATE_ACCOUNT": "",
"COPIED": "",
@@ -448,10 +447,9 @@
"DOWNLOAD_LOGS_MESSAGE": "",
"CHANGE_FOLDER": "",
"TWO_MONTHS_FREE": "",
- "GB": "",
"POPULAR": "",
"FREE_PLAN_OPTION_LABEL": "",
- "FREE_PLAN_DESCRIPTION": "",
+ "free_plan_description": "",
"CURRENT_USAGE": "",
"WEAK_DEVICE": "",
"DRAG_AND_DROP_HINT": "",
@@ -495,12 +493,12 @@
"EXPORT_DIRECTORY_DOES_NOT_EXIST": "",
"EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": "",
"SUBSCRIPTION_VERIFICATION_ERROR": "",
- "STORAGE_UNITS": {
- "B": "",
- "KB": "",
- "MB": "",
- "GB": "",
- "TB": ""
+ "storage_unit": {
+ "b": "",
+ "kb": "",
+ "mb": "",
+ "gb": "",
+ "tb": ""
},
"AFTER_TIME": {
"HOUR": "",
@@ -567,6 +565,9 @@
"IMAGE": "",
"VIDEO": "",
"LIVE_PHOTO": "",
+ "editor": {
+ "crop": ""
+ },
"CONVERT": "",
"CONFIRM_EDITOR_CLOSE_MESSAGE": "",
"CONFIRM_EDITOR_CLOSE_DESCRIPTION": "",
diff --git a/web/packages/next/locales/zh-CN/translation.json b/web/packages/next/locales/zh-CN/translation.json
index 04d0428fbba90847f32938ac60a5272333e0ca94..4ac62c79645c20b055477b64d710817e00481f5b 100644
--- a/web/packages/next/locales/zh-CN/translation.json
+++ b/web/packages/next/locales/zh-CN/translation.json
@@ -7,7 +7,7 @@
"HERO_SLIDE_3": "安卓, iOS, 网页端, 桌面端",
"LOGIN": "登录",
"SIGN_UP": "注册",
- "NEW_USER": "刚来到 Ente",
+ "NEW_USER": "初来 Ente",
"EXISTING_USER": "现有用户",
"ENTER_NAME": "输入名字",
"PUBLIC_UPLOADER_NAME_MESSAGE": "请添加一个名字,以便您的朋友知晓该感谢谁拍摄了这些精美的照片!",
@@ -418,7 +418,6 @@
"ALL_HIDDEN_ALBUMS": "所有隐藏的相册",
"HIDDEN_ALBUMS": "隐藏的相册",
"HIDDEN_ITEMS": "隐藏的项目",
- "HIDDEN_ITEMS_SECTION_NAME": "隐藏的项目",
"ENTER_TWO_FACTOR_OTP": "请输入您从身份验证应用上获得的6位数代码",
"CREATE_ACCOUNT": "创建账户",
"COPIED": "已复制",
@@ -448,10 +447,9 @@
"DOWNLOAD_LOGS_MESSAGE": "这将下载调试日志,您可以发送电子邮件给我们来帮助调试您的问题。
请注意文件名将被包含,以帮助跟踪特定文件中的问题。
",
"CHANGE_FOLDER": "更改文件夹",
"TWO_MONTHS_FREE": "在年度计划上免费获得 2 个月",
- "GB": "GB",
"POPULAR": "流行的",
"FREE_PLAN_OPTION_LABEL": "继续免费试用",
- "FREE_PLAN_DESCRIPTION": "1 GB 1年",
+ "free_plan_description": "{{storage}} 1年",
"CURRENT_USAGE": "当前使用量是 {{usage}} ",
"WEAK_DEVICE": "您使用的网络浏览器功能不够强大,无法加密您的照片。 请尝试在电脑上登录Ente,或下载Ente移动/桌面应用程序。",
"DRAG_AND_DROP_HINT": "或者拖动并拖动到 Ente 窗口",
@@ -495,12 +493,12 @@
"EXPORT_DIRECTORY_DOES_NOT_EXIST": "无效的导出目录",
"EXPORT_DIRECTORY_DOES_NOT_EXIST_MESSAGE": "您选择的导出目录不存在。
请选择一个有效的目录。
",
"SUBSCRIPTION_VERIFICATION_ERROR": "订阅验证失败",
- "STORAGE_UNITS": {
- "B": "B",
- "KB": "KB",
- "MB": "MB",
- "GB": "GB",
- "TB": "TB"
+ "storage_unit": {
+ "b": "B",
+ "kb": "KB",
+ "mb": "MB",
+ "gb": "GB",
+ "tb": "TB"
},
"AFTER_TIME": {
"HOUR": "1小时后",
@@ -567,6 +565,9 @@
"IMAGE": "图像",
"VIDEO": "视频",
"LIVE_PHOTO": "实况照片",
+ "editor": {
+ "crop": "裁剪"
+ },
"CONVERT": "转换",
"CONFIRM_EDITOR_CLOSE_MESSAGE": "您确定要关闭编辑器吗?",
"CONFIRM_EDITOR_CLOSE_DESCRIPTION": "下载已编辑的图片或将副本保存到 Ente 以保留您的更改。",
@@ -598,13 +599,13 @@
"ENTER_CAST_PIN_CODE": "输入您在下面的电视上看到的代码来配对此设备。",
"PAIR_DEVICE_TO_TV": "配对设备",
"TV_NOT_FOUND": "未找到电视。您输入的 PIN 码正确吗?",
- "AUTO_CAST_PAIR": "",
- "AUTO_CAST_PAIR_DESC": "",
+ "AUTO_CAST_PAIR": "自动配对",
+ "AUTO_CAST_PAIR_DESC": "自动配对仅适用于支持 Chromecast 的设备。",
"PAIR_WITH_PIN": "用 PIN 配对",
"CHOOSE_DEVICE_FROM_BROWSER": "从浏览器弹出窗口中选择兼容 Cast 的设备。",
- "PAIR_WITH_PIN_DESC": "",
+ "PAIR_WITH_PIN_DESC": "用 PIN 码配对适用于您希望在其上查看相册的任何屏幕。",
"VISIT_CAST_ENTE_IO": "在您要配对的设备上访问 {{url}} 。",
- "CAST_AUTO_PAIR_FAILED": "",
+ "CAST_AUTO_PAIR_FAILED": "Chromecast 自动配对失败。请再试一次。",
"FREEHAND": "手画",
"APPLY_CROP": "应用裁剪",
"PHOTO_EDIT_REQUIRED_TO_SAVE": "保存之前必须至少执行一项转换或颜色调整。",
diff --git a/web/packages/next/log.ts b/web/packages/next/log.ts
index f9ef7e5493e3ba09b96ab262d68a6779acb2978c..e69d22b07e59f636c7b2d1c1cce7ebc8df8da36e 100644
--- a/web/packages/next/log.ts
+++ b/web/packages/next/log.ts
@@ -3,6 +3,19 @@ import { isDevBuild } from "./env";
import { logToDisk as webLogToDisk } from "./log-web";
import { workerBridge } from "./worker/worker-bridge";
+/**
+ * Whether logs go to disk or are always emitted to the console.
+ */
+let shouldLogToDisk = true;
+
+/**
+ * By default, logs get saved into a ring buffer in the browser's local storage.
+ * However, in some contexts, e.g. when we're running as the cast app, there is
+ * no mechanism for the user to retrieve these logs. So this function exists as
+ * a way to disable the on-disk logging and always use the console.
+ */
+export const disableDiskLogs = () => (shouldLogToDisk = false);
+
/**
* Write a {@link message} to the on-disk log.
*
@@ -45,14 +58,14 @@ const messageWithError = (message: string, e?: unknown) => {
const logError = (message: string, e?: unknown) => {
const m = `[error] ${messageWithError(message, e)}`;
- if (isDevBuild) console.error(m);
- logToDisk(m);
+ console.error(m);
+ if (shouldLogToDisk) logToDisk(m);
};
const logWarn = (message: string, e?: unknown) => {
const m = `[warn] ${messageWithError(message, e)}`;
- if (isDevBuild) console.error(m);
- logToDisk(m);
+ console.error(m);
+ if (shouldLogToDisk) logToDisk(m);
};
const logInfo = (...params: unknown[]) => {
@@ -60,8 +73,8 @@ const logInfo = (...params: unknown[]) => {
.map((p) => (typeof p == "string" ? p : JSON.stringify(p)))
.join(" ");
const m = `[info] ${message}`;
- if (isDevBuild) console.log(m);
- logToDisk(m);
+ if (isDevBuild || !shouldLogToDisk) console.log(m);
+ if (shouldLogToDisk) logToDisk(m);
};
const logDebug = (param: () => unknown) => {
@@ -71,8 +84,8 @@ const logDebug = (param: () => unknown) => {
/**
* Ente's logger.
*
- * This is an object that provides three functions to log at the corresponding
- * levels - error, info or debug.
+ * This is an object that provides functions to log at the corresponding levels:
+ * error, warn, info or debug.
*
* Whenever we need to save a log message to disk,
*
@@ -89,8 +102,7 @@ export default {
* any arbitrary object that we obtain, say, when in a try-catch handler (in
* JavaScript any arbitrary value can be thrown).
*
- * The log is written to disk. In development builds, the log is also
- * printed to the browser console.
+ * The log is written to disk and printed to the browser console.
*/
error: logError,
/**
@@ -104,8 +116,10 @@ export default {
* This is meant as a replacement of {@link console.log}, and takes an
* arbitrary number of arbitrary parameters that it then serializes.
*
- * The log is written to disk. In development builds, the log is also
- * printed to the browser console.
+ * The log is written to disk. However, if logging to disk is disabled by
+ * using {@link disableDiskLogs}, then the log is printed to the console.
+ *
+ * In development builds, the log is always printed to the browser console.
*/
info: logInfo,
/**
@@ -118,8 +132,8 @@ export default {
* The function can return an arbitrary value which is serialized before
* being logged.
*
- * This log is NOT written to disk. And it is printed to the browser
- * console, but only in development builds.
+ * This log is NOT written to disk. It is printed to the browser console,
+ * but only in development builds.
*/
debug: logDebug,
};
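A minimal sketch of the intended use of the new disableDiskLogs escape hatch; the import alias is an assumption (it mirrors the "@/utils/promise" style seen later in this diff):

// Hypothetical entry point of a context with no way to retrieve on-disk logs
// (e.g. the cast app): switch to console-only logging before anything else runs.
import log, { disableDiskLogs } from "@/next/log";

disableDiskLogs();
log.info("cast app started"); // printed to the console, not the local storage ring buffer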
diff --git a/web/packages/next/types/ipc.ts b/web/packages/next/types/ipc.ts
index 4b05838fa19c27ffdc39bd356377534773acb592..d91f7bf3fac13f048ad23291f1775f45896c97aa 100644
--- a/web/packages/next/types/ipc.ts
+++ b/web/packages/next/types/ipc.ts
@@ -67,10 +67,17 @@ export interface Electron {
* Clear any stored data.
*
* This is a coarse single shot cleanup, meant for use in clearing any
- * Electron side state during logout.
+ * persisted Electron side state during logout.
*/
clearStores: () => void;
+ /**
+ * Clear any state corresponding to in-flight convert-to-mp4 requests.
+ *
+ * This is meant for use during logout.
+ */
+ clearConvertToMP4Results: () => void;
+
/**
* Return the previously saved encryption key from persistent safe storage.
*
@@ -260,7 +267,7 @@ export interface Electron {
* This executes the command using a FFmpeg executable we bundle with our
* desktop app. We also have a wasm FFmpeg wasm implementation that we use
* when running on the web, which has a sibling function with the same
- * parameters. See [Note: ffmpeg in Electron].
+ * parameters. See [Note: FFmpeg in Electron].
*
* @param command An array of strings, each representing one positional
* parameter in the command to execute. Placeholders for the input, output
@@ -280,9 +287,6 @@ export interface Electron {
* just return its contents, for some FFmpeg command the extension matters
* (e.g. conversion to a JPEG fails if the extension is arbitrary).
*
- * @param timeoutMS If non-zero, then abort and throw a timeout error if the
- * ffmpeg command takes more than the given number of milliseconds.
- *
* @returns The contents of the output file produced by the ffmpeg command
* (specified as {@link outputPathPlaceholder} in {@link command}).
*/
@@ -290,7 +294,6 @@ export interface Electron {
command: string[],
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
outputFileExtension: string,
- timeoutMS: number,
) => Promise<Uint8Array>;
// - ML
@@ -346,6 +349,28 @@ export interface Electron {
*/
faceEmbedding: (input: Float32Array) => Promise<Float32Array>;
+ /**
+ * Return a face crop stored by a previous version of ML.
+ *
+ * [Note: Legacy face crops]
+ *
+ * Older versions of ML generated and stored face crops in a "face-crops"
+ * cache directory on the Electron side. For the time being, we have
+ * disabled the face search whilst we put finishing touches to it. However,
+ * it'll be nice to still show the existing faces that have been clustered
+ * for people who opted in to the older beta.
+ *
+ * So we retain the older "face-crops" disk cache, and use this method to
+ * serve faces from it when needed.
+ *
+ * @param faceID The identifier under which the face crop had been stored by
+ * the older version of our app.
+ *
+ * @returns the JPEG data of the face crop if a file is found for the given
+ * {@link faceID}, otherwise undefined.
+ */
+ legacyFaceCrop: (faceID: string) => Promise<Uint8Array | undefined>;
+
// - Watch
/**
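A minimal sketch of how the renderer might consume the new legacyFaceCrop IPC method; the globalThis.electron bridge object and the blob URL plumbing are assumptions, not part of this diff:

// Hypothetical helper: turn the stored JPEG bytes (if any) into an object URL
// that an <img> element can display.
const legacyFaceCropURL = async (faceID: string): Promise<string | undefined> => {
    const jpeg = await globalThis.electron?.legacyFaceCrop(faceID);
    if (!jpeg) return undefined;
    return URL.createObjectURL(new Blob([jpeg], { type: "image/jpeg" }));
};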
diff --git a/web/packages/shared/hooks/useFileInput.tsx b/web/packages/shared/hooks/useFileInput.tsx
index 71f027cefea31be9569043693b8799e2b4b0fff3..88c247ecc1ae109f33561091b1bcda198103a727 100644
--- a/web/packages/shared/hooks/useFileInput.tsx
+++ b/web/packages/shared/hooks/useFileInput.tsx
@@ -1,56 +1,71 @@
import { useCallback, useRef, useState } from "react";
interface UseFileInputParams {
+ /**
+ * If `true`, the file open dialog will ask the user to select directories.
+ * Otherwise it'll ask the user to select files (default).
+ */
directory?: boolean;
+ /**
+ * If specified, it'll restrict the type of files that the user can select
+ * by setting the "accept" attribute of the underlying HTML input element we
+ * use to surface the file selector dialog. For value of accept can be an
+ * extension or a MIME type (See
+ * https://developer.mozilla.org/en-US/docs/Web/HTML/Attributes/accept).
+ */
accept?: string;
}
+interface UseFileInputResult {
+ /**
+ * A function to call to get the properties that should be passed to a dummy
+ * `input` element that needs to be created to anchor the select file
+ * dialog. This input HTML element is not going to be visible, but it needs
+ * to be part of the DOM for {@link openSelector} to have effect.
+ */
+ getInputProps: () => React.HTMLAttributes<HTMLInputElement>;
+ /**
+ * A function that can be called to open the select file / directory dialog.
+ */
+ openSelector: () => void;
+ /**
+ * The list of {@link File}s that the user selected.
+ *
+ * This will be a list even if the user selected directories - in that case,
+ * it will be the recursive list of files within this directory.
+ */
+ selectedFiles: File[];
+}
+
/**
- * Return three things:
- *
- * - A function that can be called to trigger the showing of the select file /
- * directory dialog.
- *
- * - The list of properties that should be passed to a dummy `input` element
- * that needs to be created to anchor the select file dialog. This input HTML
- * element is not going to be visible, but it needs to be part of the DOM fro
- * the open trigger to have effect.
- *
- * - The list of files that the user selected. This will be a list even if the
- * user selected directories - in that case, it will be the recursive list of
- * files within this directory.
+ * Wrap an open file selector into an easy-to-use package.
*
- * @param param0
+ * Returns a {@link UseFileInputResult} which contains a function to get the
+ * props for an input element, a function to open the file selector, and the
+ * list of selected files.
*
- * - If {@link directory} is true, the file open dialog will ask the user to
- * select directories. Otherwise it'll ask the user to select files.
- *
- * - If {@link accept} is specified, it'll restrict the type of files that the
- * user can select by setting the "accept" attribute of the underlying HTML
- * input element we use to surface the file selector dialog. For value of
- * accept can be an extension or a MIME type (See
- * https://developer.mozilla.org/en-US/docs/Web/HTML/Attributes/accept).
+ * See the documentation of {@link UseFileInputParams} and
+ * {@link UseFileInputResult} for more details.
*/
-export default function useFileInput({
+export const useFileInput = ({
directory,
accept,
-}: UseFileInputParams) {
+}: UseFileInputParams): UseFileInputResult => {
const [selectedFiles, setSelectedFiles] = useState<File[]>([]);
const inputRef = useRef<HTMLInputElement>();
- const openSelectorDialog = useCallback(() => {
+ const openSelector = useCallback(() => {
if (inputRef.current) {
inputRef.current.value = null;
inputRef.current.click();
}
}, []);
- const handleChange: React.ChangeEventHandler<HTMLInputElement> = async (
+ const handleChange: React.ChangeEventHandler<HTMLInputElement> = (
event,
) => {
- if (!!event.target && !!event.target.files) {
- setSelectedFiles([...event.target.files]);
- }
+ const files = event.target?.files;
+ if (files) setSelectedFiles([...files]);
};
// [Note: webkitRelativePath]
@@ -78,12 +93,8 @@ export default function useFileInput({
onChange: handleChange,
...(accept ? { accept } : {}),
}),
- [],
+ [directoryOpts, accept, handleChange],
);
- return {
- getInputProps,
- open: openSelectorDialog,
- selectedFiles: selectedFiles,
- };
-}
+ return { getInputProps, openSelector, selectedFiles };
+};
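A minimal sketch of a component using the reworked hook; the import path is an assumption:

// Hypothetical component: the hidden <input> is anchored via getInputProps,
// and a visible button opens the selector via openSelector.
import React from "react";
import { useFileInput } from "@ente/shared/hooks/useFileInput";

export const PickPhotos: React.FC = () => {
    const { getInputProps, openSelector, selectedFiles } = useFileInput({
        accept: "image/*",
    });
    return (
        <>
            <input {...getInputProps()} />
            <button onClick={openSelector}>Select photos</button>
            <p>{selectedFiles.length} file(s) selected</p>
        </>
    );
};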
diff --git a/web/packages/shared/storage/localStorage/index.ts b/web/packages/shared/storage/localStorage/index.ts
index 70b9687cdc5ecbdab91ff6428943cb7d21392bbe..c6ec3f57f4978cf74e6906c87da1fd1cd8ee1dd3 100644
--- a/web/packages/shared/storage/localStorage/index.ts
+++ b/web/packages/shared/storage/localStorage/index.ts
@@ -7,7 +7,6 @@ export enum LS_KEYS {
ORIGINAL_KEY_ATTRIBUTES = "originalKeyAttributes",
SUBSCRIPTION = "subscription",
FAMILY_DATA = "familyData",
- PLANS = "plans",
IS_FIRST_LOGIN = "isFirstLogin",
JUST_SIGNED_UP = "justSignedUp",
SHOW_BACK_BUTTON = "showBackButton",
diff --git a/web/packages/shared/utils/index.ts b/web/packages/shared/utils/index.ts
index 568ec5cc408b59122d3521d762bbe62089cbef73..8b46f626706c75b0849f2c715bda246bf1c29b08 100644
--- a/web/packages/shared/utils/index.ts
+++ b/web/packages/shared/utils/index.ts
@@ -1,11 +1,4 @@
-/**
- * Wait for {@link ms} milliseconds
- *
- * This function is a promisified `setTimeout`. It returns a promise that
- * resolves after {@link ms} milliseconds.
- */
-export const wait = (ms: number) =>
- new Promise((resolve) => setTimeout(resolve, ms));
+import { wait } from "@/utils/promise";
export function downloadAsFile(filename: string, content: string) {
const file = new Blob([content], {
@@ -52,23 +45,3 @@ export async function retryAsyncFunction(
}
}
}
-
-/**
- * Await the given {@link promise} for {@link timeoutMS} milliseconds. If it
- * does not resolve within {@link timeoutMS}, then reject with a timeout error.
- */
-export const withTimeout = async <T>(promise: Promise<T>, ms: number) => {
- let timeoutId: ReturnType<typeof setTimeout>;
- const rejectOnTimeout = new Promise<T>((_, reject) => {
- timeoutId = setTimeout(
- () => reject(new Error("Operation timed out")),
- ms,
- );
- });
- const promiseAndCancelTimeout = async () => {
- const result = await promise;
- clearTimeout(timeoutId);
- return result;
- };
- return Promise.race([promiseAndCancelTimeout(), rejectOnTimeout]);
-};
diff --git a/web/packages/utils/array.ts b/web/packages/utils/array.ts
new file mode 100644
index 0000000000000000000000000000000000000000..10030b189e9b97911fb496e7868b3463cb3309c7
--- /dev/null
+++ b/web/packages/utils/array.ts
@@ -0,0 +1,30 @@
+/**
+ * Shuffle.
+ *
+ * Return a new array containing the shuffled elements of the given array.
+ *
+ * The algorithm used is not the most efficient, but it is effectively a
+ * one-liner whilst still being reasonably fast. To each element we assign a random key,
+ * then we sort by this key. Since the key is random, the sorted array will have
+ * the original elements in a random order.
+ */
+export const shuffled = <T>(xs: T[]) =>
+ xs
+ .map((x) => [Math.random(), x])
+ .sort()
+ .map(([, x]) => x) as T[];
+
+/**
+ * Return the first non-empty string from the given list of strings.
+ *
+ * This function is needed because the `a ?? b` idiom doesn't do what you'd
+ * expect when a is "". Perhaps the behaviour is wrong, perhaps the expecation
+ * is wrong; this function papers over the differences.
+ *
+ * If none of the strings are non-empty, or if there are no strings in the given
+ * array, return undefined.
+ */
+export const firstNonEmpty = (ss: (string | undefined)[]) => {
+ for (const s of ss) if (s && s.length > 0) return s;
+ return undefined;
+};
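A minimal usage sketch for the new array helpers; the import alias mirrors the "@/utils/promise" import seen earlier in this diff:

import { firstNonEmpty, shuffled } from "@/utils/array";

const deck = shuffled([1, 2, 3, 4, 5]); // e.g. [3, 1, 5, 2, 4]

// "" is skipped, unlike with the ?? operator, so "fallback" is returned here.
const name = firstNonEmpty(["", undefined, "fallback"]); // "fallback"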
diff --git a/web/packages/utils/ensure.ts b/web/packages/utils/ensure.ts
index 93706bfb61c15aea0a497ecfb23ada494eef6daf..41639ea2b5c788a9e3d24b6378f9df2b4abde4c8 100644
--- a/web/packages/utils/ensure.ts
+++ b/web/packages/utils/ensure.ts
@@ -3,7 +3,7 @@
*/
export const ensure = <T>(v: T | null | undefined): T => {
if (v === null) throw new Error("Required value was null");
- if (v === undefined) throw new Error("Required value was not found");
+ if (v === undefined) throw new Error("Required value was undefined");
return v;
};
diff --git a/web/packages/utils/promise.ts b/web/packages/utils/promise.ts
new file mode 100644
index 0000000000000000000000000000000000000000..4cb7648fd3663b0cf7bb3db26dff0aa149407dc3
--- /dev/null
+++ b/web/packages/utils/promise.ts
@@ -0,0 +1,28 @@
+/**
+ * Wait for {@link ms} milliseconds
+ *
+ * This function is a promisified `setTimeout`. It returns a promise that
+ * resolves after {@link ms} milliseconds.
+ */
+export const wait = (ms: number) =>
+ new Promise((resolve) => setTimeout(resolve, ms));
+
+/**
+ * Await the given {@link promise} for {@link timeoutMS} milliseconds. If it
+ * does not resolve within {@link timeoutMS}, then reject with a timeout error.
+ */
+export const withTimeout = async <T>(promise: Promise<T>, ms: number) => {
+ let timeoutId: ReturnType<typeof setTimeout>;
+ const rejectOnTimeout = new Promise<T>((_, reject) => {
+ timeoutId = setTimeout(
+ () => reject(new Error("Operation timed out")),
+ ms,
+ );
+ });
+ const promiseAndCancelTimeout = async () => {
+ const result = await promise;
+ clearTimeout(timeoutId);
+ return result;
+ };
+ return Promise.race([promiseAndCancelTimeout(), rejectOnTimeout]);
+};
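A minimal usage sketch for the relocated promise helpers:

import { wait, withTimeout } from "@/utils/promise";

const demo = async () => {
    await wait(1000); // pause for a second
    // Rejects with "Operation timed out" if the fetch takes longer than 5 seconds.
    return withTimeout(fetch("https://example.org"), 5000);
};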
diff --git a/web/yarn.lock b/web/yarn.lock
index af3a5f2102a0d7a52e549d2e9d052c2d699c145e..f4d92b6dd8ba6b29443772b9b91975fe788f8cb5 100644
--- a/web/yarn.lock
+++ b/web/yarn.lock
@@ -1000,6 +1000,11 @@
"@types/node" "*"
base-x "^3.0.6"
+"@types/chromecast-caf-receiver@^6.0.14":
+ version "6.0.14"
+ resolved "https://registry.yarnpkg.com/@types/chromecast-caf-receiver/-/chromecast-caf-receiver-6.0.14.tgz#e1e781c62c84ee85899fd20d658e258f8f45f5be"
+ integrity sha512-qvN4uE4MlYCEtniTtjxG4D+KeEXfs/Sgqex9sSZdPVh5rffdifINYzKH3z3QRl+0mk41vD6vYZ8s8ZfW/8iFoQ==
+
"@types/estree@1.0.5":
version "1.0.5"
resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.5.tgz#a6ce3e556e00fd9895dd872dd172ad0d4bd687f4"
@@ -1010,6 +1015,11 @@
resolved "https://registry.yarnpkg.com/@types/geojson/-/geojson-7946.0.14.tgz#319b63ad6df705ee2a65a73ef042c8271e696613"
integrity sha512-WCfD5Ht3ZesJUsONdhvm84dmzWOiOzOAqOncN0++w0lBw1o8OuDNJF2McvvCef/yBqb/HYRahp1BYtODFQ8bRg==
+"@types/heic-convert@^1.2.3":
+ version "1.2.3"
+ resolved "https://registry.yarnpkg.com/@types/heic-convert/-/heic-convert-1.2.3.tgz#0705f36e467e7b6180806edd0b3f1e673514ff8c"
+ integrity sha512-5LJ2fGuVk/gnOLihoT56xJwrXxfnNepGvrHwlW5ZtT3HS4jO1AqBaAHCxXUpnY9UaD3zYcyxXMRM2fNN1AFF/Q==
+
"@types/hoist-non-react-statics@^3.3.1":
version "3.3.5"
resolved "https://registry.yarnpkg.com/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.5.tgz#dab7867ef789d87e2b4b0003c9d65c49cc44a494"
@@ -2498,12 +2508,12 @@ file-entry-cache@^6.0.1:
dependencies:
flat-cache "^3.0.4"
-file-selector@^0.4.0:
- version "0.4.0"
- resolved "https://registry.yarnpkg.com/file-selector/-/file-selector-0.4.0.tgz#59ec4f27aa5baf0841e9c6385c8386bef4d18b17"
- integrity sha512-iACCiXeMYOvZqlF1kTiYINzgepRBymz1wwjiuup9u9nayhb6g4fSwiyJ/6adli+EPwrWtpgQAh2PoS7HukEGEg==
+file-selector@^0.6.0:
+ version "0.6.0"
+ resolved "https://registry.yarnpkg.com/file-selector/-/file-selector-0.6.0.tgz#fa0a8d9007b829504db4d07dd4de0310b65287dc"
+ integrity sha512-QlZ5yJC0VxHxQQsQhXvBaC7VRJ2uaxTf+Tfpu4Z/OcVQJVpZO+DGU0rkoVW5ce2SccxugvpBJoMvUs59iILYdw==
dependencies:
- tslib "^2.0.3"
+ tslib "^2.4.0"
file-type@16.5.4:
version "16.5.4"
@@ -2834,7 +2844,7 @@ hdbscan@0.0.1-alpha.5:
dependencies:
kd-tree-javascript "^1.0.3"
-heic-convert@^2.0.0:
+heic-convert@^2.1:
version "2.1.0"
resolved "https://registry.yarnpkg.com/heic-convert/-/heic-convert-2.1.0.tgz#7f764529e37591ae263ef49582d1d0c13491526e"
integrity sha512-1qDuRvEHifTVAj3pFIgkqGgJIr0M3X7cxEPjEp0oG4mo8GFjq99DpCo8Eg3kg17Cy0MTjxpFdoBHOatj7ZVKtg==
@@ -3428,7 +3438,7 @@ mime-db@1.52.0:
resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70"
integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==
-mime-types@^2.1.12, mime-types@^2.1.35:
+mime-types@^2.1.12:
version "2.1.35"
resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a"
integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==
@@ -3882,13 +3892,13 @@ react-dom@^18:
loose-envify "^1.1.0"
scheduler "^0.23.0"
-react-dropzone@^11.2.4:
- version "11.7.1"
- resolved "https://registry.yarnpkg.com/react-dropzone/-/react-dropzone-11.7.1.tgz#3851bb75b26af0bf1b17ce1449fd980e643b9356"
- integrity sha512-zxCMwhfPy1olUEbw3FLNPLhAm/HnaYH5aELIEglRbqabizKAdHs0h+WuyOpmA+v1JXn0++fpQDdNfUagWt5hJQ==
+react-dropzone@^14.2:
+ version "14.2.3"
+ resolved "https://registry.yarnpkg.com/react-dropzone/-/react-dropzone-14.2.3.tgz#0acab68308fda2d54d1273a1e626264e13d4e84b"
+ integrity sha512-O3om8I+PkFKbxCukfIR3QAGftYXDZfOE2N1mr/7qebQJHs7U+/RSL/9xomJNpRg9kM5h9soQSdf0Gc7OHF5Fug==
dependencies:
attr-accept "^2.2.2"
- file-selector "^0.4.0"
+ file-selector "^0.6.0"
prop-types "^15.8.1"
react-fast-compare@^2.0.1:
@@ -4587,7 +4597,7 @@ tsconfig-paths@^3.15.0:
minimist "^1.2.6"
strip-bom "^3.0.0"
-tslib@^2.0.0, tslib@^2.0.3, tslib@^2.4.0, tslib@^2.6.2:
+tslib@^2.0.0, tslib@^2.4.0, tslib@^2.6.2:
version "2.6.2"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae"
integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==