
Merge branch 'main' into mobile_face

Neeraj Gupta, 1 year ago
Parent
Commit
089aa16bc6
100 changed files with 929 additions and 891 deletions
1. +1 -1 .github/workflows/auth-crowdin.yml
2. +1 -1 .github/workflows/auth-lint.yml
3. +3 -3 .github/workflows/auth-release.yml
4. +30 -0 .github/workflows/desktop-lint.yml
5. +1 -1 .github/workflows/docs-verify-build.yml
6. +1 -1 .github/workflows/mobile-crowdin.yml
7. +1 -0 .github/workflows/mobile-internal-release.yml
8. +1 -1 .github/workflows/mobile-lint.yml
9. +1 -1 .github/workflows/server-lint.yml
10. +5 -0 .github/workflows/server-publish.yml
11. +34 -0 .github/workflows/web-crowdin-push.yml
12. +1 -1 .github/workflows/web-crowdin.yml
13. +1 -1 .github/workflows/web-deploy-accounts.yml
14. +1 -1 .github/workflows/web-deploy-cast.yml
15. +1 -1 .github/workflows/web-lint.yml
16. +1 -1 auth/assets/simple-icons
17. +3 -3 auth/ios/Podfile.lock
18. +0 -1 auth/lib/l10n/arb/app_ar.arb
19. +0 -1 auth/lib/l10n/arb/app_de.arb
20. +0 -1 auth/lib/l10n/arb/app_en.arb
21. +9 -2 auth/lib/l10n/arb/app_es.arb
22. +0 -1 auth/lib/l10n/arb/app_fa.arb
23. +0 -1 auth/lib/l10n/arb/app_fi.arb
24. +0 -1 auth/lib/l10n/arb/app_fr.arb
25. +0 -1 auth/lib/l10n/arb/app_he.arb
26. +0 -1 auth/lib/l10n/arb/app_it.arb
27. +0 -1 auth/lib/l10n/arb/app_ja.arb
28. +0 -1 auth/lib/l10n/arb/app_ka.arb
29. +0 -1 auth/lib/l10n/arb/app_nl.arb
30. +0 -1 auth/lib/l10n/arb/app_pl.arb
31. +0 -1 auth/lib/l10n/arb/app_pt.arb
32. +0 -1 auth/lib/l10n/arb/app_ru.arb
33. +0 -1 auth/lib/l10n/arb/app_sv.arb
34. +0 -1 auth/lib/l10n/arb/app_ti.arb
35. +0 -1 auth/lib/l10n/arb/app_tr.arb
36. +0 -1 auth/lib/l10n/arb/app_vi.arb
37. +0 -1 auth/lib/l10n/arb/app_zh.arb
38. +1 -0 auth/lib/main.dart
39. +22 -7 auth/lib/models/code.dart
40. +10 -3 auth/lib/onboarding/view/setup_enter_secret_key_page.dart
41. +5 -5 auth/lib/ui/code_widget.dart
42. +2 -0 auth/lib/ui/settings/data/import/bitwarden_import.dart
43. +2 -2 auth/lib/utils/totp_util.dart
44. +1 -1 auth/linux/packaging/rpm/make_config.yaml
45. +3 -3 auth/pubspec.lock
46. +3 -2 auth/pubspec.yaml
47. +8 -1 cli/README.md
48. +2 -1 cli/cmd/account.go
49. +1 -1 cli/docs/generated/ente.md
50. +2 -2 cli/docs/generated/ente_account.md
51. +6 -2 cli/docs/generated/ente_account_add.md
52. +1 -1 cli/docs/generated/ente_account_get-token.md
53. +1 -1 cli/docs/generated/ente_account_list.md
54. +1 -1 cli/docs/generated/ente_account_update.md
55. +1 -1 cli/docs/generated/ente_admin.md
56. +1 -1 cli/docs/generated/ente_admin_delete-user.md
57. +1 -1 cli/docs/generated/ente_admin_disable-2fa.md
58. +1 -1 cli/docs/generated/ente_admin_get-user-id.md
59. +1 -1 cli/docs/generated/ente_admin_list-users.md
60. +1 -1 cli/docs/generated/ente_admin_update-subscription.md
61. +1 -1 cli/docs/generated/ente_auth.md
62. +1 -1 cli/docs/generated/ente_auth_decrypt.md
63. +1 -1 cli/docs/generated/ente_export.md
64. +1 -1 cli/docs/generated/ente_version.md
65. +1 -1 cli/pkg/account.go
66. +19 -4 desktop/.eslintrc.js
67. +0 -55 desktop/.github/workflows/build.yml
68. +80 -0 desktop/.github/workflows/desktop-release.yml
69. +8 -0 desktop/CHANGELOG.md
70. +0 -6 desktop/README.md
71. +3 -0 desktop/docs/dependencies.md
72. +56 -51 desktop/docs/release.md
73. +0 -1 desktop/electron-builder.yml
74. +11 -6 desktop/package.json
75. +66 -87 desktop/src/main.ts
76. +0 -72 desktop/src/main/dialogs.ts
77. +0 -29 desktop/src/main/fs.ts
78. +0 -21 desktop/src/main/init.ts
79. +32 -30 desktop/src/main/ipc.ts
80. +6 -6 desktop/src/main/log.ts
81. +21 -22 desktop/src/main/menu.ts
82. +88 -6 desktop/src/main/services/app-update.ts
83. +6 -7 desktop/src/main/services/auto-launcher.ts
84. +89 -0 desktop/src/main/services/dir.ts
85. +5 -5 desktop/src/main/services/ffmpeg.ts
86. +19 -143 desktop/src/main/services/fs.ts
87. +3 -3 desktop/src/main/services/image.ts
88. +45 -47 desktop/src/main/services/ml-clip.ts
89. +4 -2 desktop/src/main/services/ml-face.ts
90. +8 -5 desktop/src/main/services/ml.ts
91. +14 -5 desktop/src/main/services/store.ts
92. +63 -63 desktop/src/main/services/upload.ts
93. +19 -22 desktop/src/main/services/watch.ts
94. +1 -1 desktop/src/main/stores/safe-storage.ts
95. +0 -5 desktop/src/main/stores/upload-status.ts
96. +1 -1 desktop/src/main/stores/user-preferences.ts
97. +9 -5 desktop/src/main/stores/watch.ts
98. +40 -58 desktop/src/main/stream.ts
99. +12 -3 desktop/src/main/utils/common.ts
100. +23 -38 desktop/src/main/utils/electron.ts

+ 1 - 1
.github/workflows/auth-crowdin.yml

@@ -30,7 +30,7 @@ jobs:
                   upload_sources: true
                   upload_translations: false
                   download_translations: true
-                  localization_branch_name: crowdin-translations-auth
+                  localization_branch_name: translations/auth
                   create_pull_request: true
                   skip_untranslated_strings: true
                   pull_request_title: "[auth] New translations"

+ 1 - 1
.github/workflows/auth-lint.yml

@@ -3,7 +3,7 @@ name: "Lint (auth)"
 on:
     # Run on every push to a branch other than main that changes auth/
     push:
-        branches-ignore: [main, "deploy/**"]
+        branches-ignore: [main, "deploy/**", "deploy-f/**"]
         paths:
             - "auth/**"
             - ".github/workflows/auth-lint.yml"

+ 3 - 3
.github/workflows/auth-release.yml

@@ -17,8 +17,8 @@ name: "Release (auth)"
 # We use a suffix like `-test` to indicate that these are test tags, and that
 # they belong to a pre-release.
 #
-# If you need to do multiple tests, add a +x at the end of the tag. e.g.
-# `auth-v1.2.3-test+1`.
+# If you need to do multiple tests, add a .x at the end of the tag. e.g.
+# `auth-v1.2.3-test.1`.
 #
 # Once the testing is done, also delete the tag(s) please.
 
@@ -85,7 +85,7 @@ jobs:
             - name: Install dependencies for desktop build
               run: |
                   sudo apt-get update -y
-                  sudo apt-get install -y libsecret-1-dev libsodium-dev libwebkit2gtk-4.0-dev libfuse2 ninja-build libgtk-3-dev dpkg-dev pkg-config rpm libsqlite3-dev locate appindicator3-0.1 libappindicator3-dev libffi-dev libtiff5
+                  sudo apt-get install -y libsecret-1-dev libsodium-dev libwebkit2gtk-4.0-dev libfuse2 ninja-build libgtk-3-dev dpkg-dev pkg-config rpm patchelf libsqlite3-dev locate appindicator3-0.1 libappindicator3-dev libffi-dev libtiff5
                   sudo updatedb --localpaths='/usr/lib/x86_64-linux-gnu'
 
             - name: Install appimagetool

+ 30 - 0
.github/workflows/desktop-lint.yml

@@ -0,0 +1,30 @@
+name: "Lint (desktop)"
+
+on:
+    # Run on every push to a branch other than main that changes desktop/
+    push:
+        branches-ignore: [main, "deploy/**", "deploy-f/**"]
+        paths:
+            - "desktop/**"
+            - ".github/workflows/desktop-lint.yml"
+
+jobs:
+    lint:
+        runs-on: ubuntu-latest
+        defaults:
+            run:
+                working-directory: desktop
+        steps:
+            - name: Checkout code
+              uses: actions/checkout@v4
+
+            - name: Setup node and enable yarn caching
+              uses: actions/setup-node@v4
+              with:
+                  node-version: 20
+                  cache: "yarn"
+                  cache-dependency-path: "desktop/yarn.lock"
+
+            - run: yarn install
+
+            - run: yarn lint

+ 1 - 1
.github/workflows/docs-verify-build.yml

@@ -6,7 +6,7 @@ name: "Verify build (docs)"
 on:
     # Run on every push to a branch other than main that changes docs/
     push:
-        branches-ignore: [main, "deploy/**"]
+        branches-ignore: [main, "deploy/**", "deploy-f/**"]
         paths:
             - "docs/**"
             - ".github/workflows/docs-verify-build.yml"

+ 1 - 1
.github/workflows/mobile-crowdin.yml

@@ -30,7 +30,7 @@ jobs:
                   upload_sources: true
                   upload_translations: false
                   download_translations: true
-                  localization_branch_name: crowdin-translations-mobile
+                  localization_branch_name: translations/mobile
                   create_pull_request: true
                   skip_untranslated_strings: true
                   pull_request_title: "[mobile] New translations"

+ 1 - 0
.github/workflows/mobile-internal-release.yml

@@ -54,3 +54,4 @@ jobs:
                   packageName: io.ente.photos
                   releaseFiles: mobile/build/app/outputs/bundle/playstoreRelease/app-playstore-release.aab
                   track: internal
+                  changesNotSentForReview: true

+ 1 - 1
.github/workflows/mobile-lint.yml

@@ -3,7 +3,7 @@ name: "Lint (mobile)"
 on:
     # Run on every push to a branch other than main that changes mobile/
     push:
-        branches-ignore: [main, f-droid, "deploy/**"]
+        branches-ignore: [main, f-droid, "deploy/**", "deploy-f/**"]
         paths:
             - "mobile/**"
             - ".github/workflows/mobile-lint.yml"

+ 1 - 1
.github/workflows/server-lint.yml

@@ -3,7 +3,7 @@ name: "Lint (server)"
 on:
     # Run on every push to a branch other than main that changes server/
     push:
-        branches-ignore: [main, "deploy/**"]
+        branches-ignore: [main, "deploy/**", "deploy-f/**"]
         paths:
             - "server/**"
             - ".github/workflows/server-lint.yml"

+ 5 - 0
.github/workflows/server-publish.yml

@@ -38,3 +38,8 @@ jobs:
                   tags: ${{ inputs.commit }}, latest
                   username: ${{ github.actor }}
                   password: ${{ secrets.GITHUB_TOKEN }}
+
+            - name: Tag as server/ghcr
+              run: |
+                  git tag -f server/ghcr
+                  git push -f origin server/ghcr

+ 34 - 0
.github/workflows/web-crowdin-push.yml

@@ -0,0 +1,34 @@
+name: "Push Crowdin translations (web)"
+
+# This is a variant of web-crowdin.yml that uploads the translated strings in
+# addition to the source strings.
+#
+# This allows us to change the strings in our source code for an automated
+# refactoring (e.g. renaming a key), and then run this workflow to update the
+# data in Crowdin taking our source code as the source of truth.
+
+on:
+    # Trigger manually, or using
+    # `gh workflow run web-crowdin-push.yml --ref <my-branch>`
+    workflow_dispatch:
+
+jobs:
+    push-to-crowdin:
+        runs-on: ubuntu-latest
+
+        steps:
+            - name: Checkout
+              uses: actions/checkout@v4
+
+            - name: Crowdin push
+              uses: crowdin/github-action@v1
+              with:
+                  base_path: "web/"
+                  config: "web/crowdin.yml"
+                  upload_sources: true
+                  upload_translations: true
+                  download_translations: false
+                  project_id: 569613
+              env:
+                  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+                  CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }}

+ 1 - 1
.github/workflows/web-crowdin.yml

@@ -36,7 +36,7 @@ jobs:
                   upload_sources: true
                   upload_translations: false
                   download_translations: true
-                  localization_branch_name: crowdin-translations-web
+                  localization_branch_name: translations/web
                   create_pull_request: true
                   skip_untranslated_strings: true
                   pull_request_title: "[web] New translations"

+ 1 - 1
.github/workflows/web-deploy-accounts.yml

@@ -3,7 +3,7 @@ name: "Deploy (accounts)"
 on:
     push:
         # Run workflow on pushes to the deploy/accounts
-        branches: [deploy/accounts]
+        branches: [deploy/accounts, deploy-f/accounts]
 
 jobs:
     deploy:

+ 1 - 1
.github/workflows/web-deploy-cast.yml

@@ -3,7 +3,7 @@ name: "Deploy (cast)"
 on:
     push:
         # Run workflow on pushes to the deploy/cast
-        branches: [deploy/cast]
+        branches: [deploy/cast, deploy-f/cast]
 
 jobs:
     deploy:

+ 1 - 1
.github/workflows/web-lint.yml

@@ -3,7 +3,7 @@ name: "Lint (web)"
 on:
     # Run on every push to a branch other than main that changes web/
     push:
-        branches-ignore: [main, "deploy/**"]
+        branches-ignore: [main, "deploy/**", "deploy-f/**"]
         paths:
             - "web/**"
             - ".github/workflows/web-lint.yml"

+ 1 - 1
auth/assets/simple-icons

@@ -1 +1 @@
-Subproject commit 8e7701d6a40462733043f54b3849faf35af70a83
+Subproject commit 8a3731352af133a02223a6c7b1f37c4abb096af0

+ 3 - 3
auth/ios/Podfile.lock

@@ -87,7 +87,7 @@ PODS:
   - SDWebImage/Core (5.19.0)
   - Sentry/HybridSDK (8.21.0):
     - SentryPrivate (= 8.21.0)
-  - sentry_flutter (0.0.1):
+  - sentry_flutter (7.19.0):
     - Flutter
     - FlutterMacOS
     - Sentry/HybridSDK (= 8.21.0)
@@ -249,7 +249,7 @@ SPEC CHECKSUMS:
   ReachabilitySwift: 5ae15e16814b5f9ef568963fb2c87aeb49158c66
   SDWebImage: 981fd7e860af070920f249fd092420006014c3eb
   Sentry: ebc12276bd17613a114ab359074096b6b3725203
-  sentry_flutter: dff1df05dc39c83d04f9330b36360fc374574c5e
+  sentry_flutter: 88ebea3f595b0bc16acc5bedacafe6d60c12dcd5
   SentryPrivate: d651efb234cf385ec9a1cdd3eff94b5e78a0e0fe
   share_plus: c3fef564749587fc939ef86ffb283ceac0baf9f5
   shared_preferences_foundation: b4c3b4cddf1c21f02770737f147a3f5da9d39695
@@ -263,4 +263,4 @@ SPEC CHECKSUMS:
 
 PODFILE CHECKSUM: b4e3a7eabb03395b66e81fc061789f61526ee6bb
 
-COCOAPODS: 1.14.3
+COCOAPODS: 1.15.2

+ 0 - 1
auth/lib/l10n/arb/app_ar.arb

@@ -20,7 +20,6 @@
   "codeIssuerHint": "المصدِّر",
   "codeSecretKeyHint": "الرمز السري",
   "codeAccountHint": "الحساب (you@domain.com)",
-  "accountKeyType": "نوع المفتاح",
   "sessionExpired": "انتهت صلاحية الجلسة",
   "@sessionExpired": {
     "description": "Title of the dialog when the users current session is invalid/expired"

+ 0 - 1
auth/lib/l10n/arb/app_de.arb

@@ -20,7 +20,6 @@
   "codeIssuerHint": "Aussteller",
   "codeSecretKeyHint": "Geheimer Schlüssel",
   "codeAccountHint": "Konto (you@domain.com)",
-  "accountKeyType": "Art des Schlüssels",
   "sessionExpired": "Sitzung abgelaufen",
   "@sessionExpired": {
     "description": "Title of the dialog when the users current session is invalid/expired"

+ 0 - 1
auth/lib/l10n/arb/app_en.arb

@@ -20,7 +20,6 @@
   "codeIssuerHint": "Issuer",
   "codeSecretKeyHint": "Secret Key",
   "codeAccountHint": "Account (you@domain.com)",
-  "accountKeyType": "Type of key",
   "sessionExpired": "Session expired",
   "@sessionExpired": {
     "description": "Title of the dialog when the users current session is invalid/expired"

+ 9 - 2
auth/lib/l10n/arb/app_es.arb

@@ -20,7 +20,6 @@
   "codeIssuerHint": "Emisor",
   "codeSecretKeyHint": "Llave Secreta",
   "codeAccountHint": "Cuenta (tu@dominio.com)",
-  "accountKeyType": "Tipo de llave",
   "sessionExpired": "La sesión ha expirado",
   "@sessionExpired": {
     "description": "Title of the dialog when the users current session is invalid/expired"
@@ -113,6 +112,7 @@
   "copied": "Copiado",
   "pleaseTryAgain": "Por favor, inténtalo nuevamente",
   "existingUser": "Usuario existente",
+  "newUser": "Nuevo a Ente",
   "delete": "Borrar",
   "enterYourPasswordHint": "Ingrese su contraseña",
   "forgotPassword": "Olvidé mi contraseña",
@@ -138,6 +138,8 @@
   "enterCodeHint": "Ingrese el código de seis dígitos de su aplicación de autenticación",
   "lostDeviceTitle": "¿Perdió su dispositivo?",
   "twoFactorAuthTitle": "Autenticación de dos factores",
+  "passkeyAuthTitle": "Verificación de llave de acceso",
+  "verifyPasskey": "Verificar llave de acceso",
   "recoverAccount": "Recuperar cuenta",
   "enterRecoveryKeyHint": "Introduzca su clave de recuperación",
   "recover": "Recuperar",
@@ -191,6 +193,8 @@
   "recoveryKeySaveDescription": "Nosotros no almacenamos esta clave, por favor guarde dicha clave de 24 palabras en un lugar seguro.",
   "doThisLater": "Hacer esto más tarde",
   "saveKey": "Guardar Clave",
+  "save": "Guardar",
+  "send": "Enviar",
   "back": "Atrás",
   "createAccount": "Crear cuenta",
   "passwordStrength": "Fortaleza de la contraseña: {passwordStrengthValue}",
@@ -397,5 +401,8 @@
   "signOutOtherDevices": "Cerrar la sesión de otros dispositivos",
   "doNotSignOut": "No cerrar la sesión",
   "hearUsWhereTitle": "¿Cómo conoció Ente? (opcional)",
-  "hearUsExplanation": "No rastreamos las aplicaciones instaladas. ¡Nos ayudaría si nos dijera dónde nos encontró!"
+  "hearUsExplanation": "No rastreamos las aplicaciones instaladas. ¡Nos ayudaría si nos dijera dónde nos encontró!",
+  "passkey": "Llave de acceso",
+  "developerSettingsWarning": "¿Estás seguro de que quieres modificar los ajustes de desarrollador?",
+  "developerSettings": "Ajustes de desarrollador"
 }

+ 0 - 1
auth/lib/l10n/arb/app_fa.arb

@@ -14,7 +14,6 @@
   "codeIssuerHint": "صادر کننده",
   "codeSecretKeyHint": "کلید مخفی",
   "codeAccountHint": "حساب (you@domain.com)",
-  "accountKeyType": "نوع کلید",
   "sessionExpired": "نشست منقضی شده است",
   "@sessionExpired": {
     "description": "Title of the dialog when the users current session is invalid/expired"

+ 0 - 1
auth/lib/l10n/arb/app_fi.arb

@@ -12,7 +12,6 @@
   "codeIssuerHint": "Myöntäjä",
   "codeSecretKeyHint": "Salainen avain",
   "codeAccountHint": "Tili (sinun@jokinosoite.com)",
-  "accountKeyType": "Avaimen tyyppi",
   "sessionExpired": "Istunto on vanheutunut",
   "@sessionExpired": {
     "description": "Title of the dialog when the users current session is invalid/expired"

+ 0 - 1
auth/lib/l10n/arb/app_fr.arb

@@ -20,7 +20,6 @@
   "codeIssuerHint": "Émetteur",
   "codeSecretKeyHint": "Clé secrète",
   "codeAccountHint": "Compte (vous@exemple.com)",
-  "accountKeyType": "Type de clé",
   "sessionExpired": "Session expirée",
   "@sessionExpired": {
     "description": "Title of the dialog when the users current session is invalid/expired"

+ 0 - 1
auth/lib/l10n/arb/app_he.arb

@@ -19,7 +19,6 @@
   "codeIssuerHint": "מנפיק",
   "codeSecretKeyHint": "מפתח סודי",
   "codeAccountHint": "חשבון(you@domain.com)",
-  "accountKeyType": "סוג מפתח",
   "sessionExpired": "זמן החיבור הסתיים",
   "@sessionExpired": {
     "description": "Title of the dialog when the users current session is invalid/expired"

+ 0 - 1
auth/lib/l10n/arb/app_it.arb

@@ -20,7 +20,6 @@
   "codeIssuerHint": "Emittente",
   "codeSecretKeyHint": "Codice segreto",
   "codeAccountHint": "Account (username@dominio.it)",
-  "accountKeyType": "Tipo di chiave",
   "sessionExpired": "Sessione scaduta",
   "@sessionExpired": {
     "description": "Title of the dialog when the users current session is invalid/expired"

+ 0 - 1
auth/lib/l10n/arb/app_ja.arb

@@ -20,7 +20,6 @@
   "codeIssuerHint": "発行者",
   "codeSecretKeyHint": "秘密鍵",
   "codeAccountHint": "アカウント (you@domain.com)",
-  "accountKeyType": "鍵の種類",
   "sessionExpired": "セッションが失効しました",
   "@sessionExpired": {
     "description": "Title of the dialog when the users current session is invalid/expired"

+ 0 - 1
auth/lib/l10n/arb/app_ka.arb

@@ -20,7 +20,6 @@
   "codeIssuerHint": "მომწოდებელი",
   "codeSecretKeyHint": "გასაღები",
   "codeAccountHint": "ანგარიში (you@domain.com)",
-  "accountKeyType": "გასაღების ტიპი",
   "sessionExpired": "სესიის დრო ამოიწურა",
   "@sessionExpired": {
     "description": "Title of the dialog when the users current session is invalid/expired"

+ 0 - 1
auth/lib/l10n/arb/app_nl.arb

@@ -20,7 +20,6 @@
   "codeIssuerHint": "Uitgever",
   "codeSecretKeyHint": "Geheime sleutel",
   "codeAccountHint": "Account (jij@domein.nl)",
-  "accountKeyType": "Type sleutel",
   "sessionExpired": "Sessie verlopen",
   "@sessionExpired": {
     "description": "Title of the dialog when the users current session is invalid/expired"

+ 0 - 1
auth/lib/l10n/arb/app_pl.arb

@@ -20,7 +20,6 @@
   "codeIssuerHint": "Wydawca",
   "codeSecretKeyHint": "Tajny klucz",
   "codeAccountHint": "Konto (ty@domena.com)",
-  "accountKeyType": "Rodzaj klucza",
   "sessionExpired": "Sesja wygasła",
   "@sessionExpired": {
     "description": "Title of the dialog when the users current session is invalid/expired"

+ 0 - 1
auth/lib/l10n/arb/app_pt.arb

@@ -20,7 +20,6 @@
   "codeIssuerHint": "Emissor",
   "codeSecretKeyHint": "Chave secreta",
   "codeAccountHint": "Conta (voce@dominio.com)",
-  "accountKeyType": "Tipo de chave",
   "sessionExpired": "Sessão expirada",
   "@sessionExpired": {
     "description": "Title of the dialog when the users current session is invalid/expired"

+ 0 - 1
auth/lib/l10n/arb/app_ru.arb

@@ -20,7 +20,6 @@
   "codeIssuerHint": "Эмитент",
   "codeSecretKeyHint": "Секретный ключ",
   "codeAccountHint": "Аккаунт (you@domain.com)",
-  "accountKeyType": "Тип ключа",
   "sessionExpired": "Сеанс истек",
   "@sessionExpired": {
     "description": "Title of the dialog when the users current session is invalid/expired"

+ 0 - 1
auth/lib/l10n/arb/app_sv.arb

@@ -16,7 +16,6 @@
   "codeIssuerHint": "Utfärdare",
   "codeSecretKeyHint": "Secret Key",
   "codeAccountHint": "Konto (du@domän.com)",
-  "accountKeyType": "Typ av nyckel",
   "sessionExpired": "Sessionen har gått ut",
   "@sessionExpired": {
     "description": "Title of the dialog when the users current session is invalid/expired"

+ 0 - 1
auth/lib/l10n/arb/app_ti.arb

@@ -20,7 +20,6 @@
   "codeIssuerHint": "ኣዋጂ",
   "codeSecretKeyHint": "ምስጢራዊ መፍትሕ",
   "codeAccountHint": "ሕሳብ (you@domain.com)",
-  "accountKeyType": "ዓይነት መፍትሕ",
   "sessionExpired": "ክፍለ ግዜኡ ኣኺሉ።",
   "@sessionExpired": {
     "description": "Title of the dialog when the users current session is invalid/expired"

+ 0 - 1
auth/lib/l10n/arb/app_tr.arb

@@ -20,7 +20,6 @@
   "codeIssuerHint": "Yayınlayan",
   "codeSecretKeyHint": "Gizli Anahtar",
   "codeAccountHint": "Hesap (ornek@domain.com)",
-  "accountKeyType": "Anahtar türü",
   "sessionExpired": "Oturum süresi doldu",
   "@sessionExpired": {
     "description": "Title of the dialog when the users current session is invalid/expired"

+ 0 - 1
auth/lib/l10n/arb/app_vi.arb

@@ -20,7 +20,6 @@
   "codeIssuerHint": "Nhà phát hành",
   "codeSecretKeyHint": "Khóa bí mật",
   "codeAccountHint": "Tài khoản (bạn@miền.com)",
-  "accountKeyType": "Loại khóa",
   "sessionExpired": "Phiên làm việc đã hết hạn",
   "@sessionExpired": {
     "description": "Title of the dialog when the users current session is invalid/expired"

+ 0 - 1
auth/lib/l10n/arb/app_zh.arb

@@ -20,7 +20,6 @@
   "codeIssuerHint": "发行人",
   "codeSecretKeyHint": "私钥",
   "codeAccountHint": "账户 (you@domain.com)",
-  "accountKeyType": "密钥类型",
   "sessionExpired": "会话已过期",
   "@sessionExpired": {
     "description": "Title of the dialog when the users current session is invalid/expired"

+ 1 - 0
auth/lib/main.dart

@@ -37,6 +37,7 @@ import 'package:window_manager/window_manager.dart';
 final _logger = Logger("main");
 
 Future<void> initSystemTray() async {
+  if (PlatformUtil.isMobile()) return;
   String path = Platform.isWindows
       ? 'assets/icons/auth-icon.ico'
       : 'assets/icons/auth-icon.png';

+ 22 - 7
auth/lib/models/code.dart

@@ -2,6 +2,7 @@ import 'package:ente_auth/utils/totp_util.dart';
 
 class Code {
   static const defaultDigits = 6;
+  static const steamDigits = 5;
   static const defaultPeriod = 30;
 
   int? generatedID;
@@ -57,36 +58,42 @@ class Code {
       updatedAlgo,
       updatedType,
       updatedCounter,
-      "otpauth://${updatedType.name}/$updateIssuer:$updateAccount?algorithm=${updatedAlgo.name}&digits=$updatedDigits&issuer=$updateIssuer&period=$updatePeriod&secret=$updatedSecret${updatedType == Type.hotp ? "&counter=$updatedCounter" : ""}",
+      "otpauth://${updatedType.name}/$updateIssuer:$updateAccount?algorithm=${updatedAlgo.name}"
+      "&digits=$updatedDigits&issuer=$updateIssuer"
+      "&period=$updatePeriod&secret=$updatedSecret${updatedType == Type.hotp ? "&counter=$updatedCounter" : ""}",
       generatedID: generatedID,
     );
   }
 
   static Code fromAccountAndSecret(
+    Type type,
     String account,
     String issuer,
     String secret,
+    int digits,
   ) {
     return Code(
       account,
       issuer,
-      defaultDigits,
+      digits,
       defaultPeriod,
       secret,
       Algorithm.sha1,
-      Type.totp,
+      type,
       0,
-      "otpauth://totp/$issuer:$account?algorithm=SHA1&digits=6&issuer=$issuer&period=30&secret=$secret",
+      "otpauth://${type.name}/$issuer:$account?algorithm=SHA1&digits=$digits&issuer=$issuer&period=30&secret=$secret",
     );
   }
 
   static Code fromRawData(String rawData) {
     Uri uri = Uri.parse(rawData);
+    final issuer = _getIssuer(uri);
+
     try {
       return Code(
         _getAccount(uri),
-        _getIssuer(uri),
-        _getDigits(uri),
+        issuer,
+        _getDigits(uri, issuer),
         _getPeriod(uri),
         getSanitizedSecret(uri.queryParameters['secret']!),
         _getAlgorithm(uri),
@@ -140,10 +147,13 @@ class Code {
     }
   }
 
-  static int _getDigits(Uri uri) {
+  static int _getDigits(Uri uri, String issuer) {
     try {
       return int.parse(uri.queryParameters['digits']!);
     } catch (e) {
+      if (issuer.toLowerCase() == "steam") {
+        return steamDigits;
+      }
       return defaultDigits;
     }
   }
@@ -186,6 +196,8 @@ class Code {
   static Type _getType(Uri uri) {
     if (uri.host == "totp") {
       return Type.totp;
+    } else if (uri.host == "steam") {
+      return Type.steam;
     } else if (uri.host == "hotp") {
       return Type.hotp;
     }
@@ -223,6 +235,9 @@ class Code {
 enum Type {
   totp,
   hotp,
+  steam;
+
+  bool get isTOTPCompatible => this == totp || this == steam;
 }
 
 enum Algorithm {

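The practical effect of the `_getDigits` fallback added above: an otpauth URI that omits the `digits` parameter now resolves to 5 digits when the issuer is Steam, and to the usual 6 otherwise. A minimal standalone Dart sketch of that rule (illustrative only; `digitsFor` is a hypothetical helper, not code from this commit):

```dart
// Standalone illustration of the digit-resolution rule introduced above.
const defaultDigits = 6;
const steamDigits = 5;

int digitsFor(Uri uri, String issuer) {
  final raw = uri.queryParameters['digits'];
  if (raw != null) return int.parse(raw); // an explicit digits param always wins
  // No digits in the URI: Steam codes are 5 digits, everything else defaults to 6.
  return issuer.toLowerCase() == 'steam' ? steamDigits : defaultDigits;
}

void main() {
  final steam =
      Uri.parse('otpauth://steam/Steam:alice?secret=ABCDEF&issuer=Steam');
  final github = Uri.parse(
      'otpauth://totp/GitHub:alice?secret=ABCDEF&issuer=GitHub&digits=6');
  print(digitsFor(steam, 'Steam')); // 5
  print(digitsFor(github, 'GitHub')); // 6
}
```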
+ 10 - 3
auth/lib/onboarding/view/setup_enter_secret_key_page.dart

@@ -61,6 +61,8 @@ class _SetupEnterSecretKeyPageState extends State<SetupEnterSecretKeyPage> {
                   },
                   decoration: InputDecoration(
                     hintText: l10n.codeIssuerHint,
+                    floatingLabelBehavior: FloatingLabelBehavior.auto,
+                    labelText: l10n.codeIssuerHint,
                   ),
                   controller: _issuerController,
                   autofocus: true,
@@ -78,6 +80,8 @@ class _SetupEnterSecretKeyPageState extends State<SetupEnterSecretKeyPage> {
                   },
                   decoration: InputDecoration(
                     hintText: l10n.codeSecretKeyHint,
+                    floatingLabelBehavior: FloatingLabelBehavior.auto,
+                    labelText: l10n.codeSecretKeyHint,
                     suffixIcon: IconButton(
                       onPressed: () {
                         setState(() {
@@ -105,12 +109,12 @@ class _SetupEnterSecretKeyPageState extends State<SetupEnterSecretKeyPage> {
                   },
                   decoration: InputDecoration(
                     hintText: l10n.codeAccountHint,
+                    floatingLabelBehavior: FloatingLabelBehavior.auto,
+                    labelText: l10n.codeAccountHint,
                   ),
                   controller: _accountController,
                 ),
-                const SizedBox(
-                  height: 40,
-                ),
+                const SizedBox(height: 40),
                 SizedBox(
                   width: 400,
                   child: OutlinedButton(
@@ -152,6 +156,7 @@ class _SetupEnterSecretKeyPageState extends State<SetupEnterSecretKeyPage> {
       final account = _accountController.text.trim();
       final issuer = _issuerController.text.trim();
       final secret = _secretController.text.trim().replaceAll(' ', '');
+      final isStreamCode = issuer.toLowerCase() == "steam";
       if (widget.code != null && widget.code!.secret != secret) {
         ButtonResult? result = await showChoiceActionSheet(
           context,
@@ -168,9 +173,11 @@ class _SetupEnterSecretKeyPageState extends State<SetupEnterSecretKeyPage> {
       }
       final Code newCode = widget.code == null
           ? Code.fromAccountAndSecret(
+              isStreamCode ? Type.steam : Type.totp,
               account,
               issuer,
               secret,
+              isStreamCode ? Code.steamDigits : Code.defaultDigits,
             )
           : widget.code!.copyWith(
               account: account,

+ 5 - 5
auth/lib/ui/code_widget.dart

@@ -53,7 +53,7 @@ class _CodeWidgetState extends State<CodeWidget> {
       String newCode = _getCurrentOTP();
       if (newCode != _currentCode.value) {
         _currentCode.value = newCode;
-        if (widget.code.type == Type.totp) {
+        if (widget.code.type.isTOTPCompatible) {
           _nextCode.value = _getNextTotp();
         }
       }
@@ -78,7 +78,7 @@ class _CodeWidgetState extends State<CodeWidget> {
     _shouldShowLargeIcon = PreferenceService.instance.shouldShowLargeIcons();
     if (!_isInitialized) {
       _currentCode.value = _getCurrentOTP();
-      if (widget.code.type == Type.totp) {
+      if (widget.code.type.isTOTPCompatible) {
         _nextCode.value = _getNextTotp();
       }
       _isInitialized = true;
@@ -213,7 +213,7 @@ class _CodeWidgetState extends State<CodeWidget> {
         crossAxisAlignment: CrossAxisAlignment.start,
         mainAxisAlignment: MainAxisAlignment.center,
         children: [
-          if (widget.code.type == Type.totp)
+          if (widget.code.type.isTOTPCompatible)
             CodeTimerProgress(
               period: widget.code.period,
             ),
@@ -263,7 +263,7 @@ class _CodeWidgetState extends State<CodeWidget> {
               },
             ),
           ),
-          widget.code.type == Type.totp
+          widget.code.type.isTOTPCompatible
               ? GestureDetector(
                   onTap: () {
                     _copyNextToClipboard();
@@ -481,7 +481,7 @@ class _CodeWidgetState extends State<CodeWidget> {
 
   String _getNextTotp() {
     try {
-      assert(widget.code.type == Type.totp);
+      assert(widget.code.type.isTOTPCompatible);
       return getNextTotp(widget.code);
     } catch (e) {
       return context.l10n.error;

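The `isTOTPCompatible` checks above exist because Steam codes are also time-based: UI paths that previously special-cased `Type.totp` now need to cover `Type.steam` as well. A small self-contained sketch of the extended enum (mirroring the `code.dart` change, for illustration only):

```dart
// Illustrative copy of the extended Type enum: steam joins totp as a
// time-based code, so widgets check isTOTPCompatible instead of comparing
// against Type.totp directly.
enum Type {
  totp,
  hotp,
  steam;

  bool get isTOTPCompatible => this == totp || this == steam;
}

void main() {
  for (final t in Type.values) {
    // totp: true, hotp: false, steam: true
    print('${t.name}: ${t.isTOTPCompatible}');
  }
}
```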
+ 2 - 0
auth/lib/ui/settings/data/import/bitwarden_import.dart

@@ -92,9 +92,11 @@ Future<int?> _processBitwardenExportFile(
         var account = item['login']['username'];
 
         code = Code.fromAccountAndSecret(
+          Type.totp,
           account,
           issuer,
           totp,
+          Code.defaultDigits,
         );
       }
 

+ 2 - 2
auth/lib/utils/totp_util.dart

@@ -3,7 +3,7 @@ import 'package:flutter/foundation.dart';
 import 'package:otp/otp.dart' as otp;
 
 String getOTP(Code code) {
-  if(code.type == Type.hotp) {
+  if (code.type == Type.hotp) {
     return _getHOTPCode(code);
   }
   return otp.OTP.generateTOTPCodeString(
@@ -60,4 +60,4 @@ String safeDecode(String value) {
     debugPrint("Failed to decode $e");
     return value;
   }
-}
+}

+ 1 - 1
auth/linux/packaging/rpm/make_config.yaml

@@ -11,7 +11,7 @@ display_name: Auth
 
 requires:
   - libsqlite3x
-  - webkit2gtk-4.0
+  - webkit2gtk4.0
   - libsodium
   - libsecret
   - libappindicator

+ 3 - 3
auth/pubspec.lock

@@ -293,9 +293,9 @@ packages:
     dependency: "direct main"
     description:
       path: "packages/desktop_webview_window"
-      ref: HEAD
-      resolved-ref: "8cbbf9cd6efcfee5e0f420a36f7f8e7e64b667a1"
-      url: "https://github.com/MixinNetwork/flutter-plugins"
+      ref: fix-webkit-version
+      resolved-ref: fe2223e4edfecdbb3a97bb9e3ced73db4ae9d979
+      url: "https://github.com/ente-io/flutter-desktopwebview-fork"
     source: git
     version: "0.2.4"
   device_info_plus:

+ 3 - 2
auth/pubspec.yaml

@@ -1,6 +1,6 @@
 name: ente_auth
 description: ente two-factor authenticator
-version: 2.0.55+255
+version: 2.0.57+257
 publish_to: none
 
 environment:
@@ -20,7 +20,8 @@ dependencies:
   convert: ^3.1.1
   desktop_webview_window:
     git:
-      url: https://github.com/MixinNetwork/flutter-plugins
+      url: https://github.com/ente-io/flutter-desktopwebview-fork
+      ref: fix-webkit-version
       path: packages/desktop_webview_window
   device_info_plus: ^9.1.1
   dio: ^5.4.0

+ 8 - 1
cli/README.md

@@ -36,7 +36,8 @@ ente --help
 
 ### Accounts
 
-If you wish, you can add multiple accounts (your own and that of your family members) and export all data using this tool.
+If you wish, you can add multiple accounts (your own and that of your family
+members) and export all data using this tool.
 
 #### Add an account
 
@@ -44,6 +45,12 @@ If you wish, you can add multiple accounts (your own and that of your family mem
 ente account add
 ```
 
+> [!NOTE]
+>
+> `ente account add` does not create new accounts, it just adds pre-existing
+> accounts to the list of accounts that the CLI knows about so that you can use
+> them for other actions.
+
 #### List accounts
 
 ```shell

+ 2 - 1
cli/cmd/account.go

@@ -27,7 +27,8 @@ var listAccCmd = &cobra.Command{
 // Subcommand for 'account add'
 var addAccCmd = &cobra.Command{
 	Use:   "add",
-	Short: "Add a new account",
+	Short: "login into existing account",
+	Long:  "Use this command to add an existing account to cli. For creating a new account, use the mobile,web or desktop app",
 	Run: func(cmd *cobra.Command, args []string) {
 		recoverWithLog()
 		ctrl.AddAccount(context.Background())

+ 1 - 1
cli/docs/generated/ente.md

@@ -25,4 +25,4 @@ ente [flags]
 * [ente export](ente_export.md)	 - Starts the export process
 * [ente version](ente_version.md)	 - Prints the current version
 
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

+ 2 - 2
cli/docs/generated/ente_account.md

@@ -11,9 +11,9 @@ Manage account settings
 ### SEE ALSO
 
 * [ente](ente.md)	 - CLI tool for exporting your photos from ente.io
-* [ente account add](ente_account_add.md)	 - Add a new account
+* [ente account add](ente_account_add.md)	 - login into existing account
 * [ente account get-token](ente_account_get-token.md)	 - Get token for an account for a specific app
 * [ente account list](ente_account_list.md)	 - list configured accounts
 * [ente account update](ente_account_update.md)	 - Update an existing account's export directory
 
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

+ 6 - 2
cli/docs/generated/ente_account_add.md

@@ -1,6 +1,10 @@
 ## ente account add
 
-Add a new account
+login into existing account
+
+### Synopsis
+
+Use this command to add an existing account to cli. For creating a new account, use the mobile,web or desktop app
 
 ```
 ente account add [flags]
@@ -16,4 +20,4 @@ ente account add [flags]
 
 * [ente account](ente_account.md)	 - Manage account settings
 
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

+ 1 - 1
cli/docs/generated/ente_account_get-token.md

@@ -18,4 +18,4 @@ ente account get-token [flags]
 
 * [ente account](ente_account.md)	 - Manage account settings
 
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

+ 1 - 1
cli/docs/generated/ente_account_list.md

@@ -16,4 +16,4 @@ ente account list [flags]
 
 * [ente account](ente_account.md)	 - Manage account settings
 
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

+ 1 - 1
cli/docs/generated/ente_account_update.md

@@ -19,4 +19,4 @@ ente account update [flags]
 
 * [ente account](ente_account.md)	 - Manage account settings
 
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

+ 1 - 1
cli/docs/generated/ente_admin.md

@@ -21,4 +21,4 @@ Commands for admin actions like disable or enabling 2fa, bumping up the storage
 * [ente admin list-users](ente_admin_list-users.md)	 - List all users
 * [ente admin update-subscription](ente_admin_update-subscription.md)	 - Update subscription for user
 
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

+ 1 - 1
cli/docs/generated/ente_admin_delete-user.md

@@ -18,4 +18,4 @@ ente admin delete-user [flags]
 
 * [ente admin](ente_admin.md)	 - Commands for admin actions
 
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

+ 1 - 1
cli/docs/generated/ente_admin_disable-2fa.md

@@ -18,4 +18,4 @@ ente admin disable-2fa [flags]
 
 * [ente admin](ente_admin.md)	 - Commands for admin actions
 
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

+ 1 - 1
cli/docs/generated/ente_admin_get-user-id.md

@@ -18,4 +18,4 @@ ente admin get-user-id [flags]
 
 * [ente admin](ente_admin.md)	 - Commands for admin actions
 
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

+ 1 - 1
cli/docs/generated/ente_admin_list-users.md

@@ -17,4 +17,4 @@ ente admin list-users [flags]
 
 * [ente admin](ente_admin.md)	 - Commands for admin actions
 
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

+ 1 - 1
cli/docs/generated/ente_admin_update-subscription.md

@@ -23,4 +23,4 @@ ente admin update-subscription [flags]
 
 * [ente admin](ente_admin.md)	 - Commands for admin actions
 
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

+ 1 - 1
cli/docs/generated/ente_auth.md

@@ -13,4 +13,4 @@ Authenticator commands
 * [ente](ente.md)	 - CLI tool for exporting your photos from ente.io
 * [ente auth decrypt](ente_auth_decrypt.md)	 - Decrypt authenticator export
 
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

+ 1 - 1
cli/docs/generated/ente_auth_decrypt.md

@@ -16,4 +16,4 @@ ente auth decrypt [input] [output] [flags]
 
 * [ente auth](ente_auth.md)	 - Authenticator commands
 
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

+ 1 - 1
cli/docs/generated/ente_export.md

@@ -16,4 +16,4 @@ ente export [flags]
 
 * [ente](ente.md)	 - CLI tool for exporting your photos from ente.io
 
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

+ 1 - 1
cli/docs/generated/ente_version.md

@@ -16,4 +16,4 @@ ente version [flags]
 
 * [ente](ente.md)	 - CLI tool for exporting your photos from ente.io
 
-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

+ 1 - 1
cli/pkg/account.go

@@ -59,7 +59,7 @@ func (c *ClICtrl) AddAccount(cxt context.Context) {
 		authResponse, flowErr = c.validateTOTP(cxt, authResponse)
 	}
 	if authResponse.EncryptedToken == "" || authResponse.KeyAttributes == nil {
-		panic("no encrypted token or keyAttributes")
+		log.Fatalf("missing key attributes or token.\nNote: Please use the mobile,web or desktop app to create a new account.\nIf you are trying to login to an existing account, report a bug.")
 	}
 	secretInfo, decErr := c.decryptAccSecretInfo(cxt, authResponse, keyEncKey)
 	if decErr != nil {

+ 19 - 4
desktop/.eslintrc.js

@@ -1,21 +1,36 @@
 /* eslint-env node */
 module.exports = {
+    root: true,
     extends: [
         "eslint:recommended",
         "plugin:@typescript-eslint/eslint-recommended",
-        /* What we really want eventually */
-        // "plugin:@typescript-eslint/strict-type-checked",
-        // "plugin:@typescript-eslint/stylistic-type-checked",
+        "plugin:@typescript-eslint/strict-type-checked",
+        "plugin:@typescript-eslint/stylistic-type-checked",
     ],
     plugins: ["@typescript-eslint"],
     parser: "@typescript-eslint/parser",
     parserOptions: {
         project: true,
     },
-    root: true,
     ignorePatterns: [".eslintrc.js", "app", "out", "dist"],
     env: {
         es2022: true,
         node: true,
     },
+    rules: {
+        /* Allow numbers to be used in template literals */
+        "@typescript-eslint/restrict-template-expressions": [
+            "error",
+            {
+                allowNumber: true,
+            },
+        ],
+        /* Allow void expressions as the entire body of an arrow function */
+        "@typescript-eslint/no-confusing-void-expression": [
+            "error",
+            {
+                ignoreArrowShorthand: true,
+            },
+        ],
+    },
 };

+ 0 - 55
desktop/.github/workflows/build.yml

@@ -1,55 +0,0 @@
-name: Build/release
-
-on:
-    push:
-        tags:
-            - v*
-
-jobs:
-    release:
-        runs-on: ${{ matrix.os }}
-
-        strategy:
-            matrix:
-                os: [macos-latest, ubuntu-latest, windows-latest]
-
-        steps:
-            - name: Check out Git repository
-              uses: actions/checkout@v3
-              with:
-                  submodules: recursive
-
-            - name: Install Node.js, NPM and Yarn
-              uses: actions/setup-node@v3
-              with:
-                  node-version: 20
-
-            - name: Prepare for app notarization
-              if: startsWith(matrix.os, 'macos')
-              # Import Apple API key for app notarization on macOS
-              run: |
-                  mkdir -p ~/private_keys/
-                  echo '${{ secrets.api_key }}' > ~/private_keys/AuthKey_${{ secrets.api_key_id }}.p8
-
-            - name: Install libarchive-tools for pacman build # Related https://github.com/electron-userland/electron-builder/issues/4181
-              if: startsWith(matrix.os, 'ubuntu')
-              run: sudo apt-get install libarchive-tools
-
-            - name: Ente Electron Builder Action
-              uses: ente-io/action-electron-builder@v1.0.0
-              with:
-                  # GitHub token, automatically provided to the action
-                  # (No need to define this secret in the repo settings)
-                  github_token: ${{ secrets.github_token }}
-
-                  # If the commit is tagged with a version (e.g. "v1.0.0"),
-                  # release the app after building
-                  release: ${{ startsWith(github.ref, 'refs/tags/v') }}
-
-                  mac_certs: ${{ secrets.mac_certs }}
-                  mac_certs_password: ${{ secrets.mac_certs_password }}
-              env:
-                  # macOS notarization API key
-                  API_KEY_ID: ${{ secrets.api_key_id }}
-                  API_KEY_ISSUER_ID: ${{ secrets.api_key_issuer_id}}
-                  USE_HARD_LINKS: false

+ 80 - 0
desktop/.github/workflows/desktop-release.yml

@@ -0,0 +1,80 @@
+name: "Release"
+
+# Build the ente-io/ente's desktop/rc branch and create/update a draft release.
+#
+# For more details, see `docs/release.md` in ente-io/ente.
+
+on:
+    # Trigger manually or `gh workflow run desktop-release.yml`.
+    workflow_dispatch:
+    push:
+        # Run when a tag matching the pattern "v*"" is pushed.
+        #
+        # See: [Note: Testing release workflows that are triggered by tags].
+        tags:
+            - "v*"
+
+jobs:
+    release:
+        runs-on: ${{ matrix.os }}
+
+        defaults:
+            run:
+                working-directory: desktop
+
+        strategy:
+            matrix:
+                os: [macos-latest, ubuntu-latest, windows-latest]
+
+        steps:
+            - name: Checkout code
+              uses: actions/checkout@v4
+              with:
+                  # Checkout the desktop/rc branch from the source repository.
+                  repository: ente-io/ente
+                  ref: desktop/rc
+                  submodules: recursive
+
+            - name: Setup node
+              uses: actions/setup-node@v4
+              with:
+                  node-version: 20
+
+            - name: Increase yarn timeout
+              # `yarn install` times out sometimes on the Windows runner,
+              # resulting in flaky builds.
+              run: yarn config set network-timeout 900000 -g
+
+            - name: Install dependencies
+              run: yarn install
+
+            - name: Install libarchive-tools for pacman build
+              if: startsWith(matrix.os, 'ubuntu')
+              # See:
+              # https://github.com/electron-userland/electron-builder/issues/4181
+              run: sudo apt-get install libarchive-tools
+
+            - name: Build
+              uses: ente-io/action-electron-builder@v1.0.0
+              with:
+                  package_root: desktop
+                  build_script_name: build:ci
+
+                  # GitHub token, automatically provided to the action
+                  # (No need to define this secret in the repo settings)
+                  github_token: ${{ secrets.GITHUB_TOKEN }}
+
+                  # If the commit is tagged with a version (e.g. "v1.0.0"),
+                  # create a (draft) release after building. Otherwise upload
+                  # assets to the existing draft named after the version.
+                  release: ${{ startsWith(github.ref, 'refs/tags/v') }}
+
+                  mac_certs: ${{ secrets.MAC_CERTS }}
+                  mac_certs_password: ${{ secrets.MAC_CERTS_PASSWORD }}
+              env:
+                  # macOS notarization credentials key details
+                  APPLE_ID: ${{ secrets.APPLE_ID }}
+                  APPLE_APP_SPECIFIC_PASSWORD:
+                      ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
+                  APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
+                  USE_HARD_LINKS: false

+ 8 - 0
desktop/CHANGELOG.md

@@ -1,5 +1,13 @@
 # CHANGELOG
 
+## v1.7.0 (Unreleased)
+
+v1.7 is a major rewrite to improve the security of our app. We have enabled
+sandboxing and disabled node integration for the renderer process. All this
+required restructuring our IPC mechanisms, which resulted in a lot of under the
+hood changes. The outcome is a more secure app that also uses the latest and
+greatest Electron recommendations.
+
 ## v1.6.63
 
 ### New

+ 0 - 6
desktop/README.md

@@ -10,12 +10,6 @@ To know more about Ente, see [our main README](../README.md) or visit
 
 ## Building from source
 
-> [!CAUTION]
->
-> We're improving the security of the desktop app further by migrating to
-> Electron's sandboxing and contextIsolation. These updates are still WIP and
-> meanwhile the instructions below might not fully work on the main branch.
-
 Fetch submodules
 
 ```sh

+ 3 - 0
desktop/docs/dependencies.md

@@ -90,6 +90,9 @@ Some extra ones specific to the code here are:
     Unix commands in our `package.json` scripts. This allows us to use the same
     commands (like `ln`) across different platforms like Linux and Windows.
 
+-   [@tsconfig/recommended](https://github.com/tsconfig/bases) gives us a base
+    tsconfig for the Node.js version that our current Electron version uses.
+
 ## Functionality
 
 ### Format conversion

+ 56 - 51
desktop/docs/release.md

@@ -1,43 +1,68 @@
 ## Releases
 
-> [!NOTE]
->
-> TODO(MR): This document needs to be audited and changed as we do the first
-> release from this new monorepo.
+Conceptually, the release is straightforward: We trigger a GitHub workflow that
+creates a draft release with artifacts built. When ready, we publish that
+release. The download links on our website, and existing apps already check the
+latest GitHub release and update accordingly.
 
-The Github Action that builds the desktop binaries is triggered by pushing a tag
-matching the pattern `photos-desktop-v1.2.3`. This value should match the
-version in `package.json`.
+The complication comes from the fact that electron-builder's auto updater (the
+mechanism that we use for auto updates) doesn't work with monorepos. So we need
+to keep a separate (non-mono) repository just for doing releases.
 
-So the process for doing a release would be.
+-   Source code lives here, in [ente-io/ente](https://github.com/ente-io/ente).
 
-1. Create a new branch (can be named anything). On this branch, include your
-   changes.
+-   Releases are done from
+    [ente-io/photos-desktop](https://github.com/ente-io/photos-desktop).
 
-2. Mention the changes in `CHANGELOG.md`.
+## Workflow - Release Candidates
 
-3. Changing the `version` in `package.json` to `1.x.x`.
+Leading up to the release, we can make one or more draft releases that are not
+intended to be published, but serve as test release candidates.
 
-4. Commit and push to remote
+The workflow for making such "rc" builds is:
+
+1.  Update `package.json` in the source repo to use version `1.x.x-rc`. Create a
+    new draft release in the release repo with title `1.x.x-rc`. In the tag
+    input enter `v1.x.x-rc` and select the option to "create a new tag on
+    publish".
+
+2.  Push code to the `desktop/rc` branch in the source repo.
+
+3.  Trigger the GitHub action in the release repo
+
+    ```sh
+    gh workflow run desktop-release.yml
+    ```
+
+We can do steps 2 and 3 multiple times; each time it'll just update the
+artifacts attached to the same draft.
+
+## Workflow - Release
+
+1.  Update `package.json` in the source repo to use version `1.x.x`. Create a
+    new draft release in the release repo with tag `v1.x.x`.
+
+2.  Push code to the `desktop/rc` branch in the source repo. Remember to
+    update the CHANGELOG.
+
+3.  In the release repo
 
     ```sh
-    git add package.json && git commit -m 'Release v1.x.x'
-    git tag v1.x.x
-    git push && git push --tags
+    ./.github/trigger-release.sh v1.x.x
     ```
 
-This by itself will already trigger a new release. The GitHub action will create
-a new draft release that can then be used as descibed below.
+4.  If the build is successful, tag `desktop/rc` and merge it into main:
 
-To wrap up, we also need to merge back these changes into main. So for that,
+    ```sh
+    # Assuming we're on desktop/rc that just got built
 
-5. Open a PR for the branch that we're working on (where the above tag was
-   pushed from) to get it merged into main.
+    git tag photosd-v1.x.x
+    git push origin photosd-v1.x.x
+
+    # Now open a PR to merge it into main
+    ```
 
-6. In this PR, also increase the version number for the next release train. That
-   is, supposed we just released `v4.0.1`. Then we'll change the version number
-   in main to `v4.0.2-next.0`. Each pre-release will modify the `next.0` part.
-   Finally, at the time of the next release, this'll become `v4.0.2`.
+## Post build
 
 The GitHub Action runs on Windows, Linux and macOS. It produces the artifacts
 defined in the `build` value in `package.json`.
@@ -46,29 +71,11 @@ defined in the `build` value in `package.json`.
 -   Linux - An AppImage, and 3 other packages (`.rpm`, `.deb`, `.pacman`)
 -   macOS - A universal DMG
 
-Additionally, the GitHub action notarizes the macOS DMG. For this it needs
-credentials provided via GitHub secrets.
-
-During the build the Sentry webpack plugin checks to see if SENTRY_AUTH_TOKEN is
-defined. If so, it uploads the sourcemaps for the renderer process to Sentry
-(For our GitHub action, the SENTRY_AUTH_TOKEN is defined as a GitHub secret).
-
-The sourcemaps for the main (node) process are currently not sent to Sentry
-(this works fine in practice since the node process files are not minified, we
-only run `tsc`).
-
-Once the build is done, a draft release with all these artifacts attached is
-created. The build is idempotent, so if something goes wrong and we need to
-re-run the GitHub action, just delete the draft release (if it got created) and
-start a new run by pushing a new tag (if some code changes are required).
-
-If no code changes are required, say the build failed for some transient network
-or sentry issue, we can even be re-run by the build by going to Github Action
-age and rerun from there. This will re-trigger for the same tag.
+Additionally, the GitHub action notarizes and signs the macOS DMG (For this it
+uses credentials provided via GitHub secrets).
 
-If everything goes well, we'll have a release on GitHub, and the corresponding
-source maps for the renderer process uploaded to Sentry. There isn't anything
-else to do:
+To roll out the build, we need to publish the draft release. Thereafter,
+everything is automated:
 
 -   The website automatically redirects to the latest release on GitHub when
     people try to download.
@@ -76,7 +83,7 @@ else to do:
 -   The file formats with support auto update (Windows `exe`, the Linux AppImage
     and the macOS DMG) also check the latest GitHub release automatically to
     download and apply the update (the rest of the formats don't support auto
-    updates).
+    updates yet).
 
 -   We're not putting the desktop app in other stores currently. It is available
     as a `brew cask`, but we only had to open a PR to add the initial formula,
@@ -87,6 +94,4 @@ else to do:
 We can also publish the draft releases by checking the "pre-release" option.
 Such releases don't cause any of the channels (our website, or the desktop app
 auto updater, or brew) to be notified, instead these are useful for giving links
-to pre-release builds to customers. Generally, in the version number for these
-we'll add a label to the version, e.g. the "beta.x" in `1.x.x-beta.x`. This
-should be done both in `package.json`, and what we tag the commit with.
+to pre-release builds to customers.

+ 0 - 1
desktop/electron-builder.yml

@@ -29,4 +29,3 @@ mac:
         arch: [universal]
     category: public.app-category.photography
     hardenedRuntime: true
-afterSign: electron-builder-notarize

+ 11 - 6
desktop/package.json

@@ -1,8 +1,9 @@
 {
     "name": "ente",
-    "version": "1.6.63",
+    "version": "1.7.0-rc",
     "private": true,
     "description": "Desktop client for Ente Photos",
+    "repository": "github:ente-io/photos-desktop",
     "author": "Ente <code@ente.io>",
     "main": "app/main.js",
     "scripts": {
@@ -10,13 +11,17 @@
         "build-main": "tsc && electron-builder",
         "build-main:quick": "tsc && electron-builder --dir --config.compression=store --config.mac.identity=null",
         "build-renderer": "cd ../web && yarn install && yarn build:photos && cd ../desktop && shx rm -f out && shx ln -sf ../web/apps/photos/out out",
+        "build:ci": "yarn build-renderer && tsc",
         "build:quick": "yarn build-renderer && yarn build-main:quick",
         "dev": "concurrently --kill-others --success first --names 'main,rndr' \"yarn dev-main\" \"yarn dev-renderer\"",
         "dev-main": "tsc && electron app/main.js",
         "dev-renderer": "cd ../web && yarn install && yarn dev:photos",
         "postinstall": "electron-builder install-app-deps",
-        "lint": "yarn prettier --check . && eslint --ext .ts src",
-        "lint-fix": "yarn prettier --write . && eslint --fix --ext .ts src"
+        "lint": "yarn prettier --check --log-level warn . && eslint --ext .ts src && yarn tsc",
+        "lint-fix": "yarn prettier --write --log-level warn . && eslint --fix --ext .ts src && yarn tsc"
+    },
+    "resolutions": {
+        "jackspeak": "2.1.1"
     },
     "dependencies": {
         "any-shell-escape": "^0.1",
@@ -34,14 +39,14 @@
         "onnxruntime-node": "^1.17"
     },
     "devDependencies": {
+        "@tsconfig/node20": "^20.1.4",
         "@types/auto-launch": "^5.0",
         "@types/ffmpeg-static": "^3.0",
         "@typescript-eslint/eslint-plugin": "^7",
         "@typescript-eslint/parser": "^7",
         "concurrently": "^8",
-        "electron": "^29",
-        "electron-builder": "^24",
-        "electron-builder-notarize": "^1.5",
+        "electron": "^30",
+        "electron-builder": "25.0.0-alpha.6",
         "eslint": "^8",
         "prettier": "^3",
         "prettier-plugin-organize-imports": "^3",

+ 66 - 87
desktop/src/main.ts

@@ -26,7 +26,7 @@ import { createWatcher } from "./main/services/watch";
 import { userPreferences } from "./main/stores/user-preferences";
 import { migrateLegacyWatchStoreIfNeeded } from "./main/stores/watch";
 import { registerStreamProtocol } from "./main/stream";
-import { isDev } from "./main/utils-electron";
+import { isDev } from "./main/utils/electron";
 
 /**
  * The URL where the renderer HTML is being served from.
@@ -127,54 +127,22 @@ const registerPrivilegedSchemes = () => {
         {
             scheme: "stream",
             privileges: {
-                // TODO(MR): Remove the commented bits if we don't end up
-                // needing them by the time the IPC refactoring is done.
-
-                // Prevent the insecure origin issues when fetching this
-                // secure: true,
-                // Allow the web fetch API in the renderer to use this scheme.
                 supportFetchAPI: true,
-                // Allow it to be used with video tags.
-                // stream: true,
             },
         },
     ]);
 };
 
-/**
- * [Note: Increased disk cache for the desktop app]
- *
- * Set the "disk-cache-size" command line flag to ask the Chromium process to
- * use a larger size for the caches that it keeps on disk. This allows us to use
- * the web based caching mechanisms on both the web and the desktop app, just
- * ask the embedded Chromium to be a bit more generous in disk usage when
- * running as the desktop app.
- *
- * The size we provide is in bytes.
- * https://www.electronjs.org/docs/latest/api/command-line-switches#--disk-cache-sizesize
- *
- * Note that increasing the disk cache size does not guarantee that Chromium
- * will respect in verbatim, it uses its own heuristics atop this hint.
- * https://superuser.com/questions/378991/what-is-chrome-default-cache-size-limit/1577693#1577693
- *
- * See also: [Note: Caching files].
- */
-const increaseDiskCache = () =>
-    app.commandLine.appendSwitch(
-        "disk-cache-size",
-        `${5 * 1024 * 1024 * 1024}`, // 5 GB
-    );
-
 /**
  * Create and return the {@link BrowserWindow} that will form our app's UI.
  *
  * This window will show the HTML served from {@link rendererURL}.
  */
-const createMainWindow = async () => {
+const createMainWindow = () => {
     // Create the main window. This'll show our web content.
     const window = new BrowserWindow({
         webPreferences: {
-            preload: path.join(app.getAppPath(), "preload.js"),
+            preload: path.join(__dirname, "preload.js"),
             sandbox: true,
         },
         // The color to show in the window until the web content gets loaded.
@@ -184,7 +152,7 @@ const createMainWindow = async () => {
         show: false,
     });
 
-    const wasAutoLaunched = await autoLauncher.wasAutoLaunched();
+    const wasAutoLaunched = autoLauncher.wasAutoLaunched();
     if (wasAutoLaunched) {
         // Don't automatically show the app's window if we were auto-launched.
         // On macOS, also hide the dock icon on macOS.
@@ -198,7 +166,7 @@ const createMainWindow = async () => {
     if (isDev) window.webContents.openDevTools();
 
     window.webContents.on("render-process-gone", (_, details) => {
-        log.error(`render-process-gone: ${details}`);
+        log.error(`render-process-gone: ${details.reason}`);
         window.webContents.reload();
     });
 
@@ -227,7 +195,7 @@ const createMainWindow = async () => {
     });
 
     window.on("show", () => {
-        if (process.platform == "darwin") app.dock.show();
+        if (process.platform == "darwin") void app.dock.show();
     });
 
     // Let ipcRenderer know when mainWindow is in the foreground so that it can
@@ -281,7 +249,7 @@ export const allowExternalLinks = (webContents: WebContents) => {
     // Returning `action` "deny" accomplishes this.
     webContents.setWindowOpenHandler(({ url }) => {
         if (!url.startsWith(rendererURL)) {
-            shell.openExternal(url);
+            void shell.openExternal(url);
             return { action: "deny" };
         } else {
             return { action: "allow" };
@@ -319,30 +287,46 @@ const setupTrayItem = (mainWindow: BrowserWindow) => {
 
 /**
  * Older versions of our app used to maintain a cache dir using the main
- * process. This has been deprecated in favor of using a normal web cache.
+ * process. This has been removed in favor of caching on the web layer.
+ *
+ * Delete the old cache dir if it exists.
+ *
+ * This will happen in two phases. The cache had three subdirectories:
+ *
+ * - Two of them, "thumbs" and "files", will be removed now (v1.7.0, May 2024).
  *
- * See [Note: Increased disk cache for the desktop app]
+ * - The third one, "face-crops", will be removed once we finish the face search
+ *   changes. See: [Note: Legacy face crops].
  *
- * Delete the old cache dir if it exists. This code was added March 2024, and
- * can be removed after some time once most people have upgraded to newer
- * versions.
+ * This migration code can be removed after some time once most people have
+ * upgraded to newer versions.
  */
 const deleteLegacyDiskCacheDirIfExists = async () => {
-    // The existing code was passing "cache" as a parameter to getPath. This is
-    // incorrect if we go by the types - "cache" is not a valid value for the
-    // parameter to `app.getPath`.
+    const removeIfExists = async (dirPath: string) => {
+        if (existsSync(dirPath)) {
+            log.info(`Removing legacy disk cache from ${dirPath}`);
+            await fs.rm(dirPath, { recursive: true });
+        }
+    };
+    // [Note: Getting the cache path]
+    //
+    // The existing code was passing "cache" as a parameter to getPath.
     //
-    // It might be an issue in the types, since at runtime it seems to work. For
-    // example, on macOS I get `~/Library/Caches`.
+    // However, "cache" is not a valid parameter to getPath. It works! (for
+    // example, on macOS I get `~/Library/Caches`), but it is intentionally not
+    // documented as part of the public API:
+    //
+    // - docs: remove "cache" from app.getPath
+    //   https://github.com/electron/electron/pull/33509
     //
     // Irrespective, we replicate the original behaviour so that we get back the
-    // same path that the old got was getting.
+    // same path that the old code was getting.
     //
-    // @ts-expect-error
+    // @ts-expect-error "cache" works but is not part of the public API.
     const cacheDir = path.join(app.getPath("cache"), "ente");
     if (existsSync(cacheDir)) {
-        log.info(`Removing legacy disk cache from ${cacheDir}`);
-        await fs.rm(cacheDir, { recursive: true });
+        await removeIfExists(path.join(cacheDir, "thumbs"));
+        await removeIfExists(path.join(cacheDir, "files"));
     }
 };
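
Aside: the web-layer cache that replaces this legacy disk cache is not part of
this diff. As a rough sketch under that assumption, the renderer can use the
standard CacheStorage API; the cache name "ente-thumbs" below is illustrative,
not necessarily what the web code uses.

    // Fetch a URL, serving it from (and filling) a named browser-side cache.
    const cachedFetch = async (url: string): Promise<Response> => {
        const cache = await caches.open("ente-thumbs");
        const hit = await cache.match(url);
        if (hit) return hit;
        const res = await fetch(url);
        // Store a clone so the caller can still consume the original body.
        await cache.put(url, res.clone());
        return res;
    };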
 
@@ -375,7 +359,6 @@ const main = () => {
     // The order of the next two calls is important
     setupRendererServer();
     registerPrivilegedSchemes();
-    increaseDiskCache();
     migrateLegacyWatchStoreIfNeeded();
 
     app.on("second-instance", () => {
@@ -390,39 +373,35 @@ const main = () => {
     // Emitted once, when Electron has finished initializing.
     //
     // Note that some Electron APIs can only be used after this event occurs.
-    app.on("ready", async () => {
-        // Create window and prepare for the renderer.
-        mainWindow = await createMainWindow();
-        attachIPCHandlers();
-        attachFSWatchIPCHandlers(createWatcher(mainWindow));
-        registerStreamProtocol();
-
-        // Configure the renderer's environment.
-        setDownloadPath(mainWindow.webContents);
-        allowExternalLinks(mainWindow.webContents);
-
-        // TODO(MR): Remove or resurrect
-        // The commit that introduced this header override had the message
-        // "fix cors issue for uploads". Not sure what that means, so disabling
-        // it for now to see why exactly this is required.
-        // addAllowOriginHeader(mainWindow);
-
-        // Start loading the renderer.
-        mainWindow.loadURL(rendererURL);
-
-        // Continue on with the rest of the startup sequence.
-        Menu.setApplicationMenu(await createApplicationMenu(mainWindow));
-        setupTrayItem(mainWindow);
-        if (!isDev) setupAutoUpdater(mainWindow);
-
-        try {
-            deleteLegacyDiskCacheDirIfExists();
-            deleteLegacyKeysStoreIfExists();
-        } catch (e) {
-            // Log but otherwise ignore errors during non-critical startup
-            // actions.
-            log.error("Ignoring startup error", e);
-        }
+    void app.whenReady().then(() => {
+        void (async () => {
+            // Create window and prepare for the renderer.
+            mainWindow = createMainWindow();
+            attachIPCHandlers();
+            attachFSWatchIPCHandlers(createWatcher(mainWindow));
+            registerStreamProtocol();
+
+            // Configure the renderer's environment.
+            setDownloadPath(mainWindow.webContents);
+            allowExternalLinks(mainWindow.webContents);
+
+            // Start loading the renderer.
+            void mainWindow.loadURL(rendererURL);
+
+            // Continue on with the rest of the startup sequence.
+            Menu.setApplicationMenu(await createApplicationMenu(mainWindow));
+            setupTrayItem(mainWindow);
+            setupAutoUpdater(mainWindow);
+
+            try {
+                await deleteLegacyDiskCacheDirIfExists();
+                await deleteLegacyKeysStoreIfExists();
+            } catch (e) {
+                // Log but otherwise ignore errors during non-critical startup
+                // actions.
+                log.error("Ignoring startup error", e);
+            }
+        })();
     });
 
     // This is a macOS only event. Show our window when the user activates the

+ 0 - 72
desktop/src/main/dialogs.ts

@@ -1,72 +0,0 @@
-import { dialog } from "electron/main";
-import fs from "node:fs/promises";
-import path from "node:path";
-import type { ElectronFile } from "../types/ipc";
-import { getElectronFile } from "./services/fs";
-import { getElectronFilesFromGoogleZip } from "./services/upload";
-
-export const selectDirectory = async () => {
-    const result = await dialog.showOpenDialog({
-        properties: ["openDirectory"],
-    });
-    if (result.filePaths && result.filePaths.length > 0) {
-        return result.filePaths[0]?.split(path.sep)?.join(path.posix.sep);
-    }
-};
-
-export const showUploadFilesDialog = async () => {
-    const selectedFiles = await dialog.showOpenDialog({
-        properties: ["openFile", "multiSelections"],
-    });
-    const filePaths = selectedFiles.filePaths;
-    return await Promise.all(filePaths.map(getElectronFile));
-};
-
-export const showUploadDirsDialog = async () => {
-    const dir = await dialog.showOpenDialog({
-        properties: ["openDirectory", "multiSelections"],
-    });
-
-    let filePaths: string[] = [];
-    for (const dirPath of dir.filePaths) {
-        filePaths = [...filePaths, ...(await getDirFilePaths(dirPath))];
-    }
-
-    return await Promise.all(filePaths.map(getElectronFile));
-};
-
-// https://stackoverflow.com/a/63111390
-const getDirFilePaths = async (dirPath: string) => {
-    if (!(await fs.stat(dirPath)).isDirectory()) {
-        return [dirPath];
-    }
-
-    let files: string[] = [];
-    const filePaths = await fs.readdir(dirPath);
-
-    for (const filePath of filePaths) {
-        const absolute = path.join(dirPath, filePath);
-        files = [...files, ...(await getDirFilePaths(absolute))];
-    }
-
-    return files;
-};
-
-export const showUploadZipDialog = async () => {
-    const selectedFiles = await dialog.showOpenDialog({
-        properties: ["openFile", "multiSelections"],
-        filters: [{ name: "Zip File", extensions: ["zip"] }],
-    });
-    const filePaths = selectedFiles.filePaths;
-
-    let files: ElectronFile[] = [];
-
-    for (const filePath of filePaths) {
-        files = [...files, ...(await getElectronFilesFromGoogleZip(filePath))];
-    }
-
-    return {
-        zipPaths: filePaths,
-        files,
-    };
-};

+ 0 - 29
desktop/src/main/fs.ts

@@ -1,29 +0,0 @@
-/**
- * @file file system related functions exposed over the context bridge.
- */
-import { existsSync } from "node:fs";
-import fs from "node:fs/promises";
-
-export const fsExists = (path: string) => existsSync(path);
-
-export const fsRename = (oldPath: string, newPath: string) =>
-    fs.rename(oldPath, newPath);
-
-export const fsMkdirIfNeeded = (dirPath: string) =>
-    fs.mkdir(dirPath, { recursive: true });
-
-export const fsRmdir = (path: string) => fs.rmdir(path);
-
-export const fsRm = (path: string) => fs.rm(path);
-
-export const fsReadTextFile = async (filePath: string) =>
-    fs.readFile(filePath, "utf-8");
-
-export const fsWriteFile = (path: string, contents: string) =>
-    fs.writeFile(path, contents);
-
-export const fsIsDir = async (dirPath: string) => {
-    if (!existsSync(dirPath)) return false;
-    const stat = await fs.stat(dirPath);
-    return stat.isDirectory();
-};

+ 0 - 21
desktop/src/main/init.ts

@@ -1,21 +0,0 @@
-import { BrowserWindow } from "electron";
-
-export function addAllowOriginHeader(mainWindow: BrowserWindow) {
-    mainWindow.webContents.session.webRequest.onHeadersReceived(
-        (details, callback) => {
-            details.responseHeaders = lowerCaseHeaders(details.responseHeaders);
-            details.responseHeaders["access-control-allow-origin"] = ["*"];
-            callback({
-                responseHeaders: details.responseHeaders,
-            });
-        },
-    );
-}
-
-function lowerCaseHeaders(responseHeaders: Record<string, string[]>) {
-    const headers: Record<string, string[]> = {};
-    for (const key of Object.keys(responseHeaders)) {
-        headers[key.toLowerCase()] = responseHeaders[key];
-    }
-    return headers;
-}

+ 32 - 30
desktop/src/main/ipc.ts

@@ -16,12 +16,20 @@ import type {
     PendingUploads,
     ZipItem,
 } from "../types/ipc";
+import { logToDisk } from "./log";
+import {
+    appVersion,
+    skipAppUpdate,
+    updateAndRestart,
+    updateOnNextRestart,
+} from "./services/app-update";
 import {
+    legacyFaceCrop,
+    openDirectory,
+    openLogDirectory,
     selectDirectory,
-    showUploadDirsDialog,
-    showUploadFilesDialog,
-    showUploadZipDialog,
-} from "./dialogs";
+} from "./services/dir";
+import { ffmpegExec } from "./services/ffmpeg";
 import {
     fsExists,
     fsIsDir,
@@ -31,15 +39,7 @@ import {
     fsRm,
     fsRmdir,
     fsWriteFile,
-} from "./fs";
-import { logToDisk } from "./log";
-import {
-    appVersion,
-    skipAppUpdate,
-    updateAndRestart,
-    updateOnNextRestart,
-} from "./services/app-update";
-import { ffmpegExec } from "./services/ffmpeg";
+} from "./services/fs";
 import { convertToJPEG, generateImageThumbnail } from "./services/image";
 import {
     clipImageEmbedding,
@@ -65,10 +65,10 @@ import {
     watchFindFiles,
     watchGet,
     watchRemove,
+    watchReset,
     watchUpdateIgnoredFiles,
     watchUpdateSyncedFiles,
 } from "./services/watch";
-import { openDirectory, openLogDirectory } from "./utils-electron";
 
 /**
  * Listen for IPC events sent/invoked by the renderer process, and route them to
@@ -95,16 +95,20 @@ export const attachIPCHandlers = () => {
 
     ipcMain.handle("appVersion", () => appVersion());
 
-    ipcMain.handle("openDirectory", (_, dirPath) => openDirectory(dirPath));
+    ipcMain.handle("openDirectory", (_, dirPath: string) =>
+        openDirectory(dirPath),
+    );
 
     ipcMain.handle("openLogDirectory", () => openLogDirectory());
 
     // See [Note: Catching exception during .send/.on]
-    ipcMain.on("logToDisk", (_, message) => logToDisk(message));
+    ipcMain.on("logToDisk", (_, message: string) => logToDisk(message));
+
+    ipcMain.handle("selectDirectory", () => selectDirectory());
 
     ipcMain.on("clearStores", () => clearStores());
 
-    ipcMain.handle("saveEncryptionKey", (_, encryptionKey) =>
+    ipcMain.handle("saveEncryptionKey", (_, encryptionKey: string) =>
         saveEncryptionKey(encryptionKey),
     );
 
@@ -114,21 +118,23 @@ export const attachIPCHandlers = () => {
 
     ipcMain.on("updateAndRestart", () => updateAndRestart());
 
-    ipcMain.on("updateOnNextRestart", (_, version) =>
+    ipcMain.on("updateOnNextRestart", (_, version: string) =>
         updateOnNextRestart(version),
     );
 
-    ipcMain.on("skipAppUpdate", (_, version) => skipAppUpdate(version));
+    ipcMain.on("skipAppUpdate", (_, version: string) => skipAppUpdate(version));
 
     // - FS
 
-    ipcMain.handle("fsExists", (_, path) => fsExists(path));
+    ipcMain.handle("fsExists", (_, path: string) => fsExists(path));
 
     ipcMain.handle("fsRename", (_, oldPath: string, newPath: string) =>
         fsRename(oldPath, newPath),
     );
 
-    ipcMain.handle("fsMkdirIfNeeded", (_, dirPath) => fsMkdirIfNeeded(dirPath));
+    ipcMain.handle("fsMkdirIfNeeded", (_, dirPath: string) =>
+        fsMkdirIfNeeded(dirPath),
+    );
 
     ipcMain.handle("fsRmdir", (_, path: string) => fsRmdir(path));
 
@@ -193,15 +199,9 @@ export const attachIPCHandlers = () => {
         faceEmbedding(input),
     );
 
-    // - File selection
-
-    ipcMain.handle("selectDirectory", () => selectDirectory());
-
-    ipcMain.handle("showUploadFilesDialog", () => showUploadFilesDialog());
-
-    ipcMain.handle("showUploadDirsDialog", () => showUploadDirsDialog());
-
-    ipcMain.handle("showUploadZipDialog", () => showUploadZipDialog());
+    ipcMain.handle("legacyFaceCrop", (_, faceID: string) =>
+        legacyFaceCrop(faceID),
+    );
 
     // - Upload
 
@@ -269,4 +269,6 @@ export const attachFSWatchIPCHandlers = (watcher: FSWatcher) => {
     ipcMain.handle("watchFindFiles", (_, folderPath: string) =>
         watchFindFiles(folderPath),
     );
+
+    ipcMain.handle("watchReset", () => watchReset(watcher));
 };
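
For context, each `ipcMain.handle`/`ipcMain.on` registered here has a matching
`ipcRenderer.invoke`/`ipcRenderer.send` call in the preload script, exposed to
the web layer over the context bridge. A minimal sketch of that renderer-facing
side (the exposed object's name and shape are illustrative; the actual preload
code is not part of this diff):

    // preload.ts (sketch)
    import { contextBridge, ipcRenderer } from "electron/renderer";

    contextBridge.exposeInMainWorld("electron", {
        appVersion: () => ipcRenderer.invoke("appVersion"),
        fsExists: (path: string) => ipcRenderer.invoke("fsExists", path),
        selectDirectory: () => ipcRenderer.invoke("selectDirectory"),
        logToDisk: (message: string) => ipcRenderer.send("logToDisk", message),
    });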

+ 6 - 6
desktop/src/main/log.ts

@@ -1,15 +1,15 @@
 import log from "electron-log";
 import util from "node:util";
-import { isDev } from "./utils-electron";
+import { isDev } from "./utils/electron";
 
 /**
  * Initialize logging in the main process.
  *
  * This will set our underlying logger up to log to a file named `ente.log`,
  *
- * - on Linux at ~/.config/ente/logs/main.log
- * - on macOS at ~/Library/Logs/ente/main.log
- * - on Windows at %USERPROFILE%\AppData\Roaming\ente\logs\main.log
+ * - on Linux at ~/.config/ente/logs/ente.log
+ * - on macOS at ~/Library/Logs/ente/ente.log
+ * - on Windows at %USERPROFILE%\AppData\Roaming\ente\logs\ente.log
  *
  * On dev builds, it will also log to the console.
  */
@@ -65,7 +65,7 @@ const logError_ = (message: string) => {
     if (isDev) console.error(`[error] ${message}`);
 };
 
-const logInfo = (...params: any[]) => {
+const logInfo = (...params: unknown[]) => {
     const message = params
         .map((p) => (typeof p == "string" ? p : util.inspect(p)))
         .join(" ");
@@ -73,7 +73,7 @@ const logInfo = (...params: any[]) => {
     if (isDev) console.log(`[info] ${message}`);
 };
 
-const logDebug = (param: () => any) => {
+const logDebug = (param: () => unknown) => {
     if (isDev) {
         const p = param();
         console.log(`[debug] ${typeof p == "string" ? p : util.inspect(p)}`);

+ 21 - 22
desktop/src/main/menu.ts

@@ -8,8 +8,8 @@ import {
 import { allowWindowClose } from "../main";
 import { forceCheckForAppUpdates } from "./services/app-update";
 import autoLauncher from "./services/auto-launcher";
+import { openLogDirectory } from "./services/dir";
 import { userPreferences } from "./stores/user-preferences";
-import { isDev, openLogDirectory } from "./utils-electron";
 
 /** Create and return the entries in the app's main menu bar */
 export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
@@ -18,23 +18,20 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
     // Whenever the menu is redrawn the current value of these variables is used
     // to set the checked state for the various settings checkboxes.
     let isAutoLaunchEnabled = await autoLauncher.isEnabled();
-    let shouldHideDockIcon = userPreferences.get("hideDockIcon");
+    let shouldHideDockIcon = !!userPreferences.get("hideDockIcon");
 
     const macOSOnly = (options: MenuItemConstructorOptions[]) =>
         process.platform == "darwin" ? options : [];
 
-    const devOnly = (options: MenuItemConstructorOptions[]) =>
-        isDev ? options : [];
-
     const handleCheckForUpdates = () => forceCheckForAppUpdates(mainWindow);
 
     const handleViewChangelog = () =>
-        shell.openExternal(
+        void shell.openExternal(
             "https://github.com/ente-io/ente/blob/main/desktop/CHANGELOG.md",
         );
 
     const toggleAutoLaunch = () => {
-        autoLauncher.toggleAutoLaunch();
+        void autoLauncher.toggleAutoLaunch();
         isAutoLaunchEnabled = !isAutoLaunchEnabled;
     };
 
@@ -45,13 +42,15 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
         shouldHideDockIcon = !shouldHideDockIcon;
     };
 
-    const handleHelp = () => shell.openExternal("https://help.ente.io/photos/");
+    const handleHelp = () =>
+        void shell.openExternal("https://help.ente.io/photos/");
 
-    const handleSupport = () => shell.openExternal("mailto:support@ente.io");
+    const handleSupport = () =>
+        void shell.openExternal("mailto:support@ente.io");
 
-    const handleBlog = () => shell.openExternal("https://ente.io/blog/");
+    const handleBlog = () => void shell.openExternal("https://ente.io/blog/");
 
-    const handleViewLogs = openLogDirectory;
+    const handleViewLogs = () => void openLogDirectory();
 
     return Menu.buildFromTemplate([
         {
@@ -83,12 +82,14 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
                             checked: isAutoLaunchEnabled,
                             click: toggleAutoLaunch,
                         },
-                        {
-                            label: "Hide Dock Icon",
-                            type: "checkbox",
-                            checked: shouldHideDockIcon,
-                            click: toggleHideDockIcon,
-                        },
+                        ...macOSOnly([
+                            {
+                                label: "Hide Dock Icon",
+                                type: "checkbox",
+                                checked: shouldHideDockIcon,
+                                click: toggleHideDockIcon,
+                            },
+                        ]),
                     ],
                 },
 
@@ -127,11 +128,11 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
                         submenu: [
                             {
                                 role: "startSpeaking",
-                                label: "start speaking",
+                                label: "Start Speaking",
                             },
                             {
                                 role: "stopSpeaking",
-                                label: "stop speaking",
+                                label: "Stop Speaking",
                             },
                         ],
                     },
@@ -142,9 +143,7 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
             label: "View",
             submenu: [
                 { label: "Reload", role: "reload" },
-                ...devOnly([
-                    { label: "Toggle Dev Tools", role: "toggleDevTools" },
-                ]),
+                { label: "Toggle Dev Tools", role: "toggleDevTools" },
                 { type: "separator" },
                 { label: "Toggle Full Screen", role: "togglefullscreen" },
             ],

+ 88 - 6
desktop/src/main/services/app-update.ts

@@ -6,14 +6,93 @@ import { allowWindowClose } from "../../main";
 import { AppUpdate } from "../../types/ipc";
 import log from "../log";
 import { userPreferences } from "../stores/user-preferences";
+import { isDev } from "../utils/electron";
 
 export const setupAutoUpdater = (mainWindow: BrowserWindow) => {
     autoUpdater.logger = electronLog;
     autoUpdater.autoDownload = false;
 
+    /**
+     * [Note: Testing auto updates]
+     *
+     * By default, we skip checking for updates automatically in dev builds.
+     * This is because installing updates would fail anyway (at least on macOS),
+     * since auto updates only work for signed builds.
+     *
+     * So end-to-end testing of updates requires using a temporary GitHub
+     * repository and signed builds therein. More on this later.
+     *
+     * ---------------
+     *
+     * [Note: Testing auto updates - Sanity checks]
+     *
+     * However, for partial checks of the UI flow, something like the following
+     * can be used to do a test of the update process (up until the actual
+     * installation itself).
+     *
+     * Create a `app/dev-app-update.yml` with:
+     *
+     *     provider: generic
+     *     url: http://127.0.0.1:7777/
+     *
+     * and start a local webserver in some directory:
+     *
+     *     python3 -m http.server 7777
+     *
+     * In this directory, put `latest-mac.yml` and the DMG file that this YAML
+     * file refers to.
+     *
+     * Alternatively, `dev-app-update.yml` can point to some arbitrary GitHub
+     * repository too, e.g.:
+     *
+     *       provider: github
+     *       owner: ente-io
+     *       repo: test-desktop-updates
+     *
+     * Now we can use the "Check for updates..." menu option to trigger the
+     * update flow.
+     */
+    autoUpdater.forceDevUpdateConfig = isDev;
+    if (isDev) return;
+
+    /**
+     * [Note: Testing auto updates - End to end checks]
+     *
+     * Since end-to-end update testing can only be done with signed builds, the
+     * easiest way is to create temporary builds in a test repository.
+     *
+     * Let us say we have v2.0.0 about to go out. We also have build artifacts
+     * for v2.0.0 in some draft release in our normal release repository.
+     *
+     * Create a new test repository, say `ente-io/test-desktop-updates`. In this
+     * repository, create a release v2.0.0, attaching the actual build
+     * artifacts. Make this release the latest.
+     *
+     * Now we need to create an old signed build.
+     *
+     * First, modify `package.json` to put in a version number older than the
+     * new version number that we want to test updating to, e.g. `v1.0.0-test`.
+     *
+     * Then uncomment the following block of code. This tells the auto updater
+     * to use `ente-io/test-desktop-updates` to get updates.
+     *
+     * With these two changes (older version and setFeedURL), create a new
+     * signed release build on CI. Install this build - it will check for
+     * updates in the temporary feed URL that we set, and we'll be able to check
+     * the full update flow.
+     */
+
+    /*
+    autoUpdater.setFeedURL({
+        provider: "github",
+        owner: "ente-io",
+        repo: "test-desktop-updates",
+    });
+    */
+
     const oneDay = 1 * 24 * 60 * 60 * 1000;
-    setInterval(() => checkForUpdatesAndNotify(mainWindow), oneDay);
-    checkForUpdatesAndNotify(mainWindow);
+    setInterval(() => void checkForUpdatesAndNotify(mainWindow), oneDay);
+    void checkForUpdatesAndNotify(mainWindow);
 };
 
 /**
@@ -22,7 +101,7 @@ export const setupAutoUpdater = (mainWindow: BrowserWindow) => {
 export const forceCheckForAppUpdates = (mainWindow: BrowserWindow) => {
     userPreferences.delete("skipAppVersion");
     userPreferences.delete("muteUpdateNotificationVersion");
-    checkForUpdatesAndNotify(mainWindow);
+    void checkForUpdatesAndNotify(mainWindow);
 };
 
 const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => {
@@ -36,18 +115,21 @@ const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => {
 
     log.debug(() => `Update check found version ${version}`);
 
+    if (!version)
+        throw new Error("Unexpected empty version obtained from auto-updater");
+
     if (compareVersions(version, app.getVersion()) <= 0) {
         log.debug(() => "Skipping update, already at latest version");
         return;
     }
 
-    if (version === userPreferences.get("skipAppVersion")) {
+    if (version == userPreferences.get("skipAppVersion")) {
         log.info(`User chose to skip version ${version}`);
         return;
     }
 
     const mutedVersion = userPreferences.get("muteUpdateNotificationVersion");
-    if (version === mutedVersion) {
+    if (version == mutedVersion) {
         log.info(`User has muted update notifications for version ${version}`);
         return;
     }
@@ -56,7 +138,7 @@ const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => {
         mainWindow.webContents.send("appUpdateAvailable", update);
 
     log.debug(() => "Attempting auto update");
-    autoUpdater.downloadUpdate();
+    await autoUpdater.downloadUpdate();
 
     let timeoutId: ReturnType<typeof setTimeout>;
     const fiveMinutes = 5 * 60 * 1000;
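
For reference, the version gate above relies on semver ordering, presumably via
the `compare-versions` package (its import is not shown in this hunk); updates
proceed only when the available version is strictly newer than the running one,
and pre-release tags sort below the corresponding release.

    import { compareVersions } from "compare-versions";

    // compareVersions(available, current): proceed only when the result is 1.
    console.log(compareVersions("1.7.0", "1.7.0-rc")); //  1 → an "-rc" build updates to the final release
    console.log(compareVersions("1.7.0-rc", "1.7.0-rc")); //  0 → already at the latest version, skip
    console.log(compareVersions("1.6.63", "1.7.0-rc")); // -1 → available version is older, skip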

+ 6 - 7
desktop/src/main/services/auto-launcher.ts

@@ -27,23 +27,22 @@ class AutoLauncher {
     }
 
     async toggleAutoLaunch() {
-        const isEnabled = await this.isEnabled();
+        const wasEnabled = await this.isEnabled();
         const autoLaunch = this.autoLaunch;
         if (autoLaunch) {
-            if (isEnabled) await autoLaunch.disable();
+            if (wasEnabled) await autoLaunch.disable();
             else await autoLaunch.enable();
         } else {
-            if (isEnabled) app.setLoginItemSettings({ openAtLogin: false });
-            else app.setLoginItemSettings({ openAtLogin: true });
+            const openAtLogin = !wasEnabled;
+            app.setLoginItemSettings({ openAtLogin });
         }
     }
 
-    async wasAutoLaunched() {
+    wasAutoLaunched() {
         if (this.autoLaunch) {
             return app.commandLine.hasSwitch("hidden");
         } else {
-            // TODO(MR): This apparently doesn't work anymore.
-            return app.getLoginItemSettings().wasOpenedAtLogin;
+            return app.getLoginItemSettings().openAtLogin;
         }
     }
 }

+ 89 - 0
desktop/src/main/services/dir.ts

@@ -0,0 +1,89 @@
+import { shell } from "electron/common";
+import { app, dialog } from "electron/main";
+import { existsSync } from "fs";
+import fs from "node:fs/promises";
+import path from "node:path";
+import { posixPath } from "../utils/electron";
+
+export const selectDirectory = async () => {
+    const result = await dialog.showOpenDialog({
+        properties: ["openDirectory"],
+    });
+    const dirPath = result.filePaths[0];
+    return dirPath ? posixPath(dirPath) : undefined;
+};
+
+/**
+ * Open the given {@link dirPath} in the system's folder viewer.
+ *
+ * For example, on macOS this'll open {@link dirPath} in Finder.
+ */
+export const openDirectory = async (dirPath: string) => {
+    // We need to use `path.normalize` because `shell.openPath` does not support
+    // POSIX paths; it needs a platform-specific path:
+    // https://github.com/electron/electron/issues/28831#issuecomment-826370589
+    const res = await shell.openPath(path.normalize(dirPath));
+    // `shell.openPath` resolves with a string containing the error message
+    // corresponding to the failure if a failure occurred, otherwise "".
+    if (res) throw new Error(`Failed to open directory ${dirPath}: ${res}`);
+};
+
+/**
+ * Open the app's log directory in the system's folder viewer.
+ *
+ * @see {@link openDirectory}
+ */
+export const openLogDirectory = () => openDirectory(logDirectoryPath());
+
+/**
+ * Return the path where the logs for the app are saved.
+ *
+ * [Note: Electron app paths]
+ *
+ * There are three paths we usually need to be aware of.
+ *
+ * First is the "appData". We can obtain this with `app.getPath("appData")`.
+ * This is per-user application data directory. This is usually the following:
+ *
+ * - Windows: `%APPDATA%`, e.g. `C:\Users\<username>\AppData\Local`
+ * - Linux: `~/.config`
+ * - macOS: `~/Library/Application Support`
+ *
+ * Now, if we suffix the app's name onto the appData directory, we get the
+ * "userData" directory. This is the **primary** place applications are meant to
+ * store user's data, e.g. various configuration files and saved state.
+ *
+ * During development, our app name is "Electron", so this'd be, for example,
+ * `~/Library/Application Support/Electron` if we run using `yarn dev`. For the
+ * packaged production app, our app name is "ente", so this would be:
+ *
+ * - Windows: `%APPDATA%\ente`, e.g. `C:\Users\<username>\AppData\Roaming\ente`
+ * - Linux: `~/.config/ente`
+ * - macOS: `~/Library/Application Support/ente`
+ *
+ * Note that Chromium also stores the browser state, e.g. localStorage or disk
+ * caches, in userData.
+ *
+ * Finally, there is the "logs" directory. This is not within "appData" but has
+ * a slightly different OS specific path. Since our log file is named
+ * "ente.log", it can be found at:
+ *
+ * - macOS: ~/Library/Logs/ente/ente.log (production)
+ * - macOS: ~/Library/Logs/Electron/ente.log (dev)
+ *
+ * https://www.electronjs.org/docs/latest/api/app
+ */
+const logDirectoryPath = () => app.getPath("logs");
+
+/**
+ * See: [Note: Legacy face crops]
+ */
+export const legacyFaceCrop = async (
+    faceID: string,
+): Promise<Uint8Array | undefined> => {
+    // See: [Note: Getting the cache path]
+    // @ts-expect-error "cache" works but is not part of the public API.
+    const cacheDir = path.join(app.getPath("cache"), "ente");
+    const filePath = path.join(cacheDir, "face-crops", faceID);
+    return existsSync(filePath) ? await fs.readFile(filePath) : undefined;
+};
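
The three directories described in [Note: Electron app paths] can be inspected
directly with `app.getPath`. For illustration only (not part of this change):

    import { app } from "electron/main";

    // On a packaged macOS build this prints "~/Library/Application Support",
    // "~/Library/Application Support/ente" and "~/Library/Logs/ente" (with ~
    // expanded); the dev build substitutes "Electron" for "ente".
    const logAppPaths = () => {
        console.log("appData ", app.getPath("appData"));
        console.log("userData", app.getPath("userData"));
        console.log("logs    ", app.getPath("logs"));
    };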

+ 5 - 5
desktop/src/main/services/ffmpeg.ts

@@ -2,13 +2,13 @@ import pathToFfmpeg from "ffmpeg-static";
 import fs from "node:fs/promises";
 import type { ZipItem } from "../../types/ipc";
 import log from "../log";
-import { withTimeout } from "../utils";
-import { execAsync } from "../utils-electron";
+import { ensure, withTimeout } from "../utils/common";
+import { execAsync } from "../utils/electron";
 import {
     deleteTempFile,
     makeFileForDataOrPathOrZipItem,
     makeTempFilePath,
-} from "../utils-temp";
+} from "../utils/temp";
 
 /* Duplicated in the web app's code (used by the WASM FFmpeg implementation). */
 const ffmpegPathPlaceholder = "FFMPEG";
@@ -69,7 +69,7 @@ export const ffmpegExec = async (
             outputFilePath,
         );
 
-        if (timeoutMS) await withTimeout(execAsync(cmd), 30 * 1000);
+        if (timeoutMS) await withTimeout(execAsync(cmd), timeoutMS);
         else await execAsync(cmd);
 
         return fs.readFile(outputFilePath);
@@ -110,5 +110,5 @@ const ffmpegBinaryPath = () => {
     // This substitution of app.asar by app.asar.unpacked is suggested by the
     // ffmpeg-static library author themselves:
     // https://github.com/eugeneware/ffmpeg-static/issues/16
-    return pathToFfmpeg.replace("app.asar", "app.asar.unpacked");
+    return ensure(pathToFfmpeg).replace("app.asar", "app.asar.unpacked");
 };
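
The fix above passes the caller-provided `timeoutMS` to `withTimeout` instead of
a hard-coded 30 seconds. `withTimeout` itself lives in `utils/common` and is not
shown in this diff; a plausible sketch of such a helper (the actual
implementation may differ):

    // Reject if the given promise does not settle within `ms` milliseconds.
    const withTimeout = async <T>(promise: Promise<T>, ms: number): Promise<T> => {
        let timeoutId: ReturnType<typeof setTimeout> | undefined;
        const timeout = new Promise<never>((_, reject) => {
            timeoutId = setTimeout(() => reject(new Error("Operation timed out")), ms);
        });
        try {
            return await Promise.race([promise, timeout]);
        } finally {
            if (timeoutId !== undefined) clearTimeout(timeoutId);
        }
    };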

+ 19 - 143
desktop/src/main/services/fs.ts

@@ -1,154 +1,30 @@
-import StreamZip from "node-stream-zip";
+/**
+ * @file file system related functions exposed over the context bridge.
+ */
+
 import { existsSync } from "node:fs";
 import fs from "node:fs/promises";
-import path from "node:path";
-import { ElectronFile } from "../../types/ipc";
-import log from "../log";
 
-const FILE_STREAM_CHUNK_SIZE: number = 4 * 1024 * 1024;
+export const fsExists = (path: string) => existsSync(path);
 
-const getFileStream = async (filePath: string) => {
-    const file = await fs.open(filePath, "r");
-    let offset = 0;
-    const readableStream = new ReadableStream<Uint8Array>({
-        async pull(controller) {
-            try {
-                const buff = new Uint8Array(FILE_STREAM_CHUNK_SIZE);
-                const bytesRead = (await file.read(
-                    buff,
-                    0,
-                    FILE_STREAM_CHUNK_SIZE,
-                    offset,
-                )) as unknown as number;
-                offset += bytesRead;
-                if (bytesRead === 0) {
-                    controller.close();
-                    await file.close();
-                } else {
-                    controller.enqueue(buff.slice(0, bytesRead));
-                }
-            } catch (e) {
-                await file.close();
-            }
-        },
-        async cancel() {
-            await file.close();
-        },
-    });
-    return readableStream;
-};
+export const fsRename = (oldPath: string, newPath: string) =>
+    fs.rename(oldPath, newPath);
 
-export async function getElectronFile(filePath: string): Promise<ElectronFile> {
-    const fileStats = await fs.stat(filePath);
-    return {
-        path: filePath.split(path.sep).join(path.posix.sep),
-        name: path.basename(filePath),
-        size: fileStats.size,
-        lastModified: fileStats.mtime.valueOf(),
-        stream: async () => {
-            if (!existsSync(filePath)) {
-                throw new Error("electronFile does not exist");
-            }
-            return await getFileStream(filePath);
-        },
-        blob: async () => {
-            if (!existsSync(filePath)) {
-                throw new Error("electronFile does not exist");
-            }
-            const blob = await fs.readFile(filePath);
-            return new Blob([new Uint8Array(blob)]);
-        },
-        arrayBuffer: async () => {
-            if (!existsSync(filePath)) {
-                throw new Error("electronFile does not exist");
-            }
-            const blob = await fs.readFile(filePath);
-            return new Uint8Array(blob);
-        },
-    };
-}
+export const fsMkdirIfNeeded = (dirPath: string) =>
+    fs.mkdir(dirPath, { recursive: true });
 
-export const getZipFileStream = async (
-    zip: StreamZip.StreamZipAsync,
-    filePath: string,
-) => {
-    const stream = await zip.stream(filePath);
-    const done = {
-        current: false,
-    };
-    const inProgress = {
-        current: false,
-    };
-    // eslint-disable-next-line no-unused-vars
-    let resolveObj: (value?: any) => void = null;
-    // eslint-disable-next-line no-unused-vars
-    let rejectObj: (reason?: any) => void = null;
-    stream.on("readable", () => {
-        try {
-            if (resolveObj) {
-                inProgress.current = true;
-                const chunk = stream.read(FILE_STREAM_CHUNK_SIZE) as Buffer;
-                if (chunk) {
-                    resolveObj(new Uint8Array(chunk));
-                    resolveObj = null;
-                }
-                inProgress.current = false;
-            }
-        } catch (e) {
-            rejectObj(e);
-        }
-    });
-    stream.on("end", () => {
-        try {
-            done.current = true;
-            if (resolveObj && !inProgress.current) {
-                resolveObj(null);
-                resolveObj = null;
-            }
-        } catch (e) {
-            rejectObj(e);
-        }
-    });
-    stream.on("error", (e) => {
-        try {
-            done.current = true;
-            if (rejectObj) {
-                rejectObj(e);
-                rejectObj = null;
-            }
-        } catch (e) {
-            rejectObj(e);
-        }
-    });
+export const fsRmdir = (path: string) => fs.rmdir(path);
 
-    const readStreamData = async () => {
-        return new Promise<Uint8Array>((resolve, reject) => {
-            const chunk = stream.read(FILE_STREAM_CHUNK_SIZE) as Buffer;
+export const fsRm = (path: string) => fs.rm(path);
 
-            if (chunk || done.current) {
-                resolve(chunk);
-            } else {
-                resolveObj = resolve;
-                rejectObj = reject;
-            }
-        });
-    };
+export const fsReadTextFile = async (filePath: string) =>
+    fs.readFile(filePath, "utf-8");
 
-    const readableStream = new ReadableStream<Uint8Array>({
-        async pull(controller) {
-            try {
-                const data = await readStreamData();
+export const fsWriteFile = (path: string, contents: string) =>
+    fs.writeFile(path, contents);
 
-                if (data) {
-                    controller.enqueue(data);
-                } else {
-                    controller.close();
-                }
-            } catch (e) {
-                log.error("Failed to pull from readableStream", e);
-                controller.close();
-            }
-        },
-    });
-    return readableStream;
+export const fsIsDir = async (dirPath: string) => {
+    if (!existsSync(dirPath)) return false;
+    const stat = await fs.stat(dirPath);
+    return stat.isDirectory();
 };

+ 3 - 3
desktop/src/main/services/image.ts

@@ -1,15 +1,15 @@
 /** @file Image format conversions and thumbnail generation */
 
 import fs from "node:fs/promises";
-import path from "path";
+import path from "node:path";
 import { CustomErrorMessage, type ZipItem } from "../../types/ipc";
 import log from "../log";
-import { execAsync, isDev } from "../utils-electron";
+import { execAsync, isDev } from "../utils/electron";
 import {
     deleteTempFile,
     makeFileForDataOrPathOrZipItem,
     makeTempFilePath,
-} from "../utils-temp";
+} from "../utils/temp";
 
 export const convertToJPEG = async (imageData: Uint8Array) => {
     const inputFilePath = await makeTempFilePath();

+ 45 - 47
desktop/src/main/services/ml-clip.ts

@@ -11,7 +11,8 @@ import * as ort from "onnxruntime-node";
 import Tokenizer from "../../thirdparty/clip-bpe-ts/mod";
 import log from "../log";
 import { writeStream } from "../stream";
-import { deleteTempFile, makeTempFilePath } from "../utils-temp";
+import { ensure } from "../utils/common";
+import { deleteTempFile, makeTempFilePath } from "../utils/temp";
 import { makeCachedInferenceSession } from "./ml";
 
 const cachedCLIPImageSession = makeCachedInferenceSession(
@@ -22,7 +23,7 @@ const cachedCLIPImageSession = makeCachedInferenceSession(
 export const clipImageEmbedding = async (jpegImageData: Uint8Array) => {
     const tempFilePath = await makeTempFilePath();
     const imageStream = new Response(jpegImageData.buffer).body;
-    await writeStream(tempFilePath, imageStream);
+    await writeStream(tempFilePath, ensure(imageStream));
     try {
         return await clipImageEmbedding_(tempFilePath);
     } finally {
@@ -44,30 +45,30 @@ const clipImageEmbedding_ = async (jpegFilePath: string) => {
             `onnx/clip image embedding took ${Date.now() - t1} ms (prep: ${t2 - t1} ms, inference: ${Date.now() - t2} ms)`,
     );
     /* Need these model specific casts to type the result */
-    const imageEmbedding = results["output"].data as Float32Array;
+    const imageEmbedding = ensure(results.output).data as Float32Array;
     return normalizeEmbedding(imageEmbedding);
 };
 
-const getRGBData = async (jpegFilePath: string) => {
+const getRGBData = async (jpegFilePath: string): Promise<number[]> => {
     const jpegData = await fs.readFile(jpegFilePath);
     const rawImageData = jpeg.decode(jpegData, {
         useTArray: true,
         formatAsRGBA: false,
     });
 
-    const nx: number = rawImageData.width;
-    const ny: number = rawImageData.height;
-    const inputImage: Uint8Array = rawImageData.data;
+    const nx = rawImageData.width;
+    const ny = rawImageData.height;
+    const inputImage = rawImageData.data;
 
-    const nx2: number = 224;
-    const ny2: number = 224;
-    const totalSize: number = 3 * nx2 * ny2;
+    const nx2 = 224;
+    const ny2 = 224;
+    const totalSize = 3 * nx2 * ny2;
 
-    const result: number[] = Array(totalSize).fill(0);
-    const scale: number = Math.max(nx, ny) / 224;
+    const result = Array<number>(totalSize).fill(0);
+    const scale = Math.max(nx, ny) / 224;
 
-    const nx3: number = Math.round(nx / scale);
-    const ny3: number = Math.round(ny / scale);
+    const nx3 = Math.round(nx / scale);
+    const ny3 = Math.round(ny / scale);
 
     const mean: number[] = [0.48145466, 0.4578275, 0.40821073];
     const std: number[] = [0.26862954, 0.26130258, 0.27577711];
@@ -76,40 +77,40 @@ const getRGBData = async (jpegFilePath: string) => {
         for (let x = 0; x < nx3; x++) {
             for (let c = 0; c < 3; c++) {
                 // Linear interpolation
-                const sx: number = (x + 0.5) * scale - 0.5;
-                const sy: number = (y + 0.5) * scale - 0.5;
+                const sx = (x + 0.5) * scale - 0.5;
+                const sy = (y + 0.5) * scale - 0.5;
 
-                const x0: number = Math.max(0, Math.floor(sx));
-                const y0: number = Math.max(0, Math.floor(sy));
+                const x0 = Math.max(0, Math.floor(sx));
+                const y0 = Math.max(0, Math.floor(sy));
 
-                const x1: number = Math.min(x0 + 1, nx - 1);
-                const y1: number = Math.min(y0 + 1, ny - 1);
+                const x1 = Math.min(x0 + 1, nx - 1);
+                const y1 = Math.min(y0 + 1, ny - 1);
 
-                const dx: number = sx - x0;
-                const dy: number = sy - y0;
+                const dx = sx - x0;
+                const dy = sy - y0;
 
-                const j00: number = 3 * (y0 * nx + x0) + c;
-                const j01: number = 3 * (y0 * nx + x1) + c;
-                const j10: number = 3 * (y1 * nx + x0) + c;
-                const j11: number = 3 * (y1 * nx + x1) + c;
+                const j00 = 3 * (y0 * nx + x0) + c;
+                const j01 = 3 * (y0 * nx + x1) + c;
+                const j10 = 3 * (y1 * nx + x0) + c;
+                const j11 = 3 * (y1 * nx + x1) + c;
 
-                const v00: number = inputImage[j00];
-                const v01: number = inputImage[j01];
-                const v10: number = inputImage[j10];
-                const v11: number = inputImage[j11];
+                const v00 = inputImage[j00] ?? 0;
+                const v01 = inputImage[j01] ?? 0;
+                const v10 = inputImage[j10] ?? 0;
+                const v11 = inputImage[j11] ?? 0;
 
-                const v0: number = v00 * (1 - dx) + v01 * dx;
-                const v1: number = v10 * (1 - dx) + v11 * dx;
+                const v0 = v00 * (1 - dx) + v01 * dx;
+                const v1 = v10 * (1 - dx) + v11 * dx;
 
-                const v: number = v0 * (1 - dy) + v1 * dy;
+                const v = v0 * (1 - dy) + v1 * dy;
 
-                const v2: number = Math.min(Math.max(Math.round(v), 0), 255);
+                const v2 = Math.min(Math.max(Math.round(v), 0), 255);
 
                 // createTensorWithDataList is dumb compared to reshape and
                 // hence has to be given with one channel after another
-                const i: number = y * nx3 + x + (c % 3) * 224 * 224;
+                const i = y * nx3 + x + (c % 3) * 224 * 224;
 
-                result[i] = (v2 / 255 - mean[c]) / std[c];
+                result[i] = (v2 / 255 - (mean[c] ?? 0)) / (std[c] ?? 1);
             }
         }
     }
@@ -119,13 +120,12 @@ const getRGBData = async (jpegFilePath: string) => {
 
 const normalizeEmbedding = (embedding: Float32Array) => {
     let normalization = 0;
-    for (let index = 0; index < embedding.length; index++) {
-        normalization += embedding[index] * embedding[index];
-    }
+    for (const v of embedding) normalization += v * v;
+
     const sqrtNormalization = Math.sqrt(normalization);
-    for (let index = 0; index < embedding.length; index++) {
-        embedding[index] = embedding[index] / sqrtNormalization;
-    }
+    for (let index = 0; index < embedding.length; index++)
+        embedding[index] = ensure(embedding[index]) / sqrtNormalization;
+
     return embedding;
 };
 
@@ -134,11 +134,9 @@ const cachedCLIPTextSession = makeCachedInferenceSession(
     64173509 /* 61.2 MB */,
 );
 
-let _tokenizer: Tokenizer = null;
+let _tokenizer: Tokenizer | undefined;
 const getTokenizer = () => {
-    if (!_tokenizer) {
-        _tokenizer = new Tokenizer();
-    }
+    if (!_tokenizer) _tokenizer = new Tokenizer();
     return _tokenizer;
 };
 
@@ -169,6 +167,6 @@ export const clipTextEmbeddingIfAvailable = async (text: string) => {
         () =>
             `onnx/clip text embedding took ${Date.now() - t1} ms (prep: ${t2 - t1} ms, inference: ${Date.now() - t2} ms)`,
     );
-    const textEmbedding = results["output"].data as Float32Array;
+    const textEmbedding = ensure(results.output).data as Float32Array;
     return normalizeEmbedding(textEmbedding);
 };

+ 4 - 2
desktop/src/main/services/ml-face.ts

@@ -8,6 +8,7 @@
  */
 import * as ort from "onnxruntime-node";
 import log from "../log";
+import { ensure } from "../utils/common";
 import { makeCachedInferenceSession } from "./ml";
 
 const cachedFaceDetectionSession = makeCachedInferenceSession(
@@ -23,7 +24,7 @@ export const detectFaces = async (input: Float32Array) => {
     };
     const results = await session.run(feeds);
     log.debug(() => `onnx/yolo face detection took ${Date.now() - t} ms`);
-    return results["output"].data;
+    return ensure(results.output).data;
 };
 
 const cachedFaceEmbeddingSession = makeCachedInferenceSession(
@@ -46,5 +47,6 @@ export const faceEmbedding = async (input: Float32Array) => {
     const results = await session.run(feeds);
     log.debug(() => `onnx/yolo face embedding took ${Date.now() - t} ms`);
     /* Need these model specific casts to extract and type the result */
-    return (results.embeddings as unknown as any)["cpuData"] as Float32Array;
+    return (results.embeddings as unknown as Record<string, unknown>)
+        .cpuData as Float32Array;
 };

+ 8 - 5
desktop/src/main/services/ml.ts

@@ -34,6 +34,7 @@ import { writeStream } from "../stream";
  * actively trigger a download until the returned function is called.
  *
  * @param modelName The name of the model to download.
+ *
  * @param modelByteSize The size in bytes that we expect the model to have. If
  * the size of the downloaded model does not match the expected size, then we
  * will redownload it.
@@ -99,13 +100,15 @@ const downloadModel = async (saveLocation: string, name: string) => {
     // `mkdir -p` the directory where we want to save the model.
     const saveDir = path.dirname(saveLocation);
     await fs.mkdir(saveDir, { recursive: true });
-    // Download
+    // Download.
     log.info(`Downloading ML model from ${name}`);
     const url = `https://models.ente.io/${name}`;
     const res = await net.fetch(url);
     if (!res.ok) throw new Error(`Failed to fetch ${url}: HTTP ${res.status}`);
-    // Save
-    await writeStream(saveLocation, res.body);
+    const body = res.body;
+    if (!body) throw new Error(`Received a null response for ${url}`);
+    // Save.
+    await writeStream(saveLocation, body);
     log.info(`Downloaded CLIP model ${name}`);
 };
 
@@ -114,9 +117,9 @@ const downloadModel = async (saveLocation: string, name: string) => {
  */
 const createInferenceSession = async (modelPath: string) => {
     return await ort.InferenceSession.create(modelPath, {
-        // Restrict the number of threads to 1
+        // Restrict the number of threads to 1.
         intraOpNumThreads: 1,
-        // Be more conservative with RAM usage
+        // Be more conservative with RAM usage.
         enableCpuMemArena: false,
     });
 };

+ 14 - 5
desktop/src/main/services/store.ts

@@ -9,20 +9,29 @@ import { watchStore } from "../stores/watch";
  * This is useful to reset state when the user logs out.
  */
 export const clearStores = () => {
-    uploadStatusStore.clear();
     safeStorageStore.clear();
+    uploadStatusStore.clear();
     watchStore.clear();
 };
 
-export const saveEncryptionKey = async (encryptionKey: string) => {
-    const encryptedKey: Buffer = await safeStorage.encryptString(encryptionKey);
+/**
+ * [Note: Safe storage keys]
+ *
+ * On macOS, `safeStorage` stores our data under a Keychain entry named
+ * "<app-name> Safe Storage". Which resolves to:
+ *
+ * - Electron Safe Storage (dev)
+ * - ente Safe Storage    (prod)
+ */
+export const saveEncryptionKey = (encryptionKey: string) => {
+    const encryptedKey = safeStorage.encryptString(encryptionKey);
     const b64EncryptedKey = Buffer.from(encryptedKey).toString("base64");
     safeStorageStore.set("encryptionKey", b64EncryptedKey);
 };
 
-export const encryptionKey = async (): Promise<string | undefined> => {
+export const encryptionKey = (): string | undefined => {
     const b64EncryptedKey = safeStorageStore.get("encryptionKey");
     if (!b64EncryptedKey) return undefined;
     const keyBuffer = Buffer.from(b64EncryptedKey, "base64");
-    return await safeStorage.decryptString(keyBuffer);
+    return safeStorage.decryptString(keyBuffer);
 };

+ 63 - 63
desktop/src/main/services/upload.ts

@@ -1,10 +1,9 @@
 import StreamZip from "node-stream-zip";
 import fs from "node:fs/promises";
+import path from "node:path";
 import { existsSync } from "original-fs";
-import path from "path";
-import type { ElectronFile, PendingUploads, ZipItem } from "../../types/ipc";
+import type { PendingUploads, ZipItem } from "../../types/ipc";
 import { uploadStatusStore } from "../stores/upload-status";
-import { getZipFileStream } from "./fs";
 
 export const listZipItems = async (zipPath: string): Promise<ZipItem[]> => {
     const zip = new StreamZip.async({ file: zipPath });
@@ -15,13 +14,13 @@ export const listZipItems = async (zipPath: string): Promise<ZipItem[]> => {
     for (const entry of Object.values(entries)) {
         const basename = path.basename(entry.name);
         // Ignore "hidden" files (files whose names begins with a dot).
-        if (entry.isFile && basename.length > 0 && basename[0] != ".") {
+        if (entry.isFile && !basename.startsWith(".")) {
             // `entry.name` is the path within the zip.
             entryNames.push(entry.name);
         }
     }
 
-    zip.close();
+    await zip.close();
 
     return entryNames.map((entryName) => [zipPath, entryName]);
 };
@@ -36,14 +35,18 @@ export const pathOrZipItemSize = async (
         const [zipPath, entryName] = pathOrZipItem;
         const zip = new StreamZip.async({ file: zipPath });
         const entry = await zip.entry(entryName);
+        if (!entry)
+            throw new Error(
+                `An entry with name ${entryName} does not exist in the zip file at ${zipPath}`,
+            );
         const size = entry.size;
-        zip.close();
+        await zip.close();
         return size;
     }
 };
 
 export const pendingUploads = async (): Promise<PendingUploads | undefined> => {
-    const collectionName = uploadStatusStore.get("collectionName");
+    const collectionName = uploadStatusStore.get("collectionName") ?? undefined;
 
     const allFilePaths = uploadStatusStore.get("filePaths") ?? [];
     const filePaths = allFilePaths.filter((f) => existsSync(f));
@@ -59,9 +62,9 @@ export const pendingUploads = async (): Promise<PendingUploads | undefined> => {
     //
     // This potentially can be cause us to try reuploading an already uploaded
     // file, but the dedup logic will kick in at that point so no harm will come
-    // off it.
+    // of it.
     if (allZipItems === undefined) {
-        const allZipPaths = uploadStatusStore.get("filePaths");
+        const allZipPaths = uploadStatusStore.get("filePaths") ?? [];
         const zipPaths = allZipPaths.filter((f) => existsSync(f));
         zipItems = [];
         for (const zip of zipPaths)
@@ -79,19 +82,64 @@ export const pendingUploads = async (): Promise<PendingUploads | undefined> => {
     };
 };
 
-export const setPendingUploads = async (pendingUploads: PendingUploads) =>
-    uploadStatusStore.set(pendingUploads);
+/**
+ * [Note: Missing values in electron-store]
+ *
+ * Suppose we were to create a store like this:
+ *
+ *     const store = new Store({
+ *         schema: {
+ *             foo: { type: "string" },
+ *             bars: { type: "array", items: { type: "string" } },
+ *         },
+ *     });
+ *
+ * If we fetch `store.get("foo")` or `store.get("bars")`, we get `undefined`.
+ * But if we try to set these back to `undefined`, say `store.set("foo",
+ * someUndefValue)`, we get asked to
+ *
+ *     TypeError: Use `delete()` to clear values
+ *
+ * This happens even if we do bulk object updates, e.g. with a JS object that
+ * has undefined keys:
+ *
+ * > TypeError: Setting a value of type `undefined` for key `collectionName` is
+ * > not allowed as it's not supported by JSON
+ *
+ * So what should the TypeScript type for "foo" be?
+ *
+ * If it were to not include the possibility of `undefined`, then the type
+ * would lie because `store.get("foo")` can indeed be `undefined`. But if we were
+ * to include the possibility of `undefined`, then trying to `store.set("foo",
+ * someUndefValue)` will throw.
+ *
+ * The approach we take is to rely on false-y values (empty strings and empty
+ * arrays) to indicate missing values: we convert these to `undefined` when
+ * reading from the store, and convert `undefined` to the corresponding false-y
+ * value when writing.
+ */
+export const setPendingUploads = ({
+    collectionName,
+    filePaths,
+    zipItems,
+}: PendingUploads) => {
+    uploadStatusStore.set({
+        collectionName: collectionName ?? "",
+        filePaths: filePaths,
+        zipItems: zipItems,
+    });
+};
 
-export const markUploadedFiles = async (paths: string[]) => {
-    const existing = uploadStatusStore.get("filePaths");
+export const markUploadedFiles = (paths: string[]) => {
+    const existing = uploadStatusStore.get("filePaths") ?? [];
     const updated = existing.filter((p) => !paths.includes(p));
     uploadStatusStore.set("filePaths", updated);
 };
 
-export const markUploadedZipItems = async (
+export const markUploadedZipItems = (
     items: [zipPath: string, entryName: string][],
 ) => {
-    const existing = uploadStatusStore.get("zipItems");
+    const existing = uploadStatusStore.get("zipItems") ?? [];
     const updated = existing.filter(
         (z) => !items.some((e) => z[0] == e[0] && z[1] == e[1]),
     );
@@ -99,51 +147,3 @@ export const markUploadedZipItems = async (
 };
 
 export const clearPendingUploads = () => uploadStatusStore.clear();
-
-export const getElectronFilesFromGoogleZip = async (filePath: string) => {
-    const zip = new StreamZip.async({
-        file: filePath,
-    });
-    const zipName = path.basename(filePath, ".zip");
-
-    const entries = await zip.entries();
-    const files: ElectronFile[] = [];
-
-    for (const entry of Object.values(entries)) {
-        const basename = path.basename(entry.name);
-        if (entry.isFile && basename.length > 0 && basename[0] !== ".") {
-            files.push(await getZipEntryAsElectronFile(zipName, zip, entry));
-        }
-    }
-
-    zip.close();
-
-    return files;
-};
-
-export async function getZipEntryAsElectronFile(
-    zipName: string,
-    zip: StreamZip.StreamZipAsync,
-    entry: StreamZip.ZipEntry,
-): Promise<ElectronFile> {
-    return {
-        path: path
-            .join(zipName, entry.name)
-            .split(path.sep)
-            .join(path.posix.sep),
-        name: path.basename(entry.name),
-        size: entry.size,
-        lastModified: entry.time,
-        stream: async () => {
-            return await getZipFileStream(zip, entry.name);
-        },
-        blob: async () => {
-            const buffer = await zip.entryData(entry.name);
-            return new Blob([new Uint8Array(buffer)]);
-        },
-        arrayBuffer: async () => {
-            const buffer = await zip.entryData(entry.name);
-            return new Uint8Array(buffer);
-        },
-    };
-}
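
To make the convention described in [Note: Missing values in electron-store] concrete, here is a minimal sketch of the read and write sides for the `collectionName` key, mirroring the code above; the helper names `readCollectionName` and `writeCollectionName` are hypothetical and exist only for illustration:

    // Reading: a missing key comes back as `undefined` (the `?? undefined`
    // mirrors what `pendingUploads` above does).
    const readCollectionName = (): string | undefined =>
        uploadStatusStore.get("collectionName") ?? undefined;

    // Writing: replace `undefined` with the false-y placeholder "" so that
    // electron-store does not throw its "Use `delete()` to clear values" error.
    const writeCollectionName = (collectionName: string | undefined) =>
        uploadStatusStore.set("collectionName", collectionName ?? "");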

+ 19 - 22
desktop/src/main/services/watch.ts

@@ -3,9 +3,10 @@ import { BrowserWindow } from "electron/main";
 import fs from "node:fs/promises";
 import path from "node:path";
 import { FolderWatch, type CollectionMapping } from "../../types/ipc";
-import { fsIsDir } from "../fs";
 import log from "../log";
 import { watchStore } from "../stores/watch";
+import { posixPath } from "../utils/electron";
+import { fsIsDir } from "./fs";
 
 /**
  * Create and return a new file system watcher.
@@ -34,8 +35,8 @@ export const createWatcher = (mainWindow: BrowserWindow) => {
     return watcher;
 };
 
-const eventData = (path: string): [string, FolderWatch] => {
-    path = posixPath(path);
+const eventData = (platformPath: string): [string, FolderWatch] => {
+    const path = posixPath(platformPath);
 
     const watch = folderWatches().find((watch) =>
         path.startsWith(watch.folderPath + "/"),
@@ -46,23 +47,15 @@ const eventData = (path: string): [string, FolderWatch] => {
     return [path, watch];
 };
 
-/**
- * Convert a file system {@link filePath} that uses the local system specific
- * path separators into a path that uses POSIX file separators.
- */
-const posixPath = (filePath: string) =>
-    filePath.split(path.sep).join(path.posix.sep);
-
-export const watchGet = (watcher: FSWatcher) => {
-    const [valid, deleted] = folderWatches().reduce(
-        ([valid, deleted], watch) => {
-            (fsIsDir(watch.folderPath) ? valid : deleted).push(watch);
-            return [valid, deleted];
-        },
-        [[], []],
-    );
-    if (deleted.length) {
-        for (const watch of deleted) watchRemove(watcher, watch.folderPath);
+export const watchGet = async (watcher: FSWatcher): Promise<FolderWatch[]> => {
+    const valid: FolderWatch[] = [];
+    const deletedPaths: string[] = [];
+    for (const watch of folderWatches()) {
+        if (await fsIsDir(watch.folderPath)) valid.push(watch);
+        else deletedPaths.push(watch.folderPath);
+    }
+    if (deletedPaths.length) {
+        await Promise.all(deletedPaths.map((p) => watchRemove(watcher, p)));
         setFolderWatches(valid);
     }
     return valid;
@@ -80,7 +73,7 @@ export const watchAdd = async (
 ) => {
     const watches = folderWatches();
 
-    if (!fsIsDir(folderPath))
+    if (!(await fsIsDir(folderPath)))
         throw new Error(
             `Attempting to add a folder watch for a folder path ${folderPath} that is not an existing directory`,
         );
@@ -104,7 +97,7 @@ export const watchAdd = async (
     return watches;
 };
 
-export const watchRemove = async (watcher: FSWatcher, folderPath: string) => {
+export const watchRemove = (watcher: FSWatcher, folderPath: string) => {
     const watches = folderWatches();
     const filtered = watches.filter((watch) => watch.folderPath != folderPath);
     if (watches.length == filtered.length)
@@ -157,3 +150,7 @@ export const watchFindFiles = async (dirPath: string) => {
     }
     return paths;
 };
+
+export const watchReset = (watcher: FSWatcher) => {
+    watcher.unwatch(folderWatches().map((watch) => watch.folderPath));
+};
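
Note that `fsIsDir` is now imported from `./fs` and awaited wherever this file uses it. Its actual implementation is not part of this excerpt; a plausible shape for such an asynchronous helper, shown purely as a sketch, would be:

    import fs from "node:fs/promises";

    // Sketch: resolve to true iff something exists at dirPath and it is a
    // directory; a missing path is treated as "not a directory".
    const fsIsDir = async (dirPath: string) => {
        try {
            return (await fs.stat(dirPath)).isDirectory();
        } catch {
            return false;
        }
    };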

+ 1 - 1
desktop/src/main/stores/safe-storage.ts

@@ -1,7 +1,7 @@
 import Store, { Schema } from "electron-store";
 
 interface SafeStorageStore {
-    encryptionKey: string;
+    encryptionKey?: string;
 }
 
 const safeStorageSchema: Schema<SafeStorageStore> = {

+ 0 - 5
desktop/src/main/stores/upload-status.ts

@@ -9,15 +9,10 @@ export interface UploadStatusStore {
     collectionName?: string;
     /**
      * Paths to regular files that are pending upload.
-     *
-     * This should generally be present, albeit empty, but it is marked optional
-     * in sympathy with its siblings.
      */
     filePaths?: string[];
     /**
      * Each item is the path to a zip file and the name of an entry within it.
-     *
-     * This is marked optional since legacy stores will not have it.
      */
     zipItems?: [zipPath: string, entryName: string][];
     /**

+ 1 - 1
desktop/src/main/stores/user-preferences.ts

@@ -1,7 +1,7 @@
 import Store, { Schema } from "electron-store";
 
 interface UserPreferences {
-    hideDockIcon: boolean;
+    hideDockIcon?: boolean;
     skipAppVersion?: string;
     muteUpdateNotificationVersion?: string;
 }

+ 9 - 5
desktop/src/main/stores/watch.ts

@@ -3,7 +3,7 @@ import { type FolderWatch } from "../../types/ipc";
 import log from "../log";
 
 interface WatchStore {
-    mappings: FolderWatchWithLegacyFields[];
+    mappings?: FolderWatchWithLegacyFields[];
 }
 
 type FolderWatchWithLegacyFields = FolderWatch & {
@@ -54,8 +54,12 @@ export const watchStore = new Store({
  */
 export const migrateLegacyWatchStoreIfNeeded = () => {
     let needsUpdate = false;
-    const watches = watchStore.get("mappings")?.map((watch) => {
+    const updatedWatches = [];
+    for (const watch of watchStore.get("mappings") ?? []) {
         let collectionMapping = watch.collectionMapping;
+        // The required type defines the latest schema, but before migration
+        // this'll be undefined, so tell ESLint to calm down.
+        // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
         if (!collectionMapping) {
             collectionMapping = watch.uploadStrategy == 1 ? "parent" : "root";
             needsUpdate = true;
@@ -64,10 +68,10 @@ export const migrateLegacyWatchStoreIfNeeded = () => {
             delete watch.rootFolderName;
             needsUpdate = true;
         }
-        return { ...watch, collectionMapping };
-    });
+        updatedWatches.push({ ...watch, collectionMapping });
+    }
     if (needsUpdate) {
-        watchStore.set("mappings", watches);
+        watchStore.set("mappings", updatedWatches);
         log.info("Migrated legacy watch store data to new schema");
     }
 };
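
To illustrate the migration above, here is a hypothetical legacy entry and the shape it would take after `migrateLegacyWatchStoreIfNeeded` runs (the values are made up, and other `FolderWatch` fields are elided):

    // As an older version of the app would have written it:
    const legacy = { folderPath: "/photos", uploadStrategy: 1, rootFolderName: "photos" };

    // After migration: uploadStrategy 1 maps to "parent" (anything else maps
    // to "root"), and the legacy rootFolderName field is dropped.
    const migrated = { folderPath: "/photos", collectionMapping: "parent" };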

+ 40 - 58
desktop/src/main/stream.ts

@@ -6,8 +6,10 @@ import StreamZip from "node-stream-zip";
 import { createWriteStream, existsSync } from "node:fs";
 import fs from "node:fs/promises";
 import { Readable } from "node:stream";
+import { ReadableStream } from "node:stream/web";
 import { pathToFileURL } from "node:url";
 import log from "./log";
+import { ensure } from "./utils/common";
 
 /**
  * Register a protocol handler that we use for streaming large files between the
@@ -35,25 +37,18 @@ export const registerStreamProtocol = () => {
     protocol.handle("stream", async (request: Request) => {
         const url = request.url;
         // The request URL contains the command to run as the host, and the
-        // pathname of the file as the path. An additional path can be specified
-        // as the URL hash.
-        //
-        // For example,
-        //
-        //     stream://write/path/to/file#/path/to/another/file
-        //              host[pathname----] [pathname-2---------]
-        //
-        const { host, pathname, hash } = new URL(url);
-        // Convert e.g. "%20" to spaces.
-        const path = decodeURIComponent(pathname);
-        const hashPath = decodeURIComponent(hash);
+        // pathname of the file(s) as the search params.
+        const { host, searchParams } = new URL(url);
         switch (host) {
             case "read":
-                return handleRead(path);
+                return handleRead(ensure(searchParams.get("path")));
             case "read-zip":
-                return handleReadZip(path, hashPath);
+                return handleReadZip(
+                    ensure(searchParams.get("zipPath")),
+                    ensure(searchParams.get("entryName")),
+                );
             case "write":
-                return handleWrite(path, request);
+                return handleWrite(ensure(searchParams.get("path")), request);
             default:
                 return new Response("", { status: 404 });
         }
@@ -89,7 +84,7 @@ const handleRead = async (path: string) => {
         return res;
     } catch (e) {
         log.error(`Failed to read stream at ${path}`, e);
-        return new Response(`Failed to read stream: ${e.message}`, {
+        return new Response(`Failed to read stream: ${String(e)}`, {
             status: 500,
         });
     }
@@ -99,10 +94,24 @@ const handleReadZip = async (zipPath: string, entryName: string) => {
     try {
         const zip = new StreamZip.async({ file: zipPath });
         const entry = await zip.entry(entryName);
-        const stream = await zip.stream(entry);
-        // TODO(MR): when to call zip.close()
+        if (!entry) return new Response("", { status: 404 });
 
-        return new Response(Readable.toWeb(new Readable(stream)), {
+        // This returns an "old style" NodeJS.ReadableStream.
+        const stream = await zip.stream(entry);
+        // Convert it into a new style NodeJS.Readable.
+        const nodeReadable = new Readable().wrap(stream);
+        // Then convert it into a Web stream.
+        const webReadableStreamAny = Readable.toWeb(nodeReadable);
+        // However, we get a ReadableStream<any> now. This doesn't go into the
+        // `BodyInit` expected by the Response constructor, which wants a
+        // ReadableStream<Uint8Array>. Force a cast.
+        const webReadableStream =
+            webReadableStreamAny as ReadableStream<Uint8Array>;
+
+        // Close the zip handle when the underlying stream closes.
+        stream.on("end", () => void zip.close());
+
+        return new Response(webReadableStream, {
             headers: {
                 // We don't know the exact type, but it doesn't really matter,
                 // just set it to a generic binary content-type so that the
@@ -122,7 +131,7 @@ const handleReadZip = async (zipPath: string, entryName: string) => {
             `Failed to read entry ${entryName} from zip file at ${zipPath}`,
             e,
         );
-        return new Response(`Failed to read stream: ${e.message}`, {
+        return new Response(`Failed to read stream: ${String(e)}`, {
             status: 500,
         });
     }
@@ -130,11 +139,11 @@ const handleReadZip = async (zipPath: string, entryName: string) => {
 
 const handleWrite = async (path: string, request: Request) => {
     try {
-        await writeStream(path, request.body);
+        await writeStream(path, ensure(request.body));
         return new Response("", { status: 200 });
     } catch (e) {
         log.error(`Failed to write stream to ${path}`, e);
-        return new Response(`Failed to write stream: ${e.message}`, {
+        return new Response(`Failed to write stream: ${String(e)}`, {
             status: 500,
         });
     }
@@ -146,56 +155,29 @@ const handleWrite = async (path: string, request: Request) => {
  * The returned promise resolves when the write completes.
  *
  * @param filePath The local filesystem path where the file should be written.
- * @param readableStream A [web
- * ReadableStream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream)
- */
-export const writeStream = (filePath: string, readableStream: ReadableStream) =>
-    writeNodeStream(filePath, convertWebReadableStreamToNode(readableStream));
-
-/**
- * Convert a Web ReadableStream into a Node.js ReadableStream
  *
- * This can be used to, for example, write a ReadableStream obtained via
- * `net.fetch` into a file using the Node.js `fs` APIs
+ * @param readableStream A web
+ * [ReadableStream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream).
  */
-const convertWebReadableStreamToNode = (readableStream: ReadableStream) => {
-    const reader = readableStream.getReader();
-    const rs = new Readable();
-
-    rs._read = async () => {
-        try {
-            const result = await reader.read();
-
-            if (!result.done) {
-                rs.push(Buffer.from(result.value));
-            } else {
-                rs.push(null);
-                return;
-            }
-        } catch (e) {
-            rs.emit("error", e);
-        }
-    };
-
-    return rs;
-};
+export const writeStream = (filePath: string, readableStream: ReadableStream) =>
+    writeNodeStream(filePath, Readable.fromWeb(readableStream));
 
 const writeNodeStream = async (filePath: string, fileStream: Readable) => {
     const writeable = createWriteStream(filePath);
 
-    fileStream.on("error", (error) => {
-        writeable.destroy(error); // Close the writable stream with an error
+    fileStream.on("error", (err) => {
+        writeable.destroy(err); // Close the writable stream with an error
     });
 
     fileStream.pipe(writeable);
 
     await new Promise((resolve, reject) => {
         writeable.on("finish", resolve);
-        writeable.on("error", async (e: unknown) => {
+        writeable.on("error", (err) => {
             if (existsSync(filePath)) {
-                await fs.unlink(filePath);
+                void fs.unlink(filePath);
             }
-            reject(e);
+            reject(err);
         });
     });
 };
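
With the switch from hash-encoded paths to search params in `registerStreamProtocol` above, both sides of the protocol can lean on the standard URL APIs for encoding and decoding. A minimal sketch of how a caller might construct such a URL (the renderer-side code is not part of this diff, and the paths are made up):

    // Build a "read-zip" request. URLSearchParams percent-encodes spaces and
    // other special characters in the paths for us.
    const params = new URLSearchParams({
        zipPath: "/tmp/takeout.zip",
        entryName: "Takeout/Photos/a b.jpg",
    });
    const url = `stream://read-zip?${params.toString()}`;

    // Parse it back, the same way the protocol handler above does.
    const { host, searchParams } = new URL(url);
    host; // "read-zip"
    searchParams.get("entryName"); // "Takeout/Photos/a b.jpg"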

+ 12 - 3
desktop/src/main/utils.ts → desktop/src/main/utils/common.ts

@@ -1,10 +1,19 @@
 /**
- * @file grab bag of utitity functions.
+ * @file grab bag of utility functions.
  *
- * Many of these are verbatim copies of functions from web code since there
- * isn't currently a common package that both of them share.
+ * These are verbatim copies of functions from web code since there isn't
+ * currently a common package that both of them share.
  */
 
+/**
+ * Throw an exception if the given value is `null` or `undefined`.
+ */
+export const ensure = <T>(v: T | null | undefined): T => {
+    if (v === null) throw new Error("Required value was null");
+    if (v === undefined) throw new Error("Required value was not found");
+    return v;
+};
+
 /**
  * Wait for {@link ms} milliseconds
  *
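
As used by stream.ts above, `ensure` narrows a nullable value to its non-null type and throws at runtime otherwise. A small illustrative usage (the URL is made up):

    const { searchParams } = new URL("stream://read?path=%2Ftmp%2Fa.txt");

    // searchParams.get() is typed `string | null`; ensure() gives us a plain
    // `string`, or throws if the "path" parameter is absent.
    const filePath: string = ensure(searchParams.get("path")); // "/tmp/a.txt"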

+ 23 - 38
desktop/src/main/utils-electron.ts → desktop/src/main/utils/electron.ts

@@ -1,14 +1,35 @@
 import shellescape from "any-shell-escape";
-import { shell } from "electron"; /* TODO(MR): Why is this not in /main? */
 import { app } from "electron/main";
 import { exec } from "node:child_process";
 import path from "node:path";
 import { promisify } from "node:util";
-import log from "./log";
+import log from "../log";
 
 /** `true` if the app is running in development mode. */
 export const isDev = !app.isPackaged;
 
+/**
+ * Convert a file system {@link platformPath} that uses the local system
+ * specific path separators into a path that uses POSIX file separators.
+ *
+ * For all paths that we persist or pass over the IPC boundary, we always use
+ * POSIX paths, even on Windows.
+ *
+ * Windows recognizes both forward slashes and backslashes. This also works
+ * with drive letters: c:\foo\bar and c:/foo/bar are both valid.
+ *
+ * > Almost all paths passed to Windows APIs are normalized. During
+ * > normalization, Windows performs the following steps: ... All forward
+ * > slashes (/) are converted into the standard Windows separator, the back
+ * > slash (\).
+ * >
+ * > https://learn.microsoft.com/en-us/dotnet/standard/io/file-path-formats
+ */
+export const posixPath = (platformPath: string) =>
+    path.sep == path.posix.sep
+        ? platformPath
+        : platformPath.split(path.sep).join(path.posix.sep);
+
 /**
  * Run a shell command asynchronously.
  *
@@ -41,39 +62,3 @@ export const execAsync = (command: string | string[]) => {
 };
 
 const execAsync_ = promisify(exec);
-
-/**
- * Open the given {@link dirPath} in the system's folder viewer.
- *
- * For example, on macOS this'll open {@link dirPath} in Finder.
- */
-export const openDirectory = async (dirPath: string) => {
-    const res = await shell.openPath(path.normalize(dirPath));
-    // shell.openPath resolves with a string containing the error message
-    // corresponding to the failure if a failure occurred, otherwise "".
-    if (res) throw new Error(`Failed to open directory ${dirPath}: res`);
-};
-
-/**
- * Open the app's log directory in the system's folder viewer.
- *
- * @see {@link openDirectory}
- */
-export const openLogDirectory = () => openDirectory(logDirectoryPath());
-
-/**
- * Return the path where the logs for the app are saved.
- *
- * [Note: Electron app paths]
- *
- * By default, these paths are at the following locations:
- *
- * - macOS: `~/Library/Application Support/ente`
- * - Linux: `~/.config/ente`
- * - Windows: `%APPDATA%`, e.g. `C:\Users\<username>\AppData\Local\ente`
- * - Windows: C:\Users\<you>\AppData\Local\<Your App Name>
- *
- * https://www.electronjs.org/docs/latest/api/app
- *
- */
-const logDirectoryPath = () => app.getPath("logs");
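
A quick illustration of `posixPath` above (the example paths are made up): on Windows, where `path.sep` is a backslash, separators are rewritten; on POSIX systems the input is returned unchanged.

    // On Windows (path.sep === "\\"):
    posixPath("C:\\Users\\me\\Photos"); // => "C:/Users/me/Photos"

    // On macOS or Linux (path.sep === "/"), the path passes through as-is:
    posixPath("/home/me/Photos"); // => "/home/me/Photos"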

Some files are not shown because too many files were changed in this diff.