Merge branch 'main' into f-droid

Commit 0e33299863: 391 changed files with 13,154 additions and 11,374 deletions.
.github/workflows/auth-crowdin.yml (2 changes)

@@ -30,7 +30,7 @@ jobs:
           upload_sources: true
           upload_translations: false
           download_translations: true
-          localization_branch_name: crowdin-translations-auth
+          localization_branch_name: translations/auth
           create_pull_request: true
           skip_untranslated_strings: true
           pull_request_title: "[auth] New translations"
.github/workflows/auth-release.yml (6 changes)

@@ -17,8 +17,8 @@ name: "Release (auth)"
 # We use a suffix like `-test` to indicate that these are test tags, and that
 # they belong to a pre-release.
 #
-# If you need to do multiple tests, add a +x at the end of the tag. e.g.
-# `auth-v1.2.3-test+1`.
+# If you need to do multiple tests, add a .x at the end of the tag. e.g.
+# `auth-v1.2.3-test.1`.
 #
 # Once the testing is done, also delete the tag(s) please.

@@ -85,7 +85,7 @@ jobs:
             - name: Install dependencies for desktop build
               run: |
                   sudo apt-get update -y
-                  sudo apt-get install -y libsecret-1-dev libsodium-dev libwebkit2gtk-4.0-dev libfuse2 ninja-build libgtk-3-dev dpkg-dev pkg-config rpm libsqlite3-dev locate appindicator3-0.1 libappindicator3-dev libffi-dev libtiff5
+                  sudo apt-get install -y libsecret-1-dev libsodium-dev libwebkit2gtk-4.0-dev libfuse2 ninja-build libgtk-3-dev dpkg-dev pkg-config rpm patchelf libsqlite3-dev locate appindicator3-0.1 libappindicator3-dev libffi-dev libtiff5
                   sudo updatedb --localpaths='/usr/lib/x86_64-linux-gnu'

             - name: Install appimagetool
.github/workflows/desktop-lint.yml (new file, 30 lines)

@@ -0,0 +1,30 @@
name: "Lint (desktop)"

on:
    # Run on every push to a branch other than main that changes desktop/
    push:
        branches-ignore: [main, "deploy/**"]
        paths:
            - "desktop/**"
            - ".github/workflows/desktop-lint.yml"

jobs:
    lint:
        runs-on: ubuntu-latest
        defaults:
            run:
                working-directory: desktop
        steps:
            - name: Checkout code
              uses: actions/checkout@v4

            - name: Setup node and enable yarn caching
              uses: actions/setup-node@v4
              with:
                  node-version: 20
                  cache: "yarn"
                  cache-dependency-path: "desktop/yarn.lock"

            - run: yarn install

            - run: yarn lint
.github/workflows/mobile-crowdin.yml (2 changes)

@@ -30,7 +30,7 @@ jobs:
           upload_sources: true
           upload_translations: false
           download_translations: true
-          localization_branch_name: crowdin-translations-mobile
+          localization_branch_name: translations/mobile
           create_pull_request: true
           skip_untranslated_strings: true
           pull_request_title: "[mobile] New translations"

[mobile release workflow; file name not captured]

@@ -54,3 +54,4 @@ jobs:
           packageName: io.ente.photos
           releaseFiles: mobile/build/app/outputs/bundle/playstoreRelease/app-playstore-release.aab
           track: internal
+          changesNotSentForReview: true
.github/workflows/web-crowdin-push.yml (new file, 34 lines)

@@ -0,0 +1,34 @@
name: "Push Crowdin translations (web)"

# This is a variant of web-crowdin.yml that uploads the translated strings in
# addition to the source strings.
#
# This allows us to change the strings in our source code for an automated
# refactoring (e.g. renaming a key), and then run this workflow to update the
# data in Crowdin taking our source code as the source of truth.

on:
    # Trigger manually, or using
    # `gh workflow run web-crowdin-push.yml --ref <my-branch>`
    workflow_dispatch:

jobs:
    push-to-crowdin:
        runs-on: ubuntu-latest

        steps:
            - name: Checkout
              uses: actions/checkout@v4

            - name: Crowdin push
              uses: crowdin/github-action@v1
              with:
                  base_path: "web/"
                  config: "web/crowdin.yml"
                  upload_sources: true
                  upload_translations: true
                  download_translations: false
                  project_id: 569613
              env:
                  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
                  CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }}
.github/workflows/web-crowdin.yml (2 changes)

@@ -36,7 +36,7 @@ jobs:
           upload_sources: true
           upload_translations: false
           download_translations: true
-          localization_branch_name: crowdin-translations-web
+          localization_branch_name: translations/web
           create_pull_request: true
           skip_untranslated_strings: true
           pull_request_title: "[web] New translations"
.github/workflows/web-deploy-accounts.yml (2 changes)

@@ -3,7 +3,7 @@ name: "Deploy (accounts)"
 on:
     push:
         # Run workflow on pushes to the deploy/accounts
-        branches: [deploy/accounts]
+        branches: [deploy/accounts, deploy-f/accounts]

 jobs:
     deploy:
.github/workflows/web-deploy-cast.yml (2 changes)

@@ -3,7 +3,7 @@ name: "Deploy (cast)"
 on:
     push:
         # Run workflow on pushes to the deploy/cast
-        branches: [deploy/cast]
+        branches: [deploy/cast, deploy-f/cast]

 jobs:
     deploy:
[git submodule; file name not captured]

@@ -1 +1 @@
-Subproject commit 8e7701d6a40462733043f54b3849faf35af70a83
+Subproject commit 8a3731352af133a02223a6c7b1f37c4abb096af0
[auth Podfile.lock]

@@ -87,7 +87,7 @@ PODS:
   - SDWebImage/Core (5.19.0)
   - Sentry/HybridSDK (8.21.0):
     - SentryPrivate (= 8.21.0)
-  - sentry_flutter (0.0.1):
+  - sentry_flutter (7.19.0):
     - Flutter
     - FlutterMacOS
     - Sentry/HybridSDK (= 8.21.0)

@@ -249,7 +249,7 @@ SPEC CHECKSUMS:
   ReachabilitySwift: 5ae15e16814b5f9ef568963fb2c87aeb49158c66
   SDWebImage: 981fd7e860af070920f249fd092420006014c3eb
   Sentry: ebc12276bd17613a114ab359074096b6b3725203
-  sentry_flutter: dff1df05dc39c83d04f9330b36360fc374574c5e
+  sentry_flutter: 88ebea3f595b0bc16acc5bedacafe6d60c12dcd5
   SentryPrivate: d651efb234cf385ec9a1cdd3eff94b5e78a0e0fe
   share_plus: c3fef564749587fc939ef86ffb283ceac0baf9f5
   shared_preferences_foundation: b4c3b4cddf1c21f02770737f147a3f5da9d39695

@@ -263,4 +263,4 @@ SPEC CHECKSUMS:

 PODFILE CHECKSUM: b4e3a7eabb03395b66e81fc061789f61526ee6bb

-COCOAPODS: 1.14.3
+COCOAPODS: 1.15.2
[localization: Arabic (.arb)]

@@ -20,7 +20,6 @@
     "codeIssuerHint": "المصدِّر",
     "codeSecretKeyHint": "الرمز السري",
     "codeAccountHint": "الحساب (you@domain.com)",
-    "accountKeyType": "نوع المفتاح",
     "sessionExpired": "انتهت صلاحية الجلسة",
     "@sessionExpired": {
        "description": "Title of the dialog when the users current session is invalid/expired"
[localization: German (.arb)]

@@ -20,7 +20,6 @@
     "codeIssuerHint": "Aussteller",
     "codeSecretKeyHint": "Geheimer Schlüssel",
     "codeAccountHint": "Konto (you@domain.com)",
-    "accountKeyType": "Art des Schlüssels",
     "sessionExpired": "Sitzung abgelaufen",
     "@sessionExpired": {
        "description": "Title of the dialog when the users current session is invalid/expired"

@@ -78,12 +77,14 @@
     "data": "Datei",
     "importCodes": "Codes importieren",
     "importTypePlainText": "Klartext",
     "importTypeEnteEncrypted": "Verschlüsselter Ente-Export",
     "passwordForDecryptingExport": "Passwort um den Export zu entschlüsseln",
     "passwordEmptyError": "Passwort kann nicht leer sein",
     "importFromApp": "Importiere Codes von {appName}",
     "importGoogleAuthGuide": "Exportiere deine Accounts von Google Authenticator zu einem QR-Code, durch die \"Konten übertragen\" Option. Scanne den QR-Code danach mit einem anderen Gerät.\n\nTipp: Du kannst die Kamera eines Laptops verwenden, um ein Foto den dem QR-Code zu erstellen.",
     "importSelectJsonFile": "Wähle eine JSON-Datei",
     "importSelectAppExport": "{appName} Exportdatei auswählen",
     "importEnteEncGuide": "Wähle die von Ente exportierte, verschlüsselte JSON-Datei",
     "importRaivoGuide": "Verwenden Sie die Option \"Export OTPs to Zip archive\" in den Raivo-Einstellungen.\n\nEntpacken Sie die Zip-Datei und importieren Sie die JSON-Datei.",
     "importBitwardenGuide": "Verwenden Sie die Option \"Tresor exportieren\" innerhalb der Bitwarden Tools und importieren Sie die unverschlüsselte JSON-Datei.",
     "importAegisGuide": "Verwenden Sie die Option \"Tresor exportieren\" in den Aegis-Einstellungen.\n\nFalls Ihr Tresor verschlüsselt ist, müssen Sie das Passwort für den Tresor eingeben, um ihn zu entschlüsseln.",

@@ -121,12 +122,14 @@
     "suggestFeatures": "Features vorschlagen",
     "faq": "FAQ",
     "faq_q_1": "Wie sicher ist Auth?",
     "faq_a_1": "Alle Codes, die du über Auth sicherst, werden Ende-zu-Ende-verschlüsselt gespeichert. Das bedeutet, dass nur du auf deine Codes zugreifen kannst. Unsere Anwendungen sind quelloffen und unsere Kryptografie wurde extern geprüft.",
     "faq_q_2": "Kann ich auf meine Codes auf dem Desktop zugreifen?",
     "faq_a_2": "Sie können auf Ihre Codes im Web via auth.ente.io zugreifen.",
     "faq_q_3": "Wie kann ich Codes löschen?",
     "faq_a_3": "Sie können einen Code löschen, indem Sie auf dem Code nach links wischen.",
     "faq_q_4": "Wie kann ich das Projekt unterstützen?",
     "faq_a_4": "Sie können die Entwicklung dieses Projekts unterstützen, indem Sie unsere Fotos-App auf ente.io abonnieren.",
     "faq_q_5": "Wie kann ich die FaceID-Sperre in Auth aktivieren",
     "faq_a_5": "Sie können FaceID unter Einstellungen → Sicherheit → Sperrbildschirm aktivieren.",
     "somethingWentWrongMessage": "Ein Fehler ist aufgetreten, bitte versuchen Sie es erneut",
     "leaveFamily": "Familie verlassen",

@@ -196,6 +199,9 @@
     "doThisLater": "Auf später verschieben",
     "saveKey": "Schlüssel speichern",
     "save": "Speichern",
+    "send": "Senden",
+    "saveOrSendDescription": "Möchtest du dies in deinem Speicher (standardmäßig im Ordner Downloads) oder an andere Apps senden?",
+    "saveOnlyDescription": "Möchtest du dies in deinem Speicher (standardmäßig im Ordner Downloads) speichern?",
     "back": "Zurück",
     "createAccount": "Account erstellen",
     "passwordStrength": "Passwortstärke: {passwordStrengthValue}",

@@ -343,6 +349,7 @@
     "deleteCodeAuthMessage": "Authentifizieren, um Code zu löschen",
     "showQRAuthMessage": "Authentifizieren, um QR-Code anzuzeigen",
     "confirmAccountDeleteTitle": "Kontolöschung bestätigen",
+    "confirmAccountDeleteMessage": "Dieses Konto ist mit anderen Ente-Apps verknüpft, falls du welche verwendest.\n\nDeine hochgeladenen Daten werden in allen Ente-Apps zur Löschung vorgemerkt und dein Konto wird endgültig gelöscht.",
     "androidBiometricHint": "Identität bestätigen",
     "@androidBiometricHint": {
        "description": "Hint message advising the user how to authenticate with biometrics. It is used on Android side. Maximum 60 characters."
|
|||
"codeIssuerHint": "Issuer",
|
||||
"codeSecretKeyHint": "Secret Key",
|
||||
"codeAccountHint": "Account (you@domain.com)",
|
||||
"accountKeyType": "Type of key",
|
||||
"sessionExpired": "Session expired",
|
||||
"@sessionExpired": {
|
||||
"description": "Title of the dialog when the users current session is invalid/expired"
|
||||
|
|
|
[localization: Spanish (.arb)]

@@ -20,7 +20,6 @@
     "codeIssuerHint": "Emisor",
     "codeSecretKeyHint": "Llave Secreta",
     "codeAccountHint": "Cuenta (tu@dominio.com)",
-    "accountKeyType": "Tipo de llave",
     "sessionExpired": "La sesión ha expirado",
     "@sessionExpired": {
        "description": "Title of the dialog when the users current session is invalid/expired"

@@ -113,6 +112,7 @@
     "copied": "Copiado",
     "pleaseTryAgain": "Por favor, inténtalo nuevamente",
     "existingUser": "Usuario existente",
+    "newUser": "Nuevo a Ente",
     "delete": "Borrar",
     "enterYourPasswordHint": "Ingrese su contraseña",
     "forgotPassword": "Olvidé mi contraseña",

@@ -138,6 +138,8 @@
     "enterCodeHint": "Ingrese el código de seis dígitos de su aplicación de autenticación",
     "lostDeviceTitle": "¿Perdió su dispositivo?",
     "twoFactorAuthTitle": "Autenticación de dos factores",
+    "passkeyAuthTitle": "Verificación de llave de acceso",
+    "verifyPasskey": "Verificar llave de acceso",
     "recoverAccount": "Recuperar cuenta",
     "enterRecoveryKeyHint": "Introduzca su clave de recuperación",
     "recover": "Recuperar",

@@ -191,6 +193,8 @@
     "recoveryKeySaveDescription": "Nosotros no almacenamos esta clave, por favor guarde dicha clave de 24 palabras en un lugar seguro.",
     "doThisLater": "Hacer esto más tarde",
     "saveKey": "Guardar Clave",
+    "save": "Guardar",
+    "send": "Enviar",
     "back": "Atrás",
     "createAccount": "Crear cuenta",
     "passwordStrength": "Fortaleza de la contraseña: {passwordStrengthValue}",

@@ -397,5 +401,8 @@
     "signOutOtherDevices": "Cerrar la sesión de otros dispositivos",
     "doNotSignOut": "No cerrar la sesión",
     "hearUsWhereTitle": "¿Cómo conoció Ente? (opcional)",
-    "hearUsExplanation": "No rastreamos las aplicaciones instaladas. ¡Nos ayudaría si nos dijera dónde nos encontró!"
+    "hearUsExplanation": "No rastreamos las aplicaciones instaladas. ¡Nos ayudaría si nos dijera dónde nos encontró!",
+    "passkey": "Llave de acceso",
+    "developerSettingsWarning": "¿Estás seguro de que quieres modificar los ajustes de desarrollador?",
+    "developerSettings": "Ajustes de desarrollador"
 }
[localization: Farsi (.arb)]

@@ -14,7 +14,6 @@
     "codeIssuerHint": "صادر کننده",
     "codeSecretKeyHint": "کلید مخفی",
     "codeAccountHint": "حساب (you@domain.com)",
-    "accountKeyType": "نوع کلید",
     "sessionExpired": "نشست منقضی شده است",
     "@sessionExpired": {
        "description": "Title of the dialog when the users current session is invalid/expired"

[localization: Finnish (.arb)]

@@ -12,7 +12,6 @@
     "codeIssuerHint": "Myöntäjä",
     "codeSecretKeyHint": "Salainen avain",
     "codeAccountHint": "Tili (sinun@jokinosoite.com)",
-    "accountKeyType": "Avaimen tyyppi",
     "sessionExpired": "Istunto on vanheutunut",
     "@sessionExpired": {
        "description": "Title of the dialog when the users current session is invalid/expired"

[localization: French (.arb)]

@@ -20,7 +20,6 @@
     "codeIssuerHint": "Émetteur",
     "codeSecretKeyHint": "Clé secrète",
     "codeAccountHint": "Compte (vous@exemple.com)",
-    "accountKeyType": "Type de clé",
     "sessionExpired": "Session expirée",
     "@sessionExpired": {
        "description": "Title of the dialog when the users current session is invalid/expired"

[localization: Hebrew (.arb)]

@@ -19,7 +19,6 @@
     "codeIssuerHint": "מנפיק",
     "codeSecretKeyHint": "מפתח סודי",
     "codeAccountHint": "חשבון(you@domain.com)",
-    "accountKeyType": "סוג מפתח",
     "sessionExpired": "זמן החיבור הסתיים",
     "@sessionExpired": {
        "description": "Title of the dialog when the users current session is invalid/expired"

[localization: Italian (.arb)]

@@ -20,7 +20,6 @@
     "codeIssuerHint": "Emittente",
     "codeSecretKeyHint": "Codice segreto",
     "codeAccountHint": "Account (username@dominio.it)",
-    "accountKeyType": "Tipo di chiave",
     "sessionExpired": "Sessione scaduta",
     "@sessionExpired": {
        "description": "Title of the dialog when the users current session is invalid/expired"

[localization: Japanese (.arb)]

@@ -20,7 +20,6 @@
     "codeIssuerHint": "発行者",
     "codeSecretKeyHint": "秘密鍵",
     "codeAccountHint": "アカウント (you@domain.com)",
-    "accountKeyType": "鍵の種類",
     "sessionExpired": "セッションが失効しました",
     "@sessionExpired": {
        "description": "Title of the dialog when the users current session is invalid/expired"

[localization: Georgian (.arb)]

@@ -20,7 +20,6 @@
     "codeIssuerHint": "მომწოდებელი",
     "codeSecretKeyHint": "გასაღები",
     "codeAccountHint": "ანგარიში (you@domain.com)",
-    "accountKeyType": "გასაღების ტიპი",
     "sessionExpired": "სესიის დრო ამოიწურა",
     "@sessionExpired": {
        "description": "Title of the dialog when the users current session is invalid/expired"

[localization: Dutch (.arb)]

@@ -20,7 +20,6 @@
     "codeIssuerHint": "Uitgever",
     "codeSecretKeyHint": "Geheime sleutel",
     "codeAccountHint": "Account (jij@domein.nl)",
-    "accountKeyType": "Type sleutel",
     "sessionExpired": "Sessie verlopen",
     "@sessionExpired": {
        "description": "Title of the dialog when the users current session is invalid/expired"

[localization: Polish (.arb)]

@@ -20,7 +20,6 @@
     "codeIssuerHint": "Wydawca",
     "codeSecretKeyHint": "Tajny klucz",
     "codeAccountHint": "Konto (ty@domena.com)",
-    "accountKeyType": "Rodzaj klucza",
     "sessionExpired": "Sesja wygasła",
     "@sessionExpired": {
        "description": "Title of the dialog when the users current session is invalid/expired"

[localization: Portuguese (.arb)]

@@ -20,7 +20,6 @@
     "codeIssuerHint": "Emissor",
     "codeSecretKeyHint": "Chave secreta",
     "codeAccountHint": "Conta (voce@dominio.com)",
-    "accountKeyType": "Tipo de chave",
     "sessionExpired": "Sessão expirada",
     "@sessionExpired": {
        "description": "Title of the dialog when the users current session is invalid/expired"

[localization: Russian (.arb)]

@@ -20,7 +20,6 @@
     "codeIssuerHint": "Эмитент",
     "codeSecretKeyHint": "Секретный ключ",
     "codeAccountHint": "Аккаунт (you@domain.com)",
-    "accountKeyType": "Тип ключа",
     "sessionExpired": "Сеанс истек",
     "@sessionExpired": {
        "description": "Title of the dialog when the users current session is invalid/expired"

[localization: Swedish (.arb)]

@@ -16,7 +16,6 @@
     "codeIssuerHint": "Utfärdare",
     "codeSecretKeyHint": "Secret Key",
     "codeAccountHint": "Konto (du@domän.com)",
-    "accountKeyType": "Typ av nyckel",
     "sessionExpired": "Sessionen har gått ut",
     "@sessionExpired": {
        "description": "Title of the dialog when the users current session is invalid/expired"

[localization: Tigrinya (.arb)]

@@ -20,7 +20,6 @@
     "codeIssuerHint": "ኣዋጂ",
     "codeSecretKeyHint": "ምስጢራዊ መፍትሕ",
     "codeAccountHint": "ሕሳብ (you@domain.com)",
-    "accountKeyType": "ዓይነት መፍትሕ",
     "sessionExpired": "ክፍለ ግዜኡ ኣኺሉ።",
     "@sessionExpired": {
        "description": "Title of the dialog when the users current session is invalid/expired"

[localization: Turkish (.arb)]

@@ -20,7 +20,6 @@
     "codeIssuerHint": "Yayınlayan",
     "codeSecretKeyHint": "Gizli Anahtar",
     "codeAccountHint": "Hesap (ornek@domain.com)",
-    "accountKeyType": "Anahtar türü",
     "sessionExpired": "Oturum süresi doldu",
     "@sessionExpired": {
        "description": "Title of the dialog when the users current session is invalid/expired"

[localization: Vietnamese (.arb)]

@@ -20,7 +20,6 @@
     "codeIssuerHint": "Nhà phát hành",
     "codeSecretKeyHint": "Khóa bí mật",
     "codeAccountHint": "Tài khoản (bạn@miền.com)",
-    "accountKeyType": "Loại khóa",
     "sessionExpired": "Phiên làm việc đã hết hạn",
     "@sessionExpired": {
        "description": "Title of the dialog when the users current session is invalid/expired"

[localization: Chinese (.arb)]

@@ -20,7 +20,6 @@
     "codeIssuerHint": "发行人",
     "codeSecretKeyHint": "私钥",
     "codeAccountHint": "账户 (you@domain.com)",
-    "accountKeyType": "密钥类型",
     "sessionExpired": "会话已过期",
     "@sessionExpired": {
        "description": "Title of the dialog when the users current session is invalid/expired"
[auth: main.dart]

@@ -37,6 +37,7 @@ import 'package:window_manager/window_manager.dart';
 final _logger = Logger("main");

 Future<void> initSystemTray() async {
+  if (PlatformUtil.isMobile()) return;
   String path = Platform.isWindows
       ? 'assets/icons/auth-icon.ico'
       : 'assets/icons/auth-icon.png';
[auth: code.dart (Code model)]

@@ -2,6 +2,7 @@ import 'package:ente_auth/utils/totp_util.dart';

 class Code {
   static const defaultDigits = 6;
+  static const steamDigits = 5;
   static const defaultPeriod = 30;

   int? generatedID;
@@ -57,36 +58,42 @@ class Code {
       updatedAlgo,
       updatedType,
       updatedCounter,
-      "otpauth://${updatedType.name}/$updateIssuer:$updateAccount?algorithm=${updatedAlgo.name}&digits=$updatedDigits&issuer=$updateIssuer&period=$updatePeriod&secret=$updatedSecret${updatedType == Type.hotp ? "&counter=$updatedCounter" : ""}",
+      "otpauth://${updatedType.name}/$updateIssuer:$updateAccount?algorithm=${updatedAlgo.name}"
+      "&digits=$updatedDigits&issuer=$updateIssuer"
+      "&period=$updatePeriod&secret=$updatedSecret${updatedType == Type.hotp ? "&counter=$updatedCounter" : ""}",
       generatedID: generatedID,
     );
   }

   static Code fromAccountAndSecret(
+    Type type,
     String account,
     String issuer,
     String secret,
+    int digits,
   ) {
     return Code(
       account,
       issuer,
-      defaultDigits,
+      digits,
       defaultPeriod,
       secret,
       Algorithm.sha1,
-      Type.totp,
+      type,
       0,
-      "otpauth://totp/$issuer:$account?algorithm=SHA1&digits=6&issuer=$issuer&period=30&secret=$secret",
+      "otpauth://${type.name}/$issuer:$account?algorithm=SHA1&digits=$digits&issuer=$issuer&period=30&secret=$secret",
     );
   }

   static Code fromRawData(String rawData) {
     Uri uri = Uri.parse(rawData);
+    final issuer = _getIssuer(uri);
+
     try {
       return Code(
         _getAccount(uri),
-        _getIssuer(uri),
-        _getDigits(uri),
+        issuer,
+        _getDigits(uri, issuer),
         _getPeriod(uri),
         getSanitizedSecret(uri.queryParameters['secret']!),
         _getAlgorithm(uri),
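As context for the widened factory above: callers now pass the code's type and digit count explicitly instead of the old hard-wired `Type.totp` and six digits. A minimal usage sketch; the account and secret values are illustrative, everything else comes from the diff:

```dart
// Sketch: constructing a Steam entry via the new signature.
// Positional order (type, account, issuer, secret, digits) follows the diff.
final steamCode = Code.fromAccountAndSecret(
  Type.steam,          // previously hard-coded to Type.totp
  "you@domain.com",    // account (illustrative)
  "Steam",             // issuer
  "JBSWY3DPEHPK3PXP",  // base32 secret (illustrative)
  Code.steamDigits,    // previously hard-coded to defaultDigits (6)
);
```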
@@ -140,10 +147,13 @@ class Code {
     }
   }

-  static int _getDigits(Uri uri) {
+  static int _getDigits(Uri uri, String issuer) {
     try {
       return int.parse(uri.queryParameters['digits']!);
     } catch (e) {
+      if (issuer.toLowerCase() == "steam") {
+        return steamDigits;
+      }
       return defaultDigits;
     }
   }
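Restating the fallback rule introduced here: an explicit `digits` query parameter wins; otherwise Steam issuers get 5 digits and everything else the 6-digit default. A behaviour-equivalent, runnable sketch (using `int.tryParse` instead of the diff's try/catch; `resolveDigits` is an illustrative name, the real method lives on `Code`):

```dart
// Equivalent digit resolution (assumes steamDigits = 5, defaultDigits = 6).
int resolveDigits(Uri uri, String issuer) {
  final explicit = int.tryParse(uri.queryParameters['digits'] ?? '');
  if (explicit != null) return explicit;
  // Steam's otpauth URIs typically omit `digits`; Steam Guard codes are 5 chars.
  return issuer.toLowerCase() == 'steam' ? 5 : 6;
}

void main() {
  final steam = Uri.parse("otpauth://steam/Steam:you@domain.com?secret=AAAA");
  final totp = Uri.parse("otpauth://totp/GitHub:you@domain.com?secret=AAAA");
  print(resolveDigits(steam, "Steam"));  // 5
  print(resolveDigits(totp, "GitHub"));  // 6
}
```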
@@ -186,6 +196,8 @@ class Code {
   static Type _getType(Uri uri) {
     if (uri.host == "totp") {
       return Type.totp;
+    } else if (uri.host == "steam") {
+      return Type.steam;
     } else if (uri.host == "hotp") {
       return Type.hotp;
     }
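The type is selected by the otpauth URI's host segment (the part right after `otpauth://`), so after this change all three hosts parse. A small runnable illustration; the URIs are examples, not values from the diff:

```dart
void main() {
  // Uri.host is what _getType inspects.
  for (final raw in [
    "otpauth://totp/GitHub:you@domain.com?secret=AAAA",
    "otpauth://steam/Steam:you@domain.com?secret=AAAA",
    "otpauth://hotp/Bank:you@domain.com?secret=AAAA&counter=1",
  ]) {
    print(Uri.parse(raw).host); // totp, steam, hotp
  }
}
```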
@@ -223,6 +235,9 @@ class Code {
 enum Type {
   totp,
   hotp,
+  steam;
+
+  bool get isTOTPCompatible => this == totp || this == steam;
 }

 enum Algorithm {
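Why `isTOTPCompatible` exists: Steam codes are time-based like regular TOTP, so every call site that previously checked `type == Type.totp` (the timer UI and next-code preview changed further below) can treat `steam` identically, while `hotp` stays counter-based. A runnable sketch built only from the enum in the diff:

```dart
enum Type {
  totp,
  hotp,
  steam;

  bool get isTOTPCompatible => this == totp || this == steam;
}

void main() {
  // steam rides the same time-window path as totp; hotp cannot
  // precompute a "next code" since it needs a counter bump.
  assert(Type.steam.isTOTPCompatible);
  assert(Type.totp.isTOTPCompatible);
  assert(!Type.hotp.isTOTPCompatible);
}
```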
[auth: setup_enter_secret_key_page.dart (SetupEnterSecretKeyPage)]

@@ -61,6 +61,8 @@ class _SetupEnterSecretKeyPageState extends State<SetupEnterSecretKeyPage> {
       },
       decoration: InputDecoration(
         hintText: l10n.codeIssuerHint,
+        floatingLabelBehavior: FloatingLabelBehavior.auto,
+        labelText: l10n.codeIssuerHint,
       ),
       controller: _issuerController,
       autofocus: true,

@@ -78,6 +80,8 @@ class _SetupEnterSecretKeyPageState extends State<SetupEnterSecretKeyPage> {
       },
       decoration: InputDecoration(
         hintText: l10n.codeSecretKeyHint,
+        floatingLabelBehavior: FloatingLabelBehavior.auto,
+        labelText: l10n.codeSecretKeyHint,
         suffixIcon: IconButton(
           onPressed: () {
             setState(() {

@@ -105,12 +109,12 @@ class _SetupEnterSecretKeyPageState extends State<SetupEnterSecretKeyPage> {
       },
       decoration: InputDecoration(
         hintText: l10n.codeAccountHint,
+        floatingLabelBehavior: FloatingLabelBehavior.auto,
+        labelText: l10n.codeAccountHint,
       ),
       controller: _accountController,
     ),
-    const SizedBox(
-      height: 40,
-    ),
+    const SizedBox(height: 40),
     SizedBox(
       width: 400,
       child: OutlinedButton(

@@ -152,6 +156,7 @@ class _SetupEnterSecretKeyPageState extends State<SetupEnterSecretKeyPage> {
     final account = _accountController.text.trim();
     final issuer = _issuerController.text.trim();
     final secret = _secretController.text.trim().replaceAll(' ', '');
+    final isStreamCode = issuer.toLowerCase() == "steam";
     if (widget.code != null && widget.code!.secret != secret) {
       ButtonResult? result = await showChoiceActionSheet(
         context,

@@ -168,9 +173,11 @@ class _SetupEnterSecretKeyPageState extends State<SetupEnterSecretKeyPage> {
     }
     final Code newCode = widget.code == null
         ? Code.fromAccountAndSecret(
+            isStreamCode ? Type.steam : Type.totp,
             account,
             issuer,
             secret,
+            isStreamCode ? Code.steamDigits : Code.defaultDigits,
           )
         : widget.code!.copyWith(
             account: account,
[auth: code_widget.dart (CodeWidget)]

@@ -53,7 +53,7 @@ class _CodeWidgetState extends State<CodeWidget> {
     String newCode = _getCurrentOTP();
     if (newCode != _currentCode.value) {
       _currentCode.value = newCode;
-      if (widget.code.type == Type.totp) {
+      if (widget.code.type.isTOTPCompatible) {
         _nextCode.value = _getNextTotp();
       }
     }

@@ -78,7 +78,7 @@ class _CodeWidgetState extends State<CodeWidget> {
     _shouldShowLargeIcon = PreferenceService.instance.shouldShowLargeIcons();
     if (!_isInitialized) {
       _currentCode.value = _getCurrentOTP();
-      if (widget.code.type == Type.totp) {
+      if (widget.code.type.isTOTPCompatible) {
         _nextCode.value = _getNextTotp();
       }
       _isInitialized = true;

@@ -213,7 +213,7 @@ class _CodeWidgetState extends State<CodeWidget> {
     crossAxisAlignment: CrossAxisAlignment.start,
     mainAxisAlignment: MainAxisAlignment.center,
     children: [
-      if (widget.code.type == Type.totp)
+      if (widget.code.type.isTOTPCompatible)
         CodeTimerProgress(
           period: widget.code.period,
         ),

@@ -263,7 +263,7 @@ class _CodeWidgetState extends State<CodeWidget> {
         },
       ),
     ),
-    widget.code.type == Type.totp
+    widget.code.type.isTOTPCompatible
         ? GestureDetector(
             onTap: () {
               _copyNextToClipboard();

@@ -481,7 +481,7 @@ class _CodeWidgetState extends State<CodeWidget> {

   String _getNextTotp() {
     try {
-      assert(widget.code.type == Type.totp);
+      assert(widget.code.type.isTOTPCompatible);
       return getNextTotp(widget.code);
     } catch (e) {
       return context.l10n.error;
[auth: Bitwarden import]

@@ -92,9 +92,11 @@ Future<int?> _processBitwardenExportFile(
   var account = item['login']['username'];

   code = Code.fromAccountAndSecret(
+    Type.totp,
     account,
     issuer,
     totp,
+    Code.defaultDigits,
   );
 }
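The import paths now thread the type and digit count through explicitly; Bitwarden exports are plain TOTP, so the defaults are passed. For contrast, a hypothetical importer that recognises Steam entries would pass the Steam-specific values instead (a sketch reusing the names from the diff, not code from this commit):

```dart
// Hypothetical Steam-aware importer call; `account`, `issuer`, and
// `secret` stand in for values parsed from the export file.
final code = Code.fromAccountAndSecret(
  Type.steam,
  account,
  issuer,
  secret,
  Code.steamDigits,
);
```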
auth/lib/utils/totp_util.dart

@@ -3,7 +3,7 @@ import 'package:flutter/foundation.dart';
 import 'package:otp/otp.dart' as otp;

 String getOTP(Code code) {
-  if(code.type == Type.hotp) {
+  if (code.type == Type.hotp) {
     return _getHOTPCode(code);
   }
   return otp.OTP.generateTOTPCodeString(

@@ -60,4 +60,4 @@ String safeDecode(String value) {
     debugPrint("Failed to decode $e");
     return value;
   }
-}
+}
[auth: Linux RPM packaging config]

@@ -11,7 +11,7 @@ display_name: Auth

 requires:
   - libsqlite3x
-  - webkit2gtk-4.0
+  - webkit2gtk4.0
   - libsodium
   - libsecret
   - libappindicator
auth/pubspec.lock

@@ -293,9 +293,9 @@ packages:
     dependency: "direct main"
     description:
       path: "packages/desktop_webview_window"
-      ref: HEAD
-      resolved-ref: "8cbbf9cd6efcfee5e0f420a36f7f8e7e64b667a1"
-      url: "https://github.com/MixinNetwork/flutter-plugins"
+      ref: fix-webkit-version
+      resolved-ref: fe2223e4edfecdbb3a97bb9e3ced73db4ae9d979
+      url: "https://github.com/ente-io/flutter-desktopwebview-fork"
     source: git
     version: "0.2.4"
   device_info_plus:
auth/pubspec.yaml

@@ -1,6 +1,6 @@
 name: ente_auth
 description: ente two-factor authenticator
-version: 2.0.55+255
+version: 2.0.57+257
 publish_to: none

 environment:

@@ -20,7 +20,8 @@ dependencies:
   convert: ^3.1.1
   desktop_webview_window:
     git:
-      url: https://github.com/MixinNetwork/flutter-plugins
+      url: https://github.com/ente-io/flutter-desktopwebview-fork
+      ref: fix-webkit-version
       path: packages/desktop_webview_window
   device_info_plus: ^9.1.1
   dio: ^5.4.0
cli/README.md

@@ -36,7 +36,8 @@ ente --help

 ### Accounts

-If you wish, you can add multiple accounts (your own and that of your family members) and export all data using this tool.
+If you wish, you can add multiple accounts (your own and that of your family
+members) and export all data using this tool.

 #### Add an account

@@ -44,6 +45,12 @@ If you wish, you can add multiple accounts (your own and that of your family mem
 ente account add
 ```

+> [!NOTE]
+>
+> `ente account add` does not create new accounts, it just adds pre-existing
+> accounts to the list of accounts that the CLI knows about so that you can use
+> them for other actions.
+
 #### List accounts

 ```shell
cli/cmd/account.go

@@ -27,7 +27,8 @@ var listAccCmd = &cobra.Command{
 // Subcommand for 'account add'
 var addAccCmd = &cobra.Command{
 	Use:   "add",
-	Short: "Add a new account",
+	Short: "login into existing account",
+	Long:  "Use this command to add an existing account to cli. For creating a new account, use the mobile,web or desktop app",
 	Run: func(cmd *cobra.Command, args []string) {
 		recoverWithLog()
 		ctrl.AddAccount(context.Background())
cli/docs/generated/ente.md (2 changes, generated)

@@ -25,4 +25,4 @@ ente [flags]
 * [ente export](ente_export.md) - Starts the export process
 * [ente version](ente_version.md) - Prints the current version

-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

cli/docs/generated/ente_account.md (4 changes, generated)

@@ -11,9 +11,9 @@ Manage account settings
 ### SEE ALSO

 * [ente](ente.md) - CLI tool for exporting your photos from ente.io
-* [ente account add](ente_account_add.md) - Add a new account
+* [ente account add](ente_account_add.md) - login into existing account
 * [ente account get-token](ente_account_get-token.md) - Get token for an account for a specific app
 * [ente account list](ente_account_list.md) - list configured accounts
 * [ente account update](ente_account_update.md) - Update an existing account's export directory

-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

cli/docs/generated/ente_account_add.md (8 changes, generated)

@@ -1,6 +1,10 @@
 ## ente account add

-Add a new account
+login into existing account

 ### Synopsis

+Use this command to add an existing account to cli. For creating a new account, use the mobile,web or desktop app
+
 ```
 ente account add [flags]

@@ -16,4 +20,4 @@ ente account add [flags]
 * [ente account](ente_account.md) - Manage account settings

-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

cli/docs/generated/ente_account_get-token.md (2 changes, generated)

@@ -18,4 +18,4 @@ ente account get-token [flags]
 * [ente account](ente_account.md) - Manage account settings

-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

cli/docs/generated/ente_account_list.md (2 changes, generated)

@@ -16,4 +16,4 @@ ente account list [flags]
 * [ente account](ente_account.md) - Manage account settings

-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

cli/docs/generated/ente_account_update.md (2 changes, generated)

@@ -19,4 +19,4 @@ ente account update [flags]
 * [ente account](ente_account.md) - Manage account settings

-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

cli/docs/generated/ente_admin.md (2 changes, generated)

@@ -21,4 +21,4 @@ Commands for admin actions like disable or enabling 2fa, bumping up the storage
 * [ente admin list-users](ente_admin_list-users.md) - List all users
 * [ente admin update-subscription](ente_admin_update-subscription.md) - Update subscription for user

-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

cli/docs/generated/ente_admin_delete-user.md (2 changes, generated)

@@ -18,4 +18,4 @@ ente admin delete-user [flags]
 * [ente admin](ente_admin.md) - Commands for admin actions

-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

cli/docs/generated/ente_admin_disable-2fa.md (2 changes, generated)

@@ -18,4 +18,4 @@ ente admin disable-2fa [flags]
 * [ente admin](ente_admin.md) - Commands for admin actions

-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

cli/docs/generated/ente_admin_get-user-id.md (2 changes, generated)

@@ -18,4 +18,4 @@ ente admin get-user-id [flags]
 * [ente admin](ente_admin.md) - Commands for admin actions

-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

cli/docs/generated/ente_admin_list-users.md (2 changes, generated)

@@ -17,4 +17,4 @@ ente admin list-users [flags]
 * [ente admin](ente_admin.md) - Commands for admin actions

-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

cli/docs/generated/ente_admin_update-subscription.md (2 changes, generated)

@@ -23,4 +23,4 @@ ente admin update-subscription [flags]
 * [ente admin](ente_admin.md) - Commands for admin actions

-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

cli/docs/generated/ente_auth.md (2 changes, generated)

@@ -13,4 +13,4 @@ Authenticator commands
 * [ente](ente.md) - CLI tool for exporting your photos from ente.io
 * [ente auth decrypt](ente_auth_decrypt.md) - Decrypt authenticator export

-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

cli/docs/generated/ente_auth_decrypt.md (2 changes, generated)

@@ -16,4 +16,4 @@ ente auth decrypt [input] [output] [flags]
 * [ente auth](ente_auth.md) - Authenticator commands

-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

cli/docs/generated/ente_export.md (2 changes, generated)

@@ -16,4 +16,4 @@ ente export [flags]
 * [ente](ente.md) - CLI tool for exporting your photos from ente.io

-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024

cli/docs/generated/ente_version.md (2 changes, generated)

@@ -16,4 +16,4 @@ ente version [flags]
 * [ente](ente.md) - CLI tool for exporting your photos from ente.io

-###### Auto generated by spf13/cobra on 14-Mar-2024
+###### Auto generated by spf13/cobra on 6-May-2024
[cli: account controller (Go)]

@@ -59,7 +59,7 @@ func (c *ClICtrl) AddAccount(cxt context.Context) {
 		authResponse, flowErr = c.validateTOTP(cxt, authResponse)
 	}
 	if authResponse.EncryptedToken == "" || authResponse.KeyAttributes == nil {
-		panic("no encrypted token or keyAttributes")
+		log.Fatalf("missing key attributes or token.\nNote: Please use the mobile,web or desktop app to create a new account.\nIf you are trying to login to an existing account, report a bug.")
 	}
 	secretInfo, decErr := c.decryptAccSecretInfo(cxt, authResponse, keyEncKey)
 	if decErr != nil {
desktop/.eslintrc.js

@@ -1,26 +1,36 @@
 /* eslint-env node */
 module.exports = {
-    root: true,
     extends: [
         "eslint:recommended",
         "plugin:@typescript-eslint/eslint-recommended",
-        /* What we really want eventually */
-        // "plugin:@typescript-eslint/strict-type-checked",
-        // "plugin:@typescript-eslint/stylistic-type-checked",
+        "plugin:@typescript-eslint/strict-type-checked",
+        "plugin:@typescript-eslint/stylistic-type-checked",
     ],
+    /* Temporarily add a global
+       Enhancement: Remove me */
+    globals: {
+        NodeJS: "readonly",
+    },
     plugins: ["@typescript-eslint"],
     parser: "@typescript-eslint/parser",
     parserOptions: {
         project: true,
     },
+    root: true,
     ignorePatterns: [".eslintrc.js", "app", "out", "dist"],
     env: {
         es2022: true,
         node: true,
     },
+    rules: {
+        /* Allow numbers to be used in template literals */
+        "@typescript-eslint/restrict-template-expressions": [
+            "error",
+            {
+                allowNumber: true,
+            },
+        ],
+        /* Allow void expressions as the entire body of an arrow function */
+        "@typescript-eslint/no-confusing-void-expression": [
+            "error",
+            {
+                ignoreArrowShorthand: true,
+            },
+        ],
+    },
 };
desktop/.github/workflows/build.yml (deleted, 55 lines)

@@ -1,55 +0,0 @@
name: Build/release

on:
    push:
        tags:
            - v*

jobs:
    release:
        runs-on: ${{ matrix.os }}

        strategy:
            matrix:
                os: [macos-latest, ubuntu-latest, windows-latest]

        steps:
            - name: Check out Git repository
              uses: actions/checkout@v3
              with:
                  submodules: recursive

            - name: Install Node.js, NPM and Yarn
              uses: actions/setup-node@v3
              with:
                  node-version: 20

            - name: Prepare for app notarization
              if: startsWith(matrix.os, 'macos')
              # Import Apple API key for app notarization on macOS
              run: |
                  mkdir -p ~/private_keys/
                  echo '${{ secrets.api_key }}' > ~/private_keys/AuthKey_${{ secrets.api_key_id }}.p8

            - name: Install libarchive-tools for pacman build # Related https://github.com/electron-userland/electron-builder/issues/4181
              if: startsWith(matrix.os, 'ubuntu')
              run: sudo apt-get install libarchive-tools

            - name: Ente Electron Builder Action
              uses: ente-io/action-electron-builder@v1.0.0
              with:
                  # GitHub token, automatically provided to the action
                  # (No need to define this secret in the repo settings)
                  github_token: ${{ secrets.github_token }}

                  # If the commit is tagged with a version (e.g. "v1.0.0"),
                  # release the app after building
                  release: ${{ startsWith(github.ref, 'refs/tags/v') }}

                  mac_certs: ${{ secrets.mac_certs }}
                  mac_certs_password: ${{ secrets.mac_certs_password }}
              env:
                  # macOS notarization API key
                  API_KEY_ID: ${{ secrets.api_key_id }}
                  API_KEY_ISSUER_ID: ${{ secrets.api_key_issuer_id}}
                  USE_HARD_LINKS: false
desktop/.github/workflows/desktop-draft-release.yml (new file, 70 lines)

@@ -0,0 +1,70 @@
name: "Draft release"

# Build the desktop/draft-release branch and update the existing draft release
# with the resultant artifacts.
#
# This is meant for doing tests that require the app to be signed and packaged.
# Such releases should not be published to end users.
#
# Workflow:
#
# 1. Push your changes to the "desktop/draft-release" branch on
#    https://github.com/ente-io/ente.
#
# 2. Create a draft release with tag equal to the version in the `package.json`.
#
# 3. Trigger this workflow. You can trigger it multiple times, each time it'll
#    just update the artifacts attached to the same draft.
#
# 4. Once testing is done delete the draft.

on:
    # Trigger manually or `gh workflow run desktop-draft-release.yml`.
    workflow_dispatch:

jobs:
    release:
        runs-on: macos-latest

        defaults:
            run:
                working-directory: desktop

        steps:
            - name: Checkout code
              uses: actions/checkout@v4
              with:
                  repository: ente-io/ente
                  ref: desktop/draft-release
                  submodules: recursive

            - name: Setup node
              uses: actions/setup-node@v4
              with:
                  node-version: 20

            - name: Install dependencies
              run: yarn install

            - name: Build
              uses: ente-io/action-electron-builder@v1.0.0
              with:
                  package_root: desktop

                  # GitHub token, automatically provided to the action
                  # (No need to define this secret in the repo settings)
                  github_token: ${{ secrets.GITHUB_TOKEN }}

                  # If the commit is tagged with a version (e.g. "v1.0.0"),
                  # release the app after building.
                  release: ${{ startsWith(github.ref, 'refs/tags/v') }}

                  mac_certs: ${{ secrets.MAC_CERTS }}
                  mac_certs_password: ${{ secrets.MAC_CERTS_PASSWORD }}
              env:
                  # macOS notarization credentials key details
                  APPLE_ID: ${{ secrets.APPLE_ID }}
                  APPLE_APP_SPECIFIC_PASSWORD:
                      ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
                  APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
                  USE_HARD_LINKS: false
desktop/.github/workflows/desktop-release.yml (new file, 83 lines)

@@ -0,0 +1,83 @@
name: "Release"

# This will create a new draft release with public artifacts.
#
# Note that a release will only get created if there is an associated tag
# (GitHub releases need a corresponding tag).
#
# The canonical source for this action is in the repository where we keep the
# source code for the Ente Photos desktop app: https://github.com/ente-io/ente
#
# However, it actually lives and runs in the repository that we use for making
# releases: https://github.com/ente-io/photos-desktop
#
# We need two repositories because Electron updater currently doesn't work well
# with monorepos. For more details, see `docs/release.md`.

on:
    push:
        # Run when a tag matching the pattern "v*"" is pushed.
        #
        # See: [Note: Testing release workflows that are triggered by tags].
        tags:
            - "v*"

jobs:
    release:
        runs-on: ${{ matrix.os }}

        defaults:
            run:
                working-directory: desktop

        strategy:
            matrix:
                os: [macos-latest, ubuntu-latest, windows-latest]

        steps:
            - name: Checkout code
              uses: actions/checkout@v4
              with:
                  # Checkout the tag photosd-v1.x.x from the source code
                  # repository when we're invoked for tag v1.x.x on the releases
                  # repository.
                  repository: ente-io/ente
                  ref: photosd-${{ github.ref_name }}
                  submodules: recursive

            - name: Setup node
              uses: actions/setup-node@v4
              with:
                  node-version: 20

            - name: Install dependencies
              run: yarn install

            - name: Install libarchive-tools for pacman build
              if: startsWith(matrix.os, 'ubuntu')
              # See:
              # https://github.com/electron-userland/electron-builder/issues/4181
              run: sudo apt-get install libarchive-tools

            - name: Build
              uses: ente-io/action-electron-builder@v1.0.0
              with:
                  package_root: desktop

                  # GitHub token, automatically provided to the action
                  # (No need to define this secret in the repo settings)
                  github_token: ${{ secrets.GITHUB_TOKEN }}

                  # If the commit is tagged with a version (e.g. "v1.0.0"),
                  # release the app after building.
                  release: ${{ startsWith(github.ref, 'refs/tags/v') }}

                  mac_certs: ${{ secrets.MAC_CERTS }}
                  mac_certs_password: ${{ secrets.MAC_CERTS_PASSWORD }}
              env:
                  # macOS notarization credentials key details
                  APPLE_ID: ${{ secrets.APPLE_ID }}
                  APPLE_APP_SPECIFIC_PASSWORD:
                      ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
                  APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
                  USE_HARD_LINKS: false
desktop/CHANGELOG.md

@@ -1,5 +1,13 @@
 # CHANGELOG

+## v1.7.0 (Unreleased)
+
+v1.7 is a major rewrite to improve the security of our app. We have enabled
+sandboxing and disabled node integration for the renderer process. All this
+required restructuring our IPC mechanisms, which resulted in a lot of under the
+hood changes. The outcome is a more secure app that also uses the latest and
+greatest Electron recommendations.
+
 ## v1.6.63

 ### New
desktop/README.md

@@ -10,12 +10,6 @@ To know more about Ente, see [our main README](../README.md) or visit

 ## Building from source

-> [!CAUTION]
->
-> We're improving the security of the desktop app further by migrating to
-> Electron's sandboxing and contextIsolation. These updates are still WIP and
-> meanwhile the instructions below might not fully work on the main branch.
-
 Fetch submodules

 ```sh
[desktop docs: dependencies and architecture]

@@ -13,7 +13,7 @@ Electron embeds Chromium and Node.js in the generated app's binary. The
 generated app thus consists of two separate processes - the _main_ process, and
 a _renderer_ process.

-- The _main_ process is runs the embedded node. This process can deal with the
+- The _main_ process runs the embedded node. This process can deal with the
   host OS - it is conceptually like a `node` repl running on your machine. In
   our case, the TypeScript code (in the `src/` directory) gets transpiled by
   `tsc` into JavaScript in the `build/app/` directory, which gets bundled in

@@ -90,16 +90,19 @@ Some extra ones specific to the code here are:
   Unix commands in our `package.json` scripts. This allows us to use the same
   commands (like `ln`) across different platforms like Linux and Windows.

+- [@tsconfig/recommended](https://github.com/tsconfig/bases) gives us a base
+  tsconfig for the Node.js version that our current Electron version uses.
+
 ## Functionality

 ### Format conversion

-The main tool we use is for arbitrary conversions is FFMPEG. To bundle a
+The main tool we use for arbitrary conversions is ffmpeg. To bundle a
 (platform specific) static binary of ffmpeg with our app, we use
 [ffmpeg-static](https://github.com/eugeneware/ffmpeg-static).

 > There is a significant (~20x) speed difference between using the compiled
-> FFMPEG binary and using the WASM one (that our renderer process already has).
+> ffmpeg binary and using the wasm one (that our renderer process already has).
 > Which is why we bundle it to speed up operations on the desktop app.

 In addition, we also bundle a static Linux binary of imagemagick in our extra
desktop/docs/release.md

@@ -1,43 +1,47 @@
 ## Releases

-> [!NOTE]
->
-> TODO(MR): This document needs to be audited and changed as we do the first
-> release from this new monorepo.
+Conceptually, the release is straightforward: We push a tag, a GitHub workflow
+gets triggered that creates a draft release with artifacts built from that tag.
+We then publish that release. The download links on our website, and existing
+apps already know how to check for the latest GitHub release and update
+accordingly.

-The Github Action that builds the desktop binaries is triggered by pushing a tag
-matching the pattern `photos-desktop-v1.2.3`. This value should match the
-version in `package.json`.
+The complication comes by the fact that Electron Updater (the mechanism that we
+use for auto updates) doesn't work well with monorepos. So we need to keep a
+separate (non-mono) repository just for doing releases.

-So the process for doing a release would be.
+- Source code lives here, in [ente-io/ente](https://github.com/ente-io/ente).

-1. Create a new branch (can be named anything). On this branch, include your
-   changes.
+- Releases are done from
+  [ente-io/photos-desktop](https://github.com/ente-io/photos-desktop).

-2. Mention the changes in `CHANGELOG.md`.
+## Workflow

-3. Changing the `version` in `package.json` to `1.x.x`.
+The workflow is:

-4. Commit and push to remote
+1. Finalize the changes in the source repo.
+
+   - Update the CHANGELOG.
+   - Update the version in `package.json`
+   - `git commit -m "[photosd] Release v1.2.3"`
+   - Open PR, merge into main.
+
+2. Tag the merge commit with a tag matching the pattern `photosd-v1.2.3`, where
+   `1.2.3` is the version in `package.json`

    ```sh
-   git add package.json && git commit -m 'Release v1.x.x'
-   git tag v1.x.x
-   git push && git push --tags
+   git tag photosd-v1.x.x
+   git push origin photosd-v1.x.x
    ```

-This by itself will already trigger a new release. The GitHub action will create
-a new draft release that can then be used as descibed below.
+3. Head over to the releases repository and run the trigger script, passing it
+   the tag _without_ the `photosd-` prefix.

-To wrap up, we also need to merge back these changes into main. So for that,
+   ```sh
+   ./.github/trigger-release.sh v1.x.x
+   ```

-5. Open a PR for the branch that we're working on (where the above tag was
-   pushed from) to get it merged into main.
-
-6. In this PR, also increase the version number for the next release train. That
-   is, supposed we just released `v4.0.1`. Then we'll change the version number
-   in main to `v4.0.2-next.0`. Each pre-release will modify the `next.0` part.
-   Finally, at the time of the next release, this'll become `v4.0.2`.
+## Post build

 The GitHub Action runs on Windows, Linux and macOS. It produces the artifacts
 defined in the `build` value in `package.json`.

@@ -46,29 +50,11 @@ defined in the `build` value in `package.json`.
 - Linux - An AppImage, and 3 other packages (`.rpm`, `.deb`, `.pacman`)
 - macOS - A universal DMG

-Additionally, the GitHub action notarizes the macOS DMG. For this it needs
-credentials provided via GitHub secrets.
+Additionally, the GitHub action notarizes and signs the macOS DMG (For this it
+uses credentials provided via GitHub secrets).

-During the build the Sentry webpack plugin checks to see if SENTRY_AUTH_TOKEN is
-defined. If so, it uploads the sourcemaps for the renderer process to Sentry
-(For our GitHub action, the SENTRY_AUTH_TOKEN is defined as a GitHub secret).
-
-The sourcemaps for the main (node) process are currently not sent to Sentry
-(this works fine in practice since the node process files are not minified, we
-only run `tsc`).
-
-Once the build is done, a draft release with all these artifacts attached is
-created. The build is idempotent, so if something goes wrong and we need to
-re-run the GitHub action, just delete the draft release (if it got created) and
-start a new run by pushing a new tag (if some code changes are required).
-
-If no code changes are required, say the build failed for some transient network
-or sentry issue, we can even be re-run by the build by going to Github Action
-age and rerun from there. This will re-trigger for the same tag.
-
-If everything goes well, we'll have a release on GitHub, and the corresponding
-source maps for the renderer process uploaded to Sentry. There isn't anything
-else to do:
+To rollout the build, we need to publish the draft release. Thereafter,
+everything is automated:

 - The website automatically redirects to the latest release on GitHub when
   people try to download.

@@ -76,7 +62,7 @@ else to do:
 - The file formats with support auto update (Windows `exe`, the Linux AppImage
   and the macOS DMG) also check the latest GitHub release automatically to
   download and apply the update (the rest of the formats don't support auto
-  updates).
+  updates yet).

 - We're not putting the desktop app in other stores currently. It is available
   as a `brew cask`, but we only had to open a PR to add the initial formula,

@@ -87,6 +73,4 @@ else to do:
 We can also publish the draft releases by checking the "pre-release" option.
 Such releases don't cause any of the channels (our website, or the desktop app
 auto updater, or brew) to be notified, instead these are useful for giving links
-to pre-release builds to customers. Generally, in the version number for these
-we'll add a label to the version, e.g. the "beta.x" in `1.x.x-beta.x`. This
-should be done both in `package.json`, and what we tag the commit with.
+to pre-release builds to customers.
desktop/package.json

@@ -1,8 +1,9 @@
 {
     "name": "ente",
-    "version": "1.6.63",
+    "version": "1.7.0-beta.0",
     "private": true,
     "description": "Desktop client for Ente Photos",
+    "repository": "github:ente-io/photos-desktop",
     "author": "Ente <code@ente.io>",
     "main": "app/main.js",
     "scripts": {

@@ -15,8 +16,11 @@
         "dev-main": "tsc && electron app/main.js",
         "dev-renderer": "cd ../web && yarn install && yarn dev:photos",
         "postinstall": "electron-builder install-app-deps",
-        "lint": "yarn prettier --check . && eslint --ext .ts src",
-        "lint-fix": "yarn prettier --write . && eslint --fix --ext .ts src"
+        "lint": "yarn prettier --check --log-level warn . && eslint --ext .ts src && yarn tsc",
+        "lint-fix": "yarn prettier --write --log-level warn . && eslint --fix --ext .ts src && yarn tsc"
     },
+    "resolutions": {
+        "jackspeak": "2.1.1"
+    },
     "dependencies": {
         "any-shell-escape": "^0.1",

@@ -34,13 +38,14 @@
         "onnxruntime-node": "^1.17"
     },
     "devDependencies": {
+        "@tsconfig/node20": "^20.1.4",
         "@types/auto-launch": "^5.0",
         "@types/ffmpeg-static": "^3.0",
         "@typescript-eslint/eslint-plugin": "^7",
         "@typescript-eslint/parser": "^7",
         "concurrently": "^8",
-        "electron": "^29",
-        "electron-builder": "^24",
+        "electron": "^30",
+        "electron-builder": "25.0.0-alpha.6",
         "electron-builder-notarize": "^1.5",
         "eslint": "^8",
         "prettier": "^3",
@ -8,18 +8,15 @@
|
|||
*
|
||||
* https://www.electronjs.org/docs/latest/tutorial/process-model#the-main-process
|
||||
*/
|
||||
import { nativeImage } from "electron";
|
||||
import { app, BrowserWindow, Menu, protocol, Tray } from "electron/main";
|
||||
|
||||
import { nativeImage, shell } from "electron/common";
|
||||
import type { WebContents } from "electron/main";
|
||||
import { BrowserWindow, Menu, Tray, app, protocol } from "electron/main";
|
||||
import serveNextAt from "next-electron-server";
|
||||
import { existsSync } from "node:fs";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import {
|
||||
addAllowOriginHeader,
|
||||
handleDownloads,
|
||||
handleExternalLinks,
|
||||
} from "./main/init";
|
||||
import { attachFSWatchIPCHandlers, attachIPCHandlers } from "./main/ipc";
|
||||
import log, { initLogging } from "./main/log";
|
||||
import { createApplicationMenu, createTrayContextMenu } from "./main/menu";
|
||||
|
@ -29,12 +26,12 @@ import { createWatcher } from "./main/services/watch";
|
|||
import { userPreferences } from "./main/stores/user-preferences";
|
||||
import { migrateLegacyWatchStoreIfNeeded } from "./main/stores/watch";
|
||||
import { registerStreamProtocol } from "./main/stream";
|
||||
import { isDev } from "./main/util";
|
||||
import { isDev } from "./main/utils/electron";
|
||||
|
||||
/**
|
||||
* The URL where the renderer HTML is being served from.
|
||||
*/
|
||||
export const rendererURL = "ente://app";
|
||||
const rendererURL = "ente://app";
|
||||
|
||||
/**
|
||||
* We want to hide our window instead of closing it when the user presses the
|
||||
|
@ -130,54 +127,22 @@ const registerPrivilegedSchemes = () => {
|
|||
{
|
||||
scheme: "stream",
|
||||
privileges: {
|
||||
// TODO(MR): Remove the commented bits if we don't end up
|
||||
// needing them by the time the IPC refactoring is done.
|
||||
|
||||
// Prevent the insecure origin issues when fetching this
|
||||
// secure: true,
|
||||
// Allow the web fetch API in the renderer to use this scheme.
|
||||
supportFetchAPI: true,
|
||||
// Allow it to be used with video tags.
|
||||
// stream: true,
|
||||
},
|
||||
},
|
||||
]);
|
||||
};
|
||||
|
||||
/**
|
||||
* [Note: Increased disk cache for the desktop app]
|
||||
*
|
||||
* Set the "disk-cache-size" command line flag to ask the Chromium process to
|
||||
* use a larger size for the caches that it keeps on disk. This allows us to use
|
||||
* the web based caching mechanisms on both the web and the desktop app, just
|
||||
* ask the embedded Chromium to be a bit more generous in disk usage when
|
||||
* running as the desktop app.
|
||||
*
|
||||
* The size we provide is in bytes.
|
||||
* https://www.electronjs.org/docs/latest/api/command-line-switches#--disk-cache-sizesize
|
||||
*
|
||||
* Note that increasing the disk cache size does not guarantee that Chromium
|
||||
* will respect in verbatim, it uses its own heuristics atop this hint.
|
||||
* https://superuser.com/questions/378991/what-is-chrome-default-cache-size-limit/1577693#1577693
|
||||
*
|
||||
* See also: [Note: Caching files].
|
||||
*/
|
||||
const increaseDiskCache = () =>
|
||||
app.commandLine.appendSwitch(
|
||||
"disk-cache-size",
|
||||
`${5 * 1024 * 1024 * 1024}`, // 5 GB
|
||||
);
|
||||
|
||||
/**
|
||||
* Create and return the {@link BrowserWindow} that will form our app's UI.
|
||||
*
|
||||
* This window will show the HTML served from {@link rendererURL}.
|
||||
*/
|
||||
const createMainWindow = async () => {
|
||||
const createMainWindow = () => {
|
||||
// Create the main window. This'll show our web content.
|
||||
const window = new BrowserWindow({
|
||||
webPreferences: {
|
||||
preload: path.join(app.getAppPath(), "preload.js"),
|
||||
preload: path.join(__dirname, "preload.js"),
|
||||
sandbox: true,
|
||||
},
|
||||
// The color to show in the window until the web content gets loaded.
|
||||
|
@ -187,7 +152,7 @@ const createMainWindow = async () => {
|
|||
show: false,
|
||||
});
|
||||
|
||||
const wasAutoLaunched = await autoLauncher.wasAutoLaunched();
|
||||
const wasAutoLaunched = autoLauncher.wasAutoLaunched();
|
||||
if (wasAutoLaunched) {
|
||||
// Don't automatically show the app's window if we were auto-launched.
|
||||
// On macOS, also hide the dock icon.
|
||||
|
@ -201,13 +166,15 @@ const createMainWindow = async () => {
|
|||
if (isDev) window.webContents.openDevTools();
|
||||
|
||||
window.webContents.on("render-process-gone", (_, details) => {
|
||||
log.error(`render-process-gone: ${details}`);
|
||||
log.error(`render-process-gone: ${details.reason}`);
|
||||
window.webContents.reload();
|
||||
});
|
||||
|
||||
// "The unresponsive event is fired when Chromium detects that your
|
||||
// webContents is not responding to input messages for > 30 seconds."
|
||||
window.webContents.on("unresponsive", () => {
|
||||
log.error(
|
||||
"Main window's webContents are unresponsive, will restart the renderer process",
|
||||
"MainWindow's webContents are unresponsive, will restart the renderer process",
|
||||
);
|
||||
window.webContents.forcefullyCrashRenderer();
|
||||
});
|
||||
|
@ -228,7 +195,7 @@ const createMainWindow = async () => {
|
|||
});
|
||||
|
||||
window.on("show", () => {
|
||||
if (process.platform == "darwin") app.dock.show();
|
||||
if (process.platform == "darwin") void app.dock.show();
|
||||
});
|
||||
|
||||
// Let ipcRenderer know when mainWindow is in the foreground so that it can
|
||||
|
@ -238,6 +205,58 @@ const createMainWindow = async () => {
|
|||
return window;
|
||||
};
|
||||
|
||||
/**
|
||||
* Automatically set the save path for user initiated downloads to the system's
|
||||
* "downloads" directory instead of asking the user to select a save location.
|
||||
*/
|
||||
export const setDownloadPath = (webContents: WebContents) => {
|
||||
webContents.session.on("will-download", (_, item) => {
|
||||
item.setSavePath(
|
||||
uniqueSavePath(app.getPath("downloads"), item.getFilename()),
|
||||
);
|
||||
});
|
||||
};
|
||||
|
||||
const uniqueSavePath = (dirPath: string, fileName: string) => {
|
||||
const { name, ext } = path.parse(fileName);
|
||||
|
||||
let savePath = path.join(dirPath, fileName);
|
||||
let n = 1;
|
||||
while (existsSync(savePath)) {
|
||||
const suffixedName = [`${name}(${n})`, ext].filter((x) => x).join(".");
|
||||
savePath = path.join(dirPath, suffixedName);
|
||||
n++;
|
||||
}
|
||||
return savePath;
|
||||
};
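
// Behaviour sketch (assuming "flower.png" already exists in dirPath):
// uniqueSavePath(dirPath, "flower.png") returns ".../flower(1).png", and if
// that exists too, ".../flower(2).png", and so on.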
|
||||
|
||||
/**
|
||||
* Allow opening external links, e.g. when the user clicks on the "Feature
|
||||
* requests" button in the sidebar (to open our GitHub repository), or when they
|
||||
* click the "Support" button to send an email to support.
|
||||
*
|
||||
* @param webContents The renderer to configure.
|
||||
*/
|
||||
export const allowExternalLinks = (webContents: WebContents) => {
|
||||
// By default, if the user were to open a link, say
|
||||
// https://github.com/ente-io/ente/discussions, then it would open a _new_
|
||||
// BrowserWindow within our app.
|
||||
//
|
||||
// This is not the behaviour we want; what we want is to ask the system to
|
||||
// handle the link (e.g. open the URL in the default browser, or if it is a
|
||||
// mailto: link, then open the user's mail client).
|
||||
//
|
||||
// Returning `action` "deny" accomplishes this.
|
||||
webContents.setWindowOpenHandler(({ url }) => {
|
||||
if (!url.startsWith(rendererURL)) {
|
||||
void shell.openExternal(url);
|
||||
return { action: "deny" };
|
||||
} else {
|
||||
return { action: "allow" };
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Add an icon for our app in the system tray.
|
||||
*
|
||||
|
@ -268,30 +287,46 @@ const setupTrayItem = (mainWindow: BrowserWindow) => {
|
|||
|
||||
/**
|
||||
* Older versions of our app used to maintain a cache dir using the main
|
||||
* process. This has been deprecated in favor of using a normal web cache.
|
||||
* process. This has been removed in favor of cache on the web layer.
|
||||
*
|
||||
* See [Note: Increased disk cache for the desktop app]
|
||||
* Delete the old cache dir if it exists.
|
||||
*
|
||||
* Delete the old cache dir if it exists. This code was added March 2024, and
|
||||
* can be removed after some time once most people have upgraded to newer
|
||||
* versions.
|
||||
* This will happen in two phases. The cache had three subdirectories:
|
||||
*
|
||||
* - Two of them, "thumbs" and "files", will be removed now (v1.7.0, May 2024).
|
||||
*
|
||||
* - The third one, "face-crops" will be removed once we finish the face search
|
||||
* changes. See: [Note: Legacy face crops].
|
||||
*
|
||||
* This migration code can be removed after some time once most people have
|
||||
* upgraded to newer versions.
|
||||
*/
|
||||
const deleteLegacyDiskCacheDirIfExists = async () => {
|
||||
// The existing code was passing "cache" as a parameter to getPath. This is
|
||||
// incorrect if we go by the types - "cache" is not a valid value for the
|
||||
// parameter to `app.getPath`.
|
||||
const removeIfExists = async (dirPath: string) => {
|
||||
if (existsSync(dirPath)) {
|
||||
log.info(`Removing legacy disk cache from ${dirPath}`);
|
||||
await fs.rm(dirPath, { recursive: true });
|
||||
}
|
||||
};
|
||||
// [Note: Getting the cache path]
|
||||
//
|
||||
// It might be an issue in the types, since at runtime it seems to work. For
|
||||
// example, on macOS I get `~/Library/Caches`.
|
||||
// The existing code was passing "cache" as a parameter to getPath.
|
||||
//
|
||||
// However, "cache" is not a valid parameter to getPath. It works! (for
|
||||
// example, on macOS I get `~/Library/Caches`), but it is intentionally not
|
||||
// documented as part of the public API:
|
||||
//
|
||||
// - docs: remove "cache" from app.getPath
|
||||
// https://github.com/electron/electron/pull/33509
|
||||
//
|
||||
// Irrespective, we replicate the original behaviour so that we get back the
|
||||
// same path that the old got was getting.
|
||||
// same path that the old code was getting.
|
||||
//
|
||||
// @ts-expect-error
|
||||
// @ts-expect-error "cache" works but is not part of the public API.
|
||||
const cacheDir = path.join(app.getPath("cache"), "ente");
|
||||
if (existsSync(cacheDir)) {
|
||||
log.info(`Removing legacy disk cache from ${cacheDir}`);
|
||||
await fs.rm(cacheDir, { recursive: true });
|
||||
await removeIfExists(path.join(cacheDir, "thumbs"));
|
||||
await removeIfExists(path.join(cacheDir, "files"));
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -324,7 +359,6 @@ const main = () => {
|
|||
// The order of the next two calls is important
|
||||
setupRendererServer();
|
||||
registerPrivilegedSchemes();
|
||||
increaseDiskCache();
|
||||
migrateLegacyWatchStoreIfNeeded();
|
||||
|
||||
app.on("second-instance", () => {
|
||||
|
@ -339,32 +373,35 @@ const main = () => {
|
|||
// Emitted once, when Electron has finished initializing.
|
||||
//
|
||||
// Note that some Electron APIs can only be used after this event occurs.
|
||||
app.on("ready", async () => {
|
||||
// Create window and prepare for renderer
|
||||
mainWindow = await createMainWindow();
|
||||
attachIPCHandlers();
|
||||
attachFSWatchIPCHandlers(createWatcher(mainWindow));
|
||||
registerStreamProtocol();
|
||||
handleDownloads(mainWindow);
|
||||
handleExternalLinks(mainWindow);
|
||||
addAllowOriginHeader(mainWindow);
|
||||
void app.whenReady().then(() => {
|
||||
void (async () => {
|
||||
// Create window and prepare for the renderer.
|
||||
mainWindow = createMainWindow();
|
||||
attachIPCHandlers();
|
||||
attachFSWatchIPCHandlers(createWatcher(mainWindow));
|
||||
registerStreamProtocol();
|
||||
|
||||
// Start loading the renderer
|
||||
mainWindow.loadURL(rendererURL);
|
||||
// Configure the renderer's environment.
|
||||
setDownloadPath(mainWindow.webContents);
|
||||
allowExternalLinks(mainWindow.webContents);
|
||||
|
||||
// Continue on with the rest of the startup sequence
|
||||
Menu.setApplicationMenu(await createApplicationMenu(mainWindow));
|
||||
setupTrayItem(mainWindow);
|
||||
if (!isDev) setupAutoUpdater(mainWindow);
|
||||
// Start loading the renderer.
|
||||
void mainWindow.loadURL(rendererURL);
|
||||
|
||||
try {
|
||||
deleteLegacyDiskCacheDirIfExists();
|
||||
deleteLegacyKeysStoreIfExists();
|
||||
} catch (e) {
|
||||
// Log but otherwise ignore errors during non-critical startup
|
||||
// actions.
|
||||
log.error("Ignoring startup error", e);
|
||||
}
|
||||
// Continue on with the rest of the startup sequence.
|
||||
Menu.setApplicationMenu(await createApplicationMenu(mainWindow));
|
||||
setupTrayItem(mainWindow);
|
||||
setupAutoUpdater(mainWindow);
|
||||
|
||||
try {
|
||||
await deleteLegacyDiskCacheDirIfExists();
|
||||
await deleteLegacyKeysStoreIfExists();
|
||||
} catch (e) {
|
||||
// Log but otherwise ignore errors during non-critical startup
|
||||
// actions.
|
||||
log.error("Ignoring startup error", e);
|
||||
}
|
||||
})();
|
||||
});
|
||||
|
||||
// This is a macOS only event. Show our window when the user activates the
|
||||
|
|
|
@ -1,54 +0,0 @@
|
|||
import { dialog } from "electron/main";
|
||||
import path from "node:path";
|
||||
import type { ElectronFile } from "../types/ipc";
|
||||
import { getDirFilePaths, getElectronFile } from "./services/fs";
|
||||
import { getElectronFilesFromGoogleZip } from "./services/upload";
|
||||
|
||||
export const selectDirectory = async () => {
|
||||
const result = await dialog.showOpenDialog({
|
||||
properties: ["openDirectory"],
|
||||
});
|
||||
if (result.filePaths && result.filePaths.length > 0) {
|
||||
return result.filePaths[0]?.split(path.sep)?.join(path.posix.sep);
|
||||
}
|
||||
};
|
||||
|
||||
export const showUploadFilesDialog = async () => {
|
||||
const selectedFiles = await dialog.showOpenDialog({
|
||||
properties: ["openFile", "multiSelections"],
|
||||
});
|
||||
const filePaths = selectedFiles.filePaths;
|
||||
return await Promise.all(filePaths.map(getElectronFile));
|
||||
};
|
||||
|
||||
export const showUploadDirsDialog = async () => {
|
||||
const dir = await dialog.showOpenDialog({
|
||||
properties: ["openDirectory", "multiSelections"],
|
||||
});
|
||||
|
||||
let filePaths: string[] = [];
|
||||
for (const dirPath of dir.filePaths) {
|
||||
filePaths = [...filePaths, ...(await getDirFilePaths(dirPath))];
|
||||
}
|
||||
|
||||
return await Promise.all(filePaths.map(getElectronFile));
|
||||
};
|
||||
|
||||
export const showUploadZipDialog = async () => {
|
||||
const selectedFiles = await dialog.showOpenDialog({
|
||||
properties: ["openFile", "multiSelections"],
|
||||
filters: [{ name: "Zip File", extensions: ["zip"] }],
|
||||
});
|
||||
const filePaths = selectedFiles.filePaths;
|
||||
|
||||
let files: ElectronFile[] = [];
|
||||
|
||||
for (const filePath of filePaths) {
|
||||
files = [...files, ...(await getElectronFilesFromGoogleZip(filePath))];
|
||||
}
|
||||
|
||||
return {
|
||||
zipPaths: filePaths,
|
||||
files,
|
||||
};
|
||||
};
|
|
@ -1,29 +0,0 @@
|
|||
/**
|
||||
* @file file system related functions exposed over the context bridge.
|
||||
*/
|
||||
import { existsSync } from "node:fs";
|
||||
import fs from "node:fs/promises";
|
||||
|
||||
export const fsExists = (path: string) => existsSync(path);
|
||||
|
||||
export const fsRename = (oldPath: string, newPath: string) =>
|
||||
fs.rename(oldPath, newPath);
|
||||
|
||||
export const fsMkdirIfNeeded = (dirPath: string) =>
|
||||
fs.mkdir(dirPath, { recursive: true });
|
||||
|
||||
export const fsRmdir = (path: string) => fs.rmdir(path);
|
||||
|
||||
export const fsRm = (path: string) => fs.rm(path);
|
||||
|
||||
export const fsReadTextFile = async (filePath: string) =>
|
||||
fs.readFile(filePath, "utf-8");
|
||||
|
||||
export const fsWriteFile = (path: string, contents: string) =>
|
||||
fs.writeFile(path, contents);
|
||||
|
||||
export const fsIsDir = async (dirPath: string) => {
|
||||
if (!existsSync(dirPath)) return false;
|
||||
const stat = await fs.stat(dirPath);
|
||||
return stat.isDirectory();
|
||||
};
|
|
@ -1,63 +0,0 @@
|
|||
import { BrowserWindow, app, shell } from "electron";
|
||||
import { existsSync } from "node:fs";
|
||||
import path from "node:path";
|
||||
import { rendererURL } from "../main";
|
||||
|
||||
export function handleDownloads(mainWindow: BrowserWindow) {
|
||||
mainWindow.webContents.session.on("will-download", (_, item) => {
|
||||
item.setSavePath(
|
||||
getUniqueSavePath(item.getFilename(), app.getPath("downloads")),
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
export function handleExternalLinks(mainWindow: BrowserWindow) {
|
||||
mainWindow.webContents.setWindowOpenHandler(({ url }) => {
|
||||
if (!url.startsWith(rendererURL)) {
|
||||
shell.openExternal(url);
|
||||
return { action: "deny" };
|
||||
} else {
|
||||
return { action: "allow" };
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export function getUniqueSavePath(filename: string, directory: string): string {
|
||||
let uniqueFileSavePath = path.join(directory, filename);
|
||||
const { name: filenameWithoutExtension, ext: extension } =
|
||||
path.parse(filename);
|
||||
let n = 0;
|
||||
while (existsSync(uniqueFileSavePath)) {
|
||||
n++;
|
||||
// filter need to remove undefined extension from the array
|
||||
// else [`${fileName}`, undefined].join(".") will lead to `${fileName}.` as joined string
|
||||
const fileNameWithNumberedSuffix = [
|
||||
`${filenameWithoutExtension}(${n})`,
|
||||
extension,
|
||||
]
|
||||
.filter((x) => x) // filters out undefined/null values
|
||||
.join("");
|
||||
uniqueFileSavePath = path.join(directory, fileNameWithNumberedSuffix);
|
||||
}
|
||||
return uniqueFileSavePath;
|
||||
}
|
||||
|
||||
function lowerCaseHeaders(responseHeaders: Record<string, string[]>) {
|
||||
const headers: Record<string, string[]> = {};
|
||||
for (const key of Object.keys(responseHeaders)) {
|
||||
headers[key.toLowerCase()] = responseHeaders[key];
|
||||
}
|
||||
return headers;
|
||||
}
|
||||
|
||||
export function addAllowOriginHeader(mainWindow: BrowserWindow) {
|
||||
mainWindow.webContents.session.webRequest.onHeadersReceived(
|
||||
(details, callback) => {
|
||||
details.responseHeaders = lowerCaseHeaders(details.responseHeaders);
|
||||
details.responseHeaders["access-control-allow-origin"] = ["*"];
|
||||
callback({
|
||||
responseHeaders: details.responseHeaders,
|
||||
});
|
||||
},
|
||||
);
|
||||
}
|
|
@ -12,16 +12,24 @@ import type { FSWatcher } from "chokidar";
|
|||
import { ipcMain } from "electron/main";
|
||||
import type {
|
||||
CollectionMapping,
|
||||
ElectronFile,
|
||||
FolderWatch,
|
||||
PendingUploads,
|
||||
ZipItem,
|
||||
} from "../types/ipc";
|
||||
import { logToDisk } from "./log";
|
||||
import {
|
||||
appVersion,
|
||||
skipAppUpdate,
|
||||
updateAndRestart,
|
||||
updateOnNextRestart,
|
||||
} from "./services/app-update";
|
||||
import {
|
||||
legacyFaceCrop,
|
||||
openDirectory,
|
||||
openLogDirectory,
|
||||
selectDirectory,
|
||||
showUploadDirsDialog,
|
||||
showUploadFilesDialog,
|
||||
showUploadZipDialog,
|
||||
} from "./dialogs";
|
||||
} from "./services/dir";
|
||||
import { ffmpegExec } from "./services/ffmpeg";
|
||||
import {
|
||||
fsExists,
|
||||
fsIsDir,
|
||||
|
@ -31,21 +39,12 @@ import {
|
|||
fsRm,
|
||||
fsRmdir,
|
||||
fsWriteFile,
|
||||
} from "./fs";
|
||||
import { logToDisk } from "./log";
|
||||
} from "./services/fs";
|
||||
import { convertToJPEG, generateImageThumbnail } from "./services/image";
|
||||
import {
|
||||
appVersion,
|
||||
skipAppUpdate,
|
||||
updateAndRestart,
|
||||
updateOnNextRestart,
|
||||
} from "./services/app-update";
|
||||
import { runFFmpegCmd } from "./services/ffmpeg";
|
||||
import { getDirFiles } from "./services/fs";
|
||||
import {
|
||||
convertToJPEG,
|
||||
generateImageThumbnail,
|
||||
} from "./services/imageProcessor";
|
||||
import { clipImageEmbedding, clipTextEmbedding } from "./services/ml-clip";
|
||||
clipImageEmbedding,
|
||||
clipTextEmbeddingIfAvailable,
|
||||
} from "./services/ml-clip";
|
||||
import { detectFaces, faceEmbedding } from "./services/ml-face";
|
||||
import {
|
||||
clearStores,
|
||||
|
@ -53,20 +52,23 @@ import {
|
|||
saveEncryptionKey,
|
||||
} from "./services/store";
|
||||
import {
|
||||
getElectronFilesFromGoogleZip,
|
||||
clearPendingUploads,
|
||||
listZipItems,
|
||||
markUploadedFiles,
|
||||
markUploadedZipItems,
|
||||
pathOrZipItemSize,
|
||||
pendingUploads,
|
||||
setPendingUploadCollection,
|
||||
setPendingUploadFiles,
|
||||
setPendingUploads,
|
||||
} from "./services/upload";
|
||||
import {
|
||||
watchAdd,
|
||||
watchFindFiles,
|
||||
watchGet,
|
||||
watchRemove,
|
||||
watchReset,
|
||||
watchUpdateIgnoredFiles,
|
||||
watchUpdateSyncedFiles,
|
||||
} from "./services/watch";
|
||||
import { openDirectory, openLogDirectory } from "./util";
|
||||
|
||||
/**
|
||||
* Listen for IPC events sent/invoked by the renderer process, and route them to
|
||||
|
@ -93,16 +95,20 @@ export const attachIPCHandlers = () => {
|
|||
|
||||
ipcMain.handle("appVersion", () => appVersion());
|
||||
|
||||
ipcMain.handle("openDirectory", (_, dirPath) => openDirectory(dirPath));
|
||||
ipcMain.handle("openDirectory", (_, dirPath: string) =>
|
||||
openDirectory(dirPath),
|
||||
);
|
||||
|
||||
ipcMain.handle("openLogDirectory", () => openLogDirectory());
|
||||
|
||||
// See [Note: Catching exception during .send/.on]
|
||||
ipcMain.on("logToDisk", (_, message) => logToDisk(message));
|
||||
ipcMain.on("logToDisk", (_, message: string) => logToDisk(message));
|
||||
|
||||
ipcMain.handle("selectDirectory", () => selectDirectory());
|
||||
|
||||
ipcMain.on("clearStores", () => clearStores());
|
||||
|
||||
ipcMain.handle("saveEncryptionKey", (_, encryptionKey) =>
|
||||
ipcMain.handle("saveEncryptionKey", (_, encryptionKey: string) =>
|
||||
saveEncryptionKey(encryptionKey),
|
||||
);
|
||||
|
||||
|
@ -112,21 +118,23 @@ export const attachIPCHandlers = () => {
|
|||
|
||||
ipcMain.on("updateAndRestart", () => updateAndRestart());
|
||||
|
||||
ipcMain.on("updateOnNextRestart", (_, version) =>
|
||||
ipcMain.on("updateOnNextRestart", (_, version: string) =>
|
||||
updateOnNextRestart(version),
|
||||
);
|
||||
|
||||
ipcMain.on("skipAppUpdate", (_, version) => skipAppUpdate(version));
|
||||
ipcMain.on("skipAppUpdate", (_, version: string) => skipAppUpdate(version));
|
||||
|
||||
// - FS
|
||||
|
||||
ipcMain.handle("fsExists", (_, path) => fsExists(path));
|
||||
ipcMain.handle("fsExists", (_, path: string) => fsExists(path));
|
||||
|
||||
ipcMain.handle("fsRename", (_, oldPath: string, newPath: string) =>
|
||||
fsRename(oldPath, newPath),
|
||||
);
|
||||
|
||||
ipcMain.handle("fsMkdirIfNeeded", (_, dirPath) => fsMkdirIfNeeded(dirPath));
|
||||
ipcMain.handle("fsMkdirIfNeeded", (_, dirPath: string) =>
|
||||
fsMkdirIfNeeded(dirPath),
|
||||
);
|
||||
|
||||
ipcMain.handle("fsRmdir", (_, path: string) => fsRmdir(path));
|
||||
|
||||
|
@ -142,25 +150,35 @@ export const attachIPCHandlers = () => {
|
|||
|
||||
// - Conversion
|
||||
|
||||
ipcMain.handle("convertToJPEG", (_, fileData, filename) =>
|
||||
convertToJPEG(fileData, filename),
|
||||
ipcMain.handle("convertToJPEG", (_, imageData: Uint8Array) =>
|
||||
convertToJPEG(imageData),
|
||||
);
|
||||
|
||||
ipcMain.handle(
|
||||
"generateImageThumbnail",
|
||||
(_, inputFile, maxDimension, maxSize) =>
|
||||
generateImageThumbnail(inputFile, maxDimension, maxSize),
|
||||
(
|
||||
_,
|
||||
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
|
||||
maxDimension: number,
|
||||
maxSize: number,
|
||||
) => generateImageThumbnail(dataOrPathOrZipItem, maxDimension, maxSize),
|
||||
);
|
||||
|
||||
ipcMain.handle(
|
||||
"runFFmpegCmd",
|
||||
"ffmpegExec",
|
||||
(
|
||||
_,
|
||||
cmd: string[],
|
||||
inputFile: File | ElectronFile,
|
||||
outputFileName: string,
|
||||
dontTimeout?: boolean,
|
||||
) => runFFmpegCmd(cmd, inputFile, outputFileName, dontTimeout),
|
||||
command: string[],
|
||||
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
|
||||
outputFileExtension: string,
|
||||
timeoutMS: number,
|
||||
) =>
|
||||
ffmpegExec(
|
||||
command,
|
||||
dataOrPathOrZipItem,
|
||||
outputFileExtension,
|
||||
timeoutMS,
|
||||
),
|
||||
);
|
||||
|
||||
// - ML
|
||||
|
@ -169,8 +187,8 @@ export const attachIPCHandlers = () => {
|
|||
clipImageEmbedding(jpegImageData),
|
||||
);
|
||||
|
||||
ipcMain.handle("clipTextEmbedding", (_, text: string) =>
|
||||
clipTextEmbedding(text),
|
||||
ipcMain.handle("clipTextEmbeddingIfAvailable", (_, text: string) =>
|
||||
clipTextEmbeddingIfAvailable(text),
|
||||
);
|
||||
|
||||
ipcMain.handle("detectFaces", (_, input: Float32Array) =>
|
||||
|
@ -181,37 +199,37 @@ export const attachIPCHandlers = () => {
|
|||
faceEmbedding(input),
|
||||
);
|
||||
|
||||
// - File selection
|
||||
|
||||
ipcMain.handle("selectDirectory", () => selectDirectory());
|
||||
|
||||
ipcMain.handle("showUploadFilesDialog", () => showUploadFilesDialog());
|
||||
|
||||
ipcMain.handle("showUploadDirsDialog", () => showUploadDirsDialog());
|
||||
|
||||
ipcMain.handle("showUploadZipDialog", () => showUploadZipDialog());
|
||||
ipcMain.handle("legacyFaceCrop", (_, faceID: string) =>
|
||||
legacyFaceCrop(faceID),
|
||||
);
|
||||
|
||||
// - Upload
|
||||
|
||||
ipcMain.handle("listZipItems", (_, zipPath: string) =>
|
||||
listZipItems(zipPath),
|
||||
);
|
||||
|
||||
ipcMain.handle("pathOrZipItemSize", (_, pathOrZipItem: string | ZipItem) =>
|
||||
pathOrZipItemSize(pathOrZipItem),
|
||||
);
|
||||
|
||||
ipcMain.handle("pendingUploads", () => pendingUploads());
|
||||
|
||||
ipcMain.handle("setPendingUploadCollection", (_, collectionName: string) =>
|
||||
setPendingUploadCollection(collectionName),
|
||||
ipcMain.handle("setPendingUploads", (_, pendingUploads: PendingUploads) =>
|
||||
setPendingUploads(pendingUploads),
|
||||
);
|
||||
|
||||
ipcMain.handle(
|
||||
"setPendingUploadFiles",
|
||||
(_, type: PendingUploads["type"], filePaths: string[]) =>
|
||||
setPendingUploadFiles(type, filePaths),
|
||||
"markUploadedFiles",
|
||||
(_, paths: PendingUploads["filePaths"]) => markUploadedFiles(paths),
|
||||
);
|
||||
|
||||
// -
|
||||
|
||||
ipcMain.handle("getElectronFilesFromGoogleZip", (_, filePath: string) =>
|
||||
getElectronFilesFromGoogleZip(filePath),
|
||||
ipcMain.handle(
|
||||
"markUploadedZipItems",
|
||||
(_, items: PendingUploads["zipItems"]) => markUploadedZipItems(items),
|
||||
);
|
||||
|
||||
ipcMain.handle("getDirFiles", (_, dirPath: string) => getDirFiles(dirPath));
|
||||
ipcMain.handle("clearPendingUploads", () => clearPendingUploads());
|
||||
};
|
||||
|
||||
/**
|
||||
|
@ -251,4 +269,6 @@ export const attachFSWatchIPCHandlers = (watcher: FSWatcher) => {
|
|||
ipcMain.handle("watchFindFiles", (_, folderPath: string) =>
|
||||
watchFindFiles(folderPath),
|
||||
);
|
||||
|
||||
ipcMain.handle("watchReset", () => watchReset(watcher));
|
||||
};
|
||||
|
|
|
@ -1,15 +1,15 @@
|
|||
import log from "electron-log";
|
||||
import util from "node:util";
|
||||
import { isDev } from "./util";
|
||||
import { isDev } from "./utils/electron";
|
||||
|
||||
/**
|
||||
* Initialize logging in the main process.
|
||||
*
|
||||
* This will set our underlying logger up to log to a file named `ente.log`,
|
||||
*
|
||||
* - on Linux at ~/.config/ente/logs/main.log
|
||||
* - on macOS at ~/Library/Logs/ente/main.log
|
||||
* - on Windows at %USERPROFILE%\AppData\Roaming\ente\logs\main.log
|
||||
* - on Linux at ~/.config/ente/logs/ente.log
|
||||
* - on macOS at ~/Library/Logs/ente/ente.log
|
||||
* - on Windows at %USERPROFILE%\AppData\Roaming\ente\logs\ente.log
|
||||
*
|
||||
* On dev builds, it will also log to the console.
|
||||
*/
|
||||
|
@ -65,7 +65,7 @@ const logError_ = (message: string) => {
|
|||
if (isDev) console.error(`[error] ${message}`);
|
||||
};
|
||||
|
||||
const logInfo = (...params: any[]) => {
|
||||
const logInfo = (...params: unknown[]) => {
|
||||
const message = params
|
||||
.map((p) => (typeof p == "string" ? p : util.inspect(p)))
|
||||
.join(" ");
|
||||
|
@ -73,7 +73,7 @@ const logInfo = (...params: any[]) => {
|
|||
if (isDev) console.log(`[info] ${message}`);
|
||||
};
|
||||
|
||||
const logDebug = (param: () => any) => {
|
||||
const logDebug = (param: () => unknown) => {
|
||||
if (isDev) {
|
||||
const p = param();
|
||||
console.log(`[debug] ${typeof p == "string" ? p : util.inspect(p)}`);
|
||||
|
|
|
@ -8,8 +8,8 @@ import {
|
|||
import { allowWindowClose } from "../main";
|
||||
import { forceCheckForAppUpdates } from "./services/app-update";
|
||||
import autoLauncher from "./services/auto-launcher";
|
||||
import { openLogDirectory } from "./services/dir";
|
||||
import { userPreferences } from "./stores/user-preferences";
|
||||
import { openLogDirectory } from "./util";
|
||||
|
||||
/** Create and return the entries in the app's main menu bar */
|
||||
export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
|
||||
|
@ -18,7 +18,7 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
|
|||
// Whenever the menu is redrawn the current value of these variables is used
|
||||
// to set the checked state for the various settings checkboxes.
|
||||
let isAutoLaunchEnabled = await autoLauncher.isEnabled();
|
||||
let shouldHideDockIcon = userPreferences.get("hideDockIcon");
|
||||
let shouldHideDockIcon = !!userPreferences.get("hideDockIcon");
|
||||
|
||||
const macOSOnly = (options: MenuItemConstructorOptions[]) =>
|
||||
process.platform == "darwin" ? options : [];
|
||||
|
@ -26,12 +26,12 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
|
|||
const handleCheckForUpdates = () => forceCheckForAppUpdates(mainWindow);
|
||||
|
||||
const handleViewChangelog = () =>
|
||||
shell.openExternal(
|
||||
void shell.openExternal(
|
||||
"https://github.com/ente-io/ente/blob/main/desktop/CHANGELOG.md",
|
||||
);
|
||||
|
||||
const toggleAutoLaunch = () => {
|
||||
autoLauncher.toggleAutoLaunch();
|
||||
void autoLauncher.toggleAutoLaunch();
|
||||
isAutoLaunchEnabled = !isAutoLaunchEnabled;
|
||||
};
|
||||
|
||||
|
@ -42,13 +42,15 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
|
|||
shouldHideDockIcon = !shouldHideDockIcon;
|
||||
};
|
||||
|
||||
const handleHelp = () => shell.openExternal("https://help.ente.io/photos/");
|
||||
const handleHelp = () =>
|
||||
void shell.openExternal("https://help.ente.io/photos/");
|
||||
|
||||
const handleSupport = () => shell.openExternal("mailto:support@ente.io");
|
||||
const handleSupport = () =>
|
||||
void shell.openExternal("mailto:support@ente.io");
|
||||
|
||||
const handleBlog = () => shell.openExternal("https://ente.io/blog/");
|
||||
const handleBlog = () => void shell.openExternal("https://ente.io/blog/");
|
||||
|
||||
const handleViewLogs = openLogDirectory;
|
||||
const handleViewLogs = () => void openLogDirectory();
|
||||
|
||||
return Menu.buildFromTemplate([
|
||||
{
|
||||
|
@ -124,11 +126,11 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
|
|||
submenu: [
|
||||
{
|
||||
role: "startSpeaking",
|
||||
label: "start speaking",
|
||||
label: "Start Speaking",
|
||||
},
|
||||
{
|
||||
role: "stopSpeaking",
|
||||
label: "stop speaking",
|
||||
label: "Stop Speaking",
|
||||
},
|
||||
],
|
||||
},
|
||||
|
|
|
@ -6,14 +6,23 @@ import { allowWindowClose } from "../../main";
|
|||
import { AppUpdate } from "../../types/ipc";
|
||||
import log from "../log";
|
||||
import { userPreferences } from "../stores/user-preferences";
|
||||
import { isDev } from "../utils/electron";
|
||||
|
||||
export const setupAutoUpdater = (mainWindow: BrowserWindow) => {
|
||||
autoUpdater.logger = electronLog;
|
||||
autoUpdater.autoDownload = false;
|
||||
|
||||
// Skip checking for updates automatically in dev builds. Installing an
|
||||
// update would fail anyway since (at least on macOS) the auto update
|
||||
// process requires signed builds.
|
||||
//
|
||||
// Even though this is skipped on app start, we can still use the "Check for
|
||||
// updates..." menu option to trigger the update if we wish in dev builds.
|
||||
if (isDev) return;
|
||||
|
||||
const oneDay = 1 * 24 * 60 * 60 * 1000;
|
||||
setInterval(() => checkForUpdatesAndNotify(mainWindow), oneDay);
|
||||
checkForUpdatesAndNotify(mainWindow);
|
||||
setInterval(() => void checkForUpdatesAndNotify(mainWindow), oneDay);
|
||||
void checkForUpdatesAndNotify(mainWindow);
|
||||
};
|
||||
|
||||
/**
|
||||
|
@ -22,7 +31,7 @@ export const setupAutoUpdater = (mainWindow: BrowserWindow) => {
|
|||
export const forceCheckForAppUpdates = (mainWindow: BrowserWindow) => {
|
||||
userPreferences.delete("skipAppVersion");
|
||||
userPreferences.delete("muteUpdateNotificationVersion");
|
||||
checkForUpdatesAndNotify(mainWindow);
|
||||
void checkForUpdatesAndNotify(mainWindow);
|
||||
};
|
||||
|
||||
const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => {
|
||||
|
@ -36,18 +45,21 @@ const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => {
|
|||
|
||||
log.debug(() => `Update check found version ${version}`);
|
||||
|
||||
if (!version)
|
||||
throw new Error("Unexpected empty version obtained from auto-updater");
|
||||
|
||||
if (compareVersions(version, app.getVersion()) <= 0) {
|
||||
log.debug(() => "Skipping update, already at latest version");
|
||||
return;
|
||||
}
|
||||
|
||||
if (version === userPreferences.get("skipAppVersion")) {
|
||||
if (version == userPreferences.get("skipAppVersion")) {
|
||||
log.info(`User chose to skip version ${version}`);
|
||||
return;
|
||||
}
|
||||
|
||||
const mutedVersion = userPreferences.get("muteUpdateNotificationVersion");
|
||||
if (version === mutedVersion) {
|
||||
if (version == mutedVersion) {
|
||||
log.info(`User has muted update notifications for version ${version}`);
|
||||
return;
|
||||
}
|
||||
|
@ -56,19 +68,19 @@ const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => {
|
|||
mainWindow.webContents.send("appUpdateAvailable", update);
|
||||
|
||||
log.debug(() => "Attempting auto update");
|
||||
autoUpdater.downloadUpdate();
|
||||
await autoUpdater.downloadUpdate();
|
||||
|
||||
let timeout: NodeJS.Timeout;
|
||||
let timeoutId: ReturnType<typeof setTimeout>;
|
||||
const fiveMinutes = 5 * 60 * 1000;
|
||||
autoUpdater.on("update-downloaded", () => {
|
||||
timeout = setTimeout(
|
||||
timeoutId = setTimeout(
|
||||
() => showUpdateDialog({ autoUpdatable: true, version }),
|
||||
fiveMinutes,
|
||||
);
|
||||
});
|
||||
|
||||
autoUpdater.on("error", (error) => {
|
||||
clearTimeout(timeout);
|
||||
clearTimeout(timeoutId);
|
||||
log.error("Auto update failed", error);
|
||||
showUpdateDialog({ autoUpdatable: false, version });
|
||||
});
|
||||
|
|
|
@ -27,23 +27,22 @@ class AutoLauncher {
|
|||
}
|
||||
|
||||
async toggleAutoLaunch() {
|
||||
const isEnabled = await this.isEnabled();
|
||||
const wasEnabled = await this.isEnabled();
|
||||
const autoLaunch = this.autoLaunch;
|
||||
if (autoLaunch) {
|
||||
if (isEnabled) await autoLaunch.disable();
|
||||
if (wasEnabled) await autoLaunch.disable();
|
||||
else await autoLaunch.enable();
|
||||
} else {
|
||||
if (isEnabled) app.setLoginItemSettings({ openAtLogin: false });
|
||||
else app.setLoginItemSettings({ openAtLogin: true });
|
||||
const openAtLogin = !wasEnabled;
|
||||
app.setLoginItemSettings({ openAtLogin });
|
||||
}
|
||||
}
|
||||
|
||||
async wasAutoLaunched() {
|
||||
wasAutoLaunched() {
|
||||
if (this.autoLaunch) {
|
||||
return app.commandLine.hasSwitch("hidden");
|
||||
} else {
|
||||
// TODO(MR): This apparently doesn't work anymore.
|
||||
return app.getLoginItemSettings().wasOpenedAtLogin;
|
||||
return app.getLoginItemSettings().openAtLogin;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
89
desktop/src/main/services/dir.ts
Normal file
|
@ -0,0 +1,89 @@
|
|||
import { shell } from "electron/common";
|
||||
import { app, dialog } from "electron/main";
|
||||
import { existsSync } from "fs";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { posixPath } from "../utils/electron";
|
||||
|
||||
export const selectDirectory = async () => {
|
||||
const result = await dialog.showOpenDialog({
|
||||
properties: ["openDirectory"],
|
||||
});
|
||||
const dirPath = result.filePaths[0];
|
||||
return dirPath ? posixPath(dirPath) : undefined;
|
||||
};
|
||||
|
||||
/**
|
||||
* Open the given {@link dirPath} in the system's folder viewer.
|
||||
*
|
||||
* For example, on macOS this'll open {@link dirPath} in Finder.
|
||||
*/
|
||||
export const openDirectory = async (dirPath: string) => {
|
||||
// We need to use `path.normalize` because `shell.openPath` does not support
// POSIX paths; it needs a platform specific path:
|
||||
// https://github.com/electron/electron/issues/28831#issuecomment-826370589
|
||||
const res = await shell.openPath(path.normalize(dirPath));
|
||||
// `shell.openPath` resolves with a string containing the error message
|
||||
// corresponding to the failure if a failure occurred, otherwise "".
|
||||
if (res) throw new Error(`Failed to open directory ${dirPath}: ${res}`);
|
||||
};
|
||||
|
||||
/**
|
||||
* Open the app's log directory in the system's folder viewer.
|
||||
*
|
||||
* @see {@link openDirectory}
|
||||
*/
|
||||
export const openLogDirectory = () => openDirectory(logDirectoryPath());
|
||||
|
||||
/**
|
||||
* Return the path where the logs for the app are saved.
|
||||
*
|
||||
* [Note: Electron app paths]
|
||||
*
|
||||
* There are three paths we need to be aware of usually.
|
||||
*
|
||||
* First is the "appData". We can obtain this with `app.getPath("appData")`.
|
||||
* This is per-user application data directory. This is usually the following:
|
||||
*
|
||||
* - Windows: `%APPDATA%`, e.g. `C:\Users\<username>\AppData\Roaming`
|
||||
* - Linux: `~/.config`
|
||||
* - macOS: `~/Library/Application Support`
|
||||
*
|
||||
* Now, if we suffix the app's name onto the appData directory, we get the
|
||||
* "userData" directory. This is the **primary** place applications are meant to
|
||||
* store user's data, e.g. various configuration files and saved state.
|
||||
*
|
||||
* During development, our app name is "Electron", so this'd be, for example,
|
||||
* `~/Library/Application Support/Electron` if we run using `yarn dev`. For the
|
||||
* packaged production app, our app name is "ente", so this would be:
|
||||
*
|
||||
* - Windows: `%APPDATA%\ente`, e.g. `C:\Users\<username>\AppData\Roaming\ente`
|
||||
* - Linux: `~/.config/ente`
|
||||
* - macOS: `~/Library/Application Support/ente`
|
||||
*
|
||||
* Note that Chromium also stores the browser state, e.g. localStorage or disk
|
||||
* caches, in userData.
|
||||
*
|
||||
* Finally, there is the "logs" directory. This is not within "appData" but has
|
||||
* a slightly different OS specific path. Since our log file is named
|
||||
* "ente.log", it can be found at:
|
||||
*
|
||||
* - macOS: ~/Library/Logs/ente/ente.log (production)
|
||||
* - macOS: ~/Library/Logs/Electron/ente.log (dev)
|
||||
*
|
||||
* https://www.electronjs.org/docs/latest/api/app
|
||||
*/
|
||||
const logDirectoryPath = () => app.getPath("logs");
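
// A quick, dev-only way to verify these paths on a particular machine
// (illustrative sketch, not part of this diff):
//
//     log.info(app.getPath("appData"), app.getPath("userData"), app.getPath("logs"));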
|
||||
|
||||
/**
|
||||
* See: [Note: Legacy face crops]
|
||||
*/
|
||||
export const legacyFaceCrop = async (
|
||||
faceID: string,
|
||||
): Promise<Uint8Array | undefined> => {
|
||||
// See: [Note: Getting the cache path]
|
||||
// @ts-expect-error "cache" works but is not part of the public API.
|
||||
const cacheDir = path.join(app.getPath("cache"), "ente");
|
||||
const filePath = path.join(cacheDir, "face-crops", faceID);
|
||||
return existsSync(filePath) ? await fs.readFile(filePath) : undefined;
|
||||
};
|
|
@ -1,33 +1,37 @@
|
|||
import pathToFfmpeg from "ffmpeg-static";
|
||||
import { existsSync } from "node:fs";
|
||||
import fs from "node:fs/promises";
|
||||
import { ElectronFile } from "../../types/ipc";
|
||||
import type { ZipItem } from "../../types/ipc";
|
||||
import log from "../log";
|
||||
import { writeStream } from "../stream";
|
||||
import { generateTempFilePath, getTempDirPath } from "../temp";
|
||||
import { execAsync } from "../util";
|
||||
import { ensure, withTimeout } from "../utils/common";
|
||||
import { execAsync } from "../utils/electron";
|
||||
import {
|
||||
deleteTempFile,
|
||||
makeFileForDataOrPathOrZipItem,
|
||||
makeTempFilePath,
|
||||
} from "../utils/temp";
|
||||
|
||||
const INPUT_PATH_PLACEHOLDER = "INPUT";
|
||||
const FFMPEG_PLACEHOLDER = "FFMPEG";
|
||||
const OUTPUT_PATH_PLACEHOLDER = "OUTPUT";
|
||||
/* Duplicated in the web app's code (used by the WASM FFmpeg implementation). */
|
||||
const ffmpegPathPlaceholder = "FFMPEG";
|
||||
const inputPathPlaceholder = "INPUT";
|
||||
const outputPathPlaceholder = "OUTPUT";
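
// An illustrative (hypothetical, not from this diff) command the web layer
// might send, using these placeholders:
//
//     ["FFMPEG", "-i", "INPUT", "OUTPUT"]
//
// Each placeholder gets substituted with the actual binary path and the
// temporary input/output file paths before execution (see
// `substitutePlaceholders` below).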
|
||||
|
||||
/**
|
||||
* Run a ffmpeg command
|
||||
* Run a FFmpeg command
|
||||
*
|
||||
* [Note: FFMPEG in Electron]
|
||||
* [Note: FFmpeg in Electron]
|
||||
*
|
||||
* There is a wasm build of FFMPEG, but that is currently 10-20 times slower
|
||||
* There is a wasm build of FFmpeg, but that is currently 10-20 times slower
|
||||
* than the native build. That is slow enough to be unusable for our purposes.
|
||||
* https://ffmpegwasm.netlify.app/docs/performance
|
||||
*
|
||||
* So the alternative is to bundle a ffmpeg binary with our app. e.g.
|
||||
* So the alternative is to bundle a FFmpeg executable binary with our app. e.g.
|
||||
*
|
||||
* yarn add fluent-ffmpeg ffmpeg-static ffprobe-static
|
||||
*
|
||||
* (we only use ffmpeg-static, the rest are mentioned for completeness' sake).
|
||||
*
|
||||
* Interestingly, Electron already bundles an ffmpeg library (it comes from the
|
||||
* ffmpeg fork maintained by Chromium).
|
||||
* Interestingly, Electron already bundles a binary FFmpeg library (it comes
|
||||
* from the ffmpeg fork maintained by Chromium).
|
||||
* https://chromium.googlesource.com/chromium/third_party/ffmpeg
|
||||
* https://stackoverflow.com/questions/53963672/what-version-of-ffmpeg-is-bundled-inside-electron
|
||||
*
|
||||
|
@ -36,126 +40,75 @@ const OUTPUT_PATH_PLACEHOLDER = "OUTPUT";
|
|||
* $ file ente.app/Contents/Frameworks/Electron\ Framework.framework/Versions/Current/Libraries/libffmpeg.dylib
|
||||
* .../libffmpeg.dylib: Mach-O 64-bit dynamically linked shared library arm64
|
||||
*
|
||||
* I'm not sure if our code is supposed to be able to use it, and how.
|
||||
* But I'm not sure if our code is supposed to be able to use it, and how.
|
||||
*/
|
||||
export async function runFFmpegCmd(
|
||||
cmd: string[],
|
||||
inputFile: File | ElectronFile,
|
||||
outputFileName: string,
|
||||
dontTimeout?: boolean,
|
||||
) {
|
||||
let inputFilePath = null;
|
||||
let createdTempInputFile = null;
|
||||
export const ffmpegExec = async (
|
||||
command: string[],
|
||||
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
|
||||
outputFileExtension: string,
|
||||
timeoutMS: number,
|
||||
): Promise<Uint8Array> => {
|
||||
// TODO (MR): This currently copies files for both input (when
|
||||
// dataOrPathOrZipItem is data) and output. This needs to be tested with
// extremely large video files when invoked downstream of `convertToMP4` in
|
||||
// the web code.
|
||||
|
||||
const {
|
||||
path: inputFilePath,
|
||||
isFileTemporary: isInputFileTemporary,
|
||||
writeToTemporaryFile: writeToTemporaryInputFile,
|
||||
} = await makeFileForDataOrPathOrZipItem(dataOrPathOrZipItem);
|
||||
|
||||
const outputFilePath = await makeTempFilePath(outputFileExtension);
|
||||
try {
|
||||
if (!existsSync(inputFile.path)) {
|
||||
const tempFilePath = await generateTempFilePath(inputFile.name);
|
||||
await writeStream(tempFilePath, await inputFile.stream());
|
||||
inputFilePath = tempFilePath;
|
||||
createdTempInputFile = true;
|
||||
} else {
|
||||
inputFilePath = inputFile.path;
|
||||
}
|
||||
const outputFileData = await runFFmpegCmd_(
|
||||
cmd,
|
||||
await writeToTemporaryInputFile();
|
||||
|
||||
const cmd = substitutePlaceholders(
|
||||
command,
|
||||
inputFilePath,
|
||||
outputFileName,
|
||||
dontTimeout,
|
||||
outputFilePath,
|
||||
);
|
||||
return new File([outputFileData], outputFileName);
|
||||
|
||||
if (timeoutMS) await withTimeout(execAsync(cmd), timeoutMS);
|
||||
else await execAsync(cmd);
|
||||
|
||||
return fs.readFile(outputFilePath);
|
||||
} finally {
|
||||
if (createdTempInputFile) {
|
||||
await deleteTempFile(inputFilePath);
|
||||
try {
|
||||
if (isInputFileTemporary) await deleteTempFile(inputFilePath);
|
||||
await deleteTempFile(outputFilePath);
|
||||
} catch (e) {
|
||||
log.error("Could not clean up temp files", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export async function runFFmpegCmd_(
|
||||
cmd: string[],
|
||||
const substitutePlaceholders = (
|
||||
command: string[],
|
||||
inputFilePath: string,
|
||||
outputFileName: string,
|
||||
dontTimeout = false,
|
||||
) {
|
||||
let tempOutputFilePath: string;
|
||||
try {
|
||||
tempOutputFilePath = await generateTempFilePath(outputFileName);
|
||||
|
||||
cmd = cmd.map((cmdPart) => {
|
||||
if (cmdPart === FFMPEG_PLACEHOLDER) {
|
||||
return ffmpegBinaryPath();
|
||||
} else if (cmdPart === INPUT_PATH_PLACEHOLDER) {
|
||||
return inputFilePath;
|
||||
} else if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
|
||||
return tempOutputFilePath;
|
||||
} else {
|
||||
return cmdPart;
|
||||
}
|
||||
});
|
||||
|
||||
if (dontTimeout) {
|
||||
await execAsync(cmd);
|
||||
outputFilePath: string,
|
||||
) =>
|
||||
command.map((segment) => {
|
||||
if (segment == ffmpegPathPlaceholder) {
|
||||
return ffmpegBinaryPath();
|
||||
} else if (segment == inputPathPlaceholder) {
|
||||
return inputFilePath;
|
||||
} else if (segment == outputPathPlaceholder) {
|
||||
return outputFilePath;
|
||||
} else {
|
||||
await promiseWithTimeout(execAsync(cmd), 30 * 1000);
|
||||
return segment;
|
||||
}
|
||||
|
||||
if (!existsSync(tempOutputFilePath)) {
|
||||
throw new Error("ffmpeg output file not found");
|
||||
}
|
||||
const outputFile = await fs.readFile(tempOutputFilePath);
|
||||
return new Uint8Array(outputFile);
|
||||
} catch (e) {
|
||||
log.error("FFMPEG command failed", e);
|
||||
throw e;
|
||||
} finally {
|
||||
await deleteTempFile(tempOutputFilePath);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Return the path to the `ffmpeg` binary.
|
||||
*
|
||||
* At runtime, the ffmpeg binary is present in a path like (macOS example):
|
||||
* At runtime, the FFmpeg binary is present in a path like (macOS example):
|
||||
* `ente.app/Contents/Resources/app.asar.unpacked/node_modules/ffmpeg-static/ffmpeg`
|
||||
*/
|
||||
const ffmpegBinaryPath = () => {
|
||||
// This substitution of app.asar by app.asar.unpacked is suggested by the
|
||||
// ffmpeg-static library author themselves:
|
||||
// https://github.com/eugeneware/ffmpeg-static/issues/16
|
||||
return pathToFfmpeg.replace("app.asar", "app.asar.unpacked");
|
||||
};
|
||||
|
||||
export async function writeTempFile(fileStream: Uint8Array, fileName: string) {
|
||||
const tempFilePath = await generateTempFilePath(fileName);
|
||||
await fs.writeFile(tempFilePath, fileStream);
|
||||
return tempFilePath;
|
||||
}
|
||||
|
||||
export async function deleteTempFile(tempFilePath: string) {
|
||||
const tempDirPath = await getTempDirPath();
|
||||
if (!tempFilePath.startsWith(tempDirPath))
|
||||
log.error("Attempting to delete a non-temp file ${tempFilePath}");
|
||||
await fs.rm(tempFilePath, { force: true });
|
||||
}
|
||||
|
||||
const promiseWithTimeout = async <T>(
|
||||
request: Promise<T>,
|
||||
timeout: number,
|
||||
): Promise<T> => {
|
||||
const timeoutRef: {
|
||||
current: NodeJS.Timeout;
|
||||
} = { current: null };
|
||||
const rejectOnTimeout = new Promise<null>((_, reject) => {
|
||||
timeoutRef.current = setTimeout(
|
||||
() => reject(new Error("Operation timed out")),
|
||||
timeout,
|
||||
);
|
||||
});
|
||||
const requestWithTimeOutCancellation = async () => {
|
||||
const resp = await request;
|
||||
clearTimeout(timeoutRef.current);
|
||||
return resp;
|
||||
};
|
||||
return await Promise.race([
|
||||
requestWithTimeOutCancellation(),
|
||||
rejectOnTimeout,
|
||||
]);
|
||||
return ensure(pathToFfmpeg).replace("app.asar", "app.asar.unpacked");
|
||||
};
|
||||
|
|
|
@ -1,177 +1,30 @@
|
|||
import StreamZip from "node-stream-zip";
|
||||
/**
|
||||
* @file file system related functions exposed over the context bridge.
|
||||
*/
|
||||
|
||||
import { existsSync } from "node:fs";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { ElectronFile } from "../../types/ipc";
|
||||
import log from "../log";
|
||||
|
||||
const FILE_STREAM_CHUNK_SIZE: number = 4 * 1024 * 1024;
|
||||
export const fsExists = (path: string) => existsSync(path);
|
||||
|
||||
export async function getDirFiles(dirPath: string) {
|
||||
const files = await getDirFilePaths(dirPath);
|
||||
const electronFiles = await Promise.all(files.map(getElectronFile));
|
||||
return electronFiles;
|
||||
}
|
||||
export const fsRename = (oldPath: string, newPath: string) =>
|
||||
fs.rename(oldPath, newPath);
|
||||
|
||||
// https://stackoverflow.com/a/63111390
|
||||
export const getDirFilePaths = async (dirPath: string) => {
|
||||
if (!(await fs.stat(dirPath)).isDirectory()) {
|
||||
return [dirPath];
|
||||
}
|
||||
export const fsMkdirIfNeeded = (dirPath: string) =>
|
||||
fs.mkdir(dirPath, { recursive: true });
|
||||
|
||||
let files: string[] = [];
|
||||
const filePaths = await fs.readdir(dirPath);
|
||||
export const fsRmdir = (path: string) => fs.rmdir(path);
|
||||
|
||||
for (const filePath of filePaths) {
|
||||
const absolute = path.join(dirPath, filePath);
|
||||
files = [...files, ...(await getDirFilePaths(absolute))];
|
||||
}
|
||||
export const fsRm = (path: string) => fs.rm(path);
|
||||
|
||||
return files;
|
||||
};
|
||||
|
||||
const getFileStream = async (filePath: string) => {
|
||||
const file = await fs.open(filePath, "r");
|
||||
let offset = 0;
|
||||
const readableStream = new ReadableStream<Uint8Array>({
|
||||
async pull(controller) {
|
||||
try {
|
||||
const buff = new Uint8Array(FILE_STREAM_CHUNK_SIZE);
|
||||
const bytesRead = (await file.read(
|
||||
buff,
|
||||
0,
|
||||
FILE_STREAM_CHUNK_SIZE,
|
||||
offset,
|
||||
)) as unknown as number;
|
||||
offset += bytesRead;
|
||||
if (bytesRead === 0) {
|
||||
controller.close();
|
||||
await file.close();
|
||||
} else {
|
||||
controller.enqueue(buff.slice(0, bytesRead));
|
||||
}
|
||||
} catch (e) {
|
||||
await file.close();
|
||||
}
|
||||
},
|
||||
async cancel() {
|
||||
await file.close();
|
||||
},
|
||||
});
|
||||
return readableStream;
|
||||
};
|
||||
|
||||
export async function getElectronFile(filePath: string): Promise<ElectronFile> {
|
||||
const fileStats = await fs.stat(filePath);
|
||||
return {
|
||||
path: filePath.split(path.sep).join(path.posix.sep),
|
||||
name: path.basename(filePath),
|
||||
size: fileStats.size,
|
||||
lastModified: fileStats.mtime.valueOf(),
|
||||
stream: async () => {
|
||||
if (!existsSync(filePath)) {
|
||||
throw new Error("electronFile does not exist");
|
||||
}
|
||||
return await getFileStream(filePath);
|
||||
},
|
||||
blob: async () => {
|
||||
if (!existsSync(filePath)) {
|
||||
throw new Error("electronFile does not exist");
|
||||
}
|
||||
const blob = await fs.readFile(filePath);
|
||||
return new Blob([new Uint8Array(blob)]);
|
||||
},
|
||||
arrayBuffer: async () => {
|
||||
if (!existsSync(filePath)) {
|
||||
throw new Error("electronFile does not exist");
|
||||
}
|
||||
const blob = await fs.readFile(filePath);
|
||||
return new Uint8Array(blob);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export const getZipFileStream = async (
|
||||
zip: StreamZip.StreamZipAsync,
|
||||
filePath: string,
|
||||
) => {
|
||||
const stream = await zip.stream(filePath);
|
||||
const done = {
|
||||
current: false,
|
||||
};
|
||||
const inProgress = {
|
||||
current: false,
|
||||
};
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
let resolveObj: (value?: any) => void = null;
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
let rejectObj: (reason?: any) => void = null;
|
||||
stream.on("readable", () => {
|
||||
try {
|
||||
if (resolveObj) {
|
||||
inProgress.current = true;
|
||||
const chunk = stream.read(FILE_STREAM_CHUNK_SIZE) as Buffer;
|
||||
if (chunk) {
|
||||
resolveObj(new Uint8Array(chunk));
|
||||
resolveObj = null;
|
||||
}
|
||||
inProgress.current = false;
|
||||
}
|
||||
} catch (e) {
|
||||
rejectObj(e);
|
||||
}
|
||||
});
|
||||
stream.on("end", () => {
|
||||
try {
|
||||
done.current = true;
|
||||
if (resolveObj && !inProgress.current) {
|
||||
resolveObj(null);
|
||||
resolveObj = null;
|
||||
}
|
||||
} catch (e) {
|
||||
rejectObj(e);
|
||||
}
|
||||
});
|
||||
stream.on("error", (e) => {
|
||||
try {
|
||||
done.current = true;
|
||||
if (rejectObj) {
|
||||
rejectObj(e);
|
||||
rejectObj = null;
|
||||
}
|
||||
} catch (e) {
|
||||
rejectObj(e);
|
||||
}
|
||||
});
|
||||
|
||||
const readStreamData = async () => {
|
||||
return new Promise<Uint8Array>((resolve, reject) => {
|
||||
const chunk = stream.read(FILE_STREAM_CHUNK_SIZE) as Buffer;
|
||||
|
||||
if (chunk || done.current) {
|
||||
resolve(chunk);
|
||||
} else {
|
||||
resolveObj = resolve;
|
||||
rejectObj = reject;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
const readableStream = new ReadableStream<Uint8Array>({
|
||||
async pull(controller) {
|
||||
try {
|
||||
const data = await readStreamData();
|
||||
|
||||
if (data) {
|
||||
controller.enqueue(data);
|
||||
} else {
|
||||
controller.close();
|
||||
}
|
||||
} catch (e) {
|
||||
log.error("Failed to pull from readableStream", e);
|
||||
controller.close();
|
||||
}
|
||||
},
|
||||
});
|
||||
return readableStream;
|
||||
export const fsReadTextFile = async (filePath: string) =>
|
||||
fs.readFile(filePath, "utf-8");
|
||||
|
||||
export const fsWriteFile = (path: string, contents: string) =>
|
||||
fs.writeFile(path, contents);
|
||||
|
||||
export const fsIsDir = async (dirPath: string) => {
|
||||
if (!existsSync(dirPath)) return false;
|
||||
const stat = await fs.stat(dirPath);
|
||||
return stat.isDirectory();
|
||||
};
|
||||
|
|
159
desktop/src/main/services/image.ts
Normal file
|
@ -0,0 +1,159 @@
|
|||
/** @file Image format conversions and thumbnail generation */
|
||||
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { CustomErrorMessage, type ZipItem } from "../../types/ipc";
|
||||
import log from "../log";
|
||||
import { execAsync, isDev } from "../utils/electron";
|
||||
import {
|
||||
deleteTempFile,
|
||||
makeFileForDataOrPathOrZipItem,
|
||||
makeTempFilePath,
|
||||
} from "../utils/temp";
|
||||
|
||||
export const convertToJPEG = async (imageData: Uint8Array) => {
|
||||
const inputFilePath = await makeTempFilePath();
|
||||
const outputFilePath = await makeTempFilePath("jpeg");
|
||||
|
||||
// Construct the command first, it may throw NotAvailable on win32.
|
||||
const command = convertToJPEGCommand(inputFilePath, outputFilePath);
|
||||
|
||||
try {
|
||||
await fs.writeFile(inputFilePath, imageData);
|
||||
await execAsync(command);
|
||||
return new Uint8Array(await fs.readFile(outputFilePath));
|
||||
} finally {
|
||||
try {
|
||||
await deleteTempFile(inputFilePath);
|
||||
await deleteTempFile(outputFilePath);
|
||||
} catch (e) {
|
||||
log.error("Could not clean up temp files", e);
|
||||
}
|
||||
}
|
||||
};
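
// Usage sketch (hypothetical caller): the renderer sends the original
// image's bytes over IPC and gets back JPEG bytes:
//
//     const jpegData = await convertToJPEG(new Uint8Array(imageBytes));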
|
||||
|
||||
const convertToJPEGCommand = (
|
||||
inputFilePath: string,
|
||||
outputFilePath: string,
|
||||
) => {
|
||||
switch (process.platform) {
|
||||
case "darwin":
|
||||
return [
|
||||
"sips",
|
||||
"-s",
|
||||
"format",
|
||||
"jpeg",
|
||||
inputFilePath,
|
||||
"--out",
|
||||
outputFilePath,
|
||||
];
|
||||
|
||||
case "linux":
|
||||
return [
|
||||
imageMagickPath(),
|
||||
inputFilePath,
|
||||
"-quality",
|
||||
"100%",
|
||||
outputFilePath,
|
||||
];
|
||||
|
||||
default: // "win32"
|
||||
throw new Error(CustomErrorMessage.NotAvailable);
|
||||
}
|
||||
};
|
||||
|
||||
/** Path to the Linux image-magick executable bundled with our app */
|
||||
const imageMagickPath = () =>
|
||||
path.join(isDev ? "build" : process.resourcesPath, "image-magick");
|
||||
|
||||
export const generateImageThumbnail = async (
|
||||
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
|
||||
maxDimension: number,
|
||||
maxSize: number,
|
||||
): Promise<Uint8Array> => {
|
||||
const {
|
||||
path: inputFilePath,
|
||||
isFileTemporary: isInputFileTemporary,
|
||||
writeToTemporaryFile: writeToTemporaryInputFile,
|
||||
} = await makeFileForDataOrPathOrZipItem(dataOrPathOrZipItem);
|
||||
|
||||
const outputFilePath = await makeTempFilePath("jpeg");
|
||||
|
||||
// Construct the command first, it may throw `NotAvailable` on win32.
|
||||
let quality = 70;
|
||||
let command = generateImageThumbnailCommand(
|
||||
inputFilePath,
|
||||
outputFilePath,
|
||||
maxDimension,
|
||||
quality,
|
||||
);
|
||||
|
||||
try {
|
||||
await writeToTemporaryInputFile();
|
||||
|
||||
let thumbnail: Uint8Array;
|
||||
do {
|
||||
await execAsync(command);
|
||||
thumbnail = new Uint8Array(await fs.readFile(outputFilePath));
|
||||
quality -= 10;
|
||||
command = generateImageThumbnailCommand(
|
||||
inputFilePath,
|
||||
outputFilePath,
|
||||
maxDimension,
|
||||
quality,
|
||||
);
|
||||
} while (thumbnail.length > maxSize && quality > 50);
|
||||
return thumbnail;
|
||||
} finally {
|
||||
try {
|
||||
if (isInputFileTemporary) await deleteTempFile(inputFilePath);
|
||||
await deleteTempFile(outputFilePath);
|
||||
} catch (e) {
|
||||
log.error("Could not clean up temp files", e);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const generateImageThumbnailCommand = (
|
||||
inputFilePath: string,
|
||||
outputFilePath: string,
|
||||
maxDimension: number,
|
||||
quality: number,
|
||||
) => {
|
||||
switch (process.platform) {
|
||||
case "darwin":
|
||||
return [
|
||||
"sips",
|
||||
"-s",
|
||||
"format",
|
||||
"jpeg",
|
||||
"-s",
|
||||
"formatOptions",
|
||||
`${quality}`,
|
||||
"-Z",
|
||||
`${maxDimension}`,
|
||||
inputFilePath,
|
||||
"--out",
|
||||
outputFilePath,
|
||||
];
|
||||
|
||||
case "linux":
|
||||
return [
|
||||
imageMagickPath(),
|
||||
inputFilePath,
|
||||
"-auto-orient",
|
||||
"-define",
|
||||
`jpeg:size=${2 * maxDimension}x${2 * maxDimension}`,
|
||||
"-thumbnail",
|
||||
`${maxDimension}x${maxDimension}>`,
|
||||
"-unsharp",
|
||||
"0x.5",
|
||||
"-quality",
|
||||
`${quality}`,
|
||||
outputFilePath,
|
||||
];
|
||||
|
||||
default: // "win32"
|
||||
throw new Error(CustomErrorMessage.NotAvailable);
|
||||
}
|
||||
};
|
@@ -1,288 +0,0 @@
import { existsSync } from "fs";
import fs from "node:fs/promises";
import path from "path";
import { CustomErrors, ElectronFile } from "../../types/ipc";
import log from "../log";
import { writeStream } from "../stream";
import { generateTempFilePath } from "../temp";
import { execAsync, isDev } from "../util";
import { deleteTempFile } from "./ffmpeg";

const IMAGE_MAGICK_PLACEHOLDER = "IMAGE_MAGICK";
const MAX_DIMENSION_PLACEHOLDER = "MAX_DIMENSION";
const SAMPLE_SIZE_PLACEHOLDER = "SAMPLE_SIZE";
const INPUT_PATH_PLACEHOLDER = "INPUT";
const OUTPUT_PATH_PLACEHOLDER = "OUTPUT";
const QUALITY_PLACEHOLDER = "QUALITY";

const MAX_QUALITY = 70;
const MIN_QUALITY = 50;

const SIPS_HEIC_CONVERT_COMMAND_TEMPLATE = [
    "sips",
    "-s",
    "format",
    "jpeg",
    INPUT_PATH_PLACEHOLDER,
    "--out",
    OUTPUT_PATH_PLACEHOLDER,
];

const SIPS_THUMBNAIL_GENERATE_COMMAND_TEMPLATE = [
    "sips",
    "-s",
    "format",
    "jpeg",
    "-s",
    "formatOptions",
    QUALITY_PLACEHOLDER,
    "-Z",
    MAX_DIMENSION_PLACEHOLDER,
    INPUT_PATH_PLACEHOLDER,
    "--out",
    OUTPUT_PATH_PLACEHOLDER,
];

const IMAGEMAGICK_HEIC_CONVERT_COMMAND_TEMPLATE = [
    IMAGE_MAGICK_PLACEHOLDER,
    INPUT_PATH_PLACEHOLDER,
    "-quality",
    "100%",
    OUTPUT_PATH_PLACEHOLDER,
];

const IMAGE_MAGICK_THUMBNAIL_GENERATE_COMMAND_TEMPLATE = [
    IMAGE_MAGICK_PLACEHOLDER,
    INPUT_PATH_PLACEHOLDER,
    "-auto-orient",
    "-define",
    `jpeg:size=${SAMPLE_SIZE_PLACEHOLDER}x${SAMPLE_SIZE_PLACEHOLDER}`,
    "-thumbnail",
    `${MAX_DIMENSION_PLACEHOLDER}x${MAX_DIMENSION_PLACEHOLDER}>`,
    "-unsharp",
    "0x.5",
    "-quality",
    QUALITY_PLACEHOLDER,
    OUTPUT_PATH_PLACEHOLDER,
];

const imageMagickStaticPath = () =>
    path.join(isDev ? "build" : process.resourcesPath, "image-magick");

export async function convertToJPEG(
    fileData: Uint8Array,
    filename: string,
): Promise<Uint8Array> {
    if (process.platform == "win32")
        throw Error(CustomErrors.WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED);
    const convertedFileData = await convertToJPEG_(fileData, filename);
    return convertedFileData;
}

async function convertToJPEG_(
    fileData: Uint8Array,
    filename: string,
): Promise<Uint8Array> {
    let tempInputFilePath: string;
    let tempOutputFilePath: string;
    try {
        tempInputFilePath = await generateTempFilePath(filename);
        tempOutputFilePath = await generateTempFilePath("output.jpeg");

        await fs.writeFile(tempInputFilePath, fileData);

        await execAsync(
            constructConvertCommand(tempInputFilePath, tempOutputFilePath),
        );

        return new Uint8Array(await fs.readFile(tempOutputFilePath));
    } catch (e) {
        log.error("Failed to convert HEIC", e);
        throw e;
    } finally {
        try {
            await fs.rm(tempInputFilePath, { force: true });
        } catch (e) {
            log.error(`Failed to remove tempInputFile ${tempInputFilePath}`, e);
        }
        try {
            await fs.rm(tempOutputFilePath, { force: true });
        } catch (e) {
            log.error(
                `Failed to remove tempOutputFile ${tempOutputFilePath}`,
                e,
            );
        }
    }
}

function constructConvertCommand(
    tempInputFilePath: string,
    tempOutputFilePath: string,
) {
    let convertCmd: string[];
    if (process.platform == "darwin") {
        convertCmd = SIPS_HEIC_CONVERT_COMMAND_TEMPLATE.map((cmdPart) => {
            if (cmdPart === INPUT_PATH_PLACEHOLDER) {
                return tempInputFilePath;
            }
            if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
                return tempOutputFilePath;
            }
            return cmdPart;
        });
    } else if (process.platform == "linux") {
        convertCmd = IMAGEMAGICK_HEIC_CONVERT_COMMAND_TEMPLATE.map(
            (cmdPart) => {
                if (cmdPart === IMAGE_MAGICK_PLACEHOLDER) {
                    return imageMagickStaticPath();
                }
                if (cmdPart === INPUT_PATH_PLACEHOLDER) {
                    return tempInputFilePath;
                }
                if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
                    return tempOutputFilePath;
                }
                return cmdPart;
            },
        );
    } else {
        throw new Error(`Unsupported OS ${process.platform}`);
    }
    return convertCmd;
}

export async function generateImageThumbnail(
    inputFile: File | ElectronFile,
    maxDimension: number,
    maxSize: number,
): Promise<Uint8Array> {
    let inputFilePath = null;
    let createdTempInputFile = null;
    try {
        if (process.platform == "win32")
            throw Error(
                CustomErrors.WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED,
            );
        if (!existsSync(inputFile.path)) {
            const tempFilePath = await generateTempFilePath(inputFile.name);
            await writeStream(tempFilePath, await inputFile.stream());
            inputFilePath = tempFilePath;
            createdTempInputFile = true;
        } else {
            inputFilePath = inputFile.path;
        }
        const thumbnail = await generateImageThumbnail_(
            inputFilePath,
            maxDimension,
            maxSize,
        );
        return thumbnail;
    } finally {
        if (createdTempInputFile) {
            try {
                await deleteTempFile(inputFilePath);
            } catch (e) {
                log.error(`Failed to deleteTempFile ${inputFilePath}`, e);
            }
        }
    }
}

async function generateImageThumbnail_(
    inputFilePath: string,
    width: number,
    maxSize: number,
): Promise<Uint8Array> {
    let tempOutputFilePath: string;
    let quality = MAX_QUALITY;
    try {
        tempOutputFilePath = await generateTempFilePath("thumb.jpeg");
        let thumbnail: Uint8Array;
        do {
            await execAsync(
                constructThumbnailGenerationCommand(
                    inputFilePath,
                    tempOutputFilePath,
                    width,
                    quality,
                ),
            );
            thumbnail = new Uint8Array(await fs.readFile(tempOutputFilePath));
            quality -= 10;
        } while (thumbnail.length > maxSize && quality > MIN_QUALITY);
        return thumbnail;
    } catch (e) {
        log.error("Failed to generate image thumbnail", e);
        throw e;
    } finally {
        try {
            await fs.rm(tempOutputFilePath, { force: true });
        } catch (e) {
            log.error(
                `Failed to remove tempOutputFile ${tempOutputFilePath}`,
                e,
            );
        }
    }
}

function constructThumbnailGenerationCommand(
    inputFilePath: string,
    tempOutputFilePath: string,
    maxDimension: number,
    quality: number,
) {
    let thumbnailGenerationCmd: string[];
    if (process.platform == "darwin") {
        thumbnailGenerationCmd = SIPS_THUMBNAIL_GENERATE_COMMAND_TEMPLATE.map(
            (cmdPart) => {
                if (cmdPart === INPUT_PATH_PLACEHOLDER) {
                    return inputFilePath;
                }
                if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
                    return tempOutputFilePath;
                }
                if (cmdPart === MAX_DIMENSION_PLACEHOLDER) {
                    return maxDimension.toString();
                }
                if (cmdPart === QUALITY_PLACEHOLDER) {
                    return quality.toString();
                }
                return cmdPart;
            },
        );
    } else if (process.platform == "linux") {
        thumbnailGenerationCmd =
            IMAGE_MAGICK_THUMBNAIL_GENERATE_COMMAND_TEMPLATE.map((cmdPart) => {
                if (cmdPart === IMAGE_MAGICK_PLACEHOLDER) {
                    return imageMagickStaticPath();
                }
                if (cmdPart === INPUT_PATH_PLACEHOLDER) {
                    return inputFilePath;
                }
                if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
                    return tempOutputFilePath;
                }
                if (cmdPart.includes(SAMPLE_SIZE_PLACEHOLDER)) {
                    return cmdPart.replaceAll(
                        SAMPLE_SIZE_PLACEHOLDER,
                        (2 * maxDimension).toString(),
                    );
                }
                if (cmdPart.includes(MAX_DIMENSION_PLACEHOLDER)) {
                    return cmdPart.replaceAll(
                        MAX_DIMENSION_PLACEHOLDER,
                        maxDimension.toString(),
                    );
                }
                if (cmdPart === QUALITY_PLACEHOLDER) {
                    return quality.toString();
                }
                return cmdPart;
            });
    } else {
        throw new Error(`Unsupported OS ${process.platform}`);
    }
    return thumbnailGenerationCmd;
}

@@ -5,117 +5,25 @@
 *
 * @see `web/apps/photos/src/services/clip-service.ts` for more details.
 */
import { existsSync } from "fs";
import jpeg from "jpeg-js";
import fs from "node:fs/promises";
import * as ort from "onnxruntime-node";
import Tokenizer from "../../thirdparty/clip-bpe-ts/mod";
import { CustomErrors } from "../../types/ipc";
import log from "../log";
import { writeStream } from "../stream";
import { generateTempFilePath } from "../temp";
import { deleteTempFile } from "./ffmpeg";
import {
    createInferenceSession,
    downloadModel,
    modelPathDownloadingIfNeeded,
    modelSavePath,
} from "./ml";
import { ensure } from "../utils/common";
import { deleteTempFile, makeTempFilePath } from "../utils/temp";
import { makeCachedInferenceSession } from "./ml";

const textModelName = "clip-text-vit-32-uint8.onnx";
const textModelByteSize = 64173509; // 61.2 MB

const imageModelName = "clip-image-vit-32-float32.onnx";
const imageModelByteSize = 351468764; // 335.2 MB

let activeImageModelDownload: Promise<string> | undefined;

const imageModelPathDownloadingIfNeeded = async () => {
    try {
        if (activeImageModelDownload) {
            log.info("Waiting for CLIP image model download to finish");
            await activeImageModelDownload;
        } else {
            activeImageModelDownload = modelPathDownloadingIfNeeded(
                imageModelName,
                imageModelByteSize,
            );
            return await activeImageModelDownload;
        }
    } finally {
        activeImageModelDownload = undefined;
    }
};

let textModelDownloadInProgress = false;

/* TODO(MR): use the generic method. Then we can remove the exports for the
   internal details functions that we use here */
const textModelPathDownloadingIfNeeded = async () => {
    if (textModelDownloadInProgress)
        throw Error(CustomErrors.MODEL_DOWNLOAD_PENDING);

    const modelPath = modelSavePath(textModelName);
    if (!existsSync(modelPath)) {
        log.info("CLIP text model not found, downloading");
        textModelDownloadInProgress = true;
        downloadModel(modelPath, textModelName)
            .catch((e) => {
                // log but otherwise ignore
                log.error("CLIP text model download failed", e);
            })
            .finally(() => {
                textModelDownloadInProgress = false;
            });
        throw Error(CustomErrors.MODEL_DOWNLOAD_PENDING);
    } else {
        const localFileSize = (await fs.stat(modelPath)).size;
        if (localFileSize !== textModelByteSize) {
            log.error(
                `CLIP text model size ${localFileSize} does not match the expected size, downloading again`,
            );
            textModelDownloadInProgress = true;
            downloadModel(modelPath, textModelName)
                .catch((e) => {
                    // log but otherwise ignore
                    log.error("CLIP text model download failed", e);
                })
                .finally(() => {
                    textModelDownloadInProgress = false;
                });
            throw Error(CustomErrors.MODEL_DOWNLOAD_PENDING);
        }
    }

    return modelPath;
};

let imageSessionPromise: Promise<any> | undefined;

const onnxImageSession = async () => {
    if (!imageSessionPromise) {
        imageSessionPromise = (async () => {
            const modelPath = await imageModelPathDownloadingIfNeeded();
            return createInferenceSession(modelPath);
        })();
    }
    return imageSessionPromise;
};

let _textSession: any = null;

const onnxTextSession = async () => {
    if (!_textSession) {
        const modelPath = await textModelPathDownloadingIfNeeded();
        _textSession = await createInferenceSession(modelPath);
    }
    return _textSession;
};

const cachedCLIPImageSession = makeCachedInferenceSession(
    "clip-image-vit-32-float32.onnx",
    351468764 /* 335.2 MB */,
);

export const clipImageEmbedding = async (jpegImageData: Uint8Array) => {
    const tempFilePath = await generateTempFilePath("");
    const tempFilePath = await makeTempFilePath();
    const imageStream = new Response(jpegImageData.buffer).body;
    await writeStream(tempFilePath, imageStream);
    await writeStream(tempFilePath, ensure(imageStream));
    try {
        return await clipImageEmbedding_(tempFilePath);
    } finally {
@@ -124,42 +32,43 @@ export const clipImageEmbedding = async (jpegImageData: Uint8Array) => {
};

const clipImageEmbedding_ = async (jpegFilePath: string) => {
    const imageSession = await onnxImageSession();
    const session = await cachedCLIPImageSession();
    const t1 = Date.now();
    const rgbData = await getRGBData(jpegFilePath);
    const feeds = {
        input: new ort.Tensor("float32", rgbData, [1, 3, 224, 224]),
    };
    const t2 = Date.now();
    const results = await imageSession.run(feeds);
    const results = await session.run(feeds);
    log.debug(
        () =>
            `onnx/clip image embedding took ${Date.now() - t1} ms (prep: ${t2 - t1} ms, inference: ${Date.now() - t2} ms)`,
    );
    const imageEmbedding = results["output"].data; // Float32Array
    /* Need these model specific casts to type the result */
    const imageEmbedding = ensure(results.output).data as Float32Array;
    return normalizeEmbedding(imageEmbedding);
};

const getRGBData = async (jpegFilePath: string) => {
const getRGBData = async (jpegFilePath: string): Promise<number[]> => {
    const jpegData = await fs.readFile(jpegFilePath);
    const rawImageData = jpeg.decode(jpegData, {
        useTArray: true,
        formatAsRGBA: false,
    });

    const nx: number = rawImageData.width;
    const ny: number = rawImageData.height;
    const inputImage: Uint8Array = rawImageData.data;
    const nx = rawImageData.width;
    const ny = rawImageData.height;
    const inputImage = rawImageData.data;

    const nx2: number = 224;
    const ny2: number = 224;
    const totalSize: number = 3 * nx2 * ny2;
    const nx2 = 224;
    const ny2 = 224;
    const totalSize = 3 * nx2 * ny2;

    const result: number[] = Array(totalSize).fill(0);
    const scale: number = Math.max(nx, ny) / 224;
    const result = Array<number>(totalSize).fill(0);
    const scale = Math.max(nx, ny) / 224;

    const nx3: number = Math.round(nx / scale);
    const ny3: number = Math.round(ny / scale);
    const nx3 = Math.round(nx / scale);
    const ny3 = Math.round(ny / scale);

    const mean: number[] = [0.48145466, 0.4578275, 0.40821073];
    const std: number[] = [0.26862954, 0.26130258, 0.27577711];
@@ -168,40 +77,40 @@ const getRGBData = async (jpegFilePath: string) => {
        for (let x = 0; x < nx3; x++) {
            for (let c = 0; c < 3; c++) {
                // Linear interpolation
                const sx: number = (x + 0.5) * scale - 0.5;
                const sy: number = (y + 0.5) * scale - 0.5;
                const sx = (x + 0.5) * scale - 0.5;
                const sy = (y + 0.5) * scale - 0.5;

                const x0: number = Math.max(0, Math.floor(sx));
                const y0: number = Math.max(0, Math.floor(sy));
                const x0 = Math.max(0, Math.floor(sx));
                const y0 = Math.max(0, Math.floor(sy));

                const x1: number = Math.min(x0 + 1, nx - 1);
                const y1: number = Math.min(y0 + 1, ny - 1);
                const x1 = Math.min(x0 + 1, nx - 1);
                const y1 = Math.min(y0 + 1, ny - 1);

                const dx: number = sx - x0;
                const dy: number = sy - y0;
                const dx = sx - x0;
                const dy = sy - y0;

                const j00: number = 3 * (y0 * nx + x0) + c;
                const j01: number = 3 * (y0 * nx + x1) + c;
                const j10: number = 3 * (y1 * nx + x0) + c;
                const j11: number = 3 * (y1 * nx + x1) + c;
                const j00 = 3 * (y0 * nx + x0) + c;
                const j01 = 3 * (y0 * nx + x1) + c;
                const j10 = 3 * (y1 * nx + x0) + c;
                const j11 = 3 * (y1 * nx + x1) + c;

                const v00: number = inputImage[j00];
                const v01: number = inputImage[j01];
                const v10: number = inputImage[j10];
                const v11: number = inputImage[j11];
                const v00 = inputImage[j00] ?? 0;
                const v01 = inputImage[j01] ?? 0;
                const v10 = inputImage[j10] ?? 0;
                const v11 = inputImage[j11] ?? 0;

                const v0: number = v00 * (1 - dx) + v01 * dx;
                const v1: number = v10 * (1 - dx) + v11 * dx;
                const v0 = v00 * (1 - dx) + v01 * dx;
                const v1 = v10 * (1 - dx) + v11 * dx;

                const v: number = v0 * (1 - dy) + v1 * dy;
                const v = v0 * (1 - dy) + v1 * dy;

                const v2: number = Math.min(Math.max(Math.round(v), 0), 255);
                const v2 = Math.min(Math.max(Math.round(v), 0), 255);

                // createTensorWithDataList is dumb compared to reshape and
                // hence has to be given with one channel after another
                const i: number = y * nx3 + x + (c % 3) * 224 * 224;
                const i = y * nx3 + x + (c % 3) * 224 * 224;

                result[i] = (v2 / 255 - mean[c]) / std[c];
                result[i] = (v2 / 255 - (mean[c] ?? 0)) / (std[c] ?? 1);
            }
        }
    }
@@ -211,26 +120,41 @@ const getRGBData = async (jpegFilePath: string) => {

const normalizeEmbedding = (embedding: Float32Array) => {
    let normalization = 0;
    for (let index = 0; index < embedding.length; index++) {
        normalization += embedding[index] * embedding[index];
    }
    for (const v of embedding) normalization += v * v;

    const sqrtNormalization = Math.sqrt(normalization);
    for (let index = 0; index < embedding.length; index++) {
        embedding[index] = embedding[index] / sqrtNormalization;
    }
    for (let index = 0; index < embedding.length; index++)
        embedding[index] = ensure(embedding[index]) / sqrtNormalization;

    return embedding;
};

let _tokenizer: Tokenizer = null;
const cachedCLIPTextSession = makeCachedInferenceSession(
    "clip-text-vit-32-uint8.onnx",
    64173509 /* 61.2 MB */,
);

let _tokenizer: Tokenizer | undefined;
const getTokenizer = () => {
    if (!_tokenizer) {
        _tokenizer = new Tokenizer();
    }
    if (!_tokenizer) _tokenizer = new Tokenizer();
    return _tokenizer;
};

export const clipTextEmbedding = async (text: string) => {
    const imageSession = await onnxTextSession();
export const clipTextEmbeddingIfAvailable = async (text: string) => {
    const sessionOrStatus = await Promise.race([
        cachedCLIPTextSession(),
        "downloading-model",
    ]);

    // Don't wait for the download to complete
    if (typeof sessionOrStatus == "string") {
        log.info(
            "Ignoring CLIP text embedding request because model download is pending",
        );
        return undefined;
    }

    const session = sessionOrStatus;
    const t1 = Date.now();
    const tokenizer = getTokenizer();
    const tokenizedText = Int32Array.from(tokenizer.encodeForCLIP(text));
@@ -238,11 +162,11 @@ export const clipTextEmbedding = async (text: string) => {
        input: new ort.Tensor("int32", tokenizedText, [1, 77]),
    };
    const t2 = Date.now();
    const results = await imageSession.run(feeds);
    const results = await session.run(feeds);
    log.debug(
        () =>
            `onnx/clip text embedding took ${Date.now() - t1} ms (prep: ${t2 - t1} ms, inference: ${Date.now() - t2} ms)`,
    );
    const textEmbedding = results["output"].data;
    const textEmbedding = ensure(results.output).data as Float32Array;
    return normalizeEmbedding(textEmbedding);
};
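
Since `normalizeEmbedding` returns unit-length vectors, ranking a text query against image embeddings reduces to a dot product (which equals cosine similarity for unit vectors). A small sketch, assuming both inputs came from the embedding functions above:

// Cosine similarity of two L2-normalized embeddings is just their dot product.
const cosineSimilarity = (a: Float32Array, b: Float32Array) => {
    let score = 0;
    for (let i = 0; i < a.length; i++) score += (a[i] ?? 0) * (b[i] ?? 0);
    return score;
};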

@@ -8,87 +8,30 @@
 */
import * as ort from "onnxruntime-node";
import log from "../log";
import { createInferenceSession, modelPathDownloadingIfNeeded } from "./ml";
import { ensure } from "../utils/common";
import { makeCachedInferenceSession } from "./ml";

const faceDetectionModelName = "yolov5s_face_640_640_dynamic.onnx";
const faceDetectionModelByteSize = 30762872; // 29.3 MB

const faceEmbeddingModelName = "mobilefacenet_opset15.onnx";
const faceEmbeddingModelByteSize = 5286998; // 5 MB

let activeFaceDetectionModelDownload: Promise<string> | undefined;

const faceDetectionModelPathDownloadingIfNeeded = async () => {
    try {
        if (activeFaceDetectionModelDownload) {
            log.info("Waiting for face detection model download to finish");
            await activeFaceDetectionModelDownload;
        } else {
            activeFaceDetectionModelDownload = modelPathDownloadingIfNeeded(
                faceDetectionModelName,
                faceDetectionModelByteSize,
            );
            return await activeFaceDetectionModelDownload;
        }
    } finally {
        activeFaceDetectionModelDownload = undefined;
    }
};

let _faceDetectionSession: Promise<ort.InferenceSession> | undefined;

const faceDetectionSession = async () => {
    if (!_faceDetectionSession) {
        _faceDetectionSession =
            faceDetectionModelPathDownloadingIfNeeded().then((modelPath) =>
                createInferenceSession(modelPath),
            );
    }
    return _faceDetectionSession;
};

let activeFaceEmbeddingModelDownload: Promise<string> | undefined;

const faceEmbeddingModelPathDownloadingIfNeeded = async () => {
    try {
        if (activeFaceEmbeddingModelDownload) {
            log.info("Waiting for face embedding model download to finish");
            await activeFaceEmbeddingModelDownload;
        } else {
            activeFaceEmbeddingModelDownload = modelPathDownloadingIfNeeded(
                faceEmbeddingModelName,
                faceEmbeddingModelByteSize,
            );
            return await activeFaceEmbeddingModelDownload;
        }
    } finally {
        activeFaceEmbeddingModelDownload = undefined;
    }
};

let _faceEmbeddingSession: Promise<ort.InferenceSession> | undefined;

const faceEmbeddingSession = async () => {
    if (!_faceEmbeddingSession) {
        _faceEmbeddingSession =
            faceEmbeddingModelPathDownloadingIfNeeded().then((modelPath) =>
                createInferenceSession(modelPath),
            );
    }
    return _faceEmbeddingSession;
};

const cachedFaceDetectionSession = makeCachedInferenceSession(
    "yolov5s_face_640_640_dynamic.onnx",
    30762872 /* 29.3 MB */,
);

export const detectFaces = async (input: Float32Array) => {
    const session = await faceDetectionSession();
    const session = await cachedFaceDetectionSession();
    const t = Date.now();
    const feeds = {
        input: new ort.Tensor("float32", input, [1, 3, 640, 640]),
    };
    const results = await session.run(feeds);
    log.debug(() => `onnx/yolo face detection took ${Date.now() - t} ms`);
    return results["output"].data;
    return ensure(results.output).data;
};

const cachedFaceEmbeddingSession = makeCachedInferenceSession(
    "mobilefacenet_opset15.onnx",
    5286998 /* 5 MB */,
);

export const faceEmbedding = async (input: Float32Array) => {
    // Dimension of each face (alias)
    const mobileFaceNetFaceSize = 112;
@@ -98,11 +41,12 @@ export const faceEmbedding = async (input: Float32Array) => {
    const n = Math.round(input.length / (z * z * 3));
    const inputTensor = new ort.Tensor("float32", input, [n, z, z, 3]);

    const session = await faceEmbeddingSession();
    const session = await cachedFaceEmbeddingSession();
    const t = Date.now();
    const feeds = { img_inputs: inputTensor };
    const results = await session.run(feeds);
    log.debug(() => `onnx/yolo face embedding took ${Date.now() - t} ms`);
    // TODO: What's with this type? It works in practice, but double check.
    return (results.embeddings as unknown as any)["cpuData"]; // as Float32Array;
    /* Need these model specific casts to extract and type the result */
    return (results.embeddings as unknown as Record<string, unknown>)
        .cpuData as Float32Array;
};
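
The tensor shape above packs a whole batch of faces into one inference call; the batch size is recovered from the flat input length. A sketch of that arithmetic, using the constants from the snippet:

// Each 112x112 RGB face occupies 112 * 112 * 3 = 37,632 floats, so for a
// well-formed input the division below is exact (Math.round guards drift).
const facesInBatch = (input: Float32Array) =>
    Math.round(input.length / (112 * 112 * 3));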

@@ -1,5 +1,5 @@
/**
 * @file AI/ML related functionality.
 * @file AI/ML related functionality, generic layer.
 *
 * @see also `ml-clip.ts`, `ml-face.ts`.
 *
@ -18,6 +18,50 @@ import * as ort from "onnxruntime-node";
import log from "../log";
import { writeStream } from "../stream";

/**
 * Return a function that can be used to trigger a download of the specified
 * model, and the creation of an ONNX inference session initialized using it.
 *
 * Multiple parallel calls to the returned function are fine: it ensures that
 * the model will be downloaded and the session created using it only once.
 * All pending calls to it meanwhile will just await on the same promise.
 *
 * And once the promise is resolved, the created ONNX inference session will be
 * cached, so subsequent calls to the returned function will just reuse the
 * same session.
 *
 * {@link makeCachedInferenceSession} can itself be called anytime, it doesn't
 * actively trigger a download until the returned function is called.
 *
 * @param modelName The name of the model to download.
 *
 * @param modelByteSize The size in bytes that we expect the model to have. If
 * the size of the downloaded model does not match the expected size, then we
 * will redownload it.
 *
 * @returns A function. Calling that function returns a promise to an ONNX
 * session.
 */
export const makeCachedInferenceSession = (
    modelName: string,
    modelByteSize: number,
) => {
    let session: Promise<ort.InferenceSession> | undefined;

    const download = () =>
        modelPathDownloadingIfNeeded(modelName, modelByteSize);

    const createSession = (modelPath: string) =>
        createInferenceSession(modelPath);

    const cachedInferenceSession = () => {
        if (!session) session = download().then(createSession);
        return session;
    };

    return cachedInferenceSession;
};
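
A sketch of how the memoized factory above might be used; the model name and byte size are illustrative placeholders, not a real model:

// Module scope: nothing is downloaded yet.
const cachedSession = makeCachedInferenceSession(
    "example-model.onnx", // hypothetical model name
    123456789, // the byte size we expect the download to have
);

// The first call triggers the download and session creation; concurrent and
// subsequent calls all share the same promise / cached session.
const session = await cachedSession();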

/**
 * Download the model named {@link modelName} if we don't already have it.
 *

@@ -26,7 +70,7 @@ import { writeStream } from "../stream";
 *
 * @returns the path to the model on the local machine.
 */
export const modelPathDownloadingIfNeeded = async (
const modelPathDownloadingIfNeeded = async (
    modelName: string,
    expectedByteSize: number,
) => {
@@ -49,31 +93,33 @@ export const modelPathDownloadingIfNeeded = async (
};

/** Return the path where the given {@link modelName} is meant to be saved */
export const modelSavePath = (modelName: string) =>
const modelSavePath = (modelName: string) =>
    path.join(app.getPath("userData"), "models", modelName);

export const downloadModel = async (saveLocation: string, name: string) => {
const downloadModel = async (saveLocation: string, name: string) => {
    // `mkdir -p` the directory where we want to save the model.
    const saveDir = path.dirname(saveLocation);
    await fs.mkdir(saveDir, { recursive: true });
    // Download
    // Download.
    log.info(`Downloading ML model from ${name}`);
    const url = `https://models.ente.io/${name}`;
    const res = await net.fetch(url);
    if (!res.ok) throw new Error(`Failed to fetch ${url}: HTTP ${res.status}`);
    // Save
    await writeStream(saveLocation, res.body);
    const body = res.body;
    if (!body) throw new Error(`Received a null response for ${url}`);
    // Save.
    await writeStream(saveLocation, body);
    log.info(`Downloaded CLIP model ${name}`);
};

/**
 * Create an ONNX {@link InferenceSession} with some defaults.
 */
export const createInferenceSession = async (modelPath: string) => {
const createInferenceSession = async (modelPath: string) => {
    return await ort.InferenceSession.create(modelPath, {
        // Restrict the number of threads to 1
        // Restrict the number of threads to 1.
        intraOpNumThreads: 1,
        // Be more conservative with RAM usage
        // Be more conservative with RAM usage.
        enableCpuMemArena: false,
    });
};

@@ -9,20 +9,29 @@ import { watchStore } from "../stores/watch";
 * This is useful to reset state when the user logs out.
 */
export const clearStores = () => {
    uploadStatusStore.clear();
    safeStorageStore.clear();
    uploadStatusStore.clear();
    watchStore.clear();
};

export const saveEncryptionKey = async (encryptionKey: string) => {
    const encryptedKey: Buffer = await safeStorage.encryptString(encryptionKey);
/**
 * [Note: Safe storage keys]
 *
 * On macOS, `safeStorage` stores our data under a Keychain entry named
 * "<app-name> Safe Storage". Which resolves to:
 *
 * - Electron Safe Storage (dev)
 * - ente Safe Storage (prod)
 */
export const saveEncryptionKey = (encryptionKey: string) => {
    const encryptedKey = safeStorage.encryptString(encryptionKey);
    const b64EncryptedKey = Buffer.from(encryptedKey).toString("base64");
    safeStorageStore.set("encryptionKey", b64EncryptedKey);
};

export const encryptionKey = async (): Promise<string | undefined> => {
export const encryptionKey = (): string | undefined => {
    const b64EncryptedKey = safeStorageStore.get("encryptionKey");
    if (!b64EncryptedKey) return undefined;
    const keyBuffer = Buffer.from(b64EncryptedKey, "base64");
    return await safeStorage.decryptString(keyBuffer);
    return safeStorage.decryptString(keyBuffer);
};
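
A round-trip sketch of the two functions above: the plaintext key exists only in memory, while the store persists the base64 of the OS-encrypted bytes.

// Encrypt via the OS keychain/keyring, then base64-encode into the store.
saveEncryptionKey("hunter2"); // hypothetical key material

// Later (e.g. after an app restart): undefined if nothing was saved.
const key = encryptionKey();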

@@ -1,116 +1,149 @@
import StreamZip from "node-stream-zip";
import fs from "node:fs/promises";
import path from "node:path";
import { existsSync } from "original-fs";
import path from "path";
import { ElectronFile, type PendingUploads } from "../../types/ipc";
import {
    uploadStatusStore,
    type UploadStatusStore,
} from "../stores/upload-status";
import { getElectronFile, getZipFileStream } from "./fs";
import type { PendingUploads, ZipItem } from "../../types/ipc";
import { uploadStatusStore } from "../stores/upload-status";

export const pendingUploads = async () => {
    const collectionName = uploadStatusStore.get("collectionName");
    const filePaths = validSavedPaths("files");
    const zipPaths = validSavedPaths("zips");

    let files: ElectronFile[] = [];
    let type: PendingUploads["type"];

    if (zipPaths.length) {
        type = "zips";
        for (const zipPath of zipPaths) {
            files = [
                ...files,
                ...(await getElectronFilesFromGoogleZip(zipPath)),
            ];
        }
        const pendingFilePaths = new Set(filePaths);
        files = files.filter((file) => pendingFilePaths.has(file.path));
    } else if (filePaths.length) {
        type = "files";
        files = await Promise.all(filePaths.map(getElectronFile));
    }

    return {
        files,
        collectionName,
        type,
    };
};

export const validSavedPaths = (type: PendingUploads["type"]) => {
    const key = storeKey(type);
    const savedPaths = (uploadStatusStore.get(key) as string[]) ?? [];
    const paths = savedPaths.filter((p) => existsSync(p));
    uploadStatusStore.set(key, paths);
    return paths;
};

export const setPendingUploadCollection = (collectionName: string) => {
    if (collectionName) uploadStatusStore.set("collectionName", collectionName);
    else uploadStatusStore.delete("collectionName");
};

export const setPendingUploadFiles = (
    type: PendingUploads["type"],
    filePaths: string[],
) => {
    const key = storeKey(type);
    if (filePaths) uploadStatusStore.set(key, filePaths);
    else uploadStatusStore.delete(key);
};

const storeKey = (type: PendingUploads["type"]): keyof UploadStatusStore => {
    switch (type) {
        case "zips":
            return "zipPaths";
        case "files":
            return "filePaths";
    }
};

export const getElectronFilesFromGoogleZip = async (filePath: string) => {
    const zip = new StreamZip.async({
        file: filePath,
    });
    const zipName = path.basename(filePath, ".zip");
export const listZipItems = async (zipPath: string): Promise<ZipItem[]> => {
    const zip = new StreamZip.async({ file: zipPath });

    const entries = await zip.entries();
    const files: ElectronFile[] = [];
    const entryNames: string[] = [];

    for (const entry of Object.values(entries)) {
        const basename = path.basename(entry.name);
        if (entry.isFile && basename.length > 0 && basename[0] !== ".") {
            files.push(await getZipEntryAsElectronFile(zipName, zip, entry));
        // Ignore "hidden" files (files whose names begin with a dot).
        if (entry.isFile && !basename.startsWith(".")) {
            // `entry.name` is the path within the zip.
            entryNames.push(entry.name);
        }
    }

    return files;
    await zip.close();

    return entryNames.map((entryName) => [zipPath, entryName]);
};

export async function getZipEntryAsElectronFile(
    zipName: string,
    zip: StreamZip.StreamZipAsync,
    entry: StreamZip.ZipEntry,
): Promise<ElectronFile> {
export const pathOrZipItemSize = async (
    pathOrZipItem: string | ZipItem,
): Promise<number> => {
    if (typeof pathOrZipItem == "string") {
        const stat = await fs.stat(pathOrZipItem);
        return stat.size;
    } else {
        const [zipPath, entryName] = pathOrZipItem;
        const zip = new StreamZip.async({ file: zipPath });
        const entry = await zip.entry(entryName);
        if (!entry)
            throw new Error(
                `An entry with name ${entryName} does not exist in the zip file at ${zipPath}`,
            );
        const size = entry.size;
        await zip.close();
        return size;
    }
};
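
A small sketch tying the two new helpers above together; the zip path is a hypothetical example:

const items = await listZipItems("/tmp/takeout.zip"); // hypothetical path
for (const item of items) {
    // Each ZipItem is a [zipPath, entryName] tuple.
    const [, entryName] = item;
    const size = await pathOrZipItemSize(item);
    console.log(`${entryName}: ${size} bytes`);
}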

export const pendingUploads = async (): Promise<PendingUploads | undefined> => {
    const collectionName = uploadStatusStore.get("collectionName") ?? undefined;

    const allFilePaths = uploadStatusStore.get("filePaths") ?? [];
    const filePaths = allFilePaths.filter((f) => existsSync(f));

    const allZipItems = uploadStatusStore.get("zipItems");
    let zipItems: typeof allZipItems;

    // Migration code - May 2024. Remove after a bit.
    //
    // The older store formats will not have zipItems and instead will have
    // zipPaths. If we find such a case, read the zipPaths and enqueue all of
    // their files as zipItems in the result.
    //
    // This can potentially cause us to try reuploading an already uploaded
    // file, but the dedup logic will kick in at that point so no harm will come
    // of it.
    if (allZipItems === undefined) {
        const allZipPaths = uploadStatusStore.get("zipPaths") ?? [];
        const zipPaths = allZipPaths.filter((f) => existsSync(f));
        zipItems = [];
        for (const zip of zipPaths)
            zipItems = zipItems.concat(await listZipItems(zip));
    } else {
        zipItems = allZipItems.filter(([z]) => existsSync(z));
    }

    if (filePaths.length == 0 && zipItems.length == 0) return undefined;

    return {
        path: path
            .join(zipName, entry.name)
            .split(path.sep)
            .join(path.posix.sep),
        name: path.basename(entry.name),
        size: entry.size,
        lastModified: entry.time,
        stream: async () => {
            return await getZipFileStream(zip, entry.name);
        },
        blob: async () => {
            const buffer = await zip.entryData(entry.name);
            return new Blob([new Uint8Array(buffer)]);
        },
        arrayBuffer: async () => {
            const buffer = await zip.entryData(entry.name);
            return new Uint8Array(buffer);
        },
        collectionName,
        filePaths,
        zipItems,
    };
}
};

/**
 * [Note: Missing values in electron-store]
 *
 * Suppose we were to create a store like this:
 *
 *     const store = new Store({
 *         schema: {
 *             foo: { type: "string" },
 *             bars: { type: "array", items: { type: "string" } },
 *         },
 *     });
 *
 * If we fetch `store.get("foo")` or `store.get("bars")`, we get `undefined`.
 * But if we try to set these back to `undefined`, say `store.set("foo",
 * someUndefValue)`, we get asked to
 *
 *     TypeError: Use `delete()` to clear values
 *
 * This happens even if we do bulk object updates, e.g. with a JS object that
 * has undefined keys:
 *
 * > TypeError: Setting a value of type `undefined` for key `collectionName` is
 * > not allowed as it's not supported by JSON
 *
 * So what should the TypeScript type for "foo" be?
 *
 * If it were to not include the possibility of `undefined`, then the type
 * would lie because `store.get("foo")` can indeed be `undefined`. But if we
 * were to include the possibility of `undefined`, then trying to
 * `store.set("foo", someUndefValue)` will throw.
 *
 * The approach we take is to rely on false-y values (empty strings and empty
 * arrays) to indicate missing values, and then converting those to `undefined`
 * when reading from the store, and converting `undefined` to the corresponding
 * false-y value when writing.
 */
export const setPendingUploads = ({
    collectionName,
    filePaths,
    zipItems,
}: PendingUploads) => {
    uploadStatusStore.set({
        collectionName: collectionName ?? "",
        filePaths: filePaths,
        zipItems: zipItems,
    });
};
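
A sketch of the write side of this convention using the function above; the read side (converting the sentinels back) is what `pendingUploads` does further up:

setPendingUploads({
    collectionName: undefined, // persisted as the "" sentinel, per above
    filePaths: [],
    zipItems: [],
});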

export const markUploadedFiles = (paths: string[]) => {
    const existing = uploadStatusStore.get("filePaths") ?? [];
    const updated = existing.filter((p) => !paths.includes(p));
    uploadStatusStore.set("filePaths", updated);
};

export const markUploadedZipItems = (
    items: [zipPath: string, entryName: string][],
) => {
    const existing = uploadStatusStore.get("zipItems") ?? [];
    const updated = existing.filter(
        (z) => !items.some((e) => z[0] == e[0] && z[1] == e[1]),
    );
    uploadStatusStore.set("zipItems", updated);
};

export const clearPendingUploads = () => uploadStatusStore.clear();

@@ -3,9 +3,10 @@ import { BrowserWindow } from "electron/main";
import fs from "node:fs/promises";
import path from "node:path";
import { FolderWatch, type CollectionMapping } from "../../types/ipc";
import { fsIsDir } from "../fs";
import log from "../log";
import { watchStore } from "../stores/watch";
import { posixPath } from "../utils/electron";
import { fsIsDir } from "./fs";

/**
 * Create and return a new file system watcher.
@@ -34,8 +35,8 @@ export const createWatcher = (mainWindow: BrowserWindow) => {
    return watcher;
};

const eventData = (path: string): [string, FolderWatch] => {
    path = posixPath(path);
const eventData = (platformPath: string): [string, FolderWatch] => {
    const path = posixPath(platformPath);

    const watch = folderWatches().find((watch) =>
        path.startsWith(watch.folderPath + "/"),

@@ -46,23 +47,15 @@ const eventData = (path: string): [string, FolderWatch] => {
    return [path, watch];
};

/**
 * Convert a file system {@link filePath} that uses the local system specific
 * path separators into a path that uses POSIX file separators.
 */
const posixPath = (filePath: string) =>
    filePath.split(path.sep).join(path.posix.sep);

export const watchGet = (watcher: FSWatcher) => {
    const [valid, deleted] = folderWatches().reduce(
        ([valid, deleted], watch) => {
            (fsIsDir(watch.folderPath) ? valid : deleted).push(watch);
            return [valid, deleted];
        },
        [[], []],
    );
    if (deleted.length) {
        for (const watch of deleted) watchRemove(watcher, watch.folderPath);
export const watchGet = async (watcher: FSWatcher): Promise<FolderWatch[]> => {
    const valid: FolderWatch[] = [];
    const deletedPaths: string[] = [];
    for (const watch of folderWatches()) {
        if (await fsIsDir(watch.folderPath)) valid.push(watch);
        else deletedPaths.push(watch.folderPath);
    }
    if (deletedPaths.length) {
        await Promise.all(deletedPaths.map((p) => watchRemove(watcher, p)));
        setFolderWatches(valid);
    }
    return valid;

@@ -80,7 +73,7 @@ export const watchAdd = async (
) => {
    const watches = folderWatches();

    if (!fsIsDir(folderPath))
    if (!(await fsIsDir(folderPath)))
        throw new Error(
            `Attempting to add a folder watch for a folder path ${folderPath} that is not an existing directory`,
        );

@@ -104,7 +97,7 @@ export const watchAdd = async (
    return watches;
};

export const watchRemove = async (watcher: FSWatcher, folderPath: string) => {
export const watchRemove = (watcher: FSWatcher, folderPath: string) => {
    const watches = folderWatches();
    const filtered = watches.filter((watch) => watch.folderPath != folderPath);
    if (watches.length == filtered.length)

@@ -157,3 +150,7 @@ export const watchFindFiles = async (dirPath: string) => {
    }
    return paths;
};

export const watchReset = (watcher: FSWatcher) => {
    watcher.unwatch(folderWatches().map((watch) => watch.folderPath));
};

@@ -1,7 +1,7 @@
import Store, { Schema } from "electron-store";

interface SafeStorageStore {
    encryptionKey: string;
    encryptionKey?: string;
}

const safeStorageSchema: Schema<SafeStorageStore> = {

@@ -1,27 +1,51 @@
import Store, { Schema } from "electron-store";

export interface UploadStatusStore {
    filePaths: string[];
    zipPaths: string[];
    collectionName: string;
    /**
     * The collection to which we're uploading, or the root collection.
     *
     * Not all pending uploads will have an associated collection.
     */
    collectionName?: string;
    /**
     * Paths to regular files that are pending upload.
     */
    filePaths?: string[];
    /**
     * Each item is the path to a zip file and the name of an entry within it.
     */
    zipItems?: [zipPath: string, entryName: string][];
    /**
     * @deprecated Legacy paths to zip files, now subsumed into zipItems.
     */
    zipPaths?: string[];
}

const uploadStatusSchema: Schema<UploadStatusStore> = {
    collectionName: {
        type: "string",
    },
    filePaths: {
        type: "array",
        items: {
            type: "string",
        },
    },
    zipItems: {
        type: "array",
        items: {
            type: "array",
            items: {
                type: "string",
            },
        },
    },
    zipPaths: {
        type: "array",
        items: {
            type: "string",
        },
    },
    collectionName: {
        type: "string",
    },
};

export const uploadStatusStore = new Store({

@@ -1,7 +1,7 @@
import Store, { Schema } from "electron-store";

interface UserPreferences {
    hideDockIcon: boolean;
    hideDockIcon?: boolean;
    skipAppVersion?: string;
    muteUpdateNotificationVersion?: string;
}

@@ -3,7 +3,7 @@ import { type FolderWatch } from "../../types/ipc";
import log from "../log";

interface WatchStore {
    mappings: FolderWatchWithLegacyFields[];
    mappings?: FolderWatchWithLegacyFields[];
}

type FolderWatchWithLegacyFields = FolderWatch & {

@@ -54,8 +54,12 @@ export const watchStore = new Store({
 */
export const migrateLegacyWatchStoreIfNeeded = () => {
    let needsUpdate = false;
    const watches = watchStore.get("mappings")?.map((watch) => {
    const updatedWatches = [];
    for (const watch of watchStore.get("mappings") ?? []) {
        let collectionMapping = watch.collectionMapping;
        // The required type defines the latest schema, but before migration
        // this'll be undefined, so tell ESLint to calm down.
        // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
        if (!collectionMapping) {
            collectionMapping = watch.uploadStrategy == 1 ? "parent" : "root";
            needsUpdate = true;
@@ -64,10 +68,10 @@ export const migrateLegacyWatchStoreIfNeeded = () => {
            delete watch.rootFolderName;
            needsUpdate = true;
        }
        return { ...watch, collectionMapping };
    });
        updatedWatches.push({ ...watch, collectionMapping });
    }
    if (needsUpdate) {
        watchStore.set("mappings", watches);
        watchStore.set("mappings", updatedWatches);
        log.info("Migrated legacy watch store data to new schema");
    }
};

@@ -1,15 +1,19 @@
/**
 * @file stream data to-from renderer using a custom protocol handler.
 */
import { protocol } from "electron/main";
import { net, protocol } from "electron/main";
import StreamZip from "node-stream-zip";
import { createWriteStream, existsSync } from "node:fs";
import fs from "node:fs/promises";
import { Readable } from "node:stream";
import { ReadableStream } from "node:stream/web";
import { pathToFileURL } from "node:url";
import log from "./log";
import { ensure } from "./utils/common";

/**
 * Register a protocol handler that we use for streaming large files between the
 * main process (node) and the renderer process (browser) layer.
 * main (Node.js) and renderer (Chromium) processes.
 *
 * [Note: IPC streams]
 *
@@ -17,11 +21,14 @@ import log from "./log";
 * across IPC. And passing the entire contents of the file is not feasible for
 * large video files because of the memory pressure the copying would entail.
 *
 * As an alternative, we register a custom protocol handler that can provided a
 * As an alternative, we register a custom protocol handler that provides a
 * bi-directional stream. The renderer can stream data to the node side by
 * streaming the request. The node side can stream to the renderer side by
 * streaming the response.
 *
 * The stream is not full duplex - while both reads and writes can be streamed,
 * they need to be streamed separately.
 *
 * See also: [Note: Transferring large amount of data over IPC]
 *
 * Depends on {@link registerPrivilegedSchemes}.
@@ -29,88 +36,148 @@
export const registerStreamProtocol = () => {
    protocol.handle("stream", async (request: Request) => {
        const url = request.url;
        const { host, pathname } = new URL(url);
        // Convert e.g. "%20" to spaces.
        const path = decodeURIComponent(pathname);
        // The request URL contains the command to run as the host, and the
        // pathname of the file(s) as the search params.
        const { host, searchParams } = new URL(url);
        switch (host) {
            /* stream://write/path/to/file */
            /*        host-pathname----- */
            case "read":
                return handleRead(ensure(searchParams.get("path")));
            case "read-zip":
                return handleReadZip(
                    ensure(searchParams.get("zipPath")),
                    ensure(searchParams.get("entryName")),
                );
            case "write":
                try {
                    await writeStream(path, request.body);
                    return new Response("", { status: 200 });
                } catch (e) {
                    log.error(`Failed to write stream for ${url}`, e);
                    return new Response(
                        `Failed to write stream: ${e.message}`,
                        { status: 500 },
                    );
                }
                return handleWrite(ensure(searchParams.get("path")), request);
            default:
                return new Response("", { status: 404 });
        }
    });
};

const handleRead = async (path: string) => {
    try {
        const res = await net.fetch(pathToFileURL(path).toString());
        if (res.ok) {
            // net.fetch already seems to add "Content-Type" and "Last-Modified"
            // headers, but I couldn't find documentation for this. In any case,
            // since we already are stat-ting the file for the "Content-Length",
            // we explicitly add the "X-Last-Modified-Ms" too,
            //
            // 1. Guaranteeing its presence,
            //
            // 2. Having it be in the exact format we want (no string <-> date
            //    conversions),
            //
            // 3. Retaining milliseconds.

            const stat = await fs.stat(path);

            // Add the file's size as the Content-Length header.
            const fileSize = stat.size;
            res.headers.set("Content-Length", `${fileSize}`);

            // Add the file's last modified time (as epoch milliseconds).
            const mtimeMs = stat.mtimeMs;
            res.headers.set("X-Last-Modified-Ms", `${mtimeMs}`);
        }
        return res;
    } catch (e) {
        log.error(`Failed to read stream at ${path}`, e);
        return new Response(`Failed to read stream: ${String(e)}`, {
            status: 500,
        });
    }
};

const handleReadZip = async (zipPath: string, entryName: string) => {
    try {
        const zip = new StreamZip.async({ file: zipPath });
        const entry = await zip.entry(entryName);
        if (!entry) return new Response("", { status: 404 });

        // This returns an "old style" NodeJS.ReadableStream.
        const stream = await zip.stream(entry);
        // Convert it into a new style NodeJS.Readable.
        const nodeReadable = new Readable().wrap(stream);
        // Then convert it into a Web stream.
        const webReadableStreamAny = Readable.toWeb(nodeReadable);
        // However, we get a ReadableStream<any> now. This doesn't go into the
        // `BodyInit` expected by the Response constructor, which wants a
        // ReadableStream<Uint8Array>. Force a cast.
        const webReadableStream =
            webReadableStreamAny as ReadableStream<Uint8Array>;

        // Close the zip handle when the underlying stream closes.
        stream.on("end", () => void zip.close());

        return new Response(webReadableStream, {
            headers: {
                // We don't know the exact type, but it doesn't really matter,
                // just set it to a generic binary content-type so that the
                // browser doesn't tinker with it thinking of it as text.
                "Content-Type": "application/octet-stream",
                "Content-Length": `${entry.size}`,
                // While it is documented that entry.time is the modification
                // time, the units are not mentioned. By seeing the source code,
                // we can verify that it is indeed epoch milliseconds. See
                // `parseZipTime` in the node-stream-zip source,
                // https://github.com/antelle/node-stream-zip/blob/master/node_stream_zip.js
                "X-Last-Modified-Ms": `${entry.time}`,
            },
        });
    } catch (e) {
        log.error(
            `Failed to read entry ${entryName} from zip file at ${zipPath}`,
            e,
        );
        return new Response(`Failed to read stream: ${String(e)}`, {
            status: 500,
        });
    }
};

const handleWrite = async (path: string, request: Request) => {
    try {
        await writeStream(path, ensure(request.body));
        return new Response("", { status: 200 });
    } catch (e) {
        log.error(`Failed to write stream to ${path}`, e);
        return new Response(`Failed to write stream: ${String(e)}`, {
            status: 500,
        });
    }
};
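
On the renderer side these handlers would be driven by regular `fetch` calls against the custom scheme (which must have been registered as privileged for this to work). A hedged sketch; the actual renderer wrappers live elsewhere in the codebase, and the paths here are hypothetical:

// Read a file's bytes as a stream.
const res = await fetch("stream://read?path=" + encodeURIComponent("/tmp/in.bin"));
const data = new Uint8Array(await res.arrayBuffer());

// Write data to disk by streaming the request body.
await fetch("stream://write?path=" + encodeURIComponent("/tmp/out.bin"), {
    method: "POST",
    body: data,
});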

/**
 * Write a (web) ReadableStream to a file at the given {@link filePath}.
 *
 * The returned promise resolves when the write completes.
 *
 * @param filePath The local filesystem path where the file should be written.
 * @param readableStream A [web
 * ReadableStream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream)
 *
 * @param readableStream A web
 * [ReadableStream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream).
 */
export const writeStream = (filePath: string, readableStream: ReadableStream) =>
    writeNodeStream(filePath, convertWebReadableStreamToNode(readableStream));
    writeNodeStream(filePath, Readable.fromWeb(readableStream));
|
||||
|
||||
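With the switch to the built-in `Readable.fromWeb`, any web ReadableStream can be piped straight to disk. A minimal usage sketch, with a placeholder URL and destination path (the `download` helper is not part of this diff):

```ts
import { net } from "electron/main";

// Download a file to disk via writeStream. URL and destination are
// placeholders for illustration.
const download = async (url: string, destinationPath: string) => {
    const res = await net.fetch(url);
    if (!res.ok) throw new Error(`Failed to fetch ${url}: HTTP ${res.status}`);
    // res.body can be null (e.g. for 204 responses); writeStream expects a
    // non-null web ReadableStream, so guard first.
    if (!res.body) throw new Error("Expected a response body");
    await writeStream(destinationPath, res.body);
};
```
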
-/**
- * Convert a Web ReadableStream into a Node.js ReadableStream
- *
- * This can be used to, for example, write a ReadableStream obtained via
- * `net.fetch` into a file using the Node.js `fs` APIs
- */
-const convertWebReadableStreamToNode = (readableStream: ReadableStream) => {
-    const reader = readableStream.getReader();
-    const rs = new Readable();
-
-    rs._read = async () => {
-        try {
-            const result = await reader.read();
-
-            if (!result.done) {
-                rs.push(Buffer.from(result.value));
-            } else {
-                rs.push(null);
-                return;
-            }
-        } catch (e) {
-            rs.emit("error", e);
-        }
-    };
-
-    return rs;
-};

-const writeNodeStream = async (
-    filePath: string,
-    fileStream: NodeJS.ReadableStream,
-) => {
+const writeNodeStream = async (filePath: string, fileStream: Readable) => {
    const writeable = createWriteStream(filePath);

-    fileStream.on("error", (error) => {
-        writeable.destroy(error); // Close the writable stream with an error
+    fileStream.on("error", (err) => {
+        writeable.destroy(err); // Close the writable stream with an error
    });

    fileStream.pipe(writeable);

    await new Promise((resolve, reject) => {
        writeable.on("finish", resolve);
-        writeable.on("error", async (e: unknown) => {
+        writeable.on("error", (err) => {
            if (existsSync(filePath)) {
-                await fs.unlink(filePath);
+                void fs.unlink(filePath);
            }
-            reject(e);
+            reject(err);
        });
    });
};

@@ -1,35 +0,0 @@
import { app } from "electron/main";
import { existsSync } from "node:fs";
import fs from "node:fs/promises";
import path from "path";

const CHARACTERS =
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";

export async function getTempDirPath() {
    const tempDirPath = path.join(app.getPath("temp"), "ente");
    await fs.mkdir(tempDirPath, { recursive: true });
    return tempDirPath;
}

function generateTempName(length: number) {
    let result = "";

    const charactersLength = CHARACTERS.length;
    for (let i = 0; i < length; i++) {
        result += CHARACTERS.charAt(
            Math.floor(Math.random() * charactersLength),
        );
    }
    return result;
}

export async function generateTempFilePath(formatSuffix: string) {
    let tempFilePath: string;
    do {
        const tempDirPath = await getTempDirPath();
        const namePrefix = generateTempName(10);
        tempFilePath = path.join(tempDirPath, namePrefix + "-" + formatSuffix);
    } while (existsSync(tempFilePath));
    return tempFilePath;
}
44 desktop/src/main/utils/common.ts Normal file
@@ -0,0 +1,44 @@
/**
 * @file grab bag of utility functions.
 *
 * These are verbatim copies of functions from web code since there isn't
 * currently a common package that both of them share.
 */

/**
 * Throw an exception if the given value is `null` or `undefined`.
 */
export const ensure = <T>(v: T | null | undefined): T => {
    if (v === null) throw new Error("Required value was null");
    if (v === undefined) throw new Error("Required value was not found");
    return v;
};

/**
 * Wait for {@link ms} milliseconds
 *
 * This function is a promisified `setTimeout`. It returns a promise that
 * resolves after {@link ms} milliseconds.
 */
export const wait = (ms: number) =>
    new Promise((resolve) => setTimeout(resolve, ms));

/**
 * Await the given {@link promise} for {@link ms} milliseconds. If it
 * does not resolve within {@link ms}, then reject with a timeout error.
 */
export const withTimeout = async <T>(promise: Promise<T>, ms: number) => {
    let timeoutId: ReturnType<typeof setTimeout>;
    const rejectOnTimeout = new Promise<T>((_, reject) => {
        timeoutId = setTimeout(
            () => reject(new Error("Operation timed out")),
            ms,
        );
    });
    const promiseAndCancelTimeout = async () => {
        const result = await promise;
        clearTimeout(timeoutId);
        return result;
    };
    return Promise.race([promiseAndCancelTimeout(), rejectOnTimeout]);
};
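A quick usage sketch of `wait` and `withTimeout` together (illustrative values, not part of this diff): the first call resolves normally, while the second rejects with the timeout error since the wrapped promise takes longer than the allotted time.

```ts
const demo = async () => {
    // Resolves: the wrapped promise settles well within the 1000 ms budget.
    const fast = await withTimeout(wait(10).then(() => "ok"), 1000);
    console.log(fast); // "ok"

    // Rejects: the wrapped promise needs 5 s, but we only allow 100 ms.
    try {
        await withTimeout(wait(5000).then(() => "slow"), 100);
    } catch (e) {
        console.log(e); // Error: Operation timed out
    }
};

void demo();
```
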
@@ -1,14 +1,35 @@
import shellescape from "any-shell-escape";
import { shell } from "electron"; /* TODO(MR): Why is this not in /main? */
import { app } from "electron/main";
import { exec } from "node:child_process";
import path from "node:path";
import { promisify } from "node:util";
-import log from "./log";
+import log from "../log";

/** `true` if the app is running in development mode. */
export const isDev = !app.isPackaged;

+/**
+ * Convert a file system {@link platformPath} that uses the local system
+ * specific path separators into a path that uses POSIX file separators.
+ *
+ * For all paths that we persist or pass over the IPC boundary, we always use
+ * POSIX paths, even on Windows.
+ *
+ * Windows recognizes both forward and backslashes. This also works with drive
+ * names. c:\foo\bar and c:/foo/bar are both valid.
+ *
+ * > Almost all paths passed to Windows APIs are normalized. During
+ * > normalization, Windows performs the following steps: ... All forward
+ * > slashes (/) are converted into the standard Windows separator, the back
+ * > slash (\).
+ * >
+ * > https://learn.microsoft.com/en-us/dotnet/standard/io/file-path-formats
+ */
+export const posixPath = (platformPath: string) =>
+    path.sep == path.posix.sep
+        ? platformPath
+        : platformPath.split(path.sep).join(path.posix.sep);
+
/**
 * Run a shell command asynchronously.
 *
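For illustration, the effect of `posixPath` on either kind of platform (example values, not from the diff):

```ts
// On Windows, where path.sep is "\", the separators get rewritten:
posixPath("C:\\Users\\alice\\Pictures"); // => "C:/Users/alice/Pictures"

// On macOS / Linux, where path.sep is already "/", the input comes back
// unchanged:
posixPath("pictures/2024/hello.jpg"); // => "pictures/2024/hello.jpg"
```
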
@@ -33,49 +54,11 @@ export const execAsync = (command: string | string[]) => {
        ? shellescape(command)
        : command;
    const startTime = Date.now();
    log.debug(() => `Running shell command: ${escapedCommand}`);
    const result = execAsync_(escapedCommand);
    log.debug(
-        () =>
-            `Completed in ${Math.round(Date.now() - startTime)} ms (${escapedCommand})`,
+        () => `${escapedCommand} (${Math.round(Date.now() - startTime)} ms)`,
    );
    return result;
};

const execAsync_ = promisify(exec);

/**
 * Open the given {@link dirPath} in the system's folder viewer.
 *
 * For example, on macOS this'll open {@link dirPath} in Finder.
 */
export const openDirectory = async (dirPath: string) => {
    const res = await shell.openPath(path.normalize(dirPath));
    // shell.openPath resolves with a string containing the error message
    // corresponding to the failure if a failure occurred, otherwise "".
    if (res) throw new Error(`Failed to open directory ${dirPath}: ${res}`);
};

/**
 * Open the app's log directory in the system's folder viewer.
 *
 * @see {@link openDirectory}
 */
export const openLogDirectory = () => openDirectory(logDirectoryPath());

/**
 * Return the path where the logs for the app are saved.
 *
 * [Note: Electron app paths]
 *
 * By default, these paths are at the following locations:
 *
 * - macOS: `~/Library/Application Support/ente`
 * - Linux: `~/.config/ente`
- * - Windows: `%APPDATA%`, e.g. `C:\Users\<username>\AppData\Local\ente`
+ * - Windows: C:\Users\<you>\AppData\Local\<Your App Name>
 *
 * https://www.electronjs.org/docs/latest/api/app
 *
 */
const logDirectoryPath = () => app.getPath("logs");
125 desktop/src/main/utils/temp.ts Normal file
@@ -0,0 +1,125 @@
import { app } from "electron/main";
import StreamZip from "node-stream-zip";
import { existsSync } from "node:fs";
import fs from "node:fs/promises";
import path from "node:path";
import type { ZipItem } from "../../types/ipc";
import { ensure } from "./common";

/**
 * Our very own directory within the system temp directory. Go crazy, but
 * remember to clean up, especially in exception handlers.
 */
const enteTempDirPath = async () => {
    const result = path.join(app.getPath("temp"), "ente");
    await fs.mkdir(result, { recursive: true });
    return result;
};

/** Generate a random string suitable for being used as a file name prefix */
const randomPrefix = () => {
    const ch = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
    const randomChar = () => ensure(ch[Math.floor(Math.random() * ch.length)]);

    return Array(10).fill("").map(randomChar).join("");
};

/**
 * Return the path to a temporary file with the given {@link extension}.
 *
 * The function returns the path to a file in the system temp directory (in an
 * Ente specific folder therein) with a random prefix and an (optional)
 * {@link extension}.
 *
 * It ensures that there is no existing item with the same name already.
 *
 * Use {@link deleteTempFile} to remove this file when you're done.
 */
export const makeTempFilePath = async (extension?: string) => {
    const tempDir = await enteTempDirPath();
    const suffix = extension ? "." + extension : "";
    let result: string;
    do {
        result = path.join(tempDir, randomPrefix() + suffix);
    } while (existsSync(result));
    return result;
};

/**
 * Delete a temporary file at the given path if it exists.
 *
 * This is the same as a vanilla {@link fs.rm}, except it first checks that the
 * given path is within the Ente specific directory in the system temp
 * directory. This acts as an additional safety check.
 *
 * @param tempFilePath The path to the temporary file to delete. This path
 * should've been previously created using {@link makeTempFilePath}.
 */
export const deleteTempFile = async (tempFilePath: string) => {
    const tempDir = await enteTempDirPath();
    if (!tempFilePath.startsWith(tempDir))
        throw new Error(`Attempting to delete a non-temp file ${tempFilePath}`);
    await fs.rm(tempFilePath, { force: true });
};

/** The result of {@link makeFileForDataOrPathOrZipItem}. */
interface FileForDataOrPathOrZipItem {
    /**
     * The path to the file (possibly temporary).
     */
    path: string;
    /**
     * `true` if {@link path} points to a temporary file which should be deleted
     * once we are done processing.
     */
    isFileTemporary: boolean;
    /**
     * A function that can be called to actually write the contents of the
     * source `Uint8Array | string | ZipItem` into the file at {@link path}.
     *
     * It will do nothing in the case when the source is already a path. In the
     * other two cases this function will write the data or zip item into the
     * file at {@link path}.
     */
    writeToTemporaryFile: () => Promise<void>;
}

/**
 * Return the path to a file, a boolean indicating if this is a temporary path
 * that needs to be deleted after processing, and a function to write the given
 * {@link dataOrPathOrZipItem} into that temporary file if needed.
 *
 * @param dataOrPathOrZipItem The contents of the file, or the path to an
 * existing file, or a (path to a zip file, name of an entry within that zip
 * file) tuple.
 */
export const makeFileForDataOrPathOrZipItem = async (
    dataOrPathOrZipItem: Uint8Array | string | ZipItem,
): Promise<FileForDataOrPathOrZipItem> => {
    let path: string;
    let isFileTemporary: boolean;
    let writeToTemporaryFile = async () => {
        /* no-op */
    };

    if (typeof dataOrPathOrZipItem == "string") {
        path = dataOrPathOrZipItem;
        isFileTemporary = false;
    } else {
        path = await makeTempFilePath();
        isFileTemporary = true;
        if (dataOrPathOrZipItem instanceof Uint8Array) {
            writeToTemporaryFile = () =>
                fs.writeFile(path, dataOrPathOrZipItem);
        } else {
            writeToTemporaryFile = async () => {
                const [zipPath, entryName] = dataOrPathOrZipItem;
                const zip = new StreamZip.async({ file: zipPath });
                await zip.extract(entryName, path);
                await zip.close();
            };
        }
    }

    return { path, isFileTemporary, writeToTemporaryFile };
};

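A sketch of the call pattern this helper is designed for (the `runOnFile` wrapper is hypothetical, not part of this diff): materialize the input as a file if needed, hand its path to a worker function, and clean up the temporary file in a finally.

```ts
// Hypothetical wrapper showing the intended lifecycle: write the data if
// needed, operate on the path, then delete the file iff it was temporary.
const runOnFile = async <T>(
    input: Uint8Array | string | ZipItem,
    f: (path: string) => Promise<T>,
) => {
    const { path, isFileTemporary, writeToTemporaryFile } =
        await makeFileForDataOrPathOrZipItem(input);
    try {
        await writeToTemporaryFile();
        return await f(path);
    } finally {
        if (isFileTemporary) await deleteTempFile(path);
    }
};
```
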
@@ -37,37 +37,37 @@
 * - [main] desktop/src/main/ipc.ts contains impl
 */

-import { contextBridge, ipcRenderer } from "electron/renderer";
+import { contextBridge, ipcRenderer, webUtils } from "electron/renderer";

// While we can't import other code, we can import types since they're just
// needed when compiling and will not be needed or looked around for at runtime.
import type {
    AppUpdate,
    CollectionMapping,
    ElectronFile,
    FolderWatch,
    PendingUploads,
    ZipItem,
} from "./types/ipc";

// - General

-const appVersion = (): Promise<string> => ipcRenderer.invoke("appVersion");
+const appVersion = () => ipcRenderer.invoke("appVersion");

const logToDisk = (message: string): void =>
    ipcRenderer.send("logToDisk", message);

-const openDirectory = (dirPath: string): Promise<void> =>
+const openDirectory = (dirPath: string) =>
    ipcRenderer.invoke("openDirectory", dirPath);

-const openLogDirectory = (): Promise<void> =>
-    ipcRenderer.invoke("openLogDirectory");
+const openLogDirectory = () => ipcRenderer.invoke("openLogDirectory");
+
+const selectDirectory = () => ipcRenderer.invoke("selectDirectory");

const clearStores = () => ipcRenderer.send("clearStores");

-const encryptionKey = (): Promise<string | undefined> =>
-    ipcRenderer.invoke("encryptionKey");
+const encryptionKey = () => ipcRenderer.invoke("encryptionKey");

-const saveEncryptionKey = (encryptionKey: string): Promise<void> =>
+const saveEncryptionKey = (encryptionKey: string) =>
    ipcRenderer.invoke("saveEncryptionKey", encryptionKey);

const onMainWindowFocus = (cb?: () => void) => {
@@ -99,121 +99,93 @@ const skipAppUpdate = (version: string) => {

// - FS

-const fsExists = (path: string): Promise<boolean> =>
-    ipcRenderer.invoke("fsExists", path);
+const fsExists = (path: string) => ipcRenderer.invoke("fsExists", path);

-const fsMkdirIfNeeded = (dirPath: string): Promise<void> =>
+const fsMkdirIfNeeded = (dirPath: string) =>
    ipcRenderer.invoke("fsMkdirIfNeeded", dirPath);

-const fsRename = (oldPath: string, newPath: string): Promise<void> =>
+const fsRename = (oldPath: string, newPath: string) =>
    ipcRenderer.invoke("fsRename", oldPath, newPath);

-const fsRmdir = (path: string): Promise<void> =>
-    ipcRenderer.invoke("fsRmdir", path);
+const fsRmdir = (path: string) => ipcRenderer.invoke("fsRmdir", path);

-const fsRm = (path: string): Promise<void> => ipcRenderer.invoke("fsRm", path);
+const fsRm = (path: string) => ipcRenderer.invoke("fsRm", path);

-const fsReadTextFile = (path: string): Promise<string> =>
+const fsReadTextFile = (path: string) =>
    ipcRenderer.invoke("fsReadTextFile", path);

-const fsWriteFile = (path: string, contents: string): Promise<void> =>
+const fsWriteFile = (path: string, contents: string) =>
    ipcRenderer.invoke("fsWriteFile", path, contents);

-const fsIsDir = (dirPath: string): Promise<boolean> =>
-    ipcRenderer.invoke("fsIsDir", dirPath);
-
-// - AUDIT below this
+const fsIsDir = (dirPath: string) => ipcRenderer.invoke("fsIsDir", dirPath);

// - Conversion

-const convertToJPEG = (
-    fileData: Uint8Array,
-    filename: string,
-): Promise<Uint8Array> =>
-    ipcRenderer.invoke("convertToJPEG", fileData, filename);
+const convertToJPEG = (imageData: Uint8Array) =>
+    ipcRenderer.invoke("convertToJPEG", imageData);

const generateImageThumbnail = (
-    inputFile: File | ElectronFile,
+    dataOrPathOrZipItem: Uint8Array | string | ZipItem,
    maxDimension: number,
    maxSize: number,
-): Promise<Uint8Array> =>
+) =>
    ipcRenderer.invoke(
        "generateImageThumbnail",
-        inputFile,
+        dataOrPathOrZipItem,
        maxDimension,
        maxSize,
    );

-const runFFmpegCmd = (
-    cmd: string[],
-    inputFile: File | ElectronFile,
-    outputFileName: string,
-    dontTimeout?: boolean,
-): Promise<File> =>
+const ffmpegExec = (
+    command: string[],
+    dataOrPathOrZipItem: Uint8Array | string | ZipItem,
+    outputFileExtension: string,
+    timeoutMS: number,
+) =>
    ipcRenderer.invoke(
-        "runFFmpegCmd",
-        cmd,
-        inputFile,
-        outputFileName,
-        dontTimeout,
+        "ffmpegExec",
+        command,
+        dataOrPathOrZipItem,
+        outputFileExtension,
+        timeoutMS,
    );

// - ML

-const clipImageEmbedding = (jpegImageData: Uint8Array): Promise<Float32Array> =>
+const clipImageEmbedding = (jpegImageData: Uint8Array) =>
    ipcRenderer.invoke("clipImageEmbedding", jpegImageData);

-const clipTextEmbedding = (text: string): Promise<Float32Array> =>
-    ipcRenderer.invoke("clipTextEmbedding", text);
+const clipTextEmbeddingIfAvailable = (text: string) =>
+    ipcRenderer.invoke("clipTextEmbeddingIfAvailable", text);

-const detectFaces = (input: Float32Array): Promise<Float32Array> =>
+const detectFaces = (input: Float32Array) =>
    ipcRenderer.invoke("detectFaces", input);

-const faceEmbedding = (input: Float32Array): Promise<Float32Array> =>
+const faceEmbedding = (input: Float32Array) =>
    ipcRenderer.invoke("faceEmbedding", input);

-// - File selection
-
-// TODO: Deprecated - use dialogs on the renderer process itself
-
-const selectDirectory = (): Promise<string> =>
-    ipcRenderer.invoke("selectDirectory");
-
-const showUploadFilesDialog = (): Promise<ElectronFile[]> =>
-    ipcRenderer.invoke("showUploadFilesDialog");
-
-const showUploadDirsDialog = (): Promise<ElectronFile[]> =>
-    ipcRenderer.invoke("showUploadDirsDialog");
-
-const showUploadZipDialog = (): Promise<{
-    zipPaths: string[];
-    files: ElectronFile[];
-}> => ipcRenderer.invoke("showUploadZipDialog");
+const legacyFaceCrop = (faceID: string) =>
+    ipcRenderer.invoke("legacyFaceCrop", faceID);

// - Watch

-const watchGet = (): Promise<FolderWatch[]> => ipcRenderer.invoke("watchGet");
+const watchGet = () => ipcRenderer.invoke("watchGet");

-const watchAdd = (
-    folderPath: string,
-    collectionMapping: CollectionMapping,
-): Promise<FolderWatch[]> =>
+const watchAdd = (folderPath: string, collectionMapping: CollectionMapping) =>
    ipcRenderer.invoke("watchAdd", folderPath, collectionMapping);

-const watchRemove = (folderPath: string): Promise<FolderWatch[]> =>
+const watchRemove = (folderPath: string) =>
    ipcRenderer.invoke("watchRemove", folderPath);

const watchUpdateSyncedFiles = (
    syncedFiles: FolderWatch["syncedFiles"],
    folderPath: string,
-): Promise<void> =>
-    ipcRenderer.invoke("watchUpdateSyncedFiles", syncedFiles, folderPath);
+) => ipcRenderer.invoke("watchUpdateSyncedFiles", syncedFiles, folderPath);

const watchUpdateIgnoredFiles = (
    ignoredFiles: FolderWatch["ignoredFiles"],
    folderPath: string,
-): Promise<void> =>
-    ipcRenderer.invoke("watchUpdateIgnoredFiles", ignoredFiles, folderPath);
+) => ipcRenderer.invoke("watchUpdateIgnoredFiles", ignoredFiles, folderPath);

const watchOnAddFile = (f: (path: string, watch: FolderWatch) => void) => {
    ipcRenderer.removeAllListeners("watchAddFile");
@@ -236,69 +208,97 @@ const watchOnRemoveDir = (f: (path: string, watch: FolderWatch) => void) => {
    );
};

-const watchFindFiles = (folderPath: string): Promise<string[]> =>
+const watchFindFiles = (folderPath: string) =>
    ipcRenderer.invoke("watchFindFiles", folderPath);

+const watchReset = async () => {
+    ipcRenderer.removeAllListeners("watchAddFile");
+    ipcRenderer.removeAllListeners("watchRemoveFile");
+    ipcRenderer.removeAllListeners("watchRemoveDir");
+    await ipcRenderer.invoke("watchReset");
+};
+
// - Upload

-const pendingUploads = (): Promise<PendingUploads | undefined> =>
-    ipcRenderer.invoke("pendingUploads");
-
-const setPendingUploadCollection = (collectionName: string): Promise<void> =>
-    ipcRenderer.invoke("setPendingUploadCollection", collectionName);
-
-const setPendingUploadFiles = (
-    type: PendingUploads["type"],
-    filePaths: string[],
-): Promise<void> =>
-    ipcRenderer.invoke("setPendingUploadFiles", type, filePaths);
-
-// -
-
-const getElectronFilesFromGoogleZip = (
-    filePath: string,
-): Promise<ElectronFile[]> =>
-    ipcRenderer.invoke("getElectronFilesFromGoogleZip", filePath);
-
-const getDirFiles = (dirPath: string): Promise<ElectronFile[]> =>
-    ipcRenderer.invoke("getDirFiles", dirPath);
+const pathForFile = (file: File) => {
+    const path = webUtils.getPathForFile(file);
+    // The path that we get back from `webUtils.getPathForFile` on Windows uses
+    // "\" as the path separator. Convert them to POSIX separators.
+    //
+    // Note that we do not have access to the path or the os module in the
+    // preload script, thus this hand rolled transformation.
+    //
+    // However that makes TypeScript fidgety since it cannot find navigator, as
+    // we haven't included "lib": ["dom"] in our tsconfig to avoid making DOM
+    // APIs available to our main Node.js code. We could create a separate
+    // tsconfig just for the preload script, but for now let's go with a cast.
+    //
+    // @ts-expect-error navigator is not defined.
+    const platform = (navigator as { platform: string }).platform;
+    return platform.toLowerCase().includes("win")
+        ? path.split("\\").join("/")
+        : path;
+};
+
+const listZipItems = (zipPath: string) =>
+    ipcRenderer.invoke("listZipItems", zipPath);
+
+const pathOrZipItemSize = (pathOrZipItem: string | ZipItem) =>
+    ipcRenderer.invoke("pathOrZipItemSize", pathOrZipItem);
+
+const pendingUploads = () => ipcRenderer.invoke("pendingUploads");
+
+const setPendingUploads = (pendingUploads: PendingUploads) =>
+    ipcRenderer.invoke("setPendingUploads", pendingUploads);
+
+const markUploadedFiles = (paths: PendingUploads["filePaths"]) =>
+    ipcRenderer.invoke("markUploadedFiles", paths);
+
+const markUploadedZipItems = (items: PendingUploads["zipItems"]) =>
+    ipcRenderer.invoke("markUploadedZipItems", items);
+
+const clearPendingUploads = () => ipcRenderer.invoke("clearPendingUploads");

-//
-// These objects exposed here will become available to the JS code in our
-// renderer (the web/ code) as `window.ElectronAPIs.*`
-//
-// There are a few related concepts at play here, and it might be worthwhile to
-// read their (excellent) documentation to get an understanding;
-//
-// - ContextIsolation:
-//   https://www.electronjs.org/docs/latest/tutorial/context-isolation
-//
-// - IPC https://www.electronjs.org/docs/latest/tutorial/ipc
-//
-// [Note: Transferring large amount of data over IPC]
-//
-// Electron's IPC implementation uses the HTML standard Structured Clone
-// Algorithm to serialize objects passed between processes.
-// https://www.electronjs.org/docs/latest/tutorial/ipc#object-serialization
-//
-// In particular, ArrayBuffer is eligible for structured cloning.
-// https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm
-//
-// Also, ArrayBuffer is "transferable", which means it is a zero-copy operation
-// when it happens across threads.
-// https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Transferable_objects
-//
-// In our case though, we're not dealing with threads but separate processes. So
-// the ArrayBuffer will be copied:
-// > "parameters, errors and return values are **copied** when they're sent over
-// the bridge".
-// https://www.electronjs.org/docs/latest/api/context-bridge#methods
-//
-// The copy itself is relatively fast, but the problem with transferring large
-// amounts of data is potentially running out of memory during the copy.
-//
-// For an alternative, see [Note: IPC streams].
-//
+/**
+ * These objects exposed here will become available to the JS code in our
+ * renderer (the web/ code) as `window.ElectronAPIs.*`
+ *
+ * There are a few related concepts at play here, and it might be worthwhile to
+ * read their (excellent) documentation to get an understanding;
+ *
+ * - ContextIsolation:
+ *   https://www.electronjs.org/docs/latest/tutorial/context-isolation
+ *
+ * - IPC https://www.electronjs.org/docs/latest/tutorial/ipc
+ *
+ * ---
+ *
+ * [Note: Transferring large amount of data over IPC]
+ *
+ * Electron's IPC implementation uses the HTML standard Structured Clone
+ * Algorithm to serialize objects passed between processes.
+ * https://www.electronjs.org/docs/latest/tutorial/ipc#object-serialization
+ *
+ * In particular, ArrayBuffer is eligible for structured cloning.
+ * https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm
+ *
+ * Also, ArrayBuffer is "transferable", which means it is a zero-copy operation
+ * when it happens across threads.
+ * https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Transferable_objects
+ *
+ * In our case though, we're not dealing with threads but separate processes.
+ * So the ArrayBuffer will be copied:
+ *
+ * > "parameters, errors and return values are **copied** when they're sent over
+ * > the bridge".
+ * >
+ * > https://www.electronjs.org/docs/latest/api/context-bridge#methods
+ *
+ * The copy itself is relatively fast, but the problem with transferring large
+ * amounts of data is potentially running out of memory during the copy.
+ *
+ * For an alternative, see [Note: IPC streams].
+ */
contextBridge.exposeInMainWorld("electron", {
    // - General

@@ -306,6 +306,7 @@ contextBridge.exposeInMainWorld("electron", {
    logToDisk,
    openDirectory,
    openLogDirectory,
+    selectDirectory,
    clearStores,
    encryptionKey,
    saveEncryptionKey,
@@ -335,21 +336,15 @@

    convertToJPEG,
    generateImageThumbnail,
-    runFFmpegCmd,
+    ffmpegExec,

    // - ML

    clipImageEmbedding,
-    clipTextEmbedding,
+    clipTextEmbeddingIfAvailable,
    detectFaces,
    faceEmbedding,

-    // - File selection
-
-    selectDirectory,
-    showUploadFilesDialog,
-    showUploadDirsDialog,
-    showUploadZipDialog,
+    legacyFaceCrop,

    // - Watch

@@ -357,22 +352,23 @@
        get: watchGet,
        add: watchAdd,
        remove: watchRemove,
-        updateSyncedFiles: watchUpdateSyncedFiles,
-        updateIgnoredFiles: watchUpdateIgnoredFiles,
        onAddFile: watchOnAddFile,
        onRemoveFile: watchOnRemoveFile,
        onRemoveDir: watchOnRemoveDir,
        findFiles: watchFindFiles,
+        updateSyncedFiles: watchUpdateSyncedFiles,
+        updateIgnoredFiles: watchUpdateIgnoredFiles,
+        reset: watchReset,
    },

    // - Upload

+    pathForFile,
+    listZipItems,
+    pathOrZipItemSize,
    pendingUploads,
-    setPendingUploadCollection,
-    setPendingUploadFiles,
-
-    // -
-
-    getElectronFilesFromGoogleZip,
-    getDirFiles,
+    setPendingUploads,
+    markUploadedFiles,
+    markUploadedZipItems,
+    clearPendingUploads,
});

5 desktop/src/thirdparty/clip-bpe-ts/mod.ts vendored
@@ -1,3 +1,5 @@
+/* eslint-disable */
+
import * as htmlEntities from "html-entities";
import bpeVocabData from "./bpe_simple_vocab_16e6";
// import ftfy from "https://deno.land/x/ftfy_pyodide@v0.1.1/mod.js";

@@ -410,6 +412,7 @@ export default class {
                newWord.push(first + second);
                i += 2;
            } else {
+                // @ts-expect-error "Array indexing can return undefined but not modifying thirdparty code"
                newWord.push(word[i]);
                i += 1;
            }

@@ -434,6 +437,7 @@ export default class {
            .map((b) => this.byteEncoder[b.charCodeAt(0) as number])
            .join("");
        bpeTokens.push(
+            // @ts-expect-error "Array indexing can return undefined but not modifying thirdparty code"
            ...this.bpe(token)
                .split(" ")
                .map((bpeToken: string) => this.encoder[bpeToken]),

@@ -458,6 +462,7 @@ export default class {
            .join("");
        text = [...text]
            .map((c) => this.byteDecoder[c])
+            // @ts-expect-error "Array indexing can return undefined but not modifying thirdparty code"
            .map((v) => String.fromCharCode(v))
            .join("")
            .replace(/<\/w>/g, " ");

@@ -25,55 +25,20 @@ export interface FolderWatchSyncedFile {
    collectionID: number;
}

+export type ZipItem = [zipPath: string, entryName: string];
+
export interface PendingUploads {
-    collectionName: string;
-    type: "files" | "zips";
-    files: ElectronFile[];
+    collectionName: string | undefined;
+    filePaths: string[];
+    zipItems: ZipItem[];
}

/**
- * Errors that have special semantics on the web side.
+ * See: [Note: Custom errors across Electron/Renderer boundary]
 *
- * [Note: Custom errors across Electron/Renderer boundary]
- *
- * We need to use the `message` field to disambiguate between errors thrown by
- * the main process when invoked from the renderer process. This is because:
- *
- * > Errors thrown through `handle` in the main process are not transparent as
- * > they are serialized and only the `message` property from the original error
- * > is provided to the renderer process.
- * >
- * > - https://www.electronjs.org/docs/latest/tutorial/ipc
- * >
- * > Ref: https://github.com/electron/electron/issues/24427
+ * Note: this is not a type, and cannot be used in preload.js; it is only meant
+ * for use in the main process code.
 */
-export const CustomErrors = {
-    WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED:
-        "Windows native image processing is not supported",
-    UNSUPPORTED_PLATFORM: (platform: string, arch: string) =>
-        `Unsupported platform - ${platform} ${arch}`,
-    MODEL_DOWNLOAD_PENDING:
-        "Model download pending, skipping clip search request",
+export const CustomErrorMessage = {
+    NotAvailable: "This feature is not available on the current OS/arch",
};

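Since only `message` survives the `ipcRenderer.invoke` serialization, the check on the web side boils down to a string comparison. A sketch of how a renderer might detect this condition; the `electron` bridge declaration and the duplicated string literal are assumptions for illustration, not part of this diff:

```ts
// Renderer-side sketch; `electron` is the contextBridge-exposed API from the
// preload script, and the string mirrors CustomErrorMessage.NotAvailable (the
// web code keeps its own copy since it cannot import main-process code).
declare const electron: {
    clipTextEmbeddingIfAvailable: (text: string) => Promise<Float32Array>;
};

const notAvailableMessage =
    "This feature is not available on the current OS/arch";

// Electron prefixes the re-thrown error's message with "Error invoking remote
// method ...", so match on the suffix rather than testing for equality.
const clipTextEmbedding = async (text: string) => {
    try {
        return await electron.clipTextEmbeddingIfAvailable(text);
    } catch (e) {
        if (e instanceof Error && e.message.endsWith(notAvailableMessage))
            return undefined;
        throw e;
    }
};
```
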
/**
 * Deprecated - Use File + webUtils.getPathForFile instead
 *
 * Electron used to augment the standard web
 * [File](https://developer.mozilla.org/en-US/docs/Web/API/File) object with an
 * additional `path` property. This is now deprecated, and will be removed in a
 * future release.
 * https://www.electronjs.org/docs/latest/api/file-object
 *
 * The alternative to the `path` property is to use `webUtils.getPathForFile`
 * https://www.electronjs.org/docs/latest/api/web-utils
 */
export interface ElectronFile {
    name: string;
    path: string;
    size: number;
    lastModified: number;
    stream: () => Promise<ReadableStream<Uint8Array>>;
    blob: () => Promise<Blob>;
    arrayBuffer: () => Promise<Uint8Array>;
}

Some files were not shown because too many files have changed in this diff.