Merge branch 'main' into mobile-resumable

This commit is contained in:
Neeraj Gupta 2024-05-07 15:36:29 +05:30
commit 12b4ab122e
429 changed files with 14490 additions and 12930 deletions

View file

@ -30,7 +30,7 @@ jobs:
upload_sources: true
upload_translations: false
download_translations: true
localization_branch_name: crowdin-translations-auth
localization_branch_name: translations/auth
create_pull_request: true
skip_untranslated_strings: true
pull_request_title: "[auth] New translations"

View file

@ -17,8 +17,8 @@ name: "Release (auth)"
# We use a suffix like `-test` to indicate that these are test tags, and that
# they belong to a pre-release.
#
# If you need to do multiple tests, add a +x at the end of the tag. e.g.
# `auth-v1.2.3-test+1`.
# If you need to do multiple tests, add a .x at the end of the tag. e.g.
# `auth-v1.2.3-test.1`.
#
# Once the testing is done, also delete the tag(s) please.
@ -85,7 +85,7 @@ jobs:
- name: Install dependencies for desktop build
run: |
sudo apt-get update -y
sudo apt-get install -y libsecret-1-dev libsodium-dev libwebkit2gtk-4.0-dev libfuse2 ninja-build libgtk-3-dev dpkg-dev pkg-config rpm libsqlite3-dev locate appindicator3-0.1 libappindicator3-dev libffi-dev libtiff5
sudo apt-get install -y libsecret-1-dev libsodium-dev libwebkit2gtk-4.0-dev libfuse2 ninja-build libgtk-3-dev dpkg-dev pkg-config rpm patchelf libsqlite3-dev locate appindicator3-0.1 libappindicator3-dev libffi-dev libtiff5
sudo updatedb --localpaths='/usr/lib/x86_64-linux-gnu'
- name: Install appimagetool

.github/workflows/desktop-lint.yml (new file)
View file

@ -0,0 +1,30 @@
name: "Lint (desktop)"
on:
# Run on every push to a branch other than main that changes desktop/
push:
branches-ignore: [main, "deploy/**"]
paths:
- "desktop/**"
- ".github/workflows/desktop-lint.yml"
jobs:
lint:
runs-on: ubuntu-latest
defaults:
run:
working-directory: desktop
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup node and enable yarn caching
uses: actions/setup-node@v4
with:
node-version: 20
cache: "yarn"
cache-dependency-path: "desktop/yarn.lock"
- run: yarn install
- run: yarn lint

View file

@ -30,7 +30,7 @@ jobs:
upload_sources: true
upload_translations: false
download_translations: true
localization_branch_name: crowdin-translations-mobile
localization_branch_name: translations/mobile
create_pull_request: true
skip_untranslated_strings: true
pull_request_title: "[mobile] New translations"

View file

@ -0,0 +1,57 @@
name: "Internal Release - Photos"
on:
workflow_dispatch: # Allow manually running the action
env:
FLUTTER_VERSION: "3.19.3"
jobs:
build:
runs-on: ubuntu-latest
defaults:
run:
working-directory: mobile
steps:
- name: Checkout code and submodules
uses: actions/checkout@v4
with:
submodules: recursive
- name: Setup JDK 17
uses: actions/setup-java@v1
with:
java-version: 17
- name: Install Flutter ${{ env.FLUTTER_VERSION }}
uses: subosito/flutter-action@v2
with:
channel: "stable"
flutter-version: ${{ env.FLUTTER_VERSION }}
cache: true
- name: Setup keys
uses: timheuer/base64-to-file@v1
with:
fileName: "keystore/ente_photos_key.jks"
encodedString: ${{ secrets.SIGNING_KEY_PHOTOS }}
- name: Build PlayStore AAB
run: |
flutter build appbundle --release --flavor playstore
env:
SIGNING_KEY_PATH: "/home/runner/work/_temp/keystore/ente_photos_key.jks"
SIGNING_KEY_ALIAS: ${{ secrets.SIGNING_KEY_ALIAS_PHOTOS }}
SIGNING_KEY_PASSWORD: ${{ secrets.SIGNING_KEY_PASSWORD_PHOTOS }}
SIGNING_STORE_PASSWORD: ${{ secrets.SIGNING_STORE_PASSWORD_PHOTOS }}
- name: Upload AAB to PlayStore
uses: r0adkll/upload-google-play@v1
with:
serviceAccountJsonPlainText: ${{ secrets.SERVICE_ACCOUNT_JSON }}
packageName: io.ente.photos
releaseFiles: mobile/build/app/outputs/bundle/playstoreRelease/app-playstore-release.aab
track: internal
changesNotSentForReview: true

View file

@ -9,7 +9,7 @@ on:
- "photos-v*"
env:
FLUTTER_VERSION: "3.19.5"
FLUTTER_VERSION: "3.19.3"
jobs:
build:
@ -25,6 +25,11 @@ jobs:
with:
submodules: recursive
- name: Setup JDK 17
uses: actions/setup-java@v1
with:
java-version: 17
- name: Install Flutter ${{ env.FLUTTER_VERSION }}
uses: subosito/flutter-action@v2
with:

View file

@ -0,0 +1,39 @@
name: "Update Crowdin translations (web)"
# This is a variant of web-crowdin.yml that also uploads the translated strings
# (in addition to the source strings). This allows us to change the strings in
# our source code for an automated refactoring (e.g. renaming a key), and then
# run this workflow to update the data in Crowdin taking our source code as the
# source of truth.
on:
# Only allow running manually.
workflow_dispatch:
jobs:
synchronize-with-crowdin:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Crowdin's action
uses: crowdin/github-action@v1
with:
base_path: "web/"
config: "web/crowdin.yml"
upload_sources: true
# This is what differs from web-crowdin.yml
upload_translations: true
download_translations: true
localization_branch_name: translations/web
create_pull_request: true
skip_untranslated_strings: true
pull_request_title: "[web] Updated translations"
pull_request_body: "Updated translations from [Crowdin](https://crowdin.com/project/ente-photos-web)"
pull_request_base_branch_name: "main"
project_id: 569613
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }}

View file

@ -36,7 +36,7 @@ jobs:
upload_sources: true
upload_translations: false
download_translations: true
localization_branch_name: crowdin-translations-web
localization_branch_name: translations/web
create_pull_request: true
skip_untranslated_strings: true
pull_request_title: "[web] New translations"

View file

@ -3,7 +3,7 @@ name: "Deploy (accounts)"
on:
push:
# Run workflow on pushes to the deploy/accounts
branches: [deploy/accounts]
branches: [deploy/accounts, deploy-f/accounts]
jobs:
deploy:

View file

@ -3,7 +3,7 @@ name: "Deploy (cast)"
on:
push:
# Run workflow on pushes to the deploy/cast
branches: [deploy/cast]
branches: [deploy/cast, deploy-f/cast]
jobs:
deploy:

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "المصدِّر",
"codeSecretKeyHint": "الرمز السري",
"codeAccountHint": "الحساب (you@domain.com)",
"accountKeyType": "نوع المفتاح",
"sessionExpired": "انتهت صلاحية الجلسة",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "Aussteller",
"codeSecretKeyHint": "Geheimer Schlüssel",
"codeAccountHint": "Konto (you@domain.com)",
"accountKeyType": "Art des Schlüssels",
"sessionExpired": "Sitzung abgelaufen",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
@ -78,12 +77,14 @@
"data": "Datei",
"importCodes": "Codes importieren",
"importTypePlainText": "Klartext",
"importTypeEnteEncrypted": "Verschlüsselter Ente-Export",
"passwordForDecryptingExport": "Passwort um den Export zu entschlüsseln",
"passwordEmptyError": "Passwort kann nicht leer sein",
"importFromApp": "Importiere Codes von {appName}",
"importGoogleAuthGuide": "Exportiere deine Accounts von Google Authenticator zu einem QR-Code, durch die \"Konten übertragen\" Option. Scanne den QR-Code danach mit einem anderen Gerät.\n\nTipp: Du kannst die Kamera eines Laptops verwenden, um ein Foto den dem QR-Code zu erstellen.",
"importSelectJsonFile": "Wähle eine JSON-Datei",
"importSelectAppExport": "{appName} Exportdatei auswählen",
"importEnteEncGuide": "Wähle die von Ente exportierte, verschlüsselte JSON-Datei",
"importRaivoGuide": "Verwenden Sie die Option \"Export OTPs to Zip archive\" in den Raivo-Einstellungen.\n\nEntpacken Sie die Zip-Datei und importieren Sie die JSON-Datei.",
"importBitwardenGuide": "Verwenden Sie die Option \"Tresor exportieren\" innerhalb der Bitwarden Tools und importieren Sie die unverschlüsselte JSON-Datei.",
"importAegisGuide": "Verwenden Sie die Option \"Tresor exportieren\" in den Aegis-Einstellungen.\n\nFalls Ihr Tresor verschlüsselt ist, müssen Sie das Passwort für den Tresor eingeben, um ihn zu entschlüsseln.",
@ -121,12 +122,14 @@
"suggestFeatures": "Features vorschlagen",
"faq": "FAQ",
"faq_q_1": "Wie sicher ist Auth?",
"faq_a_1": "Alle Codes, die du über Auth sicherst, werden Ende-zu-Ende-verschlüsselt gespeichert. Das bedeutet, dass nur du auf deine Codes zugreifen kannst. Unsere Anwendungen sind quelloffen und unsere Kryptografie wurde extern geprüft.",
"faq_q_2": "Kann ich auf meine Codes auf dem Desktop zugreifen?",
"faq_a_2": "Sie können auf Ihre Codes im Web via auth.ente.io zugreifen.",
"faq_q_3": "Wie kann ich Codes löschen?",
"faq_a_3": "Sie können einen Code löschen, indem Sie auf dem Code nach links wischen.",
"faq_q_4": "Wie kann ich das Projekt unterstützen?",
"faq_a_4": "Sie können die Entwicklung dieses Projekts unterstützen, indem Sie unsere Fotos-App auf ente.io abonnieren.",
"faq_q_5": "Wie kann ich die FaceID-Sperre in Auth aktivieren",
"faq_a_5": "Sie können FaceID unter Einstellungen → Sicherheit → Sperrbildschirm aktivieren.",
"somethingWentWrongMessage": "Ein Fehler ist aufgetreten, bitte versuchen Sie es erneut",
"leaveFamily": "Familie verlassen",
@ -196,6 +199,9 @@
"doThisLater": "Auf später verschieben",
"saveKey": "Schlüssel speichern",
"save": "Speichern",
"send": "Senden",
"saveOrSendDescription": "Möchtest du dies in deinem Speicher (standardmäßig im Ordner Downloads) oder an andere Apps senden?",
"saveOnlyDescription": "Möchtest du dies in deinem Speicher (standardmäßig im Ordner Downloads) speichern?",
"back": "Zurück",
"createAccount": "Account erstellen",
"passwordStrength": "Passwortstärke: {passwordStrengthValue}",
@ -343,6 +349,7 @@
"deleteCodeAuthMessage": "Authentifizieren, um Code zu löschen",
"showQRAuthMessage": "Authentifizieren, um QR-Code anzuzeigen",
"confirmAccountDeleteTitle": "Kontolöschung bestätigen",
"confirmAccountDeleteMessage": "Dieses Konto ist mit anderen Ente-Apps verknüpft, falls du welche verwendest.\n\nDeine hochgeladenen Daten werden in allen Ente-Apps zur Löschung vorgemerkt und dein Konto wird endgültig gelöscht.",
"androidBiometricHint": "Identität bestätigen",
"@androidBiometricHint": {
"description": "Hint message advising the user how to authenticate with biometrics. It is used on Android side. Maximum 60 characters."

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "Issuer",
"codeSecretKeyHint": "Secret Key",
"codeAccountHint": "Account (you@domain.com)",
"accountKeyType": "Type of key",
"sessionExpired": "Session expired",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "Emisor",
"codeSecretKeyHint": "Llave Secreta",
"codeAccountHint": "Cuenta (tu@dominio.com)",
"accountKeyType": "Tipo de llave",
"sessionExpired": "La sesión ha expirado",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"
@ -113,6 +112,7 @@
"copied": "Copiado",
"pleaseTryAgain": "Por favor, inténtalo nuevamente",
"existingUser": "Usuario existente",
"newUser": "Nuevo a Ente",
"delete": "Borrar",
"enterYourPasswordHint": "Ingrese su contraseña",
"forgotPassword": "Olvidé mi contraseña",
@ -138,6 +138,8 @@
"enterCodeHint": "Ingrese el código de seis dígitos de su aplicación de autenticación",
"lostDeviceTitle": "¿Perdió su dispositivo?",
"twoFactorAuthTitle": "Autenticación de dos factores",
"passkeyAuthTitle": "Verificación de llave de acceso",
"verifyPasskey": "Verificar llave de acceso",
"recoverAccount": "Recuperar cuenta",
"enterRecoveryKeyHint": "Introduzca su clave de recuperación",
"recover": "Recuperar",
@ -191,6 +193,8 @@
"recoveryKeySaveDescription": "Nosotros no almacenamos esta clave, por favor guarde dicha clave de 24 palabras en un lugar seguro.",
"doThisLater": "Hacer esto más tarde",
"saveKey": "Guardar Clave",
"save": "Guardar",
"send": "Enviar",
"back": "Atrás",
"createAccount": "Crear cuenta",
"passwordStrength": "Fortaleza de la contraseña: {passwordStrengthValue}",
@ -397,5 +401,8 @@
"signOutOtherDevices": "Cerrar la sesión de otros dispositivos",
"doNotSignOut": "No cerrar la sesión",
"hearUsWhereTitle": "¿Cómo conoció Ente? (opcional)",
"hearUsExplanation": "No rastreamos las aplicaciones instaladas. ¡Nos ayudaría si nos dijera dónde nos encontró!"
"hearUsExplanation": "No rastreamos las aplicaciones instaladas. ¡Nos ayudaría si nos dijera dónde nos encontró!",
"passkey": "Llave de acceso",
"developerSettingsWarning": "¿Estás seguro de que quieres modificar los ajustes de desarrollador?",
"developerSettings": "Ajustes de desarrollador"
}

View file

@ -14,7 +14,6 @@
"codeIssuerHint": "صادر کننده",
"codeSecretKeyHint": "کلید مخفی",
"codeAccountHint": "حساب (you@domain.com)",
"accountKeyType": "نوع کلید",
"sessionExpired": "نشست منقضی شده است",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -12,7 +12,6 @@
"codeIssuerHint": "Myöntäjä",
"codeSecretKeyHint": "Salainen avain",
"codeAccountHint": "Tili (sinun@jokinosoite.com)",
"accountKeyType": "Avaimen tyyppi",
"sessionExpired": "Istunto on vanheutunut",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "Émetteur",
"codeSecretKeyHint": "Clé secrète",
"codeAccountHint": "Compte (vous@exemple.com)",
"accountKeyType": "Type de clé",
"sessionExpired": "Session expirée",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -19,7 +19,6 @@
"codeIssuerHint": "מנפיק",
"codeSecretKeyHint": "מפתח סודי",
"codeAccountHint": "חשבון(you@domain.com)",
"accountKeyType": "סוג מפתח",
"sessionExpired": "זמן החיבור הסתיים",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "Emittente",
"codeSecretKeyHint": "Codice segreto",
"codeAccountHint": "Account (username@dominio.it)",
"accountKeyType": "Tipo di chiave",
"sessionExpired": "Sessione scaduta",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "発行者",
"codeSecretKeyHint": "秘密鍵",
"codeAccountHint": "アカウント (you@domain.com)",
"accountKeyType": "鍵の種類",
"sessionExpired": "セッションが失効しました",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "მომწოდებელი",
"codeSecretKeyHint": "გასაღები",
"codeAccountHint": "ანგარიში (you@domain.com)",
"accountKeyType": "გასაღების ტიპი",
"sessionExpired": "სესიის დრო ამოიწურა",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "Uitgever",
"codeSecretKeyHint": "Geheime sleutel",
"codeAccountHint": "Account (jij@domein.nl)",
"accountKeyType": "Type sleutel",
"sessionExpired": "Sessie verlopen",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "Wydawca",
"codeSecretKeyHint": "Tajny klucz",
"codeAccountHint": "Konto (ty@domena.com)",
"accountKeyType": "Rodzaj klucza",
"sessionExpired": "Sesja wygasła",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "Emissor",
"codeSecretKeyHint": "Chave secreta",
"codeAccountHint": "Conta (voce@dominio.com)",
"accountKeyType": "Tipo de chave",
"sessionExpired": "Sessão expirada",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "Эмитент",
"codeSecretKeyHint": "Секретный ключ",
"codeAccountHint": "Аккаунт (you@domain.com)",
"accountKeyType": "Тип ключа",
"sessionExpired": "Сеанс истек",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -16,7 +16,6 @@
"codeIssuerHint": "Utfärdare",
"codeSecretKeyHint": "Secret Key",
"codeAccountHint": "Konto (du@domän.com)",
"accountKeyType": "Typ av nyckel",
"sessionExpired": "Sessionen har gått ut",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "ኣዋጂ",
"codeSecretKeyHint": "ምስጢራዊ መፍትሕ",
"codeAccountHint": "ሕሳብ (you@domain.com)",
"accountKeyType": "ዓይነት መፍትሕ",
"sessionExpired": "ክፍለ ግዜኡ ኣኺሉ።",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "Yayınlayan",
"codeSecretKeyHint": "Gizli Anahtar",
"codeAccountHint": "Hesap (ornek@domain.com)",
"accountKeyType": "Anahtar türü",
"sessionExpired": "Oturum süresi doldu",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "Nhà phát hành",
"codeSecretKeyHint": "Khóa bí mật",
"codeAccountHint": "Tài khoản (bạn@miền.com)",
"accountKeyType": "Loại khóa",
"sessionExpired": "Phiên làm việc đã hết hạn",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -20,7 +20,6 @@
"codeIssuerHint": "发行人",
"codeSecretKeyHint": "私钥",
"codeAccountHint": "账户 (you@domain.com)",
"accountKeyType": "密钥类型",
"sessionExpired": "会话已过期",
"@sessionExpired": {
"description": "Title of the dialog when the users current session is invalid/expired"

View file

@ -37,6 +37,7 @@ import 'package:window_manager/window_manager.dart';
final _logger = Logger("main");
Future<void> initSystemTray() async {
if (PlatformUtil.isMobile()) return;
String path = Platform.isWindows
? 'assets/icons/auth-icon.ico'
: 'assets/icons/auth-icon.png';

View file

@ -2,6 +2,7 @@ import 'package:ente_auth/utils/totp_util.dart';
class Code {
static const defaultDigits = 6;
static const steamDigits = 5;
static const defaultPeriod = 30;
int? generatedID;
@ -57,36 +58,42 @@ class Code {
updatedAlgo,
updatedType,
updatedCounter,
"otpauth://${updatedType.name}/$updateIssuer:$updateAccount?algorithm=${updatedAlgo.name}&digits=$updatedDigits&issuer=$updateIssuer&period=$updatePeriod&secret=$updatedSecret${updatedType == Type.hotp ? "&counter=$updatedCounter" : ""}",
"otpauth://${updatedType.name}/$updateIssuer:$updateAccount?algorithm=${updatedAlgo.name}"
"&digits=$updatedDigits&issuer=$updateIssuer"
"&period=$updatePeriod&secret=$updatedSecret${updatedType == Type.hotp ? "&counter=$updatedCounter" : ""}",
generatedID: generatedID,
);
}
static Code fromAccountAndSecret(
Type type,
String account,
String issuer,
String secret,
int digits,
) {
return Code(
account,
issuer,
defaultDigits,
digits,
defaultPeriod,
secret,
Algorithm.sha1,
Type.totp,
type,
0,
"otpauth://totp/$issuer:$account?algorithm=SHA1&digits=6&issuer=$issuer&period=30&secret=$secret",
"otpauth://${type.name}/$issuer:$account?algorithm=SHA1&digits=$digits&issuer=$issuer&period=30&secret=$secret",
);
}
static Code fromRawData(String rawData) {
Uri uri = Uri.parse(rawData);
final issuer = _getIssuer(uri);
try {
return Code(
_getAccount(uri),
_getIssuer(uri),
_getDigits(uri),
issuer,
_getDigits(uri, issuer),
_getPeriod(uri),
getSanitizedSecret(uri.queryParameters['secret']!),
_getAlgorithm(uri),
@ -140,10 +147,13 @@ class Code {
}
}
static int _getDigits(Uri uri) {
static int _getDigits(Uri uri, String issuer) {
try {
return int.parse(uri.queryParameters['digits']!);
} catch (e) {
if (issuer.toLowerCase() == "steam") {
return steamDigits;
}
return defaultDigits;
}
}
@ -186,6 +196,8 @@ class Code {
static Type _getType(Uri uri) {
if (uri.host == "totp") {
return Type.totp;
} else if (uri.host == "steam") {
return Type.steam;
} else if (uri.host == "hotp") {
return Type.hotp;
}
@ -223,6 +235,9 @@ class Code {
enum Type {
totp,
hotp,
steam;
bool get isTOTPCompatible => this == totp || this == steam;
}
enum Algorithm {
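
The `Code` changes above add first-class Steam support: otpauth URIs whose host is `steam` map to the new `Type.steam`, and when the `digits` query parameter is absent, a "steam" issuer now implies 5-digit codes rather than the default 6. A minimal TypeScript sketch of that inference rule (the `inferDigits` helper and constant names are illustrative, not part of the app):

```ts
// Mirrors the logic the diff adds to `Code._getDigits`: an explicit `digits`
// query parameter wins; otherwise a "steam" issuer implies 5 digits, and
// everything else falls back to the default of 6.
const DEFAULT_DIGITS = 6;
const STEAM_DIGITS = 5;

function inferDigits(uri: URL, issuer: string): number {
    const digits = uri.searchParams.get("digits");
    if (digits) {
        const n = parseInt(digits, 10);
        if (!Number.isNaN(n)) return n;
    }
    return issuer.toLowerCase() === "steam" ? STEAM_DIGITS : DEFAULT_DIGITS;
}

// A Steam entry that omits `digits` still gets 5-digit codes.
const uri = new URL("otpauth://steam/Steam:alice?secret=ABCDEF&issuer=Steam");
console.log(inferDigits(uri, "Steam")); // 5
```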

View file

@ -61,6 +61,8 @@ class _SetupEnterSecretKeyPageState extends State<SetupEnterSecretKeyPage> {
},
decoration: InputDecoration(
hintText: l10n.codeIssuerHint,
floatingLabelBehavior: FloatingLabelBehavior.auto,
labelText: l10n.codeIssuerHint,
),
controller: _issuerController,
autofocus: true,
@ -78,6 +80,8 @@ class _SetupEnterSecretKeyPageState extends State<SetupEnterSecretKeyPage> {
},
decoration: InputDecoration(
hintText: l10n.codeSecretKeyHint,
floatingLabelBehavior: FloatingLabelBehavior.auto,
labelText: l10n.codeSecretKeyHint,
suffixIcon: IconButton(
onPressed: () {
setState(() {
@ -105,12 +109,12 @@ class _SetupEnterSecretKeyPageState extends State<SetupEnterSecretKeyPage> {
},
decoration: InputDecoration(
hintText: l10n.codeAccountHint,
floatingLabelBehavior: FloatingLabelBehavior.auto,
labelText: l10n.codeAccountHint,
),
controller: _accountController,
),
const SizedBox(
height: 40,
),
const SizedBox(height: 40),
SizedBox(
width: 400,
child: OutlinedButton(
@ -152,6 +156,7 @@ class _SetupEnterSecretKeyPageState extends State<SetupEnterSecretKeyPage> {
final account = _accountController.text.trim();
final issuer = _issuerController.text.trim();
final secret = _secretController.text.trim().replaceAll(' ', '');
final isStreamCode = issuer.toLowerCase() == "steam";
if (widget.code != null && widget.code!.secret != secret) {
ButtonResult? result = await showChoiceActionSheet(
context,
@ -168,9 +173,11 @@ class _SetupEnterSecretKeyPageState extends State<SetupEnterSecretKeyPage> {
}
final Code newCode = widget.code == null
? Code.fromAccountAndSecret(
isStreamCode ? Type.steam : Type.totp,
account,
issuer,
secret,
isStreamCode ? Code.steamDigits : Code.defaultDigits,
)
: widget.code!.copyWith(
account: account,

View file

@ -53,7 +53,7 @@ class _CodeWidgetState extends State<CodeWidget> {
String newCode = _getCurrentOTP();
if (newCode != _currentCode.value) {
_currentCode.value = newCode;
if (widget.code.type == Type.totp) {
if (widget.code.type.isTOTPCompatible) {
_nextCode.value = _getNextTotp();
}
}
@ -78,7 +78,7 @@ class _CodeWidgetState extends State<CodeWidget> {
_shouldShowLargeIcon = PreferenceService.instance.shouldShowLargeIcons();
if (!_isInitialized) {
_currentCode.value = _getCurrentOTP();
if (widget.code.type == Type.totp) {
if (widget.code.type.isTOTPCompatible) {
_nextCode.value = _getNextTotp();
}
_isInitialized = true;
@ -213,7 +213,7 @@ class _CodeWidgetState extends State<CodeWidget> {
crossAxisAlignment: CrossAxisAlignment.start,
mainAxisAlignment: MainAxisAlignment.center,
children: [
if (widget.code.type == Type.totp)
if (widget.code.type.isTOTPCompatible)
CodeTimerProgress(
period: widget.code.period,
),
@ -263,7 +263,7 @@ class _CodeWidgetState extends State<CodeWidget> {
},
),
),
widget.code.type == Type.totp
widget.code.type.isTOTPCompatible
? GestureDetector(
onTap: () {
_copyNextToClipboard();
@ -481,7 +481,7 @@ class _CodeWidgetState extends State<CodeWidget> {
String _getNextTotp() {
try {
assert(widget.code.type == Type.totp);
assert(widget.code.type.isTOTPCompatible);
return getNextTotp(widget.code);
} catch (e) {
return context.l10n.error;

View file

@ -92,9 +92,11 @@ Future<int?> _processBitwardenExportFile(
var account = item['login']['username'];
code = Code.fromAccountAndSecret(
Type.totp,
account,
issuer,
totp,
Code.defaultDigits,
);
}

View file

@ -3,7 +3,7 @@ import 'package:flutter/foundation.dart';
import 'package:otp/otp.dart' as otp;
String getOTP(Code code) {
if(code.type == Type.hotp) {
if (code.type == Type.hotp) {
return _getHOTPCode(code);
}
return otp.OTP.generateTOTPCodeString(
@ -60,4 +60,4 @@ String safeDecode(String value) {
debugPrint("Failed to decode $e");
return value;
}
}
}

View file

@ -11,7 +11,7 @@ display_name: Auth
requires:
- libsqlite3x
- webkit2gtk-4.0
- webkit2gtk4.0
- libsodium
- libsecret
- libappindicator

View file

@ -293,9 +293,9 @@ packages:
dependency: "direct main"
description:
path: "packages/desktop_webview_window"
ref: HEAD
resolved-ref: "8cbbf9cd6efcfee5e0f420a36f7f8e7e64b667a1"
url: "https://github.com/MixinNetwork/flutter-plugins"
ref: fix-webkit-version
resolved-ref: fe2223e4edfecdbb3a97bb9e3ced73db4ae9d979
url: "https://github.com/ente-io/flutter-desktopwebview-fork"
source: git
version: "0.2.4"
device_info_plus:

View file

@ -1,6 +1,6 @@
name: ente_auth
description: ente two-factor authenticator
version: 2.0.55+255
version: 2.0.57+257
publish_to: none
environment:
@ -20,7 +20,8 @@ dependencies:
convert: ^3.1.1
desktop_webview_window:
git:
url: https://github.com/MixinNetwork/flutter-plugins
url: https://github.com/ente-io/flutter-desktopwebview-fork
ref: fix-webkit-version
path: packages/desktop_webview_window
device_info_plus: ^9.1.1
dio: ^5.4.0

View file

@ -36,7 +36,8 @@ ente --help
### Accounts
If you wish, you can add multiple accounts (your own and that of your family members) and export all data using this tool.
If you wish, you can add multiple accounts (your own and that of your family
members) and export all data using this tool.
#### Add an account
@ -44,6 +45,12 @@ If you wish, you can add multiple accounts (your own and that of your family mem
ente account add
```
> [!NOTE]
>
> `ente account add` does not create new accounts, it just adds pre-existing
> accounts to the list of accounts that the CLI knows about so that you can use
> them for other actions.
#### List accounts
```shell

View file

@ -27,7 +27,8 @@ var listAccCmd = &cobra.Command{
// Subcommand for 'account add'
var addAccCmd = &cobra.Command{
Use: "add",
Short: "Add a new account",
Short: "login into existing account",
Long: "Use this command to add an existing account to cli. For creating a new account, use the mobile,web or desktop app",
Run: func(cmd *cobra.Command, args []string) {
recoverWithLog()
ctrl.AddAccount(context.Background())

View file

@ -25,4 +25,4 @@ ente [flags]
* [ente export](ente_export.md) - Starts the export process
* [ente version](ente_version.md) - Prints the current version
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -11,9 +11,9 @@ Manage account settings
### SEE ALSO
* [ente](ente.md) - CLI tool for exporting your photos from ente.io
* [ente account add](ente_account_add.md) - Add a new account
* [ente account add](ente_account_add.md) - login into existing account
* [ente account get-token](ente_account_get-token.md) - Get token for an account for a specific app
* [ente account list](ente_account_list.md) - list configured accounts
* [ente account update](ente_account_update.md) - Update an existing account's export directory
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -1,6 +1,10 @@
## ente account add
Add a new account
login into existing account
### Synopsis
Use this command to add an existing account to cli. For creating a new account, use the mobile,web or desktop app
```
ente account add [flags]
@ -16,4 +20,4 @@ ente account add [flags]
* [ente account](ente_account.md) - Manage account settings
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -18,4 +18,4 @@ ente account get-token [flags]
* [ente account](ente_account.md) - Manage account settings
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -16,4 +16,4 @@ ente account list [flags]
* [ente account](ente_account.md) - Manage account settings
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -19,4 +19,4 @@ ente account update [flags]
* [ente account](ente_account.md) - Manage account settings
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -21,4 +21,4 @@ Commands for admin actions like disable or enabling 2fa, bumping up the storage
* [ente admin list-users](ente_admin_list-users.md) - List all users
* [ente admin update-subscription](ente_admin_update-subscription.md) - Update subscription for user
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -18,4 +18,4 @@ ente admin delete-user [flags]
* [ente admin](ente_admin.md) - Commands for admin actions
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -18,4 +18,4 @@ ente admin disable-2fa [flags]
* [ente admin](ente_admin.md) - Commands for admin actions
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -18,4 +18,4 @@ ente admin get-user-id [flags]
* [ente admin](ente_admin.md) - Commands for admin actions
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -17,4 +17,4 @@ ente admin list-users [flags]
* [ente admin](ente_admin.md) - Commands for admin actions
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -23,4 +23,4 @@ ente admin update-subscription [flags]
* [ente admin](ente_admin.md) - Commands for admin actions
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -13,4 +13,4 @@ Authenticator commands
* [ente](ente.md) - CLI tool for exporting your photos from ente.io
* [ente auth decrypt](ente_auth_decrypt.md) - Decrypt authenticator export
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -16,4 +16,4 @@ ente auth decrypt [input] [output] [flags]
* [ente auth](ente_auth.md) - Authenticator commands
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -16,4 +16,4 @@ ente export [flags]
* [ente](ente.md) - CLI tool for exporting your photos from ente.io
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -16,4 +16,4 @@ ente version [flags]
* [ente](ente.md) - CLI tool for exporting your photos from ente.io
###### Auto generated by spf13/cobra on 14-Mar-2024
###### Auto generated by spf13/cobra on 6-May-2024

View file

@ -59,7 +59,7 @@ func (c *ClICtrl) AddAccount(cxt context.Context) {
authResponse, flowErr = c.validateTOTP(cxt, authResponse)
}
if authResponse.EncryptedToken == "" || authResponse.KeyAttributes == nil {
panic("no encrypted token or keyAttributes")
log.Fatalf("missing key attributes or token.\nNote: Please use the mobile,web or desktop app to create a new account.\nIf you are trying to login to an existing account, report a bug.")
}
secretInfo, decErr := c.decryptAccSecretInfo(cxt, authResponse, keyEncKey)
if decErr != nil {

View file

@ -1,26 +1,36 @@
/* eslint-env node */
module.exports = {
root: true,
extends: [
"eslint:recommended",
"plugin:@typescript-eslint/eslint-recommended",
/* What we really want eventually */
// "plugin:@typescript-eslint/strict-type-checked",
// "plugin:@typescript-eslint/stylistic-type-checked",
"plugin:@typescript-eslint/strict-type-checked",
"plugin:@typescript-eslint/stylistic-type-checked",
],
/* Temporarily add a global
Enhancement: Remove me */
globals: {
NodeJS: "readonly",
},
plugins: ["@typescript-eslint"],
parser: "@typescript-eslint/parser",
parserOptions: {
project: true,
},
root: true,
ignorePatterns: [".eslintrc.js", "app", "out", "dist"],
env: {
es2022: true,
node: true,
},
rules: {
/* Allow numbers to be used in template literals */
"@typescript-eslint/restrict-template-expressions": [
"error",
{
allowNumber: true,
},
],
/* Allow void expressions as the entire body of an arrow function */
"@typescript-eslint/no-confusing-void-expression": [
"error",
{
ignoreArrowShorthand: true,
},
],
},
};
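
The overrides above relax two specific rules from the newly enabled `strict-type-checked` preset. Illustrative TypeScript (not from the repo) showing what each one permits:

```ts
// `restrict-template-expressions` with `allowNumber: true`: numbers may be
// interpolated directly, without an explicit String(...) conversion.
const count: number = 3;
const message = `uploaded ${count} files`; // OK with allowNumber

// `no-confusing-void-expression` with `ignoreArrowShorthand: true`: a call
// returning void may be the entire shorthand body of an arrow function.
const log = (s: string): void => console.log(s);
const onDone = () => log("done"); // OK with ignoreArrowShorthand

onDone();
console.log(message);
```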

View file

@ -1,55 +0,0 @@
name: Build/release
on:
push:
tags:
- v*
jobs:
release:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [macos-latest, ubuntu-latest, windows-latest]
steps:
- name: Check out Git repository
uses: actions/checkout@v3
with:
submodules: recursive
- name: Install Node.js, NPM and Yarn
uses: actions/setup-node@v3
with:
node-version: 20
- name: Prepare for app notarization
if: startsWith(matrix.os, 'macos')
# Import Apple API key for app notarization on macOS
run: |
mkdir -p ~/private_keys/
echo '${{ secrets.api_key }}' > ~/private_keys/AuthKey_${{ secrets.api_key_id }}.p8
- name: Install libarchive-tools for pacman build # Related https://github.com/electron-userland/electron-builder/issues/4181
if: startsWith(matrix.os, 'ubuntu')
run: sudo apt-get install libarchive-tools
- name: Ente Electron Builder Action
uses: ente-io/action-electron-builder@v1.0.0
with:
# GitHub token, automatically provided to the action
# (No need to define this secret in the repo settings)
github_token: ${{ secrets.github_token }}
# If the commit is tagged with a version (e.g. "v1.0.0"),
# release the app after building
release: ${{ startsWith(github.ref, 'refs/tags/v') }}
mac_certs: ${{ secrets.mac_certs }}
mac_certs_password: ${{ secrets.mac_certs_password }}
env:
# macOS notarization API key
API_KEY_ID: ${{ secrets.api_key_id }}
API_KEY_ISSUER_ID: ${{ secrets.api_key_issuer_id}}
USE_HARD_LINKS: false

View file

@ -0,0 +1,70 @@
name: "Draft release"
# Build the desktop/draft-release branch and update the existing draft release
# with the resultant artifacts.
#
# This is meant for doing tests that require the app to be signed and packaged.
# Such releases should not be published to end users.
#
# Workflow:
#
# 1. Push your changes to the "desktop/draft-release" branch on
# https://github.com/ente-io/ente.
#
# 2. Create a draft release with tag equal to the version in the `package.json`.
#
# 3. Trigger this workflow. You can trigger it multiple times, each time it'll
# just update the artifacts attached to the same draft.
#
# 4. Once testing is done delete the draft.
on:
# Trigger manually or `gh workflow run desktop-draft-release.yml`.
workflow_dispatch:
jobs:
release:
runs-on: macos-latest
defaults:
run:
working-directory: desktop
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
repository: ente-io/ente
ref: desktop/draft-release
submodules: recursive
- name: Setup node
uses: actions/setup-node@v4
with:
node-version: 20
- name: Install dependencies
run: yarn install
- name: Build
uses: ente-io/action-electron-builder@v1.0.0
with:
package_root: desktop
# GitHub token, automatically provided to the action
# (No need to define this secret in the repo settings)
github_token: ${{ secrets.GITHUB_TOKEN }}
# If the commit is tagged with a version (e.g. "v1.0.0"),
# release the app after building.
release: ${{ startsWith(github.ref, 'refs/tags/v') }}
mac_certs: ${{ secrets.MAC_CERTS }}
mac_certs_password: ${{ secrets.MAC_CERTS_PASSWORD }}
env:
# macOS notarization credentials key details
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_APP_SPECIFIC_PASSWORD:
${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
USE_HARD_LINKS: false

View file

@ -0,0 +1,83 @@
name: "Release"
# This will create a new draft release with public artifacts.
#
# Note that a release will only get created if there is an associated tag
# (GitHub releases need a corresponding tag).
#
# The canonical source for this action is in the repository where we keep the
# source code for the Ente Photos desktop app: https://github.com/ente-io/ente
#
# However, it actually lives and runs in the repository that we use for making
# releases: https://github.com/ente-io/photos-desktop
#
# We need two repositories because Electron updater currently doesn't work well
# with monorepos. For more details, see `docs/release.md`.
on:
push:
# Run when a tag matching the pattern "v*" is pushed.
#
# See: [Note: Testing release workflows that are triggered by tags].
tags:
- "v*"
jobs:
release:
runs-on: ${{ matrix.os }}
defaults:
run:
working-directory: desktop
strategy:
matrix:
os: [macos-latest, ubuntu-latest, windows-latest]
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
# Checkout the tag photosd-v1.x.x from the source code
# repository when we're invoked for tag v1.x.x on the releases
# repository.
repository: ente-io/ente
ref: photosd-${{ github.ref_name }}
submodules: recursive
- name: Setup node
uses: actions/setup-node@v4
with:
node-version: 20
- name: Install dependencies
run: yarn install
- name: Install libarchive-tools for pacman build
if: startsWith(matrix.os, 'ubuntu')
# See:
# https://github.com/electron-userland/electron-builder/issues/4181
run: sudo apt-get install libarchive-tools
- name: Build
uses: ente-io/action-electron-builder@v1.0.0
with:
package_root: desktop
# GitHub token, automatically provided to the action
# (No need to define this secret in the repo settings)
github_token: ${{ secrets.GITHUB_TOKEN }}
# If the commit is tagged with a version (e.g. "v1.0.0"),
# release the app after building.
release: ${{ startsWith(github.ref, 'refs/tags/v') }}
mac_certs: ${{ secrets.MAC_CERTS }}
mac_certs_password: ${{ secrets.MAC_CERTS_PASSWORD }}
env:
# macOS notarization credentials key details
APPLE_ID: ${{ secrets.APPLE_ID }}
APPLE_APP_SPECIFIC_PASSWORD:
${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
USE_HARD_LINKS: false

View file

@ -1,5 +1,13 @@
# CHANGELOG
## v1.7.0 (Unreleased)
v1.7 is a major rewrite to improve the security of our app. We have enabled
sandboxing and disabled node integration for the renderer process. All this
required restructuring our IPC mechanisms, which resulted in a lot of
under-the-hood changes. The outcome is a more secure app that also uses the latest and
greatest Electron recommendations.
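
A minimal sketch, assuming the `sandbox: true` preload setup visible in the `main.ts` diff later in this commit, of what the rewrite's renderer configuration amounts to (`contextIsolation` and `nodeIntegration` are Electron defaults, spelled out here only for clarity):

```ts
import path from "node:path";
import { app, BrowserWindow } from "electron/main";

const createWindow = () =>
    new BrowserWindow({
        webPreferences: {
            // The preload script is the renderer's only bridge to privileged APIs.
            preload: path.join(__dirname, "preload.js"),
            sandbox: true, // Chromium OS-level sandbox for the renderer
            contextIsolation: true, // default since Electron 12
            nodeIntegration: false, // default; no direct node access in the renderer
        },
    });

void app.whenReady().then(createWindow);
```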
## v1.6.63
### New

View file

@ -10,12 +10,6 @@ To know more about Ente, see [our main README](../README.md) or visit
## Building from source
> [!CAUTION]
>
> We're improving the security of the desktop app further by migrating to
> Electron's sandboxing and contextIsolation. These updates are still WIP and
> meanwhile the instructions below might not fully work on the main branch.
Fetch submodules
```sh

View file

@ -13,7 +13,7 @@ Electron embeds Chromium and Node.js in the generated app's binary. The
generated app thus consists of two separate processes - the _main_ process, and
a _renderer_ process.
- The _main_ process is runs the embedded node. This process can deal with the
- The _main_ process runs the embedded node. This process can deal with the
host OS - it is conceptually like a `node` repl running on your machine. In
our case, the TypeScript code (in the `src/` directory) gets transpiled by
`tsc` into JavaScript in the `build/app/` directory, which gets bundled in
@ -90,16 +90,19 @@ Some extra ones specific to the code here are:
Unix commands in our `package.json` scripts. This allows us to use the same
commands (like `ln`) across different platforms like Linux and Windows.
- [@tsconfig/recommended](https://github.com/tsconfig/bases) gives us a base
tsconfig for the Node.js version that our current Electron version uses.
## Functionality
### Format conversion
The main tool we use is for arbitrary conversions is FFMPEG. To bundle a
The main tool we use for arbitrary conversions is ffmpeg. To bundle a
(platform specific) static binary of ffmpeg with our app, we use
[ffmpeg-static](https://github.com/eugeneware/ffmpeg-static).
> There is a significant (~20x) speed difference between using the compiled
> FFMPEG binary and using the WASM one (that our renderer process already has).
> ffmpeg binary and using the wasm one (that our renderer process already has).
> Which is why we bundle it to speed up operations on the desktop app.
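
A hedged sketch of the pattern this section describes: resolve the platform-specific static binary that `ffmpeg-static` bundles, then spawn it from the main process. The `convertToMP4` helper and its flags are placeholders, not the app's actual conversion code:

```ts
import { execFile } from "node:child_process";
import { promisify } from "node:util";
import ffmpegPath from "ffmpeg-static"; // resolves to a platform-specific binary path

const run = promisify(execFile);

export const convertToMP4 = async (inputPath: string, outputPath: string) => {
    if (!ffmpegPath) throw new Error("no bundled ffmpeg binary for this platform");
    // -y: overwrite the output file if it already exists.
    await run(ffmpegPath, ["-i", inputPath, "-y", outputPath]);
};
```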
In addition, we also bundle a static Linux binary of imagemagick in our extra

View file

@ -1,43 +1,47 @@
## Releases
> [!NOTE]
>
> TODO(MR): This document needs to be audited and changed as we do the first
> release from this new monorepo.
Conceptually, the release is straightforward: We push a tag, a GitHub workflow
gets triggered that creates a draft release with artifacts built from that tag.
We then publish that release. The download links on our website, and existing
apps already know how to check for the latest GitHub release and update
accordingly.
The Github Action that builds the desktop binaries is triggered by pushing a tag
matching the pattern `photos-desktop-v1.2.3`. This value should match the
version in `package.json`.
The complication comes by the fact that Electron Updater (the mechanism that we
use for auto updates) doesn't work well with monorepos. So we need to keep a
separate (non-mono) repository just for doing releases.
So the process for doing a release would be.
- Source code lives here, in [ente-io/ente](https://github.com/ente-io/ente).
1. Create a new branch (can be named anything). On this branch, include your
changes.
- Releases are done from
[ente-io/photos-desktop](https://github.com/ente-io/photos-desktop).
2. Mention the changes in `CHANGELOG.md`.
## Workflow
3. Changing the `version` in `package.json` to `1.x.x`.
The workflow is:
4. Commit and push to remote
1. Finalize the changes in the source repo.
- Update the CHANGELOG.
- Update the version in `package.json`
- `git commit -m "[photosd] Release v1.2.3"`
- Open PR, merge into main.
2. Tag the merge commit with a tag matching the pattern `photosd-v1.2.3`, where
`1.2.3` is the version in `package.json`
```sh
git add package.json && git commit -m 'Release v1.x.x'
git tag v1.x.x
git push && git push --tags
git tag photosd-v1.x.x
git push origin photosd-v1.x.x
```
This by itself will already trigger a new release. The GitHub action will create
a new draft release that can then be used as described below.
3. Head over to the releases repository and run the trigger script, passing it
the tag _without_ the `photosd-` prefix.
To wrap up, we also need to merge back these changes into main. So for that,
```sh
./.github/trigger-release.sh v1.x.x
```
5. Open a PR for the branch that we're working on (where the above tag was
pushed from) to get it merged into main.
6. In this PR, also increase the version number for the next release train. That
is, suppose we just released `v4.0.1`. Then we'll change the version number
in main to `v4.0.2-next.0`. Each pre-release will modify the `next.0` part.
Finally, at the time of the next release, this'll become `v4.0.2`.
## Post build
The GitHub Action runs on Windows, Linux and macOS. It produces the artifacts
defined in the `build` value in `package.json`.
@ -46,29 +50,11 @@ defined in the `build` value in `package.json`.
- Linux - An AppImage, and 3 other packages (`.rpm`, `.deb`, `.pacman`)
- macOS - A universal DMG
Additionally, the GitHub action notarizes the macOS DMG. For this it needs
credentials provided via GitHub secrets.
Additionally, the GitHub action notarizes and signs the macOS DMG (for this it
uses credentials provided via GitHub secrets).
During the build the Sentry webpack plugin checks to see if SENTRY_AUTH_TOKEN is
defined. If so, it uploads the sourcemaps for the renderer process to Sentry
(For our GitHub action, the SENTRY_AUTH_TOKEN is defined as a GitHub secret).
The sourcemaps for the main (node) process are currently not sent to Sentry
(this works fine in practice since the node process files are not minified, we
only run `tsc`).
Once the build is done, a draft release with all these artifacts attached is
created. The build is idempotent, so if something goes wrong and we need to
re-run the GitHub action, just delete the draft release (if it got created) and
start a new run by pushing a new tag (if some code changes are required).
If no code changes are required, say the build failed for some transient network
or Sentry issue, we can even re-run the build from the GitHub Actions page. This
will re-trigger it for the same tag.
If everything goes well, we'll have a release on GitHub, and the corresponding
source maps for the renderer process uploaded to Sentry. There isn't anything
else to do:
To roll out the build, we need to publish the draft release. Thereafter,
everything is automated:
- The website automatically redirects to the latest release on GitHub when
people try to download.
@ -76,7 +62,7 @@ else to do:
- The file formats with support auto update (Windows `exe`, the Linux AppImage
and the macOS DMG) also check the latest GitHub release automatically to
download and apply the update (the rest of the formats don't support auto
updates).
updates yet).
- We're not putting the desktop app in other stores currently. It is available
as a `brew cask`, but we only had to open a PR to add the initial formula,
@ -87,6 +73,4 @@ else to do:
We can also publish the draft releases by checking the "pre-release" option.
Such releases don't cause any of the channels (our website, or the desktop app
auto updater, or brew) to be notified, instead these are useful for giving links
to pre-release builds to customers. Generally, in the version number for these
we'll add a label to the version, e.g. the "beta.x" in `1.x.x-beta.x`. This
should be done both in `package.json`, and what we tag the commit with.
to pre-release builds to customers.

View file

@ -1,8 +1,9 @@
{
"name": "ente",
"version": "1.6.63",
"version": "1.7.0-beta.0",
"private": true,
"description": "Desktop client for Ente Photos",
"repository": "github:ente-io/photos-desktop",
"author": "Ente <code@ente.io>",
"main": "app/main.js",
"scripts": {
@ -15,8 +16,11 @@
"dev-main": "tsc && electron app/main.js",
"dev-renderer": "cd ../web && yarn install && yarn dev:photos",
"postinstall": "electron-builder install-app-deps",
"lint": "yarn prettier --check . && eslint --ext .ts src",
"lint-fix": "yarn prettier --write . && eslint --fix --ext .ts src"
"lint": "yarn prettier --check --log-level warn . && eslint --ext .ts src && yarn tsc",
"lint-fix": "yarn prettier --write --log-level warn . && eslint --fix --ext .ts src && yarn tsc"
},
"resolutions": {
"jackspeak": "2.1.1"
},
"dependencies": {
"any-shell-escape": "^0.1",
@ -34,13 +38,14 @@
"onnxruntime-node": "^1.17"
},
"devDependencies": {
"@tsconfig/node20": "^20.1.4",
"@types/auto-launch": "^5.0",
"@types/ffmpeg-static": "^3.0",
"@typescript-eslint/eslint-plugin": "^7",
"@typescript-eslint/parser": "^7",
"concurrently": "^8",
"electron": "^29",
"electron-builder": "^24",
"electron": "^30",
"electron-builder": "25.0.0-alpha.6",
"electron-builder-notarize": "^1.5",
"eslint": "^8",
"prettier": "^3",

View file

@ -8,32 +8,30 @@
*
* https://www.electronjs.org/docs/latest/tutorial/process-model#the-main-process
*/
import { nativeImage } from "electron";
import { app, BrowserWindow, Menu, protocol, Tray } from "electron/main";
import { nativeImage, shell } from "electron/common";
import type { WebContents } from "electron/main";
import { BrowserWindow, Menu, Tray, app, protocol } from "electron/main";
import serveNextAt from "next-electron-server";
import { existsSync } from "node:fs";
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import {
addAllowOriginHeader,
handleDownloads,
handleExternalLinks,
} from "./main/init";
import { attachFSWatchIPCHandlers, attachIPCHandlers } from "./main/ipc";
import log, { initLogging } from "./main/log";
import { createApplicationMenu, createTrayContextMenu } from "./main/menu";
import { setupAutoUpdater } from "./main/services/app-update";
import autoLauncher from "./main/services/autoLauncher";
import { initWatcher } from "./main/services/chokidar";
import autoLauncher from "./main/services/auto-launcher";
import { createWatcher } from "./main/services/watch";
import { userPreferences } from "./main/stores/user-preferences";
import { migrateLegacyWatchStoreIfNeeded } from "./main/stores/watch";
import { registerStreamProtocol } from "./main/stream";
import { isDev } from "./main/util";
import { isDev } from "./main/utils/electron";
/**
* The URL where the renderer HTML is being served from.
*/
export const rendererURL = "ente://app";
const rendererURL = "ente://app";
/**
* We want to hide our window instead of closing it when the user presses the
@ -129,54 +127,22 @@ const registerPrivilegedSchemes = () => {
{
scheme: "stream",
privileges: {
// TODO(MR): Remove the commented bits if we don't end up
// needing them by the time the IPC refactoring is done.
// Prevent the insecure origin issues when fetching this
// secure: true,
// Allow the web fetch API in the renderer to use this scheme.
supportFetchAPI: true,
// Allow it to be used with video tags.
// stream: true,
},
},
]);
};
/**
* [Note: Increased disk cache for the desktop app]
*
* Set the "disk-cache-size" command line flag to ask the Chromium process to
* use a larger size for the caches that it keeps on disk. This allows us to use
* the web based caching mechanisms on both the web and the desktop app, just
* ask the embedded Chromium to be a bit more generous in disk usage when
* running as the desktop app.
*
* The size we provide is in bytes.
* https://www.electronjs.org/docs/latest/api/command-line-switches#--disk-cache-sizesize
*
* Note that increasing the disk cache size does not guarantee that Chromium
* will respect in verbatim, it uses its own heuristics atop this hint.
* https://superuser.com/questions/378991/what-is-chrome-default-cache-size-limit/1577693#1577693
*
* See also: [Note: Caching files].
*/
const increaseDiskCache = () =>
app.commandLine.appendSwitch(
"disk-cache-size",
`${5 * 1024 * 1024 * 1024}`, // 5 GB
);
/**
* Create an return the {@link BrowserWindow} that will form our app's UI.
*
* This window will show the HTML served from {@link rendererURL}.
*/
const createMainWindow = async () => {
const createMainWindow = () => {
// Create the main window. This'll show our web content.
const window = new BrowserWindow({
webPreferences: {
preload: path.join(app.getAppPath(), "preload.js"),
preload: path.join(__dirname, "preload.js"),
sandbox: true,
},
// The color to show in the window until the web content gets loaded.
@ -186,7 +152,7 @@ const createMainWindow = async () => {
show: false,
});
const wasAutoLaunched = await autoLauncher.wasAutoLaunched();
const wasAutoLaunched = autoLauncher.wasAutoLaunched();
if (wasAutoLaunched) {
// Don't automatically show the app's window if we were auto-launched.
// On macOS, also hide the dock icon on macOS.
@ -196,19 +162,19 @@ const createMainWindow = async () => {
window.maximize();
}
window.loadURL(rendererURL);
// Open the DevTools automatically when running in dev mode
if (isDev) window.webContents.openDevTools();
window.webContents.on("render-process-gone", (_, details) => {
log.error(`render-process-gone: ${details}`);
log.error(`render-process-gone: ${details.reason}`);
window.webContents.reload();
});
// "The unresponsive event is fired when Chromium detects that your
// webContents is not responding to input messages for > 30 seconds."
window.webContents.on("unresponsive", () => {
log.error(
"Main window's webContents are unresponsive, will restart the renderer process",
"MainWindow's webContents are unresponsive, will restart the renderer process",
);
window.webContents.forcefullyCrashRenderer();
});
@ -229,7 +195,7 @@ const createMainWindow = async () => {
});
window.on("show", () => {
if (process.platform == "darwin") app.dock.show();
if (process.platform == "darwin") void app.dock.show();
});
// Let ipcRenderer know when mainWindow is in the foreground so that it can
@ -239,6 +205,58 @@ const createMainWindow = async () => {
return window;
};
/**
* Automatically set the save path for user initiated downloads to the system's
* "downloads" directory instead of asking the user to select a save location.
*/
export const setDownloadPath = (webContents: WebContents) => {
webContents.session.on("will-download", (_, item) => {
item.setSavePath(
uniqueSavePath(app.getPath("downloads"), item.getFilename()),
);
});
};
const uniqueSavePath = (dirPath: string, fileName: string) => {
const { name, ext } = path.parse(fileName);
let savePath = path.join(dirPath, fileName);
let n = 1;
while (existsSync(savePath)) {
const suffixedName = [`${name}(${n})`, ext].filter((x) => x).join(".");
savePath = path.join(dirPath, suffixedName);
n++;
}
return savePath;
};
/**
* Allow opening external links, e.g. when the user clicks on the "Feature
* requests" button in the sidebar (to open our GitHub repository), or when they
* click the "Support" button to send an email to support.
*
* @param webContents The renderer to configure.
*/
export const allowExternalLinks = (webContents: WebContents) => {
// By default, if the user were to open a link, say
// https://github.com/ente-io/ente/discussions, then it would open a _new_
// BrowserWindow within our app.
//
// This is not the behaviour we want; what we want is to ask the system to
// handle the link (e.g. open the URL in the default browser, or if it is a
// mailto: link, then open the user's mail client).
//
// Returning `action` "deny" accomplishes this.
webContents.setWindowOpenHandler(({ url }) => {
if (!url.startsWith(rendererURL)) {
void shell.openExternal(url);
return { action: "deny" };
} else {
return { action: "allow" };
}
});
};
/**
* Add an icon for our app in the system tray.
*
@ -269,30 +287,61 @@ const setupTrayItem = (mainWindow: BrowserWindow) => {
/**
* Older versions of our app used to maintain a cache dir using the main
* process. This has been deprecated in favor of using a normal web cache.
* process. This has been removed in favor of cache on the web layer.
*
* See [Note: Increased disk cache for the desktop app]
* Delete the old cache dir if it exists.
*
* Delete the old cache dir if it exists. This code was added March 2024, and
* can be removed after some time once most people have upgraded to newer
* versions.
* This will happen in two phases. The cache had three subdirectories:
*
* - Two of them, "thumbs" and "files", will be removed now (v1.7.0, May 2024).
*
* - The third one, "face-crops" will be removed once we finish the face search
* changes. See: [Note: Legacy face crops].
*
* This migration code can be removed after some time once most people have
* upgraded to newer versions.
*/
const deleteLegacyDiskCacheDirIfExists = async () => {
// The existing code was passing "cache" as a parameter to getPath. This is
// incorrect if we go by the types - "cache" is not a valid value for the
// parameter to `app.getPath`.
const removeIfExists = async (dirPath: string) => {
if (existsSync(dirPath)) {
log.info(`Removing legacy disk cache from ${dirPath}`);
await fs.rm(dirPath, { recursive: true });
}
};
// [Note: Getting the cache path]
//
// It might be an issue in the types, since at runtime it seems to work. For
// example, on macOS I get `~/Library/Caches`.
// The existing code was passing "cache" as a parameter to getPath.
//
// However, "cache" is not a valid parameter to getPath. It works! (for
// example, on macOS I get `~/Library/Caches`), but it is intentionally not
// documented as part of the public API:
//
// - docs: remove "cache" from app.getPath
// https://github.com/electron/electron/pull/33509
//
// Irrespective, we replicate the original behaviour so that we get back the
// same path that the old got was getting.
// same path that the old code was getting.
//
// @ts-expect-error
// @ts-expect-error "cache" works but is not part of the public API.
const cacheDir = path.join(app.getPath("cache"), "ente");
if (existsSync(cacheDir)) {
log.info(`Removing legacy disk cache from ${cacheDir}`);
await fs.rm(cacheDir, { recursive: true });
await removeIfExists(path.join(cacheDir, "thumbs"));
await removeIfExists(path.join(cacheDir, "files"));
}
};
/**
* Older versions of our app used to keep a keys.json. It is not needed anymore,
* remove it if it exists.
*
* This code was added March 2024, and can be removed after some time once most
* people have upgraded to newer versions.
*/
const deleteLegacyKeysStoreIfExists = async () => {
const keysStore = path.join(app.getPath("userData"), "keys.json");
if (existsSync(keysStore)) {
log.info(`Removing legacy keys store at ${keysStore}`);
await fs.rm(keysStore);
}
};
@ -310,7 +359,7 @@ const main = () => {
// The order of the next two calls is important
setupRendererServer();
registerPrivilegedSchemes();
increaseDiskCache();
migrateLegacyWatchStoreIfNeeded();
app.on("second-instance", () => {
// Someone tried to run a second instance, we should focus our window.
@ -324,25 +373,35 @@ const main = () => {
// Emitted once, when Electron has finished initializing.
//
// Note that some Electron APIs can only be used after this event occurs.
app.on("ready", async () => {
mainWindow = await createMainWindow();
Menu.setApplicationMenu(await createApplicationMenu(mainWindow));
setupTrayItem(mainWindow);
attachIPCHandlers();
attachFSWatchIPCHandlers(initWatcher(mainWindow));
registerStreamProtocol();
if (!isDev) setupAutoUpdater(mainWindow);
handleDownloads(mainWindow);
handleExternalLinks(mainWindow);
addAllowOriginHeader(mainWindow);
void app.whenReady().then(() => {
void (async () => {
// Create window and prepare for the renderer.
mainWindow = createMainWindow();
attachIPCHandlers();
attachFSWatchIPCHandlers(createWatcher(mainWindow));
registerStreamProtocol();
try {
deleteLegacyDiskCacheDirIfExists();
} catch (e) {
// Log but otherwise ignore errors during non-critical startup
// actions.
log.error("Ignoring startup error", e);
}
// Configure the renderer's environment.
setDownloadPath(mainWindow.webContents);
allowExternalLinks(mainWindow.webContents);
// Start loading the renderer.
void mainWindow.loadURL(rendererURL);
// Continue on with the rest of the startup sequence.
Menu.setApplicationMenu(await createApplicationMenu(mainWindow));
setupTrayItem(mainWindow);
setupAutoUpdater(mainWindow);
try {
await deleteLegacyDiskCacheDirIfExists();
await deleteLegacyKeysStoreIfExists();
} catch (e) {
// Log but otherwise ignore errors during non-critical startup
// actions.
log.error("Ignoring startup error", e);
}
})();
});
// This is a macOS only event. Show our window when the user activates the

View file

@@ -1,54 +0,0 @@
import { dialog } from "electron/main";
import path from "node:path";
import type { ElectronFile } from "../types/ipc";
import { getDirFilePaths, getElectronFile } from "./services/fs";
import { getElectronFilesFromGoogleZip } from "./services/upload";
export const selectDirectory = async () => {
const result = await dialog.showOpenDialog({
properties: ["openDirectory"],
});
if (result.filePaths && result.filePaths.length > 0) {
return result.filePaths[0]?.split(path.sep)?.join(path.posix.sep);
}
};
export const showUploadFilesDialog = async () => {
const selectedFiles = await dialog.showOpenDialog({
properties: ["openFile", "multiSelections"],
});
const filePaths = selectedFiles.filePaths;
return await Promise.all(filePaths.map(getElectronFile));
};
export const showUploadDirsDialog = async () => {
const dir = await dialog.showOpenDialog({
properties: ["openDirectory", "multiSelections"],
});
let filePaths: string[] = [];
for (const dirPath of dir.filePaths) {
filePaths = [...filePaths, ...(await getDirFilePaths(dirPath))];
}
return await Promise.all(filePaths.map(getElectronFile));
};
export const showUploadZipDialog = async () => {
const selectedFiles = await dialog.showOpenDialog({
properties: ["openFile", "multiSelections"],
filters: [{ name: "Zip File", extensions: ["zip"] }],
});
const filePaths = selectedFiles.filePaths;
let files: ElectronFile[] = [];
for (const filePath of filePaths) {
files = [...files, ...(await getElectronFilesFromGoogleZip(filePath))];
}
return {
zipPaths: filePaths,
files,
};
};

View file

@@ -1,31 +0,0 @@
/**
* @file file system related functions exposed over the context bridge.
*/
import { existsSync } from "node:fs";
import fs from "node:fs/promises";
export const fsExists = (path: string) => existsSync(path);
export const fsRename = (oldPath: string, newPath: string) =>
fs.rename(oldPath, newPath);
export const fsMkdirIfNeeded = (dirPath: string) =>
fs.mkdir(dirPath, { recursive: true });
export const fsRmdir = (path: string) => fs.rmdir(path);
export const fsRm = (path: string) => fs.rm(path);
export const fsReadTextFile = async (filePath: string) =>
fs.readFile(filePath, "utf-8");
export const fsWriteFile = (path: string, contents: string) =>
fs.writeFile(path, contents);
/* TODO: Audit below this */
export const isFolder = async (dirPath: string) => {
if (!existsSync(dirPath)) return false;
const stats = await fs.stat(dirPath);
return stats.isDirectory();
};

View file

@@ -1,63 +0,0 @@
import { BrowserWindow, app, shell } from "electron";
import { existsSync } from "node:fs";
import path from "node:path";
import { rendererURL } from "../main";
export function handleDownloads(mainWindow: BrowserWindow) {
mainWindow.webContents.session.on("will-download", (_, item) => {
item.setSavePath(
getUniqueSavePath(item.getFilename(), app.getPath("downloads")),
);
});
}
export function handleExternalLinks(mainWindow: BrowserWindow) {
mainWindow.webContents.setWindowOpenHandler(({ url }) => {
if (!url.startsWith(rendererURL)) {
shell.openExternal(url);
return { action: "deny" };
} else {
return { action: "allow" };
}
});
}
export function getUniqueSavePath(filename: string, directory: string): string {
let uniqueFileSavePath = path.join(directory, filename);
const { name: filenameWithoutExtension, ext: extension } =
path.parse(filename);
let n = 0;
while (existsSync(uniqueFileSavePath)) {
n++;
// The filter is needed to remove an undefined extension from the array,
// else [`${fileName}`, undefined].join(".") would lead to `${fileName}.`
const fileNameWithNumberedSuffix = [
`${filenameWithoutExtension}(${n})`,
extension,
]
.filter((x) => x) // filters out undefined/null values
.join("");
uniqueFileSavePath = path.join(directory, fileNameWithNumberedSuffix);
}
return uniqueFileSavePath;
}
function lowerCaseHeaders(responseHeaders: Record<string, string[]>) {
const headers: Record<string, string[]> = {};
for (const key of Object.keys(responseHeaders)) {
headers[key.toLowerCase()] = responseHeaders[key];
}
return headers;
}
export function addAllowOriginHeader(mainWindow: BrowserWindow) {
mainWindow.webContents.session.webRequest.onHeadersReceived(
(details, callback) => {
details.responseHeaders = lowerCaseHeaders(details.responseHeaders);
details.responseHeaders["access-control-allow-origin"] = ["*"];
callback({
responseHeaders: details.responseHeaders,
});
},
);
}

View file

@@ -10,23 +10,12 @@
import type { FSWatcher } from "chokidar";
import { ipcMain } from "electron/main";
import type { ElectronFile, FILE_PATH_TYPE, FolderWatch } from "../types/ipc";
import {
selectDirectory,
showUploadDirsDialog,
showUploadFilesDialog,
showUploadZipDialog,
} from "./dialogs";
import {
fsExists,
fsMkdirIfNeeded,
fsReadTextFile,
fsRename,
fsRm,
fsRmdir,
fsWriteFile,
isFolder,
} from "./fs";
import type {
CollectionMapping,
FolderWatch,
PendingUploads,
ZipItem,
} from "../types/ipc";
import { logToDisk } from "./log";
import {
appVersion,
@@ -34,13 +23,28 @@ import {
updateAndRestart,
updateOnNextRestart,
} from "./services/app-update";
import { runFFmpegCmd } from "./services/ffmpeg";
import { getDirFiles } from "./services/fs";
import {
convertToJPEG,
generateImageThumbnail,
} from "./services/imageProcessor";
import { clipImageEmbedding, clipTextEmbedding } from "./services/ml-clip";
legacyFaceCrop,
openDirectory,
openLogDirectory,
selectDirectory,
} from "./services/dir";
import { ffmpegExec } from "./services/ffmpeg";
import {
fsExists,
fsIsDir,
fsMkdirIfNeeded,
fsReadTextFile,
fsRename,
fsRm,
fsRmdir,
fsWriteFile,
} from "./services/fs";
import { convertToJPEG, generateImageThumbnail } from "./services/image";
import {
clipImageEmbedding,
clipTextEmbeddingIfAvailable,
} from "./services/ml-clip";
import { detectFaces, faceEmbedding } from "./services/ml-face";
import {
clearStores,
@@ -48,19 +52,23 @@ import {
saveEncryptionKey,
} from "./services/store";
import {
getElectronFilesFromGoogleZip,
getPendingUploads,
setToUploadCollection,
setToUploadFiles,
clearPendingUploads,
listZipItems,
markUploadedFiles,
markUploadedZipItems,
pathOrZipItemSize,
pendingUploads,
setPendingUploads,
} from "./services/upload";
import {
addWatchMapping,
getWatchMappings,
removeWatchMapping,
updateWatchMappingIgnoredFiles,
updateWatchMappingSyncedFiles,
watchAdd,
watchFindFiles,
watchGet,
watchRemove,
watchReset,
watchUpdateIgnoredFiles,
watchUpdateSyncedFiles,
} from "./services/watch";
import { openDirectory, openLogDirectory } from "./util";
/**
* Listen for IPC events sent/invoked by the renderer process, and route them to
@@ -87,16 +95,20 @@ export const attachIPCHandlers = () => {
ipcMain.handle("appVersion", () => appVersion());
ipcMain.handle("openDirectory", (_, dirPath) => openDirectory(dirPath));
ipcMain.handle("openDirectory", (_, dirPath: string) =>
openDirectory(dirPath),
);
ipcMain.handle("openLogDirectory", () => openLogDirectory());
// See [Note: Catching exception during .send/.on]
ipcMain.on("logToDisk", (_, message) => logToDisk(message));
ipcMain.on("logToDisk", (_, message: string) => logToDisk(message));
ipcMain.handle("selectDirectory", () => selectDirectory());
ipcMain.on("clearStores", () => clearStores());
ipcMain.handle("saveEncryptionKey", (_, encryptionKey) =>
ipcMain.handle("saveEncryptionKey", (_, encryptionKey: string) =>
saveEncryptionKey(encryptionKey),
);
@@ -106,21 +118,23 @@ export const attachIPCHandlers = () => {
ipcMain.on("updateAndRestart", () => updateAndRestart());
ipcMain.on("updateOnNextRestart", (_, version) =>
ipcMain.on("updateOnNextRestart", (_, version: string) =>
updateOnNextRestart(version),
);
ipcMain.on("skipAppUpdate", (_, version) => skipAppUpdate(version));
ipcMain.on("skipAppUpdate", (_, version: string) => skipAppUpdate(version));
// - FS
ipcMain.handle("fsExists", (_, path) => fsExists(path));
ipcMain.handle("fsExists", (_, path: string) => fsExists(path));
ipcMain.handle("fsRename", (_, oldPath: string, newPath: string) =>
fsRename(oldPath, newPath),
);
ipcMain.handle("fsMkdirIfNeeded", (_, dirPath) => fsMkdirIfNeeded(dirPath));
ipcMain.handle("fsMkdirIfNeeded", (_, dirPath: string) =>
fsMkdirIfNeeded(dirPath),
);
ipcMain.handle("fsRmdir", (_, path: string) => fsRmdir(path));
@@ -132,27 +146,39 @@ export const attachIPCHandlers = () => {
fsWriteFile(path, contents),
);
ipcMain.handle("fsIsDir", (_, dirPath: string) => fsIsDir(dirPath));
// - Conversion
ipcMain.handle("convertToJPEG", (_, fileData, filename) =>
convertToJPEG(fileData, filename),
ipcMain.handle("convertToJPEG", (_, imageData: Uint8Array) =>
convertToJPEG(imageData),
);
ipcMain.handle(
"generateImageThumbnail",
(_, inputFile, maxDimension, maxSize) =>
generateImageThumbnail(inputFile, maxDimension, maxSize),
(
_,
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
maxDimension: number,
maxSize: number,
) => generateImageThumbnail(dataOrPathOrZipItem, maxDimension, maxSize),
);
ipcMain.handle(
"runFFmpegCmd",
"ffmpegExec",
(
_,
cmd: string[],
inputFile: File | ElectronFile,
outputFileName: string,
dontTimeout?: boolean,
) => runFFmpegCmd(cmd, inputFile, outputFileName, dontTimeout),
command: string[],
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
outputFileExtension: string,
timeoutMS: number,
) =>
ffmpegExec(
command,
dataOrPathOrZipItem,
outputFileExtension,
timeoutMS,
),
);
// - ML
@@ -161,8 +187,8 @@ export const attachIPCHandlers = () => {
clipImageEmbedding(jpegImageData),
);
ipcMain.handle("clipTextEmbedding", (_, text: string) =>
clipTextEmbedding(text),
ipcMain.handle("clipTextEmbeddingIfAvailable", (_, text: string) =>
clipTextEmbeddingIfAvailable(text),
);
ipcMain.handle("detectFaces", (_, input: Float32Array) =>
@@ -173,39 +199,37 @@ export const attachIPCHandlers = () => {
faceEmbedding(input),
);
// - File selection
ipcMain.handle("selectDirectory", () => selectDirectory());
ipcMain.handle("showUploadFilesDialog", () => showUploadFilesDialog());
ipcMain.handle("showUploadDirsDialog", () => showUploadDirsDialog());
ipcMain.handle("showUploadZipDialog", () => showUploadZipDialog());
// - FS Legacy
ipcMain.handle("isFolder", (_, dirPath: string) => isFolder(dirPath));
ipcMain.handle("legacyFaceCrop", (_, faceID: string) =>
legacyFaceCrop(faceID),
);
// - Upload
ipcMain.handle("getPendingUploads", () => getPendingUploads());
ipcMain.handle("listZipItems", (_, zipPath: string) =>
listZipItems(zipPath),
);
ipcMain.handle("pathOrZipItemSize", (_, pathOrZipItem: string | ZipItem) =>
pathOrZipItemSize(pathOrZipItem),
);
ipcMain.handle("pendingUploads", () => pendingUploads());
ipcMain.handle("setPendingUploads", (_, pendingUploads: PendingUploads) =>
setPendingUploads(pendingUploads),
);
ipcMain.handle(
"setToUploadFiles",
(_, type: FILE_PATH_TYPE, filePaths: string[]) =>
setToUploadFiles(type, filePaths),
"markUploadedFiles",
(_, paths: PendingUploads["filePaths"]) => markUploadedFiles(paths),
);
ipcMain.handle("getElectronFilesFromGoogleZip", (_, filePath: string) =>
getElectronFilesFromGoogleZip(filePath),
ipcMain.handle(
"markUploadedZipItems",
(_, items: PendingUploads["zipItems"]) => markUploadedZipItems(items),
);
ipcMain.handle("setToUploadCollection", (_, collectionName: string) =>
setToUploadCollection(collectionName),
);
ipcMain.handle("getDirFiles", (_, dirPath: string) => getDirFiles(dirPath));
ipcMain.handle("clearPendingUploads", () => clearPendingUploads());
};
/**
@@ -213,42 +237,38 @@ export const attachIPCHandlers = () => {
* watch folder functionality.
*
* It gets passed a {@link FSWatcher} instance which it can then forward to the
* actual handlers.
* actual handlers if they need access to it to do their thing.
*/
export const attachFSWatchIPCHandlers = (watcher: FSWatcher) => {
// - Watch
ipcMain.handle(
"addWatchMapping",
(
_,
collectionName: string,
folderPath: string,
uploadStrategy: number,
) =>
addWatchMapping(
watcher,
collectionName,
folderPath,
uploadStrategy,
),
);
ipcMain.handle("removeWatchMapping", (_, folderPath: string) =>
removeWatchMapping(watcher, folderPath),
);
ipcMain.handle("getWatchMappings", () => getWatchMappings());
ipcMain.handle("watchGet", () => watchGet(watcher));
ipcMain.handle(
"updateWatchMappingSyncedFiles",
(_, folderPath: string, files: FolderWatch["syncedFiles"]) =>
updateWatchMappingSyncedFiles(folderPath, files),
"watchAdd",
(_, folderPath: string, collectionMapping: CollectionMapping) =>
watchAdd(watcher, folderPath, collectionMapping),
);
ipcMain.handle("watchRemove", (_, folderPath: string) =>
watchRemove(watcher, folderPath),
);
ipcMain.handle(
"updateWatchMappingIgnoredFiles",
(_, folderPath: string, files: FolderWatch["ignoredFiles"]) =>
updateWatchMappingIgnoredFiles(folderPath, files),
"watchUpdateSyncedFiles",
(_, syncedFiles: FolderWatch["syncedFiles"], folderPath: string) =>
watchUpdateSyncedFiles(syncedFiles, folderPath),
);
ipcMain.handle(
"watchUpdateIgnoredFiles",
(_, ignoredFiles: FolderWatch["ignoredFiles"], folderPath: string) =>
watchUpdateIgnoredFiles(ignoredFiles, folderPath),
);
ipcMain.handle("watchFindFiles", (_, folderPath: string) =>
watchFindFiles(folderPath),
);
ipcMain.handle("watchReset", () => watchReset(watcher));
};
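For context, a hypothetical preload-side counterpart to one of these handlers might look like the following sketch (the actual preload bridge lives elsewhere in the codebase, and its names may differ):

import { ipcRenderer } from "electron/renderer";
import type { CollectionMapping } from "../types/ipc";

// Renderer-facing wrapper that invokes the "watchAdd" handler registered
// above, typically exposed to the web layer over the context bridge.
const watchAdd = (folderPath: string, collectionMapping: CollectionMapping) =>
    ipcRenderer.invoke("watchAdd", folderPath, collectionMapping);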

View file

@@ -1,15 +1,15 @@
import log from "electron-log";
import util from "node:util";
import { isDev } from "./util";
import { isDev } from "./utils/electron";
/**
* Initialize logging in the main process.
*
* This will set our underlying logger up to log to a file named `ente.log`,
*
* - on Linux at ~/.config/ente/logs/main.log
* - on macOS at ~/Library/Logs/ente/main.log
* - on Windows at %USERPROFILE%\AppData\Roaming\ente\logs\main.log
* - on Linux at ~/.config/ente/logs/ente.log
* - on macOS at ~/Library/Logs/ente/ente.log
* - on Windows at %USERPROFILE%\AppData\Roaming\ente\logs\ente.log
*
* On dev builds, it will also log to the console.
*/
@@ -65,7 +65,7 @@ const logError_ = (message: string) => {
if (isDev) console.error(`[error] ${message}`);
};
const logInfo = (...params: any[]) => {
const logInfo = (...params: unknown[]) => {
const message = params
.map((p) => (typeof p == "string" ? p : util.inspect(p)))
.join(" ");
@@ -73,7 +73,7 @@ const logInfo = (...params: any[]) => {
if (isDev) console.log(`[info] ${message}`);
};
const logDebug = (param: () => any) => {
const logDebug = (param: () => unknown) => {
if (isDev) {
const p = param();
console.log(`[debug] ${typeof p == "string" ? p : util.inspect(p)}`);

View file

@@ -7,9 +7,9 @@ import {
} from "electron";
import { allowWindowClose } from "../main";
import { forceCheckForAppUpdates } from "./services/app-update";
import autoLauncher from "./services/autoLauncher";
import autoLauncher from "./services/auto-launcher";
import { openLogDirectory } from "./services/dir";
import { userPreferences } from "./stores/user-preferences";
import { openLogDirectory } from "./util";
/** Create and return the entries in the app's main menu bar */
export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
@@ -18,7 +18,7 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
// Whenever the menu is redrawn the current value of these variables is used
// to set the checked state for the various settings checkboxes.
let isAutoLaunchEnabled = await autoLauncher.isEnabled();
let shouldHideDockIcon = userPreferences.get("hideDockIcon");
let shouldHideDockIcon = !!userPreferences.get("hideDockIcon");
const macOSOnly = (options: MenuItemConstructorOptions[]) =>
process.platform == "darwin" ? options : [];
@@ -26,12 +26,12 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
const handleCheckForUpdates = () => forceCheckForAppUpdates(mainWindow);
const handleViewChangelog = () =>
shell.openExternal(
void shell.openExternal(
"https://github.com/ente-io/ente/blob/main/desktop/CHANGELOG.md",
);
const toggleAutoLaunch = () => {
autoLauncher.toggleAutoLaunch();
void autoLauncher.toggleAutoLaunch();
isAutoLaunchEnabled = !isAutoLaunchEnabled;
};
@@ -42,13 +42,15 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
shouldHideDockIcon = !shouldHideDockIcon;
};
const handleHelp = () => shell.openExternal("https://help.ente.io/photos/");
const handleHelp = () =>
void shell.openExternal("https://help.ente.io/photos/");
const handleSupport = () => shell.openExternal("mailto:support@ente.io");
const handleSupport = () =>
void shell.openExternal("mailto:support@ente.io");
const handleBlog = () => shell.openExternal("https://ente.io/blog/");
const handleBlog = () => void shell.openExternal("https://ente.io/blog/");
const handleViewLogs = openLogDirectory;
const handleViewLogs = () => void openLogDirectory();
return Menu.buildFromTemplate([
{
@@ -124,11 +126,11 @@ export const createApplicationMenu = async (mainWindow: BrowserWindow) => {
submenu: [
{
role: "startSpeaking",
label: "start speaking",
label: "Start Speaking",
},
{
role: "stopSpeaking",
label: "stop speaking",
label: "Stop Speaking",
},
],
},

View file

@@ -1,19 +0,0 @@
export function isPlatform(platform: "mac" | "windows" | "linux") {
return getPlatform() === platform;
}
export function getPlatform(): "mac" | "windows" | "linux" {
switch (process.platform) {
case "aix":
case "freebsd":
case "linux":
case "openbsd":
case "android":
return "linux";
case "darwin":
case "sunos":
return "mac";
case "win32":
return "windows";
}
}

View file

@@ -1,19 +1,28 @@
import { compareVersions } from "compare-versions";
import { app, BrowserWindow } from "electron";
import { default as electronLog } from "electron-log";
import { autoUpdater } from "electron-updater";
import { app, BrowserWindow } from "electron/main";
import { allowWindowClose } from "../../main";
import { AppUpdateInfo } from "../../types/ipc";
import { AppUpdate } from "../../types/ipc";
import log from "../log";
import { userPreferences } from "../stores/user-preferences";
import { isDev } from "../utils/electron";
export const setupAutoUpdater = (mainWindow: BrowserWindow) => {
autoUpdater.logger = electronLog;
autoUpdater.autoDownload = false;
// Skip checking for updates automatically in dev builds. Installing an
// update would fail anyway since (at least on macOS) the auto update
// process requires signed builds.
//
// Even though this check is skipped on app start, we can still use the
// "Check for updates..." menu option to trigger an update check in dev builds.
if (isDev) return;
const oneDay = 1 * 24 * 60 * 60 * 1000;
setInterval(() => checkForUpdatesAndNotify(mainWindow), oneDay);
checkForUpdatesAndNotify(mainWindow);
setInterval(() => void checkForUpdatesAndNotify(mainWindow), oneDay);
void checkForUpdatesAndNotify(mainWindow);
};
/**
@@ -22,7 +31,7 @@ export const setupAutoUpdater = (mainWindow: BrowserWindow) => {
export const forceCheckForAppUpdates = (mainWindow: BrowserWindow) => {
userPreferences.delete("skipAppVersion");
userPreferences.delete("muteUpdateNotificationVersion");
checkForUpdatesAndNotify(mainWindow);
void checkForUpdatesAndNotify(mainWindow);
};
const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => {
@@ -36,39 +45,42 @@ const checkForUpdatesAndNotify = async (mainWindow: BrowserWindow) => {
log.debug(() => `Update check found version ${version}`);
if (!version)
throw new Error("Unexpected empty version obtained from auto-updater");
if (compareVersions(version, app.getVersion()) <= 0) {
log.debug(() => "Skipping update, already at latest version");
return;
}
if (version === userPreferences.get("skipAppVersion")) {
if (version == userPreferences.get("skipAppVersion")) {
log.info(`User chose to skip version ${version}`);
return;
}
const mutedVersion = userPreferences.get("muteUpdateNotificationVersion");
if (version === mutedVersion) {
if (version == mutedVersion) {
log.info(`User has muted update notifications for version ${version}`);
return;
}
const showUpdateDialog = (updateInfo: AppUpdateInfo) =>
mainWindow.webContents.send("appUpdateAvailable", updateInfo);
const showUpdateDialog = (update: AppUpdate) =>
mainWindow.webContents.send("appUpdateAvailable", update);
log.debug(() => "Attempting auto update");
autoUpdater.downloadUpdate();
await autoUpdater.downloadUpdate();
let timeout: NodeJS.Timeout;
let timeoutId: ReturnType<typeof setTimeout>;
const fiveMinutes = 5 * 60 * 1000;
autoUpdater.on("update-downloaded", () => {
timeout = setTimeout(
timeoutId = setTimeout(
() => showUpdateDialog({ autoUpdatable: true, version }),
fiveMinutes,
);
});
autoUpdater.on("error", (error) => {
clearTimeout(timeout);
clearTimeout(timeoutId);
log.error("Auto update failed", error);
showUpdateDialog({ autoUpdatable: false, version });
});

View file

@@ -0,0 +1,50 @@
import AutoLaunch from "auto-launch";
import { app } from "electron/main";
class AutoLauncher {
/**
* This property will be set and used on Linux and Windows. On macOS, we
* instead use the OS login item settings API.
*/
private autoLaunch?: AutoLaunch;
constructor() {
if (process.platform != "darwin") {
this.autoLaunch = new AutoLaunch({
name: "ente",
isHidden: true,
});
}
}
async isEnabled() {
const autoLaunch = this.autoLaunch;
if (autoLaunch) {
return await autoLaunch.isEnabled();
} else {
return app.getLoginItemSettings().openAtLogin;
}
}
async toggleAutoLaunch() {
const wasEnabled = await this.isEnabled();
const autoLaunch = this.autoLaunch;
if (autoLaunch) {
if (wasEnabled) await autoLaunch.disable();
else await autoLaunch.enable();
} else {
const openAtLogin = !wasEnabled;
app.setLoginItemSettings({ openAtLogin });
}
}
wasAutoLaunched() {
if (this.autoLaunch) {
return app.commandLine.hasSwitch("hidden");
} else {
return app.getLoginItemSettings().openAtLogin;
}
}
}
export default new AutoLauncher();
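A hypothetical consumer of this class (illustrative; not part of this diff): deciding at startup whether the window should stay hidden because the app was launched at login.

import autoLauncher from "./services/auto-launcher";

// If the OS auto launched us at login, keep the window hidden instead of
// bringing it to the front.
const startedHidden = autoLauncher.wasAutoLaunched();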

View file

@@ -1,41 +0,0 @@
import { AutoLauncherClient } from "../../types/main";
import { isPlatform } from "../platform";
import linuxAndWinAutoLauncher from "./autoLauncherClients/linuxAndWinAutoLauncher";
import macAutoLauncher from "./autoLauncherClients/macAutoLauncher";
class AutoLauncher {
private client: AutoLauncherClient;
async init() {
if (isPlatform("linux") || isPlatform("windows")) {
this.client = linuxAndWinAutoLauncher;
} else {
this.client = macAutoLauncher;
}
// Migrate old Windows auto launch settings from the mac auto launcher to the linux and windows auto launcher
if (isPlatform("windows") && (await macAutoLauncher.isEnabled())) {
await macAutoLauncher.toggleAutoLaunch();
await linuxAndWinAutoLauncher.toggleAutoLaunch();
}
}
async isEnabled() {
if (!this.client) {
await this.init();
}
return await this.client.isEnabled();
}
async toggleAutoLaunch() {
if (!this.client) {
await this.init();
}
await this.client.toggleAutoLaunch();
}
async wasAutoLaunched() {
if (!this.client) {
await this.init();
}
return this.client.wasAutoLaunched();
}
}
export default new AutoLauncher();

View file

@@ -1,39 +0,0 @@
import AutoLaunch from "auto-launch";
import { app } from "electron";
import { AutoLauncherClient } from "../../../types/main";
const LAUNCHED_AS_HIDDEN_FLAG = "hidden";
class LinuxAndWinAutoLauncher implements AutoLauncherClient {
private instance: AutoLaunch;
constructor() {
const autoLauncher = new AutoLaunch({
name: "ente",
isHidden: true,
});
this.instance = autoLauncher;
}
async isEnabled() {
return await this.instance.isEnabled();
}
async toggleAutoLaunch() {
if (await this.isEnabled()) {
await this.disableAutoLaunch();
} else {
await this.enableAutoLaunch();
}
}
async wasAutoLaunched() {
return app.commandLine.hasSwitch(LAUNCHED_AS_HIDDEN_FLAG);
}
private async disableAutoLaunch() {
await this.instance.disable();
}
private async enableAutoLaunch() {
await this.instance.enable();
}
}
export default new LinuxAndWinAutoLauncher();

View file

@@ -1,28 +0,0 @@
import { app } from "electron";
import { AutoLauncherClient } from "../../../types/main";
class MacAutoLauncher implements AutoLauncherClient {
async isEnabled() {
return app.getLoginItemSettings().openAtLogin;
}
async toggleAutoLaunch() {
if (await this.isEnabled()) {
this.disableAutoLaunch();
} else {
this.enableAutoLaunch();
}
}
async wasAutoLaunched() {
return app.getLoginItemSettings().wasOpenedAtLogin;
}
private disableAutoLaunch() {
app.setLoginItemSettings({ openAtLogin: false });
}
private enableAutoLaunch() {
app.setLoginItemSettings({ openAtLogin: true });
}
}
export default new MacAutoLauncher();

View file

@@ -1,45 +0,0 @@
import chokidar from "chokidar";
import { BrowserWindow } from "electron";
import path from "path";
import log from "../log";
import { getElectronFile } from "./fs";
import { getWatchMappings } from "./watch";
/**
* Convert a file system {@link filePath} that uses the local system specific
* path separators into a path that uses POSIX file separators.
*/
const normalizeToPOSIX = (filePath: string) =>
filePath.split(path.sep).join(path.posix.sep);
export function initWatcher(mainWindow: BrowserWindow) {
const mappings = getWatchMappings();
const folderPaths = mappings.map((mapping) => {
return mapping.folderPath;
});
const watcher = chokidar.watch(folderPaths, {
awaitWriteFinish: true,
});
watcher
.on("add", async (path) => {
mainWindow.webContents.send(
"watch-add",
await getElectronFile(normalizeToPOSIX(path)),
);
})
.on("unlink", (path) => {
mainWindow.webContents.send("watch-unlink", normalizeToPOSIX(path));
})
.on("unlinkDir", (path) => {
mainWindow.webContents.send(
"watch-unlink-dir",
normalizeToPOSIX(path),
);
})
.on("error", (error) => {
log.error("Error while watching files", error);
});
return watcher;
}

View file

@@ -0,0 +1,89 @@
import { shell } from "electron/common";
import { app, dialog } from "electron/main";
import { existsSync } from "fs";
import fs from "node:fs/promises";
import path from "node:path";
import { posixPath } from "../utils/electron";
export const selectDirectory = async () => {
const result = await dialog.showOpenDialog({
properties: ["openDirectory"],
});
const dirPath = result.filePaths[0];
return dirPath ? posixPath(dirPath) : undefined;
};
/**
* Open the given {@link dirPath} in the system's folder viewer.
*
* For example, on macOS this'll open {@link dirPath} in Finder.
*/
export const openDirectory = async (dirPath: string) => {
// We need to use `path.normalize` because `shell.openPath` does not
// support POSIX paths; it needs a platform specific path:
// https://github.com/electron/electron/issues/28831#issuecomment-826370589
const res = await shell.openPath(path.normalize(dirPath));
// `shell.openPath` resolves with a string containing the error message
// corresponding to the failure if a failure occurred, otherwise "".
if (res) throw new Error(`Failed to open directory ${dirPath}: ${res}`);
};
/**
* Open the app's log directory in the system's folder viewer.
*
* @see {@link openDirectory}
*/
export const openLogDirectory = () => openDirectory(logDirectoryPath());
/**
* Return the path where the logs for the app are saved.
*
* [Note: Electron app paths]
*
* There are three paths we need to be aware of usually.
*
* First is the "appData". We can obtain this with `app.getPath("appData")`.
* This is the per-user application data directory. This is usually the following:
*
* - Windows: `%APPDATA%`, e.g. `C:\Users\<username>\AppData\Roaming`
* - Linux: `~/.config`
* - macOS: `~/Library/Application Support`
*
* Now, if we suffix the app's name onto the appData directory, we get the
* "userData" directory. This is the **primary** place applications are meant to
* store the user's data, e.g. various configuration files and saved state.
*
* During development, our app name is "Electron", so this'd be, for example,
* `~/Library/Application Support/Electron` if we run using `yarn dev`. For the
* packaged production app, our app name is "ente", so this would be:
*
* - Windows: `%APPDATA%\ente`, e.g. `C:\Users\<username>\AppData\Roaming\ente`
* - Linux: `~/.config/ente`
* - macOS: `~/Library/Application Support/ente`
*
* Note that Chromium also stores the browser state, e.g. localStorage or disk
* caches, in userData.
*
* Finally, there is the "logs" directory. This is not within "appData" but has
* a slightly different OS specific path. Since our log file is named
* "ente.log", it can be found at:
*
* - macOS: ~/Library/Logs/ente/ente.log (production)
* - macOS: ~/Library/Logs/Electron/ente.log (dev)
*
* https://www.electronjs.org/docs/latest/api/app
*/
const logDirectoryPath = () => app.getPath("logs");
/**
* See: [Note: Legacy face crops]
*/
export const legacyFaceCrop = async (
faceID: string,
): Promise<Uint8Array | undefined> => {
// See: [Note: Getting the cache path]
// @ts-expect-error "cache" works but is not part of the public API.
const cacheDir = path.join(app.getPath("cache"), "ente");
const filePath = path.join(cacheDir, "face-crops", faceID);
return existsSync(filePath) ? await fs.readFile(filePath) : undefined;
};

View file

@@ -1,33 +1,37 @@
import pathToFfmpeg from "ffmpeg-static";
import { existsSync } from "node:fs";
import fs from "node:fs/promises";
import { ElectronFile } from "../../types/ipc";
import type { ZipItem } from "../../types/ipc";
import log from "../log";
import { writeStream } from "../stream";
import { generateTempFilePath, getTempDirPath } from "../temp";
import { execAsync } from "../util";
import { ensure, withTimeout } from "../utils/common";
import { execAsync } from "../utils/electron";
import {
deleteTempFile,
makeFileForDataOrPathOrZipItem,
makeTempFilePath,
} from "../utils/temp";
const INPUT_PATH_PLACEHOLDER = "INPUT";
const FFMPEG_PLACEHOLDER = "FFMPEG";
const OUTPUT_PATH_PLACEHOLDER = "OUTPUT";
/* Duplicated in the web app's code (used by the WASM FFmpeg implementation). */
const ffmpegPathPlaceholder = "FFMPEG";
const inputPathPlaceholder = "INPUT";
const outputPathPlaceholder = "OUTPUT";
/**
* Run a ffmpeg command
* Run a FFmpeg command
*
* [Note: FFMPEG in Electron]
* [Note: FFmpeg in Electron]
*
* There is a wasm build of FFMPEG, but that is currently 10-20 times slower
* There is a wasm build of FFmpeg, but that is currently 10-20 times slower
* than the native build. That is slow enough to be unusable for our purposes.
* https://ffmpegwasm.netlify.app/docs/performance
*
* So the alternative is to bundle a ffmpeg binary with our app. e.g.
* So the alternative is to bundle a FFmpeg executable binary with our app. e.g.
*
* yarn add fluent-ffmpeg ffmpeg-static ffprobe-static
*
* (we only use ffmpeg-static, the rest are mentioned for completeness' sake).
*
* Interestingly, Electron already bundles an ffmpeg library (it comes from the
* ffmpeg fork maintained by Chromium).
* Interestingly, Electron already bundles a binary FFmpeg library (it comes
* from the ffmpeg fork maintained by Chromium).
* https://chromium.googlesource.com/chromium/third_party/ffmpeg
* https://stackoverflow.com/questions/53963672/what-version-of-ffmpeg-is-bundled-inside-electron
*
@@ -36,126 +40,75 @@ const OUTPUT_PATH_PLACEHOLDER = "OUTPUT";
* $ file ente.app/Contents/Frameworks/Electron\ Framework.framework/Versions/Current/Libraries/libffmpeg.dylib
* .../libffmpeg.dylib: Mach-O 64-bit dynamically linked shared library arm64
*
* I'm not sure if our code is supposed to be able to use it, and how.
* But I'm not sure if our code is supposed to be able to use it, and how.
*/
export async function runFFmpegCmd(
cmd: string[],
inputFile: File | ElectronFile,
outputFileName: string,
dontTimeout?: boolean,
) {
let inputFilePath = null;
let createdTempInputFile = null;
export const ffmpegExec = async (
command: string[],
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
outputFileExtension: string,
timeoutMS: number,
): Promise<Uint8Array> => {
// TODO (MR): This currently copies files for both input (when
// dataOrPathOrZipItem is data) and output. This needs to be tested with
// extremely large video files when invoked downstream of `convertToMP4` in
// the web code.
const {
path: inputFilePath,
isFileTemporary: isInputFileTemporary,
writeToTemporaryFile: writeToTemporaryInputFile,
} = await makeFileForDataOrPathOrZipItem(dataOrPathOrZipItem);
const outputFilePath = await makeTempFilePath(outputFileExtension);
try {
if (!existsSync(inputFile.path)) {
const tempFilePath = await generateTempFilePath(inputFile.name);
await writeStream(tempFilePath, await inputFile.stream());
inputFilePath = tempFilePath;
createdTempInputFile = true;
} else {
inputFilePath = inputFile.path;
}
const outputFileData = await runFFmpegCmd_(
cmd,
await writeToTemporaryInputFile();
const cmd = substitutePlaceholders(
command,
inputFilePath,
outputFileName,
dontTimeout,
outputFilePath,
);
return new File([outputFileData], outputFileName);
if (timeoutMS) await withTimeout(execAsync(cmd), timeoutMS);
else await execAsync(cmd);
return fs.readFile(outputFilePath);
} finally {
if (createdTempInputFile) {
await deleteTempFile(inputFilePath);
try {
if (isInputFileTemporary) await deleteTempFile(inputFilePath);
await deleteTempFile(outputFilePath);
} catch (e) {
log.error("Could not clean up temp files", e);
}
}
}
};
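The `makeFileForDataOrPathOrZipItem` helper used above comes from "../utils/temp". A rough sketch of the shape it returns (an assumption for illustration; the actual implementation may differ):

interface FileForDataOrPathOrZipItem {
    // A path to a file on disk containing the input. For string inputs this
    // is the input path itself; for in-memory data or zip entries it is a
    // freshly allocated temporary file.
    path: string;
    // True if `path` is a temporary file that the caller must delete.
    isFileTemporary: boolean;
    // Write the data (if any) to the temporary file. A no-op for plain paths.
    writeToTemporaryFile: () => Promise<void>;
}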
export async function runFFmpegCmd_(
cmd: string[],
const substitutePlaceholders = (
command: string[],
inputFilePath: string,
outputFileName: string,
dontTimeout = false,
) {
let tempOutputFilePath: string;
try {
tempOutputFilePath = await generateTempFilePath(outputFileName);
cmd = cmd.map((cmdPart) => {
if (cmdPart === FFMPEG_PLACEHOLDER) {
return ffmpegBinaryPath();
} else if (cmdPart === INPUT_PATH_PLACEHOLDER) {
return inputFilePath;
} else if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
return tempOutputFilePath;
} else {
return cmdPart;
}
});
if (dontTimeout) {
await execAsync(cmd);
outputFilePath: string,
) =>
command.map((segment) => {
if (segment == ffmpegPathPlaceholder) {
return ffmpegBinaryPath();
} else if (segment == inputPathPlaceholder) {
return inputFilePath;
} else if (segment == outputPathPlaceholder) {
return outputFilePath;
} else {
await promiseWithTimeout(execAsync(cmd), 30 * 1000);
return segment;
}
if (!existsSync(tempOutputFilePath)) {
throw new Error("ffmpeg output file not found");
}
const outputFile = await fs.readFile(tempOutputFilePath);
return new Uint8Array(outputFile);
} catch (e) {
log.error("FFMPEG command failed", e);
throw e;
} finally {
await deleteTempFile(tempOutputFilePath);
}
}
});
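As an illustration, a hypothetical command template and invocation (the real templates live in the web app's code; this one is made up for the example) might look like:

// Extract the first video frame as a JPEG, inside some async context.
const command = [
    ffmpegPathPlaceholder,
    "-i",
    inputPathPlaceholder,
    "-frames:v",
    "1",
    outputPathPlaceholder,
];
const jpegBytes = await ffmpegExec(command, "/path/to/video.mp4", "jpeg", 30 * 1000);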
/**
* Return the path to the `ffmpeg` binary.
*
* At runtime, the ffmpeg binary is present in a path like (macOS example):
* At runtime, the FFmpeg binary is present in a path like (macOS example):
* `ente.app/Contents/Resources/app.asar.unpacked/node_modules/ffmpeg-static/ffmpeg`
*/
const ffmpegBinaryPath = () => {
// This substitution of app.asar by app.asar.unpacked is suggested by the
// ffmpeg-static library author themselves:
// https://github.com/eugeneware/ffmpeg-static/issues/16
return pathToFfmpeg.replace("app.asar", "app.asar.unpacked");
};
export async function writeTempFile(fileStream: Uint8Array, fileName: string) {
const tempFilePath = await generateTempFilePath(fileName);
await fs.writeFile(tempFilePath, fileStream);
return tempFilePath;
}
export async function deleteTempFile(tempFilePath: string) {
const tempDirPath = await getTempDirPath();
if (!tempFilePath.startsWith(tempDirPath))
log.error("Attempting to delete a non-temp file ${tempFilePath}");
await fs.rm(tempFilePath, { force: true });
}
const promiseWithTimeout = async <T>(
request: Promise<T>,
timeout: number,
): Promise<T> => {
const timeoutRef: {
current: NodeJS.Timeout;
} = { current: null };
const rejectOnTimeout = new Promise<null>((_, reject) => {
timeoutRef.current = setTimeout(
() => reject(new Error("Operation timed out")),
timeout,
);
});
const requestWithTimeOutCancellation = async () => {
const resp = await request;
clearTimeout(timeoutRef.current);
return resp;
};
return await Promise.race([
requestWithTimeOutCancellation(),
rejectOnTimeout,
]);
return ensure(pathToFfmpeg).replace("app.asar", "app.asar.unpacked");
};
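For reference, a minimal sketch of the `withTimeout` helper imported from "../utils/common" (an assumed shape, not necessarily the actual implementation):

const withTimeout = async <T>(promise: Promise<T>, ms: number): Promise<T> => {
    let timeoutId: ReturnType<typeof setTimeout> | undefined;
    // A promise that rejects after `ms` milliseconds.
    const rejectOnTimeout = new Promise<never>((_, reject) => {
        timeoutId = setTimeout(() => reject(new Error("Operation timed out")), ms);
    });
    try {
        return await Promise.race([promise, rejectOnTimeout]);
    } finally {
        if (timeoutId !== undefined) clearTimeout(timeoutId);
    }
};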

View file

@@ -1,190 +1,30 @@
import StreamZip from "node-stream-zip";
/**
* @file file system related functions exposed over the context bridge.
*/
import { existsSync } from "node:fs";
import fs from "node:fs/promises";
import path from "node:path";
import { ElectronFile } from "../../types/ipc";
import log from "../log";
const FILE_STREAM_CHUNK_SIZE: number = 4 * 1024 * 1024;
export const fsExists = (path: string) => existsSync(path);
export async function getDirFiles(dirPath: string) {
const files = await getDirFilePaths(dirPath);
const electronFiles = await Promise.all(files.map(getElectronFile));
return electronFiles;
}
export const fsRename = (oldPath: string, newPath: string) =>
fs.rename(oldPath, newPath);
// https://stackoverflow.com/a/63111390
export const getDirFilePaths = async (dirPath: string) => {
if (!(await fs.stat(dirPath)).isDirectory()) {
return [dirPath];
}
export const fsMkdirIfNeeded = (dirPath: string) =>
fs.mkdir(dirPath, { recursive: true });
let files: string[] = [];
const filePaths = await fs.readdir(dirPath);
export const fsRmdir = (path: string) => fs.rmdir(path);
for (const filePath of filePaths) {
const absolute = path.join(dirPath, filePath);
files = [...files, ...(await getDirFilePaths(absolute))];
}
export const fsRm = (path: string) => fs.rm(path);
return files;
};
const getFileStream = async (filePath: string) => {
const file = await fs.open(filePath, "r");
let offset = 0;
const readableStream = new ReadableStream<Uint8Array>({
async pull(controller) {
try {
const buff = new Uint8Array(FILE_STREAM_CHUNK_SIZE);
const bytesRead = (await file.read(
buff,
0,
FILE_STREAM_CHUNK_SIZE,
offset,
)) as unknown as number;
offset += bytesRead;
if (bytesRead === 0) {
controller.close();
await file.close();
} else {
controller.enqueue(buff.slice(0, bytesRead));
}
} catch (e) {
await file.close();
}
},
async cancel() {
await file.close();
},
});
return readableStream;
};
export async function getElectronFile(filePath: string): Promise<ElectronFile> {
const fileStats = await fs.stat(filePath);
return {
path: filePath.split(path.sep).join(path.posix.sep),
name: path.basename(filePath),
size: fileStats.size,
lastModified: fileStats.mtime.valueOf(),
stream: async () => {
if (!existsSync(filePath)) {
throw new Error("electronFile does not exist");
}
return await getFileStream(filePath);
},
blob: async () => {
if (!existsSync(filePath)) {
throw new Error("electronFile does not exist");
}
const blob = await fs.readFile(filePath);
return new Blob([new Uint8Array(blob)]);
},
arrayBuffer: async () => {
if (!existsSync(filePath)) {
throw new Error("electronFile does not exist");
}
const blob = await fs.readFile(filePath);
return new Uint8Array(blob);
},
};
}
export const getValidPaths = (paths: string[]) => {
if (!paths) {
return [] as string[];
}
return paths.filter(async (path) => {
try {
await fs.stat(path).then((stat) => stat.isFile());
} catch (e) {
return false;
}
});
};
export const getZipFileStream = async (
zip: StreamZip.StreamZipAsync,
filePath: string,
) => {
const stream = await zip.stream(filePath);
const done = {
current: false,
};
const inProgress = {
current: false,
};
// eslint-disable-next-line no-unused-vars
let resolveObj: (value?: any) => void = null;
// eslint-disable-next-line no-unused-vars
let rejectObj: (reason?: any) => void = null;
stream.on("readable", () => {
try {
if (resolveObj) {
inProgress.current = true;
const chunk = stream.read(FILE_STREAM_CHUNK_SIZE) as Buffer;
if (chunk) {
resolveObj(new Uint8Array(chunk));
resolveObj = null;
}
inProgress.current = false;
}
} catch (e) {
rejectObj(e);
}
});
stream.on("end", () => {
try {
done.current = true;
if (resolveObj && !inProgress.current) {
resolveObj(null);
resolveObj = null;
}
} catch (e) {
rejectObj(e);
}
});
stream.on("error", (e) => {
try {
done.current = true;
if (rejectObj) {
rejectObj(e);
rejectObj = null;
}
} catch (e) {
rejectObj(e);
}
});
const readStreamData = async () => {
return new Promise<Uint8Array>((resolve, reject) => {
const chunk = stream.read(FILE_STREAM_CHUNK_SIZE) as Buffer;
if (chunk || done.current) {
resolve(chunk);
} else {
resolveObj = resolve;
rejectObj = reject;
}
});
};
const readableStream = new ReadableStream<Uint8Array>({
async pull(controller) {
try {
const data = await readStreamData();
if (data) {
controller.enqueue(data);
} else {
controller.close();
}
} catch (e) {
log.error("Failed to pull from readableStream", e);
controller.close();
}
},
});
return readableStream;
export const fsReadTextFile = async (filePath: string) =>
fs.readFile(filePath, "utf-8");
export const fsWriteFile = (path: string, contents: string) =>
fs.writeFile(path, contents);
export const fsIsDir = async (dirPath: string) => {
if (!existsSync(dirPath)) return false;
const stat = await fs.stat(dirPath);
return stat.isDirectory();
};

View file

@@ -0,0 +1,159 @@
/** @file Image format conversions and thumbnail generation */
import fs from "node:fs/promises";
import path from "node:path";
import { CustomErrorMessage, type ZipItem } from "../../types/ipc";
import log from "../log";
import { execAsync, isDev } from "../utils/electron";
import {
deleteTempFile,
makeFileForDataOrPathOrZipItem,
makeTempFilePath,
} from "../utils/temp";
export const convertToJPEG = async (imageData: Uint8Array) => {
const inputFilePath = await makeTempFilePath();
const outputFilePath = await makeTempFilePath("jpeg");
// Construct the command first; it may throw `NotAvailable` on win32.
const command = convertToJPEGCommand(inputFilePath, outputFilePath);
try {
await fs.writeFile(inputFilePath, imageData);
await execAsync(command);
return new Uint8Array(await fs.readFile(outputFilePath));
} finally {
try {
await deleteTempFile(inputFilePath);
await deleteTempFile(outputFilePath);
} catch (e) {
log.error("Could not clean up temp files", e);
}
}
};
const convertToJPEGCommand = (
inputFilePath: string,
outputFilePath: string,
) => {
switch (process.platform) {
case "darwin":
return [
"sips",
"-s",
"format",
"jpeg",
inputFilePath,
"--out",
outputFilePath,
];
case "linux":
return [
imageMagickPath(),
inputFilePath,
"-quality",
"100%",
outputFilePath,
];
default: // "win32"
throw new Error(CustomErrorMessage.NotAvailable);
}
};
/** Path to the Linux image-magick executable bundled with our app */
const imageMagickPath = () =>
path.join(isDev ? "build" : process.resourcesPath, "image-magick");
export const generateImageThumbnail = async (
dataOrPathOrZipItem: Uint8Array | string | ZipItem,
maxDimension: number,
maxSize: number,
): Promise<Uint8Array> => {
const {
path: inputFilePath,
isFileTemporary: isInputFileTemporary,
writeToTemporaryFile: writeToTemporaryInputFile,
} = await makeFileForDataOrPathOrZipItem(dataOrPathOrZipItem);
const outputFilePath = await makeTempFilePath("jpeg");
// Construct the command first; it may throw `NotAvailable` on win32.
let quality = 70;
let command = generateImageThumbnailCommand(
inputFilePath,
outputFilePath,
maxDimension,
quality,
);
try {
await writeToTemporaryInputFile();
let thumbnail: Uint8Array;
do {
await execAsync(command);
thumbnail = new Uint8Array(await fs.readFile(outputFilePath));
quality -= 10;
command = generateImageThumbnailCommand(
inputFilePath,
outputFilePath,
maxDimension,
quality,
);
} while (thumbnail.length > maxSize && quality > 50);
return thumbnail;
} finally {
try {
if (isInputFileTemporary) await deleteTempFile(inputFilePath);
await deleteTempFile(outputFilePath);
} catch (e) {
log.error("Could not clean up temp files", e);
}
}
};
const generateImageThumbnailCommand = (
inputFilePath: string,
outputFilePath: string,
maxDimension: number,
quality: number,
) => {
switch (process.platform) {
case "darwin":
return [
"sips",
"-s",
"format",
"jpeg",
"-s",
"formatOptions",
`${quality}`,
"-Z",
`${maxDimension}`,
inputFilePath,
"--out",
outputFilePath,
];
case "linux":
return [
imageMagickPath(),
inputFilePath,
"-auto-orient",
"-define",
`jpeg:size=${2 * maxDimension}x${2 * maxDimension}`,
"-thumbnail",
`${maxDimension}x${maxDimension}>`,
"-unsharp",
"0x.5",
"-quality",
`${quality}`,
outputFilePath,
];
default: // "win32"
throw new Error(CustomErrorMessage.NotAvailable);
}
};
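A hypothetical call site (the path and limits are illustrative): generate a thumbnail that fits in 100 KB with a maximum dimension of 720 px, the loop above lowering the JPEG quality from 70 towards 50 until the size constraint is met.

const thumbnail = await generateImageThumbnail(
    "/path/to/photo.heic",
    720 /* maxDimension, px */,
    100 * 1024 /* maxSize, bytes */,
);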

View file

@@ -1,294 +0,0 @@
import { existsSync } from "fs";
import fs from "node:fs/promises";
import path from "path";
import { CustomErrors, ElectronFile } from "../../types/ipc";
import log from "../log";
import { isPlatform } from "../platform";
import { writeStream } from "../stream";
import { generateTempFilePath } from "../temp";
import { execAsync, isDev } from "../util";
import { deleteTempFile } from "./ffmpeg";
const IMAGE_MAGICK_PLACEHOLDER = "IMAGE_MAGICK";
const MAX_DIMENSION_PLACEHOLDER = "MAX_DIMENSION";
const SAMPLE_SIZE_PLACEHOLDER = "SAMPLE_SIZE";
const INPUT_PATH_PLACEHOLDER = "INPUT";
const OUTPUT_PATH_PLACEHOLDER = "OUTPUT";
const QUALITY_PLACEHOLDER = "QUALITY";
const MAX_QUALITY = 70;
const MIN_QUALITY = 50;
const SIPS_HEIC_CONVERT_COMMAND_TEMPLATE = [
"sips",
"-s",
"format",
"jpeg",
INPUT_PATH_PLACEHOLDER,
"--out",
OUTPUT_PATH_PLACEHOLDER,
];
const SIPS_THUMBNAIL_GENERATE_COMMAND_TEMPLATE = [
"sips",
"-s",
"format",
"jpeg",
"-s",
"formatOptions",
QUALITY_PLACEHOLDER,
"-Z",
MAX_DIMENSION_PLACEHOLDER,
INPUT_PATH_PLACEHOLDER,
"--out",
OUTPUT_PATH_PLACEHOLDER,
];
const IMAGEMAGICK_HEIC_CONVERT_COMMAND_TEMPLATE = [
IMAGE_MAGICK_PLACEHOLDER,
INPUT_PATH_PLACEHOLDER,
"-quality",
"100%",
OUTPUT_PATH_PLACEHOLDER,
];
const IMAGE_MAGICK_THUMBNAIL_GENERATE_COMMAND_TEMPLATE = [
IMAGE_MAGICK_PLACEHOLDER,
INPUT_PATH_PLACEHOLDER,
"-auto-orient",
"-define",
`jpeg:size=${SAMPLE_SIZE_PLACEHOLDER}x${SAMPLE_SIZE_PLACEHOLDER}`,
"-thumbnail",
`${MAX_DIMENSION_PLACEHOLDER}x${MAX_DIMENSION_PLACEHOLDER}>`,
"-unsharp",
"0x.5",
"-quality",
QUALITY_PLACEHOLDER,
OUTPUT_PATH_PLACEHOLDER,
];
function getImageMagickStaticPath() {
return isDev
? "resources/image-magick"
: path.join(process.resourcesPath, "image-magick");
}
export async function convertToJPEG(
fileData: Uint8Array,
filename: string,
): Promise<Uint8Array> {
if (isPlatform("windows")) {
throw Error(CustomErrors.WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED);
}
const convertedFileData = await convertToJPEG_(fileData, filename);
return convertedFileData;
}
async function convertToJPEG_(
fileData: Uint8Array,
filename: string,
): Promise<Uint8Array> {
let tempInputFilePath: string;
let tempOutputFilePath: string;
try {
tempInputFilePath = await generateTempFilePath(filename);
tempOutputFilePath = await generateTempFilePath("output.jpeg");
await fs.writeFile(tempInputFilePath, fileData);
await execAsync(
constructConvertCommand(tempInputFilePath, tempOutputFilePath),
);
return new Uint8Array(await fs.readFile(tempOutputFilePath));
} catch (e) {
log.error("Failed to convert HEIC", e);
throw e;
} finally {
try {
await fs.rm(tempInputFilePath, { force: true });
} catch (e) {
log.error(`Failed to remove tempInputFile ${tempInputFilePath}`, e);
}
try {
await fs.rm(tempOutputFilePath, { force: true });
} catch (e) {
log.error(
`Failed to remove tempOutputFile ${tempOutputFilePath}`,
e,
);
}
}
}
function constructConvertCommand(
tempInputFilePath: string,
tempOutputFilePath: string,
) {
let convertCmd: string[];
if (isPlatform("mac")) {
convertCmd = SIPS_HEIC_CONVERT_COMMAND_TEMPLATE.map((cmdPart) => {
if (cmdPart === INPUT_PATH_PLACEHOLDER) {
return tempInputFilePath;
}
if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
return tempOutputFilePath;
}
return cmdPart;
});
} else if (isPlatform("linux")) {
convertCmd = IMAGEMAGICK_HEIC_CONVERT_COMMAND_TEMPLATE.map(
(cmdPart) => {
if (cmdPart === IMAGE_MAGICK_PLACEHOLDER) {
return getImageMagickStaticPath();
}
if (cmdPart === INPUT_PATH_PLACEHOLDER) {
return tempInputFilePath;
}
if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
return tempOutputFilePath;
}
return cmdPart;
},
);
} else {
throw new Error(`Unsupported OS ${process.platform}`);
}
return convertCmd;
}
export async function generateImageThumbnail(
inputFile: File | ElectronFile,
maxDimension: number,
maxSize: number,
): Promise<Uint8Array> {
let inputFilePath = null;
let createdTempInputFile = null;
try {
if (isPlatform("windows")) {
throw Error(
CustomErrors.WINDOWS_NATIVE_IMAGE_PROCESSING_NOT_SUPPORTED,
);
}
if (!existsSync(inputFile.path)) {
const tempFilePath = await generateTempFilePath(inputFile.name);
await writeStream(tempFilePath, await inputFile.stream());
inputFilePath = tempFilePath;
createdTempInputFile = true;
} else {
inputFilePath = inputFile.path;
}
const thumbnail = await generateImageThumbnail_(
inputFilePath,
maxDimension,
maxSize,
);
return thumbnail;
} finally {
if (createdTempInputFile) {
try {
await deleteTempFile(inputFilePath);
} catch (e) {
log.error(`Failed to deleteTempFile ${inputFilePath}`, e);
}
}
}
}
async function generateImageThumbnail_(
inputFilePath: string,
width: number,
maxSize: number,
): Promise<Uint8Array> {
let tempOutputFilePath: string;
let quality = MAX_QUALITY;
try {
tempOutputFilePath = await generateTempFilePath("thumb.jpeg");
let thumbnail: Uint8Array;
do {
await execAsync(
constructThumbnailGenerationCommand(
inputFilePath,
tempOutputFilePath,
width,
quality,
),
);
thumbnail = new Uint8Array(await fs.readFile(tempOutputFilePath));
quality -= 10;
} while (thumbnail.length > maxSize && quality > MIN_QUALITY);
return thumbnail;
} catch (e) {
log.error("Failed to generate image thumbnail", e);
throw e;
} finally {
try {
await fs.rm(tempOutputFilePath, { force: true });
} catch (e) {
log.error(
`Failed to remove tempOutputFile ${tempOutputFilePath}`,
e,
);
}
}
}
function constructThumbnailGenerationCommand(
inputFilePath: string,
tempOutputFilePath: string,
maxDimension: number,
quality: number,
) {
let thumbnailGenerationCmd: string[];
if (isPlatform("mac")) {
thumbnailGenerationCmd = SIPS_THUMBNAIL_GENERATE_COMMAND_TEMPLATE.map(
(cmdPart) => {
if (cmdPart === INPUT_PATH_PLACEHOLDER) {
return inputFilePath;
}
if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
return tempOutputFilePath;
}
if (cmdPart === MAX_DIMENSION_PLACEHOLDER) {
return maxDimension.toString();
}
if (cmdPart === QUALITY_PLACEHOLDER) {
return quality.toString();
}
return cmdPart;
},
);
} else if (isPlatform("linux")) {
thumbnailGenerationCmd =
IMAGE_MAGICK_THUMBNAIL_GENERATE_COMMAND_TEMPLATE.map((cmdPart) => {
if (cmdPart === IMAGE_MAGICK_PLACEHOLDER) {
return getImageMagickStaticPath();
}
if (cmdPart === INPUT_PATH_PLACEHOLDER) {
return inputFilePath;
}
if (cmdPart === OUTPUT_PATH_PLACEHOLDER) {
return tempOutputFilePath;
}
if (cmdPart.includes(SAMPLE_SIZE_PLACEHOLDER)) {
return cmdPart.replaceAll(
SAMPLE_SIZE_PLACEHOLDER,
(2 * maxDimension).toString(),
);
}
if (cmdPart.includes(MAX_DIMENSION_PLACEHOLDER)) {
return cmdPart.replaceAll(
MAX_DIMENSION_PLACEHOLDER,
maxDimension.toString(),
);
}
if (cmdPart === QUALITY_PLACEHOLDER) {
return quality.toString();
}
return cmdPart;
});
} else {
throw new Error(`Unsupported OS ${process.platform}`);
}
return thumbnailGenerationCmd;
}

View file

@@ -5,117 +5,25 @@
*
* @see `web/apps/photos/src/services/clip-service.ts` for more details.
*/
import { existsSync } from "fs";
import jpeg from "jpeg-js";
import fs from "node:fs/promises";
import * as ort from "onnxruntime-node";
import Tokenizer from "../../thirdparty/clip-bpe-ts/mod";
import { CustomErrors } from "../../types/ipc";
import log from "../log";
import { writeStream } from "../stream";
import { generateTempFilePath } from "../temp";
import { deleteTempFile } from "./ffmpeg";
import {
createInferenceSession,
downloadModel,
modelPathDownloadingIfNeeded,
modelSavePath,
} from "./ml";
import { ensure } from "../utils/common";
import { deleteTempFile, makeTempFilePath } from "../utils/temp";
import { makeCachedInferenceSession } from "./ml";
const textModelName = "clip-text-vit-32-uint8.onnx";
const textModelByteSize = 64173509; // 61.2 MB
const imageModelName = "clip-image-vit-32-float32.onnx";
const imageModelByteSize = 351468764; // 335.2 MB
let activeImageModelDownload: Promise<string> | undefined;
const imageModelPathDownloadingIfNeeded = async () => {
try {
if (activeImageModelDownload) {
log.info("Waiting for CLIP image model download to finish");
await activeImageModelDownload;
} else {
activeImageModelDownload = modelPathDownloadingIfNeeded(
imageModelName,
imageModelByteSize,
);
return await activeImageModelDownload;
}
} finally {
activeImageModelDownload = undefined;
}
};
let textModelDownloadInProgress = false;
/* TODO(MR): use the generic method. Then we can remove the exports of the
internal helper functions that we use here */
const textModelPathDownloadingIfNeeded = async () => {
if (textModelDownloadInProgress)
throw Error(CustomErrors.MODEL_DOWNLOAD_PENDING);
const modelPath = modelSavePath(textModelName);
if (!existsSync(modelPath)) {
log.info("CLIP text model not found, downloading");
textModelDownloadInProgress = true;
downloadModel(modelPath, textModelName)
.catch((e) => {
// log but otherwise ignore
log.error("CLIP text model download failed", e);
})
.finally(() => {
textModelDownloadInProgress = false;
});
throw Error(CustomErrors.MODEL_DOWNLOAD_PENDING);
} else {
const localFileSize = (await fs.stat(modelPath)).size;
if (localFileSize !== textModelByteSize) {
log.error(
`CLIP text model size ${localFileSize} does not match the expected size, downloading again`,
);
textModelDownloadInProgress = true;
downloadModel(modelPath, textModelName)
.catch((e) => {
// log but otherwise ignore
log.error("CLIP text model download failed", e);
})
.finally(() => {
textModelDownloadInProgress = false;
});
throw Error(CustomErrors.MODEL_DOWNLOAD_PENDING);
}
}
return modelPath;
};
let imageSessionPromise: Promise<any> | undefined;
const onnxImageSession = async () => {
if (!imageSessionPromise) {
imageSessionPromise = (async () => {
const modelPath = await imageModelPathDownloadingIfNeeded();
return createInferenceSession(modelPath);
})();
}
return imageSessionPromise;
};
let _textSession: any = null;
const onnxTextSession = async () => {
if (!_textSession) {
const modelPath = await textModelPathDownloadingIfNeeded();
_textSession = await createInferenceSession(modelPath);
}
return _textSession;
};
const cachedCLIPImageSession = makeCachedInferenceSession(
"clip-image-vit-32-float32.onnx",
351468764 /* 335.2 MB */,
);
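A rough sketch of what `makeCachedInferenceSession` might look like (an assumption built from the download and session helpers in "./ml"; the actual implementation may differ): it memoizes the session promise so that the model is downloaded and the ONNX session created at most once.

const makeCachedInferenceSessionSketch = (
    modelName: string,
    modelByteSize: number,
) => {
    let session: Promise<ort.InferenceSession> | undefined;
    // Return a function that lazily creates, and thereafter reuses, the session.
    return () =>
        (session ??= modelPathDownloadingIfNeeded(modelName, modelByteSize).then(
            (modelPath) => createInferenceSession(modelPath),
        ));
};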
export const clipImageEmbedding = async (jpegImageData: Uint8Array) => {
const tempFilePath = await generateTempFilePath("");
const tempFilePath = await makeTempFilePath();
const imageStream = new Response(jpegImageData.buffer).body;
await writeStream(tempFilePath, imageStream);
await writeStream(tempFilePath, ensure(imageStream));
try {
return await clipImageEmbedding_(tempFilePath);
} finally {
@@ -124,42 +32,43 @@ export const clipImageEmbedding = async (jpegImageData: Uint8Array) => {
};
const clipImageEmbedding_ = async (jpegFilePath: string) => {
const imageSession = await onnxImageSession();
const session = await cachedCLIPImageSession();
const t1 = Date.now();
const rgbData = await getRGBData(jpegFilePath);
const feeds = {
input: new ort.Tensor("float32", rgbData, [1, 3, 224, 224]),
};
const t2 = Date.now();
const results = await imageSession.run(feeds);
const results = await session.run(feeds);
log.debug(
() =>
`onnx/clip image embedding took ${Date.now() - t1} ms (prep: ${t2 - t1} ms, inference: ${Date.now() - t2} ms)`,
);
const imageEmbedding = results["output"].data; // Float32Array
/* Need these model specific casts to type the result */
const imageEmbedding = ensure(results.output).data as Float32Array;
return normalizeEmbedding(imageEmbedding);
};
const getRGBData = async (jpegFilePath: string): Promise<number[]> => {
    const jpegData = await fs.readFile(jpegFilePath);
    const rawImageData = jpeg.decode(jpegData, {
        useTArray: true,
        formatAsRGBA: false,
    });
    const nx = rawImageData.width;
    const ny = rawImageData.height;
    const inputImage = rawImageData.data;
    const nx2 = 224;
    const ny2 = 224;
    const totalSize = 3 * nx2 * ny2;
    const result = Array<number>(totalSize).fill(0);
    const scale = Math.max(nx, ny) / 224;
    const nx3 = Math.round(nx / scale);
    const ny3 = Math.round(ny / scale);
    const mean: number[] = [0.48145466, 0.4578275, 0.40821073];
    const std: number[] = [0.26862954, 0.26130258, 0.27577711];
@@ -168,40 +77,40 @@ const getRGBData = async (jpegFilePath: string) => {
        for (let x = 0; x < nx3; x++) {
            for (let c = 0; c < 3; c++) {
                // Linear interpolation
                const sx = (x + 0.5) * scale - 0.5;
                const sy = (y + 0.5) * scale - 0.5;
                const x0 = Math.max(0, Math.floor(sx));
                const y0 = Math.max(0, Math.floor(sy));
                const x1 = Math.min(x0 + 1, nx - 1);
                const y1 = Math.min(y0 + 1, ny - 1);
                const dx = sx - x0;
                const dy = sy - y0;
                const j00 = 3 * (y0 * nx + x0) + c;
                const j01 = 3 * (y0 * nx + x1) + c;
                const j10 = 3 * (y1 * nx + x0) + c;
                const j11 = 3 * (y1 * nx + x1) + c;
                const v00 = inputImage[j00] ?? 0;
                const v01 = inputImage[j01] ?? 0;
                const v10 = inputImage[j10] ?? 0;
                const v11 = inputImage[j11] ?? 0;
                const v0 = v00 * (1 - dx) + v01 * dx;
                const v1 = v10 * (1 - dx) + v11 * dx;
                const v = v0 * (1 - dy) + v1 * dy;
                const v2 = Math.min(Math.max(Math.round(v), 0), 255);
                // createTensorWithDataList is dumb compared to reshape and
                // hence has to be given with one channel after another
                const i = y * nx3 + x + (c % 3) * 224 * 224;
                result[i] = (v2 / 255 - (mean[c] ?? 0)) / (std[c] ?? 1);
            }
        }
    }
@@ -211,26 +120,41 @@ const getRGBData = async (jpegFilePath: string) => {
const normalizeEmbedding = (embedding: Float32Array) => {
    let normalization = 0;
    for (const v of embedding) normalization += v * v;
    const sqrtNormalization = Math.sqrt(normalization);
    for (let index = 0; index < embedding.length; index++)
        embedding[index] = ensure(embedding[index]) / sqrtNormalization;
    return embedding;
};
const cachedCLIPTextSession = makeCachedInferenceSession(
    "clip-text-vit-32-uint8.onnx",
    64173509 /* 61.2 MB */,
);
let _tokenizer: Tokenizer | undefined;
const getTokenizer = () => {
    if (!_tokenizer) _tokenizer = new Tokenizer();
    return _tokenizer;
};
export const clipTextEmbeddingIfAvailable = async (text: string) => {
    const sessionOrStatus = await Promise.race([
        cachedCLIPTextSession(),
        "downloading-model",
    ]);
    // Don't wait for the download to complete
    if (typeof sessionOrStatus == "string") {
        log.info(
            "Ignoring CLIP text embedding request because model download is pending",
        );
        return undefined;
    }
    const session = sessionOrStatus;
    const t1 = Date.now();
    const tokenizer = getTokenizer();
    const tokenizedText = Int32Array.from(tokenizer.encodeForCLIP(text));
@@ -238,11 +162,11 @@ export const clipTextEmbedding = async (text: string) => {
input: new ort.Tensor("int32", tokenizedText, [1, 77]),
};
const t2 = Date.now();
    const results = await session.run(feeds);
log.debug(
() =>
`onnx/clip text embedding took ${Date.now() - t1} ms (prep: ${t2 - t1} ms, inference: ${Date.now() - t2} ms)`,
);
    const textEmbedding = ensure(results.output).data as Float32Array;
return normalizeEmbedding(textEmbedding);
};
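// A minimal usage sketch (not part of the original file): both image and text
// embeddings are L2-normalized by `normalizeEmbedding`, so their cosine
// similarity reduces to a plain dot product. `clipSimilarity` is a
// hypothetical helper shown only for illustration.
const clipSimilarity = (a: Float32Array, b: Float32Array): number => {
    let dot = 0;
    for (let i = 0; i < Math.min(a.length, b.length); i++)
        dot += (a[i] ?? 0) * (b[i] ?? 0);
    return dot;
};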


@@ -8,87 +8,30 @@
*/
import * as ort from "onnxruntime-node";
import log from "../log";
import { ensure } from "../utils/common";
import { makeCachedInferenceSession } from "./ml";
const cachedFaceDetectionSession = makeCachedInferenceSession(
"yolov5s_face_640_640_dynamic.onnx",
30762872 /* 29.3 MB */,
);
export const detectFaces = async (input: Float32Array) => {
    const session = await cachedFaceDetectionSession();
const t = Date.now();
const feeds = {
input: new ort.Tensor("float32", input, [1, 3, 640, 640]),
};
const results = await session.run(feeds);
log.debug(() => `onnx/yolo face detection took ${Date.now() - t} ms`);
    return ensure(results.output).data;
};
const cachedFaceEmbeddingSession = makeCachedInferenceSession(
"mobilefacenet_opset15.onnx",
5286998 /* 5 MB */,
);
export const faceEmbedding = async (input: Float32Array) => {
// Dimension of each face (alias)
const mobileFaceNetFaceSize = 112;
@@ -98,11 +41,12 @@ export const faceEmbedding = async (input: Float32Array) => {
const n = Math.round(input.length / (z * z * 3));
const inputTensor = new ort.Tensor("float32", input, [n, z, z, 3]);
    const session = await cachedFaceEmbeddingSession();
const t = Date.now();
const feeds = { img_inputs: inputTensor };
const results = await session.run(feeds);
    log.debug(() => `onnx/mobilefacenet face embedding took ${Date.now() - t} ms`);
    /* Need these model specific casts to extract and type the result */
    return (results.embeddings as unknown as Record<string, unknown>)
        .cpuData as Float32Array;
};
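// Sketch (assumption: the flat result packs a fixed number `d` of floats per
// face; check the model's actual output shape before relying on this). Splits
// the combined Float32Array returned by `faceEmbedding` into one embedding
// per face. `splitEmbeddings` is a hypothetical helper, not part of the file.
const splitEmbeddings = (flat: Float32Array, d: number): Float32Array[] => {
    const perFace: Float32Array[] = [];
    for (let i = 0; i + d <= flat.length; i += d)
        perFace.push(flat.slice(i, i + d));
    return perFace;
};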


@@ -1,5 +1,5 @@
/**
 * @file AI/ML related functionality, generic layer.
*
* @see also `ml-clip.ts`, `ml-face.ts`.
*
@@ -18,6 +18,50 @@ import * as ort from "onnxruntime-node";
import log from "../log";
import { writeStream } from "../stream";
/**
 * Return a function that can be used to trigger a download of the specified
 * model, and the creation of an ONNX inference session initialized using it.
 *
 * Multiple parallel calls to the returned function are fine: it ensures that
 * the model will be downloaded and the session created using it only once.
 * All pending calls to it meanwhile will just await on the same promise.
 *
 * And once the promise is resolved, the created ONNX inference session will be
 * cached, so subsequent calls to the returned function will just reuse the
 * same session.
 *
 * {@link makeCachedInferenceSession} can itself be called anytime; it doesn't
 * actively trigger a download until the returned function is called.
 *
 * @param modelName The name of the model to download.
 *
 * @param modelByteSize The size in bytes that we expect the model to have. If
 * the size of the downloaded model does not match the expected size, then we
 * will redownload it.
 *
 * @returns A function. Calling that function returns a promise to an ONNX
 * session.
 */
export const makeCachedInferenceSession = (
modelName: string,
modelByteSize: number,
) => {
let session: Promise<ort.InferenceSession> | undefined;
const download = () =>
modelPathDownloadingIfNeeded(modelName, modelByteSize);
const createSession = (modelPath: string) =>
createInferenceSession(modelPath);
const cachedInferenceSession = () => {
if (!session) session = download().then(createSession);
return session;
};
return cachedInferenceSession;
};
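// Illustration (hypothetical model name and size, shown as a comment since
// top-level await isn't available here): parallel callers share the same
// promise, so the download and session creation happen exactly once.
//
//     const cachedSession = makeCachedInferenceSession("example.onnx", 1024);
//     const [s1, s2] = await Promise.all([cachedSession(), cachedSession()]);
//     // s1 === s2: both resolve to the same InferenceSession instance.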
/**
* Download the model named {@link modelName} if we don't already have it.
*
@@ -26,7 +70,7 @@ import { writeStream } from "../stream";
*
* @returns the path to the model on the local machine.
*/
const modelPathDownloadingIfNeeded = async (
modelName: string,
expectedByteSize: number,
) => {
@@ -49,31 +93,33 @@ export const modelPathDownloadingIfNeeded = async (
};
/** Return the path where the given {@link modelName} is meant to be saved. */
const modelSavePath = (modelName: string) =>
path.join(app.getPath("userData"), "models", modelName);
const downloadModel = async (saveLocation: string, name: string) => {
// `mkdir -p` the directory where we want to save the model.
const saveDir = path.dirname(saveLocation);
await fs.mkdir(saveDir, { recursive: true });
    // Download.
    log.info(`Downloading ML model ${name}`);
const url = `https://models.ente.io/${name}`;
const res = await net.fetch(url);
if (!res.ok) throw new Error(`Failed to fetch ${url}: HTTP ${res.status}`);
    const body = res.body;
    if (!body) throw new Error(`Received a null response for ${url}`);
    // Save.
    await writeStream(saveLocation, body);
    log.info(`Downloaded ML model ${name}`);
};
/**
 * Create an ONNX {@link InferenceSession} with some defaults.
 */
const createInferenceSession = async (modelPath: string) => {
return await ort.InferenceSession.create(modelPath, {
        // Restrict the number of threads to 1.
intraOpNumThreads: 1,
        // Be more conservative with RAM usage.
enableCpuMemArena: false,
});
};


@@ -1,25 +1,37 @@
import { safeStorage } from "electron/main";
import { safeStorageStore } from "../stores/safe-storage";
import { uploadStatusStore } from "../stores/upload-status";
import { watchStore } from "../stores/watch";
/**
* Clear all stores except user preferences.
*
* This is useful to reset state when the user logs out.
*/
export const clearStores = () => {
safeStorageStore.clear();
uploadStatusStore.clear();
watchStore.clear();
};
/**
* [Note: Safe storage keys]
*
* On macOS, `safeStorage` stores our data under a Keychain entry named
* "<app-name> Safe Storage". Which resolves to:
*
* - Electron Safe Storage (dev)
* - ente Safe Storage (prod)
*/
export const saveEncryptionKey = (encryptionKey: string) => {
const encryptedKey = safeStorage.encryptString(encryptionKey);
const b64EncryptedKey = Buffer.from(encryptedKey).toString("base64");
safeStorageStore.set("encryptionKey", b64EncryptedKey);
};
export const encryptionKey = (): string | undefined => {
const b64EncryptedKey = safeStorageStore.get("encryptionKey");
if (!b64EncryptedKey) return undefined;
const keyBuffer = Buffer.from(b64EncryptedKey, "base64");
return safeStorage.decryptString(keyBuffer);
};
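// Round trip sketch (hypothetical key material): `saveEncryptionKey` persists
// the key encrypted via the OS keychain entry described in the note above, and
// `encryptionKey` decrypts it back, returning `undefined` if nothing was saved.
//
//     saveEncryptionKey("dGhpcyBpcyBhIGtleQ==");
//     const key = encryptionKey(); // => "dGhpcyBpcyBhIGtleQ=="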


@@ -1,107 +1,149 @@
import StreamZip from "node-stream-zip";
import fs from "node:fs/promises";
import path from "node:path";
import { existsSync } from "original-fs";
import type { PendingUploads, ZipItem } from "../../types/ipc";
import { uploadStatusStore } from "../stores/upload-status";
export const listZipItems = async (zipPath: string): Promise<ZipItem[]> => {
    const zip = new StreamZip.async({ file: zipPath });
    const entries = await zip.entries();
    const entryNames: string[] = [];
    for (const entry of Object.values(entries)) {
        const basename = path.basename(entry.name);
        // Ignore "hidden" files (files whose names begin with a dot).
        if (entry.isFile && !basename.startsWith(".")) {
            // `entry.name` is the path within the zip.
            entryNames.push(entry.name);
        }
    }
    await zip.close();
    return entryNames.map((entryName) => [zipPath, entryName]);
};
export const pathOrZipItemSize = async (
pathOrZipItem: string | ZipItem,
): Promise<number> => {
if (typeof pathOrZipItem == "string") {
const stat = await fs.stat(pathOrZipItem);
return stat.size;
} else {
const [zipPath, entryName] = pathOrZipItem;
const zip = new StreamZip.async({ file: zipPath });
const entry = await zip.entry(entryName);
if (!entry)
throw new Error(
`An entry with name ${entryName} does not exist in the zip file at ${zipPath}`,
);
const size = entry.size;
await zip.close();
return size;
}
};
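// Usage sketch (hypothetical paths): a plain path is stat-ed directly, while a
// ZipItem tuple opens the zip and reads the entry's recorded size.
//
//     await pathOrZipItemSize("/photos/IMG_0001.jpg");
//     await pathOrZipItemSize(["/takeout.zip", "Takeout/Photos/IMG_0001.jpg"]);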
export const pendingUploads = async (): Promise<PendingUploads | undefined> => {
const collectionName = uploadStatusStore.get("collectionName") ?? undefined;
const allFilePaths = uploadStatusStore.get("filePaths") ?? [];
const filePaths = allFilePaths.filter((f) => existsSync(f));
const allZipItems = uploadStatusStore.get("zipItems");
let zipItems: typeof allZipItems;
    // Migration code - May 2024. Remove after a bit.
    //
    // The older store formats will not have zipItems and instead will have
    // zipPaths. If we find such a case, read the zipPaths and enqueue all of
    // their files as zipItems in the result.
    //
    // This can potentially cause us to try re-uploading an already uploaded
    // file, but the dedup logic will kick in at that point so no harm will
    // come of it.
    if (allZipItems === undefined) {
        const allZipPaths = uploadStatusStore.get("zipPaths") ?? [];
        const zipPaths = allZipPaths.filter((f) => existsSync(f));
zipItems = [];
for (const zip of zipPaths)
zipItems = zipItems.concat(await listZipItems(zip));
} else {
zipItems = allZipItems.filter(([z]) => existsSync(z));
}
if (filePaths.length == 0 && zipItems.length == 0) return undefined;
return {
collectionName,
filePaths,
zipItems,
};
};
/**
 * [Note: Missing values in electron-store]
 *
 * Suppose we were to create a store like this:
 *
 * const store = new Store({
 *     schema: {
 *         foo: { type: "string" },
 *         bars: { type: "array", items: { type: "string" } },
 *     },
 * });
 *
 * If we fetch `store.get("foo")` or `store.get("bars")`, we get `undefined`.
 * But if we try to set these back to `undefined`, say `store.set("foo",
 * someUndefValue)`, we get an error asking us to use `delete()` instead:
 *
 * TypeError: Use `delete()` to clear values
 *
 * This happens even if we do bulk object updates, e.g. with a JS object that
 * has undefined keys:
 *
 * > TypeError: Setting a value of type `undefined` for key `collectionName` is
 * > not allowed as it's not supported by JSON
 *
 * So what should the TypeScript type for "foo" be?
 *
 * If it were to not include the possibility of `undefined`, then the type
 * would lie, because `store.get("foo")` can indeed be `undefined`. But if we
 * were to include the possibility of `undefined`, then trying to
 * `store.set("foo", someUndefValue)` will throw.
 *
 * The approach we take is to rely on falsy values (empty strings and empty
 * arrays) to indicate missing values: we convert those to `undefined` when
 * reading from the store, and convert `undefined` to the corresponding falsy
 * value when writing.
 */
export const setPendingUploads = ({
collectionName,
filePaths,
zipItems,
}: PendingUploads) => {
uploadStatusStore.set({
collectionName: collectionName ?? "",
filePaths: filePaths,
zipItems: zipItems,
});
};
export const markUploadedFiles = (paths: string[]) => {
const existing = uploadStatusStore.get("filePaths") ?? [];
const updated = existing.filter((p) => !paths.includes(p));
uploadStatusStore.set("filePaths", updated);
};
export const markUploadedZipItems = (
items: [zipPath: string, entryName: string][],
) => {
const existing = uploadStatusStore.get("zipItems") ?? [];
const updated = existing.filter(
(z) => !items.some((e) => z[0] == e[0] && z[1] == e[1]),
);
uploadStatusStore.set("zipItems", updated);
};
export const clearPendingUploads = () => uploadStatusStore.clear();
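// Sketch (hypothetical data): after successfully uploading a zip entry, drop
// it from the pending list; the tuple is matched on both zip path and entry
// name.
//
//     markUploadedZipItems([["/takeout.zip", "Takeout/Photos/IMG_0001.jpg"]]);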


@@ -1,101 +1,156 @@
import chokidar, { type FSWatcher } from "chokidar";
import { BrowserWindow } from "electron/main";
import fs from "node:fs/promises";
import path from "node:path";
import { FolderWatch, type CollectionMapping } from "../../types/ipc";
import log from "../log";
import { watchStore } from "../stores/watch";
import { posixPath } from "../utils/electron";
import { fsIsDir } from "./fs";
/**
 * Create and return a new file system watcher.
 *
 * Internally this uses the watcher from the chokidar package.
 *
 * @param mainWindow The window handle is used to notify the renderer process of
 * pertinent file system events.
 */
export const createWatcher = (mainWindow: BrowserWindow) => {
    const send = (eventName: string) => (path: string) =>
        mainWindow.webContents.send(eventName, ...eventData(path));
    const folderPaths = folderWatches().map((watch) => watch.folderPath);
    const watcher = chokidar.watch(folderPaths, {
        awaitWriteFinish: true,
    });
    watcher
        .on("add", send("watchAddFile"))
        .on("unlink", send("watchRemoveFile"))
        .on("unlinkDir", send("watchRemoveDir"))
        .on("error", (error) => log.error("Error while watching files", error));
    return watcher;
};
const eventData = (platformPath: string): [string, FolderWatch] => {
    const path = posixPath(platformPath);
    const watch = folderWatches().find((watch) =>
        path.startsWith(watch.folderPath + "/"),
    );
    if (!watch) throw new Error(`No folder watch was found for path ${path}`);
    return [path, watch];
};
export const watchGet = async (watcher: FSWatcher): Promise<FolderWatch[]> => {
    const valid: FolderWatch[] = [];
    const deletedPaths: string[] = [];
    for (const watch of folderWatches()) {
        if (await fsIsDir(watch.folderPath)) valid.push(watch);
        else deletedPaths.push(watch.folderPath);
    }
    if (deletedPaths.length) {
        await Promise.all(deletedPaths.map((p) => watchRemove(watcher, p)));
        setFolderWatches(valid);
    }
    return valid;
};
const folderWatches = (): FolderWatch[] => watchStore.get("mappings") ?? [];
const setFolderWatches = (watches: FolderWatch[]) =>
    watchStore.set("mappings", watches);
export const watchAdd = async (
    watcher: FSWatcher,
    folderPath: string,
    collectionMapping: CollectionMapping,
) => {
    const watches = folderWatches();
    if (!(await fsIsDir(folderPath)))
        throw new Error(
            `Attempting to add a folder watch for a folder path ${folderPath} that is not an existing directory`,
        );
    if (watches.find((watch) => watch.folderPath == folderPath))
        throw new Error(
            `A folder watch with the given folder path ${folderPath} already exists`,
        );
    watches.push({
        folderPath,
        collectionMapping,
        syncedFiles: [],
        ignoredFiles: [],
    });
    setFolderWatches(watches);
    watcher.add(folderPath);
    return watches;
};
export const watchRemove = (watcher: FSWatcher, folderPath: string) => {
    const watches = folderWatches();
    const filtered = watches.filter((watch) => watch.folderPath != folderPath);
    if (watches.length == filtered.length)
        throw new Error(
            `Attempting to remove a non-existing folder watch for folder path ${folderPath}`,
        );
    setFolderWatches(filtered);
    watcher.unwatch(folderPath);
    return filtered;
};
export const watchUpdateSyncedFiles = (
    syncedFiles: FolderWatch["syncedFiles"],
    folderPath: string,
) => {
    setFolderWatches(
        folderWatches().map((watch) => {
            if (watch.folderPath == folderPath) {
                watch.syncedFiles = syncedFiles;
            }
            return watch;
        }),
    );
};
export const watchUpdateIgnoredFiles = (
    ignoredFiles: FolderWatch["ignoredFiles"],
    folderPath: string,
) => {
    setFolderWatches(
        folderWatches().map((watch) => {
            if (watch.folderPath == folderPath) {
                watch.ignoredFiles = ignoredFiles;
            }
            return watch;
        }),
    );
};
export const watchFindFiles = async (dirPath: string) => {
    const items = await fs.readdir(dirPath, { withFileTypes: true });
    let paths: string[] = [];
    for (const item of items) {
        const itemPath = path.posix.join(dirPath, item.name);
        if (item.isFile()) {
            paths.push(itemPath);
        } else if (item.isDirectory()) {
            paths = [...paths, ...(await watchFindFiles(itemPath))];
        }
    }
    return paths;
};
export const watchReset = (watcher: FSWatcher) => {
watcher.unwatch(folderWatches().map((watch) => watch.folderPath));
};
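// Hypothetical wiring in the main process (sketch; `mainWindow` is assumed to
// exist): create the watcher once the main window is up, and detach all
// watched folders on logout.
//
//     const watcher = createWatcher(mainWindow);
//     // ... later, e.g. when the user logs out:
//     watchReset(watcher);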


@@ -1,18 +0,0 @@
import Store, { Schema } from "electron-store";
import type { KeysStoreType } from "../../types/main";
const keysStoreSchema: Schema<KeysStoreType> = {
AnonymizeUserID: {
type: "object",
properties: {
id: {
type: "string",
},
},
},
};
export const keysStore = new Store({
name: "keys",
schema: keysStoreSchema,
});


@@ -1,7 +1,10 @@
import Store, { Schema } from "electron-store";
interface SafeStorageStore {
    encryptionKey?: string;
}
const safeStorageSchema: Schema<SafeStorageStore> = {
encryptionKey: {
type: "string",
},


@@ -0,0 +1,54 @@
import Store, { Schema } from "electron-store";
export interface UploadStatusStore {
/**
* The collection to which we're uploading, or the root collection.
*
* Not all pending uploads will have an associated collection.
*/
collectionName?: string;
/**
* Paths to regular files that are pending upload.
*/
filePaths?: string[];
/**
* Each item is the path to a zip file and the name of an entry within it.
*/
zipItems?: [zipPath: string, entryName: string][];
/**
* @deprecated Legacy paths to zip files, now subsumed into zipItems.
*/
zipPaths?: string[];
}
const uploadStatusSchema: Schema<UploadStatusStore> = {
collectionName: {
type: "string",
},
filePaths: {
type: "array",
items: {
type: "string",
},
},
zipItems: {
type: "array",
items: {
type: "array",
items: {
type: "string",
},
},
},
zipPaths: {
type: "array",
items: {
type: "string",
},
},
};
export const uploadStatusStore = new Store({
name: "upload-status",
schema: uploadStatusSchema,
});


@@ -1,25 +0,0 @@
import Store, { Schema } from "electron-store";
import type { UploadStoreType } from "../../types/main";
const uploadStoreSchema: Schema<UploadStoreType> = {
filePaths: {
type: "array",
items: {
type: "string",
},
},
zipPaths: {
type: "array",
items: {
type: "string",
},
},
collectionName: {
type: "string",
},
};
export const uploadStatusStore = new Store({
name: "upload-status",
schema: uploadStoreSchema,
});


@@ -1,12 +1,12 @@
import Store, { Schema } from "electron-store";
interface UserPreferences {
    hideDockIcon?: boolean;
skipAppVersion?: string;
muteUpdateNotificationVersion?: string;
}
const userPreferencesSchema: Schema<UserPreferences> = {
hideDockIcon: {
type: "boolean",
},


@@ -1,47 +0,0 @@
import Store, { Schema } from "electron-store";
import { WatchStoreType } from "../../types/ipc";
const watchStoreSchema: Schema<WatchStoreType> = {
mappings: {
type: "array",
items: {
type: "object",
properties: {
rootFolderName: {
type: "string",
},
uploadStrategy: {
type: "number",
},
folderPath: {
type: "string",
},
syncedFiles: {
type: "array",
items: {
type: "object",
properties: {
path: {
type: "string",
},
id: {
type: "number",
},
},
},
},
ignoredFiles: {
type: "array",
items: {
type: "string",
},
},
},
},
},
};
export const watchStore = new Store({
name: "watch-status",
schema: watchStoreSchema,
});


@@ -0,0 +1,77 @@
import Store, { Schema } from "electron-store";
import { type FolderWatch } from "../../types/ipc";
import log from "../log";
interface WatchStore {
mappings?: FolderWatchWithLegacyFields[];
}
type FolderWatchWithLegacyFields = FolderWatch & {
/** @deprecated Only retained for migration, do not use in other code */
rootFolderName?: string;
/** @deprecated Only retained for migration, do not use in other code */
uploadStrategy?: number;
};
const watchStoreSchema: Schema<WatchStore> = {
mappings: {
type: "array",
items: {
type: "object",
properties: {
rootFolderName: { type: "string" },
collectionMapping: { type: "string" },
uploadStrategy: { type: "number" },
folderPath: { type: "string" },
syncedFiles: {
type: "array",
items: {
type: "object",
properties: {
path: { type: "string" },
uploadedFileID: { type: "number" },
collectionID: { type: "number" },
},
},
},
ignoredFiles: {
type: "array",
items: { type: "string" },
},
},
},
},
};
export const watchStore = new Store({
name: "watch-status",
schema: watchStoreSchema,
});
/**
* Previous versions of the store used to store an integer to indicate the
* collection mapping, migrate these to the new schema if we encounter them.
*/
export const migrateLegacyWatchStoreIfNeeded = () => {
let needsUpdate = false;
const updatedWatches = [];
for (const watch of watchStore.get("mappings") ?? []) {
let collectionMapping = watch.collectionMapping;
// The required type defines the latest schema, but before migration
// this'll be undefined, so tell ESLint to calm down.
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
if (!collectionMapping) {
collectionMapping = watch.uploadStrategy == 1 ? "parent" : "root";
needsUpdate = true;
}
if (watch.rootFolderName) {
delete watch.rootFolderName;
needsUpdate = true;
}
updatedWatches.push({ ...watch, collectionMapping });
}
if (needsUpdate) {
watchStore.set("mappings", updatedWatches);
log.info("Migrated legacy watch store data to new schema");
}
};
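// Migration example (hypothetical data): a legacy entry such as
//
//     { rootFolderName: "Photos", uploadStrategy: 1, folderPath: "/p", ... }
//
// is rewritten to
//
//     { collectionMapping: "parent", folderPath: "/p", ... }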


@@ -1,15 +1,19 @@
/**
 * @file stream data to/from the renderer using a custom protocol handler.
*/
import { net, protocol } from "electron/main";
import StreamZip from "node-stream-zip";
import { createWriteStream, existsSync } from "node:fs";
import fs from "node:fs/promises";
import { Readable } from "node:stream";
import { ReadableStream } from "node:stream/web";
import { pathToFileURL } from "node:url";
import log from "./log";
import { ensure } from "./utils/common";
/**
* Register a protocol handler that we use for streaming large files between the
 * main (Node.js) and renderer (Chromium) processes.
*
* [Note: IPC streams]
*
@@ -17,11 +21,14 @@ import log from "./log";
* across IPC. And passing the entire contents of the file is not feasible for
* large video files because of the memory pressure the copying would entail.
*
 * As an alternative, we register a custom protocol handler that provides a
* bi-directional stream. The renderer can stream data to the node side by
* streaming the request. The node side can stream to the renderer side by
* streaming the response.
*
* The stream is not full duplex - while both reads and writes can be streamed,
* they need to be streamed separately.
*
* See also: [Note: Transferring large amount of data over IPC]
*
* Depends on {@link registerPrivilegedSchemes}.
@@ -29,88 +36,148 @@
export const registerStreamProtocol = () => {
protocol.handle("stream", async (request: Request) => {
const url = request.url;
        // The request URL contains the command to run as the host, and the
        // pathname of the file(s) as the search params.
        const { host, searchParams } = new URL(url);
        switch (host) {
            case "read":
                return handleRead(ensure(searchParams.get("path")));
            case "read-zip":
                return handleReadZip(
                    ensure(searchParams.get("zipPath")),
                    ensure(searchParams.get("entryName")),
                );
            case "write":
                return handleWrite(ensure(searchParams.get("path")), request);
default:
return new Response("", { status: 404 });
}
});
};
const handleRead = async (path: string) => {
try {
const res = await net.fetch(pathToFileURL(path).toString());
if (res.ok) {
// net.fetch already seems to add "Content-Type" and "Last-Modified"
// headers, but I couldn't find documentation for this. In any case,
// since we already are stat-ting the file for the "Content-Length",
// we explicitly add the "X-Last-Modified-Ms" too,
//
// 1. Guaranteeing its presence,
//
// 2. Having it be in the exact format we want (no string <-> date
// conversions),
//
// 3. Retaining milliseconds.
const stat = await fs.stat(path);
// Add the file's size as the Content-Length header.
const fileSize = stat.size;
res.headers.set("Content-Length", `${fileSize}`);
// Add the file's last modified time (as epoch milliseconds).
const mtimeMs = stat.mtimeMs;
res.headers.set("X-Last-Modified-Ms", `${mtimeMs}`);
}
return res;
} catch (e) {
log.error(`Failed to read stream at ${path}`, e);
return new Response(`Failed to read stream: ${String(e)}`, {
status: 500,
});
}
};
const handleReadZip = async (zipPath: string, entryName: string) => {
try {
const zip = new StreamZip.async({ file: zipPath });
const entry = await zip.entry(entryName);
if (!entry) return new Response("", { status: 404 });
// This returns an "old style" NodeJS.ReadableStream.
const stream = await zip.stream(entry);
// Convert it into a new style NodeJS.Readable.
const nodeReadable = new Readable().wrap(stream);
// Then convert it into a Web stream.
const webReadableStreamAny = Readable.toWeb(nodeReadable);
// However, we get a ReadableStream<any> now. This doesn't go into the
// `BodyInit` expected by the Response constructor, which wants a
// ReadableStream<Uint8Array>. Force a cast.
const webReadableStream =
webReadableStreamAny as ReadableStream<Uint8Array>;
// Close the zip handle when the underlying stream closes.
stream.on("end", () => void zip.close());
return new Response(webReadableStream, {
headers: {
// We don't know the exact type, but it doesn't really matter,
// just set it to a generic binary content-type so that the
// browser doesn't tinker with it thinking of it as text.
"Content-Type": "application/octet-stream",
"Content-Length": `${entry.size}`,
// While it is documented that entry.time is the modification
// time, the units are not mentioned. By seeing the source code,
// we can verify that it is indeed epoch milliseconds. See
// `parseZipTime` in the node-stream-zip source,
// https://github.com/antelle/node-stream-zip/blob/master/node_stream_zip.js
"X-Last-Modified-Ms": `${entry.time}`,
},
});
} catch (e) {
log.error(
`Failed to read entry ${entryName} from zip file at ${zipPath}`,
e,
);
return new Response(`Failed to read stream: ${String(e)}`, {
status: 500,
});
}
};
const handleWrite = async (path: string, request: Request) => {
try {
await writeStream(path, ensure(request.body));
return new Response("", { status: 200 });
} catch (e) {
log.error(`Failed to write stream to ${path}`, e);
return new Response(`Failed to write stream: ${String(e)}`, {
status: 500,
});
}
};
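// Renderer-side usage sketch (assumed URLs, matching the switch above): reads
// are plain fetches of stream:// URLs, and writes POST a body to them.
//
//     const res = await fetch(`stream://read?path=${encodeURIComponent(p)}`);
//     await fetch(`stream://write?path=${encodeURIComponent(p)}`, {
//         method: "POST",
//         body: someReadableStream,
//     });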
/**
* Write a (web) ReadableStream to a file at the given {@link filePath}.
*
* The returned promise resolves when the write completes.
*
* @param filePath The local filesystem path where the file should be written.
 *
 * @param readableStream A web
 * [ReadableStream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream).
*/
export const writeStream = (filePath: string, readableStream: ReadableStream) =>
    writeNodeStream(filePath, Readable.fromWeb(readableStream));
const writeNodeStream = async (filePath: string, fileStream: Readable) => {
const writeable = createWriteStream(filePath);
    fileStream.on("error", (err) => {
        writeable.destroy(err); // Close the writable stream with an error
});
fileStream.pipe(writeable);
await new Promise((resolve, reject) => {
writeable.on("finish", resolve);
        writeable.on("error", (err) => {
            if (existsSync(filePath)) {
                void fs.unlink(filePath);
            }
            reject(err);
});
});
};
