Merge branch 'main' into fix_bg_task_not_getting_killed

This commit is contained in:
ashilkn 2024-05-03 13:12:24 +05:30
commit b221b80625
27 changed files with 300 additions and 276 deletions

View file

@ -17,8 +17,8 @@ name: "Release (auth)"
# We use a suffix like `-test` to indicate that these are test tags, and that
# they belong to a pre-release.
#
# If you need to do multiple tests, add a +x at the end of the tag. e.g.
# `auth-v1.2.3-test+1`.
# If you need to do multiple tests, add a .x at the end of the tag. e.g.
# `auth-v1.2.3-test.1`.
#
# Once the testing is done, also delete the tag(s) please.

View file

@ -36,7 +36,8 @@ ente --help
### Accounts
If you wish, you can add multiple accounts (your own and that of your family members) and export all data using this tool.
If you wish, you can add multiple accounts (your own and that of your family
members) and export all data using this tool.
#### Add an account
@ -44,6 +45,12 @@ If you wish, you can add multiple accounts (your own and that of your family mem
ente account add
```
> [!NOTE]
>
> `ente account add` does not create new accounts, it just adds pre-existing
> accounts to the list of accounts that the CLI knows about so that you can use
> them for other actions.
#### List accounts
```shell

View file

@ -1,55 +0,0 @@
name: Build/release
on:
push:
tags:
- v*
jobs:
release:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [macos-latest, ubuntu-latest, windows-latest]
steps:
- name: Check out Git repository
uses: actions/checkout@v3
with:
submodules: recursive
- name: Install Node.js, NPM and Yarn
uses: actions/setup-node@v3
with:
node-version: 20
- name: Prepare for app notarization
if: startsWith(matrix.os, 'macos')
# Import Apple API key for app notarization on macOS
run: |
mkdir -p ~/private_keys/
echo '${{ secrets.api_key }}' > ~/private_keys/AuthKey_${{ secrets.api_key_id }}.p8
- name: Install libarchive-tools for pacman build # Related https://github.com/electron-userland/electron-builder/issues/4181
if: startsWith(matrix.os, 'ubuntu')
run: sudo apt-get install libarchive-tools
- name: Ente Electron Builder Action
uses: ente-io/action-electron-builder@v1.0.0
with:
# GitHub token, automatically provided to the action
# (No need to define this secret in the repo settings)
github_token: ${{ secrets.github_token }}
# If the commit is tagged with a version (e.g. "v1.0.0"),
# release the app after building
release: ${{ startsWith(github.ref, 'refs/tags/v') }}
mac_certs: ${{ secrets.mac_certs }}
mac_certs_password: ${{ secrets.mac_certs_password }}
env:
# macOS notarization API key
API_KEY_ID: ${{ secrets.api_key_id }}
API_KEY_ISSUER_ID: ${{ secrets.api_key_issuer_id}}
USE_HARD_LINKS: false

View file

@ -0,0 +1,90 @@
name: "Release"
# This will create a new draft release with public artifacts.
#
# Note that a release will only get created if there is an associated tag
# (GitHub releases need a corresponding tag).
#
# The canonical source for this action is in the repository where we keep the
# source code for the Ente Photos desktop app: https://github.com/ente-io/ente
#
# However, it actually lives and runs in the repository that we use for making
# releases: https://github.com/ente-io/photos-desktop
#
# We need two repositories because Electron updater currently doesn't work well
# with monorepos. For more details, see `docs/release.md`.
on:
push:
# Run when a tag matching the pattern "v*" is pushed.
#
# See: [Note: Testing release workflows that are triggered by tags].
tags:
- "v*"
jobs:
release:
runs-on: ${{ matrix.os }}
defaults:
run:
working-directory: desktop
strategy:
matrix:
os: [macos-latest]
# Commented for testing
# os: [macos-latest, ubuntu-latest, windows-latest]
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
# Checkout the tag photosd-v1.x.x from the source code
# repository when we're invoked for tag v1.x.x on the releases
# repository.
repository: ente-io/ente
ref: photosd-${{ github.ref_name }}
submodules: recursive
- name: Setup node
uses: actions/setup-node@v4
with:
node-version: 20
- name: Install dependencies
run: yarn install
- name: Prepare for app notarization
if: startsWith(matrix.os, 'macos')
# Import Apple API key for app notarization on macOS
run: |
mkdir -p ~/private_keys/
echo '${{ secrets.API_KEY }}' > ~/private_keys/AuthKey_${{ secrets.API_KEY_ID }}.p8
- name: Install libarchive-tools for pacman build
if: startsWith(matrix.os, 'ubuntu')
# See:
# https://github.com/electron-userland/electron-builder/issues/4181
run: sudo apt-get install libarchive-tools
- name: Build
uses: ente-io/action-electron-builder@v1.0.0
with:
package_root: desktop
# GitHub token, automatically provided to the action
# (No need to define this secret in the repo settings)
github_token: ${{ secrets.GITHUB_TOKEN }}
# If the commit is tagged with a version (e.g. "v1.0.0"),
# release the app after building.
release: ${{ startsWith(github.ref, 'refs/tags/v') }}
mac_certs: ${{ secrets.MAC_CERTS }}
mac_certs_password: ${{ secrets.MAC_CERTS_PASSWORD }}
env:
# macOS notarization API key details
API_KEY_ID: ${{ secrets.API_KEY_ID }}
API_KEY_ISSUER_ID: ${{ secrets.API_KEY_ISSUER_ID }}
USE_HARD_LINKS: false

View file

@ -1,5 +1,13 @@
# CHANGELOG
## v1.7.0 (Unreleased)
v1.7 is a major rewrite to improve the security of our app. We have enabled
sandboxing and disabled node integration for the renderer process. All this
required restructuring our IPC mechanisms, which resulted in a lot of under the
hood changes. The outcome is a more secure app that also uses the latest and
greatest Electron recommendations.
## v1.6.63
### New

View file

@ -10,12 +10,6 @@ To know more about Ente, see [our main README](../README.md) or visit
## Building from source
> [!CAUTION]
>
> We're improving the security of the desktop app further by migrating to
> Electron's sandboxing and contextIsolation. These updates are still WIP and
> meanwhile the instructions below might not fully work on the main branch.
Fetch submodules
```sh

View file

@ -1,43 +1,47 @@
## Releases
> [!NOTE]
>
> TODO(MR): This document needs to be audited and changed as we do the first
> release from this new monorepo.
Conceptually, the release is straightforward: We push a tag, a GitHub workflow
gets triggered that creates a draft release with artifacts built from that tag.
We then publish that release. The download links on our website, and existing
apps already know how to check for the latest GitHub release and update
accordingly.
The Github Action that builds the desktop binaries is triggered by pushing a tag
matching the pattern `photos-desktop-v1.2.3`. This value should match the
version in `package.json`.
The complication comes by the fact that Electron Updater (the mechanism that we
use for auto updates) doesn't work well with monorepos. So we need to keep a
separate (non-mono) repository just for doing releases.
So the process for doing a release would be.
- Source code lives here, in [ente-io/ente](https://github.com/ente-io/ente).
1. Create a new branch (can be named anything). On this branch, include your
changes.
- Releases are done from
[ente-io/photos-desktop](https://github.com/ente-io/photos-desktop).
2. Mention the changes in `CHANGELOG.md`.
## Workflow
3. Changing the `version` in `package.json` to `1.x.x`.
The workflow is:
4. Commit and push to remote
1. Finalize the changes in the source repo.
- Update the CHANGELOG.
- Update the version in `package.json`
- `git commit -m 'Release v1.x.x'`
- Open PR, merge into main.
2. Tag this commit with a tag matching the pattern `photosd-v1.2.3`, where
`1.2.3` is the version in `package.json`
```sh
git add package.json && git commit -m 'Release v1.x.x'
git tag v1.x.x
git push && git push --tags
git tag photosd-v1.x.x
git push origin photosd-v1.x.x
```
This by itself will already trigger a new release. The GitHub action will create
a new draft release that can then be used as described below.
3. Head over to the releases repository and run the trigger script, passing it
the tag _without_ the `photosd-` prefix.
To wrap up, we also need to merge back these changes into main. So for that,
```sh
./.github/trigger-release.sh v1.x.x
```
5. Open a PR for the branch that we're working on (where the above tag was
pushed from) to get it merged into main.
6. In this PR, also increase the version number for the next release train. That
is, suppose we just released `v4.0.1`. Then we'll change the version number
in main to `v4.0.2-next.0`. Each pre-release will modify the `next.0` part.
Finally, at the time of the next release, this'll become `v4.0.2`.
## Post build
The GitHub Action runs on Windows, Linux and macOS. It produces the artifacts
defined in the `build` value in `package.json`.
@ -46,29 +50,11 @@ defined in the `build` value in `package.json`.
- Linux - An AppImage, and 3 other packages (`.rpm`, `.deb`, `.pacman`)
- macOS - A universal DMG
Additionally, the GitHub action notarizes the macOS DMG. For this it needs
credentials provided via GitHub secrets.
Additionally, the GitHub action notarizes and signs the macOS DMG (For this it
uses credentials provided via GitHub secrets).
During the build the Sentry webpack plugin checks to see if SENTRY_AUTH_TOKEN is
defined. If so, it uploads the sourcemaps for the renderer process to Sentry
(For our GitHub action, the SENTRY_AUTH_TOKEN is defined as a GitHub secret).
The sourcemaps for the main (node) process are currently not sent to Sentry
(this works fine in practice since the node process files are not minified, we
only run `tsc`).
Once the build is done, a draft release with all these artifacts attached is
created. The build is idempotent, so if something goes wrong and we need to
re-run the GitHub action, just delete the draft release (if it got created) and
start a new run by pushing a new tag (if some code changes are required).
If no code changes are required, say the build failed for some transient network
or Sentry issue, we can even re-run the build by going to the GitHub Actions
page and re-running it from there. This will re-trigger the build for the same tag.
If everything goes well, we'll have a release on GitHub, and the corresponding
source maps for the renderer process uploaded to Sentry. There isn't anything
else to do:
To rollout the build, we need to publish the draft release. Thereafter,
everything is automated:
- The website automatically redirects to the latest release on GitHub when
people try to download.
@ -76,7 +62,7 @@ else to do:
- The file formats with support auto update (Windows `exe`, the Linux AppImage
and the macOS DMG) also check the latest GitHub release automatically to
download and apply the update (the rest of the formats don't support auto
updates).
updates yet).
- We're not putting the desktop app in other stores currently. It is available
as a `brew cask`, but we only had to open a PR to add the initial formula,
@ -87,6 +73,4 @@ else to do:
We can also publish the draft releases by checking the "pre-release" option.
Such releases don't cause any of the channels (our website, or the desktop app
auto updater, or brew) to be notified, instead these are useful for giving links
to pre-release builds to customers. Generally, in the version number for these
we'll add a label to the version, e.g. the "beta.x" in `1.x.x-beta.x`. This
should be done both in `package.json`, and what we tag the commit with.
to pre-release builds to customers.

View file

@ -1,6 +1,6 @@
{
"name": "ente",
"version": "1.6.63",
"version": "1.7.0-beta.0",
"private": true,
"description": "Desktop client for Ente Photos",
"author": "Ente <code@ente.io>",
@ -43,7 +43,7 @@
"@typescript-eslint/eslint-plugin": "^7",
"@typescript-eslint/parser": "^7",
"concurrently": "^8",
"electron": "^29",
"electron": "^30",
"electron-builder": "^24",
"electron-builder-notarize": "^1.5",
"eslint": "^8",

View file

@ -127,15 +127,7 @@ const registerPrivilegedSchemes = () => {
{
scheme: "stream",
privileges: {
// TODO(MR): Remove the commented bits if we don't end up
// needing them by the time the IPC refactoring is done.
// Prevent the insecure origin issues when fetching this
// secure: true,
// Allow the web fetch API in the renderer to use this scheme.
supportFetchAPI: true,
// Allow it to be used with video tags.
// stream: true,
},
},
]);

View file

@ -217,7 +217,25 @@ const watchReset = async () => {
// - Upload
const pathForFile = (file: File) => webUtils.getPathForFile(file);
const pathForFile = (file: File) => {
const path = webUtils.getPathForFile(file);
// The path that we get back from `webUtils.getPathForFile` on Windows uses
// "\" as the path separator. Convert them to POSIX ("/") separators.
//
// Note that we do not have access to the path or the os module in the
// preload script, thus this hand rolled transformation.
// However that makes TypeScript fidgety since it cannot find navigator,
// as we haven't included "lib": ["dom"] in our tsconfig to avoid making DOM
// APIs available to our main Node.js code. We could create a separate
// tsconfig just for the preload script, but for now let's go with a cast.
//
// @ts-expect-error navigator is not defined.
const platform = (navigator as { platform: string }).platform;
return platform.toLowerCase().includes("win")
? path.split("\\").join("/")
: path;
};
const listZipItems = (zipPath: string) =>
ipcRenderer.invoke("listZipItems", zipPath);

View file

@ -1199,10 +1199,10 @@ electron-updater@^6.1:
semver "^7.3.8"
tiny-typed-emitter "^2.1.0"
electron@^29:
version "29.3.1"
resolved "https://registry.yarnpkg.com/electron/-/electron-29.3.1.tgz#87c82b2cd2c326f78f036499377a5448bea5d4bb"
integrity sha512-auge1/6RVqgUd6TgIq88wKdUCJi2cjESi3jy7d+6X4JzvBGprKBqMJ8JSSFpu/Px1YJrFUKAxfy6SC+TQf1uLw==
electron@^30:
version "30.0.2"
resolved "https://registry.yarnpkg.com/electron/-/electron-30.0.2.tgz#95ba019216bf8be9f3097580123e33ea37497733"
integrity sha512-zv7T+GG89J/hyWVkQsLH4Y/rVEfqJG5M/wOBIGNaDdqd8UV9/YZPdS7CuFeaIj0H9LhCt95xkIQNpYB/3svOkQ==
dependencies:
"@electron/get" "^2.0.0"
"@types/node" "^20.9.0"

View file

@ -25,10 +25,13 @@ configure the endpoint the app should be connecting to.
> You can download the CLI from
> [here](https://github.com/ente-io/ente/releases?q=tag%3Acli-v0)
Define a config.yaml and put it either in the same directory as CLI or path
defined in env variable `ENTE_CLI_CONFIG_PATH`
Define a config.yaml and put it either in the same directory as where you run
the CLI from ("current working directory"), or in the path defined in env
variable `ENTE_CLI_CONFIG_PATH`:
```yaml
endpoint:
api: "http://localhost:8080"
```
(Another [example](https://github.com/ente-io/ente/blob/main/cli/config.yaml.example))

View file

@ -357,10 +357,16 @@ class FileUploader {
final List<ConnectivityResult> connections =
await (Connectivity().checkConnectivity());
bool canUploadUnderCurrentNetworkConditions = true;
if (connections.any((element) => element == ConnectivityResult.mobile)) {
canUploadUnderCurrentNetworkConditions =
Configuration.instance.shouldBackupOverMobileData();
if (!Configuration.instance.shouldBackupOverMobileData()) {
if (connections.any((element) => element == ConnectivityResult.mobile)) {
canUploadUnderCurrentNetworkConditions = false;
} else {
_logger.info(
"mobileBackupDisabled, backing up with connections: ${connections.map((e) => e.name).toString()}",
);
}
}
if (!canUploadUnderCurrentNetworkConditions) {
throw WiFiUnavailableError();
}

View file

@ -9,27 +9,8 @@ import { useEffect, useState } from "react";
import { storeCastData } from "services/cast/castService";
import { useCastReceiver } from "../utils/useCastReceiver";
// Function to generate cryptographically secure digits
const generateSecureData = (length: number): Uint8Array => {
const array = new Uint8Array(length);
window.crypto.getRandomValues(array);
// Modulo operation to ensure each byte is a single digit
for (let i = 0; i < length; i++) {
array[i] = array[i] % 10;
}
return array;
};
const convertDataToDecimalString = (data: Uint8Array): string => {
let decimalString = "";
for (let i = 0; i < data.length; i++) {
decimalString += data[i].toString(); // No need to pad, as each value is a single digit
}
return decimalString;
};
export default function PairingMode() {
const [digits, setDigits] = useState<string[]>([]);
const [deviceCode, setDeviceCode] = useState("");
const [publicKeyB64, setPublicKeyB64] = useState("");
const [privateKeyB64, setPrivateKeyB64] = useState("");
const [codePending, setCodePending] = useState(true);
@ -43,8 +24,6 @@ export default function PairingMode() {
const init = async () => {
try {
const data = generateSecureData(6);
setDigits(convertDataToDecimalString(data).split(""));
const keypair = await generateKeyPair();
setPublicKeyB64(await toB64(keypair.publicKey));
setPrivateKeyB64(await toB64(keypair.privateKey));
@ -107,7 +86,7 @@ export default function PairingMode() {
"urn:x-cast:pair-request",
message.senderId,
{
code: digits.join(""),
code: deviceCode,
},
);
} catch (e) {
@ -117,9 +96,7 @@ export default function PairingMode() {
const generateKeyPair = async () => {
await _sodium.ready;
const keypair = _sodium.crypto_box_keypair();
return keypair;
};
@ -132,9 +109,7 @@ export default function PairingMode() {
// then, we can decrypt this and store all the necessary info locally so we can play the collection slideshow.
let devicePayload = "";
try {
const encDastData = await castGateway.getCastData(
`${digits.join("")}`,
);
const encDastData = await castGateway.getCastData(`${deviceCode}`);
if (!encDastData) return;
devicePayload = encDastData;
} catch (e) {
@ -157,10 +132,8 @@ export default function PairingMode() {
const advertisePublicKey = async (publicKeyB64: string) => {
// hey client, we exist!
try {
await castGateway.registerDevice(
`${digits.join("")}`,
publicKeyB64,
);
const codeValue = await castGateway.registerDevice(publicKeyB64);
setDeviceCode(codeValue);
setCodePending(false);
} catch (e) {
// schedule re-try after 5 seconds
@ -175,7 +148,7 @@ export default function PairingMode() {
useEffect(() => {
console.log("useEffect for pairing called");
if (digits.length < 1 || !publicKeyB64 || !privateKeyB64) return;
if (deviceCode.length < 1 || !publicKeyB64 || !privateKeyB64) return;
const interval = setInterval(async () => {
console.log("polling for cast data");
@ -192,7 +165,7 @@ export default function PairingMode() {
return () => {
clearInterval(interval);
};
}, [digits, publicKeyB64, privateKeyB64, codePending]);
}, [deviceCode, publicKeyB64, privateKeyB64, codePending]);
useEffect(() => {
if (!publicKeyB64) return;
@ -235,7 +208,7 @@ export default function PairingMode() {
<EnteSpinner />
) : (
<>
<LargeType chars={digits} />
<LargeType chars={deviceCode.split("")} />
</>
)}
</div>

View file

@ -1,6 +1,5 @@
import { basename } from "@/next/file";
import log from "@/next/log";
import { type FileAndPath } from "@/next/types/file";
import type { CollectionMapping, Electron, ZipItem } from "@/next/types/ipc";
import { CustomError } from "@ente/shared/error";
import { isPromise } from "@ente/shared/utils";
@ -20,7 +19,7 @@ import {
getPublicCollectionUploaderName,
savePublicCollectionUploaderName,
} from "services/publicCollectionService";
import type { UploadItem } from "services/upload/types";
import type { FileAndPath, UploadItem } from "services/upload/types";
import type {
InProgressUpload,
SegregatedFinishedUploads,

View file

@ -22,7 +22,7 @@ import {
getFaceSearchEnabledStatus,
updateFaceSearchEnabledStatus,
} from "services/userService";
import { openLink } from "utils/common";
import { isInternalUser } from "utils/user";
export const MLSearchSettings = ({ open, onClose, onRootClose }) => {
const {
@ -255,8 +255,8 @@ function EnableFaceSearch({ open, onClose, enableFaceSearch, onRootClose }) {
}
function EnableMLSearch({ onClose, enableMlSearch, onRootClose }) {
const showDetails = () =>
openLink("https://ente.io/blog/desktop-ml-beta", true);
// const showDetails = () =>
// openLink("https://ente.io/blog/desktop-ml-beta", true);
return (
<Stack spacing={"4px"} py={"12px"}>
@ -269,25 +269,37 @@ function EnableMLSearch({ onClose, enableMlSearch, onRootClose }) {
<Box px={"8px"}>
{" "}
<Typography color="text.muted">
<Trans i18nKey={"ENABLE_ML_SEARCH_DESCRIPTION"} />
{/* <Trans i18nKey={"ENABLE_ML_SEARCH_DESCRIPTION"} /> */}
<p>
We're putting finishing touches, coming back soon!
</p>
<p>
<small>
Existing indexed faces will continue to show.
</small>
</p>
</Typography>
</Box>
<Stack px={"8px"} spacing={"8px"}>
<Button
color={"accent"}
size="large"
onClick={enableMlSearch}
>
{t("ENABLE")}
</Button>
<Button
{isInternalUser() && (
<Stack px={"8px"} spacing={"8px"}>
<Button
color={"accent"}
size="large"
onClick={enableMlSearch}
>
{t("ENABLE")}
</Button>
{/*
<Button
color="secondary"
size="large"
onClick={showDetails}
>
{t("ML_MORE_DETAILS")}
</Button>
</Stack>
>
{t("ML_MORE_DETAILS")}
</Button>
*/}
</Stack>
)}
</Stack>
</Stack>
);

View file

@ -370,7 +370,7 @@ export default function Gallery() {
syncWithRemote(false, true);
}, SYNC_INTERVAL_IN_MICROSECONDS);
if (electron) {
void clipService.setupOnFileUploadListener();
// void clipService.setupOnFileUploadListener();
electron.onMainWindowFocus(() => syncWithRemote(false, true));
}
};

View file

@ -86,7 +86,11 @@ export const syncEmbeddings = async () => {
allLocalFiles.forEach((file) => {
fileIdToKeyMap.set(file.id, file.key);
});
await cleanupDeletedEmbeddings(allLocalFiles, allEmbeddings);
await cleanupDeletedEmbeddings(
allLocalFiles,
allEmbeddings,
EMBEDDINGS_TABLE,
);
log.info(`Syncing embeddings localCount: ${allEmbeddings.length}`);
for (const model of models) {
let modelLastSinceTime = await getModelEmbeddingSyncTime(model);
@ -168,7 +172,11 @@ export const syncFileEmbeddings = async () => {
allLocalFiles.forEach((file) => {
fileIdToKeyMap.set(file.id, file.key);
});
await cleanupDeletedEmbeddings(allLocalFiles, allEmbeddings);
await cleanupDeletedEmbeddings(
allLocalFiles,
allEmbeddings,
FILE_EMBEDING_TABLE,
);
log.info(`Syncing embeddings localCount: ${allEmbeddings.length}`);
for (const model of models) {
let modelLastSinceTime = await getModelEmbeddingSyncTime(model);
@ -289,6 +297,7 @@ export const putEmbedding = async (
export const cleanupDeletedEmbeddings = async (
allLocalFiles: EnteFile[],
allLocalEmbeddings: Embedding[] | FileML[],
tableName: string,
) => {
const activeFileIds = new Set<number>();
allLocalFiles.forEach((file) => {
@ -302,6 +311,6 @@ export const cleanupDeletedEmbeddings = async (
log.info(
`cleanupDeletedEmbeddings embeddingsCount: ${allLocalEmbeddings.length} remainingEmbeddingsCount: ${remainingEmbeddings.length}`,
);
await localForage.setItem(EMBEDDINGS_TABLE, remainingEmbeddings);
await localForage.setItem(tableName, remainingEmbeddings);
}
};

View file

@ -1,4 +1,3 @@
import type { FileAndPath } from "@/next/types/file";
import type { ZipItem } from "@/next/types/ipc";
/**
@ -30,6 +29,17 @@ import type { ZipItem } from "@/next/types/ipc";
*/
export type UploadItem = File | FileAndPath | string | ZipItem;
/**
* When we are running in the context of our desktop app, we have access to the
* absolute path of {@link File} objects. This convenience type clubs these two
* bits of information, saving us the need to query the path again and again
* using the {@link getPathForFile} method of {@link Electron}.
*/
export interface FileAndPath {
file: File;
path: string;
}
/**
* The cases of {@link UploadItem} that apply when we're running in the
* context of our desktop app.

View file

@ -609,11 +609,25 @@ class UploadManager {
].includes(uploadResult)
) {
try {
let file: File | undefined;
const uploadItem =
uploadableItem.uploadItem ??
uploadableItem.livePhotoAssets.image;
if (uploadItem) {
if (uploadItem instanceof File) {
file = uploadItem;
} else if (
typeof uploadItem == "string" ||
Array.isArray(uploadItem)
) {
// path from desktop, no file object
} else {
file = uploadItem.file;
}
}
eventBus.emit(Events.FILE_UPLOADED, {
enteFile: decryptedFile,
localFile:
uploadableItem.uploadItem ??
uploadableItem.livePhotoAssets.image,
localFile: file,
});
} catch (e) {
log.warn("Ignoring error in fileUploaded handlers", e);

View file

@ -10,6 +10,7 @@ import mlIDbStorage, {
ML_SYNC_CONFIG_NAME,
ML_SYNC_JOB_CONFIG_NAME,
} from "utils/storage/mlIDbStorage";
import { isInternalUser } from "utils/user";
export async function getMLSyncJobConfig() {
return mlIDbStorage.getConfig(
@ -23,10 +24,15 @@ export async function getMLSyncConfig() {
}
export async function getMLSearchConfig() {
return mlIDbStorage.getConfig(
ML_SEARCH_CONFIG_NAME,
DEFAULT_ML_SEARCH_CONFIG,
);
if (isInternalUser()) {
return mlIDbStorage.getConfig(
ML_SEARCH_CONFIG_NAME,
DEFAULT_ML_SEARCH_CONFIG,
);
}
// Force disabled for everyone else while we finalize it to avoid redundant
// reindexing for users.
return DEFAULT_ML_SEARCH_CONFIG;
}
export async function updateMLSyncJobConfig(newConfig: JobConfig) {

View file

@ -93,40 +93,21 @@ export const writeStream = async (
const params = new URLSearchParams({ path });
const url = new URL(`stream://write?${params.toString()}`);
// TODO(MR): This doesn't currently work.
//
// Not sure what I'm doing wrong here; I've opened an issue upstream
// https://github.com/electron/electron/issues/41872
//
// A gist with a minimal reproduction
// https://gist.github.com/mnvr/e08d9f4876fb8400b7615347b4d268eb
//
// Meanwhile, write the complete body in one go (this'll eventually run into
// memory failures with large files - just a temporary stopgap to get the
// code to work).
/*
// The duplex parameter needs to be set to 'half' when streaming requests.
//
// Currently browsers, and specifically in our case, since this code runs
// only within our desktop (Electron) app, Chromium, don't support 'full'
// duplex mode (i.e. streaming both the request and the response).
// https://developer.chrome.com/docs/capabilities/web-apis/fetch-streaming-requests
const req = new Request(`stream://write${path}`, {
const req = new Request(url, {
// GET can't have a body
method: "POST",
body: stream,
// --@ts-expect-error TypeScript's libdom.d.ts does not include the
// @ts-expect-error TypeScript's libdom.d.ts does not include the
// "duplex" parameter, e.g. see
// https://github.com/node-fetch/node-fetch/issues/1769.
duplex: "half",
});
*/
const req = new Request(url, {
method: "POST",
body: await new Response(stream).blob(),
});
const res = await fetch(req);
if (!res.ok)

View file

@ -144,7 +144,13 @@ class MLIDbStorage {
.objectStore("configs")
.add(DEFAULT_ML_SEARCH_CONFIG, ML_SEARCH_CONFIG_NAME);
}
/*
This'll go in version 5. Note that version 4 was never released,
but it was in main for a while, so we'll just skip it to avoid
breaking the upgrade path for people who ran the mainline.
*/
if (oldVersion < 4) {
/*
try {
await tx
.objectStore("configs")
@ -163,8 +169,8 @@ class MLIDbStorage {
// the shipped implementation should have a more
// deterministic migration.
}
*/
}
log.info(
`ML DB upgraded from version ${oldVersion} to version ${newVersion}`,
);

View file

@ -239,7 +239,7 @@
"ENABLE_MAPS": "Habilitar mapa?",
"ENABLE_MAP": "Habilitar mapa",
"DISABLE_MAPS": "Desativar Mapas?",
"ENABLE_MAP_DESCRIPTION": "Isto mostrará suas fotos em um mapa do mundo.</p> <p>Este mapa é hospedado pelo <a>OpenStreetMap <a>, e os exatos locais de suas fotos nunca são compartilhados.</p> <p>Você pode desativar esse recurso a qualquer momento nas Configurações.</p>",
"ENABLE_MAP_DESCRIPTION": "</p>Isto mostrará suas fotos em um mapa do mundo.</p> <p>Este mapa é hospedado pelo <a>OpenStreetMap<a>, e os exatos locais de suas fotos nunca são compartilhados.</p> <p>Você pode desativar esse recurso a qualquer momento nas Configurações.</p>",
"DISABLE_MAP_DESCRIPTION": "<p>Isto irá desativar a exibição de suas fotos em um mapa mundial.</p> <p>Você pode ativar este recurso a qualquer momento nas Configurações.</p>",
"DISABLE_MAP": "Desabilitar mapa",
"DETAILS": "Detalhes",
@ -380,14 +380,14 @@
"LINK_EXPIRED_MESSAGE": "Este link expirou ou foi desativado!",
"MANAGE_LINK": "Gerenciar link",
"LINK_TOO_MANY_REQUESTS": "Desculpe, este álbum foi visualizado em muitos dispositivos!",
"FILE_DOWNLOAD": "Permitir transferências",
"FILE_DOWNLOAD": "Permitir downloads",
"LINK_PASSWORD_LOCK": "Bloqueio de senha",
"PUBLIC_COLLECT": "Permitir adicionar fotos",
"LINK_DEVICE_LIMIT": "Limite de dispositivos",
"NO_DEVICE_LIMIT": "Nenhum",
"LINK_EXPIRY": "Expiração do link",
"NEVER": "Nunca",
"DISABLE_FILE_DOWNLOAD": "Desabilitar transferência",
"DISABLE_FILE_DOWNLOAD": "Desabilitar download",
"DISABLE_FILE_DOWNLOAD_MESSAGE": "<p>Tem certeza de que deseja desativar o botão de download para arquivos?</p><p>Os visualizadores ainda podem capturar imagens da tela ou salvar uma cópia de suas fotos usando ferramentas externas.</p>",
"SHARED_USING": "Compartilhar usando ",
"SHARING_REFERRAL_CODE": "Use o código <strong>{{referralCode}}</strong> para obter 10 GB de graça",
@ -408,8 +408,8 @@
"STOP_ALL_UPLOADS_MESSAGE": "Tem certeza que deseja parar todos os envios em andamento?",
"STOP_UPLOADS_HEADER": "Parar envios?",
"YES_STOP_UPLOADS": "Sim, parar envios",
"STOP_DOWNLOADS_HEADER": "Parar transferências?",
"YES_STOP_DOWNLOADS": "Sim, parar transferências",
"STOP_DOWNLOADS_HEADER": "Parar downloads?",
"YES_STOP_DOWNLOADS": "Sim, parar downloads",
"STOP_ALL_DOWNLOADS_MESSAGE": "Tem certeza que deseja parar todos as transferências em andamento?",
"albums_one": "1 Álbum",
"albums_other": "{{count, number}} Álbuns",
@ -556,8 +556,8 @@
"SELECT_COLLECTION": "Selecionar álbum",
"PIN_ALBUM": "Fixar álbum",
"UNPIN_ALBUM": "Desafixar álbum",
"DOWNLOAD_COMPLETE": "Transferência concluída",
"DOWNLOADING_COLLECTION": "Transferindo {{name}}",
"DOWNLOAD_COMPLETE": "Download concluído",
"DOWNLOADING_COLLECTION": "Fazendo download de {{name}}",
"DOWNLOAD_FAILED": "Falha ao baixar",
"DOWNLOAD_PROGRESS": "{{progress.current}} / {{progress.total}} arquivos",
"CHRISTMAS": "Natal",
@ -622,6 +622,6 @@
"TRY_AGAIN": "Tente novamente",
"PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "Siga os passos do seu navegador para continuar acessando.",
"LOGIN_WITH_PASSKEY": "Entrar com a chave de acesso",
"autogenerated_first_album_name": "",
"autogenerated_default_album_name": ""
"autogenerated_first_album_name": "Meu Primeiro Álbum",
"autogenerated_default_album_name": "Novo Álbum"
}

View file

@ -622,6 +622,6 @@
"TRY_AGAIN": "重试",
"PASSKEY_FOLLOW_THE_STEPS_FROM_YOUR_BROWSER": "按照浏览器中提示的步骤继续登录。",
"LOGIN_WITH_PASSKEY": "使用通行密钥来登录",
"autogenerated_first_album_name": "",
"autogenerated_default_album_name": ""
"autogenerated_first_album_name": "我的第一个相册",
"autogenerated_default_album_name": "新建相册"
}

View file

@ -1,36 +0,0 @@
/*
* ElectronFile is a custom interface that is used to represent
* any file on disk as a File-like object in the Electron desktop app.
*
* This was added to support the auto-resuming of failed uploads
* which needed absolute paths to the files which the
* normal File interface does not provide.
*/
export interface ElectronFile {
name: string;
path: string;
size: number;
lastModified: number;
stream: () => Promise<ReadableStream<Uint8Array>>;
blob: () => Promise<Blob>;
arrayBuffer: () => Promise<Uint8Array>;
}
/**
* When we are running in the context of our desktop app, we have access to the
* absolute path of {@link File} objects. This convenience type clubs these two
* bits of information, saving us the need to query the path again and again
* using the {@link getPathForFile} method of {@link Electron}.
*/
export interface FileAndPath {
file: File;
path: string;
}
export interface EventQueueItem {
type: "upload" | "trash";
folderPath: string;
collectionName?: string;
paths?: string[];
files?: ElectronFile[];
}

View file

@ -58,11 +58,14 @@ class CastGateway {
return resp.data.publicKey;
}
public async registerDevice(code: string, publicKey: string) {
await HTTPService.post(getEndpoint() + "/cast/device-info/", {
deviceCode: `${code}`,
publicKey: publicKey,
});
public async registerDevice(publicKey: string): Promise<string> {
const resp = await HTTPService.post(
getEndpoint() + "/cast/device-info/",
{
publicKey: publicKey,
},
);
return resp.data.deviceCode;
}
public async publishCastPayload(