[desktop] Add an OPFS based cache (#1429)
See the extensive documentation in `blob-cache.ts` for the why and the how. Basic sanity testing has been done for the normal flows; the beta ML flows still need to be tested.
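As a quick orientation, here is a minimal sketch of how callers use the new API (the exports are the real ones from `blob-cache.ts`; the `fetchThumb` producer is a hypothetical stand-in):

    import { cachedOrNew, openCache } from "@/next/blob-cache";

    // Direct get/put against a namespaced cache.
    const showThumb = async (fileID: number, fetchThumb: () => Promise<Blob>) => {
        const cache = await openCache("thumbs");
        let blob = await cache.get(fileID.toString());
        if (!blob) {
            blob = await fetchThumb();
            await cache.put(fileID.toString(), blob);
        }
        // Or, equivalently, use the get-or-create helper:
        // blob = await cachedOrNew("thumbs", fileID.toString(), fetchThumb);
        return URL.createObjectURL(blob);
    };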
Commit: ba5383789a

18 changed files with 542 additions and 504 deletions
@@ -103,6 +103,8 @@ const logStartupBanner = () => {
 * Note that increasing the disk cache size does not guarantee that Chromium
 * will respect it verbatim; it uses its own heuristics atop this hint.
 * https://superuser.com/questions/378991/what-is-chrome-default-cache-size-limit/1577693#1577693
 *
 * See also: [Note: Caching files].
 */
const increaseDiskCache = () =>
    app.commandLine.appendSwitch(
@@ -1,6 +1,6 @@
import { cachedOrNew } from "@/next/blob-cache";
import { ensureLocalUser } from "@/next/local-user";
import log from "@/next/log";
import { cached } from "@ente/shared/storage/cache";
import { Skeleton, styled } from "@mui/material";
import { Legend } from "components/PhotoViewer/styledComponents/Legend";
import { t } from "i18next";
@@ -61,8 +61,8 @@ export const PeopleList = React.memo((props: PeopleListProps) => {
            }
        >
            <FaceCropImageView
                url={person.displayImageUrl}
                faceId={person.displayFaceId}
                cacheKey={person.faceCropCacheKey}
            />
        </FaceChip>
    ))}
@@ -141,7 +141,7 @@ export function UnidentifiedFaces(props: {
        <FaceChip key={index}>
            <FaceCropImageView
                faceId={face.id}
                url={face.crop?.imageUrl}
                cacheKey={face.crop?.cacheKey}
            />
        </FaceChip>
    ))}
@@ -151,56 +151,37 @@ export function UnidentifiedFaces(props: {
}

interface FaceCropImageViewProps {
    url: string;
    faceId: string;
    cacheKey?: string;
}

const FaceCropImageView: React.FC<FaceCropImageViewProps> = ({
    url,
    faceId,
    cacheKey,
}) => {
    const [objectURL, setObjectURL] = useState<string | undefined>();

    useEffect(() => {
        let didCancel = false;

        async function loadImage() {
            let blob: Blob;
            if (!url) {
                blob = undefined;
            } else {
        if (cacheKey) {
            cachedOrNew("face-crops", cacheKey, async () => {
                const user = await ensureLocalUser();
                blob = await cached("face-crops", url, async () => {
                    try {
                        log.debug(
                            () =>
                                `ImageCacheView: regenerate face crop for ${faceId}`,
                        );
                        return machineLearningService.regenerateFaceCrop(
                            user.token,
                            user.id,
                            faceId,
                        );
                    } catch (e) {
                        log.error(
                            "ImageCacheView: regenerate face crop failed",
                            e,
                        );
                    }
                });
            }

            if (didCancel) return;
            setObjectURL(blob ? URL.createObjectURL(blob) : undefined);
        }

        loadImage();
                return machineLearningService.regenerateFaceCrop(
                    user.token,
                    user.id,
                    faceId,
                );
            }).then((blob) => {
                if (!didCancel) setObjectURL(URL.createObjectURL(blob));
            });
        } else setObjectURL(undefined);

        return () => {
            didCancel = true;
            if (objectURL) URL.revokeObjectURL(objectURL);
        };
    }, [url, faceId]);
    }, [faceId, cacheKey]);

    return objectURL ? (
        <img src={objectURL} />
@@ -10,14 +10,11 @@ export class PhotosDownloadClient implements DownloadClient {
        private token: string,
        private timeout: number,
    ) {}

    updateTokens(token: string) {
        this.token = token;
    }

    updateTimeout(timeout: number) {
        this.timeout = timeout;
    }

    async downloadThumbnail(file: EnteFile): Promise<Uint8Array> {
        if (!this.token) {
            throw Error(CustomError.TOKEN_MISSING);
@@ -20,10 +20,6 @@ export class PublicAlbumsDownloadClient implements DownloadClient {
        this.passwordToken = passwordToken;
    }

    updateTimeout(timeout: number) {
        this.timeout = timeout;
    }

    downloadThumbnail = async (file: EnteFile) => {
        if (!this.token) {
            throw Error(CustomError.TOKEN_MISSING);
@@ -1,13 +1,10 @@
import { openCache, type BlobCache } from "@/next/blob-cache";
import log from "@/next/log";
import { APPS } from "@ente/shared/apps/constants";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { CustomError } from "@ente/shared/error";
import { Events, eventBus } from "@ente/shared/events";
import {
    CacheStorageService,
    type LimitedCache,
} from "@ente/shared/storage/cache";
import { Remote } from "comlink";
import { FILE_TYPE } from "constants/file";
import isElectron from "is-electron";
@@ -16,7 +13,6 @@ import {
    generateStreamFromArrayBuffer,
    getRenderableFileURL,
} from "utils/file";
import { isInternalUser } from "utils/user";
import { PhotosDownloadClient } from "./clients/photos";
import { PublicAlbumsDownloadClient } from "./clients/publicAlbums";
@@ -44,7 +40,6 @@ export type OnDownloadProgress = (event: {

export interface DownloadClient {
    updateTokens: (token: string, passwordToken?: string) => void;
    updateTimeout: (timeout: number) => void;
    downloadThumbnail: (
        file: EnteFile,
        timeout?: number,
@@ -59,9 +54,14 @@ export interface DownloadClient {
class DownloadManagerImpl {
    private ready: boolean = false;
    private downloadClient: DownloadClient;
    private thumbnailCache?: LimitedCache;
    // disk cache is only available on electron
    private diskFileCache?: LimitedCache;
    /** Local cache for thumbnails. Might not be available. */
    private thumbnailCache?: BlobCache;
    /**
     * Local cache for the files themselves.
     *
     * Only available when we're running in the desktop app.
     */
    private fileCache?: BlobCache;
    private cryptoWorker: Remote<DedicatedCryptoWorker>;

    private fileObjectURLPromises = new Map<number, Promise<SourceURLs>>();
@@ -75,23 +75,35 @@ class DownloadManagerImpl {
    async init(
        app: APPS,
        tokens?: { token: string; passwordToken?: string } | { token: string },
        timeout?: number,
    ) {
        try {
            if (this.ready) {
                log.info("DownloadManager already initialized");
                return;
            }
            this.downloadClient = createDownloadClient(app, tokens, timeout);
            this.thumbnailCache = await openThumbnailCache();
            this.diskFileCache = isElectron() && (await openDiskFileCache());
            this.cryptoWorker = await ComlinkCryptoWorker.getInstance();
            this.ready = true;
            eventBus.on(Events.LOGOUT, this.logoutHandler.bind(this), this);
        } catch (e) {
            log.error("DownloadManager init failed", e);
            throw e;
        if (this.ready) {
            log.info("DownloadManager already initialized");
            return;
        }
        this.downloadClient = createDownloadClient(app, tokens);
        try {
            this.thumbnailCache = await openCache("thumbs");
        } catch (e) {
            log.error(
                "Failed to open thumbnail cache, will continue without it",
                e,
            );
        }
        try {
            if (isElectron()) this.fileCache = await openCache("files");
        } catch (e) {
            log.error("Failed to open file cache, will continue without it", e);
        }
        this.cryptoWorker = await ComlinkCryptoWorker.getInstance();
        this.ready = true;
        eventBus.on(Events.LOGOUT, this.logoutHandler.bind(this), this);
    }

    private ensureInitialized() {
        if (!this.ready)
            throw new Error(
                "Attempting to use an uninitialized download manager",
            );
    }

    private async logoutHandler() {
@@ -119,44 +131,10 @@ class DownloadManagerImpl {
        this.cryptoWorker = cryptoWorker;
    }

    updateTimeout(timeout: number) {
        this.downloadClient.updateTimeout(timeout);
    }

    setProgressUpdater(progressUpdater: (value: Map<number, number>) => void) {
        this.progressUpdater = progressUpdater;
    }

    private async getCachedThumbnail(fileID: number) {
        try {
            const cacheResp: Response = await this.thumbnailCache?.match(
                fileID.toString(),
            );

            if (cacheResp) {
                return new Uint8Array(await cacheResp.arrayBuffer());
            }
        } catch (e) {
            log.error("failed to get cached thumbnail", e);
            throw e;
        }
    }
    private async getCachedFile(file: EnteFile): Promise<Response> {
        try {
            if (!this.diskFileCache) {
                return null;
            }
            const cacheResp: Response = await this.diskFileCache?.match(
                file.id.toString(),
                { sizeInBytes: file.info?.fileSize },
            );
            return cacheResp?.clone();
        } catch (e) {
            log.error("failed to get cached file", e);
            throw e;
        }
    }

    private downloadThumb = async (file: EnteFile) => {
        const encrypted = await this.downloadClient.downloadThumbnail(file);
        const decrypted = await this.cryptoWorker.decryptThumbnail(
@@ -168,37 +146,21 @@ class DownloadManagerImpl {
    };

    async getThumbnail(file: EnteFile, localOnly = false) {
        try {
            if (!this.ready) {
                throw Error(CustomError.DOWNLOAD_MANAGER_NOT_READY);
            }
            const cachedThumb = await this.getCachedThumbnail(file.id);
            if (cachedThumb) {
                return cachedThumb;
            }
            if (localOnly) {
                return null;
            }
            const thumb = await this.downloadThumb(file);
        this.ensureInitialized();

            this.thumbnailCache
                ?.put(file.id.toString(), new Response(thumb))
                .catch((e) => {
                    log.error("thumb cache put failed", e);
                    // TODO: handle storage full exception.
                });
            return thumb;
        } catch (e) {
            log.error("getThumbnail failed", e);
            throw e;
        }
        const key = file.id.toString();
        const cached = await this.thumbnailCache.get(key);
        if (cached) return new Uint8Array(await cached.arrayBuffer());
        if (localOnly) return null;

        const thumb = await this.downloadThumb(file);
        this.thumbnailCache?.put(key, new Blob([thumb]));
        return thumb;
    }

    async getThumbnailForPreview(file: EnteFile, localOnly = false) {
        this.ensureInitialized();
        try {
            if (!this.ready) {
                throw Error(CustomError.DOWNLOAD_MANAGER_NOT_READY);
            }
            if (!this.thumbnailObjectURLPromises.has(file.id)) {
                const thumbPromise = this.getThumbnail(file, localOnly);
                const thumbURLPromise = thumbPromise.then(
@@ -223,10 +185,8 @@
        file: EnteFile,
        forceConvert = false,
    ): Promise<SourceURLs> => {
        this.ensureInitialized();
        try {
            if (!this.ready) {
                throw Error(CustomError.DOWNLOAD_MANAGER_NOT_READY);
            }
        const getFileForPreviewPromise = async () => {
            const fileBlob = await new Response(
                await this.getFile(file, true),
@@ -261,10 +221,8 @@
        file: EnteFile,
        cacheInMemory = false,
    ): Promise<ReadableStream<Uint8Array>> {
        this.ensureInitialized();
        try {
            if (!this.ready) {
                throw Error(CustomError.DOWNLOAD_MANAGER_NOT_READY);
            }
        const getFilePromise = async (): Promise<SourceURLs> => {
            const fileStream = await this.downloadFile(file);
            const fileBlob = await new Response(fileStream).blob();
@@ -298,191 +256,166 @@
    private async downloadFile(
        file: EnteFile,
    ): Promise<ReadableStream<Uint8Array>> {
        try {
            log.info(`download attempted for fileID:${file.id}`);
            const onDownloadProgress = this.trackDownloadProgress(
                file.id,
                file.info?.fileSize,
            );
            if (
                file.metadata.fileType === FILE_TYPE.IMAGE ||
                file.metadata.fileType === FILE_TYPE.LIVE_PHOTO
            ) {
                let encrypted = await this.getCachedFile(file);
                if (!encrypted) {
                    encrypted = new Response(
                        await this.downloadClient.downloadFile(
                            file,
                            onDownloadProgress,
                        ),
                    );
                    if (this.diskFileCache) {
                        this.diskFileCache
                            .put(file.id.toString(), encrypted.clone())
                            .catch((e) => {
                                log.error("file cache put failed", e);
                                // TODO: handle storage full exception.
                            });
                    }
                }
                this.clearDownloadProgress(file.id);
                try {
                    const decrypted = await this.cryptoWorker.decryptFile(
                        new Uint8Array(await encrypted.arrayBuffer()),
                        await this.cryptoWorker.fromB64(
                            file.file.decryptionHeader,
                        ),
                        file.key,
                    );
                    return generateStreamFromArrayBuffer(decrypted);
                } catch (e) {
                    if (e.message === CustomError.PROCESSING_FAILED) {
                        log.error(
                            `Failed to process file with fileID:${file.id}, localID: ${file.metadata.localID}, version: ${file.metadata.version}, deviceFolder:${file.metadata.deviceFolder}`,
                            e,
                        );
                    }
                    throw e;
                }
        log.info(`download attempted for file id ${file.id}`);

        const onDownloadProgress = this.trackDownloadProgress(
            file.id,
            file.info?.fileSize,
        );

        const cacheKey = file.id.toString();

        if (
            file.metadata.fileType === FILE_TYPE.IMAGE ||
            file.metadata.fileType === FILE_TYPE.LIVE_PHOTO
        ) {
            const cachedBlob = await this.fileCache?.get(cacheKey);
            let encryptedArrayBuffer = await cachedBlob?.arrayBuffer();
            if (!encryptedArrayBuffer) {
                const array = await this.downloadClient.downloadFile(
                    file,
                    onDownloadProgress,
                );
                encryptedArrayBuffer = array.buffer;
                this.fileCache?.put(cacheKey, new Blob([encryptedArrayBuffer]));
            }

            let resp: Response = await this.getCachedFile(file);
            if (!resp) {
                resp = await this.downloadClient.downloadFileStream(file);
                if (this.diskFileCache) {
                    this.diskFileCache
                        .put(file.id.toString(), resp.clone())
                        .catch((e) => {
                            log.error("file cache put failed", e);
                        });
            this.clearDownloadProgress(file.id);
            try {
                const decrypted = await this.cryptoWorker.decryptFile(
                    new Uint8Array(encryptedArrayBuffer),
                    await this.cryptoWorker.fromB64(file.file.decryptionHeader),
                    file.key,
                );
                return generateStreamFromArrayBuffer(decrypted);
            } catch (e) {
                if (e.message === CustomError.PROCESSING_FAILED) {
                    log.error(
                        `Failed to process file with fileID:${file.id}, localID: ${file.metadata.localID}, version: ${file.metadata.version}, deviceFolder:${file.metadata.deviceFolder}`,
                        e,
                    );
                }
                throw e;
            }
            const reader = resp.body.getReader();

            const contentLength = +resp.headers.get("Content-Length") ?? 0;
            let downloadedBytes = 0;

            const stream = new ReadableStream({
                start: async (controller) => {
                    try {
                        const decryptionHeader =
                            await this.cryptoWorker.fromB64(
                                file.file.decryptionHeader,
                            );
                        const fileKey = await this.cryptoWorker.fromB64(
                            file.key,
                        );
                        const { pullState, decryptionChunkSize } =
                            await this.cryptoWorker.initChunkDecryption(
                                decryptionHeader,
                                fileKey,
                            );
                        let data = new Uint8Array();
                        // The following function handles each data chunk
                        const push = () => {
                            // "done" is a Boolean and value a "Uint8Array"
                            reader.read().then(async ({ done, value }) => {
                                try {
                                    // Is there more data to read?
                                    if (!done) {
                                        downloadedBytes += value.byteLength;
                                        onDownloadProgress({
                                            loaded: downloadedBytes,
                                            total: contentLength,
                                        });
                                        const buffer = new Uint8Array(
                                            data.byteLength + value.byteLength,
                                        );
                                        buffer.set(new Uint8Array(data), 0);
                                        buffer.set(
                                            new Uint8Array(value),
                                            data.byteLength,
                                        );
                                        if (
                                            buffer.length > decryptionChunkSize
                                        ) {
                                            const fileData = buffer.slice(
                                                0,
                                                decryptionChunkSize,
                                            );
                                            try {
                                                const { decryptedData } =
                                                    await this.cryptoWorker.decryptFileChunk(
                                                        fileData,
                                                        pullState,
                                                    );
                                                controller.enqueue(
                                                    decryptedData,
                                                );
                                                data =
                                                    buffer.slice(
                                                        decryptionChunkSize,
                                                    );
                                            } catch (e) {
                                                if (
                                                    e.message ===
                                                    CustomError.PROCESSING_FAILED
                                                ) {
                                                    log.error(
                                                        `Failed to process file ${file.id} from localID: ${file.metadata.localID} version: ${file.metadata.version} deviceFolder:${file.metadata.deviceFolder}`,
                                                        e,
                                                    );
                                                }
                                                throw e;
                                            }
                                        } else {
                                            data = buffer;
                                        }
                                        push();
                                    } else {
                                        if (data) {
                                            try {
                                                const { decryptedData } =
                                                    await this.cryptoWorker.decryptFileChunk(
                                                        data,
                                                        pullState,
                                                    );
                                                controller.enqueue(
                                                    decryptedData,
                                                );
                                                data = null;
                                            } catch (e) {
                                                if (
                                                    e.message ===
                                                    CustomError.PROCESSING_FAILED
                                                ) {
                                                    log.error(
                                                        `Failed to process file ${file.id} from localID: ${file.metadata.localID} version: ${file.metadata.version} deviceFolder:${file.metadata.deviceFolder}`,
                                                        e,
                                                    );
                                                }
                                                throw e;
                                            }
                                        }
                                        controller.close();
                                    }
                                } catch (e) {
                                    log.error(
                                        "Failed to process file chunk",
                                        e,
                                    );
                                    controller.error(e);
                                }
                            });
                        };

                        push();
                    } catch (e) {
                        log.error("Failed to process file stream", e);
                        controller.error(e);
                    }
                },
            });
            return stream;
        } catch (e) {
            log.error("Failed to download file", e);
            throw e;
        }

        const cachedBlob = await this.fileCache?.get(cacheKey);
        let res: Response;
        if (cachedBlob) res = new Response(cachedBlob);
        else {
            res = await this.downloadClient.downloadFileStream(file);
            this.fileCache?.put(cacheKey, await res.blob());
        }
        const reader = res.body.getReader();

        const contentLength = +res.headers.get("Content-Length") ?? 0;
        let downloadedBytes = 0;

        const stream = new ReadableStream({
            start: async (controller) => {
                try {
                    const decryptionHeader = await this.cryptoWorker.fromB64(
                        file.file.decryptionHeader,
                    );
                    const fileKey = await this.cryptoWorker.fromB64(file.key);
                    const { pullState, decryptionChunkSize } =
                        await this.cryptoWorker.initChunkDecryption(
                            decryptionHeader,
                            fileKey,
                        );
                    let data = new Uint8Array();
                    // The following function handles each data chunk
                    const push = () => {
                        // "done" is a Boolean and value a "Uint8Array"
                        reader.read().then(async ({ done, value }) => {
                            try {
                                // Is there more data to read?
                                if (!done) {
                                    downloadedBytes += value.byteLength;
                                    onDownloadProgress({
                                        loaded: downloadedBytes,
                                        total: contentLength,
                                    });
                                    const buffer = new Uint8Array(
                                        data.byteLength + value.byteLength,
                                    );
                                    buffer.set(new Uint8Array(data), 0);
                                    buffer.set(
                                        new Uint8Array(value),
                                        data.byteLength,
                                    );
                                    if (buffer.length > decryptionChunkSize) {
                                        const fileData = buffer.slice(
                                            0,
                                            decryptionChunkSize,
                                        );
                                        try {
                                            const { decryptedData } =
                                                await this.cryptoWorker.decryptFileChunk(
                                                    fileData,
                                                    pullState,
                                                );
                                            controller.enqueue(decryptedData);
                                            data =
                                                buffer.slice(
                                                    decryptionChunkSize,
                                                );
                                        } catch (e) {
                                            if (
                                                e.message ===
                                                CustomError.PROCESSING_FAILED
                                            ) {
                                                log.error(
                                                    `Failed to process file ${file.id} from localID: ${file.metadata.localID} version: ${file.metadata.version} deviceFolder:${file.metadata.deviceFolder}`,
                                                    e,
                                                );
                                            }
                                            throw e;
                                        }
                                    } else {
                                        data = buffer;
                                    }
                                    push();
                                } else {
                                    if (data) {
                                        try {
                                            const { decryptedData } =
                                                await this.cryptoWorker.decryptFileChunk(
                                                    data,
                                                    pullState,
                                                );
                                            controller.enqueue(decryptedData);
                                            data = null;
                                        } catch (e) {
                                            if (
                                                e.message ===
                                                CustomError.PROCESSING_FAILED
                                            ) {
                                                log.error(
                                                    `Failed to process file ${file.id} from localID: ${file.metadata.localID} version: ${file.metadata.version} deviceFolder:${file.metadata.deviceFolder}`,
                                                    e,
                                                );
                                            }
                                            throw e;
                                        }
                                    }
                                    controller.close();
                                }
                            } catch (e) {
                                log.error("Failed to process file chunk", e);
                                controller.error(e);
                            }
                        });
                    };

                    push();
                } catch (e) {
                    log.error("Failed to process file stream", e);
                    controller.error(e);
                }
            },
        });

        return stream;
    }

    trackDownloadProgress = (fileID: number, fileSize: number) => {
@@ -515,43 +448,11 @@ const DownloadManager = new DownloadManagerImpl();

export default DownloadManager;

async function openThumbnailCache() {
    try {
        return await CacheStorageService.open("thumbs");
    } catch (e) {
        log.error("Failed to open thumbnail cache", e);
        if (isInternalUser()) {
            throw e;
        } else {
            return null;
        }
    }
}

async function openDiskFileCache() {
    try {
        if (!isElectron()) {
            throw Error(CustomError.NOT_AVAILABLE_ON_WEB);
        }
        return await CacheStorageService.open("files");
    } catch (e) {
        log.error("Failed to open file cache", e);
        if (isInternalUser()) {
            throw e;
        } else {
            return null;
        }
    }
}

function createDownloadClient(
    app: APPS,
    tokens?: { token: string; passwordToken?: string } | { token: string },
    timeout?: number,
): DownloadClient {
    if (!timeout) {
        timeout = 300000; // 5 minutes
    }
    const timeout = 300000; // 5 minutes
    if (app === APPS.ALBUMS) {
        if (!tokens) {
            tokens = { token: undefined, passwordToken: undefined };
@@ -1,3 +1,4 @@
import { openCache } from "@/next/blob-cache";
import log from "@/next/log";
import {
    DetectedFace,
@@ -14,7 +15,6 @@ import {
    getOriginalImageBitmap,
    isDifferentOrOld,
} from "utils/machineLearning";
import { storeFaceCrop } from "utils/machineLearning/faceCrop";
import mlIDbStorage from "utils/storage/mlIDbStorage";
import ReaderService from "./readerService";
@@ -225,23 +225,15 @@ class FaceService {
            face.detection,
            syncContext.config.faceCrop,
        );
        try {
            face.crop = await storeFaceCrop(
                face.id,
                faceCrop,
                syncContext.config.faceCrop.blobOptions,
            );
        } catch (e) {
            // TODO(MR): Temporarily ignoring errors about failing cache puts
            // when using a custom scheme in Electron. Needs an alternative
            // approach, perhaps OPFS.
            console.error(
                "Ignoring error when caching face crop, the face crop will not be available",
                e,
            );
        }
        const blob = await imageBitmapToBlob(faceCrop.image);

        const blobOptions = syncContext.config.faceCrop.blobOptions;
        const blob = await imageBitmapToBlob(faceCrop.image, blobOptions);

        const cache = await openCache("face-crops");
        await cache.put(face.id, blob);

        faceCrop.image.close();

        return blob;
    }
@@ -62,7 +62,7 @@ class PeopleService {
            (a, b) => b.detection.probability - a.detection.probability,
        );

        if (personFace && !personFace.crop?.imageUrl) {
        if (personFace && !personFace.crop?.cacheKey) {
            const file = await getLocalFile(personFace.fileId);
            const imageBitmap = await getOriginalImageBitmap(file);
            await FaceService.saveFaceCrop(
@@ -76,7 +76,7 @@ class PeopleService {
            id: index,
            files: faces.map((f) => f.fileId),
            displayFaceId: personFace?.id,
            displayImageUrl: personFace?.crop?.imageUrl,
            faceCropCacheKey: personFace?.crop?.cacheKey,
        };

        await mlIDbStorage.putPerson(person);
@@ -90,7 +90,7 @@ export interface FaceCrop {
}

export interface StoredFaceCrop {
    imageUrl: string;
    cacheKey: string;
    imageBox: Box;
}
@@ -128,7 +128,7 @@ export interface Person {
    name?: string;
    files: Array<number>;
    displayFaceId?: string;
    displayImageUrl?: string;
    faceCropCacheKey?: string;
}

export interface MlFileData {
@@ -440,7 +440,7 @@ export async function getRenderableImage(fileName: string, imageBlob: Blob) {
    }

    if (!isElectron()) {
        throw Error(CustomError.NOT_AVAILABLE_ON_WEB);
        throw new Error("not available on web");
    }
    log.info(
        `RawConverter called for ${fileName}-${convertBytesToHumanReadable(
@@ -1,12 +1,5 @@
import { CacheStorageService } from "@ente/shared/storage/cache";
import { BlobOptions } from "types/image";
import {
    FaceAlignment,
    FaceCrop,
    FaceCropConfig,
    StoredFaceCrop,
} from "types/machineLearning";
import { cropWithRotation, imageBitmapToBlob } from "utils/image";
import { FaceAlignment, FaceCrop, FaceCropConfig } from "types/machineLearning";
import { cropWithRotation } from "utils/image";
import { enlargeBox } from ".";
import { Box } from "../../../thirdparty/face-api/classes";
@@ -15,9 +8,14 @@ export function getFaceCrop(
    alignment: FaceAlignment,
    config: FaceCropConfig,
): FaceCrop {
    const box = getAlignedFaceBox(alignment);
    const alignmentBox = new Box({
        x: alignment.center.x - alignment.size / 2,
        y: alignment.center.y - alignment.size / 2,
        width: alignment.size,
        height: alignment.size,
    }).round();
    const scaleForPadding = 1 + config.padding * 2;
    const paddedBox = enlargeBox(box, scaleForPadding).round();
    const paddedBox = enlargeBox(alignmentBox, scaleForPadding).round();
    const faceImageBitmap = cropWithRotation(imageBitmap, paddedBox, 0, {
        width: config.maxSize,
        height: config.maxSize,
@@ -28,36 +26,3 @@ export function getFaceCrop(
        imageBox: paddedBox,
    };
}

function getAlignedFaceBox(alignment: FaceAlignment) {
    return new Box({
        x: alignment.center.x - alignment.size / 2,
        y: alignment.center.y - alignment.size / 2,
        width: alignment.size,
        height: alignment.size,
    }).round();
}

export async function storeFaceCrop(
    faceId: string,
    faceCrop: FaceCrop,
    blobOptions: BlobOptions,
): Promise<StoredFaceCrop> {
    const faceCropBlob = await imageBitmapToBlob(faceCrop.image, blobOptions);
    return storeFaceCropForBlob(faceId, faceCrop.imageBox, faceCropBlob);
}

async function storeFaceCropForBlob(
    faceId: string,
    imageBox: Box,
    faceCropBlob: Blob,
) {
    const faceCropUrl = `/${faceId}`;
    const faceCropResponse = new Response(faceCropBlob);
    const faceCropCache = await CacheStorageService.open("face-crops");
    await faceCropCache.put(faceCropUrl, faceCropResponse);
    return {
        imageUrl: faceCropUrl,
        imageBox: imageBox,
    };
}
@@ -1,5 +1,4 @@
import log from "@/next/log";
import { cached } from "@ente/shared/storage/cache";
import { FILE_TYPE } from "constants/file";
import PQueue from "p-queue";
import DownloadManager from "services/download";
@@ -143,22 +142,9 @@ async function getOriginalConvertedFile(file: EnteFile, queue?: PQueue) {
    }
}

export async function getOriginalImageBitmap(
    file: EnteFile,
    queue?: PQueue,
    useCache: boolean = false,
) {
    let fileBlob;

    if (useCache) {
        fileBlob = await cached("files", file.id.toString(), () => {
            return getOriginalConvertedFile(file, queue);
        });
    } else {
        fileBlob = await getOriginalConvertedFile(file, queue);
    }
export async function getOriginalImageBitmap(file: EnteFile, queue?: PQueue) {
    const fileBlob = await getOriginalConvertedFile(file, queue);
    log.info("[MLService] Got file: ", file.id.toString());

    return getImageBlobBitmap(fileBlob);
}
@@ -83,6 +83,29 @@ class MLIDbStorage {
            log.error("ML Indexed DB blocking");
        },
        async upgrade(db, oldVersion, newVersion, tx) {
            let wasMLSearchEnabled = false;
            try {
                const searchConfig: unknown = await tx
                    .objectStore("configs")
                    .get(ML_SEARCH_CONFIG_NAME);
                if (
                    searchConfig &&
                    typeof searchConfig == "object" &&
                    "enabled" in searchConfig &&
                    typeof searchConfig.enabled == "boolean"
                ) {
                    wasMLSearchEnabled = searchConfig.enabled;
                }
            } catch (e) {
                log.info(
                    "Ignoring likely harmless error while trying to determine ML search status during migration",
                    e,
                );
            }
            log.info(
                `Previous ML database v${oldVersion} had ML search ${wasMLSearchEnabled ? "enabled" : "disabled"}`,
            );

            if (oldVersion < 1) {
                const filesStore = db.createObjectStore("files", {
                    keyPath: "fileId",
@@ -124,15 +147,28 @@ class MLIDbStorage {
                    .add(DEFAULT_ML_SEARCH_CONFIG, ML_SEARCH_CONFIG_NAME);
            }
            if (oldVersion < 4) {
                // TODO(MR): This loses the user's settings.
                db.deleteObjectStore("configs");
                db.createObjectStore("configs");
                try {
                    await tx
                        .objectStore("configs")
                        .delete(ML_SEARCH_CONFIG_NAME);

                    db.deleteObjectStore("things");
                    await tx
                        .objectStore("configs")
                        .add(
                            { enabled: wasMLSearchEnabled },
                            ML_SEARCH_CONFIG_NAME,
                        );

                    db.deleteObjectStore("things");
                } catch {
                    // TODO: ignore for now as we finalize the new version;
                    // the shipped implementation should have a more
                    // deterministic migration.
                }
            }

            log.info(
                `Ml DB upgraded to version: ${newVersion} from version: ${oldVersion}`,
                `ML DB upgraded from version ${oldVersion} to version ${newVersion}`,
            );
        },
    });
@@ -121,3 +121,10 @@ set of defaults for bundling our app into a static export which we can then
deploy to our webserver. In addition, the Next.js page router is convenient.
Apart from this, while we use a few tidbits from Next.js here and there, overall
our apps are regular React SPAs, and are not particularly tied to Next.

### Vite

For some of our newer code, we have started to use [Vite](https://vitejs.dev).
It is lower level than Next, but the bells and whistles it doesn't have are the
bells and whistles (and the accompanying complexity) that we don't need in some
cases.
@@ -8,9 +8,32 @@ cleared when the browser tab is closed.

The data in local storage is tied to the Document's origin (scheme + host).

Some things that get stored here are:

- Details about the logged in user, in particular their user id and an auth
  token we can use to make API calls on their behalf.

- Various user preferences

## Session Storage

Data tied to the browser tab's lifetime.

We store the user's encryption key here.

## Indexed DB

We use the LocalForage library for storing things in Indexed DB. This library
falls back to localStorage in case Indexed DB storage is not available.

Indexed DB allows for larger sizes than local/session storage, and is generally
meant for larger, tabular data.
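As an illustrative sketch of the LocalForage usage pattern (the key and value
here are hypothetical, not ones the codebase necessarily uses):

    import localForage from "localforage";

    // LocalForage mirrors the localStorage API, but is async and backed by
    // Indexed DB, transparently falling back when Indexed DB is unavailable.
    export const example = async () => {
        await localForage.setItem("example-key", { x: 1 });
        const value = await localForage.getItem<{ x: number }>("example-key");
        return value;
    };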
## OPFS

OPFS is used for caching entire files when we're running under Electron (the
Web Cache API is used in the browser).

As its name suggests, it is an entire filesystem, private for us ("origin"). It
is not unbounded though, and the storage is not guaranteed to be persistent (at
least with the APIs we use), hence the cache designation.
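Since the browser may evict this storage under pressure, here is a sketch of how
the quota could be inspected, or persistence requested, via the standard
StorageManager API (the code in this commit does not currently do either):

    // How much origin storage is in use, and the browser's heuristic quota.
    const { usage, quota } = await navigator.storage.estimate();
    console.log(`using ${usage} of ${quota} bytes`);

    // Ask the browser to exempt this origin from eviction; it may refuse.
    const persisted = await navigator.storage.persist();
    console.log(persisted ? "persistent" : "best-effort");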
@@ -1,7 +1,7 @@
import { clearCaches } from "@/next/blob-cache";
import log from "@/next/log";
import { Events, eventBus } from "@ente/shared/events";
import InMemoryStore from "@ente/shared/storage/InMemoryStore";
import { clearCaches } from "@ente/shared/storage/cache";
import { clearFiles } from "@ente/shared/storage/localForage/helpers";
import { clearData } from "@ente/shared/storage/localStorage";
import { clearKeys } from "@ente/shared/storage/sessionStorage";

web/packages/next/blob-cache.ts (new file, 220 lines)
@@ -0,0 +1,220 @@
import isElectron from "is-electron";

const blobCacheNames = [
    "thumbs",
    "face-crops",
    // Desktop app only
    "files",
] as const;

/**
 * Namespaces into which our blob caches are divided
 *
 * Note that namespaces are just arbitrary (but predefined) strings to split the
 * cached data into "folders", so to speak.
 */
export type BlobCacheNamespace = (typeof blobCacheNames)[number];

/**
 * A namespaced blob cache.
 *
 * This cache is suitable for storing large amounts of data (entire files).
 *
 * To obtain a cache for a given namespace, use {@link openCache}. To clear all
 * cached data (e.g. during logout), use {@link clearCaches}.
 *
 * [Note: Caching files]
 *
 * The underlying implementation of the cache is different depending on the
 * runtime environment.
 *
 * * The preferred implementation, and the one that is used when we're running
 *   in a browser, is to use the standard [Web
 *   Cache](https://developer.mozilla.org/en-US/docs/Web/API/Cache).
 *
 * * However, when running under Electron (when this code runs as part of our
 *   desktop app), a custom OPFS based cache is used instead. This is because
 *   Electron currently doesn't support using the standard Web Cache API for
 *   data served by a custom protocol handler (see this
 *   [issue](https://github.com/electron/electron/issues/35033), and the
 *   underlying restriction that comes from
 *   [Chromium](https://source.chromium.org/chromium/chromium/src/+/main:third_party/blink/renderer/modules/cache_storage/cache.cc;l=83-87?q=%22Request%20scheme%20%27%22&ss=chromium)).
 *
 * [OPFS](https://web.dev/articles/origin-private-file-system) stands for Origin
 * Private File System. It is a recent API that allows a web site to store
 * reasonably large amounts of data. One option (that may still become possible
 * in the future) was to always use OPFS for caching instead of this dual
 * implementation, however currently [Safari does not support writing to OPFS
 * outside of web
 * workers](https://webkit.org/blog/12257/the-file-system-access-api-with-origin-private-file-system/)
 * ([the WebKit bug](https://bugs.webkit.org/show_bug.cgi?id=231706)), so it's
 * not trivial to use this as a full on replacement of the Web Cache in the
 * browser. So for now we go with this split implementation.
 *
 * See also: [Note: Increased disk cache for the desktop app].
 */
export interface BlobCache {
    /**
     * Get the data corresponding to {@link key} (if found) from the cache.
     */
    get: (key: string) => Promise<Blob | undefined>;
    /**
     * Add the given {@link key}-value ({@link blob}) pair to the cache.
     */
    put: (key: string, blob: Blob) => Promise<void>;
    /**
     * Delete the blob corresponding to the given {@link key}.
     *
     * The returned promise resolves to `true` if a cache entry was found,
     * otherwise it resolves to `false`.
     */
    delete: (key: string) => Promise<boolean>;
}

/**
 * Return the {@link BlobCache} corresponding to the given {@link name}.
 *
 * @param name One of the arbitrary but predefined namespaces of type
 * {@link BlobCacheNamespace} which group related data and allow us to use the
 * same key across namespaces.
 */
export const openCache = async (
    name: BlobCacheNamespace,
): Promise<BlobCache> =>
    isElectron() ? openOPFSCacheWeb(name) : openWebCache(name);

/**
 * [Note: ArrayBuffer vs Blob vs Uint8Array]
 *
 * ArrayBuffers are in memory, while blobs are unreified, and can directly point
 * to on disk objects too.
 *
 * If we are just passing data around without necessarily needing to manipulate
 * it, and we already have a blob, it's best to just pass that blob. Further,
 * blobs also retain the file's encoding information, and are thus a layer above
 * array buffers, which are just raw byte sequences.
 *
 * ArrayBuffers are not directly manipulatable, which is where some sort of a
 * typed array or a data view comes into the picture. The typed `Uint8Array` is
 * a common way.
 *
 * To convert from ArrayBuffer to Uint8Array,
 *
 *     new Uint8Array(arrayBuffer)
 *
 * Blobs are immutable, but a usual scenario is storing an entire file in a
 * blob, and when the need comes to display it, we can obtain a URL for it using
 *
 *     URL.createObjectURL(blob)
 *
 * Also note that a File is a Blob!
 *
 * To convert from a Blob to ArrayBuffer
 *
 *     await blob.arrayBuffer()
 *
 * To convert from an ArrayBuffer or Uint8Array to Blob
 *
 *     new Blob([arrayBuffer, andOrAnyArray, andOrstring])
 *
 * Refs:
 * - https://github.com/yigitunallar/arraybuffer-vs-blob
 * - https://stackoverflow.com/questions/11821096/what-is-the-difference-between-an-arraybuffer-and-a-blob
 */

/** An implementation of {@link BlobCache} using the Web Cache API */
const openWebCache = async (name: BlobCacheNamespace) => {
    const cache = await caches.open(name);
    return {
        get: async (key: string) => {
            const res = await cache.match(key);
            return await res?.blob();
        },
        put: (key: string, blob: Blob) => cache.put(key, new Response(blob)),
        delete: (key: string) => cache.delete(key),
    };
};

/** An implementation of {@link BlobCache} using OPFS */
const openOPFSCacheWeb = async (name: BlobCacheNamespace) => {
    // While all major browsers support OPFS now, their implementations still
    // have various quirks. However, we don't need to handle all possible cases
    // and can just instead use the APIs and guarantees Chromium provides since
    // this code will only run in our Electron app (which'll use Chromium as
    // the renderer).
    //
    // So for our purpose, these can serve as the doc for what's available:
    // https://web.dev/articles/origin-private-file-system

    const root = await navigator.storage.getDirectory();
    const caches = await root.getDirectoryHandle("cache", { create: true });
    // eslint-disable-next-line @typescript-eslint/no-unused-vars
    const cache = await caches.getDirectoryHandle(name, { create: true });

    return {
        get: async (key: string) => {
            try {
                const fileHandle = await cache.getFileHandle(key);
                return await fileHandle.getFile();
            } catch (e) {
                if (e instanceof DOMException && e.name == "NotFoundError")
                    return undefined;
                throw e;
            }
        },
        put: async (key: string, blob: Blob) => {
            const fileHandle = await cache.getFileHandle(key, {
                create: true,
            });
            const writable = await fileHandle.createWritable();
            await writable.write(blob);
            await writable.close();
        },
        delete: async (key: string) => {
            try {
                await cache.removeEntry(key);
                return true;
            } catch (e) {
                if (e instanceof DOMException && e.name == "NotFoundError")
                    return false;
                throw e;
            }
        },
    };
};

/**
 * Return a cached blob for {@link key} in {@link cacheName}. If the blob is not
 * found in the cache, recreate/fetch it using {@link get}, cache it, and then
 * return it.
 */
export const cachedOrNew = async (
    cacheName: BlobCacheNamespace,
    key: string,
    get: () => Promise<Blob>,
): Promise<Blob> => {
    const cache = await openCache(cacheName);
    const cachedBlob = await cache.get(key);
    if (cachedBlob) return cachedBlob;

    const blob = await get();
    await cache.put(key, blob);
    return blob;
};

/**
 * Delete all cached data.
 *
 * Meant for use during logout, to reset the state of the user's account.
 */
export const clearCaches = async () =>
    isElectron() ? clearOPFSCaches() : clearWebCaches();

const clearWebCaches = async () => {
    await Promise.all(blobCacheNames.map((name) => caches.delete(name)));
};

const clearOPFSCaches = async () => {
    const root = await navigator.storage.getDirectory();
    await root.removeEntry("cache", { recursive: true });
};
@@ -74,7 +74,6 @@ export const CustomError = {
    EXIF_DATA_NOT_FOUND: "exif data not found",
    SELECT_FOLDER_ABORTED: "select folder aborted",
    NON_MEDIA_FILE: "non media file",
    NOT_AVAILABLE_ON_WEB: "not available on web",
    UNSUPPORTED_RAW_FORMAT: "unsupported raw format",
    NON_PREVIEWABLE_FILE: "non previewable file",
    PROCESSING_FAILED: "processing failed",
@@ -87,7 +86,6 @@ export const CustomError = {
    UNSUPPORTED_PLATFORM: "Unsupported platform",
    MODEL_DOWNLOAD_PENDING:
        "Model download pending, skipping clip search request",
    DOWNLOAD_MANAGER_NOT_READY: "Download manager not initialized",
    UPDATE_URL_FILE_ID_MISMATCH: "update url file id mismatch",
    URL_ALREADY_SET: "url already set",
    FILE_CONVERSION_FAILED: "file conversion failed",
@@ -1,66 +0,0 @@
const cacheNames = [
    "thumbs",
    "face-crops",
    // Desktop app only
    "files",
] as const;

/** Namespaces into which our caches data is divided */
export type CacheName = (typeof cacheNames)[number];

export interface LimitedCache {
    match: (
        key: string,
        options?: { sizeInBytes?: number },
    ) => Promise<Response>;
    put: (key: string, data: Response) => Promise<void>;
    delete: (key: string) => Promise<boolean>;
}

const openCache = async (name: CacheName) => {
    const cache = await caches.open(name);
    return {
        match: (key) => {
            // options are not supported in the browser
            return cache.match(key);
        },
        put: cache.put.bind(cache),
        delete: cache.delete.bind(cache),
    };
};

export const CacheStorageService = { open: openCache };

export async function cached(
    cacheName: CacheName,
    id: string,
    get: () => Promise<Blob>,
): Promise<Blob> {
    const cache = await CacheStorageService.open(cacheName);
    const cacheResponse = await cache.match(id);

    let result: Blob;
    if (cacheResponse) {
        result = await cacheResponse.blob();
    } else {
        result = await get();

        try {
            await cache.put(id, new Response(result));
        } catch (e) {
            // TODO: handle storage full exception.
            console.error("Error while storing file to cache: ", id);
        }
    }

    return result;
}

/**
 * Delete all cached data.
 *
 * Meant for use during logout, to reset the state of the user's account.
 */
export const clearCaches = async () => {
    await Promise.all(cacheNames.map((name) => caches.delete(name)));
};