Remove deprecated loggers

Manav Rathi 2024-04-09 12:01:02 +05:30
parent 0d0e20f7c4
commit d441418b5b
49 changed files with 242 additions and 362 deletions
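
The change applied across all 49 files is the same mechanical substitution: drop the addLogLine import from "@ente/shared/logging", import the default log object from "@/next/log", and rewrite each call as log.info, usually folding addLogLine's extra arguments into a single template literal (a few call sites keep the comma-separated arguments as-is). A condensed before/after sketch of that pattern, lifted from the PhotoPeopleList hunk below:

// Before: deprecated variadic logger
// import { addLogLine } from "@ente/shared/logging";
// addLogLine("getPeopleList", Date.now() - startTime, "ms");

// After: the replacement logger, with the arguments folded into one string
import log from "@/next/log";

const startTime = Date.now();
// ... await getPeopleList(props.file) ...
log.info(`getPeopleList ${Date.now() - startTime} ms`);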

View file

@ -1,3 +1,4 @@
import log from "@/next/log";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { FILE_TYPE, RAW_FORMATS } from "constants/file";
import CastDownloadManager from "services/castDownloadManager";
@ -9,7 +10,6 @@ import {
FileMagicMetadata,
FilePublicMagicMetadata,
} from "types/file";
import log from "@/next/log";
export function sortFiles(files: EnteFile[], sortAsc = false) {
// sort based on the creation time of the file,

View file

@ -1,4 +1,4 @@
import { addLogLine } from "@ente/shared/logging";
import log from "@/next/log";
import { CACHES } from "@ente/shared/storage/cacheStorage/constants";
import { styled } from "@mui/material";
import { Legend } from "components/PhotoViewer/styledComponents/Legend";
@ -86,11 +86,11 @@ export function PhotoPeopleList(props: PhotoPeopleListProps) {
let didCancel = false;
async function updateFaceImages() {
addLogLine("calling getPeopleList");
log.info("calling getPeopleList");
const startTime = Date.now();
const people = await getPeopleList(props.file);
addLogLine("getPeopleList", Date.now() - startTime, "ms");
addLogLine("getPeopleList done, didCancel: ", didCancel);
log.info(`getPeopleList ${Date.now() - startTime} ms`);
log.info(`getPeopleList done, didCancel: ${didCancel}`);
!didCancel && setPeople(people);
}

View file

@ -2,7 +2,6 @@ import log from "@/next/log";
import { PHOTOS_PAGES } from "@ente/shared/constants/pages";
import { CustomError } from "@ente/shared/error";
import useMemoSingleThreaded from "@ente/shared/hooks/useMemoSingleThreaded";
import { addLogLine } from "@ente/shared/logging";
import { styled } from "@mui/material";
import PhotoViewer from "components/PhotoViewer";
import { TRASH_SECTION } from "constants/collection";
@ -184,7 +183,7 @@ const PhotoFrame = ({
const file = displayFiles[index];
// this is to prevent an outdated updateURL call from updating the wrong file
if (file.id !== id) {
addLogLine(
log.info(
`[${id}]PhotoSwipe: updateURL: file id mismatch: ${file.id} !== ${id}`,
);
throw Error(CustomError.UPDATE_URL_FILE_ID_MISMATCH);
@ -204,7 +203,7 @@ const PhotoFrame = ({
const file = displayFiles[index];
// this is to prevent an outdated updateSrcURL call from updating the wrong file
if (file.id !== id) {
addLogLine(
log.info(
`[${id}]PhotoSwipe: updateSrcURL: file id mismatch: ${file.id}`,
);
throw Error(CustomError.UPDATE_URL_FILE_ID_MISMATCH);
@ -212,7 +211,7 @@ const PhotoFrame = ({
if (file.isSourceLoaded && !forceUpdate) {
throw Error(CustomError.URL_ALREADY_SET);
} else if (file.conversionFailed) {
addLogLine(`[${id}]PhotoSwipe: updateSrcURL: conversion failed`);
log.info(`[${id}]PhotoSwipe: updateSrcURL: conversion failed`);
throw Error(CustomError.FILE_CONVERSION_FAILED);
}
@ -308,7 +307,7 @@ const PhotoFrame = ({
index: number,
item: EnteFile,
) => {
addLogLine(
log.info(
`[${
item.id
}] getSlideData called for thumbnail:${!!item.msrc} sourceLoaded:${
@ -319,17 +318,15 @@ const PhotoFrame = ({
if (!item.msrc) {
try {
if (thumbFetching[item.id]) {
addLogLine(
`[${item.id}] thumb download already in progress`,
);
log.info(`[${item.id}] thumb download already in progress`);
return;
}
addLogLine(`[${item.id}] doesn't have thumbnail`);
log.info(`[${item.id}] doesn't have thumbnail`);
thumbFetching[item.id] = true;
const url = await DownloadManager.getThumbnailForPreview(item);
try {
updateURL(index)(item.id, url);
addLogLine(
log.info(
`[${
item.id
}] calling invalidateCurrItems for thumbnail msrc :${!!item.msrc}`,
@ -355,20 +352,20 @@ const PhotoFrame = ({
if (item.isSourceLoaded || item.conversionFailed) {
if (item.isSourceLoaded) {
addLogLine(`[${item.id}] source already loaded`);
log.info(`[${item.id}] source already loaded`);
}
if (item.conversionFailed) {
addLogLine(`[${item.id}] conversion failed`);
log.info(`[${item.id}] conversion failed`);
}
return;
}
if (fetching[item.id]) {
addLogLine(`[${item.id}] file download already in progress`);
log.info(`[${item.id}] file download already in progress`);
return;
}
try {
addLogLine(`[${item.id}] new file src request`);
log.info(`[${item.id}] new file src request`);
fetching[item.id] = true;
const srcURLs = await DownloadManager.getFileForPreview(item);
if (item.metadata.fileType === FILE_TYPE.LIVE_PHOTO) {
@ -383,7 +380,7 @@ const PhotoFrame = ({
};
try {
await updateSrcURL(index, item.id, dummyImgSrcUrl);
addLogLine(
log.info(
`[${item.id}] calling invalidateCurrItems for live photo imgSrc, source loaded :${item.isSourceLoaded}`,
);
instance.invalidateCurrItems();
@ -417,7 +414,7 @@ const PhotoFrame = ({
loadedLivePhotoSrcURL,
true,
);
addLogLine(
log.info(
`[${item.id}] calling invalidateCurrItems for live photo complete, source loaded :${item.isSourceLoaded}`,
);
instance.invalidateCurrItems();
@ -435,7 +432,7 @@ const PhotoFrame = ({
} else {
try {
await updateSrcURL(index, item.id, srcURLs);
addLogLine(
log.info(
`[${item.id}] calling invalidateCurrItems for src, source loaded :${item.isSourceLoaded}`,
);
instance.invalidateCurrItems();
@ -478,7 +475,7 @@ const PhotoFrame = ({
}
try {
updateURL(index)(item.id, item.msrc, true);
addLogLine(
log.info(
`[${
item.id
}] calling invalidateCurrItems for thumbnail msrc :${!!item.msrc}`,
@ -497,7 +494,7 @@ const PhotoFrame = ({
// ignore
}
try {
addLogLine(
log.info(
`[${item.id}] new file getConvertedVideo request- ${item.metadata.title}}`,
);
fetching[item.id] = true;
@ -506,7 +503,7 @@ const PhotoFrame = ({
try {
await updateSrcURL(index, item.id, srcURL, true);
addLogLine(
log.info(
`[${item.id}] calling invalidateCurrItems for src, source loaded :${item.isSourceLoaded}`,
);
instance.invalidateCurrItems();

View file

@ -25,7 +25,7 @@ const Caption = styled("span")`
const MenuWithPeople = (props) => {
const appContext = useContext(AppContext);
// addLogLine("props.selectProps.options: ", selectRef);
// log.info("props.selectProps.options: ", selectRef);
const peopleSuggestions = props.selectProps.options.filter(
(o) => o.type === SuggestionType.PERSON,
);

View file

@ -1,13 +1,12 @@
import log from "@/next/log";
import { savedLogs } from "@/next/log-web";
import { downloadAsFile } from "@ente/shared/utils";
import Typography from "@mui/material/Typography";
import { EnteMenuItem } from "components/Menu/EnteMenuItem";
import { t } from "i18next";
import { AppContext } from "pages/_app";
import { useContext, useEffect, useState } from "react";
import { Trans } from "react-i18next";
import { savedLogs } from "@/next/log-web";
import { addLogLine } from "@ente/shared/logging";
import { downloadAsFile } from "@ente/shared/utils";
import Typography from "@mui/material/Typography";
import { EnteMenuItem } from "components/Menu/EnteMenuItem";
import { isInternalUser } from "utils/user";
import { testUpload } from "../../../tests/upload.test";
import {
@ -40,7 +39,7 @@ export default function DebugSection() {
});
const downloadLogs = () => {
addLogLine("Downloading logs");
log.info("Downloading logs");
if (electron) electron.openLogDirectory();
else downloadAsFile(`debug_logs_${Date.now()}.txt`, savedLogs());
};

View file

@ -1,6 +1,5 @@
import log from "@/next/log";
import { CustomError } from "@ente/shared/error";
import { addLogLine } from "@ente/shared/logging";
import { isPromise } from "@ente/shared/utils";
import DiscFullIcon from "@mui/icons-material/DiscFull";
import UserNameInputDialog from "components/UserNameInputDialog";
@ -179,9 +178,7 @@ export default function Uploader(props: Props) {
if (isElectron()) {
ImportService.getPendingUploads().then(
({ files: electronFiles, collectionName, type }) => {
addLogLine(
`found pending desktop upload, resuming uploads`,
);
log.info(`found pending desktop upload, resuming uploads`);
resumeDesktopUpload(type, electronFiles, collectionName);
},
);
@ -212,20 +209,20 @@ export default function Uploader(props: Props) {
pickedUploadType.current === PICKED_UPLOAD_TYPE.FOLDERS &&
props.webFolderSelectorFiles?.length > 0
) {
addLogLine(`received folder upload request`);
log.info(`received folder upload request`);
setWebFiles(props.webFolderSelectorFiles);
} else if (
pickedUploadType.current === PICKED_UPLOAD_TYPE.FILES &&
props.webFileSelectorFiles?.length > 0
) {
addLogLine(`received file upload request`);
log.info(`received file upload request`);
setWebFiles(props.webFileSelectorFiles);
} else if (props.dragAndDropFiles?.length > 0) {
isDragAndDrop.current = true;
if (electron) {
const main = async () => {
try {
addLogLine(`uploading dropped files from desktop app`);
log.info(`uploading dropped files from desktop app`);
// check and parse dropped files which are zip files
let electronFiles = [] as ElectronFile[];
for (const file of props.dragAndDropFiles) {
@ -234,7 +231,7 @@ export default function Uploader(props: Props) {
await electron.getElectronFilesFromGoogleZip(
(file as any).path,
);
addLogLine(
log.info(
`zip file - ${file.name} contains ${zipFiles.length} files`,
);
electronFiles = [...electronFiles, ...zipFiles];
@ -252,7 +249,7 @@ export default function Uploader(props: Props) {
);
}
}
addLogLine(
log.info(
`uploading dropped files from desktop app - ${electronFiles.length} files found`,
);
setElectronFiles(electronFiles);
@ -263,7 +260,7 @@ export default function Uploader(props: Props) {
};
main();
} else {
addLogLine(`uploading dropped files from web app`);
log.info(`uploading dropped files from web app`);
setWebFiles(props.dragAndDropFiles);
}
}
@ -279,7 +276,7 @@ export default function Uploader(props: Props) {
webFiles?.length > 0 ||
appContext.sharedFiles?.length > 0
) {
addLogLine(
log.info(
`upload request type:${
electronFiles?.length > 0
? "electronFiles"
@ -294,13 +291,13 @@ export default function Uploader(props: Props) {
);
if (uploadManager.isUploadRunning()) {
if (watchFolderService.isUploadRunning()) {
addLogLine(
log.info(
"watchFolder upload was running, pausing it to run user upload",
);
// pause watch folder service on user upload
watchFolderService.pauseRunningSync();
} else {
addLogLine(
log.info(
"an upload is already running, rejecting new upload request",
);
// no-op
@ -372,7 +369,7 @@ export default function Uploader(props: Props) {
uploaderName?: string,
) => {
try {
addLogLine(
log.info(
`upload file to an existing collection name:${collection.name}, collectionID:${collection.id}`,
);
await preCollectionCreationAction();
@ -397,7 +394,7 @@ export default function Uploader(props: Props) {
collectionName?: string,
) => {
try {
addLogLine(
log.info(
`upload file to a new collections strategy:${strategy}, collectionName:${collectionName}`,
);
await preCollectionCreationAction();
@ -417,7 +414,7 @@ export default function Uploader(props: Props) {
toUploadFiles.current,
);
}
addLogLine(
log.info(
`upload collections - [${[...collectionNameToFilesMap.keys()]}]`,
);
try {
@ -502,7 +499,7 @@ export default function Uploader(props: Props) {
uploaderName?: string,
) => {
try {
addLogLine("uploadFiles called");
log.info("uploadFiles called");
preUploadAction();
if (
electron &&
@ -555,7 +552,7 @@ export default function Uploader(props: Props) {
const retryFailed = async () => {
try {
addLogLine("user retrying failed upload");
log.info("user retrying failed upload");
const filesWithCollections =
uploadManager.getFailedFilesWithCollections();
const uploaderName = uploadManager.getUploaderName();
@ -630,7 +627,7 @@ export default function Uploader(props: Props) {
) => {
try {
if (accessedThroughSharedURL) {
addLogLine(
log.info(
`uploading files to public collection - ${props.uploadCollection.name} - ${props.uploadCollection.id}`,
);
const uploaderName = await getPublicCollectionUploaderName(
@ -645,7 +642,7 @@ export default function Uploader(props: Props) {
if (isPendingDesktopUpload.current) {
isPendingDesktopUpload.current = false;
if (pendingDesktopUploadCollectionName.current) {
addLogLine(
log.info(
`upload pending files to collection - ${pendingDesktopUploadCollectionName.current}`,
);
uploadFilesToNewCollections(
@ -654,7 +651,7 @@ export default function Uploader(props: Props) {
);
pendingDesktopUploadCollectionName.current = null;
} else {
addLogLine(
log.info(
`pending upload - strategy - "multiple collections" `,
);
uploadFilesToNewCollections(
@ -664,7 +661,7 @@ export default function Uploader(props: Props) {
return;
}
if (isElectron() && pickedUploadType === PICKED_UPLOAD_TYPE.ZIPS) {
addLogLine("uploading zip files");
log.info("uploading zip files");
uploadFilesToNewCollections(
UPLOAD_STRATEGY.COLLECTION_PER_FOLDER,
);
@ -685,7 +682,7 @@ export default function Uploader(props: Props) {
}
let showNextModal = () => {};
if (importSuggestion.hasNestedFolders) {
addLogLine(`nested folders detected`);
log.info(`nested folders detected`);
showNextModal = () => setChoiceModalView(true);
} else {
showNextModal = () =>
@ -718,7 +715,7 @@ export default function Uploader(props: Props) {
zipPaths.current = response.zipPaths;
}
if (files?.length > 0) {
addLogLine(
log.info(
` desktop upload for type:${type} and fileCount: ${files?.length} requested`,
);
setElectronFiles(files);

View file

@ -26,7 +26,6 @@ import AppNavbar from "@ente/shared/components/Navbar/app";
import { PHOTOS_PAGES as PAGES } from "@ente/shared/constants/pages";
import { Events, eventBus } from "@ente/shared/events";
import { useLocalState } from "@ente/shared/hooks/useLocalState";
import { addLogLine } from "@ente/shared/logging";
import HTTPService from "@ente/shared/network/HTTPService";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
import {
@ -213,10 +212,10 @@ export default function App({ Component, pageProps }: AppProps) {
}
const initExport = async () => {
try {
addLogLine("init export");
log.info("init export");
const token = getToken();
if (!token) {
addLogLine(
log.info(
"User not logged in, not starting export continuous sync job",
);
return;
@ -237,7 +236,7 @@ export default function App({ Component, pageProps }: AppProps) {
exportService.enableContinuousExport();
}
if (isExportInProgress(exportRecord.stage)) {
addLogLine("export was in progress, resuming");
log.info("export was in progress, resuming");
exportService.scheduleExport();
}
} catch (e) {

View file

@ -3,7 +3,6 @@ import log from "@/next/log";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { CustomError } from "@ente/shared/error";
import { Events, eventBus } from "@ente/shared/events";
import { addLogLine } from "@ente/shared/logging";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
import { FILE_TYPE } from "constants/file";
import isElectron from "is-electron";
@ -63,15 +62,15 @@ class ClipServiceImpl {
return;
}
if (this.onFileUploadedHandler) {
addLogLine("file upload listener already setup");
log.info("file upload listener already setup");
return;
}
addLogLine("setting up file upload listener");
log.info("setting up file upload listener");
this.onFileUploadedHandler = (args) => {
this.runLocalFileClipExtraction(args);
};
eventBus.on(Events.FILE_UPLOADED, this.onFileUploadedHandler, this);
addLogLine("setup file upload listener successfully");
log.info("setup file upload listener successfully");
} catch (e) {
log.error("failed to setup clip service", e);
}
@ -80,17 +79,17 @@ class ClipServiceImpl {
removeOnFileUploadListener = async () => {
try {
if (!this.onFileUploadedHandler) {
addLogLine("file upload listener already removed");
log.info("file upload listener already removed");
return;
}
addLogLine("removing file upload listener");
log.info("removing file upload listener");
eventBus.removeListener(
Events.FILE_UPLOADED,
this.onFileUploadedHandler,
this,
);
this.onFileUploadedHandler = null;
addLogLine("removed file upload listener successfully");
log.info("removed file upload listener successfully");
} catch (e) {
log.error("failed to remove clip service", e);
}
@ -121,13 +120,13 @@ class ClipServiceImpl {
) => {
try {
if (this.embeddingExtractionInProgress) {
addLogLine(
log.info(
"clip embedding extraction already in progress, scheduling re-run",
);
this.reRunNeeded = true;
return;
} else {
addLogLine(
log.info(
"clip embedding extraction not in progress, starting clip embedding extraction",
);
}
@ -139,7 +138,7 @@ class ClipServiceImpl {
this.embeddingExtractionInProgress = null;
if (!canceller.signal.aborted && this.reRunNeeded) {
this.reRunNeeded = false;
addLogLine("re-running clip embedding extraction");
log.info("re-running clip embedding extraction");
setTimeout(
() => this.scheduleImageEmbeddingExtraction(),
0,
@ -174,7 +173,7 @@ class ClipServiceImpl {
) => {
try {
if (this.unsupportedPlatform) {
addLogLine(
log.info(
`skipping clip embedding extraction, platform unsupported`,
);
return;
@ -194,15 +193,15 @@ class ClipServiceImpl {
pending: pendingFiles.length,
});
if (pendingFiles.length === 0) {
addLogLine("no clip embedding extraction needed, all done");
log.info("no clip embedding extraction needed, all done");
return;
}
addLogLine(
log.info(
`starting clip embedding extraction for ${pendingFiles.length} files`,
);
for (const file of pendingFiles) {
try {
addLogLine(
log.info(
`extracting clip embedding for file: ${file.metadata.title} fileID: ${file.id}`,
);
if (canceller.signal.aborted) {
@ -210,7 +209,7 @@ class ClipServiceImpl {
}
const embeddingData =
await this.extractFileClipImageEmbedding(model, file);
addLogLine(
log.info(
`successfully extracted clip embedding for file: ${file.metadata.title} fileID: ${file.id} embedding length: ${embeddingData?.length}`,
);
await this.encryptAndUploadEmbedding(
@ -219,7 +218,7 @@ class ClipServiceImpl {
embeddingData,
);
this.onSuccessStatusUpdater();
addLogLine(
log.info(
`successfully put clip embedding to server for file: ${file.metadata.title} fileID: ${file.id}`,
);
} catch (e) {
@ -258,24 +257,24 @@ class ClipServiceImpl {
model: Model = Model.ONNX_CLIP,
) {
const { enteFile, localFile } = arg;
addLogLine(
log.info(
`clip embedding extraction onFileUploadedHandler file: ${enteFile.metadata.title} fileID: ${enteFile.id}`,
enteFile.id,
);
if (enteFile.metadata.fileType === FILE_TYPE.VIDEO) {
addLogLine(
log.info(
`skipping video file for clip embedding extraction file: ${enteFile.metadata.title} fileID: ${enteFile.id}`,
);
return;
}
const extension = enteFile.metadata.title.split(".").pop();
if (!extension || !["jpg", "jpeg"].includes(extension)) {
addLogLine(
log.info(
`skipping non jpg file for clip embedding extraction file: ${enteFile.metadata.title} fileID: ${enteFile.id}`,
);
return;
}
addLogLine(
log.info(
`queuing up for local clip embedding extraction for file: ${enteFile.metadata.title} fileID: ${enteFile.id}`,
);
try {
@ -290,7 +289,7 @@ class ClipServiceImpl {
embedding,
);
});
addLogLine(
log.info(
`successfully extracted clip embedding for file: ${enteFile.metadata.title} fileID: ${enteFile.id}`,
);
} catch (e) {
@ -322,7 +321,7 @@ class ClipServiceImpl {
const comlinkCryptoWorker = await ComlinkCryptoWorker.getInstance();
const { file: encryptedEmbeddingData } =
await comlinkCryptoWorker.encryptEmbedding(embeddingData, file.key);
addLogLine(
log.info(
`putting clip embedding to server for file: ${file.metadata.title} fileID: ${file.id}`,
);
await putEmbedding({

View file

@ -1,22 +1,20 @@
import { EnteFile } from "types/file";
import {
generateStreamFromArrayBuffer,
getRenderableFileURL,
} from "utils/file";
import log from "@/next/log";
import { APPS } from "@ente/shared/apps/constants";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { CustomError } from "@ente/shared/error";
import { Events, eventBus } from "@ente/shared/events";
import { addLogLine } from "@ente/shared/logging";
import { CacheStorageService } from "@ente/shared/storage/cacheStorage";
import { CACHES } from "@ente/shared/storage/cacheStorage/constants";
import { LimitedCache } from "@ente/shared/storage/cacheStorage/types";
import { Remote } from "comlink";
import { FILE_TYPE } from "constants/file";
import isElectron from "is-electron";
import { EnteFile } from "types/file";
import {
generateStreamFromArrayBuffer,
getRenderableFileURL,
} from "utils/file";
import { isInternalUser } from "utils/user";
import { PhotosDownloadClient } from "./clients/photos";
import { PublicAlbumsDownloadClient } from "./clients/publicAlbums";
@ -80,7 +78,7 @@ class DownloadManagerImpl {
) {
try {
if (this.ready) {
addLogLine("DownloadManager already initialized");
log.info("DownloadManager already initialized");
return;
}
this.downloadClient = createDownloadClient(app, tokens, timeout);
@ -97,7 +95,7 @@ class DownloadManagerImpl {
private async logoutHandler() {
try {
addLogLine("downloadManger logoutHandler started");
log.info("downloadManger logoutHandler started");
this.ready = false;
this.cryptoWorker = null;
this.downloadClient = null;
@ -106,7 +104,7 @@ class DownloadManagerImpl {
this.thumbnailObjectURLPromises.clear();
this.fileDownloadProgress.clear();
this.progressUpdater = () => {};
addLogLine("downloadManager logoutHandler completed");
log.info("downloadManager logoutHandler completed");
} catch (e) {
log.error("downloadManager logoutHandler failed", e);
}
@ -300,7 +298,7 @@ class DownloadManagerImpl {
file: EnteFile,
): Promise<ReadableStream<Uint8Array>> {
try {
addLogLine(`download attempted for fileID:${file.id}`);
log.info(`download attempted for fileID:${file.id}`);
const onDownloadProgress = this.trackDownloadProgress(
file.id,
file.info?.fileSize,

View file

@ -1,5 +1,5 @@
import log from "@/next/log";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { addLogLine } from "@ente/shared/logging";
import HTTPService from "@ente/shared/network/HTTPService";
import { getEndpoint } from "@ente/shared/network/api";
import localForage from "@ente/shared/storage/localForage";
@ -108,11 +108,11 @@ export const syncEntities = async () => {
const syncEntity = async <T>(type: EntityType): Promise<Entity<T>> => {
try {
let entities = await getLocalEntity(type);
addLogLine(
log.info(
`Syncing ${type} entities localEntitiesCount: ${entities.length}`,
);
let syncTime = await getEntityLastSyncTime(type);
addLogLine(`Syncing ${type} entities syncTime: ${syncTime}`);
log.info(`Syncing ${type} entities syncTime: ${syncTime}`);
let response: EntitySyncDiffResponse;
do {
response = await getEntityDiff(type, syncTime);
@ -156,7 +156,7 @@ const syncEntity = async <T>(type: EntityType): Promise<Entity<T>> => {
}
await localForage.setItem(ENTITY_TABLES[type], nonDeletedEntities);
await localForage.setItem(ENTITY_SYNC_TIME_TABLES[type], syncTime);
addLogLine(
log.info(
`Syncing ${type} entities syncedEntitiesCount: ${nonDeletedEntities.length}`,
);
} while (response.diff.length === DIFF_LIMIT);

View file

@ -1,11 +1,9 @@
import { getEndpoint } from "@ente/shared/network/api";
import localForage from "@ente/shared/storage/localForage";
import log from "@/next/log";
import ComlinkCryptoWorker from "@ente/shared/crypto";
import { Events, eventBus } from "@ente/shared/events";
import { addLogLine } from "@ente/shared/logging";
import HTTPService from "@ente/shared/network/HTTPService";
import { getEndpoint } from "@ente/shared/network/api";
import localForage from "@ente/shared/storage/localForage";
import { getToken } from "@ente/shared/storage/localStorage/helpers";
import { REQUEST_BATCH_SIZE } from "constants/api";
import { Collection } from "types/collection";
@ -57,7 +55,7 @@ const setLocalFiles = async (type: "normal" | "hidden", files: EnteFile[]) => {
`failed to save files to indexedDB (storageEstimate was ${storageEstimate}`,
e1,
);
addLogLine(`storage estimate ${JSON.stringify(storageEstimate)}`);
log.info(`storage estimate ${JSON.stringify(storageEstimate)}`);
} catch (e2) {
log.error("failed to save files to indexedDB", e1);
log.error("failed to get storage stats", e2);

View file

@ -2,7 +2,6 @@ import { convertBytesToHumanReadable } from "@/next/file";
import log from "@/next/log";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { CustomError } from "@ente/shared/error";
import { addLogLine } from "@ente/shared/logging";
import { retryAsyncFunction } from "@ente/shared/utils";
import QueueProcessor from "@ente/shared/utils/queueProcessor";
import { getDedicatedConvertWorker } from "utils/comlink/ComlinkConvertWorker";
@ -46,7 +45,7 @@ class HEICConverter {
await worker.convertHEICToJPEG(
fileBlob,
);
addLogLine(
log.info(
`originalFileSize:${convertBytesToHumanReadable(
fileBlob?.size,
)},convertedFileSize:${convertBytesToHumanReadable(

View file

@ -1,4 +1,3 @@
import { addLogLine } from "@ente/shared/logging";
import { GraphModel } from "@tensorflow/tfjs-converter";
import * as tf from "@tensorflow/tfjs-core";
import {
@ -60,7 +59,7 @@ class BlazeFaceDetectionService implements FaceDetectionService {
inputHeight: BLAZEFACE_INPUT_SIZE,
inputWidth: BLAZEFACE_INPUT_SIZE,
});
addLogLine(
log.info(
"loaded blazeFaceModel: ",
// await this.blazeFaceModel,
// eslint-disable-next-line @typescript-eslint/await-thenable
@ -121,20 +120,20 @@ class BlazeFaceDetectionService implements FaceDetectionService {
let desiredDist = desiredRightEyeX - this.desiredLeftEye[0];
desiredDist *= this.desiredFaceSize;
const scale = desiredDist / dist;
// addLogLine("scale: ", scale);
// log.info("scale: ", scale);
const eyesCenter = [];
eyesCenter[0] = Math.floor((leftEye[0] + rightEye[0]) / 2);
eyesCenter[1] = Math.floor((leftEye[1] + rightEye[1]) / 2);
// addLogLine("eyesCenter: ", eyesCenter);
// log.info("eyesCenter: ", eyesCenter);
const faceWidth = this.desiredFaceSize / scale;
const faceHeight = this.desiredFaceSize / scale;
// addLogLine("faceWidth: ", faceWidth, "faceHeight: ", faceHeight)
// log.info("faceWidth: ", faceWidth, "faceHeight: ", faceHeight)
const tx = eyesCenter[0] - faceWidth * 0.5;
const ty = eyesCenter[1] - faceHeight * this.desiredLeftEye[1];
// addLogLine("tx: ", tx, "ty: ", ty);
// log.info("tx: ", tx, "ty: ", ty);
return new Box({
left: tx,
@ -155,7 +154,7 @@ class BlazeFaceDetectionService implements FaceDetectionService {
const normalizedImage = tf.sub(tf.div(reshapedImage, 127.5), 1.0);
// eslint-disable-next-line @typescript-eslint/await-thenable
const results = await this.blazeFaceBackModel.predict(normalizedImage);
// addLogLine('onFacesDetected: ', results);
// log.info('onFacesDetected: ', results);
return results;
}
@ -180,7 +179,7 @@ class BlazeFaceDetectionService implements FaceDetectionService {
const inBox = newBox(0, 0, resized.width, resized.height);
const toBox = newBox(0, 0, imageBitmap.width, imageBitmap.height);
const transform = computeTransformToBox(inBox, toBox);
// addLogLine("1st pass: ", { transform });
// log.info("1st pass: ", { transform });
const faceDetections: Array<FaceDetection> = faces?.map((f) => {
const box = transformBox(normFaceBox(f), transform);
@ -223,7 +222,7 @@ class BlazeFaceDetectionService implements FaceDetectionService {
);
let selected = pass2Detections?.[0];
if (pass2Detections?.length > 1) {
// addLogLine('2nd pass >1 face', pass2Detections.length);
// log.info('2nd pass >1 face', pass2Detections.length);
selected = getNearestDetection(
pass1Detection,
pass2Detections,
@ -234,7 +233,7 @@ class BlazeFaceDetectionService implements FaceDetectionService {
// we might miss 1st pass face actually having score within threshold
// it is ok as results will be consistent with 2nd pass only detections
if (selected && selected.probability >= BLAZEFACE_SCORE_THRESHOLD) {
// addLogLine("pass2: ", { imageBox, paddedBox, transform, selected });
// log.info("pass2: ", { imageBox, paddedBox, transform, selected });
detections.push(selected);
}
}

View file

@ -26,7 +26,7 @@ class ClusteringService {
epsilon: number = 1.0,
minPts: number = 2,
): ClusteringResults {
// addLogLine("distanceFunction", DBSCAN._);
// log.info("distanceFunction", DBSCAN._);
const clusters = this.dbscan.run(dataset, epsilon, minPts);
const noise = this.dbscan.noise;
return { clusters, noise };

View file

@ -22,7 +22,7 @@ class DbscanClusteringService implements ClusteringService {
input: ClusteringInput,
config: ClusteringConfig,
): Promise<HdbscanResults> {
// addLogLine('Clustering input: ', input);
// log.info('Clustering input: ', input);
const dbscan = new DBSCAN();
const clusters = dbscan.run(
input,

View file

@ -1,4 +1,3 @@
import { addLogLine } from "@ente/shared/logging";
import {
DetectedFace,
Face,
@ -51,7 +50,7 @@ class FaceService {
);
const faceDetections =
await syncContext.faceDetectionService.detectFaces(imageBitmap);
// addLogLine('3 TF Memory stats: ',JSON.stringify(tf.memory()));
// log.info('3 TF Memory stats: ',JSON.stringify(tf.memory()));
// TODO: reenable faces filtering based on width
const detectedFaces = faceDetections?.map((detection) => {
return {
@ -66,7 +65,7 @@ class FaceService {
// ?.filter((f) =>
// f.box.width > syncContext.config.faceDetection.minFaceSize
// );
addLogLine("[MLService] Detected Faces: ", newMlFile.faces?.length);
log.info("[MLService] Detected Faces: ", newMlFile.faces?.length);
}
async syncFileFaceCrops(
@ -128,8 +127,8 @@ class FaceService {
face.detection,
);
}
addLogLine("[MLService] alignedFaces: ", newMlFile.faces?.length);
// addLogLine('4 TF Memory stats: ',JSON.stringify(tf.memory()));
log.info("[MLService] alignedFaces: ", newMlFile.faces?.length);
// log.info('4 TF Memory stats: ',JSON.stringify(tf.memory()));
}
async syncFileFaceEmbeddings(
@ -168,8 +167,8 @@ class FaceService {
faceImages.forEach((faceImage) => faceImage.close());
newMlFile.faces.forEach((f, i) => (f.embedding = embeddings[i]));
addLogLine("[MLService] facesWithEmbeddings: ", newMlFile.faces.length);
// addLogLine('5 TF Memory stats: ',JSON.stringify(tf.memory()));
log.info("[MLService] facesWithEmbeddings: ", newMlFile.faces.length);
// log.info('5 TF Memory stats: ',JSON.stringify(tf.memory()));
}
async saveFaceCrop(
@ -210,14 +209,14 @@ class FaceService {
const clusteringConfig = syncContext.config.faceClustering;
if (!allFaces || allFaces.length < clusteringConfig.minInputSize) {
addLogLine(
log.info(
"[MLService] Too few faces to cluster, not running clustering: ",
allFaces.length,
);
return;
}
addLogLine("Running clustering allFaces: ", allFaces.length);
log.info("Running clustering allFaces: ", allFaces.length);
syncContext.mlLibraryData.faceClusteringResults =
await syncContext.faceClusteringService.cluster(
allFaces.map((f) => Array.from(f.embedding)),
@ -225,7 +224,7 @@ class FaceService {
);
syncContext.mlLibraryData.faceClusteringMethod =
syncContext.faceClusteringService.method;
addLogLine(
log.info(
"[MLService] Got face clustering results: ",
JSON.stringify(syncContext.mlLibraryData.faceClusteringResults),
);

View file

@ -22,7 +22,7 @@ class HdbscanClusteringService implements ClusteringService {
input: ClusteringInput,
config: ClusteringConfig,
): Promise<HdbscanResults> {
// addLogLine('Clustering input: ', input);
// log.info('Clustering input: ', input);
const hdbscan = new Hdbscan({
input,

View file

@ -1,4 +1,3 @@
import { addLogLine } from "@ente/shared/logging";
import * as tfjsConverter from "@tensorflow/tfjs-converter";
import * as tf from "@tensorflow/tfjs-core";
import { SCENE_DETECTION_IMAGE_SIZE } from "constants/mlConfig";
@ -26,7 +25,7 @@ class ImageScene implements SceneDetectionService {
}
private async init() {
addLogLine(`[${this.workerID}]`, "ImageScene init called");
log.info(`[${this.workerID}]`, "ImageScene init called");
if (this.model) {
return;
}
@ -38,7 +37,7 @@ class ImageScene implements SceneDetectionService {
this.model = await tfjsConverter.loadGraphModel(
"/models/imagescene/model.json",
);
addLogLine(
log.info(
`[${this.workerID}]`,
"loaded ImageScene model",
tf.getBackend(),
@ -52,10 +51,7 @@ class ImageScene implements SceneDetectionService {
}
private async getImageSceneModel() {
addLogLine(
`[${this.workerID}]`,
"ImageScene getImageSceneModel called",
);
log.info(`[${this.workerID}]`, "ImageScene getImageSceneModel called");
if (!this.ready) {
this.ready = this.init();
}

View file

@ -2,7 +2,6 @@ import { haveWindow } from "@/next/env";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { getDedicatedCryptoWorker } from "@ente/shared/crypto";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { addLogLine } from "@ente/shared/logging";
import PQueue from "p-queue";
import { EnteFile } from "types/file";
import {
@ -198,7 +197,7 @@ export class LocalMLSyncContext implements MLSyncContext {
this.concurrency = concurrency || getConcurrency();
addLogLine("Using concurrency: ", this.concurrency);
log.info("Using concurrency: ", this.concurrency);
// timeout is added on downloads
// timeout on queue will keep the operation open till worker is terminated
this.syncQueue = new PQueue({ concurrency: this.concurrency });

View file

@ -1,6 +1,6 @@
import log from "@/next/log";
import { APPS } from "@ente/shared/apps/constants";
import { CustomError, parseUploadErrorCodes } from "@ente/shared/error";
import { addLogLine } from "@ente/shared/logging";
import "@tensorflow/tfjs-backend-cpu";
import "@tensorflow/tfjs-backend-webgl";
import * as tf from "@tensorflow/tfjs-core";
@ -78,10 +78,10 @@ class MachineLearningService {
tsne: syncContext.tsne,
error: syncContext.error,
};
// addLogLine('[MLService] sync results: ', mlSyncResult);
// log.info('[MLService] sync results: ', mlSyncResult);
// await syncContext.dispose();
addLogLine("Final TF Memory stats: ", JSON.stringify(tf.memory()));
log.info("Final TF Memory stats: ", JSON.stringify(tf.memory()));
return mlSyncResult;
}
@ -139,7 +139,7 @@ class MachineLearningService {
let updated = false;
if (newFileIds.length > 0) {
addLogLine("newFiles: ", newFileIds.length);
log.info("newFiles: ", newFileIds.length);
const newFiles = newFileIds.map((fileId) => this.newMlData(fileId));
await mlIDbStorage.putAllFiles(newFiles, tx);
updated = true;
@ -153,7 +153,7 @@ class MachineLearningService {
}
if (removedFileIds.length > 0) {
addLogLine("removedFiles: ", removedFileIds.length);
log.info("removedFiles: ", removedFileIds.length);
await mlIDbStorage.removeAllFiles(removedFileIds, tx);
updated = true;
}
@ -165,7 +165,7 @@ class MachineLearningService {
await mlIDbStorage.incrementIndexVersion("files");
}
addLogLine("syncLocalFiles", Date.now() - startTime, "ms");
log.info("syncLocalFiles", Date.now() - startTime, "ms");
}
private async getOutOfSyncFiles(syncContext: MLSyncContext) {
@ -176,13 +176,13 @@ class MachineLearningService {
MAX_ML_SYNC_ERROR_COUNT,
);
addLogLine("fileIds: ", JSON.stringify(fileIds));
log.info("fileIds: ", JSON.stringify(fileIds));
const localFilesMap = await this.getLocalFilesMap(syncContext);
syncContext.outOfSyncFiles = fileIds.map((fileId) =>
localFilesMap.get(fileId),
);
addLogLine("getOutOfSyncFiles", Date.now() - startTime, "ms");
log.info("getOutOfSyncFiles", Date.now() - startTime, "ms");
}
private async syncFiles(syncContext: MLSyncContext) {
@ -205,7 +205,7 @@ class MachineLearningService {
syncContext.error = error;
}
await syncContext.syncQueue.onIdle();
addLogLine("allFaces: ", syncContext.nSyncedFaces);
log.info("allFaces: ", syncContext.nSyncedFaces);
// TODO: In case syncJob has to use multiple ml workers
// do in same transaction with each file update
@ -216,32 +216,32 @@ class MachineLearningService {
private async getSyncContext(token: string, userID: number) {
if (!this.syncContext) {
addLogLine("Creating syncContext");
log.info("Creating syncContext");
this.syncContext = getMLSyncConfig().then((mlSyncConfig) =>
MLFactory.getMLSyncContext(token, userID, mlSyncConfig, true),
);
} else {
addLogLine("reusing existing syncContext");
log.info("reusing existing syncContext");
}
return this.syncContext;
}
private async getLocalSyncContext(token: string, userID: number) {
if (!this.localSyncContext) {
addLogLine("Creating localSyncContext");
log.info("Creating localSyncContext");
this.localSyncContext = getMLSyncConfig().then((mlSyncConfig) =>
MLFactory.getMLSyncContext(token, userID, mlSyncConfig, false),
);
} else {
addLogLine("reusing existing localSyncContext");
log.info("reusing existing localSyncContext");
}
return this.localSyncContext;
}
public async closeLocalSyncContext() {
if (this.localSyncContext) {
addLogLine("Closing localSyncContext");
log.info("Closing localSyncContext");
const syncContext = await this.localSyncContext;
await syncContext.dispose();
this.localSyncContext = undefined;
@ -319,7 +319,7 @@ class MachineLearningService {
await this.persistMLFileSyncError(syncContext, enteFile, error);
syncContext.nSyncedFiles += 1;
} finally {
addLogLine("TF Memory stats: ", JSON.stringify(tf.memory()));
log.info("TF Memory stats: ", JSON.stringify(tf.memory()));
}
}
@ -367,7 +367,7 @@ class MachineLearningService {
} finally {
fileContext.tfImage && fileContext.tfImage.dispose();
fileContext.imageBitmap && fileContext.imageBitmap.close();
// addLogLine('8 TF Memory stats: ',JSON.stringify(tf.memory()));
// log.info('8 TF Memory stats: ',JSON.stringify(tf.memory()));
// TODO: enable once faceId changes go in
// await removeOldFaceCrops(
@ -386,7 +386,7 @@ class MachineLearningService {
await tf.ready();
addLogLine("01 TF Memory stats: ", JSON.stringify(tf.memory()));
log.info("01 TF Memory stats: ", JSON.stringify(tf.memory()));
this.initialized = true;
}
@ -463,7 +463,7 @@ class MachineLearningService {
await FaceService.syncFileFaceEmbeddings(syncContext, fileContext);
}
addLogLine(
log.info(
`face detection time taken ${fileContext.enteFile.id}`,
Date.now() - startTime,
"ms",

View file

@ -1,7 +1,6 @@
import log from "@/next/log";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { eventBus, Events } from "@ente/shared/events";
import { addLogLine } from "@ente/shared/logging";
import { getToken, getUserID } from "@ente/shared/storage/localStorage/helpers";
import { FILE_TYPE } from "constants/file";
import debounce from "debounce";
@ -51,7 +50,7 @@ class MLWorkManager {
public async setMlSearchEnabled(enabled: boolean) {
if (!this.mlSearchEnabled && enabled) {
addLogLine("Enabling MLWorkManager");
log.info("Enabling MLWorkManager");
this.mlSearchEnabled = true;
logQueueStats(this.liveSyncQueue, "livesync");
@ -70,7 +69,7 @@ class MLWorkManager {
await this.startSyncJob();
} else if (this.mlSearchEnabled && !enabled) {
addLogLine("Disabling MLWorkManager");
log.info("Disabling MLWorkManager");
this.mlSearchEnabled = false;
this.liveSyncQueue.removeAllListeners();
@ -92,7 +91,7 @@ class MLWorkManager {
// Handlers
private async appStartHandler() {
addLogLine("appStartHandler");
log.info("appStartHandler");
try {
this.startSyncJob();
} catch (e) {
@ -101,7 +100,7 @@ class MLWorkManager {
}
private async logoutHandler() {
addLogLine("logoutHandler");
log.info("logoutHandler");
try {
this.stopSyncJob();
this.mlSyncJob = undefined;
@ -119,9 +118,9 @@ class MLWorkManager {
if (!this.mlSearchEnabled) {
return;
}
addLogLine("fileUploadedHandler: ", arg.enteFile.id);
log.info("fileUploadedHandler: ", arg.enteFile.id);
if (arg.enteFile.metadata.fileType !== FILE_TYPE.IMAGE) {
addLogLine("Skipping non image file for local file processing");
log.info("Skipping non image file for local file processing");
return;
}
try {
@ -134,7 +133,7 @@ class MLWorkManager {
}
private async localFilesUpdatedHandler() {
addLogLine("Local files updated");
log.info("Local files updated");
this.startSyncJob();
}
@ -165,7 +164,7 @@ class MLWorkManager {
}
private async onLiveSyncIdle() {
addLogLine("Live sync idle");
log.info("Live sync idle");
await this.terminateLiveSyncWorker();
this.mlSearchEnabled && this.startSyncJob();
}
@ -206,7 +205,7 @@ class MLWorkManager {
// TODO: skipping is not required if we are caching chunks through service worker
// currently worker chunk itself is not loaded when network is not there
if (!navigator.onLine) {
addLogLine(
log.info(
"Skipping ml-sync job run as not connected to internet.",
);
return {
@ -227,7 +226,7 @@ class MLWorkManager {
!!mlSyncResult.error || mlSyncResult.nOutOfSyncFiles < 1,
mlSyncResult,
};
addLogLine("ML Sync Job result: ", JSON.stringify(jobResult));
log.info("ML Sync Job result: ", JSON.stringify(jobResult));
// TODO: redirect/refresh to gallery in case of session_expired, stop ml sync job
@ -239,13 +238,13 @@ class MLWorkManager {
public async startSyncJob() {
try {
addLogLine("MLWorkManager.startSyncJob");
log.info("MLWorkManager.startSyncJob");
if (!this.mlSearchEnabled) {
addLogLine("ML Search disabled, not starting ml sync job");
log.info("ML Search disabled, not starting ml sync job");
return;
}
if (!getToken()) {
addLogLine("User not logged in, not starting ml sync job");
log.info("User not logged in, not starting ml sync job");
return;
}
const mlSyncJobConfig = await getMLSyncJobConfig();
@ -262,7 +261,7 @@ class MLWorkManager {
public stopSyncJob(terminateWorker: boolean = true) {
try {
addLogLine("MLWorkManager.stopSyncJob");
log.info("MLWorkManager.stopSyncJob");
this.mlSyncJob?.stop();
terminateWorker && this.terminateSyncJobWorker();
} catch (e) {

View file

@ -1,4 +1,3 @@
import { addLogLine } from "@ente/shared/logging";
import * as tf from "@tensorflow/tfjs-core";
import { TFLiteModel } from "@tensorflow/tfjs-tflite";
import { MOBILEFACENET_FACE_SIZE } from "constants/mlConfig";
@ -37,7 +36,7 @@ class MobileFaceNetEmbeddingService implements FaceEmbeddingService {
"/models/mobilefacenet/mobilefacenet.tflite",
);
addLogLine("loaded mobileFaceNetModel: ", tf.getBackend());
log.info("loaded mobileFaceNetModel: ", tf.getBackend());
}
private async getMobileFaceNetModel() {

View file

@ -1,4 +1,3 @@
import { addLogLine } from "@ente/shared/logging";
import {
DetectedObject,
MLSyncContext,
@ -61,7 +60,7 @@ class ObjectService {
syncContext.config.sceneDetection.minScore,
)),
);
// addLogLine('3 TF Memory stats: ',JSON.stringify(tf.memory()));
// log.info('3 TF Memory stats: ',JSON.stringify(tf.memory()));
// TODO: reenable faces filtering based on width
const detectedObjects = objectDetections?.map((detection) => {
return {
@ -77,13 +76,13 @@ class ObjectService {
// ?.filter((f) =>
// f.box.width > syncContext.config.faceDetection.minFaceSize
// );
addLogLine(
log.info(
`object detection time taken ${fileContext.enteFile.id}`,
Date.now() - startTime,
"ms",
);
addLogLine("[MLService] Detected Objects: ", newMlFile.objects?.length);
log.info("[MLService] Detected Objects: ", newMlFile.objects?.length);
}
async getAllSyncedObjectsMap(syncContext: MLSyncContext) {
@ -115,9 +114,9 @@ class ObjectService {
async syncThingsIndex(syncContext: MLSyncContext) {
const filesVersion = await mlIDbStorage.getIndexVersion("files");
addLogLine("things", await mlIDbStorage.getIndexVersion("things"));
log.info("things", await mlIDbStorage.getIndexVersion("things"));
if (filesVersion <= (await mlIDbStorage.getIndexVersion("things"))) {
addLogLine(
log.info(
"[MLService] Skipping people index as already synced to latest version",
);
return;

View file

@ -1,4 +1,3 @@
import { addLogLine } from "@ente/shared/logging";
import { Face, MLSyncContext, Person } from "types/machineLearning";
import {
findFirstIfSorted,
@ -20,7 +19,7 @@ class PeopleService {
syncContext.faceClusteringService.method,
)
) {
addLogLine(
log.info(
"[MLService] Skipping people index as already synced to latest version",
);
return;
@ -84,7 +83,7 @@ class PeopleService {
faces.forEach((face) => {
face.personId = person.id;
});
// addLogLine("Creating person: ", person, faces);
// log.info("Creating person: ", person, faces);
}
await mlIDbStorage.updateFaces(allFacesMap);

View file

@ -16,7 +16,7 @@ class ReaderService {
if (fileContext.imageBitmap) {
return fileContext.imageBitmap;
}
// addLogLine('1 TF Memory stats: ',JSON.stringify(tf.memory()));
// log.info('1 TF Memory stats: ',JSON.stringify(tf.memory()));
if (fileContext.localFile) {
if (
fileContext.enteFile.metadata.fileType !== FILE_TYPE.IMAGE
@ -47,7 +47,7 @@ class ReaderService {
fileContext.newMlFile.imageSource = syncContext.config.imageSource;
const { width, height } = fileContext.imageBitmap;
fileContext.newMlFile.imageDimensions = { width, height };
// addLogLine('2 TF Memory stats: ',JSON.stringify(tf.memory()));
// log.info('2 TF Memory stats: ',JSON.stringify(tf.memory()));
return fileContext.imageBitmap;
} catch (e) {

View file

@ -6,7 +6,6 @@ import {
Versioned,
} from "types/machineLearning";
import { addLogLine } from "@ente/shared/logging";
import * as SSDMobileNet from "@tensorflow-models/coco-ssd";
import { OBJECT_DETECTION_IMAGE_SIZE } from "constants/mlConfig";
import { resizeToSquare } from "utils/image";
@ -28,7 +27,7 @@ class SSDMobileNetV2 implements ObjectDetectionService {
base: "mobilenet_v2",
modelUrl: "/models/ssdmobilenet/model.json",
});
addLogLine("loaded ssdMobileNetV2Model", tf.getBackend());
log.info("loaded ssdMobileNetV2Model", tf.getBackend());
}
private async getSSDMobileNetV2Model() {

View file

@ -1,6 +1,5 @@
import log from "@/next/log";
import { CustomError } from "@ente/shared/error";
import { addLogLine } from "@ente/shared/logging";
import * as chrono from "chrono-node";
import { FILE_TYPE } from "constants/file";
import { t } from "i18next";
@ -382,7 +381,7 @@ async function searchLocationTag(searchPhrase: string): Promise<LocationTag[]> {
locationTag.data.name.toLowerCase().includes(searchPhrase),
);
if (matchedLocationTags.length > 0) {
addLogLine(
log.info(
`Found ${matchedLocationTags.length} location tags for search phrase`,
);
}

View file

@ -1,7 +1,6 @@
import { getFileNameSize } from "@/next/file";
import log from "@/next/log";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { addLogLine } from "@ente/shared/logging";
import { Remote } from "comlink";
import { FILE_READER_CHUNK_SIZE, MULTIPART_PART_SIZE } from "constants/upload";
import { EncryptedMagicMetadata } from "types/magicMetadata";
@ -46,7 +45,7 @@ export async function readFile(
rawFile,
fileTypeInfo,
);
addLogLine(`reading file data ${getFileNameSize(rawFile)} `);
log.info(`reading file data ${getFileNameSize(rawFile)} `);
let filedata: Uint8Array | DataStream;
if (!(rawFile instanceof File)) {
if (rawFile.size > MULTIPART_PART_SIZE) {
@ -63,7 +62,7 @@ export async function readFile(
filedata = await getUint8ArrayView(rawFile);
}
addLogLine(`read file data successfully ${getFileNameSize(rawFile)} `);
log.info(`read file data successfully ${getFileNameSize(rawFile)} `);
return {
filedata,

View file

@ -1,19 +1,18 @@
import { getFileNameSize } from "@/next/file";
import log from "@/next/log";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { CustomError } from "@ente/shared/error";
import { addLogLine } from "@ente/shared/logging";
import { Remote } from "comlink";
import { FILE_READER_CHUNK_SIZE } from "constants/upload";
import { getElectronFileStream, getFileStream } from "services/readerService";
import { DataStream, ElectronFile } from "types/upload";
import log from "@/next/log";
export async function getFileHash(
worker: Remote<DedicatedCryptoWorker>,
file: File | ElectronFile,
) {
try {
addLogLine(`getFileHash called for ${getFileNameSize(file)}`);
log.info(`getFileHash called for ${getFileNameSize(file)}`);
let filedata: DataStream;
if (file instanceof File) {
filedata = getFileStream(file, FILE_READER_CHUNK_SIZE);
@ -38,14 +37,12 @@ export async function getFileHash(
throw Error(CustomError.CHUNK_MORE_THAN_EXPECTED);
}
const hash = await worker.completeChunkHashing(hashState);
addLogLine(
log.info(
`file hashing completed successfully ${getFileNameSize(file)}`,
);
return hash;
} catch (e) {
log.error("getFileHash failed", e);
addLogLine(
`file hashing failed ${getFileNameSize(file)} ,${e.message} `,
);
log.info(`file hashing failed ${getFileNameSize(file)} ,${e.message} `);
}
}

View file

@ -2,7 +2,6 @@ import ElectronAPIs from "@/next/electron";
import { convertBytesToHumanReadable, getFileNameSize } from "@/next/file";
import log from "@/next/log";
import { CustomError } from "@ente/shared/error";
import { addLogLine } from "@ente/shared/logging";
import { FILE_TYPE } from "constants/file";
import { BLACK_THUMBNAIL_BASE64 } from "constants/upload";
import isElectron from "is-electron";
@ -104,7 +103,7 @@ const generateImageThumbnailInElectron = async (
maxDimension,
maxSize,
);
addLogLine(
log.info(
`originalFileSize:${convertBytesToHumanReadable(
inputFile?.size,
)},thumbFileSize:${convertBytesToHumanReadable(
@ -136,12 +135,12 @@ export async function generateImageThumbnailUsingCanvas(
let timeout = null;
const isHEIC = isFileHEIC(fileTypeInfo.exactType);
if (isHEIC) {
addLogLine(`HEICConverter called for ${getFileNameSize(file)}`);
log.info(`HEICConverter called for ${getFileNameSize(file)}`);
const convertedBlob = await HeicConversionService.convert(
new Blob([await file.arrayBuffer()]),
);
file = new File([convertedBlob], file.name);
addLogLine(`${getFileNameSize(file)} successfully converted`);
log.info(`${getFileNameSize(file)} successfully converted`);
}
let image = new Image();
imageURL = URL.createObjectURL(new Blob([await file.arrayBuffer()]));
@ -192,17 +191,17 @@ async function generateVideoThumbnail(
) {
let thumbnail: Uint8Array;
try {
addLogLine(
log.info(
`ffmpeg generateThumbnail called for ${getFileNameSize(file)}`,
);
const thumbnail = await FFmpegService.generateVideoThumbnail(file);
addLogLine(
log.info(
`ffmpeg thumbnail successfully generated ${getFileNameSize(file)}`,
);
return await getUint8ArrayView(thumbnail);
} catch (e) {
addLogLine(
log.info(
`ffmpeg thumbnail generated failed ${getFileNameSize(
file,
)} error: ${e.message}`,

View file

@ -5,7 +5,6 @@ import { getDedicatedCryptoWorker } from "@ente/shared/crypto";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
import { CustomError } from "@ente/shared/error";
import { Events, eventBus } from "@ente/shared/events";
import { addLogLine } from "@ente/shared/logging";
import { Remote } from "comlink";
import { UPLOAD_RESULT, UPLOAD_STAGES } from "constants/upload";
import isElectron from "is-electron";
@ -124,7 +123,7 @@ class UploadManager {
this.uploadInProgress = true;
await this.updateExistingFilesAndCollections(collections);
this.uploaderName = uploaderName;
addLogLine(
log.info(
`received ${filesWithCollectionToUploadIn.length} files to upload`,
);
uiService.setFilenames(
@ -137,8 +136,8 @@ class UploadManager {
);
const { metadataJSONFiles, mediaFiles } =
segregateMetadataAndMediaFiles(filesWithCollectionToUploadIn);
addLogLine(`has ${metadataJSONFiles.length} metadata json files`);
addLogLine(`has ${mediaFiles.length} media files`);
log.info(`has ${metadataJSONFiles.length} metadata json files`);
log.info(`has ${mediaFiles.length} media files`);
if (metadataJSONFiles.length) {
UIService.setUploadStage(
UPLOAD_STAGES.READING_GOOGLE_METADATA_FILES,
@ -150,11 +149,11 @@ class UploadManager {
);
}
if (mediaFiles.length) {
addLogLine(`clusterLivePhotoFiles started`);
log.info(`clusterLivePhotoFiles started`);
const analysedMediaFiles =
await UploadService.clusterLivePhotoFiles(mediaFiles);
addLogLine(`clusterLivePhotoFiles ended`);
addLogLine(
log.info(`clusterLivePhotoFiles ended`);
log.info(
`got live photos: ${
mediaFiles.length !== analysedMediaFiles.length
}`,
@ -205,7 +204,7 @@ class UploadManager {
private async parseMetadataJSONFiles(metadataFiles: FileWithCollection[]) {
try {
addLogLine(`parseMetadataJSONFiles function executed `);
log.info(`parseMetadataJSONFiles function executed `);
UIService.reset(metadataFiles.length);
@ -214,7 +213,7 @@ class UploadManager {
if (uploadCancelService.isUploadCancelationRequested()) {
throw Error(CustomError.UPLOAD_CANCELLED);
}
addLogLine(
log.info(
`parsing metadata json file ${getFileNameSize(file)}`,
);
@ -229,7 +228,7 @@ class UploadManager {
);
UIService.increaseFileUploaded();
}
addLogLine(
log.info(
`successfully parsed metadata json file ${getFileNameSize(
file,
)}`,
@ -240,7 +239,7 @@ class UploadManager {
} else {
// and don't break for subsequent files just log and move on
log.error("parsing failed for a file", e);
addLogLine(
log.info(
`failed to parse metadata json file ${getFileNameSize(
file,
)} error: ${e.message}`,
@ -257,7 +256,7 @@ class UploadManager {
}
private async uploadMediaFiles(mediaFiles: FileWithCollection[]) {
addLogLine(`uploadMediaFiles called`);
log.info(`uploadMediaFiles called`);
this.filesToBeUploaded = [...this.filesToBeUploaded, ...mediaFiles];
if (isElectron()) {
@ -321,7 +320,7 @@ class UploadManager {
) {
try {
let decryptedFile: EnteFile;
addLogLine(
log.info(
`post upload action -> fileUploadResult: ${fileUploadResult} uploadedFile present ${!!uploadedFile}`,
);
await this.updateElectronRemainingFiles(fileWithCollection);
@ -397,7 +396,7 @@ class UploadManager {
}
public cancelRunningUpload() {
addLogLine("user cancelled running upload");
log.info("user cancelled running upload");
UIService.setUploadStage(UPLOAD_STAGES.CANCELLING);
uploadCancelService.requestUploadCancelation();
}

View file

@ -1,6 +1,5 @@
import { getFileNameSize } from "@/next/file";
import log from "@/next/log";
import { addLogLine } from "@ente/shared/logging";
import { NULL_EXTRACTED_METADATA } from "constants/upload";
import * as ffmpegService from "services/ffmpeg/ffmpegService";
import { ElectronFile } from "types/upload";
@ -8,14 +7,14 @@ import { ElectronFile } from "types/upload";
export async function getVideoMetadata(file: File | ElectronFile) {
let videoMetadata = NULL_EXTRACTED_METADATA;
try {
addLogLine(`getVideoMetadata called for ${getFileNameSize(file)}`);
log.info(`getVideoMetadata called for ${getFileNameSize(file)}`);
videoMetadata = await ffmpegService.extractVideoMetadata(file);
addLogLine(
log.info(
`videoMetadata successfully extracted ${getFileNameSize(file)}`,
);
} catch (e) {
log.error("failed to get video metadata", e);
addLogLine(
log.info(
`videoMetadata extracted failed ${getFileNameSize(file)} ,${
e.message
} `,

View file

@ -1,10 +1,9 @@
import { addLogLine } from "@ente/shared/logging";
import log from "@/next/log";
import { promiseWithTimeout } from "@ente/shared/utils";
import QueueProcessor from "@ente/shared/utils/queueProcessor";
import { generateTempName } from "@ente/shared/utils/temp";
import { createFFmpeg, FFmpeg } from "ffmpeg-wasm";
import { getUint8ArrayView } from "services/readerService";
import log from "@/next/log";
const INPUT_PATH_PLACEHOLDER = "INPUT";
const FFMPEG_PLACEHOLDER = "FFMPEG";
@ -86,7 +85,7 @@ export class WasmFFmpeg {
return cmdPart;
}
});
addLogLine(`${cmd}`);
log.info(`${cmd}`);
await this.ffmpeg.run(...cmd);
return new File(
[this.ffmpeg.FS("readFile", tempOutputFilePath)],

View file

@ -1,5 +1,4 @@
import log from "@/next/log";
import { addLogLine } from "@ente/shared/logging";
import { ElectronFile } from "types/upload";
import { EventQueueItem } from "types/watchFolder";
import watchFolderService from "./watchFolderService";
@ -22,7 +21,7 @@ export async function diskFileAddedCallback(file: ElectronFile) {
files: [file],
};
watchFolderService.pushEvent(event);
addLogLine(
log.info(
`added (upload) to event queue, collectionName:${event.collectionName} folderPath:${event.folderPath}, filesCount: ${event.files.length}`,
);
} catch (e) {
@ -48,7 +47,7 @@ export async function diskFileRemovedCallback(filePath: string) {
paths: [filePath],
};
watchFolderService.pushEvent(event);
addLogLine(
log.info(
`added (trash) to event queue collectionName:${event.collectionName} folderPath:${event.folderPath} , pathsCount: ${event.paths.length}`,
);
} catch (e) {
@ -63,11 +62,11 @@ export async function diskFolderRemovedCallback(folderPath: string) {
(mapping) => mapping.folderPath === folderPath,
);
if (!mapping) {
addLogLine(`folder not found in mappings, ${folderPath}`);
log.info(`folder not found in mappings, ${folderPath}`);
throw Error(`Watch mapping not found`);
}
watchFolderService.pushTrashedDir(folderPath);
addLogLine(`added trashedDir, ${folderPath}`);
log.info(`added trashedDir, ${folderPath}`);
} catch (e) {
log.error("error while calling diskFolderRemovedCallback", e);
}

View file

@ -1,7 +1,6 @@
import ElectronAPIs from "@/next/electron";
import log from "@/next/log";
import { CustomError } from "@ente/shared/error";
import { addLogLine } from "@ente/shared/logging";
import { getAlbumsURL } from "@ente/shared/network/api";
import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
import { getUnixTimeInMicroSecondsWithDelta } from "@ente/shared/time";
@ -568,13 +567,13 @@ export const getOrCreateAlbum = async (
}
for (const collection of existingCollections) {
if (isValidReplacementAlbum(collection, user, albumName)) {
addLogLine(
log.info(
`Found existing album ${albumName} with id ${collection.id}`,
);
return collection;
}
}
const album = await createAlbum(albumName);
addLogLine(`Created new album ${albumName} with id ${album.id}`);
log.info(`Created new album ${albumName} with id ${album.id}`);
return album;
};
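A hedged usage sketch: only the opening parenthesis of the signature is visible in this hunk, so the parameters below (the album name plus the caller's existing collections) are assumptions based on the loop above.

// Hypothetical call site; "Screenshots" and existingCollections are illustrative.
const album = await getOrCreateAlbum("Screenshots", existingCollections);
// Either "Found existing album ..." or "Created new album ..." is logged via log.info.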

View file

@@ -1,4 +1,4 @@
import { addLogLine } from "@ente/shared/logging";
import log from "@/next/log";
import { JobConfig, JobResult, JobState } from "types/common/job";
export class SimpleJob<R extends JobResult> {
@@ -27,7 +26,7 @@ export class SimpleJob<R extends JobResult> {
if (this.state !== "Running") {
this.scheduleNext();
} else {
addLogLine("Job already running, not scheduling");
log.info("Job already running, not scheduling");
}
}
@@ -41,7 +40,7 @@ export class SimpleJob<R extends JobResult> {
this.intervalSec * 1000,
);
this.state = "Scheduled";
addLogLine("Scheduled next job after: ", this.intervalSec);
log.info("Scheduled next job after: ", this.intervalSec);
}
async run() {
@@ -58,7 +57,7 @@ export class SimpleJob<R extends JobResult> {
} else {
this.resetInterval();
}
addLogLine("Job completed");
log.info("Job completed");
} catch (e) {
console.error("Error while running Job: ", e);
} finally {
@@ -77,6 +76,6 @@ export class SimpleJob<R extends JobResult> {
clearTimeout(this.nextTimeoutId);
this.nextTimeoutId = undefined;
this.state = "NotScheduled";
addLogLine("Cleared next job");
log.info("Cleared next job");
}
}
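For context, a sketch of how a periodic job might be driven by this class. The constructor and callback shapes are assumptions (the diff only shows the scheduling and logging internals), so treat this as illustrative rather than the actual API.

// Hypothetical wiring; the JobConfig/JobResult fields and the start() method are assumed.
const syncJob = new SimpleJob<JobResult>({ intervalSec: 5 * 60 }, async () => {
    // do the periodic work, then report whether the job should back off
    return { shouldBackoff: false };
});
syncJob.start(); // eventually logs "Scheduled next job after: ..." via log.info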

View file

@@ -80,7 +80,7 @@ export function cropWithRotation(
}
}
// addLogLine({ imageBitmap, box, outputSize });
// log.info({ imageBitmap, box, outputSize });
const offscreen = new OffscreenCanvas(outputSize.width, outputSize.height);
const offscreenCtx = offscreen.getContext("2d");

View file

@@ -61,7 +61,7 @@ export function getFaceAlignmentUsingSimilarityTransform(
simTransform.rotation.get(0, 1),
simTransform.rotation.get(0, 0),
);
// addLogLine({ affineMatrix, meanTranslation, centerMat, center, toMean: simTransform.toMean, fromMean: simTransform.fromMean, size });
// log.info({ affineMatrix, meanTranslation, centerMat, center, toMean: simTransform.toMean, fromMean: simTransform.fromMean, size });
return {
affineMatrix,
@@ -169,7 +169,7 @@ export function ibExtractFaceImageUsingTransform(
const scaledMatrix = new Matrix(alignment.affineMatrix)
.mul(faceSize)
.to2DArray();
// addLogLine("scaledMatrix: ", scaledMatrix);
// log.info("scaledMatrix: ", scaledMatrix);
return transform(image, scaledMatrix, faceSize, faceSize);
}
@@ -230,7 +230,7 @@ export function getRotatedFaceImage(
padding: number = 1.5,
): tf.Tensor4D {
const paddedBox = enlargeBox(faceDetection.box, padding);
// addLogLine("paddedBox", paddedBox);
// log.info("paddedBox", paddedBox);
const landmarkPoints = faceDetection.landmarks;
return tf.tidy(() => {
@@ -245,15 +245,15 @@
foreheadCenter,
); // landmarkPoints[BLAZEFACE_NOSE_INDEX]
// angle = computeRotation(leftEye, rightEye);
// addLogLine('angle: ', angle);
// log.info('angle: ', angle);
const faceCenter = getBoxCenter(faceDetection.box);
// addLogLine('faceCenter: ', faceCenter);
// log.info('faceCenter: ', faceCenter);
const faceCenterNormalized: [number, number] = [
faceCenter.x / tf4dFloat32Image.shape[2],
faceCenter.y / tf4dFloat32Image.shape[1],
];
// addLogLine('faceCenterNormalized: ', faceCenterNormalized);
// log.info('faceCenterNormalized: ', faceCenterNormalized);
let rotatedImage = tf4dFloat32Image;
if (angle !== 0) {

View file

@@ -1,4 +1,3 @@
import { addLogLine } from "@ente/shared/logging";
import { CacheStorageService } from "@ente/shared/storage/cacheStorage";
import { CACHES } from "@ente/shared/storage/cacheStorage/constants";
import { getBlobFromCache } from "@ente/shared/storage/cacheStorage/helpers";
@@ -105,7 +104,7 @@ export async function removeOldFaceCrops(
}
export async function removeFaceCropUrls(faceCropUrls: Array<string>) {
addLogLine("Removing face crop urls: ", JSON.stringify(faceCropUrls));
log.info("Removing face crop urls: ", JSON.stringify(faceCropUrls));
const faceCropCache = await CacheStorageService.open(CACHES.FACE_CROPS);
const urlRemovalPromises = faceCropUrls?.map((url) =>
faceCropCache.delete(url),
@@ -132,7 +131,7 @@ export function extractFaceImageFromCrop(
.shift(-imageBox.x, -imageBox.y)
.rescale(scale)
.round();
// addLogLine({ box, imageBox, faceCropImage, scale, scaledBox, scaledImageBox, shiftedBox });
// log.info({ box, imageBox, faceCropImage, scale, scaledBox, scaledImageBox, shiftedBox });
const faceSizeDimentions: Dimensions = {
width: faceSize,

View file

@@ -1,4 +1,3 @@
import { addLogLine } from "@ente/shared/logging";
import { CACHES } from "@ente/shared/storage/cacheStorage/constants";
import { cached } from "@ente/shared/storage/cacheStorage/helpers";
import * as tf from "@tensorflow/tfjs-core";
@@ -130,7 +129,7 @@ export function extractFaces(
];
});
// addLogLine('boxes: ', boxes[0]);
// log.info('boxes: ', boxes[0]);
const faceImagesTensor = tf.image.cropAndResize(
reshapedImage,
@@ -356,14 +355,14 @@ export async function getOriginalImageBitmap(
} else {
fileBlob = await getOriginalConvertedFile(file, queue);
}
addLogLine("[MLService] Got file: ", file.id.toString());
log.info("[MLService] Got file: ", file.id.toString());
return getImageBlobBitmap(fileBlob);
}
export async function getThumbnailImageBitmap(file: EnteFile) {
const thumb = await DownloadManager.getThumbnail(file);
addLogLine("[MLService] Got thumbnail: ", file.id.toString());
log.info("[MLService] Got thumbnail: ", file.id.toString());
return getImageBlobBitmap(new Blob([thumb]));
}
@@ -380,7 +379,7 @@ export async function getLocalFileImageBitmap(
export async function getPeopleList(file: EnteFile): Promise<Array<Person>> {
let startTime = Date.now();
const mlFileData: MlFileData = await mlIDbStorage.getFile(file.id);
addLogLine(
log.info(
"getPeopleList:mlFilesStore:getItem",
Date.now() - startTime,
"ms",
@@ -395,18 +394,18 @@ export async function getPeopleList(file: EnteFile): Promise<Array<Person>> {
if (!peopleIds || peopleIds.length < 1) {
return [];
}
// addLogLine("peopleIds: ", peopleIds);
// log.info("peopleIds: ", peopleIds);
startTime = Date.now();
const peoplePromises = peopleIds.map(
(p) => mlIDbStorage.getPerson(p) as Promise<Person>,
);
const peopleList = await Promise.all(peoplePromises);
addLogLine(
log.info(
"getPeopleList:mlPeopleStore:getItems",
Date.now() - startTime,
"ms",
);
// addLogLine("peopleList: ", peopleList);
// log.info("peopleList: ", peopleList);
return peopleList;
}
@@ -514,7 +513,7 @@ export function getNearestPointIndex(
(a, b) => Math.abs(a.distance) - Math.abs(b.distance),
);
// addLogLine('Nearest dist: ', nearest.distance, maxDistance);
// log.info('Nearest dist: ', nearest.distance, maxDistance);
if (!maxDistance || nearest.distance <= maxDistance) {
return nearest.index;
}
@@ -522,11 +521,11 @@
export function logQueueStats(queue: PQueue, name: string) {
queue.on("active", () =>
addLogLine(
log.info(
`queuestats: ${name}: Active, Size: ${queue.size} Pending: ${queue.pending}`,
),
);
queue.on("idle", () => addLogLine(`queuestats: ${name}: Idle`));
queue.on("idle", () => log.info(`queuestats: ${name}: Idle`));
queue.on("error", (error) =>
console.error(`queuestats: ${name}: Error, `, error),
);
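logQueueStats is the one helper in this hunk that still mixes log.info and console.error. A minimal usage sketch follows; the queue name and concurrency are illustrative values, not taken from this commit.

import PQueue from "p-queue";

// Instrument a hypothetical download queue; every "active"/"idle" transition
// is then recorded through log.info as shown above.
const thumbnailQueue = new PQueue({ concurrency: 4 });
logQueueStats(thumbnailQueue, "thumbnail-downloads");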

View file

@@ -1,6 +1,5 @@
import { haveWindow } from "@/next/env";
import log from "@/next/log";
import { addLogLine } from "@ente/shared/logging";
import {
DEFAULT_ML_SEARCH_CONFIG,
DEFAULT_ML_SYNC_CONFIG,
@@ -129,7 +128,7 @@ class MLIDbStorage {
.objectStore("configs")
.add(DEFAULT_ML_SEARCH_CONFIG, ML_SEARCH_CONFIG_NAME);
}
addLogLine(
log.info(
`Ml DB upgraded to version: ${newVersion} from version: ${oldVersion}`,
);
},
@@ -139,7 +138,7 @@
public get db(): Promise<IDBPDatabase<MLDb>> {
if (!this._db) {
this._db = this.openDB();
addLogLine("Opening Ml DB");
log.info("Opening Ml DB");
}
return this._db;
@@ -149,7 +148,7 @@
const db = await this.db;
db.close();
await deleteDB(MLDATA_DB_NAME);
addLogLine("Cleared Ml DB");
log.info("Cleared Ml DB");
this._db = undefined;
await this.db;
}
@@ -278,7 +277,7 @@ class MLIDbStorage {
mlFileData.faces &&
allFacesMap.set(mlFileData.fileId, mlFileData.faces),
);
addLogLine("getAllFacesMap", Date.now() - startTime, "ms");
log.info("getAllFacesMap", Date.now() - startTime, "ms");
return allFacesMap;
}
@@ -297,7 +296,7 @@
cursor = await cursor.continue();
}
await tx.done;
addLogLine("updateFaces", Date.now() - startTime, "ms");
log.info("updateFaces", Date.now() - startTime, "ms");
}
public async getAllObjectsMap() {
@ -310,7 +309,7 @@ class MLIDbStorage {
mlFileData.objects &&
allObjectsMap.set(mlFileData.fileId, mlFileData.objects),
);
addLogLine("allObjectsMap", Date.now() - startTime, "ms");
log.info("allObjectsMap", Date.now() - startTime, "ms");
return allObjectsMap;
}

View file

@@ -1,4 +1,4 @@
import { addLogLine } from "@ente/shared/logging";
import log from "@/next/log";
import { expose } from "comlink";
import mlService from "services/machineLearning/machineLearningService";
import { EnteFile } from "types/file";
@@ -6,7 +6,7 @@ import { MachineLearningWorker } from "types/machineLearning";
export class DedicatedMLWorker implements MachineLearningWorker {
constructor() {
addLogLine("DedicatedMLWorker constructor called");
log.info("DedicatedMLWorker constructor called");
}
public async closeLocalSyncContext() {

View file

@@ -1,24 +1,21 @@
import { sendOtt } from "@ente/accounts/api/user";
import { isWeakPassword } from "@ente/accounts/utils";
import { generateKeyAndSRPAttributes } from "@ente/accounts/utils/srp";
import SubmitButton from "@ente/shared/components/SubmitButton";
import {
generateAndSaveIntermediateKeyAttributes,
saveKeyInSessionStore,
} from "@ente/shared/crypto/helpers";
import { LS_KEYS, setData } from "@ente/shared/storage/localStorage";
import { Formik, FormikHelpers } from "formik";
import React, { useState } from "react";
import * as Yup from "yup";
import log from "@/next/log";
import { sendOtt } from "@ente/accounts/api/user";
import { PasswordStrengthHint } from "@ente/accounts/components/PasswordStrength";
import { PAGES } from "@ente/accounts/constants/pages";
import { isWeakPassword } from "@ente/accounts/utils";
import { generateKeyAndSRPAttributes } from "@ente/accounts/utils/srp";
import { APPS } from "@ente/shared/apps/constants";
import { VerticallyCentered } from "@ente/shared/components//Container";
import FormPaperFooter from "@ente/shared/components/Form/FormPaper/Footer";
import FormPaperTitle from "@ente/shared/components/Form/FormPaper/Title";
import ShowHidePassword from "@ente/shared/components/Form/ShowHidePassword";
import LinkButton from "@ente/shared/components/LinkButton";
import SubmitButton from "@ente/shared/components/SubmitButton";
import {
generateAndSaveIntermediateKeyAttributes,
saveKeyInSessionStore,
} from "@ente/shared/crypto/helpers";
import { LS_KEYS, setData } from "@ente/shared/storage/localStorage";
import {
setJustSignedUp,
setLocalReferralSource,
@@ -37,9 +34,12 @@ import {
Tooltip,
Typography,
} from "@mui/material";
import { Formik, FormikHelpers } from "formik";
import { t } from "i18next";
import { NextRouter } from "next/router";
import React, { useState } from "react";
import { Trans } from "react-i18next";
import * as Yup from "yup";
interface FormValues {
email: string;

View file

@@ -1,5 +1,5 @@
import { isDevBuild } from "@/next/env";
import { addLogLine } from "@ente/shared/logging";
import log from "@/next/log";
/**
* Log a standard startup banner.
@@ -15,7 +15,7 @@ export const logStartupBanner = (appId: string, userId?: number) => {
const sha = process.env.GIT_SHA;
const buildId = isDevBuild ? "dev " : sha ? `git ${sha} ` : "";
addLogLine(`Starting ente-${appIdL}-web ${buildId}uid ${userId ?? 0}`);
log.info(`Starting ente-${appIdL}-web ${buildId}uid ${userId ?? 0}`);
};
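A quick illustration of what the migrated banner line produces, assuming appIdL is the lowercased appId and that this runs in a dev build; the app id and user id below are made-up values.

logStartupBanner("photos", 42);
// Expected output in a dev build: "Starting ente-photos-web dev uid 42"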
interface LogEntry {

View file

@@ -8,7 +7,6 @@ import { getActualKey } from "@ente/shared/user";
import { KeyAttributes } from "@ente/shared/user/types";
import isElectron from "is-electron";
import ComlinkCryptoWorker from ".";
import { addLogLine } from "../logging";
const LOGIN_SUB_KEY_LENGTH = 32;
const LOGIN_SUB_KEY_ID = 1;
@@ -104,7 +103,7 @@
const sessionKeyAttributes =
await cryptoWorker.generateKeyAndEncryptToB64(key);
setKey(keyType, sessionKeyAttributes);
addLogLine("fromDesktop", fromDesktop);
if (
isElectron() &&
!fromDesktop &&

View file

@@ -1,9 +0,0 @@
import log from "@/next/log";
export function addLogLine(
msg: string | number | boolean,
...optionalParams: (string | number | boolean)[]
) {
const completeLog = [msg, ...optionalParams].join(" ");
log.info(completeLog);
}
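Since the shim above simply joined its arguments with spaces before calling log.info, a multi-argument call site migrates mechanically to a template literal. A sketch, using the getAllFacesMap timing line from the MLIDbStorage hunk earlier in this commit:

// Before: the shim produced "getAllFacesMap <n> ms" by joining the arguments with spaces.
addLogLine("getAllFacesMap", Date.now() - startTime, "ms");

// After: the same message, built explicitly at the call site.
log.info(`getAllFacesMap ${Date.now() - startTime} ms`);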

View file

@@ -1,27 +0,0 @@
import { ApiError } from "@ente/shared/error";
import { addLogLine } from "@ente/shared/logging";
/** Deprecated: Use `logError` from `@/utils/logging` */
export const logError = async (
error: any,
msg: string,
info?: Record<string, unknown>,
skipAddLogLine = false,
) => {
if (skipAddLogLine) return;
if (error instanceof ApiError) {
addLogLine(`error: ${error?.name} ${error?.message}
msg: ${msg} errorCode: ${JSON.stringify(error?.errCode)}
httpStatusCode: ${JSON.stringify(error?.httpStatusCode)} ${
info ? `info: ${JSON.stringify(info)}` : ""
}
${error?.stack}`);
} else {
addLogLine(
`error: ${error?.name} ${error?.message}
msg: ${msg} ${info ? `info: ${JSON.stringify(info)}` : ""}
${error?.stack}`,
);
}
};
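Call sites of this deprecated helper move to log.error from "@/next/log", which takes the message first and the thrown value second, matching the try/catch blocks updated elsewhere in this commit. A sketch, modelled on the getVideoMetadata hunk above; the wrapper name is hypothetical.

async function getVideoMetadataSafely(file: File | ElectronFile) {
    let videoMetadata = NULL_EXTRACTED_METADATA;
    try {
        videoMetadata = await ffmpegService.extractVideoMetadata(file);
    } catch (e) {
        // Before: logError(e, "failed to get video metadata");
        log.error("failed to get video metadata", e);
    }
    return videoMetadata;
}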

View file

@@ -1,7 +1,7 @@
import log from "@/next/log";
import { CacheStorageService } from ".";
import { CACHES } from "./constants";
import { LimitedCache } from "./types";
import log from "@/next/log";
export async function cached(
cacheName: string,

View file

@@ -1,41 +0,0 @@
/**
* Log an error
*
* The {@link message} property describes what went wrong. Generally (but not
* always) in such situations we also have an "error" object that has specific
* details about the issue - that gets passed as the second parameter.
*
* Note that the "error" {@link e} is not typed. This is because in JavaScript
* any arbitrary value can be thrown. So this function allows us to pass it an
* arbitrary value as the error, and will internally figure out how best to deal
* with it.
*
* Where and how this error gets logged is dependent on where this code is
* running. The default implementation logs a string to the console, but in
* practice the layers above us will use the hooks provided in this file to
* route and show this error elsewhere.
*
* TODO (MR): Currently this is a placeholder function to funnel error logs
* through. This needs to do what the existing logError in @ente/shared does,
* but it cannot have a direct Electron dependency here. For now, we just
* log on the console.
*/
export const logError = (message: string, e?: unknown) => {
if (e === undefined || e === null) {
console.error(message);
return;
}
let es: string;
if (e instanceof Error) {
// In practice, we expect ourselves to be called with Error objects, so
// this is the happy path so to say.
es = `${e.name}: ${e.message}\n${e.stack}`;
} else {
// For the remaining rare cases, use the default string serialization of e.
es = String(e);
}
// TODO(MR): Use addLogLine
console.error(`${message}: ${es}`);
};