Remove deprecated loggers

Manav Rathi 2024-04-09 12:01:02 +05:30
parent 0d0e20f7c4
commit d441418b5b
49 changed files with 242 additions and 362 deletions
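
The pattern is the same across all 49 files: the deprecated vararg helper `addLogLine` from `@ente/shared/logging` is replaced by the `log` object from `@/next/log`, with multi-argument calls folded into single template literals. A minimal sketch of the two shapes, inferred from the call sites in this diff (both stubs are hedged stand-ins; the real modules may differ in detail):

```ts
// Old style: a vararg helper that joined its arguments into one line.
const addLogLine = (...args: unknown[]) => console.log(args.join(" "));

// New style: a leveled logger; most migrated call sites pass one string.
const log = {
    info: (...args: unknown[]) => console.log(...args),
    error: (message: string, e?: unknown) => console.error(message, e),
};

addLogLine("getPeopleList", 42, "ms"); // before
log.info(`getPeopleList ${42} ms`); // after
```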

View file

@@ -1,3 +1,4 @@
+import log from "@/next/log";
 import ComlinkCryptoWorker from "@ente/shared/crypto";
 import { FILE_TYPE, RAW_FORMATS } from "constants/file";
 import CastDownloadManager from "services/castDownloadManager";
@@ -9,7 +10,6 @@ import {
     FileMagicMetadata,
     FilePublicMagicMetadata,
 } from "types/file";
-import log from "@/next/log";
 export function sortFiles(files: EnteFile[], sortAsc = false) {
     // sort based on the time of creation time of the file,

View file

@@ -1,4 +1,4 @@
-import { addLogLine } from "@ente/shared/logging";
+import log from "@/next/log";
 import { CACHES } from "@ente/shared/storage/cacheStorage/constants";
 import { styled } from "@mui/material";
 import { Legend } from "components/PhotoViewer/styledComponents/Legend";
@@ -86,11 +86,11 @@ export function PhotoPeopleList(props: PhotoPeopleListProps) {
         let didCancel = false;
         async function updateFaceImages() {
-            addLogLine("calling getPeopleList");
+            log.info("calling getPeopleList");
             const startTime = Date.now();
             const people = await getPeopleList(props.file);
-            addLogLine("getPeopleList", Date.now() - startTime, "ms");
-            addLogLine("getPeopleList done, didCancel: ", didCancel);
+            log.info(`getPeopleList ${Date.now() - startTime} ms`);
+            log.info(`getPeopleList done, didCancel: ${didCancel}`);
             !didCancel && setPeople(people);
         }

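The hunk above also illustrates how the timing logs now wrap elapsed time into one template literal. A sketch of the same effect shape, with hypothetical stand-ins for the component's dependencies:

```ts
// Sketch only: time an async call, log via a single template literal,
// and drop the result if the effect was cleaned up meanwhile.
// getPeopleList/setPeople/isCancelled are hypothetical stand-ins.
async function updateFaceImages(
    getPeopleList: () => Promise<unknown[]>,
    setPeople: (people: unknown[]) => void,
    isCancelled: () => boolean,
) {
    console.log("calling getPeopleList");
    const startTime = Date.now();
    const people = await getPeopleList();
    console.log(`getPeopleList ${Date.now() - startTime} ms`);
    if (!isCancelled()) setPeople(people);
}
```
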
View file

@@ -2,7 +2,6 @@ import log from "@/next/log";
 import { PHOTOS_PAGES } from "@ente/shared/constants/pages";
 import { CustomError } from "@ente/shared/error";
 import useMemoSingleThreaded from "@ente/shared/hooks/useMemoSingleThreaded";
-import { addLogLine } from "@ente/shared/logging";
 import { styled } from "@mui/material";
 import PhotoViewer from "components/PhotoViewer";
 import { TRASH_SECTION } from "constants/collection";
@@ -184,7 +183,7 @@ const PhotoFrame = ({
         const file = displayFiles[index];
         // this is to prevent outdated updateURL call from updating the wrong file
         if (file.id !== id) {
-            addLogLine(
+            log.info(
                 `[${id}]PhotoSwipe: updateURL: file id mismatch: ${file.id} !== ${id}`,
             );
             throw Error(CustomError.UPDATE_URL_FILE_ID_MISMATCH);
@@ -204,7 +203,7 @@ const PhotoFrame = ({
         const file = displayFiles[index];
         // this is to prevent outdate updateSrcURL call from updating the wrong file
         if (file.id !== id) {
-            addLogLine(
+            log.info(
                 `[${id}]PhotoSwipe: updateSrcURL: file id mismatch: ${file.id}`,
             );
             throw Error(CustomError.UPDATE_URL_FILE_ID_MISMATCH);
@@ -212,7 +211,7 @@ const PhotoFrame = ({
         if (file.isSourceLoaded && !forceUpdate) {
             throw Error(CustomError.URL_ALREADY_SET);
         } else if (file.conversionFailed) {
-            addLogLine(`[${id}]PhotoSwipe: updateSrcURL: conversion failed`);
+            log.info(`[${id}]PhotoSwipe: updateSrcURL: conversion failed`);
             throw Error(CustomError.FILE_CONVERSION_FAILED);
         }
@@ -308,7 +307,7 @@ const PhotoFrame = ({
         index: number,
         item: EnteFile,
     ) => {
-        addLogLine(
+        log.info(
             `[${
                 item.id
             }] getSlideData called for thumbnail:${!!item.msrc} sourceLoaded:${
@@ -319,17 +318,15 @@ const PhotoFrame = ({
         if (!item.msrc) {
             try {
                 if (thumbFetching[item.id]) {
-                    addLogLine(
-                        `[${item.id}] thumb download already in progress`,
-                    );
+                    log.info(`[${item.id}] thumb download already in progress`);
                     return;
                 }
-                addLogLine(`[${item.id}] doesn't have thumbnail`);
+                log.info(`[${item.id}] doesn't have thumbnail`);
                 thumbFetching[item.id] = true;
                 const url = await DownloadManager.getThumbnailForPreview(item);
                 try {
                     updateURL(index)(item.id, url);
-                    addLogLine(
+                    log.info(
                         `[${
                             item.id
                         }] calling invalidateCurrItems for thumbnail msrc :${!!item.msrc}`,
@@ -355,20 +352,20 @@ const PhotoFrame = ({
         if (item.isSourceLoaded || item.conversionFailed) {
             if (item.isSourceLoaded) {
-                addLogLine(`[${item.id}] source already loaded`);
+                log.info(`[${item.id}] source already loaded`);
             }
             if (item.conversionFailed) {
-                addLogLine(`[${item.id}] conversion failed`);
+                log.info(`[${item.id}] conversion failed`);
             }
             return;
         }
         if (fetching[item.id]) {
-            addLogLine(`[${item.id}] file download already in progress`);
+            log.info(`[${item.id}] file download already in progress`);
             return;
         }
         try {
-            addLogLine(`[${item.id}] new file src request`);
+            log.info(`[${item.id}] new file src request`);
             fetching[item.id] = true;
             const srcURLs = await DownloadManager.getFileForPreview(item);
             if (item.metadata.fileType === FILE_TYPE.LIVE_PHOTO) {
@@ -383,7 +380,7 @@ const PhotoFrame = ({
                 };
                 try {
                     await updateSrcURL(index, item.id, dummyImgSrcUrl);
-                    addLogLine(
+                    log.info(
                         `[${item.id}] calling invalidateCurrItems for live photo imgSrc, source loaded :${item.isSourceLoaded}`,
                     );
                     instance.invalidateCurrItems();
@@ -417,7 +414,7 @@ const PhotoFrame = ({
                         loadedLivePhotoSrcURL,
                         true,
                     );
-                    addLogLine(
+                    log.info(
                         `[${item.id}] calling invalidateCurrItems for live photo complete, source loaded :${item.isSourceLoaded}`,
                     );
                     instance.invalidateCurrItems();
@@ -435,7 +432,7 @@ const PhotoFrame = ({
             } else {
                 try {
                     await updateSrcURL(index, item.id, srcURLs);
-                    addLogLine(
+                    log.info(
                         `[${item.id}] calling invalidateCurrItems for src, source loaded :${item.isSourceLoaded}`,
                     );
                     instance.invalidateCurrItems();
@@ -478,7 +475,7 @@ const PhotoFrame = ({
         }
         try {
             updateURL(index)(item.id, item.msrc, true);
-            addLogLine(
+            log.info(
                 `[${
                     item.id
                 }] calling invalidateCurrItems for thumbnail msrc :${!!item.msrc}`,
@@ -497,7 +494,7 @@ const PhotoFrame = ({
             // ignore
         }
         try {
-            addLogLine(
+            log.info(
                 `[${item.id}] new file getConvertedVideo request- ${item.metadata.title}}`,
             );
             fetching[item.id] = true;
@@ -506,7 +503,7 @@ const PhotoFrame = ({
             try {
                 await updateSrcURL(index, item.id, srcURL, true);
-                addLogLine(
+                log.info(
                     `[${item.id}] calling invalidateCurrItems for src, source loaded :${item.isSourceLoaded}`,
                 );
                 instance.invalidateCurrItems();

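Several of the hunks above log and then bail out when a download is already in flight (`thumbFetching[item.id]`, `fetching[item.id]`). A sketch of that de-duplication guard under assumed types (the map, ids, and download function are hypothetical stand-ins; the real component clears the flag on its own paths):

```ts
// Sketch only: a per-id flag prevents a second download for the same
// item while one is already in progress.
const fetching: Record<number, boolean> = {};

async function fetchOnce(id: number, download: () => Promise<string>) {
    if (fetching[id]) {
        console.log(`[${id}] file download already in progress`);
        return;
    }
    try {
        fetching[id] = true;
        return await download();
    } finally {
        fetching[id] = false;
    }
}
```
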
View file

@@ -25,7 +25,7 @@ const Caption = styled("span")`
 const MenuWithPeople = (props) => {
     const appContext = useContext(AppContext);
-    // addLogLine("props.selectProps.options: ", selectRef);
+    // log.info("props.selectProps.options: ", selectRef);
     const peopleSuggestions = props.selectProps.options.filter(
         (o) => o.type === SuggestionType.PERSON,
     );

View file

@@ -1,13 +1,12 @@
+import log from "@/next/log";
+import { savedLogs } from "@/next/log-web";
+import { downloadAsFile } from "@ente/shared/utils";
+import Typography from "@mui/material/Typography";
+import { EnteMenuItem } from "components/Menu/EnteMenuItem";
 import { t } from "i18next";
 import { AppContext } from "pages/_app";
 import { useContext, useEffect, useState } from "react";
 import { Trans } from "react-i18next";
-import { savedLogs } from "@/next/log-web";
-import { addLogLine } from "@ente/shared/logging";
-import { downloadAsFile } from "@ente/shared/utils";
-import Typography from "@mui/material/Typography";
-import { EnteMenuItem } from "components/Menu/EnteMenuItem";
 import { isInternalUser } from "utils/user";
 import { testUpload } from "../../../tests/upload.test";
 import {
@@ -40,7 +39,7 @@ export default function DebugSection() {
     });
     const downloadLogs = () => {
-        addLogLine("Downloading logs");
+        log.info("Downloading logs");
         if (electron) electron.openLogDirectory();
         else downloadAsFile(`debug_logs_${Date.now()}.txt`, savedLogs());
     };

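The `downloadLogs` branch above keeps the desktop/web split: the electron bridge opens the log directory, the web build offers the buffered logs as a file. A hedged sketch of the web fallback (the bridge interface and `savedLogs` are stand-ins for the real helpers):

```ts
// Sketch only: on desktop open the log directory, on web download the
// collected in-memory logs as a text file.
interface ElectronBridge {
    openLogDirectory: () => void;
}

const downloadLogs = (
    electron: ElectronBridge | undefined,
    savedLogs: () => string,
) => {
    console.log("Downloading logs");
    if (electron) electron.openLogDirectory();
    else {
        const blob = new Blob([savedLogs()], { type: "text/plain" });
        const a = document.createElement("a");
        a.href = URL.createObjectURL(blob);
        a.download = `debug_logs_${Date.now()}.txt`;
        a.click();
    }
};
```
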
View file

@@ -1,6 +1,5 @@
 import log from "@/next/log";
 import { CustomError } from "@ente/shared/error";
-import { addLogLine } from "@ente/shared/logging";
 import { isPromise } from "@ente/shared/utils";
 import DiscFullIcon from "@mui/icons-material/DiscFull";
 import UserNameInputDialog from "components/UserNameInputDialog";
@@ -179,9 +178,7 @@ export default function Uploader(props: Props) {
         if (isElectron()) {
             ImportService.getPendingUploads().then(
                 ({ files: electronFiles, collectionName, type }) => {
-                    addLogLine(
-                        `found pending desktop upload, resuming uploads`,
-                    );
+                    log.info(`found pending desktop upload, resuming uploads`);
                     resumeDesktopUpload(type, electronFiles, collectionName);
                 },
             );
@@ -212,20 +209,20 @@ export default function Uploader(props: Props) {
             pickedUploadType.current === PICKED_UPLOAD_TYPE.FOLDERS &&
             props.webFolderSelectorFiles?.length > 0
         ) {
-            addLogLine(`received folder upload request`);
+            log.info(`received folder upload request`);
             setWebFiles(props.webFolderSelectorFiles);
         } else if (
             pickedUploadType.current === PICKED_UPLOAD_TYPE.FILES &&
             props.webFileSelectorFiles?.length > 0
         ) {
-            addLogLine(`received file upload request`);
+            log.info(`received file upload request`);
             setWebFiles(props.webFileSelectorFiles);
         } else if (props.dragAndDropFiles?.length > 0) {
             isDragAndDrop.current = true;
             if (electron) {
                 const main = async () => {
                     try {
-                        addLogLine(`uploading dropped files from desktop app`);
+                        log.info(`uploading dropped files from desktop app`);
                         // check and parse dropped files which are zip files
                         let electronFiles = [] as ElectronFile[];
                         for (const file of props.dragAndDropFiles) {
@@ -234,7 +231,7 @@ export default function Uploader(props: Props) {
                                 await electron.getElectronFilesFromGoogleZip(
                                     (file as any).path,
                                 );
-                                addLogLine(
+                                log.info(
                                     `zip file - ${file.name} contains ${zipFiles.length} files`,
                                 );
                                 electronFiles = [...electronFiles, ...zipFiles];
@@ -252,7 +249,7 @@ export default function Uploader(props: Props) {
                                 );
                             }
                         }
-                        addLogLine(
+                        log.info(
                             `uploading dropped files from desktop app - ${electronFiles.length} files found`,
                         );
                         setElectronFiles(electronFiles);
@@ -263,7 +260,7 @@ export default function Uploader(props: Props) {
                 };
                 main();
             } else {
-                addLogLine(`uploading dropped files from web app`);
+                log.info(`uploading dropped files from web app`);
                 setWebFiles(props.dragAndDropFiles);
             }
         }
@@ -279,7 +276,7 @@ export default function Uploader(props: Props) {
             webFiles?.length > 0 ||
             appContext.sharedFiles?.length > 0
         ) {
-            addLogLine(
+            log.info(
                 `upload request type:${
                     electronFiles?.length > 0
                         ? "electronFiles"
@@ -294,13 +291,13 @@ export default function Uploader(props: Props) {
             );
             if (uploadManager.isUploadRunning()) {
                 if (watchFolderService.isUploadRunning()) {
-                    addLogLine(
+                    log.info(
                         "watchFolder upload was running, pausing it to run user upload",
                     );
                     // pause watch folder service on user upload
                     watchFolderService.pauseRunningSync();
                 } else {
-                    addLogLine(
+                    log.info(
                         "an upload is already running, rejecting new upload request",
                     );
                     // no-op
@@ -372,7 +369,7 @@ export default function Uploader(props: Props) {
         uploaderName?: string,
     ) => {
         try {
-            addLogLine(
+            log.info(
                 `upload file to an existing collection name:${collection.name}, collectionID:${collection.id}`,
             );
             await preCollectionCreationAction();
@@ -397,7 +394,7 @@ export default function Uploader(props: Props) {
         collectionName?: string,
     ) => {
         try {
-            addLogLine(
+            log.info(
                 `upload file to an new collections strategy:${strategy} ,collectionName:${collectionName}`,
            );
             await preCollectionCreationAction();
@@ -417,7 +414,7 @@ export default function Uploader(props: Props) {
                 toUploadFiles.current,
             );
         }
-        addLogLine(
+        log.info(
             `upload collections - [${[...collectionNameToFilesMap.keys()]}]`,
         );
         try {
@@ -502,7 +499,7 @@ export default function Uploader(props: Props) {
         uploaderName?: string,
     ) => {
         try {
-            addLogLine("uploadFiles called");
+            log.info("uploadFiles called");
             preUploadAction();
             if (
                 electron &&
@@ -555,7 +552,7 @@ export default function Uploader(props: Props) {
     const retryFailed = async () => {
         try {
-            addLogLine("user retrying failed upload");
+            log.info("user retrying failed upload");
             const filesWithCollections =
                 uploadManager.getFailedFilesWithCollections();
             const uploaderName = uploadManager.getUploaderName();
@@ -630,7 +627,7 @@ export default function Uploader(props: Props) {
     ) => {
         try {
             if (accessedThroughSharedURL) {
-                addLogLine(
+                log.info(
                     `uploading files to pulbic collection - ${props.uploadCollection.name} - ${props.uploadCollection.id}`,
                 );
                 const uploaderName = await getPublicCollectionUploaderName(
@@ -645,7 +642,7 @@ export default function Uploader(props: Props) {
         if (isPendingDesktopUpload.current) {
             isPendingDesktopUpload.current = false;
             if (pendingDesktopUploadCollectionName.current) {
-                addLogLine(
+                log.info(
                     `upload pending files to collection - ${pendingDesktopUploadCollectionName.current}`,
                 );
                 uploadFilesToNewCollections(
@@ -654,7 +651,7 @@ export default function Uploader(props: Props) {
                 );
                 pendingDesktopUploadCollectionName.current = null;
             } else {
-                addLogLine(
+                log.info(
                     `pending upload - strategy - "multiple collections" `,
                 );
                 uploadFilesToNewCollections(
@@ -664,7 +661,7 @@ export default function Uploader(props: Props) {
             return;
         }
         if (isElectron() && pickedUploadType === PICKED_UPLOAD_TYPE.ZIPS) {
-            addLogLine("uploading zip files");
+            log.info("uploading zip files");
             uploadFilesToNewCollections(
                 UPLOAD_STRATEGY.COLLECTION_PER_FOLDER,
             );
@@ -685,7 +682,7 @@ export default function Uploader(props: Props) {
         }
         let showNextModal = () => {};
         if (importSuggestion.hasNestedFolders) {
-            addLogLine(`nested folders detected`);
+            log.info(`nested folders detected`);
             showNextModal = () => setChoiceModalView(true);
         } else {
             showNextModal = () =>
@@ -718,7 +715,7 @@ export default function Uploader(props: Props) {
             zipPaths.current = response.zipPaths;
         }
         if (files?.length > 0) {
-            addLogLine(
+            log.info(
                 ` desktop upload for type:${type} and fileCount: ${files?.length} requested`,
             );
             setElectronFiles(files);

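The arbitration logged in the Uploader hunks above is worth spelling out: a user-initiated upload pauses a running watch-folder sync, but a second user upload is rejected. A sketch under assumed service interfaces (both objects are stand-ins):

```ts
// Sketch only: decide whether a new user upload may proceed.
function mayStartUserUpload(
    uploadManager: { isUploadRunning: () => boolean },
    watchFolderService: {
        isUploadRunning: () => boolean;
        pauseRunningSync: () => void;
    },
): boolean {
    if (uploadManager.isUploadRunning()) {
        if (watchFolderService.isUploadRunning()) {
            // Background watch-folder sync yields to the user.
            console.log("watchFolder upload was running, pausing it to run user upload");
            watchFolderService.pauseRunningSync();
        } else {
            // Another user upload already owns the queue.
            console.log("an upload is already running, rejecting new upload request");
            return false;
        }
    }
    return true;
}
```
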
View file

@@ -26,7 +26,6 @@ import AppNavbar from "@ente/shared/components/Navbar/app";
 import { PHOTOS_PAGES as PAGES } from "@ente/shared/constants/pages";
 import { Events, eventBus } from "@ente/shared/events";
 import { useLocalState } from "@ente/shared/hooks/useLocalState";
-import { addLogLine } from "@ente/shared/logging";
 import HTTPService from "@ente/shared/network/HTTPService";
 import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
 import {
@@ -213,10 +212,10 @@ export default function App({ Component, pageProps }: AppProps) {
         }
         const initExport = async () => {
             try {
-                addLogLine("init export");
+                log.info("init export");
                 const token = getToken();
                 if (!token) {
-                    addLogLine(
+                    log.info(
                         "User not logged in, not starting export continuous sync job",
                     );
                     return;
@@ -237,7 +236,7 @@ export default function App({ Component, pageProps }: AppProps) {
                     exportService.enableContinuousExport();
                 }
                 if (isExportInProgress(exportRecord.stage)) {
-                    addLogLine("export was in progress, resuming");
+                    log.info("export was in progress, resuming");
                     exportService.scheduleExport();
                 }
             } catch (e) {

View file

@@ -3,7 +3,6 @@ import log from "@/next/log";
 import ComlinkCryptoWorker from "@ente/shared/crypto";
 import { CustomError } from "@ente/shared/error";
 import { Events, eventBus } from "@ente/shared/events";
-import { addLogLine } from "@ente/shared/logging";
 import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
 import { FILE_TYPE } from "constants/file";
 import isElectron from "is-electron";
@@ -63,15 +62,15 @@ class ClipServiceImpl {
                 return;
             }
             if (this.onFileUploadedHandler) {
-                addLogLine("file upload listener already setup");
+                log.info("file upload listener already setup");
                 return;
             }
-            addLogLine("setting up file upload listener");
+            log.info("setting up file upload listener");
             this.onFileUploadedHandler = (args) => {
                 this.runLocalFileClipExtraction(args);
             };
             eventBus.on(Events.FILE_UPLOADED, this.onFileUploadedHandler, this);
-            addLogLine("setup file upload listener successfully");
+            log.info("setup file upload listener successfully");
         } catch (e) {
             log.error("failed to setup clip service", e);
         }
@@ -80,17 +79,17 @@ class ClipServiceImpl {
     removeOnFileUploadListener = async () => {
         try {
             if (!this.onFileUploadedHandler) {
-                addLogLine("file upload listener already removed");
+                log.info("file upload listener already removed");
                 return;
             }
-            addLogLine("removing file upload listener");
+            log.info("removing file upload listener");
             eventBus.removeListener(
                 Events.FILE_UPLOADED,
                 this.onFileUploadedHandler,
                 this,
             );
             this.onFileUploadedHandler = null;
-            addLogLine("removed file upload listener successfully");
+            log.info("removed file upload listener successfully");
         } catch (e) {
             log.error("failed to remove clip service", e);
         }
@@ -121,13 +120,13 @@ class ClipServiceImpl {
     ) => {
         try {
             if (this.embeddingExtractionInProgress) {
-                addLogLine(
+                log.info(
                     "clip embedding extraction already in progress, scheduling re-run",
                 );
                 this.reRunNeeded = true;
                 return;
             } else {
-                addLogLine(
+                log.info(
                     "clip embedding extraction not in progress, starting clip embedding extraction",
                 );
             }
@@ -139,7 +138,7 @@ class ClipServiceImpl {
             this.embeddingExtractionInProgress = null;
             if (!canceller.signal.aborted && this.reRunNeeded) {
                 this.reRunNeeded = false;
-                addLogLine("re-running clip embedding extraction");
+                log.info("re-running clip embedding extraction");
                 setTimeout(
                     () => this.scheduleImageEmbeddingExtraction(),
                     0,
@@ -174,7 +173,7 @@ class ClipServiceImpl {
     ) => {
         try {
             if (this.unsupportedPlatform) {
-                addLogLine(
+                log.info(
                     `skipping clip embedding extraction, platform unsupported`,
                 );
                 return;
@@ -194,15 +193,15 @@ class ClipServiceImpl {
                 pending: pendingFiles.length,
             });
             if (pendingFiles.length === 0) {
-                addLogLine("no clip embedding extraction needed, all done");
+                log.info("no clip embedding extraction needed, all done");
                 return;
             }
-            addLogLine(
+            log.info(
                 `starting clip embedding extraction for ${pendingFiles.length} files`,
             );
             for (const file of pendingFiles) {
                 try {
-                    addLogLine(
+                    log.info(
                         `extracting clip embedding for file: ${file.metadata.title} fileID: ${file.id}`,
                     );
                     if (canceller.signal.aborted) {
@@ -210,7 +209,7 @@ class ClipServiceImpl {
                     }
                     const embeddingData =
                         await this.extractFileClipImageEmbedding(model, file);
-                    addLogLine(
+                    log.info(
                         `successfully extracted clip embedding for file: ${file.metadata.title} fileID: ${file.id} embedding length: ${embeddingData?.length}`,
                     );
                     await this.encryptAndUploadEmbedding(
@@ -219,7 +218,7 @@ class ClipServiceImpl {
                         embeddingData,
                     );
                     this.onSuccessStatusUpdater();
-                    addLogLine(
+                    log.info(
                         `successfully put clip embedding to server for file: ${file.metadata.title} fileID: ${file.id}`,
                     );
                 } catch (e) {
@@ -258,24 +257,24 @@ class ClipServiceImpl {
         model: Model = Model.ONNX_CLIP,
     ) {
         const { enteFile, localFile } = arg;
-        addLogLine(
+        log.info(
             `clip embedding extraction onFileUploadedHandler file: ${enteFile.metadata.title} fileID: ${enteFile.id}`,
             enteFile.id,
         );
         if (enteFile.metadata.fileType === FILE_TYPE.VIDEO) {
-            addLogLine(
+            log.info(
                 `skipping video file for clip embedding extraction file: ${enteFile.metadata.title} fileID: ${enteFile.id}`,
             );
             return;
         }
         const extension = enteFile.metadata.title.split(".").pop();
         if (!extension || !["jpg", "jpeg"].includes(extension)) {
-            addLogLine(
+            log.info(
                 `skipping non jpg file for clip embedding extraction file: ${enteFile.metadata.title} fileID: ${enteFile.id}`,
             );
             return;
         }
-        addLogLine(
+        log.info(
             `queuing up for local clip embedding extraction for file: ${enteFile.metadata.title} fileID: ${enteFile.id}`,
         );
         try {
@@ -290,7 +289,7 @@ class ClipServiceImpl {
                     embedding,
                 );
             });
-            addLogLine(
+            log.info(
                 `successfully extracted clip embedding for file: ${enteFile.metadata.title} fileID: ${enteFile.id}`,
             );
         } catch (e) {
@@ -322,7 +321,7 @@ class ClipServiceImpl {
         const comlinkCryptoWorker = await ComlinkCryptoWorker.getInstance();
         const { file: encryptedEmbeddingData } =
             await comlinkCryptoWorker.encryptEmbedding(embeddingData, file.key);
-        addLogLine(
+        log.info(
             `putting clip embedding to server for file: ${file.metadata.title} fileID: ${file.id}`,
         );
         await putEmbedding({

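The `embeddingExtractionInProgress`/`reRunNeeded` pair logged in the clip-service hunks above implements a coalescing scheduler: a request that arrives mid-run is not queued, it just flags that one more run is needed. A sketch of that pattern in isolation (names are stand-ins for the service's fields):

```ts
// Sketch only: coalesce overlapping extraction requests into at most one
// follow-up run.
class CoalescingScheduler {
    private running = false;
    private reRunNeeded = false;

    async schedule(task: () => Promise<void>) {
        if (this.running) {
            console.log("extraction already in progress, scheduling re-run");
            this.reRunNeeded = true;
            return;
        }
        this.running = true;
        try {
            await task();
        } finally {
            this.running = false;
            if (this.reRunNeeded) {
                this.reRunNeeded = false;
                console.log("re-running extraction");
                setTimeout(() => this.schedule(task), 0);
            }
        }
    }
}
```
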
View file

@@ -1,22 +1,20 @@
-import { EnteFile } from "types/file";
-import {
-    generateStreamFromArrayBuffer,
-    getRenderableFileURL,
-} from "utils/file";
 import log from "@/next/log";
 import { APPS } from "@ente/shared/apps/constants";
 import ComlinkCryptoWorker from "@ente/shared/crypto";
 import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
 import { CustomError } from "@ente/shared/error";
 import { Events, eventBus } from "@ente/shared/events";
-import { addLogLine } from "@ente/shared/logging";
 import { CacheStorageService } from "@ente/shared/storage/cacheStorage";
 import { CACHES } from "@ente/shared/storage/cacheStorage/constants";
 import { LimitedCache } from "@ente/shared/storage/cacheStorage/types";
 import { Remote } from "comlink";
 import { FILE_TYPE } from "constants/file";
 import isElectron from "is-electron";
+import { EnteFile } from "types/file";
+import {
+    generateStreamFromArrayBuffer,
+    getRenderableFileURL,
+} from "utils/file";
 import { isInternalUser } from "utils/user";
 import { PhotosDownloadClient } from "./clients/photos";
 import { PublicAlbumsDownloadClient } from "./clients/publicAlbums";
@@ -80,7 +78,7 @@ class DownloadManagerImpl {
     ) {
         try {
             if (this.ready) {
-                addLogLine("DownloadManager already initialized");
+                log.info("DownloadManager already initialized");
                 return;
             }
             this.downloadClient = createDownloadClient(app, tokens, timeout);
@@ -97,7 +95,7 @@ class DownloadManagerImpl {
     private async logoutHandler() {
         try {
-            addLogLine("downloadManger logoutHandler started");
+            log.info("downloadManger logoutHandler started");
             this.ready = false;
             this.cryptoWorker = null;
             this.downloadClient = null;
@@ -106,7 +104,7 @@ class DownloadManagerImpl {
             this.thumbnailObjectURLPromises.clear();
             this.fileDownloadProgress.clear();
             this.progressUpdater = () => {};
-            addLogLine("downloadManager logoutHandler completed");
+            log.info("downloadManager logoutHandler completed");
         } catch (e) {
             log.error("downloadManager logoutHandler failed", e);
         }
@@ -300,7 +298,7 @@ class DownloadManagerImpl {
         file: EnteFile,
     ): Promise<ReadableStream<Uint8Array>> {
         try {
-            addLogLine(`download attempted for fileID:${file.id}`);
+            log.info(`download attempted for fileID:${file.id}`);
             const onDownloadProgress = this.trackDownloadProgress(
                 file.id,
                 file.info?.fileSize,

View file

@@ -1,5 +1,5 @@
+import log from "@/next/log";
 import ComlinkCryptoWorker from "@ente/shared/crypto";
-import { addLogLine } from "@ente/shared/logging";
 import HTTPService from "@ente/shared/network/HTTPService";
 import { getEndpoint } from "@ente/shared/network/api";
 import localForage from "@ente/shared/storage/localForage";
@@ -108,11 +108,11 @@ export const syncEntities = async () => {
 const syncEntity = async <T>(type: EntityType): Promise<Entity<T>> => {
     try {
         let entities = await getLocalEntity(type);
-        addLogLine(
+        log.info(
             `Syncing ${type} entities localEntitiesCount: ${entities.length}`,
         );
         let syncTime = await getEntityLastSyncTime(type);
-        addLogLine(`Syncing ${type} entities syncTime: ${syncTime}`);
+        log.info(`Syncing ${type} entities syncTime: ${syncTime}`);
         let response: EntitySyncDiffResponse;
         do {
             response = await getEntityDiff(type, syncTime);
@@ -156,7 +156,7 @@ const syncEntity = async <T>(type: EntityType): Promise<Entity<T>> => {
         }
         await localForage.setItem(ENTITY_TABLES[type], nonDeletedEntities);
         await localForage.setItem(ENTITY_SYNC_TIME_TABLES[type], syncTime);
-        addLogLine(
+        log.info(
             `Syncing ${type} entities syncedEntitiesCount: ${nonDeletedEntities.length}`,
         );
     } while (response.diff.length === DIFF_LIMIT);

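The `do { ... } while (response.diff.length === DIFF_LIMIT)` loop above is a standard paged diff sync: keep pulling changes since the last sync time until a page comes back smaller than the batch limit. A sketch under assumed types (the fetcher, item shape, and limit are stand-ins):

```ts
// Sketch only: pull diff pages until a short page signals we're caught up,
// advancing the sync-time high-water mark as items are applied.
const DIFF_LIMIT = 500;

interface DiffItem {
    id: string;
    updatedAt: number;
    isDeleted: boolean;
}

async function syncEntity(
    getDiff: (sinceTime: number) => Promise<DiffItem[]>,
    initialSyncTime: number,
): Promise<number> {
    let syncTime = initialSyncTime;
    let diff: DiffItem[];
    do {
        diff = await getDiff(syncTime);
        for (const item of diff) {
            if (item.updatedAt > syncTime) syncTime = item.updatedAt;
        }
    } while (diff.length === DIFF_LIMIT);
    return syncTime;
}
```
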
View file

@@ -1,11 +1,9 @@
-import { getEndpoint } from "@ente/shared/network/api";
-import localForage from "@ente/shared/storage/localForage";
 import log from "@/next/log";
 import ComlinkCryptoWorker from "@ente/shared/crypto";
 import { Events, eventBus } from "@ente/shared/events";
-import { addLogLine } from "@ente/shared/logging";
 import HTTPService from "@ente/shared/network/HTTPService";
+import { getEndpoint } from "@ente/shared/network/api";
+import localForage from "@ente/shared/storage/localForage";
 import { getToken } from "@ente/shared/storage/localStorage/helpers";
 import { REQUEST_BATCH_SIZE } from "constants/api";
 import { Collection } from "types/collection";
@@ -57,7 +55,7 @@ const setLocalFiles = async (type: "normal" | "hidden", files: EnteFile[]) => {
                 `failed to save files to indexedDB (storageEstimate was ${storageEstimate}`,
                 e1,
             );
-            addLogLine(`storage estimate ${JSON.stringify(storageEstimate)}`);
+            log.info(`storage estimate ${JSON.stringify(storageEstimate)}`);
         } catch (e2) {
             log.error("failed to save files to indexedDB", e1);
             log.error("failed to get storage stats", e2);

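The failure path above logs the browser's storage estimate when an IndexedDB write fails, which helps diagnose quota exhaustion. A sketch using the standard `navigator.storage.estimate()` API (the error values are stand-ins):

```ts
// Sketch only: when persisting to IndexedDB fails, report the storage
// estimate; if even that fails, log both errors.
async function reportStorageOnFailure(e1: unknown) {
    try {
        const storageEstimate = await navigator.storage.estimate();
        console.log(`storage estimate ${JSON.stringify(storageEstimate)}`);
    } catch (e2) {
        console.error("failed to save files to indexedDB", e1);
        console.error("failed to get storage stats", e2);
    }
}
```
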
View file

@@ -2,7 +2,6 @@ import { convertBytesToHumanReadable } from "@/next/file";
 import log from "@/next/log";
 import { ComlinkWorker } from "@/next/worker/comlink-worker";
 import { CustomError } from "@ente/shared/error";
-import { addLogLine } from "@ente/shared/logging";
 import { retryAsyncFunction } from "@ente/shared/utils";
 import QueueProcessor from "@ente/shared/utils/queueProcessor";
 import { getDedicatedConvertWorker } from "utils/comlink/ComlinkConvertWorker";
@@ -46,7 +45,7 @@ class HEICConverter {
                 await worker.convertHEICToJPEG(
                     fileBlob,
                 );
-                addLogLine(
+                log.info(
                     `originalFileSize:${convertBytesToHumanReadable(
                         fileBlob?.size,
                     )},convertedFileSize:${convertBytesToHumanReadable(

View file

@@ -1,4 +1,3 @@
-import { addLogLine } from "@ente/shared/logging";
 import { GraphModel } from "@tensorflow/tfjs-converter";
 import * as tf from "@tensorflow/tfjs-core";
 import {
@@ -60,7 +59,7 @@ class BlazeFaceDetectionService implements FaceDetectionService {
             inputHeight: BLAZEFACE_INPUT_SIZE,
             inputWidth: BLAZEFACE_INPUT_SIZE,
         });
-        addLogLine(
+        log.info(
             "loaded blazeFaceModel: ",
             // await this.blazeFaceModel,
             // eslint-disable-next-line @typescript-eslint/await-thenable
@@ -121,20 +120,20 @@ class BlazeFaceDetectionService implements FaceDetectionService {
         let desiredDist = desiredRightEyeX - this.desiredLeftEye[0];
         desiredDist *= this.desiredFaceSize;
         const scale = desiredDist / dist;
-        // addLogLine("scale: ", scale);
+        // log.info("scale: ", scale);
         const eyesCenter = [];
         eyesCenter[0] = Math.floor((leftEye[0] + rightEye[0]) / 2);
         eyesCenter[1] = Math.floor((leftEye[1] + rightEye[1]) / 2);
-        // addLogLine("eyesCenter: ", eyesCenter);
+        // log.info("eyesCenter: ", eyesCenter);
         const faceWidth = this.desiredFaceSize / scale;
         const faceHeight = this.desiredFaceSize / scale;
-        // addLogLine("faceWidth: ", faceWidth, "faceHeight: ", faceHeight)
+        // log.info("faceWidth: ", faceWidth, "faceHeight: ", faceHeight)
         const tx = eyesCenter[0] - faceWidth * 0.5;
         const ty = eyesCenter[1] - faceHeight * this.desiredLeftEye[1];
-        // addLogLine("tx: ", tx, "ty: ", ty);
+        // log.info("tx: ", tx, "ty: ", ty);
         return new Box({
             left: tx,
@@ -155,7 +154,7 @@ class BlazeFaceDetectionService implements FaceDetectionService {
         const normalizedImage = tf.sub(tf.div(reshapedImage, 127.5), 1.0);
         // eslint-disable-next-line @typescript-eslint/await-thenable
         const results = await this.blazeFaceBackModel.predict(normalizedImage);
-        // addLogLine('onFacesDetected: ', results);
+        // log.info('onFacesDetected: ', results);
         return results;
     }
@@ -180,7 +179,7 @@ class BlazeFaceDetectionService implements FaceDetectionService {
         const inBox = newBox(0, 0, resized.width, resized.height);
         const toBox = newBox(0, 0, imageBitmap.width, imageBitmap.height);
         const transform = computeTransformToBox(inBox, toBox);
-        // addLogLine("1st pass: ", { transform });
+        // log.info("1st pass: ", { transform });
         const faceDetections: Array<FaceDetection> = faces?.map((f) => {
             const box = transformBox(normFaceBox(f), transform);
@@ -223,7 +222,7 @@ class BlazeFaceDetectionService implements FaceDetectionService {
         );
         let selected = pass2Detections?.[0];
         if (pass2Detections?.length > 1) {
-            // addLogLine('2nd pass >1 face', pass2Detections.length);
+            // log.info('2nd pass >1 face', pass2Detections.length);
             selected = getNearestDetection(
                 pass1Detection,
                 pass2Detections,
@@ -234,7 +233,7 @@ class BlazeFaceDetectionService implements FaceDetectionService {
         // we might miss 1st pass face actually having score within threshold
         // it is ok as results will be consistent with 2nd pass only detections
         if (selected && selected.probability >= BLAZEFACE_SCORE_THRESHOLD) {
-            // addLogLine("pass2: ", { imageBox, paddedBox, transform, selected });
+            // log.info("pass2: ", { imageBox, paddedBox, transform, selected });
             detections.push(selected);
         }
     }
} }

View file

@@ -26,7 +26,7 @@ class ClusteringService {
         epsilon: number = 1.0,
         minPts: number = 2,
     ): ClusteringResults {
-        // addLogLine("distanceFunction", DBSCAN._);
+        // log.info("distanceFunction", DBSCAN._);
         const clusters = this.dbscan.run(dataset, epsilon, minPts);
         const noise = this.dbscan.noise;
         return { clusters, noise };

View file

@@ -22,7 +22,7 @@ class DbscanClusteringService implements ClusteringService {
         input: ClusteringInput,
         config: ClusteringConfig,
     ): Promise<HdbscanResults> {
-        // addLogLine('Clustering input: ', input);
+        // log.info('Clustering input: ', input);
         const dbscan = new DBSCAN();
         const clusters = dbscan.run(
             input,

View file

@@ -1,4 +1,3 @@
-import { addLogLine } from "@ente/shared/logging";
 import {
     DetectedFace,
     Face,
@@ -51,7 +50,7 @@ class FaceService {
         );
         const faceDetections =
             await syncContext.faceDetectionService.detectFaces(imageBitmap);
-        // addLogLine('3 TF Memory stats: ',JSON.stringify(tf.memory()));
+        // log.info('3 TF Memory stats: ',JSON.stringify(tf.memory()));
         // TODO: reenable faces filtering based on width
         const detectedFaces = faceDetections?.map((detection) => {
             return {
@@ -66,7 +65,7 @@ class FaceService {
         // ?.filter((f) =>
         // f.box.width > syncContext.config.faceDetection.minFaceSize
         // );
-        addLogLine("[MLService] Detected Faces: ", newMlFile.faces?.length);
+        log.info("[MLService] Detected Faces: ", newMlFile.faces?.length);
     }
     async syncFileFaceCrops(
@@ -128,8 +127,8 @@ class FaceService {
                 face.detection,
             );
         }
-        addLogLine("[MLService] alignedFaces: ", newMlFile.faces?.length);
-        // addLogLine('4 TF Memory stats: ',JSON.stringify(tf.memory()));
+        log.info("[MLService] alignedFaces: ", newMlFile.faces?.length);
+        // log.info('4 TF Memory stats: ',JSON.stringify(tf.memory()));
     }
     async syncFileFaceEmbeddings(
@@ -168,8 +167,8 @@ class FaceService {
         faceImages.forEach((faceImage) => faceImage.close());
         newMlFile.faces.forEach((f, i) => (f.embedding = embeddings[i]));
-        addLogLine("[MLService] facesWithEmbeddings: ", newMlFile.faces.length);
-        // addLogLine('5 TF Memory stats: ',JSON.stringify(tf.memory()));
+        log.info("[MLService] facesWithEmbeddings: ", newMlFile.faces.length);
+        // log.info('5 TF Memory stats: ',JSON.stringify(tf.memory()));
     }
     async saveFaceCrop(
@@ -210,14 +209,14 @@ class FaceService {
         const clusteringConfig = syncContext.config.faceClustering;
         if (!allFaces || allFaces.length < clusteringConfig.minInputSize) {
-            addLogLine(
+            log.info(
                 "[MLService] Too few faces to cluster, not running clustering: ",
                 allFaces.length,
             );
             return;
         }
-        addLogLine("Running clustering allFaces: ", allFaces.length);
+        log.info("Running clustering allFaces: ", allFaces.length);
         syncContext.mlLibraryData.faceClusteringResults =
             await syncContext.faceClusteringService.cluster(
                 allFaces.map((f) => Array.from(f.embedding)),
@@ -225,7 +224,7 @@ class FaceService {
             );
         syncContext.mlLibraryData.faceClusteringMethod =
             syncContext.faceClusteringService.method;
-        addLogLine(
+        log.info(
             "[MLService] Got face clustering results: ",
             JSON.stringify(syncContext.mlLibraryData.faceClusteringResults),
         );

View file

@@ -22,7 +22,7 @@ class HdbscanClusteringService implements ClusteringService {
         input: ClusteringInput,
         config: ClusteringConfig,
     ): Promise<HdbscanResults> {
-        // addLogLine('Clustering input: ', input);
+        // log.info('Clustering input: ', input);
         const hdbscan = new Hdbscan({
             input,

View file

@@ -1,4 +1,3 @@
-import { addLogLine } from "@ente/shared/logging";
 import * as tfjsConverter from "@tensorflow/tfjs-converter";
 import * as tf from "@tensorflow/tfjs-core";
 import { SCENE_DETECTION_IMAGE_SIZE } from "constants/mlConfig";
@@ -26,7 +25,7 @@ class ImageScene implements SceneDetectionService {
     }
     private async init() {
-        addLogLine(`[${this.workerID}]`, "ImageScene init called");
+        log.info(`[${this.workerID}]`, "ImageScene init called");
         if (this.model) {
             return;
         }
@@ -38,7 +37,7 @@ class ImageScene implements SceneDetectionService {
         this.model = await tfjsConverter.loadGraphModel(
             "/models/imagescene/model.json",
         );
-        addLogLine(
+        log.info(
             `[${this.workerID}]`,
             "loaded ImageScene model",
             tf.getBackend(),
@@ -52,10 +51,7 @@ class ImageScene implements SceneDetectionService {
     }
     private async getImageSceneModel() {
-        addLogLine(
-            `[${this.workerID}]`,
-            "ImageScene getImageSceneModel called",
-        );
+        log.info(`[${this.workerID}]`, "ImageScene getImageSceneModel called");
         if (!this.ready) {
             this.ready = this.init();
         }

View file

@@ -2,7 +2,6 @@ import { haveWindow } from "@/next/env";
 import { ComlinkWorker } from "@/next/worker/comlink-worker";
 import { getDedicatedCryptoWorker } from "@ente/shared/crypto";
 import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
-import { addLogLine } from "@ente/shared/logging";
 import PQueue from "p-queue";
 import { EnteFile } from "types/file";
 import {
@@ -198,7 +197,7 @@ export class LocalMLSyncContext implements MLSyncContext {
         this.concurrency = concurrency || getConcurrency();
-        addLogLine("Using concurrency: ", this.concurrency);
+        log.info("Using concurrency: ", this.concurrency);
         // timeout is added on downloads
         // timeout on queue will keep the operation open till worker is terminated
         this.syncQueue = new PQueue({ concurrency: this.concurrency });

View file

@@ -1,6 +1,6 @@
+import log from "@/next/log";
 import { APPS } from "@ente/shared/apps/constants";
 import { CustomError, parseUploadErrorCodes } from "@ente/shared/error";
-import { addLogLine } from "@ente/shared/logging";
 import "@tensorflow/tfjs-backend-cpu";
 import "@tensorflow/tfjs-backend-webgl";
 import * as tf from "@tensorflow/tfjs-core";
@@ -78,10 +78,10 @@ class MachineLearningService {
             tsne: syncContext.tsne,
             error: syncContext.error,
         };
-        // addLogLine('[MLService] sync results: ', mlSyncResult);
+        // log.info('[MLService] sync results: ', mlSyncResult);
         // await syncContext.dispose();
-        addLogLine("Final TF Memory stats: ", JSON.stringify(tf.memory()));
+        log.info("Final TF Memory stats: ", JSON.stringify(tf.memory()));
         return mlSyncResult;
     }
@@ -139,7 +139,7 @@
         let updated = false;
         if (newFileIds.length > 0) {
-            addLogLine("newFiles: ", newFileIds.length);
+            log.info("newFiles: ", newFileIds.length);
             const newFiles = newFileIds.map((fileId) => this.newMlData(fileId));
             await mlIDbStorage.putAllFiles(newFiles, tx);
             updated = true;
@@ -153,7 +153,7 @@
         }
         if (removedFileIds.length > 0) {
-            addLogLine("removedFiles: ", removedFileIds.length);
+            log.info("removedFiles: ", removedFileIds.length);
             await mlIDbStorage.removeAllFiles(removedFileIds, tx);
             updated = true;
         }
@@ -165,7 +165,7 @@
             await mlIDbStorage.incrementIndexVersion("files");
         }
-        addLogLine("syncLocalFiles", Date.now() - startTime, "ms");
+        log.info("syncLocalFiles", Date.now() - startTime, "ms");
     }
     private async getOutOfSyncFiles(syncContext: MLSyncContext) {
@@ -176,13 +176,13 @@
             MAX_ML_SYNC_ERROR_COUNT,
         );
-        addLogLine("fileIds: ", JSON.stringify(fileIds));
+        log.info("fileIds: ", JSON.stringify(fileIds));
         const localFilesMap = await this.getLocalFilesMap(syncContext);
         syncContext.outOfSyncFiles = fileIds.map((fileId) =>
             localFilesMap.get(fileId),
         );
-        addLogLine("getOutOfSyncFiles", Date.now() - startTime, "ms");
+        log.info("getOutOfSyncFiles", Date.now() - startTime, "ms");
     }
     private async syncFiles(syncContext: MLSyncContext) {
@@ -205,7 +205,7 @@
             syncContext.error = error;
         }
         await syncContext.syncQueue.onIdle();
-        addLogLine("allFaces: ", syncContext.nSyncedFaces);
+        log.info("allFaces: ", syncContext.nSyncedFaces);
         // TODO: In case syncJob has to use multiple ml workers
         // do in same transaction with each file update
@@ -216,32 +216,32 @@
     private async getSyncContext(token: string, userID: number) {
         if (!this.syncContext) {
-            addLogLine("Creating syncContext");
+            log.info("Creating syncContext");
             this.syncContext = getMLSyncConfig().then((mlSyncConfig) =>
                 MLFactory.getMLSyncContext(token, userID, mlSyncConfig, true),
             );
         } else {
-            addLogLine("reusing existing syncContext");
+            log.info("reusing existing syncContext");
         }
         return this.syncContext;
     }
     private async getLocalSyncContext(token: string, userID: number) {
         if (!this.localSyncContext) {
-            addLogLine("Creating localSyncContext");
+            log.info("Creating localSyncContext");
             this.localSyncContext = getMLSyncConfig().then((mlSyncConfig) =>
                 MLFactory.getMLSyncContext(token, userID, mlSyncConfig, false),
             );
         } else {
-            addLogLine("reusing existing localSyncContext");
+            log.info("reusing existing localSyncContext");
        }
         return this.localSyncContext;
     }
     public async closeLocalSyncContext() {
         if (this.localSyncContext) {
-            addLogLine("Closing localSyncContext");
+            log.info("Closing localSyncContext");
             const syncContext = await this.localSyncContext;
             await syncContext.dispose();
             this.localSyncContext = undefined;
@@ -319,7 +319,7 @@
             await this.persistMLFileSyncError(syncContext, enteFile, error);
             syncContext.nSyncedFiles += 1;
         } finally {
-            addLogLine("TF Memory stats: ", JSON.stringify(tf.memory()));
+            log.info("TF Memory stats: ", JSON.stringify(tf.memory()));
         }
     }
@@ -367,7 +367,7 @@
         } finally {
             fileContext.tfImage && fileContext.tfImage.dispose();
             fileContext.imageBitmap && fileContext.imageBitmap.close();
-            // addLogLine('8 TF Memory stats: ',JSON.stringify(tf.memory()));
+            // log.info('8 TF Memory stats: ',JSON.stringify(tf.memory()));
             // TODO: enable once faceId changes go in
             // await removeOldFaceCrops(
@@ -386,7 +386,7 @@
         await tf.ready();
-        addLogLine("01 TF Memory stats: ", JSON.stringify(tf.memory()));
+        log.info("01 TF Memory stats: ", JSON.stringify(tf.memory()));
         this.initialized = true;
     }
@@ -463,7 +463,7 @@
             await FaceService.syncFileFaceEmbeddings(syncContext, fileContext);
         }
-        addLogLine(
+        log.info(
             `face detection time taken ${fileContext.enteFile.id}`,
             Date.now() - startTime,
             "ms",

View file

@@ -1,7 +1,6 @@
 import log from "@/next/log";
 import { ComlinkWorker } from "@/next/worker/comlink-worker";
 import { eventBus, Events } from "@ente/shared/events";
-import { addLogLine } from "@ente/shared/logging";
 import { getToken, getUserID } from "@ente/shared/storage/localStorage/helpers";
 import { FILE_TYPE } from "constants/file";
 import debounce from "debounce";
@@ -51,7 +50,7 @@ class MLWorkManager {
 public async setMlSearchEnabled(enabled: boolean) {
 if (!this.mlSearchEnabled && enabled) {
-addLogLine("Enabling MLWorkManager");
+log.info("Enabling MLWorkManager");
 this.mlSearchEnabled = true;

 logQueueStats(this.liveSyncQueue, "livesync");
@@ -70,7 +69,7 @@ class MLWorkManager {
 await this.startSyncJob();
 } else if (this.mlSearchEnabled && !enabled) {
-addLogLine("Disabling MLWorkManager");
+log.info("Disabling MLWorkManager");
 this.mlSearchEnabled = false;

 this.liveSyncQueue.removeAllListeners();
@@ -92,7 +91,7 @@ class MLWorkManager {
 // Handlers
 private async appStartHandler() {
-addLogLine("appStartHandler");
+log.info("appStartHandler");
 try {
 this.startSyncJob();
 } catch (e) {
@@ -101,7 +100,7 @@ class MLWorkManager {
 }

 private async logoutHandler() {
-addLogLine("logoutHandler");
+log.info("logoutHandler");
 try {
 this.stopSyncJob();
 this.mlSyncJob = undefined;
@@ -119,9 +118,9 @@ class MLWorkManager {
 if (!this.mlSearchEnabled) {
 return;
 }
-addLogLine("fileUploadedHandler: ", arg.enteFile.id);
+log.info("fileUploadedHandler: ", arg.enteFile.id);
 if (arg.enteFile.metadata.fileType !== FILE_TYPE.IMAGE) {
-addLogLine("Skipping non image file for local file processing");
+log.info("Skipping non image file for local file processing");
 return;
 }
 try {
@@ -134,7 +133,7 @@ class MLWorkManager {
 }

 private async localFilesUpdatedHandler() {
-addLogLine("Local files updated");
+log.info("Local files updated");
 this.startSyncJob();
 }
@@ -165,7 +164,7 @@ class MLWorkManager {
 }

 private async onLiveSyncIdle() {
-addLogLine("Live sync idle");
+log.info("Live sync idle");
 await this.terminateLiveSyncWorker();
 this.mlSearchEnabled && this.startSyncJob();
 }
@@ -206,7 +205,7 @@ class MLWorkManager {
 // TODO: skipping is not required if we are caching chunks through service worker
 // currently worker chunk itself is not loaded when network is not there
 if (!navigator.onLine) {
-addLogLine(
+log.info(
 "Skipping ml-sync job run as not connected to internet.",
 );
 return {
@@ -227,7 +226,7 @@ class MLWorkManager {
 !!mlSyncResult.error || mlSyncResult.nOutOfSyncFiles < 1,
 mlSyncResult,
 };
-addLogLine("ML Sync Job result: ", JSON.stringify(jobResult));
+log.info("ML Sync Job result: ", JSON.stringify(jobResult));

 // TODO: redirect/refresh to gallery in case of session_expired, stop ml sync job
@@ -239,13 +238,13 @@ class MLWorkManager {
 public async startSyncJob() {
 try {
-addLogLine("MLWorkManager.startSyncJob");
+log.info("MLWorkManager.startSyncJob");
 if (!this.mlSearchEnabled) {
-addLogLine("ML Search disabled, not starting ml sync job");
+log.info("ML Search disabled, not starting ml sync job");
 return;
 }
 if (!getToken()) {
-addLogLine("User not logged in, not starting ml sync job");
+log.info("User not logged in, not starting ml sync job");
 return;
 }
 const mlSyncJobConfig = await getMLSyncJobConfig();
@@ -262,7 +261,7 @@ class MLWorkManager {
 public stopSyncJob(terminateWorker: boolean = true) {
 try {
-addLogLine("MLWorkManager.stopSyncJob");
+log.info("MLWorkManager.stopSyncJob");
 this.mlSyncJob?.stop();
 terminateWorker && this.terminateSyncJobWorker();
 } catch (e) {

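Note: the manager above coalesces bursts of file and upload events into a single sync run. A minimal sketch of that pattern using the `debounce` package this file imports; the `requestSync` name and the 5 second wait are illustrative, not taken from this code:

    import debounce from "debounce";

    // Collapse many rapid triggers into one trailing call.
    const requestSync = debounce(() => {
        console.log("starting ml sync"); // stand-in for the real sync job kick-off
    }, 5000);

    for (let i = 0; i < 10; i++) requestSync(); // fires once, ~5s after the last call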
View file

@@ -1,4 +1,4 @@
-import { addLogLine } from "@ente/shared/logging";
+import log from "@/next/log";
 import * as tf from "@tensorflow/tfjs-core";
 import { TFLiteModel } from "@tensorflow/tfjs-tflite";
 import { MOBILEFACENET_FACE_SIZE } from "constants/mlConfig";
@@ -37,7 +37,7 @@ class MobileFaceNetEmbeddingService implements FaceEmbeddingService {
 "/models/mobilefacenet/mobilefacenet.tflite",
 );
-addLogLine("loaded mobileFaceNetModel: ", tf.getBackend());
+log.info("loaded mobileFaceNetModel: ", tf.getBackend());
 }

 private async getMobileFaceNetModel() {

View file

@@ -1,4 +1,4 @@
-import { addLogLine } from "@ente/shared/logging";
+import log from "@/next/log";
 import {
     DetectedObject,
     MLSyncContext,
@@ -61,7 +61,7 @@ class ObjectService {
 syncContext.config.sceneDetection.minScore,
 )),
 );
-// addLogLine('3 TF Memory stats: ',JSON.stringify(tf.memory()));
+// log.info('3 TF Memory stats: ',JSON.stringify(tf.memory()));
 // TODO: reenable faces filtering based on width
 const detectedObjects = objectDetections?.map((detection) => {
 return {
@@ -77,13 +77,13 @@ class ObjectService {
 // ?.filter((f) =>
 // f.box.width > syncContext.config.faceDetection.minFaceSize
 // );
-addLogLine(
+log.info(
 `object detection time taken ${fileContext.enteFile.id}`,
 Date.now() - startTime,
 "ms",
 );
-addLogLine("[MLService] Detected Objects: ", newMlFile.objects?.length);
+log.info("[MLService] Detected Objects: ", newMlFile.objects?.length);
 }

 async getAllSyncedObjectsMap(syncContext: MLSyncContext) {
@@ -115,9 +115,9 @@ class ObjectService {
 async syncThingsIndex(syncContext: MLSyncContext) {
 const filesVersion = await mlIDbStorage.getIndexVersion("files");
-addLogLine("things", await mlIDbStorage.getIndexVersion("things"));
+log.info("things", await mlIDbStorage.getIndexVersion("things"));
 if (filesVersion <= (await mlIDbStorage.getIndexVersion("things"))) {
-addLogLine(
+log.info(
 "[MLService] Skipping people index as already synced to latest version",
 );
 return;

View file

@@ -1,4 +1,4 @@
-import { addLogLine } from "@ente/shared/logging";
+import log from "@/next/log";
 import { Face, MLSyncContext, Person } from "types/machineLearning";
 import {
     findFirstIfSorted,
@@ -20,7 +20,7 @@ class PeopleService {
 syncContext.faceClusteringService.method,
 )
 ) {
-addLogLine(
+log.info(
 "[MLService] Skipping people index as already synced to latest version",
 );
 return;
@@ -84,7 +84,7 @@ class PeopleService {
 faces.forEach((face) => {
 face.personId = person.id;
 });
-// addLogLine("Creating person: ", person, faces);
+// log.info("Creating person: ", person, faces);
 }

 await mlIDbStorage.updateFaces(allFacesMap);

View file

@@ -16,7 +16,7 @@ class ReaderService {
 if (fileContext.imageBitmap) {
 return fileContext.imageBitmap;
 }
-// addLogLine('1 TF Memory stats: ',JSON.stringify(tf.memory()));
+// log.info('1 TF Memory stats: ',JSON.stringify(tf.memory()));
 if (fileContext.localFile) {
 if (
 fileContext.enteFile.metadata.fileType !== FILE_TYPE.IMAGE
@@ -47,7 +47,7 @@ class ReaderService {
 fileContext.newMlFile.imageSource = syncContext.config.imageSource;
 const { width, height } = fileContext.imageBitmap;
 fileContext.newMlFile.imageDimensions = { width, height };
-// addLogLine('2 TF Memory stats: ',JSON.stringify(tf.memory()));
+// log.info('2 TF Memory stats: ',JSON.stringify(tf.memory()));

 return fileContext.imageBitmap;
 } catch (e) {

View file

@@ -6,7 +6,7 @@ import {
 Versioned,
 } from "types/machineLearning";
-import { addLogLine } from "@ente/shared/logging";
+import log from "@/next/log";
 import * as SSDMobileNet from "@tensorflow-models/coco-ssd";
 import { OBJECT_DETECTION_IMAGE_SIZE } from "constants/mlConfig";
 import { resizeToSquare } from "utils/image";
@@ -28,7 +28,7 @@ class SSDMobileNetV2 implements ObjectDetectionService {
 base: "mobilenet_v2",
 modelUrl: "/models/ssdmobilenet/model.json",
 });
-addLogLine("loaded ssdMobileNetV2Model", tf.getBackend());
+log.info("loaded ssdMobileNetV2Model", tf.getBackend());
 }

 private async getSSDMobileNetV2Model() {

View file

@@ -1,6 +1,5 @@
 import log from "@/next/log";
 import { CustomError } from "@ente/shared/error";
-import { addLogLine } from "@ente/shared/logging";
 import * as chrono from "chrono-node";
 import { FILE_TYPE } from "constants/file";
 import { t } from "i18next";
@@ -382,7 +381,7 @@ async function searchLocationTag(searchPhrase: string): Promise<LocationTag[]> {
 locationTag.data.name.toLowerCase().includes(searchPhrase),
 );
 if (matchedLocationTags.length > 0) {
-addLogLine(
+log.info(
 `Found ${matchedLocationTags.length} location tags for search phrase`,
 );
 }

View file

@@ -1,7 +1,6 @@
 import { getFileNameSize } from "@/next/file";
 import log from "@/next/log";
 import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
-import { addLogLine } from "@ente/shared/logging";
 import { Remote } from "comlink";
 import { FILE_READER_CHUNK_SIZE, MULTIPART_PART_SIZE } from "constants/upload";
 import { EncryptedMagicMetadata } from "types/magicMetadata";
@@ -46,7 +45,7 @@ export async function readFile(
 rawFile,
 fileTypeInfo,
 );
-addLogLine(`reading file data ${getFileNameSize(rawFile)} `);
+log.info(`reading file data ${getFileNameSize(rawFile)} `);
 let filedata: Uint8Array | DataStream;
 if (!(rawFile instanceof File)) {
 if (rawFile.size > MULTIPART_PART_SIZE) {
@@ -63,7 +62,7 @@ export async function readFile(
 filedata = await getUint8ArrayView(rawFile);
 }
-addLogLine(`read file data successfully ${getFileNameSize(rawFile)} `);
+log.info(`read file data successfully ${getFileNameSize(rawFile)} `);
 return {
 filedata,

View file

@@ -1,19 +1,18 @@
 import { getFileNameSize } from "@/next/file";
+import log from "@/next/log";
 import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
 import { CustomError } from "@ente/shared/error";
-import { addLogLine } from "@ente/shared/logging";
 import { Remote } from "comlink";
 import { FILE_READER_CHUNK_SIZE } from "constants/upload";
 import { getElectronFileStream, getFileStream } from "services/readerService";
 import { DataStream, ElectronFile } from "types/upload";
-import log from "@/next/log";

 export async function getFileHash(
 worker: Remote<DedicatedCryptoWorker>,
 file: File | ElectronFile,
 ) {
 try {
-addLogLine(`getFileHash called for ${getFileNameSize(file)}`);
+log.info(`getFileHash called for ${getFileNameSize(file)}`);
 let filedata: DataStream;
 if (file instanceof File) {
 filedata = getFileStream(file, FILE_READER_CHUNK_SIZE);
@@ -38,14 +37,12 @@ export async function getFileHash(
 throw Error(CustomError.CHUNK_MORE_THAN_EXPECTED);
 }
 const hash = await worker.completeChunkHashing(hashState);
-addLogLine(
+log.info(
 `file hashing completed successfully ${getFileNameSize(file)}`,
 );
 return hash;
 } catch (e) {
 log.error("getFileHash failed", e);
-addLogLine(
-`file hashing failed ${getFileNameSize(file)} ,${e.message} `,
-);
+log.info(`file hashing failed ${getFileNameSize(file)} ,${e.message} `);
 }
 }

View file

@@ -2,7 +2,6 @@ import ElectronAPIs from "@/next/electron";
 import { convertBytesToHumanReadable, getFileNameSize } from "@/next/file";
 import log from "@/next/log";
 import { CustomError } from "@ente/shared/error";
-import { addLogLine } from "@ente/shared/logging";
 import { FILE_TYPE } from "constants/file";
 import { BLACK_THUMBNAIL_BASE64 } from "constants/upload";
 import isElectron from "is-electron";
@@ -104,7 +103,7 @@ const generateImageThumbnailInElectron = async (
 maxDimension,
 maxSize,
 );
-addLogLine(
+log.info(
 `originalFileSize:${convertBytesToHumanReadable(
 inputFile?.size,
 )},thumbFileSize:${convertBytesToHumanReadable(
@@ -136,12 +135,12 @@ export async function generateImageThumbnailUsingCanvas(
 let timeout = null;
 const isHEIC = isFileHEIC(fileTypeInfo.exactType);
 if (isHEIC) {
-addLogLine(`HEICConverter called for ${getFileNameSize(file)}`);
+log.info(`HEICConverter called for ${getFileNameSize(file)}`);
 const convertedBlob = await HeicConversionService.convert(
 new Blob([await file.arrayBuffer()]),
 );
 file = new File([convertedBlob], file.name);
-addLogLine(`${getFileNameSize(file)} successfully converted`);
+log.info(`${getFileNameSize(file)} successfully converted`);
 }
 let image = new Image();
 imageURL = URL.createObjectURL(new Blob([await file.arrayBuffer()]));
@@ -192,17 +191,17 @@ async function generateVideoThumbnail(
 ) {
 let thumbnail: Uint8Array;
 try {
-addLogLine(
+log.info(
 `ffmpeg generateThumbnail called for ${getFileNameSize(file)}`,
 );
 const thumbnail = await FFmpegService.generateVideoThumbnail(file);
-addLogLine(
+log.info(
 `ffmpeg thumbnail successfully generated ${getFileNameSize(file)}`,
 );
 return await getUint8ArrayView(thumbnail);
 } catch (e) {
-addLogLine(
+log.info(
 `ffmpeg thumbnail generated failed ${getFileNameSize(
 file,
 )} error: ${e.message}`,

View file

@@ -5,7 +5,6 @@ import { getDedicatedCryptoWorker } from "@ente/shared/crypto";
 import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
 import { CustomError } from "@ente/shared/error";
 import { Events, eventBus } from "@ente/shared/events";
-import { addLogLine } from "@ente/shared/logging";
 import { Remote } from "comlink";
 import { UPLOAD_RESULT, UPLOAD_STAGES } from "constants/upload";
 import isElectron from "is-electron";
@@ -124,7 +123,7 @@ class UploadManager {
 this.uploadInProgress = true;
 await this.updateExistingFilesAndCollections(collections);
 this.uploaderName = uploaderName;
-addLogLine(
+log.info(
 `received ${filesWithCollectionToUploadIn.length} files to upload`,
 );
 uiService.setFilenames(
@@ -137,8 +136,8 @@ class UploadManager {
 );
 const { metadataJSONFiles, mediaFiles } =
 segregateMetadataAndMediaFiles(filesWithCollectionToUploadIn);
-addLogLine(`has ${metadataJSONFiles.length} metadata json files`);
-addLogLine(`has ${mediaFiles.length} media files`);
+log.info(`has ${metadataJSONFiles.length} metadata json files`);
+log.info(`has ${mediaFiles.length} media files`);
 if (metadataJSONFiles.length) {
 UIService.setUploadStage(
 UPLOAD_STAGES.READING_GOOGLE_METADATA_FILES,
@@ -150,11 +149,11 @@ class UploadManager {
 );
 }
 if (mediaFiles.length) {
-addLogLine(`clusterLivePhotoFiles started`);
+log.info(`clusterLivePhotoFiles started`);
 const analysedMediaFiles =
 await UploadService.clusterLivePhotoFiles(mediaFiles);
-addLogLine(`clusterLivePhotoFiles ended`);
-addLogLine(
+log.info(`clusterLivePhotoFiles ended`);
+log.info(
 `got live photos: ${
 mediaFiles.length !== analysedMediaFiles.length
 }`,
@@ -205,7 +204,7 @@ class UploadManager {
 private async parseMetadataJSONFiles(metadataFiles: FileWithCollection[]) {
 try {
-addLogLine(`parseMetadataJSONFiles function executed `);
+log.info(`parseMetadataJSONFiles function executed `);

 UIService.reset(metadataFiles.length);
@@ -214,7 +213,7 @@ class UploadManager {
 if (uploadCancelService.isUploadCancelationRequested()) {
 throw Error(CustomError.UPLOAD_CANCELLED);
 }
-addLogLine(
+log.info(
 `parsing metadata json file ${getFileNameSize(file)}`,
 );
@@ -229,7 +228,7 @@ class UploadManager {
 );
 UIService.increaseFileUploaded();
 }
-addLogLine(
+log.info(
 `successfully parsed metadata json file ${getFileNameSize(
 file,
 )}`,
@@ -240,7 +239,7 @@ class UploadManager {
 } else {
 // and don't break for subsequent files just log and move on
 log.error("parsing failed for a file", e);
-addLogLine(
+log.info(
 `failed to parse metadata json file ${getFileNameSize(
 file,
 )} error: ${e.message}`,
@@ -257,7 +256,7 @@ class UploadManager {
 }

 private async uploadMediaFiles(mediaFiles: FileWithCollection[]) {
-addLogLine(`uploadMediaFiles called`);
+log.info(`uploadMediaFiles called`);
 this.filesToBeUploaded = [...this.filesToBeUploaded, ...mediaFiles];
 if (isElectron()) {
@@ -321,7 +320,7 @@ class UploadManager {
 ) {
 try {
 let decryptedFile: EnteFile;
-addLogLine(
+log.info(
 `post upload action -> fileUploadResult: ${fileUploadResult} uploadedFile present ${!!uploadedFile}`,
 );
 await this.updateElectronRemainingFiles(fileWithCollection);
@@ -397,7 +396,7 @@ class UploadManager {
 }

 public cancelRunningUpload() {
-addLogLine("user cancelled running upload");
+log.info("user cancelled running upload");
 UIService.setUploadStage(UPLOAD_STAGES.CANCELLING);
 uploadCancelService.requestUploadCancelation();
 }

View file

@@ -1,6 +1,5 @@
 import { getFileNameSize } from "@/next/file";
 import log from "@/next/log";
-import { addLogLine } from "@ente/shared/logging";
 import { NULL_EXTRACTED_METADATA } from "constants/upload";
 import * as ffmpegService from "services/ffmpeg/ffmpegService";
 import { ElectronFile } from "types/upload";
@@ -8,14 +7,14 @@ import { ElectronFile } from "types/upload";
 export async function getVideoMetadata(file: File | ElectronFile) {
 let videoMetadata = NULL_EXTRACTED_METADATA;
 try {
-addLogLine(`getVideoMetadata called for ${getFileNameSize(file)}`);
+log.info(`getVideoMetadata called for ${getFileNameSize(file)}`);
 videoMetadata = await ffmpegService.extractVideoMetadata(file);
-addLogLine(
+log.info(
 `videoMetadata successfully extracted ${getFileNameSize(file)}`,
 );
 } catch (e) {
 log.error("failed to get video metadata", e);
-addLogLine(
+log.info(
 `videoMetadata extracted failed ${getFileNameSize(file)} ,${
 e.message
 } `,

View file

@@ -1,10 +1,9 @@
-import { addLogLine } from "@ente/shared/logging";
+import log from "@/next/log";
 import { promiseWithTimeout } from "@ente/shared/utils";
 import QueueProcessor from "@ente/shared/utils/queueProcessor";
 import { generateTempName } from "@ente/shared/utils/temp";
 import { createFFmpeg, FFmpeg } from "ffmpeg-wasm";
 import { getUint8ArrayView } from "services/readerService";
-import log from "@/next/log";

 const INPUT_PATH_PLACEHOLDER = "INPUT";
 const FFMPEG_PLACEHOLDER = "FFMPEG";
@@ -86,7 +85,7 @@ export class WasmFFmpeg {
 return cmdPart;
 }
 });
-addLogLine(`${cmd}`);
+log.info(`${cmd}`);
 await this.ffmpeg.run(...cmd);
 return new File(
 [this.ffmpeg.FS("readFile", tempOutputFilePath)],

View file

@@ -1,5 +1,4 @@
 import log from "@/next/log";
-import { addLogLine } from "@ente/shared/logging";
 import { ElectronFile } from "types/upload";
 import { EventQueueItem } from "types/watchFolder";
 import watchFolderService from "./watchFolderService";
@@ -22,7 +21,7 @@ export async function diskFileAddedCallback(file: ElectronFile) {
 files: [file],
 };
 watchFolderService.pushEvent(event);
-addLogLine(
+log.info(
 `added (upload) to event queue, collectionName:${event.collectionName} folderPath:${event.folderPath}, filesCount: ${event.files.length}`,
 );
 } catch (e) {
@@ -48,7 +47,7 @@ export async function diskFileRemovedCallback(filePath: string) {
 paths: [filePath],
 };
 watchFolderService.pushEvent(event);
-addLogLine(
+log.info(
 `added (trash) to event queue collectionName:${event.collectionName} folderPath:${event.folderPath} , pathsCount: ${event.paths.length}`,
 );
 } catch (e) {
@@ -63,11 +62,11 @@ export async function diskFolderRemovedCallback(folderPath: string) {
 (mapping) => mapping.folderPath === folderPath,
 );
 if (!mapping) {
-addLogLine(`folder not found in mappings, ${folderPath}`);
+log.info(`folder not found in mappings, ${folderPath}`);
 throw Error(`Watch mapping not found`);
 }
 watchFolderService.pushTrashedDir(folderPath);
-addLogLine(`added trashedDir, ${folderPath}`);
+log.info(`added trashedDir, ${folderPath}`);
 } catch (e) {
 log.error("error while calling diskFolderRemovedCallback", e);
 }

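Note: the three callbacks above construct `EventQueueItem`s carrying the fields they log (`collectionName`, `folderPath`, and either `files` or `paths`). A sketch of what that shape could look like, reconstructed from this hunk alone; the `type` discriminant is an assumption, not visible in the diff:

    import { ElectronFile } from "types/upload";

    // Hypothetical reconstruction of the queue item pushed by the callbacks.
    type EventQueueItem = {
        type: "upload" | "trash"; // assumed discriminant
        collectionName: string;
        folderPath: string;
        files?: ElectronFile[]; // present for upload events
        paths?: string[]; // present for trash events
    };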
View file

@@ -1,7 +1,6 @@
 import ElectronAPIs from "@/next/electron";
 import log from "@/next/log";
 import { CustomError } from "@ente/shared/error";
-import { addLogLine } from "@ente/shared/logging";
 import { getAlbumsURL } from "@ente/shared/network/api";
 import { LS_KEYS, getData } from "@ente/shared/storage/localStorage";
 import { getUnixTimeInMicroSecondsWithDelta } from "@ente/shared/time";
@@ -568,13 +567,13 @@ export const getOrCreateAlbum = async (
 }
 for (const collection of existingCollections) {
 if (isValidReplacementAlbum(collection, user, albumName)) {
-addLogLine(
+log.info(
 `Found existing album ${albumName} with id ${collection.id}`,
 );
 return collection;
 }
 }
 const album = await createAlbum(albumName);
-addLogLine(`Created new album ${albumName} with id ${album.id}`);
+log.info(`Created new album ${albumName} with id ${album.id}`);
 return album;
 };

View file

@@ -1,4 +1,4 @@
-import { addLogLine } from "@ente/shared/logging";
+import log from "@/next/log";
 import { JobConfig, JobResult, JobState } from "types/common/job";

 export class SimpleJob<R extends JobResult> {
@@ -27,7 +27,7 @@ export class SimpleJob<R extends JobResult> {
 if (this.state !== "Running") {
 this.scheduleNext();
 } else {
-addLogLine("Job already running, not scheduling");
+log.info("Job already running, not scheduling");
 }
 }
@@ -41,7 +41,7 @@ export class SimpleJob<R extends JobResult> {
 this.intervalSec * 1000,
 );
 this.state = "Scheduled";
-addLogLine("Scheduled next job after: ", this.intervalSec);
+log.info("Scheduled next job after: ", this.intervalSec);
 }

 async run() {
@@ -58,7 +58,7 @@ export class SimpleJob<R extends JobResult> {
 } else {
 this.resetInterval();
 }
-addLogLine("Job completed");
+log.info("Job completed");
 } catch (e) {
 console.error("Error while running Job: ", e);
 } finally {
@@ -77,6 +77,6 @@ export class SimpleJob<R extends JobResult> {
 clearTimeout(this.nextTimeoutId);
 this.nextTimeoutId = undefined;
 this.state = "NotScheduled";
-addLogLine("Cleared next job");
+log.info("Cleared next job");
 }
 }

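Note: `SimpleJob` is a small self-rescheduling timer with three states ("NotScheduled", "Scheduled", "Running"). A self-contained sketch of the same state machine, assuming a caller-supplied async callback; the constructor shape and the reschedule-in-finally placement are inferences, since the full class body is not shown in these hunks:

    type JobState = "Scheduled" | "Running" | "NotScheduled";

    class TinyJob {
        private state: JobState = "NotScheduled";
        private timeoutId: ReturnType<typeof setTimeout> | undefined;

        constructor(
            private intervalSec: number,
            private callback: () => Promise<void>, // assumed shape
        ) {}

        scheduleNext() {
            if (this.state === "Running") return; // "Job already running, not scheduling"
            this.timeoutId = setTimeout(() => void this.run(), this.intervalSec * 1000);
            this.state = "Scheduled";
        }

        private async run() {
            this.state = "Running";
            try {
                await this.callback();
            } finally {
                this.state = "NotScheduled";
                this.scheduleNext(); // keep the job recurring after each run
            }
        }

        stop() {
            clearTimeout(this.timeoutId);
            this.timeoutId = undefined;
            this.state = "NotScheduled"; // "Cleared next job"
        }
    }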
View file

@@ -80,7 +80,7 @@ export function cropWithRotation(
 }
 }
-// addLogLine({ imageBitmap, box, outputSize });
+// log.info({ imageBitmap, box, outputSize });

 const offscreen = new OffscreenCanvas(outputSize.width, outputSize.height);
 const offscreenCtx = offscreen.getContext("2d");

View file

@@ -61,7 +61,7 @@ export function getFaceAlignmentUsingSimilarityTransform(
 simTransform.rotation.get(0, 1),
 simTransform.rotation.get(0, 0),
 );
-// addLogLine({ affineMatrix, meanTranslation, centerMat, center, toMean: simTransform.toMean, fromMean: simTransform.fromMean, size });
+// log.info({ affineMatrix, meanTranslation, centerMat, center, toMean: simTransform.toMean, fromMean: simTransform.fromMean, size });

 return {
 affineMatrix,
@@ -169,7 +169,7 @@ export function ibExtractFaceImageUsingTransform(
 const scaledMatrix = new Matrix(alignment.affineMatrix)
 .mul(faceSize)
 .to2DArray();
-// addLogLine("scaledMatrix: ", scaledMatrix);
+// log.info("scaledMatrix: ", scaledMatrix);

 return transform(image, scaledMatrix, faceSize, faceSize);
 }
@@ -230,7 +230,7 @@ export function getRotatedFaceImage(
 padding: number = 1.5,
 ): tf.Tensor4D {
 const paddedBox = enlargeBox(faceDetection.box, padding);
-// addLogLine("paddedBox", paddedBox);
+// log.info("paddedBox", paddedBox);
 const landmarkPoints = faceDetection.landmarks;

 return tf.tidy(() => {
@@ -245,15 +245,15 @@ export function getRotatedFaceImage(
 foreheadCenter,
 ); // landmarkPoints[BLAZEFACE_NOSE_INDEX]
 // angle = computeRotation(leftEye, rightEye);
-// addLogLine('angle: ', angle);
+// log.info('angle: ', angle);

 const faceCenter = getBoxCenter(faceDetection.box);
-// addLogLine('faceCenter: ', faceCenter);
+// log.info('faceCenter: ', faceCenter);
 const faceCenterNormalized: [number, number] = [
 faceCenter.x / tf4dFloat32Image.shape[2],
 faceCenter.y / tf4dFloat32Image.shape[1],
 ];
-// addLogLine('faceCenterNormalized: ', faceCenterNormalized);
+// log.info('faceCenterNormalized: ', faceCenterNormalized);

 let rotatedImage = tf4dFloat32Image;
 if (angle !== 0) {

View file

@@ -1,4 +1,4 @@
-import { addLogLine } from "@ente/shared/logging";
+import log from "@/next/log";
 import { CacheStorageService } from "@ente/shared/storage/cacheStorage";
 import { CACHES } from "@ente/shared/storage/cacheStorage/constants";
 import { getBlobFromCache } from "@ente/shared/storage/cacheStorage/helpers";
@@ -105,7 +105,7 @@ export async function removeOldFaceCrops(
 }

 export async function removeFaceCropUrls(faceCropUrls: Array<string>) {
-addLogLine("Removing face crop urls: ", JSON.stringify(faceCropUrls));
+log.info("Removing face crop urls: ", JSON.stringify(faceCropUrls));
 const faceCropCache = await CacheStorageService.open(CACHES.FACE_CROPS);
 const urlRemovalPromises = faceCropUrls?.map((url) =>
 faceCropCache.delete(url),
@@ -132,7 +132,7 @@ export function extractFaceImageFromCrop(
 .shift(-imageBox.x, -imageBox.y)
 .rescale(scale)
 .round();
-// addLogLine({ box, imageBox, faceCropImage, scale, scaledBox, scaledImageBox, shiftedBox });
+// log.info({ box, imageBox, faceCropImage, scale, scaledBox, scaledImageBox, shiftedBox });

 const faceSizeDimentions: Dimensions = {
 width: faceSize,

View file

@@ -1,4 +1,4 @@
-import { addLogLine } from "@ente/shared/logging";
+import log from "@/next/log";
 import { CACHES } from "@ente/shared/storage/cacheStorage/constants";
 import { cached } from "@ente/shared/storage/cacheStorage/helpers";
 import * as tf from "@tensorflow/tfjs-core";
@@ -130,7 +130,7 @@ export function extractFaces(
 ];
 });
-// addLogLine('boxes: ', boxes[0]);
+// log.info('boxes: ', boxes[0]);

 const faceImagesTensor = tf.image.cropAndResize(
 reshapedImage,
@@ -356,14 +356,14 @@ export async function getOriginalImageBitmap(
 } else {
 fileBlob = await getOriginalConvertedFile(file, queue);
 }
-addLogLine("[MLService] Got file: ", file.id.toString());
+log.info("[MLService] Got file: ", file.id.toString());
 return getImageBlobBitmap(fileBlob);
 }

 export async function getThumbnailImageBitmap(file: EnteFile) {
 const thumb = await DownloadManager.getThumbnail(file);
-addLogLine("[MLService] Got thumbnail: ", file.id.toString());
+log.info("[MLService] Got thumbnail: ", file.id.toString());
 return getImageBlobBitmap(new Blob([thumb]));
 }
@@ -380,7 +380,7 @@ export async function getLocalFileImageBitmap(
 export async function getPeopleList(file: EnteFile): Promise<Array<Person>> {
 let startTime = Date.now();
 const mlFileData: MlFileData = await mlIDbStorage.getFile(file.id);
-addLogLine(
+log.info(
 "getPeopleList:mlFilesStore:getItem",
 Date.now() - startTime,
 "ms",
@@ -395,18 +395,18 @@ export async function getPeopleList(file: EnteFile): Promise<Array<Person>> {
 if (!peopleIds || peopleIds.length < 1) {
 return [];
 }
-// addLogLine("peopleIds: ", peopleIds);
+// log.info("peopleIds: ", peopleIds);
 startTime = Date.now();
 const peoplePromises = peopleIds.map(
 (p) => mlIDbStorage.getPerson(p) as Promise<Person>,
 );
 const peopleList = await Promise.all(peoplePromises);
-addLogLine(
+log.info(
 "getPeopleList:mlPeopleStore:getItems",
 Date.now() - startTime,
 "ms",
 );
-// addLogLine("peopleList: ", peopleList);
+// log.info("peopleList: ", peopleList);
 return peopleList;
 }
@@ -514,7 +514,7 @@ export function getNearestPointIndex(
 (a, b) => Math.abs(a.distance) - Math.abs(b.distance),
 );
-// addLogLine('Nearest dist: ', nearest.distance, maxDistance);
+// log.info('Nearest dist: ', nearest.distance, maxDistance);
 if (!maxDistance || nearest.distance <= maxDistance) {
 return nearest.index;
 }
@@ -522,11 +522,11 @@

 export function logQueueStats(queue: PQueue, name: string) {
 queue.on("active", () =>
-addLogLine(
+log.info(
 `queuestats: ${name}: Active, Size: ${queue.size} Pending: ${queue.pending}`,
 ),
 );
-queue.on("idle", () => addLogLine(`queuestats: ${name}: Idle`));
+queue.on("idle", () => log.info(`queuestats: ${name}: Idle`));
 queue.on("error", (error) =>
 console.error(`queuestats: ${name}: Error, `, error),
 );

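Note: `logQueueStats` above only wires log lines onto p-queue lifecycle events. A usage sketch, assuming the `p-queue` default export; the queue name "demo" is illustrative:

    import PQueue from "p-queue";

    const queue = new PQueue({ concurrency: 2 });
    logQueueStats(queue, "demo"); // logs size/pending on "active", one line on "idle"

    // Each task added below triggers an "active" event; once drained, "idle" fires.
    for (let i = 0; i < 4; i++) {
        queue.add(() => new Promise((resolve) => setTimeout(resolve, 100)));
    }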
View file

@@ -1,6 +1,5 @@
 import { haveWindow } from "@/next/env";
 import log from "@/next/log";
-import { addLogLine } from "@ente/shared/logging";
 import {
     DEFAULT_ML_SEARCH_CONFIG,
     DEFAULT_ML_SYNC_CONFIG,
@@ -129,7 +128,7 @@ class MLIDbStorage {
 .objectStore("configs")
 .add(DEFAULT_ML_SEARCH_CONFIG, ML_SEARCH_CONFIG_NAME);
 }
-addLogLine(
+log.info(
 `Ml DB upgraded to version: ${newVersion} from version: ${oldVersion}`,
 );
 },
@@ -139,7 +138,7 @@ class MLIDbStorage {
 public get db(): Promise<IDBPDatabase<MLDb>> {
 if (!this._db) {
 this._db = this.openDB();
-addLogLine("Opening Ml DB");
+log.info("Opening Ml DB");
 }

 return this._db;
@@ -149,7 +148,7 @@ class MLIDbStorage {
 const db = await this.db;
 db.close();
 await deleteDB(MLDATA_DB_NAME);
-addLogLine("Cleared Ml DB");
+log.info("Cleared Ml DB");
 this._db = undefined;
 await this.db;
 }
@@ -278,7 +277,7 @@ class MLIDbStorage {
 mlFileData.faces &&
 allFacesMap.set(mlFileData.fileId, mlFileData.faces),
 );
-addLogLine("getAllFacesMap", Date.now() - startTime, "ms");
+log.info("getAllFacesMap", Date.now() - startTime, "ms");

 return allFacesMap;
 }
@@ -297,7 +296,7 @@ class MLIDbStorage {
 cursor = await cursor.continue();
 }
 await tx.done;
-addLogLine("updateFaces", Date.now() - startTime, "ms");
+log.info("updateFaces", Date.now() - startTime, "ms");
 }

 public async getAllObjectsMap() {
@@ -310,7 +309,7 @@ class MLIDbStorage {
 mlFileData.objects &&
 allObjectsMap.set(mlFileData.fileId, mlFileData.objects),
 );
-addLogLine("allObjectsMap", Date.now() - startTime, "ms");
+log.info("allObjectsMap", Date.now() - startTime, "ms");

 return allObjectsMap;
 }

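Note: the storage methods above repeat a manual `startTime` / `Date.now() - startTime` dance around one awaited call. A small helper could factor that out; a sketch (the `withTiming` name is mine, not from this codebase), assuming `log.info` accepts a single string:

    // Time an async operation and log how long it took.
    const withTiming = async <T>(label: string, op: () => Promise<T>): Promise<T> => {
        const startTime = Date.now();
        try {
            return await op();
        } finally {
            log.info(`${label} ${Date.now() - startTime} ms`);
        }
    };

    // e.g. const allFacesMap = await withTiming("getAllFacesMap", () => storage.getAllFacesMap());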
View file

@@ -1,4 +1,4 @@
-import { addLogLine } from "@ente/shared/logging";
+import log from "@/next/log";
 import { expose } from "comlink";
 import mlService from "services/machineLearning/machineLearningService";
 import { EnteFile } from "types/file";
@@ -6,7 +6,7 @@ import { MachineLearningWorker } from "types/machineLearning";
 export class DedicatedMLWorker implements MachineLearningWorker {
 constructor() {
-addLogLine("DedicatedMLWorker constructor called");
+log.info("DedicatedMLWorker constructor called");
 }

 public async closeLocalSyncContext() {

View file

@@ -1,24 +1,21 @@
-import { sendOtt } from "@ente/accounts/api/user";
-import { isWeakPassword } from "@ente/accounts/utils";
-import { generateKeyAndSRPAttributes } from "@ente/accounts/utils/srp";
-import SubmitButton from "@ente/shared/components/SubmitButton";
-import {
-    generateAndSaveIntermediateKeyAttributes,
-    saveKeyInSessionStore,
-} from "@ente/shared/crypto/helpers";
-import { LS_KEYS, setData } from "@ente/shared/storage/localStorage";
-import { Formik, FormikHelpers } from "formik";
-import React, { useState } from "react";
-import * as Yup from "yup";
 import log from "@/next/log";
+import { sendOtt } from "@ente/accounts/api/user";
 import { PasswordStrengthHint } from "@ente/accounts/components/PasswordStrength";
 import { PAGES } from "@ente/accounts/constants/pages";
+import { isWeakPassword } from "@ente/accounts/utils";
+import { generateKeyAndSRPAttributes } from "@ente/accounts/utils/srp";
 import { APPS } from "@ente/shared/apps/constants";
 import { VerticallyCentered } from "@ente/shared/components//Container";
 import FormPaperFooter from "@ente/shared/components/Form/FormPaper/Footer";
 import FormPaperTitle from "@ente/shared/components/Form/FormPaper/Title";
 import ShowHidePassword from "@ente/shared/components/Form/ShowHidePassword";
 import LinkButton from "@ente/shared/components/LinkButton";
+import SubmitButton from "@ente/shared/components/SubmitButton";
+import {
+    generateAndSaveIntermediateKeyAttributes,
+    saveKeyInSessionStore,
+} from "@ente/shared/crypto/helpers";
+import { LS_KEYS, setData } from "@ente/shared/storage/localStorage";
 import {
     setJustSignedUp,
     setLocalReferralSource,
@@ -37,9 +34,12 @@ import {
     Tooltip,
     Typography,
 } from "@mui/material";
+import { Formik, FormikHelpers } from "formik";
 import { t } from "i18next";
 import { NextRouter } from "next/router";
+import React, { useState } from "react";
 import { Trans } from "react-i18next";
+import * as Yup from "yup";

 interface FormValues {
     email: string;

View file

@@ -1,5 +1,5 @@
 import { isDevBuild } from "@/next/env";
-import { addLogLine } from "@ente/shared/logging";
+import log from "@/next/log";

 /**
  * Log a standard startup banner.
@@ -15,7 +15,7 @@ export const logStartupBanner = (appId: string, userId?: number) => {
 const sha = process.env.GIT_SHA;
 const buildId = isDevBuild ? "dev " : sha ? `git ${sha} ` : "";
-addLogLine(`Starting ente-${appIdL}-web ${buildId}uid ${userId ?? 0}`);
+log.info(`Starting ente-${appIdL}-web ${buildId}uid ${userId ?? 0}`);
 };

 interface LogEntry {

View file

@@ -8,7 +8,6 @@ import { getActualKey } from "@ente/shared/user";
 import { KeyAttributes } from "@ente/shared/user/types";
 import isElectron from "is-electron";
 import ComlinkCryptoWorker from ".";
-import { addLogLine } from "../logging";

 const LOGIN_SUB_KEY_LENGTH = 32;
 const LOGIN_SUB_KEY_ID = 1;
@@ -104,7 +103,6 @@ export const saveKeyInSessionStore = async (
 const sessionKeyAttributes =
 await cryptoWorker.generateKeyAndEncryptToB64(key);
 setKey(keyType, sessionKeyAttributes);
-addLogLine("fromDesktop", fromDesktop);
 if (
 isElectron() &&
 !fromDesktop &&

View file

@@ -1,9 +0,0 @@
-import log from "@/next/log";
-
-export function addLogLine(
-    msg: string | number | boolean,
-    ...optionalParams: (string | number | boolean)[]
-) {
-    const completeLog = [msg, ...optionalParams].join(" ");
-    log.info(completeLog);
-}

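Note: the deleted shim defines the semantics being replaced: every argument is stringified and joined with spaces into one message. The multi-argument call sites converted above (e.g. `log.info("syncLocalFiles", Date.now() - startTime, "ms")`) stay equivalent only if `log.info` itself is variadic with the same join behaviour; a sketch of that assumed signature, not a quote from `@/next/log`:

    // Assumed: a rest-parameter info() matching the removed shim's join semantics.
    const info = (...params: unknown[]): void => {
        console.log(params.map((p) => String(p)).join(" ")); // stand-in sink
    };

    info("syncLocalFiles", 42, "ms"); // prints "syncLocalFiles 42 ms"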
View file

@@ -1,27 +0,0 @@
-import { ApiError } from "@ente/shared/error";
-import { addLogLine } from "@ente/shared/logging";
-
-/** Deprecated: Use `logError` from `@/utils/logging` */
-export const logError = async (
-    error: any,
-    msg: string,
-    info?: Record<string, unknown>,
-    skipAddLogLine = false,
-) => {
-    if (skipAddLogLine) return;
-
-    if (error instanceof ApiError) {
-        addLogLine(`error: ${error?.name} ${error?.message}
-msg: ${msg} errorCode: ${JSON.stringify(error?.errCode)}
-httpStatusCode: ${JSON.stringify(error?.httpStatusCode)} ${
-            info ? `info: ${JSON.stringify(info)}` : ""
-        }
-${error?.stack}`);
-    } else {
-        addLogLine(
-            `error: ${error?.name} ${error?.message}
-msg: ${msg} ${info ? `info: ${JSON.stringify(info)}` : ""}
-${error?.stack}`,
-        );
-    }
-};

View file

@@ -1,7 +1,7 @@
+import log from "@/next/log";
 import { CacheStorageService } from ".";
 import { CACHES } from "./constants";
 import { LimitedCache } from "./types";
-import log from "@/next/log";

 export async function cached(
 cacheName: string,

View file

@@ -1,41 +0,0 @@
-/**
- * Log an error
- *
- * The {@link message} property describes what went wrong. Generally (but not
- * always) in such situations we also have an "error" object that has specific
- * details about the issue - that gets passed as the second parameter.
- *
- * Note that the "error" {@link e} is not typed. This is because in JavaScript
- * any arbitrary value can be thrown. So this function allows us to pass it an
- * arbitrary value as the error, and will internally figure out how best to deal
- * with it.
- *
- * Where and how this error gets logged is dependent on where this code is
- * running. The default implementation logs a string to the console, but in
- * practice the layers above us will use the hooks provided in this file to
- * route and show this error elsewhere.
- *
- * TODO (MR): Currently this is a placeholder function to funnel error logs
- * through. This needs to do what the existing logError in @ente/shared does,
- * but it cannot have a direct Electron dependency here. For now, we just
- * log on the console.
- */
-export const logError = (message: string, e?: unknown) => {
-    if (e === undefined || e === null) {
-        console.error(message);
-        return;
-    }
-
-    let es: string;
-    if (e instanceof Error) {
-        // In practice, we expect ourselves to be called with Error objects, so
-        // this is the happy path so to say.
-        es = `${e.name}: ${e.message}\n${e.stack}`;
-    } else {
-        // For the rest rare cases, use the default string serialization of e.
-        es = String(e);
-    }
-
-    // TODO(MR): Use addLogLine
-    console.error(`${message}: ${es}`);
-};
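Note: with both deprecated helpers gone, call sites log failures via `log.error(message, e)` directly, as the hunks above already do (e.g. `log.error("getFileHash failed", e)`). The deleted placeholder's handling of untyped thrown values is the part worth remembering; a standalone sketch of that logic:

    // Serialize an unknown thrown value the way the removed logError did.
    const describeError = (e: unknown): string =>
        e instanceof Error ? `${e.name}: ${e.message}\n${e.stack}` : String(e);

    try {
        JSON.parse("not json");
    } catch (e) {
        console.error(`parse failed: ${describeError(e)}`); // mirrors the removed fallback
    }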