JSON 2
parent 505d376dc9
commit 42c1bec044
4 changed files with 100 additions and 74 deletions

@@ -1,4 +1,5 @@
import { encodeLivePhoto } from "@/media/live-photo";
import { ensureElectron } from "@/next/electron";
import { basename, getFileNameSize } from "@/next/file";
import log from "@/next/log";
import { DedicatedCryptoWorker } from "@ente/shared/crypto/internal/crypto.worker";
@@ -168,67 +169,79 @@ export const getMetadataJSONMapKeyForFile = (
    return `${collectionID}-${getFileOriginalName(fileName)}`;
};

export async function parseMetadataJSON(receivedFile: File | ElectronFile) {
export async function parseMetadataJSON(
    receivedFile: File | ElectronFile | string,
) {
    try {
        if (!(receivedFile instanceof File)) {
            receivedFile = new File(
                [await receivedFile.blob()],
                receivedFile.name,
            );
        }
        const metadataJSON: object = JSON.parse(await receivedFile.text());

        const parsedMetadataJSON: ParsedMetadataJSON =
            NULL_PARSED_METADATA_JSON;
        if (!metadataJSON) {
            return;
        let text: string;
        if (typeof receivedFile == "string") {
            text = await ensureElectron().fs.readTextFile(receivedFile);
        } else {
            if (!(receivedFile instanceof File)) {
                receivedFile = new File(
                    [await receivedFile.blob()],
                    receivedFile.name,
                );
            }
            text = await receivedFile.text();
        }

        if (
            metadataJSON["photoTakenTime"] &&
            metadataJSON["photoTakenTime"]["timestamp"]
        ) {
            parsedMetadataJSON.creationTime =
                metadataJSON["photoTakenTime"]["timestamp"] * 1000000;
        } else if (
            metadataJSON["creationTime"] &&
            metadataJSON["creationTime"]["timestamp"]
        ) {
            parsedMetadataJSON.creationTime =
                metadataJSON["creationTime"]["timestamp"] * 1000000;
        }
        if (
            metadataJSON["modificationTime"] &&
            metadataJSON["modificationTime"]["timestamp"]
        ) {
            parsedMetadataJSON.modificationTime =
                metadataJSON["modificationTime"]["timestamp"] * 1000000;
        }
        let locationData: Location = NULL_LOCATION;
        if (
            metadataJSON["geoData"] &&
            (metadataJSON["geoData"]["latitude"] !== 0.0 ||
                metadataJSON["geoData"]["longitude"] !== 0.0)
        ) {
            locationData = metadataJSON["geoData"];
        } else if (
            metadataJSON["geoDataExif"] &&
            (metadataJSON["geoDataExif"]["latitude"] !== 0.0 ||
                metadataJSON["geoDataExif"]["longitude"] !== 0.0)
        ) {
            locationData = metadataJSON["geoDataExif"];
        }
        if (locationData !== null) {
            parsedMetadataJSON.latitude = locationData.latitude;
            parsedMetadataJSON.longitude = locationData.longitude;
        }
        return parsedMetadataJSON;
        return parseMetadataJSONText(text);
    } catch (e) {
        log.error("parseMetadataJSON failed", e);
        // ignore
    }
}

export async function parseMetadataJSONText(text: string) {
    const metadataJSON: object = JSON.parse(text);

    const parsedMetadataJSON: ParsedMetadataJSON = NULL_PARSED_METADATA_JSON;
    if (!metadataJSON) {
        return;
    }

    if (
        metadataJSON["photoTakenTime"] &&
        metadataJSON["photoTakenTime"]["timestamp"]
    ) {
        parsedMetadataJSON.creationTime =
            metadataJSON["photoTakenTime"]["timestamp"] * 1000000;
    } else if (
        metadataJSON["creationTime"] &&
        metadataJSON["creationTime"]["timestamp"]
    ) {
        parsedMetadataJSON.creationTime =
            metadataJSON["creationTime"]["timestamp"] * 1000000;
    }
    if (
        metadataJSON["modificationTime"] &&
        metadataJSON["modificationTime"]["timestamp"]
    ) {
        parsedMetadataJSON.modificationTime =
            metadataJSON["modificationTime"]["timestamp"] * 1000000;
    }
    let locationData: Location = NULL_LOCATION;
    if (
        metadataJSON["geoData"] &&
        (metadataJSON["geoData"]["latitude"] !== 0.0 ||
            metadataJSON["geoData"]["longitude"] !== 0.0)
    ) {
        locationData = metadataJSON["geoData"];
    } else if (
        metadataJSON["geoDataExif"] &&
        (metadataJSON["geoDataExif"]["latitude"] !== 0.0 ||
            metadataJSON["geoDataExif"]["longitude"] !== 0.0)
    ) {
        locationData = metadataJSON["geoDataExif"];
    }
    if (locationData !== null) {
        parsedMetadataJSON.latitude = locationData.latitude;
        parsedMetadataJSON.longitude = locationData.longitude;
    }
    return parsedMetadataJSON;
}

// tries to extract date from file name if available else returns null
export function extractDateFromFileName(filename: string): number {
    try {
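
To summarize this first file's change: parseMetadataJSON now also accepts a path string (used by the desktop app, which reads the file over the Electron bridge), and the actual field extraction moves into the new parseMetadataJSONText. A rough usage sketch, not part of the commit — jsonFile, the path, and the sidecar values below are made up for illustration, and the string branch only works where ensureElectron() succeeds:

// Browser: pass the File picked up from a drop / file-picker event.
const fromFile = await parseMetadataJSON(jsonFile);

// Desktop: pass the absolute path; parseMetadataJSON reads it via
// ensureElectron().fs.readTextFile() before delegating to parseMetadataJSONText.
const fromPath = await parseMetadataJSON("/takeout/IMG_1234.HEIC.json");

// parseMetadataJSONText converts Takeout's epoch-second timestamps into the
// microsecond values that ParsedMetadataJSON stores, and copies geoData when
// it is non-zero:
const parsed = await parseMetadataJSONText(
    JSON.stringify({
        photoTakenTime: { timestamp: 1700000000 },
        geoData: { latitude: 12.97, longitude: 77.59 },
    }),
);
// parsed.creationTime === 1700000000 * 1000000
// parsed.latitude === 12.97, parsed.longitude === 77.59
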
@@ -1,4 +1,3 @@
import { getFileNameSize } from "@/next/file";
import log from "@/next/log";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { getDedicatedCryptoWorker } from "@ente/shared/crypto";

@@ -33,6 +32,7 @@ import { decryptFile, getUserOwnedFiles, sortFiles } from "utils/file";
import {
    areFileWithCollectionsSame,
    segregateMetadataAndMediaFiles,
    segregateMetadataAndMediaFiles2,
} from "utils/upload";
import { getLocalFiles } from "../fileService";
import {

@@ -41,7 +41,7 @@ import {
} from "./metadataService";
import { default as UIService, default as uiService } from "./uiService";
import uploadCancelService from "./uploadCancelService";
import UploadService, { uploader } from "./uploadService";
import UploadService, { getFileName, uploader } from "./uploadService";

const MAX_CONCURRENT_UPLOADS = 4;
@@ -229,7 +229,7 @@ class UploadManager {
            ),
        );
        const { metadataJSONFiles, mediaFiles } =
            segregateMetadataAndMediaFiles(filesWithCollectionToUploadIn);
            segregateMetadataAndMediaFiles2(filesWithCollectionToUploadIn);
        log.info(`has ${metadataJSONFiles.length} metadata json files`);
        log.info(`has ${mediaFiles.length} media files`);
        if (metadataJSONFiles.length) {

@@ -296,37 +296,30 @@ class UploadManager {
        }
    }

    private async parseMetadataJSONFiles(metadataFiles: FileWithCollection[]) {
    private async parseMetadataJSONFiles(metadataFiles: FileWithCollection2[]) {
        try {
            log.info(`parseMetadataJSONFiles function executed `);

            UIService.reset(metadataFiles.length);

            for (const { file, collectionID } of metadataFiles) {
                const name = getFileName(file);
                try {
                    if (uploadCancelService.isUploadCancelationRequested()) {
                        throw Error(CustomError.UPLOAD_CANCELLED);
                    }
                    log.info(
                        `parsing metadata json file ${getFileNameSize(file)}`,
                    );

                    log.info(`parsing metadata json file ${name}`);

                    const parsedMetadataJSON = await parseMetadataJSON(file);
                    if (parsedMetadataJSON) {
                        this.parsedMetadataJSONMap.set(
                            getMetadataJSONMapKeyForJSON(
                                collectionID,
                                file.name,
                            ),
                            getMetadataJSONMapKeyForJSON(collectionID, name),
                            parsedMetadataJSON && { ...parsedMetadataJSON },
                        );
                        UIService.increaseFileUploaded();
                    }
                    log.info(
                        `successfully parsed metadata json file ${getFileNameSize(
                            file,
                        )}`,
                    );
                    log.info(`successfully parsed metadata json file ${name}`);
                } catch (e) {
                    if (e.message === CustomError.UPLOAD_CANCELLED) {
                        throw e;

@@ -334,9 +327,7 @@ class UploadManager {
                        // and don't break for subsequent files just log and move on
                        log.error("parsing failed for a file", e);
                        log.info(
                            `failed to parse metadata json file ${getFileNameSize(
                                file,
                            )} error: ${e.message}`,
                            `failed to parse metadata json file ${name} error: ${e.message}`,
                        );
                    }
                }
@@ -135,7 +135,7 @@ class UploadService {
    getAssetName({ isLivePhoto, file, livePhotoAssets }: UploadAsset2) {
        return isLivePhoto
            ? getLivePhotoName(livePhotoAssets)
            : getFilename(file);
            : getFileName(file);
    }

    getAssetFileType({ isLivePhoto, file, livePhotoAssets }: UploadAsset) {

@@ -366,7 +366,7 @@ function getFileSize(file: File | ElectronFile) {
    return file.size;
}

const getFilename = (file: File | ElectronFile | string) =>
export const getFileName = (file: File | ElectronFile | string) =>
    typeof file == "string" ? basename(file) : file.name;

async function readFile(
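
The getFilename → getFileName rename (and export) above is what UploadManager now uses for its log lines and map keys. A quick illustration of what it returns for the three shapes it accepts — the ElectronFile literal and the path are made up, and this assumes basename() from "@/next/file" returns the last path component:

getFileName(new File(["…"], "flower.png"));         // "flower.png"   (browser File)
getFileName({ name: "video.mp4" } as ElectronFile); // "video.mp4"    (ElectronFile)
getFileName("/takeout/IMG_1234.HEIC.json");         // "IMG_1234.HEIC.json", via basename()
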

@@ -4,7 +4,12 @@ import { A_SEC_IN_MICROSECONDS, PICKED_UPLOAD_TYPE } from "constants/upload";
import isElectron from "is-electron";
import { exportMetadataDirectoryName } from "services/export";
import { EnteFile } from "types/file";
import { ElectronFile, FileWithCollection, Metadata } from "types/upload";
import {
    ElectronFile,
    FileWithCollection,
    Metadata,
    type FileWithCollection2,
} from "types/upload";

const TYPE_JSON = "json";
const DEDUPE_COLLECTION = new Set(["icloud library", "icloudlibrary"]);

@@ -95,6 +100,23 @@ export function segregateMetadataAndMediaFiles(
    return { mediaFiles, metadataJSONFiles };
}

export function segregateMetadataAndMediaFiles2(
    filesWithCollectionToUpload: FileWithCollection2[],
) {
    const metadataJSONFiles: FileWithCollection2[] = [];
    const mediaFiles: FileWithCollection2[] = [];
    filesWithCollectionToUpload.forEach((fileWithCollection) => {
        const file = fileWithCollection.file;
        const s = typeof file == "string" ? file : file.name;
        if (s.toLowerCase().endsWith(TYPE_JSON)) {
            metadataJSONFiles.push(fileWithCollection);
        } else {
            mediaFiles.push(fileWithCollection);
        }
    });
    return { mediaFiles, metadataJSONFiles };
}

export function areFileWithCollectionsSame(
    firstFile: FileWithCollection,
    secondFile: FileWithCollection,
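
And an illustration, not part of the commit, of how segregateMetadataAndMediaFiles2 splits a mixed upload list; the objects are trimmed to just the fields the function reads (file, collectionID) and the paths are invented:

const items = [
    { file: "/takeout/IMG_1234.HEIC", collectionID: 7 },
    { file: "/takeout/IMG_1234.HEIC.json", collectionID: 7 },
] as FileWithCollection2[];

const { mediaFiles, metadataJSONFiles } = segregateMetadataAndMediaFiles2(items);
// mediaFiles        -> the .HEIC entry
// metadataJSONFiles -> the .HEIC.json entry (its lowercased name ends with "json")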