// face_ml_service.dart

import "dart:async";
import "dart:developer" as dev show log;
import "dart:io" show File;
import "dart:isolate";
import "dart:typed_data" show Uint8List, Float32List, ByteData;
import "dart:ui" show Image;

import "package:computer/computer.dart";
import "package:flutter/foundation.dart" show debugPrint, kDebugMode;
import "package:flutter_image_compress/flutter_image_compress.dart";
import "package:flutter_isolate/flutter_isolate.dart";
import "package:logging/logging.dart";
import "package:onnxruntime/onnxruntime.dart";
import "package:photos/core/configuration.dart";
import "package:photos/core/event_bus.dart";
import "package:photos/db/files_db.dart";
import "package:photos/events/diff_sync_complete_event.dart";
import "package:photos/extensions/list.dart";
import "package:photos/extensions/stop_watch.dart";
import "package:photos/face/db.dart";
import "package:photos/face/model/box.dart";
import "package:photos/face/model/detection.dart" as face_detection;
import "package:photos/face/model/face.dart";
import "package:photos/face/model/landmark.dart";
import "package:photos/models/file/extensions/file_props.dart";
import "package:photos/models/file/file.dart";
import "package:photos/models/file/file_type.dart";
import "package:photos/models/ml/ml_versions.dart";
import 'package:photos/services/machine_learning/face_ml/face_clustering/linear_clustering_service.dart';
import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart';
import 'package:photos/services/machine_learning/face_ml/face_detection/yolov5face/onnx_face_detection.dart';
import 'package:photos/services/machine_learning/face_ml/face_detection/yolov5face/yolo_face_detection_exceptions.dart';
import 'package:photos/services/machine_learning/face_ml/face_embedding/face_embedding_exceptions.dart';
import 'package:photos/services/machine_learning/face_ml/face_embedding/onnx_face_embedding.dart';
import 'package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart';
import 'package:photos/services/machine_learning/face_ml/face_ml_exceptions.dart';
import 'package:photos/services/machine_learning/face_ml/face_ml_result.dart';
import 'package:photos/services/machine_learning/file_ml/file_ml.dart';
import 'package:photos/services/machine_learning/file_ml/remote_fileml_service.dart';
import "package:photos/services/search_service.dart";
import "package:photos/utils/file_util.dart";
import 'package:photos/utils/image_ml_isolate.dart';
import "package:photos/utils/image_ml_util.dart";
import "package:photos/utils/local_settings.dart";
import "package:photos/utils/thumbnail_util.dart";
import "package:synchronized/synchronized.dart";

enum FileDataForML { thumbnailData, fileData, compressedFileData }

enum FaceMlOperation { analyzeImage }

/// This class is responsible for running the full face ml pipeline on images.
///
/// WARNING: For getting the ML results needed for the UI, you should use `FaceSearchService` instead of this class!
///
/// The pipeline consists of face detection, face alignment and face embedding.
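///
/// Typical usage (a minimal sketch; in the app indexing is normally triggered
/// via [listenIndexOnDiffSync] rather than being called directly):
/// ```dart
/// await FaceMlService.instance.init();
/// await FaceMlService.instance.indexAndClusterAllImages();
/// ```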
class FaceMlService {
  final _logger = Logger("FaceMlService");

  // Flutter isolate things for running the image ml pipeline
  Timer? _inactivityTimer;
  final Duration _inactivityDuration = const Duration(seconds: 120);
  int _activeTasks = 0;
  final _initLockIsolate = Lock();
  late FlutterIsolate _isolate;
  late ReceivePort _receivePort = ReceivePort();
  late SendPort _mainSendPort;
  bool isIsolateSpawned = false;

  // singleton pattern
  FaceMlService._privateConstructor();
  static final instance = FaceMlService._privateConstructor();
  factory FaceMlService() => instance;

  final _initLock = Lock();
  final _functionLock = Lock();
  final _computer = Computer.shared();

  bool isInitialized = false;
  bool isImageIndexRunning = false;
  int kParallelism = 15;
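
  /// Initializes the ONNX runtime environment and loads the face detection
  /// (YOLO ONNX) and face embedding (MobileFaceNet) models. Safe to call
  /// multiple times; initialization runs at most once behind [_initLock].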
  Future<void> init({bool initializeImageMlIsolate = false}) async {
    return _initLock.synchronized(() async {
      if (isInitialized) {
        return;
      }
      _logger.info("init called");
      await _computer.compute(initOrtEnv);
      try {
        await YoloOnnxFaceDetection.instance.init();
      } catch (e, s) {
        _logger.severe("Could not initialize yolo onnx", e, s);
      }
      if (initializeImageMlIsolate) {
        try {
          await ImageMlIsolate.instance.init();
        } catch (e, s) {
          _logger.severe("Could not initialize image ml isolate", e, s);
        }
      }
      try {
        await FaceEmbeddingOnnx.instance.init();
      } catch (e, s) {
        _logger.severe("Could not initialize mobilefacenet", e, s);
      }
      isInitialized = true;
    });
  }
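
  /// Initializes the ONNX runtime environment. Invoked through
  /// `_computer.compute` in [init] so it runs in a worker isolate.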
  static void initOrtEnv() async {
    OrtEnv.instance.init();
  }

  void listenIndexOnDiffSync() {
    Bus.instance.on<DiffSyncCompleteEvent>().listen((event) async {
      if (LocalSettings.instance.isFaceIndexingEnabled == false) {
        return;
      }
      // [neeraj] intentional delay in starting indexing on diff sync, this gives time for the user
      // to disable face-indexing in case it's causing crash. In the future, we
      // should have a better way to handle this.
      Future.delayed(const Duration(seconds: 10), () {
        unawaited(indexAllImages());
      });
    });
  }

  Future<void> ensureInitialized() async {
    if (!isInitialized) {
      await init();
    }
  }

  Future<void> release() async {
    return _initLock.synchronized(() async {
      _logger.info("dispose called");
      if (!isInitialized) {
        return;
      }
      try {
        await YoloOnnxFaceDetection.instance.release();
      } catch (e, s) {
        _logger.severe("Could not dispose yolo onnx", e, s);
      }
      try {
        ImageMlIsolate.instance.dispose();
      } catch (e, s) {
        _logger.severe("Could not dispose image ml isolate", e, s);
      }
      try {
        await FaceEmbeddingOnnx.instance.release();
      } catch (e, s) {
        _logger.severe("Could not dispose mobilefacenet", e, s);
      }
      OrtEnv.instance.release();
      isInitialized = false;
    });
  }
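
  /// Spawns the dedicated [FlutterIsolate] that runs the image analysis
  /// pipeline (see [_isolateMain]) and stores its [SendPort].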
  Future<void> initIsolate() async {
    return _initLockIsolate.synchronized(() async {
      if (isIsolateSpawned) return;
      _logger.info("initIsolate called");

      _receivePort = ReceivePort();
      try {
        _isolate = await FlutterIsolate.spawn(
          _isolateMain,
          _receivePort.sendPort,
        );
        _mainSendPort = await _receivePort.first as SendPort;
        isIsolateSpawned = true;

        _resetInactivityTimer();
      } catch (e) {
        _logger.severe('Could not spawn isolate', e);
        isIsolateSpawned = false;
      }
    });
  }

  Future<void> ensureSpawnedIsolate() async {
    if (!isIsolateSpawned) {
      await initIsolate();
    }
  }

  /// The main execution function of the isolate.
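  ///
  /// Incoming messages have the shape `[operationIndex, argsMap, replyPort]`
  /// (see [_runInIsolate]). On success the result is sent back as a JSON
  /// string; on failure a map with `error` and `stackTrace` keys is sent.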
  // Annotated as an entry point so FlutterIsolate can spawn it in release builds.
  @pragma('vm:entry-point')
  static void _isolateMain(SendPort mainSendPort) async {
    final receivePort = ReceivePort();
    mainSendPort.send(receivePort.sendPort);

    receivePort.listen((message) async {
      final functionIndex = message[0] as int;
      final function = FaceMlOperation.values[functionIndex];
      final args = message[1] as Map<String, dynamic>;
      final sendPort = message[2] as SendPort;

      try {
        switch (function) {
          case FaceMlOperation.analyzeImage:
            final int enteFileID = args["enteFileID"] as int;
            final String imagePath = args["filePath"] as String;
            final int faceDetectionAddress =
                args["faceDetectionAddress"] as int;
            final int faceEmbeddingAddress =
                args["faceEmbeddingAddress"] as int;

            final resultBuilder =
                FaceMlResultBuilder.fromEnteFileID(enteFileID);

            dev.log(
              "Start analyzing image with uploadedFileID: $enteFileID inside the isolate",
            );
            final stopwatchTotal = Stopwatch()..start();
            final stopwatch = Stopwatch()..start();

            // Decode the image once to use for both face detection and alignment
            final imageData = await File(imagePath).readAsBytes();
            final image = await decodeImageFromData(imageData);
            final ByteData imgByteData = await getByteDataFromImage(image);
            dev.log('Reading and decoding image took '
                '${stopwatch.elapsedMilliseconds} ms');
            stopwatch.reset();

            // Get the faces
            final List<FaceDetectionRelative> faceDetectionResult =
                await FaceMlService.detectFacesSync(
              image,
              imgByteData,
              faceDetectionAddress,
              resultBuilder: resultBuilder,
            );
            dev.log(
                "${faceDetectionResult.length} faces detected with scores ${faceDetectionResult.map((e) => e.score).toList()}: completed `detectFacesSync` function, in "
                "${stopwatch.elapsedMilliseconds} ms");

            // If no faces were detected, return a result with no faces. Otherwise, continue.
            if (faceDetectionResult.isEmpty) {
              dev.log(
                  "No faceDetectionResult, Completed analyzing image with uploadedFileID $enteFileID, in "
                  "${stopwatch.elapsedMilliseconds} ms");
              sendPort.send(resultBuilder.buildNoFaceDetected().toJsonString());
              break;
            }
            stopwatch.reset();

            // Align the faces
            final Float32List faceAlignmentResult =
                await FaceMlService.alignFacesSync(
              image,
              imgByteData,
              faceDetectionResult,
              resultBuilder: resultBuilder,
            );
            dev.log("Completed `alignFacesSync` function, in "
                "${stopwatch.elapsedMilliseconds} ms");
            stopwatch.reset();

            // Get the embeddings of the faces
            final embeddings = await FaceMlService.embedFacesSync(
              faceAlignmentResult,
              faceEmbeddingAddress,
              resultBuilder: resultBuilder,
            );
            dev.log("Completed `embedFacesSync` function, in "
                "${stopwatch.elapsedMilliseconds} ms");
            stopwatch.stop();
            stopwatchTotal.stop();
            dev.log("Finished Analyze image (${embeddings.length} faces) with "
                "uploadedFileID $enteFileID, in "
                "${stopwatchTotal.elapsedMilliseconds} ms");

            sendPort.send(resultBuilder.build().toJsonString());
            break;
        }
      } catch (e, stackTrace) {
        dev.log(
          "[SEVERE] Error in FaceML isolate: $e",
          error: e,
          stackTrace: stackTrace,
        );
        sendPort
            .send({'error': e.toString(), 'stackTrace': stackTrace.toString()});
      }
    });
  }

  /// The common method to run any operation in the isolate. It sends the [message] to [_isolateMain] and waits for the result.
  Future<dynamic> _runInIsolate(
    (FaceMlOperation, Map<String, dynamic>) message,
  ) async {
    await ensureSpawnedIsolate();
    return _functionLock.synchronized(() async {
      _resetInactivityTimer();

      if (isImageIndexRunning == false) {
        return null;
      }

      final completer = Completer<dynamic>();
      final answerPort = ReceivePort();

      _activeTasks++;
      _mainSendPort.send([message.$1.index, message.$2, answerPort.sendPort]);

      answerPort.listen((receivedMessage) {
        // Decrement only once the isolate has answered, so the inactivity
        // timer cannot kill the isolate while this task is still in flight.
        _activeTasks--;
        if (receivedMessage is Map && receivedMessage.containsKey('error')) {
          // Handle the error
          final errorMessage = receivedMessage['error'];
          final errorStackTrace = receivedMessage['stackTrace'];
          final exception = Exception(errorMessage);
          final stackTrace = StackTrace.fromString(errorStackTrace);
          completer.completeError(exception, stackTrace);
        } else {
          completer.complete(receivedMessage);
        }
      });

      return completer.future;
    });
  }

  /// Resets a timer that kills the isolate after a certain amount of inactivity.
  ///
  /// Should be called after initialization (e.g. inside `init()`) and after every call to isolate (e.g. inside `_runInIsolate()`)
  void _resetInactivityTimer() {
    _inactivityTimer?.cancel();
    _inactivityTimer = Timer(_inactivityDuration, () {
      if (_activeTasks > 0) {
        _logger.info('Tasks are still running. Delaying isolate disposal.');
        // Optionally, reschedule the timer to check again later.
        _resetInactivityTimer();
      } else {
        _logger.info(
          'Face ML isolate has been inactive for ${_inactivityDuration.inSeconds} seconds with no tasks running. Killing isolate.',
        );
        disposeIsolate();
      }
    });
  }

  void disposeIsolate() async {
    if (!isIsolateSpawned) return;
    await release();

    isIsolateSpawned = false;
    _isolate.kill();
    _receivePort.close();
    _inactivityTimer?.cancel();
  }
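
  /// Convenience wrapper: first indexes all images ([indexAllImages]) and
  /// then clusters the resulting face embeddings ([clusterAllImages]).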
  Future<void> indexAndClusterAllImages() async {
    // Run the analysis on all images to make sure everything is analyzed
    await indexAllImages();

    // Cluster all the images
    await clusterAllImages();
  }
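
  /// Clusters all face embeddings with a score of at least [minFaceScore]
  /// and stores the resulting faceID-to-clusterID mapping in the database.
  /// With [clusterInBuckets], embeddings are read and clustered in
  /// overlapping buckets instead of all at once.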
  Future<void> clusterAllImages({
    double minFaceScore = kMinHighQualityFaceScore,
    bool clusterInBuckets = false,
  }) async {
    _logger.info("`clusterAllImages()` called");

    try {
      if (clusterInBuckets) {
        // Get a sense of the total number of faces in the database
        final int totalFaces = await FaceMLDataDB.instance
            .getTotalFaceCount(minFaceScore: minFaceScore);

        // read the creation times from Files DB, in a map from fileID to creation time
        final fileIDToCreationTime =
            await FilesDB.instance.getFileIDToCreationTime();

        const int bucketSize = 20000;
        const int batchSize = 20000;
        const int offsetIncrement = 7500;
        int offset = 0;

        while (true) {
          final faceIdToEmbeddingBucket =
              await FaceMLDataDB.instance.getFaceEmbeddingMap(
            minScore: minFaceScore,
            maxFaces: bucketSize,
            offset: offset,
            batchSize: batchSize,
          );
          if (faceIdToEmbeddingBucket.isEmpty) {
            break;
          }
          if (offset > totalFaces) {
            _logger.warning(
              'offset > totalFaces, this should ideally not happen. offset: $offset, totalFaces: $totalFaces',
            );
            break;
          }

          final faceIdToCluster = await FaceClustering.instance.predictLinear(
            faceIdToEmbeddingBucket,
            fileIDToCreationTime: fileIDToCreationTime,
          );
          if (faceIdToCluster == null) {
            _logger.warning("faceIdToCluster is null");
            return;
          }

          await FaceMLDataDB.instance.updateClusterIdToFaceId(faceIdToCluster);

          offset += offsetIncrement;
        }
      } else {
        // Read all the embeddings from the database, in a map from faceID to embedding
        final clusterStartTime = DateTime.now();
        final faceIdToEmbedding =
            await FaceMLDataDB.instance.getFaceEmbeddingMap(
          minScore: minFaceScore,
        );
        final gotFaceEmbeddingsTime = DateTime.now();
        _logger.info(
          'read embeddings ${faceIdToEmbedding.length} in ${gotFaceEmbeddingsTime.difference(clusterStartTime).inMilliseconds} ms',
        );

        // Read the creation times from Files DB, in a map from fileID to creation time
        final fileIDToCreationTime =
            await FilesDB.instance.getFileIDToCreationTime();
        _logger.info('read creation times from FilesDB in '
            '${DateTime.now().difference(gotFaceEmbeddingsTime).inMilliseconds} ms');

        // Cluster the embeddings using the linear clustering algorithm, returning a map from faceID to clusterID
        final faceIdToCluster = await FaceClustering.instance.predictLinear(
          faceIdToEmbedding,
          fileIDToCreationTime: fileIDToCreationTime,
        );
        if (faceIdToCluster == null) {
          _logger.warning("faceIdToCluster is null");
          return;
        }
        final clusterDoneTime = DateTime.now();
        _logger.info(
          'done with clustering ${faceIdToEmbedding.length} in ${clusterDoneTime.difference(clusterStartTime).inSeconds} seconds ',
        );

        // Store the updated clusterIDs in the database
        _logger.info(
          'Updating ${faceIdToCluster.length} FaceIDs with clusterIDs in the DB',
        );
        await FaceMLDataDB.instance.updateClusterIdToFaceId(faceIdToCluster);
        _logger.info('Done updating FaceIDs with clusterIDs in the DB, in '
            '${DateTime.now().difference(clusterDoneTime).inSeconds} seconds');
      }
    } catch (e, s) {
      _logger.severe("`clusterAllImages` failed", e, s);
    }
  }
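
  /// Decides whether a remotely fetched embedding should be discarded (and
  /// the file re-indexed locally): true when the remote version is older than
  /// [faceMlVersion], when the landmarks are degenerate (every landmark has
  /// x == y), or when the original image dimensions are missing.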
  bool shouldDiscardRemoteEmbedding(FileMl fileMl) {
    if (fileMl.faceEmbedding.version < faceMlVersion) {
      debugPrint("Discarding remote embedding for fileID ${fileMl.fileID} "
          "because version is ${fileMl.faceEmbedding.version} and we need $faceMlVersion");
      return true;
    }
    // Landmarks where every point has x == y are degenerate and indicate a
    // bad remote embedding, so those are discarded. A file with no faces is
    // fine and is handled by the caller.
    bool allLandmarksEqual = true;
    if (fileMl.faceEmbedding.faces.isEmpty) {
      allLandmarksEqual = false;
    }
    for (final face in fileMl.faceEmbedding.faces) {
      if (face.detection.landmarks
          .any((landmark) => landmark.x != landmark.y)) {
        allLandmarksEqual = false;
        break;
      }
    }
    if (allLandmarksEqual) {
      debugPrint("Discarding remote embedding for fileID ${fileMl.fileID} "
          "because all landmarks are equal");
      return true;
    }
    if (fileMl.width == null || fileMl.height == null) {
      debugPrint("Discarding remote embedding for fileID ${fileMl.fileID} "
          "because width or height is null");
      return true;
    }
    return false;
  }

  /// Analyzes all the images in the database with the latest ml version and stores the results in the database.
  ///
  /// This function first checks if the image has already been analyzed with the latest faceMlVersion and stored in the database. If so, it skips the image.
  Future<void> indexAllImages() async {
    if (isImageIndexRunning) {
      _logger.warning("indexAllImages is already running, skipping");
      return;
    }
    // verify indexing is enabled
    if (LocalSettings.instance.isFaceIndexingEnabled == false) {
      _logger.warning("indexAllImages is disabled");
      return;
    }
    try {
      isImageIndexRunning = true;
      _logger.info('starting image indexing');
      final List<EnteFile> enteFiles =
          await SearchService.instance.getAllFiles();
      final Map<int, int> alreadyIndexedFiles =
          await FaceMLDataDB.instance.getIndexedFileIds();

      // Make sure the image conversion isolate is spawned
      // await ImageMlIsolate.instance.ensureSpawned();
      await ensureInitialized();

      int fileAnalyzedCount = 0;
      int fileSkippedCount = 0;
      final stopwatch = Stopwatch()..start();
      final List<EnteFile> filesWithLocalID = <EnteFile>[];
      final List<EnteFile> filesWithoutLocalID = <EnteFile>[];
      for (final EnteFile enteFile in enteFiles) {
        if (_skipAnalysisEnteFile(enteFile, alreadyIndexedFiles)) {
          fileSkippedCount++;
          continue;
        }
        if ((enteFile.localID ?? '').isEmpty) {
          filesWithoutLocalID.add(enteFile);
        } else {
          filesWithLocalID.add(enteFile);
        }
      }

      // list of files where files with localID are first
      final sortedBylocalID = <EnteFile>[];
      sortedBylocalID.addAll(filesWithLocalID);
      sortedBylocalID.addAll(filesWithoutLocalID);
      final List<List<EnteFile>> chunks = sortedBylocalID.chunks(kParallelism);
      outerLoop:
      for (final chunk in chunks) {
        final futures = <Future<bool>>[];
        final List<int> fileIds = [];
        // Try to find embeddings on the remote server
        for (final f in chunk) {
          fileIds.add(f.uploadedFileID!);
        }
        try {
          final EnteWatch? w = kDebugMode ? EnteWatch("face_em_fetch") : null;
          w?.start();
          w?.log('starting remote fetch for ${fileIds.length} files');
          final res =
              await RemoteFileMLService.instance.getFilessEmbedding(fileIds);
          w?.logAndReset('fetched ${res.mlData.length} embeddings');
          final List<Face> faces = [];
          final remoteFileIdToVersion = <int, int>{};
          for (FileMl fileMl in res.mlData.values) {
            if (shouldDiscardRemoteEmbedding(fileMl)) continue;
            if (fileMl.faceEmbedding.faces.isEmpty) {
              faces.add(
                Face.empty(
                  fileMl.fileID,
                  error: (fileMl.faceEmbedding.error ?? false),
                ),
              );
            } else {
              for (final f in fileMl.faceEmbedding.faces) {
                f.fileInfo = FileInfo(
                  imageHeight: fileMl.height,
                  imageWidth: fileMl.width,
                );
                faces.add(f);
              }
            }
            remoteFileIdToVersion[fileMl.fileID] = fileMl.faceEmbedding.version;
          }

          await FaceMLDataDB.instance.bulkInsertFaces(faces);
          w?.logAndReset('stored embeddings');

          for (final entry in remoteFileIdToVersion.entries) {
            alreadyIndexedFiles[entry.key] = entry.value;
          }
          _logger.info('already indexed files ${remoteFileIdToVersion.length}');
        } catch (e, s) {
          _logger.severe("err while getting files embeddings", e, s);
          rethrow;
        }

        for (final enteFile in chunk) {
          if (isImageIndexRunning == false) {
            _logger.info("indexAllImages() was paused, stopping");
            break outerLoop;
          }
          if (_skipAnalysisEnteFile(
            enteFile,
            alreadyIndexedFiles,
          )) {
            fileSkippedCount++;
            continue;
          }
          futures.add(processImage(enteFile));
        }
        final awaitedFutures = await Future.wait(futures);
        final sumFutures = awaitedFutures.fold<int>(
          0,
          (previousValue, element) => previousValue + (element ? 1 : 0),
        );
        fileAnalyzedCount += sumFutures;

        // TODO: remove this cooldown later. Cooldown of one minute every 400 images
        if (fileAnalyzedCount > 400 && fileAnalyzedCount % 400 < kParallelism) {
          _logger.info(
            "indexAllImages() analyzed $fileAnalyzedCount images, cooldown for 1 minute",
          );
          await Future.delayed(const Duration(minutes: 1));
        }
      }

      stopwatch.stop();
      _logger.info(
        "`indexAllImages()` finished. Analyzed $fileAnalyzedCount images, in ${stopwatch.elapsed.inSeconds} seconds (avg of ${stopwatch.elapsed.inSeconds / fileAnalyzedCount} seconds per image, skipped $fileSkippedCount images)",
      );

      // Dispose of all the isolates
      // ImageMlIsolate.instance.dispose();
      // await release();
    } catch (e, s) {
      _logger.severe("indexAllImages failed", e, s);
    } finally {
      isImageIndexRunning = false;
    }
  }
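
  /// Runs the full face ML pipeline on [enteFile] in the dedicated isolate
  /// and stores the resulting faces remotely and in the local database.
  /// Returns false only when no result was produced (e.g. indexing was paused).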
  Future<bool> processImage(EnteFile enteFile) async {
    _logger.info(
      "`indexAllImages()`: start processing image with uploadedFileID: ${enteFile.uploadedFileID}",
    );

    try {
      final FaceMlResult? result = await analyzeImageInSingleIsolate(
        enteFile,
        // preferUsingThumbnailForEverything: false,
        // disposeImageIsolateAfterUse: false,
      );
      if (result == null) {
        return false;
      }
      final List<Face> faces = [];
      if (!result.hasFaces) {
        debugPrint(
          'No faces detected for file with name:${enteFile.displayName}',
        );
        faces.add(
          Face(
            '${result.fileId}-0',
            result.fileId,
            <double>[],
            result.errorOccured ? -1.0 : 0.0,
            face_detection.Detection.empty(),
            0.0,
          ),
        );
      } else {
        if (result.decodedImageSize.width == -1 ||
            result.decodedImageSize.height == -1) {
          _logger
              .severe("decodedImageSize is not stored correctly for image with "
                  "ID: ${enteFile.uploadedFileID}");
          _logger.info(
            "Using aligned image size for image with ID: ${enteFile.uploadedFileID}. This size is ${result.decodedImageSize.width}x${result.decodedImageSize.height} compared to size of ${enteFile.width}x${enteFile.height} in the metadata",
          );
        }
        for (int i = 0; i < result.faces.length; ++i) {
          final FaceResult faceRes = result.faces[i];
          final detection = face_detection.Detection(
            box: FaceBox(
              xMin: faceRes.detection.xMinBox,
              yMin: faceRes.detection.yMinBox,
              width: faceRes.detection.width,
              height: faceRes.detection.height,
            ),
            landmarks: faceRes.detection.allKeypoints
                .map(
                  (keypoint) => Landmark(
                    x: keypoint[0],
                    y: keypoint[1],
                  ),
                )
                .toList(),
          );
          faces.add(
            Face(
              faceRes.faceId,
              result.fileId,
              faceRes.embedding,
              faceRes.detection.score,
              detection,
              faceRes.blurValue,
              fileInfo: FileInfo(
                imageHeight: result.decodedImageSize.height,
                imageWidth: result.decodedImageSize.width,
              ),
            ),
          );
        }
      }
      _logger.info("inserting ${faces.length} faces for ${result.fileId}");
      await RemoteFileMLService.instance.putFileEmbedding(
        enteFile,
        FileMl(
          enteFile.uploadedFileID!,
          FaceEmbeddings(
            faces,
            result.mlVersion,
            error: result.errorOccured ? true : null,
          ),
          height: result.decodedImageSize.height,
          width: result.decodedImageSize.width,
        ),
      );
      await FaceMLDataDB.instance.bulkInsertFaces(faces);
      return true;
    } catch (e, s) {
      _logger.severe(
        "Failed to analyze using FaceML for image with ID: ${enteFile.uploadedFileID}",
        e,
        s,
      );
      return true;
    }
  }
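
  /// Pauses any running [indexAllImages] loop; in-flight chunk work finishes
  /// and the loop then exits.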
  void pauseIndexing() {
    isImageIndexRunning = false;
  }

  /// Analyzes the given image data by running the full pipeline (face detection, face alignment, face embedding).
  ///
  /// [enteFile] The ente file to analyze.
  ///
  /// [preferUsingThumbnailForEverything] If true, the thumbnail will be used for everything (face detection, face alignment, face embedding), and file data will be used only if a thumbnail is unavailable.
  /// If false, thumbnail will only be used for detection, and the original image will be used for face alignment and face embedding.
  ///
  /// Returns an immutable [FaceMlResult] instance containing the results of the analysis.
  /// Does not store the result in the database, for that you should use [indexImage].
  /// Throws [CouldNotRetrieveAnyFileData] or [GeneralFaceMlException] if something goes wrong.
  /// TODO: improve function such that it only uses full image if it is already on the device, otherwise it uses thumbnail. And make sure to store what is used!
  Future<FaceMlResult> analyzeImageInComputerAndImageIsolate(
    EnteFile enteFile, {
    bool preferUsingThumbnailForEverything = false,
    bool disposeImageIsolateAfterUse = true,
  }) async {
    _checkEnteFileForID(enteFile);

    final String? thumbnailPath = await _getImagePathForML(
      enteFile,
      typeOfData: FileDataForML.thumbnailData,
    );
    String? filePath;

    // // TODO: remove/optimize this later. Not now though: premature optimization
    // fileData =
    //     await _getDataForML(enteFile, typeOfData: FileDataForML.fileData);

    if (thumbnailPath == null) {
      filePath = await _getImagePathForML(
        enteFile,
        typeOfData: FileDataForML.fileData,
      );
      if (filePath == null) {
        _logger.severe(
          "Failed to get any data for enteFile with uploadedFileID ${enteFile.uploadedFileID}",
        );
        throw CouldNotRetrieveAnyFileData();
      }
    }

    // TODO: use smallData and largeData instead of thumbnailData and fileData again!
    final String smallDataPath = thumbnailPath ?? filePath!;

    final resultBuilder = FaceMlResultBuilder.fromEnteFile(enteFile);
    _logger.info(
      "Analyzing image with uploadedFileID: ${enteFile.uploadedFileID} ${kDebugMode ? enteFile.displayName : ''}",
    );
    final stopwatch = Stopwatch()..start();

    try {
      // Get the faces
      final List<FaceDetectionRelative> faceDetectionResult =
          await _detectFacesIsolate(
        smallDataPath,
        resultBuilder: resultBuilder,
      );
      _logger.info("Completed `detectFaces` function");

      // If no faces were detected, return a result with no faces. Otherwise, continue.
      if (faceDetectionResult.isEmpty) {
        _logger.info(
            "No faceDetectionResult, Completed analyzing image with uploadedFileID ${enteFile.uploadedFileID}, in "
            "${stopwatch.elapsedMilliseconds} ms");
        return resultBuilder.buildNoFaceDetected();
      }

      if (!preferUsingThumbnailForEverything) {
        filePath ??= await _getImagePathForML(
          enteFile,
          typeOfData: FileDataForML.fileData,
        );
      }
      resultBuilder.onlyThumbnailUsed = filePath == null;
      final String largeDataPath = filePath ?? thumbnailPath!;

      // Align the faces
      final Float32List faceAlignmentResult = await _alignFaces(
        largeDataPath,
        faceDetectionResult,
        resultBuilder: resultBuilder,
      );
      _logger.info("Completed `alignFaces` function");

      // Get the embeddings of the faces
      final embeddings = await _embedFaces(
        faceAlignmentResult,
        resultBuilder: resultBuilder,
      );
      _logger.info("Completed `embedBatchFaces` function");

      stopwatch.stop();
      _logger.info("Finished Analyze image (${embeddings.length} faces) with "
          "uploadedFileID ${enteFile.uploadedFileID}, in "
          "${stopwatch.elapsedMilliseconds} ms");

      if (disposeImageIsolateAfterUse) {
        // Close the image conversion isolate
        ImageMlIsolate.instance.dispose();
      }

      return resultBuilder.build();
    } catch (e, s) {
      _logger.severe(
        "Could not analyze image with ID ${enteFile.uploadedFileID} \n",
        e,
        s,
      );
      // throw GeneralFaceMlException("Could not analyze image");
      return resultBuilder.buildErrorOccurred();
    }
  }
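
  /// Runs the full pipeline on [enteFile] inside the dedicated
  /// [FlutterIsolate] (see [_isolateMain]). Returns null when indexing has
  /// been paused, and an error result if the analysis throws.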
  Future<FaceMlResult?> analyzeImageInSingleIsolate(EnteFile enteFile) async {
    _checkEnteFileForID(enteFile);
    await ensureInitialized();

    final String? filePath =
        await _getImagePathForML(enteFile, typeOfData: FileDataForML.fileData);

    if (filePath == null) {
      _logger.severe(
        "Failed to get any data for enteFile with uploadedFileID ${enteFile.uploadedFileID}",
      );
      throw CouldNotRetrieveAnyFileData();
    }

    final Stopwatch stopwatch = Stopwatch()..start();
    late FaceMlResult result;
    try {
      final resultJsonString = await _runInIsolate(
        (
          FaceMlOperation.analyzeImage,
          {
            "enteFileID": enteFile.uploadedFileID ?? -1,
            "filePath": filePath,
            "faceDetectionAddress":
                YoloOnnxFaceDetection.instance.sessionAddress,
            "faceEmbeddingAddress": FaceEmbeddingOnnx.instance.sessionAddress,
          }
        ),
      ) as String?;
      if (resultJsonString == null) {
        return null;
      }
      result = FaceMlResult.fromJsonString(resultJsonString);
    } catch (e, s) {
      _logger.severe(
        "Could not analyze image with ID ${enteFile.uploadedFileID} \n",
        e,
        s,
      );
      debugPrint(
        "This image with ID ${enteFile.uploadedFileID} has name ${enteFile.displayName}.",
      );
      final resultBuilder = FaceMlResultBuilder.fromEnteFile(enteFile);
      return resultBuilder.buildErrorOccurred();
    }
    stopwatch.stop();
    _logger.info(
      "Finished Analyze image (${result.faces.length} faces) with uploadedFileID ${enteFile.uploadedFileID}, in "
      "${stopwatch.elapsedMilliseconds} ms",
    );

    return result;
  }
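
  /// Returns a local file path for the requested representation of
  /// [enteFile] (original file or thumbnail), or null if it could not be
  /// retrieved.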
  Future<String?> _getImagePathForML(
    EnteFile enteFile, {
    FileDataForML typeOfData = FileDataForML.fileData,
  }) async {
    String? imagePath;

    switch (typeOfData) {
      case FileDataForML.fileData:
        final stopwatch = Stopwatch()..start();
        final File? file = await getFile(enteFile, isOrigin: true);
        if (file == null) {
          _logger.warning("Could not get file for $enteFile");
          imagePath = null;
          break;
        }
        imagePath = file.path;
        stopwatch.stop();
        _logger.info(
          "Getting file data for uploadedFileID ${enteFile.uploadedFileID} took ${stopwatch.elapsedMilliseconds} ms",
        );
        break;

      case FileDataForML.thumbnailData:
        final stopwatch = Stopwatch()..start();
        final File? thumbnail = await getThumbnailForUploadedFile(enteFile);
        if (thumbnail == null) {
          _logger.warning("Could not get thumbnail for $enteFile");
          imagePath = null;
          break;
        }
        imagePath = thumbnail.path;
        stopwatch.stop();
        _logger.info(
          "Getting thumbnail data for uploadedFileID ${enteFile.uploadedFileID} took ${stopwatch.elapsedMilliseconds} ms",
        );
        break;

      case FileDataForML.compressedFileData:
        _logger.warning(
          "Getting compressed file data for uploadedFileID ${enteFile.uploadedFileID} is not implemented yet",
        );
        imagePath = null;
        break;
    }

    return imagePath;
  }

  @Deprecated('Deprecated in favor of `_getImagePathForML`')
  Future<Uint8List?> _getDataForML(
    EnteFile enteFile, {
    FileDataForML typeOfData = FileDataForML.fileData,
  }) async {
    Uint8List? data;

    switch (typeOfData) {
      case FileDataForML.fileData:
        final stopwatch = Stopwatch()..start();
        final File? actualIoFile = await getFile(enteFile, isOrigin: true);
        if (actualIoFile != null) {
          data = await actualIoFile.readAsBytes();
        }
        stopwatch.stop();
        _logger.info(
          "Getting file data for uploadedFileID ${enteFile.uploadedFileID} took ${stopwatch.elapsedMilliseconds} ms",
        );
        break;

      case FileDataForML.thumbnailData:
        final stopwatch = Stopwatch()..start();
        data = await getThumbnail(enteFile);
        stopwatch.stop();
        _logger.info(
          "Getting thumbnail data for uploadedFileID ${enteFile.uploadedFileID} took ${stopwatch.elapsedMilliseconds} ms",
        );
        break;

      case FileDataForML.compressedFileData:
        final stopwatch = Stopwatch()..start();
        final String tempPath = Configuration.instance.getTempDirectory() +
            "${enteFile.uploadedFileID!}";
        final File? actualIoFile = await getFile(enteFile);
        if (actualIoFile != null) {
          final compressResult = await FlutterImageCompress.compressAndGetFile(
            actualIoFile.path,
            tempPath + ".jpg",
          );
          if (compressResult != null) {
            data = await compressResult.readAsBytes();
          }
        }
        stopwatch.stop();
        _logger.info(
          "Getting compressed file data for uploadedFileID ${enteFile.uploadedFileID} took ${stopwatch.elapsedMilliseconds} ms",
        );
        break;
    }

    return data;
  }

  /// Detects faces in the image at the given path.
  ///
  /// `imagePath`: The path of the image to analyze.
  ///
  /// Returns a list of face detection results.
  ///
  /// Throws [CouldNotInitializeFaceDetector], [CouldNotRunFaceDetector] or [GeneralFaceMlException] if something goes wrong.
  Future<List<FaceDetectionRelative>> _detectFacesIsolate(
    String imagePath,
    // Uint8List fileData,
    {
    FaceMlResultBuilder? resultBuilder,
  }) async {
    try {
      // Get the bounding boxes of the faces
      final (List<FaceDetectionRelative> faces, dataSize) =
          await YoloOnnxFaceDetection.instance.predictInComputer(imagePath);

      // Add detected faces to the resultBuilder
      if (resultBuilder != null) {
        resultBuilder.addNewlyDetectedFaces(faces, dataSize);
      }

      return faces;
    } on YOLOInterpreterInitializationException {
      throw CouldNotInitializeFaceDetector();
    } on YOLOInterpreterRunException {
      throw CouldNotRunFaceDetector();
    } catch (e) {
      _logger.severe('Face detection failed: $e');
      throw GeneralFaceMlException('Face detection failed: $e');
    }
  }

  /// Detects faces in the given image data.
  ///
  /// `image` and `imageByteData`: The decoded image to analyze and its raw byte data.
  ///
  /// Returns a list of face detection results.
  ///
  /// Throws [CouldNotInitializeFaceDetector], [CouldNotRunFaceDetector] or [GeneralFaceMlException] if something goes wrong.
  static Future<List<FaceDetectionRelative>> detectFacesSync(
    Image image,
    ByteData imageByteData,
    int interpreterAddress, {
    FaceMlResultBuilder? resultBuilder,
  }) async {
    try {
      // Get the bounding boxes of the faces
      final (List<FaceDetectionRelative> faces, dataSize) =
          await YoloOnnxFaceDetection.predictSync(
        image,
        imageByteData,
        interpreterAddress,
      );

      // Add detected faces to the resultBuilder
      if (resultBuilder != null) {
        resultBuilder.addNewlyDetectedFaces(faces, dataSize);
      }

      return faces;
    } on YOLOInterpreterInitializationException {
      throw CouldNotInitializeFaceDetector();
    } on YOLOInterpreterRunException {
      throw CouldNotRunFaceDetector();
    } catch (e) {
      dev.log('[SEVERE] Face detection failed: $e');
      throw GeneralFaceMlException('Face detection failed: $e');
    }
  }

  /// Aligns multiple faces from the image at the given path.
  ///
  /// `imagePath`: The path of the image that contains the faces.
  /// `faces`: The face detection results in a list of [FaceDetectionRelative] for the faces to align.
  ///
  /// Returns a list of the aligned faces as image data.
  ///
  /// Throws [CouldNotWarpAffine] or [GeneralFaceMlException] if the face alignment fails.
  Future<Float32List> _alignFaces(
    String imagePath,
    List<FaceDetectionRelative> faces, {
    FaceMlResultBuilder? resultBuilder,
  }) async {
    try {
      final (alignedFaces, alignmentResults, _, blurValues, _) =
          await ImageMlIsolate.instance
              .preprocessMobileFaceNetOnnx(imagePath, faces);

      if (resultBuilder != null) {
        resultBuilder.addAlignmentResults(
          alignmentResults,
          blurValues,
        );
      }

      return alignedFaces;
    } catch (e, s) {
      _logger.severe('Face alignment failed: $e', e, s);
      throw CouldNotWarpAffine();
    }
  }

  /// Aligns multiple faces from the given image data.
  ///
  /// `image` and `imageByteData`: The decoded image that contains the faces and its raw byte data.
  /// `faces`: The face detection results in a list of [FaceDetectionRelative] for the faces to align.
  ///
  /// Returns a list of the aligned faces as image data.
  ///
  /// Throws [CouldNotWarpAffine] or [GeneralFaceMlException] if the face alignment fails.
  static Future<Float32List> alignFacesSync(
    Image image,
    ByteData imageByteData,
    List<FaceDetectionRelative> faces, {
    FaceMlResultBuilder? resultBuilder,
  }) async {
    try {
      final stopwatch = Stopwatch()..start();
      final (alignedFaces, alignmentResults, _, blurValues, _) =
          await preprocessToMobileFaceNetFloat32List(
        image,
        imageByteData,
        faces,
      );
      stopwatch.stop();
      dev.log(
        "Face alignment image decoding and processing took ${stopwatch.elapsedMilliseconds} ms",
      );

      if (resultBuilder != null) {
        resultBuilder.addAlignmentResults(
          alignmentResults,
          blurValues,
        );
      }

      return alignedFaces;
    } catch (e, s) {
      dev.log('[SEVERE] Face alignment failed: $e $s');
      throw CouldNotWarpAffine();
    }
  }

  /// Embeds multiple faces from the given input matrices.
  ///
  /// `facesList`: The input matrices of the faces to embed, flattened into a single [Float32List].
  ///
  /// Returns a list of the face embeddings as lists of doubles.
  ///
  /// Throws [CouldNotInitializeFaceEmbeddor], [CouldNotRunFaceEmbeddor], [InputProblemFaceEmbeddor] or [GeneralFaceMlException] if the face embedding fails.
  Future<List<List<double>>> _embedFaces(
    Float32List facesList, {
    FaceMlResultBuilder? resultBuilder,
  }) async {
    try {
      // Get the embedding of the faces
      final List<List<double>> embeddings =
          await FaceEmbeddingOnnx.instance.predictInComputer(facesList);

      // Add the embeddings to the resultBuilder
      if (resultBuilder != null) {
        resultBuilder.addEmbeddingsToExistingFaces(embeddings);
      }

      return embeddings;
    } on MobileFaceNetInterpreterInitializationException {
      throw CouldNotInitializeFaceEmbeddor();
    } on MobileFaceNetInterpreterRunException {
      throw CouldNotRunFaceEmbeddor();
    } on MobileFaceNetEmptyInput {
      throw InputProblemFaceEmbeddor("Input is empty");
    } on MobileFaceNetWrongInputSize {
      throw InputProblemFaceEmbeddor("Input size is wrong");
    } on MobileFaceNetWrongInputRange {
      throw InputProblemFaceEmbeddor("Input range is wrong");
      // ignore: avoid_catches_without_on_clauses
    } catch (e) {
      _logger.severe('Face embedding (batch) failed: $e');
      throw GeneralFaceMlException('Face embedding (batch) failed: $e');
    }
  }
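
  /// Same as [_embedFaces], but static and using the ONNX session at
  /// [interpreterAddress]; intended to be called from within the analysis
  /// isolate.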
  static Future<List<List<double>>> embedFacesSync(
    Float32List facesList,
    int interpreterAddress, {
    FaceMlResultBuilder? resultBuilder,
  }) async {
    try {
      // Get the embedding of the faces
      final List<List<double>> embeddings =
          await FaceEmbeddingOnnx.predictSync(facesList, interpreterAddress);

      // Add the embeddings to the resultBuilder
      if (resultBuilder != null) {
        resultBuilder.addEmbeddingsToExistingFaces(embeddings);
      }

      return embeddings;
    } on MobileFaceNetInterpreterInitializationException {
      throw CouldNotInitializeFaceEmbeddor();
    } on MobileFaceNetInterpreterRunException {
      throw CouldNotRunFaceEmbeddor();
    } on MobileFaceNetEmptyInput {
      throw InputProblemFaceEmbeddor("Input is empty");
    } on MobileFaceNetWrongInputSize {
      throw InputProblemFaceEmbeddor("Input size is wrong");
    } on MobileFaceNetWrongInputRange {
      throw InputProblemFaceEmbeddor("Input range is wrong");
      // ignore: avoid_catches_without_on_clauses
    } catch (e) {
      dev.log('[SEVERE] Face embedding (batch) failed: $e');
      throw GeneralFaceMlException('Face embedding (batch) failed: $e');
    }
  }

  /// Checks if the ente file to be analyzed actually can be analyzed: it must be uploaded and in the correct format.
  void _checkEnteFileForID(EnteFile enteFile) {
    if (_skipAnalysisEnteFile(enteFile, <int, int>{})) {
      _logger.severe(
        "Skipped analysis of image with enteFile ${enteFile.toString()} because it is the wrong format or has no uploadedFileID",
      );
      throw CouldNotRetrieveAnyFileData();
    }
  }
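
  /// Returns true when [enteFile] should be skipped: indexing is paused, the
  /// file is not uploaded or not owned by the user, it is a video or an
  /// unsupported type, or it was already indexed with a recent enough
  /// [faceMlVersion].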
  bool _skipAnalysisEnteFile(EnteFile enteFile, Map<int, int> indexedFileIds) {
    if (isImageIndexRunning == false) {
      return true;
    }
    // Skip if the file is not uploaded or not owned by the user
    if (!enteFile.isUploaded || enteFile.isOwner == false) {
      return true;
    }
    // Skip if the file is a video
    if (enteFile.fileType == FileType.video) {
      return true;
    }
    // I don't know how motionPhotos and livePhotos work, so I'm also just skipping them for now
    if (enteFile.fileType == FileType.other) {
      return true;
    }
    // Skip if the file is already analyzed with the latest ml version
    final id = enteFile.uploadedFileID!;

    return indexedFileIds.containsKey(id) &&
        indexedFileIds[id]! >= faceMlVersion;
  }
}