Merge branch 'mobile_face' of https://github.com/ente-io/auth into mobile_face

This commit is contained in:
Neeraj Gupta 2024-03-28 16:13:25 +05:30
commit 9aafe137a1
49 changed files with 1835 additions and 207 deletions

View file

@ -1304,6 +1304,23 @@ class FilesDB {
return result;
}
/// Maps each uploaded file's ID to that file's creation time.
///
/// Rows without a valid uploaded file ID (NULL or -1) are excluded, so
/// every key in the returned map is a real remote file ID.
Future<Map<int,int>> getFileIDToCreationTime() async {
  final db = await instance.database;
  final rows = await db.rawQuery(
    '''
SELECT $columnUploadedFileID, $columnCreationTime
FROM $filesTable
WHERE
($columnUploadedFileID IS NOT NULL AND $columnUploadedFileID IS NOT -1);
''',
  );
  // Build the map directly with a collection-for instead of a mutating loop.
  return <int, int>{
    for (final row in rows)
      row[columnUploadedFileID] as int: row[columnCreationTime] as int,
  };
}
// getCollectionFileFirstOrLast returns the first or last uploaded file in
// the collection based on the given collectionID and the order.
Future<EnteFile?> getCollectionFileFirstOrLast(

View file

@ -4,9 +4,9 @@ import 'package:logging/logging.dart';
import 'package:path/path.dart' show join;
import 'package:path_provider/path_provider.dart';
import 'package:photos/models/ml/ml_typedefs.dart';
import "package:photos/services/face_ml/face_feedback.dart/cluster_feedback.dart";
import "package:photos/services/face_ml/face_feedback.dart/feedback_types.dart";
import "package:photos/services/face_ml/face_ml_result.dart";
import 'package:photos/services/machine_learning/face_ml/face_feedback.dart/cluster_feedback.dart';
import 'package:photos/services/machine_learning/face_ml/face_feedback.dart/feedback_types.dart';
import 'package:photos/services/machine_learning/face_ml/face_ml_result.dart';
import 'package:sqflite/sqflite.dart';
/// Stores all data for the ML-related features. The database can be accessed by `MlDataDB.instance.database`.

View file

@ -11,7 +11,7 @@ import "package:photos/face/db_model_mappers.dart";
import "package:photos/face/model/face.dart";
import "package:photos/face/model/person.dart";
import "package:photos/models/file/file.dart";
import "package:photos/services/face_ml/blur_detection/blur_constants.dart";
import 'package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart';
import 'package:sqflite/sqflite.dart';
/// Stores all data for the ML-related features. The database can be accessed by `MlDataDB.instance.database`.
@ -185,7 +185,7 @@ class FaceMLDataDB {
final Map<int, int> result = {};
final db = await instance.database;
final List<Map<String, dynamic>> maps = await db.rawQuery(
'SELECT $fileIDColumn, COUNT(*) as count FROM $facesTable where $faceScore > 0.8 GROUP BY $fileIDColumn',
'SELECT $fileIDColumn, COUNT(*) as count FROM $facesTable where $faceScore > $kMinFaceDetectionScore GROUP BY $fileIDColumn',
);
for (final map in maps) {
@ -228,7 +228,7 @@ class FaceMLDataDB {
final clusterIDs =
cluterRows.map((e) => e[cluserIDColumn] as int).toList();
final List<Map<String, dynamic>> faceMaps = await db.rawQuery(
'SELECT * FROM $facesTable where $faceClusterId IN (${clusterIDs.join(",")}) AND $fileIDColumn in (${fileId.join(",")}) AND $faceScore > 0.8 ORDER BY $faceScore DESC',
'SELECT * FROM $facesTable where $faceClusterId IN (${clusterIDs.join(",")}) AND $fileIDColumn in (${fileId.join(",")}) AND $faceScore > $kMinHighQualityFaceScore ORDER BY $faceScore DESC',
);
if (faceMaps.isNotEmpty) {
if (avatarFileId != null) {
@ -257,7 +257,7 @@ class FaceMLDataDB {
return null;
}
Future<List<Face>> getFacesForGivenFileID(int fileUploadID) async {
Future<List<Face>?> getFacesForGivenFileID(int fileUploadID) async {
final db = await instance.database;
final List<Map<String, dynamic>> maps = await db.query(
facesTable,
@ -277,6 +277,9 @@ class FaceMLDataDB {
where: '$fileIDColumn = ?',
whereArgs: [fileUploadID],
);
if (maps.isEmpty) {
return null;
}
return maps.map((e) => mapRowToFace(e)).toList();
}
@ -347,17 +350,17 @@ class FaceMLDataDB {
///
/// Only selects faces with score greater than [minScore] and blur score greater than [minClarity]
Future<Map<String, (int?, Uint8List)>> getFaceEmbeddingMap({
double minScore = 0.78,
double minScore = kMinHighQualityFaceScore,
int minClarity = kLaplacianThreshold,
int maxRows = 10000,
int maxFaces = 20000,
int offset = 0,
int batchSize = 10000,
}) async {
_logger.info('reading as float');
_logger.info(
'reading as float offset: $offset, maxFaces: $maxFaces, batchSize: $batchSize',
);
final db = await instance.database;
// Define the batch size
const batchSize = 10000;
int offset = 0;
final Map<String, (int?, Uint8List)> result = {};
while (true) {
// Query a batch of rows
@ -379,7 +382,7 @@ class FaceMLDataDB {
result[faceID] =
(map[faceClusterId] as int?, map[faceEmbeddingBlob] as Uint8List);
}
if (result.length >= maxRows) {
if (result.length >= maxFaces) {
break;
}
offset += batchSize;
@ -404,7 +407,7 @@ class FaceMLDataDB {
facesTable,
columns: [faceIDColumn, faceEmbeddingBlob],
where:
'$faceScore > 0.8 AND $faceBlur > $kLaplacianThreshold AND $fileIDColumn IN (${fileIDs.join(",")})',
'$faceScore > $kMinHighQualityFaceScore AND $faceBlur > $kLaplacianThreshold AND $fileIDColumn IN (${fileIDs.join(",")})',
limit: batchSize,
offset: offset,
orderBy: '$faceIDColumn DESC',
@ -425,6 +428,16 @@ class FaceMLDataDB {
return result;
}
/// Returns the number of faces that pass the quality thresholds.
///
/// Counts rows whose score exceeds [minFaceScore] (defaulting to the
/// high-quality cutoff) and whose blur value exceeds [kLaplacianThreshold].
Future<int> getTotalFaceCount({
  double minFaceScore = kMinHighQualityFaceScore,
}) async {
  final db = await instance.database;
  final rows = await db.rawQuery(
    'SELECT COUNT(*) as count FROM $facesTable WHERE $faceScore > $minFaceScore AND $faceBlur > $kLaplacianThreshold',
  );
  final firstRow = rows.first;
  return firstRow['count'] as int;
}
Future<void> resetClusterIDs() async {
final db = await instance.database;
await db.update(

View file

@ -1,5 +1,5 @@
// Faces Table Fields & Schema Queries
import "package:photos/services/face_ml/blur_detection/blur_constants.dart";
import 'package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart';
const facesTable = 'faces';
const fileIDColumn = 'file_id';

View file

@ -1,5 +1,5 @@
import "package:photos/face/model/detection.dart";
import "package:photos/services/face_ml/blur_detection/blur_constants.dart";
import 'package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart';
class Face {
final int fileID;
@ -11,6 +11,10 @@ class Face {
bool get isBlurry => blur < kLaplacianThreshold;
bool get hasHighScore => score > kMinHighQualityFaceScore;
bool get isHighQuality => (!isBlurry) && hasHighScore;
Face(
this.faceID,
this.fileID,
@ -20,6 +24,17 @@ class Face {
this.blur,
);
/// Creates a placeholder [Face] for [fileID] carrying no real face data.
///
/// When [error] is true the score is set to -1.0 (marking a failed
/// analysis); otherwise it is 0.0. Embedding is empty and the detection
/// is [Detection.empty].
// NOTE(review): the generated faceID uses "-" as the separator
// ("$fileID-0"), while getFileIdFromFaceId parses with split("_") —
// confirm which separator face IDs are supposed to use.
factory Face.empty(int fileID, {bool error = false}) {
return Face(
"$fileID-0",
fileID,
<double>[],
error ? -1.0 : 0.0,
Detection.empty(),
0.0,
);
}
factory Face.fromJson(Map<String, dynamic> json) {
return Face(
json['faceID'] as String,

View file

@ -25,13 +25,13 @@ import 'package:photos/services/app_lifecycle_service.dart';
import 'package:photos/services/billing_service.dart';
import 'package:photos/services/collections_service.dart';
import "package:photos/services/entity_service.dart";
import "package:photos/services/face_ml/face_ml_service.dart";
import 'package:photos/services/favorites_service.dart';
import 'package:photos/services/feature_flag_service.dart';
import 'package:photos/services/home_widget_service.dart';
import 'package:photos/services/local_file_update_service.dart';
import 'package:photos/services/local_sync_service.dart';
import "package:photos/services/location_service.dart";
import 'package:photos/services/machine_learning/face_ml/face_ml_service.dart';
import 'package:photos/services/machine_learning/file_ml/remote_fileml_service.dart';
import "package:photos/services/machine_learning/machine_learning_controller.dart";
import 'package:photos/services/machine_learning/semantic_search/semantic_search_service.dart';

View file

@ -1,2 +0,0 @@
const kLaplacianThreshold = 10;
const kLapacianDefault = 10000.0;

View file

@ -34,6 +34,7 @@ import "package:photos/services/face_ml/face_embedding/face_embedding_exceptions
import 'package:photos/services/face_ml/face_embedding/onnx_face_embedding.dart';
import "package:photos/services/face_ml/face_ml_exceptions.dart";
import "package:photos/services/face_ml/face_ml_result.dart";
import "package:photos/services/machine_learning/face_ml/face_clustering/linear_clustering_service.dart";
import 'package:photos/services/machine_learning/file_ml/file_ml.dart';
import 'package:photos/services/machine_learning/file_ml/remote_fileml_service.dart';
import "package:photos/services/search_service.dart";

View file

@ -1,7 +1,7 @@
import 'dart:math' show atan2;
import 'package:ml_linalg/linalg.dart';
import 'package:photos/extensions/ml_linalg_extensions.dart';
import "package:photos/services/face_ml/face_alignment/alignment_result.dart";
import 'package:photos/services/machine_learning/face_ml/face_alignment/alignment_result.dart';
/// Class to compute the similarity transform between two sets of points.
///

View file

@ -6,7 +6,8 @@ import "dart:typed_data";
import "package:logging/logging.dart";
import "package:photos/generated/protos/ente/common/vector.pb.dart";
import "package:photos/services/face_ml/face_clustering/cosine_distance.dart";
import 'package:photos/services/machine_learning/face_ml/face_clustering/cosine_distance.dart';
import "package:photos/services/machine_learning/face_ml/face_ml_result.dart";
import "package:synchronized/synchronized.dart";
class FaceInfo {
@ -15,10 +16,12 @@ class FaceInfo {
int? clusterId;
String? closestFaceId;
int? closestDist;
int? fileCreationTime;
FaceInfo({
required this.faceID,
required this.embedding,
this.clusterId,
this.fileCreationTime,
});
}
@ -31,7 +34,6 @@ class FaceLinearClustering {
final Duration _inactivityDuration = const Duration(seconds: 30);
int _activeTasks = 0;
final _initLock = Lock();
late Isolate _isolate;
@ -94,7 +96,12 @@ class FaceLinearClustering {
switch (function) {
case ClusterOperation.linearIncrementalClustering:
final input = args['input'] as Map<String, (int?, Uint8List)>;
final result = FaceLinearClustering._runLinearClustering(input);
final fileIDToCreationTime =
args['fileIDToCreationTime'] as Map<int, int>?;
final result = FaceLinearClustering._runLinearClustering(
input,
fileIDToCreationTime: fileIDToCreationTime,
);
sendPort.send(result);
break;
}
@ -124,12 +131,13 @@ class FaceLinearClustering {
final errorStackTrace = receivedMessage['stackTrace'];
final exception = Exception(errorMessage);
final stackTrace = StackTrace.fromString(errorStackTrace);
_activeTasks--;
completer.completeError(exception, stackTrace);
} else {
_activeTasks--;
completer.complete(receivedMessage);
}
});
_activeTasks--;
return completer.future;
}
@ -146,8 +154,8 @@ class FaceLinearClustering {
_resetInactivityTimer();
} else {
_logger.info(
'Clustering Isolate has been inactive for ${_inactivityDuration.inSeconds} seconds with no tasks running. Killing isolate.',
);
'Clustering Isolate has been inactive for ${_inactivityDuration.inSeconds} seconds with no tasks running. Killing isolate.',
);
dispose();
}
});
@ -169,8 +177,9 @@ class FaceLinearClustering {
///
/// WARNING: Make sure to always input data in the same ordering, otherwise the clustering can be less deterministic.
Future<Map<String, int>?> predict(
Map<String, (int?, Uint8List)> input,
) async {
Map<String, (int?, Uint8List)> input, {
Map<int, int>? fileIDToCreationTime,
}) async {
if (input.isEmpty) {
_logger.warning(
"Clustering dataset of embeddings is empty, returning empty list.",
@ -192,7 +201,10 @@ class FaceLinearClustering {
// final Map<String, int> faceIdToCluster =
// await _runLinearClusteringInComputer(input);
final Map<String, int> faceIdToCluster = await _runInIsolate(
(ClusterOperation.linearIncrementalClustering, {'input': input}),
(
ClusterOperation.linearIncrementalClustering,
{'input': input, 'fileIDToCreationTime': fileIDToCreationTime}
),
);
// return _runLinearClusteringInComputer(input);
_logger.info(
@ -205,11 +217,14 @@ class FaceLinearClustering {
}
static Map<String, int> _runLinearClustering(
Map<String, (int?, Uint8List)> x,
) {
Map<String, (int?, Uint8List)> x, {
Map<int, int>? fileIDToCreationTime,
}) {
log(
"[ClusterIsolate] ${DateTime.now()} Copied to isolate ${x.length} faces",
);
// Organize everything into a list of FaceInfo objects
final List<FaceInfo> faceInfos = [];
for (final entry in x.entries) {
faceInfos.add(
@ -217,63 +232,83 @@ class FaceLinearClustering {
faceID: entry.key,
embedding: EVector.fromBuffer(entry.value.$2).values,
clusterId: entry.value.$1,
fileCreationTime:
fileIDToCreationTime?[getFileIdFromFaceId(entry.key)],
),
);
}
// Sort the faceInfos such that the ones with null clusterId are at the end
faceInfos.sort((a, b) {
if (a.clusterId == null && b.clusterId == null) {
return 0;
} else if (a.clusterId == null) {
return 1;
} else if (b.clusterId == null) {
return -1;
} else {
return 0;
}
});
// Count the amount of null values at the end
int nullCount = 0;
for (final faceInfo in faceInfos.reversed) {
if (faceInfo.clusterId == null) {
nullCount++;
} else {
break;
}
}
log(
"[ClusterIsolate] ${DateTime.now()} Clustering $nullCount new faces without clusterId, and ${faceInfos.length - nullCount} faces with clusterId",
);
for (final clusteredFaceInfo
in faceInfos.sublist(0, faceInfos.length - nullCount)) {
assert(clusteredFaceInfo.clusterId != null);
// Sort the faceInfos based on fileCreationTime, in ascending order, so oldest faces are first
if (fileIDToCreationTime != null) {
faceInfos.sort((a, b) {
if (a.fileCreationTime == null && b.fileCreationTime == null) {
return 0;
} else if (a.fileCreationTime == null) {
return 1;
} else if (b.fileCreationTime == null) {
return -1;
} else {
return a.fileCreationTime!.compareTo(b.fileCreationTime!);
}
});
}
final int totalFaces = faceInfos.length;
int clusterID = 1;
if (faceInfos.isNotEmpty) {
faceInfos.first.clusterId = clusterID;
// Sort the faceInfos such that the ones with null clusterId are at the end
final List<FaceInfo> facesWithClusterID = <FaceInfo>[];
final List<FaceInfo> facesWithoutClusterID = <FaceInfo>[];
for (final FaceInfo faceInfo in faceInfos) {
if (faceInfo.clusterId == null) {
facesWithoutClusterID.add(faceInfo);
} else {
facesWithClusterID.add(faceInfo);
}
}
final sortedFaceInfos = <FaceInfo>[];
sortedFaceInfos.addAll(facesWithClusterID);
sortedFaceInfos.addAll(facesWithoutClusterID);
log(
"[ClusterIsolate] ${DateTime.now()} Clustering ${facesWithoutClusterID.length} new faces without clusterId, and ${facesWithClusterID.length} faces with clusterId",
);
// Make sure the first face has a clusterId
final int totalFaces = sortedFaceInfos.length;
int clusterID = 1;
if (sortedFaceInfos.isNotEmpty) {
if (sortedFaceInfos.first.clusterId == null) {
sortedFaceInfos.first.clusterId = clusterID;
} else {
clusterID = sortedFaceInfos.first.clusterId!;
}
} else {
return {};
}
// Start actual clustering
log(
"[ClusterIsolate] ${DateTime.now()} Processing $totalFaces faces",
);
final Map<String, int> newFaceIdToCluster = {};
final stopwatchClustering = Stopwatch()..start();
for (int i = 1; i < totalFaces; i++) {
// Incremental clustering, so we can skip faces that already have a clusterId
if (faceInfos[i].clusterId != null) {
clusterID = max(clusterID, faceInfos[i].clusterId!);
if (sortedFaceInfos[i].clusterId != null) {
clusterID = max(clusterID, sortedFaceInfos[i].clusterId!);
if (i % 250 == 0) {
log("[ClusterIsolate] ${DateTime.now()} First $i faces already had a clusterID");
}
continue;
}
final currentEmbedding = faceInfos[i].embedding;
final currentEmbedding = sortedFaceInfos[i].embedding;
int closestIdx = -1;
double closestDistance = double.infinity;
if (i % 250 == 0) {
log("[ClusterIsolate] ${DateTime.now()} Processing $i faces");
}
for (int j = 0; j < i; j++) {
for (int j = i - 1; j >= 0; j--) {
final double distance = cosineDistForNormVectors(
currentEmbedding,
faceInfos[j].embedding,
sortedFaceInfos[j].embedding,
);
if (distance < closestDistance) {
closestDistance = distance;
@ -282,42 +317,43 @@ class FaceLinearClustering {
}
if (closestDistance < recommendedDistanceThreshold) {
if (faceInfos[closestIdx].clusterId == null) {
if (sortedFaceInfos[closestIdx].clusterId == null) {
// Ideally this should never happen, but just in case log it
log(
" [ClusterIsolate] ${DateTime.now()} Found new cluster $clusterID",
" [ClusterIsolate] [WARNING] ${DateTime.now()} Found new cluster $clusterID",
);
clusterID++;
faceInfos[closestIdx].clusterId = clusterID;
sortedFaceInfos[closestIdx].clusterId = clusterID;
newFaceIdToCluster[sortedFaceInfos[closestIdx].faceID] = clusterID;
}
faceInfos[i].clusterId = faceInfos[closestIdx].clusterId;
sortedFaceInfos[i].clusterId = sortedFaceInfos[closestIdx].clusterId;
newFaceIdToCluster[sortedFaceInfos[i].faceID] =
sortedFaceInfos[closestIdx].clusterId!;
} else {
clusterID++;
faceInfos[i].clusterId = clusterID;
sortedFaceInfos[i].clusterId = clusterID;
newFaceIdToCluster[sortedFaceInfos[i].faceID] = clusterID;
}
}
final Map<String, int> result = {};
for (final faceInfo in faceInfos) {
result[faceInfo.faceID] = faceInfo.clusterId!;
}
stopwatchClustering.stop();
log(
' [ClusterIsolate] ${DateTime.now()} Clustering for ${faceInfos.length} embeddings (${faceInfos[0].embedding.length} size) executed in ${stopwatchClustering.elapsedMilliseconds}ms, clusters $clusterID',
' [ClusterIsolate] ${DateTime.now()} Clustering for ${sortedFaceInfos.length} embeddings (${sortedFaceInfos[0].embedding.length} size) executed in ${stopwatchClustering.elapsedMilliseconds}ms, clusters $clusterID',
);
// return result;
// NOTE: The main clustering logic is done, the following is just filtering and logging
final input = x;
final faceIdToCluster = result;
stopwatchClustering.reset();
stopwatchClustering.start();
// analyze the results
FaceLinearClustering._analyzeClusterResults(sortedFaceInfos);
final Set<String> newFaceIds = <String>{};
input.forEach((key, value) {
if (value.$1 == null) {
newFaceIds.add(key);
}
});
return newFaceIdToCluster;
}
static void _analyzeClusterResults(List<FaceInfo> sortedFaceInfos) {
final stopwatch = Stopwatch()..start();
final Map<String, int> faceIdToCluster = {};
for (final faceInfo in sortedFaceInfos) {
faceIdToCluster[faceInfo.faceID] = faceInfo.clusterId!;
}
// Find faceIDs that are part of a cluster which is larger than 5 and are new faceIDs
final Map<int, int> clusterIdToSize = {};
@ -328,12 +364,6 @@ class FaceLinearClustering {
clusterIdToSize[value] = 1;
}
});
final Map<String, int> faceIdToClusterFiltered = {};
for (final entry in faceIdToCluster.entries) {
if (clusterIdToSize[entry.value]! > 0 && newFaceIds.contains(entry.key)) {
faceIdToClusterFiltered[entry.key] = entry.value;
}
}
// print top 10 cluster ids and their sizes based on the internal cluster id
final clusterIds = faceIdToCluster.values.toSet();
@ -341,7 +371,7 @@ class FaceLinearClustering {
return faceIdToCluster.values.where((id) => id == clusterId).length;
}).toList();
clusterSizes.sort();
// find clusters whose size is graeter than 1
// find clusters whose size is greater than 1
int oneClusterCount = 0;
int moreThan5Count = 0;
int moreThan10Count = 0;
@ -349,43 +379,29 @@ class FaceLinearClustering {
int moreThan50Count = 0;
int moreThan100Count = 0;
for (int i = 0; i < clusterSizes.length; i++) {
if (clusterSizes[i] > 100) {
moreThan100Count++;
}
if (clusterSizes[i] > 50) {
} else if (clusterSizes[i] > 50) {
moreThan50Count++;
}
if (clusterSizes[i] > 20) {
} else if (clusterSizes[i] > 20) {
moreThan20Count++;
}
if (clusterSizes[i] > 10) {
} else if (clusterSizes[i] > 10) {
moreThan10Count++;
}
if (clusterSizes[i] > 5) {
} else if (clusterSizes[i] > 5) {
moreThan5Count++;
}
if (clusterSizes[i] == 1) {
} else if (clusterSizes[i] == 1) {
oneClusterCount++;
}
}
// print the metrics
log(
'[ClusterIsolate] Total clusters ${clusterIds.length}, '
'oneClusterCount $oneClusterCount, '
'moreThan5Count $moreThan5Count, '
'moreThan10Count $moreThan10Count, '
'moreThan20Count $moreThan20Count, '
'moreThan50Count $moreThan50Count, '
'moreThan100Count $moreThan100Count',
"[ClusterIsolate] Total clusters ${clusterIds.length}: \n oneClusterCount $oneClusterCount \n moreThan5Count $moreThan5Count \n moreThan10Count $moreThan10Count \n moreThan20Count $moreThan20Count \n moreThan50Count $moreThan50Count \n moreThan100Count $moreThan100Count",
);
stopwatchClustering.stop();
stopwatch.stop();
log(
"[ClusterIsolate] Clustering additional steps took ${stopwatchClustering.elapsedMilliseconds} ms",
"[ClusterIsolate] Clustering additional analysis took ${stopwatch.elapsedMilliseconds} ms",
);
// log('Top clusters count ${clusterSizes.reversed.take(10).toList()}');
return faceIdToClusterFiltered;
}
}

View file

@ -1,6 +1,6 @@
import 'dart:math' as math show max, min;
import "package:photos/services/face_ml/face_detection/detection.dart";
import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart';
List<FaceDetectionRelative> naiveNonMaxSuppression({
required List<FaceDetectionRelative> detections,

View file

@ -9,10 +9,10 @@ import "package:computer/computer.dart";
import 'package:flutter/material.dart';
import 'package:logging/logging.dart';
import 'package:onnxruntime/onnxruntime.dart';
import "package:photos/services/face_ml/face_detection/detection.dart";
import "package:photos/services/face_ml/face_detection/naive_non_max_suppression.dart";
import "package:photos/services/face_ml/face_detection/yolov5face/yolo_face_detection_exceptions.dart";
import "package:photos/services/face_ml/face_detection/yolov5face/yolo_filter_extract_detections.dart";
import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart';
import 'package:photos/services/machine_learning/face_ml/face_detection/naive_non_max_suppression.dart';
import 'package:photos/services/machine_learning/face_ml/face_detection/yolov5face/yolo_face_detection_exceptions.dart';
import 'package:photos/services/machine_learning/face_ml/face_detection/yolov5face/yolo_filter_extract_detections.dart';
import "package:photos/services/remote_assets_service.dart";
import "package:photos/utils/image_ml_isolate.dart";
import "package:photos/utils/image_ml_util.dart";

View file

@ -1,6 +1,6 @@
import 'dart:developer' as dev show log;
import "package:photos/services/face_ml/face_detection/detection.dart";
import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart';
List<FaceDetectionRelative> yoloOnnxFilterExtractDetections(
double minScoreSigmoidThreshold,

View file

@ -1,5 +1,5 @@
import "package:photos/services/face_ml/face_detection/yolov5face/yolo_face_detection_options.dart";
import "package:photos/services/face_ml/model_file.dart";
import 'package:photos/services/machine_learning/face_ml/face_detection/yolov5face/yolo_face_detection_options.dart';
import 'package:photos/services/machine_learning/face_ml/model_file.dart';
class YOLOModelConfig {
final String modelPath;

View file

@ -6,10 +6,10 @@ import 'dart:typed_data' show Uint8List;
import "package:flutter/foundation.dart";
import "package:logging/logging.dart";
import 'package:photos/models/ml/ml_typedefs.dart';
import "package:photos/services/face_ml/face_detection/detection.dart";
import "package:photos/services/face_ml/face_embedding/face_embedding_exceptions.dart";
import "package:photos/services/face_ml/face_embedding/face_embedding_options.dart";
import "package:photos/services/face_ml/face_embedding/mobilefacenet_model_config.dart";
import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart';
import 'package:photos/services/machine_learning/face_ml/face_embedding/face_embedding_exceptions.dart';
import 'package:photos/services/machine_learning/face_ml/face_embedding/face_embedding_options.dart';
import 'package:photos/services/machine_learning/face_ml/face_embedding/mobilefacenet_model_config.dart';
import 'package:photos/utils/image_ml_isolate.dart';
import 'package:photos/utils/image_ml_util.dart';
import 'package:tflite_flutter/tflite_flutter.dart';

View file

@ -1,5 +1,5 @@
import "package:photos/services/face_ml/face_embedding/face_embedding_options.dart";
import "package:photos/services/face_ml/model_file.dart";
import 'package:photos/services/machine_learning/face_ml/face_embedding/face_embedding_options.dart';
import 'package:photos/services/machine_learning/face_ml/model_file.dart';
class MobileFaceNetModelConfig {
final String modelPath;

View file

@ -5,7 +5,7 @@ import 'dart:typed_data' show Float32List;
import 'package:computer/computer.dart';
import 'package:logging/logging.dart';
import 'package:onnxruntime/onnxruntime.dart';
import "package:photos/services/face_ml/face_detection/detection.dart";
import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart';
import "package:photos/services/remote_assets_service.dart";
import "package:photos/utils/image_ml_isolate.dart";
import "package:synchronized/synchronized.dart";

View file

@ -1,8 +1,8 @@
import "dart:convert";
import "package:photos/services/face_ml/face_clustering/cosine_distance.dart";
import "package:photos/services/face_ml/face_feedback.dart/feedback.dart";
import "package:photos/services/face_ml/face_feedback.dart/feedback_types.dart";
import 'package:photos/services/machine_learning/face_ml/face_clustering/cosine_distance.dart';
import 'package:photos/services/machine_learning/face_ml/face_feedback.dart/feedback.dart';
import 'package:photos/services/machine_learning/face_ml/face_feedback.dart/feedback_types.dart';
abstract class ClusterFeedback extends Feedback {
static final Map<FeedbackType, Function(String)> fromJsonStringRegistry = {

View file

@ -1,8 +1,8 @@
import "package:logging/logging.dart";
import "package:photos/db/ml_data_db.dart";
import "package:photos/services/face_ml/face_detection/detection.dart";
import "package:photos/services/face_ml/face_feedback.dart/cluster_feedback.dart";
import "package:photos/services/face_ml/face_ml_result.dart";
import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart';
import 'package:photos/services/machine_learning/face_ml/face_feedback.dart/cluster_feedback.dart';
import 'package:photos/services/machine_learning/face_ml/face_ml_result.dart';
class FaceFeedbackService {
final _logger = Logger("FaceFeedbackService");

View file

@ -1,5 +1,5 @@
import "package:photos/models/ml/ml_versions.dart";
import "package:photos/services/face_ml/face_feedback.dart/feedback_types.dart";
import 'package:photos/services/machine_learning/face_ml/face_feedback.dart/feedback_types.dart';
import "package:uuid/uuid.dart";
abstract class Feedback {

View file

@ -1,5 +1,5 @@
import 'package:logging/logging.dart';
import "package:photos/services/face_ml/blur_detection/blur_constants.dart";
import 'package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart';
class BlurDetectionService {
final _logger = Logger('BlurDetectionService');

View file

@ -0,0 +1,13 @@
import 'package:photos/services/machine_learning/face_ml/face_detection/yolov5face/onnx_face_detection.dart';
/// Blur detection threshold
const kLaplacianThreshold = 15;
/// Default blur value
const kLapacianDefault = 10000.0;
/// The minimum score for a face to be considered a high quality face for clustering and person detection
const kMinHighQualityFaceScore = 0.78;
/// The minimum score for a face to be detected, regardless of quality. Use [kMinHighQualityFaceScore] for high quality faces.
const kMinFaceDetectionScore = YoloOnnxFaceDetection.kMinScoreSigmoidThreshold;

View file

@ -1,4 +1,4 @@
import "package:photos/services/face_ml/face_ml_version.dart";
import 'package:photos/services/machine_learning/face_ml/face_ml_version.dart';
/// Represents a face detection method with a specific version.
class FaceDetectionMethod extends VersionedMethod {

View file

@ -6,12 +6,12 @@ import "package:photos/db/ml_data_db.dart";
import "package:photos/models/file/file.dart";
import 'package:photos/models/ml/ml_typedefs.dart';
import "package:photos/models/ml/ml_versions.dart";
import "package:photos/services/face_ml/blur_detection/blur_constants.dart";
import "package:photos/services/face_ml/face_alignment/alignment_result.dart";
import "package:photos/services/face_ml/face_clustering/cosine_distance.dart";
import "package:photos/services/face_ml/face_detection/detection.dart";
import "package:photos/services/face_ml/face_feedback.dart/cluster_feedback.dart";
import "package:photos/services/face_ml/face_ml_methods.dart";
import 'package:photos/services/machine_learning/face_ml/face_alignment/alignment_result.dart';
import 'package:photos/services/machine_learning/face_ml/face_clustering/cosine_distance.dart';
import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart';
import 'package:photos/services/machine_learning/face_ml/face_feedback.dart/cluster_feedback.dart';
import 'package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart';
import 'package:photos/services/machine_learning/face_ml/face_ml_methods.dart';
final _logger = Logger('ClusterResult_FaceMlResult');
@ -37,7 +37,7 @@ class ClusterResult {
String get thumbnailFaceId => _thumbnailFaceId;
int get thumbnailFileId => _getFileIdFromFaceId(_thumbnailFaceId);
int get thumbnailFileId => getFileIdFromFaceId(_thumbnailFaceId);
/// Sets the thumbnail faceId to the given faceId.
/// Throws an exception if the faceId is not in the list of faceIds.
@ -89,7 +89,7 @@ class ClusterResult {
int removedCount = 0;
for (var i = 0; i < _fileIds.length; i++) {
if (_fileIds[i] == fileId) {
assert(_getFileIdFromFaceId(_faceIds[i]) == fileId);
assert(getFileIdFromFaceId(_faceIds[i]) == fileId);
_fileIds.removeAt(i);
_faceIds.removeAt(i);
debugPrint(
@ -748,6 +748,6 @@ class FaceResultBuilder {
}
}
int _getFileIdFromFaceId(String faceId) {
/// Extracts the numeric file ID encoded at the start of [faceId].
///
/// Face IDs are of the form "<fileID>_<suffix>"; everything before the
/// first underscore is parsed as the file ID. Throws [FormatException]
/// if that prefix is not a valid integer.
int getFileIdFromFaceId(String faceId) {
  final separator = faceId.indexOf("_");
  final idPart = separator == -1 ? faceId : faceId.substring(0, separator);
  return int.parse(idPart);
}

File diff suppressed because it is too large Load diff

View file

@ -11,7 +11,7 @@ import "package:photos/face/db.dart";
import "package:photos/face/model/person.dart";
import "package:photos/generated/protos/ente/common/vector.pb.dart";
import "package:photos/models/file/file.dart";
import "package:photos/services/face_ml/face_clustering/cosine_distance.dart";
import 'package:photos/services/machine_learning/face_ml/face_clustering/cosine_distance.dart';
import "package:photos/services/search_service.dart";
class ClusterFeedbackService {

View file

@ -123,7 +123,8 @@ class RemoteFileMLService {
}
Future<Map<int, FileMl>> decryptFileMLComputer(
Map<String, dynamic> args) async {
Map<String, dynamic> args,
) async {
final result = <int, FileMl>{};
final inputs = args["inputs"] as List<EmbeddingsDecoderInput>;
for (final input in inputs) {

View file

@ -8,9 +8,10 @@ import "package:photos/events/people_changed_event.dart";
import "package:photos/extensions/stop_watch.dart";
import "package:photos/face/db.dart";
import "package:photos/face/model/person.dart";
import "package:photos/models/ml/ml_versions.dart";
import "package:photos/services/face_ml/face_ml_service.dart";
import "package:photos/services/face_ml/feedback/cluster_feedback.dart";
import "package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart";
import 'package:photos/services/machine_learning/face_ml/face_ml_service.dart';
import 'package:photos/services/machine_learning/face_ml/feedback/cluster_feedback.dart';
// import "package:photos/services/search_service.dart";
import 'package:photos/theme/ente_theme.dart';
import 'package:photos/ui/components/captioned_text_widget.dart';
import 'package:photos/ui/components/expandable_menu_item_widget.dart';
@ -93,6 +94,62 @@ class _FaceDebugSectionWidgetState extends State<FaceDebugSectionWidget> {
}
},
),
MenuItemWidget(
captionedTextWidget: FutureBuilder<int>(
future: FaceMLDataDB.instance.getTotalFaceCount(),
builder: (context, snapshot) {
if (snapshot.hasData) {
return CaptionedTextWidget(
title: "${snapshot.data!} high quality faces",
);
}
return const SizedBox.shrink();
},
),
pressedColor: getEnteColorScheme(context).fillFaint,
trailingIcon: Icons.chevron_right_outlined,
trailingIconIsMuted: true,
onTap: () async {
final faces75 = await FaceMLDataDB.instance
.getTotalFaceCount(minFaceScore: 0.75);
final faces78 = await FaceMLDataDB.instance
.getTotalFaceCount(minFaceScore: kMinHighQualityFaceScore);
showShortToast(context, "Faces75: $faces75, Faces78: $faces78");
},
),
// MenuItemWidget(
// captionedTextWidget: const CaptionedTextWidget(
// title: "Analyze file ID 25728869",
// ),
// pressedColor: getEnteColorScheme(context).fillFaint,
// trailingIcon: Icons.chevron_right_outlined,
// trailingIconIsMuted: true,
// onTap: () async {
// try {
// final enteFile = await SearchService.instance.getAllFiles().then(
// (value) => value.firstWhere(
// (element) => element.uploadedFileID == 25728869,
// ),
// );
// _logger.info(
// 'File with ID ${enteFile.uploadedFileID} has name ${enteFile.displayName}',
// );
// FaceMlService.instance.isImageIndexRunning = true;
// final result = await FaceMlService.instance
// .analyzeImageInSingleIsolate(enteFile);
// if (result != null) {
// final resultJson = result.toJsonString();
// _logger.info('result: $resultJson');
// }
// FaceMlService.instance.isImageIndexRunning = false;
// } catch (e, s) {
// _logger.severe('indexing failed ', e, s);
// await showGenericErrorDialog(context: context, error: e);
// } finally {
// FaceMlService.instance.isImageIndexRunning = false;
// }
// },
// ),
MenuItemWidget(
captionedTextWidget: const CaptionedTextWidget(
title: "Run Clustering",
@ -101,7 +158,8 @@ class _FaceDebugSectionWidgetState extends State<FaceDebugSectionWidget> {
trailingIcon: Icons.chevron_right_outlined,
trailingIconIsMuted: true,
onTap: () async {
await FaceMlService.instance.clusterAllImages(minFaceScore: 0.75);
await FaceMlService.instance
.clusterAllImages(minFaceScore: 0.75, clusterInBuckets: true);
Bus.instance.fire(PeopleChangedEvent());
showShortToast(context, "Done");
},

View file

@ -20,8 +20,8 @@ import 'package:photos/models/gallery_type.dart';
import "package:photos/models/metadata/common_keys.dart";
import 'package:photos/models/selected_files.dart';
import 'package:photos/services/collections_service.dart';
import "package:photos/services/face_ml/feedback/cluster_feedback.dart";
import 'package:photos/services/hidden_service.dart';
import 'package:photos/services/machine_learning/face_ml/feedback/cluster_feedback.dart';
import "package:photos/theme/colors.dart";
import "package:photos/theme/ente_theme.dart";
import 'package:photos/ui/actions/collection/collection_file_actions.dart';

View file

@ -1,4 +1,5 @@
import "dart:developer" show log;
import "dart:io" show Platform;
import "dart:typed_data";
import "package:flutter/material.dart";
@ -9,6 +10,7 @@ import 'package:photos/models/file/file.dart';
import "package:photos/services/search_service.dart";
import "package:photos/ui/viewer/file/no_thumbnail_widget.dart";
import "package:photos/ui/viewer/people/cluster_page.dart";
import "package:photos/ui/viewer/people/cropped_face_image_view.dart";
import "package:photos/ui/viewer/people/people_page.dart";
import "package:photos/utils/face/face_box_crop.dart";
import "package:photos/utils/thumbnail_util.dart";
@ -29,11 +31,104 @@ class FaceWidget extends StatelessWidget {
@override
Widget build(BuildContext context) {
return FutureBuilder<Uint8List?>(
future: getFaceCrop(),
builder: (context, snapshot) {
if (snapshot.hasData) {
final ImageProvider imageProvider = MemoryImage(snapshot.data!);
if (Platform.isIOS) {
return FutureBuilder<Uint8List?>(
future: getFaceCrop(),
builder: (context, snapshot) {
if (snapshot.hasData) {
final ImageProvider imageProvider = MemoryImage(snapshot.data!);
return GestureDetector(
onTap: () async {
log(
"FaceWidget is tapped, with person $person and clusterID $clusterID",
name: "FaceWidget",
);
if (person == null && clusterID == null) {
return;
}
if (person != null) {
await Navigator.of(context).push(
MaterialPageRoute(
builder: (context) => PeoplePage(
person: person!,
),
),
);
} else if (clusterID != null) {
final fileIdsToClusterIds =
await FaceMLDataDB.instance.getFileIdToClusterIds();
final files = await SearchService.instance.getAllFiles();
final clusterFiles = files
.where(
(file) =>
fileIdsToClusterIds[file.uploadedFileID]
?.contains(clusterID) ??
false,
)
.toList();
await Navigator.of(context).push(
MaterialPageRoute(
builder: (context) => ClusterPage(
clusterFiles,
cluserID: clusterID!,
),
),
);
}
},
child: Column(
children: [
ClipRRect(
borderRadius:
const BorderRadius.all(Radius.elliptical(16, 12)),
child: SizedBox(
width: 60,
height: 60,
child: Image(
image: imageProvider,
fit: BoxFit.cover,
),
),
),
const SizedBox(height: 8),
if (person != null)
Text(
person!.attr.name.trim(),
style: Theme.of(context).textTheme.bodySmall,
overflow: TextOverflow.ellipsis,
maxLines: 1,
),
],
),
);
} else {
if (snapshot.connectionState == ConnectionState.waiting) {
return const ClipRRect(
borderRadius: BorderRadius.all(Radius.elliptical(16, 12)),
child: SizedBox(
width: 60, // Ensure consistent sizing
height: 60,
child: CircularProgressIndicator(),
),
);
}
if (snapshot.hasError) {
log('Error getting face: ${snapshot.error}');
}
return const ClipRRect(
borderRadius: BorderRadius.all(Radius.elliptical(16, 12)),
child: SizedBox(
width: 60, // Ensure consistent sizing
height: 60,
child: NoThumbnailWidget(),
),
);
}
},
);
} else {
return Builder(
builder: (context) {
return GestureDetector(
onTap: () async {
log(
@ -81,9 +176,9 @@ class FaceWidget extends StatelessWidget {
child: SizedBox(
width: 60,
height: 60,
child: Image(
image: imageProvider,
fit: BoxFit.cover,
child: CroppedFaceImageView(
enteFile: file,
face: face,
),
),
),
@ -98,31 +193,9 @@ class FaceWidget extends StatelessWidget {
],
),
);
} else {
if (snapshot.connectionState == ConnectionState.waiting) {
return const ClipRRect(
borderRadius: BorderRadius.all(Radius.elliptical(16, 12)),
child: SizedBox(
width: 60, // Ensure consistent sizing
height: 60,
child: CircularProgressIndicator(),
),
);
}
if (snapshot.hasError) {
log('Error getting face: ${snapshot.error}');
}
return const ClipRRect(
borderRadius: BorderRadius.all(Radius.elliptical(16, 12)),
child: SizedBox(
width: 60, // Ensure consistent sizing
height: 60,
child: NoThumbnailWidget(),
),
);
}
},
);
},
);
}
}
Future<Uint8List?> getFaceCrop() async {

View file

@ -36,9 +36,18 @@ class FacesItemWidget extends StatelessWidget {
];
}
final List<Face> faces = await FaceMLDataDB.instance
final List<Face>? faces = await FaceMLDataDB.instance
.getFacesForGivenFileID(file.uploadedFileID!);
if (faces.isEmpty || faces.every((face) => face.score < 0.5)) {
if (faces == null) {
return [
const ChipButtonWidget(
"Image not analyzed",
noChips: true,
),
];
}
if (faces.isEmpty ||
faces.every((face) => face.score < 0.75 || face.isBlurry)) {
return [
const ChipButtonWidget(
"No faces found",
@ -50,6 +59,9 @@ class FacesItemWidget extends StatelessWidget {
// Sort the faces by score in descending order, so that the highest scoring face is first.
faces.sort((Face a, Face b) => b.score.compareTo(a.score));
// Remove faces with low scores and blurry faces
faces.removeWhere((face) => face.isHighQuality == false);
// TODO: add deduplication of faces of same person
final faceIdsToClusterIds = await FaceMLDataDB.instance
.getFaceIdsToClusterIds(faces.map((face) => face.faceID));

View file

@ -10,7 +10,7 @@ import "package:photos/events/people_changed_event.dart";
import "package:photos/face/db.dart";
import "package:photos/face/model/person.dart";
import "package:photos/generated/l10n.dart";
import "package:photos/services/face_ml/feedback/cluster_feedback.dart";
import 'package:photos/services/machine_learning/face_ml/feedback/cluster_feedback.dart';
import 'package:photos/theme/colors.dart';
import 'package:photos/theme/ente_theme.dart';
import 'package:photos/ui/common/loading_widget.dart';

View file

@ -0,0 +1,117 @@
import 'dart:developer' show log;
import "dart:io" show File;
import 'package:flutter/material.dart';
import "package:photos/face/model/face.dart";
import "package:photos/models/file/file.dart";
import "package:photos/ui/viewer/file/thumbnail_widget.dart";
import "package:photos/utils/file_util.dart";
/// Immutable bundle of a cropped face [image] and the transform that
/// produced it: the zoom factor [scale] and the translation
/// ([offsetX], [offsetY]) applied to center the face in the crop.
class CroppedFaceInfo {
  final Image image;
  final double scale, offsetX, offsetY;

  const CroppedFaceInfo({
    required this.image,
    required this.scale,
    required this.offsetX,
    required this.offsetY,
  });
}
/// Displays [enteFile]'s full image translated and scaled so that [face]
/// appears centered in the widget, clipped to the widget's bounds.
///
/// Falls back to [ThumbnailWidget] while the image loads or on error.
class CroppedFaceImageView extends StatelessWidget {
  // The file whose image is shown.
  final EnteFile enteFile;
  // The detected face to center on; its detection box holds relative
  // (0..1) coordinates within the image.
  final Face face;
  const CroppedFaceImageView({
    Key? key,
    required this.enteFile,
    required this.face,
  }) : super(key: key);
  @override
  Widget build(BuildContext context) {
    return FutureBuilder(
      future: getImage(),
      builder: (context, snapshot) {
        if (snapshot.hasData) {
          return LayoutBuilder(
            builder: (BuildContext context, BoxConstraints constraints) {
              final Image image = snapshot.data!;
              final double viewWidth = constraints.maxWidth;
              final double viewHeight = constraints.maxHeight;
              final faceBox = face.detection.box;
              // Center of the face in relative (0..1) image coordinates.
              final double relativeFaceCenterX =
                  faceBox.xMin + faceBox.width / 2;
              final double relativeFaceCenterY =
                  faceBox.yMin + faceBox.height / 2;
              // Zoom so the face occupies half of the widget's height.
              const double desiredFaceHeightRelativeToWidget = 1 / 2;
              final double scale =
                  (1 / faceBox.height) * desiredFaceHeightRelativeToWidget;
              final double widgetCenterX = viewWidth / 2;
              final double widgetCenterY = viewHeight / 2;
              // NOTE(review): if enteFile.height is 0 this divides by zero
              // (aspect ratio becomes Infinity/NaN) — presumably upstream
              // guarantees non-zero dimensions; confirm.
              final double imageAspectRatio = enteFile.width / enteFile.height;
              final double widgetAspectRatio = viewWidth / viewHeight;
              final double imageToWidgetRatio =
                  imageAspectRatio / widgetAspectRatio;
              // Translation needed to move the face center to the widget
              // center, in post-scale widget coordinates.
              double offsetX =
                  (widgetCenterX - relativeFaceCenterX * viewWidth) * scale;
              double offsetY =
                  (widgetCenterY - relativeFaceCenterY * viewHeight) * scale;
              // Compensate for BoxFit.cover cropping along the image's
              // longer axis relative to the widget.
              if (imageAspectRatio > widgetAspectRatio) {
                // Landscape Image: Adjust offsetX more conservatively
                offsetX = offsetX * imageToWidgetRatio;
              } else {
                // Portrait Image: Adjust offsetY more conservatively
                offsetY = offsetY / imageToWidgetRatio;
              }
              return ClipRect(
                clipBehavior: Clip.antiAlias,
                child: Transform.translate(
                  offset: Offset(
                    offsetX,
                    offsetY,
                  ),
                  child: Transform.scale(
                    scale: scale,
                    child: image,
                  ),
                ),
              );
            },
          );
        } else {
          if (snapshot.hasError) {
            log('Error getting cover face for person: ${snapshot.error}');
          }
          // While loading (or on failure, including a null file below),
          // show the regular thumbnail instead of the cropped face.
          return ThumbnailWidget(
            enteFile,
          );
        }
      },
    );
  }
  /// Loads the full image bytes for [enteFile] from local storage.
  ///
  /// Returns null when the underlying file is unavailable, which keeps
  /// the FutureBuilder above in its fallback branch.
  Future<Image?> getImage() async {
    final File? ioFile = await getFile(enteFile);
    if (ioFile == null) {
      return null;
    }
    final imageData = await ioFile.readAsBytes();
    final image = Image.memory(imageData, fit: BoxFit.cover);
    return image;
  }
}

View file

@ -6,7 +6,7 @@ import "package:photos/events/people_changed_event.dart";
import "package:photos/face/db.dart";
import "package:photos/face/model/person.dart";
import "package:photos/models/file/file.dart";
import "package:photos/services/face_ml/feedback/cluster_feedback.dart";
import 'package:photos/services/machine_learning/face_ml/feedback/cluster_feedback.dart';
import "package:photos/theme/ente_theme.dart";
import "package:photos/ui/components/buttons/button_widget.dart";
import "package:photos/ui/components/models/button_type.dart";

View file

@ -170,7 +170,6 @@ class SearchExampleRow extends StatelessWidget {
),
);
});
scrollableExamples.add(SearchSectionCTAIcon(sectionType));
return SizedBox(
child: SingleChildScrollView(
physics: const BouncingScrollPhysics(),
@ -237,7 +236,9 @@ class SearchExample extends StatelessWidget {
child: searchResult.previewThumbnail() != null
? Hero(
tag: heroTag,
child: ClipOval(
child: ClipRRect(
borderRadius:
const BorderRadius.all(Radius.elliptical(16, 12)),
child: searchResult.type() != ResultType.faces
? ThumbnailWidget(
searchResult.previewThumbnail()!,
@ -246,7 +247,9 @@ class SearchExample extends StatelessWidget {
: FaceSearchResult(searchResult, heroTag),
),
)
: const ClipOval(
: const ClipRRect(
borderRadius:
BorderRadius.all(Radius.elliptical(16, 12)),
child: NoThumbnailWidget(
addBorder: false,
),

View file

@ -9,8 +9,8 @@ import 'package:flutter_isolate/flutter_isolate.dart';
import "package:logging/logging.dart";
import "package:photos/face/model/box.dart";
import 'package:photos/models/ml/ml_typedefs.dart';
import "package:photos/services/face_ml/face_alignment/alignment_result.dart";
import "package:photos/services/face_ml/face_detection/detection.dart";
import 'package:photos/services/machine_learning/face_ml/face_alignment/alignment_result.dart';
import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart';
import "package:photos/utils/image_ml_util.dart";
import "package:synchronized/synchronized.dart";

View file

@ -18,10 +18,10 @@ import 'package:flutter/painting.dart' as paint show decodeImageFromList;
import 'package:ml_linalg/linalg.dart';
import "package:photos/face/model/box.dart";
import 'package:photos/models/ml/ml_typedefs.dart';
import "package:photos/services/face_ml/blur_detection/blur_detection_service.dart";
import "package:photos/services/face_ml/face_alignment/alignment_result.dart";
import "package:photos/services/face_ml/face_alignment/similarity_transform.dart";
import "package:photos/services/face_ml/face_detection/detection.dart";
import 'package:photos/services/machine_learning/face_ml/face_alignment/alignment_result.dart';
import 'package:photos/services/machine_learning/face_ml/face_alignment/similarity_transform.dart';
import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart';
import 'package:photos/services/machine_learning/face_ml/face_filtering/blur_detection_service.dart';
/// All of the functions in this file are helper functions for the [ImageMlIsolate] isolate.
/// Don't use them outside of the isolate, unless you are okay with UI jank!!!!