[mob] Better organization of face detection code.
This commit is contained in:
parent aad0a5a1d4
commit c902f24e63

8 changed files with 38 additions and 87 deletions
@@ -0,0 +1,3 @@
+class YOLOFaceInterpreterInitializationException implements Exception {}
+
+class YOLOFaceInterpreterRunException implements Exception {}
@@ -11,9 +11,9 @@ import 'package:logging/logging.dart';
 import 'package:onnxruntime/onnxruntime.dart';
 import "package:photos/face/model/dimension.dart";
 import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart';
+import 'package:photos/services/machine_learning/face_ml/face_detection/face_detection_exceptions.dart';
 import 'package:photos/services/machine_learning/face_ml/face_detection/naive_non_max_suppression.dart';
-import 'package:photos/services/machine_learning/face_ml/face_detection/yolov5face/yolo_face_detection_exceptions.dart';
-import 'package:photos/services/machine_learning/face_ml/face_detection/yolov5face/yolo_filter_extract_detections.dart';
+import 'package:photos/services/machine_learning/face_ml/face_detection/yolo_filter_extract_detections.dart';
 import "package:photos/services/remote_assets_service.dart";
 import "package:photos/utils/image_ml_isolate.dart";
 import "package:photos/utils/image_ml_util.dart";
@@ -21,22 +21,26 @@ import "package:synchronized/synchronized.dart";

 enum FaceDetectionOperation { yoloInferenceAndPostProcessing }

-class YoloOnnxFaceDetection {
+/// This class is responsible for running the face detection model (YOLOv5Face) on ONNX runtime, and can be accessed through the singleton instance [FaceDetectionService.instance].
+class FaceDetectionService {
   static final _logger = Logger('YOLOFaceDetectionService');

   final _computer = Computer.shared();

   int sessionAddress = 0;

-  static const kModelBucketEndpoint = "https://models.ente.io/";
-  static const kRemoteBucketModelPath = "yolov5s_face_640_640_dynamic.onnx";
+  static const String kModelBucketEndpoint = "https://models.ente.io/";
+  static const String kRemoteBucketModelPath =
+      "yolov5s_face_640_640_dynamic.onnx";
   // static const kRemoteBucketModelPath = "yolov5n_face_640_640.onnx";
-  static const modelRemotePath = kModelBucketEndpoint + kRemoteBucketModelPath;
+  static const String modelRemotePath =
+      kModelBucketEndpoint + kRemoteBucketModelPath;

-  static const kInputWidth = 640;
-  static const kInputHeight = 640;
-  static const kIouThreshold = 0.4;
-  static const kMinScoreSigmoidThreshold = 0.7;
+  static const int kInputWidth = 640;
+  static const int kInputHeight = 640;
+  static const double kIouThreshold = 0.4;
+  static const double kMinScoreSigmoidThreshold = 0.7;
   static const int kNumKeypoints = 5;

   bool isInitialized = false;
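The constants above now carry explicit types; `kIouThreshold` and `kMinScoreSigmoidThreshold` feed the post-processing imported earlier (`naive_non_max_suppression.dart`, `yolo_filter_extract_detections.dart`). As a hedged illustration of what greedy (naive) non-max suppression with such an IoU threshold typically does — a sketch only, not the repo's implementation, with `Box`, `iou`, and `naiveNonMaxSuppression` as illustrative stand-ins for the real types and helpers:

    import 'dart:math' as math;

    // Hypothetical stand-in for the real detection type (FaceDetectionRelative).
    class Box {
      final double x1, y1, x2, y2, score;
      const Box(this.x1, this.y1, this.x2, this.y2, this.score);
    }

    // Intersection-over-union of two axis-aligned boxes.
    double iou(Box a, Box b) {
      final interW = math.max(0.0, math.min(a.x2, b.x2) - math.max(a.x1, b.x1));
      final interH = math.max(0.0, math.min(a.y2, b.y2) - math.max(a.y1, b.y1));
      final inter = interW * interH;
      final union = (a.x2 - a.x1) * (a.y2 - a.y1) +
          (b.x2 - b.x1) * (b.y2 - b.y1) -
          inter;
      return union <= 0 ? 0 : inter / union;
    }

    // Greedy NMS: keep the highest-scoring box, drop anything that overlaps a
    // kept box by more than the threshold, then move on to the next candidate.
    List<Box> naiveNonMaxSuppression(List<Box> boxes, double iouThreshold) {
      final sorted = [...boxes]..sort((a, b) => b.score.compareTo(a.score));
      final kept = <Box>[];
      for (final candidate in sorted) {
        if (kept.every((k) => iou(candidate, k) <= iouThreshold)) {
          kept.add(candidate);
        }
      }
      return kept;
    }

In the service above, the threshold used for this step would be `kIouThreshold` (0.4): a candidate box is discarded as soon as its IoU with an already-kept detection exceeds 0.4.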
@@ -55,7 +59,7 @@ class YoloOnnxFaceDetection {
   bool isRunning = false;

   // singleton pattern
-  YoloOnnxFaceDetection._privateConstructor();
+  FaceDetectionService._privateConstructor();

   /// Use this instance to access the FaceDetection service. Make sure to call `init()` before using it.
   /// e.g. `await FaceDetection.instance.init();`
@@ -63,9 +67,9 @@ class YoloOnnxFaceDetection {
   /// Then you can use `predict()` to get the bounding boxes of the faces, so `FaceDetection.instance.predict(imageData)`
   ///
   /// config options: yoloV5FaceN //
-  static final instance = YoloOnnxFaceDetection._privateConstructor();
+  static final instance = FaceDetectionService._privateConstructor();

-  factory YoloOnnxFaceDetection() => instance;
+  factory FaceDetectionService() => instance;

   /// Check if the interpreter is initialized, if not initialize it with `loadModel()`
   Future<void> init() async {
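Per the doc comments above, the renamed service keeps its singleton shape: call `init()` once on the shared instance, then run `predict()`. A minimal usage sketch based only on those comments (the exact parameter and return types of `predict()` are not shown in this diff, so `imageData` is left untyped and the return value is passed through as-is):

    // Hedged sketch of the documented call sequence; not copied from the repo.
    // Assumes: import 'package:photos/services/machine_learning/face_ml/face_detection/face_detection_service.dart';
    Future<dynamic> detectFacesExample(dynamic imageData) async {
      // Loads the YOLOv5Face ONNX model on first use.
      await FaceDetectionService.instance.init();
      // Returns the detected face bounding boxes for the given image data.
      return FaceDetectionService.instance.predict(imageData);
    }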
@@ -178,7 +182,7 @@ class YoloOnnxFaceDetection {
       dev.log(
         '[YOLOFaceDetectionService] Error while running inference: $e \n $s',
       );
-      throw YOLOInterpreterRunException();
+      throw YOLOFaceInterpreterRunException();
     }
     stopwatchInterpreter.stop();
     dev.log(
@@ -297,7 +301,7 @@ class YoloOnnxFaceDetection {
       // runOptions.release();
     } catch (e, s) {
       _logger.severe('Error while running inference: $e \n $s');
-      throw YOLOInterpreterRunException();
+      throw YOLOFaceInterpreterRunException();
     }
     stopwatchInterpreter.stop();
     _logger.info(
@@ -367,7 +371,7 @@ class YoloOnnxFaceDetection {
       // runOptions.release();
     } catch (e, s) {
       _logger.severe('Error while running inference: $e \n $s');
-      throw YOLOInterpreterRunException();
+      throw YOLOFaceInterpreterRunException();
     }
     stopwatchInterpreter.stop();
     _logger.info(
@@ -585,7 +589,7 @@ class YoloOnnxFaceDetection {
       runOptions.release();
     } catch (e, s) {
       _logger.severe('Error while running inference: $e \n $s');
-      throw YOLOInterpreterRunException();
+      throw YOLOFaceInterpreterRunException();
     }
     stopwatchInterpreter.stop();
     _logger.info(
@@ -770,7 +774,7 @@ class YoloOnnxFaceDetection {
       dev.log(
         '[YOLOFaceDetectionService] Error while running inference: $e \n $s',
       );
-      throw YOLOInterpreterRunException();
+      throw YOLOFaceInterpreterRunException();
     }
     stopwatchInterpreter.stop();
     dev.log(
@@ -1,3 +0,0 @@
-class YOLOInterpreterInitializationException implements Exception {}
-
-class YOLOInterpreterRunException implements Exception {}
@@ -1,31 +0,0 @@
-import 'dart:math' as math show log;
-
-class FaceDetectionOptionsYOLO {
-  final double minScoreSigmoidThreshold;
-  final double iouThreshold;
-  final int inputWidth;
-  final int inputHeight;
-  final int numCoords;
-  final int numKeypoints;
-  final int numValuesPerKeypoint;
-  final int maxNumFaces;
-  final double scoreClippingThresh;
-  final double inverseSigmoidMinScoreThreshold;
-  final bool useSigmoidScore;
-  final bool flipVertically;
-
-  FaceDetectionOptionsYOLO({
-    required this.minScoreSigmoidThreshold,
-    required this.iouThreshold,
-    required this.inputWidth,
-    required this.inputHeight,
-    this.numCoords = 14,
-    this.numKeypoints = 5,
-    this.numValuesPerKeypoint = 2,
-    this.maxNumFaces = 100,
-    this.scoreClippingThresh = 100.0,
-    this.useSigmoidScore = true,
-    this.flipVertically = false,
-  }) : inverseSigmoidMinScoreThreshold =
-            math.log(minScoreSigmoidThreshold / (1 - minScoreSigmoidThreshold));
-}
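One detail worth noting as this options class is removed: `inverseSigmoidMinScoreThreshold` is the logit of the score threshold, i.e. log(p / (1 - p)). Pre-computing it lets post-processing compare the model's raw (pre-sigmoid) scores against a single constant instead of applying a sigmoid to every candidate box, since sigmoid(x) ≥ p exactly when x ≥ log(p / (1 - p)). A small self-contained sketch of that equivalence (illustrative arithmetic, not code from the repo):

    import 'dart:math' as math;

    double sigmoid(double x) => 1 / (1 + math.exp(-x));

    void main() {
      const minScoreSigmoidThreshold = 0.8;
      final inverseSigmoidThreshold = math
          .log(minScoreSigmoidThreshold / (1 - minScoreSigmoidThreshold)); // ~1.386
      const rawScore = 1.5; // example pre-sigmoid score from the model
      // The two checks agree; the second one skips the per-box sigmoid.
      final viaSigmoid = sigmoid(rawScore) >= minScoreSigmoidThreshold;
      final viaLogit = rawScore >= inverseSigmoidThreshold;
      print('$viaSigmoid $viaLogit'); // true true
    }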
@@ -1,22 +0,0 @@
-import 'package:photos/services/machine_learning/face_ml/face_detection/yolov5face/yolo_face_detection_options.dart';
-import 'package:photos/services/machine_learning/face_ml/model_file.dart';
-
-class YOLOModelConfig {
-  final String modelPath;
-  final FaceDetectionOptionsYOLO faceOptions;
-
-  YOLOModelConfig({
-    required this.modelPath,
-    required this.faceOptions,
-  });
-}
-
-final YOLOModelConfig yoloV5FaceS640x640DynamicBatchonnx = YOLOModelConfig(
-  modelPath: ModelFile.yoloV5FaceS640x640DynamicBatchonnx,
-  faceOptions: FaceDetectionOptionsYOLO(
-    minScoreSigmoidThreshold: 0.8,
-    iouThreshold: 0.4,
-    inputWidth: 640,
-    inputHeight: 640,
-  ),
-);
@@ -1,4 +1,4 @@
-import 'package:photos/services/machine_learning/face_ml/face_detection/yolov5face/onnx_face_detection.dart';
+import 'package:photos/services/machine_learning/face_ml/face_detection/face_detection_service.dart';

 /// Blur detection threshold
 const kLaplacianThreshold = 15;
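The `kLaplacianThreshold` constant above is a blur cutoff: variance-of-Laplacian is a common sharpness measure, and a face crop whose Laplacian variance falls below such a threshold is typically treated as blurry. A hedged sketch of that check, assuming a plain grayscale matrix as input and `kLaplacianThreshold` in scope (not the repo's implementation; helper names are illustrative):

    // Sharpness as the variance of a 3x3 Laplacian response over a grayscale image
    // (image assumed to be at least 3x3).
    double varianceOfLaplacian(List<List<int>> gray) {
      const kernel = [
        [0, 1, 0],
        [1, -4, 1],
        [0, 1, 0],
      ];
      final responses = <double>[];
      for (var y = 1; y < gray.length - 1; y++) {
        for (var x = 1; x < gray[y].length - 1; x++) {
          var acc = 0;
          for (var ky = -1; ky <= 1; ky++) {
            for (var kx = -1; kx <= 1; kx++) {
              acc += kernel[ky + 1][kx + 1] * gray[y + ky][x + kx];
            }
          }
          responses.add(acc.toDouble());
        }
      }
      final mean = responses.reduce((a, b) => a + b) / responses.length;
      return responses
              .map((v) => (v - mean) * (v - mean))
              .reduce((a, b) => a + b) /
          responses.length;
    }

    // A crop is considered blurry when its sharpness falls below the threshold.
    bool isBlurry(List<List<int>> grayFaceCrop) =>
        varianceOfLaplacian(grayFaceCrop) < kLaplacianThreshold;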
@@ -10,4 +10,4 @@ const kLapacianDefault = 10000.0;
 const kMinHighQualityFaceScore = 0.80;

 /// The minimum score for a face to be detected, regardless of quality. Use [kMinHighQualityFaceScore] for high quality faces.
-const kMinFaceDetectionScore = YoloOnnxFaceDetection.kMinScoreSigmoidThreshold;
+const kMinFaceDetectionScore = FaceDetectionService.kMinScoreSigmoidThreshold;
@@ -28,8 +28,8 @@ import "package:photos/models/file/file_type.dart";
 import "package:photos/models/ml/ml_versions.dart";
 import 'package:photos/services/machine_learning/face_ml/face_clustering/linear_clustering_service.dart';
 import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart';
-import 'package:photos/services/machine_learning/face_ml/face_detection/yolov5face/onnx_face_detection.dart';
-import 'package:photos/services/machine_learning/face_ml/face_detection/yolov5face/yolo_face_detection_exceptions.dart';
+import 'package:photos/services/machine_learning/face_ml/face_detection/face_detection_exceptions.dart';
+import 'package:photos/services/machine_learning/face_ml/face_detection/face_detection_service.dart';
 import 'package:photos/services/machine_learning/face_ml/face_embedding/face_embedding_exceptions.dart';
 import 'package:photos/services/machine_learning/face_ml/face_embedding/face_embedding_service.dart';
 import 'package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart';
@@ -90,7 +90,7 @@ class FaceMlService {
     _logger.info("init called");
     await _computer.compute(initOrtEnv);
     try {
-      await YoloOnnxFaceDetection.instance.init();
+      await FaceDetectionService.instance.init();
     } catch (e, s) {
       _logger.severe("Could not initialize yolo onnx", e, s);
     }
@@ -142,7 +142,7 @@ class FaceMlService {
       return;
     }
     try {
-      await YoloOnnxFaceDetection.instance.release();
+      await FaceDetectionService.instance.release();
     } catch (e, s) {
       _logger.severe("Could not dispose yolo onnx", e, s);
     }
@@ -894,7 +894,7 @@ class FaceMlService {
         "enteFileID": enteFile.uploadedFileID ?? -1,
         "filePath": filePath,
         "faceDetectionAddress":
-            YoloOnnxFaceDetection.instance.sessionAddress,
+            FaceDetectionService.instance.sessionAddress,
         "faceEmbeddingAddress":
             FaceEmbeddingService.instance.sessionAddress,
       }
@@ -1043,7 +1043,7 @@ class FaceMlService {
     try {
       // Get the bounding boxes of the faces
      final (List<FaceDetectionRelative> faces, dataSize) =
-          await YoloOnnxFaceDetection.instance.predictInComputer(imagePath);
+          await FaceDetectionService.instance.predictInComputer(imagePath);

      // Add detected faces to the resultBuilder
      if (resultBuilder != null) {
@@ -1051,9 +1051,9 @@
      }

      return faces;
-    } on YOLOInterpreterInitializationException {
+    } on YOLOFaceInterpreterInitializationException {
      throw CouldNotInitializeFaceDetector();
-    } on YOLOInterpreterRunException {
+    } on YOLOFaceInterpreterRunException {
      throw CouldNotRunFaceDetector();
    } catch (e) {
      _logger.severe('Face detection failed: $e');
@@ -1077,7 +1077,7 @@
    try {
      // Get the bounding boxes of the faces
      final (List<FaceDetectionRelative> faces, dataSize) =
-          await YoloOnnxFaceDetection.predictSync(
+          await FaceDetectionService.predictSync(
        image,
        imageByteData,
        interpreterAddress,
@@ -1089,9 +1089,9 @@
      }

      return faces;
-    } on YOLOInterpreterInitializationException {
+    } on YOLOFaceInterpreterInitializationException {
      throw CouldNotInitializeFaceDetector();
-    } on YOLOInterpreterRunException {
+    } on YOLOFaceInterpreterRunException {
      throw CouldNotRunFaceDetector();
    } catch (e) {
      dev.log('[SEVERE] Face detection failed: $e');