Mobile faces v0 (#1776)

Neeraj Gupta 2024-05-20 17:46:38 +05:30 committed by GitHub
commit b1314729b1
167 changed files with 17651 additions and 1313 deletions

View file

@ -43,7 +43,7 @@ android {
defaultConfig {
applicationId "io.ente.photos"
minSdkVersion 21
minSdkVersion 26
targetSdkVersion 33
versionCode flutterVersionCode.toInteger()
versionName flutterVersionName
@ -70,6 +70,10 @@ android {
dimension "default"
applicationIdSuffix ".dev"
}
face {
dimension "default"
applicationIdSuffix ".face"
}
playstore {
dimension "default"
}
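The new face flavor installs alongside the existing dev and playstore variants as io.ente.photos.face; presumably it is selected at build time with something like flutter build apk --flavor face.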

View file

@ -0,0 +1,10 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="io.ente.photos">
<!-- Flutter needs it to communicate with the running application
to allow setting breakpoints, to provide hot reload, etc.
-->
<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
</manifest>

View file

@ -0,0 +1,4 @@
<resources>
<string name="app_name">ente face</string>
<string name="backup">backup face</string>
</resources>

View file

@ -1,91 +0,0 @@
unknown
person
bicycle
car
motorcycle
airplane
bus
train
truck
boat
traffic light
fire hydrant
unknown
stop sign
parking meter
bench
bird
cat
dog
horse
sheep
cow
elephant
bear
zebra
giraffe
unknown
backpack
umbrella
unknown
unknown
handbag
tie
suitcase
frisbee
skis
snowboard
sports ball
kite
baseball bat
baseball glove
skateboard
surfboard
tennis racket
bottle
unknown
wine glass
cup
fork
knife
spoon
bowl
banana
apple
sandwich
orange
broccoli
carrot
hot dog
pizza
donut
cake
chair
couch
potted plant
bed
unknown
dining table
unknown
unknown
toilet
unknown
tv
laptop
mouse
remote
keyboard
cell phone
microwave
oven
toaster
sink
refrigerator
unknown
book
clock
vase
scissors
teddy bear
hair drier
toothbrush

View file

@ -1,30 +0,0 @@
waterfall
snow
landscape
underwater
architecture
sunset / sunrise
blue sky
cloudy sky
greenery
autumn leaves
portrait
flower
night shot
stage concert
fireworks
candle light
neon lights
indoor
backlight
text documents
qr images
group portrait
computer screens
kids
dog
cat
macro
food
beach
mountain

View file

@ -6,6 +6,8 @@ PODS:
- connectivity_plus (0.0.1):
- Flutter
- FlutterMacOS
- dart_ui_isolate (0.0.1):
- Flutter
- device_info_plus (0.0.1):
- Flutter
- file_saver (0.0.1):
@ -226,6 +228,7 @@ DEPENDENCIES:
- background_fetch (from `.symlinks/plugins/background_fetch/ios`)
- battery_info (from `.symlinks/plugins/battery_info/ios`)
- connectivity_plus (from `.symlinks/plugins/connectivity_plus/darwin`)
- dart_ui_isolate (from `.symlinks/plugins/dart_ui_isolate/ios`)
- device_info_plus (from `.symlinks/plugins/device_info_plus/ios`)
- file_saver (from `.symlinks/plugins/file_saver/ios`)
- firebase_core (from `.symlinks/plugins/firebase_core/ios`)
@ -302,6 +305,8 @@ EXTERNAL SOURCES:
:path: ".symlinks/plugins/battery_info/ios"
connectivity_plus:
:path: ".symlinks/plugins/connectivity_plus/darwin"
dart_ui_isolate:
:path: ".symlinks/plugins/dart_ui_isolate/ios"
device_info_plus:
:path: ".symlinks/plugins/device_info_plus/ios"
file_saver:
@ -397,6 +402,7 @@ SPEC CHECKSUMS:
background_fetch: 2319bf7e18237b4b269430b7f14d177c0df09c5a
battery_info: 09f5c9ee65394f2291c8c6227bedff345b8a730c
connectivity_plus: ddd7f30999e1faaef5967c23d5b6d503d10434db
dart_ui_isolate: d5bcda83ca4b04f129d70eb90110b7a567aece14
device_info_plus: c6fb39579d0f423935b0c9ce7ee2f44b71b9fce6
file_saver: 503e386464dbe118f630e17b4c2e1190fa0cf808
Firebase: 91fefd38712feb9186ea8996af6cbdef41473442

View file

@ -293,6 +293,7 @@
"${BUILT_PRODUCTS_DIR}/background_fetch/background_fetch.framework",
"${BUILT_PRODUCTS_DIR}/battery_info/battery_info.framework",
"${BUILT_PRODUCTS_DIR}/connectivity_plus/connectivity_plus.framework",
"${BUILT_PRODUCTS_DIR}/dart_ui_isolate/dart_ui_isolate.framework",
"${BUILT_PRODUCTS_DIR}/device_info_plus/device_info_plus.framework",
"${BUILT_PRODUCTS_DIR}/file_saver/file_saver.framework",
"${BUILT_PRODUCTS_DIR}/fk_user_agent/fk_user_agent.framework",
@ -374,6 +375,7 @@
"${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/background_fetch.framework",
"${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/battery_info.framework",
"${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/connectivity_plus.framework",
"${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/dart_ui_isolate.framework",
"${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/device_info_plus.framework",
"${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/file_saver.framework",
"${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/fk_user_agent.framework",

View file

@ -65,9 +65,9 @@
<key>ITSAppUsesNonExemptEncryption</key>
<false/>
<key>FLTEnableImpeller</key>
<false />
<true />
<key>FLTEnableWideGamut</key>
<false/>
<true/>
<key>NSFaceIDUsageDescription</key>
<string>Please allow ente to lock itself with FaceID or TouchID</string>
<key>NSCameraUsageDescription</key>

View file

@ -19,6 +19,7 @@ import 'package:photos/db/upload_locks_db.dart';
import "package:photos/events/endpoint_updated_event.dart";
import 'package:photos/events/signed_in_event.dart';
import 'package:photos/events/user_logged_out_event.dart';
import "package:photos/face/db.dart";
import 'package:photos/models/key_attributes.dart';
import 'package:photos/models/key_gen_result.dart';
import 'package:photos/models/private_key_attributes.dart';
@ -187,6 +188,7 @@ class Configuration {
: null;
await CollectionsDB.instance.clearTable();
await MemoriesDB.instance.clearTable();
await FaceMLDataDB.instance.clearTable();
await UploadLocksDB.instance.clearTable();
await IgnoredFilesService.instance.reset();

View file

@ -99,6 +99,9 @@ const blackThumbnailBase64 = '/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAEBAQEBAQEB'
'AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAo' +
'AKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgAoAKACgD/9k=';
const localFileServer =
String.fromEnvironment("localFileServer", defaultValue: "");
const uploadTempFilePrefix = "upload_file_";
final tempDirCleanUpInterval = kDebugMode
? const Duration(seconds: 30).inMicroseconds

View file

@ -9,7 +9,7 @@ extension EntitiesDB on FilesDB {
List<LocalEntityData> data, {
ConflictAlgorithm conflictAlgorithm = ConflictAlgorithm.replace,
}) async {
debugPrint("Inserting missing PathIDToLocalIDMapping");
debugPrint("entitiesDB: upsertEntities ${data.length} entities");
final db = await database;
var batch = db.batch();
int batchCounter = 0;
@ -62,4 +62,17 @@ extension EntitiesDB on FilesDB {
return LocalEntityData.fromJson(maps[i]);
});
}
Future<LocalEntityData?> getEntity(EntityType type, String id) async {
final db = await database;
final List<Map<String, dynamic>> maps = await db.query(
"entities",
where: "type = ? AND id = ?",
whereArgs: [type.typeToString(), id],
);
if (maps.isEmpty) {
return null;
}
return LocalEntityData.fromJson(maps.first);
}
}

View file

@ -491,6 +491,18 @@ class FilesDB {
return convertToFiles(results)[0];
}
Future<EnteFile?> getAnyUploadedFile(int uploadedID) async {
final db = await instance.sqliteAsyncDB;
final results = await db.getAll(
'SELECT * FROM $filesTable WHERE $columnUploadedFileID = ?',
[uploadedID],
);
if (results.isEmpty) {
return null;
}
return convertToFiles(results)[0];
}
Future<Set<int>> getUploadedFileIDs(int collectionID) async {
final db = await instance.sqliteAsyncDB;
final results = await db.getAll(
@ -683,6 +695,17 @@ class FilesDB {
return files;
}
Future<List<EnteFile>> getAllFilesFromCollections(
Iterable<int> collectionID,
) async {
final db = await instance.sqliteAsyncDB;
final String sql =
'SELECT * FROM $filesTable WHERE $columnCollectionID IN (${collectionID.join(',')})';
final results = await db.getAll(sql);
final files = convertToFiles(results);
return files;
}
Future<List<EnteFile>> getNewFilesInCollection(
int collectionID,
int addedTime,
@ -1304,6 +1327,23 @@ class FilesDB {
return result;
}
Future<Map<int, int>> getFileIDToCreationTime() async {
final db = await instance.sqliteAsyncDB;
final rows = await db.getAll(
'''
SELECT $columnUploadedFileID, $columnCreationTime
FROM $filesTable
WHERE
($columnUploadedFileID IS NOT NULL AND $columnUploadedFileID IS NOT -1);
''',
);
final result = <int, int>{};
for (final row in rows) {
result[row[columnUploadedFileID] as int] = row[columnCreationTime] as int;
}
return result;
}
// getCollectionFileFirstOrLast returns the first or last uploaded file in
// the collection based on the given collectionID and the order.
Future<EnteFile?> getCollectionFileFirstOrLast(
@ -1643,13 +1683,14 @@ class FilesDB {
}
Future<List<int>> getOwnedFileIDs(int ownerID) async {
final db = await instance.database;
final results = await db.query(
filesTable,
columns: [columnUploadedFileID],
where:
'($columnOwnerID = $ownerID AND $columnUploadedFileID IS NOT NULL AND $columnUploadedFileID IS NOT -1)',
distinct: true,
final db = await instance.sqliteAsyncDB;
final results = await db.getAll(
'''
SELECT DISTINCT $columnUploadedFileID FROM $filesTable
WHERE ($columnOwnerID = ? AND $columnUploadedFileID IS NOT NULL AND
$columnUploadedFileID IS NOT -1)
''',
[ownerID],
);
final ids = <int>[];
for (final result in results) {
@ -1659,16 +1700,17 @@ class FilesDB {
}
Future<List<EnteFile>> getUploadedFiles(List<int> uploadedIDs) async {
final db = await instance.database;
final db = await instance.sqliteAsyncDB;
String inParam = "";
for (final id in uploadedIDs) {
inParam += "'" + id.toString() + "',";
}
inParam = inParam.substring(0, inParam.length - 1);
final results = await db.query(
filesTable,
where: '$columnUploadedFileID IN ($inParam)',
groupBy: columnUploadedFileID,
final results = await db.getAll(
'''
SELECT * FROM $filesTable WHERE $columnUploadedFileID IN ($inParam)
GROUP BY $columnUploadedFileID
''',
);
if (results.isEmpty) {
return <EnteFile>[];

View file

@ -26,4 +26,6 @@ enum EventType {
hide,
unhide,
coverChanged,
peopleChanged,
peopleClusterChanged,
}

View file

@ -0,0 +1,22 @@
import "package:photos/events/event.dart";
import "package:photos/models/file/file.dart";
class PeopleChangedEvent extends Event {
final List<EnteFile>? relevantFiles;
final PeopleEventType type;
final String source;
PeopleChangedEvent({
this.relevantFiles,
this.type = PeopleEventType.defaultType,
this.source = "",
});
@override
String get reason => '$runtimeType{type: ${type.name}, "via": $source}';
}
enum PeopleEventType {
defaultType,
removedFilesFromCluster,
}
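A usage sketch (not part of the diff), assuming the app's existing Bus event bus; the source string is a hypothetical caller name:

Bus.instance.fire(
  PeopleChangedEvent(
    type: PeopleEventType.removedFilesFromCluster,
    source: "ClusterActions.removeFiles", // hypothetical caller
  ),
);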

View file

@ -0,0 +1,193 @@
import 'dart:math' as math show sin, cos, atan2, sqrt, pow;
import 'package:ml_linalg/linalg.dart';
extension SetVectorValues on Vector {
Vector setValues(int start, int end, Iterable<double> values) {
if (values.length > length) {
throw Exception('Values cannot be larger than vector');
} else if (end - start != values.length) {
throw Exception('Values must be same length as range');
} else if (start < 0 || end > length) {
throw Exception('Range must be within vector');
}
final tempList = toList();
tempList.replaceRange(start, end, values);
final newVector = Vector.fromList(tempList);
return newVector;
}
}
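A minimal usage sketch (not part of the diff), with the extension above in scope; note that setValues returns a new Vector rather than mutating in place:

final v = Vector.fromList([1.0, 2.0, 3.0, 4.0]);
final w = v.setValues(1, 3, [9.0, 8.0]);
print(w); // (1.0, 9.0, 8.0, 4.0); v itself is unchanged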
extension SetMatrixValues on Matrix {
Matrix setSubMatrix(
int startRow,
int endRow,
int startColumn,
int endColumn,
Iterable<Iterable<double>> values,
) {
if (values.length > rowCount) {
throw Exception('New values cannot have more rows than original matrix');
} else if (values.elementAt(0).length > columnCount) {
throw Exception(
'New values cannot have more columns than original matrix',
);
} else if (endRow - startRow != values.length) {
throw Exception('Values (number of rows) must be same length as range');
} else if (endColumn - startColumn != values.elementAt(0).length) {
throw Exception(
'Values (number of columns) must be same length as range',
);
} else if (startRow < 0 ||
endRow > rowCount ||
startColumn < 0 ||
endColumn > columnCount) {
throw Exception('Range must be within matrix');
}
final tempList = asFlattenedList
.toList(); // You need `.toList()` here to make sure the list is growable, otherwise `replaceRange` will throw an error
for (var i = startRow; i < endRow; i++) {
tempList.replaceRange(
i * columnCount + startColumn,
i * columnCount + endColumn,
values.elementAt(i).toList(),
);
}
final newMatrix = Matrix.fromFlattenedList(tempList, rowCount, columnCount);
return newMatrix;
}
Matrix setValues(
int startRow,
int endRow,
int startColumn,
int endColumn,
Iterable<double> values,
) {
if ((startRow - endRow) * (startColumn - endColumn) != values.length) {
throw Exception('Values must be same length as range');
} else if (startRow < 0 ||
endRow > rowCount ||
startColumn < 0 ||
endColumn > columnCount) {
throw Exception('Range must be within matrix');
}
final tempList = asFlattenedList
.toList(); // You need `.toList()` here to make sure the list is growable, otherwise `replaceRange` will throw an error
var index = 0;
for (var i = startRow; i < endRow; i++) {
for (var j = startColumn; j < endColumn; j++) {
tempList[i * columnCount + j] = values.elementAt(index);
index++;
}
}
final newMatrix = Matrix.fromFlattenedList(tempList, rowCount, columnCount);
return newMatrix;
}
Matrix setValue(int row, int column, double value) {
if (row < 0 || row >= rowCount || column < 0 || column >= columnCount) {
throw Exception('Index must be within range of matrix');
}
final tempList = asFlattenedList;
tempList[row * columnCount + column] = value;
final newMatrix = Matrix.fromFlattenedList(tempList, rowCount, columnCount);
return newMatrix;
}
Matrix appendRow(List<double> row) {
final oldNumberOfRows = rowCount;
final oldNumberOfColumns = columnCount;
if (row.length != oldNumberOfColumns) {
throw Exception('Row must have same number of columns as matrix');
}
final flatListMatrix = asFlattenedList;
flatListMatrix.addAll(row);
return Matrix.fromFlattenedList(
flatListMatrix,
oldNumberOfRows + 1,
oldNumberOfColumns,
);
}
}
extension MatrixCalculations on Matrix {
double determinant() {
final int length = rowCount;
if (length != columnCount) {
throw Exception('Matrix must be square');
}
if (length == 1) {
return this[0][0];
} else if (length == 2) {
return this[0][0] * this[1][1] - this[0][1] * this[1][0];
} else {
throw Exception('Determinant for Matrix larger than 2x2 not implemented');
}
}
/// Computes the singular value decomposition of a matrix, using https://lucidar.me/en/mathematics/singular-value-decomposition-of-a-2x2-matrix/ as reference, but with slightly different signs for the second columns of U and V
Map<String, dynamic> svd() {
if (rowCount != 2 || columnCount != 2) {
throw Exception('Matrix must be 2x2');
}
final a = this[0][0];
final b = this[0][1];
final c = this[1][0];
final d = this[1][1];
// Computation of U matrix
final tempCalc = a * a + b * b - c * c - d * d;
final theta = 0.5 * math.atan2(2 * a * c + 2 * b * d, tempCalc);
final U = Matrix.fromList([
[math.cos(theta), math.sin(theta)],
[math.sin(theta), -math.cos(theta)],
]);
// Computation of S matrix
// ignore: non_constant_identifier_names
final S1 = a * a + b * b + c * c + d * d;
// ignore: non_constant_identifier_names
final S2 =
math.sqrt(math.pow(tempCalc, 2) + 4 * math.pow(a * c + b * d, 2));
final sigma1 = math.sqrt((S1 + S2) / 2);
final sigma2 = math.sqrt((S1 - S2) / 2);
final S = Vector.fromList([sigma1, sigma2]);
// Computation of V matrix
final tempCalc2 = a * a - b * b + c * c - d * d;
final phi = 0.5 * math.atan2(2 * a * b + 2 * c * d, tempCalc2);
final s11 = (a * math.cos(theta) + c * math.sin(theta)) * math.cos(phi) +
(b * math.cos(theta) + d * math.sin(theta)) * math.sin(phi);
final s22 = (a * math.sin(theta) - c * math.cos(theta)) * math.sin(phi) +
(-b * math.sin(theta) + d * math.cos(theta)) * math.cos(phi);
final V = Matrix.fromList([
[s11.sign * math.cos(phi), s22.sign * math.sin(phi)],
[s11.sign * math.sin(phi), -s22.sign * math.cos(phi)],
]);
return {
'U': U,
'S': S,
'V': V,
};
}
int matrixRank() {
final svdResult = svd();
final Vector S = svdResult['S']!;
final rank = S.toList().where((element) => element > 1e-10).length;
return rank;
}
}
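A quick sanity check of the decomposition (a sketch, not part of the diff; the sample matrix is arbitrary), assuming ml_linalg's standard Matrix operators and the extensions above in scope:

import 'package:ml_linalg/linalg.dart';

void main() {
  final A = Matrix.fromList([
    [4.0, 0.0],
    [3.0, -5.0],
  ]);
  final result = A.svd();
  final Matrix U = result['U'];
  final Vector S = result['S'];
  final Matrix V = result['V'];
  // Rebuild A from the factors: A == U * diag(S) * V^T.
  final sigma = Matrix.fromList([
    [S[0], 0.0],
    [0.0, S[1]],
  ]);
  print(U * sigma * V.transpose()); // approximately equal to A
  print(A.matrixRank()); // 2, since both singular values are non-zero
}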
extension TransformMatrix on Matrix {
List<List<double>> to2DList() {
final List<List<double>> outerList = [];
for (var i = 0; i < rowCount; i++) {
final innerList = this[i].toList();
outerList.add(innerList);
}
return outerList;
}
}

View file

@ -23,4 +23,9 @@ class EnteWatch extends Stopwatch {
reset();
previousElapsed = 0;
}
void stopWithLog(String msg) {
log(msg);
stop();
}
}

mobile/lib/face/db.dart (new file, 1018 lines)

File diff suppressed because it is too large

View file

@ -0,0 +1,103 @@
// Faces Table Fields & Schema Queries
import 'package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart';
const facesTable = 'faces';
const fileIDColumn = 'file_id';
const faceIDColumn = 'face_id';
const faceDetectionColumn = 'detection';
const faceEmbeddingBlob = 'eBlob';
const faceScore = 'score';
const faceBlur = 'blur';
const isSideways = 'is_sideways';
const imageWidth = 'width';
const imageHeight = 'height';
const faceClusterId = 'cluster_id';
const mlVersionColumn = 'ml_version';
const createFacesTable = '''CREATE TABLE IF NOT EXISTS $facesTable (
$fileIDColumn INTEGER NOT NULL,
$faceIDColumn TEXT NOT NULL UNIQUE,
$faceDetectionColumn TEXT NOT NULL,
$faceEmbeddingBlob BLOB NOT NULL,
$faceScore REAL NOT NULL,
$faceBlur REAL NOT NULL DEFAULT $kLapacianDefault,
$isSideways INTEGER NOT NULL DEFAULT 0,
$imageHeight INTEGER NOT NULL DEFAULT 0,
$imageWidth INTEGER NOT NULL DEFAULT 0,
$mlVersionColumn INTEGER NOT NULL DEFAULT -1,
PRIMARY KEY($fileIDColumn, $faceIDColumn)
);
''';
const deleteFacesTable = 'DROP TABLE IF EXISTS $facesTable';
// End of Faces Table Fields & Schema Queries
//##region Face Clusters Table Fields & Schema Queries
const faceClustersTable = 'face_clusters';
const fcClusterID = 'cluster_id';
const fcFaceId = 'face_id';
// fcFaceId is the primary key; fcClusterID is effectively a foreign key into the faces table
const createFaceClustersTable = '''
CREATE TABLE IF NOT EXISTS $faceClustersTable (
$fcFaceId TEXT NOT NULL,
$fcClusterID INTEGER NOT NULL,
PRIMARY KEY($fcFaceId)
);
''';
// -- Creating a non-unique index on clusterID for query optimization
const fcClusterIDIndex =
'''CREATE INDEX IF NOT EXISTS idx_fcClusterID ON $faceClustersTable($fcClusterID);''';
const dropFaceClustersTable = 'DROP TABLE IF EXISTS $faceClustersTable';
//##endregion
// People Table Fields & Schema Queries
const personTable = 'person';
const deletePersonTable = 'DROP TABLE IF EXISTS $personTable';
//End People Table Fields & Schema Queries
// Clusters Table Fields & Schema Queries
const clusterPersonTable = 'cluster_person';
const personIdColumn = 'person_id';
const clusterIDColumn = 'cluster_id';
const createClusterPersonTable = '''
CREATE TABLE IF NOT EXISTS $clusterPersonTable (
$personIdColumn TEXT NOT NULL,
$clusterIDColumn INTEGER NOT NULL,
PRIMARY KEY($personIdColumn, $clusterIDColumn)
);
''';
const dropClusterPersonTable = 'DROP TABLE IF EXISTS $clusterPersonTable';
// End Clusters Table Fields & Schema Queries
/// Cluster Summary Table Fields & Schema Queries
const clusterSummaryTable = 'cluster_summary';
const avgColumn = 'avg';
const countColumn = 'count';
const createClusterSummaryTable = '''
CREATE TABLE IF NOT EXISTS $clusterSummaryTable (
$clusterIDColumn INTEGER NOT NULL,
$avgColumn BLOB NOT NULL,
$countColumn INTEGER NOT NULL,
PRIMARY KEY($clusterIDColumn)
);
''';
const dropClusterSummaryTable = 'DROP TABLE IF EXISTS $clusterSummaryTable';
/// End Cluster Summary Table Fields & Schema Queries
/// notPersonFeedback Table Fields & Schema Queries
const notPersonFeedback = 'not_person_feedback';
const createNotPersonFeedbackTable = '''
CREATE TABLE IF NOT EXISTS $notPersonFeedback (
$personIdColumn TEXT NOT NULL,
$clusterIDColumn INTEGER NOT NULL,
PRIMARY KEY($personIdColumn, $clusterIDColumn)
);
''';
const dropNotPersonFeedbackTable = 'DROP TABLE IF EXISTS $notPersonFeedback';
// End notPersonFeedback Table Fields & Schema Queries
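For illustration, a sketch of how these schema constants would be applied at startup, assuming a sqflite-style Database handle (the real wiring lives in mobile/lib/face/db.dart, whose diff is suppressed above):

import 'package:sqflite/sqflite.dart';

Future<void> createFaceTables(Database db) async {
  await db.execute(createFacesTable);
  await db.execute(createFaceClustersTable);
  await db.execute(fcClusterIDIndex);
  await db.execute(createClusterPersonTable);
  await db.execute(createClusterSummaryTable);
  await db.execute(createNotPersonFeedbackTable);
}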

View file

@ -0,0 +1,57 @@
import "dart:convert";
import 'package:photos/face/db_fields.dart';
import "package:photos/face/model/detection.dart";
import "package:photos/face/model/face.dart";
import "package:photos/generated/protos/ente/common/vector.pb.dart";
import "package:photos/models/ml/ml_versions.dart";
int boolToSQLInt(bool? value, {bool defaultValue = false}) =>
    (value ?? defaultValue) ? 1 : 0;
bool sqlIntToBool(int? value, {bool defaultValue = false}) =>
    (value ?? (defaultValue ? 1 : 0)) != 0;
Map<String, dynamic> mapRemoteToFaceDB(Face face) {
return {
faceIDColumn: face.faceID,
fileIDColumn: face.fileID,
faceDetectionColumn: json.encode(face.detection.toJson()),
faceEmbeddingBlob: EVector(
values: face.embedding,
).writeToBuffer(),
faceScore: face.score,
faceBlur: face.blur,
isSideways: face.detection.faceIsSideways() ? 1 : 0,
mlVersionColumn: faceMlVersion,
imageWidth: face.fileInfo?.imageWidth ?? 0,
imageHeight: face.fileInfo?.imageHeight ?? 0,
};
}
Face mapRowToFace(Map<String, dynamic> row) {
return Face(
row[faceIDColumn] as String,
row[fileIDColumn] as int,
EVector.fromBuffer(row[faceEmbeddingBlob] as List<int>).values,
row[faceScore] as double,
Detection.fromJson(json.decode(row[faceDetectionColumn] as String)),
row[faceBlur] as double,
fileInfo: FileInfo(
imageWidth: row[imageWidth] as int,
imageHeight: row[imageHeight] as int,
),
);
}
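A roundtrip sketch (not part of the diff): a Face serialised into a DB row map with mapRemoteToFaceDB survives being read back with mapRowToFace:

final face = Face.empty(42);
final restored = mapRowToFace(mapRemoteToFaceDB(face));
assert(restored.faceID == face.faceID && restored.fileID == 42);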

View file

@ -0,0 +1,43 @@
/// Bounding box of a face.
///
/// [xMin] and [yMin] are the coordinates of the top left corner of the box, and
/// [width] and [height] are the width and height of the box.
///
/// WARNING: All values are relative to the original image size, so in the range [0, 1].
class FaceBox {
final double xMin;
final double yMin;
final double width;
final double height;
FaceBox({
required this.xMin,
required this.yMin,
required this.width,
required this.height,
});
factory FaceBox.fromJson(Map<String, dynamic> json) {
return FaceBox(
xMin: (json['xMin'] is int
? (json['xMin'] as int).toDouble()
: json['xMin'] as double),
yMin: (json['yMin'] is int
? (json['yMin'] as int).toDouble()
: json['yMin'] as double),
width: (json['width'] is int
? (json['width'] as int).toDouble()
: json['width'] as double),
height: (json['height'] is int
? (json['height'] as int).toDouble()
: json['height'] as double),
);
}
Map<String, dynamic> toJson() => {
'xMin': xMin,
'yMin': yMin,
'width': width,
'height': height,
};
}
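Because the box is normalised, callers must scale it by the image dimensions before cropping or drawing; a hypothetical helper (not in the diff):

List<int> faceBoxToPixels(FaceBox box, int imageWidth, int imageHeight) {
  // Returns [left, top, width, height] in pixels.
  return [
    (box.xMin * imageWidth).round(),
    (box.yMin * imageHeight).round(),
    (box.width * imageWidth).round(),
    (box.height * imageHeight).round(),
  ];
}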

View file

@ -0,0 +1,120 @@
import "dart:math" show min, max;
import "package:photos/face/model/box.dart";
import "package:photos/face/model/landmark.dart";
import "package:photos/services/machine_learning/face_ml/face_detection/detection.dart";
/// Stores the face detection data, notably the bounding box and landmarks.
///
/// - Bounding box: [FaceBox] with xMin, yMin (so top left corner), width, height
/// - Landmarks: list of [Landmark]s, namely leftEye, rightEye, nose, leftMouth, rightMouth
///
/// WARNING: All coordinates are relative to the image size, so in the range [0, 1]!
class Detection {
FaceBox box;
List<Landmark> landmarks;
Detection({
required this.box,
required this.landmarks,
});
bool get isEmpty => box.width == 0 && box.height == 0 && landmarks.isEmpty;
// empty box
Detection.empty()
: box = FaceBox(
xMin: 0,
yMin: 0,
width: 0,
height: 0,
),
landmarks = [];
Map<String, dynamic> toJson() => {
'box': box.toJson(),
'landmarks': landmarks.map((x) => x.toJson()).toList(),
};
factory Detection.fromJson(Map<String, dynamic> json) {
return Detection(
box: FaceBox.fromJson(json['box'] as Map<String, dynamic>),
landmarks: List<Landmark>.from(
json['landmarks']
.map((x) => Landmark.fromJson(x as Map<String, dynamic>)),
),
);
}
int getFaceArea(int imageWidth, int imageHeight) {
return (box.width * imageWidth * box.height * imageHeight).toInt();
}
FaceDirection getFaceDirection() {
if (isEmpty) {
return FaceDirection.straight;
}
final leftEye = [landmarks[0].x, landmarks[0].y];
final rightEye = [landmarks[1].x, landmarks[1].y];
final nose = [landmarks[2].x, landmarks[2].y];
final leftMouth = [landmarks[3].x, landmarks[3].y];
final rightMouth = [landmarks[4].x, landmarks[4].y];
final double eyeDistanceX = (rightEye[0] - leftEye[0]).abs();
final double eyeDistanceY = (rightEye[1] - leftEye[1]).abs();
final double mouthDistanceY = (rightMouth[1] - leftMouth[1]).abs();
final bool faceIsUpright =
(max(leftEye[1], rightEye[1]) + 0.5 * eyeDistanceY < nose[1]) &&
(nose[1] + 0.5 * mouthDistanceY < min(leftMouth[1], rightMouth[1]));
final bool noseStickingOutLeft = (nose[0] < min(leftEye[0], rightEye[0])) &&
(nose[0] < min(leftMouth[0], rightMouth[0]));
final bool noseStickingOutRight =
(nose[0] > max(leftEye[0], rightEye[0])) &&
(nose[0] > max(leftMouth[0], rightMouth[0]));
final bool noseCloseToLeftEye =
(nose[0] - leftEye[0]).abs() < 0.2 * eyeDistanceX;
final bool noseCloseToRightEye =
(nose[0] - rightEye[0]).abs() < 0.2 * eyeDistanceX;
// if (faceIsUpright && (noseStickingOutLeft || noseCloseToLeftEye)) {
if (noseStickingOutLeft || (faceIsUpright && noseCloseToLeftEye)) {
return FaceDirection.left;
// } else if (faceIsUpright && (noseStickingOutRight || noseCloseToRightEye)) {
} else if (noseStickingOutRight || (faceIsUpright && noseCloseToRightEye)) {
return FaceDirection.right;
}
return FaceDirection.straight;
}
bool faceIsSideways() {
if (isEmpty) {
return false;
}
final leftEye = [landmarks[0].x, landmarks[0].y];
final rightEye = [landmarks[1].x, landmarks[1].y];
final nose = [landmarks[2].x, landmarks[2].y];
final leftMouth = [landmarks[3].x, landmarks[3].y];
final rightMouth = [landmarks[4].x, landmarks[4].y];
final double eyeDistanceX = (rightEye[0] - leftEye[0]).abs();
final double eyeDistanceY = (rightEye[1] - leftEye[1]).abs();
final double mouthDistanceY = (rightMouth[1] - leftMouth[1]).abs();
final bool faceIsUpright =
(max(leftEye[1], rightEye[1]) + 0.5 * eyeDistanceY < nose[1]) &&
(nose[1] + 0.5 * mouthDistanceY < min(leftMouth[1], rightMouth[1]));
final bool noseStickingOutLeft =
(nose[0] < min(leftEye[0], rightEye[0]) - 0.5 * eyeDistanceX) &&
(nose[0] < min(leftMouth[0], rightMouth[0]));
final bool noseStickingOutRight =
(nose[0] > max(leftEye[0], rightEye[0]) + 0.5 * eyeDistanceX) &&
(nose[0] > max(leftMouth[0], rightMouth[0]));
return faceIsUpright && (noseStickingOutLeft || noseStickingOutRight);
}
}

View file

@ -0,0 +1,25 @@
class Dimensions {
final int width;
final int height;
const Dimensions({required this.width, required this.height});
@override
String toString() {
return 'Dimensions(width: $width, height: $height)';
}
Map<String, int> toJson() {
return {
'width': width,
'height': height,
};
}
factory Dimensions.fromJson(Map<String, dynamic> json) {
return Dimensions(
width: json['width'] as int,
height: json['height'] as int,
);
}
}

View file

@ -0,0 +1,85 @@
import "package:photos/face/model/detection.dart";
import 'package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart';
import "package:photos/services/machine_learning/face_ml/face_ml_result.dart";
// FileInfo contains the image width and height of the image the face was detected in.
class FileInfo {
int? imageWidth;
int? imageHeight;
FileInfo({
this.imageWidth,
this.imageHeight,
});
}
class Face {
final String faceID;
final List<double> embedding;
Detection detection;
final double score;
final double blur;
///#region Local DB fields
// This is not stored on the server, using it for local DB row
FileInfo? fileInfo;
final int fileID;
///#endregion
bool get isBlurry => blur < kLaplacianHardThreshold;
bool get hasHighScore => score > kMinimumQualityFaceScore;
bool get isHighQuality => (!isBlurry) && hasHighScore;
int area({int? w, int? h}) {
return detection.getFaceArea(
fileInfo?.imageWidth ?? w ?? 0,
fileInfo?.imageHeight ?? h ?? 0,
);
}
Face(
this.faceID,
this.fileID,
this.embedding,
this.score,
this.detection,
this.blur, {
this.fileInfo,
});
factory Face.empty(int fileID, {bool error = false}) {
return Face(
"$fileID-0",
fileID,
<double>[],
error ? -1.0 : 0.0,
Detection.empty(),
0.0,
);
}
factory Face.fromJson(Map<String, dynamic> json) {
final String faceID = json['faceID'] as String;
final int fileID = getFileIdFromFaceId(faceID);
return Face(
faceID,
fileID,
List<double>.from((json['embedding'] ?? json['embeddings']) as List),
json['score'] as double,
Detection.fromJson(json['detection'] as Map<String, dynamic>),
// a high blur (Laplacian) value means the face is sharp, not blurry
(json['blur'] ?? kLapacianDefault) as double,
);
}
// Note: Keep the information in toJson minimal, and keep it in sync with desktop.
// Derive fields like fileID from other values whenever possible
Map<String, dynamic> toJson() => {
'faceID': faceID,
'embedding': embedding,
'detection': detection.toJson(),
'score': score,
'blur': blur,
};
}
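Note that toJson deliberately omits fileID: fromJson re-derives it from the faceID prefix via getFileIdFromFaceId, so a roundtrip preserves it. A sketch:

final face = Face.empty(1234); // faceID is "1234-0"
final restored = Face.fromJson(face.toJson());
assert(restored.fileID == 1234);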

View file

@ -0,0 +1,33 @@
/// Landmark coordinate data.
///
/// WARNING: All coordinates are relative to the image size, so in the range [0, 1]!
class Landmark {
double x;
double y;
Landmark({
required this.x,
required this.y,
});
Map<String, dynamic> toJson() => {
'x': x,
'y': y,
};
factory Landmark.fromJson(Map<String, dynamic> json) {
return Landmark(
x: (json['x'] is int
? (json['x'] as int).toDouble()
: json['x'] as double),
y: (json['y'] is int
? (json['y'] as int).toDouble()
: json['y'] as double),
);
}
@override
toString() {
return '(x: ${x.toStringAsFixed(4)}, y: ${y.toStringAsFixed(4)})';
}
}

View file

@ -0,0 +1,139 @@
// PersonEntity represents the stored information about a person in the context of face clustering.
// On the remote server, the PersonEntity is stored as an {Entity} with type person.
// On the device, the same information is stored as [LocalEntityData] with type person.
import "package:flutter/foundation.dart";
class PersonEntity {
final String remoteID;
final PersonData data;
PersonEntity(
this.remoteID,
this.data,
);
// copyWith
PersonEntity copyWith({
String? remoteID,
PersonData? data,
}) {
return PersonEntity(
remoteID ?? this.remoteID,
data ?? this.data,
);
}
}
class ClusterInfo {
final int id;
final Set<String> faces;
ClusterInfo({
required this.id,
required this.faces,
});
// toJson
Map<String, dynamic> toJson() => {
'id': id,
'faces': faces.toList(),
};
// from Json
factory ClusterInfo.fromJson(Map<String, dynamic> json) {
return ClusterInfo(
id: json['id'] as int,
faces: (json['faces'] as List<dynamic>).map((e) => e as String).toSet(),
);
}
}
class PersonData {
final String name;
final bool isHidden;
String? avatarFaceId;
List<ClusterInfo>? assigned = List<ClusterInfo>.empty();
List<ClusterInfo>? rejected = List<ClusterInfo>.empty();
final String? birthDate;
bool hasAvatar() => avatarFaceId != null;
bool get isIgnored =>
(name.isEmpty || name == '(hidden)' || name == '(ignored)');
PersonData({
required this.name,
this.assigned,
this.rejected,
this.avatarFaceId,
this.isHidden = false,
this.birthDate,
});
// copyWith
PersonData copyWith({
String? name,
List<ClusterInfo>? assigned,
String? avatarFaceId,
bool? isHidden,
int? version,
String? birthDate,
}) {
return PersonData(
name: name ?? this.name,
assigned: assigned ?? this.assigned,
avatarFaceId: avatarFaceId ?? this.avatarFaceId,
isHidden: isHidden ?? this.isHidden,
birthDate: birthDate ?? this.birthDate,
);
}
void logStats() {
if (kDebugMode == false) return;
// log number of assigned and rejected clusters and total number of faces in each cluster
final StringBuffer sb = StringBuffer();
sb.writeln('Person: $name');
int assignedCount = 0;
for (final a in (assigned ?? <ClusterInfo>[])) {
assignedCount += a.faces.length;
}
sb.writeln('Assigned: ${assigned?.length} withFaces $assignedCount');
sb.writeln('Rejected: ${rejected?.length}');
if (assigned != null) {
for (var cluster in assigned!) {
sb.writeln('Cluster: ${cluster.id} - ${cluster.faces.length}');
}
}
debugPrint(sb.toString());
}
// toJson
Map<String, dynamic> toJson() => {
'name': name,
'assigned': assigned?.map((e) => e.toJson()).toList(),
'rejected': rejected?.map((e) => e.toJson()).toList(),
'avatarFaceId': avatarFaceId,
'isHidden': isHidden,
'birthDate': birthDate,
};
// fromJson
factory PersonData.fromJson(Map<String, dynamic> json) {
final assigned = (json['assigned'] == null || json['assigned'].length == 0)
? <ClusterInfo>[]
: List<ClusterInfo>.from(
json['assigned'].map((x) => ClusterInfo.fromJson(x)),
);
final rejected = (json['rejected'] == null || json['rejected'].length == 0)
? <ClusterInfo>[]
: List<ClusterInfo>.from(
json['rejected'].map((x) => ClusterInfo.fromJson(x)),
);
return PersonData(
name: json['name'] as String,
assigned: assigned,
rejected: rejected,
avatarFaceId: json['avatarFaceId'] as String?,
isHidden: json['isHidden'] as bool? ?? false,
birthDate: json['birthDate'] as String?,
);
}
}
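A roundtrip sketch with hypothetical values, exercising toJson/fromJson through a JSON string (dart:convert assumed imported):

final data = PersonData(name: "Alice", birthDate: "1990-01-01");
final restored =
    PersonData.fromJson(json.decode(json.encode(data.toJson())));
assert(restored.name == "Alice" && restored.birthDate == "1990-01-01");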

View file

@ -34,6 +34,8 @@ class MessageLookup extends MessageLookupByLibrary {
"addViewers": m1,
"changeLocationOfSelectedItems": MessageLookupByLibrary.simpleMessage(
"Change location of selected items?"),
"clusteringProgress":
MessageLookupByLibrary.simpleMessage("Clustering progress"),
"contacts": MessageLookupByLibrary.simpleMessage("Contacts"),
"createCollaborativeLink":
MessageLookupByLibrary.simpleMessage("Create collaborative link"),
@ -44,7 +46,14 @@ class MessageLookup extends MessageLookupByLibrary {
"editsToLocationWillOnlyBeSeenWithinEnte":
MessageLookupByLibrary.simpleMessage(
"Edits to location will only be seen within Ente"),
"enterPersonName":
MessageLookupByLibrary.simpleMessage("Enter person name"),
"faceRecognition":
MessageLookupByLibrary.simpleMessage("Face recognition"),
"faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage(
"Please note that this will result in a higher bandwidth and battery usage until all items are indexed."),
"fileTypes": MessageLookupByLibrary.simpleMessage("File types"),
"foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"),
"joinDiscord": MessageLookupByLibrary.simpleMessage("Join Discord"),
"locations": MessageLookupByLibrary.simpleMessage("Locations"),
"longPressAnEmailToVerifyEndToEndEncryption":
@ -55,6 +64,8 @@ class MessageLookup extends MessageLookupByLibrary {
"Modify your query, or try searching for"),
"moveToHiddenAlbum":
MessageLookupByLibrary.simpleMessage("Move to hidden album"),
"removePersonLabel":
MessageLookupByLibrary.simpleMessage("Remove person label"),
"search": MessageLookupByLibrary.simpleMessage("Search"),
"selectALocation":
MessageLookupByLibrary.simpleMessage("Select a location"),

View file

@ -227,6 +227,7 @@ class MessageLookup extends MessageLookupByLibrary {
"Ich verstehe, dass ich meine Daten verlieren kann, wenn ich mein Passwort vergesse, da meine Daten <underline>Ende-zu-Ende-verschlüsselt</underline> sind."),
"activeSessions":
MessageLookupByLibrary.simpleMessage("Aktive Sitzungen"),
"addAName": MessageLookupByLibrary.simpleMessage("Add a name"),
"addANewEmail": MessageLookupByLibrary.simpleMessage(
"Neue E-Mail-Adresse hinzufügen"),
"addCollaborator":
@ -435,6 +436,8 @@ class MessageLookup extends MessageLookupByLibrary {
"Nach Aufnahmezeit gruppieren"),
"clubByFileName":
MessageLookupByLibrary.simpleMessage("Nach Dateiname gruppieren"),
"clusteringProgress":
MessageLookupByLibrary.simpleMessage("Clustering progress"),
"codeAppliedPageTitle":
MessageLookupByLibrary.simpleMessage("Code eingelöst"),
"codeCopiedToClipboard": MessageLookupByLibrary.simpleMessage(
@ -675,6 +678,8 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("Passwort eingeben"),
"enterPasswordToEncrypt": MessageLookupByLibrary.simpleMessage(
"Gib ein Passwort ein, mit dem wir deine Daten verschlüsseln können"),
"enterPersonName":
MessageLookupByLibrary.simpleMessage("Enter person name"),
"enterReferralCode": MessageLookupByLibrary.simpleMessage(
"Gib den Weiterempfehlungs-Code ein"),
"enterThe6digitCodeFromnyourAuthenticatorApp":
@ -699,6 +704,10 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("Protokolle exportieren"),
"exportYourData":
MessageLookupByLibrary.simpleMessage("Daten exportieren"),
"faceRecognition":
MessageLookupByLibrary.simpleMessage("Face recognition"),
"faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage(
"Please note that this will result in a higher bandwidth and battery usage until all items are indexed."),
"faces": MessageLookupByLibrary.simpleMessage("Gesichter"),
"failedToApplyCode": MessageLookupByLibrary.simpleMessage(
"Der Code konnte nicht aktiviert werden"),
@ -738,11 +747,14 @@ class MessageLookup extends MessageLookupByLibrary {
"filesBackedUpInAlbum": m23,
"filesDeleted":
MessageLookupByLibrary.simpleMessage("Dateien gelöscht"),
"findPeopleByName": MessageLookupByLibrary.simpleMessage(
"Find people quickly by searching by name"),
"flip": MessageLookupByLibrary.simpleMessage("Spiegeln"),
"forYourMemories":
MessageLookupByLibrary.simpleMessage("Als Erinnerung"),
"forgotPassword":
MessageLookupByLibrary.simpleMessage("Passwort vergessen"),
"foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"),
"freeStorageClaimed": MessageLookupByLibrary.simpleMessage(
"Kostenlos hinzugefügter Speicherplatz"),
"freeStorageOnReferralSuccess": m24,
@ -1164,6 +1176,8 @@ class MessageLookup extends MessageLookupByLibrary {
"removeParticipant":
MessageLookupByLibrary.simpleMessage("Teilnehmer entfernen"),
"removeParticipantBody": m43,
"removePersonLabel":
MessageLookupByLibrary.simpleMessage("Remove person label"),
"removePublicLink":
MessageLookupByLibrary.simpleMessage("Öffentlichen Link entfernen"),
"removeShareItemsWarning": MessageLookupByLibrary.simpleMessage(

View file

@ -132,7 +132,7 @@ class MessageLookup extends MessageLookupByLibrary {
"Please talk to ${providerName} support if you were charged";
static String m38(endDate) =>
"Free trial valid till ${endDate}.\nYou can purchase a paid plan afterwards.";
"Free trial valid till ${endDate}.\nYou can choose a paid plan afterwards.";
static String m39(toEmail) => "Please email us at ${toEmail}";
@ -225,6 +225,7 @@ class MessageLookup extends MessageLookupByLibrary {
"I understand that if I lose my password, I may lose my data since my data is <underline>end-to-end encrypted</underline>."),
"activeSessions":
MessageLookupByLibrary.simpleMessage("Active sessions"),
"addAName": MessageLookupByLibrary.simpleMessage("Add a name"),
"addANewEmail": MessageLookupByLibrary.simpleMessage("Add a new email"),
"addCollaborator":
MessageLookupByLibrary.simpleMessage("Add collaborator"),
@ -434,6 +435,8 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("Club by capture time"),
"clubByFileName":
MessageLookupByLibrary.simpleMessage("Club by file name"),
"clusteringProgress":
MessageLookupByLibrary.simpleMessage("Clustering progress"),
"codeAppliedPageTitle":
MessageLookupByLibrary.simpleMessage("Code applied"),
"codeCopiedToClipboard":
@ -675,6 +678,8 @@ class MessageLookup extends MessageLookupByLibrary {
"enterPassword": MessageLookupByLibrary.simpleMessage("Enter password"),
"enterPasswordToEncrypt": MessageLookupByLibrary.simpleMessage(
"Enter a password we can use to encrypt your data"),
"enterPersonName":
MessageLookupByLibrary.simpleMessage("Enter person name"),
"enterReferralCode":
MessageLookupByLibrary.simpleMessage("Enter referral code"),
"enterThe6digitCodeFromnyourAuthenticatorApp":
@ -697,6 +702,10 @@ class MessageLookup extends MessageLookupByLibrary {
"exportLogs": MessageLookupByLibrary.simpleMessage("Export logs"),
"exportYourData":
MessageLookupByLibrary.simpleMessage("Export your data"),
"faceRecognition":
MessageLookupByLibrary.simpleMessage("Face recognition"),
"faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage(
"Please note that this will result in a higher bandwidth and battery usage until all items are indexed."),
"faces": MessageLookupByLibrary.simpleMessage("Faces"),
"failedToApplyCode":
MessageLookupByLibrary.simpleMessage("Failed to apply code"),
@ -736,11 +745,14 @@ class MessageLookup extends MessageLookupByLibrary {
"filesDeleted": MessageLookupByLibrary.simpleMessage("Files deleted"),
"filesSavedToGallery":
MessageLookupByLibrary.simpleMessage("Files saved to gallery"),
"findPeopleByName":
MessageLookupByLibrary.simpleMessage("Find people quickly by name"),
"flip": MessageLookupByLibrary.simpleMessage("Flip"),
"forYourMemories":
MessageLookupByLibrary.simpleMessage("for your memories"),
"forgotPassword":
MessageLookupByLibrary.simpleMessage("Forgot password"),
"foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"),
"freeStorageClaimed":
MessageLookupByLibrary.simpleMessage("Free storage claimed"),
"freeStorageOnReferralSuccess": m24,
@ -1022,6 +1034,7 @@ class MessageLookup extends MessageLookupByLibrary {
"paymentFailedTalkToProvider": m37,
"pendingItems": MessageLookupByLibrary.simpleMessage("Pending items"),
"pendingSync": MessageLookupByLibrary.simpleMessage("Pending sync"),
"people": MessageLookupByLibrary.simpleMessage("People"),
"peopleUsingYourCode":
MessageLookupByLibrary.simpleMessage("People using your code"),
"permDeleteWarning": MessageLookupByLibrary.simpleMessage(
@ -1151,6 +1164,8 @@ class MessageLookup extends MessageLookupByLibrary {
"removeParticipant":
MessageLookupByLibrary.simpleMessage("Remove participant"),
"removeParticipantBody": m43,
"removePersonLabel":
MessageLookupByLibrary.simpleMessage("Remove person label"),
"removePublicLink":
MessageLookupByLibrary.simpleMessage("Remove public link"),
"removeShareItemsWarning": MessageLookupByLibrary.simpleMessage(
@ -1208,8 +1223,8 @@ class MessageLookup extends MessageLookupByLibrary {
"Add descriptions like \"#trip\" in photo info to quickly find them here"),
"searchDatesEmptySection": MessageLookupByLibrary.simpleMessage(
"Search by a date, month or year"),
"searchFaceEmptySection":
MessageLookupByLibrary.simpleMessage("Find all photos of a person"),
"searchFaceEmptySection": MessageLookupByLibrary.simpleMessage(
"Persons will be shown here once indexing is done"),
"searchFileTypesAndNamesEmptySection":
MessageLookupByLibrary.simpleMessage("File types and names"),
"searchHint1":

View file

@ -367,6 +367,8 @@ class MessageLookup extends MessageLookupByLibrary {
"close": MessageLookupByLibrary.simpleMessage("Cerrar"),
"clubByCaptureTime": MessageLookupByLibrary.simpleMessage(
"Agrupar por tiempo de captura"),
"clusteringProgress":
MessageLookupByLibrary.simpleMessage("Clustering progress"),
"codeAppliedPageTitle":
MessageLookupByLibrary.simpleMessage("Código aplicado"),
"codeCopiedToClipboard": MessageLookupByLibrary.simpleMessage(
@ -585,6 +587,8 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("Introduzca contraseña"),
"enterPasswordToEncrypt": MessageLookupByLibrary.simpleMessage(
"Introduzca una contraseña que podamos usar para cifrar sus datos"),
"enterPersonName":
MessageLookupByLibrary.simpleMessage("Enter person name"),
"enterReferralCode": MessageLookupByLibrary.simpleMessage(
"Ingresar código de referencia"),
"enterThe6digitCodeFromnyourAuthenticatorApp":
@ -609,6 +613,10 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("Exportar registros"),
"exportYourData":
MessageLookupByLibrary.simpleMessage("Exportar tus datos"),
"faceRecognition":
MessageLookupByLibrary.simpleMessage("Face recognition"),
"faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage(
"Please note that this will result in a higher bandwidth and battery usage until all items are indexed."),
"failedToApplyCode":
MessageLookupByLibrary.simpleMessage("Error al aplicar el código"),
"failedToCancel":
@ -647,6 +655,7 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("para tus recuerdos"),
"forgotPassword":
MessageLookupByLibrary.simpleMessage("Olvidé mi contraseña"),
"foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"),
"freeStorageClaimed": MessageLookupByLibrary.simpleMessage(
"Almacenamiento gratuito reclamado"),
"freeStorageOnReferralSuccess": m24,
@ -997,6 +1006,8 @@ class MessageLookup extends MessageLookupByLibrary {
"removeParticipant":
MessageLookupByLibrary.simpleMessage("Quitar participante"),
"removeParticipantBody": m43,
"removePersonLabel":
MessageLookupByLibrary.simpleMessage("Remove person label"),
"removePublicLink":
MessageLookupByLibrary.simpleMessage("Quitar enlace público"),
"removeShareItemsWarning": MessageLookupByLibrary.simpleMessage(

View file

@ -425,6 +425,8 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("Grouper par durée"),
"clubByFileName":
MessageLookupByLibrary.simpleMessage("Grouper par nom de fichier"),
"clusteringProgress":
MessageLookupByLibrary.simpleMessage("Clustering progress"),
"codeAppliedPageTitle":
MessageLookupByLibrary.simpleMessage("Code appliqué"),
"codeCopiedToClipboard": MessageLookupByLibrary.simpleMessage(
@ -665,6 +667,8 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("Saisissez le mot de passe"),
"enterPasswordToEncrypt": MessageLookupByLibrary.simpleMessage(
"Entrez un mot de passe que nous pouvons utiliser pour chiffrer vos données"),
"enterPersonName":
MessageLookupByLibrary.simpleMessage("Enter person name"),
"enterReferralCode": MessageLookupByLibrary.simpleMessage(
"Entrez le code de parrainage"),
"enterThe6digitCodeFromnyourAuthenticatorApp":
@ -688,6 +692,10 @@ class MessageLookup extends MessageLookupByLibrary {
"exportLogs": MessageLookupByLibrary.simpleMessage("Exporter les logs"),
"exportYourData":
MessageLookupByLibrary.simpleMessage("Exportez vos données"),
"faceRecognition":
MessageLookupByLibrary.simpleMessage("Face recognition"),
"faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage(
"Please note that this will result in a higher bandwidth and battery usage until all items are indexed."),
"faces": MessageLookupByLibrary.simpleMessage("Visages"),
"failedToApplyCode": MessageLookupByLibrary.simpleMessage(
"Impossible d\'appliquer le code"),
@ -732,6 +740,7 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("pour vos souvenirs"),
"forgotPassword":
MessageLookupByLibrary.simpleMessage("Mot de passe oublié"),
"foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"),
"freeStorageClaimed":
MessageLookupByLibrary.simpleMessage("Stockage gratuit réclamé"),
"freeStorageOnReferralSuccess": m24,
@ -1129,6 +1138,8 @@ class MessageLookup extends MessageLookupByLibrary {
"removeParticipant":
MessageLookupByLibrary.simpleMessage("Supprimer le participant"),
"removeParticipantBody": m43,
"removePersonLabel":
MessageLookupByLibrary.simpleMessage("Remove person label"),
"removePublicLink":
MessageLookupByLibrary.simpleMessage("Supprimer le lien public"),
"removeShareItemsWarning": MessageLookupByLibrary.simpleMessage(

View file

@ -411,6 +411,8 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("Club per tempo di cattura"),
"clubByFileName":
MessageLookupByLibrary.simpleMessage("Unisci per nome file"),
"clusteringProgress":
MessageLookupByLibrary.simpleMessage("Clustering progress"),
"codeAppliedPageTitle":
MessageLookupByLibrary.simpleMessage("Codice applicato"),
"codeCopiedToClipboard": MessageLookupByLibrary.simpleMessage(
@ -644,6 +646,8 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("Inserisci password"),
"enterPasswordToEncrypt": MessageLookupByLibrary.simpleMessage(
"Inserisci una password per criptare i tuoi dati"),
"enterPersonName":
MessageLookupByLibrary.simpleMessage("Enter person name"),
"enterReferralCode": MessageLookupByLibrary.simpleMessage(
"Inserisci il codice di invito"),
"enterThe6digitCodeFromnyourAuthenticatorApp":
@ -665,6 +669,10 @@ class MessageLookup extends MessageLookupByLibrary {
"Questo link è scaduto. Si prega di selezionare un nuovo orario di scadenza o disabilitare la scadenza del link."),
"exportLogs": MessageLookupByLibrary.simpleMessage("Esporta log"),
"exportYourData": MessageLookupByLibrary.simpleMessage("Esporta dati"),
"faceRecognition":
MessageLookupByLibrary.simpleMessage("Face recognition"),
"faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage(
"Please note that this will result in a higher bandwidth and battery usage until all items are indexed."),
"failedToApplyCode": MessageLookupByLibrary.simpleMessage(
"Impossibile applicare il codice"),
"failedToCancel":
@ -704,6 +712,7 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("per i tuoi ricordi"),
"forgotPassword":
MessageLookupByLibrary.simpleMessage("Password dimenticata"),
"foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"),
"freeStorageClaimed":
MessageLookupByLibrary.simpleMessage("Spazio gratuito richiesto"),
"freeStorageOnReferralSuccess": m24,
@ -1090,6 +1099,8 @@ class MessageLookup extends MessageLookupByLibrary {
"removeParticipant":
MessageLookupByLibrary.simpleMessage("Rimuovi partecipante"),
"removeParticipantBody": m43,
"removePersonLabel":
MessageLookupByLibrary.simpleMessage("Remove person label"),
"removePublicLink":
MessageLookupByLibrary.simpleMessage("Rimuovi link pubblico"),
"removeShareItemsWarning": MessageLookupByLibrary.simpleMessage(

View file

@ -34,6 +34,8 @@ class MessageLookup extends MessageLookupByLibrary {
"addViewers": m1,
"changeLocationOfSelectedItems": MessageLookupByLibrary.simpleMessage(
"Change location of selected items?"),
"clusteringProgress":
MessageLookupByLibrary.simpleMessage("Clustering progress"),
"contacts": MessageLookupByLibrary.simpleMessage("Contacts"),
"createCollaborativeLink":
MessageLookupByLibrary.simpleMessage("Create collaborative link"),
@ -44,7 +46,14 @@ class MessageLookup extends MessageLookupByLibrary {
"editsToLocationWillOnlyBeSeenWithinEnte":
MessageLookupByLibrary.simpleMessage(
"Edits to location will only be seen within Ente"),
"enterPersonName":
MessageLookupByLibrary.simpleMessage("Enter person name"),
"faceRecognition":
MessageLookupByLibrary.simpleMessage("Face recognition"),
"faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage(
"Please note that this will result in a higher bandwidth and battery usage until all items are indexed."),
"fileTypes": MessageLookupByLibrary.simpleMessage("File types"),
"foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"),
"joinDiscord": MessageLookupByLibrary.simpleMessage("Join Discord"),
"locations": MessageLookupByLibrary.simpleMessage("Locations"),
"longPressAnEmailToVerifyEndToEndEncryption":
@ -55,6 +64,8 @@ class MessageLookup extends MessageLookupByLibrary {
"Modify your query, or try searching for"),
"moveToHiddenAlbum":
MessageLookupByLibrary.simpleMessage("Move to hidden album"),
"removePersonLabel":
MessageLookupByLibrary.simpleMessage("Remove person label"),
"search": MessageLookupByLibrary.simpleMessage("Search"),
"selectALocation":
MessageLookupByLibrary.simpleMessage("Select a location"),

View file

@ -447,6 +447,8 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("Samenvoegen op tijd"),
"clubByFileName":
MessageLookupByLibrary.simpleMessage("Samenvoegen op bestandsnaam"),
"clusteringProgress":
MessageLookupByLibrary.simpleMessage("Clustering progress"),
"codeAppliedPageTitle":
MessageLookupByLibrary.simpleMessage("Code toegepast"),
"codeCopiedToClipboard": MessageLookupByLibrary.simpleMessage(
@ -723,6 +725,10 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("Logboek exporteren"),
"exportYourData":
MessageLookupByLibrary.simpleMessage("Exporteer je gegevens"),
"faceRecognition":
MessageLookupByLibrary.simpleMessage("Face recognition"),
"faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage(
"Please note that this will result in a higher bandwidth and battery usage until all items are indexed."),
"faces": MessageLookupByLibrary.simpleMessage("Gezichten"),
"failedToApplyCode":
MessageLookupByLibrary.simpleMessage("Code toepassen mislukt"),
@ -771,6 +777,7 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("voor uw herinneringen"),
"forgotPassword":
MessageLookupByLibrary.simpleMessage("Wachtwoord vergeten"),
"foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"),
"freeStorageClaimed":
MessageLookupByLibrary.simpleMessage("Gratis opslag geclaimd"),
"freeStorageOnReferralSuccess": m24,

View file

@ -39,6 +39,8 @@ class MessageLookup extends MessageLookupByLibrary {
"cancel": MessageLookupByLibrary.simpleMessage("Avbryt"),
"changeLocationOfSelectedItems": MessageLookupByLibrary.simpleMessage(
"Change location of selected items?"),
"clusteringProgress":
MessageLookupByLibrary.simpleMessage("Clustering progress"),
"confirmAccountDeletion":
MessageLookupByLibrary.simpleMessage("Bekreft sletting av konto"),
"confirmDeletePrompt": MessageLookupByLibrary.simpleMessage(
@ -57,12 +59,19 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage(
"Edits to location will only be seen within Ente"),
"email": MessageLookupByLibrary.simpleMessage("E-post"),
"enterPersonName":
MessageLookupByLibrary.simpleMessage("Enter person name"),
"enterValidEmail": MessageLookupByLibrary.simpleMessage(
"Vennligst skriv inn en gyldig e-postadresse."),
"enterYourEmailAddress": MessageLookupByLibrary.simpleMessage(
"Skriv inn e-postadressen din"),
"faceRecognition":
MessageLookupByLibrary.simpleMessage("Face recognition"),
"faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage(
"Please note that this will result in a higher bandwidth and battery usage until all items are indexed."),
"feedback": MessageLookupByLibrary.simpleMessage("Tilbakemelding"),
"fileTypes": MessageLookupByLibrary.simpleMessage("File types"),
"foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"),
"invalidEmailAddress":
MessageLookupByLibrary.simpleMessage("Ugyldig e-postadresse"),
"joinDiscord": MessageLookupByLibrary.simpleMessage("Join Discord"),
@ -77,6 +86,8 @@ class MessageLookup extends MessageLookupByLibrary {
"Modify your query, or try searching for"),
"moveToHiddenAlbum":
MessageLookupByLibrary.simpleMessage("Move to hidden album"),
"removePersonLabel":
MessageLookupByLibrary.simpleMessage("Remove person label"),
"search": MessageLookupByLibrary.simpleMessage("Search"),
"selectALocation":
MessageLookupByLibrary.simpleMessage("Select a location"),

View file

@ -49,6 +49,8 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("Zmień hasło"),
"checkInboxAndSpamFolder": MessageLookupByLibrary.simpleMessage(
"Sprawdź swoją skrzynkę odbiorczą (i spam), aby zakończyć weryfikację"),
"clusteringProgress":
MessageLookupByLibrary.simpleMessage("Clustering progress"),
"codeCopiedToClipboard": MessageLookupByLibrary.simpleMessage(
"Kod został skopiowany do schowka"),
"confirm": MessageLookupByLibrary.simpleMessage("Potwierdź"),
@ -101,6 +103,8 @@ class MessageLookup extends MessageLookupByLibrary {
"Wprowadź nowe hasło, którego możemy użyć do zaszyfrowania Twoich danych"),
"enterPasswordToEncrypt": MessageLookupByLibrary.simpleMessage(
"Wprowadź hasło, którego możemy użyć do zaszyfrowania Twoich danych"),
"enterPersonName":
MessageLookupByLibrary.simpleMessage("Enter person name"),
"enterValidEmail": MessageLookupByLibrary.simpleMessage(
"Podaj poprawny adres e-mail."),
"enterYourEmailAddress":
@ -109,10 +113,15 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("Wprowadź hasło"),
"enterYourRecoveryKey": MessageLookupByLibrary.simpleMessage(
"Wprowadź swój klucz odzyskiwania"),
"faceRecognition":
MessageLookupByLibrary.simpleMessage("Face recognition"),
"faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage(
"Please note that this will result in a higher bandwidth and battery usage until all items are indexed."),
"feedback": MessageLookupByLibrary.simpleMessage("Informacja zwrotna"),
"fileTypes": MessageLookupByLibrary.simpleMessage("File types"),
"forgotPassword":
MessageLookupByLibrary.simpleMessage("Nie pamiętam hasła"),
"foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"),
"generatingEncryptionKeys": MessageLookupByLibrary.simpleMessage(
"Generowanie kluczy szyfrujących..."),
"howItWorks": MessageLookupByLibrary.simpleMessage("Jak to działa"),
@ -166,6 +175,8 @@ class MessageLookup extends MessageLookupByLibrary {
"Jeśli zapomnisz hasła, jedynym sposobem odzyskania danych jest ten klucz."),
"recoverySuccessful":
MessageLookupByLibrary.simpleMessage("Odzyskano pomyślnie!"),
"removePersonLabel":
MessageLookupByLibrary.simpleMessage("Remove person label"),
"resendEmail":
MessageLookupByLibrary.simpleMessage("Wyślij e-mail ponownie"),
"resetPasswordTitle":

View file

@@ -445,6 +445,8 @@ class MessageLookup extends MessageLookupByLibrary {
"Agrupar por tempo de captura"),
"clubByFileName": MessageLookupByLibrary.simpleMessage(
"Agrupar pelo nome de arquivo"),
"clusteringProgress":
MessageLookupByLibrary.simpleMessage("Clustering progress"),
"codeAppliedPageTitle":
MessageLookupByLibrary.simpleMessage("Código aplicado"),
"codeCopiedToClipboard": MessageLookupByLibrary.simpleMessage(
@@ -714,6 +716,10 @@ class MessageLookup extends MessageLookupByLibrary {
"exportLogs": MessageLookupByLibrary.simpleMessage("Exportar logs"),
"exportYourData":
MessageLookupByLibrary.simpleMessage("Exportar seus dados"),
"faceRecognition":
MessageLookupByLibrary.simpleMessage("Face recognition"),
"faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage(
"Please note that this will result in a higher bandwidth and battery usage until all items are indexed."),
"faces": MessageLookupByLibrary.simpleMessage("Rostos"),
"failedToApplyCode":
MessageLookupByLibrary.simpleMessage("Falha ao aplicar o código"),
@@ -760,6 +766,7 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("para suas memórias"),
"forgotPassword":
MessageLookupByLibrary.simpleMessage("Esqueceu sua senha"),
"foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"),
"freeStorageClaimed": MessageLookupByLibrary.simpleMessage(
"Armazenamento gratuito reivindicado"),
"freeStorageOnReferralSuccess": m24,

View file

@@ -382,6 +382,8 @@ class MessageLookup extends MessageLookupByLibrary {
"close": MessageLookupByLibrary.simpleMessage("关闭"),
"clubByCaptureTime": MessageLookupByLibrary.simpleMessage("按拍摄时间分组"),
"clubByFileName": MessageLookupByLibrary.simpleMessage("按文件名排序"),
"clusteringProgress":
MessageLookupByLibrary.simpleMessage("Clustering progress"),
"codeAppliedPageTitle": MessageLookupByLibrary.simpleMessage("代码已应用"),
"codeCopiedToClipboard":
MessageLookupByLibrary.simpleMessage("代码已复制到剪贴板"),
@@ -592,6 +594,10 @@ class MessageLookup extends MessageLookupByLibrary {
MessageLookupByLibrary.simpleMessage("此链接已过期。请选择新的过期时间或禁用链接有效期。"),
"exportLogs": MessageLookupByLibrary.simpleMessage("导出日志"),
"exportYourData": MessageLookupByLibrary.simpleMessage("导出您的数据"),
"faceRecognition":
MessageLookupByLibrary.simpleMessage("Face recognition"),
"faceRecognitionIndexingDescription": MessageLookupByLibrary.simpleMessage(
"Please note that this will result in a higher bandwidth and battery usage until all items are indexed."),
"faces": MessageLookupByLibrary.simpleMessage("人脸"),
"failedToApplyCode": MessageLookupByLibrary.simpleMessage("无法使用此代码"),
"failedToCancel": MessageLookupByLibrary.simpleMessage("取消失败"),
@@ -626,6 +632,7 @@ class MessageLookup extends MessageLookupByLibrary {
"flip": MessageLookupByLibrary.simpleMessage("上下翻转"),
"forYourMemories": MessageLookupByLibrary.simpleMessage("为您的回忆"),
"forgotPassword": MessageLookupByLibrary.simpleMessage("忘记密码"),
"foundFaces": MessageLookupByLibrary.simpleMessage("Found faces"),
"freeStorageClaimed": MessageLookupByLibrary.simpleMessage("已领取的免费存储"),
"freeStorageOnReferralSuccess": m24,
"freeStorageSpace": m25,

View file

@@ -4034,10 +4034,10 @@ class S {
);
}
/// `Free trial valid till {endDate}.\nYou can purchase a paid plan afterwards.`
/// `Free trial valid till {endDate}.\nYou can choose a paid plan afterwards.`
String playStoreFreeTrialValidTill(Object endDate) {
return Intl.message(
'Free trial valid till $endDate.\nYou can purchase a paid plan afterwards.',
'Free trial valid till $endDate.\nYou can choose a paid plan afterwards.',
name: 'playStoreFreeTrialValidTill',
desc: '',
args: [endDate],
@@ -6969,10 +6969,10 @@ class S {
);
}
/// `Find all photos of a person`
/// `Persons will be shown here once indexing is done`
String get searchFaceEmptySection {
return Intl.message(
'Find all photos of a person',
'Persons will be shown here once indexing is done',
name: 'searchFaceEmptySection',
desc: '',
args: [],
@@ -8168,6 +8168,16 @@ class S {
);
}
/// `People`
String get people {
return Intl.message(
'People',
name: 'people',
desc: '',
args: [],
);
}
/// `Contents`
String get contents {
return Intl.message(
@@ -8388,26 +8398,6 @@ class S {
);
}
/// `Auto pair`
String get autoPair {
return Intl.message(
'Auto pair',
name: 'autoPair',
desc: '',
args: [],
);
}
/// `Pair with PIN`
String get pairWithPin {
return Intl.message(
'Pair with PIN',
name: 'pairWithPin',
desc: '',
args: [],
);
}
/// `Device not found`
String get deviceNotFound {
return Intl.message(
@@ -8468,6 +8458,26 @@ class S {
);
}
/// `Add a name`
String get addAName {
return Intl.message(
'Add a name',
name: 'addAName',
desc: '',
args: [],
);
}
/// `Find people quickly by name`
String get findPeopleByName {
return Intl.message(
'Find people quickly by name',
name: 'findPeopleByName',
desc: '',
args: [],
);
}
/// `{count, plural, zero {Add viewer} one {Add viewer} other {Add viewers}}`
String addViewers(num count) {
return Intl.plural(
@@ -8594,6 +8604,26 @@ class S {
);
}
/// `Enter person name`
String get enterPersonName {
return Intl.message(
'Enter person name',
name: 'enterPersonName',
desc: '',
args: [],
);
}
/// `Remove person label`
String get removePersonLabel {
return Intl.message(
'Remove person label',
name: 'removePersonLabel',
desc: '',
args: [],
);
}
/// `Auto pair works only with devices that support Chromecast.`
String get autoPairDesc {
return Intl.message(
@@ -8703,6 +8733,66 @@ class S {
args: [],
);
}
/// `Auto pair`
String get autoPair {
return Intl.message(
'Auto pair',
name: 'autoPair',
desc: '',
args: [],
);
}
/// `Pair with PIN`
String get pairWithPin {
return Intl.message(
'Pair with PIN',
name: 'pairWithPin',
desc: '',
args: [],
);
}
/// `Face recognition`
String get faceRecognition {
return Intl.message(
'Face recognition',
name: 'faceRecognition',
desc: '',
args: [],
);
}
/// `Please note that this will result in a higher bandwidth and battery usage until all items are indexed.`
String get faceRecognitionIndexingDescription {
return Intl.message(
'Please note that this will result in a higher bandwidth and battery usage until all items are indexed.',
name: 'faceRecognitionIndexingDescription',
desc: '',
args: [],
);
}
/// `Found faces`
String get foundFaces {
return Intl.message(
'Found faces',
name: 'foundFaces',
desc: '',
args: [],
);
}
/// `Clustering progress`
String get clusteringProgress {
return Intl.message(
'Clustering progress',
name: 'clusteringProgress',
desc: '',
args: [],
);
}
}
class AppLocalizationDelegate extends LocalizationsDelegate<S> {
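As a usage sketch: the new getters are read through the generated accessor like any other string. The snippet below assumes a Flutter widget context; the l10n import path is the one main.dart uses elsewhere in this commit.

import 'package:flutter/widgets.dart';
import 'package:photos/generated/l10n.dart';

// Minimal sketch: surface the new face-indexing copy in the UI.
Widget faceIndexingNotice(BuildContext context) {
  return Text(S.of(context).faceRecognitionIndexingDescription);
}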

View file

@@ -0,0 +1,111 @@
//
// Generated code. Do not modify.
// source: ente/common/box.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types, comment_references
// ignore_for_file: constant_identifier_names, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import
import 'dart:core' as $core;
import 'package:protobuf/protobuf.dart' as $pb;
/// CenterBox is a box where (x, y) is the center of the box
class CenterBox extends $pb.GeneratedMessage {
factory CenterBox({
$core.double? x,
$core.double? y,
$core.double? height,
$core.double? width,
}) {
final $result = create();
if (x != null) {
$result.x = x;
}
if (y != null) {
$result.y = y;
}
if (height != null) {
$result.height = height;
}
if (width != null) {
$result.width = width;
}
return $result;
}
CenterBox._() : super();
factory CenterBox.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory CenterBox.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'CenterBox', package: const $pb.PackageName(_omitMessageNames ? '' : 'ente.common'), createEmptyInstance: create)
..a<$core.double>(1, _omitFieldNames ? '' : 'x', $pb.PbFieldType.OF)
..a<$core.double>(2, _omitFieldNames ? '' : 'y', $pb.PbFieldType.OF)
..a<$core.double>(3, _omitFieldNames ? '' : 'height', $pb.PbFieldType.OF)
..a<$core.double>(4, _omitFieldNames ? '' : 'width', $pb.PbFieldType.OF)
..hasRequiredFields = false
;
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
CenterBox clone() => CenterBox()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
CenterBox copyWith(void Function(CenterBox) updates) => super.copyWith((message) => updates(message as CenterBox)) as CenterBox;
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static CenterBox create() => CenterBox._();
CenterBox createEmptyInstance() => create();
static $pb.PbList<CenterBox> createRepeated() => $pb.PbList<CenterBox>();
@$core.pragma('dart2js:noInline')
static CenterBox getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<CenterBox>(create);
static CenterBox? _defaultInstance;
@$pb.TagNumber(1)
$core.double get x => $_getN(0);
@$pb.TagNumber(1)
set x($core.double v) { $_setFloat(0, v); }
@$pb.TagNumber(1)
$core.bool hasX() => $_has(0);
@$pb.TagNumber(1)
void clearX() => clearField(1);
@$pb.TagNumber(2)
$core.double get y => $_getN(1);
@$pb.TagNumber(2)
set y($core.double v) { $_setFloat(1, v); }
@$pb.TagNumber(2)
$core.bool hasY() => $_has(1);
@$pb.TagNumber(2)
void clearY() => clearField(2);
@$pb.TagNumber(3)
$core.double get height => $_getN(2);
@$pb.TagNumber(3)
set height($core.double v) { $_setFloat(2, v); }
@$pb.TagNumber(3)
$core.bool hasHeight() => $_has(2);
@$pb.TagNumber(3)
void clearHeight() => clearField(3);
@$pb.TagNumber(4)
$core.double get width => $_getN(3);
@$pb.TagNumber(4)
set width($core.double v) { $_setFloat(3, v); }
@$pb.TagNumber(4)
$core.bool hasWidth() => $_has(3);
@$pb.TagNumber(4)
void clearWidth() => clearField(4);
}
const _omitFieldNames = $core.bool.fromEnvironment('protobuf.omit_field_names');
const _omitMessageNames = $core.bool.fromEnvironment('protobuf.omit_message_names');
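A minimal sketch of consuming this generated message, assuming the file is imported directly as box.pb.dart: build a CenterBox and convert its center-anchored coordinates to a top-left rectangle for drawing.

import 'dart:math' show Rectangle;

import 'box.pb.dart';

// CenterBox stores the box center; most drawing APIs want the top-left
// corner, so shift by half the width and height.
Rectangle<double> toTopLeftRect(CenterBox box) {
  return Rectangle<double>(
    box.x - box.width / 2,
    box.y - box.height / 2,
    box.width,
    box.height,
  );
}

void main() {
  final box = CenterBox(x: 0.5, y: 0.5, width: 0.25, height: 0.5);
  print(toTopLeftRect(box)); // Rectangle (0.375, 0.25) 0.25 x 0.5
}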

View file

@@ -0,0 +1,11 @@
//
// Generated code. Do not modify.
// source: ente/common/box.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types, comment_references
// ignore_for_file: constant_identifier_names, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import

View file

@@ -0,0 +1,38 @@
//
// Generated code. Do not modify.
// source: ente/common/box.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types, comment_references
// ignore_for_file: constant_identifier_names, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import
import 'dart:convert' as $convert;
import 'dart:core' as $core;
import 'dart:typed_data' as $typed_data;
@$core.Deprecated('Use centerBoxDescriptor instead')
const CenterBox$json = {
'1': 'CenterBox',
'2': [
{'1': 'x', '3': 1, '4': 1, '5': 2, '9': 0, '10': 'x', '17': true},
{'1': 'y', '3': 2, '4': 1, '5': 2, '9': 1, '10': 'y', '17': true},
{'1': 'height', '3': 3, '4': 1, '5': 2, '9': 2, '10': 'height', '17': true},
{'1': 'width', '3': 4, '4': 1, '5': 2, '9': 3, '10': 'width', '17': true},
],
'8': [
{'1': '_x'},
{'1': '_y'},
{'1': '_height'},
{'1': '_width'},
],
};
/// Descriptor for `CenterBox`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List centerBoxDescriptor = $convert.base64Decode(
'CglDZW50ZXJCb3gSEQoBeBgBIAEoAkgAUgF4iAEBEhEKAXkYAiABKAJIAVIBeYgBARIbCgZoZW'
'lnaHQYAyABKAJIAlIGaGVpZ2h0iAEBEhkKBXdpZHRoGAQgASgCSANSBXdpZHRoiAEBQgQKAl94'
'QgQKAl95QgkKB19oZWlnaHRCCAoGX3dpZHRo');

View file

@@ -0,0 +1,14 @@
//
// Generated code. Do not modify.
// source: ente/common/box.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types, comment_references
// ignore_for_file: constant_identifier_names
// ignore_for_file: deprecated_member_use_from_same_package, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import
export 'box.pb.dart';

View file

@@ -0,0 +1,83 @@
//
// Generated code. Do not modify.
// source: ente/common/point.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types, comment_references
// ignore_for_file: constant_identifier_names, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import
import 'dart:core' as $core;
import 'package:protobuf/protobuf.dart' as $pb;
/// EPoint is a point in 2D space
class EPoint extends $pb.GeneratedMessage {
factory EPoint({
$core.double? x,
$core.double? y,
}) {
final $result = create();
if (x != null) {
$result.x = x;
}
if (y != null) {
$result.y = y;
}
return $result;
}
EPoint._() : super();
factory EPoint.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory EPoint.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'EPoint', package: const $pb.PackageName(_omitMessageNames ? '' : 'ente.common'), createEmptyInstance: create)
..a<$core.double>(1, _omitFieldNames ? '' : 'x', $pb.PbFieldType.OF)
..a<$core.double>(2, _omitFieldNames ? '' : 'y', $pb.PbFieldType.OF)
..hasRequiredFields = false
;
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
EPoint clone() => EPoint()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
EPoint copyWith(void Function(EPoint) updates) => super.copyWith((message) => updates(message as EPoint)) as EPoint;
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static EPoint create() => EPoint._();
EPoint createEmptyInstance() => create();
static $pb.PbList<EPoint> createRepeated() => $pb.PbList<EPoint>();
@$core.pragma('dart2js:noInline')
static EPoint getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<EPoint>(create);
static EPoint? _defaultInstance;
@$pb.TagNumber(1)
$core.double get x => $_getN(0);
@$pb.TagNumber(1)
set x($core.double v) { $_setFloat(0, v); }
@$pb.TagNumber(1)
$core.bool hasX() => $_has(0);
@$pb.TagNumber(1)
void clearX() => clearField(1);
@$pb.TagNumber(2)
$core.double get y => $_getN(1);
@$pb.TagNumber(2)
set y($core.double v) { $_setFloat(1, v); }
@$pb.TagNumber(2)
$core.bool hasY() => $_has(1);
@$pb.TagNumber(2)
void clearY() => clearField(2);
}
const _omitFieldNames = $core.bool.fromEnvironment('protobuf.omit_field_names');
const _omitMessageNames = $core.bool.fromEnvironment('protobuf.omit_message_names');

View file

@@ -0,0 +1,11 @@
//
// Generated code. Do not modify.
// source: ente/common/point.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types, comment_references
// ignore_for_file: constant_identifier_names, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import

View file

@@ -0,0 +1,33 @@
//
// Generated code. Do not modify.
// source: ente/common/point.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types, comment_references
// ignore_for_file: constant_identifier_names, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import
import 'dart:convert' as $convert;
import 'dart:core' as $core;
import 'dart:typed_data' as $typed_data;
@$core.Deprecated('Use ePointDescriptor instead')
const EPoint$json = {
'1': 'EPoint',
'2': [
{'1': 'x', '3': 1, '4': 1, '5': 2, '9': 0, '10': 'x', '17': true},
{'1': 'y', '3': 2, '4': 1, '5': 2, '9': 1, '10': 'y', '17': true},
],
'8': [
{'1': '_x'},
{'1': '_y'},
],
};
/// Descriptor for `EPoint`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List ePointDescriptor = $convert.base64Decode(
'CgZFUG9pbnQSEQoBeBgBIAEoAkgAUgF4iAEBEhEKAXkYAiABKAJIAVIBeYgBAUIECgJfeEIECg'
'JfeQ==');

View file

@@ -0,0 +1,14 @@
//
// Generated code. Do not modify.
// source: ente/common/point.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types, comment_references
// ignore_for_file: constant_identifier_names
// ignore_for_file: deprecated_member_use_from_same_package, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import
export 'point.pb.dart';

View file

@@ -0,0 +1,64 @@
//
// Generated code. Do not modify.
// source: ente/common/vector.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types, comment_references
// ignore_for_file: constant_identifier_names, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import
import 'dart:core' as $core;
import 'package:protobuf/protobuf.dart' as $pb;
/// EVector is a generic message for dealing with lists of doubles
/// It should ideally be used independently and not as a submessage
class EVector extends $pb.GeneratedMessage {
factory EVector({
$core.Iterable<$core.double>? values,
}) {
final $result = create();
if (values != null) {
$result.values.addAll(values);
}
return $result;
}
EVector._() : super();
factory EVector.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory EVector.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'EVector', package: const $pb.PackageName(_omitMessageNames ? '' : 'ente.common'), createEmptyInstance: create)
..p<$core.double>(1, _omitFieldNames ? '' : 'values', $pb.PbFieldType.KD)
..hasRequiredFields = false
;
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
EVector clone() => EVector()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
EVector copyWith(void Function(EVector) updates) => super.copyWith((message) => updates(message as EVector)) as EVector;
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static EVector create() => EVector._();
EVector createEmptyInstance() => create();
static $pb.PbList<EVector> createRepeated() => $pb.PbList<EVector>();
@$core.pragma('dart2js:noInline')
static EVector getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<EVector>(create);
static EVector? _defaultInstance;
@$pb.TagNumber(1)
$core.List<$core.double> get values => $_getList(0);
}
const _omitFieldNames = $core.bool.fromEnvironment('protobuf.omit_field_names');
const _omitMessageNames = $core.bool.fromEnvironment('protobuf.omit_message_names');
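A minimal round-trip sketch for this message, using only the generated API shown above (writeToBuffer comes from the protobuf GeneratedMessage base class):

import 'vector.pb.dart';

void main() {
  // Wrap an embedding in an EVector and round-trip it through bytes.
  final original = EVector(values: [0.12, -0.48, 0.91]);
  final bytes = original.writeToBuffer();
  final decoded = EVector.fromBuffer(bytes);
  assert(decoded.values.length == original.values.length);
  print(decoded.values); // [0.12, -0.48, 0.91]
}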

View file

@@ -0,0 +1,11 @@
//
// Generated code. Do not modify.
// source: ente/common/vector.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types, comment_references
// ignore_for_file: constant_identifier_names, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import

View file

@@ -0,0 +1,27 @@
//
// Generated code. Do not modify.
// source: ente/common/vector.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types, comment_references
// ignore_for_file: constant_identifier_names, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import
import 'dart:convert' as $convert;
import 'dart:core' as $core;
import 'dart:typed_data' as $typed_data;
@$core.Deprecated('Use eVectorDescriptor instead')
const EVector$json = {
'1': 'EVector',
'2': [
{'1': 'values', '3': 1, '4': 3, '5': 1, '10': 'values'},
],
};
/// Descriptor for `EVector`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List eVectorDescriptor = $convert.base64Decode(
'CgdFVmVjdG9yEhYKBnZhbHVlcxgBIAMoAVIGdmFsdWVz');

View file

@@ -0,0 +1,14 @@
//
// Generated code. Do not modify.
// source: ente/common/vector.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types, comment_references
// ignore_for_file: constant_identifier_names
// ignore_for_file: deprecated_member_use_from_same_package, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import
export 'vector.pb.dart';

View file

@@ -0,0 +1,169 @@
//
// Generated code. Do not modify.
// source: ente/ml/face.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types, comment_references
// ignore_for_file: constant_identifier_names, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import
import 'dart:core' as $core;
import 'package:protobuf/protobuf.dart' as $pb;
import '../common/box.pb.dart' as $0;
import '../common/point.pb.dart' as $1;
class Detection extends $pb.GeneratedMessage {
factory Detection({
$0.CenterBox? box,
$1.EPoint? landmarks,
}) {
final $result = create();
if (box != null) {
$result.box = box;
}
if (landmarks != null) {
$result.landmarks = landmarks;
}
return $result;
}
Detection._() : super();
factory Detection.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory Detection.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'Detection', package: const $pb.PackageName(_omitMessageNames ? '' : 'ente.ml'), createEmptyInstance: create)
..aOM<$0.CenterBox>(1, _omitFieldNames ? '' : 'box', subBuilder: $0.CenterBox.create)
..aOM<$1.EPoint>(2, _omitFieldNames ? '' : 'landmarks', subBuilder: $1.EPoint.create)
..hasRequiredFields = false
;
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
Detection clone() => Detection()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
Detection copyWith(void Function(Detection) updates) => super.copyWith((message) => updates(message as Detection)) as Detection;
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static Detection create() => Detection._();
Detection createEmptyInstance() => create();
static $pb.PbList<Detection> createRepeated() => $pb.PbList<Detection>();
@$core.pragma('dart2js:noInline')
static Detection getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<Detection>(create);
static Detection? _defaultInstance;
@$pb.TagNumber(1)
$0.CenterBox get box => $_getN(0);
@$pb.TagNumber(1)
set box($0.CenterBox v) { setField(1, v); }
@$pb.TagNumber(1)
$core.bool hasBox() => $_has(0);
@$pb.TagNumber(1)
void clearBox() => clearField(1);
@$pb.TagNumber(1)
$0.CenterBox ensureBox() => $_ensure(0);
@$pb.TagNumber(2)
$1.EPoint get landmarks => $_getN(1);
@$pb.TagNumber(2)
set landmarks($1.EPoint v) { setField(2, v); }
@$pb.TagNumber(2)
$core.bool hasLandmarks() => $_has(1);
@$pb.TagNumber(2)
void clearLandmarks() => clearField(2);
@$pb.TagNumber(2)
$1.EPoint ensureLandmarks() => $_ensure(1);
}
class Face extends $pb.GeneratedMessage {
factory Face({
$core.String? id,
Detection? detection,
$core.double? confidence,
}) {
final $result = create();
if (id != null) {
$result.id = id;
}
if (detection != null) {
$result.detection = detection;
}
if (confidence != null) {
$result.confidence = confidence;
}
return $result;
}
Face._() : super();
factory Face.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory Face.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'Face', package: const $pb.PackageName(_omitMessageNames ? '' : 'ente.ml'), createEmptyInstance: create)
..aOS(1, _omitFieldNames ? '' : 'id')
..aOM<Detection>(2, _omitFieldNames ? '' : 'detection', subBuilder: Detection.create)
..a<$core.double>(3, _omitFieldNames ? '' : 'confidence', $pb.PbFieldType.OF)
..hasRequiredFields = false
;
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
Face clone() => Face()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
Face copyWith(void Function(Face) updates) => super.copyWith((message) => updates(message as Face)) as Face;
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static Face create() => Face._();
Face createEmptyInstance() => create();
static $pb.PbList<Face> createRepeated() => $pb.PbList<Face>();
@$core.pragma('dart2js:noInline')
static Face getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<Face>(create);
static Face? _defaultInstance;
@$pb.TagNumber(1)
$core.String get id => $_getSZ(0);
@$pb.TagNumber(1)
set id($core.String v) { $_setString(0, v); }
@$pb.TagNumber(1)
$core.bool hasId() => $_has(0);
@$pb.TagNumber(1)
void clearId() => clearField(1);
@$pb.TagNumber(2)
Detection get detection => $_getN(1);
@$pb.TagNumber(2)
set detection(Detection v) { setField(2, v); }
@$pb.TagNumber(2)
$core.bool hasDetection() => $_has(1);
@$pb.TagNumber(2)
void clearDetection() => clearField(2);
@$pb.TagNumber(2)
Detection ensureDetection() => $_ensure(1);
@$pb.TagNumber(3)
$core.double get confidence => $_getN(2);
@$pb.TagNumber(3)
set confidence($core.double v) { $_setFloat(2, v); }
@$pb.TagNumber(3)
$core.bool hasConfidence() => $_has(2);
@$pb.TagNumber(3)
void clearConfidence() => clearField(3);
}
const _omitFieldNames = $core.bool.fromEnvironment('protobuf.omit_field_names');
const _omitMessageNames = $core.bool.fromEnvironment('protobuf.omit_message_names');
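Putting the two common messages together, a sketch of constructing a Face; note that in this schema "landmarks" is a single optional EPoint rather than a repeated field.

import '../common/box.pb.dart';
import '../common/point.pb.dart';
import 'face.pb.dart';

Face buildFace() {
  // Detection pairs a center-based box with a single landmark point.
  final detection = Detection(
    box: CenterBox(x: 0.42, y: 0.31, width: 0.18, height: 0.24),
    landmarks: EPoint(x: 0.4, y: 0.28),
  );
  return Face(
    id: 'face-0001', // hypothetical identifier format
    detection: detection,
    confidence: 0.97,
  );
}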

View file

@@ -0,0 +1,11 @@
//
// Generated code. Do not modify.
// source: ente/ml/face.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types, comment_references
// ignore_for_file: constant_identifier_names, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import

View file

@@ -0,0 +1,55 @@
//
// Generated code. Do not modify.
// source: ente/ml/face.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types, comment_references
// ignore_for_file: constant_identifier_names, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import
import 'dart:convert' as $convert;
import 'dart:core' as $core;
import 'dart:typed_data' as $typed_data;
@$core.Deprecated('Use detectionDescriptor instead')
const Detection$json = {
'1': 'Detection',
'2': [
{'1': 'box', '3': 1, '4': 1, '5': 11, '6': '.ente.common.CenterBox', '9': 0, '10': 'box', '17': true},
{'1': 'landmarks', '3': 2, '4': 1, '5': 11, '6': '.ente.common.EPoint', '9': 1, '10': 'landmarks', '17': true},
],
'8': [
{'1': '_box'},
{'1': '_landmarks'},
],
};
/// Descriptor for `Detection`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List detectionDescriptor = $convert.base64Decode(
'CglEZXRlY3Rpb24SLQoDYm94GAEgASgLMhYuZW50ZS5jb21tb24uQ2VudGVyQm94SABSA2JveI'
'gBARI2CglsYW5kbWFya3MYAiABKAsyEy5lbnRlLmNvbW1vbi5FUG9pbnRIAVIJbGFuZG1hcmtz'
'iAEBQgYKBF9ib3hCDAoKX2xhbmRtYXJrcw==');
@$core.Deprecated('Use faceDescriptor instead')
const Face$json = {
'1': 'Face',
'2': [
{'1': 'id', '3': 1, '4': 1, '5': 9, '9': 0, '10': 'id', '17': true},
{'1': 'detection', '3': 2, '4': 1, '5': 11, '6': '.ente.ml.Detection', '9': 1, '10': 'detection', '17': true},
{'1': 'confidence', '3': 3, '4': 1, '5': 2, '9': 2, '10': 'confidence', '17': true},
],
'8': [
{'1': '_id'},
{'1': '_detection'},
{'1': '_confidence'},
],
};
/// Descriptor for `Face`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List faceDescriptor = $convert.base64Decode(
'CgRGYWNlEhMKAmlkGAEgASgJSABSAmlkiAEBEjUKCWRldGVjdGlvbhgCIAEoCzISLmVudGUubW'
'wuRGV0ZWN0aW9uSAFSCWRldGVjdGlvbogBARIjCgpjb25maWRlbmNlGAMgASgCSAJSCmNvbmZp'
'ZGVuY2WIAQFCBQoDX2lkQgwKCl9kZXRlY3Rpb25CDQoLX2NvbmZpZGVuY2U=');

View file

@@ -0,0 +1,14 @@
//
// Generated code. Do not modify.
// source: ente/ml/face.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types, comment_references
// ignore_for_file: constant_identifier_names
// ignore_for_file: deprecated_member_use_from_same_package, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import
export 'face.pb.dart';

View file

@@ -0,0 +1,179 @@
//
// Generated code. Do not modify.
// source: ente/ml/fileml.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types, comment_references
// ignore_for_file: constant_identifier_names, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import
import 'dart:core' as $core;
import 'package:fixnum/fixnum.dart' as $fixnum;
import 'package:protobuf/protobuf.dart' as $pb;
import 'face.pb.dart' as $2;
class FileML extends $pb.GeneratedMessage {
factory FileML({
$fixnum.Int64? id,
$core.Iterable<$core.double>? clip,
}) {
final $result = create();
if (id != null) {
$result.id = id;
}
if (clip != null) {
$result.clip.addAll(clip);
}
return $result;
}
FileML._() : super();
factory FileML.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory FileML.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'FileML', package: const $pb.PackageName(_omitMessageNames ? '' : 'ente.ml'), createEmptyInstance: create)
..aInt64(1, _omitFieldNames ? '' : 'id')
..p<$core.double>(2, _omitFieldNames ? '' : 'clip', $pb.PbFieldType.KD)
..hasRequiredFields = false
;
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
FileML clone() => FileML()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
FileML copyWith(void Function(FileML) updates) => super.copyWith((message) => updates(message as FileML)) as FileML;
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static FileML create() => FileML._();
FileML createEmptyInstance() => create();
static $pb.PbList<FileML> createRepeated() => $pb.PbList<FileML>();
@$core.pragma('dart2js:noInline')
static FileML getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<FileML>(create);
static FileML? _defaultInstance;
@$pb.TagNumber(1)
$fixnum.Int64 get id => $_getI64(0);
@$pb.TagNumber(1)
set id($fixnum.Int64 v) { $_setInt64(0, v); }
@$pb.TagNumber(1)
$core.bool hasId() => $_has(0);
@$pb.TagNumber(1)
void clearId() => clearField(1);
@$pb.TagNumber(2)
$core.List<$core.double> get clip => $_getList(1);
}
class FileFaces extends $pb.GeneratedMessage {
factory FileFaces({
$core.Iterable<$2.Face>? faces,
$core.int? height,
$core.int? width,
$core.int? version,
$core.String? error,
}) {
final $result = create();
if (faces != null) {
$result.faces.addAll(faces);
}
if (height != null) {
$result.height = height;
}
if (width != null) {
$result.width = width;
}
if (version != null) {
$result.version = version;
}
if (error != null) {
$result.error = error;
}
return $result;
}
FileFaces._() : super();
factory FileFaces.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory FileFaces.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'FileFaces', package: const $pb.PackageName(_omitMessageNames ? '' : 'ente.ml'), createEmptyInstance: create)
..pc<$2.Face>(1, _omitFieldNames ? '' : 'faces', $pb.PbFieldType.PM, subBuilder: $2.Face.create)
..a<$core.int>(2, _omitFieldNames ? '' : 'height', $pb.PbFieldType.O3)
..a<$core.int>(3, _omitFieldNames ? '' : 'width', $pb.PbFieldType.O3)
..a<$core.int>(4, _omitFieldNames ? '' : 'version', $pb.PbFieldType.O3)
..aOS(5, _omitFieldNames ? '' : 'error')
..hasRequiredFields = false
;
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
FileFaces clone() => FileFaces()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
FileFaces copyWith(void Function(FileFaces) updates) => super.copyWith((message) => updates(message as FileFaces)) as FileFaces;
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static FileFaces create() => FileFaces._();
FileFaces createEmptyInstance() => create();
static $pb.PbList<FileFaces> createRepeated() => $pb.PbList<FileFaces>();
@$core.pragma('dart2js:noInline')
static FileFaces getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<FileFaces>(create);
static FileFaces? _defaultInstance;
@$pb.TagNumber(1)
$core.List<$2.Face> get faces => $_getList(0);
@$pb.TagNumber(2)
$core.int get height => $_getIZ(1);
@$pb.TagNumber(2)
set height($core.int v) { $_setSignedInt32(1, v); }
@$pb.TagNumber(2)
$core.bool hasHeight() => $_has(1);
@$pb.TagNumber(2)
void clearHeight() => clearField(2);
@$pb.TagNumber(3)
$core.int get width => $_getIZ(2);
@$pb.TagNumber(3)
set width($core.int v) { $_setSignedInt32(2, v); }
@$pb.TagNumber(3)
$core.bool hasWidth() => $_has(2);
@$pb.TagNumber(3)
void clearWidth() => clearField(3);
@$pb.TagNumber(4)
$core.int get version => $_getIZ(3);
@$pb.TagNumber(4)
set version($core.int v) { $_setSignedInt32(3, v); }
@$pb.TagNumber(4)
$core.bool hasVersion() => $_has(3);
@$pb.TagNumber(4)
void clearVersion() => clearField(4);
@$pb.TagNumber(5)
$core.String get error => $_getSZ(4);
@$pb.TagNumber(5)
set error($core.String v) { $_setString(4, v); }
@$pb.TagNumber(5)
$core.bool hasError() => $_has(4);
@$pb.TagNumber(5)
void clearError() => clearField(5);
}
const _omitFieldNames = $core.bool.fromEnvironment('protobuf.omit_field_names');
const _omitMessageNames = $core.bool.fromEnvironment('protobuf.omit_message_names');
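And a sketch of assembling the per-file payloads. Assumptions: the version field tracks the faceMlVersion constant added in this commit, and the caller handles encryption and upload separately.

import 'package:fixnum/fixnum.dart';

import 'face.pb.dart';
import 'fileml.pb.dart';

// Bundle face results for one file into bytes ready for storage.
List<int> encodeFileFaces(List<Face> faces, int height, int width) {
  final fileFaces = FileFaces(
    faces: faces,
    height: height,
    width: width,
    version: 1, // assumed to mirror faceMlVersion
  );
  return fileFaces.writeToBuffer();
}

// FileML carries the CLIP embedding keyed by the uploaded file id.
FileML buildFileML(int uploadedFileID, List<double> clipEmbedding) {
  return FileML(id: Int64(uploadedFileID), clip: clipEmbedding);
}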

View file

@@ -0,0 +1,11 @@
//
// Generated code. Do not modify.
// source: ente/ml/fileml.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types, comment_references
// ignore_for_file: constant_identifier_names, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import

View file

@@ -0,0 +1,57 @@
//
// Generated code. Do not modify.
// source: ente/ml/fileml.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types, comment_references
// ignore_for_file: constant_identifier_names, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import
import 'dart:convert' as $convert;
import 'dart:core' as $core;
import 'dart:typed_data' as $typed_data;
@$core.Deprecated('Use fileMLDescriptor instead')
const FileML$json = {
'1': 'FileML',
'2': [
{'1': 'id', '3': 1, '4': 1, '5': 3, '9': 0, '10': 'id', '17': true},
{'1': 'clip', '3': 2, '4': 3, '5': 1, '10': 'clip'},
],
'8': [
{'1': '_id'},
],
};
/// Descriptor for `FileML`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List fileMLDescriptor = $convert.base64Decode(
'CgZGaWxlTUwSEwoCaWQYASABKANIAFICaWSIAQESEgoEY2xpcBgCIAMoAVIEY2xpcEIFCgNfaW'
'Q=');
@$core.Deprecated('Use fileFacesDescriptor instead')
const FileFaces$json = {
'1': 'FileFaces',
'2': [
{'1': 'faces', '3': 1, '4': 3, '5': 11, '6': '.ente.ml.Face', '10': 'faces'},
{'1': 'height', '3': 2, '4': 1, '5': 5, '9': 0, '10': 'height', '17': true},
{'1': 'width', '3': 3, '4': 1, '5': 5, '9': 1, '10': 'width', '17': true},
{'1': 'version', '3': 4, '4': 1, '5': 5, '9': 2, '10': 'version', '17': true},
{'1': 'error', '3': 5, '4': 1, '5': 9, '9': 3, '10': 'error', '17': true},
],
'8': [
{'1': '_height'},
{'1': '_width'},
{'1': '_version'},
{'1': '_error'},
],
};
/// Descriptor for `FileFaces`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List fileFacesDescriptor = $convert.base64Decode(
'CglGaWxlRmFjZXMSIwoFZmFjZXMYASADKAsyDS5lbnRlLm1sLkZhY2VSBWZhY2VzEhsKBmhlaW'
'dodBgCIAEoBUgAUgZoZWlnaHSIAQESGQoFd2lkdGgYAyABKAVIAVIFd2lkdGiIAQESHQoHdmVy'
'c2lvbhgEIAEoBUgCUgd2ZXJzaW9uiAEBEhkKBWVycm9yGAUgASgJSANSBWVycm9yiAEBQgkKB1'
'9oZWlnaHRCCAoGX3dpZHRoQgoKCF92ZXJzaW9uQggKBl9lcnJvcg==');

View file

@@ -0,0 +1,14 @@
//
// Generated code. Do not modify.
// source: ente/ml/fileml.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types, comment_references
// ignore_for_file: constant_identifier_names
// ignore_for_file: deprecated_member_use_from_same_package, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import
export 'fileml.pb.dart';

View file

@@ -18,5 +18,11 @@
"addCollaborators": "{count, plural, zero {Add collaborator} one {Add collaborator} other {Add collaborators}}",
"longPressAnEmailToVerifyEndToEndEncryption": "Long press an email to verify end to end encryption.",
"createCollaborativeLink": "Create collaborative link",
"search": "Search"
"search": "Search",
"enterPersonName": "Enter person name",
"removePersonLabel": "Remove person label",
"faceRecognition": "Face recognition",
"faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery usage until all items are indexed.",
"foundFaces": "Found faces",
"clusteringProgress": "Clustering progress"
}

View file

@@ -1187,6 +1187,8 @@
"changeLocationOfSelectedItems": "Standort der gewählten Elemente ändern?",
"editsToLocationWillOnlyBeSeenWithinEnte": "Änderungen des Standorts werden nur in ente sichtbar sein",
"cleanUncategorized": "Unkategorisiert leeren",
"addAName": "Add a name",
"findPeopleByName": "Find people quickly by searching by name",
"cleanUncategorizedDescription": "Entferne alle Dateien von \"Unkategorisiert\" die in anderen Alben vorhanden sind",
"waitingForVerification": "Warte auf Bestätigung...",
"passkey": "Passkey",
@@ -1204,5 +1206,11 @@
"addCollaborators": "{count, plural, zero {Add collaborator} one {Add collaborator} other {Add collaborators}}",
"longPressAnEmailToVerifyEndToEndEncryption": "Long press an email to verify end to end encryption.",
"createCollaborativeLink": "Create collaborative link",
"search": "Search"
"search": "Search",
"enterPersonName": "Enter person name",
"removePersonLabel": "Remove person label",
"faceRecognition": "Face recognition",
"faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery usage until all items are indexed.",
"foundFaces": "Found faces",
"clusteringProgress": "Clustering progress"
}

View file

@@ -569,7 +569,7 @@
"freeTrialValidTill": "Free trial valid till {endDate}",
"validTill": "Valid till {endDate}",
"addOnValidTill": "Your {storageAmount} add-on is valid till {endDate}",
"playStoreFreeTrialValidTill": "Free trial valid till {endDate}.\nYou can purchase a paid plan afterwards.",
"playStoreFreeTrialValidTill": "Free trial valid till {endDate}.\nYou can choose a paid plan afterwards.",
"subWillBeCancelledOn": "Your subscription will be cancelled on {endDate}",
"subscription": "Subscription",
"paymentDetails": "Payment details",
@@ -987,7 +987,7 @@
"fileTypesAndNames": "File types and names",
"location": "Location",
"moments": "Moments",
"searchFaceEmptySection": "Find all photos of a person",
"searchFaceEmptySection": "Persons will be shown here once indexing is done",
"searchDatesEmptySection": "Search by a date, month or year",
"searchLocationEmptySection": "Group photos that are taken within some radius of a photo",
"searchPeopleEmptySection": "Invite people, and you'll see all photos shared by them here",
@@ -1171,6 +1171,7 @@
}
},
"faces": "Faces",
"people": "People",
"contents": "Contents",
"addNew": "Add new",
"@addNew": {
@@ -1196,14 +1197,14 @@
"verifyPasskey": "Verify passkey",
"playOnTv": "Play album on TV",
"pair": "Pair",
"autoPair": "Auto pair",
"pairWithPin": "Pair with PIN",
"deviceNotFound": "Device not found",
"castInstruction": "Visit cast.ente.io on the device you want to pair.\n\nEnter the code below to play the album on your TV.",
"deviceCodeHint": "Enter the code",
"joinDiscord": "Join Discord",
"locations": "Locations",
"descriptions": "Descriptions",
"addAName": "Add a name",
"findPeopleByName": "Find people quickly by name",
"addViewers": "{count, plural, zero {Add viewer} one {Add viewer} other {Add viewers}}",
"addCollaborators": "{count, plural, zero {Add collaborator} one {Add collaborator} other {Add collaborators}}",
"longPressAnEmailToVerifyEndToEndEncryption": "Long press an email to verify end to end encryption.",
@@ -1216,6 +1217,8 @@
"customEndpoint": "Connected to {endpoint}",
"createCollaborativeLink": "Create collaborative link",
"search": "Search",
"enterPersonName": "Enter person name",
"removePersonLabel": "Remove person label",
"autoPairDesc": "Auto pair works only with devices that support Chromecast.",
"manualPairDesc": "Pair with PIN works with any screen you wish to view your album on.",
"connectToDevice": "Connect to device",
@@ -1226,5 +1229,11 @@
"stopCastingBody": "Do you want to stop casting?",
"castIPMismatchTitle": "Failed to cast album",
"castIPMismatchBody": "Please make sure you are on the same network as the TV.",
"pairingComplete": "Pairing complete"
"pairingComplete": "Pairing complete",
"autoPair": "Auto pair",
"pairWithPin": "Pair with PIN",
"faceRecognition": "Face recognition",
"faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery usage until all items are indexed.",
"foundFaces": "Found faces",
"clusteringProgress": "Clustering progress"
}

View file

@@ -980,5 +980,11 @@
"addCollaborators": "{count, plural, zero {Add collaborator} one {Add collaborator} other {Add collaborators}}",
"longPressAnEmailToVerifyEndToEndEncryption": "Long press an email to verify end to end encryption.",
"createCollaborativeLink": "Create collaborative link",
"search": "Search"
"search": "Search",
"enterPersonName": "Enter person name",
"removePersonLabel": "Remove person label",
"faceRecognition": "Face recognition",
"faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery usage until all items are indexed.",
"foundFaces": "Found faces",
"clusteringProgress": "Clustering progress"
}

View file

@@ -1161,5 +1161,11 @@
"addCollaborators": "{count, plural, zero {Add collaborator} one {Add collaborator} other {Add collaborators}}",
"longPressAnEmailToVerifyEndToEndEncryption": "Long press an email to verify end to end encryption.",
"createCollaborativeLink": "Create collaborative link",
"search": "Search"
"search": "Search",
"enterPersonName": "Enter person name",
"removePersonLabel": "Remove person label",
"faceRecognition": "Face recognition",
"faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery usage until all items are indexed.",
"foundFaces": "Found faces",
"clusteringProgress": "Clustering progress"
}

View file

@@ -1123,5 +1123,11 @@
"addCollaborators": "{count, plural, zero {Add collaborator} one {Add collaborator} other {Add collaborators}}",
"longPressAnEmailToVerifyEndToEndEncryption": "Long press an email to verify end to end encryption.",
"createCollaborativeLink": "Create collaborative link",
"search": "Search"
"search": "Search",
"enterPersonName": "Enter person name",
"removePersonLabel": "Remove person label",
"faceRecognition": "Face recognition",
"faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery usage until all items are indexed.",
"foundFaces": "Found faces",
"clusteringProgress": "Clustering progress"
}

View file

@@ -18,5 +18,11 @@
"addCollaborators": "{count, plural, zero {Add collaborator} one {Add collaborator} other {Add collaborators}}",
"longPressAnEmailToVerifyEndToEndEncryption": "Long press an email to verify end to end encryption.",
"createCollaborativeLink": "Create collaborative link",
"search": "Search"
"search": "Search",
"enterPersonName": "Enter person name",
"removePersonLabel": "Remove person label",
"faceRecognition": "Face recognition",
"faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery usage until all items are indexed.",
"foundFaces": "Found faces",
"clusteringProgress": "Clustering progress"
}

View file

@@ -1226,5 +1226,9 @@
"stopCastingBody": "Wil je stoppen met casten?",
"castIPMismatchTitle": "Album casten mislukt",
"castIPMismatchBody": "Zorg ervoor dat je op hetzelfde netwerk zit als de tv.",
"pairingComplete": "Koppeling voltooid"
"pairingComplete": "Koppeling voltooid",
"faceRecognition": "Face recognition",
"faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery usage until all items are indexed.",
"foundFaces": "Found faces",
"clusteringProgress": "Clustering progress"
}

View file

@@ -32,5 +32,11 @@
"addCollaborators": "{count, plural, zero {Add collaborator} one {Add collaborator} other {Add collaborators}}",
"longPressAnEmailToVerifyEndToEndEncryption": "Long press an email to verify end to end encryption.",
"createCollaborativeLink": "Create collaborative link",
"search": "Search"
"search": "Search",
"enterPersonName": "Enter person name",
"removePersonLabel": "Remove person label",
"faceRecognition": "Face recognition",
"faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery usage until all items are indexed.",
"foundFaces": "Found faces",
"clusteringProgress": "Clustering progress"
}

View file

@@ -119,5 +119,11 @@
"addCollaborators": "{count, plural, zero {Add collaborator} one {Add collaborator} other {Add collaborators}}",
"longPressAnEmailToVerifyEndToEndEncryption": "Long press an email to verify end to end encryption.",
"createCollaborativeLink": "Create collaborative link",
"search": "Search"
"search": "Search",
"enterPersonName": "Enter person name",
"removePersonLabel": "Remove person label",
"faceRecognition": "Face recognition",
"faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery usage until all items are indexed.",
"foundFaces": "Found faces",
"clusteringProgress": "Clustering progress"
}

View file

@@ -1226,5 +1226,9 @@
"stopCastingBody": "Você quer parar a transmissão?",
"castIPMismatchTitle": "Falha ao transmitir álbum",
"castIPMismatchBody": "Certifique-se de estar na mesma rede que a TV.",
"pairingComplete": "Pareamento concluído"
"pairingComplete": "Pareamento concluído",
"faceRecognition": "Face recognition",
"faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery usage until all items are indexed.",
"foundFaces": "Found faces",
"clusteringProgress": "Clustering progress"
}

View file

@@ -1226,5 +1226,9 @@
"stopCastingBody": "您想停止投放吗?",
"castIPMismatchTitle": "投放相册失败",
"castIPMismatchBody": "请确保您的设备与电视处于同一网络。",
"pairingComplete": "配对完成"
"pairingComplete": "配对完成",
"faceRecognition": "Face recognition",
"faceRecognitionIndexingDescription": "Please note that this will result in a higher bandwidth and battery usage until all items are indexed.",
"foundFaces": "Found faces",
"clusteringProgress": "Clustering progress"
}

View file

@@ -21,6 +21,7 @@ import 'package:photos/core/errors.dart';
import 'package:photos/core/network/network.dart';
import 'package:photos/db/upload_locks_db.dart';
import 'package:photos/ente_theme_data.dart';
import "package:photos/face/db.dart";
import "package:photos/l10n/l10n.dart";
import "package:photos/service_locator.dart";
import 'package:photos/services/app_lifecycle_service.dart';
@@ -32,6 +33,9 @@ import 'package:photos/services/home_widget_service.dart';
import 'package:photos/services/local_file_update_service.dart';
import 'package:photos/services/local_sync_service.dart';
import "package:photos/services/location_service.dart";
import 'package:photos/services/machine_learning/face_ml/face_ml_service.dart';
import "package:photos/services/machine_learning/face_ml/person/person_service.dart";
import 'package:photos/services/machine_learning/file_ml/remote_fileml_service.dart';
import "package:photos/services/machine_learning/machine_learning_controller.dart";
import 'package:photos/services/machine_learning/semantic_search/semantic_search_service.dart';
import 'package:photos/services/memories_service.dart';
@@ -213,6 +217,7 @@ Future<void> _init(bool isBackground, {String via = ''}) async {
LocalFileUpdateService.instance.init(preferences);
SearchService.instance.init();
StorageBonusService.instance.init(preferences);
RemoteFileMLService.instance.init(preferences);
if (!isBackground &&
Platform.isAndroid &&
await HomeWidgetService.instance.countHomeWidgets() == 0) {
@@ -233,9 +238,23 @@ Future<void> _init(bool isBackground, {String via = ''}) async {
// Cannot include existing tf/ml binaries as they are not being built
// from source.
// See https://gitlab.com/fdroid/fdroiddata/-/merge_requests/12671#note_1294346819
// if (!UpdateService.instance.isFdroidFlavor()) {
// unawaited(ObjectDetectionService.instance.init());
// }
if (!UpdateService.instance.isFdroidFlavor()) {
// unawaited(ObjectDetectionService.instance.init());
if (flagService.faceSearchEnabled) {
unawaited(FaceMlService.instance.init());
FaceMlService.instance.listenIndexOnDiffSync();
FaceMlService.instance.listenOnPeopleChangedSync();
} else {
if (LocalSettings.instance.isFaceIndexingEnabled) {
unawaited(LocalSettings.instance.toggleFaceIndexing());
}
}
}
PersonService.init(
EntityService.instance,
FaceMLDataDB.instance,
preferences,
);
_logger.info("Initialization done");
}

View file

@@ -2,6 +2,7 @@ import "package:flutter/foundation.dart";
enum EntityType {
location,
person,
unknown,
}
@@ -9,6 +10,8 @@ EntityType typeFromString(String type) {
switch (type) {
case "location":
return EntityType.location;
case "person":
return EntityType.person;
}
debugPrint("unexpected collection type $type");
return EntityType.unknown;
@@ -19,6 +22,8 @@ extension EntityTypeExtn on EntityType {
switch (this) {
case EntityType.location:
return "location";
case EntityType.person:
return "person";
case EntityType.unknown:
return "unknown";
}
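A quick round-trip check of the mapping above (the import path is the one used elsewhere in this commit, and the "person" case is fixed to return EntityType.person rather than falling back to location):

import 'package:photos/models/api/entity/type.dart';

void main() {
  // Serialized names should map back onto their enum values.
  assert(typeFromString('person') == EntityType.person);
  assert(typeFromString('location') == EntityType.location);
  assert(typeFromString('unexpected') == EntityType.unknown);
}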

View file

@@ -243,6 +243,9 @@ class EnteFile {
}
String get downloadUrl {
if (localFileServer.isNotEmpty) {
return "$localFileServer/$uploadedFileID";
}
final endpoint = Configuration.instance.getHttpEndpoint();
if (endpoint != kDefaultProductionEndpoint || flagService.disableCFWorker) {
return endpoint + "/files/download/" + uploadedFileID.toString();
@@ -256,6 +259,9 @@ class EnteFile {
}
String get thumbnailUrl {
if (localFileServer.isNotEmpty) {
return "$localFileServer/thumb/$uploadedFileID";
}
final endpoint = Configuration.instance.getHttpEndpoint();
if (endpoint != kDefaultProductionEndpoint || flagService.disableCFWorker) {
return endpoint + "/files/preview/" + uploadedFileID.toString();

View file

@@ -18,6 +18,8 @@ enum GalleryType {
searchResults,
locationTag,
quickLink,
peopleTag,
cluster,
}
extension GalleyTypeExtension on GalleryType {
@@ -32,12 +34,14 @@ extension GalleyTypeExtension on GalleryType {
case GalleryType.locationTag:
case GalleryType.quickLink:
case GalleryType.uncategorized:
case GalleryType.peopleTag:
case GalleryType.sharedCollection:
return true;
case GalleryType.hiddenSection:
case GalleryType.hiddenOwnedCollection:
case GalleryType.trash:
case GalleryType.cluster:
return false;
}
}
@@ -50,6 +54,7 @@ extension GalleyTypeExtension on GalleryType {
return true;
case GalleryType.hiddenSection:
case GalleryType.peopleTag:
case GalleryType.hiddenOwnedCollection:
case GalleryType.favorite:
case GalleryType.searchResults:
@ -59,6 +64,7 @@ extension GalleyTypeExtension on GalleryType {
case GalleryType.trash:
case GalleryType.sharedCollection:
case GalleryType.locationTag:
case GalleryType.cluster:
return false;
}
}
@@ -75,12 +81,14 @@ extension GalleyTypeExtension on GalleryType {
case GalleryType.uncategorized:
case GalleryType.locationTag:
case GalleryType.quickLink:
case GalleryType.peopleTag:
return true;
case GalleryType.trash:
case GalleryType.archive:
case GalleryType.hiddenSection:
case GalleryType.hiddenOwnedCollection:
case GalleryType.sharedCollection:
case GalleryType.cluster:
return false;
}
}
@@ -98,8 +106,10 @@ extension GalleyTypeExtension on GalleryType {
case GalleryType.localFolder:
case GalleryType.locationTag:
case GalleryType.quickLink:
case GalleryType.peopleTag:
return true;
case GalleryType.trash:
case GalleryType.cluster:
case GalleryType.sharedCollection:
return false;
}
@@ -114,8 +124,10 @@ extension GalleyTypeExtension on GalleryType {
case GalleryType.archive:
case GalleryType.uncategorized:
case GalleryType.locationTag:
case GalleryType.peopleTag:
return true;
case GalleryType.hiddenSection:
case GalleryType.cluster:
case GalleryType.hiddenOwnedCollection:
case GalleryType.localFolder:
case GalleryType.trash:
@@ -132,6 +144,7 @@ extension GalleyTypeExtension on GalleryType {
case GalleryType.quickLink:
return true;
case GalleryType.hiddenSection:
case GalleryType.peopleTag:
case GalleryType.hiddenOwnedCollection:
case GalleryType.uncategorized:
case GalleryType.favorite:
@ -139,6 +152,7 @@ extension GalleyTypeExtension on GalleryType {
case GalleryType.homepage:
case GalleryType.archive:
case GalleryType.localFolder:
case GalleryType.cluster:
case GalleryType.trash:
case GalleryType.locationTag:
return false;
@@ -154,6 +168,7 @@ extension GalleyTypeExtension on GalleryType {
return true;
case GalleryType.hiddenSection:
case GalleryType.peopleTag:
case GalleryType.hiddenOwnedCollection:
case GalleryType.favorite:
case GalleryType.searchResults:
@ -162,6 +177,7 @@ extension GalleyTypeExtension on GalleryType {
case GalleryType.trash:
case GalleryType.sharedCollection:
case GalleryType.locationTag:
case GalleryType.cluster:
return false;
}
}
@@ -182,10 +198,12 @@ extension GalleyTypeExtension on GalleryType {
return true;
case GalleryType.hiddenSection:
case GalleryType.peopleTag:
case GalleryType.hiddenOwnedCollection:
case GalleryType.localFolder:
case GalleryType.trash:
case GalleryType.favorite:
case GalleryType.cluster:
case GalleryType.sharedCollection:
return false;
}
@ -203,12 +221,14 @@ extension GalleyTypeExtension on GalleryType {
case GalleryType.searchResults:
case GalleryType.uncategorized:
case GalleryType.locationTag:
case GalleryType.peopleTag:
return true;
case GalleryType.hiddenSection:
case GalleryType.hiddenOwnedCollection:
case GalleryType.quickLink:
case GalleryType.favorite:
case GalleryType.cluster:
case GalleryType.archive:
case GalleryType.localFolder:
case GalleryType.trash:
@ -244,7 +264,7 @@ extension GalleyTypeExtension on GalleryType {
}
bool showEditLocation() {
return this != GalleryType.sharedCollection;
return this != GalleryType.sharedCollection && this != GalleryType.cluster;
}
}
@ -334,7 +354,9 @@ extension GalleryAppBarExtn on GalleryType {
case GalleryType.locationTag:
case GalleryType.searchResults:
return false;
case GalleryType.cluster:
case GalleryType.uncategorized:
case GalleryType.peopleTag:
case GalleryType.ownedCollection:
case GalleryType.sharedCollection:
case GalleryType.quickLink:

View file

@ -1,6 +1,7 @@
import "package:equatable/equatable.dart";
import "package:photos/models/api/entity/type.dart";
// LocalEntityData is a class that represents the data of an entity stored locally.
class LocalEntityData {
final String id;
final EntityType type;

View file

@ -0,0 +1,7 @@
typedef Embedding = List<double>;
typedef Num3DInputMatrix = List<List<List<num>>>;
typedef Int3DInputMatrix = List<List<List<int>>>;
typedef Double3DInputMatrix = List<List<List<double>>>;

View file

@ -0,0 +1,3 @@
const faceMlVersion = 1;
const clusterMlVersion = 1;
const minimumClusterSize = 2;

View file

@ -8,8 +8,15 @@ class GenericSearchResult extends SearchResult {
final List<EnteFile> _files;
final ResultType _type;
final Function(BuildContext context)? onResultTap;
final Map<String, dynamic> params;
GenericSearchResult(this._type, this._name, this._files, {this.onResultTap});
GenericSearchResult(
this._type,
this._name,
this._files, {
this.onResultTap,
this.params = const {},
});
@override
String name() {

View file

@ -0,0 +1,3 @@
const kPersonParamID = 'person_id';
const kClusterParamId = 'cluster_id';
const kFileID = 'file_id';

View file

@ -6,6 +6,7 @@ import "package:photos/core/event_bus.dart";
import "package:photos/events/collection_updated_event.dart";
import "package:photos/events/event.dart";
import "package:photos/events/location_tag_updated_event.dart";
import "package:photos/events/people_changed_event.dart";
import "package:photos/generated/l10n.dart";
import "package:photos/models/collection/collection.dart";
import "package:photos/models/collection/collection_items.dart";
@ -33,6 +34,7 @@ enum ResultType {
fileCaption,
event,
shared,
faces,
magic,
}
@ -55,7 +57,7 @@ extension SectionTypeExtensions on SectionType {
String sectionTitle(BuildContext context) {
switch (this) {
case SectionType.face:
return S.of(context).faces;
return S.of(context).people;
case SectionType.content:
return S.of(context).contents;
case SectionType.moment:
@ -117,10 +119,12 @@ extension SectionTypeExtensions on SectionType {
}
}
bool get sortByName => this != SectionType.face;
bool get isEmptyCTAVisible {
switch (this) {
case SectionType.face:
return true;
return false;
case SectionType.content:
return false;
case SectionType.moment:
@ -245,8 +249,7 @@ extension SectionTypeExtensions on SectionType {
}) {
switch (this) {
case SectionType.face:
return Future.value(List<GenericSearchResult>.empty());
return SearchService.instance.getAllFace(limit);
case SectionType.content:
return Future.value(List<GenericSearchResult>.empty());
@ -277,6 +280,8 @@ extension SectionTypeExtensions on SectionType {
return [Bus.instance.on<LocationTagUpdatedEvent>()];
case SectionType.album:
return [Bus.instance.on<CollectionUpdatedEvent>()];
case SectionType.face:
return [Bus.instance.on<PeopleChangedEvent>()];
default:
return [];
}

View file

@ -50,6 +50,10 @@ class EntityService {
return await _db.getEntities(type);
}
Future<LocalEntityData?> getEntity(EntityType type, String id) async {
return await _db.getEntity(type, id);
}
Future<LocalEntityData> addOrUpdate(
EntityType type,
String plainText, {
@ -57,13 +61,16 @@ class EntityService {
}) async {
final key = await getOrCreateEntityKey(type);
final encryptedKeyData = await CryptoUtil.encryptChaCha(
utf8.encode(plainText) as Uint8List,
utf8.encode(plainText),
key,
);
final String encryptedData =
CryptoUtil.bin2base64(encryptedKeyData.encryptedData!);
final String header = CryptoUtil.bin2base64(encryptedKeyData.header!);
debugPrint("Adding entity of type: " + type.typeToString());
debugPrint(
" ${id == null ? 'Adding' : 'Updating'} entity of type: " +
type.typeToString(),
);
final EntityData data = id == null
? await _gateway.createEntity(type, encryptedData, header)
: await _gateway.updateEntity(type, id, encryptedData, header);
@ -87,6 +94,7 @@ class EntityService {
Future<void> syncEntities() async {
try {
await _remoteToLocalSync(EntityType.location);
await _remoteToLocalSync(EntityType.person);
} catch (e) {
_logger.severe("Failed to sync entities", e);
}

View file

@ -0,0 +1,36 @@
class AlignmentResult {
final List<List<double>> affineMatrix; // 3x3
final List<double> center; // [x, y]
final double size; // 1 / scale
final double rotation; // atan2(simRotation[1][0], simRotation[0][0]);
AlignmentResult({required this.affineMatrix, required this.center, required this.size, required this.rotation});
AlignmentResult.empty()
: affineMatrix = <List<double>>[
[1, 0, 0],
[0, 1, 0],
[0, 0, 1],
],
center = <double>[0, 0],
size = 1,
rotation = 0;
factory AlignmentResult.fromJson(Map<String, dynamic> json) {
return AlignmentResult(
affineMatrix: (json['affineMatrix'] as List)
.map((item) => List<double>.from(item))
.toList(),
center: List<double>.from(json['center'] as List),
size: json['size'] as double,
rotation: json['rotation'] as double,
);
}
Map<String, dynamic> toJson() => {
'affineMatrix': affineMatrix,
'center': center,
'size': size,
'rotation': rotation,
};
}
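// A minimal round-trip sketch (illustrative, not part of the class above):
// AlignmentResult serializes to a plain JSON-compatible map, so it can be
// persisted alongside other face metadata and restored losslessly.
void alignmentResultRoundTripExample() {
  final result = AlignmentResult.empty();
  final Map<String, dynamic> json = result.toJson();
  final restored = AlignmentResult.fromJson(json);
  assert(restored.size == result.size);
  assert(restored.rotation == result.rotation);
}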

View file

@ -0,0 +1,171 @@
import 'dart:math' show atan2;
import 'package:ml_linalg/linalg.dart';
import 'package:photos/extensions/ml_linalg_extensions.dart';
import 'package:photos/services/machine_learning/face_ml/face_alignment/alignment_result.dart';
/// Class to compute the similarity transform between two sets of points.
///
/// The class estimates the parameters of the similarity transformation via the `estimate` function.
/// After estimation, the transformation can be applied to an image using the `warpAffine` function.
class SimilarityTransform {
Matrix _params = Matrix.fromList([
[1.0, 0.0, 0.0],
[0.0, 1.0, 0.0],
[0, 0, 1],
]);
List<double> _center = <double>[0, 0]; // [x, y]
double _size = 1; // 1 / scale
double _rotation = 0; // atan2(simRotation[1][0], simRotation[0][0]);
final arcface4Landmarks = [
<double>[38.2946, 51.6963],
<double>[73.5318, 51.5014],
<double>[56.0252, 71.7366],
<double>[56.1396, 92.2848],
];
final arcface5Landmarks = [
<double>[38.2946, 51.6963],
<double>[73.5318, 51.5014],
<double>[56.0252, 71.7366],
<double>[41.5493, 92.3655],
<double>[70.7299, 92.2041],
];
get arcfaceNormalized4 => arcface4Landmarks
.map((list) => list.map((value) => value / 112.0).toList())
.toList();
get arcfaceNormalized5 => arcface5Landmarks
.map((list) => list.map((value) => value / 112.0).toList())
.toList();
List<List<double>> get paramsList => _params.to2DList();
// singleton pattern
SimilarityTransform._privateConstructor();
static final instance = SimilarityTransform._privateConstructor();
factory SimilarityTransform() => instance;
void _cleanParams() {
_params = Matrix.fromList([
[1.0, 0.0, 0.0],
[0.0, 1.0, 0.0],
[0, 0, 1],
]);
_center = <double>[0, 0];
_size = 1;
_rotation = 0;
}
/// Function to estimate the parameters of the affine transformation. These parameters are stored in the class variable params.
///
/// Returns a tuple of (AlignmentResult, bool). The bool indicates whether the parameters are valid or not.
///
/// Runs efficiently in about 1-3 ms after initial warm-up.
///
/// It takes the source points as input and aligns them to the fixed ArcFace
/// destination template, returning the parameters of the affine
/// transformation as output. The returned bool is false if the parameters
/// cannot be estimated. The parameters are estimated by solving a
/// least-squares problem using the Umeyama algorithm, via [_umeyama].
(AlignmentResult, bool) estimate(List<List<double>> src) {
_cleanParams();
final (params, center, size, rotation) =
_umeyama(src, arcfaceNormalized5, true);
_params = params;
_center = center;
_size = size;
_rotation = rotation;
final alignmentResult = AlignmentResult(
affineMatrix: paramsList,
center: _center,
size: _size,
rotation: _rotation,
);
// We check for NaN in the transformation matrix params.
final isNoNanInParam =
!_params.asFlattenedList.any((element) => element.isNaN);
return (alignmentResult, isNoNanInParam);
}
static (Matrix, List<double>, double, double) _umeyama(
List<List<double>> src,
List<List<double>> dst, [
bool estimateScale = true,
]) {
final srcMat = Matrix.fromList(
src,
// .map((list) => list.map((value) => value.toDouble()).toList())
// .toList(),
);
final dstMat = Matrix.fromList(dst);
final num = srcMat.rowCount;
final dim = srcMat.columnCount;
// Compute mean of src and dst.
final srcMean = srcMat.mean(Axis.columns);
final dstMean = dstMat.mean(Axis.columns);
// Subtract mean from src and dst.
final srcDemean = srcMat.mapRows((vector) => vector - srcMean);
final dstDemean = dstMat.mapRows((vector) => vector - dstMean);
// Eq. (38).
final A = (dstDemean.transpose() * srcDemean) / num;
// Eq. (39).
var d = Vector.filled(dim, 1.0);
if (A.determinant() < 0) {
d = d.set(dim - 1, -1);
}
var T = Matrix.identity(dim + 1);
final svdResult = A.svd();
final Matrix U = svdResult['U']!;
final Vector S = svdResult['S']!;
final Matrix V = svdResult['V']!;
// Eq. (40) and (43).
final rank = A.matrixRank();
if (rank == 0) {
return (T * double.nan, <double>[0, 0], 1, 0);
} else if (rank == dim - 1) {
if (U.determinant() * V.determinant() > 0) {
T = T.setSubMatrix(0, dim, 0, dim, U * V);
} else {
final s = d[dim - 1];
d = d.set(dim - 1, -1);
final replacement = U * Matrix.diagonal(d.toList()) * V;
T = T.setSubMatrix(0, dim, 0, dim, replacement);
d = d.set(dim - 1, s);
}
} else {
final replacement = U * Matrix.diagonal(d.toList()) * V;
T = T.setSubMatrix(0, dim, 0, dim, replacement);
}
final Matrix simRotation = U * Matrix.diagonal(d.toList()) * V;
var scale = 1.0;
if (estimateScale) {
// Eq. (41) and (42).
scale = 1.0 / srcDemean.variance(Axis.columns).sum() * (S * d).sum();
}
final subTIndices = Iterable<int>.generate(dim, (index) => index);
final subT = T.sample(rowIndices: subTIndices, columnIndices: subTIndices);
final newSubT = dstMean - (subT * srcMean) * scale;
T = T.setValues(0, dim, dim, dim + 1, newSubT);
final newNewSubT =
T.sample(rowIndices: subTIndices, columnIndices: subTIndices) * scale;
T = T.setSubMatrix(0, dim, 0, dim, newNewSubT);
// final List<double> translation = [T[0][2], T[1][2]];
// final simRotation = replacement?;
final size = 1 / scale;
final rotation = atan2(simRotation[1][0], simRotation[0][0]);
final meanTranslation = (dstMean - 0.5) * size;
final centerMat = srcMean - meanTranslation;
final List<double> center = [centerMat[0], centerMat[1]];
return (T, center, size, rotation);
}
}
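// A minimal usage sketch of the class above, with made-up landmark values for
// illustration. The five points are leftEye, rightEye, nose, leftMouth and
// rightMouth, in relative [x, y] coordinates, matching the order of the
// arcfaceNormalized5 template.
void similarityTransformExample() {
  final landmarks = <List<double>>[
    [0.35, 0.40],
    [0.65, 0.40],
    [0.50, 0.55],
    [0.40, 0.70],
    [0.60, 0.70],
  ];
  final (alignment, isValid) = SimilarityTransform.instance.estimate(landmarks);
  if (isValid) {
    // The affine matrix can now be used to warp the face crop to the
    // canonical ArcFace orientation.
    print('rotation: ${alignment.rotation} rad, size: ${alignment.size}');
  }
}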

View file

@ -0,0 +1,22 @@
import "package:photos/face/model/person.dart";
enum MappingSource {
local,
remote,
}
class ClustersMapping {
final Map<int, Set<int>> fileIDToClusterIDs;
final Map<int, String> clusterToPersonID;
// personIDToPerson is a map of personID to PersonEntity, and it's the same
// for both local and remote sources
final Map<String, PersonEntity> personIDToPerson;
final MappingSource source;
ClustersMapping({
required this.fileIDToClusterIDs,
required this.clusterToPersonID,
required this.personIDToPerson,
required this.source,
});
}

View file

@ -0,0 +1,79 @@
import 'dart:math' show sqrt;
import "package:ml_linalg/linalg.dart";
/// Calculates the cosine distance between two embeddings/vectors using SIMD from ml_linalg
///
/// WARNING: This assumes both vectors are already normalized!
double cosineDistanceSIMD(Vector vector1, Vector vector2) {
if (vector1.length != vector2.length) {
throw ArgumentError('Vectors must be the same length');
}
return 1 - vector1.dot(vector2);
}
/// Calculates the cosine distance between two embeddings/vectors using SIMD from ml_linalg
///
/// WARNING: Only use when you're not sure if vectors are normalized. If you're sure they are, use [cosineDistanceSIMD] instead for better performance.
double cosineDistanceSIMDSafe(Vector vector1, Vector vector2) {
if (vector1.length != vector2.length) {
throw ArgumentError('Vectors must be the same length');
}
return vector1.distanceTo(vector2, distance: Distance.cosine);
}
/// Calculates the cosine distance between two embeddings/vectors.
///
/// Throws an ArgumentError if the vectors are of different lengths or
/// if either of the vectors has a magnitude of zero.
double cosineDistance(List<double> vector1, List<double> vector2) {
if (vector1.length != vector2.length) {
throw ArgumentError('Vectors must be the same length');
}
double dotProduct = 0.0;
double magnitude1 = 0.0;
double magnitude2 = 0.0;
for (int i = 0; i < vector1.length; i++) {
dotProduct += vector1[i] * vector2[i];
magnitude1 += vector1[i] * vector1[i];
magnitude2 += vector2[i] * vector2[i];
}
magnitude1 = sqrt(magnitude1);
magnitude2 = sqrt(magnitude2);
// Avoid division by zero. This should never happen. If it does, then one of the vectors contains only zeros.
if (magnitude1 == 0 || magnitude2 == 0) {
throw ArgumentError('Vectors must not have a magnitude of zero');
}
final double similarity = dotProduct / (magnitude1 * magnitude2);
// Cosine distance is the complement of cosine similarity
return 1.0 - similarity;
}
// cosineDistForNormVectors calculates the cosine distance between two normalized embeddings/vectors.
@pragma('vm:entry-point')
double cosineDistForNormVectors(List<double> vector1, List<double> vector2) {
if (vector1.length != vector2.length) {
throw ArgumentError('Vectors must be the same length');
}
double dotProduct = 0.0;
for (int i = 0; i < vector1.length; i++) {
dotProduct += vector1[i] * vector2[i];
}
return 1.0 - dotProduct;
}
double calculateSqrDistance(List<double> v1, List<double> v2) {
double sum = 0;
for (int i = 0; i < v1.length; i++) {
sum += (v1[i] - v2[i]) * (v1[i] - v2[i]);
}
return sqrt(sum);
}
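// A small worked example of the helpers above (illustrative values). The
// vectors are unit-length by construction, so the normalized-only variants
// apply as well. Note that, despite its name, calculateSqrDistance returns
// the Euclidean distance (it takes the square root at the end).
void distanceExample() {
  final a = <double>[1.0, 0.0, 0.0];
  final b = <double>[0.0, 1.0, 0.0];
  print(cosineDistance(a, b)); // 1.0, orthogonal vectors
  print(cosineDistForNormVectors(a, a)); // 0.0, identical vectors
  print(calculateSqrDistance(a, b)); // sqrt(2) ≈ 1.414
}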

View file

@ -0,0 +1,25 @@
import "dart:typed_data" show Uint8List;
class FaceInfoForClustering {
final String faceID;
int? clusterId;
final Uint8List embeddingBytes;
final double faceScore;
final double blurValue;
final bool isSideways;
int? _fileID;
int get fileID {
_fileID ??= int.parse(faceID.split('_').first);
return _fileID!;
}
FaceInfoForClustering({
required this.faceID,
this.clusterId,
required this.embeddingBytes,
required this.faceScore,
required this.blurValue,
this.isSideways = false,
});
}
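// An illustrative construction (made-up values): the file ID is recovered
// lazily from the faceID prefix, which encodes "<fileID>_<boundingBox>".
void faceInfoExample() {
  final info = FaceInfoForClustering(
    faceID: '42_25000_25000_75000_75000',
    embeddingBytes: Uint8List(0), // a real embedding would be non-empty
    faceScore: 0.9,
    blurValue: 200.0,
  );
  print(info.fileID); // 42, parsed once and cached
}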

View file

@ -0,0 +1,516 @@
import 'dart:math' show max, min, pow, sqrt;
import "package:photos/face/model/dimension.dart";
enum FaceDirection { left, right, straight }
extension FaceDirectionExtension on FaceDirection {
String toDirectionString() {
switch (this) {
case FaceDirection.left:
return 'Left';
case FaceDirection.right:
return 'Right';
case FaceDirection.straight:
return 'Straight';
default:
throw Exception('Unknown FaceDirection');
}
}
}
abstract class Detection {
final double score;
Detection({required this.score});
const Detection.empty() : score = 0;
get width;
get height;
@override
String toString();
}
@Deprecated('Old method only used in other deprecated methods')
extension BBoxExtension on List<double> {
void roundBoxToDouble() {
final widthRounded = (this[2] - this[0]).roundToDouble();
final heightRounded = (this[3] - this[1]).roundToDouble();
this[0] = this[0].roundToDouble();
this[1] = this[1].roundToDouble();
this[2] = this[0] + widthRounded;
this[3] = this[1] + heightRounded;
}
// double get xMinBox =>
// isNotEmpty ? this[0] : throw IndexError.withLength(0, length);
// double get yMinBox =>
// length >= 2 ? this[1] : throw IndexError.withLength(1, length);
// double get xMaxBox =>
// length >= 3 ? this[2] : throw IndexError.withLength(2, length);
// double get yMaxBox =>
// length >= 4 ? this[3] : throw IndexError.withLength(3, length);
}
/// This class represents a face detection with relative coordinates in the range [0, 1].
/// The coordinates are relative to the image size. The pattern for the coordinates is always [x, y], where x is the horizontal coordinate and y is the vertical coordinate.
///
/// The [score] attribute is a double representing the confidence of the face detection.
///
/// The [box] attribute is a list of 4 doubles, representing the coordinates of the bounding box of the face detection.
/// The four values of the box in order are: [xMinBox, yMinBox, xMaxBox, yMaxBox].
///
/// The [allKeypoints] attribute is a list of 5 lists of 2 doubles, representing the coordinates of the keypoints of the face detection.
/// The five lists of two values in order are: [leftEye, rightEye, nose, leftMouth, rightMouth]. Again, all in [x, y] order.
class FaceDetectionRelative extends Detection {
final List<double> box;
final List<List<double>> allKeypoints;
double get xMinBox => box[0];
double get yMinBox => box[1];
double get xMaxBox => box[2];
double get yMaxBox => box[3];
List<double> get leftEye => allKeypoints[0];
List<double> get rightEye => allKeypoints[1];
List<double> get nose => allKeypoints[2];
List<double> get leftMouth => allKeypoints[3];
List<double> get rightMouth => allKeypoints[4];
FaceDetectionRelative({
required double score,
required List<double> box,
required List<List<double>> allKeypoints,
}) : assert(
box.every((e) => e >= -0.1 && e <= 1.1),
"Bounding box values must be in the range [0, 1], with only a small margin of error allowed.",
),
assert(
allKeypoints
.every((sublist) => sublist.every((e) => e >= -0.1 && e <= 1.1)),
"All keypoints must be in the range [0, 1], with only a small margin of error allowed.",
),
box = List<double>.from(box.map((e) => e.clamp(0.0, 1.0))),
allKeypoints = allKeypoints
.map(
(sublist) =>
List<double>.from(sublist.map((e) => e.clamp(0.0, 1.0))),
)
.toList(),
super(score: score);
factory FaceDetectionRelative.zero() {
return FaceDetectionRelative(
score: 0,
box: <double>[0, 0, 0, 0],
allKeypoints: <List<double>>[
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
],
);
}
/// Initializes the FaceDetectionRelative object with default values, in a way that allows it to be used as a constant.
/// Contrary to the `FaceDetectionRelative.zero()` factory, this constructor gives immutable attributes [box] and [allKeypoints].
FaceDetectionRelative.defaultInitialization()
: box = const <double>[0, 0, 0, 0],
allKeypoints = const <List<double>>[
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
],
super.empty();
FaceDetectionRelative getNearestDetection(
List<FaceDetectionRelative> detections,
) {
if (detections.isEmpty) {
throw ArgumentError("The detection list cannot be empty.");
}
var nearestDetection = detections[0];
var minDistance = double.infinity;
// Calculate the center of the current instance
final centerX1 = (xMinBox + xMaxBox) / 2;
final centerY1 = (yMinBox + yMaxBox) / 2;
for (var detection in detections) {
final centerX2 = (detection.xMinBox + detection.xMaxBox) / 2;
final centerY2 = (detection.yMinBox + detection.yMaxBox) / 2;
final distance =
sqrt(pow(centerX2 - centerX1, 2) + pow(centerY2 - centerY1, 2));
if (distance < minDistance) {
minDistance = distance;
nearestDetection = detection;
}
}
return nearestDetection;
}
void transformRelativeToOriginalImage(
List<double> fromBox, // [xMin, yMin, xMax, yMax]
List<double> toBox, // [xMin, yMin, xMax, yMax]
) {
// Return if all elements of fromBox and toBox are equal
for (int i = 0; i < fromBox.length; i++) {
if (fromBox[i] != toBox[i]) {
break;
}
if (i == fromBox.length - 1) {
return;
}
}
// Account for padding
final double paddingXRatio =
(fromBox[0] - toBox[0]) / (toBox[2] - toBox[0]);
final double paddingYRatio =
(fromBox[1] - toBox[1]) / (toBox[3] - toBox[1]);
// Calculate the scaling and translation
final double scaleX = (fromBox[2] - fromBox[0]) / (1 - 2 * paddingXRatio);
final double scaleY = (fromBox[3] - fromBox[1]) / (1 - 2 * paddingYRatio);
final double translateX = fromBox[0] - paddingXRatio * scaleX;
final double translateY = fromBox[1] - paddingYRatio * scaleY;
// Transform Box
_transformBox(box, scaleX, scaleY, translateX, translateY);
// Transform All Keypoints
for (int i = 0; i < allKeypoints.length; i++) {
allKeypoints[i] = _transformPoint(
allKeypoints[i],
scaleX,
scaleY,
translateX,
translateY,
);
}
}
void correctForMaintainedAspectRatio(
Dimensions originalSize,
Dimensions newSize,
) {
// Return if both are the same size, meaning no scaling was done on both width and height
if (originalSize == newSize) {
return;
}
// Calculate the scaling
final double scaleX = originalSize.width / newSize.width;
final double scaleY = originalSize.height / newSize.height;
const double translateX = 0;
const double translateY = 0;
// Transform Box
_transformBox(box, scaleX, scaleY, translateX, translateY);
// Transform All Keypoints
for (int i = 0; i < allKeypoints.length; i++) {
allKeypoints[i] = _transformPoint(
allKeypoints[i],
scaleX,
scaleY,
translateX,
translateY,
);
}
}
void _transformBox(
List<double> box,
double scaleX,
double scaleY,
double translateX,
double translateY,
) {
box[0] = (box[0] * scaleX + translateX).clamp(0.0, 1.0);
box[1] = (box[1] * scaleY + translateY).clamp(0.0, 1.0);
box[2] = (box[2] * scaleX + translateX).clamp(0.0, 1.0);
box[3] = (box[3] * scaleY + translateY).clamp(0.0, 1.0);
}
List<double> _transformPoint(
List<double> point,
double scaleX,
double scaleY,
double translateX,
double translateY,
) {
return [
(point[0] * scaleX + translateX).clamp(0.0, 1.0),
(point[1] * scaleY + translateY).clamp(0.0, 1.0),
];
}
FaceDetectionAbsolute toAbsolute({
required int imageWidth,
required int imageHeight,
}) {
final scoreCopy = score;
final boxCopy = List<double>.from(box, growable: false);
final allKeypointsCopy = allKeypoints
.map((sublist) => List<double>.from(sublist, growable: false))
.toList();
boxCopy[0] *= imageWidth;
boxCopy[1] *= imageHeight;
boxCopy[2] *= imageWidth;
boxCopy[3] *= imageHeight;
// final intbox = boxCopy.map((e) => e.toInt()).toList();
for (List<double> keypoint in allKeypointsCopy) {
keypoint[0] *= imageWidth;
keypoint[1] *= imageHeight;
}
// final intKeypoints =
// allKeypointsCopy.map((e) => e.map((e) => e.toInt()).toList()).toList();
return FaceDetectionAbsolute(
score: scoreCopy,
box: boxCopy,
allKeypoints: allKeypointsCopy,
);
}
String toFaceID({required int fileID}) {
// Assert that the values are within the expected range
assert(
(xMinBox >= 0 && xMinBox <= 1) &&
(yMinBox >= 0 && yMinBox <= 1) &&
(xMaxBox >= 0 && xMaxBox <= 1) &&
(yMaxBox >= 0 && yMaxBox <= 1),
"Bounding box values must be in the range [0, 1]",
);
// Extract bounding box values
final String xMin =
xMinBox.clamp(0.0, 0.999999).toStringAsFixed(5).substring(2);
final String yMin =
yMinBox.clamp(0.0, 0.999999).toStringAsFixed(5).substring(2);
final String xMax =
xMaxBox.clamp(0.0, 0.999999).toStringAsFixed(5).substring(2);
final String yMax =
yMaxBox.clamp(0.0, 0.999999).toStringAsFixed(5).substring(2);
// Convert the bounding box values to string and concatenate
final String rawID = "${xMin}_${yMin}_${xMax}_$yMax";
final faceID = fileID.toString() + '_' + rawID.toString();
// Return the concatenated ID in the format "<fileID>_<xMin>_<yMin>_<xMax>_<yMax>"
return faceID;
}
/// This method is used to generate a faceID for a face detection that was manually added by the user.
static String toFaceIDEmpty({required int fileID}) {
return fileID.toString() + '_0';
}
/// This method is used to check if a faceID corresponds to a manually added face detection and not an actual face detection.
static bool isFaceIDEmpty(String faceID) {
return faceID.split('_')[1] == '0';
}
@override
String toString() {
return 'FaceDetectionRelative( with relative coordinates: \n score: $score \n Box: xMinBox: $xMinBox, yMinBox: $yMinBox, xMaxBox: $xMaxBox, yMaxBox: $yMaxBox, \n Keypoints: leftEye: $leftEye, rightEye: $rightEye, nose: $nose, leftMouth: $leftMouth, rightMouth: $rightMouth \n )';
}
Map<String, dynamic> toJson() {
return {
'score': score,
'box': box,
'allKeypoints': allKeypoints,
};
}
factory FaceDetectionRelative.fromJson(Map<String, dynamic> json) {
return FaceDetectionRelative(
score: (json['score'] as num).toDouble(),
box: List<double>.from(json['box']),
allKeypoints: (json['allKeypoints'] as List)
.map((item) => List<double>.from(item))
.toList(),
);
}
@override
/// The width of the bounding box of the face detection, in relative range [0, 1].
double get width => xMaxBox - xMinBox;
@override
/// The height of the bounding box of the face detection, in relative range [0, 1].
double get height => yMaxBox - yMinBox;
}
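// An illustrative sketch with made-up values: converting a relative detection
// to absolute pixel coordinates, and deriving its faceID. toFaceID encodes
// each bounding box coordinate by its five decimal digits, prefixed with the
// file ID.
void faceDetectionRelativeExample() {
  final relative = FaceDetectionRelative(
    score: 0.9,
    box: [0.25, 0.25, 0.75, 0.75],
    allKeypoints: [
      [0.40, 0.40], // leftEye
      [0.60, 0.40], // rightEye
      [0.50, 0.55], // nose
      [0.45, 0.65], // leftMouth
      [0.55, 0.65], // rightMouth
    ],
  );
  final absolute = relative.toAbsolute(imageWidth: 1000, imageHeight: 800);
  print(absolute.box); // [250.0, 200.0, 750.0, 600.0]
  print(relative.toFaceID(fileID: 42)); // 42_25000_25000_75000_75000
}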
/// This class represents a face detection with absolute coordinates in pixels, in the range [0, imageWidth] for the horizontal coordinates and [0, imageHeight] for the vertical coordinates.
/// The pattern for the coordinates is always [x, y], where x is the horizontal coordinate and y is the vertical coordinate.
///
/// The [score] attribute is a double representing the confidence of the face detection.
///
/// The [box] attribute is a list of 4 doubles, representing the coordinates of the bounding box of the face detection.
/// The four values of the box in order are: [xMinBox, yMinBox, xMaxBox, yMaxBox].
///
/// The [allKeypoints] attribute is a list of 5 lists of 2 doubles, representing the coordinates of the keypoints of the face detection.
/// The five lists of two values in order are: [leftEye, rightEye, nose, leftMouth, rightMouth]. Again, all in [x, y] order.
class FaceDetectionAbsolute extends Detection {
final List<double> box;
final List<List<double>> allKeypoints;
double get xMinBox => box[0];
double get yMinBox => box[1];
double get xMaxBox => box[2];
double get yMaxBox => box[3];
List<double> get leftEye => allKeypoints[0];
List<double> get rightEye => allKeypoints[1];
List<double> get nose => allKeypoints[2];
List<double> get leftMouth => allKeypoints[3];
List<double> get rightMouth => allKeypoints[4];
FaceDetectionAbsolute({
required double score,
required this.box,
required this.allKeypoints,
}) : super(score: score);
factory FaceDetectionAbsolute._zero() {
return FaceDetectionAbsolute(
score: 0,
box: <double>[0, 0, 0, 0],
allKeypoints: <List<double>>[
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
],
);
}
FaceDetectionAbsolute.defaultInitialization()
: box = const <double>[0, 0, 0, 0],
allKeypoints = const <List<double>>[
[0, 0],
[0, 0],
[0, 0],
[0, 0],
[0, 0],
],
super.empty();
@override
String toString() {
return 'FaceDetectionAbsolute( with absolute coordinates: \n score: $score \n Box: xMinBox: $xMinBox, yMinBox: $yMinBox, xMaxBox: $xMaxBox, yMaxBox: $yMaxBox, \n Keypoints: leftEye: $leftEye, rightEye: $rightEye, nose: $nose, leftMouth: $leftMouth, rightMouth: $rightMouth \n )';
}
Map<String, dynamic> toJson() {
return {
'score': score,
'box': box,
'allKeypoints': allKeypoints,
};
}
factory FaceDetectionAbsolute.fromJson(Map<String, dynamic> json) {
return FaceDetectionAbsolute(
score: (json['score'] as num).toDouble(),
box: List<double>.from(json['box']),
allKeypoints: (json['allKeypoints'] as List)
.map((item) => List<double>.from(item))
.toList(),
);
}
static FaceDetectionAbsolute empty = FaceDetectionAbsolute._zero();
@override
/// The width of the bounding box of the face detection, in number of pixels, range [0, imageWidth].
double get width => xMaxBox - xMinBox;
@override
/// The height of the bounding box of the face detection, in number of pixels, range [0, imageHeight].
double get height => yMaxBox - yMinBox;
FaceDirection getFaceDirection() {
final double eyeDistanceX = (rightEye[0] - leftEye[0]).abs();
final double eyeDistanceY = (rightEye[1] - leftEye[1]).abs();
final double mouthDistanceY = (rightMouth[1] - leftMouth[1]).abs();
final bool faceIsUpright =
(max(leftEye[1], rightEye[1]) + 0.5 * eyeDistanceY < nose[1]) &&
(nose[1] + 0.5 * mouthDistanceY < min(leftMouth[1], rightMouth[1]));
final bool noseStickingOutLeft = (nose[0] < min(leftEye[0], rightEye[0])) &&
(nose[0] < min(leftMouth[0], rightMouth[0]));
final bool noseStickingOutRight =
(nose[0] > max(leftEye[0], rightEye[0])) &&
(nose[0] > max(leftMouth[0], rightMouth[0]));
final bool noseCloseToLeftEye =
(nose[0] - leftEye[0]).abs() < 0.2 * eyeDistanceX;
final bool noseCloseToRightEye =
(nose[0] - rightEye[0]).abs() < 0.2 * eyeDistanceX;
// if (faceIsUpright && (noseStickingOutLeft || noseCloseToLeftEye)) {
if (noseStickingOutLeft || (faceIsUpright && noseCloseToLeftEye)) {
return FaceDirection.left;
// } else if (faceIsUpright && (noseStickingOutRight || noseCloseToRightEye)) {
} else if (noseStickingOutRight || (faceIsUpright && noseCloseToRightEye)) {
return FaceDirection.right;
}
return FaceDirection.straight;
}
}
List<FaceDetectionAbsolute> relativeToAbsoluteDetections({
required List<FaceDetectionRelative> relativeDetections,
required int imageWidth,
required int imageHeight,
}) {
final numberOfDetections = relativeDetections.length;
final absoluteDetections = List<FaceDetectionAbsolute>.filled(
numberOfDetections,
FaceDetectionAbsolute._zero(),
);
for (var i = 0; i < relativeDetections.length; i++) {
final relativeDetection = relativeDetections[i];
final absoluteDetection = relativeDetection.toAbsolute(
imageWidth: imageWidth,
imageHeight: imageHeight,
);
absoluteDetections[i] = absoluteDetection;
}
return absoluteDetections;
}
/// Returns an enlarged version of the [box] by a factor of [factor].
List<double> getEnlargedRelativeBox(List<double> box, [double factor = 2]) {
final boxCopy = List<double>.from(box, growable: false);
// The four values of the box in order are: [xMinBox, yMinBox, xMaxBox, yMaxBox].
final width = boxCopy[2] - boxCopy[0];
final height = boxCopy[3] - boxCopy[1];
boxCopy[0] -= width * (factor - 1) / 2;
boxCopy[1] -= height * (factor - 1) / 2;
boxCopy[2] += width * (factor - 1) / 2;
boxCopy[3] += height * (factor - 1) / 2;
return boxCopy;
}
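// A worked example of the enlargement above (illustrative values): a factor
// of 2 doubles the width and height around the same center, so a box of side
// 0.2 centered at (0.5, 0.5) becomes a box of side 0.4.
void enlargedBoxExample() {
  final box = <double>[0.4, 0.4, 0.6, 0.6];
  print(getEnlargedRelativeBox(box, 2)); // [0.3, 0.3, 0.7, 0.7]
}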

View file

@ -0,0 +1,3 @@
class YOLOFaceInterpreterInitializationException implements Exception {}
class YOLOFaceInterpreterRunException implements Exception {}

View file

@ -0,0 +1,788 @@
import "dart:async";
import "dart:developer" as dev show log;
import "dart:io" show File;
import "dart:isolate";
import 'dart:typed_data' show ByteData, Float32List, Uint8List;
import 'dart:ui' as ui show Image;
import "package:computer/computer.dart";
import 'package:flutter/material.dart';
import 'package:logging/logging.dart';
import 'package:onnxruntime/onnxruntime.dart';
import "package:photos/face/model/dimension.dart";
import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart';
import 'package:photos/services/machine_learning/face_ml/face_detection/face_detection_exceptions.dart';
import 'package:photos/services/machine_learning/face_ml/face_detection/naive_non_max_suppression.dart';
import 'package:photos/services/machine_learning/face_ml/face_detection/yolo_filter_extract_detections.dart';
import "package:photos/services/remote_assets_service.dart";
import "package:photos/utils/image_ml_isolate.dart";
import "package:photos/utils/image_ml_util.dart";
import "package:synchronized/synchronized.dart";
enum FaceDetectionOperation { yoloInferenceAndPostProcessing }
/// This class is responsible for running the face detection model (YOLOv5Face) on ONNX runtime, and can be accessed through the singleton instance [FaceDetectionService.instance].
class FaceDetectionService {
static final _logger = Logger('YOLOFaceDetectionService');
final _computer = Computer.shared();
int sessionAddress = 0;
static const String kModelBucketEndpoint = "https://models.ente.io/";
static const String kRemoteBucketModelPath =
"yolov5s_face_640_640_dynamic.onnx";
// static const kRemoteBucketModelPath = "yolov5n_face_640_640.onnx";
static const String modelRemotePath =
kModelBucketEndpoint + kRemoteBucketModelPath;
static const int kInputWidth = 640;
static const int kInputHeight = 640;
static const double kIouThreshold = 0.4;
static const double kMinScoreSigmoidThreshold = 0.7;
static const int kNumKeypoints = 5;
bool isInitialized = false;
// Isolate things
Timer? _inactivityTimer;
final Duration _inactivityDuration = const Duration(seconds: 30);
final _initLock = Lock();
final _computerLock = Lock();
late Isolate _isolate;
late ReceivePort _receivePort = ReceivePort();
late SendPort _mainSendPort;
bool isSpawned = false;
bool isRunning = false;
// singleton pattern
FaceDetectionService._privateConstructor();
/// Use this instance to access the FaceDetectionService. Make sure to call `init()` before using it.
/// e.g. `await FaceDetectionService.instance.init();`
///
/// Then you can use `predict()` to get the bounding boxes of the faces, so `FaceDetectionService.instance.predict(imageData)`
///
/// config options: yoloV5FaceN
static final instance = FaceDetectionService._privateConstructor();
factory FaceDetectionService() => instance;
/// Check if the interpreter is initialized, if not initialize it with `loadModel()`
Future<void> init() async {
if (!isInitialized) {
_logger.info('init is called');
final model =
await RemoteAssetsService.instance.getAsset(modelRemotePath);
final startTime = DateTime.now();
// Doing this from main isolate since `rootBundle` cannot be accessed outside it
sessionAddress = await _computer.compute(
_loadModel,
param: {
"modelPath": model.path,
},
);
final endTime = DateTime.now();
_logger.info(
"Face detection model loaded, took: ${(endTime.millisecondsSinceEpoch - startTime.millisecondsSinceEpoch).toString()}ms",
);
if (sessionAddress != -1) {
isInitialized = true;
}
}
}
Future<void> release() async {
if (isInitialized) {
await _computer
.compute(_releaseModel, param: {'address': sessionAddress});
isInitialized = false;
sessionAddress = 0;
}
}
Future<void> initIsolate() async {
return _initLock.synchronized(() async {
if (isSpawned) return;
_receivePort = ReceivePort();
try {
_isolate = await Isolate.spawn(
_isolateMain,
_receivePort.sendPort,
);
_mainSendPort = await _receivePort.first as SendPort;
isSpawned = true;
_resetInactivityTimer();
} catch (e) {
_logger.severe('Could not spawn isolate', e);
isSpawned = false;
}
});
}
Future<void> ensureSpawnedIsolate() async {
if (!isSpawned) {
await initIsolate();
}
}
/// The main execution function of the isolate.
static void _isolateMain(SendPort mainSendPort) async {
final receivePort = ReceivePort();
mainSendPort.send(receivePort.sendPort);
receivePort.listen((message) async {
final functionIndex = message[0] as int;
final function = FaceDetectionOperation.values[functionIndex];
final args = message[1] as Map<String, dynamic>;
final sendPort = message[2] as SendPort;
try {
switch (function) {
case FaceDetectionOperation.yoloInferenceAndPostProcessing:
final inputImageList = args['inputImageList'] as Float32List;
final inputShape = args['inputShape'] as List<int>;
final newSize = args['newSize'] as Dimensions;
final sessionAddress = args['sessionAddress'] as int;
final timeSentToIsolate = args['timeNow'] as DateTime;
final delaySentToIsolate =
DateTime.now().difference(timeSentToIsolate).inMilliseconds;
final Stopwatch stopwatchPrepare = Stopwatch()..start();
final inputOrt = OrtValueTensor.createTensorWithDataList(
inputImageList,
inputShape,
);
final inputs = {'input': inputOrt};
stopwatchPrepare.stop();
dev.log(
'[YOLOFaceDetectionService] data preparation is finished, in ${stopwatchPrepare.elapsedMilliseconds}ms',
);
stopwatchPrepare.reset();
stopwatchPrepare.start();
final runOptions = OrtRunOptions();
final session = OrtSession.fromAddress(sessionAddress);
stopwatchPrepare.stop();
dev.log(
'[YOLOFaceDetectionService] session preparation is finished, in ${stopwatchPrepare.elapsedMilliseconds}ms',
);
final stopwatchInterpreter = Stopwatch()..start();
late final List<OrtValue?> outputs;
try {
outputs = session.run(runOptions, inputs);
} catch (e, s) {
dev.log(
'[YOLOFaceDetectionService] Error while running inference: $e \n $s',
);
throw YOLOFaceInterpreterRunException();
}
stopwatchInterpreter.stop();
dev.log(
'[YOLOFaceDetectionService] interpreter.run is finished, in ${stopwatchInterpreter.elapsedMilliseconds} ms',
);
final relativeDetections =
_yoloPostProcessOutputs(outputs, newSize);
sendPort
.send((relativeDetections, delaySentToIsolate, DateTime.now()));
break;
}
} catch (e, stackTrace) {
sendPort
.send({'error': e.toString(), 'stackTrace': stackTrace.toString()});
}
});
}
/// The common method to run any operation in the isolate. It sends the [message] to [_isolateMain] and waits for the result.
Future<dynamic> _runInIsolate(
(FaceDetectionOperation, Map<String, dynamic>) message,
) async {
await ensureSpawnedIsolate();
_resetInactivityTimer();
final completer = Completer<dynamic>();
final answerPort = ReceivePort();
_mainSendPort.send([message.$1.index, message.$2, answerPort.sendPort]);
answerPort.listen((receivedMessage) {
if (receivedMessage is Map && receivedMessage.containsKey('error')) {
// Handle the error
final errorMessage = receivedMessage['error'];
final errorStackTrace = receivedMessage['stackTrace'];
final exception = Exception(errorMessage);
final stackTrace = StackTrace.fromString(errorStackTrace);
completer.completeError(exception, stackTrace);
} else {
completer.complete(receivedMessage);
}
});
return completer.future;
}
/// Resets a timer that kills the isolate after a certain amount of inactivity.
///
/// Should be called after initialization (e.g. inside `init()`) and after every call to isolate (e.g. inside `_runInIsolate()`)
void _resetInactivityTimer() {
_inactivityTimer?.cancel();
_inactivityTimer = Timer(_inactivityDuration, () {
_logger.info(
'Face detection (YOLO ONNX) Isolate has been inactive for ${_inactivityDuration.inSeconds} seconds. Killing isolate.',
);
disposeIsolate();
});
}
/// Disposes the isolate worker.
void disposeIsolate() {
if (!isSpawned) return;
isSpawned = false;
_isolate.kill();
_receivePort.close();
_inactivityTimer?.cancel();
}
/// Detects faces in the given image data.
Future<(List<FaceDetectionRelative>, Dimensions)> predict(
Uint8List imageData,
) async {
assert(isInitialized);
final stopwatch = Stopwatch()..start();
final stopwatchDecoding = Stopwatch()..start();
final (inputImageList, originalSize, newSize) =
await ImageMlIsolate.instance.preprocessImageYoloOnnx(
imageData,
normalize: true,
requiredWidth: kInputWidth,
requiredHeight: kInputHeight,
maintainAspectRatio: true,
quality: FilterQuality.medium,
);
// final input = [inputImageList];
final inputShape = [
1,
3,
kInputHeight,
kInputWidth,
];
final inputOrt = OrtValueTensor.createTensorWithDataList(
inputImageList,
inputShape,
);
final inputs = {'input': inputOrt};
stopwatchDecoding.stop();
_logger.info(
'Image decoding and preprocessing is finished, in ${stopwatchDecoding.elapsedMilliseconds}ms',
);
_logger.info('original size: $originalSize \n new size: $newSize');
// Run inference
final stopwatchInterpreter = Stopwatch()..start();
List<OrtValue?>? outputs;
try {
final runOptions = OrtRunOptions();
final session = OrtSession.fromAddress(sessionAddress);
outputs = session.run(runOptions, inputs);
// inputOrt.release();
// runOptions.release();
} catch (e, s) {
_logger.severe('Error while running inference: $e \n $s');
throw YOLOFaceInterpreterRunException();
}
stopwatchInterpreter.stop();
_logger.info(
'interpreter.run is finished, in ${stopwatchInterpreter.elapsedMilliseconds} ms',
);
final relativeDetections = _yoloPostProcessOutputs(outputs, newSize);
stopwatch.stop();
_logger.info(
'predict() face detection executed in ${stopwatch.elapsedMilliseconds}ms',
);
return (relativeDetections, originalSize);
}
/// Detects faces in the given image data.
static Future<(List<FaceDetectionRelative>, Dimensions)> predictSync(
ui.Image image,
ByteData imageByteData,
int sessionAddress,
) async {
assert(sessionAddress != 0 && sessionAddress != -1);
final stopwatch = Stopwatch()..start();
final stopwatchPreprocessing = Stopwatch()..start();
final (inputImageList, originalSize, newSize) =
await preprocessImageToFloat32ChannelsFirst(
image,
imageByteData,
normalization: 1,
requiredWidth: kInputWidth,
requiredHeight: kInputHeight,
maintainAspectRatio: true,
);
// final input = [inputImageList];
final inputShape = [
1,
3,
kInputHeight,
kInputWidth,
];
final inputOrt = OrtValueTensor.createTensorWithDataList(
inputImageList,
inputShape,
);
final inputs = {'input': inputOrt};
stopwatchPreprocessing.stop();
dev.log(
'Face detection image preprocessing is finished, in ${stopwatchPreprocessing.elapsedMilliseconds}ms',
);
_logger.info(
'Image decoding and preprocessing is finished, in ${stopwatchPreprocessing.elapsedMilliseconds}ms',
);
_logger.info('original size: $originalSize \n new size: $newSize');
// Run inference
final stopwatchInterpreter = Stopwatch()..start();
List<OrtValue?>? outputs;
try {
final runOptions = OrtRunOptions();
final session = OrtSession.fromAddress(sessionAddress);
outputs = session.run(runOptions, inputs);
// inputOrt.release();
// runOptions.release();
} catch (e, s) {
_logger.severe('Error while running inference: $e \n $s');
throw YOLOFaceInterpreterRunException();
}
stopwatchInterpreter.stop();
_logger.info(
'interpreter.run is finished, in ${stopwatchInterpreter.elapsedMilliseconds} ms',
);
final relativeDetections = _yoloPostProcessOutputs(outputs, newSize);
stopwatch.stop();
_logger.info(
'predict() face detection executed in ${stopwatch.elapsedMilliseconds}ms',
);
return (relativeDetections, originalSize);
}
/// Detects faces in the given image data.
Future<(List<FaceDetectionRelative>, Dimensions)> predictInIsolate(
Uint8List imageData,
) async {
await ensureSpawnedIsolate();
assert(isInitialized);
_logger.info('predictInIsolate() is called');
final stopwatch = Stopwatch()..start();
final stopwatchDecoding = Stopwatch()..start();
final (inputImageList, originalSize, newSize) =
await ImageMlIsolate.instance.preprocessImageYoloOnnx(
imageData,
normalize: true,
requiredWidth: kInputWidth,
requiredHeight: kInputHeight,
maintainAspectRatio: true,
quality: FilterQuality.medium,
);
// final input = [inputImageList];
final inputShape = [
1,
3,
kInputHeight,
kInputWidth,
];
stopwatchDecoding.stop();
_logger.info(
'Image decoding and preprocessing is finished, in ${stopwatchDecoding.elapsedMilliseconds}ms',
);
_logger.info('original size: $originalSize \n new size: $newSize');
final (
List<FaceDetectionRelative> relativeDetections,
delaySentToIsolate,
timeSentToMain
) = await _runInIsolate(
(
FaceDetectionOperation.yoloInferenceAndPostProcessing,
{
'inputImageList': inputImageList,
'inputShape': inputShape,
'newSize': newSize,
'sessionAddress': sessionAddress,
'timeNow': DateTime.now(),
}
),
) as (List<FaceDetectionRelative>, int, DateTime);
final delaySentToMain =
DateTime.now().difference(timeSentToMain).inMilliseconds;
stopwatch.stop();
_logger.info(
'predictInIsolate() face detection executed in ${stopwatch.elapsedMilliseconds}ms, with ${delaySentToIsolate}ms delay sent to isolate, and ${delaySentToMain}ms delay sent to main, for a total of ${delaySentToIsolate + delaySentToMain}ms delay due to isolate',
);
return (relativeDetections, originalSize);
}
Future<(List<FaceDetectionRelative>, Dimensions)> predictInComputer(
String imagePath,
) async {
assert(isInitialized);
_logger.info('predictInComputer() is called');
final stopwatch = Stopwatch()..start();
final stopwatchDecoding = Stopwatch()..start();
final imageData = await File(imagePath).readAsBytes();
final (inputImageList, originalSize, newSize) =
await ImageMlIsolate.instance.preprocessImageYoloOnnx(
imageData,
normalize: true,
requiredWidth: kInputWidth,
requiredHeight: kInputHeight,
maintainAspectRatio: true,
quality: FilterQuality.medium,
);
// final input = [inputImageList];
return await _computerLock.synchronized(() async {
final inputShape = [
1,
3,
kInputHeight,
kInputWidth,
];
stopwatchDecoding.stop();
_logger.info(
'Image decoding and preprocessing is finished, in ${stopwatchDecoding.elapsedMilliseconds}ms',
);
_logger.info('original size: $originalSize \n new size: $newSize');
final (
List<FaceDetectionRelative> relativeDetections,
delaySentToIsolate,
timeSentToMain
) = await _computer.compute(
inferenceAndPostProcess,
param: {
'inputImageList': inputImageList,
'inputShape': inputShape,
'newSize': newSize,
'sessionAddress': sessionAddress,
'timeNow': DateTime.now(),
},
) as (List<FaceDetectionRelative>, int, DateTime);
final delaySentToMain =
DateTime.now().difference(timeSentToMain).inMilliseconds;
stopwatch.stop();
_logger.info(
'predictInComputer() face detection executed in ${stopwatch.elapsedMilliseconds}ms, with ${delaySentToIsolate}ms delay sent to isolate, and ${delaySentToMain}ms delay sent to main, for a total of ${delaySentToIsolate + delaySentToMain}ms delay due to isolate',
);
return (relativeDetections, originalSize);
});
}
/// Detects faces in the given image data.
/// This method is optimized for batch processing.
///
/// `imageDataList`: The image data to analyze.
///
/// WARNING: Currently this method only returns the detections for the first image in the batch.
/// Change the function to output all detections before actually using it in production.
Future<List<FaceDetectionRelative>> predictBatch(
List<Uint8List> imageDataList,
) async {
assert(isInitialized);
final stopwatch = Stopwatch()..start();
final stopwatchDecoding = Stopwatch()..start();
final List<Float32List> inputImageDataLists = [];
final List<(Dimensions, Dimensions)> originalAndNewSizeList = [];
int concatenatedImageInputsLength = 0;
for (final imageData in imageDataList) {
final (inputImageList, originalSize, newSize) =
await ImageMlIsolate.instance.preprocessImageYoloOnnx(
imageData,
normalize: true,
requiredWidth: kInputWidth,
requiredHeight: kInputHeight,
maintainAspectRatio: true,
quality: FilterQuality.medium,
);
inputImageDataLists.add(inputImageList);
originalAndNewSizeList.add((originalSize, newSize));
concatenatedImageInputsLength += inputImageList.length;
}
final inputImageList = Float32List(concatenatedImageInputsLength);
int offset = 0;
for (int i = 0; i < inputImageDataLists.length; i++) {
final inputImageData = inputImageDataLists[i];
inputImageList.setRange(
offset,
offset + inputImageData.length,
inputImageData,
);
offset += inputImageData.length;
}
// final input = [inputImageList];
final inputShape = [
inputImageDataLists.length,
3,
kInputHeight,
kInputWidth,
];
final inputOrt = OrtValueTensor.createTensorWithDataList(
inputImageList,
inputShape,
);
final inputs = {'input': inputOrt};
stopwatchDecoding.stop();
_logger.info(
'Image decoding and preprocessing is finished, in ${stopwatchDecoding.elapsedMilliseconds}ms',
);
// _logger.info('original size: $originalSize \n new size: $newSize');
_logger.info('interpreter.run is called');
// Run inference
final stopwatchInterpreter = Stopwatch()..start();
List<OrtValue?>? outputs;
try {
final runOptions = OrtRunOptions();
final session = OrtSession.fromAddress(sessionAddress);
outputs = session.run(runOptions, inputs);
inputOrt.release();
runOptions.release();
} catch (e, s) {
_logger.severe('Error while running inference: $e \n $s');
throw YOLOFaceInterpreterRunException();
}
stopwatchInterpreter.stop();
_logger.info(
'interpreter.run is finished, in ${stopwatchInterpreter.elapsedMilliseconds} ms, or ${stopwatchInterpreter.elapsedMilliseconds / inputImageDataLists.length} ms per image',
);
_logger.info('outputs: $outputs');
const int imageOutputToUse = 0;
// Get output tensors
final nestedResults =
outputs[0]?.value as List<List<List<double>>>; // [b, 25200, 16]
final selectedResults = nestedResults[imageOutputToUse]; // [25200, 16]
// final rawScores = <double>[];
// for (final result in firstResults) {
// rawScores.add(result[4]);
// }
// final rawScoresCopy = List<double>.from(rawScores);
// rawScoresCopy.sort();
// _logger.info('rawScores minimum: ${rawScoresCopy.first}');
// _logger.info('rawScores maximum: ${rawScoresCopy.last}');
var relativeDetections = yoloOnnxFilterExtractDetections(
kMinScoreSigmoidThreshold,
kInputWidth,
kInputHeight,
results: selectedResults,
);
// Release outputs
for (var element in outputs) {
element?.release();
}
// Account for the fact that the aspect ratio was maintained
for (final faceDetection in relativeDetections) {
faceDetection.correctForMaintainedAspectRatio(
const Dimensions(
width: kInputWidth,
height: kInputHeight,
),
originalAndNewSizeList[imageOutputToUse].$2,
);
}
// Non-maximum suppression to remove duplicate detections
relativeDetections = naiveNonMaxSuppression(
detections: relativeDetections,
iouThreshold: kIouThreshold,
);
if (relativeDetections.isEmpty) {
_logger.info('No face detected');
return <FaceDetectionRelative>[];
}
stopwatch.stop();
_logger.info(
'predict() face detection executed in ${stopwatch.elapsedMilliseconds}ms',
);
return relativeDetections;
}
static List<FaceDetectionRelative> _yoloPostProcessOutputs(
List<OrtValue?>? outputs,
Dimensions newSize,
) {
// Get output tensors
final nestedResults =
outputs?[0]?.value as List<List<List<double>>>; // [1, 25200, 16]
final firstResults = nestedResults[0]; // [25200, 16]
// final rawScores = <double>[];
// for (final result in firstResults) {
// rawScores.add(result[4]);
// }
// final rawScoresCopy = List<double>.from(rawScores);
// rawScoresCopy.sort();
// _logger.info('rawScores minimum: ${rawScoresCopy.first}');
// _logger.info('rawScores maximum: ${rawScoresCopy.last}');
var relativeDetections = yoloOnnxFilterExtractDetections(
kMinScoreSigmoidThreshold,
kInputWidth,
kInputHeight,
results: firstResults,
);
// Release outputs
// outputs?.forEach((element) {
// element?.release();
// });
// Account for the fact that the aspect ratio was maintained
for (final faceDetection in relativeDetections) {
faceDetection.correctForMaintainedAspectRatio(
const Dimensions(
width: kInputWidth,
height: kInputHeight,
),
newSize,
);
}
// Non-maximum suppression to remove duplicate detections
relativeDetections = naiveNonMaxSuppression(
detections: relativeDetections,
iouThreshold: kIouThreshold,
);
return relativeDetections;
}
/// Initialize the interpreter by loading the model file.
static Future<int> _loadModel(Map args) async {
final sessionOptions = OrtSessionOptions()
..setInterOpNumThreads(1)
..setIntraOpNumThreads(1)
..setSessionGraphOptimizationLevel(GraphOptimizationLevel.ortEnableAll);
try {
// _logger.info('Loading face embedding model');
final session =
OrtSession.fromFile(File(args["modelPath"]), sessionOptions);
// _logger.info('Face embedding model loaded');
return session.address;
} catch (e, _) {
// _logger.severe('Face embedding model not loaded', e, s);
}
return -1;
}
static Future<void> _releaseModel(Map args) async {
final address = args['address'] as int;
if (address == 0) {
return;
}
final session = OrtSession.fromAddress(address);
session.release();
return;
}
static Future<(List<FaceDetectionRelative>, int, DateTime)>
inferenceAndPostProcess(
Map args,
) async {
final inputImageList = args['inputImageList'] as Float32List;
final inputShape = args['inputShape'] as List<int>;
final newSize = args['newSize'] as Dimensions;
final sessionAddress = args['sessionAddress'] as int;
final timeSentToIsolate = args['timeNow'] as DateTime;
final delaySentToIsolate =
DateTime.now().difference(timeSentToIsolate).inMilliseconds;
final Stopwatch stopwatchPrepare = Stopwatch()..start();
final inputOrt = OrtValueTensor.createTensorWithDataList(
inputImageList,
inputShape,
);
final inputs = {'input': inputOrt};
stopwatchPrepare.stop();
dev.log(
'[YOLOFaceDetectionService] data preparation is finished, in ${stopwatchPrepare.elapsedMilliseconds}ms',
);
stopwatchPrepare.reset();
stopwatchPrepare.start();
final runOptions = OrtRunOptions();
final session = OrtSession.fromAddress(sessionAddress);
stopwatchPrepare.stop();
dev.log(
'[YOLOFaceDetectionService] session preparation is finished, in ${stopwatchPrepare.elapsedMilliseconds}ms',
);
final stopwatchInterpreter = Stopwatch()..start();
late final List<OrtValue?> outputs;
try {
outputs = session.run(runOptions, inputs);
} catch (e, s) {
dev.log(
'[YOLOFaceDetectionService] Error while running inference: $e \n $s',
);
throw YOLOFaceInterpreterRunException();
}
stopwatchInterpreter.stop();
dev.log(
'[YOLOFaceDetectionService] interpreter.run is finished, in ${stopwatchInterpreter.elapsedMilliseconds} ms',
);
final relativeDetections = _yoloPostProcessOutputs(outputs, newSize);
return (relativeDetections, delaySentToIsolate, DateTime.now());
}
}
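// A hedged usage sketch of the service above. The image bytes are assumed to
// come from elsewhere in the app; model download and session setup happen in
// init(), and the detection isolate is torn down explicitly once done.
Future<void> faceDetectionServiceExample(Uint8List imageData) async {
  await FaceDetectionService.instance.init();
  final (detections, originalSize) =
      await FaceDetectionService.instance.predictInIsolate(imageData);
  print('found ${detections.length} faces in a $originalSize image');
  for (final face in detections) {
    print('face (score ${face.score}) at relative box ${face.box}');
  }
  FaceDetectionService.instance.disposeIsolate();
}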

View file

@ -0,0 +1,49 @@
import 'dart:math' as math show max, min;
import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart';
List<FaceDetectionRelative> naiveNonMaxSuppression({
required List<FaceDetectionRelative> detections,
required double iouThreshold,
}) {
// Sort the detections by score, the highest first
detections.sort((a, b) => b.score.compareTo(a.score));
// Loop through the detections and calculate the IOU
for (var i = 0; i < detections.length - 1; i++) {
for (var j = i + 1; j < detections.length; j++) {
final iou = _calculateIOU(detections[i], detections[j]);
if (iou >= iouThreshold) {
detections.removeAt(j);
j--;
}
}
}
return detections;
}
double _calculateIOU(
FaceDetectionRelative detectionA,
FaceDetectionRelative detectionB,
) {
final areaA = detectionA.width * detectionA.height;
final areaB = detectionB.width * detectionB.height;
final intersectionMinX = math.max(detectionA.xMinBox, detectionB.xMinBox);
final intersectionMinY = math.max(detectionA.yMinBox, detectionB.yMinBox);
final intersectionMaxX = math.min(detectionA.xMaxBox, detectionB.xMaxBox);
final intersectionMaxY = math.min(detectionA.yMaxBox, detectionB.yMaxBox);
final intersectionWidth = intersectionMaxX - intersectionMinX;
final intersectionHeight = intersectionMaxY - intersectionMinY;
if (intersectionWidth < 0 || intersectionHeight < 0) {
return 0.0; // If boxes do not overlap, IoU is 0
}
final intersectionArea = intersectionWidth * intersectionHeight;
final unionArea = areaA + areaB - intersectionArea;
return intersectionArea / unionArea;
}
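// A small worked example (illustrative values): two heavily overlapping
// detections, where the lower-scoring one is suppressed. The two boxes below
// have IoU ≈ 0.82, well above the 0.4 threshold used by the detector.
void nmsExample() {
  final keypoints = <List<double>>[[0, 0], [0, 0], [0, 0], [0, 0], [0, 0]];
  final a = FaceDetectionRelative(
    score: 0.9,
    box: [0.10, 0.10, 0.50, 0.50],
    allKeypoints: keypoints,
  );
  final b = FaceDetectionRelative(
    score: 0.8,
    box: [0.12, 0.12, 0.52, 0.52],
    allKeypoints: keypoints,
  );
  final kept = naiveNonMaxSuppression(detections: [a, b], iouThreshold: 0.4);
  print(kept.length); // 1, only the 0.9-score detection survives
}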

View file

@ -0,0 +1,95 @@
import 'dart:developer' as dev show log;
import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart';
List<FaceDetectionRelative> yoloOnnxFilterExtractDetections(
double minScoreSigmoidThreshold,
int inputWidth,
int inputHeight, {
required List<List<double>> results, // [25200, 16]
}) {
final outputDetections = <FaceDetectionRelative>[];
final output = <List<double>>[];
// Go through the raw output and check the scores
for (final result in results) {
// Filter out raw detections with low scores
if (result[4] < minScoreSigmoidThreshold) {
continue;
}
// Get the raw detection
final rawDetection = List<double>.from(result);
// Append the processed raw detection to the output
output.add(rawDetection);
}
if (output.isEmpty) {
double maxScore = 0;
for (final result in results) {
if (result[4] > maxScore) {
maxScore = result[4];
}
}
dev.log(
'No face detections found above the minScoreSigmoidThreshold of $minScoreSigmoidThreshold. The max score was $maxScore.',
);
}
for (final List<double> rawDetection in output) {
// Get absolute bounding box coordinates in format [xMin, yMin, xMax, yMax] https://github.com/deepcam-cn/yolov5-face/blob/eb23d18defe4a76cc06449a61cd51004c59d2697/utils/general.py#L216
final xMinAbs = rawDetection[0] - rawDetection[2] / 2;
final yMinAbs = rawDetection[1] - rawDetection[3] / 2;
final xMaxAbs = rawDetection[0] + rawDetection[2] / 2;
final yMaxAbs = rawDetection[1] + rawDetection[3] / 2;
// Get the relative bounding box coordinates in format [xMin, yMin, xMax, yMax]
final box = [
xMinAbs / inputWidth,
yMinAbs / inputHeight,
xMaxAbs / inputWidth,
yMaxAbs / inputHeight,
];
// Get the keypoints coordinates in format [x, y]
final allKeypoints = <List<double>>[
[
rawDetection[5] / inputWidth,
rawDetection[6] / inputHeight,
],
[
rawDetection[7] / inputWidth,
rawDetection[8] / inputHeight,
],
[
rawDetection[9] / inputWidth,
rawDetection[10] / inputHeight,
],
[
rawDetection[11] / inputWidth,
rawDetection[12] / inputHeight,
],
[
rawDetection[13] / inputWidth,
rawDetection[14] / inputHeight,
],
];
// Get the score
final score =
rawDetection[4]; // Or should it be rawDetection[4]*rawDetection[15]?
// Create the relative detection
final detection = FaceDetectionRelative(
score: score,
box: box,
allKeypoints: allKeypoints,
);
// Append the relative detection to the output
outputDetections.add(detection);
}
return outputDetections;
}
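// Hypothetical driver (not part of the diff) showing how this filter step
// is typically wired up: raw ONNX rows in, score-filtered relative
// detections out, with a greedy NMS pass (shown earlier) applied after.
// The threshold and input dimensions below are illustrative values.
List<FaceDetectionRelative> postProcessYoloOutput(
List<List<double>> rawResults, // [25200, 16] rows from the ONNX session
) {
final detections = yoloOnnxFilterExtractDetections(
0.7, // minScoreSigmoidThreshold (assumed)
640, // inputWidth (assumed)
640, // inputHeight (assumed)
results: rawResults,
);
// ...followed by the IoU-based suppression from the previous file.
return detections;
}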

View file

@ -0,0 +1,11 @@
class MobileFaceNetInterpreterInitializationException implements Exception {}
class MobileFaceNetImagePreprocessingException implements Exception {}
class MobileFaceNetEmptyInput implements Exception {}
class MobileFaceNetWrongInputSize implements Exception {}
class MobileFaceNetWrongInputRange implements Exception {}
class MobileFaceNetInterpreterRunException implements Exception {}
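// Hypothetical usage sketch (not in the diff): these marker exceptions let
// callers distinguish failure modes of the embedding pipeline without
// parsing error strings.
Future<void> runEmbeddingGuarded(Future<void> Function() run) async {
try {
await run();
} on MobileFaceNetEmptyInput {
// Nothing to embed, e.g. no face crops were produced.
} on MobileFaceNetWrongInputSize {
// Input does not match the expected 112x112x3 layout.
} on MobileFaceNetInterpreterRunException {
// The ONNX session failed during inference.
}
}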

View file

@ -0,0 +1,249 @@
import "dart:io" show File;
import 'dart:math' as math show max, min, sqrt;
import 'dart:typed_data' show Float32List;
import 'package:computer/computer.dart';
import 'package:logging/logging.dart';
import 'package:onnxruntime/onnxruntime.dart';
import 'package:photos/services/machine_learning/face_ml/face_detection/detection.dart';
import "package:photos/services/remote_assets_service.dart";
import "package:photos/utils/image_ml_isolate.dart";
import "package:synchronized/synchronized.dart";
/// This class is responsible for running the face embedding model (MobileFaceNet) on ONNX runtime, and can be accessed through the singleton instance [FaceEmbeddingService.instance].
class FaceEmbeddingService {
static const kModelBucketEndpoint = "https://models.ente.io/";
static const kRemoteBucketModelPath = "mobilefacenet_opset15.onnx";
static const modelRemotePath = kModelBucketEndpoint + kRemoteBucketModelPath;
static const int kInputSize = 112;
static const int kEmbeddingSize = 192;
static const int kNumChannels = 3;
static const bool kPreWhiten = false;
static final _logger = Logger('FaceEmbeddingOnnx');
bool isInitialized = false;
int sessionAddress = 0;
final _computer = Computer.shared();
final _computerLock = Lock();
// singleton pattern
FaceEmbeddingService._privateConstructor();
/// Use this instance to access the FaceEmbeddingService. Make sure to call `init()` before using it.
/// e.g. `await FaceEmbeddingService.instance.init();`
///
/// Then you can use `predictFromImageDataInComputer()` or `predictInComputer()` to get the embedding(s) of one or more faces.
///
/// config options: faceEmbeddingEnte
static final instance = FaceEmbeddingService._privateConstructor();
factory FaceEmbeddingService() => instance;
/// Check if the interpreter is initialized, if not initialize it with `loadModel()`
Future<void> init() async {
if (!isInitialized) {
_logger.info('init is called');
final model =
await RemoteAssetsService.instance.getAsset(modelRemotePath);
final startTime = DateTime.now();
// The model asset is resolved in the main isolate, since `rootBundle` cannot be accessed outside it; only the file path is handed to the worker isolate
sessionAddress = await _computer.compute(
_loadModel,
param: {
"modelPath": model.path,
},
);
final endTime = DateTime.now();
_logger.info(
"Face embedding model loaded, took: ${(endTime.millisecondsSinceEpoch - startTime.millisecondsSinceEpoch).toString()}ms",
);
if (sessionAddress != -1) {
isInitialized = true;
}
}
}
Future<void> release() async {
if (isInitialized) {
await _computer
.compute(_releaseModel, param: {'address': sessionAddress});
isInitialized = false;
sessionAddress = 0;
}
}
static Future<int> _loadModel(Map args) async {
final sessionOptions = OrtSessionOptions()
..setInterOpNumThreads(1)
..setIntraOpNumThreads(1)
..setSessionGraphOptimizationLevel(GraphOptimizationLevel.ortEnableAll);
try {
final session =
OrtSession.fromFile(File(args["modelPath"]), sessionOptions);
return session.address;
} catch (_) {
// Intentionally swallowed: logs from this worker isolate would not reach
// the main-isolate listeners, so returning -1 signals the load failure.
}
return -1;
}
static Future<void> _releaseModel(Map args) async {
final address = args['address'] as int;
if (address == 0) {
return;
}
final session = OrtSession.fromAddress(address);
session.release();
return;
}
Future<(List<double>, bool, double)> predictFromImageDataInComputer(
String imagePath,
FaceDetectionRelative face,
) async {
assert(sessionAddress != 0 && sessionAddress != -1 && isInitialized);
try {
final stopwatchDecoding = Stopwatch()..start();
final (inputImageList, _, isBlur, blurValue, _) =
await ImageMlIsolate.instance.preprocessMobileFaceNetOnnx(
imagePath,
[face],
);
stopwatchDecoding.stop();
_logger.info(
'MobileFaceNet image decoding and preprocessing is finished, in ${stopwatchDecoding.elapsedMilliseconds}ms',
);
final stopwatch = Stopwatch()..start();
_logger.info('MobileFaceNet interpreter.run is called');
final embedding = await _computer.compute(
inferFromMap,
param: {
'input': inputImageList,
'address': sessionAddress,
'inputSize': kInputSize,
},
taskName: 'createFaceEmbedding',
) as List<double>;
stopwatch.stop();
_logger.info(
'MobileFaceNet interpreter.run is finished, in ${stopwatch.elapsedMilliseconds}ms',
);
_logger.info(
'MobileFaceNet results (only first few numbers): embedding ${embedding.sublist(0, 5)}',
);
_logger.info(
'Mean of embedding: ${embedding.reduce((a, b) => a + b) / embedding.length}',
);
_logger.info(
'Max of embedding: ${embedding.reduce(math.max)}',
);
_logger.info(
'Min of embedding: ${embedding.reduce(math.min)}',
);
return (embedding, isBlur[0], blurValue[0]);
} catch (e) {
_logger.info('MobileFaceNet Error while running inference: $e');
rethrow;
}
}
Future<List<List<double>>> predictInComputer(Float32List input) async {
assert(sessionAddress != 0 && sessionAddress != -1 && isInitialized);
return await _computerLock.synchronized(() async {
try {
final stopwatch = Stopwatch()..start();
_logger.info('MobileFaceNet interpreter.run is called');
final embeddings = await _computer.compute(
inferFromMap,
param: {
'input': input,
'address': sessionAddress,
'inputSize': kInputSize,
},
taskName: 'createFaceEmbedding',
) as List<List<double>>;
stopwatch.stop();
_logger.info(
'MobileFaceNet interpreter.run is finished, in ${stopwatch.elapsedMilliseconds}ms',
);
return embeddings;
} catch (e) {
_logger.info('MobileFaceNet Error while running inference: $e');
rethrow;
}
});
}
static Future<List<List<double>>> predictSync(
Float32List input,
int sessionAddress,
) async {
assert(sessionAddress != 0 && sessionAddress != -1);
try {
final stopwatch = Stopwatch()..start();
_logger.info('MobileFaceNet interpreter.run is called');
final embeddings = await infer(
input,
sessionAddress,
kInputSize,
);
stopwatch.stop();
_logger.info(
'MobileFaceNet interpreter.run is finished, in ${stopwatch.elapsedMilliseconds}ms',
);
return embeddings;
} catch (e) {
_logger.info('MobileFaceNet Error while running inference: $e');
rethrow;
}
}
static Future<List<List<double>>> inferFromMap(Map args) async {
final inputImageList = args['input'] as Float32List;
final address = args['address'] as int;
final inputSize = args['inputSize'] as int;
return await infer(inputImageList, address, inputSize);
}
static Future<List<List<double>>> infer(
Float32List inputImageList,
int address,
int inputSize,
) async {
final runOptions = OrtRunOptions();
final int numberOfFaces =
inputImageList.length ~/ (inputSize * inputSize * 3);
final inputOrt = OrtValueTensor.createTensorWithDataList(
inputImageList,
[numberOfFaces, inputSize, inputSize, 3],
);
final inputs = {'img_inputs': inputOrt};
final session = OrtSession.fromAddress(address);
final List<OrtValue?> outputs = session.run(runOptions, inputs);
final embeddings = outputs[0]?.value as List<List<double>>;
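// L2-normalize each embedding in place so that downstream cosine
// similarity reduces to a plain dot product.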
for (final embedding in embeddings) {
double normalization = 0;
for (int i = 0; i < kEmbeddingSize; i++) {
normalization += embedding[i] * embedding[i];
}
final double sqrtNormalization = math.sqrt(normalization);
for (int i = 0; i < kEmbeddingSize; i++) {
embedding[i] = embedding[i] / sqrtNormalization;
}
}
return embeddings;
}
}
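// Minimal usage sketch (illustrative; the image path and `face` detection
// are assumptions, not values from this diff):
//
//   await FaceEmbeddingService.instance.init();
//   final (embedding, isBlur, blurValue) = await FaceEmbeddingService
//       .instance
//       .predictFromImageDataInComputer('/path/to/image.jpg', face);
//   // `embedding` is a 192-dimensional, L2-normalized vector.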

View file

@ -0,0 +1,155 @@
import 'package:logging/logging.dart';
import "package:photos/services/machine_learning/face_ml/face_detection/detection.dart";
import 'package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart';
class BlurDetectionService {
final _logger = Logger('BlurDetectionService');
// singleton pattern
BlurDetectionService._privateConstructor();
static final instance = BlurDetectionService._privateConstructor();
factory BlurDetectionService() => instance;
Future<(bool, double)> predictIsBlurGrayLaplacian(
List<List<int>> grayImage, {
int threshold = kLaplacianHardThreshold,
FaceDirection faceDirection = FaceDirection.straight,
}) async {
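// The variance of the Laplacian is a standard sharpness heuristic:
// sharp images have strong edge responses, so low variance means blur.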
final List<List<int>> laplacian =
_applyLaplacian(grayImage, faceDirection: faceDirection);
final double variance = _calculateVariance(laplacian);
_logger.info('Variance: $variance');
return (variance < threshold, variance);
}
double _calculateVariance(List<List<int>> matrix) {
final int numRows = matrix.length;
final int numCols = matrix[0].length;
final int totalElements = numRows * numCols;
// Calculate the mean
double mean = 0;
for (var row in matrix) {
for (var value in row) {
mean += value;
}
}
mean /= totalElements;
// Calculate the variance
double variance = 0;
for (var row in matrix) {
for (var value in row) {
final double diff = value - mean;
variance += diff * diff;
}
}
variance /= totalElements;
return variance;
}
List<List<int>> _padImage(
List<List<int>> image, {
int removeSideColumns = 56,
FaceDirection faceDirection = FaceDirection.straight,
}) {
// Throw if removeSideColumns is odd, since half of it is removed from each side
if (removeSideColumns % 2 != 0) {
throw Exception('removeSideColumns must be even');
}
final int numRows = image.length;
final int numCols = image[0].length;
final int paddedNumCols = numCols + 2 - removeSideColumns;
final int paddedNumRows = numRows + 2;
// Create a new matrix with extra padding
final List<List<int>> paddedImage = List.generate(
paddedNumRows,
(i) => List.generate(
paddedNumCols,
(j) => 0,
growable: false,
),
growable: false,
);
// Copy original image into the center of the padded image, taking into account the face direction
if (faceDirection == FaceDirection.straight) {
for (int i = 0; i < numRows; i++) {
for (int j = 0; j < (paddedNumCols - 2); j++) {
paddedImage[i + 1][j + 1] =
image[i][j + (removeSideColumns / 2).round()];
}
}
// If the face is facing left, we only take the right side of the face image
} else if (faceDirection == FaceDirection.left) {
for (int i = 0; i < numRows; i++) {
for (int j = 0; j < (paddedNumCols - 2); j++) {
paddedImage[i + 1][j + 1] = image[i][j + removeSideColumns];
}
}
// If the face is facing right, we only take the left side of the face image
} else if (faceDirection == FaceDirection.right) {
for (int i = 0; i < numRows; i++) {
for (int j = 0; j < (paddedNumCols - 2); j++) {
paddedImage[i + 1][j + 1] = image[i][j];
}
}
}
// Reflect padding
// Top and bottom rows
for (int j = 1; j <= (paddedNumCols - 2); j++) {
paddedImage[0][j] = paddedImage[2][j]; // Top row
paddedImage[numRows + 1][j] = paddedImage[numRows - 1][j]; // Bottom row
}
// Left and right columns
for (int i = 0; i < numRows + 2; i++) {
paddedImage[i][0] = paddedImage[i][2]; // Left column
paddedImage[i][paddedNumCols - 1] =
paddedImage[i][paddedNumCols - 3]; // Right column
}
return paddedImage;
}
List<List<int>> _applyLaplacian(
List<List<int>> image, {
FaceDirection faceDirection = FaceDirection.straight,
}) {
final List<List<int>> paddedImage =
_padImage(image, faceDirection: faceDirection);
final int numRows = paddedImage.length - 2;
final int numCols = paddedImage[0].length - 2;
final List<List<int>> outputImage = List.generate(
numRows,
(i) => List.generate(numCols, (j) => 0, growable: false),
growable: false,
);
// Define the Laplacian kernel
final List<List<int>> kernel = [
[0, 1, 0],
[1, -4, 1],
[0, 1, 0],
];
// Apply the kernel to each pixel
for (int i = 0; i < numRows; i++) {
for (int j = 0; j < numCols; j++) {
int sum = 0;
for (int ki = 0; ki < 3; ki++) {
for (int kj = 0; kj < 3; kj++) {
sum += paddedImage[i + ki][j + kj] * kernel[ki][kj];
}
}
// Clipping (e.g. sum.clamp(0, 255)) is left out here; the variance
// computation works on the full Laplacian response
outputImage[i][j] = sum;
}
}
return outputImage;
}
}
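// Illustrative usage sketch (the grayscale matrix here is a uniform
// stand-in, not data from the diff):
//
//   final gray = List.generate(112, (_) => List.filled(112, 128));
//   final (isBlur, variance) =
//       await BlurDetectionService.instance.predictIsBlurGrayLaplacian(gray);
//   // A uniform image has zero Laplacian variance, so isBlur is true here.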

View file

@ -0,0 +1,20 @@
import 'package:photos/services/machine_learning/face_ml/face_detection/face_detection_service.dart';
/// Blur detection threshold
const kLaplacianHardThreshold = 10;
const kLaplacianSoftThreshold = 50;
const kLaplacianVerySoftThreshold = 200;
/// Default blur value
const kLapacianDefault = 10000.0;
/// The minimum score for a face to be considered a high quality face for clustering and person detection
const kMinimumQualityFaceScore = 0.80;
const kMediumQualityFaceScore = 0.85;
const kHighQualityFaceScore = 0.90;
/// The minimum score for a face to be detected, regardless of quality. Use [kMinimumQualityFaceScore] for high quality faces.
const kMinFaceDetectionScore = FaceDetectionService.kMinScoreSigmoidThreshold;
/// The minimum cluster size for displaying a cluster in the UI
const kMinimumClusterSizeSearchResult = 20;
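// Hypothetical gate (not part of the diff) combining these thresholds the
// way the pipeline plausibly uses them: a face must clear both the quality
// score floor and the blur floor before it is used for clustering.
bool isUsableForClustering(double faceScore, double blurValue) {
return faceScore >= kMinimumQualityFaceScore &&
blurValue > kLaplacianHardThreshold;
}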

Some files were not shown because too many files have changed in this diff.