// face_debug_section_widget.dart

  1. import "dart:async";
  2. import "package:flutter/foundation.dart";
  3. import 'package:flutter/material.dart';
  4. import "package:logging/logging.dart";
  5. import "package:photos/core/event_bus.dart";
  6. import "package:photos/events/people_changed_event.dart";
  7. import "package:photos/extensions/stop_watch.dart";
  8. import "package:photos/face/db.dart";
  9. import "package:photos/face/model/person.dart";
  10. import "package:photos/services/machine_learning/face_ml/face_filtering/face_filtering_constants.dart";
  11. import 'package:photos/services/machine_learning/face_ml/face_ml_service.dart';
  12. import "package:photos/services/machine_learning/face_ml/person/person_service.dart";
  13. import 'package:photos/theme/ente_theme.dart';
  14. import 'package:photos/ui/components/captioned_text_widget.dart';
  15. import 'package:photos/ui/components/expandable_menu_item_widget.dart';
  16. import 'package:photos/ui/components/menu_item_widget/menu_item_widget.dart';
  17. import 'package:photos/ui/settings/common_settings.dart';
  18. import "package:photos/utils/dialog_util.dart";
  19. import "package:photos/utils/local_settings.dart";
  20. import 'package:photos/utils/toast_util.dart';
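
/// "Face Beta" settings section with debug controls for the face ML
/// pipeline: toggling face indexing, running clustering, inspecting face
/// counts, and resetting the local face database tables.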
class FaceDebugSectionWidget extends StatefulWidget {
  const FaceDebugSectionWidget({Key? key}) : super(key: key);

  @override
  State<FaceDebugSectionWidget> createState() => _FaceDebugSectionWidgetState();
}
class _FaceDebugSectionWidgetState extends State<FaceDebugSectionWidget> {
  Timer? _timer;

  @override
  void initState() {
    super.initState();
    _timer = Timer.periodic(const Duration(seconds: 5), (timer) {
      setState(() {
        // Rebuild periodically so the FutureBuilder-backed counts below
        // stay up to date while indexing runs in the background.
      });
    });
  }

  @override
  void dispose() {
    _timer?.cancel();
    super.dispose();
  }
  @override
  Widget build(BuildContext context) {
    return ExpandableMenuItemWidget(
      title: "Face Beta",
      selectionOptionsWidget: _getSectionOptions(context),
      leadingIcon: Icons.bug_report_outlined,
    );
  }

  Widget _getSectionOptions(BuildContext context) {
    final Logger _logger = Logger("FaceDebugSectionWidget");
    return Column(
      children: [
        MenuItemWidget(
          captionedTextWidget: FutureBuilder<Map<int, int>>(
            future: FaceMLDataDB.instance.getIndexedFileIds(),
            builder: (context, snapshot) {
              if (snapshot.hasData) {
                return CaptionedTextWidget(
                  title: LocalSettings.instance.isFaceIndexingEnabled
                      ? "Disable indexing (${snapshot.data!.length})"
                      : "Enable indexing (${snapshot.data!.length})",
                );
              }
              return const SizedBox.shrink();
            },
          ),
          pressedColor: getEnteColorScheme(context).fillFaint,
          trailingIcon: Icons.chevron_right_outlined,
          trailingIconIsMuted: true,
          onTap: () async {
            try {
              final isEnabled =
                  await LocalSettings.instance.toggleFaceIndexing();
              if (isEnabled) {
                FaceMlService.instance.indexAllImages().ignore();
              } else {
                FaceMlService.instance.pauseIndexing();
              }
              if (mounted) {
                setState(() {});
              }
            } catch (e, s) {
              _logger.warning('indexing failed ', e, s);
              await showGenericErrorDialog(context: context, error: e);
            }
          },
        ),
        MenuItemWidget(
          captionedTextWidget: FutureBuilder<Map<int, int>>(
            future: FaceMLDataDB.instance.getIndexedFileIds(),
            builder: (context, snapshot) {
              if (snapshot.hasData) {
                return CaptionedTextWidget(
                  title: LocalSettings.instance.isFaceIndexingEnabled
                      ? "Disable indexing (no fetch) (${snapshot.data!.length})"
                      : "Enable indexing (${snapshot.data!.length})",
                );
              }
              return const SizedBox.shrink();
            },
          ),
          pressedColor: getEnteColorScheme(context).fillFaint,
          trailingIcon: Icons.chevron_right_outlined,
          trailingIconIsMuted: true,
          onTap: () async {
            try {
              final isEnabled =
                  await LocalSettings.instance.toggleFaceIndexing();
              if (isEnabled) {
                FaceMlService.instance
                    .indexAllImages(withFetching: false)
                    .ignore();
              } else {
                FaceMlService.instance.pauseIndexing();
              }
              if (mounted) {
                setState(() {});
              }
            } catch (e, s) {
              _logger.warning('indexing failed ', e, s);
              await showGenericErrorDialog(context: context, error: e);
            }
          },
        ),
        MenuItemWidget(
          captionedTextWidget: FutureBuilder<int>(
            future: FaceMLDataDB.instance.getTotalFaceCount(),
            builder: (context, snapshot) {
              if (snapshot.hasData) {
                return CaptionedTextWidget(
                  title: "${snapshot.data!} high quality faces",
                );
              }
              return const SizedBox.shrink();
            },
          ),
          pressedColor: getEnteColorScheme(context).fillFaint,
          trailingIcon: Icons.chevron_right_outlined,
          trailingIconIsMuted: true,
          onTap: () async {
            final faces75 = await FaceMLDataDB.instance
                .getTotalFaceCount(minFaceScore: 0.75);
            final faces78 = await FaceMLDataDB.instance
                .getTotalFaceCount(minFaceScore: kMinHighQualityFaceScore);
            final blurryFaceCount =
                await FaceMLDataDB.instance.getBlurryFaceCount(15);
            // Report all computed counts instead of dropping faces75/faces78.
            showShortToast(
              context,
              "$blurryFaceCount blurry faces, $faces75 faces with score > 0.75, $faces78 faces with score > $kMinHighQualityFaceScore",
            );
          },
        ),
        // MenuItemWidget(
        //   captionedTextWidget: const CaptionedTextWidget(
        //     title: "Analyze file ID 25728869",
        //   ),
        //   pressedColor: getEnteColorScheme(context).fillFaint,
        //   trailingIcon: Icons.chevron_right_outlined,
        //   trailingIconIsMuted: true,
        //   onTap: () async {
        //     try {
        //       final enteFile = await SearchService.instance.getAllFiles().then(
        //             (value) => value.firstWhere(
        //               (element) => element.uploadedFileID == 25728869,
        //             ),
        //           );
        //       _logger.info(
        //         'File with ID ${enteFile.uploadedFileID} has name ${enteFile.displayName}',
        //       );
        //       FaceMlService.instance.isImageIndexRunning = true;
        //       final result = await FaceMlService.instance
        //           .analyzeImageInSingleIsolate(enteFile);
        //       if (result != null) {
        //         final resultJson = result.toJsonString();
        //         _logger.info('result: $resultJson');
        //       }
        //       FaceMlService.instance.isImageIndexRunning = false;
        //     } catch (e, s) {
        //       _logger.severe('indexing failed ', e, s);
        //       await showGenericErrorDialog(context: context, error: e);
        //     } finally {
        //       FaceMlService.instance.isImageIndexRunning = false;
        //     }
        //   },
        // ),
        MenuItemWidget(
          captionedTextWidget: const CaptionedTextWidget(
            title: "Run Clustering",
          ),
          pressedColor: getEnteColorScheme(context).fillFaint,
          trailingIcon: Icons.chevron_right_outlined,
          trailingIconIsMuted: true,
          onTap: () async {
            try {
              await PersonService.instance.storeRemoteFeedback();
              await FaceMlService.instance
                  .clusterAllImages(clusterInBuckets: true);
              Bus.instance.fire(PeopleChangedEvent());
              showShortToast(context, "Done");
            } catch (e, s) {
              _logger.warning('clustering failed ', e, s);
              await showGenericErrorDialog(context: context, error: e);
            }
          },
        ),
        sectionOptionSpacing,
        MenuItemWidget(
          captionedTextWidget: const CaptionedTextWidget(
            title: "Reset feedback",
          ),
          pressedColor: getEnteColorScheme(context).fillFaint,
          trailingIcon: Icons.chevron_right_outlined,
          trailingIconIsMuted: true,
          onTap: () async {
            await FaceMLDataDB.instance.dropFeedbackTables();
            Bus.instance.fire(PeopleChangedEvent());
            showShortToast(context, "Done");
          },
        ),
        sectionOptionSpacing,
        MenuItemWidget(
          captionedTextWidget: const CaptionedTextWidget(
            title: "Reset feedback & clusters",
          ),
          pressedColor: getEnteColorScheme(context).fillFaint,
          trailingIcon: Icons.chevron_right_outlined,
          trailingIconIsMuted: true,
          alwaysShowSuccessState: true,
          onTap: () async {
            await showChoiceDialog(
              context,
              title: "Are you sure?",
              body:
                  "You will need to cluster all the faces again. You can drop feedback if you want to return to the original cluster labels.",
              firstButtonLabel: "Yes, confirm",
              firstButtonOnTap: () async {
                try {
                  await FaceMLDataDB.instance.resetClusterIDs();
                  await FaceMLDataDB.instance.dropClustersAndPersonTable();
                  Bus.instance.fire(PeopleChangedEvent());
                  showShortToast(context, "Done");
                } catch (e, s) {
                  _logger.warning('reset feedback failed ', e, s);
                  await showGenericErrorDialog(context: context, error: e);
                }
              },
            );
          },
        ),
        sectionOptionSpacing,
        MenuItemWidget(
          captionedTextWidget: const CaptionedTextWidget(
            title: "Drop People to clusterMapping",
          ),
          pressedColor: getEnteColorScheme(context).fillFaint,
          trailingIcon: Icons.chevron_right_outlined,
          trailingIconIsMuted: true,
          onTap: () async {
            await showChoiceDialog(
              context,
              title: "Are you sure?",
              body:
                  "This won't delete the people, but will remove the mapping of people to clusters",
              firstButtonLabel: "Yes, confirm",
              firstButtonOnTap: () async {
                try {
                  final List<PersonEntity> persons =
                      await PersonService.instance.getPersons();
                  for (final PersonEntity p in persons) {
                    await PersonService.instance.deletePerson(p.remoteID);
                  }
                  Bus.instance.fire(PeopleChangedEvent());
                  showShortToast(context, "Done");
                } catch (e, s) {
                  _logger.warning('peopleToPersonMapping remove failed ', e, s);
                  await showGenericErrorDialog(context: context, error: e);
                }
              },
            );
          },
        ),
        sectionOptionSpacing,
        MenuItemWidget(
          captionedTextWidget: const CaptionedTextWidget(
            title: "Drop embeddings & feedback",
          ),
          pressedColor: getEnteColorScheme(context).fillFaint,
          trailingIcon: Icons.chevron_right_outlined,
          trailingIconIsMuted: true,
          onTap: () async {
            await showChoiceDialog(
              context,
              title: "Are you sure?",
              body:
                  "You will need to re-index all the faces. You can drop feedback if you want to label again.",
              firstButtonLabel: "Yes, confirm",
              firstButtonOnTap: () async {
                try {
                  await FaceMLDataDB.instance
                      .dropClustersAndPersonTable(faces: true);
                  Bus.instance.fire(PeopleChangedEvent());
                  showShortToast(context, "Done");
                } catch (e, s) {
                  _logger.warning('drop feedback failed ', e, s);
                  await showGenericErrorDialog(context: context, error: e);
                }
              },
            );
          },
        ),
        if (kDebugMode) sectionOptionSpacing,
        if (kDebugMode)
          MenuItemWidget(
            captionedTextWidget: FutureBuilder<Map<int, int>>(
              future: FaceMLDataDB.instance.getIndexedFileIds(),
              builder: (context, snapshot) {
                if (snapshot.hasData) {
                  return CaptionedTextWidget(
                    title: "Read embeddings for ${snapshot.data!.length} files",
                  );
                }
                return const CaptionedTextWidget(
                  title: "Loading...",
                );
              },
            ),
            pressedColor: getEnteColorScheme(context).fillFaint,
            trailingIcon: Icons.chevron_right_outlined,
            trailingIconIsMuted: true,
            onTap: () async {
              final EnteWatch watch = EnteWatch("read_embeddings")..start();
              final result = await FaceMLDataDB.instance.getFaceEmbeddingMap();
              watch.logAndReset('read embeddings ${result.length} ');
              showShortToast(
                context,
                "Read ${result.length} face embeddings in ${watch.elapsed.inSeconds} secs",
              );
            },
          ),
      ],
    );
  }
}