@@ -8,9 +8,12 @@ import 'package:flutter/services.dart';
 import 'package:get/get.dart';
 import 'package:google_mlkit_face_detection/google_mlkit_face_detection.dart';
 import 'package:image_gallery_saver/image_gallery_saver.dart';
+import 'package:vitalapp/architecture/storage/storage.dart';
 import 'package:vitalapp/architecture/utils/prompt_box.dart';
 import 'package:vitalapp/managers/interfaces/cache.dart';
 import 'package:fis_common/logger/logger.dart';
+import 'package:vitalapp/rpc.dart';
+import 'package:vitalapp/store/store.dart';
 import 'dart:ui' as ui;
 import 'index.dart';

@@ -181,6 +184,7 @@ class FacialRecognitionController extends GetxController
       if (cameraDescription.lensDirection == CameraLensDirection.back) {
         await openNewCamera(cameraDescription);
         lockCaptureOrientation();
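+        // Track the active lens direction in the controller state.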
+        state.isUsingFrontCamera = false;
         update();
         state.isCameraReady = true;
         break;
@@ -197,6 +201,7 @@ class FacialRecognitionController extends GetxController
     for (CameraDescription cameraDescription in _cameras) {
       if (cameraDescription.lensDirection == CameraLensDirection.front) {
         await openNewCamera(cameraDescription);
+        state.isUsingFrontCamera = true;
         lockCaptureOrientation();
         update();
         state.isCameraReady = true;
@@ -335,7 +340,132 @@ class FacialRecognitionController extends GetxController

   /// Handles the "start face recognition" (capture face) event.
   void onCaptureFaceButtonPressed() {
-    runDetectionTimer();
+    // runDetectionTimer();
+    /// API test
+    rpcTest2();
+  }
+
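+  /// Face-recognition lookup test: takes a picture, runs on-device face
+  /// detection, uploads the image, and asks the backend for the matching
+  /// patient record.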
+  void rpcTest() async {
+    if (kCameraController == null) {
+      return;
+    }
+    final XFile? file = await takePicture();
+    if (file != null) {
+      faceDetector = FaceDetector(options: FaceDetectorOptions());
+      // faceDetector =
+      //     FaceDetector(options: FaceDetectorOptions(enableContours: true));
+      int faceNum = await doDetection(
+          faceDetector, file.path); // takes roughly 100 ms at max resolution
+
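+      // Require exactly one detected face before contacting the backend.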
+      if (faceNum == 0) {
+        PromptBox.toast('请将面部保持在识别框内');
+        return;
+      } else if (faceNum > 1) {
+        PromptBox.toast('请保持只有一张面部在识别范围内');
+        return;
+      }
+
+      /// TODO: upload the image to the cloud, then pass it to the backend
+      final url = await rpc.storage.upload(file);
+      print('⭐⭐⭐⭐⭐⭐⭐⭐ url: $url');
+      try {
+        PatientBaseDTO result =
+            await rpc.patient.getPatientBaseByFaceImageAsync(
+          GetPatientBaseByFaceImageRequest(
+            token: Store.user.token,
+            image: url,
+          ),
+        );
+        print(result);
+        if (result.faceScanErrorType == FaceScanErrorTypeEnum.Success) {
+          PromptBox.toast('人脸识别成功,身份证号 ${result.cardNo}');
+          finishFaceDetection(result);
+        } else {
+          // For now, treat API errors and "no matching person found" both as recognition failures.
+          PromptBox.toast('人脸识别失败,请重试');
+        }
+      } catch (e) {
+        logger.e("getPatientBaseByFaceImageAsync failed: $e", e);
+      }
+
+      // if (timer.tick == 1 || kFrameImageSize == Size.zero) {
+      //   Size imageSize = await getImageSize(file);
+      //   kFrameImageSize = imageSize;
+      // }
+      // int kTime = DateTime.now().millisecondsSinceEpoch;
+      // print('⭐⭐⭐⭐⭐⭐⭐⭐ capture time: ${kTime - lastCaptureTime} ms');
+      // lastCaptureTime = kTime;
+
+      // /// record elapsed time (ms)
+
+      // int endTime = DateTime.now().millisecondsSinceEpoch;
+      // print('⭐⭐⭐⭐⭐⭐⭐⭐ detection time: ${endTime - lastCaptureTime} ms');
+      // update(['face_bounding_box']);
+      // if (timer.tick >= 10) {
+      //   finishFaceDetection(); // TODO: wire up the real completion condition
+      // }
+    }
+  }
+
+  /// Face enrollment test.
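+  /// Captures a picture, checks that exactly one face is present, uploads the
+  /// image, and saves it against a fixed test ID number.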
+  void rpcTest2() async {
+    if (kCameraController == null) {
+      return;
+    }
+    final XFile? file = await takePicture();
+    if (file != null) {
+      faceDetector = FaceDetector(options: FaceDetectorOptions());
+      // faceDetector =
+      //     FaceDetector(options: FaceDetectorOptions(enableContours: true));
+      int faceNum = await doDetection(
+          faceDetector, file.path); // takes roughly 100 ms at max resolution
+
+      if (faceNum == 0) {
+        PromptBox.toast('请将面部保持在识别框内');
+        return;
+      } else if (faceNum > 1) {
+        PromptBox.toast('请保持只有一张面部在识别范围内');
+        return;
+      }
+
+      /// TODO: upload the image to the cloud, then pass it to the backend
+      final url = await rpc.storage.upload(file);
+      print('⭐⭐⭐⭐⭐⭐⭐⭐ url: $url');
+      try {
+        bool result = await rpc.patient.savePatientBaseByFaceImageAsync(
+          SavePatientBaseByFaceImageRequest(
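+            // Hard-coded sample ID number, used only for this enrollment test.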
+            cardNo: '320520200006190612',
+            token: Store.user.token,
+            image: url,
+          ),
+        );
+        print(result);
+        if (result) {
+          PromptBox.toast('人脸数据存入成功');
+        } else {
+          PromptBox.toast('人脸数据存入失败');
+        }
+      } catch (e) {
+        logger.e("savePatientBaseByFaceImageAsync failed: $e", e);
+      }
+
+      // if (timer.tick == 1 || kFrameImageSize == Size.zero) {
+      //   Size imageSize = await getImageSize(file);
+      //   kFrameImageSize = imageSize;
+      // }
+      // int kTime = DateTime.now().millisecondsSinceEpoch;
+      // print('⭐⭐⭐⭐⭐⭐⭐⭐ capture time: ${kTime - lastCaptureTime} ms');
+      // lastCaptureTime = kTime;
+
+      // /// record elapsed time (ms)
+
+      // int endTime = DateTime.now().millisecondsSinceEpoch;
+      // print('⭐⭐⭐⭐⭐⭐⭐⭐ detection time: ${endTime - lastCaptureTime} ms');
+      // update(['face_bounding_box']);
+      // if (timer.tick >= 10) {
+      //   finishFaceDetection(); // TODO: wire up the real completion condition
+      // }
+    }
   }

   /// Handles the "stop recording video" event.
@@ -501,7 +631,7 @@ class FacialRecognitionController extends GetxController
   }

   /// Finishes face recognition.
-  void finishFaceDetection() {
+  void finishFaceDetection(PatientBaseDTO patient) {
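+    // NOTE: the patient data returned by the backend is not consumed yet;
+    // the result below is still built from the scanned ID-card info.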
     final result = FaceRecognitionResult(
       success: true,
       cardNo: idCardInfo.idCardNumber,
@@ -512,9 +642,10 @@ class FacialRecognitionController extends GetxController
       birthday: DateTime.now(),
       address: idCardInfo.idCardAddress,
     );
-    Get.back<FaceRecognitionResult>(
-      result: result,
-    );
+    // Get.back<FaceRecognitionResult>(
+    //   result: result,
+    // );
+    // TODO: navigation logic after successful recognition
   }

   /// WIP
@@ -524,8 +655,8 @@ class FacialRecognitionController extends GetxController
   InputImage inputImage = InputImage.fromFilePath('');
   FaceDetector faceDetector = FaceDetector(options: FaceDetectorOptions());

-  // Run one face-detection pass.
-  Future<void> doDetection(
+  // Run one face-detection pass (returns the number of faces found).
+  Future<int> doDetection(
     FaceDetector faceDetector,
     String imagePath,
   ) async {
@@ -534,6 +665,7 @@ class FacialRecognitionController extends GetxController
     final List<Face> faces = await faceDetector.processImage(inputImage);
     kFrameFacesResult = [];
     kFrameFacesResult.addAll(faces);
+
     // for (Face face in faces) {
     //   final Rect boundingBox = face.boundingBox;

@@ -561,6 +693,7 @@ class FacialRecognitionController extends GetxController
     //   final int? id = face.trackingId;
     //   }
     // }
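+    // Callers use this count to enforce "exactly one face in frame".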
+    return kFrameFacesResult.length;
   }

   // bool isDetectionRunning = false;
@@ -603,7 +736,7 @@ class FacialRecognitionController extends GetxController
         print('⭐⭐⭐⭐⭐⭐⭐⭐ detection time: ${endTime - lastCaptureTime} ms');
         update(['face_bounding_box']);
         if (timer.tick >= 10) {
-          finishFaceDetection(); // TODO: wire up the real completion condition
+          // finishFaceDetection(); // TODO: wire up the real completion condition
         }
       }
     },