controller.dart 35 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033
  1. import 'dart:convert';
  2. import 'dart:math';
  3. import 'dart:typed_data';
  4. import 'dart:ui' as ui;
  5. import 'package:fis_common/index.dart';
  6. import 'package:fis_common/logger/logger.dart';
  7. import 'package:fis_i18n/i18n.dart';
  8. import 'package:fis_jsonrpc/rpc.dart';
  9. import 'package:fis_measure/process/language/measure_language.dart';
  10. import 'package:fis_measure/process/visual/visual.dart';
  11. import 'package:fis_measure/process/workspace/measure_data_controller.dart';
  12. import 'package:fis_measure/process/workspace/rpc_helper.dart';
  13. import 'package:fis_measure/process/workspace/visual_loader.dart';
  14. import 'package:fis_measure/utils/prompt_box.dart';
  15. import 'package:fis_measure/values/unit_desc.dart';
  16. import 'package:fis_measure/view/ai_result_modifier/state.dart';
  17. import 'package:fis_measure/view/mobile_view/widgets/throttle.dart' as utils;
  18. import 'package:fis_ui/utils/sizer/sizer.dart';
  19. import 'package:flutter/gestures.dart';
  20. import 'package:flutter/material.dart';
  21. import 'package:get/get.dart';
  22. import 'package:path_drawing/path_drawing.dart';
  23. import 'package:vid/us/vid_us_image.dart';
  24. import 'package:vid/us/vid_us_unit.dart';
  25. import 'package:http/http.dart' as http;
class AiResultModifierController extends GetxController {
  final rpcHelper = Get.find<RPCHelper>();

  MeasureDataController get measureData => Get.find<MeasureDataController>();

  /// Backend language-pack controller (currently unused).
  // final languageService = Get.find<LanguageService>();

  final state = AiResultModifierState();

  /// Constructor inputs: [image code, image frame index, image metadata,
  /// edited-image code].
  final String remedicalCode;
  final int currFrameIndex;
  final VidUsImage currFrame;

  /// The complete AI result as first fetched from the backend.
  AIDiagnosisPerImageDTO resultDTO = AIDiagnosisPerImageDTO();

  /// The edited complete result (this is what gets sent back to the backend).
  AIDiagnosisPerImageDTO modifiedDataDTO = AIDiagnosisPerImageDTO();

  // Contour points used for canvas drawing.
  List<Offset> _canvasContoursPoints = [];

  // Key points used for canvas drawing (drag mode).
  List<Offset> _canvasKeyPoints = [];

  // Highlighted key points used for canvas drawing (drag mode).
  final List<Offset> _canvasAffectedKeyPoints = [];

  // Indexes of the lesion-size axis endpoints (four coordinate indexes).
  List<int> _canvasLesionSizePointsIndexes = [];

  // Indexes of contour key points near the cursor (pen mode).
  final List<int> _canvasPenModeKeyPointIndexes = [];

  // Points of the new contour stroke being drawn (pen mode).
  final List<Offset> _canvasNewContoursPoints = [];

  // NOTE(review): the original comment said "player widget key", but this is
  // a point list; its use is not visible in this chunk — confirm purpose.
  final List<Offset> _aiPoints = [];

  // Lesion conclusion enum items.
  List<EnumItemDTO> _diagnosisEnumItems = [];

  // Current horizontal lesion length, in pixels.
  int _horizontalLengthInPixel = 0;

  // Current vertical lesion length, in pixels.
  int _verticalLengthInPixel = 0;

  // Index of the currently selected AI lesion.
  int currentAiDetectedObjectIndex = 0;

  // Key of the frame player area.
  GlobalKey framePlayerKey = GlobalKey();

  // Key of the screenshot capture area.
  GlobalKey captureAreaKey = GlobalKey();

  // Size of the AI canvas widget.
  Size aiCanvasSize = Size.zero;

  // Actual pixel size of the image.
  Size frameSize = Size.zero;

  // Scale factor from image space to canvas space.
  double _scale = 1.0;

  // Physical length represented by a single image pixel.
  double _unitsPhysicalPixels = 0.0;

  // Physical unit label of the image's x axis.
  String _xUnit = '';

  // Current contour points (image space).
  List<AIDiagnosisPoint2D> contours = [];

  // Current lesion size.
  AIDiagnosisLesionSize? lesionSize;

  // Current key points.
  List<DiagnosisKeyPointDTO> keyPoints = [];

  // Indexes of the currently highlighted (drag-affected) key points.
  List<int> affectedKeyPointIndexes = [];

  // Current interaction mode.
  AiResultModifierMode _mode = AiResultModifierMode.drag;

  // Whether a new contour stroke is currently being drawn.
  bool _isDrawingNewContours = false;

  // Drag start point (canvas space).
  Offset _dragStartPoint = Offset.zero;

  // Contour snapshot taken at drag start (only used for requests).
  List<AIDiagnosisPoint2D> contoursOnDragStart = [];

  // Key-point snapshot taken at drag start (only used for requests).
  List<DiagnosisKeyPointDTO> keyPointsOnDragStart = [];

  /// Measurement language pack.
  final measureLanguage = MeasureLanguage();

  AiResultModifierController(
      {required this.remedicalCode,
      required this.currFrameIndex,
      required this.currFrame});
  /// All AI-detected lesions of the first organ entry.
  List<AIDetectedObject> get aiDetectedObjectList =>
      modifiedDataDTO.diagResultsForEachOrgan?.first.detectedObjects ?? [];

  /// The currently selected lesion.
  AIDetectedObject? get aiDetectedObject => modifiedDataDTO
      .diagResultsForEachOrgan
      ?.first
      .detectedObjects?[currentAiDetectedObjectIndex];

  List<Offset> get aiPoints => _aiPoints;

  List<Offset> get canvasAffectedKeyPoints => _canvasAffectedKeyPoints;

  List<Offset> get canvasContoursPoints => _canvasContoursPoints;

  List<Offset> get canvasKeyPoints => _canvasKeyPoints;

  List<int> get canvasLesionSizePointsIndexes => _canvasLesionSizePointsIndexes;

  List<Offset> get canvasNewContoursPoints => _canvasNewContoursPoints;

  List<int> get canvasPenModeKeyPointIndexes => _canvasPenModeKeyPointIndexes;

  AiResultModifierMode get currMode => _mode;

  List<EnumItemDTO> get diagnosisEnumItems => _diagnosisEnumItems;

  /// The organ of the current result (first organ entry).
  DiagnosisOrganEnum get diagnosisOrgan =>
      modifiedDataDTO.diagResultsForEachOrgan?.first.organ ??
      DiagnosisOrganEnum.Null;

  /// Horizontal lesion length, formatted with its physical unit.
  String get horizontalLength =>
      _countLesionLengthWithUnit(_horizontalLengthInPixel);

  /// Vertical lesion length, formatted with its physical unit.
  String get verticalLength =>
      _countLesionLengthWithUnit(_verticalLengthInPixel);
  /// Coarse vertical/horizontal aspect label of the lesion: '> 1' when the
  /// ratio is at least 1, otherwise '< 1'.
  ///
  /// Simplified from `x > 1 || x == 1` to `x >= 1` (same truth table).
  /// NOTE(review): when [_horizontalLengthInPixel] is 0 the integer division
  /// yields Infinity/NaN; NaN compares false so the label falls to '< 1' —
  /// confirm that is acceptable before lengths are populated.
  String get lesionRatio =>
      _verticalLengthInPixel / _horizontalLengthInPixel >= 1 ? '> 1' : '< 1';
  /// Switches the interaction mode (drag/pen).
  ///
  /// No-op when [newMode] is already active; otherwise clears transient
  /// highlight state and repaints the modifier and mode buttons.
  void changeModifierMode(AiResultModifierMode newMode) {
    if (newMode == _mode) {
      return;
    }
    _mode = newMode;
    _canvasAffectedKeyPoints.clear();
    update(['ai_result_modifier', 'ai_mode_change_buttons']);
  }
  /// Switches the selected AI lesion to [index].
  ///
  /// Persists the current lesion's canvas edits first, then loads the new
  /// lesion's contours/key points and repaints the affected panels.
  Future<void> changeAiDetectedObjectIndex(int index) async {
    // Save edits under the lesion that was selected until now.
    _setNewCurrContoursToModifiedDataDTO(
        oldIndex: currentAiDetectedObjectIndex);
    currentAiDetectedObjectIndex = index;
    await _updateContoursByIndex(index);
    update(['ai_result_canvas', 'ai_conclusion_result', 'ai_index_tag']);
  }
  /// Loads contour, lesion size and key points for lesion [index] into the
  /// editing state and refreshes the canvas point caches.
  ///
  /// Fix: the original iterated `descriptions!`, crashing with a null
  /// assertion when a lesion carries no descriptions; now guarded with `??`.
  Future<void> _updateContoursByIndex(int index) async {
    contours = modifiedDataDTO
            .diagResultsForEachOrgan![0].detectedObjects![index].contours ??
        [];
    List<AIDiagnosisDescription>? descriptions = modifiedDataDTO
        .diagResultsForEachOrgan![0].detectedObjects![index].descriptions;
    // Scan the descriptions for the lesion-size entry.
    for (AIDiagnosisDescription description
        in descriptions ?? <AIDiagnosisDescription>[]) {
      if (description.type == DiagnosisDescriptionEnum.LesionSize) {
        // NOTE(review): jsonDecode('') throws FormatException when value is
        // null/empty — preserved from the original; confirm value is always set.
        lesionSize =
            AIDiagnosisLesionSize.fromJson(jsonDecode(description.value ?? ""));
      }
    }
    keyPoints = await _queryAllKeyPoints();
    _canvasAffectedKeyPoints.clear();
    _updateCurrContoursPoints();
    _updateCurrKeyPoints();
  }
  /// Writes the current canvas contour and lesion size back into
  /// [modifiedDataDTO] under lesion [oldIndex].
  ///
  /// Must be called BEFORE [currentAiDetectedObjectIndex] changes so the
  /// edits are stored with the lesion they belong to.
  void _setNewCurrContoursToModifiedDataDTO({required int oldIndex}) {
    // Convert the canvas-space points back to image space.
    List<AIDiagnosisPoint2D> newContours =
        _convertCanvasPoints(_canvasContoursPoints);
    modifiedDataDTO.diagResultsForEachOrgan![0].detectedObjects![oldIndex]
        .contours = newContours;
    List<AIDiagnosisDescription>? descriptions = modifiedDataDTO
        .diagResultsForEachOrgan![0].detectedObjects![oldIndex].descriptions;
    // Refresh the lesion-size description with the edited size.
    for (var i = 0; i < descriptions!.length; i++) {
      if (descriptions[i].type == DiagnosisDescriptionEnum.LesionSize) {
        descriptions[i].value = jsonEncode(lesionSize);
      }
    }
    modifiedDataDTO.diagResultsForEachOrgan![0].detectedObjects![oldIndex]
        .descriptions = descriptions;
  }
  /// Builds a lesion-size DTO from the four axis endpoints in [p1234]
  /// (order: horizontal 1, horizontal 2, vertical 1, vertical 2) and the
  /// new axis lengths in pixels.
  AIDiagnosisLesionSize _getNewLesionSize(List<AIDiagnosisPoint2D> p1234,
      int newHorizontalLengthInPixel, int newVerticalLengthInPixel) {
    final horizontalStart = p1234[0];
    final horizontalEnd = p1234[1];
    final verticalStart = p1234[2];
    final verticalEnd = p1234[3];
    return AIDiagnosisLesionSize(
        horizontalPoint1: horizontalStart,
        horizontalPoint2: horizontalEnd,
        verticalPoint1: verticalStart,
        verticalPoint2: verticalEnd,
        horizontalLengthInPixel: newHorizontalLengthInPixel,
        verticalLengthInPixel: newVerticalLengthInPixel);
  }
  /// Uploads the current annotated frame and a compressed thumbnail.
  ///
  /// Renders the frame with every contour painted on top, uploads that PNG,
  /// then renders a downscaled copy (at least 200x200) and uploads it too.
  /// Returns empty tokens with `isUploaded: false` on failure.
  Future<ImageUrls> _getCurrImageUrls() async {
    try {
      final Uint8List vidImageBytes = currFrame.imageData;

      /// Compose the full-size annotated image on an off-screen canvas.
      final Rect offscreenCanvasRect =
          Rect.fromLTWH(0, 0, frameSize.width, frameSize.height);
      final ui.PictureRecorder recorder = ui.PictureRecorder();
      final Canvas offscreenCanvas = Canvas(recorder, offscreenCanvasRect);
      offscreenCanvas.drawImage(
          await decodeImageFromList(vidImageBytes), Offset.zero, Paint());
      _paintAllContours(offscreenCanvas);
      final ui.Image orginalFileImage = await recorder
          .endRecording()
          .toImage(currFrame.width.toInt(), currFrame.height.toInt());
      final orginalFileByteData =
          await orginalFileImage.toByteData(format: ui.ImageByteFormat.png);
      final orginalFileByteDataBuffer =
          orginalFileByteData!.buffer.asUint8List();
      final String aiFileToken = await rpcHelper.rpc.storage.uploadUint8List(
              orginalFileByteDataBuffer,
              "ai_modified_orginal_${remedicalCode}_$currFrameIndex.png",
              rpcHelper.userToken) ??
          '';
      print('coverUrl: $aiFileToken');

      /// Generate the thumbnail.
      final double scale = _calcScale(
        srcWidth: currFrame.width.toDouble(),
        srcHeight: currFrame.height.toDouble(),
        minWidth: 200,
        minHeight: 200,
      );
      final int scaledWidth = currFrame.width ~/ scale;
      final int scaledHeight = currFrame.height ~/ scale;
      final Rect previewOffscreenCanvasRect =
          Rect.fromLTWH(0, 0, scaledWidth.toDouble(), scaledHeight.toDouble());
      final ui.PictureRecorder lowLevelRecorder = ui.PictureRecorder();
      final Canvas previewOffscreenCanvas =
          Canvas(lowLevelRecorder, previewOffscreenCanvasRect);
      // Downscale by drawing the annotated image into the smaller rect.
      previewOffscreenCanvas.drawImageRect(orginalFileImage,
          offscreenCanvasRect, previewOffscreenCanvasRect, Paint());
      final ui.Image previewFileImage = await lowLevelRecorder
          .endRecording()
          .toImage(scaledWidth, scaledHeight);
      final previewFileByteData =
          await previewFileImage.toByteData(format: ui.ImageByteFormat.png);
      final previewFileByteDataBuffer =
          previewFileByteData!.buffer.asUint8List();
      final String previewFileUrl = await rpcHelper.rpc.storage.uploadUint8List(
              previewFileByteDataBuffer,
              "ai_modified_preview_${remedicalCode}_$currFrameIndex.png",
              rpcHelper.userToken) ??
          '';
      print('previewFileUrl: $previewFileUrl');
      return ImageUrls(
          aiFileToken: aiFileToken, previewFileUrl: previewFileUrl);
    } on Exception catch (e) {
      logger.e('get screenshot failed', e);
      return ImageUrls(aiFileToken: '', previewFileUrl: '', isUploaded: false);
    }
  }
  /// Computes the down-scale divisor so the thumbnail stays at least
  /// [minWidth] x [minHeight]; never upscales (result is at least 1.0).
  double _calcScale({
    required double srcWidth,
    required double srcHeight,
    required double minWidth,
    required double minHeight,
  }) {
    final widthRatio = srcWidth / minWidth;
    final heightRatio = srcHeight / minHeight;
    // Use the smaller ratio so neither dimension drops below its minimum.
    return max(1.0, min(widthRatio, heightRatio));
  }
  /// Draws every lesion's contour, size axes and axis-endpoint markers onto
  /// [canvas] (used when composing the uploaded snapshot).
  ///
  /// Fixes: the inner contour loop reused `i`, shadowing the outer lesion
  /// index (renamed to `j`); the four identical `paintX` calls are now a
  /// loop over the endpoints — draw order and output are unchanged.
  void _paintAllContours(Canvas canvas) {
    for (var i = 0;
        i < modifiedDataDTO.diagResultsForEachOrgan![0].detectedObjects!.length;
        i++) {
      contours = modifiedDataDTO
              .diagResultsForEachOrgan![0].detectedObjects![i].contours ??
          [];
      // Dotted stroke shared by the contour and the axis lines.
      final contoursPaint = Paint()
        ..color = Colors.green
        ..strokeCap = StrokeCap.round
        ..strokeWidth = 3.0
        ..style = PaintingStyle.stroke;
      // Trace the contour as a closed dashed path.
      if (contours.isNotEmpty) {
        Path path = Path();
        path.moveTo(contours[0].x.toDouble(), contours[0].y.toDouble());
        for (int j = 1; j < contours.length; j++) {
          path.lineTo(contours[j].x.toDouble(), contours[j].y.toDouble());
        }
        path.close();
        canvas.drawPath(
            dashPath(
              path,
              dashArray: CircularIntervalList<double>([1, 10]),
            ),
            contoursPaint);
      }
      AIDiagnosisLesionSize currLesionSize = AIDiagnosisLesionSize();
      List<AIDiagnosisDescription>? descriptions = modifiedDataDTO
          .diagResultsForEachOrgan![0].detectedObjects![i].descriptions;
      // Scan the descriptions for this lesion's size entry.
      for (AIDiagnosisDescription description in descriptions!) {
        if (description.type == DiagnosisDescriptionEnum.LesionSize) {
          currLesionSize = AIDiagnosisLesionSize.fromJson(
              jsonDecode(description.value ?? ""));
        }
      }
      final AIDiagnosisPoint2D p1 = currLesionSize.horizontalPoint1!;
      final AIDiagnosisPoint2D p2 = currLesionSize.horizontalPoint2!;
      final AIDiagnosisPoint2D p3 = currLesionSize.verticalPoint1!;
      final AIDiagnosisPoint2D p4 = currLesionSize.verticalPoint2!;
      // Horizontal axis (p1-p2), finely dashed.
      Path path = Path();
      path.moveTo(p1.x.toDouble(), p1.y.toDouble());
      path.lineTo(p2.x.toDouble(), p2.y.toDouble());
      canvas.drawPath(
          dashPath(
            path,
            dashArray: CircularIntervalList<double>([1, 5]),
          ),
          contoursPaint);
      // Vertical axis (p3-p4), finely dashed.
      Path path2 = Path();
      path2.moveTo(p3.x.toDouble(), p3.y.toDouble());
      path2.lineTo(p4.x.toDouble(), p4.y.toDouble());
      canvas.drawPath(
          dashPath(
            path2,
            dashArray: CircularIntervalList<double>([1, 5]),
          ),
          contoursPaint);
      // X marker on each of the four axis endpoints.
      for (final p in [p1, p2, p3, p4]) {
        paintX(
          canvas,
          Offset(p.x.toDouble(), p.y.toDouble()),
          6.0,
          3,
          Colors.green,
        );
      }
    }
  }
  /// Draws an X marker of half-size [radius] centered at [center], using a
  /// round-capped stroke of the given [width] and [color].
  void paintX(
      Canvas canvas, Offset center, double radius, double width, Color color) {
    final strokePaint = Paint()
      ..color = color
      ..strokeCap = StrokeCap.round
      ..strokeWidth = width
      ..style = PaintingStyle.stroke;
    // Two diagonals of the bounding square form the cross.
    final cross = Path()
      ..moveTo(center.dx - radius, center.dy - radius)
      ..lineTo(center.dx + radius, center.dy + radius)
      ..moveTo(center.dx + radius, center.dy - radius)
      ..lineTo(center.dx - radius, center.dy + radius);
    canvas.drawPath(cross, strokePaint);
  }
  /// Looks up the AI-module translation for [code].
  String getValuesFromAiLanguage(String code) => measureLanguage.t('ai', code);
  /// Re-fetches the original AI result and repaints the result panels.
  ///
  /// Fix: was `void` + `async` (fire-and-forget, errors unobservable); now
  /// returns `Future<void>` so callers may await — existing call sites that
  /// ignore the result continue to work.
  Future<void> resetAIResult() async {
    await _initAIResult();
    update(['ai_conclusion_result', 'ai_result_sleek_circular_slider']);
  }
  @override
  void onClose() {
    super.onClose();
    // Detach the window-resize listener registered in onInit.
    Sizer.ins.removeListener(_onWindowResize);
  }
  /// Controller start-up: loads enum metadata, sizes the layers, loads the
  /// AI result and subscribes to window resizes.
  ///
  /// Fix: `_initAIResult()` was fired without `await`, so its failures were
  /// dropped and init could race the first paint; it is now awaited.
  /// (The signature stays `void` + `async` because GetX's `onInit` override
  /// must return void.)
  @override
  void onInit() async {
    super.onInit();
    await _getDiagnosisEnumItemsAsync();
    _updateModifierInteractiveLayerSize();
    _updateImagePhysicalSize();
    await _initAIResult();
    Sizer.ins.addListener(_onWindowResize);
  }
  /// Window-resize listener: recomputes the image-to-canvas scale and all
  /// cached canvas-space point lists, then repaints.
  void _onWindowResize(_) {
    update(['ai_result_modifier']);
    frameSize = Size(currFrame.width.toDouble(), currFrame.height.toDouble());
    // Defer until after layout so the player's RenderBox has its new size.
    WidgetsBinding.instance.addPostFrameCallback((_) {
      final RenderBox box =
          framePlayerKey.currentContext!.findRenderObject() as RenderBox;
      final framePlayerSize = Size(box.size.width, box.size.height);
      // Fit the image inside the player while preserving aspect ratio.
      _scale = min(framePlayerSize.width / frameSize.width,
          framePlayerSize.height / frameSize.height);
      aiCanvasSize = Size(frameSize.width * _scale, frameSize.height * _scale);
      _updateModifierInteractiveLayerSize();
      _updateCurrKeyPoints();
      _updateCurrContoursPoints();
      _updateCurrAffectedKeyPoints();
      // Refresh the interactive layer and the canvas.
      update(["ai_result_modifier_interactive_layer", "ai_result_canvas"]);
    });
  }
  /// Pointer-drag update handler; dispatches on the current mode.
  void onMouseDrag(DragUpdateDetails details) {
    final position = details.localPosition;
    if (_mode == AiResultModifierMode.drag) {
      // Throttled: at most one backend drag round-trip per 100 ms.
      utils.throttle(() {
        _onDragModeCallDragFunction(position);
      }, 'onMouseDrag', 100);
    } else if (_mode == AiResultModifierMode.pen) {
      _onPenModeCallDragFunction(position);
    }
  }
  /// Drag-end handler; only pen mode needs finalization.
  ///
  /// If a stroke was in progress it is merged into the contour via the
  /// backend, then the in-progress stroke points are discarded.
  void onMouseDragEnd(DragEndDetails details) async {
    if (_mode != AiResultModifierMode.pen) return;
    if (_isDrawingNewContours) {
      _isDrawingNewContours = false;
      await _callContourMergeAsync();
      _updateCurrContoursPoints();
      _updateCurrKeyPoints();
    }
    _canvasNewContoursPoints.clear();
    update(['ai_result_canvas']);
  }
  /// Drag-start handler: records the origin and snapshots per-mode state.
  void onMouseDragStart(DragDownDetails details) {
    switch (_mode) {
      case AiResultModifierMode.drag:
        _dragStartPoint = details.localPosition;
        // NOTE(review): these store list references, not copies; they serve
        // as the "before" state for _queryDragResult — confirm the lists are
        // never mutated in place while a drag is active.
        contoursOnDragStart = contours;
        keyPointsOnDragStart = keyPoints;
        break;
      case AiResultModifierMode.pen:
        // Only start a stroke when the cursor is snapped to a contour point.
        if (_canvasPenModeKeyPointIndexes.isNotEmpty) {
          _isDrawingNewContours = true;
          _dragStartPoint = details.localPosition;
          _canvasNewContoursPoints.clear();
          // Seed the stroke with the snapped contour point, then the cursor.
          _canvasNewContoursPoints
              .add(_canvasContoursPoints[_canvasPenModeKeyPointIndexes[0]]);
          _canvasNewContoursPoints.add(_dragStartPoint);
        }
        break;
      default:
    }
  }
  /// Pointer-exit handler: clears the key-point highlights.
  void onMouseExit(PointerExitEvent e) async {
    // Wait 200 ms so the throttled hover handler (100 ms delay) cannot
    // re-add highlights right after we clear them.
    await Future.delayed(const Duration(milliseconds: 200));
    _canvasAffectedKeyPoints.clear();
    update(['ai_result_canvas']);
  }
  /// Pointer-hover handler; updates highlight/snap state per mode.
  ///
  /// Does nothing until key points have been loaded.
  void onMouseHover(PointerHoverEvent e) async {
    if (keyPoints.isEmpty) return;
    final position = e.localPosition;
    if (_mode == AiResultModifierMode.drag) {
      // Throttled: highlight query hits the backend at most every 100 ms.
      utils.throttle(() {
        _onDragModeCallHoverFunction(position);
      }, 'onMouseHover', 100);
    } else if (_mode == AiResultModifierMode.pen) {
      // Local nearest-point search; cheap, so only 10 ms throttle.
      utils.throttle(() {
        _onPenModeCallHoverFunction(position);
      }, 'onMouseHover', 10);
    }
  }
  @override
  void onReady() {
    super.onReady();
    // Trigger the first paint once the widget tree is ready.
    _initData();
  }
  /// Saves the edited AI result (data plus rendered snapshots) to the
  /// backend; shows progress/success/failure toasts and pops on success.
  ///
  /// Fix: the catch clause silently swallowed the exception; it now logs it
  /// (matching the file's `logger.e` convention) before toasting.
  Future<void> saveAIResult({
    String? code,
  }) async {
    PromptBox.toast(i18nBook.realTimeConsultation.uploading.t);
    // Flush the current lesion's canvas edits into modifiedDataDTO first.
    _setNewCurrContoursToModifiedDataDTO(
        oldIndex: currentAiDetectedObjectIndex);
    try {
      final ImageUrls imageUrls = await _getCurrImageUrls();
      if (!imageUrls.isUploaded) {
        PromptBox.toast(i18nBook.user.saveFailed.t);
        return;
      }
      // With an existing edited-info code we update that record (code set,
      // remedicalCode null); otherwise we create one from remedicalCode.
      bool hasRemedicalAISelectedInfoCode = measureData
          .measureImageData.remedicalAISelectedInfoCode.isNotNullOrEmpty;
      final result =
          await rpcHelper.rpc.remedical.saveRemedicalAISelectedInfoAsync(
        SaveRemedicalAISelectedInfoRequest(
          token: rpcHelper.userToken,
          remedicalCode: hasRemedicalAISelectedInfoCode ? null : remedicalCode,
          code: hasRemedicalAISelectedInfoCode
              ? measureData.measureImageData.remedicalAISelectedInfoCode
              : null,
          frameIndex: currFrameIndex,
          // diagnosisConclusion: diagnosisOrgan,
          previewFileToken: imageUrls.previewFileUrl,
          aIFileToken: imageUrls.aiFileToken,
          diagnosisData: jsonEncode(modifiedDataDTO),
        ),
      );
      if (result) {
        PromptBox.toast(
            "${i18nBook.user.saveSuccess.t} \r\n ${i18nBook.measure.saveLocation.t + ' > ' + i18nBook.measure.aiImage.t}");
        Get.back();
      } else {
        PromptBox.toast(i18nBook.user.saveFailed.t);
      }
    } catch (e) {
      // Keep the user-facing toast, but no longer drop the error silently.
      logger.e('save ai result failed', e);
      PromptBox.toast(i18nBook.user.saveFailed.t);
    }
  }
  /// Loads the AI result and triggers drawing.
  ///
  /// Uses the locally cached edited result when an edited-info code exists;
  /// otherwise fetches the raw diagnosis data from the backend. Then caches
  /// the current lesion's contours, lesion size and key points, and repaints.
  Future<void> _initAIResult() async {
    try {
      if (measureData
          .measureImageData.remedicalAISelectedInfoCode.isNotNullOrEmpty) {
        resultDTO = AIDiagnosisPerImageDTO.fromJson(
            jsonDecode(measureData.aiResults)[0]);
      } else {
        final result =
            await rpcHelper.rpc.remedical.getRemedicalDiagnosisDataAsync(
          GetRemedicalDiagnosisDataRequest(
            token: rpcHelper.userToken,
            remedicalCode: remedicalCode,
            frameIndex: currFrameIndex,
          ),
        );
        resultDTO = AIDiagnosisPerImageDTO.fromJson(jsonDecode(result));
      }
      // NOTE(review): modifiedDataDTO aliases resultDTO (no deep copy), so
      // later edits also mutate resultDTO — confirm this is intended.
      modifiedDataDTO = resultDTO;
      contours = resultDTO.diagResultsForEachOrgan![0]
              .detectedObjects![currentAiDetectedObjectIndex].contours ??
          [];
      List<AIDiagnosisDescription>? descriptions = resultDTO
          .diagResultsForEachOrgan![0]
          .detectedObjects![currentAiDetectedObjectIndex]
          .descriptions;
      // Scan the descriptions for the lesion-size entry.
      for (AIDiagnosisDescription description in descriptions!) {
        if (description.type == DiagnosisDescriptionEnum.LesionSize) {
          lesionSize = AIDiagnosisLesionSize.fromJson(
              jsonDecode(description.value ?? ""));
        }
      }
      keyPoints = await _queryAllKeyPoints();
      _canvasAffectedKeyPoints.clear();
      _updateCurrContoursPoints();
      _updateCurrKeyPoints();
      update(['ai_result_canvas', 'ai_result_panel', 'ai_index_tag']);
    } catch (e) {
      logger.e('load ai result failed', e);
    }
  }
  /// Recomputes the image-to-canvas scale from the player's rendered size
  /// and resizes the interactive layer accordingly.
  void _updateModifierInteractiveLayerSize() {
    frameSize = Size(currFrame.width.toDouble(), currFrame.height.toDouble());
    // Defer until after layout so the player's RenderBox has a size.
    WidgetsBinding.instance.addPostFrameCallback((_) {
      final RenderBox box =
          framePlayerKey.currentContext!.findRenderObject() as RenderBox;
      final framePlayerSize = Size(box.size.width, box.size.height);
      // Fit the image inside the player while preserving aspect ratio.
      _scale = min(framePlayerSize.width / frameSize.width,
          framePlayerSize.height / frameSize.height);
      aiCanvasSize = Size(frameSize.width * _scale, frameSize.height * _scale);

      /// Refresh the interactive layer size.
      update(["ai_result_modifier_interactive_layer"]);
    });
  }
  /// Converts a pixel [length] to a display string with the image's
  /// physical unit, e.g. "1.23 cm".
  ///
  /// Fix: dropped the redundant `.toString()` — `toStringAsFixed` already
  /// returns a String.
  String _countLesionLengthWithUnit(int length) {
    final String lengthStr =
        (length * _unitsPhysicalPixels).toStringAsFixed(2);
    return "$lengthStr $_xUnit";
  }
  /// Derives the physical length per pixel and the x-axis unit label from
  /// the frame's visual metadata.
  void _updateImagePhysicalSize() {
    List<VisualBase> visuals = VisualsLoader(currFrame.visuals).load();
    // Physical width of the first visual area's viewport divided by the
    // frame's pixel width; 0 when no viewport region is available.
    _unitsPhysicalPixels =
        (((visuals[0].visualAreas[0].viewport?.region.width) ?? 0) /
            currFrame.width);
    // Fall back to centimeters when the viewport does not declare a unit.
    VidUsUnit targetUnit =
        visuals[0].visualAreas[0].viewport?.xUnit ?? VidUsUnit.cm;
    _xUnit = UnitDescriptionMap.getDesc(targetUnit);
  }
  /// Snap-and-close check while drawing a new contour stroke.
  ///
  /// If the stroke's last point comes within 6 px of an existing contour
  /// point, snaps onto it, ends the stroke and merges it via the backend.
  ///
  /// Fix: was `void` + `async` (fire-and-forget, errors unobservable); now
  /// returns `Future<void>` so callers may await. Existing non-awaiting
  /// call sites keep working.
  Future<void> _autoCloseContours() async {
    // Require a minimal stroke length before considering auto-close.
    if (_canvasNewContoursPoints.length < 6) return;
    double minDistance = double.infinity;
    int nearestKeyPointIndex = -1;
    final lastPoint = _canvasNewContoursPoints.last;
    // Nearest contour point to the stroke's last point (first minimum wins).
    for (int i = 0; i < canvasContoursPoints.length; i++) {
      final point = canvasContoursPoints[i];
      final double distance = (point - lastPoint).distance;
      if (distance < minDistance) {
        minDistance = distance;
        nearestKeyPointIndex = i;
      }
    }
    if (minDistance < 6) {
      _canvasPenModeKeyPointIndexes.add(nearestKeyPointIndex);
      _canvasNewContoursPoints.add(canvasContoursPoints[nearestKeyPointIndex]);
      _isDrawingNewContours = false;
      await _callContourMergeAsync();
      _updateCurrContoursPoints();
      _updateCurrKeyPoints();
    }
  }
  /// Asks the backend to merge the newly drawn stroke into the contour.
  ///
  /// On return, replaces [contours], [lesionSize] and [keyPoints] with the
  /// merged results. Always returns true; RPC errors propagate to callers.
  Future<bool> _callContourMergeAsync() async {
    final ContourMergeResult result =
        await rpcHelper.rpc.aIDiagnosis.contourMergeAsync(
      ContourMergeRequest(
        token: rpcHelper.userToken,
        contourPoints: contours,
        lesionSize: lesionSize,
        drawingNewContourPoints: _convertCanvasPoints(_canvasNewContoursPoints),
      ),
    );
    contours = result.dstContours ?? [];
    lesionSize = result.dstLesionSize;
    keyPoints = await _queryAllKeyPoints();
    return true;
  }
  /// Converts canvas-space offsets to image-space integer points
  /// (canvas coordinates -> API coordinates).
  List<AIDiagnosisPoint2D> _convertCanvasPoints(List<Offset> points) {
    return [
      for (final offset in points)
        AIDiagnosisPoint2D(x: offset.dx ~/ _scale, y: offset.dy ~/ _scale),
    ];
  }
  /// Converts key points from API (image) space to canvas space, and as a
  /// side effect records the four lesion-size axis endpoint indexes
  /// (H-left, H-right, V-up, V-down) before refreshing the length/ratio
  /// caches via [_updateLesionSizeAndRatio].
  ///
  /// NOTE(review): `i` indexes the SOURCE list; entries with a null
  /// `point` are skipped from `result`, so stored indexes can drift from
  /// canvas-list positions — confirm the indexes are used against the
  /// source list.
  List<Offset> _convertKeyPoints(List<DiagnosisKeyPointDTO> points) {
    List<Offset> result = [];
    List<int> pointIndexes = List.generate(4, (_) => 0);
    for (int i = 0; i < points.length; i++) {
      final point = points[i];
      if (point.point == null) continue;
      result.add(Offset(point.point!.x.toDouble() * _scale,
          point.point!.y.toDouble() * _scale));
      if (point.type != DiagnosisKeyPointType.OtherKeyPoints) {
        switch (point.type) {
          case DiagnosisKeyPointType.HorizontalPointLeft:
            pointIndexes[0] = i;
            break;
          case DiagnosisKeyPointType.HorizontalPointRight:
            pointIndexes[1] = i;
            break;
          case DiagnosisKeyPointType.VerticalPointUp:
            pointIndexes[2] = i;
            break;
          case DiagnosisKeyPointType.VerticalPointDown:
            pointIndexes[3] = i;
            break;
          default:
        }
      }
    }
    _canvasLesionSizePointsIndexes = pointIndexes;
    _updateLesionSizeAndRatio();
    return result;
  }
  /// Converts points from API (image) space to canvas space.
  List<Offset> _convertPoints(List<AIDiagnosisPoint2D> points) {
    return [
      for (final point in points)
        Offset(point.x.toDouble() * _scale, point.y.toDouble() * _scale),
    ];
  }
  /// Fetches the AI-diagnosis enum metadata from the backend and caches it
  /// in [_diagnosisEnumItems] (empty list when the response has no source).
  Future<void> _getDiagnosisEnumItemsAsync() async {
    final response =
        await rpcHelper.rpc.aIDiagnosis.getDiagnosisEnumItemsAsync(
      GetDiagnosisEnumItemsRequest(
        token: rpcHelper.userToken,
      ),
    );
    _diagnosisEnumItems = response.source ?? [];
  }
  /// Triggers the first paint of the whole modifier view.
  void _initData() {
    update(["ai_result_modifier"]);
  }
  /// Drag-mode drag step (throttled to every 100 ms by the caller):
  /// sends the drag vector to the backend and repaints on success.
  void _onDragModeCallDragFunction(Offset pos) async {
    final dragFrom = AIDiagnosisPoint2D(
        x: _dragStartPoint.dx ~/ _scale, y: _dragStartPoint.dy ~/ _scale);
    final dragTo =
        AIDiagnosisPoint2D(x: pos.dx ~/ _scale, y: pos.dy ~/ _scale);
    final bool success = await _queryDragResult(dragFrom, dragTo);
    if (!success) return;
    _updateCurrKeyPoints();
    _updateCurrContoursPoints();
    _updateCurrAffectedKeyPoints();
    update(["ai_result_canvas"]);
  }
  /// Drag-mode hover step (throttled to every 100 ms by the caller):
  /// refreshes the highlighted key-point indexes from the cursor position.
  void _onDragModeCallHoverFunction(Offset localPosition) async {
    final imagePoint = AIDiagnosisPoint2D(
        x: localPosition.dx ~/ _scale, y: localPosition.dy ~/ _scale);
    affectedKeyPointIndexes = await _queryAffectedKeyPoints(imagePoint);
    _updateCurrAffectedKeyPoints();
    update(["ai_result_canvas"]);
  }
  /// Pen-mode drag step: appends points to the new contour stroke, keeping
  /// the stroke density roughly uniform by inserting interpolated points
  /// when the pointer jumps more than 8 px in one event.
  void _onPenModeCallDragFunction(Offset pos) async {
    if (!_isDrawingNewContours) return;
    // Target spacing between consecutive stroke points, in px.
    const double pointDistance = 8;
    final double distance = (pos - _canvasNewContoursPoints.last).distance;
    if (distance >= pointDistance) {
      int numPointsToInsert = (distance / pointDistance).ceil() - 1; // points to insert
      // NOTE(review): `.last` moves as points are appended, so each inserted
      // point interpolates from the previous insertion rather than from a
      // fixed segment start — confirm this successive stepping is intended.
      for (int i = 0; i < numPointsToInsert; i++) {
        double t = (i + 1) / (numPointsToInsert + 1);
        Offset interpolatedPoint = Offset(
          _canvasNewContoursPoints.last.dx +
              t * (pos.dx - _canvasNewContoursPoints.last.dx),
          _canvasNewContoursPoints.last.dy +
              t * (pos.dy - _canvasNewContoursPoints.last.dy),
        );
        _canvasNewContoursPoints.add(interpolatedPoint);
      }
      _canvasNewContoursPoints.add(pos);
      update(["ai_result_canvas"]);
    }
    // Fire-and-forget: may finish the stroke if it closes near the contour.
    _autoCloseContours();
  }
  /// Pen-mode hover step (throttled to every 10 ms by the caller): snaps to
  /// the nearest contour point within 10 px of the cursor, if any.
  void _onPenModeCallHoverFunction(Offset localPosition) async {
    var bestDistance = double.infinity;
    var bestIndex = -1;
    // Linear nearest-point scan; the first minimum wins on ties.
    for (var i = 0; i < canvasContoursPoints.length; i++) {
      final candidate = (canvasContoursPoints[i] - localPosition).distance;
      if (candidate < bestDistance) {
        bestDistance = candidate;
        bestIndex = i;
      }
    }
    _canvasPenModeKeyPointIndexes.clear();
    if (bestDistance < 10) {
      _canvasPenModeKeyPointIndexes.add(bestIndex);
    }
    update(["ai_result_canvas"]);
  }
  777. /// 根据鼠标位置查询受影响的关键点
  778. Future<List<int>> _queryAffectedKeyPoints(AIDiagnosisPoint2D mousePos) async {
  779. try {
  780. final List<int> result =
  781. await rpcHelper.rpc.aIDiagnosis.affectedKeyPointsByDragActionAsync(
  782. AffectedKeyPointsByDragActionRequest(
  783. token: rpcHelper.userToken,
  784. keyPoints: keyPoints,
  785. mousePoint: mousePos,
  786. ),
  787. );
  788. // print(result);
  789. return result;
  790. } catch (e) {
  791. return [];
  792. }
  793. }
  794. /// 查询所有关键点【需要先存好contours和lesionSize】
  795. Future<List<DiagnosisKeyPointDTO>> _queryAllKeyPoints() async {
  796. try {
  797. final List<DiagnosisKeyPointDTO> result =
  798. await rpcHelper.rpc.aIDiagnosis.getKeyPointsOfContourAsync(
  799. GetKeyPointsOfContourRequest(
  800. token: rpcHelper.userToken,
  801. contours: contours,
  802. lesionSize: lesionSize,
  803. ),
  804. );
  805. return result;
  806. } catch (e) {
  807. return [];
  808. }
  809. }
  810. /// 查询拖拽结果集合【需要先存好 contoursOnDragStart 和 keyPointsOnDragStart】
  811. Future<bool> _queryDragResult(
  812. AIDiagnosisPoint2D startPoint, AIDiagnosisPoint2D endPoint) async {
  813. try {
  814. final ContourAndKeyPointsAfterDragResult result =
  815. await rpcHelper.rpc.aIDiagnosis.contourAndKeyPointsAfterDragAsync(
  816. ContourAndKeyPointsAfterDragRequest(
  817. token: rpcHelper.userToken,
  818. contours: contoursOnDragStart,
  819. keyPoints: keyPointsOnDragStart,
  820. startPoint: startPoint,
  821. endPoint: endPoint,
  822. ),
  823. );
  824. keyPoints = result.dstKeyPoints ?? [];
  825. contours = result.dstContours ?? [];
  826. affectedKeyPointIndexes = result.affectedKeyPointIndexes!;
  827. return true;
  828. } catch (e) {
  829. return false;
  830. }
  831. }
  832. // 根据病灶四个点位置横纵比参数
  833. void _updateLesionSizeAndRatio() {
  834. if (_canvasLesionSizePointsIndexes.length != 4) return;
  835. if (keyPoints.length < 4) return;
  836. final pIndexs = _canvasLesionSizePointsIndexes;
  837. final p1 = keyPoints[pIndexs[0]].point!;
  838. final p2 = keyPoints[pIndexs[1]].point!;
  839. final p3 = keyPoints[pIndexs[2]].point!;
  840. final p4 = keyPoints[pIndexs[3]].point!;
  841. /// 计算 p1 到 p2 的像素距离 更新到 _horizontalLengthInPixel
  842. /// 计算 p3 到 p4 的像素距离 更新到 _verticalLengthInPixel
  843. double _horizontalLength = (Offset(p1.x.toDouble(), p1.y.toDouble()) -
  844. Offset(p2.x.toDouble(), p2.y.toDouble()))
  845. .distance;
  846. _horizontalLengthInPixel = _horizontalLength.ceil();
  847. double _verticalLength = (Offset(p3.x.toDouble(), p3.y.toDouble()) -
  848. Offset(p4.x.toDouble(), p4.y.toDouble()))
  849. .distance;
  850. _verticalLengthInPixel = _verticalLength.ceil();
  851. lesionSize = _getNewLesionSize(
  852. [p1, p2, p3, p4], _horizontalLengthInPixel, _verticalLengthInPixel);
  853. // _canvasLesionSizePointsIndexes
  854. update(['ai_result_lesion_size', 'ai_result_lesion_ratio']);
  855. }
  856. /// [⭐ _canvasAffectedKeyPoints ] 根据当前的受影响关键点下标更新受影响关键点集
  857. void _updateCurrAffectedKeyPoints() {
  858. _canvasAffectedKeyPoints.clear();
  859. if (keyPoints.isEmpty) return;
  860. for (int i = 0; i < keyPoints.length; i++) {
  861. if (affectedKeyPointIndexes.contains(i)) {
  862. _canvasAffectedKeyPoints.add(Offset(
  863. keyPoints[i].point!.x.toDouble() * _scale,
  864. keyPoints[i].point!.y.toDouble() * _scale));
  865. }
  866. }
  867. }
  868. /// [⭐ _canvasContoursPoints ] 更新当前轮廓点
  869. void _updateCurrContoursPoints() {
  870. _canvasContoursPoints = _convertPoints(contours);
  871. }
  872. /// [⭐ _canvasKeyPoints ] 更新当前关键点
  873. void _updateCurrKeyPoints() async {
  874. _canvasKeyPoints = _convertKeyPoints(keyPoints);
  875. }
  876. }
/// Interaction modes for the AI result modifier canvas.
enum AiResultModifierMode {
/// Drag mode.
drag,
/// Pen (contour drawing) mode.
pen,
/// Screenshot mode.
screenshot,
}
  885. ///存储服务扩展类
  886. extension StorageServiceExt on StorageService {
  887. ///鉴权 fileName 为空则接口报错,所以此处设置一个默认值
  888. Future<StorageServiceSettingDTO> getAuth({
  889. String? fileName,
  890. bool? isRechristen,
  891. List<DataItemDTO>? urlParams,
  892. List<DataItemDTO>? headerParams,
  893. String? requestMethod,
  894. required String userToken,
  895. }) async {
  896. try {
  897. final result = await getAuthorizationAsync(FileServiceRequest(
  898. token: userToken,
  899. fileName: fileName ?? "dat",
  900. isRechristen: isRechristen ?? true,
  901. urlParams: urlParams,
  902. headerParams: headerParams,
  903. requestMethod: requestMethod,
  904. ));
  905. return result;
  906. } catch (e) {
  907. return StorageServiceSettingDTO();
  908. }
  909. }
  910. ///文件上传(UInt8List)
  911. Future<String?> uploadUint8List(Uint8List buffer, String name, String token,
  912. [bool? isRechristen]) async {
  913. try {
  914. var nameInfos = name.split('.');
  915. final auth = await getAuth(
  916. fileName: nameInfos.last,
  917. isRechristen: isRechristen,
  918. userToken: token,
  919. );
  920. Map<String, String> params = {};
  921. params['Authorization'] = auth.authorization!;
  922. params['ContentType'] = auth.contentType!;
  923. final response = await http
  924. .put(
  925. Uri.parse(auth.storageUrl!),
  926. body: buffer,
  927. headers: params,
  928. )
  929. .timeout(
  930. const Duration(seconds: 30),
  931. );
  932. if (response.statusCode == 200) {
  933. return auth.storageUrl;
  934. }
  935. } catch (e) {
  936. logger.e('StorageServiceExt uploadUint8List ex:$e');
  937. }
  938. return null;
  939. }
  940. }
  941. class ImageUrls {
  942. /// 原始图像地址
  943. String aiFileToken;
  944. /// 缩略图地址
  945. String previewFileUrl;
  946. /// 是否已经上传
  947. bool isUploaded = true;
  948. ImageUrls({
  949. required this.aiFileToken,
  950. required this.previewFileUrl,
  951. this.isUploaded = true,
  952. });
  953. }