controller.dart 36 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061
  1. import 'dart:convert';
  2. import 'dart:math';
  3. import 'dart:typed_data';
  4. import 'dart:ui' as ui;
  5. import 'package:fis_common/index.dart';
  6. import 'package:fis_common/logger/logger.dart';
  7. import 'package:fis_i18n/i18n.dart';
  8. import 'package:fis_jsonrpc/rpc.dart';
  9. import 'package:fis_measure/interfaces/process/workspace/application.dart';
  10. import 'package:fis_measure/process/language/measure_language.dart';
  11. import 'package:fis_measure/process/visual/visual.dart';
  12. import 'package:fis_measure/process/workspace/measure_data_controller.dart';
  13. import 'package:fis_measure/process/workspace/rpc_helper.dart';
  14. import 'package:fis_measure/process/workspace/visual_loader.dart';
  15. import 'package:fis_measure/utils/prompt_box.dart';
  16. import 'package:fis_measure/values/unit_desc.dart';
  17. import 'package:fis_measure/view/ai_result_modifier/state.dart';
  18. import 'package:fis_measure/view/mobile_view/widgets/throttle.dart' as utils;
  19. import 'package:fis_ui/utils/sizer/sizer.dart';
  20. import 'package:flutter/gestures.dart';
  21. import 'package:flutter/material.dart';
  22. import 'package:get/get.dart';
  23. import 'package:path_drawing/path_drawing.dart';
  24. import 'package:vid/us/vid_us_image.dart';
  25. import 'package:vid/us/vid_us_unit.dart';
  26. import 'package:http/http.dart' as http;
class AiResultModifierController extends GetxController {
  final rpcHelper = Get.find<RPCHelper>();

  MeasureDataController get measureData => Get.find<MeasureDataController>();

  /// Backend language-pack controller.
  // final languageService = Get.find<LanguageService>();
  final state = AiResultModifierState();

  /// Constructor inputs: [image code, frame index, frame image, edited-image code].
  final String remedicalCode;
  final int currFrameIndex;
  final VidUsImage currFrame;

  /// Full diagnosis data as first queried from the backend.
  AIDiagnosisPerImageDTO resultDTO = AIDiagnosisPerImageDTO();

  /// Full diagnosis data after local edits (this is what gets sent back).
  AIDiagnosisPerImageDTO modifiedDataDTO = AIDiagnosisPerImageDTO();

  // Contour points in canvas coordinates.
  List<Offset> _canvasContoursPoints = [];
  // Key points in canvas coordinates (drag mode).
  List<Offset> _canvasKeyPoints = [];
  // Highlighted key points in canvas coordinates (drag mode).
  final List<Offset> _canvasAffectedKeyPoints = [];
  // List indexes of the four lesion-size axis endpoints (h1, h2, v1, v2).
  List<int> _canvasLesionSizePointsIndexes = [];
  // Contour key-point indexes (pen mode).
  final List<int> _canvasPenModeKeyPointIndexes = [];
  // Points of the contour currently being drawn (pen mode).
  final List<Offset> _canvasNewContoursPoints = [];
  // NOTE(review): the original comment here said "player widget key", which
  // is clearly a copy-paste slip — this list holds AI points.
  final List<Offset> _aiPoints = [];
  // Lesion conclusion enum items.
  List<EnumItemDTO> _diagnosisEnumItems = [];
  // Current horizontal lesion length in pixels.
  int _horizontalLengthInPixel = 0;
  // Current vertical lesion length in pixels.
  int _verticalLengthInPixel = 0;
  // Index of the currently selected AI lesion.
  int currentAiDetectedObjectIndex = 0;
  // Key of the frame-player area.
  GlobalKey framePlayerKey = GlobalKey();
  // Key of the screenshot capture area.
  GlobalKey captureAreaKey = GlobalKey();
  // Size of the AI canvas widget.
  Size aiCanvasSize = Size.zero;
  // Actual pixel size of the image.
  Size frameSize = Size.zero;
  // Image scale factor (canvas px per image px).
  double _scale = 1.0;
  // Physical length represented by one image pixel.
  double _unitsPhysicalPixels = 0.0;
  // Physical unit label (e.g. "cm").
  String _xUnit = '';
  // Current contour points (image coordinates).
  List<AIDiagnosisPoint2D> contours = [];
  // Current lesion size.
  AIDiagnosisLesionSize? lesionSize;
  // Current key points (image coordinates).
  List<DiagnosisKeyPointDTO> keyPoints = [];
  // Indexes of the currently highlighted (drag-affected) key points.
  List<int> affectedKeyPointIndexes = [];
  // Current interaction mode.
  AiResultModifierMode _mode = AiResultModifierMode.drag;
  // Whether a new contour is currently being drawn (pen mode).
  bool _isDrawingNewContours = false;
  // Drag start point (canvas coordinates).
  Offset _dragStartPoint = Offset.zero;
  // Contours snapshot taken at drag start (used only in requests).
  List<AIDiagnosisPoint2D> contoursOnDragStart = [];
  // Key-points snapshot taken at drag start (used only in requests).
  List<DiagnosisKeyPointDTO> keyPointsOnDragStart = [];

  /// Measurement language pack.
  final measureLanguage = MeasureLanguage();

  AiResultModifierController(
      {required this.remedicalCode,
      required this.currFrameIndex,
      required this.currFrame});

  late final application = Get.find<IApplication>();
  /// All detected lesions of the first organ (empty when none).
  List<AIDetectedObject> get aiDetectedObjectList =>
      modifiedDataDTO.diagResultsForEachOrgan?.first.detectedObjects ?? [];

  /// The currently selected lesion, or null when there is no result.
  /// Assumes [currentAiDetectedObjectIndex] is in range.
  AIDetectedObject? get aiDetectedObject => modifiedDataDTO
      .diagResultsForEachOrgan
      ?.first
      .detectedObjects?[currentAiDetectedObjectIndex];

  List<Offset> get aiPoints => _aiPoints;

  List<Offset> get canvasAffectedKeyPoints => _canvasAffectedKeyPoints;

  List<Offset> get canvasContoursPoints => _canvasContoursPoints;

  List<Offset> get canvasKeyPoints => _canvasKeyPoints;

  List<int> get canvasLesionSizePointsIndexes => _canvasLesionSizePointsIndexes;

  List<Offset> get canvasNewContoursPoints => _canvasNewContoursPoints;

  List<int> get canvasPenModeKeyPointIndexes => _canvasPenModeKeyPointIndexes;

  AiResultModifierMode get currMode => _mode;

  List<EnumItemDTO> get diagnosisEnumItems => _diagnosisEnumItems;

  /// The organ of the current result (DiagnosisOrganEnum.Null when absent).
  DiagnosisOrganEnum get diagnosisOrgan =>
      modifiedDataDTO.diagResultsForEachOrgan?.first.organ ??
      DiagnosisOrganEnum.Null;

  /// Horizontal lesion length formatted with its physical unit.
  String get horizontalLength =>
      _countLesionLengthWithUnit(_horizontalLengthInPixel);

  /// Vertical lesion length formatted with its physical unit.
  String get verticalLength =>
      _countLesionLengthWithUnit(_verticalLengthInPixel);

  /// Aspect-ratio label for the lesion.
  ///
  /// Fix: the original spelled the condition `x > 1 || x == 1`; collapsed to
  /// `>= 1`. (A 0/0 division yields NaN, which compares false and still
  /// gives '< 1', exactly as before.)
  String get lesionRatio =>
      _verticalLengthInPixel / _horizontalLengthInPixel >= 1 ? '> 1' : '< 1';
  /// Switches the interaction mode; clears any highlighted key points and
  /// refreshes the mode buttons. No-op when the mode is unchanged.
  void changeModifierMode(AiResultModifierMode newMode) {
    if (_mode != newMode) {
      _mode = newMode;
      _canvasAffectedKeyPoints.clear();
      update(['ai_result_modifier', 'ai_mode_change_buttons']);
    }
  }
  /// Selects the AI lesion at [index]: flushes the edits of the previously
  /// selected lesion back into [modifiedDataDTO], then loads the new one.
  Future<void> changeAiDetectedObjectIndex(int index) async {
    final previousIndex = currentAiDetectedObjectIndex;
    _setNewCurrContoursToModifiedDataDTO(oldIndex: previousIndex);
    currentAiDetectedObjectIndex = index;
    await _updateContoursByIndex(index);
    update(['ai_result_canvas', 'ai_conclusion_result', 'ai_index_tag']);
  }
  /// Loads contours, lesion size and key points for the lesion at [index]
  /// and re-projects them into canvas space.
  Future<void> _updateContoursByIndex(int index) async {
    final detectedObject =
        modifiedDataDTO.diagResultsForEachOrgan![0].detectedObjects![index];
    contours = detectedObject.contours ?? [];
    // Extract the lesion size from the descriptions. Robust to a null list —
    // the previous force-unwrap (`descriptions!`) would have thrown.
    for (final description
        in detectedObject.descriptions ?? <AIDiagnosisDescription>[]) {
      if (description.type == DiagnosisDescriptionEnum.LesionSize) {
        lesionSize =
            AIDiagnosisLesionSize.fromJson(jsonDecode(description.value ?? ""));
      }
    }
    keyPoints = await _queryAllKeyPoints();
    _canvasAffectedKeyPoints.clear();
    _updateCurrContoursPoints();
    _updateCurrKeyPoints();
  }
  /// Writes the current canvas contours and lesion size back into
  /// [modifiedDataDTO] for the lesion at [oldIndex].
  ///
  /// Must be called BEFORE [currentAiDetectedObjectIndex] changes.
  void _setNewCurrContoursToModifiedDataDTO({required int oldIndex}) {
    final detectedObject =
        modifiedDataDTO.diagResultsForEachOrgan![0].detectedObjects![oldIndex];
    detectedObject.contours = _convertCanvasPoints(_canvasContoursPoints);
    final descriptions = detectedObject.descriptions;
    // Nothing to update when there are no descriptions; the previous
    // force-unwrap (`descriptions!`) would have thrown here.
    if (descriptions == null) return;
    for (final description in descriptions) {
      if (description.type == DiagnosisDescriptionEnum.LesionSize) {
        description.value = jsonEncode(lesionSize);
      }
    }
    detectedObject.descriptions = descriptions;
  }
  /// Builds an [AIDiagnosisLesionSize] from the four axis endpoints in
  /// [p1234] (order: horizontal 1/2, vertical 1/2) and the new pixel lengths.
  AIDiagnosisLesionSize _getNewLesionSize(List<AIDiagnosisPoint2D> p1234,
      int newHorizontalLengthInPixel, int newVerticalLengthInPixel) {
    final horizontalStart = p1234[0];
    final horizontalEnd = p1234[1];
    final verticalStart = p1234[2];
    final verticalEnd = p1234[3];
    return AIDiagnosisLesionSize(
        horizontalPoint1: horizontalStart,
        horizontalPoint2: horizontalEnd,
        verticalPoint1: verticalStart,
        verticalPoint2: verticalEnd,
        horizontalLengthInPixel: newHorizontalLengthInPixel,
        verticalLengthInPixel: newVerticalLengthInPixel);
  }
  /// Renders the current frame with all contours painted on top, uploads the
  /// full-size image and a downscaled thumbnail, and returns both tokens.
  ///
  /// Returns an [ImageUrls] with `isUploaded: false` on any failure.
  Future<ImageUrls> _getCurrImageUrls() async {
    try {
      final Uint8List vidImageBytes = currFrame.imageData;

      // Draw the raw frame plus every lesion contour on an offscreen canvas.
      // (The original comment wrongly said "generate thumbnail" here.)
      final Rect offscreenCanvasRect =
          Rect.fromLTWH(0, 0, frameSize.width, frameSize.height);
      final ui.PictureRecorder recorder = ui.PictureRecorder();
      final Canvas offscreenCanvas = Canvas(recorder, offscreenCanvasRect);
      offscreenCanvas.drawImage(
          await decodeImageFromList(vidImageBytes), Offset.zero, Paint());
      _paintAllContours(offscreenCanvas);
      final ui.Image originalFileImage = await recorder
          .endRecording()
          .toImage(currFrame.width.toInt(), currFrame.height.toInt());
      final originalFileByteData =
          await originalFileImage.toByteData(format: ui.ImageByteFormat.png);
      final originalFileBytes = originalFileByteData!.buffer.asUint8List();
      // NOTE: the "orginal" typo in the upload name is kept — it is part of
      // the stored object key, so renaming it would break lookups.
      final String aiFileToken = await rpcHelper.rpc.storage.uploadUint8List(
              originalFileBytes,
              "ai_modified_orginal_${remedicalCode}_$currFrameIndex.png",
              rpcHelper.userToken) ??
          '';
      // Fix: was a bare print(); debugPrint is stripped appropriately in
      // release builds.
      debugPrint('coverUrl: $aiFileToken');

      // Generate the thumbnail (never upscales; see _calcScale).
      final double scale = _calcScale(
        srcWidth: currFrame.width.toDouble(),
        srcHeight: currFrame.height.toDouble(),
        minWidth: 200,
        minHeight: 200,
      );
      final int scaledWidth = currFrame.width ~/ scale;
      final int scaledHeight = currFrame.height ~/ scale;
      final Rect previewOffscreenCanvasRect =
          Rect.fromLTWH(0, 0, scaledWidth.toDouble(), scaledHeight.toDouble());
      final ui.PictureRecorder lowLevelRecorder = ui.PictureRecorder();
      final Canvas previewOffscreenCanvas =
          Canvas(lowLevelRecorder, previewOffscreenCanvasRect);
      previewOffscreenCanvas.drawImageRect(originalFileImage,
          offscreenCanvasRect, previewOffscreenCanvasRect, Paint());
      final ui.Image previewFileImage = await lowLevelRecorder
          .endRecording()
          .toImage(scaledWidth, scaledHeight);
      final previewFileByteData =
          await previewFileImage.toByteData(format: ui.ImageByteFormat.png);
      final previewFileBytes = previewFileByteData!.buffer.asUint8List();
      final String previewFileUrl = await rpcHelper.rpc.storage.uploadUint8List(
              previewFileBytes,
              "ai_modified_preview_${remedicalCode}_$currFrameIndex.png",
              rpcHelper.userToken) ??
          '';
      debugPrint('previewFileUrl: $previewFileUrl');
      return ImageUrls(
          aiFileToken: aiFileToken, previewFileUrl: previewFileUrl);
    } catch (e) {
      logger.e('get screenshot failed', e);
      return ImageUrls(aiFileToken: '', previewFileUrl: '', isUploaded: false);
    }
  }
  /// Computes the downscale factor so the scaled image is no smaller than
  /// [minWidth] x [minHeight] in either dimension; never upscales (the
  /// returned factor is at least 1.0).
  double _calcScale({
    required double srcWidth,
    required double srcHeight,
    required double minWidth,
    required double minHeight,
  }) {
    final widthRatio = srcWidth / minWidth;
    final heightRatio = srcHeight / minHeight;
    return max(1.0, min(widthRatio, heightRatio));
  }
  /// Paints every lesion's dashed contour plus its size cross-lines and
  /// X-shaped endpoint markers onto [canvas], in image-pixel coordinates.
  ///
  /// NOTE(review): like the original, this reuses the [contours] field as
  /// scratch state — after the call it holds the LAST lesion's contour.
  void _paintAllContours(Canvas canvas) {
    final detectedObjects =
        modifiedDataDTO.diagResultsForEachOrgan![0].detectedObjects!;
    // Fix: the inner point loop previously shadowed the outer index `i`.
    for (var objIndex = 0; objIndex < detectedObjects.length; objIndex++) {
      contours = detectedObjects[objIndex].contours ?? [];
      // Dashed round-cap pen shared by all strokes of this lesion.
      final contoursPaint = Paint()
        ..color = Colors.green
        ..strokeCap = StrokeCap.round
        ..strokeWidth = 3.0
        ..style = PaintingStyle.stroke;
      // Trace the closed contour polygon.
      if (contours.isNotEmpty) {
        final contourPath = Path()
          ..moveTo(contours[0].x.toDouble(), contours[0].y.toDouble());
        for (int ptIndex = 1; ptIndex < contours.length; ptIndex++) {
          contourPath.lineTo(
              contours[ptIndex].x.toDouble(), contours[ptIndex].y.toDouble());
        }
        contourPath.close();
        canvas.drawPath(
            dashPath(
              contourPath,
              dashArray: CircularIntervalList<double>([1, 10]),
            ),
            contoursPaint);
      }
      // An empty description list is malformed data; skip such lesions.
      final descriptions = detectedObjects[objIndex].descriptions;
      if (descriptions == null || descriptions.isEmpty) {
        continue;
      }
      AIDiagnosisLesionSize currLesionSize = AIDiagnosisLesionSize();
      for (final description in descriptions) {
        if (description.type == DiagnosisDescriptionEnum.LesionSize) {
          currLesionSize = AIDiagnosisLesionSize.fromJson(
              jsonDecode(description.value ?? ""));
        }
      }
      final p1 = currLesionSize.horizontalPoint1;
      final p2 = currLesionSize.horizontalPoint2;
      final p3 = currLesionSize.verticalPoint1;
      final p4 = currLesionSize.verticalPoint2;
      // Fix: without a LesionSize description all endpoints stay null and the
      // old force-unwraps (`!`) threw at runtime; skip instead.
      if (p1 == null || p2 == null || p3 == null || p4 == null) {
        continue;
      }
      // Horizontal axis of the lesion size.
      final horizontalPath = Path()
        ..moveTo(p1.x.toDouble(), p1.y.toDouble())
        ..lineTo(p2.x.toDouble(), p2.y.toDouble());
      canvas.drawPath(
          dashPath(
            horizontalPath,
            dashArray: CircularIntervalList<double>([1, 5]),
          ),
          contoursPaint);
      // Vertical axis of the lesion size.
      final verticalPath = Path()
        ..moveTo(p3.x.toDouble(), p3.y.toDouble())
        ..lineTo(p4.x.toDouble(), p4.y.toDouble());
      canvas.drawPath(
          dashPath(
            verticalPath,
            dashArray: CircularIntervalList<double>([1, 5]),
          ),
          contoursPaint);
      // X markers on all four axis endpoints.
      for (final endpoint in [p1, p2, p3, p4]) {
        paintX(
          canvas,
          Offset(endpoint.x.toDouble(), endpoint.y.toDouble()),
          6.0,
          3,
          Colors.green,
        );
      }
    }
  }
  /// Draws an "X" marker centered at [center], with half-diagonal [radius],
  /// stroke width [width] and the given [color].
  void paintX(
      Canvas canvas, Offset center, double radius, double width, Color color) {
    final paint = Paint()
      ..color = color
      ..strokeCap = StrokeCap.round
      ..strokeWidth = width
      ..style = PaintingStyle.stroke;
    final topLeft = center.translate(-radius, -radius);
    final bottomRight = center.translate(radius, radius);
    final topRight = center.translate(radius, -radius);
    final bottomLeft = center.translate(-radius, radius);
    final cross = Path()
      ..moveTo(topLeft.dx, topLeft.dy)
      ..lineTo(bottomRight.dx, bottomRight.dy)
      ..moveTo(topRight.dx, topRight.dy)
      ..lineTo(bottomLeft.dx, bottomLeft.dy);
    canvas.drawPath(cross, paint);
  }
  /// Returns the translated AI-module string for [code].
  String getValuesFromAiLanguage(String code) => measureLanguage.t('ai', code);
  /// Reloads the original AI result, discarding local edits, then refreshes
  /// the conclusion panel.
  ///
  /// Fix: was `void resetAIResult() async` — an async function should return
  /// [Future] so callers can await it (Effective Dart). Backward compatible.
  Future<void> resetAIResult() async {
    await _initAIResult();
    update(['ai_conclusion_result']);
  }
  @override
  void onClose() {
    // Detach the window-resize listener while the controller is still fully
    // alive, then let GetX run its own teardown last.
    Sizer.ins.removeListener(_onWindowResize);
    super.onClose();
  }
  @override
  void onInit() async {
    super.onInit();
    await _getDiagnosisEnumItemsAsync();
    _updateModifierInteractiveLayerSize();
    _updateImagePhysicalSize();
    // Fix: was fire-and-forget; await so the AI result is fully loaded before
    // the resize listener is registered and errors are not silently dropped.
    await _initAIResult();
    Sizer.ins.addListener(_onWindowResize);
  }
  /// Window-resize handler: recomputes the frame scale and canvas size after
  /// the next frame, then re-projects all canvas-space geometry.
  void _onWindowResize(_) {
    update(['ai_result_modifier']);
    frameSize = Size(currFrame.width.toDouble(), currFrame.height.toDouble());
    WidgetsBinding.instance.addPostFrameCallback((_) {
      final RenderBox box =
          framePlayerKey.currentContext!.findRenderObject() as RenderBox;
      final framePlayerSize = Size(box.size.width, box.size.height);
      // Aspect-preserving fit of the frame inside the player area.
      _scale = min(framePlayerSize.width / frameSize.width,
          framePlayerSize.height / frameSize.height);
      aiCanvasSize = Size(frameSize.width * _scale, frameSize.height * _scale);
      // NOTE(review): this schedules ANOTHER post-frame callback that repeats
      // the scale computation above — looks redundant; confirm before removing.
      _updateModifierInteractiveLayerSize();
      _updateCurrKeyPoints();
      _updateCurrContoursPoints();
      _updateCurrAffectedKeyPoints();
      // Refresh the interactive layer and the canvas.
      update(["ai_result_modifier_interactive_layer", "ai_result_canvas"]);
    });
  }
  /// Routes pointer-drag updates to the handler for the active mode.
  void onMouseDrag(DragUpdateDetails details) {
    final position = details.localPosition;
    if (_mode == AiResultModifierMode.drag) {
      // Throttled to 100 ms: each drag step triggers a backend round-trip.
      utils.throttle(() {
        _onDragModeCallDragFunction(position);
      }, 'onMouseDrag', 100);
    } else if (_mode == AiResultModifierMode.pen) {
      _onPenModeCallDragFunction(position);
    }
  }
  /// Finalizes a drag. In pen mode, merges the freshly drawn contour into the
  /// lesion and clears the in-progress stroke; drag mode needs no cleanup.
  void onMouseDragEnd(DragEndDetails details) async {
    if (_mode != AiResultModifierMode.pen) return;
    if (_isDrawingNewContours) {
      _isDrawingNewContours = false;
      await _callContourMergeAsync();
      _updateCurrContoursPoints();
      _updateCurrKeyPoints();
    }
    _canvasNewContoursPoints.clear();
    update(['ai_result_canvas']);
  }
  /// Drag-start handler: records the origin and snapshots the state needed by
  /// the drag RPC (drag mode), or seeds the new contour stroke (pen mode).
  void onMouseDragStart(DragDownDetails details) {
    switch (_mode) {
      case AiResultModifierMode.drag:
        _dragStartPoint = details.localPosition;
        // NOTE(review): these assignments alias the live lists rather than
        // copying them, so the "snapshots" mutate together with
        // [contours]/[keyPoints] — confirm this is intended.
        contoursOnDragStart = contours;
        keyPointsOnDragStart = keyPoints;
        break;
      case AiResultModifierMode.pen:
        // Only start drawing when the cursor is near an existing contour
        // point (highlighted by the hover handler).
        if (_canvasPenModeKeyPointIndexes.isNotEmpty) {
          _isDrawingNewContours = true;
          _dragStartPoint = details.localPosition;
          _canvasNewContoursPoints.clear();
          // Seed the stroke with the nearest contour point, then the press
          // position.
          _canvasNewContoursPoints
              .add(_canvasContoursPoints[_canvasPenModeKeyPointIndexes[0]]);
          _canvasNewContoursPoints.add(_dragStartPoint);
        }
        break;
      default:
    }
  }
  /// Clears key-point highlights once the pointer leaves the canvas area.
  void onMouseExit(PointerExitEvent e) async {
    // Wait out the 100 ms hover throttle so a late highlight can't stick
    // around after the pointer has left.
    await Future.delayed(const Duration(milliseconds: 200));
    _canvasAffectedKeyPoints.clear();
    update(['ai_result_canvas']);
  }
  /// Hover handler: updates key-point highlighting (drag mode, throttled to
  /// 100 ms) or the nearest-contour-point marker (pen mode, throttled to
  /// 10 ms). Does nothing while no key points are loaded.
  void onMouseHover(PointerHoverEvent e) async {
    if (keyPoints.isEmpty) return;
    final position = e.localPosition;
    switch (_mode) {
      case AiResultModifierMode.drag:
        utils.throttle(() {
          _onDragModeCallHoverFunction(position);
        }, 'onMouseHover', 100);
        break;
      case AiResultModifierMode.pen:
        utils.throttle(() {
          _onPenModeCallHoverFunction(position);
        }, 'onMouseHover', 10);
        break;
      default:
    }
  }
  @override
  void onReady() {
    super.onReady();
    // Kick off the first full UI build once the widget tree is ready.
    _initData();
  }
  /// Saves the modified AI result: flushes the current lesion's edits,
  /// uploads the rendered cover + thumbnail, then persists [modifiedDataDTO]
  /// via RPC. Shows toasts for progress and outcome; pops the page on
  /// success. ([code] is currently unused — kept for interface stability.)
  Future<void> saveAIResult({
    String? code,
  }) async {
    PromptBox.toast(i18nBook.realTimeConsultation.uploading.t);
    _setNewCurrContoursToModifiedDataDTO(
        oldIndex: currentAiDetectedObjectIndex);
    try {
      final ImageUrls imageUrls = await _getCurrImageUrls();
      if (!imageUrls.isUploaded) {
        logger.e("Url not uploaded");
        PromptBox.toast(i18nBook.user.saveFailed.t);
        return;
      }
      // Determine whether we are editing an existing AI selection:
      // when remedicalAISelectedInfoCode is empty, a new single-frame vid
      // record is created from the source vid; otherwise the previously
      // edited record is updated in place.
      bool isAIEditMode = measureData
          .measureImageData.remedicalAISelectedInfoCode.isNotNullOrEmpty;
      final result =
          await rpcHelper.rpc.remedical.saveRemedicalAISelectedInfoAsync(
        SaveRemedicalAISelectedInfoRequest(
          token: rpcHelper.userToken,
          remedicalCode: remedicalCode,
          code: isAIEditMode
              ? measureData.measureImageData.remedicalAISelectedInfoCode
              : null,
          frameIndex: currFrameIndex,
          // diagnosisConclusion: diagnosisOrgan,
          previewFileToken: imageUrls.previewFileUrl,
          aIFileToken: imageUrls.aiFileToken,
          diagnosisData: jsonEncode(modifiedDataDTO),
        ),
      );
      if (result) {
        PromptBox.toast(
            "${i18nBook.user.saveSuccess.t} \r\n ${i18nBook.measure.saveLocation.t + ' > ' + i18nBook.measure.aiImage.t}");
        Get.back();
      } else {
        logger.e("Server result is false");
        PromptBox.toast(i18nBook.user.saveFailed.t);
      }
    } catch (e) {
      logger.e("Operation failed with exception ${e} ");
      PromptBox.toast(i18nBook.user.saveFailed.t);
    }
  }
  /// Loads the AI result — from the locally cached single-frame result when
  /// one exists, otherwise via RPC — filters out malformed lesions, and
  /// primes contours/lesion size/key points for drawing.
  Future<void> _initAIResult() async {
    try {
      var existAIResult = jsonDecode(measureData.aiResults);
      // When aiResults holds exactly one entry we have a single-frame AI
      // image (edited or straight from the vid); it can be read from
      // measureData either way, so the two sources need no distinction here.
      if (measureData
              .measureImageData.remedicalAISelectedInfoCode.isNotNullOrEmpty &&
          existAIResult.length == 1) {
        resultDTO = AIDiagnosisPerImageDTO.fromJson(existAIResult[0]);
      } else {
        final result =
            await rpcHelper.rpc.remedical.getRemedicalDiagnosisDataAsync(
          GetRemedicalDiagnosisDataRequest(
            token: rpcHelper.userToken,
            remedicalCode: remedicalCode,
            frameIndex: currFrameIndex,
          ),
        );
        resultDTO = AIDiagnosisPerImageDTO.fromJson(jsonDecode(result));
      }
      // Drop lesions that carry no descriptions (malformed data).
      List<AIDetectedObject> legalObjs = [];
      var tempResultDto = resultDTO;
      var rawObjs = tempResultDto.diagResultsForEachOrgan![0].detectedObjects!;
      for (var detectedObject in rawObjs) {
        var isLegalObject = detectedObject.descriptions?.isNotEmpty ?? false;
        if (isLegalObject) {
          legalObjs.add(detectedObject);
        }
      }
      rawObjs.clear();
      rawObjs.addAll(legalObjs);
      // NOTE(review): tempResultDto aliases resultDTO, so modifiedDataDTO and
      // resultDTO end up sharing one object here — confirm that is intended.
      modifiedDataDTO = tempResultDto;
      contours = resultDTO.diagResultsForEachOrgan![0]
              .detectedObjects![currentAiDetectedObjectIndex].contours ??
          [];
      List<AIDiagnosisDescription>? descriptions = resultDTO
          .diagResultsForEachOrgan![0]
          .detectedObjects![currentAiDetectedObjectIndex]
          .descriptions;
      // Extract the lesion size from the descriptions.
      for (AIDiagnosisDescription description in descriptions!) {
        if (description.type == DiagnosisDescriptionEnum.LesionSize) {
          lesionSize = AIDiagnosisLesionSize.fromJson(
              jsonDecode(description.value ?? ""));
        }
      }
      keyPoints = await _queryAllKeyPoints();
      _canvasAffectedKeyPoints.clear();
      _updateCurrContoursPoints();
      _updateCurrKeyPoints();
      update(['ai_result_canvas', 'ai_result_panel', 'ai_index_tag']);
    } catch (e) {
      logger.e('load ai result failed', e);
    }
  }
  /// Recomputes the frame scale and AI-canvas size from the player's render
  /// box (after the next frame), then refreshes the interactive layer.
  void _updateModifierInteractiveLayerSize() {
    frameSize = Size(currFrame.width.toDouble(), currFrame.height.toDouble());
    WidgetsBinding.instance.addPostFrameCallback((_) {
      final RenderBox box =
          framePlayerKey.currentContext!.findRenderObject() as RenderBox;
      final framePlayerSize = Size(box.size.width, box.size.height);
      // Aspect-preserving fit of the frame inside the player area.
      _scale = min(framePlayerSize.width / frameSize.width,
          framePlayerSize.height / frameSize.height);
      aiCanvasSize = Size(frameSize.width * _scale, frameSize.height * _scale);
      // Refresh the interactive layer.
      update(["ai_result_modifier_interactive_layer"]);
    });
  }
  /// Converts a pixel [length] to a physical-length string with its unit,
  /// e.g. "1.23 cm" (two decimal places).
  String _countLesionLengthWithUnit(int length) {
    // Fix: toStringAsFixed already returns a String — the trailing
    // .toString() in the original was redundant.
    final lengthStr = (length * _unitsPhysicalPixels).toStringAsFixed(2);
    return "$lengthStr $_xUnit";
  }
  /// Derives the physical size of one image pixel and its unit label from
  /// the first visual area's viewport.
  void _updateImagePhysicalSize() {
    // Physical width of the viewport region divided by the frame's pixel
    // width gives physical units per pixel. Force-unwraps assume the
    // viewport and frame data are present by the time this runs.
    _unitsPhysicalPixels =
        (application.visuals[0].visualAreas[0].viewport?.region.width)! /
            (application.frameData!.width).toDouble();
    VidUsUnit targetUnit =
        application.visuals[0].visualAreas[0].viewport?.xUnit ?? VidUsUnit.cm;
    _xUnit = UnitDescriptionMap.getDesc(targetUnit);
  }
  /// Snaps the in-progress stroke shut when its last point comes within 6 px
  /// of an existing contour point, then asks the backend to merge the new
  /// contour. No-op until the stroke has at least 6 points.
  ///
  /// Fix: was `void ... async`; returns [Future] so callers can await it
  /// (Effective Dart). Existing fire-and-forget call sites keep working.
  Future<void> _autoCloseContours() async {
    if (_canvasNewContoursPoints.length < 6) return;
    // Find the contour point nearest to the stroke's last point.
    double minDistance = double.infinity;
    int nearestKeyPointIndex = -1;
    final lastPoint = _canvasNewContoursPoints.last;
    for (int i = 0; i < canvasContoursPoints.length; i++) {
      final double distance = (canvasContoursPoints[i] - lastPoint).distance;
      if (distance < minDistance) {
        minDistance = distance;
        nearestKeyPointIndex = i;
      }
    }
    // Within snapping range: close the stroke onto that point and merge.
    if (minDistance < 6) {
      _canvasPenModeKeyPointIndexes.add(nearestKeyPointIndex);
      _canvasNewContoursPoints.add(canvasContoursPoints[nearestKeyPointIndex]);
      _isDrawingNewContours = false;
      await _callContourMergeAsync();
      _updateCurrContoursPoints();
      _updateCurrKeyPoints();
    }
  }
  /// Sends the hand-drawn stroke to the backend for merging with the current
  /// contour, then replaces the local contours, lesion size and key points
  /// with the merged result. Always returns true.
  Future<bool> _callContourMergeAsync() async {
    final request = ContourMergeRequest(
      token: rpcHelper.userToken,
      contourPoints: contours,
      lesionSize: lesionSize,
      drawingNewContourPoints: _convertCanvasPoints(_canvasNewContoursPoints),
    );
    final ContourMergeResult result =
        await rpcHelper.rpc.aIDiagnosis.contourMergeAsync(request);
    contours = result.dstContours ?? [];
    lesionSize = result.dstLesionSize;
    keyPoints = await _queryAllKeyPoints();
    return true;
  }
  /// Converts canvas-space offsets to API image-space points (truncating
  /// integer division by the current scale).
  List<AIDiagnosisPoint2D> _convertCanvasPoints(List<Offset> points) {
    return [
      for (final point in points)
        AIDiagnosisPoint2D(x: point.dx ~/ _scale, y: point.dy ~/ _scale),
    ];
  }
  /// Converts key points from API image coordinates to canvas coordinates.
  ///
  /// Side effects: records the list indexes of the four lesion-size axis
  /// endpoints into [_canvasLesionSizePointsIndexes] (order: h-left, h-right,
  /// v-up, v-down) and refreshes the size/ratio readouts via
  /// [_updateLesionSizeAndRatio]. Points with a null position are skipped,
  /// which can shift result indexes relative to [points].
  List<Offset> _convertKeyPoints(List<DiagnosisKeyPointDTO> points) {
    List<Offset> result = [];
    List<int> pointIndexes = List.generate(4, (_) => 0);
    for (int i = 0; i < points.length; i++) {
      final point = points[i];
      if (point.point == null) continue;
      result.add(Offset(point.point!.x.toDouble() * _scale,
          point.point!.y.toDouble() * _scale));
      if (point.type != DiagnosisKeyPointType.OtherKeyPoints) {
        switch (point.type) {
          case DiagnosisKeyPointType.HorizontalPointLeft:
            pointIndexes[0] = i;
            break;
          case DiagnosisKeyPointType.HorizontalPointRight:
            pointIndexes[1] = i;
            break;
          case DiagnosisKeyPointType.VerticalPointUp:
            pointIndexes[2] = i;
            break;
          case DiagnosisKeyPointType.VerticalPointDown:
            pointIndexes[3] = i;
            break;
          default:
        }
      }
    }
    _canvasLesionSizePointsIndexes = pointIndexes;
    _updateLesionSizeAndRatio();
    return result;
  }
  /// Converts API image-space points to canvas-space offsets (multiplying by
  /// the current scale).
  List<Offset> _convertPoints(List<AIDiagnosisPoint2D> points) {
    return points
        .map((p) => Offset(p.x.toDouble() * _scale, p.y.toDouble() * _scale))
        .toList();
  }
  /// Fetches the AI-result enum items from the backend into
  /// [_diagnosisEnumItems] (empty list when the response has no source).
  Future<void> _getDiagnosisEnumItemsAsync() async {
    final response =
        await rpcHelper.rpc.aIDiagnosis.getDiagnosisEnumItemsAsync(
      GetDiagnosisEnumItemsRequest(token: rpcHelper.userToken),
    );
    _diagnosisEnumItems = response.source ?? [];
  }
  /// Triggers the initial build of the modifier view.
  void _initData() {
    update(["ai_result_modifier"]);
  }
  /// Drag-mode drag handler (throttled to 100 ms by the caller): asks the
  /// backend for the dragged contour and repaints on success.
  void _onDragModeCallDragFunction(Offset pos) async {
    final startPoint = AIDiagnosisPoint2D(
        x: _dragStartPoint.dx ~/ _scale, y: _dragStartPoint.dy ~/ _scale);
    final endPoint =
        AIDiagnosisPoint2D(x: pos.dx ~/ _scale, y: pos.dy ~/ _scale);
    if (await _queryDragResult(startPoint, endPoint)) {
      _updateCurrKeyPoints();
      _updateCurrContoursPoints();
      _updateCurrAffectedKeyPoints();
      update(["ai_result_canvas"]);
    }
  }
  /// Drag-mode hover handler (throttled to 100 ms by the caller): refreshes
  /// which key points would be affected by a drag from the cursor position.
  void _onDragModeCallHoverFunction(Offset localPosition) async {
    final mousePos = AIDiagnosisPoint2D(
        x: localPosition.dx ~/ _scale, y: localPosition.dy ~/ _scale);
    affectedKeyPointIndexes = await _queryAffectedKeyPoints(mousePos);
    _updateCurrAffectedKeyPoints();
    update(["ai_result_canvas"]);
  }
  761. /// 在画轮廓模式下触发拖拽事件
  762. void _onPenModeCallDragFunction(Offset pos) async {
  763. if (!_isDrawingNewContours) return;
  764. // 点间距【疏密程度】
  765. const double pointDistance = 8;
  766. final double distance = (pos - _canvasNewContoursPoints.last).distance;
  767. if (distance >= pointDistance) {
  768. int numPointsToInsert = (distance / pointDistance).ceil() - 1; // 需要插入的点数
  769. for (int i = 0; i < numPointsToInsert; i++) {
  770. double t = (i + 1) / (numPointsToInsert + 1);
  771. Offset interpolatedPoint = Offset(
  772. _canvasNewContoursPoints.last.dx +
  773. t * (pos.dx - _canvasNewContoursPoints.last.dx),
  774. _canvasNewContoursPoints.last.dy +
  775. t * (pos.dy - _canvasNewContoursPoints.last.dy),
  776. );
  777. _canvasNewContoursPoints.add(interpolatedPoint);
  778. }
  779. _canvasNewContoursPoints.add(pos);
  780. update(["ai_result_canvas"]);
  781. }
  782. _autoCloseContours();
  783. }
  784. /// 在画轮廓模式下,通过鼠标位置更新最近的关键点【每隔10ms触发一次】
  785. void _onPenModeCallHoverFunction(Offset localPosition) async {
  786. double minDistance = double.infinity;
  787. int nearestKeyPointIndex = -1;
  788. for (int i = 0; i < canvasContoursPoints.length; i++) {
  789. final point = canvasContoursPoints[i];
  790. final double distance = (point - localPosition).distance;
  791. if (distance < minDistance) {
  792. minDistance = distance;
  793. nearestKeyPointIndex = i;
  794. }
  795. }
  796. _canvasPenModeKeyPointIndexes.clear();
  797. if (minDistance < 10) {
  798. _canvasPenModeKeyPointIndexes.add(nearestKeyPointIndex);
  799. }
  800. update(["ai_result_canvas"]);
  801. }
  802. /// 根据鼠标位置查询受影响的关键点
  803. Future<List<int>> _queryAffectedKeyPoints(AIDiagnosisPoint2D mousePos) async {
  804. try {
  805. final List<int> result =
  806. await rpcHelper.rpc.aIDiagnosis.affectedKeyPointsByDragActionAsync(
  807. AffectedKeyPointsByDragActionRequest(
  808. token: rpcHelper.userToken,
  809. keyPoints: keyPoints,
  810. mousePoint: mousePos,
  811. ),
  812. );
  813. // print(result);
  814. return result;
  815. } catch (e) {
  816. return [];
  817. }
  818. }
  819. /// 查询所有关键点【需要先存好contours和lesionSize】
  820. Future<List<DiagnosisKeyPointDTO>> _queryAllKeyPoints() async {
  821. try {
  822. final List<DiagnosisKeyPointDTO> result =
  823. await rpcHelper.rpc.aIDiagnosis.getKeyPointsOfContourAsync(
  824. GetKeyPointsOfContourRequest(
  825. token: rpcHelper.userToken,
  826. contours: contours,
  827. lesionSize: lesionSize,
  828. ),
  829. );
  830. return result;
  831. } catch (e) {
  832. return [];
  833. }
  834. }
  835. /// 查询拖拽结果集合【需要先存好 contoursOnDragStart 和 keyPointsOnDragStart】
  836. Future<bool> _queryDragResult(
  837. AIDiagnosisPoint2D startPoint, AIDiagnosisPoint2D endPoint) async {
  838. try {
  839. final ContourAndKeyPointsAfterDragResult result =
  840. await rpcHelper.rpc.aIDiagnosis.contourAndKeyPointsAfterDragAsync(
  841. ContourAndKeyPointsAfterDragRequest(
  842. token: rpcHelper.userToken,
  843. contours: contoursOnDragStart,
  844. keyPoints: keyPointsOnDragStart,
  845. startPoint: startPoint,
  846. endPoint: endPoint,
  847. ),
  848. );
  849. keyPoints = result.dstKeyPoints ?? [];
  850. contours = result.dstContours ?? [];
  851. affectedKeyPointIndexes = result.affectedKeyPointIndexes!;
  852. return true;
  853. } catch (e) {
  854. return false;
  855. }
  856. }
  857. // 根据病灶四个点位置横纵比参数
  858. void _updateLesionSizeAndRatio() {
  859. if (_canvasLesionSizePointsIndexes.length != 4) return;
  860. if (keyPoints.length < 4) return;
  861. final pIndexs = _canvasLesionSizePointsIndexes;
  862. final p1 = keyPoints[pIndexs[0]].point!;
  863. final p2 = keyPoints[pIndexs[1]].point!;
  864. final p3 = keyPoints[pIndexs[2]].point!;
  865. final p4 = keyPoints[pIndexs[3]].point!;
  866. /// 计算 p1 到 p2 的像素距离 更新到 _horizontalLengthInPixel
  867. /// 计算 p3 到 p4 的像素距离 更新到 _verticalLengthInPixel
  868. double _horizontalLength =
  869. (Offset(p1.x.toInt().toDouble(), p1.y.toInt().toDouble()) -
  870. Offset(p2.x.toInt().toDouble(), p2.y.toInt().toDouble()))
  871. .distance;
  872. _horizontalLengthInPixel = _horizontalLength.ceil();
  873. double _verticalLength =
  874. (Offset(p3.x.toInt().toDouble(), p3.y.toInt().toDouble()) -
  875. Offset(p4.x.toInt().toDouble(), p4.y.toInt().toDouble()))
  876. .distance;
  877. _verticalLengthInPixel = _verticalLength.ceil();
  878. lesionSize = _getNewLesionSize(
  879. [p1, p2, p3, p4], _horizontalLengthInPixel, _verticalLengthInPixel);
  880. update(['ai_result_lesion_size', 'ai_result_lesion_ratio']);
  881. }
  882. /// [⭐ _canvasAffectedKeyPoints ] 根据当前的受影响关键点下标更新受影响关键点集
  883. void _updateCurrAffectedKeyPoints() {
  884. _canvasAffectedKeyPoints.clear();
  885. if (keyPoints.isEmpty) return;
  886. for (int i = 0; i < keyPoints.length; i++) {
  887. if (affectedKeyPointIndexes.contains(i)) {
  888. _canvasAffectedKeyPoints.add(Offset(
  889. keyPoints[i].point!.x.toDouble() * _scale,
  890. keyPoints[i].point!.y.toDouble() * _scale));
  891. }
  892. }
  893. }
  894. /// [⭐ _canvasContoursPoints ] 更新当前轮廓点
  895. void _updateCurrContoursPoints() {
  896. _canvasContoursPoints = _convertPoints(contours);
  897. }
  898. /// [⭐ _canvasKeyPoints ] 更新当前关键点
  899. void _updateCurrKeyPoints() async {
  900. _canvasKeyPoints = _convertKeyPoints(keyPoints);
  901. }
  902. }
/// Interaction modes for the AI result modifier canvas.
enum AiResultModifierMode {
  /// Drag mode.
  drag,

  /// Pen (contour drawing) mode.
  pen,

  /// Screenshot mode.
  screenshot,
}
  911. ///存储服务扩展类
  912. extension StorageServiceExt on StorageService {
  913. ///鉴权 fileName 为空则接口报错,所以此处设置一个默认值
  914. Future<StorageServiceSettingDTO> getAuth({
  915. String? fileName,
  916. bool? isRechristen,
  917. List<DataItemDTO>? urlParams,
  918. List<DataItemDTO>? headerParams,
  919. String? requestMethod,
  920. required String userToken,
  921. }) async {
  922. try {
  923. final result = await getAuthorizationAsync(FileServiceRequest(
  924. token: userToken,
  925. fileName: fileName ?? "dat",
  926. isRechristen: isRechristen ?? true,
  927. urlParams: urlParams,
  928. headerParams: headerParams,
  929. requestMethod: requestMethod,
  930. ));
  931. return result;
  932. } catch (e) {
  933. return StorageServiceSettingDTO();
  934. }
  935. }
  936. ///文件上传(UInt8List)
  937. Future<String?> uploadUint8List(Uint8List buffer, String name, String token,
  938. [bool? isRechristen]) async {
  939. try {
  940. var nameInfos = name.split('.');
  941. final auth = await getAuth(
  942. fileName: nameInfos.last,
  943. isRechristen: isRechristen,
  944. userToken: token,
  945. );
  946. Map<String, String> params = {};
  947. params['Authorization'] = auth.authorization!;
  948. params['ContentType'] = auth.contentType!;
  949. final response = await http
  950. .put(
  951. Uri.parse(auth.storageUrl!),
  952. body: buffer,
  953. headers: params,
  954. )
  955. .timeout(
  956. const Duration(seconds: 30),
  957. );
  958. if (response.statusCode == 200) {
  959. return auth.storageUrl;
  960. }
  961. } catch (e) {
  962. logger.e('StorageServiceExt uploadUint8List ex:$e');
  963. }
  964. return null;
  965. }
  966. }
  967. class ImageUrls {
  968. /// 原始图像地址
  969. String aiFileToken;
  970. /// 缩略图地址
  971. String previewFileUrl;
  972. /// 是否已经上传
  973. bool isUploaded = true;
  974. ImageUrls({
  975. required this.aiFileToken,
  976. required this.previewFileUrl,
  977. this.isUploaded = true,
  978. });
  979. }