// controller.dart — AI result modifier controller.
  1. import 'dart:convert';
  2. import 'dart:math';
  3. import 'dart:typed_data';
  4. import 'dart:ui' as ui;
  5. import 'package:fis_common/index.dart';
  6. import 'package:fis_common/logger/logger.dart';
  7. import 'package:fis_i18n/i18n.dart';
  8. import 'package:fis_jsonrpc/rpc.dart';
  9. import 'package:fis_measure/interfaces/process/workspace/application.dart';
  10. import 'package:fis_measure/process/language/measure_language.dart';
  11. import 'package:fis_measure/process/workspace/measure_data_controller.dart';
  12. import 'package:fis_measure/process/workspace/rpc_bridge.dart';
  13. import 'package:fis_measure/utils/prompt_box.dart';
  14. import 'package:fis_measure/values/unit_desc.dart';
  15. import 'package:fis_measure/view/ai_result_modifier/state.dart';
  16. import 'package:fis_measure/view/mobile_view/widgets/throttle.dart' as utils;
  17. import 'package:fis_ui/utils/sizer/sizer.dart';
  18. import 'package:flutter/gestures.dart';
  19. import 'package:flutter/material.dart';
  20. import 'package:get/get.dart';
  21. import 'package:path_drawing/path_drawing.dart';
  22. import 'package:vid/us/vid_us_image.dart';
  23. import 'package:vid/us/vid_us_unit.dart';
  24. import 'package:http/http.dart' as http;
class AiResultModifierController extends GetxController {
  /// Bridge to the backend RPC services.
  final rpcHelper = Get.find<RPCBridge>();

  /// Shared measurement data store (AI results, selected-info code, ...).
  MeasureDataController get measureData => Get.find<MeasureDataController>();

  /// Backend language-pack controller.
  // final languageService = Get.find<LanguageService>();
  final state = AiResultModifierState();

  /// Constructor inputs: [image code, frame index, frame image, edited code].
  final String remedicalCode;
  final int currFrameIndex;
  final VidUsImage currFrame;

  /// Complete diagnosis data as first fetched.
  AIDiagnosisPerImageDTO resultDTO = AIDiagnosisPerImageDTO();

  /// Complete diagnosis data after edits (sent back to the server on save).
  AIDiagnosisPerImageDTO modifiedDataDTO = AIDiagnosisPerImageDTO();

  // Contour points used for canvas drawing.
  List<Offset> _canvasContoursPoints = [];
  // Key points used for canvas drawing (drag mode).
  List<Offset> _canvasKeyPoints = [];
  // Highlighted key points used for canvas drawing (drag mode).
  final List<Offset> _canvasAffectedKeyPoints = [];
  // Indexes of the four lesion-size axis endpoints used for canvas drawing.
  List<int> _canvasLesionSizePointsIndexes = [];
  // Indexes of contour key points (pen mode).
  final List<int> _canvasPenModeKeyPointIndexes = [];
  // Newly drawn contour points (pen mode).
  final List<Offset> _canvasNewContoursPoints = [];
  // AI points. (Original comment said "player widget key" — likely stale.)
  final List<Offset> _aiPoints = [];
  // Lesion conclusion enum items.
  List<EnumItemDTO> _diagnosisEnumItems = [];
  // Current horizontal length in pixels.
  int _horizontalLengthInPixel = 0;
  // Current vertical length in pixels.
  int _verticalLengthInPixel = 0;
  // Index of the currently selected AI lesion.
  int currentAiDetectedObjectIndex = 0;
  // Key of the frame player area.
  GlobalKey framePlayerKey = GlobalKey();
  // Key of the screenshot capture area.
  GlobalKey captureAreaKey = GlobalKey();
  // Size of the canvas widget.
  Size aiCanvasSize = Size.zero;
  // Actual size of the image.
  Size frameSize = Size.zero;
  // Display scale of the image.
  double _scale = 1.0;
  // Physical length represented by one image pixel.
  double _unitsPhysicalPixels = 0.0;
  // Physical unit of the image.
  String _xUnit = '';
  // Current contour points.
  List<AIDiagnosisPoint2D> contours = [];
  // Current lesion size.
  AIDiagnosisLesionSize? lesionSize;
  // Current key points.
  List<DiagnosisKeyPointDTO> keyPoints = [];
  // Indexes of the currently highlighted key points.
  List<int> affectedKeyPointIndexes = [];
  // Current interaction mode.
  AiResultModifierMode _mode = AiResultModifierMode.drag;
  // Whether a new contour is currently being drawn.
  bool _isDrawingNewContours = false;
  // Drag start point.
  Offset _dragStartPoint = Offset.zero;
  // Contours at drag start (only used for requests).
  List<AIDiagnosisPoint2D> contoursOnDragStart = [];
  // Key points at drag start (only used for requests).
  List<DiagnosisKeyPointDTO> keyPointsOnDragStart = [];

  /// Measurement language pack.
  final measureLanguage = MeasureLanguage();

  AiResultModifierController(
      {required this.remedicalCode,
      required this.currFrameIndex,
      required this.currFrame});

  /// The measure application instance, resolved lazily.
  late final application = Get.find<IApplication>();
  /// All AI-detected lesions of the first organ result.
  List<AIDetectedObject> get aiDetectedObjectList =>
      modifiedDataDTO.diagResultsForEachOrgan?.first.detectedObjects ?? [];

  /// The currently selected lesion, if any.
  AIDetectedObject? get aiDetectedObject => modifiedDataDTO
      .diagResultsForEachOrgan
      ?.first
      .detectedObjects?[currentAiDetectedObjectIndex];

  List<Offset> get aiPoints => _aiPoints;
  List<Offset> get canvasAffectedKeyPoints => _canvasAffectedKeyPoints;
  List<Offset> get canvasContoursPoints => _canvasContoursPoints;
  List<Offset> get canvasKeyPoints => _canvasKeyPoints;
  List<int> get canvasLesionSizePointsIndexes => _canvasLesionSizePointsIndexes;
  List<Offset> get canvasNewContoursPoints => _canvasNewContoursPoints;
  List<int> get canvasPenModeKeyPointIndexes => _canvasPenModeKeyPointIndexes;
  AiResultModifierMode get currMode => _mode;
  List<EnumItemDTO> get diagnosisEnumItems => _diagnosisEnumItems;

  /// Organ of the first diagnosis result (Null when absent).
  DiagnosisOrganEnum get diagnosisOrgan =>
      modifiedDataDTO.diagResultsForEachOrgan?.first.organ ??
      DiagnosisOrganEnum.Null;

  /// Horizontal lesion length, formatted with its physical unit.
  String get horizontalLength =>
      _countLesionLengthWithUnit(_horizontalLengthInPixel);

  /// Vertical lesion length, formatted with its physical unit.
  String get verticalLength =>
      _countLesionLengthWithUnit(_verticalLengthInPixel);
  /// Aspect-ratio label of the lesion: '> 1' when the vertical extent is at
  /// least as long as the horizontal one, otherwise '< 1'.
  ///
  /// The division is kept (rather than comparing the two ints directly) so
  /// the degenerate 0/0 case still yields NaN and therefore '< 1', exactly
  /// as the original `> 1 || == 1` chain behaved.
  String get lesionRatio =>
      _verticalLengthInPixel / _horizontalLengthInPixel >= 1 ? '> 1' : '< 1';
  /// Switches the interaction mode and clears any highlighted key points.
  ///
  /// A no-op when [newMode] is already active.
  void changeModifierMode(AiResultModifierMode newMode) {
    if (newMode != _mode) {
      _mode = newMode;
      _canvasAffectedKeyPoints.clear();
      update(['ai_result_modifier', 'ai_mode_change_buttons']);
    }
  }
  /// Switches the selected AI lesion: persists the edits of the old object,
  /// loads the contour data of object [index], then repaints.
  Future<void> changeAiDetectedObjectIndex(int index) async {
    _setNewCurrContoursToModifiedDataDTO(
        oldIndex: currentAiDetectedObjectIndex);
    currentAiDetectedObjectIndex = index;
    await _updateContoursByIndex(index);
    update(['ai_result_canvas', 'ai_conclusion_result', 'ai_index_tag']);
  }
  /// Loads the contour and lesion data of detected object [index] into the
  /// editable fields ([contours], [lesionSize], [keyPoints]) and refreshes
  /// the canvas point caches.
  Future<void> _updateContoursByIndex(int index) async {
    contours = modifiedDataDTO
            .diagResultsForEachOrgan![0].detectedObjects![index].contours ??
        [];
    List<AIDiagnosisDescription>? descriptions = modifiedDataDTO
        .diagResultsForEachOrgan![0].detectedObjects![index].descriptions;
    // Scan descriptions for the lesion-size entry.
    // NOTE(review): descriptions is force-unwrapped; a null value would throw.
    for (AIDiagnosisDescription description in descriptions!) {
      if (description.type == DiagnosisDescriptionEnum.LesionSize) {
        lesionSize =
            AIDiagnosisLesionSize.fromJson(jsonDecode(description.value ?? ""));
      }
    }
    keyPoints = await _queryAllKeyPoints();
    _canvasAffectedKeyPoints.clear();
    _updateCurrContoursPoints();
    _updateCurrKeyPoints();
  }
  /// Writes the canvas-edited contour and the current [lesionSize] back into
  /// [modifiedDataDTO] for detected object [oldIndex].
  ///
  /// Must run BEFORE [currentAiDetectedObjectIndex] changes, since the canvas
  /// points still belong to the previously selected object.
  void _setNewCurrContoursToModifiedDataDTO({required int oldIndex}) {
    List<AIDiagnosisPoint2D> newContours =
        _convertCanvasPoints(_canvasContoursPoints);
    modifiedDataDTO.diagResultsForEachOrgan![0].detectedObjects![oldIndex]
        .contours = newContours;
    List<AIDiagnosisDescription>? descriptions = modifiedDataDTO
        .diagResultsForEachOrgan![0].detectedObjects![oldIndex].descriptions;
    // Refresh the lesion-size description entry with the edited size.
    for (var i = 0; i < descriptions!.length; i++) {
      if (descriptions[i].type == DiagnosisDescriptionEnum.LesionSize) {
        descriptions[i].value = jsonEncode(lesionSize);
      }
    }
    modifiedDataDTO.diagResultsForEachOrgan![0].detectedObjects![oldIndex]
        .descriptions = descriptions;
  }
  /// Builds a lesion-size DTO from the four axis endpoints in [p1234]
  /// (horizontal pair first, then vertical pair) and the new pixel lengths.
  AIDiagnosisLesionSize _getNewLesionSize(List<AIDiagnosisPoint2D> p1234,
      int newHorizontalLengthInPixel, int newVerticalLengthInPixel) {
    final horizontalStart = p1234[0];
    final horizontalEnd = p1234[1];
    final verticalStart = p1234[2];
    final verticalEnd = p1234[3];
    return AIDiagnosisLesionSize(
        horizontalPoint1: horizontalStart,
        horizontalPoint2: horizontalEnd,
        verticalPoint1: verticalStart,
        verticalPoint2: verticalEnd,
        horizontalLengthInPixel: newHorizontalLengthInPixel,
        verticalLengthInPixel: newVerticalLengthInPixel);
  }
  /// Renders the current frame with all contours painted on top, uploads the
  /// full-size PNG plus a downscaled (min ~200 px) preview PNG, and returns
  /// both storage tokens.
  ///
  /// On any failure returns an [ImageUrls] with `isUploaded: false`.
  Future<ImageUrls> _getCurrImageUrls() async {
    try {
      final Uint8List vidImageBytes = currFrame.imageData;

      /// Render the full-size annotated cover image offscreen.
      final Rect offscreenCanvasRect =
          Rect.fromLTWH(0, 0, frameSize.width, frameSize.height);
      final ui.PictureRecorder recorder = ui.PictureRecorder();
      final Canvas offscreenCanvas = Canvas(recorder, offscreenCanvasRect);
      offscreenCanvas.drawImage(
          await decodeImageFromList(vidImageBytes), Offset.zero, Paint());
      _paintAllContours(offscreenCanvas);
      final ui.Image orginalFileImage = await recorder
          .endRecording()
          .toImage(currFrame.width.toInt(), currFrame.height.toInt());
      final orginalFileByteData =
          await orginalFileImage.toByteData(format: ui.ImageByteFormat.png);
      final orginalFileByteDataBuffer =
          orginalFileByteData!.buffer.asUint8List();
      final String aiFileToken = await rpcHelper.rpc.storage.uploadUint8List(
              orginalFileByteDataBuffer,
              "ai_modified_orginal_${remedicalCode}_$currFrameIndex.png",
              rpcHelper.userToken) ??
          '';
      print('coverUrl: $aiFileToken');

      /// Generate the thumbnail preview.
      final double scale = _calcScale(
        srcWidth: currFrame.width.toDouble(),
        srcHeight: currFrame.height.toDouble(),
        minWidth: 200,
        minHeight: 200,
      );
      final int scaledWidth = currFrame.width ~/ scale;
      final int scaledHeight = currFrame.height ~/ scale;
      final Rect previewOffscreenCanvasRect =
          Rect.fromLTWH(0, 0, scaledWidth.toDouble(), scaledHeight.toDouble());
      final ui.PictureRecorder lowLevelRecorder = ui.PictureRecorder();
      final Canvas previewOffscreenCanvas =
          Canvas(lowLevelRecorder, previewOffscreenCanvasRect);
      previewOffscreenCanvas.drawImageRect(orginalFileImage,
          offscreenCanvasRect, previewOffscreenCanvasRect, Paint());
      final ui.Image previewFileImage = await lowLevelRecorder
          .endRecording()
          .toImage(scaledWidth, scaledHeight);
      final previewFileByteData =
          await previewFileImage.toByteData(format: ui.ImageByteFormat.png);
      final previewFileByteDataBuffer =
          previewFileByteData!.buffer.asUint8List();
      final String previewFileUrl = await rpcHelper.rpc.storage.uploadUint8List(
              previewFileByteDataBuffer,
              "ai_modified_preview_${remedicalCode}_$currFrameIndex.png",
              rpcHelper.userToken) ??
          '';
      print('previewFileUrl: $previewFileUrl');
      return ImageUrls(
          aiFileToken: aiFileToken, previewFileUrl: previewFileUrl);
    } catch (e) {
      logger.e('get screenshot failed', e);
      return ImageUrls(aiFileToken: '', previewFileUrl: '', isUploaded: false);
    }
  }
  /// Computes the downscale factor that shrinks a [srcWidth] x [srcHeight]
  /// image until its smaller-relative dimension reaches [minWidth] /
  /// [minHeight], never upscaling (result is clamped to at least 1.0).
  double _calcScale({
    required double srcWidth,
    required double srcHeight,
    required double minWidth,
    required double minHeight,
  }) {
    final widthRatio = srcWidth / minWidth;
    final heightRatio = srcHeight / minHeight;
    return max(1.0, min(widthRatio, heightRatio));
  }
  /// Paints every detected object's contour (dotted) plus its lesion-size
  /// axis lines and X end markers onto [canvas], in image coordinates.
  ///
  /// NOTE(review): this reassigns the instance field [contours] on every
  /// iteration and leaves it pointing at the last object's contour — confirm
  /// callers do not rely on it afterwards.
  void _paintAllContours(Canvas canvas) {
    for (var i = 0;
        i < modifiedDataDTO.diagResultsForEachOrgan![0].detectedObjects!.length;
        i++) {
      contours = modifiedDataDTO
              .diagResultsForEachOrgan![0].detectedObjects![i].contours ??
          [];
      // Dashed, round-cap stroke used for contours and axis lines.
      final contoursPaint = Paint()
        ..color = Colors.green
        ..strokeCap = StrokeCap.round
        ..strokeWidth = 3.0
        ..style = PaintingStyle.stroke;
      // Draw the closed contour as a dotted path.
      if (contours.isNotEmpty) {
        Path path = Path();
        path.moveTo(contours[0].x.toDouble(), contours[0].y.toDouble());
        // Inner `i` shadows the outer object index (intentional but fragile).
        for (int i = 1; i < contours.length; i++) {
          path.lineTo(contours[i].x.toDouble(), contours[i].y.toDouble());
        }
        path.close();
        canvas.drawPath(
            dashPath(
              path,
              dashArray: CircularIntervalList<double>([1, 10]),
            ),
            contoursPaint);
      }
      AIDiagnosisLesionSize currLesionSize = AIDiagnosisLesionSize();
      List<AIDiagnosisDescription>? descriptions = modifiedDataDTO
          .diagResultsForEachOrgan![0].detectedObjects![i].descriptions;
      // Extract the lesion size from the descriptions.
      // Empty descriptions indicate malformed data; skip the object.
      if (descriptions!.isEmpty) {
        continue;
      }
      for (AIDiagnosisDescription description in descriptions) {
        if (description.type == DiagnosisDescriptionEnum.LesionSize) {
          currLesionSize = AIDiagnosisLesionSize.fromJson(
              jsonDecode(description.value ?? ""));
        }
      }
      // Axis endpoints; throws if the lesion size is missing its points.
      final AIDiagnosisPoint2D p1 = currLesionSize.horizontalPoint1!;
      final AIDiagnosisPoint2D p2 = currLesionSize.horizontalPoint2!;
      final AIDiagnosisPoint2D p3 = currLesionSize.verticalPoint1!;
      final AIDiagnosisPoint2D p4 = currLesionSize.verticalPoint2!;
      // Horizontal axis (dashed).
      Path path = Path();
      path.moveTo(p1.x.toDouble(), p1.y.toDouble());
      path.lineTo(p2.x.toDouble(), p2.y.toDouble());
      canvas.drawPath(
          dashPath(
            path,
            dashArray: CircularIntervalList<double>([1, 5]),
          ),
          contoursPaint);
      // Vertical axis (dashed).
      Path path2 = Path();
      path2.moveTo(p3.x.toDouble(), p3.y.toDouble());
      path2.lineTo(p4.x.toDouble(), p4.y.toDouble());
      canvas.drawPath(
          dashPath(
            path2,
            dashArray: CircularIntervalList<double>([1, 5]),
          ),
          contoursPaint);
      // X markers at the four axis endpoints.
      paintX(
        canvas,
        Offset(p1.x.toDouble(), p1.y.toDouble()),
        6.0,
        3,
        Colors.green,
      );
      paintX(
        canvas,
        Offset(p2.x.toDouble(), p2.y.toDouble()),
        6.0,
        3,
        Colors.green,
      );
      paintX(
        canvas,
        Offset(p3.x.toDouble(), p3.y.toDouble()),
        6.0,
        3,
        Colors.green,
      );
      paintX(
        canvas,
        Offset(p4.x.toDouble(), p4.y.toDouble()),
        6.0,
        3,
        Colors.green,
      );
    }
  }
  /// Paints an X-shaped marker centered at [center], with arms of [radius]
  /// pixels, stroke [width] and the given [color].
  void paintX(
      Canvas canvas, Offset center, double radius, double width, Color color) {
    final strokePaint = Paint()
      ..color = color
      ..strokeCap = StrokeCap.round
      ..strokeWidth = width
      ..style = PaintingStyle.stroke;
    final cross = Path()
      ..moveTo(center.dx - radius, center.dy - radius)
      ..lineTo(center.dx + radius, center.dy + radius)
      ..moveTo(center.dx + radius, center.dy - radius)
      ..lineTo(center.dx - radius, center.dy + radius);
    canvas.drawPath(cross, strokePaint);
  }
  /// Returns the AI-module translation for [code] from the measure
  /// language pack.
  String getValuesFromAiLanguage(String code) => measureLanguage.t('ai', code);
  /// Reloads the original AI result, discarding local edits, and refreshes
  /// the conclusion panel.
  ///
  /// Returns a [Future] (the old signature was `void ... async`, which made
  /// the reload unawaitable and hid errors from callers).
  Future<void> resetAIResult() async {
    await _initAIResult();
    update(['ai_conclusion_result']);
  }
  @override
  void onClose() {
    // Detach our resize listener first; cleanup should precede the base
    // class teardown so no callback can fire into a disposed controller.
    Sizer.ins.removeListener(_onWindowResize);
    super.onClose();
  }
  @override
  void onInit() async {
    super.onInit();
    await _getDiagnosisEnumItemsAsync();
    _updateModifierInteractiveLayerSize();
    _updateImagePhysicalSize();
    // Await the initial load so failures surface here instead of escaping
    // as an unawaited future (the original dropped this Future).
    await _initAIResult();
    Sizer.ins.addListener(_onWindowResize);
  }
  /// Window-resize handler: recomputes the frame scale and canvas size after
  /// the next layout pass, then refreshes the canvas layers.
  void _onWindowResize(_) {
    update(['ai_result_modifier']);
    frameSize = Size(currFrame.width.toDouble(), currFrame.height.toDouble());
    WidgetsBinding.instance.addPostFrameCallback((_) {
      final RenderBox box =
          framePlayerKey.currentContext!.findRenderObject() as RenderBox;
      final framePlayerSize = Size(box.size.width, box.size.height);
      // Fit the frame inside the player while preserving aspect ratio.
      _scale = min(framePlayerSize.width / frameSize.width,
          framePlayerSize.height / frameSize.height);
      aiCanvasSize = Size(frameSize.width * _scale, frameSize.height * _scale);
      // NOTE(review): this schedules another post-frame callback that repeats
      // the computation above — looks redundant; confirm before simplifying.
      _updateModifierInteractiveLayerSize();
      _updateCurrKeyPoints();
      _updateCurrContoursPoints();
      _updateCurrAffectedKeyPoints();
      // Refresh the interactive layer and the canvas.
      update(["ai_result_modifier_interactive_layer", "ai_result_canvas"]);
    });
  }
  /// Drag-update handler: in drag mode the backend round-trip is throttled
  /// to one call per 100 ms; in pen mode every update extends the stroke.
  void onMouseDrag(DragUpdateDetails details) {
    if (_mode == AiResultModifierMode.drag) {
      utils.throttle(() {
        _onDragModeCallDragFunction(details.localPosition);
      }, 'onMouseDrag', 100);
    } else if (_mode == AiResultModifierMode.pen) {
      _onPenModeCallDragFunction(details.localPosition);
    }
  }
  /// Drag-end handler: in pen mode, finalizes an in-progress stroke by
  /// merging it with the backend contour, then clears the scratch points.
  void onMouseDragEnd(DragEndDetails details) async {
    switch (_mode) {
      case AiResultModifierMode.drag:
        break;
      case AiResultModifierMode.pen:
        if (_isDrawingNewContours) {
          _isDrawingNewContours = false;
          await _callContourMergeAsync();
          _updateCurrContoursPoints();
          _updateCurrKeyPoints();
        }
        _canvasNewContoursPoints.clear();
        update(['ai_result_canvas']);
        break;
      default:
    }
  }
  /// Drag-start handler (records the starting point).
  void onMouseDragStart(DragDownDetails details) {
    switch (_mode) {
      case AiResultModifierMode.drag:
        _dragStartPoint = details.localPosition;
        // Snapshots used only for drag requests.
        // NOTE(review): these copy list *references*, not contents — if
        // `contours`/`keyPoints` are later mutated in place the snapshots
        // would alias them; confirm the backend replaces rather than mutates.
        contoursOnDragStart = contours;
        keyPointsOnDragStart = keyPoints;
        break;
      case AiResultModifierMode.pen:
        // Only start a new stroke while hovering near an existing contour
        // point (the hover handler fills _canvasPenModeKeyPointIndexes).
        if (_canvasPenModeKeyPointIndexes.isNotEmpty) {
          _isDrawingNewContours = true;
          _dragStartPoint = details.localPosition;
          _canvasNewContoursPoints.clear();
          _canvasNewContoursPoints
              .add(_canvasContoursPoints[_canvasPenModeKeyPointIndexes[0]]);
          _canvasNewContoursPoints.add(_dragStartPoint);
        }
        break;
      default:
    }
  }
  /// Mouse-leave handler: clears the highlighted key points.
  void onMouseExit(PointerExitEvent e) async {
    // Wait 200 ms (hover highlighting lags by up to 100 ms due to throttling).
    await Future.delayed(const Duration(milliseconds: 200));
    _canvasAffectedKeyPoints.clear();
    update(['ai_result_canvas']);
  }
  /// Hover handler: refreshes key-point highlighting (drag mode, throttled
  /// to 100 ms) or the nearest snap point (pen mode, throttled to 10 ms).
  void onMouseHover(PointerHoverEvent e) async {
    if (keyPoints.isEmpty) return;
    switch (_mode) {
      case AiResultModifierMode.drag:
        utils.throttle(() {
          _onDragModeCallHoverFunction(e.localPosition);
        }, 'onMouseHover', 100);
        break;
      case AiResultModifierMode.pen:
        utils.throttle(() {
          _onPenModeCallHoverFunction(e.localPosition);
        }, 'onMouseHover', 10);
        // Offset point = e.localPosition;
        break;
      default:
    }
  }
  /// Triggers the first full render once the widget tree is ready.
  @override
  void onReady() {
    super.onReady();
    _initData();
  }
  /// Saves the modified AI result (plus the rendered cover/preview images)
  /// to the backend, then navigates back on success.
  ///
  /// Shows toast feedback for every outcome; never throws.
  Future<void> saveAIResult({
    String? code,
  }) async {
    PromptBox.toast(i18nBook.realTimeConsultation.uploading.t);
    // Persist the on-canvas edits of the current object before serializing.
    _setNewCurrContoursToModifiedDataDTO(
        oldIndex: currentAiDetectedObjectIndex);
    try {
      final ImageUrls imageUrls = await _getCurrImageUrls();
      if (!imageUrls.isUploaded) {
        logger.e("Url not uploaded");
        PromptBox.toast(i18nBook.user.saveFailed.t);
        return;
      }
      // Determines whether this is an edit of an existing AI selection:
      // - empty code: create a new single-frame vid from the source vid and
      //   update its AI info;
      // - non-empty: update the AI info of an already-edited vid.
      bool isAIEditMode = measureData
          .measureInfoData.remedicalAISelectedInfoCode.isNotNullOrEmpty;
      final result =
          await rpcHelper.rpc.remedical.saveRemedicalAISelectedInfoAsync(
        SaveRemedicalAISelectedInfoRequest(
          token: rpcHelper.userToken,
          remedicalCode: remedicalCode,
          code: isAIEditMode
              ? measureData.measureInfoData.remedicalAISelectedInfoCode
              : null,
          frameIndex: currFrameIndex,
          // diagnosisConclusion: diagnosisOrgan,
          previewFileToken: imageUrls.previewFileUrl,
          aIFileToken: imageUrls.aiFileToken,
          diagnosisData: jsonEncode(modifiedDataDTO),
        ),
      );
      if (result) {
        PromptBox.toast(
            "${i18nBook.user.saveSuccess.t} \r\n ${i18nBook.measure.saveLocation.t + ' > ' + i18nBook.measure.measureImage.t}");
        Get.back();
      } else {
        logger.e("Server result is false");
        PromptBox.toast(i18nBook.user.saveFailed.t);
      }
    } catch (e) {
      logger.e("Operation failed with exception ${e} ");
      PromptBox.toast(i18nBook.user.saveFailed.t);
    }
  }
  /// Loads the AI diagnosis result for the current frame — from the locally
  /// cached result when a single-frame AI edit exists, otherwise from the
  /// backend — filters out malformed objects, and primes all editing state.
  Future<void> _initAIResult() async {
    try {
      var existAIResult = jsonDecode(measureData.aiResults);
      // When aiResults holds exactly one entry, this is a single-frame AI
      // image (edited or original) and can be read straight from
      // measureData; no need to distinguish an AI-edited source from a
      // plain VID here.
      if (measureData
              .measureInfoData.remedicalAISelectedInfoCode.isNotNullOrEmpty &&
          existAIResult.length == 1) {
        resultDTO = AIDiagnosisPerImageDTO.fromJson(existAIResult[0]);
      } else {
        final result =
            await rpcHelper.rpc.remedical.getRemedicalDiagnosisDataAsync(
          GetRemedicalDiagnosisDataRequest(
            token: rpcHelper.userToken,
            remedicalCode: remedicalCode,
            frameIndex: currFrameIndex,
          ),
        );
        resultDTO = AIDiagnosisPerImageDTO.fromJson(jsonDecode(result));
      }
      // Drop detected objects without descriptions (malformed data).
      List<AIDetectedObject> legalObjs = [];
      var tempResultDto = resultDTO;
      var rawObjs = tempResultDto.diagResultsForEachOrgan![0].detectedObjects!;
      for (var detectedObject in rawObjs) {
        var isLegalObject = detectedObject.descriptions?.isNotEmpty ?? false;
        if (isLegalObject) {
          legalObjs.add(detectedObject);
        }
      }
      rawObjs.clear();
      rawObjs.addAll(legalObjs);
      // NOTE(review): tempResultDto aliases resultDTO, so modifiedDataDTO and
      // resultDTO share the same filtered object list — confirm intended.
      modifiedDataDTO = tempResultDto;
      contours = resultDTO.diagResultsForEachOrgan![0]
              .detectedObjects![currentAiDetectedObjectIndex].contours ??
          [];
      List<AIDiagnosisDescription>? descriptions = resultDTO
          .diagResultsForEachOrgan![0]
          .detectedObjects![currentAiDetectedObjectIndex]
          .descriptions;
      // Scan descriptions for the lesion-size entry.
      for (AIDiagnosisDescription description in descriptions!) {
        if (description.type == DiagnosisDescriptionEnum.LesionSize) {
          lesionSize = AIDiagnosisLesionSize.fromJson(
              jsonDecode(description.value ?? ""));
        }
      }
      keyPoints = await _queryAllKeyPoints();
      _canvasAffectedKeyPoints.clear();
      _updateCurrContoursPoints();
      _updateCurrKeyPoints();
      update(['ai_result_canvas', 'ai_result_panel', 'ai_index_tag']);
    } catch (e) {
      logger.e('load ai result failed', e);
    }
  }
  /// Recomputes [frameSize], [_scale] and [aiCanvasSize] from the rendered
  /// player box after the next frame, then refreshes the interactive layer.
  void _updateModifierInteractiveLayerSize() {
    frameSize = Size(currFrame.width.toDouble(), currFrame.height.toDouble());
    WidgetsBinding.instance.addPostFrameCallback((_) {
      final RenderBox box =
          framePlayerKey.currentContext!.findRenderObject() as RenderBox;
      final framePlayerSize = Size(box.size.width, box.size.height);
      // Fit the frame inside the player while preserving aspect ratio.
      _scale = min(framePlayerSize.width / frameSize.width,
          framePlayerSize.height / frameSize.height);
      aiCanvasSize = Size(frameSize.width * _scale, frameSize.height * _scale);

      /// Refresh the interactive layer.
      update(["ai_result_modifier_interactive_layer"]);
    });
  }
  /// Formats a pixel [length] as a physical length with unit, e.g. "1.23 cm",
  /// using the per-pixel size and unit derived in [_updateImagePhysicalSize].
  String _countLesionLengthWithUnit(int length) {
    // toStringAsFixed already returns a String; the previous extra
    // `.toString()` call was redundant.
    final String lengthStr =
        (length * _unitsPhysicalPixels).toStringAsFixed(2);
    return "$lengthStr $_xUnit";
  }
  /// Derives the physical size represented by one image pixel and the
  /// display unit from the application's first visual-area viewport.
  void _updateImagePhysicalSize() {
    // Physical width of the viewport region divided by frame pixel width.
    // NOTE(review): the viewport access is force-unwrapped; a null viewport
    // would throw here.
    _unitsPhysicalPixels =
        (application.visuals[0].visualAreas[0].viewport?.region.width)! /
            (application.frameData!.width).toDouble();
    VidUsUnit targetUnit =
        application.visuals[0].visualAreas[0].viewport?.xUnit ?? VidUsUnit.cm;
    _xUnit = UnitDescriptionMap.getDesc(targetUnit);
  }
  /// Snap-close check for pen mode: when the free-drawn tail comes within
  /// 6 px of an existing contour point, closes the new stroke onto that
  /// point and merges the result via the backend.
  ///
  /// Returns a [Future] so callers can await the merge (the old signature
  /// was `void ... async`, which made the work unawaitable).
  Future<void> _autoCloseContours() async {
    // Require a minimum stroke length before considering auto-close.
    if (_canvasNewContoursPoints.length < 6) return;
    double minDistance = double.infinity;
    int nearestKeyPointIndex = -1;
    final lastPoint = _canvasNewContoursPoints.last;
    // Find the existing contour point nearest to the stroke tail.
    for (int i = 0; i < canvasContoursPoints.length; i++) {
      final point = canvasContoursPoints[i];
      final double distance = (point - lastPoint).distance;
      if (distance < minDistance) {
        minDistance = distance;
        nearestKeyPointIndex = i;
      }
    }
    if (minDistance < 6) {
      _canvasPenModeKeyPointIndexes.add(nearestKeyPointIndex);
      _canvasNewContoursPoints.add(canvasContoursPoints[nearestKeyPointIndex]);
      _isDrawingNewContours = false;
      await _callContourMergeAsync();
      _updateCurrContoursPoints();
      _updateCurrKeyPoints();
    }
  }
  /// Asks the backend to merge the freshly drawn stroke into the current
  /// contour, then refreshes [contours], [lesionSize] and [keyPoints] from
  /// the response.
  ///
  /// Always returns true; failures propagate as exceptions.
  Future<bool> _callContourMergeAsync() async {
    final ContourMergeResult result =
        await rpcHelper.rpc.aIDiagnosis.contourMergeAsync(
      ContourMergeRequest(
        token: rpcHelper.userToken,
        contourPoints: contours,
        lesionSize: lesionSize,
        drawingNewContourPoints: _convertCanvasPoints(_canvasNewContoursPoints),
      ),
    );
    contours = result.dstContours ?? [];
    lesionSize = result.dstLesionSize;
    keyPoints = await _queryAllKeyPoints();
    return true;
  }
  /// Converts canvas-space [points] to image-space API points by undoing
  /// the display scale (truncating division).
  List<AIDiagnosisPoint2D> _convertCanvasPoints(List<Offset> points) {
    return [
      for (final point in points)
        AIDiagnosisPoint2D(x: point.dx ~/ _scale, y: point.dy ~/ _scale),
    ];
  }
  /// Converts key points from image coordinates to canvas coordinates and,
  /// as a side effect, records the indexes of the four lesion-axis endpoints
  /// in [_canvasLesionSizePointsIndexes] and refreshes the length labels.
  List<Offset> _convertKeyPoints(List<DiagnosisKeyPointDTO> points) {
    List<Offset> result = [];
    // Slots: [0] horizontal-left, [1] horizontal-right,
    //        [2] vertical-up,     [3] vertical-down.
    List<int> pointIndexes = List.generate(4, (_) => 0);
    for (int i = 0; i < points.length; i++) {
      final point = points[i];
      if (point.point == null) continue;
      result.add(Offset(point.point!.x.toDouble() * _scale,
          point.point!.y.toDouble() * _scale));
      // NOTE(review): `i` indexes the input list; if any null points were
      // skipped above, these indexes no longer line up with `result` —
      // confirm null points cannot occur in practice.
      if (point.type != DiagnosisKeyPointType.OtherKeyPoints) {
        switch (point.type) {
          case DiagnosisKeyPointType.HorizontalPointLeft:
            pointIndexes[0] = i;
            break;
          case DiagnosisKeyPointType.HorizontalPointRight:
            pointIndexes[1] = i;
            break;
          case DiagnosisKeyPointType.VerticalPointUp:
            pointIndexes[2] = i;
            break;
          case DiagnosisKeyPointType.VerticalPointDown:
            pointIndexes[3] = i;
            break;
          default:
        }
      }
    }
    _canvasLesionSizePointsIndexes = pointIndexes;
    _updateLesionSizeAndRatio();
    return result;
  }
  /// Converts image-space API [points] to canvas coordinates by applying
  /// the display scale.
  List<Offset> _convertPoints(List<AIDiagnosisPoint2D> points) {
    return [
      for (final point in points)
        Offset(point.x.toDouble() * _scale, point.y.toDouble() * _scale),
    ];
  }
  /// Fetches the enum metadata used to render AI diagnosis results into
  /// [_diagnosisEnumItems].
  Future<void> _getDiagnosisEnumItemsAsync() async {
    final getDiagnosisEnumItems =
        await rpcHelper.rpc.aIDiagnosis.getDiagnosisEnumItemsAsync(
      GetDiagnosisEnumItemsRequest(
        token: rpcHelper.userToken,
      ),
    );
    _diagnosisEnumItems = getDiagnosisEnumItems.source ?? [];
  }
  /// Triggers the initial render of the whole modifier view.
  void _initData() {
    update(["ai_result_modifier"]);
  }
  /// Drag handler for drag mode (throttled to 100 ms): asks the backend for
  /// the dragged contour and repaints on success.
  void _onDragModeCallDragFunction(Offset pos) async {
    // Convert drag start/end from canvas to image coordinates.
    AIDiagnosisPoint2D startPoint = AIDiagnosisPoint2D(
        x: _dragStartPoint.dx ~/ _scale, y: _dragStartPoint.dy ~/ _scale);
    AIDiagnosisPoint2D endPoint =
        AIDiagnosisPoint2D(x: pos.dx ~/ _scale, y: pos.dy ~/ _scale);
    final bool success = await _queryDragResult(startPoint, endPoint);
    if (success) {
      _updateCurrKeyPoints();
      _updateCurrContoursPoints();
      _updateCurrAffectedKeyPoints();
      update(["ai_result_canvas"]);
    }
  }
  /// Hover handler for drag mode (throttled to 100 ms): refreshes which key
  /// points would be affected by a drag from the mouse position.
  void _onDragModeCallHoverFunction(Offset localPosition) async {
    final mousePos = AIDiagnosisPoint2D(
        x: localPosition.dx ~/ _scale, y: localPosition.dy ~/ _scale);
    affectedKeyPointIndexes = await _queryAffectedKeyPoints(mousePos);
    _updateCurrAffectedKeyPoints();
    update(["ai_result_canvas"]);
  }
  /// Drag handler for pen mode: extends the free-drawn stroke, inserting
  /// interpolated points so stored samples stay roughly 8 px apart, then
  /// checks whether the stroke can auto-close.
  void _onPenModeCallDragFunction(Offset pos) async {
    if (!_isDrawingNewContours) return;
    // Spacing between stored points (stroke density).
    const double pointDistance = 8;
    final double distance = (pos - _canvasNewContoursPoints.last).distance;
    if (distance >= pointDistance) {
      int numPointsToInsert = (distance / pointDistance).ceil() - 1; // points to insert
      // NOTE(review): `.last` changes as points are appended, so each step
      // interpolates from the previous inserted point rather than the
      // original tail — confirm this curve-like fill is intended.
      for (int i = 0; i < numPointsToInsert; i++) {
        double t = (i + 1) / (numPointsToInsert + 1);
        Offset interpolatedPoint = Offset(
          _canvasNewContoursPoints.last.dx +
              t * (pos.dx - _canvasNewContoursPoints.last.dx),
          _canvasNewContoursPoints.last.dy +
              t * (pos.dy - _canvasNewContoursPoints.last.dy),
        );
        _canvasNewContoursPoints.add(interpolatedPoint);
      }
      _canvasNewContoursPoints.add(pos);
      update(["ai_result_canvas"]);
    }
    // Fire-and-forget: the merge triggered inside is not awaited here.
    _autoCloseContours();
  }
  782. /// 在画轮廓模式下,通过鼠标位置更新最近的关键点【每隔10ms触发一次】
  783. void _onPenModeCallHoverFunction(Offset localPosition) async {
  784. double minDistance = double.infinity;
  785. int nearestKeyPointIndex = -1;
  786. for (int i = 0; i < canvasContoursPoints.length; i++) {
  787. final point = canvasContoursPoints[i];
  788. final double distance = (point - localPosition).distance;
  789. if (distance < minDistance) {
  790. minDistance = distance;
  791. nearestKeyPointIndex = i;
  792. }
  793. }
  794. _canvasPenModeKeyPointIndexes.clear();
  795. if (minDistance < 10) {
  796. _canvasPenModeKeyPointIndexes.add(nearestKeyPointIndex);
  797. }
  798. update(["ai_result_canvas"]);
  799. }
  800. /// 根据鼠标位置查询受影响的关键点
  801. Future<List<int>> _queryAffectedKeyPoints(AIDiagnosisPoint2D mousePos) async {
  802. try {
  803. final List<int> result =
  804. await rpcHelper.rpc.aIDiagnosis.affectedKeyPointsByDragActionAsync(
  805. AffectedKeyPointsByDragActionRequest(
  806. token: rpcHelper.userToken,
  807. keyPoints: keyPoints,
  808. mousePoint: mousePos,
  809. ),
  810. );
  811. // print(result);
  812. return result;
  813. } catch (e) {
  814. return [];
  815. }
  816. }
  817. /// 查询所有关键点【需要先存好contours和lesionSize】
  818. Future<List<DiagnosisKeyPointDTO>> _queryAllKeyPoints() async {
  819. try {
  820. final List<DiagnosisKeyPointDTO> result =
  821. await rpcHelper.rpc.aIDiagnosis.getKeyPointsOfContourAsync(
  822. GetKeyPointsOfContourRequest(
  823. token: rpcHelper.userToken,
  824. contours: contours,
  825. lesionSize: lesionSize,
  826. ),
  827. );
  828. return result;
  829. } catch (e) {
  830. return [];
  831. }
  832. }
  833. /// 查询拖拽结果集合【需要先存好 contoursOnDragStart 和 keyPointsOnDragStart】
  834. Future<bool> _queryDragResult(
  835. AIDiagnosisPoint2D startPoint, AIDiagnosisPoint2D endPoint) async {
  836. try {
  837. final ContourAndKeyPointsAfterDragResult result =
  838. await rpcHelper.rpc.aIDiagnosis.contourAndKeyPointsAfterDragAsync(
  839. ContourAndKeyPointsAfterDragRequest(
  840. token: rpcHelper.userToken,
  841. contours: contoursOnDragStart,
  842. keyPoints: keyPointsOnDragStart,
  843. startPoint: startPoint,
  844. endPoint: endPoint,
  845. ),
  846. );
  847. keyPoints = result.dstKeyPoints ?? [];
  848. contours = result.dstContours ?? [];
  849. affectedKeyPointIndexes = result.affectedKeyPointIndexes!;
  850. return true;
  851. } catch (e) {
  852. return false;
  853. }
  854. }
  855. // 根据病灶四个点位置横纵比参数
  856. void _updateLesionSizeAndRatio() {
  857. if (_canvasLesionSizePointsIndexes.length != 4) return;
  858. if (keyPoints.length < 4) return;
  859. final pIndexs = _canvasLesionSizePointsIndexes;
  860. final p1 = keyPoints[pIndexs[0]].point!;
  861. final p2 = keyPoints[pIndexs[1]].point!;
  862. final p3 = keyPoints[pIndexs[2]].point!;
  863. final p4 = keyPoints[pIndexs[3]].point!;
  864. /// 计算 p1 到 p2 的像素距离 更新到 _horizontalLengthInPixel
  865. /// 计算 p3 到 p4 的像素距离 更新到 _verticalLengthInPixel
  866. double _horizontalLength =
  867. (Offset(p1.x.toInt().toDouble(), p1.y.toInt().toDouble()) -
  868. Offset(p2.x.toInt().toDouble(), p2.y.toInt().toDouble()))
  869. .distance;
  870. _horizontalLengthInPixel = _horizontalLength.ceil();
  871. double _verticalLength =
  872. (Offset(p3.x.toInt().toDouble(), p3.y.toInt().toDouble()) -
  873. Offset(p4.x.toInt().toDouble(), p4.y.toInt().toDouble()))
  874. .distance;
  875. _verticalLengthInPixel = _verticalLength.ceil();
  876. lesionSize = _getNewLesionSize(
  877. [p1, p2, p3, p4], _horizontalLengthInPixel, _verticalLengthInPixel);
  878. update(['ai_result_lesion_size', 'ai_result_lesion_ratio']);
  879. }
  880. /// [⭐ _canvasAffectedKeyPoints ] 根据当前的受影响关键点下标更新受影响关键点集
  881. void _updateCurrAffectedKeyPoints() {
  882. _canvasAffectedKeyPoints.clear();
  883. if (keyPoints.isEmpty) return;
  884. for (int i = 0; i < keyPoints.length; i++) {
  885. if (affectedKeyPointIndexes.contains(i)) {
  886. _canvasAffectedKeyPoints.add(Offset(
  887. keyPoints[i].point!.x.toDouble() * _scale,
  888. keyPoints[i].point!.y.toDouble() * _scale));
  889. }
  890. }
  891. }
/// [⭐ _canvasContoursPoints ] Refreshes the cached canvas-space contour
/// points by converting [contours] from API coordinates via [_convertPoints].
void _updateCurrContoursPoints() {
_canvasContoursPoints = _convertPoints(contours);
}
  896. /// [⭐ _canvasKeyPoints ] 更新当前关键点
  897. void _updateCurrKeyPoints() async {
  898. _canvasKeyPoints = _convertKeyPoints(keyPoints);
  899. }
  900. }
/// Interaction modes of the AI result modifier canvas.
enum AiResultModifierMode {
/// Drag mode.
drag,
/// Pen (contour drawing) mode.
pen,
/// Screenshot mode.
screenshot,
}
  909. ///存储服务扩展类
  910. extension StorageServiceExt on StorageService {
  911. ///鉴权 fileName 为空则接口报错,所以此处设置一个默认值
  912. Future<StorageServiceSettingDTO> getAuth({
  913. String? fileName,
  914. bool? isRechristen,
  915. List<DataItemDTO>? urlParams,
  916. List<DataItemDTO>? headerParams,
  917. String? requestMethod,
  918. required String userToken,
  919. }) async {
  920. try {
  921. final result = await getAuthorizationAsync(FileServiceRequest(
  922. token: userToken,
  923. fileName: fileName ?? "dat",
  924. isRechristen: isRechristen ?? true,
  925. urlParams: urlParams,
  926. headerParams: headerParams,
  927. requestMethod: requestMethod,
  928. ));
  929. return result;
  930. } catch (e) {
  931. return StorageServiceSettingDTO();
  932. }
  933. }
  934. ///文件上传(UInt8List)
  935. Future<String?> uploadUint8List(Uint8List buffer, String name, String token,
  936. [bool? isRechristen]) async {
  937. try {
  938. var nameInfos = name.split('.');
  939. final auth = await getAuth(
  940. fileName: nameInfos.last,
  941. isRechristen: isRechristen,
  942. userToken: token,
  943. );
  944. Map<String, String> params = {};
  945. params['Authorization'] = auth.authorization!;
  946. params['ContentType'] = auth.contentType!;
  947. final response = await http
  948. .put(
  949. Uri.parse(auth.storageUrl!),
  950. body: buffer,
  951. headers: params,
  952. )
  953. .timeout(
  954. const Duration(seconds: 30),
  955. );
  956. if (response.statusCode == 200) {
  957. return auth.storageUrl;
  958. }
  959. } catch (e) {
  960. logger.e('StorageServiceExt uploadUint8List ex:$e');
  961. }
  962. return null;
  963. }
  964. }
  965. class ImageUrls {
  966. /// 原始图像地址
  967. String aiFileToken;
  968. /// 缩略图地址
  969. String previewFileUrl;
  970. /// 是否已经上传
  971. bool isUploaded = true;
  972. ImageUrls({
  973. required this.aiFileToken,
  974. required this.previewFileUrl,
  975. this.isUploaded = true,
  976. });
  977. }