import 'dart:convert';
import 'dart:math';
import 'dart:typed_data';
import 'dart:ui' as ui;

import 'package:fis_common/index.dart';
import 'package:fis_common/logger/logger.dart';
import 'package:fis_i18n/i18n.dart';
import 'package:fis_jsonrpc/rpc.dart';
import 'package:fis_measure/process/language/measure_language.dart';
import 'package:fis_measure/process/visual/visual.dart';
import 'package:fis_measure/process/workspace/measure_data_controller.dart';
import 'package:fis_measure/process/workspace/rpc_helper.dart';
import 'package:fis_measure/process/workspace/visual_loader.dart';
import 'package:fis_measure/utils/prompt_box.dart';
import 'package:fis_measure/values/unit_desc.dart';
import 'package:fis_measure/view/ai_result_modifier/state.dart';
import 'package:fis_measure/view/mobile_view/widgets/throttle.dart' as utils;
import 'package:flutter/gestures.dart';
import 'package:flutter/material.dart';
import 'package:flutter/rendering.dart';
import 'package:get/get.dart';
import 'package:http/http.dart' as http;
import 'package:vid/us/vid_us_image.dart';
import 'package:vid/us/vid_us_unit.dart';
class AiResultModifierController extends GetxController {
  final rpcHelper = Get.find<RPCHelper>();

  MeasureDataController get measureData => Get.find<MeasureDataController>();

  /// Backend language pack controller
  // final languageService = Get.find<LanguageService>();

  final state = AiResultModifierState();

  /// Constructor parameters [image code, image frame index, image metadata, edited image code]
  final String remedicalCode;
  final int currFrameIndex;
  final VidUsImage currFrame;

  /// Complete data from the initial query
  AIDiagnosisPerImageDTO resultDTO = AIDiagnosisPerImageDTO();

  /// Complete data after editing [sent back to the backend]
  AIDiagnosisPerImageDTO modifiedDataDTO = AIDiagnosisPerImageDTO();

  // Contour points for canvas drawing
  List<Offset> _canvasContoursPoints = [];
  // Key points for canvas drawing [drag mode]
  List<Offset> _canvasKeyPoints = [];
  // Highlighted key points for canvas drawing [drag mode]
  final List<Offset> _canvasAffectedKeyPoints = [];
  // Lesion-size horizontal/vertical segments for canvas drawing [four point indexes]
  List<int> _canvasLesionSizePointsIndexes = [];
  // Contour key point indexes for canvas drawing [pen mode]
  final List<int> _canvasPenModeKeyPointIndexes = [];
  // New contour points being drawn on the canvas [pen mode]
  final List<Offset> _canvasNewContoursPoints = [];
  // AI point set
  final List<Offset> _aiPoints = [];
  // Lesion conclusion list
  List<EnumItemDTO> _diagnosisEnumItems = [];
  // Current horizontal length in pixels
  int _horizontalLengthInPixel = 0;
  // Current vertical length in pixels
  int _verticalLengthInPixel = 0;
  // Current AI lesion index
  int currentAiDetectedObjectIndex = 0;
  // Key of the frame player area
  GlobalKey framePlayerKey = GlobalKey();
  // Key of the screenshot capture area
  GlobalKey captureAreaKey = GlobalKey();
  // Size of the AI canvas widget
  Size aiCanvasSize = Size.zero;
  // Actual size of the image
  Size frameSize = Size.zero;
  // Image scale factor
  double _scale = 1.0;
  // Physical length represented by one image pixel
  double _unitsPhysicalPixels = 0.0;
  // Physical unit of the image
  String _xUnit = '';
  // Current contour points
  List<AIDiagnosisPoint2D> contours = [];
  // Current lesion size
  AIDiagnosisLesionSize? lesionSize;
  // Current key points
  List<DiagnosisKeyPointDTO> keyPoints = [];
  // Indexes of the currently affected (highlighted) key points
  List<int> affectedKeyPointIndexes = [];
  // Current operation mode
  AiResultModifierMode _mode = AiResultModifierMode.drag;
  // Whether a new contour is currently being drawn
  bool _isDrawingNewContours = false;
  // Drag start point
  Offset _dragStartPoint = Offset.zero;
  // Contour points at drag start [only used for requests]
  List<AIDiagnosisPoint2D> contoursOnDragStart = [];
  // Key points at drag start [only used for requests]
  List<DiagnosisKeyPointDTO> keyPointsOnDragStart = [];

  /// Measurement language pack
  final measureLanguage = MeasureLanguage();

  AiResultModifierController({
    required this.remedicalCode,
    required this.currFrameIndex,
    required this.currFrame,
  }) {
    print('AiResultModifierController created');
  }
  /// Multiple AI lesions
  List<AIDetectedObject> get aiDetectedObjectList =>
      modifiedDataDTO.diagResultsForEachOrgan?.first.detectedObjects ?? [];

  List<Offset> get aiPoints => _aiPoints;

  List<Offset> get canvasAffectedKeyPoints => _canvasAffectedKeyPoints;

  List<Offset> get canvasContoursPoints => _canvasContoursPoints;

  List<Offset> get canvasKeyPoints => _canvasKeyPoints;

  List<int> get canvasLesionSizePointsIndexes => _canvasLesionSizePointsIndexes;

  List<Offset> get canvasNewContoursPoints => _canvasNewContoursPoints;

  List<int> get canvasPenModeKeyPointIndexes => _canvasPenModeKeyPointIndexes;

  AiResultModifierMode get currMode => _mode;

  List<EnumItemDTO> get diagnosisEnumItems => _diagnosisEnumItems;

  /// Current organ
  DiagnosisOrganEnum get diagnosisOrgan =>
      modifiedDataDTO.diagResultsForEachOrgan?.first.organ ??
      DiagnosisOrganEnum.Null;

  /// Horizontal length of the lesion, with unit
  String get horizontalLength =>
      _countLesionLengthWithUnit(_horizontalLengthInPixel);

  /// Vertical length of the lesion, with unit
  String get verticalLength =>
      _countLesionLengthWithUnit(_verticalLengthInPixel);

  /// Lesion aspect ratio (vertical / horizontal)
  String get lesionRatio =>
      _verticalLengthInPixel / _horizontalLengthInPixel > 1 ? '> 1' : '< 1';
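  // A worked example with hypothetical values (not from the original source):
  //   _verticalLengthInPixel = 70, _horizontalLengthInPixel = 100
  //   70 / 100 = 0.7  -> lesionRatio == '< 1'
  //   120 / 100 = 1.2 -> lesionRatio == '> 1'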
  /// Switch the operation mode
  void changeModifierMode(AiResultModifierMode newMode) {
    if (_mode == newMode) return;
    _mode = newMode;
    _canvasAffectedKeyPoints.clear();
    update(['ai_result_modifier', 'ai_mode_change_buttons']);
  }

  /// Switch the active AI lesion
  Future<void> changeAiDetectedObjectIndex(int index) async {
    _setNewCurrContoursToModifiedDataDTO(
        oldIndex: currentAiDetectedObjectIndex);
    currentAiDetectedObjectIndex = index;
    await _updateContoursByIndex(index);
    update(['ai_result_canvas', 'ai_conclusion_result', 'ai_index_tag']);
  }

  /// Switch the lesion contour
  Future<void> _updateContoursByIndex(int index) async {
    contours = modifiedDataDTO
            .diagResultsForEachOrgan![0].detectedObjects![index].contours ??
        [];
    List<AIDiagnosisDescription>? descriptions = modifiedDataDTO
        .diagResultsForEachOrgan![0].detectedObjects![index].descriptions;
    // Iterate descriptions to extract the lesion size
    for (AIDiagnosisDescription description in descriptions!) {
      if (description.type == DiagnosisDescriptionEnum.LesionSize) {
        lesionSize =
            AIDiagnosisLesionSize.fromJson(jsonDecode(description.value ?? ""));
      }
    }
    keyPoints = await _queryAllKeyPoints();
    _canvasAffectedKeyPoints.clear();
    _updateCurrContoursPoints();
    _updateCurrKeyPoints();
  }

  /// Write the current contour points back into modifiedDataDTO
  /// [must be called before currentAiDetectedObjectIndex is updated]
  void _setNewCurrContoursToModifiedDataDTO({required int oldIndex}) {
    List<AIDiagnosisPoint2D> newContours =
        _convertCanvasPoints(_canvasContoursPoints);
    modifiedDataDTO.diagResultsForEachOrgan![0].detectedObjects![oldIndex]
        .contours = newContours;
    List<AIDiagnosisDescription>? descriptions = modifiedDataDTO
        .diagResultsForEachOrgan![0].detectedObjects![oldIndex].descriptions;
    // Iterate descriptions to update the lesion size
    for (var i = 0; i < descriptions!.length; i++) {
      if (descriptions[i].type == DiagnosisDescriptionEnum.LesionSize) {
        descriptions[i].value = jsonEncode(lesionSize);
      }
    }
    modifiedDataDTO.diagResultsForEachOrgan![0].detectedObjects![oldIndex]
        .descriptions = descriptions;
  }

  /// Build a new lesion size from the four endpoints and their pixel lengths
  AIDiagnosisLesionSize _getNewLesionSize(List<AIDiagnosisPoint2D> p1234,
      int newHorizontalLengthInPixel, int newVerticalLengthInPixel) {
    return AIDiagnosisLesionSize(
        horizontalPoint1: p1234[0],
        horizontalPoint2: p1234[1],
        verticalPoint1: p1234[2],
        verticalPoint2: p1234[3],
        horizontalLengthInPixel: newHorizontalLengthInPixel,
        verticalLengthInPixel: newVerticalLengthInPixel);
  }
  /// Capture the current cover image and upload it
  Future<String> _getCurrCoverUrl() async {
    changeModifierMode(AiResultModifierMode.screenshot);
    await Future.delayed(const Duration(milliseconds: 20), () async {});
    try {
      final RenderRepaintBoundary? boundary = captureAreaKey.currentContext
          ?.findRenderObject() as RenderRepaintBoundary?;
      if (boundary == null) return '';
      final Uint8List imageBytes = currFrame.imageData;
      final ui.Image traceImageBytes = await boundary.toImage();
      final Rect traceCanvasRect =
          Rect.fromLTWH(0, 0, aiCanvasSize.width, aiCanvasSize.height);
      final Rect offscreenCanvasRect =
          Rect.fromLTWH(0, 0, frameSize.width, frameSize.height);
      final ui.PictureRecorder recorder = ui.PictureRecorder();
      final Canvas offscreenCanvas = Canvas(recorder, offscreenCanvasRect);
      // Draw the original frame, then draw the trace layer scaled onto it
      offscreenCanvas.drawImage(
          await decodeImageFromList(imageBytes), Offset.zero, Paint());
      offscreenCanvas.drawImageRect(
          traceImageBytes, traceCanvasRect, offscreenCanvasRect, Paint());
      final ui.Image combinedImage = await recorder
          .endRecording()
          .toImage(currFrame.width.toInt(), currFrame.height.toInt());
      final byteData =
          await combinedImage.toByteData(format: ui.ImageByteFormat.png);
      final pngBytes = byteData!.buffer.asUint8List();
      final String coverUrl = await rpcHelper.rpc.storage.uploadUint8List(
              pngBytes,
              "ai_modified_${remedicalCode}_$currFrameIndex.jpg",
              rpcHelper.userToken) ??
          '';
      print('coverUrl: $coverUrl');
      return coverUrl;
    } on Exception catch (e) {
      logger.e('get screenshot failed', e);
      return '';
    }
  }

  /// Get the translated value for the AI module
  String getValuesFromAiLanguage(String code) {
    final value = measureLanguage.t('ai', code);
    return value;
  }

  /// Reset the AI result
  void resetAIResult() {
    _initAIResult();
  }

  @override
  void onClose() {
    super.onClose();
    print("AiResultModifierController close");
  }
  @override
  void onInit() async {
    super.onInit();
    await _getDiagnosisEnumItemsAsync();
    _updateModifierInteractiveLayerSize();
    _updateImagePhysicalSize();
    _initAIResult();
  }

  /// Mouse drag
  void onMouseDrag(DragUpdateDetails details) {
    switch (_mode) {
      case AiResultModifierMode.drag:
        utils.throttle(() {
          _onDragModeCallDragFunction(details.localPosition);
        }, 'onMouseDrag', 100);
        break;
      case AiResultModifierMode.pen:
        _onPenModeCallDragFunction(details.localPosition);
        break;
      default:
    }
  }

  /// Mouse drag end
  void onMouseDragEnd(DragEndDetails details) async {
    switch (_mode) {
      case AiResultModifierMode.drag:
        break;
      case AiResultModifierMode.pen:
        if (_isDrawingNewContours) {
          _isDrawingNewContours = false;
          await _callContourMergeAsync();
          _updateCurrContoursPoints();
          _updateCurrKeyPoints();
        }
        _canvasNewContoursPoints.clear();
        update(['ai_result_canvas']);
        break;
      default:
    }
  }

  /// Mouse drag start [records the start point]
  void onMouseDragStart(DragDownDetails details) {
    switch (_mode) {
      case AiResultModifierMode.drag:
        _dragStartPoint = details.localPosition;
        contoursOnDragStart = contours;
        keyPointsOnDragStart = keyPoints;
        break;
      case AiResultModifierMode.pen:
        if (_canvasPenModeKeyPointIndexes.isNotEmpty) {
          _isDrawingNewContours = true;
          _dragStartPoint = details.localPosition;
          _canvasNewContoursPoints.clear();
          _canvasNewContoursPoints
              .add(_canvasContoursPoints[_canvasPenModeKeyPointIndexes[0]]);
          _canvasNewContoursPoints.add(_dragStartPoint);
        }
        break;
      default:
    }
  }

  /// Mouse leaves the area
  void onMouseExit(PointerExitEvent e) async {
    // Delay 200 ms (highlight updates from the mouse position are throttled by 100 ms)
    await Future.delayed(const Duration(milliseconds: 200));
    _canvasAffectedKeyPoints.clear();
    update(['ai_result_canvas']);
  }

  /// Mouse hover move
  void onMouseHover(PointerHoverEvent e) async {
    if (keyPoints.isEmpty) return;
    switch (_mode) {
      case AiResultModifierMode.drag:
        utils.throttle(() {
          _onDragModeCallHoverFunction(e.localPosition);
        }, 'onMouseHover', 100);
        break;
      case AiResultModifierMode.pen:
        utils.throttle(() {
          _onPenModeCallHoverFunction(e.localPosition);
        }, 'onMouseHover', 10);
        // Offset point = e.localPosition;
        break;
      default:
    }
  }

  @override
  void onReady() {
    super.onReady();
    _initData();
  }
  /// Save the AI modification result
  Future<void> saveAIResult({
    String? code,
  }) async {
    PromptBox.toast("上传中...");
    await changeAiDetectedObjectIndex(0);
    try {
      final coverUrl = await _getCurrCoverUrl();
      final result =
          await rpcHelper.rpc.remedical.saveRemedicalAISelectedInfoAsync(
        SaveRemedicalAISelectedInfoRequest(
          token: rpcHelper.userToken,
          remedicalCode: remedicalCode,
          code: code,
          frameIndex: currFrameIndex,
          previewFileToken: coverUrl,
          diagnosisData: jsonEncode(modifiedDataDTO),
        ),
      );
      if (result) {
        PromptBox.toast(
            "保存修改成功,${i18nBook.measure.saveLocation.t + ' > AI 图像'}");
        Get.back();
      } else {
        PromptBox.toast("保存失败,请重试");
      }
    } catch (e) {
      PromptBox.toast("保存失败,请重试");
    }
  }
  /// Load the AI result and trigger drawing
  Future<void> _initAIResult() async {
    try {
      if (measureData
          .measureImageData.remedicalAISelectedInfoCode.isNotNullOrEmpty) {
        resultDTO = AIDiagnosisPerImageDTO.fromJson(
            jsonDecode(measureData.aiResults)[0]);
      } else {
        final result =
            await rpcHelper.rpc.remedical.getRemedicalDiagnosisDataAsync(
          GetRemedicalDiagnosisDataRequest(
            token: rpcHelper.userToken,
            remedicalCode: remedicalCode,
            frameIndex: currFrameIndex,
          ),
        );
        resultDTO = AIDiagnosisPerImageDTO.fromJson(jsonDecode(result));
      }
      modifiedDataDTO = resultDTO;
      contours = resultDTO.diagResultsForEachOrgan![0]
              .detectedObjects![currentAiDetectedObjectIndex].contours ??
          [];
      List<AIDiagnosisDescription>? descriptions = resultDTO
          .diagResultsForEachOrgan![0]
          .detectedObjects![currentAiDetectedObjectIndex]
          .descriptions;
      // Iterate descriptions to extract the lesion size
      for (AIDiagnosisDescription description in descriptions!) {
        if (description.type == DiagnosisDescriptionEnum.LesionSize) {
          lesionSize = AIDiagnosisLesionSize.fromJson(
              jsonDecode(description.value ?? ""));
        }
      }
      keyPoints = await _queryAllKeyPoints();
      _canvasAffectedKeyPoints.clear();
      _updateCurrContoursPoints();
      _updateCurrKeyPoints();
      update(['ai_result_canvas', 'ai_result_panel', 'ai_index_tag']);
    } catch (e) {
      logger.e('load ai result failed', e);
    }
  }
  /// Update the interactive layer size
  void _updateModifierInteractiveLayerSize() {
    frameSize = Size(currFrame.width.toDouble(), currFrame.height.toDouble());
    WidgetsBinding.instance.addPostFrameCallback((_) {
      final RenderBox box =
          framePlayerKey.currentContext!.findRenderObject() as RenderBox;
      final framePlayerSize = Size(box.size.width, box.size.height);
      _scale = min(framePlayerSize.width / frameSize.width,
          framePlayerSize.height / frameSize.height);
      aiCanvasSize = Size(frameSize.width * _scale, frameSize.height * _scale);
      // Refresh the interactive layer
      update(["ai_result_modifier_interactive_layer"]);
    });
  }

  /// Compute the lesion length with its unit
  String _countLesionLengthWithUnit(int length) {
    String lengthStr =
        (length * _unitsPhysicalPixels).toStringAsFixed(2).toString();
    return "$lengthStr $_xUnit";
  }
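  // A worked example with hypothetical values (not from the original source):
  //   _unitsPhysicalPixels = 0.05 (cm per pixel), _xUnit = 'cm', length = 120
  //   120 * 0.05 = 6.0  -> returns "6.00 cm"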
  /// Update the image's physical scale information
  void _updateImagePhysicalSize() {
    List<VisualBase> visuals = VisualsLoader(currFrame.visuals).load();
    _unitsPhysicalPixels =
        (((visuals[0].visualAreas[0].viewport?.region.width) ?? 0) /
            currFrame.width);
    VidUsUnit targetUnit =
        visuals[0].visualAreas[0].viewport?.xUnit ?? VidUsUnit.cm;
    _xUnit = UnitDescriptionMap.getDesc(targetUnit);
  }

  /// Auto-snap and close the contour when the pen gets near an existing point
  void _autoCloseContours() async {
    if (_canvasNewContoursPoints.length < 6) return;
    double minDistance = double.infinity;
    int nearestKeyPointIndex = -1;
    final lastPoint = _canvasNewContoursPoints.last;
    for (int i = 0; i < canvasContoursPoints.length; i++) {
      final point = canvasContoursPoints[i];
      final double distance = (point - lastPoint).distance;
      if (distance < minDistance) {
        minDistance = distance;
        nearestKeyPointIndex = i;
      }
    }
    if (minDistance < 6) {
      _canvasPenModeKeyPointIndexes.add(nearestKeyPointIndex);
      _canvasNewContoursPoints.add(canvasContoursPoints[nearestKeyPointIndex]);
      _isDrawingNewContours = false;
      await _callContourMergeAsync();
      _updateCurrContoursPoints();
      _updateCurrKeyPoints();
    }
  }

  /// Ask the backend to merge the newly drawn contour into the existing one
  Future<bool> _callContourMergeAsync() async {
    final ContourMergeResult result =
        await rpcHelper.rpc.aIDiagnosis.contourMergeAsync(
      ContourMergeRequest(
        token: rpcHelper.userToken,
        contourPoints: contours,
        lesionSize: lesionSize,
        drawingNewContourPoints: _convertCanvasPoints(_canvasNewContoursPoints),
      ),
    );
    contours = result.dstContours ?? [];
    lesionSize = result.dstLesionSize;
    keyPoints = await _queryAllKeyPoints();
    return true;
  }

  /// Coordinate conversion [canvas coordinates -> API coordinates]
  List<AIDiagnosisPoint2D> _convertCanvasPoints(List<Offset> points) {
    List<AIDiagnosisPoint2D> result = [];
    for (Offset point in points) {
      result.add(
          AIDiagnosisPoint2D(x: point.dx ~/ _scale, y: point.dy ~/ _scale));
    }
    return result;
  }
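  // A worked example with hypothetical values (not from the original source):
  //   with _scale = 0.5, the canvas point Offset(100, 60) maps to
  //   AIDiagnosisPoint2D(x: 200, y: 120) via truncating division (~/);
  //   _convertPoints below performs the inverse mapping back to canvas space.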
  /// Key point conversion [API coordinates -> canvas coordinates];
  /// also updates the indexes of the horizontal/vertical lesion-size segments
  List<Offset> _convertKeyPoints(List<DiagnosisKeyPointDTO> points) {
    List<Offset> result = [];
    List<int> pointIndexes = List.generate(4, (_) => 0);
    for (int i = 0; i < points.length; i++) {
      final point = points[i];
      if (point.point == null) continue;
      result.add(Offset(point.point!.x.toDouble() * _scale,
          point.point!.y.toDouble() * _scale));
      if (point.type != DiagnosisKeyPointType.OtherKeyPoints) {
        switch (point.type) {
          case DiagnosisKeyPointType.HorizontalPointLeft:
            pointIndexes[0] = i;
            break;
          case DiagnosisKeyPointType.HorizontalPointRight:
            pointIndexes[1] = i;
            break;
          case DiagnosisKeyPointType.VerticalPointUp:
            pointIndexes[2] = i;
            break;
          case DiagnosisKeyPointType.VerticalPointDown:
            pointIndexes[3] = i;
            break;
          default:
        }
      }
    }
    _canvasLesionSizePointsIndexes = pointIndexes;
    _updateLesionSizeAndRatio();
    return result;
  }

  /// Point conversion [API coordinates -> canvas coordinates]
  List<Offset> _convertPoints(List<AIDiagnosisPoint2D> points) {
    List<Offset> result = [];
    for (AIDiagnosisPoint2D point in points) {
      result.add(
          Offset(point.x.toDouble() * _scale, point.y.toDouble() * _scale));
    }
    return result;
  }

  /// Get the enum items related to AI results
  Future<void> _getDiagnosisEnumItemsAsync() async {
    final getDiagnosisEnumItems =
        await rpcHelper.rpc.aIDiagnosis.getDiagnosisEnumItemsAsync(
      GetDiagnosisEnumItemsRequest(
        token: rpcHelper.userToken,
      ),
    );
    _diagnosisEnumItems = getDiagnosisEnumItems.source ?? [];
  }

  void _initData() {
    update(["ai_result_modifier"]);
  }
  /// Drag handler in drag mode [throttled to once per 100 ms]
  void _onDragModeCallDragFunction(Offset pos) async {
    AIDiagnosisPoint2D startPoint = AIDiagnosisPoint2D(
        x: _dragStartPoint.dx ~/ _scale, y: _dragStartPoint.dy ~/ _scale);
    AIDiagnosisPoint2D endPoint =
        AIDiagnosisPoint2D(x: pos.dx ~/ _scale, y: pos.dy ~/ _scale);
    final bool success = await _queryDragResult(startPoint, endPoint);
    if (success) {
      _updateCurrKeyPoints();
      _updateCurrContoursPoints();
      _updateCurrAffectedKeyPoints();
      update(["ai_result_canvas"]);
    }
  }

  /// In drag mode, update the highlighted key point indexes from the mouse
  /// position [throttled to once per 100 ms]
  void _onDragModeCallHoverFunction(Offset localPosition) async {
    final mousePos = AIDiagnosisPoint2D(
        x: localPosition.dx ~/ _scale, y: localPosition.dy ~/ _scale);
    affectedKeyPointIndexes = await _queryAffectedKeyPoints(mousePos);
    _updateCurrAffectedKeyPoints();
    update(["ai_result_canvas"]);
  }

  /// Drag handler in pen (contour-drawing) mode
  void _onPenModeCallDragFunction(Offset pos) async {
    if (!_isDrawingNewContours) return;
    // Point spacing [controls density]
    const double pointDistance = 8;
    final double distance = (pos - _canvasNewContoursPoints.last).distance;
    // print('distance to the previous point: $distance');
    if (distance >= pointDistance) {
      int numPointsToInsert = (distance / pointDistance).ceil() - 1; // points to insert
      // Interpolate from the last committed point so the inserted points are evenly spaced
      final Offset lastPoint = _canvasNewContoursPoints.last;
      for (int i = 0; i < numPointsToInsert; i++) {
        double t = (i + 1) / (numPointsToInsert + 1);
        Offset interpolatedPoint = Offset(
          lastPoint.dx + t * (pos.dx - lastPoint.dx),
          lastPoint.dy + t * (pos.dy - lastPoint.dy),
        );
        _canvasNewContoursPoints.add(interpolatedPoint);
      }
      _canvasNewContoursPoints.add(pos);
      update(["ai_result_canvas"]);
    }
    // print('current contour point count: ${_canvasNewContoursPoints.length}');
    _autoCloseContours();
  }
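  // A worked example with hypothetical values (not from the original source):
  //   pointDistance = 8, distance = 20  ->  numPointsToInsert = ceil(20 / 8) - 1 = 2,
  //   so two points are interpolated at t = 1/3 and t = 2/3 before `pos` itself is added.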
  /// In pen mode, update the nearest key point from the mouse position
  /// [throttled to once per 10 ms]
  void _onPenModeCallHoverFunction(Offset localPosition) async {
    double minDistance = double.infinity;
    int nearestKeyPointIndex = -1;
    for (int i = 0; i < canvasContoursPoints.length; i++) {
      final point = canvasContoursPoints[i];
      final double distance = (point - localPosition).distance;
      if (distance < minDistance) {
        minDistance = distance;
        nearestKeyPointIndex = i;
      }
    }
    _canvasPenModeKeyPointIndexes.clear();
    if (minDistance < 10) {
      _canvasPenModeKeyPointIndexes.add(nearestKeyPointIndex);
    }
    update(["ai_result_canvas"]);
  }

  /// Query the key points affected by the mouse position
  Future<List<int>> _queryAffectedKeyPoints(AIDiagnosisPoint2D mousePos) async {
    try {
      final List<int> result =
          await rpcHelper.rpc.aIDiagnosis.affectedKeyPointsByDragActionAsync(
        AffectedKeyPointsByDragActionRequest(
          token: rpcHelper.userToken,
          keyPoints: keyPoints,
          mousePoint: mousePos,
        ),
      );
      // print(result);
      return result;
    } catch (e) {
      return [];
    }
  }

  /// Query all key points [contours and lesionSize must be stored first]
  Future<List<DiagnosisKeyPointDTO>> _queryAllKeyPoints() async {
    try {
      final List<DiagnosisKeyPointDTO> result =
          await rpcHelper.rpc.aIDiagnosis.getKeyPointsOfContourAsync(
        GetKeyPointsOfContourRequest(
          token: rpcHelper.userToken,
          contours: contours,
          lesionSize: lesionSize,
        ),
      );
      return result;
    } catch (e) {
      print(e);
      return [];
    }
  }

  /// Query the drag result [contoursOnDragStart and keyPointsOnDragStart must
  /// be stored first]
  Future<bool> _queryDragResult(
      AIDiagnosisPoint2D startPoint, AIDiagnosisPoint2D endPoint) async {
    try {
      final ContourAndKeyPointsAfterDragResult result =
          await rpcHelper.rpc.aIDiagnosis.contourAndKeyPointsAfterDragAsync(
        ContourAndKeyPointsAfterDragRequest(
          token: rpcHelper.userToken,
          contours: contoursOnDragStart,
          keyPoints: keyPointsOnDragStart,
          startPoint: startPoint,
          endPoint: endPoint,
        ),
      );
      keyPoints = result.dstKeyPoints ?? [];
      contours = result.dstContours ?? [];
      affectedKeyPointIndexes = result.affectedKeyPointIndexes!;
      return true;
    } catch (e) {
      return false;
    }
  }
  /// Update the lesion size and aspect ratio from the four lesion endpoints
  void _updateLesionSizeAndRatio() {
    if (_canvasLesionSizePointsIndexes.length != 4) return;
    if (keyPoints.length < 4) return;
    final pIndexs = _canvasLesionSizePointsIndexes;
    final p1 = keyPoints[pIndexs[0]].point!;
    final p2 = keyPoints[pIndexs[1]].point!;
    final p3 = keyPoints[pIndexs[2]].point!;
    final p4 = keyPoints[pIndexs[3]].point!;
    // Pixel distance from p1 to p2 -> _horizontalLengthInPixel
    // Pixel distance from p3 to p4 -> _verticalLengthInPixel
    double horizontalLength = (Offset(p1.x.toDouble(), p1.y.toDouble()) -
            Offset(p2.x.toDouble(), p2.y.toDouble()))
        .distance;
    _horizontalLengthInPixel = horizontalLength.ceil();
    double verticalLength = (Offset(p3.x.toDouble(), p3.y.toDouble()) -
            Offset(p4.x.toDouble(), p4.y.toDouble()))
        .distance;
    _verticalLengthInPixel = verticalLength.ceil();
    lesionSize = _getNewLesionSize(
        [p1, p2, p3, p4], _horizontalLengthInPixel, _verticalLengthInPixel);
    update(['ai_result_lesion_size', 'ai_result_lesion_ratio']);
  }
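  // A worked example with hypothetical values (not from the original source):
  //   p1 = (10, 50), p2 = (110, 50)  ->  horizontal distance 100 px
  //   p3 = (60, 20), p4 = (60, 90)   ->  vertical distance 70 px
  //   70 / 100 = 0.7, so the lesionRatio getter reports '< 1'.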
  /// [⭐ _canvasAffectedKeyPoints] Update the affected key point set from the
  /// currently affected key point indexes
  void _updateCurrAffectedKeyPoints() {
    _canvasAffectedKeyPoints.clear();
    if (keyPoints.isEmpty) return;
    for (int i = 0; i < keyPoints.length; i++) {
      if (affectedKeyPointIndexes.contains(i)) {
        _canvasAffectedKeyPoints.add(Offset(
            keyPoints[i].point!.x.toDouble() * _scale,
            keyPoints[i].point!.y.toDouble() * _scale));
      }
    }
    // print('affected key point count: ${_canvasAffectedKeyPoints.length}');
  }

  /// [⭐ _canvasContoursPoints] Update the current contour points
  void _updateCurrContoursPoints() {
    _canvasContoursPoints = _convertPoints(contours);
    // print('contour point count: ${_canvasContoursPoints.length}');
  }

  /// [⭐ _canvasKeyPoints] Update the current key points
  void _updateCurrKeyPoints() async {
    _canvasKeyPoints = _convertKeyPoints(keyPoints);
    // print('key point count: ${_canvasKeyPoints.length}');
  }
}

enum AiResultModifierMode {
  /// Drag
  drag,

  /// Pen
  pen,

  /// Screenshot
  screenshot,
}
/// Storage service extension
extension StorageServiceExt on StorageService {
  /// Authorization. TODO: the API errors out when fileName is empty, so a
  /// default value is provided here.
  Future<StorageServiceSettingDTO> getAuth({
    String? fileName,
    bool? isRechristen,
    List<DataItemDTO>? urlParams,
    List<DataItemDTO>? headerParams,
    String? requestMethod,
    required String userToken,
  }) async {
    try {
      final result = await getAuthorizationAsync(FileServiceRequest(
        token: userToken,
        fileName: fileName ?? "dat",
        isRechristen: isRechristen ?? true,
        urlParams: urlParams,
        headerParams: headerParams,
        requestMethod: requestMethod,
      ));
      return result;
    } catch (e) {
      return StorageServiceSettingDTO();
    }
  }

  /// File upload (Uint8List)
  Future<String?> uploadUint8List(Uint8List buffer, String name, String token,
      [bool? isRechristen]) async {
    try {
      var nameInfos = name.split('.');
      final auth = await getAuth(
        fileName: nameInfos.last,
        isRechristen: isRechristen,
        userToken: token,
      );
      Map<String, String> params = {};
      params['Authorization'] = auth.authorization!;
      params['ContentType'] = auth.contentType!;
      final response = await http
          .put(
            Uri.parse(auth.storageUrl!),
            body: buffer,
            headers: params,
          )
          .timeout(
            const Duration(seconds: 30),
          );
      if (response.statusCode == 200) {
        return auth.storageUrl;
      }
    } catch (e) {
      logger.e('StorageServiceExt uploadUint8List ex:$e');
    }
    return null;
  }
}
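// Illustrative usage of the extension above (hypothetical call site; byte buffer
// and file name are assumptions, the API is the one defined in this file):
//   final url = await rpcHelper.rpc.storage.uploadUint8List(
//       pngBytes, 'ai_modified_demo.png', rpcHelper.userToken);
//   if (url != null) { /* upload succeeded; url points at the stored file */ }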