import 'dart:ui';

// import 'package:fis_measure/interfaces/date_types/rect_region.dart';
// import 'package:fis_measure/interfaces/date_types/size.dart';
import 'package:fis_jsonrpc/services/aIDiagnosis.m.dart';
import 'package:fis_measure/interfaces/date_types/point.dart';
import 'package:fis_measure/interfaces/date_types/rect_region.dart';
import 'package:fis_measure/interfaces/process/visuals/visual.dart';
import 'package:fis_measure/interfaces/process/workspace/point_info.dart';
import 'package:fis_measure/process/visual/v2d_visual.dart';
import 'package:fis_measure/process/workspace/rpc_bridge.dart';
import 'package:flutter/painting.dart';
import 'package:vid/us/vid_us_2d_visual.dart';
import 'package:vid/us/vid_us_logical_coordinate.dart';
import 'package:vid/us/vid_us_physical_coordinate.dart';
import 'package:vid/us/vid_us_probe.dart';
import 'package:vid/us/vid_us_visual.dart';

import '../application.dart';

/// URM specialty application.
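///
/// A minimal construction sketch (assumes a [VidUsProbe] and the original
/// visuals are already available from the loaded image; the numbers below
/// are placeholders, not real acquisition parameters):
///
/// ```dart
/// final app = URMApplication(
///   probe,
///   beams: 256,
///   samples: 1024,
///   roiRect: const Rect.fromLTWH(0, 0, 256, 1024),
///   resultWidth: 512,
///   resultHeight: 512,
///   remedicalCode: 'placeholder-code',
///   originVisuals: visuals,
/// );
/// app.loadURMVisuals();
/// ```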
class URMApplication extends Application {
  URMApplication(
    VidUsProbe probe, {
    required this.beams,
    required this.samples,
    required this.roiRect,
    required this.resultWidth,
    required this.resultHeight,
    required this.remedicalCode,
    required this.originVisuals,
  }) : super(probe);

  @override
  bool get isThirdPart => true;

  List<IVisual> originVisuals; // Original visuals (source view coordinate system)
  int beams; // Number of valid beams in the original image
  int samples; // Number of valid samples in the original image
  Rect roiRect; // ROI region
  int resultWidth; // Width of the analysis result image
  int resultHeight; // Height of the analysis result image
  String remedicalCode; // Image code
  bool syncDisplay = false; // Whether synchronized display is enabled
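
  /// Converts the original visuals into a single URM visual whose
  /// [RectRegion] is rescaled from beam/sample index space to the ROI size.
  ///
  /// A hedged worked example with illustrative numbers only: if
  /// `roiRect.width == 200`, `beams == 100` and `beamsPercent == 1`, then
  /// `scaleX == 2` and the region's `right` becomes `1 / 2 == 0.5`.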
  @override
  List<IVisual> convertVisuals() {
    var urmVisuals = originVisuals;
    double beamsPercent = 1;
    if (originVisuals.isNotEmpty) {
      IVisual visual = originVisuals[0];
      if (visual is V2dVisual) {
        VidUsVisual visualData = visual.visualData;
        if (visualData is VidUs2DVisual) {
          if (visualData.logicalCoordinates.length == 2 &&
              visualData.physicalCoordinates.length == 2) {
            beamsPercent = getBeamsPercent(
              visualData.logicalCoordinates.entries.first.value,
              visualData.physicalCoordinates.entries.first.value,
            );
          }
        }
        double scaleX = roiRect.width / beams * beamsPercent;
        double scaleY = roiRect.height / samples;
        RectRegion region =
            RectRegion(left: 0, top: 0, right: 1 / scaleX, bottom: 1 / scaleY);
        V2dVisual v2dVisual = V2dVisual(visualData, region);
        urmVisuals = [v2dVisual];
      }
    }
    return urmVisuals;
  }
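
  /// Returns the fraction of the logical region width covered by the tissue
  /// physical coordinate, or 1 when the physical coordinate is not a
  /// [VidUsTissuePhysicalCoordinate].
  ///
  /// Illustrative example (made-up values): a physical width of 0.8 over a
  /// logical region spanning 0.0 to 1.0 yields a beams percent of 0.8.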
  double getBeamsPercent(VidUsLogicalCoordinate logicalCoordinate,
      VidUsPhysicalCoordinate physicalCoordinates) {
    if (physicalCoordinates is VidUsTissuePhysicalCoordinate) {
      return (physicalCoordinates.width /
          (logicalCoordinate.region.right - logicalCoordinate.region.left));
    }
    return 1;
  }
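
  /// Converts a raw screen [offset] into a [PointInfo] expressed as a
  /// percentage of [displaySize], forwards it to the active annotation or
  /// measure item, and re-emits touch-move / touch-up events for mobile
  /// listeners.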
  @override
  PointInfo createPointInfo(Offset offset, PointInfoType type) {
    final width = displaySize.width;
    final height = displaySize.height;
    final x = offset.dx / width;
    final y = offset.dy / height;
    final percentOffset = Offset(x, y);
    final info = PointInfo.fromOffset(percentOffset, type);
    info.hostVisualArea = currentVisualArea; // Reuse the current area when no switch happened
    if (isAnnotationWorking) {
      activeAnnotationItem?.execute(info);
    } else {
      activeMeasureItem?.execute(info);
      if (type == PointInfoType.touchMove) {
        mobileTouchEvent.emit(this, offset); // Emit the touch-move event
      }
      if (type == PointInfoType.touchUp) {
        mobileTouchEndEvent.emit(this, offset); // Emit the touch-end event
      }
    }
    return info;
  }

  void loadURMVisuals() {
    loadVisuals();
  }
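
  /// The methods below are thin wrappers around the aIDiagnosis RPC service:
  /// each builds a request from [remedicalCode], the result image size and
  /// the given point(s), awaits the call, and returns `null` on any error.
  ///
  /// A minimal usage sketch (assumes `DPoint` exposes an `(x, y)` style
  /// constructor; the point values are placeholders, not real data):
  ///
  /// ```dart
  /// final curvature = await app.getSRCurvatureResult(
  ///   DPoint(0.2, 0.3),
  ///   DPoint(0.6, 0.7),
  /// );
  /// if (curvature == null) {
  ///   // The RPC call failed or threw; handle the error path here.
  /// }
  /// ```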
  Future<GetSRCurvatureResult?> getSRCurvatureResult(
      DPoint startPoint, DPoint endPoint) async {
    try {
      print("Calling getSRCurvatureAsync to fetch the measurement value");
      GetSRCurvatureResult result =
          await RPCBridge.ins.rpc.aIDiagnosis.getSRCurvatureAsync(
        GetSRCurvatureRequest(
          remedicalCode: remedicalCode,
          measureMode: 5, // 5 = URM Den measurement, 6 = URM Vel measurement
          dataWidth: resultWidth,
          dataHeight: resultHeight,
          startPointX: startPoint.x,
          startPointY: startPoint.y,
          endPointX: endPoint.x,
          endPointY: endPoint.y,
          token: RPCBridge.ins.userToken,
        ),
      );
      return result;
    } catch (e) {
      return null;
    }
  }

  Future<GetSRRoiVelResult?> getSRRoiVelResult(
      DPoint startPoint, DPoint endPoint) async {
    try {
      print("Calling getSRRoiVelAsync to fetch the measurement value");
      GetSRRoiVelResult result =
          await RPCBridge.ins.rpc.aIDiagnosis.getSRRoiVelAsync(
        GetSRRoiVelRequest(
          remedicalCode: remedicalCode,
          measureMode: 5,
          dataWidth: resultWidth,
          dataHeight: resultHeight,
          startPointX: startPoint.x,
          startPointY: startPoint.y,
          endPointX: endPoint.x,
          endPointY: endPoint.y,
          token: RPCBridge.ins.userToken,
        ),
      );
      return result;
    } catch (e) {
      return null;
    }
  }

  Future<GetSRRoiFractalDimResult?> getSRRoiFractalDimResult(
      DPoint startPoint, DPoint endPoint) async {
    try {
      print("Calling getSRRoiFractalDimAsync to fetch the measurement value");
      GetSRRoiFractalDimResult result =
          await RPCBridge.ins.rpc.aIDiagnosis.getSRRoiFractalDimAsync(
        GetSRRoiFractalDimRequest(
          remedicalCode: remedicalCode,
          measureMode: 5,
          dataWidth: resultWidth,
          dataHeight: resultHeight,
          startPointX: startPoint.x,
          startPointY: startPoint.y,
          endPointX: endPoint.x,
          endPointY: endPoint.y,
          token: RPCBridge.ins.userToken,
        ),
      );
      return result;
    } catch (e) {
      return null;
    }
  }

  Future<GetSRTraceVelResult?> getSRTraceVelResult(
      DPoint startPoint, DPoint endPoint) async {
    try {
      print("Calling getSRTraceVelAsync to fetch the measurement value");
      GetSRTraceVelResult result =
          await RPCBridge.ins.rpc.aIDiagnosis.getSRTraceVelAsync(
        GetSRTraceVelRequest(
          remedicalCode: remedicalCode,
          measureMode: 5,
          dataWidth: resultWidth,
          dataHeight: resultHeight,
          // FIXME: the input should be a set of trace points, not just a
          // start/end pair.
          startPointX: startPoint.x,
          startPointY: startPoint.y,
          endPointX: endPoint.x,
          endPointY: endPoint.y,
          token: RPCBridge.ins.userToken,
        ),
      );
      return result;
    } catch (e) {
      return null;
    }
  }

  Future<GetSRTraceFractalDimResult?> getSRTraceFractalDimResult(
    List<UrmPoint> points,
  ) async {
    try {
      print("Calling getSRTraceFractalDimAsync to fetch the measurement value");
      GetSRTraceFractalDimResult result =
          await RPCBridge.ins.rpc.aIDiagnosis.getSRTraceFractalDimAsync(
        GetSRTraceFractalDimRequest(
          remedicalCode: remedicalCode,
          measureMode: 5,
          dataWidth: resultWidth,
          dataHeight: resultHeight,
          inputPoints: points,
          token: RPCBridge.ins.userToken,
        ),
      );
      return result;
    } catch (e) {
      return null;
    }
  }

  Future<GetSRLoactionVelResult?> getSRLoactionVelResult(
    DPoint point,
  ) async {
    try {
      print("Calling getSRLoactionVelAsync to fetch the measurement value");
      GetSRLoactionVelResult result =
          await RPCBridge.ins.rpc.aIDiagnosis.getSRLoactionVelAsync(
        GetSRLoactionVelRequest(
          remedicalCode: remedicalCode,
          measureMode: 6,
          dataWidth: resultWidth,
          dataHeight: resultHeight,
          startPointX: point.x,
          startPointY: point.y,
          token: RPCBridge.ins.userToken,
        ),
      );
      return result;
    } catch (e) {
      return null;
    }
  }

  Future<GetSRRoiSpeedResult?> getSRRoiSpeedResult(
    DPoint startPoint,
    DPoint endPoint,
  ) async {
    try {
      print("Calling getSRRoiSpeedAsync to fetch the measurement value");
      GetSRRoiSpeedResult result =
          await RPCBridge.ins.rpc.aIDiagnosis.getSRRoiSpeedAsync(
        GetSRRoiSpeedRequest(
          remedicalCode: remedicalCode,
          measureMode: 6,
          dataWidth: resultWidth,
          dataHeight: resultHeight,
          startPointX: startPoint.x,
          startPointY: startPoint.y,
          endPointX: endPoint.x,
          endPointY: endPoint.y,
          token: RPCBridge.ins.userToken,
        ),
      );
      return result;
    } catch (e) {
      return null;
    }
  }

  Future<GetURMDenMeasureResult?> getURMDenMeasureResult(
      DPoint startPoint, DPoint endPoint) async {
    try {
      print("Calling getURMDenMeasureAsync to fetch the measurement value");
      GetURMDenMeasureResult result =
          await RPCBridge.ins.rpc.aIDiagnosis.getURMDenMeasureAsync(
        GetURMDenMeasureRequest(
          remedicalCode: remedicalCode,
          measureMode: 5,
          dataWidth: resultWidth,
          dataHeight: resultHeight,
          startPointX: startPoint.x,
          startPointY: startPoint.y,
          endPointX: endPoint.x,
          endPointY: endPoint.y,
          token: RPCBridge.ins.userToken,
        ),
      );
      return result;
    } catch (e) {
      return null;
    }
  }

  Future<GetURMVelMeasureResult?> getURMVelMeasureResult(
      DPoint startPoint, DPoint endPoint) async {
    try {
      print("Calling getURMVelMeasureAsync to fetch the measurement value");
      GetURMVelMeasureResult result =
          await RPCBridge.ins.rpc.aIDiagnosis.getURMVelMeasureAsync(
        GetURMVelMeasureRequest(
          remedicalCode: remedicalCode,
          measureMode: 6,
          dataWidth: resultWidth,
          dataHeight: resultHeight,
          startPointX: startPoint.x,
          startPointY: startPoint.y,
          endPointX: endPoint.x,
          endPointY: endPoint.y,
          token: RPCBridge.ins.userToken,
        ),
      );
      return result;
    } catch (e) {
      return null;
    }
  }
}