Browse source code

Merge branch 'urm_measure_pre_20240708' of http://git.ius.plus:88/Project-Wing/fis_lib_measure into urm_measure_pre_20240708

guanxinyi 9 months ago
parent
commit
e97261b4ea

+ 7 - 0
lib/interfaces/process/urm/urm_data_processor.dart

@@ -2,6 +2,13 @@ import 'package:fis_jsonrpc/services/aIDiagnosis.m.dart';
 
 abstract class IURMDataProcessor {
   URMMeasureParams getURMMeasureParams();
+  int get fullVisualPixelWidth; // full image width in pixels
+  int get fullVisualPixelHeight; // full image height in pixels
+  int get screenWidth; // view width in pixels
+  int get screenHeight; // view height in pixels
+  int get screenX; // view X coordinate in pixels
+  int get screenY; // view Y coordinate in pixels
+  double get phywidth; // physical width of the image
 }
 
 class URMMeasureParams {
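
For context, here is a minimal sketch of what a concrete IURMDataProcessor could look like once it supplies the new geometry getters; the class name, constructor, and backing fields below are illustrative assumptions, not part of this change.

// Illustrative sketch only: a processor that simply stores the viewport
// geometry it was given. Everything except the interface members is made up.
class SketchURMDataProcessor implements IURMDataProcessor {
  SketchURMDataProcessor({
    required this.fullVisualPixelWidth,
    required this.fullVisualPixelHeight,
    required this.screenWidth,
    required this.screenHeight,
    required this.screenX,
    required this.screenY,
    required this.phywidth,
  });

  @override
  final int fullVisualPixelWidth; // full image width in pixels
  @override
  final int fullVisualPixelHeight; // full image height in pixels
  @override
  final int screenWidth; // view width in pixels
  @override
  final int screenHeight; // view height in pixels
  @override
  final int screenX; // view X offset in pixels
  @override
  final int screenY; // view Y offset in pixels
  @override
  final double phywidth; // physical width of the image

  @override
  URMMeasureParams getURMMeasureParams() =>
      throw UnimplementedError(); // a real processor returns the measure params
}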

+ 1 - 4
lib/process/calcuators/urm_calcuators/urm_curvature_trace_measure.dart

@@ -25,7 +25,7 @@ class URMCurvatureTraceMeasureCal extends Calculator<Trace, double> {
      // add the input parameters
       List<UrmPoint> srcDPoints = [];
       for (var point in ref.feature!.innerPoints) {
-        srcDPoints.add(UrmPoint(x: point.x, y: point.y));
+        srcDPoints.add(urmApplication.localToView(point));
       }
 
       URMMeasureProcessResult? result =
@@ -37,13 +37,10 @@ class URMCurvatureTraceMeasureCal extends Calculator<Trace, double> {
       if (result != null) {
         print(
             "URM Measure curvature: ${result.resultData} nums: ${result.resultDPoints?.length}");
-        // TODO convert the returned view-percentage point set to full-image-percentage points, then draw them
         final feature = ref.feature!;
         if (feature is! TraceCurvatureFeature) return;
         feature.autoLinePoints =
             urmApplication.urmPointsToDPoints(result.resultDPoints);
-
-        /// TODO converting the data type alone is not enough; view-region percentage coordinates still need to be converted to full-image percentage coordinates
         for (var output in ref.meta.outputs) {
           if (output.name == MeasureTerms.SRCurvature) {
             output.unit = VidUsUnit.None;

+ 1 - 7
lib/process/calcuators/urm_calcuators/urm_curve_curvature_line.dart

@@ -24,7 +24,7 @@ class URMCurveCurvatureLineCal extends Calculator<Trace, double> {
      // add the input parameters
       List<UrmPoint> srcDPoints = [];
       for (var point in ref.feature!.innerPoints) {
-        srcDPoints.add(UrmPoint(x: point.x, y: point.y));
+        srcDPoints.add(urmApplication.localToView(point));
       }
 
       URMMeasureProcessResult? result =
@@ -36,13 +36,7 @@ class URMCurveCurvatureLineCal extends Calculator<Trace, double> {
       if (result != null) {
         print(
             "URM Measure curvature: ${result.resultData} nums: ${result.resultDPoints?.length}");
-        // TODO convert the returned view-percentage point set to full-image-percentage points, then draw them
         final feature = ref.feature!;
-        // if (feature is! TraceCurvatureFeature) return;
-        // feature.autoLinePoints =
-        //     urmApplication.urmPointsToDPoints(result.resultDPoints);
-
-        /// TODO converting the data type alone is not enough; view-region percentage coordinates still need to be converted to full-image percentage coordinates
         for (var output in ref.meta.outputs) {
           if (output.name == MeasureTerms.SRCurvature) {
             output.unit = VidUsUnit.None;

+ 2 - 9
lib/process/calcuators/urm_calcuators/urm_straight_curvature_line.dart

@@ -20,11 +20,7 @@ class URMStraightCurvatureLineCal extends Calculator<StraightLine, double> {
     try {
       if (ref.feature == null) return;
       if (ref.application is! URMApplication) return;
-
-      Size urmResultSize = const Size(0, 0);
       final URMApplication urmApplication = ref.application as URMApplication;
-      urmResultSize = Size(urmApplication.resultWidth.toDouble(),
-          urmApplication.resultHeight.toDouble());
       final p1 = ref.feature!.startPoint;
       final p2 = ref.feature!.endPoint;
 
@@ -44,19 +40,16 @@ class URMStraightCurvatureLineCal extends Calculator<StraightLine, double> {
         urmMeasureType: URMMeasureType.URMCurvature,
         rOIType: URMROIType.placeHolder_0,
         srcDPoints: [
-          UrmPoint(x: startPoint.x, y: startPoint.y),
-          UrmPoint(x: endPoint.x, y: endPoint.y)
+          app.localToView(startPoint),
+          app.localToView(endPoint),
         ],
       );
       if (result != null) {
         print(
             "URM Measure curvature: ${result.resultData} nums: ${result.resultDPoints?.length}");
-        // TODO convert the returned view-percentage point set to full-image-percentage points, then draw them
         final feature = ref.feature!;
         if (feature is! StraightCurvatureFeature) return;
         feature.autoLinePoints = app.urmPointsToDPoints(result.resultDPoints);
-
-        /// TODO converting the data type alone is not enough; view-region percentage coordinates still need to be converted to full-image percentage coordinates
         for (var output in ref.meta.outputs) {
           if (output.name == MeasureTerms.SRCurvature) {
             output.unit = VidUsUnit.None;
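
The common thread in the three calculator changes above: instead of passing raw feature points (normalized to the full image) into the measurement call, each point is first run through localToView so it is expressed relative to the view region. A hedged sketch of that pattern, with a hypothetical helper name:

// Sketch (hypothetical helper): convert feature points from full-image
// normalized coordinates to view-region normalized coordinates before the
// URM measurement call. The dPointsToUrmPoints helper added in
// application.dart below performs the same mapping.
List<UrmPoint> toViewSpace(URMApplication app, List<DPoint> innerPoints) {
  return [for (final p in innerPoints) app.localToView(p)];
}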

+ 32 - 223
lib/process/workspace/urm/application.dart

@@ -85,7 +85,7 @@ class URMApplication extends Application {
          srcDPoints: srcDPoints, // passed in manually
          cMlength: cMlength, // passed in manually
          shellWidth: shellWidth, // passed in manually
-          phywidth: 4.09846, // TODO compute manually
+          phywidth: urmDataProcessor.phywidth, // supplied by the data processor
          urmImageType: params.urmImageType, // taken manually
          urmBlend: params.urmBlend, // taken manually
          downsampleIndex: params.downsampleIndex, // taken manually
@@ -123,246 +123,55 @@ class URMApplication extends Application {
 
  // Convert normalized coordinates in the full local image to normalized coordinates in the view (the region referred to by screenWidth/screenHeight in the interface)
   UrmPoint localToView(DPoint point) {
+    double px =
+        point.x * urmDataProcessor.fullVisualPixelWidth; // a.width is the full image width
+    double py =
+        point.y * urmDataProcessor.fullVisualPixelHeight; // a.height is the full image height
+    double pxB = px - urmDataProcessor.screenX; // b.left is the left edge of the view region, in pixels
+    double pyB = py - urmDataProcessor.screenY; // b.top is the top edge of the view region, in pixels
+    double pctX = pxB / urmDataProcessor.screenWidth; // b.width is the view region width, in pixels
+    double pctY = pyB / urmDataProcessor.screenHeight; // b.height is the view region height, in pixels
     return UrmPoint(
-      x: point.x * resultWidth,
-      y: point.y * resultHeight,
+      x: pctX,
+      y: pctY,
     );
   }
 
  // Convert normalized coordinates in the view (the region referred to by screenWidth/screenHeight in the interface) to normalized coordinates in the full local image
   DPoint viewToLocal(UrmPoint point) {
+    double px = point.x * urmDataProcessor.screenWidth; // b.width is the view region width, in pixels
+    double py = point.y * urmDataProcessor.screenHeight; // b.height is the view region height, in pixels
+    double pxB = px + urmDataProcessor.screenX; // b.left is the left edge of the view region, in pixels
+    double pyB = py + urmDataProcessor.screenY; // b.top is the top edge of the view region, in pixels
+    double pctX =
+        pxB / urmDataProcessor.fullVisualPixelWidth; // a.width is the full image width
+    double pctY =
+        pyB / urmDataProcessor.fullVisualPixelHeight; // a.height is the full image height
     return DPoint(
-      point.x / resultWidth,
-      point.y / resultHeight,
+      pctX,
+      pctY,
     );
   }
 
+  // type conversion: DPoints (full-image space) to UrmPoints (view space)
+  List<UrmPoint> dPointsToUrmPoints(List<DPoint>? points) {
+    if (points == null) return [];
+    List<UrmPoint> urmPoints = [];
+    for (var point in points) {
+      urmPoints.add(localToView(point));
+    }
+    return urmPoints;
+  }
+
  // type conversion: UrmPoints (view space) back to DPoints (full-image space)
   List<DPoint> urmPointsToDPoints(List<UrmPoint>? points) {
     if (points == null) return [];
     List<DPoint> urmPoints = [];
     for (var point in points) {
-      urmPoints.add(DPoint(point.x, point.y));
+      urmPoints.add(viewToLocal(point));
     }
     return urmPoints;
   }
-
-  // Future<GetSRRoiVelResult?> getSRRoiVelResult(
-  //     DPoint startPoint, DPoint endPoint) async {
-  //   try {
-  //     print("调接口获取测量值 getSRRoiVelAsync");
-  //     GetSRRoiVelResult result =
-  //         await RPCBridge.ins.rpc.aIDiagnosis.getSRRoiVelAsync(
-  //       GetSRRoiVelRequest(
-  //         remedicalCode: remedicalCode,
-  //         measureMode: 5,
-  //         dataWidth: resultWidth,
-  //         dataHeight: resultHeight,
-  //         startPointX: startPoint.x,
-  //         startPointY: startPoint.y,
-  //         endPointX: endPoint.x,
-  //         endPointY: endPoint.y,
-  //         token: RPCBridge.ins.userToken,
-  //       ),
-  //     );
-  //     return result;
-  //   } catch (e) {
-  //     return null;
-  //   }
-  // }
-
-  // Future<GetSRRoiFractalDimResult?> getSRRoiFractalDimResult(
-  //     DPoint startPoint, DPoint endPoint) async {
-  //   try {
-  //     print("调接口获取测量值 getSRRoiFractalDimAsync");
-  //     GetSRRoiFractalDimResult result =
-  //         await RPCBridge.ins.rpc.aIDiagnosis.getSRRoiFractalDimAsync(
-  //       GetSRRoiFractalDimRequest(
-  //         remedicalCode: remedicalCode,
-  //         measureMode: 5,
-  //         dataWidth: resultWidth,
-  //         dataHeight: resultHeight,
-  //         startPointX: startPoint.x,
-  //         startPointY: startPoint.y,
-  //         endPointX: endPoint.x,
-  //         endPointY: endPoint.y,
-  //         token: RPCBridge.ins.userToken,
-  //       ),
-  //     );
-  //     return result;
-  //   } catch (e) {
-  //     return null;
-  //   }
-  // }
-
-  // Future<GetSRTraceVelResult?> getSRTraceVelResult(
-  //   List<UrmPoint> points,
-  // ) async {
-  //   try {
-  //     print("调接口获取测量值 getSRTraceVelResult");
-  //     GetSRTraceVelResult result =
-  //         await RPCBridge.ins.rpc.aIDiagnosis.getSRTraceVelAsync(
-  //       GetSRTraceVelRequest(
-  //         remedicalCode: remedicalCode,
-  //         measureMode: 5,
-  //         dataWidth: resultWidth,
-  //         dataHeight: resultHeight,
-  //         inputPoints: points,
-  //         token: RPCBridge.ins.userToken,
-  //       ),
-  //     );
-  //     return result;
-  //   } catch (e) {
-  //     return null;
-  //   }
-  // }
-
-  // Future<GetSRTraceFractalDimResult?> getSRTraceFractalDimResult(
-  //   List<UrmPoint> points,
-  // ) async {
-  //   try {
-  //     print("调接口获取测量值 getSRTraceFractalDimAsync");
-  //     GetSRTraceFractalDimResult result =
-  //         await RPCBridge.ins.rpc.aIDiagnosis.getSRTraceFractalDimAsync(
-  //       GetSRTraceFractalDimRequest(
-  //         remedicalCode: remedicalCode,
-  //         measureMode: 5,
-  //         dataWidth: resultWidth,
-  //         dataHeight: resultHeight,
-  //         inputPoints: points,
-  //         token: RPCBridge.ins.userToken,
-  //       ),
-  //     );
-  //     return result;
-  //   } catch (e) {
-  //     return null;
-  //   }
-  // }
-
-  // Future<GetSRLoactionVelResult?> getSRLoactionVelResult(
-  //   DPoint point,
-  // ) async {
-  //   try {
-  //     print("调接口获取测量值 getSRLoactionVelAsync");
-  //     GetSRLoactionVelResult result =
-  //         await RPCBridge.ins.rpc.aIDiagnosis.getSRLoactionVelAsync(
-  //       GetSRLoactionVelRequest(
-  //         remedicalCode: remedicalCode,
-  //         measureMode: 6,
-  //         dataWidth: resultWidth,
-  //         dataHeight: resultHeight,
-  //         startPointX: point.x,
-  //         startPointY: point.y,
-  //         token: RPCBridge.ins.userToken,
-  //       ),
-  //     );
-  //     return result;
-  //   } catch (e) {
-  //     return null;
-  //   }
-  // }
-
-  // Future<GetSRRoiSpeedResult?> getSRRoiSpeedResult(
-  //   DPoint startPoint,
-  //   DPoint endPoint,
-  // ) async {
-  //   try {
-  //     print("调接口获取测量值 getSRRoiSpeedAsync");
-  //     GetSRRoiSpeedResult result =
-  //         await RPCBridge.ins.rpc.aIDiagnosis.getSRRoiSpeedAsync(
-  //       GetSRRoiSpeedRequest(
-  //         remedicalCode: remedicalCode,
-  //         measureMode: 6,
-  //         dataWidth: resultWidth,
-  //         dataHeight: resultHeight,
-  //         startPointX: startPoint.x,
-  //         startPointY: startPoint.y,
-  //         endPointX: endPoint.x,
-  //         endPointY: endPoint.y,
-  //         token: RPCBridge.ins.userToken,
-  //       ),
-  //     );
-  //     return result;
-  //   } catch (e) {
-  //     return null;
-  //   }
-  // }
-
-  // Future<GetURMDenMeasureResult?> getURMDenMeasureResult(
-  //     DPoint startPoint, DPoint endPoint) async {
-  //   try {
-  //     print("调接口获取测量值 getURMDenMeasureAsync");
-  //     GetURMDenMeasureResult result =
-  //         await RPCBridge.ins.rpc.aIDiagnosis.getURMDenMeasureAsync(
-  //       GetURMDenMeasureRequest(
-  //         remedicalCode: remedicalCode,
-  //         measureMode: 5,
-  //         dataWidth: resultWidth,
-  //         dataHeight: resultHeight,
-  //         startPointX: startPoint.x,
-  //         startPointY: startPoint.y,
-  //         endPointX: endPoint.x,
-  //         endPointY: endPoint.y,
-  //         areaScaler: areaScaler,
-  //         token: RPCBridge.ins.userToken,
-  //       ),
-  //     );
-  //     return result;
-  //   } catch (e) {
-  //     return null;
-  //   }
-  // }
-
-  // Future<GetURMVelMeasureResult?> getURMVelMeasureResult(
-  //     DPoint startPoint, DPoint endPoint) async {
-  //   try {
-  //     print("调接口获取测量值 getURMVelMeasureAsync");
-  //     GetURMVelMeasureResult result =
-  //         await RPCBridge.ins.rpc.aIDiagnosis.getURMVelMeasureAsync(
-  //       GetURMVelMeasureRequest(
-  //         remedicalCode: remedicalCode,
-  //         measureMode: 6,
-  //         dataWidth: resultWidth,
-  //         dataHeight: resultHeight,
-  //         startPointX: startPoint.x,
-  //         startPointY: startPoint.y,
-  //         endPointX: endPoint.x,
-  //         endPointY: endPoint.y,
-  //         areaScaler: areaScaler,
-  //         token: RPCBridge.ins.userToken,
-  //       ),
-  //     );
-  //     return result;
-  //   } catch (e) {
-  //     return null;
-  //   }
-  // }
-
-  // Future<GetUrmVessMeasureResult?> getURMVessMeasureResult(
-  //     DPoint startPoint, DPoint endPoint, double cmlength) async {
-  //   try {
-  //     print("调接口获取测量值 getURMVelMeasureAsync");
-  //     GetUrmVessMeasureResult result =
-  //         await RPCBridge.ins.rpc.aIDiagnosis.getUrmVessMeasureAsync(
-  //       GetUrmVessMeasureRequest(
-  //         remedicalCode: remedicalCode,
-  //         measureMode: 6,
-  //         dataWidth: resultWidth,
-  //         dataHeight: resultHeight,
-  //         startPointX: startPoint.x,
-  //         startPointY: startPoint.y,
-  //         endPointX: endPoint.x,
-  //         endPointY: endPoint.y,
-  //         areaScaler: pixelscaler,
-  //         cmLength: cmlength,
-  //         token: RPCBridge.ins.userToken,
-  //       ),
-  //     );
-  //     return result;
-  //   } catch (e) {
-  //     return null;
-  //   }
-  // }
 }
 
 class URMChartParams {
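
To sanity-check the new mapping, here is a self-contained round trip of the localToView / viewToLocal arithmetic using made-up viewport values (a 1000x800 px full image with a 600x400 px view at pixel offset (100, 50)); all numbers are assumptions for illustration only.

// Round-trip sketch of the coordinate mapping, with assumed viewport values.
void main() {
  // Assumed geometry (illustrative only).
  const fullW = 1000.0, fullH = 800.0; // full image size in pixels
  const viewW = 600.0, viewH = 400.0; // view region size in pixels
  const viewX = 100.0, viewY = 50.0; // view region offset in pixels

  // localToView: full-image normalized -> view normalized.
  const x = 0.4, y = 0.3;
  final viewPctX = (x * fullW - viewX) / viewW; // (400 - 100) / 600 = 0.5
  final viewPctY = (y * fullH - viewY) / viewH; // (240 - 50) / 400 = 0.475

  // viewToLocal: view normalized -> full-image normalized (the inverse).
  final backX = (viewPctX * viewW + viewX) / fullW; // 0.4 again
  final backY = (viewPctY * viewH + viewY) / fullH; // 0.3 again

  print('view: ($viewPctX, $viewPctY), back: ($backX, $backY)');
}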