controller.dart

// ignore_for_file: constant_identifier_names
import 'dart:async';
import 'dart:collection';
import 'dart:convert';

import 'package:fis_common/event/event_type.dart';
import 'package:fis_common/logger/logger.dart';
import 'package:fis_i18n/i18n.dart';
import 'package:fis_jsonrpc/rpc.dart';
import 'package:fis_measure/interfaces/date_types/rect_region.dart';
import 'package:fis_measure/interfaces/process/player/play_controller.dart';
import 'package:fis_measure/interfaces/process/workspace/application.dart';
import 'package:fis_measure/process/primitives/multi_method/dop_trace_disp/data.dart';
import 'package:fis_measure/process/workspace/rpc_bridge.dart';
import 'package:fis_measure/view/player/buffer_waiter.dart';
import 'package:fis_vid/data_channel/channel.dart';
import 'package:flutter/foundation.dart';
import 'package:get/get.dart';
import 'package:vid/us/vid_us_image.dart';
import 'package:vid/us/vid_us_image_data.dart';
import 'package:vid/us/vid_us_probe.dart';
import 'package:vid/us/vid_us_visual_area_type.dart';

import 'enums.dart';
import 'events.dart';

/// Vid player controller
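///
/// A minimal usage sketch (illustrative only; the URL below is a placeholder
/// and error handling is omitted):
///
/// ```dart
/// Future<void> demo() async {
///   final controller = VidPlayerController('https://example.com/demo.vid');
///   if (await controller.load()) {
///     controller.play();            // start playback
///     await controller.locateTo(0); // pause and show the first frame
///   }
///   controller.dispose();
/// }
/// ```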
class VidPlayerController extends ChangeNotifier implements IPlayerController {
  /// Vid player controller
  ///
  /// [url] Vid file URL
  VidPlayerController(String url) {
    _url = url;
    _dataChannel = VidDataChannel.create(url);
    _bufferWaiter = VidBufferWaiter(_dataChannel);
    _isFirstFrame = true;
    eventHandler = FEventHandler<VidPlayerEvent>();
    frameUpdated = FEventHandler<VidUsImage>();
    firstFrameLoaded = FEventHandler<VidUsImage>();
    frameLoadStateChanged = FEventHandler<bool>();
    errorOccured = FEventHandler<String?>();
  }

  static const _CAN_PLAY_STATUS_ARR = [
    VidPlayStatus.ready,
    VidPlayStatus.pause
  ];
  static const _HAS_VIEW_STATUS_ARR = [VidPlayStatus.play, VidPlayStatus.pause];

  @override
  late final FEventHandler<VidPlayerEvent> eventHandler;
  @override
  late final FEventHandler<VidUsImage> frameUpdated;
  @override
  late final FEventHandler<VidUsImage> firstFrameLoaded;
  @override
  late final FEventHandler<bool> frameLoadStateChanged;
  @override
  late final FEventHandler<String?> errorOccured;

  final enableLoopChanged = FEventHandler<bool>();
  final crossFrameAnchorsUpdated = FEventHandler<List<int>>();

  /// Fired when the controller is disposed
  final disposeOccured = FEventHandler<void>();

  late final String _url;
  late final VidDataChannel _dataChannel;
  late final VidBufferWaiter _bufferWaiter;
  _PlayAssistant? _playAssistant;
  VidPlayStatus _status = VidPlayStatus.init;
  int _frameIndex = -1;
  VidUsImage? _frame;
  bool _disposed = false;
  bool _isFirstFrame = false;
  bool _loading = false;
  bool _enableLoop = false;
  List<int> _crossFrameAnchors = [];

  /// Cross-frame anchors
  List<int> get crossFrameAnchors => UnmodifiableListView(_crossFrameAnchors);
  set crossFrameAnchors(List<int> value) {
    _crossFrameAnchors = value;
    crossFrameAnchorsUpdated.emit(this, crossFrameAnchors);
  }

  /// Whether loop playback is enabled
  bool get enableLoop => _enableLoop;
  set enableLoop(bool val) {
    if (val != _enableLoop) {
      _enableLoop = val;
      enableLoopChanged.emit(this, val);
    }
  }

  @override
  String get url => _url;

  @override
  VidDataChannel get dataChannel => _dataChannel;

  @override
  bool get disposed => _disposed;

  @override
  VidPlayStatus get status => _status;

  @override
  VidUsImage? get currentFrame => _frame;

  /// Whether the player is playing
  bool get playing => status == VidPlayStatus.play;

  /// Whether the player can play
  bool get canPlay => _CAN_PLAY_STATUS_ARR.contains(status);

  /// Whether the player should have a view
  @override
  bool get hasView => _HAS_VIEW_STATUS_ARR.contains(status);

  /// Currently viewed frame index
  int get currentFrameIndex => _frameIndex;

  /// Total frame count of the current vid
  int get totalFramesCount => _dataChannel.imageCount;

  double get frameRate => _dataChannel.probe.frameRate;
  VidUsProbe get probe => _dataChannel.probe;
  bool get isSingleFrame => totalFramesCount == 1;

  /// Whether playback has reached the last frame
  bool get isEndOfPlay => currentFrameIndex == totalFramesCount - 1;

  /// Current player brightness; initial value is 0
  double get brightness => _brightness;
  double _brightness = 0.0;

  /// Current player contrast; initial value is 1
  double get contrast => _contrast;
  double _contrast = 1.0;

  /// Size of the vid header (including extensions)
  int get vidHeaderSize => _bufferWaiter.vidHeaderSize;

  @override
  Future<bool> load() async {
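    // Presumably a 10-second load timeout expressed in milliseconds.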
    final loaded = await _dataChannel.load(10 * 1000);
    if (loaded) {
      _bufferWaiter.init();
      _setStatus(VidPlayStatus.ready);
      logger.i(
          "Vid load succeeded. ImageCount:${_dataChannel.imageCount}, FrameRate:${_dataChannel.probe.frameRate} Url: $url");
    } else {
      _setStatus(VidPlayStatus.loadFail);
      logger.i("Vid load failed. Url: $url");
    }
    return loaded;
  }

  @override
  void play() {
    if (playing) return;
    if (!canPlay) return;
    if (isEndOfPlay) {
      _frameIndex = -1;
    }
    if (isSingleFrame) {
      locateTo(0);
      _setStatus(VidPlayStatus.pause);
    } else {
      _playAssistant ??= _PlayAssistant(this);
      _playAssistant!.play();
      _setStatus(VidPlayStatus.play);
    }
  }

  @override
  void pause() {
    if (!playing) {
      if (_status != VidPlayStatus.pause) {
        _setStatus(VidPlayStatus.pause);
      }
      return;
    }
    _playAssistant?.pause();
    _setStatus(VidPlayStatus.pause);
  }

  /// Pause and view next frame
  Future<bool> gotoNextFrame() {
    return locateTo(currentFrameIndex + 1);
  }

  /// Pause and view prev frame
  Future<bool> gotoPrevFrame() {
    return locateTo(currentFrameIndex - 1);
  }

  @override
  Future<bool> locateTo(int index) async {
    if (index < 0 || index >= totalFramesCount) return false;
    pause();
    _loading = false;
    gotoFrame(index);
    return true;
  }

  /// View target indexed frame
  ///
  /// [index] frame index
  Future<bool> gotoFrame(int index) async {
    if (index < 0 || index >= totalFramesCount) return false;
    if (_loading) return false;
    _frameIndex = index;
    _loading = true;
    _updateFrameLoadState(true);
    final start = DateTime.now();
    final result = await _waitUpdateFrame();
    final end = DateTime.now();
    if (result) {
      _updateFrameLoadState(false);
      final spendTime = end.difference(start).inMilliseconds;
      _bufferWaiter.recordFrameSpendTime(spendTime);
    }
    // TODO: AI data fetched for semi-automatic measurement
    _getHalfAutoAiResult();
    return result;
  }

  /// Set frame brightness
  ///
  /// [value] brightness value
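  ///
  /// [value] is interpreted as a percentage in [-100, 100] and mapped to a
  /// brightness offset in [-255, 255]; the offset is applied through a 5x4
  /// color filter matrix (`output = contrast * input + brightness` for each
  /// RGB channel, alpha untouched).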
  void setBrightness(int value) {
    final brightnessCount = value / 100;
    if (brightnessCount < -1 || brightnessCount > 1) {
      return;
    }
    _brightness = brightnessCount * 255;
    final filterMatrix = <double>[
      contrast, 0, 0, 0, brightness, // red
      0, contrast, 0, 0, brightness, // green
      0, 0, contrast, 0, brightness, // blue
      0, 0, 0, 1, 0, // alpha
    ];
    eventHandler.emit(this, VidPlayerFilterChangeEvent(filterMatrix));
    _reloadFrame();
  }

  /// Set frame contrast
  ///
  /// [value] contrast value
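  ///
  /// Negative values in [-100, 0) map linearly to a contrast factor in [0, 1);
  /// values in [0, 100] map to a factor in [1, 10]. Resulting factors outside
  /// [0, 10] are ignored.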
  void setContrast(int value) {
    double contrastCount = 1;
    if (value < 0) {
      contrastCount = (value + 100) / 100;
    } else {
      contrastCount = value / 100 * 9 + 1;
    }
    if (contrastCount < 0 || contrastCount > 10) {
      return;
    }
    _contrast = contrastCount;
    final filterMatrix = <double>[
      contrast, 0, 0, 0, brightness, // red
      0, contrast, 0, 0, brightness, // green
      0, 0, contrast, 0, brightness, // blue
      0, 0, 0, 1, 0, // alpha
    ];
    eventHandler.emit(this, VidPlayerFilterChangeEvent(filterMatrix));
    _reloadFrame();
  }

  void setFilterMatrix(List<double> matrix) {
    eventHandler.emit(this, VidPlayerFilterChangeEvent(matrix));
  }

  /// Reset image gain (brightness and contrast)
  void resetTone() {
    setBrightness(0);
    setContrast(0);
    eventHandler.emit(this, VidPlayResetToneEvent());
  }

  void _reloadFrame() {
    gotoFrame(currentFrameIndex);
  }

  void _updateFrameLoadState(bool val) {
    _loading = val;
    frameLoadStateChanged.emit(this, _loading);
  }

  void _emitErrorOccured([String? msg]) {
    errorOccured.emit(this, msg);
  }

  /// Wait for the frame update to complete
  Future<bool> _waitUpdateFrame() async {
    if (_disposed) return false;

    Future<bool> _fetchOnce() async {
      int timeout = 500;
      if (!isSingleFrame) {
        // Video: one frame's refresh duration, minus a 10 ms buffer for rendering
        timeout = _playAssistant!._playIntervalMillSeconds - 10;
      }
      final image = await _dataChannel.getImage(currentFrameIndex, timeout);
      if (playing || _bufferWaiter.channel.isBufferedDone) {
        // While paused and not fully buffered, do not advance to the next frame.
        _frame = image;
        emitFrameUpdate();
      }
      return true;
    }

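    // The first attempt uses a short timeout; on ReadTimeoutException the
    // controller waits for the buffer to catch up and retries exactly once
    // before surfacing an error.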
    try {
      return await _fetchOnce();
    } catch (e) {
      if (e is ReadTimeoutException) {
        try {
          // Wait for the buffer once, then retry.
          await _waitFrameBufferFluently();
          return await _fetchOnce();
        } catch (e) {
          _emitErrorOccured(i18nBook.measure.frameLoadTimeout.t);
        }
      } else {
        _emitErrorOccured(i18nBook.measure.frameLoadError.t);
      }
    }
    return false;
  }

  Future<void> _waitFrameBufferFluently() async {
    if (isSingleFrame) {
      await _bufferWaiter.waitSingleVid();
    } else {
      await _bufferWaiter.waitBuffer(_frameIndex);
    }
  }

  /// Fetch the semi-automatic AI measurement result
  void _getHalfAutoAiResult() async {
    final application = Get.find<IApplication>();
    RectRegion? rectRegion;
    application.visuals.first.visualAreas.firstWhereOrNull((element) {
      if (element.visualAreaType == VidUsVisualAreaType.Doppler) {
        rectRegion = element.layoutRegion;
        return true;
      }
      return false;
    });
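    // layoutRegion appears to hold normalized (0-1) coordinates; they are
    // scaled by the frame's pixel dimensions before being sent over RPC.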
    if (currentFrame != null && rectRegion != null) {
      VetAutoTraceImageResult? autoTraceImageData = await getAutoTraceImageData(
        imageInfo: VetAutoTraceImageDTO(
          imageBase64String: base64Encode(currentFrame!.imageData),
          height: currentFrame!.height,
          width: currentFrame!.width,
          sKColorType: 4,
        ),
        rectInfo: VetAutoTraceRectDTO(
          left: ((rectRegion?.left ?? 0) * currentFrame!.width).toInt(),
          top: ((rectRegion?.top ?? 0) * currentFrame!.height).toInt(),
          right: ((rectRegion?.right ?? 0) * currentFrame!.width).toInt(),
          bottom: ((rectRegion?.bottom ?? 0) * currentFrame!.height).toInt(),
          height: ((rectRegion?.height ?? 0) * currentFrame!.height).toInt(),
          width: ((rectRegion?.width ?? 0) * currentFrame!.width).toInt(),
        ),
      );
      TraceListData.setVetAutoTraceImageResult(autoTraceImageData);
    }
  }

  /// TODO: should be extracted out of this controller
  Future<VetAutoTraceImageResult?> getAutoTraceImageData({
    required VetAutoTraceImageDTO imageInfo,
    VetAutoTraceRectDTO? rectInfo,
  }) async {
    try {
      return await RPCBridge.ins.rpc.aIDiagnosis.vetAutoTraceImageAsync(
        VetAutoTraceImageRequest(
          token: RPCBridge.ins.userToken,
          imageInfo: imageInfo,
          rectInfo: rectInfo,
        ),
      );
    } catch (e) {
      logger.e('Project getAutoTraceImageData ex:', e);
      return null;
    }
  }

  /// [Carotid] Sets a single frame for the carotid 2D measurement display
  void set2DMeasureFrame(VidUsImage frame) {
    _emitFrameUpdated(frame);
  }

  /// [Carotid] Resets the player
  void resetCurrentFrame() {
    _frameIndex = -1;
    play();
  }

  void emitFrameUpdate() {
    if (_isFirstFrame) {
      firstFrameLoaded.emit(this, _frame!);
      _isFirstFrame = false;
      resetTone();
    }
    _emitFrameUpdated();
  }

  void _emitFrameUpdated([VidUsImage? frame]) {
    final f = frame ?? _frame!;
    frameUpdated.emit(this, f);
    eventHandler.emit(
      this,
      VidPlayerFrameIndexChangeEvent(f.index, f.imageData, f.width, f.height),
    );
  }

  void _setStatus(VidPlayStatus value) {
    _status = value;
    _notifyStatus();
  }

  void _notifyStatus() {
    eventHandler.emit(this, VidPlayerStatusChangeEvent(status));
  }

  void _stop({bool needNotify = true}) {
    _playAssistant?.pause();
    if (needNotify) {
      _setStatus(VidPlayStatus.stop);
    }
  }

  @override
  void dispose() {
    disposeOccured.emit(this, null);
    _disposed = true;
    _stop(needNotify: false);
    eventHandler.dispose();
    _dataChannel.close();
    super.dispose();
  }

  /// Disabled; listen for events via [eventHandler] instead.
  @override
  void addListener(VoidCallback listener) {
    throw UnsupportedError(
        "Method `addListener` has been disabled. Please use `eventHandler.addListener`.");
  }

  /// Disabled; listen for events via [eventHandler] instead.
  @override
  void removeListener(VoidCallback listener) {
    throw UnsupportedError(
        "Method `removeListener` has been disabled. Please use `eventHandler.removeListener`.");
  }
}

class _PlayAssistant {
  _PlayAssistant(this.owner);

  final VidPlayerController owner;
  bool _ready = false;
  late double _frameRate;
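
  // Playback interval derived from the probe frame rate:
  // _playInterval is in microseconds, _playIntervalMillSeconds in milliseconds.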
  int get _playInterval => 1000 * 1000 ~/ _frameRate;
  int get _playIntervalMillSeconds => _playInterval ~/ 1000.0;

  Timer? _timer;

  void play() {
    if (!_ready) {
      _prepare();
    }
    if (_timer != null) {
      pause();
    }
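    // `waiting` prevents overlapping fetches when loading a frame takes longer
    // than one timer tick.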
    bool waiting = false;
    final duration = Duration(microseconds: _playInterval);
    // final duration = const Duration(milliseconds: 1000 ~/ 10);
    _timer = Timer.periodic(duration, (timer) async {
      if (waiting) return;
      waiting = true;
      final result = await owner.gotoFrame(owner.currentFrameIndex + 1);
      if (_timer == null) {
        return; // Playback has been cancelled
      }
      waiting = false;
      if (result) {
        if (owner.currentFrameIndex == owner.totalFramesCount - 1) {
          // Reached the last frame
          if (owner.enableLoop) {
            pause();
            // Loop playback: restart from the first frame
            owner._frameIndex = -1;
            play();
          } else {
            owner.pause();
          }
        }
      }
    });
  }

  void pause() {
    _timer?.cancel();
    _timer = null;
  }

  void _prepare() {
    _frameRate = owner._dataChannel.probe.frameRate;
    _ready = true;
  }
}