- #include"InferNetOnnxPaddleOcrCrnnRegC.h"
- InferNetOnnxPaddleOcrCrnnReg::InferNetOnnxPaddleOcrCrnnReg()
- {
- }
- void InferNetOnnxPaddleOcrCrnnReg::LoadNetwork(const void* modelDataRec, size_t modelDataLengthRec, const void* modelDataKeys, size_t modelDataLengthKeys)
- {
- if (_modelLoadedRec)
- {
- // 如果模型已加载,则释放之前的模型
- delete ort_session;
- ort_session = nullptr;
- }
- sessionOptions.SetGraphOptimizationLevel(ORT_ENABLE_BASIC);
- ort_session = new Session(env, modelDataRec, modelDataLengthRec, sessionOptions);
- size_t numInputNodes = ort_session->GetInputCount();
- size_t numOutputNodes = ort_session->GetOutputCount();
- AllocatorWithDefaultOptions allocator;
- for (int i = 0; i < numInputNodes; i++)
- {
- inputNames.push_back(ort_session->GetInputName(i, allocator));
- Ort::TypeInfo input_type_info = ort_session->GetInputTypeInfo(i);
- auto input_tensor_info = input_type_info.GetTensorTypeAndShapeInfo();
- auto input_dims = input_tensor_info.GetShape();
- inputNodeDims.push_back(input_dims);
- }
- for (int i = 0; i < numOutputNodes; i++)
- {
- outputNames.push_back(ort_session->GetOutputName(i, allocator));
- Ort::TypeInfo output_type_info = ort_session->GetOutputTypeInfo(i);
- auto output_tensor_info = output_type_info.GetTensorTypeAndShapeInfo();
- auto output_dims = output_tensor_info.GetShape();
- outputNodeDims.push_back(output_dims);
- }
- // 将字节数据转换为字符串
- std::string text(reinterpret_cast<const char*>(modelDataKeys), modelDataLengthKeys);
- // 使用字符串流处理字符串
- std::istringstream iss(text);
- std::string line;
- // 逐行读取并添加到 alphabet 中
- while (std::getline(iss, line))
- {
- this->alphabet.push_back(line);
- }
- this->alphabet.push_back(" ");
- names_len = this->alphabet.size();
- _modelLoadedRec = true;
- }
- std::string InferNetOnnxPaddleOcrCrnnReg::Process(cv::Mat& srcimgCv)
- {
- // 预处理图像
- cv::Mat dstimg = this->preprocess(srcimgCv);
- this->normalize_(dstimg); // 归一化图像
- // 定义输入张量的形状
- array<int64_t, 4> input_shape_{ 1, 3, this->inpHeight, this->inpWidth };
- // 创建 Ort 内存分配器
- auto allocator_info = MemoryInfo::CreateCpu(OrtDeviceAllocator, OrtMemTypeCPU);
- // 创建输入张量
- Value input_tensor_ = Value::CreateTensor<float>(allocator_info, input_image_.data(), input_image_.size(), input_shape_.data(), input_shape_.size());
- // 开始推理
- std::vector<Value> ortOutputs = ort_session->Run(RunOptions{ nullptr }, &inputNames[0], &input_tensor_, 1, outputNames.data(), outputNames.size()); // 开始推理
- // 获取输出数据指针
- float* pdata = ortOutputs[0].GetTensorMutableData<float>();
- // 获取输出图像的高度和宽度
- int h = ortOutputs.at(0).GetTensorTypeAndShapeInfo().GetShape().at(2);
- int w = ortOutputs.at(0).GetTensorTypeAndShapeInfo().GetShape().at(1);
- // 存储预测的标签
- prebLabel.resize(w);
- string results;
- results = PostProcess(w, h, pdata);
- return results;
- }
- string InferNetOnnxPaddleOcrCrnnReg::PostProcess(int wIn, int hIn, float* pdataIn)
- {
- int i = 0, j = 0;
- // 遍历输出,获取每列的最大值的索引作为标签
- for (i = 0; i < wIn; i++)
- {
- int one_label_idx = 0;
- float max_data = -10000;
- for (j = 0; j < hIn; j++)
- {
- float data_ = pdataIn[i * hIn + j];
- if (data_ > max_data)
- {
- max_data = data_;
- one_label_idx = j;
- }
- }
- prebLabel[i] = one_label_idx;
- }
- // 存储去重后的非空白标签
- std::vector<int> no_repeat_blank_label;
- for (size_t elementIndex = 0; elementIndex < wIn; ++elementIndex)
- {
- if (prebLabel[elementIndex] != 0 && !(elementIndex > 0 && prebLabel[elementIndex - 1] == prebLabel[elementIndex]))
- {
- no_repeat_blank_label.push_back(prebLabel[elementIndex] - 1);
- }
- }
- // 构建最终的预测文本
- int len_s = no_repeat_blank_label.size();
- std::string plate_text;
- for (i = 0; i < len_s; i++)
- {
- plate_text += alphabet[no_repeat_blank_label[i]];
- }
- return plate_text;
- }
- cv::Mat InferNetOnnxPaddleOcrCrnnReg::preprocess(cv::Mat srcimg)
- {
- cv::Mat dstimg;
- int h = srcimg.rows;
- int w = srcimg.cols;
- const float ratio = w / float(h);
- int resized_w = int(ceil((float)this->inpHeight * ratio));
- if (ceil(this->inpHeight * ratio) > this->inpWidth)
- {
- resized_w = this->inpWidth;
- }
- resize(srcimg, dstimg, Size(resized_w, this->inpHeight), INTER_LINEAR);
- return dstimg;
- }
- void InferNetOnnxPaddleOcrCrnnReg::normalize_(cv::Mat img)
- {
- //img.convertTo(img, CV_32F);
- int row = img.rows;
- int col = img.cols;
- this->input_image_.resize(this->inpHeight * this->inpWidth * img.channels());
- for (int c = 0; c < 3; c++)
- {
- for (int i = 0; i < row; i++)
- {
- for (int j = 0; j < inpWidth; j++)
- {
- if (j < col)
- {
- float pix = img.ptr<uchar>(i)[j * 3 + c];
- this->input_image_[c * row * inpWidth + i * inpWidth + j] = (pix / 255.0 - 0.5) / 0.5;
- }
- else
- {
- this->input_image_[c * row * inpWidth + i * inpWidth + j] = 0;
- }
- }
- }
- }
- }
- void InferNetOnnxPaddleOcrCrnnReg::Dispose()
- {
- // 释放 ort_session 对象
- if (ort_session != nullptr)
- {
- delete ort_session;
- ort_session = nullptr;
- }
- }