!30175 [MS][lite][providers]codex

Merge pull request !30175 from KXiong/master
This commit is contained in:
i-robot 2022-02-18 08:15:10 +00:00 committed by Gitee
commit 3b9c7c75ad
No known key found for this signature in database
GPG Key ID: 173E9B9CA92EEF8F
5 changed files with 27 additions and 22 deletions

View File

@@ -22,10 +22,10 @@
using mindspore::lite::RET_ERROR;
using mindspore::lite::RET_OK;
constexpr int kSleepUs = 100;
namespace mindspore {
namespace nnie {
constexpr int kSleepUs = 100;
static void NnieParamRelease(NnieParam *nnie_param) {
if (nnie_param == nullptr) {
return;
@@ -179,7 +179,8 @@ static void GetBlobMemSize(SVP_NNIE_NODE_S nnie_node[], HI_U32 node_num, HI_U32
}
static int GetTaskAndBlobBufSize(NnieCfg *nnie_cfg, NnieParam *nnie_param, HI_U32 *total_task_buf_size,
HI_U32 *tmp_buf_size, NnieBlobSize blob_size[], HI_U32 *total_size) {
HI_U32 *tmp_buf_size, NnieBlobSize blob_size[], HI_U32 blob_size_len,
HI_U32 *total_size) {
HI_S32 ret = HI_SUCCESS;
HI_U32 i, j;
HI_U32 total_step = 0;
@@ -199,7 +200,7 @@ static int GetTaskAndBlobBufSize(NnieCfg *nnie_cfg, NnieParam *nnie_param, HI_U3
*tmp_buf_size = nnie_param->model_->u32TmpBufSize;
*total_size += *total_task_buf_size + *tmp_buf_size;
for (i = 0; i < nnie_param->model_->u32NetSegNum; i++) {
for (i = 0; i < nnie_param->model_->u32NetSegNum && i < blob_size_len; i++) {
if (SVP_NNIE_NET_TYPE_RECURRENT == nnie_param->model_->astSeg[i].enNetType) {
for (j = 0; j < nnie_param->seg_data_[i].src_[0].u32Num; j++) {
total_step += *(reinterpret_cast<HI_S32 *>(
@@ -219,9 +220,7 @@ static int GetTaskAndBlobBufSize(NnieCfg *nnie_cfg, NnieParam *nnie_param, HI_U3
static int NnieParamInit(NnieCfg *nnie_cfg, NnieParam *nnie_param) {
HI_U32 i, j;
HI_U32 total_size = 0;
HI_U32 total_task_buf_size = 0;
HI_U32 tmp_buf_size_ = 0;
HI_U32 total_size = 0, total_task_buf_size = 0, tmp_buf_size_ = 0;
HI_S32 ret = HI_SUCCESS;
HI_U32 off_set = 0;
HI_U64 phy_addr = 0;
@@ -230,7 +229,9 @@ static int NnieParamInit(NnieCfg *nnie_cfg, NnieParam *nnie_param) {
FillForwardInfo(nnie_cfg, nnie_param);
ret = GetTaskAndBlobBufSize(nnie_cfg, nnie_param, &total_task_buf_size, &tmp_buf_size_, blob_size, &total_size);
HI_U32 blob_size_len = sizeof(blob_size) / sizeof(blob_size[0]);
ret = GetTaskAndBlobBufSize(nnie_cfg, nnie_param, &total_task_buf_size, &tmp_buf_size_, blob_size, blob_size_len,
&total_size);
if (HI_SUCCESS != ret) {
LOGE("Error,Malloc memory failed! ");
return RET_ERROR;

View File

@@ -21,10 +21,10 @@
using mindspore::lite::RET_ERROR;
using mindspore::lite::RET_OK;
constexpr int kNumInput2 = 2;
namespace mindspore {
namespace nnie {
constexpr int kNumInput2 = 2;
int NNIEManager::CfgInit(int max_roi_num, int step, const std::vector<int> &core_id) {
memset(&nnie_cfg_, 0, sizeof(NnieRunCfg));
@@ -80,10 +80,7 @@ int NNIEManager::Run(std::vector<mindspore::MSTensor> *outputs, unsigned int seg
return RET_OK;
}
void NNIEManager::Release() {
// NniePrintReportResult(&nnie_cfg_.param_);
NnieCommDelete(&nnie_cfg_.param_, &nnie_cfg_.model_);
}
void NNIEManager::Release() { NnieCommDelete(&nnie_cfg_.param_, &nnie_cfg_.model_); }
int NNIEManager::GetOutputData(std::vector<mindspore::MSTensor> *outputs,
const std::vector<std::vector<int64_t>> &outputs_shape, bool run_box) {

View File

@@ -22,11 +22,11 @@
using mindspore::lite::RET_ERROR;
using mindspore::lite::RET_OK;
constexpr int kNumInput2 = 2;
constexpr int kNCHWDims = 4;
namespace mindspore {
namespace proposal {
constexpr int kNumInput2 = 2;
constexpr int kNCHWDims = 4;
uint32_t RpnTmpBufSize(uint32_t num_ratio_anchors, uint32_t num_scale_anchors, uint32_t input_height,
uint32_t input_width) {
uint32_t anchors_num = num_ratio_anchors * num_scale_anchors * input_height * input_width;
@@ -156,6 +156,10 @@ static int32_t NonRecursiveArgQuickSort(int32_t *array, int32_t low, int32_t hig
static int32_t FilterLowScoreBbox(int32_t *proposals, uint32_t anchors_num, uint32_t filter_thresh,
uint32_t *num_after_filter) {
if (proposals == nullptr) {
LOGE("inputs proposals is nullptr");
return RET_ERROR;
}
uint32_t proposal_cnt = anchors_num;
if (filter_thresh > 0) {
@@ -222,6 +226,10 @@ static int32_t SVP_NNIE_Overlap(int32_t x_min1, int32_t y_min1, int32_t x_max1,
static int32_t SVP_NNIE_NonMaxSuppression(int32_t *proposals, uint32_t anchors_num, uint32_t nms_thresh,
uint32_t max_roi_num) {
if (proposals == nullptr) {
LOGE("inputs proposals is nullptr");
return RET_ERROR;
}
/****** define variables *******/
int32_t x_min1;
int32_t y_min1;
@@ -276,7 +284,6 @@ static void Rpn(float **inputs, uint32_t num_ratio_anchors, uint32_t num_scale_a
uint32_t min_size, uint32_t spatial_scale, uint32_t nms_thresh, uint32_t filter_thresh,
uint32_t num_before_nms, char *pu32MemPool, float *proposal_result, uint32_t dst_stride,
uint32_t *num_rois) {
#if 1
/******************** define parameters ****************/
uint32_t size;
int32_t *anchors = nullptr;
@@ -400,6 +407,10 @@ static void Rpn(float **inputs, uint32_t num_ratio_anchors, uint32_t num_scale_a
/******************* Copy the anchors to every pixel in the feature map ******************/
ptr1 = anchors;
if (spatial_scale == 0) {
LOGE("inputs spatial_scale is zero.");
return;
}
pixel_interval = QUANT_BASE / spatial_scale;
for (p = 0; p < inputs_height[0]; p++) {
@@ -527,7 +538,6 @@ static void Rpn(float **inputs, uint32_t num_ratio_anchors, uint32_t num_scale_a
}
*num_rois = roi_count;
#endif
}
int32_t ProposalInit(ProposalParam *param, const std::vector<mindspore::MSTensor> &inputs, uint32_t max_roi_num,

View File

@@ -25,13 +25,13 @@
using mindspore::lite::RET_ERROR;
using mindspore::lite::RET_OK;
using mindspore::schema::PrimitiveType_Custom;
namespace mindspore {
namespace proposal {
constexpr int kMaxSize = 1024;
constexpr int kNumInput2 = 2;
constexpr int kDecimal = 10;
constexpr auto kMazRoiNum = "MaxROINum";
namespace mindspore {
namespace proposal {
// Returns true iff num_str is a non-empty string consisting solely of decimal
// digits, i.e. it parses as a valid unsigned integer literal.
//
// Fix: the argument to isdigit must be representable as unsigned char (or be
// EOF); passing a plain char that happens to be negative (e.g. bytes >= 0x80
// on platforms where char is signed) is undefined behavior. Cast through
// unsigned char before the call.
bool IsValidUnsignedNum(const std::string &num_str) {
  return !num_str.empty() &&
         std::all_of(num_str.begin(), num_str.end(),
                     [](unsigned char c) { return std::isdigit(c) != 0; });
}
@@ -186,8 +186,6 @@ std::shared_ptr<mindspore::kernel::Kernel> ProposalCreateKernel(const std::vecto
}
auto kernel = std::make_shared<ProposalCPUKernel>(inputs, outputs, primitive, ctx, ndims, image_height, image_width);
// auto kernel = new (std::nothrow) ProposalCPUKernel(inputs, outputs, primitive, ctx, ndims, image_height,
// image_width);
if (kernel == nullptr) {
LOGE("new custom kernel is nullptr");
return nullptr;

View File

@@ -67,7 +67,6 @@ Status ProposalInterface::Infer(std::vector<mindspore::MSTensor> *inputs, std::v
} // namespace mindspore
namespace mindspore {
namespace kernel {
// static KernelInterfaceReg a(aa, schema::PrimitiveType_Custom, CustomInferCreater);
REGISTER_CUSTOM_KERNEL_INTERFACE(NNIE, Proposal, proposal::ProposalInferCreater);
} // namespace kernel
} // namespace mindspore