!41100 [Huawei] Codex bug fix master

Merge pull request !41100 from 赵英灼/codecheck
i-robot 2022-09-21 01:26:08 +00:00 committed by Gitee
commit a794cb1b82
8 changed files with 21 additions and 15 deletions

Changed file 1 of 8:

@@ -29,7 +29,7 @@ OpParameter *PopulateGatherDParameter(const void *prim) {
     MS_LOG(ERROR) << "malloc GatherParameter failed.";
     return nullptr;
   }
-  memset(param, 0, sizeof(GatherParameter));
+  (void)memset(param, 0, sizeof(GatherParameter));
   param->op_parameter_.type_ = primitive->value_type();
   return reinterpret_cast<OpParameter *>(param);
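
The only change in this file is the (void) cast on memset: memset returns its first argument, and the code checker flags silently discarded return values, so the cast records that the discard is intentional. A minimal, self-contained sketch of the same pattern (the helper function here is illustrative, not taken from the MindSpore sources):

#include <cstdlib>
#include <cstring>

struct GatherParameter {  // stand-in for the real parameter struct
  int type_;
};

GatherParameter *MakeZeroedParam() {
  auto *param = static_cast<GatherParameter *>(malloc(sizeof(GatherParameter)));
  if (param == nullptr) {
    return nullptr;
  }
  // memset returns its first argument; the (void) cast tells the code
  // checker the return value is discarded on purpose.
  (void)memset(param, 0, sizeof(GatherParameter));
  return param;
}

The same idiom appears again later in this merge for emplace_back.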

Changed file 2 of 8:

@@ -31,7 +31,7 @@ class GatherDCPUKernel : public GatherBaseCPUKernel {
   int Run() override;
- private:
+ protected:
   int AssignIndicesData(bool isIndicesInt32) override;
 };
 } // namespace mindspore::kernel
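
This hunk changes the access specifier ahead of AssignIndicesData from private to protected. One common reason for such a codecheck fix is to keep an override at the same access level as the virtual it overrides in the base class; a small sketch of that pattern, with the base-class declaration assumed purely for illustration:

class GatherBaseCPUKernel {
 public:
  virtual ~GatherBaseCPUKernel() = default;

 protected:
  // Assumed shape: the base class exposes the hook as a protected virtual.
  virtual int AssignIndicesData(bool isIndicesInt32) = 0;
};

class GatherDCPUKernel : public GatherBaseCPUKernel {
 protected:
  // The override stays at the same (protected) access level as the base,
  // mirroring the private -> protected change in the hunk above.
  int AssignIndicesData(bool isIndicesInt32) override { return 0; }
};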

Changed file 3 of 8:

@@ -232,6 +232,7 @@ OpDefCopyer GetSimpleOpCopyer() {
 STATUS AddTensor2Node(schema::MetaGraphT *graphT, uint32_t nodeIdx, std::unique_ptr<TensorT> tensor,
                       InsertPlace place) {
+  MS_CHECK_TRUE_MSG(graphT != nullptr, RET_NULL_PTR, "graphT is nullptr");
   if (nodeIdx >= graphT->nodes.size()) {
     MS_LOG(ERROR) << "nodeIdx out of range: " << nodeIdx;
     return RET_PARAM_INVALID;
@@ -539,7 +540,9 @@ TypeId GetAbstractTensorDtype(const abstract::AbstractTensorPtr &tensor) {
 }
 TypeId GetParameterDtype(const ParameterPtr &param_node) {
+  MS_CHECK_TRUE_MSG(param_node != nullptr, kTypeUnknown, "param_node is nullptr");
   auto abstract_base = param_node->abstract();
+  MS_CHECK_TRUE_MSG(abstract_base != nullptr, kTypeUnknown, "abstract_base is nullptr");
   auto abstract_tensor = abstract_base->cast<abstract::AbstractTensorPtr>();
   MS_CHECK_TRUE_MSG(abstract_tensor != nullptr, kTypeUnknown, "Cast to abstract tensor failed!");
   auto type_ptr = abstract_tensor->element()->GetTypeTrack();
@@ -637,6 +640,7 @@ STATUS UpdateFuncGraphInputAndOutputNames(const FuncGraphPtr &func_graph) {
 }
 STATUS UpdateGraphOutputName(schema::MetaGraphT *meta_graph) {
+  MS_CHECK_TRUE_MSG(meta_graph != nullptr, RET_NULL_PTR, "meta_graph is nullptr");
   auto output_names = ConverterInnerContext::GetInstance()->GetGraphOutputTensorNames();
   if (output_names.size() > meta_graph->outputIndex.size()) {
     MS_LOG(ERROR) << "the num of setting output_names is greater than actual, " << output_names.size() << " > "

Changed file 4 of 8:

@@ -152,7 +152,7 @@ STATUS UpdateNodeIndex(schema::CNodeT *node, uint32_t deleteIdx) {
 }
 STATUS RemoveTensor(schema::MetaGraphT *graphT, std::vector<uint32_t> toDeleteTensorIdxes, bool forceDelete) {
-  MS_ASSERT(graphT != nullptr);
+  MS_CHECK_TRUE_MSG(graphT != nullptr, RET_NULL_PTR, "graphT is nullptr");
   for (auto iter = toDeleteTensorIdxes.begin(); iter != toDeleteTensorIdxes.end();) {
     uint32_t deleteIdx = *iter;
     if (!forceDelete) {
@@ -213,8 +213,8 @@ STATUS RemoveTensor(schema::MetaGraphT *graphT, std::vector<uint32_t> toDeleteTe
 }
 STATUS IsolateNode(schema::MetaGraphT *graphT, schema::CNodeT *node) {
-  MS_ASSERT(graphT != nullptr);
-  MS_ASSERT(node != nullptr);
+  MS_CHECK_TRUE_MSG(graphT != nullptr, RET_NULL_PTR, "graphT is nullptr");
+  MS_CHECK_TRUE_MSG(node != nullptr, RET_NULL_PTR, "node is nullptr");
   size_t nodeIdx = 0;
   for (size_t i = 0; i < graphT->nodes.size(); i++) {
     auto &inNode = graphT->nodes.at(i);
@@ -261,7 +261,7 @@ STATUS IsolateNode(schema::MetaGraphT *graphT, schema::CNodeT *node) {
 }
 STATUS IsolateOneWayNode(schema::MetaGraphT *graphT, size_t nodeIdx, bool removeTensor) {
-  MS_ASSERT(graphT != nullptr);
+  MS_CHECK_TRUE_MSG(graphT != nullptr, RET_NULL_PTR, "graphT is nullptr");
   if (graphT->nodes.size() <= nodeIdx) {
     MS_LOG(ERROR) << "nodeIdx out of range: " << nodeIdx;
     return RET_PARAM_INVALID;
@@ -318,13 +318,13 @@ STATUS IsolateOneWayNode(schema::MetaGraphT *graphT, size_t nodeIdx, bool remove
 }
 STATUS IsolateOneWayNode(schema::MetaGraphT *graph, size_t subGraphIdx, size_t nodeIdx, bool removeTensor) {
-  MS_ASSERT(graph != nullptr);
+  MS_CHECK_TRUE_MSG(graph != nullptr, RET_NULL_PTR, "graph is nullptr");
   return IsolateOneWayNode(graph, nodeIdx, removeTensor);
 }
 STATUS IsolateOneWayNode(schema::MetaGraphT *graphT, schema::CNodeT *node, bool removeTensor) {
-  MS_ASSERT(graphT != nullptr);
-  MS_ASSERT(node != nullptr);
+  MS_CHECK_TRUE_MSG(graphT != nullptr, RET_NULL_PTR, "graphT is nullptr");
+  MS_CHECK_TRUE_MSG(node != nullptr, RET_NULL_PTR, "node is nullptr");
   bool isSubNode = false;
   size_t nodeIdx = 0;
   for (size_t i = 0; i < graphT->nodes.size(); i++) {
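
The recurring change in this file swaps MS_ASSERT, which is typically compiled out of release builds and would leave a null pointer unchecked, for MS_CHECK_TRUE_MSG(condition, return_value, message), which logs and returns an error at runtime. A rough, self-contained sketch of the difference; the macro bodies below are assumptions for illustration, not the MindSpore definitions:

#include <cassert>
#include <iostream>

// Illustrative stand-ins only; the real macros log through MS_LOG.
#define MS_ASSERT(cond) assert(cond)  // no-op when NDEBUG is defined
#define MS_CHECK_TRUE_MSG(cond, ret, msg) \
  do {                                    \
    if (!(cond)) {                        \
      std::cerr << (msg) << std::endl;    \
      return (ret);                       \
    }                                     \
  } while (0)

constexpr int RET_OK = 0;
constexpr int RET_NULL_PTR = -1;  // illustrative error code
struct MetaGraphT {};             // illustrative placeholder type

// Simplified signatures for illustration; the real functions take more arguments.
int IsolateOneWayNodeOld(MetaGraphT *graphT) {
  MS_ASSERT(graphT != nullptr);  // debug-only; a null graphT still reaches later code in release builds
  return RET_OK;
}

int IsolateOneWayNodeNew(MetaGraphT *graphT) {
  // Active in every build: a null graph becomes an error return instead of
  // an assertion that vanishes in release mode.
  MS_CHECK_TRUE_MSG(graphT != nullptr, RET_NULL_PTR, "graphT is nullptr");
  return RET_OK;
}

The second macro argument is whatever the enclosing function should return on failure: RET_NULL_PTR in the STATUS helpers here, 0 in the size_t helpers in the next two files, and kTypeUnknown in GetParameterDtype above.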

Changed file 5 of 8:

@@ -174,7 +174,7 @@ STATUS GetFilterDim(const std::vector<int32_t> &oriDims, kTransFilterType type,
 STATUS SetFilterDim(schema::TensorT *tensor, kTransFilterType type, int32_t filterK, int32_t filterC, int32_t filterH,
                     int32_t filterW) {
-  MS_ASSERT(tensor != nullptr);
+  MS_CHECK_TRUE_MSG(tensor != nullptr, RET_NULL_PTR, "tensor is nullptr");
   if (type == kKCHW2HWCK || type == kCKHW2HWCK || type == kNHWC2HWCK || type == kKHWC2HWCK || type == kCHWK2HWCK) {
     tensor->dims = {filterH, filterW, filterC, filterK};
   } else if (type == kKCHW2HWKC || type == kCKHW2HWKC) {
@@ -291,9 +291,9 @@ STATUS TransFilterFormat(schema::TensorT *tensor, schema::Format dstFormat) {
 }
 size_t GetCNodeOutputsSize(const std::shared_ptr<AnfNode> &anf_node, bool train_flag) {
-  MS_ASSERT(anf_node != nullptr);
+  MS_CHECK_TRUE_MSG(anf_node != nullptr, 0, "anf_node is nullptr");
   auto cnode = anf_node->cast<CNodePtr>();
-  MS_ASSERT(cnode != nullptr);
+  MS_CHECK_TRUE_MSG(cnode != nullptr, 0, "cnode is nullptr");
   if (train_flag &&
       (opt::CheckPrimitiveType(cnode, prim::kPrimConv2DFusion) || opt::CheckPrimitiveType(cnode, prim::kPrimAdam))) {
     return 1;

Changed file 6 of 8:

@@ -270,7 +270,7 @@ std::unique_ptr<TensorT> CopyTensorDefT(const std::unique_ptr<TensorT> &oldTenso
 }
 size_t GetRefCount(MetaGraphT *graphT, uint32_t tensorIdx) {
-  MS_ASSERT(graphT != nullptr);
+  MS_CHECK_TRUE_MSG(graphT != nullptr, 0, "graphT is nullptr");
   MS_ASSERT(graphT->allTensors.size() > tensorIdx);
   size_t refCount = 0;
   for (auto &node : graphT->nodes) {
@@ -335,7 +335,7 @@ int GenerateRandomData(size_t size, void *data, int data_type) {
 }
 int GenerateRandomData(mindspore::MSTensor *tensor) {
-  MS_ASSERT(tensor != nullptr);
+  MS_CHECK_TRUE_MSG(tensor != nullptr, RET_NULL_PTR, "tensor is nullptr");
   auto input_data = tensor->MutableData();
   if (input_data == nullptr) {
     MS_LOG(ERROR) << "MallocData for inTensor failed";

Changed file 7 of 8:

@@ -158,7 +158,7 @@ STATUS MindsporeImporter::TraceOutput(const AnfNodePtr &node) {
       out_name = out_name + "_" + std::to_string(output_idx);
     }
     MS_LOG(INFO) << "Graph out name: " << out_name;
-    output_tensor_name_.emplace_back(out_name);
+    (void)output_tensor_name_.emplace_back(out_name);
   }
   return RET_OK;
 }
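
Here the return value of emplace_back is cast to void. Since C++17, emplace_back returns a reference to the newly inserted element, so the same discarded-return-value rule that flagged memset applies; the cast marks the discard as deliberate. A small sketch (the member name is taken from the hunk, the surrounding class is illustrative):

#include <string>
#include <vector>

class ImporterSketch {
 public:
  void RecordOutputName(const std::string &out_name) {
    // emplace_back returns a reference to the new element (C++17);
    // casting to void shows the return value is intentionally unused.
    (void)output_tensor_name_.emplace_back(out_name);
  }

 private:
  std::vector<std::string> output_tensor_name_;
};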

Changed file 8 of 8:

@@ -396,6 +396,7 @@ STATUS CaffeModelParser::ConvertGraphInputsOfDim() {
     auto graph = ConvertGraph(res_graph_);
     MSLITE_CHECK_PTR(graph);
     auto parameter = graph->add_parameter();
+    MSLITE_CHECK_PTR(parameter);
     auto abstract = CreateTensorAbstract(shape, kNumberTypeFloat32);
     if (abstract == nullptr) {
       MS_LOG(ERROR) << "Create tensor abstarct failed";
@@ -622,6 +623,7 @@ STATUS CaffeModelParser::ConvertBottom(const caffe::LayerParameter &layer, std::
 }
 STATUS CaffeModelParser::ConvertTop(const caffe::LayerParameter &layer, const CNodePtr &cnode) {
+  MSLITE_CHECK_PTR(cnode);
   if (layer.top_size() == 1) {
     auto abstract = CreateTensorAbstract({}, kNumberTypeFloat32);
     if (abstract == nullptr) {
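
The two MSLITE_CHECK_PTR lines added in this file guard pointers (the result of graph->add_parameter() and the cnode argument) that were previously used without a null check. Judging from how it is used, the macro bails out of the calling function when its argument is null; the expansion below is an assumption for illustration, not the real MindSpore Lite definition:

#include <iostream>

constexpr int RET_NULL_PTR = -1;  // illustrative error code, as above

// Assumed shape of the macro: log the offending name and return an error.
#define MSLITE_CHECK_PTR(ptr)                            \
  do {                                                   \
    if ((ptr) == nullptr) {                              \
      std::cerr << #ptr << " is nullptr" << std::endl;   \
      return RET_NULL_PTR;                               \
    }                                                    \
  } while (0)

struct CNode {};  // illustrative placeholder for the object behind CNodePtr

int ConvertTopSketch(const CNode *cnode) {  // simplified signature for illustration
  MSLITE_CHECK_PTR(cnode);  // added guard: fail fast instead of dereferencing null
  return 0;
}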