!30106 fix bugs reported by codex and fuzz testing

Merge pull request !30106 from cjh9368/clean_static_error_1_6
This commit is contained in:
i-robot 2022-02-17 01:16:27 +00:00 committed by Gitee
commit 5085ef5e2b
No known key found for this signature in database
GPG Key ID: 173E9B9CA92EEF8F
19 changed files with 33 additions and 38 deletions

View File

@ -42,6 +42,6 @@ OpParameter *PopulateRandomStandardNormalParameter(const void *prim) {
return reinterpret_cast<OpParameter *>(param);
}
REG_POPULATE(PrimitiveType_UniformReal, DefaultPopulateParameter, SCHEMA_CUR)
REG_POPULATE(PrimitiveType_UniformReal, PopulateRandomStandardNormalParameter, SCHEMA_CUR)
} // namespace lite
} // namespace mindspore

View File

@ -71,7 +71,7 @@ int TensorArrayCPUKernel::Prepare() {
return RET_OK;
}
inline int TensorArrayCPUKernel::Run() {
int TensorArrayCPUKernel::Run() {
// set handle to outputs, fake malloc, call set_data
void *delta = InnerKernel::ms_context_->allocator->Malloc(sizeof(char *));
CHECK_NULL_RETURN(delta);

View File

@ -52,7 +52,7 @@ class TensorArrayBaseCPUKernel : public InnerKernel {
int Prepare() override;
int ReSize() override { return 0; }
inline int Run() override;
int Run() override;
protected:
lite::Tensor *handle_{nullptr};

View File

@ -419,7 +419,7 @@ int AffineFp32CPUKernel::FullSpliceRun() {
for (int r = 0; r < splice_dst_row_; ++r) {
for (int off = 0; off < affine_parameter_->context_size_; ++off) {
int r_off = r - src_to_dst_row_offset_ + affine_parameter_->context_[off];
const float *tmp_src_data = input_data + r_off * splice_src_col_;
const float *tmp_src_data = input_data + static_cast<int64_t>(r_off) * splice_src_col_;
float *tmp_dst_data = output_data + r * splice_dst_col_;
memcpy(tmp_dst_data + off * splice_src_col_, tmp_src_data, splice_src_col_ * sizeof(float));
}

View File

@ -56,7 +56,6 @@ class AffineFp32CPUKernel : public InnerKernel {
int FullSpliceRun();
int DoActivation(lite::Tensor *tensor);
private:
AffineParameter *affine_parameter_{nullptr};
kernel::InnerKernel *full_mult_kernel_{nullptr};
kernel::InnerKernel *increment_mult_kernel_{nullptr};

View File

@ -348,7 +348,7 @@ int AnfExporter::ExportPartialNode(const std::unique_ptr<schema::MetaGraphT> &me
auto fg = vnode->value()->cast<FuncGraphPtr>();
MS_CHECK_TRUE_MSG(fg != nullptr, RET_NULL_PTR, "func graph is nullptr.");
if (fg_subgraph_map_.find(fg) != fg_subgraph_map_.end()) {
partial_fusion_primc->sub_graph_index = fg_subgraph_map_.at(fg);
partial_fusion_primc->sub_graph_index = static_cast<int>(fg_subgraph_map_.at(fg));
return RET_OK;
}
@ -370,7 +370,7 @@ std::list<CNodePtr> AnfExporter::InsertCallNode(const FuncGraphPtr &func_graph)
if (fg != nullptr) {
auto partial_cnode = CreatePartialCnode(fg, (*it));
auto call_cnode = CreateCallCnode(fg, partial_cnode);
it++;
++it;
it = cnodes.insert(it, call_cnode);
continue;
} else {
@ -380,7 +380,7 @@ std::list<CNodePtr> AnfExporter::InsertCallNode(const FuncGraphPtr &func_graph)
(*it)->set_inputs(cnode_input);
}
}
it++;
++it;
}
return cnodes;
}
@ -766,8 +766,7 @@ int AnfExporter::ConvertInputParameter(const CNodePtr &cnode, size_t index, cons
return RET_OK;
}
DataInfo data_info;
if (FetchDataFromParameterNode(cnode, index, converter::FmkType(meta_graphT->fmkType), train_flag_, &data_info,
true) != RET_OK) {
if (FetchDataFromParameterNode(cnode, index, converter::FmkType(meta_graphT->fmkType), &data_info, true) != RET_OK) {
MS_LOG(ERROR) << "parse const node failed.";
return RET_ERROR;
}

View File

@ -20,6 +20,7 @@
#include <vector>
#include <unordered_map>
#include <map>
#include <utility>
#include "tools/converter/quant_param_holder.h"
#include "tools/optimizer/common/gllo_utils.h"
#include "utils/check_convert_utils.h"
@ -293,8 +294,8 @@ int FetchFromDefaultParam(const ParameterPtr &param_node, const converter::FmkTy
return RET_OK;
}
int FetchDataFromParameterNode(const CNodePtr &cnode, size_t index, converter::FmkType fmk_type, bool train_flag,
DataInfo *data_info, bool copy_data) {
int FetchDataFromParameterNode(const CNodePtr &cnode, size_t index, converter::FmkType fmk_type, DataInfo *data_info,
bool copy_data) {
MS_ASSERT(cnode != nullptr && data_info != nullptr);
auto param_node = cnode->input(index)->cast<ParameterPtr>();
MS_CHECK_TRUE_MSG(param_node != nullptr, RET_ERROR, "input node is not parameter node.");
@ -360,8 +361,7 @@ int FetchDataFromValueNode(const CNodePtr &cnode, size_t index, converter::FmkTy
return ret;
}
int FetchDataFromCNode(const CNodePtr &cnode, size_t index, converter::FmkType fmk_type, bool train_flag,
DataInfo *data_info) {
int FetchDataFromCNode(const CNodePtr &cnode, size_t index, converter::FmkType fmk_type, DataInfo *data_info) {
MS_ASSERT(cnode != nullptr && data_info != nullptr);
auto abstract = opt::GetCNodeInputAbstract(cnode, index);
if (abstract == nullptr) {
@ -532,7 +532,7 @@ int FetchOpParameterFromFuncGraph(const FuncGraphPtr &func_graph, std::map<std::
return ret;
}
parameter->thread_num_ = 1;
op_parameters->insert({cnode->fullname_with_scope(), parameter});
op_parameters->insert(std::pair<std::string, OpParameter *>(cnode->fullname_with_scope(), parameter));
}
return RET_OK;
}

View File

@ -42,14 +42,13 @@ struct DataInfo {
int FetchFromDefaultParam(const ParameterPtr &param_node, const converter::FmkType &fmk_type, DataInfo *data_info,
bool copy_data);
int FetchDataFromParameterNode(const CNodePtr &cnode, size_t index, converter::FmkType fmk_type, bool train_flag,
DataInfo *data_info, bool copy_data);
int FetchDataFromParameterNode(const CNodePtr &cnode, size_t index, converter::FmkType fmk_type, DataInfo *data_info,
bool copy_data);
int FetchDataFromValueNode(const CNodePtr &cnode, size_t index, converter::FmkType fmk_type, bool train_flag,
DataInfo *data_info, bool copy_data);
int FetchDataFromCNode(const CNodePtr &cnode, size_t index, converter::FmkType fmk_type, bool train_flag,
DataInfo *data_info);
int FetchDataFromCNode(const CNodePtr &cnode, size_t index, converter::FmkType fmk_type, DataInfo *data_info);
int RemoveIfDepend(const CNodePtr &cnode);

View File

@ -91,7 +91,7 @@ AnfNodePtr CloneParameterAndValueNode(const CNodePtr &cnode, size_t index, const
DataInfo data_info;
STATUS status;
if (utils::isa<Parameter>(node)) {
status = FetchDataFromParameterNode(cnode, index, flags->fmk, flags->trainModel, &data_info, true);
status = FetchDataFromParameterNode(cnode, index, flags->fmk, &data_info, true);
} else if (utils::isa<ValueNode>(node)) {
status = FetchDataFromValueNode(cnode, index, flags->fmk, flags->trainModel, &data_info, true);
} else {

View File

@ -39,7 +39,7 @@ bool GetInOutDataTypeValue(const CNodePtr &cast_cnode, int *output_type_value, i
DataInfo data_info;
auto output_type_node = cast_cnode->input(opt::kInputIndexTwo);
if (utils::isa<ParameterPtr>(output_type_node)) {
if (FetchDataFromParameterNode(cast_cnode, opt::kInputIndexTwo, converter::kFmkTypeMs, false, &data_info, true) !=
if (FetchDataFromParameterNode(cast_cnode, opt::kInputIndexTwo, converter::kFmkTypeMs, &data_info, true) !=
lite::RET_OK) {
MS_LOG(ERROR) << "Fetch data from parameter node failed.";
return false;

View File

@ -169,7 +169,7 @@ int ReplaceLstmNode(const FuncGraphManagerPtr &manager, const FuncGraphPtr &func
MS_CHECK_TRUE_MSG(lstm_weight_node != nullptr, RET_ERROR, "lstm_weight_node is nullptr.");
lite::DataInfo data_info;
if (lstm_weight_node->isa<Parameter>()) {
auto ret = FetchDataFromParameterNode(lstm_cnode, kLSTMWeightIndex, converter::kFmkTypeMs, false, &data_info, true);
auto ret = FetchDataFromParameterNode(lstm_cnode, kLSTMWeightIndex, converter::kFmkTypeMs, &data_info, true);
if (ret != RET_OK) {
MS_LOG(ERROR) << "parse const node failed.";
return RET_ERROR;

View File

@ -164,7 +164,7 @@ STATUS GetTransposePerm(const CNodePtr &cnode, std::vector<int> *perm) {
lite::DataInfo data_info;
int status;
if (utils::isa<ParameterPtr>(cnode->input(kInputIndexTwo))) {
status = lite::FetchDataFromParameterNode(cnode, kInputIndexTwo, converter::kFmkTypeMs, false, &data_info, true);
status = lite::FetchDataFromParameterNode(cnode, kInputIndexTwo, converter::kFmkTypeMs, &data_info, true);
} else {
status = lite::FetchDataFromValueNode(cnode, kInputIndexTwo, converter::kFmkTypeMs, false, &data_info, true);
}

View File

@ -157,8 +157,7 @@ int ConvBiasaddFusion::DoFuison(const FuncGraphPtr &func_graph, const AnfNodePtr
lite::DataInfo add_bias_info;
int status = lite::RET_ERROR;
if (add_bias->isa<Parameter>()) {
status =
lite::FetchDataFromParameterNode(add_cnode, kInputIndexTwo, converter::kFmkTypeMs, false, &add_bias_info, true);
status = lite::FetchDataFromParameterNode(add_cnode, kInputIndexTwo, converter::kFmkTypeMs, &add_bias_info, true);
} else if (add_bias->isa<ValueNode>()) {
status =
lite::FetchDataFromValueNode(add_cnode, kInputIndexTwo, converter::kFmkTypeMs, false, &add_bias_info, true);
@ -173,8 +172,8 @@ int ConvBiasaddFusion::DoFuison(const FuncGraphPtr &func_graph, const AnfNodePtr
if (conv_cnode->size() > kInputSizeThree) {
auto conv_bias = conv_cnode->input(kInputIndexThree);
if (conv_bias->isa<Parameter>()) {
status = lite::FetchDataFromParameterNode(conv_cnode, kInputIndexThree, converter::kFmkTypeMs, false,
&conv_bias_info, true);
status =
lite::FetchDataFromParameterNode(conv_cnode, kInputIndexThree, converter::kFmkTypeMs, &conv_bias_info, true);
} else if (conv_bias->isa<ValueNode>()) {
status =
lite::FetchDataFromValueNode(conv_cnode, kInputIndexThree, converter::kFmkTypeMs, false, &conv_bias_info, true);

View File

@ -33,7 +33,7 @@ STATUS GetIndexValue(const CNodePtr &cnode, std::vector<int> *index, int node_in
lite::DataInfo data_info;
int status = RET_ERROR;
if (utils::isa<ParameterPtr>(cnode->input(node_index))) {
status = lite::FetchDataFromParameterNode(cnode, node_index, converter::kFmkTypeMs, false, &data_info, true);
status = lite::FetchDataFromParameterNode(cnode, node_index, converter::kFmkTypeMs, &data_info, true);
} else {
status = lite::FetchDataFromValueNode(cnode, node_index, converter::kFmkTypeMs, false, &data_info, true);
}

View File

@ -160,7 +160,7 @@ int ConvertTensorToNCOrNH(const FuncGraphPtr &func_graph, const CNodePtr &cnode,
if (!input_node->has_default()) {
return lite::RET_OK;
}
status = lite::FetchDataFromParameterNode(cnode, index, fmk_type, train_flag, &data_info, true);
status = lite::FetchDataFromParameterNode(cnode, index, fmk_type, &data_info, true);
} else {
status = lite::FetchDataFromValueNode(cnode, index, fmk_type, train_flag, &data_info, true);
}

View File

@ -156,7 +156,7 @@ int GetCNodeConstInput(const CNodePtr &cnode, std::vector<TensorPtr> *const_ms_i
STATUS status;
lite::DataInfo data_info;
if (utils::isa<ParameterPtr>(cnode->input(i))) {
status = lite::FetchDataFromParameterNode(cnode, i, fmk_type, train_flag, &data_info, copy_data);
status = lite::FetchDataFromParameterNode(cnode, i, fmk_type, &data_info, copy_data);
} else {
status = lite::FetchDataFromValueNode(cnode, i, fmk_type, train_flag, &data_info, copy_data);
}
@ -172,8 +172,7 @@ int GetCNodeConstInput(const CNodePtr &cnode, std::vector<TensorPtr> *const_ms_i
return ConvertToLiteTensor(data_infos, const_ms_inputs);
}
int GetCNodeVarInput(const CNodePtr &cnode, std::vector<TensorPtr> *var_ms_inputs, converter::FmkType fmk_type,
bool train_flag) {
int GetCNodeVarInput(const CNodePtr &cnode, std::vector<TensorPtr> *var_ms_inputs, converter::FmkType fmk_type) {
MS_ASSERT(cnode != nullptr);
MS_ASSERT(var_ms_inputs != nullptr);
for (size_t i = 1; i < cnode->size(); ++i) {
@ -181,7 +180,7 @@ int GetCNodeVarInput(const CNodePtr &cnode, std::vector<TensorPtr> *var_ms_input
continue;
}
lite::DataInfo data_info;
if (lite::FetchDataFromCNode(cnode, i, fmk_type, train_flag, &data_info) != lite::RET_OK) {
if (lite::FetchDataFromCNode(cnode, i, fmk_type, &data_info) != lite::RET_OK) {
MS_LOG(ERROR) << "parse cnode failed.";
return lite::RET_ERROR;
}
@ -229,7 +228,7 @@ int LiteTensorExtractor::GetCNodeInputTensors(const CNodePtr &cnode, std::vector
return lite::RET_ERROR;
}
std::vector<TensorPtr> var_inputs;
if (GetCNodeVarInput(cnode, &var_inputs, fmk_type, train_flag) != lite::RET_OK) {
if (GetCNodeVarInput(cnode, &var_inputs, fmk_type) != lite::RET_OK) {
MS_LOG(ERROR) << "get var inputs failed.";
cnode->set_inputs(origin_inputs);
return lite::RET_ERROR;

View File

@ -174,9 +174,9 @@ std::vector<int> NodeInferShape::GetInputShape(const CNodePtr &cnode, size_t ind
position = 1;
}
if (utils::isa<CNode>(base_node->input(position))) {
status = lite::FetchDataFromCNode(base_node, position, fmk_type_, train_flag_, &data_info);
status = lite::FetchDataFromCNode(base_node, position, fmk_type_, &data_info);
} else if (utils::isa<Parameter>(base_node->input(position))) {
status = lite::FetchDataFromParameterNode(base_node, position, fmk_type_, train_flag_, &data_info, false);
status = lite::FetchDataFromParameterNode(base_node, position, fmk_type_, &data_info, false);
} else if (utils::isa<ValueNodePtr>(base_node->input(position))) {
status = lite::FetchDataFromValueNode(base_node, position, fmk_type_, train_flag_, &data_info, false);
} else {

View File

@ -270,7 +270,7 @@ int RemoveRedundantOpPass::GetConstDataFromInputNode(const CNodePtr &cnode, lite
auto padding_node = cnode->input(kInputIndexTwo);
MS_ASSERT(padding_node != nullptr);
if (utils::isa<Parameter>(padding_node)) {
auto status = lite::FetchDataFromParameterNode(cnode, 2, converter::kFmkTypeMs, false, data_info, true);
auto status = lite::FetchDataFromParameterNode(cnode, 2, converter::kFmkTypeMs, data_info, true);
if (status != lite::RET_OK && status != lite::RET_NO_CHANGE) {
MS_LOG(ERROR) << "fetch data from parameter node failed.";
return lite::RET_ERROR;

View File

@ -212,7 +212,7 @@ STATUS ChangeOpPad(const FuncGraphPtr &func_graph, const CNodePtr &cnode, Format
lite::DataInfo data_info;
int status;
if (utils::isa<Parameter>(second_input)) {
status = lite::FetchDataFromParameterNode(cnode, kInputIndexTwo, converter::kFmkTypeMs, false, &data_info, true);
status = lite::FetchDataFromParameterNode(cnode, kInputIndexTwo, converter::kFmkTypeMs, &data_info, true);
} else if (utils::isa<ValueNode>(second_input)) {
status = lite::FetchDataFromValueNode(cnode, kInputIndexTwo, converter::kFmkTypeMs, false, &data_info, true);
} else {