From 9ef4c1b9cc3683202eb1dc59d7818b1570b393a7 Mon Sep 17 00:00:00 2001 From: lyvette Date: Tue, 27 Oct 2020 20:21:41 +0800 Subject: [PATCH] modify static check --- .../legacy_optimizer/graph/tensor_quant_pass.cc | 9 ++++++++- .../lite/tools/converter/quantizer/aware_quantizer.cc | 10 ++++++++-- .../lite/tools/converter/quantizer/calc_quant_param.cc | 5 +++++ .../converter/quantizer/post_training_quantizer.cc | 6 +++++- 4 files changed, 26 insertions(+), 4 deletions(-) diff --git a/mindspore/lite/tools/converter/legacy_optimizer/graph/tensor_quant_pass.cc b/mindspore/lite/tools/converter/legacy_optimizer/graph/tensor_quant_pass.cc index 63b9bee8c69..a649edb0735 100644 --- a/mindspore/lite/tools/converter/legacy_optimizer/graph/tensor_quant_pass.cc +++ b/mindspore/lite/tools/converter/legacy_optimizer/graph/tensor_quant_pass.cc @@ -54,7 +54,14 @@ STATUS TensorQuantPass::Run(schema::MetaGraphT *graph) { tensor->quantParams.emplace_back(weightQauntParam.release()); } tensor->dataType = TypeId::kNumberTypeInt8; - ::memcpy(tensor->data.data(), qDatas.data(), wShapeSize); + tensor->data.clear(); + tensor->data.resize(wShapeSize * sizeof(int8_t)); + auto ret = + memcpy_s(tensor->data.data(), wShapeSize * sizeof(int8_t), qDatas.data(), wShapeSize * sizeof(int8_t)); + if (ret != EOK) { + MS_LOG(ERROR) << "memcpy_s failed: " << ret; + return RET_ERROR; + } } else if (quantParam->dstDtype == TypeId::kNumberTypeInt32) { // quant bias data auto bShapeSize = GetShapeSize(*(tensor.get())); diff --git a/mindspore/lite/tools/converter/quantizer/aware_quantizer.cc b/mindspore/lite/tools/converter/quantizer/aware_quantizer.cc index 6190de2dd37..90377bdac42 100644 --- a/mindspore/lite/tools/converter/quantizer/aware_quantizer.cc +++ b/mindspore/lite/tools/converter/quantizer/aware_quantizer.cc @@ -98,8 +98,14 @@ STATUS AwareQuantizer::DoQuantize() { tensor->quantParams.clear(); tensor->quantParams.emplace_back(weightQauntParam.release()); } - - ::memcpy(tensor->data.data(), 
qDatas.data(), wShapeSize); + tensor->data.clear(); + tensor->data.resize(wShapeSize * sizeof(int8_t)); + auto ret = + memcpy_s(tensor->data.data(), wShapeSize * sizeof(int8_t), qDatas.data(), wShapeSize * sizeof(int8_t)); + if (ret != EOK) { + MS_LOG(ERROR) << "memcpy_s failed: " << ret; + return RET_ERROR; + } } else if (quantParam->dstDtype == TypeId::kNumberTypeInt32) { // quant bias data auto bShapeSize = GetShapeSize(*(tensor.get())); diff --git a/mindspore/lite/tools/converter/quantizer/calc_quant_param.cc b/mindspore/lite/tools/converter/quantizer/calc_quant_param.cc index d5435db54de..63692922824 100644 --- a/mindspore/lite/tools/converter/quantizer/calc_quant_param.cc +++ b/mindspore/lite/tools/converter/quantizer/calc_quant_param.cc @@ -216,6 +216,7 @@ int LinearCalcer::Calc(MetaGraphT *graph, const CNodeT &node) { class CalcConcat : public QuantParamCalcer { public: CalcConcat() = default; + ~CalcConcat() override = default; int Calc(MetaGraphT *graph, const CNodeT &node) override { MS_ASSERT(node.outputIndex.size() == 1); @@ -280,6 +281,7 @@ class CalcConcat : public QuantParamCalcer { class CalcAdd : public QuantParamCalcer { public: CalcAdd() = default; + ~CalcAdd() override = default; int Calc(MetaGraphT *graph, const CNodeT &node) override { MS_ASSERT(node.inputIndex.size() == 2); @@ -357,6 +359,7 @@ class CalcAdd : public QuantParamCalcer { class CalcRealDiv : public QuantParamCalcer { public: CalcRealDiv() = default; + ~CalcRealDiv() override = default; int Calc(MetaGraphT *graph, const CNodeT &node) override { MS_ASSERT(node.inputIndex.size() == 2); @@ -424,6 +427,7 @@ class CalcRealDiv : public QuantParamCalcer { class CalcToSet : public QuantParamCalcer { public: CalcToSet(float min, float max) : min(min), max(max) {} + ~CalcToSet() override = default; int Calc(MetaGraphT *graph, const CNodeT &node) override { MS_ASSERT(node.inputIndex.size() == 1); @@ -468,6 +472,7 @@ class CalcToSet : public QuantParamCalcer { class CalcActivation : public 
QuantParamCalcer { public: CalcActivation() = default; + ~CalcActivation() override = default; int Calc(MetaGraphT *subGraph, const CNodeT &node) override { MS_ASSERT(node.inputIndex.size() == 1); diff --git a/mindspore/lite/tools/converter/quantizer/post_training_quantizer.cc b/mindspore/lite/tools/converter/quantizer/post_training_quantizer.cc index d9d5402284e..8aa8e587bc8 100644 --- a/mindspore/lite/tools/converter/quantizer/post_training_quantizer.cc +++ b/mindspore/lite/tools/converter/quantizer/post_training_quantizer.cc @@ -1323,7 +1323,12 @@ STATUS PostTrainingQuantizer::BiasCorrection(FuncGraphPtr func_graph) { MS_LOG(ERROR) << "new char[] failed"; return RET_MEMORY_FAILED; } - std::memcpy(tensor_data, bias_diff.data(), size); + ret = ::memcpy_s(tensor_data, size * sizeof(char), bias_diff.data(), size * sizeof(char)); + if (ret != EOK) { + MS_LOG(ERROR) << "memcpy_s error: " << ret; + delete[] tensor_data; + return RET_ERROR; + } param_value->set_tensor_addr(tensor_data); param_value->set_tensor_size(size); parameter->set_default_param(param_value);