From a5e643232e998f12350482a2bf01668822c448d9 Mon Sep 17 00:00:00 2001 From: albert-yan Date: Fri, 14 Oct 2022 19:10:16 +0800 Subject: [PATCH] fuzz check --- .../nnacl/infer/control/tensorlist_getitem_infer.c | 3 +++ mindspore/lite/src/litert/lite_mindrt.cc | 6 ++++++ mindspore/lite/src/litert/mindrt_executor.cc | 4 ++++ .../lite/tools/converter/quantizer/quantize_util.cc | 10 +++++++++- 4 files changed, 22 insertions(+), 1 deletion(-) diff --git a/mindspore/ccsrc/plugin/device/cpu/kernel/nnacl/infer/control/tensorlist_getitem_infer.c b/mindspore/ccsrc/plugin/device/cpu/kernel/nnacl/infer/control/tensorlist_getitem_infer.c index 956b2981b3d..b6290223ca1 100644 --- a/mindspore/ccsrc/plugin/device/cpu/kernel/nnacl/infer/control/tensorlist_getitem_infer.c +++ b/mindspore/ccsrc/plugin/device/cpu/kernel/nnacl/infer/control/tensorlist_getitem_infer.c @@ -25,6 +25,9 @@ int TensorListGetItemInferShape(const TensorC *const *inputs, size_t inputs_size return check_ret; } + if (inputs[0]->data_type_ != kObjectTypeTensorType) { + return NNACL_ERR; + } TensorListC *input0 = (TensorListC *)(inputs[0]); const TensorC *get_index = inputs[1]; if (get_index->data_ == NULL) { diff --git a/mindspore/lite/src/litert/lite_mindrt.cc b/mindspore/lite/src/litert/lite_mindrt.cc index f62e33a72d7..ec5b46c638c 100644 --- a/mindspore/lite/src/litert/lite_mindrt.cc +++ b/mindspore/lite/src/litert/lite_mindrt.cc @@ -360,6 +360,12 @@ bool LiteOpActor::NeedResize() { } int LiteOpActor::InitInputData() { + for (size_t i = 0; i < inputs_data_.size(); ++i) { + if (inputs_data_[i] == nullptr) { + MS_LOG(ERROR) << "inputs_data_ nullptr, index: " << i; + return RET_ERROR; + } + } bool need_resize = NeedResize(); auto ret = SetInputShape(); MS_CHECK_FALSE_MSG(ret != RET_OK, ret, "Set input shape failed."); diff --git a/mindspore/lite/src/litert/mindrt_executor.cc b/mindspore/lite/src/litert/mindrt_executor.cc index ba1b7cd9ee1..bdd33a69c53 100644 --- a/mindspore/lite/src/litert/mindrt_executor.cc +++ 
b/mindspore/lite/src/litert/mindrt_executor.cc @@ -128,6 +128,10 @@ int MindrtExecutor::PrepareGraphOutput(const std::vector<Tensor *> & } } } + if (output_data_.empty()) { + MS_LOG(ERROR) << "output_data_ can not be empty."; + return RET_ERROR; + } return RET_OK; } diff --git a/mindspore/lite/tools/converter/quantizer/quantize_util.cc b/mindspore/lite/tools/converter/quantizer/quantize_util.cc index 538ee1cbafa..f97e00e1305 100644 --- a/mindspore/lite/tools/converter/quantizer/quantize_util.cc +++ b/mindspore/lite/tools/converter/quantizer/quantize_util.cc @@ -51,6 +51,7 @@ constexpr int kLstmBiasShapeSize = 2; constexpr int kLstmBiasIndex = 3; constexpr size_t kGatherAxisIndex = 3; constexpr size_t kAnfPrimitiveIndex = 0; +constexpr int kDefaultThreadNumFour = 4; } // namespace QuantParamHolderPtr GetCNodeQuantHolder(const CNodePtr &cnode) { @@ -304,13 +305,20 @@ Status BuildModelByFuncGraph(const std::shared_ptr<mindspore::Model> &model, con return kLiteNullptr; } auto context = std::make_shared<mindspore::Context>(); - context->SetThreadAffinity(kCpuBindMode); if (context == nullptr) { MS_LOG(ERROR) << "New context failed while running."; delete meta_graph; return kLiteNullptr; } + context->SetThreadAffinity(kCpuBindMode); + context->SetThreadNum(kDefaultThreadNumFour); + std::shared_ptr<mindspore::CPUDeviceInfo> device_info = std::make_shared<mindspore::CPUDeviceInfo>(); + if (device_info == nullptr) { + MS_LOG(ERROR) << "New device_info failed while running."; + delete meta_graph; + return kLiteNullptr; + } auto &device_list = context->MutableDeviceInfo(); device_list.push_back(device_info); auto ret = model->Build(content, *size, kMindIR, context);