From 386eec11c106040a5374f70ddb29be50194593f5 Mon Sep 17 00:00:00 2001 From: zhengyuanhua Date: Thu, 18 Aug 2022 09:57:55 +0800 Subject: [PATCH] fix dynamic shape bug on ascend --- .../src/extendrt/cxx_api/model/model_impl.cc | 26 +++++++- .../kernel/ascend/model/dyn_shape_process.cc | 9 ++- .../kernel/ascend/model/dyn_shape_process.h | 2 +- .../kernel/ascend/model/model_process.cc | 6 +- .../kernel/ascend/src/custom_ascend_kernel.cc | 3 +- .../lite/src/extendrt/single_op_session.cc | 60 ++++++++++++++++++- .../lite/src/extendrt/single_op_session.h | 2 + 7 files changed, 100 insertions(+), 8 deletions(-) diff --git a/mindspore/lite/src/extendrt/cxx_api/model/model_impl.cc b/mindspore/lite/src/extendrt/cxx_api/model/model_impl.cc index 5ec249df479..893457a263f 100644 --- a/mindspore/lite/src/extendrt/cxx_api/model/model_impl.cc +++ b/mindspore/lite/src/extendrt/cxx_api/model/model_impl.cc @@ -54,7 +54,31 @@ Status ModelImpl::Build(const std::string &model_path, ModelType model_type, } Status ModelImpl::Resize(const std::vector &inputs, const std::vector> &dims) { - return kSuccess; + MS_EXCEPTION_IF_NULL(session_); + + if (inputs.empty()) { + MS_LOG(ERROR) << "Inputs is null."; + return kLiteInputParamInvalid; + } + if (dims.empty()) { + MS_LOG(ERROR) << "Dims is null."; + return kLiteInputParamInvalid; + } + if (inputs.size() != dims.size()) { + MS_LOG(ERROR) << "The size of inputs does not match the size of dims."; + return kLiteInputParamInvalid; + } + auto model_inputs = session_->GetInputs(); + if (model_inputs.empty()) { + MS_LOG(ERROR) << "The inputs of model is null."; + return kLiteParamInvalid; + } + if (inputs.size() != model_inputs.size()) { + MS_LOG(ERROR) << "The size of inputs is incorrect."; + return kLiteInputParamInvalid; + } + std::vector resize_inputs = TensorUtils::MSTensorToTensorPtr(inputs); + return session_->Resize(resize_inputs, dims); } std::vector ModelImpl::GetInputs() { diff --git 
a/mindspore/lite/src/extendrt/kernel/ascend/model/dyn_shape_process.cc b/mindspore/lite/src/extendrt/kernel/ascend/model/dyn_shape_process.cc index d8d7f1c5774..1215adce357 100644 --- a/mindspore/lite/src/extendrt/kernel/ascend/model/dyn_shape_process.cc +++ b/mindspore/lite/src/extendrt/kernel/ascend/model/dyn_shape_process.cc @@ -177,7 +177,11 @@ int DynShapeProcess::GetRealImageSize(std::vector *const inputs return lite::RET_OK; } -void DynShapeProcess::DestroyDynamicInput() { +void DynShapeProcess::DestroyDynamicInput(std::vector *const inputs) { + if (inputs == nullptr) { + MS_LOG(ERROR) << "Inputs ptr is nullptr."; + return; + } if (batch_size_ptr_ != nullptr && batch_size_ptr_->addr != nullptr) { free(batch_size_ptr_->addr); batch_size_ptr_->addr = nullptr; @@ -188,6 +192,9 @@ void DynShapeProcess::DestroyDynamicInput() { image_size_ptr_->addr = nullptr; image_size_ptr_->size = 0; } + if (!inputs->empty()) { + (*inputs).pop_back(); + } } } // namespace acl } // namespace mindspore::kernel diff --git a/mindspore/lite/src/extendrt/kernel/ascend/model/dyn_shape_process.h b/mindspore/lite/src/extendrt/kernel/ascend/model/dyn_shape_process.h index 5048fd284ca..d341ee5f8e7 100644 --- a/mindspore/lite/src/extendrt/kernel/ascend/model/dyn_shape_process.h +++ b/mindspore/lite/src/extendrt/kernel/ascend/model/dyn_shape_process.h @@ -31,7 +31,7 @@ class DynShapeProcess { : acl_options_(options), input_data_idx_(input_data_idx), batch_size_ptr_(nullptr), image_size_ptr_(nullptr) {} int ProcDynamicInput(std::vector *const inputs); - void DestroyDynamicInput(); + void DestroyDynamicInput(std::vector *const inputs); private: int AddBatchSizeInput(std::vector *const inputs); diff --git a/mindspore/lite/src/extendrt/kernel/ascend/model/model_process.cc b/mindspore/lite/src/extendrt/kernel/ascend/model/model_process.cc index d97f241e4a0..5b4de891ed4 100644 --- a/mindspore/lite/src/extendrt/kernel/ascend/model/model_process.cc +++ 
b/mindspore/lite/src/extendrt/kernel/ascend/model/model_process.cc @@ -120,16 +120,16 @@ std::vector ModelProcess::GetInputFormat() { return std::vector(); } std::vector input_formats; - static const std::map acl_format_map = {{ACL_FORMAT_NCHW, NCHW}, {ACL_FORMAT_NHWC, NHWC}}; + static const std::map acl_format_map = { + {ACL_FORMAT_NCHW, NCHW}, {ACL_FORMAT_NHWC, NHWC}, {ACL_FORMAT_ND, NCHW}}; size_t input_size = aclmdlGetNumInputs(model_desc_); for (size_t i = 0; i < input_size; ++i) { aclFormat format = aclmdlGetInputFormat(model_desc_, i); auto iter = acl_format_map.find(format); if (iter != acl_format_map.end()) { input_formats.emplace_back(iter->second); - } else { - MS_LOG(WARNING) << "Find input " << i << " format failed, cur format: " << static_cast(format); } + MS_LOG(DEBUG) << "Format of Input " << i << " is " << static_cast(format); } return input_formats; } diff --git a/mindspore/lite/src/extendrt/kernel/ascend/src/custom_ascend_kernel.cc b/mindspore/lite/src/extendrt/kernel/ascend/src/custom_ascend_kernel.cc index dcda163aa90..a25e9cc82f7 100644 --- a/mindspore/lite/src/extendrt/kernel/ascend/src/custom_ascend_kernel.cc +++ b/mindspore/lite/src/extendrt/kernel/ascend/src/custom_ascend_kernel.cc @@ -163,6 +163,7 @@ int CustomAscendKernelMod::Resize(const BaseOperatorPtr &base_operator, const st return lite::RET_ERROR; } } + inputs_.assign(inputs.begin(), inputs.end() - 1); return lite::RET_OK; } @@ -237,7 +238,7 @@ bool CustomAscendKernelMod::Launch(const std::vector &inputs, const return false; } if (IsDynamicInput()) { - dyn_shape_proc_->DestroyDynamicInput(); + dyn_shape_proc_->DestroyDynamicInput(&inputs_); } UpdateOutputAddr(outputs); return true; diff --git a/mindspore/lite/src/extendrt/single_op_session.cc b/mindspore/lite/src/extendrt/single_op_session.cc index 510810003a2..6abfd005ad7 100644 --- a/mindspore/lite/src/extendrt/single_op_session.cc +++ b/mindspore/lite/src/extendrt/single_op_session.cc @@ -58,7 +58,7 @@ Status 
SingleOpInferSession::Init(const std::shared_ptr context) { kernel_graph_utils_ = std::make_shared(); if (AscendInit(context) != kSuccess) { MS_LOG(ERROR) << "Init ascend failed."; - return kMEInvalidInput; + return kLiteError; } return kSuccess; } @@ -183,10 +183,68 @@ Status SingleOpInferSession::RunGraph(const std::vector &inpu return kSuccess; } + +Status SingleOpInferSession::ResizeGraphInputs(const std::vector &inputs, + const std::vector> &dims) { + if (inputs_.size() != inputs.size()) { + MS_LOG(ERROR) << "Graph inputs tensor size[" << inputs_.size() << "] is not equal with user input tensor size[" + << inputs.size() << "]"; + return kLiteError; + } + auto graph_inputs = RuntimeUtils::GetGraphDataInputs(kernel_graph_); + if (graph_inputs.size() != inputs.size()) { + MS_LOG(ERROR) << "Graph inputs size[" << graph_inputs.size() << "] is not equal with user input size[" + << inputs.size() << "]"; + return kLiteError; + } + for (size_t i = 0; i < graph_inputs.size(); ++i) { + auto graph_input = graph_inputs[i]; + auto graph_input_addr = AnfAlgo::GetMutableOutputAddr(graph_input, 0); + auto type_id = graph_input_addr->type_id(); + size_t type_size = GetTypeByte(TypeIdToType(type_id)); + size_t tensor_size = dims[i].empty() + ? 
type_size + : std::accumulate(dims[i].begin(), dims[i].end(), type_size, std::multiplies()); + // update input size + if (graph_input_addr->ptr_ != nullptr) { + free(graph_input_addr->ptr_); + auto new_addr = malloc(tensor_size); + if (new_addr == nullptr) { + MS_LOG(ERROR) << " malloc memory of input " << i << " failed, memory size " << inputs[i]->Size(); + return kLiteError; + } + graph_input_addr->set_ptr(new_addr); + graph_input_addr->SetSize(tensor_size); + } + // update input shape + inputs_[i]->set_shape(dims[i]); + auto abstract = std::make_shared(TypeIdToType(type_id), dims[i]); + graph_input->set_abstract(abstract); + } + return kSuccess; +} // namespace mindspore + Status SingleOpInferSession::Resize(const std::vector &inputs, const std::vector> &dims) { + if (ResizeGraphInputs(inputs, dims) != kSuccess) { + MS_LOG(EXCEPTION) << "Resize graph input error. "; + } + auto &kernel_nodes = kernel_graph_->execution_order(); + for (const auto &kernel_node : kernel_nodes) { + std::string kernel_name = common::AnfAlgo::GetCNodeName(kernel_node); + MS_LOG(INFO) << "SingleOpInferSession::Resize " << kernel_name; + auto kernel_mod = AnfAlgo::GetKernelMod(kernel_node); + if (kernel_mod == nullptr) { + MS_LOG(EXCEPTION) << "Kernel mod is nullptr, kernel name: " << kernel_name; + } + auto args = kernel::AbstractArgsFromCNode(kernel_node); + if (kernel_mod->Resize(args.op, args.inputs, args.outputs) != kSuccess) { + MS_LOG(EXCEPTION) << "Kernel mod resize failed, kernel name: " << kernel_name; + } + } return kSuccess; } + std::vector SingleOpInferSession::GetOutputs() { return outputs_; } std::vector SingleOpInferSession::GetInputs() { return inputs_; } std::vector SingleOpInferSession::GetOutputNames() { return output_names_; } diff --git a/mindspore/lite/src/extendrt/single_op_session.h b/mindspore/lite/src/extendrt/single_op_session.h index ee69c647ba1..93941419ea8 100644 --- a/mindspore/lite/src/extendrt/single_op_session.h +++ 
b/mindspore/lite/src/extendrt/single_op_session.h @@ -43,6 +43,8 @@ class SingleOpInferSession : public InferSession { tensor::TensorPtr GetInputByTensorName(const std::string &name) override; private: + Status ResizeGraphInputs(const std::vector &inputs, const std::vector> &dims); + KernelGraphUtilsPtr kernel_graph_utils_; KernelGraphPtr kernel_graph_; std::vector inputs_;