From 3dbf47b38eb26efd6ba917f2dc18daf7e250e5b0 Mon Sep 17 00:00:00 2001
From: yangruoqi713
Date: Wed, 27 Jan 2021 14:42:08 +0800
Subject: [PATCH] [MSLITE][DEVELOP] fix bug of npu op: Rsqrt, Scale and
 Arithmetic

---
 .../src/runtime/kernel/npu/arithmetic_npu.cc  | 53 +++++++++++--------
 .../src/runtime/kernel/npu/arithmetic_npu.h   |  1 +
 .../runtime/kernel/npu/arithmetic_self_npu.cc |  3 ++
 .../runtime/kernel/npu/instance_norm_npu.cc   |  8 +--
 .../lite/src/runtime/kernel/npu/scale_npu.cc  |  7 +++
 5 files changed, 46 insertions(+), 26 deletions(-)

diff --git a/mindspore/lite/src/runtime/kernel/npu/arithmetic_npu.cc b/mindspore/lite/src/runtime/kernel/npu/arithmetic_npu.cc
index f4a17deea57..143bf28cdc3 100644
--- a/mindspore/lite/src/runtime/kernel/npu/arithmetic_npu.cc
+++ b/mindspore/lite/src/runtime/kernel/npu/arithmetic_npu.cc
@@ -45,13 +45,10 @@ using mindspore::schema::PrimitiveType_Sub;
 namespace mindspore::kernel {
 int ArithmeticNPUKernel::IsSupport(const std::vector<lite::Tensor *> &inputs,
                                    const std::vector<lite::Tensor *> &outputs, OpParameter *opParameter) {
-  if (primitive_->Type() == PrimitiveType_Mul || primitive_->Type() == PrimitiveType_Div ||
-      primitive_->Type() == PrimitiveType_Add || primitive_->Type() == PrimitiveType_Sub) {
-    if (inputs[0]->shape() != inputs[1]->shape()) {
-      MS_LOG(WARNING) << name_ << " for the two inputs, the corresponding dimensions must have the same value."
-                      << " shape 1 is:" << inputs[0]->shape() << " shape 2 is:" << inputs[1]->shape();
-      return RET_ERROR;
-    }
+  if (inputs[0]->shape() != inputs[1]->shape()) {
+    MS_LOG(WARNING) << name_ << " for the two inputs, the corresponding dimensions must have the same value."
+                    << " shape 1 is:" << inputs[0]->shape() << " shape 2 is:" << inputs[1]->shape();
+    return RET_ERROR;
   }
   return RET_OK;
 }
@@ -68,6 +65,26 @@ ge::Operator *CreateOperator(const std::vector<ge::Operator *> &npu_inputs, cons
   return op;
 }
 
+int ArithmeticNPUKernel::SetActivation() {
+  if (activation_type_ != ActivationType_NO_ACTIVATION) {
+    act_ = new (std::nothrow) hiai::op::Activation(name_ + "_act");
+    if (act_ == nullptr) {
+      MS_LOG(ERROR) << "New activation npu operator for op " << name_ << " failed.";
+      return RET_ERROR;
+    }
+    act_->set_input_x(*op_);
+    if (activation_type_ == ActivationType_RELU) {
+      act_->set_attr_mode(1);
+    } else if (activation_type_ == ActivationType_RELU6) {
+      act_->set_attr_mode(14);
+    } else {
+      MS_LOG(ERROR) << "Unsupported activation type for op " << name_;
+      return RET_ERROR;
+    }
+  }
+  return RET_OK;
+}
+
 int ArithmeticNPUKernel::SetNPUInputs(const std::vector<lite::Tensor *> &inputs,
                                       const std::vector<lite::Tensor *> &outputs,
                                       const std::vector<ge::Operator *> &npu_inputs) {
@@ -100,6 +117,9 @@ int ArithmeticNPUKernel::SetNPUInputs(const std::vector<lite::Tensor *> &inputs,
     case PrimitiveType_Maximum:
       op = CreateOperator<hiai::op::Maximum>(npu_inputs, name_);
       break;
+    case PrimitiveType_Minimum:
+      op = CreateOperator<hiai::op::Minimum>(npu_inputs, name_);
+      break;
     case PrimitiveType_SquaredDifference:
       op = CreateOperator<hiai::op::SquaredDifference>(npu_inputs, name_);
       break;
@@ -132,21 +152,10 @@ int ArithmeticNPUKernel::SetNPUInputs(const std::vector<lite::Tensor *> &inputs,
   }
   op_ = op;
 
-  if (activation_type_ != ActivationType_NO_ACTIVATION) {
-    act_ = new (std::nothrow) hiai::op::Activation(name_ + "_act");
-    if (act_ == nullptr) {
-      MS_LOG(ERROR) << "New activation npu operator for op " << name_ << " failed.";
-      return RET_ERROR;
-    }
-    act_->set_input_x(*op_);
-    if (activation_type_ == ActivationType_RELU) {
-      act_->set_attr_mode(1);
-    } else if (activation_type_ == ActivationType_RELU6) {
-      act_->set_attr_mode(14);
-    } else {
-      MS_LOG(ERROR) << "Unsupport activation type for op " << name_;
-      return RET_ERROR;
-    }
+  auto ret = SetActivation();
+  if (ret != RET_OK) {
+    MS_LOG(ERROR) << "Arithmetic npu op set activation failed.";
+    return RET_ERROR;
   }
   return RET_OK;
 }
diff --git a/mindspore/lite/src/runtime/kernel/npu/arithmetic_npu.h b/mindspore/lite/src/runtime/kernel/npu/arithmetic_npu.h
index 470dc5e10ca..48be917d080 100644
--- a/mindspore/lite/src/runtime/kernel/npu/arithmetic_npu.h
+++ b/mindspore/lite/src/runtime/kernel/npu/arithmetic_npu.h
@@ -39,6 +39,7 @@ class ArithmeticNPUKernel : public NPUKernel {
   ge::Operator *GetNPUOp() override;
 
  private:
+  int SetActivation();
   int activation_type_;
   ge::Operator *op_ = nullptr;
   hiai::op::Activation *act_ = nullptr;
diff --git a/mindspore/lite/src/runtime/kernel/npu/arithmetic_self_npu.cc b/mindspore/lite/src/runtime/kernel/npu/arithmetic_self_npu.cc
index 042192309c2..6f1f3014a57 100644
--- a/mindspore/lite/src/runtime/kernel/npu/arithmetic_self_npu.cc
+++ b/mindspore/lite/src/runtime/kernel/npu/arithmetic_self_npu.cc
@@ -68,6 +68,9 @@ int ArithmeticSelfNPUKernel::SetNPUInputs(const std::vector<lite::Tensor *> &inp
     case PrimitiveType_Sqrt:
       op = CreateOperator<hiai::op::Sqrt>(npu_inputs[0], name_);
       break;
+    case PrimitiveType_Rsqrt:
+      op = CreateOperator<hiai::op::Rsqrt>(npu_inputs[0], name_);
+      break;
     case PrimitiveType_Sin:
       op = CreateOperator<hiai::op::Sin>(npu_inputs[0], name_);
       break;
diff --git a/mindspore/lite/src/runtime/kernel/npu/instance_norm_npu.cc b/mindspore/lite/src/runtime/kernel/npu/instance_norm_npu.cc
index 4ea47e989e1..fc38efadd67 100644
--- a/mindspore/lite/src/runtime/kernel/npu/instance_norm_npu.cc
+++ b/mindspore/lite/src/runtime/kernel/npu/instance_norm_npu.cc
@@ -53,7 +53,7 @@ int InstanceNormNPUKernel::SetNPUInputs(const std::vector<lite::Tensor *> &input
   ge::TensorDesc gamma_tensor_desc(lite::ConverterToNPUShape({1, gamma_shape[0], 1, 1}), ge::FORMAT_NCHW,
                                    lite::ConverterToNPUDataType(inputs[1]->data_type()));
   gamma_tensor->SetTensorDesc(gamma_tensor_desc);
-  gamma_tensor->SetData(reinterpret_cast<const uint8_t *>(inputs.data()), inputs[1]->Size());
+  gamma_tensor->SetData(reinterpret_cast<const uint8_t *>(inputs[1]->data_c()), inputs[1]->Size());
   op_->set_input_gamma(*gamma);
 
   auto beta = new (std::nothrow) hiai::op::Const(name_ + "_beta");
@@ -61,16 +61,16 @@ int InstanceNormNPUKernel::SetNPUInputs(const std::vector<lite::Tensor *> &input
     MS_LOG(ERROR) << "New beta const failed.";
     return RET_ERROR;
   }
-  auto beta_shape = inputs[1]->shape();
+  auto beta_shape = inputs[2]->shape();
   std::shared_ptr<ge::Tensor> beta_tensor = std::shared_ptr<ge::Tensor>(new (std::nothrow) ge::Tensor());
   if (beta_tensor == nullptr) {
     MS_LOG(ERROR) << "new beta_tensor failed.";
     return RET_ERROR;
   }
   ge::TensorDesc beta_tensor_desc(lite::ConverterToNPUShape({1, beta_shape[0], 1, 1}), ge::FORMAT_NCHW,
-                                  lite::ConverterToNPUDataType(inputs[1]->data_type()));
+                                  lite::ConverterToNPUDataType(inputs[2]->data_type()));
   beta_tensor->SetTensorDesc(beta_tensor_desc);
-  beta_tensor->SetData(reinterpret_cast<const uint8_t *>(inputs.data()), inputs[1]->Size());
+  beta_tensor->SetData(reinterpret_cast<const uint8_t *>(inputs[2]->data_c()), inputs[2]->Size());
   op_->set_input_beta(*beta);
   op_->set_attr_epsilon(instance_norm_param_->epsilon_);
   return RET_OK;
diff --git a/mindspore/lite/src/runtime/kernel/npu/scale_npu.cc b/mindspore/lite/src/runtime/kernel/npu/scale_npu.cc
index ce11a248752..79d86d22607 100644
--- a/mindspore/lite/src/runtime/kernel/npu/scale_npu.cc
+++ b/mindspore/lite/src/runtime/kernel/npu/scale_npu.cc
@@ -24,6 +24,13 @@ using mindspore::schema::PrimitiveType_Scale;
 
 namespace mindspore::kernel {
 int ScaleNPUKernel::IsSupport(const std::vector<lite::Tensor *> &inputs,
                               const std::vector<lite::Tensor *> &outputs, OpParameter *opParameter) {
+  if (scale_parameter_->axis_ < 0) {
+    scale_parameter_->axis_ = scale_parameter_->axis_ + inputs.size();
+  }
+  if (scale_parameter_->axis_ != 1) {
+    MS_LOG(ERROR) << "Npu scale axis attr only support 1, now is " << scale_parameter_->axis_;
+    return RET_ERROR;
+  }
   return RET_OK;
 }
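
Note on the arithmetic_npu.cc change: the fused-activation wiring is hoisted out of SetNPUInputs() into a private SetActivation() helper, and the mode values it writes (1 for ReLU, 14 for ReLU6) are the HiAI Activation operator's attr_mode codes as used by this patch. A minimal standalone sketch of that mapping, using a hypothetical ActivationType enum in place of the mindspore::schema types:

#include <optional>

// Hypothetical mirror of the schema activation types referenced in the patch.
enum class ActivationType { NO_ACTIVATION, RELU, RELU6 };

// Maps a fused activation to the HiAI Activation op's attr_mode value,
// following the patch: 1 selects ReLU, 14 selects ReLU6, and anything else
// is unsupported, which makes the kernel return RET_ERROR.
std::optional<int> ActivationMode(ActivationType type) {
  switch (type) {
    case ActivationType::RELU:
      return 1;
    case ActivationType::RELU6:
      return 14;
    default:
      return std::nullopt;
  }
}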
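
The instance_norm_npu.cc hunks fix two copy-paste style bugs: beta now reads input index 2 instead of repeating index 1, and SetData() now receives the tensor's payload via data_c() rather than inputs.data(), which is the address of the vector's internal array of lite::Tensor pointers. A small sketch of why the old expression compiles but points at the wrong memory, with a hypothetical Tensor stand-in for lite::Tensor:

#include <cstdint>
#include <vector>

// Hypothetical stand-in for lite::Tensor: it just owns a byte buffer.
struct Tensor {
  std::vector<uint8_t> buf;
  void *data_c() { return buf.data(); }  // pointer to the tensor's payload
};

int main() {
  Tensor gamma{{1, 2, 3, 4}};
  std::vector<Tensor *> inputs = {nullptr, &gamma};

  // Pre-patch bug: address of the vector's pointer array, not tensor data.
  auto *wrong = reinterpret_cast<const uint8_t *>(inputs.data());

  // Patched behavior: address of the tensor's actual buffer.
  auto *right = reinterpret_cast<const uint8_t *>(inputs[1]->data_c());

  return wrong == right ? 1 : 0;  // returns 0: the two pointers differ
}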
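
Finally, the scale_npu.cc hunk normalizes a negative axis before validating it, since per the patch's own error message the NPU Scale kernel only supports axis 1. A sketch of that check with a hypothetical CheckScaleAxis() helper in place of the kernel plumbing; note that the patch shifts the negative axis by inputs.size():

#include <cassert>

// Hypothetical helper mirroring the axis handling added to
// ScaleNPUKernel::IsSupport: a negative axis is shifted into the
// non-negative range first, then only axis == 1 is accepted.
bool CheckScaleAxis(int axis, int size) {
  if (axis < 0) {
    axis += size;  // the patch uses inputs.size() as the shift
  }
  return axis == 1;
}

int main() {
  assert(CheckScaleAxis(1, 3));   // already the supported axis
  assert(CheckScaleAxis(-2, 3));  // -2 + 3 == 1: normalized, supported
  assert(!CheckScaleAxis(2, 3));  // unsupported: kernel returns RET_ERROR
  return 0;
}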