From 18ff1a52488e7ad3f611acb17bfe4ff22120661b Mon Sep 17 00:00:00 2001
From: wang_shaocong
Date: Sat, 8 Jan 2022 10:22:30 +0800
Subject: [PATCH] [MSLITE] Support convert softplus to activation

---
 mindspore/core/ops/softplus.h                                 | 3 ++-
 mindspore/lite/tools/converter/import/primitive_adjust.cc     | 4 ++++
 .../lite/tools/optimizer/const_fold/fold_along_infershape.cc  | 5 +++--
 3 files changed, 9 insertions(+), 3 deletions(-)

diff --git a/mindspore/core/ops/softplus.h b/mindspore/core/ops/softplus.h
index d317c1abf3d..3aad3c14481 100644
--- a/mindspore/core/ops/softplus.h
+++ b/mindspore/core/ops/softplus.h
@@ -27,11 +27,12 @@
 namespace mindspore {
 namespace ops {
+constexpr auto kNameSoftplus = "Softplus";
 /// \brief Softplus activation function. Refer to Python API @ref mindspore.ops.Softplus for more details.
 class MS_CORE_API Softplus : public PrimitiveC {
  public:
   /// \brief Constructor.
-  Softplus() : PrimitiveC(prim::kPrimSoftplus->name()) { InitIOName({"x"}, {"output"}); }
+  Softplus() : PrimitiveC(kNameSoftplus) { InitIOName({"x"}, {"output"}); }
   /// \brief Destructor.
   ~Softplus() = default;
   MS_DECLARE_PARENT(Softplus, PrimitiveC);
diff --git a/mindspore/lite/tools/converter/import/primitive_adjust.cc b/mindspore/lite/tools/converter/import/primitive_adjust.cc
index 226eaa2a296..1d2dc05cfb3 100644
--- a/mindspore/lite/tools/converter/import/primitive_adjust.cc
+++ b/mindspore/lite/tools/converter/import/primitive_adjust.cc
@@ -72,6 +72,7 @@
 #include "ops/sigmoid.h"
 #include "ops/stack.h"
 #include "ops/tanh.h"
+#include "ops/softplus.h"
 #include "ops/sparse_softmax_cross_entropy_with_logits.h"
 #include "ops/grad/resize_grad.h"
 #include "ops/random_standard_normal.h"
@@ -118,6 +119,7 @@
 using mindspore::ops::kNameResizeBilinear;
 using mindspore::ops::kNameResizeNearestNeighbor;
 using mindspore::ops::kNameScale;
 using mindspore::ops::kNameSigmoid;
+using mindspore::ops::kNameSoftplus;
 using mindspore::ops::kNameSparseSoftmaxCrossEntropyWithLogits;
 using mindspore::ops::kNameSub;
 using mindspore::ops::kNameTanh;
@@ -160,6 +162,7 @@ std::map<std::string, mindspore::ActivationType> activation_map = {{ops::kNameEl
   {ops::kNameReLU6, mindspore::RELU6},
   {ops::kNameSigmoid, mindspore::SIGMOID},
   {ops::kNameTanh, mindspore::TANH},
+  {ops::kNameSoftplus, mindspore::SOFTPLUS},
   {kNameHSigmoid, mindspore::HSIGMOID},
   {kNameHSigmoidGrad, mindspore::HSIGMOID},
   {kNameHSwish, mindspore::HSWISH},
@@ -671,5 +674,6 @@
 REGIST_PRIMITIVE_ADJUST(kNameSparseSoftmaxCrossEntropyWithLogits, MoveAttrMapCommon)
 REGIST_PRIMITIVE_ADJUST(kNameResizeBilinearGrad, MoveAttrMapResizeGrad)
 REGIST_PRIMITIVE_ADJUST(kNameResizeNearestNeighborGrad, MoveAttrMapResizeGrad)
+REGIST_PRIMITIVE_ADJUST(kNameSoftplus, MoveAttrMapActivation)
 } // namespace lite
 } // namespace mindspore
diff --git a/mindspore/lite/tools/optimizer/const_fold/fold_along_infershape.cc b/mindspore/lite/tools/optimizer/const_fold/fold_along_infershape.cc
index 1a11c0af1a7..1adb4aac304 100644
--- a/mindspore/lite/tools/optimizer/const_fold/fold_along_infershape.cc
+++ b/mindspore/lite/tools/optimizer/const_fold/fold_along_infershape.cc
@@ -49,8 +49,9 @@ bool ConstFoldAlongInferShape::CheckCanFold(const FuncGraphPtr &func_graph, cons
   if (!is_inferred) {
     return false;
   }
-  if (CheckPrimitiveType(cnode, prim::kPrimShape)) {
-    return lite::ConverterInnerContext::GetInstance()->GetGraphInputTensorShapeMapSize() != 0;
+  if (CheckPrimitiveType(cnode, prim::kPrimShape) &&
+      lite::ConverterInnerContext::GetInstance()->GetGraphInputTensorShapeMapSize() != 0) {
+    return true;
   }
   auto inputs = cnode->inputs();
   auto graph_inputs =