From b04036e13c480608d9d1edaf27b3ac0682228d03 Mon Sep 17 00:00:00 2001
From: zhangjie <543376780@qq.com>
Date: Fri, 11 Jun 2021 20:53:04 +0800
Subject: [PATCH] [feat][assistant][I3PYD4] add new data operator HShrink and
 HShrinkGrad

---
 mindspore/core/base/core_ops.h                |  1 +
 mindspore/core/ops/grad/hshrink_grad.cc       | 63 +++++++++++++++++++
 mindspore/core/ops/grad/hshrink_grad.h        | 43 +++++++++++++
 .../ops/_grad_experimental/grad_nn_ops.py     | 12 ++++
 mindspore/ops/_op_impl/tbe/__init__.py        |  1 +
 mindspore/ops/_op_impl/tbe/hshrink_grad.py    | 37 +++++++++++
 mindspore/ops/operations/_grad_ops.py         | 31 +++++++++
 tests/ut/python/ops/test_ops.py               |  5 ++
 8 files changed, 193 insertions(+)
 create mode 100644 mindspore/core/ops/grad/hshrink_grad.cc
 create mode 100644 mindspore/core/ops/grad/hshrink_grad.h
 create mode 100644 mindspore/ops/_op_impl/tbe/hshrink_grad.py

diff --git a/mindspore/core/base/core_ops.h b/mindspore/core/base/core_ops.h
index 74e1c1b08a7..becc48fece6 100644
--- a/mindspore/core/base/core_ops.h
+++ b/mindspore/core/base/core_ops.h
@@ -376,6 +376,7 @@ inline const PrimitivePtr kFusedMulAdd = std::make_shared<Primitive>("FusedMulAd
 inline const PrimitivePtr kPrimSoftShrink = std::make_shared<Primitive>("SoftShrink");
 inline const PrimitivePtr kPrimSoftShrinkGrad = std::make_shared<Primitive>("SoftShrinkGrad");
 inline const PrimitivePtr kPrimHShrink = std::make_shared<Primitive>("HShrink");
+inline const PrimitivePtr kPrimHShrinkGrad = std::make_shared<Primitive>("HShrinkGrad");
 
 // Comm ops
 inline const PrimitivePtr kPrimMirror = std::make_shared<Primitive>("_MirrorOperator");
diff --git a/mindspore/core/ops/grad/hshrink_grad.cc b/mindspore/core/ops/grad/hshrink_grad.cc
new file mode 100644
index 00000000000..0c85f9f4030
--- /dev/null
+++ b/mindspore/core/ops/grad/hshrink_grad.cc
@@ -0,0 +1,63 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ops/grad/hshrink_grad.h"
+#include <string>
+#include <algorithm>
+#include <memory>
+#include <map>
+#include <set>
+#include <vector>
+#include "ops/op_utils.h"
+#include "utils/check_convert_utils.h"
+#include "abstract/primitive_infer_map.h"
+
+namespace mindspore {
+namespace ops {
+
+abstract::ShapePtr HShrinkGradInferShape(const PrimitivePtr &primitive,
+                                         const std::vector<AbstractBasePtr> &input_args) {
+  MS_EXCEPTION_IF_NULL(primitive);
+  auto prim_name = primitive->name();
+  auto gradients_shape = CheckAndConvertUtils::ConvertShapePtrToShapeMap(input_args[0]->BuildShape())[kShape];
+  auto features_shape = CheckAndConvertUtils::ConvertShapePtrToShapeMap(input_args[1]->BuildShape())[kShape];
+
+  CheckAndConvertUtils::Check("gradients_shape", gradients_shape, kEqual, "features_shape", features_shape, prim_name,
+                              TypeError);
+  return std::make_shared<abstract::Shape>(gradients_shape);
+}
+
+TypePtr HShrinkGradInferType(const PrimitivePtr &prim, const std::vector<AbstractBasePtr> &input_args) {
+  MS_EXCEPTION_IF_NULL(prim);
+  CheckAndConvertUtils::CheckInteger("input number", input_args.size(), kEqual, 2, prim->name());
+  for (const auto &item : input_args) {
+    MS_EXCEPTION_IF_NULL(item);
+  }
+  std::map<std::string, TypePtr> types;
+  const std::set<TypePtr> valid_types = {kFloat16, kFloat32};
+  types.emplace("gradients", input_args[0]->BuildType());
+  types.emplace("features", input_args[1]->BuildType());
+  return CheckAndConvertUtils::CheckTensorTypeSame(types, valid_types, prim->name());
+}
+
+AbstractBasePtr HShrinkGradInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,
+                                 const std::vector<AbstractBasePtr> &input_args) {
+  return std::make_shared<abstract::AbstractTensor>(HShrinkGradInferType(primitive, input_args),
+                                                    HShrinkGradInferShape(primitive, input_args)->shape());
+}
+REGISTER_PRIMITIVE_EVAL_IMPL(HShrinkGrad, prim::kPrimHShrinkGrad, HShrinkGradInfer, nullptr, true);
+}  // namespace ops
+}  // namespace mindspore
diff --git a/mindspore/core/ops/grad/hshrink_grad.h b/mindspore/core/ops/grad/hshrink_grad.h
new file mode 100644
index 00000000000..70a62ba9518
--- /dev/null
+++ b/mindspore/core/ops/grad/hshrink_grad.h
@@ -0,0 +1,43 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MINDSPORE_CORE_OPS_HShrink_GRAD_H_
+#define MINDSPORE_CORE_OPS_HShrink_GRAD_H_
+#include <map>
+#include <vector>
+#include <string>
+#include <memory>
+#include "ops/primitive_c.h"
+#include "abstract/abstract_value.h"
+#include "utils/check_convert_utils.h"
+
+namespace mindspore {
+namespace ops {
+constexpr auto kNameHShrinkGrad = "HShrinkGrad";
+class HShrinkGrad : public PrimitiveC {
+ public:
+  HShrinkGrad() : PrimitiveC(kNameHShrinkGrad) { InitIOName({"gradients", "features"}, {"backprops"}); }
+  ~HShrinkGrad() = default;
+  MS_DECLARE_PARENT(HShrinkGrad, PrimitiveC);
+};
+
+AbstractBasePtr HShrinkGradInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,
+                                 const std::vector<AbstractBasePtr> &input_args);
+using PrimHShrinkGradPtr = std::shared_ptr<HShrinkGrad>;
+}  // namespace ops
+}  // namespace mindspore
+
+#endif  // MINDSPORE_CORE_OPS_HShrink_GRAD_H_
diff --git a/mindspore/ops/_grad_experimental/grad_nn_ops.py b/mindspore/ops/_grad_experimental/grad_nn_ops.py
index acb3f84dc31..58106b6c88b 100644
--- a/mindspore/ops/_grad_experimental/grad_nn_ops.py
+++ b/mindspore/ops/_grad_experimental/grad_nn_ops.py
@@ -44,3 +44,15 @@ def get_bprop_softshrink(self):
         return (dx,)
 
     return bprop
+
+
+@bprop_getters.register(P.HShrink)
+def get_bprop_hshrink(self):
+    """Grad definition for `HShrink` operation."""
+    grad = G.HShrinkGrad()
+
+    def bprop(features, out, gradients):
+        dx = grad(gradients, features)
+        return (dx,)
+
+    return bprop
diff --git a/mindspore/ops/_op_impl/tbe/__init__.py b/mindspore/ops/_op_impl/tbe/__init__.py
index a52810073f0..91144d3176f 100644
--- a/mindspore/ops/_op_impl/tbe/__init__.py
+++ b/mindspore/ops/_op_impl/tbe/__init__.py
@@ -395,3 +395,4 @@ from .soft_shrink_grad import _soft_shrink_grad_tbe
 from .hsigmoid_grad import _hsigmoid_grad_tbe
 from .hsigmoid import _hsigmoid_tbe
 from .hshrink import _hshrink_tbe
+from .hshrink_grad import _hshrink_grad_tbe
diff --git a/mindspore/ops/_op_impl/tbe/hshrink_grad.py b/mindspore/ops/_op_impl/tbe/hshrink_grad.py
new file mode 100644
index 00000000000..3d162e35aa0
--- /dev/null
+++ b/mindspore/ops/_op_impl/tbe/hshrink_grad.py
@@ -0,0 +1,37 @@
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+"""HShrinkGrad op"""
+from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType
+
+hshrink_grad_op_info = TBERegOp("HShrinkGrad") \
+    .fusion_type("OPAQUE") \
+    .async_flag(False) \
+    .binfile_name("hard_shrink_grad.so") \
+    .compute_cost(10) \
+    .kernel_name("hard_shrink_grad") \
+    .partial_flag(True) \
+    .attr("lambda", "optional", "float", "all", "0.5") \
+    .input(0, "gradients", False, "required", "all") \
+    .input(1, "features", False, "required", "all") \
+    .output(0, "backprops", False, "required", "all") \
+    .dtype_format(DataType.F16_Default, DataType.F16_Default, DataType.F16_Default) \
+    .dtype_format(DataType.F32_Default, DataType.F32_Default, DataType.F32_Default) \
+    .get_op_info()
+
+
+@op_info_register(hshrink_grad_op_info)
+def _hshrink_grad_tbe():
+    """HShrinkGrad TBE register"""
+    return
diff --git a/mindspore/ops/operations/_grad_ops.py b/mindspore/ops/operations/_grad_ops.py
index 22f361f7060..b64f1157f97 100644
--- a/mindspore/ops/operations/_grad_ops.py
+++ b/mindspore/ops/operations/_grad_ops.py
@@ -2212,3 +2212,34 @@ class SoftShrinkGrad(Primitive):
         self.init_prim_io_names(inputs=['input_grad', 'input_x'], outputs=['output'])
         validator.check_value_type("lambd", lambd, [float], self.name)
         validator.check_number("lambd", lambd, 0, Rel.GE, self.name)
+
+
+class HShrinkGrad(Primitive):
+    """
+    Computes gradients for the HShrink operation.
+
+    Args:
+        lambd (float): The λ value for the Hardshrink formulation. Default: 0.5.
+
+    Inputs:
+        - **gradients** (Tensor) - The gradients of the loss with respect to the output of the HShrink function.
+          Currently the gradients data type only supports float16 and float32.
+        - **features** (Tensor) - Must be the input `input_x` of the forward operator HShrink.
+          Currently the features data type only supports float16 and float32.
+
+    Outputs:
+        backprops - Tensor, with the same shape and data type as `features`.
+
+    Raises:
+        TypeError: If `lambd` is not a float.
+        TypeError: If the shape of `gradients` is not the same as that of `features`.
+        TypeError: If the dtype of `gradients` is not the same as that of `features`.
+        TypeError: If the dtype of `gradients` or `features` is neither float16 nor float32.
+
+    Supported Platforms:
+        ``Ascend``
+    """
+
+    @prim_attr_register
+    def __init__(self, lambd=0.5):
+        validator.check_value_type("lambd", lambd, [float], self.name)
diff --git a/tests/ut/python/ops/test_ops.py b/tests/ut/python/ops/test_ops.py
index 19ac1e81987..bbff59c946f 100755
--- a/tests/ut/python/ops/test_ops.py
+++ b/tests/ut/python/ops/test_ops.py
@@ -2209,6 +2209,11 @@ test_case_nn_ops = [
         'desc_inputs': [Tensor(np.array([[0.5, 1, 2.0], [0.0533, 0.0776, -2.1233]]), mstype.float32)],
         'desc_bprop': [],
         'skip': ['backward']}),
+    ('HShrinkGrad', {
+        'block': G.HShrinkGrad(),
+        'desc_inputs': [Tensor(np.array([[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]]), mstype.float16),
+                        Tensor(np.array([[-4, -3, -2], [1, 2, 4]]), mstype.float16)],
+        'skip': ['backward']}),
 ]
 
 test_case_array_ops = [
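Reviewer note: a minimal NumPy sketch of the HShrinkGrad semantics this patch registers, assuming the standard Hardshrink definition (f(x) = x if |x| > lambd, else 0). The helper name np_hshrink_grad is illustrative only and is not part of the patch.

import numpy as np

def np_hshrink_grad(gradients, features, lambd=0.5):
    # Hardshrink is the identity where |x| > lambd and 0 elsewhere, so its
    # gradient passes `gradients` through where |features| > lambd and is 0 elsewhere.
    return np.where(np.abs(features) > lambd, gradients, 0).astype(gradients.dtype)

# Mirrors the unit-test inputs above: every |feature| exceeds lambd=0.5,
# so the backprops equal the incoming gradients.
gradients = np.array([[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]], dtype=np.float16)
features = np.array([[-4, -3, -2], [1, 2, 4]], dtype=np.float16)
print(np_hshrink_grad(gradients, features))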