[feat][assistant][I3PYD3] add new Ascend operator HSigmoidGrad

wanglixin 2021-06-06 15:14:43 +08:00
parent a0644cf073
commit a377ade89f
6 changed files with 175 additions and 2 deletions

View File

@@ -0,0 +1,63 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "ops/grad/hsigmoid_grad.h"
#include <algorithm>
#include <map>
#include <memory>
#include <set>
#include <string>
#include <vector>
#include "ops/op_utils.h"
#include "utils/check_convert_utils.h"
#include "abstract/primitive_infer_map.h"
namespace mindspore {
namespace ops {
namespace {
abstract::ShapePtr InferShape(const PrimitivePtr &primitive, const std::vector<AbstractBasePtr> &input_args) {
MS_EXCEPTION_IF_NULL(primitive);
auto prim_name = primitive->name();
auto grads_shape = CheckAndConvertUtils::ConvertShapePtrToShapeMap(input_args[0]->BuildShape())[kShape];
auto input_x_shape = CheckAndConvertUtils::ConvertShapePtrToShapeMap(input_args[1]->BuildShape())[kShape];
CheckAndConvertUtils::Check("grads_shape", grads_shape, kEqual, "input_x_shape", input_x_shape, prim_name, TypeError);
return std::make_shared<abstract::Shape>(grads_shape);
}
TypePtr InferType(const PrimitivePtr &prim, const std::vector<AbstractBasePtr> &input_args) {
MS_EXCEPTION_IF_NULL(prim);
CheckAndConvertUtils::CheckInteger("input number", input_args.size(), kEqual, 2, prim->name());
for (const auto &item : input_args) {
MS_EXCEPTION_IF_NULL(item);
}
const std::set<TypePtr> valid_types = {kFloat16, kFloat32};
std::map<std::string, TypePtr> types;
types.emplace("grads", input_args[0]->BuildType());
types.emplace("input_x", input_args[1]->BuildType());
return CheckAndConvertUtils::CheckTensorTypeSame(types, valid_types, prim->name());
}
} // namespace
AbstractBasePtr HSigmoidGradInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,
const std::vector<AbstractBasePtr> &input_args) {
return std::make_shared<abstract::AbstractTensor>(InferType(primitive, input_args),
InferShape(primitive, input_args)->shape());
}
REGISTER_PRIMITIVE_EVAL_IMPL(HSigmoidGrad, prim::kPrimHSigmoidGrad, HSigmoidGradInfer, nullptr, true);
} // namespace ops
} // namespace mindspore
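
For reference, the two infer helpers above validate metadata only: the output shape is the shared input shape, and both inputs must be float16 or float32. A plain-Python sketch of the same checks, illustrative only and not part of this commit:

import numpy as np

def check_hsigmoid_grad_args(grads, input_x):
    """Plain-Python mirror of InferShape/InferType above (illustrative sketch)."""
    # InferShape: grads_shape must equal input_x_shape, else TypeError.
    if grads.shape != input_x.shape:
        raise TypeError("grads_shape is not equal to input_x_shape")
    # InferType: both tensors must share one of the valid float types.
    if grads.dtype != input_x.dtype or grads.dtype not in (np.float16, np.float32):
        raise TypeError("grads and input_x must both be float16 or float32")
    # The output tensor inherits the common shape and dtype.
    return grads.shape, grads.dtype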

View File

@@ -0,0 +1,43 @@
/**
* Copyright 2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_CORE_OPS_HSIGMOID_GRAD_H_
#define MINDSPORE_CORE_OPS_HSIGMOID_GRAD_H_
#include <map>
#include <memory>
#include <string>
#include <vector>
#include "ops/primitive_c.h"
#include "abstract/abstract_value.h"
#include "utils/check_convert_utils.h"

namespace mindspore {
namespace ops {
constexpr auto kNameHSigmoidGrad = "HSigmoidGrad";
class HSigmoidGrad : public PrimitiveC {
 public:
  HSigmoidGrad() : PrimitiveC(kNameHSigmoidGrad) { InitIOName({"grads", "input_x"}, {"output"}); }
  ~HSigmoidGrad() = default;
  MS_DECLARE_PARENT(HSigmoidGrad, PrimitiveC);
};

AbstractBasePtr HSigmoidGradInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,
                                  const std::vector<AbstractBasePtr> &input_args);
using PrimHSigmoidGradPtr = std::shared_ptr<HSigmoidGrad>;
}  // namespace ops
}  // namespace mindspore
#endif  // MINDSPORE_CORE_OPS_HSIGMOID_GRAD_H_

View File

@@ -391,3 +391,4 @@ from .ctc_loss_v2 import _ctc_loss_v2_tbe
from .ctc_loss_v2_grad import _ctc_loss_v2_grad_tbe
from .soft_shrink import _soft_shrink_tbe
from .soft_shrink_grad import _soft_shrink_grad_tbe
from .hsigmoid_grad import _hsigmoid_grad_tbe

View File

@@ -0,0 +1,39 @@
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""HSigmoidGrad op"""
from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType
hsigmoid_grad_op_info = TBERegOp("HSigmoidGrad") \
    .fusion_type("OPAQUE") \
    .async_flag(False) \
    .binfile_name("hard_sigmoid_grad.so") \
    .compute_cost(10) \
    .kernel_name("hard_sigmoid_grad") \
    .partial_flag(True) \
    .attr("alpha", "optional", "float", "all", "0.16666666") \
    .attr("beta", "optional", "float", "all", "0.5") \
    .input(0, "grads", False, "required", "all") \
    .input(1, "input_x", False, "required", "all") \
    .output(0, "output", False, "required", "all") \
    .dtype_format(DataType.F16_Default, DataType.F16_Default, DataType.F16_Default) \
    .dtype_format(DataType.F32_Default, DataType.F32_Default, DataType.F32_Default) \
    .get_op_info()


@op_info_register(hsigmoid_grad_op_info)
def _hsigmoid_grad_tbe():
    """HSigmoidGrad TBE register"""
    return
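
The alpha and beta attributes registered above match the usual hard-sigmoid definition y = clip(alpha*x + beta, 0, 1); with the defaults (alpha ≈ 1/6, beta = 0.5) the linear region is -3 < x < 3, where the derivative is alpha, and 0 where the clip saturates. A small NumPy illustration under that assumption, not part of the commit:

import numpy as np

alpha, beta = 0.16666666, 0.5  # defaults from the registration above
x = np.array([-4.0, -2.0, 0.0, 2.0, 4.0], dtype=np.float32)
y = np.clip(alpha * x + beta, 0.0, 1.0)              # forward hard-sigmoid
dy_dx = np.where((y > 0.0) & (y < 1.0), alpha, 0.0)  # slope alpha inside the clip
print(dy_dx)  # ≈ [0, 1/6, 1/6, 1/6, 0]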

View File

@@ -1740,8 +1740,31 @@ class HSwishGrad(_ActivationGrad):
"""Gets the gradient of HSwish operation."""
class HSigmoidGrad(_ActivationGrad):
"""Gets the gradient of HSigmoid operation."""
class HSigmoidGrad(Primitive):
"""
Gets the gradient of HSigmoid operation.
Inputs:
- **grads** (Tensor) - The gradients of loss to output of HSigmoid function. Currently
grads data type only support float16 and float32.
- **input_x** (Tensor) - Must be the input `input_x` of the forward operator HSigmoid.
Currentlyinput_x data type only support float16 and float32.
Outputs:
- **output** (Tensor) - With the same shape and data type as `input_x`.
Raises:
TypeError: If shape of `grads` is not the same as `input_x`.
TypeError: If dtype of `grads` is not the same as `input_x`.
TypeError: If dtype of `grads` or `input_x` is neither float16 nor float32.
Supported Platforms:
``Ascend`` ``GPU`` ``CPU``
"""
@prim_attr_register
def __init__(self):
self.init_prim_io_names(inputs=['grads', 'input_x'], outputs=['output'])
class SigmoidCrossEntropyWithLogitsGrad(PrimitiveWithInfer):
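
A hedged usage sketch, not part of the diff: it assumes a backend that ships the kernel, and takes the `_grad_ops` import path from the test file below. Gradient passes only where -3 < input_x < 3, scaled by the 1/6 slope.

import numpy as np
from mindspore import Tensor
from mindspore.ops.operations import _grad_ops as G

grads = Tensor(np.array([[0.1, 0.2, 0.3]], np.float32))
input_x = Tensor(np.array([[-4.0, 0.0, 4.0]], np.float32))
output = G.HSigmoidGrad()(grads, input_x)
# Only the middle element is inside the linear region,
# so the result is approximately [[0.0, 0.0333, 0.0]].
print(output)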

View File

@@ -2154,6 +2154,10 @@ test_case_nn_ops = [
        'desc_inputs': [Tensor(np.array([[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]]), mstype.float16),
                        Tensor(np.array([[-3, -2, 0], [1, 2, 4]]), mstype.float16)],
        'desc_bprop': [],
        'skip': ['backward']}),
    ('HSigmoidGrad', {
        'block': G.HSigmoidGrad(),
        'desc_inputs': [Tensor(np.array([[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]]), mstype.float16),
                        Tensor(np.array([[-4, -3, -2], [1, 2, 4]]), mstype.float16)],
        'skip': ['backward']}),
]
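
For the inputs in the new test entry, the forward result expected under the standard hard-sigmoid derivative (slope 1/6 on -3 < x < 3, zero at and beyond the saturation boundaries) can be sketched as follows; this assumption is not confirmed by the diff itself:

import numpy as np

grads = np.array([[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]], np.float16)
x = np.array([[-4, -3, -2], [1, 2, 4]], np.float16)
# x = -4, -3 and 4 sit at or beyond saturation, so only the three
# in-range entries pass a scaled gradient through.
expected = np.where((x > -3) & (x < 3), grads / 6, 0).astype(np.float16)
print(expected)  # ≈ [[0, 0, 0.05], [0.0667, 0.0833, 0]]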