[feat][assistant][I3PYDB] add new Ascend operator SoftShrink
parent 4d403f5a39
commit be26e62080
@@ -370,6 +370,7 @@ inline const PrimitivePtr kLambApplyWeightAssign = std::make_shared<Primitive>("
 inline const PrimitivePtr kSoftmaxGradExt = std::make_shared<Primitive>("SoftmaxGradExt");
 inline const PrimitivePtr kSquareSumV1 = std::make_shared<Primitive>("SquareSumV1");
 inline const PrimitivePtr kFusedMulAdd = std::make_shared<Primitive>("FusedMulAdd");
+inline const PrimitivePtr kPrimSoftShrink = std::make_shared<Primitive>("SoftShrink");

 // Comm ops
 inline const PrimitivePtr kPrimMirror = std::make_shared<Primitive>("_MirrorOperator");
@@ -0,0 +1,60 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "ops/soft_shrink.h"
+
+#include <set>
+#include <algorithm>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "ops/op_utils.h"
+#include "utils/check_convert_utils.h"
+#include "abstract/primitive_infer_map.h"
+
+namespace mindspore {
+namespace ops {
+
+namespace {
+abstract::ShapePtr InferShape(const PrimitivePtr &primitive, const std::vector<AbstractBasePtr> &input_args) {
+  MS_EXCEPTION_IF_NULL(primitive);
+  CheckAndConvertUtils::CheckInteger("input number", input_args.size(), kEqual, 1, primitive->name());
+  for (const auto &item : input_args) {
+    MS_EXCEPTION_IF_NULL(item);
+  }
+  auto in_shape = CheckAndConvertUtils::ConvertShapePtrToShapeMap(input_args[0]->GetShapeTrack())[kShape];
+  return std::make_shared<abstract::Shape>(in_shape);
+}
+TypePtr InferType(const PrimitivePtr &prim, const std::vector<AbstractBasePtr> &input_args) {
+  MS_EXCEPTION_IF_NULL(prim);
+  CheckAndConvertUtils::CheckInteger("input number", input_args.size(), kEqual, 1, prim->name());
+  if (std::any_of(input_args.begin(), input_args.end(), [](const AbstractBasePtr &a) { return a == nullptr; })) {
+    MS_LOG(EXCEPTION) << "For 'SoftShrink', input arguments must not be nullptr.";
+  }
+  const std::set<TypePtr> valid_types = {kFloat16, kFloat32};
+  return CheckAndConvertUtils::CheckTensorTypeValid("input_x", input_args[0]->BuildType(), valid_types, prim->name());
+}
+}  // namespace
+
+AbstractBasePtr SoftShrinkInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,
+                                const std::vector<AbstractBasePtr> &input_args) {
+  return std::make_shared<abstract::AbstractTensor>(InferType(primitive, input_args),
+                                                    InferShape(primitive, input_args)->shape());
+}
+
+REGISTER_PRIMITIVE_EVAL_IMPL(SoftShrink, prim::kPrimSoftShrink, SoftShrinkInfer, nullptr, true);
+}  // namespace ops
+}  // namespace mindspore
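The two helpers in the anonymous namespace above only validate and propagate metadata: the output keeps the input's shape, and the input dtype must be float16 or float32. Restated as a minimal Python sketch (an editorial illustration; the function name and NumPy stand-ins are hypothetical, not part of the commit):

import numpy as np

# Mirrors InferType's {kFloat16, kFloat32} whitelist and InferShape's
# shape passthrough for this single-input elementwise op.
VALID_DTYPES = (np.float16, np.float32)

def infer_soft_shrink(x):
    if x.dtype not in VALID_DTYPES:
        raise TypeError(f"input_x must be float16 or float32, but got {x.dtype}")
    return x.shape, x.dtype  # output tensor: same shape, same dtype

print(infer_soft_shrink(np.zeros((2, 3), np.float16)))  # ((2, 3), dtype('float16'))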
@@ -0,0 +1,43 @@
+/**
+ * Copyright 2021 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef MINDSPORE_CORE_OPS_SOFTSHRINK_H_
+#define MINDSPORE_CORE_OPS_SOFTSHRINK_H_
+#include <map>
+#include <memory>
+#include <vector>
+#include <string>
+#include "ops/primitive_c.h"
+#include "abstract/abstract_value.h"
+#include "utils/check_convert_utils.h"
+
+namespace mindspore {
+namespace ops {
+constexpr auto kNameSoftShrink = "SoftShrink";
+class SoftShrink : public PrimitiveC {
+ public:
+  SoftShrink() : PrimitiveC(kNameSoftShrink) { InitIOName({"input_x"}, {"output"}); }
+  ~SoftShrink() = default;
+  MS_DECLARE_PARENT(SoftShrink, PrimitiveC);
+};
+
+AbstractBasePtr SoftShrinkInfer(const abstract::AnalysisEnginePtr &, const PrimitivePtr &primitive,
+                                const std::vector<AbstractBasePtr> &input_args);
+
+using PrimSoftShrinkPtr = std::shared_ptr<SoftShrink>;
+
+}  // namespace ops
+}  // namespace mindspore
+#endif  // MINDSPORE_CORE_OPS_SOFTSHRINK_H_
@@ -386,3 +386,4 @@ from .not_equal_ds import _not_ds_equal_tbe
 from .reciprocal_ds import _reciprocal_ds_tbe
 from .ctc_loss_v2 import _ctc_loss_v2_tbe
 from .ctc_loss_v2_grad import _ctc_loss_v2_grad_tbe
+from .soft_shrink import _soft_shrink_tbe
@@ -0,0 +1,36 @@
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+
+"""SoftShrink op"""
+from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType
+
+soft_shrink_op_info = TBERegOp("SoftShrink") \
+    .fusion_type("ELEMWISE") \
+    .async_flag(False) \
+    .binfile_name("soft_shrink.so") \
+    .compute_cost(10) \
+    .kernel_name("soft_shrink") \
+    .partial_flag(True) \
+    .attr("lambd", "optional", "float", "all", "0.5") \
+    .input(0, "input_x", False, "required", "all") \
+    .output(0, "output_y", False, "required", "all") \
+    .dtype_format(DataType.F16_Default, DataType.F16_Default) \
+    .dtype_format(DataType.F32_Default, DataType.F32_Default) \
+    .get_op_info()
+
+@op_info_register(soft_shrink_op_info)
+def _soft_shrink_tbe():
+    """SoftShrink TBE register"""
+    return
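The registration above is what lets the Ascend backend match the SoftShrink primitive to the soft_shrink TBE kernel; note that the "0.5" default for the lambd attribute matches the Python primitive's default below. A hedged usage sketch, assuming an Ascend environment with this commit built in:

import numpy as np
import mindspore as ms
from mindspore.ops import operations as P

x = ms.Tensor(np.array([1.2, -0.3, 0.7], np.float32))
softshrink = P.SoftShrink()  # lambd defaults to 0.5
print(softshrink(x))         # expected: [ 0.7  0.   0.2]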
@@ -86,7 +86,7 @@ from .nn_ops import (LSTM, SGD, Adam, FusedSparseAdam, FusedSparseLazyAdam, Adam
                      FusedSparseFtrl, FusedSparseProximalAdagrad,
                      ApplyAdaMax, ApplyAdadelta, ApplyAdagrad, ApplyAdagradV2,
                      ApplyAddSign, ApplyPowerSign, ApplyGradientDescent, ApplyProximalGradientDescent,
-                     ApplyRMSProp, ApplyCenteredRMSProp, BasicLSTMCell, InTopK, AdaptiveAvgPool2D)
+                     ApplyRMSProp, ApplyCenteredRMSProp, BasicLSTMCell, InTopK, AdaptiveAvgPool2D, SoftShrink)
 from . import _quant_ops
 from ._quant_ops import *
 from .other_ops import (Assign, InplaceAssign, IOU, BoundingBoxDecode, BoundingBoxEncode,
@@ -482,6 +482,7 @@ __all__ = [
     "TensorScatterMax",
     "TensorScatterMin",
     "TensorScatterSub",
+    "SoftShrink",
 ]

 __all__.sort()
@@ -8669,3 +8669,48 @@ class Conv3DTranspose(PrimitiveWithInfer):
             'dtype': x['dtype'],
         }
         return out
+
+class SoftShrink(Primitive):
+    r"""
+    Applies the soft shrinkage function elementwise.
+    .. math::
+        \text{SoftShrink}(x) =
+        \begin{cases}
+        x - \lambda, & \text{ if } x > \lambda \\
+        x + \lambda, & \text{ if } x < -\lambda \\
+        0, & \text{ otherwise }
+        \end{cases}
+
+    Args:
+        lambd (float): The :math:`\lambda` value for the SoftShrink formulation, which must be no less than zero. Default: 0.5.
+
+    Inputs:
+        - **input_x** (Tensor) - The input of SoftShrink with data type of float16 or float32.
+          Any number of additional dimensions.
+
+    Outputs:
+        Tensor, has the same shape and data type as `input_x`.
+
+    Raises:
+        TypeError: If `lambd` is not a float.
+        TypeError: If `input_x` is not a Tensor.
+        TypeError: If dtype of `input_x` is neither float16 nor float32.
+        ValueError: If `lambd` is less than 0.
+
+    Supported Platforms:
+        ``Ascend``
+
+    Examples:
+        >>> input_x = Tensor(np.array([[ 0.5297, 0.7871, 1.1754], [ 0.7836, 0.6218, -1.1542]]), mstype.float16)
+        >>> softshrink = ops.SoftShrink()
+        >>> output = softshrink(input_x)
+        >>> print(output)
+        [[ 0.02979  0.287    0.676  ]
+         [ 0.2837   0.1216  -0.6543 ]]
+    """
+
+    @prim_attr_register
+    def __init__(self, lambd=0.5):
+        """Initialize SoftShrink"""
+        validator.check_value_type("lambd", lambd, [float], self.name)
+        validator.check_number("lambd", lambd, 0, Rel.GE, self.name)
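The three-case definition in the docstring collapses to sign(x) * max(|x| - lambd, 0). A NumPy reference sketch (an editorial illustration, not part of the commit) that reproduces the documented example values:

import numpy as np

def softshrink_ref(x, lambd=0.5):
    # x - lambd where x > lambd, x + lambd where x < -lambd, 0 otherwise
    return np.sign(x) * np.maximum(np.abs(x) - lambd, 0.0)

x = np.array([[0.5297, 0.7871, 1.1754], [0.7836, 0.6218, -1.1542]], np.float32)
print(softshrink_ref(x))
# ~ [[ 0.0297  0.2871  0.6754]
#    [ 0.2836  0.1218 -0.6542]]  (matches the float16 output above up to precision)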
@@ -2145,6 +2145,10 @@ test_case_nn_ops = [
                    Tensor(np.zeros((1, 1, 2, 2)), mstype.uint16)],
         'desc_bprop': [],
         'skip': ['backward']}),
+    ('SoftShrink', {
+        'block': P.SoftShrink(),
+        'desc_inputs': [Tensor(np.array([[0.5297, 0.7871, 1.1754], [0.7836, 0.6218, -1.1542]]), mstype.float32)],
+        'desc_bprop': [Tensor(np.array([[0, 0.4, 1], [1, 2, 4]]), mstype.float32)]}),
 ]

 test_case_array_ops = [
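For context on the 'desc_bprop' entry: the derivative of SoftShrink is 1 where |x| > lambd and 0 inside the [-lambd, lambd] band, so the backward pass simply masks the incoming gradient. A NumPy sketch of that rule (illustrative only; the grad kernel itself is not part of this commit):

import numpy as np

def softshrink_grad_ref(dout, x, lambd=0.5):
    # Pass the gradient through where |x| > lambd; zero it elsewhere.
    return dout * (np.abs(x) > lambd).astype(dout.dtype)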