From 2c20b046c87055813b2ec2d5e964e8d1a97703aa Mon Sep 17 00:00:00 2001
From: guozhibin
Date: Thu, 29 Dec 2022 20:47:14 +0800
Subject: [PATCH] add api mindspore.ops.tanhshrink

---
 docs/api/api_python/mindspore.ops.rst              |  1 +
 .../ops/mindspore.ops.func_tanhshrink.rst          |  8 ++
 docs/api/api_python_en/mindspore.ops.rst           |  1 +
 .../python/mindspore/nn/layer/activation.py        |  3 +-
 .../python/mindspore/ops/function/__init__.py      |  1 +
 .../mindspore/ops/function/math_func.py            | 23 +++++
 tests/st/ops/test_ops_tanhshrink.py                | 92 +++++++++++++++++++
 7 files changed, 127 insertions(+), 2 deletions(-)
 create mode 100644 docs/api/api_python/ops/mindspore.ops.func_tanhshrink.rst
 create mode 100644 tests/st/ops/test_ops_tanhshrink.py

diff --git a/docs/api/api_python/mindspore.ops.rst b/docs/api/api_python/mindspore.ops.rst
index 8099ea4e418..1d66d971274 100644
--- a/docs/api/api_python/mindspore.ops.rst
+++ b/docs/api/api_python/mindspore.ops.rst
@@ -275,6 +275,7 @@ mindspore.ops
     mindspore.ops.svd
     mindspore.ops.t
     mindspore.ops.tan
+    mindspore.ops.tanhshrink
     mindspore.ops.true_divide
     mindspore.ops.trunc
     mindspore.ops.truncate_div
diff --git a/docs/api/api_python/ops/mindspore.ops.func_tanhshrink.rst b/docs/api/api_python/ops/mindspore.ops.func_tanhshrink.rst
new file mode 100644
index 00000000000..b93b6c31014
--- /dev/null
+++ b/docs/api/api_python/ops/mindspore.ops.func_tanhshrink.rst
@@ -0,0 +1,8 @@
+mindspore.ops.tanhshrink
+=========================
+
+.. py:function:: mindspore.ops.tanhshrink(x)
+
+    按元素计算Tanhshrink函数。
+
+    详情请查看 :class:`mindspore.nn.Tanhshrink` 。
\ No newline at end of file
diff --git a/docs/api/api_python_en/mindspore.ops.rst b/docs/api/api_python_en/mindspore.ops.rst
index 873daac048a..382c3167846 100644
--- a/docs/api/api_python_en/mindspore.ops.rst
+++ b/docs/api/api_python_en/mindspore.ops.rst
@@ -275,6 +275,7 @@ Element-by-Element Operations
     mindspore.ops.svd
     mindspore.ops.t
     mindspore.ops.tan
+    mindspore.ops.tanhshrink
     mindspore.ops.true_divide
     mindspore.ops.trunc
     mindspore.ops.truncate_div
diff --git a/mindspore/python/mindspore/nn/layer/activation.py b/mindspore/python/mindspore/nn/layer/activation.py
index 6deb8b91346..b581d160980 100644
--- a/mindspore/python/mindspore/nn/layer/activation.py
+++ b/mindspore/python/mindspore/nn/layer/activation.py
@@ -744,10 +744,9 @@ class Tanhshrink(Cell):
     def __init__(self):
         """Initialize Tanhshrink."""
         super(Tanhshrink, self).__init__()
-        self.tanh = P.Tanh()
 
     def construct(self, x):
-        return x - self.tanh(x)
+        return F.tanhshrink(x)
 
 
 @constexpr
diff --git a/mindspore/python/mindspore/ops/function/__init__.py b/mindspore/python/mindspore/ops/function/__init__.py
index 73df4c7256c..5ece833db6c 100644
--- a/mindspore/python/mindspore/ops/function/__init__.py
+++ b/mindspore/python/mindspore/ops/function/__init__.py
@@ -272,6 +272,7 @@ from .math_func import (
     sinh,
     cosh,
     tanh,
+    tanhshrink,
     asinh,
     arcsinh,
     acosh,
diff --git a/mindspore/python/mindspore/ops/function/math_func.py b/mindspore/python/mindspore/ops/function/math_func.py
index d82824982b5..0ee7e497908 100644
--- a/mindspore/python/mindspore/ops/function/math_func.py
+++ b/mindspore/python/mindspore/ops/function/math_func.py
@@ -9612,6 +9612,28 @@ def sum(x, dim=None, keepdim=False, *, dtype=None):
     return out
 
 
+def tanhshrink(x):
+    r"""
+    Applies the Tanhshrink function element-wise: :math:`Tanhshrink(x) = x - Tanh(x)`.
+    See :class:`mindspore.nn.Tanhshrink` for more details.
+
+    Supported Platforms:
+        ``Ascend`` ``GPU`` ``CPU``
+
+    Examples:
+        >>> import mindspore as ms
+        >>> import mindspore.ops as ops
+        >>> from mindspore import Tensor
+        >>> import numpy as np
+        >>> x = Tensor(np.array([1, 2, 3, 2, 1]), ms.float16)
+        >>> output = ops.tanhshrink(x)
+        >>> print(output)
+        [0.2383 1.036 2.004 1.036 0.2383]
+    """
+    tanh_op = _get_cache_prim(P.Tanh)()
+    return x - tanh_op(x)
+
+
 __all__ = [
     'addn',
     'absolute',
@@ -9730,6 +9752,7 @@ __all__ = [
     'sinh',
     'cosh',
     'tanh',
+    'tanhshrink',
     'asinh',
     'arcsinh',
     'acosh',
diff --git a/tests/st/ops/test_ops_tanhshrink.py b/tests/st/ops/test_ops_tanhshrink.py
new file mode 100644
index 00000000000..cfb3d0e302e
--- /dev/null
+++ b/tests/st/ops/test_ops_tanhshrink.py
@@ -0,0 +1,92 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+import numpy as np
+import pytest
+
+import mindspore as ms
+import mindspore.nn as nn
+import mindspore.ops as ops
+from mindspore import Tensor
+
+
+class Net(nn.Cell):
+    def construct(self, x):
+        return ops.tanhshrink(x)
+
+
+@pytest.mark.level0
+@pytest.mark.platform_x86_cpu
+@pytest.mark.platform_arm_cpu
+@pytest.mark.platform_x86_gpu_training
+@pytest.mark.platform_arm_ascend_training
+@pytest.mark.platform_x86_ascend_training
+@pytest.mark.env_onecard
+@pytest.mark.parametrize('mode', [ms.GRAPH_MODE, ms.PYNATIVE_MODE])
+def test_tanhshrink_normal(mode):
+    """
+    Feature: Tanhshrink
+    Description: Verify the result of Tanhshrink with normal input
+    Expectation: success
+    """
+    ms.set_context(mode=mode)
+    net = Net()
+    a = Tensor(np.array([1, 2, 3, 2, 1]).astype(np.float16))
+    output = net(a).asnumpy()
+    expected_output = np.array([0.2383, 1.036, 2.004, 1.036, 0.2383]).astype(np.float16)
+    assert np.allclose(output, expected_output, 1e-3, 1e-3)
+
+
+@pytest.mark.level0
+@pytest.mark.platform_x86_cpu
+@pytest.mark.platform_arm_cpu
+@pytest.mark.platform_x86_gpu_training
+@pytest.mark.platform_arm_ascend_training
+@pytest.mark.platform_x86_ascend_training
+@pytest.mark.env_onecard
+@pytest.mark.parametrize('mode', [ms.GRAPH_MODE, ms.PYNATIVE_MODE])
+def test_tanhshrink_negative(mode):
+    """
+    Feature: Tanhshrink
+    Description: Verify the result of Tanhshrink with negative input
+    Expectation: success
+    """
+    ms.set_context(mode=mode)
+    net = Net()
+    a = Tensor(np.array([-1, -2, -3, -2, -1]).astype(np.float16))
+    output = net(a).asnumpy()
+    expected_output = np.array([-0.2383, -1.036, -2.004, -1.036, -0.2383]).astype(np.float16)
+    assert np.allclose(output, expected_output, 1e-3, 1e-3)
+
+
+@pytest.mark.level0
+@pytest.mark.platform_x86_cpu
+@pytest.mark.platform_arm_cpu
+@pytest.mark.platform_x86_gpu_training
+@pytest.mark.platform_arm_ascend_training
+@pytest.mark.platform_x86_ascend_training
+@pytest.mark.env_onecard
+@pytest.mark.parametrize('mode', [ms.GRAPH_MODE, ms.PYNATIVE_MODE])
+def test_tanhshrink_zeros(mode):
+    """
+    Feature: Tanhshrink
+    Description: Verify the result of Tanhshrink with zeros
+    Expectation: success
+    """
+    ms.set_context(mode=mode)
+    net = Net()
+    a = Tensor(np.array([0, 0, 0, 0, 0]).astype(np.float16))
+    output = net(a).asnumpy()
+    expected_output = np.array([0, 0, 0, 0, 0]).astype(np.float16)
+    assert np.allclose(output, expected_output, 1e-3, 1e-3)
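
Note (not part of the patch): the hard-coded float16 expectations above follow directly from
tanhshrink(x) = x - tanh(x), so they can be cross-checked against a plain NumPy reference.
The sketch below is illustrative only and assumes nothing beyond NumPy; the names `reference`
and `expected` are hypothetical and do not appear in the patch.

    # Cross-check of the expected float16 values used in test_ops_tanhshrink.py.
    # Working in float16 mirrors the precision of the test inputs; any remaining
    # rounding differences are covered by the tests' 1e-3 tolerances.
    import numpy as np

    x = np.array([1, 2, 3, 2, 1], dtype=np.float16)
    reference = x - np.tanh(x)   # element-wise x - tanh(x)
    print(reference)             # approximately [0.2383 1.036 2.004 1.036 0.2383]

    expected = np.array([0.2383, 1.036, 2.004, 1.036, 0.2383], dtype=np.float16)
    assert np.allclose(reference, expected, 1e-3, 1e-3)

The negative-input expectations follow because tanhshrink is odd (tanhshrink(-x) = -tanhshrink(x)),
and tanhshrink(0) = 0 gives the all-zeros case.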