!33956 Add a nn cell for softsign.

Merge pull request !33956 from liqiliang/softsign-cpu
i-robot 2022-05-11 03:15:10 +00:00 committed by Gitee
commit a0a9c942c8
6 changed files with 95 additions and 1 deletion

View File

@ -115,6 +115,7 @@ Compared with the previous version, the `mindspore.nn` interfaces in MindSpore that have been added, removed, and supp…
mindspore.nn.LeakyReLU
mindspore.nn.LogSigmoid
mindspore.nn.LogSoftmax
mindspore.nn.Softsign
mindspore.nn.PReLU
mindspore.nn.ReLU
mindspore.nn.ReLU6

View File

@ -0,0 +1,26 @@
mindspore.nn.Softsign
=====================
.. py:class:: mindspore.nn.Softsign
Softsign activation function.
Applies the Softsign function element-wise.
Softsign is defined as:
.. math::
\text{SoftSign}(x) = \frac{x}{1 + |x|}
**Inputs:**
**input_x** (Tensor) - Tensor of shape :math:`(N, *)`, where :math:`*` means any number of dimensions. Its data type must be float16 or float32.
**Outputs:**
Tensor, with the same data type and shape as `input_x`.
**Raises:**
- **TypeError** - If `input_x` is not a Tensor.
- **TypeError** - If the data type of `input_x` is neither float16 nor float32.
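As a quick check of the definition, evaluating it at the sample points used in the Examples block of the new Cell below gives

.. math::
    \text{SoftSign}(-1) = \frac{-1}{1 + |-1|} = -0.5, \quad
    \text{SoftSign}(2) = \frac{2}{1 + 2} \approx 0.6667, \quad
    \text{SoftSign}(30) = \frac{30}{31} \approx 0.9677,

which matches the printed output `[ 0. -0.5 0.6666667 0.9677419 -0.9677419]`.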

View File

@ -115,6 +115,7 @@ Nonlinear Activation Function Layer
mindspore.nn.LeakyReLU
mindspore.nn.LogSigmoid
mindspore.nn.LogSoftmax
mindspore.nn.Softsign
mindspore.nn.PReLU
mindspore.nn.ReLU
mindspore.nn.ReLU6

View File

@ -32,6 +32,7 @@ __all__ = ['Softmax',
'GELU',
'FastGelu',
'Sigmoid',
'Softsign',
'PReLU',
'get_activation',
'LeakyReLU',
@ -612,6 +613,49 @@ class Sigmoid(Cell):
return self.sigmoid(x)
class Softsign(Cell):
r"""
Softsign activation function.
Applies the Softsign function element-wise.
Softsign is defined as:
.. math::
\text{SoftSign}(x) = \frac{x}{1 + |x|}
Inputs:
- **input_x** (Tensor) - Tensor of shape :math:`(N, *)`, where :math:`*` means any number of
additional dimensions, with float16 or float32 data type.
Outputs:
Tensor, with the same type and shape as `input_x`.
Raises:
TypeError: If `input_x` is not a Tensor.
TypeError: If dtype of `input_x` is neither float16 nor float32.
Supported Platforms:
``Ascend`` ``CPU``
Examples:
>>> input_x = Tensor(np.array([0, -1, 2, 30, -30]), mindspore.float32)
>>> softsign = nn.Softsign()
>>> output = softsign(input_x)
>>> print(output)
[ 0. -0.5 0.6666667 0.9677419 -0.9677419]
"""
def __init__(self):
"""Initialize Softsign."""
super(Softsign, self).__init__()
self.softsign = P.Softsign()
def construct(self, x):
return self.softsign(x)
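The docstring example only calls the cell standalone; below is a minimal sketch of composing the new cell with other layers (not part of this change; `nn.Dense` and `nn.SequentialCell` are existing `mindspore.nn` APIs, and the layer sizes are arbitrary):

import numpy as np
import mindspore.nn as nn
from mindspore import Tensor

# Hypothetical toy network: Softsign as the hidden activation of a small MLP.
net = nn.SequentialCell([
    nn.Dense(4, 8),
    nn.Softsign(),
    nn.Dense(8, 2),
])
x = Tensor(np.random.randn(3, 4).astype(np.float32))
print(net(x).shape)  # expected: (3, 2)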
class PReLU(Cell):
r"""
PReLU activation function.
@ -665,6 +709,7 @@ class PReLU(Cell):
[0.9 0.9]]]]
"""
@cell_attr_register(attrs="")
def __init__(self, channel=1, w=0.25):
"""Initialize PReLU."""
@ -952,6 +997,7 @@ _activation = {
'fast_gelu': FastGelu,
'elu': ELU,
'sigmoid': Sigmoid,
'softsign': Softsign,
'prelu': PReLU,
'leakyrelu': LeakyReLU,
'hswish': HSwish,
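Because `'softsign'` is now registered in `_activation` and `get_activation` is exported in `__all__` above, the cell should also be reachable by name; a minimal sketch, assuming the default lookup behaviour of `nn.get_activation`:

import numpy as np
import mindspore.nn as nn
from mindspore import Tensor

# Resolve the activation by its registry key instead of instantiating the class directly.
softsign = nn.get_activation('softsign')
output = softsign(Tensor(np.array([0, -1, 2, 30, -30], dtype=np.float32)))
print(output)  # expected: [ 0. -0.5 0.6666667 0.9677419 -0.9677419]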

View File

@ -433,7 +433,7 @@ class Softsign(Primitive):
.. math::
\text{SoftSign}(x) = \frac{x}{ 1 + |x|}
\text{SoftSign}(x) = \frac{x}{1 + |x|}
Inputs:
- **input_x** (Tensor) - Tensor of shape :math:`(N, *)`, where :math:`*` means, any number of

View File

@ -105,3 +105,23 @@ def test_compile_leaky_relu():
net = NetLeakyReLU(alpha=0.1)
input_data = Tensor(np.array([[1.6, 0, 0.6], [6, 0, -6]], dtype=np.float32))
_cell_graph_executor.compile(net, input_data)
class NetSoftsign(nn.Cell):
def __init__(self):
super(NetSoftsign, self).__init__()
self.softsign = nn.Softsign()
def construct(self, x):
return self.softsign(x)
def test_compile_softsign():
"""
Feature: ALL To ALL
Description: test cases for Softsign
Expectation: no exception
"""
net = NetSoftsign()
x = np.array([0, -1, 2, 30, -30], dtype=np.float32)
_cell_graph_executor.compile(net, Tensor(x))
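The committed test only verifies graph compilation; a possible complementary numeric check (not part of this commit, reusing the imports already present in this test file) would compare the cell's output against a NumPy reference:

def test_softsign_value():
    """
    Feature: Softsign
    Description: compare nn.Softsign output against a NumPy reference
    Expectation: results match within float32 tolerance
    """
    x = np.array([0, -1, 2, 30, -30], dtype=np.float32)
    output = nn.Softsign()(Tensor(x)).asnumpy()
    expected = x / (1 + np.abs(x))  # closed-form SoftSign
    assert np.allclose(output, expected, rtol=1e-5, atol=1e-5)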