forked from mindspore-Ecosystem/mindspore
ops softmin
parent 79b53c74af
commit 7b92d9c569
@@ -92,6 +92,7 @@ mindspore.ops
     mindspore.ops.selu
     mindspore.ops.soft_shrink
     mindspore.ops.softmax
+    mindspore.ops.softmin
     mindspore.ops.softsign
     mindspore.ops.tanh
@@ -0,0 +1,24 @@
+mindspore.ops.softmin
+=====================
+
+.. py:function:: mindspore.ops.softmin(x, axis=-1)
+
+    Applies the Softmin function to the input Tensor on the specified axis, normalizing it. Suppose the slice in the given axis is :math:`x`, then for each element :math:`x_i` the Softmin function is defined as:
+
+    .. math::
+        \text{output}(x_i) = \frac{\exp(-x_i)}{\sum_{j = 0}^{N-1}\exp(-x_j)},
+
+    where :math:`N` is the length of the Tensor.
+
+    Parameters:
+        - **x** (Tensor) - The input of Softmin, a Tensor of any dimension. Its data type is float16 or float32.
+        - **axis** (Union[int, tuple[int]], optional) - The axis on which to perform the Softmin operation. Default: -1.
+
+    Returns:
+        Tensor, with the same data type and shape as `x`.
+
+    Raises:
+        - **TypeError** - `axis` is neither an int nor a tuple.
+        - **TypeError** - The data type of `x` is neither float16 nor float32.
+        - **ValueError** - `axis` is a tuple whose length is less than 1.
+        - **ValueError** - `axis` is a tuple whose elements are not all in the range [-len(x.shape), len(x.shape)).
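A minimal NumPy sketch of the formula documented above; the `softmin_ref` helper and the input values are illustrative, not part of the patch:

```python
import numpy as np

def softmin_ref(x, axis=-1):
    """Reference Softmin: exp(-x) normalized along `axis`, per the formula above."""
    e = np.exp(-x)
    return e / e.sum(axis=axis, keepdims=True)

x = np.array([1.0, 2.0, 3.0], dtype=np.float32)
print(softmin_ref(x))  # ~[0.6652 0.2447 0.0900]; smaller entries get the larger weights
```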
@@ -94,6 +94,7 @@ Activation Functions
     mindspore.ops.softsign
     mindspore.ops.soft_shrink
     mindspore.ops.softmax
+    mindspore.ops.softmin
     mindspore.ops.tanh

 Distance Functions
@@ -27,6 +27,7 @@ from mindspore.ops import operations as P
 from mindspore.ops.operations import nn_ops as NN_OPS
 from mindspore.ops.primitive import constexpr
 from mindspore.nn.cell import Cell
+from mindspore import ops

 __all__ = ['Softmin',
            'Softmax',
@@ -156,11 +157,10 @@ class Softmin(Cell):
     def __init__(self, axis=-1):
         """Initialize Softmin."""
         super(Softmin, self).__init__()
-        self.softmax = P.Softmax(axis)
+        self.axis = axis

     def construct(self, x):
-        x = -1 * x
-        return self.softmax(x)
+        return ops.function.softmin(x, self.axis)


 class Softmax2d(Cell):
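A hedged sanity check of this refactor, assuming the patch is applied so `ops.function.softmin` is available: the new `nn.Softmin` path should agree with the `Softmax(-x)` formulation it replaces. The input values are illustrative.

```python
import numpy as np
import mindspore as ms
from mindspore import Tensor, nn, ops

x = Tensor(np.array([[1.0, 2.0, 3.0], [3.0, 2.0, 1.0]]), ms.float32)
new_out = nn.Softmin(axis=-1)(x)    # now delegates to ops.function.softmin
old_out = ops.Softmax(axis=-1)(-x)  # the Softmax(-x) path removed by this commit
assert np.allclose(new_out.asnumpy(), old_out.asnumpy())
```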
@@ -415,6 +415,7 @@ from .nn_func import (
     softsign,
     selu,
     softmax,
+    softmin,
     pdist,
     pad,
     prelu,
@@ -2135,6 +2135,45 @@ def softmax(x, axis=-1):
     return softmax_(x)


+def softmin(x, axis=-1):
+    r"""
+    Applies the Softmin operation to the input tensor on the specified axis.
+    Suppose a slice in the given axis is :math:`x`, then for each element :math:`x_i`,
+    the Softmin function is shown as follows:
+
+    .. math::
+        \text{output}(x_i) = \frac{\exp(-x_i)}{\sum_{j = 0}^{N-1}\exp(-x_j)},
+
+    where :math:`N` is the length of the tensor.
+
+    Args:
+        x (Tensor): Tensor of shape :math:`(N, *)`, where :math:`*` means any number of
+            additional dimensions, with float16 or float32 data type.
+        axis (Union[int, tuple[int]], optional): The axis to perform the Softmin operation. Default: -1.
+
+    Returns:
+        Tensor, with the same type and shape as `x`.
+
+    Raises:
+        TypeError: If `axis` is not an int or a tuple.
+        TypeError: If dtype of `x` is neither float16 nor float32.
+        ValueError: If `axis` is a tuple whose length is less than 1.
+        ValueError: If `axis` is a tuple whose elements are not all in range [-len(x.shape), len(x.shape)).
+
+    Supported Platforms:
+        ``Ascend`` ``GPU`` ``CPU``
+
+    Examples:
+        >>> x = Tensor(np.array([-1, -2, 0, 2, 1]), mindspore.float16)
+        >>> output = ops.softmin(x)
+        >>> print(output)
+        [0.2341 0.636 0.0862 0.01165 0.03168]
+    """
+    softmax_ = _get_cache_prim(P.Softmax)(axis=axis)
+    return softmax_(-x)
+
+
 def soft_shrink(x, lambd=0.5):
     r"""
     Applies the SoftShrink function element-wise.
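A short usage sketch of the new functional API, assuming this patch is applied; the tensor values and the explicit `axis` are illustrative, and the `softmax(-x)` comparison simply restates the identity the implementation relies on:

```python
import numpy as np
import mindspore as ms
from mindspore import Tensor, ops

x = Tensor(np.array([[-1.0, 4.0, 2.0], [0.5, 0.5, 0.5]]), ms.float32)
out = ops.softmin(x, axis=1)    # normalize along axis 1
ref = ops.softmax(-x, axis=1)   # softmin(x) == softmax(-x) by definition
assert np.allclose(out.asnumpy(), ref.asnumpy())
print(out.sum(axis=1))          # each row sums to 1
```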
@@ -5155,6 +5194,7 @@ __all__ = [
     'softsign',
     'selu',
     'softmax',
+    'softmin',
     'pdist',
     'pad',
     'prelu',
@@ -0,0 +1,47 @@
+# Copyright 2022 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+import numpy as np
+import pytest
+
+import mindspore as ms
+from mindspore import Tensor, nn
+import mindspore.ops.function as F
+
+
+class NetSoftmin(nn.Cell):
+    def construct(self, x, axis=-1):
+        return F.softmin(x, axis)
+
+
+@pytest.mark.level0
+@pytest.mark.platform_x86_cpu
+@pytest.mark.platform_arm_cpu
+@pytest.mark.platform_x86_gpu_training
+@pytest.mark.platform_arm_ascend_training
+@pytest.mark.platform_x86_ascend_training
+@pytest.mark.env_onecard
+@pytest.mark.parametrize('mode', [ms.GRAPH_MODE, ms.PYNATIVE_MODE])
+def test_net(mode):
+    """
+    Feature: test softmin op
+    Description: verify the result of softmin
+    Expectation: assertion success
+    """
+    ms.set_context(mode=mode)
+    x = Tensor(np.array([1, 2, 3, 4, 5]), ms.float32)
+    softmin = NetSoftmin()
+    output = softmin(x)
+    np_out = np.array([0.6364086, 0.23412165, 0.08612854, 0.03168492, 0.01165623])
+    assert np.allclose(output.asnumpy(), np_out)
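For reference, the hard-coded `np_out` above can be reproduced with plain NumPy; this snippet is illustrative and not part of the patch:

```python
import numpy as np

x = np.array([1, 2, 3, 4, 5], dtype=np.float32)
e = np.exp(-x)
print(e / e.sum())  # matches np_out in the test above, up to float32 rounding
```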