forked from mindspore-Ecosystem/mindspore

commit 2b4c0d15f5
parent 6d6327882f

    remove tensor

@@ -598,22 +598,6 @@ mindspore.Tensor
     - **TypeError** - `lambd` is not a float.
     - **TypeError** - The dtype of the original Tensor is neither float16 nor float32.
 
-    .. py:method:: hardswish()
-
-        Hard Swish activation function.
-
-        Calculates Hard Swish for each element of the input.
-
-        Refer to :func:`mindspore.ops.hardswish` for more details.
-
-        **Returns:**
-
-        Tensor, with the same data type and shape as the input Tensor.
-
-        **Raises:**
-
-        - **TypeError** - The data type of the input Tensor is neither float16 nor float32.
-
     .. py:method:: has_init
         :property:
 
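
For context (a note added here, not part of the diff): the removed docstring points to :func:`mindspore.ops.hardswish`. Hard Swish is commonly defined as

    \text{hswish}(x) = x \cdot \frac{\text{ReLU6}(x + 3)}{6} = x \cdot \frac{\min(\max(x + 3,\ 0),\ 6)}{6}

so it is continuous, equals 0 for x <= -3, and equals x for x >= 3. This is the standard definition of the function, not text taken from this commit.
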
@@ -250,7 +250,6 @@ BuiltInTypeMap &GetMethodMap() {
     {"cdist", std::string("cdist")},              // P.cdist
     {"celu", std::string("celu")},                // P.celu
     {"hardshrink", std::string("hardshrink")},    // P.hshrink
-    {"hardswish", std::string("hardswish")},      // P.HSwish
     {"soft_shrink", std::string("soft_shrink")},  // P.SoftShrink
     {"one_hot", std::string("one_hot")},          // P.OneHot
     {"intopk", std::string("intopk")},            // P.InTopK
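
Judging by this hunk, the C++ map binds Tensor method names to same-named Python functions, so dropping the "hardswish" entry is what detaches the method from the Tensor type. A minimal Python sketch of that name-to-implementation lookup (a toy stand-in; MindSpore's real dispatch lives in the C++ pipeline):

# Toy sketch of a GetMethodMap()-style lookup (assumption: simplified stand-in).
method_map = {
    "cdist": "cdist",              # P.cdist
    "celu": "celu",                # P.celu
    "hardshrink": "hardshrink",    # P.hshrink
    "soft_shrink": "soft_shrink",  # P.SoftShrink
    # "hardswish" was removed by this commit, so lookup now fails for it.
}

def resolve_tensor_method(name):
    """Map a Tensor method name to its registered function name."""
    try:
        return method_map[name]
    except KeyError:
        raise AttributeError(f"'Tensor' object has no attribute '{name}'") from None

print(resolve_tensor_method("hardshrink"))  # -> hardshrink
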
@@ -1571,34 +1571,6 @@ def adaptive_avgpool2d(x, output_size):
     return F.adaptive_avgpool2d(x, output_size)
 
 
-def hardswish(x):
-    r"""
-    Hard swish activation function.
-
-    Calculates Hard Swish for each element of the input.
-
-    Args:
-        x (Tensor): Input tensor.
-
-    Returns:
-        Tensor, with the same type and shape as `x`.
-
-    Raises:
-        TypeError: If `x` is not a Tensor.
-        TypeError: If dtype of `x` is neither float16 nor float32.
-
-    Supported Platforms:
-        ``Ascend`` ``GPU`` ``CPU``
-
-    Examples:
-        >>> import mindspore.numpy as np
-        >>> x = np.array([-1, -2, 0, 2, 1]).astype("float16")
-        >>> print(x.hardswish())
-        [-0.3333 -0.3333 0 1.666 0.6665]
-    """
-    return P.HSwish()(x)
-
-
 def getitem(data, index):
     """Implementation of `getitem`."""
     return data.__getitem__(index)
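
The values printed in the removed example can be checked against a plain-NumPy reference (an independent sketch added here, not MindSpore code; hardswish_ref is a name introduced for illustration):

import numpy as np

def hardswish_ref(x):
    """Reference Hard Swish: x * relu6(x + 3) / 6, elementwise."""
    return x * np.clip(x + 3.0, 0.0, 6.0) / 6.0

x = np.array([-1.0, -2.0, 0.0, 2.0, 1.0], dtype=np.float16)
print(hardswish_ref(x))
# [-0.3333 -0.3333  0.      1.666   0.6665]  (float16 rounding, as in the docstring)
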
@@ -3921,34 +3921,6 @@ class Tensor(Tensor_):
         self._init_check()
         return tensor_operator_registry.get('hardshrink')(lambd)(self)
 
-    def hardswish(self):
-        r"""
-        Hard swish activation function.
-
-        Calculates Hard Swish for each element of the input.
-
-        Refer to :func:`mindspore.ops.hardswish` for more details.
-
-        Returns:
-            Tensor, with the same type and shape as the input.
-
-        Raises:
-            TypeError: If dtype of the input is neither float16 nor float32.
-
-        Supported Platforms:
-            ``Ascend`` ``GPU`` ``CPU``
-
-        Examples:
-            >>> from mindspore import Tensor
-            >>> a = Tensor([-1, -2, 0, 2, 1]).astype("float16")
-            >>> output = a.hardswish()
-            >>> print(output)
-            [-0.3333 -0.3333 0 1.666 0.6665]
-        """
-
-        self._init_check()
-        return tensor_operator_registry.get('hardswish')()(self)
-
     def soft_shrink(self, lambd=0.5):
         """
         Apply the soft shrink function for a tensor. Calculates the output according to the input elements.
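
With the Tensor method removed by this commit, the same computation remains reachable through the primitive the method wrapped. A hedged usage sketch (assuming ops.HSwish, the primitive registered below, stays available):

from mindspore import Tensor, ops

a = Tensor([-1, -2, 0, 2, 1]).astype("float16")
output = ops.HSwish()(a)  # the primitive the removed method dispatched to
print(output)
# [-0.3333 -0.3333  0.      1.666   0.6665]
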
@@ -965,7 +965,6 @@ tensor_operator_registry.register('matrix_band_part', matrix_band_part)
 tensor_operator_registry.register('padding', padding)
 tensor_operator_registry.register('celu', P.CeLU)
 tensor_operator_registry.register('hardshrink', P.HShrink)
-tensor_operator_registry.register('hardswish', P.HSwish)
 tensor_operator_registry.register('soft_shrink', P.SoftShrink)
 tensor_operator_registry.register('svd', linalg_ops.Svd)
 tensor_operator_registry.register('diag', P.Diag)
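
This register/get pair is the hook the removed Tensor.hardswish used: tensor_operator_registry.get('hardswish')()(self) fetched the registered primitive class, instantiated it, and applied it. A minimal sketch of that pattern (a simplified stand-in, not MindSpore's actual registry; FakeHSwish is invented so the sketch runs standalone):

class OperatorRegistry:
    """Toy version of tensor_operator_registry (assumption: simplified)."""

    def __init__(self):
        self._ops = {}

    def register(self, name, op_class):
        # Store the primitive *class*, e.g. register('hardswish', P.HSwish).
        self._ops[name] = op_class

    def get(self, name):
        # Callers instantiate and apply: get('hardswish')()(tensor).
        return self._ops[name]


class FakeHSwish:
    """Stand-in primitive so the sketch runs without MindSpore."""

    def __call__(self, x):
        return [v * min(max(v + 3, 0), 6) / 6 for v in x]


registry = OperatorRegistry()
registry.register('hardswish', FakeHSwish)
print(registry.get('hardswish')()([-1, -2, 0, 2, 1]))
# [-0.333..., -0.333..., 0.0, 1.666..., 0.666...]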