add dropout api.
commit d5deb33184
parent b6b12e7868
@@ -82,6 +82,7 @@ functional operators are Primitives that have been initialized and can be used directly as functions
     :template: classtemplate.rst
 
     mindspore.ops.celu
+    mindspore.ops.dropout
     mindspore.ops.fast_gelu
     mindspore.ops.gumbel_softmax
     mindspore.ops.hardshrink
@@ -21,4 +21,9 @@
     - **TypeError** - `keep_prob` is not a float.
     - **TypeError** - `Seed0` or `Seed1` is not an int.
     - **TypeError** - The data type of `x` is neither float16 nor float32.
-    - **TypeError** - `x` is not a Tensor.
+    - **TypeError** - `x` is not a Tensor.
+
+    For more details, refer to :func:`mindspore.ops.dropout`.
+
+    Supported Platforms:
+        ``Ascend`` ``GPU`` ``CPU``
@@ -0,0 +1,25 @@
+mindspore.ops.dropout
+======================
+
+.. py:function:: mindspore.ops.dropout(x, p=0.5, seed0=0, seed1=0)
+
+    Dropout is a regularization technique that, during training, randomly sets neuron outputs to zero with probability :math:`p`, which reduces the correlation between neurons and helps prevent overfitting.
+
+    **Parameters:**
+
+    - **x** (Tensor) - The input of dropout, a Tensor of any shape with data type float16 or float32.
+    - **p** (float) - The probability of zeroing out an input neuron, between 0 and 1. For example, p=0.1 drops 10% of the neurons. Default: 0.5.
+    - **seed0** (int) - The operator-level random seed, used to generate random numbers. Default: 0.
+    - **seed1** (int) - The global random seed, which together with the operator-level seed determines the generated random numbers. Default: 0.
+
+    **Returns:**
+
+    - **output** (Tensor) - With the same shape and data type as `x`.
+    - **mask** (Tensor) - With the same shape as `x`.
+
+    **Raises:**
+
+    - **TypeError** - `p` is not a float.
+    - **TypeError** - `seed0` or `seed1` is not an int.
+    - **TypeError** - The data type of `x` is neither float16 nor float32.
+    - **TypeError** - `x` is not a Tensor.
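The new functional entry can be exercised directly once this commit is applied. The following is a minimal usage sketch, not part of the commit: the input values and p=0.4 are arbitrary, and the printed shapes follow the **Returns** section above (both `output` and `mask` keep the shape of `x`).

# Minimal usage sketch of the functional API documented above.
# The input values and p=0.4 are arbitrary illustrations.
import numpy as np
import mindspore
from mindspore import Tensor, ops

x = Tensor(np.ones((2, 3)), mindspore.float32)
output, mask = ops.dropout(x, p=0.4, seed0=1, seed1=2)

# Per the Returns section above, both tensors keep the input shape.
print(output.shape, mask.shape)  # (2, 3) (2, 3)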
@@ -420,6 +420,7 @@ def get_bias_add_grad_vmap_rule(prim, axis_size):
     return vmap_rule
 
 
+@vmap_rules_getters.register(P.Dropout)
 @vmap_rules_getters.register(P.Dropout2D)
 @vmap_rules_getters.register(P.Dropout3D)
 def get_dropout_nd_vmap_rule(prim, axis_size):
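Registering `P.Dropout` with the same vmap rule already used for Dropout2D/Dropout3D means a dropout call can now be batched with vmap. Below is a hedged sketch of what that enables, assuming `mindspore.ops.vmap` is available in this version; the helper `row_dropout` is hypothetical and only for illustration.

# Hedged sketch: once the rule above is registered for P.Dropout,
# ops.vmap should be able to map a function that calls ops.dropout.
import numpy as np
import mindspore
from mindspore import Tensor, ops

def row_dropout(row):
    # Hypothetical helper: apply dropout to a single row.
    out, _ = ops.dropout(row, p=0.5)
    return out

batch = Tensor(np.ones((4, 8)), mindspore.float32)
batched = ops.vmap(row_dropout, in_axes=0, out_axes=0)(batch)
print(batched.shape)  # expected: (4, 8)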
@@ -282,7 +282,8 @@ from .nn_func import (
     cross_entropy,
     grid_sample,
     ctc_greedy_decoder,
-    ctc_loss
+    ctc_loss,
+    dropout
 )
 from .linalg_func import (
     svd,
@@ -326,6 +326,38 @@ def binary_cross_entropy_with_logits(logits, label, weight, pos_weight, reductio
     return bce_with_logits_loss_op(logits, label, weight, pos_weight)
 
 
+def dropout(x, p=0.5, seed0=0, seed1=0):
+    """
+    During training, randomly zeroes some of the elements of the input tensor
+    with probability `p` from a Bernoulli distribution.
+    Args:
+        x (Tensor): The input of Dropout, a Tensor of any shape with data type of float16 or float32.
+        p (float): The dropping rate, between 0 and 1, e.g. p = 0.1,
+            means dropping out 10% of input units. Default: 0.5.
+        seed0 (int): The operator-level random seed. Default: 0.
+        seed1 (int): The global random seed. Default: 0.
+    Returns:
+        - **output** (Tensor) - With the same shape and data type as `x`.
+        - **mask** (Tensor) - With the same shape as `x`.
+    Raises:
+        TypeError: If `p` is not a float.
+        TypeError: If `seed0` or `seed1` is not an int.
+        TypeError: If dtype of `x` is neither float16 nor float32.
+        TypeError: If `x` is not a Tensor.
+    Supported Platforms:
+        ``Ascend`` ``GPU`` ``CPU``
+    Examples:
+        >>> from mindspore.ops import dropout
+        >>> x = Tensor(((20, 16), (50, 50)), mindspore.float32)
+        >>> output, mask = dropout(x, p=0.5)
+        >>> print(output.shape)
+        (2, 2)
+    """
+    keep_prob = 1 - p
+    dropout_ = P.Dropout(keep_prob=keep_prob, Seed0=seed0, Seed1=seed1)
+    return dropout_(x)
+
+
 def celu(x, alpha=1.0):
     r"""
     Computes celu (Continuously differentiable exponential linear units) of input tensors element-wise.
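The body above is a thin wrapper: `p` is converted to `keep_prob = 1 - p` and the call is forwarded to the existing `P.Dropout` primitive. The sketch below spells out that equivalence; it is an editorial illustration, not part of the commit, and the concrete values are arbitrary.

# Illustration of the wrapper relationship implemented above:
# ops.dropout(x, p, seed0, seed1) forwards to P.Dropout(keep_prob=1 - p, Seed0=seed0, Seed1=seed1).
import numpy as np
import mindspore
from mindspore import Tensor, ops

x = Tensor(np.ones((2, 2)), mindspore.float32)

out_func, mask_func = ops.dropout(x, p=0.2)           # new functional form
out_prim, mask_prim = ops.Dropout(keep_prob=0.8)(x)   # equivalent primitive form

# Both calls return (output, mask) with the same shape as x; they differ only
# in which elements the random mask happens to zero out.
print(out_func.shape, out_prim.shape)  # (2, 2) (2, 2)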
@@ -1889,6 +1921,7 @@ __all__ = [
     'smooth_l1_loss',
     'nll_loss',
     'ctc_loss',
-    'ctc_greedy_decoder'
+    'ctc_greedy_decoder',
+    'dropout'
 ]
 __all__.sort()
@@ -7111,34 +7111,10 @@ class Dropout(PrimitiveWithCheck):
     During training, randomly zeroes some of the elements of the input tensor
     with probability 1-`keep_prob` from a Bernoulli distribution.
 
-    Args:
-        keep_prob (float): The keep rate, between 0 and 1, e.g. keep_prob = 0.9,
-            means dropping out 10% of input units. Default: 0.5.
-        Seed0 (int): Seed0 value for random generating. Default: 0.
-        Seed1 (int): Seed1 value for random generating. Default: 0.
-
-    Inputs:
-        - **x** (Tensor) - The input of Dropout, a Tensor of any shape with data type of float16 or float32.
-
-    Outputs:
-        - **output** (Tensor) - With the same shape and data type as `x`.
-        - **mask** (Tensor) - With the same shape as `x`.
-
-    Raises:
-        TypeError: If `keep_prob` is not a float.
-        TypeError: If `Seed0` or `Seed1` is not an int.
-        TypeError: If dtype of `x` is neither float16 nor float32.
-        TypeError: If `x` is not a Tensor.
+    Refer to :func:`mindspore.ops.dropout` for more detail.
 
     Supported Platforms:
         ``Ascend`` ``GPU`` ``CPU``
-
-    Examples:
-        >>> dropout = ops.Dropout(keep_prob=0.5)
-        >>> x = Tensor(((20, 16), (50, 50)), mindspore.float32)
-        >>> output, mask = dropout(x)
-        >>> print(output.shape)
-        (2, 2)
     """
 
     @prim_attr_register