fix rrelu api
commit fb28595079
parent 0b75102dc9
@@ -1,7 +1,7 @@
 mindspore.nn.RReLU
 ==================

-.. py:class:: mindspore.nn.RReLU(lower=0.125, upper=float(1. / 3))
+.. py:class:: mindspore.nn.RReLU(lower=1/8, upper=1/3)

 Randomized Leaky ReLU activation function.

@@ -470,7 +470,7 @@ class RReLU(Cell):
         \text{RReLU}(x_{ji}) = \begin{cases}x_{ji}, &\text{if } x_{ji} \geq 0; \cr
         {\alpha_{ji}} * x_{ji}, &\text{otherwise.}\end{cases}

-    where :math:`\alpha_{ji}` ~ :math:`U(l, u)`, :math: `l \le u`.
+    where :math:`\alpha_{ji}` ~ :math:`U(l, u)`, :math:`l \le u`.

     Args:
         lower (Union[int, float]): Slope of the activation function at x < 0. Default: 1/8.
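
For context, a minimal usage sketch of the API this commit documents (the input values and shape are illustrative, not part of the commit; since RReLU samples each alpha_ji from U(lower, upper) at runtime, the scaled negative outputs vary between runs):

    import numpy as np
    import mindspore
    from mindspore import Tensor, nn

    # Defaults match the corrected signature: lower=1/8, upper=1/3.
    rrelu = nn.RReLU(lower=1/8, upper=1/3)

    # Illustrative input tensor (values chosen arbitrarily).
    x = Tensor(np.array([[-1.0, 4.0], [2.0, -8.0]]), mindspore.float32)
    output = rrelu(x)
    # Non-negative entries pass through unchanged; each negative entry is
    # multiplied by its own alpha_ji drawn uniformly from [1/8, 1/3].
    print(output)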