From 6b0948c62b3b8eac281efe09d5767f300a0ae582 Mon Sep 17 00:00:00 2001
From: wangshuide2020
Date: Mon, 22 Mar 2021 20:36:58 +0800
Subject: [PATCH] update the documentation of Mish operator.

---
 mindspore/ops/operations/nn_ops.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/mindspore/ops/operations/nn_ops.py b/mindspore/ops/operations/nn_ops.py
index 1ecfba4f8da..e1f573a0088 100644
--- a/mindspore/ops/operations/nn_ops.py
+++ b/mindspore/ops/operations/nn_ops.py
@@ -372,7 +372,7 @@ class ReLU(PrimitiveWithCheck):
 
 class Mish(PrimitiveWithInfer):
     r"""
-    Computes MISH of input tensors element-wise.
+    Computes MISH (A Self Regularized Non-Monotonic Neural Activation Function) of input tensors element-wise.
 
     The function is shown as follows:
 
@@ -380,6 +380,9 @@ class Mish(PrimitiveWithInfer):
 
         \text{output} = x * \tanh(\log(1 + \exp(\text{x})))
 
+    See more details in `A Self Regularized Non-Monotonic Neural Activation Function
+    <https://arxiv.org/abs/1908.08681>`_.
+
     Inputs:
         - **x** (Tensor) - The input tensor. Only support float16 and float32.
 
     Outputs:
         Tensor, with the same type and shape as the `x`.
 
     Supported Platforms:
         ``Ascend``
 
@@ -390,7 +393,7 @@ class Mish(PrimitiveWithInfer):
     Raise:
-        TypeError: If num_features data type not float16 and float32 Tensor.
+        TypeError: If dtype of `x` is neither float16 nor float32.
 
     Examples:
         >>> input_x = Tensor(np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]]), mindspore.float32)
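
Note: as a quick sanity check on the formula in this docstring, the snippet below is a minimal plain-NumPy sketch (not the MindSpore kernel, and not part of the patch; the helper name mish_reference is hypothetical). It evaluates Mish(x) = x * tanh(log(1 + exp(x))) on the same sample input used in the Examples section:

import numpy as np

def mish_reference(x):
    # Mish(x) = x * tanh(softplus(x)) = x * tanh(log(1 + exp(x)))
    # np.log1p(np.exp(x)) computes log(1 + exp(x)); adequate for the
    # small example values here, though very large x would want a
    # numerically stabler softplus.
    return x * np.tanh(np.log1p(np.exp(x)))

# Same sample input as the docstring's Examples section.
input_x = np.array([[-1.0, 4.0, -8.0], [2.0, -5.0, 9.0]], dtype=np.float32)
print(mish_reference(input_x))
# approximately [[-0.3034  3.9974 -0.0027]
#                [ 1.9440 -0.0336  9.0   ]]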