!30194 modify initializer for master

Merge pull request !30194 from lilei/modify_initializer_for_master
This commit is contained in:
i-robot 2022-02-21 06:23:50 +00:00 committed by Gitee
commit 6c301b6e1f
No known key found for this signature in database
GPG Key ID: 173E9B9CA92EEF8F
1 changed file with 8 additions and 4 deletions

View File

@@ -324,7 +324,7 @@ class HeNormal(Initializer):
For details of HeUniform algorithm, please check `<https://arxiv.org/abs/1502.01852>`_.
Args:
negative_slope (int, float, bool): The negative slope of the rectifier used after this layer
negative_slope (int, float): The negative slope of the rectifier used after this layer
(only used when `nonlinearity` is 'leaky_relu'). Default: 0.
mode (str): Either 'fan_in' or 'fan_out'. Choosing 'fan_in' preserves the magnitude of the
variance of the weights in the forward pass. Choosing 'fan_out' preserves the magnitudes
@@ -703,11 +703,15 @@ def initializer(init, shape=None, dtype=mstype.float32):
Examples:
>>> import numpy as np
>>> import mindspore
>>> from mindspore import Tensor
>>> from mindspore.common.initializer import initializer, One
>>> tensor1 = initializer('ones', [1, 2, 3], mindspore.float32)
>>> tensor2 = initializer(One(), [1, 2, 3], mindspore.float32)
>>> tensor3 = initializer(0, [1, 2, 3], mindspore.float32)
>>> data = Tensor(np.zeros([1, 2, 3]), mindspore.float32)
>>> tensor1 = initializer(data, [1, 2, 3], mindspore.float32)
>>> tensor2 = initializer('ones', [1, 2, 3], mindspore.float32)
>>> tensor3 = initializer(One(), [1, 2, 3], mindspore.float32)
>>> tensor4 = initializer(0, [1, 2, 3], mindspore.float32)
"""
if not isinstance(init, (Tensor, numbers.Number, str, Initializer)):
raise TypeError("The type of the 'init' argument should be 'Tensor', 'number', 'string' "