!28021 modify the example in chinese api comments

Merge pull request !28021 from wangnan39/code_docs_frontend_example
i-robot 2021-12-22 06:13:27 +00:00 committed by Gitee
commit bc9fbf345c
16 changed files with 53 additions and 17 deletions
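
The fix repeated across most of these files is the same: the examples referenced names that were never imported or qualified (mstype, bare cosine_decay_lr, P.MatMul), so the snippets could not run as written. A minimal sketch of the corrected dtype pattern (mindspore.int32 is exposed at the package top level, so no separate dtype import is needed):

>>> import mindspore
>>> from mindspore import Tensor
>>>
>>> # resolves without "from mindspore import dtype as mstype",
>>> # which is why the examples drop mstype.int32
>>> global_step = Tensor(2, mindspore.int32)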

@@ -20,6 +20,7 @@ mindspore.nn.Accuracy
**Examples:**
>>> import numpy as np
>>> import mindspore
>>> from mindspore import nn, Tensor
>>>
>>> x = Tensor(np.array([[0.2, 0.5], [0.3, 0.1], [0.9, 0.6]]), mindspore.float32)
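
Only the opening lines of this example appear in the hunk. As a self-contained sketch (the label tensor y below is hypothetical, not part of the original example), nn.Accuracy in 'classification' mode compares the per-row argmax of x against the labels:

>>> import numpy as np
>>> import mindspore
>>> from mindspore import nn, Tensor
>>>
>>> x = Tensor(np.array([[0.2, 0.5], [0.3, 0.1], [0.9, 0.6]]), mindspore.float32)
>>> y = Tensor(np.array([1, 0, 1]), mindspore.float32)  # hypothetical labels, for illustration only
>>> metric = nn.Accuracy('classification')
>>> metric.clear()
>>> metric.update(x, y)
>>> print(metric.eval())  # argmax(x, axis=1) is [1, 0, 0], so 2 of 3 match, roughly 0.667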

@@ -39,10 +39,13 @@ mindspore.nn.CosineDecayLR
**Examples:**
>>> import mindspore
>>> from mindspore import Tensor, nn
>>>
>>> min_lr = 0.01
>>> max_lr = 0.1
>>> decay_steps = 4
>>> global_steps = Tensor(2, mstype.int32)
>>> global_steps = Tensor(2, mindspore.int32)
>>> cosine_decay_lr = nn.CosineDecayLR(min_lr, max_lr, decay_steps)
>>> result = cosine_decay_lr(global_steps)
>>> print(result)
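
The hunk stops before the printed value. Assuming the usual cosine decay rule, lr = min_lr + 0.5 * (max_lr - min_lr) * (1 + cos(pi * step / decay_steps)), the expected result of the corrected snippet can be checked in plain Python:

>>> import math
>>>
>>> min_lr, max_lr, decay_steps, step = 0.01, 0.1, 4, 2
>>> lr = min_lr + 0.5 * (max_lr - min_lr) * (1 + math.cos(math.pi * step / decay_steps))
>>> print(round(lr, 6))  # 0.055 at the halfway point of the schedule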

@@ -48,10 +48,13 @@ mindspore.nn.ExponentialDecayLR
**Examples:**
>>> import mindspore
>>> from mindspore import Tensor, nn
>>>
>>> learning_rate = 0.1
>>> decay_rate = 0.9
>>> decay_steps = 4
>>> global_step = Tensor(2, mstype.int32)
>>> global_step = Tensor(2, mindspore.int32)
>>> exponential_decay_lr = nn.ExponentialDecayLR(learning_rate, decay_rate, decay_steps)
>>> result = exponential_decay_lr(global_step)
>>> print(result)
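
As above, the print output is cut off. With the exponential decay rule lr = learning_rate * decay_rate ** (step / decay_steps) (is_stair left at its default of False), a plain-Python check of the corrected example gives:

>>> learning_rate, decay_rate, decay_steps, step = 0.1, 0.9, 4, 2
>>> lr = learning_rate * decay_rate ** (step / decay_steps)
>>> print(round(lr, 6))  # about 0.094868, i.e. 0.1 scaled by sqrt(0.9)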

@@ -32,13 +32,16 @@ mindspore.nn.ForwardValueAndGrad
**Examples:**
>>> import numpy as np
>>> from mindspore import Tensor, nn, common, ops, ParameterTuple, Parameter
>>>
>>> class Net(nn.Cell):
... def __init__(self)
... def __init__(self):
... super(Net, self).__init__()
... self.weight = Parameter(Tensor(np.ones([2, 2]).astype(np.float32)), name="weight")
... self.matmul = P.MatMul()
... self.matmul = ops.MatMul()
...
... def construct(self, x)
... def construct(self, x):
... out = self.matmul(x, self.weight)
... return out
...
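
Besides the two missing colons, the substantive fix in this hunk is P.MatMul() -> ops.MatMul(): the example imports ops but never defines the P alias that the old spelling relied on. For reference, a sketch of the implicit alias:

>>> from mindspore import ops
>>> from mindspore.ops import operations as P  # the alias the old example assumed but never imported
>>>
>>> # both names resolve to the same MatMul primitive
>>> matmul = ops.MatMul()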

@@ -48,10 +48,13 @@ mindspore.nn.InverseDecayLR
**Examples:**
>>> import mindspore
>>> from mindspore import Tensor, nn
>>>
>>> learning_rate = 0.1
>>> decay_rate = 0.9
>>> decay_steps = 4
>>> global_step = Tensor(2, mstype.int32)
>>> global_step = Tensor(2, mindspore.int32)
>>> inverse_decay_lr = nn.InverseDecayLR(learning_rate, decay_rate, decay_steps, True)
>>> result = inverse_decay_lr(global_step)
>>> print(result)
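
Here is_stair=True, so decay is applied once per whole decay_steps interval. Assuming the inverse-time rule lr = learning_rate / (1 + decay_rate * floor(step / decay_steps)), the corrected example should still print the initial rate at step 2:

>>> import math
>>>
>>> learning_rate, decay_rate, decay_steps, step = 0.1, 0.9, 4, 2
>>> lr = learning_rate / (1 + decay_rate * math.floor(step / decay_steps))
>>> print(lr)  # 0.1, since floor(2 / 4) == 0 and no decay has happened yet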

@@ -48,10 +48,13 @@ mindspore.nn.NaturalExpDecayLR
**Examples:**
>>> import mindspore
>>> from mindspore import Tensor, nn
>>>
>>> learning_rate = 0.1
>>> decay_rate = 0.9
>>> decay_steps = 4
>>> global_step = Tensor(2, mstype.int32)
>>> global_step = Tensor(2, mindspore.int32)
>>> natural_exp_decay_lr = nn.NaturalExpDecayLR(learning_rate, decay_rate, decay_steps, True)
>>> result = natural_exp_decay_lr(global_step)
>>> print(result)
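
Same structure as the previous hunk, with a natural-exponential rule. Assuming lr = learning_rate * exp(-decay_rate * floor(step / decay_steps)) for is_stair=True:

>>> import math
>>>
>>> learning_rate, decay_rate, decay_steps, step = 0.1, 0.9, 4, 2
>>> lr = learning_rate * math.exp(-decay_rate * math.floor(step / decay_steps))
>>> print(lr)  # 0.1 again: exp(0) == 1 because floor(2 / 4) == 0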

@@ -50,11 +50,14 @@ mindspore.nn.PolynomialDecayLR
**Examples:**
>>> import mindspore
>>> from mindspore import Tensor, nn
>>>
>>> learning_rate = 0.1
>>> end_learning_rate = 0.01
>>> decay_steps = 4
>>> power = 0.5
>>> global_step = Tensor(2, mstype.int32)
>>> global_step = Tensor(2, mindspore.int32)
>>> polynomial_decay_lr = nn.PolynomialDecayLR(learning_rate, end_learning_rate, decay_steps, power)
>>> result = polynomial_decay_lr(global_step)
>>> print(result)
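
Assuming the polynomial decay rule lr = (learning_rate - end_learning_rate) * (1 - step / decay_steps) ** power + end_learning_rate (update_decay_steps left at its default), the corrected example should print roughly 0.0736:

>>> learning_rate, end_learning_rate, decay_steps, power, step = 0.1, 0.01, 4, 0.5, 2
>>> lr = (learning_rate - end_learning_rate) * (1 - step / decay_steps) ** power + end_learning_rate
>>> print(round(lr, 6))  # about 0.07364, i.e. 0.09 * sqrt(0.5) + 0.01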

@@ -38,6 +38,7 @@ mindspore.nn.TrainOneStepWithLossScaleCell
**Examples:**
>>> import numpy as np
>>> import mindspore
>>> from mindspore import Tensor, Parameter, nn, ops
>>> from mindspore import dtype as mstype
>>>

@@ -41,9 +41,12 @@ mindspore.nn.WarmUpLR
**Examples:**
>>> import mindspore
>>> from mindspore import Tensor, nn
>>>
>>> learning_rate = 0.1
>>> warmup_steps = 2
>>> global_step = Tensor(2, mstype.int32)
>>> global_step = Tensor(2, mindspore.int32)
>>> warmup_lr = nn.WarmUpLR(learning_rate, warmup_steps)
>>> result = warmup_lr(global_step)
>>> print(result)
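
WarmUpLR ramps the rate up rather than decaying it. Assuming lr = learning_rate * min(step, warmup_steps) / warmup_steps, the corrected example reaches the full rate exactly at step 2:

>>> learning_rate, warmup_steps, step = 0.1, 2, 2
>>> lr = learning_rate * min(step, warmup_steps) / warmup_steps
>>> print(lr)  # 0.1: the warm-up is complete once step == warmup_steps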

@@ -27,11 +27,13 @@ mindspore.nn.cosine_decay_lr
**Examples:**
>>> import mindspore.nn as nn
>>>
>>> min_lr = 0.01
>>> max_lr = 0.1
>>> total_step = 6
>>> step_per_epoch = 2
>>> decay_epoch = 2
>>> output = cosine_decay_lr(min_lr, max_lr, total_step, step_per_epoch, decay_epoch)
>>> output = nn.cosine_decay_lr(min_lr, max_lr, total_step, step_per_epoch, decay_epoch)
>>> print(output)
[0.1, 0.1, 0.05500000000000001, 0.05500000000000001, 0.01, 0.01]

@@ -27,11 +27,13 @@ mindspore.nn.exponential_decay_lr
**Examples:**
>>> import mindspore.nn as nn
>>>
>>> learning_rate = 0.1
>>> decay_rate = 0.9
>>> total_step = 6
>>> step_per_epoch = 2
>>> decay_epoch = 1
>>> output = exponential_decay_lr(learning_rate, decay_rate, total_step, step_per_epoch, decay_epoch)
>>> output = nn.exponential_decay_lr(learning_rate, decay_rate, total_step, step_per_epoch, decay_epoch)
>>> print(output)
[0.1, 0.1, 0.09000000000000001, 0.09000000000000001, 0.08100000000000002, 0.08100000000000002]

@@ -27,11 +27,13 @@ mindspore.nn.inverse_decay_lr
**Examples:**
>>> import mindspore.nn as nn
>>>
>>> learning_rate = 0.1
>>> decay_rate = 0.5
>>> total_step = 6
>>> step_per_epoch = 1
>>> decay_epoch = 1
>>> output = inverse_decay_lr(learning_rate, decay_rate, total_step, step_per_epoch, decay_epoch, True)
>>> output = nn.inverse_decay_lr(learning_rate, decay_rate, total_step, step_per_epoch, decay_epoch, True)
>>> print(output)
[0.1, 0.06666666666666667, 0.05, 0.04, 0.03333333333333333, 0.028571428571428574]

@@ -27,11 +27,13 @@ mindspore.nn.natural_exp_decay_lr
**Examples:**
>>> import mindspore.nn as nn
>>>
>>> learning_rate = 0.1
>>> decay_rate = 0.9
>>> total_step = 6
>>> step_per_epoch = 2
>>> decay_epoch = 2
>>> output = natural_exp_decay_lr(learning_rate, decay_rate, total_step, step_per_epoch, decay_epoch, True)
>>> output = nn.natural_exp_decay_lr(learning_rate, decay_rate, total_step, step_per_epoch, decay_epoch, True)
>>> print(output)
[0.1, 0.1, 0.1, 0.1, 0.016529888822158657, 0.016529888822158657]

@@ -21,9 +21,10 @@ mindspore.nn.piecewise_constant_lr
list[float]. The size of the list is :math:`M_N`
**Examples:**
>>> import mindspore.nn as nn
>>>
>>> milestone = [2, 5, 10]
>>> learning_rates = [0.1, 0.05, 0.01]
>>> output = piecewise_constant_lr(milestone, learning_rates)
>>> output = nn.piecewise_constant_lr(milestone, learning_rates)
>>> print(output)
[0.1, 0.1, 0.05, 0.05, 0.05, 0.01, 0.01, 0.01, 0.01, 0.01]

@@ -43,12 +43,14 @@ mindspore.nn.polynomial_decay_lr
**Examples:**
>>> import mindspore.nn as nn
>>>
>>> learning_rate = 0.1
>>> end_learning_rate = 0.01
>>> total_step = 6
>>> step_per_epoch = 2
>>> decay_epoch = 2
>>> power = 0.5
>>> r = polynomial_decay_lr(learning_rate, end_learning_rate, total_step, step_per_epoch, decay_epoch, power)
>>> r = nn.polynomial_decay_lr(learning_rate, end_learning_rate, total_step, step_per_epoch, decay_epoch, power)
>>> print(r)
[0.1, 0.1, 0.07363961030678928, 0.07363961030678928, 0.01, 0.01]

@@ -25,10 +25,12 @@ mindspore.nn.warmup_lr
**Examples:**
>>> import mindspore.nn as nn
>>>
>>> learning_rate = 0.1
>>> total_step = 6
>>> step_per_epoch = 2
>>> warmup_epoch = 2
>>> output = warmup_lr(learning_rate, total_step, step_per_epoch, warmup_epoch)
>>> output = nn.warmup_lr(learning_rate, total_step, step_per_epoch, warmup_epoch)
>>> print(output)
[0.0, 0.0, 0.05, 0.05, 0.1, 0.1]