From 9b545e4982ec5dcb37b1d1a9967b526199dfc969 Mon Sep 17 00:00:00 2001
From: "wangnan39@huawei.com" <wangnan39@huawei.com>
Date: Tue, 1 Dec 2020 11:37:12 +0800
Subject: [PATCH] fix bug in example of Lamb

---
 mindspore/nn/optim/lamb.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mindspore/nn/optim/lamb.py b/mindspore/nn/optim/lamb.py
index 99f4b179783..997c5304d50 100755
--- a/mindspore/nn/optim/lamb.py
+++ b/mindspore/nn/optim/lamb.py
@@ -241,7 +241,7 @@ class Lamb(Optimizer):
     Examples:
         >>> net = Net()
         >>> #1) All parameters use the same learning rate and weight decay
-        >>> optim = nn.Lamb(params=net.trainable_params(learning_rate=0.1))
+        >>> optim = nn.Lamb(params=net.trainable_params(), learning_rate=0.1)
         >>>
         >>> #2) Use parameter groups and set different values
         >>> poly_decay_lr = learning_rate_schedule.PolynomialDecayLR()
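
For context, a minimal sketch of why the pre-patch line is a bug: Cell.trainable_params() takes no learning_rate keyword, so the old example raises a TypeError; learning_rate belongs to nn.Lamb itself. The Net definition below is hypothetical, used only to make the fixed example self-contained.

import mindspore.nn as nn

class Net(nn.Cell):
    """Hypothetical network, for illustration only."""
    def __init__(self):
        super(Net, self).__init__()
        self.dense = nn.Dense(3, 4)

    def construct(self, x):
        return self.dense(x)

net = Net()

# Buggy (pre-patch): trainable_params() does not accept learning_rate,
# so this line raises TypeError:
#   optim = nn.Lamb(params=net.trainable_params(learning_rate=0.1))

# Fixed (post-patch): pass the parameter list to params and the
# learning rate to nn.Lamb directly.
optim = nn.Lamb(params=net.trainable_params(), learning_rate=0.1)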