From 675fe52042b7713c475a3468ec89efb6fe3fb0db Mon Sep 17 00:00:00 2001
From: "wangnan39@huawei.com" <wangnan39@huawei.com>
Date: Wed, 17 Jun 2020 09:28:17 +0800
Subject: [PATCH] fix error in the example of SparseApplyLazyAdam

---
 mindspore/ops/operations/nn_ops.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/mindspore/ops/operations/nn_ops.py b/mindspore/ops/operations/nn_ops.py
index 00ebc17f5f..f113235e21 100644
--- a/mindspore/ops/operations/nn_ops.py
+++ b/mindspore/ops/operations/nn_ops.py
@@ -2938,9 +2938,9 @@ class SparseApplyLazyAdam(PrimitiveWithInfer):
         >>>     def __init__(self):
         >>>         super(Net, self).__init__()
         >>>         self.sparse_apply_lazyadam = P.SparseApplyLazyAdam()
-        >>>         self.var = Parameter(Tensor(np.ones([3, 3, 3]).astype(np.float32)), name="var")
-        >>>         self.m = Parameter(Tensor(np.ones([3, 3, 3]).astype(np.float32)), name="m")
-        >>>         self.v = Parameter(Tensor(np.ones([3, 3, 3]).astype(np.float32)), name="v")
+        >>>         self.var = Parameter(Tensor(np.ones([3, 1, 2]).astype(np.float32)), name="var")
+        >>>         self.m = Parameter(Tensor(np.ones([3, 1, 2]).astype(np.float32)), name="m")
+        >>>         self.v = Parameter(Tensor(np.ones([3, 1, 2]).astype(np.float32)), name="v")
         >>>     def construct(self, beta1_power, beta2_power, lr, beta1, beta2, epsilon, grad, indices):
         >>>         out = self.sparse_apply_lazyadam(self.var, self.m, self.v, beta1_power, beta2_power, lr, beta1,
         >>>                                          beta2, epsilon, grad, indices)
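
For reference, below is a minimal, self-contained sketch of what the corrected docstring example exercises. The gradient shape [2, 1, 2], the indices values, and the hyperparameter constants are illustrative assumptions chosen to be consistent with the new var/m/v shape [3, 1, 2]; they are not part of the patch itself.

# Hedged sketch of the corrected example. The concrete gradient, indices
# and hyperparameter values below are assumptions, not taken from the patch.
import numpy as np
import mindspore.nn as nn
from mindspore import Tensor, Parameter
from mindspore.ops import operations as P

class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.sparse_apply_lazyadam = P.SparseApplyLazyAdam()
        # var, m and v must share one shape; grad must match that shape in
        # every dimension except the first, which is selected by `indices`.
        self.var = Parameter(Tensor(np.ones([3, 1, 2]).astype(np.float32)), name="var")
        self.m = Parameter(Tensor(np.ones([3, 1, 2]).astype(np.float32)), name="m")
        self.v = Parameter(Tensor(np.ones([3, 1, 2]).astype(np.float32)), name="v")

    def construct(self, beta1_power, beta2_power, lr, beta1, beta2, epsilon, grad, indices):
        return self.sparse_apply_lazyadam(self.var, self.m, self.v, beta1_power, beta2_power,
                                          lr, beta1, beta2, epsilon, grad, indices)

net = Net()
# Two gradient rows (assumed here) update rows 0 and 2 of the [3, 1, 2]
# variables; this is consistent with the shape the patch introduces.
grad = Tensor(np.random.rand(2, 1, 2).astype(np.float32))
indices = Tensor(np.array([0, 2]).astype(np.int32))
output = net(0.9, 0.999, 0.001, 0.9, 0.999, 1e-8, grad, indices)

Under these assumptions, the old [3, 3, 3] shapes would fail shape validation against a [2, 1, 2] gradient, which is the inconsistency the patch corrects.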