diff --git a/mindspore/ops/operations/nn_ops.py b/mindspore/ops/operations/nn_ops.py
index 00ebc17f5f..f113235e21 100644
--- a/mindspore/ops/operations/nn_ops.py
+++ b/mindspore/ops/operations/nn_ops.py
@@ -2938,9 +2938,9 @@ class SparseApplyLazyAdam(PrimitiveWithInfer):
         >>>     def __init__(self):
         >>>         super(Net, self).__init__()
         >>>         self.sparse_apply_lazyadam = P.SparseApplyLazyAdam()
-        >>>         self.var = Parameter(Tensor(np.ones([3, 3, 3]).astype(np.float32)), name="var")
-        >>>         self.m = Parameter(Tensor(np.ones([3, 3, 3]).astype(np.float32)), name="m")
-        >>>         self.v = Parameter(Tensor(np.ones([3, 3, 3]).astype(np.float32)), name="v")
+        >>>         self.var = Parameter(Tensor(np.ones([3, 1, 2]).astype(np.float32)), name="var")
+        >>>         self.m = Parameter(Tensor(np.ones([3, 1, 2]).astype(np.float32)), name="m")
+        >>>         self.v = Parameter(Tensor(np.ones([3, 1, 2]).astype(np.float32)), name="v")
         >>>     def construct(self, beta1_power, beta2_power, lr, beta1, beta2, epsilon, grad, indices):
         >>>         out = self.sparse_apply_lazyadam(self.var, self.m, self.v, beta1_power, beta2_power, lr, beta1,
         >>>                                          beta2, epsilon, grad, indices)
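
Note (not part of the patch): a minimal, self-contained sketch of how the updated docstring example is exercised end to end. The hyperparameter values and the grad/indices shapes below are assumptions chosen to be consistent with the new [3, 1, 2] parameter shape; SparseApplyLazyAdam requires grad.shape[1:] to match var.shape[1:] and grad.shape[0] to equal the number of indices, which the old [3, 3, 3] shape did not make obvious.

    # Sketch only; values and grad/indices shapes are illustrative assumptions.
    import numpy as np
    import mindspore.nn as nn
    import mindspore.common.dtype as mstype
    from mindspore import Tensor, Parameter
    from mindspore.ops import operations as P

    class Net(nn.Cell):
        def __init__(self):
            super(Net, self).__init__()
            self.sparse_apply_lazyadam = P.SparseApplyLazyAdam()
            # Shapes match the patched example: [3, 1, 2].
            self.var = Parameter(Tensor(np.ones([3, 1, 2]).astype(np.float32)), name="var")
            self.m = Parameter(Tensor(np.ones([3, 1, 2]).astype(np.float32)), name="m")
            self.v = Parameter(Tensor(np.ones([3, 1, 2]).astype(np.float32)), name="v")

        def construct(self, beta1_power, beta2_power, lr, beta1, beta2, epsilon, grad, indices):
            return self.sparse_apply_lazyadam(self.var, self.m, self.v, beta1_power, beta2_power,
                                              lr, beta1, beta2, epsilon, grad, indices)

    net = Net()
    # Illustrative scalar hyperparameters wrapped as float32 tensors.
    beta1_power = Tensor(0.9, mstype.float32)
    beta2_power = Tensor(0.999, mstype.float32)
    lr = Tensor(0.001, mstype.float32)
    beta1 = Tensor(0.9, mstype.float32)
    beta2 = Tensor(0.999, mstype.float32)
    epsilon = Tensor(1e-8, mstype.float32)
    # Two rows of var are sparsely updated: grad is [2, 1, 2], indices is [2].
    grad = Tensor(np.random.rand(2, 1, 2).astype(np.float32))
    indices = Tensor(np.array([0, 1]).astype(np.int32))
    output = net(beta1_power, beta2_power, lr, beta1, beta2, epsilon, grad, indices)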