forked from mindspore-Ecosystem/mindspore
!358 Fix optimizer.decay_weight bug
Merge pull request !358 from fanglei/master
This commit is contained in:
commit
c241b19a53
|
@@ -109,7 +109,7 @@ class Optimizer(Cell):
             tuple[Tensor], The gradients after weight decay.
         """
         if self.weight_decay > 0:
-            params = self.params
+            params = self.parameters
             gradients = self.hyper_map(F.partial(apply_decay, self.weight_decay), self.decay_flags, params, gradients)

         return gradients
|
Loading…
Reference in New Issue