forked from mindspore-Ecosystem/mindspore
!778 fix bug of ApplyRMSProp
Merge pull request !778 from fary86/fix_bug_of_ApplyRMSProp
commit 1b5fb395cc
@@ -1586,9 +1586,11 @@ class ApplyRMSProp(PrimitiveWithInfer):
         args = {"var": var_dtype, "mean_square": mean_square_dtype, "moment": moment_dtype, "grad": grad_dtype}
         validator.check_tensor_type_same(args, mstype.number_type, self.name)
-        args = {"learning_rate": learning_rate_dtype, "decay": decay_dtype,
-                'momentum': momentum_dtype, "epsilon": epsilon_dtype}
-        validator.check_scalar_or_tensor_type_same(args, [mstype.float16, mstype.float32], self.name)
+        valid_types = [mstype.float16, mstype.float32]
+        args_decay = {"decay": decay_dtype, 'momentum': momentum_dtype, "epsilon": epsilon_dtype}
+        validator.check_type_same(args_decay, valid_types, self.name)
+        args_lr = {"learning_rate": learning_rate_dtype, "decay": decay_dtype}
+        validator.check_scalar_or_tensor_type_same(args_lr, valid_types, self.name, allow_mix=True)
         return var_dtype
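What the fix changes: the old code validated learning_rate, decay, momentum and epsilon together with a single check_scalar_or_tensor_type_same call, forcing all four to agree. The patch checks decay/momentum/epsilon on their own with check_type_same and checks learning_rate against decay separately with allow_mix=True, which, as the diff reads, lets plain-float decay/momentum/epsilon be combined with a scalar or tensor learning_rate. The sketch below is not part of the PR; it assumes the input order (var, mean_square, moment, learning_rate, grad, decay, momentum, epsilon) implied by the infer code above, and the import paths and PyNative-mode call style may differ between MindSpore versions.

# Minimal usage sketch (an assumption, not taken from this PR): exercise
# ApplyRMSProp with plain-float hyperparameters and a scalar learning_rate,
# the case the split dtype checks are meant to accept.
import numpy as np
from mindspore import Tensor, Parameter, context
from mindspore.ops import operations as P

context.set_context(mode=context.PYNATIVE_MODE)

apply_rms_prop = P.ApplyRMSProp()

var = Parameter(Tensor(np.ones([2, 2], np.float32)), name="var")
mean_square = Tensor(np.ones([2, 2], np.float32))
moment = Tensor(np.zeros([2, 2], np.float32))
grad = Tensor(np.full([2, 2], 0.1, np.float32))

# decay/momentum/epsilon are plain Python floats; learning_rate may be a
# float or a Tensor (the fix validates it separately from the other three).
learning_rate = 0.01
decay, momentum, epsilon = 0.9, 0.0, 1e-10

out = apply_rms_prop(var, mean_square, moment, learning_rate, grad,
                     decay, momentum, epsilon)
print(out)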