diff --git a/mindspore/common/parameter.py b/mindspore/common/parameter.py
index 0ab6a18a105..dbb4625873f 100644
--- a/mindspore/common/parameter.py
+++ b/mindspore/common/parameter.py
@@ -316,7 +316,8 @@ class Parameter(MetaTensor_):
 
         Args:
             data (Union[Tensor, MetaTensor, int, float]): new data.
-            slice_shape (bool): If slice the Parameter, will not check if shape is match. Default: False.
+            slice_shape (bool): If slice_shape is set to True, the shape is not checked for consistency.
+                Default: False.
 
         Returns:
             Parameter, the parameter after set data.
diff --git a/mindspore/nn/optim/adam.py b/mindspore/nn/optim/adam.py
index b134672bc13..017eaf1c4e7 100755
--- a/mindspore/nn/optim/adam.py
+++ b/mindspore/nn/optim/adam.py
@@ -336,6 +336,9 @@ class Adam(Optimizer):
     def target(self, value):
         """If the input value is set to "CPU", the parameters will be updated on the host using the Fused
            optimizer operation."""
+        if not isinstance(value, str):
+            raise ValueError("The value must be str type, but got value of type {}".format(type(value)))
+
         if value not in ('CPU', 'Ascend'):
             raise ValueError("The value must be 'CPU' or 'Ascend', but got value {}".format(value))
 
diff --git a/mindspore/nn/optim/ftrl.py b/mindspore/nn/optim/ftrl.py
index 38e95f71cd5..0dd42283351 100644
--- a/mindspore/nn/optim/ftrl.py
+++ b/mindspore/nn/optim/ftrl.py
@@ -189,6 +189,8 @@ class FTRL(Optimizer):
     def target(self, value):
         """If the input value is set to "CPU", the parameters will be updated on the host using the Fused
            optimizer operation."""
+        if not isinstance(value, str):
+            raise ValueError("The value must be str type, but got value of type {}".format(type(value)))
         if value not in ('CPU', 'Ascend'):
             raise ValueError("The value must be 'CPU' or 'Ascend', but got value {}".format(value))
 
diff --git a/mindspore/nn/optim/lazyadam.py b/mindspore/nn/optim/lazyadam.py
index 442815cbabe..827843feded 100644
--- a/mindspore/nn/optim/lazyadam.py
+++ b/mindspore/nn/optim/lazyadam.py
@@ -254,6 +254,9 @@ class LazyAdam(Optimizer):
     def target(self, value):
         """If the input value is set to "CPU", the parameters will be updated on the host using the Fused
            optimizer operation."""
+        if not isinstance(value, str):
+            raise ValueError("The value must be str type, but got value of type {}".format(type(value)))
+
         if value not in ('CPU', 'Ascend'):
             raise ValueError("The value must be 'CPU' or 'Ascend', but got value {}".format(value))
 
diff --git a/mindspore/nn/optim/proximal_ada_grad.py b/mindspore/nn/optim/proximal_ada_grad.py
index edd1f6947bd..d58f2302702 100644
--- a/mindspore/nn/optim/proximal_ada_grad.py
+++ b/mindspore/nn/optim/proximal_ada_grad.py
@@ -158,6 +158,8 @@ class ProximalAdagrad(Optimizer):
     def target(self, value):
         """If the input value is set to "CPU", the parameters will be updated on the host using the Fused
            optimizer operation."""
+        if not isinstance(value, str):
+            raise ValueError("The value must be str type, but got value of type {}".format(type(value)))
         if value not in ('CPU', 'Ascend'):
             raise ValueError("The value must be 'CPU' or 'Ascend', but got value {}".format(value))
 
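
A minimal sketch of what the stricter `target` setter means for callers, using Adam as an example. The optimizer setup below (nn.Dense, learning_rate=0.01) is illustrative and not part of the patch; only the ValueError behaviour of the setter comes from the changes above.

    # Illustrative only: exercises the type check added to the `target` setter.
    import mindspore.nn as nn

    net = nn.Dense(2, 2)
    optimizer = nn.Adam(net.trainable_params(), learning_rate=0.01)

    try:
        optimizer.target = 1        # non-str values now fail with a type-specific message
    except ValueError as err:
        print(err)                  # The value must be str type, but got value of type <class 'int'>

    try:
        optimizer.target = "GPU"    # unsupported strings were already rejected before this patch
    except ValueError as err:
        print(err)                  # The value must be 'CPU' or 'Ascend', but got value GPU

    optimizer.target = "CPU"        # valid: parameters are updated on the host by the fused op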