diff --git a/.jenkins/check/config/whitelizard.txt b/.jenkins/check/config/whitelizard.txt
index 55599ae55ed..b5adf296204 100644
--- a/.jenkins/check/config/whitelizard.txt
+++ b/.jenkins/check/config/whitelizard.txt
@@ -17,6 +17,7 @@ mindspore/model_zoo/official/recommend/wide_and_deep/src/wide_and_deep.py:__init
 mindspore/model_zoo/official/recommend/wide_and_deep_multitable/src/wide_and_deep.py:__init__
 mindspore/mindspore/ccsrc/pipeline/jit/resource.cc:mindspore::pipeline::GetMethodMap
 mindspore/mindspore/ops/operations/array_ops.py:_compute_slicing_shape
+mindspore/mindspore/context.py:set_auto_parallel_context
 mindspore/mindspore/common/tensor.py:__init__
 mindspore/mindspore/common/parameter.py:set_data
 mindspore/mindspore/ccsrc/pybind_api/ir/tensor_py.cc:mindspore::tensor::GetDataType
diff --git a/mindspore/_checkparam.py b/mindspore/_checkparam.py
index f5e077886aa..0f2bc36fad2 100644
--- a/mindspore/_checkparam.py
+++ b/mindspore/_checkparam.py
@@ -157,7 +157,7 @@ def check_number(arg_value, value, rel, arg_type=int, arg_name=None, prim_name=N
     if isinstance(arg_value, arg_type):
         if math.isinf(arg_value) or math.isnan(arg_value) or np.isinf(arg_value) or np.isnan(arg_value):
-            raise ValueError(f'{arg_name} {prim_name} must be legal value, but got `{arg_value}`.')
+            raise ValueError(f'{arg_name} {prim_name} must be a legal value, but got `{arg_value}`.')
     else:
         raise TypeError(f'{arg_name} {prim_name} must be {arg_type.__name__}, but got `{type(arg_value).__name__}`')
@@ -184,7 +184,7 @@ def check_is_number(arg_value, arg_type, arg_name=None, prim_name=None):
     arg_name = f"\'{arg_name}\'" if arg_name else 'input value'
     if isinstance(arg_value, arg_type) and not isinstance(arg_value, bool):
         if math.isinf(arg_value) or math.isnan(arg_value) or np.isinf(arg_value) or np.isnan(arg_value):
-            raise ValueError(f'{prim_name} {arg_name} must be legal float, but got `{arg_value}`.')
+            raise ValueError(f'{prim_name} {arg_name} must be a legal float, but got `{arg_value}`.')
         return arg_value
     raise TypeError(f'{prim_name} type of {arg_name} must be {arg_type.__name__}, but got `{type(arg_value).__name__}`')
@@ -665,7 +665,7 @@ class Validator:
         # if multiple arguments provided, it must be `ndim` number of ints
         if len(axes) != ndim:
-            raise ValueError("The number of axes must equal to the dimension of tensor.")
+            raise ValueError("The number of axes must be equal to the dimension of the tensor.")
         return axes

     @staticmethod
@@ -705,11 +705,11 @@ class Validator:
         if isinstance(axes, (tuple, list)):
             for axis in axes:
                 if not isinstance(axis, int):
-                    raise TypeError(f"axis argument should be integer, but got {type(axis)}.")
+                    raise TypeError(f"The axis argument should be an integer, but got {type(axis)}.")
                 Validator.check_axis_in_range(axis, ndim)
             axes = tuple(map(lambda x: x % ndim, axes))
             return axes
-        raise TypeError(f"axes should be integer, list or tuple for check, but got {type(axes)}.")
+        raise TypeError(f"The axes should be an integer, list or tuple for check, but got {type(axes)}.")

     @staticmethod
     def prepare_shape_for_squeeze(shape, axes):
@@ -730,33 +730,33 @@ class Validator:
         # Convert to set
         if isinstance(axes, int):
             if axes >= ndim or axes < -ndim:
-                raise ValueError(f"axis {axes} is out of bounds for tensor of dimension {ndim}")
+                raise ValueError(f"The axis {axes} is out of bounds for tensor of dimension {ndim}")
             axes = {axes}
         elif isinstance(axes, (list, tuple)):
             for axis in axes:
                 if axis >= ndim or axis < -ndim:
-                    raise ValueError(f"axis {axis} is out of bounds for tensor of dimension {ndim}")
+                    raise ValueError(f"The axis {axis} is out of bounds for tensor of dimension {ndim}")
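
> Review note: for anyone sanity-checking the reworded messages, here is a minimal standalone sketch of the NaN/inf guard these `_checkparam.py` hunks touch. It drops the `value`/`rel` comparison arguments of the real `check_number` and keeps only the legality check, so treat it as an illustration rather than the MindSpore implementation:

```python
import math

import numpy as np


def check_number(arg_value, arg_type=int, arg_name='input value', prim_name=''):
    """Reject NaN/inf values and wrong types, using the reworded messages above."""
    if isinstance(arg_value, arg_type):
        if math.isinf(arg_value) or math.isnan(arg_value) or np.isinf(arg_value) or np.isnan(arg_value):
            raise ValueError(f'{arg_name} {prim_name} must be a legal value, but got `{arg_value}`.')
    else:
        raise TypeError(f'{arg_name} {prim_name} must be {arg_type.__name__}, but got `{type(arg_value).__name__}`')
    return arg_value


check_number(3)                             # passes: a finite int
check_number(float('nan'), arg_type=float)  # ValueError: ... must be a legal value ...
```
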
+                    raise ValueError(f"The axis {axis} is out of bounds for tensor of dimension {ndim}")
             axes = set(axes)
         else:
-            raise TypeError(f"only int, tuple and list are allowed for axes, but got {type(axes)}")
+            raise TypeError(f"Only int, tuple and list are allowed for axes, but got {type(axes)}")
         for idx, s in enumerate(shape):
             if s != 1 or (idx not in axes) and (idx - ndim not in axes):
                 new_shape.append(s)
             # if an axis is selected with shape entry greater than one, an error is raised.
             if s != 1 and ((idx in axes) or (idx - ndim in axes)):
-                raise ValueError(f"axis {axes} has shape entry {s} > 1, cannot be squeezed.")
+                raise ValueError(f"The axis {axes} has shape entry {s} > 1, cannot be squeezed.")
         return tuple(new_shape)

     @staticmethod
     def check_axis_in_range(axis, ndim):
         """Checks axes are with the bounds of ndim"""
         if not isinstance(axis, int):
-            raise TypeError(f'axes should be integers, not {type(axis)}')
+            raise TypeError(f'The axes should be integers, not {type(axis)}')
         if not -ndim <= axis < ndim:
-            raise ValueError(f'axis {axis} is out of bounds for array of dimension {ndim}')
+            raise ValueError(f'The axis {axis} is out of bounds for array of dimension {ndim}')
         return axis % ndim

     @staticmethod
@@ -809,7 +809,7 @@ class Validator:
         for items in zip_longest(*reversed_shapes, fillvalue=1):
             max_size = 0 if 0 in items else max(items)
             if any(item not in (1, max_size) for item in items):
-                raise ValueError(f'operands could not be broadcast together with shapes {*shapes,}')
+                raise ValueError(f'The operands could not be broadcast together with shapes {*shapes,}')
             shape_out.appendleft(max_size)
         return tuple(shape_out)
@@ -835,7 +835,7 @@ class Validator:
             type_str += "tuple, "
         if type_list:
             type_str += "list, "
-        raise TypeError(f"Axis should be {type_str}but got {type(axis)}.")
+        raise TypeError(f"The axis should be {type_str}but got {type(axis)}.")

     @staticmethod
     def check_and_canonicalize_axes(axes, ndim):
@@ -846,7 +846,7 @@ class Validator:
             if not isinstance(ax, int):
                 raise TypeError((f"Each axis should be integer, but got {type(ax)} in {axes}."))
             if not -ndim <= ax < ndim:
-                raise ValueError(f'axis {ax} is out of bounds for array of dimension {ndim}')
+                raise ValueError(f'The axis {ax} is out of bounds for array of dimension {ndim}')
             ax = ax if ax >= 0 else ax + ndim
             new_axes += (ax,)
         if any(new_axes.count(el) > 1 for el in new_axes):
@@ -956,7 +956,7 @@ def args_type_check(*type_args, **type_kwargs):
             for name, value in argument_dict.items():
                 if name in bound_types:
                     if value is not None and not isinstance(value, bound_types[name]):
-                        raise TypeError('Argument {} must be {}'.format(name, bound_types[name]))
+                        raise TypeError('The argument {} must be {}'.format(name, bound_types[name]))
             return func(*args, **kwargs)
         return wrapper
diff --git a/mindspore/context.py b/mindspore/context.py
index 92c52aaf00e..c6535c322ec 100644
--- a/mindspore/context.py
+++ b/mindspore/context.py
@@ -141,7 +141,7 @@ class _Context:
     Note:
         Create a context through instantiating Context object is not recommended.
-        should use context() to get the context since Context is singleton.
+        Use context() to get the context since Context is a singleton.
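
> Review note: the `@@ -809` hunk only shows the loop body of the broadcast check. For context, a self-contained sketch of the surrounding shape-inference logic built around the same `zip_longest(..., fillvalue=1)` pattern; the function name and setup lines are assumptions, only the loop and the reworded error text come from the diff:

```python
from collections import deque
from itertools import zip_longest


def infer_out_shape(*shapes):
    """Return the NumPy-style broadcast shape, or raise on incompatible operands."""
    shape_out = deque()
    reversed_shapes = map(reversed, shapes)
    # Walk the shapes from the trailing dimension, padding missing dims with 1.
    for items in zip_longest(*reversed_shapes, fillvalue=1):
        max_size = 0 if 0 in items else max(items)
        if any(item not in (1, max_size) for item in items):
            raise ValueError(f'The operands could not be broadcast together with shapes {*shapes,}')
        shape_out.appendleft(max_size)
    return tuple(shape_out)


print(infer_out_shape((2, 1, 3), (4, 3)))  # (2, 4, 3)
infer_out_shape((2, 3), (4, 3))            # ValueError: could not be broadcast
```
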
""" _instance = None _instance_lock = threading.Lock() diff --git a/mindspore/nn/cell.py b/mindspore/nn/cell.py index 068555b77a0..96eff09c7e2 100755 --- a/mindspore/nn/cell.py +++ b/mindspore/nn/cell.py @@ -244,11 +244,11 @@ class Cell(Cell_): @pipeline_stage.setter def pipeline_stage(self, value): if isinstance(value, bool): - raise TypeError("'pipeline_stage' must be int type, but got bool.") + raise TypeError("'pipeline_stage' must be a int type, but got bool.") if not isinstance(value, int): - raise TypeError("'pipeline_stage' must be int type.") + raise TypeError("'pipeline_stage' must be a int type.") if value < 0: - raise TypeError("'pipeline_stage' can not less than 0.") + raise TypeError("'pipeline_stage' can not be less than 0.") self._pipeline_stage = value for item in self.trainable_params(): item.add_pipeline_stage(value) diff --git a/mindspore/nn/loss/loss.py b/mindspore/nn/loss/loss.py index 861b73fa032..bea6e9b5336 100644 --- a/mindspore/nn/loss/loss.py +++ b/mindspore/nn/loss/loss.py @@ -710,7 +710,6 @@ def _check_ndim_multi(logits_dim, label_dim, prim_name=None): if label_dim < 2: raise ValueError(f"{msg_prefix} Label dimension should be greater than 1, but got {label_dim}.") - @constexpr def _check_weights(weight_shape, label_shape, prim_name=None): """Internal function, used to check whether the reduced shape meets the requirements.""" @@ -1293,7 +1292,6 @@ def _check_ndim(logits_nidm, labels_ndim, prime_name=None): raise ValueError(f"{msg_prefix} dimensions of 'logits' and 'labels' must be equal, but got" f"dimension of 'logits' {logits_nidm} and dimension of 'labels' {labels_ndim}.") - @constexpr def _check_channel_and_shape(logits, labels, prime_name=None): '''Internal function, used to check whether the channels or shape of logits and labels meets the requirements.''' diff --git a/mindspore/train/serialization.py b/mindspore/train/serialization.py index a910cf62145..f479ab49e90 100644 --- a/mindspore/train/serialization.py +++ b/mindspore/train/serialization.py @@ -96,7 +96,7 @@ def _update_param(param, new_param, strict_load): if param.data.shape != new_param.data.shape: if not _special_process_par(param, new_param): logger.error("Failed to combine the net and the parameters for param %s.", param.name) - msg = ("Net parameters {} shape({}) different from parameter_dict's({})" + msg = ("Net parameters {} shape({}) are different from parameter_dict's({})" .format(param.name, param.data.shape, new_param.data.shape)) raise RuntimeError(msg) @@ -107,7 +107,7 @@ def _update_param(param, new_param, strict_load): return logger.error("Failed to combine the net and the parameters for param %s.", param.name) - msg = ("Net parameters {} type({}) different from parameter_dict's({})" + msg = ("Net parameters {} type({}) are different from parameter_dict's({})" .format(param.name, param.data.dtype, new_param.data.dtype)) raise RuntimeError(msg) @@ -124,7 +124,7 @@ def _update_param(param, new_param, strict_load): elif isinstance(new_param.data, Tensor) and not isinstance(param.data, Tensor): logger.error("Failed to combine the net and the parameters for param %s.", param.name) - msg = ("Net parameters {} type({}) different from parameter_dict's({})" + msg = ("Net parameters {} type({}) are different from parameter_dict's({})" .format(param.name, type(param.data), type(new_param.data))) raise RuntimeError(msg) @@ -572,11 +572,11 @@ def load_param_into_net(net, parameter_dict, strict_load=False): def _load_dismatch_prefix_params(net, parameter_dict, param_not_load, strict_load): - 
"""When some net parameter did not load, try to continue load.""" + """When some net parameter did not load, try to continue loading.""" prefix_name = "" longest_name = param_not_load[0] while prefix_name != longest_name and param_not_load: - logger.debug("Count: {} parameters has not been loaded, try to load continue.".format(len(param_not_load))) + logger.debug("Count: {} parameters has not been loaded, try to continue loading.".format(len(param_not_load))) prefix_name = longest_name for net_param_name in param_not_load: for dict_name in parameter_dict: @@ -628,7 +628,7 @@ def _get_merged_param_data(net, param_name, param_data, integrated_save): """ layout = net.parameter_layout_dict[param_name] if len(layout) < 6: - logger.info("layout dict does not contain the key %s", param_name) + logger.info("The layout dict does not contain the key %s", param_name) return param_data dev_mat = layout[0] @@ -645,7 +645,7 @@ def _get_merged_param_data(net, param_name, param_data, integrated_save): if param_name in net.parallel_parameter_merge_net_dict: allgather_net = net.parallel_parameter_merge_net_dict[param_name] else: - logger.info("need to create allgather net for %s", param_name) + logger.info("Need to create allgather net for %s", param_name) if integrated_save: if context.get_auto_parallel_context("pipeline_stages") > 1: raise RuntimeError("Pipeline Parallel don't support Integrated save checkpoint now.") @@ -739,7 +739,7 @@ def export(net, *inputs, file_name, file_format='AIR', **kwargs): net = _quant_export(net, *inputs, file_format=file_format, **kwargs) if 'enc_key' in kwargs.keys(): if file_format != 'MINDIR': - raise ValueError(f"enc_key can be passed in only when file_format=='MINDIR', but got {file_format}") + raise ValueError(f"The enc_key can be passed in only when file_format=='MINDIR', but got {file_format}") enc_key = Validator.check_isinstance('enc_key', kwargs['enc_key'], bytes) enc_mode = 'AES-GCM' @@ -908,8 +908,8 @@ def _save_mindir_together(net_dict, model, file_name, is_encrypt, **kwargs): param_data = net_dict[param_name].data.asnumpy().tobytes() param_proto.raw_data = param_data else: - logger.error("The parameter %s in the graph are not in the network.", param_name) - raise ValueError("The parameter in the graph must in the network.") + logger.error("The parameter %s in the graph is not in the network.", param_name) + raise ValueError("The parameter in the graph must be in the network.") if not file_name.endswith('.mindir'): file_name += ".mindir" current_path = os.path.abspath(file_name) @@ -968,7 +968,7 @@ def _quant_export(network, *inputs, file_format, **kwargs): quant_mode = kwargs['quant_mode'] if quant_mode not in quant_mode_formats: - raise KeyError(f'Quant_mode input is wrong, Please choose the right mode of the quant_mode.') + raise KeyError(f'The quant_mode input is wrong, Please choose the right mode of the quant_mode.') if quant_mode == 'NONQUANT': return network quant_net = copy.deepcopy(network) @@ -1049,7 +1049,7 @@ def parse_print(print_file_name): pb_content = f.read() print_list.ParseFromString(pb_content) except BaseException as e: - logger.error("Failed to read the print file %s, please check the correct of the file.", print_file_name) + logger.error("Failed to read the print file %s, please check the correctness of the file.", print_file_name) raise ValueError(e.__str__()) tensor_list = []