diff --git a/mindspore/python/mindspore/common/api.py b/mindspore/python/mindspore/common/api.py
index e7da63f5668..f9e4c8746e1 100644
--- a/mindspore/python/mindspore/common/api.py
+++ b/mindspore/python/mindspore/common/api.py
@@ -148,7 +148,7 @@ def __get_compile_cache_dep_files(file_path, compile_cache_dep_files, pkg):
                 whole_module = n.name
             else:
                 whole_module = module_name
-                if not n.name is None:
+                if n.name is not None:
                     whole_module += "." + n.name
             try:
                 module_spec = importlib.util.find_spec(whole_module, pkg)
@@ -163,7 +163,7 @@ def __get_compile_cache_dep_files(file_path, compile_cache_dep_files, pkg):
             else:
                 continue
             # Exclude the installed modules.
-            if not _in_sys_path(dep_file_path) and not dep_file_path in compile_cache_dep_files:
+            if not _in_sys_path(dep_file_path) and dep_file_path not in compile_cache_dep_files:
                 logger.debug(f"dependent file path: {dep_file_path}")
                 compile_cache_dep_files.append(dep_file_path)
                 __get_compile_cache_dep_files(dep_file_path, compile_cache_dep_files, module.__package__)
@@ -184,8 +184,8 @@ def _get_compile_cache_dep_files():
 def _restore_mutable_attr(args_list, compile_args):
     """Restore the mutable attr for every arg."""
     new_compile_args = ()
-    for idx in range(len(args_list)):
-        if hasattr(args_list[idx], "__ms_mutable__") and getattr(args_list[idx], "__ms_mutable__"):
+    for idx, arg in enumerate(args_list):
+        if hasattr(arg, "__ms_mutable__") and getattr(arg, "__ms_mutable__"):
             new_compile_args += (mutable(compile_args[idx]),)
         else:
             new_compile_args += (compile_args[idx],)
diff --git a/mindspore/python/mindspore/common/dtype.py b/mindspore/python/mindspore/common/dtype.py
index 180ae8b3435..6fde2ee0103 100644
--- a/mindspore/python/mindspore/common/dtype.py
+++ b/mindspore/python/mindspore/common/dtype.py
@@ -190,7 +190,7 @@ def pytype_to_dtype(obj):
     if not isinstance(obj, type):
         raise TypeError("For 'pytype_to_dtype', the argument 'obj' must be a python type object,"
                         "such as int, float, str, etc. But got type {}.".format(type(obj)))
-    elif obj in _simple_types:
+    if obj in _simple_types:
         return _simple_types[obj]
     raise NotImplementedError(f"The python type {obj} cannot be converted to MindSpore type.")
 
diff --git a/mindspore/python/mindspore/common/tensor.py b/mindspore/python/mindspore/common/tensor.py
index 0b8ee6cd42d..e31ba3dd691 100644
--- a/mindspore/python/mindspore/common/tensor.py
+++ b/mindspore/python/mindspore/common/tensor.py
@@ -3346,7 +3346,7 @@ class Tensor(Tensor_):
         res = tensor_operator_registry.get('reduce_sum')(prod.astype(mstype.float32), -1)
 
         begin = ()
-        for i in range(ndim - 2):
+        for _ in range(ndim - 2):
             begin += (0,)
         last_dim_begin = max(0, -offset)
         begin += (last_dim_begin,)
diff --git a/mindspore/python/mindspore/train/serialization.py b/mindspore/python/mindspore/train/serialization.py
index 4fd3acaa917..3e00a855c2e 100644
--- a/mindspore/python/mindspore/train/serialization.py
+++ b/mindspore/python/mindspore/train/serialization.py
@@ -1449,9 +1449,9 @@ def _merge_param_with_strategy(sliced_data, parameter_name, strategy, is_even):
             tensor_slices_new_inner = []
             for j in range(ele_count):
                 new_tensor = tensor_slices_new[j * tensor_strategy[dim_len - 1 - i]]
-                for l in range(j * tensor_strategy[dim_len - 1 - i] + 1,
+                for k in range(j * tensor_strategy[dim_len - 1 - i] + 1,
                                (j + 1) * tensor_strategy[dim_len - 1 - i]):
-                    new_tensor = np.concatenate((new_tensor, tensor_slices_new[l]), axis=dim_len - 1 - i)
+                    new_tensor = np.concatenate((new_tensor, tensor_slices_new[k]), axis=dim_len - 1 - i)
                 tensor_slices_new_inner.insert(len(tensor_slices_new_inner), np.array(new_tensor))
             tensor_slices_new = tensor_slices_new_inner
         merged_tensor = Tensor(tensor_slices_new[0])