From 7065684a841fa39eb1de17ce53a8da9526b53f83 Mon Sep 17 00:00:00 2001
From: linqingke
Date: Fri, 18 Mar 2022 16:48:43 +0800
Subject: [PATCH] pclint clean code.

---
 .../ccsrc/backend/common/session/anf_runtime_algorithm.cc | 4 ++--
 mindspore/ccsrc/kernel/ascend_kernel_mod.h                | 2 +-
 .../ascend/hal/device/executor/aicpu_ext_info_handle.cc   | 6 +++---
 .../device/ascend/hal/device/ge_runtime/task_info.h       | 2 +-
 .../ascend/hal/hccl_adapter/all_to_all_v_calc_param.cc    | 4 ++--
 .../ccsrc/plugin/device/ascend/kernel/rts/label_set.cc    | 2 +-
 .../ascend/kernel/tbe/tbe_json/fusion_tbe_json_creator.cc | 3 +--
 .../ascend/optimizer/format_type/set_fracz_group_attr.cc  | 2 +-
 .../device/ascend/optimizer/ir_fission/split_fission.cc   | 8 ++++----
 .../ascend/optimizer/mindir/dropout_unify_mindir.cc       | 2 +-
 10 files changed, 17 insertions(+), 18 deletions(-)

diff --git a/mindspore/ccsrc/backend/common/session/anf_runtime_algorithm.cc b/mindspore/ccsrc/backend/common/session/anf_runtime_algorithm.cc
index 2a736e93f1f..87854dc868a 100644
--- a/mindspore/ccsrc/backend/common/session/anf_runtime_algorithm.cc
+++ b/mindspore/ccsrc/backend/common/session/anf_runtime_algorithm.cc
@@ -928,14 +928,14 @@ bool AnfRuntimeAlgorithm::IsIndependentNode(const CNodePtr &node) {
 static inline void GetMaxOrDefaultShape(const std::vector<int64_t> &max_shape, std::vector<size_t> *device_shape) {
   if (!max_shape.empty()) {
-    std::transform(max_shape.begin(), max_shape.end(), device_shape->begin(), IntToSize);
+    (void)std::transform(max_shape.begin(), max_shape.end(), device_shape->begin(), IntToSize);
   } else {
     constexpr size_t kDefaultValueForDynamicDim = 16;
     auto tmp_shape = *device_shape;
     auto ConvertNegOneToDefalut = [&kDefaultValueForDynamicDim](size_t size) {
       return static_cast<int64_t>(size) < 0 ? kDefaultValueForDynamicDim : size;
     };
-    std::transform(tmp_shape.begin(), tmp_shape.end(), device_shape->begin(), ConvertNegOneToDefalut);
+    (void)std::transform(tmp_shape.begin(), tmp_shape.end(), device_shape->begin(), ConvertNegOneToDefalut);
   }
 }
diff --git a/mindspore/ccsrc/kernel/ascend_kernel_mod.h b/mindspore/ccsrc/kernel/ascend_kernel_mod.h
index c68b53ca3a4..f6b669ed2c3 100644
--- a/mindspore/ccsrc/kernel/ascend_kernel_mod.h
+++ b/mindspore/ccsrc/kernel/ascend_kernel_mod.h
@@ -35,7 +35,7 @@ class AscendKernelMod : public KernelMod {
   explicit AscendKernelMod(const AnfNodePtr &anf_node_ptr) : KernelMod(anf_node_ptr) {}
   virtual std::vector<TaskInfoPtr> GenTask(const std::vector<AddressPtr> &, const std::vector<AddressPtr> &,
                                            const std::vector<AddressPtr> &, uint32_t) = 0;
-  uint32_t block_dim() { return block_dim_; }
+  uint32_t block_dim() const { return block_dim_; }
   uint32_t stream_id() const { return stream_id_; }
   virtual bool NeedDump() {
 #ifndef ENABLE_SECURITY
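Note: the two files above show the patch's core PC-lint fixes. Casting a discarded return value to void (commonly reported as PC-lint warning 534, ignoring a function's return value) records that the result is intentionally unused: std::transform returns an iterator one past the last element written, and nothing here reads it. Declaring block_dim() const matches stream_id() and lets the accessor be called through const references. A minimal self-contained sketch of both patterns follows; it is illustrative code, not part of the patch, and all names in it are invented:

#include <algorithm>
#include <cstdint>
#include <vector>

class KernelInfo {
 public:
  // A read-only accessor should be const so it can be called on const
  // objects and satisfies const-correctness lint checks.
  uint32_t block_dim() const { return block_dim_; }

 private:
  uint32_t block_dim_ = 1;
};

int main() {
  std::vector<int64_t> max_shape{4, -1, 8};
  std::vector<size_t> device_shape(max_shape.size());
  // std::transform returns the output iterator past the last written
  // element; the (void) cast documents that we deliberately drop it.
  (void)std::transform(max_shape.begin(), max_shape.end(), device_shape.begin(),
                       [](int64_t v) { return v < 0 ? size_t{16} : static_cast<size_t>(v); });
  return 0;
}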
diff --git a/mindspore/ccsrc/plugin/device/ascend/hal/device/executor/aicpu_ext_info_handle.cc b/mindspore/ccsrc/plugin/device/ascend/hal/device/executor/aicpu_ext_info_handle.cc
index 81fe6e5e261..6144f4cdb3c 100644
--- a/mindspore/ccsrc/plugin/device/ascend/hal/device/executor/aicpu_ext_info_handle.cc
+++ b/mindspore/ccsrc/plugin/device/ascend/hal/device/executor/aicpu_ext_info_handle.cc
@@ -120,7 +120,7 @@ bool AicpuExtInfoHandler::ParseExtInputShape(AicpuExtInfo *aicpu_ext_info) {
   auto input = reinterpret_cast<AicpuShapeAndType *>(aicpu_ext_info->infoMsg);

   for (uint32_t index = 0; index < input_num_; ++index) {
-    input_shape_and_type_.emplace_back(&input[index]);
+    (void)input_shape_and_type_.emplace_back(&input[index]);
   }
   MS_LOG(INFO) << "Node:" << node_name_ << " parse ext input shape success infoLen=" << aicpu_ext_info->infoLen;
   return true;
@@ -138,7 +138,7 @@ bool AicpuExtInfoHandler::ParseExtOutputShape(AicpuExtInfo *aicpu_ext_info) {
   auto output = reinterpret_cast<AicpuShapeAndType *>(aicpu_ext_info->infoMsg);
   for (uint32_t index = 0; index < output_num_; ++index) {
-    output_shape_and_type_.emplace_back(&output[index]);
+    (void)output_shape_and_type_.emplace_back(&output[index]);
   }
   MS_LOG(INFO) << "Node:" << node_name_ << " parse ext output shape success infoLen=" << aicpu_ext_info->infoLen;
   return true;
@@ -236,7 +236,7 @@ void AicpuExtInfoHandler::GetShapeAndType(NotNull<const AicpuShapeAndType *> sha
     if (tmpDim == kDimEndFlag) {
       break;
     }
-    shape->emplace_back(tmpDim);
+    (void)shape->emplace_back(tmpDim);
     MS_LOG(DEBUG) << "Debug tmpDim:" << tmpDim;
   }
diff --git a/mindspore/ccsrc/plugin/device/ascend/hal/device/ge_runtime/task_info.h b/mindspore/ccsrc/plugin/device/ascend/hal/device/ge_runtime/task_info.h
index 18b1c9bbfff..90b77017e70 100644
--- a/mindspore/ccsrc/plugin/device/ascend/hal/device/ge_runtime/task_info.h
+++ b/mindspore/ccsrc/plugin/device/ascend/hal/device/ge_runtime/task_info.h
@@ -323,7 +323,7 @@ class MemcpyAsyncTaskInfo : public TaskInfo {
   uint64_t dst_max_;
   void *src_;
   uint64_t count_;
-  int32_t kind_;
+  uint32_t kind_;
 };

 class StreamSwitchTaskInfo : public TaskInfo {
diff --git a/mindspore/ccsrc/plugin/device/ascend/hal/hccl_adapter/all_to_all_v_calc_param.cc b/mindspore/ccsrc/plugin/device/ascend/hal/hccl_adapter/all_to_all_v_calc_param.cc
index 57f1702739d..fb88abef357 100644
--- a/mindspore/ccsrc/plugin/device/ascend/hal/hccl_adapter/all_to_all_v_calc_param.cc
+++ b/mindspore/ccsrc/plugin/device/ascend/hal/hccl_adapter/all_to_all_v_calc_param.cc
@@ -115,12 +115,12 @@ void AllToAllvCalcParam::CalcMemOffset(const std::vector<size_t> &mem_sizes, con
     if (rank_ids[i] < 0 || static_cast<size_t>(rank_ids[i]) >= rank_size_) {
       MS_LOG(EXCEPTION) << "Invalid rank id " << rank_ids[i] << " at index " << i << " as rank size " << rank_size_;
     }
-    rank_id_map.emplace(rank_ids[i], i);
+    (void)rank_id_map.emplace(rank_ids[i], i);
   }
   size_t offset = 0;
   for (uint32_t i = 0; i < rank_size_; ++i) {
-    (*displs)[i] = offset;
+    (*displs)[i] = SizeToLong(offset);
     auto iter = rank_id_map.find(i);
     if (iter != rank_id_map.end()) {
       (*counts)[i] = real_sizes[iter->second];
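Note: the task_info.h and all_to_all_v_calc_param.cc hunks fix signed/unsigned mismatches rather than discarded returns. kind_ becomes uint32_t so the field matches the unsigned values stored in it, and the displacement assignment goes through SizeToLong, MindSpore's size_t-to-int64_t conversion helper, instead of an implicit conversion the linter would flag. Below is a standalone sketch in the same spirit; ToInt64 is a hypothetical stand-in for SizeToLong, not the actual MindSpore implementation:

#include <cstdint>
#include <limits>
#include <stdexcept>
#include <vector>

// Hypothetical checked size_t -> int64_t conversion: fail loudly
// instead of silently wrapping when the value does not fit.
int64_t ToInt64(size_t v) {
  if (v > static_cast<size_t>(std::numeric_limits<int64_t>::max())) {
    throw std::out_of_range("size_t value does not fit in int64_t");
  }
  return static_cast<int64_t>(v);
}

int main() {
  std::vector<int64_t> displs(4, 0);
  size_t offset = 0;
  for (size_t i = 0; i < displs.size(); ++i) {
    displs[i] = ToInt64(offset);  // explicit, checked conversion
    offset += 16;                 // next rank's displacement
  }
  return 0;
}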
diff --git a/mindspore/ccsrc/plugin/device/ascend/kernel/rts/label_set.cc b/mindspore/ccsrc/plugin/device/ascend/kernel/rts/label_set.cc
index 19873f1e1b3..e14d1427b09 100644
--- a/mindspore/ccsrc/plugin/device/ascend/kernel/rts/label_set.cc
+++ b/mindspore/ccsrc/plugin/device/ascend/kernel/rts/label_set.cc
@@ -55,7 +55,7 @@ std::vector<TaskInfoPtr> LabelSetKernel::GenTask(const std::vector<AddressPtr> &
   std::vector<TaskInfoPtr> task_info_list;
   std::shared_ptr<LabelSetTaskInfo> task_info_ptr = std::make_shared<LabelSetTaskInfo>(unique_name_, stream_id, label_);
   MS_EXCEPTION_IF_NULL(task_info_ptr);
-  task_info_list.emplace_back(task_info_ptr);
+  (void)task_info_list.emplace_back(task_info_ptr);
   return task_info_list;
 }
 }  // namespace kernel
diff --git a/mindspore/ccsrc/plugin/device/ascend/kernel/tbe/tbe_json/fusion_tbe_json_creator.cc b/mindspore/ccsrc/plugin/device/ascend/kernel/tbe/tbe_json/fusion_tbe_json_creator.cc
index 3e0803f6c55..b8ae04bc039 100644
--- a/mindspore/ccsrc/plugin/device/ascend/kernel/tbe/tbe_json/fusion_tbe_json_creator.cc
+++ b/mindspore/ccsrc/plugin/device/ascend/kernel/tbe/tbe_json/fusion_tbe_json_creator.cc
@@ -301,8 +301,7 @@ std::vector<size_t> FusionBuildTbeJsonCreator::GetDescOutputIndex(const std::vec
   return desc_output_index;
 }

-bool FusionBuildTbeJsonCreator::AttrsJsonPostProcessing(const AnfNodePtr &anf_node, const OpInfoPtr &op_info_ptr,
-                                                        nlohmann::json *attrs_json) {
+bool FusionBuildTbeJsonCreator::AttrsJsonPostProcessing(const AnfNodePtr &, const OpInfoPtr &, nlohmann::json *) {
   return true;
 }
diff --git a/mindspore/ccsrc/plugin/device/ascend/optimizer/format_type/set_fracz_group_attr.cc b/mindspore/ccsrc/plugin/device/ascend/optimizer/format_type/set_fracz_group_attr.cc
index 2403dfb0fe3..b74fa9e4a7e 100644
--- a/mindspore/ccsrc/plugin/device/ascend/optimizer/format_type/set_fracz_group_attr.cc
+++ b/mindspore/ccsrc/plugin/device/ascend/optimizer/format_type/set_fracz_group_attr.cc
@@ -87,7 +87,7 @@ bool HasFraczGroupAttrAndSet(const AnfNodePtr &node, size_t index, int64_t group
   if (common::AnfAlgo::HasNodeAttr(kAttrFracZGroupIdx, cnode)) {
     fz_group_idx = common::AnfAlgo::GetNodeAttr<std::vector<int64_t>>(cnode, kAttrFracZGroupIdx);
     if (input_num > fz_group_idx.size()) {
-      fz_group_idx.insert(fz_group_idx.begin(), input_num - fz_group_idx.size(), 1);
+      (void)fz_group_idx.insert(fz_group_idx.begin(), input_num - fz_group_idx.size(), 1);
     }
     if (fz_group_idx[index] == 1) {
       fz_group_idx[index] = groups;
diff --git a/mindspore/ccsrc/plugin/device/ascend/optimizer/ir_fission/split_fission.cc b/mindspore/ccsrc/plugin/device/ascend/optimizer/ir_fission/split_fission.cc
index 53334f3770e..05bf99b9f83 100644
--- a/mindspore/ccsrc/plugin/device/ascend/optimizer/ir_fission/split_fission.cc
+++ b/mindspore/ccsrc/plugin/device/ascend/optimizer/ir_fission/split_fission.cc
@@ -133,7 +133,7 @@ AnfNodePtr SplitFission::DoFission(const FuncGraphPtr &func_graph, const CNodePt
   int64_t cur_output_index = 0;
   while (num_split - cur_output_index > divisor) {
     auto tuple_getitem = CreateTupleGetItem(func_graph, base_splitv, nodes_num);
-    base_splitv_outputs.push_back(tuple_getitem);
+    (void)base_splitv_outputs.push_back(tuple_getitem);
     CNodePtr new_splitv = CreateSplitVNode(func_graph, tuple_getitem);
     std::vector<int64_t> size_splits_new(size_splits.begin() + nodes_num * divisor,
                                          size_splits.begin() + (nodes_num + 1) * divisor);
@@ -154,7 +154,7 @@ AnfNodePtr SplitFission::DoFission(const FuncGraphPtr &func_graph, const CNodePt
     auto last_node_num_split = num_split - cur_output_index;
     if (last_node_num_split > 1) {
       auto tuple_getitem = CreateTupleGetItem(func_graph, base_splitv, nodes_num);
-      base_splitv_outputs.push_back(tuple_getitem);
+      (void)base_splitv_outputs.push_back(tuple_getitem);
       CNodePtr new_splitv = CreateSplitVNode(func_graph, tuple_getitem);
       std::vector<int64_t> size_splits_new_last(size_splits.begin() + nodes_num * divisor, size_splits.end());
       SetAttrForSplitVNode(new_splitv, size_splits_new_last, split_dim, last_node_num_split);
@@ -168,9 +168,9 @@ AnfNodePtr SplitFission::DoFission(const FuncGraphPtr &func_graph, const CNodePt
       size_splits_base.emplace_back(last_split_size);
     } else {
       auto tuple_getitem = CreateTupleGetItem(func_graph, base_splitv, nodes_num);
-      base_splitv_outputs.push_back(tuple_getitem);
+      (void)base_splitv_outputs.push_back(tuple_getitem);
       (void)make_tuple_inputs.emplace_back(tuple_getitem);
-      size_splits_base.emplace_back(size_splits[size_splits.size() - 1]);
+      (void)size_splits_base.emplace_back(size_splits[size_splits.size() - 1]);
     }
     nodes_num++;
   }
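Note: most of the hunks above apply the same discarded-return-value rule to container modifiers, where the ignored result is easy to miss: since C++17, vector::emplace_back returns a reference to the new element, map::emplace returns a std::pair of iterator and bool, and vector::insert returns an iterator. (push_back returns void, so the casts on base_splitv_outputs.push_back are redundant but harmless and keep the style uniform.) A compilable sketch of the rule, with invented names:

#include <map>
#include <vector>

int main() {
  std::vector<int> values;
  std::map<int, int> rank_id_map;

  (void)values.emplace_back(42);           // C++17: returns a reference
  (void)rank_id_map.emplace(0, 1);         // returns pair<iterator, bool>
  (void)values.insert(values.begin(), 7);  // returns an iterator
  return 0;
}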
diff --git a/mindspore/ccsrc/plugin/device/ascend/optimizer/mindir/dropout_unify_mindir.cc b/mindspore/ccsrc/plugin/device/ascend/optimizer/mindir/dropout_unify_mindir.cc
index de541324a8b..5765318a8a4 100644
--- a/mindspore/ccsrc/plugin/device/ascend/optimizer/mindir/dropout_unify_mindir.cc
+++ b/mindspore/ccsrc/plugin/device/ascend/optimizer/mindir/dropout_unify_mindir.cc
@@ -162,7 +162,7 @@ CNodePtr CreateDynamicShapeCNode(const FuncGraphPtr &func_graph, const AnfNodePt
 CNodePtr CreateDropoutGenMaskCNode(const FuncGraphPtr &func_graph, const CNodePtr &dropout,
                                    const ValueNodePtr &keep_prob_value, const abstract::ShapePtr &input_shape,
-                                   const bool use_v3, const PatternProcessPass &pass) {
+                                   const bool use_v3, const PatternProcessPass &) {
   MS_EXCEPTION_IF_NULL(func_graph);
   MS_EXCEPTION_IF_NULL(dropout);
   MS_EXCEPTION_IF_NULL(input_shape);
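Note: fusion_tbe_json_creator.cc and dropout_unify_mindir.cc use the other standard unused-entity fix: when a shared signature requires parameters the body never reads, the parameter names are dropped. An unnamed parameter keeps the interface unchanged while telling both the compiler and the linter that the value is intentionally unused. A small sketch of the pattern; the names are illustrative, not from the patch:

#include <string>

// The interface demands three parameters; this implementation only
// needs the first. Unnamed parameters silence unused-parameter
// diagnostics without changing the signature.
bool PostProcess(const std::string &node_name, int /*op_info*/, double *) {
  return !node_name.empty();
}

int main() {
  double unused = 0.0;
  return PostProcess("Dropout", 0, &unused) ? 0 : 1;
}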